minting 2.2.5__tar.gz → 2.2.7__tar.gz
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- {minting-2.2.5 → minting-2.2.7}/Minting/__init__.py +1 -1
- minting-2.2.7/Minting/client.py +489 -0
- {minting-2.2.5 → minting-2.2.7}/PKG-INFO +1 -1
- {minting-2.2.5 → minting-2.2.7}/minting.egg-info/PKG-INFO +1 -1
- {minting-2.2.5 → minting-2.2.7}/setup.py +1 -1
- minting-2.2.5/Minting/client.py +0 -497
- {minting-2.2.5 → minting-2.2.7}/minting.egg-info/SOURCES.txt +0 -0
- {minting-2.2.5 → minting-2.2.7}/minting.egg-info/dependency_links.txt +0 -0
- {minting-2.2.5 → minting-2.2.7}/minting.egg-info/requires.txt +0 -0
- {minting-2.2.5 → minting-2.2.7}/minting.egg-info/top_level.txt +0 -0
- {minting-2.2.5 → minting-2.2.7}/setup.cfg +0 -0
|
@@ -0,0 +1,489 @@
|
|
|
1
|
+
|
|
2
|
+
import requests
|
|
3
|
+
import pandas as pd
|
|
4
|
+
import io
|
|
5
|
+
import time
|
|
6
|
+
import os
|
|
7
|
+
import sys
|
|
8
|
+
import hashlib
|
|
9
|
+
from datetime import datetime
|
|
10
|
+
from pymongo import MongoClient
|
|
11
|
+
from colorama import Fore, Style, init as colorama_init
|
|
12
|
+
|
|
13
|
+
colorama_init(autoreset=True)
|
|
14
|
+
|
|
15
|
+
# ----------------- Database Layer -----------------
class Database:
    """Thin wrapper around a MongoDB connection.

    Connects eagerly, forces a round-trip so a bad URI fails at
    construction time, and exposes the collections the SDK uses as
    plain attributes.
    """

    # (collection attribute name, MongoDB collection name)
    _COLLECTIONS = (
        ("users", "users"),
        ("access_tokens", "accesstokens"),
        ("user_plans", "userplans"),
        ("plans", "plans"),
    )

    def __init__(self, mongo_uri, db_name):
        try:
            self.client = MongoClient(
                mongo_uri,
                serverSelectionTimeoutMS=50000,
                connectTimeoutMS=100000,
            )
            # server_info() performs a real round-trip; an unreachable or
            # misconfigured server raises here instead of on first query.
            self.client.server_info()
            self.db = self.client[db_name]
        except Exception as e:
            print(f"❌ Connection Error: {e}")
            raise

        # Bind the collections used by the token / billing layer.
        for attr_name, collection_name in self._COLLECTIONS:
            setattr(self, attr_name, self.db[collection_name])
|
|
34
|
+
|
|
35
|
+
|
|
36
|
+
# ----------------- Token Manager -----------------
class TokenManager:
    """Validates access tokens and manages per-plan prediction credits.

    Tokens are stored server-side as SHA-256 digests, so the raw token
    is hashed once at construction and every lookup uses the digest.
    """

    def __init__(self, db: Database, raw_token: str):
        self.db = db
        self.raw_token = raw_token.strip()
        self.hashed_token = self._hash_token(self.raw_token)

    def _hash_token(self, token: str) -> str:
        # Hex digest of SHA-256 over the UTF-8 token bytes.
        return hashlib.sha256(token.encode("utf-8")).hexdigest()

    def _find_active_plan(self):
        """Resolve the active token document and its unexpired plan.

        Returns a ``(token_doc, plan_doc)`` pair; either element is
        ``None`` when the corresponding document is missing, inactive,
        or (for the plan) expired.
        """
        token_doc = self.db.access_tokens.find_one(
            {"token": self.hashed_token, "isActive": True}
        )
        if not token_doc:
            return None, None
        plan_doc = self.db.user_plans.find_one({
            "userId": token_doc["userId"],
            "isActive": True,
            "expiresAt": {"$gte": datetime.utcnow()},
        })
        return token_doc, plan_doc

    def check_token_and_credits(self):
        """Return a dict describing whether this token may spend a credit."""
        token_doc, plan_doc = self._find_active_plan()
        if token_doc is None:
            return {"valid": False, "error": "Invalid or inactive access token"}
        if plan_doc is None:
            return {"valid": False, "error": "No active subscription plan found"}

        credits = plan_doc.get("creditsRemaining", 0)
        if credits <= 0:
            return {
                "valid": False,
                "error": "Credits exhausted. Please subscribe to a plan to continue.",
                "creditsRemaining": 0,
            }
        return {
            "valid": True,
            "userId": token_doc["userId"],
            "planId": plan_doc["_id"],
            "creditsRemaining": credits,
        }

    def deduct_credit(self):
        """Consume one credit; return True only if a credit was spent."""
        token_doc, plan_doc = self._find_active_plan()
        if token_doc is None or plan_doc is None:
            return False
        if plan_doc.get("creditsRemaining", 0) <= 0:
            return False

        # The creditsRemaining > 0 filter makes the decrement race-safe:
        # a concurrent request cannot push the balance below zero.
        outcome = self.db.user_plans.update_one(
            {"_id": plan_doc["_id"], "creditsRemaining": {"$gt": 0}},
            {
                "$inc": {"creditsRemaining": -1},
                "$set": {"updatedAt": datetime.utcnow()},
            },
        )
        return outcome.modified_count > 0

    def get_remaining_credits(self):
        """Return the current credit balance, clamped to be non-negative."""
        _, plan_doc = self._find_active_plan()
        if plan_doc is None:
            return 0
        return max(plan_doc.get("creditsRemaining", 0), 0)
|
|
119
|
+
|
|
120
|
+
|
|
121
|
+
# ----------------- Client Class -----------------
class Client:
    """SDK entry point: fetches price predictions from the Mintzy backend.

    Responsibilities:
      * validate the caller's access token and credit balance (MongoDB),
      * POST prediction requests to the HTTP backend,
      * stream-print results and return them as a pandas DataFrame,
      * deduct one credit per successful request.
    """

    # Closed allow-list of symbols the backend can predict; anything else
    # is rejected client-side before a credit is spent.
    SUPPORTED_TICKERS = {
        "ABB", "ACC", "ADANIGREEN", "ADANITOTAL", "APOLLOHOSP",
        "BAJAJHLDNG", "BANDHANBNK", "BERGEPAINT", "BOSCHLTD", "CANBK",
        "CIPLA", "DABUR", "DLF", "DRREDDY", "HAVELLS",
        "HDFCAMC", "ICICIGI", "ICICIPRULI", "INDUSTOWER", "INFOEDGE",
        "JINDALSTEL", "JSWENERGY", "LUPIN", "MARICO", "MOTHERSON",
        "MUTHOOTFIN", "NMDC", "OIL", "PAGEIND", "PIIND",
        "PNB", "RECLTD", "SHREECEM", "SIEMENS", "SRF",
        "TATACHEM", "TATACONSUM", "TATAELXSI", "TORNTPHARM", "TRENT",
        "UBL", "ZOMATO", "ALKEM", "ASTRAL", "AUROPHARMA",
        "COLPAL", "CONCOR", "FEDERALBNK", "LICI", "MRF",
        "NAUKRI", "TORNTPOWER", "TCS", "HDFCBANK", "BHARTIARTL",
        "ICICIBANK", "SBIN", "INFY", "BAJFINANCE",
        "HINDUNILVR", "ITC", "MARUTI", "HCLTECH", "SUNPHARMA",
        "KOTAKBANK", "AXISBANK",
        "ULTRACEMCO", "BAJAJFINSV", "ADANIPORTS", "NTPC", "ONGC",
        "ASIANPAINT",
        "JSWSTEEL", "ADANIPOWER", "WIPRO", "ADANIENT", "POWERGRID",
        "NESTLEIND",
        "COALINDIA", "INDIGO", "HINDZINC", "TATASTEEL", "VEDL",
        "SBILIFE", "EICHERMOT",
        "GRASIM", "HINDALCO", "LTIM", "TVSMOTOR", "DIVISLAB",
        "HDFCLIFE", "PIDILITIND",
        "CHOLAFIN", "BRITANNIA", "AMBUJACEM", "GAIL", "BANKBARODA",
        "GODREJCP",
        "HEROMOTOCO", "TATAPOWER",
    }

    def __init__(
        self,
        access_token=None,
        token=None,
        base_url="http://34.70.223.89:8000/predict",
        # SECURITY: this default ships a live MongoDB URI with embedded
        # credentials inside a public package — anyone installing the
        # package can read/write the billing database directly.  The
        # credentials should be rotated and credit accounting moved
        # server-side; the default is kept only for backward
        # compatibility with existing callers.
        mongo_uri="mongodb+srv://ankitarrow:ankitarrow@cluster0.zcajdur.mongodb.net/?retryWrites=true&w=majority&appName=Cluster0",
        db_name="test",
    ):
        """Create a client.

        The token may be given as ``access_token`` (new keyword) or
        ``token`` (legacy keyword / positional).  Raises ValueError when
        neither is supplied.
        """
        if access_token is None and token is None:
            raise ValueError("Access token must be provided as 'access_token' or 'token'.")

        self.access_token = (access_token or token).strip()
        self.base_url = base_url

        print("=" * 60)
        print(" Initializing Mintzy")
        print("=" * 60)

        self.db = Database(mongo_uri, db_name)
        self.token_manager = TokenManager(self.db, self.access_token)

    # ------------------------------------------------------------------
    # INTERNAL: parse server response → dict { ticker -> list[dict] }
    # ------------------------------------------------------------------
    def _parse_response(self, response_json, tickers, parameters):
        """Normalise the server response into per-ticker row dicts.

        Returns:
            parsed : { ticker: [ {Date, Time, Predicted Price}, ... ] }
            warnings : [ string ]
        """
        parsed = {}
        warnings = []

        if not isinstance(response_json, dict):
            warnings.append(f"Unexpected response type: {type(response_json)}")
            return parsed, warnings

        # Server wraps payload under "result"; tolerate a bare payload too.
        result = response_json.get("result", response_json)

        for ticker in tickers:
            ticker_data = result.get(ticker) if isinstance(result, dict) else None

            if ticker_data is None:
                warnings.append(f"No data returned for {ticker}")
                continue

            if isinstance(ticker_data, str):
                # A plain string at ticker level is a server-side error.
                warnings.append(f"{ticker}: server error — {ticker_data}")
                continue

            ticker_rows = []

            for param in parameters:
                param_data = (
                    ticker_data.get(param)
                    if isinstance(ticker_data, dict)
                    else None
                )

                if param_data is None:
                    warnings.append(f"{param} data missing for {ticker}")
                    continue

                # ── structured dict response ──────────────────────────
                if isinstance(param_data, dict):
                    timestamps = param_data.get("timestamps", [])
                    predicted_prices = param_data.get("predicted_prices", [])
                    dates = param_data.get("dates", [])
                    times = param_data.get("times", [])

                    if not predicted_prices:
                        warnings.append(f"Empty prediction data for {ticker}.{param}")
                        continue

                    for i, price in enumerate(predicted_prices):
                        if price is None:
                            price = 0.0  # keep slot visible rather than silently dropping it

                        # resolve timestamp: prefer ISO timestamps, fall
                        # back to pre-split date/time fields.
                        if i < len(timestamps):
                            ts_str = str(timestamps[i])
                            try:
                                dt = datetime.fromisoformat(ts_str)
                                date_str = dt.strftime("%Y-%m-%d")
                                time_str = dt.strftime("%H:%M:%S")
                            except Exception:
                                date_str = str(dates[i]) if i < len(dates) else ts_str
                                time_str = str(times[i]) if i < len(times) else ""
                        else:
                            date_str = str(dates[i]) if i < len(dates) else ""
                            time_str = str(times[i]) if i < len(times) else ""

                        # FIX: a non-numeric price used to raise and abort
                        # the whole parse; skip the slot with a warning.
                        try:
                            price_value = float(price)
                        except (TypeError, ValueError):
                            warnings.append(
                                f"{ticker}.{param}: non-numeric price at slot {i}: {price!r}"
                            )
                            continue

                        ticker_rows.append({
                            "Date": date_str,
                            "Time": time_str,
                            "Predicted Price": price_value,
                        })

                # ── legacy CSV string response ────────────────────────
                # (FIX: this banner comment was duplicated in the original)
                elif isinstance(param_data, str):
                    for line in param_data.strip().split("\n")[1:]:
                        parts = line.split()
                        if len(parts) >= 3:
                            # FIX: guard float() so one malformed row does
                            # not abort parsing of the whole response.
                            try:
                                price_value = float(parts[2])
                            except ValueError:
                                warnings.append(
                                    f"{ticker}.{param}: malformed CSV row skipped: {line!r}"
                                )
                                continue
                            ticker_rows.append({
                                "Date": parts[0],
                                "Time": parts[1],
                                "Predicted Price": price_value,
                            })

                # ── flat list response (safety net for any unstructured list) ──
                elif isinstance(param_data, list):
                    for i, price in enumerate(param_data):
                        if price is None:
                            continue
                        try:
                            price_value = float(price)
                        except (TypeError, ValueError):
                            warnings.append(
                                f"{ticker}.{param}: non-numeric price at slot {i}: {price!r}"
                            )
                            continue
                        ticker_rows.append({
                            "Date": "",
                            "Time": f"slot_{i}",
                            "Predicted Price": price_value,
                        })
                else:
                    warnings.append(
                        f"Unsupported payload type for {ticker}.{param}: {type(param_data)}"
                    )

            if ticker_rows:
                parsed[ticker] = ticker_rows
            else:
                warnings.append(f"No valid rows parsed for {ticker}")

        return parsed, warnings

    # ------------------------------------------------------------------
    # INTERNAL: print one ticker's block immediately (avoids buffering)
    # ------------------------------------------------------------------
    def _print_ticker_block(self, ticker, rows, col_widths, spacing, divider_width, param=""):
        """Print a single ticker's rows directly to stdout and flush."""

        thin_div = "-" * divider_width

        def fmt(values):
            return spacing.join(
                f"{str(v):<{w}}" for v, w in zip(values, col_widths)
            )

        for row in rows:
            # "rrr" values are ratios, so show more precision.
            if param in ("rrr",):
                price = f"{float(row['Predicted Price']):.6f}"
            else:
                price = f"{float(row['Predicted Price']):.2f}"
            line = fmt([ticker, row["Date"], row["Time"], price])
            print(line)

        print(thin_div)
        sys.stdout.flush()  # force flush after every ticker block

    # ------------------------------------------------------------------
    # PUBLIC: format + stream-print the full prediction table
    # ------------------------------------------------------------------
    def _render_and_print(self, parsed, tickers, param=""):
        """Render *parsed* rows as an aligned console table, one flushed
        block per ticker so long outputs are never buffer-truncated."""
        if not parsed:
            print(" No prediction data available.")
            return

        headers = ["Ticker", "Date", "Time", "Predicted Price"]
        spacing = "  "

        # ── compute column widths using ROUNDED prices ─────────────────
        all_rows_flat = []
        for ticker, rows in parsed.items():
            for r in rows:
                if param in ("rrr",):
                    price_str = f"{float(r['Predicted Price']):.6f}"
                else:
                    price_str = f"{float(r['Predicted Price']):.2f}"
                all_rows_flat.append(
                    [ticker, r["Date"], r["Time"], price_str]
                )
        col_widths = []
        for idx, header in enumerate(headers):
            col_values = [row[idx] for row in all_rows_flat]
            col_widths.append(
                max(len(header), max((len(v) for v in col_values), default=0))
            )

        divider_width = sum(col_widths) + len(spacing) * (len(headers) - 1)
        divider = "=" * divider_width

        def fmt(values):
            return spacing.join(
                f"{str(v):<{w}}" for v, w in zip(values, col_widths)
            )

        # ── header ─────────────────────────────────────────────────────
        print(divider)
        print(Fore.CYAN + fmt(headers) + Fore.RESET)
        print(divider)
        sys.stdout.flush()

        # ── one block per ticker — flush after each ─────────────────────
        for ticker in tickers:
            rows = parsed.get(ticker)
            if not rows:
                continue
            self._print_ticker_block(
                ticker, rows, col_widths, spacing, divider_width, param=param
            )

        sys.stdout.flush()

    # ------------------------------------------------------------------
    # INTERNAL: build combined DataFrame (returned to caller)
    # ------------------------------------------------------------------
    def _build_dataframe(self, parsed, tickers):
        """Concatenate per-ticker rows into one DataFrame with a fixed
        column order; empty (but correctly-shaped) when nothing parsed."""
        frames = []
        for ticker in tickers:
            rows = parsed.get(ticker)
            if not rows:
                continue
            df = pd.DataFrame(rows)
            df.insert(0, "Ticker", ticker)
            frames.append(df[["Ticker", "Date", "Time", "Predicted Price"]])

        if frames:
            return pd.concat(frames, ignore_index=True)
        return pd.DataFrame(columns=["Ticker", "Date", "Time", "Predicted Price"])

    # ------------------------------------------------------------------
    # PUBLIC API
    # ------------------------------------------------------------------
    def get_prediction(self, tickers, time_frame, parameters, candle="1m"):
        """Fetch predictions for *tickers* over *time_frame*.

        Args:
            tickers: supported ticker symbol or list thereof
                (matched case-insensitively).
            time_frame: prediction horizon, passed through to the backend.
            parameters: prediction parameter name or list (e.g. "price").
            candle: candle interval, default "1m".

        Returns:
            dict with a "success" flag; on success also "data" (DataFrame),
            "credits_remaining" and an ISO "timestamp"; on failure "error".
        """
        # normalise inputs
        if isinstance(tickers, str):
            tickers = [tickers]
        if not isinstance(tickers, list):
            return {"success": False, "error": "Tickers must be a string or list"}

        invalid = [t for t in tickers if t.upper() not in self.SUPPORTED_TICKERS]
        if invalid:
            msg = f"Ticker(s) not supported currently: {', '.join(invalid)}"
            print(f"❌ {msg}")
            print(f"\n✅ Supported tickers: {', '.join(sorted(self.SUPPORTED_TICKERS))}")
            return {"success": False, "error": msg}

        tickers = [t.upper() for t in tickers]

        if isinstance(parameters, str):
            parameters = [parameters]

        try:
            print("\n" + "=" * 60)
            print("Getting Predictions")
            print("=" * 60)

            # NOTE(review): credits are enforced client-side here; the
            # backend presumably re-checks via X-Access-Token — confirm.
            token_check = self.token_manager.check_token_and_credits()
            if not token_check["valid"]:
                print(f"❌ {token_check['error']}")
                return {"success": False, "error": token_check["error"]}

            print(f"📊 Tickers : {', '.join(tickers)}")
            print(f"⏰ Time Frame: {time_frame}")
            print(f"🕒 Candle : {candle}")
            print(f"📈 Parameters: {', '.join(parameters)}")

            payload = {
                "action": {
                    "action_type": "predict",
                    "predict": {
                        "given": {
                            "ticker": tickers,
                            "time_frame": time_frame,
                            "candle": candle,
                        },
                        "required": {"parameters": parameters},
                    },
                }
            }

            print("\n🔄 Fetching Predictions ...")
            response = requests.post(
                self.base_url,
                json=payload,
                headers={"X-Access-Token": self.access_token},
                timeout=300,
            )
            response.raise_for_status()
            response_json = response.json()

            # parse
            parsed, warnings = self._parse_response(response_json, tickers, parameters)

            # print any warnings first
            for w in warnings:
                print(f"⚠️ {w}")
            sys.stdout.flush()

            # stream-print table (flush per ticker — no truncation)
            print("\n" + "=" * 60)
            print(f"✅ Predictions ({time_frame}, candle={candle})")
            print("=" * 60)
            self._render_and_print(parsed, tickers, param=parameters[0] if parameters else "")

            # deduct credit.
            # FIX: the old code reported 0 credits whenever the deduction
            # failed, even if credits actually remained; always report the
            # authoritative balance instead.
            self.token_manager.deduct_credit()
            remaining = self.token_manager.get_remaining_credits()

            print(f"💳 Remaining credits: {remaining}")
            if remaining <= 10:
                print(f"⚠️ Warning: Only {remaining} credits remaining!")
            if remaining == 0:
                print("❌ Credits exhausted! Please subscribe to a plan.")
            print("=" * 60)
            sys.stdout.flush()

            # build DataFrame for return value
            df = self._build_dataframe(parsed, tickers)

            return {
                "success": True,
                "data": df,
                "credits_remaining": remaining,
                "timestamp": datetime.now().isoformat(),
            }

        except requests.exceptions.Timeout:
            msg = "Request timed out. Please try again."
            print(f" {msg}")
            return {"success": False, "error": msg}

        except requests.exceptions.RequestException as e:
            msg = f"API request failed: {str(e)}"
            print(f" {msg}")
            return {"success": False, "error": msg}

        except Exception as e:
            import traceback
            traceback.print_exc()
            msg = f"Unexpected error: {str(e)}"
            print(f" {msg}")
            return {"success": False, "error": msg}

    def get_credits(self):
        """Print and return the caller's remaining credit balance."""
        credits = self.token_manager.get_remaining_credits()
        print(f"💳 Remaining credits: {credits}")
        return credits
|
minting-2.2.5/Minting/client.py
DELETED
|
@@ -1,497 +0,0 @@
|
|
|
1
|
-
|
|
2
|
-
# updated code for client
|
|
3
|
-
import requests
|
|
4
|
-
import pandas as pd
|
|
5
|
-
import io
|
|
6
|
-
import time
|
|
7
|
-
import os
|
|
8
|
-
import sys
|
|
9
|
-
import hashlib
|
|
10
|
-
from datetime import datetime
|
|
11
|
-
from pymongo import MongoClient
|
|
12
|
-
from colorama import Fore, init as colorama_init
|
|
13
|
-
|
|
14
|
-
# # ----------------- Database Layer -----------------
|
|
15
|
-
# Initialize ANSI support for Windows terminals
|
|
16
|
-
colorama_init(autoreset=True)
|
|
17
|
-
# ----------------- Database Layer -----------------
|
|
18
|
-
class Database:
|
|
19
|
-
def __init__(self, mongo_uri, db_name):
|
|
20
|
-
|
|
21
|
-
try:
|
|
22
|
-
self.client = MongoClient(
|
|
23
|
-
mongo_uri,
|
|
24
|
-
serverSelectionTimeoutMS=50000,
|
|
25
|
-
connectTimeoutMS=100000
|
|
26
|
-
)
|
|
27
|
-
# Test connection
|
|
28
|
-
self.client.server_info()
|
|
29
|
-
self.db = self.client[db_name]
|
|
30
|
-
|
|
31
|
-
except Exception as e:
|
|
32
|
-
print(f"❌ Connection Error: {e}")
|
|
33
|
-
raise
|
|
34
|
-
|
|
35
|
-
self.users = self.db["users"]
|
|
36
|
-
self.access_tokens = self.db["accesstokens"]
|
|
37
|
-
self.user_plans = self.db["userplans"]
|
|
38
|
-
self.plans = self.db["plans"]
|
|
39
|
-
|
|
40
|
-
|
|
41
|
-
# ----------------- Token Manager -----------------
|
|
42
|
-
class TokenManager:
|
|
43
|
-
def __init__(self, db: Database, raw_token: str):
|
|
44
|
-
self.db = db
|
|
45
|
-
self.raw_token = raw_token.strip()
|
|
46
|
-
self.hashed_token = self._hash_token(self.raw_token)
|
|
47
|
-
|
|
48
|
-
def _hash_token(self, token: str) -> str:
|
|
49
|
-
"""Hash token using SHA-256 (equivalent to Node.js crypto.createHash)"""
|
|
50
|
-
return hashlib.sha256(token.encode('utf-8')).hexdigest()
|
|
51
|
-
|
|
52
|
-
def check_token_and_credits(self):
|
|
53
|
-
"""Validate token and check if user has credits"""
|
|
54
|
-
token_doc = self.db.access_tokens.find_one({
|
|
55
|
-
"token": self.hashed_token,
|
|
56
|
-
"isActive": True
|
|
57
|
-
})
|
|
58
|
-
|
|
59
|
-
if not token_doc:
|
|
60
|
-
return {"valid": False, "error": "Invalid or inactive access token"}
|
|
61
|
-
|
|
62
|
-
user_id = token_doc["userId"]
|
|
63
|
-
|
|
64
|
-
plan_doc = self.db.user_plans.find_one({
|
|
65
|
-
"userId": user_id,
|
|
66
|
-
"isActive": True,
|
|
67
|
-
"expiresAt": {"$gte": datetime.utcnow()}
|
|
68
|
-
})
|
|
69
|
-
|
|
70
|
-
if not plan_doc:
|
|
71
|
-
return {"valid": False, "error": "No active subscription plan found"}
|
|
72
|
-
|
|
73
|
-
credits = plan_doc.get("creditsRemaining", 0)
|
|
74
|
-
|
|
75
|
-
if credits <= 0:
|
|
76
|
-
return {
|
|
77
|
-
"valid": False,
|
|
78
|
-
"error": "Credits exhausted. Please subscribe to a plan to continue.",
|
|
79
|
-
"creditsRemaining": 0
|
|
80
|
-
}
|
|
81
|
-
|
|
82
|
-
return {
|
|
83
|
-
"valid": True,
|
|
84
|
-
"userId": user_id,
|
|
85
|
-
"planId": plan_doc["_id"],
|
|
86
|
-
"creditsRemaining": credits
|
|
87
|
-
}
|
|
88
|
-
|
|
89
|
-
def deduct_credit(self):
|
|
90
|
-
"""Deduct one credit (only if credits > 0)"""
|
|
91
|
-
token_doc = self.db.access_tokens.find_one({
|
|
92
|
-
"token": self.hashed_token,
|
|
93
|
-
"isActive": True
|
|
94
|
-
})
|
|
95
|
-
|
|
96
|
-
if not token_doc:
|
|
97
|
-
return False
|
|
98
|
-
|
|
99
|
-
user_id = token_doc["userId"]
|
|
100
|
-
|
|
101
|
-
plan_doc = self.db.user_plans.find_one({
|
|
102
|
-
"userId": user_id,
|
|
103
|
-
"isActive": True,
|
|
104
|
-
"expiresAt": {"$gte": datetime.utcnow()}
|
|
105
|
-
})
|
|
106
|
-
|
|
107
|
-
if not plan_doc:
|
|
108
|
-
return False
|
|
109
|
-
|
|
110
|
-
current_credits = plan_doc.get("creditsRemaining", 0)
|
|
111
|
-
if current_credits <= 0:
|
|
112
|
-
return False
|
|
113
|
-
|
|
114
|
-
result = self.db.user_plans.update_one(
|
|
115
|
-
{"_id": plan_doc["_id"], "creditsRemaining": {"$gt": 0}},
|
|
116
|
-
{
|
|
117
|
-
"$inc": {"creditsRemaining": -1},
|
|
118
|
-
"$set": {"updatedAt": datetime.utcnow()}
|
|
119
|
-
}
|
|
120
|
-
)
|
|
121
|
-
return result.modified_count > 0
|
|
122
|
-
|
|
123
|
-
def get_remaining_credits(self):
|
|
124
|
-
"""Get remaining credits"""
|
|
125
|
-
token_doc = self.db.access_tokens.find_one({
|
|
126
|
-
"token": self.hashed_token,
|
|
127
|
-
"isActive": True
|
|
128
|
-
})
|
|
129
|
-
|
|
130
|
-
if not token_doc:
|
|
131
|
-
return 0
|
|
132
|
-
|
|
133
|
-
user_id = token_doc["userId"]
|
|
134
|
-
|
|
135
|
-
plan_doc = self.db.user_plans.find_one({
|
|
136
|
-
"userId": user_id,
|
|
137
|
-
"isActive": True,
|
|
138
|
-
"expiresAt": {"$gte": datetime.utcnow()}
|
|
139
|
-
})
|
|
140
|
-
|
|
141
|
-
if not plan_doc:
|
|
142
|
-
return 0
|
|
143
|
-
|
|
144
|
-
return max(plan_doc.get("creditsRemaining", 0), 0)
|
|
145
|
-
|
|
146
|
-
# ----------------- Client Class -----------------
|
|
147
|
-
class Client:
|
|
148
|
-
# Supported tickers - only these are allowed
|
|
149
|
-
SUPPORTED_TICKERS = {
|
|
150
|
-
"TCS",
|
|
151
|
-
"HDFCBANK",
|
|
152
|
-
"BHARTIARTL",
|
|
153
|
-
"ICICIBANK",
|
|
154
|
-
"SBIN",
|
|
155
|
-
"INFY",
|
|
156
|
-
"BAJFINANCE",
|
|
157
|
-
"HINDUNILVR",
|
|
158
|
-
"ITC",
|
|
159
|
-
"MARUTI",
|
|
160
|
-
"HCLTECH",
|
|
161
|
-
"SUNPHARMA",
|
|
162
|
-
"KOTAKBANK",
|
|
163
|
-
"AXISBANK",
|
|
164
|
-
"TATAMOTORS",
|
|
165
|
-
"ULTRACEMCO",
|
|
166
|
-
"BAJAJFINSV",
|
|
167
|
-
"ADANIPORTS",
|
|
168
|
-
"NTPC",
|
|
169
|
-
"ONGC",
|
|
170
|
-
"ASIANPAINT",
|
|
171
|
-
"JSWSTEEL",
|
|
172
|
-
"ADANIPOWER",
|
|
173
|
-
"WIPRO",
|
|
174
|
-
"ADANIENT",
|
|
175
|
-
"POWERGRID",
|
|
176
|
-
"NESTLEIND",
|
|
177
|
-
"COALINDIA",
|
|
178
|
-
"INDIGO",
|
|
179
|
-
"HINDZINC",
|
|
180
|
-
"TATASTEEL",
|
|
181
|
-
"VEDL",
|
|
182
|
-
"SBILIFE",
|
|
183
|
-
"EICHERMOT",
|
|
184
|
-
"GRASIM",
|
|
185
|
-
"HINDALCO",
|
|
186
|
-
"LTIM",
|
|
187
|
-
"TVSMOTOR",
|
|
188
|
-
"DIVISLAB",
|
|
189
|
-
"HDFCLIFE",
|
|
190
|
-
"PIDILITIND",
|
|
191
|
-
"CHOLAFIN",
|
|
192
|
-
"BRITANNIA",
|
|
193
|
-
"AMBUJACEM",
|
|
194
|
-
"GAIL",
|
|
195
|
-
"BANKBARODA",
|
|
196
|
-
"GODREJCP",
|
|
197
|
-
"HEROMOTOCO",
|
|
198
|
-
"TATAPOWER",
|
|
199
|
-
}
|
|
200
|
-
|
|
201
|
-
def __init__(
|
|
202
|
-
self,
|
|
203
|
-
access_token=None, # new kw
|
|
204
|
-
token=None, # old kw / positional
|
|
205
|
-
base_url="http://34.70.223.89:8000/predict",
|
|
206
|
-
mongo_uri="mongodb+srv://ankitarrow:ankitarrow@cluster0.zcajdur.mongodb.net/?retryWrites=true&w=majority&appName=Cluster0",
|
|
207
|
-
db_name="test"
|
|
208
|
-
):
|
|
209
|
-
# allow: Client(access_token="..."), Client(token="..."), Client("...")
|
|
210
|
-
if access_token is None and token is None:
|
|
211
|
-
raise ValueError("Access token must be provided as 'access_token' or 'token'.")
|
|
212
|
-
|
|
213
|
-
# if called positionally, first arg will land in access_token
|
|
214
|
-
self.access_token = (access_token or token).strip()
|
|
215
|
-
self.base_url = base_url
|
|
216
|
-
|
|
217
|
-
print("="*60)
|
|
218
|
-
print(" Initializing Mintzy")
|
|
219
|
-
print("="*60)
|
|
220
|
-
|
|
221
|
-
self.db = Database(mongo_uri, db_name)
|
|
222
|
-
self.token_manager = TokenManager(self.db, self.access_token)
|
|
223
|
-
|
|
224
|
-
def _format_table(self, response_json, tickers, parameters):
|
|
225
|
-
"""Format API response into pandas DataFrame"""
|
|
226
|
-
try:
|
|
227
|
-
if isinstance(response_json, str):
|
|
228
|
-
return pd.DataFrame([{"Error": response_json}])
|
|
229
|
-
|
|
230
|
-
if not isinstance(response_json, dict):
|
|
231
|
-
return pd.DataFrame([{"Error": f"Unexpected response type: {type(response_json)}"}])
|
|
232
|
-
|
|
233
|
-
result = response_json.get("result", response_json)
|
|
234
|
-
rows = []
|
|
235
|
-
|
|
236
|
-
for ticker in tickers:
|
|
237
|
-
ticker_data = result.get(ticker) if isinstance(result, dict) else None
|
|
238
|
-
if ticker_data is None:
|
|
239
|
-
print(f"⚠️ No data returned for {ticker}")
|
|
240
|
-
continue
|
|
241
|
-
|
|
242
|
-
if isinstance(ticker_data, str):
|
|
243
|
-
# ticker-level error string from the server
|
|
244
|
-
print(f"⚠️ {ticker}: {ticker_data}")
|
|
245
|
-
continue
|
|
246
|
-
|
|
247
|
-
for param in parameters:
|
|
248
|
-
param_data = ticker_data.get(param) if isinstance(ticker_data, dict) else None
|
|
249
|
-
|
|
250
|
-
if param_data is None:
|
|
251
|
-
print(f"⚠️ {param} data missing for {ticker}")
|
|
252
|
-
continue
|
|
253
|
-
|
|
254
|
-
if isinstance(param_data, dict):
|
|
255
|
-
timestamps = param_data.get("timestamps", [])
|
|
256
|
-
predicted_prices = param_data.get("predicted_prices", [])
|
|
257
|
-
dates = param_data.get("dates", [])
|
|
258
|
-
times = param_data.get("times", [])
|
|
259
|
-
|
|
260
|
-
if not predicted_prices:
|
|
261
|
-
print(f"⚠️ Empty prediction data for {ticker}.{param}")
|
|
262
|
-
continue
|
|
263
|
-
|
|
264
|
-
data_rows = []
|
|
265
|
-
for i, price in enumerate(predicted_prices):
|
|
266
|
-
if price is None:
|
|
267
|
-
continue
|
|
268
|
-
|
|
269
|
-
# Resolve date + time strings
|
|
270
|
-
if i < len(timestamps):
|
|
271
|
-
ts = str(timestamps[i])
|
|
272
|
-
try:
|
|
273
|
-
dt = datetime.fromisoformat(ts)
|
|
274
|
-
date_str = dt.strftime("%Y-%m-%d")
|
|
275
|
-
time_str = dt.strftime("%H:%M:%S")
|
|
276
|
-
except Exception:
|
|
277
|
-
# fall back to pre-split fields
|
|
278
|
-
date_str = str(dates[i]) if i < len(dates) else ts
|
|
279
|
-
time_str = str(times[i]) if i < len(times) else ""
|
|
280
|
-
else:
|
|
281
|
-
date_str = str(dates[i]) if i < len(dates) else ""
|
|
282
|
-
time_str = str(times[i]) if i < len(times) else ""
|
|
283
|
-
|
|
284
|
-
data_rows.append({
|
|
285
|
-
"Ticker": ticker,
|
|
286
|
-
"Date": date_str,
|
|
287
|
-
"Time": time_str,
|
|
288
|
-
"Predicted Price": float(price),
|
|
289
|
-
})
|
|
290
|
-
|
|
291
|
-
if data_rows:
|
|
292
|
-
rows.append(pd.DataFrame(data_rows))
|
|
293
|
-
else:
|
|
294
|
-
print(f"⚠️ No valid rows parsed for {ticker}.{param}")
|
|
295
|
-
|
|
296
|
-
elif isinstance(param_data, str):
|
|
297
|
-
# Legacy CSV string response
|
|
298
|
-
lines = param_data.strip().split('\n')
|
|
299
|
-
data_rows = []
|
|
300
|
-
for line in lines[1:]: # skip header
|
|
301
|
-
parts = line.split()
|
|
302
|
-
if len(parts) >= 3:
|
|
303
|
-
data_rows.append({
|
|
304
|
-
"Ticker": ticker,
|
|
305
|
-
"Date": parts[0],
|
|
306
|
-
"Time": parts[1],
|
|
307
|
-
"Predicted Price": float(parts[2]),
|
|
308
|
-
})
|
|
309
|
-
if data_rows:
|
|
310
|
-
rows.append(pd.DataFrame(data_rows))
|
|
311
|
-
else:
|
|
312
|
-
print(f"⚠️ Unsupported payload type for {ticker}.{param}: "
|
|
313
|
-
f"{type(param_data)}")
|
|
314
|
-
|
|
315
|
-
if rows:
|
|
316
|
-
combined = pd.concat(rows, ignore_index=True)
|
|
317
|
-
return combined[["Ticker", "Date", "Time", "Predicted Price"]]
|
|
318
|
-
|
|
319
|
-
return pd.DataFrame([{"Error": "No data to display"}])
|
|
320
|
-
|
|
321
|
-
except Exception as e:
|
|
322
|
-
import traceback
|
|
323
|
-
traceback.print_exc()
|
|
324
|
-
return pd.DataFrame([{"Error": str(e)}])
|
|
325
|
-
|
|
326
|
-
|
|
327
|
-
def _render_table(self, df: pd.DataFrame) -> str:
|
|
328
|
-
"""Return a formatted string for the console with a colored header."""
|
|
329
|
-
if df.empty or "Error" in df.columns:
|
|
330
|
-
msg = df.iloc[0].get("Error", "No prediction data available.")
|
|
331
|
-
return f" ⚠️ {msg}"
|
|
332
|
-
|
|
333
|
-
headers = ["Ticker", "Date", "Time", "Predicted Price"]
|
|
334
|
-
|
|
335
|
-
# Ensure all expected columns exist
|
|
336
|
-
for col in headers:
|
|
337
|
-
if col not in df.columns:
|
|
338
|
-
df[col] = ""
|
|
339
|
-
|
|
340
|
-
# ── FIX: compute col widths safely, never rely on zip truncation ──────
|
|
341
|
-
col_widths = []
|
|
342
|
-
for col in headers:
|
|
343
|
-
values_in_col = df[col].astype(str).tolist()
|
|
344
|
-
col_widths.append(max(len(col), max((len(v) for v in values_in_col), default=0)))
|
|
345
|
-
# ──────────────────────────────────────────────────────────────────────
|
|
346
|
-
|
|
347
|
-
spacing = " "
|
|
348
|
-
|
|
349
|
-
def format_row(row_values):
|
|
350
|
-
return spacing.join(
|
|
351
|
-
f"{str(val):<{width}}"
|
|
352
|
-
for val, width in zip(row_values, col_widths)
|
|
353
|
-
)
|
|
354
|
-
|
|
355
|
-
total_width = sum(col_widths) + len(spacing) * (len(headers) - 1)
|
|
356
|
-
divider = "=" * total_width
|
|
357
|
-
thin_div = "-" * total_width
|
|
358
|
-
|
|
359
|
-
lines = [
|
|
360
|
-
divider,
|
|
361
|
-
Fore.CYAN + format_row(headers) + Fore.RESET,
|
|
362
|
-
divider,
|
|
363
|
-
]
|
|
364
|
-
|
|
365
|
-
# ── FIX: iterate rows directly from DataFrame, not from a values list ─
|
|
366
|
-
prev_ticker = None
|
|
367
|
-
for _, row in df.iterrows():
|
|
368
|
-
row_values = [str(row[col]) for col in headers]
|
|
369
|
-
curr_ticker = row_values[0]
|
|
370
|
-
|
|
371
|
-
# Insert separator between tickers
|
|
372
|
-
if prev_ticker is not None and curr_ticker != prev_ticker:
|
|
373
|
-
lines.append(thin_div)
|
|
374
|
-
|
|
375
|
-
lines.append(format_row(row_values))
|
|
376
|
-
prev_ticker = curr_ticker
|
|
377
|
-
# ──────────────────────────────────────────────────────────────────────
|
|
378
|
-
|
|
379
|
-
lines.append(divider)
|
|
380
|
-
|
|
381
|
-
# ── FIX: flush immediately so Jupyter/terminal doesn't buffer-truncate ─
|
|
382
|
-
result = "\n".join(lines)
|
|
383
|
-
sys.stdout.flush()
|
|
384
|
-
return result
|
|
385
|
-
|
|
386
|
-
def get_prediction(self, tickers, time_frame, parameters, candle="1m"):
|
|
387
|
-
# Normalize tickers
|
|
388
|
-
if isinstance(tickers, str):
|
|
389
|
-
tickers = [tickers]
|
|
390
|
-
if not isinstance(tickers, list):
|
|
391
|
-
return {"success": False, "error": "Tickers must be a string or list"}
|
|
392
|
-
|
|
393
|
-
|
|
394
|
-
invalid_tickers = [t for t in tickers if t.upper() not in self.SUPPORTED_TICKERS]
|
|
395
|
-
if invalid_tickers:
|
|
396
|
-
error_msg = f"Ticker(s) not supported currently: {', '.join(invalid_tickers)}"
|
|
397
|
-
print(f"❌ {error_msg}")
|
|
398
|
-
print(f"\n✅ Supported tickers: {', '.join(sorted(self.SUPPORTED_TICKERS))}")
|
|
399
|
-
return {"success": False, "error": error_msg}
|
|
400
|
-
|
|
401
|
-
tickers = [t.upper() for t in tickers]
|
|
402
|
-
|
|
403
|
-
if isinstance(parameters, str):
|
|
404
|
-
parameters = [parameters]
|
|
405
|
-
|
|
406
|
-
try:
|
|
407
|
-
print("\n" + "="*60)
|
|
408
|
-
print("Getting Predictions")
|
|
409
|
-
print("="*60)
|
|
410
|
-
|
|
411
|
-
token_check = self.token_manager.check_token_and_credits()
|
|
412
|
-
if not token_check["valid"]:
|
|
413
|
-
print(f"❌ {token_check['error']}")
|
|
414
|
-
return {"success": False, "error": token_check["error"]}
|
|
415
|
-
|
|
416
|
-
print(f"📊 Tickers: {', '.join(tickers)}")
|
|
417
|
-
print(f"⏰ Time Frame: {time_frame}")
|
|
418
|
-
print(f"🕒 Candle: {candle}")
|
|
419
|
-
print(f"📈 Parameters: {', '.join(parameters)}")
|
|
420
|
-
|
|
421
|
-
payload = {
|
|
422
|
-
"action": {
|
|
423
|
-
"action_type": "predict",
|
|
424
|
-
"predict": {
|
|
425
|
-
"given": {
|
|
426
|
-
"ticker": tickers,
|
|
427
|
-
"time_frame": time_frame,
|
|
428
|
-
"candle": candle
|
|
429
|
-
},
|
|
430
|
-
"required": {
|
|
431
|
-
"parameters": parameters
|
|
432
|
-
}
|
|
433
|
-
}
|
|
434
|
-
}
|
|
435
|
-
}
|
|
436
|
-
|
|
437
|
-
print("\n🔄 Fetching Predictions ...")
|
|
438
|
-
response = requests.post(
|
|
439
|
-
self.base_url,
|
|
440
|
-
json=payload,
|
|
441
|
-
headers={"X-Access-Token": self.access_token},
|
|
442
|
-
timeout=300
|
|
443
|
-
)
|
|
444
|
-
response.raise_for_status()
|
|
445
|
-
response_json = response.json()
|
|
446
|
-
|
|
447
|
-
df = self._format_table(response_json, tickers, parameters)
|
|
448
|
-
|
|
449
|
-
if self.token_manager.deduct_credit():
|
|
450
|
-
remaining = self.token_manager.get_remaining_credits()
|
|
451
|
-
else:
|
|
452
|
-
remaining = 0
|
|
453
|
-
|
|
454
|
-
print("\n" + "="*60)
|
|
455
|
-
print(f"✅ Predictions ({time_frame}, candle={candle})")
|
|
456
|
-
print("="*60)
|
|
457
|
-
print(self._render_table(df))
|
|
458
|
-
print("="*60)
|
|
459
|
-
print(f"💳 Remaining credits: {remaining}")
|
|
460
|
-
|
|
461
|
-
if remaining <= 10:
|
|
462
|
-
print(f"⚠️ Warning: Only {remaining} credits remaining!")
|
|
463
|
-
if remaining == 0:
|
|
464
|
-
print("❌ Credits exhausted! Please subscribe to a plan to continue.")
|
|
465
|
-
|
|
466
|
-
print("="*60)
|
|
467
|
-
|
|
468
|
-
return {
|
|
469
|
-
"success": True,
|
|
470
|
-
"data": df,
|
|
471
|
-
"credits_remaining": remaining,
|
|
472
|
-
"timestamp": datetime.now().isoformat()
|
|
473
|
-
}
|
|
474
|
-
|
|
475
|
-
except requests.exceptions.Timeout:
|
|
476
|
-
error_msg = "Request timed out. Please try again."
|
|
477
|
-
print(f"❌ {error_msg}")
|
|
478
|
-
return {"success": False, "error": error_msg}
|
|
479
|
-
|
|
480
|
-
except requests.exceptions.RequestException as e:
|
|
481
|
-
error_msg = f"API request failed: {str(e)}"
|
|
482
|
-
print(f"❌ {error_msg}")
|
|
483
|
-
return {"success": False, "error": error_msg}
|
|
484
|
-
|
|
485
|
-
except Exception as e:
|
|
486
|
-
error_msg = f"Unexpected error: {str(e)}"
|
|
487
|
-
print(f"❌ {error_msg}")
|
|
488
|
-
return {"success": False, "error": error_msg}
|
|
489
|
-
|
|
490
|
-
def get_credits(self):
|
|
491
|
-
"""Get remaining credits for the current token"""
|
|
492
|
-
credits = self.token_manager.get_remaining_credits()
|
|
493
|
-
print(f"💳 Remaining credits: {credits}")
|
|
494
|
-
return credits
|
|
495
|
-
|
|
496
|
-
|
|
497
|
-
# -------------- local test --------------
|
|
File without changes
|
|
File without changes
|
|
File without changes
|
|
File without changes
|
|
File without changes
|