pybinbot 0.1.6__py3-none-any.whl → 0.4.15__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- pybinbot/__init__.py +162 -0
- pybinbot/apis/binance/base.py +588 -0
- pybinbot/apis/binance/exceptions.py +17 -0
- pybinbot/apis/binbot/base.py +327 -0
- pybinbot/apis/binbot/exceptions.py +56 -0
- pybinbot/apis/kucoin/base.py +208 -0
- pybinbot/apis/kucoin/exceptions.py +9 -0
- pybinbot/apis/kucoin/market.py +92 -0
- pybinbot/apis/kucoin/orders.py +663 -0
- pybinbot/apis/kucoin/rest.py +33 -0
- pybinbot/models/__init__.py +0 -0
- {models → pybinbot/models}/bot_base.py +5 -5
- {models → pybinbot/models}/deal.py +24 -16
- {models → pybinbot/models}/order.py +41 -33
- pybinbot/models/routes.py +6 -0
- {models → pybinbot/models}/signals.py +5 -10
- pybinbot/py.typed +0 -0
- pybinbot/shared/__init__.py +0 -0
- pybinbot/shared/cache.py +32 -0
- {shared → pybinbot/shared}/enums.py +33 -22
- pybinbot/shared/handlers.py +89 -0
- pybinbot/shared/heikin_ashi.py +198 -0
- pybinbot/shared/indicators.py +271 -0
- {shared → pybinbot/shared}/logging_config.py +1 -3
- {shared → pybinbot/shared}/timestamps.py +5 -4
- pybinbot/shared/types.py +12 -0
- {pybinbot-0.1.6.dist-info → pybinbot-0.4.15.dist-info}/METADATA +22 -2
- pybinbot-0.4.15.dist-info/RECORD +32 -0
- pybinbot-0.4.15.dist-info/top_level.txt +1 -0
- pybinbot-0.1.6.dist-info/RECORD +0 -15
- pybinbot-0.1.6.dist-info/top_level.txt +0 -3
- pybinbot.py +0 -93
- shared/types.py +0 -8
- {shared → pybinbot/shared}/maths.py +0 -0
- {pybinbot-0.1.6.dist-info → pybinbot-0.4.15.dist-info}/WHEEL +0 -0
- {pybinbot-0.1.6.dist-info → pybinbot-0.4.15.dist-info}/licenses/LICENSE +0 -0
|
@@ -1,14 +1,14 @@
|
|
|
1
1
|
from pydantic import BaseModel, Field, field_validator
|
|
2
|
-
|
|
3
|
-
from shared.
|
|
4
|
-
from shared.enums import (
|
|
5
|
-
QuoteAssets,
|
|
2
|
+
|
|
3
|
+
from pybinbot.shared.enums import (
|
|
6
4
|
BinanceKlineIntervals,
|
|
7
5
|
CloseConditions,
|
|
6
|
+
QuoteAssets,
|
|
8
7
|
Status,
|
|
9
8
|
Strategy,
|
|
10
9
|
)
|
|
11
|
-
from shared.timestamps import ts_to_humandate
|
|
10
|
+
from pybinbot.shared.timestamps import timestamp, ts_to_humandate
|
|
11
|
+
from pybinbot.shared.types import Amount
|
|
12
12
|
|
|
13
13
|
|
|
14
14
|
class BotBase(BaseModel):
|
|
@@ -1,56 +1,64 @@
|
|
|
1
1
|
from pydantic import BaseModel, Field, field_validator
|
|
2
|
-
|
|
2
|
+
|
|
3
|
+
from pybinbot.shared.types import Amount
|
|
3
4
|
|
|
4
5
|
|
|
5
6
|
class DealBase(BaseModel):
|
|
6
|
-
"""
|
|
7
|
-
Data model that is used for operations,
|
|
8
|
-
so it should all be numbers (int or float)
|
|
9
|
-
"""
|
|
7
|
+
"""Operational deal data model with numeric fields."""
|
|
10
8
|
|
|
11
9
|
base_order_size: Amount = Field(default=0, gt=-1)
|
|
12
10
|
current_price: Amount = Field(default=0)
|
|
13
11
|
take_profit_price: Amount = Field(default=0)
|
|
14
12
|
trailling_stop_loss_price: Amount = Field(
|
|
15
13
|
default=0,
|
|
16
|
-
description=
|
|
14
|
+
description=(
|
|
15
|
+
"take_profit but for trailling, to avoid confusion, "
|
|
16
|
+
"trailling_profit_price always be > trailling_stop_loss_price"
|
|
17
|
+
),
|
|
17
18
|
)
|
|
18
19
|
trailling_profit_price: Amount = Field(default=0)
|
|
19
20
|
stop_loss_price: Amount = Field(default=0)
|
|
20
|
-
|
|
21
|
-
# fields for margin trading
|
|
22
21
|
total_interests: float = Field(default=0, gt=-1)
|
|
23
22
|
total_commissions: float = Field(default=0, gt=-1)
|
|
24
23
|
margin_loan_id: int = Field(
|
|
25
24
|
default=0,
|
|
26
25
|
ge=0,
|
|
27
|
-
description=
|
|
26
|
+
description=(
|
|
27
|
+
"Txid from Binance. This is used to check if there is a loan, "
|
|
28
|
+
"0 means no loan"
|
|
29
|
+
),
|
|
28
30
|
)
|
|
29
31
|
margin_repay_id: int = Field(
|
|
30
32
|
default=0, ge=0, description="= 0, it has not been repaid"
|
|
31
33
|
)
|
|
32
|
-
|
|
33
|
-
# Refactored deal prices that combine both margin and spot
|
|
34
34
|
opening_price: Amount = Field(
|
|
35
35
|
default=0,
|
|
36
|
-
description=
|
|
36
|
+
description=(
|
|
37
|
+
"replaces previous buy_price or short_sell_price/margin_short_sell_price"
|
|
38
|
+
),
|
|
37
39
|
)
|
|
38
40
|
opening_qty: Amount = Field(
|
|
39
41
|
default=0,
|
|
40
|
-
description=
|
|
42
|
+
description=(
|
|
43
|
+
"replaces previous buy_total_qty or short_sell_qty/margin_short_sell_qty"
|
|
44
|
+
),
|
|
41
45
|
)
|
|
42
46
|
opening_timestamp: int = Field(default=0)
|
|
43
47
|
closing_price: Amount = Field(
|
|
44
48
|
default=0,
|
|
45
|
-
description=
|
|
49
|
+
description=(
|
|
50
|
+
"replaces previous sell_price or short_sell_price/margin_short_sell_price"
|
|
51
|
+
),
|
|
46
52
|
)
|
|
47
53
|
closing_qty: Amount = Field(
|
|
48
54
|
default=0,
|
|
49
|
-
description=
|
|
55
|
+
description=(
|
|
56
|
+
"replaces previous sell_qty or short_sell_qty/margin_short_sell_qty"
|
|
57
|
+
),
|
|
50
58
|
)
|
|
51
59
|
closing_timestamp: int = Field(
|
|
52
60
|
default=0,
|
|
53
|
-
description="replaces previous buy_timestamp or margin/short_sell timestamps",
|
|
61
|
+
description=("replaces previous buy_timestamp or margin/short_sell timestamps"),
|
|
54
62
|
)
|
|
55
63
|
|
|
56
64
|
@field_validator("margin_loan_id", mode="before")
|
|
@@ -1,22 +1,29 @@
|
|
|
1
|
-
from pydantic import BaseModel, Field
|
|
2
|
-
|
|
3
|
-
from shared.enums import
|
|
4
|
-
|
|
5
|
-
OrderStatus,
|
|
6
|
-
)
|
|
1
|
+
from pydantic import BaseModel, Field
|
|
2
|
+
|
|
3
|
+
from pybinbot.shared.enums import DealType, OrderStatus
|
|
4
|
+
from pybinbot.shared.types import Amount
|
|
7
5
|
|
|
8
6
|
|
|
9
7
|
class OrderBase(BaseModel):
|
|
10
8
|
order_type: str = Field(
|
|
11
|
-
description=
|
|
9
|
+
description=(
|
|
10
|
+
"Because every exchange has different naming, we should keep it as a "
|
|
11
|
+
"str rather than OrderType enum"
|
|
12
|
+
)
|
|
12
13
|
)
|
|
13
14
|
time_in_force: str
|
|
14
15
|
timestamp: int = Field(default=0)
|
|
15
16
|
order_id: int | str = Field(
|
|
16
|
-
description=
|
|
17
|
+
description=(
|
|
18
|
+
"Because every exchange has id type, we should keep it as loose as "
|
|
19
|
+
"possible. Int is for backwards compatibility"
|
|
20
|
+
)
|
|
17
21
|
)
|
|
18
22
|
order_side: str = Field(
|
|
19
|
-
description=
|
|
23
|
+
description=(
|
|
24
|
+
"Because every exchange has different naming, we should keep it as a "
|
|
25
|
+
"str rather than OrderType enum"
|
|
26
|
+
)
|
|
20
27
|
)
|
|
21
28
|
pair: str
|
|
22
29
|
qty: float
|
|
@@ -27,7 +34,10 @@ class OrderBase(BaseModel):
|
|
|
27
34
|
"from_attributes": True,
|
|
28
35
|
"use_enum_values": True,
|
|
29
36
|
"json_schema_extra": {
|
|
30
|
-
"description":
|
|
37
|
+
"description": (
|
|
38
|
+
"Most fields are optional. Deal field is generated internally, "
|
|
39
|
+
"orders are filled up by Exchange"
|
|
40
|
+
),
|
|
31
41
|
"examples": [
|
|
32
42
|
{
|
|
33
43
|
"order_type": "LIMIT",
|
|
@@ -51,7 +61,10 @@ class DealModel(BaseModel):
|
|
|
51
61
|
take_profit_price: Amount = Field(default=0)
|
|
52
62
|
trailling_stop_loss_price: Amount = Field(
|
|
53
63
|
default=0,
|
|
54
|
-
description=
|
|
64
|
+
description=(
|
|
65
|
+
"take_profit but for trailling, to avoid confusion, "
|
|
66
|
+
"trailling_profit_price always be > trailling_stop_loss_price"
|
|
67
|
+
),
|
|
55
68
|
)
|
|
56
69
|
trailling_profit_price: Amount = Field(default=0)
|
|
57
70
|
stop_loss_price: Amount = Field(default=0)
|
|
@@ -60,45 +73,40 @@ class DealModel(BaseModel):
|
|
|
60
73
|
margin_loan_id: int = Field(
|
|
61
74
|
default=0,
|
|
62
75
|
ge=0,
|
|
63
|
-
description=
|
|
76
|
+
description=(
|
|
77
|
+
"Txid from Binance. This is used to check if there is a loan, "
|
|
78
|
+
"0 means no loan"
|
|
79
|
+
),
|
|
64
80
|
)
|
|
65
81
|
margin_repay_id: int = Field(
|
|
66
82
|
default=0, ge=0, description="= 0, it has not been repaid"
|
|
67
83
|
)
|
|
68
84
|
opening_price: Amount = Field(
|
|
69
85
|
default=0,
|
|
70
|
-
description=
|
|
86
|
+
description=(
|
|
87
|
+
"replaces previous buy_price or short_sell_price/margin_short_sell_price"
|
|
88
|
+
),
|
|
71
89
|
)
|
|
72
90
|
opening_qty: Amount = Field(
|
|
73
91
|
default=0,
|
|
74
|
-
description=
|
|
92
|
+
description=(
|
|
93
|
+
"replaces previous buy_total_qty or short_sell_qty/margin_short_sell_qty"
|
|
94
|
+
),
|
|
75
95
|
)
|
|
76
96
|
opening_timestamp: int = Field(default=0)
|
|
77
97
|
closing_price: Amount = Field(
|
|
78
98
|
default=0,
|
|
79
|
-
description=
|
|
99
|
+
description=(
|
|
100
|
+
"replaces previous sell_price or short_sell_price/margin_short_sell_price"
|
|
101
|
+
),
|
|
80
102
|
)
|
|
81
103
|
closing_qty: Amount = Field(
|
|
82
104
|
default=0,
|
|
83
|
-
description=
|
|
105
|
+
description=(
|
|
106
|
+
"replaces previous sell_qty or short_sell_qty/margin_short_sell_qty"
|
|
107
|
+
),
|
|
84
108
|
)
|
|
85
109
|
closing_timestamp: int = Field(
|
|
86
110
|
default=0,
|
|
87
|
-
description="replaces previous buy_timestamp or margin/short_sell timestamps",
|
|
111
|
+
description=("replaces previous buy_timestamp or margin/short_sell timestamps"),
|
|
88
112
|
)
|
|
89
|
-
|
|
90
|
-
@field_validator("margin_loan_id", mode="before")
|
|
91
|
-
@classmethod
|
|
92
|
-
def validate_margin_loan_id(cls, value):
|
|
93
|
-
if isinstance(value, float):
|
|
94
|
-
return int(value)
|
|
95
|
-
else:
|
|
96
|
-
return value
|
|
97
|
-
|
|
98
|
-
@field_validator("margin_loan_id", mode="after")
|
|
99
|
-
@classmethod
|
|
100
|
-
def cast_float(cls, value):
|
|
101
|
-
if isinstance(value, float):
|
|
102
|
-
return int(value)
|
|
103
|
-
else:
|
|
104
|
-
return value
|
|
@@ -1,14 +1,11 @@
|
|
|
1
|
-
from pydantic import BaseModel, Field, field_validator, ConfigDict
|
|
2
|
-
from typing import Optional
|
|
3
1
|
from datetime import datetime
|
|
2
|
+
from typing import Optional
|
|
3
|
+
|
|
4
|
+
from pydantic import BaseModel, ConfigDict, Field, field_validator
|
|
4
5
|
|
|
5
6
|
|
|
6
|
-
# Example shared model (copy actual model code from source files)
|
|
7
7
|
class HABollinguerSpread(BaseModel):
|
|
8
|
-
"""
|
|
9
|
-
Pydantic model for the Bollinguer spread.
|
|
10
|
-
(optional)
|
|
11
|
-
"""
|
|
8
|
+
"""Pydantic model for the Bollinguer spread."""
|
|
12
9
|
|
|
13
10
|
bb_high: float
|
|
14
11
|
bb_mid: float
|
|
@@ -16,9 +13,7 @@ class HABollinguerSpread(BaseModel):
|
|
|
16
13
|
|
|
17
14
|
|
|
18
15
|
class SignalsConsumer(BaseModel):
|
|
19
|
-
"""
|
|
20
|
-
Pydantic model for the signals consumer.
|
|
21
|
-
"""
|
|
16
|
+
"""Pydantic model for the signals consumer."""
|
|
22
17
|
|
|
23
18
|
type: str = Field(default="signal")
|
|
24
19
|
date: str = Field(
|
pybinbot/py.typed
ADDED
|
File without changes
|
|
File without changes
|
pybinbot/shared/cache.py
ADDED
|
@@ -0,0 +1,32 @@
|
|
|
1
|
+
import time
|
|
2
|
+
from functools import wraps
|
|
3
|
+
from typing import Any, Callable, Dict, Tuple
|
|
4
|
+
|
|
5
|
+
|
|
6
|
+
def cache(
    ttl_seconds: int = 3600,
) -> Callable[[Callable[..., Any]], Callable[..., Any]]:
    """Decorator factory: memoize a function's results for ``ttl_seconds``.

    One store lives in each decorated function's closure (per process).
    Entries are keyed by positional args plus the sorted kwargs items, so
    keyword argument order does not affect cache hits. Expired entries are
    overwritten on the next call with the same key.
    """

    def decorator(func: Callable[..., Any]) -> Callable[..., Any]:
        # key -> (monotonic expiry time, cached value)
        entries: Dict[
            Tuple[Tuple[Any, ...], Tuple[Tuple[str, Any], ...]], Tuple[float, Any]
        ] = {}

        @wraps(func)
        def wrapper(*args: Any, **kwargs: Any) -> Any:
            cache_key = (args, tuple(sorted(kwargs.items())))
            current = time.monotonic()
            hit = entries.get(cache_key)
            # Values are always 2-tuples, so None unambiguously means "miss".
            if hit is not None and current < hit[0]:
                return hit[1]
            result = func(*args, **kwargs)
            # Negative TTLs are clamped to 0 (i.e. effectively no caching).
            entries[cache_key] = (current + max(0, int(ttl_seconds)), result)
            return result

        return wrapper

    return decorator
|
|
@@ -2,6 +2,18 @@ from enum import Enum
|
|
|
2
2
|
from pydantic import BaseModel, field_validator
|
|
3
3
|
|
|
4
4
|
|
|
5
|
+
class DealType(str, Enum):
    """Kind of order/operation performed within a deal's lifecycle."""

    base_order = "base_order"
    take_profit = "take_profit"
    stop_loss = "stop_loss"
    short_sell = "short_sell"
    short_buy = "short_buy"
    margin_short = "margin_short"
    panic_close = "panic_close"
    trailling_profit = "trailling_profit"
    conversion = "conversion"  # converts one crypto to another
|
|
15
|
+
|
|
16
|
+
|
|
5
17
|
class CloseConditions(str, Enum):
|
|
6
18
|
dynamic_trailling = "dynamic_trailling"
|
|
7
19
|
# No trailling, standard stop loss
|
|
@@ -18,16 +30,6 @@ class KafkaTopics(str, Enum):
|
|
|
18
30
|
restart_autotrade = "restart-autotrade"
|
|
19
31
|
|
|
20
32
|
|
|
21
|
-
class DealType(str, Enum):
|
|
22
|
-
base_order = "base_order"
|
|
23
|
-
take_profit = "take_profit"
|
|
24
|
-
stop_loss = "stop_loss"
|
|
25
|
-
short_sell = "short_sell"
|
|
26
|
-
short_buy = "short_buy"
|
|
27
|
-
margin_short = "margin_short"
|
|
28
|
-
panic_close = "panic_close"
|
|
29
|
-
|
|
30
|
-
|
|
31
33
|
class BinanceOrderModel(BaseModel):
|
|
32
34
|
"""
|
|
33
35
|
Data model given by Binance,
|
|
@@ -112,18 +114,6 @@ class TrendEnum(str, Enum):
|
|
|
112
114
|
neutral = None
|
|
113
115
|
|
|
114
116
|
|
|
115
|
-
class DealType(str, Enum):
|
|
116
|
-
base_order = "base_order"
|
|
117
|
-
take_profit = "take_profit"
|
|
118
|
-
stop_loss = "stop_loss"
|
|
119
|
-
short_sell = "short_sell"
|
|
120
|
-
short_buy = "short_buy"
|
|
121
|
-
margin_short = "margin_short"
|
|
122
|
-
panic_close = "panic_close"
|
|
123
|
-
trailling_profit = "trailling_profit"
|
|
124
|
-
conversion = "conversion" # converts one crypto to another
|
|
125
|
-
|
|
126
|
-
|
|
127
117
|
class BinanceKlineIntervals(str, Enum):
|
|
128
118
|
one_minute = "1m"
|
|
129
119
|
three_minutes = "3m"
|
|
@@ -182,6 +172,27 @@ class BinanceKlineIntervals(str, Enum):
|
|
|
182
172
|
}
|
|
183
173
|
return interval_map.get(self.value, self.value)
|
|
184
174
|
|
|
175
|
+
def get_interval_ms(interval_str: str) -> int:
    """Convert a Binance kline interval string (e.g. "5m", "4h") to milliseconds.

    Unknown interval strings fall back to 1 minute. "1M" approximates a
    month as 30 days.
    """
    minute_ms = 60 * 1000
    hour_ms = 60 * minute_ms
    day_ms = 24 * hour_ms
    interval_map = {
        "1m": minute_ms,
        "3m": 3 * minute_ms,
        "5m": 5 * minute_ms,
        "15m": 15 * minute_ms,
        # Bug fix: was 30 * 1000 (30 seconds) instead of 30 minutes.
        "30m": 30 * minute_ms,
        "1h": hour_ms,
        "2h": 2 * hour_ms,
        "4h": 4 * hour_ms,
        "6h": 6 * hour_ms,
        "8h": 8 * hour_ms,
        "12h": 12 * hour_ms,
        "1d": day_ms,
        "3d": 3 * day_ms,
        "1w": 7 * day_ms,
        "1M": 30 * day_ms,  # Approximate month as 30 days
    }
    return interval_map.get(interval_str, minute_ms)  # Default to 1 minute
|
|
195
|
+
|
|
185
196
|
|
|
186
197
|
class KucoinKlineIntervals(str, Enum):
|
|
187
198
|
ONE_MINUTE = "1min"
|
|
@@ -0,0 +1,89 @@
|
|
|
1
|
+
import logging
|
|
2
|
+
from time import sleep
|
|
3
|
+
from requests import Response, HTTPError
|
|
4
|
+
from aiohttp import ClientResponse
|
|
5
|
+
from pybinbot.apis.binbot.exceptions import (
|
|
6
|
+
BinbotErrors,
|
|
7
|
+
QuantityTooLow,
|
|
8
|
+
)
|
|
9
|
+
from pybinbot.apis.binance.exceptions import (
|
|
10
|
+
BinanceErrors,
|
|
11
|
+
InvalidSymbol,
|
|
12
|
+
NotEnoughFunds,
|
|
13
|
+
)
|
|
14
|
+
|
|
15
|
+
|
|
16
|
+
async def aio_response_handler(response: ClientResponse):
    """Await and return the JSON-decoded body of an aiohttp response."""
    return await response.json()
|
|
19
|
+
|
|
20
|
+
|
|
21
|
+
def handle_binance_errors(response: Response) -> dict:
    """Validate a Binance/Binbot HTTP response and return its JSON payload.

    Handles:
    - HTTP codes: rate limits (418/429), Cloudfront 403, 404
    - Binance business errors, e.g. {"code": -1013, "msg": "Invalid quantity"}
    - Binbot internal errors ({"error": 1, "message": ...})

    Raises:
        HTTPError: for Cloudfront 403 and 404 responses.
        BinanceErrors: for error payloads on HTTP >= 400.
        QuantityTooLow / NotEnoughFunds / InvalidSymbol: for specific
            Binance error codes embedded in otherwise-OK responses.
        BinbotErrors: for Binbot error payloads.

    NOTE: may block (sleep) to back off when request weight limits are
    near or hit.
    """

    if "x-mbx-used-weight-1m" in response.headers:
        logging.info(
            f"Request to {response.url} weight: {response.headers.get('x-mbx-used-weight-1m')}"
        )
    # Binance doesn't seem to reach 418 or 429 even after 2000 weight requests
    if (
        response.headers.get("x-mbx-used-weight-1m")
        and float(response.headers.get("x-mbx-used-weight-1m", 0)) > 7000
    ):
        # Comment/behavior mismatch fixed: the pause is 120s, not 1 min.
        logging.warning("Request weight limit prevention pause, waiting 2 min")
        sleep(120)

    if response.status_code == 418 or response.status_code == 429:
        logging.warning("Request weight limit hit, ban will come soon, waiting 1 hour")
        sleep(3600)

    # Cloudfront 403 error
    if response.status_code == 403 and response.reason:
        raise HTTPError(response=response)

    content = response.json()

    if response.status_code == 404:
        raise HTTPError(response=response)

    # Show error message for bad requests
    if response.status_code >= 400:
        # Binance errors
        if "msg" in content and "code" in content:
            raise BinanceErrors(content["msg"], content["code"])

    # Binbot errors
    if content and "error" in content and content["error"] == 1:
        raise BinbotErrors(content["message"], content["error"])

    # Binance errors embedded in the payload (possibly with HTTP 200)
    if content and "code" in content:
        if content["code"] == -1013:
            # Bug fix: Binance payloads carry "msg"/"code", not
            # "message"/"error" — the old keys raised KeyError here.
            raise QuantityTooLow(content["msg"], content["code"])
        if content["code"] == 200:
            return content
        # NOTE(review): -1013 is unreachable here since the branch above
        # already raises QuantityTooLow for it.
        if (
            content["code"] == -2010
            or content["code"] == -1013
            or content["code"] == -2015
        ):
            # Not enough funds. Ignore, send to bot errors
            # Need to be dealt with at higher levels
            raise NotEnoughFunds(content["msg"], content["code"])

        if content["code"] == -1003:
            # Too many requests: exceeded API rate limits. Back off 1 min.
            # Use logging (consistent with the rest of this module), not print.
            logging.warning("Too many requests. Back off for 1 min...")
            sleep(60)

        if content["code"] == -1121:
            raise InvalidSymbol(f"Binance error: {content['msg']}", content["code"])

    return content
|
|
@@ -0,0 +1,198 @@
|
|
|
1
|
+
from typing import cast
|
|
2
|
+
|
|
3
|
+
from pandas import DataFrame, to_numeric, concat
|
|
4
|
+
from pandas.api.types import is_numeric_dtype
|
|
5
|
+
from pandas import to_datetime
|
|
6
|
+
from pybinbot.shared.enums import ExchangeId
|
|
7
|
+
|
|
8
|
+
|
|
9
|
+
class HeikinAshi:
    """
    Dataframe operations shared across projects and Heikin Ashi candle transformation.
    This avoids circular imports and groups related functionality.

    Canonical formulas applied to OHLC data:
        HA_Close = (O + H + L + C) / 4
        HA_Open = (prev_HA_Open + prev_HA_Close) / 2, seed = (O0 + C0) / 2
        HA_High = max(H, HA_Open, HA_Close)
        HA_Low = min(L, HA_Open, HA_Close)

    This version:
      * Works if a 'timestamp' column exists (sorted chronologically first).
      * Does NOT mutate the original dataframe in-place; returns a copy.
      * Validates required columns.
    """

    # Column layout of raw Binance klines (first 11 fields of each candle).
    binance_cols = [
        "open_time",
        "open",
        "high",
        "low",
        "close",
        "volume",
        "close_time",
        "quote_asset_volume",
        "number_of_trades",
        "taker_buy_base_asset_volume",
        "taker_buy_quote_asset_volume",
    ]
    # Column layout of Kucoin klines (8 fields).
    kucoin_cols = [
        "open_time",
        "open",
        "high",
        "low",
        "close",
        "volume",
        "close_time",
        "quote_asset_volume",
    ]

    # Columns coerced to numeric dtype in ensure_ohlc.
    numeric_cols = [
        "open",
        "high",
        "low",
        "close",
        "open_time",
        "close_time",
        "volume",
        "quote_asset_volume",
    ]

    ohlc_cols = ["open", "high", "low", "close"]

    # Validation baseline: the Kucoin set is a subset of the Binance set.
    REQUIRED_COLUMNS = kucoin_cols

    def pre_process(self, exchange: ExchangeId, candles: list):
        """Build Heikin Ashi dataframes from raw exchange candles.

        Returns a tuple (df, df_1h, df_4h): the HA-transformed source
        dataframe (indexed by close_time timestamp) plus 1-hour and
        4-hour resampled aggregations of it.
        """
        df_1h = DataFrame()
        df_4h = DataFrame()
        if exchange == ExchangeId.BINANCE:
            # Binance API may return extra columns; only take the expected ones
            df_raw = DataFrame(candles)
            df = df_raw.iloc[:, : len(self.binance_cols)]
            df.columns = self.binance_cols
            columns = self.binance_cols
        else:
            df = DataFrame(candles, columns=self.kucoin_cols)
            columns = self.kucoin_cols

        # Ensure the dataframe has exactly the expected columns
        if len(df.columns) != len(columns):
            raise ValueError(
                f"Column mismatch: {len(df.columns)} vs expected {len(columns)}"
            )

        # Convert only numeric columns safely
        numeric_cols = ["open", "high", "low", "close", "volume"]
        for col in numeric_cols:
            df[col] = to_numeric(df[col], errors="coerce")

        df = self.get_heikin_ashi(df)

        # Ensure close_time is datetime and set as index for proper resampling
        # NOTE(review): assumes close_time is a millisecond epoch — confirm
        # both exchanges deliver ms here.
        df["timestamp"] = to_datetime(df["close_time"], unit="ms")
        df.set_index("timestamp", inplace=True)
        df = df.sort_index()
        df = df[~df.index.duplicated(keep="last")]

        # Create aggregation dictionary without close_time and open_time since they're now index-based
        resample_aggregation = {
            "open": "first",
            "close": "last",
            "high": "max",
            "low": "min",
            "volume": "sum",  # Add volume if it exists in your data
            "close_time": "first",
            "open_time": "first",
        }

        # Resample to 4 hour candles for TWAP (align to calendar hours like MongoDB)
        df_4h = df.resample("4h").agg(cast(dict, resample_aggregation))
        # Add open_time and close_time back as columns for 4h data
        df_4h["open_time"] = df_4h.index
        df_4h["close_time"] = df_4h.index

        # Resample to 1 hour candles for Supertrend (align to calendar hours like MongoDB)
        df_1h = df.resample("1h").agg(cast(dict, resample_aggregation))
        # Add open_time and close_time back as columns for 1h data
        df_1h["open_time"] = df_1h.index
        df_1h["close_time"] = df_1h.index

        return df, df_1h, df_4h

    @staticmethod
    def post_process(df: DataFrame) -> DataFrame:
        """
        Post-process the DataFrame by filling missing values and
        converting data types as needed.
        """
        df.dropna(inplace=True)
        df.reset_index(drop=True, inplace=True)
        return df

    def ensure_ohlc(self, df: DataFrame) -> DataFrame:
        """Validate & coerce a DataFrame into an DataFrame.

        Steps:
        - Verify all REQUIRED_COLUMNS are present (raises ValueError if missing).
        - Coerce numeric columns (including *_time which are expected as ms epoch).
        - Perform early failure if quote_asset_volume becomes entirely NaN.
        - Return the same underlying object cast to DataFrame (no deep copy).
        """
        missing = set(self.REQUIRED_COLUMNS) - set(df.columns)
        if missing:
            raise ValueError(f"Missing required OHLC columns: {missing}")

        for col in self.numeric_cols:
            if col in df.columns and not is_numeric_dtype(df[col]):
                df[col] = to_numeric(df[col], errors="coerce")

        if (
            "quote_asset_volume" in df.columns
            and df["quote_asset_volume"].notna().sum() == 0
        ):
            raise ValueError(
                "quote_asset_volume column is entirely non-numeric after coercion; cannot compute quote_volume_ratio"
            )

        return df

    def get_heikin_ashi(self, df: DataFrame) -> DataFrame:
        """Transform raw OHLC columns to Heikin Ashi values.

        Works on a positional copy (index reset); the HA open/high/low/close
        overwrite the original open/high/low/close columns in the returned
        frame. Empty input is returned unchanged.
        """
        if df.empty:
            return df

        # Validate & coerce using the new type guard helper.
        df = self.ensure_ohlc(df)
        work = df.reset_index(drop=True).copy()

        # Compute HA_Close from ORIGINAL OHLC (still intact in 'work').
        # Ensure numeric dtypes (API feeds sometimes deliver strings)
        for c in self.ohlc_cols:
            # Only attempt conversion if dtype is not already numeric
            if not is_numeric_dtype(work[c]):
                work.loc[:, c] = to_numeric(work[c], errors="coerce")

        if work[self.ohlc_cols].isna().any().any():
            # Drop rows that became NaN after coercion (invalid numeric data)
            work = work.dropna(subset=self.ohlc_cols).reset_index(drop=True)
            if work.empty:
                raise ValueError("All OHLC rows became NaN after numeric coercion.")

        ha_close = (work["open"] + work["high"] + work["low"] + work["close"]) / 4.0

        # Seed HA_Open with original O & C (not HA close).
        ha_open = ha_close.copy()
        ha_open.iloc[0] = (work["open"].iloc[0] + work["close"].iloc[0]) / 2.0
        # Recursive definition: each HA open depends on the previous HA bar.
        for i in range(1, len(work)):
            ha_open.iloc[i] = (ha_open.iloc[i - 1] + ha_close.iloc[i - 1]) / 2.0

        # High / Low derived from max/min of (raw high/low, ha_open, ha_close)
        ha_high = concat([work["high"], ha_open, ha_close], axis=1).max(axis=1)
        ha_low = concat([work["low"], ha_open, ha_close], axis=1).min(axis=1)

        # Assign transformed values.
        work.loc[:, "open"] = ha_open
        work.loc[:, "high"] = ha_high
        work.loc[:, "low"] = ha_low
        work.loc[:, "close"] = ha_close

        return work
|