forex_data_aggregator 0.1.2__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- forex_data/__init__.py +92 -0
- forex_data/config/__init__.py +20 -0
- forex_data/config/config_file.py +89 -0
- forex_data/data_management/__init__.py +84 -0
- forex_data/data_management/common.py +1773 -0
- forex_data/data_management/database.py +1322 -0
- forex_data/data_management/historicaldata.py +1262 -0
- forex_data/data_management/realtimedata.py +993 -0
- forex_data_aggregator-0.1.2.dist-info/LICENSE +21 -0
- forex_data_aggregator-0.1.2.dist-info/METADATA +562 -0
- forex_data_aggregator-0.1.2.dist-info/RECORD +12 -0
- forex_data_aggregator-0.1.2.dist-info/WHEEL +4 -0

forex_data_aggregator-0.1.2.dist-info/LICENSE
@@ -0,0 +1,21 @@
MIT License

Copyright (c) 2024 Nicola Fiorato

Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:

The above copyright notice and this permission notice shall be included in all
copies or substantial portions of the Software.

THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
SOFTWARE.

forex_data_aggregator-0.1.2.dist-info/METADATA
@@ -0,0 +1,562 @@
Metadata-Version: 2.3
Name: forex_data_aggregator
Version: 0.1.2
Summary: Data aggregation for forex market data
License: MIT
Author: Nicola Fiorato
Author-email: fiorato.nicola@gmail.com
Requires-Python: >=3.12,<3.13
Classifier: License :: OSI Approved :: MIT License
Classifier: Programming Language :: Python :: 3
Classifier: Programming Language :: Python :: 3.12
Requires-Dist: adbc-driver-sqlite (>=1.6.0,<2.0.0)
Requires-Dist: alpha-vantage (>=2.3.1,<3.0.0)
Requires-Dist: bs4 (>=0.0.2,<0.0.3)
Requires-Dist: connectorx (>=0.4.3,<0.5.0)
Requires-Dist: debugpy (>=1.8.5,<2.0.0)
Requires-Dist: dotty-dict (>=1.3.1,<2.0.0)
Requires-Dist: duckdb (>=1.3.1,<2.0.0)
Requires-Dist: flake8 (>=7.1.1,<8.0.0)
Requires-Dist: iteration-utilities (>=0.12.1,<0.13.0)
Requires-Dist: loguru (>=0.7.2,<0.8.0)
Requires-Dist: matplotlib (>=3.9.2)
Requires-Dist: mpl-finance (>=0.10.1,<0.11.0)
Requires-Dist: mplfinance (>=0.12.9b7,<0.13.0)
Requires-Dist: mypy (>=1.14.1,<2.0.0)
Requires-Dist: numpy (<2.0.0)
Requires-Dist: pandas (>=2.0.0)
Requires-Dist: pandas-stubs (>=2.2.3.241126,<3.0.0.0)
Requires-Dist: polars (>=0.20.7)
Requires-Dist: polygon-api-client (>=1.5.0,<2.0.0)
Requires-Dist: pyaml (>=21.10.1,<22.0.0)
Requires-Dist: pyarrow (>=14.0.2)
Requires-Dist: pytest (>=8.1.1,<9.0.0)
Requires-Dist: pytest-flake8 (>=1.3.0,<2.0.0)
Requires-Dist: pytest-mypy (>=0.10.3,<0.11.0)
Requires-Dist: requests (>=2.28.1,<3.0.0)
Requires-Dist: scipy (>=1.11.4,<2.0.0)
Requires-Dist: setuptools (>=69.0.2,<70.0.0)
Description-Content-Type: text/markdown

# <span style="font-size:1.5em;">FOREX DATA</span>

[Documentation](https://nikfio.github.io/forex_data/)
[CircleCI](https://circleci.com/gh/nikfio/forex_data)
[PyPI](https://pypi.org/project/forex-data-aggregator/)
[Python](https://www.python.org/)
[Poetry](https://python-poetry.org/)

> 📚 **[View Full Documentation](https://nikfio.github.io/forex_data/)** | 🚀 **[Quick Start](#installation)** | 💡 **[Examples](#examples)**

The forex_data package offers ways to aggregate Forex market data into a dataframe with the essential OHLC information, so the output will always have the columns:

* timestamp
* open
* high
* low
* close

The first purpose is to aggregate data in OHLC format and make it available in any specified timeframe in the simplest and most efficient way.
The second purpose is to manage one or multiple sources: an interface layer exposes primary functions with predefined names, inputs, and output results, in order to ease the access and usage of multiple data sources.
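
For intuition, the timeframe aggregation behind the first purpose is the classic OHLC resampling from a finer to a coarser bar size. The sketch below illustrates that operation with polars (one of the engines the package supports); it is only an illustration of the concept, not the package's own code:

```python
from datetime import datetime, timedelta

import polars as pl

# Toy 1-minute OHLC frame using the package's standard columns.
ts = [datetime(2024, 1, 1) + timedelta(minutes=i) for i in range(240)]
px = [1.10 + 0.0001 * (i % 7) for i in range(240)]
df_1m = pl.DataFrame({
    "timestamp": ts,
    "open": px,
    "high": [p + 0.0002 for p in px],
    "low": [p - 0.0002 for p in px],
    "close": px,
})

# Resample to a 1-hour timeframe: first open, max high, min low, last close.
df_1h = (
    df_1m.sort("timestamp")
    .group_by_dynamic("timestamp", every="1h")
    .agg(
        pl.col("open").first(),
        pl.col("high").max(),
        pl.col("low").min(),
        pl.col("close").last(),
    )
)
print(df_1h)
```

The data managers described below perform this kind of resampling internally whenever a timeframe is requested through `get_data`.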

At the moment, sources are divided into **historical sources** and **real-time sources**.

## SOURCES

### HISTORICAL SOURCE

A historical source is a source that makes data available but, for design reasons, does not have a defined update policy.
On the other hand, it can provide a great deal of historical data, typically going back to the early 2000s, and its free tier is fine for the purposes of the package.

A perfect data source of this type is [histdata.com](http://www.histdata.com/), whose work is genuine and much appreciated.

Summarizing, a historical source can provide large amounts of data, even from many years ago and with no limits, at the downside of a slow update rate. For example, *histdata* updates data on a monthly basis.

### REAL-TIME SOURCE

A real-time source is what is more typically known as a source of forex or stock market data. It offers APIs through dedicated clients, or at least minimal documentation for building the API call as an HTTP request.
A minimal free or trial offering is usually available, but these providers rely on premium subscription offers based on:

* real-time performance
* size of the available tickers list
* how much history per ticker
* and many other parameters ...

As of now, just [alpha-vantage](https://www.alphavantage.co/documentation/) and [polygon-io](https://polygon.io/docs/forex/getting-started) are managed. The intention is to make the most out of them and their free-tier access to data.

Even if the free subscription is limited for these providers, there are two reasons to include them in the package: they offer much more recent updates than any historical source, and the module is designed to ease the work of studying a new provider's API calls. A real-time data manager uses all the available remote sources at the same time and provides access to their APIs through a simpler interface.

### Considerations

*What is the trade-off between a historical and a real-time source? And why is the simultaneous usage of both powerful?*

This question captures the primary usefulness of the package.
A historical source like the one managed by the package typically updates data every month, so you would have a delay of up to a month in retrieving the latest data; on the upside, you can get data from 20 or more years ago up to last month at one-minute or finer resolution.

A real-time source usually lets you get data while limiting the number of candles in the output.
Also, the free subscription typically does not let you get data older than a month or so, especially when requested at a fine-grained resolution like the 1-minute timeframe.
The real-time source fills the gap left by the one-month delay described for the historical source.
And it is widely agreed that the latest data has the most influence on the next trading positions to be set.

Concluding, the combination of a historical and a real-time source gives 1-minute or lower resolution data starting over 20 years ago and reaching approximately until yesterday or today.

## INSTALLATION

### From PyPI (Recommended)

The easiest way to install forex_data is via pip:

```bash
pip install forex-data-aggregator
```

Or with Poetry:

```bash
poetry add forex-data-aggregator
```

### From Source

If you want to install from source or contribute to development:

1. Ensure you have [Poetry](https://python-poetry.org/docs/) installed
2. Clone the repository:
```bash
git clone https://github.com/nikfio/forex_data.git -b master forex-data
cd forex-data
```
3. Install dependencies:
```bash
poetry install
```
4. Run tests to verify installation:
```bash
poetry run pytest
```

## DOCUMENTATION

📖 **Comprehensive documentation is available at [nikfio.github.io/forex_data](https://nikfio.github.io/forex_data/)**

The full documentation includes:

- **[Installation Guide](https://nikfio.github.io/forex_data/installation.html)** - Detailed setup instructions
- **[Quick Start Tutorial](https://nikfio.github.io/forex_data/quickstart.html)** - Get started in minutes
- **[Configuration Reference](https://nikfio.github.io/forex_data/configuration.html)** - All configuration options explained
- **[API Reference](https://nikfio.github.io/forex_data/forex_data.html)** - Complete API documentation with type hints
- **[Code Examples](https://nikfio.github.io/forex_data/examples.html)** - 15+ comprehensive examples
- **[Contributing Guide](https://nikfio.github.io/forex_data/contributing.html)** - How to contribute to the project
- **[Changelog](https://nikfio.github.io/forex_data/changelog.html)** - Version history and updates

## CONFIGURATION FILE

A configuration file can be passed in order to group fixed parameter values.
In the cloned repository, look in the [appconfig folder](appconfig) for the [example template file](appconfig/appconfig_template.yaml).

When instantiating a data manager, you can pass either the absolute path to the YAML file or a folder.
In the second case, it will look for the configuration file ending with `data_config.yaml` in the specified folder.
Furthermore, any parameter value can be overridden by explicit assignment at object instantiation.
The feature will become clearer in the [examples section](#examples).

#### ENGINE

Available options:

* pandas
* pyarrow
* polars

#### DATA_FILETYPE

Available options:

* csv
* parquet

The *parquet* filetype is strongly suggested for read/write speed and disk space usage.
However, if you use an analysis application outside the Python environment, it is more likely to accept csv files than parquet, so the *csv* filetype could be a better choice for its broader acceptance.

#### PROVIDERS_KEY

To use real-time sources you need to provide an API key.

Register and create a key for the Alpha-Vantage provider here:
[Alpha-Vantage free API registration](https://www.alphavantage.co/support/#api-key)

Register and create a key for the Polygon-IO provider here:
[Polygon-IO home page](https://polygon.io/)
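
Putting these options together, a configuration file could look like the following sketch (the key names mirror the ones documented above and used in the examples below; the shipped [template](appconfig/appconfig_template.yaml) may define additional parameters):

```yaml
# my_data_config.yaml - illustrative sketch, not the shipped template
DATA_FILETYPE: 'parquet'

ENGINE: 'polars'

PROVIDERS_KEY:
    ALPHA_VANTAGE_API_KEY: 'your_alphavantage_key'
    POLYGON_IO_API_KEY: 'your_polygon_io_key'
```

The file itself, or the folder containing it (if its name ends with `data_config.yaml`), can then be passed through the `config` argument of the data managers, as shown in the [examples](#examples).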

## LOGGING

Logging is provided via the loguru library.
By construction, the log is dumped to a file whose location is determined using pathlib.
A generic usage folder for the package, named `.database`, is created in the current user's home folder.
There the log is dumped to a file called `forexdata.log`, so the complete location of the log file is:

`~/.database/forexdata.log`
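
As a reference, the described behaviour can be reproduced with a few lines of loguru and pathlib; this is only an illustrative sketch, not the package's internal setup:

```python
from pathlib import Path

from loguru import logger

# Documented location: ~/.database/forexdata.log
log_file = Path.home() / ".database" / "forexdata.log"
log_file.parent.mkdir(parents=True, exist_ok=True)

logger.add(log_file)  # register the file sink alongside the default stderr sink
logger.info("forex_data logging initialised")
```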

## EXAMPLES

You can find complete working examples in the [examples folder](examples/) showing the various modules and functionalities the package offers.

To run the examples:

```bash
# Historical data example
poetry run python examples/histdata_db_manager.py

# Real-time data example (requires API keys as environment variables)
export ALPHA_VANTAGE_API_KEY="your_key_here"
export POLYGON_IO_API_KEY="your_key_here"
poetry run python examples/realtime_data_manager.py
```

#### Historical data

Let's walk through the [example for historical data source](examples/histdata_db_manager.py):
1. **Configuration setup**
```python
# Use a runtime defined config yaml file
test_config_yaml = '''
DATA_FILETYPE: 'parquet'

ENGINE: 'polars_lazy'
'''
```
You can define the configuration inline or use a file. The configuration can override specific settings.
<br>

2. **Data manager instance**
```python
from forex_data import HistoricalManagerDB

histmanager = HistoricalManagerDB(
    config=test_config_yaml
)
```
Create an instance of the historical data manager with your configuration.
<br>

3. **Get data**
```python
ex_ticker = 'EURUSD'
ex_timeframe = '1d'
ex_start_date = '2018-10-03 10:00:00'
ex_end_date = '2018-12-03 10:00:00'

yeardata = histmanager.get_data(
    ticker=ex_ticker,
    timeframe=ex_timeframe,
    start=ex_start_date,
    end=ex_end_date
)
```
The call returns a dataframe with the timeframe, start, and end specified by the inputs.
The output dataframe type depends on the engine selected (polars_lazy, polars, pandas, pyarrow).

With `polars_lazy` as the ENGINE option, the output dataframe is:
```
┌─────────────────────┬─────────┬─────────┬─────────┬─────────┐
│ timestamp           ┆ open    ┆ high    ┆ low     ┆ close   │
│ ---                 ┆ ---     ┆ ---     ┆ ---     ┆ ---     │
│ datetime[ms]        ┆ f32     ┆ f32     ┆ f32     ┆ f32     │
╞═════════════════════╪═════════╪═════════╪═════════╪═════════╡
│ 2018-10-03 21:00:00 ┆ 1.1523  ┆ 1.1528  ┆ 1.1512  ┆ 1.1516  │
│ 2018-10-04 21:00:00 ┆ 1.1516  ┆ 1.1539  ┆ 1.1485  ┆ 1.1498  │
│ 2018-10-05 21:00:00 ┆ 1.1498  ┆ 1.1534  ┆ 1.1486  ┆ 1.1514  │
│ ...                 ┆ ...     ┆ ...     ┆ ...     ┆ ...     │
└─────────────────────┴─────────┴─────────┴─────────┴─────────┘
```
<br>

4. **Add a timeframe**
```python
histmanager.add_timeframe('1W')
```
Add a new timeframe. The data manager will create and cache the new timeframe data if not already present.
<br>

5. **Plot data**
```python
histmanager.plot(
    ticker=ex_ticker,
    timeframe='1D',
    start_date='2016-02-02 18:00:00',
    end_date='2016-06-23 23:00:00'
)
```
Generate a candlestick chart for the specified ticker and date range.

<br>

*(candlestick chart produced by the call above)*

<br>

6. **Conditional Data Retrieval**

You can filter data directly during retrieval using SQL-like conditions.

```python
from forex_data import (
    HistoricalManagerDB,
    BASE_DATA_COLUMN_NAME,
    SQL_COMPARISON_OPERATORS
)

# 1. Simple condition: OPEN < 1.13
data = histmanager.get_data(
    ticker='EURUSD',
    timeframe='1D',
    start='2018-01-01',
    end='2018-12-31',
    comparison_column_name=BASE_DATA_COLUMN_NAME.OPEN,
    check_level=1.13,
    comparison_operator=SQL_COMPARISON_OPERATORS.LESS_THAN
)

# 2. Multiple conditions (OR): HIGH > 1.145 OR LOW < 1.12
from forex_data import SQL_CONDITION_AGGREGATION_MODES

data = histmanager.get_data(
    ticker='EURUSD',
    timeframe='1D',
    start='2019-01-01',
    end='2019-12-31',
    comparison_column_name=[
        BASE_DATA_COLUMN_NAME.HIGH,
        BASE_DATA_COLUMN_NAME.LOW
    ],
    check_level=[1.145, 1.12],
    comparison_operator=[
        SQL_COMPARISON_OPERATORS.GREATER_THAN,
        SQL_COMPARISON_OPERATORS.LESS_THAN
    ],
    aggregation_mode=SQL_CONDITION_AGGREGATION_MODES.OR
)
```

<br>

#### Real-Time data

Let's walk through the [example for real-time data source](examples/realtime_data_manager.py):

**Important:** This example requires API keys set as environment variables:
```bash
export ALPHA_VANTAGE_API_KEY="your_alphavantage_key"
export POLYGON_IO_API_KEY="your_polygon_io_key"
```

1. **Configuration with API keys**
```python
from os import getenv

alpha_vantage_key = getenv('ALPHA_VANTAGE_API_KEY')
polygon_io_key = getenv('POLYGON_IO_API_KEY')

test_config_yaml = f'''
DATA_FILETYPE: 'parquet'

ENGINE: 'polars_lazy'

PROVIDERS_KEY:
    ALPHA_VANTAGE_API_KEY : {alpha_vantage_key},
    POLYGON_IO_API_KEY : {polygon_io_key}
'''
```
Configuration includes API keys for real-time data providers.
<br>

2. **Data manager instance**
```python
from forex_data import RealtimeManager

realtimedata_manager = RealtimeManager(
    config=test_config_yaml
)
```
<br>

3. **Get last daily close**
```python
ex_ticker = 'EURCAD'

dayclose_quote = realtimedata_manager.get_daily_close(
    ticker=ex_ticker,
    last_close=True
)
```

Output:
```
┌─────────────────────┬─────────┬─────────┬─────────┬────────┐
│ timestamp           ┆ open    ┆ high    ┆ low     ┆ close  │
│ ---                 ┆ ---     ┆ ---     ┆ ---     ┆ ---    │
│ datetime[ms]        ┆ f32     ┆ f32     ┆ f32     ┆ f32    │
╞═════════════════════╪═════════╪═════════╪═════════╪════════╡
│ 2025-01-23 00:00:00 ┆ 1.4123  ┆ 1.4156  ┆ 1.4098  ┆ 1.4125 │
└─────────────────────┴─────────┴─────────┴─────────┴────────┘
```
4. **Get daily close for last N days**
```python
ex_n_days = 13

window_daily_ohlc = realtimedata_manager.get_daily_close(
    ticker=ex_ticker,
    recent_days_window=ex_n_days
)
```
Returns the last 13 days of daily OHLC data.

5. **Get daily close for specific date range**
```python
ex_start_date = '2025-01-15'
ex_end_date = '2025-01-23'

window_limits_daily_ohlc = realtimedata_manager.get_daily_close(
    ticker=ex_ticker,
    day_start=ex_start_date,
    day_end=ex_end_date
)
```

Output:
```
┌─────────────────────┬────────┬────────┬────────┬────────┐
│ timestamp           ┆ open   ┆ high   ┆ low    ┆ close  │
│ ---                 ┆ ---    ┆ ---    ┆ ---    ┆ ---    │
│ datetime[ms]        ┆ f32    ┆ f32    ┆ f32    ┆ f32    │
╞═════════════════════╪════════╪════════╪════════╪════════╡
│ 2025-01-23 00:00:00 ┆ 1.4125 ┆ 1.4156 ┆ 1.4098 ┆ 1.4132 │
│ 2025-01-22 00:00:00 ┆ 1.4089 ┆ 1.4147 ┆ 1.4072 ┆ 1.4125 │
│ 2025-01-21 00:00:00 ┆ 1.4112 ┆ 1.4134 ┆ 1.4063 ┆ 1.4089 │
│ ...                 ┆ ...    ┆ ...    ┆ ...    ┆ ...    │
└─────────────────────┴────────┴────────┴────────┴────────┘
```

6. **Get OHLC data with custom timeframe**
```python
ex_start_date = '2024-04-10'
ex_end_date = '2024-04-15'
ex_timeframe = '1h'

window_data_ohlc = realtimedata_manager.get_data(
    ticker=ex_ticker,
    start=ex_start_date,
    end=ex_end_date,
    timeframe=ex_timeframe
)
```

Output:
```
Real time 1h window data: shape: (72, 5)
┌─────────────────────┬─────────┬─────────┬─────────┬─────────┐
│ timestamp           ┆ open    ┆ high    ┆ low     ┆ close   │
│ ---                 ┆ ---     ┆ ---     ┆ ---     ┆ ---     │
│ datetime[ms]        ┆ f32     ┆ f32     ┆ f32     ┆ f32     │
╞═════════════════════╪═════════╪═════════╪═════════╪═════════╡
│ 2024-04-10 00:00:00 ┆ 1.4765  ┆ 1.4768  ┆ 1.4752  ┆ 1.4761  │
│ 2024-04-10 01:00:00 ┆ 1.4761  ┆ 1.4768  ┆ 1.4755  ┆ 1.4762  │
│ 2024-04-10 02:00:00 ┆ 1.4762  ┆ 1.4778  ┆ 1.4751  ┆ 1.4771  │
│ ...                 ┆ ...     ┆ ...     ┆ ...     ┆ ...     │
└─────────────────────┴─────────┴─────────┴─────────┴─────────┘
```

7. **Intraday data with dynamic dates**
```python
from pandas import Timestamp, Timedelta

ex_start_date = Timestamp.now() - Timedelta('10D')
ex_end_date = Timestamp.now() - Timedelta('8D')
ex_timeframe = '5m'

window_data_ohlc = realtimedata_manager.get_data(
    ticker='EURUSD',
    start=ex_start_date,
    end=ex_end_date,
    timeframe=ex_timeframe
)
```
Get 5-minute data for recent days using dynamic date calculations.

## PYTEST and pipeline implementation

The project uses **pytest** for testing and **CircleCI** for continuous integration. The pipeline automatically runs on every commit to ensure code quality and functionality.

### Testing with Pytest

To run tests locally:

```bash
# Run all tests
poetry run pytest

# Run tests with flake8 linting (same as CI)
poetry run pytest --flake8

# Run tests with verbose output
poetry run pytest -v

# Run specific test file
poetry run pytest tests/test_file.py
```

### CircleCI Pipeline

The CI/CD pipeline is configured via `.circleci/config.yml` and automatically runs on every push to the repository.

#### Pipeline Configuration

**Version:** CircleCI 2.1

**Docker Image:** `cimg/python:3.12.12`

**Workflow:** `unit-tests`

#### Pipeline Steps

The pipeline executes the following steps for Python 3.12:

1. **Checkout**: Clone the repository code
2. **Install Poetry**: Install the Poetry package manager (`pip install poetry`)
3. **Restore Cache**: Restore dependencies from cache if available (cache key based on `poetry.lock` checksum)
4. **Install Dependencies**: Install project dependencies using `poetry install`
5. **Save Cache**: Cache the installed dependencies for faster future builds
6. **Run Tests**: Execute tests with flake8 linting using `poetry run pytest --flake8`

#### Caching Strategy

The pipeline uses CircleCI's caching mechanism to speed up builds:

- **Cache Key**: `v1-dependencies-{{ checksum "poetry.lock" }}`
- **Fallback**: `v1-dependencies-` (if no exact match)
- **Cached Paths**: `./repo` directory

This ensures that dependencies are only reinstalled when `poetry.lock` changes, significantly reducing build times.

#### Environment Variables

The pipeline supports the following environment variables (configured in CircleCI project settings):

- `DATABASE_URL`: Database connection string (if needed)
- `API_KEY`: API keys for external services (if needed for integration tests)

#### Jobs

- **py312**: Runs the complete test suite on Python 3.12

#### Workflow

The `unit-tests` workflow triggers on every commit and runs the `py312` job to validate:
- Code functionality through pytest
- Code quality and style through flake8 integration
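
Putting the details above together, a `.circleci/config.yml` consistent with this description might look like the following sketch (the repository's actual file may differ in its exact contents):

```yaml
version: 2.1

jobs:
  py312:
    docker:
      - image: cimg/python:3.12.12
    steps:
      - checkout
      - run:
          name: Install Poetry
          command: pip install poetry
      - restore_cache:
          keys:
            - v1-dependencies-{{ checksum "poetry.lock" }}
            - v1-dependencies-
      - run:
          name: Install dependencies
          command: poetry install
      - save_cache:
          key: v1-dependencies-{{ checksum "poetry.lock" }}
          paths:
            - ./repo
      - run:
          name: Run tests
          command: poetry run pytest --flake8

workflows:
  unit-tests:
    jobs:
      - py312
```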

forex_data_aggregator-0.1.2.dist-info/RECORD
@@ -0,0 +1,12 @@
forex_data/__init__.py,sha256=5oXkOm9O86adknutRNJqD1AIWCaepPxI2s5v1PzH_-E,2087
forex_data/config/__init__.py,sha256=qRC_dvGHWa7p_3lViyw-Wk_5ZKY3u1uypVSnoEYZ4xU,318
forex_data/config/config_file.py,sha256=baAThUgsLiowhI4G06W6u3vzMkzVXIJiezMFPpYN3lY,1803
forex_data/data_management/__init__.py,sha256=CjmEFotIoRVd2LMA_TCF46AN83l6OXHeQbTeXbFpRzM,1844
forex_data/data_management/common.py,sha256=WP7ygIDCt0fJO_xBCBpCN18iHTjUJpu6-6lXnfK18XE,45319
forex_data/data_management/database.py,sha256=HwZ_mUSrsFSZ5gF7XFBN5QVbmAgxZd678iWb7FZ-Jjg,42526
forex_data/data_management/historicaldata.py,sha256=aNUM3kA6CzlMbtMmUlu99B_ASrZNIxRT4G6MIMRMkxc,42945
forex_data/data_management/realtimedata.py,sha256=WzdL30skro-JRre3Whoi2Sb_E3tzyWNJn8FUekhz4vs,32436
forex_data_aggregator-0.1.2.dist-info/LICENSE,sha256=dQy-YLtlH30iwmb2BxJj-y3LWEXFbXe1OVnn8LP1X-o,1071
forex_data_aggregator-0.1.2.dist-info/METADATA,sha256=PWRlv5_wsa6lzkZ7Pjbh4dkgLVGhvegh9bFPr5oZEl4,22051
forex_data_aggregator-0.1.2.dist-info/WHEEL,sha256=b4K_helf-jlQoXBBETfwnf4B04YC67LOev0jo4fX5m8,88
forex_data_aggregator-0.1.2.dist-info/RECORD,,
|