hawk-sdk 0.0.17__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- hawk_sdk/__init__.py +0 -0
- hawk_sdk/api/__init__.py +4 -0
- hawk_sdk/api/equities/__init__.py +1 -0
- hawk_sdk/api/equities/main.py +44 -0
- hawk_sdk/api/equities/repository.py +158 -0
- hawk_sdk/api/equities/service.py +51 -0
- hawk_sdk/api/futures/__init__.py +1 -0
- hawk_sdk/api/futures/main.py +32 -0
- hawk_sdk/api/futures/repository.py +86 -0
- hawk_sdk/api/futures/service.py +41 -0
- hawk_sdk/api/ice_bofa_bonds_indices/__init__.py +1 -0
- hawk_sdk/api/ice_bofa_bonds_indices/main.py +32 -0
- hawk_sdk/api/ice_bofa_bonds_indices/repository.py +91 -0
- hawk_sdk/api/ice_bofa_bonds_indices/service.py +41 -0
- hawk_sdk/api/system/__init__.py +1 -0
- hawk_sdk/api/system/main.py +29 -0
- hawk_sdk/api/system/repository.py +53 -0
- hawk_sdk/api/system/service.py +39 -0
- hawk_sdk/core/__init__.py +0 -0
- hawk_sdk/core/common/__init__.py +0 -0
- hawk_sdk/core/common/base_enum.py +16 -0
- hawk_sdk/core/common/constants.py +1 -0
- hawk_sdk/core/common/data_object.py +43 -0
- hawk_sdk/core/common/utils.py +23 -0
- hawk_sdk-0.0.17.dist-info/METADATA +6 -0
- hawk_sdk-0.0.17.dist-info/RECORD +28 -0
- hawk_sdk-0.0.17.dist-info/WHEEL +5 -0
- hawk_sdk-0.0.17.dist-info/top_level.txt +1 -0
hawk_sdk/__init__.py
ADDED
File without changes

hawk_sdk/api/__init__.py
ADDED

hawk_sdk/api/equities/__init__.py
ADDED
@@ -0,0 +1 @@
from hawk_sdk.api.equities.main import Equities

hawk_sdk/api/equities/main.py
ADDED
@@ -0,0 +1,44 @@
"""
@description: Datasource API for Equities data access and export functions.
@author: Rithwik Babu
"""
from typing import List

from hawk_sdk.api.equities.repository import EquitiesRepository
from hawk_sdk.api.equities.service import EquitiesService
from hawk_sdk.core.common.data_object import DataObject


class Equities:
    """Datasource API for fetching Equities data."""

    def __init__(self, environment="production") -> None:
        """Initializes the Equities datasource with required configurations."""
        self.repository = EquitiesRepository(environment=environment)
        self.service = EquitiesService(self.repository)

    def get_adjusted_ohlcv(self, start_date: str, end_date: str, interval: str, hawk_ids: List[int]) -> DataObject:
        """Fetch open, high, low, close, and volume data for the given date range and hawk_ids.

        :param start_date: The start date for the data query (YYYY-MM-DD).
        :param end_date: The end date for the data query (YYYY-MM-DD).
        :param interval: The interval for the data query (e.g., '1d', '1h', '1m').
        :param hawk_ids: A list of specific hawk_ids to filter by.
        :return: A hawk DataObject containing the data.
        """
        return DataObject(
            name="adjusted_equities_ohlcv",
            data=self.service.get_adjusted_ohlcv(start_date, end_date, interval, hawk_ids)
        )

    def get_adjusted_ohlcv_snapshot(self, timestamp: str, hawk_ids: List[int]) -> DataObject:
        """Fetch snapshot data for the given date and hawk_ids.

        :param timestamp: The timestamp for the data query (YYYY-MM-DD HH:MM:SS).
        :param hawk_ids: A list of specific hawk_ids to filter by.
        :return: A hawk DataObject containing the data.
        """
        return DataObject(
            name="equities_adjusted_ohlcv_snapshot",
            data=self.service.get_adjusted_ohlcv_snapshot(timestamp, hawk_ids)
        )

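For reference, a minimal usage sketch of this entry point (the hawk_ids shown are hypothetical placeholders; assumes BigQuery credentials are configured as described in hawk_sdk/core/common/utils.py):

    from hawk_sdk.api.equities.main import Equities

    equities = Equities(environment="production")

    # Daily adjusted bars for two hypothetical instrument IDs.
    bars = equities.get_adjusted_ohlcv(
        start_date="2024-01-01",
        end_date="2024-01-31",
        interval="1d",
        hawk_ids=[101, 102],
    )
    df = bars.to_df()  # the DataObject wraps a pandas DataFrame

    # Most recent snapshot at or before a given timestamp.
    snap = equities.get_adjusted_ohlcv_snapshot("2024-01-31 16:00:00", hawk_ids=[101, 102])
    snap.show(3)
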
hawk_sdk/api/equities/repository.py
ADDED
@@ -0,0 +1,158 @@
"""
@description: Repository layer for fetching Equities data from BigQuery.
@author: Rithwik Babu
"""
import logging
from typing import Iterator, List

from google.cloud import bigquery

from hawk_sdk.core.common.utils import get_bigquery_client


class EquitiesRepository:
    """Repository for accessing Equities raw data."""

    def __init__(self, environment: str) -> None:
        """Initializes the repository with a BigQuery client.

        :param environment: The environment to fetch data from (e.g., 'production', 'development').
        """
        self.bq_client = get_bigquery_client()
        self.environment = environment

    def fetch_adjusted_ohlcv(self, start_date: str, end_date: str, interval: str, hawk_ids: List[int]) -> Iterator[dict]:
        """Fetches raw adjusted OHLCV data from BigQuery for the given date range and hawk_ids using query parameters."""
        open_field = f"adjusted_open_{interval}"
        high_field = f"adjusted_high_{interval}"
        low_field = f"adjusted_low_{interval}"
        close_field = f"adjusted_close_{interval}"
        volume_field = f"volume_{interval}"

        query = f"""
            WITH records_data AS (
                SELECT
                    r.record_timestamp AS date,
                    hi.value AS ticker,
                    MAX(CASE WHEN f.field_name = @open_field THEN r.double_value END) AS {open_field},
                    MAX(CASE WHEN f.field_name = @high_field THEN r.double_value END) AS {high_field},
                    MAX(CASE WHEN f.field_name = @low_field THEN r.double_value END) AS {low_field},
                    MAX(CASE WHEN f.field_name = @close_field THEN r.double_value END) AS {close_field},
                    MAX(CASE WHEN f.field_name = @volume_field THEN r.int_value END) AS {volume_field}
                FROM
                    `wsb-hc-qasap-ae2e.{self.environment}.records` AS r
                JOIN
                    `wsb-hc-qasap-ae2e.{self.environment}.fields` AS f
                ON r.field_id = f.field_id
                JOIN
                    `wsb-hc-qasap-ae2e.{self.environment}.hawk_identifiers` AS hi
                ON r.hawk_id = hi.hawk_id
                WHERE
                    r.hawk_id IN UNNEST(@hawk_ids)
                    AND f.field_name IN (@open_field, @high_field, @low_field, @close_field, @volume_field)
                    AND r.record_timestamp BETWEEN @start_date AND @end_date
                GROUP BY
                    date, ticker
            )
            SELECT DISTINCT
                date,
                ticker,
                {open_field},
                {high_field},
                {low_field},
                {close_field},
                {volume_field}
            FROM
                records_data
            ORDER BY
                date;
        """

        query_params = [
            bigquery.ArrayQueryParameter("hawk_ids", "INT64", hawk_ids),
            bigquery.ScalarQueryParameter("start_date", "STRING", start_date),
            bigquery.ScalarQueryParameter("end_date", "STRING", end_date),
            bigquery.ScalarQueryParameter("open_field", "STRING", open_field),
            bigquery.ScalarQueryParameter("high_field", "STRING", high_field),
            bigquery.ScalarQueryParameter("low_field", "STRING", low_field),
            bigquery.ScalarQueryParameter("close_field", "STRING", close_field),
            bigquery.ScalarQueryParameter("volume_field", "STRING", volume_field)
        ]

        job_config = bigquery.QueryJobConfig(query_parameters=query_params)

        try:
            query_job = self.bq_client.query(query, job_config=job_config)
            return query_job.result()
        except Exception as e:
            logging.error(f"Failed to fetch OHLC data: {e}")
            raise

    def fetch_adjusted_ohlcv_snapshot(self, timestamp: str, hawk_ids: List[int]) -> Iterator[dict]:
        """Fetches the most recent snapshot data from BigQuery for the given time and hawk_ids."""
        query = f"""
            WITH latest_timestamp AS (
                SELECT
                    MAX(r.record_timestamp) AS max_ts
                FROM
                    `wsb-hc-qasap-ae2e.{self.environment}.records` AS r
                WHERE
                    r.hawk_id IN UNNEST(@hawk_ids)
                    AND r.record_timestamp <= @timestamp
            ),
            records_data AS (
                SELECT
                    r.record_timestamp AS date,
                    hi.value AS ticker,
                    MAX(IF(f.field_name = 'adjusted_open_snapshot', r.double_value, NULL)) AS adjusted_open_snapshot,
                    MAX(IF(f.field_name = 'adjusted_high_snapshot', r.double_value, NULL)) AS adjusted_high_snapshot,
                    MAX(IF(f.field_name = 'adjusted_low_snapshot', r.double_value, NULL)) AS adjusted_low_snapshot,
                    MAX(IF(f.field_name = 'adjusted_close_snapshot', r.double_value, NULL)) AS adjusted_close_snapshot,
                    MAX(IF(f.field_name = 'volume_snapshot', r.int_value, NULL)) AS volume_snapshot
                FROM
                    `wsb-hc-qasap-ae2e.{self.environment}.records` AS r
                JOIN
                    `wsb-hc-qasap-ae2e.{self.environment}.fields` AS f
                ON r.field_id = f.field_id
                JOIN
                    `wsb-hc-qasap-ae2e.{self.environment}.hawk_identifiers` AS hi
                ON r.hawk_id = hi.hawk_id
                WHERE
                    r.hawk_id IN UNNEST(@hawk_ids)
                    AND f.field_name IN (
                        'adjusted_open_snapshot', 'adjusted_high_snapshot',
                        'adjusted_low_snapshot', 'adjusted_close_snapshot',
                        'volume_snapshot'
                    )
                    AND r.record_timestamp = (SELECT max_ts FROM latest_timestamp)
                GROUP BY
                    date, ticker
            )
            SELECT
                date,
                ticker,
                adjusted_open_snapshot,
                adjusted_high_snapshot,
                adjusted_low_snapshot,
                adjusted_close_snapshot,
                volume_snapshot
            FROM
                records_data
            ORDER BY
                ticker;
        """

        query_params = [
            bigquery.ArrayQueryParameter("hawk_ids", "INT64", hawk_ids),
            bigquery.ScalarQueryParameter("timestamp", "TIMESTAMP", timestamp)
        ]

        job_config = bigquery.QueryJobConfig(query_parameters=query_params)

        try:
            query_job = self.bq_client.query(query, job_config=job_config)
            return query_job.result()
        except Exception as e:
            logging.error(f"Failed to fetch snapshot data: {e}")
            raise

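Worth noting in this repository: every user-supplied value (IDs, dates, and the field names tested in the WHERE clause) is bound as a BigQuery query parameter, while only the derived column aliases are interpolated into the SQL string. A standalone sketch of that binding pattern, using the google-cloud-bigquery parameter types against a hypothetical table:

    from google.cloud import bigquery

    client = bigquery.Client()  # assumes Application Default Credentials

    sql = """
        SELECT ticker, close
        FROM `my-project.my_dataset.prices`  -- hypothetical table
        WHERE ticker IN UNNEST(@tickers)
          AND trade_date BETWEEN @start AND @end
    """
    job_config = bigquery.QueryJobConfig(query_parameters=[
        bigquery.ArrayQueryParameter("tickers", "STRING", ["AAPL", "MSFT"]),
        bigquery.ScalarQueryParameter("start", "DATE", "2024-01-01"),
        bigquery.ScalarQueryParameter("end", "DATE", "2024-01-31"),
    ])
    rows = client.query(sql, job_config=job_config).result()  # iterator of Row objects
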
hawk_sdk/api/equities/service.py
ADDED
@@ -0,0 +1,51 @@
"""
@description: Service layer for processing and normalizing Equities data.
@author: Rithwik Babu
"""
from typing import List, Iterator

import pandas as pd

from hawk_sdk.api.equities.repository import EquitiesRepository


class EquitiesService:
    """Service class for Equities business logic."""

    def __init__(self, repository: EquitiesRepository) -> None:
        """Initializes the service with a repository.

        :param repository: An instance of EquitiesRepository for data access.
        """
        self.repository = repository

    def get_adjusted_ohlcv(self, start_date: str, end_date: str, interval: str, hawk_ids: List[int]) -> pd.DataFrame:
        """Fetches and normalizes adjusted OHLCV data into a pandas DataFrame.

        :param start_date: The start date for the data query (YYYY-MM-DD).
        :param end_date: The end date for the data query (YYYY-MM-DD).
        :param interval: The interval for the data query (e.g., '1d', '1h', '1m').
        :param hawk_ids: A list of specific hawk_ids to filter by.
        :return: A pandas DataFrame containing the normalized data.
        """
        raw_data = self.repository.fetch_adjusted_ohlcv(start_date, end_date, interval, hawk_ids)
        return self._normalize_data(raw_data)

    def get_adjusted_ohlcv_snapshot(self, timestamp: str, hawk_ids: List[int]) -> pd.DataFrame:
        """Fetches and normalizes snapshot data for the given date and hawk_ids.

        :param timestamp: The timestamp for the data query (YYYY-MM-DD HH:MM:SS).
        :param hawk_ids: A list of specific hawk_ids to filter by.
        :return: A pandas DataFrame containing the normalized data.
        """
        raw_data = self.repository.fetch_adjusted_ohlcv_snapshot(timestamp, hawk_ids)
        return self._normalize_data(raw_data)

    @staticmethod
    def _normalize_data(data: Iterator[dict]) -> pd.DataFrame:
        """Converts raw data into a normalized pandas DataFrame.

        :param data: An iterator over raw data rows.
        :return: A pandas DataFrame containing normalized data.
        """
        return pd.DataFrame([dict(row) for row in data])

hawk_sdk/api/futures/__init__.py
ADDED
@@ -0,0 +1 @@
from hawk_sdk.api.futures.main import Futures

hawk_sdk/api/futures/main.py
ADDED
@@ -0,0 +1,32 @@
"""
@description: Datasource API for Hawk Global Futures data access and export functions.
@author: Rithwik Babu
"""
from typing import List

from hawk_sdk.api.futures.repository import FuturesRepository
from hawk_sdk.api.futures.service import FuturesService
from hawk_sdk.core.common.data_object import DataObject


class Futures:
    """Datasource API for fetching Futures data."""

    def __init__(self, environment="production") -> None:
        """Initializes the Futures datasource with required configurations."""
        self.repository = FuturesRepository(environment=environment)
        self.service = FuturesService(self.repository)

    def get_ohlcvo(self, start_date: str, end_date: str, interval: str, hawk_ids: List[int]) -> DataObject:
        """Fetch open, high, low, close, volume, and open interest data for the given date range and hawk_ids.

        :param start_date: The start date for the data query (YYYY-MM-DD).
        :param end_date: The end date for the data query (YYYY-MM-DD).
        :param interval: The interval for the data query (e.g., '1d', '1h', '1m').
        :param hawk_ids: A list of specific hawk_ids to filter by.
        :return: A hawk DataObject containing the data.
        """
        return DataObject(
            name="futures_ohlcvo",
            data=self.service.get_ohlcvo(start_date, end_date, interval, hawk_ids)
        )

hawk_sdk/api/futures/repository.py
ADDED
@@ -0,0 +1,86 @@
"""
@description: Repository layer for fetching Futures data from BigQuery.
@author: Rithwik Babu
"""
import logging
from typing import Iterator, List

from google.cloud import bigquery

from hawk_sdk.core.common.utils import get_bigquery_client


class FuturesRepository:
    """Repository for accessing Futures raw data."""

    def __init__(self, environment: str) -> None:
        """Initializes the repository with a BigQuery client.

        :param environment: The environment to fetch data from (e.g., 'production', 'development').
        """
        self.bq_client = get_bigquery_client()
        self.environment = environment

    def fetch_ohlcvo(self, start_date: str, end_date: str, interval: str, hawk_ids: List[int]) -> Iterator[dict]:
        """Fetches raw OHLCVO data from BigQuery for the given date range and hawk_ids using query parameters."""
        query = f"""
            WITH records_data AS (
                SELECT
                    r.record_timestamp AS date,
                    hi.value AS ticker,
                    MAX(CASE WHEN f.field_name = @open_field THEN r.double_value END) AS open,
                    MAX(CASE WHEN f.field_name = @high_field THEN r.double_value END) AS high,
                    MAX(CASE WHEN f.field_name = @low_field THEN r.double_value END) AS low,
                    MAX(CASE WHEN f.field_name = @close_field THEN r.double_value END) AS close,
                    MAX(CASE WHEN f.field_name = @volume_field THEN r.int_value END) AS volume,
                    MAX(CASE WHEN f.field_name = @open_interest_field THEN r.int_value END) AS open_interest
                FROM
                    `wsb-hc-qasap-ae2e.{self.environment}.records` AS r
                JOIN
                    `wsb-hc-qasap-ae2e.{self.environment}.fields` AS f
                ON r.field_id = f.field_id
                JOIN
                    `wsb-hc-qasap-ae2e.{self.environment}.hawk_identifiers` AS hi
                ON r.hawk_id = hi.hawk_id
                WHERE
                    r.hawk_id IN UNNEST(@hawk_ids)
                    AND f.field_name IN (@open_field, @high_field, @low_field, @close_field, @volume_field, @open_interest_field)
                    AND r.record_timestamp BETWEEN @start_date AND @end_date
                GROUP BY
                    date, ticker
            )
            SELECT DISTINCT
                date,
                ticker,
                open,
                high,
                low,
                close,
                volume,
                open_interest
            FROM
                records_data
            ORDER BY
                date;
        """

        query_params = [
            bigquery.ArrayQueryParameter("hawk_ids", "INT64", hawk_ids),
            bigquery.ScalarQueryParameter("start_date", "STRING", start_date),
            bigquery.ScalarQueryParameter("end_date", "STRING", end_date),
            bigquery.ScalarQueryParameter("open_field", "STRING", f"open_{interval}"),
            bigquery.ScalarQueryParameter("high_field", "STRING", f"high_{interval}"),
            bigquery.ScalarQueryParameter("low_field", "STRING", f"low_{interval}"),
            bigquery.ScalarQueryParameter("close_field", "STRING", f"close_{interval}"),
            bigquery.ScalarQueryParameter("volume_field", "STRING", f"volume_{interval}"),
            bigquery.ScalarQueryParameter("open_interest_field", "STRING", f"open_interest_{interval}"),
        ]

        job_config = bigquery.QueryJobConfig(query_parameters=query_params)

        try:
            query_job = self.bq_client.query(query, job_config=job_config)
            return query_job.result()
        except Exception as e:
            logging.error(f"Failed to fetch OHLCVO data: {e}")
            raise

hawk_sdk/api/futures/service.py
ADDED
@@ -0,0 +1,41 @@
"""
@description: Service layer for processing and normalizing Futures data.
@author: Rithwik Babu
"""
from typing import List, Iterator

import pandas as pd

from hawk_sdk.api.futures.repository import FuturesRepository


class FuturesService:
    """Service class for Futures business logic."""

    def __init__(self, repository: FuturesRepository) -> None:
        """Initializes the service with a repository.

        :param repository: An instance of FuturesRepository for data access.
        """
        self.repository = repository

    def get_ohlcvo(self, start_date: str, end_date: str, interval: str, hawk_ids: List[int]) -> pd.DataFrame:
        """Fetches and normalizes data into a pandas DataFrame.

        :param start_date: The start date for the data query (YYYY-MM-DD).
        :param end_date: The end date for the data query (YYYY-MM-DD).
        :param interval: The interval for the data query (e.g., '1d', '1h', '1m').
        :param hawk_ids: A list of specific hawk_ids to filter by.
        :return: A pandas DataFrame containing the normalized data.
        """
        raw_data = self.repository.fetch_ohlcvo(start_date, end_date, interval, hawk_ids)
        return self._normalize_data(raw_data)

    @staticmethod
    def _normalize_data(data: Iterator[dict]) -> pd.DataFrame:
        """Converts raw data into a normalized pandas DataFrame.

        :param data: An iterator over raw data rows.
        :return: A pandas DataFrame containing normalized data.
        """
        return pd.DataFrame([dict(row) for row in data])

hawk_sdk/api/ice_bofa_bonds_indices/__init__.py
ADDED
@@ -0,0 +1 @@
from hawk_sdk.api.ice_bofa_bonds_indices.main import ICEBofABondsIndices

hawk_sdk/api/ice_bofa_bonds_indices/main.py
ADDED
@@ -0,0 +1,32 @@
"""
@description: Datasource API for ICE BofA Bonds data access and export functions.
@author: Rithwik Babu
"""
from typing import List

from hawk_sdk.api.ice_bofa_bonds_indices.repository import ICEBofABondsIndicesRepository
from hawk_sdk.api.ice_bofa_bonds_indices.service import ICEBofABondsIndicesService
from hawk_sdk.core.common.data_object import DataObject


class ICEBofABondsIndices:
    """Datasource API for fetching ICE BofA Bond Indices data."""

    def __init__(self, environment="production") -> None:
        """Initializes the ICE BofA Bonds Indices datasource with required configurations."""
        self.repository = ICEBofABondsIndicesRepository(environment=environment)
        self.service = ICEBofABondsIndicesService(self.repository)

    def get_data(self, start_date: str, end_date: str, interval: str, hawk_ids: List[int]) -> DataObject:
        """Fetch index data for the given date range and hawk_ids.

        :param start_date: The start date for the data query (YYYY-MM-DD).
        :param end_date: The end date for the data query (YYYY-MM-DD).
        :param interval: The interval for the data query (e.g., '1d', '1h', '1m').
        :param hawk_ids: A list of specific hawk_ids to filter by.
        :return: A hawk DataObject containing the data.
        """
        return DataObject(
            name="ice_bofa_bonds_index_data",
            data=self.service.get_data(start_date, end_date, interval, hawk_ids)
        )

hawk_sdk/api/ice_bofa_bonds_indices/repository.py
ADDED
@@ -0,0 +1,91 @@
"""
@description: Repository layer for fetching ICE BofA Bonds Indices data from BigQuery.
@author: Rithwik Babu
"""
import logging
from typing import Iterator, List

from google.cloud import bigquery

from hawk_sdk.core.common.utils import get_bigquery_client


class ICEBofABondsIndicesRepository:
    """Repository for accessing ICE BofA Bonds Indices raw data."""

    def __init__(self, environment: str) -> None:
        """Initializes the repository with a BigQuery client.

        :param environment: The environment to fetch data from (e.g., 'production', 'development').
        """
        self.bq_client = get_bigquery_client()
        self.environment = environment

    def fetch_data(
        self, start_date: str, end_date: str, interval: str, hawk_ids: List[int]
    ) -> Iterator[dict]:
        """Fetches raw data from BigQuery for the given date range and hawk_ids using query parameters."""
        total_return_field = f"total_return_{interval}"
        oas_field = f"oas_{interval}"
        duration_modified_field = f"duration_modified_{interval}"
        duration_effective_field = f"duration_effective_{interval}"
        convexity_field = f"convexity_{interval}"

        query = f"""
            WITH records_data AS (
                SELECT
                    r.record_timestamp AS date,
                    hi.value AS ticker,
                    MAX(CASE WHEN f.field_name = @total_return_field THEN r.double_value END) AS {total_return_field},
                    MAX(CASE WHEN f.field_name = @oas_field THEN r.double_value END) AS {oas_field},
                    MAX(CASE WHEN f.field_name = @duration_modified_field THEN r.double_value END) AS {duration_modified_field},
                    MAX(CASE WHEN f.field_name = @duration_effective_field THEN r.double_value END) AS {duration_effective_field},
                    MAX(CASE WHEN f.field_name = @convexity_field THEN r.double_value END) AS {convexity_field}
                FROM
                    `wsb-hc-qasap-ae2e.{self.environment}.records` AS r
                JOIN
                    `wsb-hc-qasap-ae2e.{self.environment}.fields` AS f
                ON r.field_id = f.field_id
                JOIN
                    `wsb-hc-qasap-ae2e.{self.environment}.hawk_identifiers` AS hi
                ON r.hawk_id = hi.hawk_id
                WHERE
                    r.hawk_id IN UNNEST(@hawk_ids)
                    AND f.field_name IN (@total_return_field, @oas_field, @duration_modified_field, @duration_effective_field, @convexity_field)
                    AND r.record_timestamp BETWEEN @start_date AND @end_date
                GROUP BY
                    date, ticker
            )
            SELECT DISTINCT
                date,
                ticker,
                {total_return_field},
                {oas_field},
                {duration_modified_field},
                {duration_effective_field},
                {convexity_field}
            FROM
                records_data
            ORDER BY
                date;
        """

        query_params = [
            bigquery.ArrayQueryParameter("hawk_ids", "INT64", hawk_ids),
            bigquery.ScalarQueryParameter("start_date", "STRING", start_date),
            bigquery.ScalarQueryParameter("end_date", "STRING", end_date),
            bigquery.ScalarQueryParameter("total_return_field", "STRING", total_return_field),
            bigquery.ScalarQueryParameter("oas_field", "STRING", oas_field),
            bigquery.ScalarQueryParameter("duration_modified_field", "STRING", duration_modified_field),
            bigquery.ScalarQueryParameter("duration_effective_field", "STRING", duration_effective_field),
            bigquery.ScalarQueryParameter("convexity_field", "STRING", convexity_field)
        ]

        job_config = bigquery.QueryJobConfig(query_parameters=query_params)

        try:
            query_job = self.bq_client.query(query, job_config=job_config)
            return query_job.result()
        except Exception as e:
            logging.error(f"Failed to fetch data: {e}")
            raise

hawk_sdk/api/ice_bofa_bonds_indices/service.py
ADDED
@@ -0,0 +1,41 @@
"""
@description: Service layer for processing and normalizing ICE BofA Bonds Indices data.
@author: Rithwik Babu
"""
from typing import List, Iterator

import pandas as pd

from hawk_sdk.api.ice_bofa_bonds_indices.repository import ICEBofABondsIndicesRepository


class ICEBofABondsIndicesService:
    """Service class for ICE BofA Bonds Indices business logic."""

    def __init__(self, repository: ICEBofABondsIndicesRepository) -> None:
        """Initializes the service with a repository.

        :param repository: An instance of ICEBofABondsIndicesRepository for data access.
        """
        self.repository = repository

    def get_data(self, start_date: str, end_date: str, interval: str, hawk_ids: List[int]) -> pd.DataFrame:
        """Fetches and normalizes ICE BofA Bonds Indices data into a pandas DataFrame.

        :param start_date: The start date for the data query (YYYY-MM-DD).
        :param end_date: The end date for the data query (YYYY-MM-DD).
        :param interval: The interval for the data query (e.g., '1d', '1h', '1m').
        :param hawk_ids: A list of specific hawk_ids to filter by.
        :return: A pandas DataFrame containing the normalized data.
        """
        raw_data = self.repository.fetch_data(start_date, end_date, interval, hawk_ids)
        return self._normalize_data(raw_data)

    @staticmethod
    def _normalize_data(data: Iterator[dict]) -> pd.DataFrame:
        """Converts raw data into a normalized pandas DataFrame.

        :param data: An iterator over raw data rows.
        :return: A pandas DataFrame containing normalized data.
        """
        return pd.DataFrame([dict(row) for row in data])

hawk_sdk/api/system/__init__.py
ADDED
@@ -0,0 +1 @@
from hawk_sdk.api.system.main import System

hawk_sdk/api/system/main.py
ADDED
@@ -0,0 +1,29 @@
"""
@description: Datasource API for Hawk System data access and export functions.
@author: Rithwik Babu
"""
from typing import List

from hawk_sdk.api.system.repository import SystemRepository
from hawk_sdk.api.system.service import SystemService
from hawk_sdk.core.common.data_object import DataObject


class System:
    """Datasource API for fetching System data."""

    def __init__(self, environment="production") -> None:
        """Initializes the System datasource with required configurations."""
        self.repository = SystemRepository(environment=environment)
        self.service = SystemService(self.repository)

    def get_hawk_ids(self, tickers: List[str]) -> DataObject:
        """Fetch hawk_ids for the given list of tickers.

        :param tickers: A list of specific tickers to filter by.
        :return: A hawk DataObject containing the hawk ID data.
        """
        return DataObject(
            name="system_hawk_id_mappings",
            data=self.service.get_hawk_ids(tickers)
        )

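get_hawk_ids is the lookup step the other datasources build on: tickers resolve to hawk_ids, and hawk_ids key every data query. A sketch of that two-step flow (the tickers shown are hypothetical placeholders):

    from hawk_sdk.api.system.main import System
    from hawk_sdk.api.futures.main import Futures

    system = System()
    # Resolve hypothetical tickers to a DataFrame with ticker and hawk_id columns.
    mapping = system.get_hawk_ids(["ESZ4", "NQZ4"]).to_df()

    futures = Futures()
    ohlcvo = futures.get_ohlcvo(
        start_date="2024-06-01",
        end_date="2024-06-30",
        interval="1d",
        hawk_ids=mapping["hawk_id"].tolist(),
    )
    ohlcvo.to_csv("futures_june.csv")
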
hawk_sdk/api/system/repository.py
ADDED
@@ -0,0 +1,53 @@
"""
@description: Repository layer for fetching System data from BigQuery.
@author: Rithwik Babu
"""
import logging
from typing import Iterator, List

from google.cloud import bigquery

from hawk_sdk.core.common.utils import get_bigquery_client


class SystemRepository:
    """Repository for accessing System data."""

    def __init__(self, environment: str) -> None:
        """Initializes the repository with a BigQuery client.

        :param environment: The environment to fetch data from (e.g., 'production', 'development').
        """
        self.bq_client = get_bigquery_client()
        self.environment = environment

    def fetch_hawk_ids(self, tickers: List[str]) -> Iterator[dict]:
        """Fetches hawk_ids for the given list of tickers from BigQuery.

        :param tickers: A list of ticker strings to filter by.
        :return: An iterator over raw data rows.
        """
        query = f"""
            SELECT
                value AS ticker,
                hawk_id
            FROM
                `wsb-hc-qasap-ae2e.{self.environment}.hawk_identifiers`
            WHERE
                id_type = 'TICKER'
                AND value IN UNNEST(@ticker_list)
        """

        query_params = [
            bigquery.ArrayQueryParameter("ticker_list", "STRING", tickers),
        ]

        job_config = bigquery.QueryJobConfig(query_parameters=query_params)

        try:
            query_job = self.bq_client.query(query, job_config=job_config)
            return query_job.result()
        except Exception as e:
            logging.error(f"Failed to fetch hawk_ids: {e}")
            raise

hawk_sdk/api/system/service.py
ADDED
@@ -0,0 +1,39 @@
"""
@description: Service layer for processing and normalizing System data.
@author: Rithwik Babu
"""

from typing import List, Iterator

import pandas as pd

from hawk_sdk.api.system.repository import SystemRepository


class SystemService:
    """Service class for System business logic."""

    def __init__(self, repository: SystemRepository) -> None:
        """Initializes the service with a repository.

        :param repository: An instance of SystemRepository for data access.
        """
        self.repository = repository

    def get_hawk_ids(self, tickers: List[str]) -> pd.DataFrame:
        """Fetches and normalizes hawk IDs into a pandas DataFrame.

        :param tickers: A list of specific tickers to filter by.
        :return: A pandas DataFrame containing the normalized hawk ID data.
        """
        raw_data = self.repository.fetch_hawk_ids(tickers)
        return self._normalize_data(raw_data)

    @staticmethod
    def _normalize_data(data: Iterator[dict]) -> pd.DataFrame:
        """Converts raw data into a normalized pandas DataFrame.

        :param data: An iterator over raw data rows.
        :return: A pandas DataFrame containing normalized data.
        """
        return pd.DataFrame([dict(row) for row in data])

hawk_sdk/core/__init__.py
ADDED
File without changes

hawk_sdk/core/common/__init__.py
ADDED
File without changes

hawk_sdk/core/common/base_enum.py
ADDED
@@ -0,0 +1,16 @@
"""
@description: Base enum class.
@author: Rithwik Babu
"""

from enum import Enum


class BaseEnum(str, Enum):
    """
    Base enum class used by all enum classes.

    Note: Inheriting from str is necessary to correctly serialize the output of the enum.
    """

    pass

hawk_sdk/core/common/constants.py
ADDED
@@ -0,0 +1 @@
PROJECT_ID = 'wsb-hc-qasap-ae2e'

hawk_sdk/core/common/data_object.py
ADDED
@@ -0,0 +1,43 @@
"""
@description: Data Object class to handle output transformations.
@author: Rithwik Babu
"""
import pandas as pd


class DataObject:
    def __init__(self, name, data):
        self.__name = name
        self.__data = data

    def to_df(self) -> pd.DataFrame:
        """Exports data to a pandas DataFrame.

        :return: pd.DataFrame
        """
        return self.__data

    def to_csv(self, file_name):
        """Exports data to a CSV file.

        :param file_name: The name of the output CSV file.
        :return: None
        """
        self.__data.to_csv(file_name, index=False)

    def to_xlsx(self, file_name):
        """Exports data to an Excel file.

        :param file_name: The name of the output Excel file.
        :return: None
        """
        self.__data.to_excel(file_name, index=False)

    def show(self, n=5):
        """Print the first n rows of the data.

        :param n: Number of rows to print.
        :return: None, prints the head of the data.
        """
        print(self.__name)
        print(self.__data.head(n))

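DataObject is a thin wrapper that names a result set and standardizes its export paths. A quick usage sketch, with a stand-in DataFrame in place of real query output:

    import pandas as pd

    from hawk_sdk.core.common.data_object import DataObject

    df = pd.DataFrame({"ticker": ["AAA", "BBB"], "close": [10.5, 20.25]})  # stand-in data
    obj = DataObject(name="example_prices", data=df)

    obj.show(2)                 # prints the name, then the first rows
    obj.to_csv("prices.csv")    # CSV export
    obj.to_xlsx("prices.xlsx")  # Excel export (pandas needs an engine such as openpyxl)
    frame = obj.to_df()         # hand back the underlying DataFrame
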
hawk_sdk/core/common/utils.py
ADDED
@@ -0,0 +1,23 @@
import json
import os

from google.cloud import bigquery
from google.oauth2 import service_account

from hawk_sdk.core.common.constants import PROJECT_ID


def get_bigquery_client() -> bigquery.Client:
    service_account_json = os.environ.get('SERVICE_ACCOUNT_JSON')
    if service_account_json:
        # Use credentials provided in SERVICE_ACCOUNT_JSON
        credentials = service_account.Credentials.from_service_account_info(
            json.loads(service_account_json)
        )
        return bigquery.Client(project=PROJECT_ID, credentials=credentials)
    else:
        # Rely on Application Default Credentials (ADC),
        # which will automatically use GOOGLE_APPLICATION_CREDENTIALS if set,
        # or use the built-in credentials if running in GCP.
        return bigquery.Client(project=PROJECT_ID)

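Client construction therefore follows two paths: inline service-account JSON via the SERVICE_ACCOUNT_JSON environment variable, or Application Default Credentials. A setup sketch for each (the key-file path is hypothetical):

    import os

    # Option 1: supply inline service-account JSON before creating the client.
    with open("/path/to/key.json") as fh:  # hypothetical key file
        os.environ["SERVICE_ACCOUNT_JSON"] = fh.read()

    # Option 2: leave SERVICE_ACCOUNT_JSON unset and rely on ADC instead,
    # e.g. by exporting GOOGLE_APPLICATION_CREDENTIALS or running
    # `gcloud auth application-default login` beforehand.

    from hawk_sdk.core.common.utils import get_bigquery_client

    client = get_bigquery_client()  # takes Option 1 whenever SERVICE_ACCOUNT_JSON is set
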
hawk_sdk-0.0.17.dist-info/RECORD
ADDED
@@ -0,0 +1,28 @@
hawk_sdk/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
hawk_sdk/api/__init__.py,sha256=YF7_NhjIYtWJmVPl66QADJ9JicxsM91e-9VB3RCT5hw,211
hawk_sdk/api/equities/__init__.py,sha256=IyyiVMj4FRUbuNQHSHg_8fkKwGBDHeHLMyRhFeUZgxI,48
hawk_sdk/api/equities/main.py,sha256=3jaDQsrRokP1R5jNJ_txjQbhAFeweEp4IrdU4OiPXJ8,1928
hawk_sdk/api/equities/repository.py,sha256=YjsvEtUZh7L34_4IL86gCd3dJGnVc7fQ-Hd7F1gtzDE,6345
hawk_sdk/api/equities/service.py,sha256=QVmLLzxppwJOJBigQVLbrEel2JbXySbWPRVU_-hVFew,2126
hawk_sdk/api/futures/__init__.py,sha256=9mL1L6wlZKXTvMrTuQV35Fl9kTy4qjf3RIJzYBXcpeM,46
hawk_sdk/api/futures/main.py,sha256=KYEqAts7tsMdjLSqEqUgOtdrbt7LDQ2LIL4qtL5yfqI,1391
hawk_sdk/api/futures/repository.py,sha256=60HcrUKtwsMc0iH6rzOlST1sCvWoRLRh6Xp97d8fgTo,3548
hawk_sdk/api/futures/service.py,sha256=kAzWLKeLXurD1qu8bCaFfjBwugZy2wtpdBmjibrdXmI,1564
hawk_sdk/api/ice_bofa_bonds_indices/__init__.py,sha256=fhCZLQ50khLuNM106A-GQw2F2QuaOnXivhorKIRN03A,73
hawk_sdk/api/ice_bofa_bonds_indices/main.py,sha256=TA4fPukMtXt4IGbEsub0nApbtwsOVlp89QIvxThl1vE,1433
hawk_sdk/api/ice_bofa_bonds_indices/repository.py,sha256=wkUrnCBFU008mM04yXqa-v9ifEhHy7SPRhQfE2Fj4S8,3881
hawk_sdk/api/ice_bofa_bonds_indices/service.py,sha256=-yc7s4dWIFIOUZT6GDjcVXfKUiF9dicp7L2umWJxSn0,1654
hawk_sdk/api/system/__init__.py,sha256=Oy8XUp5WHx4fczjZXzfbkkHluHW-t7BLN4IgrOG6Pk4,44
hawk_sdk/api/system/main.py,sha256=t2zWeEHSTYuWZAsqCMLt_CyRWoN8T8N6oEVshPgf6kk,1025
hawk_sdk/api/system/repository.py,sha256=IllwoWXZyi46INuCRQeibHJaMDR1TfbT6l2uAhii2Xc,1654
hawk_sdk/api/system/service.py,sha256=LOZIfrEBKynJlj8V-4UM4abzVV9ogwUVxM7s2xFXlpo,1262
hawk_sdk/core/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
hawk_sdk/core/common/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
hawk_sdk/core/common/base_enum.py,sha256=ZgC8gZjTzsoJzzdgN2DjYk3dVL_uM2Stuaky5bo6GuQ,267
hawk_sdk/core/common/constants.py,sha256=KmsNfRVCwGeXEBHfo3TzSaDYJDMtnL-YEQKsO1DnWV8,33
hawk_sdk/core/common/data_object.py,sha256=f4YO415Zz-lm3QYJQ3sJ4ugH9ZX9Dc7T-JvO4IdeyOw,1073
hawk_sdk/core/common/utils.py,sha256=yEMtrMYA7bv7cbIqa6sGWjQOAR3i0BOP2rh68PRMUe4,822
hawk_sdk-0.0.17.dist-info/METADATA,sha256=fD3Gajq01oONdjFlLuYBe8BcqVgoTINZR_kuxLLSNEs,135
hawk_sdk-0.0.17.dist-info/WHEEL,sha256=_zCd3N1l69ArxyTb8rzEoP9TpbYXkqRFSNOD5OuxnTs,91
hawk_sdk-0.0.17.dist-info/top_level.txt,sha256=aSjbudHcWSYsKXH9Wg0L1ltJfDOHXzjYFPO3v3cP-SE,9
hawk_sdk-0.0.17.dist-info/RECORD,,

hawk_sdk-0.0.17.dist-info/top_level.txt
ADDED
@@ -0,0 +1 @@
hawk_sdk