hawk-sdk 0.0.0__py3-none-any.whl → 0.0.1__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release.


This version of hawk-sdk might be problematic.

hawk_sdk/common/constants.py ADDED
@@ -0,0 +1 @@
+ PROJECT_ID = 'wsb-hc-qasap-ae2e'
hawk_sdk/futures/main.py CHANGED
@@ -2,25 +2,23 @@
  @description: Datasource API for Hawk Global Futures data access and export functions.
  @author: Rithwik Babu
  """
+ from typing import List
+
+ from google.cloud import bigquery

- from hawk_sdk.common.bigquery_connector import BigQueryConnector
+ from hawk_sdk.common.constants import PROJECT_ID
  from hawk_sdk.common.data_object import DataObject
  from hawk_sdk.futures.repository import FuturesRepository
  from hawk_sdk.futures.service import FuturesService
- from typing import List


  class Futures:
      """Datasource API for fetching Futures data."""

-     def __init__(self, project_id: str, credentials_path: str = None) -> None:
-         """Initializes the Futures datasource with required configurations.
-
-         :param project_id: The GCP project ID.
-         :param credentials_path: Path to the Google Cloud credentials file.
-         """
-         self.connector = BigQueryConnector(project_id, credentials_path)
-         self.repository = FuturesRepository(self.connector)
+     def __init__(self, environment="production") -> None:
+         """Initializes the Futures datasource with required configurations."""
+         self.connector = bigquery.Client(project=PROJECT_ID)
+         self.repository = FuturesRepository(self.connector, environment=environment)
          self.service = FuturesService(self.repository)

      def get_ohlcvo(self, start_date: str, end_date: str, interval: str, hawk_ids: List[int]) -> DataObject:
@@ -36,4 +34,3 @@ class Futures:
              name="futures_ohlcvo",
              data=self.service.get_ohlcvo(start_date, end_date, interval, hawk_ids)
          )
-
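For orientation, a minimal usage sketch of the reworked constructor. It assumes Application Default Credentials resolve for the hard-coded PROJECT_ID; the dates, interval, and hawk IDs are placeholder values, and attribute access on the returned DataObject (mirroring the keyword arguments above) is an assumption.

from hawk_sdk.futures.main import Futures

# Placeholder inputs; authentication comes from Application Default Credentials,
# since the 0.0.1 constructor no longer accepts a credentials path.
futures = Futures(environment="development")  # "production" is the default
ohlcvo = futures.get_ohlcvo(
    start_date="2024-01-01",
    end_date="2024-01-31",
    interval="1d",
    hawk_ids=[101, 102],  # placeholder hawk IDs
)
print(ohlcvo.name)  # expected: "futures_ohlcvo" (assumed attribute)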
hawk_sdk/futures/repository.py CHANGED
@@ -2,58 +2,55 @@
  @description: Repository layer for fetching Futures data from BigQuery.
  @author: Rithwik Babu
  """
-
- from hawk_sdk.common.bigquery_connector import BigQueryConnector
+ import logging
  from typing import Iterator, List

+ from google.cloud import bigquery
+ from google.cloud.bigquery import Client
+

  class FuturesRepository:
      """Repository for accessing Futures raw data."""

-     def __init__(self, connector: BigQueryConnector) -> None:
-         """Initializes the repository with a BigQuery connector.
+     def __init__(self, bq_client: Client, environment: str) -> None:
+         """Initializes the repository with a BigQuery client.

-         :param connector: An instance of BigQueryConnector.
+         :param bq_client: An instance of BigQuery Client.
+         :param environment: The environment to fetch data from (e.g., 'production', 'development').
          """
-         self.connector = connector
+         self.bq_client = bq_client
+         self.environment = environment

      def fetch_ohlcvo(self, start_date: str, end_date: str, interval: str, hawk_ids: List[int]) -> Iterator[dict]:
-         """Fetches raw data from BigQuery for the given date range and hawk_ids.
+         """Fetches raw data from BigQuery for the given date range and hawk_ids using query parameters."""

-         :param start_date: The start date for the data query (YYYY-MM-DD).
-         :param end_date: The end date for the data query (YYYY-MM-DD).
-         :param interval: The interval for the data query (e.g., '1d', '1h', '1m').
-         :param hawk_ids: A list of specific hawk_ids to filter by.
-         :return: An iterator over raw data rows.
-         """
-         hawk_ids_str = ', '.join(map(str, hawk_ids))
-         query = f"""
+         query = """
          WITH records_data AS (
              SELECT
                  r.record_timestamp AS date,
                  hi.value AS ticker,
-                 MAX(CASE WHEN f.field_name = 'open_{interval}' THEN r.double_value END) AS open,
-                 MAX(CASE WHEN f.field_name = 'high_{interval}' THEN r.double_value END) AS high,
-                 MAX(CASE WHEN f.field_name = 'low_{interval}' THEN r.double_value END) AS low,
-                 MAX(CASE WHEN f.field_name = 'close_{interval}' THEN r.double_value END) AS close,
-                 MAX(CASE WHEN f.field_name = 'volume_{interval}' THEN r.int_value END) AS volume,
-                 MAX(CASE WHEN f.field_name = 'open_interest_{interval}' THEN r.double_value END) AS open_interest
+                 MAX(CASE WHEN f.field_name = @open_field THEN r.double_value END) AS open,
+                 MAX(CASE WHEN f.field_name = @high_field THEN r.double_value END) AS high,
+                 MAX(CASE WHEN f.field_name = @low_field THEN r.double_value END) AS low,
+                 MAX(CASE WHEN f.field_name = @close_field THEN r.double_value END) AS close,
+                 MAX(CASE WHEN f.field_name = @volume_field THEN r.int_value END) AS volume,
+                 MAX(CASE WHEN f.field_name = @open_interest_field THEN r.double_value END) AS open_interest
              FROM
-                 `wsb-hc-qasap-ae2e.development.records` AS r
+                 `wsb-hc-qasap-ae2e.@environment.records` AS r
              JOIN
-                 `wsb-hc-qasap-ae2e.development.fields` AS f
+                 `wsb-hc-qasap-ae2e.@environment.fields` AS f
              ON r.field_id = f.field_id
              JOIN
-                 `wsb-hc-qasap-ae2e.development.hawk_identifiers` AS hi
+                 `wsb-hc-qasap-ae2e.@environment.hawk_identifiers` AS hi
              ON r.hawk_id = hi.hawk_id
              WHERE
-                 r.hawk_id IN ({hawk_ids_str})
-                 AND f.field_name IN ('open_1d', 'high_1d', 'low_1d', 'close_1d', 'volume_1d', 'open_interest_1d')
-                 AND r.record_timestamp BETWEEN '{start_date}' AND '{end_date}'
+                 r.hawk_id IN UNNEST(@hawk_ids)
+                 AND f.field_name IN (@open_field, @high_field, @low_field, @close_field, @volume_field, @open_interest_field)
+                 AND r.record_timestamp BETWEEN @start_date AND @end_date
              GROUP BY
                  date, ticker
          )
-         SELECT
+         SELECT DISTINCT
              date,
              ticker,
              open,
@@ -67,4 +64,25 @@ class FuturesRepository:
          ORDER BY
              date;
          """
-         return self.connector.run_query(query)
+
+         query_params = [
+             bigquery.ArrayQueryParameter("hawk_ids", "INT64", hawk_ids),
+             bigquery.ScalarQueryParameter("start_date", "STRING", start_date),
+             bigquery.ScalarQueryParameter("end_date", "STRING", end_date),
+             bigquery.ScalarQueryParameter("open_field", "STRING", f"open_{interval}"),
+             bigquery.ScalarQueryParameter("high_field", "STRING", f"high_{interval}"),
+             bigquery.ScalarQueryParameter("low_field", "STRING", f"low_{interval}"),
+             bigquery.ScalarQueryParameter("close_field", "STRING", f"close_{interval}"),
+             bigquery.ScalarQueryParameter("volume_field", "STRING", f"volume_{interval}"),
+             bigquery.ScalarQueryParameter("open_interest_field", "STRING", f"open_interest_{interval}"),
+             bigquery.ScalarQueryParameter("environment", "string", self.environment),
+         ]
+
+         job_config = bigquery.QueryJobConfig(query_parameters=query_params)
+
+         try:
+             query_job = self.bq_client.query(query, job_config=job_config)
+             return query_job.result()
+         except Exception as e:
+             logging.error(f"Failed to fetch OHLCVO data: {e}")
+             raise
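The rewrite above moves the filter values into BigQuery query parameters. As a point of reference, a self-contained sketch of the underlying google-cloud-bigquery API, with placeholder project, dataset, and table names; note that named parameters bind values only, so identifiers such as project or dataset names are not expanded through the @-parameter mechanism.

from google.cloud import bigquery

client = bigquery.Client(project="my-project")  # placeholder project

query = """
    SELECT hawk_id, record_timestamp, double_value
    FROM `my-project.my_dataset.records`
    WHERE hawk_id IN UNNEST(@hawk_ids)
      AND record_timestamp BETWEEN @start_date AND @end_date
"""

job_config = bigquery.QueryJobConfig(
    query_parameters=[
        bigquery.ArrayQueryParameter("hawk_ids", "INT64", [101, 102]),
        bigquery.ScalarQueryParameter("start_date", "STRING", "2024-01-01"),
        bigquery.ScalarQueryParameter("end_date", "STRING", "2024-01-31"),
    ]
)

# Values are bound server-side; rows come back as bigquery.Row objects.
for row in client.query(query, job_config=job_config).result():
    print(dict(row))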
File without changes
hawk_sdk/system/main.py ADDED
@@ -0,0 +1,33 @@
+ """
+ @description: Datasource API for Hawk Global System data access and export functions.
+ @author: Rithwik Babu
+ """
+ from typing import List
+
+ from google.cloud import bigquery
+
+ from hawk_sdk.common.constants import PROJECT_ID
+ from hawk_sdk.common.data_object import DataObject
+ from hawk_sdk.system.repository import SystemRepository
+ from hawk_sdk.system.service import SystemService
+
+
+ class System:
+     """Datasource API for fetching System data."""
+
+     def __init__(self, environment="production") -> None:
+         """Initializes the System datasource with required configurations."""
+         self.connector = bigquery.Client(project=PROJECT_ID)
+         self.repository = SystemRepository(self.connector, environment=environment)
+         self.service = SystemService(self.repository)
+
+     def get_hawk_ids(self, tickers: List[str]) -> DataObject:
+         """Fetch hawk_ids for the given list of tickers.
+
+         :param tickers: A list of specific tickers to filter by.
+         :return: A hawk DataObject containing the hawk ID data.
+         """
+         return DataObject(
+             name="system_hawk_id_mappings",
+             data=self.service.get_hawk_ids(tickers)
+         )
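A short usage sketch for the new System entry point, under the same Application Default Credentials assumption; the tickers are placeholder values and the attribute access on the returned DataObject is assumed.

from hawk_sdk.system.main import System

system = System(environment="development")  # "production" is the default
mappings = system.get_hawk_ids(tickers=["ES", "NQ"])  # placeholder tickers
print(mappings.name)  # expected: "system_hawk_id_mappings" (assumed attribute)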
hawk_sdk/system/repository.py ADDED
@@ -0,0 +1,53 @@
+ """
+ @description: Repository layer for fetching System data from BigQuery.
+ @author: Rithwik Babu
+ """
+ import logging
+ from typing import Iterator, List
+
+ from google.cloud import bigquery
+ from google.cloud.bigquery import Client
+
+
+ class SystemRepository:
+     """Repository for accessing System data."""
+
+     def __init__(self, bq_client: Client, environment: str) -> None:
+         """Initializes the repository with a BigQuery client.
+
+         :param bq_client: An instance of BigQuery Client.
+         :param environment: The environment to fetch data from (e.g., 'production', 'development').
+         """
+         self.bq_client = bq_client
+         self.environment = environment
+
+     def fetch_hawk_ids(self, tickers: List[str]) -> Iterator[dict]:
+         """Fetches hawk_ids for the given list of tickers from BigQuery.
+
+         :param tickers: A list of ticker strings to filter by.
+         :return: An iterator over raw data rows.
+         """
+         query = """
+             SELECT
+                 value AS ticker,
+                 hawk_id
+             FROM
+                 `wsb-hc-qasap-ae2e.@environment.hawk_identifiers`
+             WHERE
+                 id_type = 'TICKER'
+                 AND value IN UNNEST(@ticker_list)
+         """
+
+         query_params = [
+             bigquery.ArrayQueryParameter("ticker_list", "STRING", tickers),
+             bigquery.ScalarQueryParameter("environment", "string", self.environment),
+         ]
+
+         job_config = bigquery.QueryJobConfig(query_parameters=query_params)
+
+         try:
+             query_job = self.bq_client.query(query, job_config=job_config)
+             return query_job.result()
+         except Exception as e:
+             logging.error(f"Failed to fetch hawk_ids: {e}")
+             raise
hawk_sdk/system/service.py ADDED
@@ -0,0 +1,39 @@
+ """
+ @description: Service layer for processing and normalizing System data.
+ @author: Rithwik Babu
+ """
+
+ from typing import List, Iterator
+
+ import pandas as pd
+
+ from hawk_sdk.system.repository import SystemRepository
+
+
+ class SystemService:
+     """Service class for System business logic."""
+
+     def __init__(self, repository: SystemRepository) -> None:
+         """Initializes the service with a repository.
+
+         :param repository: An instance of SystemRepository for data access.
+         """
+         self.repository = repository
+
+     def get_hawk_ids(self, tickers: List[str]) -> pd.DataFrame:
+         """Fetches and normalizes hawk IDs into a pandas DataFrame.
+
+         :param tickers: A list of specific tickers to filter by.
+         :return: A pandas DataFrame containing the normalized hawk ID data.
+         """
+         raw_data = self.repository.fetch_hawk_ids(tickers)
+         return self._normalize_data(raw_data)
+
+     @staticmethod
+     def _normalize_data(data: Iterator[dict]) -> pd.DataFrame:
+         """Converts raw data into a normalized pandas DataFrame.
+
+         :param data: An iterator over raw data rows.
+         :return: A pandas DataFrame containing normalized data.
+         """
+         return pd.DataFrame([dict(row) for row in data])
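The _normalize_data helper materializes each bigquery.Row as a dict before building the DataFrame. A small sketch of the same idea alongside the library's built-in alternative, assuming a placeholder project and that the optional pandas extras (e.g. db-dtypes) are installed for to_dataframe():

import pandas as pd
from google.cloud import bigquery

client = bigquery.Client(project="my-project")  # placeholder project
rows = client.query("SELECT 1 AS hawk_id, 'ES' AS ticker").result()

# Manual normalization, as SystemService does: dict() works on bigquery.Row.
df_manual = pd.DataFrame([dict(row) for row in rows])

# Built-in alternative; requires the pandas/db-dtypes extras to be installed.
df_builtin = client.query("SELECT 1 AS hawk_id, 'ES' AS ticker").result().to_dataframe()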
@@ -1,6 +1,6 @@
  Metadata-Version: 2.1
  Name: hawk-sdk
- Version: 0.0.0
+ Version: 0.0.1
  Requires-Dist: google-cloud-bigquery
  Requires-Dist: pandas

@@ -0,0 +1,16 @@
+ hawk_sdk/__init__.py,sha256=prH_sfjYeRZpBvyYYe5xV8lyn8auj1whvyftpBEHz_Y,37
+ hawk_sdk/common/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
+ hawk_sdk/common/constants.py,sha256=KmsNfRVCwGeXEBHfo3TzSaDYJDMtnL-YEQKsO1DnWV8,33
+ hawk_sdk/common/data_object.py,sha256=qXsnDf__ePw4DbeF9Pe82B2DY05xsID9uU8vyfCUIe0,977
+ hawk_sdk/futures/__init__.py,sha256=g0TubD9lrU2xoF1YvHSkNDjn3XehRT6zrIiSdX8C4no,42
+ hawk_sdk/futures/main.py,sha256=d12PdblMz79iBexaBfcdUHN5xdMmVZYqHDJHioT-XBk,1535
+ hawk_sdk/futures/repository.py,sha256=LRB2SGGrObjce4UbPNn5FHXAmeoY-ZlOWxpNzg6RlBY,3658
+ hawk_sdk/futures/service.py,sha256=LaLbNWJMw6kJEjl2SC3eKSCWSGwpKK2JBTnPCHHlL3k,1559
+ hawk_sdk/system/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
+ hawk_sdk/system/main.py,sha256=wXycZyIUMNlbQXD8b57edq642gqinD3sdKabsFyzlu4,1180
+ hawk_sdk/system/repository.py,sha256=7SEuZgjhaweO7jFdfv1KybLWCHbJsgbPwdn7n74Pcj8,1721
+ hawk_sdk/system/service.py,sha256=1QxvONep7E5-oN1Tiq8xoS-gyt4FVJQUed2ZTw_O0TM,1258
+ hawk_sdk-0.0.1.dist-info/METADATA,sha256=a-t1OpVDr10ezom4mb4wDx4wvPfmtXIELsHUDelWEHs,112
+ hawk_sdk-0.0.1.dist-info/WHEEL,sha256=OVMc5UfuAQiSplgO0_WdW7vXVGAt9Hdd6qtN4HotdyA,91
+ hawk_sdk-0.0.1.dist-info/top_level.txt,sha256=aSjbudHcWSYsKXH9Wg0L1ltJfDOHXzjYFPO3v3cP-SE,9
+ hawk_sdk-0.0.1.dist-info/RECORD,,
@@ -1,5 +1,5 @@
  Wheel-Version: 1.0
- Generator: bdist_wheel (0.41.2)
+ Generator: setuptools (75.2.0)
  Root-Is-Purelib: true
  Tag: py3-none-any

hawk_sdk/common/bigquery_connector.py REMOVED
@@ -1,47 +0,0 @@
- """
- @description: Handles the connection and interaction with Google BigQuery.
- @author: Rithwik Babu
- """
-
- import os
- from typing import Iterator
-
- from google.cloud import bigquery
-
-
- class BigQueryConnector:
-     """Handles authentication and querying BigQuery."""
-
-     def __init__(self, project_id: str, credentials_path: str = None) -> None:
-         """Initializes BigQuery client and sets up authentication.
-
-         :param project_id: The GCP project ID.
-         :param credentials_path: Path to the Google Cloud credentials file.
-         """
-         if credentials_path:
-             os.environ["GOOGLE_APPLICATION_CREDENTIALS"] = credentials_path
-
-         if not self._validate_credentials_exists():
-             raise ValueError("Credentials not found. Set GOOGLE_APPLICATION_CREDENTIALS environment variable.")
-
-         self.client = bigquery.Client(project=project_id)
-
-     def run_query(self, query: str) -> Iterator[bigquery.Row]:
-         """Runs a SQL query on BigQuery and returns an iterator over rows.
-
-         :param query: The SQL query to execute.
-         :return: An iterator over the result rows.
-         """
-         query_job = self.client.query(query)
-         return query_job.result()
-
-     def _validate_credentials_exists(self) -> bool:
-         """Validates if the GOOGLE_APPLICATION_CREDENTIALS environment variable is set.
-
-         :return: True if the environment variable is set, False otherwise.
-         """
-         if "GOOGLE_APPLICATION_CREDENTIALS" in os.environ:
-             return True
-         else:
-             print("Environment variable for credentials is not set.")
-             return False
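With BigQueryConnector and its credentials_path argument removed, the 0.0.1 clients rely on Application Default Credentials. For callers that still need to point at a specific key file, a hedged sketch using the standard google-auth API; the file path and project below are placeholders.

from google.cloud import bigquery
from google.oauth2 import service_account

# Placeholder key file; any service account with BigQuery read access works.
credentials = service_account.Credentials.from_service_account_file(
    "/path/to/service-account.json"
)
client = bigquery.Client(project="my-project", credentials=credentials)

# Or export GOOGLE_APPLICATION_CREDENTIALS and let Application Default
# Credentials pick it up, much as the removed connector did.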
@@ -1,12 +0,0 @@
- hawk_sdk/__init__.py,sha256=prH_sfjYeRZpBvyYYe5xV8lyn8auj1whvyftpBEHz_Y,37
- hawk_sdk/common/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
- hawk_sdk/common/bigquery_connector.py,sha256=QRwd6PW3G2l8d9m114gDCL6PkGCvehu0VKYblHGAlIU,1625
- hawk_sdk/common/data_object.py,sha256=qXsnDf__ePw4DbeF9Pe82B2DY05xsID9uU8vyfCUIe0,977
- hawk_sdk/futures/__init__.py,sha256=g0TubD9lrU2xoF1YvHSkNDjn3XehRT6zrIiSdX8C4no,42
- hawk_sdk/futures/main.py,sha256=yziRSZYlZMNr6UyR03vc9uZJiYtsgT7Q0gv2aNky6Bc,1658
- hawk_sdk/futures/repository.py,sha256=1hoLxO0w46FYWXt8R9mp1pkm2pXGcb3e_Wqp839pDH4,2733
- hawk_sdk/futures/service.py,sha256=LaLbNWJMw6kJEjl2SC3eKSCWSGwpKK2JBTnPCHHlL3k,1559
- hawk_sdk-0.0.0.dist-info/METADATA,sha256=gW5KFDfJ_v5SGT4uM1x_0SOvixrLNHutjYfp_g-UEpQ,112
- hawk_sdk-0.0.0.dist-info/WHEEL,sha256=yQN5g4mg4AybRjkgi-9yy4iQEFibGQmlz78Pik5Or-A,92
- hawk_sdk-0.0.0.dist-info/top_level.txt,sha256=aSjbudHcWSYsKXH9Wg0L1ltJfDOHXzjYFPO3v3cP-SE,9
- hawk_sdk-0.0.0.dist-info/RECORD,,