hawk-sdk 0.0.0__tar.gz → 0.0.5__tar.gz

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release.


This version of hawk-sdk might be problematic. Click here for more details.

Files changed (32) hide show
  1. hawk_sdk-0.0.5/PKG-INFO +5 -0
  2. hawk_sdk-0.0.5/README.md +69 -0
  3. hawk_sdk-0.0.5/hawk_sdk/api/__init__.py +2 -0
  4. hawk_sdk-0.0.5/hawk_sdk/api/futures/__init__.py +1 -0
  5. {hawk-sdk-0.0.0/hawk_sdk → hawk_sdk-0.0.5/hawk_sdk/api}/futures/main.py +12 -15
  6. hawk_sdk-0.0.5/hawk_sdk/api/futures/repository.py +87 -0
  7. {hawk-sdk-0.0.0/hawk_sdk → hawk_sdk-0.0.5/hawk_sdk/api}/futures/service.py +3 -2
  8. hawk_sdk-0.0.5/hawk_sdk/api/system/__init__.py +1 -0
  9. hawk_sdk-0.0.5/hawk_sdk/api/system/main.py +33 -0
  10. hawk_sdk-0.0.5/hawk_sdk/api/system/repository.py +52 -0
  11. hawk_sdk-0.0.5/hawk_sdk/api/system/service.py +39 -0
  12. hawk_sdk-0.0.5/hawk_sdk/core/__init__.py +0 -0
  13. hawk_sdk-0.0.5/hawk_sdk/core/common/__init__.py +0 -0
  14. hawk_sdk-0.0.5/hawk_sdk/core/common/base_enum.py +16 -0
  15. hawk_sdk-0.0.5/hawk_sdk/core/common/constants.py +1 -0
  16. {hawk-sdk-0.0.0/hawk_sdk → hawk_sdk-0.0.5/hawk_sdk/core}/common/data_object.py +4 -0
  17. hawk_sdk-0.0.5/hawk_sdk.egg-info/PKG-INFO +5 -0
  18. hawk_sdk-0.0.5/hawk_sdk.egg-info/SOURCES.txt +22 -0
  19. {hawk-sdk-0.0.0 → hawk_sdk-0.0.5}/setup.py +1 -1
  20. hawk-sdk-0.0.0/PKG-INFO +0 -3
  21. hawk-sdk-0.0.0/README.md +0 -1
  22. hawk-sdk-0.0.0/hawk_sdk/__init__.py +0 -1
  23. hawk-sdk-0.0.0/hawk_sdk/common/bigquery_connector.py +0 -47
  24. hawk-sdk-0.0.0/hawk_sdk/futures/__init__.py +0 -1
  25. hawk-sdk-0.0.0/hawk_sdk/futures/repository.py +0 -70
  26. hawk-sdk-0.0.0/hawk_sdk.egg-info/PKG-INFO +0 -3
  27. hawk-sdk-0.0.0/hawk_sdk.egg-info/SOURCES.txt +0 -15
  28. {hawk-sdk-0.0.0/hawk_sdk/common → hawk_sdk-0.0.5/hawk_sdk}/__init__.py +0 -0
  29. {hawk-sdk-0.0.0 → hawk_sdk-0.0.5}/hawk_sdk.egg-info/dependency_links.txt +0 -0
  30. {hawk-sdk-0.0.0 → hawk_sdk-0.0.5}/hawk_sdk.egg-info/requires.txt +0 -0
  31. {hawk-sdk-0.0.0 → hawk_sdk-0.0.5}/hawk_sdk.egg-info/top_level.txt +0 -0
  32. {hawk-sdk-0.0.0 → hawk_sdk-0.0.5}/setup.cfg +0 -0
@@ -0,0 +1,5 @@
1
+ Metadata-Version: 2.1
2
+ Name: hawk-sdk
3
+ Version: 0.0.5
4
+ Requires-Dist: google-cloud-bigquery
5
+ Requires-Dist: pandas
@@ -0,0 +1,69 @@
1
+ ## Hawk SDK
2
+
3
+ https://hawk-center.github.io/hawk-sdk/
4
+
5
+ ### Contributing
6
+
7
+ Please read [CONTRIBUTING.md](.github/CONTRIBUTING.md) for more details.
8
+
9
+
10
+ The following section describes how to update and publish this package on PyPI using `twine`:
11
+
12
+ ---
13
+
14
+ ## Updating the Package on PyPI
15
+
16
+ Follow these steps to update and publish your package on PyPI using `twine`:
17
+
18
+ ### 1. Update `setup.py`
19
+ Ensure your `setup.py` file reflects the correct metadata and version for the new release. Update the version number in accordance with [Semantic Versioning](https://semver.org/).
20
+
21
+ ```python
22
+ version='X.Y.Z', # Update this with the new version number
23
+ ```
24
+
25
+ ### 2. Install Required Tools
26
+ Make sure you have the necessary tools installed:
27
+
28
+ ```bash
29
+ pip install setuptools wheel twine
30
+ ```
31
+
32
+ ### 3. Clean Up Previous Builds (Optional)
33
+ If you have previously built distributions in the `dist/` or `build/` directories, remove them to avoid conflicts:
34
+
35
+ ```bash
36
+ rm -rf dist/ build/
37
+ ```
38
+
39
+ ### 4. Build the Package
40
+ Generate the source distribution (`sdist`) and the wheel (`bdist_wheel`) for your package:
41
+
42
+ ```bash
43
+ python setup.py sdist bdist_wheel
44
+ ```
45
+
46
+ This creates distribution files in the `dist/` directory.
47
+
48
+ ### 5. Test the Package Locally (Optional)
49
+ You can test the package locally before uploading it to PyPI:
50
+
51
+ ```bash
52
+ pip install dist/your_package_name-X.Y.Z-py3-none-any.whl
53
+ ```
54
+
55
+ ### 6. Upload to PyPI
56
+ Once you’re satisfied with the package, upload it to PyPI. Contact Rithwik for the PyPI API key:
57
+
58
+ ```bash
59
+ twine upload dist/*
60
+ ```
61
+
62
+ ### 7. Verify the Package
63
+ After uploading, verify the package has been successfully updated on PyPI by visiting your project page:
64
+
65
+ [https://pypi.org/project/hawk-sdk/](https://pypi.org/project/hawk-sdk/)
66
+
67
+ ---
68
+
69
+ For questions about the release process, contact the package maintainers.
@@ -0,0 +1,2 @@
1
+ from hawk_sdk.api.futures.main import Futures
2
+ from hawk_sdk.api.system.main import System
@@ -0,0 +1 @@
1
+ from hawk_sdk.api.futures.main import Futures
@@ -2,31 +2,29 @@
2
2
  @description: Datasource API for Hawk Global Futures data access and export functions.
3
3
  @author: Rithwik Babu
4
4
  """
5
-
6
- from hawk_sdk.common.bigquery_connector import BigQueryConnector
7
- from hawk_sdk.common.data_object import DataObject
8
- from hawk_sdk.futures.repository import FuturesRepository
9
- from hawk_sdk.futures.service import FuturesService
10
5
  from typing import List
11
6
 
7
+ from google.cloud import bigquery
8
+
9
+ from hawk_sdk.core.common.constants import PROJECT_ID
10
+ from hawk_sdk.core.common.data_object import DataObject
11
+ from hawk_sdk.api.futures.repository import FuturesRepository
12
+ from hawk_sdk.api.futures.service import FuturesService
13
+
12
14
 
13
15
  class Futures:
14
16
  """Datasource API for fetching Futures data."""
15
17
 
16
- def __init__(self, project_id: str, credentials_path: str = None) -> None:
17
- """Initializes the Futures datasource with required configurations.
18
-
19
- :param project_id: The GCP project ID.
20
- :param credentials_path: Path to the Google Cloud credentials file.
21
- """
22
- self.connector = BigQueryConnector(project_id, credentials_path)
23
- self.repository = FuturesRepository(self.connector)
18
+ def __init__(self, environment="production") -> None:
19
+ """Initializes the Futures datasource with required configurations."""
20
+ self.connector = bigquery.Client(project=PROJECT_ID)
21
+ self.repository = FuturesRepository(self.connector, environment=environment)
24
22
  self.service = FuturesService(self.repository)
25
23
 
26
24
  def get_ohlcvo(self, start_date: str, end_date: str, interval: str, hawk_ids: List[int]) -> DataObject:
27
25
  """Fetch open, high, low, close, volume, and open interest data for the given date range and hawk_ids.
28
26
 
29
- :param start_date: The start date for the data query (YYYY-MM-DD).
27
+ :param start_date: The start date for the data query (YYYY-MM-DD).
30
28
  :param end_date: The end date for the data query (YYYY-MM-DD).
31
29
  :param interval: The interval for the data query (e.g., '1d', '1h', '1m').
32
30
  :param hawk_ids: A list of specific hawk_ids to filter by.
@@ -36,4 +34,3 @@ class Futures:
36
34
  name="futures_ohlcvo",
37
35
  data=self.service.get_ohlcvo(start_date, end_date, interval, hawk_ids)
38
36
  )
39
-
@@ -0,0 +1,87 @@
1
+ """
2
+ @description: Repository layer for fetching Futures data from BigQuery.
3
+ @author: Rithwik Babu
4
+ """
5
+ import logging
6
+ from typing import Iterator, List
7
+
8
+ from google.cloud import bigquery
9
+ from google.cloud.bigquery import Client
10
+
11
+
12
+ class FuturesRepository:
13
+ """Repository for accessing Futures raw data."""
14
+
15
+ def __init__(self, bq_client: Client, environment: str) -> None:
16
+ """Initializes the repository with a BigQuery client.
17
+
18
+ :param bq_client: An instance of BigQuery Client.
19
+ :param environment: The environment to fetch data from (e.g., 'production', 'development').
20
+ """
21
+ self.bq_client = bq_client
22
+ self.environment = environment
23
+
24
+ def fetch_ohlcvo(self, start_date: str, end_date: str, interval: str, hawk_ids: List[int]) -> Iterator[dict]:
25
+ """Fetches raw data from BigQuery for the given date range and hawk_ids using query parameters."""
26
+
27
+ query = f"""
28
+ WITH records_data AS (
29
+ SELECT
30
+ r.record_timestamp AS date,
31
+ hi.value AS ticker,
32
+ MAX(CASE WHEN f.field_name = @open_field THEN r.double_value END) AS open,
33
+ MAX(CASE WHEN f.field_name = @high_field THEN r.double_value END) AS high,
34
+ MAX(CASE WHEN f.field_name = @low_field THEN r.double_value END) AS low,
35
+ MAX(CASE WHEN f.field_name = @close_field THEN r.double_value END) AS close,
36
+ MAX(CASE WHEN f.field_name = @volume_field THEN r.int_value END) AS volume,
37
+ MAX(CASE WHEN f.field_name = @open_interest_field THEN r.int_value END) AS open_interest
38
+ FROM
39
+ `wsb-hc-qasap-ae2e.{self.environment}.records` AS r
40
+ JOIN
41
+ `wsb-hc-qasap-ae2e.{self.environment}.fields` AS f
42
+ ON r.field_id = f.field_id
43
+ JOIN
44
+ `wsb-hc-qasap-ae2e.{self.environment}.hawk_identifiers` AS hi
45
+ ON r.hawk_id = hi.hawk_id
46
+ WHERE
47
+ r.hawk_id IN UNNEST(@hawk_ids)
48
+ AND f.field_name IN (@open_field, @high_field, @low_field, @close_field, @volume_field, @open_interest_field)
49
+ AND r.record_timestamp BETWEEN @start_date AND @end_date
50
+ GROUP BY
51
+ date, ticker
52
+ )
53
+ SELECT DISTINCT
54
+ date,
55
+ ticker,
56
+ open,
57
+ high,
58
+ low,
59
+ close,
60
+ volume,
61
+ open_interest
62
+ FROM
63
+ records_data
64
+ ORDER BY
65
+ date;
66
+ """
67
+
68
+ query_params = [
69
+ bigquery.ArrayQueryParameter("hawk_ids", "INT64", hawk_ids),
70
+ bigquery.ScalarQueryParameter("start_date", "STRING", start_date),
71
+ bigquery.ScalarQueryParameter("end_date", "STRING", end_date),
72
+ bigquery.ScalarQueryParameter("open_field", "STRING", f"open_{interval}"),
73
+ bigquery.ScalarQueryParameter("high_field", "STRING", f"high_{interval}"),
74
+ bigquery.ScalarQueryParameter("low_field", "STRING", f"low_{interval}"),
75
+ bigquery.ScalarQueryParameter("close_field", "STRING", f"close_{interval}"),
76
+ bigquery.ScalarQueryParameter("volume_field", "STRING", f"volume_{interval}"),
77
+ bigquery.ScalarQueryParameter("open_interest_field", "STRING", f"open_interest_{interval}"),
78
+ ]
79
+
80
+ job_config = bigquery.QueryJobConfig(query_parameters=query_params)
81
+
82
+ try:
83
+ query_job = self.bq_client.query(query, job_config=job_config)
84
+ return query_job.result()
85
+ except Exception as e:
86
+ logging.error(f"Failed to fetch OHLCVO data: {e}")
87
+ raise
@@ -2,11 +2,12 @@
2
2
  @description: Service layer for processing and normalizing Futures data.
3
3
  @author: Rithwik Babu
4
4
  """
5
-
6
- from hawk_sdk.futures.repository import FuturesRepository
7
5
  from typing import List, Iterator
6
+
8
7
  import pandas as pd
9
8
 
9
+ from hawk_sdk.api.futures.repository import FuturesRepository
10
+
10
11
 
11
12
  class FuturesService:
12
13
  """Service class for Futures business logic."""
@@ -0,0 +1 @@
1
+ from hawk_sdk.api.system.main import System
@@ -0,0 +1,33 @@
1
+ """
2
+ @description: Datasource API for Hawk System data access and export functions.
3
+ @author: Rithwik Babu
4
+ """
5
+ from typing import List
6
+
7
+ from google.cloud import bigquery
8
+
9
+ from hawk_sdk.core.common.constants import PROJECT_ID
10
+ from hawk_sdk.core.common.data_object import DataObject
11
+ from hawk_sdk.api.system.repository import SystemRepository
12
+ from hawk_sdk.api.system.service import SystemService
13
+
14
+
15
+ class System:
16
+ """Datasource API for fetching System data."""
17
+
18
+ def __init__(self, environment="production") -> None:
19
+ """Initializes the System datasource with required configurations."""
20
+ self.connector = bigquery.Client(project=PROJECT_ID)
21
+ self.repository = SystemRepository(self.connector, environment=environment)
22
+ self.service = SystemService(self.repository)
23
+
24
+ def get_hawk_ids(self, tickers: List[str]) -> DataObject:
25
+ """Fetch hawk_ids for the given list of tickers.
26
+
27
+ :param tickers: A list of specific tickers to filter by.
28
+ :return: A hawk DataObject containing the hawk ID data.
29
+ """
30
+ return DataObject(
31
+ name="system_hawk_id_mappings",
32
+ data=self.service.get_hawk_ids(tickers)
33
+ )
@@ -0,0 +1,52 @@
1
+ """
2
+ @description: Repository layer for fetching System data from BigQuery.
3
+ @author: Rithwik Babu
4
+ """
5
+ import logging
6
+ from typing import Iterator, List
7
+
8
+ from google.cloud import bigquery
9
+ from google.cloud.bigquery import Client
10
+
11
+
12
+ class SystemRepository:
13
+ """Repository for accessing System data."""
14
+
15
+ def __init__(self, bq_client: Client, environment: str) -> None:
16
+ """Initializes the repository with a BigQuery client.
17
+
18
+ :param bq_client: An instance of BigQuery Client.
19
+ :param environment: The environment to fetch data from (e.g., 'production', 'development').
20
+ """
21
+ self.bq_client = bq_client
22
+ self.environment = environment
23
+
24
+ def fetch_hawk_ids(self, tickers: List[str]) -> Iterator[dict]:
25
+ """Fetches hawk_ids for the given list of tickers from BigQuery.
26
+
27
+ :param tickers: A list of ticker strings to filter by.
28
+ :return: An iterator over raw data rows.
29
+ """
30
+ query = f"""
31
+ SELECT
32
+ value AS ticker,
33
+ hawk_id
34
+ FROM
35
+ `wsb-hc-qasap-ae2e.{self.environment}.hawk_identifiers`
36
+ WHERE
37
+ id_type = 'TICKER'
38
+ AND value IN UNNEST(@ticker_list)
39
+ """
40
+
41
+ query_params = [
42
+ bigquery.ArrayQueryParameter("ticker_list", "STRING", tickers),
43
+ ]
44
+
45
+ job_config = bigquery.QueryJobConfig(query_parameters=query_params)
46
+
47
+ try:
48
+ query_job = self.bq_client.query(query, job_config=job_config)
49
+ return query_job.result()
50
+ except Exception as e:
51
+ logging.error(f"Failed to fetch hawk_ids: {e}")
52
+ raise
@@ -0,0 +1,39 @@
1
+ """
2
+ @description: Service layer for processing and normalizing System data.
3
+ @author: Rithwik Babu
4
+ """
5
+
6
+ from typing import List, Iterator
7
+
8
+ import pandas as pd
9
+
10
+ from hawk_sdk.api.system.repository import SystemRepository
11
+
12
+
13
+ class SystemService:
14
+ """Service class for System business logic."""
15
+
16
+ def __init__(self, repository: SystemRepository) -> None:
17
+ """Initializes the service with a repository.
18
+
19
+ :param repository: An instance of SystemRepository for data access.
20
+ """
21
+ self.repository = repository
22
+
23
+ def get_hawk_ids(self, tickers: List[str]) -> pd.DataFrame:
24
+ """Fetches and normalizes hawk IDs into a pandas DataFrame.
25
+
26
+ :param tickers: A list of specific tickers to filter by.
27
+ :return: A pandas DataFrame containing the normalized hawk ID data.
28
+ """
29
+ raw_data = self.repository.fetch_hawk_ids(tickers)
30
+ return self._normalize_data(raw_data)
31
+
32
+ @staticmethod
33
+ def _normalize_data(data: Iterator[dict]) -> pd.DataFrame:
34
+ """Converts raw data into a normalized pandas DataFrame.
35
+
36
+ :param data: An iterator over raw data rows.
37
+ :return: A pandas DataFrame containing normalized data.
38
+ """
39
+ return pd.DataFrame([dict(row) for row in data])
File without changes
File without changes
@@ -0,0 +1,16 @@
1
+ """
2
+ @description: Base enum class.
3
+ @author: Rithwik Babu
4
+ """
5
+
6
+ from enum import Enum
7
+
8
+
9
+ class BaseEnum(str, Enum):
10
+ """
11
+ Base enum class used by all enum classes.
12
+
13
+ Note: Inheriting from str is necessary to correctly serialize output of enum
14
+ """
15
+
16
+ pass
@@ -0,0 +1 @@
1
+ PROJECT_ID = 'wsb-hc-qasap-ae2e'
@@ -1,3 +1,7 @@
1
+ """
2
+ @description: Data Object class to handle output transformations.
3
+ @author: Rithwik Babu
4
+ """
1
5
  import pandas as pd
2
6
 
3
7
 
@@ -0,0 +1,5 @@
1
+ Metadata-Version: 2.1
2
+ Name: hawk-sdk
3
+ Version: 0.0.5
4
+ Requires-Dist: google-cloud-bigquery
5
+ Requires-Dist: pandas
@@ -0,0 +1,22 @@
1
+ README.md
2
+ setup.py
3
+ hawk_sdk/__init__.py
4
+ hawk_sdk.egg-info/PKG-INFO
5
+ hawk_sdk.egg-info/SOURCES.txt
6
+ hawk_sdk.egg-info/dependency_links.txt
7
+ hawk_sdk.egg-info/requires.txt
8
+ hawk_sdk.egg-info/top_level.txt
9
+ hawk_sdk/api/__init__.py
10
+ hawk_sdk/api/futures/__init__.py
11
+ hawk_sdk/api/futures/main.py
12
+ hawk_sdk/api/futures/repository.py
13
+ hawk_sdk/api/futures/service.py
14
+ hawk_sdk/api/system/__init__.py
15
+ hawk_sdk/api/system/main.py
16
+ hawk_sdk/api/system/repository.py
17
+ hawk_sdk/api/system/service.py
18
+ hawk_sdk/core/__init__.py
19
+ hawk_sdk/core/common/__init__.py
20
+ hawk_sdk/core/common/base_enum.py
21
+ hawk_sdk/core/common/constants.py
22
+ hawk_sdk/core/common/data_object.py
@@ -2,7 +2,7 @@ from setuptools import setup, find_packages
2
2
 
3
3
  setup(
4
4
  name='hawk-sdk',
5
- version='0.0.0',
5
+ version='0.0.5',
6
6
  packages=find_packages(),
7
7
  install_requires=[
8
8
  'google-cloud-bigquery',
hawk-sdk-0.0.0/PKG-INFO DELETED
@@ -1,3 +0,0 @@
1
- Metadata-Version: 2.1
2
- Name: hawk-sdk
3
- Version: 0.0.0
hawk-sdk-0.0.0/README.md DELETED
@@ -1 +0,0 @@
1
- # hawk-sdk
@@ -1 +0,0 @@
1
- from hawk_sdk.futures import Futures
@@ -1,47 +0,0 @@
1
- """
2
- @description: Handles the connection and interaction with Google BigQuery.
3
- @author: Rithwik Babu
4
- """
5
-
6
- import os
7
- from typing import Iterator
8
-
9
- from google.cloud import bigquery
10
-
11
-
12
- class BigQueryConnector:
13
- """Handles authentication and querying BigQuery."""
14
-
15
- def __init__(self, project_id: str, credentials_path: str = None) -> None:
16
- """Initializes BigQuery client and sets up authentication.
17
-
18
- :param project_id: The GCP project ID.
19
- :param credentials_path: Path to the Google Cloud credentials file.
20
- """
21
- if credentials_path:
22
- os.environ["GOOGLE_APPLICATION_CREDENTIALS"] = credentials_path
23
-
24
- if not self._validate_credentials_exists():
25
- raise ValueError("Credentials not found. Set GOOGLE_APPLICATION_CREDENTIALS environment variable.")
26
-
27
- self.client = bigquery.Client(project=project_id)
28
-
29
- def run_query(self, query: str) -> Iterator[bigquery.Row]:
30
- """Runs a SQL query on BigQuery and returns an iterator over rows.
31
-
32
- :param query: The SQL query to execute.
33
- :return: An iterator over the result rows.
34
- """
35
- query_job = self.client.query(query)
36
- return query_job.result()
37
-
38
- def _validate_credentials_exists(self) -> bool:
39
- """Validates if the GOOGLE_APPLICATION_CREDENTIALS environment variable is set.
40
-
41
- :return: True if the environment variable is set, False otherwise.
42
- """
43
- if "GOOGLE_APPLICATION_CREDENTIALS" in os.environ:
44
- return True
45
- else:
46
- print("Environment variable for credentials is not set.")
47
- return False
@@ -1 +0,0 @@
1
- from hawk_sdk.futures.main import Futures
@@ -1,70 +0,0 @@
1
- """
2
- @description: Repository layer for fetching Futures data from BigQuery.
3
- @author: Rithwik Babu
4
- """
5
-
6
- from hawk_sdk.common.bigquery_connector import BigQueryConnector
7
- from typing import Iterator, List
8
-
9
-
10
- class FuturesRepository:
11
- """Repository for accessing Futures raw data."""
12
-
13
- def __init__(self, connector: BigQueryConnector) -> None:
14
- """Initializes the repository with a BigQuery connector.
15
-
16
- :param connector: An instance of BigQueryConnector.
17
- """
18
- self.connector = connector
19
-
20
- def fetch_ohlcvo(self, start_date: str, end_date: str, interval: str, hawk_ids: List[int]) -> Iterator[dict]:
21
- """Fetches raw data from BigQuery for the given date range and hawk_ids.
22
-
23
- :param start_date: The start date for the data query (YYYY-MM-DD).
24
- :param end_date: The end date for the data query (YYYY-MM-DD).
25
- :param interval: The interval for the data query (e.g., '1d', '1h', '1m').
26
- :param hawk_ids: A list of specific hawk_ids to filter by.
27
- :return: An iterator over raw data rows.
28
- """
29
- hawk_ids_str = ', '.join(map(str, hawk_ids))
30
- query = f"""
31
- WITH records_data AS (
32
- SELECT
33
- r.record_timestamp AS date,
34
- hi.value AS ticker,
35
- MAX(CASE WHEN f.field_name = 'open_{interval}' THEN r.double_value END) AS open,
36
- MAX(CASE WHEN f.field_name = 'high_{interval}' THEN r.double_value END) AS high,
37
- MAX(CASE WHEN f.field_name = 'low_{interval}' THEN r.double_value END) AS low,
38
- MAX(CASE WHEN f.field_name = 'close_{interval}' THEN r.double_value END) AS close,
39
- MAX(CASE WHEN f.field_name = 'volume_{interval}' THEN r.int_value END) AS volume,
40
- MAX(CASE WHEN f.field_name = 'open_interest_{interval}' THEN r.double_value END) AS open_interest
41
- FROM
42
- `wsb-hc-qasap-ae2e.development.records` AS r
43
- JOIN
44
- `wsb-hc-qasap-ae2e.development.fields` AS f
45
- ON r.field_id = f.field_id
46
- JOIN
47
- `wsb-hc-qasap-ae2e.development.hawk_identifiers` AS hi
48
- ON r.hawk_id = hi.hawk_id
49
- WHERE
50
- r.hawk_id IN ({hawk_ids_str})
51
- AND f.field_name IN ('open_1d', 'high_1d', 'low_1d', 'close_1d', 'volume_1d', 'open_interest_1d')
52
- AND r.record_timestamp BETWEEN '{start_date}' AND '{end_date}'
53
- GROUP BY
54
- date, ticker
55
- )
56
- SELECT
57
- date,
58
- ticker,
59
- open,
60
- high,
61
- low,
62
- close,
63
- volume,
64
- open_interest
65
- FROM
66
- records_data
67
- ORDER BY
68
- date;
69
- """
70
- return self.connector.run_query(query)
@@ -1,3 +0,0 @@
1
- Metadata-Version: 2.1
2
- Name: hawk-sdk
3
- Version: 0.0.0
@@ -1,15 +0,0 @@
1
- README.md
2
- setup.py
3
- hawk_sdk/__init__.py
4
- hawk_sdk.egg-info/PKG-INFO
5
- hawk_sdk.egg-info/SOURCES.txt
6
- hawk_sdk.egg-info/dependency_links.txt
7
- hawk_sdk.egg-info/requires.txt
8
- hawk_sdk.egg-info/top_level.txt
9
- hawk_sdk/common/__init__.py
10
- hawk_sdk/common/bigquery_connector.py
11
- hawk_sdk/common/data_object.py
12
- hawk_sdk/futures/__init__.py
13
- hawk_sdk/futures/main.py
14
- hawk_sdk/futures/repository.py
15
- hawk_sdk/futures/service.py
File without changes