cryptodatapy 0.2.7__py3-none-any.whl → 0.2.9__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (28)
  1. cryptodatapy/extract/datarequest.py +27 -2
  2. cryptodatapy/extract/exchanges/__init__.py +2 -0
  3. cryptodatapy/extract/exchanges/dydx.py +137 -0
  4. cryptodatapy/extract/exchanges/exchange.py +439 -0
  5. cryptodatapy/extract/libraries/ccxt_api.py +684 -189
  6. cryptodatapy/extract/libraries/library.py +1 -3
  7. cryptodatapy/extract/libraries/pandasdr_api.py +151 -137
  8. cryptodatapy/extract/web/web.py +62 -0
  9. cryptodatapy/transform/convertparams.py +73 -164
  10. cryptodatapy/transform/wrangle.py +43 -23
  11. {cryptodatapy-0.2.7.dist-info → cryptodatapy-0.2.9.dist-info}/METADATA +1 -1
  12. {cryptodatapy-0.2.7.dist-info → cryptodatapy-0.2.9.dist-info}/RECORD +14 -25
  13. cryptodatapy/conf/fx_tickers.csv +0 -31
  14. cryptodatapy/extract/data_vendors/CoinMetrics.ipynb +0 -747
  15. cryptodatapy/extract/libraries/Untitled.ipynb +0 -33
  16. cryptodatapy/extract/libraries/ccxt.ipynb +0 -747
  17. cryptodatapy/extract/libraries/yfinance_api.py +0 -511
  18. cryptodatapy/transform/cc_onchain_data.csv +0 -118423
  19. cryptodatapy/transform/clean_onchain_data.ipynb +0 -4750
  20. cryptodatapy/transform/clean_perp_futures_ohlcv.ipynb +0 -2819
  21. cryptodatapy/transform/cmdty_data.ipynb +0 -402
  22. cryptodatapy/transform/credit_data.ipynb +0 -291
  23. cryptodatapy/transform/eqty_data.ipynb +0 -836
  24. cryptodatapy/transform/global_credit_data_daily.parquet +0 -0
  25. cryptodatapy/transform/rates_data.ipynb +0 -465
  26. cryptodatapy/transform/us_rates_daily.csv +0 -227752
  27. {cryptodatapy-0.2.7.dist-info → cryptodatapy-0.2.9.dist-info}/LICENSE +0 -0
  28. {cryptodatapy-0.2.7.dist-info → cryptodatapy-0.2.9.dist-info}/WHEEL +0 -0
@@ -688,108 +688,78 @@ class ConvertParams:
  # convert tickers
  with resources.path("cryptodatapy.conf", "tickers.csv") as f:
  tickers_path = f
- tickers_df, tickers = pd.read_csv(tickers_path, index_col=0, encoding="latin1"), []
+ tickers_df = pd.read_csv(tickers_path, index_col=0, encoding="latin1")

- if self.data_req.source_tickers is not None:
- tickers = self.data_req.source_tickers
- self.data_req.tickers = self.data_req.source_tickers
- else:
+ if self.data_req.source_tickers is None:
+ self.data_req.source_tickers = []
  for ticker in self.data_req.tickers:
  try:
- tickers.append(tickers_df.loc[ticker, "fred_id"])
+ self.data_req.source_tickers.append(tickers_df.loc[ticker, "fred_id"])
  except KeyError:
  logging.warning(
- f"{ticker} not found for Fred data source. Check tickers in"
+ f"{ticker} not found for Fred source. Check tickers in"
  f" data catalog and try again."
  )
- self.data_req.tickers.remove(ticker)
- # convert freq
- if self.data_req.source_freq is not None:
- freq = self.data_req.source_freq
- self.data_req.freq = self.data_req.source_freq
- else:
- freq = self.data_req.freq
- # convert quote ccy
- quote_ccy = self.data_req.quote_ccy
+
+ # freq
+ if self.data_req.source_freq is None:
+ self.data_req.source_freq = self.data_req.freq
+
  # start date
- if self.data_req.start_date is None:
- start_date = datetime(1920, 1, 1)
+ if self.data_req.source_start_date is None:
+ self.data_req.source_start_date = pd.Timestamp('1920-01-01')
  else:
- start_date = self.data_req.start_date
+ self.data_req.source_start_date = self.data_req.start_date
+
  # end date
  if self.data_req.end_date is None:
- end_date = pd.Timestamp.utcnow()
+ self.data_req.source_end_date = pd.Timestamp.utcnow().tz_localize(None)
  else:
- end_date = self.data_req.end_date
+ self.data_req.source_end_date = self.data_req.end_date
+
  # fields
- if self.data_req.source_fields is not None:
- fields = self.data_req.source_fields
- self.data_req.fields = self.data_req.source_fields
- else:
- fields = self.convert_fields(data_source='fred')
+ if self.data_req.source_fields is None:
+ self.data_req.source_fields = self.convert_fields(data_source='fred')
+
  # tz
  if self.data_req.tz is None:
- tz = "America/New_York"
- else:
- tz = self.data_req.tz
+ self.data_req.tz = "America/New_York"

- return {
- "tickers": tickers,
- "freq": freq,
- "quote_ccy": quote_ccy,
- "exch": self.data_req.exch,
- "ctys": None,
- "mkt_type": self.data_req.mkt_type,
- "mkts": None,
- "start_date": start_date,
- "end_date": end_date,
- "fields": fields,
- "tz": tz,
- "inst": None,
- "cat": self.data_req.cat,
- "trials": self.data_req.trials,
- "pause": self.data_req.pause,
- "source_tickers": self.data_req.source_tickers,
- "source_freq": self.data_req.source_freq,
- "source_fields": self.data_req.source_fields,
- }
+ return self.data_req

  def to_wb(self) -> Dict[str, Union[list, str, int, float, datetime, None]]:
  """
  Convert tickers from CryptoDataPy to Yahoo Finance format.
  """
- # convert tickers
+ # tickers
  with resources.path("cryptodatapy.conf", "tickers.csv") as f:
  tickers_path = f
- tickers_df, tickers = pd.read_csv(tickers_path, index_col=0, encoding="latin1"), []
+ tickers_df = pd.read_csv(tickers_path, index_col=0, encoding="latin1")

- if self.data_req.source_tickers is not None:
- tickers = self.data_req.source_tickers
- self.data_req.tickers = self.data_req.source_tickers
- else:
+ if self.data_req.source_tickers is None:
+ self.data_req.source_tickers = []
  for ticker in self.data_req.tickers:
  try:
- tickers.append(tickers_df.loc[ticker, "wb_id"])
+ self.data_req.source_tickers.append(tickers_df.loc[ticker, "wb_id"])
  except KeyError:
  logging.warning(
- f"{ticker} not found for World Bank data source. Check tickers in"
+ f"{ticker} not found for World Bank source. Check tickers in"
  f" data catalog and try again."
  )
- self.data_req.tickers.remove(ticker)
  # drop dupes
- tickers = list(set(tickers))
- # convert freq
- if self.data_req.source_freq is not None:
- freq = self.data_req.source_freq
- self.data_req.freq = self.data_req.source_freq
- else:
- freq = self.data_req.freq
+ self.data_req.source_tickers = list(set(self.data_req.source_tickers))
+
+ # freq
+ if self.data_req.source_freq is None:
+ self.data_req.source_freq = self.data_req.freq
+
  # convert quote ccy
  if self.data_req.quote_ccy is None:
- quote_ccy = "USD"
+ self.data_req.quote_ccy = "USD"
  else:
- quote_ccy = self.data_req.quote_ccy.upper()
- # convert ctys
+ self.data_req.quote_ccy = self.data_req.quote_ccy.upper()
+
+ # ctys
  ctys_list = []
  if self.data_req.cat == "macro":
  for ticker in self.data_req.tickers:
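The hunk above captures the main refactor in convertparams.py, repeated for to_wb, to_yahoo and to_famafrench in the hunks that follow: instead of building and returning a dict of converted parameters, each converter now fills the source_* attributes (source_tickers, source_freq, source_start_date, source_end_date, source_fields) on the DataRequest and returns the request itself. A minimal sketch of the new calling pattern; the constructor arguments and ticker names are illustrative, not taken from the package:

from cryptodatapy.extract.datarequest import DataRequest
from cryptodatapy.transform.convertparams import ConvertParams

# hypothetical request: CryptoDataPy tickers, monthly frequency, macro category
data_req = DataRequest(source='fred', tickers=['US_CPI', 'US_GDP'], freq='m', cat='macro')

# 0.2.7 style: the converter returned a plain dict of converted parameters
# params = ConvertParams(data_req).to_fred()
# fred_tickers = params['tickers']

# 0.2.9 style: the converter mutates and returns the DataRequest itself
data_req = ConvertParams(data_req).to_fred()
fred_tickers = data_req.source_tickers                # FRED series ids looked up in tickers.csv
window = (data_req.source_start_date, data_req.source_end_date)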
@@ -800,62 +770,44 @@ class ConvertParams:
  f"{ticker} not found for {self.data_req.source} source. Check tickers in "
  f"data catalog and try again."
  )
- ctys_list = list(set(ctys_list))
+ self.data_req.ctys = list(set(ctys_list))
+
  # start date
  if self.data_req.start_date is None:
- start_date = 1920
+ self.data_req.source_start_date = 1920
  else:
- start_date = int(self.data_req.start_date.year)
+ self.data_req.source_start_date = int(self.data_req.start_date.year)
+
  # end date
  if self.data_req.end_date is None:
- end_date = pd.Timestamp.utcnow().year
+ self.data_req.source_end_date = pd.Timestamp.utcnow().year
  else:
- end_date = int(self.data_req.end_date.year)
+ self.data_req.source_end_date = int(self.data_req.end_date.year)
+
  # fields
- if self.data_req.source_fields is not None:
- fields = self.data_req.source_fields
- self.data_req.fields = self.data_req.source_fields
- else:
- fields = self.convert_fields(data_source='wb')
+ if self.data_req.source_fields is None:
+ self.data_req.source_fields = self.convert_fields(data_source='wb')

- return {
- "tickers": tickers,
- "freq": freq,
- "quote_ccy": quote_ccy,
- "exch": self.data_req.exch,
- "ctys": ctys_list,
- "mkt_type": None,
- "mkts": None,
- "start_date": start_date,
- "end_date": end_date,
- "fields": fields,
- "tz": self.data_req.tz,
- "inst": None,
- "cat": self.data_req.cat,
- "trials": self.data_req.trials,
- "pause": self.data_req.pause,
- "source_tickers": self.data_req.source_tickers,
- "source_freq": self.data_req.source_freq,
- "source_fields": self.data_req.source_fields,
- }
+ return self.data_req

- def to_yahoo(self) -> Dict[str, Union[list, str, int, float, datetime, None]]:
+ def to_yahoo(self) -> DataRequest:
  """
  Convert tickers from CryptoDataPy to Yahoo Finance format.
  """
  # tickers
  with resources.path("cryptodatapy.conf", "tickers.csv") as f:
  tickers_path = f
- tickers_df, tickers = pd.read_csv(tickers_path, index_col=0, encoding="latin1"), []
+ tickers_df = pd.read_csv(tickers_path, index_col=0, encoding="latin1")

  if self.data_req.source_tickers is None:
  if self.data_req.cat == 'eqty':
  self.data_req.source_tickers = [ticker.upper() for ticker in self.data_req.tickers]
+ self.data_req.tickers = self.data_req.source_tickers
  else:
  self.data_req.source_tickers = []
+ if self.data_req.cat == 'fx':
+ self.data_req.tickers = [ticker.upper() for ticker in self.data_req.tickers]
  for ticker in self.data_req.tickers:
- if self.data_req.cat == 'fx':
- ticker = ticker.upper()
  try:
  self.data_req.source_tickers.append(tickers_df.loc[ticker, "yahoo_id"])
  except KeyError:
@@ -888,88 +840,45 @@ class ConvertParams:
  if self.data_req.tz is None:
  self.data_req.tz = "America/New_York"

- # return {
- # "tickers": tickers,
- # "freq": freq,
- # "quote_ccy": quote_ccy,
- # "exch": self.data_req.exch,
- # "ctys": None,
- # "mkt_type": self.data_req.mkt_type,
- # "mkts": None,
- # "start_date": start_date,
- # "end_date": end_date,
- # "fields": fields,
- # "tz": tz,
- # "inst": None,
- # "cat": self.data_req.cat,
- # "trials": self.data_req.trials,
- # "pause": self.data_req.pause,
- # "source_tickers": self.data_req.source_tickers,
- # "source_freq": self.data_req.source_freq,
- # "source_fields": self.data_req.source_fields,
- # }
-
- def to_famafrench(self) -> Dict[str, Union[list, str, int, float, datetime, None]]:
+ return self.data_req
+
+ def to_famafrench(self) -> DataRequest:
  """
  Convert tickers from CryptoDataPy to Fama-French format.
  """
- # convert tickers
+ # tickers
  with resources.path("cryptodatapy.conf", "tickers.csv") as f:
  tickers_path = f
- tickers_df, tickers = pd.read_csv(tickers_path, index_col=0, encoding="latin1"), []
+ tickers_df = pd.read_csv(tickers_path, index_col=0, encoding="latin1")

- if self.data_req.source_tickers is not None:
- tickers = self.data_req.source_tickers
- self.data_req.tickers = self.data_req.source_tickers
- else:
+ if self.data_req.source_tickers is None:
+ self.data_req.source_tickers = []
  for ticker in self.data_req.tickers:
  try:
- tickers.append(tickers_df.loc[ticker, "famafrench_id"])
+ self.data_req.source_tickers.append(tickers_df.loc[ticker, "famafrench_id"])
  except KeyError:
  logging.warning(
  f"{ticker} not found for Fama-French source. Check tickers in"
  f" data catalog and try again."
  )
- self.data_req.tickers.remove(ticker)
- # convert freq
- if self.data_req.source_freq is not None:
- freq = self.data_req.source_freq
- self.data_req.freq = self.data_req.source_freq
- else:
- freq = self.data_req.freq
- # convert quote ccy
- quote_ccy = self.data_req.quote_ccy
+
+ # freq
+ if self.data_req.source_freq is None:
+ self.data_req.source_freq = self.data_req.freq
+
  # start date
  if self.data_req.start_date is None:
- start_date = datetime(1920, 1, 1)
+ self.data_req.source_start_date = datetime(1920, 1, 1)
  else:
- start_date = self.data_req.start_date
+ self.data_req.source_start_date = self.data_req.start_date
+
  # end date
  if self.data_req.end_date is None:
- end_date = pd.Timestamp.utcnow().date()
+ self.data_req.source_end_date = datetime.now()
  else:
- end_date = self.data_req.end_date
+ self.data_req.source_end_date = self.data_req.end_date

- return {
- "tickers": tickers,
- "freq": freq,
- "quote_ccy": quote_ccy,
- "exch": self.data_req.exch,
- "ctys": None,
- "mkt_type": self.data_req.mkt_type,
- "mkts": None,
- "start_date": start_date,
- "end_date": end_date,
- "fields": self.data_req.fields,
- "tz": self.data_req.tz,
- "inst": None,
- "cat": self.data_req.cat,
- "trials": self.data_req.trials,
- "pause": self.data_req.pause,
- "source_tickers": self.data_req.source_tickers,
- "source_freq": self.data_req.source_freq,
- "source_fields": self.data_req.source_fields,
- }
+ return self.data_req

  def to_aqr(self) -> Dict[str, Union[list, str, int, dict, float, datetime, None]]:
  """
@@ -717,16 +717,22 @@ class WrangleData:
  """
  # convert fields to lib
  self.convert_fields_to_lib(data_source='dbnomics')
+
  # convert to datetime
  self.data_resp['date'] = pd.to_datetime(self.data_resp['date'])
+
  # set index
  self.data_resp = self.data_resp.set_index('date').sort_index()
+
  # resample
  self.data_resp = self.data_resp.resample(self.data_req.freq).last().ffill()
+
  # filter dates
  self.filter_dates()
+
  # type conversion
  self.data_resp = self.data_resp.apply(pd.to_numeric, errors='coerce').convert_dtypes()
+
  # remove bad data
  self.data_resp = self.data_resp[self.data_resp != 0] # 0 values
  self.data_resp = self.data_resp[~self.data_resp.index.duplicated()] # duplicate rows
@@ -862,24 +868,29 @@ class WrangleData:
  -------
  pd.DataFrame
  Wrangled dataframe into tidy data format.
-
  """
- # convert tickers to cryptodatapy format
+ # tickers
  self.data_resp.columns = self.data_req.tickers # convert tickers to cryptodatapy format
+
  # resample to match end of reporting period, not beginning
  self.data_resp = self.data_resp.resample('d').last().ffill().resample(self.data_req.freq).last().stack(). \
  to_frame().reset_index()
+
  # convert cols
  if self.data_req.cat == 'macro':
  self.data_resp.columns = ['DATE', 'symbol', 'actual']
  else:
  self.data_resp.columns = ['DATE', 'symbol', 'close']
- # convert fields to lib
+
+ # fields
  self.convert_fields_to_lib(data_source='fred')
- # set index
+
+ # index
  self.data_resp.set_index(['date', 'ticker'], inplace=True)
+
  # type conversion
  self.data_resp = self.data_resp.apply(pd.to_numeric, errors='coerce').convert_dtypes()
+
  # remove bad data
  self.data_resp = self.data_resp[self.data_resp != 0] # 0 values
  self.data_resp = self.data_resp[~self.data_resp.index.duplicated()] # duplicate rows
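One step worth calling out in the FRED wrangle above: the chained resample('d').last().ffill().resample(freq).last() upsamples to a daily grid and forward-fills before downsampling again, so each observation ends up stamped at the end of its reporting period rather than the beginning. A self-contained pandas sketch of that pattern on toy data (not library code):

import pandas as pd

# monthly series stamped at the start of the month, as FRED often reports it
idx = pd.date_range('2023-01-01', periods=3, freq='MS')
df = pd.DataFrame({'US_CPI': [299.2, 300.8, 301.5]}, index=idx)

# upsample to daily and forward-fill, then take the last daily value per month,
# which moves the timestamp to the month end
monthly_end = df.resample('d').last().ffill().resample('M').last()
print(monthly_end.index)  # DatetimeIndex(['2023-01-31', '2023-02-28', '2023-03-31'], ...)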
@@ -896,37 +907,41 @@ class WrangleData:
  pd.DataFrame
  Wrangled dataframe into tidy data format.
  """
- # convert tickers
- if len(self.data_req.tickers) == 1: # add ticker
- if self.data_req.cat == 'eqty' or self.data_req.cat == 'fx':
- self.data_resp['Ticker'] = self.data_req.tickers[0].upper()
- else:
- self.data_resp['Ticker'] = self.data_req.tickers[0]
- else: # convert tickers to cryptodatapy format
- self.data_resp = self.data_resp.stack() # stack to multi-index
+ # tickers
+ tickers_dict = {source_ticker: ticker for source_ticker, ticker in zip(self.data_req.source_tickers,
+ self.data_req.tickers)}
+ if len(self.data_req.tickers) == 1:
+ self.data_resp['Ticker'] = self.data_req.tickers[0]
+ else:
+ self.data_resp = self.data_resp.stack()
  self.data_resp.index.names = ['Date', 'Ticker']
- if self.data_req.cat == 'eqty' or self.data_req.cat == 'fx':
- self.data_resp.index = self.data_resp.index.set_levels([ticker.upper() for ticker in
- self.data_req.tickers], level=1)
- else:
- self.data_resp.index = self.data_resp.index.set_levels([ticker for ticker in self.data_req.tickers],
- level=1)
+ self.data_resp.index = self.data_resp.index.set_levels(self.data_resp.index.levels[1].map(tickers_dict),
+ level=1)
  self.data_resp.reset_index(inplace=True)
- # convert fields
+
+ # fields
  self.convert_fields_to_lib(data_source='yahoo')
- # convert to datetime
+
+ # index
  self.data_resp['date'] = pd.to_datetime(self.data_resp['date'])
+ self.data_resp.set_index(['date', 'ticker'], inplace=True)
+
  # resample
- self.data_resp = self.data_resp.set_index('date').groupby('ticker').resample(self.data_req.freq).last().\
- droplevel(0).reset_index().set_index(['date', 'ticker'])
+ self.data_resp = self.data_resp.groupby('ticker').\
+ resample(self.data_req.freq, level='date').\
+ last().swaplevel('ticker', 'date').sort_index()
+
  # re-order cols
  self.data_resp = self.data_resp.loc[:, ['open', 'high', 'low', 'close', 'close_adj', 'volume']]
+
  # type conversion
  self.data_resp = self.data_resp.apply(pd.to_numeric, errors='coerce').convert_dtypes()
+
  # remove bad data
  self.data_resp = self.data_resp[self.data_resp != 0] # 0 values
  self.data_resp = self.data_resp[~self.data_resp.index.duplicated()] # duplicate rows
  self.data_resp = self.data_resp.dropna(how='all').dropna(how='all', axis=1) # entire row or col NaNs
+
  # keep only requested fields and sort index
  self.data_resp = self.data_resp[self.data_req.fields].sort_index()

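The Yahoo wrangle now builds a source_ticker-to-ticker dict from the data request and renames the ticker level of the MultiIndex with set_levels and map, instead of re-assigning uppercased request tickers by position. A small sketch of that index-renaming pattern on toy data (symbols and mapping are illustrative):

import pandas as pd

# toy response indexed by (Date, Ticker) with Yahoo Finance symbols
idx = pd.MultiIndex.from_product(
    [pd.to_datetime(['2024-01-02', '2024-01-03']), ['BTC-USD', 'SPY']],
    names=['Date', 'Ticker'])
df = pd.DataFrame({'close': [42000.0, 470.1, 43000.0, 471.3]}, index=idx)

# map Yahoo symbols back to the tickers used in the data request
tickers_dict = {'BTC-USD': 'BTC', 'SPY': 'SPY_US'}
df.index = df.index.set_levels(df.index.levels[1].map(tickers_dict), level=1)
print(df.index.get_level_values('Ticker').unique())  # ['BTC', 'SPY_US']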
@@ -942,7 +957,7 @@ class WrangleData:
  Wrangled dataframe into tidy data format.

  """
- # convert tickers to cryptodatapy format
+ # ticker
  ff_tickers_dict = {'RF': 'US_Rates_1M_RF',
  'Mkt-RF': 'US_Eqty_CSRP_ER',
  'HML': 'US_Eqty_Val',
@@ -951,6 +966,7 @@ class WrangleData:
  'CMA': 'US_Eqty_Inv',
  'Mom': 'US_Eqty_Mom',
  'ST_Rev': 'US_Eqty_STRev'}
+
  # remove white space from cols str
  self.data_resp.columns = [col.strip() for col in self.data_resp.columns]
  # keep cols in data req tickers
@@ -959,14 +975,18 @@ class WrangleData:
  drop_cols = [col for col in self.data_resp.columns if col not in self.data_req.tickers]
  self.data_resp.drop(columns=drop_cols, inplace=True)
  self.data_resp = self.data_resp.loc[:, ~self.data_resp.columns.duplicated()] # drop dup cols
+
  # resample freq
  self.data_resp = self.data_resp.resample(self.data_req.freq).sum()
+
  # format index
  self.data_resp.index.name = 'date' # rename
  self.data_resp = self.data_resp.stack().to_frame('er')
  self.data_resp.index.names = ['date', 'ticker']
+
  # type and conversion to decimals
  self.data_resp = self.data_resp.apply(pd.to_numeric, errors='coerce').convert_dtypes() / 100
+
  # remove bad data
  self.data_resp = self.data_resp[self.data_resp != 0] # 0 values
  self.data_resp = self.data_resp[~self.data_resp.index.duplicated()] # duplicate rows
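For reference, the Fama-French wrangle above maps the factor columns to CryptoDataPy tickers, stacks the wide frame into a long (date, ticker) frame named 'er', and divides by 100 to turn percentage returns into decimals. A toy sketch of that reshaping; the values are made up and only a subset of the ff_tickers_dict mapping is used:

import pandas as pd

# toy monthly factor returns in percent, as in the Fama-French CSV files
idx = pd.date_range('2023-01-31', periods=2, freq='M')
df = pd.DataFrame({'Mkt-RF': [4.5, -2.1], 'HML': [1.2, 0.3]}, index=idx)

# rename columns to CryptoDataPy tickers
df = df.rename(columns={'Mkt-RF': 'US_Eqty_CSRP_ER', 'HML': 'US_Eqty_Val'})

# stack to a long frame with a (date, ticker) index and convert percent to decimals
df.index.name = 'date'
df = df.stack().to_frame('er') / 100
df.index.names = ['date', 'ticker']
print(df.head())  # e.g. ('2023-01-31', 'US_Eqty_CSRP_ER') -> 0.045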
@@ -1,6 +1,6 @@
  Metadata-Version: 2.1
  Name: cryptodatapy
- Version: 0.2.7
+ Version: 0.2.9
  Summary: Cryptoasset data library
  License: Apache-2.0
  Author: Systamental
@@ -1,7 +1,6 @@
  cryptodatapy/__init__.py,sha256=ee1UaINHZn1A_SZ96XM3hCguQEJgiPTvKlnYsk3mmS4,185
  cryptodatapy/conf/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
  cryptodatapy/conf/fields.csv,sha256=Xjs_sWIY4DuhdHlWOPo0pgDC8sHYA6xzf6kb0PuUp9w,25735
- cryptodatapy/conf/fx_tickers.csv,sha256=vqbY93_6Zi4vXg8iu0veXZ-NDm_NV2rrmb5lNYRqNUA,288
  cryptodatapy/conf/tickers.csv,sha256=Bs9KfDKawoUPKIQZMN8CtoLYJuzOkwLh2il30c8CsqE,357066
  cryptodatapy/datasets/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
  cryptodatapy/datasets/br_econ_calendar.csv,sha256=mSM0IOIByI-0gIIL1CbDQPqHYI5lK6vavrY1ODj3Jlk,1185318
@@ -27,47 +26,37 @@ cryptodatapy/extract/data_vendors/.ipynb_checkpoints/DBNomics-checkpoint.ipynb,s
  cryptodatapy/extract/data_vendors/.ipynb_checkpoints/InvestPy-checkpoint.ipynb,sha256=ybcHKXzmmlTYEoxC-qkWmd0Pjn1WjJ5CMPvMVotqJ7o,50215
  cryptodatapy/extract/data_vendors/.ipynb_checkpoints/NasdaqDataLink-checkpoint.ipynb,sha256=hY2QkCcTiLgPnl8SQPsO8spio5-RBMGeBLYzAwgSWb4,147170
  cryptodatapy/extract/data_vendors/.ipynb_checkpoints/PandasDataReader-checkpoint.ipynb,sha256=n7vzOV6AxC_Ti5CLWW2ABMEEcbbBpiBBs4qTUBQinIg,24171
- cryptodatapy/extract/data_vendors/CoinMetrics.ipynb,sha256=OtepxnBvt2DMCJpPFcY0kGhMRcKhw8ArSL85Cd3oO10,24923
  cryptodatapy/extract/data_vendors/__init__.py,sha256=Nk6gcT43d0XOLfrlVA9r--5mvHCgHfq295IKL3Puu74,354
  cryptodatapy/extract/data_vendors/coinmetrics_api.py,sha256=2fnsgKkBWjzMa1jzfVa7UbJKTpMWzcFVjo0bKDEud8U,34991
  cryptodatapy/extract/data_vendors/cryptocompare_api.py,sha256=3oBfQioBz1vrs9JNtwE0hBLI4BTtpFBBEEsDawmobE8,28872
  cryptodatapy/extract/data_vendors/datavendor.py,sha256=kGKxHcPng6JiGGhcuPx87ij0DXl4E-OSqxlvxhJ1HQo,12642
  cryptodatapy/extract/data_vendors/glassnode_api.py,sha256=PuuJOjHztoJyFijb5XU1zm1S_2NAj7MX-wC89DL_bWQ,13103
  cryptodatapy/extract/data_vendors/tiingo_api.py,sha256=Bvj5nF8zCkpU3cf5ImUmCS1cd1w2UtjgQvRmQ9Wfg6g,26404
- cryptodatapy/extract/datarequest.py,sha256=KMfrc4WeHivNeIKN-sbLFWD7sqPrwasPFiee6QUWRuA,25015
+ cryptodatapy/extract/datarequest.py,sha256=Yi1pVe-ljv_su6kwEw5uylhG3XtneujBaNS2pmAIyk0,25879
+ cryptodatapy/extract/exchanges/__init__.py,sha256=VKTNzrbe-wltGHWH9lK5RLZoXCGHp-UGGZ4gMVHJXrQ,113
+ cryptodatapy/extract/exchanges/dydx.py,sha256=Oifb4sKbPRKArdZBx9q5ob4yTFkd65n0zXiS7hga0mk,4199
+ cryptodatapy/extract/exchanges/exchange.py,sha256=Cicj3KS4zLbwmXX5fu89byXNwqqU4TH31GFv0zj3D4s,13010
  cryptodatapy/extract/getdata.py,sha256=HzWQyacfmphms97LVKbx1gEgcgsQJViBT4BBxL9TBXk,8703
- cryptodatapy/extract/libraries/Untitled.ipynb,sha256=4SGovpmy_kLijEhzBhVJoYNVPrwrERRO0DL6gwzeRvM,614
  cryptodatapy/extract/libraries/__init__.py,sha256=9rJ_hFHWlvkPwyIkNG5bqH6HTY2jQNPIKQjzYEsVSDo,319
- cryptodatapy/extract/libraries/ccxt.ipynb,sha256=O-xkr_jtRBY4kuKWek61UOLaU5AiyNRM7AnquNLUjFs,22865
- cryptodatapy/extract/libraries/ccxt_api.py,sha256=E_jVIKZ5iDjuTK-JwJFENvuWG08B24nN-NcVruWSoeQ,35882
+ cryptodatapy/extract/libraries/ccxt_api.py,sha256=GK5sAD9f-7bo_HFjxmMrMl6gUTVfAi9B0ilKD4cRgvA,52975
  cryptodatapy/extract/libraries/dbnomics_api.py,sha256=M6kPIH-hKqkmeBQb-g56dY9jatqLCtSl_MnvPblHtAc,9421
  cryptodatapy/extract/libraries/investpy_api.py,sha256=qtGm3LDluXxJorvFv0w1bm1oBrcZIfE5cZSYzNYvttY,18409
- cryptodatapy/extract/libraries/library.py,sha256=070YsO1RJzm4z_enhCjqe5hrj8qsk-Ni0Q_QKoAwQ6U,12316
- cryptodatapy/extract/libraries/pandasdr_api.py,sha256=DFko-N_ZmINUnzmXIct_CgnwEeoTh1xkaXewcJXdR2k,13177
- cryptodatapy/extract/libraries/yfinance_api.py,sha256=E4c8gIpDh5ta8ILsn9SBs3C1pOU1VP4OqwQb6TcOzCc,17311
+ cryptodatapy/extract/libraries/library.py,sha256=eU8NnQZ9luLGdIF5hms6j8VPCWc50evkREc4xdh-g1I,12301
+ cryptodatapy/extract/libraries/pandasdr_api.py,sha256=-62P0W0Pa98f-96nB_bDgDkPFshP8yiqKZ9VU-usv94,13696
  cryptodatapy/extract/web/__init__.py,sha256=8i0fweCeqSpdiPf-47jT240I4ca6SizCu9aD-qDS67w,89
  cryptodatapy/extract/web/aqr.py,sha256=LS1D7QzG6UWkLUfDMgBFtiHpznnnAUOpec5Sx3vRGME,11875
- cryptodatapy/extract/web/web.py,sha256=27cAzlIyYn6R29726J7p9NhSwHypas9EQSjHLILtcjk,9748
+ cryptodatapy/extract/web/web.py,sha256=R1xEnHE1McxSWxp4vrTfgh9gW6FF6XDlp0gmp2NmWOM,12126
  cryptodatapy/transform/__init__.py,sha256=Spb5cGJ3V_o8hgSWOSrF8J_vsSZpFk0uzW7RpkgfbFE,131
- cryptodatapy/transform/cc_onchain_data.csv,sha256=qA9u3hekHk_NueBlMYQ7IKATh7AlnY-EN9E9X-9kIsU,9544500
  cryptodatapy/transform/clean.py,sha256=C9VypQOjdJ987TcD-qAHh7qYaoJBotvp3cWTr3ttSGM,12807
- cryptodatapy/transform/clean_onchain_data.ipynb,sha256=WrVPs8_WVKEgL6XRvGUATzeinqGUDTbXv_CHivg0nXg,687176
- cryptodatapy/transform/clean_perp_futures_ohlcv.ipynb,sha256=3TFTG6riUfu5f0uYvlMC44iUtQRd27sQPxBMXBXzp6A,72758
- cryptodatapy/transform/cmdty_data.ipynb,sha256=McAMfzNDfrv61gSlzFOkw_DXaOGZE1qfqXc2E_KeSbs,1220371
- cryptodatapy/transform/convertparams.py,sha256=RxAOgtmmFaffX4cs0drLlG-jn3erF5K8BQ8EzlviZDo,43261
- cryptodatapy/transform/credit_data.ipynb,sha256=Wvvnu9ejsmqCb0s3cTG8bLJaywWQCskgk6FBd5J5Vf8,1892822
- cryptodatapy/transform/eqty_data.ipynb,sha256=A5cA13hOPrOe7Fra0HL4QPFkJGVfArigTR0GUUBpQ3A,25609
+ cryptodatapy/transform/convertparams.py,sha256=X80Hdi2AMHVSYTJ6i-ovOzv5L6JQlGswJlC82xCriX8,39687
  cryptodatapy/transform/filter.py,sha256=iQDUXthEXVGcrZUZLjevhDqwf9oywEQHTIh6n_sxOhU,9056
- cryptodatapy/transform/global_credit_data_daily.parquet ,sha256=Dw27SX41AeSYcZyYlrGbwVe8KZM6c35TQ-gzCd2gU2I,745732
  cryptodatapy/transform/impute.py,sha256=c7qdgFg0qs_xuQnX0jazpt0wgASC0KElLZRuxTkeVKY,5519
  cryptodatapy/transform/od.py,sha256=z__CWiN70f1leqx12SS9pIvTggxpUPrg1falJIKMZCc,31031
- cryptodatapy/transform/rates_data.ipynb,sha256=olKY4t2j4sfjsCYlhupTgaviC6922HHGBr-y3f80qjQ,13358
- cryptodatapy/transform/us_rates_daily.csv,sha256=BIA4a6egQYrVsLk51IZ54ZXXWMwjrx_t5S4XMdvHg44,6434830
- cryptodatapy/transform/wrangle.py,sha256=uYOrV4PQ3I0z4C5K3x468w82U3uhGRTEIPVrabaSS04,42987
+ cryptodatapy/transform/wrangle.py,sha256=KqPIY7akFtHasW5gqUNR1cCGMBBkgHmzWxyMZFw8t-Q,42564
  cryptodatapy/util/__init__.py,sha256=zSQ2HU2QIXzCuptJjknmrClwtQKCvIj4aNysZljIgrU,116
  cryptodatapy/util/datacatalog.py,sha256=qCCX6srXvaAbVAKuA0M2y5IK_2OEx5xA3yRahDZlC-g,13157
  cryptodatapy/util/datacredentials.py,sha256=fXuGgI2NKCLlcnK8M37CtdyAc3O_YCV23x3KTlfakjA,2160
- cryptodatapy-0.2.7.dist-info/LICENSE,sha256=sw4oVq8bDjT3uMtaFebQ-xeIVP4H-bXldTs9q-Jjeks,11344
- cryptodatapy-0.2.7.dist-info/METADATA,sha256=lwX6PFxeEg4RAqQo7LAteOZ2vxnh-NefSslgZtaQNzA,6426
- cryptodatapy-0.2.7.dist-info/WHEEL,sha256=sP946D7jFCHeNz5Iq4fL4Lu-PrWrFsgfLXbbkciIZwg,88
- cryptodatapy-0.2.7.dist-info/RECORD,,
+ cryptodatapy-0.2.9.dist-info/LICENSE,sha256=sw4oVq8bDjT3uMtaFebQ-xeIVP4H-bXldTs9q-Jjeks,11344
+ cryptodatapy-0.2.9.dist-info/METADATA,sha256=WwkoejYYowxSvY5WYs2-n1rb-4B3qdz-hFCFcFyJjZE,6426
+ cryptodatapy-0.2.9.dist-info/WHEEL,sha256=sP946D7jFCHeNz5Iq4fL4Lu-PrWrFsgfLXbbkciIZwg,88
+ cryptodatapy-0.2.9.dist-info/RECORD,,
@@ -1,31 +0,0 @@
- id,name,tiingo_id
- eurusd,,
- gbpusd,,
- usdjpy,,
- usdchf,,
- usdcad,,
- usdsek,,
- usdnok,,
- audusd,,
- nzdusd,,
- usdars,,
- usdmxn,,
- usdbrl,,
- usdcop,,
- usdclp,,
- usdpen,,
- usdils,,
- usdrub,,
- usdczk,,
- usdpln,,
- usdhuf,,
- usdzar,,
- usdtry,,
- usdcny,,
- usdhkd,,
- usdsgd,,
- usdtwd,,
- usdkrw,,
- usdphp,,
- usdinr,,
- usdidr,,