yfinrb 0.1.0

@@ -0,0 +1,260 @@
require 'date'
require 'open-uri'
require 'json'
require 'csv'
require 'active_support'
require 'active_support/core_ext/hash' # Hash#except! is used in the parsers below

class Yfin
  module Holders
    extend ActiveSupport::Concern
    # include YfConnection

    BASE_URL = 'https://query2.finance.yahoo.com'.freeze
    QUOTE_SUMMARY_URL = "#{BASE_URL}/v10/finance/quoteSummary/".freeze

    # attr_accessor :ticker

    def self.included(base) # built-in Ruby hook for modules
      base.class_eval do
        original_method = instance_method(:initialize)
        define_method(:initialize) do |*args, &block|
          original_method.bind(self).call(*args, &block)
          initialize_holders # (your module code here)
        end
      end
    end
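
    # Wrapping #initialize this way ensures initialize_holders runs right after the including
    # class's own constructor, so the memoized instance variables below always exist.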

    def initialize_holders
      @major = nil
      @major_direct_holders = nil
      @institutional = nil
      @mutualfund = nil

      @insider_transactions = nil
      @insider_purchases = nil
      @insider_roster = nil
    end

    def major
      _fetch_and_parse if @major.nil?
      @major
    end

    alias_method :major_holders, :major

    def institutional
      _fetch_and_parse if @institutional.nil?
      @institutional
    end

    alias_method :institutional_holders, :institutional

    def mutualfund
      _fetch_and_parse if @mutualfund.nil?
      @mutualfund
    end

    alias_method :mutualfund_holders, :mutualfund

    def insider_transactions
      _fetch_and_parse if @insider_transactions.nil?
      @insider_transactions
    end

    def insider_purchases
      _fetch_and_parse if @insider_purchases.nil?
      @insider_purchases
    end

    def insider_roster
      return @insider_roster unless @insider_roster.nil?

      _fetch_and_parse
      @insider_roster
    end

    alias_method :insider_roster_holders, :insider_roster

    # holders_methods = [:major, :major_holders, :institutional, :institutional_holders, :mutualfund,
    #                    :mutualfund_holders, :insider_transactions, :insider_purchases, :insider_roster,
    #                    :insider_roster_holders]
    # holders_methods.each do |meth|
    #   alias_method "get_#{meth}".to_sym, meth
    # end

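    # Usage sketch (assumes this concern is included into a host that exposes a #symbol method;
    # Yfin.new("AAPL") below is only an illustration of such a host):
    #
    #   yf = Yfin.new("AAPL")
    #   yf.major_holders          # => hash of majorHoldersBreakdown fields (percent held by insiders/institutions, ...)
    #   yf.institutional_holders  # => [{ 'Date Reported' => ..., 'Holder' => ..., 'Shares' => ..., 'Value' => ... }, ...]
    #
    # The first accessor call triggers a single batched quoteSummary request via _fetch_and_parse;
    # subsequent calls return the memoized results.
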
    private

    def _fetch_for_parse(params)
      # raise YahooFinanceException, "Should provide a list of modules, see available modules using `valid_modules`" unless modules.is_a?(Array)
      # modules = modules.intersection(QUOTE_SUMMARY_VALID_MODULES)

      modules = params[:modules]

      raise YahooFinanceException, "No valid modules provided." if modules.nil? || modules.empty?

      params_dict = { modules: modules, corsDomain: 'finance.yahoo.com', formatted: 'false', symbol: symbol }

      begin
        # NOTE: assumes get_raw_json(url, headers, params); adjust if YfConnection exposes a different signature.
        result = get_raw_json("#{QUOTE_SUMMARY_URL}#{symbol}", user_agent_headers, params_dict)
        # Rails.logger.info { "#{__FILE__}:#{__LINE__} result = #{result.inspect}" }
      rescue Exception => e
        Rails.logger.error("ERROR: #{e.message}")
        return nil
      end
      result
    end

    # def _fetch_for_parse(params)
    #   url = "#{QUOTE_SUMMARY_URL}#{symbol}"
    #   Rails.logger.info { "#{__FILE__}:#{__LINE__} url: #{url}, params = #{params.inspect}" }
    #   get(url).parsed_response
    #
    #   # JSON.parse(URI.open(url, proxy: proxy, 'User-Agent' => 'Mozilla/5.0 (compatible; yahoo-finance2/0.0.1)').read(query: params))
    # end

    def _fetch_and_parse
      modules = ['institutionOwnership', 'fundOwnership', 'majorDirectHolders', 'majorHoldersBreakdown',
                 'insiderTransactions', 'insiderHolders', 'netSharePurchaseActivity'].join(',')
      # Rails.logger.info { "#{__FILE__}:#{__LINE__} modules = #{modules.inspect}" }
      params = { modules: modules, corsDomain: 'finance.yahoo.com', formatted: 'false' }
      result = _fetch_for_parse(params)

      _parse_result(result)
    rescue OpenURI::HTTPError => e
      # Rails.logger.error { "#{__FILE__}:#{__LINE__} Error: #{e.message}" }

      @major = []
      @major_direct_holders = []
      @institutional = []
      @mutualfund = []
      @insider_transactions = []
      @insider_purchases = []
      @insider_roster = []
    end

    def _parse_result(result)
      data = result.parsed_response['quoteSummary']['result'].first # .dig('quoteSummary', 'result', 0)
      Rails.logger.info { "#{__FILE__}:#{__LINE__} data = #{data.inspect}" }
      _parse_institution_ownership(data['institutionOwnership'])
      _parse_fund_ownership(data['fundOwnership'])
      _parse_major_holders_breakdown(data['majorHoldersBreakdown'])
      _parse_insider_transactions(data['insiderTransactions'])
      _parse_insider_holders(data['insiderHolders'])
      _parse_net_share_purchase_activity(data['netSharePurchaseActivity'])
    rescue NoMethodError
      raise "Failed to parse holders json data."
    end

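    # Yahoo's quoteSummary payload often wraps numeric fields as { 'raw' => ..., 'fmt' => '...' }
    # hashes; keep the machine-readable 'raw' value and pass anything else through unchanged.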
    def _parse_raw_values(data)
      data.is_a?(Hash) && data.key?('raw') ? data['raw'] : data
    end

    def _parse_institution_ownership(data)
      holders = data['ownershipList'].map { |owner| owner.transform_values { |v| _parse_raw_values(v) }.except('maxAge') }

      @institutional = holders.map do |holder|
        {
          'Date Reported' => DateTime.strptime(holder['reportDate'].to_s, '%s'),
          'Holder' => holder['organization'],
          'Shares' => holder['position'],
          'Value' => holder['value']
        }
      end
    end

    def _parse_fund_ownership(data)
      holders = data['ownershipList'].map { |owner| owner.transform_values { |v| _parse_raw_values(v) }.except('maxAge') }

      @mutualfund = holders.map do |holder|
        {
          'Date Reported' => DateTime.strptime(holder['reportDate'].to_s, '%s'),
          'Holder' => holder['organization'],
          'Shares' => holder['position'],
          'Value' => holder['value']
        }
      end
    end

    def _parse_major_holders_breakdown(data)
      data.except!('maxAge') if data.key?('maxAge')
      @major = data.map { |k, v| [k, _parse_raw_values(v)] }.to_h
    end

    def _parse_insider_transactions(data)
      holders = data['transactions'].map { |owner| owner.transform_values { |v| _parse_raw_values(v) }.except('maxAge') }

      @insider_transactions = holders.map do |holder|
        {
          'Start Date' => DateTime.strptime(holder['startDate'].to_s, '%s'),
          'Insider' => holder['filerName'],
          'Position' => holder['filerRelation'],
          'URL' => holder['filerUrl'],
          'Transaction' => holder['moneyText'],
          'Text' => holder['transactionText'],
          'Shares' => holder['shares'],
          'Value' => holder['value'],
          'Ownership' => holder['ownership']
        }
      end
    end

    def _parse_insider_holders(data)
      holders = data['holders'].map { |owner| owner.transform_values { |v| _parse_raw_values(v) }.except('maxAge') }

      @insider_roster = holders.map do |holder|
        {
          'Name' => holder['name'].to_s,
          'Position' => holder['relation'].to_s,
          'URL' => holder['url'].to_s,
          'Most Recent Transaction' => holder['transactionDescription'].to_s,
          'Latest Transaction Date' => holder['latestTransDate'] ? DateTime.strptime(holder['latestTransDate'].to_s, '%s') : nil,
          'Position Direct Date' => holder['positionDirectDate'] ? DateTime.strptime(holder['positionDirectDate'].to_s, '%s') : nil,
          'Shares Owned Directly' => holder['positionDirect'],
          'Position Indirect Date' => holder['positionIndirectDate'] ? DateTime.strptime(holder['positionIndirectDate'].to_s, '%s') : nil,
          'Shares Owned Indirectly' => holder['positionIndirect']
        }
      end
    end

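    # Mirrors the yfinance insider-purchases table: the period-labelled key holds the row labels,
    # while 'Shares' and 'Trans' hold the matching values as parallel arrays, one entry per row label.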
    def _parse_net_share_purchase_activity(data)
      period = data['period'] || ''
      @insider_purchases = {
        "Insider Purchases Last #{period}" => [
          'Purchases',
          'Sales',
          'Net Shares Purchased (Sold)',
          'Total Insider Shares Held',
          '% Net Shares Purchased (Sold)',
          '% Buy Shares',
          '% Sell Shares'
        ],
        'Shares' => [
          data['buyInfoShares'],
          data['sellInfoShares'],
          data['netInfoShares'],
          data['totalInsiderShares'],
          data['netPercentInsiderShares'],
          data['buyPercentInsiderShares'],
          data['sellPercentInsiderShares']
        ],
        'Trans' => [
          data['buyInfoCount'],
          data['sellInfoCount'],
          data['netInfoCount'],
          nil,
          nil,
          nil,
          nil
        ]
      }
    end
  end

end
@@ -0,0 +1,238 @@
class Yfin
  class Multi

    def download(tickers, start: nil, fin: nil, actions: false, threads: true,
                 ignore_tz: nil, group_by: 'column', auto_adjust: false,
                 back_adjust: false, repair: false, keepna: false, progress: true,
                 period: "max", show_errors: nil, interval: "1d", prepost: false,
                 proxy: nil, rounding: false, timeout: 10, session: nil)
      # Download Yahoo Finance price history for one or more tickers.
      #
      # :Parameters:
      #   tickers : String or Array
      #     List of tickers to download
      #   period : String
      #     Valid periods: 1d, 5d, 1mo, 3mo, 6mo, 1y, 2y, 5y, 10y, ytd, max
      #     Either use the period parameter or use start and fin
      #   interval : String
      #     Valid intervals: 1m, 2m, 5m, 15m, 30m, 60m, 90m, 1h, 1d, 5d, 1wk, 1mo, 3mo
      #     Intraday data cannot extend further back than the last 60 days
      #   start : String or DateTime
      #     Download start date string (YYYY-MM-DD) or DateTime, inclusive.
      #     Default is 99 years ago.
      #     E.g. for start: "2020-01-01", the first data point will be on "2020-01-01"
      #   fin : String or DateTime
      #     Download end date string (YYYY-MM-DD) or DateTime, exclusive.
      #     Default is now.
      #     E.g. for fin: "2023-01-01", the last data point will be on "2022-12-31"
      #   group_by : String
      #     Group by 'ticker' or 'column' (default)
      #   prepost : Boolean
      #     Include pre- and post-market data in results? Default is false
      #   auto_adjust : Boolean
      #     Adjust all OHLC automatically? Default is false
      #   repair : Boolean
      #     Detect currency unit 100x mixups and attempt repair. Default is false
      #   keepna : Boolean
      #     Keep NaN rows returned by Yahoo? Default is false
      #   actions : Boolean
      #     Download dividend and stock split data. Default is false
      #   threads : Boolean or Integer
      #     How many threads to use for mass downloading. Default is true
      #   ignore_tz : Boolean
      #     When combining data from different timezones, ignore the timezone part of the datetime.
      #     Default depends on interval: intraday = false, daily and longer = true
      #   proxy : String
      #     Optional. Proxy server URL scheme. Default is nil
      #   rounding : Boolean
      #     Optional. Round values to 2 decimal places?
      #   show_errors : Boolean
      #     Optional. Doesn't print errors if false.
      #     DEPRECATED, will be removed in a future version
      #   timeout : nil or Float
      #     If not nil, stop waiting for a response after the given number of seconds.
      #     (Can also be a fraction of a second, e.g. 0.01)
      #   session : nil or Session
      #     Optional. Pass your own session object to be used for all requests
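      #
      # Example call (a sketch; it assumes Yfin::Multi is instantiated directly and that the
      # Ticker, Utils, Multitasking and shared-state plumbing used below is wired up):
      #
      #   Yfin::Multi.new.download(%w[AAPL MSFT], period: "1y", interval: "1d", group_by: "ticker")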
      logger = Rails.logger

      unless show_errors.nil?
        if show_errors
          Utils.print_once("yfinrb: download(show_errors=#{show_errors}) argument is deprecated and will be removed in a future version. Do this instead: Rails.logger.level = Logger::ERROR")
          logger.level = Logger::ERROR
        else
          Utils.print_once("yfinrb: download(show_errors=#{show_errors}) argument is deprecated and will be removed in a future version. Do this instead to suppress error messages: Rails.logger.level = Logger::FATAL")
          logger.level = Logger::FATAL
        end
      end

      if logger.debug?
        threads = false if threads
        logger.debug('Disabling multithreading because DEBUG logging enabled')
        progress = false if progress
      end

      if ignore_tz.nil?
        # Intraday intervals (minutes/hours) keep their timezone part; daily and longer ignore it.
        ignore_tz = !interval.match?(/\A\d+[mh]\z/)
      end

      tickers = tickers.is_a?(Array) ? tickers : tickers.gsub(',', ' ').split
      _tickers_ = []
      tickers.each do |ticker|
        if Utils.is_isin(ticker)
          isin = ticker
          ticker = Utils.get_ticker_by_isin(ticker, proxy, session: session)
          # @shared::_ISINS[ticker] = isin
        end
        _tickers_ << ticker
      end
      tickers = _tickers_

      tickers = tickers.map(&:upcase).uniq

      if threads
        threads = [tickers.length, Multitasking.cpu_count * 2].min if threads == true
        Multitasking.set_max_threads(threads)
        tickers.each_with_index do |ticker, i|
          _download_one_threaded(ticker, period: period, interval: interval,
                                 start: start, fin: fin, prepost: prepost,
                                 actions: actions, auto_adjust: auto_adjust,
                                 back_adjust: back_adjust, repair: repair,
                                 keepna: keepna, progress: (progress && i.positive?),
                                 proxy: proxy, rounding: rounding, timeout: timeout)
        end
        sleep 0.01 until @shared::_DFS.length == tickers.length
      else
        tickers.each_with_index do |ticker, i|
          data = _download_one(ticker, period: period, interval: interval,
                               start: start, fin: fin, prepost: prepost,
                               actions: actions, auto_adjust: auto_adjust,
                               back_adjust: back_adjust, repair: repair,
                               keepna: keepna, proxy: proxy,
                               rounding: rounding, timeout: timeout)
          @shared::_PROGRESS_BAR.animate if progress
        end
      end

      @shared::_PROGRESS_BAR.completed if progress

      unless @shared::_ERRORS.empty?
        logger.error("\n#{@shared::_ERRORS.length} Failed download#{@shared::_ERRORS.length > 1 ? 's' : ''}:")

        errors = {}
        @shared::_ERRORS.each do |ticker, err|
          err = err.gsub(/%ticker%/, ticker)
          errors[err] ||= []
          errors[err] << ticker
        end
        errors.each do |err, tkrs|
          logger.error("#{tkrs.join(', ')}: #{err}")
        end

        tbs = {}
        @shared::_TRACEBACKS.each do |ticker, tb|
          tb = tb.gsub(/%ticker%/, ticker)
          tbs[tb] ||= []
          tbs[tb] << ticker
        end
        tbs.each do |tb, tkrs|
          logger.debug("#{tkrs.join(', ')}: #{tb}")
        end
      end

      if ignore_tz
        @shared::_DFS.each do |tkr, df|
          next if df.nil? || df.empty?
          @shared::_DFS[tkr].index = df.index.tz_localize(nil)
        end
      end

      if tickers.length == 1
        ticker = tickers.first
        return @shared::_DFS[ticker]
      end

      begin
        data = Polars.concat(@shared::_DFS.values, axis: 1, sort: true,
                             keys: @shared::_DFS.keys, names: ['Ticker', 'Price'])
      rescue StandardError
        _realign_dfs
        data = Polars.concat(@shared::_DFS.values, axis: 1, sort: true,
                             keys: @shared::_DFS.keys, names: ['Ticker', 'Price'])
      end
      data.index = Polars.to_datetime(data.index)
      data.rename(columns: @shared::_ISINS, inplace: true)

      if group_by == 'column'
        data.columns = data.columns.swaplevel(0, 1)
        data.sort_index(level: 0, axis: 1, inplace: true)
      end

      data
    end

    def _realign_dfs
      idx_len = 0
      idx = nil

      @shared::_DFS.values.each do |df|
        if df.length > idx_len
          idx_len = df.length
          idx = df.index
        end
      end

      @shared::_DFS.each do |key, df|
        begin
          @shared::_DFS[key] = Polars::DataFrame.new(index: idx, data: df).drop_duplicates
        rescue StandardError
          @shared::_DFS[key] = Polars.concat([Utils.empty_df(idx), df.dropna], axis: 0, sort: true)
        end

        @shared::_DFS[key] = @shared::_DFS[key].loc[!@shared::_DFS[key].index.duplicated(keep: 'last')]
      end
    end

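    # _download_one_threaded is registered as a Multitasking background task: #download fires one
    # task per ticker and then busy-waits (sleep 0.01) until @shared::_DFS has an entry for every
    # ticker, since each task records its result (or an empty frame on error) in that shared hash.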
    Multitasking.task :_download_one_threaded do |ticker, start: nil, fin: nil,
                                                  auto_adjust: false, back_adjust: false,
                                                  repair: false, actions: false,
                                                  progress: true, period: "max",
                                                  interval: "1d", prepost: false,
                                                  proxy: nil, keepna: false,
                                                  rounding: false, timeout: 10|
      _download_one(ticker, start: start, fin: fin, auto_adjust: auto_adjust,
                    back_adjust: back_adjust, repair: repair, actions: actions,
                    period: period, interval: interval, prepost: prepost,
                    proxy: proxy, rounding: rounding, keepna: keepna, timeout: timeout)
      @shared::_PROGRESS_BAR.animate if progress
    end

    def _download_one(ticker, start: nil, fin: nil,
                      auto_adjust: false, back_adjust: false, repair: false,
                      actions: false, period: "max", interval: "1d",
                      prepost: false, proxy: nil, rounding: false,
                      keepna: false, timeout: 10)
      data = nil
      begin
        data = Ticker.new(ticker).history(
          period: period, interval: interval,
          start: start, fin: fin, prepost: prepost,
          actions: actions, auto_adjust: auto_adjust,
          back_adjust: back_adjust, repair: repair, proxy: proxy,
          rounding: rounding, keepna: keepna, timeout: timeout,
          raise_errors: true
        )
      rescue Exception => e
        @shared::_DFS[ticker.upcase] = Utils.empty_df
        @shared::_ERRORS[ticker.upcase] = e.to_s
        @shared::_TRACEBACKS[ticker.upcase] = e.backtrace.join("\n")
      else
        @shared::_DFS[ticker.upcase] = data
      end

      data
    end
  end
end