yf_as_dataframe 0.2.15

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -0,0 +1,354 @@
1
+ require 'polars-df'
2
+
3
+ class YfAsDataframe
4
+ class Utils
5
+ BASE_URL = 'https://query1.finance.yahoo.com'
6
+
7
+ class << self
8
+ attr_accessor :logger
9
+ end
10
+
11
# Look up the Yahoo Finance ticker and related news for an ISIN.
#
# isin    - International Securities Identification Number (validated first).
# proxy   - accepted for API parity; not used by the default session.
# session - optional HTTP client; defaults to Net::HTTP.
#
# Returns a hash with 'ticker' (symbol/name/type/exchange) and 'news' keys,
# or {} when the lookup fails for any reason.
# Raises ArgumentError when the ISIN is malformed (the original swallowed
# this in its blanket rescue because ArgumentError < StandardError).
def self.get_all_by_isin(isin, proxy: nil, session: nil)
  raise ArgumentError, 'Invalid ISIN number' unless is_isin(isin)

  begin
    session ||= Net::HTTP
    url = "#{BASE_URL}/v1/finance/search?q=#{isin}"
    raw = session.get(URI(url), 'User-Agent' => 'Mozilla/5.0 (Macintosh; Intel Mac OS X 10_10_1) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/39.0.2171.95 Safari/537.36', 'Accept' => 'application/json')
    # Net::HTTP.get returns the body String directly; other session objects
    # (e.g. HTTParty) return a response that responds to #body. The original
    # unconditionally called .body and therefore always fell into the rescue.
    body = raw.respond_to?(:body) ? raw.body : raw
    data = JSON.parse(body)
    ticker = data['quotes'][0] || {}
    {
      'ticker' => {
        'symbol' => ticker['symbol'],
        'shortname' => ticker['shortname'],
        'longname' => ticker['longname'],
        'type' => ticker['quoteType'],
        'exchange' => ticker['exchDisp']
      },
      'news' => data['news'] || []
    }
  rescue StandardError
    # Best-effort lookup: any network/parse failure yields an empty result.
    {}
  end
end
32
+
33
# Resolve an ISIN to its Yahoo Finance ticker symbol.
# Returns '' when the ISIN cannot be resolved.
def self.get_ticker_by_isin(isin, proxy: nil, session: nil)
  result = get_all_by_isin(isin, proxy: proxy, session: session)
  symbol = result.dig('ticker', 'symbol')
  symbol || ''
end
37
+
38
# Return just the 'ticker' info hash for an ISIN ({} when unresolved).
def self.get_info_by_isin(isin, proxy: nil, session: nil)
  lookup = get_all_by_isin(isin, proxy: proxy, session: session)
  lookup.fetch('ticker', {})
end
42
+
43
# Return the list of news items for an ISIN.
# Returns [] when nothing is found (the original fell back to {}, which is
# the wrong type for a news *list* and inconsistent with get_all_by_isin).
def self.get_news_by_isin(isin, proxy: nil, session: nil)
  data = get_all_by_isin(isin, proxy: proxy, session: session)
  data['news'] || []
end
47
+
48
# Build a single-row placeholder price DataFrame with the standard OHLCV
# columns, used where a real quote table could not be fetched.
#
# index - accepted for API parity with the python original; currently unused
#         (see the commented-out lines below).
#
# Returns a Polars::DataFrame with a sentinel timestamp (2000-01-01) and NaN
# for every price/volume column.
def self.empty_df(index = nil)
  # index ||= []
  empty = Polars::DataFrame.new({
    'Timestamps' => DateTime.new(2000,1,1,0,0,0),
    'Open' => Float::NAN, 'High' => Float::NAN, 'Low' => Float::NAN,
    'Close' => Float::NAN, 'Adj Close' => Float::NAN, 'Volume' => Float::NAN
  })
  # empty = index.each_with_object({}) { |i, h| h[i] = empty }
  # empty['Date'] = 'Date'
  empty
end
59
+
60
# Placeholder "earnings dates" table: each expected column name mapped to
# itself, mirroring the header row of the real earnings-dates frame.
def self.empty_earnings_dates_df
  columns = ['Symbol', 'Company', 'Earnings Date', 'EPS Estimate', 'Reported EPS', 'Surprise(%)']
  columns.to_h { |name| [name, name] }
end
66
+
67
# Flatten Yahoo's nested financial-statement 'template' tree (depth-first,
# capped at 5 levels) into ordered key lists.
#
# data - hash with a 'template' array of nodes; each node has 'key' and an
#        optional 'children' array.
#
# Returns [trailing-prefixed keys, annual-prefixed keys, bare keys, depths].
def self.build_template(data)
  template_ttm_order = []
  template_annual_order = []
  template_order = []
  level_detail = []

  # Must be a lambda, not a nested `def`: a nested def does NOT close over
  # the accumulator locals above, so the original raised NameError when run.
  traverse = lambda do |node, level|
    next if level > 5

    template_ttm_order << "trailing#{node['key']}"
    template_annual_order << "annual#{node['key']}"
    template_order << node['key']
    level_detail << level
    next unless node['children']

    node['children'].each { |child| traverse.call(child, level + 1) }
  end

  data['template'].each { |key| traverse.call(key, 0) }

  [template_ttm_order, template_annual_order, template_order, level_detail]
end
89
+
90
# Split raw 'timeSeries' records into trailing-twelve-month rows and annual
# rows. Each row is a hash of {'index' => series key, asOfDate => value}.
# Series whose key mentions neither 'trailing' nor 'annual' are discarded.
def self.retrieve_financial_details(data)
  trailing_rows = []
  annual_rows = []

  data['timeSeries'].each do |series_key, series|
    next unless series

    row = { 'index' => series_key }
    series.each do |entry|
      # nil entries appear for periods Yahoo has no data for.
      row[entry['asOfDate']] = entry['reportedValue'] if entry
    end

    if series_key.include?('trailing')
      trailing_rows << row
    elsif series_key.include?('annual')
      annual_rows << row
    end
  end

  [trailing_rows, annual_rows]
end
112
+
113
# Assemble annual (and optionally trailing-twelve-month) statement rows into
# a single hash keyed by human-readable line-item names.
#
# level_detail - accepted but unused here (kept for call-site parity).
# annual_dicts / ttm_dicts - row hashes from retrieve_financial_details.
# annual_order / ttm_order - key order from build_template.
#
# NOTE(review): keys absent from the dicts map to nil, so the final
# transform_values would call transform_keys on nil — TODO confirm callers
# always supply complete data.
# NOTE(review): camel2title requires an Enumerable of strings but is called
# here with a single String key, which its own guard rejects — looks like a
# broken port; verify against the python original.
def self.format_annual_financial_statement(level_detail, annual_dicts, annual_order, ttm_dicts = nil, ttm_order = nil)
  annual = annual_dicts.each_with_object({}) { |d, h| h[d['index']] = d }
  annual = annual_order.each_with_object({}) { |k, h| h[k] = annual[k] }
  annual = annual.transform_keys { |k| k.gsub('annual', '') }

  if ttm_dicts && ttm_order
    ttm = ttm_dicts.each_with_object({}) { |d, h| h[d['index']] = d }
    ttm = ttm_order.each_with_object({}) { |k, h| h[k] = ttm[k] }
    ttm = ttm.transform_keys { |k| k.gsub('trailing', '') }
    # TTM rows win over annual rows when keys collide.
    statement = annual.merge(ttm)
  else
    statement = annual
  end

  statement = statement.transform_keys { |k| camel2title(k) }
  statement.transform_values { |v| v.transform_keys { |k| camel2title(k) } }
end
130
+
131
# Reorder a quarterly statement hash by `order` and prettify its keys.
#
# level_detail - accepted but unused (kept for call-site parity).
#
# NOTE(review): as in format_annual_financial_statement, camel2title is
# called with a single String even though it validates for an Enumerable of
# strings — verify against the python original.
def self.format_quarterly_financial_statement(statement, level_detail, order)
  statement = order.each_with_object({}) { |k, h| h[k] = statement[k] }
  statement = statement.transform_keys { |k| camel2title(k) }
  statement.transform_values { |v| v.transform_keys { |k| camel2title(k) } }
end
136
+
137
+ def self.camel2title(strings, sep: ' ', acronyms: nil)
138
+ raise TypeError, "camel2title() 'strings' argument must be iterable of strings" unless strings.is_a?(Enumerable)
139
+ raise TypeError, "camel2title() 'strings' argument must be iterable of strings" unless strings.all? { |s| s.is_a?(String) }
140
+ raise ValueError, "camel2title() 'sep' argument = '#{sep}' must be single character" unless sep.is_a?(String) && sep.length == 1
141
+ raise ValueError, "camel2title() 'sep' argument = '#{sep}' cannot be alpha-numeric" if sep.match?(/[a-zA-Z0-9]/)
142
+ raise ValueError, "camel2title() 'sep' argument = '#{sep}' cannot be special character" if sep != Regexp.escape(sep) && !%w[ -].include?(sep)
143
+
144
+ if acronyms.nil?
145
+ pat = /([a-z])([A-Z])/
146
+ rep = '\1' + sep + '\2'
147
+ strings.map { |s| s.gsub(pat, rep).capitalize }
148
+ else
149
+ raise TypeError, "camel2title() 'acronyms' argument must be iterable of strings" unless acronyms.is_a?(Enumerable)
150
+ raise TypeError, "camel2title() 'acronyms' argument must be iterable of strings" unless acronyms.all? { |a| a.is_a?(String) }
151
+ acronyms.each do |a|
152
+ raise ValueError, "camel2title() 'acronyms' argument must only contain upper-case, but '#{a}' detected" unless a.match?(/^[A-Z]+$/)
153
+ end
154
+
155
+ pat = /([a-z])([A-Z])/
156
+ rep = '\1' + sep + '\2'
157
+ strings = strings.map { |s| s.gsub(pat, rep) }
158
+
159
+ acronyms.each do |a|
160
+ pat = /(#{a})([A-Z][a-z])/
161
+ rep = '\1' + sep + '\2'
162
+ strings = strings.map { |s| s.gsub(pat, rep) }
163
+ end
164
+
165
+ strings.map do |s|
166
+ s.split(sep).map do |w|
167
+ if acronyms.include?(w)
168
+ w
169
+ else
170
+ w.capitalize
171
+ end
172
+ end.join(sep)
173
+ end
174
+ end
175
+ end
176
+
177
# Convert a snake_case string to camelCase, e.g. 'net_income' -> 'netIncome'.
# Strings without underscores pass through unchanged.
# (The original raised on "" because `''.split('_').first` is nil and
# `[][1..]` is nil; destructuring handles both edge cases.)
def self.snake_case_2_camelCase(s)
  head, *rest = s.split('_')
  "#{head}#{rest.map(&:capitalize).join}"
end
180
+
181
+ # def self.parse_quotes(data)
182
+ # timestamps = data['timestamp']
183
+ # ohlc = data['indicators']['quote'][0]
184
+ # volumes = ohlc['volume']
185
+ # opens = ohlc['open']
186
+ # closes = ohlc['close']
187
+ # lows = ohlc['low']
188
+ # highs = ohlc['high']
189
+
190
+ # adjclose = closes
191
+ # adjclose = data['indicators']['adjclose'][0]['adjclose'] if data['indicators']['adjclose']
192
+
193
+ # quotes = {
194
+ # 'Open' => opens,
195
+ # 'High' => highs,
196
+ # 'Low' => lows,
197
+ # 'Close' => closes,
198
+ # 'Adj Close' => adjclose,
199
+ # 'Volume' => volumes
200
+ # }
201
+
202
+ # quotes.each { |k, v| quotes[k] = v.map { |x| x.nil? ? Float::NAN : x } }
203
+ # quotes['Date'] = timestamps.map { |x| Time.at(x).to_datetime }
204
+
205
+ # quotes
206
+ # end
207
+
208
+ # def self.auto_adjust(data)
209
+ # ratio = data['Adj Close'] / data['Close']
210
+ # data['Adj Open'] = data['Open'] * ratio
211
+ # data['Adj High'] = data['High'] * ratio
212
+ # data['Adj Low'] = data['Low'] * ratio
213
+
214
+ # data.delete('Open')
215
+ # data.delete('High')
216
+ # data.delete('Low')
217
+ # data.delete('Close')
218
+
219
+ # data['Open'] = data.delete('Adj Open')
220
+ # data['High'] = data.delete('Adj High')
221
+ # data['Low'] = data.delete('Adj Low')
222
+
223
+ # data
224
+ # end
225
+
226
+ # def self.back_adjust(data)
227
+ # ratio = data['Adj Close'] / data['Close']
228
+ # data['Adj Open'] = data['Open'] * ratio
229
+ # data['Adj High'] = data['High'] * ratio
230
+ # data['Adj Low'] = data['Low'] * ratio
231
+
232
+ # data.delete('Open')
233
+ # data.delete('High')
234
+ # data.delete('Low')
235
+ # data.delete('Adj Close')
236
+
237
+ # data['Open'] = data.delete('Adj Open')
238
+ # data['High'] = data.delete('Adj High')
239
+ # data['Low'] = data.delete('Adj Low')
240
+
241
+ # data
242
+ # end
243
+
244
# True when the value looks like an ISIN: two country letters, nine
# alphanumerics, then a numeric check digit. (Format check only — the
# checksum itself is not verified.)
def self.is_isin(string)
  isin_format = /^[A-Z]{2}[A-Z0-9]{9}[0-9]$/
  isin_format.match?(string)
end
247
+
248
# Normalize a user-supplied date (epoch Integer, 'YYYY-MM-DD' String, Date,
# or DateTime) into an epoch integer via the trailing `dt.to_i`.
#
# NOTE(review): the `Time.at(dt)` result in the Integer branch is discarded
# (dead code) — the Integer falls through to `dt.to_i` unchanged.
# NOTE(review): DateTime < Date, so the `dt.is_a?(Date)` branch also catches
# DateTime values and the DateTime/zone branch is unreachable — confirm
# intended precedence.
# NOTE(review): `in_time_zone` and Date#to_i here presumably rely on
# ActiveSupport being loaded — TODO confirm.
def self.parse_user_dt(dt, exchange_tz)
  if dt.is_a?(Integer)
    Time.at(dt)
  elsif dt.is_a?(String)
    dt = DateTime.strptime(dt.to_s, '%Y-%m-%d')
  elsif dt.is_a?(Date)
    dt = dt.to_datetime
  elsif dt.is_a?(DateTime) && dt.zone.nil?
    dt = dt.in_time_zone(exchange_tz)
  end
  dt.to_i
end
260
+
261
# Map a Yahoo interval string ('1mo', '1y', '2wk', ...) to an ActiveSupport
# duration. Unrecognized intervals are logged and returned unchanged, so
# callers must tolerate getting the original string back.
#
# NOTE(review): `2.year` etc. rely on ActiveSupport's singular alias for
# `years`; this method (and Rails.logger below) requires Rails/ActiveSupport
# to be loaded — TODO confirm for non-Rails users of the gem.
def self.interval_to_timedelta(interval)
  case interval
  when '1mo'
    1.month
  when '2mo'
    2.months
  when '3mo'
    3.months
  when '6mo'
    6.months
  when '9mo'
    9.months
  when '12mo'
    1.year
  when '1y'
    1.year
  when '2y'
    2.year
  when '3y'
    3.year
  when '4y'
    4.year
  when '5y'
    5.year
  when '1wk'
    1.week
  when '2wk'
    2.week
  when '3wk'
    3.week
  when '4wk'
    4.week
  else
    Rails.logger.warn { "#{__FILE__}:#{__LINE__} #{interval} not a recognized interval" }
    interval
  end
end
298
+
299
+ # def _interval_to_timedelta(interval)
300
+ # if interval == "1mo"
301
+ # return ActiveSupport::Duration.new(months: 1)
302
+ # elsif interval == "3mo"
303
+ # return ActiveSupport::Duration.new(months: 3)
304
+ # elsif interval == "1y"
305
+ # return ActiveSupport::Duration.new(years: 1)
306
+ # elsif interval == "1wk"
307
+ # return 7.days
308
+ # else
309
+ # return ActiveSupport::Duration.parse(interval)
310
+ # end
311
+ # end
312
+ end
313
+ end
314
+
315
+ # module Yfin
316
+ # class << self
317
+ # attr_accessor :logger
318
+ # end
319
+
320
+ # self.logger = Logger.new(STDOUT)
321
+ # self.logger.level = Logger::WARN
322
+ # end
323
+
324
# Snapshot an object's instance variables as a name => value hash, skipping
# underscore-prefixed names and names shadowed by the object's own
# (non-inherited) instance methods.
def attributes(obj)
  own_methods = Set.new(obj.class.instance_methods(false).map(&:to_s))
  result = {}
  obj.instance_variables.each do |ivar|
    key = ivar.to_s.delete_prefix('@')
    next if key.start_with?('_') || own_methods.include?(key)

    result[key] = obj.instance_variable_get(ivar)
  end
  result
end
333
+
334
# Print a message at most once per process; repeated calls with the same
# message are suppressed. (Despite its name, the original printed every
# time — this mirrors python yfinance's cached print_once.)
def print_once(msg)
  @_printed_once_messages ||= {}
  return if @_printed_once_messages.key?(msg)

  @_printed_once_messages[msg] = true
  puts msg
end
337
+
338
# Return the logger used throughout the gem. Currently hard-wired to
# Rails.logger (the commented line shows a planned gem-local logger).
def get_yf_logger
  # Yfin.logger
  Rails.logger
end
342
+
343
# Install a multi-line log formatter, but only when the logger is already at
# DEBUG level; a no-op otherwise.
#
# NOTE(review): MultiLineFormatter is not defined anywhere in this file —
# presumably provided elsewhere in the project; the format string is in
# Python logging syntax, so verify it is what MultiLineFormatter expects.
def setup_debug_formatting
  logger = get_yf_logger

  return unless logger.level == Logger::DEBUG

  logger.formatter = MultiLineFormatter.new('%(levelname)-8s %(message)s')
end
350
+
351
# Switch the Rails logger to DEBUG and apply the debug log formatter.
def enable_debug_mode
  Rails.logger.level = Logger::DEBUG
  setup_debug_formatting
end
@@ -0,0 +1,3 @@
class YfAsDataframe
  # Gem release version (keep in sync with the gemspec and changelog).
  VERSION = "0.2.15"
end
@@ -0,0 +1,304 @@
1
+ # require 'requests'
2
+ # require 'requests_cache'
3
+ require 'thread'
4
+ require 'date'
5
+ require 'nokogiri'
6
+ require 'zache'
7
+ require 'httparty'
8
+
9
+ class YfAsDataframe
10
+ module YfConnection
11
+ extend ActiveSupport::Concern
12
+ # extend HTTParty
13
+
14
+ # """
15
+ # Have one place to retrieve data from Yahoo API in order to ease caching and speed up operations.
16
+ # """
17
+ @@user_agent_headers = {
18
+ 'User-Agent' => 'Mozilla/5.0 (Macintosh; Intel Mac OS X 10_10_1) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/39.0.2171.95 Safari/537.36'
19
+ # 'Mozilla/5.0 (Macintosh; Intel Mac OS X 10_15_7) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/125.0.0.0 Safari/537.36'
20
+ }
21
+ @@proxy = nil
22
+
23
+ cattr_accessor :user_agent_headers, :proxy
24
+
25
# Initialize the connection state shared by every includer of YfConnection:
# a Zache-backed cookie/crumb cache plus the cookie strategy bookkeeping.
#
# NOTE(review): all state here is class variables (@@), so it is shared
# across every instance and subclass — confirm that is intended.
def yfconn_initialize
  # Rails.logger.info { "#{__FILE__}:#{__LINE__} here"}
  begin
    @@zache = ::Zache.new
    @@session_is_caching = true
  rescue NoMethodError
    # Not caching
    @@session_is_caching = false
  end

  @@crumb = nil
  @@cookie = nil
  @@cookie_strategy = 'basic'
  @@cookie_lock = ::Mutex.new()
end
40
+
41
+
42
# Perform a GET against a Yahoo endpoint with the shared session cookie and
# crumb attached.
#
# url     - endpoint (String or URI-ish; converted with #to_s).
# headers - optional extra request headers (never mutated).
# params  - optional query parameters (never mutated).
#
# Returns the raw HTTParty::Response.
#
# Fixes: the original called `params.merge!(crumb: @@crumb)`, mutating the
# caller's hash despite its own "treat input arguments as immutable" note,
# and could emit the crumb twice (once under :crumb, once under 'crumb').
def get(url, headers = nil, params = nil)
  headers = (headers || {}).dup
  params = (params || {}).dup

  cookie, crumb, _strategy = _get_cookie_and_crumb()
  params['crumb'] = crumb unless crumb.nil?

  proxy = _get_proxy
  ::HTTParty.http_proxy(proxy.split(':').first, proxy.split(':')[1].split('/').first) unless proxy.nil?

  cookie_hash = ::HTTParty::CookieHash.new
  cookie_hash.add_cookies(@@cookie)
  options = { headers: headers.merge(@@user_agent_headers).merge({ 'cookie' => cookie_hash.to_cookie_string, 'crumb' => crumb }) } #, debug_output: STDOUT }

  full_url = url.dup.to_s
  unless params.empty?
    joiner = full_url.include?('?') ? '&' : '?'
    full_url += joiner + CGI.unescape(params.to_query)
  end

  # Rails.logger.info { "#{__FILE__}:#{__LINE__} u=#{full_url}, options = #{options.inspect}" }
  ::HTTParty.get(full_url, options)
end
75
+
76
+ alias_method :cache_get, :get
77
+
78
+
79
# Thin wrapper over #get, kept for naming parity with python yfinance
# (which distinguished raw-JSON fetches from generic GETs).
def get_raw_json(url, user_agent_headers = nil, params = nil)
  get(url, user_agent_headers, params)
end
86
+
87
+
88
+
89
+
90
+
91
+
92
+
93
+ private
94
+
95
+
96
# Return the configured proxy as a "host:port" string, or nil when none is
# set. Accepts either a plain String or a requests-style
# {"https" => "host:port"} hash in @@proxy (the https entry is used).
#
# Fixes: the original assigned `proxy = nil` and then tested that same
# local with `unless proxy.nil?`, so the branch was dead and the method
# always returned nil, silently ignoring @@proxy.
def _get_proxy
  return nil if @@proxy.nil?

  @@proxy.is_a?(Hash) && @@proxy.key?("https") ? @@proxy["https"] : @@proxy
end
105
+
106
# Switch between the 'basic' and 'csrf' cookie strategies, clearing the
# cached cookie/crumb so they are re-fetched under the new strategy.
#
# have_lock - pass true when the caller already holds @@cookie_lock (as
#             _get_cookie_and_crumb does) to avoid a self-deadlock.
#
# Fixes: the original only acted when have_lock was false, so callers that
# already held the lock never actually switched strategies.
def _set_cookie_strategy(strategy, have_lock = false)
  return if strategy == @@cookie_strategy

  switch = lambda do
    @@cookie_strategy = strategy
    @@cookie = nil
    @@crumb = nil
  end

  if have_lock
    switch.call
  else
    @@cookie_lock.synchronize { switch.call }
  end
end
117
+
118
# Obtain a [cookie, crumb, strategy] triple, trying the current strategy
# first and falling back to the other ('csrf' <-> 'basic') on failure.
# All work happens under @@cookie_lock, so the strategy-switch calls below
# pass have_lock=true.
#
# NOTE(review): `have_lock=true` in the calls below is NOT a keyword
# argument — Ruby parses it as an assignment to a local named have_lock
# whose value (true) is then passed positionally. It works, but reads like
# Python; confirm no keyword was intended.
def _get_cookie_and_crumb()
  cookie, crumb, strategy = nil, nil, nil
  # puts "cookie_mode = '#{@@cookie_strategy}'"

  @@cookie_lock.synchronize do
    if @@cookie_strategy == 'csrf'
      crumb = _get_crumb_csrf()
      if crumb.nil?
        # Fail
        _set_cookie_strategy('basic', have_lock=true)
        cookie, crumb = __get_cookie_and_crumb_basic()
        # Rails.logger.info { "#{__FILE__}:#{__LINE__} cookie = #{cookie}, crumb = #{crumb}" }
      end
    else
      # Fallback strategy
      cookie, crumb = __get_cookie_and_crumb_basic()
      # Rails.logger.info { "#{__FILE__}:#{__LINE__} cookie = #{cookie}, crumb = #{crumb}" }
      if cookie.nil? || crumb.nil?
        # Fail
        _set_cookie_strategy('csrf', have_lock=true)
        crumb = _get_crumb_csrf()
      end
    end
    strategy = @@cookie_strategy
  end

  # Rails.logger.info { "#{__FILE__}:#{__LINE__} cookie = #{cookie}, crumb = #{crumb}, strategy=#{strategy}" }
  return cookie, crumb, strategy
end
147
+
148
# Fetch the basic-strategy cookie, then the crumb that depends on it.
# Returns [cookie, crumb]; either element may be nil on failure.
def __get_cookie_and_crumb_basic()
  [_get_cookie_basic(), _get_crumb_basic()]
end
153
+
154
# Return the cached Yahoo session cookie, or fetch a fresh one from
# fc.yahoo.com (whose error response still carries a usable Set-Cookie).
# Returns nil when no cookie could be obtained.
#
# Fixes: the original crashed with NoMethodError when the response had no
# 'set-cookie' header; now it returns nil so callers fall back to the
# csrf strategy.
def _get_cookie_basic()
  @@cookie ||= _load_cookie_basic()
  return @@cookie unless @@cookie.nil? || @@cookie.length.zero?

  headers = @@user_agent_headers.dup
  response = HTTParty.get('https://fc.yahoo.com', headers) #.merge(debug_output: STDOUT))

  set_cookie = response.headers['set-cookie']
  return nil if set_cookie.nil? || set_cookie.empty?

  # Keep only the name=value pair, dropping cookie attributes.
  @@cookie = set_cookie.split(';').first
  _save_cookie_basic(@@cookie)

  @@cookie
end
171
+
172
# Return the cached crumb, or fetch one from the getcrumb endpoint using the
# basic cookie. Returns nil when no cookie is available or Yahoo answers
# with an HTML error page instead of a crumb token.
#
# NOTE(review): `'<html>'.in?(@@crumb)` relies on ActiveSupport's
# Object#in? — TODO confirm ActiveSupport is always loaded here.
def _get_crumb_basic()
  return @@crumb unless @@crumb.nil?
  return nil if (cookie = _get_cookie_basic()).nil?

  cookie_hash = ::HTTParty::CookieHash.new
  cookie_hash.add_cookies(cookie)
  options = {headers: @@user_agent_headers.dup.merge(
    { 'cookie' => cookie_hash.to_cookie_string }
  )} #, debug_output: STDOUT }

  crumb_response = ::HTTParty.get('https://query1.finance.yahoo.com/v1/test/getcrumb', options)
  @@crumb = crumb_response.parsed_response

  return (@@crumb.nil? || '<html>'.in?(@@crumb)) ? nil : @@crumb
end
187
+
188
# Acquire a session cookie via Yahoo's EU consent (CSRF) flow: load the
# consent page, extract csrfToken/sessionId, then POST agreement and copy
# the consent. On success @@cookie is set to `true` as a "session has
# cookies" flag (the cookies themselves live in the session object).
# Returns true/false.
#
# NOTE(review): this reads @session / @expire_after / @session_is_caching
# as *instance* variables, but yfconn_initialize only sets @@-class
# variables and never creates @session — this path looks unreachable as
# written; confirm before relying on the csrf strategy.
# NOTE(review): `soup.find('input', attrs: ...)` is BeautifulSoup (Python)
# API — Nokogiri would need at_css/at_xpath, and `response.content` is not
# an HTTParty accessor. Presumably an incomplete port; verify.
def _get_cookie_csrf()
  return true unless @@cookie.nil?
  return (@@cookie = true) if _load_session_cookies()

  base_args = {
    headers: @@user_agent_headers,
    # proxies: proxy,
  }

  get_args = base_args.merge({url: 'https://guce.yahoo.com/consent'})

  get_args[:expire_after] = @expire_after if @session_is_caching
  response = @session.get(**get_args)

  soup = ::Nokogiri::HTML(response.content, 'html.parser')
  csrfTokenInput = soup.find('input', attrs: {'name': 'csrfToken'})

  # puts 'Failed to find "csrfToken" in response'
  return false if csrfTokenInput.nil?

  csrfToken = csrfTokenInput['value']
  # puts "csrfToken = #{csrfToken}"
  sessionIdInput = soup.find('input', attrs: {'name': 'sessionId'})
  sessionId = sessionIdInput['value']
  # puts "sessionId='#{sessionId}"

  originalDoneUrl = 'https://finance.yahoo.com/'
  namespace = 'yahoo'
  data = {
    'agree': ['agree', 'agree'],
    'consentUUID': 'default',
    'sessionId': sessionId,
    'csrfToken': csrfToken,
    'originalDoneUrl': originalDoneUrl,
    'namespace': namespace,
  }
  post_args = base_args.merge(
    {
      url: "https://consent.yahoo.com/v2/collectConsent?sessionId=#{sessionId}",
      data: data
    }
  )
  get_args = base_args.merge(
    {
      url: "https://guce.yahoo.com/copyConsent?sessionId=#{sessionId}",
      data: data
    }
  )
  if @session_is_caching
    post_args[:expire_after] = @expire_after
    get_args[:expire_after] = @expire_after
  end
  @session.post(**post_args)
  @session.get(**get_args)

  @@cookie = true
  _save_session_cookies()

  return true
end
248
+
249
# Return the cached crumb or fetch one via the csrf-strategy session.
# Returns nil when the cookie flow failed or Yahoo returned an HTML error
# page / empty body instead of a crumb token.
#
# NOTE(review): like _get_cookie_csrf, this uses @session /
# @session_is_caching instance variables that nothing in this module sets,
# and `r.text` / `.in?` are Python-requests / ActiveSupport idioms — verify
# this path actually runs.
def _get_crumb_csrf()
  # Credit goes to @bot-unit #1729

  # puts 'reusing crumb'
  return @@crumb unless @@crumb.nil?
  # This cookie stored in session
  return nil unless _get_cookie_csrf().present?

  get_args = {
    url: 'https://query2.finance.yahoo.com/v1/test/getcrumb',
    headers: @@user_agent_headers
  }

  get_args[:expire_after] = @expire_after if @session_is_caching
  r = @session.get(**get_args)

  @@crumb = r.text

  # puts "Didn't receive crumb"
  return nil if @@crumb.nil? || '<html>'.in?(@@crumb) || @@crumb.length.zero?
  return @@crumb
end
271
+
272
+
273
+
274
+
275
+
276
# Persist the csrf-session cookies in the shared cache for 24 hours.
# Returns true on success, false on failure.
# (The original rescued Exception, which also swallows SignalException and
# SystemExit; StandardError is the correct net for a best-effort save.)
def _save_session_cookies()
  @@zache.put(:csrf, @session.cookies, lifetime: 60 * 60 * 24)
  true
rescue StandardError
  false
end
284
+
285
# Restore cached csrf-session cookies into @session.
# Returns false when the cache entry has expired; otherwise returns the
# result of assigning the cookies (truthy).
#
# NOTE(review): Zache#expired?/#get presumably raise if :csrf was never
# stored at all — confirm first-run behavior against the zache gem docs.
def _load_session_cookies()
  return false if @@zache.expired?(:csrf)
  @session.cookies = @@zache.get(:csrf)
end
289
+
290
# Persist the basic-strategy cookie in the shared cache for 24 hours.
# Returns true on success, false on failure.
# (rescue Exception narrowed to StandardError — same reasoning as
# _save_session_cookies.)
def _save_cookie_basic(cookie)
  @@zache.put(:basic, cookie, lifetime: 60 * 60 * 24)
  true
rescue StandardError
  false
end
298
+
299
# Return the cached basic cookie, or nil when absent/expired.
# The seed `put` with lifetime: 1 guarantees the :basic key exists before
# Zache#expired? is called (which would otherwise raise on a missing key).
def _load_cookie_basic()
  @@zache.put(:basic, nil, lifetime: 1) unless @@zache.exists?(:basic, dirty: false)
  return @@zache.expired?(:basic) ? nil : @@zache.get(:basic)
end
303
+ end
304
+ end
@@ -0,0 +1,15 @@
1
class YfAsDataframe
  # Base error for Yahoo-Finance-related failures in this gem.
  class YfinanceException < StandardError
    # python-yfinance-style accessor; mirrors #message for subclasses that
    # populate @msg.
    attr_reader :msg
  end

  # Raised when Yahoo returns data the gem cannot use.
  class YfinDataException < YfinanceException
  end

  # Raised for python-yfinance features not yet ported to this gem.
  class YFNotImplementedError < NotImplementedError
    def initialize(str)
      @msg = "Have not implemented fetching \"#{str}\" from Yahoo API"
      Rails.logger.warn { @msg }
      # The original never called super, so rescuers saw the default
      # class-name message instead of @msg.
      super(@msg)
    end
  end
end
@@ -0,0 +1,24 @@
# Pre-declare the namespace so the required files can reopen it.
class YfAsDataframe
end

# frozen_string_literal: true
# NOTE(review): a frozen_string_literal magic comment only takes effect on
# the very first line of a file; placed here it is inert — TODO move to
# line 1.

require_relative 'yf_as_dataframe/version'
require_relative 'yf_as_dataframe/utils'
require_relative 'yf_as_dataframe/yfinance_exception'
require_relative 'yf_as_dataframe/yf_connection'
require_relative 'yf_as_dataframe/price_technical'
require_relative 'yf_as_dataframe/price_history'
require_relative 'yf_as_dataframe/quote'
require_relative 'yf_as_dataframe/analysis'
require_relative 'yf_as_dataframe/fundamentals'
require_relative 'yf_as_dataframe/financials'
require_relative 'yf_as_dataframe/holders'
require_relative 'yf_as_dataframe/ticker'
# NOTE(review): duplicate of the version require above (harmless —
# require_relative is idempotent — but removable).
require_relative "yf_as_dataframe/version"

class YfAsDataframe

  # Expose the technical-analysis helpers as class-level methods.
  extend YfAsDataframe::PriceTechnical
end