yfinrb 0.1.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -0,0 +1,359 @@
1
+ # -*- coding: utf-8 -*-
2
+ #
3
+ # yfinance - market data downloader
4
+ # https://github.com/ranaroussi/yfinance
5
+ #
6
+ # Copyright 2017-2019 Ran Aroussi
7
+ #
8
+ # Licensed under the Apache License, Version 2.0 (the "License");
9
+ # you may not use this file except in compliance with the License.
10
+ # You may obtain a copy of the License at
11
+ #
12
+ # http://www.apache.org/licenses/LICENSE-2.0
13
+ #
14
+ # Unless required by applicable law or agreed to in writing, software
15
+ # distributed under the License is distributed on an "AS IS" BASIS,
16
+ # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
17
+ # See the License for the specific language governing permissions and
18
+ # limitations under the License.
19
+ #
20
+
21
+ #!/usr/bin/env ruby
22
+ # frozen_string_literal: true
23
+
24
+ require 'date'
25
+ require 'logger'
26
+ require 'net/http'
27
+ require 'json'
28
+ require 'time'
29
+ require 'uri'
30
+
31
+ class Yfin
32
+ class Utils
33
+ BASE_URL = 'https://query1.finance.yahoo.com'
34
+
35
+ class << self
36
+ attr_accessor :logger
37
+ end
38
+
39
# Look up a security by ISIN via Yahoo's search endpoint.
#
# Raises ArgumentError when the ISIN is malformed; network/parse failures
# degrade to an empty Hash. (Previously the trailing `rescue StandardError`
# also swallowed the ArgumentError, so invalid ISINs silently returned {} —
# the validation is now outside the rescued region.)
#
# @param isin [String] candidate ISIN
# @return [Hash] {'ticker' => {...}, 'news' => [...]} or {} on failure
def self.get_all_by_isin(isin, proxy: nil, session: nil)
  raise ArgumentError, 'Invalid ISIN number' unless is_isin(isin)

  begin
    session ||= Net::HTTP
    url = "#{BASE_URL}/v1/finance/search?q=#{isin}"
    data = session.get(URI(url), 'User-Agent' => 'Mozilla/5.0 (Macintosh; Intel Mac OS X 10_10_1) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/39.0.2171.95 Safari/537.36', 'Accept' => 'application/json')
    data = JSON.parse(data.body)
    ticker = data['quotes'][0] || {}
    {
      'ticker' => {
        'symbol' => ticker['symbol'],
        'shortname' => ticker['shortname'],
        'longname' => ticker['longname'],
        'type' => ticker['quoteType'],
        'exchange' => ticker['exchDisp']
      },
      'news' => data['news'] || []
    }
  rescue StandardError
    # Best-effort: any lookup failure yields an empty result.
    {}
  end
end
60
+
61
# Resolve an ISIN to its Yahoo ticker symbol.
# Returns '' when the lookup fails or yields no match.
def self.get_ticker_by_isin(isin, proxy: nil, session: nil)
  lookup = get_all_by_isin(isin, proxy: proxy, session: session)
  symbol = lookup.dig('ticker', 'symbol')
  symbol || ''
end
65
+
66
# Ticker metadata Hash for an ISIN ({} when the lookup fails).
def self.get_info_by_isin(isin, proxy: nil, session: nil)
  lookup = get_all_by_isin(isin, proxy: proxy, session: session)
  lookup['ticker'] || {}
end
70
+
71
# News articles for an ISIN.
# Returns an empty Array on failure — previously the fallback was `{}`,
# inconsistent with get_all_by_isin, which always shapes 'news' as an Array.
def self.get_news_by_isin(isin, proxy: nil, session: nil)
  data = get_all_by_isin(isin, proxy: proxy, session: session)
  data['news'] || []
end
75
+
76
# Build a placeholder one-row Polars DataFrame with the standard OHLCV
# columns set to NaN (used when a history download yields no data).
# NOTE(review): Polars is not in this file's require list — assumes the
# polars-df gem is loaded elsewhere in the gem; confirm.
# NOTE(review): DateTime is the legacy date API; Time may be preferable.
def self.empty_df(index = nil)
  # index ||= []
  empty = Polars::DataFrame.new({
    'Timestamps' => DateTime.new(2000,1,1,0,0,0),
    'Open' => Float::NAN, 'High' => Float::NAN, 'Low' => Float::NAN,
    'Close' => Float::NAN, 'Adj Close' => Float::NAN, 'Volume' => Float::NAN
  })
  # empty = index.each_with_object({}) { |i, h| h[i] = empty }
  # empty['Date'] = 'Date'
  empty
end
87
+
88
# Placeholder earnings-dates "frame": a Hash mapping each expected column
# name to itself, in display order.
def self.empty_earnings_dates_df
  columns = ['Symbol', 'Company', 'Earnings Date',
             'EPS Estimate', 'Reported EPS', 'Surprise(%)']
  columns.each_with_object({}) { |col, frame| frame[col] = col }
end
94
+
95
# Walk Yahoo's financials template tree (up to 5 levels deep) and flatten it
# into ordered key lists.
#
# Bug fix: the original used a nested `def traverse` — Ruby's `def` opens a
# fresh scope, so the method could not see the local accumulator arrays and
# raised NameError on first use (it also leaked an instance method onto the
# class). A lambda closes over the locals correctly.
#
# @param data [Hash] response containing a 'template' array of nodes, each
#   with a 'key' and optional 'children'
# @return [Array(Array, Array, Array, Array)]
#   [trailing-prefixed keys, annual-prefixed keys, raw keys, depth per key]
def self.build_template(data)
  template_ttm_order = []
  template_annual_order = []
  template_order = []
  level_detail = []

  traverse = lambda do |node, level|
    next if level > 5 # depth cap mirrors the original guard

    template_ttm_order << "trailing#{node['key']}"
    template_annual_order << "annual#{node['key']}"
    template_order << node['key']
    level_detail << level

    (node['children'] || []).each { |child| traverse.call(child, level + 1) }
  end

  data['template'].each { |key| traverse.call(key, 0) }

  [template_ttm_order, template_annual_order, template_order, level_detail]
end
117
+
118
# Split Yahoo 'timeSeries' rows into trailing-twelve-month and annual
# buckets. Each output row is a Hash of {'index' => series key,
# asOfDate => reportedValue, ...}; nil series and nil entries are skipped.
def self.retrieve_financial_details(data)
  trailing_rows = []
  annual_rows = []

  data['timeSeries'].each do |series_key, entries|
    next unless entries

    row = { 'index' => series_key }
    entries.each do |entry|
      row[entry['asOfDate']] = entry['reportedValue'] if entry
    end

    if series_key.include?('trailing')
      trailing_rows << row
    elsif series_key.include?('annual')
      annual_rows << row
    end
  end

  [trailing_rows, annual_rows]
end
140
+
141
# Merge annual (and optionally trailing-twelve-month) rows into a single
# statement Hash keyed by human-readable titles; TTM entries win on key
# collisions via the merge.
# NOTE(review): camel2title is called here with a single String key, but it
# validates `strings.is_a?(Enumerable)` — a String is not Enumerable in
# Ruby, so as written this raises TypeError; confirm intended call shape.
# NOTE(review): keys listed in annual_order/ttm_order but absent from the
# dicts map to nil, and `v.transform_keys` on nil would raise — verify the
# inputs always cover the order lists. level_detail is accepted but unused.
def self.format_annual_financial_statement(level_detail, annual_dicts, annual_order, ttm_dicts = nil, ttm_order = nil)
  annual = annual_dicts.each_with_object({}) { |d, h| h[d['index']] = d }
  annual = annual_order.each_with_object({}) { |k, h| h[k] = annual[k] }
  annual = annual.transform_keys { |k| k.gsub('annual', '') }

  if ttm_dicts && ttm_order
    ttm = ttm_dicts.each_with_object({}) { |d, h| h[d['index']] = d }
    ttm = ttm_order.each_with_object({}) { |k, h| h[k] = ttm[k] }
    ttm = ttm.transform_keys { |k| k.gsub('trailing', '') }
    statement = annual.merge(ttm)
  else
    statement = annual
  end

  statement = statement.transform_keys { |k| camel2title(k) }
  statement.transform_values { |v| v.transform_keys { |k| camel2title(k) } }
end
158
+
159
# Reorder a quarterly statement per `order` and retitle its keys.
# NOTE(review): same caveats as format_annual_financial_statement —
# camel2title receives single Strings, missing keys produce nil values,
# and level_detail is accepted but unused; confirm.
def self.format_quarterly_financial_statement(statement, level_detail, order)
  statement = order.each_with_object({}) { |k, h| h[k] = statement[k] }
  statement = statement.transform_keys { |k| camel2title(k) }
  statement.transform_values { |v| v.transform_keys { |k| camel2title(k) } }
end
164
+
165
# Convert camelCase identifiers to title-style strings,
# e.g. ['fooBar'] -> ['Foo bar']; with acronyms: ['EBITMargin'] -> ['EBIT Margin'].
#
# Fixes relative to the original:
# * `ValueError` is Python, not Ruby — raising it was itself a NameError;
#   ArgumentError is the Ruby idiom for bad arguments.
# * The special-character guard used `%w[ -]`, which the %w literal
#   collapses to ["-"], so the DEFAULT `sep: ' '` was rejected.
# * Callers in this file pass a single String key; a String is not
#   Enumerable in Ruby, so every such call raised TypeError. A String input
#   is now accepted and returns a String (backward compatible).
# * Acronym validation anchors with \A/\z instead of per-line ^/$.
#
# @param strings [Enumerable<String>, String]
# @param sep [String] single non-alphanumeric separator (space or hyphen)
# @param acronyms [Enumerable<String>, nil] upper-case words to keep intact
# @return [Array<String>, String] same shape as the input
def self.camel2title(strings, sep: ' ', acronyms: nil)
  return camel2title([strings], sep: sep, acronyms: acronyms).first if strings.is_a?(String)

  raise TypeError, "camel2title() 'strings' argument must be iterable of strings" unless strings.is_a?(Enumerable)
  raise TypeError, "camel2title() 'strings' argument must be iterable of strings" unless strings.all? { |s| s.is_a?(String) }
  raise ArgumentError, "camel2title() 'sep' argument = '#{sep}' must be single character" unless sep.is_a?(String) && sep.length == 1
  raise ArgumentError, "camel2title() 'sep' argument = '#{sep}' cannot be alpha-numeric" if sep.match?(/[a-zA-Z0-9]/)
  raise ArgumentError, "camel2title() 'sep' argument = '#{sep}' cannot be special character" if sep != Regexp.escape(sep) && ![' ', '-'].include?(sep)

  if acronyms.nil?
    # Simple path: break lower->UPPER boundaries, then capitalize the result.
    pat = /([a-z])([A-Z])/
    rep = '\1' + sep + '\2'
    strings.map { |s| s.gsub(pat, rep).capitalize }
  else
    raise TypeError, "camel2title() 'acronyms' argument must be iterable of strings" unless acronyms.is_a?(Enumerable)
    raise TypeError, "camel2title() 'acronyms' argument must be iterable of strings" unless acronyms.all? { |a| a.is_a?(String) }
    acronyms.each do |a|
      raise ArgumentError, "camel2title() 'acronyms' argument must only contain upper-case, but '#{a}' detected" unless a.match?(/\A[A-Z]+\z/)
    end

    pat = /([a-z])([A-Z])/
    rep = '\1' + sep + '\2'
    strings = strings.map { |s| s.gsub(pat, rep) }

    # Also split after each acronym when followed by a capitalized word.
    acronyms.each do |a|
      pat = /(#{a})([A-Z][a-z])/
      rep = '\1' + sep + '\2'
      strings = strings.map { |s| s.gsub(pat, rep) }
    end

    # Capitalize each word, but preserve acronyms verbatim.
    strings.map do |s|
      s.split(sep).map { |w| acronyms.include?(w) ? w : w.capitalize }.join(sep)
    end
  end
end
204
+
205
# 'foo_bar_baz' -> 'fooBarBaz': first word kept as-is, the rest capitalized.
def self.snake_case_2_camelCase(s)
  head, *tail = s.split('_')
  head + tail.map(&:capitalize).join
end
208
+
209
+ # def self.parse_quotes(data)
210
+ # timestamps = data['timestamp']
211
+ # ohlc = data['indicators']['quote'][0]
212
+ # volumes = ohlc['volume']
213
+ # opens = ohlc['open']
214
+ # closes = ohlc['close']
215
+ # lows = ohlc['low']
216
+ # highs = ohlc['high']
217
+
218
+ # adjclose = closes
219
+ # adjclose = data['indicators']['adjclose'][0]['adjclose'] if data['indicators']['adjclose']
220
+
221
+ # quotes = {
222
+ # 'Open' => opens,
223
+ # 'High' => highs,
224
+ # 'Low' => lows,
225
+ # 'Close' => closes,
226
+ # 'Adj Close' => adjclose,
227
+ # 'Volume' => volumes
228
+ # }
229
+
230
+ # quotes.each { |k, v| quotes[k] = v.map { |x| x.nil? ? Float::NAN : x } }
231
+ # quotes['Date'] = timestamps.map { |x| Time.at(x).to_datetime }
232
+
233
+ # quotes
234
+ # end
235
+
236
+ # def self.auto_adjust(data)
237
+ # ratio = data['Adj Close'] / data['Close']
238
+ # data['Adj Open'] = data['Open'] * ratio
239
+ # data['Adj High'] = data['High'] * ratio
240
+ # data['Adj Low'] = data['Low'] * ratio
241
+
242
+ # data.delete('Open')
243
+ # data.delete('High')
244
+ # data.delete('Low')
245
+ # data.delete('Close')
246
+
247
+ # data['Open'] = data.delete('Adj Open')
248
+ # data['High'] = data.delete('Adj High')
249
+ # data['Low'] = data.delete('Adj Low')
250
+
251
+ # data
252
+ # end
253
+
254
+ # def self.back_adjust(data)
255
+ # ratio = data['Adj Close'] / data['Close']
256
+ # data['Adj Open'] = data['Open'] * ratio
257
+ # data['Adj High'] = data['High'] * ratio
258
+ # data['Adj Low'] = data['Low'] * ratio
259
+
260
+ # data.delete('Open')
261
+ # data.delete('High')
262
+ # data.delete('Low')
263
+ # data.delete('Adj Close')
264
+
265
+ # data['Open'] = data.delete('Adj Open')
266
+ # data['High'] = data.delete('Adj High')
267
+ # data['Low'] = data.delete('Adj Low')
268
+
269
+ # data
270
+ # end
271
+
272
# True when the string is a well-formed ISIN: 2 country letters,
# 9 alphanumerics, 1 check digit.
# Anchored with \A/\z so the WHOLE string must match — the original ^/$
# anchors match per-line, so e.g. "US0378331005\njunk" validated.
def self.is_isin(string)
  /\A[A-Z]{2}[A-Z0-9]{9}[0-9]\z/.match?(string)
end
275
+
276
# Normalize a user-supplied date/time into epoch seconds (Integer).
#
# Fixes relative to the original:
# * The Integer branch computed Time.at(dt) and threw the result away.
# * The String branch produced a DateTime and then called `dt.to_i`,
#   which DateTime does not define (NoMethodError); convert via #to_time.
# * `dt.zone.nil?` was unreachable — DateTime#zone always returns an
#   offset string — and `in_time_zone` is ActiveSupport-only.
#
# NOTE(review): exchange_tz is currently not applied (string dates parse at
# the +00:00 offset) — TODO confirm whether exchange-local midnight is wanted.
def self.parse_user_dt(dt, exchange_tz)
  case dt
  when Integer
    dt # already epoch seconds
  when String
    DateTime.strptime(dt, '%Y-%m-%d').to_time.to_i
  when DateTime # must precede Date: DateTime is a Date subclass
    dt.to_time.to_i
  when Date
    dt.to_time.to_i
  when Time
    dt.to_i
  else
    dt.to_i
  end
end
288
+
289
# Map a Yahoo interval token to a duration.
# '1mo'/'3mo'/'1y'/'1wk' become ActiveSupport durations (1.month etc.);
# any other value is returned unchanged.
# NOTE(review): Integer#month/#year/#week require ActiveSupport, which this
# file does not require directly (other files in the gem use it) — confirm
# it is always loaded before this is called.
def self.interval_to_timedelta(interval)
  case interval
  when '1mo'
    1.month
  when '3mo'
    3.months
  when '1y'
    1.year
  when '1wk'
    1.week
  else
    interval
  end
end
303
+
304
+ # def _interval_to_timedelta(interval)
305
+ # if interval == "1mo"
306
+ # return ActiveSupport::Duration.new(months: 1)
307
+ # elsif interval == "3mo"
308
+ # return ActiveSupport::Duration.new(months: 3)
309
+ # elsif interval == "1y"
310
+ # return ActiveSupport::Duration.new(years: 1)
311
+ # elsif interval == "1wk"
312
+ # return 7.days
313
+ # else
314
+ # return ActiveSupport::Duration.parse(interval)
315
+ # end
316
+ # end
317
+ end
318
+ end
319
+
320
+ # module Yfin
321
+ # class << self
322
+ # attr_accessor :logger
323
+ # end
324
+
325
+ # self.logger = Logger.new(STDOUT)
326
+ # self.logger.level = Logger::WARN
327
+ # end
328
+
329
# Snapshot an object's instance variables as a {name => value} Hash,
# skipping names that begin with '_' and names shadowed by a public/protected
# method defined directly on the object's class.
def attributes(obj)
  method_names = Set.new(obj.class.instance_methods(false).map(&:to_s))

  result = {}
  obj.instance_variables.each do |ivar|
    key = ivar.to_s[1..] # strip the leading '@'
    next if key.start_with?('_')
    next if method_names.include?(key)

    result[key] = obj.instance_variable_get(ivar)
  end
  result
end
338
+
339
# Emit msg to stdout.
# NOTE(review): despite the name, nothing is memoized — the message prints
# on EVERY call. Confirm whether once-only semantics were intended (the
# upstream yfinance helper of the same name deduplicates).
def print_once(msg)
  puts msg
end
342
+
343
# Return the logger used throughout the gem.
# NOTE(review): delegates to Rails.logger, so this currently requires a
# Rails environment; the commented-out Yfin.logger suggests a standalone
# logger was planned — confirm.
def get_yf_logger
  # Yfin.logger
  Rails.logger
end
347
+
348
# Install a multi-line formatter on the logger, but only when it is already
# at DEBUG level (no-op otherwise).
def setup_debug_formatting
  logger = get_yf_logger

  return unless logger.level == Logger::DEBUG

  # NOTE(review): MultiLineFormatter is not defined anywhere in this file,
  # and the template uses Python logging syntax ('%(levelname)s') — confirm
  # the formatter class exists and accepts this format string.
  logger.formatter = MultiLineFormatter.new('%(levelname)-8s %(message)s')
end
355
+
356
# Switch the Rails logger to DEBUG and apply the debug formatter.
# NOTE(review): hard Rails dependency, same as get_yf_logger — confirm.
def enable_debug_mode
  Rails.logger.level = Logger::DEBUG
  setup_debug_formatting
end
@@ -0,0 +1,5 @@
1
+ # frozen_string_literal: true
2
+
3
module Yfinrb
  # Gem version string, referenced by the gemspec and the RBS signature.
  VERSION = "0.1.0"
end
@@ -0,0 +1,300 @@
1
+ # require 'requests'
2
+ # require 'requests_cache'
3
+ require 'thread'
4
+ require 'date'
5
+ require 'nokogiri'
6
+
7
+ class Yfin
8
+ module YfConnection
9
+ extend ActiveSupport::Concern
10
+ # extend HTTParty
11
+
12
+ # """
13
+ # Have one place to retrieve data from Yahoo API in order to ease caching and speed up operations.
14
+ # """
15
+ @@user_agent_headers = {
16
+ 'User-Agent' => 'Mozilla/5.0 (Macintosh; Intel Mac OS X 10_10_1) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/39.0.2171.95 Safari/537.36'
17
+ # 'Mozilla/5.0 (Macintosh; Intel Mac OS X 10_15_7) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/125.0.0.0 Safari/537.36'
18
+ }
19
+ @@proxy = nil
20
+
21
+ cattr_accessor :user_agent_headers, :proxy
22
+
23
# Initialize shared connection state: the Zache response cache (when the
# gem is available), and the cookie/crumb slots guarded by a Mutex.
#
# Bug fix: the original rescued NoMethodError, but a missing Zache constant
# raises NameError ("uninitialized constant") — NoMethodError is a SUBCLASS
# of NameError, so the no-cache fallback could never run. Rescuing NameError
# covers both.
def yfconn_initialize
  begin
    @@zache = Zache.new
    @@session_is_caching = true
  rescue NameError
    # Zache not loaded: run without response caching.
    @@session_is_caching = false
  end

  @@crumb = nil
  @@cookie = nil
  @@cookie_strategy = 'basic'
  @@cookie_lock = Mutex.new
end
38
+
39
+
40
# Fetch a Yahoo endpoint with the crumb appended to the query string and
# the cached cookie attached.
#
# Important: input arguments are treated as immutable — the original called
# `params.merge!` on the caller's Hash, mutating it on every request despite
# the comment promising otherwise; we now work on copies.
#
# @param url [String] absolute URL
# @param headers [Hash, nil] extra request headers
# @param params [Hash, nil] query parameters
# @return [HTTParty::Response]
def get(url, headers = nil, params = nil)
  headers = headers ? headers.dup : {}
  params = params ? params.dup : {}
  params[:crumb] = @@crumb unless @@crumb.nil?

  cookie, crumb, _strategy = _get_cookie_and_crumb()
  crumbs = !crumb.nil? ? { 'crumb' => crumb } : {}

  request_args = {
    url: url,
    params: params.merge(crumbs),
    headers: headers
  }

  proxy = _get_proxy
  # NOTE(review): this assumes proxy is a "host:port/..." String, but
  # _get_proxy can yield a Hash — confirm the expected shape.
  HTTParty.http_proxy(proxy.split(':').first, proxy.split(':').second.split('/').first) unless proxy.nil?

  cookie_hash = HTTParty::CookieHash.new
  cookie_hash.add_cookies(@@cookie)
  options = { headers: headers.dup.merge(@@user_agent_headers).merge({ 'cookie' => cookie_hash.to_cookie_string, 'crumb' => crumb }) }

  u = request_args[:url].dup.to_s
  joiner = '?'.in?(request_args[:url]) ? '&' : '?'
  u += (joiner + CGI.unescape(request_args[:params].to_query)) unless request_args[:params].empty?

  HTTParty.get(u, options)
end

alias_method :cache_get, :get
73
+
74
+
75
# Thin wrapper over #get kept for API parity with upstream yfinance
# (which raised for bad status and decoded JSON here).
def get_raw_json(url, user_agent_headers = nil, params = nil)
  get(url, user_agent_headers, params)
end
82
+
83
+
84
+
85
+
86
+
87
+
88
+
89
+ private
90
+
91
+
92
# Normalize the class-level proxy setting into requests-style form.
#
# Bug fix: the original assigned `proxy = nil` and immediately guarded the
# body with `unless proxy.nil?`, so the normalization was dead code and the
# method ALWAYS returned nil. It now reads @@proxy as intended.
#
# @return [Hash, String, nil] {"https" => ...} when @@proxy is such a Hash,
#   otherwise @@proxy verbatim (nil when unset)
def _get_proxy
  return nil if @@proxy.nil?
  return { "https" => @@proxy["https"] } if @@proxy.is_a?(Hash) && @@proxy.include?("https")

  @@proxy
end
101
+
102
# Switch between the 'basic' and 'csrf' cookie strategies, resetting the
# cached cookie and crumb. No-op when the strategy is unchanged.
#
# Bug fix: when have_lock was true (exactly how _get_cookie_and_crumb calls
# this, from inside @@cookie_lock.synchronize), the original did NOTHING —
# the strategy flip and state reset were silently skipped, so the fallback
# path retried the same failed strategy forever.
#
# @param strategy [String] 'basic' or 'csrf'
# @param have_lock [Boolean] true when the caller already holds @@cookie_lock
def _set_cookie_strategy(strategy, have_lock = false)
  return if strategy == @@cookie_strategy

  if have_lock
    @@cookie_strategy = strategy
    @@cookie = nil
    @@crumb = nil
  else
    @@cookie_lock.synchronize do
      @@cookie_strategy = strategy
      @@cookie = nil
      @@crumb = nil
    end
  end
end
113
+
114
# Acquire (cookie, crumb, strategy) under @@cookie_lock, flipping between
# the 'csrf' and 'basic' strategies when the current one fails.
# NOTE(review): `have_lock=true` assigns a throwaway local and passes `true`
# positionally — Ruby has no by-name argument syntax of this form; it works
# but reads like a keyword argument.
def _get_cookie_and_crumb()
  cookie, crumb, strategy = nil, nil, nil

  @@cookie_lock.synchronize do
    if @@cookie_strategy == 'csrf'
      crumb = _get_crumb_csrf()
      if crumb.nil?
        # csrf failed: fall back to basic while still holding the lock.
        _set_cookie_strategy('basic', have_lock=true)
        cookie, crumb = __get_cookie_and_crumb_basic()
      end
    else
      # Default path: basic first, csrf as the fallback.
      cookie, crumb = __get_cookie_and_crumb_basic()
      if cookie.nil? || crumb.nil?
        _set_cookie_strategy('csrf', have_lock=true)
        crumb = _get_crumb_csrf()
      end
    end
    strategy = @@cookie_strategy
  end

  return cookie, crumb, strategy
end
143
+
144
# Basic strategy: fetch the cookie first, then the crumb that depends on it.
def __get_cookie_and_crumb_basic()
  [_get_cookie_basic(), _get_crumb_basic()]
end
149
+
150
# Obtain the basic Yahoo auth cookie: from the memoized class variable, the
# Zache store, or by hitting fc.yahoo.com. Persists any freshly fetched
# cookie. Returns nil when Yahoo issues no cookie (caller falls back to the
# csrf strategy).
#
# Fixes relative to the original:
# * A response without a 'set-cookie' header crashed with NoMethodError on
#   `nil.split`; we now return nil so _get_crumb_basic's nil-guard engages.
# * The UA headers were passed as HTTParty's positional options Hash, where
#   'User-Agent' is not a recognized option — they are now sent as headers,
#   matching the crumb request below in this module.
def _get_cookie_basic()
  @@cookie ||= _load_cookie_basic()
  return @@cookie unless @@cookie.nil? || @@cookie.length.zero?

  response = HTTParty.get('https://fc.yahoo.com', headers: @@user_agent_headers.dup)

  set_cookie = response.headers['set-cookie']
  return nil if set_cookie.nil?

  # Keep only the name=value pair, dropping cookie attributes.
  @@cookie = set_cookie.split(';').first

  _save_cookie_basic(@@cookie)

  @@cookie
end
167
+
168
# Fetch the API crumb using the basic cookie; memoized in @@crumb.
# Returns nil when no cookie is available or Yahoo answers with an HTML
# error/consent page instead of a crumb token.
# NOTE(review): String#in? is ActiveSupport — confirm AS is loaded here.
def _get_crumb_basic()
  return @@crumb unless @@crumb.nil?
  return nil if (cookie = _get_cookie_basic()).nil?

  cookie_hash = HTTParty::CookieHash.new
  cookie_hash.add_cookies(cookie)
  options = {headers: @@user_agent_headers.dup.merge(
    { 'cookie' => cookie_hash.to_cookie_string }
  )}

  crumb_response = HTTParty.get('https://query1.finance.yahoo.com/v1/test/getcrumb', options)
  @@crumb = crumb_response.parsed_response

  # An '<html>' payload means an error page was served, not a crumb.
  return (@@crumb.nil? || '<html>'.in?(@@crumb)) ? nil : @@crumb
end
183
+
184
# Walk Yahoo's GDPR consent flow to establish a csrf session cookie.
# Returns true when a session is in place (@@cookie is set to `true` as a
# flag — the cookies themselves live in @session), false when the consent
# page cannot be parsed.
#
# Fixes relative to the original (a line-for-line Python/BeautifulSoup port):
# * Nokogiri has no `find(tag, attrs: ...)` — BeautifulSoup API; replaced
#   with CSS selectors via #at_css. Nokogiri::HTML's second argument is a
#   URL, not a parser name, so 'html.parser' is dropped.
# * Missing sessionId input crashed with NoMethodError; now returns false.
# * Debug `puts` of csrfToken/sessionId leaked session secrets to stdout.
# * @session_is_caching (instance) read where yfconn_initialize writes
#   @@session_is_caching (class) — unified on the class variable.
#
# NOTE(review): @session and @expire_after are set elsewhere (not in this
# file) — confirm their provenance; response.body assumes an HTTParty-style
# response object.
def _get_cookie_csrf()
  return true unless @@cookie.nil?
  return (@@cookie = true) if _load_session_cookies()

  base_args = {
    headers: @@user_agent_headers
  }

  get_args = base_args.merge({url: 'https://guce.yahoo.com/consent'})
  get_args[:expire_after] = @expire_after if @@session_is_caching
  response = @session.get(**get_args)

  soup = Nokogiri::HTML(response.body)
  csrf_token_input = soup.at_css("input[name='csrfToken']")
  # Consent page without the token: cannot proceed with this strategy.
  return false if csrf_token_input.nil?

  csrf_token = csrf_token_input['value']

  session_id_input = soup.at_css("input[name='sessionId']")
  return false if session_id_input.nil?

  session_id = session_id_input['value']

  original_done_url = 'https://finance.yahoo.com/'
  namespace = 'yahoo'
  data = {
    'agree': ['agree', 'agree'],
    'consentUUID': 'default',
    'sessionId': session_id,
    'csrfToken': csrf_token,
    'originalDoneUrl': original_done_url,
    'namespace': namespace,
  }
  post_args = base_args.merge(
    {
      url: "https://consent.yahoo.com/v2/collectConsent?sessionId=#{session_id}",
      data: data
    }
  )
  get_args = base_args.merge(
    {
      url: "https://guce.yahoo.com/copyConsent?sessionId=#{session_id}",
      data: data
    }
  )
  if @@session_is_caching
    post_args[:expire_after] = @expire_after
    get_args[:expire_after] = @expire_after
  end
  @session.post(**post_args)
  @session.get(**get_args)

  @@cookie = true
  _save_session_cookies()

  true
end
244
+
245
# Fetch the API crumb via the csrf/consent session; memoized in @@crumb.
# Credit goes to @bot-unit #1729.
# NOTE(review): reads @session_is_caching / @session (instance variables)
# while yfconn_initialize sets @@session_is_caching (class variable) —
# confirm which store is intended. `r.text` and `.in?`/`.present?` assume
# an HTTParty-like response and ActiveSupport respectively.
def _get_crumb_csrf()
  return @@crumb unless @@crumb.nil?
  # This cookie stored in session
  return nil unless _get_cookie_csrf().present?

  get_args = {
    url: 'https://query2.finance.yahoo.com/v1/test/getcrumb',
    headers: @@user_agent_headers
  }

  get_args[:expire_after] = @expire_after if @session_is_caching
  r = @session.get(**get_args)

  @@crumb = r.text

  # An HTML payload or empty body means no crumb was issued.
  return nil if @@crumb.nil? || '<html>'.in?(@@crumb) || @@crumb.length.zero?
  return @@crumb
end
267
+
268
+
269
+
270
+
271
+
272
# Persist the csrf session cookies in the Zache store for 24 hours.
# Returns true on success, false on any save failure.
#
# Bug fix: rescued StandardError instead of Exception — rescuing Exception
# also swallows SignalException/SystemExit/NoMemoryError, making the
# process ignore Ctrl-C here.
def _save_session_cookies()
  begin
    @@zache.put(:csrf, @session.cookies, lifetime: 60 * 60 * 24)
  rescue StandardError
    return false
  end
  true
end
280
+
281
# Restore csrf session cookies from the Zache store into @session.
# Returns false when the cached entry has expired.
# NOTE(review): Zache#expired? raises when the key has never been written —
# confirm :csrf is always seeded (cf. _load_cookie_basic's guard pattern)
# before this runs.
def _load_session_cookies()
  return false if @@zache.expired?(:csrf)
  @session.cookies = @@zache.get(:csrf)
end
285
+
286
# Persist the basic cookie in the Zache store for 24 hours.
# Returns true on success, false on any save failure.
#
# Bug fix: rescued StandardError instead of Exception (rescuing Exception
# swallows SignalException/SystemExit — never intended here).
def _save_cookie_basic(cookie)
  begin
    @@zache.put(:basic, cookie, lifetime: 60 * 60 * 24)
  rescue StandardError
    return false
  end
  true
end
294
+
295
# Fetch the cached basic cookie from the Zache store.
# Seeds a short-lived (1 second) nil placeholder when the key was never
# written, so the subsequent expired?/get calls cannot raise on a missing
# key. Returns nil when the cached value has expired.
def _load_cookie_basic()
  @@zache.put(:basic, nil, lifetime: 1) unless @@zache.exists?(:basic, dirty: false)
  return @@zache.expired?(:basic) ? nil : @@zache.get(:basic)
end
299
+ end
300
+ end
@@ -0,0 +1,16 @@
1
class Yfin
  # Base error for the gem's Yahoo-data failures.
  #
  # Fixes: @msg was exposed via attr_reader but never assigned; it is now
  # populated from the constructor and mirrors #message.
  class YfinanceException < StandardError
    attr_reader :msg

    def initialize(msg = nil)
      @msg = msg
      super(msg)
    end
  end

  # Raised when Yahoo returns data the gem cannot interpret.
  class YfinDataException < YfinanceException
  end

  # Raised for Yahoo API surfaces this port has not implemented yet.
  #
  # Fixes: now calls super so #message carries the description (previously
  # it was just the class name), exposes #msg like the other exceptions, and
  # only logs when Rails is actually loaded (the gem is usable outside Rails).
  class YFNotImplementedError < NotImplementedError
    attr_reader :msg

    def initialize(str)
      @msg = "Have not implemented fetching \"#{str}\" from Yahoo API"
      super(@msg)
      Rails.logger.warn { @msg } if defined?(Rails)
    end
  end
end
data/lib/yfinrb.rb ADDED
@@ -0,0 +1,17 @@
1
# frozen_string_literal: true

# Gem entry point: loads the Yfin subsystems, then defines the Yfinrb
# namespace and its base error class.
require_relative 'yfin/version'
require_relative 'yfin/yfinance_exception'
require_relative 'yfin/ticker'
require_relative 'yfin/price_history'
require_relative 'yfin/quote'
require_relative 'yfin/analysis'
require_relative 'yfin/fundamentals'
require_relative 'yfin/financials'
require_relative 'yfin/holders'
# NOTE(review): both yfin/version and yfinrb/version are required, and the
# quote style differs from the lines above — confirm both files exist and
# are intended.
require_relative "yfinrb/version"

module Yfinrb
  # Base error class for failures raised by this gem itself.
  class Error < StandardError; end
  # Your code goes here...
end
data/sig/yfinrb.rbs ADDED
@@ -0,0 +1,4 @@
1
# RBS type signatures for the Yfinrb gem.
module Yfinrb
  VERSION: String
  # See the writing guide of rbs: https://github.com/ruby/rbs#guides
end