sqa 0.0.11 → 0.0.13
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- checksums.yaml +4 -4
- data/checksums/sqa-0.0.12.gem.sha512 +1 -0
- data/lib/sqa/cli.rb +1 -0
- data/lib/sqa/config.rb +12 -10
- data/lib/sqa/data_frame/alpha_vantage.rb +155 -0
- data/lib/sqa/data_frame/yahoo_finance.rb +3 -0
- data/lib/sqa/data_frame.rb +10 -15
- data/lib/sqa/indicator/predict_next_value.rb +153 -39
- data/lib/sqa/stock.rb +25 -11
- data/lib/sqa/ticker.rb +68 -0
- data/lib/sqa/version.rb +1 -1
- data/lib/sqa.rb +10 -16
- metadata +6 -3
checksums.yaml
CHANGED
@@ -1,7 +1,7 @@
 ---
 SHA256:
-  metadata.gz:
-  data.tar.gz:
+  metadata.gz: 8dd371b757bcc5da8e913681a06666715cbe88ca11f582df64d68ef305391d28
+  data.tar.gz: 8b1851af5e7875266acbd3fb35a7ba108fdb267d7490eb19984819bba5f15538
 SHA512:
-  metadata.gz:
-  data.tar.gz:
+  metadata.gz: ab803635e2f85e71bebacab6be10cc345f6050a890c730a658741d4a3ae4530490be33fc692a3ced0e843e473cc5613644dc5a0c0d0bfd3a43d5c0d2b4d41510
+  data.tar.gz: 7ee788d760d4020efb41baa3f5bc894cc4824d0308144b68f8c352bed03a01ce2049547bace95636264fab1c7b58ab12f97c99209433e2b32719db575017d25c
data/checksums/sqa-0.0.12.gem.sha512
ADDED
@@ -0,0 +1 @@
+2d4a5156118549d604963b8f4dd0b311459f5ad6a1747f2c4925d45f12fe94a863eb225d3c593f82face964773ccbf59478612a014ee9fc32814baf627f730c7
data/lib/sqa/cli.rb
CHANGED
data/lib/sqa/config.rb
CHANGED
@@ -86,17 +86,23 @@ module SQA
     # TODO: arrange order in mostly often used

     if ".json" == type
-      form_json
+      incoming = form_json

     elsif %w[.yml .yaml].include?(type)
-      from_yaml
+      incoming = from_yaml

     elsif ".toml" == type
-      from_toml
+      incoming = from_toml

     else
       raise BadParameterError, "Invalid Config File: #{config_file}"
     end
+
+    if incoming.has_key? :data_dir
+      incoming[:data_dir] = incoming[:data_dir].gsub(/^~/, Nenv.home)
+    end
+
+    merge! incoming
   end

   def dump_file
@@ -142,19 +148,15 @@ module SQA
   ## override values from a config file

   def from_json
-
-    debug_me{[ :incoming ]}
+    ::JSON.load(File.open(config_file).read).symbolize_keys
   end

   def from_toml
-
-    debug_me{[ :incoming ]}
+    TomlRB.load_file(config_file).symbolize_keys
   end

   def from_yaml
-
-    debug_me{[ :incoming ]}
-    merge! incoming
+    ::YAML.load_file(config_file).symbolize_keys
   end


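The file-loading path now captures the parsed hash in "incoming", expands a leading "~" in data_dir, and merges the result into the config object. A minimal sketch of that tilde expansion, assuming only that Nenv.home returns the current user's home directory and that the data_dir value is illustrative:

  require 'nenv'

  incoming = { data_dir: "~/sqa_data" }   # value as it might appear in a config file

  if incoming.has_key? :data_dir
    incoming[:data_dir] = incoming[:data_dir].gsub(/^~/, Nenv.home)
  end

  incoming[:data_dir]   # => "/home/dewayne/sqa_data" when HOME is /home/dewayne
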
data/lib/sqa/data_frame/alpha_vantage.rb
ADDED
@@ -0,0 +1,155 @@
+# lib/sqa/data_frame/alpha_vantage.rb
+# frozen_string_literal: true
+#
+# Using the Alpha Vantage JSON interface
+#
+
+require 'faraday'
+require 'json'
+
+class SQA::DataFrame < Daru::DataFrame
+  class AlphaVantage
+    API_KEY = Nenv.av_api_key
+    CONNECTION = Faraday.new(url: 'https://www.alphavantage.co')
+    HEADERS = YahooFinance::HEADERS
+
+    # The Alpha Vantage headers are being remapped so that
+    # they match those of the Yahoo Finance CSV file.
+    #
+    HEADER_MAPPING = {
+      "date" => HEADERS[0],
+      "open" => HEADERS[1],
+      "high" => HEADERS[2],
+      "low" => HEADERS[3],
+      "close" => HEADERS[4],
+      "adjusted_close" => HEADERS[5],
+      "volume" => HEADERS[6]
+    }
+
+
+    ################################################################
+    # Load a Dataframe from a csv file
+    def self.load(ticker, type="csv")
+      filepath = SQA.data_dir + "#{ticker}.#{type}"
+
+      if filepath.exist?
+        df = normalize_vector_names SQA::DataFrame.load(ticker, type)
+      else
+        df = recent(ticker, full: true)
+        df.send("to_#{type}",filepath)
+      end
+
+      df
+    end
+
+
+    # Normalize the vector (aka column) names as
+    # symbols using the standard names set by
+    # Yahoo Finance ... since it was the first one
+    # not because its anything special.
+    #
+    def self.normalize_vector_names(df)
+      headers = df.vectors.to_a
+
+      # convert vector names to symbols
+      # when they are strings. They become stings
+      # when the data frame is saved to a CSV file
+      # and then loaded back in.
+
+      if headers.first == HEADERS.first.to_s
+        a_hash = {}
+        HEADERS.each {|k| a_hash[k.to_s] = k}
+        df.rename_vectors(a_hash) # renames from String to Symbol
+      else
+        df.rename_vectors(HEADER_MAPPING)
+      end
+
+      df
+    end
+
+
+    # Get recent data from JSON API
+    #
+    # ticker String the security to retrieve
+    # returns a DataFrame
+    #
+    # NOTE: The function=TIME_SERIES_DAILY_ADJUSTED
+    # is not a free API endpoint from Alpha Vantange.
+    # So we are just using the free API endpoint
+    # function=TIME_SERIES_DAILY
+    # This means that we are not getting the
+    # real adjusted closing price. To sync
+    # the columns with those from Yahoo Finance
+    # we are duplicating the unadjusted clossing price
+    # and adding that to the data frame as if it were
+    # adjusted.
+    #
+    def self.recent(ticker, full: false)
+      # NOTE: Using the CSV format because the JSON format has
+      # really silly key values. The column names for the
+      # CSV format are much better.
+      response = CONNECTION.get(
+        "/query?" +
+        "function=TIME_SERIES_DAILY&" +
+        "symbol=#{ticker.upcase}&" +
+        "apikey=#{API_KEY}&" +
+        "datatype=csv&" +
+        "outputsize=#{full ? 'full' : 'compact'}"
+      ).to_hash
+
+      unless 200 == response[:status]
+        raise "Bad Response: #{response[:status]}"
+      end
+
+      raw = response[:body].split
+
+      headers = raw.shift.split(',')
+      headers[0] = 'date' # website returns "timestamp" but that
+                          # has an unintended side-effect when
+                          # the names are normalized.
+
+      close_inx = headers.size - 2
+      adj_close_inx = close_inx + 1
+
+      headers.insert(adj_close_inx, 'adjusted_close')
+
+      data = raw.map do |e|
+        e2 = e.split(',')
+        e2[1..-2] = e2[1..-2].map(&:to_f) # converting open, high, low, close
+        e2[-1] = e2[-1].to_i # converting volumn
+        e2.insert(adj_close_inx, e2[close_inx]) # duplicate the close price as a fake adj close price
+        headers.zip(e2).to_h
+      end
+
+      # What oldest data first in the data frame
+      normalize_vector_names Daru::DataFrame.new(data.reverse)
+    end
+
+
+    # Append update_df rows to the base_df
+    #
+    # base_df is ascending on timestamp
+    # update_df is descending on timestamp
+    #
+    # base_df content came from CSV file downloaded
+    # from Yahoo Finance.
+    #
+    # update_df came from scraping the webpage
+    # at Yahoo Finance for the recent history.
+    #
+    # Returns a combined DataFrame.
+    #
+    def self.append(base_df, updates_df)
+      last_timestamp = Date.parse base_df.timestamp.last
+      filtered_df = updates_df.filter_rows { |row| Date.parse(row[:timestamp]) > last_timestamp }
+
+      last_inx = filtered_df.size - 1
+
+      (0..last_inx).each do |x|
+        base_df.add_row filtered_df.row[last_inx-x]
+      end
+
+      base_df
+    end
+  end
+end
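Hypothetical usage of the new Alpha Vantage source, based only on the method signatures above; it assumes the Alpha Vantage API key is available through Nenv.av_api_key and uses "aapl" purely as an illustrative ticker:

  compact = SQA::DataFrame::AlphaVantage.recent("aapl")              # most recent (compact) daily prices
  full    = SQA::DataFrame::AlphaVantage.recent("aapl", full: true)  # full available history
  cached  = SQA::DataFrame::AlphaVantage.load("aapl")                # reads {data_dir}/aapl.csv, or fetches and saves it
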
data/lib/sqa/data_frame/yahoo_finance.rb
CHANGED
@@ -59,6 +59,9 @@ class SQA::DataFrame < Daru::DataFrame
     response = CONNECTION.get("/quote/#{ticker.upcase}/history")
     doc = Nokogiri::HTML(response.body)
     table = doc.css('table').first
+
+    raise "NoDataError" if table.nil?
+
     rows = table.css('tbody tr')

     data = []
data/lib/sqa/data_frame.rb
CHANGED
@@ -2,17 +2,19 @@
 # frozen_string_literal: true

 require_relative 'data_frame/yahoo_finance'
+require_relative 'data_frame/alpha_vantage'

 class Daru::DataFrame

   def to_csv(path_to_file, opts={})
     options = {
+      headers: true,
       converters: :numeric
     }.merge(opts)

     writer = ::CSV.open(path_to_file, 'wb')

-    writer << vectors.to_a
+    writer << vectors.to_a if options[:headers]

     each_row do |row|
       writer << if options[:convert_comma]
@@ -31,27 +33,20 @@ end

 class SQA::DataFrame < Daru::DataFrame

-
   #################################################
+  def self.load(ticker, type="csv", options={}, &block)
+    source = SQA.data_dir + "#{ticker}.#{type}"

-
-    Pathname.new SQA.config.data_dir + filename
-  end
-
-  def self.load(filename, options={}, &block)
-    source = path(filename)
-    type = source.extname.downcase
-
-    if ".csv" == type
+    if :csv == type
       from_csv(source, options={}, &block)
-    elsif
+    elsif :json == type
       from_json(source, options={}, &block)
-    elsif %
+    elsif %i[txt dat].include?(type)
       from_plaintext(source, options={}, &block)
-    elsif
+    elsif :xls == type
       from_excel(source, options={}, &block)
     else
-      raise SQA::
+      raise SQA::BadParameterError, "un-supported file type: #{type}"
     end
   end
 end
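Note that the rewritten SQA::DataFrame.load dispatches on Symbol types (:csv, :json, :xls) even though its default type is the String "csv", so the sketch below passes a Symbol explicitly; the ticker value is illustrative:

  df = SQA::DataFrame.load("aapl", :csv)   # reads {SQA.data_dir}/aapl.csv via from_csv
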
data/lib/sqa/indicator/predict_next_value.rb
CHANGED
@@ -5,19 +5,91 @@ end

 class SQA::Indicator; class << self

-
-
-
-
-
-
-
-
+  # Produce a Table show actual values and forecasted values
+  #
+  # actual .... Array of Float
+  # forecast .. Array of Float or Array of Array of Float
+  #             entry is either a single value or
+  #             an Array [high, guess, low]
+  #
+  def prediction_test(actual, forecast)
+
+    unless actual.size == forecast.size
+      debug_me("== ERROR =="){[
+        "actual.size",
+        "forecast.size"
+      ]}
+    end
+
+    # Method Under Test (MUT)
+    mut = caller[0][/`([^']*)'/, 1]
+    window = actual.size
+    hgl = forecast.first.is_a?(Array)
+
+    if hgl
+      headers = %w[ Actual Forecast Diff %off InRange? High Low ]
+    else
+      headers = %w[ Actual Forecast Diff %off ]
+    end
+
+    diff = []
+    percent = []
+    values = []
+
+    actual.map!{|v| v.round(3)}
+
+    if hgl
+      high  = forecast.map{|v| v[0].round(3)}
+      guess = forecast.map{|v| v[1].round(3)}
+      low   = forecast.map{|v| v[2].round(3)}
+    else
+      guess = forecast.map{|v| v.round(3)}
+    end
+
+    window.times do |x|
+      diff    << (actual[x] - guess[x]).round(3)
+      percent << ((diff.last / guess[x])*100.0).round(3)
+
+      entry = [
+        actual[x], guess[x],
+        diff[x], percent[x],
+      ]
+
+      if hgl
+        entry << ( (high[x] >= actual[x] && actual[x] >= low[x]) ? "Yes" : "" )
+        entry << high[x]
+        entry << low[x]
+      end
+
+      values << entry
+    end
+
+    the_table = TTY::Table.new(headers, values)
+
+    puts "\n#{mut} Result Validation"
+
+    puts the_table.render(
+      :unicode,
+      {
+        padding: [0, 0, 0, 0],
+        alignments: [:right]*values.first.size,
+      }
+    )
+    puts
+  end
+
+
+  def predict_next_values(stock, window, testing=false)
+    prices = stock.df.adj_close_price.to_a
+    known = prices.pop(window) if testing
+    result = []
+
+    prices.each_cons(2) do |a, b|
       result << b + (b - a)
     end

-    if
-    (1..
+    if window > 0
+      (1..window).each do |_|
         last_two_values = result.last(2)
         delta = last_two_values.last - last_two_values.first
         next_value = last_two_values.last + delta
@@ -25,64 +97,106 @@ class SQA::Indicator; class << self
       end
     end

-    result.last(
+    prediction_test(known, result.last(window)) if testing
+
+    result.last(window)
   end
   alias_method :pnv, :predict_next_values


-
-
-
-
-  # the forecast goes. This does not produce that kind of probability
-  # cone; but, that was hwat I was thinking about
-  #
-  # array is an Array - for example historical price data
-  # predictions is an Integer for how many predictions into the future
-  #
-  def pnv2(array, predictions)
+  def pnv2(stock, window, testing=false)
+    prices = stock.df.adj_close_price.to_a
+    known = prices.pop(window) if testing
+
     result = []
-    last_inx =
+    last_inx = prices.size - 1 # indexes are zero based

-
+    window.times do |x|
       x += 1 # forecasting 1 day into the future needs 2 days of near past data

       # window is the near past values
-      window =
+      window = prices[last_inx-x..]

       high = window.max
       low = window.min
       midpoint = (high + low) / 2.0

-      result
+      result << [high, midpoint, low]
+    end
+
+    prediction_test(known, result) if testing
+
+    result
+  end
+
+
+  def pnv3(stock, window, testing=false)
+    prices = stock.df.adj_close_price.to_a
+    known = prices.pop(window) if testing
+
+    result = []
+    known = prices.last(window)
+
+    last_inx = prices.size - 1
+
+    (0..window-1).to_a.reverse.each do |x|
+      curr_inx = last_inx - x
+      prev_inx = curr_inx - 1
+      current_price = prices[curr_inx]
+      percentage_change = (current_price - prices[prev_inx]) / prices[prev_inx]
+
+      result << current_price + (current_price * percentage_change)
     end

+    prediction_test(known, result) if testing
+
     result
   end


-  def
-
-  known
+  def pnv4(stock, window, testing=false)
+    prices = stock.df.adj_close_price.to_a
+    known = prices.pop(window) if testing
+
+    result = []
+    known = prices.last(window).dup
+    current_price = known.last

     # Loop through the prediction window size
-    (1..
-      current_price = known.last
+    (1..window).each do |x|

       # Calculate the percentage change between the current price and its previous price
-      percentage_change = (current_price -
+      percentage_change = (current_price - prices[-x]) / prices[-x]

-
-      predicted_price = current_price + (current_price * percentage_change)
-      predicted_prices.unshift(predicted_price)
-
-      # Update the prices array for the next iteration
-      known.pop
+      result << current_price + (current_price * percentage_change)
     end

-
+    prediction_test(known, result) if testing
+
+    result
   end


+  def pnv5(stock, window, testing=false)
+    prices = stock.df.adj_close_price.to_a
+    known = prices.pop(window) if testing
+
+    result = []
+    current_price = prices.last
+
+    rate = 0.9 # convert angle into percentage
+    sma_trend = stock.indicators.sma_trend
+    percentage_change = 1 + (sma_trend[:angle] / 100.0) * rate
+
+    # Assumes the SMA trend will continue
+    window.times do |_|
+      result << current_price * percentage_change
+      current_price = result.last
+    end
+
+    prediction_test(known, result) if testing
+
+    result
+  end

 end; end
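A hypothetical call pattern for the reworked predictors, assuming stock is an SQA::Stock whose data frame exposes adj_close_price; the ticker and window size are illustrative:

  stock = SQA::Stock.new(ticker: "aapl")

  SQA::Indicator.pnv(stock, 5)          # five point forecasts by simple delta extrapolation
  SQA::Indicator.pnv2(stock, 5)         # five [high, midpoint, low] triples
  SQA::Indicator.pnv2(stock, 5, true)   # same, but holds back 5 known prices and prints a TTY::Table comparison
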
data/lib/sqa/stock.rb
CHANGED
@@ -4,29 +4,47 @@ class SQA::Stock
   attr_accessor :company_name
   attr_accessor :df # The DataFrane
   attr_accessor :ticker
+  attr_accessor :type # type of data store (default is CSV)
+  attr_accessor :indicators
+
+  def initialize(
+    ticker:,
+    source: :alpha_vantage,
+    type: :csv
+  )
+    raise "Invalid Ticker #{ticker}" unless SQA::Ticker.valid?(ticker)
+
+    # TODO: Change API on lookup to return array instead of hash
+    #       Could this also incorporate the validation process to
+    #       save an additiona hash lookup?
+
+    entry = SQA::Ticker.lookup(ticker)

-  def initialize(ticker:, source: :yahoo_finance, type: :csv)
     @ticker = ticker.downcase
-    @company_name =
+    @company_name = entry[:name]
+    @exchange = entry[:exchange]
     @klass = "SQA::DataFrame::#{source.to_s.camelize}".constantize
     @type = type
-    @
+    @indicators = OpenStruct.new

     update_the_dataframe
   end


   def update_the_dataframe
-    df1 = @klass.load(@
+    df1 = @klass.load(@ticker, type)
     df2 = @klass.recent(@ticker)
-    @df = @klass.append(df1, df2)

-
-
+    df1_nrows = df1.nrows
+    @df = @klass.append(df1, df2)
+
+    if @df.nrows > df1_nrows
+      @df.send("to_#{@type}", SQA.data_dir + "#{ticker}.csv")
     end

     # Adding a ticker vector in case I want to do
     # some multi-stock analysis in the same data frame.
+    # For example to see how one stock coorelates with another.
     @df[:ticker] = @ticker
   end

@@ -34,7 +52,3 @@ class SQA::Stock
     "#{ticker} with #{@df.size} data points from #{@df.timestamp.first} to #{@df.timestamp.last}"
   end
 end
-
-__END__
-
-aapl = Stock.new('aapl', SQA::Datastore::CSV)
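The keyword constructor shown above now validates the ticker against SQA::Ticker and defaults the data source to :alpha_vantage instead of :yahoo_finance; a minimal sketch, with an illustrative ticker:

  aapl = SQA::Stock.new(ticker: "aapl")   # source: :alpha_vantage, type: :csv by default
  aapl.company_name                       # name returned by the SQA::Ticker lookup
  aapl.indicators                         # an OpenStruct for caching computed indicators
  aapl.df                                 # the merged Daru::DataFrame of price history
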
data/lib/sqa/ticker.rb
ADDED
@@ -0,0 +1,68 @@
+# sqa/lib/sqa/ticker.rb
+#
+# Uses the https://dumbstockapi.com/ website to download a CSV file
+#
+# The CSV files have names like this:
+# "dumbstockapi-2023-09-21T16 39 55.165Z.csv"
+#
+# which has this header:
+# ticker,name,is_etf,exchange
+#
+# Not using the is_etf columns
+#
+class SQA::Ticker
+  FILENAME_PREFIX = "dumbstockapi"
+  CONNECTION = Faraday.new(url: "https://dumbstockapi.com")
+  @@data = {}
+
+
+  def self.download(country="US")
+    response = CONNECTION.get("/stock?format=csv&countries=#{country.upcase}").to_hash
+
+    if 200 == response[:status]
+      filename = response[:response_headers]["content-disposition"].split('=').last.gsub('"','')
+      out_path = Pathname.new(SQA.config.data_dir) + filename
+      out_path.write response[:body]
+    end
+
+    response[:status]
+  end
+
+
+  def self.load
+    tries = 0
+    found = false
+
+    until(found || tries >= 3) do
+      files = Pathname.new(SQA.config.data_dir).children.select{|c| c.basename.to_s.start_with?(FILENAME_PREFIX)}.sort
+      if files.empty?
+        download
+        tries += 1
+      else
+        found = true
+      end
+    end
+
+    raise "NoDataError" if files.empty?
+
+    load_from_csv files.last
+  end
+
+
+  def self.load_from_csv(csv_path)
+    CSV.foreach(csv_path, headers: true) do |row|
+      @@data[row["ticker"]] = {
+        name: row["name"],
+        exchange: row["exchange"]
+      }
+    end
+
+    @@data
+  end
+
+
+
+  def self.data = @@data.empty? ? load : @@data
+  def self.lookup(ticker) = data[ticker.upcase]
+  def self.valid?(ticker) = data.has_key?(ticker.upcase)
+end
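A sketch of how the new ticker table is used, based on the class methods above; the returned values are illustrative and depend on the cached dumbstockapi CSV in SQA.config.data_dir:

  SQA::Ticker.valid?("aapl")   # => true when the ticker appears in the downloaded US listing
  SQA::Ticker.lookup("aapl")   # => { name: "Apple Inc", exchange: "NASDAQ" }  (illustrative values)
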
data/lib/sqa/version.rb
CHANGED
data/lib/sqa.rb
CHANGED
@@ -10,6 +10,7 @@ require 'nenv'
 require 'pathname'

 require_relative "sqa/version"
+require_relative "sqa/errors"


 unless defined?(HOME)
@@ -34,10 +35,12 @@ module SQA
       CLI.run(argv)
     else
       # There are no real command line parameters
-      # because the sqa gem is
+      # because the sqa gem is being required within
       # the context of a larger program.
     end

+    config.data_dir = homify(config.data_dir)
+
     Daru.lazy_update = config.lazy_update
     Daru.plotting_library = config.plotting_library

@@ -50,25 +53,16 @@ module SQA
     nil
   end

-  def
-
-  end
+  def debug?() = @@config.debug?
+  def verbose?() = @@config.verbose?

-  def
-
-
+  def homify(filepath) = filepath.gsub(/^~/, Nenv.home)
+  def data_dir() = Pathname.new(config.data_dir)
+  def config() = @@config

   def config=(an_object)
     @@config = an_object
   end
-
-  def debug?
-    @@config.debug?
-  end
-
-  def verbose?
-    @@config.verbose?
-  end
 end
 end

@@ -77,9 +71,9 @@ end
 require_relative "sqa/config"
 require_relative "sqa/constants"
 require_relative "sqa/data_frame"
-require_relative "sqa/errors"
 require_relative "sqa/indicator"
 require_relative "sqa/portfolio"
 require_relative "sqa/strategy"
 require_relative "sqa/stock"
+require_relative "sqa/ticker"
 require_relative "sqa/trade"
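The module-level helpers are now single-line (endless) method definitions; based on the definitions above, with an illustrative home directory:

  SQA.homify("~/sqa_data")   # => "/home/dewayne/sqa_data" when HOME is /home/dewayne
  SQA.data_dir               # => Pathname built from SQA.config.data_dir
  SQA.debug?                 # => delegates to SQA.config.debug?
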
metadata
CHANGED
@@ -1,14 +1,14 @@
 --- !ruby/object:Gem::Specification
 name: sqa
 version: !ruby/object:Gem::Version
-  version: 0.0.11
+  version: 0.0.13
 platform: ruby
 authors:
 - Dewayne VanHoozer
 autorequire:
 bindir: bin
 cert_chain: []
-date: 2023-09-
+date: 2023-09-22 00:00:00.000000000 Z
 dependencies:
 - !ruby/object:Gem::Dependency
   name: activesupport
@@ -280,6 +280,7 @@ files:
 - checksums/sqa-0.0.1.gem.sha512
 - checksums/sqa-0.0.10.gem.sha512
 - checksums/sqa-0.0.11.gem.sha512
+- checksums/sqa-0.0.12.gem.sha512
 - checksums/sqa-0.0.2.gem.sha512
 - checksums/sqa-0.0.3.gem.sha512
 - checksums/sqa-0.0.4.gem.sha512
@@ -328,6 +329,7 @@ files:
 - lib/sqa/config.rb
 - lib/sqa/constants.rb
 - lib/sqa/data_frame.rb
+- lib/sqa/data_frame/alpha_vantage.rb
 - lib/sqa/data_frame/yahoo_finance.rb
 - lib/sqa/errors.rb
 - lib/sqa/indicator.rb
@@ -365,6 +367,7 @@ files:
 - lib/sqa/strategy/random.rb
 - lib/sqa/strategy/rsi.rb
 - lib/sqa/strategy/sma.rb
+- lib/sqa/ticker.rb
 - lib/sqa/trade.rb
 - lib/sqa/version.rb
 - lib/sqa/web.rb
@@ -390,7 +393,7 @@ required_rubygems_version: !ruby/object:Gem::Requirement
 - !ruby/object:Gem::Version
   version: '0'
 requirements: []
-rubygems_version: 3.4.
+rubygems_version: 3.4.19
 signing_key:
 specification_version: 4
 summary: sqa - Stock Qualitative Analysis