sqa 0.0.12 → 0.0.13
- checksums.yaml +4 -4
- data/lib/sqa/cli.rb +1 -0
- data/lib/sqa/data_frame/alpha_vantage.rb +155 -0
- data/lib/sqa/data_frame/yahoo_finance.rb +3 -0
- data/lib/sqa/data_frame.rb +10 -15
- data/lib/sqa/stock.rb +23 -11
- data/lib/sqa/ticker.rb +68 -0
- data/lib/sqa/version.rb +1 -1
- data/lib/sqa.rb +10 -16
- metadata +4 -2
checksums.yaml
CHANGED
@@ -1,7 +1,7 @@
 ---
 SHA256:
-  metadata.gz:
-  data.tar.gz:
+  metadata.gz: 8dd371b757bcc5da8e913681a06666715cbe88ca11f582df64d68ef305391d28
+  data.tar.gz: 8b1851af5e7875266acbd3fb35a7ba108fdb267d7490eb19984819bba5f15538
 SHA512:
-  metadata.gz:
-  data.tar.gz:
+  metadata.gz: ab803635e2f85e71bebacab6be10cc345f6050a890c730a658741d4a3ae4530490be33fc692a3ced0e843e473cc5613644dc5a0c0d0bfd3a43d5c0d2b4d41510
+  data.tar.gz: 7ee788d760d4020efb41baa3f5bc894cc4824d0308144b68f8c352bed03a01ce2049547bace95636264fab1c7b58ab12f97c99209433e2b32719db575017d25c
data/lib/sqa/cli.rb
CHANGED
data/lib/sqa/data_frame/alpha_vantage.rb
ADDED
@@ -0,0 +1,155 @@
+# lib/sqa/data_frame/alpha_vantage.rb
+# frozen_string_literal: true
+#
+# Using the Alpha Vantage JSON interface
+#
+
+require 'faraday'
+require 'json'
+
+class SQA::DataFrame < Daru::DataFrame
+  class AlphaVantage
+    API_KEY    = Nenv.av_api_key
+    CONNECTION = Faraday.new(url: 'https://www.alphavantage.co')
+    HEADERS    = YahooFinance::HEADERS
+
+    # The Alpha Vantage headers are being remapped so that
+    # they match those of the Yahoo Finance CSV file.
+    #
+    HEADER_MAPPING = {
+      "date"           => HEADERS[0],
+      "open"           => HEADERS[1],
+      "high"           => HEADERS[2],
+      "low"            => HEADERS[3],
+      "close"          => HEADERS[4],
+      "adjusted_close" => HEADERS[5],
+      "volume"         => HEADERS[6]
+    }
+
+
+    ################################################################
+    # Load a Dataframe from a csv file
+    def self.load(ticker, type="csv")
+      filepath = SQA.data_dir + "#{ticker}.#{type}"
+
+      if filepath.exist?
+        df = normalize_vector_names SQA::DataFrame.load(ticker, type)
+      else
+        df = recent(ticker, full: true)
+        df.send("to_#{type}", filepath)
+      end
+
+      df
+    end
+
+
+    # Normalize the vector (aka column) names as
+    # symbols using the standard names set by
+    # Yahoo Finance ... since it was the first one
+    # not because its anything special.
+    #
+    def self.normalize_vector_names(df)
+      headers = df.vectors.to_a
+
+      # convert vector names to symbols
+      # when they are strings.  They become stings
+      # when the data frame is saved to a CSV file
+      # and then loaded back in.
+
+      if headers.first == HEADERS.first.to_s
+        a_hash = {}
+        HEADERS.each {|k| a_hash[k.to_s] = k}
+        df.rename_vectors(a_hash) # renames from String to Symbol
+      else
+        df.rename_vectors(HEADER_MAPPING)
+      end
+
+      df
+    end
+
+
+    # Get recent data from JSON API
+    #
+    # ticker String the security to retrieve
+    # returns a DataFrame
+    #
+    # NOTE: The function=TIME_SERIES_DAILY_ADJUSTED
+    #       is not a free API endpoint from Alpha Vantange.
+    #       So we are just using the free API endpoint
+    #       function=TIME_SERIES_DAILY
+    #       This means that we are not getting the
+    #       real adjusted closing price.  To sync
+    #       the columns with those from Yahoo Finance
+    #       we are duplicating the unadjusted clossing price
+    #       and adding that to the data frame as if it were
+    #       adjusted.
+    #
+    def self.recent(ticker, full: false)
+      # NOTE: Using the CSV format because the JSON format has
+      #       really silly key values.  The column names for the
+      #       CSV format are much better.
+      response = CONNECTION.get(
+        "/query?" +
+        "function=TIME_SERIES_DAILY&" +
+        "symbol=#{ticker.upcase}&" +
+        "apikey=#{API_KEY}&" +
+        "datatype=csv&" +
+        "outputsize=#{full ? 'full' : 'compact'}"
+      ).to_hash
+
+      unless 200 == response[:status]
+        raise "Bad Response: #{response[:status]}"
+      end
+
+      raw = response[:body].split
+
+      headers = raw.shift.split(',')
+      headers[0] = 'date' # website returns "timestamp" but that
+                          # has an unintended side-effect when
+                          # the names are normalized.
+
+      close_inx     = headers.size - 2
+      adj_close_inx = close_inx + 1
+
+      headers.insert(adj_close_inx, 'adjusted_close')
+
+      data = raw.map do |e|
+        e2 = e.split(',')
+        e2[1..-2] = e2[1..-2].map(&:to_f) # converting open, high, low, close
+        e2[-1]    = e2[-1].to_i            # converting volumn
+        e2.insert(adj_close_inx, e2[close_inx]) # duplicate the close price as a fake adj close price
+        headers.zip(e2).to_h
+      end
+
+      # What oldest data first in the data frame
+      normalize_vector_names Daru::DataFrame.new(data.reverse)
+    end
+
+
+    # Append update_df rows to the base_df
+    #
+    # base_df is ascending on timestamp
+    # update_df is descending on timestamp
+    #
+    # base_df content came from CSV file downloaded
+    # from Yahoo Finance.
+    #
+    # update_df came from scraping the webpage
+    # at Yahoo Finance for the recent history.
+    #
+    # Returns a combined DataFrame.
+    #
+    def self.append(base_df, updates_df)
+      last_timestamp = Date.parse base_df.timestamp.last
+      filtered_df    = updates_df.filter_rows { |row| Date.parse(row[:timestamp]) > last_timestamp }
+
+      last_inx = filtered_df.size - 1
+
+      (0..last_inx).each do |x|
+        base_df.add_row filtered_df.row[last_inx-x]
+      end
+
+      base_df
+    end
+  end
+end
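The point of the new adapter is that Alpha Vantage data arrives in the same column layout the Yahoo Finance code already produces. A minimal usage sketch (not part of the diff), assuming an AV_API_KEY environment variable is set so that Nenv.av_api_key resolves, and that SQA.data_dir points at an existing directory; the exact vector names come from whatever YahooFinance::HEADERS defines:

av = SQA::DataFrame::AlphaVantage

recent_df = av.recent('aapl')              # ~100 most recent daily bars (outputsize=compact)
history   = av.recent('aapl', full: true)  # the full daily history (outputsize=full)

df = av.load('aapl')   # reads <data_dir>/aapl.csv, or fetches and caches it on first use

# Every row carries a faked adjusted close: the unadjusted close duplicated,
# because the free TIME_SERIES_DAILY endpoint has no adjusted prices.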
data/lib/sqa/data_frame/yahoo_finance.rb
CHANGED
@@ -59,6 +59,9 @@ class SQA::DataFrame < Daru::DataFrame
       response = CONNECTION.get("/quote/#{ticker.upcase}/history")
       doc      = Nokogiri::HTML(response.body)
       table    = doc.css('table').first
+
+      raise "NoDataError" if table.nil?
+
       rows = table.css('tbody tr')
 
       data = []
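This guard fails fast when the scraped page has no price-history table (unknown ticker, throttling, or a layout change) instead of letting the later table.css call blow up with a NoMethodError on nil. Because it is a bare raise with a string, callers see a RuntimeError. A handling sketch, assuming the scraper is reached through SQA::DataFrame::YahooFinance.recent:

begin
  df = SQA::DataFrame::YahooFinance.recent('aapl')
rescue RuntimeError => e
  raise unless e.message == "NoDataError"   # re-raise anything unexpected
  warn "Yahoo Finance returned no history table for aapl"
  df = nil
end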
data/lib/sqa/data_frame.rb
CHANGED
@@ -2,17 +2,19 @@
 # frozen_string_literal: true
 
 require_relative 'data_frame/yahoo_finance'
+require_relative 'data_frame/alpha_vantage'
 
 class Daru::DataFrame
 
   def to_csv(path_to_file, opts={})
     options = {
+      headers:    true,
       converters: :numeric
     }.merge(opts)
 
     writer = ::CSV.open(path_to_file, 'wb')
 
-    writer << vectors.to_a
+    writer << vectors.to_a if options[:headers]
 
     each_row do |row|
       writer << if options[:convert_comma]
@@ -31,27 +33,20 @@ end
 
 class SQA::DataFrame < Daru::DataFrame
 
-
   #################################################
+  def self.load(ticker, type="csv", options={}, &block)
+    source = SQA.data_dir + "#{ticker}.#{type}"
 
-
-    Pathname.new(SQA.config.data_dir) + filename
-  end
-
-  def self.load(filename, options={}, &block)
-    source = path(filename)
-    type   = source.extname.downcase
-
-    if ".csv" == type
+    if :csv == type
       from_csv(source, options={}, &block)
-    elsif
+    elsif :json == type
       from_json(source, options={}, &block)
-    elsif %
+    elsif %i[txt dat].include?(type)
       from_plaintext(source, options={}, &block)
-    elsif
+    elsif :xls == type
       from_excel(source, options={}, &block)
     else
-      raise SQA::
+      raise SQA::BadParameterError, "un-supported file type: #{type}"
     end
   end
 end
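Two behavioural changes ride along here: Daru::DataFrame#to_csv only writes the header row when headers: true (the default), and SQA::DataFrame.load now takes a ticker plus a type symbol rather than deriving the type from a filename extension. A sketch (not part of the diff), assuming aapl.csv already sits in SQA.data_dir; note that callers are expected to pass the type as a Symbol (:csv, :json, :txt, :dat, :xls) because the dispatch compares symbols even though the parameter default is the String "csv":

df = SQA::DataFrame.load('aapl', :csv)   # dispatches to from_csv(<data_dir>/aapl.csv)

df.to_csv(SQA.data_dir + 'aapl_data_only.csv', headers: false)  # data rows only, no header

SQA::DataFrame.load('aapl', :pdf)        # raises SQA::BadParameterError, "un-supported file type: pdf"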
data/lib/sqa/stock.rb
CHANGED
@@ -4,14 +4,27 @@ class SQA::Stock
   attr_accessor :company_name
   attr_accessor :df # The DataFrane
   attr_accessor :ticker
+  attr_accessor :type # type of data store (default is CSV)
   attr_accessor :indicators
 
-  def initialize(
+  def initialize(
+    ticker:,
+    source: :alpha_vantage,
+    type:   :csv
+  )
+    raise "Invalid Ticker #{ticker}" unless SQA::Ticker.valid?(ticker)
+
+    # TODO: Change API on lookup to return array instead of hash
+    #       Could this also incorporate the validation process to
+    #       save an additiona hash lookup?
+
+    entry = SQA::Ticker.lookup(ticker)
+
     @ticker = ticker.downcase
-    @company_name =
+    @company_name = entry[:name]
+    @exchange = entry[:exchange]
     @klass = "SQA::DataFrame::#{source.to_s.camelize}".constantize
     @type = type
-    @filename = "#{@ticker}.#{type}"
     @indicators = OpenStruct.new
 
     update_the_dataframe
@@ -19,16 +32,19 @@ class SQA::Stock
 
 
   def update_the_dataframe
-    df1 = @klass.load(@
+    df1 = @klass.load(@ticker, type)
     df2 = @klass.recent(@ticker)
-    @df = @klass.append(df1, df2)
 
-
-
+    df1_nrows = df1.nrows
+    @df = @klass.append(df1, df2)
+
+    if @df.nrows > df1_nrows
+      @df.send("to_#{@type}", SQA.data_dir + "#{ticker}.csv")
     end
 
     # Adding a ticker vector in case I want to do
     # some multi-stock analysis in the same data frame.
+    # For example to see how one stock coorelates with another.
     @df[:ticker] = @ticker
   end
 
@@ -36,7 +52,3 @@ class SQA::Stock
     "#{ticker} with #{@df.size} data points from #{@df.timestamp.first} to #{@df.timestamp.last}"
   end
 end
-
-__END__
-
-aapl = Stock.new('aapl', SQA::Datastore::CSV)
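The old positional constructor shown in the removed __END__ block gives way to keyword arguments plus ticker validation against the new SQA::Ticker registry. A sketch of the new call (not part of the diff), assuming the dumbstockapi listing has been, or can be, downloaded into the data directory so validation can pass:

aapl = SQA::Stock.new(ticker: 'aapl')                          # defaults: source: :alpha_vantage, type: :csv
msft = SQA::Stock.new(ticker: 'msft', source: :yahoo_finance)  # camelized to SQA::DataFrame::YahooFinance

aapl.company_name   # looked up from the dumbstockapi listing
aapl.df             # combined historical + recent Daru::DataFrame

SQA::Stock.new(ticker: 'not_a_ticker')   # raises RuntimeError, "Invalid Ticker not_a_ticker"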
data/lib/sqa/ticker.rb
ADDED
@@ -0,0 +1,68 @@
+# sqa/lib/sqa/ticker.rb
+#
+# Uses the https://dumbstockapi.com/ website to download a CSV file
+#
+# The CSV files have names like this:
+#   "dumbstockapi-2023-09-21T16 39 55.165Z.csv"
+#
+# which has this header:
+#   ticker,name,is_etf,exchange
+#
+# Not using the is_etf columns
+#
+class SQA::Ticker
+  FILENAME_PREFIX = "dumbstockapi"
+  CONNECTION      = Faraday.new(url: "https://dumbstockapi.com")
+  @@data          = {}
+
+
+  def self.download(country="US")
+    response = CONNECTION.get("/stock?format=csv&countries=#{country.upcase}").to_hash
+
+    if 200 == response[:status]
+      filename = response[:response_headers]["content-disposition"].split('=').last.gsub('"','')
+      out_path = Pathname.new(SQA.config.data_dir) + filename
+      out_path.write response[:body]
+    end
+
+    response[:status]
+  end
+
+
+  def self.load
+    tries = 0
+    found = false
+
+    until(found || tries >= 3) do
+      files = Pathname.new(SQA.config.data_dir).children.select{|c| c.basename.to_s.start_with?(FILENAME_PREFIX)}.sort
+      if files.empty?
+        download
+        tries += 1
+      else
+        found = true
+      end
+    end
+
+    raise "NoDataError" if files.empty?
+
+    load_from_csv files.last
+  end
+
+
+  def self.load_from_csv(csv_path)
+    CSV.foreach(csv_path, headers: true) do |row|
+      @@data[row["ticker"]] = {
+        name:     row["name"],
+        exchange: row["exchange"]
+      }
+    end
+
+    @@data
+  end
+
+
+
+  def self.data           = @@data.empty? ? load : @@data
+  def self.lookup(ticker) = data[ticker.upcase]
+  def self.valid?(ticker) = data.has_key?(ticker.upcase)
+end
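SQA::Ticker is a lazily populated class-level registry: the first call to data (and therefore to lookup or valid?) loads the newest dumbstockapi-*.csv from the data directory, downloading one first if none is present. A sketch (not part of the diff):

SQA::Ticker.valid?('aapl')   # => true/false; triggers the download + load on first use
SQA::Ticker.lookup('aapl')   # => { name: "...", exchange: "..." }, keyed by the upcased ticker
SQA::Ticker.download('CA')   # fetch another country's listing; returns the HTTP status code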
data/lib/sqa/version.rb
CHANGED
data/lib/sqa.rb
CHANGED
@@ -10,6 +10,7 @@ require 'nenv'
 require 'pathname'
 
 require_relative "sqa/version"
+require_relative "sqa/errors"
 
 
 unless defined?(HOME)
@@ -34,10 +35,12 @@ module SQA
       CLI.run(argv)
     else
       # There are no real command line parameters
-      # because the sqa gem is
+      # because the sqa gem is being required within
      # the context of a larger program.
     end
 
+    config.data_dir = homify(config.data_dir)
+
     Daru.lazy_update = config.lazy_update
     Daru.plotting_library = config.plotting_library
 
@@ -50,25 +53,16 @@ module SQA
     nil
   end
 
-  def
-
-  end
+  def debug?()   = @@config.debug?
+  def verbose?() = @@config.verbose?
 
-  def
-
-
+  def homify(filepath) = filepath.gsub(/^~/, Nenv.home)
+  def data_dir()       = Pathname.new(config.data_dir)
+  def config()         = @@config
 
   def config=(an_object)
     @@config = an_object
   end
-
-  def debug?
-    @@config.debug?
-  end
-
-  def verbose?
-    @@config.verbose?
-  end
 end
 end
 
@@ -77,9 +71,9 @@ end
 require_relative "sqa/config"
 require_relative "sqa/constants"
 require_relative "sqa/data_frame"
-require_relative "sqa/errors"
 require_relative "sqa/indicator"
 require_relative "sqa/portfolio"
 require_relative "sqa/strategy"
 require_relative "sqa/stock"
+require_relative "sqa/ticker"
 require_relative "sqa/trade"
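The module helpers collapse into endless method definitions, and the configured data directory is now home-expanded once at start-up and exposed as a Pathname via SQA.data_dir, which is what the new data-frame and ticker code builds its file paths from. A sketch of the resulting surface (not part of the diff):

SQA.homify('~/sqa_data')    # => "/home/you/sqa_data" (leading ~ swapped for Nenv.home)
SQA.data_dir                # => Pathname.new(SQA.config.data_dir)
SQA.data_dir + 'aapl.csv'   # how the CSV cache paths are assembled
SQA.debug?                  # delegates to config.debug?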
metadata
CHANGED
@@ -1,14 +1,14 @@
 --- !ruby/object:Gem::Specification
 name: sqa
 version: !ruby/object:Gem::Version
-  version: 0.0.
+  version: 0.0.13
 platform: ruby
 authors:
 - Dewayne VanHoozer
 autorequire:
 bindir: bin
 cert_chain: []
-date: 2023-09-
+date: 2023-09-22 00:00:00.000000000 Z
 dependencies:
 - !ruby/object:Gem::Dependency
   name: activesupport
@@ -329,6 +329,7 @@ files:
 - lib/sqa/config.rb
 - lib/sqa/constants.rb
 - lib/sqa/data_frame.rb
+- lib/sqa/data_frame/alpha_vantage.rb
 - lib/sqa/data_frame/yahoo_finance.rb
 - lib/sqa/errors.rb
 - lib/sqa/indicator.rb
@@ -366,6 +367,7 @@ files:
 - lib/sqa/strategy/random.rb
 - lib/sqa/strategy/rsi.rb
 - lib/sqa/strategy/sma.rb
+- lib/sqa/ticker.rb
 - lib/sqa/trade.rb
 - lib/sqa/version.rb
 - lib/sqa/web.rb