sqa 0.0.15 → 0.0.17
- checksums.yaml +4 -4
- data/README.md +4 -0
- data/checksums/sqa-0.0.17.gem.sha512 +1 -0
- data/lib/sqa/data_frame/alpha_vantage.rb +24 -69
- data/lib/sqa/data_frame/yahoo_finance.rb +4 -46
- data/lib/sqa/data_frame.rb +282 -32
- data/lib/sqa/errors.rb +27 -3
- data/lib/sqa/init.rb +9 -4
- data/lib/sqa/stock.rb +96 -36
- data/lib/sqa/strategy.rb +1 -1
- data/lib/sqa/version.rb +1 -1
- data/lib/sqa.rb +2 -3
- metadata +11 -18
- data/checksums/sqa-0.0.14.gem.sha512 +0 -1
- data/lib/patches/daru/category.rb +0 -19
- data/lib/patches/daru/data_frame.rb +0 -19
- data/lib/patches/daru/plotting/svg-graph/category.rb +0 -55
- data/lib/patches/daru/plotting/svg-graph/dataframe.rb +0 -105
- data/lib/patches/daru/plotting/svg-graph/vector.rb +0 -102
- data/lib/patches/daru/plotting/svg-graph.rb +0 -6
- data/lib/patches/daru/vector.rb +0 -19
- data/lib/patches/daru.rb +0 -19
checksums.yaml
CHANGED
@@ -1,7 +1,7 @@
 ---
 SHA256:
-  metadata.gz:
-  data.tar.gz:
+  metadata.gz: 911775606b7e0fa046261c5a9bc4d3be21ca9caf38c37011141f1393ac6e5063
+  data.tar.gz: d70ae996a39dbe7c386750286cb4cc437681d584945b31104e91b06c30b1600f
 SHA512:
-  metadata.gz:
-  data.tar.gz:
+  metadata.gz: '0074688f69947d20d5aae7c81b090a91fb4f06862084b2349b248ecdf2e17376f47d5a1acffbfb7b2700865cf25f5ff5ba054514d00ed88750f46224b4c9360e'
+  data.tar.gz: 38d1ebf511e9dfa2b87084ed77093e2fc03ccf6eeb6a00e42d1060acca82e135d0b4c0d36a25d9f9bc4a2dd9adf91749a6640aab14b304e33f24fee0c3eb7e15
data/README.md
CHANGED
@@ -1,3 +1,7 @@
+**Replacing Daru** with Hashie::Mash
+
+This is branch hashie_df
+
 # SQA - Simple Qualitative Analysis
 
 This is a very simplistic set of tools for running technical analysis on a stock portfolio.  Simplistic means it is not reliable nor intended for any kind of financial use.  Think of it as a training tool.  I do.  Its helping me understand why I need professional help from people who know what they are doing.
data/checksums/sqa-0.0.17.gem.sha512
ADDED
@@ -0,0 +1 @@
+2ee94a54d6ac3d13685dc9b91a2bae0fe75feab6148e1aa9a9d4096961b9b7b577b7ce9d1264f0cce260640515ddd86d5fd5fd2b66f49175844c903581ff6fd9
data/lib/sqa/data_frame/alpha_vantage.rb
CHANGED
@@ -5,9 +5,8 @@
 #
 
 
-class SQA::DataFrame < Daru::DataFrame
+class SQA::DataFrame
   class AlphaVantage
-    API_KEY    = Nenv.av_api_key
     CONNECTION = Faraday.new(url: 'https://www.alphavantage.co')
     HEADERS    = YahooFinance::HEADERS
 
@@ -24,47 +23,16 @@ class SQA::DataFrame < Daru::DataFrame
       "volume" => HEADERS[6]
     }
 
+    TRANSFORMERS = {
+      HEADERS[1] => -> (v) { v.to_f.round(3) },
+      HEADERS[2] => -> (v) { v.to_f.round(3) },
+      HEADERS[3] => -> (v) { v.to_f.round(3) },
+      HEADERS[4] => -> (v) { v.to_f.round(3) },
+      HEADERS[5] => -> (v) { v.to_f.round(3) },
+      HEADERS[6] => -> (v) { v.to_i }
+    }
 
     ################################################################
-    # Load a Dataframe from a csv file
-    def self.load(ticker, type="csv")
-      filepath = SQA.data_dir + "#{ticker}.#{type}"
-
-      if filepath.exist?
-        df = normalize_vector_names SQA::DataFrame.load(ticker, type)
-      else
-        df = recent(ticker, full: true)
-        df.send("to_#{type}",filepath)
-      end
-
-      df
-    end
-
-
-    # Normalize the vector (aka column) names as
-    # symbols using the standard names set by
-    # Yahoo Finance ... since it was the first one
-    # not because its anything special.
-    #
-    def self.normalize_vector_names(df)
-      headers = df.vectors.to_a
-
-      # convert vector names to symbols
-      # when they are strings.  They become stings
-      # when the data frame is saved to a CSV file
-      # and then loaded back in.
-
-      if headers.first == HEADERS.first.to_s
-        a_hash = {}
-        HEADERS.each {|k| a_hash[k.to_s] = k}
-        df.rename_vectors(a_hash) # renames from String to Symbol
-      else
-        df.rename_vectors(HEADER_MAPPING)
-      end
-
-      df
-    end
-
 
     # Get recent data from JSON API
     #
@@ -82,7 +50,8 @@ class SQA::DataFrame < Daru::DataFrame
     # and adding that to the data frame as if it were
     # adjusted.
     #
-    def self.recent(ticker, full: false)
+    def self.recent(ticker, full: false, from_date: nil)
+
       # NOTE: Using the CSV format because the JSON format has
       #       really silly key values.  The column names for the
       #       CSV format are much better.
@@ -90,7 +59,7 @@ class SQA::DataFrame < Daru::DataFrame
             "/query?" +
             "function=TIME_SERIES_DAILY&" +
             "symbol=#{ticker.upcase}&" +
-            "apikey=#{API_KEY}&" +
+            "apikey=#{SQA.av.key}&" +
             "datatype=csv&" +
             "outputsize=#{full ? 'full' : 'compact'}"
           ).to_hash
@@ -100,18 +69,19 @@ class SQA::DataFrame < Daru::DataFrame
       end
 
       raw = response[:body].split
-
       headers = raw.shift.split(',')
+
       headers[0] = 'date' # website returns "timestamp" but that
                           # has an unintended side-effect when
                           # the names are normalized.
+                          # SMELL: IS THIS STILL TRUE?
 
       close_inx     = headers.size - 2
      adj_close_inx = close_inx + 1
 
       headers.insert(adj_close_inx, 'adjusted_close')
 
-
+      aofh = raw.map do |e|
         e2 = e.split(',')
         e2[1..-2] = e2[1..-2].map(&:to_f) # converting open, high, low, close
         e2[-1]    = e2[-1].to_i # converting volumn
@@ -119,35 +89,20 @@ class SQA::DataFrame < Daru::DataFrame
         headers.zip(e2).to_h
       end
 
-
-
-
-
+      if from_date
+        aofh.reject!{|e| Date.parse(e['date']) < from_date}
+      end
 
-
-      #
-      # base_df is ascending on timestamp
-      # update_df is descending on timestamp
-      #
-      # base_df content came from CSV file downloaded
-      # from Yahoo Finance.
-      #
-      # update_df came from scraping the webpage
-      # at Yahoo Finance for the recent history.
-      #
-      # Returns a combined DataFrame.
-      #
-      def self.append(base_df, updates_df)
-        last_timestamp = Date.parse base_df.timestamp.last
-        filtered_df = updates_df.filter_rows { |row| Date.parse(row[:timestamp]) > last_timestamp }
+      return nil if aofh.empty?
 
-
+      # ensure tha the data frame is
+      # always sorted oldest to newest.
 
-
-
+      if aofh.first['date'] > aofh.last['date']
+        aofh.reverse!
       end
 
-
+      SQA::DataFrame.from_aofh(aofh, mapping: HEADER_MAPPING, transformers: TRANSFORMERS)
     end
   end
 end
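
The TRANSFORMERS table introduced above maps each column header to a proc that coerces the CSV's string cells into numbers; SQA::DataFrame#coerce_vectors (added in data_frame.rb further down) applies such a table in place. A minimal standalone Ruby sketch of the same mechanism, using made-up column names and values:

    # Per-column transformer procs applied to a hash-of-arrays,
    # mirroring what coerce_vectors does; :close and :volume are
    # illustrative, not the gem's actual HEADERS entries.
    transformers = {
      close:  ->(v) { v.to_f.round(3) },
      volume: ->(v) { v.to_i }
    }

    data = { close: ["189.4567", "190.1"], volume: ["1023", "998"] }

    transformers.each_pair do |key, transformer|
      data[key].map! { |v| transformer.call(v) }
    end

    data  # => {:close=>[189.457, 190.1], :volume=>[1023, 998]}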
data/lib/sqa/data_frame/yahoo_finance.rb
CHANGED
@@ -2,7 +2,7 @@
 # frozen_string_literal: true
 
 
-class SQA::DataFrame < Daru::DataFrame
+class SQA::DataFrame
   class YahooFinance
     CONNECTION = Faraday.new(url: 'https://finance.yahoo.com')
     HEADERS    = [
@@ -30,21 +30,6 @@ class SQA::DataFrame < Daru::DataFrame
     }
 
     ################################################################
-    def self.load(filename, options={}, &block)
-      df = SQA::DataFrame.load(filename, options={}, &block)
-
-      headers = df.vectors
-
-      if headers.first == HEADERS.first.to_s
-        a_hash = {}
-        HEADERS.each {|k| a_hash[k.to_s] = k}
-        df.rename_vectors(a_hash)
-      else
-        df.rename_vectors(HEADER_MAPPING)
-      end
-
-      df
-    end
 
 
     # Scrape the Yahoo Finance website to get recent
@@ -62,7 +47,7 @@ class SQA::DataFrame < Daru::DataFrame
 
       rows = table.css('tbody tr')
 
-
+      aofh = []
 
       rows.each do |row|
         cols = row.css('td').map{|c| c.children[0].text}
@@ -80,37 +65,10 @@ class SQA::DataFrame < Daru::DataFrame
         cols[0] = Date.parse(cols[0]).to_s
         cols[6] = cols[6].tr(',','').to_i
         (1..5).each {|x| cols[x] = cols[x].to_f}
-
-      end
-
-      Daru::DataFrame.new(data)
-    end
-
-
-    # Append update_df rows to the base_df
-    #
-    # base_df is ascending on timestamp
-    # update_df is descending on timestamp
-    #
-    # base_df content came from CSV file downloaded
-    # from Yahoo Finance.
-    #
-    # update_df came from scraping the webpage
-    # at Yahoo Finance for the recent history.
-    #
-    # Returns a combined DataFrame.
-    #
-    def self.append(base_df, updates_df)
-      last_timestamp = Date.parse base_df.timestamp.last
-      filtered_df = updates_df.filter_rows { |row| Date.parse(row[:timestamp]) > last_timestamp }
-
-      last_inx = filtered_df.size - 1
-
-      (0..last_inx).each do |x|
-        base_df.add_row filtered_df.row[last_inx-x]
+        aofh << HEADERS.zip(cols).to_h
       end
 
-
+      aofh
     end
   end
 end
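
Both adapters now hand back a plain array of hashes (aofh) instead of a Daru::DataFrame; each scraped table row becomes one hash by zipping the fixed header list with the cleaned-up cells. For illustration only (the header names and values here are assumed):

    headers = [:timestamp, :open, :high, :low, :close, :adj_close, :volume]
    cols    = ["2023-10-06", 189.0, 190.5, 188.2, 190.1, 190.1, 1_023_400]

    headers.zip(cols).to_h
    # => {:timestamp=>"2023-10-06", :open=>189.0, :high=>190.5,
    #     :low=>188.2, :close=>190.1, :adj_close=>190.1, :volume=>1023400}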
data/lib/sqa/data_frame.rb
CHANGED
@@ -1,52 +1,302 @@
 # lib/sqa/data_frame.rb
 # frozen_string_literal: true
 
-
-require_relative 'data_frame/alpha_vantage'
+require 'forwardable'
 
-
+require_relative 'data_frame/yahoo_finance'
+require_relative 'data_frame/alpha_vantage'
 
-
-
-
-
-
+class SQA::DataFrame
+  class Data < Hashie::Mash
+    # SNELL: Are all of these needed?
+    include Hashie::Extensions::Mash::KeepOriginalKeys
+    # include Hashie::Extensions::Mash::PermissiveRespondTo
+    include Hashie::Extensions::Mash::SafeAssignment
+    include Hashie::Extensions::Mash::SymbolizeKeys
+    # include Hashie::Extensions::Mash::DefineAccessors
+  end
+
+  extend Forwardable
 
-
+  # @data is of class Data
+  attr_accessor :data
+
+  # Expects a Hash of Arrays (hofa)
+  def initialize(a_hash={})
+    @data = Data.new(a_hash)
+  end
 
-  writer << vectors.to_a if options[:headers]
 
-
-
-
-
-
-
+  def to_csv(path_to_file)
+    CSV.open(path_to_file, 'w') do |csv|
+      csv << keys
+      size.times do |x|
+        csv << row(x)
+      end
     end
+  end
+
 
-
+  def to_json(path_to_file)
+    NotImplemented.raise
   end
-end
 
 
+  def to_aofh
+    NotImplemented.raise
+  end
 
 
-
+  def_delegator :@data, :to_h, :to_hofa
+  alias_method :to_h, :to_hofa
+
+
+  # The number of data rows
+  def size
+    data[@data.keys[0]].size
+  end
+  alias_method :nrows,  :size
+  alias_method :length, :size
+
+
+  def_delegator :@data, :keys
+  alias_method :vectors, :keys
+  alias_method :columns, :keys
+
+
+  def ncols
+    keys.size
+  end
+
+
+  def_delegator :@data, :values, :values
+  def_delegator :@data, :[], :[]
+  def_delegator :@data, :[]=, :[]=
+
+
+  def rows
+    result = []
+    (0..size - 1).each do |x|
+      entry = row(x)
+      result << entry
+    end
+    result
+  end
+  alias_method :to_a, :rows
+
+
+  def row(x)
+    if x.is_a?(Integer)
+      raise BadParameterError if x < 0 || x >= size
+
+    elsif x.is_a?(Hash)
+      raise BadParameterError, "x is #{x}" if x.size > 1
+      key = x.keys[0]
+      x   = @data[key].index(x[key])
+      raise BadParameterError, 'Not Found #{x}' if x.nil?
+      return keys.zip(row(x)).to_h
 
-    #################################################
-    def self.load(ticker, type="csv", options={}, &block)
-      source = SQA.data_dir + "#{ticker}.#{type}"
-
-      if :csv == type
-        from_csv(source, options={}, &block)
-      elsif :json == type
-        from_json(source, options={}, &block)
-      elsif %i[txt dat].include?(type)
-        from_plaintext(source, options={}, &block)
-      elsif :xls == type
-        from_excel(source, options={}, &block)
     else
-      raise
+      raise BadParameterError, "Unknown x.class: #{x.class}"
+    end
+
+    entry = []
+
+    keys.each do |key|
+      entry << @data[key][x]
+    end
+
+    entry
+  end
+
+
+  def append(new_df)
+    raise(BadParameterError, "Key mismatch") if keys != new_df.keys
+
+    keys.each do |key|
+      @data[key] += new_df[key]
+    end
+  end
+  alias_method :concat, :append
+
+
+  # Creates a new instance with new keys
+  # based on the mapping hash where
+  #   { old_key => new_key }
+  #
+  def rename(mapping)
+    SQA::DataFrame.new(
+      self.class.rename(
+        mapping,
+        @data.to_h
+      )
+    )
+  end
+  alias_method :rename_vectors, :rename
+
+
+  # Map the values of the vectors into different objects
+  # types is a Hash where the key is the vector name and
+  # the value is a proc
+  #
+  # For Example:
+  #   {
+  #     price: -> (v) {v.to_f.round(3)}
+  #   }
+  #
+  def coerce_vectors(transformers)
+    transformers.each_pair do |key, transformer|
+      @data[key].map!{|v| transformer.call(v)}
+    end
+  end
+
+
+  def method_missing(method_name, *args, &block)
+    if @data.respond_to?(method_name)
+      self.class.send(:define_method, method_name) do |*method_args, &method_block|
+        @data.send(method_name, *method_args, &method_block)
+      end
+      send(method_name, *args, &block)
+    else
+      super
+    end
+  end
+
+
+  def respond_to_missing?(method_name, include_private = false)
+    @data.respond_to?(method_name) || super
+  end
+
+  #################################################
+  class << self
+
+    def append(base_df, other_df)
+      base_df.append(other_df)
+    end
+
+
+    # TODO: The Data class has its own load which also supports
+    #       YAML by default.  Maybe this method should
+    #       make use of @data = Data.load(source)
+    #
+    def load(source:, mapping: {}, transformers:{})
+      file_type = source.extname[1..].downcase.to_sym
+
+      df = if :csv == file_type
+             from_csv_file(source, mapping: mapping, transformers: transformers)
+           elsif :json == file_type
+             from_json_file(source, mapping: mapping, transformers: transformers)
+           else
+             raise BadParameterError, "unsupported file type: #{file_type}"
+           end
+
+      unless transformers.empty?
+        df.coerce_vectors(transformers)
+      end
+
+      df
+    end
+
+
+    def from_aofh(aofh, mapping: {}, transformers: {})
+      new(
+        aofh_to_hofa(
+          aofh,
+          mapping:      mapping,
+          transformers: transformers
+        )
+      )
+    end
+
+
+    def from_csv_file(source, mapping: {}, transformers: {})
+      aofh = []
+
+      CSV.foreach(source, headers: true) do |row|
+        aofh << row.to_h
+      end
+
+      from_aofh(aofh, mapping: mapping, transformers: transformers)
+    end
+
+
+    def from_json_file(source, mapping: {}, transformers: {})
+      aofh = JSON.parse(source.read)
+
+      from_aofh(aofh, mapping: mapping, transformers: transformers)
+    end
+
+
+    # aofh -- Array of Hashes
+    # hofa -- Hash of Arrays
+    def aofh_to_hofa(aofh, mapping: {}, transformers: {})
+      hofa = {}
+      keys = aofh.first.keys
+
+      keys.each do |key|
+        hofa[key] = []
+      end
+
+      aofh.each do |entry|
+        keys.each do |key|
+          hofa[key] << entry[key]
+        end
+      end
+
+      # SMELL: This might be necessary
+      normalize_keys(hofa, adapter_mapping: mapping)
+    end
+
+
+    def normalize_keys(hofa, adapter_mapping: {})
+      hofa    = rename(adapter_mapping, hofa)
+      mapping = generate_mapping(hofa.keys)
+      rename(mapping, hofa)
+    end
+
+
+    def rename(mapping, hofa)
+      mapping.each_pair do |old_key, new_key|
+        hofa[new_key] = hofa.delete(old_key)
+      end
+
+      hofa
+    end
+
+
+    def generate_mapping(keys)
+      mapping = {}
+
+      keys.each do |key|
+        mapping[key] = underscore_key(sanitize_key(key)) unless key.is_a?(Symbol)
+      end
+
+      mapping
+    end
+
+
+    # returns a snake_case Symbol
+    def underscore_key(key)
+      key.to_s.gsub(/::/, '/').
+        gsub(/([A-Z]+)([A-Z][a-z])/,'\1_\2').
+        gsub(/([a-z\d])([A-Z])/,'\1_\2').
+        tr("-", "_").
+        downcase.to_sym
+    end
+
+
+    # removes punctuation and specal characters,
+    # replaces space with underscore.
+    def sanitize_key(key)
+      key.tr('.():/','').gsub(/^\d+.?\s/, "").tr(' ','_')
+    end
+
+
+    # returns true if key is in a date format
+    # like 2023-06-03
+    def is_date?(key)
+      !/(\d{4}-\d{2}-\d{2})/.match(key.to_s).nil?
     end
   end
 end
+
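
The core of the rewrite is the pivot from aofh (array of hashes, one per row) to hofa (hash of arrays, one per column), with string keys sanitized and snake_cased into symbols along the way. A usage sketch against the API shown above; the column names and values are hypothetical:

    require 'sqa'

    aofh = [
      { "date" => "2023-10-05", "Adjusted Close" => "189.4" },
      { "date" => "2023-10-06", "Adjusted Close" => "190.1" }
    ]

    df = SQA::DataFrame.from_aofh(aofh)
    df.keys    # => [:date, :adjusted_close]
    df.nrows   # => 2
    df.row(0)  # => ["2023-10-05", "189.4"]

    # Transformers are procs applied in place by coerce_vectors
    # (DataFrame.load calls it for you when transformers are given):
    df.coerce_vectors(adjusted_close: ->(v) { v.to_f.round(3) })
    df[:adjusted_close]  # => [189.4, 190.1]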
data/lib/sqa/errors.rb
CHANGED
@@ -1,6 +1,30 @@
 # lib/sqa/errors.rb
 
-
-
-
+# raised when a method is still in TODO state
+class ApiError < RuntimeError
+  def self.raise(why)
+    puts "="*64
+    puts "== API Error"
+    puts why
+    puts
+    puts "Callback trace:"
+    puts caller
+    puts "="*64
+    super
+  end
 end
+
+# raised when a method is still in TODO state
+class NotImplemented < RuntimeError
+  def self.raise
+    puts "="*64
+    puts "== Not Yet Implemented"
+    puts "Callback trace:"
+    puts caller
+    puts "="*64
+    super
+  end
+end
+
+# raised when an API contract is broken
+class BadParameterError < ArgumentError; end
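
Both helpers define raise as a class method: the banner and caller trace are printed first, then super hands off to Kernel#raise, so the exception that actually propagates is a RuntimeError carrying the given message. A quick demonstration:

    begin
      ApiError.raise("rate limit exceeded")  # prints the banner, then raises
    rescue RuntimeError => e
      e.message  # => "rate limit exceeded"
    end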
data/lib/sqa/init.rb
CHANGED
@@ -2,7 +2,13 @@
 
 module SQA
   class << self
-    @@config
+    @@config = nil
+    @@av     = ApiKeyManager::RateLimited.new(
+                 api_keys:    ENV['AV_API_KEYS'],
+                 delay:       true,
+                 rate_count:  ENV['AV_RATE_CNT'] || 5,
+                 rate_period: ENV['AV_RATE_PER'] || 60
+               )
 
     # Initializes the SQA modules
     # returns the configuration
@@ -26,12 +32,11 @@ module SQA
 
       config.data_dir = homify(config.data_dir)
 
-      Daru.lazy_update      = config.lazy_update
-      Daru.plotting_library = config.plotting_library
-
       config
     end
 
+    def av() = @@av
+
     def debug?()    = @@config.debug?
     def verbose?()  = @@config.verbose?
 
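
The new @@av object routes every Alpha Vantage request through a shared, rate-limited pool of API keys; SQA.av.key is what the data-frame and stock code below interpolate into request URLs. A sketch of the manager on its own, assuming the api_key_manager gem's RateLimited class behaves as configured here:

    require 'api_key_manager'

    av = ApiKeyManager::RateLimited.new(
      api_keys:    'demo_key_1,demo_key_2',  # normally ENV['AV_API_KEYS']
      delay:       true,  # presumably waits out the window rather than raising
      rate_count:  5,     # at most 5 key uses ...
      rate_period: 60     # ... per 60-second period
    )

    av.key  # => the next usable API key, e.g. "demo_key_1"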
data/lib/sqa/stock.rb
CHANGED
@@ -6,83 +6,143 @@
 # separate class and injected by the requiring program?
 
 class SQA::Stock
+  extend Forwardable
+
   CONNECTION = Faraday.new(url: "https://www.alphavantage.co")
 
-  attr_accessor :
-  attr_accessor :df
-
-  attr_accessor :
-  attr_accessor :
+  attr_accessor :data  # General Info      -- SQA::DataFrame::Data
+  attr_accessor :df    # Historical Prices -- SQA::DataFrame::Data
+
+  attr_accessor :klass         # class of historical and current prices
+  attr_accessor :transformers  # procs for changing column values from String to Numeric
 
   def initialize(
     ticker:,
-    source: :alpha_vantage
-    type: :csv
+    source: :alpha_vantage
   )
+
+    @ticker = ticker.downcase
+    @source = source
+
     raise "Invalid Ticker #{ticker}" unless SQA::Ticker.valid?(ticker)
 
-
-
-    # save an additiona hash lookup?
+    @data_path = SQA.data_dir + "#{@ticker}.json"
+    @df_path   = SQA.data_dir + "#{@ticker}.csv"
 
-
+    @klass        = "SQA::DataFrame::#{@source.to_s.camelize}".constantize
+    @transformers = "SQA::DataFrame::#{@source.to_s.camelize}::TRANSFORMERS".constantize
 
-    @
-
-
-
-
-
+    if @data_path.exist?
+      load
+    else
+      create
+      update
+      save
+    end
 
     update_the_dataframe
   end
 
 
-  def
-
-
+  def load
+    @data = SQA::DataFrame::Data.new(
+      JSON.parse(@data_path.read)
+    )
+  end
 
-    df1_nrows = df1.nrows
-    @df = @klass.append(df1, df2)
 
-
-
+  def create
+    @data =
+      SQA::DataFrame::Data.new(
+        {
+          ticker:     @ticker,
+          source:     @source,
+          indicators: { xyzzy: "Magic" },
+        }
+      )
+  end
+
+
+  def update
+    merge_overview
+  end
+
+
+  def save
+    @data_path.write @data.to_json
+  end
+
+
+  def_delegator :@data, :ticker,      :ticker
+  def_delegator :@data, :name,        :name
+  def_delegator :@data, :exchange,    :exchange
+  def_delegator :@data, :source,      :source
+  def_delegator :@data, :indicators,  :indicators
+  def_delegator :@data, :indicators=, :indicators=
+  def_delegator :@data, :overview,    :overview
+
+
+
+  def update_the_dataframe
+    if @df_path.exist?
+      @df = SQA::DataFrame.load(
+        source:       @df_path,
+        transformers: @transformers
+      )
+    else
+      @df = klass.recent(@ticker, full: true)
+      @df.to_csv(@df_path)
+      return
     end
 
-
-
-
-
+    from_date = Date.parse(@df.timestamp.last) + 1
+    df2       = klass.recent(@ticker, from_date: from_date)
+
+    return if df2.nil? # CSV file is up to date.
+
+    df_nrows = @df.nrows
+    @df.append(df2)
+
+    if @df.nrows > df_nrows
+      @df.to_csv(file_path)
+    end
   end
 
+
   def to_s
     "#{ticker} with #{@df.size} data points from #{@df.timestamp.first} to #{@df.timestamp.last}"
   end
+  alias_method :inspect, :to_s
 
-  # TODO: Turn this into a class Stock::Overview
-  # which is a sub-class of Hashie::Dash
-  def overview
-    return @overview unless @overview.nil?
 
+  def merge_overview
     temp = JSON.parse(
-      CONNECTION.get("/query?function=OVERVIEW&symbol=#{
+      CONNECTION.get("/query?function=OVERVIEW&symbol=#{ticker.upcase}&apikey=#{SQA.av.key}")
         .to_hash[:body]
     )
 
+    if temp.has_key? "Information"
+      ApiError.raise(temp["Information"])
+    end
+
     # TODO: CamelCase hash keys look common in Alpha Vantage
     #       JSON; look at making a special Hashie-based class
     #       to convert the keys to normal Ruby standards.
 
     temp2 = {}
 
-    string_values = %w[ address asset_type cik country currency
+    string_values = %w[ address asset_type cik country currency
+                        description dividend_date ex_dividend_date
+                        exchange fiscal_year_end industry latest_quarter
+                        name sector symbol
+                      ]
 
     temp.keys.each do |k|
       new_k        = k.underscore
       temp2[new_k] = string_values.include?(new_k) ? temp[k] : temp[k].to_f
     end
 
-    @overview =
+    @data.overview = temp2
   end
 
 
@@ -100,7 +160,7 @@ class SQA::Stock
 
     a_hash = JSON.parse(
       CONNECTION.get(
-        "/query?function=TOP_GAINERS_LOSERS&apikey=#{
+        "/query?function=TOP_GAINERS_LOSERS&apikey=#{SQA.av.key}"
       ).to_hash[:body]
     )
 
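
Taken together, constructing a stock now derives its data-frame adapter from the source name, caches general info as <ticker>.json and prices as <ticker>.csv under SQA.data_dir, and tops the CSV up from the API only when newer rows exist. Hypothetical usage (SQA.init is assumed to be the initializer referenced in init.rb; API keys must be configured):

    require 'sqa'
    SQA.init

    stock = SQA::Stock.new(ticker: 'aapl')  # source: defaults to :alpha_vantage

    stock.ticker    # => "aapl"  (delegated to @data)
    stock.overview  # merged in from the OVERVIEW endpoint
    stock.df.nrows  # price history kept current by update_the_dataframe
    puts stock      # "aapl with N data points from ... to ..."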
data/lib/sqa/strategy.rb
CHANGED
@@ -8,7 +8,7 @@ class SQA::Strategy
   end
 
   def add(a_strategy)
-    raise
+    raise BadParameterError unless [Class, Method].include? a_strategy.class
 
     a_proc = if Class == a_strategy.class
                a_strategy.method(:trade)
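
The new guard admits a strategy only when it is a Class exposing a trade class method (captured via a_strategy.method(:trade)) or an already-bound Method object. A minimal conforming strategy, for illustration:

    class RandomStrategy
      def self.trade(vector)
        %i[buy sell hold].sample
      end
    end

    strategy = SQA::Strategy.new
    strategy.add RandomStrategy                 # a Class with .trade
    strategy.add RandomStrategy.method(:trade)  # or a Method object
    # strategy.add "nope"                       # => raises BadParameterError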
data/lib/sqa/version.rb
CHANGED
data/lib/sqa.rb
CHANGED
@@ -17,9 +17,9 @@ end
 ## Additional Libraries
 
 require 'active_support/core_ext/string'
-require 'alphavantage'   # TODO: add rate limiter to it
+require 'alphavantage'   # TODO: add rate limiter to it; ** PR submitted! **
+require 'api_key_manager'
 require 'amazing_print'
-require 'daru'           # TODO: Replace this gem with something better
 require 'descriptive_statistics'
 require 'faraday'
 require 'hashie'
@@ -38,7 +38,6 @@ require_relative "sqa/errors"
 
 require_relative 'sqa/init.rb'
 
-# require_relative "patches/daru"  # TODO: extract Daru::DataFrame in new gem sqa-data_frame
 
 # TODO: Some of these components make direct calls to the
 #       Alpha Vantage API.  Convert them to use the
metadata
CHANGED
@@ -1,29 +1,29 @@
 --- !ruby/object:Gem::Specification
 name: sqa
 version: !ruby/object:Gem::Version
-  version: 0.0.15
+  version: 0.0.17
 platform: ruby
 authors:
 - Dewayne VanHoozer
 autorequire:
 bindir: bin
 cert_chain: []
-date: 2023-09
+date: 2023-10-09 00:00:00.000000000 Z
 dependencies:
 - !ruby/object:Gem::Dependency
   name: activesupport
   requirement: !ruby/object:Gem::Requirement
     requirements:
-    - -
+    - - '='
     - !ruby/object:Gem::Version
-      version:
+      version: 7.0.6
   type: :runtime
   prerelease: false
   version_requirements: !ruby/object:Gem::Requirement
     requirements:
-    - -
+    - - '='
     - !ruby/object:Gem::Version
-      version:
+      version: 7.0.6
 - !ruby/object:Gem::Dependency
   name: alphavantage
   requirement: !ruby/object:Gem::Requirement
@@ -39,7 +39,7 @@ dependencies:
   - !ruby/object:Gem::Version
     version: '0'
 - !ruby/object:Gem::Dependency
-  name:
+  name: api_key_manager
   requirement: !ruby/object:Gem::Requirement
     requirements:
     - - ">="
@@ -240,8 +240,8 @@ files:
 - checksums/sqa-0.0.11.gem.sha512
 - checksums/sqa-0.0.12.gem.sha512
 - checksums/sqa-0.0.13.gem.sha512
-- checksums/sqa-0.0.14.gem.sha512
 - checksums/sqa-0.0.15.gem.sha512
+- checksums/sqa-0.0.17.gem.sha512
 - checksums/sqa-0.0.2.gem.sha512
 - checksums/sqa-0.0.3.gem.sha512
 - checksums/sqa-0.0.4.gem.sha512
@@ -276,14 +276,6 @@ files:
 - docs/stochastic_oscillator.md
 - docs/strategy.md
 - docs/true_range.md
-- lib/patches/daru.rb
-- lib/patches/daru/category.rb
-- lib/patches/daru/data_frame.rb
-- lib/patches/daru/plotting/svg-graph.rb
-- lib/patches/daru/plotting/svg-graph/category.rb
-- lib/patches/daru/plotting/svg-graph/dataframe.rb
-- lib/patches/daru/plotting/svg-graph/vector.rb
-- lib/patches/daru/vector.rb
 - lib/sqa.rb
 - lib/sqa/activity.rb
 - lib/sqa/analysis.rb
@@ -340,7 +332,8 @@ licenses:
 metadata:
   allowed_push_host: https://rubygems.org
   homepage_uri: https://github.com/MadBomber/sqa
-  source_code_uri: https://github.com/MadBomber/
+  source_code_uri: https://github.com/MadBomber/sqa
+  changelog_uri: https://github.com/MadBomber/sqa
 post_install_message:
 rdoc_options: []
 require_paths:
@@ -356,7 +349,7 @@ required_rubygems_version: !ruby/object:Gem::Requirement
   - !ruby/object:Gem::Version
     version: '0'
 requirements: []
-rubygems_version: 3.4.
+rubygems_version: 3.4.20
 signing_key:
 specification_version: 4
 summary: sqa - Stock Qualitative Analysis
data/checksums/sqa-0.0.14.gem.sha512
DELETED
@@ -1 +0,0 @@
-ae291d1c8a3a80fc6f24a6a1194c6db1b6e1fbdbee586546ae34db6ee304a3e431ea59a154ea976af0f25f3b0d6519f2e0a1aad4ddf3c3cdf77b7d37aabf425f
data/lib/patches/daru/category.rb
DELETED
@@ -1,19 +0,0 @@
-# lib/patches/daru/category.rb
-
-module Daru
-  module Category
-
-    def plotting_lig lib
-      if :svg_graph = lib
-        @plotting_library = lib
-        if Daru.send("has_#{lib}?".to_sym)
-          extend Module.const_get(
-            "Daru::Plotting::Category::#{lib.to_s.capitalize}Library"
-          )
-        end
-      else
-        super
-      end
-    end
-  end
-end
data/lib/patches/daru/data_frame.rb
DELETED
@@ -1,19 +0,0 @@
-# lib/patches/daru/data_frame.rb
-
-module Daru
-  module DataFrame
-
-    def plotting_lig lib
-      if :svg_graph = lib
-        @plotting_library = lib
-        if Daru.send("has_#{lib}?".to_sym)
-          extend Module.const_get(
-            "Daru::Plotting::DataFrame::#{lib.to_s.capitalize}Library"
-          )
-        end
-      else
-        super
-      end
-    end
-  end
-end
data/lib/patches/daru/plotting/svg-graph/category.rb
DELETED
@@ -1,55 +0,0 @@
-# lib/patches/daru/plotting/svg-graph/category.rb
-
-# NOTE: Code originally from Gruff
-# TODO: Tailor the code to SvgGraph
-
-module Daru
-  module Plotting
-    module Category
-      module SvgGraphLibrary
-        def plot opts={}
-          type = opts[:type] || :bar
-          size = opts[:size] || 500
-          case type
-          when :bar, :pie, :sidebar
-            plot = send("category_#{type}_plot".to_sym, size, opts[:method])
-          else
-            raise ArgumentError, 'This type of plot is not supported.'
-          end
-          yield plot if block_given?
-          plot
-        end
-
-        private
-
-        def category_bar_plot size, method
-          plot = SvgGraph::Bar.new size
-          method ||= :count
-          dv = frequencies(method)
-          plot.labels = size.times.to_a.zip(dv.index.to_a).to_h
-          plot.data name || :vector, dv.to_a
-          plot
-        end
-
-        def category_pie_plot size, method
-          plot = SvgGraph::Pie.new size
-          method ||= :count
-          frequencies(method).each_with_index do |data, index|
-            plot.data index, data
-          end
-          plot
-        end
-
-        def category_sidebar_plot size, method
-          plot = SvgGraph::SideBar.new size
-          plot.labels = {0 => (name.to_s || 'vector')}
-          method ||= :count
-          frequencies(method).each_with_index do |data, index|
-            plot.data index, data
-          end
-          plot
-        end
-      end
-    end
-  end
-end
data/lib/patches/daru/plotting/svg-graph/dataframe.rb
DELETED
@@ -1,105 +0,0 @@
-# lib/patches/daru/plotting/svg-graph/dataframe.rb
-
-# NOTE: Code originally from Gruff
-# TODO: Tailor the code to SvgGraph
-
-module Daru
-  module Plotting
-    module DataFrame
-      module SvgGraphLibrary
-        def plot opts={}
-          opts[:type] ||= :line
-          opts[:size] ||= 500
-
-          x = extract_x_vector opts[:x]
-          y = extract_y_vectors opts[:y]
-
-          opts[:type] = process_type opts[:type], opts[:categorized]
-
-          type = opts[:type]
-
-          if %o[line bar scatter].include? type
-            graph = send("#{type}_plot", size, x, y)
-
-          elsif :scatter_categorized == type
-            graph = scatter_with_category(size, x, y, opts[:categorized])
-
-          else
-            raise ArgumentError, 'This type of plot is not supported.'
-          end
-
-          yield graph if block_given?
-          graph
-        end
-
-        private
-
-        def process_type type, categorized
-          type == :scatter && categorized ? :scatter_categorized : type
-        end
-
-        ##########################################################
-        def line_plot size, x, y
-          plot = SvgGraph::Line.new size
-          plot.labels = size.times.to_a.zip(x).to_h
-          y.each do |vec|
-            plot.data vec.name || :vector, vec.to_a
-          end
-          plot
-        end
-
-        ##########################################################
-        def bar_plot size, x, y
-          plot = SvgGraph::Bar.new size
-          plot.labels = size.times.to_a.zip(x).to_h
-          y.each do |vec|
-            plot.data vec.name || :vector, vec.to_a
-          end
-          plot
-        end
-
-        ##########################################################
-        def scatter_plot size, x, y
-          plot = SvgGraph::Scatter.new size
-          y.each do |vec|
-            plot.data vec.name || :vector, x, vec.to_a
-          end
-          plot
-        end
-
-        ##########################################################
-        def scatter_with_category size, x, y, opts
-          x = Daru::Vector.new x
-          y = y.first
-          plot = SvgGraph::Scatter.new size
-          cat_dv = self[opts[:by]]
-
-          cat_dv.categories.each do |cat|
-            bools = cat_dv.eq cat
-            plot.data cat, x.where(bools).to_a, y.where(bools).to_a
-          end
-
-          plot
-        end
-
-        def extract_x_vector x_name
-          x_name && self[x_name].to_a || index.to_a
-        end
-
-        def extract_y_vectors y_names
-          y_names =
-            case y_names
-            when nil
-              vectors.to_a
-            when Array
-              y_names
-            else
-              [y_names]
-            end
-
-          y_names.map { |y| self[y] }.select(&:numeric?)
-        end
-      end
-    end
-  end
-end
data/lib/patches/daru/plotting/svg-graph/vector.rb
DELETED
@@ -1,102 +0,0 @@
-# lib/patches/daru/plotting/svg-graph/vector.rb
-
-# NOTE: Code originally from Gruff
-# TODO: Tailor the code to SvgGraph
-
-module Daru
-  module Plotting
-    module Vector
-      module SvgGraphLibrary
-        def plot opts={}
-          opts[:type]   ||= :line
-          opts[:size]   ||= 500 # SMELL: What is this?
-          opts[:height] ||= 720
-          opts[:width]  ||= 720
-          opts[:title]  ||= name || :vector
-
-          debug_me{[
-            :opts,
-            :self
-          ]}
-
-          if %i[line bar pie scatter sidebar].include? type
-            graph = send("#{type}_plot", opts)
-          else
-            raise ArgumentError, 'This type of plot is not supported.'
-          end
-
-          yield graph if block_given?
-
-          graph
-        end
-
-        private
-
-        ####################################################
-        def line_plot opts={}
-          graph = SVG::Graph::Line.new opts
-
-          graph.add_data(
-            data: to_a,
-            title: opts[:title]
-          )
-
-          graph
-        end
-
-
-        ####################################################
-        def bar_plot opts
-          graph = SVG::Graph::Bar.new opts
-
-          graph.add_data(
-            data: to_a,
-            title: opts[:title]
-          )
-
-          graph
-        end
-
-
-        ####################################################
-        def pie_plot opts
-          graph = SVG::Graph::Pie.new opts
-
-          graph.add_data(
-            data: to_a,
-            title: opts[:title]
-          )
-
-          graph
-        end
-
-
-        ####################################################
-        def scatter_plot size
-          graph = SVG::Graph::Plot.new opts
-
-
-          graph.add_data(
-            data: to_a.zip(index.to_a)
-            title: opts[:title]
-          )
-
-          graph
-        end
-
-
-        ####################################################
-        def sidebar_plot size
-          graph = SVG::Graph::BarHorizontal.new opts
-
-          graph.add_data(
-            data: to_a,
-            title: opts[:title]
-          )
-
-          graph
-        end
-      end
-    end
-  end
-end
data/lib/patches/daru/vector.rb
DELETED
@@ -1,19 +0,0 @@
-# lib/patches/daru/vector.rb
-
-module Daru
-  module Vector
-
-    def plotting_lig lib
-      if :svg_graph = lib
-        @plotting_library = lib
-        if Daru.send("has_#{lib}?".to_sym)
-          extend Module.const_get(
-            "Daru::Plotting::Vector::#{lib.to_s.capitalize}Library"
-          )
-        end
-      else
-        super
-      end
-    end
-  end
-end
data/lib/patches/daru.rb
DELETED
@@ -1,19 +0,0 @@
-# lib/patches/daru.rb
-
-require_relative 'daru/category'
-require_relative 'daru/data_frame'
-require_relative 'daru/vector'
-
-module Daru
-  create_has_library :svg_graph
-
-  class << self
-    def plotting_library lib
-      if :svg_graph = lib
-        @plotting_library = lib
-      else
-        super
-      end
-    end
-  end
-end