sqa 0.0.15 → 0.0.18
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- checksums.yaml +4 -4
- data/README.md +170 -32
- data/checksums/sqa-0.0.17.gem.sha512 +1 -0
- data/checksums/sqa-0.0.18.gem.sha512 +1 -0
- data/lib/sqa/data_frame/alpha_vantage.rb +24 -69
- data/lib/sqa/data_frame/yahoo_finance.rb +4 -46
- data/lib/sqa/data_frame.rb +282 -32
- data/lib/sqa/errors.rb +27 -3
- data/lib/sqa/init.rb +9 -4
- data/lib/sqa/stock.rb +96 -36
- data/lib/sqa/strategy.rb +1 -1
- data/lib/sqa/version.rb +1 -1
- data/lib/sqa.rb +4 -4
- metadata +23 -29
- data/checksums/sqa-0.0.14.gem.sha512 +0 -1
- data/lib/patches/daru/category.rb +0 -19
- data/lib/patches/daru/data_frame.rb +0 -19
- data/lib/patches/daru/plotting/svg-graph/category.rb +0 -55
- data/lib/patches/daru/plotting/svg-graph/dataframe.rb +0 -105
- data/lib/patches/daru/plotting/svg-graph/vector.rb +0 -102
- data/lib/patches/daru/plotting/svg-graph.rb +0 -6
- data/lib/patches/daru/vector.rb +0 -19
- data/lib/patches/daru.rb +0 -19
data/lib/sqa/data_frame.rb
CHANGED
@@ -1,52 +1,302 @@
# lib/sqa/data_frame.rb
# frozen_string_literal: true

-
-require_relative 'data_frame/alpha_vantage'
+require 'forwardable'

-
+require_relative 'data_frame/yahoo_finance'
+require_relative 'data_frame/alpha_vantage'

-
-
-
-
-
+class SQA::DataFrame
+  class Data < Hashie::Mash
+    # SNELL: Are all of these needed?
+    include Hashie::Extensions::Mash::KeepOriginalKeys
+    # include Hashie::Extensions::Mash::PermissiveRespondTo
+    include Hashie::Extensions::Mash::SafeAssignment
+    include Hashie::Extensions::Mash::SymbolizeKeys
+    # include Hashie::Extensions::Mash::DefineAccessors
+  end
+
+  extend Forwardable

-
+  # @data is of class Data
+  attr_accessor :data
+
+  # Expects a Hash of Arrays (hofa)
+  def initialize(a_hash={})
+    @data = Data.new(a_hash)
+  end

-    writer << vectors.to_a if options[:headers]

-
-
-
-
-
-
+  def to_csv(path_to_file)
+    CSV.open(path_to_file, 'w') do |csv|
+      csv << keys
+      size.times do |x|
+        csv << row(x)
+      end
    end
+  end
+

-
+  def to_json(path_to_file)
+    NotImplemented.raise
  end
-  end


+  def to_aofh
+    NotImplemented.raise
+  end


-
+  def_delegator :@data, :to_h, :to_hofa
+  alias_method :to_h, :to_hofa
+
+
+  # The number of data rows
+  def size
+    data[@data.keys[0]].size
+  end
+  alias_method :nrows, :size
+  alias_method :length, :size
+
+
+  def_delegator :@data, :keys
+  alias_method :vectors, :keys
+  alias_method :columns, :keys
+
+
+  def ncols
+    keys.size
+  end
+
+
+  def_delegator :@data, :values, :values
+  def_delegator :@data, :[], :[]
+  def_delegator :@data, :[]=, :[]=
+
+
+  def rows
+    result = []
+    (0..size - 1).each do |x|
+      entry = row(x)
+      result << entry
+    end
+    result
+  end
+  alias_method :to_a, :rows
+
+
+  def row(x)
+    if x.is_a?(Integer)
+      raise BadParameterError if x < 0 || x >= size
+
+    elsif x.is_a?(Hash)
+      raise BadParameterError, "x is #{x}" if x.size > 1
+      key = x.keys[0]
+      x = @data[key].index(x[key])
+      raise BadParameterError, 'Not Found #{x}' if x.nil?
+      return keys.zip(row(x)).to_h

-  #################################################
-  def self.load(ticker, type="csv", options={}, &block)
-    source = SQA.data_dir + "#{ticker}.#{type}"
-
-    if :csv == type
-      from_csv(source, options={}, &block)
-    elsif :json == type
-      from_json(source, options={}, &block)
-    elsif %i[txt dat].include?(type)
-      from_plaintext(source, options={}, &block)
-    elsif :xls == type
-      from_excel(source, options={}, &block)
    else
-      raise
+      raise BadParameterError, "Unknown x.class: #{x.class}"
+    end
+
+    entry = []
+
+    keys.each do |key|
+      entry << @data[key][x]
+    end
+
+    entry
+  end
+
+
+  def append(new_df)
+    raise(BadParameterError, "Key mismatch") if keys != new_df.keys
+
+    keys.each do |key|
+      @data[key] += new_df[key]
+    end
+  end
+  alias_method :concat, :append
+
+
+  # Creates a new instance with new keys
+  # based on the mapping hash where
+  #   { old_key => new_key }
+  #
+  def rename(mapping)
+    SQA::DataFrame.new(
+      self.class.rename(
+        mapping,
+        @data.to_h
+      )
+    )
+  end
+  alias_method :rename_vectors, :rename
+
+
+  # Map the values of the vectors into different objects
+  # types is a Hash where the key is the vector name and
+  # the value is a proc
+  #
+  # For Example:
+  #   {
+  #     price: -> (v) {v.to_f.round(3)}
+  #   }
+  #
+  def coerce_vectors(transformers)
+    transformers.each_pair do |key, transformer|
+      @data[key].map!{|v| transformer.call(v)}
+    end
+  end
+
+
+  def method_missing(method_name, *args, &block)
+    if @data.respond_to?(method_name)
+      self.class.send(:define_method, method_name) do |*method_args, &method_block|
+        @data.send(method_name, *method_args, &method_block)
+      end
+      send(method_name, *args, &block)
+    else
+      super
+    end
+  end
+
+
+  def respond_to_missing?(method_name, include_private = false)
+    @data.respond_to?(method_name) || super
+  end
+
+  #################################################
+  class << self
+
+    def append(base_df, other_df)
+      base_df.append(other_df)
+    end
+
+
+    # TODO: The Data class has its own load which also supports
+    #       YAML by default.  Maybe this method should
+    #       make use of @data = Data.load(source)
+    #
+    def load(source:, mapping: {}, transformers:{})
+      file_type = source.extname[1..].downcase.to_sym
+
+      df = if :csv == file_type
+             from_csv_file(source, mapping: mapping, transformers: transformers)
+           elsif :json == file_type
+             from_json_file(source, mapping: mapping, transformers: transformers)
+           else
+             raise BadParameterError, "unsupported file type: #{file_type}"
+           end
+
+      unless transformers.empty?
+        df.coerce_vectors(transformers)
+      end
+
+      df
+    end
+
+
+    def from_aofh(aofh, mapping: {}, transformers: {})
+      new(
+        aofh_to_hofa(
+          aofh,
+          mapping: mapping,
+          transformers: transformers
+        )
+      )
+    end
+
+
+    def from_csv_file(source, mapping: {}, transformers: {})
+      aofh = []
+
+      CSV.foreach(source, headers: true) do |row|
+        aofh << row.to_h
+      end
+
+      from_aofh(aofh, mapping: mapping, transformers: transformers)
+    end
+
+
+    def from_json_file(source, mapping: {}, transformers: {})
+      aofh = JSON.parse(source.read)
+
+      from_aofh(aofh, mapping: mapping, transformers: transformers)
+    end
+
+
+    # aofh -- Array of Hashes
+    # hofa -- Hash of Arrays
+    def aofh_to_hofa(aofh, mapping: {}, transformers: {})
+      hofa = {}
+      keys = aofh.first.keys
+
+      keys.each do |key|
+        hofa[key] = []
+      end
+
+      aofh.each do |entry|
+        keys.each do |key|
+          hofa[key] << entry[key]
+        end
+      end
+
+      # SMELL: This might be necessary
+      normalize_keys(hofa, adapter_mapping: mapping)
+    end
+
+
+    def normalize_keys(hofa, adapter_mapping: {})
+      hofa = rename(adapter_mapping, hofa)
+      mapping = generate_mapping(hofa.keys)
+      rename(mapping, hofa)
+    end
+
+
+    def rename(mapping, hofa)
+      mapping.each_pair do |old_key, new_key|
+        hofa[new_key] = hofa.delete(old_key)
+      end
+
+      hofa
+    end
+
+
+    def generate_mapping(keys)
+      mapping = {}
+
+      keys.each do |key|
+        mapping[key] = underscore_key(sanitize_key(key)) unless key.is_a?(Symbol)
+      end
+
+      mapping
+    end
+
+
+    # returns a snake_case Symbol
+    def underscore_key(key)
+      key.to_s.gsub(/::/, '/').
+        gsub(/([A-Z]+)([A-Z][a-z])/,'\1_\2').
+        gsub(/([a-z\d])([A-Z])/,'\1_\2').
+        tr("-", "_").
+        downcase.to_sym
+    end
+
+
+    # removes punctuation and specal characters,
+    # replaces space with underscore.
+    def sanitize_key(key)
+      key.tr('.():/','').gsub(/^\d+.?\s/, "").tr(' ','_')
+    end
+
+
+    # returns true if key is in a date format
+    # like 2023-06-03
+    def is_date?(key)
+      !/(\d{4}-\d{2}-\d{2})/.match(key.to_s).nil?
    end
  end
end
+
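
For orientation, a small hypothetical sketch of the Hashie-backed, hash-of-arrays API introduced above; only the method names come from the diff, while the column names, values, and output path are invented:

    # Hypothetical usage of the rewritten hash-of-arrays DataFrame.
    require 'sqa'

    df = SQA::DataFrame.new(
      timestamp: %w[2023-06-01 2023-06-02],
      close:     %w[189.95 190.68]          # CSV values arrive as Strings
    )

    df.keys                            # column names (aliased as #vectors / #columns)
    df.nrows                           # 2 data rows
    df.row(0)                          # first row as an Array of values
    df.row(timestamp: "2023-06-02")    # row located by value, returned as a Hash

    # Transformers (as used by .load) convert String columns to numbers.
    df.coerce_vectors(close: ->(v) { v.to_f.round(3) })
    df.to_csv("prices.csv")            # header row followed by each data row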
data/lib/sqa/errors.rb
CHANGED
@@ -1,6 +1,30 @@
# lib/sqa/errors.rb

-
-
-
+# raised when a method is still in TODO state
+class ApiError < RuntimeError
+  def self.raise(why)
+    puts "="*64
+    puts "== API Error"
+    puts why
+    puts
+    puts "Callback trace:"
+    puts caller
+    puts "="*64
+    super
+  end
end
+
+# raised when a method is still in TODO state
+class NotImplemented < RuntimeError
+  def self.raise
+    puts "="*64
+    puts "== Not Yet Implemented"
+    puts "Callback trace:"
+    puts caller
+    puts "="*64
+    super
+  end
+end
+
+# raised when an API contract is broken
+class BadParameterError < ArgumentError; end
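
A short sketch of how the new error helpers behave. One assumption worth flagging: `super` inside these class-level `raise` methods appears to delegate to `Kernel#raise`, so what actually propagates after the banner is printed is a plain `RuntimeError`:

    # Hypothetical illustration of the new error helpers; not part of the gem.
    require 'sqa'

    def fancy_feature
      NotImplemented.raise   # prints the banner plus caller trace, then re-raises
    end

    begin
      fancy_feature
    rescue RuntimeError => e
      warn "still TODO: #{e.class}"
    end

    begin
      ApiError.raise("Alpha Vantage rate limit reached")
    rescue RuntimeError => e
      warn e.message         # the "why" string passed to ApiError.raise
    end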
data/lib/sqa/init.rb
CHANGED
@@ -2,7 +2,13 @@

module SQA
  class << self
-    @@config
+    @@config = nil
+    @@av = ApiKeyManager::RateLimited.new(
+      api_keys: ENV['AV_API_KEYS'],
+      delay: true,
+      rate_count: ENV['AV_RATE_CNT'] || 5,
+      rate_period: ENV['AV_RATE_PER'] || 60
+    )

    # Initializes the SQA modules
    # returns the configuration

@@ -26,12 +32,11 @@ module SQA

      config.data_dir = homify(config.data_dir)

-      Daru.lazy_update = config.lazy_update
-      Daru.plotting_library = config.plotting_library
-
      config
    end

+    def av() = @@av
+
    def debug?() = @@config.debug?
    def verbose?() = @@config.verbose?

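
A hypothetical setup for the new class-level key manager. The environment variable names and the `SQA.av.key` call come from this diff and from stock.rb below; the multi-key value format is an assumption about the api_key_manager gem, not something documented here:

    # Assumed format for AV_API_KEYS; set everything before requiring sqa,
    # since @@av is built when sqa/init.rb is loaded.
    ENV['AV_API_KEYS'] = 'demo_key_1 demo_key_2'
    ENV['AV_RATE_CNT'] = '5'     # requests allowed ...
    ENV['AV_RATE_PER'] = '60'    # ... per 60 seconds

    require 'sqa'
    SQA.init

    # SQA.av is the shared ApiKeyManager::RateLimited instance; SQA.av.key
    # hands back an API key while honoring the configured rate limit
    # (see the OVERVIEW and TOP_GAINERS_LOSERS requests in stock.rb below).
    url = "/query?function=OVERVIEW&symbol=AAPL&apikey=#{SQA.av.key}"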
data/lib/sqa/stock.rb
CHANGED
@@ -6,83 +6,143 @@
# separate class and injected by the requiring program?

class SQA::Stock
+  extend Forwardable
+
  CONNECTION = Faraday.new(url: "https://www.alphavantage.co")

-  attr_accessor :
-  attr_accessor :df
-
-  attr_accessor :
-  attr_accessor :
+  attr_accessor :data # General Info -- SQA::DataFrame::Data
+  attr_accessor :df # Historical Prices -- SQA::DataFrame::Data
+
+  attr_accessor :klass # class of historical and current prices
+  attr_accessor :transformers # procs for changing column values from String to Numeric

  def initialize(
    ticker:,
-    source: :alpha_vantage
-    type: :csv
+    source: :alpha_vantage
  )
+
+    @ticker = ticker.downcase
+    @source = source
+
    raise "Invalid Ticker #{ticker}" unless SQA::Ticker.valid?(ticker)

-
-
-    # save an additiona hash lookup?
+    @data_path = SQA.data_dir + "#{@ticker}.json"
+    @df_path = SQA.data_dir + "#{@ticker}.csv"

-
+    @klass = "SQA::DataFrame::#{@source.to_s.camelize}".constantize
+    @transformers = "SQA::DataFrame::#{@source.to_s.camelize}::TRANSFORMERS".constantize

-    @
-
-
-
-
-
+    if @data_path.exist?
+      load
+    else
+      create
+      update
+      save
+    end

    update_the_dataframe
  end


-  def
-
-
+  def load
+    @data = SQA::DataFrame::Data.new(
+      JSON.parse(@data_path.read)
+    )
+  end

-    df1_nrows = df1.nrows
-    @df = @klass.append(df1, df2)

-
-
+  def create
+    @data =
+      SQA::DataFrame::Data.new(
+        {
+          ticker: @ticker,
+          source: @source,
+          indicators: { xyzzy: "Magic" },
+        }
+      )
+  end
+
+
+  def update
+    merge_overview
+  end
+
+
+  def save
+    @data_path.write @data.to_json
+  end
+
+
+  def_delegator :@data, :ticker, :ticker
+  def_delegator :@data, :name, :name
+  def_delegator :@data, :exchange, :exchange
+  def_delegator :@data, :source, :source
+  def_delegator :@data, :indicators, :indicators
+  def_delegator :@data, :indicators=, :indicators=
+  def_delegator :@data, :overview, :overview
+
+
+
+  def update_the_dataframe
+    if @df_path.exist?
+      @df = SQA::DataFrame.load(
+        source: @df_path,
+        transformers: @transformers
+      )
+    else
+      @df = klass.recent(@ticker, full: true)
+      @df.to_csv(@df_path)
+      return
    end

-
-
-
-
+    from_date = Date.parse(@df.timestamp.last) + 1
+    df2 = klass.recent(@ticker, from_date: from_date)
+
+    return if df2.nil? # CSV file is up to date.
+
+    df_nrows = @df.nrows
+    @df.append(df2)
+
+    if @df.nrows > df_nrows
+      @df.to_csv(@df_path)
+    end
  end

+
  def to_s
    "#{ticker} with #{@df.size} data points from #{@df.timestamp.first} to #{@df.timestamp.last}"
  end
+  alias_method :inspect, :to_s

-  # TODO: Turn this into a class Stock::Overview
-  # which is a sub-class of Hashie::Dash
-  def overview
-    return @overview unless @overview.nil?

+  def merge_overview
    temp = JSON.parse(
-      CONNECTION.get("/query?function=OVERVIEW&symbol=#{
+      CONNECTION.get("/query?function=OVERVIEW&symbol=#{ticker.upcase}&apikey=#{SQA.av.key}")
        .to_hash[:body]
    )

+    if temp.has_key? "Information"
+      ApiError.raise(temp["Information"])
+    end
+
    # TODO: CamelCase hash keys look common in Alpha Vantage
    # JSON; look at making a special Hashie-based class
    # to convert the keys to normal Ruby standards.

    temp2 = {}

-    string_values = %w[ address asset_type cik country currency
+    string_values = %w[ address asset_type cik country currency
+                        description dividend_date ex_dividend_date
+                        exchange fiscal_year_end industry latest_quarter
+                        name sector symbol
+                      ]

    temp.keys.each do |k|
      new_k = k.underscore
      temp2[new_k] = string_values.include?(new_k) ? temp[k] : temp[k].to_f
    end

-    @overview =
+    @data.overview = temp2
  end


@@ -100,7 +160,7 @@ class SQA::Stock

    a_hash = JSON.parse(
      CONNECTION.get(
-        "/query?function=TOP_GAINERS_LOSERS&apikey=#{
+        "/query?function=TOP_GAINERS_LOSERS&apikey=#{SQA.av.key}"
      ).to_hash[:body]
    )

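
Taken together, the new caching flow might be exercised like this; the ticker, data-directory contents, and return values are illustrative only:

    # Hypothetical walk-through of SQA::Stock's new JSON + CSV caching.
    require 'sqa'
    SQA.init

    stock = SQA::Stock.new(ticker: 'aapl', source: :alpha_vantage)

    # First run: writes <data_dir>/aapl.json (general info plus the merged
    # OVERVIEW payload) and <data_dir>/aapl.csv (historical prices), then
    # keeps the CSV topped up with rows newer than the last cached timestamp.
    stock.ticker      # delegated to @data via def_delegator
    stock.overview    # Alpha Vantage OVERVIEW data merged by merge_overview
    stock.df.nrows    # number of cached price rows
    puts stock        # "aapl with N data points from ... to ..."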
data/lib/sqa/strategy.rb
CHANGED
@@ -8,7 +8,7 @@ class SQA::Strategy
  end

  def add(a_strategy)
-    raise
+    raise BadParameterError unless [Class, Method].include? a_strategy.class

    a_proc = if Class == a_strategy.class
               a_strategy.method(:trade)
data/lib/sqa/version.rb
CHANGED
data/lib/sqa.rb
CHANGED
@@ -17,12 +17,13 @@ end
## Additional Libraries

require 'active_support/core_ext/string'
-require 'alphavantage' # TODO: add rate limiter to it
+require 'alphavantage' # TODO: add rate limiter to it; ** PR submitted! **
+require 'api_key_manager'
require 'amazing_print'
-require 'daru' # TODO: Replace this gem with something better
-require 'descriptive_statistics'
require 'faraday'
require 'hashie'
+require 'lite/statistics'
+require 'lite/statistics/monkey_patches' # patch to Enumerable
require 'nenv'
require 'sem_version'
require 'sem_version/core_ext'

@@ -38,7 +39,6 @@ require_relative "sqa/errors"

require_relative 'sqa/init.rb'

-# require_relative "patches/daru" # TODO: extract Daru::DataFrame in new gem sqa-data_frame

# TODO: Some of these components make direct calls to the
# Alpha Vantage API. Convert them to use the
|