kgrift 1.3.108
Sign up to get free protection for your applications and to get access to all the features.
- checksums.yaml +7 -0
- data/KGrift/Gemfile +22 -0
- data/KGrift/README.md +66 -0
- data/KGrift/bin/kgrift +11 -0
- data/KGrift/grifter.yml +224 -0
- data/KGrift/internal_test_graphs/basic_test_graph_definition.yml +2915 -0
- data/KGrift/internal_test_graphs/unicode_test_graph_definition.yml +3070 -0
- data/KGrift/knewton_grifts/analytics_grifts.rb +103 -0
- data/KGrift/knewton_grifts/async_helper_grifts.rb +63 -0
- data/KGrift/knewton_grifts/authenticator_grifts.rb +46 -0
- data/KGrift/knewton_grifts/basic_grifts.rb +29 -0
- data/KGrift/knewton_grifts/batch_grifts.rb +14 -0
- data/KGrift/knewton_grifts/content_collection_grifts.rb +204 -0
- data/KGrift/knewton_grifts/content_collection_v1_grifts.rb +521 -0
- data/KGrift/knewton_grifts/content_eid_grifts.rb +41 -0
- data/KGrift/knewton_grifts/copy_grifts.rb +151 -0
- data/KGrift/knewton_grifts/deprecated_graph_and_taxonomy_grifts.rb +353 -0
- data/KGrift/knewton_grifts/goal_grifts.rb +203 -0
- data/KGrift/knewton_grifts/graph_and_taxonomy_grifts.rb +136 -0
- data/KGrift/knewton_grifts/graph_create_grifts.rb +34 -0
- data/KGrift/knewton_grifts/graph_query_grifts.rb +448 -0
- data/KGrift/knewton_grifts/graph_tools_grifts.rb +151 -0
- data/KGrift/knewton_grifts/graph_validation_grifts.rb +447 -0
- data/KGrift/knewton_grifts/helper_grifts.rb +92 -0
- data/KGrift/knewton_grifts/jmeter_data_grifts.rb +56 -0
- data/KGrift/knewton_grifts/learning_instance_grifts.rb +46 -0
- data/KGrift/knewton_grifts/looper_grifts.rb +34 -0
- data/KGrift/knewton_grifts/moxy_grifts.rb +64 -0
- data/KGrift/knewton_grifts/oauth_grifts.rb +182 -0
- data/KGrift/knewton_grifts/partner_grifts.rb +70 -0
- data/KGrift/knewton_grifts/partner_support_grifts.rb +85 -0
- data/KGrift/knewton_grifts/recommendation_setup_grifts.rb +215 -0
- data/KGrift/knewton_grifts/registration_grifts.rb +159 -0
- data/KGrift/knewton_grifts/registration_info_grifts.rb +23 -0
- data/KGrift/knewton_grifts/report_grifts.rb +122 -0
- data/KGrift/knewton_grifts/shell_command_grifts.rb +21 -0
- data/KGrift/knewton_grifts/student_flow_grifts.rb +560 -0
- data/KGrift/knewton_grifts/tag_grifts.rb +41 -0
- data/KGrift/knewton_grifts/test_data_grifts.rb +328 -0
- data/KGrift/knewton_grifts/test_user_grifts.rb +264 -0
- data/KGrift/lib/dtrace.rb +20 -0
- data/KGrift/lib/kgrift.rb +7 -0
- data/KGrift/test_data_generators/basic_book_and_taxonomies.rb +35 -0
- data/KGrift/test_data_generators/lo_test_graph.rb +34 -0
- data/KGrift/test_data_generators/partner_owned_book_and_taxonomies.rb +28 -0
- data/KGrift/test_data_generators/partner_owned_book_and_taxonomies_unicode.rb +28 -0
- data/KGrift/test_data_generators/sandcastle_book_and_taxonomies.rb +13 -0
- data/KGrift/test_data_generators/sandcastle_book_and_taxonomies.yml +3709 -0
- data/KGrift/test_data_generators/sandcastle_graph.rb +8 -0
- data/KGrift/test_data_generators/sandcastle_graph_definition.json +4483 -0
- data/KGrift/test_data_generators/sandcastle_graph_full.rb +7 -0
- data/KGrift/test_data_generators/sandcastle_taxonomies.yml +378 -0
- data/KGrift/test_data_generators/sandcastle_with_taxons.rb +56 -0
- data/KGrift/test_data_generators/sandcastle_with_taxons.yml +3994 -0
- data/KGrift/test_data_generators/test_users_and_partners.rb +76 -0
- data/kgrift.gemspec +43 -0
- metadata +144 -0
@@ -0,0 +1,41 @@
|
|
1
|
+
#NODE TAG METHODS
|
2
|
+
# Fetch every tag attached to a single node of a graph.
# overrides may contain 'use_current_user' to skip switching to the knewton partner.
def get_node_tags graph_id, node_id, overrides={}
  Log.info "Getting tags for graph node: #{graph_id} -> #{node_id}"
  use_current_user = overrides.delete('use_current_user')
  do_as_knewton_partner use_current_user do
    kapi.get "/v0/graphs/#{graph_id}/nodes/#{node_id}/tags"
  end
end
|
8
|
+
|
9
|
+
# Attach a tag to a graph node via PUT.
# tag_name is form-encoded by default so arbitrary characters survive the URL.
def create_node_tag graph_id, node_id, tag_name, encode=true, overrides={}
  tag_name = URI.encode_www_form_component(tag_name) if encode
  use_current_user = overrides.delete('use_current_user')
  do_as_knewton_partner use_current_user do
    Log.info "adding a tag '#{tag_name}' to graph node: #{graph_id} -> #{node_id}"
    kapi.put "/v0/graphs/#{graph_id}/nodes/#{node_id}/tags/#{tag_name}", nil
  end
end
|
16
|
+
|
17
|
+
# Remove a tag from a graph node.
# tag_name is form-encoded by default, matching create_node_tag.
def delete_node_tag graph_id, node_id, tag_name, encode=true, overrides={}
  tag_name = URI.encode_www_form_component(tag_name) if encode
  Log.info "deleting tag '#{tag_name}' from graph node: #{graph_id} -> #{node_id}"
  use_current_user = overrides.delete('use_current_user')
  do_as_knewton_partner use_current_user do
    kapi.delete "/v0/graphs/#{graph_id}/nodes/#{node_id}/tags/#{tag_name}"
  end
end
|
24
|
+
|
25
|
+
#LEARNING INSTANCE TAG METHODS
|
26
|
+
# Fetch every tag on a learning instance.
def get_learning_instance_tags learning_instance_id
  Log.info "Getting tags for learning instance: #{learning_instance_id}"
  path = "/v0/learning-instances/#{learning_instance_id}/tags"
  kapi.get path
end
|
30
|
+
|
31
|
+
# Attach a tag to a learning instance via PUT.
def create_learning_instance_tag learning_instance_id, tag_name, encode=true
  tag_name = URI.encode_www_form_component(tag_name) if encode
  Log.info "adding a tag '#{tag_name}' to learning instance: #{learning_instance_id}"
  path = "/v0/learning-instances/#{learning_instance_id}/tags/#{tag_name}"
  kapi.put path, nil
end
|
36
|
+
|
37
|
+
# Remove a tag from a learning instance.
def delete_learning_instance_tag learning_instance_id, tag_name, encode=true
  tag_name = URI.encode_www_form_component(tag_name) if encode
  Log.info "deleting tag '#{tag_name}' from learning instance: #{learning_instance_id}"
  path = "/v0/learning-instances/#{learning_instance_id}/tags/#{tag_name}"
  kapi.delete path
end
|
@@ -0,0 +1,328 @@
|
|
1
|
+
"""
|
2
|
+
test_data_grifts contains the all important get_test_data method
|
3
|
+
|
4
|
+
this method is all about making it easy to deal with shared test data
|
5
|
+
This test data is persisted in S3 and is available to any machine
|
6
|
+
that is executing tests.
|
7
|
+
|
8
|
+
Here is a basic outline of how this works:
|
9
|
+
|
10
|
+
- client calls get_test_data <test data label> ( for example: get_test_data :test_users_and_partners)
|
11
|
+
- look for a generator file named test_data_generators/<label>.rb.
|
12
|
+
- If generator found, get the sha of the file. This is the version of test data we need
|
13
|
+
- If generator not found, version is 'default'
|
14
|
+
- Now look for test data locally at: ~/.kgrift/<env>/<test data label>/<generator sha>/<test data label>.yml
|
15
|
+
- If found, load the yaml data from the file and return it
|
16
|
+
- If not found, look for data in S3 at s3://knewton-<env>/tools/kgrift/<test data label>/<sha>/<test data label>.yml
|
17
|
+
- If found in S3, retrieve locally, load it, return it
|
18
|
+
- If not found in S3, we need to generate the data.
|
19
|
+
- Run the generator. It will return a data structure (usually a Hash)
|
20
|
+
- Save the data it returned locally
|
21
|
+
- Push the freshly generated data into S3.
|
22
|
+
- Load the data, and return it
|
23
|
+
|
24
|
+
Other notes:
|
25
|
+
- special logic for localmode bivins is provided. In this case test data is simply generated and returned since local mode has no
|
26
|
+
permanent persistence, any data made is gone once the localmode bivins is stopped.
|
27
|
+
|
28
|
+
- REGEN_TEST_DATA is a way to allow a user to regenerate data that exists but may have been invalidated for some reason
|
29
|
+
|
30
|
+
|
31
|
+
end
|
32
|
+
|
33
|
+
"""
|
34
|
+
|
35
|
+
require 'aws-sdk'
|
36
|
+
|
37
|
+
# each of these places is important to how test data is found and generated
|
38
|
+
# Locations that drive how test data is found and generated.
USER_HOME     = Dir.home
CUR_DIR       = Dir.pwd
KAPI_GEM_HOME = File.dirname(File.dirname(__FILE__))

# Generators may ship inside the gem itself, or live alongside the
# project in the current working directory (checked in this order).
TEST_DATA_GENERATORS_FOLDERS = [
  File.join(KAPI_GEM_HOME, 'test_data_generators'),
  File.join(CUR_DIR, 'test_data_generators'),
]

# Environments listed here regenerate their data on every run instead of
# looking for a persisted data file.
DONT_PERSIST_DATA_ENVIRONMENTS = [:local, :maven, :undefined]

# Env var naming data sets the user wants regenerated even though persisted
# copies exist.
FORCE_REGENERATION_ENV_VAR='REGEN_TEST_DATA'
|
53
|
+
|
54
|
+
# get_test_data is the main method here
|
55
|
+
# Main entry point for shared test data. Resolution order:
#   1. force_generate option -> just run the generator
#   2. session_only / local environment -> per-interpreter cache
#   3. up-to-date local persisted YAML file
#   4. up-to-date copy in S3
#   5. run the generator, persist locally, archive to S3
# "Up to date" means the data's recorded sha1 matches the generator file's sha.
def get_test_data test_data_label, options={}
  test_data_label = test_data_label.to_s
  options = {
    force_generate: false, #if true, data will be generated even if already persisted
    session_only: false,   #if true, the data will last only for a single session
  }.merge(options)
  Log.debug "get_test_data '#{test_data_label}'"

  # REGEN_TEST_DATA holds test_data_labels separated by non-word chars (spaces, commas, etc)
  regen_env = ENV[FORCE_REGENERATION_ENV_VAR]
  sets_to_force_gen = regen_env ? regen_env.split(/\W+/) : []

  # sometimes you just want to run a generator and skip all the fancy stuff
  return generate_test_data(test_data_label, options) if options[:force_generate]

  # ephemeral data: cached only for the life of the interpreter (one test run).
  # used for session_only requests and for local servers with no permanent persistence.
  if options[:session_only] || local_environment?
    @session_data ||= {}
    if @session_data.has_key?(test_data_label)
      Log.debug "Found existing test data for this session"
    else
      Log.debug "First request for this test data in this session, generating"
      @session_data[test_data_label] = generate_test_data(test_data_label, options)
    end
    return @session_data[test_data_label]
  end

  generator_sha = test_data_version test_data_label
  persisted_test_data_file = test_data_file_path(test_data_label)

  # if user put this in REGEN_TEST_DATA list, then skip the checking and just generate
  if sets_to_force_gen.include?(test_data_label)
    Log.debug "Test data generation forced for '#{test_data_label}' because #{FORCE_REGENERATION_ENV_VAR} set"
    options[:force_generate] = true
  else
    # try the local persisted copy first
    if File.exist? persisted_test_data_file
      data = YAML.load_file(persisted_test_data_file)
      if !generator_sha || !data['sha1'] || data['sha1'] == generator_sha
        Log.debug "the persisted test data file exists, and is up to date, so we are going to use it"
        return data
      else
        Log.debug "the persisted test data file exists but it is out of date. We will not use it"
      end
    end

    # then try S3
    data = retrieve_data_set_from_s3(test_data_label)
    if data
      if !generator_sha || !data['sha1'] || data['sha1'] == generator_sha
        Log.debug "the data was found in s3, and it is up to date"
        return data
      else
        Log.debug "the data was found in s3 but it is out of date. We will not use it"
      end
    end
  end

  # nothing usable exists: generate, persist locally, archive to S3
  data = generate_test_data(test_data_label, options)
  Log.info 'Writing test data to: ' + persisted_test_data_file
  FileUtils.mkdir_p(File.dirname persisted_test_data_file)
  File.open(persisted_test_data_file, 'w') {|f| f.write(data.to_yaml)}
  archive_test_data_to_s3(test_data_label)
  YAML.load_file(persisted_test_data_file)
end
|
123
|
+
|
124
|
+
# True when the configured environment never persists data
# (see DONT_PERSIST_DATA_ENVIRONMENTS).
def local_environment?
  env = grifter_configuration[:environment].to_sym
  DONT_PERSIST_DATA_ENVIRONMENTS.include?(env)
end
|
127
|
+
|
128
|
+
#path to directory containing test data
|
129
|
+
# Root directory for locally persisted test data: ~/.kgrift/<env>
def test_data_base_path
  File.join(USER_HOME, '.kgrift', grifter_configuration[:environment].to_s)
end
|
132
|
+
|
133
|
+
#path to a test data yaml file
|
134
|
+
# Absolute path of the local YAML cache for a test data set.
# Layout: ~/.kgrift/<env>/<label>/<generator sha>/<label>.yml
# (Removed a no-op self-assignment `test_data_label = test_data_label`
# that the original carried.)
def test_data_file_path test_data_label
  generator_sha = test_data_version test_data_label
  File.join test_data_base_path, test_data_label, generator_sha, test_data_label + '.yml'
end
|
139
|
+
|
140
|
+
#construct a path to the test data generator script
|
141
|
+
# Locate the generator script for a data set, checking each configured
# folder in order. Returns the first existing <label>.rb path, or nil.
def test_data_generator_path test_data_label
  TEST_DATA_GENERATORS_FOLDERS.each do |possible_path|
    generator_path = File.join(possible_path, test_data_label + '.rb')
    Log.debug "Checking: #{generator_path}"
    return generator_path if File.exist? generator_path
  end
  nil
end
|
154
|
+
|
155
|
+
# Run the generator script for a data set and return the generated structure
# (usually a Hash) with the generator file's sha1 stamped into it.
# Raises if no generator script exists for the label.
def generate_test_data test_data_label, options={}
  options = {
    force_generate: false, #if true, data will be generated even if already persisted
    session_only: false,   #if true, the data will last only for a single session
  }.merge(options)
  # fixed a copy-pasted debug message that claimed to be get_test_data
  Log.debug "generate_test_data '#{test_data_label}'"
  grift_script_path = test_data_generator_path(test_data_label)
  unless grift_script_path
    raise "Persisted data was not present, and no generator exists either"
  end

  Log.info "Generating test data '#{test_data_label}' via generator script '#{grift_script_path}'"
  generated_test_data = run_script_file(grift_script_path)
  # stick the sha of the generator into the data so consumers can detect staleness
  generated_test_data['sha1'] = get_file_sha1(grift_script_path)
  Log.info "Done generating test data '#{test_data_label}'"
  generated_test_data
end
|
173
|
+
|
174
|
+
# Download a data set's YAML from S3 into the local cache path and load it.
# Returns nil for local environments and when the object is absent in S3.
def retrieve_data_set_from_s3 test_data_label
  return nil if grifter_configuration[:environment] =~ /local/
  tgt_file = test_data_file_path test_data_label
  s3_key = test_data_s3_key(test_data_label)
  bucket = test_data_s3_bucket
  Log.debug "Retrieving from S3: s3://#{bucket}/#{s3_key}"
  FileUtils.mkdir_p File.dirname(tgt_file)
  s3_client.get_object bucket: bucket, key: s3_key, response_target: tgt_file
  YAML.load_file(tgt_file)
# handle case where data is not in S3
rescue Aws::S3::Errors::NoSuchKey
  Log.error "Persisted test data not found for data set: #{test_data_label}. S3 path 's3://#{test_data_s3_bucket}/#{s3_key}'"
  nil
end
|
193
|
+
|
194
|
+
# Upload the locally persisted data set to S3 so other machines can reuse it.
# Always returns nil so callers never depend on the return value.
def archive_test_data_to_s3 test_data_label
  # we only archive for non-local environments where data actually persists in a real DB
  # this catches local and localx cases
  if grifter_configuration[:environment] !~ /local/
    src_file = test_data_file_path test_data_label
    s3_key = test_data_s3_key(test_data_label)
    File.open(src_file, 'rb') do |file|
      s3_client.put_object bucket: test_data_s3_bucket, key: s3_key, body: file
    end
    Log.info "Archived s3://#{test_data_s3_bucket}/#{s3_key}"
  end
  nil # return nil, to make sure nothing is paying attention to the return of this
rescue StandardError => e
  # NOTE: narrowed from `rescue Exception`, which also swallowed
  # SignalException/SystemExit and could make the process unkillable.
  # On an EC2 instance a failed archive is tolerated (S3 permission issue to fix);
  # on a developer machine it is fatal.
  if on_ec2_instance?
    Log.warn "Failed to archive newly generated test data! Going to ignore this error since we are on an EC2 instance... but this is an S3 permission problem that should be fixed!"
  else
    raise e
  end
end
|
214
|
+
|
215
|
+
# Bucket used for archived test data: the configured override if present,
# otherwise the conventional knewton-<env> bucket.
def test_data_s3_bucket
  configured = grifter_configuration[:services][:kapi][:s3_bucket]
  configured || "knewton-#{grifter_configuration[:environment].to_s}"
end
|
222
|
+
|
223
|
+
# AWS region for the test data bucket; only production-euwest1 lives
# outside us-east-1.
def test_data_s3_region
  env = grifter_configuration[:environment].to_s
  env == 'production-euwest1' ? 'eu-west-1' : 'us-east-1'
end
|
231
|
+
|
232
|
+
# return the aws credential object which will be used to authenticate with S3
|
233
|
+
# Handle three cases:
|
234
|
+
# - AWS_ACCESS_KEY_ID + AWS_SECRET_ACCESS_KEY are defined (the manual override option)
|
235
|
+
# - We are on an EC2 instance, so simply use the IAM role (aka instance profile)
|
236
|
+
# - use ~/.aws/credentials as setup by kva (for developers laptops)
|
237
|
+
# return the aws credential object which will be used to authenticate with S3
# Handle three cases:
# - AWS_ACCESS_KEY_ID + AWS_SECRET_ACCESS_KEY are defined (the manual override option)
# - We are on an EC2 instance, so simply use the IAM role (aka instance profile)
# - use ~/.aws/credentials as setup by kva (for developers laptops)
def test_data_s3_credentials
  # lazy initialize credentials once
  return @aws_creds if @aws_creds
  Log.debug "Initializing AWS credentials"
  @aws_creds =
    if ENV['AWS_ACCESS_KEY_ID'] and ENV['AWS_SECRET_ACCESS_KEY']
      Log.debug "Using AWS_ACCESS_KEY_ID / AWS_SECRET_ACCESS_KEY environment variables"
      Aws::Credentials.new ENV['AWS_ACCESS_KEY_ID'], ENV['AWS_SECRET_ACCESS_KEY']
    elsif on_ec2_instance?
      Log.debug "Using the instance profile for AWS credentials since we are on an EC2 instance"
      Aws::InstanceProfileCredentials.new
    else
      profile_name = setup_aws_profile_using_kvault
      Log.debug "Using profile '#{profile_name}' from ~/.aws/credentials"
      Aws::SharedCredentials.new profile_name: profile_name
    end
end
|
256
|
+
|
257
|
+
|
258
|
+
# use kva to get an aws profile, and return the profile name
|
259
|
+
# This is all about the ~/.aws/credentials file
|
260
|
+
# use kva to get up to date keys in that file,
|
261
|
+
# then return the name of the profile
|
262
|
+
# Use kva to refresh leased AWS keys in ~/.aws/credentials, then return the
# profile name to use. Aborts the process when configuration is missing or
# the kva command fails.
def setup_aws_profile_using_kvault
  # both values come from grifter.yml
  profile_name = grifter_configuration[:services][:kapi][:aws_profile]
  kva_env_name = grifter_configuration[:services][:kapi][:kva_env_name]
  if profile_name.nil? or kva_env_name.nil?
    Kernel.abort "Missing configuration for environment: #{grifter_configuration[:environment]}. Unsure how to acquire aws keys using kva. See grifter.yml settings kva_env_name and/or aws_profile"
  end
  Log.info "Acquiring leased aws keys. This may prompt you for your LDAP password..."
  # --validate switch ensures keys will work immediately after cmd returns (see SRE-1200)
  # pwrusr role ensure both read and write access into s3
  kva_cmd = "kva aws --validate #{kva_env_name} pwrusr"
  execute_shell_command kva_cmd
  profile_name
rescue ShellCommandFailed
  Kernel.abort "Failed to setup aws keys using kva. Make sure this command works before proceeding: $ #{kva_cmd}"
end
|
279
|
+
|
280
|
+
# this returns the sha of the generator file, or 'default' if there is no generator
|
281
|
+
# This string is used to determine if the data available matches the data needed
|
282
|
+
# Version string for a data set: the sha1 of its generator file, or
# 'default' when no generator exists. Used to decide whether persisted
# data matches the data the current generator would produce.
def test_data_version test_data_label
  generator = test_data_generator_path(test_data_label)
  return 'default' unless generator
  get_file_sha1(generator)
end
|
286
|
+
|
287
|
+
# return true or false depending whether on ec2 instance or not
|
288
|
+
# return true or false depending whether on ec2 instance or not
def on_ec2_instance?
  # Memoize explicitly instead of `@ec2instance ||= ...`: with ||= a false
  # result was never cached, so the 1-second curl probe re-ran on every
  # call when not on EC2 — defeating the "figure this out once" intent.
  return @ec2instance unless @ec2instance.nil?
  @ec2instance = begin
    Log.debug "Testing whether we are on an EC2 instance or not..."
    # some vpn settings could be redirecting through an EC2 gateway, so it is important that we query the WHOLE iam path
    execute_shell_command "curl --connect-timeout 1 -s http://169.254.169.254/2014-11-05/meta-data/iam/info"
    Log.debug "We are on an EC2 instance"
    true
  rescue StandardError
    # narrowed from `rescue Exception` so signals/SystemExit still propagate
    Log.debug "We are NOT on an EC2 instance"
    false
  end
end
|
302
|
+
|
303
|
+
# S3 object key for a data set: tools/kgrift/<label>/<generator sha>/<label>.yml
def test_data_s3_key test_data_label
  sha = test_data_version test_data_label
  "tools/kgrift/#{test_data_label}/#{sha}/#{test_data_label}.yml"
end
|
307
|
+
|
308
|
+
# Lazily constructed S3 client bound to the test data region/credentials.
def s3_client
  @s3_client ||= Aws::S3::Client.new(region: test_data_s3_region,
                                     credentials: test_data_s3_credentials)
end
|
312
|
+
|
313
|
+
|
314
|
+
# Remove this once migration is complete
|
315
|
+
# Remove this once migration is complete
# One-off migration: push legacy ~/.kird data files into the new S3 layout,
# skipping data sets with no generator (key contains /default/).
def migrate_kird_data
  td_files = Dir["#{USER_HOME}/.kird/kgrift/#{grifter_configuration[:environment]}/**/*.yml"]
  td_files.each do |td_file|
    test_data_label = File.basename td_file, '.yml'
    # compute the key once per file; the original recomputed it three times,
    # re-hashing the generator file each time
    s3_key = test_data_s3_key(test_data_label)
    next if s3_key =~ /\/default\//
    File.open(td_file, 'rb') do |file|
      s3_client.put_object bucket: test_data_s3_bucket, key: s3_key, body: file
    end
    Log.info "Migrated s3://#{test_data_s3_bucket}/#{s3_key}"
  end
end
|
@@ -0,0 +1,264 @@
|
|
1
|
+
# Resolve a (user, client credentials) pair and install Authorization headers
# for it. arg1 may be:
#   Symbol -> a named test account (looked up via get_account)
#   Hash   -> a full account hash with 'external_user_id' and 'client_credentials'
#   String -> an external_user_id; arg2 then supplies credentials
#             (Symbol partner label or Hash of credentials)
# Raises TypeError for other arg1 types, ArgumentError when resolution fails.
def set_account arg1, arg2=nil
  Log.debug "Requested user '#{arg1}' with credentials '#{arg2}'"
  account = {}
  case arg1
  when Symbol
    account = get_account arg1
    account.delete('client')
  when Hash
    account = arg1 if arg1['external_user_id'] and arg1['client_credentials']
  when String
    account['external_user_id'] = arg1
    case arg2
    when Symbol
      account['client_credentials'] = get_client_credentials arg2
    when Hash
      account['client_credentials'] = arg2
    end
  else raise TypeError.new('First argument must be a Symbol, Hash, or String')
  end
  unless account['external_user_id'] and account['client_credentials']
    raise ArgumentError.new('Could not set user and client credentials')
  end
  Log.debug "Resolved to account user '#{account['external_user_id']}' with partner '#{account['client_credentials']['client_id']}'"

  # get access token for account (and authorize if first time making account)
  access_token = do_cached_token_request(
    'code' => nil,
    'redirect_uri' => nil,
    'scope' => account['external_user_id'],
    'grant_type' => "client_credentials",
    'client_credentials' => account['client_credentials'],
    'external_user_id' => account['external_user_id']
  )
  set_authorization_headers access_token
  true
end
|
42
|
+
|
43
|
+
# Cache access tokens to improve performance when switching between test users rapidly
|
44
|
+
# All the cacheing logic lives here
|
45
|
+
# We also check that the cache tokens are not expired and refresh them if they are
|
46
|
+
# Cache access tokens to improve performance when switching between test users rapidly.
# All the caching logic lives here; cached tokens near expiry are refreshed.
TOKEN_CACHE={}
def do_cached_token_request token_request_params, options={}
  # build a cache key from the identity-determining params
  cache_key = token_request_params.select{|k| ['scope', 'grant_type', 'external_user_id', 'client_credentials'].include?(k) }
  tokens = TOKEN_CACHE[cache_key]

  if tokens.nil?
    # cache miss: do the real token request and remember when it expires
    Log.debug "Token cache miss: will do token request"
    tokens = get_access_token token_request_params
    tokens['resolved_token_expiration_time'] = Time.now + tokens['expires_in']
    TOKEN_CACHE[cache_key] = tokens
    seconds_remaining = tokens['resolved_token_expiration_time'] - Time.now
    Log.debug "Get token for this account:\ntoken: #{tokens['access_token']}\nexternal_user_id: #{cache_key['external_user_id']}\nseconds remaining: #{seconds_remaining.to_i}"
  else
    # cache hit: check whether the cached tokens are close to expiry
    seconds_remaining = tokens['resolved_token_expiration_time'] - Time.now
    Log.debug "Token cache hit: found cached token response for this account:\ntoken: #{tokens['access_token']}\nexternal_user_id: #{cache_key['external_user_id']}\nseconds remaining: #{seconds_remaining.to_i}"

    # Bivins uses 60 seconds expiry for its own server side cache of access tokens.
    # We might as well use the same cache timeout client side since otherwise the refresh will simply return the server cached tokens
    if seconds_remaining < 60
      Log.debug "cached token has less than 60 seconds until expiration, refreshing..."

      # Use a non-mutating merge: the original merge! mutated the CALLER's
      # params hash, leaking grant_type => 'refresh_token' back to the caller.
      refresh_params = token_request_params.merge 'refresh_token' => tokens['refresh_token'],
                                                  'grant_type' => 'refresh_token'
      tokens = get_access_token_from_refresh_token refresh_params
      tokens['resolved_token_expiration_time'] = Time.now + tokens['expires_in']
      TOKEN_CACHE[cache_key] = tokens
    end
  end

  tokens['access_token']
end
|
83
|
+
|
84
|
+
# remove anything from the token cache for a given account id
|
85
|
+
# this is used when an account is deleted to clear the cache
|
86
|
+
# Drop any cached tokens for a given account id.
# Called when an account is deleted so stale tokens are not reused.
def delete_account_id_from_token_cache account_id
  TOKEN_CACHE.delete_if { |_key, tokens| tokens['account_id'] == account_id }
end
|
89
|
+
|
90
|
+
# temporarily authenticate as a user.
|
91
|
+
# Do the block.
|
92
|
+
# Go back to the orriginal user.
|
93
|
+
# Temporarily authenticate as another account, run the block, then restore
# the original Authorization header — whether the block returns or raises.
def as_account arg1, arg2=nil, &blk
  cur_auth_header = kapi.headers["Authorization"]
  Log.debug "as_account called, Authorization header currently set to '#{cur_auth_header}'"
  set_account arg1, arg2
  yield
ensure
  # restore the pre-block auth state in every exit path
  if cur_auth_header.nil?
    kapi.headers.delete "Authorization"
  else
    set_authorization_headers cur_auth_header
  end
  Log.debug "as_user over, Authorization header set back to '#{cur_auth_header}'"
end
|
109
|
+
|
110
|
+
# Knewton system ("knerd") accounts. Local-style environments use hard-coded
# constants (local mode has no persisted accounts); everywhere else the
# accounts come from shared test data.
def knerd_accounts
  local_style = local_environment? || grifter_configuration[:environment].to_s.downcase =~ /local/
  return get_test_data(:knerd_accounts) unless local_style

  knewton_client_credentials = {
    'client_id' => 'knewton',
    'client_secret' => '321knewton123',
  }
  {
    'knerd' => {
      'external_user_id' => 'knewton-system-user',
      'client' => 'knewton',
      'client_credentials' => knewton_client_credentials,
    },
    'prod_knerd' => {
      'external_user_id' => 'knewton-prod-system-user',
      'client' => 'knewton',
      'client_credentials' => knewton_client_credentials,
    },
  }
end
|
136
|
+
|
137
|
+
# Combined map of test accounts and partners, memoized after first build.
# Returns a deep copy so callers cannot corrupt the memoized original.
def test_account_map
  @test_account_map ||= begin
    full_data = get_test_data(:test_users_and_partners)
    full_data['accounts'].merge! knerd_accounts
    full_data['partners']['knewton'] = full_data['accounts']['knerd']['client_credentials']
    full_data
  end
  # Deep-copy via Marshal: the original used #clone, which is SHALLOW — the
  # nested account/partner hashes were still shared, so callers mutating them
  # corrupted the memoized copy despite the "protect" intent.
  Marshal.load(Marshal.dump(@test_account_map))
end
|
146
|
+
|
147
|
+
# Look up a test account by label. :partner and :user map to the default
# system user and default user respectively.
def get_account label
  key = case label
        when :partner then 'system_user_default'
        when :user    then 'user_default'
        else label.to_s
        end
  test_account_map['accounts'][key]
end
|
156
|
+
|
157
|
+
# Look up a partner record by label; nil or :partner resolves to the
# default test partner.
def get_partner_helper label=nil
  key = (!label || label == :partner) ? 'test_partner_default' : label.to_s
  test_account_map['partners'][key]
end
|
164
|
+
|
165
|
+
# Client id for a partner (default partner when label is nil).
def get_client_id label=nil
  get_partner_helper(label)['client_id']
end
|
168
|
+
|
169
|
+
# Client secret for a partner (default partner when label is nil).
def get_client_secret label=nil
  get_partner_helper(label)['client_secret']
end
|
172
|
+
|
173
|
+
# Credentials hash ({'client_id', 'client_secret'}) for a partner
# (default partner when label is nil).
def get_client_credentials label=nil
  partner = get_partner_helper label
  %w[client_id client_secret].each_with_object({}) do |key, creds|
    creds[key] = partner[key]
  end
end
|
180
|
+
|
181
|
+
# Partner id for a partner (default partner when label is nil).
# Delegates to get_partner_helper — the original duplicated its
# default-resolution logic; this matches get_client_id/get_client_secret.
def get_client_partner_id label=nil
  (get_partner_helper label)['partner_id']
end
|
188
|
+
|
189
|
+
#this method is used for grifters automatic authentication feature
|
190
|
+
#when calling grifts off the cmd line, this authentication will be
|
191
|
+
#done automatically first
|
192
|
+
# Grifter's automatic authentication hook: command-line grift invocations
# authenticate as the knerd system account before running.
def bivins_grifter_authenticate
  set_account :knerd
end
|
195
|
+
|
196
|
+
#this method is used to set a descriptive user-agent header before
|
197
|
+
#executing any method off the command line
|
198
|
+
# Set a descriptive user-agent header (and a B3 tracing id) before executing
# a method off the command line. Best-effort: failures are logged, not raised.
def user_agent_grifter_authenticate
  user = Etc.getpwuid(Process.uid).name.slice(0,15)
  cmd_method_name = ARGV.first.slice(0,15)
  tracing_header = rand(2**32..2**64-1).to_s(16)
  Log.debug "Setting user-agent header to '#{cmd_method_name}', tracing header to #{tracing_header}"
  kapi.headers['user-agent'] = kapi.config[:default_headers][:"user-agent"] + " - #{cmd_method_name} - #{user}"
  kapi.headers['X-B3-TraceId'] = tracing_header
rescue StandardError
  # narrowed from `rescue Exception`: best-effort behavior is kept, but
  # signals and SystemExit are no longer swallowed
  Log.warn "Failed to set method-specific user-agent header"
end
|
208
|
+
|
209
|
+
# create_partner_with_admin creates
|
210
|
+
# * a new partner with api key set to a name optionally
|
211
|
+
# * a partner admin for that partner
|
212
|
+
#
|
213
|
+
# returns a hash with all the data in a format that is ready to
|
214
|
+
# use with set_account / as_account methods.
|
215
|
+
# create_partner_with_admin creates
# * a new partner, with 'name'/'password' options (random when omitted)
# * a system-user admin for that partner
# Returns a hash ready for use with set_account / as_account.
def create_partner_with_admin options={}
  client_id = options.fetch('name', random_string(23)) + "-#{random_string(8)}"
  options.delete('name')
  client_secret = options.fetch('password', random_string)
  options.delete('password')

  as_account :knerd do
    # make the partner (keeping its id) — remaining options pass straight through
    partner = create_partner({'name' => client_id,
                              'password' => client_secret,
                              'rate_limit_multiplier' => 1.0,
                              'default_entitlements' => options['default_entitlements'],
                             }.merge(options))

    # make the system user for the new partner
    system_user = create_system_user partner['id']

    {
      'account_id' => system_user['id'],
      'external_user_id' => system_user['external_user_id'],
      'partner_id' => partner['id'],
      'client_credentials' => {
        'client_id' => client_id,
        'client_secret' => client_secret,
      },
    }
  end
end
|
244
|
+
|
245
|
+
#create a user account, and get the account id
|
246
|
+
#This is useful for making fresh accounts that can help avoid entity limits
|
247
|
+
# Create a fresh user account and return its ids.
# Useful for making fresh accounts that help avoid entity limits.
def create_test_account client_credentials
  external_user_id = create_account['external_user_id']
  {
    'account_id' => get_account_id_for(external_user_id, client_credentials),
    'external_user_id' => external_user_id,
  }
end
|
255
|
+
|
256
|
+
# Fetch the partner id of the currently authenticated account by asking the
# API to echo it in the X-Partner-Id response header.
def get_partner_id_for_current_account
  orig_header_val = kapi.headers['X-Include-Partner-Id']
  kapi.headers['X-Include-Partner-Id'] = 'true'
  begin
    get_current_account
    kapi.last_response['X-Partner-Id']
  ensure
    # Restore the caller's header even when the request raises — the original
    # leaked X-Include-Partner-Id='true' into all subsequent requests on error.
    kapi.headers['X-Include-Partner-Id'] = orig_header_val
  end
end