aws-sdk-core 3.226.3 → 3.241.3
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- checksums.yaml +4 -4
- data/CHANGELOG.md +138 -0
- data/VERSION +1 -1
- data/lib/aws-defaults/default_configuration.rb +2 -1
- data/lib/aws-sdk-core/assume_role_credentials.rb +8 -8
- data/lib/aws-sdk-core/assume_role_web_identity_credentials.rb +2 -2
- data/lib/aws-sdk-core/client_stubs.rb +6 -0
- data/lib/aws-sdk-core/credential_provider_chain.rb +71 -22
- data/lib/aws-sdk-core/ecs_credentials.rb +13 -13
- data/lib/aws-sdk-core/endpoints.rb +37 -13
- data/lib/aws-sdk-core/errors.rb +3 -0
- data/lib/aws-sdk-core/instance_profile_credentials.rb +7 -7
- data/lib/aws-sdk-core/login_credentials.rb +229 -0
- data/lib/aws-sdk-core/plugins/checksum_algorithm.rb +145 -75
- data/lib/aws-sdk-core/plugins/credentials_configuration.rb +75 -59
- data/lib/aws-sdk-core/plugins/retries/clock_skew.rb +28 -16
- data/lib/aws-sdk-core/plugins/sign.rb +23 -28
- data/lib/aws-sdk-core/plugins/stub_responses.rb +6 -0
- data/lib/aws-sdk-core/plugins/user_agent.rb +4 -1
- data/lib/aws-sdk-core/refreshing_credentials.rb +8 -11
- data/lib/aws-sdk-core/shared_config.rb +30 -0
- data/lib/aws-sdk-core/sso_credentials.rb +1 -1
- data/lib/aws-sdk-core/static_token_provider.rb +1 -2
- data/lib/aws-sdk-core/token.rb +3 -3
- data/lib/aws-sdk-core/token_provider.rb +4 -0
- data/lib/aws-sdk-core/token_provider_chain.rb +2 -6
- data/lib/aws-sdk-core.rb +4 -0
- data/lib/aws-sdk-signin/client.rb +604 -0
- data/lib/aws-sdk-signin/client_api.rb +119 -0
- data/lib/aws-sdk-signin/customizations.rb +1 -0
- data/lib/aws-sdk-signin/endpoint_parameters.rb +69 -0
- data/lib/aws-sdk-signin/endpoint_provider.rb +59 -0
- data/lib/aws-sdk-signin/endpoints.rb +20 -0
- data/lib/aws-sdk-signin/errors.rb +122 -0
- data/lib/aws-sdk-signin/plugins/endpoints.rb +77 -0
- data/lib/aws-sdk-signin/resource.rb +26 -0
- data/lib/aws-sdk-signin/types.rb +299 -0
- data/lib/aws-sdk-signin.rb +63 -0
- data/lib/aws-sdk-sso/client.rb +24 -17
- data/lib/aws-sdk-sso/endpoint_parameters.rb +4 -4
- data/lib/aws-sdk-sso/endpoint_provider.rb +2 -2
- data/lib/aws-sdk-sso.rb +1 -1
- data/lib/aws-sdk-ssooidc/client.rb +43 -23
- data/lib/aws-sdk-ssooidc/client_api.rb +5 -0
- data/lib/aws-sdk-ssooidc/endpoint_parameters.rb +4 -4
- data/lib/aws-sdk-ssooidc/errors.rb +10 -0
- data/lib/aws-sdk-ssooidc/types.rb +27 -15
- data/lib/aws-sdk-ssooidc.rb +1 -1
- data/lib/aws-sdk-sts/client.rb +159 -28
- data/lib/aws-sdk-sts/client_api.rb +72 -0
- data/lib/aws-sdk-sts/customizations.rb +0 -1
- data/lib/aws-sdk-sts/endpoint_parameters.rb +5 -5
- data/lib/aws-sdk-sts/errors.rb +64 -0
- data/lib/aws-sdk-sts/presigner.rb +2 -6
- data/lib/aws-sdk-sts/types.rb +175 -6
- data/lib/aws-sdk-sts.rb +1 -1
- data/lib/seahorse/client/h2/handler.rb +6 -1
- data/lib/seahorse/client/net_http/patches.rb +44 -11
- metadata +27 -1
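
The headline change in this range is AWS Sign-In support: the gem now bundles an `aws-sdk-signin` client and a new `Aws::LoginCredentials` provider (reproduced in full below) that consumes the token cache written by `aws login`. A minimal usage sketch, assuming `aws login --profile your-login-profile` has already been run; the session name and region below are placeholders:

```ruby
require 'aws-sdk-core' # bundles STS, SSO, SSOOIDC and the new Signin client

# 'my_login_session' is a placeholder; the real value is written to the AWS
# config file by the AWS CLI / AWS Tools for PowerShell after `aws login`.
credentials = Aws::LoginCredentials.new(login_session: 'my_login_session')

# Any SDK client accepts the provider; STS is used here because it ships in aws-sdk-core.
sts = Aws::STS::Client.new(region: 'us-east-1', credentials: credentials)
puts sts.get_caller_identity.account
```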
data/lib/aws-sdk-core/login_credentials.rb (new file):

@@ -0,0 +1,229 @@
+# frozen_string_literal: true
+
+module Aws
+  # An auto-refreshing credential provider that retrieves credentials from
+  # a cached login token. This class does NOT implement the AWS Sign-In
+  # login flow - tokens must be generated separately by running `aws login`
+  # from the AWS CLI/AWS Tools for PowerShell with the correct profile.
+  # The {LoginCredentials} will auto-refresh the AWS credentials from AWS Sign-In.
+  #
+  #     # You must first run aws login --profile your-login-profile
+  #     login_credentials = Aws::LoginCredentials.new(login_session: 'my_login_session')
+  #     ec2 = Aws::EC2::Client.new(credentials: login_credentials)
+  #
+  # If you omit the `:client` option, a new {Aws::Signin::Client} object will
+  # be constructed with additional options that were provided.
+  class LoginCredentials
+    include CredentialProvider
+    include RefreshingCredentials
+
+    # @option options [required, String] :login_session An opaque string
+    #   used to determine the cache file location. This value can be found
+    #   in the AWS config file which is set by the AWS CLI/AWS Tools for
+    #   PowerShell automatically.
+    #
+    # @option options [Signin::Client] :client Optional `Signin::Client`.
+    #   If not provided, a client will be constructed.
+    def initialize(options = {})
+      raise ArgumentError, 'Missing login_session' unless options[:login_session]
+
+      @login_session = options.delete(:login_session)
+      @client = options[:client]
+      unless @client
+        client_opts = options.reject { |key, _| CLIENT_EXCLUDE_OPTIONS.include?(key) }
+        @client = Signin::Client.new(client_opts.merge(credentials: nil))
+      end
+      @metrics = ['CREDENTIALS_LOGIN']
+      @async_refresh = true
+      super
+    end
+
+    # @return [Signin::Client]
+    attr_reader :client
+
+    private
+
+    def refresh
+      # First reload the token from disk to ensure it hasn't been refreshed externally
+      token_json = read_cached_token
+      update_creds(token_json['accessToken'])
+      return if @credentials && @expiration && !near_expiration?(sync_expiration_length)
+
+      # Using OpenSSL 3.6.0 may result in errors like "certificate verify failed (unable to get certificate CRL)."
+      # A recommended workaround is to use OpenSSL version < 3.6.0 or requiring the openssl gem with a version of at
+      # least 3.2.2. GitHub issue: https://github.com/openssl/openssl/issues/28752.
+      if OpenSSL::OPENSSL_LIBRARY_VERSION.include?('3.6.') &&
+         (!Gem.loaded_specs['openssl'] || Gem.loaded_specs['openssl'].version < Gem::Version.new('3.2.2'))
+        warn 'WARNING: OpenSSL 3.6.x may cause certificate verify errors - use OpenSSL < 3.6.0 or openssl gem >= 3.2.2'
+      end
+
+      # Attempt to refresh the token
+      attempt_refresh(token_json)
+
+      # Raise if token is hard expired
+      return unless !@expiration || @expiration < Time.now
+
+      raise Errors::InvalidLoginToken,
+            'Login token is invalid and failed to refresh. Please reauthenticate.'
+    end
+
+    def read_cached_token
+      cached_token = JSON.load_file(login_cache_file)
+      validate_cached_token(cached_token)
+      cached_token
+    rescue Errno::ENOENT, Aws::Json::ParseError
+      raise Errors::InvalidLoginToken,
+            "Failed to load a Login token for login session #{@login_session}. Please reauthenticate."
+    end
+
+    def login_cache_file
+      directory = ENV['AWS_LOGIN_CACHE_DIRECTORY'] || File.join(Dir.home, '.aws', 'login', 'cache')
+      login_session_sha = OpenSSL::Digest::SHA256.hexdigest(@login_session.strip.encode('utf-8'))
+      File.join(directory, "#{login_session_sha}.json")
+    end
+
+    def validate_cached_token(cached_token)
+      required_cached_token_fields = %w[accessToken clientId refreshToken dpopKey]
+      missing_fields = required_cached_token_fields.reject { |field| cached_token[field] }
+      unless missing_fields.empty?
+        raise ArgumentError, "Cached login token is missing required field(s): #{missing_fields}. " \
+                             'Please reauthenticate.'
+      end
+
+      access_token = cached_token['accessToken']
+      required_access_token_fields = %w[accessKeyId secretAccessKey sessionToken accountId expiresAt]
+      missing_fields = required_access_token_fields.reject { |field| access_token[field] }
+
+      return if missing_fields.empty?
+
+      raise ArgumentError, "Access token in cached login token is missing required field(s): #{missing_fields}. " \
+                           'Please reauthenticate.'
+    end
+
+    def update_creds(access_token)
+      @credentials = Credentials.new(
+        access_token['accessKeyId'],
+        access_token['secretAccessKey'],
+        access_token['sessionToken'],
+        account_id: access_token['accountId']
+      )
+      @expiration = Time.parse(access_token['expiresAt'])
+    end
+
+    def attempt_refresh(token_json)
+      resp = make_request(token_json)
+      parse_resp(resp.token_output, token_json)
+      update_creds(token_json['accessToken'])
+      update_token_cache(token_json)
+    rescue Signin::Errors::AccessDeniedException => e
+      case e.error
+      when 'TOKEN_EXPIRED'
+        warn 'Your session has expired. Please reauthenticate.'
+      when 'USER_CREDENTIALS_CHANGED'
+        warn 'Unable to refresh credentials because of a change in your password. ' \
+             'Please reauthenticate with your new password.'
+      when 'INSUFFICIENT_PERMISSIONS'
+        warn 'Unable to refresh credentials due to insufficient permissions. ' \
+             'You may be missing permission for the `CreateOAuth2Token` action.'
+      end
+    rescue StandardError => e
+      warn("Failed to refresh Login token for LoginCredentials: #{e.message}")
+    end
+
+    def make_request(token_json)
+      options = {
+        token_input: {
+          client_id: token_json['clientId'],
+          grant_type: 'refresh_token',
+          refresh_token: token_json['refreshToken']
+        }
+      }
+      req = @client.build_request(:create_o_auth_2_token, options)
+      endpoint_params = Aws::Signin::EndpointParameters.create(req.context.config)
+      endpoint = req.context.config.endpoint_provider.resolve_endpoint(endpoint_params)
+      endpoint = URI.join(endpoint.url, @client.config.api.operation(:create_o_auth_2_token).http_request_uri).to_s
+      req.context.http_request.headers['DPoP'] = dpop_proof(token_json['dpopKey'], endpoint)
+      req.send_request
+    end
+
+    def dpop_proof(dpop_key, endpoint)
+      # Load private key from cached token file
+      private_key = OpenSSL::PKey.read(dpop_key)
+      public_key = private_key.public_key.to_octet_string(:uncompressed)
+
+      # Construct header and payload
+      header = build_header(public_key[1, 32], public_key[33, 32])
+      payload = build_payload(endpoint)
+
+      # Base64URL encode header and payload, sign message using private key, and create header
+      message = build_message(header, payload)
+      signature = private_key.sign(OpenSSL::Digest.new('SHA256'), message)
+      jws_signature = der_to_jws(signature)
+      "#{message}.#{Base64.urlsafe_encode64(jws_signature, padding: false)}"
+    end
+
+    def build_header(x_bytes, y_bytes)
+      {
+        'alg' => 'ES256', # signing algorithm
+        'jwk' => {
+          'crv' => 'P-256', # curve name
+          'kty' => 'EC', # key type
+          'x' => Base64.urlsafe_encode64(x_bytes, padding: false), # public x coordinate
+          'y' => Base64.urlsafe_encode64(y_bytes, padding: false) # public y coordinate
+        },
+        'typ' => 'dpop+jwt' # hardcoded
+      }
+    end
+
+    def build_payload(htu)
+      {
+        'jti' => SecureRandom.uuid, # unique identifier (UUID4)
+        'htm' => @client.config.api.operation(:create_o_auth_2_token).http_method, # POST
+        'htu' => htu, # endpoint of the CreateOAuth2Token operation, with path
+        'iat' => Time.now.utc.to_i # UTC timestamp, specified number of seconds from 1970-01-01T00:00:00Z UTC
+      }
+    end
+
+    def build_message(header, payload)
+      encoded_header = Base64.urlsafe_encode64(JSON.dump(header), padding: false)
+      encoded_payload = Base64.urlsafe_encode64(JSON.dump(payload), padding: false)
+      "#{encoded_header}.#{encoded_payload}"
+    end
+
+    # Converts DER-encoded ASN.1 signature to JWS
+    def der_to_jws(der_signature)
+      asn1 = OpenSSL::ASN1.decode(der_signature)
+      r = asn1.value[0].value
+      s = asn1.value[1].value
+
+      r_hex = r.to_s(16).rjust(64, '0')
+      s_hex = s.to_s(16).rjust(64, '0')
+
+      [r_hex + s_hex].pack('H*')
+    end
+
+    def parse_resp(resp, token_json)
+      access_token = token_json['accessToken']
+      access_token.merge!(
+        'accessKeyId' => resp.access_token.access_key_id,
+        'secretAccessKey' => resp.access_token.secret_access_key,
+        'sessionToken' => resp.access_token.session_token,
+        'expiresAt' => (Time.now.utc + resp.expires_in).to_datetime.rfc3339
+      )
+      token_json['refreshToken'] = resp.refresh_token
+    end
+
+    def update_token_cache(token_json)
+      cached_token = token_json.dup
+      # File.write is not atomic so use temp file and move
+      temp_file = Tempfile.new('temp_file')
+      begin
+        temp_file.write(Json.dump(cached_token))
+        temp_file.close
+        FileUtils.mv(temp_file.path, login_cache_file)
+      ensure
+        temp_file.unlink if File.exist?(temp_file.path) # Ensure temp file is cleaned up
+      end
+    end
+  end
+end
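
The `dpop_proof` helper above assembles a DPoP JWT by hand with OpenSSL rather than pulling in a JWT gem: a JWK header carrying the P-256 public key, a payload binding the HTTP method and endpoint, and an ES256 signature converted from DER to the raw `r || s` form JWS expects. A standalone sketch of the same shape, using a throwaway P-256 key and a hypothetical endpoint instead of the cached `dpopKey` (Ruby 3.x with openssl gem >= 2.2 assumed):

```ruby
require 'openssl'
require 'json'
require 'base64'
require 'securerandom'

key   = OpenSSL::PKey::EC.generate('prime256v1')       # stand-in for the cached dpopKey
point = key.public_key.to_octet_string(:uncompressed)  # 0x04 || X (32 bytes) || Y (32 bytes)

header = {
  'alg' => 'ES256',
  'typ' => 'dpop+jwt',
  'jwk' => {
    'kty' => 'EC', 'crv' => 'P-256',
    'x' => Base64.urlsafe_encode64(point[1, 32], padding: false),
    'y' => Base64.urlsafe_encode64(point[33, 32], padding: false)
  }
}
payload = {
  'jti' => SecureRandom.uuid,
  'htm' => 'POST',
  'htu' => 'https://signin.example.com/v1/token', # hypothetical CreateOAuth2Token endpoint
  'iat' => Time.now.utc.to_i
}

message = [header, payload].map { |part| Base64.urlsafe_encode64(JSON.dump(part), padding: false) }.join('.')
der_sig = key.sign(OpenSSL::Digest.new('SHA256'), message)

# JWS wants the raw 64-byte r || s concatenation, not the DER ASN.1 wrapper.
r, s = OpenSSL::ASN1.decode(der_sig).value.map(&:value)
raw  = [r.to_s(16).rjust(64, '0') + s.to_s(16).rjust(64, '0')].pack('H*')

dpop_header_value = "#{message}.#{Base64.urlsafe_encode64(raw, padding: false)}"
puts dpop_header_value
```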
data/lib/aws-sdk-core/plugins/checksum_algorithm.rb (removed lines whose content was not captured are shown as bare `-` markers or truncated fragments):

@@ -4,7 +4,8 @@ module Aws
   module Plugins
     # @api private
     class ChecksumAlgorithm < Seahorse::Client::Plugin
-
+      CHECKSUM_CHUNK_SIZE = 1 * 1024 * 1024 # one MB
+      DEFAULT_TRAILER_CHUNK_SIZE = 16_384 # 16 KB
 
       # determine the set of supported client side checksum algorithms
       # CRC32c requires aws-crt (optional sdk dependency) for support
@@ -21,6 +22,7 @@ module Aws
       end.freeze
 
       CRT_ALGORITHMS = %w[CRC32C CRC64NVME].freeze
+      DEFAULT_CHECKSUM = 'CRC32'
 
       # Priority order of checksum algorithms to validate responses against.
       # Remove any algorithms not supported by client (ie, depending on CRT availability).
@@ -37,8 +39,6 @@ module Aws
         'SHA256' => 44 + 1
       }.freeze
 
-      DEFAULT_CHECKSUM = 'CRC32'
-
       option(:request_checksum_calculation,
         doc_default: 'when_supported',
         doc_type: 'String',
@@ -162,9 +162,7 @@ module Aws
           context[:http_checksum] ||= {}
 
           # Set validation mode to enabled when supported.
-          if context.config.response_checksum_validation == 'when_supported'
-            enable_request_validation_mode(context)
-          end
+          enable_request_validation_mode(context) if context.config.response_checksum_validation == 'when_supported'
 
           @handler.call(context)
         end
@@ -190,14 +188,11 @@ module Aws
              name: "x-amz-checksum-#{algorithm.downcase}",
              request_algorithm_header: request_algorithm_header(context)
            }
-
            context[:http_checksum][:request_algorithm] = request_algorithm
            calculate_request_checksum(context, request_algorithm)
          end
 
-          if should_verify_response_checksum?(context)
-            add_verify_response_checksum_handlers(context)
-          end
+          add_verify_response_checksum_handlers(context) if should_verify_response_checksum?(context)
 
          with_metrics(context.config, algorithm) { @handler.call(context) }
        end
@@ -249,6 +244,7 @@ module Aws
          return unless context.operation.http_checksum
 
          input_member = context.operation.http_checksum['requestAlgorithmMember']
+
          context.params[input_member.to_sym] ||= DEFAULT_CHECKSUM if input_member
        end
 
@@ -271,25 +267,38 @@ module Aws
            context.operation.http_checksum['responseAlgorithms']
        end
 
-        def checksum_required?(context)
-          (http_checksum = context.operation.http_checksum) &&
-            (checksum_required = http_checksum['requestChecksumRequired']) &&
-            (checksum_required && context.config.request_checksum_calculation == 'when_required')
-        end
-
-        def checksum_optional?(context)
-          context.operation.http_checksum &&
-            context.config.request_checksum_calculation != 'when_required'
-        end
-
        def checksum_provided_as_header?(headers)
          headers.any? { |k, _| k.start_with?('x-amz-checksum-') }
        end
 
+        # Determines whether a request checksum should be calculated.
+        # 1. **No existing checksum in header**: Skips if checksum header already present
+        # 2. **Operation support**: Considers model, client configuration and user input.
        def should_calculate_request_checksum?(context)
-          !checksum_provided_as_header?(context.http_request.headers) &&
-
-
+          !checksum_provided_as_header?(context.http_request.headers) && checksum_applicable?(context)
+        end
+
+        # Checks if checksum calculation should proceed based on operation requirements and client settings.
+        # Returns true when any of these conditions are met:
+        # 1. http checksum's requestChecksumRequired is true
+        # 2. Config for request_checksum_calculation is "when_supported"
+        # 3. Config for request_checksum_calculation is "when_required" AND user provided checksum algorithm
+        def checksum_applicable?(context)
+          http_checksum = context.operation.http_checksum
+          return false unless http_checksum
+
+          return true if http_checksum['requestChecksumRequired']
+
+          return false unless (algorithm_member = http_checksum['requestAlgorithmMember'])
+
+          case context.config.request_checksum_calculation
+          when 'when_supported'
+            true
+          when 'when_required'
+            !context.params[algorithm_member.to_sym].nil?
+          else
+            false
+          end
        end
 
        def choose_request_algorithm!(context)
@@ -307,12 +316,13 @@ module Aws
        end
 
        def checksum_request_in(context)
-
-
-
-
-
-
+          return 'header' unless supports_trailer_checksums?(context.operation)
+
+          should_fallback_to_header?(context) ? 'header' : 'trailer'
+        end
+
+        def supports_trailer_checksums?(operation)
+          operation['unsignedPayload'] || operation['authtype'] == 'v4-unsigned-body'
        end
 
        def calculate_request_checksum(context, checksum_properties)
@@ -320,6 +330,7 @@ module Aws
          if (algorithm_header = checksum_properties[:request_algorithm_header])
            headers[algorithm_header] = checksum_properties[:algorithm]
          end
+
          case checksum_properties[:in]
          when 'header'
            apply_request_checksum(context, headers, checksum_properties)
@@ -330,19 +341,34 @@ module Aws
          end
        end
 
+        def should_fallback_to_header?(context)
+          # Trailer implementation within Mac/JRUBY environment is facing some
+          # network issues that will need further investigation:
+          # * https://github.com/jruby/jruby-openssl/issues/271
+          # * https://github.com/jruby/jruby-openssl/issues/317
+          return true if defined?(JRUBY_VERSION)
+
+          # Chunked signing is currently not supported
+          # Https is required for unsigned payload for security
+          return true if context.http_request.endpoint.scheme == 'http'
+
+          context[:skip_trailer_checksums]
+        end
+
        def apply_request_checksum(context, headers, checksum_properties)
          header_name = checksum_properties[:name]
-          body = context.http_request.body_contents
          headers[header_name] = calculate_checksum(
            checksum_properties[:algorithm],
-            body
+            context.http_request.body
          )
        end
 
        def calculate_checksum(algorithm, body)
          digest = ChecksumAlgorithm.digest_for_algorithm(algorithm)
          if body.respond_to?(:read)
+            body.rewind
            update_in_chunks(digest, body)
+            body.rewind
          else
            digest.update(body)
          end
@@ -351,7 +377,7 @@ module Aws
 
        def update_in_chunks(digest, io)
          loop do
-            chunk = io.read(
+            chunk = io.read(CHECKSUM_CHUNK_SIZE)
            break unless chunk
 
            digest.update(chunk)
@@ -363,7 +389,12 @@ module Aws
          location_name = checksum_properties[:name]
 
          # set required headers
-          headers['Content-Encoding'] =
+          headers['Content-Encoding'] =
+            if headers['Content-Encoding']
+              headers['Content-Encoding'] += ', aws-chunked'
+            else
+              'aws-chunked'
+            end
          headers['X-Amz-Content-Sha256'] = 'STREAMING-UNSIGNED-PAYLOAD-TRAILER'
          headers['X-Amz-Trailer'] = location_name
 
@@ -374,13 +405,14 @@ module Aws
          unless context.http_request.body.respond_to?(:size)
            raise Aws::Errors::ChecksumError, 'Could not determine length of the body'
          end
-          headers['X-Amz-Decoded-Content-Length'] = context.http_request.body.size
 
-          context.http_request.body
-
-
-
-
+          headers['X-Amz-Decoded-Content-Length'] = context.http_request.body.size
+          context.http_request.body =
+            AwsChunkedTrailerDigestIO.new(
+              io: context.http_request.body,
+              algorithm: checksum_properties[:algorithm],
+              location_name: location_name
+            )
        end
 
        def should_verify_response_checksum?(context)
@@ -398,15 +430,11 @@ module Aws
        end
 
        def add_verify_response_headers_handler(context, checksum_context)
-          validation_list = CHECKSUM_ALGORITHM_PRIORITIES &
-                            operation_response_algorithms(context)
+          validation_list = CHECKSUM_ALGORITHM_PRIORITIES & operation_response_algorithms(context)
          context[:http_checksum][:validation_list] = validation_list
 
          context.http_response.on_headers do |_status, headers|
-            header_name, algorithm = response_header_to_verify(
-              headers,
-              validation_list
-            )
+            header_name, algorithm = response_header_to_verify(headers, validation_list)
            next unless header_name
 
            expected = headers[header_name]
@@ -452,52 +480,94 @@ module Aws
      # Wrapper for request body that implements application-layer
      # chunking with Digest computed on chunks + added as a trailer
      class AwsChunkedTrailerDigestIO
-
+        CHUNK_OVERHEAD = 4 # "\r\n\r\n"
+        HEX_BASE = 16
 
-        def initialize(
-          @io = io
-          @location_name = location_name
-          @algorithm = algorithm
-          @digest = ChecksumAlgorithm.digest_for_algorithm(algorithm)
-          @
+        def initialize(options = {})
+          @io = options.delete(:io)
+          @location_name = options.delete(:location_name)
+          @algorithm = options.delete(:algorithm)
+          @digest = ChecksumAlgorithm.digest_for_algorithm(@algorithm)
+          @chunk_size = Thread.current[:net_http_override_body_stream_chunk] || DEFAULT_TRAILER_CHUNK_SIZE
+          @overhead_bytes = calculate_overhead(@chunk_size)
+          @base_chunk_size = @chunk_size - @overhead_bytes
+          @encoded_buffer = +''
+          @eof = false
        end
 
        # the size of the application layer aws-chunked + trailer body
        def size
-          # compute the number of chunks
-          # a full chunk has 4 + 4 bytes overhead, a partial chunk is len.to_s(16).size + 4
          orig_body_size = @io.size
-          n_full_chunks = orig_body_size /
-          partial_bytes = orig_body_size %
-
-
+          n_full_chunks = orig_body_size / @base_chunk_size
+          partial_bytes = orig_body_size % @base_chunk_size
+
+          full_chunk_overhead = @base_chunk_size.to_s(HEX_BASE).size + CHUNK_OVERHEAD
+          chunked_body_size = n_full_chunks * (@base_chunk_size + full_chunk_overhead)
+          unless partial_bytes.zero?
+            chunked_body_size += partial_bytes.to_s(HEX_BASE).size + partial_bytes + CHUNK_OVERHEAD
+          end
          trailer_size = ChecksumAlgorithm.trailer_length(@algorithm, @location_name)
          chunked_body_size + trailer_size
        end
 
        def rewind
          @io.rewind
+          @encoded_buffer = +''
+          @eof = false
+          @digest = ChecksumAlgorithm.digest_for_algorithm(@algorithm)
        end
 
-        def read(length, buf = nil)
-
-          if
-
-
+        def read(length = nil, buf = nil)
+          return '' if length&.zero?
+          return if eof?
+
+          buf&.clear
+          output_buffer = buf || +''
 
-
-
-
-
-          return StringIO.new(application_chunked).read(application_chunked.size, buf)
+          fill_encoded_buffer(length)
+
+          if length
+            output_buffer << @encoded_buffer.slice!(0, length)
          else
-
-
-
-
-
+            output_buffer << @encoded_buffer
+            @encoded_buffer.clear
+          end
+
+          output_buffer.empty? && eof? ? nil : output_buffer
+        end
+
+        def eof?
+          @eof && @encoded_buffer.empty?
+        end
+
+        private
+
+        def calculate_overhead(chunk_size)
+          chunk_size.to_s(HEX_BASE).size + CHUNK_OVERHEAD
+        end
+
+        def fill_encoded_buffer(required_length)
+          return if required_length && @encoded_buffer.bytesize >= required_length
+
+          while !@eof && fill_data?(required_length)
+            chunk = @io.read(@base_chunk_size)
+            if chunk && !chunk.empty?
+              @digest.update(chunk)
+              @encoded_buffer << "#{chunk.bytesize.to_s(HEX_BASE)}\r\n#{chunk}\r\n"
+            else
+              @encoded_buffer << "0\r\n#{trailer_string}\r\n\r\n"
+              @eof = true
+            end
          end
-
+        end
+
+        def trailer_string
+          { @location_name => @digest.base64digest }.map { |k, v| "#{k}:#{v}" }.join("\r\n")
+        end
+
+        # Returns true if more data needs to be read into the buffer
+        def fill_data?(length)
+          length.nil? || @encoded_buffer.bytesize < length
        end
      end
    end