aptible-cli 0.19.4 → 0.19.6

checksums.yaml CHANGED
@@ -1,7 +1,7 @@
  ---
  SHA256:
- metadata.gz: f3f0904b2033596eb17ff98125288e5a2adec95ae6e379b95a5b8131b81bfe54
- data.tar.gz: 9c002c17e22dd77ff79360671d0057455a70ff4ee2311d1466651a5c39b3c1a2
+ metadata.gz: bc1a5d4d43d71792087a31f7994ed5a612dfe93ba90fd2ff3cebe63f5c8888a7
+ data.tar.gz: 7ac17167081e9edb3104b372a06622d736ce03051bb07279a690716eb7787ac9
  SHA512:
- metadata.gz: 0c1b0857f175135e6d7feb64a9a6a7b9633b73823f1f32100473f2f90c5b529a6dcf041b55bf4af47a92af08c425b24f4ce1abfacce3deea74b1e031e0bf5fa0
- data.tar.gz: ccb65684ab24eb9055adee5e17a04d0a9dbf2cdc496ba1b71a9a6a229e83ee092ad43c2ee660f73b5da95b47b875bdf2d0212d6ede3d932455ff67188d4b47c1
+ metadata.gz: b65bc6b0af2a8f88cc0fd72b47948ed01bbb3a32201ccaefd2b2dae3cdbe8042c6133e77862d79c63d4a0fa303460a5d453fa9f9a7ea7543af111a8101388fda
+ data.tar.gz: 7f8e3a443093b34b3b7428f249918e35327f92043cf0a35a248f7ef8aa8842f1a0d7ba8d48eeaa29f7d341be049f8f19e6b86d9d9ef91ea7401babaad2091fbf
data/.travis.yml CHANGED
@@ -2,10 +2,12 @@ dist: xenial
  sudo: false

  rvm:
- - "2.0"
  - "2.1"
  - "2.2"
  - "2.3"
+ - "2.4"
+ - "2.5"
+ - "2.6"

  script:
  - bundle exec rake
data/README.md CHANGED
@@ -28,75 +28,76 @@ From `aptible help`:
28
28
  <!-- BEGIN USAGE -->
29
29
  ```
30
30
  Commands:
31
- aptible apps # List all applications
32
- aptible apps:create HANDLE # Create a new application
33
- aptible apps:deprovision # Deprovision an app
34
- aptible apps:rename OLD_HANDLE NEW_HANDLE [--environment ENVIRONMENT_HANDLE] # Rename an app handle. In order for the new app handle to appear in log drain and metric drain destinations, you must restart the app.
35
- aptible apps:scale SERVICE [--container-count COUNT] [--container-size SIZE_MB] # Scale a service
36
- aptible backup:list DB_HANDLE # List backups for a database
37
- aptible backup:orphaned # List backups associated with deprovisioned databases
38
- aptible backup:purge BACKUP_ID # Permanently delete a backup and any copies of it
39
- aptible backup:restore BACKUP_ID [--environment ENVIRONMENT_HANDLE] [--handle HANDLE] [--container-size SIZE_MB] [--disk-size SIZE_GB] [--key-arn KEY_ARN] # Restore a backup
40
- aptible config # Print an app's current configuration
41
- aptible config:add [VAR1=VAL1] [VAR2=VAL2] [...] # Add an ENV variable to an app
42
- aptible config:rm [VAR1] [VAR2] [...] # Remove an ENV variable from an app
43
- aptible config:set [VAR1=VAL1] [VAR2=VAL2] [...] # Add an ENV variable to an app
44
- aptible config:unset [VAR1] [VAR2] [...] # Remove an ENV variable from an app
45
- aptible db:backup HANDLE # Backup a database
46
- aptible db:clone SOURCE DEST # Clone a database to create a new one
47
- aptible db:create HANDLE [--type TYPE] [--version VERSION] [--container-size SIZE_MB] [--disk-size SIZE_GB] [--key-arn KEY_ARN] # Create a new database
48
- aptible db:deprovision HANDLE # Deprovision a database
49
- aptible db:dump HANDLE [pg_dump options] # Dump a remote database to file
50
- aptible db:execute HANDLE SQL_FILE [--on-error-stop] # Executes sql against a database
51
- aptible db:list # List all databases
52
- aptible db:modify HANDLE [--iops IOPS] [--volume-type [gp2, gp3]] # Modify a database disk
53
- aptible db:reload HANDLE # Reload a database
54
- aptible db:rename OLD_HANDLE NEW_HANDLE [--environment ENVIRONMENT_HANDLE] # Rename a database handle. In order for the new database handle to appear in log drain and metric drain destinations, you must reload the database.
55
- aptible db:replicate HANDLE REPLICA_HANDLE [--container-size SIZE_MB] [--disk-size SIZE_GB] [--logical --version VERSION] [--key-arn KEY_ARN] # Create a replica/follower of a database
56
- aptible db:restart HANDLE [--container-size SIZE_MB] [--disk-size SIZE_GB] [--iops IOPS] [--volume-type [gp2, gp3]] # Restart a database
57
- aptible db:tunnel HANDLE # Create a local tunnel to a database
58
- aptible db:url HANDLE # Display a database URL
59
- aptible db:versions # List available database versions
60
- aptible deploy [OPTIONS] [VAR1=VAL1] [VAR2=VAL2] [...] # Deploy an app
61
- aptible endpoints:database:create DATABASE # Create a Database Endpoint
62
- aptible endpoints:database:modify --database DATABASE ENDPOINT_HOSTNAME # Modify a Database Endpoint
63
- aptible endpoints:deprovision [--app APP | --database DATABASE] ENDPOINT_HOSTNAME # Deprovision an App or Database Endpoint
64
- aptible endpoints:https:create [--app APP] SERVICE # Create an App HTTPS Endpoint
65
- aptible endpoints:https:modify [--app APP] ENDPOINT_HOSTNAME # Modify an App HTTPS Endpoint
66
- aptible endpoints:list [--app APP | --database DATABASE] # List Endpoints for an App or Database
67
- aptible endpoints:renew [--app APP] ENDPOINT_HOSTNAME # Renew an App Managed TLS Endpoint
68
- aptible endpoints:tcp:create [--app APP] SERVICE # Create an App TCP Endpoint
69
- aptible endpoints:tcp:modify [--app APP] ENDPOINT_HOSTNAME # Modify an App TCP Endpoint
70
- aptible endpoints:tls:create [--app APP] SERVICE # Create an App TLS Endpoint
71
- aptible endpoints:tls:modify [--app APP] ENDPOINT_HOSTNAME # Modify an App TLS Endpoint
72
- aptible environment:ca_cert # Retrieve the CA certificate associated with the environment
73
- aptible environment:list # List all environments
74
- aptible environment:rename OLD_HANDLE NEW_HANDLE # Rename an environment handle. In order for the new environment handle to appear in log drain/metric destinations, you must restart the apps/databases in this environment.
75
- aptible help [COMMAND] # Describe available commands or one specific command
76
- aptible log_drain:create:datadog HANDLE --url DATADOG_URL --environment ENVIRONMENT [--drain-apps true/false] [--drain_databases true/false] [--drain_ephemeral_sessions true/false] [--drain_proxies true/false] # Create a Datadog Log Drain
77
- aptible log_drain:create:elasticsearch HANDLE --db DATABASE_HANDLE --environment ENVIRONMENT [--drain-apps true/false] [--drain_databases true/false] [--drain_ephemeral_sessions true/false] [--drain_proxies true/false] # Create an Elasticsearch Log Drain
78
- aptible log_drain:create:https HANDLE --url URL --environment ENVIRONMENT [--drain-apps true/false] [--drain_databases true/false] [--drain_ephemeral_sessions true/false] [--drain_proxies true/false] # Create a HTTPS Drain
79
- aptible log_drain:create:logdna HANDLE --url LOGDNA_URL --environment ENVIRONMENT [--drain-apps true/false] [--drain_databases true/false] [--drain_ephemeral_sessions true/false] [--drain_proxies true/false] # Create a LogDNA Log Drain
80
- aptible log_drain:create:papertrail HANDLE --host PAPERTRAIL_HOST --port PAPERTRAIL_PORT --environment ENVIRONMENT [--drain-apps true/false] [--drain_databases true/false] [--drain_ephemeral_sessions true/false] [--drain_proxies true/false] # Create a Papertrail Log Drain
81
- aptible log_drain:create:sumologic HANDLE --url SUMOLOGIC_URL --environment ENVIRONMENT [--drain-apps true/false] [--drain_databases true/false] [--drain_ephemeral_sessions true/false] [--drain_proxies true/false] # Create a Sumologic Drain
82
- aptible log_drain:create:syslog HANDLE --host SYSLOG_HOST --port SYSLOG_PORT [--token TOKEN] --environment ENVIRONMENT [--drain-apps true/false] [--drain_databases true/false] [--drain_ephemeral_sessions true/false] [--drain_proxies true/false] # Create a Papertrail Log Drain
83
- aptible log_drain:deprovision HANDLE --environment ENVIRONMENT # Deprovisions a log drain
84
- aptible log_drain:list # List all Log Drains
85
- aptible login # Log in to Aptible
86
- aptible logs [--app APP | --database DATABASE] # Follows logs from a running app or database
87
- aptible metric_drain:create:datadog HANDLE --api_key DATADOG_API_KEY --site DATADOG_SITE --environment ENVIRONMENT # Create a Datadog Metric Drain
88
- aptible metric_drain:create:influxdb HANDLE --db DATABASE_HANDLE --environment ENVIRONMENT # Create an InfluxDB Metric Drain
89
- aptible metric_drain:create:influxdb:custom HANDLE --username USERNAME --password PASSWORD --url URL_INCLUDING_PORT --db INFLUX_DATABASE_NAME --environment ENVIRONMENT # Create an InfluxDB Metric Drain
90
- aptible metric_drain:deprovision HANDLE --environment ENVIRONMENT # Deprovisions a Metric Drain
91
- aptible metric_drain:list # List all Metric Drains
92
- aptible operation:cancel OPERATION_ID # Cancel a running operation
93
- aptible operation:follow OPERATION_ID # Follow logs of a running operation
94
- aptible operation:logs OPERATION_ID # View logs for given operation
95
- aptible rebuild # Rebuild an app, and restart its services
96
- aptible restart # Restart all services associated with an app
97
- aptible services # List Services for an App
98
- aptible ssh [COMMAND] # Run a command against an app
99
- aptible version # Print Aptible CLI version
31
+ aptible apps # List all applications
32
+ aptible apps:create HANDLE # Create a new application
33
+ aptible apps:deprovision # Deprovision an app
34
+ aptible apps:rename OLD_HANDLE NEW_HANDLE [--environment ENVIRONMENT_HANDLE] # Rename an app handle. In order for the new app handle to appear in log drain and metric drain destinations, you must restart the app.
35
+ aptible apps:scale SERVICE [--container-count COUNT] [--container-size SIZE_MB] # Scale a service
36
+ aptible backup:list DB_HANDLE # List backups for a database
37
+ aptible backup:orphaned # List backups associated with deprovisioned databases
38
+ aptible backup:purge BACKUP_ID # Permanently delete a backup and any copies of it
39
+ aptible backup:restore BACKUP_ID [--environment ENVIRONMENT_HANDLE] [--handle HANDLE] [--container-size SIZE_MB] [--disk-size SIZE_GB] [--key-arn KEY_ARN] # Restore a backup
40
+ aptible config # Print an app's current configuration
41
+ aptible config:add [VAR1=VAL1] [VAR2=VAL2] [...] # Add an ENV variable to an app
42
+ aptible config:rm [VAR1] [VAR2] [...] # Remove an ENV variable from an app
43
+ aptible config:set [VAR1=VAL1] [VAR2=VAL2] [...] # Add an ENV variable to an app
44
+ aptible config:unset [VAR1] [VAR2] [...] # Remove an ENV variable from an app
45
+ aptible db:backup HANDLE # Backup a database
46
+ aptible db:clone SOURCE DEST # Clone a database to create a new one
47
+ aptible db:create HANDLE [--type TYPE] [--version VERSION] [--container-size SIZE_MB] [--disk-size SIZE_GB] [--key-arn KEY_ARN] # Create a new database
48
+ aptible db:deprovision HANDLE # Deprovision a database
49
+ aptible db:dump HANDLE [pg_dump options] # Dump a remote database to file
50
+ aptible db:execute HANDLE SQL_FILE [--on-error-stop] # Executes sql against a database
51
+ aptible db:list # List all databases
52
+ aptible db:modify HANDLE [--iops IOPS] [--volume-type [gp2, gp3]] # Modify a database disk
53
+ aptible db:reload HANDLE # Reload a database
54
+ aptible db:rename OLD_HANDLE NEW_HANDLE [--environment ENVIRONMENT_HANDLE] # Rename a database handle. In order for the new database handle to appear in log drain and metric drain destinations, you must reload the database.
55
+ aptible db:replicate HANDLE REPLICA_HANDLE [--container-size SIZE_MB] [--disk-size SIZE_GB] [--logical --version VERSION] [--key-arn KEY_ARN] # Create a replica/follower of a database
56
+ aptible db:restart HANDLE [--container-size SIZE_MB] [--disk-size SIZE_GB] [--iops IOPS] [--volume-type [gp2, gp3]] # Restart a database
57
+ aptible db:tunnel HANDLE # Create a local tunnel to a database
58
+ aptible db:url HANDLE # Display a database URL
59
+ aptible db:versions # List available database versions
60
+ aptible deploy [OPTIONS] [VAR1=VAL1] [VAR2=VAL2] [...] # Deploy an app
61
+ aptible endpoints:database:create DATABASE # Create a Database Endpoint
62
+ aptible endpoints:database:modify --database DATABASE ENDPOINT_HOSTNAME # Modify a Database Endpoint
63
+ aptible endpoints:deprovision [--app APP | --database DATABASE] ENDPOINT_HOSTNAME # Deprovision an App or Database Endpoint
64
+ aptible endpoints:https:create [--app APP] SERVICE # Create an App HTTPS Endpoint
65
+ aptible endpoints:https:modify [--app APP] ENDPOINT_HOSTNAME # Modify an App HTTPS Endpoint
66
+ aptible endpoints:list [--app APP | --database DATABASE] # List Endpoints for an App or Database
67
+ aptible endpoints:renew [--app APP] ENDPOINT_HOSTNAME # Renew an App Managed TLS Endpoint
68
+ aptible endpoints:tcp:create [--app APP] SERVICE # Create an App TCP Endpoint
69
+ aptible endpoints:tcp:modify [--app APP] ENDPOINT_HOSTNAME # Modify an App TCP Endpoint
70
+ aptible endpoints:tls:create [--app APP] SERVICE # Create an App TLS Endpoint
71
+ aptible endpoints:tls:modify [--app APP] ENDPOINT_HOSTNAME # Modify an App TLS Endpoint
72
+ aptible environment:ca_cert # Retrieve the CA certificate associated with the environment
73
+ aptible environment:list # List all environments
74
+ aptible environment:rename OLD_HANDLE NEW_HANDLE # Rename an environment handle. In order for the new environment handle to appear in log drain/metric destinations, you must restart the apps/databases in this environment.
75
+ aptible help [COMMAND] # Describe available commands or one specific command
76
+ aptible log_drain:create:datadog HANDLE --url DATADOG_URL --environment ENVIRONMENT [--drain-apps true/false] [--drain_databases true/false] [--drain_ephemeral_sessions true/false] [--drain_proxies true/false] # Create a Datadog Log Drain
77
+ aptible log_drain:create:elasticsearch HANDLE --db DATABASE_HANDLE --environment ENVIRONMENT [--drain-apps true/false] [--drain_databases true/false] [--drain_ephemeral_sessions true/false] [--drain_proxies true/false] # Create an Elasticsearch Log Drain
78
+ aptible log_drain:create:https HANDLE --url URL --environment ENVIRONMENT [--drain-apps true/false] [--drain_databases true/false] [--drain_ephemeral_sessions true/false] [--drain_proxies true/false] # Create a HTTPS Drain
79
+ aptible log_drain:create:logdna HANDLE --url LOGDNA_URL --environment ENVIRONMENT [--drain-apps true/false] [--drain_databases true/false] [--drain_ephemeral_sessions true/false] [--drain_proxies true/false] # Create a LogDNA Log Drain
80
+ aptible log_drain:create:papertrail HANDLE --host PAPERTRAIL_HOST --port PAPERTRAIL_PORT --environment ENVIRONMENT [--drain-apps true/false] [--drain_databases true/false] [--drain_ephemeral_sessions true/false] [--drain_proxies true/false] # Create a Papertrail Log Drain
81
+ aptible log_drain:create:sumologic HANDLE --url SUMOLOGIC_URL --environment ENVIRONMENT [--drain-apps true/false] [--drain_databases true/false] [--drain_ephemeral_sessions true/false] [--drain_proxies true/false] # Create a Sumologic Drain
82
+ aptible log_drain:create:syslog HANDLE --host SYSLOG_HOST --port SYSLOG_PORT [--token TOKEN] --environment ENVIRONMENT [--drain-apps true/false] [--drain_databases true/false] [--drain_ephemeral_sessions true/false] [--drain_proxies true/false] # Create a Papertrail Log Drain
83
+ aptible log_drain:deprovision HANDLE --environment ENVIRONMENT # Deprovisions a log drain
84
+ aptible log_drain:list # List all Log Drains
85
+ aptible login # Log in to Aptible
86
+ aptible logs [--app APP | --database DATABASE] # Follows logs from a running app or database
87
+ aptible logs_from_archive --bucket NAME --region REGION --stack NAME [ --decryption-keys ONE [OR MORE] ] [ --download-location LOCATION ] [ [ --string-matches ONE [OR MORE] ] | [ --app-id ID | --database-id ID | --endpoint-id ID | --container-id ID ] [ --start-date YYYY-MM-DD --end-date YYYY-MM-DD ] ] --bucket=BUCKET --region=REGION --stack=STACK # Retrieves container logs from an S3 archive in your own AWS account. You must provide your AWS credentials via the environment variables AWS_ACCESS_KEY_ID and AWS_SECRET_ACCESS_KEY
88
+ aptible metric_drain:create:datadog HANDLE --api_key DATADOG_API_KEY --site DATADOG_SITE --environment ENVIRONMENT # Create a Datadog Metric Drain
89
+ aptible metric_drain:create:influxdb HANDLE --db DATABASE_HANDLE --environment ENVIRONMENT # Create an InfluxDB Metric Drain
90
+ aptible metric_drain:create:influxdb:custom HANDLE --username USERNAME --password PASSWORD --url URL_INCLUDING_PORT --db INFLUX_DATABASE_NAME --environment ENVIRONMENT # Create an InfluxDB Metric Drain
91
+ aptible metric_drain:deprovision HANDLE --environment ENVIRONMENT # Deprovisions a Metric Drain
92
+ aptible metric_drain:list # List all Metric Drains
93
+ aptible operation:cancel OPERATION_ID # Cancel a running operation
94
+ aptible operation:follow OPERATION_ID # Follow logs of a running operation
95
+ aptible operation:logs OPERATION_ID # View logs for given operation
96
+ aptible rebuild # Rebuild an app, and restart its services
97
+ aptible restart # Restart all services associated with an app
98
+ aptible services # List Services for an App
99
+ aptible ssh [COMMAND] # Run a command against an app
100
+ aptible version # Print Aptible CLI version
100
101
  ```
101
102
  <!-- END USAGE -->
102
103
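The headline addition in this release is the new `aptible logs_from_archive` command shown in the usage text above. Two hypothetical invocations using only the documented flags (the bucket, stack, and key values below are placeholders):

```
# List matching archived log files without downloading anything
aptible logs_from_archive --bucket my-log-bucket --region us-east-1 --stack my-stack \
  --app-id 123 --start-date 2022-08-01 --end-date 2022-08-31

# Download and decrypt the same files; AWS credentials come from the environment
AWS_ACCESS_KEY_ID=... AWS_SECRET_ACCESS_KEY=... \
  aptible logs_from_archive --bucket my-log-bucket --region us-east-1 --stack my-stack \
  --app-id 123 --decryption-keys BASE64_KEY --download-location ./archived-logs
```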
 
data/aptible-cli.gemspec CHANGED
@@ -29,6 +29,7 @@ Gem::Specification.new do |spec|
  spec.add_dependency 'term-ansicolor'
  spec.add_dependency 'chronic_duration', '~> 0.10.6'
  spec.add_dependency 'cbor'
+ spec.add_dependency 'aws-sdk', '~> 2.0'

  # Temporarily pin ffi until https://github.com/ffi/ffi/issues/868 is fixed
  spec.add_dependency 'ffi', '<= 1.14.1' if Gem.win_platform?
@@ -21,6 +21,7 @@ require_relative 'helpers/security_key'
  require_relative 'helpers/config_path'
  require_relative 'helpers/log_drain'
  require_relative 'helpers/metric_drain'
+ require_relative 'helpers/s3_log_helpers'

  require_relative 'subcommands/apps'
  require_relative 'subcommands/config'
@@ -0,0 +1,225 @@
1
+ require 'aws-sdk'
2
+ require 'pathname'
3
+
4
+ module Aptible
5
+ module CLI
6
+ module Helpers
7
+ module S3LogHelpers
8
+ def ensure_aws_creds
9
+ cred_errors = []
10
+ unless ENV['AWS_ACCESS_KEY_ID']
11
+ cred_errors << 'Missing environment variable: AWS_ACCESS_KEY_ID'
12
+ end
13
+ unless ENV['AWS_SECRET_ACCESS_KEY']
14
+ cred_errors << 'Missing environment variable: AWS_SECRET_ACCESS_KEY'
15
+ end
16
+ raise Thor::Error, cred_errors.join(' ') if cred_errors.any?
17
+ end
18
+
19
+ def validate_log_search_options(options = {})
20
+ id_options = [
21
+ options[:app_id],
22
+ options[:database_id],
23
+ options[:endpoint_id],
24
+ options[:container_id]
25
+ ]
26
+ date_options = [options[:start_date], options[:end_date]]
27
+ unless options[:string_matches] || id_options.any?
28
+ m = 'You must specify an option to identify the logs to download,' \
29
+ ' either: --string-matches, --app-id, --database-id,' \
30
+ ' --endpoint-id, or --container-id'
31
+ raise Thor::Error, m
32
+ end
33
+
34
+ m = 'You cannot pass --app-id, --database-id, --endpoint-id, or ' \
35
+ '--container-id when using --string-matches.'
36
+ raise Thor::Error, m if options[:string_matches] && id_options.any?
37
+
38
+ m = 'You must specify only one of ' \
39
+ '--app-id, --database-id, --endpoint-id or --container-id'
40
+ raise Thor::Error, m if id_options.any? && !id_options.one?
41
+
42
+ m = 'The options --start-date/--end-date cannot be used when ' \
43
+ 'searching by string'
44
+ raise Thor::Error, m if options[:string_matches] && date_options.any?
45
+
46
+ m = 'You must pass both --start-date and --end-date'
47
+ raise Thor::Error, m if date_options.any? && !date_options.all?
48
+
49
+ if options[:container_id] && options[:container_id].length < 12
50
+ m = 'You must specify at least the first 12 characters of the ' \
51
+ 'container ID'
52
+ raise Thor::Error, m
53
+ end
54
+
55
+ if options[:download_location] && !options[:decryption_keys]
56
+ m = 'You must provide decryption keys with the --decryption-keys' \
57
+ ' option in order to download files.'
58
+ raise Thor::Error, m
59
+ end
60
+ end
61
+
62
+ def info_from_path(file)
63
+ properties = {}
64
+
65
+ properties[:stack], _, properties[:schema],
66
+ properties[:shasum], type_id, *remainder = file.split('/')
67
+
68
+ properties[:id] = type_id.split('-').last.to_i
69
+ properties[:type] = type_id.split('-').first
70
+
71
+ case properties[:schema]
72
+ when 'v2'
73
+ # Eliminate the extensions
74
+ split_by_dot = remainder.pop.split('.') - %w(log bck gz)
75
+ properties[:container_id] = split_by_dot.first.delete!('-json')
76
+ properties[:uploaded_at] = utc_datetime(split_by_dot.last)
77
+ when 'v3'
78
+ case properties[:type]
79
+ when 'apps'
80
+ properties[:service_id] = remainder.first.split('-').last.to_i
81
+ file_name = remainder.second
82
+ else
83
+ file_name = remainder.first
84
+ end
85
+ # The file name may have differing number of elements due to
86
+ # docker file log rotation. So we eliminate some useless items
87
+ # and then work from the beginning or end of the remaining to find
88
+ # known elements, ignoring any .1 .2 (or none at all) extension
89
+ # found in the middle of the file name. EG:
90
+ # ['container_id', 'start_time', 'end_time']
91
+ # or
92
+ # ['container_id', '.1', 'start_time', 'end_time']]
93
+ split_by_dot = file_name.split('.') - %w(log gz archived)
94
+ properties[:container_id] = split_by_dot.first.delete!('-json')
95
+ properties[:start_time] = utc_datetime(split_by_dot[-2])
96
+ properties[:end_time] = utc_datetime(split_by_dot[-1])
97
+ else
98
+ m = "Cannot determine aptible log naming schema from #{file}"
99
+ raise Thor::Error, m
100
+ end
101
+ properties
102
+ end
103
+
104
+ def decrypt_and_translate_s3_file(file, enc_key, region, bucket, path)
105
+ # AWS warns us about using the legacy encryption schema
106
+ s3 = Kernel.silence_warnings do
107
+ Aws::S3::EncryptionV2::Client.new(
108
+ encryption_key: enc_key, region: region,
109
+ key_wrap_schema: :aes_gcm,
110
+ content_encryption_schema: :aes_gcm_no_padding,
111
+ security_profile: :v2_and_legacy
112
+ )
113
+ end
114
+
115
+ # Just write it to a file directly
116
+ location = File.join(path, file.split('/').drop(4).join('/'))
117
+ FileUtils.mkdir_p(File.dirname(location))
118
+ File.open(location, 'wb') do |f|
119
+ CLI.logger.info location
120
+ # Is this memory efficient?
121
+ s3.get_object(bucket: bucket, key: file, response_target: f)
122
+ end
123
+ end
124
+
125
+ def find_s3_files_by_string_match(region, bucket, stack, strings)
126
+ # This function just regex matches a provided string anywhere
127
+ # in the s3 path
128
+ begin
129
+ stack_logs = s3_client(region).bucket(bucket)
130
+ .objects(prefix: stack)
131
+ .map(&:key)
132
+ rescue => error
133
+ raise Thor::Error, error.message
134
+ end
135
+ strings.each do |s|
136
+ stack_logs = stack_logs.select { |f| f =~ /#{s}/ }
137
+ end
138
+ stack_logs
139
+ end
140
+
141
+ def find_s3_files_by_attrs(region, bucket, stack,
142
+ attrs, time_range = nil)
143
+ # This function uses the known path schema to return files matching
144
+ # any provided criteria. EG:
145
+ # * attrs: { :type => 'app', :id => 123 }
146
+ # * attrs: { :container_id => 'deadbeef' }
147
+
148
+ begin
149
+ stack_logs = s3_client(region).bucket(bucket)
150
+ .objects(prefix: stack)
151
+ .map(&:key)
152
+ rescue => error
153
+ raise Thor::Error, error.message
154
+ end
155
+ attrs.each do |k, v|
156
+ stack_logs = stack_logs.select do |f|
157
+ if k == :container_id
158
+ # Match short container IDs
159
+ info_from_path(f)[k].start_with?(v)
160
+ else
161
+ info_from_path(f)[k] == v
162
+ end
163
+ end
164
+ end
165
+
166
+ if time_range
167
+ # select only logs within the time range
168
+ stack_logs = stack_logs.select do |f|
169
+ info = info_from_path(f)
170
+ first_log = info[:start_time]
171
+ last_log = info[:end_time]
172
+ if first_log.nil? || last_log.nil?
173
+ m = 'Cannot determine precise timestamps of file: ' \
174
+ "#{f.split('/').drop(4).join('/')}"
175
+ CLI.logger.warn m
176
+ false
177
+ else
178
+ time_match?(time_range, first_log, last_log)
179
+ end
180
+ end
181
+ end
182
+
183
+ stack_logs
184
+ end
185
+
186
+ def time_match?(time_range, start_timestamp, end_timestamp)
187
+ return false if start_timestamp.nil? || end_timestamp.nil?
188
+ return false if time_range.last < start_timestamp
189
+ return false if time_range.first > end_timestamp
190
+ true
191
+ end
192
+
193
+ def utc_date(date_string)
194
+ t_fmt = '%Y-%m-%d %Z'
195
+ Time.strptime("#{date_string} UTC", t_fmt)
196
+ rescue ArgumentError
197
+ raise Thor::Error, 'Please provide dates in YYYY-MM-DD format'
198
+ end
199
+
200
+ def utc_datetime(datetime_string)
201
+ Time.parse("#{datetime_string}Z")
202
+ rescue ArgumentError
203
+ nil
204
+ end
205
+
206
+ def encryption_key(filesum, possible_keys)
207
+ # The key can be determined from the sum
208
+ possible_keys.each do |k|
209
+ keysum = Digest::SHA256.hexdigest(Base64.strict_decode64(k))
210
+ next unless keysum == filesum
211
+ return Base64.strict_decode64(k)
212
+ end
213
+ m = "Did not find a matching key for shasum #{filesum}"
214
+ raise Thor::Error, m
215
+ end
216
+
217
+ def s3_client(region)
218
+ @s3_client ||= Kernel.silence_warnings do
219
+ Aws::S3::Resource.new(region: region)
220
+ end
221
+ end
222
+ end
223
+ end
224
+ end
225
+ end
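The 225-line module above does the path bookkeeping for archived logs. As a quick orientation, here is a minimal sketch (not part of the gem) of how `info_from_path` and `time_match?` behave on a hypothetical v3 archive key, assuming this version of aptible-cli is installed:

```ruby
require 'aptible/cli'

# Anonymous host class, mirroring how the gem's own specs exercise the helpers.
helpers = Class.new { include Aptible::CLI::Helpers::S3LogHelpers }.new

# Hypothetical S3 key following the v3 naming schema parsed above.
key = 'mystack/shareable/v3/abc123/apps-42/service-7/' \
      'deadbeefcafe-json.log.2022-08-24T21:12:33.2022-08-24T21:14:38.archived.gz'

info = helpers.info_from_path(key)
info[:type]         # => "apps"
info[:id]           # => 42
info[:service_id]   # => 7
info[:container_id] # => "deadbeefcafe"

# time_match? keeps a file when its [start_time, end_time] window overlaps the search range.
range = [Time.utc(2022, 8, 24), Time.utc(2022, 8, 25)]
helpers.time_match?(range, info[:start_time], info[:end_time]) # => true
```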
@@ -1,4 +1,6 @@
1
+ require 'aws-sdk'
1
2
  require 'shellwords'
3
+ require 'time'
2
4
 
3
5
  module Aptible
4
6
  module CLI
@@ -8,6 +10,7 @@ module Aptible
8
10
  thor.class_eval do
9
11
  include Helpers::Operation
10
12
  include Helpers::AppOrDatabase
13
+ include Helpers::S3LogHelpers
11
14
 
12
15
  desc 'logs [--app APP | --database DATABASE]',
13
16
  'Follows logs from a running app or database'
@@ -25,6 +28,148 @@ module Aptible
25
28
  ENV['ACCESS_TOKEN'] = fetch_token
26
29
  exit_with_ssh_portal(op, '-o', 'SendEnv=ACCESS_TOKEN', '-T')
27
30
  end
31
+
32
+ desc 'logs_from_archive --bucket NAME --region REGION ' \
33
+ '--stack NAME [ --decryption-keys ONE [OR MORE] ] ' \
34
+ '[ --download-location LOCATION ] ' \
35
+ '[ [ --string-matches ONE [OR MORE] ] ' \
36
+ '| [ --app-id ID | --database-id ID | --endpoint-id ID | ' \
37
+ '--container-id ID ] ' \
38
+ '[ --start-date YYYY-MM-DD --end-date YYYY-MM-DD ] ]',
39
+ 'Retrieves container logs from an S3 archive in your own ' \
40
+ 'AWS account. You must provide your AWS credentials via ' \
41
+ 'the environment variables AWS_ACCESS_KEY_ID and ' \
42
+ 'AWS_SECRET_ACCESS_KEY'
43
+
44
+ # Required to retrieve files
45
+ option :region,
46
+ desc: 'The AWS region your S3 bucket resides in',
47
+ type: :string, required: true
48
+ option :bucket,
49
+ desc: 'The name of your S3 bucket',
50
+ type: :string, required: true
51
+ option :stack,
52
+ desc: 'The name of the Stack to download logs from',
53
+ type: :string, required: true
54
+ option :decryption_keys,
55
+ desc: 'The Aptible-provided keys for decryption. ' \
56
+ '(Space separated if multiple)',
57
+ type: :array
58
+
59
+ # For identifying files to download
60
+ option :string_matches,
61
+ desc: 'The strings to match in log file names. ' \
62
+ '(Space separated if multiple)',
63
+ type: :array
64
+ option :app_id,
65
+ desc: 'The Application ID to download logs for.',
66
+ type: :numeric
67
+ option :database_id,
68
+ desc: 'The Database ID to download logs for.',
69
+ type: :numeric
70
+ option :endpoint_id,
71
+ desc: 'The Endpoint ID to download logs for.',
72
+ type: :numeric
73
+ option :container_id,
74
+ desc: 'The container ID to download logs for'
75
+ option :start_date,
76
+ desc: 'Get logs starting from this (UTC) date ' \
77
+ '(format: YYYY-MM-DD)',
78
+ type: :string
79
+ option :end_date,
80
+ desc: 'Get logs before this (UTC) date (format: YYYY-MM-DD)',
81
+ type: :string
82
+
83
+ # We don't download by default
84
+ option :download_location,
85
+ desc: 'The local path to place downloaded log files. ' \
86
+ 'If you do not set this option, the file names ' \
87
+ 'will be shown, but not downloaded.',
88
+ type: :string
89
+
90
+ def logs_from_archive
91
+ ensure_aws_creds
92
+ validate_log_search_options(options)
93
+
94
+ id_options = [
95
+ options[:app_id],
96
+ options[:database_id],
97
+ options[:endpoint_id],
98
+ options[:container_id]
99
+ ]
100
+
101
+ date_options = [options[:start_date], options[:end_date]]
102
+
103
+ r_type = 'apps' if options[:app_id]
104
+ r_type = 'databases' if options[:database_id]
105
+ r_type = 'proxy' if options[:endpoint_id]
106
+
107
+ if date_options.any?
108
+ start_date = utc_date(options[:start_date])
109
+ end_date = utc_date(options[:end_date])
110
+ if end_date < start_date
111
+ raise Thor::Error, 'End date must be after start date.'
112
+ end
113
+ time_range = [start_date, end_date]
114
+ CLI.logger.info "Searching from #{start_date} to #{end_date}"
115
+ else
116
+ time_range = nil
117
+ end
118
+
119
+ # --string-matches is useful for matching by partial container id,
120
+ # or for more flexibility than the currently supported id_options
121
+ # may allow for. We should update id_options with new use cases,
122
+ # but leave string_matches as a way to download any named file
123
+ if options[:string_matches]
124
+ files = find_s3_files_by_string_match(
125
+ options[:region],
126
+ options[:bucket],
127
+ options[:stack],
128
+ options[:string_matches]
129
+ )
130
+ elsif id_options.any?
131
+ if options[:container_id]
132
+ search_attrs = { container_id: options[:container_id] }
133
+ else
134
+ search_attrs = { type: r_type, id: id_options.compact.first }
135
+ end
136
+ files = find_s3_files_by_attrs(
137
+ options[:region],
138
+ options[:bucket],
139
+ options[:stack],
140
+ search_attrs,
141
+ time_range
142
+ )
143
+ end
144
+
145
+ unless files.any?
146
+ raise Thor::Error, 'No files found that matched all criteria'
147
+ end
148
+
149
+ CLI.logger.info "Found #{files.count} matching files..."
150
+
151
+ if options[:download_location]
152
+ # Since these files likely contain PHI, we will only download
153
+ # them if the user is explicit about where to save them.
154
+ files.each do |file|
155
+ shasum = info_from_path(file)[:shasum]
156
+ decrypt_and_translate_s3_file(
157
+ file,
158
+ encryption_key(shasum, options[:decryption_keys]),
159
+ options[:region],
160
+ options[:bucket],
161
+ options[:download_location]
162
+ )
163
+ end
164
+ else
165
+ files.each do |file|
166
+ CLI.logger.info file.split('/').drop(4).join('/')
167
+ end
168
+ m = 'No files were downloaded. Please provide a location ' \
169
+ 'with --download-location to download the files.'
170
+ CLI.logger.warn m
171
+ end
172
+ end
28
173
  end
29
174
  end
30
175
  end
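Everything the new subcommand does above is gated by `validate_log_search_options` before any S3 call is made. A small sketch (not part of the gem) of which hypothetical option combinations it accepts or rejects:

```ruby
require 'aptible/cli'

helpers = Class.new { include Aptible::CLI::Helpers::S3LogHelpers }.new

# Accepted: exactly one identifier, optionally with a complete date range.
helpers.validate_log_search_options(app_id: 123)
helpers.validate_log_search_options(string_matches: %w(deadbeef))
helpers.validate_log_search_options(database_id: 9,
                                    start_date: '2022-08-01',
                                    end_date: '2022-08-31')

# Rejected: each of these raises Thor::Error with a specific message.
[
  {},                                         # no identifier at all
  { app_id: 1, database_id: 2 },              # more than one identifier
  { app_id: 1, start_date: '2022-08-01' },    # open-ended date range
  { container_id: 'tooshort' },               # container ID shorter than 12 characters
  { app_id: 1, download_location: './logs' }  # download without --decryption-keys
].each do |opts|
  begin
    helpers.validate_log_search_options(opts)
  rescue Thor::Error => e
    puts "rejected #{opts.inspect}: #{e.message}"
  end
end
```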
@@ -1,5 +1,5 @@
  module Aptible
  module CLI
- VERSION = '0.19.4'.freeze
+ VERSION = '0.19.6'.freeze
  end
  end
@@ -0,0 +1,334 @@
1
+ require 'spec_helper'
2
+
3
+ describe Aptible::CLI::Helpers::S3LogHelpers do
4
+ subject { Class.new.send(:include, described_class).new }
5
+ let(:v2_pfx) { 'mystack/shareable/v2/fakesha' }
6
+ let(:v3_pfx) { 'mystack/shareable/v3/fakesha' }
7
+ let(:v2app) do
8
+ "#{v2_pfx}/apps-321/fakebread-json.log.2022-06-29T18:30:01.bck.gz"
9
+ end
10
+ let(:v2app_rotated) do
11
+ "#{v2_pfx}/apps-321/fakebread-json.1.log.2022-06-29T18:30:01.bck.gz"
12
+ end
13
+ let(:v3app) do
14
+ "#{v3_pfx}/apps-321/service-123/deadbeef-json.log." \
15
+ '2022-08-24T21:12:33.2022-08-24T21:14:38.archived.gz'
16
+ end
17
+ let(:v3db) do
18
+ "#{v3_pfx}/databases-321/fakebread-json.log." \
19
+ '2022-08-24T21:12:33.2022-08-24T21:14:38.archived.gz'
20
+ end
21
+ let(:v3db_rotated) do
22
+ "#{v3_pfx}/databases-321/fakebread-json.log.1." \
23
+ '2022-08-24T21:12:33.2022-08-24T21:14:38.archived.gz'
24
+ end
25
+
26
+ describe '#ensure_aws_creds' do
27
+ it 'Raises if no keys are provided via ENV' do
28
+ expect { subject.ensure_aws_creds }
29
+ .to raise_error(Thor::Error, /Missing environment variable/)
30
+ end
31
+
32
+ it 'Accepts AWS keypair from the ENV' do
33
+ ENV['AWS_ACCESS_KEY_ID'] = 'foo'
34
+ ENV['AWS_SECRET_ACCESS_KEY'] = 'bar'
35
+ expect { subject.ensure_aws_creds }.to_not raise_error
36
+ end
37
+ end
38
+
39
+ describe '#info_from_path' do
40
+ context 'time zones are in UTC' do
41
+ it 'processes v2 upload time in UTC' do
42
+ result = subject.info_from_path(v2app)
43
+ expect(result[:uploaded_at].zone).to eq('UTC')
44
+ end
45
+
46
+ it 'processes v3 log times in UTC' do
47
+ result = subject.info_from_path(v3app)
48
+ expect(result[:start_time].zone).to eq('UTC')
49
+ expect(result[:end_time].zone).to eq('UTC')
50
+ end
51
+ end
52
+
53
+ it 'does not choke on v3 logs with unknown timestamps' do
54
+ path = "#{v3_pfx}/apps-321/service-123/deadbeef-json.log." \
55
+ 'unknown.unknown.archived.gz'
56
+ result = subject.info_from_path(path)
57
+ expect(result[:start_time]).to be(nil)
58
+ expect(result[:end_time]).to be(nil)
59
+ end
60
+
61
+ it 'can read app data from v2 paths' do
62
+ result = subject.info_from_path(v2app)
63
+ expect(result[:schema]).to eq('v2')
64
+ expect(result[:shasum]).to eq('fakesha')
65
+ expect(result[:type]).to eq('apps')
66
+ expect(result[:id]).to eq(321)
67
+ expect(result[:service_id]).to be(nil)
68
+ expect(result[:container_id]).to eq('fakebread')
69
+ expect(result[:uploaded_at]).to eq('2022-06-29T18:30:01')
70
+ expect(result[:container_id]).to eq('fakebread')
71
+ expect(result[:start_time]).to be(nil)
72
+ expect(result[:end_time]).to be(nil)
73
+ end
74
+
75
+ it 'can read app data from v3 paths' do
76
+ result = subject.info_from_path(v3app)
77
+ expect(result[:schema]).to eq('v3')
78
+ expect(result[:shasum]).to eq('fakesha')
79
+ expect(result[:type]).to eq('apps')
80
+ expect(result[:id]).to eq(321)
81
+ expect(result[:service_id]).to eq(123)
82
+ expect(result[:container_id]).to eq('deadbeef')
83
+ expect(result[:uploaded_at]).to be(nil)
84
+ expect(result[:start_time]).to eq('2022-08-24T21:12:33')
85
+ expect(result[:end_time]).to eq('2022-08-24T21:14:38')
86
+ end
87
+
88
+ it 'can read db data from v3 paths' do
89
+ result = subject.info_from_path(v3db)
90
+ expect(result[:schema]).to eq('v3')
91
+ expect(result[:shasum]).to eq('fakesha')
92
+ expect(result[:type]).to eq('databases')
93
+ expect(result[:id]).to eq(321)
94
+ expect(result[:service_id]).to be(nil)
95
+ expect(result[:container_id]).to eq('fakebread')
96
+ expect(result[:uploaded_at]).to be(nil)
97
+ expect(result[:start_time]).to eq('2022-08-24T21:12:33')
98
+ expect(result[:end_time]).to eq('2022-08-24T21:14:38')
99
+ end
100
+
101
+ context 'files that have been rotated by docker (.json.log.1)' do
102
+ it 'can read data from v3 paths' do
103
+ result = subject.info_from_path(v3db_rotated)
104
+ expect(result[:schema]).to eq('v3')
105
+ expect(result[:shasum]).to eq('fakesha')
106
+ expect(result[:type]).to eq('databases')
107
+ expect(result[:id]).to eq(321)
108
+ expect(result[:service_id]).to be(nil)
109
+ expect(result[:container_id]).to eq('fakebread')
110
+ expect(result[:uploaded_at]).to be(nil)
111
+ expect(result[:start_time]).to eq('2022-08-24T21:12:33')
112
+ expect(result[:end_time]).to eq('2022-08-24T21:14:38')
113
+ end
114
+
115
+ it 'can read app data from v2 paths' do
116
+ result = subject.info_from_path(v2app)
117
+ expect(result[:schema]).to eq('v2')
118
+ expect(result[:shasum]).to eq('fakesha')
119
+ expect(result[:type]).to eq('apps')
120
+ expect(result[:id]).to eq(321)
121
+ expect(result[:service_id]).to be(nil)
122
+ expect(result[:container_id]).to eq('fakebread')
123
+ expect(result[:uploaded_at]).to eq('2022-06-29T18:30:01')
124
+ expect(result[:container_id]).to eq('fakebread')
125
+ expect(result[:start_time]).to be(nil)
126
+ expect(result[:end_time]).to be(nil)
127
+ end
128
+ end
129
+ end
130
+
131
+ describe '#validate_log_search_options' do
132
+ it 'Forces you to identify the files with a supported option' do
133
+ opts = {}
134
+ expect { subject.validate_log_search_options(opts) }
135
+ .to raise_error(Thor::Error, / specify an option to identify/)
136
+ end
137
+
138
+ it 'Does not let you pass --string-matches and id options' do
139
+ opts = { string_matches: ['foo'], app_id: 123 }
140
+ expect { subject.validate_log_search_options(opts) }
141
+ .to raise_error(Thor::Error, /cannot pass/)
142
+ end
143
+
144
+ it 'Does not let you pass multiple id options' do
145
+ opts = { database_id: 12, app_id: 23 }
146
+ expect { subject.validate_log_search_options(opts) }
147
+ .to raise_error(Thor::Error, /specify only one of/)
148
+ end
149
+
150
+ it 'Does not let you use date options with string-matches' do
151
+ opts = { string_matches: 12, start_date: 'foo' }
152
+ expect { subject.validate_log_search_options(opts) }
153
+ .to raise_error(Thor::Error, /cannot be used when searching by string/)
154
+ end
155
+
156
+ it 'Does not allow open-ended date range.' do
157
+ opts = { app_id: 123, start_date: 'foo' }
158
+ expect { subject.validate_log_search_options(opts) }
159
+ .to raise_error(Thor::Error, /must pass both/)
160
+ end
161
+
162
+ it 'Ensures you have provided a long enough container ID' do
163
+ opts = { container_id: 'tooshort' }
164
+ expect { subject.validate_log_search_options(opts) }
165
+ .to raise_error(Thor::Error, /at least the first 12/)
166
+ end
167
+
168
+ it 'Requires you to pass keys when downloading' do
169
+ opts = { app_id: 123, download_location: 'asdf' }
170
+ expect { subject.validate_log_search_options(opts) }
171
+ .to raise_error(Thor::Error, /You must provide decryption keys/)
172
+ end
173
+ end
174
+
175
+ describe '#find_s3_files_by_string_match' do
176
+ client_stub = Aws::S3::Client.new(stub_responses: true)
177
+ client_stub.stub_responses(
178
+ :list_buckets, buckets: [{ name: 'bucket' }]
179
+ )
180
+ client_stub.stub_responses(
181
+ :list_objects_v2, contents: [
182
+ { key: 'stack/it/doesnt/matter' },
183
+ { key: 'stack/matter/it/does/not/yoda' }
184
+ ]
185
+ )
186
+ before do
187
+ subject.stub(:s3_client) do
188
+ Aws::S3::Resource.new(region: 'us-east-1', client: client_stub)
189
+ end
190
+ end
191
+
192
+ it 'finds files with a single matching string' do
193
+ strings = %w(yoda)
194
+ result = subject.find_s3_files_by_string_match('us-east-1', 'bucket',
195
+ 'stack', strings)
196
+ expect(result).to match_array(%w(stack/matter/it/does/not/yoda))
197
+ end
198
+
199
+ it 'finds files with two matching strings' do
200
+ strings = %w(it matter)
201
+ result = subject.find_s3_files_by_string_match('us-east-1', 'bucket',
202
+ 'stack', strings)
203
+ expect(result).to match_array(%w(stack/it/doesnt/matter
204
+ stack/matter/it/does/not/yoda))
205
+ end
206
+
207
+ it 'only find files with all matching strings' do
208
+ strings = %w(it yoda)
209
+ result = subject.find_s3_files_by_string_match('us-east-1', 'bucket',
210
+ 'stack', strings)
211
+ expect(result).to match_array(%w(stack/matter/it/does/not/yoda))
212
+ end
213
+ end
214
+
215
+ describe '#find_s3_files_by_attrs' do
216
+ before do
217
+ client_stub = Aws::S3::Client.new(stub_responses: true)
218
+ client_stub.stub_responses(
219
+ :list_buckets, buckets: [{ name: 'bucket' }]
220
+ )
221
+ client_stub.stub_responses(
222
+ :list_objects_v2, contents: [
223
+ { key: v2app },
224
+ { key: v2app_rotated },
225
+ { key: v3db_rotated },
226
+ { key: v3db },
227
+ { key: v3app }
228
+ ]
229
+ )
230
+ subject.stub(:s3_client) do
231
+ Aws::S3::Resource.new(region: 'us-east-1', client: client_stub)
232
+ end
233
+ end
234
+
235
+ it 'can find apps by id' do
236
+ attrs = { type: 'apps', id: 321 }
237
+ result = subject.find_s3_files_by_attrs('us-east-1', 'bucket',
238
+ 'stack', attrs)
239
+ expect(result).to match_array([v3app, v2app, v2app_rotated])
240
+ end
241
+
242
+ it 'can find databases by id' do
243
+ attrs = { type: 'databases', id: 321 }
244
+ result = subject.find_s3_files_by_attrs('us-east-1', 'bucket',
245
+ 'stack', attrs)
246
+ expect(result).to match_array([v3db, v3db_rotated])
247
+ end
248
+
249
+ it 'can find by other attributes of the log file like container id' do
250
+ attrs = { container_id: 'deadbeef' }
251
+ result = subject.find_s3_files_by_attrs('us-east-1', 'bucket',
252
+ 'stack', attrs)
253
+ expect(result).to match_array([v3app])
254
+ end
255
+ end
256
+
257
+ describe '#time_match?' do
258
+ # Here's a representation of the test cases. We keep the file timestamps
259
+ # fixed and move --start-date/--end-date around to all possible combos.
260
+ # Note that we do force the start to be earlier than the end, which keeps the
261
+ # logic here quite simple.
262
+
263
+ # | |se
264
+ # | s|e
265
+ # s| |e
266
+ # |se|
267
+ # s|e |
268
+ # se| |
269
+
270
+ # s = start / lower bound of search
271
+ # e = end / upper bound of search
272
+ # |'s are the first and last timestamp in the file
273
+
274
+ let(:first_log) { Time.parse('2022-08-01T00:00:00') }
275
+ let(:last_log) { Time.parse('2022-09-01T00:00:00') }
276
+ let(:before) { Time.parse('2022-07-01T00:00:00') }
277
+ let(:between) { Time.parse('2022-08-15T00:00:00') }
278
+ let(:after) { Time.parse('2022-10-01T00:00:00') }
279
+
280
+ context 'identifies files that may have lines within a range' do
281
+ it 'before before does not match' do
282
+ range = [before, before]
283
+ expect(subject.time_match?(range, first_log, last_log)).to be(false)
284
+ end
285
+
286
+ it 'before between matches' do
287
+ range = [before, between]
288
+ expect(subject.time_match?(range, first_log, last_log)).to be(true)
289
+ end
290
+
291
+ it 'between between matches' do
292
+ range = [between, between]
293
+ expect(subject.time_match?(range, first_log, last_log)).to be(true)
294
+ end
295
+
296
+ it 'before after matches' do
297
+ range = [before, after]
298
+ expect(subject.time_match?(range, first_log, last_log)).to be(true)
299
+ end
300
+
301
+ it 'between after matches' do
302
+ range = [between, after]
303
+ expect(subject.time_match?(range, first_log, last_log)).to be(true)
304
+ end
305
+
306
+ it 'after after does not match' do
307
+ range = [after, after]
308
+ expect(subject.time_match?(range, first_log, last_log)).to be(false)
309
+ end
310
+ end
311
+ end
312
+
313
+ describe '#utc_date' do
314
+ e = 'Please provide dates in YYYY-MM-DD format'
315
+
316
+ it 'converts strings to dates in UTC' do
317
+ result = subject.utc_date('2022-08-30')
318
+ expect(result).to be_a(Time)
319
+ expect(result).to eq(Time.utc(2022, 8, 30, 0, 0, 0))
320
+ end
321
+
322
+ it 'raises an error if the input is a valid date/time in the wrong format' do
323
+ expect do
324
+ subject.utc_date('2022-08-30 11:32')
325
+ end.to raise_error(Thor::Error, e)
326
+ end
327
+
328
+ it 'raises an error if the input is wrong' do
329
+ expect do
330
+ subject.utc_date('foobar')
331
+ end.to raise_error(Thor::Error, e)
332
+ end
333
+ end
334
+ end
@@ -60,4 +60,137 @@ describe Aptible::CLI::Agent do
60
60
  expect { subject.send(:logs) }.to raise_error(/only one of/im)
61
61
  end
62
62
  end
63
+
64
+ describe '#logs_from_archive' do
65
+ context 'using string-matches' do
66
+ let(:files) { %w(file_1 file_2) }
67
+
68
+ before do
69
+ subject.options = {
70
+ region: 'some-region',
71
+ bucket: 'some-bucket',
72
+ decryption_keys: 'mykey',
73
+ string_matches: 'foo',
74
+ download_location: './'
75
+ }
76
+ subject.stub(:info_from_path) { { shasum: 'foo' } }
77
+ subject.stub(:encryption_key) { subject.options[:decryption_keys] }
78
+ end
79
+
80
+ it 'download all files' do
81
+ expect(subject).to receive(:ensure_aws_creds)
82
+ expect(subject).to receive(:validate_log_search_options)
83
+ .with(subject.options)
84
+
85
+ expect(subject).to receive(:find_s3_files_by_string_match)
86
+ .with(
87
+ subject.options[:region],
88
+ subject.options[:bucket],
89
+ subject.options[:stack],
90
+ subject.options[:string_matches]
91
+ ).and_return(files)
92
+
93
+ files.each do |f|
94
+ expect(subject).to receive(:decrypt_and_translate_s3_file)
95
+ .with(
96
+ f,
97
+ subject.options[:decryption_keys],
98
+ subject.options[:region],
99
+ subject.options[:bucket],
100
+ subject.options[:download_location]
101
+ )
102
+ end
103
+ subject.send('logs_from_archive')
104
+ end
105
+ end
106
+
107
+ context 'using app/database/endpoint id' do
108
+ let(:files) { %w(file_1 file_2) }
109
+
110
+ before do
111
+ subject.options = {
112
+ region: 'some-region',
113
+ bucket: 'some-bucket',
114
+ stack: 'mystack',
115
+ decryption_keys: 'mykey',
116
+ app_id: 123,
117
+ download_location: './'
118
+ }
119
+ subject.stub(:info_from_path) { { shasum: 'foo' } }
120
+ subject.stub(:encryption_key) { subject.options[:decryption_keys] }
121
+ end
122
+
123
+ it 'download all files' do
124
+ expect(subject).to receive(:ensure_aws_creds)
125
+ expect(subject).to receive(:validate_log_search_options)
126
+ .with(subject.options)
127
+
128
+ expect(subject).to receive(:find_s3_files_by_attrs)
129
+ .with(
130
+ subject.options[:region],
131
+ subject.options[:bucket],
132
+ subject.options[:stack],
133
+ { type: 'apps', id: 123 },
134
+ nil
135
+ ).and_return(files)
136
+
137
+ files.each do |f|
138
+ expect(subject).to receive(:decrypt_and_translate_s3_file)
139
+ .with(
140
+ f,
141
+ subject.options[:decryption_keys],
142
+ subject.options[:region],
143
+ subject.options[:bucket],
144
+ subject.options[:download_location]
145
+ )
146
+ end
147
+ subject.send('logs_from_archive')
148
+ end
149
+ end
150
+
151
+ context 'using container id' do
152
+ let(:files) { %w(file_1 file_2) }
153
+
154
+ before do
155
+ subject.options = {
156
+ region: 'some-region',
157
+ bucket: 'some-bucket',
158
+ stack: 'mystack',
159
+ decryption_keys: 'mykey',
160
+ container_id:
161
+ '9080b96447f98b31ef9831d5fd98b09e3c5c545269734e2e825644571152457c',
162
+ download_location: './'
163
+ }
164
+ subject.stub(:info_from_path) { { shasum: 'foo' } }
165
+ subject.stub(:encryption_key) { subject.options[:decryption_keys] }
166
+ end
167
+
168
+ it 'download all files' do
169
+ expect(subject).to receive(:ensure_aws_creds)
170
+ expect(subject).to receive(:validate_log_search_options)
171
+ .with(subject.options)
172
+
173
+ expect(subject).to receive(:find_s3_files_by_attrs)
174
+ .with(
175
+ subject.options[:region],
176
+ subject.options[:bucket],
177
+ subject.options[:stack],
178
+ { container_id: subject.options[:container_id] },
179
+ nil
180
+ ).and_return(files)
181
+
182
+ files.each do |f|
183
+ expect(subject).to receive(:decrypt_and_translate_s3_file)
184
+ .with(
185
+ f,
186
+ subject.options[:decryption_keys],
187
+ subject.options[:region],
188
+ subject.options[:bucket],
189
+ subject.options[:download_location]
190
+ )
191
+ end
192
+ subject.send('logs_from_archive')
193
+ end
194
+ end
195
+ end
63
196
  end
metadata CHANGED
@@ -1,14 +1,14 @@
  --- !ruby/object:Gem::Specification
  name: aptible-cli
  version: !ruby/object:Gem::Version
- version: 0.19.4
+ version: 0.19.6
  platform: ruby
  authors:
  - Frank Macreery
  autorequire:
  bindir: bin
  cert_chain: []
- date: 2022-09-09 00:00:00.000000000 Z
+ date: 2022-09-28 00:00:00.000000000 Z
  dependencies:
  - !ruby/object:Gem::Dependency
  name: aptible-resource
@@ -136,6 +136,20 @@ dependencies:
  - - ">="
  - !ruby/object:Gem::Version
  version: '0'
+ - !ruby/object:Gem::Dependency
+ name: aws-sdk
+ requirement: !ruby/object:Gem::Requirement
+ requirements:
+ - - "~>"
+ - !ruby/object:Gem::Version
+ version: '2.0'
+ type: :runtime
+ prerelease: false
+ version_requirements: !ruby/object:Gem::Requirement
+ requirements:
+ - - "~>"
+ - !ruby/object:Gem::Version
+ version: '2.0'
  - !ruby/object:Gem::Dependency
  name: activesupport
  requirement: !ruby/object:Gem::Requirement
@@ -294,6 +308,7 @@ files:
  - lib/aptible/cli/helpers/log_drain.rb
  - lib/aptible/cli/helpers/metric_drain.rb
  - lib/aptible/cli/helpers/operation.rb
+ - lib/aptible/cli/helpers/s3_log_helpers.rb
  - lib/aptible/cli/helpers/security_key.rb
  - lib/aptible/cli/helpers/ssh.rb
  - lib/aptible/cli/helpers/system.rb
@@ -331,6 +346,7 @@ files:
  - spec/aptible/cli/helpers/handle_from_git_remote_spec.rb
  - spec/aptible/cli/helpers/operation_spec.rb
  - spec/aptible/cli/helpers/options_handle_strategy_spec.rb
+ - spec/aptible/cli/helpers/s3_log_helpers_spec.rb
  - spec/aptible/cli/helpers/ssh_spec.rb
  - spec/aptible/cli/helpers/token_spec.rb
  - spec/aptible/cli/helpers/tunnel_spec.rb
@@ -398,7 +414,7 @@ required_rubygems_version: !ruby/object:Gem::Requirement
  - !ruby/object:Gem::Version
  version: '0'
  requirements: []
- rubygems_version: 3.0.3.1
+ rubygems_version: 3.0.3
  signing_key:
  specification_version: 4
  summary: Command-line interface for Aptible services
@@ -410,6 +426,7 @@ test_files:
  - spec/aptible/cli/helpers/handle_from_git_remote_spec.rb
  - spec/aptible/cli/helpers/operation_spec.rb
  - spec/aptible/cli/helpers/options_handle_strategy_spec.rb
+ - spec/aptible/cli/helpers/s3_log_helpers_spec.rb
  - spec/aptible/cli/helpers/ssh_spec.rb
  - spec/aptible/cli/helpers/token_spec.rb
  - spec/aptible/cli/helpers/tunnel_spec.rb