subspace 2.5.10 → 3.0.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (103) hide show
  1. checksums.yaml +4 -4
  2. data/.ruby-version +1 -1
  3. data/CHANGELOG.md +22 -5
  4. data/README.md +105 -51
  5. data/UPGRADING.md +10 -0
  6. data/ansible/roles/common/defaults/main.yml +0 -1
  7. data/ansible/roles/common/files/sudoers-service +1 -1
  8. data/ansible/roles/common/tasks/main.yml +18 -7
  9. data/ansible/roles/common/tasks/no_swap.yml +26 -0
  10. data/ansible/roles/common/templates/motd +1 -1
  11. data/ansible/roles/common/templates/motd2 +1 -1
  12. data/ansible/roles/delayed_job/tasks/main.yml +21 -38
  13. data/ansible/roles/delayed_job/templates/delayed-job-systemd.service +33 -0
  14. data/ansible/roles/letsencrypt/defaults/main.yml +7 -7
  15. data/ansible/roles/letsencrypt/tasks/main.yml +18 -24
  16. data/ansible/roles/memcache/defaults/main.yml +2 -0
  17. data/ansible/roles/memcache/tasks/main.yml +16 -1
  18. data/ansible/roles/newrelic-infra/tasks/main.yml +3 -3
  19. data/ansible/roles/nginx/tasks/main.yml +12 -3
  20. data/ansible/roles/puma/tasks/main.yml +32 -20
  21. data/ansible/roles/puma/templates/puma-systemd.service +37 -0
  22. data/ansible/roles/puma/templates/puma-systemd.socket +14 -0
  23. data/ansible/roles/puma/templates/puma.rb +4 -2
  24. data/ansible/roles/rails/defaults/main.yml +0 -7
  25. data/ansible/roles/redis/tasks/main.yml +28 -3
  26. data/ansible/roles/resque/tasks/main.yml +11 -12
  27. data/ansible/roles/resque/templates/resque-systemd.service +10 -3
  28. data/ansible/roles/ruby-common/tasks/main.yml +1 -16
  29. data/ansible/roles/sidekiq/defaults/main.yml +1 -1
  30. data/ansible/roles/sidekiq/tasks/main.yml +11 -15
  31. data/ansible/roles/sidekiq/templates/sidekiq-monit-rc +1 -1
  32. data/ansible/roles/sidekiq/templates/sidekiq-systemd.service +63 -0
  33. data/ansible/roles/tailscale/defaults/main.yml +2 -0
  34. data/ansible/roles/tailscale/tasks/main.yml +22 -0
  35. data/bin/console +0 -4
  36. data/exe/subspace +1 -2
  37. data/lib/subspace/cli.rb +51 -14
  38. data/lib/subspace/commands/ansible.rb +12 -3
  39. data/lib/subspace/commands/base.rb +20 -5
  40. data/lib/subspace/commands/bootstrap.rb +16 -21
  41. data/lib/subspace/commands/configure.rb +2 -2
  42. data/lib/subspace/commands/exec.rb +20 -0
  43. data/lib/subspace/commands/init.rb +94 -45
  44. data/lib/subspace/commands/inventory.rb +54 -0
  45. data/lib/subspace/commands/maintain.rb +1 -1
  46. data/lib/subspace/commands/provision.rb +1 -3
  47. data/lib/subspace/commands/secrets.rb +69 -0
  48. data/lib/subspace/commands/ssh.rb +14 -8
  49. data/lib/subspace/commands/terraform.rb +83 -0
  50. data/lib/subspace/inventory.rb +144 -0
  51. data/lib/subspace/version.rb +1 -1
  52. data/subspace.gemspec +8 -2
  53. data/template/{provision → subspace}/.gitignore +3 -0
  54. data/template/{provision → subspace}/ansible.cfg.erb +2 -2
  55. data/template/subspace/group_vars/all.erb +28 -0
  56. data/template/subspace/group_vars/template.erb +26 -0
  57. data/template/{provision → subspace}/hosts.erb +0 -0
  58. data/template/subspace/inventory.yml.erb +11 -0
  59. data/template/{provision → subspace}/playbook.yml.erb +2 -5
  60. data/template/{provision/vars → subspace/secrets}/template.erb +0 -0
  61. data/template/{provision → subspace}/templates/application.yml.template +0 -0
  62. data/template/subspace/templates/authorized_keys.erb +1 -0
  63. data/template/subspace/terraform/.gitignore +2 -0
  64. data/template/subspace/terraform/template/main-oxenwagen.tf.erb +116 -0
  65. data/template/subspace/terraform/template/main-workhorse.tf.erb +41 -0
  66. data/template/subspace/terraformrc.erb +9 -0
  67. data/terraform/modules/s3_backend/README +2 -0
  68. data/terraform/modules/s3_backend/dynamodb.tf +1 -0
  69. data/terraform/modules/s3_backend/iam_user.tf +38 -0
  70. data/terraform/modules/s3_backend/main.tf +39 -0
  71. data/terraform/modules/s3_backend/state_bucket.tf +14 -0
  72. metadata +41 -55
  73. data/ansible/roles/awscli/tasks/main.yml +0 -10
  74. data/ansible/roles/delayed_job/meta/main.yml +0 -5
  75. data/ansible/roles/letsencrypt_dns/defaults/main.yml +0 -4
  76. data/ansible/roles/letsencrypt_dns/tasks/main.yml +0 -133
  77. data/ansible/roles/monit/files/monit-http.conf +0 -3
  78. data/ansible/roles/monit/files/sudoers-monit +0 -1
  79. data/ansible/roles/monit/handlers/main.yml +0 -14
  80. data/ansible/roles/monit/tasks/main.yml +0 -34
  81. data/ansible/roles/mtpereira.passenger/.bumpversion.cfg +0 -7
  82. data/ansible/roles/mtpereira.passenger/.gitignore +0 -2
  83. data/ansible/roles/mtpereira.passenger/LICENSE +0 -20
  84. data/ansible/roles/mtpereira.passenger/README.md +0 -31
  85. data/ansible/roles/mtpereira.passenger/defaults/main.yml +0 -5
  86. data/ansible/roles/mtpereira.passenger/handlers/main.yml +0 -8
  87. data/ansible/roles/mtpereira.passenger/meta/.galaxy_install_info +0 -1
  88. data/ansible/roles/mtpereira.passenger/meta/main.yml +0 -21
  89. data/ansible/roles/mtpereira.passenger/tasks/apt.yml +0 -13
  90. data/ansible/roles/mtpereira.passenger/tasks/main.yml +0 -8
  91. data/ansible/roles/mtpereira.passenger/tasks/pkg.yml +0 -35
  92. data/ansible/roles/mtpereira.passenger/tasks/service.yml +0 -8
  93. data/ansible/roles/passenger/files/sudoers-passenger +0 -1
  94. data/ansible/roles/passenger/meta/main.yml +0 -6
  95. data/ansible/roles/passenger/tasks/main.yml +0 -5
  96. data/ansible/roles/postgis/defaults/main.yml +0 -2
  97. data/ansible/roles/puma/defaults/main.yml +0 -5
  98. data/ansible/roles/puma/meta/main.yml +0 -5
  99. data/ansible/roles/sidekiq/meta/main.yml +0 -5
  100. data/lib/subspace/commands/vars.rb +0 -48
  101. data/template/provision/group_vars/all.erb +0 -17
  102. data/template/provision/group_vars/template.erb +0 -11
  103. data/template/provision/host_vars/template.erb +0 -4
@@ -0,0 +1,144 @@
1
require 'yaml'

module Subspace
  # In-memory representation of an Ansible YAML inventory (inventory.yml).
  #
  # Reads/writes the top-level "all" group with its hosts, children (groups)
  # and vars, and can merge host data produced by terraform output (#merge).
  class Inventory
    # group_vars:  { "group_name" => { var => value } }
    # hosts:       { "host_name" => Host }
    # global_vars: vars applied to every host (YAML: all => vars)
    # path:        filesystem location this inventory was read from
    attr_accessor :group_vars, :hosts, :global_vars, :path

    def initialize
      @hosts = {}
      @group_vars = {}
      @global_vars = {}
    end

    # Resolve +host_spec+ (a group name or a single host name) to an array of
    # Host objects. When nothing matches, prints the available hosts/groups
    # and terminates the process.
    def find_hosts!(host_spec)
      if groups[host_spec]
        groups[host_spec].host_list.map { |name| hosts[name] }
      elsif hosts[host_spec]
        [hosts[host_spec]]
      else
        say "No inventory matching: '#{host_spec}' found. "
        say (["Available hosts:"] + hosts.keys).join("\n\t")
        say (["Available groups:"] + groups.keys).join("\n\t")
        # Fix: exit with a non-zero status so scripts can detect the failure
        # (bare `exit` reports success).
        exit 1
      end
    end

    # Parse the Ansible YAML inventory file at +path+ into an Inventory.
    # NOTE: sub-groups (children of children) are not handled yet.
    def self.read(path)
      inventory = new
      inventory.path = path

      yml = YAML.load(File.read(path)).to_h

      # Top-level hosts under "all"
      yml["all"]["hosts"].each do |name, vars|
        inventory.hosts[name] = Host.new(name, vars: vars || {})
      end

      # Groups ("children" of "all"); each group defines its host membership
      yml["all"]["children"].each do |name, group|
        next unless group["hosts"]

        group["hosts"].each do |host, vars|
          inventory.hosts[host] ||= Host.new(host, vars: vars || {})
          inventory.hosts[host].group_list.push name
        end

        inventory.group_vars[name] = group["vars"] if group["vars"]
      end

      # Capture global variables
      inventory.global_vars = yml["all"]["vars"] || {}

      inventory
    end

    # Serialize the inventory to disk. An explicitly passed +path+ takes
    # precedence over the path the inventory was read from.
    # Fix: the original wrote `@path || path`, silently ignoring the argument
    # whenever @path was set.
    def write(path = nil)
      File.write(path || @path, to_yml)
    end

    # Merge terraform inventory output (parallel arrays keyed by index:
    # "hostnames", "ip_addresses", "users", "groups") into this inventory.
    # Existing hosts are left untouched, but an IP change is reported.
    def merge(inventory_json)
      inventory_json["hostnames"].each_with_index do |host, i|
        if hosts[host]
          old_ip = hosts[host].vars["ansible_host"]
          new_ip = inventory_json["ip_addresses"][i]
          if old_ip != new_ip
            say " * Host '#{host}' IP address changed! You may need to update the inventory! (#{old_ip} => #{new_ip})"
          end
          next
        end
        hosts[host] = Host.new(host)
        hosts[host].vars["ansible_host"] = inventory_json["ip_addresses"][i]
        hosts[host].vars["ansible_user"] = inventory_json["users"][i]
        hosts[host].vars["hostname"] = host
        # Fix: split on /\s+/ (not /\s/) so runs of whitespace don't produce
        # empty group names.
        hosts[host].group_list = inventory_json["groups"][i].split(/\s+/)
      end
    end

    # Render the inventory as an Ansible-compatible YAML string.
    def to_yml
      all_groups = {}
      all_hosts = {}
      @hosts.each do |_name, host|
        # A host with no vars serializes as a bare key (nil value)
        all_hosts[host.name] = host.vars.empty? ? nil : host.vars.transform_keys(&:to_s)
        host.group_list.each do |group|
          all_groups[group] ||= { "hosts" => {} }
          all_groups[group]["hosts"][host.name] = nil
        end
      end

      @group_vars.each do |group, vars|
        all_groups[group] ||= {}
        all_groups[group]["vars"] = vars.transform_keys(&:to_s) unless vars.empty?
      end

      yml = {
        "all" => {
          "hosts" => all_hosts,
          "children" => all_groups.empty? ? nil : all_groups
        }
      }

      yml["all"]["vars"] = @global_vars.transform_keys(&:to_s) unless @global_vars.empty?

      YAML.dump(yml)
    end

    # All groups, including the implicit "all" group containing every host.
    # Memoized; compute after @hosts/@group_vars are fully populated.
    def groups
      @groups ||= begin
        all_groups = { "all" => Group.new("all", vars: {}, host_list: hosts.keys) }
        @hosts.each do |name, host|
          host.group_list.each do |group|
            # Fix: default missing group vars to {} — passing the keyword
            # explicitly bypasses Group's `vars: {}` default, so the original
            # leaked nil into Group#vars.
            all_groups[group] ||= Group.new(group, vars: @group_vars[group] || {})
            all_groups[group].host_list.append(name)
          end
        end
        all_groups
      end
    end

    # A single inventory host: its vars and the groups it belongs to.
    class Host
      attr_accessor :vars, :group_list, :name

      def initialize(name, vars: {}, group_list: [])
        @name = name
        @vars = vars
        @group_list = group_list
      end
    end

    # A named group with its vars and member host names.
    class Group
      attr_accessor :name, :vars, :host_list

      def initialize(name, vars: {}, host_list: [])
        @name = name
        @vars = vars
        @host_list = host_list
      end
    end
  end
end
@@ -1,3 +1,3 @@
1
1
  module Subspace
2
- VERSION = "2.5.10"
2
+ VERSION = "3.0.0"
3
3
  end
data/subspace.gemspec CHANGED
@@ -13,6 +13,13 @@ Gem::Specification.new do |spec|
13
13
  spec.description = %q{WIP -- don't use this :)}
14
14
  spec.homepage = "https://github.com/tenforwardconsulting/subspace"
15
15
  spec.license = "MIT"
16
+ spec.post_install_message = <<~EOS
17
+ *** Subspace 3 has many breaking changes
18
+ Primarily, the entire configuration directory structure has moved from config/provision to config/subspace.
19
+ You will need to migrate your old configuration to the new location, or downgrade to Subspace 2 if this was not intentional.
20
+ Please review the Upgrade guide: https://github.com/tenforwardconsulting/subspace/blob/master/UPGRADING.md
21
+ EOS
22
+
16
23
 
17
24
  # Prevent pushing this gem to RubyGems.org. To allow pushes either set the 'allowed_push_host'
18
25
  # to allow pushing to a single host or delete this section to allow pushing to any host.
@@ -24,7 +31,7 @@ Gem::Specification.new do |spec|
24
31
 
25
32
  spec.files = `git ls-files -z`.split("\x0").reject { |f| f.match(%r{^(test|spec|features)/}) }
26
33
  spec.bindir = "exe"
27
- spec.executables = spec.files.grep(%r{^exe/}) { |f| File.basename(f) }
34
+ spec.executables << "subspace"
28
35
  spec.require_paths = ["lib"]
29
36
 
30
37
  spec.add_development_dependency "bundler", "~> 2.1"
@@ -33,5 +40,4 @@ Gem::Specification.new do |spec|
33
40
 
34
41
  spec.add_runtime_dependency "commander", "~>4.2"
35
42
  spec.add_runtime_dependency "figaro", "~>1.0"
36
- spec.add_runtime_dependency "ed25519", "~>1.0"
37
43
  end
@@ -1,3 +1,6 @@
1
1
  .vault_pass
2
2
  ansible.cfg
3
3
  *.retry
4
+ *.tfstate*
5
+ *.pem
6
+ .terraform
@@ -1,8 +1,8 @@
1
1
  [defaults]
2
- inventory = hosts
3
2
  forks = 10
4
3
  roles_path = ./roles:<%= File.join(gem_path, 'ansible', 'roles') %>:/etc/ansible/roles
5
4
  vault_password_file = .vault_pass
5
+ inventory = inventory.yml
6
6
  # Uncomment to add timestamps to tasks to find slow ones.
7
7
  # callback_whitelist = profile_tasks
8
8
 
@@ -13,4 +13,4 @@ strategy = mitogen_linear
13
13
 
14
14
  [ssh_connection]
15
15
  pipelining = True
16
- control_path = /tmp/subspace-control-%%h-%%p-%%r
16
+ control_path = /tmp/subspace-control-%%h-%%p-%%r
@@ -0,0 +1,28 @@
1
+ # Conventions and defaults are defined here
2
+ project_name: <%= project_name %>
3
+ database_host: localhost
4
+ use_sudo: true
5
+
6
+ # ruby-common
7
+ # pull the checksum/url from https://www.ruby-lang.org/en/downloads/
8
+ ruby_version: # ruby-2.7.1
9
+ ruby_checksum: # d418483bdd0000576c1370571121a6eb24582116db0b7bb2005e90e250eae418
10
+ ruby_download_location: # https://cache.ruby-lang.org/pub/ruby/2.7/ruby-2.7.1.tar.gz
11
+ bundler_version: # 2.3.5
12
+
13
+ # Other stuff
14
+ letsencrypt_email:
15
+ nodejs_version: # 16.x
16
+ ssl_enabled: true
17
+ postgresql_version: # 14
18
+
19
+ logrotate_scripts:
20
+ - name: rails
21
+ path: "/u/apps/{{project_name}}/shared/log/{{rails_env}}.log"
22
+ options:
23
+ - weekly
24
+ - size 100M
25
+ - missingok
26
+ - compress
27
+ - delaycompress
28
+ - copytruncate
@@ -0,0 +1,26 @@
1
+ ## rails
2
+ rails_env: <%= @env %>
3
+
4
+ database_pool: 5
5
+ database_name: "{{project_name}}_{{rails_env}}"
6
+ database_user: "{{project_name}}"
7
+ database_adapter: postgresql
8
+ # job_queues:
9
+ # - default
10
+ # - mailers
11
+
12
+ # nginx / letsencrypt
13
+ server_name: "{{ansible_host}}"
14
+
15
+ ## postgresql
16
+ # postgresql_version: 14
17
+ # postgresql_authentication:
18
+ # - type: local
19
+ # user: "{{database_user}}"
20
+ # database: 'all'
21
+ # method: trust
22
+
23
+ ## puma
24
+ puma_workers: 1
25
+ puma_min_threads: 4
26
+ puma_max_threads: 16
File without changes
@@ -0,0 +1,11 @@
1
+ ---
2
+ all:
3
+ hosts:
4
+ <%= @env %>1:
5
+ ansible_host: localhost
6
+ ansible_user: ubuntu
7
+ ansible_ssh_private_key_file: subspace.pem
8
+ hostname: web1
9
+ children:
10
+ <%= @env %>:
11
+ <%= @env %>1:
@@ -3,19 +3,16 @@
3
3
  become: yes
4
4
 
5
5
  vars_files:
6
- - ./vars/<%= @env %>.yml
6
+ - ./secrets/<%= @env %>.yml
7
7
 
8
8
  roles:
9
9
  - common
10
- - yarn
11
10
  - nodejs
11
+ - yarn
12
12
  - ruby-common
13
13
  - rails
14
14
  - puma
15
15
  - letsencrypt
16
16
  - nginx
17
17
  - postgresql
18
- - monit
19
18
  - logrotate
20
- - collectd
21
- - delayed_job
@@ -0,0 +1 @@
1
+ <%= `cat $HOME/.ssh/id_rsa.pub` %>
@@ -0,0 +1,2 @@
1
+ credentials.auto.tfvars
2
+ .subspace-tf-modules
@@ -0,0 +1,116 @@
1
+ terraform {
2
+ # Default backend is just local.
3
+
4
+ # Uncomment to use s3
5
+ # backend "s3" {
6
+ # bucket = "subspace-backend-<%= project_name %>"
7
+ # key = "subspace.<%= @env %>.tfstate"
8
+ # region = "us-west-2"
9
+ # }
10
+
11
+ # Uncomment to use Terraform Cloud
12
+ # cloud {
13
+ # organization = "<%= project_name %>"
14
+ #
15
+ # workspaces {
16
+ # name = "<%= @env %>"
17
+ # }
18
+ # }
19
+
20
+ }
21
+
22
+ provider aws {
23
+ region = "us-west-2"
24
+ profile = "subspace-<%= project_name %>"
25
+ default_tags {
26
+ tags = {
27
+ Environment = "<%= @env %>"
28
+ Project = "<%= project_name %>"
29
+ }
30
+ }
31
+ }
32
+
33
+ variable database_password { type = string }
34
+
35
+ module oxenwagen {
36
+ source = "github.com/tenforwardconsulting/terraform-subspace-oxenwagen?ref=v2.1.0"
37
+ project_name = "<%= project_name %>"
38
+ project_environment = "<%= @env %>"
39
+ aws_region = ## "us-west-2"
40
+ lb_health_check_path = "/"
41
+ subspace_public_key = file("../../subspace.pem.pub")
42
+
43
+ # Ubuntu Server 20.04 LTS (HVM), SSD Volume Type
44
+ instance_ami = "<%= @latest_ami %>"
45
+ web_instance_type = "t3.small"
46
+ web_instance_count = 2
47
+ worker_instance_type = "t3.medium"
48
+ worker_instance_count = 1
49
+ worker_volume_size = 100
50
+ ssh_cidr_blocks = [] # Put office/local/vpn IP addresses here
51
+
52
+ database_engine = "postgres"
53
+ database_engine_version = ## "14.1"
54
+ database_instance_class = "db.t3.medium"
55
+ database_name = "<%= "#{project_name}_#{@env}" %>"
56
+ database_username = "<%= project_name %>"
57
+ database_password = var.database_password
58
+ database_allocated_storage = 100
59
+ database_max_allocated_storage = 1000
60
+ database_iops = 1000
61
+
62
+ # lb_domain_name = "www.<%= project_name %>.com"
63
+ # lb_alternate_names = []
64
+ }
65
+
66
+ output "inventory" {
67
+ value = module.oxenwagen
68
+ }
69
+
70
+ resource "aws_s3_bucket" "bucket" {
71
+ bucket = "<%= "#{project_name}-#{@env}-assets" %>"
72
+ acl = "private"
73
+ }
74
+
75
+ resource "aws_s3_bucket_public_access_block" "block_public_acls" {
76
+ bucket = aws_s3_bucket.bucket.id
77
+
78
+ block_public_acls = true
79
+ block_public_policy = true
80
+ ignore_public_acls = true
81
+ restrict_public_buckets = true
82
+ }
83
+
84
+ resource "aws_iam_user" "s3_user" {
85
+ name = "<%= "#{project_name}-#{@env}-assets" %>-subspace-s3-user"
86
+ }
87
+
88
+ resource "aws_iam_user_policy" "s3-upload" {
89
+ name = "test"
90
+ user = aws_iam_user.s3_user.name
91
+
92
+ policy = <<EOF
93
+ {
94
+ "Version": "2012-10-17",
95
+ "Statement": [
96
+ {
97
+ "Action": [
98
+ "s3:PutObject",
99
+ "s3:PutObjectAcl",
100
+ "s3:GetObject",
101
+ "s3:GetObjectVersion",
102
+ "s3:GetBucketAcl",
103
+ "s3:DeleteObject",
104
+ "s3:DeleteObjectVersion"
105
+ ],
106
+ "Effect": "Allow",
107
+ "Resource": [
108
+ "arn:aws:s3:::<%= "#{project_name}-#{@env}-assets" %>",
109
+ "arn:aws:s3:::<%= "#{project_name}-#{@env}-assets" %>/*"
110
+ ]
111
+ }
112
+ ]
113
+ }
114
+ EOF
115
+ }
116
+
@@ -0,0 +1,41 @@
1
+ terraform {
2
+ # Default backend is just local.
3
+
4
+ # Uncomment to use s3
5
+ # backend "s3" {
6
+ # bucket = "subspace-backend-<%= project_name %>"
7
+ # key = "subspace.<%= @env %>.tfstate"
8
+ # region = "us-west-2"
9
+ # }
10
+
11
+ # Uncomment to use Terraform Cloud
12
+ # cloud {
13
+ # organization = "<%= project_name %>"
14
+ #
15
+ # workspaces {
16
+ # name = "<%= @env %>"
17
+ # }
18
+ # }
19
+
20
+ }
21
+
22
+ module workhorse {
23
+ source = "github.com/tenforwardconsulting/terraform-subspace-workhorse?ref=v1.0.0"
24
+ project_name = "<%= project_name %>"
25
+ project_environment = "<%= @env %>"
26
+ aws_region = "us-west-2"
27
+ subspace_public_key = file("../../subspace.pem.pub")
28
+ # zone_id = "ZOJ6811VRVYBT" # 10fw.net
29
+ # subdomain = "<%= project_name.gsub("_", "-") %>"
30
+
31
+ # Ubuntu Server 20.04 LTS (HVM), SSD Volume Type
32
+ instance_ami = "ami-0f81e6e71078b75b6"
33
+ instance_user = "ubuntu"
34
+ instance_type = "t3.medium"
35
+ instance_hostname = "${var.project_environment}-app1"
36
+ instance_volume_size = 20
37
+ }
38
+
39
+ output "workhorse" {
40
+ value = module.workhorse
41
+ }
@@ -0,0 +1,9 @@
1
+ provider_installation {
2
+ filesystem_mirror {
3
+ path = "<%= File.join(gem_path, 'terraform', 'modules') %>"
4
+ include = ["subspace/*/*"]
5
+ }
6
+ direct {
7
+ exclude = ["subspace/*/*"]
8
+ }
9
+ }
@@ -0,0 +1,2 @@
1
+ This terraform configuration is used solely to provision the tfstate backend.
2
+ It makes an s3 bucket and a user and you should not need to really mess with this after the project is bootstrapped.
@@ -0,0 +1 @@
1
+ # Maybe later
@@ -0,0 +1,38 @@
1
+ resource "aws_iam_user" "ss" {
2
+ name = "subspace"
3
+ path = "/"
4
+
5
+ tags = {
6
+ Name = "Subspace IAM user"
7
+ Environment = "Global"
8
+ }
9
+ }
10
+
11
+ resource "aws_iam_access_key" "ss" {
12
+ user = aws_iam_user.ss.name
13
+
14
+ pgp_key = data.local_file.pgp_key.content_base64
15
+ }
16
+
17
+ resource "aws_iam_user_policy" "ss_s3" {
18
+ name = "ss_s3_user_policy"
19
+ user = aws_iam_user.ss.name
20
+
21
+ policy = <<EOF
22
+ {
23
+ "Version": "2012-10-17",
24
+ "Statement": [
25
+ {
26
+ "Effect": "Allow",
27
+ "Action": "s3:ListBucket",
28
+ "Resource": "arn:aws:s3:::${local.state_bucket_name}"
29
+ },
30
+ {
31
+ "Effect": "Allow",
32
+ "Action": ["s3:GetObject", "s3:PutObject"],
33
+ "Resource": "arn:aws:s3:::${local.state_bucket_name}/*"
34
+ }
35
+ ]
36
+ }
37
+ EOF
38
+ }
@@ -0,0 +1,39 @@
1
+ terraform {
2
+ required_providers {
3
+ aws = {
4
+ source = "hashicorp/aws"
5
+ version = "~> 3.0"
6
+ }
7
+ }
8
+ }
9
+
10
+ # Variables
11
+ variable aws_region {
12
+ type = string
13
+ }
14
+
15
+ variable project_name {
16
+ type = string
17
+ }
18
+
19
+ locals {
20
+ state_bucket_name = "subspace-backend-${var.project_name}"
21
+ }
22
+
23
+ provider "aws" {
24
+ region = var.aws_region
25
+ }
26
+
27
+ data "local_file" "pgp_key" {
28
+ filename = "../public-key-binary.gpg"
29
+ }
30
+
31
+ # Outputs
32
+
33
+ output "subspace_aws_access_key_id" {
34
+ value = aws_iam_access_key.ss.id
35
+ }
36
+
37
+ output "subspace_aws_encrypted_secret_access_key" {
38
+ value = aws_iam_access_key.ss.encrypted_secret
39
+ }
@@ -0,0 +1,14 @@
1
+ # S3 Bucket for storing state
2
+ resource "aws_s3_bucket" "state_bucket" {
3
+ bucket = local.state_bucket_name
4
+ acl = "private"
5
+
6
+ versioning {
7
+ enabled = true
8
+ }
9
+
10
+ tags = {
11
+ Name = "Subspace Terraform State"
12
+ Environment = "Global"
13
+ }
14
+ }