scout-camp 0.1.5 → 0.1.8

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (39)
  1. checksums.yaml +4 -4
  2. data/.vimproject +59 -8
  3. data/VERSION +1 -1
  4. data/lib/scout/aws/s3.rb +33 -4
  5. data/lib/scout/offsite/resource.rb +34 -0
  6. data/lib/scout/offsite/ssh.rb +2 -0
  7. data/lib/scout/offsite/sync.rb +2 -0
  8. data/lib/scout/terraform_dsl/util.rb +2 -2
  9. data/lib/scout/terraform_dsl.rb +51 -0
  10. data/scout-camp.gemspec +30 -5
  11. data/scout_commands/sync +33 -0
  12. data/scout_commands/terraform/add/relay +40 -0
  13. data/scout_commands/terraform/lambda_task +124 -0
  14. data/scout_commands/terraform/list +12 -3
  15. data/scout_commands/terraform/outputs +33 -0
  16. data/share/aws/lambda_function.rb +15 -5
  17. data/share/terraform/aws/efs/data.tf +12 -0
  18. data/share/terraform/aws/efs/locals.tf +6 -0
  19. data/share/terraform/aws/efs/main.tf +14 -0
  20. data/share/terraform/aws/efs/output.tf +3 -0
  21. data/share/terraform/aws/efs/variables.tf +9 -0
  22. data/share/terraform/aws/efs_host/data.tf +11 -0
  23. data/share/terraform/aws/efs_host/locals.tf +8 -0
  24. data/share/terraform/aws/efs_host/main.tf +31 -0
  25. data/share/terraform/aws/efs_host/output.tf +3 -0
  26. data/share/terraform/aws/efs_host/variables.tf +20 -0
  27. data/share/terraform/aws/fargate/locals.tf +8 -0
  28. data/share/terraform/aws/fargate/main.tf +38 -0
  29. data/share/terraform/aws/fargate/variables.tf +73 -0
  30. data/share/terraform/aws/network/data.tf +15 -0
  31. data/share/terraform/aws/network/main.tf +41 -0
  32. data/share/terraform/aws/network/output.tf +7 -0
  33. data/share/terraform/aws/network/variables.tf +0 -0
  34. data/share/terraform/ssh/cmd/main.tf +34 -0
  35. data/share/terraform/ssh/cmd/variables.tf +19 -0
  36. data/test/scout/offsite/test_resource.rb +46 -0
  37. data/test/scout/offsite/test_sync.rb +1 -0
  38. metadata +29 -4
  39. /data/scout_commands/terraform/{add → add/lambda} +0 -0
checksums.yaml CHANGED
@@ -1,7 +1,7 @@
 ---
 SHA256:
-  metadata.gz: 225a937ec9eac0522929e3da409bed070cf387a725d8c8cc5e4041832746fee5
-  data.tar.gz: 5d88f8bc6d5d382b0de9b1765a5cf95e688708dfdbe67cb4372055876fcc5f4c
+  metadata.gz: e451509a388ca0463eccc9609ef4e506cc793a6ee3df3c720ed441f806641c13
+  data.tar.gz: 3950a94e466395630140a5265f3351f8e482b7a983402488587f840af4f177d3
 SHA512:
-  metadata.gz: 7008e9b2ae08a9f6c8fdef48e913d09a6d07eb42825c246bd6182b23868798c1d5227fbcd8acf9147dc1918e0b920d90851539c6a8d74cae481e1cc65afce22a
-  data.tar.gz: 9cfc47356b2225417cceab5625e43a047df912011b655ff6754f770d6cdf9beca85dd6cddfe266ffe6a4018ba5ee52728d7a199051dfcfa4a026aa7248801f10
+  metadata.gz: 96ee495972e08312e7b1f9ed10e8e47d7f72983d554b3113c17967c5080a20ec01ff6317939da97e90cb5db6b5b2ad2ffd9aeb2cf7fb092e3e8a2faaadc0543b
+  data.tar.gz: 16daf0c75fbe0028eaf44d5be4a174f2de7ad05866568fd27e1376851397e4c493607bdce1e03f8943e47b22a7533dfe13599c7f10126a3e60f3b6dfe20f226b
data/.vimproject CHANGED
@@ -19,35 +19,74 @@ scout-camp=/$PWD filter="*" {
       ssh.rb
       step.rb
       sync.rb
+      resource.rb
     }
   }
 }
 scout_commands=scout_commands {
   offsite
+  sync
+
   terraform=terraform{
     list
-    add
-    fargate
+    add=add{
+      relay
+      lambda
+      host
+      fargate
+    }
     status
+    outputs
     apply
     plan
     destroy
     remove
-    task
+    lambda_task
   }
 }
 share=share {
   terraform=terraform {
     aws=aws {
+      fargate=fargate{
+        main.tf
+        variables.tf
+        locals.tf
+      }
+
+      efs_host=efs_host{
+        main.tf
+        variables.tf
+        locals.tf
+        data.tf
+        output.tf
+      }
+
+      efs=efs{
+        main.tf
+        variables.tf
+        data.tf
+        locals.tf
+        output.tf
+      }
+
+      network=network{
+        data.tf
+        main.tf
+        variables.tf
+        output.tf
+      }
+
       bucket=bucket {
         main.tf
         output.tf
         variables.tf
       }
+
       lambda=lambda {
         main.tf
         variables.tf
       }
+
       role=role{
         main.tf
         variables.tf
@@ -68,16 +107,28 @@ scout-camp=/$PWD filter="*" {
         output.tf
         variables.tf
       }
-      fargate=fargate{
-        locals.tf
-        main.tf
-        variables.tf
-      }
       provider=provider {
         data.tf
         output.tf
       }
     }
+    ssh=ssh{
+      cmd=cmd{
+        main.tf
+        variables.tf
+      }
+      ollama=ollama{
+        main.tf
+        variables.tf
+      }
+      vllm=vllm{
+        main.tf
+        variables.tf
+      }
+      relay=relay{
+
+      }
+    }
   }
   aws=aws{
     lambda_function.rb
data/VERSION CHANGED
@@ -1 +1 @@
-0.1.5
+0.1.8
data/lib/scout/aws/s3.rb CHANGED
@@ -10,7 +10,11 @@ module Open
     extend Hook
 
     def self.lock(*args, &block)
-      yield nil
+      begin
+        yield nil
+      rescue KeepLocked
+        $!.payload
+      end
     end
 
     def self.is_s3?(uri)
@@ -51,6 +55,7 @@ module Open
       bucket, key = parse_s3_uri(uri)
       s3 = Aws::S3::Client.new
      content = Open.open_pipe(&block).read if block_given?
+      content = content.read if IO === content
       s3.put_object(bucket: bucket, key: key, body: content)
     end
 
@@ -130,6 +135,18 @@ module Open
       false
     end
 
+    def self.cp(source, target)
+      source_bucket, source_key = parse_s3_uri(source)
+      target_bucket, target_key = parse_s3_uri(target)
+
+      s3 = Aws::S3::Client.new
+      s3.copy_object({
+        copy_source: "#{source_bucket}/#{source_key}",
+        bucket: target_bucket,
+        key: target_key
+      })
+    end
+
     def self.exists?(uri)
       bucket, key = parse_s3_uri(uri)
       return false if key.empty? # Can't check existence of bucket this way
@@ -142,14 +159,26 @@ module Open
     end
 
     self.singleton_class.alias_method :exist?, :exists?
-
 
     def self.sensible_write(path, content = nil, options = {}, &block)
+      content = content.to_s if content.respond_to?(:write_file)
       Open::S3.write(path, content)
     end
 
     def self.mkdir(path)
     end
+
+    def self.link(source, target, options = {})
+      cp(source, target)
+    end
+
+    def self.ln(source, target, options = {})
+      cp(source, target)
+    end
+
+    def self.ln_s(source, target, options = {})
+      cp(source, target)
+    end
   end
 end
 
@@ -162,8 +191,8 @@ module Path
   end
 
   def glob(*args)
-    if Open::S3.is_s3?(self)
-      Open::S3.glob(self, *args)
+    if Open::S3.is_s3?(self.find)
+      Open::S3.glob(self.find, *args)
     else
       orig_glob(*args)
     end
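The new `Open::S3.cp` copies objects server-side with `copy_object`, and `link`/`ln`/`ln_s` fall back to it since S3 has no notion of links. A minimal usage sketch, with hypothetical bucket names and keys:

```ruby
require 'scout/aws/s3'

# Hypothetical URIs; the object never leaves S3, no download is involved
Open::S3.cp('s3://example-bucket/jobs/result.tsv', 's3://example-archive/jobs/result.tsv')

# "Linking" on S3 is just another copy
Open::S3.ln_s('s3://example-bucket/jobs/result.tsv', 's3://example-bucket/latest/result.tsv')
```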
data/lib/scout/offsite/resource.rb ADDED
@@ -0,0 +1,34 @@
+require 'scout/resource'
+require_relative 'sync'
+module Resource
+  def self.sync(path, map = nil, source: nil, target: nil, resource: nil, **kwargs)
+    map = 'user' if map.nil?
+
+    if source
+      paths = [path]
+      real_paths, identified_paths = SSHLine.locate(source, paths)
+    else
+      resource = path.pkgdir if resource.nil? and path.is_a?(Path) and path.pkgdir.is_a?(Resource)
+      resource = Resource.default_resource if resource.nil?
+
+      if File.exist?(path)
+        real_paths = [path]
+      else
+        path = Path.setup(path, pkgdir: resource) unless path.is_a?(Path)
+        real_paths = path.directory? ? path.find_all : path.glob_all
+      end
+
+      identified_paths = real_paths.collect{|path| resource.identify(path) }
+    end
+
+    if target
+      target_paths = SSHLine.locate(target, identified_paths, map: map)
+    else
+      target_paths = identified_paths.collect{|p| p.find(map) }
+    end
+
+    real_paths.zip(target_paths).each do |source_path,target_path|
+      Open.sync(source_path, target_path, kwargs.merge(source: source, target: target))
+    end
+  end
+end
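`Resource.sync` resolves a resource path on the source side (locally or over SSH via `SSHLine.locate`) and mirrors it into the target path map or host with `Open.sync`. A hedged sketch of how it might be called; the resource path and host name are hypothetical:

```ruby
require 'scout/offsite/resource'

# Mirror a resource path from the local machine into the :user path map
Resource.sync(Path.setup('share/databases/example'), :user)

# Or pull the same path from a remote host first (hypothetical host name)
Resource.sync('share/databases/example', :user, source: 'workstation.example.org')
```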
data/lib/scout/offsite/ssh.rb CHANGED
@@ -1,4 +1,5 @@
 require 'net/ssh'
+require 'scout/persist'
 require_relative 'exceptions'
 
 class SSHLine
@@ -90,6 +91,7 @@ class SSHLine
   def scout(script)
     scout_script =<<-EOF
 require 'scout'
+require 'scout/offsite/ssh'
 SSHLine.run_local do
 #{script.strip}
 end
data/lib/scout/offsite/sync.rb CHANGED
@@ -1,3 +1,4 @@
+require_relative 'ssh'
 class SSHLine
   def self.locate(server, paths, map: :user)
     SSHLine.scout server, <<-EOF
@@ -5,6 +6,7 @@ map = :#{map}
 paths = [#{paths.collect{|p| "'" + p + "'" } * ", " }]
 located = paths.collect{|p| Path.setup(p).find(map) }
 identified = paths.collect{|p| Resource.identify(p) }
+located = located.collect{|path| path << "/" if path.directory? }
 [located, identified]
 EOF
   end
data/lib/scout/terraform_dsl/util.rb CHANGED
@@ -13,9 +13,9 @@ class TerraformDSL
   # @param prefix [nil,String] Optional prefix to prepend
   def self.log(msg, prefix = nil)
     if prefix
-      STDOUT.puts("[#{prefix}] " + msg)
+      Log.info("[#{prefix}] " + msg)
     else
-      STDOUT.puts(msg)
+      Log.info(msg)
     end
   end
 
data/lib/scout/terraform_dsl.rb CHANGED
@@ -80,7 +80,12 @@ class TerraformDSL
     def to_json(*_args)
       ['module', @name].join('.')
     end
+  end
 
+  module DirectReference
+    def to_json(*_args)
+      self
+    end
   end
 
   MODULES_DIR = Scout.share.terraform
@@ -180,6 +185,52 @@ class TerraformDSL
     nil
   end
 
+  # Add a backend template file without using modules.
+  #
+  # @param variables [Hash] variables for the backend:
+  #   :bucket, :key & :region
+  def backend(type, variables = {})
+    variables = variables.dup
+
+    text =<<~EOF
+      terraform {
+        backend "#{type}" {
+          #{variable_block(variables)}
+        }
+      }
+    EOF
+
+    element_file = ['backend_config', type.to_s].join('.')
+
+    custom(element_file, text)
+
+    nil
+  end
+
+  # Connect to a remote state through a backend
+  #
+  # @param variables [Hash] variables for the backend:
+  #   :bucket, :key & :region
+  def remote(type, key, variables = {})
+    variables = variables.dup
+    variables[:key] = key unless variables.include?(:key)
+
+    text =<<~EOF
+      data "terraform_remote_state" "#{key}" {
+        backend = "#{type}"
+        config = {
+          #{variable_block(variables)}
+        }
+      }
+    EOF
+
+    element_file = ['remote', type.to_s, key.to_s].join('.')
+
+    custom(element_file, text)
+
+    "data.terraform_remote_state.#{key}".extend DirectReference
+  end
+
   # Add a provider template file without using modules.
   # Defining providers in modules is problematic when providers
   # are not managed by Hashicorp. Hopefully we can
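Together, `backend` and `remote` let one deployment keep its state in a backend such as S3 and read outputs from another deployment's state; `remote` returns a `data.terraform_remote_state.<key>` string extended with `DirectReference`, so it interpolates verbatim when passed to a module. A hedged sketch of how a deployment script might use them; the bucket, key, region and module arguments are hypothetical:

```ruby
require 'scout/terraform_dsl'

terraform = TerraformDSL.new

# Keep this deployment's state in S3 (hypothetical bucket, key and region)
terraform.backend :s3, bucket: 'example-tf-state', key: 'storage/terraform.tfstate', region: 'us-east-1'

# Read another deployment's outputs through its remote state and hand the
# reference to a module; the returned string interpolates verbatim in HCL
network_state = terraform.remote :s3, :network, bucket: 'example-tf-state', region: 'us-east-1'
terraform.add :aws, :efs, remote: network_state
```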
data/scout-camp.gemspec CHANGED
@@ -2,16 +2,16 @@
 # DO NOT EDIT THIS FILE DIRECTLY
 # Instead, edit Juwelier::Tasks in Rakefile, and run 'rake gemspec'
 # -*- encoding: utf-8 -*-
-# stub: scout-camp 0.1.5 ruby lib
+# stub: scout-camp 0.1.8 ruby lib
 
 Gem::Specification.new do |s|
   s.name = "scout-camp".freeze
-  s.version = "0.1.5".freeze
+  s.version = "0.1.8".freeze
 
   s.required_rubygems_version = Gem::Requirement.new(">= 0".freeze) if s.respond_to? :required_rubygems_version=
   s.require_paths = ["lib".freeze]
   s.authors = ["Miguel Vazquez".freeze]
-  s.date = "2025-03-31"
+  s.date = "1980-01-02"
   s.description = "Functionalities to deploy and use scouts in remote servers like AWS".freeze
   s.email = "mikisvaz@gmail.com".freeze
   s.executables = ["scout-camp".freeze]
@@ -30,6 +30,7 @@ Gem::Specification.new do |s|
     "lib/scout/aws/s3.rb",
     "lib/scout/offsite.rb",
     "lib/scout/offsite/exceptions.rb",
+    "lib/scout/offsite/resource.rb",
     "lib/scout/offsite/ssh.rb",
     "lib/scout/offsite/step.rb",
     "lib/scout/offsite/sync.rb",
@@ -38,10 +39,14 @@ Gem::Specification.new do |s|
     "lib/scout/terraform_dsl/util.rb",
     "scout-camp.gemspec",
     "scout_commands/offsite",
-    "scout_commands/terraform/add",
+    "scout_commands/sync",
+    "scout_commands/terraform/add/lambda",
+    "scout_commands/terraform/add/relay",
     "scout_commands/terraform/apply",
     "scout_commands/terraform/destroy",
+    "scout_commands/terraform/lambda_task",
     "scout_commands/terraform/list",
+    "scout_commands/terraform/outputs",
     "scout_commands/terraform/plan",
     "scout_commands/terraform/remove",
     "scout_commands/terraform/status",
@@ -53,12 +58,29 @@ Gem::Specification.new do |s|
     "share/terraform/aws/cluster/main.tf",
     "share/terraform/aws/cluster/output.tf",
     "share/terraform/aws/cluster/variables.tf",
+    "share/terraform/aws/efs/data.tf",
+    "share/terraform/aws/efs/locals.tf",
+    "share/terraform/aws/efs/main.tf",
+    "share/terraform/aws/efs/output.tf",
+    "share/terraform/aws/efs/variables.tf",
+    "share/terraform/aws/efs_host/data.tf",
+    "share/terraform/aws/efs_host/locals.tf",
+    "share/terraform/aws/efs_host/main.tf",
+    "share/terraform/aws/efs_host/output.tf",
+    "share/terraform/aws/efs_host/variables.tf",
+    "share/terraform/aws/fargate/locals.tf",
+    "share/terraform/aws/fargate/main.tf",
+    "share/terraform/aws/fargate/variables.tf",
     "share/terraform/aws/host/locals.tf",
     "share/terraform/aws/host/main.tf",
     "share/terraform/aws/host/output.tf",
     "share/terraform/aws/host/variables.tf",
     "share/terraform/aws/lambda/main.tf",
     "share/terraform/aws/lambda/variables.tf",
+    "share/terraform/aws/network/data.tf",
+    "share/terraform/aws/network/main.tf",
+    "share/terraform/aws/network/output.tf",
+    "share/terraform/aws/network/variables.tf",
     "share/terraform/aws/policy_attachment/main.tf",
     "share/terraform/aws/policy_attachment/variables.tf",
     "share/terraform/aws/provider/data.tf",
@@ -66,7 +88,10 @@ Gem::Specification.new do |s|
     "share/terraform/aws/role/main.tf",
     "share/terraform/aws/role/output.tf",
     "share/terraform/aws/role/variables.tf",
+    "share/terraform/ssh/cmd/main.tf",
+    "share/terraform/ssh/cmd/variables.tf",
     "test/scout/aws/test_s3.rb",
+    "test/scout/offsite/test_resource.rb",
     "test/scout/offsite/test_ssh.rb",
     "test/scout/offsite/test_step.rb",
     "test/scout/offsite/test_sync.rb",
@@ -75,7 +100,7 @@ Gem::Specification.new do |s|
   ]
   s.homepage = "http://github.com/mikisvaz/scout-camp".freeze
   s.licenses = ["MIT".freeze]
-  s.rubygems_version = "3.6.5".freeze
+  s.rubygems_version = "3.6.7".freeze
   s.summary = "Deploy you scouts".freeze
 
   s.specification_version = 4
data/scout_commands/sync ADDED
@@ -0,0 +1,33 @@
+#!/usr/bin/env ruby
+
+require 'scout'
+require 'scout/offsite/resource'
+
+$0 = "scout #{$previous_commands.any? ? $previous_commands*" " + " " : "" }#{ File.basename(__FILE__) }" if $previous_commands
+
+options = SOPT.setup <<EOF
+
+Sync to resources between path maps or between hosts
+
+$ #{$0} [<options>] <path> [<path_map>]
+
+Path map defaults to :user
+
+-h--help Print this help
+-s--source* Host of source path
+-t--target* Host of target path
+EOF
+if options[:help]
+  if defined? scout_usage
+    scout_usage
+  else
+    puts SOPT.doc
+  end
+  exit 0
+end
+
+path, path_map = ARGV
+
+raise MissingParameterException, :path if path.nil?
+
+Resource.sync(Path.setup(path.dup), path_map, **options)
data/scout_commands/terraform/add/relay ADDED
@@ -0,0 +1,40 @@
+#!/usr/bin/env ruby
+
+require 'scout'
+
+$0 = "scout #{$previous_commands.any? ? $previous_commands*" " + " " : "" }#{ File.basename(__FILE__) }" if $previous_commands
+
+options = SOPT.setup <<EOF
+
+Add a new deployment
+
+$ #{$0} [<options>] <name>
+
+-h--help Print this help
+-s--server* Server name
+-u--user* User in server
+-m--model* Model name
+EOF
+if options[:help]
+  if defined? scout_usage
+    scout_usage
+  else
+    puts SOPT.doc
+  end
+  exit 0
+end
+
+name = ARGV.shift
+raise MissingParameterException, :name if name.nil?
+
+server, user, model = IndiferentHash.process_options options, :server, :user, :model
+
+deployments = Scout.var.deployments.glob_all("*").collect(&:basename)
+raise "Deployment clash" if deployments.include? name
+dir = Scout.var.deployments[name]
+
+terraform = TerraformDSL.new
+
+ollama = terraform.add :ssh, :cmd, service_id: "#{name}_ollama", name: "ollama", host: server, user: user, command: 'module load ollama; ollama serve'
+terraform.add :ssh, :cmd, service_id: "#{name}_ollama", name: "process", host: server, user: user, command: %Q(bash -l -c "scout-ai llm process -ck 'backend ollama ask,model #{model} ask'"), depends_on: [ollama]
+terraform.config dir
data/scout_commands/terraform/lambda_task ADDED
@@ -0,0 +1,124 @@
+#!/usr/bin/env ruby
+
+require 'scout'
+
+$0 = "scout #{$previous_commands.any? ? $previous_commands*" " + " " : "" }#{ File.basename(__FILE__) }" if $previous_commands
+
+options = SOPT.setup <<EOF
+
+Call a task on AWS
+
+$ #{$0} [<options>] <workflow> <task> [<other|->]*
+
+-h--help Print this help
+--prefix* Prefix, defaults to Scout
+--clean Clean job
+--recursive_clean Clean job recursively
+EOF
+workflow, task_name = ARGV
+
+if workflow.nil? && options[:help]
+  if defined? scout_usage
+    scout_usage
+  else
+    puts SOPT.doc
+  end
+  exit 0
+end
+
+raise ParamterException, "No workflow specified" if workflow.nil?
+
+prefix, clean, recursive_clean = IndiferentHash.process_options options, :prefix, :clean, :recursive_clean,
+  prefix: "Scout"
+
+require 'aws-sdk-lambda'
+
+payload = {}
+payload["workflow"] = workflow
+payload["task_name"] = task_name
+
+if clean
+  payload["clean"] = true
+elsif recursive_clean
+  payload["clean"] = 'recursive'
+end
+
+lambda_handler = "#{prefix}Job"
+
+def aws_lambda(name, payload)
+  client = Aws::Lambda::Client.new
+  resp = client.invoke({
+    function_name: name,
+    payload: payload.to_json,
+  })
+
+  JSON.load(resp.payload)
+end
+
+def SOPT_str(task_info)
+  sopt_options = []
+  task_info[:inputs].each do |name|
+    options = task_info[:input_options][name]
+    type = task_info[:input_types][name]
+    shortcut = (options && options[:shortcut]) || name.to_s.slice(0,1)
+    boolean = type.to_sym == :boolean
+
+    sopt_options << "-#{shortcut}--#{name}#{boolean ? "" : "*"}"
+  end
+
+  sopt_options * ":"
+end
+
+def get_SOPT(task_info)
+  IndiferentHash.setup task_info
+  sopt_option_string = self.SOPT_str(task_info)
+  job_options = SOPT.get sopt_option_string
+
+  task_info[:inputs].each do |name|
+    options = task_info[:input_options][name]
+    type = task_info[:input_types][name]
+    next unless type.to_s.include?('array')
+    if job_options.include?(name) && (! Open.exist?(job_options[name]) || type.to_s.include?('file') || type.to_s.include?('path'))
+      job_options[name] = job_options[name].split(",")
+    end
+  end
+  job_options
+end
+
+res = case task_name
+      when nil
+
+        if options[:help]
+          if defined? scout_usage
+            scout_usage
+          else
+            puts SOPT.doc
+          end
+          exit 0
+        end
+
+        aws_lambda(lambda_handler, payload)
+      else
+        task_info = aws_lambda(lambda_handler, payload.merge(task_name: "info", inputs: {task_name: task_name}))
+
+        payload["inputs"] = get_SOPT(task_info)
+
+        if options[:help]
+          if defined? scout_usage
+            scout_usage
+          else
+            puts SOPT.doc
+          end
+          exit 0
+        end
+
+        aws_lambda(lambda_handler, payload)
+      end
+
+if Hash === res && res["errorMessage"]
+  Log.error res["errorMessage"]
+  Log.stack res["stackTrace"] if res["stackTrace"]
+  exit -1
+else
+  iii res
+end
data/scout_commands/terraform/list CHANGED
@@ -11,6 +11,7 @@ List all deployments
 $ #{$0} [<options>] <filename>
 
 -h--help Print this help
+-a--active Show if they are active
 EOF
 if options[:help]
   if defined? scout_usage
@@ -24,12 +25,20 @@ end
 deployments = Scout.var.deployments.glob_all("*")
 
 deployments.each do |dir|
+  next unless File.directory?(dir)
   name = File.basename dir
   deployment = TerraformDSL::Deployment.new dir
-  active = deployment.provisioned_elements.any?
+  #active = options[:active] && deployment.provisioned_elements.any?
 
-  if active
-    puts Log.color(:title, name ) + "\t" + dir + "\t" + "Active"
+  if options[:active]
+    Log.with_severity 5 do
+      print Log.color(:title, name ) + "\t" + dir + "\t"
+      if deployment.provisioned_elements.any?
+        puts Log.color :present, "Active"
+      else
+        puts Log.color :missing, "Inactive"
+      end
+    end
   else
     puts Log.color(:title, name) + "\t" + dir
   end
data/scout_commands/terraform/outputs ADDED
@@ -0,0 +1,33 @@
+#!/usr/bin/env ruby
+
+require 'scout'
+
+$0 = "scout #{$previous_commands.any? ? $previous_commands*" " + " " : "" }#{ File.basename(__FILE__) }" if $previous_commands
+
+options = SOPT.setup <<EOF
+
+Work with deployment
+
+$ #{$0} [<options>] <name>
+
+-h--help Print this help
+EOF
+if options[:help]
+  if defined? scout_usage
+    scout_usage
+  else
+    puts SOPT.doc
+  end
+  exit 0
+end
+
+name = ARGV.shift
+raise MissingParameterException, :name if name.nil?
+
+dir = Scout.var.deployments.glob_all(name).first
+
+deployment = TerraformDSL::Deployment.new dir
+deployment.provisioned_elements.each do |element|
+  ppp deployment.element_state(element)
+end
+
data/share/aws/lambda_function.rb CHANGED
@@ -7,11 +7,11 @@ def lambda_handler(event:, context:)
   require 'scout/workflow'
   require 'scout/aws/s3'
 
-  workflow, task_name, jobname, inputs = IndiferentHash.process_options event,
-    :workflow, :task_name, :jobname, :inputs
-
+  workflow, task_name, jobname, inputs, clean = IndiferentHash.process_options event,
+    :workflow, :task_name, :jobname, :inputs, :clean
+
   raise ParamterException, "No workflow specified" if workflow.nil?
-
+
   workflow = Workflow.require_workflow workflow
 
   case task_name
@@ -22,6 +22,16 @@ def lambda_handler(event:, context:)
     return workflow.task_info(inputs["task_name"])
   else
     job = workflow.job(task_name, jobname, inputs)
-    job.run
+
+    case clean
+    when true, 'true'
+      job.clean
+    when 'recursive'
+      job.recursive_clean
+    end
+
+    job.produce
+
+    job.load
   end
 end
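The handler now honors a `clean` field in the event before producing and loading the job. A hedged sketch of an invocation payload, mirroring the `aws_lambda` helper in the `lambda_task` command; the workflow, task and inputs are hypothetical, and "ScoutJob" is the default "#{prefix}Job" function name:

```ruby
require 'aws-sdk-lambda'
require 'json'

# Hypothetical job description; 'clean' may also be 'recursive'
payload = {
  'workflow'  => 'ExampleWorkflow',
  'task_name' => 'example_task',
  'inputs'    => { 'times' => '3' },
  'clean'     => true
}

client = Aws::Lambda::Client.new
resp   = client.invoke(function_name: 'ScoutJob', payload: payload.to_json)
puts JSON.load(resp.payload)
```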
data/share/terraform/aws/efs/data.tf ADDED
@@ -0,0 +1,12 @@
+# Get default VPC (optional if you want to restrict to default VPC)
+data "aws_vpc" "default" {
+  default = true
+}
+
+# Get all subnets in the region (filtered to default VPC if needed)
+data "aws_subnets" "all" {
+  filter {
+    name   = "vpc-id"
+    values = [data.aws_vpc.default.id]
+  }
+}
data/share/terraform/aws/efs/locals.tf ADDED
@@ -0,0 +1,6 @@
+locals {
+  security_group_ids = [
+    for sg_key in var.sg_keys :
+    lookup(var.remote.outputs, sg_key, null)
+  ]
+}
data/share/terraform/aws/efs/main.tf ADDED
@@ -0,0 +1,14 @@
+resource "aws_efs_file_system" "this" {
+  creation_token = "herlab-efs"
+  tags = {
+    Name = "HERLab main EFS"
+  }
+}
+
+resource "aws_efs_mount_target" "this" {
+  for_each = toset(data.aws_subnets.all.ids)
+
+  file_system_id  = aws_efs_file_system.this.id
+  subnet_id       = each.value
+  security_groups = local.security_group_ids
+}
data/share/terraform/aws/efs/output.tf ADDED
@@ -0,0 +1,3 @@
+output "id" {
+  value = aws_efs_file_system.this.id
+}
data/share/terraform/aws/efs/variables.tf ADDED
@@ -0,0 +1,9 @@
+variable "remote" {
+  description = "Name of the remote state block to use"
+}
+
+variable "sg_keys" {
+  description = "List of output names in the remote state representing security group IDs"
+  type        = list(string)
+  default     = ["aws_network_efs_sg_id"]
+}
data/share/terraform/aws/efs_host/data.tf ADDED
@@ -0,0 +1,11 @@
+data "aws_ami" "amazon_linux_2" {
+  most_recent = true
+  owners      = ["amazon"]
+
+  # Filter for Amazon Linux 2 AMIs
+  filter {
+    name   = "name"
+    values = ["amzn2-ami-hvm-*-x86_64-gp2"]
+  }
+}
+
data/share/terraform/aws/efs_host/locals.tf ADDED
@@ -0,0 +1,8 @@
+locals {
+  security_group_ids = [
+    for sg_key in var.sg_keys :
+    lookup(var.network.outputs, sg_key, null)
+  ]
+
+  efs_id = lookup(var.efs.outputs, "aws_efs_id", null)
+}
data/share/terraform/aws/efs_host/main.tf ADDED
@@ -0,0 +1,31 @@
+resource "aws_key_pair" "this" {
+  key_name   = "my-key"
+  public_key = file("~/.ssh/id_rsa.pub") # Adjust if your key is elsewhere
+}
+
+resource "aws_instance" "this" {
+  ami           = data.aws_ami.amazon_linux_2.id
+  instance_type = "t2.micro"
+
+  key_name = aws_key_pair.this.key_name
+
+  tags = {
+    Name = "EFS-Service"
+  }
+
+  # Open port 22 for SSH
+  vpc_security_group_ids = local.security_group_ids
+
+  user_data = <<-EOF
+    #cloud-config
+    package_update: true
+    package_upgrade: true
+    packages:
+      - amazon-efs-utils
+    runcmd:
+      - mkdir -p /mnt/efs
+      - mount -t efs -o tls ${local.efs_id}:/ ${var.mount_point}
+      - echo "${local.efs_id}:/ ${var.mount_point} efs defaults,_netdev 0 0" >> /etc/fstab
+  EOF
+}
+
data/share/terraform/aws/efs_host/output.tf ADDED
@@ -0,0 +1,3 @@
+output "public_ip" {
+  value = aws_instance.this.public_ip
+}
data/share/terraform/aws/efs_host/variables.tf ADDED
@@ -0,0 +1,20 @@
+variable "network" {
+  description = "Name of the remote state block to use for the network"
+}
+
+variable "efs" {
+  description = "Name of the remote state block to use for the EFS"
+}
+
+variable "sg_keys" {
+  description = "List of output names in the remote state representing security group IDs"
+  type        = list(string)
+  default     = ["aws_network_efs_sg_id", "aws_network_ssh_sg_id"]
+}
+
+variable "mount_point" {
+  description = "Where to mount the efs drive"
+  type        = string
+  default     = "/mnt/efs"
+}
+
data/share/terraform/aws/fargate/locals.tf ADDED
@@ -0,0 +1,8 @@
+locals {
+  security_group_ids = [
+    for sg_key in var.sg_keys :
+    lookup(var.network.outputs, sg_key, null)
+  ]
+
+  efs_id = lookup(var.efs.outputs, "aws_efs_id", null)
+}
data/share/terraform/aws/fargate/main.tf ADDED
@@ -0,0 +1,38 @@
+resource "aws_ecs_task_definition" "this" {
+  family                   = var.task_family
+  requires_compatibilities = ["FARGATE"]
+  network_mode             = "awsvpc"
+  cpu                      = var.cpu
+  memory                   = var.memory
+  execution_role_arn       = var.role_arn
+
+  container_definitions = jsonencode([
+    {
+      name      = var.container_name
+      image     = var.image
+      essential = true
+      portMappings = var.port_mappings
+      //entryPoint = var.entry_point
+      command = var.command
+
+      mountPoints = [
+        {
+          sourceVolume  = "efs-volume"
+          containerPath = var.mount_point
+        }
+      ]
+    }
+  ])
+
+  volume {
+    name = "efs-volume"
+    efs_volume_configuration {
+      file_system_id = local.efs_id
+      root_directory = "/"
+    }
+  }
+}
+
+resource "aws_ecs_cluster" "this" {
+  name = "${var.task_family}_cluster"
+}
data/share/terraform/aws/fargate/variables.tf ADDED
@@ -0,0 +1,73 @@
+variable "network" {
+  description = "Name of the remote state block to use for the network"
+}
+
+variable "efs" {
+  description = "Name of the remote state block to use for the EFS"
+}
+
+variable "sg_keys" {
+  description = "List of output names in the remote state representing security group IDs"
+  type        = list(string)
+  default     = ["aws_network_efs_sg_id", "aws_network_ssh_sg_id"]
+}
+
+variable "mount_point" {
+  description = "Where to mount the efs drive"
+  type        = string
+  default     = "/mnt/efs"
+}
+
+variable "task_family" {
+  type        = string
+  description = "The family name of the ECS task definition"
+}
+
+variable "cpu" {
+  type        = number
+  description = "The CPU units for the task"
+  default     = 256
+}
+
+variable "memory" {
+  type        = number
+  description = "The memory (MiB) for the task"
+  default     = 512
+}
+
+variable "role_arn" {
+  type        = string
+  description = "ARN of the task execution role"
+}
+
+variable "container_name" {
+  type        = string
+  description = "Name of the container"
+  default     = "app"
+}
+
+variable "image" {
+  type        = string
+  description = "Docker image URL for the container"
+}
+
+variable "port_mappings" {
+  type = list(object({
+    containerPort = number
+    hostPort      = number
+    protocol      = string
+  }))
+  description = "List of port mappings for the container"
+  default     = []
+}
+
+variable "command" {
+  type        = list(string)
+  description = "Command to run"
+}
+
+variable "entry_point" {
+  type        = list(string)
+  description = "Container entry point"
+  default     = ["bash"]
+}
data/share/terraform/aws/network/data.tf ADDED
@@ -0,0 +1,15 @@
+data "aws_vpc" "default" {
+  filter {
+    name   = "is-default"
+    values = ["true"]
+  }
+}
+
+# Get all subnets in the default VPC
+data "aws_subnets" "default" {
+  filter {
+    name   = "vpc-id"
+    values = [data.aws_vpc.default.id]
+  }
+}
+
data/share/terraform/aws/network/main.tf ADDED
@@ -0,0 +1,41 @@
+resource "aws_security_group" "efs" {
+  name        = "efs-sg"
+  description = "Allow NFS"
+
+  ingress {
+    from_port   = 2049
+    to_port     = 2049
+    protocol    = "tcp"
+    cidr_blocks = ["0.0.0.0/0"]
+  }
+
+  egress {
+    from_port   = 0
+    to_port     = 0
+    protocol    = "-1"
+    cidr_blocks = ["0.0.0.0/0"]
+  }
+
+  tags = {
+    Name = "efs-sg"
+  }
+}
+
+resource "aws_security_group" "ssh" {
+  name        = "allow_ssh"
+  description = "Allow SSH inbound traffic"
+
+  ingress {
+    from_port   = 22
+    to_port     = 22
+    protocol    = "tcp"
+    cidr_blocks = ["0.0.0.0/0"] # WARNING: open to the world. Limit this for production.
+  }
+
+  egress {
+    from_port   = 0
+    to_port     = 0
+    protocol    = "-1"
+    cidr_blocks = ["0.0.0.0/0"]
+  }
+}
data/share/terraform/aws/network/output.tf ADDED
@@ -0,0 +1,7 @@
+output "efs_sg_id" {
+  value = aws_security_group.efs.id
+}
+
+output "ssh_sg_id" {
+  value = aws_security_group.ssh.id
+}
data/share/terraform/aws/network/variables.tf
File without changes
data/share/terraform/ssh/cmd/main.tf ADDED
@@ -0,0 +1,34 @@
+terraform {
+  required_providers {
+    ssh = {
+      source  = "loafoe/ssh"
+      version = ">= 1.0.0"
+    }
+  }
+}
+
+resource "ssh_resource" "up" {
+  host  = var.host
+  user  = var.user
+  agent = true
+  when  = "create"
+
+  commands = [
+    "${var.command} > ${var.service_id}.log 2>&1 & echo $! > /tmp/${var.service_id}.pid"
+  ]
+
+}
+
+resource "ssh_resource" "down" {
+  host  = var.host
+  user  = var.user
+  agent = true
+  when  = "destroy"
+
+  commands = [
+    "kill $(cat /tmp/${var.service_id}.pid) || true",
+    "rm -f /tmp/${var.service_id}.pid"
+  ]
+}
+
+
data/share/terraform/ssh/cmd/variables.tf ADDED
@@ -0,0 +1,19 @@
+variable "host" {
+  description = "Target host for SSH"
+  type        = string
+}
+
+variable "user" {
+  description = "Username for SSH"
+  type        = string
+}
+
+variable "service_id" {
+  description = "Identifier of the service, should be unique to avoid collisions"
+  type        = string
+}
+
+variable "command" {
+  description = "Command to execute"
+  type        = string
+}
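The ssh/cmd module backgrounds an arbitrary command over SSH on create and kills it on destroy, keyed by `service_id` for the pid and log files. A hedged sketch of wiring it up from the DSL, in the style of the relay command above; the host, user and command are hypothetical:

```ruby
require 'scout/terraform_dsl'

terraform = TerraformDSL.new

# Hypothetical host, user and command; service_id names the pid/log files on the host
terraform.add :ssh, :cmd,
  service_id: 'demo_service', name: 'server',
  host: 'workstation.example.org', user: 'deploy',
  command: 'python -m http.server 8000'

terraform.config Scout.var.deployments['demo']
```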
data/test/scout/offsite/test_resource.rb ADDED
@@ -0,0 +1,46 @@
+require File.expand_path(__FILE__).sub(%r(/test/.*), '/test/test_helper.rb')
+require File.expand_path(__FILE__).sub(%r(.*/test/), '').sub(/test_(.*)\.rb/,'\1')
+
+require 'scout'
+class TestResourceSync < Test::Unit::TestCase
+  def test_sync_file
+    TmpFile.with_path do |source|
+      TmpFile.with_path do |target|
+        Open.write(source.file, 'payload')
+        Misc.in_dir target.find do
+          Resource.sync(source.file, :current)
+        end
+
+        assert_equal 'payload', Open.read(target[Resource.identify(source)].file)
+      end
+    end
+  end
+
+  def test_sync_dir
+    TmpFile.with_path do |source|
+      TmpFile.with_path do |target|
+        Open.write(source.file, 'payload')
+        Misc.in_dir target.find do
+          Resource.sync(source, :current)
+        end
+
+        assert_equal 'payload', Open.read(target[Resource.identify(source)].file)
+      end
+    end
+  end
+
+  def test_sync_from_localhost
+    TmpFile.with_path do |source|
+      TmpFile.with_path do |target|
+        Open.write(source.file, 'payload')
+        Misc.in_dir target.find do
+          sss 0
+          Resource.sync(source, :current, source: 'localhost')
+        end
+
+        assert_equal 'payload', Open.read(target[Resource.identify(source)].file)
+      end
+    end
+  end
+end
+
data/test/scout/offsite/test_sync.rb CHANGED
@@ -1,6 +1,7 @@
 require File.expand_path(__FILE__).sub(%r(/test/.*), '/test/test_helper.rb')
 require File.expand_path(__FILE__).sub(%r(.*/test/), '').sub(/test_(.*)\.rb/,'\1')
 
+require 'scout'
 class TestSync < Test::Unit::TestCase
   def test_sync
     TmpFile.with_path do |tmpdir|
metadata CHANGED
@@ -1,13 +1,13 @@
 --- !ruby/object:Gem::Specification
 name: scout-camp
 version: !ruby/object:Gem::Version
-  version: 0.1.5
+  version: 0.1.8
 platform: ruby
 authors:
 - Miguel Vazquez
 bindir: bin
 cert_chain: []
-date: 2025-03-31 00:00:00.000000000 Z
+date: 1980-01-02 00:00:00.000000000 Z
 dependencies:
 - !ruby/object:Gem::Dependency
   name: scout-essentials
@@ -42,6 +42,7 @@ files:
 - lib/scout/aws/s3.rb
 - lib/scout/offsite.rb
 - lib/scout/offsite/exceptions.rb
+- lib/scout/offsite/resource.rb
 - lib/scout/offsite/ssh.rb
 - lib/scout/offsite/step.rb
 - lib/scout/offsite/sync.rb
@@ -50,10 +51,14 @@ files:
 - lib/scout/terraform_dsl/util.rb
 - scout-camp.gemspec
 - scout_commands/offsite
-- scout_commands/terraform/add
+- scout_commands/sync
+- scout_commands/terraform/add/lambda
+- scout_commands/terraform/add/relay
 - scout_commands/terraform/apply
 - scout_commands/terraform/destroy
+- scout_commands/terraform/lambda_task
 - scout_commands/terraform/list
+- scout_commands/terraform/outputs
 - scout_commands/terraform/plan
 - scout_commands/terraform/remove
 - scout_commands/terraform/status
@@ -65,12 +70,29 @@ files:
 - share/terraform/aws/cluster/main.tf
 - share/terraform/aws/cluster/output.tf
 - share/terraform/aws/cluster/variables.tf
+- share/terraform/aws/efs/data.tf
+- share/terraform/aws/efs/locals.tf
+- share/terraform/aws/efs/main.tf
+- share/terraform/aws/efs/output.tf
+- share/terraform/aws/efs/variables.tf
+- share/terraform/aws/efs_host/data.tf
+- share/terraform/aws/efs_host/locals.tf
+- share/terraform/aws/efs_host/main.tf
+- share/terraform/aws/efs_host/output.tf
+- share/terraform/aws/efs_host/variables.tf
+- share/terraform/aws/fargate/locals.tf
+- share/terraform/aws/fargate/main.tf
+- share/terraform/aws/fargate/variables.tf
 - share/terraform/aws/host/locals.tf
 - share/terraform/aws/host/main.tf
 - share/terraform/aws/host/output.tf
 - share/terraform/aws/host/variables.tf
 - share/terraform/aws/lambda/main.tf
 - share/terraform/aws/lambda/variables.tf
+- share/terraform/aws/network/data.tf
+- share/terraform/aws/network/main.tf
+- share/terraform/aws/network/output.tf
+- share/terraform/aws/network/variables.tf
 - share/terraform/aws/policy_attachment/main.tf
 - share/terraform/aws/policy_attachment/variables.tf
 - share/terraform/aws/provider/data.tf
@@ -78,7 +100,10 @@ files:
 - share/terraform/aws/role/main.tf
 - share/terraform/aws/role/output.tf
 - share/terraform/aws/role/variables.tf
+- share/terraform/ssh/cmd/main.tf
+- share/terraform/ssh/cmd/variables.tf
 - test/scout/aws/test_s3.rb
+- test/scout/offsite/test_resource.rb
 - test/scout/offsite/test_ssh.rb
 - test/scout/offsite/test_step.rb
 - test/scout/offsite/test_sync.rb
@@ -102,7 +127,7 @@ required_rubygems_version: !ruby/object:Gem::Requirement
   - !ruby/object:Gem::Version
     version: '0'
 requirements: []
-rubygems_version: 3.6.5
+rubygems_version: 3.6.7
 specification_version: 4
 summary: Deploy you scouts
 test_files: []
data/scout_commands/terraform/{add → add/lambda}
File without changes