etna 0.1.15 → 0.1.21
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- checksums.yaml +4 -4
- data/bin/etna +18 -0
- data/etna.completion +1001 -0
- data/etna_app.completion +133 -0
- data/ext/completions/extconf.rb +20 -0
- data/lib/commands.rb +395 -0
- data/lib/etna.rb +7 -0
- data/lib/etna/application.rb +46 -22
- data/lib/etna/client.rb +82 -48
- data/lib/etna/clients.rb +4 -0
- data/lib/etna/clients/enum.rb +9 -0
- data/lib/etna/clients/janus.rb +2 -0
- data/lib/etna/clients/janus/client.rb +73 -0
- data/lib/etna/clients/janus/models.rb +78 -0
- data/lib/etna/clients/magma.rb +4 -0
- data/lib/etna/clients/magma/client.rb +80 -0
- data/lib/etna/clients/magma/formatting.rb +1 -0
- data/lib/etna/clients/magma/formatting/models_csv.rb +354 -0
- data/lib/etna/clients/magma/models.rb +630 -0
- data/lib/etna/clients/magma/workflows.rb +10 -0
- data/lib/etna/clients/magma/workflows/add_project_models_workflow.rb +67 -0
- data/lib/etna/clients/magma/workflows/attribute_actions_from_json_workflow.rb +62 -0
- data/lib/etna/clients/magma/workflows/create_project_workflow.rb +123 -0
- data/lib/etna/clients/magma/workflows/crud_workflow.rb +85 -0
- data/lib/etna/clients/magma/workflows/ensure_containing_record_workflow.rb +44 -0
- data/lib/etna/clients/magma/workflows/file_attributes_blank_workflow.rb +68 -0
- data/lib/etna/clients/magma/workflows/file_linking_workflow.rb +115 -0
- data/lib/etna/clients/magma/workflows/json_converters.rb +81 -0
- data/lib/etna/clients/magma/workflows/json_validators.rb +452 -0
- data/lib/etna/clients/magma/workflows/model_synchronization_workflow.rb +306 -0
- data/lib/etna/clients/magma/workflows/record_synchronization_workflow.rb +63 -0
- data/lib/etna/clients/magma/workflows/update_attributes_from_csv_workflow.rb +246 -0
- data/lib/etna/clients/metis.rb +3 -0
- data/lib/etna/clients/metis/client.rb +239 -0
- data/lib/etna/clients/metis/models.rb +313 -0
- data/lib/etna/clients/metis/workflows.rb +2 -0
- data/lib/etna/clients/metis/workflows/metis_download_workflow.rb +37 -0
- data/lib/etna/clients/metis/workflows/metis_upload_workflow.rb +137 -0
- data/lib/etna/clients/polyphemus.rb +3 -0
- data/lib/etna/clients/polyphemus/client.rb +33 -0
- data/lib/etna/clients/polyphemus/models.rb +68 -0
- data/lib/etna/clients/polyphemus/workflows.rb +1 -0
- data/lib/etna/clients/polyphemus/workflows/set_configuration_workflow.rb +47 -0
- data/lib/etna/command.rb +243 -5
- data/lib/etna/controller.rb +4 -0
- data/lib/etna/csvs.rb +159 -0
- data/lib/etna/directed_graph.rb +56 -0
- data/lib/etna/environment_scoped.rb +19 -0
- data/lib/etna/errors.rb +6 -0
- data/lib/etna/generate_autocompletion_script.rb +131 -0
- data/lib/etna/json_serializable_struct.rb +37 -0
- data/lib/etna/logger.rb +24 -2
- data/lib/etna/multipart_serializable_nested_hash.rb +50 -0
- data/lib/etna/route.rb +1 -1
- data/lib/etna/server.rb +3 -0
- data/lib/etna/spec/vcr.rb +99 -0
- data/lib/etna/templates/attribute_actions_template.json +43 -0
- data/lib/etna/test_auth.rb +3 -1
- data/lib/etna/user.rb +4 -0
- data/lib/helpers.rb +90 -0
- metadata +70 -5
data/lib/etna/controller.rb
CHANGED
@@ -12,6 +12,7 @@ module Etna
       @logger = @request.env['etna.logger']
       @user = @request.env['etna.user']
       @request_id = @request.env['etna.request_id']
+      @hmac = @request.env['etna.hmac']
     end
 
     def log(line)
@@ -23,11 +24,14 @@ module Etna
 
       return send(@action) if @action
 
+
       [501, {}, ['This controller is not implemented.']]
     rescue Etna::Error => e
+      Rollbar.error(e)
       @logger.error(request_msg("Exiting with #{e.status}, #{e.message}"))
       return failure(e.status, error: e.message)
     rescue Exception => e
+      Rollbar.error(e)
       @logger.error(request_msg('Caught unspecified error'))
       @logger.error(request_msg(e.message))
       e.backtrace.each do |trace|
data/lib/etna/csvs.rb
ADDED
@@ -0,0 +1,159 @@
+module Etna
+  class CsvImporter
+    def initialize(
+        strip: true,
+        filter_empties: true,
+        &row_formatter
+    )
+      # Removes any columns from a row that are empty strings. Allows for some simpler 'empty' processing. This occurs
+      # post stripping as well.
+      @filter_empties = filter_empties
+      @row_formatter = row_formatter
+    end
+
+    COLUMN_AS_BOOLEAN = -> (s) { ['true', 't', 'y', 'yes'].include?(s&.downcase) }
+
+    def each_csv_row(filename: nil, input_io: nil, &block)
+      if input_io.nil?
+        unless filename.nil?
+          File.open(filename, 'r') do |io|
+            return each_csv_row(input_io: io, &block)
+          end
+        end
+      end
+
+      lineno = 1
+      CSV.parse(input_io, headers: true, header_converters: :symbol) do |row|
+        lineno += 1
+        row = row.to_hash
+        row.keys.each { |k| row[k].strip! if row[k] =~ /^\s+$/ } if @strip
+        row.select! { |k, v| !v.empty? } if @filter_empties
+        @row_formatter.call(row) unless @row_formatter.nil?
+        yield row, lineno if block_given?
+      end
+    end
+
+    def replace_row_column(row, column, &block)
+      if !row[column].nil? || block.arity == 0
+        row[column] = yield row[column]
+      end
+    end
+
+    class ImportError < StandardError
+      attr_accessor :lineno
+
+      def initialize(msg, lineno = nil)
+        @lineno = lineno
+        super(msg)
+      end
+
+      def message
+        "line #{lineno}: #{super}"
+      end
+    end
+
+    class NestedRowProcessor
+      attr_reader :row, :lineno, :context, :errors
+
+      def initialize(row, lineno, context)
+        @row = row
+        @lineno = lineno
+        @context = context
+        @errors = []
+
+        # If a parent context changes, all child contexts are invalidated. But since parent contexts are changed
+        # before the relationship of child contexts are declared, we have to track that so that when a child context
+        # dependency is declared we can clear it based on wether parents have changed.
+        @changed = {}
+      end
+
+      def process(column, *parents, &block)
+        if parents.any? { |p| @changed.include?(p) }
+          @changed[column] = true
+          @context[column] = nil
+        end
+
+        return self if (next_val = row[column]).nil?
+        @changed[column] = true
+
+        parent_values = parents.map do |p|
+          if @context[p].nil?
+            raise ImportError.new("Found a #{column} value, but no previous #{p} had been given!", @lineno)
+          end
+
+          @context[p]
+        end
+
+        begin
+          next_val = yield next_val, *parent_values, self if block_given?
+        rescue ImportError => e
+          e.lineno = @lineno
+          raise e
+        end
+
+        @context[column] = next_val
+        self
+      end
+    end
+  end
+
+  class CsvExporter
+    # column_headers should be an array of symbols, mapping the column heading names and ordering to export
+    # column_serializer is an optional block that takes column (string), column_value (string) and should
+    # return a string representation of column_value to write to the csv. By default, when nil, the exporter
+    # will attempt to convert the value to a string via to_s or simply write an empty string for nil.
+
+    attr_reader :column_headers
+    def initialize(column_headers, &column_serializer)
+      @column_headers = column_headers
+      @column_serializer = column_serializer
+    end
+
+    def header_row
+      @column_headers.map(&:to_s)
+    end
+
+    def map_column_value(column, column_value)
+      @column_serializer&.call(column, column_value) || column_value&.to_s || ''
+    end
+
+    def row_from_columns(columns)
+      @column_headers.map { |c| self.map_column_value(c, columns[c] || '') }
+    end
+
+    def with_row_writeable(filename: nil, output_io: nil, &block)
+      if output_io.nil? && !filename.nil?
+        File.open(filename, 'w') do |io|
+          return with_row_writeable(output_io: io, &block)
+        end
+      end
+
+      writeable = self.class::RowWriteable.new(self, CSV.new(output_io))
+      yield writeable
+      writeable.ensure_headers
+      nil
+    end
+
+    class RowWriteable
+      def initialize(exporter, csv)
+        @exporter = exporter
+        @csv = csv
+        @written_headers = false
+      end
+
+      def <<(columns)
+        self.ensure_headers
+        @csv << @exporter.row_from_columns(columns)
+      end
+
+      def write(**columns)
+        self.<<(columns)
+      end
+
+      def ensure_headers
+        @csv << @exporter.header_row unless @written_headers
+        @written_headers = true
+      end
+    end
+  end
+end
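For orientation, a minimal usage sketch of the new CsvImporter and CsvExporter, not part of the diff itself; the require path, file names, and column names below are illustrative assumptions:

require 'csv'
require 'etna'   # assumes the gem's lib directory is on the load path

# Import: each row arrives as a symbol-keyed hash; empty cells are dropped by default.
importer = Etna::CsvImporter.new
importer.each_csv_row(filename: 'samples.csv') do |row, lineno|
  puts "#{lineno}: #{row[:sample_name]}"
end

# Export: the header row is written once, then each hash becomes a CSV row.
exporter = Etna::CsvExporter.new([:sample_name, :tissue])
exporter.with_row_writeable(filename: 'out.csv') do |writeable|
  writeable.write(sample_name: 'S1', tissue: 'liver')
end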
data/lib/etna/directed_graph.rb
ADDED
@@ -0,0 +1,56 @@
+class DirectedGraph
+  def initialize
+    @children = {}
+    @parents = {}
+  end
+
+  attr_reader :children
+  attr_reader :parents
+
+  def add_connection(parent, child)
+    children = @children[parent] ||= {}
+    child_children = @children[child] ||= {}
+
+    children[child] = child_children
+
+    parents = @parents[child] ||= {}
+    parent_parents = @parents[parent] ||= {}
+    parents[parent] = parent_parents
+  end
+
+  def descendants(parent)
+    seen = Set.new
+
+    seen.add(parent)
+    queue = @children[parent].keys.dup
+    parent_queue = @parents[parent].keys.dup
+
+    # Because this is not an acyclical graph, the definition of descendants needs to be stronger;
+    # here we believe that any path that would move through --any-- parent to this child would not be considered
+    # descendant, so we first find all those parents and mark them as 'seen' so that they are not traveled.
+    while next_parent = parent_queue.pop
+      next if seen.include?(next_parent)
+      seen.add(next_parent)
+      parent_queue.push(*@parents[next_parent].keys)
+    end
+
+    queue = queue.nil? ? [] : queue.dup
+    paths = {}
+
+    while child = queue.pop
+      next if seen.include? child
+      seen.add(child)
+      path = (paths[child] ||= [parent])
+
+      @children[child].keys.each do |child_child|
+        queue.push child_child
+
+        unless paths.include? child_child
+          paths[child_child] = path + [child]
+        end
+      end
+    end
+
+    paths
+  end
+end
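A short sketch of how the new DirectedGraph might be used (the node names are illustrative); descendants returns, for each reachable node, the path walked from the starting node:

require 'set'   # descendants relies on Set

graph = DirectedGraph.new
graph.add_connection('project', 'subject')
graph.add_connection('subject', 'sample')
graph.add_connection('sample', 'rna_seq')

graph.descendants('project')
# => { 'subject' => ['project'],
#      'sample'  => ['project', 'subject'],
#      'rna_seq' => ['project', 'subject', 'sample'] }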
data/lib/etna/environment_scoped.rb
ADDED
@@ -0,0 +1,19 @@
+class EnvironmentScoped < Module
+  def initialize(&block)
+    environment_class = Class.new do
+      class_eval(&block)
+
+      attr_reader :environment
+      def initialize(environment)
+        @environment = environment
+      end
+    end
+
+    super() do
+      define_method :environment do |env|
+        env = env.to_sym
+        (@envs ||= {})[env] ||= environment_class.new(env)
+      end
+    end
+  end
+end
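EnvironmentScoped builds a Module that, when included, exposes an environment(name) method memoizing one instance of the block-defined class per environment. A hypothetical sketch (ProjectEnvironment, Settings, and magma_host are made-up names):

ProjectEnvironment = EnvironmentScoped.new do
  def magma_host
    "https://magma.#{environment}.example.org"
  end
end

class Settings
  include ProjectEnvironment
end

settings = Settings.new
settings.environment(:production).magma_host
# => "https://magma.production.example.org"
# Repeated calls with the same environment name return the same memoized object.
settings.environment(:production).equal?(settings.environment('production'))  # => true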
data/lib/etna/generate_autocompletion_script.rb
ADDED
@@ -0,0 +1,131 @@
+require_relative 'command'
+
+# application.rb instantiates this for the project scoping.
+# This generates a file, project-name.completion, which is sourced
+# in build.sh to provide autocompletion in that environment.
+module Etna
+  class GenerateCompletionScript < Etna::Command
+    def generate_for_command(command)
+      completions = command.completions
+      completions.each do |c|
+        generate_start_match(c, false)
+        write "fi"
+        write "shift"
+      end
+
+      enable_flags(command.class)
+      write 'while [[ "$#" != "0" ]]; do'
+      generate_start_match([])
+      generate_flag_handling
+
+      write "else"
+      write "return"
+      write 'fi'
+      write 'done'
+      write "return"
+    end
+
+    def generate_flag_handling
+      write %Q(elif [[ -z "$(echo $all_flag_completion_names | xargs)" ]]; then)
+      write "return"
+      write %Q(elif [[ "$all_flag_completion_names" =~ $1\\ ]]; then)
+      write %Q(all_flag_completion_names="${all_flag_completion_names//$1\\ /}")
+      write 'a=$1'
+      write 'shift'
+      write %Q(if [[ "$string_flag_completion_names" =~ $a\\ ]]; then)
+      write 'if [[ "$#" == "1" ]]; then'
+      write %Q(a="${a//--/}")
+      write %Q(a="${a//-/_}")
+      write %Q(i="_completions_for_$a")
+      write %Q(all_completion_names="${!i}")
+      write 'COMPREPLY=($(compgen -W "$all_completion_names" -- "$1"))'
+      write 'return'
+      write 'fi'
+      write 'shift'
+      write 'fi'
+    end
+
+    def generate_start_match(completions, include_flags=true)
+      write 'if [[ "$#" == "1" ]]; then'
+      write %Q(all_completion_names="#{completions.join(' ')}")
+      write %Q(all_completion_names="$all_completion_names $all_flag_completion_names") if include_flags
+      write %Q(if [[ -z "$(echo $all_completion_names | xargs)" ]]; then)
+      write 'return'
+      write 'fi'
+      write 'COMPREPLY=($(compgen -W "$all_completion_names" -- "$1"))'
+      write 'return'
+    end
+
+    def enable_flags(flags_container)
+      boolean_flags = flags_container.boolean_flags
+      string_flags = flags_container.string_flags
+      flags = boolean_flags + string_flags
+      write %Q(all_flag_completion_names="$all_flag_completion_names #{flags.join(' ')} ")
+      write %Q(string_flag_completion_names="$string_flag_completion_names #{string_flags.join(' ')} ")
+
+      string_flags.each do |flag|
+        write %Q(declare _completions_for_#{flag_as_parameter(flag)}="#{completions_for(flag_as_parameter(flag)).join(' ')}")
+      end
+    end
+
+    def generate_for_scope(scope)
+      enable_flags(scope.class)
+      write 'while [[ "$#" != "0" ]]; do'
+      generate_start_match(scope.subcommands.keys)
+
+      scope.subcommands.each do |name, command|
+        write %Q(elif [[ "$1" == "#{name}" ]]; then)
+        write 'shift'
+        if command.class.included_modules.include?(CommandExecutor)
+          generate_for_scope(command)
+        else
+          generate_for_command(command)
+        end
+      end
+
+      generate_flag_handling
+
+      write "else"
+      write "return"
+      write "fi"
+      write 'done'
+    end
+
+    def program_name
+      $PROGRAM_NAME
+    end
+
+    def execute
+      name = File.basename(program_name)
+
+      write <<-EOF
+#!/usr/bin/env bash
+
+function _#{name}_completions() {
+  _#{name}_inner_completions "${COMP_WORDS[@]:1:COMP_CWORD}"
+}
+
+function _#{name}_inner_completions() {
+  local all_flag_completion_names=''
+  local string_flag_completion_names=''
+  local all_completion_names=''
+  local i=''
+  local a=''
+EOF
+      generate_for_scope(parent)
+      write <<-EOF
+}
+
+complete -o default -F _#{name}_completions #{name}
+EOF
+
+      File.open("#{name}.completion", 'w') { |f| f.write(@script) }
+    end
+
+    def write(string)
+      @script ||= ""
+      @script << string
+      @script << "\n"
+    end
+  end
+end
data/lib/etna/json_serializable_struct.rb
ADDED
@@ -0,0 +1,37 @@
+module Etna
+  module JsonSerializableStruct
+    def self.included(cls)
+      cls.instance_eval do
+        def self.as_json(v)
+          if v.respond_to? :as_json
+            return v.as_json
+          end
+
+          if v.is_a? Hash
+            return v.map { |k, v| [k, as_json(v)] }.to_h
+          end
+
+          if v.class.include? Enumerable
+            return v.map { |v| as_json(v) }
+          end
+
+          v
+        end
+      end
+    end
+
+    def as_json(keep_nils: false)
+      inner_json = members.map do |k|
+        v = self.class.as_json(send(k))
+        [k, v]
+      end.to_h
+
+      return inner_json if keep_nils
+      inner_json.delete_if { |k, v| v.nil? }
+    end
+
+    def to_json
+      as_json.to_json
+    end
+  end
+end
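A minimal sketch of how a Struct might opt into JsonSerializableStruct; Signature and its fields are hypothetical, and nil members are dropped by default:

require 'json'

Signature = Struct.new(:name, :email, :comment, keyword_init: true) do
  include Etna::JsonSerializableStruct
end

sig = Signature.new(name: 'Ada', comment: 'ok')
sig.as_json                   # => {:name=>"Ada", :comment=>"ok"}
sig.as_json(keep_nils: true)  # => {:name=>"Ada", :email=>nil, :comment=>"ok"}
sig.to_json                   # => '{"name":"Ada","comment":"ok"}'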
data/lib/etna/logger.rb
CHANGED
@@ -1,10 +1,18 @@
 require 'logger'
+require 'rollbar'
 
 module Etna
   class Logger < ::Logger
-    def initialize(log_dev, age, size)
-
+    def initialize(log_dev, age, size=1048576)
+      # On windows, these posix devices exist, but are not mounted in *nix style paths.
+      # Swap the paths out with the actual IO handles instead.
+      if log_dev == '/dev/stdout'
+        log_dev = STDOUT
+      elsif log_dev == '/dev/stderr'
+        log_dev = STDERR
+      end
 
+      super
       self.formatter = proc do |severity, datetime, progname, msg|
         format(severity, datetime, progname, msg)
       end
@@ -14,11 +22,25 @@ module Etna
       "#{severity}:#{datetime.iso8601} #{msg}\n"
     end
 
+    def warn(msg, &block)
+      super
+    end
+
+    def error(msg, &block)
+      super
+    end
+
+    def fatal(msg, &block)
+      super
+    end
+
    def log_error(e)
      error(e.message)
      e.backtrace.each do |trace|
        error(trace)
      end
+
+      Rollbar.error(e)
    end
 
    def log_request(request)