xapixctl 1.1.2 → 1.2.4

@@ -0,0 +1,184 @@
+ # frozen_string_literal: true
+
+ module Xapixctl
+   module PhoenixClient
+     class ProjectConnection < OrganizationConnection
+       attr_reader :project
+
+       def initialize(connection, org, project)
+         super(connection, org)
+         @project = project
+       end
+
+       def project_resource(format: :hash, &block)
+         organization.resource('Project', @project, format: format, &block)
+       end
+
+       def organization
+         OrganizationConnection.new(@connection, @org)
+       end
+
+       def resource_types_for_export
+         @resource_types_for_export ||=
+           @connection.available_resource_types do |res|
+             res.on_success do |available_types|
+               prj_types = available_types.select { |desc| desc['context'] == 'Project' }
+               SUPPORTED_RESOURCE_TYPES & prj_types.map { |desc| desc['type'] }
+             end
+           end
+       end
+
+       # Notes on parameters:
+       # - Query parameters should be part of the URL
+       # - Path parameters should be marked with `{name}` in the URL, and their values should be given in the path_params hash
+       # - Headers should be given in the headers hash
+       # - Cookies should be given in the cookies hash
+       # - The body has to be given as a string
+       # - The required authentication schemes should be listed, referring to previously created schemes
+       #
+       # This returns a hash like the following:
+       #   "data_source" => { "id" => id, "resource_description" => resource_description }
+       #
+       # To successfully onboard a data source using the API, the following steps are needed:
+       #  1. set up the data source using add_rest_data_source.
+       #  2. retrieve a preview using data_source_preview with the id returned by the previous step
+       #  3. confirm the preview
+       #  4. call accept_data_source_preview to complete onboarding
+       #
+       def add_rest_data_source(http_method:, url:, path_params: {}, headers: {}, cookies: {}, body: nil, auth_schemes: [], &block)
+         data_source_details = {
+           data_source: {
+             http_method: http_method, url: url,
+             parameters: { path: path_params.to_query, header: headers.to_query, cookies: cookies.to_query, body: body },
+             auth_schemes: auth_schemes
+           }
+         }
+         result_handler(block).
+           run { @client[rest_data_source_path].post(data_source_details.to_json, content_type: :json) }
+       end
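
A minimal sketch of step 1 above, assuming `connection` is an already-configured PhoenixClient connection; the URL, parameters, and auth scheme reference are placeholders:

    prj = Xapixctl::PhoenixClient::ProjectConnection.new(connection, 'xapix', 'some-project')
    result = prj.add_rest_data_source(
      http_method: 'GET',
      url: 'https://api.example.com/items/{id}',
      path_params: { 'id' => '42' },
      headers: { 'Accept' => 'application/json' },
      auth_schemes: ['auth-scheme-ref']   # reference to a previously created scheme
    ) do |res|
      res.on_success { |data| data }      # return the parsed response hash
      res.on_error { |err, details| abort("#{err}: #{details}") }
    end
    data_source_id = result.dig('data_source', 'id')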
+
+       # Notes on parameters:
+       # - To call a data source which requires authentication, provide a hash with each required auth scheme as the key
+       #   and a reference to a previously created credential as the value.
+       #   Example: { scheme_ref1 => credential_ref1, scheme_ref2 => credential_ref2 }
+       #
+       # This returns a hashified preview like the following:
+       #   { "preview" => {
+       #       "sample" => { "status" => integer, "body" => { ... }, "headers" => { ... }, "cookies" => { ... } },
+       #       "fetched_at" => Timestamp },
+       #     "data_source" => { "id" => id, "resource_description" => resource_description } }
+       #
+       def data_source_preview(data_source_id, authentications: {}, &block)
+         preview_data = {
+           authentications: authentications.map { |scheme, cred| { auth_scheme_id: scheme, auth_credential_id: cred } }
+         }
+         result_handler(block).
+           run { @client[data_source_preview_path(data_source_id)].post(preview_data.to_json, content_type: :json) }
+       end
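
Continuing the sketch, steps 2–4 for an authenticated data source (`'auth-scheme-ref'` and `'credential-ref'` are placeholder references):

    preview = prj.data_source_preview(data_source_id, authentications: { 'auth-scheme-ref' => 'credential-ref' }) do |res|
      res.on_success { |data| data }
      res.on_error { |err, details| abort("#{err}: #{details}") }
    end
    puts preview.dig('preview', 'sample', 'status')

    # once the preview looks right, complete onboarding (step 4)
    prj.accept_data_source_preview(data_source_id) do |res|
      res.on_success { |data| data }
      res.on_error { |err, details| abort("#{err}: #{details}") }
    end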
+
+       def add_schema_import(spec_filename, &block)
+         spec_data = { schema_import: { file: File.new(spec_filename, 'r') } }
+         result_handler(block).
+           run { @client[schema_imports_path].post(spec_data) }
+       end
+
+       def update_schema_import(schema_import, spec_filename, &block)
+         spec_data = { schema_import: { file: File.new(spec_filename, 'r') } }
+         result_handler(block).
+           run { @client[schema_import_path(schema_import)].patch(spec_data) }
+       end
+
+       def pipeline_preview(pipeline_id, format: :hash, &block)
+         result_handler(block).
+           prepare_data(->(data) { data['pipeline_preview'] }).
+           formatter(PREVIEW_FORMATTERS[format]).
+           run { @client[pipeline_preview_path(pipeline_id)].get }
+       end
+
+       def endpoint_preview(endpoint_id, format: :hash, &block)
+         result_handler(block).
+           prepare_data(->(data) { data['endpoint_preview'] }).
+           formatter(PREVIEW_FORMATTERS[format]).
+           run { @client[endpoint_preview_path(endpoint_id)].get }
+       end
+
+       def stream_processor_preview(stream_processor_id, format: :hash, &block)
+         result_handler(block).
+           prepare_data(->(data) { data['stream_processor_preview'] }).
+           formatter(PREVIEW_FORMATTERS[format]).
+           run { @client[stream_processor_preview_path(stream_processor_id)].get }
+       end
+
+       def publish(&block)
+         result_handler(block).
+           run { @client[project_publications_path].post('') }
+       end
+
+       def logs(correlation_id, &block)
+         result_handler(block).
+           run { @client[project_logs_path(correlation_id)].get }
+       end
+
+       # Accepts the preview and completes onboarding of the data source (step 4 above);
+       # this returns a hashified preview like the one returned by data_source_preview.
+       def accept_data_source_preview(data_source_id, &block)
+         result_handler(block).
+           run { @client[data_source_preview_path(data_source_id)].patch('') }
+       end
+
+       def public_project_url
+         File.join(@connection.xapix_url, @org, @project)
+       end
+
+       private
+
+       def rest_data_source_path
+         "/projects/#{@org}/#{@project}/onboarding/data_sources/rest"
+       end
+
+       def data_source_preview_path(id)
+         "/projects/#{@org}/#{@project}/onboarding/data_sources/#{id}/preview"
+       end
+
+       def schema_imports_path
+         "/projects/#{@org}/#{@project}/onboarding/schema_imports"
+       end
+
+       def schema_import_path(schema_import)
+         "/projects/#{@org}/#{@project}/onboarding/schema_imports/#{schema_import}"
+       end
+
+       def resource_path(type, id)
+         "/projects/#{@org}/#{@project}/#{translate_type(type)}/#{id}"
+       end
+
+       def resources_path(type)
+         "/projects/#{@org}/#{@project}/#{translate_type(type)}"
+       end
+
+       def generic_resource_path
+         "projects/#{@org}/#{@project}/resource"
+       end
+
+       def pipeline_preview_path(pipeline)
+         "/projects/#{@org}/#{@project}/pipelines/#{pipeline}/preview"
+       end
+
+       def endpoint_preview_path(endpoint)
+         "/projects/#{@org}/#{@project}/endpoints/#{endpoint}/preview"
+       end
+
+       def stream_processor_preview_path(stream_processor)
+         "/projects/#{@org}/#{@project}/stream_processors/#{stream_processor}/preview"
+       end
+
+       def project_publications_path
+         "/projects/#{@org}/#{@project}/publications"
+       end
+
+       def project_logs_path(correlation_id)
+         "/projects/#{@org}/#{@project}/logs/#{correlation_id}"
+       end
+     end
+   end
+ end
@@ -0,0 +1,35 @@
+ # frozen_string_literal: true
+
+ module Xapixctl
+   module PhoenixClient
+     class ResultHandler
+       def initialize(default_success_handler:, default_error_handler:)
+         @success_handler = default_success_handler
+         @error_handler = default_error_handler
+         @result_handler = nil
+         yield self if block_given?
+       end
+
+       def on_success(&block); @success_handler = block; self; end
+
+       def on_error(&block); @error_handler = block; self; end
+
+       def prepare_data(proc); @result_handler = proc; self; end
+
+       def formatter(proc); @formatter = proc; self; end
+
+       def run
+         res = yield
+         res = res.present? ? JSON.parse(res) : res
+         res = @result_handler ? @result_handler.call(res) : res
+         res = @formatter ? @formatter.call(res) : res
+         @success_handler.call(res)
+       rescue RestClient::Exception => err
+         response = JSON.parse(err.response) rescue {}
+         @error_handler.call(err, response)
+       rescue SocketError, Errno::ECONNREFUSED => err
+         @error_handler.call(err, nil)
+       end
+     end
+   end
+ end
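
For reference, the connection methods above drive a ResultHandler roughly like this; the default handlers and the `rest_resource` receiving the request are illustrative placeholders:

    handler = Xapixctl::PhoenixClient::ResultHandler.new(
      default_success_handler: ->(result) { result },
      default_error_handler: ->(err, _details) { raise err }
    ) do |h|
      h.on_success { |data| puts data.inspect }
      h.on_error { |err, details| warn "request failed: #{err} (#{details})" }
    end

    handler.
      prepare_data(->(data) { data['pipeline_preview'] }).   # pick out the interesting part of the parsed JSON
      run { rest_resource.get }                              # any block returning a JSON string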
@@ -3,9 +3,7 @@
  require 'xapixctl/base_cli'
 
  module Xapixctl
-   class Preview < BaseCli
-     option :org, aliases: "-o", desc: "Organization", required: true
-     option :project, aliases: "-p", desc: "Project", required: true
+   class PreviewCli < BaseCli
      option :format, aliases: "-f", default: 'text', enum: ['text', 'yaml', 'json'], desc: "Output format"
      desc "pipeline ID", "Preview a pipeline"
      long_desc <<-LONGDESC
@@ -17,16 +15,12 @@ module Xapixctl
 
        Examples:
       \x5> $ xapixctl preview pipeline -o xapix -p some-project pipeline
+      \x5> $ xapixctl preview pipeline -p xapix/some-project pipeline
      LONGDESC
      def pipeline(pipeline)
-       connection.pipeline_preview(pipeline, org: options[:org], project: options[:project], format: options[:format].to_sym) do |res|
-         res.on_success { |preview| puts preview }
-         res.on_error { |err, result| warn_api_error('could not fetch preview', err, result) }
-       end
+       say prj_connection.pipeline_preview(pipeline, format: options[:format].to_sym)
      end
 
-     option :org, aliases: "-o", desc: "Organization", required: true
-     option :project, aliases: "-p", desc: "Project", required: true
      option :format, aliases: "-f", default: 'text', enum: ['text', 'yaml', 'json'], desc: "Output format"
      desc "endpoint ID", "Preview an endpoint"
      long_desc <<-LONGDESC
@@ -36,16 +30,12 @@ module Xapixctl
 
        Examples:
       \x5> $ xapixctl preview endpoint -o xapix -p some-project endpoint
+      \x5> $ xapixctl preview endpoint -p xapix/some-project endpoint
      LONGDESC
      def endpoint(endpoint)
-       connection.endpoint_preview(endpoint, org: options[:org], project: options[:project], format: options[:format].to_sym) do |res|
-         res.on_success { |preview| puts preview }
-         res.on_error { |err, result| warn_api_error('could not fetch preview', err, result) }
-       end
+       say prj_connection.endpoint_preview(endpoint, format: options[:format].to_sym)
      end
 
-     option :org, aliases: "-o", desc: "Organization", required: true
-     option :project, aliases: "-p", desc: "Project", required: true
      option :format, aliases: "-f", default: 'text', enum: ['text', 'yaml', 'json'], desc: "Output format"
      desc "stream-processor ID", "Preview a stream processor"
      long_desc <<-LONGDESC
@@ -55,12 +45,10 @@ module Xapixctl
 
        Examples:
       \x5> $ xapixctl preview stream-processor -o xapix -p some-project processor
+      \x5> $ xapixctl preview stream-processor -p xapix/some-project processor
      LONGDESC
      def stream_processor(stream_processor)
-       connection.stream_processor_preview(stream_processor, org: options[:org], project: options[:project], format: options[:format].to_sym) do |res|
-         res.on_success { |preview| puts preview }
-         res.on_error { |err, result| warn_api_error('could not fetch preview', err, result) }
-       end
+       say prj_connection.stream_processor_preview(stream_processor, format: options[:format].to_sym)
      end
    end
  end
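
The preview commands now go through a `prj_connection` helper instead of separate --org/--project options. That helper lives in BaseCli and is not part of this diff; a hypothetical sketch, assuming a combined `-p org/project` option and a `connection` helper for the PhoenixClient, might look like:

    # Hypothetical BaseCli helper (not shown in this diff)
    def prj_connection
      org, project = options[:project].to_s.split('/', 2)
      abort 'please provide the project as ORG/PROJECT (-p)' unless org && project
      Xapixctl::PhoenixClient::ProjectConnection.new(connection, org, project)
    end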
@@ -0,0 +1,241 @@
+ # frozen_string_literal: true
+
+ require 'xapixctl/base_cli'
+ require 'pathname'
+ require 'hashdiff'
+
+ module Xapixctl
+   class SyncCli < BaseCli
+     class_option :credentials, desc: "Whether to include Credential resources in sync", type: :boolean, default: true
+     class_option :exclude_types, desc: "Resource types to exclude from sync", type: :array
+
+     desc "to-dir DIRECTORY", "Syncs resources in project to directory"
+     long_desc <<-LONGDESC
+       `xapixctl sync to-dir DIRECTORY -p org/prj` will export all resources of a given project and remove any additional resources from the directory.
+
+       With --no-credentials you can exclude all credentials from getting exported.
+
+       With --exclude-types you can specify any resource types besides Project you'd like to exclude.
+
+       When excluding types, the excluded types will be recorded in the sync directory in a file called .excluded_types, so that any future syncs will exclude those types.
+
+       Examples:
+       \x5> $ xapixctl sync to-dir ./project_dir -p xapix/some-project
+       \x5> $ xapixctl sync to-dir ./project_dir -p xapix/some-project --no-credentials
+       \x5> $ xapixctl sync to-dir ./project_dir -p xapix/some-project --exclude-types=ApiPublishing ApiPublishingRole Credential
+     LONGDESC
+     def to_dir(dir)
+       sync_path = SyncPath.new(shell, dir, prj_connection.resource_types_for_export, excluded_types)
+
+       res_details = prj_connection.project_resource
+       sync_path.write_file(generate_readme(res_details), 'README.md')
+       sync_path.write_resource_yaml(res_details, 'project')
+
+       sync_path.types_to_sync.each do |type|
+         res_path = sync_path.resource_path(type)
+         prj_connection.resource_ids(type).each do |res_id|
+           res_details = prj_connection.resource(type, res_id)
+           res_path.write_resource_yaml(res_details, res_id)
+         end
+         res_path.remove_outdated_resources
+       end
+       sync_path.update_excluded_types_file
+     end
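
For orientation, a to-dir sync produces roughly the following layout (the resource ids are made up; each resource type gets its own directory named after the underscored type):

    project_dir/
      README.md                 # generated from the project name and description
      project.yaml              # the Project resource itself
      .excluded_types           # only written when resource types are excluded
      pipeline/
        some-pipeline.yaml      # one YAML file per resource
      endpoint/
        some-endpoint.yaml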
+
+     desc "from-dir DIRECTORY", "Syncs resources in project from directory"
+     long_desc <<-LONGDESC
+       `xapixctl sync from-dir DIRECTORY -p org/prj` will import all resources from the directory into the given project and remove any additional resources which are not present in the directory.
+
+       With --no-credentials you can exclude all credentials from getting imported.
+
+       With --exclude-types you can specify any resource types besides Project you'd like to exclude.
+
+       Examples:
+       \x5> $ xapixctl sync from-dir ./project_dir -p xapix/some-project
+       \x5> $ xapixctl sync from-dir ./project_dir -p xapix/some-project --no-credentials
+       \x5> $ xapixctl sync from-dir ./project_dir -p xapix/some-project --exclude-types=ApiPublishing ApiPublishingRole Credential
+     LONGDESC
+     def from_dir(dir)
+       sync_path = SyncPath.new(shell, dir, prj_connection.resource_types_for_export, excluded_types)
+
+       sync_path.load_resource('project') do |desc|
+         say "applying #{desc['kind']} #{desc.dig('metadata', 'id')} to #{prj_connection.project}"
+         desc['metadata']['id'] = prj_connection.project
+         prj_connection.organization.apply(desc)
+       end
+
+       outdated_resources = {}
+       sync_path.types_to_sync.each do |type|
+         res_path = sync_path.resource_path(type)
+         updated_resource_ids = []
+         res_path.load_resources do |desc|
+           say "applying #{desc['kind']} #{desc.dig('metadata', 'id')}"
+           updated_resource_ids += prj_connection.apply(desc)
+         end
+         outdated_resources[type] = prj_connection.resource_ids(type) - updated_resource_ids
+       end
+
+       outdated_resources.each do |type, resource_ids|
+         resource_ids.each do |resource_id|
+           say "removing #{type} #{resource_id}"
+           prj_connection.delete(type, resource_id)
+         end
+       end
+     end
+
+     desc "diff DIRECTORY", "List resources which differ between project and directory"
+     long_desc <<-LONGDESC
+       `xapixctl sync diff DIRECTORY -p org/prj` will list the resources which are different between the given project and the given directory.
+
+       With --no-credentials you can exclude all credentials from the comparison.
+
+       With --exclude-types you can specify any resource types besides Project you'd like to exclude.
+
+       In the listing, the first character of each line indicates the status of the resource:
+       \x5 = - no changes
+       \x5 ~ - changed
+       \x5 ^ - in remote project
+       \x5 v - in directory
+
+       Examples:
+       \x5> $ xapixctl sync diff ./project_dir -p xapix/some-project
+       \x5> $ xapixctl sync diff ./project_dir -p xapix/some-project --no-credentials
+       \x5> $ xapixctl sync diff ./project_dir -p xapix/some-project --exclude-types=ApiPublishing ApiPublishingRole Credential
+     LONGDESC
+     option :details, desc: "Include detailed differences", type: :boolean, default: false
+     def diff(dir)
+       sync_path = SyncPath.new(shell, dir, prj_connection.resource_types_for_export, excluded_types)
+
+       sync_path.load_resource('project') do |desc|
+         desc['metadata']['id'] = prj_connection.project
+         res_details = prj_connection.project_resource
+         show_diff(desc, res_details)
+       end
+
+       sync_path.types_to_sync.each do |type|
+         res_path = sync_path.resource_path(type)
+         local_resource_ids = []
+         remote_resource_ids = prj_connection.resource_ids(type)
+         res_path.load_resources do |desc|
+           resource_id = desc['metadata']['id']
+           local_resource_ids << resource_id
+           if remote_resource_ids.include?(resource_id)
+             res_details = prj_connection.resource(type, desc['metadata']['id'])
+             show_diff(desc, res_details)
+           else
+             say "v #{type} #{resource_id}"
+           end
+         end
+         (remote_resource_ids - local_resource_ids).each do |resource_id|
+           say "^ #{type} #{resource_id}"
+         end
+       end
+     end
+
+     private
+
+     def show_diff(local, remote)
+       kind, id = local['kind'], local['metadata']['id']
+       local, remote = local.slice('definition'), remote.slice('definition')
+       changed = local != remote
+       status = changed ? "~" : "="
+       say "#{status} #{kind} #{id}"
+       return unless changed && options[:details]
+       shell.indent do
+         Hashdiff.diff(local, remote).each do |change|
+           status = change[0].tr('+-', '^v')
+           key = change[1]
+           say "#{status} #{key}"
+           shell.indent do
+             case status
+             when "~" then say "^ #{change[3]}"; say "v #{change[2]}"
+             else say "#{status} #{change[2]}" if change[2]
+             end
+           end
+         end
+       end
+     end
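
The detailed output above is driven by Hashdiff's change tuples; `tr('+-', '^v')` maps additions to `^` (remote side) and removals to `v` (directory side). A small illustration with made-up definitions:

    require 'hashdiff'

    local  = { 'definition' => { 'name' => 'orders', 'timeout' => 10 } }
    remote = { 'definition' => { 'name' => 'orders', 'timeout' => 30, 'cache' => true } }

    Hashdiff.diff(local, remote)
    # => [["~", "definition.timeout", 10, 30], ["+", "definition.cache", true]]
    #
    # which show_diff renders (with --details) roughly as:
    #   ~ definition.timeout
    #     ^ 30
    #     v 10
    #   ^ definition.cache
    #     ^ true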
+
+     class ResourcePath
+       delegate :say, to: :@shell
+
+       def initialize(shell, path)
+         @shell = shell
+         @path = path
+         @resource_files = []
+       end
+
+       def write_file(content, filename)
+         @path.mkpath
+         unless @path.directory? && @path.writable?
+           warn "Cannot write to #{@path}, please check that the directory exists and is writable"
+           exit 1
+         end
+         file = @path.join(filename)
+         file.write(content)
+         say "updated #{file}..."
+         file
+       end
+
+       def write_resource_yaml(res_details, res_name)
+         file = write_file(res_details.to_yaml, "#{res_name}.yaml")
+         @resource_files << file
+         file
+       end
+
+       def load_resources(&block)
+         Util.resources_from_file(@path, ignore_missing: true, &block)
+       end
+
+       def load_resource(res_name, &block)
+         Util.resources_from_file(@path.join("#{res_name}.yaml"), ignore_missing: false, &block)
+       end
+
+       def remove_outdated_resources
+         (@path.glob('*.yaml') - @resource_files).each do |outdated_file|
+           outdated_file.delete
+           say "removed #{outdated_file}"
+         end
+       end
+     end
+
+     class SyncPath < ResourcePath
+       attr_reader :types_to_sync
+
+       def initialize(shell, dir, all_types, excluded_types)
+         super(shell, Pathname.new(dir))
+         @all_types = all_types
+         @excluded_types_file = @path.join('.excluded_types')
+         @excluded_types = excluded_types || []
+         @excluded_types += @excluded_types_file.read.split if @excluded_types_file.exist?
+         @excluded_types &= @all_types
+         @excluded_types.sort!
+         @types_to_sync = @all_types - @excluded_types
+         say "Resource types excluded from sync: #{@excluded_types.join(', ')}" if @excluded_types.any?
+       end
+
+       def resource_path(type)
+         ResourcePath.new(@shell, @path.join(type.underscore))
+       end
+
+       def update_excluded_types_file
+         @excluded_types_file.write(@excluded_types.join(" ") + "\n") if @excluded_types.any?
+       end
+     end
+
+     def excluded_types
+       excluded = options[:exclude_types] || []
+       excluded += ['Credential'] unless options[:credentials]
+       excluded
+     end
+
+     def generate_readme(res_details)
+       <<~EOREADME
+         # #{res_details.dig('definition', 'name')}
+         #{res_details.dig('definition', 'description')}
+
+         Project exported from #{File.join(prj_connection.public_project_url)} by xapixctl v#{Xapixctl::VERSION}.
+       EOREADME
+     end
+   end
+ end