databricks 2.2.0 → 2.3.0

checksums.yaml CHANGED
@@ -1,7 +1,7 @@
 ---
 SHA256:
-  metadata.gz: 4fa10746d84fcd9135f2b811ae8f48509fde3ef567fa87b914c337a0881d7e9e
-  data.tar.gz: 8cedbeb212e416c99d84fc0f5f53ec609e0510f021af5142c550a7272d0b1a79
+  metadata.gz: 4459669b73b6119b98edbf5cbc090da4737c3ff81f07a5facee4ea9082f184b9
+  data.tar.gz: 4385d46df3c43d914ac510a82d61fc256d90dbe4896dd6d94aa5377340829edf
 SHA512:
-  metadata.gz: 18cda5b9682e863158015c018fa1941b135421feac1361c6a8714dd51d82f484010d7cce4538d9dd04fcd91f494c486a78ba155824e9c1dcede212849a45730a
-  data.tar.gz: dd5c5fea89ab21a638e88ea16665722457fbfa2d48811f7cdba6228927a2293338611003a611b1880035c5f5782911d200f7ce954f21c3bfd41c487107799bdd
+  metadata.gz: fddff3e968fe272ced9363f5762861943f277dee5f73897432c4d272d739b031aac2b97871bcba43d3ef161d3a0b76b00b739b86898e062647497160f608557b
+  data.tar.gz: fb72364cade4b589374f4050341271bff45f80c19e8d0bb4b4325a3fa29c3475400049585e80d303c230d1edf77684c72326423e7eacb691c9bc5fa68c39fcd6
@@ -15,6 +15,10 @@ module Databricks
       post_json
     ]
 
+    # Get an accessor on all properties of this resource
+    # Hash< Symbol, Object >
+    attr_reader :properties
+
     # Declare sub-resources accessors.
     # This will make sure this resource has methods named after the sub-resources identifiers.
     #
@@ -42,20 +46,27 @@ module Databricks
       @properties = {}
     end
 
-    # Add/replace properties for this resource
+    # Add/replace properties for this resource.
+    # Properties will be deep-symbolized.
     #
     # Parameters::
-    # * *properties* (Hash<Symbol,Object>): Properties for this resource
-    def add_properties(properties)
+    # * *properties* (Hash<Symbol or String,Object>): Properties for this resource
+    # * *replace* (Boolean): Should we replace properties instead of merging them? [default: false]
+    def add_properties(properties, replace: false)
+      symbolized_properties = deep_symbolize(properties)
       # Define getters for properties
-      (properties.keys - @properties.keys).each do |property_name|
+      (symbolized_properties.keys - @properties.keys).each do |property_name|
         if self.respond_to?(property_name)
           raise "Can't define a property named #{property_name} - It's already used."
         else
           define_singleton_method(property_name) { @properties[property_name] }
         end
       end
-      @properties.merge!(properties)
+      if replace
+        @properties = symbolized_properties
+      else
+        @properties.merge!(symbolized_properties)
+      end
     end
 
     # Return a simple string representation of this resource
@@ -88,10 +99,34 @@ module Databricks
     def new_resource(resource_name, properties = {})
       require "#{__dir__}/resources/#{resource_name}.rb"
       resource = Resources.const_get(resource_name.to_s.split('_').collect(&:capitalize).join.to_sym).new(@connector)
-      resource.add_properties(properties.transform_keys(&:to_sym))
+      resource.add_properties(properties)
       resource
     end
 
+    private
+
+    # Deep-symbolize a JSON object
+    #
+    # Parameters::
+    # * *json* (Object): The JSON object
+    # Result::
+    # * Object: Symbolized JSON object
+    def deep_symbolize(json)
+      case json
+      when Hash
+        Hash[json.map do |k, v|
+          [
+            k.is_a?(String) ? k.to_sym : k,
+            deep_symbolize(v)
+          ]
+        end]
+      when Array
+        json.map { |e| deep_symbolize(e) }
+      else
+        json
+      end
+    end
+
   end
 
 end
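
A minimal usage sketch of the reworked add_properties (the resource variable below stands for any Resource sub-class instance obtained through the API; the property names are illustrative only):

  # String keys coming from parsed JSON are now deep-symbolized,
  # so API payloads become regular symbol-keyed getters.
  resource.add_properties('name' => 'my_resource', 'settings' => { 'timeout_seconds' => 3600 })
  resource.name                        # => "my_resource"
  resource.settings[:timeout_seconds]  # => 3600 (nested keys are symbolized too)
  resource.properties                  # => the whole Hash, via the new attr_reader

  # With replace: true the previous properties are dropped instead of merged
  resource.add_properties({ name: 'renamed' }, replace: true)
  resource.properties                  # => { name: 'renamed' }
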
@@ -4,6 +4,27 @@ module Databricks
 
     class Job < Resource
 
+      # Reset properties of this job.
+      #
+      # Parameters::
+      # * *properties* (Hash<Symbol,Object>): New job's properties
+      def reset(**properties)
+        # Make sure we don't change its ID
+        post_json(
+          'jobs/reset',
+          {
+            job_id: job_id,
+            new_settings: properties
+          }
+        )
+        add_properties(properties.merge(job_id: job_id), replace: true)
+      end
+
+      # Delete this job
+      def delete
+        post_json('jobs/delete', { job_id: job_id })
+      end
+
     end
 
   end
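
A rough sketch of how the new Job methods could be used (job stands for a Job instance previously fetched through the API; the settings shown are only illustrative Databricks job settings):

  # Replace the job's settings server-side via jobs/reset; the job_id is preserved
  job.reset(
    name: 'nightly-export',
    max_concurrent_runs: 1
  )
  job.job_id  # => unchanged
  job.name    # => "nightly-export" (local properties were replaced accordingly)

  # Remove the job entirely
  job.delete
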
@@ -11,7 +11,21 @@ module Databricks
       # Result::
       # * Array<Job>: List of jobs information
       def list
-        (get_json('jobs/list')['jobs'] || []).map { |properties| new_resource(:job, properties) }
+        (get_json('jobs/list')['jobs'] || []).map do |properties|
+          # The settings property should be merged at root
+          new_resource(:job, properties.merge(properties.delete('settings')))
+        end
+      end
+
+      # Get a job based on its job_id
+      #
+      # Parameters::
+      # * *job_id* (String): The job id to get
+      # Result::
+      # * Job: The job
+      def get(job_id)
+        properties = get_json('jobs/get', { job_id: job_id })
+        new_resource(:job, properties.merge(properties.delete('settings')))
       end
 
       # Create a new job.
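
A short usage sketch for the Jobs end-points touched above (the Databricks.api entry point and the jobs accessor are assumed from the gem's README; host, token and the job id are placeholders):

  require 'databricks'

  jobs = Databricks.api('https://my-workspace.cloud.databricks.com', ENV['DATABRICKS_TOKEN']).jobs

  # Job settings (name, schedule, ...) are now merged at the root of each job's properties
  jobs.list.each { |job| puts "#{job.job_id}: #{job.name}" }

  # New in 2.3.0: fetch a single job from its id
  job = jobs.get(123)
  puts job.name
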
@@ -1,5 +1,5 @@
 module Databricks
 
-  VERSION = '2.2.0'
+  VERSION = '2.3.0'
 
 end
metadata CHANGED
@@ -1,14 +1,14 @@
 --- !ruby/object:Gem::Specification
 name: databricks
 version: !ruby/object:Gem::Version
-  version: 2.2.0
+  version: 2.3.0
 platform: ruby
 authors:
 - Muriel Salvan
 autorequire:
 bindir: bin
 cert_chain: []
-date: 2021-05-12 00:00:00.000000000 Z
+date: 2021-05-19 00:00:00.000000000 Z
 dependencies:
 - !ruby/object:Gem::Dependency
   name: rest-client