databricks 1.0.0 → 2.3.1

checksums.yaml CHANGED
@@ -1,7 +1,7 @@
 ---
 SHA256:
-  metadata.gz: c27151990dcc087e396f387f45f6909f54bb6eb8529b8835706ce84d3ac399ab
-  data.tar.gz: 0ddfaf4cd06981894bddc1ff2ede2bf2372aa6d08521699458502cdf4b83f308
+  metadata.gz: cc44c2e0ce0d40ceb08101541a9da02226b83200c681b675418baed111c06336
+  data.tar.gz: b32827444314a21c8453a0f620a32b66501e392edc620555b7f6dfd1558b536b
 SHA512:
-  metadata.gz: 747bf19db30cc24740b101aaacd14d22eeb7051995b7d1707917835a34fb90bc096304d2e11c18ea4c04c28049e5ead90961f1fcdb319642bbd54409feb580cf
-  data.tar.gz: dae0b046280172f770df37ce37440d0311f5e65896fe35209e3d0e900c4961d1a8396d195b570dac122c23b373aeb0be364f9c94178d54da888fab20dd81a819
+  metadata.gz: 638132f2c85b3a8a36505933db36bef16e81e093c42e93ac268becf5407a8291cb93add54efd941c3fdbb4ca08a165dc7351b03d48cebdcff226c94dad10f6ae
+  data.tar.gz: 4837f5936f68ed8e4746392b1954b9d40b12a099408a2f4d3397852b56c30a81ca8307e90898e1e72e381120df0faf47f7ad4bed7f605729befff2ce32cd01a2
data/lib/databricks.rb CHANGED
@@ -1,5 +1,6 @@
-require 'databricks/domains/root'
+require 'databricks/connector'
 require 'databricks/resource'
+require 'databricks/resources/root'
 
 module Databricks
 
@@ -9,9 +10,9 @@ module Databricks
   # * *host* (String): Host to connect to
   # * *token* (String): Token to be used in the API
   # Result::
-  # * Domains::Root: The root domain of the API
+  # * Resources::Root: The root resource of the API
   def self.api(host, token)
-    Domains::Root.new(Resource.new(host, token))
+    Resources::Root.new(Connector.new(host, token))
   end
 
 end
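For context, the public entry point keeps the same signature; only the returned object changes from Domains::Root to Resources::Root. A minimal usage sketch of the 2.3.1 API follows (the workspace URL and token are placeholders, not values shipped with the gem):

require 'databricks'

# Placeholder workspace URL and personal access token
api = Databricks.api('https://my-workspace.cloud.databricks.com', 'dapiXXXXXXXX')
# api is a Resources::Root sharing a single Connector with every sub-resource
clusters = api.clusters
jobs = api.jobs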
data/lib/databricks/connector.rb ADDED
@@ -0,0 +1,79 @@
+require 'json'
+require 'rest-client'
+
+module Databricks
+
+  # Underlying connector making API calls
+  class Connector
+
+    # Constructor
+    #
+    # Parameters::
+    # * *host* (String): Host to connect to
+    # * *token* (String): Token to be used in the API
+    def initialize(host, token)
+      @host = host
+      @token = token
+    end
+
+    # Issue a GET request to the API with JSON payload
+    #
+    # Parameters::
+    # * *api_path* (String): API path to query
+    # * *json_payload* (Object): JSON payload to include in the query [default = {}]
+    # Result::
+    # * Object: JSON result
+    def get_json(api_path, json_payload = {})
+      JSON.parse(
+        RestClient::Request.execute(
+          method: :get,
+          url: "#{@host}/api/2.0/#{api_path}",
+          payload: json_payload.to_json,
+          headers: {
+            Authorization: "Bearer #{@token}",
+            'Content-Type': 'application/json'
+          }
+        ).body
+      )
+    end
+
+    # Issue a POST request to the API with JSON payload
+    #
+    # Parameters::
+    # * *api_path* (String): API path to query
+    # * *json_payload* (Object): JSON payload to include in the query [default = {}]
+    # Result::
+    # * Object: JSON result
+    def post_json(api_path, json_payload = {})
+      JSON.parse(
+        RestClient::Request.execute(
+          method: :post,
+          url: "#{@host}/api/2.0/#{api_path}",
+          payload: json_payload.to_json,
+          headers: {
+            Authorization: "Bearer #{@token}",
+            'Content-Type': 'application/json'
+          }
+        ).body
+      )
+    end
+
+    # Issue a POST request to the API with multipart form data payload
+    #
+    # Parameters::
+    # * *api_path* (String): API path to query
+    # * *form_payload* (Hash): Form payload to include in the query [default = {}]
+    def post(api_path, form_payload = {})
+      RestClient::Request.execute(
+        method: :post,
+        url: "#{@host}/api/2.0/#{api_path}",
+        payload: form_payload.merge(multipart: true),
+        headers: {
+          Authorization: "Bearer #{@token}"
+        }
+      )
+    end
+
+  end
+
+end
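The Connector is normally built for you by Databricks.api, but a minimal sketch of calling it directly could look like this (URL and token are placeholders; clusters/list is just one example path):

require 'databricks/connector'

# Placeholder workspace URL and token
connector = Databricks::Connector.new('https://my-workspace.cloud.databricks.com', 'dapiXXXXXXXX')
# GET /api/2.0/clusters/list, with the JSON body parsed into a Hash
response = connector.get_json('clusters/list')
puts (response['clusters'] || []).map { |c| c['cluster_name'] }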
data/lib/databricks/resource.rb CHANGED
@@ -1,77 +1,130 @@
-require 'json'
-require 'rest-client'
+require 'forwardable'
 
 module Databricks
 
-  # Underlying resource making API calls
+  # Encapsulate a resource identified in the API.
+  # A resource can have some properties, directly accessible, and also gives access to eventual sub-resources to get a hierarchical organization of the API.
   class Resource
 
+    extend Forwardable
+
+    # Delegate the API low-level methods to the @connector object
+    def_delegators :@connector, *%i[
+      get_json
+      post
+      post_json
+    ]
+
+    # Get an accessor on all properties of this resource
+    # Hash< Symbol, Object >
+    attr_reader :properties
+
+    # Declare sub-resources accessors.
+    # This will make sure this resource has methods named after the sub-resources identifiers.
+    #
+    # Parameters::
+    # * *resource_names* (Array<Symbol>): Resource names to instantiate
+    def self.sub_resources(*resource_names)
+      resource_names.flatten.each do |resource_name|
+        self.define_method(resource_name) do
+          sub_resource(resource_name)
+        end
+      end
+    end
+
     # Constructor
     #
     # Parameters::
-    # * *host* (String): Host to connect to
-    # * *token* (String): Token to be used in th API
-    def initialize(host, token)
-      @host = host
-      @token = token
+    # * *connector* (Connector): Connector handling API calls
+    def initialize(connector)
+      @connector = connector
+      # Keep a map of sub-resources instantiated, per resource name.
+      # Hash< Symbol, Resource >
+      @sub_resources = {}
+      # Properties linked to this resource
+      # Hash< Symbol, Object >
+      @properties = {}
     end
 
-    # Issue a GET request to the API with JSON payload
+    # Add/replace properties for this resource.
+    # Properties will be deep-symbolized.
     #
     # Parameters::
-    # * *api_path* (String): API path to query
-    # * *json_payload* (Object): JSON payload to include in the query [default = {}]
+    # * *properties* (Hash<Symbol or String,Object>): Properties for this resource
+    # * *replace* (Boolean): Should we replace properties instead of merging them? [default: false]
+    def add_properties(properties, replace: false)
+      symbolized_properties = deep_symbolize(properties)
+      # Define getters for properties
+      (symbolized_properties.keys - @properties.keys).each do |property_name|
+        if self.respond_to?(property_name)
+          raise "Can't define a property named #{property_name} - It's already used."
+        else
+          define_singleton_method(property_name) { @properties[property_name] }
+        end
+      end
+      if replace
+        @properties = symbolized_properties
+      else
+        @properties.merge!(symbolized_properties)
+      end
+    end
+
+    # Return a simple string representation of this resource
+    #
+    # Result::
+    # * String: Default representation
+    def inspect
+      "#<#{self.class.name.split('::').last} - #{@properties}>"
+    end
+
+    # Instantiate a sub-resource.
+    # Keep a cache of it.
+    #
+    # Parameters::
+    # * *resource_name* (Symbol): Resource name.
     # Result::
-    # * Object: JSON result
-    def get_json(api_path, json_payload = {})
-      JSON.parse(
-        RestClient::Request.execute(
-          method: :get,
-          url: "#{@host}/api/2.0/#{api_path}",
-          payload: json_payload.to_json,
-          headers: {
-            Authorization: "Bearer #{@token}",
-            'Content-Type': 'application/json'
-          }
-        ).body
-      )
+    # * Resource: Corresponding sub-resource
+    def sub_resource(resource_name)
+      @sub_resources[resource_name] = new_resource(resource_name) unless @sub_resources.key?(resource_name)
+      @sub_resources[resource_name]
     end
 
-    # Issue a POST request to the API with JSON payload
+    # Instantiate a new resource, with optional properties
     #
     # Parameters::
-    # * *api_path* (String): API path to query
-    # * *json_payload* (Object): JSON payload to include in the query [default = {}]
+    # * *resource_name* (Symbol): The resource's name.
+    # * *properties* (Hash<Symbol or String,Object>): This resource's initial properties [default = {}]
     # Result::
-    # * Object: JSON result
-    def post_json(api_path, json_payload = {})
-      JSON.parse(
-        RestClient::Request.execute(
-          method: :post,
-          url: "#{@host}/api/2.0/#{api_path}",
-          payload: json_payload.to_json,
-          headers: {
-            Authorization: "Bearer #{@token}",
-            'Content-Type': 'application/json'
-          }
-        ).body
-      )
+    # * Resource: The corresponding resource
+    def new_resource(resource_name, properties = {})
+      require "#{__dir__}/resources/#{resource_name}.rb"
+      resource = Resources.const_get(resource_name.to_s.split('_').collect(&:capitalize).join.to_sym).new(@connector)
+      resource.add_properties(properties)
+      resource
     end
 
-    # Issue a POST request to the API with multipart form data payload
+    private
+
+    # Deep-symbolize a JSON object
     #
     # Parameters::
-    # * *api_path* (String): API path to query
-    # * *form_payload* (Hash): Form payload to include in the query [default = {}]
-    def post(api_path, form_payload = {})
-      RestClient::Request.execute(
-        method: :post,
-        url: "#{@host}/api/2.0/#{api_path}",
-        payload: form_payload.merge(multipart: true),
-        headers: {
-          Authorization: "Bearer #{@token}"
-        }
-      )
+    # * *json* (Object): The JSON object
+    # Result::
+    # * Object: Symbolized JSON object
+    def deep_symbolize(json)
+      case json
+      when Hash
+        Hash[json.map do |k, v|
+          [
+            k.is_a?(String) ? k.to_sym : k,
+            deep_symbolize(v)
+          ]
+        end]
+      when Array
+        json.map { |e| deep_symbolize(e) }
+      else
+        json
+      end
     end
 
   end
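A short sketch of what the dynamic accessors above provide, assuming a workspace with at least one cluster (URL and token are placeholders):

api = Databricks.api('https://my-workspace.cloud.databricks.com', 'dapiXXXXXXXX')
cluster = api.clusters.list.first
cluster.properties[:cluster_name]  # raw properties hash, deep-symbolized
cluster.cluster_name               # same value, via the getter defined by add_properties
cluster.inspect                    # => "#<Cluster - {...}>"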
data/lib/databricks/resources/cluster.rb ADDED
@@ -0,0 +1,27 @@
+module Databricks
+
+  module Resources
+
+    class Cluster < Resource
+
+      # Edit properties of this cluster.
+      #
+      # Parameters::
+      # * *properties* (Hash<Symbol,Object>): Properties of this cluster
+      def edit(**properties)
+        # Make sure we don't change its ID
+        properties[:cluster_id] = cluster_id
+        post_json('clusters/edit', properties)
+        add_properties(properties)
+      end
+
+      # Delete a cluster
+      def delete
+        post_json('clusters/delete', { cluster_id: cluster_id })
+      end
+
+    end
+
+  end
+
+end
data/lib/databricks/resources/clusters.rb ADDED
@@ -0,0 +1,43 @@
+module Databricks
+
+  module Resources
+
+    # Provide the Clusters API
+    # cf. https://docs.databricks.com/dev-tools/api/latest/clusters.html
+    class Clusters < Resource
+
+      # List clusters
+      #
+      # Result::
+      # * Array<Cluster>: List of clusters
+      def list
+        (get_json('clusters/list')['clusters'] || []).map { |properties| new_resource(:cluster, properties) }
+      end
+
+      # Get a cluster based on its cluster_id
+      #
+      # Parameters::
+      # * *cluster_id* (String): The cluster id to get
+      # Result::
+      # * Cluster: The cluster
+      def get(cluster_id)
+        new_resource(:cluster, get_json('clusters/get', { cluster_id: cluster_id }))
+      end
+
+      # Create a new cluster.
+      #
+      # Parameters::
+      # * *properties* (Hash<Symbol,Object>): Properties to create the cluster
+      # Result::
+      # * Cluster: The new cluster created
+      def create(**properties)
+        cluster = new_resource(:cluster, post_json('clusters/create', properties))
+        cluster.add_properties(properties)
+        cluster
+      end
+
+    end
+
+  end
+
+end
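A minimal sketch of driving the Clusters resource end to end; the cluster settings are illustrative values, not defaults shipped with the gem:

api = Databricks.api('https://my-workspace.cloud.databricks.com', 'dapiXXXXXXXX')
# Create a cluster, list clusters by name, then delete it
cluster = api.clusters.create(
  cluster_name: 'example-cluster',
  spark_version: '7.3.x-scala2.12',
  node_type_id: 'i3.xlarge',
  num_workers: 1
)
puts api.clusters.list.map(&:cluster_name)
cluster.delete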
data/lib/databricks/resources/dbfs.rb ADDED
@@ -0,0 +1,78 @@
+require 'base64'
+
+module Databricks
+
+  module Resources
+
+    # Provide the DBFS API
+    # cf. https://docs.databricks.com/dev-tools/api/latest/dbfs.html
+    class Dbfs < Resource
+
+      # List a path
+      #
+      # Parameters::
+      # * *path* (String): Path to be listed
+      # Result::
+      # * Array<File>: List of files under this path
+      def list(path)
+        (get_json('dbfs/list', { path: path })['files'] || []).map { |properties| new_resource(:file, properties) }
+      end
+
+      # Put a new file
+      #
+      # Parameters::
+      # * *path* (String): Path to the file to create
+      # * *local_file* (String): Path to the local file to put
+      def put(path, local_file)
+        post(
+          'dbfs/put',
+          {
+            path: path,
+            contents: ::File.new(local_file, 'rb'),
+            overwrite: true
+          }
+        )
+      end
+
+      # Delete a path
+      #
+      # Parameters::
+      # * *path* (String): Path to delete
+      # * *recursive* (Boolean): Do we delete recursively? [default: false]
+      def delete(path, recursive: false)
+        post_json(
+          'dbfs/delete',
+          {
+            path: path,
+            recursive: recursive
+          }
+        )
+      end
+
+      # Read a file.
+      # Decodes the content in the JSON response (that is originally Base64-encoded).
+      #
+      # Parameters::
+      # * *path* (String): Path to the file to read
+      # * *offset* (Integer): Offset to read from [default: 0]
+      # * *length* (Integer): Number of bytes to read (max 1MB) [default: 524_288]
+      def read(path, offset: 0, length: 524_288)
+        raw_json = get_json(
+          'dbfs/read',
+          {
+            path: path,
+            offset: offset,
+            length: length
+          }
+        )
+        {
+          'bytes_read' => raw_json['bytes_read'],
+          'data' => Base64.decode64(raw_json['data'])
+        }
+      end
+
+    end
+
+  end
+
+end
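A minimal sketch of the Dbfs resource; the DBFS and local paths below are placeholders:

api = Databricks.api('https://my-workspace.cloud.databricks.com', 'dapiXXXXXXXX')
api.dbfs.put('/tmp/example.txt', 'example.txt')   # upload a local file
puts api.dbfs.list('/tmp').map(&:path)            # File resources expose their properties as getters
puts api.dbfs.read('/tmp/example.txt')['data']    # content is already Base64-decoded by #read
api.dbfs.delete('/tmp/example.txt')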
data/lib/databricks/resources/file.rb ADDED
@@ -0,0 +1,11 @@
+module Databricks
+
+  module Resources
+
+    class File < Resource
+
+    end
+
+  end
+
+end
data/lib/databricks/resources/instance_pool.rb ADDED
@@ -0,0 +1,27 @@
+module Databricks
+
+  module Resources
+
+    class InstancePool < Resource
+
+      # Edit properties of this instance pool.
+      #
+      # Parameters::
+      # * *properties* (Hash<Symbol,Object>): Properties of this instance pool
+      def edit(**properties)
+        # Make sure we don't change its ID
+        properties[:instance_pool_id] = instance_pool_id
+        post_json('instance-pools/edit', properties)
+        add_properties(properties)
+      end
+
+      # Delete this instance pool
+      def delete
+        post_json('instance-pools/delete', { instance_pool_id: instance_pool_id })
+      end
+
+    end
+
+  end
+
+end
data/lib/databricks/resources/instance_pools.rb ADDED
@@ -0,0 +1,43 @@
+module Databricks
+
+  module Resources
+
+    # Provide the Instance Pools API
+    # cf. https://docs.databricks.com/dev-tools/api/latest/instance-pools.html
+    class InstancePools < Resource
+
+      # List instance pools
+      #
+      # Result::
+      # * Array<InstancePool>: List of instance pools
+      def list
+        (get_json('instance-pools/list')['instance_pools'] || []).map { |properties| new_resource(:instance_pool, properties) }
+      end
+
+      # Get an instance pool based on its instance_pool_id
+      #
+      # Parameters::
+      # * *instance_pool_id* (String): The instance pool id to get
+      # Result::
+      # * InstancePool: The instance pool
+      def get(instance_pool_id)
+        new_resource(:instance_pool, get_json('instance-pools/get', { instance_pool_id: instance_pool_id }))
+      end
+
+      # Create a new instance pool.
+      #
+      # Parameters::
+      # * *properties* (Hash<Symbol,Object>): Properties to create the instance pool
+      # Result::
+      # * InstancePool: The new instance pool created
+      def create(**properties)
+        instance_pool = new_resource(:instance_pool, post_json('instance-pools/create', properties))
+        instance_pool.add_properties(properties)
+        instance_pool
+      end
+
+    end
+
+  end
+
+end
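Instance pools follow the same pattern as clusters; a sketch with placeholder property values:

api = Databricks.api('https://my-workspace.cloud.databricks.com', 'dapiXXXXXXXX')
pool = api.instance_pools.create(
  instance_pool_name: 'example-pool',
  node_type_id: 'i3.xlarge',
  min_idle_instances: 0
)
puts api.instance_pools.list.map(&:instance_pool_name)
pool.delete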
data/lib/databricks/resources/job.rb ADDED
@@ -0,0 +1,32 @@
+module Databricks
+
+  module Resources
+
+    class Job < Resource
+
+      # Reset properties of this job.
+      #
+      # Parameters::
+      # * *properties* (Hash<Symbol,Object>): New job's properties
+      def reset(**properties)
+        # Make sure we don't change its ID
+        post_json(
+          'jobs/reset',
+          {
+            job_id: job_id,
+            new_settings: properties
+          }
+        )
+        add_properties(properties.merge(job_id: job_id), replace: true)
+      end
+
+      # Delete this job
+      def delete
+        post_json('jobs/delete', { job_id: job_id })
+      end
+
+    end
+
+  end
+
+end
data/lib/databricks/resources/jobs.rb ADDED
@@ -0,0 +1,47 @@
+module Databricks
+
+  module Resources
+
+    # Provide the Jobs API
+    # cf. https://docs.databricks.com/dev-tools/api/latest/jobs.html
+    class Jobs < Resource
+
+      # List jobs
+      #
+      # Result::
+      # * Array<Job>: List of jobs information
+      def list
+        (get_json('jobs/list')['jobs'] || []).map do |properties|
+          # The settings property should be merged at root
+          new_resource(:job, properties.merge(properties.delete('settings')))
+        end
+      end
+
+      # Get a job based on its job_id
+      #
+      # Parameters::
+      # * *job_id* (String): The job id to get
+      # Result::
+      # * Job: The job
+      def get(job_id)
+        properties = get_json('jobs/get', { job_id: job_id })
+        new_resource(:job, properties.merge(properties.delete('settings')))
+      end
+
+      # Create a new job.
+      #
+      # Parameters::
+      # * *properties* (Hash<Symbol,Object>): Properties to create the job
+      # Result::
+      # * Job: The new job created
+      def create(**properties)
+        job = new_resource(:job, post_json('jobs/create', properties))
+        job.add_properties(properties)
+        job
+      end
+
+    end
+
+  end
+
+end
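A minimal sketch of the Jobs resource; the job settings are illustrative Databricks job fields, not values mandated by the gem:

api = Databricks.api('https://my-workspace.cloud.databricks.com', 'dapiXXXXXXXX')
job = api.jobs.create(
  name: 'example-job',
  new_cluster: { spark_version: '7.3.x-scala2.12', node_type_id: 'i3.xlarge', num_workers: 1 },
  notebook_task: { notebook_path: '/Shared/example' }
)
job.reset(name: 'example-job-v2', notebook_task: { notebook_path: '/Shared/example_v2' })
job.delete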
data/lib/databricks/resources/root.rb CHANGED (was data/lib/databricks/domains/root.rb)
@@ -1,15 +1,17 @@
-require 'databricks/domain'
+require 'databricks/resource'
 
 module Databricks
 
-  module Domains
+  module Resources
 
     # API entry point
     # cf. https://docs.databricks.com/dev-tools/api/latest/index.html
-    class Root < Domain
+    class Root < Resource
 
-      sub_domains %i[
+      sub_resources %i[
+        clusters
         dbfs
+        instance_pools
         jobs
       ]
 
data/lib/databricks/version.rb CHANGED
@@ -1,5 +1,5 @@
 module Databricks
 
-  VERSION = '1.0.0'
+  VERSION = '2.3.1'
 
 end
metadata CHANGED
@@ -1,14 +1,14 @@
 --- !ruby/object:Gem::Specification
 name: databricks
 version: !ruby/object:Gem::Version
-  version: 1.0.0
+  version: 2.3.1
 platform: ruby
 authors:
 - Muriel Salvan
 autorequire:
 bindir: bin
 cert_chain: []
-date: 2021-01-20 00:00:00.000000000 Z
+date: 2021-06-04 00:00:00.000000000 Z
 dependencies:
 - !ruby/object:Gem::Dependency
   name: rest-client
@@ -74,11 +74,17 @@ extensions: []
 extra_rdoc_files: []
 files:
 - lib/databricks.rb
-- lib/databricks/domain.rb
-- lib/databricks/domains/dbfs.rb
-- lib/databricks/domains/jobs.rb
-- lib/databricks/domains/root.rb
+- lib/databricks/connector.rb
 - lib/databricks/resource.rb
+- lib/databricks/resources/cluster.rb
+- lib/databricks/resources/clusters.rb
+- lib/databricks/resources/dbfs.rb
+- lib/databricks/resources/file.rb
+- lib/databricks/resources/instance_pool.rb
+- lib/databricks/resources/instance_pools.rb
+- lib/databricks/resources/job.rb
+- lib/databricks/resources/jobs.rb
+- lib/databricks/resources/root.rb
 - lib/databricks/version.rb
 homepage: https://github.com/Muriel-Salvan/databricks
 licenses:
@@ -99,7 +105,7 @@ required_rubygems_version: !ruby/object:Gem::Requirement
 - !ruby/object:Gem::Version
   version: '0'
 requirements: []
-rubygems_version: 3.2.3
+rubygems_version: 3.2.15
 signing_key:
 specification_version: 4
 summary: Rubygem wrapping the Databricks REST API
data/lib/databricks/domain.rb DELETED
@@ -1,47 +0,0 @@
-module Databricks
-
-  # Encapsulate a part of the API for better organization
-  class Domain
-
-    # Declare sub-domain accessors in the current domain.
-    # This will make sure the current domain has methods named after the sub-domain identifiers that will instantiate the corresponding domains at will.
-    #
-    # Parameters::
-    # * *domains* (Array<Symbol>): Domains to instantiate
-    def self.sub_domains(*domains)
-      domains.flatten.each do |domain|
-        self.define_method(domain) do
-          sub_domain(domain)
-        end
-      end
-    end
-
-    # Instantiate a sub-domain.
-    # Keep a cache of it.
-    #
-    # Parameters::
-    # * *domain* (Symbol): Sub-domain identifier.
-    # Result::
-    # * Domain: Corresponding sub-domain
-    def sub_domain(domain)
-      unless @sub_domains.key?(domain)
-        require "#{__dir__}/domains/#{domain}.rb"
-        @sub_domains[domain] = Domains.const_get(domain.to_s.split('_').collect(&:capitalize).join.to_sym).new(@resource)
-      end
-      @sub_domains[domain]
-    end
-
-    # Constructor
-    #
-    # Parameters::
-    # * *resource* (Resource): Resource handling API calls
-    def initialize(resource)
-      @resource = resource
-      # Keep a map of sub-domains instantiated, per domain identifier.
-      # Hash< Symbol, Domain >
-      @sub_domains = {}
-    end
-
-  end
-
-end
data/lib/databricks/domains/dbfs.rb DELETED
@@ -1,46 +0,0 @@
-require 'databricks/domain'
-
-module Databricks
-
-  module Domains
-
-    # Provide the DBFS API
-    # cf. https://docs.databricks.com/dev-tools/api/latest/dbfs.html
-    class Dbfs < Domain
-
-      # List a path
-      #
-      # Parameters::
-      # * *path* (String): Path to be listed
-      # Result::
-      # * Array<String>: List of DBFS paths
-      def list(path)
-        @resource.get_json(
-          'dbfs/list',
-          {
-            path: path
-          }
-        )['files'].map { |file_info| file_info['path'] }
-      end
-
-      # Put a new file
-      #
-      # Parameters::
-      # * *path* (String): Path to the file to create
-      # * *local_file* (String): Path to the local file to put
-      def put(path, local_file)
-        @resource.post(
-          'dbfs/put',
-          {
-            path: path,
-            contents: File.new(local_file, 'rb'),
-            overwrite: true
-          }
-        )
-      end
-
-    end
-
-  end
-
-end
data/lib/databricks/domains/jobs.rb DELETED
@@ -1,31 +0,0 @@
-require 'databricks/domain'
-
-module Databricks
-
-  module Domains
-
-    # Provide the Jobs API
-    # cf. https://docs.databricks.com/dev-tools/api/latest/jobs.html
-    class Jobs < Domain
-
-      # List a path
-      #
-      # Result::
-      # * Array<Hash>: List of jobs information
-      def list
-        @resource.get_json('jobs/list')['jobs']
-      end
-
-      # Create a new job
-      #
-      # Parameters::
-      # * *settings* (Hash<Symbol,Object>): Settings to create the job
-      def create(**settings)
-        @resource.post_json('jobs/create', settings)
-      end
-
-    end
-
-  end
-
-end