nexosis_api 1.2.4 → 1.3.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- checksums.yaml +4 -4
- data/lib/nexosis_api/algorithm.rb +4 -0
- data/lib/nexosis_api/client.rb +7 -2
- data/lib/nexosis_api/client/datasets.rb +11 -21
- data/lib/nexosis_api/client/imports.rb +20 -24
- data/lib/nexosis_api/client/models.rb +107 -0
- data/lib/nexosis_api/client/sessions.rb +42 -36
- data/lib/nexosis_api/column_role.rb +4 -0
- data/lib/nexosis_api/dataset_data.rb +2 -4
- data/lib/nexosis_api/dataset_summary.rb +14 -8
- data/lib/nexosis_api/imports_response.rb +13 -0
- data/lib/nexosis_api/model_summary.rb +66 -0
- data/lib/nexosis_api/predict_response.rb +35 -0
- data/lib/nexosis_api/session.rb +26 -12
- data/lib/nexosis_api/session_result.rb +10 -6
- data/lib/nexosis_api/view_definition.rb +10 -0
- data/nexosisapi.gemspec +1 -1
- metadata +5 -2
checksums.yaml
CHANGED
@@ -1,7 +1,7 @@
 ---
 SHA1:
-  metadata.gz:
-  data.tar.gz:
+  metadata.gz: 05cf5a6d76290c17f23d0db1e00a5d9215dfc56c
+  data.tar.gz: e28cbae30ef33132f250eb200efe6545eb46091b
 SHA512:
-  metadata.gz:
-  data.tar.gz:
+  metadata.gz: b17a834985acaa28a81bf3ecb2e7874b03adfd40907b9df3698b324b334bd8e0c1d51fb103cf8c521d7523d23b32c2c014392a3e0ae27689f55b48ec9a4b90ef
+  data.tar.gz: 06de1a773837de420b6a41cb24c32b41a50e007cdfd1eaccd2916fd18d7102448c7ea4b041545d29e1dbbefc98e759d68f9499f072fe15894b3709ad43e40e55
data/lib/nexosis_api/client.rb
CHANGED
@@ -16,6 +16,8 @@ require 'nexosis_api/imports_response'
 require 'nexosis_api/join'
 require 'nexosis_api/link'
 require 'nexosis_api/metric'
+require 'nexosis_api/model_summary'
+require 'nexosis_api/predict_response'
 require 'nexosis_api/session_response'
 require 'nexosis_api/session_result'
 require 'nexosis_api/session'
@@ -26,6 +28,7 @@ require 'nexosis_api/client/sessions'
 require 'nexosis_api/client/datasets'
 require 'nexosis_api/client/imports'
 require 'nexosis_api/client/views'
+require 'nexosis_api/client/models'
 
 module NexosisApi
   # Primary entry point to working with Nexosis API
@@ -36,13 +39,15 @@ module NexosisApi
     include Client::Datasets
     include Client::Imports
     include Client::Views
+    include Client::Models
 
     def initialize(options = {})
      raise ArgumentError, 'api_key was not defined' unless options[:api_key].nil? == false
      @api_key = options[:api_key]
      self.class.base_uri options[:base_uri] unless options[:base_uri].nil?
-      @headers = {'api-key' => @api_key, 'Content-Type' => 'application/json'
-
+      @headers = { 'api-key' => @api_key, 'Content-Type' => 'application/json',
+                   'User-Agent' => 'Nexosis-Ruby-API-Client/1.2' }
+      @options = { headers: @headers, format: :json }
     end
 
     # Gets the current account balance.
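The client now mixes in Client::Models next to the existing dataset, import, session, and view modules, so the model endpoints are reached through the same entry point. A minimal sketch, assuming NexosisApi.client accepts the same options hash as Client#initialize (the api key value is a placeholder):

    require 'nexosis_api'

    # placeholder key; model calls live on the same client object as datasets and sessions
    client = NexosisApi.client(api_key: 'your-api-key')
    client.respond_to?(:list_models) # => true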
data/lib/nexosis_api/client/datasets.rb
CHANGED
@@ -34,11 +34,9 @@ module NexosisApi
       list_dataset_url = "/data?partialName=#{partial_name}"
       response = self.class.get(list_dataset_url, headers: @headers)
       if response.success?
-
-
-          results << NexosisApi::DatasetSummary.new(dr)
+        response.parsed_response['items'].map do |dr|
+          NexosisApi::DatasetSummary.new(dr)
         end
-        results
       else
         raise HttpException.new("There was a problem listing datasets: #{response.code}.", "listing datasets with partial name #{partial_name}", response)
       end
@@ -55,11 +53,8 @@ module NexosisApi
     # The dates can be used independently and are inclusive. Lack of options returns all values within the given page.
     def get_dataset(dataset_name, page_number = 0, page_size = 50, query_options = {})
       response = get_dataset_internal(dataset_name, page_number, page_size, query_options)
-      if response.success?
-
-      else
-        raise HttpException.new("There was a problem getting the dataset: #{response.code}.", "getting dataset #{dataset_name}", response)
-      end
+      return NexosisApi::DatasetData.new(response.parsed_response) if response.success?
+      raise HttpException.new("There was a problem getting the dataset: #{response.code}.", "getting dataset #{dataset_name}", response)
     end
 
     # Get the data in the set, written to a CSV file, optionally filtering it.
@@ -75,11 +70,8 @@ module NexosisApi
     #    NexosisApi.client.get_dataset_csv('MyDataset', 1, 20, {:include => 'sales'})
     def get_dataset_csv(dataset_name, page_number = 0, page_size = 50, query_options = {})
       response = get_dataset_internal(dataset_name, page_number, page_size, query_options, 'text/csv')
-      if response.success?
-
-      else
-        raise HttpException.new("There was a problem getting the dataset: #{response.code}.", "getting dataset #{dataset_name}", response)
-      end
+      return response.body if response.success?
+      raise HttpException.new("There was a problem getting the dataset: #{response.code}.", "getting dataset #{dataset_name}", response)
     end
 
     # Remove data from a data set or the entire set.
@@ -92,6 +84,7 @@ module NexosisApi
     # - cascade_forecast - will cascade deletes to all related forecasts
     # - cascade_session - will cascade deletes to all related sessions
     # - cascade_view - will cascade deletes to all related views (any part of join - think twice)
+    # - cascase_model - will cascade deletes to all models created from this dataset
     # - cascade - will cascade deletes to all related forecasts and sessions
     # @example - request delete with cascade forecast
     #    NexosisApi.client.remove_dataset('mydataset', {:cascade_forecast => true})
@@ -105,16 +98,13 @@ module NexosisApi
         query['startDate'] = [filter_options[:start_date].to_s] unless filter_options[:start_date].nil?
         query['endDate'] = [filter_options[:end_date].to_s] unless filter_options[:end_date].nil?
       end
-      #normalizer = proc { |query_set| query_set.map { |key, value| value.map { |v| "#{key}=#{v}" } }.join('&') }
+      # normalizer = proc { |query_set| query_set.map { |key, value| value.map { |v| "#{key}=#{v}" } }.join('&') }
       response = self.class.delete(dataset_remove_url,
                                    headers: @headers,
                                    query: query,
                                    query_string_normalizer: ->(query_map) {array_query_normalizer(query_map)})
-      if response.success?
-
-      else
-        raise HttpException.new("There was a problem removing the dataset: #{response.code}.", "removing dataset #{dataset_name}", response)
-      end
+      return if response.success?
+      raise HttpException.new("There was a problem removing the dataset: #{response.code}.", "removing dataset #{dataset_name}", response)
     end
 
     private
@@ -146,7 +136,7 @@ module NexosisApi
     # @private
     def create_cascade_options(option_hash)
       return nil if option_hash.nil?
-      return %w[session view forecast] if option_hash.key?(:cascade)
+      return %w[session view forecast model] if option_hash.key?(:cascade)
       options_set = []
       option_hash.each_key { |k| options_set << k.to_s.gsub(/cascade_/, '') if k.to_s.include? 'cascade_' }
       # HACK: required to be backward compatible with incorrect key names
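The new model cascade can be requested like the existing cascades shown in the @example above; a sketch ('mydataset' is a placeholder name, and the option key follows the cascade_* pattern that create_cascade_options strips):

    # removes the dataset and cascades the delete to models trained from it
    NexosisApi.client.remove_dataset('mydataset', cascade_model: true)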
data/lib/nexosis_api/client/imports.rb
CHANGED
@@ -10,16 +10,12 @@ module NexosisApi
     #
     # @return [Array of NexosisApi::ImportsResponse]
     def list_imports
-      imports_url =
-      response = self.class.get(imports_url, :
-      if(response.success?)
-        items
-        response.parsed_response["items"].each do |i|
-          items << NexosisApi::ImportsResponse.new(i)
-        end
-        items
+      imports_url = '/imports'
+      response = self.class.get(imports_url, headers: @headers)
+      if (response.success?)
+        response.parsed_response['items'].map { |i| NexosisApi::ImportsResponse.new(i) }
       else
-        raise HttpException.new("There was a problem getting the imports: #{response.code}.", "uploading dataset from s3 #{dataset_name}"
+        raise HttpException.new("There was a problem getting the imports: #{response.code}.", "uploading dataset from s3 #{dataset_name}", response)
       end
     end
 
@@ -32,21 +28,21 @@ module NexosisApi
     # @param column_metadata [Array of NexosisApi::Column] description of each column in target dataset. Optional.
     # @return [NexosisApi::ImportsResponse]
     # @see http://docs.aws.amazon.com/general/latest/gr/rande.html#s3_region for information on region names
-    def import_from_s3(dataset_name, bucket_name, path, region =
-      raise ArgumentError,
-      raise ArgumentError,
-      raise ArgumentError,
-
+    def import_from_s3(dataset_name, bucket_name, path, region = 'us-east-1', column_metadata = [])
+      raise ArgumentError, 'dataset_name was not provided and is not optional ' unless dataset_name.to_s.empty? == false
+      raise ArgumentError, 'bucket_name was not provided and is not optional ' unless bucket_name.to_s.empty? == false
+      raise ArgumentError, 'path was not provided and is not optional ' unless path.to_s.empty? == false
+      s3_import_url = '/imports/s3'
       column_json = Column.to_json(column_metadata)
       body = {
-
-
-
-
-
+        'dataSetName' => dataset_name,
+        'bucket' => bucket_name,
+        'path' => path,
+        'region' => region,
+        'columns' => column_json
       }
-      response = self.class.post(s3_import_url, :
-      if(response.success?)
+      response = self.class.post(s3_import_url, headers: @headers, body: body.to_json)
+      if (response.success?)
         NexosisApi::ImportsResponse.new(response.parsed_response)
       else
         raise HttpException.new("There was a problem importing from s3: #{response.code}.", "uploading dataset from s3 #{dataset_name}" ,response)
@@ -60,10 +56,10 @@ module NexosisApi
     # @example get S3 import
     #    NexosisApi.client.retrieve_import('740dca2a-b488-4322-887e-fa473b1caa54')
     def retrieve_import(import_id)
-      raise ArgumentError,
+      raise ArgumentError, 'import_id was not provided and is not optional ' unless import_id.to_s.empty? == false
       imports_url = "/imports/#{import_id}"
-      response = self.class.get(imports_url, :
-      if(response.success?)
+      response = self.class.get(imports_url, headers: @headers)
+      if (response.success?)
         NexosisApi::ImportsResponse.new(response.parsed_response)
       else
         raise HttpException.new("There was a problem getting the import #{response.code}.", "requesting an import #{import_id}" ,response)
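A short sketch of the filled-in import_from_s3 signature above; the dataset, bucket, and path values are placeholders:

    # region defaults to 'us-east-1'; column_metadata is optional
    import = NexosisApi.client.import_from_s3('MyDataset', 'my-bucket', 'path/to/data.csv')
    puts import.status # imports run through a FIFO queue, so poll status before starting a session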
data/lib/nexosis_api/client/models.rb
ADDED
@@ -0,0 +1,107 @@
+module NexosisApi
+  class Client
+    # class to operate on model endpoint in Nexosis API
+    # @since 1.3.0
+    module Models
+      # List all models created in your company, optionally filtered by query parameters
+      #
+      # @param datasource_name [String] optionally limit to those
+      #    models created for this data source name.
+      # @param query_options [Hash] limit by dates: begin_date and/or end_date
+      # @note - query options dates can either be ISO 8601 compliant strings or Date objects
+      # @return [Array of NexosisApi::ModelSummary] - all models available within the query parameters
+      def list_models(datasource_name = nil, page = 0, page_size = 50, query_options = {})
+        model_url = '/models'
+        query = {
+          page: page,
+          pageSize: page_size
+        }
+        unless query_options.empty?
+          query.store('createdBeforeDate', query_options['end_date']) unless query_options['end_date'].nil?
+          query.store('createdAfterDate', query_options['begin_date']) unless query_options['begin_date'].nil?
+        end
+        query.store(dataSourceName: datasource_name) unless datasource_name.nil?
+        response = self.class.get(model_url, headers: @headers, query: query)
+        if (response.success?)
+          response.parsed_response['items'].map { |item| NexosisApi::ModelSummary.new(item) }
+        else
+          raise HttpException.new("There was a problem listing models: #{response.code}.", "listing models with data source name #{datasource_name}", response)
+        end
+      end
+
+      # Get the details of the particular model requested by id
+      #
+      # @param model_id [String] The unique identifier for the model returned by a create-model session
+      # @return [NexosisApi::ModelSummary]
+      def get_model(model_id)
+        raise ArgumentError, 'Retrieving a model requires that model_id be specified and it is currently null.' if model_id.nil?
+        model_url = "/models/#{model_id}"
+        response = self.class.get(model_url, @options)
+        if (response.success?)
+          NexosisApi::ModelSummary.new(response.parsed_response)
+        else
+          raise HttpException.new("There was a problem getting your model: #{response.code}.", "Could not get model #{model_id}", response)
+        end
+      end
+
+      # Run a feature set through the model to get predictions
+      #
+      # @param model_id [String] unique identifier of model to use
+      # @param feature_data [Array of Hash] feature columns with values to predict from
+      # @return [NexosisApi::PredictResponse]
+      # @note The feature data shape should match that of the dataset used to create the model.
+      #    Any missing features in this request will reduce the quality of the predictions.
+      def predict(model_id, feature_data)
+        raise ArgumentError, 'Running predictions requires that model_id be specified and it is currently empty.' if model_id.empty?
+        raise ArgumentError, 'Running predictions requires that feature_data be specified and it is currently empty.' if feature_data.empty?
+        predict_url = "/models/#{model_id}/predict"
+        response = self.class.post(predict_url, headers: @headers, body: { "data": feature_data }.to_json)
+        if (response.success?)
+          NexosisApi::PredictResponse.new(model_id, response.parsed_response)
+        else
+          raise HttpException.new("There was a problem predicting from your model: #{response.code}.",
+                                  "Could not start predict for #{model_id}",
+                                  response)
+        end
+      end
+
+      # Remove an existing model
+      #
+      # @param model_id [String] the unique id of the model to remove.
+      def remove_model(model_id)
+        raise ArgumentError, 'Deleting a model requires that model_id be specified and it is currently empty.' if model_id.empty?
+        delete_url = "/models/#{model_id}"
+        response = self.class.delete(delete_url, @options)
+        unless (response.success?)
+          raise HttpException.new("There was a problem deleting your model: #{response.code}.",
+                                  "Could not delete #{model_id}",
+                                  response)
+        end
+      end
+
+      # Deletes multiple models based on the provided filter criteria.
+      # @param datasource_name [String] remove all models created by this datasource
+      # @param begin_date [DateTime] remove all models created after this date/time - inclusive. May be a ISO 8601 compliant string.
+      # @param end_date [DateTime] remove all models created before this date/time - inclusive. May be a ISO 8601 compliant string.
+      # @note - Use with great care. This permanently removes trained models.
+      #    All parameters are indepdently optional, but one must be sent.
+      def remove_models(datasource_name = nil, begin_date = nil, end_date = nil)
+        params_unset = datasource_name.nil?
+        params_unset &= begin_date.nil?
+        params_unset &= end_date.nil?
+        raise ArgumentError, 'Must set one of the method parameters.' if params_unset
+        delete_url = '/models'
+        query = {}
+        query.store('dataSourceName', datasource_name) unless datasource_name.nil?
+        query.store('createdAfterDate', begin_date) unless begin_date.nil?
+        query.store('createdBeforeDate', end_date) unless end_date.nil?
+        response = self.class.delete(delete_url, headers: @headers, query: query)
+        unless (response.success?)
+          raise HttpException.new("There was a problem deleting your models: #{response.code}.",
+                                  'Could not delete models',
+                                  response)
+        end
+      end
+    end
+  end
+end
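A sketch of the model workflow this new module enables; 'MyDataset' and the feature hash are placeholders, and the echoed target column follows the PredictResponse documentation later in this diff:

    models = NexosisApi.client.list_models('MyDataset')
    model = models.first
    # feature_data is an Array of Hash whose shape should match the training dataset
    result = NexosisApi.client.predict(model.model_id, [{ 'feature1' => 23.33 }])
    result.predictions.each { |row| puts row } # each row echoes the features plus the predicted target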
data/lib/nexosis_api/client/sessions.rb
CHANGED
@@ -45,21 +45,18 @@ module NexosisApi
     # Remove a session
     # @param session_id [String] required session identifier
     def remove_session(session_id)
-      if(session_id.to_s.empty?)
+      if (session_id.to_s.empty?)
         raise ArgumentError 'session_id cannot be empty or nil'
       end
       session_url = "/sessions/#{session_id}"
       response = self.class.delete(session_url, headers: @headers)
-      if
-
-      else
-        raise HttpException.new('Could not delete session with given id', "remove session with id #{session_id.to_s}",response)
-      end
+      return if response.success?
+      raise HttpException.new('Could not delete session with given id', "remove session with id #{session_id}", response)
     end
 
     # Remove sessions that have been run. All query options are optional and will be used to limit the sessions removed.
-    # @param query_options [Hash] optionally provide query parametes to limit the set of sessions removed.
-    # @note query parameters hash members are type, dataset_name, event_name, start_date, and end_date.
+    # @param query_options [Hash] optionally provide query parametes to limit the set of sessions removed.
+    # @note query parameters hash members are type, dataset_name, event_name, start_date, and end_date.
     #    Start and end dates refer to the session requested date.
     #    Results are not removed but then can only be accessed by dataset name
     # @example Remove all sessions based on a dataset by name
@@ -67,25 +64,22 @@ module NexosisApi
     def remove_sessions(query_options = {})
       sessions_url = '/sessions'
       response = self.class.delete(sessions_url, :headers => @headers, :query => get_query_from_options(query_options))
-      if
-
-      else
-        raise HttpException.new('Could not remove sessions', "Remove sessions with query #{query_options.to_s}",response)
-      end
+      return if response.success?
+      raise HttpException.new('Could not remove sessions', "Remove sessions with query #{query_options.to_s}",response)
     end
 
     # Initiate a new forecast session based on a named dataset.
     #
     # @param dataset_name [String] The name of the saved data set that has the data to forecast on.
-    # @param start_date [DateTime] The starting date of the forecast period. Can be ISO 8601 string.
+    # @param start_date [DateTime] The starting date of the forecast period. Can be ISO 8601 string.
     # @param end_date [DateTime] The ending date of the forecast period. Can be ISO 8601 string.
     # @param target_column [String] The name of the column for which you want predictions. Nil if defined in dataset.
     # @param result_interval [NexosisApi::TimeInterval] (optional) - The date/time interval (e.g. Day, Hour) at which predictions should be generated. So, if Hour is specified for this parameter you will get a Result record for each hour between startDate and endDate. If unspecified, we’ll generate predictions at a Day interval.
-    # @param column_metadata [Array of NexosisApi::Column] (optional) - specification for how to handle columns if different from existing metadata on dataset
+    # @param column_metadata [Array of NexosisApi::Column] (optional) - specification for how to handle columns if different from existing metadata on dataset
     # @return [NexosisApi::SessionResponse] providing information about the sesssion
     # @note The time interval selected must be greater than or equal to the finest granularity of the data provided.
     #    For instance if your data includes many recoreds per hour, then you could request hour, day, or any other result interval.
-    #    However, if your data includes only a few records per day or fewer, then a request for an hourly result interval will produce poor results.
+    #    However, if your data includes only a few records per day or fewer, then a request for an hourly result interval will produce poor results.
     def create_forecast_session(dataset_name, start_date, end_date, target_column = nil, result_interval = NexosisApi::TimeInterval::DAY, column_metadata = nil)
       create_session(dataset_name, start_date, end_date, target_column, false, nil, 'forecast', result_interval, column_metadata)
     end
@@ -136,18 +130,13 @@ module NexosisApi
     # @return [NexosisApi::SessionResult] SessionResult if parsed, String of csv data otherwise
     def get_session_results(session_id, as_csv = false)
       session_result_url = "/sessions/#{session_id}/results"
-      if as_csv
-        @headers["Accept"] = 'text/csv'
-      end
+      @headers['Accept'] = 'text/csv' if as_csv
       response = self.class.get(session_result_url, @options)
       @headers.delete('Accept')
 
-
-      if
-
-      else
-        NexosisApi::SessionResult.new(response.parsed_response)
-      end
+      if (response.success?)
+        return response.body if as_csv
+        NexosisApi::SessionResult.new(response.parsed_response)
       else
         raise HttpException.new("There was a problem getting the session: #{response.code}.", "get results for session #{session_id}" ,response)
       end
@@ -160,17 +149,34 @@ module NexosisApi
     def get_session(session_id)
       session_url = "/sessions/#{session_id}"
       response = self.class.get(session_url, @options)
-
-
+      return NexosisApi::Session.new(response.parsed_response) if response.success?
+      raise HttpException.new("There was a problem getting the session: #{response.code}.", "getting session #{session_id}" ,response)
+    end
+
+    def create_model(datasource_name, target_column, columns = {})
+      model_url = '/sessions/model'
+      body = {
+        dataSourceName: datasource_name,
+        targetColumn: target_column,
+        predictionDomain: 'regression',
+        isEstimate: false
+      }
+      body.store(columns: columns) unless columns.empty?
+      response = self.class.post(model_url, headers: @headers, body: body.to_json)
+      if response.success?
+        session_hash = { 'session' => response.parsed_response }.merge(response.headers)
+        NexosisApi::SessionResponse.new(session_hash)
       else
-        raise HttpException.new("There was a problem
+        raise HttpException.new("There was a problem creating the model session: #{response.code}.", 'creating model session' ,response)
       end
-
+    end
 
     private
-
+
+    # @private
+    def create_session(dataset_name, start_date, end_date, target_column = nil, is_estimate=false, event_name = nil, type = 'forecast', result_interval = NexosisApi::TimeInterval::DAY, column_metadata = nil)
       session_url = "/sessions/#{type}"
-      query = {
+      query = {
        'targetColumn' => target_column.to_s,
        'startDate' => start_date.to_s,
        'endDate' => end_date.to_s,
@@ -178,20 +184,20 @@ module NexosisApi
        'resultInterval' => result_interval.to_s
      }
      query['dataSetName'] = dataset_name.to_s unless dataset_name.to_s.empty?
-      if(event_name.nil? == false)
+      if (event_name.nil? == false)
        query['eventName'] = event_name
      end
      body = ''
-      if(column_metadata.nil? == false)
+      if (column_metadata.nil? == false)
        column_json = Column.to_json(column_metadata)
        body = {
          'dataSetName' => dataset_name,
          'columns' => column_json
        }
      end
-      response = self.class.post(session_url, :
-      if(response.success?)
-        session_hash = {'session' => response.parsed_response}.merge(response.headers)
+      response = self.class.post(session_url, headers: @headers, query: query, body: body.to_json)
+      if (response.success?)
+        session_hash = { 'session' => response.parsed_response }.merge(response.headers)
        NexosisApi::SessionResponse.new(session_hash)
      else
        raise HttpException.new("Unable to create new #{type} session", "Create session for dataset #{dataset_name}",response)
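A sketch of kicking off a model-building session with the new create_model helper; the data source and target column names are placeholders, and reading the session id back through a sessionId accessor is an assumption (it is suggested by the other camelCase attributes on Session but is not shown in this diff):

    response = NexosisApi.client.create_model('MyDataset', 'sales')
    # once the session completes it carries the id of the trained model
    session = NexosisApi.client.get_session(response.sessionId) # sessionId accessor assumed
    model_id = session.model_id # use with client.predict / the /models/{modelId}/predict endpoint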
data/lib/nexosis_api/dataset_data.rb
CHANGED
@@ -6,9 +6,7 @@ module NexosisApi
         if (k == 'data')
           @data = v
         elsif (k == 'links')
-          links =
-          v.each do |l| links << NexosisApi::Link.new(l) end
-          @links = links
+          @links = v.reject(&:nil?).map { |l| NexosisApi::Link.new(l) }
         end
       end
     end
@@ -17,7 +15,7 @@ module NexosisApi
     # @return [Array of NexosisApi::Link]
     attr_accessor :links
 
-    #The hash of data values from the dataset
+    # The hash of data values from the dataset
     # @return [Array of Hash] where each hash contains the dataset data
     attr_accessor :data
   end
data/lib/nexosis_api/dataset_summary.rb
CHANGED
@@ -3,15 +3,13 @@ module NexosisApi
   class DatasetSummary
     def initialize(data_hash)
       data_hash.each do |k, v|
-        if(k == 'dataSetName')
+        if (k == 'dataSetName')
           @dataset_name = v unless v.nil?
-        elsif(k == 'columns')
-
-
-
-
-          end
-          @column_metadata = columns
+        elsif (k == 'columns')
+          @column_metadata = v.reject { |_key, value| value.nil? }
+                              .map do |col_key, col_val|
+                                NexosisApi::Column.new(col_key, col_val)
+                              end
         end
       end
     end
@@ -23,5 +21,13 @@ module NexosisApi
     # Descriptive information about the columns
     # @return [Array of NexosisApi::Column]
     attr_accessor :column_metadata
+
+    # Helper method which tells you whether or not this dataset has a column with timestamp role.
+    # @note Often helpful for implmenters as non-timeseries datasets
+    #    cannot be sent to forecast or impact sessions
+    # @since 1.3.0
+    def timeseries?
+      !@column_metadata.select { |dc| dc.role == NexosisApi::ColumnRole::TIMESTAMP }.empty?
+    end
   end
 end
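The new timeseries? helper is handy for deciding which session types a data source supports; a sketch assuming list_datasets is the client's listing method and that dataset_name has a public accessor (neither appears in this diff):

    # forecast and impact sessions need a timestamp column; model sessions do not
    NexosisApi.client.list_datasets.each do |summary|
      puts "#{summary.dataset_name} timeseries? #{summary.timeseries?}"
    end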
data/lib/nexosis_api/imports_response.rb
CHANGED
@@ -18,6 +18,8 @@ module NexosisApi
         links = []
         v.each { |l| links << NexosisApi::Link.new(l) }
         instance_variable_set("@#{k}", links) unless v.nil?
+      elsif k == 'dataSetName'
+        @datasource_name = v
       else
         instance_variable_set("@#{k}", v) unless v.nil?
       end
@@ -37,10 +39,21 @@ module NexosisApi
     # @note The import will be performed in a FIFO queue. Check back on status before attempting to start a session using the dataset.
     attr_accessor :status
 
+    # Date and status of each status this session has entered
+    # @return [Hash]
+    # @since 1.3.0
+    attr_accessor :statusHistory
+
     # echo back the dataset name provided
     # @return [String]
+    # @deprecated use datasource_name instead
     attr_accessor :dataSetName
 
+    # echo back the name of the data source uploaded
+    # @return [String]
+    # @since 1.3.0
+    attr_accessor :datasource_name
+
     # The S3 parameters used to import a dataset
     # @return [Hash]
     # For an S3 response the keys of this hash should be 'bucket', 'path', and 'region'
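A sketch of the renamed import attributes; the import id is taken from the retrieve_import @example earlier in this diff:

    import = NexosisApi.client.retrieve_import('740dca2a-b488-4322-887e-fa473b1caa54')
    import.datasource_name # new in 1.3.0; dataSetName remains only as a deprecated accessor
    import.statusHistory   # per-status timestamps, new in 1.3.0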
data/lib/nexosis_api/model_summary.rb
ADDED
@@ -0,0 +1,66 @@
+module NexosisApi
+  # class to hold the parsed results of model summary json
+  # @since 1.3.0
+  class ModelSummary
+    def initialize(model_hash)
+      model_hash.each do |k, v|
+        k = k.to_s
+        if (k == 'modelId')
+          @model_id = v
+        elsif (k == 'predictionDomain')
+          @prediction_domain = v
+        elsif (k == 'dataSourceName')
+          @datasource_name = v
+        elsif (k == 'createdDate')
+          @created_date = DateTime.parse(v) unless v.nil?
+        elsif (k == 'algorithm')
+          @algorithm = NexosisApi::Algorithm.new(v) unless v.nil?
+        elsif (k == 'columns')
+          @column_metadata = v.reject { |_key, value| value.nil? }
+                              .map do |col_key, col_val|
+                                NexosisApi::Column.new(col_key, col_val)
+                              end
+        elsif (k == 'metrics')
+          @metrics = v.reject { |_key, value| value.nil? }
+                      .map do |col_key, col_val|
+                        NexosisApi::Metric.new(name: col_key, value: col_val)
+                      end
+        elsif (k == 'sessionId')
+          @session_id = v
+        end
+      end
+    end
+
+    # Unique model id for this model in uuid/guid format.
+    # @return [String]
+    attr_accessor :model_id
+
+    # Unique id of the session used to create this model instance
+    # @return [String]
+    attr_accessor :session_id
+
+    # The type of prediction performed
+    # @return [String]
+    attr_reader :prediction_domain
+
+    # The data source used to create this model
+    # @return [String]
+    attr_accessor :datasource_name
+
+    # The date on which this model was created.
+    # @return [DateTime]
+    attr_accessor :created_date
+
+    # Information about the algorithm used to create the model
+    # @return [NexosisApi::Algorithm]
+    attr_accessor :algorithm
+
+    # Descriptive information about the columns
+    # @return [Array of NexosisApi::Column]
+    attr_accessor :column_metadata
+
+    # Algorithm and model specific metrics which may be of interest
+    # @return [Array of NexosisApi::Metric]
+    attr_accessor :metrics
+  end
+end
data/lib/nexosis_api/predict_response.rb
ADDED
@@ -0,0 +1,35 @@
+module NexosisApi
+  # Class to hold the parsed response of a prediction request
+  # @since 1.3.0
+  class PredictResponse
+    def initialize(model_id, response_hash)
+      @model_id = model_id
+      @predictions = response_hash['data']
+      @message = response_hash['messages']
+    end
+
+    # The unique identifier for the model used to create these predictions
+    # @return [String]
+    attr_reader :model_id
+
+    # The feature data along with predicted target value
+    # @return [Array of Hash] each row of data as a hash in an array of values
+    # @note The result data includes an echo of the data sent to the predict request
+    #    along with the target column containing the values predicted.
+    #    [
+    #      {
+    #        "feature1": 23.33,
+    #        "target": 2.59
+    #      },
+    #      {
+    #        "feature1": 15.82,
+    #        "target": 1.75
+    #      }
+    #    ]
+    attr_accessor :predictions
+
+    # A list of warning message optionally returned from prediction run
+    # @return [Array]
+    attr_accessor :messages
+  end
+end
data/lib/nexosis_api/session.rb
CHANGED
@@ -3,23 +3,25 @@ module NexosisApi
   class Session
     def initialize(sessionHash)
       sessionHash.each do |k,v|
-        if(k == 'links')
+        if (k == 'links')
           links = Array.new
-          v.each
+          v.each { |l| links << NexosisApi::Link.new(l) }
           instance_variable_set("@#{k}", links) unless v.nil?
-        elsif(k == 'isEstimate')
+        elsif (k == 'isEstimate')
           instance_variable_set('@is_estimate', v) unless v.nil?
-        elsif(k == 'columns')
-
-
-
-
-
-          @column_metadata = columns
-        elsif(k == 'resultInterval')
+        elsif (k == 'columns')
+          @column_metadata = v.reject { |_key, value| value.nil? }
+                              .map do |col_key, col_val|
+                                NexosisApi::Column.new(col_key, v[col_key])
+                              end
+        elsif (k == 'resultInterval')
           @result_interval = v
         elsif (k == 'dataSourceName')
           @datasource_name = v
+        elsif (k == 'modelId')
+          @model_id = v
+        elsif (k == 'requestedDate')
+          @requested_date = DateTime.parse(v)
         else
           instance_variable_set("@#{k}", v) unless v.nil?
         end
@@ -48,7 +50,7 @@ module NexosisApi
 
     # the dataset used in this session
     # @return [String]
-    # @deprecated - Use the @
+    # @deprecated - Use the @datasource_name property instead
     attr_accessor :dataSetName
 
     # The column in the dataset for which this session ran predictions
@@ -84,5 +86,17 @@ module NexosisApi
     # @return [String] - the dataset or view name
     # @since 1.2.0
     attr_accessor :datasource_name
+
+    # The date this session was orginally submitted
+    # @since 1.3.0
+    attr_accessor :requested_date
+
+    # The id of the model created by this session if any
+    # @return [String] a uuid/buid format unique string for the model
+    # @since 1.3.0
+    # @note This is always empty in time-series sessions (forecast/impact)
+    #    The model id returned here should be used in all future calls
+    #    to model endpoints - primarily the /models/{modelId}/predict endpoint.
+    attr_accessor :model_id
   end
 end
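A sketch of the new session attributes; the session id is a placeholder:

    session = NexosisApi.client.get_session('session-id-placeholder')
    session.requested_date # when the session was originally submitted
    session.model_id       # empty for forecast/impact sessions, set for model-building sessions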
data/lib/nexosis_api/session_result.rb
CHANGED
@@ -3,13 +3,15 @@ module NexosisApi
   class SessionResult < Session
     def initialize(session_hash)
       session_hash.each do |k, v|
-        if k == 'metrics'
-
-
-
+        if k.to_s == 'metrics' && session_hash['type'] == 'impact'
+          instance_variable_set("@#{k}", NexosisApi::ImpactMetric.new(v)) unless v.nil?
+        elsif k.to_s == 'metrics'
+          @metrics = v.map { |key, value| NexosisApi::Metric.new( { 'name' => key.to_s, 'value' => value } ) } unless v.nil?
+        elsif k.to_s == 'data'
+          @data = v
         end
       end
-      super(session_hash.reject { |k,
+      super(session_hash.reject { |k, _v| k.to_s == 'data' || k.to_s == 'metrics' })
     end
 
     # The impact analysis if this session type is impact
@@ -17,7 +19,9 @@ module NexosisApi
     attr_accessor :metrics
 
     # The result data in a hash with the name of the target column
-    # @return [Hash]
+    # @return [Array of Hash]
+    # @note When retrieving a model creation session this field
+    #    will contain the test data and results.
     attr_accessor :data
   end
 end
data/lib/nexosis_api/view_definition.rb
CHANGED
@@ -14,6 +14,8 @@ module NexosisApi
       elsif k == 'joins'
         next if v.nil?
         @joins = v.reject(&:nil?).map { |join| NexosisApi::Join.new(join) }
+      elsif k == 'isTimeSeries'
+        @is_timeseries = v
       end
     end
   end
@@ -34,6 +36,14 @@ module NexosisApi
     # @return [Array of NexosisApi::Join]
     attr_accessor :joins
 
+    # Is this view based on time series data?
+    # @since 1.3.0
+    attr_accessor :is_timeseries
+
+    # Is this view based on time series data?
+    # @since 1.3.0
+    alias_method :timeseries?, :is_timeseries
+
     def to_json
       hash = {}
       hash['dataSetName'] = dataset_name
data/nexosisapi.gemspec
CHANGED
@@ -16,6 +16,6 @@ Gem::Specification.new do |spec|
   spec.require_paths = ['lib']
   spec.required_ruby_version = '>= 2.0.0'
   spec.summary = "Ruby client for working with the Nexosis API"
-  spec.version = '1.2.4'
+  spec.version = '1.3.0'
   spec.metadata["yard.run"] = "yri"
 end
metadata
CHANGED
@@ -1,14 +1,14 @@
 --- !ruby/object:Gem::Specification
 name: nexosis_api
 version: !ruby/object:Gem::Version
-  version: 1.2.4
+  version: 1.3.0
 platform: ruby
 authors:
 - Nexosis,Inc
 autorequire:
 bindir: bin
 cert_chain: []
-date: 2017-
+date: 2017-10-06 00:00:00.000000000 Z
 dependencies:
 - !ruby/object:Gem::Dependency
   name: bundler
@@ -53,6 +53,7 @@ files:
 - lib/nexosis_api/client.rb
 - lib/nexosis_api/client/datasets.rb
 - lib/nexosis_api/client/imports.rb
+- lib/nexosis_api/client/models.rb
 - lib/nexosis_api/client/sessions.rb
 - lib/nexosis_api/client/views.rb
 - lib/nexosis_api/column.rb
@@ -69,6 +70,8 @@ files:
 - lib/nexosis_api/join.rb
 - lib/nexosis_api/link.rb
 - lib/nexosis_api/metric.rb
+- lib/nexosis_api/model_summary.rb
+- lib/nexosis_api/predict_response.rb
 - lib/nexosis_api/session.rb
 - lib/nexosis_api/session_response.rb
 - lib/nexosis_api/session_result.rb