chalk_ruby 0.1.4 → 0.2.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- checksums.yaml +4 -4
- data/README.dev.md +8 -0
- data/chalk_ruby.gemspec +1 -0
- data/lib/chalk_ruby/config.rb +5 -0
- data/lib/chalk_ruby/defaults.rb +1 -0
- data/lib/chalk_ruby/grpc/auth_interceptor.rb +62 -0
- data/lib/chalk_ruby/grpc_client.rb +340 -0
- data/lib/chalk_ruby/http/http_requester.rb +2 -1
- data/lib/chalk_ruby/protos/chalk/aggregate/v1/backfill_pb.rb +28 -0
- data/lib/chalk_ruby/protos/chalk/aggregate/v1/service_pb.rb +36 -0
- data/lib/chalk_ruby/protos/chalk/aggregate/v1/service_services_pb.rb +42 -0
- data/lib/chalk_ruby/protos/chalk/aggregate/v1/timeseries_pb.rb +22 -0
- data/lib/chalk_ruby/protos/chalk/arrow/v1/arrow_pb.rb +41 -0
- data/lib/chalk_ruby/protos/chalk/artifacts/v1/cdc_pb.rb +20 -0
- data/lib/chalk_ruby/protos/chalk/artifacts/v1/chart_pb.rb +34 -0
- data/lib/chalk_ruby/protos/chalk/artifacts/v1/cron_query_pb.rb +22 -0
- data/lib/chalk_ruby/protos/chalk/artifacts/v1/deployment_pb.rb +25 -0
- data/lib/chalk_ruby/protos/chalk/artifacts/v1/export_pb.rb +35 -0
- data/lib/chalk_ruby/protos/chalk/auth/v1/agent_pb.rb +29 -0
- data/lib/chalk_ruby/protos/chalk/auth/v1/audit_pb.rb +22 -0
- data/lib/chalk_ruby/protos/chalk/auth/v1/displayagent_pb.rb +31 -0
- data/lib/chalk_ruby/protos/chalk/auth/v1/featurepermission_pb.rb +20 -0
- data/lib/chalk_ruby/protos/chalk/auth/v1/permissions_pb.rb +22 -0
- data/lib/chalk_ruby/protos/chalk/common/v1/chalk_error_pb.rb +22 -0
- data/lib/chalk_ruby/protos/chalk/common/v1/chart_pb.rb +20 -0
- data/lib/chalk_ruby/protos/chalk/common/v1/dataset_response_pb.rb +26 -0
- data/lib/chalk_ruby/protos/chalk/common/v1/feature_values_pb.rb +31 -0
- data/lib/chalk_ruby/protos/chalk/common/v1/offline_query_pb.rb +32 -0
- data/lib/chalk_ruby/protos/chalk/common/v1/online_query_pb.rb +44 -0
- data/lib/chalk_ruby/protos/chalk/common/v1/operation_kind_pb.rb +19 -0
- data/lib/chalk_ruby/protos/chalk/common/v1/query_log_pb.rb +28 -0
- data/lib/chalk_ruby/protos/chalk/common/v1/query_status_pb.rb +19 -0
- data/lib/chalk_ruby/protos/chalk/common/v1/query_values_pb.rb +26 -0
- data/lib/chalk_ruby/protos/chalk/common/v1/upload_features_pb.rb +22 -0
- data/lib/chalk_ruby/protos/chalk/engine/v1/bloom_filter_pb.rb +19 -0
- data/lib/chalk_ruby/protos/chalk/engine/v1/offline_store_service_pb.rb +24 -0
- data/lib/chalk_ruby/protos/chalk/engine/v1/offline_store_service_services_pb.rb +31 -0
- data/lib/chalk_ruby/protos/chalk/engine/v1/plan_pb.rb +39 -0
- data/lib/chalk_ruby/protos/chalk/engine/v1/plan_server_pb.rb +25 -0
- data/lib/chalk_ruby/protos/chalk/engine/v1/plan_server_services_pb.rb +28 -0
- data/lib/chalk_ruby/protos/chalk/engine/v1/query_server_pb.rb +25 -0
- data/lib/chalk_ruby/protos/chalk/engine/v1/query_server_services_pb.rb +44 -0
- data/lib/chalk_ruby/protos/chalk/engine/v2/feature_values_chart_pb.rb +29 -0
- data/lib/chalk_ruby/protos/chalk/engine/v2/feature_values_pb.rb +27 -0
- data/lib/chalk_ruby/protos/chalk/engine/v2/offline_store_service_pb.rb +25 -0
- data/lib/chalk_ruby/protos/chalk/engine/v2/offline_store_service_services_pb.rb +33 -0
- data/lib/chalk_ruby/protos/chalk/engine/v2/query_log_pb.rb +28 -0
- data/lib/chalk_ruby/protos/chalk/engine/v2/query_values_pb.rb +26 -0
- data/lib/chalk_ruby/protos/chalk/expression/v1/expression_pb.rb +76 -0
- data/lib/chalk_ruby/protos/chalk/graph/v1/graph_pb.rb +63 -0
- data/lib/chalk_ruby/protos/chalk/graph/v1/sources_pb.rb +41 -0
- data/lib/chalk_ruby/protos/chalk/graph/v2/sources_pb.rb +25 -0
- data/lib/chalk_ruby/protos/chalk/lsp/v1/lsp_pb.rb +32 -0
- data/lib/chalk_ruby/protos/chalk/server/v1/audit_pb.rb +27 -0
- data/lib/chalk_ruby/protos/chalk/server/v1/audit_services_pb.rb +26 -0
- data/lib/chalk_ruby/protos/chalk/server/v1/auth_pb.rb +29 -0
- data/lib/chalk_ruby/protos/chalk/server/v1/auth_services_pb.rb +29 -0
- data/lib/chalk_ruby/protos/chalk/server/v1/authtesting_pb.rb +34 -0
- data/lib/chalk_ruby/protos/chalk/server/v1/authtesting_services_pb.rb +32 -0
- data/lib/chalk_ruby/protos/chalk/server/v1/billing_pb.rb +37 -0
- data/lib/chalk_ruby/protos/chalk/server/v1/billing_services_pb.rb +43 -0
- data/lib/chalk_ruby/protos/chalk/server/v1/bootstrap_pb.rb +24 -0
- data/lib/chalk_ruby/protos/chalk/server/v1/builder_pb.rb +60 -0
- data/lib/chalk_ruby/protos/chalk/server/v1/builder_services_pb.rb +50 -0
- data/lib/chalk_ruby/protos/chalk/server/v1/chart_pb.rb +26 -0
- data/lib/chalk_ruby/protos/chalk/server/v1/cli_pb.rb +23 -0
- data/lib/chalk_ruby/protos/chalk/server/v1/cli_services_pb.rb +26 -0
- data/lib/chalk_ruby/protos/chalk/server/v1/deploy_pb.rb +36 -0
- data/lib/chalk_ruby/protos/chalk/server/v1/deploy_services_pb.rb +32 -0
- data/lib/chalk_ruby/protos/chalk/server/v1/deployment_pb.rb +23 -0
- data/lib/chalk_ruby/protos/chalk/server/v1/environment_pb.rb +32 -0
- data/lib/chalk_ruby/protos/chalk/server/v1/flag_pb.rb +26 -0
- data/lib/chalk_ruby/protos/chalk/server/v1/flag_services_pb.rb +27 -0
- data/lib/chalk_ruby/protos/chalk/server/v1/graph_pb.rb +31 -0
- data/lib/chalk_ruby/protos/chalk/server/v1/graph_services_pb.rb +31 -0
- data/lib/chalk_ruby/protos/chalk/server/v1/kube_pb.rb +22 -0
- data/lib/chalk_ruby/protos/chalk/server/v1/kube_services_pb.rb +28 -0
- data/lib/chalk_ruby/protos/chalk/server/v1/link_pb.rb +28 -0
- data/lib/chalk_ruby/protos/chalk/server/v1/log_pb.rb +21 -0
- data/lib/chalk_ruby/protos/chalk/server/v1/metrics_pb.rb +23 -0
- data/lib/chalk_ruby/protos/chalk/server/v1/metrics_services_pb.rb +26 -0
- data/lib/chalk_ruby/protos/chalk/server/v1/monitoring_pb.rb +45 -0
- data/lib/chalk_ruby/protos/chalk/server/v1/monitoring_services_pb.rb +32 -0
- data/lib/chalk_ruby/protos/chalk/server/v1/named_query_pb.rb +27 -0
- data/lib/chalk_ruby/protos/chalk/server/v1/named_query_services_pb.rb +28 -0
- data/lib/chalk_ruby/protos/chalk/server/v1/pod_request_pb.rb +26 -0
- data/lib/chalk_ruby/protos/chalk/server/v1/status_pb.rb +31 -0
- data/lib/chalk_ruby/protos/chalk/server/v1/status_services_pb.rb +31 -0
- data/lib/chalk_ruby/protos/chalk/server/v1/team_pb.rb +78 -0
- data/lib/chalk_ruby/protos/chalk/server/v1/team_services_pb.rb +49 -0
- data/lib/chalk_ruby/protos/chalk/server/v1/timeserieschart_pb.rb +24 -0
- data/lib/chalk_ruby/protos/chalk/server/v1/topic_pb.rb +21 -0
- data/lib/chalk_ruby/protos/chalk/server/v1/topicpush_pb.rb +32 -0
- data/lib/chalk_ruby/protos/chalk/server/v1/topicpush_services_pb.rb +30 -0
- data/lib/chalk_ruby/protos/chalk/utils/v1/encoding_pb.rb +21 -0
- data/lib/chalk_ruby/protos/chalk/utils/v1/sensitive_pb.rb +20 -0
- data/lib/chalk_ruby/version.rb +1 -1
- metadata +113 -2
checksums.yaml
CHANGED
@@ -1,7 +1,7 @@
 ---
 SHA256:
-  metadata.gz:
-  data.tar.gz:
+  metadata.gz: 3891cc405d93658b67e829bd1d06e212e381cdfb9f50d89b5e41daa7fd9f3329
+  data.tar.gz: a1015fe9ad93731926243b4d1dcde8691d7d4c37a8f7c33ca499eac469445bcf
 SHA512:
-  metadata.gz:
-  data.tar.gz:
+  metadata.gz: 58371adc96eb738cf596e46b7078f201285ce8576105bb883272f972f72f79d44f28ab9aba86cc9e13a836d43f5be4b40d5c4588150ce27535201196149ccc11
+  data.tar.gz: 38c1a7f8152107f763537973141b43a6b2cd6393f15dc76797288bb45e3de035d58e0d9f8446280505af85b188e360e9a7878f1374ff668ad6f5c5ec0f2506e9
data/README.dev.md
ADDED
@@ -0,0 +1,8 @@
+## update the version number in the gemspec file
+vim lib/chalk_ruby/version.rb
+
+## build the gem, which pulls in the version from the above file
+gem build ./chalk_ruby.gemspec
+
+## push the gem to rubygems. Update the filename to be the gem you want to publish
+gem push ./chalk_ruby-0.1.4.gem
data/chalk_ruby.gemspec
CHANGED
@@ -38,6 +38,7 @@ Gem::Specification.new do |spec|
 
   spec.add_dependency 'faraday', ['>= 0.15', '< 3']
   spec.add_dependency 'faraday-net_http_persistent', ['>= 0.15', '< 3']
+  spec.add_dependency 'grpc', ['>=1.68.1', '< 2']
 
   spec.add_dependency 'multi_json', '~> 1.0'
  spec.add_dependency 'net-http-persistent'
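The only gemspec change is the new runtime dependency on the `grpc` gem, which the generated `*_services_pb.rb` stubs and the new `GrpcClient` require at load time. A minimal sketch of how a consuming project would pick this up through Bundler; the `chalk_ruby` version constraint below is an assumption for illustration, not part of the diff:

```ruby
# Gemfile (hypothetical consumer project)
source 'https://rubygems.org'

# Upgrading to 0.2.0 pulls in grpc (and, transitively, google-protobuf)
# alongside the existing faraday dependencies.
gem 'chalk_ruby', '~> 0.2.0'
```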
data/lib/chalk_ruby/config.rb
CHANGED
data/lib/chalk_ruby/defaults.rb
CHANGED
data/lib/chalk_ruby/grpc/auth_interceptor.rb
ADDED
@@ -0,0 +1,62 @@
+require 'grpc'
+require 'chalk_ruby/protos/chalk/server/v1/auth_pb'
+require 'chalk_ruby/protos/chalk/server/v1/auth_services_pb'
+
+module ChalkRuby
+  module Grpc
+    class AuthInterceptor < GRPC::ClientInterceptor
+      def initialize(auth_stub, client_id, client_secret, environment_id)
+        @auth_stub = auth_stub
+        @client_id = client_id
+        @client_secret = client_secret
+        @environment_id = environment_id
+        @token = nil
+      end
+
+      def request_response(request:, call:, method:, metadata:)
+        # If we haven't fetched a token yet or if you'd like to handle token expiration,
+        # this is where you'd refresh it. For now, let's assume a long-lived token.
+        if @token.nil?
+          response = @auth_stub.get_token(
+            Chalk::Server::V1::GetTokenRequest.new(
+              client_id: @client_id,
+              client_secret: @client_secret
+            )
+          )
+          @token = response.access_token
+        end
+
+
+        # Add the token to the request's metadata
+        metadata["authorization"] = "Bearer #{@token}"
+        metadata["x-chalk-deployment-type"] = "engine-grpc"
+        metadata["x-chalk-env-id"] = @environment_id
+
+        # Proceed with the original call
+        yield
+      end
+
+      # If you're using streaming calls, also consider overriding:
+      # - client_streamer
+      # - server_streamer
+      # - bidi_streamer
+      # following a similar pattern to insert metadata.
+    end
+
+    class ApiServerInterceptor < GRPC::ClientInterceptor
+      def initialize()
+      end
+
+      def request_response(request:, call:, method:, metadata:)
+        metadata["x-chalk-server"] = "go-api"
+        yield
+      end
+
+      # If you're using streaming calls, also consider overriding:
+      # - client_streamer
+      # - server_streamer
+      # - bidi_streamer
+      # following a similar pattern to insert metadata.
+    end
+  end
+end
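Both classes follow the standard `GRPC::ClientInterceptor` pattern: mutate the outgoing `metadata` hash, then `yield` to let the underlying call proceed. A minimal sketch of wiring `AuthInterceptor` into a stub by hand, mirroring what `GrpcClient` (below) does internally; the hostnames and environment value are placeholders, not taken from the diff:

```ruby
require 'grpc'
require 'chalk_ruby/grpc/auth_interceptor'
require 'chalk_ruby/protos/chalk/server/v1/auth_services_pb'
require 'chalk_ruby/protos/chalk/engine/v1/query_server_services_pb'

# The auth stub talks to the API server; the interceptor uses it to exchange
# client credentials for an access token on the first request.
auth_stub = Chalk::Server::V1::AuthService::Stub.new(
  'api.example-chalk-host.com:443', GRPC::Core::ChannelCredentials.new
)

interceptor = ChalkRuby::Grpc::AuthInterceptor.new(
  auth_stub, ENV['CHALK_CLIENT_ID'], ENV['CHALK_CLIENT_SECRET'], 'env-id-placeholder'
)

# Every call on this stub now carries authorization / x-chalk-env-id metadata.
query_stub = Chalk::Engine::V1::QueryService::Stub.new(
  'engine.example-chalk-host.com:443', GRPC::Core::ChannelCredentials.new,
  interceptors: [interceptor]
)
```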
data/lib/chalk_ruby/grpc_client.rb
ADDED
@@ -0,0 +1,340 @@
+require 'chalk_ruby/config'
+require 'grpc'
+
+module ChalkRuby
+
+  class GrpcClient
+    # Create a new client.
+    #
+    # @param client_id [String]
+    #   Chalk client ID.
+    #   If not provided, it will be read from the CHALK_CLIENT_ID environment variable.
+    #
+    # @param client_secret [String]
+    #   Chalk client secret.
+    #   If not provided, it will be read from the CHALK_CLIENT_SECRET environment variable.
+    #
+    # @param environment [String]
+    #   The Chalk environment id (not the name of the environment).
+    #   If not provided, it will be read from the CHALK_ACTIVE_ENVIRONMENT environment variable.
+    #
+    # @param query_server [String]
+    #   ChalkRuby query server
+    #
+    # @param api_server [String]
+    #   ChalkRuby API server
+    #
+    # @param additional_headers [Hash[String, String]]
+    #   Additional headers to be sent with every request. Typically not required.
+    #
+    # @return self
+
+    def self.create(
+      client_id = nil,
+      client_secret = nil,
+      environment = nil,
+      query_server = nil,
+      api_server = nil,
+      additional_headers = {}
+    )
+      config = Config.new(
+        client_id: client_id,
+        client_secret: client_secret,
+        environment: environment,
+        query_server: query_server,
+        api_server: api_server,
+        additional_headers: additional_headers
+      )
+      create_with_config(config)
+    end
+
+    # Create a new client providing only a ChalkRuby::Config object
+    #
+    # @param config [ChalkRuby::Config]
+    #
+    # @return self
+    #
+    def self.create_with_config(config)
+      new(config)
+    end
+
+    # Compute features values using online resolvers.
+    # See https://docs.chalk.ai/docs/query-basics for more information.
+    #
+    # @param input [Hash[String, any]] The features for which there are known values, mapped to those values.
+    #
+    # @param output [[String]] Outputs are the features that you'd like to compute from the inputs.
+    #   For example, `['user.age', 'user.name', 'user.email']`.
+    #
+    #   If an empty sequence, the output will be set to all features on the namespace
+    #   of the query. For example, if you pass as input `{"user.id": 1234}`, then the query
+    #   is defined on the `User` namespace, and all features on the `User` namespace
+    #   (excluding has-one and has-many relationships) will be used as outputs.
+    #
+    # @param now [DateTime?]
+    #   The time at which to evaluate the query. If not specified, the current time will be used.
+    #   This parameter is complex in the context of online_query since the online store
+    #   only stores the most recent value of an entity's features. If `now` is in the past,
+    #   it is extremely likely that `None` will be returned for cache-only features.
+    #
+    #   This parameter is primarily provided to support:
+    #   - controlling the time window for aggregations over cached has-many relationships
+    #   - controlling the time window for aggregations over has-many relationships loaded from an
+    #     external database
+    #
+    #   If you are trying to perform an exploratory analysis of past feature values, prefer `offline_query`.
+    #
+    # @param staleness [Hash[String, String]?]
+    #   Maximum staleness overrides for any output features or intermediate features.
+    #   See https://docs.chalk.ai/docs/query-caching for more information.
+    #
+    # @param tags [Hash[String, String]?]
+    #   The tags used to scope the resolvers.
+    #   See https://docs.chalk.ai/docs/resolver-tags for more information.
+    #
+    # @param branch [String?]
+    #   If specified, Chalk will route your request to the relevant branch.
+    #
+    # @param correlation_id [String?]
+    #   You can specify a correlation ID to be used in logs and web interfaces.
+    #   This should be globally unique, i.e. a `uuid` or similar. Logs generated
+    #   during the execution of your query will be tagged with this correlation id.
+    #
+    # @param query_name [String?]
+    #   The semantic name for the query you're making, for example, `"loan_application_model"`.
+    #   Typically, each query that you make from your application should have a name.
+    #   Chalk will present metrics and dashboard functionality grouped by 'query_name'.
+    #
+    # @param query_name_version [String?]
+    #   If query_name is specified, this specifies the version of the named query you're
+    #   making. This is only useful if you want your query to use a NamedQuery with a
+    #   specific name and a specific version. If a query_name has not been supplied, then
+    #   this parameter is ignored.
+    #
+    # @param meta [Hash[String, String]?]
+    #   Arbitrary `key:value` pairs to associate with a query.
+    #
+    # @param explain [Boolean?]
+    #   Log the query execution plan. Requests using `explain=true` will be slower
+    #   than requests using `explain=false`. If `"only"`, the query will not be executed,
+    #   and only the query plan will be returned.
+    #
+    #   If true, 'include_meta' will be set to true as well.
+    #
+    # @param include_meta [Boolean?]
+    #   Whether to include metadata about the query execution.
+    #
+    # @param store_plan_stages [Boolean?]
+    #   If `true`, the output of each of the query plan stages will be stored.
+    #   This option dramatically impacts the performance of the query,
+    #   so it should only be used for debugging.
+    #
+    # @option options [Float?] :timeout
+    #   Allows overriding default timeout for query operations (in seconds).
+    #
+    # @return [Hash[Symbol, String]]
+
+    def ping
+      query_service.ping(Chalk::Engine::V1::PingRequest.new(num: 1))
+    end
+
+    def query(
+      input:,
+      output:,
+      now: nil,
+      staleness: nil,
+      tags: nil,
+      branch: nil,
+      correlation_id: nil,
+      query_name: nil,
+      query_name_version: nil,
+      meta: nil,
+      explain: nil,
+      include_meta: nil,
+      store_plan_stages: nil,
+      timeout: nil
+    )
+      formatted_inputs = input.transform_values { |value| self.convert_to_protobuf_value(value) }
+
+      context = Chalk::Common::V1::OnlineQueryContext.new(
+        query_name: query_name,
+        query_name_version: query_name_version,
+      )
+
+      r = Chalk::Common::V1::OnlineQueryRequest.new(
+        inputs: formatted_inputs,
+        outputs: output.map { |o| Chalk::Common::V1::OutputExpr.new(feature_fqn: o) },
+        context: context
+      )
+
+      if timeout.nil?
+        query_service.online_query(r)
+      else
+        query_service.online_query(r, deadline: Time.now + timeout)
+      end
+    end
+
+    def get_token
+      Token.new(
+        api_server_request(
+          method: :post,
+          path: '/v1/oauth/token',
+          body: {
+            client_id: @config.client_id,
+            client_secret: @config.client_secret,
+            grant_type: 'client_credentials'
+          },
+          headers: get_unauthenticated_headers
+        )
+      )
+    end
+
+    def get_unauthenticated_headers
+      {
+        'Content-Type': 'application/json',
+        'Accept': 'application/json',
+        'X-Chalk-Env-Id': @config.environment
+      }.merge(@config.additional_headers)
+    end
+
+    # Initializes the ChalkRuby client. Generally, you should not need to call this directly.
+    # Instead, use ChalkRuby::Client.create or ChalkRuby::Client.create_with_config.
+    #
+    # @param chalk_config [ChalkRuby::Config]
+    #   A ChalkRuby::Config object which contains your CLIENT_ID and CLIENT_SECRET
+    #
+    # @option adapter [Object]
+    #   Adapter object used for the connection
+    #
+    # @option logger [Object]
+    #
+    # @option http_requester [Object]
+    #   object used for the connection
+    #
+    def initialize(chalk_config, opts = {})
+      @token = nil
+      @config = chalk_config
+      adapter = opts[:adapter] || Defaults::ADAPTER
+      logger = opts[:logger] || LoggerHelper.create
+      requester = opts[:http_requester] || Defaults::REQUESTER_CLASS.new(adapter: adapter, logger: logger)
+      @transporter = Http::HttpRequesterChalk.new(requester: requester)
+    end
+
+    private
+
+    def api_server_request(method:, path:, body:, headers:)
+      @transporter.send_request(
+        method: method,
+        host: @config.api_server,
+        path: path,
+        timeout: @config.api_timeout,
+        connect_timeout: @config.connect_timeout,
+        body: body,
+        headers: headers
+      )
+    end
+
+    def engine_request(method:, path:, body:, headers:, timeout: nil)
+      @transporter.send_request(
+        method: method,
+        host: query_server_host,
+        path: path,
+        timeout: timeout || @config.query_timeout,
+        connect_timeout: @config.connect_timeout,
+        body: body,
+        headers: headers
+      )
+    end
+
+    def auth_service
+      if @auth_service.nil?
+        @auth_service = Chalk::Server::V1::AuthService::Stub.new(@config.api_server, GRPC::Core::ChannelCredentials.new(), interceptors: [api_server_interceptor])
+      end
+
+      @auth_service
+    end
+
+    def api_server_interceptor
+      if @api_server_interceptor.nil?
+        @api_server_interceptor = ChalkRuby::Grpc::ApiServerInterceptor::new()
+      end
+
+      @api_server_interceptor
+    end
+
+    def engine_interceptor
+      if @engine_interceptor.nil?
+        @engine_interceptor = ChalkRuby::Grpc::AuthInterceptor.new(auth_service, @config.client_id, @config.client_secret, active_environment)
+      end
+
+      @engine_interceptor
+    end
+
+    def query_service
+      if @query_service.nil?
+        @query_service = Chalk::Engine::V1::QueryService::Stub.new(query_server_host, GRPC::Core::ChannelCredentials.new(), interceptors: [engine_interceptor])
+      end
+
+      @query_service
+    end
+
+    def query_server_host
+      explicit = @config.query_server
+      ret =
+        if explicit.nil?
+          tok = valid_token
+          found = @config.environment || tok.environment
+          ret = found.nil? ? Defaults::QUERY_SERVER : tok.engines[found] || Defaults::QUERY_SERVER
+        else
+          ret = explicit
+        end
+
+      if ret.start_with?('http://') or ret.start_with?('https://')
+        ret = ret.sub('https://', '').sub("http://", '')
+      end
+
+      if ret.end_with?(':443') or ret.end_with?(':80')
+        ret
+      else
+        ret + ':443'
+      end
+    end
+
+    def active_environment
+      @config.environment || valid_token.environment
+    end
+
+    def valid_token
+      if @token.nil? || @token.expired?
+        t = get_token
+        @token = t
+        t
+      else
+        @token
+      end
+    end
+
+    def convert_to_protobuf_value(value)
+      case value
+      when NilClass
+        Google::Protobuf::Value.new(null_value: :NULL_VALUE)
+      when Float
+        Google::Protobuf::Value.new(number_value: value)
+      when Integer
+        Google::Protobuf::Value.new(number_value: value)
+      when String
+        Google::Protobuf::Value.new(string_value: value)
+      when TrueClass, FalseClass
+        Google::Protobuf::Value.new(bool_value: value)
+      when Hash
+        struct_value = Google::Protobuf::Struct.new(fields: value.transform_values { |v| convert_to_protobuf_value(v) })
+        Google::Protobuf::Value.new(struct_value: struct_value)
+      when Array
+        list_value = Google::Protobuf::ListValue.new(values: value.map { |v| convert_to_protobuf_value(v) })
+        Google::Protobuf::Value.new(list_value: list_value)
+      else
+        raise "Unsupported type: #{value.class}"
+      end
+    end
+  end
+end
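This file is the new gRPC entry point for the gem: `create` builds a `Config`, lazily constructs `AuthService`/`QueryService` stubs with the interceptors above, and `query` converts plain Ruby inputs into `Google::Protobuf::Value`s before issuing an `OnlineQueryRequest`. A short usage sketch under the API documented above; the feature names and the 5-second timeout are illustrative, not taken from the diff:

```ruby
require 'chalk_ruby/grpc_client'

# Per the doc comments, credentials fall back to CHALK_CLIENT_ID /
# CHALK_CLIENT_SECRET / CHALK_ACTIVE_ENVIRONMENT when the positional
# arguments are left nil.
client = ChalkRuby::GrpcClient.create(
  ENV['CHALK_CLIENT_ID'],
  ENV['CHALK_CLIENT_SECRET'],
  ENV['CHALK_ACTIVE_ENVIRONMENT']
)

response = client.query(
  input:   { 'user.id' => 1234 },       # known feature values
  output:  ['user.age', 'user.name'],   # features to compute
  timeout: 5.0                          # seconds, applied as a gRPC deadline
)
```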
data/lib/chalk_ruby/http/http_requester.rb
CHANGED
@@ -38,7 +38,8 @@ module ChalkRuby
       timeout:,
       connect_timeout:
     )
-
+      normalized_host = host.is_a?(String) ? host : host[:api_server]
+      connection = connection(normalized_host)
       connection.options.timeout = timeout
       connection.options.open_timeout = connect_timeout
 
data/lib/chalk_ruby/protos/chalk/aggregate/v1/backfill_pb.rb
ADDED
@@ -0,0 +1,28 @@
+# frozen_string_literal: true
+# Generated by the protocol buffer compiler. DO NOT EDIT!
+# source: chalk/aggregate/v1/backfill.proto
+
+require 'google/protobuf'
+
+require 'chalk_ruby/protos/chalk/aggregate/v1/timeseries_pb'
+require 'google/protobuf/duration_pb'
+require 'google/protobuf/timestamp_pb'
+
+
+descriptor_data = "\n!chalk/aggregate/v1/backfill.proto\x12\x12\x63halk.aggregate.v1\x1a#chalk/aggregate/v1/timeseries.proto\x1a\x1egoogle/protobuf/duration.proto\x1a\x1fgoogle/protobuf/timestamp.proto\"\x8c\x02\n\x1d\x41ggregateBackfillCostEstimate\x12\x1f\n\x0bmax_buckets\x18\x01 \x01(\x03R\nmaxBuckets\x12)\n\x10\x65xpected_buckets\x18\x02 \x01(\x03R\x0f\x65xpectedBuckets\x12%\n\x0e\x65xpected_bytes\x18\x03 \x01(\x03R\rexpectedBytes\x12\x32\n\x15\x65xpected_storage_cost\x18\x04 \x01(\x01R\x13\x65xpectedStorageCost\x12\x44\n\x10\x65xpected_runtime\x18\x05 \x01(\x0b\x32\x19.google.protobuf.DurationR\x0f\x65xpectedRuntime\"\xf8\x02\n\x1b\x41ggregateBackfillUserParams\x12\x1a\n\x08\x66\x65\x61tures\x18\x01 \x03(\tR\x08\x66\x65\x61tures\x12\x1f\n\x08resolver\x18\x02 \x01(\tH\x00R\x08resolver\x88\x01\x01\x12;\n\x15timestamp_column_name\x18\x03 \x01(\tB\x02\x18\x01H\x01R\x13timestampColumnName\x88\x01\x01\x12@\n\x0blower_bound\x18\x04 \x01(\x0b\x32\x1a.google.protobuf.TimestampH\x02R\nlowerBound\x88\x01\x01\x12@\n\x0bupper_bound\x18\x05 \x01(\x0b\x32\x1a.google.protobuf.TimestampH\x03R\nupperBound\x88\x01\x01\x12\x14\n\x05\x65xact\x18\x06 \x01(\x08R\x05\x65xactB\x0b\n\t_resolverB\x18\n\x16_timestamp_column_nameB\x0e\n\x0c_lower_boundB\x0e\n\x0c_upper_bound\"\xe5\x03\n\x11\x41ggregateBackfill\x12?\n\x06series\x18\x01 \x03(\x0b\x32\'.chalk.aggregate.v1.AggregateTimeSeriesR\x06series\x12\x1a\n\x08resolver\x18\x02 \x01(\tR\x08resolver\x12)\n\x10\x64\x61tetime_feature\x18\x03 \x01(\tR\x0f\x64\x61tetimeFeature\x12\x42\n\x0f\x62ucket_duration\x18\x04 \x01(\x0b\x32\x19.google.protobuf.DurationR\x0e\x62ucketDuration\x12/\n\x13\x66ilters_description\x18\x05 \x01(\tR\x12\x66iltersDescription\x12\x19\n\x08group_by\x18\x06 \x03(\tR\x07groupBy\x12>\n\rmax_retention\x18\x07 \x01(\x0b\x32\x19.google.protobuf.DurationR\x0cmaxRetention\x12;\n\x0blower_bound\x18\x08 \x01(\x0b\x32\x1a.google.protobuf.TimestampR\nlowerBound\x12;\n\x0bupper_bound\x18\t \x01(\x0b\x32\x1a.google.protobuf.TimestampR\nupperBound\"\xb5\x01\n!AggregateBackfillWithCostEstimate\x12\x41\n\x08\x62\x61\x63kfill\x18\x01 \x01(\x0b\x32%.chalk.aggregate.v1.AggregateBackfillR\x08\x62\x61\x63kfill\x12M\n\x08\x65stimate\x18\x02 \x01(\x0b\x32\x31.chalk.aggregate.v1.AggregateBackfillCostEstimateR\x08\x65stimate\"\xa5\x04\n\x14\x41ggregateBackfillJob\x12\x0e\n\x02id\x18\x01 \x01(\tR\x02id\x12%\n\x0e\x65nvironment_id\x18\x02 \x01(\tR\renvironmentId\x12\x1f\n\x08resolver\x18\x03 \x01(\tH\x00R\x08resolver\x88\x01\x01\x12\x1a\n\x08\x66\x65\x61tures\x18\x04 \x03(\tR\x08\x66\x65\x61tures\x12\x1e\n\x08\x61gent_id\x18\x05 \x01(\tH\x01R\x07\x61gentId\x88\x01\x01\x12(\n\rdeployment_id\x18\x06 \x01(\tH\x02R\x0c\x64\x65ploymentId\x88\x01\x01\x12\x39\n\ncreated_at\x18\x07 \x01(\x0b\x32\x1a.google.protobuf.TimestampR\tcreatedAt\x12\x39\n\nupdated_at\x18\x08 \x01(\x0b\x32\x1a.google.protobuf.TimestampR\tupdatedAt\x12\x1c\n\tresolvers\x18\t \x03(\tR\tresolvers\x12@\n\x1a\x63ron_aggregate_backfill_id\x18\n \x01(\tH\x03R\x17\x63ronAggregateBackfillId\x88\x01\x01\x12 \n\tplan_hash\x18\x0b \x01(\tH\x04R\x08planHash\x88\x01\x01\x42\x0b\n\t_resolverB\x0b\n\t_agent_idB\x10\n\x0e_deployment_idB\x1d\n\x1b_cron_aggregate_backfill_idB\x0c\n\n_plan_hash\"\xdc\x02\n\x15\x43ronAggregateBackfill\x12\x0e\n\x02id\x18\x01 \x01(\tR\x02id\x12%\n\x0e\x65nvironment_id\x18\x02 \x01(\tR\renvironmentId\x12#\n\rdeployment_id\x18\x03 \x01(\tR\x0c\x64\x65ploymentId\x12\x1a\n\x08schedule\x18\x04 \x01(\tR\x08schedule\x12\x1b\n\tplan_hash\x18\x05 \x01(\tR\x08planHash\x12\x1a\n\x08\x66\x65\x61tures\x18\x08 \x03(\tR\x08\x66\x65\x61tures\x12\x1c\n\tresolvers\x18\t \x03(\tR\tresolvers\x12\x39\n\ncreated_at\x18\x06 \x01(\x0b\x32\x1a.google.protobuf.TimestampR\tcreatedAt\x12\x39\n\nupdated_at\x18\x07 \x01(\x0b\x32\x1a.google.protobuf.TimestampR\tupdatedAtB\xab\x01\n\x16\x63om.chalk.aggregate.v1B\rBackfillProtoP\x01Z\x18\x61ggregate/v1;aggregatev1\xa2\x02\x03\x43\x41X\xaa\x02\x12\x43halk.Aggregate.V1\xca\x02\x12\x43halk\\Aggregate\\V1\xe2\x02\x1e\x43halk\\Aggregate\\V1\\GPBMetadata\xea\x02\x14\x43halk::Aggregate::V1b\x06proto3"
+
+pool = Google::Protobuf::DescriptorPool.generated_pool
+pool.add_serialized_file(descriptor_data)
+
+module Chalk
+  module Aggregate
+    module V1
+      AggregateBackfillCostEstimate = ::Google::Protobuf::DescriptorPool.generated_pool.lookup("chalk.aggregate.v1.AggregateBackfillCostEstimate").msgclass
+      AggregateBackfillUserParams = ::Google::Protobuf::DescriptorPool.generated_pool.lookup("chalk.aggregate.v1.AggregateBackfillUserParams").msgclass
+      AggregateBackfill = ::Google::Protobuf::DescriptorPool.generated_pool.lookup("chalk.aggregate.v1.AggregateBackfill").msgclass
+      AggregateBackfillWithCostEstimate = ::Google::Protobuf::DescriptorPool.generated_pool.lookup("chalk.aggregate.v1.AggregateBackfillWithCostEstimate").msgclass
+      AggregateBackfillJob = ::Google::Protobuf::DescriptorPool.generated_pool.lookup("chalk.aggregate.v1.AggregateBackfillJob").msgclass
+      CronAggregateBackfill = ::Google::Protobuf::DescriptorPool.generated_pool.lookup("chalk.aggregate.v1.CronAggregateBackfill").msgclass
+    end
+  end
+end
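Each of these generated files registers its serialized descriptor with the global `Google::Protobuf` descriptor pool and exposes the messages as plain Ruby classes. A tiny sketch of constructing one of them; the field values are arbitrary examples, not taken from the diff:

```ruby
require 'chalk_ruby/protos/chalk/aggregate/v1/backfill_pb'

# AggregateBackfillUserParams carries the feature/resolver selection for a
# planned backfill (see the fields encoded in the descriptor above).
params = Chalk::Aggregate::V1::AggregateBackfillUserParams.new(
  features: ['user.txn_sum'],
  resolver: 'get_transactions',
  exact: false
)

puts params.to_proto.bytesize  # serialized message size in bytes
```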
data/lib/chalk_ruby/protos/chalk/aggregate/v1/service_pb.rb
ADDED
@@ -0,0 +1,36 @@
+# frozen_string_literal: true
+# Generated by the protocol buffer compiler. DO NOT EDIT!
+# source: chalk/aggregate/v1/service.proto
+
+require 'google/protobuf'
+
+require 'chalk_ruby/protos/chalk/aggregate/v1/backfill_pb'
+require 'chalk_ruby/protos/chalk/aggregate/v1/timeseries_pb'
+require 'chalk_ruby/protos/chalk/auth/v1/audit_pb'
+require 'chalk_ruby/protos/chalk/auth/v1/permissions_pb'
+
+
+descriptor_data = "\n chalk/aggregate/v1/service.proto\x12\x12\x63halk.aggregate.v1\x1a!chalk/aggregate/v1/backfill.proto\x1a#chalk/aggregate/v1/timeseries.proto\x1a\x19\x63halk/auth/v1/audit.proto\x1a\x1f\x63halk/auth/v1/permissions.proto\"g\n\x1cPlanAggregateBackfillRequest\x12G\n\x06params\x18\x01 \x01(\x0b\x32/.chalk.aggregate.v1.AggregateBackfillUserParamsR\x06params\"\x8f\x02\n\x1dPlanAggregateBackfillResponse\x12M\n\x08\x65stimate\x18\x02 \x01(\x0b\x32\x31.chalk.aggregate.v1.AggregateBackfillCostEstimateR\x08\x65stimate\x12\x16\n\x06\x65rrors\x18\x04 \x03(\tR\x06\x65rrors\x12S\n\tbackfills\x18\x06 \x03(\x0b\x32\x35.chalk.aggregate.v1.AggregateBackfillWithCostEstimateR\tbackfills\x12\x32\n\x15\x61ggregate_backfill_id\x18\x07 \x01(\tR\x13\x61ggregateBackfillId\"9\n\x14GetAggregatesRequest\x12!\n\x0c\x66or_features\x18\x01 \x03(\tR\x0b\x66orFeatures\"p\n\x15GetAggregatesResponse\x12?\n\x06series\x18\x01 \x03(\x0b\x32\'.chalk.aggregate.v1.AggregateTimeSeriesR\x06series\x12\x16\n\x06\x65rrors\x18\x02 \x03(\tR\x06\x65rrors\"\x7f\n\x1fGetAggregateBackfillJobsRequest\x12\x14\n\x05limit\x18\x01 \x01(\x05R\x05limit\x12\x16\n\x06\x63ursor\x18\x02 \x01(\tR\x06\x63ursor\x12 \n\tplan_hash\x18\x03 \x01(\tH\x00R\x08planHash\x88\x01\x01\x42\x0c\n\n_plan_hash\"x\n GetAggregateBackfillJobsResponse\x12<\n\x04jobs\x18\x01 \x03(\x0b\x32(.chalk.aggregate.v1.AggregateBackfillJobR\x04jobs\x12\x16\n\x06\x63ursor\x18\x02 \x01(\tR\x06\x63ursor\"T\n\x1eGetAggregateBackfillJobRequest\x12\x32\n\x15\x61ggregate_backfill_id\x18\x01 \x01(\tR\x13\x61ggregateBackfillId\"]\n\x1fGetAggregateBackfillJobResponse\x12:\n\x03job\x18\x01 \x01(\x0b\x32(.chalk.aggregate.v1.AggregateBackfillJobR\x03job\"^\n\x1fGetCronAggregateBackfillRequest\x12;\n\x1a\x63ron_aggregate_backfill_id\x18\x01 \x01(\tR\x17\x63ronAggregateBackfillId\"\x85\x01\n GetCronAggregateBackfillResponse\x12\x61\n\x17\x63ron_aggregate_backfill\x18\x01 \x01(\x0b\x32).chalk.aggregate.v1.CronAggregateBackfillR\x15\x63ronAggregateBackfill\"(\n&GetActiveCronAggregateBackfillsRequest\"\xd0\x01\n\"CronAggregateBackfillWithLatestRun\x12\x61\n\x17\x63ron_aggregate_backfill\x18\x01 \x01(\x0b\x32).chalk.aggregate.v1.CronAggregateBackfillR\x15\x63ronAggregateBackfill\x12G\n\nlatest_job\x18\x02 \x01(\x0b\x32(.chalk.aggregate.v1.AggregateBackfillJobR\tlatestJob\"\x9b\x01\n\'GetActiveCronAggregateBackfillsResponse\x12p\n\x18\x63ron_aggregate_backfills\x18\x01 \x03(\x0b\x32\x36.chalk.aggregate.v1.CronAggregateBackfillWithLatestRunR\x16\x63ronAggregateBackfills2\xe5\x06\n\x10\x41ggregateService\x12\x8a\x01\n\x15PlanAggregateBackfill\x12\x30.chalk.aggregate.v1.PlanAggregateBackfillRequest\x1a\x31.chalk.aggregate.v1.PlanAggregateBackfillResponse\"\x0c\x90\x02\x01\x80}\x0c\x8a\xd3\x0e\x02\x08\x02\x12r\n\rGetAggregates\x12(.chalk.aggregate.v1.GetAggregatesRequest\x1a).chalk.aggregate.v1.GetAggregatesResponse\"\x0c\x90\x02\x01\x80}\x0b\x8a\xd3\x0e\x02\x08\x02\x12\x8d\x01\n\x18GetAggregateBackfillJobs\x12\x33.chalk.aggregate.v1.GetAggregateBackfillJobsRequest\x1a\x34.chalk.aggregate.v1.GetAggregateBackfillJobsResponse\"\x06\x90\x02\x01\x80}\x0b\x12\x8a\x01\n\x17GetAggregateBackfillJob\x12\x32.chalk.aggregate.v1.GetAggregateBackfillJobRequest\x1a\x33.chalk.aggregate.v1.GetAggregateBackfillJobResponse\"\x06\x90\x02\x01\x80}\x0b\x12\x8d\x01\n\x18GetCronAggregateBackfill\x12\x33.chalk.aggregate.v1.GetCronAggregateBackfillRequest\x1a\x34.chalk.aggregate.v1.GetCronAggregateBackfillResponse\"\x06\x90\x02\x01\x80}\x0b\x12\xa2\x01\n\x1fGetActiveCronAggregateBackfills\x12:.chalk.aggregate.v1.GetActiveCronAggregateBackfillsRequest\x1a;.chalk.aggregate.v1.GetActiveCronAggregateBackfillsResponse\"\x06\x90\x02\x01\x80}\x0b\x42\xaa\x01\n\x16\x63om.chalk.aggregate.v1B\x0cServiceProtoP\x01Z\x18\x61ggregate/v1;aggregatev1\xa2\x02\x03\x43\x41X\xaa\x02\x12\x43halk.Aggregate.V1\xca\x02\x12\x43halk\\Aggregate\\V1\xe2\x02\x1e\x43halk\\Aggregate\\V1\\GPBMetadata\xea\x02\x14\x43halk::Aggregate::V1b\x06proto3"
+
+pool = Google::Protobuf::DescriptorPool.generated_pool
+pool.add_serialized_file(descriptor_data)
+
+module Chalk
+  module Aggregate
+    module V1
+      PlanAggregateBackfillRequest = ::Google::Protobuf::DescriptorPool.generated_pool.lookup("chalk.aggregate.v1.PlanAggregateBackfillRequest").msgclass
+      PlanAggregateBackfillResponse = ::Google::Protobuf::DescriptorPool.generated_pool.lookup("chalk.aggregate.v1.PlanAggregateBackfillResponse").msgclass
+      GetAggregatesRequest = ::Google::Protobuf::DescriptorPool.generated_pool.lookup("chalk.aggregate.v1.GetAggregatesRequest").msgclass
+      GetAggregatesResponse = ::Google::Protobuf::DescriptorPool.generated_pool.lookup("chalk.aggregate.v1.GetAggregatesResponse").msgclass
+      GetAggregateBackfillJobsRequest = ::Google::Protobuf::DescriptorPool.generated_pool.lookup("chalk.aggregate.v1.GetAggregateBackfillJobsRequest").msgclass
+      GetAggregateBackfillJobsResponse = ::Google::Protobuf::DescriptorPool.generated_pool.lookup("chalk.aggregate.v1.GetAggregateBackfillJobsResponse").msgclass
+      GetAggregateBackfillJobRequest = ::Google::Protobuf::DescriptorPool.generated_pool.lookup("chalk.aggregate.v1.GetAggregateBackfillJobRequest").msgclass
+      GetAggregateBackfillJobResponse = ::Google::Protobuf::DescriptorPool.generated_pool.lookup("chalk.aggregate.v1.GetAggregateBackfillJobResponse").msgclass
+      GetCronAggregateBackfillRequest = ::Google::Protobuf::DescriptorPool.generated_pool.lookup("chalk.aggregate.v1.GetCronAggregateBackfillRequest").msgclass
+      GetCronAggregateBackfillResponse = ::Google::Protobuf::DescriptorPool.generated_pool.lookup("chalk.aggregate.v1.GetCronAggregateBackfillResponse").msgclass
+      GetActiveCronAggregateBackfillsRequest = ::Google::Protobuf::DescriptorPool.generated_pool.lookup("chalk.aggregate.v1.GetActiveCronAggregateBackfillsRequest").msgclass
+      CronAggregateBackfillWithLatestRun = ::Google::Protobuf::DescriptorPool.generated_pool.lookup("chalk.aggregate.v1.CronAggregateBackfillWithLatestRun").msgclass
+      GetActiveCronAggregateBackfillsResponse = ::Google::Protobuf::DescriptorPool.generated_pool.lookup("chalk.aggregate.v1.GetActiveCronAggregateBackfillsResponse").msgclass
+    end
+  end
+end
data/lib/chalk_ruby/protos/chalk/aggregate/v1/service_services_pb.rb
ADDED
@@ -0,0 +1,42 @@
+# Generated by the protocol buffer compiler. DO NOT EDIT!
+# Source: chalk/aggregate/v1/service.proto for package 'Chalk.Aggregate.V1'
+
+require 'grpc'
+require 'chalk_ruby/protos/chalk/aggregate/v1/service_pb'
+
+module Chalk
+  module Aggregate
+    module V1
+      module AggregateService
+        class Service
+
+          include ::GRPC::GenericService
+
+          self.marshal_class_method = :encode
+          self.unmarshal_class_method = :decode
+          self.service_name = 'chalk.aggregate.v1.AggregateService'
+
+          # PlanAggregateBackfill determines the estimated resources needed to backfill
+          # an aggregate.
+          #
+          # This method is a duplicate of the PlanAggregateBackfill method
+          # in the query_server.proto file. We should remove the query_server.proto method
+          # and move that request to this service instead.
+          # buf:lint:ignore RPC_REQUEST_RESPONSE_UNIQUE
+          rpc :PlanAggregateBackfill, ::Chalk::Aggregate::V1::PlanAggregateBackfillRequest, ::Chalk::Aggregate::V1::PlanAggregateBackfillResponse
+          # This method is a duplicate of the PlanAggregateBackfill method
+          # in the query_server.proto file. We should remove the query_server.proto method
+          # and move that request to this service instead.
+          # buf:lint:ignore RPC_REQUEST_RESPONSE_UNIQUE
+          rpc :GetAggregates, ::Chalk::Aggregate::V1::GetAggregatesRequest, ::Chalk::Aggregate::V1::GetAggregatesResponse
+          rpc :GetAggregateBackfillJobs, ::Chalk::Aggregate::V1::GetAggregateBackfillJobsRequest, ::Chalk::Aggregate::V1::GetAggregateBackfillJobsResponse
+          rpc :GetAggregateBackfillJob, ::Chalk::Aggregate::V1::GetAggregateBackfillJobRequest, ::Chalk::Aggregate::V1::GetAggregateBackfillJobResponse
+          rpc :GetCronAggregateBackfill, ::Chalk::Aggregate::V1::GetCronAggregateBackfillRequest, ::Chalk::Aggregate::V1::GetCronAggregateBackfillResponse
+          rpc :GetActiveCronAggregateBackfills, ::Chalk::Aggregate::V1::GetActiveCronAggregateBackfillsRequest, ::Chalk::Aggregate::V1::GetActiveCronAggregateBackfillsResponse
+        end
+
+        Stub = Service.rpc_stub_class
+      end
+    end
+  end
+end
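The generated service class exposes a `Stub` (via `Service.rpc_stub_class`) whose methods are the snake_case forms of the rpc names above. A hedged sketch of pointing it at a Chalk API server, mirroring how `GrpcClient` builds its own stubs; the host, credentials, and feature name are placeholders, and real calls would also need the auth metadata injected by `ChalkRuby::Grpc::AuthInterceptor`:

```ruby
require 'grpc'
require 'chalk_ruby/protos/chalk/aggregate/v1/service_services_pb'

stub = Chalk::Aggregate::V1::AggregateService::Stub.new(
  'api.example-chalk-host.com:443',
  GRPC::Core::ChannelCredentials.new
)

# Fetch aggregate time series definitions for a feature (illustrative name).
resp = stub.get_aggregates(
  Chalk::Aggregate::V1::GetAggregatesRequest.new(for_features: ['user.txn_sum'])
)
resp.series.each { |s| puts s.namespace }
```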
data/lib/chalk_ruby/protos/chalk/aggregate/v1/timeseries_pb.rb
ADDED
@@ -0,0 +1,22 @@
+# frozen_string_literal: true
+# Generated by the protocol buffer compiler. DO NOT EDIT!
+# source: chalk/aggregate/v1/timeseries.proto
+
+require 'google/protobuf'
+
+require 'google/protobuf/duration_pb'
+
+
+descriptor_data = "\n#chalk/aggregate/v1/timeseries.proto\x12\x12\x63halk.aggregate.v1\x1a\x1egoogle/protobuf/duration.proto\"\x92\x02\n\x17\x41ggregateTimeSeriesRule\x12 \n\x0b\x61ggregation\x18\x01 \x01(\tR\x0b\x61ggregation\x12\x42\n\x0f\x62ucket_duration\x18\x02 \x01(\x0b\x32\x19.google.protobuf.DurationR\x0e\x62ucketDuration\x12-\n\x12\x64\x65pendent_features\x18\x03 \x03(\tR\x11\x64\x65pendentFeatures\x12\x37\n\tretention\x18\x04 \x01(\x0b\x32\x19.google.protobuf.DurationR\tretention\x12)\n\x10\x64\x61tetime_feature\x18\x05 \x01(\tR\x0f\x64\x61tetimeFeature\"\x8c\x02\n\x13\x41ggregateTimeSeries\x12\x1c\n\tnamespace\x18\x01 \x01(\tR\tnamespace\x12!\n\x0c\x61ggregate_on\x18\x02 \x01(\tR\x0b\x61ggregateOn\x12\x19\n\x08group_by\x18\x03 \x03(\tR\x07groupBy\x12\x41\n\x05rules\x18\x05 \x03(\x0b\x32+.chalk.aggregate.v1.AggregateTimeSeriesRuleR\x05rules\x12/\n\x13\x66ilters_description\x18\x06 \x01(\tR\x12\x66iltersDescription\x12%\n\x0e\x62ucket_feature\x18\x07 \x01(\tR\rbucketFeatureB\xad\x01\n\x16\x63om.chalk.aggregate.v1B\x0fTimeseriesProtoP\x01Z\x18\x61ggregate/v1;aggregatev1\xa2\x02\x03\x43\x41X\xaa\x02\x12\x43halk.Aggregate.V1\xca\x02\x12\x43halk\\Aggregate\\V1\xe2\x02\x1e\x43halk\\Aggregate\\V1\\GPBMetadata\xea\x02\x14\x43halk::Aggregate::V1b\x06proto3"
+
+pool = Google::Protobuf::DescriptorPool.generated_pool
+pool.add_serialized_file(descriptor_data)
+
+module Chalk
+  module Aggregate
+    module V1
+      AggregateTimeSeriesRule = ::Google::Protobuf::DescriptorPool.generated_pool.lookup("chalk.aggregate.v1.AggregateTimeSeriesRule").msgclass
+      AggregateTimeSeries = ::Google::Protobuf::DescriptorPool.generated_pool.lookup("chalk.aggregate.v1.AggregateTimeSeries").msgclass
+    end
+  end
+end