@daocloud-proto/mcamel-kafka 0.0.1-1
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/cluster.pb.ts +79 -0
- package/common.pb.ts +24 -0
- package/fetch.pb.ts +232 -0
- package/kafka.pb.ts +294 -0
- package/package.json +12 -0
- package/version.pb.ts +27 -0
package/cluster.pb.ts
ADDED
|
@@ -0,0 +1,79 @@
|
|
|
1
|
+
/* eslint-disable */
// @ts-nocheck
/*
* This file is a generated Typescript file for GRPC Gateway, DO NOT MODIFY
*/

import * as CommonCommon from "./common.pb"
import * as fm from "./fetch.pb"

// Sort direction accepted by GetWorkspaceList.
export enum GetWorkspaceListReqSortDir {
  ASC = "ASC",
  DESC = "DESC",
}

// Whether the insight (observability) agent is installed on a cluster.
export enum GetInsightAgentStatusRespInsightAgentStatus {
  NotInstall = "NotInstall",
  Install = "Install",
}

// Paginated workspace listing request; all fields are optional query params.
export type GetWorkspaceListReq = {
  page?: number
  pageSize?: number
  sortDir?: GetWorkspaceListReqSortDir
  sortBy?: string
  searchKey?: string
}

// One workspace entry: numeric id plus human-readable alias.
export type GetWorkspaceListRespItem = {
  workspaceId?: number
  alias?: string
}

export type GetWorkspaceListResp = {
  items?: GetWorkspaceListRespItem[]
  pagination?: CommonCommon.Pagination
}

export type GetClusterListReq = {
  workspaceId?: number
}

// Cluster names belonging to a workspace.
export type GetClusterListResp = {
  items?: string[]
  pagination?: CommonCommon.Pagination
}

export type GetClusterNamespaceListReq = {
  workspaceId?: number
  cluster?: string
}

// Namespace names within the requested cluster.
export type GetClusterNamespaceListResp = {
  items?: string[]
  pagination?: CommonCommon.Pagination
}

export type GetInsightAgentStatusReq = {
  cluster?: string
  mcamelType?: string
}

export type GetInsightAgentStatusResp = {
  status?: GetInsightAgentStatusRespInsightAgentStatus
}

/**
 * Generated gRPC-gateway client for the Cluster service.
 * Each static method builds the REST path from `req`, moves the remaining
 * fields into the query string via fm.renderURLSearchParams, and delegates
 * to fm.fetchReq. Pass `initReq` (e.g. { pathPrefix, headers }) to customize
 * the underlying fetch call.
 */
export class Cluster {
  // GET /apis/mcamel.io/kafka/v1alpha1/{workspaceId}/clusters
  static GetClusterList(req: GetClusterListReq, initReq?: fm.InitReq): Promise<GetClusterListResp> {
    return fm.fetchReq<GetClusterListReq, GetClusterListResp>(`/apis/mcamel.io/kafka/v1alpha1/${req["workspaceId"]}/clusters?${fm.renderURLSearchParams(req, ["workspaceId"])}`, {...initReq, method: "GET"})
  }
  // GET /apis/mcamel.io/kafka/v1alpha1/{workspaceId}/{cluster}/namespaces
  static GetClusterNamespaceList(req: GetClusterNamespaceListReq, initReq?: fm.InitReq): Promise<GetClusterNamespaceListResp> {
    return fm.fetchReq<GetClusterNamespaceListReq, GetClusterNamespaceListResp>(`/apis/mcamel.io/kafka/v1alpha1/${req["workspaceId"]}/${req["cluster"]}/namespaces?${fm.renderURLSearchParams(req, ["workspaceId", "cluster"])}`, {...initReq, method: "GET"})
  }
  // GET /apis/mcamel.io/kafka/v1alpha1/workspaces — all req fields go to the query string.
  static GetWorkspaceList(req: GetWorkspaceListReq, initReq?: fm.InitReq): Promise<GetWorkspaceListResp> {
    return fm.fetchReq<GetWorkspaceListReq, GetWorkspaceListResp>(`/apis/mcamel.io/kafka/v1alpha1/workspaces?${fm.renderURLSearchParams(req, [])}`, {...initReq, method: "GET"})
  }
  // GET /apis/mcamel.io/kafka/v1alpha1/{cluster}/insight/status
  static GetInsightAgentStatus(req: GetInsightAgentStatusReq, initReq?: fm.InitReq): Promise<GetInsightAgentStatusResp> {
    return fm.fetchReq<GetInsightAgentStatusReq, GetInsightAgentStatusResp>(`/apis/mcamel.io/kafka/v1alpha1/${req["cluster"]}/insight/status?${fm.renderURLSearchParams(req, ["cluster"])}`, {...initReq, method: "GET"})
  }
}
|
package/common.pb.ts
ADDED
|
@@ -0,0 +1,24 @@
|
|
|
1
|
+
/* eslint-disable */
// @ts-nocheck
/*
* This file is a generated Typescript file for GRPC Gateway, DO NOT MODIFY
*/

// Shared sort direction for paginated list requests.
export enum PageInfoReqSortDir {
  ASC = "ASC",
  DESC = "DESC",
}

// Pagination metadata echoed back in list responses.
export type Pagination = {
  total?: number
  page?: number
  pageSize?: number
  pages?: number
}

// Common paging/sorting parameters for list requests.
export type PageInfoReq = {
  page?: number
  pageSize?: number
  sortDir?: PageInfoReqSortDir
  sortBy?: string
}
|
package/fetch.pb.ts
ADDED
|
@@ -0,0 +1,232 @@
|
|
|
1
|
+
/* eslint-disable */
|
|
2
|
+
// @ts-nocheck
|
|
3
|
+
/*
|
|
4
|
+
* This file is a generated Typescript file for GRPC Gateway, DO NOT MODIFY
|
|
5
|
+
*/
|
|
6
|
+
|
|
7
|
+
export interface InitReq extends RequestInit {
|
|
8
|
+
pathPrefix?: string
|
|
9
|
+
}
|
|
10
|
+
|
|
11
|
+
export function fetchReq<I, O>(path: string, init?: InitReq): Promise<O> {
|
|
12
|
+
const {pathPrefix, ...req} = init || {}
|
|
13
|
+
|
|
14
|
+
const url = pathPrefix ? `${pathPrefix}${path}` : path
|
|
15
|
+
|
|
16
|
+
return fetch(url, req).then(r => r.json().then((body: O) => {
|
|
17
|
+
if (!r.ok) { throw body; }
|
|
18
|
+
return body;
|
|
19
|
+
})) as Promise<O>
|
|
20
|
+
}
|
|
21
|
+
|
|
22
|
+
// NotifyStreamEntityArrival is a callback that will be called on streaming entity arrival
export type NotifyStreamEntityArrival<T> = (resp: T) => void

/**
 * fetchStreamingRequest handles a grpc-gateway server-side streaming call.
 * It takes a NotifyStreamEntityArrival callback that is invoked once per
 * entity as it arrives; the returned promise resolves (with no value) only
 * after the whole stream has been consumed.
 *
 * NOTE(review): the original generated comment claimed entities are returned
 * as an array; the code visibly returns undefined — only the callback sees
 * the entities.
 **/
export async function fetchStreamingRequest<S, R>(path: string, callback?: NotifyStreamEntityArrival<R>, init?: InitReq) {
  // Split off the gateway-specific pathPrefix; everything else goes to fetch.
  const {pathPrefix, ...req} = init || {}
  const url = pathPrefix ?`${pathPrefix}${path}` : path
  const result = await fetch(url, req)
  // needs to use the .ok to check the status of HTTP status code
  // http other than 200 will not throw an error, instead the .ok will become false.
  // see https://developer.mozilla.org/en-US/docs/Web/API/Fetch_API/Using_Fetch#
  if (!result.ok) {
    // Error responses carry a JSON body; surface its error.message if present.
    const resp = await result.json()
    const errMsg = resp.error && resp.error.message ? resp.error.message : ""
    throw new Error(errMsg)
  }

  if (!result.body) {
    throw new Error("response doesnt have a body")
  }

  // Pipeline: raw bytes -> UTF-8 text -> newline-delimited JSON entities -> callback sink.
  await result.body
    .pipeThrough(new TextDecoderStream())
    .pipeThrough<R>(getNewLineDelimitedJSONDecodingStream<R>())
    .pipeTo(getNotifyEntityArrivalSink((e: R) => {
      if (callback) {
        callback(e)
      }
    }))

  // wait for the streaming to finish and return the success respond
  return
}
|
|
59
|
+
|
|
60
|
+
/**
|
|
61
|
+
* JSONStringStreamController represents the transform controller that's able to transform the incoming
|
|
62
|
+
* new line delimited json content stream into entities and able to push the entity to the down stream
|
|
63
|
+
*/
|
|
64
|
+
interface JSONStringStreamController<T> extends TransformStreamDefaultController {
|
|
65
|
+
buf?: string
|
|
66
|
+
pos?: number
|
|
67
|
+
enqueue: (s: T) => void
|
|
68
|
+
}
|
|
69
|
+
|
|
70
|
+
/**
|
|
71
|
+
* getNewLineDelimitedJSONDecodingStream returns a TransformStream that's able to handle new line delimited json stream content into parsed entities
|
|
72
|
+
*/
|
|
73
|
+
function getNewLineDelimitedJSONDecodingStream<T>(): TransformStream<string, T> {
|
|
74
|
+
return new TransformStream({
|
|
75
|
+
start(controller: JSONStringStreamController<T>) {
|
|
76
|
+
controller.buf = ''
|
|
77
|
+
controller.pos = 0
|
|
78
|
+
},
|
|
79
|
+
|
|
80
|
+
transform(chunk: string, controller: JSONStringStreamController<T>) {
|
|
81
|
+
if (controller.buf === undefined) {
|
|
82
|
+
controller.buf = ''
|
|
83
|
+
}
|
|
84
|
+
if (controller.pos === undefined) {
|
|
85
|
+
controller.pos = 0
|
|
86
|
+
}
|
|
87
|
+
controller.buf += chunk
|
|
88
|
+
while (controller.pos < controller.buf.length) {
|
|
89
|
+
if (controller.buf[controller.pos] === '\n') {
|
|
90
|
+
const line = controller.buf.substring(0, controller.pos)
|
|
91
|
+
const response = JSON.parse(line)
|
|
92
|
+
controller.enqueue(response.result)
|
|
93
|
+
controller.buf = controller.buf.substring(controller.pos + 1)
|
|
94
|
+
controller.pos = 0
|
|
95
|
+
} else {
|
|
96
|
+
++controller.pos
|
|
97
|
+
}
|
|
98
|
+
}
|
|
99
|
+
}
|
|
100
|
+
})
|
|
101
|
+
|
|
102
|
+
}
|
|
103
|
+
|
|
104
|
+
/**
|
|
105
|
+
* getNotifyEntityArrivalSink takes the NotifyStreamEntityArrival callback and return
|
|
106
|
+
* a sink that will call the callback on entity arrival
|
|
107
|
+
* @param notifyCallback
|
|
108
|
+
*/
|
|
109
|
+
function getNotifyEntityArrivalSink<T>(notifyCallback: NotifyStreamEntityArrival<T>) {
|
|
110
|
+
return new WritableStream<T>({
|
|
111
|
+
write(entity: T) {
|
|
112
|
+
notifyCallback(entity)
|
|
113
|
+
}
|
|
114
|
+
})
|
|
115
|
+
}
|
|
116
|
+
|
|
117
|
+
type Primitive = string | boolean | number;
|
|
118
|
+
type RequestPayload = Record<string, unknown>;
|
|
119
|
+
type FlattenedRequestPayload = Record<string, Primitive | Array<Primitive>>;
|
|
120
|
+
|
|
121
|
+
/**
|
|
122
|
+
* Checks if given value is a plain object
|
|
123
|
+
* Logic copied and adapted from below source:
|
|
124
|
+
* https://github.com/char0n/ramda-adjunct/blob/master/src/isPlainObj.js
|
|
125
|
+
* @param {unknown} value
|
|
126
|
+
* @return {boolean}
|
|
127
|
+
*/
|
|
128
|
+
function isPlainObject(value: unknown): boolean {
|
|
129
|
+
const isObject =
|
|
130
|
+
Object.prototype.toString.call(value).slice(8, -1) === "Object";
|
|
131
|
+
const isObjLike = value !== null && isObject;
|
|
132
|
+
|
|
133
|
+
if (!isObjLike || !isObject) {
|
|
134
|
+
return false;
|
|
135
|
+
}
|
|
136
|
+
|
|
137
|
+
const proto = Object.getPrototypeOf(value);
|
|
138
|
+
|
|
139
|
+
const hasObjectConstructor =
|
|
140
|
+
typeof proto === "object" &&
|
|
141
|
+
proto.constructor === Object.prototype.constructor;
|
|
142
|
+
|
|
143
|
+
return hasObjectConstructor;
|
|
144
|
+
}
|
|
145
|
+
|
|
146
|
+
/**
|
|
147
|
+
* Checks if given value is of a primitive type
|
|
148
|
+
* @param {unknown} value
|
|
149
|
+
* @return {boolean}
|
|
150
|
+
*/
|
|
151
|
+
function isPrimitive(value: unknown): boolean {
|
|
152
|
+
return ["string", "number", "boolean"].some(t => typeof value === t);
|
|
153
|
+
}
|
|
154
|
+
|
|
155
|
+
/**
|
|
156
|
+
* Checks if given primitive is zero-value
|
|
157
|
+
* @param {Primitive} value
|
|
158
|
+
* @return {boolean}
|
|
159
|
+
*/
|
|
160
|
+
function isZeroValuePrimitive(value: Primitive): boolean {
|
|
161
|
+
return value === false || value === 0 || value === "";
|
|
162
|
+
}
|
|
163
|
+
|
|
164
|
+
/**
|
|
165
|
+
* Flattens a deeply nested request payload and returns an object
|
|
166
|
+
* with only primitive values and non-empty array of primitive values
|
|
167
|
+
* as per https://github.com/googleapis/googleapis/blob/master/google/api/http.proto
|
|
168
|
+
* @param {RequestPayload} requestPayload
|
|
169
|
+
* @param {String} path
|
|
170
|
+
* @return {FlattenedRequestPayload>}
|
|
171
|
+
*/
|
|
172
|
+
function flattenRequestPayload<T extends RequestPayload>(
|
|
173
|
+
requestPayload: T,
|
|
174
|
+
path: string = ""
|
|
175
|
+
): FlattenedRequestPayload {
|
|
176
|
+
return Object.keys(requestPayload).reduce(
|
|
177
|
+
(acc: T, key: string): T => {
|
|
178
|
+
const value = requestPayload[key];
|
|
179
|
+
const newPath = path ? [path, key].join(".") : key;
|
|
180
|
+
|
|
181
|
+
const isNonEmptyPrimitiveArray =
|
|
182
|
+
Array.isArray(value) &&
|
|
183
|
+
value.every(v => isPrimitive(v)) &&
|
|
184
|
+
value.length > 0;
|
|
185
|
+
|
|
186
|
+
const isNonZeroValuePrimitive =
|
|
187
|
+
isPrimitive(value) && !isZeroValuePrimitive(value as Primitive);
|
|
188
|
+
|
|
189
|
+
let objectToMerge = {};
|
|
190
|
+
|
|
191
|
+
if (isPlainObject(value)) {
|
|
192
|
+
objectToMerge = flattenRequestPayload(value as RequestPayload, newPath);
|
|
193
|
+
} else if (isNonZeroValuePrimitive || isNonEmptyPrimitiveArray) {
|
|
194
|
+
objectToMerge = { [newPath]: value };
|
|
195
|
+
}
|
|
196
|
+
|
|
197
|
+
return { ...acc, ...objectToMerge };
|
|
198
|
+
},
|
|
199
|
+
{} as T
|
|
200
|
+
) as FlattenedRequestPayload;
|
|
201
|
+
}
|
|
202
|
+
|
|
203
|
+
/**
|
|
204
|
+
* Renders a deeply nested request payload into a string of URL search
|
|
205
|
+
* parameters by first flattening the request payload and then removing keys
|
|
206
|
+
* which are already present in the URL path.
|
|
207
|
+
* @param {RequestPayload} requestPayload
|
|
208
|
+
* @param {string[]} urlPathParams
|
|
209
|
+
* @return {string}
|
|
210
|
+
*/
|
|
211
|
+
export function renderURLSearchParams<T extends RequestPayload>(
|
|
212
|
+
requestPayload: T,
|
|
213
|
+
urlPathParams: string[] = []
|
|
214
|
+
): string {
|
|
215
|
+
const flattenedRequestPayload = flattenRequestPayload(requestPayload);
|
|
216
|
+
|
|
217
|
+
const urlSearchParams = Object.keys(flattenedRequestPayload).reduce(
|
|
218
|
+
(acc: string[][], key: string): string[][] => {
|
|
219
|
+
// key should not be present in the url path as a parameter
|
|
220
|
+
const value = flattenedRequestPayload[key];
|
|
221
|
+
if (urlPathParams.find(f => f === key)) {
|
|
222
|
+
return acc;
|
|
223
|
+
}
|
|
224
|
+
return Array.isArray(value)
|
|
225
|
+
? [...acc, ...value.map(m => [key, m.toString()])]
|
|
226
|
+
: (acc = [...acc, [key, value.toString()]]);
|
|
227
|
+
},
|
|
228
|
+
[] as string[][]
|
|
229
|
+
);
|
|
230
|
+
|
|
231
|
+
return new URLSearchParams(urlSearchParams).toString();
|
|
232
|
+
}
|
package/kafka.pb.ts
ADDED
|
@@ -0,0 +1,294 @@
|
|
|
1
|
+
/* eslint-disable */
// @ts-nocheck
/*
* This file is a generated Typescript file for GRPC Gateway, DO NOT MODIFY
*/

import * as CommonCommon from "./common.pb"
import * as fm from "./fetch.pb"

// Helper: all keys of T except K, forced to undefined.
type Absent<T, K extends keyof T> = { [k in Exclude<keyof T, K>]?: undefined };
// Helper modeling a proto `oneof`: either no member is set, or exactly one is.
type OneOf<T> =
  | { [k in keyof T]?: undefined }
  | (
    keyof T extends infer K ?
      (K extends string & keyof T ? { [k in K]: T[K] } & Absent<T, K>
        : never)
    : never);

// Lifecycle status of a Kafka cluster instance.
export enum Status {
  Failed = "Failed",
  Running = "Running",
  Creating = "Creating",
}

export enum GetKafkaListReqSortDir {
  ASC = "ASC",
  DESC = "DESC",
}

// Kubernetes Service exposure type for the Kafka instance.
export enum CreateKafkaReqServiceType {
  ClusterIP = "ClusterIP",
  NodePort = "NodePort",
  LoadBalancer = "LoadBalancer",
}

// Whether a parameter select accepts one value or several.
export enum GetKafkaParamRespSelectSelectType {
  Single = "Single",
  Multiple = "Multiple",
}

export enum GetKafkaNodeListReqSortDir {
  ASC = "ASC",
  DESC = "DESC",
}

// Kubernetes pod phase of a Kafka/Zookeeper/Manager pod.
export enum GetKafkaNodeListRespPodStatus {
  PHASE_UNSPECIFIED = "PHASE_UNSPECIFIED",
  Unknown = "Unknown",
  Pending = "Pending",
  Running = "Running",
  Succeeded = "Succeeded",
  Failed = "Failed",
}

// Role of a pod within the Kafka deployment.
export enum GetKafkaNodeListRespNodeType {
  Kafka = "Kafka",
  Zookeeper = "Zookeeper",
  Manager = "Manager",
}

// Paginated listing of Kafka instances within a workspace.
export type GetKafkaListReq = {
  page?: number
  pageSize?: number
  sortDir?: GetKafkaListReqSortDir
  sortBy?: string
  searchKey?: string
  workspaceId?: number
}

export type GetKafkaParamReq = {
  cluster?: string
}

// One Service port mapping for the Kafka instance.
export type CreateKafkaReqPorts = {
  name?: string
  protocol?: string
  port?: number
  targetPort?: number
  nodePort?: number
}

// Full specification for creating (or updating) a Kafka instance.
// Also reused as KafkaClusterItem.spec in read responses.
export type CreateKafkaReq = {
  cluster?: string
  namespace?: string
  name?: string
  describe?: string
  version?: string
  kafkaReplicas?: number
  storageClassName?: string
  storageCapacity?: string
  serviceType?: CreateKafkaReqServiceType
  serviceAnnotations?: {[key: string]: string}
  ports?: CreateKafkaReqPorts[]
  cpuRequest?: string
  cpuLimit?: string
  memoryRequest?: string
  memoryLimit?: string
  isOpenManager?: boolean
  managerCpuRequest?: string
  managerCpuLimit?: string
  managerMemoryRequest?: string
  managerMemoryLimit?: string
  managerUser?: string
  managerPass?: string
  zookeeperReplicas?: number
  kafkaConf?: string
  zookeeperConf?: string
}

export type UpdateKafkaParamsResp = {
  message?: string
}

export type GetKafkaParamRespSelectDataStringValue = {
  value?: string
}

export type GetKafkaParamRespSelectDataResourceValue = {
  cpuRequest?: string
  cpuLimit?: string
  memoryRequest?: string
  memoryLimit?: string
}

export type GetKafkaParamRespSelectDataIntValue = {
  value?: number
}


type BaseGetKafkaParamRespSelectData = {
}

// A selectable parameter value: exactly one of string / resource / int.
export type GetKafkaParamRespSelectData = BaseGetKafkaParamRespSelectData
  & OneOf<{ sValue: GetKafkaParamRespSelectDataStringValue; rValue: GetKafkaParamRespSelectDataResourceValue; iValue: GetKafkaParamRespSelectDataIntValue }>

export type GetKafkaParamRespSelect = {
  selectType?: GetKafkaParamRespSelectSelectType
  data?: GetKafkaParamRespSelectData[]
}

// Available creation-form options (versions, replica counts, resources, …).
export type GetKafkaParamResp = {
  version?: GetKafkaParamRespSelect
  replicas?: GetKafkaParamRespSelect
  resource?: GetKafkaParamRespSelect
  storage?: GetKafkaParamRespSelect
  kafkaConf?: GetKafkaParamRespSelect
  zookeeperConf?: GetKafkaParamRespSelect
}

export type CreateKafkaResp = {
  message?: string
}

export type GetKafkaOperatorVersionListReq = {
}

export type GetKafkaOperatorVersionListRespGetKafkaOperatorVersionListData = {
  cluster?: string
  namespace?: string
  version?: string
}

export type GetKafkaOperatorVersionListResp = {
  items?: GetKafkaOperatorVersionListRespGetKafkaOperatorVersionListData[]
  pagination?: CommonCommon.Pagination
}

// Identifies one Kafka instance by cluster/namespace/name.
export type DeleteKafkaReq = {
  cluster?: string
  namespace?: string
  name?: string
}

// Batch deletion: a list of single-instance delete requests.
export type DeleteKafkasReq = {
  data?: DeleteKafkaReq[]
}

export type DeleteKafkaResp = {
  message?: string
}

export type DeleteKafkasResp = {
  message?: string
}

// Paginated listing of the pods backing one Kafka instance.
export type GetKafkaNodeListReq = {
  cluster?: string
  namespace?: string
  name?: string
  page?: number
  pageSize?: number
  sortDir?: GetKafkaNodeListReqSortDir
  sortBy?: string
  searchKey?: string
}

// Per-pod runtime information (usage/limit units not specified here —
// presumably cores and bytes; confirm against the server proto).
export type GetKafkaNodeListRespData = {
  podName?: string
  status?: GetKafkaNodeListRespPodStatus
  ip?: string
  restart?: number
  cpuUsage?: number
  cpuLimit?: number
  memoryUsage?: number
  memoryLimit?: number
  createTimestamp?: string
  nodeType?: GetKafkaNodeListRespNodeType
}

export type GetKafkaNodeListResp = {
  items?: GetKafkaNodeListRespData[]
  pagination?: CommonCommon.Pagination
}

export type GetKafkaGrafanaAddrReq = {
  cluster?: string
  namespace?: string
  name?: string
}

export type GetKafkaGrafanaAddrResp = {
  data?: string
}

export type GetKafkaReq = {
  cluster?: string
  namespace?: string
  name?: string
}

export type GetKafkaResp = {
  data?: KafkaClusterItem
}

export type GetKafkaListResp = {
  items?: KafkaClusterItem[]
  pagination?: CommonCommon.Pagination
}

// Observed state of a Kafka instance.
export type KafkaClusterItemStatus = {
  status?: Status
  podsAreReadyNum?: number
  webManagerAddr?: string
  clusterIPs?: string[]
  serviceAddr?: string
}

// Kubernetes-style object metadata.
export type KafkaClusterItemMetadata = {
  annotations?: {[key: string]: string}
  creationTimestamp?: string
  name?: string
  namespace?: string
}

// Kubernetes-style custom resource describing one Kafka instance.
export type KafkaClusterItem = {
  apiVersion?: string
  kind?: string
  metadata?: KafkaClusterItemMetadata
  spec?: CreateKafkaReq
  status?: KafkaClusterItemStatus
}
|
|
262
|
+
|
|
263
|
+
/**
 * Generated gRPC-gateway client for the Kafka service.
 * Each static method interpolates path parameters from `req`, renders the
 * remaining fields as the query string (GET) or JSON body (POST/PUT), and
 * delegates to fm.fetchReq. Pass `initReq` (e.g. { pathPrefix, headers })
 * to customize the underlying fetch call.
 */
export class Kafka {
  // GET /apis/mcamel.io/kafka/v1alpha1/{workspaceId}/kafkas
  static GetKafkaList(req: GetKafkaListReq, initReq?: fm.InitReq): Promise<GetKafkaListResp> {
    return fm.fetchReq<GetKafkaListReq, GetKafkaListResp>(`/apis/mcamel.io/kafka/v1alpha1/${req["workspaceId"]}/kafkas?${fm.renderURLSearchParams(req, ["workspaceId"])}`, {...initReq, method: "GET"})
  }
  // GET /apis/mcamel.io/kafka/v1alpha1/kafka-operator/versions
  static GetKafkaOperatorVersionList(req: GetKafkaOperatorVersionListReq, initReq?: fm.InitReq): Promise<GetKafkaOperatorVersionListResp> {
    return fm.fetchReq<GetKafkaOperatorVersionListReq, GetKafkaOperatorVersionListResp>(`/apis/mcamel.io/kafka/v1alpha1/kafka-operator/versions?${fm.renderURLSearchParams(req, [])}`, {...initReq, method: "GET"})
  }
  // GET /apis/mcamel.io/kafka/v1alpha1/kafka/{cluster}/{namespace}/{name}
  static GetKafka(req: GetKafkaReq, initReq?: fm.InitReq): Promise<GetKafkaResp> {
    return fm.fetchReq<GetKafkaReq, GetKafkaResp>(`/apis/mcamel.io/kafka/v1alpha1/kafka/${req["cluster"]}/${req["namespace"]}/${req["name"]}?${fm.renderURLSearchParams(req, ["cluster", "namespace", "name"])}`, {...initReq, method: "GET"})
  }
  // GET /apis/mcamel.io/kafka/v1alpha1/kafka-params/{cluster} — creation-form options.
  static GetKafkaParam(req: GetKafkaParamReq, initReq?: fm.InitReq): Promise<GetKafkaParamResp> {
    return fm.fetchReq<GetKafkaParamReq, GetKafkaParamResp>(`/apis/mcamel.io/kafka/v1alpha1/kafka-params/${req["cluster"]}?${fm.renderURLSearchParams(req, ["cluster"])}`, {...initReq, method: "GET"})
  }
  // GET /apis/mcamel.io/kafka/v1alpha1/kafka/{cluster}/{namespace}/{name}/nodes
  static GetKafkaNodeList(req: GetKafkaNodeListReq, initReq?: fm.InitReq): Promise<GetKafkaNodeListResp> {
    return fm.fetchReq<GetKafkaNodeListReq, GetKafkaNodeListResp>(`/apis/mcamel.io/kafka/v1alpha1/kafka/${req["cluster"]}/${req["namespace"]}/${req["name"]}/nodes?${fm.renderURLSearchParams(req, ["cluster", "namespace", "name"])}`, {...initReq, method: "GET"})
  }
  // GET /apis/mcamel.io/kafka/v1alpha1/kafka/{cluster}/{namespace}/{name}/grafana
  static GetKafkaGrafanaAddr(req: GetKafkaGrafanaAddrReq, initReq?: fm.InitReq): Promise<GetKafkaGrafanaAddrResp> {
    return fm.fetchReq<GetKafkaGrafanaAddrReq, GetKafkaGrafanaAddrResp>(`/apis/mcamel.io/kafka/v1alpha1/kafka/${req["cluster"]}/${req["namespace"]}/${req["name"]}/grafana?${fm.renderURLSearchParams(req, ["cluster", "namespace", "name"])}`, {...initReq, method: "GET"})
  }
  // POST /apis/mcamel.io/kafka/v1alpha1/kafka — whole req is sent as the JSON body.
  static CreateKafka(req: CreateKafkaReq, initReq?: fm.InitReq): Promise<CreateKafkaResp> {
    return fm.fetchReq<CreateKafkaReq, CreateKafkaResp>(`/apis/mcamel.io/kafka/v1alpha1/kafka`, {...initReq, method: "POST", body: JSON.stringify(req)})
  }
  // PUT /apis/mcamel.io/kafka/v1alpha1/kafka/{cluster}/{namespace}/{name}/params
  // Note: reuses CreateKafkaReq as the update payload (generated this way).
  static UpdateKafkaParams(req: CreateKafkaReq, initReq?: fm.InitReq): Promise<UpdateKafkaParamsResp> {
    return fm.fetchReq<CreateKafkaReq, UpdateKafkaParamsResp>(`/apis/mcamel.io/kafka/v1alpha1/kafka/${req["cluster"]}/${req["namespace"]}/${req["name"]}/params`, {...initReq, method: "PUT", body: JSON.stringify(req)})
  }
  // DELETE /apis/mcamel.io/kafka/v1alpha1/kafka/{cluster}/{namespace}/{name}
  static DeleteKafka(req: DeleteKafkaReq, initReq?: fm.InitReq): Promise<DeleteKafkaResp> {
    return fm.fetchReq<DeleteKafkaReq, DeleteKafkaResp>(`/apis/mcamel.io/kafka/v1alpha1/kafka/${req["cluster"]}/${req["namespace"]}/${req["name"]}`, {...initReq, method: "DELETE"})
  }
  // POST /apis/mcamel.io/kafka/v1alpha1/kafkas — batch delete via JSON body.
  static DeleteKafkas(req: DeleteKafkasReq, initReq?: fm.InitReq): Promise<DeleteKafkasResp> {
    return fm.fetchReq<DeleteKafkasReq, DeleteKafkasResp>(`/apis/mcamel.io/kafka/v1alpha1/kafkas`, {...initReq, method: "POST", body: JSON.stringify(req)})
  }
}
|
package/package.json
ADDED
package/version.pb.ts
ADDED
|
@@ -0,0 +1,27 @@
|
|
|
1
|
+
/* eslint-disable */
// @ts-nocheck
/*
* This file is a generated Typescript file for GRPC Gateway, DO NOT MODIFY
*/

import * as fm from "./fetch.pb"
// Generic status envelope carried by version responses.
export type CommonReply = {
  code?: number
  msg?: string
}

// Build/version information of the backend service.
export type GetVersionReply = {
  commonReply?: CommonReply
  gitCommit?: string
  gitVersion?: string
  buildTime?: string
}

export type Empty = {
}

/**
 * Generated gRPC-gateway client for the Version service.
 */
export class Version {
  // GET /apis/mcamel.io/kafka/v1alpha1/kafka/version
  static Get(req: Empty, initReq?: fm.InitReq): Promise<GetVersionReply> {
    return fm.fetchReq<Empty, GetVersionReply>(`/apis/mcamel.io/kafka/v1alpha1/kafka/version?${fm.renderURLSearchParams(req, [])}`, {...initReq, method: "GET"})
  }
}
|