macrocosmos 1.2.2 → 1.2.4
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/dist/generated/apex/v1/apex.d.ts +440 -718
- package/dist/generated/apex/v1/apex.js +2924 -582
- package/dist/generated/billing/v1/billing.d.ts +71 -94
- package/dist/generated/billing/v1/billing.js +328 -80
- package/dist/generated/google/protobuf/timestamp.d.ts +122 -0
- package/dist/generated/google/protobuf/timestamp.js +92 -0
- package/dist/generated/gravity/v1/gravity.d.ts +378 -562
- package/dist/generated/gravity/v1/gravity.js +2360 -457
- package/dist/generated/sn13/v1/sn13_validator.d.ts +91 -114
- package/dist/generated/sn13/v1/sn13_validator.js +421 -96
- package/dist/lib/BaseClient.js +5 -5
- package/dist/lib/apex/Client.d.ts +8 -13
- package/dist/lib/apex/Client.js +14 -24
- package/dist/lib/billing/Client.d.ts +4 -17
- package/dist/lib/billing/Client.js +7 -21
- package/dist/lib/gravity/Client.d.ts +14 -22
- package/dist/lib/gravity/Client.js +13 -27
- package/dist/lib/sn13/Client.d.ts +4 -20
- package/dist/lib/sn13/Client.js +8 -22
- package/dist/lib/util.types.d.ts +1 -0
- package/dist/lib/util.types.js +3 -0
- package/new_version.txt +1 -1
- package/old_version.txt +1 -1
- package/package.json +7 -3
- package/tsconfig.eslint.json +1 -1
- package/eslint.config.cjs +0 -33
|
@@ -1,591 +1,407 @@
|
|
|
1
|
-
|
|
2
|
-
|
|
3
|
-
|
|
4
|
-
|
|
5
|
-
|
|
6
|
-
|
|
7
|
-
|
|
8
|
-
|
|
1
|
+
import { BinaryReader, BinaryWriter } from "@bufbuild/protobuf/wire";
|
|
2
|
+
import { type CallOptions, ChannelCredentials, Client, type ClientOptions, type ClientUnaryCall, type handleUnaryCall, Metadata, type ServiceError, type UntypedServiceImplementation } from "@grpc/grpc-js";
|
|
3
|
+
export declare const protobufPackage = "gravity.v1";
|
|
4
|
+
/** Crawler is a single crawler workflow that registers a single job (platform/topic) on SN13's dynamic desirability engine */
|
|
5
|
+
export interface Crawler {
|
|
6
|
+
/** crawler_id: the ID of the crawler */
|
|
7
|
+
crawlerId: string;
|
|
8
|
+
/** criteria: the contents of the job and the notification details */
|
|
9
|
+
criteria?: CrawlerCriteria | undefined;
|
|
10
|
+
/** start_time: the time the crawler was created */
|
|
11
|
+
startTime?: Date | undefined;
|
|
12
|
+
/** deregistration_time: the time the crawler was deregistered */
|
|
13
|
+
deregistrationTime?: Date | undefined;
|
|
14
|
+
/** archive_time: the time the crawler was archived */
|
|
15
|
+
archiveTime?: Date | undefined;
|
|
16
|
+
/** state: the current state of the crawler */
|
|
17
|
+
state?: CrawlerState | undefined;
|
|
18
|
+
/** dataset_workflows: the IDs of the dataset workflows that are associated with the crawler */
|
|
19
|
+
datasetWorkflows: string[];
|
|
9
20
|
}
|
|
10
|
-
|
|
11
|
-
|
|
12
|
-
|
|
13
|
-
|
|
14
|
-
|
|
21
|
+
/** CrawlerCriteria is the contents of the job and the notification details */
|
|
22
|
+
export interface CrawlerCriteria {
|
|
23
|
+
/** platform: the platform of the job ('x' or 'reddit') */
|
|
24
|
+
platform: string;
|
|
25
|
+
/** topic: the topic of the job (e.g. '#ai' for X, 'r/ai' for Reddit) */
|
|
26
|
+
topic: string;
|
|
27
|
+
/** notification: the details of the notification to be sent to the user */
|
|
28
|
+
notification?: CrawlerNotification | undefined;
|
|
29
|
+
/** mock: Used for testing purposes (optional, defaults to false) */
|
|
30
|
+
mock: boolean;
|
|
15
31
|
}
|
|
16
|
-
|
|
17
|
-
|
|
18
|
-
|
|
32
|
+
/** CrawlerNotification is the details of the notification to be sent to the user */
|
|
33
|
+
export interface CrawlerNotification {
|
|
34
|
+
/** to: the email address of the user */
|
|
35
|
+
to: string;
|
|
36
|
+
/** link: the redirect link in the email where the user can view the dataset */
|
|
37
|
+
link: string;
|
|
19
38
|
}
|
|
20
|
-
|
|
21
|
-
|
|
22
|
-
|
|
23
|
-
|
|
39
|
+
/** HfRepo is a single Hugging Face repository that contains data for a crawler */
|
|
40
|
+
export interface HfRepo {
|
|
41
|
+
/** repo_name: the name of the Hugging Face repository */
|
|
42
|
+
repoName: string;
|
|
43
|
+
/** row_count: the number of rows in the repository for the crawler criteria */
|
|
44
|
+
rowCount: number;
|
|
45
|
+
/** last_update: the last recorded time the repository was updated */
|
|
46
|
+
lastUpdate: string;
|
|
24
47
|
}
|
|
25
|
-
|
|
26
|
-
|
|
27
|
-
|
|
28
|
-
|
|
29
|
-
|
|
48
|
+
/** CrawlerState is the current state of the crawler */
|
|
49
|
+
export interface CrawlerState {
|
|
50
|
+
/**
|
|
51
|
+
* status: the current status of the crawler
|
|
52
|
+
* "Pending" -- Crawler is pending submission to the SN13 Validator
|
|
53
|
+
* "Submitted" -- Crawler is submitted to the SN13 Validator
|
|
54
|
+
* "Running" -- Crawler is running (we got the first update)
|
|
55
|
+
* "Completed" -- Crawler is completed (timer expired)
|
|
56
|
+
* "Cancelled" -- Crawler is cancelled by user via cancellation of workflow
|
|
57
|
+
* "Archived" -- Crawler is archived (now read-only i.e. no new dataset)
|
|
58
|
+
* "Failed" -- Crawler failed to run
|
|
59
|
+
*/
|
|
60
|
+
status: string;
|
|
61
|
+
/** bytes_collected: the estimated number of bytes collected by the crawler */
|
|
62
|
+
bytesCollected: number;
|
|
63
|
+
/** records_collected: the estimated number of records collected by the crawler */
|
|
64
|
+
recordsCollected: number;
|
|
65
|
+
/** repos: the Hugging Face repositories that contain data for a crawler */
|
|
66
|
+
repos: HfRepo[];
|
|
30
67
|
}
|
|
31
|
-
|
|
32
|
-
|
|
33
|
-
|
|
34
|
-
|
|
35
|
-
|
|
36
|
-
|
|
37
|
-
|
|
68
|
+
/** GravityTaskState is the current state of a gravity task */
|
|
69
|
+
export interface GravityTaskState {
|
|
70
|
+
/** gravity_task_id: the ID of the gravity task */
|
|
71
|
+
gravityTaskId: string;
|
|
72
|
+
/** name: the name given by the user of the gravity task */
|
|
73
|
+
name: string;
|
|
74
|
+
/** status: the current status of the gravity task */
|
|
75
|
+
status: string;
|
|
76
|
+
/** start_time: the time the gravity task was created */
|
|
77
|
+
startTime?: Date | undefined;
|
|
78
|
+
/** crawler_ids: the IDs of the crawler workflows that are associated with the gravity task */
|
|
79
|
+
crawlerIds: string[];
|
|
80
|
+
/** crawler_workflows: the crawler workflows that are associated with the gravity task */
|
|
81
|
+
crawlerWorkflows: Crawler[];
|
|
38
82
|
}
|
|
39
|
-
|
|
40
|
-
|
|
41
|
-
|
|
83
|
+
/** GetGravityTasksRequest is the request message for listing gravity tasks for a user */
|
|
84
|
+
export interface GetGravityTasksRequest {
|
|
85
|
+
/** gravity_task_id: the ID of the gravity task (optional, if not provided, all gravity tasks for the user will be returned) */
|
|
86
|
+
gravityTaskId?: string | undefined;
|
|
87
|
+
/** include_crawlers: whether to include the crawler states in the response */
|
|
88
|
+
includeCrawlers?: boolean | undefined;
|
|
42
89
|
}
|
|
43
|
-
|
|
44
|
-
|
|
90
|
+
/** GetGravityTasksResponse is the response message for listing gravity tasks for a user */
|
|
91
|
+
export interface GetGravityTasksResponse {
|
|
92
|
+
/** gravity_task_states: the current states of the gravity tasks */
|
|
93
|
+
gravityTaskStates: GravityTaskState[];
|
|
45
94
|
}
|
|
46
|
-
|
|
47
|
-
|
|
48
|
-
|
|
95
|
+
/** GravityTask defines a crawler's criteria for a single job (platform/topic) */
|
|
96
|
+
export interface GravityTask {
|
|
97
|
+
/** topic: the topic of the job (e.g. '#ai' for X, 'r/ai' for Reddit) */
|
|
98
|
+
topic: string;
|
|
99
|
+
/** platform: the platform of the job ('x' or 'reddit') */
|
|
100
|
+
platform: string;
|
|
49
101
|
}
|
|
50
|
-
|
|
51
|
-
|
|
52
|
-
|
|
53
|
-
|
|
102
|
+
/** NotificationRequest is the request message for sending a notification to a user when a dataset is ready to download */
|
|
103
|
+
export interface NotificationRequest {
|
|
104
|
+
/** type: the type of notification to send ('email' is only supported currently) */
|
|
105
|
+
type: string;
|
|
106
|
+
/** address: the address to send the notification to (only email addresses are supported currently) */
|
|
107
|
+
address: string;
|
|
108
|
+
/** redirect_url: the URL to include in the notication message that redirects the user to any built datasets */
|
|
109
|
+
redirectUrl?: string | undefined;
|
|
54
110
|
}
|
|
55
|
-
|
|
56
|
-
|
|
111
|
+
/** GetCrawlerRequest is the request message for getting a crawler */
|
|
112
|
+
export interface GetCrawlerRequest {
|
|
113
|
+
/** crawler_id: the ID of the crawler */
|
|
114
|
+
crawlerId: string;
|
|
57
115
|
}
|
|
58
|
-
|
|
59
|
-
|
|
116
|
+
/** GetCrawlerResponse is the response message for getting a crawler */
|
|
117
|
+
export interface GetCrawlerResponse {
|
|
118
|
+
/** crawler: the crawler */
|
|
119
|
+
crawler?: Crawler | undefined;
|
|
60
120
|
}
|
|
61
|
-
|
|
62
|
-
|
|
63
|
-
|
|
64
|
-
|
|
65
|
-
|
|
121
|
+
/** CreateGravityTaskRequest is the request message for creating a new gravity task */
|
|
122
|
+
export interface CreateGravityTaskRequest {
|
|
123
|
+
/** gravity_tasks: the criteria for the crawlers that will be created */
|
|
124
|
+
gravityTasks: GravityTask[];
|
|
125
|
+
/** name: the name of the gravity task (optional, default will generate a random name) */
|
|
126
|
+
name: string;
|
|
127
|
+
/**
|
|
128
|
+
* notification_requests: the details of the notification to be sent to the user when a dataset
|
|
129
|
+
* that is automatically generated upon completion of the crawler is ready to download (optional)
|
|
130
|
+
*/
|
|
131
|
+
notificationRequests: NotificationRequest[];
|
|
132
|
+
/** gravity_task_id: the ID of the gravity task (optional, default will generate a random ID) */
|
|
133
|
+
gravityTaskId?: string | undefined;
|
|
66
134
|
}
|
|
67
|
-
|
|
68
|
-
|
|
135
|
+
/** CreateGravityTaskResponse is the response message for creating a new gravity task */
|
|
136
|
+
export interface CreateGravityTaskResponse {
|
|
137
|
+
/** gravity_task_id: the ID of the gravity task */
|
|
138
|
+
gravityTaskId: string;
|
|
69
139
|
}
|
|
70
|
-
|
|
71
|
-
|
|
72
|
-
|
|
73
|
-
|
|
140
|
+
/** BuildDatasetRequest is the request message for manually requesting the building of a dataset for a single crawler */
|
|
141
|
+
export interface BuildDatasetRequest {
|
|
142
|
+
/** crawler_id: the ID of the crawler that will be used to build the dataset */
|
|
143
|
+
crawlerId: string;
|
|
144
|
+
/** notification_requests: the details of the notification to be sent to the user when the dataset is ready to download (optional) */
|
|
145
|
+
notificationRequests: NotificationRequest[];
|
|
146
|
+
/** max_rows: the maximum number of rows to include in the dataset (optional, defaults to 500) */
|
|
147
|
+
maxRows: number;
|
|
74
148
|
}
|
|
75
|
-
|
|
76
|
-
|
|
77
|
-
|
|
149
|
+
/**
|
|
150
|
+
* BuildDatasetResponse is the response message for manually requesting the building of a dataset for a single crawler
|
|
151
|
+
* - dataset: the dataset that was built
|
|
152
|
+
*/
|
|
153
|
+
export interface BuildDatasetResponse {
|
|
154
|
+
/** dataset_id: the ID of the dataset */
|
|
155
|
+
datasetId: string;
|
|
156
|
+
/** dataset: the dataset that was built */
|
|
157
|
+
dataset?: Dataset | undefined;
|
|
78
158
|
}
|
|
79
|
-
export interface
|
|
80
|
-
|
|
81
|
-
|
|
82
|
-
|
|
83
|
-
|
|
84
|
-
|
|
85
|
-
|
|
86
|
-
steps?: IDatasetStep[];
|
|
87
|
-
totalSteps?: number;
|
|
159
|
+
export interface Nebula {
|
|
160
|
+
/** error: nebula build error message */
|
|
161
|
+
error: string;
|
|
162
|
+
/** file_size_bytes: the size of the file in bytes */
|
|
163
|
+
fileSizeBytes: number;
|
|
164
|
+
/** url: the URL of the file */
|
|
165
|
+
url: string;
|
|
88
166
|
}
|
|
89
|
-
|
|
90
|
-
|
|
91
|
-
|
|
92
|
-
|
|
93
|
-
|
|
94
|
-
|
|
95
|
-
|
|
167
|
+
/** Dataset contains the progress and results of a dataset build */
|
|
168
|
+
export interface Dataset {
|
|
169
|
+
/** crawler_workflow_id: the ID of the parent crawler for this dataset */
|
|
170
|
+
crawlerWorkflowId: string;
|
|
171
|
+
/** create_date: the date the dataset was created */
|
|
172
|
+
createDate?: Date | undefined;
|
|
173
|
+
/** expire_date: the date the dataset will expire (be deleted) */
|
|
174
|
+
expireDate?: Date | undefined;
|
|
175
|
+
/** files: the details about the dataset files that are included in the dataset */
|
|
176
|
+
files: DatasetFile[];
|
|
177
|
+
/** status: the status of the dataset */
|
|
178
|
+
status: string;
|
|
179
|
+
/** status_message: the message of the status of the dataset */
|
|
180
|
+
statusMessage: string;
|
|
181
|
+
/** steps: the progress of the dataset build */
|
|
182
|
+
steps: DatasetStep[];
|
|
183
|
+
/** total_steps: the total number of steps in the dataset build */
|
|
184
|
+
totalSteps: number;
|
|
185
|
+
/** nebula: the details about the nebula that was built */
|
|
186
|
+
nebula?: Nebula | undefined;
|
|
96
187
|
}
|
|
97
|
-
|
|
98
|
-
|
|
99
|
-
|
|
100
|
-
|
|
188
|
+
/** DatasetFile contains the details about a dataset file */
|
|
189
|
+
export interface DatasetFile {
|
|
190
|
+
/** file_name: the name of the file */
|
|
191
|
+
fileName: string;
|
|
192
|
+
/** file_size_bytes: the size of the file in bytes */
|
|
193
|
+
fileSizeBytes: number;
|
|
194
|
+
/** last_modified: the date the file was last modified */
|
|
195
|
+
lastModified?: Date | undefined;
|
|
196
|
+
/** num_rows: the number of rows in the file */
|
|
197
|
+
numRows: number;
|
|
198
|
+
/** s3_key: the key of the file in S3 (internal use only) */
|
|
199
|
+
s3Key: string;
|
|
200
|
+
/** url: the URL of the file (public use) */
|
|
201
|
+
url: string;
|
|
101
202
|
}
|
|
102
|
-
|
|
103
|
-
|
|
203
|
+
/**
|
|
204
|
+
* DatasetStep contains one step of the progress of a dataset build
|
|
205
|
+
* (NOTE: each step varies in time and complexity)
|
|
206
|
+
*/
|
|
207
|
+
export interface DatasetStep {
|
|
208
|
+
/** progress: the progress of this step in the dataset build (0.0 - 1.0) */
|
|
209
|
+
progress: number;
|
|
210
|
+
/** step: the step number of the dataset build (1-indexed) */
|
|
211
|
+
step: number;
|
|
212
|
+
/** step_name: description of what is happening in the step */
|
|
213
|
+
stepName: string;
|
|
104
214
|
}
|
|
105
|
-
|
|
106
|
-
|
|
215
|
+
/** GetDatasetRequest is the request message for getting the status of a dataset */
|
|
216
|
+
export interface GetDatasetRequest {
|
|
217
|
+
/** dataset_id: the ID of the dataset */
|
|
218
|
+
datasetId: string;
|
|
107
219
|
}
|
|
108
|
-
|
|
109
|
-
|
|
220
|
+
/** GetDatasetResponse is the response message for getting the status of a dataset */
|
|
221
|
+
export interface GetDatasetResponse {
|
|
222
|
+
/** dataset: the dataset that is being built */
|
|
223
|
+
dataset?: Dataset | undefined;
|
|
110
224
|
}
|
|
111
|
-
|
|
112
|
-
|
|
225
|
+
/** CancelGravityTaskRequest is the request message for cancelling a gravity task */
|
|
226
|
+
export interface CancelGravityTaskRequest {
|
|
227
|
+
/** gravity_task_id: the ID of the gravity task */
|
|
228
|
+
gravityTaskId: string;
|
|
113
229
|
}
|
|
114
|
-
|
|
115
|
-
|
|
230
|
+
/** CancelGravityTaskResponse is the response message for cancelling a gravity task */
|
|
231
|
+
export interface CancelGravityTaskResponse {
|
|
232
|
+
/** message: the message of the cancellation of the gravity task (currently hardcoded to "success") */
|
|
233
|
+
message: string;
|
|
116
234
|
}
|
|
117
|
-
|
|
118
|
-
|
|
235
|
+
/** CancelDatasetRequest is the request message for cancelling a dataset build */
|
|
236
|
+
export interface CancelDatasetRequest {
|
|
237
|
+
/** dataset_id: the ID of the dataset */
|
|
238
|
+
datasetId: string;
|
|
119
239
|
}
|
|
120
|
-
|
|
121
|
-
|
|
122
|
-
|
|
123
|
-
|
|
124
|
-
GetCrawler(request: IGetCrawlerRequest, callback: (error: Error | null, response: IGetCrawlerResponse) => void): void;
|
|
125
|
-
CreateGravityTask(request: ICreateGravityTaskRequest): Promise<ICreateGravityTaskResponse>;
|
|
126
|
-
CreateGravityTask(request: ICreateGravityTaskRequest, callback: (error: Error | null, response: ICreateGravityTaskResponse) => void): void;
|
|
127
|
-
BuildDataset(request: IBuildDatasetRequest): Promise<IBuildDatasetResponse>;
|
|
128
|
-
BuildDataset(request: IBuildDatasetRequest, callback: (error: Error | null, response: IBuildDatasetResponse) => void): void;
|
|
129
|
-
GetDataset(request: IGetDatasetRequest): Promise<IGetDatasetResponse>;
|
|
130
|
-
GetDataset(request: IGetDatasetRequest, callback: (error: Error | null, response: IGetDatasetResponse) => void): void;
|
|
131
|
-
CancelGravityTask(request: ICancelGravityTaskRequest): Promise<ICancelGravityTaskResponse>;
|
|
132
|
-
CancelGravityTask(request: ICancelGravityTaskRequest, callback: (error: Error | null, response: ICancelGravityTaskResponse) => void): void;
|
|
133
|
-
CancelDataset(request: ICancelDatasetRequest): Promise<ICancelDatasetResponse>;
|
|
134
|
-
CancelDataset(request: ICancelDatasetRequest, callback: (error: Error | null, response: ICancelDatasetResponse) => void): void;
|
|
240
|
+
/** CancelDatasetResponse is the response message for cancelling a dataset build */
|
|
241
|
+
export interface CancelDatasetResponse {
|
|
242
|
+
/** message: the message of the cancellation of the dataset build (currently hardcoded to "success") */
|
|
243
|
+
message: string;
|
|
135
244
|
}
|
|
136
|
-
export declare const
|
|
137
|
-
|
|
138
|
-
|
|
245
|
+
export declare const Crawler: MessageFns<Crawler>;
|
|
246
|
+
export declare const CrawlerCriteria: MessageFns<CrawlerCriteria>;
|
|
247
|
+
export declare const CrawlerNotification: MessageFns<CrawlerNotification>;
|
|
248
|
+
export declare const HfRepo: MessageFns<HfRepo>;
|
|
249
|
+
export declare const CrawlerState: MessageFns<CrawlerState>;
|
|
250
|
+
export declare const GravityTaskState: MessageFns<GravityTaskState>;
|
|
251
|
+
export declare const GetGravityTasksRequest: MessageFns<GetGravityTasksRequest>;
|
|
252
|
+
export declare const GetGravityTasksResponse: MessageFns<GetGravityTasksResponse>;
|
|
253
|
+
export declare const GravityTask: MessageFns<GravityTask>;
|
|
254
|
+
export declare const NotificationRequest: MessageFns<NotificationRequest>;
|
|
255
|
+
export declare const GetCrawlerRequest: MessageFns<GetCrawlerRequest>;
|
|
256
|
+
export declare const GetCrawlerResponse: MessageFns<GetCrawlerResponse>;
|
|
257
|
+
export declare const CreateGravityTaskRequest: MessageFns<CreateGravityTaskRequest>;
|
|
258
|
+
export declare const CreateGravityTaskResponse: MessageFns<CreateGravityTaskResponse>;
|
|
259
|
+
export declare const BuildDatasetRequest: MessageFns<BuildDatasetRequest>;
|
|
260
|
+
export declare const BuildDatasetResponse: MessageFns<BuildDatasetResponse>;
|
|
261
|
+
export declare const Nebula: MessageFns<Nebula>;
|
|
262
|
+
export declare const Dataset: MessageFns<Dataset>;
|
|
263
|
+
export declare const DatasetFile: MessageFns<DatasetFile>;
|
|
264
|
+
export declare const DatasetStep: MessageFns<DatasetStep>;
|
|
265
|
+
export declare const GetDatasetRequest: MessageFns<GetDatasetRequest>;
|
|
266
|
+
export declare const GetDatasetResponse: MessageFns<GetDatasetResponse>;
|
|
267
|
+
export declare const CancelGravityTaskRequest: MessageFns<CancelGravityTaskRequest>;
|
|
268
|
+
export declare const CancelGravityTaskResponse: MessageFns<CancelGravityTaskResponse>;
|
|
269
|
+
export declare const CancelDatasetRequest: MessageFns<CancelDatasetRequest>;
|
|
270
|
+
export declare const CancelDatasetResponse: MessageFns<CancelDatasetResponse>;
|
|
271
|
+
export type GravityServiceService = typeof GravityServiceService;
|
|
272
|
+
export declare const GravityServiceService: {
|
|
273
|
+
/** Lists all data collection tasks for a user */
|
|
274
|
+
readonly getGravityTasks: {
|
|
275
|
+
readonly path: "/gravity.v1.GravityService/GetGravityTasks";
|
|
276
|
+
readonly requestStream: false;
|
|
277
|
+
readonly responseStream: false;
|
|
278
|
+
readonly requestSerialize: (value: GetGravityTasksRequest) => Buffer<ArrayBuffer>;
|
|
279
|
+
readonly requestDeserialize: (value: Buffer) => GetGravityTasksRequest;
|
|
280
|
+
readonly responseSerialize: (value: GetGravityTasksResponse) => Buffer<ArrayBuffer>;
|
|
281
|
+
readonly responseDeserialize: (value: Buffer) => GetGravityTasksResponse;
|
|
139
282
|
};
|
|
140
|
-
|
|
141
|
-
|
|
142
|
-
|
|
143
|
-
|
|
144
|
-
|
|
145
|
-
|
|
146
|
-
|
|
147
|
-
|
|
148
|
-
|
|
149
|
-
|
|
150
|
-
|
|
151
|
-
|
|
152
|
-
|
|
153
|
-
|
|
154
|
-
|
|
155
|
-
|
|
156
|
-
|
|
157
|
-
|
|
158
|
-
|
|
159
|
-
|
|
160
|
-
|
|
161
|
-
|
|
162
|
-
|
|
163
|
-
|
|
164
|
-
|
|
165
|
-
|
|
166
|
-
|
|
167
|
-
|
|
168
|
-
|
|
169
|
-
|
|
170
|
-
|
|
171
|
-
|
|
172
|
-
|
|
173
|
-
|
|
174
|
-
|
|
175
|
-
|
|
176
|
-
|
|
177
|
-
|
|
178
|
-
|
|
179
|
-
|
|
180
|
-
|
|
181
|
-
|
|
182
|
-
|
|
183
|
-
|
|
184
|
-
|
|
185
|
-
|
|
186
|
-
|
|
187
|
-
|
|
188
|
-
|
|
189
|
-
|
|
190
|
-
|
|
191
|
-
|
|
192
|
-
|
|
193
|
-
|
|
194
|
-
|
|
195
|
-
|
|
196
|
-
|
|
197
|
-
|
|
198
|
-
|
|
199
|
-
type: string;
|
|
200
|
-
id: number;
|
|
201
|
-
};
|
|
202
|
-
state: {
|
|
203
|
-
type: string;
|
|
204
|
-
id: number;
|
|
205
|
-
};
|
|
206
|
-
datasetWorkflows: {
|
|
207
|
-
rule: string;
|
|
208
|
-
type: string;
|
|
209
|
-
id: number;
|
|
210
|
-
};
|
|
211
|
-
};
|
|
212
|
-
};
|
|
213
|
-
CrawlerCriteria: {
|
|
214
|
-
fields: {
|
|
215
|
-
platform: {
|
|
216
|
-
type: string;
|
|
217
|
-
id: number;
|
|
218
|
-
};
|
|
219
|
-
topic: {
|
|
220
|
-
type: string;
|
|
221
|
-
id: number;
|
|
222
|
-
};
|
|
223
|
-
notification: {
|
|
224
|
-
type: string;
|
|
225
|
-
id: number;
|
|
226
|
-
};
|
|
227
|
-
mock: {
|
|
228
|
-
type: string;
|
|
229
|
-
id: number;
|
|
230
|
-
};
|
|
231
|
-
};
|
|
232
|
-
};
|
|
233
|
-
CrawlerNotification: {
|
|
234
|
-
fields: {
|
|
235
|
-
to: {
|
|
236
|
-
type: string;
|
|
237
|
-
id: number;
|
|
238
|
-
};
|
|
239
|
-
link: {
|
|
240
|
-
type: string;
|
|
241
|
-
id: number;
|
|
242
|
-
};
|
|
243
|
-
};
|
|
244
|
-
};
|
|
245
|
-
HfRepo: {
|
|
246
|
-
fields: {
|
|
247
|
-
repoName: {
|
|
248
|
-
type: string;
|
|
249
|
-
id: number;
|
|
250
|
-
};
|
|
251
|
-
rowCount: {
|
|
252
|
-
type: string;
|
|
253
|
-
id: number;
|
|
254
|
-
};
|
|
255
|
-
lastUpdate: {
|
|
256
|
-
type: string;
|
|
257
|
-
id: number;
|
|
258
|
-
};
|
|
259
|
-
};
|
|
260
|
-
};
|
|
261
|
-
CrawlerState: {
|
|
262
|
-
fields: {
|
|
263
|
-
status: {
|
|
264
|
-
type: string;
|
|
265
|
-
id: number;
|
|
266
|
-
};
|
|
267
|
-
bytesCollected: {
|
|
268
|
-
type: string;
|
|
269
|
-
id: number;
|
|
270
|
-
};
|
|
271
|
-
recordsCollected: {
|
|
272
|
-
type: string;
|
|
273
|
-
id: number;
|
|
274
|
-
};
|
|
275
|
-
repos: {
|
|
276
|
-
rule: string;
|
|
277
|
-
type: string;
|
|
278
|
-
id: number;
|
|
279
|
-
};
|
|
280
|
-
};
|
|
281
|
-
};
|
|
282
|
-
GravityTaskState: {
|
|
283
|
-
fields: {
|
|
284
|
-
gravityTaskId: {
|
|
285
|
-
type: string;
|
|
286
|
-
id: number;
|
|
287
|
-
};
|
|
288
|
-
name: {
|
|
289
|
-
type: string;
|
|
290
|
-
id: number;
|
|
291
|
-
};
|
|
292
|
-
status: {
|
|
293
|
-
type: string;
|
|
294
|
-
id: number;
|
|
295
|
-
};
|
|
296
|
-
startTime: {
|
|
297
|
-
type: string;
|
|
298
|
-
id: number;
|
|
299
|
-
};
|
|
300
|
-
crawlerIds: {
|
|
301
|
-
rule: string;
|
|
302
|
-
type: string;
|
|
303
|
-
id: number;
|
|
304
|
-
};
|
|
305
|
-
crawlerWorkflows: {
|
|
306
|
-
rule: string;
|
|
307
|
-
type: string;
|
|
308
|
-
id: number;
|
|
309
|
-
};
|
|
310
|
-
};
|
|
311
|
-
};
|
|
312
|
-
GetGravityTasksRequest: {
|
|
313
|
-
fields: {
|
|
314
|
-
gravityTaskId: {
|
|
315
|
-
type: string;
|
|
316
|
-
id: number;
|
|
317
|
-
};
|
|
318
|
-
includeCrawlers: {
|
|
319
|
-
type: string;
|
|
320
|
-
id: number;
|
|
321
|
-
};
|
|
322
|
-
};
|
|
323
|
-
};
|
|
324
|
-
GetGravityTasksResponse: {
|
|
325
|
-
fields: {
|
|
326
|
-
gravityTaskStates: {
|
|
327
|
-
rule: string;
|
|
328
|
-
type: string;
|
|
329
|
-
id: number;
|
|
330
|
-
};
|
|
331
|
-
};
|
|
332
|
-
};
|
|
333
|
-
GravityTask: {
|
|
334
|
-
fields: {
|
|
335
|
-
topic: {
|
|
336
|
-
type: string;
|
|
337
|
-
id: number;
|
|
338
|
-
};
|
|
339
|
-
platform: {
|
|
340
|
-
type: string;
|
|
341
|
-
id: number;
|
|
342
|
-
};
|
|
343
|
-
};
|
|
344
|
-
};
|
|
345
|
-
NotificationRequest: {
|
|
346
|
-
fields: {
|
|
347
|
-
type: {
|
|
348
|
-
type: string;
|
|
349
|
-
id: number;
|
|
350
|
-
};
|
|
351
|
-
address: {
|
|
352
|
-
type: string;
|
|
353
|
-
id: number;
|
|
354
|
-
};
|
|
355
|
-
redirectUrl: {
|
|
356
|
-
type: string;
|
|
357
|
-
id: number;
|
|
358
|
-
};
|
|
359
|
-
};
|
|
360
|
-
};
|
|
361
|
-
GetCrawlerRequest: {
|
|
362
|
-
fields: {
|
|
363
|
-
crawlerId: {
|
|
364
|
-
type: string;
|
|
365
|
-
id: number;
|
|
366
|
-
};
|
|
367
|
-
};
|
|
368
|
-
};
|
|
369
|
-
GetCrawlerResponse: {
|
|
370
|
-
fields: {
|
|
371
|
-
crawler: {
|
|
372
|
-
type: string;
|
|
373
|
-
id: number;
|
|
374
|
-
};
|
|
375
|
-
};
|
|
376
|
-
};
|
|
377
|
-
CreateGravityTaskRequest: {
|
|
378
|
-
fields: {
|
|
379
|
-
gravityTasks: {
|
|
380
|
-
rule: string;
|
|
381
|
-
type: string;
|
|
382
|
-
id: number;
|
|
383
|
-
};
|
|
384
|
-
name: {
|
|
385
|
-
type: string;
|
|
386
|
-
id: number;
|
|
387
|
-
};
|
|
388
|
-
notificationRequests: {
|
|
389
|
-
rule: string;
|
|
390
|
-
type: string;
|
|
391
|
-
id: number;
|
|
392
|
-
};
|
|
393
|
-
gravityTaskId: {
|
|
394
|
-
type: string;
|
|
395
|
-
id: number;
|
|
396
|
-
};
|
|
397
|
-
};
|
|
398
|
-
};
|
|
399
|
-
CreateGravityTaskResponse: {
|
|
400
|
-
fields: {
|
|
401
|
-
gravityTaskId: {
|
|
402
|
-
type: string;
|
|
403
|
-
id: number;
|
|
404
|
-
};
|
|
405
|
-
};
|
|
406
|
-
};
|
|
407
|
-
BuildDatasetRequest: {
|
|
408
|
-
fields: {
|
|
409
|
-
crawlerId: {
|
|
410
|
-
type: string;
|
|
411
|
-
id: number;
|
|
412
|
-
};
|
|
413
|
-
notificationRequests: {
|
|
414
|
-
rule: string;
|
|
415
|
-
type: string;
|
|
416
|
-
id: number;
|
|
417
|
-
};
|
|
418
|
-
maxRows: {
|
|
419
|
-
type: string;
|
|
420
|
-
id: number;
|
|
421
|
-
};
|
|
422
|
-
};
|
|
423
|
-
};
|
|
424
|
-
BuildDatasetResponse: {
|
|
425
|
-
fields: {
|
|
426
|
-
datasetId: {
|
|
427
|
-
type: string;
|
|
428
|
-
id: number;
|
|
429
|
-
};
|
|
430
|
-
dataset: {
|
|
431
|
-
type: string;
|
|
432
|
-
id: number;
|
|
433
|
-
};
|
|
434
|
-
};
|
|
435
|
-
};
|
|
436
|
-
Dataset: {
|
|
437
|
-
fields: {
|
|
438
|
-
crawlerWorkflowId: {
|
|
439
|
-
type: string;
|
|
440
|
-
id: number;
|
|
441
|
-
};
|
|
442
|
-
createDate: {
|
|
443
|
-
type: string;
|
|
444
|
-
id: number;
|
|
445
|
-
};
|
|
446
|
-
expireDate: {
|
|
447
|
-
type: string;
|
|
448
|
-
id: number;
|
|
449
|
-
};
|
|
450
|
-
files: {
|
|
451
|
-
rule: string;
|
|
452
|
-
type: string;
|
|
453
|
-
id: number;
|
|
454
|
-
};
|
|
455
|
-
status: {
|
|
456
|
-
type: string;
|
|
457
|
-
id: number;
|
|
458
|
-
};
|
|
459
|
-
statusMessage: {
|
|
460
|
-
type: string;
|
|
461
|
-
id: number;
|
|
462
|
-
};
|
|
463
|
-
steps: {
|
|
464
|
-
rule: string;
|
|
465
|
-
type: string;
|
|
466
|
-
id: number;
|
|
467
|
-
};
|
|
468
|
-
totalSteps: {
|
|
469
|
-
type: string;
|
|
470
|
-
id: number;
|
|
471
|
-
};
|
|
472
|
-
};
|
|
473
|
-
};
|
|
474
|
-
DatasetFile: {
|
|
475
|
-
fields: {
|
|
476
|
-
fileName: {
|
|
477
|
-
type: string;
|
|
478
|
-
id: number;
|
|
479
|
-
};
|
|
480
|
-
fileSizeBytes: {
|
|
481
|
-
type: string;
|
|
482
|
-
id: number;
|
|
483
|
-
};
|
|
484
|
-
lastModified: {
|
|
485
|
-
type: string;
|
|
486
|
-
id: number;
|
|
487
|
-
};
|
|
488
|
-
numRows: {
|
|
489
|
-
type: string;
|
|
490
|
-
id: number;
|
|
491
|
-
};
|
|
492
|
-
s3Key: {
|
|
493
|
-
type: string;
|
|
494
|
-
id: number;
|
|
495
|
-
};
|
|
496
|
-
url: {
|
|
497
|
-
type: string;
|
|
498
|
-
id: number;
|
|
499
|
-
};
|
|
500
|
-
};
|
|
501
|
-
};
|
|
502
|
-
DatasetStep: {
|
|
503
|
-
fields: {
|
|
504
|
-
progress: {
|
|
505
|
-
type: string;
|
|
506
|
-
id: number;
|
|
507
|
-
};
|
|
508
|
-
step: {
|
|
509
|
-
type: string;
|
|
510
|
-
id: number;
|
|
511
|
-
};
|
|
512
|
-
stepName: {
|
|
513
|
-
type: string;
|
|
514
|
-
id: number;
|
|
515
|
-
};
|
|
516
|
-
};
|
|
517
|
-
};
|
|
518
|
-
GetDatasetRequest: {
|
|
519
|
-
fields: {
|
|
520
|
-
datasetId: {
|
|
521
|
-
type: string;
|
|
522
|
-
id: number;
|
|
523
|
-
};
|
|
524
|
-
};
|
|
525
|
-
};
|
|
526
|
-
GetDatasetResponse: {
|
|
527
|
-
fields: {
|
|
528
|
-
dataset: {
|
|
529
|
-
type: string;
|
|
530
|
-
id: number;
|
|
531
|
-
};
|
|
532
|
-
};
|
|
533
|
-
};
|
|
534
|
-
CancelGravityTaskRequest: {
|
|
535
|
-
fields: {
|
|
536
|
-
gravityTaskId: {
|
|
537
|
-
type: string;
|
|
538
|
-
id: number;
|
|
539
|
-
};
|
|
540
|
-
};
|
|
541
|
-
};
|
|
542
|
-
CancelGravityTaskResponse: {
|
|
543
|
-
fields: {
|
|
544
|
-
message: {
|
|
545
|
-
type: string;
|
|
546
|
-
id: number;
|
|
547
|
-
};
|
|
548
|
-
};
|
|
549
|
-
};
|
|
550
|
-
CancelDatasetRequest: {
|
|
551
|
-
fields: {
|
|
552
|
-
datasetId: {
|
|
553
|
-
type: string;
|
|
554
|
-
id: number;
|
|
555
|
-
};
|
|
556
|
-
};
|
|
557
|
-
};
|
|
558
|
-
CancelDatasetResponse: {
|
|
559
|
-
fields: {
|
|
560
|
-
message: {
|
|
561
|
-
type: string;
|
|
562
|
-
id: number;
|
|
563
|
-
};
|
|
564
|
-
};
|
|
565
|
-
};
|
|
566
|
-
};
|
|
567
|
-
};
|
|
568
|
-
};
|
|
569
|
-
};
|
|
570
|
-
google: {
|
|
571
|
-
nested: {
|
|
572
|
-
protobuf: {
|
|
573
|
-
nested: {
|
|
574
|
-
Timestamp: {
|
|
575
|
-
fields: {
|
|
576
|
-
seconds: {
|
|
577
|
-
type: string;
|
|
578
|
-
id: number;
|
|
579
|
-
};
|
|
580
|
-
nanos: {
|
|
581
|
-
type: string;
|
|
582
|
-
id: number;
|
|
583
|
-
};
|
|
584
|
-
};
|
|
585
|
-
};
|
|
586
|
-
};
|
|
587
|
-
};
|
|
588
|
-
};
|
|
589
|
-
};
|
|
283
|
+
/** Get a single crawler by its ID */
|
|
284
|
+
readonly getCrawler: {
|
|
285
|
+
readonly path: "/gravity.v1.GravityService/GetCrawler";
|
|
286
|
+
readonly requestStream: false;
|
|
287
|
+
readonly responseStream: false;
|
|
288
|
+
readonly requestSerialize: (value: GetCrawlerRequest) => Buffer<ArrayBuffer>;
|
|
289
|
+
readonly requestDeserialize: (value: Buffer) => GetCrawlerRequest;
|
|
290
|
+
readonly responseSerialize: (value: GetCrawlerResponse) => Buffer<ArrayBuffer>;
|
|
291
|
+
readonly responseDeserialize: (value: Buffer) => GetCrawlerResponse;
|
|
292
|
+
};
|
|
293
|
+
/** Create a new gravity task */
|
|
294
|
+
readonly createGravityTask: {
|
|
295
|
+
readonly path: "/gravity.v1.GravityService/CreateGravityTask";
|
|
296
|
+
readonly requestStream: false;
|
|
297
|
+
readonly responseStream: false;
|
|
298
|
+
readonly requestSerialize: (value: CreateGravityTaskRequest) => Buffer<ArrayBuffer>;
|
|
299
|
+
readonly requestDeserialize: (value: Buffer) => CreateGravityTaskRequest;
|
|
300
|
+
readonly responseSerialize: (value: CreateGravityTaskResponse) => Buffer<ArrayBuffer>;
|
|
301
|
+
readonly responseDeserialize: (value: Buffer) => CreateGravityTaskResponse;
|
|
302
|
+
};
|
|
303
|
+
/** Build a dataset for a single crawler */
|
|
304
|
+
readonly buildDataset: {
|
|
305
|
+
readonly path: "/gravity.v1.GravityService/BuildDataset";
|
|
306
|
+
readonly requestStream: false;
|
|
307
|
+
readonly responseStream: false;
|
|
308
|
+
readonly requestSerialize: (value: BuildDatasetRequest) => Buffer<ArrayBuffer>;
|
|
309
|
+
readonly requestDeserialize: (value: Buffer) => BuildDatasetRequest;
|
|
310
|
+
readonly responseSerialize: (value: BuildDatasetResponse) => Buffer<ArrayBuffer>;
|
|
311
|
+
readonly responseDeserialize: (value: Buffer) => BuildDatasetResponse;
|
|
312
|
+
};
|
|
313
|
+
/** Get the dataset build status and results */
|
|
314
|
+
readonly getDataset: {
|
|
315
|
+
readonly path: "/gravity.v1.GravityService/GetDataset";
|
|
316
|
+
readonly requestStream: false;
|
|
317
|
+
readonly responseStream: false;
|
|
318
|
+
readonly requestSerialize: (value: GetDatasetRequest) => Buffer<ArrayBuffer>;
|
|
319
|
+
readonly requestDeserialize: (value: Buffer) => GetDatasetRequest;
|
|
320
|
+
readonly responseSerialize: (value: GetDatasetResponse) => Buffer<ArrayBuffer>;
|
|
321
|
+
readonly responseDeserialize: (value: Buffer) => GetDatasetResponse;
|
|
322
|
+
};
|
|
323
|
+
/** Cancel a gravity task and any crawlers associated with it */
|
|
324
|
+
readonly cancelGravityTask: {
|
|
325
|
+
readonly path: "/gravity.v1.GravityService/CancelGravityTask";
|
|
326
|
+
readonly requestStream: false;
|
|
327
|
+
readonly responseStream: false;
|
|
328
|
+
readonly requestSerialize: (value: CancelGravityTaskRequest) => Buffer<ArrayBuffer>;
|
|
329
|
+
readonly requestDeserialize: (value: Buffer) => CancelGravityTaskRequest;
|
|
330
|
+
readonly responseSerialize: (value: CancelGravityTaskResponse) => Buffer<ArrayBuffer>;
|
|
331
|
+
readonly responseDeserialize: (value: Buffer) => CancelGravityTaskResponse;
|
|
332
|
+
};
|
|
333
|
+
/** Cancel dataset build if it is in progress and purges the dataset */
|
|
334
|
+
readonly cancelDataset: {
|
|
335
|
+
readonly path: "/gravity.v1.GravityService/CancelDataset";
|
|
336
|
+
readonly requestStream: false;
|
|
337
|
+
readonly responseStream: false;
|
|
338
|
+
readonly requestSerialize: (value: CancelDatasetRequest) => Buffer<ArrayBuffer>;
|
|
339
|
+
readonly requestDeserialize: (value: Buffer) => CancelDatasetRequest;
|
|
340
|
+
readonly responseSerialize: (value: CancelDatasetResponse) => Buffer<ArrayBuffer>;
|
|
341
|
+
readonly responseDeserialize: (value: Buffer) => CancelDatasetResponse;
|
|
590
342
|
};
|
|
591
343
|
};
|
|
344
|
+
/**
 * Server-side implementation map for GravityService.
 * Each property is the unary handler for the RPC of the same name.
 * NOTE(review): ts-proto-generated typings — keep in sync with the
 * gravity.v1 proto definition rather than editing by hand.
 */
export interface GravityServiceServer extends UntypedServiceImplementation {
    /** Lists all data collection tasks for a user */
    getGravityTasks: handleUnaryCall<GetGravityTasksRequest, GetGravityTasksResponse>;
    /** Get a single crawler by its ID */
    getCrawler: handleUnaryCall<GetCrawlerRequest, GetCrawlerResponse>;
    /** Create a new gravity task */
    createGravityTask: handleUnaryCall<CreateGravityTaskRequest, CreateGravityTaskResponse>;
    /** Build a dataset for a single crawler */
    buildDataset: handleUnaryCall<BuildDatasetRequest, BuildDatasetResponse>;
    /** Get the dataset build status and results */
    getDataset: handleUnaryCall<GetDatasetRequest, GetDatasetResponse>;
    /** Cancel a gravity task and any crawlers associated with it */
    cancelGravityTask: handleUnaryCall<CancelGravityTaskRequest, CancelGravityTaskResponse>;
    /** Cancel dataset build if it is in progress and purges the dataset */
    cancelDataset: handleUnaryCall<CancelDatasetRequest, CancelDatasetResponse>;
}
|
|
360
|
+
/**
 * Callback-style gRPC client interface for GravityService.
 * Every RPC exposes the standard @grpc/grpc-js unary overload triple:
 * (request, callback), (request, metadata, callback) and
 * (request, metadata, options, callback). All overloads return the
 * in-flight ClientUnaryCall handle.
 */
export interface GravityServiceClient extends Client {
    /** Lists all data collection tasks for a user */
    getGravityTasks(request: GetGravityTasksRequest, callback: (error: ServiceError | null, response: GetGravityTasksResponse) => void): ClientUnaryCall;
    getGravityTasks(request: GetGravityTasksRequest, metadata: Metadata, callback: (error: ServiceError | null, response: GetGravityTasksResponse) => void): ClientUnaryCall;
    getGravityTasks(request: GetGravityTasksRequest, metadata: Metadata, options: Partial<CallOptions>, callback: (error: ServiceError | null, response: GetGravityTasksResponse) => void): ClientUnaryCall;
    /** Get a single crawler by its ID */
    getCrawler(request: GetCrawlerRequest, callback: (error: ServiceError | null, response: GetCrawlerResponse) => void): ClientUnaryCall;
    getCrawler(request: GetCrawlerRequest, metadata: Metadata, callback: (error: ServiceError | null, response: GetCrawlerResponse) => void): ClientUnaryCall;
    getCrawler(request: GetCrawlerRequest, metadata: Metadata, options: Partial<CallOptions>, callback: (error: ServiceError | null, response: GetCrawlerResponse) => void): ClientUnaryCall;
    /** Create a new gravity task */
    createGravityTask(request: CreateGravityTaskRequest, callback: (error: ServiceError | null, response: CreateGravityTaskResponse) => void): ClientUnaryCall;
    createGravityTask(request: CreateGravityTaskRequest, metadata: Metadata, callback: (error: ServiceError | null, response: CreateGravityTaskResponse) => void): ClientUnaryCall;
    createGravityTask(request: CreateGravityTaskRequest, metadata: Metadata, options: Partial<CallOptions>, callback: (error: ServiceError | null, response: CreateGravityTaskResponse) => void): ClientUnaryCall;
    /** Build a dataset for a single crawler */
    buildDataset(request: BuildDatasetRequest, callback: (error: ServiceError | null, response: BuildDatasetResponse) => void): ClientUnaryCall;
    buildDataset(request: BuildDatasetRequest, metadata: Metadata, callback: (error: ServiceError | null, response: BuildDatasetResponse) => void): ClientUnaryCall;
    buildDataset(request: BuildDatasetRequest, metadata: Metadata, options: Partial<CallOptions>, callback: (error: ServiceError | null, response: BuildDatasetResponse) => void): ClientUnaryCall;
    /** Get the dataset build status and results */
    getDataset(request: GetDatasetRequest, callback: (error: ServiceError | null, response: GetDatasetResponse) => void): ClientUnaryCall;
    getDataset(request: GetDatasetRequest, metadata: Metadata, callback: (error: ServiceError | null, response: GetDatasetResponse) => void): ClientUnaryCall;
    getDataset(request: GetDatasetRequest, metadata: Metadata, options: Partial<CallOptions>, callback: (error: ServiceError | null, response: GetDatasetResponse) => void): ClientUnaryCall;
    /** Cancel a gravity task and any crawlers associated with it */
    cancelGravityTask(request: CancelGravityTaskRequest, callback: (error: ServiceError | null, response: CancelGravityTaskResponse) => void): ClientUnaryCall;
    cancelGravityTask(request: CancelGravityTaskRequest, metadata: Metadata, callback: (error: ServiceError | null, response: CancelGravityTaskResponse) => void): ClientUnaryCall;
    cancelGravityTask(request: CancelGravityTaskRequest, metadata: Metadata, options: Partial<CallOptions>, callback: (error: ServiceError | null, response: CancelGravityTaskResponse) => void): ClientUnaryCall;
    /** Cancel dataset build if it is in progress and purges the dataset */
    cancelDataset(request: CancelDatasetRequest, callback: (error: ServiceError | null, response: CancelDatasetResponse) => void): ClientUnaryCall;
    cancelDataset(request: CancelDatasetRequest, metadata: Metadata, callback: (error: ServiceError | null, response: CancelDatasetResponse) => void): ClientUnaryCall;
    cancelDataset(request: CancelDatasetRequest, metadata: Metadata, options: Partial<CallOptions>, callback: (error: ServiceError | null, response: CancelDatasetResponse) => void): ClientUnaryCall;
}
|
|
390
|
+
/**
 * Value (constructor) side of the GravityService client: construct with a
 * target address and channel credentials to obtain a GravityServiceClient.
 * Also exposes the generated service definition and its name.
 */
export declare const GravityServiceClient: {
    new (address: string, credentials: ChannelCredentials, options?: Partial<ClientOptions>): GravityServiceClient;
    service: typeof GravityServiceService;
    serviceName: string;
};
|
|
395
|
+
/** Leaf types that DeepPartial treats as atomic and does not recurse into. */
type Builtin = Date | Function | Uint8Array | string | number | boolean | undefined;
|
|
396
|
+
/**
 * Recursively makes every property of T optional. Builtin leaves are kept
 * as-is; arrays (mutable and readonly) are mapped element-wise; any other
 * object type has each property made optional and deep-partialized.
 */
export type DeepPartial<T> = T extends Builtin ? T : T extends globalThis.Array<infer U> ? globalThis.Array<DeepPartial<U>> : T extends ReadonlyArray<infer U> ? ReadonlyArray<DeepPartial<U>> : T extends {} ? {
    [K in keyof T]?: DeepPartial<T[K]>;
} : Partial<T>;
|
|
399
|
+
/**
 * Codec surface shared by every generated message helper in this module:
 * binary wire encode/decode (via @bufbuild/protobuf BinaryWriter/BinaryReader)
 * plus JSON and partial-object constructors.
 */
export interface MessageFns<T> {
    /** Serializes the message onto a binary writer (protobuf wire format). */
    encode(message: T, writer?: BinaryWriter): BinaryWriter;
    /** Decodes a message from binary wire input; length bounds the read when given. */
    decode(input: BinaryReader | Uint8Array, length?: number): T;
    /** Builds a message from a plain JSON object (untyped by design in generated code). */
    fromJSON(object: any): T;
    /** Converts the message to its JSON representation. */
    toJSON(message: T): unknown;
    /** Creates a message, optionally seeded from a deeply-partial base. */
    create(base?: DeepPartial<T>): T;
    /** Builds a message from a deeply-partial object. */
    fromPartial(object: DeepPartial<T>): T;
}
|
|
407
|
+
export {};
|