@flyteorg/flyteidl 1.16.0 → 2.0.0-alpha2

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (87)
  1. package/package.json +6 -9
  2. package/LICENSE +0 -202
  3. package/NOTICE +0 -4
  4. package/README.md +0 -14
  5. package/gen/pb-js/flyteidl.d.ts +0 -28307
  6. package/gen/pb-js/flyteidl.js +0 -66636
  7. package/protos/buf.lock +0 -18
  8. package/protos/buf.yaml +0 -12
  9. package/protos/docs/admin/admin.rst +0 -4623
  10. package/protos/docs/admin/index.rst +0 -13
  11. package/protos/docs/contributing.md +0 -68
  12. package/protos/docs/core/core.rst +0 -3952
  13. package/protos/docs/core/index.rst +0 -15
  14. package/protos/docs/datacatalog/datacatalog.rst +0 -1313
  15. package/protos/docs/datacatalog/index.rst +0 -16
  16. package/protos/docs/event/event.rst +0 -726
  17. package/protos/docs/event/index.rst +0 -27
  18. package/protos/docs/plugins/index.rst +0 -14
  19. package/protos/docs/plugins/plugins.rst +0 -780
  20. package/protos/docs/restructuredtext.tmpl +0 -129
  21. package/protos/docs/service/index.rst +0 -13
  22. package/protos/docs/service/service.rst +0 -543
  23. package/protos/docs/withoutscalar_restructuredtext.tmpl +0 -105
  24. package/protos/docs_index.rst +0 -19
  25. package/protos/flyteidl/admin/agent.proto +0 -309
  26. package/protos/flyteidl/admin/cluster_assignment.proto +0 -11
  27. package/protos/flyteidl/admin/common.proto +0 -337
  28. package/protos/flyteidl/admin/description_entity.proto +0 -95
  29. package/protos/flyteidl/admin/event.proto +0 -70
  30. package/protos/flyteidl/admin/execution.proto +0 -436
  31. package/protos/flyteidl/admin/launch_plan.proto +0 -231
  32. package/protos/flyteidl/admin/matchable_resource.proto +0 -194
  33. package/protos/flyteidl/admin/node_execution.proto +0 -248
  34. package/protos/flyteidl/admin/notification.proto +0 -27
  35. package/protos/flyteidl/admin/project.proto +0 -132
  36. package/protos/flyteidl/admin/project_attributes.proto +0 -69
  37. package/protos/flyteidl/admin/project_domain_attributes.proto +0 -80
  38. package/protos/flyteidl/admin/schedule.proto +0 -43
  39. package/protos/flyteidl/admin/signal.proto +0 -86
  40. package/protos/flyteidl/admin/task.proto +0 -71
  41. package/protos/flyteidl/admin/task_execution.proto +0 -171
  42. package/protos/flyteidl/admin/version.proto +0 -27
  43. package/protos/flyteidl/admin/workflow.proto +0 -92
  44. package/protos/flyteidl/admin/workflow_attributes.proto +0 -89
  45. package/protos/flyteidl/cacheservice/cacheservice.proto +0 -143
  46. package/protos/flyteidl/core/artifact_id.proto +0 -112
  47. package/protos/flyteidl/core/catalog.proto +0 -63
  48. package/protos/flyteidl/core/compiler.proto +0 -64
  49. package/protos/flyteidl/core/condition.proto +0 -63
  50. package/protos/flyteidl/core/dynamic_job.proto +0 -32
  51. package/protos/flyteidl/core/errors.proto +0 -42
  52. package/protos/flyteidl/core/execution.proto +0 -156
  53. package/protos/flyteidl/core/execution_envs.proto +0 -45
  54. package/protos/flyteidl/core/identifier.proto +0 -80
  55. package/protos/flyteidl/core/interface.proto +0 -64
  56. package/protos/flyteidl/core/literals.proto +0 -205
  57. package/protos/flyteidl/core/metrics.proto +0 -50
  58. package/protos/flyteidl/core/security.proto +0 -135
  59. package/protos/flyteidl/core/tasks.proto +0 -392
  60. package/protos/flyteidl/core/types.proto +0 -208
  61. package/protos/flyteidl/core/workflow.proto +0 -361
  62. package/protos/flyteidl/core/workflow_closure.proto +0 -18
  63. package/protos/flyteidl/datacatalog/datacatalog.proto +0 -420
  64. package/protos/flyteidl/event/cloudevents.proto +0 -81
  65. package/protos/flyteidl/event/event.proto +0 -347
  66. package/protos/flyteidl/plugins/array_job.proto +0 -30
  67. package/protos/flyteidl/plugins/common.proto +0 -27
  68. package/protos/flyteidl/plugins/dask.proto +0 -41
  69. package/protos/flyteidl/plugins/kubeflow/common.proto +0 -28
  70. package/protos/flyteidl/plugins/kubeflow/mpi.proto +0 -47
  71. package/protos/flyteidl/plugins/kubeflow/pytorch.proto +0 -53
  72. package/protos/flyteidl/plugins/kubeflow/tensorflow.proto +0 -46
  73. package/protos/flyteidl/plugins/mpi.proto +0 -20
  74. package/protos/flyteidl/plugins/presto.proto +0 -14
  75. package/protos/flyteidl/plugins/pytorch.proto +0 -25
  76. package/protos/flyteidl/plugins/qubole.proto +0 -26
  77. package/protos/flyteidl/plugins/ray.proto +0 -59
  78. package/protos/flyteidl/plugins/spark.proto +0 -42
  79. package/protos/flyteidl/plugins/tensorflow.proto +0 -18
  80. package/protos/flyteidl/plugins/waitable.proto +0 -15
  81. package/protos/flyteidl/service/admin.proto +0 -668
  82. package/protos/flyteidl/service/agent.proto +0 -79
  83. package/protos/flyteidl/service/auth.proto +0 -94
  84. package/protos/flyteidl/service/dataproxy.proto +0 -205
  85. package/protos/flyteidl/service/external_plugin_service.proto +0 -79
  86. package/protos/flyteidl/service/identity.proto +0 -51
  87. package/protos/flyteidl/service/signal.proto +0 -55
@@ -1,420 +0,0 @@
1
- syntax = "proto3";
2
-
3
- package datacatalog;
4
-
5
- import "flyteidl/core/literals.proto";
6
- import "google/protobuf/duration.proto";
7
- import "google/protobuf/timestamp.proto";
8
-
9
- option go_package = "github.com/flyteorg/flyte/flyteidl/gen/pb-go/flyteidl/datacatalog";
10
-
11
- /*
12
- * Data Catalog service definition
13
- * Data Catalog is a service for indexing parameterized, strongly-typed data artifacts across revisions.
14
- * Artifacts are associated with a Dataset, and can be tagged for retrieval.
15
- */
16
- service DataCatalog {
17
- // Create a new Dataset. Datasets are unique based on the DatasetID. Datasets are logical groupings of artifacts.
18
- // Each dataset can have one or more artifacts
19
- rpc CreateDataset (CreateDatasetRequest) returns (CreateDatasetResponse);
20
-
21
- // Get a Dataset by the DatasetID. This returns the Dataset with the associated metadata.
22
- rpc GetDataset (GetDatasetRequest) returns (GetDatasetResponse);
23
-
24
- // Create an artifact and the artifact data associated with it. An artifact can be a hive partition or arbitrary
25
- // files or data values
26
- rpc CreateArtifact (CreateArtifactRequest) returns (CreateArtifactResponse);
27
-
28
- // Retrieve an artifact by an identifying handle. This returns an artifact along with the artifact data.
29
- rpc GetArtifact (GetArtifactRequest) returns (GetArtifactResponse);
30
-
31
- // Associate a tag with an artifact. Tags are unique within a Dataset.
32
- rpc AddTag (AddTagRequest) returns (AddTagResponse);
33
-
34
- // Return a paginated list of artifacts
35
- rpc ListArtifacts (ListArtifactsRequest) returns (ListArtifactsResponse);
36
-
37
- // Return a paginated list of datasets
38
- rpc ListDatasets (ListDatasetsRequest) returns (ListDatasetsResponse);
39
-
40
- // Updates an existing artifact, overwriting the stored artifact data in the underlying blob storage.
41
- rpc UpdateArtifact (UpdateArtifactRequest) returns (UpdateArtifactResponse);
42
-
43
- // Attempts to get or extend a reservation for the corresponding artifact. If one already exists
44
- // (i.e. another entity owns the reservation) then that reservation is retrieved.
45
- // Once you acquire a reservation, you need to periodically extend the reservation with an
46
- // identical call. If the reservation is not extended before the defined expiration, it may be
47
- // acquired by another task.
48
- // Note: We may have multiple concurrent tasks with the same signature and the same input that
49
- // try to populate the same artifact at the same time. Thus with reservation, only one task can
50
- // run at a time, until the reservation expires.
51
- // Note: If task A does not extend the reservation in time and the reservation expires, another
52
- // task B may take over the reservation, resulting in two tasks A and B running in parallel. So
53
- // a third task C may get the Artifact from A or B, whichever writes last.
54
- rpc GetOrExtendReservation (GetOrExtendReservationRequest) returns (GetOrExtendReservationResponse);
55
-
56
- // Release the reservation when the task holding the spot fails so that the other tasks
57
- // can grab the spot.
58
- rpc ReleaseReservation (ReleaseReservationRequest) returns (ReleaseReservationResponse);
59
- }
60
-
61
- /*
62
- * Request message for creating a Dataset.
63
- */
64
- message CreateDatasetRequest {
65
- Dataset dataset = 1;
66
- }
67
-
68
- /*
69
- * Response message for creating a Dataset
70
- */
71
- message CreateDatasetResponse {
72
-
73
- }
74
-
75
- /*
76
- * Request message for retrieving a Dataset. The Dataset is retrieved by its unique identifier
77
- * which is a combination of several fields.
78
- */
79
- message GetDatasetRequest {
80
- DatasetID dataset = 1;
81
- }
82
-
83
- /*
84
- * Response message for retrieving a Dataset. The response will include the metadata for the
85
- * Dataset.
86
- */
87
- message GetDatasetResponse {
88
- Dataset dataset = 1;
89
- }
90
-
91
- /*
92
- * Request message for retrieving an Artifact. Retrieve an artifact based on a query handle that
93
- * can be one of artifact_id or tag. The result returned will include the artifact data and metadata
94
- * associated with the artifact.
95
- */
96
- message GetArtifactRequest {
97
- DatasetID dataset = 1;
98
-
99
- oneof query_handle {
100
- string artifact_id = 2;
101
- string tag_name = 3;
102
- }
103
- }
104
-
105
- /*
106
- * Response message for retrieving an Artifact. The result returned will include the artifact data
107
- * and metadata associated with the artifact.
108
- */
109
- message GetArtifactResponse {
110
- Artifact artifact = 1;
111
- }
112
-
113
- /*
114
- * Request message for creating an Artifact and its associated artifact Data.
115
- */
116
- message CreateArtifactRequest {
117
- Artifact artifact = 1;
118
- }
119
-
120
- /*
121
- * Response message for creating an Artifact.
122
- */
123
- message CreateArtifactResponse {
124
-
125
- }
126
-
127
- /*
128
- * Request message for tagging an Artifact.
129
- */
130
- message AddTagRequest {
131
- Tag tag = 1;
132
- }
133
-
134
- /*
135
- * Response message for tagging an Artifact.
136
- */
137
- message AddTagResponse {
138
-
139
- }
140
-
141
- // List the artifacts that belong to the Dataset, optionally filtered using filtered expression.
142
- message ListArtifactsRequest {
143
- // Use a datasetID for which you want to retrieve the artifacts
144
- DatasetID dataset = 1;
145
-
146
- // Apply the filter expression to this query
147
- FilterExpression filter = 2;
148
- // Pagination options to get a page of artifacts
149
- PaginationOptions pagination = 3;
150
- }
151
-
152
- // Response to list artifacts
153
- message ListArtifactsResponse {
154
- // The list of artifacts
155
- repeated Artifact artifacts = 1;
156
- // Token to use to request the next page, pass this into the next requests PaginationOptions
157
- string next_token = 2;
158
- }
159
-
160
- // List the datasets for the given query
161
- message ListDatasetsRequest {
162
- // Apply the filter expression to this query
163
- FilterExpression filter = 1;
164
- // Pagination options to get a page of datasets
165
- PaginationOptions pagination = 2;
166
- }
167
-
168
- // List the datasets response with token for next pagination
169
- message ListDatasetsResponse {
170
- // The list of datasets
171
- repeated Dataset datasets = 1;
172
- // Token to use to request the next page, pass this into the next requests PaginationOptions
173
- string next_token = 2;
174
- }
175
-
176
- /*
177
- * Request message for updating an Artifact and overwriting its associated ArtifactData.
178
- */
179
- message UpdateArtifactRequest {
180
- // ID of dataset the artifact is associated with
181
- DatasetID dataset = 1;
182
-
183
- // Either ID of artifact or name of tag to retrieve existing artifact from
184
- oneof query_handle {
185
- string artifact_id = 2;
186
- string tag_name = 3;
187
- }
188
-
189
- // List of data to overwrite stored artifact data with. Must contain ALL data for updated Artifact as any missing
190
- // ArtifactData entries will be removed from the underlying blob storage and database.
191
- repeated ArtifactData data = 4;
192
-
193
- // Update execution metadata (including execution domain, name, node, project data) when overwriting cache
194
- Metadata metadata = 5;
195
- }
196
-
197
- /*
198
- * Response message for updating an Artifact.
199
- */
200
- message UpdateArtifactResponse {
201
- // The unique ID of the artifact updated
202
- string artifact_id = 1;
203
- }
204
-
205
- /*
206
- * ReservationID message that is composed of several string fields.
207
- */
208
- message ReservationID {
209
- // The unique ID for the reserved dataset
210
- DatasetID dataset_id = 1;
211
-
212
- // The specific artifact tag for the reservation
213
- string tag_name = 2;
214
- }
215
-
216
- // Try to acquire or extend an artifact reservation. If an active reservation exists, retrieve that instance.
217
- message GetOrExtendReservationRequest {
218
- // The unique ID for the reservation
219
- ReservationID reservation_id = 1;
220
-
221
- // The unique ID of the owner for the reservation
222
- string owner_id = 2;
223
-
224
- // Requested reservation extension heartbeat interval
225
- google.protobuf.Duration heartbeat_interval = 3;
226
- }
227
-
228
- // A reservation including owner, heartbeat interval, expiration timestamp, and various metadata.
229
- message Reservation {
230
- // The unique ID for the reservation
231
- ReservationID reservation_id = 1;
232
-
233
- // The unique ID of the owner for the reservation
234
- string owner_id = 2;
235
-
236
- // Recommended heartbeat interval to extend reservation
237
- google.protobuf.Duration heartbeat_interval = 3;
238
-
239
- // Expiration timestamp of this reservation
240
- google.protobuf.Timestamp expires_at = 4;
241
-
242
- // Free-form metadata associated with the artifact
243
- Metadata metadata = 6;
244
- }
245
-
246
- // Response including either a newly minted reservation or the existing reservation
247
- message GetOrExtendReservationResponse {
248
- // The reservation to be acquired or extended
249
- Reservation reservation = 1;
250
- }
251
-
252
- // Request to release reservation
253
- message ReleaseReservationRequest {
254
- // The unique ID for the reservation
255
- ReservationID reservation_id = 1;
256
-
257
- // The unique ID of the owner for the reservation
258
- string owner_id = 2;
259
- }
260
-
261
- // Response to release reservation
262
- message ReleaseReservationResponse {
263
-
264
- }
265
-
266
- /*
267
- * Dataset message. It is uniquely identified by DatasetID.
268
- */
269
- message Dataset {
270
- DatasetID id = 1;
271
- Metadata metadata = 2;
272
- repeated string partitionKeys = 3;
273
- }
274
-
275
- /*
276
- * An artifact could have multiple partitions and each partition can have an arbitrary string key/value pair
277
- */
278
- message Partition {
279
- string key = 1;
280
- string value = 2;
281
- }
282
-
283
- /*
284
- * DatasetID message that is composed of several string fields.
285
- */
286
- message DatasetID {
287
- string project = 1; // The name of the project
288
- string name = 2; // The name of the dataset
289
- string domain = 3; // The domain (eg. environment)
290
- string version = 4; // Version of the data schema
291
- string UUID = 5; // UUID for the dataset (if set, the above fields are optional)
292
-
293
- // Optional, org key applied to the resource.
294
- string org = 6;
295
- }
296
-
297
- /*
298
- * Artifact message. It is composed of several string fields.
299
- */
300
- message Artifact {
301
- string id = 1; // The unique ID of the artifact
302
- DatasetID dataset = 2; // The Dataset that the artifact belongs to
303
- repeated ArtifactData data = 3; // A list of data that is associated with the artifact
304
- Metadata metadata = 4; // Free-form metadata associated with the artifact
305
- repeated Partition partitions = 5;
306
- repeated Tag tags = 6;
307
- google.protobuf.Timestamp created_at = 7; // creation timestamp of artifact, autogenerated by service
308
- }
309
-
310
- /*
311
- * ArtifactData that belongs to an artifact
312
- */
313
- message ArtifactData {
314
- string name = 1;
315
- flyteidl.core.Literal value = 2;
316
- }
317
-
318
- /*
319
- * Tag message that is unique to a Dataset. It is associated to a single artifact and
320
- * can be retrieved by name later.
321
- */
322
- message Tag {
323
- string name = 1; // Name of tag
324
- string artifact_id = 2; // The tagged artifact
325
- DatasetID dataset = 3; // The Dataset that this tag belongs to
326
- }
327
-
328
- /*
329
- * Metadata representation for artifacts and datasets
330
- */
331
- message Metadata {
332
- map<string, string> key_map = 1; // key map is a dictionary of key/val strings that represent metadata
333
- }
334
-
335
- // Filter expression that is composed of a combination of single filters
336
- message FilterExpression {
337
- repeated SinglePropertyFilter filters = 1;
338
- }
339
-
340
- // A single property to filter on.
341
- message SinglePropertyFilter {
342
- oneof property_filter {
343
- TagPropertyFilter tag_filter = 1;
344
- PartitionPropertyFilter partition_filter = 2;
345
- ArtifactPropertyFilter artifact_filter = 3;
346
- DatasetPropertyFilter dataset_filter = 4;
347
- }
348
-
349
- // as use-cases come up we can add more operators, ex: gte, like, not eq etc.
350
- enum ComparisonOperator {
351
- EQUALS = 0;
352
- }
353
-
354
- ComparisonOperator operator = 10; // field 10 in case we add more entities to query
355
- // Next field number: 11
356
- }
357
-
358
- // Artifact properties we can filter by
359
- message ArtifactPropertyFilter {
360
- // oneof because we can add more properties in the future
361
- oneof property {
362
- string artifact_id = 1;
363
- }
364
- }
365
-
366
- // Tag properties we can filter by
367
- message TagPropertyFilter {
368
- oneof property {
369
- string tag_name = 1;
370
- }
371
- }
372
-
373
- // Partition properties we can filter by
374
- message PartitionPropertyFilter {
375
- oneof property {
376
- KeyValuePair key_val = 1;
377
- }
378
- }
379
-
380
- message KeyValuePair {
381
- string key = 1;
382
- string value = 2;
383
- }
384
-
385
- // Dataset properties we can filter by
386
- message DatasetPropertyFilter {
387
- oneof property {
388
- string project = 1;
389
- string name = 2;
390
- string domain = 3;
391
- string version = 4;
392
- // Optional, org key applied to the dataset.
393
- string org = 5;
394
- }
395
- }
396
-
397
- // Pagination options for making list requests
398
- message PaginationOptions {
399
-
400
- // the max number of results to return
401
- uint32 limit = 1;
402
-
403
- // the token to pass to fetch the next page
404
- string token = 2;
405
-
406
- // the property that we want to sort the results by
407
- SortKey sortKey = 3;
408
-
409
- // the sort order of the results
410
- SortOrder sortOrder = 4;
411
-
412
- enum SortOrder {
413
- DESCENDING = 0;
414
- ASCENDING = 1;
415
- }
416
-
417
- enum SortKey {
418
- CREATION_TIME = 0;
419
- }
420
- }
@@ -1,81 +0,0 @@
1
- syntax = "proto3";
2
-
3
- package flyteidl.event;
4
-
5
- option go_package = "github.com/flyteorg/flyte/flyteidl/gen/pb-go/flyteidl/event";
6
-
7
- import "flyteidl/event/event.proto";
8
- import "flyteidl/core/literals.proto";
9
- import "flyteidl/core/interface.proto";
10
- import "flyteidl/core/artifact_id.proto";
11
- import "flyteidl/core/identifier.proto";
12
- import "google/protobuf/timestamp.proto";
13
-
14
- // This is the cloud event parallel to the raw WorkflowExecutionEvent message. It's filled in with additional
15
- // information that downstream consumers may find useful.
16
- message CloudEventWorkflowExecution {
17
- event.WorkflowExecutionEvent raw_event = 1;
18
-
19
- core.TypedInterface output_interface = 2;
20
-
21
- // The following are ExecutionMetadata fields
22
- // We can't have the ExecutionMetadata object directly because of import cycle
23
- repeated core.ArtifactID artifact_ids = 3;
24
- core.WorkflowExecutionIdentifier reference_execution = 4;
25
- string principal = 5;
26
-
27
- // The ID of the LP that generated the execution that generated the Artifact.
28
- // Here for provenance information.
29
- // Launch plan IDs are easier to get than workflow IDs so we'll use these for now.
30
- core.Identifier launch_plan_id = 6;
31
-
32
- // We can't have the ExecutionMetadata object directly because of import cycle
33
- map<string,string> labels = 7;
34
- }
35
-
36
- message CloudEventNodeExecution {
37
- event.NodeExecutionEvent raw_event = 1;
38
-
39
- // The relevant task execution if applicable
40
- core.TaskExecutionIdentifier task_exec_id = 2;
41
-
42
- // The typed interface for the task that produced the event.
43
- core.TypedInterface output_interface = 3;
44
-
45
- // The following are ExecutionMetadata fields
46
- // We can't have the ExecutionMetadata object directly because of import cycle
47
- repeated core.ArtifactID artifact_ids = 4;
48
- string principal = 5;
49
-
50
- // The ID of the LP that generated the execution that generated the Artifact.
51
- // Here for provenance information.
52
- // Launch plan IDs are easier to get than workflow IDs so we'll use these for now.
53
- core.Identifier launch_plan_id = 6;
54
-
55
- // We can't have the ExecutionMetadata object directly because of import cycle
56
- map<string,string> labels = 7;
57
- }
58
-
59
- message CloudEventTaskExecution {
60
- event.TaskExecutionEvent raw_event = 1;
61
- // We can't have the ExecutionMetadata object directly because of import cycle
62
- map<string,string> labels = 2;
63
- }
64
-
65
- // This event is to be sent by Admin after it creates an execution.
66
- message CloudEventExecutionStart {
67
- // The execution created.
68
- core.WorkflowExecutionIdentifier execution_id = 1;
69
- // The launch plan used.
70
- core.Identifier launch_plan_id = 2;
71
-
72
- core.Identifier workflow_id = 3;
73
-
74
- // Artifact inputs to the workflow execution for which we have the full Artifact ID. These are likely the result of artifact queries that are run.
75
- repeated core.ArtifactID artifact_ids = 4;
76
-
77
- // Artifact inputs to the workflow execution for which we only have the tracking bit that's installed into the Literal's metadata by the Artifact service.
78
- repeated string artifact_trackers = 5;
79
-
80
- string principal = 6;
81
- }