@flyteorg/flyteidl 0.24.18 → 0.24.21
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/package.json +1 -1
- package/protos/docs/admin/admin.rst +3765 -0
- package/protos/docs/core/core.rst +3656 -0
- package/protos/docs/datacatalog/datacatalog.rst +1237 -0
- package/protos/docs/event/event.rst +694 -0
- package/protos/docs/plugins/plugins.rst +549 -0
- package/protos/docs/service/service.rst +355 -0
|
@@ -0,0 +1,549 @@
|
|
|
1
|
+
######################
|
|
2
|
+
Protocol Documentation
|
|
3
|
+
######################
|
|
4
|
+
|
|
5
|
+
|
|
6
|
+
|
|
7
|
+
|
|
8
|
+
.. _ref_flyteidl/plugins/array_job.proto:
|
|
9
|
+
|
|
10
|
+
flyteidl/plugins/array_job.proto
|
|
11
|
+
==================================================================
|
|
12
|
+
|
|
13
|
+
|
|
14
|
+
|
|
15
|
+
|
|
16
|
+
|
|
17
|
+
.. _ref_flyteidl.plugins.ArrayJob:
|
|
18
|
+
|
|
19
|
+
ArrayJob
|
|
20
|
+
------------------------------------------------------------------
|
|
21
|
+
|
|
22
|
+
Describes a job that can process independent pieces of data concurrently. Multiple copies of the runnable component
|
|
23
|
+
will be executed concurrently.
|
|
24
|
+
|
|
25
|
+
|
|
26
|
+
|
|
27
|
+
.. csv-table:: ArrayJob type fields
|
|
28
|
+
:header: "Field", "Type", "Label", "Description"
|
|
29
|
+
:widths: auto
|
|
30
|
+
|
|
31
|
+
"parallelism", ":ref:`ref_int64`", "", "Defines the minimum number of instances to bring up concurrently at any given point. Note that this is an optimistic restriction and that, due to network partitioning or other failures, the actual number of currently running instances might be more. This has to be a positive number if assigned. Default value is size."
|
|
32
|
+
"size", ":ref:`ref_int64`", "", "Defines the number of instances to launch at most. This number should match the size of the input if the job requires processing of all input data. This has to be a positive number. In the case this is not defined, the back-end will determine the size at run-time by reading the inputs."
|
|
33
|
+
"min_successes", ":ref:`ref_int64`", "", "An absolute number of the minimum number of successful completions of subtasks. As soon as this criteria is met, the array job will be marked as successful and outputs will be computed. This has to be a non-negative number if assigned. Default value is size (if specified)."
|
|
34
|
+
"min_success_ratio", ":ref:`ref_float`", "", "If the array job size is not known beforehand, the min_success_ratio can instead be used to determine when an array job can be marked successful."
|
|
35
|
+
|
|
36
|
+
|
|
37
|
+
|
|
38
|
+
|
|
39
|
+
|
|
40
|
+
<!-- end messages -->
|
|
41
|
+
|
|
42
|
+
<!-- end enums -->
|
|
43
|
+
|
|
44
|
+
<!-- end HasExtensions -->
|
|
45
|
+
|
|
46
|
+
<!-- end services -->
|
|
47
|
+
|
|
48
|
+
|
|
49
|
+
|
|
50
|
+
|
|
51
|
+
.. _ref_flyteidl/plugins/mpi.proto:
|
|
52
|
+
|
|
53
|
+
flyteidl/plugins/mpi.proto
|
|
54
|
+
==================================================================
|
|
55
|
+
|
|
56
|
+
|
|
57
|
+
|
|
58
|
+
|
|
59
|
+
|
|
60
|
+
.. _ref_flyteidl.plugins.DistributedMPITrainingTask:
|
|
61
|
+
|
|
62
|
+
DistributedMPITrainingTask
|
|
63
|
+
------------------------------------------------------------------
|
|
64
|
+
|
|
65
|
+
MPI operator proposal https://github.com/kubeflow/community/blob/master/proposals/mpi-operator-proposal.md
|
|
66
|
+
Custom proto for plugin that enables distributed training using https://github.com/kubeflow/mpi-operator
|
|
67
|
+
|
|
68
|
+
|
|
69
|
+
|
|
70
|
+
.. csv-table:: DistributedMPITrainingTask type fields
|
|
71
|
+
:header: "Field", "Type", "Label", "Description"
|
|
72
|
+
:widths: auto
|
|
73
|
+
|
|
74
|
+
"num_workers", ":ref:`ref_int32`", "", "number of worker spawned in the cluster for this job"
|
|
75
|
+
"num_launcher_replicas", ":ref:`ref_int32`", "", "number of launcher replicas spawned in the cluster for this job. The launcher pod invokes mpirun and communicates with worker pods through MPI."
|
|
76
|
+
"slots", ":ref:`ref_int32`", "", "number of slots per worker used in hostfile. The available slots (GPUs) in each pod."
|
|
77
|
+
|
|
78
|
+
|
|
79
|
+
|
|
80
|
+
|
|
81
|
+
|
|
82
|
+
<!-- end messages -->
|
|
83
|
+
|
|
84
|
+
<!-- end enums -->
|
|
85
|
+
|
|
86
|
+
<!-- end HasExtensions -->
|
|
87
|
+
|
|
88
|
+
<!-- end services -->
|
|
89
|
+
|
|
90
|
+
|
|
91
|
+
|
|
92
|
+
|
|
93
|
+
.. _ref_flyteidl/plugins/presto.proto:
|
|
94
|
+
|
|
95
|
+
flyteidl/plugins/presto.proto
|
|
96
|
+
==================================================================
|
|
97
|
+
|
|
98
|
+
|
|
99
|
+
|
|
100
|
+
|
|
101
|
+
|
|
102
|
+
.. _ref_flyteidl.plugins.PrestoQuery:
|
|
103
|
+
|
|
104
|
+
PrestoQuery
|
|
105
|
+
------------------------------------------------------------------
|
|
106
|
+
|
|
107
|
+
This message works with the 'presto' task type in the SDK and is the object that will be in the 'custom' field
|
|
108
|
+
of a Presto task's TaskTemplate
|
|
109
|
+
|
|
110
|
+
|
|
111
|
+
|
|
112
|
+
.. csv-table:: PrestoQuery type fields
|
|
113
|
+
:header: "Field", "Type", "Label", "Description"
|
|
114
|
+
:widths: auto
|
|
115
|
+
|
|
116
|
+
"routing_group", ":ref:`ref_string`", "", ""
|
|
117
|
+
"catalog", ":ref:`ref_string`", "", ""
|
|
118
|
+
"schema", ":ref:`ref_string`", "", ""
|
|
119
|
+
"statement", ":ref:`ref_string`", "", ""
|
|
120
|
+
|
|
121
|
+
|
|
122
|
+
|
|
123
|
+
|
|
124
|
+
|
|
125
|
+
<!-- end messages -->
|
|
126
|
+
|
|
127
|
+
<!-- end enums -->
|
|
128
|
+
|
|
129
|
+
<!-- end HasExtensions -->
|
|
130
|
+
|
|
131
|
+
<!-- end services -->
|
|
132
|
+
|
|
133
|
+
|
|
134
|
+
|
|
135
|
+
|
|
136
|
+
.. _ref_flyteidl/plugins/pytorch.proto:
|
|
137
|
+
|
|
138
|
+
flyteidl/plugins/pytorch.proto
|
|
139
|
+
==================================================================
|
|
140
|
+
|
|
141
|
+
|
|
142
|
+
|
|
143
|
+
|
|
144
|
+
|
|
145
|
+
.. _ref_flyteidl.plugins.DistributedPyTorchTrainingTask:
|
|
146
|
+
|
|
147
|
+
DistributedPyTorchTrainingTask
|
|
148
|
+
------------------------------------------------------------------
|
|
149
|
+
|
|
150
|
+
Custom proto for plugin that enables distributed training using https://github.com/kubeflow/pytorch-operator
|
|
151
|
+
|
|
152
|
+
|
|
153
|
+
|
|
154
|
+
.. csv-table:: DistributedPyTorchTrainingTask type fields
|
|
155
|
+
:header: "Field", "Type", "Label", "Description"
|
|
156
|
+
:widths: auto
|
|
157
|
+
|
|
158
|
+
"workers", ":ref:`ref_int32`", "", "number of worker replicas spawned in the cluster for this job"
|
|
159
|
+
|
|
160
|
+
|
|
161
|
+
|
|
162
|
+
|
|
163
|
+
|
|
164
|
+
<!-- end messages -->
|
|
165
|
+
|
|
166
|
+
<!-- end enums -->
|
|
167
|
+
|
|
168
|
+
<!-- end HasExtensions -->
|
|
169
|
+
|
|
170
|
+
<!-- end services -->
|
|
171
|
+
|
|
172
|
+
|
|
173
|
+
|
|
174
|
+
|
|
175
|
+
.. _ref_flyteidl/plugins/qubole.proto:
|
|
176
|
+
|
|
177
|
+
flyteidl/plugins/qubole.proto
|
|
178
|
+
==================================================================
|
|
179
|
+
|
|
180
|
+
|
|
181
|
+
|
|
182
|
+
|
|
183
|
+
|
|
184
|
+
.. _ref_flyteidl.plugins.HiveQuery:
|
|
185
|
+
|
|
186
|
+
HiveQuery
|
|
187
|
+
------------------------------------------------------------------
|
|
188
|
+
|
|
189
|
+
Defines a query to execute on a hive cluster.
|
|
190
|
+
|
|
191
|
+
|
|
192
|
+
|
|
193
|
+
.. csv-table:: HiveQuery type fields
|
|
194
|
+
:header: "Field", "Type", "Label", "Description"
|
|
195
|
+
:widths: auto
|
|
196
|
+
|
|
197
|
+
"query", ":ref:`ref_string`", "", ""
|
|
198
|
+
"timeout_sec", ":ref:`ref_uint32`", "", ""
|
|
199
|
+
"retryCount", ":ref:`ref_uint32`", "", ""
|
|
200
|
+
|
|
201
|
+
|
|
202
|
+
|
|
203
|
+
|
|
204
|
+
|
|
205
|
+
|
|
206
|
+
|
|
207
|
+
.. _ref_flyteidl.plugins.HiveQueryCollection:
|
|
208
|
+
|
|
209
|
+
HiveQueryCollection
|
|
210
|
+
------------------------------------------------------------------
|
|
211
|
+
|
|
212
|
+
Defines a collection of hive queries.
|
|
213
|
+
|
|
214
|
+
|
|
215
|
+
|
|
216
|
+
.. csv-table:: HiveQueryCollection type fields
|
|
217
|
+
:header: "Field", "Type", "Label", "Description"
|
|
218
|
+
:widths: auto
|
|
219
|
+
|
|
220
|
+
"queries", ":ref:`ref_flyteidl.plugins.HiveQuery`", "repeated", ""
|
|
221
|
+
|
|
222
|
+
|
|
223
|
+
|
|
224
|
+
|
|
225
|
+
|
|
226
|
+
|
|
227
|
+
|
|
228
|
+
.. _ref_flyteidl.plugins.QuboleHiveJob:
|
|
229
|
+
|
|
230
|
+
QuboleHiveJob
|
|
231
|
+
------------------------------------------------------------------
|
|
232
|
+
|
|
233
|
+
This message works with the 'hive' task type in the SDK and is the object that will be in the 'custom' field
|
|
234
|
+
of a hive task's TaskTemplate
|
|
235
|
+
|
|
236
|
+
|
|
237
|
+
|
|
238
|
+
.. csv-table:: QuboleHiveJob type fields
|
|
239
|
+
:header: "Field", "Type", "Label", "Description"
|
|
240
|
+
:widths: auto
|
|
241
|
+
|
|
242
|
+
"cluster_label", ":ref:`ref_string`", "", ""
|
|
243
|
+
"query_collection", ":ref:`ref_flyteidl.plugins.HiveQueryCollection`", "", "**Deprecated.** "
|
|
244
|
+
"tags", ":ref:`ref_string`", "repeated", ""
|
|
245
|
+
"query", ":ref:`ref_flyteidl.plugins.HiveQuery`", "", ""
|
|
246
|
+
|
|
247
|
+
|
|
248
|
+
|
|
249
|
+
|
|
250
|
+
|
|
251
|
+
<!-- end messages -->
|
|
252
|
+
|
|
253
|
+
<!-- end enums -->
|
|
254
|
+
|
|
255
|
+
<!-- end HasExtensions -->
|
|
256
|
+
|
|
257
|
+
<!-- end services -->
|
|
258
|
+
|
|
259
|
+
|
|
260
|
+
|
|
261
|
+
|
|
262
|
+
.. _ref_flyteidl/plugins/sidecar.proto:
|
|
263
|
+
|
|
264
|
+
flyteidl/plugins/sidecar.proto
|
|
265
|
+
==================================================================
|
|
266
|
+
|
|
267
|
+
|
|
268
|
+
|
|
269
|
+
|
|
270
|
+
|
|
271
|
+
.. _ref_flyteidl.plugins.SidecarJob:
|
|
272
|
+
|
|
273
|
+
SidecarJob
|
|
274
|
+
------------------------------------------------------------------
|
|
275
|
+
|
|
276
|
+
A sidecar job brings up the desired pod_spec.
|
|
277
|
+
The plugin executor is responsible for keeping the pod alive until the primary container terminates
|
|
278
|
+
or the task itself times out.
|
|
279
|
+
|
|
280
|
+
|
|
281
|
+
|
|
282
|
+
.. csv-table:: SidecarJob type fields
|
|
283
|
+
:header: "Field", "Type", "Label", "Description"
|
|
284
|
+
:widths: auto
|
|
285
|
+
|
|
286
|
+
"pod_spec", ":ref:`ref_k8s.io.api.core.v1.PodSpec`", "", ""
|
|
287
|
+
"primary_container_name", ":ref:`ref_string`", "", ""
|
|
288
|
+
"annotations", ":ref:`ref_flyteidl.plugins.SidecarJob.AnnotationsEntry`", "repeated", "Pod annotations"
|
|
289
|
+
"labels", ":ref:`ref_flyteidl.plugins.SidecarJob.LabelsEntry`", "repeated", "Pod labels"
|
|
290
|
+
|
|
291
|
+
|
|
292
|
+
|
|
293
|
+
|
|
294
|
+
|
|
295
|
+
|
|
296
|
+
|
|
297
|
+
.. _ref_flyteidl.plugins.SidecarJob.AnnotationsEntry:
|
|
298
|
+
|
|
299
|
+
SidecarJob.AnnotationsEntry
|
|
300
|
+
------------------------------------------------------------------
|
|
301
|
+
|
|
302
|
+
|
|
303
|
+
|
|
304
|
+
|
|
305
|
+
|
|
306
|
+
.. csv-table:: SidecarJob.AnnotationsEntry type fields
|
|
307
|
+
:header: "Field", "Type", "Label", "Description"
|
|
308
|
+
:widths: auto
|
|
309
|
+
|
|
310
|
+
"key", ":ref:`ref_string`", "", ""
|
|
311
|
+
"value", ":ref:`ref_string`", "", ""
|
|
312
|
+
|
|
313
|
+
|
|
314
|
+
|
|
315
|
+
|
|
316
|
+
|
|
317
|
+
|
|
318
|
+
|
|
319
|
+
.. _ref_flyteidl.plugins.SidecarJob.LabelsEntry:
|
|
320
|
+
|
|
321
|
+
SidecarJob.LabelsEntry
|
|
322
|
+
------------------------------------------------------------------
|
|
323
|
+
|
|
324
|
+
|
|
325
|
+
|
|
326
|
+
|
|
327
|
+
|
|
328
|
+
.. csv-table:: SidecarJob.LabelsEntry type fields
|
|
329
|
+
:header: "Field", "Type", "Label", "Description"
|
|
330
|
+
:widths: auto
|
|
331
|
+
|
|
332
|
+
"key", ":ref:`ref_string`", "", ""
|
|
333
|
+
"value", ":ref:`ref_string`", "", ""
|
|
334
|
+
|
|
335
|
+
|
|
336
|
+
|
|
337
|
+
|
|
338
|
+
|
|
339
|
+
<!-- end messages -->
|
|
340
|
+
|
|
341
|
+
<!-- end enums -->
|
|
342
|
+
|
|
343
|
+
<!-- end HasExtensions -->
|
|
344
|
+
|
|
345
|
+
<!-- end services -->
|
|
346
|
+
|
|
347
|
+
|
|
348
|
+
|
|
349
|
+
|
|
350
|
+
.. _ref_flyteidl/plugins/spark.proto:
|
|
351
|
+
|
|
352
|
+
flyteidl/plugins/spark.proto
|
|
353
|
+
==================================================================
|
|
354
|
+
|
|
355
|
+
|
|
356
|
+
|
|
357
|
+
|
|
358
|
+
|
|
359
|
+
.. _ref_flyteidl.plugins.SparkApplication:
|
|
360
|
+
|
|
361
|
+
SparkApplication
|
|
362
|
+
------------------------------------------------------------------
|
|
363
|
+
|
|
364
|
+
|
|
365
|
+
|
|
366
|
+
|
|
367
|
+
|
|
368
|
+
|
|
369
|
+
|
|
370
|
+
|
|
371
|
+
|
|
372
|
+
|
|
373
|
+
.. _ref_flyteidl.plugins.SparkJob:
|
|
374
|
+
|
|
375
|
+
SparkJob
|
|
376
|
+
------------------------------------------------------------------
|
|
377
|
+
|
|
378
|
+
Custom Proto for Spark Plugin.
|
|
379
|
+
|
|
380
|
+
|
|
381
|
+
|
|
382
|
+
.. csv-table:: SparkJob type fields
|
|
383
|
+
:header: "Field", "Type", "Label", "Description"
|
|
384
|
+
:widths: auto
|
|
385
|
+
|
|
386
|
+
"applicationType", ":ref:`ref_flyteidl.plugins.SparkApplication.Type`", "", ""
|
|
387
|
+
"mainApplicationFile", ":ref:`ref_string`", "", ""
|
|
388
|
+
"mainClass", ":ref:`ref_string`", "", ""
|
|
389
|
+
"sparkConf", ":ref:`ref_flyteidl.plugins.SparkJob.SparkConfEntry`", "repeated", ""
|
|
390
|
+
"hadoopConf", ":ref:`ref_flyteidl.plugins.SparkJob.HadoopConfEntry`", "repeated", ""
|
|
391
|
+
"executorPath", ":ref:`ref_string`", "", "Executor path for Python jobs."
|
|
392
|
+
|
|
393
|
+
|
|
394
|
+
|
|
395
|
+
|
|
396
|
+
|
|
397
|
+
|
|
398
|
+
|
|
399
|
+
.. _ref_flyteidl.plugins.SparkJob.HadoopConfEntry:
|
|
400
|
+
|
|
401
|
+
SparkJob.HadoopConfEntry
|
|
402
|
+
------------------------------------------------------------------
|
|
403
|
+
|
|
404
|
+
|
|
405
|
+
|
|
406
|
+
|
|
407
|
+
|
|
408
|
+
.. csv-table:: SparkJob.HadoopConfEntry type fields
|
|
409
|
+
:header: "Field", "Type", "Label", "Description"
|
|
410
|
+
:widths: auto
|
|
411
|
+
|
|
412
|
+
"key", ":ref:`ref_string`", "", ""
|
|
413
|
+
"value", ":ref:`ref_string`", "", ""
|
|
414
|
+
|
|
415
|
+
|
|
416
|
+
|
|
417
|
+
|
|
418
|
+
|
|
419
|
+
|
|
420
|
+
|
|
421
|
+
.. _ref_flyteidl.plugins.SparkJob.SparkConfEntry:
|
|
422
|
+
|
|
423
|
+
SparkJob.SparkConfEntry
|
|
424
|
+
------------------------------------------------------------------
|
|
425
|
+
|
|
426
|
+
|
|
427
|
+
|
|
428
|
+
|
|
429
|
+
|
|
430
|
+
.. csv-table:: SparkJob.SparkConfEntry type fields
|
|
431
|
+
:header: "Field", "Type", "Label", "Description"
|
|
432
|
+
:widths: auto
|
|
433
|
+
|
|
434
|
+
"key", ":ref:`ref_string`", "", ""
|
|
435
|
+
"value", ":ref:`ref_string`", "", ""
|
|
436
|
+
|
|
437
|
+
|
|
438
|
+
|
|
439
|
+
|
|
440
|
+
|
|
441
|
+
<!-- end messages -->
|
|
442
|
+
|
|
443
|
+
|
|
444
|
+
|
|
445
|
+
.. _ref_flyteidl.plugins.SparkApplication.Type:
|
|
446
|
+
|
|
447
|
+
SparkApplication.Type
|
|
448
|
+
------------------------------------------------------------------
|
|
449
|
+
|
|
450
|
+
|
|
451
|
+
|
|
452
|
+
.. csv-table:: Enum SparkApplication.Type values
|
|
453
|
+
:header: "Name", "Number", "Description"
|
|
454
|
+
:widths: auto
|
|
455
|
+
|
|
456
|
+
"PYTHON", "0", ""
|
|
457
|
+
"JAVA", "1", ""
|
|
458
|
+
"SCALA", "2", ""
|
|
459
|
+
"R", "3", ""
|
|
460
|
+
|
|
461
|
+
<!-- end enums -->
|
|
462
|
+
|
|
463
|
+
<!-- end HasExtensions -->
|
|
464
|
+
|
|
465
|
+
<!-- end services -->
|
|
466
|
+
|
|
467
|
+
|
|
468
|
+
|
|
469
|
+
|
|
470
|
+
.. _ref_flyteidl/plugins/tensorflow.proto:
|
|
471
|
+
|
|
472
|
+
flyteidl/plugins/tensorflow.proto
|
|
473
|
+
==================================================================
|
|
474
|
+
|
|
475
|
+
|
|
476
|
+
|
|
477
|
+
|
|
478
|
+
|
|
479
|
+
.. _ref_flyteidl.plugins.DistributedTensorflowTrainingTask:
|
|
480
|
+
|
|
481
|
+
DistributedTensorflowTrainingTask
|
|
482
|
+
------------------------------------------------------------------
|
|
483
|
+
|
|
484
|
+
Custom proto for plugin that enables distributed training using https://github.com/kubeflow/tf-operator
|
|
485
|
+
|
|
486
|
+
|
|
487
|
+
|
|
488
|
+
.. csv-table:: DistributedTensorflowTrainingTask type fields
|
|
489
|
+
:header: "Field", "Type", "Label", "Description"
|
|
490
|
+
:widths: auto
|
|
491
|
+
|
|
492
|
+
"workers", ":ref:`ref_int32`", "", "number of worker, ps, chief replicas spawned in the cluster for this job"
|
|
493
|
+
"ps_replicas", ":ref:`ref_int32`", "", "PS -> Parameter server"
|
|
494
|
+
"chief_replicas", ":ref:`ref_int32`", "", ""
|
|
495
|
+
|
|
496
|
+
|
|
497
|
+
|
|
498
|
+
|
|
499
|
+
|
|
500
|
+
<!-- end messages -->
|
|
501
|
+
|
|
502
|
+
<!-- end enums -->
|
|
503
|
+
|
|
504
|
+
<!-- end HasExtensions -->
|
|
505
|
+
|
|
506
|
+
<!-- end services -->
|
|
507
|
+
|
|
508
|
+
|
|
509
|
+
|
|
510
|
+
|
|
511
|
+
.. _ref_flyteidl/plugins/waitable.proto:
|
|
512
|
+
|
|
513
|
+
flyteidl/plugins/waitable.proto
|
|
514
|
+
==================================================================
|
|
515
|
+
|
|
516
|
+
|
|
517
|
+
|
|
518
|
+
|
|
519
|
+
|
|
520
|
+
.. _ref_flyteidl.plugins.Waitable:
|
|
521
|
+
|
|
522
|
+
Waitable
|
|
523
|
+
------------------------------------------------------------------
|
|
524
|
+
|
|
525
|
+
Represents an Execution that was launched and could be waited on.
|
|
526
|
+
|
|
527
|
+
|
|
528
|
+
|
|
529
|
+
.. csv-table:: Waitable type fields
|
|
530
|
+
:header: "Field", "Type", "Label", "Description"
|
|
531
|
+
:widths: auto
|
|
532
|
+
|
|
533
|
+
"wf_exec_id", ":ref:`ref_flyteidl.core.WorkflowExecutionIdentifier`", "", ""
|
|
534
|
+
"phase", ":ref:`ref_flyteidl.core.WorkflowExecution.Phase`", "", ""
|
|
535
|
+
"workflow_id", ":ref:`ref_string`", "", ""
|
|
536
|
+
|
|
537
|
+
|
|
538
|
+
|
|
539
|
+
|
|
540
|
+
|
|
541
|
+
<!-- end messages -->
|
|
542
|
+
|
|
543
|
+
<!-- end enums -->
|
|
544
|
+
|
|
545
|
+
<!-- end HasExtensions -->
|
|
546
|
+
|
|
547
|
+
<!-- end services -->
|
|
548
|
+
|
|
549
|
+
|