indexify 0.3.25__tar.gz → 0.3.26__tar.gz

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (68)
  1. {indexify-0.3.25 → indexify-0.3.26}/PKG-INFO +1 -1
  2. {indexify-0.3.25 → indexify-0.3.26}/pyproject.toml +1 -1
  3. {indexify-0.3.25 → indexify-0.3.26}/src/indexify/executor/grpc/state_reconciler.py +38 -28
  4. {indexify-0.3.25 → indexify-0.3.26}/src/indexify/executor/grpc/state_reporter.py +17 -8
  5. {indexify-0.3.25 → indexify-0.3.26}/src/indexify/executor/host_resources/nvidia_gpu.py +7 -6
  6. {indexify-0.3.25 → indexify-0.3.26}/src/indexify/proto/executor_api.proto +5 -5
  7. indexify-0.3.26/src/indexify/proto/executor_api_pb2.py +86 -0
  8. {indexify-0.3.25 → indexify-0.3.26}/src/indexify/proto/executor_api_pb2.pyi +9 -8
  9. indexify-0.3.25/src/indexify/proto/executor_api_pb2.py +0 -86
  10. {indexify-0.3.25 → indexify-0.3.26}/README.md +0 -0
  11. {indexify-0.3.25 → indexify-0.3.26}/src/indexify/cli/cli.py +0 -0
  12. {indexify-0.3.25 → indexify-0.3.26}/src/indexify/executor/README.md +0 -0
  13. {indexify-0.3.25 → indexify-0.3.26}/src/indexify/executor/api_objects.py +0 -0
  14. {indexify-0.3.25 → indexify-0.3.26}/src/indexify/executor/blob_store/blob_store.py +0 -0
  15. {indexify-0.3.25 → indexify-0.3.26}/src/indexify/executor/blob_store/local_fs_blob_store.py +0 -0
  16. {indexify-0.3.25 → indexify-0.3.26}/src/indexify/executor/blob_store/metrics/blob_store.py +0 -0
  17. {indexify-0.3.25 → indexify-0.3.26}/src/indexify/executor/blob_store/s3_blob_store.py +0 -0
  18. {indexify-0.3.25 → indexify-0.3.26}/src/indexify/executor/downloader.py +0 -0
  19. {indexify-0.3.25 → indexify-0.3.26}/src/indexify/executor/executor.py +0 -0
  20. {indexify-0.3.25 → indexify-0.3.26}/src/indexify/executor/executor_flavor.py +0 -0
  21. {indexify-0.3.25 → indexify-0.3.26}/src/indexify/executor/function_executor/function_executor.py +0 -0
  22. {indexify-0.3.25 → indexify-0.3.26}/src/indexify/executor/function_executor/function_executor_state.py +0 -0
  23. {indexify-0.3.25 → indexify-0.3.26}/src/indexify/executor/function_executor/function_executor_states_container.py +0 -0
  24. {indexify-0.3.25 → indexify-0.3.26}/src/indexify/executor/function_executor/function_executor_status.py +0 -0
  25. {indexify-0.3.25 → indexify-0.3.26}/src/indexify/executor/function_executor/health_checker.py +0 -0
  26. {indexify-0.3.25 → indexify-0.3.26}/src/indexify/executor/function_executor/invocation_state_client.py +0 -0
  27. {indexify-0.3.25 → indexify-0.3.26}/src/indexify/executor/function_executor/metrics/function_executor.py +0 -0
  28. {indexify-0.3.25 → indexify-0.3.26}/src/indexify/executor/function_executor/metrics/function_executor_state.py +0 -0
  29. {indexify-0.3.25 → indexify-0.3.26}/src/indexify/executor/function_executor/metrics/function_executor_state_container.py +0 -0
  30. {indexify-0.3.25 → indexify-0.3.26}/src/indexify/executor/function_executor/metrics/health_checker.py +0 -0
  31. {indexify-0.3.25 → indexify-0.3.26}/src/indexify/executor/function_executor/metrics/invocation_state_client.py +0 -0
  32. {indexify-0.3.25 → indexify-0.3.26}/src/indexify/executor/function_executor/metrics/single_task_runner.py +0 -0
  33. {indexify-0.3.25 → indexify-0.3.26}/src/indexify/executor/function_executor/server/client_configuration.py +0 -0
  34. {indexify-0.3.25 → indexify-0.3.26}/src/indexify/executor/function_executor/server/function_executor_server.py +0 -0
  35. {indexify-0.3.25 → indexify-0.3.26}/src/indexify/executor/function_executor/server/function_executor_server_factory.py +0 -0
  36. {indexify-0.3.25 → indexify-0.3.26}/src/indexify/executor/function_executor/server/subprocess_function_executor_server.py +0 -0
  37. {indexify-0.3.25 → indexify-0.3.26}/src/indexify/executor/function_executor/server/subprocess_function_executor_server_factory.py +0 -0
  38. {indexify-0.3.25 → indexify-0.3.26}/src/indexify/executor/function_executor/single_task_runner.py +0 -0
  39. {indexify-0.3.25 → indexify-0.3.26}/src/indexify/executor/function_executor/task_input.py +0 -0
  40. {indexify-0.3.25 → indexify-0.3.26}/src/indexify/executor/function_executor/task_output.py +0 -0
  41. {indexify-0.3.25 → indexify-0.3.26}/src/indexify/executor/grpc/channel_manager.py +0 -0
  42. {indexify-0.3.25 → indexify-0.3.26}/src/indexify/executor/grpc/function_executor_controller.py +0 -0
  43. {indexify-0.3.25 → indexify-0.3.26}/src/indexify/executor/grpc/metrics/channel_manager.py +0 -0
  44. {indexify-0.3.25 → indexify-0.3.26}/src/indexify/executor/grpc/metrics/state_reconciler.py +0 -0
  45. {indexify-0.3.25 → indexify-0.3.26}/src/indexify/executor/grpc/metrics/state_reporter.py +0 -0
  46. {indexify-0.3.25 → indexify-0.3.26}/src/indexify/executor/grpc/metrics/task_controller.py +0 -0
  47. {indexify-0.3.25 → indexify-0.3.26}/src/indexify/executor/grpc/task_controller.py +0 -0
  48. {indexify-0.3.25 → indexify-0.3.26}/src/indexify/executor/host_resources/host_resources.py +0 -0
  49. {indexify-0.3.25 → indexify-0.3.26}/src/indexify/executor/host_resources/nvidia_gpu_allocator.py +0 -0
  50. {indexify-0.3.25 → indexify-0.3.26}/src/indexify/executor/metrics/downloader.py +0 -0
  51. {indexify-0.3.25 → indexify-0.3.26}/src/indexify/executor/metrics/executor.py +0 -0
  52. {indexify-0.3.25 → indexify-0.3.26}/src/indexify/executor/metrics/task_fetcher.py +0 -0
  53. {indexify-0.3.25 → indexify-0.3.26}/src/indexify/executor/metrics/task_reporter.py +0 -0
  54. {indexify-0.3.25 → indexify-0.3.26}/src/indexify/executor/metrics/task_runner.py +0 -0
  55. {indexify-0.3.25 → indexify-0.3.26}/src/indexify/executor/monitoring/function_allowlist.py +0 -0
  56. {indexify-0.3.25 → indexify-0.3.26}/src/indexify/executor/monitoring/handler.py +0 -0
  57. {indexify-0.3.25 → indexify-0.3.26}/src/indexify/executor/monitoring/health_check_handler.py +0 -0
  58. {indexify-0.3.25 → indexify-0.3.26}/src/indexify/executor/monitoring/health_checker/generic_health_checker.py +0 -0
  59. {indexify-0.3.25 → indexify-0.3.26}/src/indexify/executor/monitoring/health_checker/health_checker.py +0 -0
  60. {indexify-0.3.25 → indexify-0.3.26}/src/indexify/executor/monitoring/metrics.py +0 -0
  61. {indexify-0.3.25 → indexify-0.3.26}/src/indexify/executor/monitoring/prometheus_metrics_handler.py +0 -0
  62. {indexify-0.3.25 → indexify-0.3.26}/src/indexify/executor/monitoring/server.py +0 -0
  63. {indexify-0.3.25 → indexify-0.3.26}/src/indexify/executor/monitoring/startup_probe_handler.py +0 -0
  64. {indexify-0.3.25 → indexify-0.3.26}/src/indexify/executor/runtime_probes.py +0 -0
  65. {indexify-0.3.25 → indexify-0.3.26}/src/indexify/executor/task_fetcher.py +0 -0
  66. {indexify-0.3.25 → indexify-0.3.26}/src/indexify/executor/task_reporter.py +0 -0
  67. {indexify-0.3.25 → indexify-0.3.26}/src/indexify/executor/task_runner.py +0 -0
  68. {indexify-0.3.25 → indexify-0.3.26}/src/indexify/proto/executor_api_pb2_grpc.py +0 -0
@@ -1,6 +1,6 @@
  Metadata-Version: 2.3
  Name: indexify
- Version: 0.3.25
+ Version: 0.3.26
  Summary: Open Source Indexify components and helper tools
  Home-page: https://github.com/tensorlakeai/indexify
  License: Apache 2.0
@@ -1,7 +1,7 @@
  [tool.poetry]
  name = "indexify"
  # Incremented if any of the components provided in this packages are updated.
- version = "0.3.25"
+ version = "0.3.26"
  description = "Open Source Indexify components and helper tools"
  authors = ["Tensorlake Inc. <support@tensorlake.ai>"]
  license = "Apache 2.0"
@@ -52,6 +52,7 @@ class ExecutorStateReconciler:
  channel_manager: ChannelManager,
  state_reporter: ExecutorStateReporter,
  logger: Any,
+ server_backoff_interval_sec: int = _RECONCILE_STREAM_BACKOFF_INTERVAL_SEC,
  ):
  self._executor_id: str = executor_id
  self._function_executor_server_factory: FunctionExecutorServerFactory = (
@@ -65,6 +66,7 @@ class ExecutorStateReconciler:
  self._state_reporter: ExecutorStateReporter = state_reporter
  self._reconciliation_loop_task: Optional[asyncio.Task] = None
  self._logger: Any = logger.bind(module=__name__)
+ self._server_backoff_interval_sec: int = server_backoff_interval_sec
 
  # Mutable state. Doesn't need lock because we access from async tasks running in the same thread.
  self._is_shutdown: bool = False
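
The two hunks above thread a new `server_backoff_interval_sec` argument through the constructor so the reconnect delay is injectable instead of being read only from the module-level constant. A minimal sketch of the pattern; the 5-second value is an assumption, since only the constant's name appears in this diff:

```python
# Minimal sketch of the new knob, not the real class: the module-level constant
# is the default, and callers (for example tests) can override it per instance.
_RECONCILE_STREAM_BACKOFF_INTERVAL_SEC = 5  # assumed value


class ReconcilerSketch:
    def __init__(
        self,
        server_backoff_interval_sec: int = _RECONCILE_STREAM_BACKOFF_INTERVAL_SEC,
    ):
        self._server_backoff_interval_sec: int = server_backoff_interval_sec


fast_retry = ReconcilerSketch(server_backoff_interval_sec=0)  # e.g. in a unit test
```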
@@ -93,26 +95,29 @@ class ExecutorStateReconciler:
 
  # TODO: Move this into a new async task and cancel it in shutdown().
  while not self._is_shutdown:
- stub = ExecutorAPIStub(await self._channel_manager.get_channel())
- while not self._is_shutdown:
- try:
- # Report state once before starting the stream so Server
- # doesn't use stale state it knew about this Executor in the past.
- await self._state_reporter.report_state(stub)
-
- desired_states_stream: AsyncGenerator[
- DesiredExecutorState, None
- ] = stub.get_desired_executor_states(
+ try:
+ stub = ExecutorAPIStub(await self._channel_manager.get_channel())
+ # Report state once before starting the stream so Server
+ # doesn't use stale state it knew about this Executor in the past.
+ await self._state_reporter.report_state(stub)
+
+ desired_states_stream: AsyncGenerator[DesiredExecutorState, None] = (
+ stub.get_desired_executor_states(
  GetDesiredExecutorStatesRequest(executor_id=self._executor_id)
  )
- await self._process_desired_states_stream(desired_states_stream)
- except Exception as e:
- self._logger.error(
- f"Failed processing desired states stream, reconnecting in {_RECONCILE_STREAM_BACKOFF_INTERVAL_SEC} sec.",
- exc_info=e,
- )
- await asyncio.sleep(_RECONCILE_STREAM_BACKOFF_INTERVAL_SEC)
- break
+ )
+ self._logger.info("created new desired states stream")
+ await self._process_desired_states_stream(desired_states_stream)
+ except Exception as e:
+ self._logger.error(
+ f"error while processing desired states stream",
+ exc_info=e,
+ )
+
+ self._logger.info(
+ f"desired states stream closed, reconnecting in {self._server_backoff_interval_sec} sec"
+ )
+ await asyncio.sleep(self._server_backoff_interval_sec)
 
  async def _process_desired_states_stream(
  self, desired_states: AsyncGenerator[DesiredExecutorState, None]
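
The restructured loop above replaces the nested `while`/`break` with a single loop: each iteration builds a fresh stub, reports state, consumes the stream, and always sleeps for the configured backoff before reconnecting. A self-contained sketch of that shape, with `connect()` and `consume()` as hypothetical stand-ins for the stub creation and `_process_desired_states_stream()`:

```python
import asyncio
import logging
from typing import Any, Awaitable, Callable

logger = logging.getLogger("state_reconciler_sketch")


async def run_stream_with_backoff(
    connect: Callable[[], Awaitable[Any]],      # stand-in for building ExecutorAPIStub + opening the stream
    consume: Callable[[Any], Awaitable[None]],  # stand-in for _process_desired_states_stream()
    backoff_sec: float,
    should_stop: Callable[[], bool],
) -> None:
    while not should_stop():
        try:
            stream = await connect()
            logger.info("created new desired states stream")
            await consume(stream)
        except Exception:
            logger.exception("error while processing desired states stream")
        # Unconditional backoff: runs whether the stream ended cleanly or failed.
        logger.info("desired states stream closed, reconnecting in %s sec", backoff_sec)
        await asyncio.sleep(backoff_sec)
```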
@@ -127,13 +132,18 @@
  validator.required_field("clock")
  except ValueError as e:
  self._logger.error(
- "Received invalid DesiredExecutorState from Server. Ignoring.",
+ "received invalid DesiredExecutorState from Server, ignoring",
  exc_info=e,
  )
  continue
 
  if self._last_server_clock is not None:
  if self._last_server_clock >= new_state.clock:
+ self._logger.warning(
+ "received outdated DesiredExecutorState from Server, ignoring",
+ current_clock=self._last_server_clock,
+ ignored_clock=new_state.clock,
+ )
  continue # Duplicate or outdated message state sent by Server.
 
  self._last_server_clock = new_state.clock
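
The added warning makes the clock-based dedup visible in logs: a `DesiredExecutorState` is only applied when its `clock` is strictly greater than the last one seen. A stripped-down sketch of that gate (the real logic lives inside `_process_desired_states_stream`):

```python
from typing import Optional


class ServerClockGate:
    """Accepts only states whose clock strictly advances; mirrors the check above."""

    def __init__(self) -> None:
        self._last_server_clock: Optional[int] = None

    def accept(self, clock: int) -> bool:
        if self._last_server_clock is not None and self._last_server_clock >= clock:
            return False  # duplicate or outdated message; now logged as a warning
        self._last_server_clock = clock
        return True


gate = ServerClockGate()
assert gate.accept(10) and not gate.accept(10) and gate.accept(11)
```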
@@ -151,7 +161,7 @@
  self._is_shutdown = True
  if self._reconciliation_loop_task is not None:
  self._reconciliation_loop_task.cancel()
- self._logger.info("Reconciliation loop shutdown.")
+ self._logger.info("reconciliation loop shutdown")
 
  for controller in self._task_controllers.values():
  await controller.destroy()
@@ -195,7 +205,7 @@
  return
  except Exception as e:
  self._logger.error(
- "Failed to reconcile desired state. Retrying in 5 secs.",
+ "failed to reconcile desired state, retrying in 5 secs",
  exc_info=e,
  attempt=attempt,
  attempts_left=_RECONCILIATION_RETRIES - attempt,
@@ -204,7 +214,7 @@
 
  metric_state_reconciliation_errors.inc()
  self._logger.error(
- f"Failed to reconcile desired state after {_RECONCILIATION_RETRIES} attempts.",
+ f"failed to reconcile desired state after {_RECONCILIATION_RETRIES} attempts",
  )
 
  async def _reconcile_function_executors(
@@ -246,7 +256,7 @@
  validate_function_executor_description(function_executor_description)
  except ValueError as e:
  logger.error(
- "Received invalid FunctionExecutorDescription from Server. Dropping it from desired state.",
+ "received invalid FunctionExecutorDescription from Server, dropping it from desired state",
  exc_info=e,
  )
  continue
@@ -309,7 +319,7 @@
  # IDLE and start running tasks on it. Server currently doesn't explicitly manage the desired FE status.
  await controller.startup()
  except Exception as e:
- logger.error("Failed adding Function Executor", exc_info=e)
+ logger.error("failed adding Function Executor", exc_info=e)
 
  async def _remove_function_executor_after_shutdown(
  self, function_executor_id: str
@@ -362,7 +372,7 @@
  logger=self._logger,
  )
  except Exception as e:
- logger.error("Failed adding TaskController", exc_info=e)
+ logger.error("failed adding TaskController", exc_info=e)
 
  async def _remove_task(self, task_id: str) -> None:
  """Schedules removal of an existing task.
@@ -382,7 +392,7 @@
  except ValueError as e:
  # There's no way to report this error to Server so just log it.
  logger.error(
- "Received invalid TaskAllocation from Server. Dropping it from desired state.",
+ "received invalid TaskAllocation from Server, dropping it from desired state",
  exc_info=e,
  )
  continue
@@ -393,7 +403,7 @@
  except ValueError as e:
  # There's no way to report this error to Server so just log it.
  logger.error(
- "Received invalid TaskAllocation from Server. Dropping it from desired state.",
+ "received invalid TaskAllocation from Server, dropping it from desired state",
  exc_info=e,
  )
  continue
@@ -405,7 +415,7 @@
  # Current policy: don't report task outcomes for tasks that didn't run.
  # This is required to simplify the protocol so Server doesn't need to care about task states.
  logger.error(
- "Received TaskAllocation for a Function Executor that doesn't exist. Dropping it from desired state."
+ "received TaskAllocation for a Function Executor that doesn't exist, dropping it from desired state"
  )
  continue
 
@@ -19,9 +19,7 @@ from indexify.proto.executor_api_pb2 import (
  FunctionExecutorStatus as FunctionExecutorStatusProto,
  )
  from indexify.proto.executor_api_pb2 import GPUModel as GPUModelProto
- from indexify.proto.executor_api_pb2 import (
- GPUResources,
- )
+ from indexify.proto.executor_api_pb2 import GPUResources as GPUResourcesProto
  from indexify.proto.executor_api_pb2 import HostResources as HostResourcesProto
  from indexify.proto.executor_api_pb2 import (
  ReportExecutorStateRequest,
@@ -36,6 +34,7 @@ from ..function_executor.function_executor_states_container import (
  )
  from ..function_executor.function_executor_status import FunctionExecutorStatus
  from ..host_resources.host_resources import HostResources, HostResourcesProvider
+ from ..host_resources.nvidia_gpu import NVIDIA_GPU_MODEL
  from ..runtime_probes import RuntimeProbes
  from .channel_manager import ChannelManager
  from .metrics.state_reporter import (
@@ -271,12 +270,22 @@ def _host_resources_to_proto(host_resources: HostResources) -> HostResourcesProt
  )
  if len(host_resources.gpus) > 0:
  proto.gpu.CopyFrom(
- GPUResources(
+ GPUResourcesProto(
  count=len(host_resources.gpus),
- deprecated_model=GPUModelProto.GPU_MODEL_UNKNOWN, # TODO: Remove this field
- model=host_resources.gpus[
- 0
- ].model.value, # All GPUs should have the same model
+ model=_gpu_model_to_proto(
+ host_resources.gpus[0].model
+ ), # All GPUs have the same model
  )
  )
  return proto
+
+
+ def _gpu_model_to_proto(gpu_model: NVIDIA_GPU_MODEL) -> GPUModelProto:
+ if gpu_model == NVIDIA_GPU_MODEL.A100_40GB:
+ return GPUModelProto.GPU_MODEL_NVIDIA_A100_40GB
+ elif gpu_model == NVIDIA_GPU_MODEL.A100_80GB:
+ return GPUModelProto.GPU_MODEL_NVIDIA_A100_80GB
+ elif gpu_model == NVIDIA_GPU_MODEL.H100_80GB:
+ return GPUModelProto.GPU_MODEL_NVIDIA_H100_80GB
+ else:
+ return GPUModelProto.GPU_MODEL_UNKNOWN
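
With the proto enum now carrying explicit NVIDIA values, the reporter maps `NVIDIA_GPU_MODEL` to `GPUModelProto` through the new `_gpu_model_to_proto()` helper, falling back to `GPU_MODEL_UNKNOWN`. An equivalent table-driven sketch of the same mapping (a design alternative, not the code in this diff; enum members on both sides are taken from the hunks above):

```python
from indexify.executor.host_resources.nvidia_gpu import NVIDIA_GPU_MODEL
from indexify.proto.executor_api_pb2 import GPUModel as GPUModelProto

_GPU_MODEL_TO_PROTO = {
    NVIDIA_GPU_MODEL.A100_40GB: GPUModelProto.GPU_MODEL_NVIDIA_A100_40GB,
    NVIDIA_GPU_MODEL.A100_80GB: GPUModelProto.GPU_MODEL_NVIDIA_A100_80GB,
    NVIDIA_GPU_MODEL.H100_80GB: GPUModelProto.GPU_MODEL_NVIDIA_H100_80GB,
}


def gpu_model_to_proto(gpu_model: NVIDIA_GPU_MODEL) -> GPUModelProto:
    # Anything not in the table (including UNKNOWN) maps to GPU_MODEL_UNKNOWN.
    return _GPU_MODEL_TO_PROTO.get(gpu_model, GPUModelProto.GPU_MODEL_UNKNOWN)
```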
@@ -9,9 +9,9 @@ from tensorlake.functions_sdk.resources import GPU_MODEL
  # Only NVIDIA GPUs currently supported in Tensorlake SDK are listed here.
  class NVIDIA_GPU_MODEL(str, Enum):
  UNKNOWN = "UNKNOWN"
- A100_40GB = GPU_MODEL.A100_40GB
- A100_80GB = GPU_MODEL.A100_80GB
- H100_80GB = GPU_MODEL.H100
+ A100_40GB = "A100-40GB"
+ A100_80GB = "A100-80GB"
+ H100_80GB = "H100"
 
 
  class NvidiaGPUInfo(BaseModel):
@@ -65,9 +65,10 @@ def fetch_nvidia_gpu_infos(logger: Any) -> List[NvidiaGPUInfo]:
  model = NVIDIA_GPU_MODEL.A100_40GB
  elif product_name.startswith("NVIDIA H100"):
  model = NVIDIA_GPU_MODEL.H100_80GB
-
- if model == NVIDIA_GPU_MODEL.UNKNOWN:
- logger.warning("Unknown GPU model detected", nvidia_smi_output=line)
+ else:
+ logger.warning(
+ "Unknown GPU model was detected, ignoring", nvidia_smi_output=line
+ )
 
  infos.append(
  NvidiaGPUInfo(
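
The detection change replaces the separate `if model == UNKNOWN` check with an `else` branch, so unrecognized boards are logged together with the raw `nvidia-smi` line and still appended with model `UNKNOWN`. A compact, self-contained sketch of that flow; only the H100 prefix is visible in this hunk, so the rest of the prefix table is not shown here:

```python
import logging
from enum import Enum

logger = logging.getLogger("nvidia_gpu_sketch")


class NVIDIA_GPU_MODEL(str, Enum):  # values copied from the enum hunk above
    UNKNOWN = "UNKNOWN"
    A100_40GB = "A100-40GB"
    A100_80GB = "A100-80GB"
    H100_80GB = "H100"


def classify_product_name(product_name: str, raw_line: str) -> NVIDIA_GPU_MODEL:
    # Only the H100 branch is visible in this hunk; the real function also matches A100 boards.
    if product_name.startswith("NVIDIA H100"):
        return NVIDIA_GPU_MODEL.H100_80GB
    logger.warning("Unknown GPU model was detected, ignoring: %s", raw_line)
    return NVIDIA_GPU_MODEL.UNKNOWN
```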
@@ -27,18 +27,18 @@ message DataPayload {
  }
 
  // ===== report_executor_state RPC =====
-
- // Deprecated enum. TODO: remove when all the code is using model string.
  enum GPUModel {
  GPU_MODEL_UNKNOWN = 0;
+ GPU_MODEL_NVIDIA_A100_40GB = 1;
+ GPU_MODEL_NVIDIA_A100_80GB = 2;
+ GPU_MODEL_NVIDIA_H100_80GB = 3;
  }
 
  // Free GPUs available at the Executor.
  message GPUResources {
  optional uint32 count = 1;
- optional GPUModel deprecated_model = 2;
- // Either GPU_MODEL value from Tensorlake SDK or "UNKNOWN"
- optional string model = 3;
+ optional GPUModel model = 2;
+ reserved 3;
  }
 
  // Resources that we're currently tracking and limiting on Executor.
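
On the wire this keeps tag 2 as a `GPUModel` enum (previously named `deprecated_model`, now populated with real values) and retires the string `model` field by reserving tag 3 so it cannot be reused later. A quick check against the regenerated bindings, assuming they are importable from this package version:

```python
# Assumes the regenerated executor_api_pb2 from this release is on the path.
from indexify.proto.executor_api_pb2 import GPUModel, GPUResources

res = GPUResources(count=8, model=GPUModel.GPU_MODEL_NVIDIA_H100_80GB)
assert res.count == 8
assert res.model == GPUModel.GPU_MODEL_NVIDIA_H100_80GB
# Tag 3 (the old string `model`) is reserved, so no field with that number exists anymore.
assert not any(f.number == 3 for f in GPUResources.DESCRIPTOR.fields)
```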
@@ -0,0 +1,86 @@
+ # -*- coding: utf-8 -*-
+ # Generated by the protocol buffer compiler. DO NOT EDIT!
+ # NO CHECKED-IN PROTOBUF GENCODE
+ # source: indexify/proto/executor_api.proto
+ # Protobuf Python Version: 5.29.0
+ """Generated protocol buffer code."""
+ from google.protobuf import descriptor as _descriptor
+ from google.protobuf import descriptor_pool as _descriptor_pool
+ from google.protobuf import runtime_version as _runtime_version
+ from google.protobuf import symbol_database as _symbol_database
+ from google.protobuf.internal import builder as _builder
+
+ _runtime_version.ValidateProtobufRuntimeVersion(
+ _runtime_version.Domain.PUBLIC, 5, 29, 0, "", "indexify/proto/executor_api.proto"
+ )
+ # @@protoc_insertion_point(imports)
+
+ _sym_db = _symbol_database.Default()
+
+
+ DESCRIPTOR = _descriptor_pool.Default().AddSerializedFile(
+ b'\n!indexify/proto/executor_api.proto\x12\x0f\x65xecutor_api_pb"\x87\x02\n\x0b\x44\x61taPayload\x12\x11\n\x04path\x18\x01 \x01(\tH\x00\x88\x01\x01\x12\x11\n\x04size\x18\x02 \x01(\x04H\x01\x88\x01\x01\x12\x18\n\x0bsha256_hash\x18\x03 \x01(\tH\x02\x88\x01\x01\x12\x10\n\x03uri\x18\x04 \x01(\tH\x03\x88\x01\x01\x12;\n\x08\x65ncoding\x18\x05 \x01(\x0e\x32$.executor_api_pb.DataPayloadEncodingH\x04\x88\x01\x01\x12\x1d\n\x10\x65ncoding_version\x18\x06 \x01(\x04H\x05\x88\x01\x01\x42\x07\n\x05_pathB\x07\n\x05_sizeB\x0e\n\x0c_sha256_hashB\x06\n\x04_uriB\x0b\n\t_encodingB\x13\n\x11_encoding_version"k\n\x0cGPUResources\x12\x12\n\x05\x63ount\x18\x01 \x01(\rH\x00\x88\x01\x01\x12-\n\x05model\x18\x02 \x01(\x0e\x32\x19.executor_api_pb.GPUModelH\x01\x88\x01\x01\x42\x08\n\x06_countB\x08\n\x06_modelJ\x04\x08\x03\x10\x04"\xc2\x01\n\rHostResources\x12\x16\n\tcpu_count\x18\x01 \x01(\rH\x00\x88\x01\x01\x12\x19\n\x0cmemory_bytes\x18\x02 \x01(\x04H\x01\x88\x01\x01\x12\x17\n\ndisk_bytes\x18\x03 \x01(\x04H\x02\x88\x01\x01\x12/\n\x03gpu\x18\x04 \x01(\x0b\x32\x1d.executor_api_pb.GPUResourcesH\x03\x88\x01\x01\x42\x0c\n\n_cpu_countB\x0f\n\r_memory_bytesB\r\n\x0b_disk_bytesB\x06\n\x04_gpu"\xbb\x01\n\x0f\x41llowedFunction\x12\x16\n\tnamespace\x18\x01 \x01(\tH\x00\x88\x01\x01\x12\x17\n\ngraph_name\x18\x02 \x01(\tH\x01\x88\x01\x01\x12\x1a\n\rfunction_name\x18\x03 \x01(\tH\x02\x88\x01\x01\x12\x1a\n\rgraph_version\x18\x04 \x01(\tH\x03\x88\x01\x01\x42\x0c\n\n_namespaceB\r\n\x0b_graph_nameB\x10\n\x0e_function_nameB\x10\n\x0e_graph_version"\xc5\x01\n\x19\x46unctionExecutorResources\x12\x1b\n\x0e\x63pu_ms_per_sec\x18\x01 \x01(\rH\x00\x88\x01\x01\x12\x19\n\x0cmemory_bytes\x18\x02 \x01(\x04H\x01\x88\x01\x01\x12\x17\n\ndisk_bytes\x18\x03 \x01(\x04H\x02\x88\x01\x01\x12\x16\n\tgpu_count\x18\x04 \x01(\rH\x03\x88\x01\x01\x42\x11\n\x0f_cpu_ms_per_secB\x0f\n\r_memory_bytesB\r\n\x0b_disk_bytesB\x0c\n\n_gpu_count"\xbf\x04\n\x1b\x46unctionExecutorDescription\x12\x0f\n\x02id\x18\x01 \x01(\tH\x00\x88\x01\x01\x12\x16\n\tnamespace\x18\x02 \x01(\tH\x01\x88\x01\x01\x12\x17\n\ngraph_name\x18\x03 \x01(\tH\x02\x88\x01\x01\x12\x1a\n\rgraph_version\x18\x04 \x01(\tH\x03\x88\x01\x01\x12\x1a\n\rfunction_name\x18\x05 \x01(\tH\x04\x88\x01\x01\x12\x16\n\timage_uri\x18\x06 \x01(\tH\x05\x88\x01\x01\x12\x14\n\x0csecret_names\x18\x07 \x03(\t\x12<\n\x0fresource_limits\x18\x08 \x01(\x0b\x32\x1e.executor_api_pb.HostResourcesH\x06\x88\x01\x01\x12%\n\x18\x63ustomer_code_timeout_ms\x18\t \x01(\rH\x07\x88\x01\x01\x12\x30\n\x05graph\x18\n \x01(\x0b\x32\x1c.executor_api_pb.DataPayloadH\x08\x88\x01\x01\x12\x42\n\tresources\x18\x0b \x01(\x0b\x32*.executor_api_pb.FunctionExecutorResourcesH\t\x88\x01\x01\x42\x05\n\x03_idB\x0c\n\n_namespaceB\r\n\x0b_graph_nameB\x10\n\x0e_graph_versionB\x10\n\x0e_function_nameB\x0c\n\n_image_uriB\x12\n\x10_resource_limitsB\x1b\n\x19_customer_code_timeout_msB\x08\n\x06_graphB\x0c\n\n_resources"\xe8\x01\n\x15\x46unctionExecutorState\x12\x46\n\x0b\x64\x65scription\x18\x01 \x01(\x0b\x32,.executor_api_pb.FunctionExecutorDescriptionH\x00\x88\x01\x01\x12<\n\x06status\x18\x02 \x01(\x0e\x32\'.executor_api_pb.FunctionExecutorStatusH\x01\x88\x01\x01\x12\x1b\n\x0estatus_message\x18\x03 \x01(\tH\x02\x88\x01\x01\x42\x0e\n\x0c_descriptionB\t\n\x07_statusB\x11\n\x0f_status_message"\x9d\x06\n\rExecutorState\x12\x18\n\x0b\x65xecutor_id\x18\x01 \x01(\tH\x00\x88\x01\x01\x12\x1d\n\x10\x64\x65velopment_mode\x18\x02 \x01(\x08H\x01\x88\x01\x01\x12\x15\n\x08hostname\x18\x03 \x01(\tH\x02\x88\x01\x01\x12\x34\n\x06\x66lavor\x18\x04 
\x01(\x0e\x32\x1f.executor_api_pb.ExecutorFlavorH\x03\x88\x01\x01\x12\x14\n\x07version\x18\x05 \x01(\tH\x04\x88\x01\x01\x12\x34\n\x06status\x18\x06 \x01(\x0e\x32\x1f.executor_api_pb.ExecutorStatusH\x05\x88\x01\x01\x12<\n\x0ftotal_resources\x18\r \x01(\x0b\x32\x1e.executor_api_pb.HostResourcesH\x06\x88\x01\x01\x12;\n\x0e\x66ree_resources\x18\x07 \x01(\x0b\x32\x1e.executor_api_pb.HostResourcesH\x07\x88\x01\x01\x12;\n\x11\x61llowed_functions\x18\x08 \x03(\x0b\x32 .executor_api_pb.AllowedFunction\x12H\n\x18\x66unction_executor_states\x18\t \x03(\x0b\x32&.executor_api_pb.FunctionExecutorState\x12:\n\x06labels\x18\n \x03(\x0b\x32*.executor_api_pb.ExecutorState.LabelsEntry\x12\x17\n\nstate_hash\x18\x0b \x01(\tH\x08\x88\x01\x01\x12\x19\n\x0cserver_clock\x18\x0c \x01(\x04H\t\x88\x01\x01\x1a-\n\x0bLabelsEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 \x01(\t:\x02\x38\x01\x42\x0e\n\x0c_executor_idB\x13\n\x11_development_modeB\x0b\n\t_hostnameB\t\n\x07_flavorB\n\n\x08_versionB\t\n\x07_statusB\x12\n\x10_total_resourcesB\x11\n\x0f_free_resourcesB\r\n\x0b_state_hashB\x0f\n\r_server_clock"l\n\x1aReportExecutorStateRequest\x12;\n\x0e\x65xecutor_state\x18\x01 \x01(\x0b\x32\x1e.executor_api_pb.ExecutorStateH\x00\x88\x01\x01\x42\x11\n\x0f_executor_state"\x1d\n\x1bReportExecutorStateResponse"\xcf\x01\n\x0fTaskRetryPolicy\x12\x18\n\x0bmax_retries\x18\x01 \x01(\rH\x00\x88\x01\x01\x12\x1d\n\x10initial_delay_ms\x18\x02 \x01(\rH\x01\x88\x01\x01\x12\x19\n\x0cmax_delay_ms\x18\x03 \x01(\rH\x02\x88\x01\x01\x12\x1d\n\x10\x64\x65lay_multiplier\x18\x04 \x01(\rH\x03\x88\x01\x01\x42\x0e\n\x0c_max_retriesB\x13\n\x11_initial_delay_msB\x0f\n\r_max_delay_msB\x13\n\x11_delay_multiplier"\xa4\x05\n\x04Task\x12\x0f\n\x02id\x18\x01 \x01(\tH\x00\x88\x01\x01\x12\x16\n\tnamespace\x18\x02 \x01(\tH\x01\x88\x01\x01\x12\x17\n\ngraph_name\x18\x03 \x01(\tH\x02\x88\x01\x01\x12\x1a\n\rgraph_version\x18\x04 \x01(\tH\x03\x88\x01\x01\x12\x1a\n\rfunction_name\x18\x05 \x01(\tH\x04\x88\x01\x01\x12 \n\x13graph_invocation_id\x18\x06 \x01(\tH\x05\x88\x01\x01\x12\x16\n\tinput_key\x18\x08 \x01(\tH\x06\x88\x01\x01\x12\x1f\n\x12reducer_output_key\x18\t \x01(\tH\x07\x88\x01\x01\x12\x17\n\ntimeout_ms\x18\n \x01(\rH\x08\x88\x01\x01\x12\x30\n\x05input\x18\x0b \x01(\x0b\x32\x1c.executor_api_pb.DataPayloadH\t\x88\x01\x01\x12\x38\n\rreducer_input\x18\x0c \x01(\x0b\x32\x1c.executor_api_pb.DataPayloadH\n\x88\x01\x01\x12&\n\x19output_payload_uri_prefix\x18\r \x01(\tH\x0b\x88\x01\x01\x12;\n\x0cretry_policy\x18\x0e \x01(\x0b\x32 .executor_api_pb.TaskRetryPolicyH\x0c\x88\x01\x01\x42\x05\n\x03_idB\x0c\n\n_namespaceB\r\n\x0b_graph_nameB\x10\n\x0e_graph_versionB\x10\n\x0e_function_nameB\x16\n\x14_graph_invocation_idB\x0c\n\n_input_keyB\x15\n\x13_reducer_output_keyB\r\n\x0b_timeout_msB\x08\n\x06_inputB\x10\n\x0e_reducer_inputB\x1c\n\x1a_output_payload_uri_prefixB\x0f\n\r_retry_policy"\x7f\n\x0eTaskAllocation\x12!\n\x14\x66unction_executor_id\x18\x01 \x01(\tH\x00\x88\x01\x01\x12(\n\x04task\x18\x02 \x01(\x0b\x32\x15.executor_api_pb.TaskH\x01\x88\x01\x01\x42\x17\n\x15_function_executor_idB\x07\n\x05_task"K\n\x1fGetDesiredExecutorStatesRequest\x12\x18\n\x0b\x65xecutor_id\x18\x01 \x01(\tH\x00\x88\x01\x01\x42\x0e\n\x0c_executor_id"\xb9\x01\n\x14\x44\x65siredExecutorState\x12H\n\x12\x66unction_executors\x18\x01 \x03(\x0b\x32,.executor_api_pb.FunctionExecutorDescription\x12\x39\n\x10task_allocations\x18\x02 \x03(\x0b\x32\x1f.executor_api_pb.TaskAllocation\x12\x12\n\x05\x63lock\x18\x03 
\x01(\x04H\x00\x88\x01\x01\x42\x08\n\x06_clock"\x87\x06\n\x18ReportTaskOutcomeRequest\x12\x14\n\x07task_id\x18\x01 \x01(\tH\x00\x88\x01\x01\x12\x16\n\tnamespace\x18\x02 \x01(\tH\x01\x88\x01\x01\x12\x17\n\ngraph_name\x18\x03 \x01(\tH\x02\x88\x01\x01\x12\x1a\n\rfunction_name\x18\x04 \x01(\tH\x03\x88\x01\x01\x12 \n\x13graph_invocation_id\x18\x06 \x01(\tH\x04\x88\x01\x01\x12\x32\n\x07outcome\x18\x07 \x01(\x0e\x32\x1c.executor_api_pb.TaskOutcomeH\x05\x88\x01\x01\x12\x1a\n\rinvocation_id\x18\x08 \x01(\tH\x06\x88\x01\x01\x12\x18\n\x0b\x65xecutor_id\x18\t \x01(\tH\x07\x88\x01\x01\x12\x14\n\x07reducer\x18\n \x01(\x08H\x08\x88\x01\x01\x12\x16\n\x0enext_functions\x18\x0b \x03(\t\x12\x30\n\nfn_outputs\x18\x0c \x03(\x0b\x32\x1c.executor_api_pb.DataPayload\x12\x31\n\x06stdout\x18\x0e \x01(\x0b\x32\x1c.executor_api_pb.DataPayloadH\t\x88\x01\x01\x12\x31\n\x06stderr\x18\x0f \x01(\x0b\x32\x1c.executor_api_pb.DataPayloadH\n\x88\x01\x01\x12=\n\x0foutput_encoding\x18\r \x01(\x0e\x32\x1f.executor_api_pb.OutputEncodingH\x0b\x88\x01\x01\x12$\n\x17output_encoding_version\x18\x05 \x01(\x04H\x0c\x88\x01\x01\x42\n\n\x08_task_idB\x0c\n\n_namespaceB\r\n\x0b_graph_nameB\x10\n\x0e_function_nameB\x16\n\x14_graph_invocation_idB\n\n\x08_outcomeB\x10\n\x0e_invocation_idB\x0e\n\x0c_executor_idB\n\n\x08_reducerB\t\n\x07_stdoutB\t\n\x07_stderrB\x12\n\x10_output_encodingB\x1a\n\x18_output_encoding_version"\x1b\n\x19ReportTaskOutcomeResponse*\xab\x01\n\x13\x44\x61taPayloadEncoding\x12!\n\x1d\x44\x41TA_PAYLOAD_ENCODING_UNKNOWN\x10\x00\x12#\n\x1f\x44\x41TA_PAYLOAD_ENCODING_UTF8_JSON\x10\x01\x12#\n\x1f\x44\x41TA_PAYLOAD_ENCODING_UTF8_TEXT\x10\x02\x12\'\n#DATA_PAYLOAD_ENCODING_BINARY_PICKLE\x10\x03*\x81\x01\n\x08GPUModel\x12\x15\n\x11GPU_MODEL_UNKNOWN\x10\x00\x12\x1e\n\x1aGPU_MODEL_NVIDIA_A100_40GB\x10\x01\x12\x1e\n\x1aGPU_MODEL_NVIDIA_A100_80GB\x10\x02\x12\x1e\n\x1aGPU_MODEL_NVIDIA_H100_80GB\x10\x03*\xca\x03\n\x16\x46unctionExecutorStatus\x12$\n FUNCTION_EXECUTOR_STATUS_UNKNOWN\x10\x00\x12(\n$FUNCTION_EXECUTOR_STATUS_STARTING_UP\x10\x01\x12:\n6FUNCTION_EXECUTOR_STATUS_STARTUP_FAILED_CUSTOMER_ERROR\x10\x02\x12:\n6FUNCTION_EXECUTOR_STATUS_STARTUP_FAILED_PLATFORM_ERROR\x10\x03\x12!\n\x1d\x46UNCTION_EXECUTOR_STATUS_IDLE\x10\x04\x12)\n%FUNCTION_EXECUTOR_STATUS_RUNNING_TASK\x10\x05\x12&\n"FUNCTION_EXECUTOR_STATUS_UNHEALTHY\x10\x06\x12%\n!FUNCTION_EXECUTOR_STATUS_STOPPING\x10\x07\x12$\n 
FUNCTION_EXECUTOR_STATUS_STOPPED\x10\x08\x12%\n!FUNCTION_EXECUTOR_STATUS_SHUTDOWN\x10\t*\xc3\x01\n\x0e\x45xecutorStatus\x12\x1b\n\x17\x45XECUTOR_STATUS_UNKNOWN\x10\x00\x12\x1f\n\x1b\x45XECUTOR_STATUS_STARTING_UP\x10\x01\x12\x1b\n\x17\x45XECUTOR_STATUS_RUNNING\x10\x02\x12\x1b\n\x17\x45XECUTOR_STATUS_DRAINED\x10\x03\x12\x1c\n\x18\x45XECUTOR_STATUS_STOPPING\x10\x04\x12\x1b\n\x17\x45XECUTOR_STATUS_STOPPED\x10\x05*d\n\x0e\x45xecutorFlavor\x12\x1b\n\x17\x45XECUTOR_FLAVOR_UNKNOWN\x10\x00\x12\x17\n\x13\x45XECUTOR_FLAVOR_OSS\x10\x01\x12\x1c\n\x18\x45XECUTOR_FLAVOR_PLATFORM\x10\x02*[\n\x0bTaskOutcome\x12\x18\n\x14TASK_OUTCOME_UNKNOWN\x10\x00\x12\x18\n\x14TASK_OUTCOME_SUCCESS\x10\x01\x12\x18\n\x14TASK_OUTCOME_FAILURE\x10\x02*\x7f\n\x0eOutputEncoding\x12\x1b\n\x17OUTPUT_ENCODING_UNKNOWN\x10\x00\x12\x18\n\x14OUTPUT_ENCODING_JSON\x10\x01\x12\x1a\n\x16OUTPUT_ENCODING_PICKLE\x10\x02\x12\x1a\n\x16OUTPUT_ENCODING_BINARY\x10\x03\x32\xef\x02\n\x0b\x45xecutorAPI\x12t\n\x15report_executor_state\x12+.executor_api_pb.ReportExecutorStateRequest\x1a,.executor_api_pb.ReportExecutorStateResponse"\x00\x12z\n\x1bget_desired_executor_states\x12\x30.executor_api_pb.GetDesiredExecutorStatesRequest\x1a%.executor_api_pb.DesiredExecutorState"\x00\x30\x01\x12n\n\x13report_task_outcome\x12).executor_api_pb.ReportTaskOutcomeRequest\x1a*.executor_api_pb.ReportTaskOutcomeResponse"\x00\x62\x06proto3'
+ )
+
+ _globals = globals()
+ _builder.BuildMessageAndEnumDescriptors(DESCRIPTOR, _globals)
+ _builder.BuildTopDescriptorsAndMessages(
+ DESCRIPTOR, "indexify.proto.executor_api_pb2", _globals
+ )
+ if not _descriptor._USE_C_DESCRIPTORS:
+ DESCRIPTOR._loaded_options = None
+ _globals["_EXECUTORSTATE_LABELSENTRY"]._loaded_options = None
+ _globals["_EXECUTORSTATE_LABELSENTRY"]._serialized_options = b"8\001"
+ _globals["_DATAPAYLOADENCODING"]._serialized_start = 4861
+ _globals["_DATAPAYLOADENCODING"]._serialized_end = 5032
+ _globals["_GPUMODEL"]._serialized_start = 5035
+ _globals["_GPUMODEL"]._serialized_end = 5164
+ _globals["_FUNCTIONEXECUTORSTATUS"]._serialized_start = 5167
+ _globals["_FUNCTIONEXECUTORSTATUS"]._serialized_end = 5625
+ _globals["_EXECUTORSTATUS"]._serialized_start = 5628
+ _globals["_EXECUTORSTATUS"]._serialized_end = 5823
+ _globals["_EXECUTORFLAVOR"]._serialized_start = 5825
+ _globals["_EXECUTORFLAVOR"]._serialized_end = 5925
+ _globals["_TASKOUTCOME"]._serialized_start = 5927
+ _globals["_TASKOUTCOME"]._serialized_end = 6018
+ _globals["_OUTPUTENCODING"]._serialized_start = 6020
+ _globals["_OUTPUTENCODING"]._serialized_end = 6147
+ _globals["_DATAPAYLOAD"]._serialized_start = 55
+ _globals["_DATAPAYLOAD"]._serialized_end = 318
+ _globals["_GPURESOURCES"]._serialized_start = 320
+ _globals["_GPURESOURCES"]._serialized_end = 427
+ _globals["_HOSTRESOURCES"]._serialized_start = 430
+ _globals["_HOSTRESOURCES"]._serialized_end = 624
+ _globals["_ALLOWEDFUNCTION"]._serialized_start = 627
+ _globals["_ALLOWEDFUNCTION"]._serialized_end = 814
+ _globals["_FUNCTIONEXECUTORRESOURCES"]._serialized_start = 817
+ _globals["_FUNCTIONEXECUTORRESOURCES"]._serialized_end = 1014
+ _globals["_FUNCTIONEXECUTORDESCRIPTION"]._serialized_start = 1017
+ _globals["_FUNCTIONEXECUTORDESCRIPTION"]._serialized_end = 1592
+ _globals["_FUNCTIONEXECUTORSTATE"]._serialized_start = 1595
+ _globals["_FUNCTIONEXECUTORSTATE"]._serialized_end = 1827
+ _globals["_EXECUTORSTATE"]._serialized_start = 1830
+ _globals["_EXECUTORSTATE"]._serialized_end = 2627
+ _globals["_EXECUTORSTATE_LABELSENTRY"]._serialized_start = 2427
+ _globals["_EXECUTORSTATE_LABELSENTRY"]._serialized_end = 2472
+ _globals["_REPORTEXECUTORSTATEREQUEST"]._serialized_start = 2629
+ _globals["_REPORTEXECUTORSTATEREQUEST"]._serialized_end = 2737
+ _globals["_REPORTEXECUTORSTATERESPONSE"]._serialized_start = 2739
+ _globals["_REPORTEXECUTORSTATERESPONSE"]._serialized_end = 2768
+ _globals["_TASKRETRYPOLICY"]._serialized_start = 2771
+ _globals["_TASKRETRYPOLICY"]._serialized_end = 2978
+ _globals["_TASK"]._serialized_start = 2981
+ _globals["_TASK"]._serialized_end = 3657
+ _globals["_TASKALLOCATION"]._serialized_start = 3659
+ _globals["_TASKALLOCATION"]._serialized_end = 3786
+ _globals["_GETDESIREDEXECUTORSTATESREQUEST"]._serialized_start = 3788
+ _globals["_GETDESIREDEXECUTORSTATESREQUEST"]._serialized_end = 3863
+ _globals["_DESIREDEXECUTORSTATE"]._serialized_start = 3866
+ _globals["_DESIREDEXECUTORSTATE"]._serialized_end = 4051
+ _globals["_REPORTTASKOUTCOMEREQUEST"]._serialized_start = 4054
+ _globals["_REPORTTASKOUTCOMEREQUEST"]._serialized_end = 4829
+ _globals["_REPORTTASKOUTCOMERESPONSE"]._serialized_start = 4831
+ _globals["_REPORTTASKOUTCOMERESPONSE"]._serialized_end = 4858
+ _globals["_EXECUTORAPI"]._serialized_start = 6150
+ _globals["_EXECUTORAPI"]._serialized_end = 6517
+ # @@protoc_insertion_point(module_scope)
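
The `executor_api_pb2.py` above (and the `.pyi` changes below) are generated from `executor_api.proto`, so they are regenerated rather than hand-edited whenever the schema changes. A hedged sketch of a typical regeneration call using grpcio-tools; the include and output paths are assumptions about this repo's layout:

```python
# Regenerates executor_api_pb2.py / _pb2.pyi / _pb2_grpc.py from the .proto.
# Requires grpcio-tools; run from the repository's src/ directory (assumed layout).
from grpc_tools import protoc

protoc.main(
    [
        "grpc_tools.protoc",
        "-I.",
        "--python_out=.",
        "--pyi_out=.",  # needs a bundled protoc new enough to emit .pyi stubs
        "--grpc_python_out=.",
        "indexify/proto/executor_api.proto",
    ]
)
```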
@@ -21,6 +21,9 @@ class DataPayloadEncoding(int, metaclass=_enum_type_wrapper.EnumTypeWrapper):
  class GPUModel(int, metaclass=_enum_type_wrapper.EnumTypeWrapper):
  __slots__ = ()
  GPU_MODEL_UNKNOWN: _ClassVar[GPUModel]
+ GPU_MODEL_NVIDIA_A100_40GB: _ClassVar[GPUModel]
+ GPU_MODEL_NVIDIA_A100_80GB: _ClassVar[GPUModel]
+ GPU_MODEL_NVIDIA_H100_80GB: _ClassVar[GPUModel]
 
  class FunctionExecutorStatus(int, metaclass=_enum_type_wrapper.EnumTypeWrapper):
  __slots__ = ()
@@ -72,6 +75,9 @@ DATA_PAYLOAD_ENCODING_UTF8_JSON: DataPayloadEncoding
  DATA_PAYLOAD_ENCODING_UTF8_TEXT: DataPayloadEncoding
  DATA_PAYLOAD_ENCODING_BINARY_PICKLE: DataPayloadEncoding
  GPU_MODEL_UNKNOWN: GPUModel
+ GPU_MODEL_NVIDIA_A100_40GB: GPUModel
+ GPU_MODEL_NVIDIA_A100_80GB: GPUModel
+ GPU_MODEL_NVIDIA_H100_80GB: GPUModel
  FUNCTION_EXECUTOR_STATUS_UNKNOWN: FunctionExecutorStatus
  FUNCTION_EXECUTOR_STATUS_STARTING_UP: FunctionExecutorStatus
  FUNCTION_EXECUTOR_STATUS_STARTUP_FAILED_CUSTOMER_ERROR: FunctionExecutorStatus
@@ -124,18 +130,13 @@ class DataPayload(_message.Message):
  ) -> None: ...
 
  class GPUResources(_message.Message):
- __slots__ = ("count", "deprecated_model", "model")
+ __slots__ = ("count", "model")
  COUNT_FIELD_NUMBER: _ClassVar[int]
- DEPRECATED_MODEL_FIELD_NUMBER: _ClassVar[int]
  MODEL_FIELD_NUMBER: _ClassVar[int]
  count: int
- deprecated_model: GPUModel
- model: str
+ model: GPUModel
  def __init__(
- self,
- count: _Optional[int] = ...,
- deprecated_model: _Optional[_Union[GPUModel, str]] = ...,
- model: _Optional[str] = ...,
+ self, count: _Optional[int] = ..., model: _Optional[_Union[GPUModel, str]] = ...
  ) -> None: ...
 
  class HostResources(_message.Message):
@@ -1,86 +0,0 @@
- # -*- coding: utf-8 -*-
- # Generated by the protocol buffer compiler. DO NOT EDIT!
- # NO CHECKED-IN PROTOBUF GENCODE
- # source: indexify/proto/executor_api.proto
- # Protobuf Python Version: 5.29.0
- """Generated protocol buffer code."""
- from google.protobuf import descriptor as _descriptor
- from google.protobuf import descriptor_pool as _descriptor_pool
- from google.protobuf import runtime_version as _runtime_version
- from google.protobuf import symbol_database as _symbol_database
- from google.protobuf.internal import builder as _builder
-
- _runtime_version.ValidateProtobufRuntimeVersion(
- _runtime_version.Domain.PUBLIC, 5, 29, 0, "", "indexify/proto/executor_api.proto"
- )
- # @@protoc_insertion_point(imports)
-
- _sym_db = _symbol_database.Default()
-
-
- DESCRIPTOR = _descriptor_pool.Default().AddSerializedFile(
- b'\n!indexify/proto/executor_api.proto\x12\x0f\x65xecutor_api_pb"\x87\x02\n\x0b\x44\x61taPayload\x12\x11\n\x04path\x18\x01 \x01(\tH\x00\x88\x01\x01\x12\x11\n\x04size\x18\x02 \x01(\x04H\x01\x88\x01\x01\x12\x18\n\x0bsha256_hash\x18\x03 \x01(\tH\x02\x88\x01\x01\x12\x10\n\x03uri\x18\x04 \x01(\tH\x03\x88\x01\x01\x12;\n\x08\x65ncoding\x18\x05 \x01(\x0e\x32$.executor_api_pb.DataPayloadEncodingH\x04\x88\x01\x01\x12\x1d\n\x10\x65ncoding_version\x18\x06 \x01(\x04H\x05\x88\x01\x01\x42\x07\n\x05_pathB\x07\n\x05_sizeB\x0e\n\x0c_sha256_hashB\x06\n\x04_uriB\x0b\n\t_encodingB\x13\n\x11_encoding_version"\x99\x01\n\x0cGPUResources\x12\x12\n\x05\x63ount\x18\x01 \x01(\rH\x00\x88\x01\x01\x12\x38\n\x10\x64\x65precated_model\x18\x02 \x01(\x0e\x32\x19.executor_api_pb.GPUModelH\x01\x88\x01\x01\x12\x12\n\x05model\x18\x03 \x01(\tH\x02\x88\x01\x01\x42\x08\n\x06_countB\x13\n\x11_deprecated_modelB\x08\n\x06_model"\xc2\x01\n\rHostResources\x12\x16\n\tcpu_count\x18\x01 \x01(\rH\x00\x88\x01\x01\x12\x19\n\x0cmemory_bytes\x18\x02 \x01(\x04H\x01\x88\x01\x01\x12\x17\n\ndisk_bytes\x18\x03 \x01(\x04H\x02\x88\x01\x01\x12/\n\x03gpu\x18\x04 \x01(\x0b\x32\x1d.executor_api_pb.GPUResourcesH\x03\x88\x01\x01\x42\x0c\n\n_cpu_countB\x0f\n\r_memory_bytesB\r\n\x0b_disk_bytesB\x06\n\x04_gpu"\xbb\x01\n\x0f\x41llowedFunction\x12\x16\n\tnamespace\x18\x01 \x01(\tH\x00\x88\x01\x01\x12\x17\n\ngraph_name\x18\x02 \x01(\tH\x01\x88\x01\x01\x12\x1a\n\rfunction_name\x18\x03 \x01(\tH\x02\x88\x01\x01\x12\x1a\n\rgraph_version\x18\x04 \x01(\tH\x03\x88\x01\x01\x42\x0c\n\n_namespaceB\r\n\x0b_graph_nameB\x10\n\x0e_function_nameB\x10\n\x0e_graph_version"\xc5\x01\n\x19\x46unctionExecutorResources\x12\x1b\n\x0e\x63pu_ms_per_sec\x18\x01 \x01(\rH\x00\x88\x01\x01\x12\x19\n\x0cmemory_bytes\x18\x02 \x01(\x04H\x01\x88\x01\x01\x12\x17\n\ndisk_bytes\x18\x03 \x01(\x04H\x02\x88\x01\x01\x12\x16\n\tgpu_count\x18\x04 \x01(\rH\x03\x88\x01\x01\x42\x11\n\x0f_cpu_ms_per_secB\x0f\n\r_memory_bytesB\r\n\x0b_disk_bytesB\x0c\n\n_gpu_count"\xbf\x04\n\x1b\x46unctionExecutorDescription\x12\x0f\n\x02id\x18\x01 \x01(\tH\x00\x88\x01\x01\x12\x16\n\tnamespace\x18\x02 \x01(\tH\x01\x88\x01\x01\x12\x17\n\ngraph_name\x18\x03 \x01(\tH\x02\x88\x01\x01\x12\x1a\n\rgraph_version\x18\x04 \x01(\tH\x03\x88\x01\x01\x12\x1a\n\rfunction_name\x18\x05 \x01(\tH\x04\x88\x01\x01\x12\x16\n\timage_uri\x18\x06 \x01(\tH\x05\x88\x01\x01\x12\x14\n\x0csecret_names\x18\x07 \x03(\t\x12<\n\x0fresource_limits\x18\x08 \x01(\x0b\x32\x1e.executor_api_pb.HostResourcesH\x06\x88\x01\x01\x12%\n\x18\x63ustomer_code_timeout_ms\x18\t \x01(\rH\x07\x88\x01\x01\x12\x30\n\x05graph\x18\n \x01(\x0b\x32\x1c.executor_api_pb.DataPayloadH\x08\x88\x01\x01\x12\x42\n\tresources\x18\x0b \x01(\x0b\x32*.executor_api_pb.FunctionExecutorResourcesH\t\x88\x01\x01\x42\x05\n\x03_idB\x0c\n\n_namespaceB\r\n\x0b_graph_nameB\x10\n\x0e_graph_versionB\x10\n\x0e_function_nameB\x0c\n\n_image_uriB\x12\n\x10_resource_limitsB\x1b\n\x19_customer_code_timeout_msB\x08\n\x06_graphB\x0c\n\n_resources"\xe8\x01\n\x15\x46unctionExecutorState\x12\x46\n\x0b\x64\x65scription\x18\x01 \x01(\x0b\x32,.executor_api_pb.FunctionExecutorDescriptionH\x00\x88\x01\x01\x12<\n\x06status\x18\x02 \x01(\x0e\x32\'.executor_api_pb.FunctionExecutorStatusH\x01\x88\x01\x01\x12\x1b\n\x0estatus_message\x18\x03 \x01(\tH\x02\x88\x01\x01\x42\x0e\n\x0c_descriptionB\t\n\x07_statusB\x11\n\x0f_status_message"\x9d\x06\n\rExecutorState\x12\x18\n\x0b\x65xecutor_id\x18\x01 \x01(\tH\x00\x88\x01\x01\x12\x1d\n\x10\x64\x65velopment_mode\x18\x02 \x01(\x08H\x01\x88\x01\x01\x12\x15\n\x08hostname\x18\x03 
\x01(\tH\x02\x88\x01\x01\x12\x34\n\x06\x66lavor\x18\x04 \x01(\x0e\x32\x1f.executor_api_pb.ExecutorFlavorH\x03\x88\x01\x01\x12\x14\n\x07version\x18\x05 \x01(\tH\x04\x88\x01\x01\x12\x34\n\x06status\x18\x06 \x01(\x0e\x32\x1f.executor_api_pb.ExecutorStatusH\x05\x88\x01\x01\x12<\n\x0ftotal_resources\x18\r \x01(\x0b\x32\x1e.executor_api_pb.HostResourcesH\x06\x88\x01\x01\x12;\n\x0e\x66ree_resources\x18\x07 \x01(\x0b\x32\x1e.executor_api_pb.HostResourcesH\x07\x88\x01\x01\x12;\n\x11\x61llowed_functions\x18\x08 \x03(\x0b\x32 .executor_api_pb.AllowedFunction\x12H\n\x18\x66unction_executor_states\x18\t \x03(\x0b\x32&.executor_api_pb.FunctionExecutorState\x12:\n\x06labels\x18\n \x03(\x0b\x32*.executor_api_pb.ExecutorState.LabelsEntry\x12\x17\n\nstate_hash\x18\x0b \x01(\tH\x08\x88\x01\x01\x12\x19\n\x0cserver_clock\x18\x0c \x01(\x04H\t\x88\x01\x01\x1a-\n\x0bLabelsEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 \x01(\t:\x02\x38\x01\x42\x0e\n\x0c_executor_idB\x13\n\x11_development_modeB\x0b\n\t_hostnameB\t\n\x07_flavorB\n\n\x08_versionB\t\n\x07_statusB\x12\n\x10_total_resourcesB\x11\n\x0f_free_resourcesB\r\n\x0b_state_hashB\x0f\n\r_server_clock"l\n\x1aReportExecutorStateRequest\x12;\n\x0e\x65xecutor_state\x18\x01 \x01(\x0b\x32\x1e.executor_api_pb.ExecutorStateH\x00\x88\x01\x01\x42\x11\n\x0f_executor_state"\x1d\n\x1bReportExecutorStateResponse"\xcf\x01\n\x0fTaskRetryPolicy\x12\x18\n\x0bmax_retries\x18\x01 \x01(\rH\x00\x88\x01\x01\x12\x1d\n\x10initial_delay_ms\x18\x02 \x01(\rH\x01\x88\x01\x01\x12\x19\n\x0cmax_delay_ms\x18\x03 \x01(\rH\x02\x88\x01\x01\x12\x1d\n\x10\x64\x65lay_multiplier\x18\x04 \x01(\rH\x03\x88\x01\x01\x42\x0e\n\x0c_max_retriesB\x13\n\x11_initial_delay_msB\x0f\n\r_max_delay_msB\x13\n\x11_delay_multiplier"\xa4\x05\n\x04Task\x12\x0f\n\x02id\x18\x01 \x01(\tH\x00\x88\x01\x01\x12\x16\n\tnamespace\x18\x02 \x01(\tH\x01\x88\x01\x01\x12\x17\n\ngraph_name\x18\x03 \x01(\tH\x02\x88\x01\x01\x12\x1a\n\rgraph_version\x18\x04 \x01(\tH\x03\x88\x01\x01\x12\x1a\n\rfunction_name\x18\x05 \x01(\tH\x04\x88\x01\x01\x12 \n\x13graph_invocation_id\x18\x06 \x01(\tH\x05\x88\x01\x01\x12\x16\n\tinput_key\x18\x08 \x01(\tH\x06\x88\x01\x01\x12\x1f\n\x12reducer_output_key\x18\t \x01(\tH\x07\x88\x01\x01\x12\x17\n\ntimeout_ms\x18\n \x01(\rH\x08\x88\x01\x01\x12\x30\n\x05input\x18\x0b \x01(\x0b\x32\x1c.executor_api_pb.DataPayloadH\t\x88\x01\x01\x12\x38\n\rreducer_input\x18\x0c \x01(\x0b\x32\x1c.executor_api_pb.DataPayloadH\n\x88\x01\x01\x12&\n\x19output_payload_uri_prefix\x18\r \x01(\tH\x0b\x88\x01\x01\x12;\n\x0cretry_policy\x18\x0e \x01(\x0b\x32 .executor_api_pb.TaskRetryPolicyH\x0c\x88\x01\x01\x42\x05\n\x03_idB\x0c\n\n_namespaceB\r\n\x0b_graph_nameB\x10\n\x0e_graph_versionB\x10\n\x0e_function_nameB\x16\n\x14_graph_invocation_idB\x0c\n\n_input_keyB\x15\n\x13_reducer_output_keyB\r\n\x0b_timeout_msB\x08\n\x06_inputB\x10\n\x0e_reducer_inputB\x1c\n\x1a_output_payload_uri_prefixB\x0f\n\r_retry_policy"\x7f\n\x0eTaskAllocation\x12!\n\x14\x66unction_executor_id\x18\x01 \x01(\tH\x00\x88\x01\x01\x12(\n\x04task\x18\x02 \x01(\x0b\x32\x15.executor_api_pb.TaskH\x01\x88\x01\x01\x42\x17\n\x15_function_executor_idB\x07\n\x05_task"K\n\x1fGetDesiredExecutorStatesRequest\x12\x18\n\x0b\x65xecutor_id\x18\x01 \x01(\tH\x00\x88\x01\x01\x42\x0e\n\x0c_executor_id"\xb9\x01\n\x14\x44\x65siredExecutorState\x12H\n\x12\x66unction_executors\x18\x01 \x03(\x0b\x32,.executor_api_pb.FunctionExecutorDescription\x12\x39\n\x10task_allocations\x18\x02 \x03(\x0b\x32\x1f.executor_api_pb.TaskAllocation\x12\x12\n\x05\x63lock\x18\x03 
\x01(\x04H\x00\x88\x01\x01\x42\x08\n\x06_clock"\x87\x06\n\x18ReportTaskOutcomeRequest\x12\x14\n\x07task_id\x18\x01 \x01(\tH\x00\x88\x01\x01\x12\x16\n\tnamespace\x18\x02 \x01(\tH\x01\x88\x01\x01\x12\x17\n\ngraph_name\x18\x03 \x01(\tH\x02\x88\x01\x01\x12\x1a\n\rfunction_name\x18\x04 \x01(\tH\x03\x88\x01\x01\x12 \n\x13graph_invocation_id\x18\x06 \x01(\tH\x04\x88\x01\x01\x12\x32\n\x07outcome\x18\x07 \x01(\x0e\x32\x1c.executor_api_pb.TaskOutcomeH\x05\x88\x01\x01\x12\x1a\n\rinvocation_id\x18\x08 \x01(\tH\x06\x88\x01\x01\x12\x18\n\x0b\x65xecutor_id\x18\t \x01(\tH\x07\x88\x01\x01\x12\x14\n\x07reducer\x18\n \x01(\x08H\x08\x88\x01\x01\x12\x16\n\x0enext_functions\x18\x0b \x03(\t\x12\x30\n\nfn_outputs\x18\x0c \x03(\x0b\x32\x1c.executor_api_pb.DataPayload\x12\x31\n\x06stdout\x18\x0e \x01(\x0b\x32\x1c.executor_api_pb.DataPayloadH\t\x88\x01\x01\x12\x31\n\x06stderr\x18\x0f \x01(\x0b\x32\x1c.executor_api_pb.DataPayloadH\n\x88\x01\x01\x12=\n\x0foutput_encoding\x18\r \x01(\x0e\x32\x1f.executor_api_pb.OutputEncodingH\x0b\x88\x01\x01\x12$\n\x17output_encoding_version\x18\x05 \x01(\x04H\x0c\x88\x01\x01\x42\n\n\x08_task_idB\x0c\n\n_namespaceB\r\n\x0b_graph_nameB\x10\n\x0e_function_nameB\x16\n\x14_graph_invocation_idB\n\n\x08_outcomeB\x10\n\x0e_invocation_idB\x0e\n\x0c_executor_idB\n\n\x08_reducerB\t\n\x07_stdoutB\t\n\x07_stderrB\x12\n\x10_output_encodingB\x1a\n\x18_output_encoding_version"\x1b\n\x19ReportTaskOutcomeResponse*\xab\x01\n\x13\x44\x61taPayloadEncoding\x12!\n\x1d\x44\x41TA_PAYLOAD_ENCODING_UNKNOWN\x10\x00\x12#\n\x1f\x44\x41TA_PAYLOAD_ENCODING_UTF8_JSON\x10\x01\x12#\n\x1f\x44\x41TA_PAYLOAD_ENCODING_UTF8_TEXT\x10\x02\x12\'\n#DATA_PAYLOAD_ENCODING_BINARY_PICKLE\x10\x03*!\n\x08GPUModel\x12\x15\n\x11GPU_MODEL_UNKNOWN\x10\x00*\xca\x03\n\x16\x46unctionExecutorStatus\x12$\n FUNCTION_EXECUTOR_STATUS_UNKNOWN\x10\x00\x12(\n$FUNCTION_EXECUTOR_STATUS_STARTING_UP\x10\x01\x12:\n6FUNCTION_EXECUTOR_STATUS_STARTUP_FAILED_CUSTOMER_ERROR\x10\x02\x12:\n6FUNCTION_EXECUTOR_STATUS_STARTUP_FAILED_PLATFORM_ERROR\x10\x03\x12!\n\x1d\x46UNCTION_EXECUTOR_STATUS_IDLE\x10\x04\x12)\n%FUNCTION_EXECUTOR_STATUS_RUNNING_TASK\x10\x05\x12&\n"FUNCTION_EXECUTOR_STATUS_UNHEALTHY\x10\x06\x12%\n!FUNCTION_EXECUTOR_STATUS_STOPPING\x10\x07\x12$\n 
FUNCTION_EXECUTOR_STATUS_STOPPED\x10\x08\x12%\n!FUNCTION_EXECUTOR_STATUS_SHUTDOWN\x10\t*\xc3\x01\n\x0e\x45xecutorStatus\x12\x1b\n\x17\x45XECUTOR_STATUS_UNKNOWN\x10\x00\x12\x1f\n\x1b\x45XECUTOR_STATUS_STARTING_UP\x10\x01\x12\x1b\n\x17\x45XECUTOR_STATUS_RUNNING\x10\x02\x12\x1b\n\x17\x45XECUTOR_STATUS_DRAINED\x10\x03\x12\x1c\n\x18\x45XECUTOR_STATUS_STOPPING\x10\x04\x12\x1b\n\x17\x45XECUTOR_STATUS_STOPPED\x10\x05*d\n\x0e\x45xecutorFlavor\x12\x1b\n\x17\x45XECUTOR_FLAVOR_UNKNOWN\x10\x00\x12\x17\n\x13\x45XECUTOR_FLAVOR_OSS\x10\x01\x12\x1c\n\x18\x45XECUTOR_FLAVOR_PLATFORM\x10\x02*[\n\x0bTaskOutcome\x12\x18\n\x14TASK_OUTCOME_UNKNOWN\x10\x00\x12\x18\n\x14TASK_OUTCOME_SUCCESS\x10\x01\x12\x18\n\x14TASK_OUTCOME_FAILURE\x10\x02*\x7f\n\x0eOutputEncoding\x12\x1b\n\x17OUTPUT_ENCODING_UNKNOWN\x10\x00\x12\x18\n\x14OUTPUT_ENCODING_JSON\x10\x01\x12\x1a\n\x16OUTPUT_ENCODING_PICKLE\x10\x02\x12\x1a\n\x16OUTPUT_ENCODING_BINARY\x10\x03\x32\xef\x02\n\x0b\x45xecutorAPI\x12t\n\x15report_executor_state\x12+.executor_api_pb.ReportExecutorStateRequest\x1a,.executor_api_pb.ReportExecutorStateResponse"\x00\x12z\n\x1bget_desired_executor_states\x12\x30.executor_api_pb.GetDesiredExecutorStatesRequest\x1a%.executor_api_pb.DesiredExecutorState"\x00\x30\x01\x12n\n\x13report_task_outcome\x12).executor_api_pb.ReportTaskOutcomeRequest\x1a*.executor_api_pb.ReportTaskOutcomeResponse"\x00\x62\x06proto3'
- )
-
- _globals = globals()
- _builder.BuildMessageAndEnumDescriptors(DESCRIPTOR, _globals)
- _builder.BuildTopDescriptorsAndMessages(
- DESCRIPTOR, "indexify.proto.executor_api_pb2", _globals
- )
- if not _descriptor._USE_C_DESCRIPTORS:
- DESCRIPTOR._loaded_options = None
- _globals["_EXECUTORSTATE_LABELSENTRY"]._loaded_options = None
- _globals["_EXECUTORSTATE_LABELSENTRY"]._serialized_options = b"8\001"
- _globals["_DATAPAYLOADENCODING"]._serialized_start = 4908
- _globals["_DATAPAYLOADENCODING"]._serialized_end = 5079
- _globals["_GPUMODEL"]._serialized_start = 5081
- _globals["_GPUMODEL"]._serialized_end = 5114
- _globals["_FUNCTIONEXECUTORSTATUS"]._serialized_start = 5117
- _globals["_FUNCTIONEXECUTORSTATUS"]._serialized_end = 5575
- _globals["_EXECUTORSTATUS"]._serialized_start = 5578
- _globals["_EXECUTORSTATUS"]._serialized_end = 5773
- _globals["_EXECUTORFLAVOR"]._serialized_start = 5775
- _globals["_EXECUTORFLAVOR"]._serialized_end = 5875
- _globals["_TASKOUTCOME"]._serialized_start = 5877
- _globals["_TASKOUTCOME"]._serialized_end = 5968
- _globals["_OUTPUTENCODING"]._serialized_start = 5970
- _globals["_OUTPUTENCODING"]._serialized_end = 6097
- _globals["_DATAPAYLOAD"]._serialized_start = 55
- _globals["_DATAPAYLOAD"]._serialized_end = 318
- _globals["_GPURESOURCES"]._serialized_start = 321
- _globals["_GPURESOURCES"]._serialized_end = 474
- _globals["_HOSTRESOURCES"]._serialized_start = 477
- _globals["_HOSTRESOURCES"]._serialized_end = 671
- _globals["_ALLOWEDFUNCTION"]._serialized_start = 674
- _globals["_ALLOWEDFUNCTION"]._serialized_end = 861
- _globals["_FUNCTIONEXECUTORRESOURCES"]._serialized_start = 864
- _globals["_FUNCTIONEXECUTORRESOURCES"]._serialized_end = 1061
- _globals["_FUNCTIONEXECUTORDESCRIPTION"]._serialized_start = 1064
- _globals["_FUNCTIONEXECUTORDESCRIPTION"]._serialized_end = 1639
- _globals["_FUNCTIONEXECUTORSTATE"]._serialized_start = 1642
- _globals["_FUNCTIONEXECUTORSTATE"]._serialized_end = 1874
- _globals["_EXECUTORSTATE"]._serialized_start = 1877
- _globals["_EXECUTORSTATE"]._serialized_end = 2674
- _globals["_EXECUTORSTATE_LABELSENTRY"]._serialized_start = 2474
- _globals["_EXECUTORSTATE_LABELSENTRY"]._serialized_end = 2519
- _globals["_REPORTEXECUTORSTATEREQUEST"]._serialized_start = 2676
- _globals["_REPORTEXECUTORSTATEREQUEST"]._serialized_end = 2784
- _globals["_REPORTEXECUTORSTATERESPONSE"]._serialized_start = 2786
- _globals["_REPORTEXECUTORSTATERESPONSE"]._serialized_end = 2815
- _globals["_TASKRETRYPOLICY"]._serialized_start = 2818
- _globals["_TASKRETRYPOLICY"]._serialized_end = 3025
- _globals["_TASK"]._serialized_start = 3028
- _globals["_TASK"]._serialized_end = 3704
- _globals["_TASKALLOCATION"]._serialized_start = 3706
- _globals["_TASKALLOCATION"]._serialized_end = 3833
- _globals["_GETDESIREDEXECUTORSTATESREQUEST"]._serialized_start = 3835
- _globals["_GETDESIREDEXECUTORSTATESREQUEST"]._serialized_end = 3910
- _globals["_DESIREDEXECUTORSTATE"]._serialized_start = 3913
- _globals["_DESIREDEXECUTORSTATE"]._serialized_end = 4098
- _globals["_REPORTTASKOUTCOMEREQUEST"]._serialized_start = 4101
- _globals["_REPORTTASKOUTCOMEREQUEST"]._serialized_end = 4876
- _globals["_REPORTTASKOUTCOMERESPONSE"]._serialized_start = 4878
- _globals["_REPORTTASKOUTCOMERESPONSE"]._serialized_end = 4905
- _globals["_EXECUTORAPI"]._serialized_start = 6100
- _globals["_EXECUTORAPI"]._serialized_end = 6467
- # @@protoc_insertion_point(module_scope)