flwr-nightly 1.7.0.dev20231228__py3-none-any.whl → 1.7.0.dev20240104__py3-none-any.whl

flwr/client/app.py CHANGED
@@ -349,7 +349,7 @@ def _start_client_internal(
  break

  # Register state
- node_state.register_workloadstate(workload_id=task_ins.workload_id)
+ node_state.register_workloadstate(run_id=task_ins.run_id)

  # Load app
  app: Flower = load_flower_callable_fn()
@@ -357,15 +357,13 @@ def _start_client_internal(
  # Handle task message
  fwd_msg: Fwd = Fwd(
  task_ins=task_ins,
- state=node_state.retrieve_workloadstate(
- workload_id=task_ins.workload_id
- ),
+ state=node_state.retrieve_workloadstate(run_id=task_ins.run_id),
  )
  bwd_msg: Bwd = app(fwd=fwd_msg)

  # Update node state
  node_state.update_workloadstate(
- workload_id=bwd_msg.task_res.workload_id,
+ run_id=bwd_msg.task_res.run_id,
  workload_state=bwd_msg.state,
  )

@@ -119,7 +119,7 @@ def grpc_connection(
  return TaskIns(
  task_id=str(uuid.uuid4()),
  group_id="",
- workload_id=0,
+ run_id=0,
  task=Task(
  producer=Node(node_id=0, anonymous=True),
  consumer=Node(node_id=0, anonymous=True),
@@ -112,7 +112,7 @@ def handle(
  task_res = TaskRes(
  task_id="",
  group_id="",
- workload_id=0,
+ run_id=0,
  task=Task(
  ancestry=[],
  sa=SecureAggregation(named_values=serde.named_values_to_proto(res)),
@@ -70,7 +70,7 @@ def validate_task_res(task_res: TaskRes) -> bool:
  Returns
  -------
  is_valid: bool
- True if the `task_id`, `group_id`, and `workload_id` fields in TaskRes
+ True if the `task_id`, `group_id`, and `run_id` fields in TaskRes
  and the `producer`, `consumer`, and `ancestry` fields in its sub-message Task
  are not initialized accidentally elsewhere,
  False otherwise.
@@ -84,7 +84,7 @@ def validate_task_res(task_res: TaskRes) -> bool:
  if (
  "task_id" in initialized_fields_in_task_res
  or "group_id" in initialized_fields_in_task_res
- or "workload_id" in initialized_fields_in_task_res
+ or "run_id" in initialized_fields_in_task_res
  or "producer" in initialized_fields_in_task
  or "consumer" in initialized_fields_in_task
  or "ancestry" in initialized_fields_in_task
@@ -129,7 +129,7 @@ def wrap_client_message_in_task_res(client_message: ClientMessage) -> TaskRes:
  return TaskRes(
  task_id="",
  group_id="",
- workload_id=0,
+ run_id=0,
  task=Task(ancestry=[], legacy_client_message=client_message),
  )

@@ -139,7 +139,7 @@ def configure_task_res(
  ) -> TaskRes:
  """Set the metadata of a TaskRes.

- Fill `group_id` and `workload_id` in TaskRes
+ Fill `group_id` and `run_id` in TaskRes
  and `producer`, `consumer`, and `ancestry` in Task in TaskRes.

  `producer` in Task in TaskRes will remain unchanged/unset.
@@ -152,7 +152,7 @@ def configure_task_res(
  task_res = TaskRes(
  task_id="", # This will be generated by the server
  group_id=ref_task_ins.group_id,
- workload_id=ref_task_ins.workload_id,
+ run_id=ref_task_ins.run_id,
  task=task_res.task,
  )
  # pylint: disable-next=no-member
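A minimal sketch of what the rename means for callers of these helpers, assuming the generated flwr.proto.task_pb2 classes; copy_run_id below is an illustrative stand-in that mirrors how configure_task_res handles the renamed field:

    from flwr.proto.task_pb2 import Task, TaskIns, TaskRes

    def copy_run_id(ref_task_ins: TaskIns) -> TaskRes:
        # Mirrors configure_task_res: group_id and run_id come from the
        # referenced TaskIns, while task_id is left for the server to assign.
        return TaskRes(
            task_id="",
            group_id=ref_task_ins.group_id,
            run_id=ref_task_ins.run_id,
            task=Task(ancestry=[]),
        )

    ins = TaskIns(task_id="abc", group_id="g-1", run_id=42, task=Task())
    assert copy_run_id(ins).run_id == 42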
flwr/client/node_state.py CHANGED
@@ -27,24 +27,22 @@ class NodeState:
  self._meta: Dict[str, Any] = {} # holds metadata about the node
  self.workload_states: Dict[int, WorkloadState] = {}

- def register_workloadstate(self, workload_id: int) -> None:
+ def register_workloadstate(self, run_id: int) -> None:
  """Register new workload state for this node."""
- if workload_id not in self.workload_states:
- self.workload_states[workload_id] = WorkloadState({})
+ if run_id not in self.workload_states:
+ self.workload_states[run_id] = WorkloadState({})

- def retrieve_workloadstate(self, workload_id: int) -> WorkloadState:
- """Get workload state given a workload_id."""
- if workload_id in self.workload_states:
- return self.workload_states[workload_id]
+ def retrieve_workloadstate(self, run_id: int) -> WorkloadState:
+ """Get workload state given a run_id."""
+ if run_id in self.workload_states:
+ return self.workload_states[run_id]

  raise RuntimeError(
- f"WorkloadState for workload_id={workload_id} doesn't exist."
+ f"WorkloadState for run_id={run_id} doesn't exist."
  " A workload must be registered before it can be retrieved or updated "
  " by a client."
  )

- def update_workloadstate(
- self, workload_id: int, workload_state: WorkloadState
- ) -> None:
+ def update_workloadstate(self, run_id: int, workload_state: WorkloadState) -> None:
  """Update workload state."""
- self.workload_states[workload_id] = workload_state
+ self.workload_states[run_id] = workload_state
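For reference, a small sketch of the renamed NodeState API in use (the run_id value is illustrative):

    from flwr.client.node_state import NodeState

    node_state = NodeState()
    run_id = 42  # illustrative run ID

    # Registration is idempotent: an empty WorkloadState is created once per run_id
    node_state.register_workloadstate(run_id=run_id)

    # Retrieval raises RuntimeError for a run_id that was never registered
    state = node_state.retrieve_workloadstate(run_id=run_id)
    state.state["counter"] = "1"

    # Persist the mutated state back under the same run_id
    node_state.update_workloadstate(run_id=run_id, workload_state=state)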
@@ -32,7 +32,7 @@ def _run_dummy_task(state: WorkloadState) -> WorkloadState:
  def test_multiworkload_in_node_state() -> None:
  """Test basic NodeState logic."""
  # Tasks to perform
- tasks = [TaskIns(workload_id=w_id) for w_id in [0, 1, 1, 2, 3, 2, 1, 5]]
+ tasks = [TaskIns(run_id=r_id) for r_id in [0, 1, 1, 2, 3, 2, 1, 5]]
  # the "tasks" is to count how many times each workload is executed
  expected_values = {0: "1", 1: "1" * 3, 2: "1" * 2, 3: "1", 5: "1"}

@@ -40,20 +40,20 @@ def test_multiworkload_in_node_state() -> None:
  node_state = NodeState()

  for task in tasks:
- w_id = task.workload_id
+ r_id = task.run_id

  # Register
- node_state.register_workloadstate(workload_id=w_id)
+ node_state.register_workloadstate(run_id=r_id)

  # Get workload state
- state = node_state.retrieve_workloadstate(workload_id=w_id)
+ state = node_state.retrieve_workloadstate(run_id=r_id)

  # Run "task"
  updated_state = _run_dummy_task(state)

  # Update workload state
- node_state.update_workloadstate(workload_id=w_id, workload_state=updated_state)
+ node_state.update_workloadstate(run_id=r_id, workload_state=updated_state)

  # Verify values
- for w_id, state in node_state.workload_states.items():
- assert state.state["counter"] == expected_values[w_id]
+ for r_id, state in node_state.workload_states.items():
+ assert state.state["counter"] == expected_values[r_id]
flwr/driver/app.py CHANGED
@@ -170,8 +170,8 @@ def update_client_manager(
  and dead nodes will be removed from the ClientManager via
  `client_manager.unregister()`.
  """
- # Request for workload_id
- workload_id = driver.create_workload(driver_pb2.CreateWorkloadRequest()).workload_id
+ # Request for run_id
+ run_id = driver.create_workload(driver_pb2.CreateWorkloadRequest()).run_id

  # Loop until the driver is disconnected
  registered_nodes: Dict[int, DriverClientProxy] = {}
@@ -181,7 +181,7 @@ def update_client_manager(
  if driver.stub is None:
  break
  get_nodes_res = driver.get_nodes(
- req=driver_pb2.GetNodesRequest(workload_id=workload_id)
+ req=driver_pb2.GetNodesRequest(run_id=run_id)
  )
  all_node_ids = {node.node_id for node in get_nodes_res.nodes}
  dead_nodes = set(registered_nodes).difference(all_node_ids)
@@ -199,7 +199,7 @@ def update_client_manager(
  node_id=node_id,
  driver=driver,
  anonymous=False,
- workload_id=workload_id,
+ run_id=run_id,
  )
  if client_manager.register(client_proxy):
  registered_nodes[node_id] = client_proxy
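A rough sketch of the request flow used above, assuming a reachable Driver API endpoint (address and TLS settings are illustrative, and error handling is omitted):

    from flwr.driver.grpc_driver import GrpcDriver
    from flwr.proto import driver_pb2

    driver = GrpcDriver(driver_service_address="0.0.0.0:9091", certificates=None)
    driver.connect()

    # One CreateWorkload call yields the run_id that scopes later requests
    run_id = driver.create_workload(driver_pb2.CreateWorkloadRequest()).run_id

    # Node discovery is now keyed by run_id instead of workload_id
    get_nodes_res = driver.get_nodes(req=driver_pb2.GetNodesRequest(run_id=run_id))
    node_ids = {node.node_id for node in get_nodes_res.nodes}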
flwr/driver/driver.py CHANGED
@@ -54,37 +54,37 @@ class Driver:
  self.addr = driver_service_address
  self.certificates = certificates
  self.grpc_driver: Optional[GrpcDriver] = None
- self.workload_id: Optional[int] = None
+ self.run_id: Optional[int] = None
  self.node = Node(node_id=0, anonymous=True)

- def _get_grpc_driver_and_workload_id(self) -> Tuple[GrpcDriver, int]:
+ def _get_grpc_driver_and_run_id(self) -> Tuple[GrpcDriver, int]:
  # Check if the GrpcDriver is initialized
- if self.grpc_driver is None or self.workload_id is None:
+ if self.grpc_driver is None or self.run_id is None:
  # Connect and create workload
  self.grpc_driver = GrpcDriver(
  driver_service_address=self.addr, certificates=self.certificates
  )
  self.grpc_driver.connect()
  res = self.grpc_driver.create_workload(CreateWorkloadRequest())
- self.workload_id = res.workload_id
+ self.run_id = res.run_id

- return self.grpc_driver, self.workload_id
+ return self.grpc_driver, self.run_id

  def get_nodes(self) -> List[Node]:
  """Get node IDs."""
- grpc_driver, workload_id = self._get_grpc_driver_and_workload_id()
+ grpc_driver, run_id = self._get_grpc_driver_and_run_id()

  # Call GrpcDriver method
- res = grpc_driver.get_nodes(GetNodesRequest(workload_id=workload_id))
+ res = grpc_driver.get_nodes(GetNodesRequest(run_id=run_id))
  return list(res.nodes)

  def push_task_ins(self, task_ins_list: List[TaskIns]) -> List[str]:
  """Schedule tasks."""
- grpc_driver, workload_id = self._get_grpc_driver_and_workload_id()
+ grpc_driver, run_id = self._get_grpc_driver_and_run_id()

- # Set workload_id
+ # Set run_id
  for task_ins in task_ins_list:
- task_ins.workload_id = workload_id
+ task_ins.run_id = run_id

  # Call GrpcDriver method
  res = grpc_driver.push_task_ins(PushTaskInsRequest(task_ins_list=task_ins_list))
@@ -92,7 +92,7 @@ class Driver:

  def pull_task_res(self, task_ids: Iterable[str]) -> List[TaskRes]:
  """Get task results."""
- grpc_driver, _ = self._get_grpc_driver_and_workload_id()
+ grpc_driver, _ = self._get_grpc_driver_and_run_id()

  # Call GrpcDriver method
  res = grpc_driver.pull_task_res(
@@ -31,13 +31,11 @@ SLEEP_TIME = 1
  class DriverClientProxy(ClientProxy):
  """Flower client proxy which delegates work using the Driver API."""

- def __init__(
- self, node_id: int, driver: GrpcDriver, anonymous: bool, workload_id: int
- ):
+ def __init__(self, node_id: int, driver: GrpcDriver, anonymous: bool, run_id: int):
  super().__init__(str(node_id))
  self.node_id = node_id
  self.driver = driver
- self.workload_id = workload_id
+ self.run_id = run_id
  self.anonymous = anonymous

  def get_properties(
@@ -106,7 +104,7 @@ class DriverClientProxy(ClientProxy):
  task_ins = task_pb2.TaskIns(
  task_id="",
  group_id="",
- workload_id=self.workload_id,
+ run_id=self.run_id,
  task=task_pb2.Task(
  producer=node_pb2.Node(
  node_id=0,
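A condensed sketch of how the lazily created run_id is stamped onto outgoing tasks by the Driver wrapper above (the address is illustrative; producer/consumer setup and error handling are omitted):

    from flwr.driver.driver import Driver
    from flwr.proto.task_pb2 import Task, TaskIns

    driver = Driver(driver_service_address="0.0.0.0:9091", certificates=None)

    # On first use, Driver connects, runs CreateWorkload, and caches self.run_id
    task_ins = TaskIns(group_id="", run_id=0, task=Task())
    task_ids = driver.push_task_ins([task_ins])  # run_id is overwritten on each TaskIns

    # Results are later pulled by task ID; run_id is not needed for this call
    task_res_list = driver.pull_task_res(task_ids)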
flwr/proto/driver_pb2.py CHANGED
@@ -16,7 +16,7 @@ from flwr.proto import node_pb2 as flwr_dot_proto_dot_node__pb2
  from flwr.proto import task_pb2 as flwr_dot_proto_dot_task__pb2

- DESCRIPTOR = _descriptor_pool.Default().AddSerializedFile(b'\n\x17\x66lwr/proto/driver.proto\x12\nflwr.proto\x1a\x15\x66lwr/proto/node.proto\x1a\x15\x66lwr/proto/task.proto\"\x17\n\x15\x43reateWorkloadRequest\"-\n\x16\x43reateWorkloadResponse\x12\x13\n\x0bworkload_id\x18\x01 \x01(\x12\"&\n\x0fGetNodesRequest\x12\x13\n\x0bworkload_id\x18\x01 \x01(\x12\"3\n\x10GetNodesResponse\x12\x1f\n\x05nodes\x18\x01 \x03(\x0b\x32\x10.flwr.proto.Node\"@\n\x12PushTaskInsRequest\x12*\n\rtask_ins_list\x18\x01 \x03(\x0b\x32\x13.flwr.proto.TaskIns\"\'\n\x13PushTaskInsResponse\x12\x10\n\x08task_ids\x18\x02 \x03(\t\"F\n\x12PullTaskResRequest\x12\x1e\n\x04node\x18\x01 \x01(\x0b\x32\x10.flwr.proto.Node\x12\x10\n\x08task_ids\x18\x02 \x03(\t\"A\n\x13PullTaskResResponse\x12*\n\rtask_res_list\x18\x01 \x03(\x0b\x32\x13.flwr.proto.TaskRes2\xd0\x02\n\x06\x44river\x12Y\n\x0e\x43reateWorkload\x12!.flwr.proto.CreateWorkloadRequest\x1a\".flwr.proto.CreateWorkloadResponse\"\x00\x12G\n\x08GetNodes\x12\x1b.flwr.proto.GetNodesRequest\x1a\x1c.flwr.proto.GetNodesResponse\"\x00\x12P\n\x0bPushTaskIns\x12\x1e.flwr.proto.PushTaskInsRequest\x1a\x1f.flwr.proto.PushTaskInsResponse\"\x00\x12P\n\x0bPullTaskRes\x12\x1e.flwr.proto.PullTaskResRequest\x1a\x1f.flwr.proto.PullTaskResResponse\"\x00\x62\x06proto3')
+ DESCRIPTOR = _descriptor_pool.Default().AddSerializedFile(b'\n\x17\x66lwr/proto/driver.proto\x12\nflwr.proto\x1a\x15\x66lwr/proto/node.proto\x1a\x15\x66lwr/proto/task.proto\"\x17\n\x15\x43reateWorkloadRequest\"(\n\x16\x43reateWorkloadResponse\x12\x0e\n\x06run_id\x18\x01 \x01(\x12\"!\n\x0fGetNodesRequest\x12\x0e\n\x06run_id\x18\x01 \x01(\x12\"3\n\x10GetNodesResponse\x12\x1f\n\x05nodes\x18\x01 \x03(\x0b\x32\x10.flwr.proto.Node\"@\n\x12PushTaskInsRequest\x12*\n\rtask_ins_list\x18\x01 \x03(\x0b\x32\x13.flwr.proto.TaskIns\"\'\n\x13PushTaskInsResponse\x12\x10\n\x08task_ids\x18\x02 \x03(\t\"F\n\x12PullTaskResRequest\x12\x1e\n\x04node\x18\x01 \x01(\x0b\x32\x10.flwr.proto.Node\x12\x10\n\x08task_ids\x18\x02 \x03(\t\"A\n\x13PullTaskResResponse\x12*\n\rtask_res_list\x18\x01 \x03(\x0b\x32\x13.flwr.proto.TaskRes2\xd0\x02\n\x06\x44river\x12Y\n\x0e\x43reateWorkload\x12!.flwr.proto.CreateWorkloadRequest\x1a\".flwr.proto.CreateWorkloadResponse\"\x00\x12G\n\x08GetNodes\x12\x1b.flwr.proto.GetNodesRequest\x1a\x1c.flwr.proto.GetNodesResponse\"\x00\x12P\n\x0bPushTaskIns\x12\x1e.flwr.proto.PushTaskInsRequest\x1a\x1f.flwr.proto.PushTaskInsResponse\"\x00\x12P\n\x0bPullTaskRes\x12\x1e.flwr.proto.PullTaskResRequest\x1a\x1f.flwr.proto.PullTaskResResponse\"\x00\x62\x06proto3')
@@ -91,19 +91,19 @@ if _descriptor._USE_C_DESCRIPTORS == False:
  _CREATEWORKLOADREQUEST._serialized_start=85
  _CREATEWORKLOADREQUEST._serialized_end=108
  _CREATEWORKLOADRESPONSE._serialized_start=110
- _CREATEWORKLOADRESPONSE._serialized_end=155
- _GETNODESREQUEST._serialized_start=157
- _GETNODESREQUEST._serialized_end=195
- _GETNODESRESPONSE._serialized_start=197
- _GETNODESRESPONSE._serialized_end=248
- _PUSHTASKINSREQUEST._serialized_start=250
- _PUSHTASKINSREQUEST._serialized_end=314
- _PUSHTASKINSRESPONSE._serialized_start=316
- _PUSHTASKINSRESPONSE._serialized_end=355
- _PULLTASKRESREQUEST._serialized_start=357
- _PULLTASKRESREQUEST._serialized_end=427
- _PULLTASKRESRESPONSE._serialized_start=429
- _PULLTASKRESRESPONSE._serialized_end=494
- _DRIVER._serialized_start=497
- _DRIVER._serialized_end=833
+ _CREATEWORKLOADRESPONSE._serialized_end=150
+ _GETNODESREQUEST._serialized_start=152
+ _GETNODESREQUEST._serialized_end=185
+ _GETNODESRESPONSE._serialized_start=187
+ _GETNODESRESPONSE._serialized_end=238
+ _PUSHTASKINSREQUEST._serialized_start=240
+ _PUSHTASKINSREQUEST._serialized_end=304
+ _PUSHTASKINSRESPONSE._serialized_start=306
+ _PUSHTASKINSRESPONSE._serialized_end=345
+ _PULLTASKRESREQUEST._serialized_start=347
+ _PULLTASKRESREQUEST._serialized_end=417
+ _PULLTASKRESRESPONSE._serialized_start=419
+ _PULLTASKRESRESPONSE._serialized_end=484
+ _DRIVER._serialized_start=487
+ _DRIVER._serialized_end=823
  # @@protoc_insertion_point(module_scope)
flwr/proto/driver_pb2.pyi CHANGED
@@ -22,25 +22,25 @@ global___CreateWorkloadRequest = CreateWorkloadRequest

  class CreateWorkloadResponse(google.protobuf.message.Message):
  DESCRIPTOR: google.protobuf.descriptor.Descriptor
- WORKLOAD_ID_FIELD_NUMBER: builtins.int
- workload_id: builtins.int
+ RUN_ID_FIELD_NUMBER: builtins.int
+ run_id: builtins.int
  def __init__(self,
  *,
- workload_id: builtins.int = ...,
+ run_id: builtins.int = ...,
  ) -> None: ...
- def ClearField(self, field_name: typing_extensions.Literal["workload_id",b"workload_id"]) -> None: ...
+ def ClearField(self, field_name: typing_extensions.Literal["run_id",b"run_id"]) -> None: ...
  global___CreateWorkloadResponse = CreateWorkloadResponse

  class GetNodesRequest(google.protobuf.message.Message):
  """GetNodes messages"""
  DESCRIPTOR: google.protobuf.descriptor.Descriptor
- WORKLOAD_ID_FIELD_NUMBER: builtins.int
- workload_id: builtins.int
+ RUN_ID_FIELD_NUMBER: builtins.int
+ run_id: builtins.int
  def __init__(self,
  *,
- workload_id: builtins.int = ...,
+ run_id: builtins.int = ...,
  ) -> None: ...
- def ClearField(self, field_name: typing_extensions.Literal["workload_id",b"workload_id"]) -> None: ...
+ def ClearField(self, field_name: typing_extensions.Literal["run_id",b"run_id"]) -> None: ...
  global___GetNodesRequest = GetNodesRequest

  class GetNodesResponse(google.protobuf.message.Message):
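Because only the field name changes (the tag number stays 1, as the serialized descriptor above shows), the rename is wire-compatible and call sites translate mechanically; a brief sketch:

    from flwr.proto.driver_pb2 import CreateWorkloadResponse, GetNodesRequest

    res = CreateWorkloadResponse(run_id=1234)
    req = GetNodesRequest(run_id=res.run_id)
    req.ClearField("run_id")  # "workload_id" is no longer a valid field name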
flwr/proto/task_pb2.py CHANGED
@@ -16,7 +16,7 @@ from flwr.proto import node_pb2 as flwr_dot_proto_dot_node__pb2
  from flwr.proto import transport_pb2 as flwr_dot_proto_dot_transport__pb2

- DESCRIPTOR = _descriptor_pool.Default().AddSerializedFile(b'\n\x15\x66lwr/proto/task.proto\x12\nflwr.proto\x1a\x15\x66lwr/proto/node.proto\x1a\x1a\x66lwr/proto/transport.proto\"\xbe\x02\n\x04Task\x12\"\n\x08producer\x18\x01 \x01(\x0b\x32\x10.flwr.proto.Node\x12\"\n\x08\x63onsumer\x18\x02 \x01(\x0b\x32\x10.flwr.proto.Node\x12\x12\n\ncreated_at\x18\x03 \x01(\t\x12\x14\n\x0c\x64\x65livered_at\x18\x04 \x01(\t\x12\x0b\n\x03ttl\x18\x05 \x01(\t\x12\x10\n\x08\x61ncestry\x18\x06 \x03(\t\x12)\n\x02sa\x18\x07 \x01(\x0b\x32\x1d.flwr.proto.SecureAggregation\x12<\n\x15legacy_server_message\x18\x65 \x01(\x0b\x32\x19.flwr.proto.ServerMessageB\x02\x18\x01\x12<\n\x15legacy_client_message\x18\x66 \x01(\x0b\x32\x19.flwr.proto.ClientMessageB\x02\x18\x01\"a\n\x07TaskIns\x12\x0f\n\x07task_id\x18\x01 \x01(\t\x12\x10\n\x08group_id\x18\x02 \x01(\t\x12\x13\n\x0bworkload_id\x18\x03 \x01(\x12\x12\x1e\n\x04task\x18\x04 \x01(\x0b\x32\x10.flwr.proto.Task\"a\n\x07TaskRes\x12\x0f\n\x07task_id\x18\x01 \x01(\t\x12\x10\n\x08group_id\x18\x02 \x01(\t\x12\x13\n\x0bworkload_id\x18\x03 \x01(\x12\x12\x1e\n\x04task\x18\x04 \x01(\x0b\x32\x10.flwr.proto.Task\"\xf3\x03\n\x05Value\x12\x10\n\x06\x64ouble\x18\x01 \x01(\x01H\x00\x12\x10\n\x06sint64\x18\x02 \x01(\x12H\x00\x12\x0e\n\x04\x62ool\x18\x03 \x01(\x08H\x00\x12\x10\n\x06string\x18\x04 \x01(\tH\x00\x12\x0f\n\x05\x62ytes\x18\x05 \x01(\x0cH\x00\x12\x33\n\x0b\x64ouble_list\x18\x15 \x01(\x0b\x32\x1c.flwr.proto.Value.DoubleListH\x00\x12\x33\n\x0bsint64_list\x18\x16 \x01(\x0b\x32\x1c.flwr.proto.Value.Sint64ListH\x00\x12/\n\tbool_list\x18\x17 \x01(\x0b\x32\x1a.flwr.proto.Value.BoolListH\x00\x12\x33\n\x0bstring_list\x18\x18 \x01(\x0b\x32\x1c.flwr.proto.Value.StringListH\x00\x12\x31\n\nbytes_list\x18\x19 \x01(\x0b\x32\x1b.flwr.proto.Value.BytesListH\x00\x1a\x1a\n\nDoubleList\x12\x0c\n\x04vals\x18\x01 \x03(\x01\x1a\x1a\n\nSint64List\x12\x0c\n\x04vals\x18\x01 \x03(\x12\x1a\x18\n\x08\x42oolList\x12\x0c\n\x04vals\x18\x01 \x03(\x08\x1a\x1a\n\nStringList\x12\x0c\n\x04vals\x18\x01 \x03(\t\x1a\x19\n\tBytesList\x12\x0c\n\x04vals\x18\x01 \x03(\x0c\x42\x07\n\x05value\"\xa0\x01\n\x11SecureAggregation\x12\x44\n\x0cnamed_values\x18\x01 \x03(\x0b\x32..flwr.proto.SecureAggregation.NamedValuesEntry\x1a\x45\n\x10NamedValuesEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12 \n\x05value\x18\x02 \x01(\x0b\x32\x11.flwr.proto.Value:\x02\x38\x01\x62\x06proto3')
+ DESCRIPTOR = _descriptor_pool.Default().AddSerializedFile(b'\n\x15\x66lwr/proto/task.proto\x12\nflwr.proto\x1a\x15\x66lwr/proto/node.proto\x1a\x1a\x66lwr/proto/transport.proto\"\xbe\x02\n\x04Task\x12\"\n\x08producer\x18\x01 \x01(\x0b\x32\x10.flwr.proto.Node\x12\"\n\x08\x63onsumer\x18\x02 \x01(\x0b\x32\x10.flwr.proto.Node\x12\x12\n\ncreated_at\x18\x03 \x01(\t\x12\x14\n\x0c\x64\x65livered_at\x18\x04 \x01(\t\x12\x0b\n\x03ttl\x18\x05 \x01(\t\x12\x10\n\x08\x61ncestry\x18\x06 \x03(\t\x12)\n\x02sa\x18\x07 \x01(\x0b\x32\x1d.flwr.proto.SecureAggregation\x12<\n\x15legacy_server_message\x18\x65 \x01(\x0b\x32\x19.flwr.proto.ServerMessageB\x02\x18\x01\x12<\n\x15legacy_client_message\x18\x66 \x01(\x0b\x32\x19.flwr.proto.ClientMessageB\x02\x18\x01\"\\\n\x07TaskIns\x12\x0f\n\x07task_id\x18\x01 \x01(\t\x12\x10\n\x08group_id\x18\x02 \x01(\t\x12\x0e\n\x06run_id\x18\x03 \x01(\x12\x12\x1e\n\x04task\x18\x04 \x01(\x0b\x32\x10.flwr.proto.Task\"\\\n\x07TaskRes\x12\x0f\n\x07task_id\x18\x01 \x01(\t\x12\x10\n\x08group_id\x18\x02 \x01(\t\x12\x0e\n\x06run_id\x18\x03 \x01(\x12\x12\x1e\n\x04task\x18\x04 \x01(\x0b\x32\x10.flwr.proto.Task\"\xf3\x03\n\x05Value\x12\x10\n\x06\x64ouble\x18\x01 \x01(\x01H\x00\x12\x10\n\x06sint64\x18\x02 \x01(\x12H\x00\x12\x0e\n\x04\x62ool\x18\x03 \x01(\x08H\x00\x12\x10\n\x06string\x18\x04 \x01(\tH\x00\x12\x0f\n\x05\x62ytes\x18\x05 \x01(\x0cH\x00\x12\x33\n\x0b\x64ouble_list\x18\x15 \x01(\x0b\x32\x1c.flwr.proto.Value.DoubleListH\x00\x12\x33\n\x0bsint64_list\x18\x16 \x01(\x0b\x32\x1c.flwr.proto.Value.Sint64ListH\x00\x12/\n\tbool_list\x18\x17 \x01(\x0b\x32\x1a.flwr.proto.Value.BoolListH\x00\x12\x33\n\x0bstring_list\x18\x18 \x01(\x0b\x32\x1c.flwr.proto.Value.StringListH\x00\x12\x31\n\nbytes_list\x18\x19 \x01(\x0b\x32\x1b.flwr.proto.Value.BytesListH\x00\x1a\x1a\n\nDoubleList\x12\x0c\n\x04vals\x18\x01 \x03(\x01\x1a\x1a\n\nSint64List\x12\x0c\n\x04vals\x18\x01 \x03(\x12\x1a\x18\n\x08\x42oolList\x12\x0c\n\x04vals\x18\x01 \x03(\x08\x1a\x1a\n\nStringList\x12\x0c\n\x04vals\x18\x01 \x03(\t\x1a\x19\n\tBytesList\x12\x0c\n\x04vals\x18\x01 \x03(\x0c\x42\x07\n\x05value\"\xa0\x01\n\x11SecureAggregation\x12\x44\n\x0cnamed_values\x18\x01 \x03(\x0b\x32..flwr.proto.SecureAggregation.NamedValuesEntry\x1a\x45\n\x10NamedValuesEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12 \n\x05value\x18\x02 \x01(\x0b\x32\x11.flwr.proto.Value:\x02\x38\x01\x62\x06proto3')
@@ -126,23 +126,23 @@ if _descriptor._USE_C_DESCRIPTORS == False:
  _TASK._serialized_start=89
  _TASK._serialized_end=407
  _TASKINS._serialized_start=409
- _TASKINS._serialized_end=506
- _TASKRES._serialized_start=508
- _TASKRES._serialized_end=605
- _VALUE._serialized_start=608
- _VALUE._serialized_end=1107
- _VALUE_DOUBLELIST._serialized_start=963
- _VALUE_DOUBLELIST._serialized_end=989
- _VALUE_SINT64LIST._serialized_start=991
- _VALUE_SINT64LIST._serialized_end=1017
- _VALUE_BOOLLIST._serialized_start=1019
- _VALUE_BOOLLIST._serialized_end=1043
- _VALUE_STRINGLIST._serialized_start=1045
- _VALUE_STRINGLIST._serialized_end=1071
- _VALUE_BYTESLIST._serialized_start=1073
- _VALUE_BYTESLIST._serialized_end=1098
- _SECUREAGGREGATION._serialized_start=1110
- _SECUREAGGREGATION._serialized_end=1270
- _SECUREAGGREGATION_NAMEDVALUESENTRY._serialized_start=1201
- _SECUREAGGREGATION_NAMEDVALUESENTRY._serialized_end=1270
+ _TASKINS._serialized_end=501
+ _TASKRES._serialized_start=503
+ _TASKRES._serialized_end=595
+ _VALUE._serialized_start=598
+ _VALUE._serialized_end=1097
+ _VALUE_DOUBLELIST._serialized_start=953
+ _VALUE_DOUBLELIST._serialized_end=979
+ _VALUE_SINT64LIST._serialized_start=981
+ _VALUE_SINT64LIST._serialized_end=1007
+ _VALUE_BOOLLIST._serialized_start=1009
+ _VALUE_BOOLLIST._serialized_end=1033
+ _VALUE_STRINGLIST._serialized_start=1035
+ _VALUE_STRINGLIST._serialized_end=1061
+ _VALUE_BYTESLIST._serialized_start=1063
+ _VALUE_BYTESLIST._serialized_end=1088
+ _SECUREAGGREGATION._serialized_start=1100
+ _SECUREAGGREGATION._serialized_end=1260
+ _SECUREAGGREGATION_NAMEDVALUESENTRY._serialized_start=1191
+ _SECUREAGGREGATION_NAMEDVALUESENTRY._serialized_end=1260
  # @@protoc_insertion_point(module_scope)
flwr/proto/task_pb2.pyi CHANGED
@@ -59,44 +59,44 @@ class TaskIns(google.protobuf.message.Message):
  DESCRIPTOR: google.protobuf.descriptor.Descriptor
  TASK_ID_FIELD_NUMBER: builtins.int
  GROUP_ID_FIELD_NUMBER: builtins.int
- WORKLOAD_ID_FIELD_NUMBER: builtins.int
+ RUN_ID_FIELD_NUMBER: builtins.int
  TASK_FIELD_NUMBER: builtins.int
  task_id: typing.Text
  group_id: typing.Text
- workload_id: builtins.int
+ run_id: builtins.int
  @property
  def task(self) -> global___Task: ...
  def __init__(self,
  *,
  task_id: typing.Text = ...,
  group_id: typing.Text = ...,
- workload_id: builtins.int = ...,
+ run_id: builtins.int = ...,
  task: typing.Optional[global___Task] = ...,
  ) -> None: ...
  def HasField(self, field_name: typing_extensions.Literal["task",b"task"]) -> builtins.bool: ...
- def ClearField(self, field_name: typing_extensions.Literal["group_id",b"group_id","task",b"task","task_id",b"task_id","workload_id",b"workload_id"]) -> None: ...
+ def ClearField(self, field_name: typing_extensions.Literal["group_id",b"group_id","run_id",b"run_id","task",b"task","task_id",b"task_id"]) -> None: ...
  global___TaskIns = TaskIns

  class TaskRes(google.protobuf.message.Message):
  DESCRIPTOR: google.protobuf.descriptor.Descriptor
  TASK_ID_FIELD_NUMBER: builtins.int
  GROUP_ID_FIELD_NUMBER: builtins.int
- WORKLOAD_ID_FIELD_NUMBER: builtins.int
+ RUN_ID_FIELD_NUMBER: builtins.int
  TASK_FIELD_NUMBER: builtins.int
  task_id: typing.Text
  group_id: typing.Text
- workload_id: builtins.int
+ run_id: builtins.int
  @property
  def task(self) -> global___Task: ...
  def __init__(self,
  *,
  task_id: typing.Text = ...,
  group_id: typing.Text = ...,
- workload_id: builtins.int = ...,
+ run_id: builtins.int = ...,
  task: typing.Optional[global___Task] = ...,
  ) -> None: ...
  def HasField(self, field_name: typing_extensions.Literal["task",b"task"]) -> builtins.bool: ...
- def ClearField(self, field_name: typing_extensions.Literal["group_id",b"group_id","task",b"task","task_id",b"task_id","workload_id",b"workload_id"]) -> None: ...
+ def ClearField(self, field_name: typing_extensions.Literal["group_id",b"group_id","run_id",b"run_id","task",b"task","task_id",b"task_id"]) -> None: ...
  global___TaskRes = TaskRes

  class Value(google.protobuf.message.Message):
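The same holds for the task messages: run_id keeps tag number 3, so serialized TaskIns/TaskRes payloads remain compatible; a brief sketch:

    from flwr.proto.task_pb2 import Task, TaskIns

    ins = TaskIns(task_id="t-1", group_id="g-1", run_id=7, task=Task())
    same = TaskIns.FromString(ins.SerializeToString())
    assert same.run_id == 7
    same.ClearField("run_id")  # resets to the proto3 default of 0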
@@ -51,7 +51,7 @@ class DriverServicer(driver_pb2_grpc.DriverServicer):
  """Get available nodes."""
  log(INFO, "DriverServicer.GetNodes")
  state: State = self.state_factory.state()
- all_ids: Set[int] = state.get_nodes(request.workload_id)
+ all_ids: Set[int] = state.get_nodes(request.run_id)
  nodes: List[Node] = [
  Node(node_id=node_id, anonymous=False) for node_id in all_ids
  ]
@@ -63,8 +63,8 @@ class DriverServicer(driver_pb2_grpc.DriverServicer):
  """Create workload ID."""
  log(INFO, "DriverServicer.CreateWorkload")
  state: State = self.state_factory.state()
- workload_id = state.create_workload()
- return CreateWorkloadResponse(workload_id=workload_id)
+ run_id = state.create_workload()
+ return CreateWorkloadResponse(run_id=run_id)

  def PushTaskIns(
  self, request: PushTaskInsRequest, context: grpc.ServicerContext
@@ -32,7 +32,7 @@ class InMemoryState(State):

  def __init__(self) -> None:
  self.node_ids: Set[int] = set()
- self.workload_ids: Set[int] = set()
+ self.run_ids: Set[int] = set()
  self.task_ins_store: Dict[UUID, TaskIns] = {}
  self.task_res_store: Dict[UUID, TaskRes] = {}

@@ -43,9 +43,9 @@ class InMemoryState(State):
  if any(errors):
  log(ERROR, errors)
  return None
- # Validate workload_id
- if task_ins.workload_id not in self.workload_ids:
- log(ERROR, "`workload_id` is invalid")
+ # Validate run_id
+ if task_ins.run_id not in self.run_ids:
+ log(ERROR, "`run_id` is invalid")
  return None

  # Create task_id, created_at and ttl
@@ -104,9 +104,9 @@ class InMemoryState(State):
  log(ERROR, errors)
  return None

- # Validate workload_id
- if task_res.workload_id not in self.workload_ids:
- log(ERROR, "`workload_id` is invalid")
+ # Validate run_id
+ if task_res.run_id not in self.run_ids:
+ log(ERROR, "`run_id` is invalid")
  return None

  # Create task_id, created_at and ttl
@@ -199,25 +199,25 @@ class InMemoryState(State):
  raise ValueError(f"Node {node_id} not found")
  self.node_ids.remove(node_id)

- def get_nodes(self, workload_id: int) -> Set[int]:
+ def get_nodes(self, run_id: int) -> Set[int]:
  """Return all available client nodes.

  Constraints
  -----------
- If the provided `workload_id` does not exist or has no matching nodes,
+ If the provided `run_id` does not exist or has no matching nodes,
  an empty `Set` MUST be returned.
  """
- if workload_id not in self.workload_ids:
+ if run_id not in self.run_ids:
  return set()
  return self.node_ids

  def create_workload(self) -> int:
  """Create one workload."""
- # Sample a random int64 as workload_id
- workload_id: int = int.from_bytes(os.urandom(8), "little", signed=True)
+ # Sample a random int64 as run_id
+ run_id: int = int.from_bytes(os.urandom(8), "little", signed=True)

- if workload_id not in self.workload_ids:
- self.workload_ids.add(workload_id)
- return workload_id
+ if run_id not in self.run_ids:
+ self.run_ids.add(run_id)
+ return run_id
  log(ERROR, "Unexpected workload creation failure.")
  return 0
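A short sketch of the renamed in-memory API (InMemoryState imported from the module path listed in this wheel):

    from flwr.server.state.in_memory_state import InMemoryState

    state = InMemoryState()
    run_id = state.create_workload()  # random signed int64, recorded in state.run_ids

    assert run_id in state.run_ids
    assert state.get_nodes(run_id) == set()      # no nodes registered yet
    assert state.get_nodes(run_id + 1) == set()  # unknown run_id also yields an empty set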
@@ -39,7 +39,7 @@ CREATE TABLE IF NOT EXISTS node(

  SQL_CREATE_TABLE_WORKLOAD = """
  CREATE TABLE IF NOT EXISTS workload(
- workload_id INTEGER UNIQUE
+ run_id INTEGER UNIQUE
  );
  """

@@ -47,7 +47,7 @@ SQL_CREATE_TABLE_TASK_INS = """
  CREATE TABLE IF NOT EXISTS task_ins(
  task_id TEXT UNIQUE,
  group_id TEXT,
- workload_id INTEGER,
+ run_id INTEGER,
  producer_anonymous BOOLEAN,
  producer_node_id INTEGER,
  consumer_anonymous BOOLEAN,
@@ -58,7 +58,7 @@ CREATE TABLE IF NOT EXISTS task_ins(
  ancestry TEXT,
  legacy_server_message BLOB,
  legacy_client_message BLOB,
- FOREIGN KEY(workload_id) REFERENCES workload(workload_id)
+ FOREIGN KEY(run_id) REFERENCES workload(run_id)
  );
  """

@@ -67,7 +67,7 @@ SQL_CREATE_TABLE_TASK_RES = """
  CREATE TABLE IF NOT EXISTS task_res(
  task_id TEXT UNIQUE,
  group_id TEXT,
- workload_id INTEGER,
+ run_id INTEGER,
  producer_anonymous BOOLEAN,
  producer_node_id INTEGER,
  consumer_anonymous BOOLEAN,
@@ -78,7 +78,7 @@ CREATE TABLE IF NOT EXISTS task_res(
  ancestry TEXT,
  legacy_server_message BLOB,
  legacy_client_message BLOB,
- FOREIGN KEY(workload_id) REFERENCES workload(workload_id)
+ FOREIGN KEY(run_id) REFERENCES workload(run_id)
  );
  """
@@ -198,7 +198,7 @@ class SqliteState(State):
  columns = ", ".join([f":{key}" for key in data[0]])
  query = f"INSERT INTO task_ins VALUES({columns});"

- # Only invalid workload_id can trigger IntegrityError.
+ # Only invalid run_id can trigger IntegrityError.
  # This may need to be changed in the future version with more integrity checks.
  try:
  self.query(query, data)
@@ -333,7 +333,7 @@ class SqliteState(State):
  columns = ", ".join([f":{key}" for key in data[0]])
  query = f"INSERT INTO task_res VALUES({columns});"

- # Only invalid workload_id can trigger IntegrityError.
+ # Only invalid run_id can trigger IntegrityError.
  # This may need to be changed in the future version with more integrity checks.
  try:
  self.query(query, data)
@@ -485,17 +485,17 @@ class SqliteState(State):
  query = "DELETE FROM node WHERE node_id = :node_id;"
  self.query(query, {"node_id": node_id})

- def get_nodes(self, workload_id: int) -> Set[int]:
+ def get_nodes(self, run_id: int) -> Set[int]:
  """Retrieve all currently stored node IDs as a set.

  Constraints
  -----------
- If the provided `workload_id` does not exist or has no matching nodes,
+ If the provided `run_id` does not exist or has no matching nodes,
  an empty `Set` MUST be returned.
  """
  # Validate workload ID
- query = "SELECT COUNT(*) FROM workload WHERE workload_id = ?;"
- if self.query(query, (workload_id,))[0]["COUNT(*)"] == 0:
+ query = "SELECT COUNT(*) FROM workload WHERE run_id = ?;"
+ if self.query(query, (run_id,))[0]["COUNT(*)"] == 0:
  return set()

  # Get nodes
@@ -506,16 +506,16 @@ class SqliteState(State):

  def create_workload(self) -> int:
  """Create one workload and store it in state."""
- # Sample a random int64 as workload_id
- workload_id: int = int.from_bytes(os.urandom(8), "little", signed=True)
+ # Sample a random int64 as run_id
+ run_id: int = int.from_bytes(os.urandom(8), "little", signed=True)

  # Check conflicts
- query = "SELECT COUNT(*) FROM workload WHERE workload_id = ?;"
- # If workload_id does not exist
- if self.query(query, (workload_id,))[0]["COUNT(*)"] == 0:
- query = "INSERT INTO workload VALUES(:workload_id);"
- self.query(query, {"workload_id": workload_id})
- return workload_id
+ query = "SELECT COUNT(*) FROM workload WHERE run_id = ?;"
+ # If run_id does not exist
+ if self.query(query, (run_id,))[0]["COUNT(*)"] == 0:
+ query = "INSERT INTO workload VALUES(:run_id);"
+ self.query(query, {"run_id": run_id})
+ return run_id
  log(ERROR, "Unexpected workload creation failure.")
  return 0
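A minimal standalone sketch of the conflict-check pattern create_workload uses above, written against plain sqlite3 rather than the SqliteState query wrapper:

    import os
    import sqlite3

    conn = sqlite3.connect(":memory:")
    conn.execute("CREATE TABLE IF NOT EXISTS workload(run_id INTEGER UNIQUE);")

    # Sample a random signed int64 and insert it only if it is not already present
    run_id = int.from_bytes(os.urandom(8), "little", signed=True)
    (count,) = conn.execute(
        "SELECT COUNT(*) FROM workload WHERE run_id = ?;", (run_id,)
    ).fetchone()
    if count == 0:
        conn.execute("INSERT INTO workload VALUES(:run_id);", {"run_id": run_id})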
@@ -537,7 +537,7 @@ def task_ins_to_dict(task_msg: TaskIns) -> Dict[str, Any]:
  result = {
  "task_id": task_msg.task_id,
  "group_id": task_msg.group_id,
- "workload_id": task_msg.workload_id,
+ "run_id": task_msg.run_id,
  "producer_anonymous": task_msg.task.producer.anonymous,
  "producer_node_id": task_msg.task.producer.node_id,
  "consumer_anonymous": task_msg.task.consumer.anonymous,
@@ -559,7 +559,7 @@ def task_res_to_dict(task_msg: TaskRes) -> Dict[str, Any]:
  result = {
  "task_id": task_msg.task_id,
  "group_id": task_msg.group_id,
- "workload_id": task_msg.workload_id,
+ "run_id": task_msg.run_id,
  "producer_anonymous": task_msg.task.producer.anonymous,
  "producer_node_id": task_msg.task.producer.node_id,
  "consumer_anonymous": task_msg.task.consumer.anonymous,
@@ -584,7 +584,7 @@ def dict_to_task_ins(task_dict: Dict[str, Any]) -> TaskIns:
  result = TaskIns(
  task_id=task_dict["task_id"],
  group_id=task_dict["group_id"],
- workload_id=task_dict["workload_id"],
+ run_id=task_dict["run_id"],
  task=Task(
  producer=Node(
  node_id=task_dict["producer_node_id"],
@@ -612,7 +612,7 @@ def dict_to_task_res(task_dict: Dict[str, Any]) -> TaskRes:
  result = TaskRes(
  task_id=task_dict["task_id"],
  group_id=task_dict["group_id"],
- workload_id=task_dict["workload_id"],
+ run_id=task_dict["run_id"],
  task=Task(
  producer=Node(
  node_id=task_dict["producer_node_id"],
@@ -43,7 +43,7 @@ class State(abc.ABC):
  If `task_ins.task.consumer.anonymous` is `False`, then
  `task_ins.task.consumer.node_id` MUST be set (not 0)

- If `task_ins.workload_id` is invalid, then
+ If `task_ins.run_id` is invalid, then
  storing the `task_ins` MUST fail.
  """

@@ -92,7 +92,7 @@ class State(abc.ABC):
  If `task_res.task.consumer.anonymous` is `False`, then
  `task_res.task.consumer.node_id` MUST be set (not 0)

- If `task_res.workload_id` is invalid, then
+ If `task_res.run_id` is invalid, then
  storing the `task_res` MUST fail.
  """

@@ -140,12 +140,12 @@ class State(abc.ABC):
  """Remove `node_id` from state."""

  @abc.abstractmethod
- def get_nodes(self, workload_id: int) -> Set[int]:
+ def get_nodes(self, run_id: int) -> Set[int]:
  """Retrieve all currently stored node IDs as a set.

  Constraints
  -----------
- If the provided `workload_id` does not exist or has no matching nodes,
+ If the provided `run_id` does not exist or has no matching nodes,
  an empty `Set` MUST be returned.
  """
151
151
 
@@ -133,15 +133,15 @@ class RayActorClientProxy(ClientProxy):
133
133
 
134
134
  def _submit_job(self, job_fn: JobFn, timeout: Optional[float]) -> ClientRes:
135
135
  # The VCE is not exposed to TaskIns, it won't handle multilple workloads
136
- # For the time being, fixing workload_id is a small compromise
136
+ # For the time being, fixing run_id is a small compromise
137
137
  # This will be one of the first points to address integrating VCE + DriverAPI
138
- workload_id = 0
138
+ run_id = 0
139
139
 
140
140
  # Register state
141
- self.proxy_state.register_workloadstate(workload_id=workload_id)
141
+ self.proxy_state.register_workloadstate(run_id=run_id)
142
142
 
143
143
  # Retrieve state
144
- state = self.proxy_state.retrieve_workloadstate(workload_id=workload_id)
144
+ state = self.proxy_state.retrieve_workloadstate(run_id=run_id)
145
145
 
146
146
  try:
147
147
  self.actor_pool.submit_client_job(
@@ -152,7 +152,7 @@ class RayActorClientProxy(ClientProxy):
152
152
 
153
153
  # Update state
154
154
  self.proxy_state.update_workloadstate(
155
- workload_id=workload_id, workload_state=updated_state
155
+ run_id=run_id, workload_state=updated_state
156
156
  )
157
157
 
158
158
  except Exception as ex:
@@ -1,6 +1,6 @@
  Metadata-Version: 2.1
  Name: flwr-nightly
- Version: 1.7.0.dev20231228
+ Version: 1.7.0.dev20240104
  Summary: Flower: A Friendly Federated Learning Framework
  Home-page: https://flower.dev
  License: Apache-2.0
@@ -1,20 +1,20 @@
  flwr/__init__.py,sha256=6zbcS7z2q-VUdmpFppLH6BacsE-ZFmfq6OvtKNOyYE0,981
  flwr/client/__init__.py,sha256=2T4enmlE4PsoKiGTvXwBKSlhOjZ7MXRy5oCGNf0UH9Y,1111
- flwr/client/app.py,sha256=X5OGCBtKtEj-JBk9tXJyE6Rfllpo6Nse46pkznzofdg,19252
+ flwr/client/app.py,sha256=te-CJj1VYsm6XTPPkVrvF1Tyg_fC_URU0bqaqDyPUw0,19176
  flwr/client/client.py,sha256=o7uPXVvLJExyqGz8W1JdjnVL3HTxvyAZ6hhIQTEjwrg,8233
  flwr/client/dpfedavg_numpy_client.py,sha256=LxUcPBO0mU3VScAx9vx2PlsghXjQZVEBOn3rolgrgio,7216
  flwr/client/flower.py,sha256=PPYROFVnu7NEe7sR8XFVbkfvfwecIuQc5-VllPz0LlQ,4049
  flwr/client/grpc_client/__init__.py,sha256=LsnbqXiJhgQcB0XzAlUQgPx011Uf7Y7yabIC1HxivJ8,735
- flwr/client/grpc_client/connection.py,sha256=5EcdnedH0SvNA4daIfRWzQLS_3a59SJb2o8RGzvnDO0,5007
+ flwr/client/grpc_client/connection.py,sha256=ki7ZaYvSDJK1bvW2UxrldyToiEvqk2j_cuIaICT19ZA,5002
  flwr/client/grpc_rere_client/__init__.py,sha256=avn6W_vHEM_yZEB1S7hCZgnTbXb6ZujqRP_vAzyXu-0,752
  flwr/client/grpc_rere_client/connection.py,sha256=r0wzwJp9ugcHlD0iF6wj45KqpfDe_AwBbI9jUjiIVrI,6560
  flwr/client/message_handler/__init__.py,sha256=abHvBRJJiiaAMNgeILQbMOa6h8WqMK2BcnvxwQZFpic,719
- flwr/client/message_handler/message_handler.py,sha256=kX5ruNg71mqkFWC6zhBw_xFN3y7LwotoaIxZPOdOfhc,8360
- flwr/client/message_handler/task_handler.py,sha256=1ZTFP3nJejrIo_hEwhyjMpxf7jewI5kVKdHE5ljtZNY,5743
+ flwr/client/message_handler/message_handler.py,sha256=DUUBNEFhRAby8Oyf0_M9O2ZgNSdnfEygHRd8RgUg-Gs,8355
+ flwr/client/message_handler/task_handler.py,sha256=zy9BuJ_BLiF3-ngDlTA_cOZW6QfqNkYV00X4P7po9hE,5713
  flwr/client/middleware/__init__.py,sha256=Eo3JvAV5XqmyRySNqeiw93YNETmmP5ixEOMeBA6ah4w,769
  flwr/client/middleware/utils.py,sha256=QUghso_SWsKTUPfKwrtBwPyyJoEI9AV9hRY2acu1TYE,1168
- flwr/client/node_state.py,sha256=ZXL6L4GUku1X-lLcS-khiDwZTphGHdzUzj82vvopweQ,1933
- flwr/client/node_state_tests.py,sha256=-iHTtcjQJwo_D4gmiEnRtxR8Oz_7wRYzKQo8Xk8TwSU,1988
+ flwr/client/node_state.py,sha256=3y6_Ibn_7Zg9KrkADPLwNBrl9dSJsC3KYC7VqecMw2U,1864
+ flwr/client/node_state_tests.py,sha256=OBowSSvTg3ckHzGFmGpJnhrmJB0eyNJjv48fOm1_cmo,1963
  flwr/client/numpy_client.py,sha256=W1u85DLrT5s-gmjGI2SeT-OQyPBshHJOQwqT8OZErrk,10321
  flwr/client/rest_client/__init__.py,sha256=ThwOnkMdzxo_UuyTI47Q7y9oSpuTgNT2OuFvJCfuDiw,735
  flwr/client/rest_client/connection.py,sha256=0vzOKxdnqgLtne4kIXVREtMWr40HqlqIvE6ZRur-ViM,11583
@@ -45,14 +45,14 @@ flwr/common/telemetry.py,sha256=se_-pHgEWcmN09ChSpTeek72l1UJHf7GbwXBB1KXBjQ,7683
  flwr/common/typing.py,sha256=e21UHUqHIzucAkTt2Xh7sOO-qTCZjqEbY09lHaCE9tc,3959
  flwr/common/version.py,sha256=A0MKvyKPrV8wLg0YCAODTqM71v26NEH36c6JYtfgg0o,667
  flwr/driver/__init__.py,sha256=NQ4KeZ5fP9wdxGjcr2cP41_7TLuuYQ3u4J7GwYtQ488,870
- flwr/driver/app.py,sha256=h_Er4UDwdcX1agiHzl8sQtFmgSVLUkc3qxLUutLPTxA,7263
- flwr/driver/driver.py,sha256=zNeGvaq-t_JOeMLdN2UGsffPqNQ6dVUX6HpJPlLQbIs,3992
- flwr/driver/driver_client_proxy.py,sha256=qZznLAwy_wj_g_0hG74qxB-j0zniAiB16Y_y2ueqv7Q,5758
+ flwr/driver/app.py,sha256=Lg21jwMTUyDjdA55n--ySNh-DK4se_YZou3Gditk1-g,7228
+ flwr/driver/driver.py,sha256=1X8caHcchoeV96cKlGr9b8EZhWdJqTpzTjv6-6J7CyI,3912
+ flwr/driver/driver_client_proxy.py,sha256=4OQWjX-L8FOfCStij9dzTqkw_lrkbHqNGDrLFiUFcbo,5719
  flwr/driver/grpc_driver.py,sha256=A9llpjE8diK_-s0vZ0yKnSRa1xr7y51d5JB8vfBbmOM,4526
  flwr/flower/__init__.py,sha256=VlOKQzq4OecSM4Ga2uk1iu2K8mxMc-j1XA0bdhaTeMw,892
  flwr/proto/__init__.py,sha256=hbY7JYakwZwCkYgCNlmHdc8rtvfoJbAZLalMdc--CGc,683
- flwr/proto/driver_pb2.py,sha256=UcS3znnj9lb0nTbPUgO8G43vJ4oOTXzp85AsxQhMTDw,5954
- flwr/proto/driver_pb2.pyi,sha256=-hpthCr8T4NAzHwPCBgPmw1-iySQu22eHSICxXGE2LA,4755
+ flwr/proto/driver_pb2.py,sha256=kik2QjFAb9hNyrjMv_xMsThBzpsw5QxZSkzaur3PvwQ,5944
+ flwr/proto/driver_pb2.pyi,sha256=o95CkfxURsu6rsIlfSuq8F4YSTHdPtDOPAgpw3GW19s,4705
  flwr/proto/driver_pb2_grpc.py,sha256=FL875wzg0E9qd70ssV55YpEwAdEHUt7XTu7MNL0soTQ,7374
  flwr/proto/driver_pb2_grpc.pyi,sha256=70HgehRecB93VgQQzv1goESXPHQeEdmb6A1St2lrpG0,2062
  flwr/proto/fleet_pb2.py,sha256=h_lDZDTEauPy53lomiYaSngwmveNkMw94rvCQjg3yx8,7295
@@ -63,8 +63,8 @@ flwr/proto/node_pb2.py,sha256=bWqn3w6wvUjBrQQFA4P5lIpfFXQQ4u0kEMavk4xqoQo,1188
  flwr/proto/node_pb2.pyi,sha256=aX3BHhgXvJE1rvcRnEE_gB-5GcaFQ0SJ88yTE223bjI,751
  flwr/proto/node_pb2_grpc.py,sha256=1oboBPFxaTEXt9Aw7EAj8gXHDCNMhZD2VXqocC9l_gk,159
  flwr/proto/node_pb2_grpc.pyi,sha256=ff2TSiLVnG6IVQcTGzb2DIH3XRSoAvAo_RMcvbMFyc0,76
- flwr/proto/task_pb2.py,sha256=1aJ15GzR7Stvf6O7DMdxxcBflUeg2r-5chB5QLqAqNo,8236
- flwr/proto/task_pb2.pyi,sha256=Je30RzZFzUXhMGCrYPEHUwXNTcEUDpOYfA59AyWABaA,10893
+ flwr/proto/task_pb2.py,sha256=k64g-fJh-PgKghjMKlDLWJcrlOH5aFswyzd6OzClhGs,8228
+ flwr/proto/task_pb2.pyi,sha256=vdsSlvMkg9vN-qQsf6HgV_JbbYrBK17f-fkeJo8a55Y,10843
  flwr/proto/task_pb2_grpc.py,sha256=1oboBPFxaTEXt9Aw7EAj8gXHDCNMhZD2VXqocC9l_gk,159
  flwr/proto/task_pb2_grpc.pyi,sha256=ff2TSiLVnG6IVQcTGzb2DIH3XRSoAvAo_RMcvbMFyc0,76
  flwr/proto/transport_pb2.py,sha256=XWXbVa9pylkn-1mdlSaV9x0lk8zIC40DZzBcB_3LaMk,18721
@@ -78,7 +78,7 @@ flwr/server/client_manager.py,sha256=T8UDSRJBVD3fyIDI7NTAA-NA7GPrMNNgH2OAF54RRxE
  flwr/server/client_proxy.py,sha256=8ScGDvP3jHbl8DV3hyFID5N5VEVlXn8ZTQXtkdOfssI,2234
  flwr/server/criterion.py,sha256=ypbAexbztzGUxNen9RCHF91QeqiEQix4t4Ih3E-42MM,1061
  flwr/server/driver/__init__.py,sha256=STB1_DASVEg7Cu6L7VYxTzV7UMkgtBkFim09Z82Dh8I,712
- flwr/server/driver/driver_servicer.py,sha256=Ru6iR2oIgVJmir48a_DXv0ph4R5P-k7l_CaK7iQHXdA,4518
+ flwr/server/driver/driver_servicer.py,sha256=C_gPF4nHyBM3p1CNSDpBOb6GLqkuBeLocB81giL3hAU,4498
  flwr/server/fleet/__init__.py,sha256=C6GCSD5eP5Of6_dIeSe1jx9HnV0icsvWyQ5EKAUHJRU,711
  flwr/server/fleet/grpc_bidi/__init__.py,sha256=mgGJGjwT6VU7ovC1gdnnqttjyBPlNIcZnYRqx4K3IBQ,735
  flwr/server/fleet/grpc_bidi/driver_client_manager.py,sha256=179z_IVT_95G7dIMsP9ec0JpoPEA62Eu12YyM-TKsyM,4284
@@ -96,9 +96,9 @@ flwr/server/fleet/rest_rere/rest_api.py,sha256=_72gqtux9js0NBS-qnhLF--Pb93Em6sGP
  flwr/server/history.py,sha256=W7PHCFX7dLXrdnaVfl5V4tuzmtxh6zArkWYxVXvTZ1c,4904
  flwr/server/server.py,sha256=skrNgQp9vlCVHruovlaB0Rh1W7xdH7KqEfVCMZGpK7c,15965
  flwr/server/state/__init__.py,sha256=ij-7Ms-hyordQdRmGQxY1-nVa4OhixJ0jr7_YDkys0s,1003
- flwr/server/state/in_memory_state.py,sha256=j4redg12x5ULd40V-NLIuw5A1LtZZJo3xgzDINe0W3g,7983
- flwr/server/state/sqlite_state.py,sha256=wb0vSGUXN_A60I3qHj3bhosPxiPhbv1wRzRSqy-a-sI,21423
- flwr/server/state/state.py,sha256=gsCYc7c5OO3Y2O5k7McS2WIJNzvlswMTSpuTPIr20-E,5321
+ flwr/server/state/in_memory_state.py,sha256=iDdk5y0vNkoiWPbEKj72JARzEP7pahWQ9Q_sQ-9eZp8,7883
+ flwr/server/state/sqlite_state.py,sha256=myd9MsI3aeDDBJG3oyDDV4F5q9aJCOI4jrtol40lAr0,21273
+ flwr/server/state/state.py,sha256=d464gfbAdBpqpcKZ8MlKphJruE4MX-x-8G5ENg1UB4M,5301
  flwr/server/state/state_factory.py,sha256=91cSB-KOAFM37z7T098WxTkVeKNaAZ_mTI75snn2_tk,1654
  flwr/server/strategy/__init__.py,sha256=EDTv_lU67VmZ8pRqy5fabQDhq5x4oRiD-KHoXhOIWMs,2096
  flwr/server/strategy/aggregate.py,sha256=KhxPZbc0riuxci214jNDsiD65jT7XLW6oaU2Gux9u9s,12528
@@ -129,10 +129,10 @@ flwr/simulation/__init__.py,sha256=E2eD5FlTmZZ80u21FmWCkacrM7O4mrEHD8iXqeCaBUQ,1
  flwr/simulation/app.py,sha256=ep8QOHPGCZ3121eu9EJ0T9xnM_KgTyDjYKRakgQ7_F4,13098
  flwr/simulation/ray_transport/__init__.py,sha256=FsaAnzC4cw4DqoouBCix6496k29jACkfeIam55BvW9g,734
  flwr/simulation/ray_transport/ray_actor.py,sha256=kCaH8I-F7Ckk1LbwTSpYvwB8Rqar8Kl1V8jbhwmHcQg,17133
- flwr/simulation/ray_transport/ray_client_proxy.py,sha256=FIyqnhBU2I34ZAHRJrPIRYoNQwz1PaU7c0ERiHSepDE,9530
+ flwr/simulation/ray_transport/ray_client_proxy.py,sha256=u_tvBCY3EiL7FAbWJwveQHdqLBrp6T2d7i-HZd_ftdo,9490
  flwr/simulation/ray_transport/utils.py,sha256=v36Oy9U4adcVjrqGUJjoWNnZVd_KFoixLmRmDe_CCzs,3373
- flwr_nightly-1.7.0.dev20231228.dist-info/LICENSE,sha256=z8d0m5b2O9McPEK1xHG_dWgUBT6EfBDz6wA0F7xSPTA,11358
- flwr_nightly-1.7.0.dev20231228.dist-info/METADATA,sha256=oa63sMaby4cj4ONWFDljqanrBII8rz02w_QgocAyzl4,13449
- flwr_nightly-1.7.0.dev20231228.dist-info/WHEEL,sha256=Zb28QaM1gQi8f4VCBhsUklF61CTlNYfs9YAZn-TOGFk,88
- flwr_nightly-1.7.0.dev20231228.dist-info/entry_points.txt,sha256=1uLlD5tIunkzALMfMWnqjdE_D5hRUX_I1iMmOMv6tZI,181
- flwr_nightly-1.7.0.dev20231228.dist-info/RECORD,,
+ flwr_nightly-1.7.0.dev20240104.dist-info/LICENSE,sha256=z8d0m5b2O9McPEK1xHG_dWgUBT6EfBDz6wA0F7xSPTA,11358
+ flwr_nightly-1.7.0.dev20240104.dist-info/METADATA,sha256=H_d6qitb63LZRKdoUoK5Ayn40-ac7D_bViwn0uO87bA,13449
+ flwr_nightly-1.7.0.dev20240104.dist-info/WHEEL,sha256=Zb28QaM1gQi8f4VCBhsUklF61CTlNYfs9YAZn-TOGFk,88
+ flwr_nightly-1.7.0.dev20240104.dist-info/entry_points.txt,sha256=1uLlD5tIunkzALMfMWnqjdE_D5hRUX_I1iMmOMv6tZI,181
+ flwr_nightly-1.7.0.dev20240104.dist-info/RECORD,,