flwr-nightly 1.10.0.dev20240710__py3-none-any.whl → 1.10.0.dev20240711__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release: this version of flwr-nightly might be problematic.

flwr/cli/config_utils.py CHANGED
@@ -108,6 +108,14 @@ def load(path: Optional[Path] = None) -> Optional[Dict[str, Any]]:
         return load_from_string(toml_file.read())
 
 
+def _validate_run_config(config_dict: Dict[str, Any], errors: List[str]) -> None:
+    for key, value in config_dict.items():
+        if isinstance(value, dict):
+            _validate_run_config(config_dict[key], errors)
+        elif not isinstance(value, str):
+            errors.append(f"Config value of key {key} is not of type `str`.")
+
+
 # pylint: disable=too-many-branches
 def validate_fields(config: Dict[str, Any]) -> Tuple[bool, List[str], List[str]]:
     """Validate pyproject.toml fields."""
@@ -133,6 +141,8 @@ def validate_fields(config: Dict[str, Any]) -> Tuple[bool, List[str], List[str]]
     else:
         if "publisher" not in config["flower"]:
             errors.append('Property "publisher" missing in [flower]')
+        if "config" in config["flower"]:
+            _validate_run_config(config["flower"]["config"], errors)
     if "components" not in config["flower"]:
         errors.append("Missing [flower.components] section")
     else:
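The new _validate_run_config helper walks a [flower.config] table recursively and reports every leaf value that is not a string. A minimal sketch of its behaviour, with made-up input values:

errors: List[str] = []
_validate_run_config({"lr": "0.01", "optim": {"momentum": 0.9}}, errors)
# momentum is a float, not a str, so afterwards:
# errors == ["Config value of key momentum is not of type `str`."]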
flwr/cli/run/run.py CHANGED
@@ -18,13 +18,14 @@ import sys
 from enum import Enum
 from logging import DEBUG
 from pathlib import Path
-from typing import Optional
+from typing import Dict, Optional
 
 import typer
 from typing_extensions import Annotated
 
 from flwr.cli import config_utils
 from flwr.cli.build import build
+from flwr.common.config import parse_config_args
 from flwr.common.constant import SUPEREXEC_DEFAULT_ADDRESS
 from flwr.common.grpc import GRPC_MAX_MESSAGE_LENGTH, create_channel
 from flwr.common.logger import log
@@ -58,15 +59,20 @@ def run(
         Optional[Path],
         typer.Option(help="Path of the Flower project to run"),
     ] = None,
+    config_overrides: Annotated[
+        Optional[str],
+        typer.Option(
+            "--config",
+            "-c",
+            help="Override configuration key-value pairs",
+        ),
+    ] = None,
 ) -> None:
     """Run Flower project."""
-    if use_superexec:
-        _start_superexec_run(directory)
-        return
-
     typer.secho("Loading project configuration... ", fg=typer.colors.BLUE)
 
-    config, errors, warnings = config_utils.load_and_validate()
+    pyproject_path = directory / "pyproject.toml" if directory else None
+    config, errors, warnings = config_utils.load_and_validate(path=pyproject_path)
 
     if config is None:
         typer.secho(
@@ -88,6 +94,12 @@ def run(
 
     typer.secho("Success", fg=typer.colors.GREEN)
 
+    if use_superexec:
+        _start_superexec_run(
+            parse_config_args(config_overrides, separator=","), directory
+        )
+        return
+
     server_app_ref = config["flower"]["components"]["serverapp"]
     client_app_ref = config["flower"]["components"]["clientapp"]
 
@@ -115,7 +127,9 @@
     )
 
 
-def _start_superexec_run(directory: Optional[Path]) -> None:
+def _start_superexec_run(
+    override_config: Dict[str, str], directory: Optional[Path]
+) -> None:
     def on_channel_state_change(channel_connectivity: str) -> None:
         """Log channel connectivity."""
         log(DEBUG, channel_connectivity)
@@ -132,6 +146,9 @@ def _start_superexec_run(directory: Optional[Path]) -> None:
 
     fab_path = build(directory)
 
-    req = StartRunRequest(fab_file=Path(fab_path).read_bytes())
+    req = StartRunRequest(
+        fab_file=Path(fab_path).read_bytes(),
+        override_config=override_config,
+    )
     res = stub.StartRun(req)
     typer.secho(f"🎊 Successfully started run {res.run_id}", fg=typer.colors.GREEN)
flwr/client/app.py CHANGED
@@ -19,6 +19,7 @@ import sys
 import time
 from dataclasses import dataclass
 from logging import DEBUG, ERROR, INFO, WARN
+from pathlib import Path
 from typing import Callable, ContextManager, Dict, Optional, Tuple, Type, Union
 
 from cryptography.hazmat.primitives.asymmetric import ec
@@ -193,6 +194,7 @@ def _start_client_internal(
     max_retries: Optional[int] = None,
     max_wait_time: Optional[float] = None,
     partition_id: Optional[int] = None,
+    flwr_dir: Optional[Path] = None,
 ) -> None:
     """Start a Flower client node which connects to a Flower server.
 
@@ -239,6 +241,8 @@
     partition_id: Optional[int] (default: None)
         The data partition index associated with this node. Better suited for
        prototyping purposes.
+    flwr_dir: Optional[Path] (default: None)
+        The fully resolved path containing installed Flower Apps.
     """
     if insecure is None:
         insecure = root_certificates is None
@@ -316,7 +320,7 @@
     )
 
     node_state = NodeState(partition_id=partition_id)
-    run_info: Dict[int, Run] = {}
+    runs: Dict[int, Run] = {}
 
     while not app_state_tracker.interrupt:
         sleep_duration: int = 0
@@ -366,15 +370,17 @@
 
             # Get run info
             run_id = message.metadata.run_id
-            if run_id not in run_info:
+            if run_id not in runs:
                 if get_run is not None:
-                    run_info[run_id] = get_run(run_id)
+                    runs[run_id] = get_run(run_id)
                 # If get_run is None, i.e., in grpc-bidi mode
                 else:
-                    run_info[run_id] = Run(run_id, "", "", {})
+                    runs[run_id] = Run(run_id, "", "", {})
 
             # Register context for this run
-            node_state.register_context(run_id=run_id)
+            node_state.register_context(
+                run_id=run_id, run=runs[run_id], flwr_dir=flwr_dir
+            )
 
             # Retrieve context for this run
             context = node_state.retrieve_context(run_id=run_id)
@@ -388,7 +394,7 @@
             # Handle app loading and task message
             try:
                 # Load ClientApp instance
-                run: Run = run_info[run_id]
+                run: Run = runs[run_id]
                 client_app: ClientApp = load_client_app_fn(
                     run.fab_id, run.fab_version
                 )
flwr/client/node_state.py CHANGED
@@ -15,9 +15,21 @@
 """Node state."""
 
 
+from dataclasses import dataclass
+from pathlib import Path
 from typing import Any, Dict, Optional
 
 from flwr.common import Context, RecordSet
+from flwr.common.config import get_fused_config
+from flwr.common.typing import Run
+
+
+@dataclass()
+class RunInfo:
+    """Contains the Context and initial run_config of a Run."""
+
+    context: Context
+    initial_run_config: Dict[str, str]
 
 
 class NodeState:
@@ -25,20 +37,31 @@ class NodeState:
 
     def __init__(self, partition_id: Optional[int]) -> None:
         self._meta: Dict[str, Any] = {}  # holds metadata about the node
-        self.run_contexts: Dict[int, Context] = {}
+        self.run_infos: Dict[int, RunInfo] = {}
         self._partition_id = partition_id
 
-    def register_context(self, run_id: int) -> None:
+    def register_context(
+        self,
+        run_id: int,
+        run: Optional[Run] = None,
+        flwr_dir: Optional[Path] = None,
+    ) -> None:
         """Register new run context for this node."""
-        if run_id not in self.run_contexts:
-            self.run_contexts[run_id] = Context(
-                state=RecordSet(), run_config={}, partition_id=self._partition_id
+        if run_id not in self.run_infos:
+            initial_run_config = get_fused_config(run, flwr_dir) if run else {}
+            self.run_infos[run_id] = RunInfo(
+                initial_run_config=initial_run_config,
+                context=Context(
+                    state=RecordSet(),
+                    run_config=initial_run_config.copy(),
+                    partition_id=self._partition_id,
+                ),
             )
 
     def retrieve_context(self, run_id: int) -> Context:
         """Get run context given a run_id."""
-        if run_id in self.run_contexts:
-            return self.run_contexts[run_id]
+        if run_id in self.run_infos:
+            return self.run_infos[run_id].context
 
         raise RuntimeError(
             f"Context for run_id={run_id} doesn't exist."
@@ -48,4 +71,9 @@
 
     def update_context(self, run_id: int, context: Context) -> None:
         """Update run context."""
-        self.run_contexts[run_id] = context
+        if context.run_config != self.run_infos[run_id].initial_run_config:
+            raise ValueError(
+                "The `run_config` field of the `Context` object cannot be "
+                f"modified (run_id: {run_id})."
+            )
+        self.run_infos[run_id].context = context
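A small sketch of the new behaviour (the run_id is arbitrary; registering without a Run object yields an empty run_config): the run_config captured in RunInfo at registration time is treated as frozen, and update_context rejects a Context whose run_config was mutated.

node_state = NodeState(partition_id=None)
node_state.register_context(run_id=42)               # initial_run_config == {}
ctx = node_state.retrieve_context(run_id=42)
ctx.run_config["lr"] = "0.1"                          # mutate the frozen config
node_state.update_context(run_id=42, context=ctx)     # raises ValueError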
flwr/client/node_state_tests.py CHANGED
@@ -59,7 +59,8 @@ def test_multirun_in_node_state() -> None:
         node_state.update_context(run_id=run_id, context=updated_state)
 
     # Verify values
-    for run_id, context in node_state.run_contexts.items():
+    for run_id, run_info in node_state.run_infos.items():
         assert (
-            context.state.configs_records["counter"]["count"] == expected_values[run_id]
+            run_info.context.state.configs_records["counter"]["count"]
+            == expected_values[run_id]
         )
flwr/client/supernode/app.py CHANGED
@@ -68,6 +68,7 @@ def run_supernode() -> None:
         max_retries=args.max_retries,
         max_wait_time=args.max_wait_time,
         partition_id=args.partition_id,
+        flwr_dir=get_flwr_dir(args.flwr_dir),
     )
 
     # Graceful shutdown
flwr/common/logger.py CHANGED
@@ -197,6 +197,31 @@ def warn_deprecated_feature(name: str) -> None:
     )
 
 
+def warn_deprecated_feature_with_example(
+    deprecation_message: str, example_message: str, code_example: str
+) -> None:
+    """Warn if a feature is deprecated and show code example."""
+    log(
+        WARN,
+        """DEPRECATED FEATURE: %s
+
+            Check the following `FEATURE UPDATE` warning message for the preferred
+            new mechanism to use this feature in Flower.
+        """,
+        deprecation_message,
+    )
+    log(
+        WARN,
+        """FEATURE UPDATE: %s
+            ------------------------------------------------------------
+            %s
+            ------------------------------------------------------------
+        """,
+        example_message,
+        code_example,
+    )
+
+
 def warn_unsupported_feature(name: str) -> None:
     """Warn the user when they use an unsupported feature."""
     log(
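A brief usage sketch of the new helper. The strings below are illustrative only; the real call site in flwr/server/server_app.py (later in this diff) passes its own messages and the SERVER_FN_USAGE_EXAMPLE constant:

warn_deprecated_feature_with_example(
    deprecation_message="Passing `strategy` directly is deprecated.",
    example_message="Use the `server_fn` argument instead. For example:",
    code_example="app = ServerApp(server_fn=server_fn)",
)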
flwr/server/__init__.py CHANGED
@@ -28,6 +28,7 @@ from .run_serverapp import run_server_app as run_server_app
 from .server import Server as Server
 from .server_app import ServerApp as ServerApp
 from .server_config import ServerConfig as ServerConfig
+from .serverapp_components import ServerAppComponents as ServerAppComponents
 
 __all__ = [
     "ClientManager",
@@ -36,6 +37,7 @@ __all__ = [
     "LegacyContext",
     "Server",
     "ServerApp",
+    "ServerAppComponents",
     "ServerConfig",
     "SimpleClientManager",
     "run_server_app",
flwr/server/server_app.py CHANGED
@@ -17,8 +17,11 @@
 
 from typing import Callable, Optional
 
-from flwr.common import Context, RecordSet
-from flwr.common.logger import warn_preview_feature
+from flwr.common import Context
+from flwr.common.logger import (
+    warn_deprecated_feature_with_example,
+    warn_preview_feature,
+)
 from flwr.server.strategy import Strategy
 
 from .client_manager import ClientManager
@@ -26,7 +29,20 @@ from .compat import start_driver
 from .driver import Driver
 from .server import Server
 from .server_config import ServerConfig
-from .typing import ServerAppCallable
+from .typing import ServerAppCallable, ServerFn
+
+SERVER_FN_USAGE_EXAMPLE = """
+
+        def server_fn(context: Context):
+            server_config = ServerConfig(num_rounds=3)
+            strategy = FedAvg()
+            return ServerAppComponents(
+                strategy=strategy,
+                server_config=server_config,
+            )
+
+        app = ServerApp(server_fn=server_fn)
+"""
 
 
 class ServerApp:
@@ -36,13 +52,15 @@ class ServerApp:
     --------
     Use the `ServerApp` with an existing `Strategy`:
 
-    >>> server_config = ServerConfig(num_rounds=3)
-    >>> strategy = FedAvg()
+    >>> def server_fn(context: Context):
+    >>>     server_config = ServerConfig(num_rounds=3)
+    >>>     strategy = FedAvg()
+    >>>     return ServerAppComponents(
+    >>>         strategy=strategy,
+    >>>         server_config=server_config,
+    >>>     )
     >>>
-    >>> app = ServerApp(
-    >>>     server_config=server_config,
-    >>>     strategy=strategy,
-    >>> )
+    >>> app = ServerApp(server_fn=server_fn)
 
     Use the `ServerApp` with a custom main function:
 
@@ -53,23 +71,52 @@ class ServerApp:
    >>>    print("ServerApp running")
    """
 
+    # pylint: disable=too-many-arguments
     def __init__(
         self,
         server: Optional[Server] = None,
         config: Optional[ServerConfig] = None,
         strategy: Optional[Strategy] = None,
         client_manager: Optional[ClientManager] = None,
+        server_fn: Optional[ServerFn] = None,
     ) -> None:
+        if any([server, config, strategy, client_manager]):
+            warn_deprecated_feature_with_example(
+                deprecation_message="Passing either `server`, `config`, `strategy` or "
+                "`client_manager` directly to the ServerApp "
+                "constructor is deprecated.",
+                example_message="Pass `ServerApp` arguments wrapped "
+                "in a `flwr.server.ServerAppComponents` object that gets "
+                "returned by a function passed as the `server_fn` argument "
+                "to the `ServerApp` constructor. For example: ",
+                code_example=SERVER_FN_USAGE_EXAMPLE,
+            )
+
+            if server_fn:
+                raise ValueError(
+                    "Passing `server_fn` is incompatible with passing the "
+                    "other arguments (now deprecated) to ServerApp. "
+                    "Use `server_fn` exclusively."
+                )
+
         self._server = server
         self._config = config
         self._strategy = strategy
         self._client_manager = client_manager
+        self._server_fn = server_fn
         self._main: Optional[ServerAppCallable] = None
 
     def __call__(self, driver: Driver, context: Context) -> None:
         """Execute `ServerApp`."""
         # Compatibility mode
         if not self._main:
+            if self._server_fn:
+                # Execute server_fn()
+                components = self._server_fn(context)
+                self._server = components.server
+                self._config = components.config
+                self._strategy = components.strategy
+                self._client_manager = components.client_manager
             start_driver(
                 server=self._server,
                 config=self._config,
@@ -80,7 +127,6 @@ class ServerApp:
             return
 
         # New execution mode
-        context = Context(state=RecordSet(), run_config={})
         self._main(driver, context)
 
     def main(self) -> Callable[[ServerAppCallable], ServerAppCallable]:
flwr/server/serverapp_components.py ADDED
@@ -0,0 +1,52 @@
+# Copyright 2024 Flower Labs GmbH. All Rights Reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+# ==============================================================================
+"""ServerAppComponents for the ServerApp."""
+
+
+from dataclasses import dataclass
+from typing import Optional
+
+from .client_manager import ClientManager
+from .server import Server
+from .server_config import ServerConfig
+from .strategy import Strategy
+
+
+@dataclass
+class ServerAppComponents:  # pylint: disable=too-many-instance-attributes
+    """Components to construct a ServerApp.
+
+    Parameters
+    ----------
+    server : Optional[Server] (default: None)
+        A server implementation, either `flwr.server.Server` or a subclass
+        thereof. If no instance is provided, one will be created internally.
+    config : Optional[ServerConfig] (default: None)
+        Currently supported values are `num_rounds` (int, default: 1) and
+        `round_timeout` in seconds (float, default: None).
+    strategy : Optional[Strategy] (default: None)
+        An implementation of the abstract base class
+        `flwr.server.strategy.Strategy`. If no strategy is provided, then
+        `flwr.server.strategy.FedAvg` will be used.
+    client_manager : Optional[ClientManager] (default: None)
+        An implementation of the class `flwr.server.ClientManager`. If no
+        implementation is provided, then `flwr.server.SimpleClientManager`
+        will be used.
+    """
+
+    server: Optional[Server] = None
+    config: Optional[ServerConfig] = None
+    strategy: Optional[Strategy] = None
+    client_manager: Optional[ClientManager] = None
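Putting the pieces together, a hedged migration sketch. Note that the dataclass field is named config, even though the SERVER_FN_USAGE_EXAMPLE string in server_app.py above uses the keyword server_config; the import paths below are the usual flwr.server ones:

from flwr.common import Context
from flwr.server import ServerApp, ServerAppComponents, ServerConfig
from flwr.server.strategy import FedAvg

def server_fn(context: Context) -> ServerAppComponents:
    # Values are illustrative; build whatever strategy/config the app needs
    return ServerAppComponents(
        strategy=FedAvg(),
        config=ServerConfig(num_rounds=3),
    )

app = ServerApp(server_fn=server_fn)
# Passing server/config/strategy/client_manager directly now triggers the
# deprecation warning, and combining them with server_fn raises ValueError.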
flwr/server/superlink/fleet/vce/backend/backend.py CHANGED
@@ -33,8 +33,8 @@ class Backend(ABC):
         """Construct a backend."""
 
     @abstractmethod
-    async def build(self) -> None:
-        """Build backend asynchronously.
+    def build(self) -> None:
+        """Build backend.
 
         Different components need to be in place before workers in a backend are ready
         to accept jobs. When this method finishes executing, the backend should be fully
@@ -54,11 +54,11 @@ class Backend(ABC):
         """Report whether a backend worker is idle and can therefore run a ClientApp."""
 
     @abstractmethod
-    async def terminate(self) -> None:
+    def terminate(self) -> None:
         """Terminate backend."""
 
     @abstractmethod
-    async def process_message(
+    def process_message(
         self,
         app: Callable[[], ClientApp],
         message: Message,
flwr/server/superlink/fleet/vce/backend/raybackend.py CHANGED
@@ -153,12 +153,12 @@ class RayBackend(Backend):
         """Report whether the pool has idle actors."""
         return self.pool.is_actor_available()
 
-    async def build(self) -> None:
+    def build(self) -> None:
         """Build pool of Ray actors that this backend will submit jobs to."""
-        await self.pool.add_actors_to_pool(self.pool.actors_capacity)
+        self.pool.add_actors_to_pool(self.pool.actors_capacity)
         log(DEBUG, "Constructed ActorPool with: %i actors", self.pool.num_actors)
 
-    async def process_message(
+    def process_message(
         self,
         app: Callable[[], ClientApp],
         message: Message,
@@ -172,17 +172,16 @@ class RayBackend(Backend):
 
         try:
             # Submit a task to the pool
-            future = await self.pool.submit(
+            future = self.pool.submit(
                 lambda a, a_fn, mssg, cid, state: a.run.remote(a_fn, mssg, cid, state),
                 (app, message, str(partition_id), context),
             )
 
-            await future
             # Fetch result
             (
                 out_mssg,
                 updated_context,
-            ) = await self.pool.fetch_result_and_return_actor_to_pool(future)
+            ) = self.pool.fetch_result_and_return_actor_to_pool(future)
 
             return out_mssg, updated_context
 
@@ -193,11 +192,11 @@ class RayBackend(Backend):
                 self.__class__.__name__,
             )
             # add actor back into pool
-            await self.pool.add_actor_back_to_pool(future)
+            self.pool.add_actor_back_to_pool(future)
             raise ex
 
-    async def terminate(self) -> None:
+    def terminate(self) -> None:
         """Terminate all actors in actor pool."""
-        await self.pool.terminate_all_actors()
+        self.pool.terminate_all_actors()
         ray.shutdown()
         log(DEBUG, "Terminated %s", self.__class__.__name__)
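Since build(), process_message() and terminate() are no longer coroutines, a backend is now driven with plain calls. A minimal sketch, assuming backend_fn, app_fn, message and context are already in hand (as in the vce_api worker below):

backend = backend_fn()                 # create the backend (e.g. RayBackend)
backend.build()                        # set up the actor pool / workers
out_msg, updated_ctx = backend.process_message(app_fn, message, context)
backend.terminate()                    # shut the workers down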
flwr/server/superlink/fleet/vce/vce_api.py CHANGED
@@ -14,14 +14,18 @@
 # ==============================================================================
 """Fleet Simulation Engine API."""
 
-import asyncio
+
 import json
 import sys
+import threading
 import time
 import traceback
+from concurrent.futures import ThreadPoolExecutor
 from logging import DEBUG, ERROR, INFO, WARN
 from pathlib import Path
-from typing import Callable, Dict, List, Optional
+from queue import Empty, Queue
+from time import sleep
+from typing import Callable, Dict, Optional
 
 from flwr.client.client_app import ClientApp, ClientAppException, LoadClientAppError
 from flwr.client.node_state import NodeState
@@ -31,7 +35,7 @@ from flwr.common.message import Error
 from flwr.common.object_ref import load_app
 from flwr.common.serde import message_from_taskins, message_to_taskres
 from flwr.proto.task_pb2 import TaskIns, TaskRes  # pylint: disable=E0611
-from flwr.server.superlink.state import StateFactory
+from flwr.server.superlink.state import State, StateFactory
 
 from .backend import Backend, error_messages_backends, supported_backends
 
@@ -52,18 +56,21 @@ def _register_nodes(
 
 
 # pylint: disable=too-many-arguments,too-many-locals
-async def worker(
+def worker(
     app_fn: Callable[[], ClientApp],
-    taskins_queue: "asyncio.Queue[TaskIns]",
-    taskres_queue: "asyncio.Queue[TaskRes]",
+    taskins_queue: "Queue[TaskIns]",
+    taskres_queue: "Queue[TaskRes]",
     node_states: Dict[int, NodeState],
     backend: Backend,
+    f_stop: threading.Event,
 ) -> None:
     """Get TaskIns from queue and pass it to an actor in the pool to execute it."""
-    while True:
+    while not f_stop.is_set():
         out_mssg = None
         try:
-            task_ins: TaskIns = await taskins_queue.get()
+            # Fetch from queue with timeout. We use a timeout so
+            # the stopping event can be evaluated even when the queue is empty.
+            task_ins: TaskIns = taskins_queue.get(timeout=1.0)
             node_id = task_ins.task.consumer.node_id
 
             # Register and retrieve runstate
@@ -74,7 +81,7 @@ async def worker(
             message = message_from_taskins(task_ins)
 
             # Let backend process message
-            out_mssg, updated_context = await backend.process_message(
+            out_mssg, updated_context = backend.process_message(
                 app_fn, message, context
             )
 
@@ -82,11 +89,9 @@ async def worker(
             node_states[node_id].update_context(
                 task_ins.run_id, context=updated_context
             )
-
-        except asyncio.CancelledError as e:
-            log(DEBUG, "Terminating async worker: %s", e)
-            break
-
+        except Empty:
+            # An exception raised if queue.get times out
+            pass
         # Exceptions aren't raised but reported as an error message
         except Exception as ex:  # pylint: disable=broad-exception-caught
             log(ERROR, ex)
@@ -110,83 +115,48 @@ async def worker(
             task_res = message_to_taskres(out_mssg)
             # Store TaskRes in state
             task_res.task.pushed_at = time.time()
-            await taskres_queue.put(task_res)
+            taskres_queue.put(task_res)
 
 
-async def add_taskins_to_queue(
-    queue: "asyncio.Queue[TaskIns]",
-    state_factory: StateFactory,
+def add_taskins_to_queue(
+    state: State,
+    queue: "Queue[TaskIns]",
     nodes_mapping: NodeToPartitionMapping,
-    backend: Backend,
-    consumers: List["asyncio.Task[None]"],
-    f_stop: asyncio.Event,
+    f_stop: threading.Event,
 ) -> None:
-    """Retrieve TaskIns and add it to the queue."""
-    state = state_factory.state()
-    num_initial_consumers = len(consumers)
+    """Put TaskIns in a queue from State."""
     while not f_stop.is_set():
         for node_id in nodes_mapping.keys():
-            task_ins = state.get_task_ins(node_id=node_id, limit=1)
-            if task_ins:
-                await queue.put(task_ins[0])
-
-        # Count consumers that are running
-        num_active = sum(not (cc.done()) for cc in consumers)
-
-        # Alert if number of consumers decreased by half
-        if num_active < num_initial_consumers // 2:
-            log(
-                WARN,
-                "Number of active workers has more than halved: (%i/%i active)",
-                num_active,
-                num_initial_consumers,
-            )
+            task_ins_list = state.get_task_ins(node_id=node_id, limit=1)
+            for task_ins in task_ins_list:
+                queue.put(task_ins)
+        sleep(0.1)
 
-        # Break if consumers died
-        if num_active == 0:
-            raise RuntimeError("All workers have died. Ending Simulation.")
 
-        # Log some stats
-        log(
-            DEBUG,
-            "Simulation Engine stats: "
-            "Active workers: (%i/%i) | %s (%i workers) | Tasks in queue: %i)",
-            num_active,
-            num_initial_consumers,
-            backend.__class__.__name__,
-            backend.num_workers,
-            queue.qsize(),
-        )
-        await asyncio.sleep(1.0)
-    log(DEBUG, "Async producer: Stopped pulling from StateFactory.")
-
-
-async def put_taskres_into_state(
-    queue: "asyncio.Queue[TaskRes]",
-    state_factory: StateFactory,
-    f_stop: asyncio.Event,
+def put_taskres_into_state(
+    state: State, queue: "Queue[TaskRes]", f_stop: threading.Event
 ) -> None:
-    """Remove TaskRes from queue and add into State."""
-    state = state_factory.state()
+    """Put TaskRes into State from a queue."""
     while not f_stop.is_set():
-        if queue.qsize():
-            task_res = await queue.get()
-            state.store_task_res(task_res)
-        else:
-            await asyncio.sleep(0.1)
+        try:
+            taskres = queue.get(timeout=1.0)
+            state.store_task_res(taskres)
+        except Empty:
+            # queue is empty when timeout was triggered
+            pass
 
 
-async def run(
+def run(
     app_fn: Callable[[], ClientApp],
     backend_fn: Callable[[], Backend],
     nodes_mapping: NodeToPartitionMapping,
     state_factory: StateFactory,
     node_states: Dict[int, NodeState],
-    f_stop: asyncio.Event,
+    f_stop: threading.Event,
 ) -> None:
-    """Run the VCE async."""
-    taskins_queue: "asyncio.Queue[TaskIns]" = asyncio.Queue(128)
-    taskres_queue: "asyncio.Queue[TaskRes]" = asyncio.Queue(128)
+    """Run the VCE."""
+    taskins_queue: "Queue[TaskIns]" = Queue()
+    taskres_queue: "Queue[TaskRes]" = Queue()
 
     try:
 
@@ -194,42 +164,48 @@ async def run(
         backend = backend_fn()
 
         # Build backend
-        await backend.build()
+        backend.build()
 
         # Add workers (they submit Messages to Backend)
-        worker_tasks = [
-            asyncio.create_task(
-                worker(
-                    app_fn,
-                    taskins_queue,
-                    taskres_queue,
-                    node_states,
-                    backend,
-                )
-            )
-            for _ in range(backend.num_workers)
-        ]
-        # Create producer (adds TaskIns into Queue)
-        taskins_producer = asyncio.create_task(
-            add_taskins_to_queue(
+        state = state_factory.state()
+
+        extractor_th = threading.Thread(
+            target=add_taskins_to_queue,
+            args=(
+                state,
                 taskins_queue,
-                state_factory,
                 nodes_mapping,
-                backend,
-                worker_tasks,
                 f_stop,
-            )
+            ),
         )
+        extractor_th.start()
 
-        taskres_consumer = asyncio.create_task(
-            put_taskres_into_state(taskres_queue, state_factory, f_stop)
+        injector_th = threading.Thread(
+            target=put_taskres_into_state,
+            args=(
+                state,
+                taskres_queue,
+                f_stop,
+            ),
         )
+        injector_th.start()
+
+        with ThreadPoolExecutor() as executor:
+            _ = [
+                executor.submit(
+                    worker,
+                    app_fn,
+                    taskins_queue,
+                    taskres_queue,
+                    node_states,
+                    backend,
+                    f_stop,
+                )
+                for _ in range(backend.num_workers)
+            ]
 
-        # Wait for asyncio taks pulling/pushing TaskIns/TaskRes.
-        # These run forever until f_stop is set or until
-        # all worker (consumer) coroutines are completed. Workers
-        # also run forever and only end if an exception is raised.
-        await asyncio.gather(*(taskins_producer, taskres_consumer))
+        extractor_th.join()
+        injector_th.join()
 
     except Exception as ex:
 
@@ -244,18 +220,9 @@ async def run(
         raise RuntimeError("Simulation Engine crashed.") from ex
 
     finally:
-        # Produced task terminated, now cancel worker tasks
-        for w_t in worker_tasks:
-            _ = w_t.cancel()
-
-        while not all(w_t.done() for w_t in worker_tasks):
-            log(DEBUG, "Terminating async workers...")
-            await asyncio.sleep(0.5)
-
-        await asyncio.gather(*[w_t for w_t in worker_tasks if not w_t.done()])
 
         # Terminate backend
-        await backend.terminate()
+        backend.terminate()
 
 
 # pylint: disable=too-many-arguments,unused-argument,too-many-locals,too-many-branches
@@ -264,7 +231,7 @@ def start_vce(
     backend_name: str,
     backend_config_json_stream: str,
     app_dir: str,
-    f_stop: asyncio.Event,
+    f_stop: threading.Event,
     client_app: Optional[ClientApp] = None,
     client_app_attr: Optional[str] = None,
     num_supernodes: Optional[int] = None,
@@ -368,15 +335,13 @@ def start_vce(
         _ = app_fn()
 
        # Run main simulation loop
-        asyncio.run(
-            run(
-                app_fn,
-                backend_fn,
-                nodes_mapping,
-                state_factory,
-                node_states,
-                f_stop,
-            )
+        run(
+            app_fn,
+            backend_fn,
+            nodes_mapping,
+            state_factory,
+            node_states,
+            f_stop,
         )
     except LoadClientAppError as loadapp_ex:
         f_stop_delay = 10
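The shutdown path is now event-driven rather than asyncio-based: every loop checks the shared threading.Event, and queue reads use a timeout so the event is re-checked even when no TaskIns/TaskRes are pending. A hedged sketch of the caller-side wiring:

f_stop = threading.Event()
# start_vce(..., f_stop=f_stop, ...) spawns the extractor/injector threads
# and the worker pool shown above; requesting shutdown is simply:
f_stop.set()   # loops exit after at most ~1 s (their queue.get timeout)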
flwr/server/typing.py CHANGED
@@ -20,6 +20,8 @@ from typing import Callable
 from flwr.common import Context
 
 from .driver import Driver
+from .serverapp_components import ServerAppComponents
 
 ServerAppCallable = Callable[[Driver, Context], None]
 Workflow = Callable[[Driver, Context], None]
+ServerFn = Callable[[Context], ServerAppComponents]
flwr/simulation/ray_transport/ray_actor.py CHANGED
@@ -14,7 +14,6 @@
 # ==============================================================================
 """Ray-based Flower Actor and ActorPool implementation."""
 
-import asyncio
 import threading
 from abc import ABC
 from logging import DEBUG, ERROR, WARNING
@@ -411,9 +410,7 @@ class BasicActorPool:
         self.client_resources = client_resources
 
         # Queue of idle actors
-        self.pool: "asyncio.Queue[Type[VirtualClientEngineActor]]" = asyncio.Queue(
-            maxsize=1024
-        )
+        self.pool: List[VirtualClientEngineActor] = []
         self.num_actors = 0
 
         # Resolve arguments to pass during actor init
@@ -427,38 +424,37 @@ class BasicActorPool:
         # Figure out how many actors can be created given the cluster resources
         # and the resources the user indicates each VirtualClient will need
         self.actors_capacity = pool_size_from_resources(client_resources)
-        self._future_to_actor: Dict[Any, Type[VirtualClientEngineActor]] = {}
+        self._future_to_actor: Dict[Any, VirtualClientEngineActor] = {}
 
     def is_actor_available(self) -> bool:
         """Return true if there is an idle actor."""
-        return self.pool.qsize() > 0
+        return len(self.pool) > 0
 
-    async def add_actors_to_pool(self, num_actors: int) -> None:
+    def add_actors_to_pool(self, num_actors: int) -> None:
         """Add actors to the pool.
 
         This method may be executed also if new resources are added to your Ray cluster
         (e.g. you add a new node).
         """
         for _ in range(num_actors):
-            await self.pool.put(self.create_actor_fn())  # type: ignore
+            self.pool.append(self.create_actor_fn())  # type: ignore
         self.num_actors += num_actors
 
-    async def terminate_all_actors(self) -> None:
+    def terminate_all_actors(self) -> None:
         """Terminate actors in pool."""
         num_terminated = 0
-        while self.pool.qsize():
-            actor = await self.pool.get()
+        for actor in self.pool:
             actor.terminate.remote()  # type: ignore
             num_terminated += 1
 
         log(DEBUG, "Terminated %i actors", num_terminated)
 
-    async def submit(
+    def submit(
         self, actor_fn: Any, job: Tuple[ClientAppFn, Message, str, Context]
     ) -> Any:
         """On idle actor, submit job and return future."""
         # Remove idle actor from pool
-        actor = await self.pool.get()
+        actor = self.pool.pop()
         # Submit job to actor
         app_fn, mssg, cid, context = job
         future = actor_fn(actor, app_fn, mssg, cid, context)
@@ -467,18 +463,18 @@ class BasicActorPool:
         self._future_to_actor[future] = actor
         return future
 
-    async def add_actor_back_to_pool(self, future: Any) -> None:
+    def add_actor_back_to_pool(self, future: Any) -> None:
         """Ad actor assigned to run future back into the pool."""
         actor = self._future_to_actor.pop(future)
-        await self.pool.put(actor)
+        self.pool.append(actor)
 
-    async def fetch_result_and_return_actor_to_pool(
+    def fetch_result_and_return_actor_to_pool(
         self, future: Any
     ) -> Tuple[Message, Context]:
         """Pull result given a future and add actor back to pool."""
-        # Get actor that ran job
-        await self.add_actor_back_to_pool(future)
         # Retrieve result for object store
         # Instead of doing ray.get(future) we await it
-        _, out_mssg, updated_context = await future
+        _, out_mssg, updated_context = ray.get(future)
+        # Get actor that ran job
+        self.add_actor_back_to_pool(future)
         return out_mssg, updated_context
flwr/simulation/run_simulation.py CHANGED
@@ -22,7 +22,7 @@ import threading
 import traceback
 from logging import DEBUG, ERROR, INFO, WARNING
 from time import sleep
-from typing import Optional
+from typing import Dict, Optional
 
 from flwr.client import ClientApp
 from flwr.common import EventType, event, log
@@ -126,16 +126,25 @@ def run_simulation(
 def run_serverapp_th(
     server_app_attr: Optional[str],
     server_app: Optional[ServerApp],
+    server_app_run_config: Dict[str, str],
     driver: Driver,
     app_dir: str,
-    f_stop: asyncio.Event,
+    f_stop: threading.Event,
+    has_exception: threading.Event,
     enable_tf_gpu_growth: bool,
     delay_launch: int = 3,
 ) -> threading.Thread:
     """Run SeverApp in a thread."""
 
-    def server_th_with_start_checks(  # type: ignore
-        tf_gpu_growth: bool, stop_event: asyncio.Event, **kwargs
+    def server_th_with_start_checks(
+        tf_gpu_growth: bool,
+        stop_event: threading.Event,
+        exception_event: threading.Event,
+        _driver: Driver,
+        _server_app_dir: str,
+        _server_app_run_config: Dict[str, str],
+        _server_app_attr: Optional[str],
+        _server_app: Optional[ServerApp],
     ) -> None:
         """Run SeverApp, after check if GPU memory growth has to be set.
 
@@ -147,10 +156,18 @@ def run_serverapp_th(
                 enable_gpu_growth()
 
             # Run ServerApp
-            run(**kwargs)
+            run(
+                driver=_driver,
+                server_app_dir=_server_app_dir,
+                server_app_run_config=_server_app_run_config,
+                server_app_attr=_server_app_attr,
+                loaded_server_app=_server_app,
+            )
         except Exception as ex:  # pylint: disable=broad-exception-caught
             log(ERROR, "ServerApp thread raised an exception: %s", ex)
             log(ERROR, traceback.format_exc())
+            exception_event.set()
+            raise
         finally:
             log(DEBUG, "ServerApp finished running.")
             # Upon completion, trigger stop event if one was passed
@@ -160,13 +177,16 @@
 
     serverapp_th = threading.Thread(
         target=server_th_with_start_checks,
-        args=(enable_tf_gpu_growth, f_stop),
-        kwargs={
-            "server_app_attr": server_app_attr,
-            "loaded_server_app": server_app,
-            "driver": driver,
-            "server_app_dir": app_dir,
-        },
+        args=(
+            enable_tf_gpu_growth,
+            f_stop,
+            has_exception,
+            driver,
+            app_dir,
+            server_app_run_config,
+            server_app_attr,
+            server_app,
+        ),
     )
     sleep(delay_launch)
     serverapp_th.start()
@@ -196,20 +216,18 @@
     server_app: Optional[ServerApp] = None,
     server_app_attr: Optional[str] = None,
 ) -> None:
-    """Launch SuperLink with Simulation Engine, then ServerApp on a separate thread.
-
-    Everything runs on the main thread or a separate one, depending on whether the main
-    thread already contains a running Asyncio event loop. This is the case if running
-    the Simulation Engine on a Jupyter/Colab notebook.
-    """
+    """Launch SuperLink with Simulation Engine, then ServerApp on a separate thread."""
     # Initialize StateFactory
     state_factory = StateFactory(":flwr-in-memory-state:")
 
-    f_stop = asyncio.Event()
+    f_stop = threading.Event()
+    # A Threading event to indicate if an exception was raised in the ServerApp thread
+    server_app_thread_has_exception = threading.Event()
     serverapp_th = None
     try:
         # Create run (with empty fab_id and fab_version)
         run_id_ = state_factory.state().create_run("", "", {})
+        server_app_run_config: Dict[str, str] = {}
 
         if run_id:
             _override_run_id(state_factory, run_id_to_replace=run_id_, run_id=run_id)
@@ -222,9 +240,11 @@
         serverapp_th = run_serverapp_th(
             server_app_attr=server_app_attr,
             server_app=server_app,
+            server_app_run_config=server_app_run_config,
             driver=driver,
             app_dir=app_dir,
             f_stop=f_stop,
+            has_exception=server_app_thread_has_exception,
             enable_tf_gpu_growth=enable_tf_gpu_growth,
         )
 
@@ -253,6 +273,8 @@
         event(EventType.RUN_SUPERLINK_LEAVE)
         if serverapp_th:
             serverapp_th.join()
+            if server_app_thread_has_exception.is_set():
+                raise RuntimeError("Exception in ServerApp thread")
 
     log(DEBUG, "Stopping Simulation Engine now.")
 
@@ -349,7 +371,6 @@ def _run_simulation(
     # Convert config to original JSON-stream format
     backend_config_stream = json.dumps(backend_config)
 
-    simulation_engine_th = None
     args = (
         num_supernodes,
         backend_name,
@@ -363,31 +384,26 @@
        server_app_attr,
     )
     # Detect if there is an Asyncio event loop already running.
-    # If yes, run everything on a separate thread. In environments
-    # like Jupyter/Colab notebooks, there is an event loop present.
-    run_in_thread = False
+    # If yes, disable logger propagation. In environmnets
+    # like Jupyter/Colab notebooks, it's often better to do this.
+    asyncio_loop_running = False
     try:
         _ = (
             asyncio.get_running_loop()
        )  # Raises RuntimeError if no event loop is present
         log(DEBUG, "Asyncio event loop already running.")
 
-        run_in_thread = True
+        asyncio_loop_running = True
 
     except RuntimeError:
-        log(DEBUG, "No asyncio event loop running")
+        pass
 
     finally:
-        if run_in_thread:
+        if asyncio_loop_running:
             # Set logger propagation to False to prevent duplicated log output in Colab.
             logger = set_logger_propagation(logger, False)
-            log(DEBUG, "Starting Simulation Engine on a new thread.")
-            simulation_engine_th = threading.Thread(target=_main_loop, args=args)
-            simulation_engine_th.start()
-            simulation_engine_th.join()
-        else:
-            log(DEBUG, "Starting Simulation Engine on the main thread.")
-            _main_loop(*args)
+
+        _main_loop(*args)
 
 
 def _parse_args_run_simulation() -> argparse.ArgumentParser:
flwr_nightly-1.10.0.dev20240710.dist-info/METADATA → flwr_nightly-1.10.0.dev20240711.dist-info/METADATA CHANGED
@@ -1,6 +1,6 @@
 Metadata-Version: 2.1
 Name: flwr-nightly
-Version: 1.10.0.dev20240710
+Version: 1.10.0.dev20240711
 Summary: Flower: A Friendly Federated Learning Framework
 Home-page: https://flower.ai
 License: Apache-2.0
@@ -33,7 +33,7 @@ Classifier: Typing :: Typed
 Provides-Extra: rest
 Provides-Extra: simulation
 Requires-Dist: cryptography (>=42.0.4,<43.0.0)
-Requires-Dist: grpcio (>=1.60.0,<2.0.0)
+Requires-Dist: grpcio (>=1.60.0,<2.0.0,!=1.64.2,!=1.65.0)
 Requires-Dist: iterators (>=0.0.2,<0.0.3)
 Requires-Dist: numpy (>=1.21.0,<2.0.0)
 Requires-Dist: pathspec (>=0.12.1,<0.13.0)
flwr_nightly-1.10.0.dev20240710.dist-info/RECORD → flwr_nightly-1.10.0.dev20240711.dist-info/RECORD CHANGED
@@ -2,7 +2,7 @@ flwr/__init__.py,sha256=VmBWedrCxqmt4QvUHBLqyVEH6p7zaFMD_oCHerXHSVw,937
 flwr/cli/__init__.py,sha256=cZJVgozlkC6Ni2Hd_FAIrqefrkCGOV18fikToq-6iLw,720
 flwr/cli/app.py,sha256=FBcSrE35ll88VE11ib67qgsJe2GYDN25UswV9-cYcX8,1267
 flwr/cli/build.py,sha256=G0wgNrgxir_H0Qb_YlT2itxETEb-9q_3RQflqIqNXTU,4737
-flwr/cli/config_utils.py,sha256=ugUlqH52yxTPMtKw6q4xv5k2OVWUy89cwyJ5LB2RLgk,6037
+flwr/cli/config_utils.py,sha256=jz7ODmLE62tXHorg73zzB7POMe4WXLUzX8lt-WNfNVM,6488
 flwr/cli/example.py,sha256=1bGDYll3BXQY2kRqSN-oICqS5n1b9m0g0RvXTopXHl4,2215
 flwr/cli/install.py,sha256=Wz7Hqg2PE9N-w5CnqlH9Zr8mzADN2J7NLcUhgldZLWU,6579
 flwr/cli/new/__init__.py,sha256=cQzK1WH4JP2awef1t2UQ2xjl1agVEz9rwutV18SWV1k,789
@@ -50,10 +50,10 @@ flwr/cli/new/templates/app/pyproject.pytorch.toml.tpl,sha256=wxN6I8uvWZ4MErvTbQJ
 flwr/cli/new/templates/app/pyproject.sklearn.toml.tpl,sha256=wFeJuhqnBPQtKCBvnE3ySBpxmbeNdxcsq2Eb_RmSDIg,655
 flwr/cli/new/templates/app/pyproject.tensorflow.toml.tpl,sha256=zkxLTQRvujF76sIlzNNGPVU7Y9nVCwNBxAx82AOBaJY,654
 flwr/cli/run/__init__.py,sha256=oCd6HmQDx-sqver1gecgx-uMA38BLTSiiKpl7RGNceg,789
-flwr/cli/run/run.py,sha256=WsOknYnwm_iD-s6jAHxjGKbm0PgV3VZdQ04v6s4nPQY,4449
+flwr/cli/run/run.py,sha256=eFYZwHOw9pHo_jxtS-UQIf7LVIOiNwjnJdaykcZQz5Q,4969
 flwr/cli/utils.py,sha256=l65Ul0YsSBPuypk0uorAtEDmLEYiUrzpCXi6zCg9mJ4,4506
 flwr/client/__init__.py,sha256=wzJZsYJIHf_8-PMzvfbinyzzjgh1UP1vLrAw2_yEbKI,1345
-flwr/client/app.py,sha256=J4O5tyOP0LDOJgaXFEli97aSbE5dy8hIirRndAUlL0k,24887
+flwr/client/app.py,sha256=TC9wO6dSuXxRt0YsqFTz8Usf-ophSww9cwMRjEaXKjI,25123
 flwr/client/client.py,sha256=Vp9UkOkoHdNfn6iMYZsj_5m_GICiFfUlKEVaLad-YhM,8183
 flwr/client/client_app.py,sha256=cvY-km3JEOWKxUio4xvksNFBk2FQQXliUfQTlDty71w,9648
 flwr/client/dpfedavg_numpy_client.py,sha256=ylZ-LpBIKmL1HCiS8kq4pkp2QGalc8rYEzDHdRG3VRQ,7435
@@ -77,13 +77,13 @@ flwr/client/mod/secure_aggregation/__init__.py,sha256=A7DzZ3uvXTUkuHBzrxJMWQQD4R
 flwr/client/mod/secure_aggregation/secagg_mod.py,sha256=wI9tuIEvMUETz-wVIEbPYvh-1nK9CEylBLGoVpNhL94,1095
 flwr/client/mod/secure_aggregation/secaggplus_mod.py,sha256=fZTfIELkYS64lpgxQKL66s-QHjCn-159qfLoNoIMJjc,19699
 flwr/client/mod/utils.py,sha256=UAJXiB0wwVyLkCkpW_i5BXikdBR65p8sNFr7VNHm2nk,1226
-flwr/client/node_state.py,sha256=f_zZaoSCLUVwbJDqQGZbRvQkEK82UlhSVtgtCKFVM3s,1937
-flwr/client/node_state_tests.py,sha256=fadnOTT3VAuzzs_UAbOukcuyx-oQPv2lBq92qTuUecw,2212
+flwr/client/node_state.py,sha256=8nsvz8IndKLI1VeAzO7EwHh4rZdvaw03W0llLQhifiw,2830
+flwr/client/node_state_tests.py,sha256=vXxS3vHMQxl66SfD2MO-JNi83EabYs8Jhd8N7H2zfEM,2231
 flwr/client/numpy_client.py,sha256=u76GWAdHmJM88Agm2EgLQSvO8Jnk225mJTk-_TmPjFE,10283
 flwr/client/rest_client/__init__.py,sha256=5KGlp7pjc1dhNRkKlaNtUfQmg8wrRFh9lS3P3uRS-7Q,735
 flwr/client/rest_client/connection.py,sha256=nowX8_TMnaiIhBMU5f60sIOkvcS3DHOHBT_YrvCnxnw,12096
 flwr/client/supernode/__init__.py,sha256=SUhWOzcgXRNXk1V9UgB5-FaWukqqrOEajVUHEcPkwyQ,865
-flwr/client/supernode/app.py,sha256=jVg5vWJnE50jUJPOlK_hwA_RsGo_heygZeWHGsWH76g,15275
+flwr/client/supernode/app.py,sha256=GQ9N2ydrTkgo7ZVb3AFOWYeLSru3qwQ_SvsYU6fdhI4,15321
 flwr/client/typing.py,sha256=RJGVF64Z0nqW-qmdFuFaY4Jig3dMUFgNhFi-5dq-8-I,1069
 flwr/common/__init__.py,sha256=4cBLNNnNTwHDnL_HCxhU5ILCSZ6fYh3A_aMBtlvHTVw,3721
 flwr/common/address.py,sha256=wRu1Luezx1PWadwV9OA_KNko01oVvbRnPqfzaDn8QOk,1882
@@ -96,7 +96,7 @@ flwr/common/differential_privacy_constants.py,sha256=c7b7tqgvT7yMK0XN9ndiTBs4mQf
 flwr/common/dp.py,sha256=SZ3MtJKpjxUeQeyb2pqWSF0S_h9rZtCGYPToIxqcNj8,2004
 flwr/common/exit_handlers.py,sha256=2Nt0wLhc17KQQsLPFSRAjjhUiEFfJK6tNozdGiIY4Fs,2812
 flwr/common/grpc.py,sha256=_9838_onFLx7W6_lakUN35ziKpdcKp7fA-0jE0EhcEQ,2460
-flwr/common/logger.py,sha256=2WW8xV49EHNTBGQP9N83ekYYLVgzjtxLYCM-m0UXwgw,7426
+flwr/common/logger.py,sha256=42rLD69BetRkVKYXZLUaTgyjPxsCn6phVbndMC1fWjk,8130
 flwr/common/message.py,sha256=QmFYYXA-3e9M8tGO-3NPyAI8yvdmcpdYaA_noR1DE88,13194
 flwr/common/object_ref.py,sha256=PQR0tztVOkD1nn_uGuNz4bHm7z4fwsosTsUKvWIGF5Y,6506
 flwr/common/parameter.py,sha256=-bFAUayToYDF50FZGrBC1hQYJCQDtB2bbr3ZuVLMtdE,2095
@@ -172,7 +172,7 @@ flwr/proto/transport_pb2.pyi,sha256=CZvJRWTU3QWFWLXNFtyLSrSKFatIyMcy-ohzLbQ-G9c,
 flwr/proto/transport_pb2_grpc.py,sha256=vLN3EHtx2aEEMCO4f1Upu-l27BPzd3-5pV-u8wPcosk,2598
 flwr/proto/transport_pb2_grpc.pyi,sha256=AGXf8RiIiW2J5IKMlm_3qT3AzcDa4F3P5IqUjve_esA,766
 flwr/py.typed,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
-flwr/server/__init__.py,sha256=PAoUaE5kQ9toC6gS7L68d74AgbRrHHoHt8wNr81CcEw,1575
+flwr/server/__init__.py,sha256=BxzPhvouvWFGi7CFpI5b4EeVR9XDqbK7Ndqg24EL_Rw,1679
 flwr/server/app.py,sha256=WABxljYzn9mEaHvnXXBceVkBx2V6lmlFGpl5f0Uue08,23736
 flwr/server/client_manager.py,sha256=T8UDSRJBVD3fyIDI7NTAA-NA7GPrMNNgH2OAF54RRxE,6127
 flwr/server/client_proxy.py,sha256=4G-oTwhb45sfWLx2uZdcXD98IZwdTS6F88xe3akCdUg,2399
@@ -189,8 +189,9 @@ flwr/server/driver/inmemory_driver.py,sha256=RcK94_NtjGZ4aZDIscnU7A3Uv1u8jGx29-x
 flwr/server/history.py,sha256=bBOHKyX1eQONIsUx4EUU-UnAk1i0EbEl8ioyMq_UWQ8,5063
 flwr/server/run_serverapp.py,sha256=s8KyWbANv9kyj8_tJoDiLkUj9D6QrPWfC5M_xDCOtYU,9445
 flwr/server/server.py,sha256=wsXsxMZ9SQ0B42nBnUlcV83NJPycgrgg5bFwcQ4BYBE,17821
-flwr/server/server_app.py,sha256=WdsLcMsdi_pKk2y9fKkaWqT3CgCPX55-C8qhkDXCet8,4415
+flwr/server/server_app.py,sha256=1hul76ospG8L_KooK_ewn1sWPNTNYLTtZMeGNOBNruA,6267
 flwr/server/server_config.py,sha256=CZaHVAsMvGLjpWVcLPkiYxgJN4xfIyAiUrCI3fETKY4,1349
+flwr/server/serverapp_components.py,sha256=-IV_CitOfrJclJj2jNdbN1Q65PyFmtKtrTIg1hc6WQw,2118
 flwr/server/strategy/__init__.py,sha256=tQer2SwjDnvgFFuJMZM-S01Z615N5XK6MaCvpm4BMU0,2836
 flwr/server/strategy/aggregate.py,sha256=QyRIJtI5gnuY1NbgrcrOvkHxGIxBvApq7d9Y4xl-6W4,13468
 flwr/server/strategy/bulyan.py,sha256=zHBSZ40KtRg07tXtwWg2SRvpt89oeRbUO9zTnOFYx2k,6532
@@ -236,16 +237,16 @@ flwr/server/superlink/fleet/rest_rere/__init__.py,sha256=5jbYbAn75sGv-gBwOPDySE0
 flwr/server/superlink/fleet/rest_rere/rest_api.py,sha256=yoSU-6nCJF9ASHGNpSY69nZbUhPGXkMIKYDgybKQX3c,7672
 flwr/server/superlink/fleet/vce/__init__.py,sha256=36MHKiefnJeyjwMQzVUK4m06Ojon3WDcwZGQsAcyVhQ,783
 flwr/server/superlink/fleet/vce/backend/__init__.py,sha256=oBIzmnrSSRvH_H0vRGEGWhWzQQwqe3zn6e13RsNwlIY,1466
-flwr/server/superlink/fleet/vce/backend/backend.py,sha256=LJsKl7oixVvptcG98Rd9ejJycNWcEVB0ODvSreLGp-A,2260
-flwr/server/superlink/fleet/vce/backend/raybackend.py,sha256=dwaebZfzvzlvjkMflH5hJ19-Sszvxt0AWwIEGk9BliU,7495
-flwr/server/superlink/fleet/vce/vce_api.py,sha256=JvkrLB26-sXbrsQKG4iGwgBYDIfuix3PJ1P6qd_nqxQ,13296
+flwr/server/superlink/fleet/vce/backend/backend.py,sha256=iG3KSIY7DzNfcxmuLfTs7VdQJnqPCvvn5DFkTWKG5lI,2227
+flwr/server/superlink/fleet/vce/backend/raybackend.py,sha256=pIJm6YXZw-Jv3okRgred1yjm7b3EWuj1BvtyBd3Fk08,7422
+flwr/server/superlink/fleet/vce/vce_api.py,sha256=Aiv8fulgx4decS_PQ17L94VWc0Mxx7_lL4BmIX1-PMg,11745
 flwr/server/superlink/state/__init__.py,sha256=Gj2OTFLXvA-mAjBvwuKDM3rDrVaQPcIoybSa2uskMTE,1003
 flwr/server/superlink/state/in_memory_state.py,sha256=fb-f4RGiqXON0DC7aSEMNuNIjH406BhBYrNNX5Kza2g,13061
 flwr/server/superlink/state/sqlite_state.py,sha256=dO374mTkvhWQSiwbqwUXVnAYHev-j2mHaX9v8wFmmMA,29044
 flwr/server/superlink/state/state.py,sha256=mwvYPLK_udk4-yV8Q2MLX5FfTlgLG0P9biyyOZGibRY,8115
 flwr/server/superlink/state/state_factory.py,sha256=Fo8pBQ1WWrVJK5TOEPZ_zgJE69_mfTGjTO6czh6571o,2021
 flwr/server/superlink/state/utils.py,sha256=155ngcaSePy7nD8X4LHgpuVok6fcH5_CPNRiFAbLWDA,2407
-flwr/server/typing.py,sha256=2zSG-KuDAgwFPuzgVjTLDaEqJ8gXXGqFR2RD-qIk730,913
+flwr/server/typing.py,sha256=5kaRLZuxTEse9A0g7aVna2VhYxU3wTq1f3d3mtw7kXs,1019
 flwr/server/utils/__init__.py,sha256=pltsPHJoXmUIr3utjwwYxu7_ZAGy5u4MVHzv9iA5Un8,908
 flwr/server/utils/tensorboard.py,sha256=l6aMVdtZbbfCX8uwFW-WxH6P171-R-tulMcPhlykowo,5485
 flwr/server/utils/validator.py,sha256=pzyXoOEEPSoYC2UEzened8IKSFRI-kIqqI0QlwRK9jk,5301
@@ -258,18 +259,18 @@ flwr/server/workflow/secure_aggregation/secaggplus_workflow.py,sha256=BRqhlnVe8C
 flwr/simulation/__init__.py,sha256=9x8OCkK3jpFAPJB1aeEMOddz6V58bExQPtwE8Z3q-RY,1359
 flwr/simulation/app.py,sha256=8NDXoQ8oC11khXIGnydrsUh5JfaH7c2Fwzix8vDFK1I,15144
 flwr/simulation/ray_transport/__init__.py,sha256=wzcEEwUUlulnXsg6raCA1nGpP3LlAQDtJ8zNkCXcVbA,734
-flwr/simulation/ray_transport/ray_actor.py,sha256=bu6gEnbHYtlUxLtzjzpEUtvkQDRzl1PVMjJuCDZvfgQ,19196
+flwr/simulation/ray_transport/ray_actor.py,sha256=3j0HgzjrlYjnzdTRy8aA4Nf6VoUvxi1hGRQkGSU5z6c,19020
 flwr/simulation/ray_transport/ray_client_proxy.py,sha256=zGLVebfwFhBo1CAqEQ0MtW-fPG8ark3e4n6OksFGch4,6954
 flwr/simulation/ray_transport/utils.py,sha256=TYdtfg1P9VfTdLMOJlifInGpxWHYs9UfUqIv2wfkRLA,2392
-flwr/simulation/run_simulation.py,sha256=7dlpFtviRsD7TncoWEqwsdZ758JQbc0Sxvxlw1mkuMg,16862
+flwr/simulation/run_simulation.py,sha256=qGP8sHKAzJT9nGeqMw36iCsVXm4ZFMBisCORuTswr-g,17277
 flwr/superexec/__init__.py,sha256=9h94ogLxi6eJ3bUuJYq3E3pApThSabTPiSmPAGlTkHE,800
 flwr/superexec/app.py,sha256=dm0o3O6dlsk8hZHondat5QWrBh9UfKzHxn4dVrqwiRk,6151
 flwr/superexec/deployment.py,sha256=xv5iQWuaMeeL0XE5KMLWq3gRU4lvsGu1-_oPIXi5x9E,3955
 flwr/superexec/exec_grpc.py,sha256=u-rztpOleqSGqgvNE-ZLw1HchNsBHU1-eB3m52GZ0pQ,1852
 flwr/superexec/exec_servicer.py,sha256=4R1f_9v0vly_bXpIYaXAeV1tO5LAy1AYygGGGNZmlQk,2194
 flwr/superexec/executor.py,sha256=TMQMMf-vv0htlv6v-eEBI67J1WL3Yz7dp_Fm1lgMEyU,1718
-flwr_nightly-1.10.0.dev20240710.dist-info/LICENSE,sha256=z8d0m5b2O9McPEK1xHG_dWgUBT6EfBDz6wA0F7xSPTA,11358
-flwr_nightly-1.10.0.dev20240710.dist-info/METADATA,sha256=d1HBDVTaENA9E7Peceqo7v7bNogHZJCWA4dKq581OCo,15614
-flwr_nightly-1.10.0.dev20240710.dist-info/WHEEL,sha256=FMvqSimYX_P7y0a7UY-_Mc83r5zkBZsCYPm7Lr0Bsq4,88
-flwr_nightly-1.10.0.dev20240710.dist-info/entry_points.txt,sha256=7qBQcA-bDGDxnJmLd9FYqglFQubjCNqyg9M8a-lukps,336
-flwr_nightly-1.10.0.dev20240710.dist-info/RECORD,,
+flwr_nightly-1.10.0.dev20240711.dist-info/LICENSE,sha256=z8d0m5b2O9McPEK1xHG_dWgUBT6EfBDz6wA0F7xSPTA,11358
+flwr_nightly-1.10.0.dev20240711.dist-info/METADATA,sha256=6FH6xn5sDwtX8aB4bz6zCf3au-O319V8I2iOIWWXDXE,15632
+flwr_nightly-1.10.0.dev20240711.dist-info/WHEEL,sha256=FMvqSimYX_P7y0a7UY-_Mc83r5zkBZsCYPm7Lr0Bsq4,88
+flwr_nightly-1.10.0.dev20240711.dist-info/entry_points.txt,sha256=7qBQcA-bDGDxnJmLd9FYqglFQubjCNqyg9M8a-lukps,336
+flwr_nightly-1.10.0.dev20240711.dist-info/RECORD,,