flwr-nightly: 1.6.0.dev20231026-py3-none-any.whl → 1.6.0.dev20231101-py3-none-any.whl

flwr/client/app.py CHANGED
@@ -19,7 +19,7 @@ import sys
 import time
 import warnings
 from logging import INFO
-from typing import Optional, Union
+from typing import Callable, ContextManager, Optional, Tuple, Union

 from flwr.client.client import Client
 from flwr.client.typing import ClientFn
@@ -33,6 +33,7 @@ from flwr.common.constant import (
     TRANSPORT_TYPES,
 )
 from flwr.common.logger import log
+from flwr.proto.task_pb2 import TaskIns, TaskRes

 from .grpc_client.connection import grpc_connection
 from .grpc_rere_client.connection import grpc_request_response
@@ -134,44 +135,15 @@ def start_client(

         client_fn = single_client_factory

-    # Parse IP address
-    parsed_address = parse_address(server_address)
-    if not parsed_address:
-        sys.exit(f"Server address ({server_address}) cannot be parsed.")
-    host, port, is_v6 = parsed_address
-    address = f"[{host}]:{port}" if is_v6 else f"{host}:{port}"
-
-    # Set the default transport layer
-    if transport is None:
-        transport = TRANSPORT_TYPE_GRPC_BIDI
-
-    # Use either gRPC bidirectional streaming or REST request/response
-    if transport == TRANSPORT_TYPE_REST:
-        try:
-            from .rest_client.connection import http_request_response
-        except ModuleNotFoundError:
-            sys.exit(MISSING_EXTRA_REST)
-        if server_address[:4] != "http":
-            sys.exit(
-                "When using the REST API, please provide `https://` or "
-                "`http://` before the server address (e.g. `http://127.0.0.1:8080`)"
-            )
-        connection = http_request_response
-    elif transport == TRANSPORT_TYPE_GRPC_RERE:
-        connection = grpc_request_response
-    elif transport == TRANSPORT_TYPE_GRPC_BIDI:
-        connection = grpc_connection
-    else:
-        raise ValueError(
-            f"Unknown transport type: {transport} (possible: {TRANSPORT_TYPES})"
-        )
+    # Initialize connection context manager
+    connection, address = _init_connection(transport, server_address)

     while True:
         sleep_duration: int = 0
         with connection(
             address,
-            max_message_length=grpc_max_message_length,
-            root_certificates=root_certificates,
+            grpc_max_message_length,
+            root_certificates,
         ) as conn:
             receive, send, create_node, delete_node = conn

@@ -285,3 +257,54 @@ def start_numpy_client(
         root_certificates=root_certificates,
         transport=transport,
     )
+
+
+def _init_connection(
+    transport: Optional[str], server_address: str
+) -> Tuple[
+    Callable[
+        [str, int, Union[bytes, str, None]],
+        ContextManager[
+            Tuple[
+                Callable[[], Optional[TaskIns]],
+                Callable[[TaskRes], None],
+                Optional[Callable[[], None]],
+                Optional[Callable[[], None]],
+            ]
+        ],
+    ],
+    str,
+]:
+    # Parse IP address
+    parsed_address = parse_address(server_address)
+    if not parsed_address:
+        sys.exit(f"Server address ({server_address}) cannot be parsed.")
+    host, port, is_v6 = parsed_address
+    address = f"[{host}]:{port}" if is_v6 else f"{host}:{port}"
+
+    # Set the default transport layer
+    if transport is None:
+        transport = TRANSPORT_TYPE_GRPC_BIDI
+
+    # Use either gRPC bidirectional streaming or REST request/response
+    if transport == TRANSPORT_TYPE_REST:
+        try:
+            from .rest_client.connection import http_request_response
+        except ModuleNotFoundError:
+            sys.exit(MISSING_EXTRA_REST)
+        if server_address[:4] != "http":
+            sys.exit(
+                "When using the REST API, please provide `https://` or "
+                "`http://` before the server address (e.g. `http://127.0.0.1:8080`)"
+            )
+        connection = http_request_response
+    elif transport == TRANSPORT_TYPE_GRPC_RERE:
+        connection = grpc_request_response
+    elif transport == TRANSPORT_TYPE_GRPC_BIDI:
+        connection = grpc_connection
+    else:
+        raise ValueError(
+            f"Unknown transport type: {transport} (possible: {TRANSPORT_TYPES})"
+        )
+
+    return connection, address
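To illustrate the refactor above: `_init_connection` now owns address parsing and transport selection, while the public entry points keep their existing signatures. A minimal usage sketch (not part of the wheel diff), with a hypothetical client class and hypothetical server addresses:

```python
import flwr as fl
from flwr.common.constant import TRANSPORT_TYPE_REST


class MyClient(fl.client.NumPyClient):
    """Hypothetical client; get_parameters/fit/evaluate omitted for brevity."""


# Default transport (gRPC bidirectional streaming): a plain host:port address.
fl.client.start_numpy_client(server_address="127.0.0.1:8080", client=MyClient())

# REST transport: needs the optional REST extra and an http(s):// address,
# otherwise `_init_connection` exits with the messages shown in the diff above.
fl.client.start_numpy_client(
    server_address="http://127.0.0.1:9093",  # hypothetical REST endpoint
    client=MyClient(),
    transport=TRANSPORT_TYPE_REST,
)
```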
flwr/common/retry_invoker.py ADDED
@@ -0,0 +1,288 @@
+# Copyright 2023 Flower Labs GmbH. All Rights Reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+# ==============================================================================
+"""`RetryInvoker` to augment other callables with error handling and retries."""
+
+
+import itertools
+import random
+import time
+from dataclasses import dataclass
+from typing import (
+    Any,
+    Callable,
+    Dict,
+    Generator,
+    Iterable,
+    List,
+    Optional,
+    Tuple,
+    Type,
+    Union,
+    cast,
+)
+
+
+def exponential(
+    base_delay: float = 1,
+    multiplier: float = 2,
+    max_delay: Optional[float] = None,
+) -> Generator[float, None, None]:
+    """Wait time generator for exponential backoff strategy.
+
+    Parameters
+    ----------
+    base_delay: float (default: 1)
+        Initial delay duration before the first retry.
+    multiplier: float (default: 2)
+        Factor by which the delay is multiplied after each retry.
+    max_delay: Optional[float] (default: None)
+        The maximum delay duration between two consecutive retries.
+    """
+    delay = base_delay if max_delay is None else min(base_delay, max_delay)
+    while True:
+        yield delay
+        delay *= multiplier
+        if max_delay is not None:
+            delay = min(delay, max_delay)
+
+
+def constant(
+    interval: Union[float, Iterable[float]] = 1,
+) -> Generator[float, None, None]:
+    """Wait time generator for specified intervals.
+
+    Parameters
+    ----------
+    interval: Union[float, Iterable[float]] (default: 1)
+        A constant value to yield or an iterable of such values.
+    """
+    if not isinstance(interval, Iterable):
+        interval = itertools.repeat(interval)
+    yield from interval
+
+
+def full_jitter(max_value: float) -> float:
+    """Randomize a float between 0 and the given maximum value.
+
+    This function implements the "Full Jitter" algorithm as described in the
+    AWS article discussing the efficacy of different jitter algorithms.
+    Reference: https://aws.amazon.com/blogs/architecture/exponential-backoff-and-jitter/
+
+    Parameters
+    ----------
+    max_value : float
+        The upper limit for the randomized value.
+    """
+    return random.uniform(0, max_value)
+
+
+@dataclass
+class RetryState:
+    """State for callbacks in RetryInvoker."""
+
+    target: Callable[..., Any]
+    args: Tuple[Any, ...]
+    kwargs: Dict[str, Any]
+    tries: int
+    elapsed_time: float
+    exception: Optional[Exception] = None
+    actual_wait: Optional[float] = None
+
+
+# pylint: disable-next=too-many-instance-attributes
+class RetryInvoker:
+    """Wrapper class for retry (with backoff) triggered by exceptions.
+
+    Parameters
+    ----------
+    wait_factory: Callable[[], Generator[float, None, None]]
+        A generator yielding successive wait times in seconds. If the generator
+        is finite, the giveup event will be triggered when the generator raises
+        `StopIteration`.
+    recoverable_exceptions: Union[Type[Exception], Tuple[Type[Exception], ...]]
+        An exception type (or tuple of types) that triggers backoff.
+    max_tries: Optional[int]
+        The maximum number of attempts to make before giving up. Once exhausted,
+        the exception will be allowed to escape. If set to None, there is no limit
+        to the number of tries.
+    max_time: Optional[float]
+        The maximum total amount of time to try before giving up. Once this time
+        has expired, this method won't be interrupted immediately, but the exception
+        will be allowed to escape. If set to None, there is no limit to the total time.
+    on_success: Optional[Callable[[RetryState], None]] (default: None)
+        A callable to be executed in the event of success. The parameter is a
+        data class object detailing the invocation.
+    on_backoff: Optional[Callable[[RetryState], None]] (default: None)
+        A callable to be executed in the event of a backoff. The parameter is a
+        data class object detailing the invocation.
+    on_giveup: Optional[Callable[[RetryState], None]] (default: None)
+        A callable to be executed in the event that `max_tries` or `max_time` is
+        exceeded, `should_giveup` returns True, or the `wait_factory()` generator
+        raises `StopIteration`. The parameter is a data class object detailing the
+        invocation.
+    jitter: Optional[Callable[[float], float]] (default: full_jitter)
+        A function of the value yielded by `wait_factory()` returning the actual time
+        to wait. This function helps distribute wait times stochastically to avoid
+        timing collisions across concurrent clients. Wait times are jittered by
+        default using the `full_jitter` function. To disable jittering, pass
+        `jitter=None`.
+    should_giveup: Optional[Callable[[Exception], bool]] (default: None)
+        A function accepting an exception instance, returning whether or not
+        to give up prematurely before other give-up conditions are evaluated.
+        If set to None, the strategy is to never give up prematurely.
+
+    Examples
+    --------
+    Initialize a `RetryInvoker` with exponential backoff and invoke a function:
+
+    >>> invoker = RetryInvoker(
+    ...     exponential,  # Or use `lambda: exponential(3, 2)` to pass arguments
+    ...     grpc.RpcError,
+    ...     max_tries=3,
+    ...     max_time=None,
+    ... )
+    >>> invoker.invoke(my_func, arg1, arg2, kw1=kwarg1)
+    """
+
+    def __init__(
+        self,
+        wait_factory: Callable[[], Generator[float, None, None]],
+        recoverable_exceptions: Union[Type[Exception], Tuple[Type[Exception], ...]],
+        max_tries: Optional[int],
+        max_time: Optional[float],
+        *,
+        on_success: Optional[Callable[[RetryState], None]] = None,
+        on_backoff: Optional[Callable[[RetryState], None]] = None,
+        on_giveup: Optional[Callable[[RetryState], None]] = None,
+        jitter: Optional[Callable[[float], float]] = full_jitter,
+        should_giveup: Optional[Callable[[Exception], bool]] = None,
+    ) -> None:
+        self.wait_factory = wait_factory
+        self.recoverable_exceptions = recoverable_exceptions
+        self.max_tries = max_tries
+        self.max_time = max_time
+        self.on_success = on_success
+        self.on_backoff = on_backoff
+        self.on_giveup = on_giveup
+        self.jitter = jitter
+        self.should_giveup = should_giveup
+
+    # pylint: disable-next=too-many-locals
+    def invoke(
+        self,
+        target: Callable[..., Any],
+        *args: Any,
+        **kwargs: Any,
+    ) -> Any:
+        """Safely invoke the provided callable with retry mechanisms.
+
+        This method attempts to invoke the given callable, and in the event of
+        a recoverable exception, employs a retry mechanism that considers
+        wait times, jitter, maximum attempts, and maximum time. During the
+        retry process, various callbacks (`on_backoff`, `on_success`, and
+        `on_giveup`) can be triggered based on the outcome.
+
+        Parameters
+        ----------
+        target: Callable[..., Any]
+            The callable to be invoked.
+        *args: Tuple[Any, ...]
+            Positional arguments to pass to `target`.
+        **kwargs: Dict[str, Any]
+            Keyword arguments to pass to `target`.
+
+        Returns
+        -------
+        Any
+            The result of the given callable invocation.
+
+        Raises
+        ------
+        Exception
+            If the number of tries exceeds `max_tries`, if the total time
+            exceeds `max_time`, if the `wait_factory()` generator raises `StopIteration`,
+            or if `should_giveup` returns True for a raised exception.
+
+        Notes
+        -----
+        The time between retries is determined by the provided `wait_factory()`
+        generator and can optionally be jittered using the `jitter` function.
+        The recoverable exceptions that trigger a retry, as well as conditions to
+        stop retries, are also determined by the class's initialization parameters.
+        """
+
+        def try_call_event_handler(
+            handler: Optional[Callable[[RetryState], None]]
+        ) -> None:
+            if handler is not None:
+                handler(cast(RetryState, ref_state[0]))
+
+        try_cnt = 0
+        wait_generator = self.wait_factory()
+        start = time.time()
+        ref_state: List[Optional[RetryState]] = [None]
+
+        while True:
+            try_cnt += 1
+            elapsed_time = time.time() - start
+            state = RetryState(
+                target=target,
+                args=args,
+                kwargs=kwargs,
+                tries=try_cnt,
+                elapsed_time=elapsed_time,
+            )
+            ref_state[0] = state
+
+            try:
+                ret = target(*args, **kwargs)
+            except self.recoverable_exceptions as err:
+                # Check if giveup event should be triggered
+                max_tries_exceeded = try_cnt == self.max_tries
+                max_time_exceeded = (
+                    self.max_time is not None and elapsed_time >= self.max_time
+                )
+
+                def giveup_check(_exception: Exception) -> bool:
+                    if self.should_giveup is None:
+                        return False
+                    return self.should_giveup(_exception)
+
+                if giveup_check(err) or max_tries_exceeded or max_time_exceeded:
+                    # Trigger giveup event
+                    try_call_event_handler(self.on_giveup)
+                    raise
+
+                try:
+                    wait_time = next(wait_generator)
+                    if self.jitter is not None:
+                        wait_time = self.jitter(wait_time)
+                    if self.max_time is not None:
+                        wait_time = min(wait_time, self.max_time - elapsed_time)
+                    state.actual_wait = wait_time
+                except StopIteration:
+                    # Trigger giveup event
+                    try_call_event_handler(self.on_giveup)
+                    raise err from None
+
+                # Trigger backoff event
+                try_call_event_handler(self.on_backoff)
+
+                # Sleep
+                time.sleep(wait_time)
+            else:
+                # Trigger success event
+                try_call_event_handler(self.on_success)
+                return ret
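To make the retry semantics of the new `flwr.common.retry_invoker` module concrete, a minimal usage sketch (not part of the wheel diff); the import path follows the RECORD entry further down, and the flaky function and backoff callback are hypothetical:

```python
import random

from flwr.common.retry_invoker import RetryInvoker, RetryState, exponential


def fetch_task() -> str:
    """Hypothetical flaky operation used only for illustration."""
    if random.random() < 0.7:
        raise ConnectionError("transient failure")
    return "task-42"


def log_backoff(state: RetryState) -> None:
    # `state.actual_wait` holds the jittered sleep chosen for this retry.
    print(f"try {state.tries} failed, sleeping {state.actual_wait:.2f}s")


invoker = RetryInvoker(
    lambda: exponential(base_delay=1, multiplier=2, max_delay=10),
    ConnectionError,  # recoverable exception type(s)
    max_tries=5,
    max_time=30.0,
    on_backoff=log_backoff,
)

# Retries `fetch_task` on ConnectionError with jittered exponential backoff;
# once `max_tries` or `max_time` is exhausted, the last exception is re-raised.
result = invoker.invoke(fetch_task)
```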
flwr_nightly-1.6.0.dev20231101.dist-info/METADATA CHANGED
@@ -1,6 +1,6 @@
 Metadata-Version: 2.1
 Name: flwr-nightly
-Version: 1.6.0.dev20231026
+Version: 1.6.0.dev20231101
 Summary: Flower: A Friendly Federated Learning Framework
 Home-page: https://flower.dev
 License: Apache-2.0
@@ -71,22 +71,21 @@ Description-Content-Type: text/markdown
 Flower (`flwr`) is a framework for building federated learning systems. The
 design of Flower is based on a few guiding principles:

-* **Customizable**: Federated learning systems vary wildly from one use case to
+- **Customizable**: Federated learning systems vary wildly from one use case to
   another. Flower allows for a wide range of different configurations depending
   on the needs of each individual use case.

-* **Extendable**: Flower originated from a research project at the University of
+- **Extendable**: Flower originated from a research project at the University of
   Oxford, so it was built with AI research in mind. Many components can be
   extended and overridden to build new state-of-the-art systems.

-* **Framework-agnostic**: Different machine learning frameworks have different
+- **Framework-agnostic**: Different machine learning frameworks have different
   strengths. Flower can be used with any machine learning framework, for
   example, [PyTorch](https://pytorch.org),
-  [TensorFlow](https://tensorflow.org), [Hugging Face Transformers](https://huggingface.co/), [PyTorch Lightning](https://pytorchlightning.ai/), [MXNet](https://mxnet.apache.org/), [scikit-learn](https://scikit-learn.org/), [JAX](https://jax.readthedocs.io/), [TFLite](https://tensorflow.org/lite/), [fastai](https://www.fast.ai/), [Pandas](https://pandas.pydata.org/
-  ) for federated analytics, or even raw [NumPy](https://numpy.org/)
+  [TensorFlow](https://tensorflow.org), [Hugging Face Transformers](https://huggingface.co/), [PyTorch Lightning](https://pytorchlightning.ai/), [MXNet](https://mxnet.apache.org/), [scikit-learn](https://scikit-learn.org/), [JAX](https://jax.readthedocs.io/), [TFLite](https://tensorflow.org/lite/), [fastai](https://www.fast.ai/), [Pandas](https://pandas.pydata.org/) for federated analytics, or even raw [NumPy](https://numpy.org/)
   for users who enjoy computing gradients by hand.

-* **Understandable**: Flower is written with maintainability in mind. The
+- **Understandable**: Flower is written with maintainability in mind. The
   community is encouraged to both read and contribute to the codebase.

 Meet the Flower community on [flower.dev](https://flower.dev)!
@@ -106,11 +105,11 @@ Flower's goal is to make federated learning accessible to everyone. This series
 2. **Using Strategies in Federated Learning**

    [![Open in Colab](https://colab.research.google.com/assets/colab-badge.svg)](https://colab.research.google.com/github/adap/flower/blob/main/doc/source/tutorial-use-a-federated-learning-strategy-pytorch.ipynb) (or open the [Jupyter Notebook](https://github.com/adap/flower/blob/main/doc/source/tutorial-use-a-federated-learning-strategy-pytorch.ipynb))
-
+
 3. **Building Strategies for Federated Learning**

    [![Open in Colab](https://colab.research.google.com/assets/colab-badge.svg)](https://colab.research.google.com/github/adap/flower/blob/main/doc/source/tutorial-series-use-a-federated-learning-strategy-pytorch.ipynb) (or open the [Jupyter Notebook](https://github.com/adap/flower/blob/main/doc/source/tutorial-series-use-a-federated-learning-strategy-pytorch.ipynb))
-
+
 4. **Custom Clients for Federated Learning**

    [![Open in Colab](https://colab.research.google.com/assets/colab-badge.svg)](https://colab.research.google.com/github/adap/flower/blob/main/doc/source/tutorial-series-customize-the-client-pytorch.ipynb) (or open the [Jupyter Notebook](https://github.com/adap/flower/blob/main/doc/source/tutorial-series-customize-the-client-pytorch.ipynb))
@@ -121,39 +120,39 @@ Stay tuned, more tutorials are coming soon. Topics include **Privacy and Securit

 [![Open in Colab](https://colab.research.google.com/assets/colab-badge.svg)](https://colab.research.google.com/github/adap/flower/blob/main/examples/flower-in-30-minutes/tutorial.ipynb) (or open the [Jupyter Notebook](https://github.com/adap/flower/blob/main/examples/flower-in-30-minutes/tutorial.ipynb))

-
 ## Documentation

 [Flower Docs](https://flower.dev/docs):
-* [Installation](https://flower.dev/docs/framework/how-to-install-flower.html)
-* [Quickstart (TensorFlow)](https://flower.dev/docs/framework/tutorial-quickstart-tensorflow.html)
-* [Quickstart (PyTorch)](https://flower.dev/docs/framework/tutorial-quickstart-pytorch.html)
-* [Quickstart (Hugging Face)](https://flower.dev/docs/framework/tutorial-quickstart-huggingface.html)
-* [Quickstart (PyTorch Lightning [code example])](https://flower.dev/docs/framework/tutorial-quickstart-pytorch-lightning.html)
-* [Quickstart (MXNet)](https://flower.dev/docs/framework/example-mxnet-walk-through.html)
-* [Quickstart (Pandas)](https://flower.dev/docs/framework/tutorial-quickstart-pandas.html)
-* [Quickstart (fastai)](https://flower.dev/docs/framework/tutorial-quickstart-fastai.html)
-* [Quickstart (JAX)](https://flower.dev/docs/framework/tutorial-quickstart-jax.html)
-* [Quickstart (scikit-learn)](https://flower.dev/docs/framework/tutorial-quickstart-scikitlearn.html)
-* [Quickstart (Android [TFLite])](https://flower.dev/docs/framework/tutorial-quickstart-android.html)
-* [Quickstart (iOS [CoreML])](https://flower.dev/docs/framework/tutorial-quickstart-ios.html)
+
+- [Installation](https://flower.dev/docs/framework/how-to-install-flower.html)
+- [Quickstart (TensorFlow)](https://flower.dev/docs/framework/tutorial-quickstart-tensorflow.html)
+- [Quickstart (PyTorch)](https://flower.dev/docs/framework/tutorial-quickstart-pytorch.html)
+- [Quickstart (Hugging Face)](https://flower.dev/docs/framework/tutorial-quickstart-huggingface.html)
+- [Quickstart (PyTorch Lightning [code example])](https://flower.dev/docs/framework/tutorial-quickstart-pytorch-lightning.html)
+- [Quickstart (MXNet)](https://flower.dev/docs/framework/example-mxnet-walk-through.html)
+- [Quickstart (Pandas)](https://flower.dev/docs/framework/tutorial-quickstart-pandas.html)
+- [Quickstart (fastai)](https://flower.dev/docs/framework/tutorial-quickstart-fastai.html)
+- [Quickstart (JAX)](https://flower.dev/docs/framework/tutorial-quickstart-jax.html)
+- [Quickstart (scikit-learn)](https://flower.dev/docs/framework/tutorial-quickstart-scikitlearn.html)
+- [Quickstart (Android [TFLite])](https://flower.dev/docs/framework/tutorial-quickstart-android.html)
+- [Quickstart (iOS [CoreML])](https://flower.dev/docs/framework/tutorial-quickstart-ios.html)

 ## Flower Baselines

 Flower Baselines is a collection of community-contributed experiments that reproduce the experiments performed in popular federated learning publications. Researchers can build on Flower Baselines to quickly evaluate new ideas:

-* [FedAvg](https://arxiv.org/abs/1602.05629):
-  * [MNIST](https://github.com/adap/flower/tree/main/baselines/flwr_baselines/flwr_baselines/publications/fedavg_mnist)
-* [FedProx](https://arxiv.org/abs/1812.06127):
-  * [MNIST](https://github.com/adap/flower/tree/main/baselines/fedprox/)
-* [FedBN: Federated Learning on non-IID Features via Local Batch Normalization](https://arxiv.org/abs/2102.07623):
-  * [Convergence Rate](https://github.com/adap/flower/tree/main/baselines/flwr_baselines/flwr_baselines/publications/fedbn/convergence_rate)
-* [Adaptive Federated Optimization](https://arxiv.org/abs/2003.00295):
-  * [CIFAR-10/100](https://github.com/adap/flower/tree/main/baselines/flwr_baselines/flwr_baselines/publications/adaptive_federated_optimization)
+- [FedAvg](https://arxiv.org/abs/1602.05629):
+  - [MNIST](https://github.com/adap/flower/tree/main/baselines/flwr_baselines/flwr_baselines/publications/fedavg_mnist)
+- [FedProx](https://arxiv.org/abs/1812.06127):
+  - [MNIST](https://github.com/adap/flower/tree/main/baselines/fedprox/)
+- [FedBN: Federated Learning on non-IID Features via Local Batch Normalization](https://arxiv.org/abs/2102.07623):
+  - [Convergence Rate](https://github.com/adap/flower/tree/main/baselines/flwr_baselines/flwr_baselines/publications/fedbn/convergence_rate)
+- [Adaptive Federated Optimization](https://arxiv.org/abs/2003.00295):
+  - [CIFAR-10/100](https://github.com/adap/flower/tree/main/baselines/flwr_baselines/flwr_baselines/publications/adaptive_federated_optimization)

-Check the Flower documentation to learn more: [Using Baselines](https://flower.dev/docs/baselines/using-baselines.html)
+Check the Flower documentation to learn more: [Using Baselines](https://flower.dev/docs/baselines/how-to-use-baselines.html)

-The Flower community loves contributions! Make your work more visible and enable others to build on it by contributing it as a baseline: [Contributing Baselines](https://flower.dev/docs/baselines/contributing-baselines.html)
+The Flower community loves contributions! Make your work more visible and enable others to build on it by contributing it as a baseline: [Contributing Baselines](https://flower.dev/docs/baselines/how-to-contribute-baselines.html)

 ## Flower Usage Examples

@@ -161,26 +160,26 @@ Several code examples show different usage scenarios of Flower (in combination w

 Quickstart examples:

-* [Quickstart (TensorFlow)](https://github.com/adap/flower/tree/main/examples/quickstart-tensorflow)
-* [Quickstart (PyTorch)](https://github.com/adap/flower/tree/main/examples/quickstart-pytorch)
-* [Quickstart (Hugging Face)](https://github.com/adap/flower/tree/main/examples/quickstart-huggingface)
-* [Quickstart (PyTorch Lightning)](https://github.com/adap/flower/tree/main/examples/quickstart-pytorch-lightning)
-* [Quickstart (fastai)](https://github.com/adap/flower/tree/main/examples/quickstart-fastai)
-* [Quickstart (Pandas)](https://github.com/adap/flower/tree/main/examples/quickstart-pandas)
-* [Quickstart (MXNet)](https://github.com/adap/flower/tree/main/examples/quickstart-mxnet)
-* [Quickstart (JAX)](https://github.com/adap/flower/tree/main/examples/quickstart-jax)
-* [Quickstart (scikit-learn)](https://github.com/adap/flower/tree/main/examples/sklearn-logreg-mnist)
-* [Quickstart (Android [TFLite])](https://github.com/adap/flower/tree/main/examples/android)
-* [Quickstart (iOS [CoreML])](https://github.com/adap/flower/tree/main/examples/ios)
+- [Quickstart (TensorFlow)](https://github.com/adap/flower/tree/main/examples/quickstart-tensorflow)
+- [Quickstart (PyTorch)](https://github.com/adap/flower/tree/main/examples/quickstart-pytorch)
+- [Quickstart (Hugging Face)](https://github.com/adap/flower/tree/main/examples/quickstart-huggingface)
+- [Quickstart (PyTorch Lightning)](https://github.com/adap/flower/tree/main/examples/quickstart-pytorch-lightning)
+- [Quickstart (fastai)](https://github.com/adap/flower/tree/main/examples/quickstart-fastai)
+- [Quickstart (Pandas)](https://github.com/adap/flower/tree/main/examples/quickstart-pandas)
+- [Quickstart (MXNet)](https://github.com/adap/flower/tree/main/examples/quickstart-mxnet)
+- [Quickstart (JAX)](https://github.com/adap/flower/tree/main/examples/quickstart-jax)
+- [Quickstart (scikit-learn)](https://github.com/adap/flower/tree/main/examples/sklearn-logreg-mnist)
+- [Quickstart (Android [TFLite])](https://github.com/adap/flower/tree/main/examples/android)
+- [Quickstart (iOS [CoreML])](https://github.com/adap/flower/tree/main/examples/ios)

 Other [examples](https://github.com/adap/flower/tree/main/examples):

-* [Raspberry Pi & Nvidia Jetson Tutorial](https://github.com/adap/flower/tree/main/examples/embedded-devices)
-* [PyTorch: From Centralized to Federated](https://github.com/adap/flower/tree/main/examples/pytorch-from-centralized-to-federated)
-* [MXNet: From Centralized to Federated](https://github.com/adap/flower/tree/main/examples/mxnet-from-centralized-to-federated)
-* [Advanced Flower with TensorFlow/Keras](https://github.com/adap/flower/tree/main/examples/advanced-tensorflow)
-* [Advanced Flower with PyTorch](https://github.com/adap/flower/tree/main/examples/advanced-pytorch)
-* Single-Machine Simulation of Federated Learning Systems ([PyTorch](https://github.com/adap/flower/tree/main/examples/simulation_pytorch)) ([Tensorflow](https://github.com/adap/flower/tree/main/examples/simulation_tensorflow))
+- [Raspberry Pi & Nvidia Jetson Tutorial](https://github.com/adap/flower/tree/main/examples/embedded-devices)
+- [PyTorch: From Centralized to Federated](https://github.com/adap/flower/tree/main/examples/pytorch-from-centralized-to-federated)
+- [MXNet: From Centralized to Federated](https://github.com/adap/flower/tree/main/examples/mxnet-from-centralized-to-federated)
+- [Advanced Flower with TensorFlow/Keras](https://github.com/adap/flower/tree/main/examples/advanced-tensorflow)
+- [Advanced Flower with PyTorch](https://github.com/adap/flower/tree/main/examples/advanced-pytorch)
+- Single-Machine Simulation of Federated Learning Systems ([PyTorch](https://github.com/adap/flower/tree/main/examples/simulation_pytorch)) ([Tensorflow](https://github.com/adap/flower/tree/main/examples/simulation_tensorflow))

 ## Community

@@ -192,12 +191,12 @@ Flower is built by a wonderful community of researchers and engineers. [Join Sla

 ## Citation

-If you publish work that uses Flower, please cite Flower as follows:
+If you publish work that uses Flower, please cite Flower as follows:

 ```bibtex
 @article{beutel2020flower,
   title={Flower: A Friendly Federated Learning Research Framework},
-  author={Beutel, Daniel J and Topal, Taner and Mathur, Akhil and Qiu, Xinchi and Fernandez-Marques, Javier and Gao, Yan and Sani, Lorenzo and Kwing, Hei Li and Parcollet, Titouan and Gusmão, Pedro PB de and Lane, Nicholas D},
+  author={Beutel, Daniel J and Topal, Taner and Mathur, Akhil and Qiu, Xinchi and Fernandez-Marques, Javier and Gao, Yan and Sani, Lorenzo and Kwing, Hei Li and Parcollet, Titouan and Gusmão, Pedro PB de and Lane, Nicholas D},
   journal={arXiv preprint arXiv:2007.14390},
   year={2020}
 }
flwr_nightly-1.6.0.dev20231101.dist-info/RECORD CHANGED
@@ -1,6 +1,6 @@
 flwr/__init__.py,sha256=_lnE8lIgk0i4hpag6GWVGPPr1w4IrA5z4y_kFwIpZYM,959
 flwr/client/__init__.py,sha256=7GyT3nZpvzL0grFGMcKhXrVS_IpctJpUmMk9whAzFNE,1111
-flwr/client/app.py,sha256=qwvSYITDDnYS4QDU0rOavK1mCWWli3okQ5hm_r4aogY,10504
+flwr/client/app.py,sha256=-ohwtOxk9O2ESlnA_OeWNkishUoNt4hbO3tmNiChdBc,11111
 flwr/client/client.py,sha256=WPDQzzPQ3VePYZDEjMEMxKWUPDWB21sL_iP12NUPt5o,7894
 flwr/client/dpfedavg_numpy_client.py,sha256=LxUcPBO0mU3VScAx9vx2PlsghXjQZVEBOn3rolgrgio,7216
 flwr/client/grpc_client/__init__.py,sha256=LsnbqXiJhgQcB0XzAlUQgPx011Uf7Y7yabIC1HxivJ8,735
@@ -26,6 +26,7 @@ flwr/common/dp.py,sha256=hF45cPElXxcQsh4AoquAyaTrNi0xCrIcKx7xOcV_1XU,1782
 flwr/common/grpc.py,sha256=JmFrGeEqFjMKkf6Mn6NlMJdjk27pYHzsn7CquAfR4R0,1896
 flwr/common/logger.py,sha256=Plgdf5NULsv9leDeNOXTeCqK-7LkZh8R34RPT4EkOs8,3466
 flwr/common/parameter.py,sha256=-bFAUayToYDF50FZGrBC1hQYJCQDtB2bbr3ZuVLMtdE,2095
+flwr/common/retry_invoker.py,sha256=RBTiDnYyePWvhBH9GqcWZl0tQaUOVUqOVBwfGprXWEg,10810
 flwr/common/secure_aggregation/__init__.py,sha256=29nHIUO2L8-KhNHQ2KmIgRo_4CPkq4LgLCUN0on5FgI,731
 flwr/common/secure_aggregation/crypto/__init__.py,sha256=dz7pVx2aPrHxr_AwgO5mIiTzu4PcvUxRq9NLBbFcsf8,738
 flwr/common/secure_aggregation/crypto/shamir.py,sha256=yY35ZgHlB4YyGW_buG-1X-0M-ejXuQzISgYLgC_Z9TY,2792
@@ -121,8 +122,8 @@ flwr/simulation/ray_transport/__init__.py,sha256=FsaAnzC4cw4DqoouBCix6496k29jACk
 flwr/simulation/ray_transport/ray_actor.py,sha256=EiyXPRnur8pOzGu_h5ZU4t8C5M_amnVPAQWyPSbAnEU,16564
 flwr/simulation/ray_transport/ray_client_proxy.py,sha256=cPo3Ny1k5arqTLHD0bB5DSEuS66aE2SwEumtr8TzOGc,8778
 flwr/simulation/ray_transport/utils.py,sha256=MVOH4l1ZPt64WidgBBay--1M3lS82CQ3ebmwU9Cvlo0,3369
-flwr_nightly-1.6.0.dev20231026.dist-info/LICENSE,sha256=z8d0m5b2O9McPEK1xHG_dWgUBT6EfBDz6wA0F7xSPTA,11358
-flwr_nightly-1.6.0.dev20231026.dist-info/METADATA,sha256=sbAgePqdUg8CrO4n__nUbNsobNcLcELA03RLDqeyegY,13240
-flwr_nightly-1.6.0.dev20231026.dist-info/WHEEL,sha256=Zb28QaM1gQi8f4VCBhsUklF61CTlNYfs9YAZn-TOGFk,88
-flwr_nightly-1.6.0.dev20231026.dist-info/entry_points.txt,sha256=1uLlD5tIunkzALMfMWnqjdE_D5hRUX_I1iMmOMv6tZI,181
-flwr_nightly-1.6.0.dev20231026.dist-info/RECORD,,
+flwr_nightly-1.6.0.dev20231101.dist-info/LICENSE,sha256=z8d0m5b2O9McPEK1xHG_dWgUBT6EfBDz6wA0F7xSPTA,11358
+flwr_nightly-1.6.0.dev20231101.dist-info/METADATA,sha256=R2keD6GeJ359rUi7o0nVXaiIXrzkPfaZs71po_Ul96g,13241
+flwr_nightly-1.6.0.dev20231101.dist-info/WHEEL,sha256=Zb28QaM1gQi8f4VCBhsUklF61CTlNYfs9YAZn-TOGFk,88
+flwr_nightly-1.6.0.dev20231101.dist-info/entry_points.txt,sha256=1uLlD5tIunkzALMfMWnqjdE_D5hRUX_I1iMmOMv6tZI,181
+flwr_nightly-1.6.0.dev20231101.dist-info/RECORD,,