Merge pull request #492 from FedML-AI/dev/v0.7.0
Dev/v0.7.0
chaoyanghe authored Aug 26, 2022
2 parents 800a7df + c457920 commit cbea7cf
Showing 8 changed files with 28 additions and 68 deletions.
55 changes: 0 additions & 55 deletions .github/workflows/smoke_test_cross_device_mnn_server_win.yml

This file was deleted.

11 changes: 10 additions & 1 deletion doc/en/starter/installation.md
@@ -11,7 +11,16 @@ FedML supports Linux, MacOS, Windows, and Android.
```
pip install fedml
```
(Note: please use Python 3.6, 3.7, 3.8, or 3.9. Support for Python 3.10 will be added systematically in the next iteration.)

The default machine learning engine is `PyTorch`. FedML also supports `TensorFlow`, `Jax`, and `MXNet`.
You can install the related engines as follows:
```
pip install "fedml[tensorflow]"
pip install "fedml[jax]"
pip install "fedml[mxnet]"
```
Note that the commands above install only the CPU versions.
If you need the GPU/TPU version, please follow the official TensorFlow/Jax/MXNet installation guidance.
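
As a quick sanity check (a minimal sketch, not part of this change), you can verify which optional engines are importable in your environment:

```python
# Sketch: report which of the optional ML engines are importable.
import importlib

for engine in ("torch", "tensorflow", "jax", "mxnet"):
    try:
        module = importlib.import_module(engine)
        version = getattr(module, "__version__", "unknown")
        print(f"{engine}: available (version {version})")
    except ImportError:
        print(f"{engine}: not installed")
```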

## Installing FedML with Anaconda

3 changes: 3 additions & 0 deletions python/fedml/model/model_hub.py
@@ -72,11 +72,14 @@ def create(args, output_dim):
model = (gen, disc)
elif model_name == "lenet" and hasattr(args, "deeplearning_backend") and args.deeplearning_backend == "mnn":
from .mobile.mnn_lenet import create_mnn_lenet5_model

create_mnn_lenet5_model(args.global_model_file_path)
model = None  # for the MNN server, the model is saved as a computational graph and then sent to the clients.
elif model_name == "resnet20" and hasattr(args, "deeplearning_backend") and args.deeplearning_backend == "mnn":
from .mobile.mnn_resnet import create_mnn_resnet20_model

create_mnn_resnet20_model(args.global_model_file_path)
model = None  # for the MNN server, the model is saved as a computational graph and then sent to the clients.
else:
raise Exception("no such model definition, please check the argument spelling or customize your own model")
return model
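
For reference, here is a standalone sketch of the attribute-guard dispatch used above; the `SimpleNamespace` args object is hypothetical, and the real branches additionally require the MNN package:

```python
# Standalone sketch of the backend guard; SimpleNamespace stands in for FedML's parsed args.
from types import SimpleNamespace


def backend_is_mnn(args) -> bool:
    # The attribute may be absent entirely, so check hasattr() before comparing its value.
    return hasattr(args, "deeplearning_backend") and args.deeplearning_backend == "mnn"


server_args = SimpleNamespace(
    model="lenet",
    deeplearning_backend="mnn",
    global_model_file_path="model_file_cache/global_model.mnn",  # matches the quick-start config
)
print(backend_is_mnn(server_args))  # True -> the MNN LeNet branch is taken and create() returns None
```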
4 changes: 2 additions & 2 deletions python/quick_start/beehive/README.md
@@ -22,8 +22,8 @@ brew install android-platform-tools

[https://github.com/FedML-AI/FedML/tree/master/python/quick_start/beehive](https://github.com/FedML-AI/FedML/tree/master/python/quick_start/beehive)

5. For local debugging of cross-device server, please try

5. For local debugging of the cross-device server, please try the command below.
(Note: the cross-device server does not work on Windows because the MNN library does not run correctly on Windows, so please run the cross-device server on Linux or another OS where the MNN library works correctly.)
```
sh run_server.sh
```
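
If you want to guard against accidentally starting the server on Windows, a minimal check like the following could be added to the server entry point (this is an assumption for illustration, not part of the repository):

```python
# Optional guard (hypothetical): abort on Windows, where MNN does not run correctly.
import platform
import sys

if platform.system() == "Windows":
    sys.exit("The cross-device server requires Linux/macOS because the MNN library does not work on Windows.")
```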
6 changes: 3 additions & 3 deletions python/quick_start/beehive/config/fedml_config.yaml
@@ -9,7 +9,7 @@ environment_args:

data_args:
dataset: "mnist"
data_cache_dir: "../../../data/mnist"
data_cache_dir: ~/fedml_data
partition_method: "hetero"
partition_alpha: 0.5
train_size: 10000
@@ -18,8 +18,8 @@ data_args:
model_args:
model: "lenet"
deeplearning_backend: "mnn"
model_file_cache_folder: "./model_file_cache" # will be filled by the server automatically
global_model_file_path: "./model_file_cache/global_model.mnn"
model_file_cache_folder: "model_file_cache" # will be filled by the server automatically
global_model_file_path: "model_file_cache/global_model.mnn"

train_args:
federated_optimizer: "FedAvg"
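The new `~/fedml_data` value relies on the `~` prefix being expanded at runtime; a minimal sketch of that expansion, consistent with the `expanduser` import added to `torch_server.py` below:

```python
# Sketch: expand the "~" prefix used by data_cache_dir in fedml_config.yaml.
from os.path import expanduser

data_cache_dir = expanduser("~/fedml_data")
print(data_cache_dir)  # e.g. /home/<user>/fedml_data
```
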
6 changes: 3 additions & 3 deletions python/quick_start/beehive/my_dataset.py
@@ -5,11 +5,11 @@


class MnistDataset(MNN.data.Dataset):
def __init__(self, training_dataset=True):
def __init__(self, training_dataset=True, root_path="./data"):
super(MnistDataset, self).__init__()
self.is_training_dataset = training_dataset
trainset = MNIST(root="./data", train=True, download=True)
testset = MNIST(root="./data", train=False, download=True)
trainset = MNIST(root=root_path, train=True, download=True)
testset = MNIST(root=root_path, train=False, download=True)
if self.is_training_dataset:
self.data = trainset.data / 255.0
self.labels = trainset.targets
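With the new `root_path` parameter, the dataset location can follow `data_cache_dir`. A usage sketch, assuming MNN and torchvision are installed and the script is run from the `quick_start/beehive` directory:

```python
# Usage sketch for the updated constructor (requires MNN and torchvision).
from os.path import expanduser

from my_dataset import MnistDataset

data_root = expanduser("~/fedml_data")
train_dataset = MnistDataset(training_dataset=True, root_path=data_root)
test_dataset = MnistDataset(training_dataset=False, root_path=data_root)
print("MNIST datasets initialized from", data_root)
```
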
9 changes: 6 additions & 3 deletions python/quick_start/beehive/torch_server.py
@@ -1,3 +1,6 @@
import os
from os.path import expanduser

import MNN

import fedml
@@ -12,16 +12,16 @@
device = fedml.device.get_device(args)

# load data
train_dataset = MnistDataset(True)
test_dataset = MnistDataset(False)
train_dataset = MnistDataset(True, root_path=args.data_cache_dir)
test_dataset = MnistDataset(False, root_path=args.data_cache_dir)
train_dataloader = MNN.data.DataLoader(train_dataset, batch_size=64, shuffle=True)
test_dataloader = MNN.data.DataLoader(
test_dataset, batch_size=args.batch_size, shuffle=False
)
class_num = 10

# load model
model = fedml.model.create(args, output_dim=class_num)
fedml.model.create(args, output_dim=class_num)

# start training
server = ServerMNN(
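Because `fedml.model.create()` returns `None` for the MNN backend and instead writes the serialized computational graph to `args.global_model_file_path`, a simple post-creation check (a hypothetical addition, not part of the diff) is to confirm the file was produced:

```python
# Hypothetical check: verify the MNN global model file was produced by fedml.model.create().
import os

global_model_file_path = "model_file_cache/global_model.mnn"  # value from fedml_config.yaml
if os.path.isfile(global_model_file_path):
    print(f"MNN global model written to {global_model_file_path}")
else:
    print("MNN global model was not generated; check the model_args configuration.")
```
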
2 changes: 1 addition & 1 deletion python/setup.py
@@ -54,7 +54,7 @@ def finalize_options(self):
]

requirements_extra_jax = [
"jax[cuda]",
"jax[cpu]",
"dm-haiku",
"optax",
"jaxlib"
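For context, the extras name used earlier by `pip install "fedml[tensorflow]"` / `"fedml[jax]"` / `"fedml[mxnet]"` is presumably wired to lists like this one through setuptools' `extras_require`; the surrounding `setup()` call is not shown in this hunk, so the exact form below is an assumption:

```python
# Sketch (assumption): how the "jax" extra could map to the CPU-only requirement list above.
from setuptools import setup

requirements_extra_jax = [
    "jax[cpu]",
    "dm-haiku",
    "optax",
    "jaxlib",
]

setup(
    name="fedml",
    extras_require={"jax": requirements_extra_jax},
)
```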
