From 481a9af57904fea9c860bfae4042d32da630ce5d Mon Sep 17 00:00:00 2001 From: Taner Topal Date: Mon, 2 Dec 2019 15:28:42 +0100 Subject: [PATCH] Project renaming XP-225 (#165) * Rename package (part of XP-225) --- .circleci/config.yml | 12 +- CHANGELOG.md | 9 +- CODEOWNERS | 10 +- Dockerfile | 8 +- README.md | 24 ++- RELEASE_PROCESS.md | 57 +++--- .../benchmark/aggregation/aggregation.py | 2 +- .../aggregation/final_task_accuracies.py | 2 +- .../aggregation/final_task_accuracies_test.py | 2 +- .../benchmark/aggregation/learning_rate.py | 2 +- .../aggregation/learning_rate_test.py | 4 +- .../benchmark/aggregation/participant_hist.py | 2 +- benchmarks/benchmark/aggregation/plot.py | 2 +- benchmarks/benchmark/aggregation/results.py | 2 +- .../benchmark/aggregation/task_accuracies.py | 2 +- .../aggregation/task_accuracies_test.py | 2 +- benchmarks/benchmark/bench_ea.py | 6 +- benchmarks/benchmark/benchmark_test.py | 6 +- benchmarks/benchmark/exec/__main__.py | 2 +- benchmarks/benchmark/exec/run.py | 10 +- benchmarks/conftest.py | 2 +- benchmarks/generator/README.md | 2 +- benchmarks/generator/__main__.py | 2 +- benchmarks/generator/config.py | 2 +- benchmarks/generator/conftest.py | 2 +- benchmarks/generator/data.py | 2 +- benchmarks/generator/persistence.py | 4 +- benchmarks/generator/persistence_test.py | 4 +- benchmarks/ops/docker.py | 2 +- benchmarks/ops/run.py | 2 +- docs/Makefile | 2 +- docs/conf.py | 10 +- docs/index.rst | 16 +- docs/install.md | 22 +- docs/quick.md | 6 +- examples/tensorflow_hello_world/helloworld.py | 12 +- .../{xain => xain_fl}/grpc/coordinator.proto | 2 +- .../grpc/hellonumproto.proto | 0 pytest.ini | 2 +- scripts/format.sh | 6 +- scripts/rm_caches.sh | 2 + scripts/test.sh | 10 +- setup.py | 10 +- xain/sdk/__init__.py | 1 - {xain => xain_fl}/CONFIG.md | 7 +- {xain => xain_fl}/__init__.py | 8 +- {xain => xain_fl}/__version__.py | 0 {xain => xain_fl}/config.py | 6 +- {xain => xain_fl}/conftest.py | 2 +- {xain => xain_fl}/datasets/__init__.py | 6 +- {xain => xain_fl}/datasets/conftest.py | 0 {xain => xain_fl}/datasets/dataset.py | 6 +- {xain => xain_fl}/datasets/dataset_test.py | 0 {xain => xain_fl}/datasets/hashes/README.md | 0 {xain => xain_fl}/datasets/hashes/__init__.py | 0 .../datasets/hashes/cifar-10-100p-b1_000.json | 0 .../datasets/hashes/cifar-10-100p-b1_005.json | 0 .../datasets/hashes/cifar-10-100p-b1_010.json | 0 .../datasets/hashes/cifar-10-100p-b1_015.json | 0 .../datasets/hashes/cifar-10-100p-b1_020.json | 0 .../datasets/hashes/cifar-10-100p-b1_025.json | 0 .../datasets/hashes/cifar-10-100p-b1_030.json | 0 .../datasets/hashes/cifar-10-100p-b1_035.json | 0 .../datasets/hashes/cifar-10-100p-b1_040.json | 0 .../datasets/hashes/cifar-10-100p-b1_045.json | 0 .../hashes/cifar-10-100p-iid-balanced.json | 0 .../hashes/cifar-10-100p-noniid-01cpp.json | 0 .../hashes/cifar-10-100p-noniid-02cpp.json | 0 .../hashes/cifar-10-100p-noniid-03cpp.json | 0 .../hashes/cifar-10-100p-noniid-04cpp.json | 0 .../hashes/cifar-10-100p-noniid-05cpp.json | 0 .../hashes/cifar-10-100p-noniid-06cpp.json | 0 .../hashes/cifar-10-100p-noniid-07cpp.json | 0 .../hashes/cifar-10-100p-noniid-08cpp.json | 0 .../hashes/cifar-10-100p-noniid-09cpp.json | 0 .../hashes/fashion-mnist-100p-b1_000.json | 0 .../hashes/fashion-mnist-100p-b1_005.json | 0 .../hashes/fashion-mnist-100p-b1_010.json | 0 .../hashes/fashion-mnist-100p-b1_015.json | 0 .../hashes/fashion-mnist-100p-b1_020.json | 0 .../hashes/fashion-mnist-100p-b1_025.json | 0 .../hashes/fashion-mnist-100p-b1_030.json | 0 
.../hashes/fashion-mnist-100p-b1_035.json | 0 .../hashes/fashion-mnist-100p-b1_040.json | 0 .../hashes/fashion-mnist-100p-b1_045.json | 0 .../fashion-mnist-100p-iid-balanced.json | 0 .../fashion-mnist-100p-noniid-01cpp.json | 0 .../fashion-mnist-100p-noniid-02cpp.json | 0 .../fashion-mnist-100p-noniid-03cpp.json | 0 .../fashion-mnist-100p-noniid-04cpp.json | 0 .../fashion-mnist-100p-noniid-05cpp.json | 0 .../fashion-mnist-100p-noniid-06cpp.json | 0 .../fashion-mnist-100p-noniid-07cpp.json | 0 .../fashion-mnist-100p-noniid-08cpp.json | 0 .../fashion-mnist-100p-noniid-09cpp.json | 0 {xain => xain_fl}/datasets/hashes/load.py | 0 .../datasets/hashes/load_test.py | 2 +- {xain => xain_fl}/datasets/prep.py | 2 +- {xain => xain_fl}/datasets/prep_test.py | 0 {xain => xain_fl}/datasets/stats/__init__.py | 0 {xain => xain_fl}/datasets/stats/__main__.py | 4 +- .../stats/datasets/cifar-10-100p-b1_000.txt | 0 .../stats/datasets/cifar-10-100p-b1_005.txt | 0 .../stats/datasets/cifar-10-100p-b1_010.txt | 0 .../stats/datasets/cifar-10-100p-b1_015.txt | 0 .../stats/datasets/cifar-10-100p-b1_020.txt | 0 .../stats/datasets/cifar-10-100p-b1_025.txt | 0 .../stats/datasets/cifar-10-100p-b1_030.txt | 0 .../stats/datasets/cifar-10-100p-b1_035.txt | 0 .../stats/datasets/cifar-10-100p-b1_040.txt | 0 .../stats/datasets/cifar-10-100p-b1_045.txt | 0 .../datasets/cifar-10-100p-iid-balanced.txt | 0 .../datasets/cifar-10-100p-noniid-01cpp.txt | 0 .../datasets/cifar-10-100p-noniid-02cpp.txt | 0 .../datasets/cifar-10-100p-noniid-03cpp.txt | 0 .../datasets/cifar-10-100p-noniid-04cpp.txt | 0 .../datasets/cifar-10-100p-noniid-05cpp.txt | 0 .../datasets/cifar-10-100p-noniid-06cpp.txt | 0 .../datasets/cifar-10-100p-noniid-07cpp.txt | 0 .../datasets/cifar-10-100p-noniid-08cpp.txt | 0 .../datasets/cifar-10-100p-noniid-09cpp.txt | 0 .../datasets/fashion-mnist-100p-b1_000.txt | 0 .../datasets/fashion-mnist-100p-b1_005.txt | 0 .../datasets/fashion-mnist-100p-b1_010.txt | 0 .../datasets/fashion-mnist-100p-b1_015.txt | 0 .../datasets/fashion-mnist-100p-b1_020.txt | 0 .../datasets/fashion-mnist-100p-b1_025.txt | 0 .../datasets/fashion-mnist-100p-b1_030.txt | 0 .../datasets/fashion-mnist-100p-b1_035.txt | 0 .../datasets/fashion-mnist-100p-b1_040.txt | 0 .../datasets/fashion-mnist-100p-b1_045.txt | 0 .../fashion-mnist-100p-iid-balanced.txt | 0 .../fashion-mnist-100p-noniid-01cpp.txt | 0 .../fashion-mnist-100p-noniid-02cpp.txt | 0 .../fashion-mnist-100p-noniid-03cpp.txt | 0 .../fashion-mnist-100p-noniid-04cpp.txt | 0 .../fashion-mnist-100p-noniid-05cpp.txt | 0 .../fashion-mnist-100p-noniid-06cpp.txt | 0 .../fashion-mnist-100p-noniid-07cpp.txt | 0 .../fashion-mnist-100p-noniid-08cpp.txt | 0 .../fashion-mnist-100p-noniid-09cpp.txt | 0 {xain => xain_fl}/datasets/stats/stats.py | 2 +- .../datasets/stats/stats_test.py | 0 {xain => xain_fl}/datasets/storage.py | 6 +- {xain => xain_fl}/datasets/storage_test.py | 0 {xain => xain_fl}/datasets/testing.py | 2 +- {xain => xain_fl}/datasets/testing_test.py | 0 {xain => xain_fl}/fl/__init__.py | 0 {xain => xain_fl}/fl/coordinator/__init__.py | 0 {xain => xain_fl}/fl/coordinator/aggregate.py | 4 +- .../fl/coordinator/aggregate_test.py | 0 .../fl/coordinator/controller.py | 0 .../fl/coordinator/controller_test.py | 0 .../fl/coordinator/coordinator.py | 10 +- .../fl/coordinator/coordinator_test.py | 0 {xain => xain_fl}/fl/coordinator/evaluator.py | 4 +- {xain => xain_fl}/fl/logging/__init__.py | 0 {xain => xain_fl}/fl/logging/logging.py | 0 {xain => xain_fl}/fl/participant/__init__.py | 0 
.../fl/participant/model_provider.py | 0 .../fl/participant/participant.py | 6 +- .../fl/participant/participant_test.py | 2 +- {xain => xain_fl}/grpc/__init__.py | 0 {xain => xain_fl}/grpc/conftest.py | 6 +- {xain => xain_fl}/grpc/coordinator.py | 6 +- {xain => xain_fl}/grpc/coordinator_pb2.py | 190 +++++++++--------- {xain => xain_fl}/grpc/coordinator_pb2.pyi | 0 .../grpc/coordinator_pb2_grpc.py | 44 ++-- {xain => xain_fl}/grpc/hellonumproto_pb2.py | 22 +- {xain => xain_fl}/grpc/hellonumproto_pb2.pyi | 0 .../grpc/hellonumproto_pb2_grpc.py | 10 +- {xain => xain_fl}/grpc/numproto_client.py | 4 +- {xain => xain_fl}/grpc/numproto_server.py | 4 +- {xain => xain_fl}/grpc/participant.py | 6 +- .../grpc/test_coordinator_logic.py | 4 +- {xain => xain_fl}/grpc/test_grpc.py | 10 +- {xain => xain_fl}/grpc/test_participant.py | 4 +- {xain => xain_fl}/helpers/__init__.py | 0 {xain => xain_fl}/helpers/project.py | 0 {xain => xain_fl}/helpers/sha1.py | 0 {xain => xain_fl}/logger.py | 4 +- xain_fl/sdk/__init__.py | 1 + {xain => xain_fl}/sdk/coordinator.py | 0 {xain => xain_fl}/sdk/coordinator_test.py | 0 {xain => xain_fl}/sdk/participant.py | 0 {xain => xain_fl}/sdk/participant_test.py | 0 {xain => xain_fl}/sdk/use_case.py | 0 {xain => xain_fl}/sdk/use_case_test.py | 0 {xain => xain_fl}/types/__init__.py | 0 189 files changed, 346 insertions(+), 339 deletions(-) rename protobuf/{xain => xain_fl}/grpc/coordinator.proto (97%) rename protobuf/{xain => xain_fl}/grpc/hellonumproto.proto (100%) delete mode 100644 xain/sdk/__init__.py rename {xain => xain_fl}/CONFIG.md (88%) rename {xain => xain_fl}/__init__.py (82%) rename {xain => xain_fl}/__version__.py (100%) rename {xain => xain_fl}/config.py (93%) rename {xain => xain_fl}/conftest.py (97%) rename {xain => xain_fl}/datasets/__init__.py (79%) rename {xain => xain_fl}/datasets/conftest.py (100%) rename {xain => xain_fl}/datasets/dataset.py (95%) rename {xain => xain_fl}/datasets/dataset_test.py (100%) rename {xain => xain_fl}/datasets/hashes/README.md (100%) rename {xain => xain_fl}/datasets/hashes/__init__.py (100%) rename {xain => xain_fl}/datasets/hashes/cifar-10-100p-b1_000.json (100%) rename {xain => xain_fl}/datasets/hashes/cifar-10-100p-b1_005.json (100%) rename {xain => xain_fl}/datasets/hashes/cifar-10-100p-b1_010.json (100%) rename {xain => xain_fl}/datasets/hashes/cifar-10-100p-b1_015.json (100%) rename {xain => xain_fl}/datasets/hashes/cifar-10-100p-b1_020.json (100%) rename {xain => xain_fl}/datasets/hashes/cifar-10-100p-b1_025.json (100%) rename {xain => xain_fl}/datasets/hashes/cifar-10-100p-b1_030.json (100%) rename {xain => xain_fl}/datasets/hashes/cifar-10-100p-b1_035.json (100%) rename {xain => xain_fl}/datasets/hashes/cifar-10-100p-b1_040.json (100%) rename {xain => xain_fl}/datasets/hashes/cifar-10-100p-b1_045.json (100%) rename {xain => xain_fl}/datasets/hashes/cifar-10-100p-iid-balanced.json (100%) rename {xain => xain_fl}/datasets/hashes/cifar-10-100p-noniid-01cpp.json (100%) rename {xain => xain_fl}/datasets/hashes/cifar-10-100p-noniid-02cpp.json (100%) rename {xain => xain_fl}/datasets/hashes/cifar-10-100p-noniid-03cpp.json (100%) rename {xain => xain_fl}/datasets/hashes/cifar-10-100p-noniid-04cpp.json (100%) rename {xain => xain_fl}/datasets/hashes/cifar-10-100p-noniid-05cpp.json (100%) rename {xain => xain_fl}/datasets/hashes/cifar-10-100p-noniid-06cpp.json (100%) rename {xain => xain_fl}/datasets/hashes/cifar-10-100p-noniid-07cpp.json (100%) rename {xain => xain_fl}/datasets/hashes/cifar-10-100p-noniid-08cpp.json (100%) rename 
{xain => xain_fl}/datasets/hashes/cifar-10-100p-noniid-09cpp.json (100%) rename {xain => xain_fl}/datasets/hashes/fashion-mnist-100p-b1_000.json (100%) rename {xain => xain_fl}/datasets/hashes/fashion-mnist-100p-b1_005.json (100%) rename {xain => xain_fl}/datasets/hashes/fashion-mnist-100p-b1_010.json (100%) rename {xain => xain_fl}/datasets/hashes/fashion-mnist-100p-b1_015.json (100%) rename {xain => xain_fl}/datasets/hashes/fashion-mnist-100p-b1_020.json (100%) rename {xain => xain_fl}/datasets/hashes/fashion-mnist-100p-b1_025.json (100%) rename {xain => xain_fl}/datasets/hashes/fashion-mnist-100p-b1_030.json (100%) rename {xain => xain_fl}/datasets/hashes/fashion-mnist-100p-b1_035.json (100%) rename {xain => xain_fl}/datasets/hashes/fashion-mnist-100p-b1_040.json (100%) rename {xain => xain_fl}/datasets/hashes/fashion-mnist-100p-b1_045.json (100%) rename {xain => xain_fl}/datasets/hashes/fashion-mnist-100p-iid-balanced.json (100%) rename {xain => xain_fl}/datasets/hashes/fashion-mnist-100p-noniid-01cpp.json (100%) rename {xain => xain_fl}/datasets/hashes/fashion-mnist-100p-noniid-02cpp.json (100%) rename {xain => xain_fl}/datasets/hashes/fashion-mnist-100p-noniid-03cpp.json (100%) rename {xain => xain_fl}/datasets/hashes/fashion-mnist-100p-noniid-04cpp.json (100%) rename {xain => xain_fl}/datasets/hashes/fashion-mnist-100p-noniid-05cpp.json (100%) rename {xain => xain_fl}/datasets/hashes/fashion-mnist-100p-noniid-06cpp.json (100%) rename {xain => xain_fl}/datasets/hashes/fashion-mnist-100p-noniid-07cpp.json (100%) rename {xain => xain_fl}/datasets/hashes/fashion-mnist-100p-noniid-08cpp.json (100%) rename {xain => xain_fl}/datasets/hashes/fashion-mnist-100p-noniid-09cpp.json (100%) rename {xain => xain_fl}/datasets/hashes/load.py (100%) rename {xain => xain_fl}/datasets/hashes/load_test.py (87%) rename {xain => xain_fl}/datasets/prep.py (99%) rename {xain => xain_fl}/datasets/prep_test.py (100%) rename {xain => xain_fl}/datasets/stats/__init__.py (100%) rename {xain => xain_fl}/datasets/stats/__main__.py (80%) rename {xain => xain_fl}/datasets/stats/datasets/cifar-10-100p-b1_000.txt (100%) rename {xain => xain_fl}/datasets/stats/datasets/cifar-10-100p-b1_005.txt (100%) rename {xain => xain_fl}/datasets/stats/datasets/cifar-10-100p-b1_010.txt (100%) rename {xain => xain_fl}/datasets/stats/datasets/cifar-10-100p-b1_015.txt (100%) rename {xain => xain_fl}/datasets/stats/datasets/cifar-10-100p-b1_020.txt (100%) rename {xain => xain_fl}/datasets/stats/datasets/cifar-10-100p-b1_025.txt (100%) rename {xain => xain_fl}/datasets/stats/datasets/cifar-10-100p-b1_030.txt (100%) rename {xain => xain_fl}/datasets/stats/datasets/cifar-10-100p-b1_035.txt (100%) rename {xain => xain_fl}/datasets/stats/datasets/cifar-10-100p-b1_040.txt (100%) rename {xain => xain_fl}/datasets/stats/datasets/cifar-10-100p-b1_045.txt (100%) rename {xain => xain_fl}/datasets/stats/datasets/cifar-10-100p-iid-balanced.txt (100%) rename {xain => xain_fl}/datasets/stats/datasets/cifar-10-100p-noniid-01cpp.txt (100%) rename {xain => xain_fl}/datasets/stats/datasets/cifar-10-100p-noniid-02cpp.txt (100%) rename {xain => xain_fl}/datasets/stats/datasets/cifar-10-100p-noniid-03cpp.txt (100%) rename {xain => xain_fl}/datasets/stats/datasets/cifar-10-100p-noniid-04cpp.txt (100%) rename {xain => xain_fl}/datasets/stats/datasets/cifar-10-100p-noniid-05cpp.txt (100%) rename {xain => xain_fl}/datasets/stats/datasets/cifar-10-100p-noniid-06cpp.txt (100%) rename {xain => xain_fl}/datasets/stats/datasets/cifar-10-100p-noniid-07cpp.txt (100%) 
rename {xain => xain_fl}/datasets/stats/datasets/cifar-10-100p-noniid-08cpp.txt (100%) rename {xain => xain_fl}/datasets/stats/datasets/cifar-10-100p-noniid-09cpp.txt (100%) rename {xain => xain_fl}/datasets/stats/datasets/fashion-mnist-100p-b1_000.txt (100%) rename {xain => xain_fl}/datasets/stats/datasets/fashion-mnist-100p-b1_005.txt (100%) rename {xain => xain_fl}/datasets/stats/datasets/fashion-mnist-100p-b1_010.txt (100%) rename {xain => xain_fl}/datasets/stats/datasets/fashion-mnist-100p-b1_015.txt (100%) rename {xain => xain_fl}/datasets/stats/datasets/fashion-mnist-100p-b1_020.txt (100%) rename {xain => xain_fl}/datasets/stats/datasets/fashion-mnist-100p-b1_025.txt (100%) rename {xain => xain_fl}/datasets/stats/datasets/fashion-mnist-100p-b1_030.txt (100%) rename {xain => xain_fl}/datasets/stats/datasets/fashion-mnist-100p-b1_035.txt (100%) rename {xain => xain_fl}/datasets/stats/datasets/fashion-mnist-100p-b1_040.txt (100%) rename {xain => xain_fl}/datasets/stats/datasets/fashion-mnist-100p-b1_045.txt (100%) rename {xain => xain_fl}/datasets/stats/datasets/fashion-mnist-100p-iid-balanced.txt (100%) rename {xain => xain_fl}/datasets/stats/datasets/fashion-mnist-100p-noniid-01cpp.txt (100%) rename {xain => xain_fl}/datasets/stats/datasets/fashion-mnist-100p-noniid-02cpp.txt (100%) rename {xain => xain_fl}/datasets/stats/datasets/fashion-mnist-100p-noniid-03cpp.txt (100%) rename {xain => xain_fl}/datasets/stats/datasets/fashion-mnist-100p-noniid-04cpp.txt (100%) rename {xain => xain_fl}/datasets/stats/datasets/fashion-mnist-100p-noniid-05cpp.txt (100%) rename {xain => xain_fl}/datasets/stats/datasets/fashion-mnist-100p-noniid-06cpp.txt (100%) rename {xain => xain_fl}/datasets/stats/datasets/fashion-mnist-100p-noniid-07cpp.txt (100%) rename {xain => xain_fl}/datasets/stats/datasets/fashion-mnist-100p-noniid-08cpp.txt (100%) rename {xain => xain_fl}/datasets/stats/datasets/fashion-mnist-100p-noniid-09cpp.txt (100%) rename {xain => xain_fl}/datasets/stats/stats.py (97%) rename {xain => xain_fl}/datasets/stats/stats_test.py (100%) rename {xain => xain_fl}/datasets/storage.py (97%) rename {xain => xain_fl}/datasets/storage_test.py (100%) rename {xain => xain_fl}/datasets/testing.py (97%) rename {xain => xain_fl}/datasets/testing_test.py (100%) rename {xain => xain_fl}/fl/__init__.py (100%) rename {xain => xain_fl}/fl/coordinator/__init__.py (100%) rename {xain => xain_fl}/fl/coordinator/aggregate.py (98%) rename {xain => xain_fl}/fl/coordinator/aggregate_test.py (100%) rename {xain => xain_fl}/fl/coordinator/controller.py (100%) rename {xain => xain_fl}/fl/coordinator/controller_test.py (100%) rename {xain => xain_fl}/fl/coordinator/coordinator.py (97%) rename {xain => xain_fl}/fl/coordinator/coordinator_test.py (100%) rename {xain => xain_fl}/fl/coordinator/evaluator.py (88%) rename {xain => xain_fl}/fl/logging/__init__.py (100%) rename {xain => xain_fl}/fl/logging/logging.py (100%) rename {xain => xain_fl}/fl/participant/__init__.py (100%) rename {xain => xain_fl}/fl/participant/model_provider.py (100%) rename {xain => xain_fl}/fl/participant/participant.py (97%) rename {xain => xain_fl}/fl/participant/participant_test.py (99%) rename {xain => xain_fl}/grpc/__init__.py (100%) rename {xain => xain_fl}/grpc/conftest.py (82%) rename {xain => xain_fl}/grpc/coordinator.py (99%) rename {xain => xain_fl}/grpc/coordinator_pb2.py (67%) rename {xain => xain_fl}/grpc/coordinator_pb2.pyi (100%) rename {xain => xain_fl}/grpc/coordinator_pb2_grpc.py (52%) rename {xain => 
xain_fl}/grpc/hellonumproto_pb2.py (91%) rename {xain => xain_fl}/grpc/hellonumproto_pb2.pyi (100%) rename {xain => xain_fl}/grpc/hellonumproto_pb2_grpc.py (70%) rename {xain => xain_fl}/grpc/numproto_client.py (86%) rename {xain => xain_fl}/grpc/numproto_server.py (90%) rename {xain => xain_fl}/grpc/participant.py (98%) rename {xain => xain_fl}/grpc/test_coordinator_logic.py (98%) rename {xain => xain_fl}/grpc/test_grpc.py (97%) rename {xain => xain_fl}/grpc/test_participant.py (97%) rename {xain => xain_fl}/helpers/__init__.py (100%) rename {xain => xain_fl}/helpers/project.py (100%) rename {xain => xain_fl}/helpers/sha1.py (100%) rename {xain => xain_fl}/logger.py (83%) create mode 100644 xain_fl/sdk/__init__.py rename {xain => xain_fl}/sdk/coordinator.py (100%) rename {xain => xain_fl}/sdk/coordinator_test.py (100%) rename {xain => xain_fl}/sdk/participant.py (100%) rename {xain => xain_fl}/sdk/participant_test.py (100%) rename {xain => xain_fl}/sdk/use_case.py (100%) rename {xain => xain_fl}/sdk/use_case_test.py (100%) rename {xain => xain_fl}/types/__init__.py (100%) diff --git a/.circleci/config.yml b/.circleci/config.yml index 0d45db1eb..ed2e57add 100644 --- a/.circleci/config.yml +++ b/.circleci/config.yml @@ -11,7 +11,7 @@ jobs: # Download and cache dependencies - restore_cache: keys: - - v1-dependencies-{{ checksum "setup.py" }}-{{ checksum "xain/config.py" }} + - v1-dependencies-{{ checksum "setup.py" }}-{{ checksum "xain_fl/config.py" }} - run: name: install dependencies @@ -25,7 +25,7 @@ jobs: - save_cache: paths: - ./venv - key: v1-dependencies-{{ checksum "setup.py" }}-{{ checksum "xain/config.py" }} + key: v1-dependencies-{{ checksum "setup.py" }}-{{ checksum "xain_fl/config.py" }} - run: name: run tests @@ -43,7 +43,7 @@ jobs: # Download and cache dependencies - restore_cache: keys: - - v1-dependencies-{{ checksum "setup.py" }}-{{ checksum "xain/config.py" }} + - v1-dependencies-{{ checksum "setup.py" }}-{{ checksum "xain_fl/config.py" }} - run: name: install dependencies @@ -55,7 +55,7 @@ jobs: - save_cache: paths: - ./venv - key: v1-dependencies-{{ checksum "setup.py" }}-{{ checksum "xain/config.py" }} + key: v1-dependencies-{{ checksum "setup.py" }}-{{ checksum "xain_fl/config.py" }} - run: name: run tests slow @@ -73,7 +73,7 @@ jobs: # Download and cache dependencies - restore_cache: keys: - - v1-dependencies-{{ checksum "setup.py" }}-{{ checksum "xain/config.py" }} + - v1-dependencies-{{ checksum "setup.py" }}-{{ checksum "xain_fl/config.py" }} - run: name: install dependencies @@ -85,7 +85,7 @@ jobs: - save_cache: paths: - ./venv - key: v1-dependencies-{{ checksum "setup.py" }}-{{ checksum "xain/config.py" }} + key: v1-dependencies-{{ checksum "setup.py" }}-{{ checksum "xain_fl/config.py" }} - run: name: run tests benchmark diff --git a/CHANGELOG.md b/CHANGELOG.md index 1e2c5f810..6a8573aef 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -1,4 +1,5 @@ # Changelog + All notable changes to this project will be documented in this file. The format is based on [Keep a @@ -17,7 +18,6 @@ For reference, the possible headings are: - `External Contributors` to list all external contributors. - `Notes` for notes regarding this particular release. 
- ## [Unreleased] ## [0.1.0] - 2019-09-25 @@ -27,9 +27,8 @@ The first public release of **XAIN** ### Added - FedML implementation on well known - [benchmarks](https://github.com/xainag/xain/tree/master/benchmarks/benchmark) using + [benchmarks](https://github.com/xainag/xain-fl/tree/master/benchmarks/benchmark) using a realistic deep learning model structure. - -[Unreleased]: https://github.com/xainag/xain/pulls?utf8=%E2%9C%93&q=merged%3A%3E2019-09-25+ -[0.1.0]: https://github.com/xainag/xain/pulls?utf8=%E2%9C%93&q=merged%3A%3C%3D2019-09-25+ +[unreleased]: https://github.com/xainag/xain-fl/pulls?utf8=%E2%9C%93&q=merged%3A%3E2019-09-25+ +[0.1.0]: https://github.com/xainag/xain-fl/pulls?utf8=%E2%9C%93&q=merged%3A%3C%3D2019-09-25+ diff --git a/CODEOWNERS b/CODEOWNERS index bc1342d9a..085e80bfc 100644 --- a/CODEOWNERS +++ b/CODEOWNERS @@ -21,8 +21,8 @@ /scripts/ @tanertopal @danieljanes ## Python packages -/xain/datasets/ @tanertopal -/xain/fl/ @danieljanes -/xain/grpc/ @r-marques @finiteprods -/xain/helpers/ @danieljanes @tanertopal -/xain/types/ @danieljanes @tanertopal +/xain_fl/datasets/ @tanertopal +/xain_fl/fl/ @danieljanes +/xain_fl/grpc/ @r-marques @finiteprods +/xain_fl/helpers/ @danieljanes @tanertopal +/xain_fl/types/ @danieljanes @tanertopal diff --git a/Dockerfile b/Dockerfile index 8e98bd9d0..3cbf49473 100644 --- a/Dockerfile +++ b/Dockerfile @@ -13,16 +13,16 @@ RUN python -m pip install -U pip==19.3.1 setuptools==41.6.0 COPY setup.py setup.py # These files are needed for the setup.py to work -COPY xain/__version__.py xain/__version__.py +COPY xain_fl/__version__.py xain_fl/__version__.py COPY README.md README.md # Install only install_requires RUN python setup.py egg_info && \ - LN=$(awk '/tensorflow/{ print NR; exit }' xain.egg-info/requires.txt) && \ - IR=$(head -n $LN xain.egg-info/requires.txt | awk '{gsub(/\[.+\]/,"");}1') && \ + LN=$(awk '/tensorflow/{ print NR; exit }' xain_fl.egg-info/requires.txt) && \ + IR=$(head -n $LN xain_fl.egg-info/requires.txt | awk '{gsub(/\[.+\]/,"");}1') && \ python -m pip install $IR -COPY xain xain +COPY xain_fl xain_fl COPY protobuf protobuf RUN python -m pip install . diff --git a/README.md b/README.md index 2ae17a083..24db49a93 100644 --- a/README.md +++ b/README.md @@ -1,6 +1,6 @@ -[![CircleCI](https://img.shields.io/circleci/build/github/xainag/xain/master?style=flat-square)](https://circleci.com/gh/xainag/xain/tree/master) -[![PyPI](https://img.shields.io/pypi/v/xain?style=flat-square)](https://pypi.org/project/xain/) -[![GitHub license](https://img.shields.io/github/license/xainag/xain?style=flat-square)](https://github.com/xainag/xain/blob/master/LICENSE) +[![CircleCI](https://img.shields.io/circleci/build/github/xainag/xain-fl/master?style=flat-square)](https://circleci.com/gh/xainag/xain-fl/tree/master) +[![PyPI](https://img.shields.io/pypi/v/xain-fl?style=flat-square)](https://pypi.org/project/xain-fl/) +[![GitHub license](https://img.shields.io/github/license/xainag/xain-fl?style=flat-square)](https://github.com/xainag/xain-fl/blob/master/LICENSE) # XAIN @@ -14,29 +14,30 @@ POLITE NOTE: We want to point out that running the benchmarks as described below ## Quick Start -XAIN requires [Python 3.6+](https://python.org/). To install the `xain` package just run: +XAIN requires [Python 3.6+](https://python.org/). To install the `xain-fl` package just run: ```shell -$ python -m pip install xain +$ python -m pip install xain-fl ``` XAIN can also be installed with GPU support through the `gpu` extra feature. 
To -install the `xain` package with support for GPUs just run: +install the `xain-fl` package with support for GPUs just run: ```shell -$ python -m pip install xain[gpu] +$ python -m pip install xain-fl[gpu] ``` ### Running training sessions and benchmarks To run training sessions, see the [benchmark -package](https://github.com/xainag/xain/tree/master/benchmarks/benchmark) and the +package](https://github.com/xainag/xain-fl/tree/master/benchmarks/benchmark) and the [benchmark -documentation](https://github.com/xainag/xain/blob/master/docs/quick.md#training). +documentation](https://github.com/xainag/xain-fl/blob/master/docs/quick.md#training). ## Install from source For development we require some extra system dependencies: + - [clang-format 8+](https://clang.llvm.org/docs/ClangFormat.html) - Linux: `sudo apt install clang-format` - macOS: `brew install clang-format` @@ -46,8 +47,8 @@ For development we require some extra system dependencies: To clone this repository and to install the XAIN project, please execute the following commands: ```shell -$ git clone https://github.com/xainag/xain.git -$ cd xain +$ git clone https://github.com/xainag/xain-fl.git +$ cd xain-fl $ python -m pip install -e .[dev] ``` @@ -64,6 +65,7 @@ $ pytest The project documentation resides under `docs/`. To build the documentation run: + ```shell $ cd docs/ $ make docs diff --git a/RELEASE_PROCESS.md b/RELEASE_PROCESS.md index 90be07be3..b7f1a0ba4 100644 --- a/RELEASE_PROCESS.md +++ b/RELEASE_PROCESS.md @@ -17,26 +17,26 @@ A release on git is just a tagged commit on the `master` branch. Here we detail the process of creating a new Github release. 1. Create and merge a pull request that: - - increases the version number in - [`xain/__version__.py`](https://github.com/xainag/xain/blob/master/xain/__version__.py) - according the versioning schema. - - updates the - [`CHANGELOG.md`](https://github.com/xainag/xain/blob/master/CHANGELOG.md) - with all notable changes for the release. - - possibly update the `Development Status` classifiers in the - [`setup.py`](https://github.com/xainag/xain/blob/master/setup.py). You - can check supported classifiers in the [pypi - website](https://pypi.org/classifiers/). -2. Got to the [Github Releases tab](https://github.com/xainag/xain/releases) + - increases the version number in + [`xain_fl/__version__.py`](https://github.com/xainag/xain-fl/blob/master/xain_fl/__version__.py) + according the versioning schema. + - updates the + [`CHANGELOG.md`](https://github.com/xainag/xain-fl/blob/master/CHANGELOG.md) + with all notable changes for the release. + - possibly update the `Development Status` classifiers in the + [`setup.py`](https://github.com/xainag/xain-fl/blob/master/setup.py). You + can check supported classifiers in the [pypi + website](https://pypi.org/classifiers/). +2. Got to the [Github Releases tab](https://github.com/xainag/xain-fl/releases) and create a new release: - - for the tag version use the version defined in 1. preceded by a `v`, e.g. - v0.3.2, and target master. - - for the release title use the same as the tag version. - - for the release description, copy the section from the - [`CHANGELOG.md`](https://github.com/xainag/xain/blob/master/CHANGELOG.md) - related to this version. - - possibly check the `This is a pre-release` check box. - - Publish the release. + - for the tag version use the version defined in 1. preceded by a `v`, e.g. + v0.3.2, and target master. + - for the release title use the same as the tag version. 
+ - for the release description, copy the section from the + [`CHANGELOG.md`](https://github.com/xainag/xain-fl/blob/master/CHANGELOG.md) + related to this version. + - possibly check the `This is a pre-release` check box. + - Publish the release. ### How to publish a new release to PyPi @@ -45,15 +45,14 @@ You can check more information in the [Python Packaging User Guide](https://packaging.python.org/tutorials/packaging-projects/). 1. Checkout the current git tag e.g. - ```bash - $ git checkout v0.3.2 - ``` + ```bash + $ git checkout v0.3.2 + ``` 2. Generate the distribution archives: - ```bash - $ python setup.py sdist bdist_wheel - ``` + ```bash + $ python setup.py sdist bdist_wheel + ``` 3. Upload the distribution archives using the correct PyPi credentials: - ```bash - $ python -m twine upload dist/* - ``` - + ```bash + $ python -m twine upload dist/* + ``` diff --git a/benchmarks/benchmark/aggregation/aggregation.py b/benchmarks/benchmark/aggregation/aggregation.py index 00d0683b3..7a10c76d9 100644 --- a/benchmarks/benchmark/aggregation/aggregation.py +++ b/benchmarks/benchmark/aggregation/aggregation.py @@ -13,7 +13,7 @@ task_accuracies, ) from benchmarks.helpers import storage -from xain.logger import get_logger +from xain_fl.logger import get_logger FLAGS = flags.FLAGS diff --git a/benchmarks/benchmark/aggregation/final_task_accuracies.py b/benchmarks/benchmark/aggregation/final_task_accuracies.py index 62805498a..2aef617ad 100644 --- a/benchmarks/benchmark/aggregation/final_task_accuracies.py +++ b/benchmarks/benchmark/aggregation/final_task_accuracies.py @@ -4,7 +4,7 @@ from absl import flags, logging from benchmarks.helpers import storage -from xain.types import PlotValues, XticksLabels, XticksLocations +from xain_fl.types import PlotValues, XticksLabels, XticksLocations from .plot import plot from .results import GroupResult, TaskResult diff --git a/benchmarks/benchmark/aggregation/final_task_accuracies_test.py b/benchmarks/benchmark/aggregation/final_task_accuracies_test.py index e932ef353..723a120cd 100644 --- a/benchmarks/benchmark/aggregation/final_task_accuracies_test.py +++ b/benchmarks/benchmark/aggregation/final_task_accuracies_test.py @@ -3,7 +3,7 @@ import pytest from absl import flags -from xain.helpers import sha1 +from xain_fl.helpers import sha1 from . import final_task_accuracies from .results import TaskResult diff --git a/benchmarks/benchmark/aggregation/learning_rate.py b/benchmarks/benchmark/aggregation/learning_rate.py index ec5e7804b..c48ac06b1 100644 --- a/benchmarks/benchmark/aggregation/learning_rate.py +++ b/benchmarks/benchmark/aggregation/learning_rate.py @@ -4,7 +4,7 @@ from absl import flags, logging from benchmarks.helpers import storage -from xain.types import PlotValues +from xain_fl.types import PlotValues from .plot import plot from .results import GroupResult, TaskResult diff --git a/benchmarks/benchmark/aggregation/learning_rate_test.py b/benchmarks/benchmark/aggregation/learning_rate_test.py index 785442816..0ddd0dd28 100644 --- a/benchmarks/benchmark/aggregation/learning_rate_test.py +++ b/benchmarks/benchmark/aggregation/learning_rate_test.py @@ -3,8 +3,8 @@ import pytest from absl import flags -from xain.helpers import sha1 -from xain.logger import get_logger +from xain_fl.helpers import sha1 +from xain_fl.logger import get_logger from . 
import learning_rate diff --git a/benchmarks/benchmark/aggregation/participant_hist.py b/benchmarks/benchmark/aggregation/participant_hist.py index 4e313db11..ad761bb6a 100644 --- a/benchmarks/benchmark/aggregation/participant_hist.py +++ b/benchmarks/benchmark/aggregation/participant_hist.py @@ -10,7 +10,7 @@ from benchmarks.benchmark.aggregation.plot import plot_history_data from benchmarks.benchmark.aggregation.results import GroupResult, TaskResult from benchmarks.helpers.storage import create_output_subdir, fname_with_default_dir -from xain.types import Metrics +from xain_fl.types import Metrics FLAGS = flags.FLAGS diff --git a/benchmarks/benchmark/aggregation/plot.py b/benchmarks/benchmark/aggregation/plot.py index 1eb70083e..1ed80b5ff 100644 --- a/benchmarks/benchmark/aggregation/plot.py +++ b/benchmarks/benchmark/aggregation/plot.py @@ -7,7 +7,7 @@ from numpy import ndarray from benchmarks.helpers import storage -from xain.types import PlotValues +from xain_fl.types import PlotValues matplotlib.use("AGG") diff --git a/benchmarks/benchmark/aggregation/results.py b/benchmarks/benchmark/aggregation/results.py index d0ca0d9ef..34a0eae82 100644 --- a/benchmarks/benchmark/aggregation/results.py +++ b/benchmarks/benchmark/aggregation/results.py @@ -7,7 +7,7 @@ from typing import List, Optional, cast from benchmarks.helpers import storage -from xain.types import Metrics +from xain_fl.types import Metrics class TaskResult(ABC): diff --git a/benchmarks/benchmark/aggregation/task_accuracies.py b/benchmarks/benchmark/aggregation/task_accuracies.py index 9e641a977..c76064ecb 100644 --- a/benchmarks/benchmark/aggregation/task_accuracies.py +++ b/benchmarks/benchmark/aggregation/task_accuracies.py @@ -4,7 +4,7 @@ from absl import flags, logging from benchmarks.helpers import storage -from xain.types import PlotValues +from xain_fl.types import PlotValues from .plot import plot from .results import GroupResult, TaskResult diff --git a/benchmarks/benchmark/aggregation/task_accuracies_test.py b/benchmarks/benchmark/aggregation/task_accuracies_test.py index 25a76fe9d..b9e47d2cf 100644 --- a/benchmarks/benchmark/aggregation/task_accuracies_test.py +++ b/benchmarks/benchmark/aggregation/task_accuracies_test.py @@ -2,7 +2,7 @@ import pytest -from xain.helpers import sha1 +from xain_fl.helpers import sha1 from . 
import task_accuracies diff --git a/benchmarks/benchmark/bench_ea.py b/benchmarks/benchmark/bench_ea.py index 518d69d84..f26c9d339 100644 --- a/benchmarks/benchmark/bench_ea.py +++ b/benchmarks/benchmark/bench_ea.py @@ -4,9 +4,9 @@ from benchmarks.benchmark.exec import run from benchmarks.benchmark.net import orig_cnn_compiled from benchmarks.helpers import storage -from xain.datasets import load_splits -from xain.fl.coordinator.aggregate import EvoAgg -from xain.fl.coordinator.evaluator import Evaluator +from xain_fl.datasets import load_splits +from xain_fl.fl.coordinator.aggregate import EvoAgg +from xain_fl.fl.coordinator.evaluator import Evaluator DEFAULT_R = 50 DEFAULT_E = 1 # Number of training epochs in each round diff --git a/benchmarks/benchmark/benchmark_test.py b/benchmarks/benchmark/benchmark_test.py index 4d378945a..8e8f9a2ff 100644 --- a/benchmarks/benchmark/benchmark_test.py +++ b/benchmarks/benchmark/benchmark_test.py @@ -1,5 +1,5 @@ from benchmarks.ops.run import cores -from xain.datasets.dataset import config +from xain_fl.datasets.dataset import config from .aggregation.aggregation import aggregations from .benchmark import benchmarks @@ -34,7 +34,7 @@ def test_valid_model_names(): def test_valid_dataset_names(): """ - Verify that all dataset names used in `Task` objects are available in `xain.datasets` + Verify that all dataset names used in `Task` objects are available in `xain_fl.datasets` """ # Prepare @@ -50,7 +50,7 @@ def test_valid_dataset_names(): def test_valid_instance_cores(): """ - Verify that all dataset names used in `Task` objects are available in `xain.datasets` + Verify that all dataset names used in `Task` objects are available in `xain_fl.datasets` """ # Prepare diff --git a/benchmarks/benchmark/exec/__main__.py b/benchmarks/benchmark/exec/__main__.py index 1cac37033..4b1b65dac 100644 --- a/benchmarks/benchmark/exec/__main__.py +++ b/benchmarks/benchmark/exec/__main__.py @@ -5,7 +5,7 @@ from benchmarks.helpers import storage from benchmarks.ops import results -from xain.datasets import load_splits +from xain_fl.datasets import load_splits from . 
import run diff --git a/benchmarks/benchmark/exec/run.py b/benchmarks/benchmark/exec/run.py index e3e25a88d..fe67c24cc 100644 --- a/benchmarks/benchmark/exec/run.py +++ b/benchmarks/benchmark/exec/run.py @@ -9,11 +9,11 @@ from benchmarks.benchmark.aggregation import task_accuracies from benchmarks.benchmark.net import load_lr_fn_fn, load_model_fn from benchmarks.helpers import storage -from xain.datasets import load_splits -from xain.fl.coordinator import Coordinator, RandomController -from xain.fl.coordinator.aggregate import Aggregator -from xain.fl.participant import ModelProvider, Participant -from xain.types import History, Metrics, Partition +from xain_fl.datasets import load_splits +from xain_fl.fl.coordinator import Coordinator, RandomController +from xain_fl.fl.coordinator.aggregate import Aggregator +from xain_fl.fl.participant import ModelProvider, Participant +from xain_fl.types import History, Metrics, Partition random.seed(0) np.random.seed(1) diff --git a/benchmarks/conftest.py b/benchmarks/conftest.py index 54057462a..fb4cda4d9 100644 --- a/benchmarks/conftest.py +++ b/benchmarks/conftest.py @@ -2,7 +2,7 @@ import pytest from absl import flags -from xain.types import FederatedDataset, KerasDataset +from xain_fl.types import FederatedDataset, KerasDataset FLAGS = flags.FLAGS diff --git a/benchmarks/generator/README.md b/benchmarks/generator/README.md index fe492ea3f..63559854d 100644 --- a/benchmarks/generator/README.md +++ b/benchmarks/generator/README.md @@ -1,4 +1,4 @@ # Generator Generates various datasets which will than be uploaded into -All generated datasets will be stored in (by default) ~/.xain/generator/datasets +All generated datasets will be stored in (by default) ~/.xain-fl/generator/datasets diff --git a/benchmarks/generator/__main__.py b/benchmarks/generator/__main__.py index 82fe2e282..b7a4a13a2 100644 --- a/benchmarks/generator/__main__.py +++ b/benchmarks/generator/__main__.py @@ -1,7 +1,7 @@ from absl import app, logging from benchmarks.generator import config, data, persistence -from xain.datasets import testing +from xain_fl.datasets import testing def generate_dataset(dataset_name): diff --git a/benchmarks/generator/config.py b/benchmarks/generator/config.py index 1fc96549f..4f0e5372f 100644 --- a/benchmarks/generator/config.py +++ b/benchmarks/generator/config.py @@ -6,7 +6,7 @@ from benchmarks.generator import data from benchmarks.generator import partition_volume_distributions as pvd from benchmarks.generator import transformer -from xain import config +from xain_fl import config local_generator_datasets_dir = config.storage_dir.joinpath("generator/datasets") diff --git a/benchmarks/generator/conftest.py b/benchmarks/generator/conftest.py index 541aaf51f..8a868ae37 100644 --- a/benchmarks/generator/conftest.py +++ b/benchmarks/generator/conftest.py @@ -2,7 +2,7 @@ import pytest from benchmarks.conftest import create_mock_keras_dataset -from xain.types import FederatedDataset, KerasDataset +from xain_fl.types import FederatedDataset, KerasDataset from . 
import data diff --git a/benchmarks/generator/data.py b/benchmarks/generator/data.py index 401e3f4f6..de0f2c196 100644 --- a/benchmarks/generator/data.py +++ b/benchmarks/generator/data.py @@ -3,7 +3,7 @@ import numpy as np from numpy import ndarray -from xain.types import FederatedDataset, KerasDataset +from xain_fl.types import FederatedDataset, KerasDataset from .transformer import classes_balanced_randomized_per_partition, sort_by_class diff --git a/benchmarks/generator/persistence.py b/benchmarks/generator/persistence.py index 0c6b5c794..e5598b161 100644 --- a/benchmarks/generator/persistence.py +++ b/benchmarks/generator/persistence.py @@ -14,8 +14,8 @@ from absl import logging from benchmarks.helpers import storage -from xain.helpers import sha1 -from xain.types import FederatedDataset, FnameNDArrayTuple +from xain_fl.helpers import sha1 +from xain_fl.types import FederatedDataset, FnameNDArrayTuple def save(fname: str, data: np.ndarray, storage_dir: str): diff --git a/benchmarks/generator/persistence_test.py b/benchmarks/generator/persistence_test.py index 1423fe1f6..197dfa1fc 100644 --- a/benchmarks/generator/persistence_test.py +++ b/benchmarks/generator/persistence_test.py @@ -3,8 +3,8 @@ import numpy as np import pytest -from xain.helpers import sha1 -from xain.types import FederatedDataset +from xain_fl.helpers import sha1 +from xain_fl.types import FederatedDataset from . import persistence diff --git a/benchmarks/ops/docker.py b/benchmarks/ops/docker.py index 743c09b4b..84c827f49 100644 --- a/benchmarks/ops/docker.py +++ b/benchmarks/ops/docker.py @@ -5,7 +5,7 @@ from faker import Faker from faker.providers import person -from xain.helpers import project +from xain_fl.helpers import project fake = Faker() fake.add_provider(person) diff --git a/benchmarks/ops/run.py b/benchmarks/ops/run.py index 60566ea94..24857ebd3 100644 --- a/benchmarks/ops/run.py +++ b/benchmarks/ops/run.py @@ -5,7 +5,7 @@ import boto3 from absl import flags, logging -from xain.helpers import project +from xain_fl.helpers import project from .ec2 import user_data diff --git a/docs/Makefile b/docs/Makefile index 924fa7652..b420b948b 100644 --- a/docs/Makefile +++ b/docs/Makefile @@ -15,7 +15,7 @@ help: .PHONY: help Makefile docs: - sphinx-apidoc --doc-project "Code Reference XAIN" -M -f -d 3 --tocfile index -o ./_code_reference_xain/ ../xain/ ../xain/**_test.py + sphinx-apidoc --doc-project "Code Reference XAIN" -M -f -d 3 --tocfile index -o ./_code_reference_xain_fl/ ../xain_fl/ ../xain_fl/**_test.py sphinx-apidoc --doc-project "Code Reference Benchmarks" -M -f -d 3 --tocfile index -o ./_code_reference_benchmarks/ ../benchmarks/ ../benchmarks/**_test.py make html diff --git a/docs/conf.py b/docs/conf.py index 00335ba06..883e072d6 100644 --- a/docs/conf.py +++ b/docs/conf.py @@ -18,14 +18,14 @@ # get version _version = {} -with open("../xain/__version__.py") as fp: +with open("../xain_fl/__version__.py") as fp: exec(fp.read(), _version) # -- Project information ----------------------------------------------------- -project = "XAIN" -copyright = "2019, XAIN Contributors" -author = "XAIN Contributors" +project = "XAIN FL" +copyright = "2019, XAIN FL Contributors" +author = "XAIN FL Contributors" # The major project version, used as the replacement for |version|. For example, # for the Python documentation, this may be something like 2.6. 
@@ -72,7 +72,7 @@ "logo": "brainy.svg", "github_banner": True, "github_user": "xainag", - "github_repo": "xain", + "github_repo": "xain-fl", "github_button": False, "sidebar_collapse": False, } diff --git a/docs/index.rst b/docs/index.rst index 74b670f09..b4f6bd96a 100644 --- a/docs/index.rst +++ b/docs/index.rst @@ -6,22 +6,22 @@ XAIN: Federated Learning ======================== -.. image:: https://img.shields.io/circleci/build/github/xainag/xain/master?style=flat-square - :target: https://circleci.com/gh/xainag/xain/tree/master +.. image:: https://img.shields.io/circleci/build/github/xainag/xain-fl/master?style=flat-square + :target: https://circleci.com/gh/xainag/xain-fl/tree/master .. image:: https://img.shields.io/pypi/v/xain?style=flat-square - :target: https://pypi.org/project/xain/ + :target: https://pypi.org/project/xain-fl/ -.. image:: https://img.shields.io/github/license/xainag/xain?style=flat-square - :target: https://github.com/xainag/xain/blob/master/LICENSE +.. image:: https://img.shields.io/github/license/xainag/xain-fl?style=flat-square + :target: https://github.com/xainag/xain-fl/blob/master/LICENSE -**XAIN** is an open source framework for federated learning. +**XAIN FL** is an open source framework for federated learning. .. image:: _static/home.svg -The XAIN project is building a GDPR-compliance layer for machine learning. The +The XAIN FL project is building a GDPR-compliance layer for machine learning. The approach relies on federated machine learning (FedML) as enabling technology that removes compliance-related adoption barriers of AI applications used in production. @@ -45,5 +45,5 @@ API Documentation .. toctree:: :maxdepth: 1 - _code_reference_xain/index + _code_reference_xain_fl/index _code_reference_benchmarks/index diff --git a/docs/install.md b/docs/install.md index 9aad1250b..06115ba2f 100644 --- a/docs/install.md +++ b/docs/install.md @@ -1,36 +1,37 @@ # Installation of XAIN -XAIN requires [Python 3.6+](https://python.org/). +XAIN FL requires [Python 3.6+](https://python.org/). ## Install from PyPi package -To install the `xain` package just run: +To install the `xain_fl` package just run: ```shell -$ python -m pip install xain +$ python -m pip install xain_fl ``` -XAIN can also be installed with GPU support through the `gpu` extra feature. To -install the `xain` package with support for GPUs just run: +XAIN FL can also be installed with GPU support through the `gpu` extra feature. To +install the `xain_fl` package with support for GPUs just run: ```shell -$ python -m pip install xain[gpu] +$ python -m pip install xain_fl[gpu] ``` ## Install from source For development we require some extra system dependencies: + - [clang-format 8+](https://clang.llvm.org/docs/ClangFormat.html) - Linux: `sudo apt install clang-format` - macOS: `brew install clang-format` -### Clone Repository & Install XAIN in development mode +### Clone Repository & Install XAIN FL in development mode -To clone this repository and to install the XAIN project, please execute the following commands: +To clone this repository and to install the XAIN FL project, please execute the following commands: ```shell -$ git clone https://github.com/xainag/xain.git -$ cd xain +$ git clone https://github.com/xainag/xain-fl.git +$ cd xain-fl $ python -m pip install -e .[dev] ``` @@ -47,6 +48,7 @@ $ pytest The project documentation resides under `docs/`. 
To build the documentation run: + ```shell $ cd docs/ $ make docs diff --git a/docs/quick.md b/docs/quick.md index b926cbd3d..2490796d9 100644 --- a/docs/quick.md +++ b/docs/quick.md @@ -102,7 +102,7 @@ Package encapsulates most OPS related tasks. Run a task locally ```python -from xain.ops import docker, run +from xain_fl.ops import docker, run image_name = docker.build(should_push=True) run.docker(image_name=image_name, benchmark_name="fashion-mnist-100p-iid-balanced") @@ -113,7 +113,7 @@ run.docker(image_name=image_name, benchmark_name="fashion-mnist-100p-iid-balance Run a task on EC2 ```python -from xain.ops import docker, run +from xain_fl.ops import docker, run image_name = docker.build(should_push=True) run.ec2( @@ -133,5 +133,5 @@ You can find all public methods of the package in its `api` module. **Example:** ```python -from xain.datasets.api import cifar10_random_splits_10_load_split +from xain_fl.datasets.api import cifar10_random_splits_10_load_split ``` diff --git a/examples/tensorflow_hello_world/helloworld.py b/examples/tensorflow_hello_world/helloworld.py index 41a3e22c5..9b14d24c2 100644 --- a/examples/tensorflow_hello_world/helloworld.py +++ b/examples/tensorflow_hello_world/helloworld.py @@ -5,11 +5,11 @@ from absl import app, flags from tensorflow.keras.layers import Conv2D, Dense, Flatten, Input, MaxPool2D -from xain.datasets import load_splits -from xain.fl.coordinator import Coordinator, RandomController -from xain.fl.coordinator.aggregate import FederatedAveragingAgg -from xain.fl.participant import ModelProvider, Participant -from xain.types import Partition +from xain_fl.datasets import load_splits +from xain_fl.fl.coordinator import Coordinator, RandomController +from xain_fl.fl.coordinator.aggregate import FederatedAveragingAgg +from xain_fl.fl.participant import ModelProvider, Participant +from xain_fl.types import Partition # Defining the 'task_name' flag here, to be used by the absl-py app. FLAGS = flags.FLAGS @@ -20,7 +20,7 @@ """Specifying a dataset name for this example. We will use a partitioned version of the Fashion MNIST dataset. -Please see here: https://xainag.github.io/xain/ +Please see here: https://xainag.github.io/xain-fl/ 100p means that the dataset is split into 100 partitions, which are IID Each partition represents the dataset a single client stores locally. diff --git a/protobuf/xain/grpc/coordinator.proto b/protobuf/xain_fl/grpc/coordinator.proto similarity index 97% rename from protobuf/xain/grpc/coordinator.proto rename to protobuf/xain_fl/grpc/coordinator.proto index 9c0818d3e..71445d04d 100644 --- a/protobuf/xain/grpc/coordinator.proto +++ b/protobuf/xain_fl/grpc/coordinator.proto @@ -2,7 +2,7 @@ syntax = "proto3"; import "numproto/protobuf/ndarray.proto"; -package xain.protobuf.coordinator; +package xain_fl.protobuf.coordinator; service Coordinator { rpc Rendezvous(RendezvousRequest) returns (RendezvousReply) {} diff --git a/protobuf/xain/grpc/hellonumproto.proto b/protobuf/xain_fl/grpc/hellonumproto.proto similarity index 100% rename from protobuf/xain/grpc/hellonumproto.proto rename to protobuf/xain_fl/grpc/hellonumproto.proto diff --git a/pytest.ini b/pytest.ini index 52f1f4843..f92f55666 100644 --- a/pytest.ini +++ b/pytest.ini @@ -1,5 +1,5 @@ [pytest] -testpaths = benchmarks xain +testpaths = benchmarks xain_fl markers = unmarked: every test without a marker will automatically have this marker assigned. 
You could senselessly add it manually integration: mark a integration test diff --git a/scripts/format.sh b/scripts/format.sh index 7a2e4c19d..13e2efdc1 100755 --- a/scripts/format.sh +++ b/scripts/format.sh @@ -7,6 +7,6 @@ DIR="$( cd "$( dirname "${BASH_SOURCE[0]}" )" >/dev/null 2>&1 && pwd )" cd $DIR/../ -isort --indent=4 -rc setup.py conftest.py benchmarks examples xain docs/conf.py -black --exclude "xain/grpc/.*_pb2.*" setup.py conftest.py benchmarks examples xain docs/conf.py -clang-format -style="{Language: Proto, BasedOnStyle: Google}" -i protobuf/xain/grpc/*.proto +isort --indent=4 -rc setup.py conftest.py benchmarks examples xain_fl docs/conf.py +black --exclude "xain_fl/grpc/.*_pb2.*" setup.py conftest.py benchmarks examples xain_fl docs/conf.py +clang-format -style="{Language: Proto, BasedOnStyle: Google}" -i protobuf/xain_fl/grpc/*.proto diff --git a/scripts/rm_caches.sh b/scripts/rm_caches.sh index 246ea553a..a71fb1bfe 100755 --- a/scripts/rm_caches.sh +++ b/scripts/rm_caches.sh @@ -6,3 +6,5 @@ cd $DIR/../ rm -rf .mypy_cache rm -rf .pytest_cache find . -type d -name __pycache__ -exec rm -r {} \+ +rm -rf docs/_code_reference_* +rm -rf docs/_build diff --git a/scripts/test.sh b/scripts/test.sh index 66f2fc840..94b3a53da 100755 --- a/scripts/test.sh +++ b/scripts/test.sh @@ -6,7 +6,7 @@ clang_format() { local_ret=0 - for f in ./protobuf/xain/grpc/*.proto + for f in ./protobuf/xain_fl/grpc/*.proto do echo "Processing $f" clang-format -style="{Language: Proto, BasedOnStyle: Google}" $f | diff $f - @@ -24,19 +24,19 @@ DIR="$( cd "$( dirname "${BASH_SOURCE[0]}" )" >/dev/null 2>&1 && pwd )" cd $DIR/../ # sort import -isort --check-only --indent=4 -rc setup.py conftest.py benchmarks examples xain && echo "===> isort says: well done <===" && +isort --check-only --indent=4 -rc setup.py conftest.py benchmarks examples xain_fl && echo "===> isort says: well done <===" && # format code -black --check --exclude "xain/grpc/.*_pb2.*" setup.py conftest.py benchmarks examples xain && echo "===> black says: well done <===" && +black --check --exclude "xain_fl/grpc/.*_pb2.*" setup.py conftest.py benchmarks examples xain_fl && echo "===> black says: well done <===" && # check format of proto files clang_format && echo "===> clang-format says: well done <===" && # lint -pylint --rcfile=pylint.ini benchmarks examples xain && echo "===> pylint says: well done <===" && +pylint --rcfile=pylint.ini benchmarks examples xain_fl && echo "===> pylint says: well done <===" && # type checks -mypy --ignore-missing-imports benchmarks examples/* xain && echo "===> mypy says: well done <===" && +mypy --ignore-missing-imports benchmarks examples/* xain_fl && echo "===> mypy says: well done <===" && # documentation checks (cd docs/ && SPHINXOPTS="-W" make docs) && echo "===> sphinx-build says: well done <===" && diff --git a/setup.py b/setup.py index caab05d39..5dea4dd4e 100644 --- a/setup.py +++ b/setup.py @@ -10,7 +10,7 @@ sys.exit("Please use Python version 3.6 or higher.") project_dir = os.path.dirname(os.path.abspath(__file__)) -version_file_path = os.path.join(project_dir, "xain/__version__.py") +version_file_path = os.path.join(project_dir, "xain_fl/__version__.py") readme_file_path = os.path.join(project_dir, "README.md") # get version @@ -43,7 +43,7 @@ def run(self): # get the path of grpc_tools protofiles grpc_path = grpc_tools.__path__[0] - proto_files = glob.glob("./protobuf/xain/grpc/*.proto") + proto_files = glob.glob("./protobuf/xain_fl/grpc/*.proto") command = [ "grpc_tools.protoc", # path to 
numproto .proto files @@ -109,12 +109,12 @@ def run(self): docs_require = ["Sphinx==2.2.0", "recommonmark==0.6.0", "sphinxcontrib-mermaid==0.3.1"] setup( - name="xain", + name="xain_fl", version=version["__version__"], description="XAIN is an open source framework for federated learning.", long_description=readme, long_description_content_type="text/markdown", - url="https://github.com/xainag/xain", + url="https://github.com/xainag/xain-fl", author=[ "Daniel J. Beutel ", "Taner Topal ", @@ -163,7 +163,7 @@ def run(self): entry_points={ "console_scripts": [ "train_remote=benchmarks.train_remote:main", - "pull_results=xain.ops.__main__:download", + "pull_results=xain_fl.ops.__main__:download", "aggregate=benchmarks.aggregate:main", ] }, diff --git a/xain/sdk/__init__.py b/xain/sdk/__init__.py deleted file mode 100644 index 58a6177f1..000000000 --- a/xain/sdk/__init__.py +++ /dev/null @@ -1 +0,0 @@ -"""Provides xain package SDK""" diff --git a/xain/CONFIG.md b/xain_fl/CONFIG.md similarity index 88% rename from xain/CONFIG.md rename to xain_fl/CONFIG.md index 9394fd232..468ad3279 100644 --- a/xain/CONFIG.md +++ b/xain_fl/CONFIG.md @@ -1,14 +1,17 @@ # Configuration + Currently configuration happens two fold. ## `config.cfg` -On the initial run of the library the `xain.config.init_config()` function -will be executed and initialize a `config.cfg` file in the projects root directory. + +On the initial run of the library the `xain_fl.config.init_config()` function +will be executed and initialize a `config.cfg` file in the projects root directory. If the user wants to run benchmarks and wants results to be uploaded to S3 he will need to replace the `ACCESSIBLE_S3_BUCKET_FOR_RESULTS_TO_BE_UPLOADED` string in the `config.cfg` with an actual S3 bucket name. The bucket should be accessible with the currently active AWS credentials (either default credentials or the currently set AWS_PROFILE). ## abseil.io flags + The second form of configuration happens via abseil.io flags. All entry points are configured as abseil.io apps which accept various flags. To find out what valid flag exist one can run any entry point with the `--helpfull` flag. 
For example diff --git a/xain/__init__.py b/xain_fl/__init__.py similarity index 82% rename from xain/__init__.py rename to xain_fl/__init__.py index d3bf36418..e762f6393 100644 --- a/xain/__init__.py +++ b/xain_fl/__init__.py @@ -2,12 +2,12 @@ from absl import flags -import xain.config +import xain_fl.config -if not xain.config.check_config_file_exists(): - xain.config.init_config() +if not xain_fl.config.check_config_file_exists(): + xain_fl.config.init_config() -c = xain.config.load() +c = xain_fl.config.load() # following: https://abseil.io/docs/cpp/guides/flags#flags-best-practices # we will define our flags in this file diff --git a/xain/__version__.py b/xain_fl/__version__.py similarity index 100% rename from xain/__version__.py rename to xain_fl/__version__.py diff --git a/xain/config.py b/xain_fl/config.py similarity index 93% rename from xain/config.py rename to xain_fl/config.py index 76d193508..fc19a482b 100644 --- a/xain/config.py +++ b/xain_fl/config.py @@ -3,11 +3,11 @@ from functools import lru_cache from pathlib import Path -from xain.helpers import project -from xain.logger import get_logger +from xain_fl.helpers import project +from xain_fl.logger import get_logger # Storage dir for bigger files like the datasets -storage_dir = Path.home().joinpath(".xain") +storage_dir = Path.home().joinpath(".xain-fl") datasets_dir_default = storage_dir.joinpath("datasets") # Local outputs and remote results diff --git a/xain/conftest.py b/xain_fl/conftest.py similarity index 97% rename from xain/conftest.py rename to xain_fl/conftest.py index 54057462a..fb4cda4d9 100644 --- a/xain/conftest.py +++ b/xain_fl/conftest.py @@ -2,7 +2,7 @@ import pytest from absl import flags -from xain.types import FederatedDataset, KerasDataset +from xain_fl.types import FederatedDataset, KerasDataset FLAGS = flags.FLAGS diff --git a/xain/datasets/__init__.py b/xain_fl/datasets/__init__.py similarity index 79% rename from xain/datasets/__init__.py rename to xain_fl/datasets/__init__.py index cd737c525..6f86323de 100644 --- a/xain/datasets/__init__.py +++ b/xain_fl/datasets/__init__.py @@ -2,18 +2,18 @@ from absl import flags -import xain.config +import xain_fl.config from .dataset import load_splits -c = xain.config.load() +c = xain_fl.config.load() FLAGS = flags.FLAGS flags.DEFINE_string( "local_datasets_dir", c.get("Path", "local_datasets_dir"), - "Local directory to store datasets in. Usually ~/.xain/datasets", + "Local directory to store datasets in. Usually ~/.xain-fl/datasets", ) flags.DEFINE_string( "datasets_repository", diff --git a/xain/datasets/conftest.py b/xain_fl/datasets/conftest.py similarity index 100% rename from xain/datasets/conftest.py rename to xain_fl/datasets/conftest.py diff --git a/xain/datasets/dataset.py b/xain_fl/datasets/dataset.py similarity index 95% rename from xain/datasets/dataset.py rename to xain_fl/datasets/dataset.py index 970414d0b..66325144c 100644 --- a/xain/datasets/dataset.py +++ b/xain_fl/datasets/dataset.py @@ -1,8 +1,8 @@ import tensorflow as tf from absl import flags -from xain.datasets import storage -from xain.types import FederatedDataset +from xain_fl.datasets import storage +from xain_fl.types import FederatedDataset FLAGS = flags.FLAGS @@ -62,7 +62,7 @@ def load_splits( Args: dataset_name (str): Name of dataset to be loaded. 
Valid names can be found - in xain.datasets.dataset.config dict + in xain_fl.datasets.dataset.config dict get_local_datasets_dir (Callable): Function which returns the local_datasets_dir Returns: diff --git a/xain/datasets/dataset_test.py b/xain_fl/datasets/dataset_test.py similarity index 100% rename from xain/datasets/dataset_test.py rename to xain_fl/datasets/dataset_test.py diff --git a/xain/datasets/hashes/README.md b/xain_fl/datasets/hashes/README.md similarity index 100% rename from xain/datasets/hashes/README.md rename to xain_fl/datasets/hashes/README.md diff --git a/xain/datasets/hashes/__init__.py b/xain_fl/datasets/hashes/__init__.py similarity index 100% rename from xain/datasets/hashes/__init__.py rename to xain_fl/datasets/hashes/__init__.py diff --git a/xain/datasets/hashes/cifar-10-100p-b1_000.json b/xain_fl/datasets/hashes/cifar-10-100p-b1_000.json similarity index 100% rename from xain/datasets/hashes/cifar-10-100p-b1_000.json rename to xain_fl/datasets/hashes/cifar-10-100p-b1_000.json diff --git a/xain/datasets/hashes/cifar-10-100p-b1_005.json b/xain_fl/datasets/hashes/cifar-10-100p-b1_005.json similarity index 100% rename from xain/datasets/hashes/cifar-10-100p-b1_005.json rename to xain_fl/datasets/hashes/cifar-10-100p-b1_005.json diff --git a/xain/datasets/hashes/cifar-10-100p-b1_010.json b/xain_fl/datasets/hashes/cifar-10-100p-b1_010.json similarity index 100% rename from xain/datasets/hashes/cifar-10-100p-b1_010.json rename to xain_fl/datasets/hashes/cifar-10-100p-b1_010.json diff --git a/xain/datasets/hashes/cifar-10-100p-b1_015.json b/xain_fl/datasets/hashes/cifar-10-100p-b1_015.json similarity index 100% rename from xain/datasets/hashes/cifar-10-100p-b1_015.json rename to xain_fl/datasets/hashes/cifar-10-100p-b1_015.json diff --git a/xain/datasets/hashes/cifar-10-100p-b1_020.json b/xain_fl/datasets/hashes/cifar-10-100p-b1_020.json similarity index 100% rename from xain/datasets/hashes/cifar-10-100p-b1_020.json rename to xain_fl/datasets/hashes/cifar-10-100p-b1_020.json diff --git a/xain/datasets/hashes/cifar-10-100p-b1_025.json b/xain_fl/datasets/hashes/cifar-10-100p-b1_025.json similarity index 100% rename from xain/datasets/hashes/cifar-10-100p-b1_025.json rename to xain_fl/datasets/hashes/cifar-10-100p-b1_025.json diff --git a/xain/datasets/hashes/cifar-10-100p-b1_030.json b/xain_fl/datasets/hashes/cifar-10-100p-b1_030.json similarity index 100% rename from xain/datasets/hashes/cifar-10-100p-b1_030.json rename to xain_fl/datasets/hashes/cifar-10-100p-b1_030.json diff --git a/xain/datasets/hashes/cifar-10-100p-b1_035.json b/xain_fl/datasets/hashes/cifar-10-100p-b1_035.json similarity index 100% rename from xain/datasets/hashes/cifar-10-100p-b1_035.json rename to xain_fl/datasets/hashes/cifar-10-100p-b1_035.json diff --git a/xain/datasets/hashes/cifar-10-100p-b1_040.json b/xain_fl/datasets/hashes/cifar-10-100p-b1_040.json similarity index 100% rename from xain/datasets/hashes/cifar-10-100p-b1_040.json rename to xain_fl/datasets/hashes/cifar-10-100p-b1_040.json diff --git a/xain/datasets/hashes/cifar-10-100p-b1_045.json b/xain_fl/datasets/hashes/cifar-10-100p-b1_045.json similarity index 100% rename from xain/datasets/hashes/cifar-10-100p-b1_045.json rename to xain_fl/datasets/hashes/cifar-10-100p-b1_045.json diff --git a/xain/datasets/hashes/cifar-10-100p-iid-balanced.json b/xain_fl/datasets/hashes/cifar-10-100p-iid-balanced.json similarity index 100% rename from xain/datasets/hashes/cifar-10-100p-iid-balanced.json rename to 
xain_fl/datasets/hashes/cifar-10-100p-iid-balanced.json diff --git a/xain/datasets/hashes/cifar-10-100p-noniid-01cpp.json b/xain_fl/datasets/hashes/cifar-10-100p-noniid-01cpp.json similarity index 100% rename from xain/datasets/hashes/cifar-10-100p-noniid-01cpp.json rename to xain_fl/datasets/hashes/cifar-10-100p-noniid-01cpp.json diff --git a/xain/datasets/hashes/cifar-10-100p-noniid-02cpp.json b/xain_fl/datasets/hashes/cifar-10-100p-noniid-02cpp.json similarity index 100% rename from xain/datasets/hashes/cifar-10-100p-noniid-02cpp.json rename to xain_fl/datasets/hashes/cifar-10-100p-noniid-02cpp.json diff --git a/xain/datasets/hashes/cifar-10-100p-noniid-03cpp.json b/xain_fl/datasets/hashes/cifar-10-100p-noniid-03cpp.json similarity index 100% rename from xain/datasets/hashes/cifar-10-100p-noniid-03cpp.json rename to xain_fl/datasets/hashes/cifar-10-100p-noniid-03cpp.json diff --git a/xain/datasets/hashes/cifar-10-100p-noniid-04cpp.json b/xain_fl/datasets/hashes/cifar-10-100p-noniid-04cpp.json similarity index 100% rename from xain/datasets/hashes/cifar-10-100p-noniid-04cpp.json rename to xain_fl/datasets/hashes/cifar-10-100p-noniid-04cpp.json diff --git a/xain/datasets/hashes/cifar-10-100p-noniid-05cpp.json b/xain_fl/datasets/hashes/cifar-10-100p-noniid-05cpp.json similarity index 100% rename from xain/datasets/hashes/cifar-10-100p-noniid-05cpp.json rename to xain_fl/datasets/hashes/cifar-10-100p-noniid-05cpp.json diff --git a/xain/datasets/hashes/cifar-10-100p-noniid-06cpp.json b/xain_fl/datasets/hashes/cifar-10-100p-noniid-06cpp.json similarity index 100% rename from xain/datasets/hashes/cifar-10-100p-noniid-06cpp.json rename to xain_fl/datasets/hashes/cifar-10-100p-noniid-06cpp.json diff --git a/xain/datasets/hashes/cifar-10-100p-noniid-07cpp.json b/xain_fl/datasets/hashes/cifar-10-100p-noniid-07cpp.json similarity index 100% rename from xain/datasets/hashes/cifar-10-100p-noniid-07cpp.json rename to xain_fl/datasets/hashes/cifar-10-100p-noniid-07cpp.json diff --git a/xain/datasets/hashes/cifar-10-100p-noniid-08cpp.json b/xain_fl/datasets/hashes/cifar-10-100p-noniid-08cpp.json similarity index 100% rename from xain/datasets/hashes/cifar-10-100p-noniid-08cpp.json rename to xain_fl/datasets/hashes/cifar-10-100p-noniid-08cpp.json diff --git a/xain/datasets/hashes/cifar-10-100p-noniid-09cpp.json b/xain_fl/datasets/hashes/cifar-10-100p-noniid-09cpp.json similarity index 100% rename from xain/datasets/hashes/cifar-10-100p-noniid-09cpp.json rename to xain_fl/datasets/hashes/cifar-10-100p-noniid-09cpp.json diff --git a/xain/datasets/hashes/fashion-mnist-100p-b1_000.json b/xain_fl/datasets/hashes/fashion-mnist-100p-b1_000.json similarity index 100% rename from xain/datasets/hashes/fashion-mnist-100p-b1_000.json rename to xain_fl/datasets/hashes/fashion-mnist-100p-b1_000.json diff --git a/xain/datasets/hashes/fashion-mnist-100p-b1_005.json b/xain_fl/datasets/hashes/fashion-mnist-100p-b1_005.json similarity index 100% rename from xain/datasets/hashes/fashion-mnist-100p-b1_005.json rename to xain_fl/datasets/hashes/fashion-mnist-100p-b1_005.json diff --git a/xain/datasets/hashes/fashion-mnist-100p-b1_010.json b/xain_fl/datasets/hashes/fashion-mnist-100p-b1_010.json similarity index 100% rename from xain/datasets/hashes/fashion-mnist-100p-b1_010.json rename to xain_fl/datasets/hashes/fashion-mnist-100p-b1_010.json diff --git a/xain/datasets/hashes/fashion-mnist-100p-b1_015.json b/xain_fl/datasets/hashes/fashion-mnist-100p-b1_015.json similarity index 100% rename from 
xain/datasets/hashes/fashion-mnist-100p-b1_015.json rename to xain_fl/datasets/hashes/fashion-mnist-100p-b1_015.json diff --git a/xain/datasets/hashes/fashion-mnist-100p-b1_020.json b/xain_fl/datasets/hashes/fashion-mnist-100p-b1_020.json similarity index 100% rename from xain/datasets/hashes/fashion-mnist-100p-b1_020.json rename to xain_fl/datasets/hashes/fashion-mnist-100p-b1_020.json diff --git a/xain/datasets/hashes/fashion-mnist-100p-b1_025.json b/xain_fl/datasets/hashes/fashion-mnist-100p-b1_025.json similarity index 100% rename from xain/datasets/hashes/fashion-mnist-100p-b1_025.json rename to xain_fl/datasets/hashes/fashion-mnist-100p-b1_025.json diff --git a/xain/datasets/hashes/fashion-mnist-100p-b1_030.json b/xain_fl/datasets/hashes/fashion-mnist-100p-b1_030.json similarity index 100% rename from xain/datasets/hashes/fashion-mnist-100p-b1_030.json rename to xain_fl/datasets/hashes/fashion-mnist-100p-b1_030.json diff --git a/xain/datasets/hashes/fashion-mnist-100p-b1_035.json b/xain_fl/datasets/hashes/fashion-mnist-100p-b1_035.json similarity index 100% rename from xain/datasets/hashes/fashion-mnist-100p-b1_035.json rename to xain_fl/datasets/hashes/fashion-mnist-100p-b1_035.json diff --git a/xain/datasets/hashes/fashion-mnist-100p-b1_040.json b/xain_fl/datasets/hashes/fashion-mnist-100p-b1_040.json similarity index 100% rename from xain/datasets/hashes/fashion-mnist-100p-b1_040.json rename to xain_fl/datasets/hashes/fashion-mnist-100p-b1_040.json diff --git a/xain/datasets/hashes/fashion-mnist-100p-b1_045.json b/xain_fl/datasets/hashes/fashion-mnist-100p-b1_045.json similarity index 100% rename from xain/datasets/hashes/fashion-mnist-100p-b1_045.json rename to xain_fl/datasets/hashes/fashion-mnist-100p-b1_045.json diff --git a/xain/datasets/hashes/fashion-mnist-100p-iid-balanced.json b/xain_fl/datasets/hashes/fashion-mnist-100p-iid-balanced.json similarity index 100% rename from xain/datasets/hashes/fashion-mnist-100p-iid-balanced.json rename to xain_fl/datasets/hashes/fashion-mnist-100p-iid-balanced.json diff --git a/xain/datasets/hashes/fashion-mnist-100p-noniid-01cpp.json b/xain_fl/datasets/hashes/fashion-mnist-100p-noniid-01cpp.json similarity index 100% rename from xain/datasets/hashes/fashion-mnist-100p-noniid-01cpp.json rename to xain_fl/datasets/hashes/fashion-mnist-100p-noniid-01cpp.json diff --git a/xain/datasets/hashes/fashion-mnist-100p-noniid-02cpp.json b/xain_fl/datasets/hashes/fashion-mnist-100p-noniid-02cpp.json similarity index 100% rename from xain/datasets/hashes/fashion-mnist-100p-noniid-02cpp.json rename to xain_fl/datasets/hashes/fashion-mnist-100p-noniid-02cpp.json diff --git a/xain/datasets/hashes/fashion-mnist-100p-noniid-03cpp.json b/xain_fl/datasets/hashes/fashion-mnist-100p-noniid-03cpp.json similarity index 100% rename from xain/datasets/hashes/fashion-mnist-100p-noniid-03cpp.json rename to xain_fl/datasets/hashes/fashion-mnist-100p-noniid-03cpp.json diff --git a/xain/datasets/hashes/fashion-mnist-100p-noniid-04cpp.json b/xain_fl/datasets/hashes/fashion-mnist-100p-noniid-04cpp.json similarity index 100% rename from xain/datasets/hashes/fashion-mnist-100p-noniid-04cpp.json rename to xain_fl/datasets/hashes/fashion-mnist-100p-noniid-04cpp.json diff --git a/xain/datasets/hashes/fashion-mnist-100p-noniid-05cpp.json b/xain_fl/datasets/hashes/fashion-mnist-100p-noniid-05cpp.json similarity index 100% rename from xain/datasets/hashes/fashion-mnist-100p-noniid-05cpp.json rename to xain_fl/datasets/hashes/fashion-mnist-100p-noniid-05cpp.json diff --git 
a/xain/datasets/hashes/fashion-mnist-100p-noniid-06cpp.json b/xain_fl/datasets/hashes/fashion-mnist-100p-noniid-06cpp.json similarity index 100% rename from xain/datasets/hashes/fashion-mnist-100p-noniid-06cpp.json rename to xain_fl/datasets/hashes/fashion-mnist-100p-noniid-06cpp.json diff --git a/xain/datasets/hashes/fashion-mnist-100p-noniid-07cpp.json b/xain_fl/datasets/hashes/fashion-mnist-100p-noniid-07cpp.json similarity index 100% rename from xain/datasets/hashes/fashion-mnist-100p-noniid-07cpp.json rename to xain_fl/datasets/hashes/fashion-mnist-100p-noniid-07cpp.json diff --git a/xain/datasets/hashes/fashion-mnist-100p-noniid-08cpp.json b/xain_fl/datasets/hashes/fashion-mnist-100p-noniid-08cpp.json similarity index 100% rename from xain/datasets/hashes/fashion-mnist-100p-noniid-08cpp.json rename to xain_fl/datasets/hashes/fashion-mnist-100p-noniid-08cpp.json diff --git a/xain/datasets/hashes/fashion-mnist-100p-noniid-09cpp.json b/xain_fl/datasets/hashes/fashion-mnist-100p-noniid-09cpp.json similarity index 100% rename from xain/datasets/hashes/fashion-mnist-100p-noniid-09cpp.json rename to xain_fl/datasets/hashes/fashion-mnist-100p-noniid-09cpp.json diff --git a/xain/datasets/hashes/load.py b/xain_fl/datasets/hashes/load.py similarity index 100% rename from xain/datasets/hashes/load.py rename to xain_fl/datasets/hashes/load.py diff --git a/xain/datasets/hashes/load_test.py b/xain_fl/datasets/hashes/load_test.py similarity index 87% rename from xain/datasets/hashes/load_test.py rename to xain_fl/datasets/hashes/load_test.py index 85553d756..483bc13a0 100644 --- a/xain/datasets/hashes/load_test.py +++ b/xain_fl/datasets/hashes/load_test.py @@ -1,4 +1,4 @@ -from xain.datasets.hashes import load +from xain_fl.datasets.hashes import load def test_load_hashes(): diff --git a/xain/datasets/prep.py b/xain_fl/datasets/prep.py similarity index 99% rename from xain/datasets/prep.py rename to xain_fl/datasets/prep.py index b279ff811..05b599eb4 100644 --- a/xain/datasets/prep.py +++ b/xain_fl/datasets/prep.py @@ -2,7 +2,7 @@ import tensorflow as tf from tensorflow.data import Dataset -from xain.types import Partition +from xain_fl.types import Partition AUTOTUNE = tf.data.experimental.AUTOTUNE SEED = 2017 diff --git a/xain/datasets/prep_test.py b/xain_fl/datasets/prep_test.py similarity index 100% rename from xain/datasets/prep_test.py rename to xain_fl/datasets/prep_test.py diff --git a/xain/datasets/stats/__init__.py b/xain_fl/datasets/stats/__init__.py similarity index 100% rename from xain/datasets/stats/__init__.py rename to xain_fl/datasets/stats/__init__.py diff --git a/xain/datasets/stats/__main__.py b/xain_fl/datasets/stats/__main__.py similarity index 80% rename from xain/datasets/stats/__main__.py rename to xain_fl/datasets/stats/__main__.py index dacf54340..dada552b4 100644 --- a/xain/datasets/stats/__main__.py +++ b/xain_fl/datasets/stats/__main__.py @@ -2,7 +2,7 @@ from absl import app -from xain.datasets.dataset import config, load_splits +from xain_fl.datasets.dataset import config, load_splits from .stats import DSStats @@ -16,7 +16,7 @@ def main(_): with open(fname, "w") as f: s = DSStats(name=dataset_name, ds=load_splits(dataset_name)).__repr__() - # Don't log with xain.helper.logger as repl in DSStats expects to + # Don't log with xain_fl.logger as repl in DSStats expects to # be printed with print print(s) diff --git a/xain/datasets/stats/datasets/cifar-10-100p-b1_000.txt b/xain_fl/datasets/stats/datasets/cifar-10-100p-b1_000.txt similarity index 100% rename from 
xain/datasets/stats/datasets/cifar-10-100p-b1_000.txt rename to xain_fl/datasets/stats/datasets/cifar-10-100p-b1_000.txt diff --git a/xain/datasets/stats/datasets/cifar-10-100p-b1_005.txt b/xain_fl/datasets/stats/datasets/cifar-10-100p-b1_005.txt similarity index 100% rename from xain/datasets/stats/datasets/cifar-10-100p-b1_005.txt rename to xain_fl/datasets/stats/datasets/cifar-10-100p-b1_005.txt diff --git a/xain/datasets/stats/datasets/cifar-10-100p-b1_010.txt b/xain_fl/datasets/stats/datasets/cifar-10-100p-b1_010.txt similarity index 100% rename from xain/datasets/stats/datasets/cifar-10-100p-b1_010.txt rename to xain_fl/datasets/stats/datasets/cifar-10-100p-b1_010.txt diff --git a/xain/datasets/stats/datasets/cifar-10-100p-b1_015.txt b/xain_fl/datasets/stats/datasets/cifar-10-100p-b1_015.txt similarity index 100% rename from xain/datasets/stats/datasets/cifar-10-100p-b1_015.txt rename to xain_fl/datasets/stats/datasets/cifar-10-100p-b1_015.txt diff --git a/xain/datasets/stats/datasets/cifar-10-100p-b1_020.txt b/xain_fl/datasets/stats/datasets/cifar-10-100p-b1_020.txt similarity index 100% rename from xain/datasets/stats/datasets/cifar-10-100p-b1_020.txt rename to xain_fl/datasets/stats/datasets/cifar-10-100p-b1_020.txt diff --git a/xain/datasets/stats/datasets/cifar-10-100p-b1_025.txt b/xain_fl/datasets/stats/datasets/cifar-10-100p-b1_025.txt similarity index 100% rename from xain/datasets/stats/datasets/cifar-10-100p-b1_025.txt rename to xain_fl/datasets/stats/datasets/cifar-10-100p-b1_025.txt diff --git a/xain/datasets/stats/datasets/cifar-10-100p-b1_030.txt b/xain_fl/datasets/stats/datasets/cifar-10-100p-b1_030.txt similarity index 100% rename from xain/datasets/stats/datasets/cifar-10-100p-b1_030.txt rename to xain_fl/datasets/stats/datasets/cifar-10-100p-b1_030.txt diff --git a/xain/datasets/stats/datasets/cifar-10-100p-b1_035.txt b/xain_fl/datasets/stats/datasets/cifar-10-100p-b1_035.txt similarity index 100% rename from xain/datasets/stats/datasets/cifar-10-100p-b1_035.txt rename to xain_fl/datasets/stats/datasets/cifar-10-100p-b1_035.txt diff --git a/xain/datasets/stats/datasets/cifar-10-100p-b1_040.txt b/xain_fl/datasets/stats/datasets/cifar-10-100p-b1_040.txt similarity index 100% rename from xain/datasets/stats/datasets/cifar-10-100p-b1_040.txt rename to xain_fl/datasets/stats/datasets/cifar-10-100p-b1_040.txt diff --git a/xain/datasets/stats/datasets/cifar-10-100p-b1_045.txt b/xain_fl/datasets/stats/datasets/cifar-10-100p-b1_045.txt similarity index 100% rename from xain/datasets/stats/datasets/cifar-10-100p-b1_045.txt rename to xain_fl/datasets/stats/datasets/cifar-10-100p-b1_045.txt diff --git a/xain/datasets/stats/datasets/cifar-10-100p-iid-balanced.txt b/xain_fl/datasets/stats/datasets/cifar-10-100p-iid-balanced.txt similarity index 100% rename from xain/datasets/stats/datasets/cifar-10-100p-iid-balanced.txt rename to xain_fl/datasets/stats/datasets/cifar-10-100p-iid-balanced.txt diff --git a/xain/datasets/stats/datasets/cifar-10-100p-noniid-01cpp.txt b/xain_fl/datasets/stats/datasets/cifar-10-100p-noniid-01cpp.txt similarity index 100% rename from xain/datasets/stats/datasets/cifar-10-100p-noniid-01cpp.txt rename to xain_fl/datasets/stats/datasets/cifar-10-100p-noniid-01cpp.txt diff --git a/xain/datasets/stats/datasets/cifar-10-100p-noniid-02cpp.txt b/xain_fl/datasets/stats/datasets/cifar-10-100p-noniid-02cpp.txt similarity index 100% rename from xain/datasets/stats/datasets/cifar-10-100p-noniid-02cpp.txt rename to 
xain_fl/datasets/stats/datasets/cifar-10-100p-noniid-02cpp.txt diff --git a/xain/datasets/stats/datasets/cifar-10-100p-noniid-03cpp.txt b/xain_fl/datasets/stats/datasets/cifar-10-100p-noniid-03cpp.txt similarity index 100% rename from xain/datasets/stats/datasets/cifar-10-100p-noniid-03cpp.txt rename to xain_fl/datasets/stats/datasets/cifar-10-100p-noniid-03cpp.txt diff --git a/xain/datasets/stats/datasets/cifar-10-100p-noniid-04cpp.txt b/xain_fl/datasets/stats/datasets/cifar-10-100p-noniid-04cpp.txt similarity index 100% rename from xain/datasets/stats/datasets/cifar-10-100p-noniid-04cpp.txt rename to xain_fl/datasets/stats/datasets/cifar-10-100p-noniid-04cpp.txt diff --git a/xain/datasets/stats/datasets/cifar-10-100p-noniid-05cpp.txt b/xain_fl/datasets/stats/datasets/cifar-10-100p-noniid-05cpp.txt similarity index 100% rename from xain/datasets/stats/datasets/cifar-10-100p-noniid-05cpp.txt rename to xain_fl/datasets/stats/datasets/cifar-10-100p-noniid-05cpp.txt diff --git a/xain/datasets/stats/datasets/cifar-10-100p-noniid-06cpp.txt b/xain_fl/datasets/stats/datasets/cifar-10-100p-noniid-06cpp.txt similarity index 100% rename from xain/datasets/stats/datasets/cifar-10-100p-noniid-06cpp.txt rename to xain_fl/datasets/stats/datasets/cifar-10-100p-noniid-06cpp.txt diff --git a/xain/datasets/stats/datasets/cifar-10-100p-noniid-07cpp.txt b/xain_fl/datasets/stats/datasets/cifar-10-100p-noniid-07cpp.txt similarity index 100% rename from xain/datasets/stats/datasets/cifar-10-100p-noniid-07cpp.txt rename to xain_fl/datasets/stats/datasets/cifar-10-100p-noniid-07cpp.txt diff --git a/xain/datasets/stats/datasets/cifar-10-100p-noniid-08cpp.txt b/xain_fl/datasets/stats/datasets/cifar-10-100p-noniid-08cpp.txt similarity index 100% rename from xain/datasets/stats/datasets/cifar-10-100p-noniid-08cpp.txt rename to xain_fl/datasets/stats/datasets/cifar-10-100p-noniid-08cpp.txt diff --git a/xain/datasets/stats/datasets/cifar-10-100p-noniid-09cpp.txt b/xain_fl/datasets/stats/datasets/cifar-10-100p-noniid-09cpp.txt similarity index 100% rename from xain/datasets/stats/datasets/cifar-10-100p-noniid-09cpp.txt rename to xain_fl/datasets/stats/datasets/cifar-10-100p-noniid-09cpp.txt diff --git a/xain/datasets/stats/datasets/fashion-mnist-100p-b1_000.txt b/xain_fl/datasets/stats/datasets/fashion-mnist-100p-b1_000.txt similarity index 100% rename from xain/datasets/stats/datasets/fashion-mnist-100p-b1_000.txt rename to xain_fl/datasets/stats/datasets/fashion-mnist-100p-b1_000.txt diff --git a/xain/datasets/stats/datasets/fashion-mnist-100p-b1_005.txt b/xain_fl/datasets/stats/datasets/fashion-mnist-100p-b1_005.txt similarity index 100% rename from xain/datasets/stats/datasets/fashion-mnist-100p-b1_005.txt rename to xain_fl/datasets/stats/datasets/fashion-mnist-100p-b1_005.txt diff --git a/xain/datasets/stats/datasets/fashion-mnist-100p-b1_010.txt b/xain_fl/datasets/stats/datasets/fashion-mnist-100p-b1_010.txt similarity index 100% rename from xain/datasets/stats/datasets/fashion-mnist-100p-b1_010.txt rename to xain_fl/datasets/stats/datasets/fashion-mnist-100p-b1_010.txt diff --git a/xain/datasets/stats/datasets/fashion-mnist-100p-b1_015.txt b/xain_fl/datasets/stats/datasets/fashion-mnist-100p-b1_015.txt similarity index 100% rename from xain/datasets/stats/datasets/fashion-mnist-100p-b1_015.txt rename to xain_fl/datasets/stats/datasets/fashion-mnist-100p-b1_015.txt diff --git a/xain/datasets/stats/datasets/fashion-mnist-100p-b1_020.txt b/xain_fl/datasets/stats/datasets/fashion-mnist-100p-b1_020.txt similarity index 
100% rename from xain/datasets/stats/datasets/fashion-mnist-100p-b1_020.txt rename to xain_fl/datasets/stats/datasets/fashion-mnist-100p-b1_020.txt diff --git a/xain/datasets/stats/datasets/fashion-mnist-100p-b1_025.txt b/xain_fl/datasets/stats/datasets/fashion-mnist-100p-b1_025.txt similarity index 100% rename from xain/datasets/stats/datasets/fashion-mnist-100p-b1_025.txt rename to xain_fl/datasets/stats/datasets/fashion-mnist-100p-b1_025.txt diff --git a/xain/datasets/stats/datasets/fashion-mnist-100p-b1_030.txt b/xain_fl/datasets/stats/datasets/fashion-mnist-100p-b1_030.txt similarity index 100% rename from xain/datasets/stats/datasets/fashion-mnist-100p-b1_030.txt rename to xain_fl/datasets/stats/datasets/fashion-mnist-100p-b1_030.txt diff --git a/xain/datasets/stats/datasets/fashion-mnist-100p-b1_035.txt b/xain_fl/datasets/stats/datasets/fashion-mnist-100p-b1_035.txt similarity index 100% rename from xain/datasets/stats/datasets/fashion-mnist-100p-b1_035.txt rename to xain_fl/datasets/stats/datasets/fashion-mnist-100p-b1_035.txt diff --git a/xain/datasets/stats/datasets/fashion-mnist-100p-b1_040.txt b/xain_fl/datasets/stats/datasets/fashion-mnist-100p-b1_040.txt similarity index 100% rename from xain/datasets/stats/datasets/fashion-mnist-100p-b1_040.txt rename to xain_fl/datasets/stats/datasets/fashion-mnist-100p-b1_040.txt diff --git a/xain/datasets/stats/datasets/fashion-mnist-100p-b1_045.txt b/xain_fl/datasets/stats/datasets/fashion-mnist-100p-b1_045.txt similarity index 100% rename from xain/datasets/stats/datasets/fashion-mnist-100p-b1_045.txt rename to xain_fl/datasets/stats/datasets/fashion-mnist-100p-b1_045.txt diff --git a/xain/datasets/stats/datasets/fashion-mnist-100p-iid-balanced.txt b/xain_fl/datasets/stats/datasets/fashion-mnist-100p-iid-balanced.txt similarity index 100% rename from xain/datasets/stats/datasets/fashion-mnist-100p-iid-balanced.txt rename to xain_fl/datasets/stats/datasets/fashion-mnist-100p-iid-balanced.txt diff --git a/xain/datasets/stats/datasets/fashion-mnist-100p-noniid-01cpp.txt b/xain_fl/datasets/stats/datasets/fashion-mnist-100p-noniid-01cpp.txt similarity index 100% rename from xain/datasets/stats/datasets/fashion-mnist-100p-noniid-01cpp.txt rename to xain_fl/datasets/stats/datasets/fashion-mnist-100p-noniid-01cpp.txt diff --git a/xain/datasets/stats/datasets/fashion-mnist-100p-noniid-02cpp.txt b/xain_fl/datasets/stats/datasets/fashion-mnist-100p-noniid-02cpp.txt similarity index 100% rename from xain/datasets/stats/datasets/fashion-mnist-100p-noniid-02cpp.txt rename to xain_fl/datasets/stats/datasets/fashion-mnist-100p-noniid-02cpp.txt diff --git a/xain/datasets/stats/datasets/fashion-mnist-100p-noniid-03cpp.txt b/xain_fl/datasets/stats/datasets/fashion-mnist-100p-noniid-03cpp.txt similarity index 100% rename from xain/datasets/stats/datasets/fashion-mnist-100p-noniid-03cpp.txt rename to xain_fl/datasets/stats/datasets/fashion-mnist-100p-noniid-03cpp.txt diff --git a/xain/datasets/stats/datasets/fashion-mnist-100p-noniid-04cpp.txt b/xain_fl/datasets/stats/datasets/fashion-mnist-100p-noniid-04cpp.txt similarity index 100% rename from xain/datasets/stats/datasets/fashion-mnist-100p-noniid-04cpp.txt rename to xain_fl/datasets/stats/datasets/fashion-mnist-100p-noniid-04cpp.txt diff --git a/xain/datasets/stats/datasets/fashion-mnist-100p-noniid-05cpp.txt b/xain_fl/datasets/stats/datasets/fashion-mnist-100p-noniid-05cpp.txt similarity index 100% rename from xain/datasets/stats/datasets/fashion-mnist-100p-noniid-05cpp.txt rename to 
xain_fl/datasets/stats/datasets/fashion-mnist-100p-noniid-05cpp.txt diff --git a/xain/datasets/stats/datasets/fashion-mnist-100p-noniid-06cpp.txt b/xain_fl/datasets/stats/datasets/fashion-mnist-100p-noniid-06cpp.txt similarity index 100% rename from xain/datasets/stats/datasets/fashion-mnist-100p-noniid-06cpp.txt rename to xain_fl/datasets/stats/datasets/fashion-mnist-100p-noniid-06cpp.txt diff --git a/xain/datasets/stats/datasets/fashion-mnist-100p-noniid-07cpp.txt b/xain_fl/datasets/stats/datasets/fashion-mnist-100p-noniid-07cpp.txt similarity index 100% rename from xain/datasets/stats/datasets/fashion-mnist-100p-noniid-07cpp.txt rename to xain_fl/datasets/stats/datasets/fashion-mnist-100p-noniid-07cpp.txt diff --git a/xain/datasets/stats/datasets/fashion-mnist-100p-noniid-08cpp.txt b/xain_fl/datasets/stats/datasets/fashion-mnist-100p-noniid-08cpp.txt similarity index 100% rename from xain/datasets/stats/datasets/fashion-mnist-100p-noniid-08cpp.txt rename to xain_fl/datasets/stats/datasets/fashion-mnist-100p-noniid-08cpp.txt diff --git a/xain/datasets/stats/datasets/fashion-mnist-100p-noniid-09cpp.txt b/xain_fl/datasets/stats/datasets/fashion-mnist-100p-noniid-09cpp.txt similarity index 100% rename from xain/datasets/stats/datasets/fashion-mnist-100p-noniid-09cpp.txt rename to xain_fl/datasets/stats/datasets/fashion-mnist-100p-noniid-09cpp.txt diff --git a/xain/datasets/stats/stats.py b/xain_fl/datasets/stats/stats.py similarity index 97% rename from xain/datasets/stats/stats.py rename to xain_fl/datasets/stats/stats.py index 803237ed2..f0bc48bc4 100644 --- a/xain/datasets/stats/stats.py +++ b/xain_fl/datasets/stats/stats.py @@ -2,7 +2,7 @@ import numpy as np -from xain.types import FederatedDataset, Partition +from xain_fl.types import FederatedDataset, Partition PartitionStat = Dict[str, List[int]] diff --git a/xain/datasets/stats/stats_test.py b/xain_fl/datasets/stats/stats_test.py similarity index 100% rename from xain/datasets/stats/stats_test.py rename to xain_fl/datasets/stats/stats_test.py diff --git a/xain/datasets/storage.py b/xain_fl/datasets/storage.py similarity index 97% rename from xain/datasets/storage.py rename to xain_fl/datasets/storage.py index fc020973b..f4a6cd09d 100644 --- a/xain/datasets/storage.py +++ b/xain_fl/datasets/storage.py @@ -6,9 +6,9 @@ import requests from absl import flags -from xain.helpers.sha1 import checksum -from xain.logger import get_logger -from xain.types import FederatedDataset, Partition +from xain_fl.helpers.sha1 import checksum +from xain_fl.logger import get_logger +from xain_fl.types import FederatedDataset, Partition from . 
import hashes diff --git a/xain/datasets/storage_test.py b/xain_fl/datasets/storage_test.py similarity index 100% rename from xain/datasets/storage_test.py rename to xain_fl/datasets/storage_test.py diff --git a/xain/datasets/testing.py b/xain_fl/datasets/testing.py similarity index 97% rename from xain/datasets/testing.py rename to xain_fl/datasets/testing.py index c6422858f..64d2a850f 100644 --- a/xain/datasets/testing.py +++ b/xain_fl/datasets/testing.py @@ -3,7 +3,7 @@ import numpy as np from tensorflow.data import Dataset -from xain.types import FederatedDataset, KerasDataset +from xain_fl.types import FederatedDataset, KerasDataset def load(keras_dataset: Dataset) -> KerasDataset: diff --git a/xain/datasets/testing_test.py b/xain_fl/datasets/testing_test.py similarity index 100% rename from xain/datasets/testing_test.py rename to xain_fl/datasets/testing_test.py diff --git a/xain/fl/__init__.py b/xain_fl/fl/__init__.py similarity index 100% rename from xain/fl/__init__.py rename to xain_fl/fl/__init__.py diff --git a/xain/fl/coordinator/__init__.py b/xain_fl/fl/coordinator/__init__.py similarity index 100% rename from xain/fl/coordinator/__init__.py rename to xain_fl/fl/coordinator/__init__.py diff --git a/xain/fl/coordinator/aggregate.py b/xain_fl/fl/coordinator/aggregate.py similarity index 98% rename from xain/fl/coordinator/aggregate.py rename to xain_fl/fl/coordinator/aggregate.py index fdb15e692..a6d7bd63a 100644 --- a/xain/fl/coordinator/aggregate.py +++ b/xain_fl/fl/coordinator/aggregate.py @@ -7,8 +7,8 @@ import numpy as np -from xain.logger import get_logger -from xain.types import Theta +from xain_fl.logger import get_logger +from xain_fl.types import Theta from .evaluator import Evaluator diff --git a/xain/fl/coordinator/aggregate_test.py b/xain_fl/fl/coordinator/aggregate_test.py similarity index 100% rename from xain/fl/coordinator/aggregate_test.py rename to xain_fl/fl/coordinator/aggregate_test.py diff --git a/xain/fl/coordinator/controller.py b/xain_fl/fl/coordinator/controller.py similarity index 100% rename from xain/fl/coordinator/controller.py rename to xain_fl/fl/coordinator/controller.py diff --git a/xain/fl/coordinator/controller_test.py b/xain_fl/fl/coordinator/controller_test.py similarity index 100% rename from xain/fl/coordinator/controller_test.py rename to xain_fl/fl/coordinator/controller_test.py diff --git a/xain/fl/coordinator/coordinator.py b/xain_fl/fl/coordinator/coordinator.py similarity index 97% rename from xain/fl/coordinator/coordinator.py rename to xain_fl/fl/coordinator/coordinator.py index a7891cb11..956c9b046 100644 --- a/xain/fl/coordinator/coordinator.py +++ b/xain_fl/fl/coordinator/coordinator.py @@ -10,11 +10,11 @@ import tensorflow as tf from absl import flags -from xain.datasets import prep -from xain.fl.logging.logging import create_summary_writer, write_summaries -from xain.fl.participant import ModelProvider, Participant -from xain.logger import get_logger -from xain.types import History, Metrics, Partition, Theta +from xain_fl.datasets import prep +from xain_fl.fl.logging.logging import create_summary_writer, write_summaries +from xain_fl.fl.participant import ModelProvider, Participant +from xain_fl.logger import get_logger +from xain_fl.types import History, Metrics, Partition, Theta from .aggregate import Aggregator, FederatedAveragingAgg diff --git a/xain/fl/coordinator/coordinator_test.py b/xain_fl/fl/coordinator/coordinator_test.py similarity index 100% rename from xain/fl/coordinator/coordinator_test.py rename to 
xain_fl/fl/coordinator/coordinator_test.py diff --git a/xain/fl/coordinator/evaluator.py b/xain_fl/fl/coordinator/evaluator.py similarity index 88% rename from xain/fl/coordinator/evaluator.py rename to xain_fl/fl/coordinator/evaluator.py index aec71fefc..2b5a22238 100644 --- a/xain/fl/coordinator/evaluator.py +++ b/xain_fl/fl/coordinator/evaluator.py @@ -2,8 +2,8 @@ import tensorflow as tf -from xain.datasets import prep -from xain.types import Partition, Theta +from xain_fl.datasets import prep +from xain_fl.types import Partition, Theta class Evaluator: diff --git a/xain/fl/logging/__init__.py b/xain_fl/fl/logging/__init__.py similarity index 100% rename from xain/fl/logging/__init__.py rename to xain_fl/fl/logging/__init__.py diff --git a/xain/fl/logging/logging.py b/xain_fl/fl/logging/logging.py similarity index 100% rename from xain/fl/logging/logging.py rename to xain_fl/fl/logging/logging.py diff --git a/xain/fl/participant/__init__.py b/xain_fl/fl/participant/__init__.py similarity index 100% rename from xain/fl/participant/__init__.py rename to xain_fl/fl/participant/__init__.py diff --git a/xain/fl/participant/model_provider.py b/xain_fl/fl/participant/model_provider.py similarity index 100% rename from xain/fl/participant/model_provider.py rename to xain_fl/fl/participant/model_provider.py diff --git a/xain/fl/participant/participant.py b/xain_fl/fl/participant/participant.py similarity index 97% rename from xain/fl/participant/participant.py rename to xain_fl/fl/participant/participant.py index f68b6f331..95dc52523 100644 --- a/xain/fl/participant/participant.py +++ b/xain_fl/fl/participant/participant.py @@ -7,9 +7,9 @@ import numpy as np import tensorflow as tf -from xain.datasets import prep -from xain.logger import get_logger -from xain.types import History, Metrics, Partition, Theta, VolumeByClass +from xain_fl.datasets import prep +from xain_fl.logger import get_logger +from xain_fl.types import History, Metrics, Partition, Theta, VolumeByClass from .model_provider import ModelProvider diff --git a/xain/fl/participant/participant_test.py b/xain_fl/fl/participant/participant_test.py similarity index 99% rename from xain/fl/participant/participant_test.py rename to xain_fl/fl/participant/participant_test.py index bf22998a7..8c9bd49b4 100644 --- a/xain/fl/participant/participant_test.py +++ b/xain_fl/fl/participant/participant_test.py @@ -2,7 +2,7 @@ import pytest # from benchmarks.benchmark.net import model_fns # FIXME refactor -from xain.datasets import load_splits +from xain_fl.datasets import load_splits # from .model_provider import ModelProvider # FIXME refactor from .participant import Participant, _xy_train_volume_by_class diff --git a/xain/grpc/__init__.py b/xain_fl/grpc/__init__.py similarity index 100% rename from xain/grpc/__init__.py rename to xain_fl/grpc/__init__.py diff --git a/xain/grpc/conftest.py b/xain_fl/grpc/conftest.py similarity index 82% rename from xain/grpc/conftest.py rename to xain_fl/grpc/conftest.py index 4e6d27054..54f595e4e 100644 --- a/xain/grpc/conftest.py +++ b/xain_fl/grpc/conftest.py @@ -3,9 +3,9 @@ import grpc import pytest -from xain.grpc import coordinator_pb2_grpc, hellonumproto_pb2_grpc -from xain.grpc.coordinator import Coordinator, CoordinatorGrpc -from xain.grpc.numproto_server import NumProtoServer +from xain_fl.grpc import coordinator_pb2_grpc, hellonumproto_pb2_grpc +from xain_fl.grpc.coordinator import Coordinator, CoordinatorGrpc +from xain_fl.grpc.numproto_server import NumProtoServer @pytest.fixture diff --git 
a/xain/grpc/coordinator.py b/xain_fl/grpc/coordinator.py similarity index 99% rename from xain/grpc/coordinator.py rename to xain_fl/grpc/coordinator.py index 7abef19af..25cc3d5e1 100644 --- a/xain/grpc/coordinator.py +++ b/xain_fl/grpc/coordinator.py @@ -14,9 +14,9 @@ from google.protobuf.internal.python_message import GeneratedProtocolMessageType from numproto import ndarray_to_proto, proto_to_ndarray -from xain.fl.coordinator.aggregate import Aggregator, FederatedAveragingAgg -from xain.grpc import coordinator_pb2, coordinator_pb2_grpc -from xain.logger import get_logger +from xain_fl.fl.coordinator.aggregate import Aggregator, FederatedAveragingAgg +from xain_fl.grpc import coordinator_pb2, coordinator_pb2_grpc +from xain_fl.logger import get_logger logger = get_logger(__name__, level=os.environ.get("XAIN_LOGLEVEL", "INFO")) diff --git a/xain/grpc/coordinator_pb2.py b/xain_fl/grpc/coordinator_pb2.py similarity index 67% rename from xain/grpc/coordinator_pb2.py rename to xain_fl/grpc/coordinator_pb2.py index 27caf6b7f..e59660aad 100644 --- a/xain/grpc/coordinator_pb2.py +++ b/xain_fl/grpc/coordinator_pb2.py @@ -1,6 +1,6 @@ # -*- coding: utf-8 -*- # Generated by the protocol buffer compiler. DO NOT EDIT! -# source: xain/grpc/coordinator.proto +# source: xain_fl/grpc/coordinator.proto import sys _b=sys.version_info[0]<3 and (lambda x:x) or (lambda x:x.encode('latin1')) @@ -18,17 +18,17 @@ DESCRIPTOR = _descriptor.FileDescriptor( - name='xain/grpc/coordinator.proto', - package='xain.protobuf.coordinator', + name='xain_fl/grpc/coordinator.proto', + package='xain_fl.protobuf.coordinator', syntax='proto3', serialized_options=None, - serialized_pb=_b('\n\x1bxain/grpc/coordinator.proto\x12\x19xain.protobuf.coordinator\x1a\x1fnumproto/protobuf/ndarray.proto\"\x13\n\x11RendezvousRequest\"R\n\x0fRendezvousReply\x12?\n\x08response\x18\x01 \x01(\x0e\x32-.xain.protobuf.coordinator.RendezvousResponse\"R\n\x10HeartbeatRequest\x12/\n\x05state\x18\x01 \x01(\x0e\x32 .xain.protobuf.coordinator.State\x12\r\n\x05round\x18\x02 \x01(\x05\"P\n\x0eHeartbeatReply\x12/\n\x05state\x18\x01 \x01(\x0e\x32 .xain.protobuf.coordinator.State\x12\r\n\x05round\x18\x02 \x01(\x05\"\x16\n\x14StartTrainingRequest\"c\n\x12StartTrainingReply\x12)\n\x05theta\x18\x01 \x03(\x0b\x32\x1a.numproto.protobuf.NDArray\x12\x0e\n\x06\x65pochs\x18\x02 \x01(\x05\x12\x12\n\nepoch_base\x18\x03 \x01(\x05\"\x8a\x04\n\x12\x45ndTrainingRequest\x12O\n\x0ctheta_update\x18\x01 \x01(\x0b\x32\x39.xain.protobuf.coordinator.EndTrainingRequest.ThetaUpdate\x12K\n\x07history\x18\x02 \x03(\x0b\x32:.xain.protobuf.coordinator.EndTrainingRequest.HistoryEntry\x12\x46\n\x07metrics\x18\x03 \x01(\x0b\x32\x35.xain.protobuf.coordinator.EndTrainingRequest.Metrics\x1aj\n\x0cHistoryEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12I\n\x05value\x18\x02 \x01(\x0b\x32:.xain.protobuf.coordinator.EndTrainingRequest.HistoryValue:\x02\x38\x01\x1aT\n\x0bThetaUpdate\x12/\n\x0btheta_prime\x18\x01 \x03(\x0b\x32\x1a.numproto.protobuf.NDArray\x12\x14\n\x0cnum_examples\x18\x02 \x01(\x05\x1a\x1e\n\x0cHistoryValue\x12\x0e\n\x06values\x18\x01 \x03(\x02\x1a,\n\x07Metrics\x12\x0b\n\x03\x63id\x18\x01 \x01(\x05\x12\x14\n\x0cvol_by_class\x18\x02 
\x03(\x05\"\x12\n\x10\x45ndTrainingReply*+\n\x12RendezvousResponse\x12\n\n\x06\x41\x43\x43\x45PT\x10\x00\x12\t\n\x05LATER\x10\x01*F\n\x05State\x12\x0b\n\x07STANDBY\x10\x00\x12\t\n\x05ROUND\x10\x01\x12\x0c\n\x08\x46INISHED\x10\x02\x12\t\n\x05READY\x10\x03\x12\x0c\n\x08TRAINING\x10\x04\x32\xbe\x03\n\x0b\x43oordinator\x12h\n\nRendezvous\x12,.xain.protobuf.coordinator.RendezvousRequest\x1a*.xain.protobuf.coordinator.RendezvousReply\"\x00\x12\x65\n\tHeartbeat\x12+.xain.protobuf.coordinator.HeartbeatRequest\x1a).xain.protobuf.coordinator.HeartbeatReply\"\x00\x12q\n\rStartTraining\x12/.xain.protobuf.coordinator.StartTrainingRequest\x1a-.xain.protobuf.coordinator.StartTrainingReply\"\x00\x12k\n\x0b\x45ndTraining\x12-.xain.protobuf.coordinator.EndTrainingRequest\x1a+.xain.protobuf.coordinator.EndTrainingReply\"\x00\x62\x06proto3') + serialized_pb=_b('\n\x1exain_fl/grpc/coordinator.proto\x12\x1cxain_fl.protobuf.coordinator\x1a\x1fnumproto/protobuf/ndarray.proto\"\x13\n\x11RendezvousRequest\"U\n\x0fRendezvousReply\x12\x42\n\x08response\x18\x01 \x01(\x0e\x32\x30.xain_fl.protobuf.coordinator.RendezvousResponse\"U\n\x10HeartbeatRequest\x12\x32\n\x05state\x18\x01 \x01(\x0e\x32#.xain_fl.protobuf.coordinator.State\x12\r\n\x05round\x18\x02 \x01(\x05\"S\n\x0eHeartbeatReply\x12\x32\n\x05state\x18\x01 \x01(\x0e\x32#.xain_fl.protobuf.coordinator.State\x12\r\n\x05round\x18\x02 \x01(\x05\"\x16\n\x14StartTrainingRequest\"c\n\x12StartTrainingReply\x12)\n\x05theta\x18\x01 \x03(\x0b\x32\x1a.numproto.protobuf.NDArray\x12\x0e\n\x06\x65pochs\x18\x02 \x01(\x05\x12\x12\n\nepoch_base\x18\x03 \x01(\x05\"\x96\x04\n\x12\x45ndTrainingRequest\x12R\n\x0ctheta_update\x18\x01 \x01(\x0b\x32<.xain_fl.protobuf.coordinator.EndTrainingRequest.ThetaUpdate\x12N\n\x07history\x18\x02 \x03(\x0b\x32=.xain_fl.protobuf.coordinator.EndTrainingRequest.HistoryEntry\x12I\n\x07metrics\x18\x03 \x01(\x0b\x32\x38.xain_fl.protobuf.coordinator.EndTrainingRequest.Metrics\x1am\n\x0cHistoryEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12L\n\x05value\x18\x02 \x01(\x0b\x32=.xain_fl.protobuf.coordinator.EndTrainingRequest.HistoryValue:\x02\x38\x01\x1aT\n\x0bThetaUpdate\x12/\n\x0btheta_prime\x18\x01 \x03(\x0b\x32\x1a.numproto.protobuf.NDArray\x12\x14\n\x0cnum_examples\x18\x02 \x01(\x05\x1a\x1e\n\x0cHistoryValue\x12\x0e\n\x06values\x18\x01 \x03(\x02\x1a,\n\x07Metrics\x12\x0b\n\x03\x63id\x18\x01 \x01(\x05\x12\x14\n\x0cvol_by_class\x18\x02 \x03(\x05\"\x12\n\x10\x45ndTrainingReply*+\n\x12RendezvousResponse\x12\n\n\x06\x41\x43\x43\x45PT\x10\x00\x12\t\n\x05LATER\x10\x01*F\n\x05State\x12\x0b\n\x07STANDBY\x10\x00\x12\t\n\x05ROUND\x10\x01\x12\x0c\n\x08\x46INISHED\x10\x02\x12\t\n\x05READY\x10\x03\x12\x0c\n\x08TRAINING\x10\x04\x32\xd6\x03\n\x0b\x43oordinator\x12n\n\nRendezvous\x12/.xain_fl.protobuf.coordinator.RendezvousRequest\x1a-.xain_fl.protobuf.coordinator.RendezvousReply\"\x00\x12k\n\tHeartbeat\x12..xain_fl.protobuf.coordinator.HeartbeatRequest\x1a,.xain_fl.protobuf.coordinator.HeartbeatReply\"\x00\x12w\n\rStartTraining\x12\x32.xain_fl.protobuf.coordinator.StartTrainingRequest\x1a\x30.xain_fl.protobuf.coordinator.StartTrainingReply\"\x00\x12q\n\x0b\x45ndTraining\x12\x30.xain_fl.protobuf.coordinator.EndTrainingRequest\x1a..xain_fl.protobuf.coordinator.EndTrainingReply\"\x00\x62\x06proto3') , dependencies=[numproto_dot_protobuf_dot_ndarray__pb2.DESCRIPTOR,]) _RENDEZVOUSRESPONSE = _descriptor.EnumDescriptor( name='RendezvousResponse', - full_name='xain.protobuf.coordinator.RendezvousResponse', + full_name='xain_fl.protobuf.coordinator.RendezvousResponse', filename=None, 
file=DESCRIPTOR, values=[ @@ -43,15 +43,15 @@ ], containing_type=None, serialized_options=None, - serialized_start=1032, - serialized_end=1075, + serialized_start=1059, + serialized_end=1102, ) _sym_db.RegisterEnumDescriptor(_RENDEZVOUSRESPONSE) RendezvousResponse = enum_type_wrapper.EnumTypeWrapper(_RENDEZVOUSRESPONSE) _STATE = _descriptor.EnumDescriptor( name='State', - full_name='xain.protobuf.coordinator.State', + full_name='xain_fl.protobuf.coordinator.State', filename=None, file=DESCRIPTOR, values=[ @@ -78,8 +78,8 @@ ], containing_type=None, serialized_options=None, - serialized_start=1077, - serialized_end=1147, + serialized_start=1104, + serialized_end=1174, ) _sym_db.RegisterEnumDescriptor(_STATE) @@ -96,7 +96,7 @@ _RENDEZVOUSREQUEST = _descriptor.Descriptor( name='RendezvousRequest', - full_name='xain.protobuf.coordinator.RendezvousRequest', + full_name='xain_fl.protobuf.coordinator.RendezvousRequest', filename=None, file=DESCRIPTOR, containing_type=None, @@ -113,20 +113,20 @@ extension_ranges=[], oneofs=[ ], - serialized_start=91, - serialized_end=110, + serialized_start=97, + serialized_end=116, ) _RENDEZVOUSREPLY = _descriptor.Descriptor( name='RendezvousReply', - full_name='xain.protobuf.coordinator.RendezvousReply', + full_name='xain_fl.protobuf.coordinator.RendezvousReply', filename=None, file=DESCRIPTOR, containing_type=None, fields=[ _descriptor.FieldDescriptor( - name='response', full_name='xain.protobuf.coordinator.RendezvousReply.response', index=0, + name='response', full_name='xain_fl.protobuf.coordinator.RendezvousReply.response', index=0, number=1, type=14, cpp_type=8, label=1, has_default_value=False, default_value=0, message_type=None, enum_type=None, containing_type=None, @@ -144,27 +144,27 @@ extension_ranges=[], oneofs=[ ], - serialized_start=112, - serialized_end=194, + serialized_start=118, + serialized_end=203, ) _HEARTBEATREQUEST = _descriptor.Descriptor( name='HeartbeatRequest', - full_name='xain.protobuf.coordinator.HeartbeatRequest', + full_name='xain_fl.protobuf.coordinator.HeartbeatRequest', filename=None, file=DESCRIPTOR, containing_type=None, fields=[ _descriptor.FieldDescriptor( - name='state', full_name='xain.protobuf.coordinator.HeartbeatRequest.state', index=0, + name='state', full_name='xain_fl.protobuf.coordinator.HeartbeatRequest.state', index=0, number=1, type=14, cpp_type=8, label=1, has_default_value=False, default_value=0, message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, serialized_options=None, file=DESCRIPTOR), _descriptor.FieldDescriptor( - name='round', full_name='xain.protobuf.coordinator.HeartbeatRequest.round', index=1, + name='round', full_name='xain_fl.protobuf.coordinator.HeartbeatRequest.round', index=1, number=2, type=5, cpp_type=1, label=1, has_default_value=False, default_value=0, message_type=None, enum_type=None, containing_type=None, @@ -182,27 +182,27 @@ extension_ranges=[], oneofs=[ ], - serialized_start=196, - serialized_end=278, + serialized_start=205, + serialized_end=290, ) _HEARTBEATREPLY = _descriptor.Descriptor( name='HeartbeatReply', - full_name='xain.protobuf.coordinator.HeartbeatReply', + full_name='xain_fl.protobuf.coordinator.HeartbeatReply', filename=None, file=DESCRIPTOR, containing_type=None, fields=[ _descriptor.FieldDescriptor( - name='state', full_name='xain.protobuf.coordinator.HeartbeatReply.state', index=0, + name='state', full_name='xain_fl.protobuf.coordinator.HeartbeatReply.state', index=0, number=1, type=14, cpp_type=8, label=1, 
has_default_value=False, default_value=0, message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, serialized_options=None, file=DESCRIPTOR), _descriptor.FieldDescriptor( - name='round', full_name='xain.protobuf.coordinator.HeartbeatReply.round', index=1, + name='round', full_name='xain_fl.protobuf.coordinator.HeartbeatReply.round', index=1, number=2, type=5, cpp_type=1, label=1, has_default_value=False, default_value=0, message_type=None, enum_type=None, containing_type=None, @@ -220,14 +220,14 @@ extension_ranges=[], oneofs=[ ], - serialized_start=280, - serialized_end=360, + serialized_start=292, + serialized_end=375, ) _STARTTRAININGREQUEST = _descriptor.Descriptor( name='StartTrainingRequest', - full_name='xain.protobuf.coordinator.StartTrainingRequest', + full_name='xain_fl.protobuf.coordinator.StartTrainingRequest', filename=None, file=DESCRIPTOR, containing_type=None, @@ -244,34 +244,34 @@ extension_ranges=[], oneofs=[ ], - serialized_start=362, - serialized_end=384, + serialized_start=377, + serialized_end=399, ) _STARTTRAININGREPLY = _descriptor.Descriptor( name='StartTrainingReply', - full_name='xain.protobuf.coordinator.StartTrainingReply', + full_name='xain_fl.protobuf.coordinator.StartTrainingReply', filename=None, file=DESCRIPTOR, containing_type=None, fields=[ _descriptor.FieldDescriptor( - name='theta', full_name='xain.protobuf.coordinator.StartTrainingReply.theta', index=0, + name='theta', full_name='xain_fl.protobuf.coordinator.StartTrainingReply.theta', index=0, number=1, type=11, cpp_type=10, label=3, has_default_value=False, default_value=[], message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, serialized_options=None, file=DESCRIPTOR), _descriptor.FieldDescriptor( - name='epochs', full_name='xain.protobuf.coordinator.StartTrainingReply.epochs', index=1, + name='epochs', full_name='xain_fl.protobuf.coordinator.StartTrainingReply.epochs', index=1, number=2, type=5, cpp_type=1, label=1, has_default_value=False, default_value=0, message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, serialized_options=None, file=DESCRIPTOR), _descriptor.FieldDescriptor( - name='epoch_base', full_name='xain.protobuf.coordinator.StartTrainingReply.epoch_base', index=2, + name='epoch_base', full_name='xain_fl.protobuf.coordinator.StartTrainingReply.epoch_base', index=2, number=3, type=5, cpp_type=1, label=1, has_default_value=False, default_value=0, message_type=None, enum_type=None, containing_type=None, @@ -289,27 +289,27 @@ extension_ranges=[], oneofs=[ ], - serialized_start=386, - serialized_end=485, + serialized_start=401, + serialized_end=500, ) _ENDTRAININGREQUEST_HISTORYENTRY = _descriptor.Descriptor( name='HistoryEntry', - full_name='xain.protobuf.coordinator.EndTrainingRequest.HistoryEntry', + full_name='xain_fl.protobuf.coordinator.EndTrainingRequest.HistoryEntry', filename=None, file=DESCRIPTOR, containing_type=None, fields=[ _descriptor.FieldDescriptor( - name='key', full_name='xain.protobuf.coordinator.EndTrainingRequest.HistoryEntry.key', index=0, + name='key', full_name='xain_fl.protobuf.coordinator.EndTrainingRequest.HistoryEntry.key', index=0, number=1, type=9, cpp_type=9, label=1, has_default_value=False, default_value=_b("").decode('utf-8'), message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, serialized_options=None, file=DESCRIPTOR), _descriptor.FieldDescriptor( - name='value', 
full_name='xain.protobuf.coordinator.EndTrainingRequest.HistoryEntry.value', index=1, + name='value', full_name='xain_fl.protobuf.coordinator.EndTrainingRequest.HistoryEntry.value', index=1, number=2, type=11, cpp_type=10, label=1, has_default_value=False, default_value=None, message_type=None, enum_type=None, containing_type=None, @@ -327,26 +327,26 @@ extension_ranges=[], oneofs=[ ], - serialized_start=740, - serialized_end=846, + serialized_start=764, + serialized_end=873, ) _ENDTRAININGREQUEST_THETAUPDATE = _descriptor.Descriptor( name='ThetaUpdate', - full_name='xain.protobuf.coordinator.EndTrainingRequest.ThetaUpdate', + full_name='xain_fl.protobuf.coordinator.EndTrainingRequest.ThetaUpdate', filename=None, file=DESCRIPTOR, containing_type=None, fields=[ _descriptor.FieldDescriptor( - name='theta_prime', full_name='xain.protobuf.coordinator.EndTrainingRequest.ThetaUpdate.theta_prime', index=0, + name='theta_prime', full_name='xain_fl.protobuf.coordinator.EndTrainingRequest.ThetaUpdate.theta_prime', index=0, number=1, type=11, cpp_type=10, label=3, has_default_value=False, default_value=[], message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, serialized_options=None, file=DESCRIPTOR), _descriptor.FieldDescriptor( - name='num_examples', full_name='xain.protobuf.coordinator.EndTrainingRequest.ThetaUpdate.num_examples', index=1, + name='num_examples', full_name='xain_fl.protobuf.coordinator.EndTrainingRequest.ThetaUpdate.num_examples', index=1, number=2, type=5, cpp_type=1, label=1, has_default_value=False, default_value=0, message_type=None, enum_type=None, containing_type=None, @@ -364,19 +364,19 @@ extension_ranges=[], oneofs=[ ], - serialized_start=848, - serialized_end=932, + serialized_start=875, + serialized_end=959, ) _ENDTRAININGREQUEST_HISTORYVALUE = _descriptor.Descriptor( name='HistoryValue', - full_name='xain.protobuf.coordinator.EndTrainingRequest.HistoryValue', + full_name='xain_fl.protobuf.coordinator.EndTrainingRequest.HistoryValue', filename=None, file=DESCRIPTOR, containing_type=None, fields=[ _descriptor.FieldDescriptor( - name='values', full_name='xain.protobuf.coordinator.EndTrainingRequest.HistoryValue.values', index=0, + name='values', full_name='xain_fl.protobuf.coordinator.EndTrainingRequest.HistoryValue.values', index=0, number=1, type=2, cpp_type=6, label=3, has_default_value=False, default_value=[], message_type=None, enum_type=None, containing_type=None, @@ -394,26 +394,26 @@ extension_ranges=[], oneofs=[ ], - serialized_start=934, - serialized_end=964, + serialized_start=961, + serialized_end=991, ) _ENDTRAININGREQUEST_METRICS = _descriptor.Descriptor( name='Metrics', - full_name='xain.protobuf.coordinator.EndTrainingRequest.Metrics', + full_name='xain_fl.protobuf.coordinator.EndTrainingRequest.Metrics', filename=None, file=DESCRIPTOR, containing_type=None, fields=[ _descriptor.FieldDescriptor( - name='cid', full_name='xain.protobuf.coordinator.EndTrainingRequest.Metrics.cid', index=0, + name='cid', full_name='xain_fl.protobuf.coordinator.EndTrainingRequest.Metrics.cid', index=0, number=1, type=5, cpp_type=1, label=1, has_default_value=False, default_value=0, message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, serialized_options=None, file=DESCRIPTOR), _descriptor.FieldDescriptor( - name='vol_by_class', full_name='xain.protobuf.coordinator.EndTrainingRequest.Metrics.vol_by_class', index=1, + name='vol_by_class', 
full_name='xain_fl.protobuf.coordinator.EndTrainingRequest.Metrics.vol_by_class', index=1, number=2, type=5, cpp_type=1, label=3, has_default_value=False, default_value=[], message_type=None, enum_type=None, containing_type=None, @@ -431,33 +431,33 @@ extension_ranges=[], oneofs=[ ], - serialized_start=966, - serialized_end=1010, + serialized_start=993, + serialized_end=1037, ) _ENDTRAININGREQUEST = _descriptor.Descriptor( name='EndTrainingRequest', - full_name='xain.protobuf.coordinator.EndTrainingRequest', + full_name='xain_fl.protobuf.coordinator.EndTrainingRequest', filename=None, file=DESCRIPTOR, containing_type=None, fields=[ _descriptor.FieldDescriptor( - name='theta_update', full_name='xain.protobuf.coordinator.EndTrainingRequest.theta_update', index=0, + name='theta_update', full_name='xain_fl.protobuf.coordinator.EndTrainingRequest.theta_update', index=0, number=1, type=11, cpp_type=10, label=1, has_default_value=False, default_value=None, message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, serialized_options=None, file=DESCRIPTOR), _descriptor.FieldDescriptor( - name='history', full_name='xain.protobuf.coordinator.EndTrainingRequest.history', index=1, + name='history', full_name='xain_fl.protobuf.coordinator.EndTrainingRequest.history', index=1, number=2, type=11, cpp_type=10, label=3, has_default_value=False, default_value=[], message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, serialized_options=None, file=DESCRIPTOR), _descriptor.FieldDescriptor( - name='metrics', full_name='xain.protobuf.coordinator.EndTrainingRequest.metrics', index=2, + name='metrics', full_name='xain_fl.protobuf.coordinator.EndTrainingRequest.metrics', index=2, number=3, type=11, cpp_type=10, label=1, has_default_value=False, default_value=None, message_type=None, enum_type=None, containing_type=None, @@ -475,14 +475,14 @@ extension_ranges=[], oneofs=[ ], - serialized_start=488, - serialized_end=1010, + serialized_start=503, + serialized_end=1037, ) _ENDTRAININGREPLY = _descriptor.Descriptor( name='EndTrainingReply', - full_name='xain.protobuf.coordinator.EndTrainingReply', + full_name='xain_fl.protobuf.coordinator.EndTrainingReply', filename=None, file=DESCRIPTOR, containing_type=None, @@ -499,8 +499,8 @@ extension_ranges=[], oneofs=[ ], - serialized_start=1012, - serialized_end=1030, + serialized_start=1039, + serialized_end=1057, ) _RENDEZVOUSREPLY.fields_by_name['response'].enum_type = _RENDEZVOUSRESPONSE @@ -530,43 +530,43 @@ RendezvousRequest = _reflection.GeneratedProtocolMessageType('RendezvousRequest', (_message.Message,), { 'DESCRIPTOR' : _RENDEZVOUSREQUEST, - '__module__' : 'xain.grpc.coordinator_pb2' - # @@protoc_insertion_point(class_scope:xain.protobuf.coordinator.RendezvousRequest) + '__module__' : 'xain_fl.grpc.coordinator_pb2' + # @@protoc_insertion_point(class_scope:xain_fl.protobuf.coordinator.RendezvousRequest) }) _sym_db.RegisterMessage(RendezvousRequest) RendezvousReply = _reflection.GeneratedProtocolMessageType('RendezvousReply', (_message.Message,), { 'DESCRIPTOR' : _RENDEZVOUSREPLY, - '__module__' : 'xain.grpc.coordinator_pb2' - # @@protoc_insertion_point(class_scope:xain.protobuf.coordinator.RendezvousReply) + '__module__' : 'xain_fl.grpc.coordinator_pb2' + # @@protoc_insertion_point(class_scope:xain_fl.protobuf.coordinator.RendezvousReply) }) _sym_db.RegisterMessage(RendezvousReply) HeartbeatRequest = _reflection.GeneratedProtocolMessageType('HeartbeatRequest', (_message.Message,), { 
'DESCRIPTOR' : _HEARTBEATREQUEST, - '__module__' : 'xain.grpc.coordinator_pb2' - # @@protoc_insertion_point(class_scope:xain.protobuf.coordinator.HeartbeatRequest) + '__module__' : 'xain_fl.grpc.coordinator_pb2' + # @@protoc_insertion_point(class_scope:xain_fl.protobuf.coordinator.HeartbeatRequest) }) _sym_db.RegisterMessage(HeartbeatRequest) HeartbeatReply = _reflection.GeneratedProtocolMessageType('HeartbeatReply', (_message.Message,), { 'DESCRIPTOR' : _HEARTBEATREPLY, - '__module__' : 'xain.grpc.coordinator_pb2' - # @@protoc_insertion_point(class_scope:xain.protobuf.coordinator.HeartbeatReply) + '__module__' : 'xain_fl.grpc.coordinator_pb2' + # @@protoc_insertion_point(class_scope:xain_fl.protobuf.coordinator.HeartbeatReply) }) _sym_db.RegisterMessage(HeartbeatReply) StartTrainingRequest = _reflection.GeneratedProtocolMessageType('StartTrainingRequest', (_message.Message,), { 'DESCRIPTOR' : _STARTTRAININGREQUEST, - '__module__' : 'xain.grpc.coordinator_pb2' - # @@protoc_insertion_point(class_scope:xain.protobuf.coordinator.StartTrainingRequest) + '__module__' : 'xain_fl.grpc.coordinator_pb2' + # @@protoc_insertion_point(class_scope:xain_fl.protobuf.coordinator.StartTrainingRequest) }) _sym_db.RegisterMessage(StartTrainingRequest) StartTrainingReply = _reflection.GeneratedProtocolMessageType('StartTrainingReply', (_message.Message,), { 'DESCRIPTOR' : _STARTTRAININGREPLY, - '__module__' : 'xain.grpc.coordinator_pb2' - # @@protoc_insertion_point(class_scope:xain.protobuf.coordinator.StartTrainingReply) + '__module__' : 'xain_fl.grpc.coordinator_pb2' + # @@protoc_insertion_point(class_scope:xain_fl.protobuf.coordinator.StartTrainingReply) }) _sym_db.RegisterMessage(StartTrainingReply) @@ -574,34 +574,34 @@ 'HistoryEntry' : _reflection.GeneratedProtocolMessageType('HistoryEntry', (_message.Message,), { 'DESCRIPTOR' : _ENDTRAININGREQUEST_HISTORYENTRY, - '__module__' : 'xain.grpc.coordinator_pb2' - # @@protoc_insertion_point(class_scope:xain.protobuf.coordinator.EndTrainingRequest.HistoryEntry) + '__module__' : 'xain_fl.grpc.coordinator_pb2' + # @@protoc_insertion_point(class_scope:xain_fl.protobuf.coordinator.EndTrainingRequest.HistoryEntry) }) , 'ThetaUpdate' : _reflection.GeneratedProtocolMessageType('ThetaUpdate', (_message.Message,), { 'DESCRIPTOR' : _ENDTRAININGREQUEST_THETAUPDATE, - '__module__' : 'xain.grpc.coordinator_pb2' - # @@protoc_insertion_point(class_scope:xain.protobuf.coordinator.EndTrainingRequest.ThetaUpdate) + '__module__' : 'xain_fl.grpc.coordinator_pb2' + # @@protoc_insertion_point(class_scope:xain_fl.protobuf.coordinator.EndTrainingRequest.ThetaUpdate) }) , 'HistoryValue' : _reflection.GeneratedProtocolMessageType('HistoryValue', (_message.Message,), { 'DESCRIPTOR' : _ENDTRAININGREQUEST_HISTORYVALUE, - '__module__' : 'xain.grpc.coordinator_pb2' - # @@protoc_insertion_point(class_scope:xain.protobuf.coordinator.EndTrainingRequest.HistoryValue) + '__module__' : 'xain_fl.grpc.coordinator_pb2' + # @@protoc_insertion_point(class_scope:xain_fl.protobuf.coordinator.EndTrainingRequest.HistoryValue) }) , 'Metrics' : _reflection.GeneratedProtocolMessageType('Metrics', (_message.Message,), { 'DESCRIPTOR' : _ENDTRAININGREQUEST_METRICS, - '__module__' : 'xain.grpc.coordinator_pb2' - # @@protoc_insertion_point(class_scope:xain.protobuf.coordinator.EndTrainingRequest.Metrics) + '__module__' : 'xain_fl.grpc.coordinator_pb2' + # @@protoc_insertion_point(class_scope:xain_fl.protobuf.coordinator.EndTrainingRequest.Metrics) }) , 'DESCRIPTOR' : _ENDTRAININGREQUEST, - '__module__' : 
'xain.grpc.coordinator_pb2' - # @@protoc_insertion_point(class_scope:xain.protobuf.coordinator.EndTrainingRequest) + '__module__' : 'xain_fl.grpc.coordinator_pb2' + # @@protoc_insertion_point(class_scope:xain_fl.protobuf.coordinator.EndTrainingRequest) }) _sym_db.RegisterMessage(EndTrainingRequest) _sym_db.RegisterMessage(EndTrainingRequest.HistoryEntry) @@ -611,8 +611,8 @@ EndTrainingReply = _reflection.GeneratedProtocolMessageType('EndTrainingReply', (_message.Message,), { 'DESCRIPTOR' : _ENDTRAININGREPLY, - '__module__' : 'xain.grpc.coordinator_pb2' - # @@protoc_insertion_point(class_scope:xain.protobuf.coordinator.EndTrainingReply) + '__module__' : 'xain_fl.grpc.coordinator_pb2' + # @@protoc_insertion_point(class_scope:xain_fl.protobuf.coordinator.EndTrainingReply) }) _sym_db.RegisterMessage(EndTrainingReply) @@ -621,16 +621,16 @@ _COORDINATOR = _descriptor.ServiceDescriptor( name='Coordinator', - full_name='xain.protobuf.coordinator.Coordinator', + full_name='xain_fl.protobuf.coordinator.Coordinator', file=DESCRIPTOR, index=0, serialized_options=None, - serialized_start=1150, - serialized_end=1596, + serialized_start=1177, + serialized_end=1647, methods=[ _descriptor.MethodDescriptor( name='Rendezvous', - full_name='xain.protobuf.coordinator.Coordinator.Rendezvous', + full_name='xain_fl.protobuf.coordinator.Coordinator.Rendezvous', index=0, containing_service=None, input_type=_RENDEZVOUSREQUEST, @@ -639,7 +639,7 @@ ), _descriptor.MethodDescriptor( name='Heartbeat', - full_name='xain.protobuf.coordinator.Coordinator.Heartbeat', + full_name='xain_fl.protobuf.coordinator.Coordinator.Heartbeat', index=1, containing_service=None, input_type=_HEARTBEATREQUEST, @@ -648,7 +648,7 @@ ), _descriptor.MethodDescriptor( name='StartTraining', - full_name='xain.protobuf.coordinator.Coordinator.StartTraining', + full_name='xain_fl.protobuf.coordinator.Coordinator.StartTraining', index=2, containing_service=None, input_type=_STARTTRAININGREQUEST, @@ -657,7 +657,7 @@ ), _descriptor.MethodDescriptor( name='EndTraining', - full_name='xain.protobuf.coordinator.Coordinator.EndTraining', + full_name='xain_fl.protobuf.coordinator.Coordinator.EndTraining', index=3, containing_service=None, input_type=_ENDTRAININGREQUEST, diff --git a/xain/grpc/coordinator_pb2.pyi b/xain_fl/grpc/coordinator_pb2.pyi similarity index 100% rename from xain/grpc/coordinator_pb2.pyi rename to xain_fl/grpc/coordinator_pb2.pyi diff --git a/xain/grpc/coordinator_pb2_grpc.py b/xain_fl/grpc/coordinator_pb2_grpc.py similarity index 52% rename from xain/grpc/coordinator_pb2_grpc.py rename to xain_fl/grpc/coordinator_pb2_grpc.py index 19614f765..9285718ee 100644 --- a/xain/grpc/coordinator_pb2_grpc.py +++ b/xain_fl/grpc/coordinator_pb2_grpc.py @@ -1,7 +1,7 @@ # Generated by the gRPC Python protocol compiler plugin. DO NOT EDIT! import grpc -from xain.grpc import coordinator_pb2 as xain_dot_grpc_dot_coordinator__pb2 +from xain_fl.grpc import coordinator_pb2 as xain__fl_dot_grpc_dot_coordinator__pb2 class CoordinatorStub(object): @@ -15,24 +15,24 @@ def __init__(self, channel): channel: A grpc.Channel. 
""" self.Rendezvous = channel.unary_unary( - '/xain.protobuf.coordinator.Coordinator/Rendezvous', - request_serializer=xain_dot_grpc_dot_coordinator__pb2.RendezvousRequest.SerializeToString, - response_deserializer=xain_dot_grpc_dot_coordinator__pb2.RendezvousReply.FromString, + '/xain_fl.protobuf.coordinator.Coordinator/Rendezvous', + request_serializer=xain__fl_dot_grpc_dot_coordinator__pb2.RendezvousRequest.SerializeToString, + response_deserializer=xain__fl_dot_grpc_dot_coordinator__pb2.RendezvousReply.FromString, ) self.Heartbeat = channel.unary_unary( - '/xain.protobuf.coordinator.Coordinator/Heartbeat', - request_serializer=xain_dot_grpc_dot_coordinator__pb2.HeartbeatRequest.SerializeToString, - response_deserializer=xain_dot_grpc_dot_coordinator__pb2.HeartbeatReply.FromString, + '/xain_fl.protobuf.coordinator.Coordinator/Heartbeat', + request_serializer=xain__fl_dot_grpc_dot_coordinator__pb2.HeartbeatRequest.SerializeToString, + response_deserializer=xain__fl_dot_grpc_dot_coordinator__pb2.HeartbeatReply.FromString, ) self.StartTraining = channel.unary_unary( - '/xain.protobuf.coordinator.Coordinator/StartTraining', - request_serializer=xain_dot_grpc_dot_coordinator__pb2.StartTrainingRequest.SerializeToString, - response_deserializer=xain_dot_grpc_dot_coordinator__pb2.StartTrainingReply.FromString, + '/xain_fl.protobuf.coordinator.Coordinator/StartTraining', + request_serializer=xain__fl_dot_grpc_dot_coordinator__pb2.StartTrainingRequest.SerializeToString, + response_deserializer=xain__fl_dot_grpc_dot_coordinator__pb2.StartTrainingReply.FromString, ) self.EndTraining = channel.unary_unary( - '/xain.protobuf.coordinator.Coordinator/EndTraining', - request_serializer=xain_dot_grpc_dot_coordinator__pb2.EndTrainingRequest.SerializeToString, - response_deserializer=xain_dot_grpc_dot_coordinator__pb2.EndTrainingReply.FromString, + '/xain_fl.protobuf.coordinator.Coordinator/EndTraining', + request_serializer=xain__fl_dot_grpc_dot_coordinator__pb2.EndTrainingRequest.SerializeToString, + response_deserializer=xain__fl_dot_grpc_dot_coordinator__pb2.EndTrainingReply.FromString, ) @@ -73,25 +73,25 @@ def add_CoordinatorServicer_to_server(servicer, server): rpc_method_handlers = { 'Rendezvous': grpc.unary_unary_rpc_method_handler( servicer.Rendezvous, - request_deserializer=xain_dot_grpc_dot_coordinator__pb2.RendezvousRequest.FromString, - response_serializer=xain_dot_grpc_dot_coordinator__pb2.RendezvousReply.SerializeToString, + request_deserializer=xain__fl_dot_grpc_dot_coordinator__pb2.RendezvousRequest.FromString, + response_serializer=xain__fl_dot_grpc_dot_coordinator__pb2.RendezvousReply.SerializeToString, ), 'Heartbeat': grpc.unary_unary_rpc_method_handler( servicer.Heartbeat, - request_deserializer=xain_dot_grpc_dot_coordinator__pb2.HeartbeatRequest.FromString, - response_serializer=xain_dot_grpc_dot_coordinator__pb2.HeartbeatReply.SerializeToString, + request_deserializer=xain__fl_dot_grpc_dot_coordinator__pb2.HeartbeatRequest.FromString, + response_serializer=xain__fl_dot_grpc_dot_coordinator__pb2.HeartbeatReply.SerializeToString, ), 'StartTraining': grpc.unary_unary_rpc_method_handler( servicer.StartTraining, - request_deserializer=xain_dot_grpc_dot_coordinator__pb2.StartTrainingRequest.FromString, - response_serializer=xain_dot_grpc_dot_coordinator__pb2.StartTrainingReply.SerializeToString, + request_deserializer=xain__fl_dot_grpc_dot_coordinator__pb2.StartTrainingRequest.FromString, + response_serializer=xain__fl_dot_grpc_dot_coordinator__pb2.StartTrainingReply.SerializeToString, ), 
'EndTraining': grpc.unary_unary_rpc_method_handler( servicer.EndTraining, - request_deserializer=xain_dot_grpc_dot_coordinator__pb2.EndTrainingRequest.FromString, - response_serializer=xain_dot_grpc_dot_coordinator__pb2.EndTrainingReply.SerializeToString, + request_deserializer=xain__fl_dot_grpc_dot_coordinator__pb2.EndTrainingRequest.FromString, + response_serializer=xain__fl_dot_grpc_dot_coordinator__pb2.EndTrainingReply.SerializeToString, ), } generic_handler = grpc.method_handlers_generic_handler( - 'xain.protobuf.coordinator.Coordinator', rpc_method_handlers) + 'xain_fl.protobuf.coordinator.Coordinator', rpc_method_handlers) server.add_generic_rpc_handlers((generic_handler,)) diff --git a/xain/grpc/hellonumproto_pb2.py b/xain_fl/grpc/hellonumproto_pb2.py similarity index 91% rename from xain/grpc/hellonumproto_pb2.py rename to xain_fl/grpc/hellonumproto_pb2.py index 08ff66795..22c441c3c 100644 --- a/xain/grpc/hellonumproto_pb2.py +++ b/xain_fl/grpc/hellonumproto_pb2.py @@ -1,6 +1,6 @@ # -*- coding: utf-8 -*- # Generated by the protocol buffer compiler. DO NOT EDIT! -# source: xain/grpc/hellonumproto.proto +# source: xain_fl/grpc/hellonumproto.proto import sys _b=sys.version_info[0]<3 and (lambda x:x) or (lambda x:x.encode('latin1')) @@ -17,11 +17,11 @@ DESCRIPTOR = _descriptor.FileDescriptor( - name='xain/grpc/hellonumproto.proto', + name='xain_fl/grpc/hellonumproto.proto', package='hellonumproto', syntax='proto3', serialized_options=None, - serialized_pb=_b('\n\x1dxain/grpc/hellonumproto.proto\x12\rhellonumproto\x1a\x1fnumproto/protobuf/ndarray.proto\":\n\x0fNumProtoRequest\x12\'\n\x03\x61rr\x18\x01 \x01(\x0b\x32\x1a.numproto.protobuf.NDArray\"8\n\rNumProtoReply\x12\'\n\x03\x61rr\x18\x01 \x01(\x0b\x32\x1a.numproto.protobuf.NDArray2d\n\x0eNumProtoServer\x12R\n\x10SayHelloNumProto\x12\x1e.hellonumproto.NumProtoRequest\x1a\x1c.hellonumproto.NumProtoReply\"\x00\x62\x06proto3') + serialized_pb=_b('\n xain_fl/grpc/hellonumproto.proto\x12\rhellonumproto\x1a\x1fnumproto/protobuf/ndarray.proto\":\n\x0fNumProtoRequest\x12\'\n\x03\x61rr\x18\x01 \x01(\x0b\x32\x1a.numproto.protobuf.NDArray\"8\n\rNumProtoReply\x12\'\n\x03\x61rr\x18\x01 \x01(\x0b\x32\x1a.numproto.protobuf.NDArray2d\n\x0eNumProtoServer\x12R\n\x10SayHelloNumProto\x12\x1e.hellonumproto.NumProtoRequest\x1a\x1c.hellonumproto.NumProtoReply\"\x00\x62\x06proto3') , dependencies=[numproto_dot_protobuf_dot_ndarray__pb2.DESCRIPTOR,]) @@ -54,8 +54,8 @@ extension_ranges=[], oneofs=[ ], - serialized_start=81, - serialized_end=139, + serialized_start=84, + serialized_end=142, ) @@ -85,8 +85,8 @@ extension_ranges=[], oneofs=[ ], - serialized_start=141, - serialized_end=197, + serialized_start=144, + serialized_end=200, ) _NUMPROTOREQUEST.fields_by_name['arr'].message_type = numproto_dot_protobuf_dot_ndarray__pb2._NDARRAY @@ -97,14 +97,14 @@ NumProtoRequest = _reflection.GeneratedProtocolMessageType('NumProtoRequest', (_message.Message,), { 'DESCRIPTOR' : _NUMPROTOREQUEST, - '__module__' : 'xain.grpc.hellonumproto_pb2' + '__module__' : 'xain_fl.grpc.hellonumproto_pb2' # @@protoc_insertion_point(class_scope:hellonumproto.NumProtoRequest) }) _sym_db.RegisterMessage(NumProtoRequest) NumProtoReply = _reflection.GeneratedProtocolMessageType('NumProtoReply', (_message.Message,), { 'DESCRIPTOR' : _NUMPROTOREPLY, - '__module__' : 'xain.grpc.hellonumproto_pb2' + '__module__' : 'xain_fl.grpc.hellonumproto_pb2' # @@protoc_insertion_point(class_scope:hellonumproto.NumProtoReply) }) _sym_db.RegisterMessage(NumProtoReply) @@ -117,8 +117,8 @@ file=DESCRIPTOR, 
index=0, serialized_options=None, - serialized_start=199, - serialized_end=299, + serialized_start=202, + serialized_end=302, methods=[ _descriptor.MethodDescriptor( name='SayHelloNumProto', diff --git a/xain/grpc/hellonumproto_pb2.pyi b/xain_fl/grpc/hellonumproto_pb2.pyi similarity index 100% rename from xain/grpc/hellonumproto_pb2.pyi rename to xain_fl/grpc/hellonumproto_pb2.pyi diff --git a/xain/grpc/hellonumproto_pb2_grpc.py b/xain_fl/grpc/hellonumproto_pb2_grpc.py similarity index 70% rename from xain/grpc/hellonumproto_pb2_grpc.py rename to xain_fl/grpc/hellonumproto_pb2_grpc.py index 09d80dc6b..042b1b309 100644 --- a/xain/grpc/hellonumproto_pb2_grpc.py +++ b/xain_fl/grpc/hellonumproto_pb2_grpc.py @@ -1,7 +1,7 @@ # Generated by the gRPC Python protocol compiler plugin. DO NOT EDIT! import grpc -from xain.grpc import hellonumproto_pb2 as xain_dot_grpc_dot_hellonumproto__pb2 +from xain_fl.grpc import hellonumproto_pb2 as xain__fl_dot_grpc_dot_hellonumproto__pb2 class NumProtoServerStub(object): @@ -16,8 +16,8 @@ def __init__(self, channel): """ self.SayHelloNumProto = channel.unary_unary( '/hellonumproto.NumProtoServer/SayHelloNumProto', - request_serializer=xain_dot_grpc_dot_hellonumproto__pb2.NumProtoRequest.SerializeToString, - response_deserializer=xain_dot_grpc_dot_hellonumproto__pb2.NumProtoReply.FromString, + request_serializer=xain__fl_dot_grpc_dot_hellonumproto__pb2.NumProtoRequest.SerializeToString, + response_deserializer=xain__fl_dot_grpc_dot_hellonumproto__pb2.NumProtoReply.FromString, ) @@ -37,8 +37,8 @@ def add_NumProtoServerServicer_to_server(servicer, server): rpc_method_handlers = { 'SayHelloNumProto': grpc.unary_unary_rpc_method_handler( servicer.SayHelloNumProto, - request_deserializer=xain_dot_grpc_dot_hellonumproto__pb2.NumProtoRequest.FromString, - response_serializer=xain_dot_grpc_dot_hellonumproto__pb2.NumProtoReply.SerializeToString, + request_deserializer=xain__fl_dot_grpc_dot_hellonumproto__pb2.NumProtoRequest.FromString, + response_serializer=xain__fl_dot_grpc_dot_hellonumproto__pb2.NumProtoReply.SerializeToString, ), } generic_handler = grpc.method_handlers_generic_handler( diff --git a/xain/grpc/numproto_client.py b/xain_fl/grpc/numproto_client.py similarity index 86% rename from xain/grpc/numproto_client.py rename to xain_fl/grpc/numproto_client.py index 93b65aa99..af6ace6cf 100644 --- a/xain/grpc/numproto_client.py +++ b/xain_fl/grpc/numproto_client.py @@ -4,8 +4,8 @@ import numpy as np from numproto import ndarray_to_proto, proto_to_ndarray -from xain.grpc import hellonumproto_pb2, hellonumproto_pb2_grpc -from xain.logger import get_logger +from xain_fl.grpc import hellonumproto_pb2, hellonumproto_pb2_grpc +from xain_fl.logger import get_logger logger = get_logger(__name__, level=os.environ.get("XAIN_LOGLEVEL", "INFO")) diff --git a/xain/grpc/numproto_server.py b/xain_fl/grpc/numproto_server.py similarity index 90% rename from xain/grpc/numproto_server.py rename to xain_fl/grpc/numproto_server.py index 5e74fc85c..24615d0f8 100644 --- a/xain/grpc/numproto_server.py +++ b/xain_fl/grpc/numproto_server.py @@ -5,8 +5,8 @@ import grpc from numproto import ndarray_to_proto, proto_to_ndarray -from xain.grpc import hellonumproto_pb2, hellonumproto_pb2_grpc -from xain.logger import get_logger +from xain_fl.grpc import hellonumproto_pb2, hellonumproto_pb2_grpc +from xain_fl.logger import get_logger _ONE_DAY_IN_SECONDS = 60 * 60 * 24 logger = get_logger(__name__, level=os.environ.get("XAIN_LOGLEVEL", "INFO")) diff --git a/xain/grpc/participant.py 
b/xain_fl/grpc/participant.py similarity index 98% rename from xain/grpc/participant.py rename to xain_fl/grpc/participant.py index 37f1af0e8..4facd4ccb 100644 --- a/xain/grpc/participant.py +++ b/xain_fl/grpc/participant.py @@ -9,9 +9,9 @@ import grpc from numproto import ndarray_to_proto, proto_to_ndarray -from xain.grpc import coordinator_pb2, coordinator_pb2_grpc -from xain.logger import get_logger -from xain.types import History, Metrics, Theta +from xain_fl.grpc import coordinator_pb2, coordinator_pb2_grpc +from xain_fl.logger import get_logger +from xain_fl.types import History, Metrics, Theta RETRY_TIMEOUT = 5 HEARTBEAT_TIME = 10 diff --git a/xain/grpc/test_coordinator_logic.py b/xain_fl/grpc/test_coordinator_logic.py similarity index 98% rename from xain/grpc/test_coordinator_logic.py rename to xain_fl/grpc/test_coordinator_logic.py index 43c9d6698..59341389b 100644 --- a/xain/grpc/test_coordinator_logic.py +++ b/xain_fl/grpc/test_coordinator_logic.py @@ -2,8 +2,8 @@ import pytest from numproto import proto_to_ndarray -from xain.grpc import coordinator_pb2 -from xain.grpc.coordinator import ( +from xain_fl.grpc import coordinator_pb2 +from xain_fl.grpc.coordinator import ( Coordinator, DuplicatedUpdateError, InvalidRequestError, diff --git a/xain/grpc/test_grpc.py b/xain_fl/grpc/test_grpc.py similarity index 97% rename from xain/grpc/test_grpc.py rename to xain_fl/grpc/test_grpc.py index c4b923f03..fd8466aed 100644 --- a/xain/grpc/test_grpc.py +++ b/xain_fl/grpc/test_grpc.py @@ -8,19 +8,19 @@ import pytest from numproto import ndarray_to_proto, proto_to_ndarray -from xain.grpc import ( +from xain_fl.grpc import ( coordinator_pb2, coordinator_pb2_grpc, hellonumproto_pb2, hellonumproto_pb2_grpc, ) -from xain.grpc.coordinator import ( +from xain_fl.grpc.coordinator import ( Coordinator, CoordinatorGrpc, Participants, monitor_heartbeats, ) -from xain.grpc.participant import ( +from xain_fl.grpc.participant import ( StateRecord, end_training, message_loop, @@ -101,7 +101,7 @@ def test_heartbeat_denied(participant_stub, coordinator_service): @mock.patch("threading.Event.is_set", side_effect=[False, True]) @mock.patch("time.sleep", return_value=None) -@mock.patch("xain.grpc.coordinator.Coordinator.remove_participant") +@mock.patch("xain_fl.grpc.coordinator.Coordinator.remove_participant") def test_monitor_heartbeats(mock_participants_remove, _mock_sleep, _mock_event): participants = Participants() participants.add("participant_1") @@ -134,7 +134,7 @@ def test_monitor_heartbeats_remove_participant(_mock_sleep, _mock_event): @mock.patch("threading.Event.is_set", side_effect=[False, False, True]) @mock.patch("time.sleep", return_value=None) -@mock.patch("xain.grpc.coordinator_pb2.HeartbeatRequest") +@mock.patch("xain_fl.grpc.coordinator_pb2.HeartbeatRequest") def test_participant_heartbeat(mock_heartbeat_request, _mock_sleep, _mock_event): channel = mock.MagicMock() terminate_event = threading.Event() diff --git a/xain/grpc/test_participant.py b/xain_fl/grpc/test_participant.py similarity index 97% rename from xain/grpc/test_participant.py rename to xain_fl/grpc/test_participant.py index 85c3500ac..ad1ba4986 100644 --- a/xain/grpc/test_participant.py +++ b/xain_fl/grpc/test_participant.py @@ -1,5 +1,5 @@ -from xain.grpc import coordinator_pb2 -from xain.grpc.participant import ParState, StateRecord, transit +from xain_fl.grpc import coordinator_pb2 +from xain_fl.grpc.participant import ParState, StateRecord, transit def test_from_start(): diff --git a/xain/helpers/__init__.py 
b/xain_fl/helpers/__init__.py similarity index 100% rename from xain/helpers/__init__.py rename to xain_fl/helpers/__init__.py diff --git a/xain/helpers/project.py b/xain_fl/helpers/project.py similarity index 100% rename from xain/helpers/project.py rename to xain_fl/helpers/project.py diff --git a/xain/helpers/sha1.py b/xain_fl/helpers/sha1.py similarity index 100% rename from xain/helpers/sha1.py rename to xain_fl/helpers/sha1.py diff --git a/xain/logger.py b/xain_fl/logger.py similarity index 83% rename from xain/logger.py rename to xain_fl/logger.py index 6bf7eb0b1..2a665d084 100644 --- a/xain/logger.py +++ b/xain_fl/logger.py @@ -1,11 +1,11 @@ -"""This module contains all the xain logging configuration""" +"""This module contains custom logging configuration""" import logging def get_logger(name: str, level: str = "INFO") -> logging.Logger: - """Returns an instance of the xain logger. + """Returns an instance of the custom xain-fl logger. Args: name (:obj:`str`): The name of the logger. Typically `__name__`. diff --git a/xain_fl/sdk/__init__.py b/xain_fl/sdk/__init__.py new file mode 100644 index 000000000..857509f71 --- /dev/null +++ b/xain_fl/sdk/__init__.py @@ -0,0 +1 @@ +"""Provides xain-fl package SDK""" diff --git a/xain/sdk/coordinator.py b/xain_fl/sdk/coordinator.py similarity index 100% rename from xain/sdk/coordinator.py rename to xain_fl/sdk/coordinator.py diff --git a/xain/sdk/coordinator_test.py b/xain_fl/sdk/coordinator_test.py similarity index 100% rename from xain/sdk/coordinator_test.py rename to xain_fl/sdk/coordinator_test.py diff --git a/xain/sdk/participant.py b/xain_fl/sdk/participant.py similarity index 100% rename from xain/sdk/participant.py rename to xain_fl/sdk/participant.py diff --git a/xain/sdk/participant_test.py b/xain_fl/sdk/participant_test.py similarity index 100% rename from xain/sdk/participant_test.py rename to xain_fl/sdk/participant_test.py diff --git a/xain/sdk/use_case.py b/xain_fl/sdk/use_case.py similarity index 100% rename from xain/sdk/use_case.py rename to xain_fl/sdk/use_case.py diff --git a/xain/sdk/use_case_test.py b/xain_fl/sdk/use_case_test.py similarity index 100% rename from xain/sdk/use_case_test.py rename to xain_fl/sdk/use_case_test.py diff --git a/xain/types/__init__.py b/xain_fl/types/__init__.py similarity index 100% rename from xain/types/__init__.py rename to xain_fl/types/__init__.py
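
For orientation only (not part of this patch): a minimal sketch of how the renamed generated stubs above would be called from client code, assuming the xain_fl package is installed and a coordinator is reachable. The address, the insecure channel, and the empty RendezvousRequest are illustrative assumptions; CoordinatorStub, the Rendezvous RPC, and the message types come from the regenerated coordinator_pb2 / coordinator_pb2_grpc modules in this diff.

    import grpc

    from xain_fl.grpc import coordinator_pb2, coordinator_pb2_grpc

    # Hypothetical endpoint for illustration; the real address is supplied by the
    # project's own configuration, not by this patch.
    with grpc.insecure_channel("localhost:50051") as channel:
        stub = coordinator_pb2_grpc.CoordinatorStub(channel)
        # Rendezvous is one of the four unary RPCs exposed by the Coordinator
        # service regenerated above (Rendezvous, Heartbeat, StartTraining, EndTraining).
        reply = stub.Rendezvous(coordinator_pb2.RendezvousRequest())
        print(reply.response)

Per the descriptor wiring above (`_RENDEZVOUSREPLY.fields_by_name['response'].enum_type = _RENDEZVOUSRESPONSE`), the reply carries a `response` enum, so printing it shows how the coordinator answered the rendezvous.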
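
A similar sketch for the hellonumproto example service renamed in this diff, again assuming a running server and an illustrative address; the NumProtoServerStub, the SayHelloNumProto RPC, and the `arr` field are taken from the regenerated hellonumproto modules, while ndarray_to_proto / proto_to_ndarray come from the numproto dependency already imported in numproto_client.py.

    import grpc
    import numpy as np
    from numproto import ndarray_to_proto, proto_to_ndarray

    from xain_fl.grpc import hellonumproto_pb2, hellonumproto_pb2_grpc

    # Hypothetical port for illustration; numproto_server.py binds its own address.
    with grpc.insecure_channel("localhost:50051") as channel:
        stub = hellonumproto_pb2_grpc.NumProtoServerStub(channel)
        # NumProtoRequest.arr wraps a NumPy array as a numproto NDArray message.
        request = hellonumproto_pb2.NumProtoRequest(arr=ndarray_to_proto(np.arange(10)))
        reply = stub.SayHelloNumProto(request)
        # The reply wraps an NDArray as well; convert it back to a NumPy array.
        print(proto_to_ndarray(reply.arr))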