diff --git a/.ci/tests/examples/api_test.py b/.ci/tests/examples/api_test.py new file mode 100644 index 000000000..e9a5bd06d --- /dev/null +++ b/.ci/tests/examples/api_test.py @@ -0,0 +1,83 @@ +import fire +import yaml + +from fedn import APIClient + + +def _download_config(output): + """ Download the client configuration file from the controller. + + :param output: The output file path. + :type output: str + """ + client = APIClient(host="localhost", port=8092) + config = client.get_client_config(checksum=True) + with open(output, 'w') as f: + f.write(yaml.dump(config)) + + +def test_api_get_methods(): + client = APIClient(host="localhost", port=8092) + status = client.get_controller_status() + assert status + print("Controller status: ", status, flush=True) + + events = client.get_events() + assert events + print("Events: ", events, flush=True) + + validations = client.list_validations() + assert validations + print("Validations: ", validations, flush=True) + + models = client.get_model_trail() + assert models + print("Models: ", models, flush=True) + + clients = client.list_clients() + assert clients + print("Clients: ", clients, flush=True) + + combiners = client.list_combiners() + assert combiners + print("Combiners: ", combiners, flush=True) + + combiner = client.get_combiner("combiner") + assert combiner + print("Combiner: ", combiner, flush=True) + + first_model = client.get_initial_model() + assert first_model + print("First model: ", first_model, flush=True) + + package = client.get_package() + assert package + print("Package: ", package, flush=True) + + checksum = client.get_package_checksum() + assert checksum + print("Checksum: ", checksum, flush=True) + + rounds = client.list_rounds() + assert rounds + print("Rounds: ", rounds, flush=True) + + round = client.get_round(1) + assert round + print("Round: ", round, flush=True) + + sessions = client.list_sessions() + assert sessions + print("Sessions: ", sessions, flush=True) + + +if __name__ == 
'__main__': + + client = APIClient(host="localhost", port=8092) + fire.Fire({ + 'set_seed': client.set_initial_model, + 'set_package': client.set_package, + 'start_session': client.start_session, + 'get_client_config': _download_config, + 'test_api_get_methods': test_api_get_methods, + }) diff --git a/.ci/tests/examples/configure.sh b/.ci/tests/examples/configure.sh index 285374fb6..b3fb9d678 100755 --- a/.ci/tests/examples/configure.sh +++ b/.ci/tests/examples/configure.sh @@ -3,7 +3,7 @@ set -e # Parse example name if [ "$#" -ne 2 ]; then - >&2 echo "Wrong number of arguments (usage: run.sh )" + >&2 echo "Wrong number of arguments (usage: configure.sh )" exit 1 fi example="$1" diff --git a/.ci/tests/examples/inference_test.py b/.ci/tests/examples/inference_test.py new file mode 100644 index 000000000..6e27d2499 --- /dev/null +++ b/.ci/tests/examples/inference_test.py @@ -0,0 +1,35 @@ +import sys +from time import sleep + +import pymongo + +N_CLIENTS = 2 +RETRIES = 18 +SLEEP = 10 + + +def _eprint(*args, **kwargs): + print(*args, file=sys.stderr, **kwargs) + + +def _wait_n_rounds(collection): + n = 0 + for _ in range(RETRIES): + query = {'type': 'INFERENCE'} + n = collection.count_documents(query) + if n == N_CLIENTS: + return n + _eprint(f'Succeeded clients {n}. Sleeping for {SLEEP}.') + sleep(SLEEP) + _eprint(f'Succeeded clients: {n}. Giving up.') + return n + + +if __name__ == '__main__': + # Connect to mongo + client = pymongo.MongoClient("mongodb://fedn_admin:password@localhost:6534") + + # Wait for successful rounds + succeded = _wait_n_rounds(client['fedn-test-network']['control']['status']) + assert(succeded == N_CLIENTS) # check that all rounds succeeded + _eprint(f'Succeeded inference clients: {succeded}. 
Test passed.') diff --git a/.ci/tests/examples/print_logs.sh b/.ci/tests/examples/print_logs.sh index 4c63f141e..6979000ed 100755 --- a/.ci/tests/examples/print_logs.sh +++ b/.ci/tests/examples/print_logs.sh @@ -5,8 +5,11 @@ docker logs "$(basename $PWD)_minio_1" echo "Mongo logs" docker logs "$(basename $PWD)_mongo_1" -echo "Reducer logs" -docker logs "$(basename $PWD)_reducer_1" +echo "Dashboard logs" +docker logs "$(basename $PWD)_dashboard_1" + +echo "API-Server logs" +docker logs "$(basename $PWD)_api-server_1" echo "Combiner logs" docker logs "$(basename $PWD)_combiner_1" diff --git a/.ci/tests/examples/run.sh b/.ci/tests/examples/run.sh index 7afe8b9cc..da1c7981f 100755 --- a/.ci/tests/examples/run.sh +++ b/.ci/tests/examples/run.sh @@ -23,34 +23,23 @@ docker-compose \ ".$example/bin/python" ../../.ci/tests/examples/wait_for.py combiners >&2 echo "Upload compute package" -curl -k -X POST \ - -F file=@package.tgz \ - -F helper="$helper" \ - http://localhost:8090/context -printf '\n' +".$example/bin/python" ../../.ci/tests/examples/api_test.py set_package --path package.tgz --helper "$helper" >&2 echo "Upload seed" -curl -k -X POST \ - -F seed=@seed.npz \ - http://localhost:8090/models -printf '\n' +".$example/bin/python" ../../.ci/tests/examples/api_test.py set_seed --path seed.npz >&2 echo "Wait for clients to connect" ".$example/bin/python" ../../.ci/tests/examples/wait_for.py clients ->&2 echo "Start round" -curl -k -X POST \ - -F rounds=3 \ - -F validate=True \ - http://localhost:8090/control -printf '\n' +>&2 echo "Start session" +".$example/bin/python" ../../.ci/tests/examples/api_test.py start_session --rounds 3 --helper "$helper" >&2 echo "Checking rounds success" ".$example/bin/python" ../../.ci/tests/examples/wait_for.py rounds >&2 echo "Test client connection with dowloaded settings" # Get config -curl -k http://localhost:8090/config/download > ../../client.yaml +".$example/bin/python" ../../.ci/tests/examples/api_test.py get_client_config --output 
../../client.yaml # Redeploy clients with config docker-compose \ @@ -62,5 +51,8 @@ docker-compose \ >&2 echo "Wait for clients to reconnect" ".$example/bin/python" ../../.ci/tests/examples/wait_for.py clients +>&2 echo "Test API GET requests" +".$example/bin/python" ../../.ci/tests/examples/api_test.py test_api_get_methods + popd >&2 echo "Test completed successfully" \ No newline at end of file diff --git a/.ci/tests/examples/run_inference.sh b/.ci/tests/examples/run_inference.sh new file mode 100755 index 000000000..d78771d70 --- /dev/null +++ b/.ci/tests/examples/run_inference.sh @@ -0,0 +1,19 @@ +#!/bin/bash +set -e + +# Parse example name +if [ "$#" -lt 1 ]; then + >&2 echo "Wrong number of arguments (usage: run_inference.sh )" + exit 1 +fi +example="$1" + +>&2 echo "Run inference" +pushd "examples/$example" +curl -k -X POST https://localhost:8090/infer + +>&2 echo "Checking inference success" +".$example/bin/python" ../../.ci/tests/examples/inference_test.py + +>&2 echo "Test completed successfully" +popd \ No newline at end of file diff --git a/.ci/tests/examples/wait_for.py b/.ci/tests/examples/wait_for.py index 20454cf2e..ccd76859d 100644 --- a/.ci/tests/examples/wait_for.py +++ b/.ci/tests/examples/wait_for.py @@ -18,7 +18,7 @@ def _retry(try_func, **func_args): for _ in range(RETRIES): is_success = try_func(**func_args) if is_success: - _eprint('Sucess.') + _eprint('Success.') return True _eprint(f'Sleeping for {SLEEP}.') sleep(SLEEP) @@ -29,29 +29,39 @@ def _retry(try_func, **func_args): def _test_rounds(n_rounds): client = pymongo.MongoClient( "mongodb://fedn_admin:password@localhost:6534") - collection = client['fedn-test-network']['control']['round'] - query = {'reducer.status': 'Success'} + collection = client['fedn-network']['control']['rounds'] + query = {'status': 'Finished'} n = collection.count_documents(query) client.close() _eprint(f'Succeded rounds: {n}.') return n == n_rounds -def _test_nodes(n_nodes, node_type, reducer_host='localhost', 
reducer_port='8090'): +def _test_nodes(n_nodes, node_type, reducer_host='localhost', reducer_port='8092'): try: - resp = requests.get( - f'http://{reducer_host}:{reducer_port}/netgraph', verify=False) + + endpoint = "list_clients" if node_type == "client" else "list_combiners" + + response = requests.get( + f'http://{reducer_host}:{reducer_port}/{endpoint}', verify=False) + + if response.status_code == 200: + + data = json.loads(response.content) + + count = 0 + if node_type == "client": + arr = data.get('result') + count = sum(element.get('status') == "online" for element in arr) + else: + count = data.get('count') + + _eprint(f'Active {node_type}s: {count}.') + return count == n_nodes + except Exception as e: - _eprint(f'Reques exception econuntered: {e}.') + _eprint(f'Request exception encountered: {e}.') return False - if resp.status_code == 200: - gr = json.loads(resp.content) - n = sum(values.get('type') == node_type and values.get( - 'status') == 'active' for values in gr['nodes']) - _eprint(f'Active {node_type}s: {n}.') - return n == n_nodes - _eprint(f'Reducer returned {resp.status_code}.') - return False def rounds(n_rounds=3): diff --git a/.github/workflows/code-checks.yaml b/.github/workflows/code-checks.yaml index c1ec38548..3b0f615f6 100644 --- a/.github/workflows/code-checks.yaml +++ b/.github/workflows/code-checks.yaml @@ -18,6 +18,8 @@ jobs: --skip .venv --skip .mnist-keras --skip .mnist-pytorch + --skip fedn_pb2.py + --skip fedn_pb2_grpc.py - name: check Python formatting run: > @@ -25,12 +27,14 @@ jobs: --exclude .venv --exclude .mnist-keras --exclude .mnist-pytorch + --exclude fedn_pb2.py + --exclude fedn_pb2_grpc.py . - name: run Python linter run: > .venv/bin/flake8 . 
- --exclude ".venv,.mnist-keras,.mnist-pytorch,fedn_pb2.py" + --exclude ".venv,.mnist-keras,.mnist-pytorch,fedn_pb2.py,fedn_pb2_grpc.py" - name: check for floating imports run: > @@ -38,7 +42,8 @@ jobs: --exclude-dir='.venv' --exclude-dir='.mnist-pytorch' --exclude-dir='.mnist-keras' - --exclude-dir='docs' + --exclude-dir='docs' + --exclude='tests.py' '^[ \t]+(import|from) ' -I . # TODO: add linting/formatting for all file types \ No newline at end of file diff --git a/.github/workflows/integration-tests.yaml b/.github/workflows/integration-tests.yaml index d5c49a8f0..1b451975e 100644 --- a/.github/workflows/integration-tests.yaml +++ b/.github/workflows/integration-tests.yaml @@ -15,13 +15,12 @@ jobs: strategy: matrix: to_test: - - "mnist-keras keras" - - "mnist-pytorch pytorch" + - "mnist-keras kerashelper" + - "mnist-pytorch pytorchhelper" python_version: ["3.8", "3.9","3.10"] os: - ubuntu-20.04 - ubuntu-22.04 - - macos-11 runs-on: ${{ matrix.os }} steps: - name: checkout @@ -38,7 +37,10 @@ jobs: - name: run ${{ matrix.to_test }} run: .ci/tests/examples/run.sh ${{ matrix.to_test }} - if: ${{ matrix.os != 'macos-11' }} # skip Docker part for MacOS + + - name: run ${{ matrix.to_test }} inference + run: .ci/tests/examples/run_inference.sh ${{ matrix.to_test }} + if: ${{ matrix.os != 'macos-11' && matrix.to_test == 'mnist-keras keras' }} # example available for Keras - name: print logs if: failure() diff --git a/.readthedocs.yaml b/.readthedocs.yaml index 0ccd43f49..bc45dc53b 100644 --- a/.readthedocs.yaml +++ b/.readthedocs.yaml @@ -6,4 +6,10 @@ build: python: "3.9" sphinx: - configuration: docs/conf.py \ No newline at end of file + configuration: docs/conf.py + +python: + install: + - method: pip + path: ./fedn + - requirements: docs/requirements.txt diff --git a/Dockerfile b/Dockerfile index 67f026d03..fa8c5bd22 100644 --- a/Dockerfile +++ b/Dockerfile @@ -1,5 +1,5 @@ # Base image -ARG BASE_IMG=python:3.9-slim +ARG BASE_IMG=python:3.10-slim FROM $BASE_IMG # 
Requirements (use MNIST Keras as default) diff --git a/LICENSE b/LICENSE index ae263d310..a8b7d2c09 100644 --- a/LICENSE +++ b/LICENSE @@ -199,3 +199,4 @@ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. + diff --git a/README.rst b/README.rst index 42503985a..2afb60ebc 100644 --- a/README.rst +++ b/README.rst @@ -101,7 +101,7 @@ To connect a client that uses the data partition 'data/clients/1/mnist.pt': -v $PWD/data/clients/1:/var/data \ -e ENTRYPOINT_OPTS=--data_path=/var/data/mnist.pt \ --network=fedn_default \ - ghcr.io/scaleoutsystems/fedn/fedn:develop-mnist-pytorch run client -in client.yaml --name client1 + ghcr.io/scaleoutsystems/fedn/fedn:master-mnist-pytorch run client -in client.yaml --name client1 You are now ready to start training the model at http://localhost:8090/control. diff --git a/config/settings-client.yaml.template b/config/settings-client.yaml.template index dbc370cd3..d7146af26 100644 --- a/config/settings-client.yaml.template +++ b/config/settings-client.yaml.template @@ -1,3 +1,3 @@ -network_id: fedn-test-network -discover_host: reducer -discover_port: 8090 +network_id: fedn-network +discover_host: api-server +discover_port: 8092 diff --git a/config/settings-combiner.yaml.template b/config/settings-combiner.yaml.template index 39c8c5985..8cef6643a 100644 --- a/config/settings-combiner.yaml.template +++ b/config/settings-combiner.yaml.template @@ -1,10 +1,10 @@ -network_id: fedn-test-network -controller: - discover_host: reducer - discover_port: 8090 +network_id: fedn-network +discover_host: api-server +discover_port: 8092 + +name: combiner +host: combiner +port: 12080 +max_clients: 30 + -combiner: - name: combiner - host: combiner - port: 12080 - max_clients: 30 diff --git a/config/settings-reducer.yaml.template b/config/settings-reducer.yaml.template index 4f6009e4b..fd9352331 100644 --- 
a/config/settings-reducer.yaml.template +++ b/config/settings-reducer.yaml.template @@ -1,9 +1,8 @@ -network_id: fedn-test-network -token: fedn_token - -control: - state: idle - helper: keras +network_id: fedn-network +controller: + host: api-server + port: 8092 + debug: True statestore: type: MongoDB diff --git a/docker-compose.yaml b/docker-compose.yaml index 4529dc231..aa4550c25 100644 --- a/docker-compose.yaml +++ b/docker-compose.yaml @@ -58,27 +58,50 @@ services: ports: - 8081:8081 - # Reducer - reducer: + dashboard: environment: - GET_HOSTS_FROM=dns - USER=test - PROJECT=project - FLASK_DEBUG=1 - - FLASK_ENV=development + - STATESTORE_CONFIG=/app/config/settings-reducer.yaml build: context: . args: - BASE_IMG: ${BASE_IMG:-python:3.9-slim} + BASE_IMG: ${BASE_IMG:-python:3.10-slim} working_dir: /app volumes: - ${HOST_REPO_DIR:-.}/fedn:/app/fedn entrypoint: [ "sh", "-c" ] command: - - "/venv/bin/pip install --no-cache-dir -e /app/fedn && /venv/bin/fedn run reducer -n reducer --init=config/settings-reducer.yaml" + - "/venv/bin/pip install --no-cache-dir -e /app/fedn && /venv/bin/fedn run dashboard -n reducer --init=config/settings-reducer.yaml" ports: - 8090:8090 + api-server: + environment: + - GET_HOSTS_FROM=dns + - USER=test + - PROJECT=project + - FLASK_DEBUG=1 + - STATESTORE_CONFIG=/app/config/settings-reducer.yaml + - MODELSTORAGE_CONFIG=/app/config/settings-reducer.yaml + build: + context: . + args: + BASE_IMG: ${BASE_IMG:-python:3.10-slim} + working_dir: /app + volumes: + - ${HOST_REPO_DIR:-.}/fedn:/app/fedn + depends_on: + - minio + - mongo + entrypoint: [ "sh", "-c" ] + command: + - "/venv/bin/pip install --no-cache-dir -e /app/fedn && /venv/bin/python fedn/fedn/network/api/server.py" + ports: + - 8092:8092 + # Combiner combiner: environment: @@ -87,13 +110,13 @@ services: build: context: . 
args: - BASE_IMG: ${BASE_IMG:-python:3.9-slim} + BASE_IMG: ${BASE_IMG:-python:3.10-slim} working_dir: /app volumes: - ${HOST_REPO_DIR:-.}/fedn:/app/fedn entrypoint: [ "sh", "-c" ] command: - - "/venv/bin/pip install --no-cache-dir -e /app/fedn && /venv/bin/fedn run combiner -in config/settings-combiner.yaml" + - "/venv/bin/pip install --no-cache-dir -e /app/fedn && /venv/bin/fedn run combiner --init config/settings-combiner.yaml" ports: - 12080:12080 @@ -104,12 +127,12 @@ services: build: context: . args: - BASE_IMG: ${BASE_IMG:-python:3.9-slim} + BASE_IMG: ${BASE_IMG:-python:3.10-slim} working_dir: /app volumes: - ${HOST_REPO_DIR:-.}/fedn:/app/fedn entrypoint: [ "sh", "-c" ] command: - - "/venv/bin/pip install --no-cache-dir -e /app/fedn && /venv/bin/fedn run client -in config/settings-client.yaml" + - "/venv/bin/pip install --no-cache-dir -e /app/fedn && /venv/bin/fedn run client --init config/settings-client.yaml" deploy: replicas: 0 diff --git a/docs/README.md b/docs/README.md new file mode 100644 index 000000000..ab335dcea --- /dev/null +++ b/docs/README.md @@ -0,0 +1,4 @@ +FEDn is using sphinx with reStructuredText. + +sphinx-apidoc --ext-autodoc --module-first -o _source ../fedn/fedn ../*tests* ../*exceptions* ../*common* ../ ../fedn/fedn/network/api/server.py ../fedn/fedn/network/controller/controlbase.py +sphinx-build . _build \ No newline at end of file diff --git a/docs/architecture.rst b/docs/architecture.rst index 6e04d2af0..0a770c5b6 100644 --- a/docs/architecture.rst +++ b/docs/architecture.rst @@ -1,53 +1,63 @@ Architecture overview ===================== -Constructing a federated model with FEDn amounts to a) specifying the details of the client-side training code and data integrations, and b) deploying the reducer-combiner network. A FEDn network, as illustrated in the picture below, is made up of three main components: the *Reducer*, one or more *Combiners*, and a number of *Clients*. 
The combiner network forms the backbone of the FedML orchestration mechanism, while the Reducer provides discovery services and provides controls to coordinate training over the combiner network. By horizontally scaling the combiner network, one can meet the needs of a growing number of clients. +Constructing a federated model with FEDn amounts to a) specifying the details of the client-side training code and data integrations, and b) deploying the federated network. A FEDn network, as illustrated in the picture below, is made up of components into three different tiers: the *Controller* tier (3), one or more *Combiners* in second tier (2), and a number of *Clients* in tier (1). +The combiners forms the backbone of the federated ML orchestration mechanism, while the Controller tier provides discovery services and controls to coordinate training over the federated network. +By horizontally scaling the number of combiners, one can meet the needs of a growing number of clients. -.. image:: img/overview.png +.. image:: img/FEDn_network.png :alt: FEDn network :width: 100% :align: center -Main components ---------------- -Client -...... -A Client is a data node, holding private data and connecting to a Combiner to receive model update requests and model validation requests during training rounds. Importantly, clients do not require any open ingress ports. A client receives the code to be executed from the Reducer upon connecting to the network, and thus they only need to be configured prior to connection to read the local datasets during training and validation. Python3 client implementation is provided out of the box, and it is possible to write clients in a variety of languages to target different software and hardware requirements. -Combiner -........ +The clients: tier 1 +................... -A combiner is an actor whose main role is to orchestrate and aggregate model updates from a number of clients during a training round. 
When and how to trigger such orchestration rounds are specified in the overall *compute plan* laid out by the Reducer. Each combiner in the network runs an independent gRPC server, providing RPCs for interacting with the alliance subsystem it controls. Hence, the total number of clients that can be accommodated in a FEDn network is proportional to the number of active combiners in the FEDn network. Combiners can be deployed anywhere, e.g. in a cloud or on a fog node to provide aggregation services near the cloud edge. +A Client (gRPC client) is a data node, holding private data and connecting to a Combiner (gRPC server) to receive model update requests and model validation requests during training sessions. +Importantly, clients uses remote procedure calls (RPC) to ask for model updates tasks, thus the clients not require any open ingress ports! A client receives the code (called package or compute package) to be executed from the *Controller* +upon connecting to the network, and thus they only need to be configured prior to connection to read the local datasets during training and validation. The package is based on entry points in the client code, and can be customized to fit the needs of the user. +This allows for a high degree of flexibility in terms of what kind of training and validation tasks that can be performed on the client side. Such as different types of machine learning models and framework, and even programming languages. +A python3 client implementation is provided out of the box, and it is possible to write clients in a variety of languages to target different software and hardware requirements. -Reducer -....... +The combiners: tier 2 +..................... -The reducer fills three main roles in the FEDn network: 1.) it lays out the overall, global training strategy and communicates that to the combiner network. It also dictates the strategy to aggregate model updates from individual combiners into a single global model, 2.) 
it handles global state and maintains the *model trail* - an immutable trail of global model updates uniquely defining the FedML training timeline, and 3.) it provides discovery services, mediating connections between clients and combiners. For this purpose, the Reducer exposes a standard REST API. +A combiner is an actor whose main role is to orchestrate and aggregate model updates from a number of clients during a training session. +When and how to trigger such orchestration are specified in the overall *compute plan* laid out by the *Controller*. +Each combiner in the network runs an independent gRPC server, providing RPCs for interacting with the federated network it controls. +Hence, the total number of clients that can be accommodated in a FEDn network is proportional to the number of active combiners in the FEDn network. +Combiners can be deployed anywhere, e.g. in a cloud or on a fog node to provide aggregation services near the cloud edge. -Services and communication --------------------------- +The controller: tier 3 +...................... -The figure below provides a logical architecture view of the services provided by each agent and how they interact. +Tier 3 does actually contain several components and services, but we tend to associate it with the *Controller* the most. The *Controller* fills three main roles in the FEDn network: -.. image:: img/FEDn-arch-overview.png - :alt: FEDn architecture overview - :width: 100% - :align: center +1. it lays out the overall, global training strategy and communicates that to the combiner network. +It also dictates the strategy to aggregate model updates from individual combiners into a single global model, +2. it handles global state and maintains the *model trail* - an immutable trail of global model updates uniquely defining the federated ML training timeline, and +3. it provides discovery services, mediating connections between clients and combiners. 
For this purpose, the *Controller* exposes a standard REST API both for RPC clients and servers, but also for user interfaces and other services. + +Tier 3 also contain a *Reducer* component, which is responsible for aggregating combiner-level models into a single global model. Further, it contains a *StateStore* database, +which is responsible for storing various states of the network and training sessions. The final global model trail from a traning session is stored in the *ModelRegistry* database. -Control flows and algorithms ----------------------------- +Notes on aggregating algorithms +............................... -FEDn is designed to allow customization of the FedML algorithm, following a specified pattern, or programming model. Model aggregation happens on two levels in the system. First, each Combiner can be configured with a custom orchestration and aggregation implementation, that reduces model updates from Clients into a single, *combiner level* model. Then, a configurable aggregation protocol on the Reducer level is responsible for combining the combiner-level models into a global model. By varying the aggregation schemes on the two levels in the system, many different possible outcomes can be achieved. Good staring configurations are provided out-of-the-box to help the user get started. +FEDn is designed to allow customization of the FedML algorithm, following a specified pattern, or programming model. +Model aggregation happens on two levels in the network. First, each Combiner can be configured with a custom orchestration and aggregation implementation, that reduces model updates from Clients into a single, *combiner level* model. +Then, a configurable aggregation protocol on the *Controller* level is responsible for combining the combiner-level models into a global model. By varying the aggregation schemes on the two levels in the system, +many different possible outcomes can be achieved. 
Good starting configurations are provided out-of-the-box to help the user get started. See API reference for more details. Hierarchical Federated Averaging ................................ -The currently implemented default scheme uses a local SGD strategy on the Combiner level aggregation and a simple average of models on the reducer level. This results in a highly horizontally scalable FedAvg scheme. The strategy works well with most artificial neural network (ANNs) models, and can in general be applied to models where it is possible and makes sense to form mean values of model parameters (for example SVMs). Additional FedML training protocols, including support for various types of federated ensemble models, are in active development. +The currently implemented default scheme uses a local SGD strategy on the Combiner level aggregation and a simple average of models on the reducer level. +This results in a highly horizontally scalable FedAvg scheme. The strategy works well with most artificial neural network (ANNs) models, +and can in general be applied to models where it is possible and makes sense to form mean values of model parameters (for example SVMs). + -.. image:: img/HFedAvg.png - :alt: FEDn architecture overview - :width: 100% - :align: center diff --git a/docs/conf.py b/docs/conf.py index 8133e96fc..bd2032b0e 100644 --- a/docs/conf.py +++ b/docs/conf.py @@ -12,7 +12,7 @@ author = 'Scaleout Systems AB' # The full version, including alpha/beta/rc tags -release = '0.4.1' +release = '0.6.0' # Add any Sphinx extension module names here, as strings extensions = [ @@ -106,3 +106,5 @@ # Example configuration for intersphinx: refer to the Python standard library. 
intersphinx_mapping = {'https://docs.python.org/': None} + +pygments_style = 'sphinx' diff --git a/docs/deployment.rst b/docs/deployment.rst index ff8f3ea0e..974d98842 100644 --- a/docs/deployment.rst +++ b/docs/deployment.rst @@ -1,4 +1,4 @@ -Deployment +Distributed Deployment ====================== This guide serves as reference deployment for setting up a FEDn network consisting of: @@ -29,7 +29,7 @@ The reducer and clients need to be able to resolve the hostname for the combiner we show how this can be achieved if no external DNS resolution is available, by setting "extra host" in the Docker containers for the Reducer and client. Note that there are many other possible ways to achieve this, depending on your setup. 1. Deploy storage and database services (MinIO, MongoDB and MongoExpress) --------------------------------------------------------------------- +------------------------------------------------------------------------- First, deploy MinIO and Mongo services on one of the hosts. Edit the `docker-compose.yaml` file to change the default passwords and ports. diff --git a/docs/faq.rst b/docs/faq.rst index b3eab3c6b..948e53d57 100644 --- a/docs/faq.rst +++ b/docs/faq.rst @@ -71,7 +71,7 @@ Q: How can I configure the round validity policy: In the main control implementation https://github.com/scaleoutsystems/fedn/blob/master/fedn/fedn/clients/reducer/control.py you can modify or replace the wiwmethod "check_round_validity_policy". As we expand with more implementations of this policy, we plan to make it runtime configurable. Q: Can I start a client listening only to training requests or only on validation requests?: -------------------------------------------------- +-------------------------------------------------------------------------------------------- Yes! From FEDn 0.3.0 there is an option to toggle which message streams a client subscibes to. 
For example, to start a pure validation client: diff --git a/docs/fedn.network.api.rst b/docs/fedn.network.api.rst new file mode 100644 index 000000000..b14090da3 --- /dev/null +++ b/docs/fedn.network.api.rst @@ -0,0 +1,34 @@ +fedn.network.api package +======================== + +.. automodule:: fedn.network.api + :members: + :undoc-members: + :show-inheritance: + +Submodules +---------- + +fedn.network.api.client module +------------------------------ + +.. automodule:: fedn.network.api.client + :members: + :undoc-members: + :show-inheritance: + +fedn.network.api.interface module +--------------------------------- + +.. automodule:: fedn.network.api.interface + :members: + :undoc-members: + :show-inheritance: + +fedn.network.api.network module +------------------------------- + +.. automodule:: fedn.network.api.network + :members: + :undoc-members: + :show-inheritance: diff --git a/docs/fedn.network.clients.rst b/docs/fedn.network.clients.rst new file mode 100644 index 000000000..81b070aa1 --- /dev/null +++ b/docs/fedn.network.clients.rst @@ -0,0 +1,42 @@ +fedn.network.clients package +============================ + +.. automodule:: fedn.network.clients + :members: + :undoc-members: + :show-inheritance: + +Submodules +---------- + +fedn.network.clients.client module +---------------------------------- + +.. automodule:: fedn.network.clients.client + :members: + :undoc-members: + :show-inheritance: + +fedn.network.clients.connect module +----------------------------------- + +.. automodule:: fedn.network.clients.connect + :members: + :undoc-members: + :show-inheritance: + +fedn.network.clients.package module +----------------------------------- + +.. automodule:: fedn.network.clients.package + :members: + :undoc-members: + :show-inheritance: + +fedn.network.clients.state module +--------------------------------- + +.. 
automodule:: fedn.network.clients.state + :members: + :undoc-members: + :show-inheritance: diff --git a/docs/fedn.network.combiner.aggregators.rst b/docs/fedn.network.combiner.aggregators.rst new file mode 100644 index 000000000..a26abf1f4 --- /dev/null +++ b/docs/fedn.network.combiner.aggregators.rst @@ -0,0 +1,26 @@ +fedn.network.combiner.aggregators package +========================================= + +.. automodule:: fedn.network.combiner.aggregators + :members: + :undoc-members: + :show-inheritance: + +Submodules +---------- + +fedn.network.combiner.aggregators.aggregatorbase module +------------------------------------------------------- + +.. automodule:: fedn.network.combiner.aggregators.aggregatorbase + :members: + :undoc-members: + :show-inheritance: + +fedn.network.combiner.aggregators.fedavg module +----------------------------------------------- + +.. automodule:: fedn.network.combiner.aggregators.fedavg + :members: + :undoc-members: + :show-inheritance: diff --git a/docs/fedn.network.combiner.rst b/docs/fedn.network.combiner.rst new file mode 100644 index 000000000..a894200f4 --- /dev/null +++ b/docs/fedn.network.combiner.rst @@ -0,0 +1,58 @@ +fedn.network.combiner package +============================= + +.. automodule:: fedn.network.combiner + :members: + :undoc-members: + :show-inheritance: + +Subpackages +----------- + +.. toctree:: + :maxdepth: 4 + + fedn.network.combiner.aggregators + +Submodules +---------- + +fedn.network.combiner.connect module +------------------------------------ + +.. automodule:: fedn.network.combiner.connect + :members: + :undoc-members: + :show-inheritance: + +fedn.network.combiner.interfaces module +--------------------------------------- + +.. automodule:: fedn.network.combiner.interfaces + :members: + :undoc-members: + :show-inheritance: + +fedn.network.combiner.modelservice module +----------------------------------------- + +.. 
automodule:: fedn.network.combiner.modelservice + :members: + :undoc-members: + :show-inheritance: + +fedn.network.combiner.round module +---------------------------------- + +.. automodule:: fedn.network.combiner.round + :members: + :undoc-members: + :show-inheritance: + +fedn.network.combiner.server module +----------------------------------- + +.. automodule:: fedn.network.combiner.server + :members: + :undoc-members: + :show-inheritance: diff --git a/docs/fedn.network.controller.rst b/docs/fedn.network.controller.rst new file mode 100644 index 000000000..a0e995805 --- /dev/null +++ b/docs/fedn.network.controller.rst @@ -0,0 +1,18 @@ +fedn.network.controller package +=============================== + +.. automodule:: fedn.network.controller + :members: + :undoc-members: + :show-inheritance: + +Submodules +---------- + +fedn.network.controller.control module +-------------------------------------- + +.. automodule:: fedn.network.controller.control + :members: + :undoc-members: + :show-inheritance: diff --git a/docs/fedn.network.dashboard.rst b/docs/fedn.network.dashboard.rst new file mode 100644 index 000000000..25ee3e8d8 --- /dev/null +++ b/docs/fedn.network.dashboard.rst @@ -0,0 +1,26 @@ +fedn.network.dashboard package +============================== + +.. automodule:: fedn.network.dashboard + :members: + :undoc-members: + :show-inheritance: + +Submodules +---------- + +fedn.network.dashboard.plots module +----------------------------------- + +.. automodule:: fedn.network.dashboard.plots + :members: + :undoc-members: + :show-inheritance: + +fedn.network.dashboard.restservice module +----------------------------------------- + +.. 
automodule:: fedn.network.dashboard.restservice + :members: + :undoc-members: + :show-inheritance: diff --git a/docs/fedn.network.loadbalancer.rst b/docs/fedn.network.loadbalancer.rst new file mode 100644 index 000000000..7934f2228 --- /dev/null +++ b/docs/fedn.network.loadbalancer.rst @@ -0,0 +1,34 @@ +fedn.network.loadbalancer package +================================= + +.. automodule:: fedn.network.loadbalancer + :members: + :undoc-members: + :show-inheritance: + +Submodules +---------- + +fedn.network.loadbalancer.firstavailable module +----------------------------------------------- + +.. automodule:: fedn.network.loadbalancer.firstavailable + :members: + :undoc-members: + :show-inheritance: + +fedn.network.loadbalancer.leastpacked module +-------------------------------------------- + +.. automodule:: fedn.network.loadbalancer.leastpacked + :members: + :undoc-members: + :show-inheritance: + +fedn.network.loadbalancer.loadbalancerbase module +------------------------------------------------- + +.. automodule:: fedn.network.loadbalancer.loadbalancerbase + :members: + :undoc-members: + :show-inheritance: diff --git a/docs/fedn.network.rst b/docs/fedn.network.rst new file mode 100644 index 000000000..2e0ccd753 --- /dev/null +++ b/docs/fedn.network.rst @@ -0,0 +1,48 @@ +fedn.network package +==================== + +.. automodule:: fedn.network + :members: + :undoc-members: + :show-inheritance: + +Subpackages +----------- + +.. toctree:: + :maxdepth: 4 + + fedn.network.api + fedn.network.clients + fedn.network.combiner + fedn.network.controller + fedn.network.dashboard + fedn.network.loadbalancer + fedn.network.statestore + +Submodules +---------- + +fedn.network.config module +-------------------------- + +.. automodule:: fedn.network.config + :members: + :undoc-members: + :show-inheritance: + +fedn.network.reducer module +--------------------------- + +.. 
automodule:: fedn.network.reducer + :members: + :undoc-members: + :show-inheritance: + +fedn.network.state module +------------------------- + +.. automodule:: fedn.network.state + :members: + :undoc-members: + :show-inheritance: diff --git a/docs/fedn.network.statestore.rst b/docs/fedn.network.statestore.rst new file mode 100644 index 000000000..06d2d4607 --- /dev/null +++ b/docs/fedn.network.statestore.rst @@ -0,0 +1,26 @@ +fedn.network.statestore package +=============================== + +.. automodule:: fedn.network.statestore + :members: + :undoc-members: + :show-inheritance: + +Submodules +---------- + +fedn.network.statestore.mongostatestore module +---------------------------------------------- + +.. automodule:: fedn.network.statestore.mongostatestore + :members: + :undoc-members: + :show-inheritance: + +fedn.network.statestore.statestorebase module +--------------------------------------------- + +.. automodule:: fedn.network.statestore.statestorebase + :members: + :undoc-members: + :show-inheritance: diff --git a/docs/fedn.rst b/docs/fedn.rst new file mode 100644 index 000000000..0ef5dce19 --- /dev/null +++ b/docs/fedn.rst @@ -0,0 +1,16 @@ +fedn (python package) +===================== + +.. automodule:: fedn + :members: + :undoc-members: + :show-inheritance: + +Subpackages +----------- + +.. toctree:: + :maxdepth: 4 + + fedn.network + fedn.utils diff --git a/docs/fedn.utils.plugins.rst b/docs/fedn.utils.plugins.rst new file mode 100644 index 000000000..adc4ee88a --- /dev/null +++ b/docs/fedn.utils.plugins.rst @@ -0,0 +1,42 @@ +fedn.utils.plugins package +========================== + +.. automodule:: fedn.utils.plugins + :members: + :undoc-members: + :show-inheritance: + +Submodules +---------- + +fedn.utils.plugins.helperbase module +------------------------------------ + +.. automodule:: fedn.utils.plugins.helperbase + :members: + :undoc-members: + :show-inheritance: + +fedn.utils.plugins.kerashelper module +------------------------------------- + +.. 
automodule:: fedn.utils.plugins.kerashelper + :members: + :undoc-members: + :show-inheritance: + +fedn.utils.plugins.numpyarrayhelper module +------------------------------------------ + +.. automodule:: fedn.utils.plugins.numpyarrayhelper + :members: + :undoc-members: + :show-inheritance: + +fedn.utils.plugins.pytorchhelper module +--------------------------------------- + +.. automodule:: fedn.utils.plugins.pytorchhelper + :members: + :undoc-members: + :show-inheritance: diff --git a/docs/fedn.utils.rst b/docs/fedn.utils.rst new file mode 100644 index 000000000..7fcc67d44 --- /dev/null +++ b/docs/fedn.utils.rst @@ -0,0 +1,58 @@ +fedn.utils package +================== + +.. automodule:: fedn.utils + :members: + :undoc-members: + :show-inheritance: + +Subpackages +----------- + +.. toctree:: + :maxdepth: 4 + + fedn.utils.plugins + +Submodules +---------- + +fedn.utils.checksum module +-------------------------- + +.. automodule:: fedn.utils.checksum + :members: + :undoc-members: + :show-inheritance: + +fedn.utils.dispatcher module +---------------------------- + +.. automodule:: fedn.utils.dispatcher + :members: + :undoc-members: + :show-inheritance: + +fedn.utils.helpers module +------------------------- + +.. automodule:: fedn.utils.helpers + :members: + :undoc-members: + :show-inheritance: + +fedn.utils.logger module +------------------------ + +.. automodule:: fedn.utils.logger + :members: + :undoc-members: + :show-inheritance: + +fedn.utils.process module +------------------------- + +.. automodule:: fedn.utils.process + :members: + :undoc-members: + :show-inheritance: diff --git a/docs/img/FEDn_network.png b/docs/img/FEDn_network.png new file mode 100644 index 000000000..76d1a53d2 Binary files /dev/null and b/docs/img/FEDn_network.png differ diff --git a/docs/index.rst b/docs/index.rst index b4bc2a16f..fe253738a 100644 --- a/docs/index.rst +++ b/docs/index.rst @@ -1,12 +1,20 @@ -.. include:: ../README.rst - -Table of Contents ------------------ .. 
toctree:: :maxdepth: 2 :caption: Table of Contents + introduction + quickstart architecture deployment + interfaces tutorial - faq \ No newline at end of file + faq + modules + + +Indices and tables +================== + +* :ref:`genindex` +* :ref:`modindex` +* :ref:`search` \ No newline at end of file diff --git a/docs/interfaces.rst b/docs/interfaces.rst new file mode 100644 index 000000000..f32020261 --- /dev/null +++ b/docs/interfaces.rst @@ -0,0 +1,37 @@ +User interfaces +=============== + +FEDn comes with an *APIClient* and a *Dashboard* for interacting with the FEDn network. The APIClient is a Python3 library that can be used to interact with the FEDn network programmatically. +The Dashboard is a web-based user interface that can be used to interact with the FEDn network through a web browser. + +APIClient +-------------- +The APIClient is a Python3 library that can be used to interact with the FEDn network programmatically. The APIClient is available as a Python package on PyPI, and can be installed using pip: + +.. code-block:: bash + + $ pip install fedn + + +To initialize the APIClient, you need to provide the hostname and port of the FEDn API server. The default port is 8092. The following code snippet shows how to initialize the APIClient: + +.. code-block:: python + + from fedn import APIClient + client = APIClient("localhost", 8092) + +For more information on how to use the APIClient, see the :py:mod:`fedn.network.api.client`. + +Dashboard +-------------- +The Dashboard is a web-based user interface that can be used to interact with the FEDn network through a web browser. The Dashboard is available as a Docker image, and can be run using the following command: + +.. code:: bash + + $ docker-compose up -d dashboard + +OBS! If you have followed any of the examples, the dashboard will already be running! +The Dashboard is now available at http://localhost:8090. 
If no compute package has been configured, the Dashboard will ask you to upload a compute package. +A compute package is a zip file containing the ML code that will be executed on the clients. +For more information on how to create a compute package, see the :ref:`compute package documentation `. After uploading a compute package, you will also need to upload an initial model. This initial model is +usually the initial weights for the model that will be trained. You can then navigate to the Control Panel to start a training session. diff --git a/docs/introduction.rst b/docs/introduction.rst new file mode 100644 index 000000000..6897690ba --- /dev/null +++ b/docs/introduction.rst @@ -0,0 +1,61 @@ +Introduction to Federated Learning +================================== + +Federated Learning stands at the forefront of modern machine learning techniques, offering a novel approach to address challenges related to data privacy, security, +and decentralized data distribution. In contrast to traditional machine learning setups where data is collected and stored centrally, +Federated Learning allows for collaborative model training while keeping data localized. This innovative paradigm proves to be particularly advantageous in +scenarios where data cannot be easily shared due to privacy regulations, network limitations, or ownership concerns. + +At its core, Federated Learning orchestrates model training across distributed devices or servers, referred to as clients or participants. +These participants could be diverse endpoints such as mobile devices, IoT gadgets, or remote servers. Rather than transmitting raw data to a central location, +each participant computes gradients locally based on its data. These gradients are then communicated to a central server, often called the aggregator or orchestrator. +The central server aggregates and combines the gradients from multiple participants to update a global model. 
+This iterative process allows the global model to improve without the need to share the raw data. + +FEDn: the SDK for scalable federated learning +--------------------------------------------- + +FEDn serves as a System Development Kit (SDK) tailored for scalable federated learning. +It is used to implement the core server side logic (including model aggregation) and the client side integrations. +It implements functionality to deploy and scale the server side in geographically distributed setups. +Developers and ML engineers can use FEDn to build custom federated learning systems and bespoke deployments. + + +One of the standout features of FEDn is its ability to deploy and scale the server-side in geographically distributed setups, +adapting to varying project needs and geographical considerations. + + +Scalable and Resilient +...................... + +FEDn exhibits scalability and resilience, thanks to its multi-tiered architecture. Multiple aggregation servers, known as combiners, +form a network to divide the workload, coordinating clients, and aggregating models. +This architecture allows for high performance in various settings, from thousands of clients in a cross-device environment to +large model updates in a cross-silo scenario. Crucially, FEDn has built-in recovery capabilities for all critical components, enhancing system reliability. + +ML-Framework Agnostic +..................... + +With FEDn, model updates are treated as black-box computations, meaning it can support any ML model type or framework. +This flexibility allows for out-of-the-box support for popular frameworks like Keras and PyTorch, making it a versatile tool for any machine learning project. + +Security +......... + +A key security feature of FEDn is its client protection capabilities, negating the need for clients to expose any ingress ports, +thus reducing potential security vulnerabilities. + +Event Tracking and Training progress +.................................... 
+ +To ensure transparency and control over the learning process, +FEDn logs events in the federation and does real-time tracking of training progress. A flexible API lets the user define validation strategies locally on clients. +Data is logged as JSON to MongoDB, enabling users to create custom dashboards and visualizations easily. + +User Interfaces +............... + +FEDn offers a Flask-based Dashboard that allows users to monitor client model validations in real time. It also facilitates tracking client training time distributions +and key performance metrics for clients and combiners, providing a comprehensive view of the system’s operation and performance. + +FEDn also comes with a REST API for integration with external dashboards and visualization tools, or integration with other systems. \ No newline at end of file diff --git a/docs/modules.rst b/docs/modules.rst new file mode 100644 index 000000000..c4dfb74d1 --- /dev/null +++ b/docs/modules.rst @@ -0,0 +1,7 @@ +API reference +============= + +.. toctree:: + :maxdepth: 4 + + fedn diff --git a/docs/quickstart.rst b/docs/quickstart.rst new file mode 100644 index 000000000..2b89ff165 --- /dev/null +++ b/docs/quickstart.rst @@ -0,0 +1,118 @@ +Quick Start +=========== + +Clone this repository, navigate into it and start a pseudo-distributed FEDn network using docker-compose: + +.. code-block:: + + docker-compose up + + + +This will start up all necessary components for a FEDn network, except for the clients. + +.. warning:: + The FEDn network is configured to use local Minio and MongoDB instances for storage. This is not suitable for production, but is fine for testing. + +.. note:: + You have the option to programmatically interact with the FEDn network using the Python APIClient, or you can use the Dashboard. In these Note sections we will use the APIClient. + Install FEDn via pip: + + .. code-block:: bash + + $ pip install fedn + # or from source + $ cd fedn + $ pip install . 
+ +Navigate to http://localhost:8090. You should see the FEDn Dashboard, asking you to upload a compute package. The compute package is a tarball of a project. +The project in turn implements the entrypoints used by clients to compute model updates and to validate a model. + +Navigate into 'examples/mnist-pytorch'. + +Start by initializing a virtual environment with all of the required dependencies for this project. + +.. code-block:: python + + bin/init_venv.sh + +Now create the compute package and an initial model: + +.. code-block:: + + bin/build.sh + +Upload the generated files 'package.tgz' and 'seed.npz' in the FEDn Dashboard. + +.. note:: + Instead of uploading in the dashboard do: + + .. code:: python + + >>> from fedn import APIClient + >>> client = APIClient(host="localhost", port=8092) + >>> client.set_package("package.tgz", helper="pytorchhelper") + >>> client.set_initial_model("seed.npz") + +The next step is to configure and attach clients. For this we need to download data and make data partitions: + +Download the data: + +.. code-block:: + + bin/get_data + + +Split the data in 2 parts for the clients: + +.. code-block:: + + bin/split_data + +Data partitions will be generated in the folder 'data/clients'. + +Now navigate to http://localhost:8090/network and download the client config file. Place it in the example working directory. + +.. note:: + In the Python environment where you installed FEDn: + + .. code:: python + + >>> import yaml + >>> config = client.get_client_config(checksum=True) + >>> with open("client.yaml", "w") as f: + >>> f.write(yaml.dump(config)) + +To connect a client that uses the data partition 'data/clients/1/mnist.pt': + +.. code-block:: + + docker run \ + -v $PWD/client.yaml:/app/client.yaml \ + -v $PWD/data/clients/1:/var/data \ + -e ENTRYPOINT_OPTS=--data_path=/var/data/mnist.pt \ + --network=fedn_default \ + ghcr.io/scaleoutsystems/fedn/fedn:develop-mnist-pytorch run client -in client.yaml --name client1 + +.. 
 note:: + If you are using the APIClient you must also start the training client via the "docker run" command as above. + +You are now ready to start training the model at http://localhost:8090/control. + +.. note:: + In the Python environment where you installed FEDn you can start training via: + + .. code:: python + + >>> ... + >>> client.start_session(session_id="test-session", rounds=3) + # Wait for training to complete, when controller is idle: + >>> client.get_controller_status() + # Show model trail: + >>> client.get_model_trail() + # Show model performance: + >>> client.list_validations() + + Please see :py:mod:`fedn.network.api` for more details on the APIClient. + +To scale up the experiment, refer to the README at 'examples/mnist-pytorch' (or the corresponding Keras version), where we explain how to use docker-compose to automate deployment of several clients. diff --git a/docs/requirements.txt b/docs/requirements.txt new file mode 100644 index 000000000..4170c03ef --- /dev/null +++ b/docs/requirements.txt @@ -0,0 +1 @@ +sphinx-rtd-theme \ No newline at end of file diff --git a/docs/tutorial.rst b/docs/tutorial.rst index 8e2bb7f78..355d86919 100644 --- a/docs/tutorial.rst +++ b/docs/tutorial.rst @@ -1,9 +1,9 @@ Tutorial: Compute Package ================================================ -This tutorial walks you through the key step done by the *model initiator* when setting up a federated project. -The code for this guideline has been taken from the mnist-keras example provided `here `_. -However, some modification to the code has been made for educational purposes. +This tutorial walks you through the design of a *compute package* for a FEDn client. The compute package is a tar.gz bundle of the code to be executed by each data-provider/client. +You will learn how to design the compute package and how to write the entry points for training and validation. Examples are provided for the Keras and PyTorch frameworks, which can be +found in the `examples `_. 
The compute package ----------------------------- @@ -14,23 +14,28 @@ The compute package :align: center The *compute package* is a tar.gz bundle of the code to be executed by each data-provider/client. -This package is uploaded to the Reducer upon initialization of the FEDN Network (along with the initial model). +This package is uploaded to the *Controller* upon initialization of the FEDN Network (along with the initial model). When a client connects to the network, it downloads and unpacks the package locally and are then ready to participate in training and/or validation. -The logic is illustrated in the above figure. When the `FEDn client `_. +The logic is illustrated in the above figure. When the :py:mod:`fedn.network.clients` recieves a model update request from the combiner, it calls upon a Dispatcher that looks up entry point definitions in the compute package. These entrypoints define commands executed by the client to update/train or validate a model. Designing the compute package ------------------------------ -We recommend to use the project structure followed by most example projects `here `_. -In the examples we have roughly the following structure: - -.. image:: img/tree_package.png - :alt: Project structure - :scale: 70 - :align: center +We recommend to use the project structure followed by most example `projects `_. +In the examples we have roughly the following file and folder structure: + +| project +| ├── client +| │ ├── entrypoint.py +| │ └── fedn.yaml +| ├── data +| │ └── mnist.npz +| ├── requirements.txt +| └── docker-compose.yml/Dockerfile +| The "client" folder is the *compute package* which will become a tar.gz bundle of the code to be executed by each data-provider/client. The entry points, mentioned above, are defined in the *fedn.yaml*: @@ -39,217 +44,266 @@ each data-provider/client. 
The entry points, mentioned above, are defined in the entry_points: train: - command: python train.py + command: python entrypoint.py validate: - command: python validate.py - -Where the training entry point has the following logical overview: - -.. image:: img/TrainSISO.png - :alt: Training entrypoint - :width: 100% - :align: center + command: python entrypoint.py The training entry point should be a single-input single-output program, taking as input a model update file -and writing a model update file (same file format). Staging and upload of these files are handled by the FEDn client. A helper class in the FEDn SDK handled the ML-framework +and writing a model update file (same file format). Staging and upload of these files are handled by the FEDn client. A helper class in the FEDn SDK handles the ML-framework specific file serialization and deserialization. The validation entry point acts very similar except we perform validation on the *model_in* and outputs a json containing a validation scores (see more below). -Upon training (model update) requests from the combiner, the client will download the latest (current) global model and *train.py* will be executed with this model update as input. After training / updating completes, the local client will capture the output file and send back the updated model to the combiner. For the local execution this means that the program (in this case train.py) will be executed as: +Upon training (model update) requests from the combiner, the client will download the latest (current) global model and *entrypoint.py train* will be executed with this model update as input. +After training / updating completes, the local client will capture the output file and send back the updated model to the combiner. +For the local execution this means that the program (in this case entrypoint.py) will be executed as: .. 
code-block:: python - python train.py model_in model_out + python entrypoint.py train in_model_path out_model_path -A typical *train.py* example can look like this: +A *entrypoint.py* example can look like this: .. code-block:: python - from __future__ import print_function - import sys - import yaml + import collections + import math + import os + + import docker + import fire + import torch + + from fedn.utils.helpers import get_helper, save_metadata, save_metrics - from data.read_data import read_data + HELPER_MODULE = 'pytorchhelper' + NUM_CLASSES = 10 + def _compile_model(): + """ Compile the pytorch model. - def train(model,data_path,settings): + :return: The compiled model. + :rtype: torch.nn.Module """ - Training function which will be called upon model update requests - from the combiner - - :param model: The latest global model, see '__main__' - :type model: User defined - :param data: Traning data - :type data: User defined - :param settings: Hyper-parameters settings - :type settings: dict - :return: Trained/updated model - :rtype: User defined + class Net(torch.nn.Module): + def __init__(self): + super(Net, self).__init__() + self.fc1 = torch.nn.Linear(784, 64) + self.fc2 = torch.nn.Linear(64, 32) + self.fc3 = torch.nn.Linear(32, 10) + + def forward(self, x): + x = torch.nn.functional.relu(self.fc1(x.reshape(x.size(0), 784))) + x = torch.nn.functional.dropout(x, p=0.5, training=self.training) + x = torch.nn.functional.relu(self.fc2(x)) + x = torch.nn.functional.log_softmax(self.fc3(x), dim=1) + return x + + # Return model + return Net() + + + def _load_data(data_path, is_train=True): + """ Load data from disk. + + :param data_path: Path to data file. + :type data_path: str + :param is_train: Whether to load training or test data. + :type is_train: bool + :return: Tuple of data and labels. 
+ :rtype: tuple """ - print("-- RUNNING TRAINING --", flush=True) + if data_path is None: + data = torch.load(_get_data_path()) + else: + data = torch.load(data_path) - #CODE TO READ DATA - - #EXAMPLE, SOME USER DEFINED FUNCION THAT READS THE TRAINING DATA - (x_train, y_train) = read_data(data_path, trainset=True) + if is_train: + X = data['x_train'] + y = data['y_train'] + else: + X = data['x_test'] + y = data['y_test'] - #CODE FOR START TRAINING - #EXAMPLE (Tensoflow) - model.fit(x_train, y_train, batch_size=settings['batch_size'], epochs=settings['epochs'], verbose=1) + # Normalize + X = X / 255 - print("-- TRAINING COMPLETED --", flush=True) - return model + return X, y - if __name__ == '__main__': - - #READ HYPER_PARAMETER SETTINGS FROM YAML FILE - with open('settings.yaml', 'r') as fh: - try: - settings = dict(yaml.safe_load(fh)) - except yaml.YAMLError as e: - raise(e) - - #CREATE THE SEED MODEL AND UPDATE WITH LATEST WEIGHTS - #EXAMPLE, USE KERAS HELPER IN FEDN SDK FOR READING WEIGHTS - from fedn.utils.kerashelper import KerasHelper - helper = KerasHelper() - weights = helper.load_model(sys.argv[1]) - - #EXAMPLE, A USER DEFINED FUNCTION THAT CONSTRUCTS THE MODEL, E.G THE ARCHITECTURE OF A NEURAL NETWORK - from models.model import create_seed_model - model = create_seed_model() - #EXAMPLE (HOW TO SET WEIGHTS OF A MODEL DIFFERS BETWEEN LIBRARIES) - model.set_weights(weights) - - #CALL TRAINING FUNCTION AND GET UPDATED MODEL - model = train(model,'../data/your_data.file',settings) - - #SAVE/SEND MODEL - #EXAMPLE, USING KERAS HELPER IN FEDN SDK - helper.save_model(model.get_weights(),sys.argv[2]) - + def _save_model(model, out_path): + """ Save model to disk. -The format of the input and output files (model updates) are dependent on the ML framework used. A `helper class `_. -defines serializaion and de-serialization of the model updates. 
-Observe that the functions `create_seed_model `_ -and `read_data `_ are implemented by the user, where the first function -constructs (compiles) and returns an untrained (seed) model. We then take this model and set the weights to be equal to the current global model recieved -from the commbiner. In the example above we use the Keras helper class to de-serialize those weights and the keras funcion *model.set_weights()* to set the seed model to be equal to the current model. -We then call the *train* function to first read the training data -(obs. the location of the data can differ depending on if you run the client in a native or containerized environment, in the latter case it's recommend to mount the data to the container, -the location should then be relative to the mount path) and then start the training. -In this example, training equals fitting the keras model, thus we call *model.fit()* fucntion. -The *settings.yaml* is for conveniance and is not required but contains the hyper-parameter settings for the local training as key/value pairs. + :param model: The model to save. + :type model: torch.nn.Module + :param out_path: The path to save to. + :type out_path: str + """ + weights = model.state_dict() + weights_np = collections.OrderedDict() + for w in weights: + weights_np[w] = weights[w].cpu().detach().numpy() + helper = get_helper(HELPER_MODULE) + helper.save(weights, out_path) -For validations it is a requirement that the output is valid json: -.. code-block:: python + def _load_model(model_path): + """ Load model from disk. - python validate.py model_in validation.json - -The Dahboard in the FEDn UI will plot any scalar metric in this json file, but you can include any type in the file assuming that it is valid json. These values can then be obtained (by an athorized user) from the MongoDB database (via Mongo Express, or any query interface or API). Typically, the actual model is defined in a small library, and does not depend on FEDn. 
An example (based on the keras case) of the *validate.py* is povided below: + param model_path: The path to load from. + :type model_path: str + :return: The loaded model. + :rtype: torch.nn.Module + """ + helper = get_helper(HELPER_MODULE) + weights_np = helper.load(model_path) + weights = collections.OrderedDict() + for w in weights_np: + weights[w] = torch.tensor(weights_np[w]) + model = _compile_model() + model.load_state_dict(weights) + model.eval() + return model -.. code-block:: python - import sys - from data.read_data import read_data - import json - from sklearn import metrics - import os - import yaml - import numpy as np + def init_seed(out_path='seed.npz'): + """ Initialize seed model. - def validate(model,data): + :param out_path: The path to save the seed model to. + :type out_path: str """ - Validation function which will be called upon model validation requests - from the combiner. - - :param model: The latest global model, see '__main__' - :type model: User defined - :param data: The data used for validation, could include both training and test/validation data - :type data: User defined - :return: Model scores from the validation - :rtype: dict + # Init and save + model = _compile_model() + _save_model(model, out_path) + + + def train(in_model_path, out_model_path, data_path=None, batch_size=32, epochs=1, lr=0.01): + """ Train model. + + :param in_model_path: The path to the input model. + :type in_model_path: str + :param out_model_path: The path to save the output model to. + :type out_model_path: str + :param data_path: The path to the data file. + :type data_path: str + :param batch_size: The batch size to use. + :type batch_size: int + :param epochs: The number of epochs to train. + :type epochs: int + :param lr: The learning rate to use. 
+ :type lr: float """ - print("-- RUNNING VALIDATION --", flush=True) - - #CODE TO READ DATA - - #EXAMPLE - (x_train, y_train) = read_data(data, trainset=True) - - #EXAMPLE - (x_test, y_test) = read_data(data, trainset=False) - - try: - #CODE HERE FOR OBTAINING VALIDATION SCORES - - #EXAMPLE - model_score = model.evaluate(x_train, y_train, verbose=0) - model_score_test = model.evaluate(x_test, y_test, verbose=0) - y_pred = model.predict(x_test) - y_pred = np.argmax(y_pred, axis=1) - clf_report = metrics.classification_report(y_test.argmax(axis=-1),y_pred) - - except Exception as e: - print("failed to validate the model {}".format(e),flush=True) - raise - - #PUT SCORES AS VALUES FOR CORRESPONDING KEYS (CHANGE VARIABLES): + # Load data + x_train, y_train = _load_data(data_path) + + # Load model + model = _load_model(in_model_path) + + # Train + optimizer = torch.optim.SGD(model.parameters(), lr=lr) + n_batches = int(math.ceil(len(x_train) / batch_size)) + criterion = torch.nn.NLLLoss() + for e in range(epochs): # epoch loop + for b in range(n_batches): # batch loop + # Retrieve current batch + batch_x = x_train[b * batch_size:(b + 1) * batch_size] + batch_y = y_train[b * batch_size:(b + 1) * batch_size] + # Train on batch + optimizer.zero_grad() + outputs = model(batch_x) + loss = criterion(outputs, batch_y) + loss.backward() + optimizer.step() + # Log + if b % 100 == 0: + print( + f"Epoch {e}/{epochs-1} | Batch: {b}/{n_batches-1} | Loss: {loss.item()}") + + # Metadata needed for aggregation server side + metadata = { + 'num_examples': len(x_train), + 'batch_size': batch_size, + 'epochs': epochs, + 'lr': lr + } + + # Save JSON metadata file + save_metadata(metadata, out_model_path) + + # Save model update + _save_model(model, out_model_path) + + + def validate(in_model_path, out_json_path, data_path=None): + """ Validate model. + + :param in_model_path: The path to the input model. 
+ :type in_model_path: str + :param out_json_path: The path to save the output JSON to. + :type out_json_path: str + :param data_path: The path to the data file. + :type data_path: str + """ + # Load data + x_train, y_train = _load_data(data_path) + x_test, y_test = _load_data(data_path, is_train=False) + + # Load model + model = _load_model(in_model_path) + + # Evaluate + criterion = torch.nn.NLLLoss() + with torch.no_grad(): + train_out = model(x_train) + training_loss = criterion(train_out, y_train) + training_accuracy = torch.sum(torch.argmax( + train_out, dim=1) == y_train) / len(train_out) + test_out = model(x_test) + test_loss = criterion(test_out, y_test) + test_accuracy = torch.sum(torch.argmax( + test_out, dim=1) == y_test) / len(test_out) + + # JSON schema report = { - "classification_report": clf_report, - "training_loss": model_score[0], - "training_accuracy": model_score[1], - "test_loss": model_score_test[0], - "test_accuracy": model_score_test[1], - } + "training_loss": training_loss.item(), + "training_accuracy": training_accuracy.item(), + "test_loss": test_loss.item(), + "test_accuracy": test_accuracy.item(), + } - print("-- VALIDATION COMPLETE! 
--", flush=True) - return report + # Save JSON + save_metrics(report, out_json_path) - if __name__ == '__main__': - #READS THE LATEST WEIGHTS FROM GLOBAL MODEL (COMBINER) - - from fedn.utils.kerashelper import KerasHelper - helper = KerasHelper() - weights = helper.load_model(sys.argv[1]) - - #CREATE THE SEED MODEL AND UPDATE WITH LATEST WEIGHTS - from models.model import create_seed_model - model = create_seed_model() - #EXAMPLE (HOW TO SET WEIGHTS OF A MODEL DIFFERS BETWEEN LIBRARIES) - model.set_weights(weights) + if __name__ == '__main__': + fire.Fire({ + 'init_seed': init_seed, + 'train': train, + 'validate': validate, + # '_get_data_path': _get_data_path, # for testing + }) - #START VALIDATION - report = validate(model,'../data/your_data.file') - #SAVE/SEND SCORE REPORT - with open(sys.argv[2],"w") as fh: - fh.write(json.dumps(report)) -As demonstrated in the code above, the structure is very similar to *train.py*. The main difference is that we perform validation of a current model provided by the combiner instead of training. Again, the *read_data* function is defined by the user. Once, we have optained a validation -*report* as a dictionary we can dump as json (required). Observe that the key/values are arbitrary. +The format of the input and output files (model updates) are dependent on the ML framework used. A helper instance :py:mod:`fedn.utils.plugins.pytorchhelper` is used to handle the serialization and deserialization of the model updates. +The first function (_compile_model) is used to define the model architecture and creates an initial model (which is then used by _init_seed). The second function (_load_data) is used to read the data (train and test) from disk. +The third function (_save_model) is used to save the model to disk using the pytorch helper module :py:mod:`fedn.utils.plugins.pytorchhelper`. The fourth function (_load_model) is used to load the model from disk, again +using the pytorch helper module. 
The fifth function (_init_seed) is used to initialize the seed model. The sixth function (_train) is used to train the model, observe the two first arguments which will be set by the FEDn client. +The seventh function (_validate) is used to validate the model, again observe the two first arguments which will be set by the FEDn client. -For the initialization of the Reducer, both the compute package and an initial model (weights) are required as individual files. To obtain the initial weights file we can use the fedn helpers to save the seed model to an output file (*init_model.py*): - -.. code-block:: python +Finally, we use the python package fire to create a command line interface for the entry points. This is not required but convenient. - from fedn.utils.kerashelper import KerasHelper - from models.mnist_model import create_seed_model +For validations it is a requirement that the output is saved in a valid json format: - if __name__ == '__main__': +.. code-block:: python - #CREATE INITIAL MODEL, UPLOAD TO REDUCER - model = create_seed_model() - outfile_name = "../initial_model/initial_model.npz" + python entrypoint.py validate in_model_path out_json_path + +In the code example we use the helper function :py:meth:`fedn.utils.helpers.save_metrics` to save the validation scores as a json file. - weights = model.get_weights() - helper = KerasHelper() - helper.save_model(weights, outfile_name) +The Dahboard in the FEDn UI will plot any scalar metric in this json file, but you can include any type in the file assuming that it is valid json. These values can then be obtained (by an athorized user) from the MongoDB database or using the :py:mod:`fedn.network.api.client`. -Which will be saved into the *initial_model* folder for convenience. Of course this file can also be a pretrained seed model. +Packaging for distribution +-------------------------- For the compute package we need to compress the *client* folder as .tar.gz file. E.g. using: .. 
code-block:: bash @@ -257,25 +311,39 @@ For the compute package we need to compress the *client* folder as .tar.gz file. tar -czvf package.tar.gz client -More on Data access -------------------- +This file can then be uploaded to the FEDn network using the FEDn UI or the :py:mod:`fedn.network.api.client`. + + +More on local data access +------------------------- There are many possible ways to interact with the local dataset. In principle, the only requirement is that the train and validate endpoints are able to correctly -read and use the data. In practice, it is then necessary to make some assumption on the local environemnt when writing train.py and validate.py. This is best explained -by looking at the code above. Here we assume that the dataset is present in a file called "your_data.file" in a folder "data" one level up in the file hierarchy relative to -the exection of train.py. Then, independent on the preferred way to run the client (native, Docker, K8s etc) this structure needs to be maintained for this particular +read and use the data. In practice, it is then necessary to make some assumption on the local environemnt when writing entrypoint.py. This is best explained +by looking at the code above. Here we assume that the dataset is present in a file called "mnist.npz" in a folder "data" one level up in the file hierarchy relative to +the exection of entrypoint.py. Then, independent on the preferred way to run the client (native, Docker, K8s etc) this structure needs to be maintained for this particular compute package. Note however, that there are many ways to accompish this on a local operational level. Running the client ------------------ -We recommend you to test your code before running the client. For example, you can simply test *train.py* and *validate.py* by: +We recommend you to test your code before running the client. For example, you can simply test *train* and *validate* by: -.. code-block:: python +.. 
code-block:: bash + + python entrypoint.py train ../seed.npz ../model_update.npz --data_path ../data/mnist.npz + python entrypoint.py validate ../model_update.npz ../validation.json --data_path ../data/mnist.npz - python train.py ../initial_model/initial_model.npz +Once everything works as expected you can start the federated network, upload the tar.gz compute package and the initial model. +Finally connect a client to the network: + +.. code-block:: bash -Once everything works as expected you can start the Reducer, upload the tar.gz compute package and the initial weights, followed by starting one or many combiners. -Finally connect a client to the network. Instructions for how to connect clients can be found in the `examples `_. + docker run \ + -v $PWD/client.yaml:/app/client.yaml \ + -v $PWD/data/clients/1:/var/data \ + -e ENTRYPOINT_OPTS=--data_path=/var/data/mnist.pt \ + --network=fedn_default \ + ghcr.io/scaleoutsystems/fedn/fedn:master-mnist-pytorch run client -in client.yaml --name client1 +The container image "ghcr.io/scaleoutsystems/fedn/fedn:develop-mnist-pytorch" is a pre-built image with the FEDn client and the PyTorch framework installed. diff --git a/examples/mnist-keras/README.md b/examples/mnist-keras/README.md index fde0d3569..b4b5c0672 100644 --- a/examples/mnist-keras/README.md +++ b/examples/mnist-keras/README.md @@ -66,3 +66,22 @@ Finally, you can start the experiment from the "control" tab of the UI. ## Clean up You can clean up by running `docker-compose down`. + +## Connecting to a distributed deployment +To start and remotely connect a client with the required dependencies for this example, start by downloading the `client.yaml` file. You can either navigate the reducer UI or run the following command. + +```bash +curl -k https://:/config/download > client.yaml +``` +> **Note** make sure to replace `` and `` with appropriate values. + +Now you are ready to start the client via Docker by running the following command. 
+ +```bash +docker run -d \ + -v $PWD/client.yaml:/app/client.yaml \ + -v $PWD/data:/var/data \ + -e ENTRYPOINT_OPTS=--data_path=/var/data/mnist.npz \ + ghcr.io/scaleoutsystems/fedn/fedn:develop-mnist-keras run client -in client.yaml +``` +> **Note** If reducer and combiner host names, as specfied in the configuration files, are not resolvable in the client host network you need to use the docker option `--add-hosts` to make them resolvable. Please refer to the Docker documentation for more detail. diff --git a/examples/mnist-keras/bin/build.sh b/examples/mnist-keras/bin/build.sh index 18cdb5128..44eda61df 100755 --- a/examples/mnist-keras/bin/build.sh +++ b/examples/mnist-keras/bin/build.sh @@ -5,4 +5,4 @@ set -e client/entrypoint init_seed # Make compute package -tar -czvf package.tgz client \ No newline at end of file +tar -czvf package.tgz client diff --git a/examples/mnist-keras/bin/init_venv.sh b/examples/mnist-keras/bin/init_venv.sh index 65c860ec4..6856700a9 100755 --- a/examples/mnist-keras/bin/init_venv.sh +++ b/examples/mnist-keras/bin/init_venv.sh @@ -2,7 +2,7 @@ set -e # Init venv -python -m venv .mnist-keras +python3 -m venv .mnist-keras # Pip deps .mnist-keras/bin/pip install --upgrade pip diff --git a/examples/mnist-keras/client/entrypoint b/examples/mnist-keras/client/entrypoint index e9609f2ca..5b1d76c01 100755 --- a/examples/mnist-keras/client/entrypoint +++ b/examples/mnist-keras/client/entrypoint @@ -7,8 +7,9 @@ import fire import numpy as np import tensorflow as tf -from fedn.utils.kerashelper import KerasHelper +from fedn.utils.helpers import get_helper, save_metadata, save_metrics +HELPER_MODULE = 'kerashelper' NUM_CLASSES = 10 @@ -17,7 +18,6 @@ def _get_data_path(): client = docker.from_env() container = client.containers.get(os.environ['HOSTNAME']) number = container.name[-1] - # Return data path return f"/var/data/clients/{number}/mnist.npz" @@ -64,8 +64,8 @@ def _load_data(data_path, is_train=True): def init_seed(out_path='seed.npz'): 
weights = _compile_model().get_weights() - helper = KerasHelper() - helper.save_model(weights, out_path) + helper = get_helper(HELPER_MODULE) + helper.save(weights, out_path) def train(in_model_path, out_model_path, data_path=None, batch_size=32, epochs=1): @@ -74,8 +74,8 @@ def train(in_model_path, out_model_path, data_path=None, batch_size=32, epochs=1 # Load model model = _compile_model() - helper = KerasHelper() - weights = helper.load_model(in_model_path) + helper = get_helper(HELPER_MODULE) + weights = helper.load(in_model_path) model.set_weights(weights) # Train @@ -83,7 +83,17 @@ def train(in_model_path, out_model_path, data_path=None, batch_size=32, epochs=1 # Save weights = model.get_weights() - helper.save_model(weights, out_model_path) + helper.save(weights, out_model_path) + + # Metadata needed for aggregation server side + metadata = { + 'num_examples': len(x_train), + 'batch_size': batch_size, + 'epochs': epochs, + } + + # Save JSON metadata file + save_metadata(metadata, out_model_path) def validate(in_model_path, out_json_path, data_path=None): @@ -93,8 +103,8 @@ def validate(in_model_path, out_json_path, data_path=None): # Load model model = _compile_model() - helper = KerasHelper() - weights = helper.load_model(in_model_path) + helper = get_helper(HELPER_MODULE) + weights = helper.load(in_model_path) model.set_weights(weights) # Evaluate @@ -111,9 +121,27 @@ def validate(in_model_path, out_json_path, data_path=None): "test_accuracy": model_score_test[1], } + # Save JSON + save_metrics(report, out_json_path) + + +def infer(in_model_path, out_json_path, data_path=None): + # Using test data for inference but another dataset could be loaded + x_test, _ = _load_data(data_path, is_train=False) + + # Load model + model = _compile_model() + helper = get_helper(HELPER_MODULE) + weights = helper.load(in_model_path) + model.set_weights(weights) + + # Infer + y_pred = model.predict(x_test) + y_pred = np.argmax(y_pred, axis=1) + # Save JSON with 
open(out_json_path, "w") as fh: - fh.write(json.dumps(report)) + fh.write(json.dumps({'predictions': y_pred.tolist()})) if __name__ == '__main__': @@ -121,5 +149,6 @@ if __name__ == '__main__': 'init_seed': init_seed, 'train': train, 'validate': validate, + 'infer': infer, '_get_data_path': _get_data_path, # for testing }) diff --git a/examples/mnist-keras/client/fedn.yaml b/examples/mnist-keras/client/fedn.yaml index 29c475270..91ec40c2a 100644 --- a/examples/mnist-keras/client/fedn.yaml +++ b/examples/mnist-keras/client/fedn.yaml @@ -2,4 +2,6 @@ entry_points: train: command: /venv/bin/python entrypoint train $ENTRYPOINT_OPTS validate: - command: /venv/bin/python entrypoint validate $ENTRYPOINT_OPTS \ No newline at end of file + command: /venv/bin/python entrypoint validate $ENTRYPOINT_OPTS + infer: + command: /venv/bin/python entrypoint infer $ENTRYPOINT_OPTS diff --git a/examples/mnist-pytorch/client/entrypoint b/examples/mnist-pytorch/client/entrypoint index 3a4189cd2..8d7953b59 100755 --- a/examples/mnist-pytorch/client/entrypoint +++ b/examples/mnist-pytorch/client/entrypoint @@ -1,6 +1,5 @@ #!./.mnist-pytorch/bin/python import collections -import json import math import os @@ -8,8 +7,9 @@ import docker import fire import torch -from fedn.utils.pytorchhelper import PytorchHelper +from fedn.utils.helpers import get_helper, save_metadata, save_metrics +HELPER_MODULE = 'pytorchhelper' NUM_CLASSES = 10 @@ -25,7 +25,11 @@ def _get_data_path(): def _compile_model(): - # Define model + """ Compile the pytorch model. + + :return: The compiled model. + :rtype: torch.nn.Module + """ class Net(torch.nn.Module): def __init__(self): super(Net, self).__init__() @@ -45,7 +49,15 @@ def _compile_model(): def _load_data(data_path, is_train=True): - # Load data + """ Load data from disk. + + :param data_path: Path to data file. + :type data_path: str + :param is_train: Whether to load training or test data. + :type is_train: bool + :return: Tuple of data and labels. 
+ :rtype: tuple + """ if data_path is None: data = torch.load(_get_data_path()) else: @@ -65,17 +77,31 @@ def _load_data(data_path, is_train=True): def _save_model(model, out_path): + """ Save model to disk. + + :param model: The model to save. + :type model: torch.nn.Module + :param out_path: The path to save to. + :type out_path: str + """ weights = model.state_dict() weights_np = collections.OrderedDict() for w in weights: weights_np[w] = weights[w].cpu().detach().numpy() - helper = PytorchHelper() - helper.save_model(weights, out_path) + helper = get_helper(HELPER_MODULE) + helper.save(weights, out_path) def _load_model(model_path): - helper = PytorchHelper() - weights_np = helper.load_model(model_path) + """ Load model from disk. + + param model_path: The path to load from. + :type model_path: str + :return: The loaded model. + :rtype: torch.nn.Module + """ + helper = get_helper(HELPER_MODULE) + weights_np = helper.load(model_path) weights = collections.OrderedDict() for w in weights_np: weights[w] = torch.tensor(weights_np[w]) @@ -86,12 +112,32 @@ def _load_model(model_path): def init_seed(out_path='seed.npz'): + """ Initialize seed model. + + :param out_path: The path to save the seed model to. + :type out_path: str + """ # Init and save model = _compile_model() _save_model(model, out_path) def train(in_model_path, out_model_path, data_path=None, batch_size=32, epochs=1, lr=0.01): + """ Train model. + + :param in_model_path: The path to the input model. + :type in_model_path: str + :param out_model_path: The path to save the output model to. + :type out_model_path: str + :param data_path: The path to the data file. + :type data_path: str + :param batch_size: The batch size to use. + :type batch_size: int + :param epochs: The number of epochs to train. + :type epochs: int + :param lr: The learning rate to use. 
+ :type lr: float + """ # Load data x_train, y_train = _load_data(data_path) @@ -118,11 +164,31 @@ def train(in_model_path, out_model_path, data_path=None, batch_size=32, epochs=1 print( f"Epoch {e}/{epochs-1} | Batch: {b}/{n_batches-1} | Loss: {loss.item()}") - # Save + # Metadata needed for aggregation server side + metadata = { + 'num_examples': len(x_train), + 'batch_size': batch_size, + 'epochs': epochs, + 'lr': lr + } + + # Save JSON metadata file + save_metadata(metadata, out_model_path) + + # Save model update _save_model(model, out_model_path) def validate(in_model_path, out_json_path, data_path=None): + """ Validate model. + + :param in_model_path: The path to the input model. + :type in_model_path: str + :param out_json_path: The path to save the output JSON to. + :type out_json_path: str + :param data_path: The path to the data file. + :type data_path: str + """ # Load data x_train, y_train = _load_data(data_path) x_test, y_test = _load_data(data_path, is_train=False) @@ -151,8 +217,7 @@ def validate(in_model_path, out_json_path, data_path=None): } # Save JSON - with open(out_json_path, "w") as fh: - fh.write(json.dumps(report)) + save_metrics(report, out_json_path) if __name__ == '__main__': @@ -160,5 +225,5 @@ if __name__ == '__main__': 'init_seed': init_seed, 'train': train, 'validate': validate, - '_get_data_path': _get_data_path, # for testing + # '_get_data_path': _get_data_path, # for testing }) diff --git a/examples/mnist-pytorch/requirements.txt b/examples/mnist-pytorch/requirements.txt index 3eadc6248..0bf7a6e78 100644 --- a/examples/mnist-pytorch/requirements.txt +++ b/examples/mnist-pytorch/requirements.txt @@ -1,4 +1,4 @@ torch==1.13.1 torchvision==0.14.1 fire==0.3.1 -docker==6.1.1 \ No newline at end of file +docker==6.1.1 diff --git a/fedn/README.md b/fedn/README.md index cd59e8a1b..b90c5e510 100644 --- a/fedn/README.md +++ b/fedn/README.md @@ -1 +1 @@ -# FEDn SDk # +FEDn diff --git a/fedn/__init__.py b/fedn/__init__.py index 
52ce8c9c3..31be09d81 100644 --- a/fedn/__init__.py +++ b/fedn/__init__.py @@ -1,3 +1,3 @@ -# -# Scaleout Systems AB -# __author__ = 'Morgan Ekmefjord morgan@scaleout.se' +"""The fedn package.""" + +# flake8: noqa diff --git a/fedn/cli/__init__.py b/fedn/cli/__init__.py index 13c9b1c51..840d4252b 100644 --- a/fedn/cli/__init__.py +++ b/fedn/cli/__init__.py @@ -1,3 +1,2 @@ -from .control_cmd import control_cmd # noqa: F401 from .main import main # noqa: F401 from .run_cmd import run_cmd # noqa: F401 diff --git a/fedn/cli/control_cmd.py b/fedn/cli/control_cmd.py deleted file mode 100644 index 9305c7015..000000000 --- a/fedn/cli/control_cmd.py +++ /dev/null @@ -1,125 +0,0 @@ -import os -from datetime import datetime - -import click - -from fedn.common.control.package import Package, PackageRuntime - -from .main import main - - -@main.group('control') -@click.pass_context -def control_cmd(ctx): - """ - - :param ctx: - """ - # if daemon: - # print('{} NYI should run as daemon...'.format(__file__)) - pass - - -@control_cmd.command('package') -@click.option('-r', '--reducer', required=False) -@click.option('-p', '--port', required=False) -@click.option('-t', '--token', required=False) -@click.option('-n', '--name', required=False, default=None) -@click.option('-u', '--upload', required=False, default=None) -@click.option('-v', '--validate', required=False, default=False) -@click.option('-d', '--cwd', required=False, default=None) -@click.pass_context -def package_cmd(ctx, reducer, port, token, name, upload, validate, cwd): - """ - - :param ctx: - :param reducer: - :param port: - :param token: - :param name: - :param upload: - :param validate: - :param cwd: - """ - if not cwd: - cwd = os.getcwd() - - print("CONTROL: Bundling {} dir for distribution. 
Please wait for operation to complete..".format(cwd)) - - if not name: - name = str(os.path.basename(cwd)) + '-' + \ - datetime.today().strftime('%Y-%m-%d-%H%M%S') - - config = {'host': reducer, 'port': port, 'token': token, 'name': name, - 'cwd': cwd} - - package = Package(config) - - print("CONTROL: Bundling package..") - package.package(validate=validate) - print("CONTROL: Bundle completed\nCONTROL: Resulted in: {}.tar.gz".format(name)) - if upload: - print("CONTROL: started upload") - package.upload() - print("CONTROL: upload finished!") - else: - print("CONTROL: set --upload flag along with --reducer and --port if you want to upload directly from client.") - - -@control_cmd.command('unpack') -@click.option('-r', '--reducer', required=True) -@click.option('-p', '--port', required=True) -@click.option('-t', '--token', required=True) -@click.option('-n', '--name', required=False, default=None) -@click.option('-d', '--download', required=False, default=None) -@click.option('-v', '--validate', required=False, default=False) -@click.option('-c', '--cwd', required=False, default=None) -@click.pass_context -def unpack_cmd(ctx, reducer, port, token, name, download, validate, cwd): - """ - - :param ctx: - :param reducer: - :param port: - :param token: - :param name: - :param download: - :param validate: - :param cwd: - """ - if not cwd: - cwd = os.getcwd() - - # config = {'host': reducer, 'port': port, 'token': token, 'name': name, - # 'cwd': cwd} - - package = PackageRuntime(cwd, os.path.join(cwd, 'client')) - package.download(reducer, port, token) - package.unpack() - - -@control_cmd.command('template') -@click.pass_context -def template_cmd(ctx): - """ - - :param ctx: - """ - print("TODO: generate template") - pass - - -@control_cmd.command('start') -@click.option('-r', '--reducer', required=True) -@click.option('-p', '--port', required=True) -@click.option('-t', '--token', required=True) -@click.pass_context -def control_cmd(ctx, reducer, port, token): - """ - - 
:param ctx: - :param reducer: - :param port: - :param token: - """ - pass diff --git a/fedn/cli/run_cmd.py b/fedn/cli/run_cmd.py index 55f93eba8..119b8de45 100644 --- a/fedn/cli/run_cmd.py +++ b/fedn/cli/run_cmd.py @@ -4,14 +4,13 @@ import click import yaml -from fedn.client import Client -from fedn.clients.reducer.restservice import (decode_auth_token, - encode_auth_token) -from fedn.clients.reducer.statestore.mongoreducerstatestore import \ - MongoReducerStateStore -from fedn.combiner import Combiner from fedn.common.exceptions import InvalidClientConfig -from fedn.reducer import Reducer +from fedn.network.clients.client import Client +from fedn.network.combiner.server import Combiner +from fedn.network.dashboard.restservice import (decode_auth_token, + encode_auth_token) +from fedn.network.reducer import Reducer +from fedn.network.statestore.mongostatestore import MongoStateStore from .main import main @@ -40,7 +39,7 @@ def check_helper_config_file(config): return helper -def parse_client_config(config): +def apply_config(config): """Parse client config from file. Override configs from the CLI with settings in config file. @@ -71,7 +70,7 @@ def validate_client_config(config): if 'discover_port' not in config.keys(): config['discover_port'] = None except Exception: - raise InvalidClientConfig("Could not load config appropriately. Check config") + raise InvalidClientConfig("Could not load config from file. 
Check config") @main.group('run') @@ -87,8 +86,8 @@ def run_cmd(ctx): @run_cmd.command('client') -@click.option('-d', '--discoverhost', required=False) -@click.option('-p', '--discoverport', required=False) +@click.option('-d', '--discoverhost', required=False, help='Hostname for discovery services(reducer).') +@click.option('-p', '--discoverport', required=False, help='Port for discovery services (reducer).') @click.option('--token', required=False, help='Set token provided by reducer if enabled') @click.option('-n', '--name', required=False, default="client" + str(uuid.uuid4())[:8]) @click.option('-i', '--client_id', required=False) @@ -138,7 +137,7 @@ def client_cmd(ctx, discoverhost, discoverport, token, name, client_id, local_pa 'reconnect_after_missed_heartbeat': reconnect_after_missed_heartbeat} if init: - parse_client_config(config) + apply_config(config) validate_client_config(config) @@ -146,28 +145,29 @@ def client_cmd(ctx, discoverhost, discoverport, token, name, client_id, local_pa client.run() -@run_cmd.command('reducer') +@run_cmd.command('dashboard') @click.option('-h', '--host', required=False) @click.option('-p', '--port', required=False, default='8090', show_default=True) @click.option('-k', '--secret-key', required=False, help='Set secret key to enable jwt token authentication.') @click.option('-l', '--local-package', is_flag=True, help='Enable use of local compute package') @click.option('-n', '--name', required=False, default="reducer" + str(uuid.uuid4())[:8], help='Set service name') -@click.option('-i', '--init', required=True, default=None, +@click.option('-in', '--init', required=True, default=None, help='Set to a filename to (re)init reducer state from file.') @click.pass_context -def reducer_cmd(ctx, host, port, secret_key, local_package, name, init): - """ - - :param ctx: - :param discoverhost: - :param discoverport: - :param token: - :param name: - :param init: +def dashboard_cmd(ctx, host, port, secret_key, local_package, name, init): 
+ """ Start the dashboard service. + + :param ctx: Click context. + :param discoverhost: Hostname for discovery services (dashboard). + :param discoverport: Port for discovery services (dashboard). + :param secret_key: Set secret key to enable jwt token authentication. + :param local_package: Enable use of local compute package. + :param name: Set service name. + :param init: Set to a filename to (re)init config state from file. """ remote = False if local_package else True config = {'host': host, 'port': port, 'secret_key': secret_key, - 'name': name, 'remote_compute_context': remote, 'init': init} + 'name': name, 'remote_compute_package': remote, 'init': init} # Read settings from config file try: @@ -189,8 +189,8 @@ def reducer_cmd(ctx, host, port, secret_key, local_package, name, init): # Obtain state from database, in case already initialized (service restart) statestore_config = fedn_config['statestore'] if statestore_config['type'] == 'MongoDB': - statestore = MongoReducerStateStore( - network_id, statestore_config['mongo_config'], defaults=config['init']) + statestore = MongoStateStore( + network_id, statestore_config['mongo_config'], fedn_config['storage']) else: print("Unsupported statestore type, exiting. ", flush=True) exit(-1) @@ -219,7 +219,7 @@ def reducer_cmd(ctx, host, port, secret_key, local_package, name, init): print("Failed to set reducer config in statestore, exiting.", flush=True) exit(-1) - # Configure storage backend (currently supports MinIO) + # Configure storage backend. 
try: statestore.set_storage_backend(fedn_config['storage']) except KeyError: @@ -229,14 +229,6 @@ def reducer_cmd(ctx, host, port, secret_key, local_package, name, init): print("Failed to set storage config in statestore, exiting.", flush=True) exit(-1) - # Configure controller - control_config = fedn_config['control'] - try: - statestore.set_round_config(control_config) - except Exception: - print("Failed to set control config, exiting.", flush=True) - exit(-1) - reducer = Reducer(statestore) reducer.run() @@ -244,17 +236,19 @@ def reducer_cmd(ctx, host, port, secret_key, local_package, name, init): @run_cmd.command('combiner') @click.option('-d', '--discoverhost', required=False, help='Hostname for discovery services (reducer).') @click.option('-p', '--discoverport', required=False, help='Port for discovery services (reducer).') -@click.option('-t', '--token', required=False, help='Specify token for connecting to the reducer.') +@click.option('-t', '--token', required=False, help='Set token provided by reducer if enabled') @click.option('-n', '--name', required=False, default="combiner" + str(uuid.uuid4())[:8], help='Set name for combiner.') @click.option('-h', '--host', required=False, default="combiner", help='Set hostname.') @click.option('-i', '--port', required=False, default=12080, help='Set port.') +@click.option('-f', '--fqdn', required=False, default=None, help='Set fully qualified domain name') @click.option('-s', '--secure', is_flag=True, help='Enable SSL/TLS encrypted gRPC channels.') -@click.option('-v', '--verify', is_flag=True, help='Verify SSL/TLS for REST service') +@click.option('-v', '--verify', is_flag=True, help='Verify SSL/TLS for REST discovery service (reducer)') @click.option('-c', '--max_clients', required=False, default=30, help='The maximal number of client connections allowed.') @click.option('-in', '--init', required=False, default=None, help='Path to configuration file to (re)init combiner.') +@click.option('-a', '--aggregator', 
required=False, default='fedavg', help='Filename of the aggregator module to use.') @click.pass_context -def combiner_cmd(ctx, discoverhost, discoverport, token, name, host, port, secure, verify, max_clients, init): +def combiner_cmd(ctx, discoverhost, discoverport, token, name, host, port, fqdn, secure, verify, max_clients, init, aggregator): """ :param ctx: @@ -268,33 +262,12 @@ def combiner_cmd(ctx, discoverhost, discoverport, token, name, host, port, secur :param max_clients: :param init: """ - config = {'discover_host': discoverhost, 'discover_port': discoverport, 'token': token, 'myhost': host, - 'myport': port, 'myname': name, 'secure': secure, 'verify': verify, 'max_clients': max_clients, 'init': init} + config = {'discover_host': discoverhost, 'discover_port': discoverport, 'token': token, 'host': host, + 'port': port, 'fqdn': fqdn, 'name': name, 'secure': secure, 'verify': verify, 'max_clients': max_clients, + 'init': init, 'aggregator': aggregator} if config['init']: - with open(config['init'], 'r') as file: - try: - settings = dict(yaml.safe_load(file)) - except yaml.YAMLError as e: - print('Failed to read config from settings file, exiting.', flush=True) - raise (e) - - # Read/overide settings from config file - if 'controller' in settings: - controller_config = settings['controller'] - for key, val in controller_config.items(): - config[key] = val - - if 'combiner' in settings: - combiner_config = settings['combiner'] - config['myname'] = combiner_config['name'] - config['myhost'] = combiner_config['host'] - if 'fqdn' in combiner_config.keys(): - config['fqdn'] = combiner_config['fqdn'] - else: - config['fqdn'] = None - config['myport'] = combiner_config['port'] - config['max_clients'] = combiner_config['max_clients'] + apply_config(config) combiner = Combiner(config) combiner.run() diff --git a/fedn/fedn/__init__.py b/fedn/fedn/__init__.py index f04a9cd80..7e3df239e 100644 --- a/fedn/fedn/__init__.py +++ b/fedn/fedn/__init__.py @@ -2,6 +2,11 @@ 
import os from os.path import basename, dirname, isfile +from fedn.network.api.client import APIClient + +# flake8: noqa + + modules = glob.glob(dirname(__file__) + "/*.py") __all__ = [basename(f)[:-3] for f in modules if isfile(f) and not f.endswith('__init__.py')] diff --git a/fedn/fedn/aggregators/__init__.py b/fedn/fedn/aggregators/__init__.py deleted file mode 100644 index e69de29bb..000000000 diff --git a/fedn/fedn/aggregators/aggregator.py b/fedn/fedn/aggregators/aggregator.py deleted file mode 100644 index d03a1811f..000000000 --- a/fedn/fedn/aggregators/aggregator.py +++ /dev/null @@ -1,38 +0,0 @@ -from abc import ABC, abstractmethod - - -class AggregatorBase(ABC): - """ Abstract class defining helpers. """ - - @abstractmethod - def __init__(self, id, storage, server, modelservice, control): - """ """ - self.name = "" - self.storage = storage - self.id = id - self.server = server - self.modelservice = modelservice - self.control = control - - @abstractmethod - def on_model_update(self, model_id): - pass - - @abstractmethod - def on_model_validation(self, validation): - pass - - @abstractmethod - def combine_models(self, nr_expected_models=None, nr_required_models=1, helper=None, timeout=180): - pass - - -# def get_aggregator(aggregator_type): -# """ Return an instance of the aggregator class. -# -# :param aggregator_type (str): The aggregator type ('fedavg') -# :return: -# """ -# if helper_type == 'fedavg': -# from fedn.aggregators.fedavg import FedAvgAggregator -# return FedAvgAggregator() diff --git a/fedn/fedn/aggregators/fedavg.py b/fedn/fedn/aggregators/fedavg.py deleted file mode 100644 index 317ddc5eb..000000000 --- a/fedn/fedn/aggregators/fedavg.py +++ /dev/null @@ -1,160 +0,0 @@ -import json -import queue -import time - -import fedn.common.net.grpc.fedn_pb2 as fedn -from fedn.aggregators.aggregator import AggregatorBase - - -class FedAvgAggregator(AggregatorBase): - """ Local SGD / Federated Averaging (FedAvg) aggregator. 
- - :param id: A reference to id of :class: `fedn.combiner.Combiner` - :type id: str - :param storage: Model repository for :class: `fedn.combiner.Combiner` - :type storage: class: `fedn.common.storage.s3.s3repo.S3ModelRepository` - :param server: A handle to the Combiner class :class: `fedn.combiner.Combiner` - :type server: class: `fedn.combiner.Combiner` - :param modelservice: A handle to the model service :class: `fedn.clients.combiner.modelservice.ModelService` - :type modelservice: class: `fedn.clients.combiner.modelservice.ModelService` - :param control: A handle to the :class: `fedn.clients.combiner.roundcontrol.RoundControl` - :type control: class: `fedn.clients.combiner.roundcontrol.RoundControl` - - """ - - def __init__(self, id, storage, server, modelservice, control): - """Constructor method - """ - - super().__init__(id, storage, server, modelservice, control) - - self.name = "FedAvg" - self.validations = {} - self.model_updates = queue.Queue() - - def on_model_update(self, model_id): - """Callback when a new model update is recieved from a client. - Performs (optional) pre-processing and the puts the update id - on the aggregation queue. - - :param model_id: ID of model update - :type model_id: str - """ - try: - self.server.report_status("AGGREGATOR({}): callback received model {}".format(self.name, model_id), - log_level=fedn.Status.INFO) - - # Push the model update to the processing queue - self.model_updates.put(model_id) - except Exception as e: - self.server.report_status("AGGREGATOR({}): Failed to receive candidate model! {}".format(self.name, e), - log_level=fedn.Status.WARNING) - pass - - def on_model_validation(self, validation): - """ Callback when a new model validation is recieved from a client. - - :param validation: Dict containing validation data sent by client. - Must be valid JSON. 
- :type validation: dict - """ - - # Currently, the validations are actually sent as status messages - # directly in the client, so here we are just storing them in the - # combiner memory. This will need to be refactored later so that this - # callback is responsible for reporting the validation to the db. - - model_id = validation.model_id - data = json.loads(validation.data) - try: - self.validations[model_id].append(data) - except KeyError: - self.validations[model_id] = [data] - - self.server.report_status("AGGREGATOR({}): callback processed validation {}".format(self.name, validation.model_id), - log_level=fedn.Status.INFO) - - def combine_models(self, nr_expected_models=None, nr_required_models=1, helper=None, timeout=180): - """Compute a running average of model updates. - - :param nr_expected_models: The number of updates expected in this round, defaults to None - :type nr_expected_models: int, optional - :param nr_required_models: The number of updates needed to a valid round, defaults to 1 - :type nr_required_models: int, optional - :param helper: An instance of :class: `fedn.utils.helpers.HelperBase`, ML framework specific helper, defaults to None - :type helper: class: `fedn.utils.helpers.HelperBase`, optional - :param timeout: Timeout for model updates, defaults to 180 - :type timeout: int, optional - :return: The global model and metadata - :rtype: tuple - """ - - data = {} - data['time_model_load'] = 0.0 - data['time_model_aggregation'] = 0.0 - - self.server.report_status( - "AGGREGATOR({}): Aggregating model updates...".format(self.name)) - - round_time = 0.0 - polling_interval = 1.0 - nr_processed_models = 0 - while nr_processed_models < nr_expected_models: - try: - model_id = self.model_updates.get(block=False) - self.server.report_status( - "AGGREGATOR({}): Received model update with id {}".format(self.name, model_id)) - - # Load the model update from disk - tic = time.time() - model_str = self.control.load_model_fault_tolerant(model_id) - if 
model_str: - try: - model_next = helper.load_model_from_BytesIO( - model_str.getbuffer()) - except IOError: - self.server.report_status( - "AGGREGATOR({}): Failed to load model!".format(self.name)) - else: - raise - data['time_model_load'] += time.time() - tic - - # Aggregate / reduce - tic = time.time() - if nr_processed_models == 0: - model = model_next - else: - model = helper.increment_average( - model, model_next, nr_processed_models + 1) - data['time_model_aggregation'] += time.time() - tic - - nr_processed_models += 1 - self.model_updates.task_done() - except queue.Empty: - self.server.report_status("AGGREGATOR({}): waiting for model updates: {} of {} completed.".format(self.name, - nr_processed_models, - nr_expected_models)) - time.sleep(polling_interval) - round_time += polling_interval - except Exception as e: - self.server.report_status( - "AGGERGATOR({}): Error encoutered while reading model update, skipping this update. {}".format(self.name, e)) - nr_expected_models -= 1 - if nr_expected_models <= 0: - return None, data - self.model_updates.task_done() - - if round_time >= timeout: - self.server.report_status("AGGREGATOR({}): training round timed out.".format( - self.name), log_level=fedn.Status.WARNING) - # TODO: Generalize policy for what to do in case of timeout. 
- if nr_processed_models >= nr_required_models: - break - else: - return None, data - - data['nr_successful_updates'] = nr_processed_models - - self.server.report_status("AGGREGATOR({}): Training round completed, aggregated {} models.".format(self.name, nr_processed_models), - log_level=fedn.Status.INFO) - return model, data diff --git a/fedn/fedn/clients/client/__init__.py b/fedn/fedn/clients/client/__init__.py deleted file mode 100644 index e69de29bb..000000000 diff --git a/fedn/fedn/clients/combiner/__init__.py b/fedn/fedn/clients/combiner/__init__.py deleted file mode 100644 index e69de29bb..000000000 diff --git a/fedn/fedn/clients/combiner/roundcontrol.py b/fedn/fedn/clients/combiner/roundcontrol.py deleted file mode 100644 index 7e19065f7..000000000 --- a/fedn/fedn/clients/combiner/roundcontrol.py +++ /dev/null @@ -1,319 +0,0 @@ -import queue -import random -import sys -import time -import uuid - -from fedn.aggregators.fedavg import FedAvgAggregator -from fedn.utils.helpers import get_helper - - -class RoundControl: - """ Combiner level round controller. - - The controller recieves round configurations from the global controller - and acts on them by soliciting model updates and model validations from - the connected clients. 
- - :param id: A reference to id of :class: `fedn.combiner.Combiner` - :type id: str - :param storage: Model repository for :class: `fedn.combiner.Combiner` - :type storage: class: `fedn.common.storage.s3.s3repo.S3ModelRepository` - :param server: A handle to the Combiner class :class: `fedn.combiner.Combiner` - :type server: class: `fedn.combiner.Combiner` - :param modelservice: A handle to the model service :class: `fedn.clients.combiner.modelservice.ModelService` - :type modelservice: class: `fedn.clients.combiner.modelservice.ModelService` - """ - - def __init__(self, id, storage, server, modelservice): - - self.id = id - self.round_configs = queue.Queue() - self.storage = storage - self.server = server - self.modelservice = modelservice - self.config = {} - - # TODO, make runtime configurable - - self.aggregator = FedAvgAggregator( - self.id, self.storage, self.server, self.modelservice, self) - - def push_round_config(self, round_config): - """ Recieve a round_config (job description) and push on the queue. - - :param round_config: A dict containing round configurations. - :type round_config: dict - :return: A generated job id (universally unique identifier) for the round configuration - :rtype: str - """ - try: - - round_config['_job_id'] = str(uuid.uuid4()) - self.round_configs.put(round_config) - except Exception: - self.server.report_status( - "ROUNDCONTROL: Failed to push round config.", flush=True) - raise - return round_config['_job_id'] - - def load_model_fault_tolerant(self, model_id, retry=3): - """Load model update object. 
- - :param model_id: The ID of the model - :type model_id: str - :param retry: number of times retrying load model update, defaults to 3 - :type retry: int, optional - :return: Updated model - :rtype: class: `io.BytesIO` - """ - # Try reading model update from local disk/combiner memory - model_str = self.modelservice.models.get(model_id) - # And if we cannot access that, try downloading from the server - if model_str is None: - model_str = self.modelservice.get_model(model_id) - # TODO: use retrying library - tries = 0 - while tries < retry: - tries += 1 - if not model_str or sys.getsizeof(model_str) == 80: - self.server.report_status( - "ROUNDCONTROL: Model download failed. retrying", flush=True) - - time.sleep(1) - model_str = self.modelservice.get_model(model_id) - - return model_str - - def _training_round(self, config, clients): - """Send model update requests to clients and aggregate results. - - :param config: [description] - :type config: [type] - :param clients: [description] - :type clients: [type] - :return: [description] - :rtype: [type] - """ - - # We flush the queue at a beginning of a round (no stragglers allowed) - # TODO: Support other ways to handle stragglers. 
- with self.aggregator.model_updates.mutex: - self.aggregator.model_updates.queue.clear() - - self.server.report_status( - "ROUNDCONTROL: Initiating training round, participating members: {}".format(clients)) - self.server.request_model_update(config['model_id'], clients=clients) - - meta = {} - meta['nr_expected_updates'] = len(clients) - meta['nr_required_updates'] = int(config['clients_required']) - meta['timeout'] = float(config['round_timeout']) - tic = time.time() - model = None - data = None - try: - helper = get_helper(config['helper_type']) - model, data = self.aggregator.combine_models(nr_expected_models=len(clients), - nr_required_models=int( - config['clients_required']), - helper=helper, timeout=float(config['round_timeout'])) - except Exception as e: - print("TRAINING ROUND FAILED AT COMBINER! {}".format(e), flush=True) - meta['time_combination'] = time.time() - tic - meta['aggregation_time'] = data - return model, meta - - def _validation_round(self, config, clients, model_id): - """[summary] - - :param config: [description] - :type config: [type] - :param clients: [description] - :type clients: [type] - :param model_id: [description] - :type model_id: [type] - """ - self.server.request_model_validation(model_id, clients=clients) - - def stage_model(self, model_id, timeout_retry=3, retry=2): - """Download model from persistent storage. - - :param model_id: ID of the model update object to stage. - :type model_id: str - :param timeout_retry: Sleep before retrying download again (sec), defaults to 3 - :type timeout_retry: int, optional - :param retry: Number of retries, defaults to 2 - :type retry: int, optional - """ - - # If the model is already in memory at the server we do not need to do anything. - if self.modelservice.models.exist(model_id): - return - - # If it is not there, download it from storage and stage it in memory at the server. 
- tries = 0 - while True: - try: - model = self.storage.get_model_stream(model_id) - if model: - break - except Exception: - self.server.report_status("ROUNDCONTROL: Could not fetch model from storage backend, retrying.", - flush=True) - time.sleep(timeout_retry) - tries += 1 - if tries > retry: - self.server.report_status( - "ROUNDCONTROL: Failed to stage model {} from storage backend!".format(model_id), flush=True) - return - - self.modelservice.set_model(model, model_id) - - def __assign_round_clients(self, n, type="trainers"): - """ Obtain a list of clients (trainers or validators) to talk to in a round. - - :param n: Size of a random set taken from active trainers (clients), if n > "active trainers" all is used - :type n: int - :param type: type of clients, either "trainers" or "validators", defaults to "trainers" - :type type: str, optional - :return: Set of clients - :rtype: list - """ - - if type == "validators": - clients = self.server.get_active_validators() - elif type == "trainers": - clients = self.server.get_active_trainers() - else: - self.server.report_status( - "ROUNDCONTROL(ERROR): {} is not a supported type of client".format(type), flush=True) - raise - - # If the number of requested trainers exceeds the number of available, use all available. - if n > len(clients): - n = len(clients) - - # If not, we pick a random subsample of all available clients. - - clients = random.sample(clients, n) - - return clients - - def __check_nr_round_clients(self, config, timeout=0.0): - """Check that the minimal number of required clients to start a round are connected. 
- - :param config: [description] - :type config: [type] - :param timeout: [description], defaults to 0.0 - :type timeout: float, optional - :return: [description] - :rtype: [type] - """ - - ready = False - t = 0.0 - while not ready: - active = self.server.nr_active_trainers() - - if active >= int(config['clients_requested']): - return True - else: - self.server.report_status("waiting for {} clients to get started, currently: {}".format( - int(config['clients_requested']) - active, - active), flush=True) - if t >= timeout: - if active >= int(config['clients_required']): - return True - else: - return False - - time.sleep(1.0) - t += 1.0 - - return ready - - def execute_validation(self, round_config): - """ Coordinate validation rounds as specified in config. - - :param round_config: [description] - :type round_config: [type] - """ - model_id = round_config['model_id'] - self.server.report_status( - "COMBINER orchestrating validation of model {}".format(model_id)) - self.stage_model(model_id) - validators = self.__assign_round_clients( - self.server.max_clients, type="validators") - self._validation_round(round_config, validators, model_id) - - def execute_training(self, config): - """ Coordinates clients to execute training and validation tasks. 
""" - - round_meta = {} - round_meta['config'] = config - round_meta['round_id'] = config['round_id'] - - self.stage_model(config['model_id']) - - # Execute the configured number of rounds - round_meta['local_round'] = {} - for r in range(1, int(config['rounds']) + 1): - self.server.report_status( - "ROUNDCONTROL: Starting training round {}".format(r), flush=True) - clients = self.__assign_round_clients(self.server.max_clients) - model, meta = self._training_round(config, clients) - round_meta['local_round'][str(r)] = meta - if model is None: - self.server.report_status( - "\t Failed to update global model in round {0}!".format(r)) - - if model is not None: - helper = get_helper(config['helper_type']) - a = helper.serialize_model_to_BytesIO(model) - # Send aggregated model to server - model_id = str(uuid.uuid4()) - self.modelservice.set_model(a, model_id) - a.close() - - # Update Combiner latest model - self.server.set_active_model(model_id) - - print("------------------------------------------") - self.server.report_status( - "ROUNDCONTROL: TRAINING ROUND COMPLETED.", flush=True) - print("\n") - return round_meta - - def run(self): - """ Main control loop. Sequentially execute rounds based on round config. 
- - """ - try: - while True: - try: - round_config = self.round_configs.get(block=False) - - ready = self.__check_nr_round_clients(round_config) - if ready: - if round_config['task'] == 'training': - tic = time.time() - round_meta = self.execute_training(round_config) - round_meta['time_exec_training'] = time.time() - \ - tic - round_meta['name'] = self.id - self.server.tracer.set_round_meta(round_meta) - elif round_config['task'] == 'validation': - self.execute_validation(round_config) - else: - self.server.report_status( - "ROUNDCONTROL: Round config contains unkown task type.", flush=True) - else: - self.server.report_status( - "ROUNDCONTROL: Failed to meet client allocation requirements for this round config.", flush=True) - - except queue.Empty: - time.sleep(1) - - except (KeyboardInterrupt, SystemExit): - pass diff --git a/fedn/fedn/clients/reducer/control.py b/fedn/fedn/clients/reducer/control.py deleted file mode 100644 index 25de88344..000000000 --- a/fedn/fedn/clients/reducer/control.py +++ /dev/null @@ -1,581 +0,0 @@ -import copy -import os -import time -import uuid -from datetime import datetime - -import fedn.utils.helpers -from fedn.clients.reducer.interfaces import CombinerUnavailableError -from fedn.clients.reducer.network import Network -from fedn.common.storage.s3.s3repo import S3ModelRepository -from fedn.common.tracer.mongotracer import MongoTracer - -from .state import ReducerState - - -class UnsupportedStorageBackend(Exception): - pass - - -class MisconfiguredStorageBackend(Exception): - pass - - -class ReducerControl: - """ Main conroller for training round. 
- - """ - - def __init__(self, statestore): - - self.__state = ReducerState.setup - self.statestore = statestore - if self.statestore.is_inited(): - self.network = Network(self, statestore) - - try: - config = self.statestore.get_storage_backend() - except Exception: - print( - "REDUCER CONTROL: Failed to retrive storage configuration, exiting.", flush=True) - raise MisconfiguredStorageBackend() - if not config: - print( - "REDUCER CONTROL: No storage configuration available, exiting.", flush=True) - raise MisconfiguredStorageBackend() - - if config['storage_type'] == 'S3': - - self.model_repository = S3ModelRepository(config['storage_config']) - else: - print("REDUCER CONTROL: Unsupported storage backend, exiting.", flush=True) - raise UnsupportedStorageBackend() - - self.client_allocation_policy = self.client_allocation_policy_least_packed - - if self.statestore.is_inited(): - self.__state = ReducerState.idle - - def get_helper(self): - """ - - :return: - """ - helper_type = self.statestore.get_framework() - helper = fedn.utils.helpers.get_helper(helper_type) - if not helper: - print("CONTROL: Unsupported helper type {}, please configure compute_context.helper !".format(helper_type), - flush=True) - return None - return helper - - def delete_bucket_objects(self): - """ - - :return: - """ - return self.model_repository.delete_objects() - - def get_state(self): - """ - - :return: - """ - return self.__state - - def idle(self): - """ - - :return: - """ - if self.__state == ReducerState.idle: - return True - else: - return False - - def get_first_model(self): - """ - - :return: - """ - return self.statestore.get_first() - - def get_latest_model(self): - """ - - :return: - """ - return self.statestore.get_latest() - - def get_model_info(self): - """ - - :return: - """ - return self.statestore.get_model_info() - - def get_events(self): - """ - - :return: - """ - return self.statestore.get_events() - - def drop_models(self): - """ - - """ - self.statestore.drop_models() 
- - def get_compute_context(self): - """ - - :return: - """ - definition = self.statestore.get_compute_context() - if definition: - try: - context = definition['filename'] - return context - except (IndexError, KeyError): - print( - "No context filename set for compute context definition", flush=True) - return None - else: - return None - - def set_compute_context(self, filename, path): - """ Persist the configuration for the compute package. """ - self.model_repository.set_compute_context(filename, path) - self.statestore.set_compute_context(filename) - - def get_compute_package(self, compute_package=''): - """ - - :param compute_package: - :return: - """ - if compute_package == '': - compute_package = self.get_compute_context() - return self.model_repository.get_compute_package(compute_package) - - def commit(self, model_id, model=None): - """ Commit a model to the global model trail. The model commited becomes the lastest consensus model. """ - - helper = self.get_helper() - if model is not None: - print("Saving model to disk...", flush=True) - outfile_name = helper.save_model(model) - print("DONE", flush=True) - print("Uploading model to Minio...", flush=True) - model_id = self.model_repository.set_model( - outfile_name, is_file=True) - print("DONE", flush=True) - os.unlink(outfile_name) - - self.statestore.set_latest(model_id) - - def _out_of_sync(self, combiners=None): - - if not combiners: - combiners = self.network.get_combiners() - - osync = [] - for combiner in combiners: - try: - model_id = combiner.get_model_id() - except CombinerUnavailableError: - self._handle_unavailable_combiner(combiner) - model_id = None - - if model_id and (model_id != self.get_latest_model()): - osync.append(combiner) - return osync - - def check_round_participation_policy(self, compute_plan, combiner_state): - """ Evaluate reducer level policy for combiner round-participation. - This is a decision on ReducerControl level, additional checks - applies on combiner level. 
Not all reducer control flows might - need or want to use a participation policy. """ - - if compute_plan['task'] == 'training': - nr_active_clients = int(combiner_state['nr_active_trainers']) - elif compute_plan['task'] == 'validation': - nr_active_clients = int(combiner_state['nr_active_validators']) - else: - print("Invalid task type!", flush=True) - return False - - if int(compute_plan['clients_required']) <= nr_active_clients: - return True - else: - return False - - def check_round_start_policy(self, combiners): - """ Check if the overall network state meets the policy to start a round. """ - if len(combiners) > 0: - return True - else: - return False - - def check_round_validity_policy(self, combiners): - """ - At the end of the round, before committing a model to the model ledger, - we check if a round validity policy has been met. This can involve - e.g. asserting that a certain number of combiners have reported in an - updated model, or that criteria on model performance have been met. - """ - if combiners == []: - return False - else: - return True - - def _handle_unavailable_combiner(self, combiner): - """ This callback is triggered if a combiner is found to be unresponsive. """ - # TODO: Implement strategy to handle the case. - print("REDUCER CONTROL: Combiner {} unavailable.".format( - combiner.name), flush=True) - - def _select_round_combiners(self, compute_plan): - combiners = [] - for combiner in self.network.get_combiners(): - try: - combiner_state = combiner.report() - except CombinerUnavailableError: - self._handle_unavailable_combiner(combiner) - combiner_state = None - - if combiner_state: - is_participating = self.check_round_participation_policy( - compute_plan, combiner_state) - if is_participating: - combiners.append((combiner, compute_plan)) - return combiners - - def round(self, config, round_number): - """ Execute one global round. 
""" - - round_meta = {'round_id': round_number} - - if len(self.network.get_combiners()) < 1: - print("REDUCER: No combiners connected!") - return None, round_meta - - # 1. Formulate compute plans for this round and determine which combiners should participate in the round. - compute_plan = copy.deepcopy(config) - compute_plan['rounds'] = 1 - compute_plan['round_id'] = round_number - compute_plan['task'] = 'training' - compute_plan['model_id'] = self.get_latest_model() - compute_plan['helper_type'] = self.statestore.get_framework() - - round_meta['compute_plan'] = compute_plan - - combiners = [] - for combiner in self.network.get_combiners(): - - try: - combiner_state = combiner.report() - except CombinerUnavailableError: - self._handle_unavailable_combiner(combiner) - combiner_state = None - - if combiner_state is not None: - is_participating = self.check_round_participation_policy( - compute_plan, combiner_state) - if is_participating: - combiners.append((combiner, compute_plan)) - - round_start = self.check_round_start_policy(combiners) - - print("CONTROL: round start policy met, participating combiners {}".format( - combiners), flush=True) - if not round_start: - print("CONTROL: Round start policy not met, skipping round!", flush=True) - return None - - # 2. Sync up and ask participating combiners to coordinate model updates - # TODO refactor - - statestore_config = self.statestore.get_config() - - self.tracer = MongoTracer( - statestore_config['mongo_config'], statestore_config['network_id']) - - start_time = datetime.now() - - for combiner, compute_plan in combiners: - try: - self.sync_combiners([combiner], self.get_latest_model()) - _ = combiner.start(compute_plan) - except CombinerUnavailableError: - # This is OK, handled by round accept policy - self._handle_unavailable_combiner(combiner) - pass - except Exception: - # Unknown error - raise - - # Wait until participating combiners have a model that is out of sync with the current global model. 
- # TODO: We do not need to wait until all combiners complete before we start reducing. - cl = [] - for combiner, plan in combiners: - cl.append(combiner) - - wait = 0.0 - while len(self._out_of_sync(cl)) < len(combiners): - time.sleep(1.0) - wait += 1.0 - if wait >= config['round_timeout']: - break - - # TODO refactor - end_time = datetime.now() - round_time = end_time - start_time - self.tracer.set_combiner_time(round_number, round_time.seconds) - - round_meta['time_combiner_update'] = round_time.seconds - - # OBS! Here we are checking against all combiners, not just those that computed in this round. - # This means we let straggling combiners participate in the update - updated = self._out_of_sync() - print("COMBINERS UPDATED MODELS: {}".format(updated), flush=True) - - print("Checking round validity policy...", flush=True) - round_valid = self.check_round_validity_policy(updated) - if not round_valid: - # TODO: Should we reset combiner state here? - print("REDUCER CONTROL: Round invalid!", flush=True) - return None, round_meta - print("Round valid.") - - print("Starting reducing models...", flush=True) - # 3. Reduce combiner models into a global model - try: - model, data = self.reduce(updated) - round_meta['reduce'] = data - except Exception as e: - print("CONTROL: Failed to reduce models from combiners: {}".format( - updated), flush=True) - print(e, flush=True) - return None, round_meta - print("DONE", flush=True) - - # 6. Commit the global model to the ledger - print("Committing global model...", flush=True) - if model is not None: - # Commit to model ledger - tic = time.time() - - model_id = uuid.uuid4() - self.commit(model_id, model) - round_meta['time_commit'] = time.time() - tic - else: - print("REDUCER: failed to update model in round with config {}".format( - config), flush=True) - return None, round_meta - print("DONE", flush=True) - - # 4. 
Trigger participating combiner nodes to execute a validation round for the current model - validate = config['validate'] - if validate: - combiner_config = copy.deepcopy(config) - combiner_config['model_id'] = self.get_latest_model() - combiner_config['task'] = 'validation' - combiner_config['helper_type'] = self.statestore.get_framework() - - validating_combiners = self._select_round_combiners( - combiner_config) - - for combiner, combiner_config in validating_combiners: - try: - self.sync_combiners([combiner], self.get_latest_model()) - combiner.start(combiner_config) - except CombinerUnavailableError: - # OK if validation fails for a combiner - self._handle_unavailable_combiner(combiner) - pass - - # 5. Check commit policy based on validation result (optionally) - # TODO: Implement. - - return model_id, round_meta - - def sync_combiners(self, combiners, model_id): - """ Spread the current consensus model to all active combiner nodes. """ - if not model_id: - print("GOT NO MODEL TO SET! Have you seeded the FedML model?", flush=True) - return - for combiner in combiners: - _ = combiner.set_model_id(model_id) - - def instruct(self, config): - """ Main entrypoint, executes the compute plan. 
""" - - if self.__state == ReducerState.instructing: - print("Already set in INSTRUCTING state", flush=True) - return - - self.__state = ReducerState.instructing - - if not self.get_latest_model(): - print("No model in model chain, please seed the alliance!") - - self.__state = ReducerState.monitoring - - # TODO: Validate and set the round config object - # self.set_config(config) - - # TODO: Refactor - - statestore_config = self.statestore.get_config() - self.tracer = MongoTracer( - statestore_config['mongo_config'], statestore_config['network_id']) - last_round = self.tracer.get_latest_round() - - for round in range(1, int(config['rounds'] + 1)): - tic = time.time() - if last_round: - current_round = last_round + round - else: - current_round = round - - start_time = datetime.now() - # start round monitor - self.tracer.start_monitor(round) - # todo add try except bloc for round meta - model_id = None - round_meta = {'round_id': current_round} - try: - model_id, round_meta = self.round(config, current_round) - except TypeError: - print("Could not unpack data from round...", flush=True) - - end_time = datetime.now() - - if model_id: - print("REDUCER: Global round completed, new model: {}".format( - model_id), flush=True) - round_time = end_time - start_time - self.tracer.set_latest_time(current_round, round_time.seconds) - round_meta['status'] = 'Success' - else: - print("REDUCER: Global round failed!") - round_meta['status'] = 'Failed' - - # stop round monitor - self.tracer.stop_monitor() - round_meta['time_round'] = time.time() - tic - self.tracer.set_round_meta_reducer(round_meta) - - self.__state = ReducerState.idle - - def reduce(self, combiners): - """ Combine current models at Combiner nodes into one global model. 
""" - - meta = {} - meta['time_fetch_model'] = 0.0 - meta['time_load_model'] = 0.0 - meta['time_aggregate_model'] = 0.0 - - i = 1 - model = None - for combiner in combiners: - - # TODO: Handle inactive RPC error in get_model and raise specific error - try: - tic = time.time() - data = combiner.get_model() - meta['time_fetch_model'] += (time.time() - tic) - except Exception: - pass - - helper = self.get_helper() - - if data is not None: - try: - tic = time.time() - model_str = combiner.get_model().getbuffer() - model_next = helper.load_model_from_BytesIO(model_str) - meta['time_load_model'] += (time.time() - tic) - tic = time.time() - model = helper.increment_average(model, model_next, i) - meta['time_aggregate_model'] += (time.time() - tic) - except Exception: - tic = time.time() - model = helper.load_model_from_BytesIO(data.getbuffer()) - meta['time_aggregate_model'] += (time.time() - tic) - i = i + 1 - - return model, meta - - def monitor(self, config=None): - """ - - :param config: - """ - # status = self.network.check_health() - pass - - def client_allocation_policy_first_available(self): - """ - Allocate client to the first available combiner in the combiner list. - Packs one combiner full before filling up next combiner. - """ - for combiner in self.network.get_combiners(): - if combiner.allowing_clients(): - return combiner - return None - - def client_allocation_policy_least_packed(self): - """ - Allocate client to the available combiner with the smallest number of clients. - Spreads clients evenly over all active combiners. - - TODO: Not thread safe - not garanteed to result in a perfectly even partition. 
- - """ - min_clients = None - selected_combiner = None - - for combiner in self.network.get_combiners(): - try: - if combiner.allowing_clients(): - combiner_state = combiner.report() - nac = combiner_state['nr_active_clients'] - if not min_clients: - min_clients = nac - selected_combiner = combiner - elif nac < min_clients: - min_clients = nac - selected_combiner = combiner - except CombinerUnavailableError: - print("Combiner was not responding, continuing to next") - - return selected_combiner - - def find(self, name): - """ - - :param name: - :return: - """ - for combiner in self.network.get_combiners(): - if name == combiner.name: - return combiner - return None - - def find_available_combiner(self): - """ - - :return: - """ - combiner = self.client_allocation_policy() - return combiner - - def state(self): - """ - - :return: - """ - return self.__state diff --git a/fedn/fedn/clients/reducer/network.py b/fedn/fedn/clients/reducer/network.py deleted file mode 100644 index d851b13ff..000000000 --- a/fedn/fedn/clients/reducer/network.py +++ /dev/null @@ -1,131 +0,0 @@ -import base64 - -from fedn.clients.reducer.interfaces import (CombinerInterface, - CombinerUnavailableError) - - -class Network: - """ FEDn network. 
""" - - def __init__(self, control, statestore): - """ """ - self.statestore = statestore - self.control = control - self.id = statestore.network_id - - @classmethod - def from_statestore(self, network_id): - """ """ - - def get_combiner(self, name): - """ - - :param name: - :return: - """ - return self.statestore.get_combiner(name) - - def get_combiners(self): - """ - - :return: - """ - data = self.statestore.get_combiners() - combiners = [] - for c in data: - if c['certificate']: - cert = base64.b64decode(c['certificate']) - key = base64.b64decode(c['key']) - else: - cert = None - key = None - - combiners.append( - CombinerInterface(c['parent'], c['name'], c['address'], c['fqdn'], c['port'], - certificate=cert, key=key, ip=c['ip'])) - - return combiners - - def add_combiner(self, combiner): - """ - - :param combiner: - :return: - """ - if not self.control.idle(): - print("Reducer is not idle, cannot add additional combiner.") - return - - if self.find(combiner.name): - return - - print("adding combiner {}".format(combiner.name), flush=True) - self.statestore.set_combiner(combiner.to_dict()) - - def add_client(self, client): - """ Add a new client to the network. 
- - :param client: - :return: - """ - - if self.find_client(client['name']): - return - - print("adding client {}".format(client['name']), flush=True) - self.statestore.set_client(client) - - def remove(self, combiner): - """ - - :param combiner: - :return: - """ - if not self.control.idle(): - print("Reducer is not idle, cannot remove combiner.") - return - self.statestore.delete_combiner(combiner.name) - - def find(self, name): - """ - - :param name: - :return: - """ - combiners = self.get_combiners() - for combiner in combiners: - if name == combiner.name: - return combiner - return None - - def find_client(self, name): - """ - - :param name: - :return: - """ - ret = self.statestore.get_client(name) - return ret - - def describe(self): - """ """ - network = [] - for combiner in self.get_combiners(): - try: - network.append(combiner.report()) - except CombinerUnavailableError: - # TODO, do better here. - pass - return network - - def check_health(self): - """ """ - pass - - def update_client_data(self, client_data, status, role): - """ Update client status on DB""" - self.statestore.update_client_status(client_data, status, role) - - def get_client_info(self): - """ list available client in DB""" - return self.statestore.list_clients() diff --git a/fedn/fedn/clients/reducer/restservice.py b/fedn/fedn/clients/reducer/restservice.py deleted file mode 100644 index 432c6fa92..000000000 --- a/fedn/fedn/clients/reducer/restservice.py +++ /dev/null @@ -1,971 +0,0 @@ -import base64 -import copy -import datetime -import json -import os -import re -import threading -from io import BytesIO -from threading import Lock - -import jwt -import pandas as pd -from bokeh.embed import json_item -from bson import json_util -from flask import (Flask, abort, flash, jsonify, make_response, redirect, - render_template, request, send_file, send_from_directory, - url_for) -from werkzeug.utils import secure_filename - -from fedn.clients.reducer.interfaces import CombinerInterface -from 
fedn.clients.reducer.plots import Plot -from fedn.clients.reducer.state import ReducerState, ReducerStateToString -from fedn.common.tracer.mongotracer import MongoTracer -from fedn.utils.checksum import sha - -UPLOAD_FOLDER = '/app/client/package/' -ALLOWED_EXTENSIONS = {'gz', 'bz2', 'tar', 'zip', 'tgz'} - - -def allowed_file(filename): - """ - - :param filename: - :return: - """ - return '.' in filename and \ - filename.rsplit('.', 1)[1].lower() in ALLOWED_EXTENSIONS - - -def encode_auth_token(secret_key): - """Generates the Auth Token - :return: string - """ - try: - payload = { - 'exp': datetime.datetime.utcnow() + datetime.timedelta(days=90, seconds=0), - 'iat': datetime.datetime.utcnow(), - 'status': 'Success' - } - token = jwt.encode( - payload, - secret_key, - algorithm='HS256' - ) - print('\n\n\nSECURE MODE ENABLED, USE TOKEN TO ACCESS REDUCER: **** {} ****\n\n\n'.format(token)) - return token - except Exception as e: - return e - - -def decode_auth_token(auth_token, secret): - """Decodes the auth token - :param auth_token: - :return: string - """ - try: - payload = jwt.decode( - auth_token, - secret, - algorithms=['HS256'] - ) - return payload["status"] - except jwt.ExpiredSignatureError as e: - print(e) - return 'Token has expired.' - except jwt.InvalidTokenError as e: - print(e) - return 'Invalid token.' 
- - -class ReducerRestService: - """ - - """ - - def __init__(self, config, control, certificate_manager): - - print("config object!: \n\n\n\n{}".format(config)) - if config['host']: - self.host = config['host'] - else: - self.host = None - - self.name = config['name'] - - self.port = config['port'] - self.network_id = config['name'] + '-network' - - if 'token' in config.keys(): - self.token_auth_enabled = True - else: - self.token_auth_enabled = False - - if 'secret_key' in config.keys(): - self.SECRET_KEY = config['secret_key'] - else: - self.SECRET_KEY = None - - self.remote_compute_context = config["remote_compute_context"] - if self.remote_compute_context: - self.package = 'remote' - else: - self.package = 'local' - - self.control = control - self.certificate_manager = certificate_manager - self.current_compute_context = None - - def to_dict(self): - """ - - :return: - """ - data = { - 'name': self.name - } - return data - - def check_compute_context(self): - """Check if the compute context/package has been configured, - if remote compute context is set to False, True will be returned - - :return: True if configured - :rtype: bool - """ - if not self.remote_compute_context: - return True - - if not self.control.get_compute_context(): - return False - else: - return True - - def check_initial_model(self): - """Check if initial model (seed model) has been configured - - :return: True if configured, else False - :rtype: bool - """ - - if self.control.get_latest_model(): - return True - else: - return False - - def check_configured_response(self): - """Check if everything has been configured for client to connect, - return response if not. 
- - :return: Reponse with message if not configured, else None - :rtype: json - """ - if self.control.state() == ReducerState.setup: - return jsonify({'status': 'retry', - 'package': self.package, - 'msg': "Controller is not configured."}) - - if not self.check_compute_context(): - return jsonify({'status': 'retry', - 'package': self.package, - 'msg': "Compute package is not configured. Please upload the compute package."}) - - if not self.check_initial_model(): - return jsonify({'status': 'retry', - 'package': self.package, - 'msg': "Initial model is not configured. Please upload the model."}) - - if not self.control.idle(): - return jsonify({'status': 'retry', - 'package': self.package, - 'msg': "Conroller is not in idle state, try again later. "}) - return None - - def check_configured(self): - """Check if compute package has been configured and that and that the - state of the ReducerControl is not in setup otherwise render setup template. - Check if initial model has been configured, otherwise render setup_model template. - :return: Rendered html template or None - """ - if not self.check_compute_context(): - return render_template('setup.html', client=self.name, state=ReducerStateToString(self.control.state()), - logs=None, refresh=False, - message='Please set the compute package') - - if self.control.state() == ReducerState.setup: - return render_template('setup.html', client=self.name, state=ReducerStateToString(self.control.state()), - logs=None, refresh=True, - message='Warning. Reducer is not base-configured. 
please do so with config file.') - - if not self.check_initial_model(): - return render_template('setup_model.html', message="Please set the initial model.") - - return None - - def authorize(self, r, secret): - """Authorize client token - - :param r: Request - :type r: [type] - :param token: Token to verify against - :type token: string - """ - try: - # Get token - if 'Authorization' in r.headers: # header auth - request_token = r.headers.get('Authorization').split()[1] - elif 'token' in r.args: # args auth - request_token = str(r.args.get('token')) - elif 'fedn_token' in r.cookies: - request_token = r.cookies.get('fedn_token') - else: # no token provided - print('Authorization failed. No token provided.', flush=True) - abort(401) - - # Log token and secret - print( - f'Secret: {secret}. Request token: {request_token}.', flush=True) - - # Authenticate - status = decode_auth_token(request_token, secret) - if status == 'Success': - return True - else: - print('Authorization failed. Status: "{}"'.format( - status), flush=True) - abort(401) - except Exception as e: - print('Authorization failed. 
Expection encountered: "{}".'.format( - e), flush=True) - abort(401) - - def run(self): - """ - - :return: - """ - app = Flask(__name__) - - app.config['UPLOAD_FOLDER'] = UPLOAD_FOLDER - app.config['SECRET_KEY'] = self.SECRET_KEY - - @app.route('/') - def index(): - """ - - :return: - """ - # Token auth - if self.token_auth_enabled: - self.authorize(request, app.config.get('SECRET_KEY')) - - # Render template - not_configured_template = self.check_configured() - if not_configured_template: - template = not_configured_template - else: - events = self.control.get_events() - message = request.args.get('message', None) - message_type = request.args.get('message_type', None) - template = render_template('events.html', client=self.name, state=ReducerStateToString(self.control.state()), - events=events, - logs=None, refresh=True, configured=True, message=message, message_type=message_type) - - # Set token cookie in response if needed - response = make_response(template) - if 'token' in request.args: # args auth - response.set_cookie('fedn_token', str(request.args['token'])) - - # Return response - return response - - @app.route('/status') - def status(): - """ - - :return: - """ - return {'state': ReducerStateToString(self.control.state())} - - @app.route('/netgraph') - def netgraph(): - """ - Creates nodes and edges for network graph - - :return: nodes and edges as keys - :rtype: dict - """ - result = {'nodes': [], 'edges': []} - - result['nodes'].append({ - "id": "reducer", - "label": "Reducer", - "role": 'reducer', - "status": 'active', - "name": 'reducer', # TODO: get real host name - "type": 'reducer', - }) - - combiner_info = combiner_status() - client_info = client_status() - - if len(combiner_info) < 1: - return result - - for combiner in combiner_info: - print("combiner info {}".format(combiner_info), flush=True) - try: - result['nodes'].append({ - "id": combiner['name'], # "n{}".format(count), - "label": "Combiner ({} 
clients)".format(combiner['nr_active_clients']), - "role": 'combiner', - "status": 'active', # TODO: Hard-coded, combiner_info does not contain status - "name": combiner['name'], - "type": 'combiner', - }) - except Exception as err: - print(err) - - for client in client_info['active_clients']: - try: - if client['status'] != 'offline': - result['nodes'].append({ - "id": str(client['_id']), - "label": "Client", - "role": client['role'], - "status": client['status'], - "name": client['name'], - "combiner": client['combiner'], - "type": 'client', - }) - except Exception as err: - print(err) - - count = 0 - for node in result['nodes']: - try: - if node['type'] == 'combiner': - result['edges'].append( - { - "id": "e{}".format(count), - "source": node['id'], - "target": 'reducer', - } - ) - elif node['type'] == 'client': - result['edges'].append( - { - "id": "e{}".format(count), - "source": node['combiner'], - "target": node['id'], - } - ) - except Exception: - pass - count = count + 1 - return result - - @app.route('/networkgraph') - def network_graph(): - - try: - plot = Plot(self.control.statestore) - result = netgraph() - df_nodes = pd.DataFrame(result['nodes']) - df_edges = pd.DataFrame(result['edges']) - graph = plot.make_netgraph_plot(df_edges, df_nodes) - return json.dumps(json_item(graph, "myplot")) - except Exception: - raise - # return '' - - @app.route('/events') - def events(): - """ - - :return: - """ - - json_docs = [] - for doc in self.control.get_events(): - json_doc = json.dumps(doc, default=json_util.default) - json_docs.append(json_doc) - - json_docs.reverse() - return {'events': json_docs} - - @app.route('/add') - def add(): - """ Add a combiner to the network. 
""" - print("Adding combiner to network:", flush=True) - if self.token_auth_enabled: - self.authorize(request, app.config.get('SECRET_KEY')) - if self.control.state() == ReducerState.setup: - return jsonify({'status': 'retry'}) - - name = request.args.get('name', None) - address = str(request.args.get('address', None)) - fqdn = str(request.args.get('fqdn', None)) - port = request.args.get('port', None) - secure_grpc = request.args.get('secure', None) - - if port is None or address is None or name is None or secure_grpc is None: - return "Please specify correct parameters." - - # Try to retrieve combiner from db - combiner = self.control.network.get_combiner(name) - if not combiner: - if secure_grpc == 'True': - certificate, key = self.certificate_manager.get_or_create( - address).get_keypair_raw() - _ = base64.b64encode(certificate) - _ = base64.b64encode(key) - certificate = copy.deepcopy(certificate) - key = copy.deepcopy(key) - else: - certificate = None - key = None - - combiner = CombinerInterface( - self, - name=name, - address=address, - fqdn=fqdn, - port=port, - certificate=certificate, - key=key, - ip=request.remote_addr) - self.control.network.add_combiner(combiner) - - combiner = self.control.network.get_combiner(name) - - ret = { - 'status': 'added', - 'storage': self.control.statestore.get_storage_backend(), - 'statestore': self.control.statestore.get_config(), - 'certificate': combiner['certificate'], - 'key': combiner['key'] - } - - return jsonify(ret) - - @app.route('/eula', methods=['GET', 'POST']) - def eula(): - """ - - :return: - """ - for r in request.headers: - print("header contains: {}".format(r), flush=True) - - return render_template('eula.html', configured=True) - - @app.route('/models', methods=['GET', 'POST']) - def models(): - """ - - :return: - """ - # Token auth - if self.token_auth_enabled: - self.authorize(request, app.config.get('SECRET_KEY')) - - if request.method == 'POST': - # upload seed file - uploaded_seed = 
request.files['seed'] - if uploaded_seed: - - a = BytesIO() - a.seek(0, 0) - uploaded_seed.seek(0) - a.write(uploaded_seed.read()) - helper = self.control.get_helper() - model = helper.load_model_from_BytesIO(a.getbuffer()) - self.control.commit(uploaded_seed.filename, model) - else: - not_configured = self.check_configured() - if not_configured: - return not_configured - - plot = Plot(self.control.statestore) - try: - valid_metrics = plot.fetch_valid_metrics() - box_plot = plot.create_box_plot(valid_metrics[0]) - except Exception as e: - valid_metrics = None - box_plot = None - print(e, flush=True) - - h_latest_model_id = self.control.get_latest_model() - - model_info = self.control.get_model_info() - return render_template('models.html', box_plot=box_plot, metrics=valid_metrics, h_latest_model_id=h_latest_model_id, seed=True, - model_info=model_info, configured=True) - - seed = True - return redirect(url_for('models', seed=seed)) - - @app.route('/delete_model_trail', methods=['GET', 'POST']) - def delete_model_trail(): - """ - - :return: - """ - if request.method == 'POST': - - statestore_config = self.control.statestore.get_config() - self.tracer = MongoTracer( - statestore_config['mongo_config'], statestore_config['network_id']) - try: - self.control.drop_models() - except Exception: - pass - - # drop objects in minio - self.control.delete_bucket_objects() - return redirect(url_for('models')) - seed = True - return redirect(url_for('models', seed=seed)) - - @app.route('/drop_control', methods=['GET', 'POST']) - def drop_control(): - """ - - :return: - """ - if request.method == 'POST': - self.control.statestore.drop_control() - return redirect(url_for('control')) - return redirect(url_for('control')) - - # http://localhost:8090/control?rounds=4&model_id=879fa112-c861-4cb1-a25d-775153e5b548 - @app.route('/control', methods=['GET', 'POST']) - def control(): - """ Main page for round control. Configure, start and stop global training rounds. 
""" - # Token auth - if self.token_auth_enabled: - self.authorize(request, app.config.get('SECRET_KEY')) - - not_configured = self.check_configured() - if not_configured: - return not_configured - client = self.name - state = ReducerStateToString(self.control.state()) - logs = None - refresh = True - - if self.remote_compute_context: - try: - self.current_compute_context = self.control.get_compute_context() - except Exception: - self.current_compute_context = None - else: - self.current_compute_context = "None:Local" - if self.control.state() == ReducerState.monitoring: - return redirect( - url_for('index', state=state, refresh=refresh, message="Reducer is in monitoring state")) - - if request.method == 'POST': - timeout = float(request.form.get('timeout', 180)) - rounds = int(request.form.get('rounds', 1)) - task = (request.form.get('task', '')) - clients_required = request.form.get('clients_required', 1) - clients_requested = request.form.get('clients_requested', 8) - - # checking if there are enough clients connected to start! - clients_available = 0 - for combiner in self.control.network.get_combiners(): - try: - combiner_state = combiner.report() - nac = combiner_state['nr_active_clients'] - clients_available = clients_available + int(nac) - except Exception: - pass - - if clients_available < clients_required: - return redirect(url_for('index', state=state, - message="Not enough clients available to start rounds! 
" - "check combiner client capacity", - message_type='warning')) - - validate = request.form.get('validate', False) - if validate == 'False': - validate = False - helper_type = request.form.get('helper', 'keras') - # self.control.statestore.set_framework(helper_type) - - latest_model_id = self.control.get_latest_model() - - config = {'round_timeout': timeout, 'model_id': latest_model_id, - 'rounds': rounds, 'clients_required': clients_required, - 'clients_requested': clients_requested, 'task': task, - 'validate': validate, 'helper_type': helper_type} - - threading.Thread(target=self.control.instruct, - args=(config,)).start() - # self.control.instruct(config) - return redirect(url_for('index', state=state, refresh=refresh, message="Sent execution plan.", - message_type='SUCCESS')) - - else: - seed_model_id = None - latest_model_id = None - try: - seed_model_id = self.control.get_first_model()[0] - latest_model_id = self.control.get_latest_model() - except Exception: - pass - - return render_template('index.html', latest_model_id=latest_model_id, - compute_package=self.current_compute_context, - seed_model_id=seed_model_id, - helper=self.control.statestore.get_framework(), validate=True, configured=True) - - client = self.name - state = ReducerStateToString(self.control.state()) - logs = None - refresh = False - return render_template('index.html', client=client, state=state, logs=logs, refresh=refresh, - configured=True) - - @app.route('/assign') - def assign(): - """Handle client assignment requests. 
""" - if self.token_auth_enabled: - self.authorize(request, app.config.get('SECRET_KEY')) - - response = self.check_configured_response() - - if response: - return response - - name = request.args.get('name', None) - combiner_preferred = request.args.get('combiner', None) - - if combiner_preferred: - combiner = self.control.find(combiner_preferred) - else: - combiner = self.control.find_available_combiner() - - if combiner is None: - return jsonify({'status': 'retry', - 'package': self.package, - 'msg': "Failed to assign to a combiner, try again later."}) - - client = { - 'name': name, - 'combiner_preferred': combiner_preferred, - 'combiner': combiner.name, - 'ip': request.remote_addr, - 'status': 'available' - } - - # Add client to database - self.control.network.add_client(client) - - # Return connection information to client - if combiner.certificate: - cert_b64 = base64.b64encode(combiner.certificate) - cert = str(cert_b64).split('\'')[1] - else: - cert = None - - response = { - 'status': 'assigned', - 'host': combiner.address, - 'fqdn': combiner.fqdn, - 'package': self.package, - 'ip': combiner.ip, - 'port': combiner.port, - 'certificate': cert, - 'model_type': self.control.statestore.get_framework() - } - - return jsonify(response) - - def combiner_status(): - """ Get current status reports from all combiners registered in the network. - - :return: - """ - combiner_info = [] - for combiner in self.control.network.get_combiners(): - try: - report = combiner.report() - combiner_info.append(report) - except Exception: - pass - return combiner_info - - def client_status(): - """ - Get current status of clients (available) from DB compared with client status from all combiners, - update client status to DB and add their roles. 
- """ - client_info = self.control.network.get_client_info() - combiner_info = combiner_status() - try: - all_active_trainers = [] - all_active_validators = [] - - for client in combiner_info: - active_trainers_str = client['active_trainers'] - active_validators_str = client['active_validators'] - active_trainers_str = re.sub( - '[^a-zA-Z0-9-:\n\.]', '', active_trainers_str).replace('name:', ' ') # noqa: W605 - active_validators_str = re.sub( - '[^a-zA-Z0-9-:\n\.]', '', active_validators_str).replace('name:', ' ') # noqa: W605 - all_active_trainers.extend( - ' '.join(active_trainers_str.split(" ")).split()) - all_active_validators.extend( - ' '.join(active_validators_str.split(" ")).split()) - - active_trainers_list = [ - client for client in client_info if client['name'] in all_active_trainers] - active_validators_list = [ - cl for cl in client_info if cl['name'] in all_active_validators] - all_clients = [cl for cl in client_info] - - for client in all_clients: - status = 'offline' - role = 'None' - self.control.network.update_client_data( - client, status, role) - - all_active_clients = active_validators_list + active_trainers_list - for client in all_active_clients: - status = 'active' - if client in active_trainers_list and client in active_validators_list: - role = 'trainer-validator' - elif client in active_trainers_list: - role = 'trainer' - elif client in active_validators_list: - role = 'validator' - else: - role = 'unknown' - self.control.network.update_client_data( - client, status, role) - - return {'active_clients': all_clients, - 'active_trainers': active_trainers_list, - 'active_validators': active_validators_list - } - except Exception: - pass - - return {'active_clients': [], - 'active_trainers': [], - 'active_validators': [] - } - - @app.route('/metric_type', methods=['GET', 'POST']) - def change_features(): - """ - - :return: - """ - feature = request.args['selected'] - plot = Plot(self.control.statestore) - graphJSON = 
plot.create_box_plot(feature) - return graphJSON - - @app.route('/dashboard') - def dashboard(): - """ - - :return: - """ - # Token auth - if self.token_auth_enabled: - self.authorize(request, app.config.get('SECRET_KEY')) - - not_configured = self.check_configured() - if not_configured: - return not_configured - - plot = Plot(self.control.statestore) - combiners_plot = plot.create_combiner_plot() - - timeline_plot = None - table_plot = None - clients_plot = plot.create_client_plot() - client_histogram_plot = plot.create_client_histogram_plot() - - return render_template('dashboard.html', show_plot=True, - table_plot=table_plot, - timeline_plot=timeline_plot, - clients_plot=clients_plot, - client_histogram_plot=client_histogram_plot, - combiners_plot=combiners_plot, - configured=True - ) - - @app.route('/network') - def network(): - """ - - :return: - """ - # Token auth - if self.token_auth_enabled: - self.authorize(request, app.config.get('SECRET_KEY')) - - not_configured = self.check_configured() - if not_configured: - return not_configured - plot = Plot(self.control.statestore) - round_time_plot = plot.create_round_plot() - mem_cpu_plot = plot.create_cpu_plot() - combiner_info = combiner_status() - active_clients = client_status() - return render_template('network.html', network_plot=True, - round_time_plot=round_time_plot, - mem_cpu_plot=mem_cpu_plot, - combiner_info=combiner_info, - active_clients=active_clients['active_clients'], - active_trainers=active_clients['active_trainers'], - active_validators=active_clients['active_validators'], - configured=True - ) - - @app.route('/config/download', methods=['GET']) - def config_download(): - """ - - :return: - """ - chk_string = "" - name = self.control.get_compute_context() - if name is None or name == '': - chk_string = '' - else: - file_path = os.path.join(UPLOAD_FOLDER, name) - print("trying to get {}".format(file_path)) - - try: - sum = str(sha(file_path)) - except FileNotFoundError: - sum = '' - chk_string = 
"checksum: {}".format(sum) - - network_id = self.network_id - discover_host = self.name - discover_port = self.port - ctx = """network_id: {network_id} -discover_host: {discover_host} -discover_port: {discover_port} -{chk_string}""".format(network_id=network_id, - discover_host=discover_host, - discover_port=discover_port, - chk_string=chk_string) - - obj = BytesIO() - obj.write(ctx.encode('UTF-8')) - obj.seek(0) - return send_file(obj, - as_attachment=True, - download_name='client.yaml', - mimetype='application/x-yaml') - - @app.route('/context', methods=['GET', 'POST']) - def context(): - """ - - :return: - """ - # Token auth - if self.token_auth_enabled: - self.authorize(request, app.config.get('SECRET_KEY')) - - # if reset is not empty then allow context re-set - reset = request.args.get('reset', None) - if reset: - return render_template('context.html') - - if request.method == 'POST': - - if 'file' not in request.files: - flash('No file part') - return redirect(url_for('context')) - - file = request.files['file'] - helper_type = request.form.get('helper', 'keras') - # if user does not select file, browser also - # submit an empty part without filename - if file.filename == '': - flash('No selected file') - return redirect(url_for('context')) - - if file and allowed_file(file.filename): - filename = secure_filename(file.filename) - file_path = os.path.join( - app.config['UPLOAD_FOLDER'], filename) - file.save(file_path) - - if self.control.state() == ReducerState.instructing or self.control.state() == ReducerState.monitoring: - return "Not allowed to change context while execution is ongoing." 
- - self.control.set_compute_context(filename, file_path) - self.control.statestore.set_framework(helper_type) - return redirect(url_for('control')) - - name = request.args.get('name', '') - - if name == '': - name = self.control.get_compute_context() - if name is None or name == '': - return render_template('context.html') - - # There is a potential race condition here, if one client requests a package and at - # the same time another one triggers a fetch from Minio and writes to disk. - try: - mutex = Lock() - mutex.acquire() - return send_from_directory(app.config['UPLOAD_FOLDER'], name, as_attachment=True) - except Exception: - try: - data = self.control.get_compute_package(name) - file_path = os.path.join(app.config['UPLOAD_FOLDER'], name) - with open(file_path, 'wb') as fh: - fh.write(data) - return send_from_directory(app.config['UPLOAD_FOLDER'], name, as_attachment=True) - except Exception: - raise - finally: - mutex.release() - - return render_template('context.html') - - @app.route('/checksum', methods=['GET', 'POST']) - def checksum(): - """ - - :return: - """ - # sum = '' - name = request.args.get('name', None) - if name == '' or name is None: - name = self.control.get_compute_context() - if name is None or name == '': - return jsonify({}) - - file_path = os.path.join(UPLOAD_FOLDER, name) - print("trying to get {}".format(file_path)) - - try: - sum = str(sha(file_path)) - except FileNotFoundError: - sum = '' - - data = {'checksum': sum} - - return jsonify(data) - - if not self.host: - bind = "0.0.0.0" - else: - bind = self.host - - app.run(host=bind, port=self.port) - - return app diff --git a/fedn/fedn/clients/reducer/statestore/__init__.py b/fedn/fedn/clients/reducer/statestore/__init__.py deleted file mode 100644 index 52ce8c9c3..000000000 --- a/fedn/fedn/clients/reducer/statestore/__init__.py +++ /dev/null @@ -1,3 +0,0 @@ -# -# Scaleout Systems AB -# __author__ = 'Morgan Ekmefjord morgan@scaleout.se' diff --git 
a/fedn/fedn/clients/reducer/statestore/mongoreducerstatestore.py b/fedn/fedn/clients/reducer/statestore/mongoreducerstatestore.py deleted file mode 100644 index f60ae0bad..000000000 --- a/fedn/fedn/clients/reducer/statestore/mongoreducerstatestore.py +++ /dev/null @@ -1,416 +0,0 @@ -import copy -from datetime import datetime - -import pymongo -import yaml - -from fedn.clients.reducer.state import (ReducerStateToString, - StringToReducerState) -from fedn.common.storage.db.mongo import connect_to_mongodb - -from .reducerstatestore import ReducerStateStore - - -class MongoReducerStateStore(ReducerStateStore): - """ - - """ - - def __init__(self, network_id, config, defaults=None): - self.__inited = False - try: - self.config = config - self.network_id = network_id - self.mdb = connect_to_mongodb(self.config, self.network_id) - - # FEDn network - self.network = self.mdb['network'] - self.reducer = self.network['reducer'] - self.combiners = self.network['combiners'] - self.clients = self.network['clients'] - self.storage = self.network['storage'] - self.certificates = self.network['certificates'] - # Control - self.control = self.mdb['control'] - self.control_config = self.control['config'] - self.state = self.control['state'] - self.model = self.control['model'] - self.round = self.control["round"] - - # Logging and dashboards - self.status = self.control["status"] - self.round_time = self.control["round_time"] - self.psutil_monitoring = self.control["psutil_monitoring"] - self.combiner_round_time = self.control['combiner_round_time'] - - self.__inited = True - except Exception as e: - print("FAILED TO CONNECT TO MONGO, {}".format(e), flush=True) - self.state = None - self.model = None - self.control = None - self.network = None - self.combiners = None - self.clients = None - raise - - if defaults: - with open(defaults, 'r') as file: - try: - settings = dict(yaml.safe_load(file)) - print(settings, flush=True) - - # Control settings - if "control" in settings and 
settings["control"]: - control = settings['control'] - try: - self.transition(str(control['state'])) - except KeyError: - self.transition("idle") - - if "model" in control: - if not self.get_latest(): - self.set_latest(str(control['model'])) - else: - print( - "Model trail already initialized - refusing to overwrite from config. Purge model trail if you want to reseed the system.", - flush=True) - - if "context" in control: - print("Setting filepath to {}".format( - control['context']), flush=True) - # TODO Fix the ugly latering of indirection due to a bug in secure_filename returning an object with filename as attribute - # TODO fix with unboxing of value before storing and where consuming. - self.control.config.update_one({'key': 'package'}, - {'$set': {'filename': control['context']}}, True) - if "helper" in control: - # self.set_framework(control['helper']) - pass - - round_config = {'timeout': 180, 'validate': True} - try: - round_config['timeout'] = control['timeout'] - except Exception: - pass - - try: - round_config['validate'] = control['validate'] - except Exception: - pass - - # Storage settings - self.set_storage_backend(settings['storage']) - - self.__inited = True - except yaml.YAMLError as e: - print(e) - - def is_inited(self): - """ - - :return: - """ - return self.__inited - - def get_config(self): - """ - - :return: - """ - data = { - 'type': 'MongoDB', - 'mongo_config': self.config, - 'network_id': self.network_id - } - return data - - def state(self): - """ - - :return: - """ - return StringToReducerState(self.state.find_one()['current_state']) - - def transition(self, state): - """ - - :param state: - :return: - """ - old_state = self.state.find_one({'state': 'current_state'}) - if old_state != state: - return self.state.update_one({'state': 'current_state'}, {'$set': {'state': ReducerStateToString(state)}}, True) - else: - print("Not updating state, already in {}".format( - ReducerStateToString(state))) - - def set_latest(self, model_id): - 
""" - - :param model_id: - """ - - self.model.update_one({'key': 'current_model'}, { - '$set': {'model': model_id}}, True) - self.model.update_one({'key': 'model_trail'}, {'$push': {'model': model_id, 'committed_at': str(datetime.now())}}, - True) - - def get_first(self): - """ Return model_id for the latest model in the model_trail """ - - ret = self.model.find_one({'key': 'model_trail'}, sort=[ - ("committed_at", pymongo.ASCENDING)]) - if ret is None: - return None - - try: - model_id = ret['model'] - if model_id == '' or model_id == ' ': # ugly check for empty string - return None - return model_id - except (KeyError, IndexError): - return None - - def get_latest(self): - """ Return model_id for the latest model in the model_trail """ - ret = self.model.find_one({'key': 'current_model'}) - if ret is None: - return None - - try: - model_id = ret['model'] - if model_id == '' or model_id == ' ': # ugly check for empty string - return None - return model_id - except (KeyError, IndexError): - return None - - def set_round_config(self, config): - """ - - :param config: - """ - self.control.config.update_one( - {'key': 'round_config'}, {'$set': config}, True) - - def get_round_config(self): - """ - - :return: - """ - ret = self.control.config.find({'key': 'round_config'}) - try: - retcheck = ret[0] - if retcheck is None or retcheck == '' or retcheck == ' ': # ugly check for empty string - return None - return retcheck - except (KeyError, IndexError): - return None - - def set_compute_context(self, filename): - """ - - :param filename: - """ - self.control.config.update_one( - {'key': 'package'}, {'$set': {'filename': filename}}, True) - self.control.config.update_one({'key': 'package_trail'}, - {'$push': {'filename': filename, 'committed_at': str(datetime.now())}}, True) - - def get_compute_context(self): - """ - - :return: - """ - ret = self.control.config.find({'key': 'package'}) - try: - retcheck = ret[0] - if retcheck is None or retcheck == '' or retcheck == ' ': # 
ugly check for empty string - return None - return retcheck - except (KeyError, IndexError): - return None - - def set_framework(self, helper): - """ - - :param helper: - """ - self.control.config.update_one({'key': 'package'}, - {'$set': {'helper': helper}}, True) - - def get_framework(self): - """ - - :return: - """ - ret = self.control.config.find_one({'key': 'package'}) - # if local compute package used, then 'package' is None - if not ret: - # get framework from round_config instead - ret = self.control.config.find_one({'key': 'round_config'}) - print('FRAMEWORK:', ret) - try: - retcheck = ret['helper'] - if retcheck == '' or retcheck == ' ': # ugly check for empty string - return None - return retcheck - except (KeyError, IndexError): - return None - - def get_model_info(self): - """ - - :return: - """ - ret = self.model.find_one({'key': 'model_trail'}) - try: - if ret: - committed_at = ret['committed_at'] - model = ret['model'] - model_dictionary = dict(zip(model, committed_at)) - return model_dictionary - else: - return None - except (KeyError, IndexError): - return None - - def get_events(self): - """ - - :return: - """ - ret = self.control.status.find({}) - return ret - - def get_storage_backend(self): - """ """ - try: - ret = self.storage.find( - {'status': 'enabled'}, projection={'_id': False}) - return ret[0] - except (KeyError, IndexError): - return None - - def set_storage_backend(self, config): - """ """ - config = copy.deepcopy(config) - config['updated_at'] = str(datetime.now()) - config['status'] = 'enabled' - self.storage.update_one( - {'storage_type': config['storage_type']}, {'$set': config}, True) - - def set_reducer(self, reducer_data): - """ """ - reducer_data['updated_at'] = str(datetime.now()) - self.reducer.update_one({'name': reducer_data['name']}, { - '$set': reducer_data}, True) - - def get_reducer(self): - """ """ - try: - ret = self.reducer.find_one() - return ret - except Exception: - return None - - def list_combiners(self): - """ 
""" - try: - ret = self.combiners.find() - return list(ret) - except Exception: - return None - - def get_combiner(self, name): - """ """ - try: - ret = self.combiners.find_one({'name': name}) - return ret - except Exception: - return None - - def get_combiners(self): - """ """ - try: - ret = self.combiners.find() - return list(ret) - except Exception: - return None - - def set_combiner(self, combiner_data): - """ - Set or update combiner record. - combiner_data: dictionary, output of combiner.to_dict()) - """ - - combiner_data['updated_at'] = str(datetime.now()) - self.combiners.update_one({'name': combiner_data['name']}, { - '$set': combiner_data}, True) - - def delete_combiner(self, combiner): - """ """ - try: - self.combiners.delete_one({'name': combiner}) - except Exception: - print("WARNING, failed to delete combiner: {}".format( - combiner), flush=True) - - def set_client(self, client_data): - """ - Set or update client record. - client_data: dictionarys - """ - client_data['updated_at'] = str(datetime.now()) - self.clients.update_one({'name': client_data['name']}, { - '$set': client_data}, True) - - def get_client(self, name): - """ """ - try: - ret = self.clients.find({'key': name}) - if list(ret) == []: - return None - else: - return ret - except Exception: - return None - - def list_clients(self): - """ """ - try: - ret = self.clients.find() - return list(ret) - except Exception: - return None - - def drop_control(self): - """ """ - # Control - self.state.drop() - self.control_config.drop() - self.control.drop() - - self.drop_models() - - def drop_models(self): - """ """ - self.model.drop() - self.combiner_round_time.drop() - self.status.drop() - self.psutil_monitoring.drop() - self.round_time.drop() - self.round.drop() - - def update_client_status(self, client_data, status, role): - """ - Set or update client status. 
- assign roles to the active clients (trainer, validator, trainer-validator) - """ - self.clients.update_one({"name": client_data['name']}, - {"$set": - { - "status": status, - "role": role - } - }) diff --git a/fedn/fedn/clients/reducer/statestore/reducerstatestore.py b/fedn/fedn/clients/reducer/statestore/reducerstatestore.py deleted file mode 100644 index 45ef0ff10..000000000 --- a/fedn/fedn/clients/reducer/statestore/reducerstatestore.py +++ /dev/null @@ -1,47 +0,0 @@ -from abc import ABC, abstractmethod - - -class ReducerStateStore(ABC): - """ - - """ - - def __init__(self): - pass - - @abstractmethod - def state(self): - """ - - """ - pass - - @abstractmethod - def transition(self, state): - """ - - :param state: - """ - pass - - @abstractmethod - def set_latest(self, model_id): - """ - - :param model_id: - """ - pass - - @abstractmethod - def get_latest(self): - """ - - """ - pass - - @abstractmethod - def is_inited(self): - """ - - """ - pass diff --git a/fedn/fedn/clients/reducer/templates/context.html b/fedn/fedn/clients/reducer/templates/context.html deleted file mode 100644 index 5d814f467..000000000 --- a/fedn/fedn/clients/reducer/templates/context.html +++ /dev/null @@ -1,34 +0,0 @@ -{% extends "index.html" %} - -{% block content %} -
-
-
Upload and set compute package
-
{{ message }}
-
-
-
-
- -
- -
- - -
- -
-
- -
-
- -
-
-
- - -{% endblock %} diff --git a/fedn/fedn/clients/reducer/templates/events.html b/fedn/fedn/clients/reducer/templates/events.html deleted file mode 100644 index d3c34beb5..000000000 --- a/fedn/fedn/clients/reducer/templates/events.html +++ /dev/null @@ -1,43 +0,0 @@ -{% extends "index.html" %} - -{% block content %} - - -
-
-
Events
-
-
- - - - -
- -
-
- - -{% endblock %} diff --git a/fedn/fedn/clients/reducer/templates/index.html b/fedn/fedn/clients/reducer/templates/index.html deleted file mode 100644 index a096fa037..000000000 --- a/fedn/fedn/clients/reducer/templates/index.html +++ /dev/null @@ -1,371 +0,0 @@ - - - - - - - - - - {% if refresh %} - - {% endif %} - - FEDn Reducer - - - - - - - - - - - - - - - - -
- -
- - -
-
-

-
-
- {% if message %} - {% if message_type == 'WARNING' %} - -
-
- - -
-
- - - - - - \ No newline at end of file diff --git a/fedn/fedn/combiner.py b/fedn/fedn/combiner.py deleted file mode 100644 index daaab3454..000000000 --- a/fedn/fedn/combiner.py +++ /dev/null @@ -1,685 +0,0 @@ -import base64 -import queue -import re -import signal -import sys -import threading -import time -import uuid -from datetime import datetime, timedelta -from enum import Enum - -import fedn.common.net.grpc.fedn_pb2 as fedn -import fedn.common.net.grpc.fedn_pb2_grpc as rpc -from fedn.clients.combiner.modelservice import ModelService -from fedn.clients.combiner.roundcontrol import RoundControl -from fedn.common.net.connect import ConnectorCombiner, Status -from fedn.common.net.grpc.server import Server -from fedn.common.storage.s3.s3repo import S3ModelRepository -from fedn.common.tracer.mongotracer import MongoTracer - -VALID_NAME_REGEX = '^[a-zA-Z0-9_-]*$' - - -class Role(Enum): - WORKER = 1 - COMBINER = 2 - REDUCER = 3 - OTHER = 4 - - -def role_to_proto_role(role): - """ - - :param role: - :return: - """ - if role == Role.COMBINER: - return fedn.COMBINER - if role == Role.WORKER: - return fedn.WORKER - if role == Role.REDUCER: - return fedn.REDUCER - if role == Role.OTHER: - return fedn.OTHER - - -#################################################################################################################### -#################################################################################################################### - -class Combiner(rpc.CombinerServicer, rpc.ReducerServicer, rpc.ConnectorServicer, rpc.ControlServicer): - """ Combiner gRPC server. """ - - def __init__(self, connect_config): - - # Holds client queues - self.clients = {} - - self.modelservice = ModelService() - - # Validate combiner name - match = re.search(VALID_NAME_REGEX, connect_config['myname']) - if not match: - raise ValueError('Unallowed character in combiner name. 
Allowed characters: a-z, A-Z, 0-9, _, -.') - - self.id = connect_config['myname'] - self.role = Role.COMBINER - self.max_clients = connect_config['max_clients'] - - self.model_id = None - - announce_client = ConnectorCombiner(host=connect_config['discover_host'], - port=connect_config['discover_port'], - myhost=connect_config['myhost'], - fqdn=connect_config['fqdn'], - myport=connect_config['myport'], - token=connect_config['token'], - name=connect_config['myname'], - secure=connect_config['secure'], - verify=connect_config['verify']) - - response = None - while True: - status, response = announce_client.announce() - if status == Status.TryAgain: - print(response, flush=True) - time.sleep(5) - continue - if status == Status.Assigned: - config = response - print( - "COMBINER: was announced successfully. Waiting for clients and commands!", flush=True) - break - if status == Status.UnAuthorized: - print(response, flush=True) - sys.exit("Exiting: Unauthorized") - - cert = config['certificate'] - key = config['key'] - - if config['certificate']: - cert = base64.b64decode(config['certificate']) # .decode('utf-8') - key = base64.b64decode(config['key']) # .decode('utf-8') - - grpc_config = {'port': connect_config['myport'], - 'secure': connect_config['secure'], - 'certificate': cert, - 'key': key} - - self.repository = S3ModelRepository( - config['storage']['storage_config']) - self.server = Server(self, self.modelservice, grpc_config) - - self.tracer = MongoTracer( - config['statestore']['mongo_config'], config['statestore']['network_id']) - - self.control = RoundControl( - self.id, self.repository, self, self.modelservice) - threading.Thread(target=self.control.run, daemon=True).start() - - self.server.start() - - def __whoami(self, client, instance): - - def role_to_proto_role(role): - """ - - :param role: - :return: - """ - if role == Role.COMBINER: - return fedn.COMBINER - if role == Role.WORKER: - return fedn.WORKER - if role == Role.REDUCER: - return fedn.REDUCER - 
if role == Role.OTHER: - return fedn.OTHER - - client.name = instance.id - client.role = role_to_proto_role(instance.role) - return client - - def get_active_model(self): - """ - - :return: - """ - return self.model_id - - def set_active_model(self, model_id): - """ - - :param model_id: - """ - self.model_id = model_id - - def report_status(self, msg, log_level=fedn.Status.INFO, type=None, request=None, flush=True): - print("{}:COMBINER({}):{} {}".format(datetime.now().strftime( - '%Y-%m-%d %H:%M:%S'), self.id, log_level, msg), flush=flush) - - def request_model_update(self, model_id, clients=[]): - """ Ask clients to update the current global model. - - Parameters - ---------- - model_id : str - The id of the model to be updated. - clients : list - List of clients to submit a model update request to. - An empty list (default) results in a broadcast to - all connected trainig clients. - - """ - - request = fedn.ModelUpdateRequest() - self.__whoami(request.sender, self) - request.model_id = model_id - request.correlation_id = str(uuid.uuid4()) - request.timestamp = str(datetime.now()) - - if len(clients) == 0: - clients = self.get_active_trainers() - - for client in clients: - request.receiver.name = client.name - request.receiver.role = fedn.WORKER - self.SendModelUpdateRequest(request, self) - - print("COMBINER: Sent model update request for model {} to clients {}".format( - model_id, clients), flush=True) - - def request_model_validation(self, model_id, clients=[]): - """ Ask clients to validate the current global model. - - Parameters - ---------- - model_id : str - The id of the model to be updated. - clients : list - List of clients to submit a model update request to. - An empty list (default) results in a broadcast to - all connected trainig clients. 
- - """ - - request = fedn.ModelValidationRequest() - self.__whoami(request.sender, self) - request.model_id = model_id - request.correlation_id = str(uuid.uuid4()) - request.timestamp = str(datetime.now()) - - if len(clients) == 0: - clients = self.get_active_validators() - - for client in clients: - request.receiver.name = client.name - request.receiver.role = fedn.WORKER - self.SendModelValidationRequest(request, self) - - print("COMBINER: Sent validation request for model {} to clients {}".format( - model_id, clients), flush=True) - - def _list_clients(self, channel): - request = fedn.ListClientsRequest() - self.__whoami(request.sender, self) - request.channel = channel - clients = self.ListActiveClients(request, self) - return clients.client - - def get_active_trainers(self): - """ - - :return: - """ - trainers = self._list_clients(fedn.Channel.MODEL_UPDATE_REQUESTS) - return trainers - - def get_active_validators(self): - """ - - :return: - """ - validators = self._list_clients(fedn.Channel.MODEL_VALIDATION_REQUESTS) - return validators - - def nr_active_trainers(self): - """ - - :return: - """ - return len(self.get_active_trainers()) - - def nr_active_validators(self): - """ - - :return: - """ - return len(self.get_active_validators()) - - #################################################################################################################### - - def __join_client(self, client): - """ Add a client to the combiner. 
""" - if client.name not in self.clients.keys(): - self.clients[client.name] = {"lastseen": datetime.now()} - - def _subscribe_client_to_queue(self, client, queue_name): - self.__join_client(client) - if queue_name not in self.clients[client.name].keys(): - self.clients[client.name][queue_name] = queue.Queue() - - def __get_queue(self, client, queue_name): - try: - return self.clients[client.name][queue_name] - except KeyError: - raise - - def __get_status_queue(self, client): - return self.__get_queue(client, fedn.Channel.STATUS) - - def _send_request(self, request, queue_name): - self.__route_request_to_client(request, request.receiver, queue_name) - - def _broadcast_request(self, request, queue_name): - """ Publish a request to all subscribed members. """ - active_clients = self._list_active_clients() - for client in active_clients: - self.clients[client.name][queue_name].put(request) - - def __route_request_to_client(self, request, client, queue_name): - try: - q = self.__get_queue(client, queue_name) - q.put(request) - except Exception: - print("Failed to route request to client: {} {}", - request.receiver, queue_name) - raise - - def _send_status(self, status): - - self.tracer.report(status) - for name, client in self.clients.items(): - try: - q = client[fedn.Channel.STATUS] - status.timestamp = str(datetime.now()) - q.put(status) - except KeyError: - pass - - def __register_heartbeat(self, client): - """ Register a client if first time connecting. Update heartbeat timestamp. """ - self.__join_client(client) - self.clients[client.name]["lastseen"] = datetime.now() - - ##################################################################################################################### - - # Control Service - - def Start(self, control: fedn.ControlRequest, context): - """ Push a round config to RoundControl. 
- - :param control: - :param context: - :return: - """ - response = fedn.ControlResponse() - print("\n\n GOT CONTROL **START** from Command {}\n\n".format(control.command), flush=True) - - config = {} - for parameter in control.parameter: - config.update({parameter.key: parameter.value}) - print("\n\nSTARTING ROUND AT COMBINER WITH ROUND CONFIG: {}\n\n".format( - config), flush=True) - - self.control.push_round_config(config) - return response - - def Configure(self, control: fedn.ControlRequest, context): - """ - - :param control: - :param context: - :return: - """ - response = fedn.ControlResponse() - for parameter in control.parameter: - setattr(self, parameter.key, parameter.value) - return response - - def Stop(self, control: fedn.ControlRequest, context): - """ - - :param control: - :param context: - :return: - """ - response = fedn.ControlResponse() - print("\n\n\n\n\n GOT CONTROL **STOP** from Command\n\n\n\n\n", flush=True) - return response - - def Report(self, control: fedn.ControlRequest, context): - """ Descibe current state of the Combiner. 
""" - - response = fedn.ControlResponse() - print("\n\n\n\n\n GOT CONTROL **REPORT** from Command\n\n\n\n\n", flush=True) - - active_trainers = self.get_active_trainers() - p = response.parameter.add() - p.key = "nr_active_trainers" - p.value = str(len(active_trainers)) - - active_validators = self.get_active_validators() - p = response.parameter.add() - p.key = "nr_active_validators" - p.value = str(len(active_validators)) - - active_trainers_ = self.get_active_trainers() - active_trainers = [] - for client in active_trainers_: - active_trainers.append(client) - p = response.parameter.add() - p.key = "active_trainers" - p.value = str(active_trainers) - - active_validators_ = self.get_active_validators() - active_validators = [] - for client in active_validators_: - active_validators.append(client) - p = response.parameter.add() - p.key = "active_validators" - p.value = str(active_validators) - - p = response.parameter.add() - p.key = "nr_active_clients" - p.value = str(len(active_trainers)+len(active_validators)) - - p = response.parameter.add() - p.key = "model_id" - model_id = self.get_active_model() - if model_id is None: - model_id = "" - p.value = str(model_id) - - p = response.parameter.add() - p.key = "nr_unprocessed_compute_plans" - p.value = str(self.control.round_configs.qsize()) - - p = response.parameter.add() - p.key = "name" - p.value = str(self.id) - - return response - - ##################################################################################################################### - - def AllianceStatusStream(self, response, context): - """ A server stream RPC endpoint that emits status messages. 
""" - status = fedn.Status( - status="Client {} connecting to AllianceStatusStream.".format(response.sender)) - status.log_level = fedn.Status.INFO - status.sender.name = self.id - status.sender.role = role_to_proto_role(self.role) - self._subscribe_client_to_queue(response.sender, fedn.Channel.STATUS) - q = self.__get_queue(response.sender, fedn.Channel.STATUS) - self._send_status(status) - - while True: - yield q.get() - - def SendStatus(self, status: fedn.Status, context): - """ - - :param status: - :param context: - :return: - """ - # Add the status message to all subscribers of the status channel - self._send_status(status) - - response = fedn.Response() - response.response = "Status received." - return response - - def _list_subscribed_clients(self, queue_name): - subscribed_clients = [] - for name, client in self.clients.items(): - if queue_name in client.keys(): - subscribed_clients.append(name) - return subscribed_clients - - def _list_active_clients(self, channel): - active_clients = [] - for client in self._list_subscribed_clients(channel): - # This can break with different timezones. - now = datetime.now() - then = self.clients[client]["lastseen"] - # TODO: move the heartbeat timeout to config. - if (now - then) < timedelta(seconds=10): - active_clients.append(client) - return active_clients - - def _drop_inactive_clients(self): - """ Clean up clients that has missed heartbeat """ - - def ListActiveClients(self, request: fedn.ListClientsRequest, context): - """ RPC endpoint that returns a ClientList containing the names of all active clients. - An active client has sent a status message / responded to a heartbeat - request in the last 10 seconds. 
- """ - clients = fedn.ClientList() - active_clients = self._list_active_clients(request.channel) - - for client in active_clients: - clients.client.append(fedn.Client(name=client, role=fedn.WORKER)) - return clients - - def AcceptingClients(self, request: fedn.ConnectionRequest, context): - """ - - :param request: - :param context: - :return: - """ - response = fedn.ConnectionResponse() - active_clients = self._list_active_clients( - fedn.Channel.MODEL_UPDATE_REQUESTS) - - try: - requested = int(self.max_clients) - if len(active_clients) >= requested: - response.status = fedn.ConnectionStatus.NOT_ACCEPTING - return response - if len(active_clients) < requested: - response.status = fedn.ConnectionStatus.ACCEPTING - return response - - except Exception as e: - print("Combiner not properly configured! {}".format(e), flush=True) - raise - - response.status = fedn.ConnectionStatus.TRY_AGAIN_LATER - return response - - def SendHeartbeat(self, heartbeat: fedn.Heartbeat, context): - """ RPC that lets clients send a hearbeat, notifying the server that - the client is available. 
""" - self.__register_heartbeat(heartbeat.sender) - response = fedn.Response() - response.sender.name = heartbeat.sender.name - response.sender.role = heartbeat.sender.role - response.response = "Heartbeat received" - return response - - # Combiner Service - - def ModelUpdateStream(self, update, context): - """ - - :param update: - :param context: - """ - client = update.sender - status = fedn.Status( - status="Client {} connecting to ModelUpdateStream.".format(client.name)) - status.log_level = fedn.Status.INFO - status.sender.name = self.id - status.sender.role = role_to_proto_role(self.role) - - self._subscribe_client_to_queue(client, fedn.Channel.MODEL_UPDATES) - q = self.__get_queue(client, fedn.Channel.MODEL_UPDATES) - - self._send_status(status) - - while context.is_active(): - try: - yield q.get(timeout=1.0) - except queue.Empty: - pass - - def ModelUpdateRequestStream(self, response, context): - """ A server stream RPC endpoint. Messages from client stream. """ - - client = response.sender - metadata = context.invocation_metadata() - if metadata: - print("\n\n\nGOT METADATA: {}\n\n\n".format(metadata), flush=True) - - status = fedn.Status( - status="Client {} connecting to ModelUpdateRequestStream.".format(client.name)) - status.log_level = fedn.Status.INFO - status.timestamp = str(datetime.now()) - - self.__whoami(status.sender, self) - - self._subscribe_client_to_queue( - client, fedn.Channel.MODEL_UPDATE_REQUESTS) - q = self.__get_queue(client, fedn.Channel.MODEL_UPDATE_REQUESTS) - - self._send_status(status) - - while context.is_active(): - try: - yield q.get(timeout=1.0) - except queue.Empty: - pass - - def ModelValidationStream(self, update, context): - """ - - :param update: - :param context: - """ - client = update.sender - status = fedn.Status( - status="Client {} connecting to ModelValidationStream.".format(client.name)) - status.log_level = fedn.Status.INFO - - status.sender.name = self.id - status.sender.role = role_to_proto_role(self.role) - - 
self._subscribe_client_to_queue(client, fedn.Channel.MODEL_VALIDATIONS) - q = self.__get_queue(client, fedn.Channel.MODEL_VALIDATIONS) - - self._send_status(status) - - while context.is_active(): - try: - yield q.get(timeout=1.0) - except queue.Empty: - pass - - def ModelValidationRequestStream(self, response, context): - """ A server stream RPC endpoint. Messages from client stream. """ - - client = response.sender - status = fedn.Status( - status="Client {} connecting to ModelValidationRequestStream.".format(client.name)) - status.log_level = fedn.Status.INFO - status.sender.name = self.id - status.sender.role = role_to_proto_role(self.role) - status.timestamp = str(datetime.now()) - - self._subscribe_client_to_queue( - client, fedn.Channel.MODEL_VALIDATION_REQUESTS) - q = self.__get_queue(client, fedn.Channel.MODEL_VALIDATION_REQUESTS) - - self._send_status(status) - - while context.is_active(): - try: - yield q.get(timeout=1.0) - except queue.Empty: - pass - - def SendModelUpdateRequest(self, request, context): - """ Send a model update request. """ - self._send_request(request, fedn.Channel.MODEL_UPDATE_REQUESTS) - - response = fedn.Response() - response.response = "CONTROLLER RECEIVED ModelUpdateRequest from client {}".format( - request.sender.name) - return response # TODO Fill later - - def SendModelUpdate(self, request, context): - """ Send a model update response. """ - self.control.aggregator.on_model_update(request.model_update_id) - print("ORCHESTRATOR: Received model update", flush=True) - - response = fedn.Response() - response.response = "RECEIVED ModelUpdate {} from client {}".format( - response, response.sender.name) - return response # TODO Fill later - - def SendModelValidationRequest(self, request, context): - """ Send a model update request. 
""" - self._send_request(request, fedn.Channel.MODEL_VALIDATION_REQUESTS) - - response = fedn.Response() - response.response = "CONTROLLER RECEIVED ModelValidationRequest from client {}".format( - request.sender.name) - return response # TODO Fill later - - def SendModelValidation(self, request, context): - """ Send a model update response. """ - self.control.aggregator.on_model_validation(request) - print("ORCHESTRATOR received validation ", flush=True) - response = fedn.Response() - response.response = "RECEIVED ModelValidation {} from client {}".format( - response, response.sender.name) - return response # TODO Fill later - - # Reducer Service - - def GetGlobalModel(self, request, context): - """ - - :param request: - :param context: - :return: - """ - response = fedn.GetGlobalModelResponse() - self.__whoami(response.sender, self) - response.receiver.name = "reducer" - response.receiver.role = role_to_proto_role(Role.REDUCER) - if not self.get_active_model(): - response.model_id = '' - else: - response.model_id = self.get_active_model() - return response - - #################################################################################################################### - - def run(self): - """ - - """ - - print("COMBINER: {} started, ready for requests. ".format( - self.id), flush=True) - try: - while True: - signal.pause() - except (KeyboardInterrupt, SystemExit): - pass - self.server.stop() diff --git a/fedn/fedn/common/config.py b/fedn/fedn/common/config.py new file mode 100644 index 000000000..f6c827d0d --- /dev/null +++ b/fedn/fedn/common/config.py @@ -0,0 +1,94 @@ +import os + +import yaml + +global STATESTORE_CONFIG +global MODELSTORAGE_CONFIG + + +def get_environment_config(): + """ Get the configuration from environment variables. 
+ """ + global STATESTORE_CONFIG + global MODELSTORAGE_CONFIG + + STATESTORE_CONFIG = os.environ.get('STATESTORE_CONFIG', + '/workspaces/fedn/config/settings-reducer.yaml.template') + MODELSTORAGE_CONFIG = os.environ.get('MODELSTORAGE_CONFIG', + '/workspaces/fedn/config/settings-reducer.yaml.template') + + +def get_statestore_config(file=None): + """ Get the statestore configuration from file. + + :param file: The statestore configuration file (yaml) path (optional). + :type file: str + :return: The statestore configuration as a dict. + :rtype: dict + """ + if file is None: + get_environment_config() + file = STATESTORE_CONFIG + with open(file, 'r') as config_file: + try: + settings = dict(yaml.safe_load(config_file)) + except yaml.YAMLError as e: + raise (e) + return settings["statestore"] + + +def get_modelstorage_config(file=None): + """ Get the model storage configuration from file. + + :param file: The model storage configuration file (yaml) path (optional). + :type file: str + :return: The model storage configuration as a dict. + :rtype: dict + """ + if file is None: + get_environment_config() + file = MODELSTORAGE_CONFIG + with open(file, 'r') as config_file: + try: + settings = dict(yaml.safe_load(config_file)) + except yaml.YAMLError as e: + raise (e) + return settings["storage"] + + +def get_network_config(file=None): + """ Get the network configuration from file. + + :param file: The network configuration file (yaml) path (optional). + :type file: str + :return: The network id. + :rtype: str + """ + if file is None: + get_environment_config() + file = STATESTORE_CONFIG + with open(file, 'r') as config_file: + try: + settings = dict(yaml.safe_load(config_file)) + except yaml.YAMLError as e: + raise (e) + return settings["network_id"] + + +def get_controller_config(file=None): + """ Get the controller configuration from file. + + :param file: The controller configuration file (yaml) path (optional). 
+ :type file: str + :return: The controller configuration as a dict. + :rtype: dict + """ + if file is None: + get_environment_config() + file = STATESTORE_CONFIG + with open(file, 'r') as config_file: + try: + settings = dict(yaml.safe_load(config_file)) + except yaml.YAMLError as e: + raise (e) + return settings["controller"] diff --git a/fedn/fedn/common/control/__init__.py b/fedn/fedn/common/control/__init__.py deleted file mode 100644 index e69de29bb..000000000 diff --git a/fedn/fedn/common/net/connect.py b/fedn/fedn/common/net/connect.py deleted file mode 100644 index 0a7cf4051..000000000 --- a/fedn/fedn/common/net/connect.py +++ /dev/null @@ -1,175 +0,0 @@ -import enum - -import requests as r - - -class State(enum.Enum): - Disconnected = 0 - Connected = 1 - Error = 2 - - -class Status(enum.Enum): - Unassigned = 0 - Assigned = 1 - TryAgain = 2 - UnAuthorized = 3 - UnMatchedConfig = 4 - - -class ConnectorClient: - """ - Connector for assigning client to a combiner in the FEDn network. - """ - - def __init__(self, host, port, token, name, remote_package, force_ssl=False, verify=False, combiner=None, id=None): - - self.host = host - self.port = port - self.token = token - self.name = name - self.verify = verify - self.preferred_combiner = combiner - self.id = id - self.package = 'remote' if remote_package else 'local' - - # for https we assume a an ingress handles permanent redirect (308) - if force_ssl: - self.prefix = "https://" - else: - self.prefix = "http://" - if self.port: - self.connect_string = "{}{}:{}".format( - self.prefix, self.host, self.port) - else: - self.connect_string = "{}{}".format( - self.prefix, self.host) - - print("\n\nsetting the connection string to {}\n\n".format( - self.connect_string), flush=True) - - def state(self): - """ - - :return: Connector State - """ - return self.state - - def assign(self): - """ - Connect client to FEDn network discovery service, ask for combiner assignment. 
- - :return: Tuple with assingment status, combiner connection information - if sucessful, else None. - :rtype: Status, json - """ - try: - retval = None - if self.preferred_combiner: - retval = r.get("{}?name={}&combiner={}".format(self.connect_string + '/assign', self.name, - self.preferred_combiner), - verify=self.verify, - allow_redirects=True, - headers={'Authorization': 'Token {}'.format(self.token)}) - else: - retval = r.get("{}?name={}".format(self.connect_string + '/assign', self.name), - verify=self.verify, - allow_redirects=True, - headers={'Authorization': 'Token {}'.format(self.token)}) - except Exception as e: - print('***** {}'.format(e), flush=True) - return Status.Unassigned, {} - - if retval.status_code == 401: - reason = "Unauthorized connection to reducer, make sure the correct token is set" - return Status.UnAuthorized, reason - - reducer_package = retval.json()['package'] - if reducer_package != self.package: - reason = "Unmatched config of compute package between client and reducer.\n" +\ - "Reducer uses {} package and client uses {}.".format( - reducer_package, self.package) - return Status.UnMatchedConfig, reason - - if retval.status_code >= 200 and retval.status_code < 204: - if retval.json()['status'] == 'retry': - if 'msg' in retval.json(): - reason = retval.json()['msg'] - else: - reason = "Reducer was not ready. Try again later." - - return Status.TryAgain, reason - - return Status.Assigned, retval.json() - - return Status.Unassigned, None - - -class ConnectorCombiner: - """ - Connector for annnouncing combiner to the FEDn network. 
- """ - - def __init__(self, host, port, myhost, fqdn, myport, token, name, secure=False, verify=False): - - self.host = host - self.fqdn = fqdn - self.port = port - self.myhost = myhost - self.myport = myport - self.token = token - self.name = name - self.secure = secure - self.verify = verify - - # for https we assume a an ingress handles permanent redirect (308) - self.prefix = "http://" - if port: - self.connect_string = "{}{}:{}".format( - self.prefix, self.host, self.port) - else: - self.connect_string = "{}{}".format( - self.prefix, self.host) - - print("\n\nsetting the connection string to {}\n\n".format( - self.connect_string), flush=True) - - def state(self): - """ - - :return: Combiner State - """ - return self.state - - def announce(self): - """ - Announce combiner to FEDn network via discovery service. - - :return: Tuple with announcement Status, FEDn network configuration - if sucessful, else None. - :rtype: Staus, json - """ - try: - retval = r.get("{}?name={}&address={}&fqdn={}&port={}&secure={}".format( - self.connect_string + '/add', - self.name, - self.myhost, - self.fqdn, - self.myport, - self.secure), - verify=self.verify, - headers={'Authorization': 'Token {}'.format(self.token)}) - except Exception: - return Status.Unassigned, {} - - if retval.status_code == 401: - reason = "Unauthorized connection to reducer, make sure the correct token is set" - return Status.UnAuthorized, reason - - if retval.status_code >= 200 and retval.status_code < 204: - if retval.json()['status'] == 'retry': - reason = "Reducer was not ready. Try again later." 
- return Status.TryAgain, reason - return Status.Assigned, retval.json() - - return Status.Unassigned, None diff --git a/fedn/fedn/common/net/grpc/fedn.proto b/fedn/fedn/common/net/grpc/fedn.proto index 0e595b451..ff0ee293c 100644 --- a/fedn/fedn/common/net/grpc/fedn.proto +++ b/fedn/fedn/common/net/grpc/fedn.proto @@ -4,7 +4,6 @@ package grpc; message Response { Client sender = 1; - //string client = 1; string response = 2; } @@ -14,11 +13,11 @@ enum StatusType { MODEL_UPDATE = 2; MODEL_VALIDATION_REQUEST = 3; MODEL_VALIDATION = 4; + INFERENCE = 5; } message Status { Client sender = 1; - //string client = 1; string status = 2; enum LogLevel { @@ -54,6 +53,7 @@ message ModelUpdateRequest { string data = 4; string correlation_id = 5; string timestamp = 6; + string meta = 7; } message ModelUpdate { @@ -73,6 +73,8 @@ message ModelValidationRequest { string data = 4; string correlation_id = 5; string timestamp = 6; + string meta = 7; + bool is_inference = 8; } message ModelValidation { @@ -91,6 +93,7 @@ enum ModelStatus { IN_PROGRESS_OK = 2; FAILED = 3; } + message ModelRequest { Client sender = 1; Client receiver = 2; @@ -200,7 +203,8 @@ message ReportResponse { service Control { rpc Start(ControlRequest) returns (ControlResponse); rpc Stop(ControlRequest) returns (ControlResponse); - rpc Configure(ControlRequest) returns (ReportResponse); + rpc Configure(ControlRequest) returns (ReportResponse); + rpc FlushAggregationQueue(ControlRequest) returns (ControlResponse); rpc Report(ControlRequest) returns (ReportResponse); } diff --git a/fedn/fedn/common/net/grpc/fedn_pb2.py b/fedn/fedn/common/net/grpc/fedn_pb2.py index f53fd40e6..fa4fbb16d 100644 --- a/fedn/fedn/common/net/grpc/fedn_pb2.py +++ b/fedn/fedn/common/net/grpc/fedn_pb2.py @@ -2,266 +2,39 @@ # Generated by the protocol buffer compiler. DO NOT EDIT! 
# source: fedn/common/net/grpc/fedn.proto """Generated protocol buffer code.""" +from google.protobuf.internal import enum_type_wrapper from google.protobuf import descriptor as _descriptor +from google.protobuf import descriptor_pool as _descriptor_pool from google.protobuf import message as _message from google.protobuf import reflection as _reflection from google.protobuf import symbol_database as _symbol_database -from google.protobuf.internal import enum_type_wrapper - # @@protoc_insertion_point(imports) _sym_db = _symbol_database.Default() -DESCRIPTOR = _descriptor.FileDescriptor( - name='fedn/common/net/grpc/fedn.proto', - package='grpc', - syntax='proto3', - serialized_options=None, - create_key=_descriptor._internal_create_key, - serialized_pb=b'\n\x1f\x66\x65\x64n/common/net/grpc/fedn.proto\x12\x04grpc\":\n\x08Response\x12\x1c\n\x06sender\x18\x01 \x01(\x0b\x32\x0c.grpc.Client\x12\x10\n\x08response\x18\x02 \x01(\t\"\x8c\x02\n\x06Status\x12\x1c\n\x06sender\x18\x01 \x01(\x0b\x32\x0c.grpc.Client\x12\x0e\n\x06status\x18\x02 \x01(\t\x12(\n\tlog_level\x18\x03 \x01(\x0e\x32\x15.grpc.Status.LogLevel\x12\x0c\n\x04\x64\x61ta\x18\x04 \x01(\t\x12\x16\n\x0e\x63orrelation_id\x18\x05 \x01(\t\x12\x11\n\ttimestamp\x18\x06 \x01(\t\x12\x1e\n\x04type\x18\x07 \x01(\x0e\x32\x10.grpc.StatusType\x12\r\n\x05\x65xtra\x18\x08 \x01(\t\"B\n\x08LogLevel\x12\x08\n\x04INFO\x10\x00\x12\t\n\x05\x44\x45\x42UG\x10\x01\x12\x0b\n\x07WARNING\x10\x02\x12\t\n\x05\x45RROR\x10\x03\x12\t\n\x05\x41UDIT\x10\x04\"\x9d\x01\n\x12ModelUpdateRequest\x12\x1c\n\x06sender\x18\x01 \x01(\x0b\x32\x0c.grpc.Client\x12\x1e\n\x08receiver\x18\x02 \x01(\x0b\x32\x0c.grpc.Client\x12\x10\n\x08model_id\x18\x03 \x01(\t\x12\x0c\n\x04\x64\x61ta\x18\x04 \x01(\t\x12\x16\n\x0e\x63orrelation_id\x18\x05 \x01(\t\x12\x11\n\ttimestamp\x18\x06 \x01(\t\"\xaf\x01\n\x0bModelUpdate\x12\x1c\n\x06sender\x18\x01 \x01(\x0b\x32\x0c.grpc.Client\x12\x1e\n\x08receiver\x18\x02 \x01(\x0b\x32\x0c.grpc.Client\x12\x10\n\x08model_id\x18\x03 
\x01(\t\x12\x17\n\x0fmodel_update_id\x18\x04 \x01(\t\x12\x16\n\x0e\x63orrelation_id\x18\x05 \x01(\t\x12\x11\n\ttimestamp\x18\x06 \x01(\t\x12\x0c\n\x04meta\x18\x07 \x01(\t\"\xa1\x01\n\x16ModelValidationRequest\x12\x1c\n\x06sender\x18\x01 \x01(\x0b\x32\x0c.grpc.Client\x12\x1e\n\x08receiver\x18\x02 \x01(\x0b\x32\x0c.grpc.Client\x12\x10\n\x08model_id\x18\x03 \x01(\t\x12\x0c\n\x04\x64\x61ta\x18\x04 \x01(\t\x12\x16\n\x0e\x63orrelation_id\x18\x05 \x01(\t\x12\x11\n\ttimestamp\x18\x06 \x01(\t\"\xa8\x01\n\x0fModelValidation\x12\x1c\n\x06sender\x18\x01 \x01(\x0b\x32\x0c.grpc.Client\x12\x1e\n\x08receiver\x18\x02 \x01(\x0b\x32\x0c.grpc.Client\x12\x10\n\x08model_id\x18\x03 \x01(\t\x12\x0c\n\x04\x64\x61ta\x18\x04 \x01(\t\x12\x16\n\x0e\x63orrelation_id\x18\x05 \x01(\t\x12\x11\n\ttimestamp\x18\x06 \x01(\t\x12\x0c\n\x04meta\x18\x07 \x01(\t\"\x89\x01\n\x0cModelRequest\x12\x1c\n\x06sender\x18\x01 \x01(\x0b\x32\x0c.grpc.Client\x12\x1e\n\x08receiver\x18\x02 \x01(\x0b\x32\x0c.grpc.Client\x12\x0c\n\x04\x64\x61ta\x18\x03 \x01(\x0c\x12\n\n\x02id\x18\x04 \x01(\t\x12!\n\x06status\x18\x05 \x01(\x0e\x32\x11.grpc.ModelStatus\"]\n\rModelResponse\x12\x0c\n\x04\x64\x61ta\x18\x01 \x01(\x0c\x12\n\n\x02id\x18\x02 \x01(\t\x12!\n\x06status\x18\x03 \x01(\x0e\x32\x11.grpc.ModelStatus\x12\x0f\n\x07message\x18\x04 \x01(\t\"U\n\x15GetGlobalModelRequest\x12\x1c\n\x06sender\x18\x01 \x01(\x0b\x32\x0c.grpc.Client\x12\x1e\n\x08receiver\x18\x02 \x01(\x0b\x32\x0c.grpc.Client\"h\n\x16GetGlobalModelResponse\x12\x1c\n\x06sender\x18\x01 \x01(\x0b\x32\x0c.grpc.Client\x12\x1e\n\x08receiver\x18\x02 \x01(\x0b\x32\x0c.grpc.Client\x12\x10\n\x08model_id\x18\x03 \x01(\t\")\n\tHeartbeat\x12\x1c\n\x06sender\x18\x01 \x01(\x0b\x32\x0c.grpc.Client\"W\n\x16\x43lientAvailableMessage\x12\x1c\n\x06sender\x18\x01 \x01(\x0b\x32\x0c.grpc.Client\x12\x0c\n\x04\x64\x61ta\x18\x02 \x01(\t\x12\x11\n\ttimestamp\x18\x03 \x01(\t\"R\n\x12ListClientsRequest\x12\x1c\n\x06sender\x18\x01 \x01(\x0b\x32\x0c.grpc.Client\x12\x1e\n\x07\x63hannel\x18\x02 
\x01(\x0e\x32\r.grpc.Channel\"*\n\nClientList\x12\x1c\n\x06\x63lient\x18\x01 \x03(\x0b\x32\x0c.grpc.Client\"0\n\x06\x43lient\x12\x18\n\x04role\x18\x01 \x01(\x0e\x32\n.grpc.Role\x12\x0c\n\x04name\x18\x02 \x01(\t\"m\n\x0fReassignRequest\x12\x1c\n\x06sender\x18\x01 \x01(\x0b\x32\x0c.grpc.Client\x12\x1e\n\x08receiver\x18\x02 \x01(\x0b\x32\x0c.grpc.Client\x12\x0e\n\x06server\x18\x03 \x01(\t\x12\x0c\n\x04port\x18\x04 \x01(\r\"c\n\x10ReconnectRequest\x12\x1c\n\x06sender\x18\x01 \x01(\x0b\x32\x0c.grpc.Client\x12\x1e\n\x08receiver\x18\x02 \x01(\x0b\x32\x0c.grpc.Client\x12\x11\n\treconnect\x18\x03 \x01(\r\"\'\n\tParameter\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 \x01(\t\"T\n\x0e\x43ontrolRequest\x12\x1e\n\x07\x63ommand\x18\x01 \x01(\x0e\x32\r.grpc.Command\x12\"\n\tparameter\x18\x02 \x03(\x0b\x32\x0f.grpc.Parameter\"F\n\x0f\x43ontrolResponse\x12\x0f\n\x07message\x18\x01 \x01(\t\x12\"\n\tparameter\x18\x02 \x03(\x0b\x32\x0f.grpc.Parameter\"R\n\x0eReportResponse\x12\x1c\n\x06sender\x18\x01 \x01(\x0b\x32\x0c.grpc.Client\x12\"\n\tparameter\x18\x02 \x03(\x0b\x32\x0f.grpc.Parameter\"\x13\n\x11\x43onnectionRequest\"<\n\x12\x43onnectionResponse\x12&\n\x06status\x18\x01 
\x01(\x0e\x32\x16.grpc.ConnectionStatus*u\n\nStatusType\x12\x07\n\x03LOG\x10\x00\x12\x18\n\x14MODEL_UPDATE_REQUEST\x10\x01\x12\x10\n\x0cMODEL_UPDATE\x10\x02\x12\x1c\n\x18MODEL_VALIDATION_REQUEST\x10\x03\x12\x14\n\x10MODEL_VALIDATION\x10\x04*\x86\x01\n\x07\x43hannel\x12\x0b\n\x07\x44\x45\x46\x41ULT\x10\x00\x12\x19\n\x15MODEL_UPDATE_REQUESTS\x10\x01\x12\x11\n\rMODEL_UPDATES\x10\x02\x12\x1d\n\x19MODEL_VALIDATION_REQUESTS\x10\x03\x12\x15\n\x11MODEL_VALIDATIONS\x10\x04\x12\n\n\x06STATUS\x10\x05*F\n\x0bModelStatus\x12\x06\n\x02OK\x10\x00\x12\x0f\n\x0bIN_PROGRESS\x10\x01\x12\x12\n\x0eIN_PROGRESS_OK\x10\x02\x12\n\n\x06\x46\x41ILED\x10\x03*8\n\x04Role\x12\n\n\x06WORKER\x10\x00\x12\x0c\n\x08\x43OMBINER\x10\x01\x12\x0b\n\x07REDUCER\x10\x02\x12\t\n\x05OTHER\x10\x03*J\n\x07\x43ommand\x12\x08\n\x04IDLE\x10\x00\x12\t\n\x05START\x10\x01\x12\t\n\x05PAUSE\x10\x02\x12\x08\n\x04STOP\x10\x03\x12\t\n\x05RESET\x10\x04\x12\n\n\x06REPORT\x10\x05*I\n\x10\x43onnectionStatus\x12\x11\n\rNOT_ACCEPTING\x10\x00\x12\r\n\tACCEPTING\x10\x01\x12\x13\n\x0fTRY_AGAIN_LATER\x10\x02\x32z\n\x0cModelService\x12\x33\n\x06Upload\x12\x12.grpc.ModelRequest\x1a\x13.grpc.ModelResponse(\x01\x12\x35\n\x08\x44ownload\x12\x12.grpc.ModelRequest\x1a\x13.grpc.ModelResponse0\x01\x32\xe3\x01\n\x07\x43ontrol\x12\x34\n\x05Start\x12\x14.grpc.ControlRequest\x1a\x15.grpc.ControlResponse\x12\x33\n\x04Stop\x12\x14.grpc.ControlRequest\x1a\x15.grpc.ControlResponse\x12\x37\n\tConfigure\x12\x14.grpc.ControlRequest\x1a\x14.grpc.ReportResponse\x12\x34\n\x06Report\x12\x14.grpc.ControlRequest\x1a\x14.grpc.ReportResponse2V\n\x07Reducer\x12K\n\x0eGetGlobalModel\x12\x1b.grpc.GetGlobalModelRequest\x1a\x1c.grpc.GetGlobalModelResponse2\xab\x03\n\tConnector\x12\x44\n\x14\x41llianceStatusStream\x12\x1c.grpc.ClientAvailableMessage\x1a\x0c.grpc.Status0\x01\x12*\n\nSendStatus\x12\x0c.grpc.Status\x1a\x0e.grpc.Response\x12?\n\x11ListActiveClients\x12\x18.grpc.ListClientsRequest\x1a\x10.grpc.ClientList\x12\x45\n\x10\x41\x63\x63\x65ptingClients\x12\x17
.grpc.ConnectionRequest\x1a\x18.grpc.ConnectionResponse\x12\x30\n\rSendHeartbeat\x12\x0f.grpc.Heartbeat\x1a\x0e.grpc.Response\x12\x37\n\x0eReassignClient\x12\x15.grpc.ReassignRequest\x1a\x0e.grpc.Response\x12\x39\n\x0fReconnectClient\x12\x16.grpc.ReconnectRequest\x1a\x0e.grpc.Response2\xda\x04\n\x08\x43ombiner\x12T\n\x18ModelUpdateRequestStream\x12\x1c.grpc.ClientAvailableMessage\x1a\x18.grpc.ModelUpdateRequest0\x01\x12\x46\n\x11ModelUpdateStream\x12\x1c.grpc.ClientAvailableMessage\x1a\x11.grpc.ModelUpdate0\x01\x12\\\n\x1cModelValidationRequestStream\x12\x1c.grpc.ClientAvailableMessage\x1a\x1c.grpc.ModelValidationRequest0\x01\x12N\n\x15ModelValidationStream\x12\x1c.grpc.ClientAvailableMessage\x1a\x15.grpc.ModelValidation0\x01\x12\x42\n\x16SendModelUpdateRequest\x12\x18.grpc.ModelUpdateRequest\x1a\x0e.grpc.Response\x12\x34\n\x0fSendModelUpdate\x12\x11.grpc.ModelUpdate\x1a\x0e.grpc.Response\x12J\n\x1aSendModelValidationRequest\x12\x1c.grpc.ModelValidationRequest\x1a\x0e.grpc.Response\x12<\n\x13SendModelValidation\x12\x15.grpc.ModelValidation\x1a\x0e.grpc.Responseb\x06proto3' -) -_STATUSTYPE = _descriptor.EnumDescriptor( - name='StatusType', - full_name='grpc.StatusType', - filename=None, - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - values=[ - _descriptor.EnumValueDescriptor( - name='LOG', index=0, number=0, - serialized_options=None, - type=None, - create_key=_descriptor._internal_create_key), - _descriptor.EnumValueDescriptor( - name='MODEL_UPDATE_REQUEST', index=1, number=1, - serialized_options=None, - type=None, - create_key=_descriptor._internal_create_key), - _descriptor.EnumValueDescriptor( - name='MODEL_UPDATE', index=2, number=2, - serialized_options=None, - type=None, - create_key=_descriptor._internal_create_key), - _descriptor.EnumValueDescriptor( - name='MODEL_VALIDATION_REQUEST', index=3, number=3, - serialized_options=None, - type=None, - create_key=_descriptor._internal_create_key), - _descriptor.EnumValueDescriptor( - 
name='MODEL_VALIDATION', index=4, number=4, - serialized_options=None, - type=None, - create_key=_descriptor._internal_create_key), - ], - containing_type=None, - serialized_options=None, - serialized_start=2361, - serialized_end=2478, -) -_sym_db.RegisterEnumDescriptor(_STATUSTYPE) -StatusType = enum_type_wrapper.EnumTypeWrapper(_STATUSTYPE) -_CHANNEL = _descriptor.EnumDescriptor( - name='Channel', - full_name='grpc.Channel', - filename=None, - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - values=[ - _descriptor.EnumValueDescriptor( - name='DEFAULT', index=0, number=0, - serialized_options=None, - type=None, - create_key=_descriptor._internal_create_key), - _descriptor.EnumValueDescriptor( - name='MODEL_UPDATE_REQUESTS', index=1, number=1, - serialized_options=None, - type=None, - create_key=_descriptor._internal_create_key), - _descriptor.EnumValueDescriptor( - name='MODEL_UPDATES', index=2, number=2, - serialized_options=None, - type=None, - create_key=_descriptor._internal_create_key), - _descriptor.EnumValueDescriptor( - name='MODEL_VALIDATION_REQUESTS', index=3, number=3, - serialized_options=None, - type=None, - create_key=_descriptor._internal_create_key), - _descriptor.EnumValueDescriptor( - name='MODEL_VALIDATIONS', index=4, number=4, - serialized_options=None, - type=None, - create_key=_descriptor._internal_create_key), - _descriptor.EnumValueDescriptor( - name='STATUS', index=5, number=5, - serialized_options=None, - type=None, - create_key=_descriptor._internal_create_key), - ], - containing_type=None, - serialized_options=None, - serialized_start=2481, - serialized_end=2615, -) -_sym_db.RegisterEnumDescriptor(_CHANNEL) -Channel = enum_type_wrapper.EnumTypeWrapper(_CHANNEL) -_MODELSTATUS = _descriptor.EnumDescriptor( - name='ModelStatus', - full_name='grpc.ModelStatus', - filename=None, - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - values=[ - _descriptor.EnumValueDescriptor( - name='OK', index=0, number=0, - 
serialized_options=None, - type=None, - create_key=_descriptor._internal_create_key), - _descriptor.EnumValueDescriptor( - name='IN_PROGRESS', index=1, number=1, - serialized_options=None, - type=None, - create_key=_descriptor._internal_create_key), - _descriptor.EnumValueDescriptor( - name='IN_PROGRESS_OK', index=2, number=2, - serialized_options=None, - type=None, - create_key=_descriptor._internal_create_key), - _descriptor.EnumValueDescriptor( - name='FAILED', index=3, number=3, - serialized_options=None, - type=None, - create_key=_descriptor._internal_create_key), - ], - containing_type=None, - serialized_options=None, - serialized_start=2617, - serialized_end=2687, -) -_sym_db.RegisterEnumDescriptor(_MODELSTATUS) +DESCRIPTOR = _descriptor_pool.Default().AddSerializedFile(b'\n\x1f\x66\x65\x64n/common/net/grpc/fedn.proto\x12\x04grpc\":\n\x08Response\x12\x1c\n\x06sender\x18\x01 \x01(\x0b\x32\x0c.grpc.Client\x12\x10\n\x08response\x18\x02 \x01(\t\"\x8c\x02\n\x06Status\x12\x1c\n\x06sender\x18\x01 \x01(\x0b\x32\x0c.grpc.Client\x12\x0e\n\x06status\x18\x02 \x01(\t\x12(\n\tlog_level\x18\x03 \x01(\x0e\x32\x15.grpc.Status.LogLevel\x12\x0c\n\x04\x64\x61ta\x18\x04 \x01(\t\x12\x16\n\x0e\x63orrelation_id\x18\x05 \x01(\t\x12\x11\n\ttimestamp\x18\x06 \x01(\t\x12\x1e\n\x04type\x18\x07 \x01(\x0e\x32\x10.grpc.StatusType\x12\r\n\x05\x65xtra\x18\x08 \x01(\t\"B\n\x08LogLevel\x12\x08\n\x04INFO\x10\x00\x12\t\n\x05\x44\x45\x42UG\x10\x01\x12\x0b\n\x07WARNING\x10\x02\x12\t\n\x05\x45RROR\x10\x03\x12\t\n\x05\x41UDIT\x10\x04\"\xab\x01\n\x12ModelUpdateRequest\x12\x1c\n\x06sender\x18\x01 \x01(\x0b\x32\x0c.grpc.Client\x12\x1e\n\x08receiver\x18\x02 \x01(\x0b\x32\x0c.grpc.Client\x12\x10\n\x08model_id\x18\x03 \x01(\t\x12\x0c\n\x04\x64\x61ta\x18\x04 \x01(\t\x12\x16\n\x0e\x63orrelation_id\x18\x05 \x01(\t\x12\x11\n\ttimestamp\x18\x06 \x01(\t\x12\x0c\n\x04meta\x18\x07 \x01(\t\"\xaf\x01\n\x0bModelUpdate\x12\x1c\n\x06sender\x18\x01 \x01(\x0b\x32\x0c.grpc.Client\x12\x1e\n\x08receiver\x18\x02 
\x01(\x0b\x32\x0c.grpc.Client\x12\x10\n\x08model_id\x18\x03 \x01(\t\x12\x17\n\x0fmodel_update_id\x18\x04 \x01(\t\x12\x16\n\x0e\x63orrelation_id\x18\x05 \x01(\t\x12\x11\n\ttimestamp\x18\x06 \x01(\t\x12\x0c\n\x04meta\x18\x07 \x01(\t\"\xc5\x01\n\x16ModelValidationRequest\x12\x1c\n\x06sender\x18\x01 \x01(\x0b\x32\x0c.grpc.Client\x12\x1e\n\x08receiver\x18\x02 \x01(\x0b\x32\x0c.grpc.Client\x12\x10\n\x08model_id\x18\x03 \x01(\t\x12\x0c\n\x04\x64\x61ta\x18\x04 \x01(\t\x12\x16\n\x0e\x63orrelation_id\x18\x05 \x01(\t\x12\x11\n\ttimestamp\x18\x06 \x01(\t\x12\x0c\n\x04meta\x18\x07 \x01(\t\x12\x14\n\x0cis_inference\x18\x08 \x01(\x08\"\xa8\x01\n\x0fModelValidation\x12\x1c\n\x06sender\x18\x01 \x01(\x0b\x32\x0c.grpc.Client\x12\x1e\n\x08receiver\x18\x02 \x01(\x0b\x32\x0c.grpc.Client\x12\x10\n\x08model_id\x18\x03 \x01(\t\x12\x0c\n\x04\x64\x61ta\x18\x04 \x01(\t\x12\x16\n\x0e\x63orrelation_id\x18\x05 \x01(\t\x12\x11\n\ttimestamp\x18\x06 \x01(\t\x12\x0c\n\x04meta\x18\x07 \x01(\t\"\x89\x01\n\x0cModelRequest\x12\x1c\n\x06sender\x18\x01 \x01(\x0b\x32\x0c.grpc.Client\x12\x1e\n\x08receiver\x18\x02 \x01(\x0b\x32\x0c.grpc.Client\x12\x0c\n\x04\x64\x61ta\x18\x03 \x01(\x0c\x12\n\n\x02id\x18\x04 \x01(\t\x12!\n\x06status\x18\x05 \x01(\x0e\x32\x11.grpc.ModelStatus\"]\n\rModelResponse\x12\x0c\n\x04\x64\x61ta\x18\x01 \x01(\x0c\x12\n\n\x02id\x18\x02 \x01(\t\x12!\n\x06status\x18\x03 \x01(\x0e\x32\x11.grpc.ModelStatus\x12\x0f\n\x07message\x18\x04 \x01(\t\"U\n\x15GetGlobalModelRequest\x12\x1c\n\x06sender\x18\x01 \x01(\x0b\x32\x0c.grpc.Client\x12\x1e\n\x08receiver\x18\x02 \x01(\x0b\x32\x0c.grpc.Client\"h\n\x16GetGlobalModelResponse\x12\x1c\n\x06sender\x18\x01 \x01(\x0b\x32\x0c.grpc.Client\x12\x1e\n\x08receiver\x18\x02 \x01(\x0b\x32\x0c.grpc.Client\x12\x10\n\x08model_id\x18\x03 \x01(\t\")\n\tHeartbeat\x12\x1c\n\x06sender\x18\x01 \x01(\x0b\x32\x0c.grpc.Client\"W\n\x16\x43lientAvailableMessage\x12\x1c\n\x06sender\x18\x01 \x01(\x0b\x32\x0c.grpc.Client\x12\x0c\n\x04\x64\x61ta\x18\x02 
\x01(\t\x12\x11\n\ttimestamp\x18\x03 \x01(\t\"R\n\x12ListClientsRequest\x12\x1c\n\x06sender\x18\x01 \x01(\x0b\x32\x0c.grpc.Client\x12\x1e\n\x07\x63hannel\x18\x02 \x01(\x0e\x32\r.grpc.Channel\"*\n\nClientList\x12\x1c\n\x06\x63lient\x18\x01 \x03(\x0b\x32\x0c.grpc.Client\"0\n\x06\x43lient\x12\x18\n\x04role\x18\x01 \x01(\x0e\x32\n.grpc.Role\x12\x0c\n\x04name\x18\x02 \x01(\t\"m\n\x0fReassignRequest\x12\x1c\n\x06sender\x18\x01 \x01(\x0b\x32\x0c.grpc.Client\x12\x1e\n\x08receiver\x18\x02 \x01(\x0b\x32\x0c.grpc.Client\x12\x0e\n\x06server\x18\x03 \x01(\t\x12\x0c\n\x04port\x18\x04 \x01(\r\"c\n\x10ReconnectRequest\x12\x1c\n\x06sender\x18\x01 \x01(\x0b\x32\x0c.grpc.Client\x12\x1e\n\x08receiver\x18\x02 \x01(\x0b\x32\x0c.grpc.Client\x12\x11\n\treconnect\x18\x03 \x01(\r\"\'\n\tParameter\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 \x01(\t\"T\n\x0e\x43ontrolRequest\x12\x1e\n\x07\x63ommand\x18\x01 \x01(\x0e\x32\r.grpc.Command\x12\"\n\tparameter\x18\x02 \x03(\x0b\x32\x0f.grpc.Parameter\"F\n\x0f\x43ontrolResponse\x12\x0f\n\x07message\x18\x01 \x01(\t\x12\"\n\tparameter\x18\x02 \x03(\x0b\x32\x0f.grpc.Parameter\"R\n\x0eReportResponse\x12\x1c\n\x06sender\x18\x01 \x01(\x0b\x32\x0c.grpc.Client\x12\"\n\tparameter\x18\x02 \x03(\x0b\x32\x0f.grpc.Parameter\"\x13\n\x11\x43onnectionRequest\"<\n\x12\x43onnectionResponse\x12&\n\x06status\x18\x01 
\x01(\x0e\x32\x16.grpc.ConnectionStatus*\x84\x01\n\nStatusType\x12\x07\n\x03LOG\x10\x00\x12\x18\n\x14MODEL_UPDATE_REQUEST\x10\x01\x12\x10\n\x0cMODEL_UPDATE\x10\x02\x12\x1c\n\x18MODEL_VALIDATION_REQUEST\x10\x03\x12\x14\n\x10MODEL_VALIDATION\x10\x04\x12\r\n\tINFERENCE\x10\x05*\x86\x01\n\x07\x43hannel\x12\x0b\n\x07\x44\x45\x46\x41ULT\x10\x00\x12\x19\n\x15MODEL_UPDATE_REQUESTS\x10\x01\x12\x11\n\rMODEL_UPDATES\x10\x02\x12\x1d\n\x19MODEL_VALIDATION_REQUESTS\x10\x03\x12\x15\n\x11MODEL_VALIDATIONS\x10\x04\x12\n\n\x06STATUS\x10\x05*F\n\x0bModelStatus\x12\x06\n\x02OK\x10\x00\x12\x0f\n\x0bIN_PROGRESS\x10\x01\x12\x12\n\x0eIN_PROGRESS_OK\x10\x02\x12\n\n\x06\x46\x41ILED\x10\x03*8\n\x04Role\x12\n\n\x06WORKER\x10\x00\x12\x0c\n\x08\x43OMBINER\x10\x01\x12\x0b\n\x07REDUCER\x10\x02\x12\t\n\x05OTHER\x10\x03*J\n\x07\x43ommand\x12\x08\n\x04IDLE\x10\x00\x12\t\n\x05START\x10\x01\x12\t\n\x05PAUSE\x10\x02\x12\x08\n\x04STOP\x10\x03\x12\t\n\x05RESET\x10\x04\x12\n\n\x06REPORT\x10\x05*I\n\x10\x43onnectionStatus\x12\x11\n\rNOT_ACCEPTING\x10\x00\x12\r\n\tACCEPTING\x10\x01\x12\x13\n\x0fTRY_AGAIN_LATER\x10\x02\x32z\n\x0cModelService\x12\x33\n\x06Upload\x12\x12.grpc.ModelRequest\x1a\x13.grpc.ModelResponse(\x01\x12\x35\n\x08\x44ownload\x12\x12.grpc.ModelRequest\x1a\x13.grpc.ModelResponse0\x01\x32\xa9\x02\n\x07\x43ontrol\x12\x34\n\x05Start\x12\x14.grpc.ControlRequest\x1a\x15.grpc.ControlResponse\x12\x33\n\x04Stop\x12\x14.grpc.ControlRequest\x1a\x15.grpc.ControlResponse\x12\x37\n\tConfigure\x12\x14.grpc.ControlRequest\x1a\x14.grpc.ReportResponse\x12\x44\n\x15\x46lushAggregationQueue\x12\x14.grpc.ControlRequest\x1a\x15.grpc.ControlResponse\x12\x34\n\x06Report\x12\x14.grpc.ControlRequest\x1a\x14.grpc.ReportResponse2V\n\x07Reducer\x12K\n\x0eGetGlobalModel\x12\x1b.grpc.GetGlobalModelRequest\x1a\x1c.grpc.GetGlobalModelResponse2\xab\x03\n\tConnector\x12\x44\n\x14\x41llianceStatusStream\x12\x1c.grpc.ClientAvailableMessage\x1a\x0c.grpc.Status0\x01\x12*\n\nSendStatus\x12\x0c.grpc.Status\x1a\x0e.grpc.Response\x12?
\n\x11ListActiveClients\x12\x18.grpc.ListClientsRequest\x1a\x10.grpc.ClientList\x12\x45\n\x10\x41\x63\x63\x65ptingClients\x12\x17.grpc.ConnectionRequest\x1a\x18.grpc.ConnectionResponse\x12\x30\n\rSendHeartbeat\x12\x0f.grpc.Heartbeat\x1a\x0e.grpc.Response\x12\x37\n\x0eReassignClient\x12\x15.grpc.ReassignRequest\x1a\x0e.grpc.Response\x12\x39\n\x0fReconnectClient\x12\x16.grpc.ReconnectRequest\x1a\x0e.grpc.Response2\xda\x04\n\x08\x43ombiner\x12T\n\x18ModelUpdateRequestStream\x12\x1c.grpc.ClientAvailableMessage\x1a\x18.grpc.ModelUpdateRequest0\x01\x12\x46\n\x11ModelUpdateStream\x12\x1c.grpc.ClientAvailableMessage\x1a\x11.grpc.ModelUpdate0\x01\x12\\\n\x1cModelValidationRequestStream\x12\x1c.grpc.ClientAvailableMessage\x1a\x1c.grpc.ModelValidationRequest0\x01\x12N\n\x15ModelValidationStream\x12\x1c.grpc.ClientAvailableMessage\x1a\x15.grpc.ModelValidation0\x01\x12\x42\n\x16SendModelUpdateRequest\x12\x18.grpc.ModelUpdateRequest\x1a\x0e.grpc.Response\x12\x34\n\x0fSendModelUpdate\x12\x11.grpc.ModelUpdate\x1a\x0e.grpc.Response\x12J\n\x1aSendModelValidationRequest\x12\x1c.grpc.ModelValidationRequest\x1a\x0e.grpc.Response\x12<\n\x13SendModelValidation\x12\x15.grpc.ModelValidation\x1a\x0e.grpc.Responseb\x06proto3') +_STATUSTYPE = DESCRIPTOR.enum_types_by_name['StatusType'] +StatusType = enum_type_wrapper.EnumTypeWrapper(_STATUSTYPE) +_CHANNEL = DESCRIPTOR.enum_types_by_name['Channel'] +Channel = enum_type_wrapper.EnumTypeWrapper(_CHANNEL) +_MODELSTATUS = DESCRIPTOR.enum_types_by_name['ModelStatus'] ModelStatus = enum_type_wrapper.EnumTypeWrapper(_MODELSTATUS) -_ROLE = _descriptor.EnumDescriptor( - name='Role', - full_name='grpc.Role', - filename=None, - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - values=[ - _descriptor.EnumValueDescriptor( - name='WORKER', index=0, number=0, - serialized_options=None, - type=None, - create_key=_descriptor._internal_create_key), - _descriptor.EnumValueDescriptor( - name='COMBINER', index=1, number=1, - 
serialized_options=None, - type=None, - create_key=_descriptor._internal_create_key), - _descriptor.EnumValueDescriptor( - name='REDUCER', index=2, number=2, - serialized_options=None, - type=None, - create_key=_descriptor._internal_create_key), - _descriptor.EnumValueDescriptor( - name='OTHER', index=3, number=3, - serialized_options=None, - type=None, - create_key=_descriptor._internal_create_key), - ], - containing_type=None, - serialized_options=None, - serialized_start=2689, - serialized_end=2745, -) -_sym_db.RegisterEnumDescriptor(_ROLE) - +_ROLE = DESCRIPTOR.enum_types_by_name['Role'] Role = enum_type_wrapper.EnumTypeWrapper(_ROLE) -_COMMAND = _descriptor.EnumDescriptor( - name='Command', - full_name='grpc.Command', - filename=None, - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - values=[ - _descriptor.EnumValueDescriptor( - name='IDLE', index=0, number=0, - serialized_options=None, - type=None, - create_key=_descriptor._internal_create_key), - _descriptor.EnumValueDescriptor( - name='START', index=1, number=1, - serialized_options=None, - type=None, - create_key=_descriptor._internal_create_key), - _descriptor.EnumValueDescriptor( - name='PAUSE', index=2, number=2, - serialized_options=None, - type=None, - create_key=_descriptor._internal_create_key), - _descriptor.EnumValueDescriptor( - name='STOP', index=3, number=3, - serialized_options=None, - type=None, - create_key=_descriptor._internal_create_key), - _descriptor.EnumValueDescriptor( - name='RESET', index=4, number=4, - serialized_options=None, - type=None, - create_key=_descriptor._internal_create_key), - _descriptor.EnumValueDescriptor( - name='REPORT', index=5, number=5, - serialized_options=None, - type=None, - create_key=_descriptor._internal_create_key), - ], - containing_type=None, - serialized_options=None, - serialized_start=2747, - serialized_end=2821, -) -_sym_db.RegisterEnumDescriptor(_COMMAND) - +_COMMAND = DESCRIPTOR.enum_types_by_name['Command'] Command = 
enum_type_wrapper.EnumTypeWrapper(_COMMAND) -_CONNECTIONSTATUS = _descriptor.EnumDescriptor( - name='ConnectionStatus', - full_name='grpc.ConnectionStatus', - filename=None, - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - values=[ - _descriptor.EnumValueDescriptor( - name='NOT_ACCEPTING', index=0, number=0, - serialized_options=None, - type=None, - create_key=_descriptor._internal_create_key), - _descriptor.EnumValueDescriptor( - name='ACCEPTING', index=1, number=1, - serialized_options=None, - type=None, - create_key=_descriptor._internal_create_key), - _descriptor.EnumValueDescriptor( - name='TRY_AGAIN_LATER', index=2, number=2, - serialized_options=None, - type=None, - create_key=_descriptor._internal_create_key), - ], - containing_type=None, - serialized_options=None, - serialized_start=2823, - serialized_end=2896, -) -_sym_db.RegisterEnumDescriptor(_CONNECTIONSTATUS) - +_CONNECTIONSTATUS = DESCRIPTOR.enum_types_by_name['ConnectionStatus'] ConnectionStatus = enum_type_wrapper.EnumTypeWrapper(_CONNECTIONSTATUS) LOG = 0 MODEL_UPDATE_REQUEST = 1 MODEL_UPDATE = 2 MODEL_VALIDATION_REQUEST = 3 MODEL_VALIDATION = 4 +INFERENCE = 5 DEFAULT = 0 MODEL_UPDATE_REQUESTS = 1 MODEL_UPDATES = 2 @@ -286,1646 +59,268 @@ ACCEPTING = 1 TRY_AGAIN_LATER = 2 -_STATUS_LOGLEVEL = _descriptor.EnumDescriptor( - name='LogLevel', - full_name='grpc.Status.LogLevel', - filename=None, - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - values=[ - _descriptor.EnumValueDescriptor( - name='INFO', index=0, number=0, - serialized_options=None, - type=None, - create_key=_descriptor._internal_create_key), - _descriptor.EnumValueDescriptor( - name='DEBUG', index=1, number=1, - serialized_options=None, - type=None, - create_key=_descriptor._internal_create_key), - _descriptor.EnumValueDescriptor( - name='WARNING', index=2, number=2, - serialized_options=None, - type=None, - create_key=_descriptor._internal_create_key), - _descriptor.EnumValueDescriptor( - 
name='ERROR', index=3, number=3, - serialized_options=None, - type=None, - create_key=_descriptor._internal_create_key), - _descriptor.EnumValueDescriptor( - name='AUDIT', index=4, number=4, - serialized_options=None, - type=None, - create_key=_descriptor._internal_create_key), - ], - containing_type=None, - serialized_options=None, - serialized_start=304, - serialized_end=370, -) -_sym_db.RegisterEnumDescriptor(_STATUS_LOGLEVEL) - -_RESPONSE = _descriptor.Descriptor( - name='Response', - full_name='grpc.Response', - filename=None, - file=DESCRIPTOR, - containing_type=None, - create_key=_descriptor._internal_create_key, - fields=[ - _descriptor.FieldDescriptor( - name='sender', full_name='grpc.Response.sender', index=0, - number=1, type=11, cpp_type=10, label=1, - has_default_value=False, default_value=None, - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), - _descriptor.FieldDescriptor( - name='response', full_name='grpc.Response.response', index=1, - number=2, type=9, cpp_type=9, label=1, - has_default_value=False, default_value=b"".decode('utf-8'), - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), - ], - extensions=[ - ], - nested_types=[], - enum_types=[ - ], - serialized_options=None, - is_extendable=False, - syntax='proto3', - extension_ranges=[], - oneofs=[ - ], - serialized_start=41, - serialized_end=99, -) - -_STATUS = _descriptor.Descriptor( - name='Status', - full_name='grpc.Status', - filename=None, - file=DESCRIPTOR, - containing_type=None, - create_key=_descriptor._internal_create_key, - fields=[ - _descriptor.FieldDescriptor( - name='sender', full_name='grpc.Status.sender', index=0, - number=1, type=11, cpp_type=10, label=1, - has_default_value=False, 
default_value=None, - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), - _descriptor.FieldDescriptor( - name='status', full_name='grpc.Status.status', index=1, - number=2, type=9, cpp_type=9, label=1, - has_default_value=False, default_value=b"".decode('utf-8'), - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), - _descriptor.FieldDescriptor( - name='log_level', full_name='grpc.Status.log_level', index=2, - number=3, type=14, cpp_type=8, label=1, - has_default_value=False, default_value=0, - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), - _descriptor.FieldDescriptor( - name='data', full_name='grpc.Status.data', index=3, - number=4, type=9, cpp_type=9, label=1, - has_default_value=False, default_value=b"".decode('utf-8'), - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), - _descriptor.FieldDescriptor( - name='correlation_id', full_name='grpc.Status.correlation_id', index=4, - number=5, type=9, cpp_type=9, label=1, - has_default_value=False, default_value=b"".decode('utf-8'), - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), - _descriptor.FieldDescriptor( - name='timestamp', full_name='grpc.Status.timestamp', index=5, - number=6, type=9, cpp_type=9, label=1, - has_default_value=False, default_value=b"".decode('utf-8'), - message_type=None, enum_type=None, 
containing_type=None, - is_extension=False, extension_scope=None, - serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), - _descriptor.FieldDescriptor( - name='type', full_name='grpc.Status.type', index=6, - number=7, type=14, cpp_type=8, label=1, - has_default_value=False, default_value=0, - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), - _descriptor.FieldDescriptor( - name='extra', full_name='grpc.Status.extra', index=7, - number=8, type=9, cpp_type=9, label=1, - has_default_value=False, default_value=b"".decode('utf-8'), - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), - ], - extensions=[ - ], - nested_types=[], - enum_types=[ - _STATUS_LOGLEVEL, - ], - serialized_options=None, - is_extendable=False, - syntax='proto3', - extension_ranges=[], - oneofs=[ - ], - serialized_start=102, - serialized_end=370, -) - -_MODELUPDATEREQUEST = _descriptor.Descriptor( - name='ModelUpdateRequest', - full_name='grpc.ModelUpdateRequest', - filename=None, - file=DESCRIPTOR, - containing_type=None, - create_key=_descriptor._internal_create_key, - fields=[ - _descriptor.FieldDescriptor( - name='sender', full_name='grpc.ModelUpdateRequest.sender', index=0, - number=1, type=11, cpp_type=10, label=1, - has_default_value=False, default_value=None, - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), - _descriptor.FieldDescriptor( - name='receiver', full_name='grpc.ModelUpdateRequest.receiver', index=1, - number=2, type=11, cpp_type=10, label=1, - has_default_value=False, default_value=None, - message_type=None, enum_type=None, 
containing_type=None, - is_extension=False, extension_scope=None, - serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), - _descriptor.FieldDescriptor( - name='model_id', full_name='grpc.ModelUpdateRequest.model_id', index=2, - number=3, type=9, cpp_type=9, label=1, - has_default_value=False, default_value=b"".decode('utf-8'), - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), - _descriptor.FieldDescriptor( - name='data', full_name='grpc.ModelUpdateRequest.data', index=3, - number=4, type=9, cpp_type=9, label=1, - has_default_value=False, default_value=b"".decode('utf-8'), - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), - _descriptor.FieldDescriptor( - name='correlation_id', full_name='grpc.ModelUpdateRequest.correlation_id', index=4, - number=5, type=9, cpp_type=9, label=1, - has_default_value=False, default_value=b"".decode('utf-8'), - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), - _descriptor.FieldDescriptor( - name='timestamp', full_name='grpc.ModelUpdateRequest.timestamp', index=5, - number=6, type=9, cpp_type=9, label=1, - has_default_value=False, default_value=b"".decode('utf-8'), - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), - ], - extensions=[ - ], - nested_types=[], - enum_types=[ - ], - serialized_options=None, - is_extendable=False, - syntax='proto3', - extension_ranges=[], - oneofs=[ - ], - serialized_start=373, - serialized_end=530, -) - 
-_MODELUPDATE = _descriptor.Descriptor( - name='ModelUpdate', - full_name='grpc.ModelUpdate', - filename=None, - file=DESCRIPTOR, - containing_type=None, - create_key=_descriptor._internal_create_key, - fields=[ - _descriptor.FieldDescriptor( - name='sender', full_name='grpc.ModelUpdate.sender', index=0, - number=1, type=11, cpp_type=10, label=1, - has_default_value=False, default_value=None, - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), - _descriptor.FieldDescriptor( - name='receiver', full_name='grpc.ModelUpdate.receiver', index=1, - number=2, type=11, cpp_type=10, label=1, - has_default_value=False, default_value=None, - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), - _descriptor.FieldDescriptor( - name='model_id', full_name='grpc.ModelUpdate.model_id', index=2, - number=3, type=9, cpp_type=9, label=1, - has_default_value=False, default_value=b"".decode('utf-8'), - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), - _descriptor.FieldDescriptor( - name='model_update_id', full_name='grpc.ModelUpdate.model_update_id', index=3, - number=4, type=9, cpp_type=9, label=1, - has_default_value=False, default_value=b"".decode('utf-8'), - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), - _descriptor.FieldDescriptor( - name='correlation_id', full_name='grpc.ModelUpdate.correlation_id', index=4, - number=5, type=9, cpp_type=9, label=1, - has_default_value=False, default_value=b"".decode('utf-8'), - 
message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), - _descriptor.FieldDescriptor( - name='timestamp', full_name='grpc.ModelUpdate.timestamp', index=5, - number=6, type=9, cpp_type=9, label=1, - has_default_value=False, default_value=b"".decode('utf-8'), - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), - _descriptor.FieldDescriptor( - name='meta', full_name='grpc.ModelUpdate.meta', index=6, - number=7, type=9, cpp_type=9, label=1, - has_default_value=False, default_value=b"".decode('utf-8'), - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), - ], - extensions=[ - ], - nested_types=[], - enum_types=[ - ], - serialized_options=None, - is_extendable=False, - syntax='proto3', - extension_ranges=[], - oneofs=[ - ], - serialized_start=533, - serialized_end=708, -) - -_MODELVALIDATIONREQUEST = _descriptor.Descriptor( - name='ModelValidationRequest', - full_name='grpc.ModelValidationRequest', - filename=None, - file=DESCRIPTOR, - containing_type=None, - create_key=_descriptor._internal_create_key, - fields=[ - _descriptor.FieldDescriptor( - name='sender', full_name='grpc.ModelValidationRequest.sender', index=0, - number=1, type=11, cpp_type=10, label=1, - has_default_value=False, default_value=None, - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), - _descriptor.FieldDescriptor( - name='receiver', full_name='grpc.ModelValidationRequest.receiver', index=1, - number=2, type=11, cpp_type=10, label=1, - 
has_default_value=False, default_value=None, - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), - _descriptor.FieldDescriptor( - name='model_id', full_name='grpc.ModelValidationRequest.model_id', index=2, - number=3, type=9, cpp_type=9, label=1, - has_default_value=False, default_value=b"".decode('utf-8'), - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), - _descriptor.FieldDescriptor( - name='data', full_name='grpc.ModelValidationRequest.data', index=3, - number=4, type=9, cpp_type=9, label=1, - has_default_value=False, default_value=b"".decode('utf-8'), - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), - _descriptor.FieldDescriptor( - name='correlation_id', full_name='grpc.ModelValidationRequest.correlation_id', index=4, - number=5, type=9, cpp_type=9, label=1, - has_default_value=False, default_value=b"".decode('utf-8'), - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), - _descriptor.FieldDescriptor( - name='timestamp', full_name='grpc.ModelValidationRequest.timestamp', index=5, - number=6, type=9, cpp_type=9, label=1, - has_default_value=False, default_value=b"".decode('utf-8'), - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), - ], - extensions=[ - ], - nested_types=[], - enum_types=[ - ], - serialized_options=None, - is_extendable=False, - syntax='proto3', - 
extension_ranges=[], - oneofs=[ - ], - serialized_start=711, - serialized_end=872, -) - -_MODELVALIDATION = _descriptor.Descriptor( - name='ModelValidation', - full_name='grpc.ModelValidation', - filename=None, - file=DESCRIPTOR, - containing_type=None, - create_key=_descriptor._internal_create_key, - fields=[ - _descriptor.FieldDescriptor( - name='sender', full_name='grpc.ModelValidation.sender', index=0, - number=1, type=11, cpp_type=10, label=1, - has_default_value=False, default_value=None, - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), - _descriptor.FieldDescriptor( - name='receiver', full_name='grpc.ModelValidation.receiver', index=1, - number=2, type=11, cpp_type=10, label=1, - has_default_value=False, default_value=None, - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), - _descriptor.FieldDescriptor( - name='model_id', full_name='grpc.ModelValidation.model_id', index=2, - number=3, type=9, cpp_type=9, label=1, - has_default_value=False, default_value=b"".decode('utf-8'), - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), - _descriptor.FieldDescriptor( - name='data', full_name='grpc.ModelValidation.data', index=3, - number=4, type=9, cpp_type=9, label=1, - has_default_value=False, default_value=b"".decode('utf-8'), - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), - _descriptor.FieldDescriptor( - name='correlation_id', full_name='grpc.ModelValidation.correlation_id', index=4, - number=5, type=9, 
cpp_type=9, label=1, - has_default_value=False, default_value=b"".decode('utf-8'), - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), - _descriptor.FieldDescriptor( - name='timestamp', full_name='grpc.ModelValidation.timestamp', index=5, - number=6, type=9, cpp_type=9, label=1, - has_default_value=False, default_value=b"".decode('utf-8'), - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), - _descriptor.FieldDescriptor( - name='meta', full_name='grpc.ModelValidation.meta', index=6, - number=7, type=9, cpp_type=9, label=1, - has_default_value=False, default_value=b"".decode('utf-8'), - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), - ], - extensions=[ - ], - nested_types=[], - enum_types=[ - ], - serialized_options=None, - is_extendable=False, - syntax='proto3', - extension_ranges=[], - oneofs=[ - ], - serialized_start=875, - serialized_end=1043, -) - -_MODELREQUEST = _descriptor.Descriptor( - name='ModelRequest', - full_name='grpc.ModelRequest', - filename=None, - file=DESCRIPTOR, - containing_type=None, - create_key=_descriptor._internal_create_key, - fields=[ - _descriptor.FieldDescriptor( - name='sender', full_name='grpc.ModelRequest.sender', index=0, - number=1, type=11, cpp_type=10, label=1, - has_default_value=False, default_value=None, - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), - _descriptor.FieldDescriptor( - name='receiver', full_name='grpc.ModelRequest.receiver', index=1, - number=2, 
type=11, cpp_type=10, label=1, - has_default_value=False, default_value=None, - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), - _descriptor.FieldDescriptor( - name='data', full_name='grpc.ModelRequest.data', index=2, - number=3, type=12, cpp_type=9, label=1, - has_default_value=False, default_value=b"", - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), - _descriptor.FieldDescriptor( - name='id', full_name='grpc.ModelRequest.id', index=3, - number=4, type=9, cpp_type=9, label=1, - has_default_value=False, default_value=b"".decode('utf-8'), - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), - _descriptor.FieldDescriptor( - name='status', full_name='grpc.ModelRequest.status', index=4, - number=5, type=14, cpp_type=8, label=1, - has_default_value=False, default_value=0, - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), - ], - extensions=[ - ], - nested_types=[], - enum_types=[ - ], - serialized_options=None, - is_extendable=False, - syntax='proto3', - extension_ranges=[], - oneofs=[ - ], - serialized_start=1046, - serialized_end=1183, -) - -_MODELRESPONSE = _descriptor.Descriptor( - name='ModelResponse', - full_name='grpc.ModelResponse', - filename=None, - file=DESCRIPTOR, - containing_type=None, - create_key=_descriptor._internal_create_key, - fields=[ - _descriptor.FieldDescriptor( - name='data', full_name='grpc.ModelResponse.data', index=0, - number=1, type=12, cpp_type=9, label=1, - 
has_default_value=False, default_value=b"", - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), - _descriptor.FieldDescriptor( - name='id', full_name='grpc.ModelResponse.id', index=1, - number=2, type=9, cpp_type=9, label=1, - has_default_value=False, default_value=b"".decode('utf-8'), - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), - _descriptor.FieldDescriptor( - name='status', full_name='grpc.ModelResponse.status', index=2, - number=3, type=14, cpp_type=8, label=1, - has_default_value=False, default_value=0, - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), - _descriptor.FieldDescriptor( - name='message', full_name='grpc.ModelResponse.message', index=3, - number=4, type=9, cpp_type=9, label=1, - has_default_value=False, default_value=b"".decode('utf-8'), - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), - ], - extensions=[ - ], - nested_types=[], - enum_types=[ - ], - serialized_options=None, - is_extendable=False, - syntax='proto3', - extension_ranges=[], - oneofs=[ - ], - serialized_start=1185, - serialized_end=1278, -) - -_GETGLOBALMODELREQUEST = _descriptor.Descriptor( - name='GetGlobalModelRequest', - full_name='grpc.GetGlobalModelRequest', - filename=None, - file=DESCRIPTOR, - containing_type=None, - create_key=_descriptor._internal_create_key, - fields=[ - _descriptor.FieldDescriptor( - name='sender', full_name='grpc.GetGlobalModelRequest.sender', index=0, - number=1, type=11, cpp_type=10, 
label=1, - has_default_value=False, default_value=None, - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), - _descriptor.FieldDescriptor( - name='receiver', full_name='grpc.GetGlobalModelRequest.receiver', index=1, - number=2, type=11, cpp_type=10, label=1, - has_default_value=False, default_value=None, - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), - ], - extensions=[ - ], - nested_types=[], - enum_types=[ - ], - serialized_options=None, - is_extendable=False, - syntax='proto3', - extension_ranges=[], - oneofs=[ - ], - serialized_start=1280, - serialized_end=1365, -) - -_GETGLOBALMODELRESPONSE = _descriptor.Descriptor( - name='GetGlobalModelResponse', - full_name='grpc.GetGlobalModelResponse', - filename=None, - file=DESCRIPTOR, - containing_type=None, - create_key=_descriptor._internal_create_key, - fields=[ - _descriptor.FieldDescriptor( - name='sender', full_name='grpc.GetGlobalModelResponse.sender', index=0, - number=1, type=11, cpp_type=10, label=1, - has_default_value=False, default_value=None, - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), - _descriptor.FieldDescriptor( - name='receiver', full_name='grpc.GetGlobalModelResponse.receiver', index=1, - number=2, type=11, cpp_type=10, label=1, - has_default_value=False, default_value=None, - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), - _descriptor.FieldDescriptor( - name='model_id', full_name='grpc.GetGlobalModelResponse.model_id', index=2, 
- number=3, type=9, cpp_type=9, label=1, - has_default_value=False, default_value=b"".decode('utf-8'), - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), - ], - extensions=[ - ], - nested_types=[], - enum_types=[ - ], - serialized_options=None, - is_extendable=False, - syntax='proto3', - extension_ranges=[], - oneofs=[ - ], - serialized_start=1367, - serialized_end=1471, -) - -_HEARTBEAT = _descriptor.Descriptor( - name='Heartbeat', - full_name='grpc.Heartbeat', - filename=None, - file=DESCRIPTOR, - containing_type=None, - create_key=_descriptor._internal_create_key, - fields=[ - _descriptor.FieldDescriptor( - name='sender', full_name='grpc.Heartbeat.sender', index=0, - number=1, type=11, cpp_type=10, label=1, - has_default_value=False, default_value=None, - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), - ], - extensions=[ - ], - nested_types=[], - enum_types=[ - ], - serialized_options=None, - is_extendable=False, - syntax='proto3', - extension_ranges=[], - oneofs=[ - ], - serialized_start=1473, - serialized_end=1514, -) - -_CLIENTAVAILABLEMESSAGE = _descriptor.Descriptor( - name='ClientAvailableMessage', - full_name='grpc.ClientAvailableMessage', - filename=None, - file=DESCRIPTOR, - containing_type=None, - create_key=_descriptor._internal_create_key, - fields=[ - _descriptor.FieldDescriptor( - name='sender', full_name='grpc.ClientAvailableMessage.sender', index=0, - number=1, type=11, cpp_type=10, label=1, - has_default_value=False, default_value=None, - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), - _descriptor.FieldDescriptor( - 
name='data', full_name='grpc.ClientAvailableMessage.data', index=1, - number=2, type=9, cpp_type=9, label=1, - has_default_value=False, default_value=b"".decode('utf-8'), - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), - _descriptor.FieldDescriptor( - name='timestamp', full_name='grpc.ClientAvailableMessage.timestamp', index=2, - number=3, type=9, cpp_type=9, label=1, - has_default_value=False, default_value=b"".decode('utf-8'), - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), - ], - extensions=[ - ], - nested_types=[], - enum_types=[ - ], - serialized_options=None, - is_extendable=False, - syntax='proto3', - extension_ranges=[], - oneofs=[ - ], - serialized_start=1516, - serialized_end=1603, -) - -_LISTCLIENTSREQUEST = _descriptor.Descriptor( - name='ListClientsRequest', - full_name='grpc.ListClientsRequest', - filename=None, - file=DESCRIPTOR, - containing_type=None, - create_key=_descriptor._internal_create_key, - fields=[ - _descriptor.FieldDescriptor( - name='sender', full_name='grpc.ListClientsRequest.sender', index=0, - number=1, type=11, cpp_type=10, label=1, - has_default_value=False, default_value=None, - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), - _descriptor.FieldDescriptor( - name='channel', full_name='grpc.ListClientsRequest.channel', index=1, - number=2, type=14, cpp_type=8, label=1, - has_default_value=False, default_value=0, - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), - ], 
- extensions=[ - ], - nested_types=[], - enum_types=[ - ], - serialized_options=None, - is_extendable=False, - syntax='proto3', - extension_ranges=[], - oneofs=[ - ], - serialized_start=1605, - serialized_end=1687, -) - -_CLIENTLIST = _descriptor.Descriptor( - name='ClientList', - full_name='grpc.ClientList', - filename=None, - file=DESCRIPTOR, - containing_type=None, - create_key=_descriptor._internal_create_key, - fields=[ - _descriptor.FieldDescriptor( - name='client', full_name='grpc.ClientList.client', index=0, - number=1, type=11, cpp_type=10, label=3, - has_default_value=False, default_value=[], - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), - ], - extensions=[ - ], - nested_types=[], - enum_types=[ - ], - serialized_options=None, - is_extendable=False, - syntax='proto3', - extension_ranges=[], - oneofs=[ - ], - serialized_start=1689, - serialized_end=1731, -) - -_CLIENT = _descriptor.Descriptor( - name='Client', - full_name='grpc.Client', - filename=None, - file=DESCRIPTOR, - containing_type=None, - create_key=_descriptor._internal_create_key, - fields=[ - _descriptor.FieldDescriptor( - name='role', full_name='grpc.Client.role', index=0, - number=1, type=14, cpp_type=8, label=1, - has_default_value=False, default_value=0, - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), - _descriptor.FieldDescriptor( - name='name', full_name='grpc.Client.name', index=1, - number=2, type=9, cpp_type=9, label=1, - has_default_value=False, default_value=b"".decode('utf-8'), - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), - ], - extensions=[ - ], - 
nested_types=[], - enum_types=[ - ], - serialized_options=None, - is_extendable=False, - syntax='proto3', - extension_ranges=[], - oneofs=[ - ], - serialized_start=1733, - serialized_end=1781, -) - -_REASSIGNREQUEST = _descriptor.Descriptor( - name='ReassignRequest', - full_name='grpc.ReassignRequest', - filename=None, - file=DESCRIPTOR, - containing_type=None, - create_key=_descriptor._internal_create_key, - fields=[ - _descriptor.FieldDescriptor( - name='sender', full_name='grpc.ReassignRequest.sender', index=0, - number=1, type=11, cpp_type=10, label=1, - has_default_value=False, default_value=None, - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), - _descriptor.FieldDescriptor( - name='receiver', full_name='grpc.ReassignRequest.receiver', index=1, - number=2, type=11, cpp_type=10, label=1, - has_default_value=False, default_value=None, - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), - _descriptor.FieldDescriptor( - name='server', full_name='grpc.ReassignRequest.server', index=2, - number=3, type=9, cpp_type=9, label=1, - has_default_value=False, default_value=b"".decode('utf-8'), - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), - _descriptor.FieldDescriptor( - name='port', full_name='grpc.ReassignRequest.port', index=3, - number=4, type=13, cpp_type=3, label=1, - has_default_value=False, default_value=0, - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), - ], - extensions=[ - ], - nested_types=[], 
- enum_types=[ - ], - serialized_options=None, - is_extendable=False, - syntax='proto3', - extension_ranges=[], - oneofs=[ - ], - serialized_start=1783, - serialized_end=1892, -) - -_RECONNECTREQUEST = _descriptor.Descriptor( - name='ReconnectRequest', - full_name='grpc.ReconnectRequest', - filename=None, - file=DESCRIPTOR, - containing_type=None, - create_key=_descriptor._internal_create_key, - fields=[ - _descriptor.FieldDescriptor( - name='sender', full_name='grpc.ReconnectRequest.sender', index=0, - number=1, type=11, cpp_type=10, label=1, - has_default_value=False, default_value=None, - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), - _descriptor.FieldDescriptor( - name='receiver', full_name='grpc.ReconnectRequest.receiver', index=1, - number=2, type=11, cpp_type=10, label=1, - has_default_value=False, default_value=None, - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), - _descriptor.FieldDescriptor( - name='reconnect', full_name='grpc.ReconnectRequest.reconnect', index=2, - number=3, type=13, cpp_type=3, label=1, - has_default_value=False, default_value=0, - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), - ], - extensions=[ - ], - nested_types=[], - enum_types=[ - ], - serialized_options=None, - is_extendable=False, - syntax='proto3', - extension_ranges=[], - oneofs=[ - ], - serialized_start=1894, - serialized_end=1993, -) - -_PARAMETER = _descriptor.Descriptor( - name='Parameter', - full_name='grpc.Parameter', - filename=None, - file=DESCRIPTOR, - containing_type=None, - create_key=_descriptor._internal_create_key, - fields=[ - 
_descriptor.FieldDescriptor( - name='key', full_name='grpc.Parameter.key', index=0, - number=1, type=9, cpp_type=9, label=1, - has_default_value=False, default_value=b"".decode('utf-8'), - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), - _descriptor.FieldDescriptor( - name='value', full_name='grpc.Parameter.value', index=1, - number=2, type=9, cpp_type=9, label=1, - has_default_value=False, default_value=b"".decode('utf-8'), - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), - ], - extensions=[ - ], - nested_types=[], - enum_types=[ - ], - serialized_options=None, - is_extendable=False, - syntax='proto3', - extension_ranges=[], - oneofs=[ - ], - serialized_start=1995, - serialized_end=2034, -) - -_CONTROLREQUEST = _descriptor.Descriptor( - name='ControlRequest', - full_name='grpc.ControlRequest', - filename=None, - file=DESCRIPTOR, - containing_type=None, - create_key=_descriptor._internal_create_key, - fields=[ - _descriptor.FieldDescriptor( - name='command', full_name='grpc.ControlRequest.command', index=0, - number=1, type=14, cpp_type=8, label=1, - has_default_value=False, default_value=0, - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), - _descriptor.FieldDescriptor( - name='parameter', full_name='grpc.ControlRequest.parameter', index=1, - number=2, type=11, cpp_type=10, label=3, - has_default_value=False, default_value=[], - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), - ], - extensions=[ - ], 
- nested_types=[], - enum_types=[ - ], - serialized_options=None, - is_extendable=False, - syntax='proto3', - extension_ranges=[], - oneofs=[ - ], - serialized_start=2036, - serialized_end=2120, -) - -_CONTROLRESPONSE = _descriptor.Descriptor( - name='ControlResponse', - full_name='grpc.ControlResponse', - filename=None, - file=DESCRIPTOR, - containing_type=None, - create_key=_descriptor._internal_create_key, - fields=[ - _descriptor.FieldDescriptor( - name='message', full_name='grpc.ControlResponse.message', index=0, - number=1, type=9, cpp_type=9, label=1, - has_default_value=False, default_value=b"".decode('utf-8'), - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), - _descriptor.FieldDescriptor( - name='parameter', full_name='grpc.ControlResponse.parameter', index=1, - number=2, type=11, cpp_type=10, label=3, - has_default_value=False, default_value=[], - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), - ], - extensions=[ - ], - nested_types=[], - enum_types=[ - ], - serialized_options=None, - is_extendable=False, - syntax='proto3', - extension_ranges=[], - oneofs=[ - ], - serialized_start=2122, - serialized_end=2192, -) - -_REPORTRESPONSE = _descriptor.Descriptor( - name='ReportResponse', - full_name='grpc.ReportResponse', - filename=None, - file=DESCRIPTOR, - containing_type=None, - create_key=_descriptor._internal_create_key, - fields=[ - _descriptor.FieldDescriptor( - name='sender', full_name='grpc.ReportResponse.sender', index=0, - number=1, type=11, cpp_type=10, label=1, - has_default_value=False, default_value=None, - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - serialized_options=None, file=DESCRIPTOR, 
create_key=_descriptor._internal_create_key), - _descriptor.FieldDescriptor( - name='parameter', full_name='grpc.ReportResponse.parameter', index=1, - number=2, type=11, cpp_type=10, label=3, - has_default_value=False, default_value=[], - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), - ], - extensions=[ - ], - nested_types=[], - enum_types=[ - ], - serialized_options=None, - is_extendable=False, - syntax='proto3', - extension_ranges=[], - oneofs=[ - ], - serialized_start=2194, - serialized_end=2276, -) - -_CONNECTIONREQUEST = _descriptor.Descriptor( - name='ConnectionRequest', - full_name='grpc.ConnectionRequest', - filename=None, - file=DESCRIPTOR, - containing_type=None, - create_key=_descriptor._internal_create_key, - fields=[ - ], - extensions=[ - ], - nested_types=[], - enum_types=[ - ], - serialized_options=None, - is_extendable=False, - syntax='proto3', - extension_ranges=[], - oneofs=[ - ], - serialized_start=2278, - serialized_end=2297, -) - -_CONNECTIONRESPONSE = _descriptor.Descriptor( - name='ConnectionResponse', - full_name='grpc.ConnectionResponse', - filename=None, - file=DESCRIPTOR, - containing_type=None, - create_key=_descriptor._internal_create_key, - fields=[ - _descriptor.FieldDescriptor( - name='status', full_name='grpc.ConnectionResponse.status', index=0, - number=1, type=14, cpp_type=8, label=1, - has_default_value=False, default_value=0, - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), - ], - extensions=[ - ], - nested_types=[], - enum_types=[ - ], - serialized_options=None, - is_extendable=False, - syntax='proto3', - extension_ranges=[], - oneofs=[ - ], - serialized_start=2299, - serialized_end=2359, -) - -_RESPONSE.fields_by_name['sender'].message_type = 
_CLIENT -_STATUS.fields_by_name['sender'].message_type = _CLIENT -_STATUS.fields_by_name['log_level'].enum_type = _STATUS_LOGLEVEL -_STATUS.fields_by_name['type'].enum_type = _STATUSTYPE -_STATUS_LOGLEVEL.containing_type = _STATUS -_MODELUPDATEREQUEST.fields_by_name['sender'].message_type = _CLIENT -_MODELUPDATEREQUEST.fields_by_name['receiver'].message_type = _CLIENT -_MODELUPDATE.fields_by_name['sender'].message_type = _CLIENT -_MODELUPDATE.fields_by_name['receiver'].message_type = _CLIENT -_MODELVALIDATIONREQUEST.fields_by_name['sender'].message_type = _CLIENT -_MODELVALIDATIONREQUEST.fields_by_name['receiver'].message_type = _CLIENT -_MODELVALIDATION.fields_by_name['sender'].message_type = _CLIENT -_MODELVALIDATION.fields_by_name['receiver'].message_type = _CLIENT -_MODELREQUEST.fields_by_name['sender'].message_type = _CLIENT -_MODELREQUEST.fields_by_name['receiver'].message_type = _CLIENT -_MODELREQUEST.fields_by_name['status'].enum_type = _MODELSTATUS -_MODELRESPONSE.fields_by_name['status'].enum_type = _MODELSTATUS -_GETGLOBALMODELREQUEST.fields_by_name['sender'].message_type = _CLIENT -_GETGLOBALMODELREQUEST.fields_by_name['receiver'].message_type = _CLIENT -_GETGLOBALMODELRESPONSE.fields_by_name['sender'].message_type = _CLIENT -_GETGLOBALMODELRESPONSE.fields_by_name['receiver'].message_type = _CLIENT -_HEARTBEAT.fields_by_name['sender'].message_type = _CLIENT -_CLIENTAVAILABLEMESSAGE.fields_by_name['sender'].message_type = _CLIENT -_LISTCLIENTSREQUEST.fields_by_name['sender'].message_type = _CLIENT -_LISTCLIENTSREQUEST.fields_by_name['channel'].enum_type = _CHANNEL -_CLIENTLIST.fields_by_name['client'].message_type = _CLIENT -_CLIENT.fields_by_name['role'].enum_type = _ROLE -_REASSIGNREQUEST.fields_by_name['sender'].message_type = _CLIENT -_REASSIGNREQUEST.fields_by_name['receiver'].message_type = _CLIENT -_RECONNECTREQUEST.fields_by_name['sender'].message_type = _CLIENT -_RECONNECTREQUEST.fields_by_name['receiver'].message_type = _CLIENT 
-_CONTROLREQUEST.fields_by_name['command'].enum_type = _COMMAND -_CONTROLREQUEST.fields_by_name['parameter'].message_type = _PARAMETER -_CONTROLRESPONSE.fields_by_name['parameter'].message_type = _PARAMETER -_REPORTRESPONSE.fields_by_name['sender'].message_type = _CLIENT -_REPORTRESPONSE.fields_by_name['parameter'].message_type = _PARAMETER -_CONNECTIONRESPONSE.fields_by_name['status'].enum_type = _CONNECTIONSTATUS -DESCRIPTOR.message_types_by_name['Response'] = _RESPONSE -DESCRIPTOR.message_types_by_name['Status'] = _STATUS -DESCRIPTOR.message_types_by_name['ModelUpdateRequest'] = _MODELUPDATEREQUEST -DESCRIPTOR.message_types_by_name['ModelUpdate'] = _MODELUPDATE -DESCRIPTOR.message_types_by_name['ModelValidationRequest'] = _MODELVALIDATIONREQUEST -DESCRIPTOR.message_types_by_name['ModelValidation'] = _MODELVALIDATION -DESCRIPTOR.message_types_by_name['ModelRequest'] = _MODELREQUEST -DESCRIPTOR.message_types_by_name['ModelResponse'] = _MODELRESPONSE -DESCRIPTOR.message_types_by_name['GetGlobalModelRequest'] = _GETGLOBALMODELREQUEST -DESCRIPTOR.message_types_by_name['GetGlobalModelResponse'] = _GETGLOBALMODELRESPONSE -DESCRIPTOR.message_types_by_name['Heartbeat'] = _HEARTBEAT -DESCRIPTOR.message_types_by_name['ClientAvailableMessage'] = _CLIENTAVAILABLEMESSAGE -DESCRIPTOR.message_types_by_name['ListClientsRequest'] = _LISTCLIENTSREQUEST -DESCRIPTOR.message_types_by_name['ClientList'] = _CLIENTLIST -DESCRIPTOR.message_types_by_name['Client'] = _CLIENT -DESCRIPTOR.message_types_by_name['ReassignRequest'] = _REASSIGNREQUEST -DESCRIPTOR.message_types_by_name['ReconnectRequest'] = _RECONNECTREQUEST -DESCRIPTOR.message_types_by_name['Parameter'] = _PARAMETER -DESCRIPTOR.message_types_by_name['ControlRequest'] = _CONTROLREQUEST -DESCRIPTOR.message_types_by_name['ControlResponse'] = _CONTROLRESPONSE -DESCRIPTOR.message_types_by_name['ReportResponse'] = _REPORTRESPONSE -DESCRIPTOR.message_types_by_name['ConnectionRequest'] = _CONNECTIONREQUEST 
-DESCRIPTOR.message_types_by_name['ConnectionResponse'] = _CONNECTIONRESPONSE -DESCRIPTOR.enum_types_by_name['StatusType'] = _STATUSTYPE -DESCRIPTOR.enum_types_by_name['Channel'] = _CHANNEL -DESCRIPTOR.enum_types_by_name['ModelStatus'] = _MODELSTATUS -DESCRIPTOR.enum_types_by_name['Role'] = _ROLE -DESCRIPTOR.enum_types_by_name['Command'] = _COMMAND -DESCRIPTOR.enum_types_by_name['ConnectionStatus'] = _CONNECTIONSTATUS -_sym_db.RegisterFileDescriptor(DESCRIPTOR) +_RESPONSE = DESCRIPTOR.message_types_by_name['Response'] +_STATUS = DESCRIPTOR.message_types_by_name['Status'] +_MODELUPDATEREQUEST = DESCRIPTOR.message_types_by_name['ModelUpdateRequest'] +_MODELUPDATE = DESCRIPTOR.message_types_by_name['ModelUpdate'] +_MODELVALIDATIONREQUEST = DESCRIPTOR.message_types_by_name['ModelValidationRequest'] +_MODELVALIDATION = DESCRIPTOR.message_types_by_name['ModelValidation'] +_MODELREQUEST = DESCRIPTOR.message_types_by_name['ModelRequest'] +_MODELRESPONSE = DESCRIPTOR.message_types_by_name['ModelResponse'] +_GETGLOBALMODELREQUEST = DESCRIPTOR.message_types_by_name['GetGlobalModelRequest'] +_GETGLOBALMODELRESPONSE = DESCRIPTOR.message_types_by_name['GetGlobalModelResponse'] +_HEARTBEAT = DESCRIPTOR.message_types_by_name['Heartbeat'] +_CLIENTAVAILABLEMESSAGE = DESCRIPTOR.message_types_by_name['ClientAvailableMessage'] +_LISTCLIENTSREQUEST = DESCRIPTOR.message_types_by_name['ListClientsRequest'] +_CLIENTLIST = DESCRIPTOR.message_types_by_name['ClientList'] +_CLIENT = DESCRIPTOR.message_types_by_name['Client'] +_REASSIGNREQUEST = DESCRIPTOR.message_types_by_name['ReassignRequest'] +_RECONNECTREQUEST = DESCRIPTOR.message_types_by_name['ReconnectRequest'] +_PARAMETER = DESCRIPTOR.message_types_by_name['Parameter'] +_CONTROLREQUEST = DESCRIPTOR.message_types_by_name['ControlRequest'] +_CONTROLRESPONSE = DESCRIPTOR.message_types_by_name['ControlResponse'] +_REPORTRESPONSE = DESCRIPTOR.message_types_by_name['ReportResponse'] +_CONNECTIONREQUEST = 
DESCRIPTOR.message_types_by_name['ConnectionRequest'] +_CONNECTIONRESPONSE = DESCRIPTOR.message_types_by_name['ConnectionResponse'] +_STATUS_LOGLEVEL = _STATUS.enum_types_by_name['LogLevel'] Response = _reflection.GeneratedProtocolMessageType('Response', (_message.Message,), { - 'DESCRIPTOR': _RESPONSE, - '__module__': 'fedn.common.net.grpc.fedn_pb2' - # @@protoc_insertion_point(class_scope:grpc.Response) -}) + 'DESCRIPTOR' : _RESPONSE, + '__module__' : 'fedn.common.net.grpc.fedn_pb2' + # @@protoc_insertion_point(class_scope:grpc.Response) + }) _sym_db.RegisterMessage(Response) Status = _reflection.GeneratedProtocolMessageType('Status', (_message.Message,), { - 'DESCRIPTOR': _STATUS, - '__module__': 'fedn.common.net.grpc.fedn_pb2' - # @@protoc_insertion_point(class_scope:grpc.Status) -}) + 'DESCRIPTOR' : _STATUS, + '__module__' : 'fedn.common.net.grpc.fedn_pb2' + # @@protoc_insertion_point(class_scope:grpc.Status) + }) _sym_db.RegisterMessage(Status) ModelUpdateRequest = _reflection.GeneratedProtocolMessageType('ModelUpdateRequest', (_message.Message,), { - 'DESCRIPTOR': _MODELUPDATEREQUEST, - '__module__': 'fedn.common.net.grpc.fedn_pb2' - # @@protoc_insertion_point(class_scope:grpc.ModelUpdateRequest) -}) + 'DESCRIPTOR' : _MODELUPDATEREQUEST, + '__module__' : 'fedn.common.net.grpc.fedn_pb2' + # @@protoc_insertion_point(class_scope:grpc.ModelUpdateRequest) + }) _sym_db.RegisterMessage(ModelUpdateRequest) ModelUpdate = _reflection.GeneratedProtocolMessageType('ModelUpdate', (_message.Message,), { - 'DESCRIPTOR': _MODELUPDATE, - '__module__': 'fedn.common.net.grpc.fedn_pb2' - # @@protoc_insertion_point(class_scope:grpc.ModelUpdate) -}) + 'DESCRIPTOR' : _MODELUPDATE, + '__module__' : 'fedn.common.net.grpc.fedn_pb2' + # @@protoc_insertion_point(class_scope:grpc.ModelUpdate) + }) _sym_db.RegisterMessage(ModelUpdate) ModelValidationRequest = _reflection.GeneratedProtocolMessageType('ModelValidationRequest', (_message.Message,), { - 'DESCRIPTOR': _MODELVALIDATIONREQUEST, 
- '__module__': 'fedn.common.net.grpc.fedn_pb2' - # @@protoc_insertion_point(class_scope:grpc.ModelValidationRequest) -}) + 'DESCRIPTOR' : _MODELVALIDATIONREQUEST, + '__module__' : 'fedn.common.net.grpc.fedn_pb2' + # @@protoc_insertion_point(class_scope:grpc.ModelValidationRequest) + }) _sym_db.RegisterMessage(ModelValidationRequest) ModelValidation = _reflection.GeneratedProtocolMessageType('ModelValidation', (_message.Message,), { - 'DESCRIPTOR': _MODELVALIDATION, - '__module__': 'fedn.common.net.grpc.fedn_pb2' - # @@protoc_insertion_point(class_scope:grpc.ModelValidation) -}) + 'DESCRIPTOR' : _MODELVALIDATION, + '__module__' : 'fedn.common.net.grpc.fedn_pb2' + # @@protoc_insertion_point(class_scope:grpc.ModelValidation) + }) _sym_db.RegisterMessage(ModelValidation) ModelRequest = _reflection.GeneratedProtocolMessageType('ModelRequest', (_message.Message,), { - 'DESCRIPTOR': _MODELREQUEST, - '__module__': 'fedn.common.net.grpc.fedn_pb2' - # @@protoc_insertion_point(class_scope:grpc.ModelRequest) -}) + 'DESCRIPTOR' : _MODELREQUEST, + '__module__' : 'fedn.common.net.grpc.fedn_pb2' + # @@protoc_insertion_point(class_scope:grpc.ModelRequest) + }) _sym_db.RegisterMessage(ModelRequest) ModelResponse = _reflection.GeneratedProtocolMessageType('ModelResponse', (_message.Message,), { - 'DESCRIPTOR': _MODELRESPONSE, - '__module__': 'fedn.common.net.grpc.fedn_pb2' - # @@protoc_insertion_point(class_scope:grpc.ModelResponse) -}) + 'DESCRIPTOR' : _MODELRESPONSE, + '__module__' : 'fedn.common.net.grpc.fedn_pb2' + # @@protoc_insertion_point(class_scope:grpc.ModelResponse) + }) _sym_db.RegisterMessage(ModelResponse) GetGlobalModelRequest = _reflection.GeneratedProtocolMessageType('GetGlobalModelRequest', (_message.Message,), { - 'DESCRIPTOR': _GETGLOBALMODELREQUEST, - '__module__': 'fedn.common.net.grpc.fedn_pb2' - # @@protoc_insertion_point(class_scope:grpc.GetGlobalModelRequest) -}) + 'DESCRIPTOR' : _GETGLOBALMODELREQUEST, + '__module__' : 'fedn.common.net.grpc.fedn_pb2' + # 
@@protoc_insertion_point(class_scope:grpc.GetGlobalModelRequest) + }) _sym_db.RegisterMessage(GetGlobalModelRequest) GetGlobalModelResponse = _reflection.GeneratedProtocolMessageType('GetGlobalModelResponse', (_message.Message,), { - 'DESCRIPTOR': _GETGLOBALMODELRESPONSE, - '__module__': 'fedn.common.net.grpc.fedn_pb2' - # @@protoc_insertion_point(class_scope:grpc.GetGlobalModelResponse) -}) + 'DESCRIPTOR' : _GETGLOBALMODELRESPONSE, + '__module__' : 'fedn.common.net.grpc.fedn_pb2' + # @@protoc_insertion_point(class_scope:grpc.GetGlobalModelResponse) + }) _sym_db.RegisterMessage(GetGlobalModelResponse) Heartbeat = _reflection.GeneratedProtocolMessageType('Heartbeat', (_message.Message,), { - 'DESCRIPTOR': _HEARTBEAT, - '__module__': 'fedn.common.net.grpc.fedn_pb2' - # @@protoc_insertion_point(class_scope:grpc.Heartbeat) -}) + 'DESCRIPTOR' : _HEARTBEAT, + '__module__' : 'fedn.common.net.grpc.fedn_pb2' + # @@protoc_insertion_point(class_scope:grpc.Heartbeat) + }) _sym_db.RegisterMessage(Heartbeat) ClientAvailableMessage = _reflection.GeneratedProtocolMessageType('ClientAvailableMessage', (_message.Message,), { - 'DESCRIPTOR': _CLIENTAVAILABLEMESSAGE, - '__module__': 'fedn.common.net.grpc.fedn_pb2' - # @@protoc_insertion_point(class_scope:grpc.ClientAvailableMessage) -}) + 'DESCRIPTOR' : _CLIENTAVAILABLEMESSAGE, + '__module__' : 'fedn.common.net.grpc.fedn_pb2' + # @@protoc_insertion_point(class_scope:grpc.ClientAvailableMessage) + }) _sym_db.RegisterMessage(ClientAvailableMessage) ListClientsRequest = _reflection.GeneratedProtocolMessageType('ListClientsRequest', (_message.Message,), { - 'DESCRIPTOR': _LISTCLIENTSREQUEST, - '__module__': 'fedn.common.net.grpc.fedn_pb2' - # @@protoc_insertion_point(class_scope:grpc.ListClientsRequest) -}) + 'DESCRIPTOR' : _LISTCLIENTSREQUEST, + '__module__' : 'fedn.common.net.grpc.fedn_pb2' + # @@protoc_insertion_point(class_scope:grpc.ListClientsRequest) + }) _sym_db.RegisterMessage(ListClientsRequest) ClientList = 
_reflection.GeneratedProtocolMessageType('ClientList', (_message.Message,), { - 'DESCRIPTOR': _CLIENTLIST, - '__module__': 'fedn.common.net.grpc.fedn_pb2' - # @@protoc_insertion_point(class_scope:grpc.ClientList) -}) + 'DESCRIPTOR' : _CLIENTLIST, + '__module__' : 'fedn.common.net.grpc.fedn_pb2' + # @@protoc_insertion_point(class_scope:grpc.ClientList) + }) _sym_db.RegisterMessage(ClientList) Client = _reflection.GeneratedProtocolMessageType('Client', (_message.Message,), { - 'DESCRIPTOR': _CLIENT, - '__module__': 'fedn.common.net.grpc.fedn_pb2' - # @@protoc_insertion_point(class_scope:grpc.Client) -}) + 'DESCRIPTOR' : _CLIENT, + '__module__' : 'fedn.common.net.grpc.fedn_pb2' + # @@protoc_insertion_point(class_scope:grpc.Client) + }) _sym_db.RegisterMessage(Client) ReassignRequest = _reflection.GeneratedProtocolMessageType('ReassignRequest', (_message.Message,), { - 'DESCRIPTOR': _REASSIGNREQUEST, - '__module__': 'fedn.common.net.grpc.fedn_pb2' - # @@protoc_insertion_point(class_scope:grpc.ReassignRequest) -}) + 'DESCRIPTOR' : _REASSIGNREQUEST, + '__module__' : 'fedn.common.net.grpc.fedn_pb2' + # @@protoc_insertion_point(class_scope:grpc.ReassignRequest) + }) _sym_db.RegisterMessage(ReassignRequest) ReconnectRequest = _reflection.GeneratedProtocolMessageType('ReconnectRequest', (_message.Message,), { - 'DESCRIPTOR': _RECONNECTREQUEST, - '__module__': 'fedn.common.net.grpc.fedn_pb2' - # @@protoc_insertion_point(class_scope:grpc.ReconnectRequest) -}) + 'DESCRIPTOR' : _RECONNECTREQUEST, + '__module__' : 'fedn.common.net.grpc.fedn_pb2' + # @@protoc_insertion_point(class_scope:grpc.ReconnectRequest) + }) _sym_db.RegisterMessage(ReconnectRequest) Parameter = _reflection.GeneratedProtocolMessageType('Parameter', (_message.Message,), { - 'DESCRIPTOR': _PARAMETER, - '__module__': 'fedn.common.net.grpc.fedn_pb2' - # @@protoc_insertion_point(class_scope:grpc.Parameter) -}) + 'DESCRIPTOR' : _PARAMETER, + '__module__' : 'fedn.common.net.grpc.fedn_pb2' + # 
@@protoc_insertion_point(class_scope:grpc.Parameter) + }) _sym_db.RegisterMessage(Parameter) ControlRequest = _reflection.GeneratedProtocolMessageType('ControlRequest', (_message.Message,), { - 'DESCRIPTOR': _CONTROLREQUEST, - '__module__': 'fedn.common.net.grpc.fedn_pb2' - # @@protoc_insertion_point(class_scope:grpc.ControlRequest) -}) + 'DESCRIPTOR' : _CONTROLREQUEST, + '__module__' : 'fedn.common.net.grpc.fedn_pb2' + # @@protoc_insertion_point(class_scope:grpc.ControlRequest) + }) _sym_db.RegisterMessage(ControlRequest) ControlResponse = _reflection.GeneratedProtocolMessageType('ControlResponse', (_message.Message,), { - 'DESCRIPTOR': _CONTROLRESPONSE, - '__module__': 'fedn.common.net.grpc.fedn_pb2' - # @@protoc_insertion_point(class_scope:grpc.ControlResponse) -}) + 'DESCRIPTOR' : _CONTROLRESPONSE, + '__module__' : 'fedn.common.net.grpc.fedn_pb2' + # @@protoc_insertion_point(class_scope:grpc.ControlResponse) + }) _sym_db.RegisterMessage(ControlResponse) ReportResponse = _reflection.GeneratedProtocolMessageType('ReportResponse', (_message.Message,), { - 'DESCRIPTOR': _REPORTRESPONSE, - '__module__': 'fedn.common.net.grpc.fedn_pb2' - # @@protoc_insertion_point(class_scope:grpc.ReportResponse) -}) + 'DESCRIPTOR' : _REPORTRESPONSE, + '__module__' : 'fedn.common.net.grpc.fedn_pb2' + # @@protoc_insertion_point(class_scope:grpc.ReportResponse) + }) _sym_db.RegisterMessage(ReportResponse) ConnectionRequest = _reflection.GeneratedProtocolMessageType('ConnectionRequest', (_message.Message,), { - 'DESCRIPTOR': _CONNECTIONREQUEST, - '__module__': 'fedn.common.net.grpc.fedn_pb2' - # @@protoc_insertion_point(class_scope:grpc.ConnectionRequest) -}) + 'DESCRIPTOR' : _CONNECTIONREQUEST, + '__module__' : 'fedn.common.net.grpc.fedn_pb2' + # @@protoc_insertion_point(class_scope:grpc.ConnectionRequest) + }) _sym_db.RegisterMessage(ConnectionRequest) ConnectionResponse = _reflection.GeneratedProtocolMessageType('ConnectionResponse', (_message.Message,), { - 'DESCRIPTOR': 
_CONNECTIONRESPONSE, - '__module__': 'fedn.common.net.grpc.fedn_pb2' - # @@protoc_insertion_point(class_scope:grpc.ConnectionResponse) -}) + 'DESCRIPTOR' : _CONNECTIONRESPONSE, + '__module__' : 'fedn.common.net.grpc.fedn_pb2' + # @@protoc_insertion_point(class_scope:grpc.ConnectionResponse) + }) _sym_db.RegisterMessage(ConnectionResponse) -_MODELSERVICE = _descriptor.ServiceDescriptor( - name='ModelService', - full_name='grpc.ModelService', - file=DESCRIPTOR, - index=0, - serialized_options=None, - create_key=_descriptor._internal_create_key, - serialized_start=2898, - serialized_end=3020, - methods=[ - _descriptor.MethodDescriptor( - name='Upload', - full_name='grpc.ModelService.Upload', - index=0, - containing_service=None, - input_type=_MODELREQUEST, - output_type=_MODELRESPONSE, - serialized_options=None, - create_key=_descriptor._internal_create_key, - ), - _descriptor.MethodDescriptor( - name='Download', - full_name='grpc.ModelService.Download', - index=1, - containing_service=None, - input_type=_MODELREQUEST, - output_type=_MODELRESPONSE, - serialized_options=None, - create_key=_descriptor._internal_create_key, - ), - ]) -_sym_db.RegisterServiceDescriptor(_MODELSERVICE) - -DESCRIPTOR.services_by_name['ModelService'] = _MODELSERVICE - -_CONTROL = _descriptor.ServiceDescriptor( - name='Control', - full_name='grpc.Control', - file=DESCRIPTOR, - index=1, - serialized_options=None, - create_key=_descriptor._internal_create_key, - serialized_start=3023, - serialized_end=3250, - methods=[ - _descriptor.MethodDescriptor( - name='Start', - full_name='grpc.Control.Start', - index=0, - containing_service=None, - input_type=_CONTROLREQUEST, - output_type=_CONTROLRESPONSE, - serialized_options=None, - create_key=_descriptor._internal_create_key, - ), - _descriptor.MethodDescriptor( - name='Stop', - full_name='grpc.Control.Stop', - index=1, - containing_service=None, - input_type=_CONTROLREQUEST, - output_type=_CONTROLRESPONSE, - serialized_options=None, - 
create_key=_descriptor._internal_create_key, - ), - _descriptor.MethodDescriptor( - name='Configure', - full_name='grpc.Control.Configure', - index=2, - containing_service=None, - input_type=_CONTROLREQUEST, - output_type=_REPORTRESPONSE, - serialized_options=None, - create_key=_descriptor._internal_create_key, - ), - _descriptor.MethodDescriptor( - name='Report', - full_name='grpc.Control.Report', - index=3, - containing_service=None, - input_type=_CONTROLREQUEST, - output_type=_REPORTRESPONSE, - serialized_options=None, - create_key=_descriptor._internal_create_key, - ), - ]) -_sym_db.RegisterServiceDescriptor(_CONTROL) - -DESCRIPTOR.services_by_name['Control'] = _CONTROL - -_REDUCER = _descriptor.ServiceDescriptor( - name='Reducer', - full_name='grpc.Reducer', - file=DESCRIPTOR, - index=2, - serialized_options=None, - create_key=_descriptor._internal_create_key, - serialized_start=3252, - serialized_end=3338, - methods=[ - _descriptor.MethodDescriptor( - name='GetGlobalModel', - full_name='grpc.Reducer.GetGlobalModel', - index=0, - containing_service=None, - input_type=_GETGLOBALMODELREQUEST, - output_type=_GETGLOBALMODELRESPONSE, - serialized_options=None, - create_key=_descriptor._internal_create_key, - ), - ]) -_sym_db.RegisterServiceDescriptor(_REDUCER) - -DESCRIPTOR.services_by_name['Reducer'] = _REDUCER - -_CONNECTOR = _descriptor.ServiceDescriptor( - name='Connector', - full_name='grpc.Connector', - file=DESCRIPTOR, - index=3, - serialized_options=None, - create_key=_descriptor._internal_create_key, - serialized_start=3341, - serialized_end=3768, - methods=[ - _descriptor.MethodDescriptor( - name='AllianceStatusStream', - full_name='grpc.Connector.AllianceStatusStream', - index=0, - containing_service=None, - input_type=_CLIENTAVAILABLEMESSAGE, - output_type=_STATUS, - serialized_options=None, - create_key=_descriptor._internal_create_key, - ), - _descriptor.MethodDescriptor( - name='SendStatus', - full_name='grpc.Connector.SendStatus', - index=1, - 
containing_service=None, - input_type=_STATUS, - output_type=_RESPONSE, - serialized_options=None, - create_key=_descriptor._internal_create_key, - ), - _descriptor.MethodDescriptor( - name='ListActiveClients', - full_name='grpc.Connector.ListActiveClients', - index=2, - containing_service=None, - input_type=_LISTCLIENTSREQUEST, - output_type=_CLIENTLIST, - serialized_options=None, - create_key=_descriptor._internal_create_key, - ), - _descriptor.MethodDescriptor( - name='AcceptingClients', - full_name='grpc.Connector.AcceptingClients', - index=3, - containing_service=None, - input_type=_CONNECTIONREQUEST, - output_type=_CONNECTIONRESPONSE, - serialized_options=None, - create_key=_descriptor._internal_create_key, - ), - _descriptor.MethodDescriptor( - name='SendHeartbeat', - full_name='grpc.Connector.SendHeartbeat', - index=4, - containing_service=None, - input_type=_HEARTBEAT, - output_type=_RESPONSE, - serialized_options=None, - create_key=_descriptor._internal_create_key, - ), - _descriptor.MethodDescriptor( - name='ReassignClient', - full_name='grpc.Connector.ReassignClient', - index=5, - containing_service=None, - input_type=_REASSIGNREQUEST, - output_type=_RESPONSE, - serialized_options=None, - create_key=_descriptor._internal_create_key, - ), - _descriptor.MethodDescriptor( - name='ReconnectClient', - full_name='grpc.Connector.ReconnectClient', - index=6, - containing_service=None, - input_type=_RECONNECTREQUEST, - output_type=_RESPONSE, - serialized_options=None, - create_key=_descriptor._internal_create_key, - ), - ]) -_sym_db.RegisterServiceDescriptor(_CONNECTOR) - -DESCRIPTOR.services_by_name['Connector'] = _CONNECTOR - -_COMBINER = _descriptor.ServiceDescriptor( - name='Combiner', - full_name='grpc.Combiner', - file=DESCRIPTOR, - index=4, - serialized_options=None, - create_key=_descriptor._internal_create_key, - serialized_start=3771, - serialized_end=4373, - methods=[ - _descriptor.MethodDescriptor( - name='ModelUpdateRequestStream', - 
full_name='grpc.Combiner.ModelUpdateRequestStream', - index=0, - containing_service=None, - input_type=_CLIENTAVAILABLEMESSAGE, - output_type=_MODELUPDATEREQUEST, - serialized_options=None, - create_key=_descriptor._internal_create_key, - ), - _descriptor.MethodDescriptor( - name='ModelUpdateStream', - full_name='grpc.Combiner.ModelUpdateStream', - index=1, - containing_service=None, - input_type=_CLIENTAVAILABLEMESSAGE, - output_type=_MODELUPDATE, - serialized_options=None, - create_key=_descriptor._internal_create_key, - ), - _descriptor.MethodDescriptor( - name='ModelValidationRequestStream', - full_name='grpc.Combiner.ModelValidationRequestStream', - index=2, - containing_service=None, - input_type=_CLIENTAVAILABLEMESSAGE, - output_type=_MODELVALIDATIONREQUEST, - serialized_options=None, - create_key=_descriptor._internal_create_key, - ), - _descriptor.MethodDescriptor( - name='ModelValidationStream', - full_name='grpc.Combiner.ModelValidationStream', - index=3, - containing_service=None, - input_type=_CLIENTAVAILABLEMESSAGE, - output_type=_MODELVALIDATION, - serialized_options=None, - create_key=_descriptor._internal_create_key, - ), - _descriptor.MethodDescriptor( - name='SendModelUpdateRequest', - full_name='grpc.Combiner.SendModelUpdateRequest', - index=4, - containing_service=None, - input_type=_MODELUPDATEREQUEST, - output_type=_RESPONSE, - serialized_options=None, - create_key=_descriptor._internal_create_key, - ), - _descriptor.MethodDescriptor( - name='SendModelUpdate', - full_name='grpc.Combiner.SendModelUpdate', - index=5, - containing_service=None, - input_type=_MODELUPDATE, - output_type=_RESPONSE, - serialized_options=None, - create_key=_descriptor._internal_create_key, - ), - _descriptor.MethodDescriptor( - name='SendModelValidationRequest', - full_name='grpc.Combiner.SendModelValidationRequest', - index=6, - containing_service=None, - input_type=_MODELVALIDATIONREQUEST, - output_type=_RESPONSE, - serialized_options=None, - 
create_key=_descriptor._internal_create_key, - ), - _descriptor.MethodDescriptor( - name='SendModelValidation', - full_name='grpc.Combiner.SendModelValidation', - index=7, - containing_service=None, - input_type=_MODELVALIDATION, - output_type=_RESPONSE, - serialized_options=None, - create_key=_descriptor._internal_create_key, - ), - ]) -_sym_db.RegisterServiceDescriptor(_COMBINER) - -DESCRIPTOR.services_by_name['Combiner'] = _COMBINER - +_MODELSERVICE = DESCRIPTOR.services_by_name['ModelService'] +_CONTROL = DESCRIPTOR.services_by_name['Control'] +_REDUCER = DESCRIPTOR.services_by_name['Reducer'] +_CONNECTOR = DESCRIPTOR.services_by_name['Connector'] +_COMBINER = DESCRIPTOR.services_by_name['Combiner'] +if _descriptor._USE_C_DESCRIPTORS == False: + + DESCRIPTOR._options = None + _STATUSTYPE._serialized_start=2412 + _STATUSTYPE._serialized_end=2544 + _CHANNEL._serialized_start=2547 + _CHANNEL._serialized_end=2681 + _MODELSTATUS._serialized_start=2683 + _MODELSTATUS._serialized_end=2753 + _ROLE._serialized_start=2755 + _ROLE._serialized_end=2811 + _COMMAND._serialized_start=2813 + _COMMAND._serialized_end=2887 + _CONNECTIONSTATUS._serialized_start=2889 + _CONNECTIONSTATUS._serialized_end=2962 + _RESPONSE._serialized_start=41 + _RESPONSE._serialized_end=99 + _STATUS._serialized_start=102 + _STATUS._serialized_end=370 + _STATUS_LOGLEVEL._serialized_start=304 + _STATUS_LOGLEVEL._serialized_end=370 + _MODELUPDATEREQUEST._serialized_start=373 + _MODELUPDATEREQUEST._serialized_end=544 + _MODELUPDATE._serialized_start=547 + _MODELUPDATE._serialized_end=722 + _MODELVALIDATIONREQUEST._serialized_start=725 + _MODELVALIDATIONREQUEST._serialized_end=922 + _MODELVALIDATION._serialized_start=925 + _MODELVALIDATION._serialized_end=1093 + _MODELREQUEST._serialized_start=1096 + _MODELREQUEST._serialized_end=1233 + _MODELRESPONSE._serialized_start=1235 + _MODELRESPONSE._serialized_end=1328 + _GETGLOBALMODELREQUEST._serialized_start=1330 + _GETGLOBALMODELREQUEST._serialized_end=1415 + 
_GETGLOBALMODELRESPONSE._serialized_start=1417 + _GETGLOBALMODELRESPONSE._serialized_end=1521 + _HEARTBEAT._serialized_start=1523 + _HEARTBEAT._serialized_end=1564 + _CLIENTAVAILABLEMESSAGE._serialized_start=1566 + _CLIENTAVAILABLEMESSAGE._serialized_end=1653 + _LISTCLIENTSREQUEST._serialized_start=1655 + _LISTCLIENTSREQUEST._serialized_end=1737 + _CLIENTLIST._serialized_start=1739 + _CLIENTLIST._serialized_end=1781 + _CLIENT._serialized_start=1783 + _CLIENT._serialized_end=1831 + _REASSIGNREQUEST._serialized_start=1833 + _REASSIGNREQUEST._serialized_end=1942 + _RECONNECTREQUEST._serialized_start=1944 + _RECONNECTREQUEST._serialized_end=2043 + _PARAMETER._serialized_start=2045 + _PARAMETER._serialized_end=2084 + _CONTROLREQUEST._serialized_start=2086 + _CONTROLREQUEST._serialized_end=2170 + _CONTROLRESPONSE._serialized_start=2172 + _CONTROLRESPONSE._serialized_end=2242 + _REPORTRESPONSE._serialized_start=2244 + _REPORTRESPONSE._serialized_end=2326 + _CONNECTIONREQUEST._serialized_start=2328 + _CONNECTIONREQUEST._serialized_end=2347 + _CONNECTIONRESPONSE._serialized_start=2349 + _CONNECTIONRESPONSE._serialized_end=2409 + _MODELSERVICE._serialized_start=2964 + _MODELSERVICE._serialized_end=3086 + _CONTROL._serialized_start=3089 + _CONTROL._serialized_end=3386 + _REDUCER._serialized_start=3388 + _REDUCER._serialized_end=3474 + _CONNECTOR._serialized_start=3477 + _CONNECTOR._serialized_end=3904 + _COMBINER._serialized_start=3907 + _COMBINER._serialized_end=4509 # @@protoc_insertion_point(module_scope) diff --git a/fedn/fedn/common/net/grpc/fedn_pb2_grpc.py b/fedn/fedn/common/net/grpc/fedn_pb2_grpc.py index 9989824f7..9590e2b5c 100644 --- a/fedn/fedn/common/net/grpc/fedn_pb2_grpc.py +++ b/fedn/fedn/common/net/grpc/fedn_pb2_grpc.py @@ -2,8 +2,7 @@ """Client and server classes corresponding to protobuf-defined services.""" import grpc -from fedn.common.net.grpc import \ - fedn_pb2 as fedn_dot_common_dot_net_dot_grpc_dot_fedn__pb2 +from fedn.common.net.grpc import fedn_pb2 
as fedn_dot_common_dot_net_dot_grpc_dot_fedn__pb2 class ModelServiceStub(object): @@ -16,15 +15,15 @@ def __init__(self, channel): channel: A grpc.Channel. """ self.Upload = channel.stream_unary( - '/grpc.ModelService/Upload', - request_serializer=fedn_dot_common_dot_net_dot_grpc_dot_fedn__pb2.ModelRequest.SerializeToString, - response_deserializer=fedn_dot_common_dot_net_dot_grpc_dot_fedn__pb2.ModelResponse.FromString, - ) + '/grpc.ModelService/Upload', + request_serializer=fedn_dot_common_dot_net_dot_grpc_dot_fedn__pb2.ModelRequest.SerializeToString, + response_deserializer=fedn_dot_common_dot_net_dot_grpc_dot_fedn__pb2.ModelResponse.FromString, + ) self.Download = channel.unary_stream( - '/grpc.ModelService/Download', - request_serializer=fedn_dot_common_dot_net_dot_grpc_dot_fedn__pb2.ModelRequest.SerializeToString, - response_deserializer=fedn_dot_common_dot_net_dot_grpc_dot_fedn__pb2.ModelResponse.FromString, - ) + '/grpc.ModelService/Download', + request_serializer=fedn_dot_common_dot_net_dot_grpc_dot_fedn__pb2.ModelRequest.SerializeToString, + response_deserializer=fedn_dot_common_dot_net_dot_grpc_dot_fedn__pb2.ModelResponse.FromString, + ) class ModelServiceServicer(object): @@ -44,95 +43,60 @@ def Download(self, request, context): def add_ModelServiceServicer_to_server(servicer, server): - """ - - :param servicer: - :param server: - """ rpc_method_handlers = { - 'Upload': grpc.stream_unary_rpc_method_handler( - servicer.Upload, - request_deserializer=fedn_dot_common_dot_net_dot_grpc_dot_fedn__pb2.ModelRequest.FromString, - response_serializer=fedn_dot_common_dot_net_dot_grpc_dot_fedn__pb2.ModelResponse.SerializeToString, - ), - 'Download': grpc.unary_stream_rpc_method_handler( - servicer.Download, - request_deserializer=fedn_dot_common_dot_net_dot_grpc_dot_fedn__pb2.ModelRequest.FromString, - response_serializer=fedn_dot_common_dot_net_dot_grpc_dot_fedn__pb2.ModelResponse.SerializeToString, - ), + 'Upload': grpc.stream_unary_rpc_method_handler( + 
servicer.Upload, + request_deserializer=fedn_dot_common_dot_net_dot_grpc_dot_fedn__pb2.ModelRequest.FromString, + response_serializer=fedn_dot_common_dot_net_dot_grpc_dot_fedn__pb2.ModelResponse.SerializeToString, + ), + 'Download': grpc.unary_stream_rpc_method_handler( + servicer.Download, + request_deserializer=fedn_dot_common_dot_net_dot_grpc_dot_fedn__pb2.ModelRequest.FromString, + response_serializer=fedn_dot_common_dot_net_dot_grpc_dot_fedn__pb2.ModelResponse.SerializeToString, + ), } generic_handler = grpc.method_handlers_generic_handler( - 'grpc.ModelService', rpc_method_handlers) + 'grpc.ModelService', rpc_method_handlers) server.add_generic_rpc_handlers((generic_handler,)) -# This class is part of an EXPERIMENTAL API. + # This class is part of an EXPERIMENTAL API. class ModelService(object): """Missing associated documentation comment in .proto file.""" @staticmethod def Upload(request_iterator, - target, - options=(), - channel_credentials=None, - call_credentials=None, - insecure=False, - compression=None, - wait_for_ready=None, - timeout=None, - metadata=None): - """ - - :param request_iterator: - :param target: - :param options: - :param channel_credentials: - :param call_credentials: - :param insecure: - :param compression: - :param wait_for_ready: - :param timeout: - :param metadata: - :return: - """ + target, + options=(), + channel_credentials=None, + call_credentials=None, + insecure=False, + compression=None, + wait_for_ready=None, + timeout=None, + metadata=None): return grpc.experimental.stream_unary(request_iterator, target, '/grpc.ModelService/Upload', - fedn_dot_common_dot_net_dot_grpc_dot_fedn__pb2.ModelRequest.SerializeToString, - fedn_dot_common_dot_net_dot_grpc_dot_fedn__pb2.ModelResponse.FromString, - options, channel_credentials, - insecure, call_credentials, compression, wait_for_ready, timeout, - metadata) + fedn_dot_common_dot_net_dot_grpc_dot_fedn__pb2.ModelRequest.SerializeToString, + 
fedn_dot_common_dot_net_dot_grpc_dot_fedn__pb2.ModelResponse.FromString, + options, channel_credentials, + insecure, call_credentials, compression, wait_for_ready, timeout, metadata) @staticmethod def Download(request, - target, - options=(), - channel_credentials=None, - call_credentials=None, - insecure=False, - compression=None, - wait_for_ready=None, - timeout=None, - metadata=None): - """ - - :param request: - :param target: - :param options: - :param channel_credentials: - :param call_credentials: - :param insecure: - :param compression: - :param wait_for_ready: - :param timeout: - :param metadata: - :return: - """ + target, + options=(), + channel_credentials=None, + call_credentials=None, + insecure=False, + compression=None, + wait_for_ready=None, + timeout=None, + metadata=None): return grpc.experimental.unary_stream(request, target, '/grpc.ModelService/Download', - fedn_dot_common_dot_net_dot_grpc_dot_fedn__pb2.ModelRequest.SerializeToString, - fedn_dot_common_dot_net_dot_grpc_dot_fedn__pb2.ModelResponse.FromString, - options, channel_credentials, - insecure, call_credentials, compression, wait_for_ready, timeout, - metadata) + fedn_dot_common_dot_net_dot_grpc_dot_fedn__pb2.ModelRequest.SerializeToString, + fedn_dot_common_dot_net_dot_grpc_dot_fedn__pb2.ModelResponse.FromString, + options, channel_credentials, + insecure, call_credentials, compression, wait_for_ready, timeout, metadata) class ControlStub(object): @@ -145,25 +109,30 @@ def __init__(self, channel): channel: A grpc.Channel. 
""" self.Start = channel.unary_unary( - '/grpc.Control/Start', - request_serializer=fedn_dot_common_dot_net_dot_grpc_dot_fedn__pb2.ControlRequest.SerializeToString, - response_deserializer=fedn_dot_common_dot_net_dot_grpc_dot_fedn__pb2.ControlResponse.FromString, - ) + '/grpc.Control/Start', + request_serializer=fedn_dot_common_dot_net_dot_grpc_dot_fedn__pb2.ControlRequest.SerializeToString, + response_deserializer=fedn_dot_common_dot_net_dot_grpc_dot_fedn__pb2.ControlResponse.FromString, + ) self.Stop = channel.unary_unary( - '/grpc.Control/Stop', - request_serializer=fedn_dot_common_dot_net_dot_grpc_dot_fedn__pb2.ControlRequest.SerializeToString, - response_deserializer=fedn_dot_common_dot_net_dot_grpc_dot_fedn__pb2.ControlResponse.FromString, - ) + '/grpc.Control/Stop', + request_serializer=fedn_dot_common_dot_net_dot_grpc_dot_fedn__pb2.ControlRequest.SerializeToString, + response_deserializer=fedn_dot_common_dot_net_dot_grpc_dot_fedn__pb2.ControlResponse.FromString, + ) self.Configure = channel.unary_unary( - '/grpc.Control/Configure', - request_serializer=fedn_dot_common_dot_net_dot_grpc_dot_fedn__pb2.ControlRequest.SerializeToString, - response_deserializer=fedn_dot_common_dot_net_dot_grpc_dot_fedn__pb2.ReportResponse.FromString, - ) + '/grpc.Control/Configure', + request_serializer=fedn_dot_common_dot_net_dot_grpc_dot_fedn__pb2.ControlRequest.SerializeToString, + response_deserializer=fedn_dot_common_dot_net_dot_grpc_dot_fedn__pb2.ReportResponse.FromString, + ) + self.FlushAggregationQueue = channel.unary_unary( + '/grpc.Control/FlushAggregationQueue', + request_serializer=fedn_dot_common_dot_net_dot_grpc_dot_fedn__pb2.ControlRequest.SerializeToString, + response_deserializer=fedn_dot_common_dot_net_dot_grpc_dot_fedn__pb2.ControlResponse.FromString, + ) self.Report = channel.unary_unary( - '/grpc.Control/Report', - request_serializer=fedn_dot_common_dot_net_dot_grpc_dot_fedn__pb2.ControlRequest.SerializeToString, - 
response_deserializer=fedn_dot_common_dot_net_dot_grpc_dot_fedn__pb2.ReportResponse.FromString, - ) + '/grpc.Control/Report', + request_serializer=fedn_dot_common_dot_net_dot_grpc_dot_fedn__pb2.ControlRequest.SerializeToString, + response_deserializer=fedn_dot_common_dot_net_dot_grpc_dot_fedn__pb2.ReportResponse.FromString, + ) class ControlServicer(object): @@ -187,6 +156,12 @@ def Configure(self, request, context): context.set_details('Method not implemented!') raise NotImplementedError('Method not implemented!') + def FlushAggregationQueue(self, request, context): + """Missing associated documentation comment in .proto file.""" + context.set_code(grpc.StatusCode.UNIMPLEMENTED) + context.set_details('Method not implemented!') + raise NotImplementedError('Method not implemented!') + def Report(self, request, context): """Missing associated documentation comment in .proto file.""" context.set_code(grpc.StatusCode.UNIMPLEMENTED) @@ -195,165 +170,126 @@ def Report(self, request, context): def add_ControlServicer_to_server(servicer, server): - """ - - :param servicer: - :param server: - """ rpc_method_handlers = { - 'Start': grpc.unary_unary_rpc_method_handler( - servicer.Start, - request_deserializer=fedn_dot_common_dot_net_dot_grpc_dot_fedn__pb2.ControlRequest.FromString, - response_serializer=fedn_dot_common_dot_net_dot_grpc_dot_fedn__pb2.ControlResponse.SerializeToString, - ), - 'Stop': grpc.unary_unary_rpc_method_handler( - servicer.Stop, - request_deserializer=fedn_dot_common_dot_net_dot_grpc_dot_fedn__pb2.ControlRequest.FromString, - response_serializer=fedn_dot_common_dot_net_dot_grpc_dot_fedn__pb2.ControlResponse.SerializeToString, - ), - 'Configure': grpc.unary_unary_rpc_method_handler( - servicer.Configure, - request_deserializer=fedn_dot_common_dot_net_dot_grpc_dot_fedn__pb2.ControlRequest.FromString, - response_serializer=fedn_dot_common_dot_net_dot_grpc_dot_fedn__pb2.ReportResponse.SerializeToString, - ), - 'Report': grpc.unary_unary_rpc_method_handler( 
- servicer.Report, - request_deserializer=fedn_dot_common_dot_net_dot_grpc_dot_fedn__pb2.ControlRequest.FromString, - response_serializer=fedn_dot_common_dot_net_dot_grpc_dot_fedn__pb2.ReportResponse.SerializeToString, - ), + 'Start': grpc.unary_unary_rpc_method_handler( + servicer.Start, + request_deserializer=fedn_dot_common_dot_net_dot_grpc_dot_fedn__pb2.ControlRequest.FromString, + response_serializer=fedn_dot_common_dot_net_dot_grpc_dot_fedn__pb2.ControlResponse.SerializeToString, + ), + 'Stop': grpc.unary_unary_rpc_method_handler( + servicer.Stop, + request_deserializer=fedn_dot_common_dot_net_dot_grpc_dot_fedn__pb2.ControlRequest.FromString, + response_serializer=fedn_dot_common_dot_net_dot_grpc_dot_fedn__pb2.ControlResponse.SerializeToString, + ), + 'Configure': grpc.unary_unary_rpc_method_handler( + servicer.Configure, + request_deserializer=fedn_dot_common_dot_net_dot_grpc_dot_fedn__pb2.ControlRequest.FromString, + response_serializer=fedn_dot_common_dot_net_dot_grpc_dot_fedn__pb2.ReportResponse.SerializeToString, + ), + 'FlushAggregationQueue': grpc.unary_unary_rpc_method_handler( + servicer.FlushAggregationQueue, + request_deserializer=fedn_dot_common_dot_net_dot_grpc_dot_fedn__pb2.ControlRequest.FromString, + response_serializer=fedn_dot_common_dot_net_dot_grpc_dot_fedn__pb2.ControlResponse.SerializeToString, + ), + 'Report': grpc.unary_unary_rpc_method_handler( + servicer.Report, + request_deserializer=fedn_dot_common_dot_net_dot_grpc_dot_fedn__pb2.ControlRequest.FromString, + response_serializer=fedn_dot_common_dot_net_dot_grpc_dot_fedn__pb2.ReportResponse.SerializeToString, + ), } generic_handler = grpc.method_handlers_generic_handler( - 'grpc.Control', rpc_method_handlers) + 'grpc.Control', rpc_method_handlers) server.add_generic_rpc_handlers((generic_handler,)) -# This class is part of an EXPERIMENTAL API. + # This class is part of an EXPERIMENTAL API. 
class Control(object): """Missing associated documentation comment in .proto file.""" @staticmethod def Start(request, - target, - options=(), - channel_credentials=None, - call_credentials=None, - insecure=False, - compression=None, - wait_for_ready=None, - timeout=None, - metadata=None): - """ - - :param request: - :param target: - :param options: - :param channel_credentials: - :param call_credentials: - :param insecure: - :param compression: - :param wait_for_ready: - :param timeout: - :param metadata: - :return: - """ + target, + options=(), + channel_credentials=None, + call_credentials=None, + insecure=False, + compression=None, + wait_for_ready=None, + timeout=None, + metadata=None): return grpc.experimental.unary_unary(request, target, '/grpc.Control/Start', - fedn_dot_common_dot_net_dot_grpc_dot_fedn__pb2.ControlRequest.SerializeToString, - fedn_dot_common_dot_net_dot_grpc_dot_fedn__pb2.ControlResponse.FromString, - options, channel_credentials, - insecure, call_credentials, compression, wait_for_ready, timeout, metadata) + fedn_dot_common_dot_net_dot_grpc_dot_fedn__pb2.ControlRequest.SerializeToString, + fedn_dot_common_dot_net_dot_grpc_dot_fedn__pb2.ControlResponse.FromString, + options, channel_credentials, + insecure, call_credentials, compression, wait_for_ready, timeout, metadata) @staticmethod def Stop(request, - target, - options=(), - channel_credentials=None, - call_credentials=None, - insecure=False, - compression=None, - wait_for_ready=None, - timeout=None, - metadata=None): - """ - - :param request: - :param target: - :param options: - :param channel_credentials: - :param call_credentials: - :param insecure: - :param compression: - :param wait_for_ready: - :param timeout: - :param metadata: - :return: - """ + target, + options=(), + channel_credentials=None, + call_credentials=None, + insecure=False, + compression=None, + wait_for_ready=None, + timeout=None, + metadata=None): return grpc.experimental.unary_unary(request, target, 
'/grpc.Control/Stop', - fedn_dot_common_dot_net_dot_grpc_dot_fedn__pb2.ControlRequest.SerializeToString, - fedn_dot_common_dot_net_dot_grpc_dot_fedn__pb2.ControlResponse.FromString, - options, channel_credentials, - insecure, call_credentials, compression, wait_for_ready, timeout, metadata) + fedn_dot_common_dot_net_dot_grpc_dot_fedn__pb2.ControlRequest.SerializeToString, + fedn_dot_common_dot_net_dot_grpc_dot_fedn__pb2.ControlResponse.FromString, + options, channel_credentials, + insecure, call_credentials, compression, wait_for_ready, timeout, metadata) @staticmethod def Configure(request, - target, - options=(), - channel_credentials=None, - call_credentials=None, - insecure=False, - compression=None, - wait_for_ready=None, - timeout=None, - metadata=None): - """ - - :param request: - :param target: - :param options: - :param channel_credentials: - :param call_credentials: - :param insecure: - :param compression: - :param wait_for_ready: - :param timeout: - :param metadata: - :return: - """ + target, + options=(), + channel_credentials=None, + call_credentials=None, + insecure=False, + compression=None, + wait_for_ready=None, + timeout=None, + metadata=None): return grpc.experimental.unary_unary(request, target, '/grpc.Control/Configure', - fedn_dot_common_dot_net_dot_grpc_dot_fedn__pb2.ControlRequest.SerializeToString, - fedn_dot_common_dot_net_dot_grpc_dot_fedn__pb2.ReportResponse.FromString, - options, channel_credentials, - insecure, call_credentials, compression, wait_for_ready, timeout, metadata) + fedn_dot_common_dot_net_dot_grpc_dot_fedn__pb2.ControlRequest.SerializeToString, + fedn_dot_common_dot_net_dot_grpc_dot_fedn__pb2.ReportResponse.FromString, + options, channel_credentials, + insecure, call_credentials, compression, wait_for_ready, timeout, metadata) @staticmethod - def Report(request, - target, - options=(), - channel_credentials=None, - call_credentials=None, - insecure=False, - compression=None, - wait_for_ready=None, - timeout=None, - 
metadata=None): - """ + def FlushAggregationQueue(request, + target, + options=(), + channel_credentials=None, + call_credentials=None, + insecure=False, + compression=None, + wait_for_ready=None, + timeout=None, + metadata=None): + return grpc.experimental.unary_unary(request, target, '/grpc.Control/FlushAggregationQueue', + fedn_dot_common_dot_net_dot_grpc_dot_fedn__pb2.ControlRequest.SerializeToString, + fedn_dot_common_dot_net_dot_grpc_dot_fedn__pb2.ControlResponse.FromString, + options, channel_credentials, + insecure, call_credentials, compression, wait_for_ready, timeout, metadata) - :param request: - :param target: - :param options: - :param channel_credentials: - :param call_credentials: - :param insecure: - :param compression: - :param wait_for_ready: - :param timeout: - :param metadata: - :return: - """ + @staticmethod + def Report(request, + target, + options=(), + channel_credentials=None, + call_credentials=None, + insecure=False, + compression=None, + wait_for_ready=None, + timeout=None, + metadata=None): return grpc.experimental.unary_unary(request, target, '/grpc.Control/Report', - fedn_dot_common_dot_net_dot_grpc_dot_fedn__pb2.ControlRequest.SerializeToString, - fedn_dot_common_dot_net_dot_grpc_dot_fedn__pb2.ReportResponse.FromString, - options, channel_credentials, - insecure, call_credentials, compression, wait_for_ready, timeout, metadata) + fedn_dot_common_dot_net_dot_grpc_dot_fedn__pb2.ControlRequest.SerializeToString, + fedn_dot_common_dot_net_dot_grpc_dot_fedn__pb2.ReportResponse.FromString, + options, channel_credentials, + insecure, call_credentials, compression, wait_for_ready, timeout, metadata) class ReducerStub(object): @@ -366,10 +302,10 @@ def __init__(self, channel): channel: A grpc.Channel. 
""" self.GetGlobalModel = channel.unary_unary( - '/grpc.Reducer/GetGlobalModel', - request_serializer=fedn_dot_common_dot_net_dot_grpc_dot_fedn__pb2.GetGlobalModelRequest.SerializeToString, - response_deserializer=fedn_dot_common_dot_net_dot_grpc_dot_fedn__pb2.GetGlobalModelResponse.FromString, - ) + '/grpc.Reducer/GetGlobalModel', + request_serializer=fedn_dot_common_dot_net_dot_grpc_dot_fedn__pb2.GetGlobalModelRequest.SerializeToString, + response_deserializer=fedn_dot_common_dot_net_dot_grpc_dot_fedn__pb2.GetGlobalModelResponse.FromString, + ) class ReducerServicer(object): @@ -383,57 +319,38 @@ def GetGlobalModel(self, request, context): def add_ReducerServicer_to_server(servicer, server): - """ - - :param servicer: - :param server: - """ rpc_method_handlers = { - 'GetGlobalModel': grpc.unary_unary_rpc_method_handler( - servicer.GetGlobalModel, - request_deserializer=fedn_dot_common_dot_net_dot_grpc_dot_fedn__pb2.GetGlobalModelRequest.FromString, - response_serializer=fedn_dot_common_dot_net_dot_grpc_dot_fedn__pb2.GetGlobalModelResponse.SerializeToString, - ), + 'GetGlobalModel': grpc.unary_unary_rpc_method_handler( + servicer.GetGlobalModel, + request_deserializer=fedn_dot_common_dot_net_dot_grpc_dot_fedn__pb2.GetGlobalModelRequest.FromString, + response_serializer=fedn_dot_common_dot_net_dot_grpc_dot_fedn__pb2.GetGlobalModelResponse.SerializeToString, + ), } generic_handler = grpc.method_handlers_generic_handler( - 'grpc.Reducer', rpc_method_handlers) + 'grpc.Reducer', rpc_method_handlers) server.add_generic_rpc_handlers((generic_handler,)) -# This class is part of an EXPERIMENTAL API. + # This class is part of an EXPERIMENTAL API. 
class Reducer(object): """Missing associated documentation comment in .proto file.""" @staticmethod def GetGlobalModel(request, - target, - options=(), - channel_credentials=None, - call_credentials=None, - insecure=False, - compression=None, - wait_for_ready=None, - timeout=None, - metadata=None): - """ - - :param request: - :param target: - :param options: - :param channel_credentials: - :param call_credentials: - :param insecure: - :param compression: - :param wait_for_ready: - :param timeout: - :param metadata: - :return: - """ + target, + options=(), + channel_credentials=None, + call_credentials=None, + insecure=False, + compression=None, + wait_for_ready=None, + timeout=None, + metadata=None): return grpc.experimental.unary_unary(request, target, '/grpc.Reducer/GetGlobalModel', - fedn_dot_common_dot_net_dot_grpc_dot_fedn__pb2.GetGlobalModelRequest.SerializeToString, - fedn_dot_common_dot_net_dot_grpc_dot_fedn__pb2.GetGlobalModelResponse.FromString, - options, channel_credentials, - insecure, call_credentials, compression, wait_for_ready, timeout, metadata) + fedn_dot_common_dot_net_dot_grpc_dot_fedn__pb2.GetGlobalModelRequest.SerializeToString, + fedn_dot_common_dot_net_dot_grpc_dot_fedn__pb2.GetGlobalModelResponse.FromString, + options, channel_credentials, + insecure, call_credentials, compression, wait_for_ready, timeout, metadata) class ConnectorStub(object): @@ -446,40 +363,40 @@ def __init__(self, channel): channel: A grpc.Channel. 
""" self.AllianceStatusStream = channel.unary_stream( - '/grpc.Connector/AllianceStatusStream', - request_serializer=fedn_dot_common_dot_net_dot_grpc_dot_fedn__pb2.ClientAvailableMessage.SerializeToString, - response_deserializer=fedn_dot_common_dot_net_dot_grpc_dot_fedn__pb2.Status.FromString, - ) + '/grpc.Connector/AllianceStatusStream', + request_serializer=fedn_dot_common_dot_net_dot_grpc_dot_fedn__pb2.ClientAvailableMessage.SerializeToString, + response_deserializer=fedn_dot_common_dot_net_dot_grpc_dot_fedn__pb2.Status.FromString, + ) self.SendStatus = channel.unary_unary( - '/grpc.Connector/SendStatus', - request_serializer=fedn_dot_common_dot_net_dot_grpc_dot_fedn__pb2.Status.SerializeToString, - response_deserializer=fedn_dot_common_dot_net_dot_grpc_dot_fedn__pb2.Response.FromString, - ) + '/grpc.Connector/SendStatus', + request_serializer=fedn_dot_common_dot_net_dot_grpc_dot_fedn__pb2.Status.SerializeToString, + response_deserializer=fedn_dot_common_dot_net_dot_grpc_dot_fedn__pb2.Response.FromString, + ) self.ListActiveClients = channel.unary_unary( - '/grpc.Connector/ListActiveClients', - request_serializer=fedn_dot_common_dot_net_dot_grpc_dot_fedn__pb2.ListClientsRequest.SerializeToString, - response_deserializer=fedn_dot_common_dot_net_dot_grpc_dot_fedn__pb2.ClientList.FromString, - ) + '/grpc.Connector/ListActiveClients', + request_serializer=fedn_dot_common_dot_net_dot_grpc_dot_fedn__pb2.ListClientsRequest.SerializeToString, + response_deserializer=fedn_dot_common_dot_net_dot_grpc_dot_fedn__pb2.ClientList.FromString, + ) self.AcceptingClients = channel.unary_unary( - '/grpc.Connector/AcceptingClients', - request_serializer=fedn_dot_common_dot_net_dot_grpc_dot_fedn__pb2.ConnectionRequest.SerializeToString, - response_deserializer=fedn_dot_common_dot_net_dot_grpc_dot_fedn__pb2.ConnectionResponse.FromString, - ) + '/grpc.Connector/AcceptingClients', + request_serializer=fedn_dot_common_dot_net_dot_grpc_dot_fedn__pb2.ConnectionRequest.SerializeToString, + 
response_deserializer=fedn_dot_common_dot_net_dot_grpc_dot_fedn__pb2.ConnectionResponse.FromString, + ) self.SendHeartbeat = channel.unary_unary( - '/grpc.Connector/SendHeartbeat', - request_serializer=fedn_dot_common_dot_net_dot_grpc_dot_fedn__pb2.Heartbeat.SerializeToString, - response_deserializer=fedn_dot_common_dot_net_dot_grpc_dot_fedn__pb2.Response.FromString, - ) + '/grpc.Connector/SendHeartbeat', + request_serializer=fedn_dot_common_dot_net_dot_grpc_dot_fedn__pb2.Heartbeat.SerializeToString, + response_deserializer=fedn_dot_common_dot_net_dot_grpc_dot_fedn__pb2.Response.FromString, + ) self.ReassignClient = channel.unary_unary( - '/grpc.Connector/ReassignClient', - request_serializer=fedn_dot_common_dot_net_dot_grpc_dot_fedn__pb2.ReassignRequest.SerializeToString, - response_deserializer=fedn_dot_common_dot_net_dot_grpc_dot_fedn__pb2.Response.FromString, - ) + '/grpc.Connector/ReassignClient', + request_serializer=fedn_dot_common_dot_net_dot_grpc_dot_fedn__pb2.ReassignRequest.SerializeToString, + response_deserializer=fedn_dot_common_dot_net_dot_grpc_dot_fedn__pb2.Response.FromString, + ) self.ReconnectClient = channel.unary_unary( - '/grpc.Connector/ReconnectClient', - request_serializer=fedn_dot_common_dot_net_dot_grpc_dot_fedn__pb2.ReconnectRequest.SerializeToString, - response_deserializer=fedn_dot_common_dot_net_dot_grpc_dot_fedn__pb2.Response.FromString, - ) + '/grpc.Connector/ReconnectClient', + request_serializer=fedn_dot_common_dot_net_dot_grpc_dot_fedn__pb2.ReconnectRequest.SerializeToString, + response_deserializer=fedn_dot_common_dot_net_dot_grpc_dot_fedn__pb2.Response.FromString, + ) class ConnectorServicer(object): @@ -534,274 +451,170 @@ def ReconnectClient(self, request, context): def add_ConnectorServicer_to_server(servicer, server): - """ - - :param servicer: - :param server: - """ rpc_method_handlers = { - 'AllianceStatusStream': grpc.unary_stream_rpc_method_handler( - servicer.AllianceStatusStream, - 
request_deserializer=fedn_dot_common_dot_net_dot_grpc_dot_fedn__pb2.ClientAvailableMessage.FromString, - response_serializer=fedn_dot_common_dot_net_dot_grpc_dot_fedn__pb2.Status.SerializeToString, - ), - 'SendStatus': grpc.unary_unary_rpc_method_handler( - servicer.SendStatus, - request_deserializer=fedn_dot_common_dot_net_dot_grpc_dot_fedn__pb2.Status.FromString, - response_serializer=fedn_dot_common_dot_net_dot_grpc_dot_fedn__pb2.Response.SerializeToString, - ), - 'ListActiveClients': grpc.unary_unary_rpc_method_handler( - servicer.ListActiveClients, - request_deserializer=fedn_dot_common_dot_net_dot_grpc_dot_fedn__pb2.ListClientsRequest.FromString, - response_serializer=fedn_dot_common_dot_net_dot_grpc_dot_fedn__pb2.ClientList.SerializeToString, - ), - 'AcceptingClients': grpc.unary_unary_rpc_method_handler( - servicer.AcceptingClients, - request_deserializer=fedn_dot_common_dot_net_dot_grpc_dot_fedn__pb2.ConnectionRequest.FromString, - response_serializer=fedn_dot_common_dot_net_dot_grpc_dot_fedn__pb2.ConnectionResponse.SerializeToString, - ), - 'SendHeartbeat': grpc.unary_unary_rpc_method_handler( - servicer.SendHeartbeat, - request_deserializer=fedn_dot_common_dot_net_dot_grpc_dot_fedn__pb2.Heartbeat.FromString, - response_serializer=fedn_dot_common_dot_net_dot_grpc_dot_fedn__pb2.Response.SerializeToString, - ), - 'ReassignClient': grpc.unary_unary_rpc_method_handler( - servicer.ReassignClient, - request_deserializer=fedn_dot_common_dot_net_dot_grpc_dot_fedn__pb2.ReassignRequest.FromString, - response_serializer=fedn_dot_common_dot_net_dot_grpc_dot_fedn__pb2.Response.SerializeToString, - ), - 'ReconnectClient': grpc.unary_unary_rpc_method_handler( - servicer.ReconnectClient, - request_deserializer=fedn_dot_common_dot_net_dot_grpc_dot_fedn__pb2.ReconnectRequest.FromString, - response_serializer=fedn_dot_common_dot_net_dot_grpc_dot_fedn__pb2.Response.SerializeToString, - ), + 'AllianceStatusStream': grpc.unary_stream_rpc_method_handler( + 
servicer.AllianceStatusStream, + request_deserializer=fedn_dot_common_dot_net_dot_grpc_dot_fedn__pb2.ClientAvailableMessage.FromString, + response_serializer=fedn_dot_common_dot_net_dot_grpc_dot_fedn__pb2.Status.SerializeToString, + ), + 'SendStatus': grpc.unary_unary_rpc_method_handler( + servicer.SendStatus, + request_deserializer=fedn_dot_common_dot_net_dot_grpc_dot_fedn__pb2.Status.FromString, + response_serializer=fedn_dot_common_dot_net_dot_grpc_dot_fedn__pb2.Response.SerializeToString, + ), + 'ListActiveClients': grpc.unary_unary_rpc_method_handler( + servicer.ListActiveClients, + request_deserializer=fedn_dot_common_dot_net_dot_grpc_dot_fedn__pb2.ListClientsRequest.FromString, + response_serializer=fedn_dot_common_dot_net_dot_grpc_dot_fedn__pb2.ClientList.SerializeToString, + ), + 'AcceptingClients': grpc.unary_unary_rpc_method_handler( + servicer.AcceptingClients, + request_deserializer=fedn_dot_common_dot_net_dot_grpc_dot_fedn__pb2.ConnectionRequest.FromString, + response_serializer=fedn_dot_common_dot_net_dot_grpc_dot_fedn__pb2.ConnectionResponse.SerializeToString, + ), + 'SendHeartbeat': grpc.unary_unary_rpc_method_handler( + servicer.SendHeartbeat, + request_deserializer=fedn_dot_common_dot_net_dot_grpc_dot_fedn__pb2.Heartbeat.FromString, + response_serializer=fedn_dot_common_dot_net_dot_grpc_dot_fedn__pb2.Response.SerializeToString, + ), + 'ReassignClient': grpc.unary_unary_rpc_method_handler( + servicer.ReassignClient, + request_deserializer=fedn_dot_common_dot_net_dot_grpc_dot_fedn__pb2.ReassignRequest.FromString, + response_serializer=fedn_dot_common_dot_net_dot_grpc_dot_fedn__pb2.Response.SerializeToString, + ), + 'ReconnectClient': grpc.unary_unary_rpc_method_handler( + servicer.ReconnectClient, + request_deserializer=fedn_dot_common_dot_net_dot_grpc_dot_fedn__pb2.ReconnectRequest.FromString, + response_serializer=fedn_dot_common_dot_net_dot_grpc_dot_fedn__pb2.Response.SerializeToString, + ), } generic_handler = 
grpc.method_handlers_generic_handler( - 'grpc.Connector', rpc_method_handlers) + 'grpc.Connector', rpc_method_handlers) server.add_generic_rpc_handlers((generic_handler,)) -# This class is part of an EXPERIMENTAL API. + # This class is part of an EXPERIMENTAL API. class Connector(object): """Missing associated documentation comment in .proto file.""" @staticmethod def AllianceStatusStream(request, - target, - options=(), - channel_credentials=None, - call_credentials=None, - insecure=False, - compression=None, - wait_for_ready=None, - timeout=None, - metadata=None): - """ - - :param request: - :param target: - :param options: - :param channel_credentials: - :param call_credentials: - :param insecure: - :param compression: - :param wait_for_ready: - :param timeout: - :param metadata: - :return: - """ + target, + options=(), + channel_credentials=None, + call_credentials=None, + insecure=False, + compression=None, + wait_for_ready=None, + timeout=None, + metadata=None): return grpc.experimental.unary_stream(request, target, '/grpc.Connector/AllianceStatusStream', - fedn_dot_common_dot_net_dot_grpc_dot_fedn__pb2.ClientAvailableMessage.SerializeToString, - fedn_dot_common_dot_net_dot_grpc_dot_fedn__pb2.Status.FromString, - options, channel_credentials, - insecure, call_credentials, compression, wait_for_ready, timeout, - metadata) + fedn_dot_common_dot_net_dot_grpc_dot_fedn__pb2.ClientAvailableMessage.SerializeToString, + fedn_dot_common_dot_net_dot_grpc_dot_fedn__pb2.Status.FromString, + options, channel_credentials, + insecure, call_credentials, compression, wait_for_ready, timeout, metadata) @staticmethod def SendStatus(request, - target, - options=(), - channel_credentials=None, - call_credentials=None, - insecure=False, - compression=None, - wait_for_ready=None, - timeout=None, - metadata=None): - """ - - :param request: - :param target: - :param options: - :param channel_credentials: - :param call_credentials: - :param insecure: - :param compression: - :param 
wait_for_ready: - :param timeout: - :param metadata: - :return: - """ + target, + options=(), + channel_credentials=None, + call_credentials=None, + insecure=False, + compression=None, + wait_for_ready=None, + timeout=None, + metadata=None): return grpc.experimental.unary_unary(request, target, '/grpc.Connector/SendStatus', - fedn_dot_common_dot_net_dot_grpc_dot_fedn__pb2.Status.SerializeToString, - fedn_dot_common_dot_net_dot_grpc_dot_fedn__pb2.Response.FromString, - options, channel_credentials, - insecure, call_credentials, compression, wait_for_ready, timeout, metadata) + fedn_dot_common_dot_net_dot_grpc_dot_fedn__pb2.Status.SerializeToString, + fedn_dot_common_dot_net_dot_grpc_dot_fedn__pb2.Response.FromString, + options, channel_credentials, + insecure, call_credentials, compression, wait_for_ready, timeout, metadata) @staticmethod def ListActiveClients(request, - target, - options=(), - channel_credentials=None, - call_credentials=None, - insecure=False, - compression=None, - wait_for_ready=None, - timeout=None, - metadata=None): - """ - - :param request: - :param target: - :param options: - :param channel_credentials: - :param call_credentials: - :param insecure: - :param compression: - :param wait_for_ready: - :param timeout: - :param metadata: - :return: - """ + target, + options=(), + channel_credentials=None, + call_credentials=None, + insecure=False, + compression=None, + wait_for_ready=None, + timeout=None, + metadata=None): return grpc.experimental.unary_unary(request, target, '/grpc.Connector/ListActiveClients', - fedn_dot_common_dot_net_dot_grpc_dot_fedn__pb2.ListClientsRequest.SerializeToString, - fedn_dot_common_dot_net_dot_grpc_dot_fedn__pb2.ClientList.FromString, - options, channel_credentials, - insecure, call_credentials, compression, wait_for_ready, timeout, metadata) + fedn_dot_common_dot_net_dot_grpc_dot_fedn__pb2.ListClientsRequest.SerializeToString, + fedn_dot_common_dot_net_dot_grpc_dot_fedn__pb2.ClientList.FromString, + options, 
channel_credentials, + insecure, call_credentials, compression, wait_for_ready, timeout, metadata) @staticmethod def AcceptingClients(request, - target, - options=(), - channel_credentials=None, - call_credentials=None, - insecure=False, - compression=None, - wait_for_ready=None, - timeout=None, - metadata=None): - """ - - :param request: - :param target: - :param options: - :param channel_credentials: - :param call_credentials: - :param insecure: - :param compression: - :param wait_for_ready: - :param timeout: - :param metadata: - :return: - """ + target, + options=(), + channel_credentials=None, + call_credentials=None, + insecure=False, + compression=None, + wait_for_ready=None, + timeout=None, + metadata=None): return grpc.experimental.unary_unary(request, target, '/grpc.Connector/AcceptingClients', - fedn_dot_common_dot_net_dot_grpc_dot_fedn__pb2.ConnectionRequest.SerializeToString, - fedn_dot_common_dot_net_dot_grpc_dot_fedn__pb2.ConnectionResponse.FromString, - options, channel_credentials, - insecure, call_credentials, compression, wait_for_ready, timeout, metadata) + fedn_dot_common_dot_net_dot_grpc_dot_fedn__pb2.ConnectionRequest.SerializeToString, + fedn_dot_common_dot_net_dot_grpc_dot_fedn__pb2.ConnectionResponse.FromString, + options, channel_credentials, + insecure, call_credentials, compression, wait_for_ready, timeout, metadata) @staticmethod def SendHeartbeat(request, - target, - options=(), - channel_credentials=None, - call_credentials=None, - insecure=False, - compression=None, - wait_for_ready=None, - timeout=None, - metadata=None): - """ - - :param request: - :param target: - :param options: - :param channel_credentials: - :param call_credentials: - :param insecure: - :param compression: - :param wait_for_ready: - :param timeout: - :param metadata: - :return: - """ + target, + options=(), + channel_credentials=None, + call_credentials=None, + insecure=False, + compression=None, + wait_for_ready=None, + timeout=None, + metadata=None): return 
grpc.experimental.unary_unary(request, target, '/grpc.Connector/SendHeartbeat', - fedn_dot_common_dot_net_dot_grpc_dot_fedn__pb2.Heartbeat.SerializeToString, - fedn_dot_common_dot_net_dot_grpc_dot_fedn__pb2.Response.FromString, - options, channel_credentials, - insecure, call_credentials, compression, wait_for_ready, timeout, metadata) + fedn_dot_common_dot_net_dot_grpc_dot_fedn__pb2.Heartbeat.SerializeToString, + fedn_dot_common_dot_net_dot_grpc_dot_fedn__pb2.Response.FromString, + options, channel_credentials, + insecure, call_credentials, compression, wait_for_ready, timeout, metadata) @staticmethod def ReassignClient(request, - target, - options=(), - channel_credentials=None, - call_credentials=None, - insecure=False, - compression=None, - wait_for_ready=None, - timeout=None, - metadata=None): - """ - - :param request: - :param target: - :param options: - :param channel_credentials: - :param call_credentials: - :param insecure: - :param compression: - :param wait_for_ready: - :param timeout: - :param metadata: - :return: - """ + target, + options=(), + channel_credentials=None, + call_credentials=None, + insecure=False, + compression=None, + wait_for_ready=None, + timeout=None, + metadata=None): return grpc.experimental.unary_unary(request, target, '/grpc.Connector/ReassignClient', - fedn_dot_common_dot_net_dot_grpc_dot_fedn__pb2.ReassignRequest.SerializeToString, - fedn_dot_common_dot_net_dot_grpc_dot_fedn__pb2.Response.FromString, - options, channel_credentials, - insecure, call_credentials, compression, wait_for_ready, timeout, metadata) + fedn_dot_common_dot_net_dot_grpc_dot_fedn__pb2.ReassignRequest.SerializeToString, + fedn_dot_common_dot_net_dot_grpc_dot_fedn__pb2.Response.FromString, + options, channel_credentials, + insecure, call_credentials, compression, wait_for_ready, timeout, metadata) @staticmethod def ReconnectClient(request, - target, - options=(), - channel_credentials=None, - call_credentials=None, - insecure=False, - compression=None, - 
wait_for_ready=None, - timeout=None, - metadata=None): - """ - - :param request: - :param target: - :param options: - :param channel_credentials: - :param call_credentials: - :param insecure: - :param compression: - :param wait_for_ready: - :param timeout: - :param metadata: - :return: - """ + target, + options=(), + channel_credentials=None, + call_credentials=None, + insecure=False, + compression=None, + wait_for_ready=None, + timeout=None, + metadata=None): return grpc.experimental.unary_unary(request, target, '/grpc.Connector/ReconnectClient', - fedn_dot_common_dot_net_dot_grpc_dot_fedn__pb2.ReconnectRequest.SerializeToString, - fedn_dot_common_dot_net_dot_grpc_dot_fedn__pb2.Response.FromString, - options, channel_credentials, - insecure, call_credentials, compression, wait_for_ready, timeout, metadata) + fedn_dot_common_dot_net_dot_grpc_dot_fedn__pb2.ReconnectRequest.SerializeToString, + fedn_dot_common_dot_net_dot_grpc_dot_fedn__pb2.Response.FromString, + options, channel_credentials, + insecure, call_credentials, compression, wait_for_ready, timeout, metadata) class CombinerStub(object): @@ -814,45 +627,45 @@ def __init__(self, channel): channel: A grpc.Channel. 
""" self.ModelUpdateRequestStream = channel.unary_stream( - '/grpc.Combiner/ModelUpdateRequestStream', - request_serializer=fedn_dot_common_dot_net_dot_grpc_dot_fedn__pb2.ClientAvailableMessage.SerializeToString, - response_deserializer=fedn_dot_common_dot_net_dot_grpc_dot_fedn__pb2.ModelUpdateRequest.FromString, - ) + '/grpc.Combiner/ModelUpdateRequestStream', + request_serializer=fedn_dot_common_dot_net_dot_grpc_dot_fedn__pb2.ClientAvailableMessage.SerializeToString, + response_deserializer=fedn_dot_common_dot_net_dot_grpc_dot_fedn__pb2.ModelUpdateRequest.FromString, + ) self.ModelUpdateStream = channel.unary_stream( - '/grpc.Combiner/ModelUpdateStream', - request_serializer=fedn_dot_common_dot_net_dot_grpc_dot_fedn__pb2.ClientAvailableMessage.SerializeToString, - response_deserializer=fedn_dot_common_dot_net_dot_grpc_dot_fedn__pb2.ModelUpdate.FromString, - ) + '/grpc.Combiner/ModelUpdateStream', + request_serializer=fedn_dot_common_dot_net_dot_grpc_dot_fedn__pb2.ClientAvailableMessage.SerializeToString, + response_deserializer=fedn_dot_common_dot_net_dot_grpc_dot_fedn__pb2.ModelUpdate.FromString, + ) self.ModelValidationRequestStream = channel.unary_stream( - '/grpc.Combiner/ModelValidationRequestStream', - request_serializer=fedn_dot_common_dot_net_dot_grpc_dot_fedn__pb2.ClientAvailableMessage.SerializeToString, - response_deserializer=fedn_dot_common_dot_net_dot_grpc_dot_fedn__pb2.ModelValidationRequest.FromString, - ) + '/grpc.Combiner/ModelValidationRequestStream', + request_serializer=fedn_dot_common_dot_net_dot_grpc_dot_fedn__pb2.ClientAvailableMessage.SerializeToString, + response_deserializer=fedn_dot_common_dot_net_dot_grpc_dot_fedn__pb2.ModelValidationRequest.FromString, + ) self.ModelValidationStream = channel.unary_stream( - '/grpc.Combiner/ModelValidationStream', - request_serializer=fedn_dot_common_dot_net_dot_grpc_dot_fedn__pb2.ClientAvailableMessage.SerializeToString, - 
response_deserializer=fedn_dot_common_dot_net_dot_grpc_dot_fedn__pb2.ModelValidation.FromString, - ) + '/grpc.Combiner/ModelValidationStream', + request_serializer=fedn_dot_common_dot_net_dot_grpc_dot_fedn__pb2.ClientAvailableMessage.SerializeToString, + response_deserializer=fedn_dot_common_dot_net_dot_grpc_dot_fedn__pb2.ModelValidation.FromString, + ) self.SendModelUpdateRequest = channel.unary_unary( - '/grpc.Combiner/SendModelUpdateRequest', - request_serializer=fedn_dot_common_dot_net_dot_grpc_dot_fedn__pb2.ModelUpdateRequest.SerializeToString, - response_deserializer=fedn_dot_common_dot_net_dot_grpc_dot_fedn__pb2.Response.FromString, - ) + '/grpc.Combiner/SendModelUpdateRequest', + request_serializer=fedn_dot_common_dot_net_dot_grpc_dot_fedn__pb2.ModelUpdateRequest.SerializeToString, + response_deserializer=fedn_dot_common_dot_net_dot_grpc_dot_fedn__pb2.Response.FromString, + ) self.SendModelUpdate = channel.unary_unary( - '/grpc.Combiner/SendModelUpdate', - request_serializer=fedn_dot_common_dot_net_dot_grpc_dot_fedn__pb2.ModelUpdate.SerializeToString, - response_deserializer=fedn_dot_common_dot_net_dot_grpc_dot_fedn__pb2.Response.FromString, - ) + '/grpc.Combiner/SendModelUpdate', + request_serializer=fedn_dot_common_dot_net_dot_grpc_dot_fedn__pb2.ModelUpdate.SerializeToString, + response_deserializer=fedn_dot_common_dot_net_dot_grpc_dot_fedn__pb2.Response.FromString, + ) self.SendModelValidationRequest = channel.unary_unary( - '/grpc.Combiner/SendModelValidationRequest', - request_serializer=fedn_dot_common_dot_net_dot_grpc_dot_fedn__pb2.ModelValidationRequest.SerializeToString, - response_deserializer=fedn_dot_common_dot_net_dot_grpc_dot_fedn__pb2.Response.FromString, - ) + '/grpc.Combiner/SendModelValidationRequest', + request_serializer=fedn_dot_common_dot_net_dot_grpc_dot_fedn__pb2.ModelValidationRequest.SerializeToString, + response_deserializer=fedn_dot_common_dot_net_dot_grpc_dot_fedn__pb2.Response.FromString, + ) self.SendModelValidation = 
channel.unary_unary( - '/grpc.Combiner/SendModelValidation', - request_serializer=fedn_dot_common_dot_net_dot_grpc_dot_fedn__pb2.ModelValidation.SerializeToString, - response_deserializer=fedn_dot_common_dot_net_dot_grpc_dot_fedn__pb2.Response.FromString, - ) + '/grpc.Combiner/SendModelValidation', + request_serializer=fedn_dot_common_dot_net_dot_grpc_dot_fedn__pb2.ModelValidation.SerializeToString, + response_deserializer=fedn_dot_common_dot_net_dot_grpc_dot_fedn__pb2.Response.FromString, + ) class CombinerServicer(object): @@ -909,310 +722,189 @@ def SendModelValidation(self, request, context): def add_CombinerServicer_to_server(servicer, server): - """ - - :param servicer: - :param server: - """ rpc_method_handlers = { - 'ModelUpdateRequestStream': grpc.unary_stream_rpc_method_handler( - servicer.ModelUpdateRequestStream, - request_deserializer=fedn_dot_common_dot_net_dot_grpc_dot_fedn__pb2.ClientAvailableMessage.FromString, - response_serializer=fedn_dot_common_dot_net_dot_grpc_dot_fedn__pb2.ModelUpdateRequest.SerializeToString, - ), - 'ModelUpdateStream': grpc.unary_stream_rpc_method_handler( - servicer.ModelUpdateStream, - request_deserializer=fedn_dot_common_dot_net_dot_grpc_dot_fedn__pb2.ClientAvailableMessage.FromString, - response_serializer=fedn_dot_common_dot_net_dot_grpc_dot_fedn__pb2.ModelUpdate.SerializeToString, - ), - 'ModelValidationRequestStream': grpc.unary_stream_rpc_method_handler( - servicer.ModelValidationRequestStream, - request_deserializer=fedn_dot_common_dot_net_dot_grpc_dot_fedn__pb2.ClientAvailableMessage.FromString, - response_serializer=fedn_dot_common_dot_net_dot_grpc_dot_fedn__pb2.ModelValidationRequest.SerializeToString, - ), - 'ModelValidationStream': grpc.unary_stream_rpc_method_handler( - servicer.ModelValidationStream, - request_deserializer=fedn_dot_common_dot_net_dot_grpc_dot_fedn__pb2.ClientAvailableMessage.FromString, - response_serializer=fedn_dot_common_dot_net_dot_grpc_dot_fedn__pb2.ModelValidation.SerializeToString, - 
), - 'SendModelUpdateRequest': grpc.unary_unary_rpc_method_handler( - servicer.SendModelUpdateRequest, - request_deserializer=fedn_dot_common_dot_net_dot_grpc_dot_fedn__pb2.ModelUpdateRequest.FromString, - response_serializer=fedn_dot_common_dot_net_dot_grpc_dot_fedn__pb2.Response.SerializeToString, - ), - 'SendModelUpdate': grpc.unary_unary_rpc_method_handler( - servicer.SendModelUpdate, - request_deserializer=fedn_dot_common_dot_net_dot_grpc_dot_fedn__pb2.ModelUpdate.FromString, - response_serializer=fedn_dot_common_dot_net_dot_grpc_dot_fedn__pb2.Response.SerializeToString, - ), - 'SendModelValidationRequest': grpc.unary_unary_rpc_method_handler( - servicer.SendModelValidationRequest, - request_deserializer=fedn_dot_common_dot_net_dot_grpc_dot_fedn__pb2.ModelValidationRequest.FromString, - response_serializer=fedn_dot_common_dot_net_dot_grpc_dot_fedn__pb2.Response.SerializeToString, - ), - 'SendModelValidation': grpc.unary_unary_rpc_method_handler( - servicer.SendModelValidation, - request_deserializer=fedn_dot_common_dot_net_dot_grpc_dot_fedn__pb2.ModelValidation.FromString, - response_serializer=fedn_dot_common_dot_net_dot_grpc_dot_fedn__pb2.Response.SerializeToString, - ), + 'ModelUpdateRequestStream': grpc.unary_stream_rpc_method_handler( + servicer.ModelUpdateRequestStream, + request_deserializer=fedn_dot_common_dot_net_dot_grpc_dot_fedn__pb2.ClientAvailableMessage.FromString, + response_serializer=fedn_dot_common_dot_net_dot_grpc_dot_fedn__pb2.ModelUpdateRequest.SerializeToString, + ), + 'ModelUpdateStream': grpc.unary_stream_rpc_method_handler( + servicer.ModelUpdateStream, + request_deserializer=fedn_dot_common_dot_net_dot_grpc_dot_fedn__pb2.ClientAvailableMessage.FromString, + response_serializer=fedn_dot_common_dot_net_dot_grpc_dot_fedn__pb2.ModelUpdate.SerializeToString, + ), + 'ModelValidationRequestStream': grpc.unary_stream_rpc_method_handler( + servicer.ModelValidationRequestStream, + 
request_deserializer=fedn_dot_common_dot_net_dot_grpc_dot_fedn__pb2.ClientAvailableMessage.FromString, + response_serializer=fedn_dot_common_dot_net_dot_grpc_dot_fedn__pb2.ModelValidationRequest.SerializeToString, + ), + 'ModelValidationStream': grpc.unary_stream_rpc_method_handler( + servicer.ModelValidationStream, + request_deserializer=fedn_dot_common_dot_net_dot_grpc_dot_fedn__pb2.ClientAvailableMessage.FromString, + response_serializer=fedn_dot_common_dot_net_dot_grpc_dot_fedn__pb2.ModelValidation.SerializeToString, + ), + 'SendModelUpdateRequest': grpc.unary_unary_rpc_method_handler( + servicer.SendModelUpdateRequest, + request_deserializer=fedn_dot_common_dot_net_dot_grpc_dot_fedn__pb2.ModelUpdateRequest.FromString, + response_serializer=fedn_dot_common_dot_net_dot_grpc_dot_fedn__pb2.Response.SerializeToString, + ), + 'SendModelUpdate': grpc.unary_unary_rpc_method_handler( + servicer.SendModelUpdate, + request_deserializer=fedn_dot_common_dot_net_dot_grpc_dot_fedn__pb2.ModelUpdate.FromString, + response_serializer=fedn_dot_common_dot_net_dot_grpc_dot_fedn__pb2.Response.SerializeToString, + ), + 'SendModelValidationRequest': grpc.unary_unary_rpc_method_handler( + servicer.SendModelValidationRequest, + request_deserializer=fedn_dot_common_dot_net_dot_grpc_dot_fedn__pb2.ModelValidationRequest.FromString, + response_serializer=fedn_dot_common_dot_net_dot_grpc_dot_fedn__pb2.Response.SerializeToString, + ), + 'SendModelValidation': grpc.unary_unary_rpc_method_handler( + servicer.SendModelValidation, + request_deserializer=fedn_dot_common_dot_net_dot_grpc_dot_fedn__pb2.ModelValidation.FromString, + response_serializer=fedn_dot_common_dot_net_dot_grpc_dot_fedn__pb2.Response.SerializeToString, + ), } generic_handler = grpc.method_handlers_generic_handler( - 'grpc.Combiner', rpc_method_handlers) + 'grpc.Combiner', rpc_method_handlers) server.add_generic_rpc_handlers((generic_handler,)) -# This class is part of an EXPERIMENTAL API. 
+ # This class is part of an EXPERIMENTAL API. class Combiner(object): """Missing associated documentation comment in .proto file.""" @staticmethod def ModelUpdateRequestStream(request, - target, - options=(), - channel_credentials=None, - call_credentials=None, - insecure=False, - compression=None, - wait_for_ready=None, - timeout=None, - metadata=None): - """ - - :param request: - :param target: - :param options: - :param channel_credentials: - :param call_credentials: - :param insecure: - :param compression: - :param wait_for_ready: - :param timeout: - :param metadata: - :return: - """ + target, + options=(), + channel_credentials=None, + call_credentials=None, + insecure=False, + compression=None, + wait_for_ready=None, + timeout=None, + metadata=None): return grpc.experimental.unary_stream(request, target, '/grpc.Combiner/ModelUpdateRequestStream', - fedn_dot_common_dot_net_dot_grpc_dot_fedn__pb2.ClientAvailableMessage.SerializeToString, - fedn_dot_common_dot_net_dot_grpc_dot_fedn__pb2.ModelUpdateRequest.FromString, - options, channel_credentials, - insecure, call_credentials, compression, wait_for_ready, timeout, - metadata) + fedn_dot_common_dot_net_dot_grpc_dot_fedn__pb2.ClientAvailableMessage.SerializeToString, + fedn_dot_common_dot_net_dot_grpc_dot_fedn__pb2.ModelUpdateRequest.FromString, + options, channel_credentials, + insecure, call_credentials, compression, wait_for_ready, timeout, metadata) @staticmethod def ModelUpdateStream(request, - target, - options=(), - channel_credentials=None, - call_credentials=None, - insecure=False, - compression=None, - wait_for_ready=None, - timeout=None, - metadata=None): - """ - - :param request: - :param target: - :param options: - :param channel_credentials: - :param call_credentials: - :param insecure: - :param compression: - :param wait_for_ready: - :param timeout: - :param metadata: - :return: - """ + target, + options=(), + channel_credentials=None, + call_credentials=None, + insecure=False, + compression=None, 
+ wait_for_ready=None, + timeout=None, + metadata=None): return grpc.experimental.unary_stream(request, target, '/grpc.Combiner/ModelUpdateStream', - fedn_dot_common_dot_net_dot_grpc_dot_fedn__pb2.ClientAvailableMessage.SerializeToString, - fedn_dot_common_dot_net_dot_grpc_dot_fedn__pb2.ModelUpdate.FromString, - options, channel_credentials, - insecure, call_credentials, compression, wait_for_ready, timeout, - metadata) + fedn_dot_common_dot_net_dot_grpc_dot_fedn__pb2.ClientAvailableMessage.SerializeToString, + fedn_dot_common_dot_net_dot_grpc_dot_fedn__pb2.ModelUpdate.FromString, + options, channel_credentials, + insecure, call_credentials, compression, wait_for_ready, timeout, metadata) @staticmethod def ModelValidationRequestStream(request, - target, - options=(), - channel_credentials=None, - call_credentials=None, - insecure=False, - compression=None, - wait_for_ready=None, - timeout=None, - metadata=None): - """ - - :param request: - :param target: - :param options: - :param channel_credentials: - :param call_credentials: - :param insecure: - :param compression: - :param wait_for_ready: - :param timeout: - :param metadata: - :return: - """ + target, + options=(), + channel_credentials=None, + call_credentials=None, + insecure=False, + compression=None, + wait_for_ready=None, + timeout=None, + metadata=None): return grpc.experimental.unary_stream(request, target, '/grpc.Combiner/ModelValidationRequestStream', - fedn_dot_common_dot_net_dot_grpc_dot_fedn__pb2.ClientAvailableMessage.SerializeToString, - fedn_dot_common_dot_net_dot_grpc_dot_fedn__pb2.ModelValidationRequest.FromString, - options, channel_credentials, - insecure, call_credentials, compression, wait_for_ready, timeout, - metadata) + fedn_dot_common_dot_net_dot_grpc_dot_fedn__pb2.ClientAvailableMessage.SerializeToString, + fedn_dot_common_dot_net_dot_grpc_dot_fedn__pb2.ModelValidationRequest.FromString, + options, channel_credentials, + insecure, call_credentials, compression, wait_for_ready, timeout, 
metadata) @staticmethod def ModelValidationStream(request, - target, - options=(), - channel_credentials=None, - call_credentials=None, - insecure=False, - compression=None, - wait_for_ready=None, - timeout=None, - metadata=None): - """ - - :param request: - :param target: - :param options: - :param channel_credentials: - :param call_credentials: - :param insecure: - :param compression: - :param wait_for_ready: - :param timeout: - :param metadata: - :return: - """ + target, + options=(), + channel_credentials=None, + call_credentials=None, + insecure=False, + compression=None, + wait_for_ready=None, + timeout=None, + metadata=None): return grpc.experimental.unary_stream(request, target, '/grpc.Combiner/ModelValidationStream', - fedn_dot_common_dot_net_dot_grpc_dot_fedn__pb2.ClientAvailableMessage.SerializeToString, - fedn_dot_common_dot_net_dot_grpc_dot_fedn__pb2.ModelValidation.FromString, - options, channel_credentials, - insecure, call_credentials, compression, wait_for_ready, timeout, - metadata) + fedn_dot_common_dot_net_dot_grpc_dot_fedn__pb2.ClientAvailableMessage.SerializeToString, + fedn_dot_common_dot_net_dot_grpc_dot_fedn__pb2.ModelValidation.FromString, + options, channel_credentials, + insecure, call_credentials, compression, wait_for_ready, timeout, metadata) @staticmethod def SendModelUpdateRequest(request, - target, - options=(), - channel_credentials=None, - call_credentials=None, - insecure=False, - compression=None, - wait_for_ready=None, - timeout=None, - metadata=None): - """ - - :param request: - :param target: - :param options: - :param channel_credentials: - :param call_credentials: - :param insecure: - :param compression: - :param wait_for_ready: - :param timeout: - :param metadata: - :return: - """ + target, + options=(), + channel_credentials=None, + call_credentials=None, + insecure=False, + compression=None, + wait_for_ready=None, + timeout=None, + metadata=None): return grpc.experimental.unary_unary(request, target, 
'/grpc.Combiner/SendModelUpdateRequest', - fedn_dot_common_dot_net_dot_grpc_dot_fedn__pb2.ModelUpdateRequest.SerializeToString, - fedn_dot_common_dot_net_dot_grpc_dot_fedn__pb2.Response.FromString, - options, channel_credentials, - insecure, call_credentials, compression, wait_for_ready, timeout, metadata) + fedn_dot_common_dot_net_dot_grpc_dot_fedn__pb2.ModelUpdateRequest.SerializeToString, + fedn_dot_common_dot_net_dot_grpc_dot_fedn__pb2.Response.FromString, + options, channel_credentials, + insecure, call_credentials, compression, wait_for_ready, timeout, metadata) @staticmethod def SendModelUpdate(request, - target, - options=(), - channel_credentials=None, - call_credentials=None, - insecure=False, - compression=None, - wait_for_ready=None, - timeout=None, - metadata=None): - """ - - :param request: - :param target: - :param options: - :param channel_credentials: - :param call_credentials: - :param insecure: - :param compression: - :param wait_for_ready: - :param timeout: - :param metadata: - :return: - """ + target, + options=(), + channel_credentials=None, + call_credentials=None, + insecure=False, + compression=None, + wait_for_ready=None, + timeout=None, + metadata=None): return grpc.experimental.unary_unary(request, target, '/grpc.Combiner/SendModelUpdate', - fedn_dot_common_dot_net_dot_grpc_dot_fedn__pb2.ModelUpdate.SerializeToString, - fedn_dot_common_dot_net_dot_grpc_dot_fedn__pb2.Response.FromString, - options, channel_credentials, - insecure, call_credentials, compression, wait_for_ready, timeout, metadata) + fedn_dot_common_dot_net_dot_grpc_dot_fedn__pb2.ModelUpdate.SerializeToString, + fedn_dot_common_dot_net_dot_grpc_dot_fedn__pb2.Response.FromString, + options, channel_credentials, + insecure, call_credentials, compression, wait_for_ready, timeout, metadata) @staticmethod def SendModelValidationRequest(request, - target, - options=(), - channel_credentials=None, - call_credentials=None, - insecure=False, - compression=None, - wait_for_ready=None, 
- timeout=None, - metadata=None): - """ - - :param request: - :param target: - :param options: - :param channel_credentials: - :param call_credentials: - :param insecure: - :param compression: - :param wait_for_ready: - :param timeout: - :param metadata: - :return: - """ + target, + options=(), + channel_credentials=None, + call_credentials=None, + insecure=False, + compression=None, + wait_for_ready=None, + timeout=None, + metadata=None): return grpc.experimental.unary_unary(request, target, '/grpc.Combiner/SendModelValidationRequest', - fedn_dot_common_dot_net_dot_grpc_dot_fedn__pb2.ModelValidationRequest.SerializeToString, - fedn_dot_common_dot_net_dot_grpc_dot_fedn__pb2.Response.FromString, - options, channel_credentials, - insecure, call_credentials, compression, wait_for_ready, timeout, metadata) + fedn_dot_common_dot_net_dot_grpc_dot_fedn__pb2.ModelValidationRequest.SerializeToString, + fedn_dot_common_dot_net_dot_grpc_dot_fedn__pb2.Response.FromString, + options, channel_credentials, + insecure, call_credentials, compression, wait_for_ready, timeout, metadata) @staticmethod def SendModelValidation(request, - target, - options=(), - channel_credentials=None, - call_credentials=None, - insecure=False, - compression=None, - wait_for_ready=None, - timeout=None, - metadata=None): - """ - - :param request: - :param target: - :param options: - :param channel_credentials: - :param call_credentials: - :param insecure: - :param compression: - :param wait_for_ready: - :param timeout: - :param metadata: - :return: - """ + target, + options=(), + channel_credentials=None, + call_credentials=None, + insecure=False, + compression=None, + wait_for_ready=None, + timeout=None, + metadata=None): return grpc.experimental.unary_unary(request, target, '/grpc.Combiner/SendModelValidation', - fedn_dot_common_dot_net_dot_grpc_dot_fedn__pb2.ModelValidation.SerializeToString, - fedn_dot_common_dot_net_dot_grpc_dot_fedn__pb2.Response.FromString, - options, channel_credentials, - 
insecure, call_credentials, compression, wait_for_ready, timeout, metadata) + fedn_dot_common_dot_net_dot_grpc_dot_fedn__pb2.ModelValidation.SerializeToString, + fedn_dot_common_dot_net_dot_grpc_dot_fedn__pb2.Response.FromString, + options, channel_credentials, + insecure, call_credentials, compression, wait_for_ready, timeout, metadata) diff --git a/fedn/fedn/common/net/grpc/server.py b/fedn/fedn/common/net/grpc/server.py index d85a4bf04..dbe260b7b 100644 --- a/fedn/fedn/common/net/grpc/server.py +++ b/fedn/fedn/common/net/grpc/server.py @@ -27,7 +27,7 @@ def __init__(self, servicer, modelservicer, config): rpc.add_ControlServicer_to_server(servicer, self.server) if config['secure']: - print(f"Creating secure gRPCS server using certificate: {config['certificate']}", flush=True) + print(f"Creating secure gRPCS server using certificate: {config['certificate']}", flush=True) server_credentials = grpc.ssl_server_credentials( ((config['key'], config['certificate'],),)) self.server.add_secure_port( diff --git a/fedn/fedn/common/storage/models/modelstorage.py b/fedn/fedn/common/storage/models/modelstorage.py index 423ac411b..c15f47c87 100644 --- a/fedn/fedn/common/storage/models/modelstorage.py +++ b/fedn/fedn/common/storage/models/modelstorage.py @@ -5,37 +5,66 @@ class ModelStorage(ABC): @abstractmethod def exist(self, model_id): - """ + """ Check if model exists in storage - :param model_id: + :param model_id: The model id + :type model_id: str + :return: True if model exists, False otherwise + :rtype: bool """ pass @abstractmethod def get(self, model_id): - """ + """ Get model from storage - :param model_id: + :param model_id: The model id + :type model_id: str + :return: The model + :rtype: object """ pass - # @abstractmethod - # def set(self, model_id, model): - # pass - @abstractmethod def get_meta(self, model_id): - """ + """ Get model metadata from storage - :param model_id: + :param model_id: The model id + :type model_id: str + :return: The model metadata +
:rtype: dict """ pass @abstractmethod def set_meta(self, model_id, model_metadata): + """ Set model metadata in storage + + :param model_id: The model id + :type model_id: str + :param model_metadata: The model metadata + :type model_metadata: dict + :return: True if successful, False otherwise + :rtype: bool """ + pass + + @abstractmethod + def delete(self, model_id): + """ Delete model from storage + + :param model_id: The model id + :type model_id: str + :return: True if successful, False otherwise + :rtype: bool + """ + pass + + @abstractmethod + def delete_all(self): + """ Delete all models from storage - :param model_id: - :param model_metadata: + :return: True if successful, False otherwise + :rtype: bool """ pass diff --git a/fedn/fedn/common/storage/models/tempmodelstorage.py b/fedn/fedn/common/storage/models/tempmodelstorage.py index f87c516b0..ea8a65928 100644 --- a/fedn/fedn/common/storage/models/tempmodelstorage.py +++ b/fedn/fedn/common/storage/models/tempmodelstorage.py @@ -85,3 +85,45 @@ def set_meta(self, model_id, model_metadata): :param model_metadata: """ self.models_metadata.update({model_id: model_metadata}) + + # Delete model from disk + def delete(self, model_id): + """ Delete model from temp disk/storage + + :param model_id: model id + :type model_id: str + :return: True if successful, False otherwise + :rtype: bool + """ + try: + os.remove(os.path.join(self.default_dir, str(model_id))) + print("TEMPMODELSTORAGE: Deleted model with id: {}".format(model_id), flush=True) + # Delete id from metadata and models dict + del self.models_metadata[model_id] + del self.models[model_id] + except FileNotFoundError: + print("Could not delete model from disk. 
File not found!", flush=True) + return False + return True + + # Delete all models from disk + def delete_all(self): + """ Delete all models from temp disk/storage + + :return: True if successful, False otherwise + :rtype: bool + """ + ids_pop = [] + for model_id in self.models.keys(): + try: + os.remove(os.path.join(self.default_dir, str(model_id))) + print("TEMPMODELSTORAGE: Deleted model with id: {}".format(model_id), flush=True) + # Add id to list of ids to pop/delete from metadata and models dict + ids_pop.append(model_id) + except FileNotFoundError: + print("TEMPMODELSTORAGE: Could not delete model {} from disk. File not found!".format(model_id), flush=True) + # Remove id from metadata and models dict + for model_id in ids_pop: + del self.models_metadata[model_id] + del self.models[model_id] + return True diff --git a/fedn/fedn/common/storage/models/tests/test_tempmodelstorage.py b/fedn/fedn/common/storage/models/tests/test_tempmodelstorage.py new file mode 100644 index 000000000..19d88899e --- /dev/null +++ b/fedn/fedn/common/storage/models/tests/test_tempmodelstorage.py @@ -0,0 +1,104 @@ +import os +import unittest +from unittest.mock import MagicMock, patch + +from fedn.common.storage.models.tempmodelstorage import TempModelStorage + + +class TestTempModelStorage(unittest.TestCase): + + def setUp(self): + # Setup mock for os.environ.get for FEDN_MODEL_DIR + self.patcher = patch('os.environ.get') + self.mock_get = self.patcher.start() + # Return value of mock should same folder as this file + self.mock_get.return_value = os.path.dirname(os.path.realpath(__file__)) + + # Setup storage + self.storage = TempModelStorage() + + # add mock data to storage dicts + self.storage.models = {"model_id1": "model1", "model_id2": "model2"} + self.storage.models_metadata = {"model_id1": "model1", "model_id2": "model2"} + + # Create mock file as BytesIO object + self.mock_file = MagicMock() + self.mock_file.read.return_value = "model1" + self.mock_file.seek.return_value = 0 
+ self.mock_file.write.return_value = None + + # Test that the storage is initialized with the correct default directory and data structures + def test_init(self): + self.assertEqual(self.storage.default_dir, os.path.dirname(os.path.realpath(__file__))) + self.assertEqual(self.storage.models, {"model_id1": "model1", "model_id2": "model2"}) + self.assertEqual(self.storage.models_metadata, {"model_id1": "model1", "model_id2": "model2"}) + + # Test that the storage can get a model + + def test_get(self): + """ Test that the storage can get a model """ + + # Test that it returns None if model_id does not exist + self.assertEqual(self.storage.get("model_id3"), None) + + # TODO: Patch fedn.ModelStatus.OK and open to return True and mock_file respectively + + def test_get_metadata(self): + """ Test that the storage can get a model metadata """ + + # Test that it returns KeyError if model_id does not exist + with self.assertRaises(KeyError): + self.storage.get_meta("model_id3") + + # Test that it returns the correct metadata if model_id exists + self.assertEqual(self.storage.get_meta("model_id1"), "model1") + + def test_set_meta(self): + """ Test that the storage can set a model metadata """ + + # Test that it returns the correct metadata if model_id exists + self.storage.set_meta("model_id1", "model3") + self.assertEqual(self.storage.get_meta("model_id1"), "model3") + + def test_delete(self): + """ Test that the storage can delete a model """ + + # Test that it returns False if model_id does not exist + self.assertEqual(self.storage.delete("model_id3"), False) + + # Patch os.remove to return True + with patch('os.remove', return_value=True) as mock_remove: + + # Test that it returns True if model_id exists + self.assertEqual(self.storage.delete("model_id1"), True) + + # Test that os.remove is called with the correct path + mock_remove.assert_called_with(os.path.join(self.storage.default_dir, "model_id1")) + + # Test that the model is removed from the storage + 
self.assertEqual(self.storage.models, {"model_id2": "model2"}) + + # Test that the model metadata is removed from the storage + self.assertEqual(self.storage.models_metadata, {"model_id2": "model2"}) + + def test_delete_all(self): + """ Test that the storage can delete all models """ + + # Patch os.remove to return True + with patch('os.remove', return_value=True) as mock_remove: + + # Test that it returns True if model_id exists + self.assertEqual(self.storage.delete_all(), True) + + # Test that os.remove is called with the correct path + mock_remove.assert_called_with(os.path.join(self.storage.default_dir, "model_id2")) + + # Test that the model is removed from the storage + self.assertEqual(self.storage.models, {}) + + # Test that the model metadata is removed from the storage + self.assertEqual(self.storage.models_metadata, {}) + + +if __name__ == '__main__': + unittest.main() diff --git a/fedn/fedn/common/storage/s3/miniorepo.py b/fedn/fedn/common/storage/s3/miniorepo.py index 9341704e6..154cea7e9 100644 --- a/fedn/fedn/common/storage/s3/miniorepo.py +++ b/fedn/fedn/common/storage/s3/miniorepo.py @@ -62,11 +62,13 @@ def __init__(self, config): self.create_bucket(self.bucket) def create_bucket(self, bucket_name): - """ + """ Create a new bucket. If bucket exists, do nothing. 
- :param bucket_name: + :param bucket_name: The name of the bucket + :type bucket_name: str """ found = self.client.bucket_exists(bucket_name) + if not found: try: self.client.make_bucket(bucket_name) diff --git a/fedn/fedn/common/storage/s3/s3repo.py b/fedn/fedn/common/storage/s3/s3repo.py index e660e9124..1d673c3da 100644 --- a/fedn/fedn/common/storage/s3/s3repo.py +++ b/fedn/fedn/common/storage/s3/s3repo.py @@ -48,7 +48,7 @@ def set_model(self, model, is_file=True): raise return str(model_id) - def set_compute_context(self, name, compute_package, is_file=True): + def set_compute_package(self, name, compute_package, is_file=True): """ :param name: @@ -75,7 +75,7 @@ def get_compute_package(self, compute_package): raise return data - def delete_compute_context(self, compute_package): + def delete_compute_package(self, compute_package): """ :param compute_package: diff --git a/fedn/fedn/common/tracer/mongotracer.py b/fedn/fedn/common/tracer/mongotracer.py index 5cf6a93a8..aa5c0810b 100644 --- a/fedn/fedn/common/tracer/mongotracer.py +++ b/fedn/fedn/common/tracer/mongotracer.py @@ -1,7 +1,6 @@ -import threading +import uuid from datetime import datetime -import psutil from google.protobuf.json_format import MessageToDict from fedn.common.storage.db.mongo import connect_to_mongodb @@ -9,7 +8,7 @@ class MongoTracer(Tracer): - """ + """ Utility for reporting and tracking state in the statestore.
""" @@ -17,171 +16,110 @@ def __init__(self, mongo_config, network_id): try: self.mdb = connect_to_mongodb(mongo_config, network_id) self.status = self.mdb['control.status'] - self.round_time = self.mdb['control.round_time'] - self.psutil_monitoring = self.mdb['control.psutil_monitoring'] - self.model_trail = self.mdb['control.model_trail'] - self.latest_model = self.mdb['control.latest_model'] - self.combiner_round_time = self.mdb['control.combiner_round_time'] - # self.combiner_queue_length = self.mdb['control.combiner_queue_length'] - self.round = self.mdb['control.round'] + self.rounds = self.mdb['control.rounds'] + self.sessions = self.mdb['control.sessions'] + self.validations = self.mdb['control.validations'] + self.clients = self.mdb['network.clients'] except Exception as e: print("FAILED TO CONNECT TO MONGO, {}".format(e), flush=True) self.status = None raise - def report(self, msg): - """ + def report_status(self, msg): + """Write status message to the database. - :param msg: + :param msg: The status message. """ data = MessageToDict(msg, including_default_value_fields=True) - print("LOG: \n {} \n".format(data), flush=True) - if self.status is not None: self.status.insert_one(data) - def drop_round_time(self): - """ - - """ - if self.round_time: - self.round_time.drop() - - def drop_ps_util_monitor(self): - """ + def report_validation(self, validation): + """Write model validation to the database. + :param validation: The model validation. """ - if self.psutil_monitoring: - self.psutil_monitoring.drop() + data = MessageToDict(validation, including_default_value_fields=True) - def drop_model_trail(self): - """ - - """ - if self.model_trail: - self.model_trail.drop() - - def drop_latest_model(self): - """ - - """ - if self.latest_model: - self.latest_model.drop() + if self.validations is not None: + self.validations.insert_one(data) def drop_status(self): - """ + """Drop the status collection. 
""" if self.status: self.status.drop() - def drop_combiner_round_time(self): - """ + def create_session(self, id=None): + """ Create a new session. - """ - if self.combiner_round_time: - self.combiner_round_time.drop() + :param id: The ID of the created session. + :type id: uuid, str - def drop_combiner_round(self): """ + if not id: + id = uuid.uuid4() + data = {'session_id': str(id)} + self.sessions.insert_one(data) - """ - if self.round: - self.round.drop() + def create_round(self, round_data): + """ Create a new round. - def set_latest_time(self, round, round_time): + :param round_data: Dictionary with round data. + :type round_data: dict """ + # TODO: Add check if round_id already exists + self.rounds.insert_one(round_data) - :param round: - :param round_time: - """ - self.round_time.update_one({'key': 'round_time'}, { - '$push': {'round': round}}, True) - self.round_time.update_one({'key': 'round_time'}, { - '$push': {'round_time': round_time}}, True) + def set_session_config(self, id, config): + self.sessions.update_one({'session_id': str(id)}, { + '$push': {'session_config': config}}, True) - def set_combiner_time(self, round, round_time): + def set_round_combiner_data(self, data): """ - :param round: - :param round_time: + :param data: The combiner data + :type data: dict """ - self.combiner_round_time.update_one({'key': 'combiner_round_time'}, { - '$push': {'round': round}}, True) - self.combiner_round_time.update_one({'key': 'combiner_round_time'}, { - '$push': {'round_time': round_time}}, True) - - # def set_combiner_queue_length(self,timestamp,ql): - # self.combiner_queue_length({'key': 'combiner_queue_length'}, {'$push': {'queue_length': ql}}, True) - # self.combiner_queue_length.update({'key': 'combiner_queue_length'}, {'$push': {'timestamp': timestamp}}, True) + self.rounds.update_one({'round_id': str(data['round_id'])}, { + '$push': {'combiners': data}}, True) - # Round statistics - def set_round_meta(self, round_meta): + def set_round_config(self, 
round_id, round_config): """ :param round_meta: """ - self.round.update_one({'key': str(round_meta['round_id'])}, { - '$push': {'combiners': round_meta}}, True) + self.rounds.update_one({'round_id': round_id}, { + '$set': {'round_config': round_config}}, True) - def set_round_meta_reducer(self, round_meta): + def set_round_status(self, round_id, round_status): """ :param round_meta: """ - self.round.update_one({'key': str(round_meta['round_id'])}, { - '$push': {'reducer': round_meta}}, True) - - def get_latest_round(self): - """ + self.rounds.update_one({'round_id': round_id}, { + '$set': {'status': round_status}}, True) - :return: + def set_round_data(self, round_id, round_data): """ - for post in self.round_time.find({'key': 'round_time'}): - last_round = post['round'][-1] - return last_round - def ps_util_monitor(self, round=None): - """ - - :param round: - """ - global running - running = True - currentProcess = psutil.Process() - # start loop - while running: - cpu_percents = currentProcess.cpu_percent(interval=1) - mem_percents = currentProcess.memory_percent() - ps_time = str(datetime.now()) - - self.psutil_monitoring.update_one({'key': 'cpu_mem_usage'}, { - '$push': {'cpu': cpu_percents}}, True) - self.psutil_monitoring.update_one({'key': 'cpu_mem_usage'}, { - '$push': {'mem': mem_percents}}, True) - self.psutil_monitoring.update_one({'key': 'cpu_mem_usage'}, { - '$push': {'time': ps_time}}, True) - self.psutil_monitoring.update_one({'key': 'cpu_mem_usage'}, { - '$push': {'round': round}}, True) - - def start_monitor(self, round=None): - """ - - :param round: + :param round_meta: """ - global t - # create thread and start it - t = threading.Thread(target=self.ps_util_monitor, args=[round]) - t.start() + self.rounds.update_one({'round_id': round_id}, { + '$set': {'round_data': round_data}}, True) - def stop_monitor(self): + def update_client_status(self, client_name, status): + """ Update client status in statestore. 
+ :param client_name: The client name + :type client_name: str + :param status: The client status + :type status: str + :return: None """ + datetime_now = datetime.now() + filter_query = {"name": client_name} - """ - global running - global t - # use `running` to stop loop in thread so thread will end - running = False - # wait for thread's end - t.join() + update_query = {"$set": {"last_seen": datetime_now, "status": status}} + self.clients.update_one(filter_query, update_query) diff --git a/fedn/fedn/common/tracer/tracer.py b/fedn/fedn/common/tracer/tracer.py index c5381ff81..95b5fec59 100644 --- a/fedn/fedn/common/tracer/tracer.py +++ b/fedn/fedn/common/tracer/tracer.py @@ -3,7 +3,7 @@ class Tracer(ABC): @abstractmethod - def report(self, msg): + def report_status(self, msg): """ :param msg: diff --git a/fedn/fedn/network/__init__.py b/fedn/fedn/network/__init__.py new file mode 100644 index 000000000..ec5dfd71a --- /dev/null +++ b/fedn/fedn/network/__init__.py @@ -0,0 +1,3 @@ +""" The statestore package is responsible for storing various states of the federated network. Such as announced combiners and assigned clients. It also stores metadata about +models, rounds, sessions, compute packages and model validations. """ +# flake8: noqa diff --git a/fedn/fedn/network/api/__init__.py b/fedn/fedn/network/api/__init__.py new file mode 100644 index 000000000..9cb788d09 --- /dev/null +++ b/fedn/fedn/network/api/__init__.py @@ -0,0 +1,3 @@ +""" API module for the FEDn network. Includes a REST-API server to interact with the controller +and statestore.""" +# flake8: noqa diff --git a/fedn/fedn/network/api/client.py b/fedn/fedn/network/api/client.py new file mode 100644 index 000000000..58fc27304 --- /dev/null +++ b/fedn/fedn/network/api/client.py @@ -0,0 +1,275 @@ +import requests + +__all__ = ['APIClient'] + + +class APIClient: + """ An API client for interacting with the statestore and controller. + + :param host: The host of the api server. 
+ :type host: str + :param port: The port of the api server. + :type port: int + :param secure: Whether to use https. + :type secure: bool + :param verify: Whether to verify the server certificate. + :type verify: bool + """ + + def __init__(self, host, port, secure=False, verify=False): + self.host = host + self.port = port + self.secure = secure + self.verify = verify + + def _get_url(self, endpoint): + if self.secure: + protocol = 'https' + else: + protocol = 'http' + return f'{protocol}://{self.host}:{self.port}/{endpoint}' + + def get_model_trail(self): + """ Get the model trail. + + :return: The model trail as dict including commit timestamp. + :rtype: dict + """ + response = requests.get(self._get_url('get_model_trail'), verify=self.verify) + return response.json() + + def list_clients(self): + """ Get all clients from the statestore. + + return: All clients. + rtype: dict + """ + response = requests.get(self._get_url('list_clients')) + return response.json() + + def get_active_clients(self, combiner_id): + """ Get all active clients from the statestore. + + :param combiner_id: The combiner id to get active clients for. + :type combiner_id: str + :return: All active clients. + :rtype: dict + """ + response = requests.get(self._get_url('get_active_clients'), params={'combiner': combiner_id}, verify=self.verify) + return response.json() + + def get_client_config(self, checksum=True): + """ Get the controller configuration. Optionally include the checksum. + The config is used for clients to connect to the controller and ask for combiner assignment. + + :param checksum: Whether to include the checksum of the package. + :type checksum: bool + :return: The client configuration. + :rtype: dict + """ + response = requests.get(self._get_url('get_client_config'), params={'checksum': checksum}, verify=self.verify) + return response.json() + + def list_combiners(self): + """ Get all combiners in the network. + + :return: All combiners with info. 
+ :rtype: dict + """ + response = requests.get(self._get_url('list_combiners')) + return response.json() + + def get_combiner(self, combiner_id): + """ Get a combiner from the statestore. + + :param combiner_id: The combiner id to get. + :type combiner_id: str + :return: The combiner info. + :rtype: dict + """ + response = requests.get(self._get_url(f'get_combiner?combiner={combiner_id}'), verify=self.verify) + return response.json() + + def list_rounds(self): + """ Get all rounds from the statestore. + + :return: All rounds with config and metrics. + :rtype: dict + """ + response = requests.get(self._get_url('list_rounds')) + return response.json() + + def get_round(self, round_id): + """ Get a round from the statestore. + + :param round_id: The round id to get. + :type round_id: str + :return: The round config and metrics. + :rtype: dict + """ + response = requests.get(self._get_url(f'get_round?round_id={round_id}'), verify=self.verify) + return response.json() + + def start_session(self, session_id=None, round_timeout=180, rounds=5, round_buffer_size=-1, delete_models=True, + validate=True, helper='kerashelper', min_clients=1, requested_clients=8): + """ Start a new session. + + :param session_id: The session id to start. + :type session_id: str + :param round_timeout: The round timeout to use in seconds. + :type round_timeout: int + :param rounds: The number of rounds to perform. + :type rounds: int + :param round_buffer_size: The round buffer size to use. + :type round_buffer_size: int + :param delete_models: Whether to delete models after each round at combiner (save storage). + :type delete_models: bool + :param validate: Whether to validate the model after each round. + :type validate: bool + :param helper: The helper type to use. + :type helper: str + :param min_clients: The minimum number of clients required. + :type min_clients: int + :param requested_clients: The requested number of clients. 
+ :type requested_clients: int + :return: A dict with success or failure message and session config. + :rtype: dict + """ + response = requests.post(self._get_url('start_session'), json={ + 'session_id': session_id, + 'round_timeout': round_timeout, + 'rounds': rounds, + 'round_buffer_size': round_buffer_size, + 'delete_models': delete_models, + 'validate': validate, + 'helper': helper, + 'min_clients': min_clients, + 'requested_clients': requested_clients + }, verify=self.verify + ) + return response.json() + + def list_sessions(self): + """ Get all sessions from the statestore. + + :return: All sessions in dict. + :rtype: dict + """ + response = requests.get(self._get_url('list_sessions'), verify=self.verify) + return response.json() + + def get_session(self, session_id): + """ Get a session from the statestore. + + :param session_id: The session id to get. + :type session_id: str + :return: The session as a json object. + :rtype: dict + """ + response = requests.get(self._get_url(f'get_session?session_id={session_id}'), self.verify) + return response.json() + + def set_package(self, path, helper): + """ Set the compute package in the statestore. + + :param path: The file path of the compute package to set. + :type path: str + :param helper: The helper type to use. + :type helper: str + :return: A dict with success or failure message. + :rtype: dict + """ + with open(path, 'rb') as file: + response = requests.post(self._get_url('set_package'), files={'file': file}, data={'helper': helper}, verify=self.verify) + return response.json() + + def get_package(self): + """ Get the compute package from the statestore. + + :return: The compute package with info. + :rtype: dict + """ + response = requests.get(self._get_url('get_package'), verify=self.verify) + return response.json() + + def download_package(self, path): + """ Download the compute package. + + :param path: The path to download the compute package to. 
+ :type path: str + :return: Message with success or failure. + :rtype: dict + """ + response = requests.get(self._get_url('download_package'), verify=self.verify) + if response.status_code == 200: + with open(path, 'wb') as file: + file.write(response.content) + return {'success': True, 'message': 'Package downloaded successfully.'} + else: + return {'success': False, 'message': 'Failed to download package.'} + + def get_package_checksum(self): + """ Get the checksum of the compute package. + + :return: The checksum. + :rtype: dict + """ + response = requests.get(self._get_url('get_package_checksum'), verify=self.verify) + return response.json() + + def get_latest_model(self): + """ Get the latest model from the statestore. + + :return: The latest model id. + :rtype: dict + """ + response = requests.get(self._get_url('get_latest_model'), verify=self.verify) + return response.json() + + def get_initial_model(self): + """ Get the initial model from the statestore. + + :return: The initial model id. + :rtype: dict + """ + response = requests.get(self._get_url('get_initial_model'), verify=self.verify) + return response.json() + + def set_initial_model(self, path): + """ Set the initial model in the statestore and upload to model repository. + + :param path: The file path of the initial model to set. + :type path: str + :return: A dict with success or failure message. + :rtype: dict + """ + with open(path, 'rb') as file: + response = requests.post(self._get_url('set_initial_model'), files={'file': file}, verify=self.verify) + return response.json() + + def get_controller_status(self): + """ Get the status of the controller. + + :return: The status of the controller. + :rtype: dict + """ + response = requests.get(self._get_url('get_controller_status'), verify=self.verify) + return response.json() + + def get_events(self, **kwargs): + """ Get the events from the statestore. Pass kwargs to filter events. 
+ + :return: The events in dict + :rtype: dict + """ + response = requests.get(self._get_url('get_events'), params=kwargs, verify=self.verify) + return response.json() + + def list_validations(self, **kwargs): + """ Get all validations from the statestore. Pass kwargs to filter validations. + + :return: All validations in dict. + :rtype: dict + """ + response = requests.get(self._get_url('list_validations'), params=kwargs, verify=self.verify) + return response.json() diff --git a/fedn/fedn/network/api/interface.py b/fedn/fedn/network/api/interface.py new file mode 100644 index 000000000..e56462493 --- /dev/null +++ b/fedn/fedn/network/api/interface.py @@ -0,0 +1,892 @@ +import base64 +import copy +import os +import threading +import uuid +from io import BytesIO + +from flask import jsonify, send_from_directory +from werkzeug.utils import secure_filename + +from fedn.common.config import get_controller_config, get_network_config +from fedn.network.combiner.interfaces import (CombinerInterface, + CombinerUnavailableError) +from fedn.network.dashboard.plots import Plot +from fedn.network.state import ReducerState, ReducerStateToString +from fedn.utils.checksum import sha + +__all__ = ("API",) + + +class API: + """The API class is a wrapper for the statestore. It is used to expose the statestore to the network API.""" + + def __init__(self, statestore, control): + self.statestore = statestore + self.control = control + self.name = "api" + + def _to_dict(self): + """Convert the object to a dict. + + ::return: The object as a dict. + ::rtype: dict + """ + data = {"name": self.name} + return data + + def _get_combiner_report(self, combiner_id): + """Get report response from combiner. + + :param combiner_id: The combiner id to get report response from. + :type combiner_id: str + ::return: The report response from combiner. 
+ ::rtype: dict + """ + # Get CombinerInterface (fedn.network.combiner.inferface.CombinerInterface) for combiner_id + combiner = self.control.network.get_combiner(combiner_id) + report = combiner.report + return report + + def _allowed_file_extension( + self, filename, ALLOWED_EXTENSIONS={"gz", "bz2", "tar", "zip", "tgz"} + ): + """Check if file extension is allowed. + + :param filename: The filename to check. + :type filename: str + :return: True if file extension is allowed, else False. + :rtype: bool + """ + + return ( + "." in filename + and filename.rsplit(".", 1)[1].lower() in ALLOWED_EXTENSIONS + ) + + def get_clients(self, limit=None, skip=None, status=False): + """Get all clients from the statestore. + + :return: All clients as a json response. + :rtype: :class:`flask.Response` + """ + # Will return list of ObjectId + response = self.statestore.list_clients(limit, skip, status) + + arr = [] + + for element in response["result"]: + obj = { + "id": element["name"], + "combiner": element["combiner"], + "combiner_preferred": element["combiner_preferred"], + "ip": element["ip"], + "status": element["status"], + "last_seen": element["last_seen"] if "last_seen" in element else "", + } + + arr.append(obj) + + result = {"result": arr, "count": response["count"]} + + return jsonify(result) + + def get_active_clients(self, combiner_id): + """Get all active clients, i.e that are assigned to a combiner. + A report request to the combiner is neccessary to determine if a client is active or not. + + :param combiner_id: The combiner id to get active clients for. + :type combiner_id: str + :return: All active clients as a json response. 
+ :rtype: :class:`flask.Response` + """ + # Get combiner interface object + combiner = self.control.network.get_combiner(combiner_id) + if combiner is None: + return ( + jsonify( + { + "success": False, + "message": f"Combiner {combiner_id} not found.", + } + ), + 404, + ) + response = combiner.list_active_clients() + return response + + def get_all_combiners(self, limit=None, skip=None): + """Get all combiners from the statestore. + + :return: All combiners as a json response. + :rtype: :class:`flask.Response` + """ + # Will return list of ObjectId + projection = {"name": True, "updated_at": True} + response = self.statestore.get_combiners(limit, skip, projection=projection) + arr = [] + for element in response["result"]: + obj = { + "name": element["name"], + "updated_at": element["updated_at"], + } + + arr.append(obj) + + result = {"result": arr, "count": response["count"]} + + return jsonify(result) + + def get_combiner(self, combiner_id): + """Get a combiner from the statestore. + + :param combiner_id: The combiner id to get. + :type combiner_id: str + :return: The combiner info dict as a json response. + :rtype: :class:`flask.Response` + """ + # Will return ObjectId + object = self.statestore.get_combiner(combiner_id) + payload = {} + id = object["name"] + info = { + "address": object["address"], + "fqdn": object["fqdn"], + "parent_reducer": object["parent"]["name"], + "port": object["port"], + "report": object["report"], + "updated_at": object["updated_at"], + } + payload[id] = info + + return jsonify(payload) + + def get_all_sessions(self, limit=None, skip=None): + """Get all sessions from the statestore. + + :return: All sessions as a json response. 
+ :rtype: :class:`flask.Response` + """ + sessions_object = self.statestore.get_sessions(limit, skip) + if sessions_object is None: + return ( + jsonify({"success": False, "message": "No sessions found."}), + 404, + ) + arr = [] + for element in sessions_object["result"]: + obj = element["session_config"][0] + arr.append(obj) + + result = {"result": arr, "count": sessions_object["count"]} + + return jsonify(result) + + def get_session(self, session_id): + """Get a session from the statestore. + + :param session_id: The session id to get. + :type session_id: str + :return: The session info dict as a json response. + :rtype: :class:`flask.Response` + """ + session_object = self.statestore.get_session(session_id) + if session_object is None: + return ( + jsonify( + { + "success": False, + "message": f"Session {session_id} not found.", + } + ), + 404, + ) + payload = {} + id = session_object["session_id"] + info = session_object["session_config"][0] + payload[id] = info + return jsonify(payload) + + def set_compute_package(self, file, helper_type): + """Set the compute package in the statestore. + + :param file: The compute package to set. + :type file: file + :return: A json response with success or failure message. + :rtype: :class:`flask.Response` + """ + + if file and self._allowed_file_extension(file.filename): + filename = secure_filename(file.filename) + # TODO: make configurable, perhaps in config.py or package.py + file_path = os.path.join("/app/client/package/", filename) + file.save(file_path) + + if ( + self.control.state() == ReducerState.instructing + or self.control.state() == ReducerState.monitoring + ): + return ( + jsonify( + { + "success": False, + "message": "Reducer is in instructing or monitoring state." 
+ "Cannot set compute package.", + } + ), + 400, + ) + + self.control.set_compute_package(filename, file_path) + self.statestore.set_helper(helper_type) + + success = self.statestore.set_compute_package(filename) + if not success: + return ( + jsonify( + { + "success": False, + "message": "Failed to set compute package.", + } + ), + 400, + ) + return jsonify({"success": True, "message": "Compute package set."}) + + def _get_compute_package_name(self): + """Get the compute package name from the statestore. + + :return: The compute package name. + :rtype: str + """ + package_objects = self.statestore.get_compute_package() + if package_objects is None: + message = "No compute package found." + return None, message + else: + try: + name = package_objects["filename"] + except KeyError as e: + message = "No compute package found. Key error." + print(e) + return None, message + return name, "success" + + def get_compute_package(self): + """Get the compute package from the statestore. + + :return: The compute package as a json response. + :rtype: :class:`flask.Response` + """ + package_object = self.statestore.get_compute_package() + if package_object is None: + return ( + jsonify( + {"success": False, "message": "No compute package found."} + ), + 404, + ) + payload = {} + id = str(package_object["_id"]) + info = { + "filename": package_object["filename"], + "helper": package_object["helper"], + } + payload[id] = info + return jsonify(payload) + + def download_compute_package(self, name): + """Download the compute package. + + :return: The compute package as a json object. 
+ :rtype: :class:`flask.Response` + """ + if name is None: + name, message = self._get_compute_package_name() + if name is None: + return jsonify({"success": False, "message": message}), 404 + try: + mutex = threading.Lock() + mutex.acquire() + # TODO: make configurable, perhaps in config.py or package.py + return send_from_directory( + "/app/client/package/", name, as_attachment=True + ) + except Exception: + try: + data = self.control.get_compute_package(name) + # TODO: make configurable, perhaps in config.py or package.py + file_path = os.path.join("/app/client/package/", name) + with open(file_path, "wb") as fh: + fh.write(data) + # TODO: make configurable, perhaps in config.py or package.py + return send_from_directory( + "/app/client/package/", name, as_attachment=True + ) + except Exception: + raise + finally: + mutex.release() + + def _create_checksum(self, name=None): + """Create the checksum of the compute package. + + :param name: The name of the compute package. + :type name: str + :return: Success or failure boolean, message and the checksum. + :rtype: bool, str, str + """ + + if name is None: + name, message = self._get_compute_package_name() + if name is None: + return False, message, "" + file_path = os.path.join( + "/app/client/package/", name + ) # TODO: make configurable, perhaps in config.py or package.py + try: + sum = str(sha(file_path)) + except FileNotFoundError: + sum = "" + message = "File not found." + return True, message, sum + + def get_checksum(self, name): + """Get the checksum of the compute package. + + :param name: The name of the compute package. + :type name: str + :return: The checksum as a json object. + :rtype: :py:class:`flask.Response` + """ + + success, message, sum = self._create_checksum(name) + if not success: + return jsonify({"success": False, "message": message}), 404 + payload = {"checksum": sum} + + return jsonify(payload) + + def get_controller_status(self): + """Get the status of the controller. 
+ + :return: The status of the controller as a json object. + :rtype: :py:class:`flask.Response` + """ + return jsonify({"state": ReducerStateToString(self.control.state())}) + + def get_events(self, **kwargs): + """Get the events of the federated network. + + :return: The events as a json object. + :rtype: :py:class:`flask.Response` + """ + response = self.statestore.get_events(**kwargs) + + result = response["result"] + if result is None: + return ( + jsonify({"success": False, "message": "No events found."}), + 404, + ) + + events = [] + for evt in result: + events.append(evt) + + return jsonify({"result": events, "count": response["count"]}) + + def get_all_validations(self, **kwargs): + """Get all validations from the statestore. + + :return: All validations as a json response. + :rtype: :class:`flask.Response` + """ + validations_objects = self.statestore.get_validations(**kwargs) + if validations_objects is None: + return ( + jsonify( + { + "success": False, + "message": "No validations found.", + "filter_used": kwargs, + } + ), + 404, + ) + payload = {} + for object in validations_objects: + id = str(object["_id"]) + info = { + "model_id": object["modelId"], + "data": object["data"], + "timestamp": object["timestamp"], + "meta": object["meta"], + "sender": object["sender"], + "receiver": object["receiver"], + } + payload[id] = info + return jsonify(payload) + + def add_combiner( + self, combiner_id, secure_grpc, address, remote_addr, fqdn, port + ): + """Add a combiner to the network. + + :param combiner_id: The combiner id to add. + :type combiner_id: str + :param secure_grpc: Whether to use secure grpc or not. + :type secure_grpc: bool + :param name: The name of the combiner. + :type name: str + :param address: The address of the combiner. + :type address: str + :param remote_addr: The remote address of the combiner. + :type remote_addr: str + :param fqdn: The fqdn of the combiner. + :type fqdn: str + :param port: The port of the combiner. 
+ :type port: int + :return: Config of the combiner as a json response. + :rtype: :class:`flask.Response` + """ + # TODO: Any more required check for config? Formerly based on status: "retry" + if not self.control.idle(): + return jsonify( + { + "success": False, + "status": "retry", + "message": "Conroller is not in idle state, try again later. ", + } + ) + # Check if combiner already exists + combiner = self.control.network.get_combiner(combiner_id) + if not combiner: + if secure_grpc == "True": + certificate, key = self.certificate_manager.get_or_create( + address + ).get_keypair_raw() + _ = base64.b64encode(certificate) + _ = base64.b64encode(key) + + else: + certificate = None + key = None + + combiner_interface = CombinerInterface( + parent=self._to_dict(), + name=combiner_id, + address=address, + fqdn=fqdn, + port=port, + certificate=copy.deepcopy(certificate), + key=copy.deepcopy(key), + ip=remote_addr, + ) + + self.control.network.add_combiner(combiner_interface) + + # Check combiner now exists + combiner = self.control.network.get_combiner(combiner_id) + if not combiner: + return jsonify( + {"success": False, "message": "Combiner not added."} + ) + + payload = { + "success": True, + "message": "Combiner added successfully.", + "status": "added", + "storage": self.statestore.get_storage_backend(), + "statestore": self.statestore.get_config(), + "certificate": combiner.get_certificate(), + "key": combiner.get_key(), + } + + return jsonify(payload) + + def add_client(self, client_id, preferred_combiner, remote_addr): + """Add a client to the network. + + :param client_id: The client id to add. + :type client_id: str + :param preferred_combiner: The preferred combiner for the client.If None, the combiner will be chosen based on availability. + :type preferred_combiner: str + :return: A json response with combiner assignment config. 
+ :rtype: :class:`flask.Response` + """ + # Check if package has been set + package_object = self.statestore.get_compute_package() + if package_object is None: + return ( + jsonify( + { + "success": False, + "status": "retry", + "message": "No compute package found. Set package in controller.", + } + ), + 203, + ) + + # Assign client to combiner + if preferred_combiner: + combiner = self.control.network.get_combiner(preferred_combiner) + if combiner is None: + return ( + jsonify( + { + "success": False, + "message": f"Combiner {preferred_combiner} not found or unavailable.", + } + ), + 400, + ) + else: + combiner = self.control.network.find_available_combiner() + if combiner is None: + return ( + jsonify( + {"success": False, "message": "No combiner available."} + ), + 400, + ) + + client_config = { + "name": client_id, + "combiner_preferred": preferred_combiner, + "combiner": combiner.name, + "ip": remote_addr, + "status": "available", + } + # Add client to network + self.control.network.add_client(client_config) + + # Setup response containing information about the combiner for assinging the client + if combiner.certificate: + cert_b64 = base64.b64encode(combiner.certificate) + cert = str(cert_b64).split("'")[1] + else: + cert = None + + payload = { + "status": "assigned", + "host": combiner.address, + "fqdn": combiner.fqdn, + "package": "remote", # TODO: Make this configurable + "ip": combiner.ip, + "port": combiner.port, + "certificate": cert, + "helper_type": self.control.statestore.get_helper(), + } + print("Seding payload: ", payload, flush=True) + + return jsonify(payload) + + def get_initial_model(self): + """Get the initial model from the statestore. + + :return: The initial model as a json response. + :rtype: :class:`flask.Response` + """ + model_id = self.statestore.get_initial_model() + payload = {"model_id": model_id} + return jsonify(payload) + + def set_initial_model(self, file): + """Add an initial model to the network. 
+ + :param file: The initial model to add. + :type file: file + :return: A json response with success or failure message. + :rtype: :class:`flask.Response` + """ + try: + object = BytesIO() + object.seek(0, 0) + file.seek(0) + object.write(file.read()) + helper = self.control.get_helper() + object.seek(0) + model = helper.load(object) + self.control.commit(file.filename, model) + except Exception as e: + print(e, flush=True) + return jsonify({"success": False, "message": e}) + + return jsonify( + {"success": True, "message": "Initial model added successfully."} + ) + + def get_latest_model(self): + """Get the latest model from the statestore. + + :return: The initial model as a json response. + :rtype: :class:`flask.Response` + """ + if self.statestore.get_latest_model(): + model_id = self.statestore.get_latest_model() + payload = {"model_id": model_id} + return jsonify(payload) + else: + return jsonify( + {"success": False, "message": "No initial model set."} + ) + + def get_models(self, session_id=None, limit=None, skip=None): + result = self.statestore.list_models(session_id, limit, skip) + + if result is None: + return ( + jsonify({"success": False, "message": "No models found."}), + 404, + ) + + arr = [] + + for model in result["result"]: + arr.append(model) + + result = {"result": arr, "count": result["count"]} + + return jsonify(result) + + def get_model_trail(self): + """Get the model trail for a given session. + + :param session: The session id to get the model trail for. + :type session: str + :return: The model trail for the given session as a json response. + :rtype: :class:`flask.Response` + """ + model_info = self.statestore.get_model_trail() + if model_info: + return jsonify(model_info) + else: + return jsonify( + {"success": False, "message": "No model trail available."} + ) + + def get_all_rounds(self): + """Get all rounds. + + :return: The rounds as json response. 
+ :rtype: :class:`flask.Response` + """ + rounds_objects = self.statestore.get_rounds() + if rounds_objects is None: + jsonify({"success": False, "message": "No rounds available."}) + payload = {} + for object in rounds_objects: + id = object["round_id"] + if "reducer" in object.keys(): + reducer = object["reducer"] + else: + reducer = None + if "combiners" in object.keys(): + combiners = object["combiners"] + else: + combiners = None + + info = { + "reducer": reducer, + "combiners": combiners, + } + payload[id] = info + else: + return jsonify(payload) + + def get_round(self, round_id): + """Get a round. + + :param round_id: The round id to get. + :type round_id: str + :return: The round as json response. + :rtype: :class:`flask.Response` + """ + round_object = self.statestore.get_round(round_id) + if round_object is None: + return jsonify({"success": False, "message": "Round not found."}) + payload = { + 'round_id': round_object['round_id'], + 'combiners': round_object['combiners'], + } + return jsonify(payload) + + def get_client_config(self, checksum=True): + """Get the client config. + + :return: The client config as json response. + :rtype: :py:class:`flask.Response` + """ + config = get_controller_config() + network_id = get_network_config() + port = config["port"] + host = config["host"] + payload = { + "network_id": network_id, + "discover_host": host, + "discover_port": port, + } + if checksum: + success, _, checksum_str = self._create_checksum() + if success: + payload["checksum"] = checksum_str + return jsonify(payload) + + def get_plot_data(self, feature=None): + """Get plot data. + + :return: The plot data as json response. 
+ :rtype: :py:class:`flask.Response` + """ + + plot = Plot(self.control.statestore) + + try: + valid_metrics = plot.fetch_valid_metrics() + feature = feature or valid_metrics[0] + box_plot = plot.create_box_plot(feature) + except Exception as e: + valid_metrics = None + box_plot = None + print(e, flush=True) + + result = { + "valid_metrics": valid_metrics, + "box_plot": box_plot, + } + + return jsonify(result) + + def list_combiners_data(self, combiners): + """Get combiners data. + + :param combiners: The combiners to get data for. + :type combiners: list + :return: The combiners data as json response. + :rtype: :py:class:`flask.Response` + """ + + response = self.statestore.list_combiners_data(combiners) + + arr = [] + + # order list by combiner name + for element in response: + + obj = { + "combiner": element["_id"], + "count": element["count"], + } + + arr.append(obj) + + result = {"result": arr} + + return jsonify(result) + + def start_session( + self, + session_id, + rounds=5, + round_timeout=180, + round_buffer_size=-1, + delete_models=False, + validate=True, + helper="keras", + min_clients=1, + requested_clients=8, + ): + """Start a session. + + :param session_id: The session id to start. + :type session_id: str + :param rounds: The number of rounds to perform. + :type rounds: int + :param round_timeout: The round timeout to use in seconds. + :type round_timeout: int + :param round_buffer_size: The round buffer size to use. + :type round_buffer_size: int + :param delete_models: Whether to delete models after each round at combiner (save storage). + :type delete_models: bool + :param validate: Whether to validate the model after each round. + :type validate: bool + :param min_clients: The minimum number of clients required. + :type min_clients: int + :param requested_clients: The requested number of clients. + :type requested_clients: int + :return: A json response with success or failure message and session config. 
+ :rtype: :class:`flask.Response` + """ + # Check if session already exists + session = self.statestore.get_session(session_id) + if session: + return jsonify( + {"success": False, "message": "Session already exists."} + ) + + # Check if session is running + if self.control.state() == ReducerState.monitoring: + return jsonify( + {"success": False, "message": "A session is already running."} + ) + + # Check available clients per combiner + clients_available = 0 + for combiner in self.control.network.get_combiners(): + try: + combiner_state = combiner.report() + nr_active_clients = combiner_state["nr_active_clients"] + clients_available = clients_available + int(nr_active_clients) + except CombinerUnavailableError as e: + # TODO: Handle unavailable combiner, stop session or continue? + print("COMBINER UNAVAILABLE: {}".format(e), flush=True) + continue + + if clients_available < min_clients: + return jsonify( + { + "success": False, + "message": "Not enough clients available to start session.", + } + ) + + # Check if validate is string and convert to bool + if isinstance(validate, str): + if validate.lower() == "true": + validate = True + else: + validate = False + + # Get lastest model as initial model for session + model_id = self.statestore.get_latest_model() + + # Setup session config + session_config = { + "session_id": session_id if session_id else str(uuid.uuid4()), + "round_timeout": round_timeout, + "buffer_size": round_buffer_size, + "model_id": model_id, + "rounds": rounds, + "delete_models_storage": delete_models, + "clients_required": min_clients, + "clients_requested": requested_clients, + "task": (""), + "validate": validate, + "helper_type": helper, + } + + # Start session + threading.Thread( + target=self.control.session, args=(session_config,) + ).start() + + # Return success response + return jsonify( + { + "success": True, + "message": "Session started successfully.", + "config": session_config, + } + ) diff --git a/fedn/fedn/network/api/network.py 
b/fedn/fedn/network/api/network.py new file mode 100644 index 000000000..6fcaad053 --- /dev/null +++ b/fedn/fedn/network/api/network.py @@ -0,0 +1,171 @@ +import base64 + +from fedn.network.combiner.interfaces import (CombinerInterface, + CombinerUnavailableError) +from fedn.network.loadbalancer.leastpacked import LeastPacked + +__all__ = 'Network', + + +class Network: + """ FEDn network interface. This class is used to interact with the network. + Note: This class contains redundant code, which is not used in the current version of FEDn. + Some methods have been moved to :class:`fedn.network.api.interface.API`. + """ + + def __init__(self, control, statestore, load_balancer=None): + """ """ + self.statestore = statestore + self.control = control + self.id = statestore.network_id + + if not load_balancer: + self.load_balancer = LeastPacked(self) + else: + self.load_balancer = load_balancer + + def get_combiner(self, name): + """ Get combiner by name. + + :param name: name of combiner + :type name: str + :return: The combiner instance object + :rtype: :class:`fedn.network.combiner.interfaces.CombinerInterface` + """ + combiners = self.get_combiners() + for combiner in combiners: + if name == combiner.name: + return combiner + return None + + def get_combiners(self): + """ Get all combiners in the network. + + :return: list of combiners objects + :rtype: list(:class:`fedn.network.combiner.interfaces.CombinerInterface`) + """ + data = self.statestore.get_combiners() + combiners = [] + for c in data["result"]: + if c['certificate']: + cert = base64.b64decode(c['certificate']) + key = base64.b64decode(c['key']) + else: + cert = None + key = None + + combiners.append( + CombinerInterface(c['parent'], c['name'], c['address'], c['fqdn'], c['port'], + certificate=cert, key=key, ip=c['ip'])) + + return combiners + + def add_combiner(self, combiner): + """ Add a new combiner to the network. 
+ + :param combiner: The combiner instance object + :type combiner: :class:`fedn.network.combiner.interfaces.CombinerInterface` + :return: None + """ + if not self.control.idle(): + print("Reducer is not idle, cannot add additional combiner.") + return + + if self.get_combiner(combiner.name): + return + + print("adding combiner {}".format(combiner.name), flush=True) + self.statestore.set_combiner(combiner.to_dict()) + + def remove_combiner(self, combiner): + """ Remove a combiner from the network. + + :param combiner: The combiner instance object + :type combiner: :class:`fedn.network.combiner.interfaces.CombinerInterface` + :return: None + """ + if not self.control.idle(): + print("Reducer is not idle, cannot remove combiner.") + return + self.statestore.delete_combiner(combiner.name) + + def find_available_combiner(self): + """ Find an available combiner in the network. + + :return: The combiner instance object + :rtype: :class:`fedn.network.combiner.interfaces.CombinerInterface` + """ + combiner = self.load_balancer.find_combiner() + return combiner + + def handle_unavailable_combiner(self, combiner): + """ This callback is triggered if a combiner is found to be unresponsive. + + :param combiner: The combiner instance object + :type combiner: :class:`fedn.network.combiner.interfaces.CombinerInterface` + :return: None + """ + # TODO: Implement strategy to handle an unavailable combiner. + print("REDUCER CONTROL: Combiner {} unavailable.".format( + combiner.name), flush=True) + + def add_client(self, client): + """ Add a new client to the network. + + :param client: The client instance object + :type client: dict + :return: None + """ + + if self.get_client(client['name']): + return + + print("adding client {}".format(client['name']), flush=True) + self.statestore.set_client(client) + + def get_client(self, name): + """ Get client by name. 
+ + :param name: name of client + :type name: str + :return: The client instance object + :rtype: ObjectId + """ + ret = self.statestore.get_client(name) + return ret + + def update_client_data(self, client_data, status, role): + """ Update client status in statestore. + + :param client_data: The client instance object + :type client_data: dict + :param status: The client status + :type status: str + :param role: The client role + :type role: str + :return: None + """ + self.statestore.update_client_status(client_data, status, role) + + def get_client_info(self): + """ List available clients in statestore. + + :return: list of client objects + :rtype: list(ObjectId) + """ + return self.statestore.list_clients() + + def describe(self): + """ Describe the network. + + :return: The network description + :rtype: dict + """ + network = [] + for combiner in self.get_combiners(): + try: + network.append(combiner.report()) + except CombinerUnavailableError: + # TODO, do better here. + pass + return network diff --git a/fedn/fedn/network/api/server.py b/fedn/fedn/network/api/server.py new file mode 100644 index 000000000..cfb91bece --- /dev/null +++ b/fedn/fedn/network/api/server.py @@ -0,0 +1,388 @@ +from flask import Flask, jsonify, request + +from fedn.common.config import (get_controller_config, get_modelstorage_config, + get_network_config, get_statestore_config) +from fedn.network.api.interface import API +from fedn.network.controller.control import Control +from fedn.network.statestore.mongostatestore import MongoStateStore + +statestore_config = get_statestore_config() +network_id = get_network_config() +modelstorage_config = get_modelstorage_config() +statestore = MongoStateStore( + network_id, statestore_config["mongo_config"], modelstorage_config +) +control = Control(statestore=statestore) +api = API(statestore, control) +app = Flask(__name__) + + +@app.route("/get_model_trail", methods=["GET"]) +def get_model_trail(): + """Get the model trail for a given 
session. + param: session: The session id to get the model trail for. + type: session: str + return: The model trail for the given session as a json object. + rtype: json + """ + return api.get_model_trail() + + +@app.route("/list_models", methods=["GET"]) +def list_models(): + """Get models from the statestore. + param: + session_id: The session id to get the model trail for. + limit: The maximum number of models to return. + type: limit: int + param: skip: The number of models to skip. + type: skip: int + Returns: + _type_: json + """ + + session_id = request.args.get("session_id", None) + limit = request.args.get("limit", None) + skip = request.args.get("skip", None) + + return api.get_models(session_id, limit, skip) + + +@app.route("/delete_model_trail", methods=["GET", "POST"]) +def delete_model_trail(): + """Delete the model trail for a given session. + param: session: The session id to delete the model trail for. + type: session: str + return: The response from the statestore. + rtype: json + """ + return jsonify({"message": "Not implemented"}), 501 + + +@app.route("/list_clients", methods=["GET"]) +def list_clients(): + """Get all clients from the statestore. + return: All clients as a json object. + rtype: json + """ + + limit = request.args.get("limit", None) + skip = request.args.get("skip", None) + status = request.args.get("status", None) + + return api.get_clients(limit, skip, status) + + +@app.route("/get_active_clients", methods=["GET"]) +def get_active_clients(): + """Get all active clients from the statestore. + param: combiner_id: The combiner id to get active clients for. + type: combiner_id: str + return: All active clients as a json object. 
+ rtype: json + """ + combiner_id = request.args.get("combiner", None) + if combiner_id is None: + return ( + jsonify({"success": False, "message": "Missing combiner id."}), + 400, + ) + return api.get_active_clients(combiner_id) + + +@app.route("/list_combiners", methods=["GET"]) +def list_combiners(): + """Get all combiners in the network. + return: All combiners as a json object. + rtype: json + """ + + limit = request.args.get("limit", None) + skip = request.args.get("skip", None) + + return api.get_all_combiners(limit, skip) + + +@app.route("/get_combiner", methods=["GET"]) +def get_combiner(): + """Get a combiner from the statestore. + param: combiner_id: The combiner id to get. + type: combiner_id: str + return: The combiner as a json object. + rtype: json + """ + combiner_id = request.args.get("combiner", None) + if combiner_id is None: + return ( + jsonify({"success": False, "message": "Missing combiner id."}), + 400, + ) + return api.get_combiner(combiner_id) + + +@app.route("/list_rounds", methods=["GET"]) +def list_rounds(): + """Get all rounds from the statestore. + return: All rounds as a json object. + rtype: json + """ + return api.get_all_rounds() + + +@app.route("/get_round", methods=["GET"]) +def get_round(): + """Get a round from the statestore. + param: round_id: The round id to get. + type: round_id: str + return: The round as a json object. + rtype: json + """ + round_id = request.args.get("round_id", None) + if round_id is None: + return jsonify({"success": False, "message": "Missing round id."}), 400 + return api.get_round(round_id) + + +@app.route("/start_session", methods=["GET", "POST"]) +def start_session(): + """Start a new session. + return: The response from control. + rtype: json + """ + json_data = request.get_json() + return api.start_session(**json_data) + + +@app.route("/list_sessions", methods=["GET"]) +def list_sessions(): + """Get all sessions from the statestore. + return: All sessions as a json object. 
+ rtype: json + """ + limit = request.args.get("limit", None) + skip = request.args.get("skip", None) + + return api.get_all_sessions(limit, skip) + + +@app.route("/get_session", methods=["GET"]) +def get_session(): + """Get a session from the statestore. + param: session_id: The session id to get. + type: session_id: str + return: The session as a json object. + rtype: json + """ + session_id = request.args.get("session_id", None) + if session_id is None: + return ( + jsonify({"success": False, "message": "Missing session id."}), + 400, + ) + return api.get_session(session_id) + + +@app.route("/set_package", methods=["POST"]) +def set_package(): + """ Set the compute package in the statestore. + Usage with curl: + curl -k -X POST \ + -F file=@package.tgz \ + -F helper="kerashelper" \ + http://localhost:8092/set_package + + param: file: The compute package file to set. + type: file: file + return: The response from the statestore. + rtype: json + """ + helper_type = request.form.get("helper", None) + if helper_type is None: + return ( + jsonify({"success": False, "message": "Missing helper type."}), + 400, + ) + try: + file = request.files["file"] + except KeyError: + return jsonify({"success": False, "message": "Missing file."}), 400 + return api.set_compute_package(file=file, helper_type=helper_type) + + +@app.route("/get_package", methods=["GET"]) +def get_package(): + """Get the compute package from the statestore. + return: The compute package as a json object. + rtype: json + """ + return api.get_compute_package() + + +@app.route("/download_package", methods=["GET"]) +def download_package(): + """Download the compute package. + return: The compute package as a json object. 
+ rtype: json + """ + name = request.args.get("name", None) + return api.download_compute_package(name) + + +@app.route("/get_package_checksum", methods=["GET"]) +def get_package_checksum(): + name = request.args.get("name", None) + return api.get_checksum(name) + + +@app.route("/get_latest_model", methods=["GET"]) +def get_latest_model(): + """Get the latest model from the statestore. + return: The initial model as a json object. + rtype: json + """ + return api.get_latest_model() + + +# Get initial model endpoint + + +@app.route("/get_initial_model", methods=["GET"]) +def get_initial_model(): + """Get the initial model from the statestore. + return: The initial model as a json object. + rtype: json + """ + return api.get_initial_model() + + +@app.route("/set_initial_model", methods=["POST"]) +def set_initial_model(): + """Set the initial model in the statestore and upload to model repository. + Usage with curl: + curl -k -X POST + -F file=@seed.npz + http://localhost:8092/set_initial_model + + param: file: The initial model file to set. + type: file: file + return: The response from the statestore. + rtype: json + """ + try: + file = request.files["file"] + except KeyError: + return jsonify({"success": False, "message": "Missing file."}), 400 + return api.set_initial_model(file) + + +@app.route("/get_controller_status", methods=["GET"]) +def get_controller_status(): + """Get the status of the controller. + return: The status as a json object. + rtype: json + """ + return api.get_controller_status() + + +@app.route("/get_client_config", methods=["GET"]) +def get_client_config(): + """Get the client configuration. + return: The client configuration as a json object. + rtype: json + """ + checksum = request.args.get("checksum", True) + return api.get_client_config(checksum) + + +@app.route("/get_events", methods=["GET"]) +def get_events(): + """Get the events from the statestore. + return: The events as a json object. 
+ rtype: json + """ + # TODO: except filter with request.get_json() + kwargs = request.args.to_dict() + + return api.get_events(**kwargs) + + +@app.route("/list_validations", methods=["GET"]) +def list_validations(): + """Get all validations from the statestore. + return: All validations as a json object. + rtype: json + """ + # TODO: except filter with request.get_json() + kwargs = request.args.to_dict() + return api.get_all_validations(**kwargs) + + +@app.route("/add_combiner", methods=["POST"]) +def add_combiner(): + """Add a combiner to the network. + return: The response from the statestore. + rtype: json + """ + json_data = request.get_json() + remote_addr = request.remote_addr + try: + response = api.add_combiner(**json_data, remote_addr=remote_addr) + except TypeError as e: + return jsonify({"success": False, "message": str(e)}), 400 + return response + + +@app.route("/add_client", methods=["POST"]) +def add_client(): + """Add a client to the network. + return: The response from control. + rtype: json + """ + + json_data = request.get_json() + remote_addr = request.remote_addr + try: + response = api.add_client(**json_data, remote_addr=remote_addr) + except TypeError as e: + return jsonify({"success": False, "message": str(e)}), 400 + return response + + +@app.route("/list_combiners_data", methods=["POST"]) +def list_combiners_data(): + """List data from combiners. + return: The response from control. + rtype: json + """ + + json_data = request.get_json() + + # expects a list of combiner names (strings) in an array + combiners = json_data.get("combiners", None) + + try: + response = api.list_combiners_data(combiners) + except TypeError as e: + return jsonify({"success": False, "message": str(e)}), 400 + return response + + +@app.route("/get_plot_data", methods=["GET"]) +def get_plot_data(): + """Get plot data from the statestore. 
+ rtype: json + """ + + try: + feature = request.args.get("feature", None) + response = api.get_plot_data(feature=feature) + except TypeError as e: + return jsonify({"success": False, "message": str(e)}), 400 + return response + + +if __name__ == "__main__": + config = get_controller_config() + port = config["port"] + debug = config["debug"] + app.run(debug=debug, port=port, host="0.0.0.0") diff --git a/fedn/fedn/network/api/tests.py b/fedn/fedn/network/api/tests.py new file mode 100644 index 000000000..7395d9bdf --- /dev/null +++ b/fedn/fedn/network/api/tests.py @@ -0,0 +1,335 @@ +# Unittest for Flask API endpoints +# +# Run with: +# python -m unittest fedn.tests.network.api.tests +# +# or +# +# python3 -m unittest fedn.tests.network.api.tests +# +# or +# +# python3 -m unittest fedn.tests.network.api.tests.NetworkAPITests +# +# or +# +# python -m unittest fedn.tests.network.api.tests.NetworkAPITests +# +# or +# +# python -m unittest fedn.tests.network.api.tests.NetworkAPITests.test_get_model_trail +# +# or +# +# python3 -m unittest fedn.tests.network.api.tests.NetworkAPITests.test_get_model_trail +# + +import io +import time +import unittest +from unittest.mock import MagicMock, patch + +import fedn + + +class NetworkAPITests(unittest.TestCase): + """ Unittests for the Network API. """ + @patch('fedn.network.statestore.mongostatestore.MongoStateStore', autospec=True) + @patch('fedn.network.controller.controlbase.ControlBase', autospec=True) + def setUp(self, mock_mongo, mock_control): + # start Flask server in testing mode + import fedn.network.api.server + self.app = fedn.network.api.server.app.test_client() + + def test_get_model_trail(self): + """ Test get_model_trail endpoint. 
""" + # Mock api.get_model_trail + model_id = "test" + time_stamp = time.time() + return_value = {model_id: time_stamp} + fedn.network.api.server.api.get_model_trail = MagicMock(return_value=return_value) + # Make request + response = self.app.get('/get_model_trail') + # Assert response + self.assertEqual(response.status_code, 200) + self.assertEqual(response.json, return_value) + # Assert api.get_model_trail was called + fedn.network.api.server.api.get_model_trail.assert_called_once_with() + + def test_get_latest_model(self): + """ Test get_latest_model endpoint. """ + # Mock api.get_latest_model + model_id = "test" + time_stamp = time.time() + return_value = {model_id: time_stamp} + fedn.network.api.server.api.get_latest_model = MagicMock(return_value=return_value) + # Make request + response = self.app.get('/get_latest_model') + # Assert response + self.assertEqual(response.status_code, 200) + self.assertEqual(response.json, return_value) + # Assert api.get_latest_model was called + fedn.network.api.server.api.get_latest_model.assert_called_once_with() + + def test_get_initial_model(self): + """ Test get_initial_model endpoint. """ + # Mock api.get_initial_model + model_id = "test" + time_stamp = time.time() + return_value = {model_id: time_stamp} + fedn.network.api.server.api.get_initial_model = MagicMock(return_value=return_value) + # Make request + response = self.app.get('/get_initial_model') + # Assert response + self.assertEqual(response.status_code, 200) + self.assertEqual(response.json, return_value) + # Assert api.get_initial_model was called + fedn.network.api.server.api.get_initial_model.assert_called_once_with() + + def test_set_initial_model(self): + """ Test set_initial_model endpoint. 
""" + # Mock api.set_initial_model + success = True + message = "test" + return_value = {'success': success, 'message': message} + fedn.network.api.server.api.set_initial_model = MagicMock(return_value=return_value) + # Create test file + request_file = (io.BytesIO(b"abcdef"), 'test.txt') + # Make request + response = self.app.post('/set_initial_model', data={"file": request_file}) + # Assert response + self.assertEqual(response.status_code, 200) + self.assertEqual(response.json, return_value) + # Assert api.set_initial_model was called + fedn.network.api.server.api.set_initial_model.assert_called_once() + + def test_list_clients(self): + """ Test list_clients endpoint. """ + # Mock api.get_all_clients + return_value = {"test": "test"} + fedn.network.api.server.api.get_all_clients = MagicMock(return_value=return_value) + # Make request + response = self.app.get('/list_clients') + # Assert response + self.assertEqual(response.status_code, 200) + self.assertEqual(response.json, return_value) + # Assert api.get_all_clients was called + fedn.network.api.server.api.get_all_clients.assert_called_once_with() + + def test_get_active_clients(self): + """ Test get_active_clients endpoint. """ + # Mock api.get_active_clients + return_value = {"test": "test"} + fedn.network.api.server.api.get_active_clients = MagicMock(return_value=return_value) + # Make request + response = self.app.get('/get_active_clients?combiner=test') + # Assert response + self.assertEqual(response.status_code, 200) + self.assertEqual(response.json, return_value) + # Assert api.get_active_clients was called + fedn.network.api.server.api.get_active_clients.assert_called_once_with("test") + + def test_add_client(self): + """ Test add_client endpoint. 
""" + # Mock api.add_client + return_value = {"test": "test"} + fedn.network.api.server.api.add_client = MagicMock(return_value=return_value) + # Make request + response = self.app.post('/add_client', json={ + 'preferred_combiner': 'test', + }) + # Assert response + self.assertEqual(response.status_code, 200) + self.assertEqual(response.json, return_value) + # Assert api.add_client was called + fedn.network.api.server.api.add_client.assert_called_once_with( + preferred_combiner="test", + remote_addr='127.0.0.1' + ) + + def test_list_combiners(self): + """ Test list_combiners endpoint. """ + # Mock api.get_all_combiners + return_value = {"test": "test"} + fedn.network.api.server.api.get_all_combiners = MagicMock(return_value=return_value) + # Make request + response = self.app.get('/list_combiners') + # Assert response + self.assertEqual(response.status_code, 200) + self.assertEqual(response.json, return_value) + # Assert api.get_all_combiners was called + fedn.network.api.server.api.get_all_combiners.assert_called_once_with() + + def test_list_rounds(self): + """ Test list_rounds endpoint. """ + # Mock api.get_all_rounds + return_value = {"test": "test"} + fedn.network.api.server.api.get_all_rounds = MagicMock(return_value=return_value) + # Make request + response = self.app.get('/list_rounds') + # Assert response + self.assertEqual(response.status_code, 200) + self.assertEqual(response.json, return_value) + # Assert api.get_all_rounds was called + fedn.network.api.server.api.get_all_rounds.assert_called_once_with() + + def test_get_round(self): + """ Test get_round endpoint. 
""" + # Mock api.get_round + return_value = {"test": "test"} + fedn.network.api.server.api.get_round = MagicMock(return_value=return_value) + # Make request + response = self.app.get('/get_round?round_id=test') + # Assert response + self.assertEqual(response.status_code, 200) + self.assertEqual(response.json, return_value) + # Assert api.get_round was called + fedn.network.api.server.api.get_round.assert_called_once_with("test") + + def test_get_combiner(self): + """ Test get_combiner endpoint. """ + # Mock api.get_combiner + return_value = {"test": "test"} + fedn.network.api.server.api.get_combiner = MagicMock(return_value=return_value) + # Make request + response = self.app.get('/get_combiner?combiner=test') + # Assert response + self.assertEqual(response.status_code, 200) + self.assertEqual(response.json, return_value) + # Assert api.get_combiner was called + fedn.network.api.server.api.get_combiner.assert_called_once_with("test") + + def test_add_combiner(self): + """ Test add_combiner endpoint. """ + # Mock api.add_combiner + success = True + message = "test" + return_value = {'success': success, 'message': message} + fedn.network.api.server.api.add_combiner = MagicMock(return_value=return_value) + # Make request + response = self.app.post('/add_combiner', json={ + 'combiner_id': 'test', + 'address': '1234', + 'port': '1234', + 'secure_grpc': 'True', + 'fqdn': 'test', + }) + # Assert response + self.assertEqual(response.status_code, 200) + self.assertEqual(response.json, return_value) + # Assert api.add_combiner was called + fedn.network.api.server.api.add_combiner.assert_called_once_with( + combiner_id='test', + remote_addr='127.0.0.1', + address='1234', + port='1234', + secure_grpc='True', + fqdn='test', + ) + + def test_get_events(self): + """ Test get_events endpoint. 
""" + # Mock api.get_events + return_value = {"test": "test"} + fedn.network.api.server.api.get_events = MagicMock(return_value=return_value) + # Make request + response = self.app.get('/get_events') + # Assert response + self.assertEqual(response.status_code, 200) + self.assertEqual(response.json, return_value) + # Assert api.get_events was called + fedn.network.api.server.api.get_events.assert_called_once() + + def test_get_status(self): + """ Test get_status endpoint. """ + # Mock api.get_status + return_value = {"test": "test"} + fedn.network.api.server.api.get_controller_status = MagicMock(return_value=return_value) + # Make request + response = self.app.get('/get_controller_status') + # Assert response + self.assertEqual(response.status_code, 200) + self.assertEqual(response.json, return_value) + # Assert api.get_status was called + fedn.network.api.server.api.get_controller_status.assert_called_once() + + def test_start_session(self): + """ Test start_session endpoint. """ + # Mock api.start_session + success = True + message = "test" + return_value = {'success': success, 'message': message} + fedn.network.api.server.api.start_session = MagicMock(return_value=return_value) + # Make request with only session_id + json = {'session_id': 'test', + 'round_timeout': float(60), + 'rounds': 1, + 'round_buffer_size': -1, + } + response = self.app.post('/start_session', json=json) + # Assert response + self.assertEqual(response.status_code, 200) + self.assertEqual(response.json, return_value) + # Assert api.start_session was called + fedn.network.api.server.api.start_session.assert_called_once_with( + session_id='test', + round_timeout=float(60), + rounds=1, + round_buffer_size=-1, + ) + + def test_list_sessions(self): + """ Test list_sessions endpoint. 
""" + # Mock api.list_sessions + return_value = {"test": "test"} + fedn.network.api.server.api.get_all_sessions = MagicMock(return_value=return_value) + # Make request + response = self.app.get('/list_sessions') + # Assert response + self.assertEqual(response.status_code, 200) + self.assertEqual(response.json, return_value) + # Assert api.list_sessions was called + fedn.network.api.server.api.get_all_sessions.assert_called_once() + + def test_get_package(self): + """ Test get_package endpoint. """ + # Mock api.get_package + return_value = {"test": "test"} + fedn.network.api.server.api.get_compute_package = MagicMock(return_value=return_value) + # Make request + response = self.app.get('/get_package') + # Assert response + self.assertEqual(response.status_code, 200) + self.assertEqual(response.json, return_value) + # Assert api.get_package was called + fedn.network.api.server.api.get_compute_package.assert_called_once_with() + + def test_get_controller_status(self): + """ Test get_controller_status endpoint. """ + # Mock api.get_controller_status + return_value = {"test": "test"} + fedn.network.api.server.api.get_controller_status = MagicMock(return_value=return_value) + # Make request + response = self.app.get('/get_controller_status') + # Assert response + self.assertEqual(response.status_code, 200) + self.assertEqual(response.json, return_value) + # Assert api.get_controller_status was called + fedn.network.api.server.api.get_controller_status.assert_called_once_with() + + def test_get_client_config(self): + """ Test get_client_config endpoint. 
""" + # Mock api.get_client_config + return_value = {"test": "test"} + fedn.network.api.server.api.get_client_config = MagicMock(return_value=return_value) + # Make request + response = self.app.get('/get_client_config') + # Assert response + self.assertEqual(response.status_code, 200) + self.assertEqual(response.json, return_value) + # Assert api.get_client_config was called + fedn.network.api.server.api.get_client_config.assert_called_once_with(True) + + +if __name__ == '__main__': + unittest.main() diff --git a/fedn/fedn/network/clients/__init__.py b/fedn/fedn/network/clients/__init__.py new file mode 100644 index 000000000..effcee624 --- /dev/null +++ b/fedn/fedn/network/clients/__init__.py @@ -0,0 +1,4 @@ +""" The FEDn client package is responsible for executing the federated learning tasks, including ML model training and validation. It's the acting gRPC client for the federated network. +The client first connacts the centralized controller to receive :class:`fedn.network.combiner.Combiner` assingment. 
The client then connects to the combiner and +sends requests to the combiner to receive model updates and send model updates.""" +# flake8: noqa diff --git a/fedn/fedn/client.py b/fedn/fedn/network/clients/client.py similarity index 73% rename from fedn/fedn/client.py rename to fedn/fedn/network/clients/client.py index f1abfc3db..e27616925 100644 --- a/fedn/fedn/client.py +++ b/fedn/fedn/network/clients/client.py @@ -4,7 +4,7 @@ import os import queue import re -import ssl +import socket import sys import tempfile import threading @@ -15,15 +15,15 @@ from io import BytesIO import grpc -from flask import Flask +from cryptography.hazmat.primitives.serialization import Encoding from google.protobuf.json_format import MessageToJson +from OpenSSL import SSL import fedn.common.net.grpc.fedn_pb2 as fedn import fedn.common.net.grpc.fedn_pb2_grpc as rpc -from fedn.clients.client.state import ClientState, ClientStateToString -from fedn.common.control.package import PackageRuntime -from fedn.common.net.connect import ConnectorClient, Status -from fedn.common.net.web.client import page, style +from fedn.network.clients.connect import ConnectorClient, Status +from fedn.network.clients.package import PackageRuntime +from fedn.network.clients.state import ClientState, ClientStateToString from fedn.utils.dispatcher import Dispatcher from fedn.utils.helpers import get_helper from fedn.utils.logger import Logger @@ -42,26 +42,15 @@ def __call__(self, context, callback): class Client: """FEDn Client. Service running on client/datanodes in a federation, - recieving and handling model update and model validation requests. - - Attibutes - --------- - config: dict - A configuration dictionary containing connection information for - the discovery service (controller) and settings governing e.g. - client-combiner assignment behavior. + recieving and handling model update and model validation requests. 
+ :param config: A configuration dictionary containing connection information for the discovery service (controller) + and settings governing e.g. client-combiner assignment behavior. + :type config: dict """ def __init__(self, config): - """ - Parameters - ---------- - config: dict - A configuration dictionary containing connection information for - the discovery service (controller) and settings governing e.g. - client-combiner assignment behavior. - """ + """Initialize the client.""" self.state = None self.error_state = False @@ -110,7 +99,147 @@ def __init__(self, config): self.state = ClientState.idle + def _assign(self): + """Contacts the controller and asks for combiner assignment. + + :return: A configuration dictionary containing connection information for combiner. + :rtype: dict + """ + + print("Asking for assignment!", flush=True) + while True: + status, response = self.connector.assign() + if status == Status.TryAgain: + print(response, flush=True) + time.sleep(5) + continue + if status == Status.Assigned: + client_config = response + break + if status == Status.UnAuthorized: + print(response, flush=True) + sys.exit("Exiting: Unauthorized") + if status == Status.UnMatchedConfig: + print(response, flush=True) + sys.exit("Exiting: UnMatchedConfig") + time.sleep(5) + print(".", end=' ', flush=True) + + print("Got assigned!", flush=True) + print("Received combiner config: {}".format(client_config), flush=True) + return client_config + + def _add_grpc_metadata(self, key, value): + """Add metadata for gRPC calls. + + :param key: The key of the metadata. + :type key: str + :param value: The value of the metadata. 
+ :type value: str + """ + # Check if metadata exists and add if not + if not hasattr(self, 'metadata'): + self.metadata = () + + # Check if metadata key already exists and replace value if so + for i, (k, v) in enumerate(self.metadata): + if k == key: + # Replace value + self.metadata = self.metadata[:i] + ((key, value),) + self.metadata[i + 1:] + return + + # Set metadata using tuple concatenation + self.metadata += ((key, value),) + + def _get_ssl_certificate(self, domain, port=443): + context = SSL.Context(SSL.SSLv23_METHOD) + sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM) + sock.connect((domain, port)) + ssl_sock = SSL.Connection(context, sock) + ssl_sock.set_tlsext_host_name(domain.encode()) + ssl_sock.set_connect_state() + ssl_sock.do_handshake() + cert = ssl_sock.get_peer_certificate() + ssl_sock.close() + sock.close() + cert = cert.to_cryptography().public_bytes(Encoding.PEM).decode() + return cert + + def _connect(self, client_config): + """Connect to assigned combiner. + + :param client_config: A configuration dictionary containing connection information for + the combiner. 
+ :type client_config: dict + """ + + # TODO use the client_config['certificate'] for setting up secure comms' + host = client_config['host'] + # Add host to gRPC metadata + self._add_grpc_metadata('grpc-server', host) + print("CLIENT: Using metadata: {}".format(self.metadata), flush=True) + port = client_config['port'] + secure = False + if client_config['fqdn'] is not None: + host = client_config['fqdn'] + # assuming https if fqdn is used + port = 443 + print(f"CLIENT: Connecting to combiner host: {host}:{port}", flush=True) + + if client_config['certificate']: + print("CLIENT: using certificate from Reducer for GRPC channel") + secure = True + cert = base64.b64decode( + client_config['certificate']) # .decode('utf-8') + credentials = grpc.ssl_channel_credentials(root_certificates=cert) + channel = grpc.secure_channel("{}:{}".format(host, str(port)), credentials) + elif os.getenv("FEDN_GRPC_ROOT_CERT_PATH"): + secure = True + print("CLIENT: using root certificate from environment variable for GRPC channel") + with open(os.environ["FEDN_GRPC_ROOT_CERT_PATH"], 'rb') as f: + credentials = grpc.ssl_channel_credentials(f.read()) + channel = grpc.secure_channel("{}:{}".format(host, str(port)), credentials) + elif self.config['secure']: + secure = True + print("CLIENT: using CA certificate for GRPC channel") + cert = self._get_ssl_certificate(host, port=port) + + credentials = grpc.ssl_channel_credentials(cert.encode('utf-8')) + if self.config['token']: + token = self.config['token'] + auth_creds = grpc.metadata_call_credentials(GrpcAuth(token)) + channel = grpc.secure_channel("{}:{}".format(host, str(port)), grpc.composite_channel_credentials(credentials, auth_creds)) + else: + channel = grpc.secure_channel("{}:{}".format(host, str(port)), credentials) + else: + print("CLIENT: using insecure GRPC channel") + if port == 443: + port = 80 + channel = grpc.insecure_channel("{}:{}".format( + host, + str(port))) + + self.channel = channel + + self.connectorStub = 
rpc.ConnectorStub(channel) + self.combinerStub = rpc.CombinerStub(channel) + self.modelStub = rpc.ModelServiceStub(channel) + + print("Client: {} connected {} to {}:{}".format(self.name, + "SECURED" if secure else "INSECURE", + host, + port), + flush=True) + + print("Client: Using {} compute package.".format( + client_config["package"])) + + def _disconnect(self): + """Disconnect from the combiner.""" + self.channel.close() + def _detach(self): + """Detach from the FEDn network (disconnect from combiner)""" # Setting _attached to False will make all processing threads return if not self._attached: print("Client is not attached.", flush=True) @@ -120,7 +249,7 @@ def _detach(self): self._disconnect() def _attach(self): - """ """ + """Attach to the FEDn network (connect to combiner)""" # Ask controller for a combiner and connect to that combiner. if self._attached: print("Client is already attached. ", flush=True) @@ -134,13 +263,24 @@ def _attach(self): return client_config def _initialize_helper(self, client_config): + """Initialize the helper class for the client. + + :param client_config: A configuration dictionary containing connection information for + | the discovery service (controller) and settings governing e.g. + | client-combiner assignment behavior. + :type client_config: dict + :return: + """ - if 'model_type' in client_config.keys(): - self.helper = get_helper(client_config['model_type']) + if 'helper_type' in client_config.keys(): + self.helper = get_helper(client_config['helper_type']) def _subscribe_to_combiner(self, config): """Listen to combiner message stream and start all processing threads. + :param config: A configuration dictionary containing connection information for + | the discovery service (controller) and settings governing e.g. + | client-combiner assignment behavior. """ # Start sending heartbeats to the combiner. 
@@ -160,7 +300,14 @@ def _subscribe_to_combiner(self, config): threading.Thread(target=self.process_request, daemon=True).start() def _initialize_dispatcher(self, config): - """ """ + """ Initialize the dispatcher for the client. + + :param config: A configuration dictionary containing connection information for + | the discovery service (controller) and settings governing e.g. + | client-combiner assignment behavior. + :type config: dict + :return: + """ if config['remote_compute_context']: pr = PackageRuntime(os.getcwd(), os.getcwd()) @@ -214,117 +361,18 @@ def _initialize_dispatcher(self, config): copy_tree(from_path, self.run_path) self.dispatcher = Dispatcher(dispatch_config, self.run_path) - def _assign(self): - """Contacts the controller and asks for combiner assignment. """ - - print("Asking for assignment!", flush=True) - while True: - status, response = self.connector.assign() - if status == Status.TryAgain: - print(response, flush=True) - time.sleep(5) - continue - if status == Status.Assigned: - client_config = response - break - if status == Status.UnAuthorized: - print(response, flush=True) - sys.exit("Exiting: Unauthorized") - if status == Status.UnMatchedConfig: - print(response, flush=True) - sys.exit("Exiting: UnMatchedConfig") - time.sleep(5) - print(".", end=' ', flush=True) - - print("Got assigned!", flush=True) - return client_config - - def _connect(self, client_config): - """Connect to assigned combiner. - - Parameters - ---------- - client_config : dict - A dictionary with connection information and settings - for the assigned combiner. 
- - """ - - # TODO use the client_config['certificate'] for setting up secure comms' - host = client_config['host'] - port = client_config['port'] - secure = False - if client_config['fqdn'] != "None": - host = client_config['fqdn'] - # assuming https if fqdn is used - port = 443 - print(f"CLIENT: Connecting to combiner host: {host}:{port}", flush=True) - - if client_config['certificate']: - print("CLIENT: using certificate from Reducer for GRPC channel") - secure = True - cert = base64.b64decode( - client_config['certificate']) # .decode('utf-8') - credentials = grpc.ssl_channel_credentials(root_certificates=cert) - channel = grpc.secure_channel("{}:{}".format(host, str(port)), credentials) - elif os.getenv("FEDN_GRPC_ROOT_CERT_PATH"): - secure = True - print("CLIENT: using root certificate from environment variable for GRPC channel") - with open(os.environ["FEDN_GRPC_ROOT_CERT_PATH"], 'rb') as f: - credentials = grpc.ssl_channel_credentials(f.read()) - channel = grpc.secure_channel("{}:{}".format(host, str(port)), credentials) - elif self.config['secure']: - secure = True - print("CLIENT: using CA certificate for GRPC channel") - cert = ssl.get_server_certificate((host, port)) - - credentials = grpc.ssl_channel_credentials(cert.encode('utf-8')) - if self.config['token']: - token = self.config['token'] - auth_creds = grpc.metadata_call_credentials(GrpcAuth(token)) - channel = grpc.secure_channel("{}:{}".format(host, str(port)), grpc.composite_channel_credentials(credentials, auth_creds)) - else: - channel = grpc.secure_channel("{}:{}".format(host, str(port)), credentials) - else: - print("CLIENT: using insecure GRPC channel") - if port == 443: - port = 80 - channel = grpc.insecure_channel("{}:{}".format( - host, - str(port))) - - self.channel = channel - - self.connection = rpc.ConnectorStub(channel) - self.orchestrator = rpc.CombinerStub(channel) - self.models = rpc.ModelServiceStub(channel) - - print("Client: {} connected {} to {}:{}".format(self.name, - 
"SECURED" if secure else "INSECURE", - host, - port), - flush=True) - - print("Client: Using {} compute package.".format( - client_config["package"])) - - def _disconnect(self): - self.channel.close() - def get_model(self, id): """Fetch a model from the assigned combiner. + Downloads the model update object via a gRPC streaming channel. - Downloads the model update object via a gRPC streaming channel, Dowload. - - Parameters - ---------- - id : str - The id of the model update object. - + :param id: The id of the model update object. + :type id: str + :return: The model update object. + :rtype: BytesIO """ data = BytesIO() - for part in self.models.Download(fedn.ModelRequest(id=id)): + for part in self.modelStub.Download(fedn.ModelRequest(id=id), metadata=self.metadata): if part.status == fedn.ModelStatus.IN_PROGRESS: data.write(part.data) @@ -339,15 +387,14 @@ def get_model(self, id): def set_model(self, model, id): """Send a model update to the assigned combiner. - Uploads the model updated object via a gRPC streaming channel, Upload. - Parameters - ---------- - model : BytesIO, object - The model update object. - id : str - The id of the model update object. + :param model: The model update object. + :type model: BytesIO + :param id: The id of the model update object. + :type id: str + :return: The model update object. + :rtype: BytesIO """ if not isinstance(model, BytesIO): bt = BytesIO() @@ -360,9 +407,12 @@ def set_model(self, model, id): bt.seek(0, 0) def upload_request_generator(mdl): - """ + """Generator function for model upload requests. - :param mdl: + :param mdl: The model update object. + :type mdl: BytesIO + :return: A model update request. 
+ :rtype: fedn.ModelRequest """ while True: b = mdl.read(CHUNK_SIZE) @@ -377,21 +427,26 @@ def upload_request_generator(mdl): if not b: break - result = self.models.Upload(upload_request_generator(bt)) + result = self.modelStub.Upload(upload_request_generator(bt), metadata=self.metadata) return result def _listen_to_model_update_request_stream(self): - """Subscribe to the model update request stream. """ + """Subscribe to the model update request stream. + + :return: None + :rtype: None + """ r = fedn.ClientAvailableMessage() r.sender.name = self.name r.sender.role = fedn.WORKER - metadata = [('client', r.sender.name)] + # Add client to metadata + self._add_grpc_metadata('client', self.name) while True: try: - for request in self.orchestrator.ModelUpdateRequestStream(r, metadata=metadata): + for request in self.combinerStub.ModelUpdateRequestStream(r, metadata=self.metadata): if request.sender.role == fedn.COMBINER: # Process training request self._send_status("Received model update request.", log_level=fedn.Status.AUDIT, @@ -406,8 +461,6 @@ def _listen_to_model_update_request_stream(self): except grpc.RpcError: # TODO: make configurable timeout = 5 - # print("CLIENT __listen_to_model_update_request_stream: GRPC ERROR {} retrying in {}..".format( - # status_code.name, timeout), flush=True) time.sleep(timeout) except Exception: raise @@ -416,14 +469,18 @@ def _listen_to_model_update_request_stream(self): return def _listen_to_model_validation_request_stream(self): - """Subscribe to the model validation request stream. """ + """Subscribe to the model validation request stream. 
+ + :return: None + :rtype: None + """ r = fedn.ClientAvailableMessage() r.sender.name = self.name r.sender.role = fedn.WORKER while True: try: - for request in self.orchestrator.ModelValidationRequestStream(r): + for request in self.combinerStub.ModelValidationRequestStream(r, metadata=self.metadata): # Process validation request _ = request.model_id self._send_status("Recieved model validation request.", log_level=fedn.Status.AUDIT, @@ -433,8 +490,6 @@ def _listen_to_model_validation_request_stream(self): except grpc.RpcError: # TODO: make configurable timeout = 5 - # print("CLIENT __listen_to_model_validation_request_stream: GRPC ERROR {} retrying in {}..".format( - # status_code.name, timeout), flush=True) time.sleep(timeout) except Exception: raise @@ -442,86 +497,13 @@ def _listen_to_model_validation_request_stream(self): if not self._attached: return - def process_request(self): - """Process training and validation tasks. """ - while True: - - if not self._attached: - return - - try: - (task_type, request) = self.inbox.get(timeout=1.0) - if task_type == 'train': - - tic = time.time() - self.state = ClientState.training - model_id, meta = self._process_training_request( - request.model_id) - processing_time = time.time()-tic - meta['processing_time'] = processing_time - - if model_id is not None: - # Notify the combiner that a model update is available - update = fedn.ModelUpdate() - update.sender.name = self.name - update.sender.role = fedn.WORKER - update.receiver.name = request.sender.name - update.receiver.role = request.sender.role - update.model_id = request.model_id - update.model_update_id = str(model_id) - update.timestamp = str(datetime.now()) - update.correlation_id = request.correlation_id - update.meta = json.dumps(meta) - # TODO: Check responses - _ = self.orchestrator.SendModelUpdate(update) - self._send_status("Model update completed.", log_level=fedn.Status.AUDIT, - type=fedn.StatusType.MODEL_UPDATE, request=update) - - else: - 
self._send_status("Client {} failed to complete model update.", - log_level=fedn.Status.WARNING, - request=request) - self.state = ClientState.idle - self.inbox.task_done() - - elif task_type == 'validate': - self.state = ClientState.validating - metrics = self._process_validation_request( - request.model_id) - - if metrics is not None: - # Send validation - validation = fedn.ModelValidation() - validation.sender.name = self.name - validation.sender.role = fedn.WORKER - validation.receiver.name = request.sender.name - validation.receiver.role = request.sender.role - validation.model_id = str(request.model_id) - validation.data = json.dumps(metrics) - self.str = str(datetime.now()) - validation.timestamp = self.str - validation.correlation_id = request.correlation_id - _ = self.orchestrator.SendModelValidation( - validation) - self._send_status("Model validation completed.", log_level=fedn.Status.AUDIT, - type=fedn.StatusType.MODEL_VALIDATION, request=validation) - else: - self._send_status("Client {} failed to complete model validation.".format(self.name), - log_level=fedn.Status.WARNING, request=request) - - self.state = ClientState.idle - self.inbox.task_done() - except queue.Empty: - pass - def _process_training_request(self, model_id): """Process a training (model update) request. - Parameters - ---------- - model_id : Str - The id of the model to update. - + :param model_id: The model id of the model to be updated. + :type model_id: str + :return: The model id of the updated model, or None if the update failed. And a dict with metadata. 
+ :rtype: tuple """ self._send_status( @@ -554,8 +536,14 @@ def _process_training_request(self, model_id): self.set_model(out_model, str(updated_model_id)) meta['upload_model'] = time.time() - tic + # Read the metadata file + with open(outpath+'-metadata', 'r') as fh: + training_metadata = json.loads(fh.read()) + meta['training_metadata'] = training_metadata + os.unlink(inpath) os.unlink(outpath) + os.unlink(outpath+'-metadata') except Exception as e: print("ERROR could not process training request due to error: {}".format( @@ -567,9 +555,24 @@ def _process_training_request(self, model_id): return updated_model_id, meta - def _process_validation_request(self, model_id): + def _process_validation_request(self, model_id, is_inference): + """Process a validation request. + + :param model_id: The model id of the model to be validated. + :type model_id: str + :param is_inference: True if the validation is an inference request, False if it is a validation request. + :type is_inference: bool + :return: The validation metrics, or None if validation failed. + :rtype: dict + """ + # Figure out cmd + if is_inference: + cmd = 'infer' + else: + cmd = 'validate' + self._send_status( - "Processing validation request for model_id {}".format(model_id)) + f"Processing {cmd} request for model_id {model_id}") self.state = ClientState.validating try: model = self.get_model(str(model_id)) @@ -579,7 +582,7 @@ def _process_validation_request(self, model_id): fh.write(model.getbuffer()) _, outpath = tempfile.mkstemp() - self.dispatcher.run_cmd("validate {} {}".format(inpath, outpath)) + self.dispatcher.run_cmd(f"{cmd} {inpath} {outpath}") with open(outpath, "r") as fh: validation = json.loads(fh.read()) @@ -596,10 +599,88 @@ def _process_validation_request(self, model_id): self.state = ClientState.idle return validation - def _handle_combiner_failure(self): - """ Register failed combiner connection. + def process_request(self): + """Process training and validation tasks. 
""" + while True: - """ + if not self._attached: + return + + try: + (task_type, request) = self.inbox.get(timeout=1.0) + if task_type == 'train': + + tic = time.time() + self.state = ClientState.training + model_id, meta = self._process_training_request( + request.model_id) + processing_time = time.time()-tic + meta['processing_time'] = processing_time + meta['config'] = request.data + + if model_id is not None: + # Send model update to combiner + update = fedn.ModelUpdate() + update.sender.name = self.name + update.sender.role = fedn.WORKER + update.receiver.name = request.sender.name + update.receiver.role = request.sender.role + update.model_id = request.model_id + update.model_update_id = str(model_id) + update.timestamp = str(datetime.now()) + update.correlation_id = request.correlation_id + update.meta = json.dumps(meta) + # TODO: Check responses + _ = self.combinerStub.SendModelUpdate(update, metadata=self.metadata) + self._send_status("Model update completed.", log_level=fedn.Status.AUDIT, + type=fedn.StatusType.MODEL_UPDATE, request=update) + + else: + self._send_status("Client {} failed to complete model update.", + log_level=fedn.Status.WARNING, + request=request) + self.state = ClientState.idle + self.inbox.task_done() + + elif task_type == 'validate': + self.state = ClientState.validating + metrics = self._process_validation_request( + request.model_id, request.is_inference) + + if metrics is not None: + # Send validation + validation = fedn.ModelValidation() + validation.sender.name = self.name + validation.sender.role = fedn.WORKER + validation.receiver.name = request.sender.name + validation.receiver.role = request.sender.role + validation.model_id = str(request.model_id) + validation.data = json.dumps(metrics) + self.str = str(datetime.now()) + validation.timestamp = self.str + validation.correlation_id = request.correlation_id + _ = self.combinerStub.SendModelValidation( + validation, metadata=self.metadata) + + # Set status type + if 
request.is_inference: + status_type = fedn.StatusType.INFERENCE + else: + status_type = fedn.StatusType.MODEL_VALIDATION + + self._send_status("Model validation completed.", log_level=fedn.Status.AUDIT, + type=status_type, request=validation) + else: + self._send_status("Client {} failed to complete model validation.".format(self.name), + log_level=fedn.Status.WARNING, request=request) + + self.state = ClientState.idle + self.inbox.task_done() + except queue.Empty: + pass + + def _handle_combiner_failure(self): + """ Register failed combiner connection.""" self._missed_heartbeat += 1 if self._missed_heartbeat > self.config['reconnect_after_missed_heartbeat']: self._detach() @@ -607,18 +688,16 @@ def _handle_combiner_failure(self): def _send_heartbeat(self, update_frequency=2.0): """Send a heartbeat to the combiner. - Parameters - ---------- - update_frequency : float - The interval in seconds between heartbeat messages. - + :param update_frequency: The frequency of the heartbeat in seconds. + :type update_frequency: float + :return: None if the client is detached. + :rtype: None """ - while True: heartbeat = fedn.Heartbeat(sender=fedn.Client( name=self.name, role=fedn.WORKER)) try: - self.connection.SendHeartbeat(heartbeat) + self.connectorStub.SendHeartbeat(heartbeat, metadata=self.metadata) self._missed_heartbeat = 0 except grpc.RpcError as e: status_code = e.code() @@ -631,7 +710,17 @@ def _send_heartbeat(self, update_frequency=2.0): return def _send_status(self, msg, log_level=fedn.Status.INFO, type=None, request=None): - """Send status message. """ + """Send status message. + + :param msg: The message to send. + :type msg: str + :param log_level: The log level of the message. + :type log_level: fedn.Status.INFO, fedn.Status.WARNING, fedn.Status.ERROR + :param type: The type of the message. + :type type: str + :param request: The request message. 
+ :type request: fedn.Request + """ status = fedn.Status() status.timestamp = str(datetime.now()) status.sender.name = self.name @@ -647,36 +736,10 @@ def _send_status(self, msg, log_level=fedn.Status.INFO, type=None, request=None) self.logs.append( "{} {} LOG LEVEL {} MESSAGE {}".format(str(datetime.now()), status.sender.name, status.log_level, status.status)) - _ = self.connection.SendStatus(status) - - def run_web(self): - """Starts a local logging UI (Flask app) serving on port 8080. - - Currently not in use as default. - - """ - app = Flask(__name__) - - @ app.route('/') - def index(): - """ - - :return: - """ - logs_fancy = str() - for log in self.logs: - logs_fancy += "

" + log + "

\n" - - return page.format(client=self.name, state=ClientStateToString(self.state), style=style, logs=logs_fancy) - - self._original_stdout = sys.stdout - sys.stdout = open(os.devnull, 'w') - app.run(host="0.0.0.0", port="8080") - sys.stdout.close() - sys.stdout = self._original_stdout + _ = self.connectorStub.SendStatus(status, metadata=self.metadata) def run(self): - """ Main run loop. """ + """ Run the client. """ try: cnt = 0 old_state = self.state diff --git a/fedn/fedn/network/clients/connect.py b/fedn/fedn/network/clients/connect.py new file mode 100644 index 000000000..2f8acfa8d --- /dev/null +++ b/fedn/fedn/network/clients/connect.py @@ -0,0 +1,116 @@ +# This file contains the Connector class for assigning client to the FEDn network via the discovery service (REST-API). +# The Connector class is used by the Client class in fedn/network/clients/client.py. +# Once assigned, the client will retrieve combiner assignment from the discovery service. +# The discovery service will also add the client to the statestore. +# +# +import enum + +import requests + + +class Status(enum.Enum): + """ Enum for representing the status of a client assignment.""" + Unassigned = 0 + Assigned = 1 + TryAgain = 2 + UnAuthorized = 3 + UnMatchedConfig = 4 + + +class ConnectorClient: + """ Connector for assigning client to a combiner in the FEDn network. 
+ + :param host: host of discovery service + :type host: str + :param port: port of discovery service + :type port: int + :param token: token for authentication + :type token: str + :param name: name of client + :type name: str + :param remote_package: True if remote package is used, False if local + :type remote_package: bool + :param force_ssl: True if https is used, False if http + :type force_ssl: bool + :param verify: True if certificate is verified, False if not + :type verify: bool + :param combiner: name of preferred combiner + :type combiner: str + :param id: id of client + """ + + def __init__(self, host, port, token, name, remote_package, force_ssl=False, verify=False, combiner=None, id=None): + + self.host = host + self.port = port + self.token = token + self.name = name + self.verify = verify + self.preferred_combiner = combiner + self.id = id + self.package = 'remote' if remote_package else 'local' + + # for https we assume a an ingress handles permanent redirect (308) + if force_ssl: + self.prefix = "https://" + else: + self.prefix = "http://" + if self.port: + self.connect_string = "{}{}:{}".format( + self.prefix, self.host, self.port) + else: + self.connect_string = "{}{}".format( + self.prefix, self.host) + + print("\n\nsetting the connection string to {}\n\n".format( + self.connect_string), flush=True) + + def assign(self): + """ + Connect client to FEDn network discovery service, ask for combiner assignment. + + :return: Tuple with assingment status, combiner connection information if sucessful, else None. 
+ :rtype: tuple(:class:`fedn.network.clients.connect.Status`, str) + """ + try: + retval = None + payload = {'client_id': self.name, 'preferred_combiner': self.preferred_combiner} + + retval = requests.post(self.connect_string + '/add_client', + json=payload, + verify=self.verify, + allow_redirects=True, + headers={'Authorization': 'Token {}'.format(self.token)}) + except Exception as e: + print('***** {}'.format(e), flush=True) + return Status.Unassigned, {} + + if retval.status_code == 400: + # Get error messange from response + reason = retval.json()['message'] + return Status.UnMatchedConfig, reason + + if retval.status_code == 401: + reason = "Unauthorized connection to reducer, make sure the correct token is set" + return Status.UnAuthorized, reason + + if retval.status_code >= 200 and retval.status_code < 204: + if retval.json()['status'] == 'retry': + if 'message' in retval.json(): + reason = retval.json()['message'] + else: + reason = "Reducer was not ready. Try again later." + + return Status.TryAgain, reason + + reducer_package = retval.json()['package'] + if reducer_package != self.package: + reason = "Unmatched config of compute package between client and reducer.\n" +\ + "Reducer uses {} package and client uses {}.".format( + reducer_package, self.package) + return Status.UnMatchedConfig, reason + + return Status.Assigned, retval.json() + + return Status.Unassigned, None diff --git a/fedn/fedn/common/control/package.py b/fedn/fedn/network/clients/package.py similarity index 54% rename from fedn/fedn/common/control/package.py rename to fedn/fedn/network/clients/package.py index b7f3a3471..d6c91ccba 100644 --- a/fedn/fedn/common/control/package.py +++ b/fedn/fedn/network/clients/package.py @@ -1,5 +1,7 @@ +# This file contains the PackageRuntime class, which is used to download, validate and unpack compute packages. 
+# +# import cgi -import hashlib import os import tarfile from distutils.dir_util import copy_tree @@ -11,91 +13,13 @@ from fedn.utils.dispatcher import Dispatcher -class Package: - """ - - """ - - def __init__(self, config): - self.config = config - self.name = config['name'] - self.cwd = config['cwd'] - if 'port' in config: - self.reducer_port = config['port'] - if 'host' in config: - self.reducer_host = config['host'] - if 'token' in config: - self.reducer_token = config['token'] - - self.package_file = None - self.file_path = None - self.package_hash = None - - def package(self, validate=False): - """ - - :param validate: - :return: - """ - # check config - package_file = '{name}.tar.gz'.format(name=self.name) - - # package the file - cwd = os.getcwd() - self.file_path = os.getcwd() - if self.config['cwd'] == '': - self.file_path = os.getcwd() - os.chdir(self.file_path) - with tarfile.open(os.path.join(os.path.dirname(self.file_path), package_file), 'w:gz') as tf: - # for walking the current dir with absolute path (in archive) - # for root, dirs, files in os.walk(self.file_path): - # for file in files: - # tf.add(os.path.join(root, file)) - # for walking the current dir - for file in os.listdir(self.file_path): - tf.add(file) - tf.close() - - hsh = hashlib.sha256() - with open(os.path.join(os.path.dirname(self.file_path), package_file), 'rb') as f: - for byte_block in iter(lambda: f.read(4096), b""): - hsh.update(byte_block) - - os.chdir(cwd) - self.package_file = package_file - self.package_hash = hsh.hexdigest() - - return package_file, hsh.hexdigest() - - def upload(self): - """ - - """ - if self.package_file: - # data = {'name': self.package_file, 'hash': str(self.package_hash)} - # print("going to send {}".format(data),flush=True) - f = open(os.path.join(os.path.dirname( - self.file_path), self.package_file), 'rb') - print("Sending the following file {}".format(f.read()), flush=True) - f.seek(0, 0) - files = {'file': f} - try: - 
requests.post('https://{}:{}/context'.format(self.reducer_host, self.reducer_port), - verify=False, files=files, - # data=data, - headers={'Authorization': 'Token {}'.format(self.reducer_token)}) - except Exception as e: - print("failed to put execution context to reducer. {}".format( - e), flush=True) - finally: - f.close() - - print("Upload 4 ", flush=True) - - class PackageRuntime: - """ + """ PackageRuntime is used to download, validate and unpack compute packages. + :param package_path: path to compute package + :type package_path: str + :param package_dir: directory to unpack compute package + :type package_dir: str """ def __init__(self, package_path, package_dir): @@ -112,14 +36,14 @@ def __init__(self, package_path, package_dir): self.expected_checksum = None def download(self, host, port, token, force_ssl=False, secure=False, name=None): - """ - Download compute package from controller - - :param host: - :param port: - :param token: - :param name: - :return: + """ Download compute package from controller + + :param host: host of controller + :param port: port of controller + :param token: token for authentication + :param name: name of package + :return: True if download was successful, None otherwise + :rtype: bool """ # for https we assume a an ingress handles permanent redirect (308) if force_ssl: @@ -127,9 +51,9 @@ def download(self, host, port, token, force_ssl=False, secure=False, name=None): else: scheme = "http" if port: - path = f"{scheme}://{host}:{port}/context" + path = f"{scheme}://{host}:{port}/download_package" else: - path = f"{scheme}://{host}/context" + path = f"{scheme}://{host}/download_package" if name: path = path + "?name={}".format(name) @@ -148,9 +72,9 @@ def download(self, host, port, token, force_ssl=False, secure=False, name=None): for chunk in r.iter_content(chunk_size=8192): f.write(chunk) if port: - path = f"{scheme}://{host}:{port}/checksum" + path = f"{scheme}://{host}:{port}/get_package_checksum" else: - path = 
f"{scheme}://{host}/checksum" + path = f"{scheme}://{host}/get_package_checksum" if name: path = path + "?name={}".format(name) @@ -166,22 +90,17 @@ def download(self, host, port, token, force_ssl=False, secure=False, name=None): return True def validate(self, expected_checksum): - """ + """ Validate the package against the checksum provided by the controller - :param expected_checksum: - :return: + :param expected_checksum: checksum provided by the controller + :return: True if checksums match, False otherwise + :rtype: bool """ self.expected_checksum = expected_checksum # crosscheck checksum and unpack if security checks are ok. - # print("check if checksum {} is equal to checksum expected {}".format(self.checksum,self.expected_checksum)) file_checksum = str(sha(os.path.join(self.pkg_path, self.pkg_name))) - # catched by client, make configurable by governance network! - # if self.expected_checksum is None: - # print("CAUTION: Package validation turned OFF on client", flush=True) - # return True - if self.checksum == self.expected_checksum == file_checksum: print("Package validated {}".format(self.checksum)) return True @@ -189,8 +108,10 @@ def validate(self, expected_checksum): return False def unpack(self): - """ + """ Unpack the compute package + :return: True if unpacking was successful, False otherwise + :rtype: bool """ if self.pkg_name: f = None @@ -205,7 +126,10 @@ def unpack(self): self.pkg_path, self.pkg_name), 'r:bz2') else: print( - "Failed to unpack compute package, no pkg_name set. Has the reducer been configured with a compute package?") + "Failed to unpack compute package, no pkg_name set." + "Has the reducer been configured with a compute package?" 
+ ) + return False os.getcwd() try: @@ -215,14 +139,18 @@ def unpack(self): f.extractall() print("Successfully extracted compute package content in {}".format( self.dir), flush=True) + return True except Exception: print("Error extracting files!") + return False def dispatcher(self, run_path): - """ + """ Dispatch the compute package - :param run_path: - :return: + :param run_path: path to dispatch the compute package + :type run_path: str + :return: Dispatcher object + :rtype: :class:`fedn.utils.dispatcher.Dispatcher` """ from_path = os.path.join(os.getcwd(), 'client') diff --git a/fedn/fedn/clients/client/state.py b/fedn/fedn/network/clients/state.py similarity index 55% rename from fedn/fedn/clients/client/state.py rename to fedn/fedn/network/clients/state.py index bb30e41a2..262f5862e 100644 --- a/fedn/fedn/clients/client/state.py +++ b/fedn/fedn/network/clients/state.py @@ -2,16 +2,19 @@ class ClientState(Enum): + """ Enum for representing the state of a client.""" idle = 1 training = 2 validating = 3 def ClientStateToString(state): - """ + """ Convert a ClientState to a string representation. 
- :param state: - :return: + :param state: the state to convert + :type state: :class:`fedn.network.clients.state.ClientState` + :return: string representation of the state + :rtype: str """ if state == ClientState.idle: return "IDLE" diff --git a/fedn/fedn/network/clients/test_client.py b/fedn/fedn/network/clients/test_client.py new file mode 100644 index 000000000..889c00b94 --- /dev/null +++ b/fedn/fedn/network/clients/test_client.py @@ -0,0 +1,45 @@ +import unittest + +from fedn.network.clients.client import Client + + +class TestClient(unittest.TestCase): + """Test the Client class.""" + + def setUp(self): + self.client = Client() + + def test_add_grpc_metadata(self): + """Test the _add_grpc_metadata method.""" + + # Test adding metadata when it doesn't exist + self.client._add_grpc_metadata('key1', 'value1') + self.assertEqual(self.client.metadata, (('key1', 'value1'),)) + + # Test adding metadata when it already exists + self.client._add_grpc_metadata('key1', 'value2') + self.assertEqual(self.client.metadata, (('key1', 'value2'),)) + + # Test adding multiple metadata + self.client._add_grpc_metadata('key2', 'value3') + self.assertEqual(self.client.metadata, (('key1', 'value2'), ('key2', 'value3'))) + + # Test adding metadata with special characters + self.client._add_grpc_metadata('key3', 'value4!@#$%^&*()') + self.assertEqual(self.client.metadata, (('key1', 'value2'), ('key2', 'value3'), ('key3', 'value4!@#$%^&*()'))) + + # Test adding metadata with empty key + with self.assertRaises(ValueError): + self.client._add_grpc_metadata('', 'value5') + + # Test adding metadata with empty value + with self.assertRaises(ValueError): + self.client._add_grpc_metadata('key4', '') + + # Test adding metadata with None value + with self.assertRaises(ValueError): + self.client._add_grpc_metadata('key5', None) + + +if __name__ == '__main__': + unittest.main() diff --git a/fedn/fedn/network/combiner/__init__.py b/fedn/fedn/network/combiner/__init__.py new file mode 100644 
index 000000000..462f91393 --- /dev/null +++ b/fedn/fedn/network/combiner/__init__.py @@ -0,0 +1 @@ +""" The FEDn Combiner package is responsible for combining models from multiple clients. It's the acting gRPC server for the federated network.""" diff --git a/fedn/fedn/network/combiner/aggregators/__init__.py b/fedn/fedn/network/combiner/aggregators/__init__.py new file mode 100644 index 000000000..cb7ee83d6 --- /dev/null +++ b/fedn/fedn/network/combiner/aggregators/__init__.py @@ -0,0 +1,3 @@ +""" The aggregator package is responsible for aggregating models from multiple clients. It's called both in :class:`fedn.network.combiner.Combiner` and :class:`fedn.network.controller.Controller` +to aggregate models from clients. """ +# flake8: noqa diff --git a/fedn/fedn/network/combiner/aggregators/aggregatorbase.py b/fedn/fedn/network/combiner/aggregators/aggregatorbase.py new file mode 100644 index 000000000..bcbb699e2 --- /dev/null +++ b/fedn/fedn/network/combiner/aggregators/aggregatorbase.py @@ -0,0 +1,138 @@ +import importlib +import json +import queue +from abc import ABC, abstractmethod + +import fedn.common.net.grpc.fedn_pb2 as fedn + +AGGREGATOR_PLUGIN_PATH = "fedn.network.combiner.aggregators.{}" + + +class AggregatorBase(ABC): + """ Abstract class defining an aggregator. 
+ + :param id: A reference to id of :class: `fedn.network.combiner.Combiner` + :type id: str + :param storage: Model repository for :class: `fedn.network.combiner.Combiner` + :type storage: class: `fedn.common.storage.s3.s3repo.S3ModelRepository` + :param server: A handle to the Combiner class :class: `fedn.network.combiner.Combiner` + :type server: class: `fedn.network.combiner.Combiner` + :param modelservice: A handle to the model service :class: `fedn.network.combiner.modelservice.ModelService` + :type modelservice: class: `fedn.network.combiner.modelservice.ModelService` + :param control: A handle to the :class: `fedn.network.combiner.round.RoundController` + :type control: class: `fedn.network.combiner.round.RoundController` + """ + + @abstractmethod + def __init__(self, storage, server, modelservice, control): + """ Initialize the aggregator.""" + self.name = self.__class__.__name__ + self.storage = storage + self.server = server + self.modelservice = modelservice + self.control = control + self.model_updates = queue.Queue() + + @abstractmethod + def combine_models(self, nr_expected_models=None, nr_required_models=1, helper=None, timeout=180, delete_models=True): + """Routine for combining model updates. Implemented in subclass. + + :param nr_expected_models: Number of expected models. If None, wait for all models. + :type nr_expected_models: int + :param nr_required_models: Number of required models to combine. + :type nr_required_models: int + :param helper: A helper object. + :type helper: :class: `fedn.utils.plugins.helperbase.HelperBase` + :param timeout: Timeout in seconds to wait for models to be combined. + :type timeout: int + :param delete_models: Delete client models after combining. + :type delete_models: bool + :return: A combined model. + """ + pass + + def on_model_update(self, model_update): + """Callback when a new client model update is recieved. + Performs (optional) pre-processing and then puts the update id + on the aggregation queue. 
Override in subclass as needed. + + :param model_update: A ModelUpdate message. + :type model_id: str + """ + try: + self.server.report_status("AGGREGATOR({}): callback received model update {}".format(self.name, model_update.model_update_id), + log_level=fedn.Status.INFO) + + # Validate the update and metadata + valid_update = self._validate_model_update(model_update) + if valid_update: + # Push the model update to the processing queue + self.model_updates.put(model_update) + else: + self.server.report_status("AGGREGATOR({}): Invalid model update, skipping.".format(self.name)) + except Exception as e: + self.server.report_status("AGGREGATOR({}): Failed to receive model update! {}".format(self.name, e), + log_level=fedn.Status.WARNING) + pass + + def _validate_model_update(self, model_update): + """ Validate the model update. + + :param model_update: A ModelUpdate message. + :type model_update: object + :return: True if the model update is valid, False otherwise. + :rtype: bool + """ + # TODO: Validate the metadata to check that it contains all variables assumed by the aggregator. + data = json.loads(model_update.meta)['training_metadata'] + if 'num_examples' not in data.keys(): + self.server.report_status("AGGREGATOR({}): Model validation failed, num_examples missing in metadata.".format(self.name)) + return False + return True + + def next_model_update(self, helper): + """ Get the next model update from the queue. + + :param helper: A helper object. + :type helper: object + :return: A tuple containing the model update, metadata and model id. 
+ :rtype: tuple + """ + model_update = self.model_updates.get(block=False) + model_id = model_update.model_update_id + model_next = self.control.load_model_update(helper, model_id) + # Get relevant metadata + data = json.loads(model_update.meta)['training_metadata'] + config = json.loads(json.loads(model_update.meta)['config']) + data['round_id'] = config['round_id'] + + return model_next, data, model_id + + def get_state(self): + """ Get the state of the aggregator's queue, including the number of model updates.""" + state = { + 'queue_len': self.model_updates.qsize() + + } + return state + + +def get_aggregator(aggregator_module_name, storage, server, modelservice, control): + """ Return an instance of the helper class. + + :param helper_module_name: The name of the helper plugin module. + :type helper_module_name: str + :param storage: Model repository for :class: `fedn.network.combiner.Combiner` + :type storage: class: `fedn.common.storage.s3.s3repo.S3ModelRepository` + :param server: A handle to the Combiner class :class: `fedn.network.combiner.Combiner` + :type server: class: `fedn.network.combiner.Combiner` + :param modelservice: A handle to the model service :class: `fedn.network.combiner.modelservice.ModelService` + :type modelservice: class: `fedn.network.combiner.modelservice.ModelService` + :param control: A handle to the :class: `fedn.network.combiner.round.RoundController` + :type control: class: `fedn.network.combiner.round.RoundController` + :return: An aggregator instance. 
+ :rtype: class: `fedn.combiner.aggregators.AggregatorBase` + """ + aggregator_plugin = AGGREGATOR_PLUGIN_PATH.format(aggregator_module_name) + aggregator = importlib.import_module(aggregator_plugin) + return aggregator.Aggregator(storage, server, modelservice, control) diff --git a/fedn/fedn/network/combiner/aggregators/fedavg.py b/fedn/fedn/network/combiner/aggregators/fedavg.py new file mode 100644 index 000000000..0cd15b66a --- /dev/null +++ b/fedn/fedn/network/combiner/aggregators/fedavg.py @@ -0,0 +1,88 @@ +import fedn.common.net.grpc.fedn_pb2 as fedn +from fedn.network.combiner.aggregators.aggregatorbase import AggregatorBase + + +class Aggregator(AggregatorBase): + """ Local SGD / Federated Averaging (FedAvg) aggregator. Computes a weighted mean + of parameter updates. + + :param id: A reference to id of :class: `fedn.network.combiner.Combiner` + :type id: str + :param storage: Model repository for :class: `fedn.network.combiner.Combiner` + :type storage: class: `fedn.common.storage.s3.s3repo.S3ModelRepository` + :param server: A handle to the Combiner class :class: `fedn.network.combiner.Combiner` + :type server: class: `fedn.network.combiner.Combiner` + :param modelservice: A handle to the model service :class: `fedn.network.combiner.modelservice.ModelService` + :type modelservice: class: `fedn.network.combiner.modelservice.ModelService` + :param control: A handle to the :class: `fedn.network.combiner.round.RoundController` + :type control: class: `fedn.network.combiner.round.RoundController` + + """ + + def __init__(self, storage, server, modelservice, control): + """Constructor method""" + + super().__init__(storage, server, modelservice, control) + + self.name = "fedavg" + + def combine_models(self, helper=None, time_window=180, max_nr_models=100, delete_models=True): + """Aggregate model updates in the queue by computing an incremental + weighted average of parameters. 
+ + :param helper: An instance of :class: `fedn.utils.helpers.HelperBase`, ML framework specific helper, defaults to None + :type helper: class: `fedn.utils.helpers.HelperBase`, optional + :param time_window: The time window for model aggregation, defaults to 180 + :type time_window: int, optional + :param max_nr_models: The maximum number of updates aggregated, defaults to 100 + :type max_nr_models: int, optional + :param delete_models: Delete models from storage after aggregation, defaults to True + :type delete_models: bool, optional + :return: The global model and metadata + :rtype: tuple + """ + + data = {} + data['time_model_load'] = 0.0 + data['time_model_aggregation'] = 0.0 + + model = None + nr_aggregated_models = 0 + total_examples = 0 + + self.server.report_status( + "AGGREGATOR({}): Aggregating model updates... ".format(self.name)) + + while not self.model_updates.empty(): + try: + # Get next model from queue + model_next, metadata, model_id = self.next_model_update(helper) + self.server.report_status( + "AGGREGATOR({}): Processing model update {}, metadata: {} ".format(self.name, model_id, metadata)) + + # Increment total number of examples + total_examples += metadata['num_examples'] + + if nr_aggregated_models == 0: + model = model_next + else: + model = helper.increment_average( + model, model_next, metadata['num_examples'], total_examples) + + nr_aggregated_models += 1 + # Delete model from storage + if delete_models: + self.modelservice.models.delete(model_id) + self.server.report_status( + "AGGREGATOR({}): Deleted model update {} from storage.".format(self.name, model_id)) + self.model_updates.task_done() + except Exception as e: + self.server.report_status( + "AGGREGATOR({}): Error encoutered while processing model update {}, skipping this update.".format(self.name, e)) + self.model_updates.task_done() + + data['nr_aggregated_models'] = nr_aggregated_models + + self.server.report_status("AGGREGATOR({}): Aggregation completed, aggregated {} 
models.".format(self.name, nr_aggregated_models), + log_level=fedn.Status.INFO) + return model, data diff --git a/fedn/fedn/network/combiner/aggregators/tests/test_fedavg.py b/fedn/fedn/network/combiner/aggregators/tests/test_fedavg.py new file mode 100644 index 000000000..55e5052b8 --- /dev/null +++ b/fedn/fedn/network/combiner/aggregators/tests/test_fedavg.py @@ -0,0 +1,33 @@ +import unittest +from unittest.mock import MagicMock + +from fedn.network.combiner.aggregators.fedavg import FedAvg + + +class TestFedAvg(unittest.TestCase): + """Test the FedAvg aggregator""" + + def SetUp(self): + pass + + def test_fedavg_init(self, *args, **kwargs): + """Test the FedAvg aggregator constructor""" + aggregator = FedAvg("id", None, None, None, None) + self.assertEqual(aggregator.name, "FedAvg") + + def test_fedavg_combine_models(self, *args, **kwargs): + """Test the FedAvg aggregator combine_models method with mock classes and methods""" + aggregator = FedAvg("id", None, None, None, None) + aggregator.next_model_update = MagicMock(return_value=(None, None, None)) + aggregator.server = MagicMock() + + data = {} + data['time_model_load'] = 0.0 + data['time_model_aggregation'] = 0.0 + data['nr_aggregated_models'] = 0 + + self.assertEqual(aggregator.combine_models(), (None, data)) + + +if __name__ == '__main__': + unittest.main() diff --git a/fedn/fedn/network/combiner/connect.py b/fedn/fedn/network/combiner/connect.py new file mode 100644 index 000000000..de705a56c --- /dev/null +++ b/fedn/fedn/network/combiner/connect.py @@ -0,0 +1,125 @@ +# This file contains the Connector class for announcing combiner to the FEDn network via the discovery service (REST-API). +# The Connector class is used by the Combiner class in fedn/network/combiner/server.py. +# Once announced, the combiner will be able to receive controller requests from the controllerStub via gRPC. +# The discovery service will also add the combiner to the statestore. 
+# +# +import enum + +import requests + + +class Status(enum.Enum): + """ Enum for representing the status of a combiner announcement.""" + Unassigned = 0 + Assigned = 1 + TryAgain = 2 + UnAuthorized = 3 + UnMatchedConfig = 4 + + +class ConnectorCombiner: + """ Connector for annnouncing combiner to the FEDn network. + + :param host: host of discovery service + :type host: str + :param port: port of discovery service + :type port: int + :param myhost: host of combiner + :type myhost: str + :param fqdn: fully qualified domain name of combiner + :type fqdn: str + :param myport: port of combiner + :type myport: int + :param token: token for authentication + :type token: str + :param name: name of combiner + :type name: str + :param secure: True if https is used, False if http + :type secure: bool + :param verify: True if certificate is verified, False if not + :type verify: bool + """ + + def __init__(self, host, port, myhost, fqdn, myport, token, name, secure=False, verify=False): + """ Initialize the ConnectorCombiner. + + :param host: The host of the discovery service. + :type host: str + :param port: The port of the discovery service. + :type port: int + :param myhost: The host of the combiner. + :type myhost: str + :param fqdn: The fully qualified domain name of the combiner. + :type fqdn: str + :param myport: The port of the combiner. + :type myport: int + :param token: The token for the discovery service. + :type token: str + :param name: The name of the combiner. + :type name: str + :param secure: Use https for the connection to the discovery service. + :type secure: bool + :param verify: Verify the connection to the discovery service. 
+ :type verify: bool + """ + + self.host = host + self.fqdn = fqdn + self.port = port + self.myhost = myhost + self.myport = myport + self.token = token + self.name = name + self.secure = secure + self.verify = verify + + # for https we assume a an ingress handles permanent redirect (308) + self.prefix = "http://" + if port: + self.connect_string = "{}{}:{}".format( + self.prefix, self.host, self.port) + else: + self.connect_string = "{}{}".format( + self.prefix, self.host) + + print("\n\nsetting the connection string to {}\n\n".format( + self.connect_string), flush=True) + + def announce(self): + """ + Announce combiner to FEDn network via discovery service (REST-API). + + :return: Tuple with announcement Status, FEDn network configuration if sucessful, else None. + :rtype: :class:`fedn.network.combiner.connect.Status`, str + """ + payload = { + "combiner_id": self.name, + "address": self.myhost, + "fqdn": self.fqdn, + "port": self.myport, + "secure_grpc": self.secure + } + try: + retval = requests.post(self.connect_string + '/add_combiner', json=payload, + verify=self.verify, + headers={'Authorization': 'Token {}'.format(self.token)}) + except Exception: + return Status.Unassigned, {} + + if retval.status_code == 400: + # Get error messange from response + reason = retval.json()['message'] + return Status.UnMatchedConfig, reason + + if retval.status_code == 401: + reason = "Unauthorized connection to reducer, make sure the correct token is set" + return Status.UnAuthorized, reason + + if retval.status_code >= 200 and retval.status_code < 204: + if retval.json()['status'] == 'retry': + reason = retval.json()['message'] + return Status.TryAgain, reason + return Status.Assigned, retval.json() + + return Status.Unassigned, None diff --git a/fedn/fedn/clients/reducer/interfaces.py b/fedn/fedn/network/combiner/interfaces.py similarity index 56% rename from fedn/fedn/clients/reducer/interfaces.py rename to fedn/fedn/network/combiner/interfaces.py index 
a307fe446..6dfb0428d 100644 --- a/fedn/fedn/clients/reducer/interfaces.py +++ b/fedn/fedn/network/combiner/interfaces.py @@ -4,6 +4,7 @@ from io import BytesIO import grpc +from google.protobuf.json_format import MessageToJson import fedn.common.net.grpc.fedn_pb2 as fedn import fedn.common.net.grpc.fedn_pb2_grpc as rpc @@ -14,11 +15,28 @@ class CombinerUnavailableError(Exception): class Channel: + """ Wrapper for a gRPC channel. + + :param address: The address for the gRPC server. + :type address: str + :param port: The port for connecting to the gRPC server. + :type port: int + :param certificate: The certificate for connecting to the gRPC server (optional) + :type certificate: str """ - """ + def __init__(self, address, port, certificate=None): + """ Create a channel. + + If a valid certificate is given, a secure channel is created, else insecure. - def __init__(self, address, port, certificate): + :parameter address: The address for the gRPC server. + :type address: str + :parameter port: The port for connecting to the gRPC server. + :type port: int + :parameter certificate: The certificate for connecting to the gRPC server (optional) + :type certificate: str + """ self.address = address self.port = port self.certificate = certificate @@ -33,19 +51,40 @@ def __init__(self, address, port, certificate): '{}:{}'.format(self.address, str(self.port))) def get_channel(self): - """ + """ Get a channel. - :return: + :return: An instance of a gRPC channel + :rtype: :class:`grpc.Channel` """ return copy.copy(self.channel) class CombinerInterface: - """ - + """ Interface for the Combiner (aggregation server). + Abstraction on top of the gRPC server servicer. + + :param parent: The parent combiner (controller) + :type parent: :class:`fedn.network.api.interfaces.API` + :param name: The name of the combiner. + :type name: str + :param address: The address of the combiner. + :type address: str + :param fqdn: The fully qualified domain name of the combiner. 
+ :type fqdn: str + :param port: The port of the combiner. + :type port: int + :param certificate: The certificate of the combiner (optional). + :type certificate: str + :param key: The key of the combiner (optional). + :type key: str + :param ip: The ip of the combiner (optional). + :type ip: str + :param config: The configuration of the combiner (optional). + :type config: dict """ def __init__(self, parent, name, address, fqdn, port, certificate=None, key=None, ip=None, config=None): + """ Initialize the combiner interface.""" self.parent = parent self.name = name self.address = address @@ -62,33 +101,34 @@ def __init__(self, parent, name, address, fqdn, port, certificate=None, key=None else: self.config = config - @classmethod - def from_statestore(statestore, name): - """ """ - @classmethod def from_json(combiner_config): - """ + """ Initialize the combiner config from a json document. - :return: + :parameter combiner_config: The combiner configuration. + :type combiner_config: dict + :return: An instance of the combiner interface. + :rtype: :class:`fedn.network.combiner.interfaces.CombinerInterface` """ return CombinerInterface(**combiner_config) def to_dict(self): - """ + """ Export combiner configuration to a dictionary. - :return: + :return: A dictionary with the combiner configuration. + :rtype: dict """ data = { - 'parent': self.parent.to_dict(), + 'parent': self.parent, 'name': self.name, 'address': self.address, 'fqdn': self.fqdn, 'port': self.port, 'ip': self.ip, 'certificate': None, - 'key': None + 'key': None, + 'config': self.config } if self.certificate: @@ -105,20 +145,44 @@ def to_dict(self): return data def to_json(self): - """ + """ Export combiner configuration to json. - :return: + :return: A json document with the combiner configuration. + :rtype: str """ return json.dumps(self.to_dict()) - def report(self, config=None): + def get_certificate(self): + """ Get combiner certificate. + + :return: The combiner certificate. 
+ :rtype: str, None if no certificate is set. + """ + if self.certificate: + cert_b64 = base64.b64encode(self.certificate) + return str(cert_b64).split('\'')[1] + else: + return None + + def get_key(self): + """ Get combiner key. + + :return: The combiner key. + :rtype: str, None if no key is set. """ + if self.key: + key_b64 = base64.b64encode(self.key) + return str(key_b64).split('\'')[1] + else: + return None - :param config: - :return: + def report(self): + """ Recieve a status report from the combiner. + + :return: A dictionary describing the combiner state. + :rtype: dict + :raises CombinerUnavailableError: If the combiner is unavailable. """ - print(f"Trying to create Report channel to gRPC server at: address {self.address} port {self.port}", flush=True) - print(f"Certificate: {self.certificate}", flush=True) channel = Channel(self.address, self.port, self.certificate).get_channel() control = rpc.ControlStub(channel) @@ -136,9 +200,10 @@ def report(self, config=None): raise def configure(self, config=None): - """ + """ Configure the combiner. Set the parameters in config at the server. - :param config: + :param config: A dictionary containing parameters. + :type config: dict """ if not config: config = self.config @@ -160,83 +225,64 @@ def configure(self, config=None): else: raise - def start(self, config): - """ + def flush_model_update_queue(self): + """ Reset the model update queue on the combiner. 
""" - :param config: - :return: - """ channel = Channel(self.address, self.port, self.certificate).get_channel() control = rpc.ControlStub(channel) + request = fedn.ControlRequest() - request.command = fedn.Command.START - for k, v in config.items(): - p = request.parameter.add() - p.key = str(k) - p.value = str(v) try: - response = control.Start(request) + control.FlushAggregationQueue(request) except grpc.RpcError as e: if e.code() == grpc.StatusCode.UNAVAILABLE: raise CombinerUnavailableError else: raise - print("Response from combiner {}".format(response.message)) - return response - - def set_model_id(self, model_id): - """ + def submit(self, config): + """ Submit a compute plan to the combiner. - :param model_id: + :param config: The job configuration. + :type config: dict + :return: Server ControlResponse object. + :rtype: :class:`fedn.common.net.grpc.fedn_pb2.ControlResponse` """ channel = Channel(self.address, self.port, self.certificate).get_channel() control = rpc.ControlStub(channel) request = fedn.ControlRequest() - p = request.parameter.add() - p.key = 'model_id' - p.value = str(model_id) + request.command = fedn.Command.START + for k, v in config.items(): + p = request.parameter.add() + p.key = str(k) + p.value = str(v) try: - control.Configure(request) + response = control.Start(request) except grpc.RpcError as e: if e.code() == grpc.StatusCode.UNAVAILABLE: raise CombinerUnavailableError else: raise - def get_model_id(self): - """ - - :return: - """ - channel = Channel(self.address, self.port, - self.certificate).get_channel() - reducer = rpc.ReducerStub(channel) - request = fedn.GetGlobalModelRequest() - try: - response = reducer.GetGlobalModel(request) - except grpc.RpcError as e: - if e.code() == grpc.StatusCode.UNAVAILABLE: - raise CombinerUnavailableError - else: - raise + return response - return response.model_id + def get_model(self, id): + """ Download a model from the combiner server. 
- def get_model(self, id=None): - """ Retrive the model bundle from a combiner. """ + :param id: The model id. + :type id: str + :return: A file-like object containing the model. + :rtype: :class:`io.BytesIO`, None if the model is not available. + """ channel = Channel(self.address, self.port, self.certificate).get_channel() modelservice = rpc.ModelServiceStub(channel) - if not id: - id = self.get_model_id() - data = BytesIO() data.seek(0, 0) @@ -250,9 +296,10 @@ def get_model(self, id=None): return None def allowing_clients(self): - """ + """ Check if the combiner is allowing additional client connections. - :return: + :return: True if accepting, else False. + :rtype: bool """ channel = Channel(self.address, self.port, self.certificate).get_channel() @@ -274,3 +321,22 @@ def allowing_clients(self): return False return False + + def list_active_clients(self): + """ List active clients. + + :return: A list of active clients. + :rtype: json + """ + channel = Channel(self.address, self.port, + self.certificate).get_channel() + control = rpc.ConnectorStub(channel) + request = fedn.ListClientsRequest() + try: + response = control.ListActiveClients(request) + except grpc.RpcError as e: + if e.code() == grpc.StatusCode.UNAVAILABLE: + raise CombinerUnavailableError + else: + raise + return MessageToJson(response) diff --git a/fedn/fedn/clients/combiner/modelservice.py b/fedn/fedn/network/combiner/modelservice.py similarity index 54% rename from fedn/fedn/clients/combiner/modelservice.py rename to fedn/fedn/network/combiner/modelservice.py index abdb80d6a..7d29d0d00 100644 --- a/fedn/fedn/clients/combiner/modelservice.py +++ b/fedn/fedn/network/combiner/modelservice.py @@ -1,3 +1,5 @@ +import os +import tempfile from io import BytesIO import fedn.common.net.grpc.fedn_pb2 as fedn @@ -8,28 +10,71 @@ class ModelService(rpc.ModelServiceServicer): - """ + """ Service for handling download and upload of models to the server. 
""" def __init__(self): self.models = TempModelStorage() - # self.models = defaultdict(io.BytesIO) - # self.models_metadata = {} def exist(self, model_id): - """ + """ Check if a model exists on the server. - :param model_id: - :return: + :param model_id: The model id. + :return: True if the model exists, else False. """ return self.models.exist(model_id) - def get_model(self, id): + def get_tmp_path(self): + """ Return a temporary output path compatible with save_model, load_model. """ + fd, path = tempfile.mkstemp() + os.close(fd) + return path + + def load_model_from_BytesIO(self, model_bytesio, helper): + """ Load a model from a BytesIO object. + + :param model_bytesio: A BytesIO object containing the model. + :type model_bytesio: :class:`io.BytesIO` + :param helper: The helper object for the model. + :type helper: :class:`fedn.utils.helperbase.HelperBase` + :return: The model object. + :rtype: return type of helper.load """ + path = self.get_tmp_path() + with open(path, 'wb') as fh: + fh.write(model_bytesio) + fh.flush() + model = helper.load(path) + os.unlink(path) + return model + + def serialize_model_to_BytesIO(self, model, helper): + """ Serialize a model to a BytesIO object. + + :param model: The model object. + :type model: return type of helper.load + :param helper: The helper object for the model. + :type helper: :class:`fedn.utils.helperbase.HelperBase` + :return: A BytesIO object containing the model. + :rtype: :class:`io.BytesIO` + """ + outfile_name = helper.save(model) + + a = BytesIO() + a.seek(0, 0) + with open(outfile_name, 'rb') as f: + a.write(f.read()) + os.unlink(outfile_name) + return a + + def get_model(self, id): + """ Download model with id 'id' from server. - :param id: - :return: + :param id: The model id. + :type id: str + :return: A BytesIO object containing the model. + :rtype: :class:`io.BytesIO`, None if model does not exist. 
""" data = BytesIO() @@ -46,12 +91,13 @@ def get_model(self, id): return None def set_model(self, model, id): - """ + """ Upload model to server. - :param model: - :param id: + :param model: A model object (BytesIO) + :type model: :class:`io.BytesIO` + :param id: The model id. + :type id: str """ - if not isinstance(model, BytesIO): bt = BytesIO() @@ -86,13 +132,16 @@ def upload_request_generator(mdl): # Model Service def Upload(self, request_iterator, context): + """ RPC endpoints for uploading a model. + + :param request_iterator: The model request iterator. + :type request_iterator: :class:`fedn.common.net.grpc.fedn_pb2.ModelRequest` + :param context: The context object (unused) + :type context: :class:`grpc._server._Context` + :return: A model response object. + :rtype: :class:`fedn.common.net.grpc.fedn_pb2.ModelResponse` """ - :param request_iterator: - :param context: - :return: - """ - # print("STARTING UPLOAD!", flush=True) result = None for request in request_iterator: if request.status == fedn.ModelStatus.IN_PROGRESS: @@ -109,18 +158,21 @@ def Upload(self, request_iterator, context): return result def Download(self, request, context): - """ - - :param request: - :param context: - :return: + """ RPC endpoints for downloading a model. + + :param request: The model request object. + :type request: :class:`fedn.common.net.grpc.fedn_pb2.ModelRequest` + :param context: The context object (unused) + :type context: :class:`grpc._server._Context` + :return: A model response iterator. 
+ :rtype: :class:`fedn.common.net.grpc.fedn_pb2.ModelResponse` """ try: if self.models.get_meta(request.id) != fedn.ModelStatus.OK: print("Error file is not ready", flush=True) yield fedn.ModelResponse(id=request.id, data=None, status=fedn.ModelStatus.FAILED) except Exception: - print("Error file does not exist", flush=True) + print("Error file does not exist: {}".format(request.id), flush=True) yield fedn.ModelResponse(id=request.id, data=None, status=fedn.ModelStatus.FAILED) try: @@ -133,4 +185,4 @@ def Download(self, request, context): return yield fedn.ModelResponse(id=request.id, data=piece, status=fedn.ModelStatus.IN_PROGRESS) except Exception as e: - print("Downloading went wrong! {}".format(e), flush=True) + print("Downloading went wrong: {} {}".format(request.id, e), flush=True) diff --git a/fedn/fedn/network/combiner/round.py b/fedn/fedn/network/combiner/round.py new file mode 100644 index 000000000..dd41deee3 --- /dev/null +++ b/fedn/fedn/network/combiner/round.py @@ -0,0 +1,387 @@ +import queue +import random +import sys +import time +import uuid + +from fedn.network.combiner.aggregators.aggregatorbase import get_aggregator +from fedn.utils.helpers import get_helper + + +class ModelUpdateError(Exception): + pass + + +class RoundController: + """ Round controller. + + The round controller recieves round configurations from the global controller + and coordinates model updates and aggregation, and model validations. + + :param aggregator_name: The name of the aggregator plugin module. 
+ :type aggregator_name: str + :param storage: Model repository for :class: `fedn.network.combiner.Combiner` + :type storage: class: `fedn.common.storage.s3.s3repo.S3ModelRepository` + :param server: A handle to the Combiner class :class: `fedn.network.combiner.Combiner` + :type server: class: `fedn.network.combiner.Combiner` + :param modelservice: A handle to the model service :class: `fedn.network.combiner.modelservice.ModelService` + :type modelservice: class: `fedn.network.combiner.modelservice.ModelService` + """ + + def __init__(self, aggregator_name, storage, server, modelservice): + """ Initialize the RoundController.""" + + self.round_configs = queue.Queue() + self.storage = storage + self.server = server + self.modelservice = modelservice + self.aggregator = get_aggregator(aggregator_name, self.storage, self.server, self.modelservice, self) + + def push_round_config(self, round_config): + """Add a round_config (job description) to the inbox. + + :param round_config: A dict containing the round configuration (from global controller). + :type round_config: dict + :return: A job id (universally unique identifier) for the round. + :rtype: str + """ + try: + round_config['_job_id'] = str(uuid.uuid4()) + self.round_configs.put(round_config) + except Exception: + self.server.report_status( + "ROUNDCONTROL: Failed to push round config.", flush=True) + raise + return round_config['_job_id'] + + def load_model_update(self, helper, model_id): + """Load model update in its native format. 
+ + :param helper: An instance of :class: `fedn.utils.helpers.HelperBase`, ML framework specific helper, defaults to None + :type helper: class: `fedn.utils.helpers.HelperBase` + :param model_id: The ID of the model update, UUID in str format + :type model_id: str + """ + + model_str = self.load_model_update_str(model_id) + if model_str: + try: + model = self.modelservice.load_model_from_BytesIO(model_str.getbuffer(), helper) + except IOError: + self.server.report_status( + "AGGREGATOR({}): Failed to load model!".format(self.name)) + else: + raise ModelUpdateError("Failed to load model.") + + return model + + def load_model_update_str(self, model_id, retry=3): + """Load model update object and return it as BytesIO. + + :param model_id: The ID of the model + :type model_id: str + :param retry: number of times retrying load model update, defaults to 3 + :type retry: int, optional + :return: Updated model + :rtype: class: `io.BytesIO` + """ + # Try reading model update from local disk/combiner memory + model_str = self.modelservice.models.get(model_id) + # And if we cannot access that, try downloading from the server + if model_str is None: + model_str = self.modelservice.get_model(model_id) + # TODO: use retrying library + tries = 0 + while tries < retry: + tries += 1 + if not model_str or sys.getsizeof(model_str) == 80: + self.server.report_status( + "ROUNDCONTROL: Model download failed. retrying", flush=True) + + time.sleep(1) + model_str = self.modelservice.get_model(model_id) + + return model_str + + def waitforit(self, config, buffer_size=100, polling_interval=0.1): + """ Defines the policy for how long the server should wait before starting to aggregate models. + + The policy is as follows: + 1. Wait a maximum of time_window time until the round times out. + 2. Terminate if a preset number of model updates (buffer_size) are in the queue. 
+ + :param config: The round config object + :type config: dict + :param buffer_size: The number of model updates to wait for before starting aggregation, defaults to 100 + :type buffer_size: int, optional + :param polling_interval: The polling interval, defaults to 0.1 + :type polling_interval: float, optional + """ + + time_window = float(config['round_timeout']) + + tt = 0.0 + while tt < time_window: + if self.aggregator.model_updates.qsize() >= buffer_size: + break + + time.sleep(polling_interval) + tt += polling_interval + + def _training_round(self, config, clients): + """Send model update requests to clients and aggregate results. + + :param config: The round config object (passed to the client). + :type config: dict + :param clients: clients to participate in the training round + :type clients: list + :return: an aggregated model and associated metadata + :rtype: model, dict + """ + + self.server.report_status( + "ROUNDCONTROL: Initiating training round, participating clients: {}".format(clients)) + + meta = {} + meta['nr_expected_updates'] = len(clients) + meta['nr_required_updates'] = int(config['clients_required']) + meta['timeout'] = float(config['round_timeout']) + + # Request model updates from all active clients. + self.server.request_model_update(config, clients=clients) + + # If buffer_size is -1 (default), the round terminates when/if all clients have completed. + if int(config['buffer_size']) == -1: + buffer_size = len(clients) + else: + buffer_size = int(config['buffer_size']) + + # Wait / block until the round termination policy has been met. 
+ self.waitforit(config, buffer_size=buffer_size) + + tic = time.time() + model = None + data = None + + try: + helper = get_helper(config['helper_type']) + print("ROUNDCONTROL: Config delete_models_storage: {}".format(config['delete_models_storage']), flush=True) + if config['delete_models_storage'] == 'True': + delete_models = True + else: + delete_models = False + model, data = self.aggregator.combine_models(helper=helper, + delete_models=delete_models) + except Exception as e: + print("AGGREGATION FAILED AT COMBINER! {}".format(e), flush=True) + + meta['time_combination'] = time.time() - tic + meta['aggregation_time'] = data + return model, meta + + def _validation_round(self, config, clients, model_id): + """Send model validation requests to clients. + + :param config: The round config object (passed to the client). + :type config: dict + :param clients: clients to send validation requests to + :type clients: list + :param model_id: The ID of the model to validate + :type model_id: str + """ + self.server.request_model_validation(model_id, config, clients) + + def stage_model(self, model_id, timeout_retry=3, retry=2): + """Download a model from persistent storage and set in modelservice. + + :param model_id: ID of the model update object to stage. + :type model_id: str + :param timeout_retry: Sleep before retrying download again(sec), defaults to 3 + :type timeout_retry: int, optional + :param retry: Number of retries, defaults to 2 + :type retry: int, optional + """ + + # If the model is already in memory at the server we do not need to do anything. + if self.modelservice.models.exist(model_id): + print("ROUNDCONTROL: Model already exists in memory, skipping model staging.", flush=True) + return + print("ROUNDCONTROL: Model Staging, fetching model from storage...", flush=True) + # If not, download it and stage it in memory at the combiner. 
+ tries = 0 + while True: + try: + model = self.storage.get_model_stream(model_id) + if model: + break + except Exception: + self.server.report_status("ROUNDCONTROL: Could not fetch model from storage backend, retrying.", + flush=True) + time.sleep(timeout_retry) + tries += 1 + if tries > retry: + self.server.report_status( + "ROUNDCONTROL: Failed to stage model {} from storage backend!".format(model_id), flush=True) + return + + self.modelservice.set_model(model, model_id) + + def _assign_round_clients(self, n, type="trainers"): + """ Obtain a list of clients(trainers or validators) to ask for updates in this round. + + :param n: Size of a random set taken from active trainers(clients), if n > "active trainers" all is used + :type n: int + :param type: type of clients, either "trainers" or "validators", defaults to "trainers" + :type type: str, optional + :return: Set of clients + :rtype: list + """ + + if type == "validators": + clients = self.server.get_active_validators() + elif type == "trainers": + clients = self.server.get_active_trainers() + else: + self.server.report_status( + "ROUNDCONTROL(ERROR): {} is not a supported type of client".format(type), flush=True) + raise + + # If the number of requested trainers exceeds the number of available, use all available. + if n > len(clients): + n = len(clients) + + # If not, we pick a random subsample of all available clients. + clients = random.sample(clients, n) + + return clients + + def _check_nr_round_clients(self, config, timeout=0.0): + """Check that the minimal number of clients required to start a round are available. + + :param config: The round config object. + :type config: dict + :param timeout: Timeout in seconds, defaults to 0.0 + :type timeout: float, optional + :return: True if the required number of clients are available, False otherwise. 
+ :rtype: bool + """ + + ready = False + t = 0.0 + while not ready: + active = self.server.nr_active_trainers() + + if active >= int(config['clients_requested']): + return True + else: + self.server.report_status("waiting for {} clients to get started, currently: {}".format( + int(config['clients_requested']) - active, + active), flush=True) + if t >= timeout: + if active >= int(config['clients_required']): + return True + else: + return False + + time.sleep(1.0) + t += 1.0 + + return ready + + def execute_validation_round(self, round_config): + """ Coordinate validation rounds as specified in config. + + :param round_config: The round config object. + :type round_config: dict + """ + model_id = round_config['model_id'] + self.server.report_status( + "COMBINER orchestrating validation of model {}".format(model_id)) + self.stage_model(model_id) + validators = self._assign_round_clients( + self.server.max_clients, type="validators") + self._validation_round(round_config, validators, model_id) + + def execute_training_round(self, config): + """ Coordinates clients to execute training tasks. + + :param config: The round config object. + :type config: dict + :return: metadata about the training round. + :rtype: dict + """ + + self.server.report_status( + "ROUNDCONTROL: Processing training round, job_id {}".format(config['_job_id']), flush=True) + + data = {} + data['config'] = config + data['round_id'] = config['round_id'] + + # Make sure the model to update is available on this combiner. 
+ self.stage_model(config['model_id']) + + clients = self._assign_round_clients(self.server.max_clients) + model, meta = self._training_round(config, clients) + data['data'] = meta + + if model is None: + self.server.report_status( + "\t Failed to update global model in round {0}!".format(config['round_id'])) + + if model is not None: + helper = get_helper(config['helper_type']) + a = self.modelservice.serialize_model_to_BytesIO(model, helper) + # Send aggregated model to server + model_id = str(uuid.uuid4()) + self.modelservice.set_model(a, model_id) + a.close() + data['model_id'] = model_id + + self.server.report_status( + "ROUNDCONTROL: TRAINING ROUND COMPLETED. Aggregated model id: {}, Job id: {}".format(model_id, config['_job_id']), flush=True) + + return data + + def run(self, polling_interval=1.0): + """ Main control loop. Execute rounds based on round config on the queue. + + :param polling_interval: The polling interval in seconds for checking if a new job/config is available. + :type polling_interval: float + """ + try: + while True: + try: + round_config = self.round_configs.get(block=False) + + # Check that the minimum allowed number of clients are connected + ready = self._check_nr_round_clients(round_config) + round_meta = {} + + if ready: + if round_config['task'] == 'training': + tic = time.time() + round_meta = self.execute_training_round(round_config) + round_meta['time_exec_training'] = time.time() - \ + tic + round_meta['status'] = "Success" + round_meta['name'] = self.server.id + self.server.tracer.set_round_combiner_data(round_meta) + elif round_config['task'] == 'validation' or round_config['task'] == 'inference': + self.execute_validation_round(round_config) + else: + self.server.report_status( + "ROUNDCONTROL: Round config contains unkown task type.", flush=True) + else: + round_meta = {} + round_meta['status'] = "Failed" + round_meta['reason'] = "Failed to meet client allocation requirements for this round config." 
+ self.server.report_status( + "ROUNDCONTROL: {0}".format(round_meta['reason']), flush=True) + + self.round_configs.task_done() + except queue.Empty: + time.sleep(polling_interval) + + except (KeyboardInterrupt, SystemExit): + pass diff --git a/fedn/fedn/network/combiner/server.py b/fedn/fedn/network/combiner/server.py new file mode 100644 index 000000000..7a9c87ff9 --- /dev/null +++ b/fedn/fedn/network/combiner/server.py @@ -0,0 +1,875 @@ +import base64 +import json +import queue +import re +import signal +import sys +import threading +import time +import uuid +from datetime import datetime, timedelta +from enum import Enum + +import fedn.common.net.grpc.fedn_pb2 as fedn +import fedn.common.net.grpc.fedn_pb2_grpc as rpc +from fedn.common.net.grpc.server import Server +from fedn.common.storage.s3.s3repo import S3ModelRepository +from fedn.common.tracer.mongotracer import MongoTracer +from fedn.network.combiner.connect import ConnectorCombiner, Status +from fedn.network.combiner.modelservice import ModelService +from fedn.network.combiner.round import RoundController + +VALID_NAME_REGEX = '^[a-zA-Z0-9_-]*$' + + +class Role(Enum): + """ Enum for combiner roles. """ + WORKER = 1 + COMBINER = 2 + REDUCER = 3 + OTHER = 4 + + +def role_to_proto_role(role): + """ Convert a Role to a proto Role. + + :param role: the role to convert + :type role: :class:`fedn.network.combiner.server.Role` + :return: proto role + :rtype: :class:`fedn.common.net.grpc.fedn_pb2.Role` + """ + if role == Role.COMBINER: + return fedn.COMBINER + if role == Role.WORKER: + return fedn.WORKER + if role == Role.REDUCER: + return fedn.REDUCER + if role == Role.OTHER: + return fedn.OTHER + + +class Combiner(rpc.CombinerServicer, rpc.ReducerServicer, rpc.ConnectorServicer, rpc.ControlServicer): + """ Combiner gRPC server. 
+ + :param config: configuration for the combiner + :type config: dict + """ + + def __init__(self, config): + """ Initialize Combiner server.""" + + # Client queues + self.clients = {} + + self.modelservice = ModelService() + + # Validate combiner name + match = re.search(VALID_NAME_REGEX, config['name']) + if not match: + raise ValueError('Unallowed character in combiner name. Allowed characters: a-z, A-Z, 0-9, _, -.') + + self.id = config['name'] + self.role = Role.COMBINER + self.max_clients = config['max_clients'] + + # Connector to announce combiner to discover service (reducer) + announce_client = ConnectorCombiner(host=config['discover_host'], + port=config['discover_port'], + myhost=config['host'], + fqdn=config['fqdn'], + myport=config['port'], + token=config['token'], + name=config['name'], + secure=config['secure'], + verify=config['verify']) + + response = None + while True: + # announce combiner to discover service + status, response = announce_client.announce() + if status == Status.TryAgain: + print(response, flush=True) + time.sleep(5) + continue + if status == Status.Assigned: + announce_config = response + print( + "COMBINER {0}: Announced successfully".format(self.id), flush=True) + break + if status == Status.UnAuthorized: + print(response, flush=True) + print("Status.UnAuthorized", flush=True) + sys.exit("Exiting: Unauthorized") + if status == Status.UnMatchedConfig: + print(response, flush=True) + print("Status.UnMatchedConfig", flush=True) + sys.exit("Exiting: Missing config") + + cert = announce_config['certificate'] + key = announce_config['key'] + + if announce_config['certificate']: + cert = base64.b64decode(announce_config['certificate']) # .decode('utf-8') + key = base64.b64decode(announce_config['key']) # .decode('utf-8') + + # Set up gRPC server configuration + grpc_config = {'port': config['port'], + 'secure': config['secure'], + 'certificate': cert, + 'key': key} + + # Set up model repository + self.repository = S3ModelRepository( 
+ announce_config['storage']['storage_config']) + + # Create gRPC server + self.server = Server(self, self.modelservice, grpc_config) + + # Set up tracer for statestore + self.tracer = MongoTracer( + announce_config['statestore']['mongo_config'], announce_config['statestore']['network_id']) + + # Set up round controller + self.control = RoundController(config['aggregator'], self.repository, self, self.modelservice) + + # Start thread for round controller + threading.Thread(target=self.control.run, daemon=True).start() + + # Start the gRPC server + self.server.start() + + def __whoami(self, client, instance): + """ Set the client id and role in a proto message. + + :param client: the client to set the id and role for + :type client: :class:`fedn.common.net.grpc.fedn_pb2.Client` + :param instance: the instance to get the id and role from + :type instance: :class:`fedn.network.combiner.server.Combiner` + :return: the client with id and role set + :rtype: :class:`fedn.common.net.grpc.fedn_pb2.Client` + """ + client.name = instance.id + client.role = role_to_proto_role(instance.role) + return client + + def report_status(self, msg, log_level=fedn.Status.INFO, type=None, request=None, flush=True): + """ Report status of the combiner. + + :param msg: the message to report + :type msg: str + :param log_level: the log level to report at + :type log_level: :class:`fedn.common.net.grpc.fedn_pb2.Status` + :param type: the type of status to report + :type type: :class:`fedn.common.net.grpc.fedn_pb2.Status.Type` + :param request: the request to report status for + :type request: :class:`fedn.common.net.grpc.fedn_pb2.Request` + :param flush: whether to flush the message to stdout + :type flush: bool + """ + print("{}:COMBINER({}):{} {}".format(datetime.now().strftime( + '%Y-%m-%d %H:%M:%S'), self.id, log_level, msg), flush=flush) + + def request_model_update(self, config, clients=[]): + """ Ask clients to update the current global model. 
+ + :param config: the model configuration to send to clients + :type config: dict + :param clients: the clients to send the request to + :type clients: list + + """ + + request = fedn.ModelUpdateRequest() + self.__whoami(request.sender, self) + request.model_id = config['model_id'] + request.correlation_id = str(uuid.uuid4()) + request.timestamp = str(datetime.now()) + request.data = json.dumps(config) + + if len(clients) == 0: + clients = self.get_active_trainers() + + for client in clients: + request.receiver.name = client.name + request.receiver.role = fedn.WORKER + _ = self.SendModelUpdateRequest(request, self) + # TODO: Check response + + print("COMBINER: Sent model update request for model {} to clients {}".format( + request.model_id, clients), flush=True) + + def request_model_validation(self, model_id, config, clients=[]): + """ Ask clients to validate the current global model. + + :param model_id: the model id to validate + :type model_id: str + :param config: the model configuration to send to clients + :type config: dict + :param clients: the clients to send the request to + :type clients: list + + """ + + request = fedn.ModelValidationRequest() + self.__whoami(request.sender, self) + request.model_id = model_id + request.correlation_id = str(uuid.uuid4()) + request.timestamp = str(datetime.now()) + request.is_inference = (config['task'] == 'inference') + + if len(clients) == 0: + clients = self.get_active_validators() + + for client in clients: + request.receiver.name = client.name + request.receiver.role = fedn.WORKER + self.SendModelValidationRequest(request, self) + + print("COMBINER: Sent validation request for model {} to clients {}".format( + model_id, clients), flush=True) + + def _list_clients(self, channel): + """ List active clients on a channel. 
+ + :param channel: the channel to list clients for, for example MODEL_UPDATE_REQUESTS + :type channel: :class:`fedn.common.net.grpc.fedn_pb2.Channel` + :return: the list of active clients + :rtype: list + """ + request = fedn.ListClientsRequest() + self.__whoami(request.sender, self) + request.channel = channel + clients = self.ListActiveClients(request, self) + return clients.client + + def get_active_trainers(self): + """ Get a list of active trainers. + + :return: the list of active trainers + :rtype: list + """ + trainers = self._list_clients(fedn.Channel.MODEL_UPDATE_REQUESTS) + return trainers + + def get_active_validators(self): + """ Get a list of active validators. + + :return: the list of active validators + :rtype: list + """ + validators = self._list_clients(fedn.Channel.MODEL_VALIDATION_REQUESTS) + return validators + + def nr_active_trainers(self): + """ Get the number of active trainers. + + :return: the number of active trainers + :rtype: int + """ + return len(self.get_active_trainers()) + + def nr_active_validators(self): + """ Get the number of active validators. + + :return: the number of active validators + :rtype: int + """ + return len(self.get_active_validators()) + + #################################################################################################################### + + def __join_client(self, client): + """ Add a client to the list of active clients. + + :param client: the client to add + :type client: :class:`fedn.common.net.grpc.fedn_pb2.Client` + """ + if client.name not in self.clients.keys(): + self.clients[client.name] = {"lastseen": datetime.now()} + + def _subscribe_client_to_queue(self, client, queue_name): + """ Subscribe a client to the queue. 
+ + :param client: the client to subscribe + :type client: :class:`fedn.common.net.grpc.fedn_pb2.Client` + :param queue_name: the name of the queue to subscribe to + :type queue_name: str + """ + self.__join_client(client) + if queue_name not in self.clients[client.name].keys(): + self.clients[client.name][queue_name] = queue.Queue() + + def __get_queue(self, client, queue_name): + """ Get the queue for a client. + + :param client: the client to get the queue for + :type client: :class:`fedn.common.net.grpc.fedn_pb2.Client` + :param queue_name: the name of the queue to get + :type queue_name: str + :return: the queue + :rtype: :class:`queue.Queue` + + :raises KeyError: if the queue does not exist + """ + try: + return self.clients[client.name][queue_name] + except KeyError: + raise + + def _send_request(self, request, queue_name): + """ Send a request to a client. + + :param request: the request to send + :type request: :class:`fedn.common.net.grpc.fedn_pb2.Request` + :param queue_name: the name of the queue to send the request to + :type queue_name: str + """ + self.__route_request_to_client(request, request.receiver, queue_name) + + def _broadcast_request(self, request, queue_name): + """ Publish a request to all subscribed members. + + :param request: the request to send + :type request: :class:`fedn.common.net.grpc.fedn_pb2.Request` + :param queue_name: the name of the queue to send the request to + :type queue_name: str + """ + active_clients = self._list_active_clients() + for client in active_clients: + self.clients[client.name][queue_name].put(request) + + def __route_request_to_client(self, request, client, queue_name): + """ Route a request to a client. 
+ + :param request: the request to send + :type request: :class:`fedn.common.net.grpc.fedn_pb2.Request` + :param client: the client to send the request to + :type client: :class:`fedn.common.net.grpc.fedn_pb2.Client` + :param queue_name: the name of the queue to send the request to + :type queue_name: str + + :raises Exception: if the request could not be routed, direct cause of KeyError in __get_queue + """ + try: + q = self.__get_queue(client, queue_name) + q.put(request) + except Exception: + print("Failed to route request to client: {} {}", + request.receiver, queue_name) + raise + + def _send_status(self, status): + """ Report a status to tracer. + + :param status: the status to report + :type status: :class:`fedn.common.net.grpc.fedn_pb2.Status` + """ + + self.tracer.report_status(status) + + def __register_heartbeat(self, client): + """ Register a client if first time connecting. Update heartbeat timestamp. + + :param client: the client to register + :type client: :class:`fedn.common.net.grpc.fedn_pb2.Client` + """ + self.__join_client(client) + self.clients[client.name]["lastseen"] = datetime.now() + + def flush_model_update_queue(self): + """Clear the model update queue (aggregator). 
""" + + q = self.control.aggregator.model_updates + try: + with q.mutex: + q.queue.clear() + q.all_tasks_done.notify_all() + q.unfinished_tasks = 0 + return True + except Exception: + return False + + ##################################################################################################################### + + # Control Service + + def Start(self, control: fedn.ControlRequest, context): + """ Start a round of federated learning" + + :param control: the control request + :type control: :class:`fedn.common.net.grpc.fedn_pb2.ControlRequest` + :param context: the context (unused) + :type context: :class:`grpc._server._Context` + :return: the control response + :rtype: :class:`fedn.common.net.grpc.fedn_pb2.ControlResponse` + """ + print("\nRECIEVED **START** from Controller {}\n".format(control.command), flush=True) + + config = {} + for parameter in control.parameter: + config.update({parameter.key: parameter.value}) + + job_id = self.control.push_round_config(config) + + response = fedn.ControlResponse() + p = response.parameter.add() + p.key = "job_id" + p.value = job_id + + return response + + # RPCs related to remote configuration of the server, round controller, + # aggregator and their states. + + def Configure(self, control: fedn.ControlRequest, context): + """ Configure the Combiner. + + :param control: the control request + :type control: :class:`fedn.common.net.grpc.fedn_pb2.ControlRequest` + :param context: the context (unused) + :type context: :class:`grpc._server._Context` + :return: the control response + :rtype: :class:`fedn.common.net.grpc.fedn_pb2.ControlResponse` + """ + for parameter in control.parameter: + setattr(self, parameter.key, parameter.value) + + response = fedn.ControlResponse() + return response + + def FlushAggregationQueue(self, control: fedn.ControlRequest, context): + """ Flush the queue. 
+ + :param control: the control request + :type control: :class:`fedn.common.net.grpc.fedn_pb2.ControlRequest` + :param context: the context (unused) + :type context: :class:`grpc._server._Context` + :return: the control response + :rtype: :class:`fedn.common.net.grpc.fedn_pb2.ControlResponse` + """ + + status = self.flush_model_update_queue() + + response = fedn.ControlResponse() + if status: + response.message = 'Success' + else: + response.message = 'Failed' + + return response + + ############################################################################## + + def Stop(self, control: fedn.ControlRequest, context): + """ TODO: Not yet implemented. + + :param control: the control request + :type control: :class:`fedn.common.net.grpc.fedn_pb2.ControlRequest` + :param context: the context (unused) + :type context: :class:`grpc._server._Context` + :return: the control response + :rtype: :class:`fedn.common.net.grpc.fedn_pb2.ControlResponse` + """ + response = fedn.ControlResponse() + print("\n RECIEVED **STOP** from Controller\n", flush=True) + return response + + def Report(self, control: fedn.ControlRequest, context): + """ Describe current state of the Combiner. 
+ + :param control: the control request + :type control: :class:`fedn.common.net.grpc.fedn_pb2.ControlRequest` + :param context: the context (unused) + :type context: :class:`grpc._server._Context` + :return: the control response + :rtype: :class:`fedn.common.net.grpc.fedn_pb2.ControlResponse` + """ + + response = fedn.ControlResponse() + self.report_status("\n RECIEVED **REPORT** from Controller\n", + log_level=fedn.Status.INFO) + + control_state = self.control.aggregator.get_state() + self.report_status("Aggregator state: {}".format(control_state), log_level=fedn.Status.INFO) + p = response.parameter.add() + for key, value in control_state.items(): + p.key = str(key) + p.value = str(value) + + active_trainers = self.get_active_trainers() + p = response.parameter.add() + p.key = "nr_active_trainers" + p.value = str(len(active_trainers)) + + active_validators = self.get_active_validators() + p = response.parameter.add() + p.key = "nr_active_validators" + p.value = str(len(active_validators)) + + active_trainers_ = self.get_active_trainers() + active_trainers = [] + for client in active_trainers_: + active_trainers.append(client) + p = response.parameter.add() + p.key = "active_trainers" + p.value = str(active_trainers) + + active_validators_ = self.get_active_validators() + active_validators = [] + for client in active_validators_: + active_validators.append(client) + p = response.parameter.add() + p.key = "active_validators" + p.value = str(active_validators) + + p = response.parameter.add() + p.key = "nr_active_clients" + p.value = str(len(active_trainers)+len(active_validators)) + + p = response.parameter.add() + p.key = "nr_unprocessed_compute_plans" + p.value = str(self.control.round_configs.qsize()) + + p = response.parameter.add() + p.key = "name" + p.value = str(self.id) + + return response + + ##################################################################################################################### + + def SendStatus(self, status: fedn.Status, 
context): + """ A client stream RPC endpoint that accepts status messages. + + :param status: the status message + :type status: :class:`fedn.common.net.grpc.fedn_pb2.Status` + :param context: the context (unused) + :type context: :class:`grpc._server._Context` + :return: the response + :rtype: :class:`fedn.common.net.grpc.fedn_pb2.Response` + """ + + self._send_status(status) + + response = fedn.Response() + response.response = "Status received." + return response + + def _list_subscribed_clients(self, queue_name): + """ List all clients subscribed to a queue. + + :param queue_name: the name of the queue + :type queue_name: str + :return: a list of client names + :rtype: list + """ + subscribed_clients = [] + for name, client in self.clients.items(): + if queue_name in client.keys(): + subscribed_clients.append(name) + return subscribed_clients + + def _list_active_clients(self, channel): + """ List all clients that have sent a status message in the last 10 seconds. + + :param channel: the name of the channel + :type channel: str + :return: a list of client names + :rtype: list + """ + active_clients = [] + for client in self._list_subscribed_clients(channel): + # This can break with different timezones. + now = datetime.now() + then = self.clients[client]["lastseen"] + # TODO: move the heartbeat timeout to config. + if (now - then) < timedelta(seconds=10): + active_clients.append(client) + return active_clients + + def _drop_inactive_clients(self): + """ TODO: Not implemented. Clean up clients that have missed the heartbeat. """ + + def ListActiveClients(self, request: fedn.ListClientsRequest, context): + """ RPC endpoint that returns a ClientList containing the names of all active clients. + An active client has sent a status message / responded to a heartbeat + request in the last 10 seconds. 
+ + :param request: the request + :type request: :class:`fedn.common.net.grpc.fedn_pb2.ListClientsRequest` + :param context: the context (unused) + :type context: :class:`grpc._server._Context` + :return: the client list + :rtype: :class:`fedn.common.net.grpc.fedn_pb2.ClientList` + """ + clients = fedn.ClientList() + active_clients = self._list_active_clients(request.channel) + + for client in active_clients: + clients.client.append(fedn.Client(name=client, role=fedn.WORKER)) + return clients + + def AcceptingClients(self, request: fedn.ConnectionRequest, context): + """ RPC endpoint that returns a ConnectionResponse indicating whether the server + is accepting clients or not. + + :param request: the request (unused) + :type request: :class:`fedn.common.net.grpc.fedn_pb2.ConnectionRequest` + :param context: the context (unused) + :type context: :class:`grpc._server._Context` + :return: the response + :rtype: :class:`fedn.common.net.grpc.fedn_pb2.ConnectionResponse` + """ + response = fedn.ConnectionResponse() + active_clients = self._list_active_clients( + fedn.Channel.MODEL_UPDATE_REQUESTS) + + try: + requested = int(self.max_clients) + if len(active_clients) >= requested: + response.status = fedn.ConnectionStatus.NOT_ACCEPTING + return response + if len(active_clients) < requested: + response.status = fedn.ConnectionStatus.ACCEPTING + return response + + except Exception as e: + print("Combiner not properly configured! {}".format(e), flush=True) + raise + + response.status = fedn.ConnectionStatus.TRY_AGAIN_LATER + return response + + def SendHeartbeat(self, heartbeat: fedn.Heartbeat, context): + """ RPC that lets clients send a hearbeat, notifying the server that + the client is available. 
+ + :param heartbeat: the heartbeat + :type heartbeat: :class:`fedn.common.net.grpc.fedn_pb2.Heartbeat` + :param context: the context (unused) + :type context: :class:`grpc._server._Context` + :return: the response + :rtype: :class:`fedn.common.net.grpc.fedn_pb2.Response` + """ + self.__register_heartbeat(heartbeat.sender) + response = fedn.Response() + response.sender.name = heartbeat.sender.name + response.sender.role = heartbeat.sender.role + response.response = "Heartbeat received" + return response + + # Combiner Service + + def ModelUpdateStream(self, update, context): + """ Model update stream RPC endpoint. Update status for client is connecting to stream. + + :param update: the update message + :type update: :class:`fedn.common.net.grpc.fedn_pb2.ModelUpdate` + :param context: the context + :type context: :class:`grpc._server._Context` + """ + client = update.sender + status = fedn.Status( + status="Client {} connecting to ModelUpdateStream.".format(client.name)) + status.log_level = fedn.Status.INFO + status.sender.name = self.id + status.sender.role = role_to_proto_role(self.role) + + self._subscribe_client_to_queue(client, fedn.Channel.MODEL_UPDATES) + q = self.__get_queue(client, fedn.Channel.MODEL_UPDATES) + + self._send_status(status) + + while context.is_active(): + try: + yield q.get(timeout=1.0) + except queue.Empty: + pass + + def ModelUpdateRequestStream(self, response, context): + """ A server stream RPC endpoint (Update model). Messages from client stream. 
+ + :param response: the response + :type response: :class:`fedn.common.net.grpc.fedn_pb2.ModelUpdateRequest` + :param context: the context + :type context: :class:`grpc._server._Context` + """ + + client = response.sender + metadata = context.invocation_metadata() + if metadata: + metadata = dict(metadata) + print("\nClient connected: {}\n".format(metadata['client']), flush=True) + + status = fedn.Status( + status="Client {} connecting to ModelUpdateRequestStream.".format(client.name)) + status.log_level = fedn.Status.INFO + status.timestamp = str(datetime.now()) + + self.__whoami(status.sender, self) + + self._subscribe_client_to_queue( + client, fedn.Channel.MODEL_UPDATE_REQUESTS) + q = self.__get_queue(client, fedn.Channel.MODEL_UPDATE_REQUESTS) + + self._send_status(status) + + self.tracer.update_client_status(client.name, "online") + + while context.is_active(): + try: + yield q.get(timeout=1.0) + except queue.Empty: + pass + + self.tracer.update_client_status(client.name, "offline") + + def ModelValidationStream(self, update, context): + """ Model validation stream RPC endpoint. Update status for client is connecting to stream. + + :param update: the update message + :type update: :class:`fedn.common.net.grpc.fedn_pb2.ModelValidation` + :param context: the context + :type context: :class:`grpc._server._Context` + """ + client = update.sender + status = fedn.Status( + status="Client {} connecting to ModelValidationStream.".format(client.name)) + status.log_level = fedn.Status.INFO + + status.sender.name = self.id + status.sender.role = role_to_proto_role(self.role) + + self._subscribe_client_to_queue(client, fedn.Channel.MODEL_VALIDATIONS) + q = self.__get_queue(client, fedn.Channel.MODEL_VALIDATIONS) + + self._send_status(status) + + while context.is_active(): + try: + yield q.get(timeout=1.0) + except queue.Empty: + pass + + def ModelValidationRequestStream(self, response, context): + """ A server stream RPC endpoint (Validation). 
Messages from client stream. + + :param response: the response + :type response: :class:`fedn.common.net.grpc.fedn_pb2.ModelValidationRequest` + :param context: the context + :type context: :class:`grpc._server._Context` + """ + + client = response.sender + status = fedn.Status( + status="Client {} connecting to ModelValidationRequestStream.".format(client.name)) + status.log_level = fedn.Status.INFO + status.sender.name = self.id + status.sender.role = role_to_proto_role(self.role) + status.timestamp = str(datetime.now()) + + self._subscribe_client_to_queue( + client, fedn.Channel.MODEL_VALIDATION_REQUESTS) + q = self.__get_queue(client, fedn.Channel.MODEL_VALIDATION_REQUESTS) + + self._send_status(status) + + while context.is_active(): + try: + yield q.get(timeout=1.0) + except queue.Empty: + pass + + def SendModelUpdateRequest(self, request, context): + """ Send a model update request. + + :param request: the request + :type request: :class:`fedn.common.net.grpc.fedn_pb2.ModelUpdateRequest` + :param context: the context + :type context: :class:`grpc._server._Context` + :return: the response + :rtype: :class:`fedn.common.net.grpc.fedn_pb2.Response` + """ + self._send_request(request, fedn.Channel.MODEL_UPDATE_REQUESTS) + + response = fedn.Response() + response.response = "RECEIVED ModelUpdateRequest from client {}".format( + request.sender.name) + return response # TODO Fill later + + def SendModelUpdate(self, request, context): + """ Send a model update response. 
+ + :param request: the request + :type request: :class:`fedn.common.net.grpc.fedn_pb2.ModelUpdate` + :param context: the context + :type context: :class:`grpc._server._Context` + :return: the response + :rtype: :class:`fedn.common.net.grpc.fedn_pb2.Response` + """ + self.control.aggregator.on_model_update(request) + + response = fedn.Response() + response.response = "RECEIVED ModelUpdate {} from client {}".format( + response, response.sender.name) + return response # TODO Fill later + + def SendModelValidationRequest(self, request, context): + """ Send a model validation request. + + :param request: the request + :type request: :class:`fedn.common.net.grpc.fedn_pb2.ModelValidationRequest` + :param context: the context + :type context: :class:`grpc._server._Context` + :return: the response + :rtype: :class:`fedn.common.net.grpc.fedn_pb2.Response` + """ + self._send_request(request, fedn.Channel.MODEL_VALIDATION_REQUESTS) + + response = fedn.Response() + response.response = "RECEIVED ModelValidationRequest from client {}".format( + request.sender.name) + return response # TODO Fill later + + def register_model_validation(self, validation): + """Register a model validation. + + :param validation: the model validation + :type validation: :class:`fedn.common.net.grpc.fedn_pb2.ModelValidation` + """ + + self.tracer.report_validation(validation) + + def SendModelValidation(self, request, context): + """ Send a model validation response. 
+ + :param request: the request + :type request: :class:`fedn.common.net.grpc.fedn_pb2.ModelValidation` + :param context: the context + :type context: :class:`grpc._server._Context` + :return: the response + :rtype: :class:`fedn.common.net.grpc.fedn_pb2.Response` + """ + self.report_status("Recieved ModelValidation from {}".format(request.sender.name), + log_level=fedn.Status.INFO) + + self.register_model_validation(request) + + response = fedn.Response() + response.response = "RECEIVED ModelValidation {} from client {}".format( + response, response.sender.name) + return response + + #################################################################################################################### + + def run(self): + """ Start the server.""" + + print("COMBINER: {} started, ready for requests. ".format( + self.id), flush=True) + try: + while True: + signal.pause() + except (KeyboardInterrupt, SystemExit): + pass + self.server.stop() diff --git a/fedn/fedn/clients/reducer/config.py b/fedn/fedn/network/config.py similarity index 84% rename from fedn/fedn/clients/reducer/config.py rename to fedn/fedn/network/config.py index 0ccfddc66..a9e8773f4 100644 --- a/fedn/fedn/clients/reducer/config.py +++ b/fedn/fedn/network/config.py @@ -6,9 +6,7 @@ class Config(ABC): class ReducerConfig(Config): - """ - - """ + """ Configuration for the Reducer component. """ compute_bundle_dir = None models_dir = None diff --git a/fedn/fedn/network/controller/__init__.py b/fedn/fedn/network/controller/__init__.py new file mode 100644 index 000000000..76372d276 --- /dev/null +++ b/fedn/fedn/network/controller/__init__.py @@ -0,0 +1,3 @@ +""" The controller package is responsible for orchestrating the federated learning process. It's acts as a gRPC client and sends round config tasks +to the :class:`fedn.network.combiner.Combiner`. 
""" +# flake8: noqa diff --git a/fedn/fedn/network/controller/control.py b/fedn/fedn/network/controller/control.py new file mode 100644 index 000000000..615edb3b5 --- /dev/null +++ b/fedn/fedn/network/controller/control.py @@ -0,0 +1,449 @@ +import copy +import datetime +import time +import uuid + +from tenacity import (retry, retry_if_exception_type, stop_after_delay, + wait_random) + +from fedn.network.combiner.interfaces import CombinerUnavailableError +from fedn.network.controller.controlbase import ControlBase +from fedn.network.state import ReducerState + + +class UnsupportedStorageBackend(Exception): + """Exception class for when storage backend is not supported. Passes""" + + def __init__(self, message): + """Constructor method. + + :param message: The exception message. + :type message: str + + """ + self.message = message + super().__init__(self.message) + + +class MisconfiguredStorageBackend(Exception): + """Exception class for when storage backend is misconfigured. + + :param message: The exception message. + :type message: str + """ + + def __init__(self, message): + """Constructor method.""" + self.message = message + super().__init__(self.message) + + +class NoModelException(Exception): + """Exception class for when model is None + + :param message: The exception message. + :type message: str + """ + + def __init__(self, message): + """Constructor method.""" + self.message = message + super().__init__(self.message) + + +class CombinersNotDoneException(Exception): + """ Exception class for when model is None """ + + def __init__(self, message): + """ Constructor method. + + :param message: The exception message. + :type message: str + + """ + self.message = message + super().__init__(self.message) + + +class Control(ControlBase): + """Controller, implementing the overall global training, validation and inference logic. + + :param statestore: A StateStorage instance. 
+ :type statestore: class: `fedn.network.statestorebase.StateStorageBase` + """ + + def __init__(self, statestore): + """Constructor method.""" + + super().__init__(statestore) + self.name = "DefaultControl" + + def session(self, config): + """Execute a new training session. A session consists of one + or several global rounds. All rounds in the same session + have the same round_config. + + :param config: The session config. + :type config: dict + + """ + + if self._state == ReducerState.instructing: + print( + "Controller already in INSTRUCTING state. A session is in progress.", + flush=True, + ) + return + + if not self.statestore.get_latest_model(): + print("No model in model chain, please provide a seed model!") + return + + self._state = ReducerState.instructing + config["committed_at"] = datetime.datetime.now().strftime( + "%Y-%m-%d %H:%M:%S" + ) + self.create_session(config) + + if not self.statestore.get_latest_model(): + print( + "No model in model chain, please provide a seed model!", + flush=True, + ) + self._state = ReducerState.monitoring + + last_round = int(self.get_latest_round_id()) + + # Clear potential stragglers/old model updates at combiners + for combiner in self.network.get_combiners(): + combiner.flush_model_update_queue() + + # Execute the rounds in this session + for round in range(1, int(config["rounds"] + 1)): + # Increment the round number + if last_round: + current_round = last_round + round + else: + current_round = round + + try: + _, round_data = self.round(config, str(current_round)) + except TypeError as e: + print( + "Could not unpack data from round: {0}".format(e), + flush=True, + ) + + print( + "CONTROL: Round completed with status {}".format( + round_data["status"] + ), + flush=True, + ) + + # TODO: Report completion of session + self._state = ReducerState.idle + + def round(self, session_config, round_id): + """ Execute one global round. + + : param session_config: The session config. 
+ : type session_config: dict + : param round_id: The round id. + : type round_id: str + + """ + + self.create_round({'round_id': round_id, 'status': "Pending"}) + + if len(self.network.get_combiners()) < 1: + print("CONTROLLER: Round cannot start, no combiners connected!", flush=True) + self.set_round_status(round_id, 'Failed') + return None, self.statestore.get_round(round_id) + + # Assemble round config for this global round + round_config = copy.deepcopy(session_config) + round_config["rounds"] = 1 + round_config["round_id"] = round_id + round_config["task"] = "training" + round_config["model_id"] = self.statestore.get_latest_model() + round_config["helper_type"] = self.statestore.get_helper() + + self.set_round_config(round_id, round_config) + + # Get combiners that are able to participate in round, given round_config + participating_combiners = self.get_participating_combiners(round_config) + + # Check if the policy to start the round is met + round_start = self.evaluate_round_start_policy(participating_combiners) + + if round_start: + print("CONTROL: round start policy met, {} participating combiners.".format( + len(participating_combiners)), flush=True) + else: + print("CONTROL: Round start policy not met, skipping round!", flush=True) + self.set_round_status(round_id, 'Failed') + return None, self.statestore.get_round(round_id) + + # Ask participating combiners to coordinate model updates + _ = self.request_model_updates(participating_combiners) + # TODO: Check response + + # Wait until participating combiners have produced an updated global model, + # or round times out. 
+ def do_if_round_times_out(result): + print("CONTROL: Round timed out!", flush=True) + + @retry(wait=wait_random(min=1.0, max=2.0), + stop=stop_after_delay(session_config['round_timeout']), + retry_error_callback=do_if_round_times_out, + retry=retry_if_exception_type(CombinersNotDoneException)) + def combiners_done(): + + round = self.statestore.get_round(round_id) + if 'combiners' not in round: + # TODO: use logger + print("CONTROL: Waiting for combiners to update model...", flush=True) + raise CombinersNotDoneException("Combiners have not yet reported.") + + if len(round['combiners']) < len(participating_combiners): + print("CONTROL: Waiting for combiners to update model...", flush=True) + raise CombinersNotDoneException("All combiners have not yet reported.") + + return True + + combiners_done() + + # Due to the distributed nature of the computation, there might be a + # delay before combiners have reported the round data to the db, + # so we need some robustness here. + @retry(wait=wait_random(min=0.1, max=1.0), + retry=retry_if_exception_type(KeyError)) + def check_combiners_done_reporting(): + round = self.statestore.get_round(round_id) + combiners = round['combiners'] + return combiners + + _ = check_combiners_done_reporting() + + round = self.statestore.get_round(round_id) + round_valid = self.evaluate_round_validity_policy(round) + if not round_valid: + print("REDUCER CONTROL: Round invalid!", flush=True) + self.set_round_status(round_id, 'Failed') + return None, self.statestore.get_round(round_id) + + print("CONTROL: Reducing combiner level models...", flush=True) + # Reduce combiner models into a new global model + round_data = {} + try: + round = self.statestore.get_round(round_id) + model, data = self.reduce(round['combiners']) + round_data['reduce'] = data + print("CONTROL: Done reducing models from combiners!", flush=True) + except Exception as e: + print("CONTROL: Failed to reduce models from combiners: {}".format( + e), flush=True) + 
self.set_round_status(round_id, 'Failed') + return None, self.statestore.get_round(round_id) + + # Commit the new global model to the model trail + if model is not None: + print( + "CONTROL: Committing global model to model trail...", + flush=True, + ) + tic = time.time() + model_id = uuid.uuid4() + session_id = ( + session_config["session_id"] + if "session_id" in session_config + else None + ) + self.commit(model_id, model, session_id) + round_data["time_commit"] = time.time() - tic + print( + "CONTROL: Done committing global model to model trail!", + flush=True, + ) + else: + print( + "REDUCER: failed to update model in round with config {}".format( + session_config + ), + flush=True, + ) + self.set_round_status(round_id, 'Failed') + return None, self.statestore.get_round(round_id) + + self.set_round_status(round_id, 'Success') + + # 4. Trigger participating combiner nodes to execute a validation round for the current model + validate = session_config["validate"] + if validate: + combiner_config = copy.deepcopy(session_config) + combiner_config["round_id"] = round_id + combiner_config["model_id"] = self.statestore.get_latest_model() + combiner_config["task"] = "validation" + combiner_config["helper_type"] = self.statestore.get_helper() + + validating_combiners = self.get_participating_combiners( + combiner_config) + + for combiner, combiner_config in validating_combiners: + try: + print( + "CONTROL: Submitting validation round to combiner {}".format( + combiner + ), + flush=True, + ) + combiner.submit(combiner_config) + except CombinerUnavailableError: + self._handle_unavailable_combiner(combiner) + pass + + self.set_round_data(round_id, round_data) + self.set_round_status(round_id, 'Finished') + return model_id, self.statestore.get_round(round_id) + + def reduce(self, combiners): + """Combine updated models from Combiner nodes into one global model. 
+ + : param combiners: dict of combiner names(key) and model IDs(value) to reduce + : type combiners: dict + """ + + meta = {} + meta["time_fetch_model"] = 0.0 + meta["time_load_model"] = 0.0 + meta["time_aggregate_model"] = 0.0 + + i = 1 + model = None + # Check if there are any combiners to reduce + if len(combiners) == 0: + print("REDUCER: No combiners to reduce!", flush=True) + return model, meta + + for combiner in combiners: + name = combiner['name'] + model_id = combiner['model_id'] + # TODO: Handle inactive RPC error in get_model and raise specific error + print( + "REDUCER: Fetching model ({model_id}) from combiner {name}".format( + model_id=model_id, name=name + ), + flush=True, + ) + try: + tic = time.time() + combiner_interface = self.get_combiner(name) + data = combiner_interface.get_model(model_id) + meta['time_fetch_model'] += (time.time() - tic) + except Exception as e: + print( + "REDUCER: Failed to fetch model from combiner {}: {}".format( + name, e + ), + flush=True, + ) + data = None + + if data is not None: + try: + tic = time.time() + helper = self.get_helper() + data.seek(0) + model_next = helper.load(data) + meta["time_load_model"] += time.time() - tic + tic = time.time() + model = helper.increment_average(model, model_next, i, i) + meta["time_aggregate_model"] += time.time() - tic + except Exception: + tic = time.time() + data.seek(0) + model = helper.load(data) + meta["time_aggregate_model"] += time.time() - tic + i = i + 1 + + return model, meta + + def infer_instruct(self, config): + """Main entrypoint for executing the inference compute plan. 
+ + : param config: configuration for the inference round + """ + + # Check/set instucting state + if self.__state == ReducerState.instructing: + print("Already set in INSTRUCTING state", flush=True) + return + self.__state = ReducerState.instructing + + # Check for a model chain + if not self.statestore.latest_model(): + print("No model in model chain, please seed the alliance!") + + # Set reducer in monitoring state + self.__state = ReducerState.monitoring + + # Start inference round + try: + self.inference_round(config) + except TypeError: + print("Could not unpack data from round...", flush=True) + + # Set reducer in idle state + self.__state = ReducerState.idle + + def inference_round(self, config): + """Execute an inference round. + + : param config: configuration for the inference round + """ + + # Init meta + round_data = {} + + # Check for at least one combiner in statestore + if len(self.network.get_combiners()) < 1: + print("REDUCER: No combiners connected!") + return round_data + + # Setup combiner configuration + combiner_config = copy.deepcopy(config) + combiner_config["model_id"] = self.statestore.get_latest_model() + combiner_config["task"] = "inference" + combiner_config["helper_type"] = self.statestore.get_framework() + + # Select combiners + validating_combiners = self.get_participating_combiners( + combiner_config) + + # Test round start policy + round_start = self.check_round_start_policy(validating_combiners) + if round_start: + print( + "CONTROL: round start policy met, participating combiners {}".format( + validating_combiners + ), + flush=True, + ) + else: + print( + "CONTROL: Round start policy not met, skipping round!", + flush=True, + ) + return None + + # Synch combiners with latest model and trigger inference + for combiner, combiner_config in validating_combiners: + try: + combiner.submit(combiner_config) + except CombinerUnavailableError: + # It is OK if inference fails for a combiner + self._handle_unavailable_combiner(combiner) + 
pass + + return round_data diff --git a/fedn/fedn/network/controller/controlbase.py b/fedn/fedn/network/controller/controlbase.py new file mode 100644 index 000000000..fab6a2027 --- /dev/null +++ b/fedn/fedn/network/controller/controlbase.py @@ -0,0 +1,382 @@ +import os +import uuid +from abc import ABC, abstractmethod +from time import sleep + +import fedn.utils.helpers +from fedn.common.storage.s3.s3repo import S3ModelRepository +from fedn.common.tracer.mongotracer import MongoTracer +from fedn.network.api.network import Network +from fedn.network.combiner.interfaces import CombinerUnavailableError +from fedn.network.state import ReducerState + +# Maximum number of tries to connect to statestore and retrieve storage configuration +MAX_TRIES_BACKEND = os.getenv("MAX_TRIES_BACKEND", 10) + + +class UnsupportedStorageBackend(Exception): + pass + + +class MisconfiguredStorageBackend(Exception): + pass + + +class MisconfiguredHelper(Exception): + pass + + +class ControlBase(ABC): + """Base class and interface for a global controller. + Override this class to implement a global training strategy (control). + + :param statestore: The statestore object. 
+ :type statestore: :class:`fedn.network.statestore.statestorebase.StateStoreBase` + """ + + @abstractmethod + def __init__(self, statestore): + """Constructor.""" + self._state = ReducerState.setup + + self.statestore = statestore + if self.statestore.is_inited(): + self.network = Network(self, statestore) + + try: + not_ready = True + tries = 0 + while not_ready: + storage_config = self.statestore.get_storage_backend() + if storage_config: + not_ready = False + else: + print( + "REDUCER CONTROL: Storage backend not configured, waiting...", + flush=True, + ) + sleep(5) + tries += 1 + if tries > MAX_TRIES_BACKEND: + raise Exception + except Exception: + print( + "REDUCER CONTROL: Failed to retrive storage configuration, exiting.", + flush=True, + ) + raise MisconfiguredStorageBackend() + + if storage_config["storage_type"] == "S3": + self.model_repository = S3ModelRepository( + storage_config["storage_config"] + ) + else: + print( + "REDUCER CONTROL: Unsupported storage backend, exiting.", + flush=True, + ) + raise UnsupportedStorageBackend() + + # The tracer is a helper that manages state in the database backend + statestore_config = statestore.get_config() + self.tracer = MongoTracer( + statestore_config["mongo_config"], statestore_config["network_id"] + ) + + if self.statestore.is_inited(): + self._state = ReducerState.idle + + @abstractmethod + def session(self, config): + pass + + @abstractmethod + def round(self, config, round_number): + pass + + @abstractmethod + def reduce(self, combiners): + pass + + def get_helper(self): + """Get a helper instance from global config. + + :return: Helper instance. 
+ :rtype: :class:`fedn.utils.plugins.helperbase.HelperBase` + """ + helper_type = self.statestore.get_helper() + helper = fedn.utils.helpers.get_helper(helper_type) + if not helper: + raise MisconfiguredHelper( + "Unsupported helper type {}, please configure compute_package.helper !".format( + helper_type + ) + ) + return helper + + def get_state(self): + """Get the current state of the controller. + + :return: The current state. + :rtype: :class:`fedn.network.state.ReducerState` + """ + return self._state + + def idle(self): + """Check if the controller is idle. + + :return: True if idle, False otherwise. + :rtype: bool + """ + if self._state == ReducerState.idle: + return True + else: + return False + + def get_model_info(self): + """ + + :return: + """ + return self.statestore.get_model_trail() + + # TODO: remove use statestore.get_events() instead + def get_events(self): + """ + + :return: + """ + return self.statestore.get_events() + + def get_latest_round_id(self): + last_round = self.statestore.get_latest_round() + if not last_round: + return 0 + else: + return last_round["round_id"] + + def get_latest_round(self): + round = self.statestore.get_latest_round() + return round + + def get_compute_package_name(self): + """ + + :return: + """ + definition = self.statestore.get_compute_package() + if definition: + try: + package_name = definition["filename"] + return package_name + except (IndexError, KeyError): + print( + "No context filename set for compute context definition", + flush=True, + ) + return None + else: + return None + + def set_compute_package(self, filename, path): + """Persist the configuration for the compute package.""" + self.model_repository.set_compute_package(filename, path) + self.statestore.set_compute_package(filename) + + def get_compute_package(self, compute_package=""): + """ + + :param compute_package: + :return: + """ + if compute_package == "": + compute_package = self.get_compute_package_name() + if compute_package: + return 
self.model_repository.get_compute_package(compute_package) + else: + return None + + def create_session(self, config): + """ Initialize a new session in backend db. """ + + if "session_id" not in config.keys(): + session_id = uuid.uuid4() + config["session_id"] = str(session_id) + else: + session_id = config["session_id"] + + self.tracer.create_session(id=session_id) + self.tracer.set_session_config(session_id, config) + + def create_round(self, round_data): + """Initialize a new round in backend db. """ + + self.tracer.create_round(round_data) + + def set_round_data(self, round_id, round_data): + """ Set round data. + + :param round_id: The round unique identifier + :type round_id: str + :param round_data: The round data + :type round_data: dict + """ + self.tracer.set_round_data(round_id, round_data) + + def set_round_status(self, round_id, status): + """ Set the round status. + + :param round_id: The round unique identifier + :type round_id: str + :param status: The status + :type status: str + """ + self.tracer.set_round_status(round_id, status) + + def set_round_config(self, round_id, round_config): + """ Update round in backend db. + + :param round_id: The round unique identifier + :type round_id: str + :param round_config: The round configuration + :type round_config: dict + """ + self.tracer.set_round_config(round_id, round_config) + + def request_model_updates(self, combiners): + """Ask Combiner server to produce a model update. + + :param combiners: A list of combiners + :type combiners: tuple (combiner, combiner_round_config) + """ + cl = [] + for combiner, combiner_round_config in combiners: + response = combiner.submit(combiner_round_config) + cl.append((combiner, response)) + return cl + + def commit(self, model_id, model=None, session_id=None): + """Commit a model to the global model trail. The model committed becomes the latest consensus model. + + :param model_id: Unique identifier for the model to commit. 
+ :type model_id: str (uuid) + :param model: The model object to commit + :type model: BytesIO + :param session_id: Unique identifier for the session + :type session_id: str + """ + + helper = self.get_helper() + if model is not None: + print( + "CONTROL: Saving model file temporarily to disk...", flush=True + ) + outfile_name = helper.save(model) + print("CONTROL: Uploading model to Minio...", flush=True) + model_id = self.model_repository.set_model( + outfile_name, is_file=True + ) + + print("CONTROL: Deleting temporary model file...", flush=True) + os.unlink(outfile_name) + + print( + "CONTROL: Committing model {} to global model trail in statestore...".format( + model_id + ), + flush=True, + ) + self.statestore.set_latest_model(model_id, session_id) + + def get_combiner(self, name): + for combiner in self.network.get_combiners(): + if combiner.name == name: + return combiner + return None + + def get_participating_combiners(self, combiner_round_config): + """Assemble a list of combiners able to participate in a round as + described by combiner_round_config. + """ + combiners = [] + for combiner in self.network.get_combiners(): + try: + combiner_state = combiner.report() + except CombinerUnavailableError: + self._handle_unavailable_combiner(combiner) + combiner_state = None + + if combiner_state is not None: + is_participating = self.evaluate_round_participation_policy( + combiner_round_config, combiner_state + ) + if is_participating: + combiners.append((combiner, combiner_round_config)) + return combiners + + def evaluate_round_participation_policy( + self, compute_plan, combiner_state + ): + """Evaluate policy for combiner round-participation. + A combiner participates if it is responsive and reports enough + active clients to participate in the round. 
+ """ + + if compute_plan["task"] == "training": + nr_active_clients = int(combiner_state["nr_active_trainers"]) + elif compute_plan["task"] == "validation": + nr_active_clients = int(combiner_state["nr_active_validators"]) + else: + print("Invalid task type!", flush=True) + return False + + if int(compute_plan["clients_required"]) <= nr_active_clients: + return True + else: + return False + + def evaluate_round_start_policy(self, combiners): + """Check if the policy to start a round is met. + + :param combiners: A list of combiners + :type combiners: list + :return: True if the round policy is mer, otherwise False + :rtype: bool + """ + if len(combiners) > 0: + return True + else: + return False + + def evaluate_round_validity_policy(self, round): + """ Check if the round is valid. + + At the end of the round, before committing a model to the global model trail, + we check if the round validity policy has been met. This can involve + e.g. asserting that a certain number of combiners have reported in an + updated model, or that criteria on model performance have been met. 
+ + :param round: The round object + :type round: dict + :return: True if the policy is met, otherwise False + :rtype: bool + """ + model_ids = [] + for combiner in round['combiners']: + try: + model_ids.append(combiner['model_id']) + except KeyError: + pass + + if len(model_ids) == 0: + return False + + return True + + def state(self): + """ Get the current state of the controller + + :return: The state + :rtype: str + """ + return self._state diff --git a/fedn/fedn/clients/__init__.py b/fedn/fedn/network/dashboard/__init__.py similarity index 100% rename from fedn/fedn/clients/__init__.py rename to fedn/fedn/network/dashboard/__init__.py diff --git a/fedn/fedn/clients/reducer/plots.py b/fedn/fedn/network/dashboard/plots.py similarity index 100% rename from fedn/fedn/clients/reducer/plots.py rename to fedn/fedn/network/dashboard/plots.py diff --git a/fedn/fedn/network/dashboard/restservice.py b/fedn/fedn/network/dashboard/restservice.py new file mode 100644 index 000000000..14e7266bb --- /dev/null +++ b/fedn/fedn/network/dashboard/restservice.py @@ -0,0 +1,1187 @@ +import base64 +import copy +import datetime +import json +import os +import re +import threading +from io import BytesIO +from threading import Lock + +import jwt +import pandas as pd +from bokeh.embed import json_item +from bson import json_util +from flask import (Flask, abort, flash, jsonify, make_response, redirect, + render_template, request, send_file, send_from_directory, + url_for) +from werkzeug.utils import secure_filename + +from fedn.common.tracer.mongotracer import MongoTracer +from fedn.network.combiner.interfaces import CombinerInterface +from fedn.network.dashboard.plots import Plot +from fedn.network.state import ReducerState, ReducerStateToString +from fedn.utils.checksum import sha + +UPLOAD_FOLDER = "/app/client/package/" +ALLOWED_EXTENSIONS = {"gz", "bz2", "tar", "zip", "tgz"} + + +def allowed_file(filename): + """ + + :param filename: + :return: + """ + return ( + "." 
in filename + and filename.rsplit(".", 1)[1].lower() in ALLOWED_EXTENSIONS + ) + + +def encode_auth_token(secret_key): + """Generates the Auth Token + :return: string + """ + try: + payload = { + "exp": datetime.datetime.utcnow() + + datetime.timedelta(days=90, seconds=0), + "iat": datetime.datetime.utcnow(), + "status": "Success", + } + token = jwt.encode(payload, secret_key, algorithm="HS256") + print( + "\n\n\nSECURE MODE ENABLED, USE TOKEN TO ACCESS REDUCER: **** {} ****\n\n\n".format( + token + ) + ) + return token + except Exception as e: + return e + + +def decode_auth_token(auth_token, secret): + """Decodes the auth token + :param auth_token: + :return: string + """ + try: + payload = jwt.decode(auth_token, secret, algorithms=["HS256"]) + return payload["status"] + except jwt.ExpiredSignatureError as e: + print(e) + return "Token has expired." + except jwt.InvalidTokenError as e: + print(e) + return "Invalid token." + + +class ReducerRestService: + """ """ + + def __init__(self, config, control, statestore, certificate_manager): + print("config object!: \n\n\n\n{}".format(config)) + if config["host"]: + self.host = config["host"] + else: + self.host = None + + self.name = config["name"] + + self.port = config["port"] + self.network_id = config["name"] + "-network" + + if "token" in config.keys(): + self.token_auth_enabled = True + else: + self.token_auth_enabled = False + + if "secret_key" in config.keys(): + self.SECRET_KEY = config["secret_key"] + else: + self.SECRET_KEY = None + + if "use_ssl" in config.keys(): + self.use_ssl = config["use_ssl"] + + self.remote_compute_package = config["remote_compute_package"] + if self.remote_compute_package: + self.package = "remote" + else: + self.package = "local" + + self.control = control + self.statestore = statestore + self.certificate_manager = certificate_manager + self.current_compute_context = None + + def to_dict(self): + """ + + :return: + """ + data = {"name": self.name} + return data + + def 
Check if compute package has been configured and that the
+ :return: Rendered html template or None + """ + if not self.check_compute_package(): + return render_template( + "setup.html", + client=self.name, + state=ReducerStateToString(self.control.state()), + logs=None, + refresh=False, + message="Please set the compute package", + ) + + if self.control.state() == ReducerState.setup: + return render_template( + "setup.html", + client=self.name, + state=ReducerStateToString(self.control.state()), + logs=None, + refresh=True, + message="Warning. Reducer is not base-configured. please do so with config file.", + ) + + if not self.check_initial_model(): + return render_template( + "setup_model.html", message="Please set the initial model." + ) + + return None + + def authorize(self, r, secret): + """Authorize client token + + :param r: Request + :type r: [type] + :param token: Token to verify against + :type token: string + """ + try: + # Get token + if "Authorization" in r.headers: # header auth + request_token = r.headers.get("Authorization").split()[1] + elif "token" in r.args: # args auth + request_token = str(r.args.get("token")) + elif "fedn_token" in r.cookies: + request_token = r.cookies.get("fedn_token") + else: # no token provided + print("Authorization failed. No token provided.", flush=True) + abort(401) + + # Log token and secret + print( + f"Secret: {secret}. Request token: {request_token}.", + flush=True, + ) + + # Authenticate + status = decode_auth_token(request_token, secret) + if status == "Success": + return True + else: + print( + 'Authorization failed. Status: "{}"'.format(status), + flush=True, + ) + abort(401) + except Exception as e: + print( + 'Authorization failed. 
Expection encountered: "{}".'.format(e), + flush=True, + ) + abort(401) + + def run(self): + """ + + :return: + """ + app = Flask(__name__) + + app.config["UPLOAD_FOLDER"] = UPLOAD_FOLDER + app.config["SECRET_KEY"] = self.SECRET_KEY + + @app.route("/") + def index(): + """ + + :return: + """ + # Token auth + if self.token_auth_enabled: + self.authorize(request, app.config.get("SECRET_KEY")) + + # Render template + not_configured_template = self.check_configured() + if not_configured_template: + template = not_configured_template + else: + events = self.control.get_events() + message = request.args.get("message", None) + message_type = request.args.get("message_type", None) + template = render_template( + "events.html", + client=self.name, + state=ReducerStateToString(self.control.state()), + events=events, + logs=None, + refresh=True, + configured=True, + message=message, + message_type=message_type, + ) + + # Set token cookie in response if needed + response = make_response(template) + if "token" in request.args: # args auth + response.set_cookie("fedn_token", str(request.args["token"])) + + # Return response + return response + + @app.route("/status") + def status(): + """ + + :return: + """ + return {"state": ReducerStateToString(self.control.state())} + + @app.route("/netgraph") + def netgraph(): + """ + Creates nodes and edges for network graph + + :return: nodes and edges as keys + :rtype: dict + """ + result = {"nodes": [], "edges": []} + + result["nodes"].append( + { + "id": "reducer", + "label": "Reducer", + "role": "reducer", + "status": "active", + "name": "reducer", # TODO: get real host name + "type": "reducer", + } + ) + + combiner_info = combiner_status() + client_info = client_status() + + if len(combiner_info) < 1: + return result + + for combiner in combiner_info: + print("combiner info {}".format(combiner_info), flush=True) + try: + result["nodes"].append( + { + "id": combiner["name"], # "n{}".format(count), + "label": "Combiner ({} 
clients)".format( + combiner["nr_active_clients"] + ), + "role": "combiner", + "status": "active", # TODO: Hard-coded, combiner_info does not contain status + "name": combiner["name"], + "type": "combiner", + } + ) + except Exception as err: + print(err) + + for client in client_info["active_clients"]: + try: + if client["status"] != "offline": + result["nodes"].append( + { + "id": str(client["_id"]), + "label": "Client", + "role": client["role"], + "status": client["status"], + "name": client["name"], + "combiner": client["combiner"], + "type": "client", + } + ) + except Exception as err: + print(err) + + count = 0 + for node in result["nodes"]: + try: + if node["type"] == "combiner": + result["edges"].append( + { + "id": "e{}".format(count), + "source": node["id"], + "target": "reducer", + } + ) + elif node["type"] == "client": + result["edges"].append( + { + "id": "e{}".format(count), + "source": node["combiner"], + "target": node["id"], + } + ) + except Exception: + pass + count = count + 1 + return result + + @app.route("/networkgraph") + def network_graph(): + try: + plot = Plot(self.control.statestore) + result = netgraph() + df_nodes = pd.DataFrame(result["nodes"]) + df_edges = pd.DataFrame(result["edges"]) + graph = plot.make_netgraph_plot(df_edges, df_nodes) + return json.dumps(json_item(graph, "myplot")) + except Exception: + raise + # return '' + + @app.route("/events") + def events(): + """ + + :return: + """ + + response = self.control.get_events() + events = [] + + result = response["result"] + + for evt in result: + events.append(evt) + + return jsonify({"result": events, "count": response["count"]}) + + json_docs = [] + for doc in self.control.get_events(): + json_doc = json.dumps(doc, default=json_util.default) + json_docs.append(json_doc) + + json_docs.reverse() + + return {"events": json_docs} + + @app.route("/add") + def add(): + """Add a combiner to the network.""" + print("Adding combiner to network:", flush=True) + if 
self.token_auth_enabled: + self.authorize(request, app.config.get("SECRET_KEY")) + if self.control.state() == ReducerState.setup: + return jsonify({"status": "retry"}) + + name = request.args.get("name", None) + address = str(request.args.get("address", None)) + fqdn = str(request.args.get("fqdn", None)) + port = request.args.get("port", None) + secure_grpc = request.args.get("secure", None) + + if ( + port is None + or address is None + or name is None + or secure_grpc is None + ): + return "Please specify correct parameters." + + # Try to retrieve combiner from db + combiner = self.control.network.get_combiner(name) + if not combiner: + if secure_grpc == "True": + certificate, key = self.certificate_manager.get_or_create( + address + ).get_keypair_raw() + _ = base64.b64encode(certificate) + _ = base64.b64encode(key) + + else: + certificate = None + key = None + + combiner = CombinerInterface( + self, + name=name, + address=address, + fqdn=fqdn, + port=port, + certificate=copy.deepcopy(certificate), + key=copy.deepcopy(key), + ip=request.remote_addr, + ) + + self.control.network.add_combiner(combiner) + + combiner = self.control.network.get_combiner(name) + + ret = { + "status": "added", + "storage": self.control.statestore.get_storage_backend(), + "statestore": self.control.statestore.get_config(), + "certificate": combiner.get_certificate(), + "key": combiner.get_key(), + } + + return jsonify(ret) + + @app.route("/eula", methods=["GET", "POST"]) + def eula(): + """ + + :return: + """ + for r in request.headers: + print("header contains: {}".format(r), flush=True) + + return render_template("eula.html", configured=True) + + @app.route("/models", methods=["GET", "POST"]) + def models(): + """ + + :return: + """ + # Token auth + if self.token_auth_enabled: + self.authorize(request, app.config.get("SECRET_KEY")) + + if request.method == "POST": + # upload seed file + uploaded_seed = request.files["seed"] + if uploaded_seed: + a = BytesIO() + a.seek(0, 0) + 
uploaded_seed.seek(0) + a.write(uploaded_seed.read()) + helper = self.control.get_helper() + a.seek(0) + model = helper.load(a) + self.control.commit(uploaded_seed.filename, model) + else: + not_configured = self.check_configured() + if not_configured: + return not_configured + + plot = Plot(self.control.statestore) + try: + valid_metrics = plot.fetch_valid_metrics() + box_plot = plot.create_box_plot(valid_metrics[0]) + except Exception as e: + valid_metrics = None + box_plot = None + print(e, flush=True) + + h_latest_model_id = self.statestore.get_latest_model() + + model_info = self.control.get_model_info() + return render_template( + "models.html", + box_plot=box_plot, + metrics=valid_metrics, + h_latest_model_id=h_latest_model_id, + seed=True, + model_info=model_info, + configured=True, + ) + + seed = True + return redirect(url_for("models", seed=seed)) + + @app.route("/delete_model_trail", methods=["GET", "POST"]) + def delete_model_trail(): + """ + + :return: + """ + if request.method == "POST": + statestore_config = self.control.statestore.get_config() + self.tracer = MongoTracer( + statestore_config["mongo_config"], + statestore_config["network_id"], + ) + try: + self.control.drop_models() + except Exception: + pass + + # drop objects in minio + self.control.delete_bucket_objects() + return redirect(url_for("models")) + seed = True + return redirect(url_for("models", seed=seed)) + + @app.route("/drop_control", methods=["GET", "POST"]) + def drop_control(): + """ + + :return: + """ + if request.method == "POST": + self.control.statestore.drop_control() + return redirect(url_for("control")) + return redirect(url_for("control")) + + # http://localhost:8090/control?rounds=4&model_id=879fa112-c861-4cb1-a25d-775153e5b548 + @app.route("/control", methods=["GET", "POST"]) + def control(): + """Main page for round control. 
Configure, start and stop training sessions.""" + # Token auth + if self.token_auth_enabled: + self.authorize(request, app.config.get("SECRET_KEY")) + + not_configured = self.check_configured() + if not_configured: + return not_configured + + state = ReducerStateToString(self.control.state()) + refresh = True + + if self.remote_compute_package: + try: + self.current_compute_context = ( + self.control.get_compute_package_name() + ) + except Exception: + self.current_compute_context = None + else: + self.current_compute_context = "None:Local" + if self.control.state() == ReducerState.monitoring: + return redirect( + url_for( + "index", + state=state, + refresh=refresh, + message="Reducer is in monitoring state", + ) + ) + + if request.method == "POST": + # Get session configuration + round_timeout = float(request.form.get("timeout", 180)) + buffer_size = int(request.form.get("buffer_size", -1)) + rounds = int(request.form.get("rounds", 1)) + delete_models = request.form.get("delete_models", True) + task = request.form.get("task", "") + clients_required = request.form.get("clients_required", 1) + clients_requested = request.form.get("clients_requested", 8) + + # checking if there are enough clients connected to start! + clients_available = 0 + for combiner in self.control.network.get_combiners(): + try: + combiner_state = combiner.report() + nac = combiner_state["nr_active_clients"] + clients_available = clients_available + int(nac) + except Exception: + pass + + if clients_available < clients_required: + return redirect( + url_for( + "index", + state=state, + message="Not enough clients available to start rounds! 
" + "check combiner client capacity", + message_type="warning", + ) + ) + + validate = request.form.get("validate", False) + if validate == "False": + validate = False + helper_type = request.form.get("helper", "keras") + # self.control.statestore.set_framework(helper_type) + + latest_model_id = self.statestore.get_latest_model() + + config = { + "round_timeout": round_timeout, + "buffer_size": buffer_size, + "model_id": latest_model_id, + "rounds": rounds, + "delete_models_storage": delete_models, + "clients_required": clients_required, + "clients_requested": clients_requested, + "task": task, + "validate": validate, + "helper_type": helper_type, + } + + threading.Thread( + target=self.control.session, args=(config,) + ).start() + + return redirect( + url_for( + "index", + state=state, + refresh=refresh, + message="Sent execution plan.", + message_type="SUCCESS", + ) + ) + + else: + seed_model_id = None + latest_model_id = None + try: + seed_model_id = self.statestore.get_initial_model() + latest_model_id = self.statestore.get_latest_model() + except Exception: + pass + + return render_template( + "index.html", + latest_model_id=latest_model_id, + compute_package=self.current_compute_context, + seed_model_id=seed_model_id, + helper=self.control.statestore.get_helper(), + validate=True, + configured=True, + ) + + @app.route("/assign") + def assign(): + """Handle client assignment requests.""" + + if self.token_auth_enabled: + self.authorize(request, app.config.get("SECRET_KEY")) + + response = self.check_configured_response() + + if response: + return response + + name = request.args.get("name", None) + combiner_preferred = request.args.get("combiner", None) + + if combiner_preferred: + combiner = self.control.network.get_combiner( + combiner_preferred + ) + else: + combiner = self.control.network.find_available_combiner() + + if combiner is None: + return jsonify( + { + "status": "retry", + "package": self.package, + "msg": "Failed to assign to a combiner, try 
again later.", + } + ) + + client = { + "name": name, + "combiner_preferred": combiner_preferred, + "combiner": combiner.name, + "ip": request.remote_addr, + "status": "available", + } + + # Add client to database + self.control.network.add_client(client) + + # Return connection information to client + if combiner.certificate: + cert_b64 = base64.b64encode(combiner.certificate) + cert = str(cert_b64).split("'")[1] + else: + cert = None + + response = { + "status": "assigned", + "host": combiner.address, + "fqdn": combiner.fqdn, + "package": self.package, + "ip": combiner.ip, + "port": combiner.port, + "certificate": cert, + "model_type": self.control.statestore.get_helper(), + } + + return jsonify(response) + + def combiner_status(): + """Get current status reports from all combiners registered in the network. + + :return: + """ + combiner_info = [] + for combiner in self.control.network.get_combiners(): + try: + report = combiner.report() + combiner_info.append(report) + except Exception: + pass + return combiner_info + + def client_status(): + """ + Get current status of clients (available) from DB compared with client status from all combiners, + update client status to DB and add their roles. 
+ """ + client_info = self.control.network.get_client_info() + combiner_info = combiner_status() + try: + all_active_trainers = [] + all_active_validators = [] + + for client in combiner_info: + active_trainers_str = client["active_trainers"] + active_validators_str = client["active_validators"] + active_trainers_str = re.sub( + "[^a-zA-Z0-9-:\n\.]", "", active_trainers_str # noqa: W605 + ).replace( + "name:", " " + ) + active_validators_str = re.sub( + "[^a-zA-Z0-9-:\n\.]", "", active_validators_str # noqa: W605 + ).replace( + "name:", " " + ) + all_active_trainers.extend( + " ".join(active_trainers_str.split(" ")).split() + ) + all_active_validators.extend( + " ".join(active_validators_str.split(" ")).split() + ) + + active_trainers_list = [ + client + for client in client_info + if client["name"] in all_active_trainers + ] + active_validators_list = [ + cl + for cl in client_info + if cl["name"] in all_active_validators + ] + all_clients = [cl for cl in client_info] + + for client in all_clients: + status = "offline" + role = "None" + self.control.network.update_client_data( + client, status, role + ) + + all_active_clients = ( + active_validators_list + active_trainers_list + ) + for client in all_active_clients: + status = "active" + if ( + client in active_trainers_list + and client in active_validators_list + ): + role = "trainer-validator" + elif client in active_trainers_list: + role = "trainer" + elif client in active_validators_list: + role = "validator" + else: + role = "unknown" + self.control.network.update_client_data( + client, status, role + ) + + return { + "active_clients": all_clients, + "active_trainers": active_trainers_list, + "active_validators": active_validators_list, + } + except Exception: + pass + + return { + "active_clients": [], + "active_trainers": [], + "active_validators": [], + } + + @app.route("/metric_type", methods=["GET", "POST"]) + def change_features(): + """ + + :return: + """ + feature = request.args["selected"] + plot = 
Plot(self.control.statestore) + graphJSON = plot.create_box_plot(feature) + return graphJSON + + @app.route("/dashboard") + def dashboard(): + """ + + :return: + """ + # Token auth + if self.token_auth_enabled: + self.authorize(request, app.config.get("SECRET_KEY")) + + not_configured = self.check_configured() + if not_configured: + return not_configured + + plot = Plot(self.control.statestore) + combiners_plot = plot.create_combiner_plot() + + timeline_plot = None + table_plot = None + clients_plot = plot.create_client_plot() + client_histogram_plot = plot.create_client_histogram_plot() + + return render_template( + "dashboard.html", + show_plot=True, + table_plot=table_plot, + timeline_plot=timeline_plot, + clients_plot=clients_plot, + client_histogram_plot=client_histogram_plot, + combiners_plot=combiners_plot, + configured=True, + ) + + @app.route("/network") + def network(): + """ + + :return: + """ + # Token auth + if self.token_auth_enabled: + self.authorize(request, app.config.get("SECRET_KEY")) + + not_configured = self.check_configured() + if not_configured: + return not_configured + plot = Plot(self.control.statestore) + round_time_plot = plot.create_round_plot() + mem_cpu_plot = plot.create_cpu_plot() + combiner_info = combiner_status() + active_clients = client_status() + # print(combiner_info, flush=True) + return render_template( + "network.html", + network_plot=True, + round_time_plot=round_time_plot, + mem_cpu_plot=mem_cpu_plot, + combiner_info=combiner_info, + active_clients=active_clients["active_clients"], + active_trainers=active_clients["active_trainers"], + active_validators=active_clients["active_validators"], + configured=True, + ) + + @app.route("/config/download", methods=["GET"]) + def config_download(): + """ + + :return: + """ + chk_string = "" + name = self.control.get_compute_package_name() + if name is None or name == "": + chk_string = "" + else: + file_path = os.path.join(UPLOAD_FOLDER, name) + print("trying to get 
{}".format(file_path)) + + try: + sum = str(sha(file_path)) + except FileNotFoundError: + sum = "" + chk_string = "checksum: {}".format(sum) + + network_id = self.network_id + discover_host = self.name + discover_port = self.port + ctx = """network_id: {network_id} +discover_host: {discover_host} +discover_port: {discover_port} +{chk_string}""".format( + network_id=network_id, + discover_host=discover_host, + discover_port=discover_port, + chk_string=chk_string, + ) + + obj = BytesIO() + obj.write(ctx.encode("UTF-8")) + obj.seek(0) + return send_file( + obj, + as_attachment=True, + download_name="client.yaml", + mimetype="application/x-yaml", + ) + + @app.route("/context", methods=["GET", "POST"]) + def context(): + """ + + :return: + """ + # Token auth + if self.token_auth_enabled: + self.authorize(request, app.config.get("SECRET_KEY")) + + # if reset is not empty then allow context re-set + reset = request.args.get("reset", None) + if reset: + return render_template("context.html") + + if request.method == "POST": + if "file" not in request.files: + flash("No file part") + return redirect(url_for("context")) + + file = request.files["file"] + helper_type = request.form.get("helper", "kerashelper") + # if user does not select file, browser also + # submit an empty part without filename + if file.filename == "": + flash("No selected file") + return redirect(url_for("context")) + + if file and allowed_file(file.filename): + filename = secure_filename(file.filename) + file_path = os.path.join( + app.config["UPLOAD_FOLDER"], filename + ) + file.save(file_path) + + if ( + self.control.state() == ReducerState.instructing + or self.control.state() == ReducerState.monitoring + ): + return "Not allowed to change context while execution is ongoing." 
+ + self.control.set_compute_package(filename, file_path) + self.control.statestore.set_helper(helper_type) + return redirect(url_for("control")) + + name = request.args.get("name", "") + + if name == "": + name = self.control.get_compute_package_name() + if name is None or name == "": + return render_template("context.html") + + # There is a potential race condition here, if one client requests a package and at + # the same time another one triggers a fetch from Minio and writes to disk. + try: + mutex = Lock() + mutex.acquire() + return send_from_directory( + app.config["UPLOAD_FOLDER"], name, as_attachment=True + ) + except Exception: + try: + data = self.control.get_compute_package(name) + file_path = os.path.join(app.config["UPLOAD_FOLDER"], name) + with open(file_path, "wb") as fh: + fh.write(data) + return send_from_directory( + app.config["UPLOAD_FOLDER"], name, as_attachment=True + ) + except Exception: + raise + finally: + mutex.release() + + return render_template("context.html") + + @app.route("/checksum", methods=["GET", "POST"]) + def checksum(): + """ + + :return: + """ + # sum = '' + name = request.args.get("name", None) + if name == "" or name is None: + name = self.control.get_compute_package_name() + if name is None or name == "": + return jsonify({}) + + file_path = os.path.join(UPLOAD_FOLDER, name) + print("trying to get {}".format(file_path)) + + try: + sum = str(sha(file_path)) + except FileNotFoundError: + sum = "" + + data = {"checksum": sum} + + return jsonify(data) + + @app.route("/infer", methods=["POST"]) + def infer(): + """ + + :return: + """ + # Token auth + if self.token_auth_enabled: + self.authorize(request, app.config.get("SECRET_KEY")) + + # Check configured + not_configured = self.check_configured() + if not_configured: + return not_configured + + # Check compute context + if self.remote_compute_context: + try: + self.current_compute_context = ( + self.control.get_compute_package() + ) + except Exception as e: + print(e, 
flush=True) + self.current_compute_context = None + else: + self.current_compute_context = "None:Local" + + # Redirect if in monitoring state + if self.control.state() == ReducerState.monitoring: + return redirect( + url_for( + "index", + state=ReducerStateToString(self.control.state()), + refresh=True, + message="Reducer is in monitoring state", + ) + ) + + # POST params + timeout = int(request.form.get("timeout", 180)) + helper_type = request.form.get("helper", "keras") + clients_required = request.form.get("clients_required", 1) + clients_requested = request.form.get("clients_requested", 8) + + # Start inference request + config = { + "round_timeout": timeout, + "model_id": self.statestore.get_latest_model(), + "clients_required": clients_required, + "clients_requested": clients_requested, + "task": "inference", + "helper_type": helper_type, + } + threading.Thread( + target=self.control.infer_instruct, args=(config,) + ).start() + + # Redirect + return redirect( + url_for( + "index", + state=ReducerStateToString(self.control.state()), + refresh=True, + message="Sent execution plan (inference).", + message_type="SUCCESS", + ) + ) + + if not self.host: + bind = "0.0.0.0" + else: + bind = self.host + + app.run(host=bind, port=self.port) + + return app diff --git a/fedn/fedn/clients/reducer/static/dist/css/dark.css b/fedn/fedn/network/dashboard/static/dist/css/dark.css similarity index 100% rename from fedn/fedn/clients/reducer/static/dist/css/dark.css rename to fedn/fedn/network/dashboard/static/dist/css/dark.css diff --git a/fedn/fedn/clients/reducer/static/dist/css/light.css b/fedn/fedn/network/dashboard/static/dist/css/light.css similarity index 100% rename from fedn/fedn/clients/reducer/static/dist/css/light.css rename to fedn/fedn/network/dashboard/static/dist/css/light.css diff --git a/fedn/fedn/clients/reducer/static/dist/fonts/.gitkeep b/fedn/fedn/network/dashboard/static/dist/fonts/.gitkeep similarity index 100% rename from 
fedn/fedn/clients/reducer/static/dist/fonts/.gitkeep rename to fedn/fedn/network/dashboard/static/dist/fonts/.gitkeep diff --git a/fedn/fedn/clients/reducer/static/dist/fonts/fa-brands-400.eot b/fedn/fedn/network/dashboard/static/dist/fonts/fa-brands-400.eot similarity index 100% rename from fedn/fedn/clients/reducer/static/dist/fonts/fa-brands-400.eot rename to fedn/fedn/network/dashboard/static/dist/fonts/fa-brands-400.eot diff --git a/fedn/fedn/clients/reducer/static/dist/fonts/fa-brands-400.svg b/fedn/fedn/network/dashboard/static/dist/fonts/fa-brands-400.svg similarity index 100% rename from fedn/fedn/clients/reducer/static/dist/fonts/fa-brands-400.svg rename to fedn/fedn/network/dashboard/static/dist/fonts/fa-brands-400.svg diff --git a/fedn/fedn/clients/reducer/static/dist/fonts/fa-brands-400.ttf b/fedn/fedn/network/dashboard/static/dist/fonts/fa-brands-400.ttf similarity index 100% rename from fedn/fedn/clients/reducer/static/dist/fonts/fa-brands-400.ttf rename to fedn/fedn/network/dashboard/static/dist/fonts/fa-brands-400.ttf diff --git a/fedn/fedn/clients/reducer/static/dist/fonts/fa-brands-400.woff b/fedn/fedn/network/dashboard/static/dist/fonts/fa-brands-400.woff similarity index 100% rename from fedn/fedn/clients/reducer/static/dist/fonts/fa-brands-400.woff rename to fedn/fedn/network/dashboard/static/dist/fonts/fa-brands-400.woff diff --git a/fedn/fedn/clients/reducer/static/dist/fonts/fa-brands-400.woff2 b/fedn/fedn/network/dashboard/static/dist/fonts/fa-brands-400.woff2 similarity index 100% rename from fedn/fedn/clients/reducer/static/dist/fonts/fa-brands-400.woff2 rename to fedn/fedn/network/dashboard/static/dist/fonts/fa-brands-400.woff2 diff --git a/fedn/fedn/clients/reducer/static/dist/fonts/fa-regular-400.eot b/fedn/fedn/network/dashboard/static/dist/fonts/fa-regular-400.eot similarity index 100% rename from fedn/fedn/clients/reducer/static/dist/fonts/fa-regular-400.eot rename to fedn/fedn/network/dashboard/static/dist/fonts/fa-regular-400.eot 
diff --git a/fedn/fedn/clients/reducer/static/dist/fonts/fa-regular-400.svg b/fedn/fedn/network/dashboard/static/dist/fonts/fa-regular-400.svg similarity index 100% rename from fedn/fedn/clients/reducer/static/dist/fonts/fa-regular-400.svg rename to fedn/fedn/network/dashboard/static/dist/fonts/fa-regular-400.svg diff --git a/fedn/fedn/clients/reducer/static/dist/fonts/fa-regular-400.ttf b/fedn/fedn/network/dashboard/static/dist/fonts/fa-regular-400.ttf similarity index 100% rename from fedn/fedn/clients/reducer/static/dist/fonts/fa-regular-400.ttf rename to fedn/fedn/network/dashboard/static/dist/fonts/fa-regular-400.ttf diff --git a/fedn/fedn/clients/reducer/static/dist/fonts/fa-regular-400.woff b/fedn/fedn/network/dashboard/static/dist/fonts/fa-regular-400.woff similarity index 100% rename from fedn/fedn/clients/reducer/static/dist/fonts/fa-regular-400.woff rename to fedn/fedn/network/dashboard/static/dist/fonts/fa-regular-400.woff diff --git a/fedn/fedn/clients/reducer/static/dist/fonts/fa-regular-400.woff2 b/fedn/fedn/network/dashboard/static/dist/fonts/fa-regular-400.woff2 similarity index 100% rename from fedn/fedn/clients/reducer/static/dist/fonts/fa-regular-400.woff2 rename to fedn/fedn/network/dashboard/static/dist/fonts/fa-regular-400.woff2 diff --git a/fedn/fedn/clients/reducer/static/dist/fonts/fa-solid-900.eot b/fedn/fedn/network/dashboard/static/dist/fonts/fa-solid-900.eot similarity index 100% rename from fedn/fedn/clients/reducer/static/dist/fonts/fa-solid-900.eot rename to fedn/fedn/network/dashboard/static/dist/fonts/fa-solid-900.eot diff --git a/fedn/fedn/clients/reducer/static/dist/fonts/fa-solid-900.svg b/fedn/fedn/network/dashboard/static/dist/fonts/fa-solid-900.svg similarity index 100% rename from fedn/fedn/clients/reducer/static/dist/fonts/fa-solid-900.svg rename to fedn/fedn/network/dashboard/static/dist/fonts/fa-solid-900.svg diff --git a/fedn/fedn/clients/reducer/static/dist/fonts/fa-solid-900.ttf 
b/fedn/fedn/network/dashboard/static/dist/fonts/fa-solid-900.ttf similarity index 100% rename from fedn/fedn/clients/reducer/static/dist/fonts/fa-solid-900.ttf rename to fedn/fedn/network/dashboard/static/dist/fonts/fa-solid-900.ttf diff --git a/fedn/fedn/clients/reducer/static/dist/fonts/fa-solid-900.woff b/fedn/fedn/network/dashboard/static/dist/fonts/fa-solid-900.woff similarity index 100% rename from fedn/fedn/clients/reducer/static/dist/fonts/fa-solid-900.woff rename to fedn/fedn/network/dashboard/static/dist/fonts/fa-solid-900.woff diff --git a/fedn/fedn/clients/reducer/static/dist/fonts/fa-solid-900.woff2 b/fedn/fedn/network/dashboard/static/dist/fonts/fa-solid-900.woff2 similarity index 100% rename from fedn/fedn/clients/reducer/static/dist/fonts/fa-solid-900.woff2 rename to fedn/fedn/network/dashboard/static/dist/fonts/fa-solid-900.woff2 diff --git a/fedn/fedn/clients/reducer/static/dist/img/avatars/avatar-2.jpg b/fedn/fedn/network/dashboard/static/dist/img/avatars/avatar-2.jpg similarity index 100% rename from fedn/fedn/clients/reducer/static/dist/img/avatars/avatar-2.jpg rename to fedn/fedn/network/dashboard/static/dist/img/avatars/avatar-2.jpg diff --git a/fedn/fedn/clients/reducer/static/dist/img/avatars/avatar-3.jpg b/fedn/fedn/network/dashboard/static/dist/img/avatars/avatar-3.jpg similarity index 100% rename from fedn/fedn/clients/reducer/static/dist/img/avatars/avatar-3.jpg rename to fedn/fedn/network/dashboard/static/dist/img/avatars/avatar-3.jpg diff --git a/fedn/fedn/clients/reducer/static/dist/img/avatars/avatar-4.jpg b/fedn/fedn/network/dashboard/static/dist/img/avatars/avatar-4.jpg similarity index 100% rename from fedn/fedn/clients/reducer/static/dist/img/avatars/avatar-4.jpg rename to fedn/fedn/network/dashboard/static/dist/img/avatars/avatar-4.jpg diff --git a/fedn/fedn/clients/reducer/static/dist/img/avatars/avatar-5.jpg b/fedn/fedn/network/dashboard/static/dist/img/avatars/avatar-5.jpg similarity index 100% rename from 
fedn/fedn/clients/reducer/static/dist/img/avatars/avatar-5.jpg rename to fedn/fedn/network/dashboard/static/dist/img/avatars/avatar-5.jpg diff --git a/fedn/fedn/clients/reducer/static/dist/img/avatars/avatar.jpg b/fedn/fedn/network/dashboard/static/dist/img/avatars/avatar.jpg similarity index 100% rename from fedn/fedn/clients/reducer/static/dist/img/avatars/avatar.jpg rename to fedn/fedn/network/dashboard/static/dist/img/avatars/avatar.jpg diff --git a/fedn/fedn/clients/reducer/static/dist/img/brands/bootstrap.svg b/fedn/fedn/network/dashboard/static/dist/img/brands/bootstrap.svg similarity index 100% rename from fedn/fedn/clients/reducer/static/dist/img/brands/bootstrap.svg rename to fedn/fedn/network/dashboard/static/dist/img/brands/bootstrap.svg diff --git a/fedn/fedn/clients/reducer/static/dist/img/favicon.ico b/fedn/fedn/network/dashboard/static/dist/img/favicon.ico similarity index 100% rename from fedn/fedn/clients/reducer/static/dist/img/favicon.ico rename to fedn/fedn/network/dashboard/static/dist/img/favicon.ico diff --git a/fedn/fedn/clients/reducer/static/dist/img/flags/ad.png b/fedn/fedn/network/dashboard/static/dist/img/flags/ad.png similarity index 100% rename from fedn/fedn/clients/reducer/static/dist/img/flags/ad.png rename to fedn/fedn/network/dashboard/static/dist/img/flags/ad.png diff --git a/fedn/fedn/clients/reducer/static/dist/img/flags/ae.png b/fedn/fedn/network/dashboard/static/dist/img/flags/ae.png similarity index 100% rename from fedn/fedn/clients/reducer/static/dist/img/flags/ae.png rename to fedn/fedn/network/dashboard/static/dist/img/flags/ae.png diff --git a/fedn/fedn/clients/reducer/static/dist/img/flags/af.png b/fedn/fedn/network/dashboard/static/dist/img/flags/af.png similarity index 100% rename from fedn/fedn/clients/reducer/static/dist/img/flags/af.png rename to fedn/fedn/network/dashboard/static/dist/img/flags/af.png diff --git a/fedn/fedn/clients/reducer/static/dist/img/flags/ag.png 
b/fedn/fedn/network/dashboard/static/dist/img/flags/ag.png similarity index 100% rename from fedn/fedn/clients/reducer/static/dist/img/flags/ag.png rename to fedn/fedn/network/dashboard/static/dist/img/flags/ag.png diff --git a/fedn/fedn/clients/reducer/static/dist/img/flags/ai.png b/fedn/fedn/network/dashboard/static/dist/img/flags/ai.png similarity index 100% rename from fedn/fedn/clients/reducer/static/dist/img/flags/ai.png rename to fedn/fedn/network/dashboard/static/dist/img/flags/ai.png diff --git a/fedn/fedn/clients/reducer/static/dist/img/flags/al.png b/fedn/fedn/network/dashboard/static/dist/img/flags/al.png similarity index 100% rename from fedn/fedn/clients/reducer/static/dist/img/flags/al.png rename to fedn/fedn/network/dashboard/static/dist/img/flags/al.png diff --git a/fedn/fedn/clients/reducer/static/dist/img/flags/am.png b/fedn/fedn/network/dashboard/static/dist/img/flags/am.png similarity index 100% rename from fedn/fedn/clients/reducer/static/dist/img/flags/am.png rename to fedn/fedn/network/dashboard/static/dist/img/flags/am.png diff --git a/fedn/fedn/clients/reducer/static/dist/img/flags/an.png b/fedn/fedn/network/dashboard/static/dist/img/flags/an.png similarity index 100% rename from fedn/fedn/clients/reducer/static/dist/img/flags/an.png rename to fedn/fedn/network/dashboard/static/dist/img/flags/an.png diff --git a/fedn/fedn/clients/reducer/static/dist/img/flags/ao.png b/fedn/fedn/network/dashboard/static/dist/img/flags/ao.png similarity index 100% rename from fedn/fedn/clients/reducer/static/dist/img/flags/ao.png rename to fedn/fedn/network/dashboard/static/dist/img/flags/ao.png diff --git a/fedn/fedn/clients/reducer/static/dist/img/flags/aq.png b/fedn/fedn/network/dashboard/static/dist/img/flags/aq.png similarity index 100% rename from fedn/fedn/clients/reducer/static/dist/img/flags/aq.png rename to fedn/fedn/network/dashboard/static/dist/img/flags/aq.png diff --git a/fedn/fedn/clients/reducer/static/dist/img/flags/ar.png 
b/fedn/fedn/network/dashboard/static/dist/img/flags/ar.png similarity index 100% rename from fedn/fedn/clients/reducer/static/dist/img/flags/ar.png rename to fedn/fedn/network/dashboard/static/dist/img/flags/ar.png diff --git a/fedn/fedn/clients/reducer/static/dist/img/flags/as.png b/fedn/fedn/network/dashboard/static/dist/img/flags/as.png similarity index 100% rename from fedn/fedn/clients/reducer/static/dist/img/flags/as.png rename to fedn/fedn/network/dashboard/static/dist/img/flags/as.png diff --git a/fedn/fedn/clients/reducer/static/dist/img/flags/at.png b/fedn/fedn/network/dashboard/static/dist/img/flags/at.png similarity index 100% rename from fedn/fedn/clients/reducer/static/dist/img/flags/at.png rename to fedn/fedn/network/dashboard/static/dist/img/flags/at.png diff --git a/fedn/fedn/clients/reducer/static/dist/img/flags/au.png b/fedn/fedn/network/dashboard/static/dist/img/flags/au.png similarity index 100% rename from fedn/fedn/clients/reducer/static/dist/img/flags/au.png rename to fedn/fedn/network/dashboard/static/dist/img/flags/au.png diff --git a/fedn/fedn/clients/reducer/static/dist/img/flags/aw.png b/fedn/fedn/network/dashboard/static/dist/img/flags/aw.png similarity index 100% rename from fedn/fedn/clients/reducer/static/dist/img/flags/aw.png rename to fedn/fedn/network/dashboard/static/dist/img/flags/aw.png diff --git a/fedn/fedn/clients/reducer/static/dist/img/flags/ax.png b/fedn/fedn/network/dashboard/static/dist/img/flags/ax.png similarity index 100% rename from fedn/fedn/clients/reducer/static/dist/img/flags/ax.png rename to fedn/fedn/network/dashboard/static/dist/img/flags/ax.png diff --git a/fedn/fedn/clients/reducer/static/dist/img/flags/az.png b/fedn/fedn/network/dashboard/static/dist/img/flags/az.png similarity index 100% rename from fedn/fedn/clients/reducer/static/dist/img/flags/az.png rename to fedn/fedn/network/dashboard/static/dist/img/flags/az.png diff --git a/fedn/fedn/clients/reducer/static/dist/img/flags/ba.png 
b/fedn/fedn/network/dashboard/static/dist/img/flags/ba.png similarity index 100% rename from fedn/fedn/clients/reducer/static/dist/img/flags/ba.png rename to fedn/fedn/network/dashboard/static/dist/img/flags/ba.png diff --git a/fedn/fedn/clients/reducer/static/dist/img/flags/bb.png b/fedn/fedn/network/dashboard/static/dist/img/flags/bb.png similarity index 100% rename from fedn/fedn/clients/reducer/static/dist/img/flags/bb.png rename to fedn/fedn/network/dashboard/static/dist/img/flags/bb.png diff --git a/fedn/fedn/clients/reducer/static/dist/img/flags/bd.png b/fedn/fedn/network/dashboard/static/dist/img/flags/bd.png similarity index 100% rename from fedn/fedn/clients/reducer/static/dist/img/flags/bd.png rename to fedn/fedn/network/dashboard/static/dist/img/flags/bd.png diff --git a/fedn/fedn/clients/reducer/static/dist/img/flags/be.png b/fedn/fedn/network/dashboard/static/dist/img/flags/be.png similarity index 100% rename from fedn/fedn/clients/reducer/static/dist/img/flags/be.png rename to fedn/fedn/network/dashboard/static/dist/img/flags/be.png diff --git a/fedn/fedn/clients/reducer/static/dist/img/flags/bf.png b/fedn/fedn/network/dashboard/static/dist/img/flags/bf.png similarity index 100% rename from fedn/fedn/clients/reducer/static/dist/img/flags/bf.png rename to fedn/fedn/network/dashboard/static/dist/img/flags/bf.png diff --git a/fedn/fedn/clients/reducer/static/dist/img/flags/bg.png b/fedn/fedn/network/dashboard/static/dist/img/flags/bg.png similarity index 100% rename from fedn/fedn/clients/reducer/static/dist/img/flags/bg.png rename to fedn/fedn/network/dashboard/static/dist/img/flags/bg.png diff --git a/fedn/fedn/clients/reducer/static/dist/img/flags/bh.png b/fedn/fedn/network/dashboard/static/dist/img/flags/bh.png similarity index 100% rename from fedn/fedn/clients/reducer/static/dist/img/flags/bh.png rename to fedn/fedn/network/dashboard/static/dist/img/flags/bh.png diff --git a/fedn/fedn/clients/reducer/static/dist/img/flags/bi.png 
b/fedn/fedn/network/dashboard/static/dist/img/flags/bi.png similarity index 100% rename from fedn/fedn/clients/reducer/static/dist/img/flags/bi.png rename to fedn/fedn/network/dashboard/static/dist/img/flags/bi.png diff --git a/fedn/fedn/clients/reducer/static/dist/img/flags/bj.png b/fedn/fedn/network/dashboard/static/dist/img/flags/bj.png similarity index 100% rename from fedn/fedn/clients/reducer/static/dist/img/flags/bj.png rename to fedn/fedn/network/dashboard/static/dist/img/flags/bj.png diff --git a/fedn/fedn/clients/reducer/static/dist/img/flags/bl.png b/fedn/fedn/network/dashboard/static/dist/img/flags/bl.png similarity index 100% rename from fedn/fedn/clients/reducer/static/dist/img/flags/bl.png rename to fedn/fedn/network/dashboard/static/dist/img/flags/bl.png diff --git a/fedn/fedn/clients/reducer/static/dist/img/flags/bm.png b/fedn/fedn/network/dashboard/static/dist/img/flags/bm.png similarity index 100% rename from fedn/fedn/clients/reducer/static/dist/img/flags/bm.png rename to fedn/fedn/network/dashboard/static/dist/img/flags/bm.png diff --git a/fedn/fedn/clients/reducer/static/dist/img/flags/bn.png b/fedn/fedn/network/dashboard/static/dist/img/flags/bn.png similarity index 100% rename from fedn/fedn/clients/reducer/static/dist/img/flags/bn.png rename to fedn/fedn/network/dashboard/static/dist/img/flags/bn.png diff --git a/fedn/fedn/clients/reducer/static/dist/img/flags/bo.png b/fedn/fedn/network/dashboard/static/dist/img/flags/bo.png similarity index 100% rename from fedn/fedn/clients/reducer/static/dist/img/flags/bo.png rename to fedn/fedn/network/dashboard/static/dist/img/flags/bo.png diff --git a/fedn/fedn/clients/reducer/static/dist/img/flags/bq.png b/fedn/fedn/network/dashboard/static/dist/img/flags/bq.png similarity index 100% rename from fedn/fedn/clients/reducer/static/dist/img/flags/bq.png rename to fedn/fedn/network/dashboard/static/dist/img/flags/bq.png diff --git a/fedn/fedn/clients/reducer/static/dist/img/flags/br.png 
b/fedn/fedn/network/dashboard/static/dist/img/flags/br.png similarity index 100% rename from fedn/fedn/clients/reducer/static/dist/img/flags/br.png rename to fedn/fedn/network/dashboard/static/dist/img/flags/br.png diff --git a/fedn/fedn/clients/reducer/static/dist/img/flags/bs.png b/fedn/fedn/network/dashboard/static/dist/img/flags/bs.png similarity index 100% rename from fedn/fedn/clients/reducer/static/dist/img/flags/bs.png rename to fedn/fedn/network/dashboard/static/dist/img/flags/bs.png diff --git a/fedn/fedn/clients/reducer/static/dist/img/flags/bt.png b/fedn/fedn/network/dashboard/static/dist/img/flags/bt.png similarity index 100% rename from fedn/fedn/clients/reducer/static/dist/img/flags/bt.png rename to fedn/fedn/network/dashboard/static/dist/img/flags/bt.png diff --git a/fedn/fedn/clients/reducer/static/dist/img/flags/bv.png b/fedn/fedn/network/dashboard/static/dist/img/flags/bv.png similarity index 100% rename from fedn/fedn/clients/reducer/static/dist/img/flags/bv.png rename to fedn/fedn/network/dashboard/static/dist/img/flags/bv.png diff --git a/fedn/fedn/clients/reducer/static/dist/img/flags/bw.png b/fedn/fedn/network/dashboard/static/dist/img/flags/bw.png similarity index 100% rename from fedn/fedn/clients/reducer/static/dist/img/flags/bw.png rename to fedn/fedn/network/dashboard/static/dist/img/flags/bw.png diff --git a/fedn/fedn/clients/reducer/static/dist/img/flags/by.png b/fedn/fedn/network/dashboard/static/dist/img/flags/by.png similarity index 100% rename from fedn/fedn/clients/reducer/static/dist/img/flags/by.png rename to fedn/fedn/network/dashboard/static/dist/img/flags/by.png diff --git a/fedn/fedn/clients/reducer/static/dist/img/flags/bz.png b/fedn/fedn/network/dashboard/static/dist/img/flags/bz.png similarity index 100% rename from fedn/fedn/clients/reducer/static/dist/img/flags/bz.png rename to fedn/fedn/network/dashboard/static/dist/img/flags/bz.png diff --git a/fedn/fedn/clients/reducer/static/dist/img/flags/ca.png 
b/fedn/fedn/network/dashboard/static/dist/img/flags/ca.png similarity index 100% rename from fedn/fedn/clients/reducer/static/dist/img/flags/ca.png rename to fedn/fedn/network/dashboard/static/dist/img/flags/ca.png diff --git a/fedn/fedn/clients/reducer/static/dist/img/flags/cc.png b/fedn/fedn/network/dashboard/static/dist/img/flags/cc.png similarity index 100% rename from fedn/fedn/clients/reducer/static/dist/img/flags/cc.png rename to fedn/fedn/network/dashboard/static/dist/img/flags/cc.png diff --git a/fedn/fedn/clients/reducer/static/dist/img/flags/cd.png b/fedn/fedn/network/dashboard/static/dist/img/flags/cd.png similarity index 100% rename from fedn/fedn/clients/reducer/static/dist/img/flags/cd.png rename to fedn/fedn/network/dashboard/static/dist/img/flags/cd.png diff --git a/fedn/fedn/clients/reducer/static/dist/img/flags/cf.png b/fedn/fedn/network/dashboard/static/dist/img/flags/cf.png similarity index 100% rename from fedn/fedn/clients/reducer/static/dist/img/flags/cf.png rename to fedn/fedn/network/dashboard/static/dist/img/flags/cf.png diff --git a/fedn/fedn/clients/reducer/static/dist/img/flags/cg.png b/fedn/fedn/network/dashboard/static/dist/img/flags/cg.png similarity index 100% rename from fedn/fedn/clients/reducer/static/dist/img/flags/cg.png rename to fedn/fedn/network/dashboard/static/dist/img/flags/cg.png diff --git a/fedn/fedn/clients/reducer/static/dist/img/flags/ch.png b/fedn/fedn/network/dashboard/static/dist/img/flags/ch.png similarity index 100% rename from fedn/fedn/clients/reducer/static/dist/img/flags/ch.png rename to fedn/fedn/network/dashboard/static/dist/img/flags/ch.png diff --git a/fedn/fedn/clients/reducer/static/dist/img/flags/ci.png b/fedn/fedn/network/dashboard/static/dist/img/flags/ci.png similarity index 100% rename from fedn/fedn/clients/reducer/static/dist/img/flags/ci.png rename to fedn/fedn/network/dashboard/static/dist/img/flags/ci.png diff --git a/fedn/fedn/clients/reducer/static/dist/img/flags/ck.png 
b/fedn/fedn/network/dashboard/static/dist/img/flags/ck.png similarity index 100% rename from fedn/fedn/clients/reducer/static/dist/img/flags/ck.png rename to fedn/fedn/network/dashboard/static/dist/img/flags/ck.png diff --git a/fedn/fedn/clients/reducer/static/dist/img/flags/cl.png b/fedn/fedn/network/dashboard/static/dist/img/flags/cl.png similarity index 100% rename from fedn/fedn/clients/reducer/static/dist/img/flags/cl.png rename to fedn/fedn/network/dashboard/static/dist/img/flags/cl.png diff --git a/fedn/fedn/clients/reducer/static/dist/img/flags/cm.png b/fedn/fedn/network/dashboard/static/dist/img/flags/cm.png similarity index 100% rename from fedn/fedn/clients/reducer/static/dist/img/flags/cm.png rename to fedn/fedn/network/dashboard/static/dist/img/flags/cm.png diff --git a/fedn/fedn/clients/reducer/static/dist/img/flags/cn.png b/fedn/fedn/network/dashboard/static/dist/img/flags/cn.png similarity index 100% rename from fedn/fedn/clients/reducer/static/dist/img/flags/cn.png rename to fedn/fedn/network/dashboard/static/dist/img/flags/cn.png diff --git a/fedn/fedn/clients/reducer/static/dist/img/flags/co.png b/fedn/fedn/network/dashboard/static/dist/img/flags/co.png similarity index 100% rename from fedn/fedn/clients/reducer/static/dist/img/flags/co.png rename to fedn/fedn/network/dashboard/static/dist/img/flags/co.png diff --git a/fedn/fedn/clients/reducer/static/dist/img/flags/cr.png b/fedn/fedn/network/dashboard/static/dist/img/flags/cr.png similarity index 100% rename from fedn/fedn/clients/reducer/static/dist/img/flags/cr.png rename to fedn/fedn/network/dashboard/static/dist/img/flags/cr.png diff --git a/fedn/fedn/clients/reducer/static/dist/img/flags/cu.png b/fedn/fedn/network/dashboard/static/dist/img/flags/cu.png similarity index 100% rename from fedn/fedn/clients/reducer/static/dist/img/flags/cu.png rename to fedn/fedn/network/dashboard/static/dist/img/flags/cu.png diff --git a/fedn/fedn/clients/reducer/static/dist/img/flags/cv.png 
b/fedn/fedn/network/dashboard/static/dist/img/flags/cv.png similarity index 100% rename from fedn/fedn/clients/reducer/static/dist/img/flags/cv.png rename to fedn/fedn/network/dashboard/static/dist/img/flags/cv.png diff --git a/fedn/fedn/clients/reducer/static/dist/img/flags/cw.png b/fedn/fedn/network/dashboard/static/dist/img/flags/cw.png similarity index 100% rename from fedn/fedn/clients/reducer/static/dist/img/flags/cw.png rename to fedn/fedn/network/dashboard/static/dist/img/flags/cw.png diff --git a/fedn/fedn/clients/reducer/static/dist/img/flags/cx.png b/fedn/fedn/network/dashboard/static/dist/img/flags/cx.png similarity index 100% rename from fedn/fedn/clients/reducer/static/dist/img/flags/cx.png rename to fedn/fedn/network/dashboard/static/dist/img/flags/cx.png diff --git a/fedn/fedn/clients/reducer/static/dist/img/flags/cy.png b/fedn/fedn/network/dashboard/static/dist/img/flags/cy.png similarity index 100% rename from fedn/fedn/clients/reducer/static/dist/img/flags/cy.png rename to fedn/fedn/network/dashboard/static/dist/img/flags/cy.png diff --git a/fedn/fedn/clients/reducer/static/dist/img/flags/cz.png b/fedn/fedn/network/dashboard/static/dist/img/flags/cz.png similarity index 100% rename from fedn/fedn/clients/reducer/static/dist/img/flags/cz.png rename to fedn/fedn/network/dashboard/static/dist/img/flags/cz.png diff --git a/fedn/fedn/clients/reducer/static/dist/img/flags/de.png b/fedn/fedn/network/dashboard/static/dist/img/flags/de.png similarity index 100% rename from fedn/fedn/clients/reducer/static/dist/img/flags/de.png rename to fedn/fedn/network/dashboard/static/dist/img/flags/de.png diff --git a/fedn/fedn/clients/reducer/static/dist/img/flags/dj.png b/fedn/fedn/network/dashboard/static/dist/img/flags/dj.png similarity index 100% rename from fedn/fedn/clients/reducer/static/dist/img/flags/dj.png rename to fedn/fedn/network/dashboard/static/dist/img/flags/dj.png diff --git a/fedn/fedn/clients/reducer/static/dist/img/flags/dk.png 
b/fedn/fedn/network/dashboard/static/dist/img/flags/dk.png similarity index 100% rename from fedn/fedn/clients/reducer/static/dist/img/flags/dk.png rename to fedn/fedn/network/dashboard/static/dist/img/flags/dk.png diff --git a/fedn/fedn/clients/reducer/static/dist/img/flags/dm.png b/fedn/fedn/network/dashboard/static/dist/img/flags/dm.png similarity index 100% rename from fedn/fedn/clients/reducer/static/dist/img/flags/dm.png rename to fedn/fedn/network/dashboard/static/dist/img/flags/dm.png diff --git a/fedn/fedn/clients/reducer/static/dist/img/flags/do.png b/fedn/fedn/network/dashboard/static/dist/img/flags/do.png similarity index 100% rename from fedn/fedn/clients/reducer/static/dist/img/flags/do.png rename to fedn/fedn/network/dashboard/static/dist/img/flags/do.png diff --git a/fedn/fedn/clients/reducer/static/dist/img/flags/dz.png b/fedn/fedn/network/dashboard/static/dist/img/flags/dz.png similarity index 100% rename from fedn/fedn/clients/reducer/static/dist/img/flags/dz.png rename to fedn/fedn/network/dashboard/static/dist/img/flags/dz.png diff --git a/fedn/fedn/clients/reducer/static/dist/img/flags/ec.png b/fedn/fedn/network/dashboard/static/dist/img/flags/ec.png similarity index 100% rename from fedn/fedn/clients/reducer/static/dist/img/flags/ec.png rename to fedn/fedn/network/dashboard/static/dist/img/flags/ec.png diff --git a/fedn/fedn/clients/reducer/static/dist/img/flags/ee.png b/fedn/fedn/network/dashboard/static/dist/img/flags/ee.png similarity index 100% rename from fedn/fedn/clients/reducer/static/dist/img/flags/ee.png rename to fedn/fedn/network/dashboard/static/dist/img/flags/ee.png diff --git a/fedn/fedn/clients/reducer/static/dist/img/flags/eg.png b/fedn/fedn/network/dashboard/static/dist/img/flags/eg.png similarity index 100% rename from fedn/fedn/clients/reducer/static/dist/img/flags/eg.png rename to fedn/fedn/network/dashboard/static/dist/img/flags/eg.png diff --git a/fedn/fedn/clients/reducer/static/dist/img/flags/eh.png 
b/fedn/fedn/network/dashboard/static/dist/img/flags/eh.png similarity index 100% rename from fedn/fedn/clients/reducer/static/dist/img/flags/eh.png rename to fedn/fedn/network/dashboard/static/dist/img/flags/eh.png diff --git a/fedn/fedn/clients/reducer/static/dist/img/flags/er.png b/fedn/fedn/network/dashboard/static/dist/img/flags/er.png similarity index 100% rename from fedn/fedn/clients/reducer/static/dist/img/flags/er.png rename to fedn/fedn/network/dashboard/static/dist/img/flags/er.png diff --git a/fedn/fedn/clients/reducer/static/dist/img/flags/es.png b/fedn/fedn/network/dashboard/static/dist/img/flags/es.png similarity index 100% rename from fedn/fedn/clients/reducer/static/dist/img/flags/es.png rename to fedn/fedn/network/dashboard/static/dist/img/flags/es.png diff --git a/fedn/fedn/clients/reducer/static/dist/img/flags/et.png b/fedn/fedn/network/dashboard/static/dist/img/flags/et.png similarity index 100% rename from fedn/fedn/clients/reducer/static/dist/img/flags/et.png rename to fedn/fedn/network/dashboard/static/dist/img/flags/et.png diff --git a/fedn/fedn/clients/reducer/static/dist/img/flags/eu.png b/fedn/fedn/network/dashboard/static/dist/img/flags/eu.png similarity index 100% rename from fedn/fedn/clients/reducer/static/dist/img/flags/eu.png rename to fedn/fedn/network/dashboard/static/dist/img/flags/eu.png diff --git a/fedn/fedn/clients/reducer/static/dist/img/flags/fi.png b/fedn/fedn/network/dashboard/static/dist/img/flags/fi.png similarity index 100% rename from fedn/fedn/clients/reducer/static/dist/img/flags/fi.png rename to fedn/fedn/network/dashboard/static/dist/img/flags/fi.png diff --git a/fedn/fedn/clients/reducer/static/dist/img/flags/fj.png b/fedn/fedn/network/dashboard/static/dist/img/flags/fj.png similarity index 100% rename from fedn/fedn/clients/reducer/static/dist/img/flags/fj.png rename to fedn/fedn/network/dashboard/static/dist/img/flags/fj.png diff --git a/fedn/fedn/clients/reducer/static/dist/img/flags/fk.png 
b/fedn/fedn/network/dashboard/static/dist/img/flags/fk.png similarity index 100% rename from fedn/fedn/clients/reducer/static/dist/img/flags/fk.png rename to fedn/fedn/network/dashboard/static/dist/img/flags/fk.png diff --git a/fedn/fedn/clients/reducer/static/dist/img/flags/fm.png b/fedn/fedn/network/dashboard/static/dist/img/flags/fm.png similarity index 100% rename from fedn/fedn/clients/reducer/static/dist/img/flags/fm.png rename to fedn/fedn/network/dashboard/static/dist/img/flags/fm.png diff --git a/fedn/fedn/clients/reducer/static/dist/img/flags/fo.png b/fedn/fedn/network/dashboard/static/dist/img/flags/fo.png similarity index 100% rename from fedn/fedn/clients/reducer/static/dist/img/flags/fo.png rename to fedn/fedn/network/dashboard/static/dist/img/flags/fo.png diff --git a/fedn/fedn/clients/reducer/static/dist/img/flags/fr.png b/fedn/fedn/network/dashboard/static/dist/img/flags/fr.png similarity index 100% rename from fedn/fedn/clients/reducer/static/dist/img/flags/fr.png rename to fedn/fedn/network/dashboard/static/dist/img/flags/fr.png diff --git a/fedn/fedn/clients/reducer/static/dist/img/flags/ga.png b/fedn/fedn/network/dashboard/static/dist/img/flags/ga.png similarity index 100% rename from fedn/fedn/clients/reducer/static/dist/img/flags/ga.png rename to fedn/fedn/network/dashboard/static/dist/img/flags/ga.png diff --git a/fedn/fedn/clients/reducer/static/dist/img/flags/gb-eng.png b/fedn/fedn/network/dashboard/static/dist/img/flags/gb-eng.png similarity index 100% rename from fedn/fedn/clients/reducer/static/dist/img/flags/gb-eng.png rename to fedn/fedn/network/dashboard/static/dist/img/flags/gb-eng.png diff --git a/fedn/fedn/clients/reducer/static/dist/img/flags/gb-nir.png b/fedn/fedn/network/dashboard/static/dist/img/flags/gb-nir.png similarity index 100% rename from fedn/fedn/clients/reducer/static/dist/img/flags/gb-nir.png rename to fedn/fedn/network/dashboard/static/dist/img/flags/gb-nir.png diff --git 
a/fedn/fedn/clients/reducer/static/dist/img/flags/gb-sct.png b/fedn/fedn/network/dashboard/static/dist/img/flags/gb-sct.png similarity index 100% rename from fedn/fedn/clients/reducer/static/dist/img/flags/gb-sct.png rename to fedn/fedn/network/dashboard/static/dist/img/flags/gb-sct.png diff --git a/fedn/fedn/clients/reducer/static/dist/img/flags/gb-wls.png b/fedn/fedn/network/dashboard/static/dist/img/flags/gb-wls.png similarity index 100% rename from fedn/fedn/clients/reducer/static/dist/img/flags/gb-wls.png rename to fedn/fedn/network/dashboard/static/dist/img/flags/gb-wls.png diff --git a/fedn/fedn/clients/reducer/static/dist/img/flags/gb.png b/fedn/fedn/network/dashboard/static/dist/img/flags/gb.png similarity index 100% rename from fedn/fedn/clients/reducer/static/dist/img/flags/gb.png rename to fedn/fedn/network/dashboard/static/dist/img/flags/gb.png diff --git a/fedn/fedn/clients/reducer/static/dist/img/flags/gd.png b/fedn/fedn/network/dashboard/static/dist/img/flags/gd.png similarity index 100% rename from fedn/fedn/clients/reducer/static/dist/img/flags/gd.png rename to fedn/fedn/network/dashboard/static/dist/img/flags/gd.png diff --git a/fedn/fedn/clients/reducer/static/dist/img/flags/ge.png b/fedn/fedn/network/dashboard/static/dist/img/flags/ge.png similarity index 100% rename from fedn/fedn/clients/reducer/static/dist/img/flags/ge.png rename to fedn/fedn/network/dashboard/static/dist/img/flags/ge.png diff --git a/fedn/fedn/clients/reducer/static/dist/img/flags/gf.png b/fedn/fedn/network/dashboard/static/dist/img/flags/gf.png similarity index 100% rename from fedn/fedn/clients/reducer/static/dist/img/flags/gf.png rename to fedn/fedn/network/dashboard/static/dist/img/flags/gf.png diff --git a/fedn/fedn/clients/reducer/static/dist/img/flags/gg.png b/fedn/fedn/network/dashboard/static/dist/img/flags/gg.png similarity index 100% rename from fedn/fedn/clients/reducer/static/dist/img/flags/gg.png rename to 
fedn/fedn/network/dashboard/static/dist/img/flags/gg.png diff --git a/fedn/fedn/clients/reducer/static/dist/img/flags/gh.png b/fedn/fedn/network/dashboard/static/dist/img/flags/gh.png similarity index 100% rename from fedn/fedn/clients/reducer/static/dist/img/flags/gh.png rename to fedn/fedn/network/dashboard/static/dist/img/flags/gh.png diff --git a/fedn/fedn/clients/reducer/static/dist/img/flags/gi.png b/fedn/fedn/network/dashboard/static/dist/img/flags/gi.png similarity index 100% rename from fedn/fedn/clients/reducer/static/dist/img/flags/gi.png rename to fedn/fedn/network/dashboard/static/dist/img/flags/gi.png diff --git a/fedn/fedn/clients/reducer/static/dist/img/flags/gl.png b/fedn/fedn/network/dashboard/static/dist/img/flags/gl.png similarity index 100% rename from fedn/fedn/clients/reducer/static/dist/img/flags/gl.png rename to fedn/fedn/network/dashboard/static/dist/img/flags/gl.png diff --git a/fedn/fedn/clients/reducer/static/dist/img/flags/gm.png b/fedn/fedn/network/dashboard/static/dist/img/flags/gm.png similarity index 100% rename from fedn/fedn/clients/reducer/static/dist/img/flags/gm.png rename to fedn/fedn/network/dashboard/static/dist/img/flags/gm.png diff --git a/fedn/fedn/clients/reducer/static/dist/img/flags/gn.png b/fedn/fedn/network/dashboard/static/dist/img/flags/gn.png similarity index 100% rename from fedn/fedn/clients/reducer/static/dist/img/flags/gn.png rename to fedn/fedn/network/dashboard/static/dist/img/flags/gn.png diff --git a/fedn/fedn/clients/reducer/static/dist/img/flags/gp.png b/fedn/fedn/network/dashboard/static/dist/img/flags/gp.png similarity index 100% rename from fedn/fedn/clients/reducer/static/dist/img/flags/gp.png rename to fedn/fedn/network/dashboard/static/dist/img/flags/gp.png diff --git a/fedn/fedn/clients/reducer/static/dist/img/flags/gq.png b/fedn/fedn/network/dashboard/static/dist/img/flags/gq.png similarity index 100% rename from fedn/fedn/clients/reducer/static/dist/img/flags/gq.png rename to 
fedn/fedn/network/dashboard/static/dist/img/flags/gq.png diff --git a/fedn/fedn/clients/reducer/static/dist/img/flags/gr.png b/fedn/fedn/network/dashboard/static/dist/img/flags/gr.png similarity index 100% rename from fedn/fedn/clients/reducer/static/dist/img/flags/gr.png rename to fedn/fedn/network/dashboard/static/dist/img/flags/gr.png diff --git a/fedn/fedn/clients/reducer/static/dist/img/flags/gs.png b/fedn/fedn/network/dashboard/static/dist/img/flags/gs.png similarity index 100% rename from fedn/fedn/clients/reducer/static/dist/img/flags/gs.png rename to fedn/fedn/network/dashboard/static/dist/img/flags/gs.png diff --git a/fedn/fedn/clients/reducer/static/dist/img/flags/gt.png b/fedn/fedn/network/dashboard/static/dist/img/flags/gt.png similarity index 100% rename from fedn/fedn/clients/reducer/static/dist/img/flags/gt.png rename to fedn/fedn/network/dashboard/static/dist/img/flags/gt.png diff --git a/fedn/fedn/clients/reducer/static/dist/img/flags/gu.png b/fedn/fedn/network/dashboard/static/dist/img/flags/gu.png similarity index 100% rename from fedn/fedn/clients/reducer/static/dist/img/flags/gu.png rename to fedn/fedn/network/dashboard/static/dist/img/flags/gu.png diff --git a/fedn/fedn/clients/reducer/static/dist/img/flags/gw.png b/fedn/fedn/network/dashboard/static/dist/img/flags/gw.png similarity index 100% rename from fedn/fedn/clients/reducer/static/dist/img/flags/gw.png rename to fedn/fedn/network/dashboard/static/dist/img/flags/gw.png diff --git a/fedn/fedn/clients/reducer/static/dist/img/flags/gy.png b/fedn/fedn/network/dashboard/static/dist/img/flags/gy.png similarity index 100% rename from fedn/fedn/clients/reducer/static/dist/img/flags/gy.png rename to fedn/fedn/network/dashboard/static/dist/img/flags/gy.png diff --git a/fedn/fedn/clients/reducer/static/dist/img/flags/hk.png b/fedn/fedn/network/dashboard/static/dist/img/flags/hk.png similarity index 100% rename from fedn/fedn/clients/reducer/static/dist/img/flags/hk.png rename to 
fedn/fedn/network/dashboard/static/dist/img/flags/hk.png diff --git a/fedn/fedn/clients/reducer/static/dist/img/flags/hm.png b/fedn/fedn/network/dashboard/static/dist/img/flags/hm.png similarity index 100% rename from fedn/fedn/clients/reducer/static/dist/img/flags/hm.png rename to fedn/fedn/network/dashboard/static/dist/img/flags/hm.png diff --git a/fedn/fedn/clients/reducer/static/dist/img/flags/hn.png b/fedn/fedn/network/dashboard/static/dist/img/flags/hn.png similarity index 100% rename from fedn/fedn/clients/reducer/static/dist/img/flags/hn.png rename to fedn/fedn/network/dashboard/static/dist/img/flags/hn.png diff --git a/fedn/fedn/clients/reducer/static/dist/img/flags/hr.png b/fedn/fedn/network/dashboard/static/dist/img/flags/hr.png similarity index 100% rename from fedn/fedn/clients/reducer/static/dist/img/flags/hr.png rename to fedn/fedn/network/dashboard/static/dist/img/flags/hr.png diff --git a/fedn/fedn/clients/reducer/static/dist/img/flags/ht.png b/fedn/fedn/network/dashboard/static/dist/img/flags/ht.png similarity index 100% rename from fedn/fedn/clients/reducer/static/dist/img/flags/ht.png rename to fedn/fedn/network/dashboard/static/dist/img/flags/ht.png diff --git a/fedn/fedn/clients/reducer/static/dist/img/flags/hu.png b/fedn/fedn/network/dashboard/static/dist/img/flags/hu.png similarity index 100% rename from fedn/fedn/clients/reducer/static/dist/img/flags/hu.png rename to fedn/fedn/network/dashboard/static/dist/img/flags/hu.png diff --git a/fedn/fedn/clients/reducer/static/dist/img/flags/id.png b/fedn/fedn/network/dashboard/static/dist/img/flags/id.png similarity index 100% rename from fedn/fedn/clients/reducer/static/dist/img/flags/id.png rename to fedn/fedn/network/dashboard/static/dist/img/flags/id.png diff --git a/fedn/fedn/clients/reducer/static/dist/img/flags/ie.png b/fedn/fedn/network/dashboard/static/dist/img/flags/ie.png similarity index 100% rename from fedn/fedn/clients/reducer/static/dist/img/flags/ie.png rename to 
fedn/fedn/network/dashboard/static/dist/img/flags/ie.png diff --git a/fedn/fedn/clients/reducer/static/dist/img/flags/il.png b/fedn/fedn/network/dashboard/static/dist/img/flags/il.png similarity index 100% rename from fedn/fedn/clients/reducer/static/dist/img/flags/il.png rename to fedn/fedn/network/dashboard/static/dist/img/flags/il.png diff --git a/fedn/fedn/clients/reducer/static/dist/img/flags/im.png b/fedn/fedn/network/dashboard/static/dist/img/flags/im.png similarity index 100% rename from fedn/fedn/clients/reducer/static/dist/img/flags/im.png rename to fedn/fedn/network/dashboard/static/dist/img/flags/im.png diff --git a/fedn/fedn/clients/reducer/static/dist/img/flags/in.png b/fedn/fedn/network/dashboard/static/dist/img/flags/in.png similarity index 100% rename from fedn/fedn/clients/reducer/static/dist/img/flags/in.png rename to fedn/fedn/network/dashboard/static/dist/img/flags/in.png diff --git a/fedn/fedn/clients/reducer/static/dist/img/flags/io.png b/fedn/fedn/network/dashboard/static/dist/img/flags/io.png similarity index 100% rename from fedn/fedn/clients/reducer/static/dist/img/flags/io.png rename to fedn/fedn/network/dashboard/static/dist/img/flags/io.png diff --git a/fedn/fedn/clients/reducer/static/dist/img/flags/iq.png b/fedn/fedn/network/dashboard/static/dist/img/flags/iq.png similarity index 100% rename from fedn/fedn/clients/reducer/static/dist/img/flags/iq.png rename to fedn/fedn/network/dashboard/static/dist/img/flags/iq.png diff --git a/fedn/fedn/clients/reducer/static/dist/img/flags/ir.png b/fedn/fedn/network/dashboard/static/dist/img/flags/ir.png similarity index 100% rename from fedn/fedn/clients/reducer/static/dist/img/flags/ir.png rename to fedn/fedn/network/dashboard/static/dist/img/flags/ir.png diff --git a/fedn/fedn/clients/reducer/static/dist/img/flags/is.png b/fedn/fedn/network/dashboard/static/dist/img/flags/is.png similarity index 100% rename from fedn/fedn/clients/reducer/static/dist/img/flags/is.png rename to 
fedn/fedn/network/dashboard/static/dist/img/flags/is.png diff --git a/fedn/fedn/clients/reducer/static/dist/img/flags/it.png b/fedn/fedn/network/dashboard/static/dist/img/flags/it.png similarity index 100% rename from fedn/fedn/clients/reducer/static/dist/img/flags/it.png rename to fedn/fedn/network/dashboard/static/dist/img/flags/it.png diff --git a/fedn/fedn/clients/reducer/static/dist/img/flags/je.png b/fedn/fedn/network/dashboard/static/dist/img/flags/je.png similarity index 100% rename from fedn/fedn/clients/reducer/static/dist/img/flags/je.png rename to fedn/fedn/network/dashboard/static/dist/img/flags/je.png diff --git a/fedn/fedn/clients/reducer/static/dist/img/flags/jm.png b/fedn/fedn/network/dashboard/static/dist/img/flags/jm.png similarity index 100% rename from fedn/fedn/clients/reducer/static/dist/img/flags/jm.png rename to fedn/fedn/network/dashboard/static/dist/img/flags/jm.png diff --git a/fedn/fedn/clients/reducer/static/dist/img/flags/jo.png b/fedn/fedn/network/dashboard/static/dist/img/flags/jo.png similarity index 100% rename from fedn/fedn/clients/reducer/static/dist/img/flags/jo.png rename to fedn/fedn/network/dashboard/static/dist/img/flags/jo.png diff --git a/fedn/fedn/clients/reducer/static/dist/img/flags/jp.png b/fedn/fedn/network/dashboard/static/dist/img/flags/jp.png similarity index 100% rename from fedn/fedn/clients/reducer/static/dist/img/flags/jp.png rename to fedn/fedn/network/dashboard/static/dist/img/flags/jp.png diff --git a/fedn/fedn/clients/reducer/static/dist/img/flags/ke.png b/fedn/fedn/network/dashboard/static/dist/img/flags/ke.png similarity index 100% rename from fedn/fedn/clients/reducer/static/dist/img/flags/ke.png rename to fedn/fedn/network/dashboard/static/dist/img/flags/ke.png diff --git a/fedn/fedn/clients/reducer/static/dist/img/flags/kg.png b/fedn/fedn/network/dashboard/static/dist/img/flags/kg.png similarity index 100% rename from fedn/fedn/clients/reducer/static/dist/img/flags/kg.png rename to 
fedn/fedn/network/dashboard/static/dist/img/flags/kg.png diff --git a/fedn/fedn/clients/reducer/static/dist/img/flags/kh.png b/fedn/fedn/network/dashboard/static/dist/img/flags/kh.png similarity index 100% rename from fedn/fedn/clients/reducer/static/dist/img/flags/kh.png rename to fedn/fedn/network/dashboard/static/dist/img/flags/kh.png diff --git a/fedn/fedn/clients/reducer/static/dist/img/flags/ki.png b/fedn/fedn/network/dashboard/static/dist/img/flags/ki.png similarity index 100% rename from fedn/fedn/clients/reducer/static/dist/img/flags/ki.png rename to fedn/fedn/network/dashboard/static/dist/img/flags/ki.png diff --git a/fedn/fedn/clients/reducer/static/dist/img/flags/km.png b/fedn/fedn/network/dashboard/static/dist/img/flags/km.png similarity index 100% rename from fedn/fedn/clients/reducer/static/dist/img/flags/km.png rename to fedn/fedn/network/dashboard/static/dist/img/flags/km.png diff --git a/fedn/fedn/clients/reducer/static/dist/img/flags/kn.png b/fedn/fedn/network/dashboard/static/dist/img/flags/kn.png similarity index 100% rename from fedn/fedn/clients/reducer/static/dist/img/flags/kn.png rename to fedn/fedn/network/dashboard/static/dist/img/flags/kn.png diff --git a/fedn/fedn/clients/reducer/static/dist/img/flags/kp.png b/fedn/fedn/network/dashboard/static/dist/img/flags/kp.png similarity index 100% rename from fedn/fedn/clients/reducer/static/dist/img/flags/kp.png rename to fedn/fedn/network/dashboard/static/dist/img/flags/kp.png diff --git a/fedn/fedn/clients/reducer/static/dist/img/flags/kr.png b/fedn/fedn/network/dashboard/static/dist/img/flags/kr.png similarity index 100% rename from fedn/fedn/clients/reducer/static/dist/img/flags/kr.png rename to fedn/fedn/network/dashboard/static/dist/img/flags/kr.png diff --git a/fedn/fedn/clients/reducer/static/dist/img/flags/kw.png b/fedn/fedn/network/dashboard/static/dist/img/flags/kw.png similarity index 100% rename from fedn/fedn/clients/reducer/static/dist/img/flags/kw.png rename to 
fedn/fedn/network/dashboard/static/dist/img/flags/kw.png diff --git a/fedn/fedn/clients/reducer/static/dist/img/flags/ky.png b/fedn/fedn/network/dashboard/static/dist/img/flags/ky.png similarity index 100% rename from fedn/fedn/clients/reducer/static/dist/img/flags/ky.png rename to fedn/fedn/network/dashboard/static/dist/img/flags/ky.png diff --git a/fedn/fedn/clients/reducer/static/dist/img/flags/kz.png b/fedn/fedn/network/dashboard/static/dist/img/flags/kz.png similarity index 100% rename from fedn/fedn/clients/reducer/static/dist/img/flags/kz.png rename to fedn/fedn/network/dashboard/static/dist/img/flags/kz.png diff --git a/fedn/fedn/clients/reducer/static/dist/img/flags/la.png b/fedn/fedn/network/dashboard/static/dist/img/flags/la.png similarity index 100% rename from fedn/fedn/clients/reducer/static/dist/img/flags/la.png rename to fedn/fedn/network/dashboard/static/dist/img/flags/la.png diff --git a/fedn/fedn/clients/reducer/static/dist/img/flags/lb.png b/fedn/fedn/network/dashboard/static/dist/img/flags/lb.png similarity index 100% rename from fedn/fedn/clients/reducer/static/dist/img/flags/lb.png rename to fedn/fedn/network/dashboard/static/dist/img/flags/lb.png diff --git a/fedn/fedn/clients/reducer/static/dist/img/flags/lc.png b/fedn/fedn/network/dashboard/static/dist/img/flags/lc.png similarity index 100% rename from fedn/fedn/clients/reducer/static/dist/img/flags/lc.png rename to fedn/fedn/network/dashboard/static/dist/img/flags/lc.png diff --git a/fedn/fedn/clients/reducer/static/dist/img/flags/li.png b/fedn/fedn/network/dashboard/static/dist/img/flags/li.png similarity index 100% rename from fedn/fedn/clients/reducer/static/dist/img/flags/li.png rename to fedn/fedn/network/dashboard/static/dist/img/flags/li.png diff --git a/fedn/fedn/clients/reducer/static/dist/img/flags/lk.png b/fedn/fedn/network/dashboard/static/dist/img/flags/lk.png similarity index 100% rename from fedn/fedn/clients/reducer/static/dist/img/flags/lk.png rename to 
fedn/fedn/network/dashboard/static/dist/img/flags/lk.png diff --git a/fedn/fedn/clients/reducer/static/dist/img/flags/lr.png b/fedn/fedn/network/dashboard/static/dist/img/flags/lr.png similarity index 100% rename from fedn/fedn/clients/reducer/static/dist/img/flags/lr.png rename to fedn/fedn/network/dashboard/static/dist/img/flags/lr.png diff --git a/fedn/fedn/clients/reducer/static/dist/img/flags/ls.png b/fedn/fedn/network/dashboard/static/dist/img/flags/ls.png similarity index 100% rename from fedn/fedn/clients/reducer/static/dist/img/flags/ls.png rename to fedn/fedn/network/dashboard/static/dist/img/flags/ls.png diff --git a/fedn/fedn/clients/reducer/static/dist/img/flags/lt.png b/fedn/fedn/network/dashboard/static/dist/img/flags/lt.png similarity index 100% rename from fedn/fedn/clients/reducer/static/dist/img/flags/lt.png rename to fedn/fedn/network/dashboard/static/dist/img/flags/lt.png diff --git a/fedn/fedn/clients/reducer/static/dist/img/flags/lu.png b/fedn/fedn/network/dashboard/static/dist/img/flags/lu.png similarity index 100% rename from fedn/fedn/clients/reducer/static/dist/img/flags/lu.png rename to fedn/fedn/network/dashboard/static/dist/img/flags/lu.png diff --git a/fedn/fedn/clients/reducer/static/dist/img/flags/lv.png b/fedn/fedn/network/dashboard/static/dist/img/flags/lv.png similarity index 100% rename from fedn/fedn/clients/reducer/static/dist/img/flags/lv.png rename to fedn/fedn/network/dashboard/static/dist/img/flags/lv.png diff --git a/fedn/fedn/clients/reducer/static/dist/img/flags/ly.png b/fedn/fedn/network/dashboard/static/dist/img/flags/ly.png similarity index 100% rename from fedn/fedn/clients/reducer/static/dist/img/flags/ly.png rename to fedn/fedn/network/dashboard/static/dist/img/flags/ly.png diff --git a/fedn/fedn/clients/reducer/static/dist/img/flags/ma.png b/fedn/fedn/network/dashboard/static/dist/img/flags/ma.png similarity index 100% rename from fedn/fedn/clients/reducer/static/dist/img/flags/ma.png rename to 
fedn/fedn/network/dashboard/static/dist/img/flags/ma.png diff --git a/fedn/fedn/clients/reducer/static/dist/img/flags/mc.png b/fedn/fedn/network/dashboard/static/dist/img/flags/mc.png similarity index 100% rename from fedn/fedn/clients/reducer/static/dist/img/flags/mc.png rename to fedn/fedn/network/dashboard/static/dist/img/flags/mc.png diff --git a/fedn/fedn/clients/reducer/static/dist/img/flags/md.png b/fedn/fedn/network/dashboard/static/dist/img/flags/md.png similarity index 100% rename from fedn/fedn/clients/reducer/static/dist/img/flags/md.png rename to fedn/fedn/network/dashboard/static/dist/img/flags/md.png diff --git a/fedn/fedn/clients/reducer/static/dist/img/flags/me.png b/fedn/fedn/network/dashboard/static/dist/img/flags/me.png similarity index 100% rename from fedn/fedn/clients/reducer/static/dist/img/flags/me.png rename to fedn/fedn/network/dashboard/static/dist/img/flags/me.png diff --git a/fedn/fedn/clients/reducer/static/dist/img/flags/mf.png b/fedn/fedn/network/dashboard/static/dist/img/flags/mf.png similarity index 100% rename from fedn/fedn/clients/reducer/static/dist/img/flags/mf.png rename to fedn/fedn/network/dashboard/static/dist/img/flags/mf.png diff --git a/fedn/fedn/clients/reducer/static/dist/img/flags/mg.png b/fedn/fedn/network/dashboard/static/dist/img/flags/mg.png similarity index 100% rename from fedn/fedn/clients/reducer/static/dist/img/flags/mg.png rename to fedn/fedn/network/dashboard/static/dist/img/flags/mg.png diff --git a/fedn/fedn/clients/reducer/static/dist/img/flags/mh.png b/fedn/fedn/network/dashboard/static/dist/img/flags/mh.png similarity index 100% rename from fedn/fedn/clients/reducer/static/dist/img/flags/mh.png rename to fedn/fedn/network/dashboard/static/dist/img/flags/mh.png diff --git a/fedn/fedn/clients/reducer/static/dist/img/flags/mk.png b/fedn/fedn/network/dashboard/static/dist/img/flags/mk.png similarity index 100% rename from fedn/fedn/clients/reducer/static/dist/img/flags/mk.png rename to 
fedn/fedn/network/dashboard/static/dist/img/flags/mk.png diff --git a/fedn/fedn/clients/reducer/static/dist/img/flags/ml.png b/fedn/fedn/network/dashboard/static/dist/img/flags/ml.png similarity index 100% rename from fedn/fedn/clients/reducer/static/dist/img/flags/ml.png rename to fedn/fedn/network/dashboard/static/dist/img/flags/ml.png diff --git a/fedn/fedn/clients/reducer/static/dist/img/flags/mm.png b/fedn/fedn/network/dashboard/static/dist/img/flags/mm.png similarity index 100% rename from fedn/fedn/clients/reducer/static/dist/img/flags/mm.png rename to fedn/fedn/network/dashboard/static/dist/img/flags/mm.png diff --git a/fedn/fedn/clients/reducer/static/dist/img/flags/mn.png b/fedn/fedn/network/dashboard/static/dist/img/flags/mn.png similarity index 100% rename from fedn/fedn/clients/reducer/static/dist/img/flags/mn.png rename to fedn/fedn/network/dashboard/static/dist/img/flags/mn.png diff --git a/fedn/fedn/clients/reducer/static/dist/img/flags/mo.png b/fedn/fedn/network/dashboard/static/dist/img/flags/mo.png similarity index 100% rename from fedn/fedn/clients/reducer/static/dist/img/flags/mo.png rename to fedn/fedn/network/dashboard/static/dist/img/flags/mo.png diff --git a/fedn/fedn/clients/reducer/static/dist/img/flags/mp.png b/fedn/fedn/network/dashboard/static/dist/img/flags/mp.png similarity index 100% rename from fedn/fedn/clients/reducer/static/dist/img/flags/mp.png rename to fedn/fedn/network/dashboard/static/dist/img/flags/mp.png diff --git a/fedn/fedn/clients/reducer/static/dist/img/flags/mq.png b/fedn/fedn/network/dashboard/static/dist/img/flags/mq.png similarity index 100% rename from fedn/fedn/clients/reducer/static/dist/img/flags/mq.png rename to fedn/fedn/network/dashboard/static/dist/img/flags/mq.png diff --git a/fedn/fedn/clients/reducer/static/dist/img/flags/mr.png b/fedn/fedn/network/dashboard/static/dist/img/flags/mr.png similarity index 100% rename from fedn/fedn/clients/reducer/static/dist/img/flags/mr.png rename to 
fedn/fedn/network/dashboard/static/dist/img/flags/mr.png diff --git a/fedn/fedn/clients/reducer/static/dist/img/flags/ms.png b/fedn/fedn/network/dashboard/static/dist/img/flags/ms.png similarity index 100% rename from fedn/fedn/clients/reducer/static/dist/img/flags/ms.png rename to fedn/fedn/network/dashboard/static/dist/img/flags/ms.png diff --git a/fedn/fedn/clients/reducer/static/dist/img/flags/mt.png b/fedn/fedn/network/dashboard/static/dist/img/flags/mt.png similarity index 100% rename from fedn/fedn/clients/reducer/static/dist/img/flags/mt.png rename to fedn/fedn/network/dashboard/static/dist/img/flags/mt.png diff --git a/fedn/fedn/clients/reducer/static/dist/img/flags/mu.png b/fedn/fedn/network/dashboard/static/dist/img/flags/mu.png similarity index 100% rename from fedn/fedn/clients/reducer/static/dist/img/flags/mu.png rename to fedn/fedn/network/dashboard/static/dist/img/flags/mu.png diff --git a/fedn/fedn/clients/reducer/static/dist/img/flags/mv.png b/fedn/fedn/network/dashboard/static/dist/img/flags/mv.png similarity index 100% rename from fedn/fedn/clients/reducer/static/dist/img/flags/mv.png rename to fedn/fedn/network/dashboard/static/dist/img/flags/mv.png diff --git a/fedn/fedn/clients/reducer/static/dist/img/flags/mw.png b/fedn/fedn/network/dashboard/static/dist/img/flags/mw.png similarity index 100% rename from fedn/fedn/clients/reducer/static/dist/img/flags/mw.png rename to fedn/fedn/network/dashboard/static/dist/img/flags/mw.png diff --git a/fedn/fedn/clients/reducer/static/dist/img/flags/mx.png b/fedn/fedn/network/dashboard/static/dist/img/flags/mx.png similarity index 100% rename from fedn/fedn/clients/reducer/static/dist/img/flags/mx.png rename to fedn/fedn/network/dashboard/static/dist/img/flags/mx.png diff --git a/fedn/fedn/clients/reducer/static/dist/img/flags/my.png b/fedn/fedn/network/dashboard/static/dist/img/flags/my.png similarity index 100% rename from fedn/fedn/clients/reducer/static/dist/img/flags/my.png rename to 
fedn/fedn/network/dashboard/static/dist/img/flags/my.png diff --git a/fedn/fedn/clients/reducer/static/dist/img/flags/mz.png b/fedn/fedn/network/dashboard/static/dist/img/flags/mz.png similarity index 100% rename from fedn/fedn/clients/reducer/static/dist/img/flags/mz.png rename to fedn/fedn/network/dashboard/static/dist/img/flags/mz.png diff --git a/fedn/fedn/clients/reducer/static/dist/img/flags/na.png b/fedn/fedn/network/dashboard/static/dist/img/flags/na.png similarity index 100% rename from fedn/fedn/clients/reducer/static/dist/img/flags/na.png rename to fedn/fedn/network/dashboard/static/dist/img/flags/na.png diff --git a/fedn/fedn/clients/reducer/static/dist/img/flags/nc.png b/fedn/fedn/network/dashboard/static/dist/img/flags/nc.png similarity index 100% rename from fedn/fedn/clients/reducer/static/dist/img/flags/nc.png rename to fedn/fedn/network/dashboard/static/dist/img/flags/nc.png diff --git a/fedn/fedn/clients/reducer/static/dist/img/flags/ne.png b/fedn/fedn/network/dashboard/static/dist/img/flags/ne.png similarity index 100% rename from fedn/fedn/clients/reducer/static/dist/img/flags/ne.png rename to fedn/fedn/network/dashboard/static/dist/img/flags/ne.png diff --git a/fedn/fedn/clients/reducer/static/dist/img/flags/nf.png b/fedn/fedn/network/dashboard/static/dist/img/flags/nf.png similarity index 100% rename from fedn/fedn/clients/reducer/static/dist/img/flags/nf.png rename to fedn/fedn/network/dashboard/static/dist/img/flags/nf.png diff --git a/fedn/fedn/clients/reducer/static/dist/img/flags/ng.png b/fedn/fedn/network/dashboard/static/dist/img/flags/ng.png similarity index 100% rename from fedn/fedn/clients/reducer/static/dist/img/flags/ng.png rename to fedn/fedn/network/dashboard/static/dist/img/flags/ng.png diff --git a/fedn/fedn/clients/reducer/static/dist/img/flags/ni.png b/fedn/fedn/network/dashboard/static/dist/img/flags/ni.png similarity index 100% rename from fedn/fedn/clients/reducer/static/dist/img/flags/ni.png rename to 
fedn/fedn/network/dashboard/static/dist/img/flags/ni.png diff --git a/fedn/fedn/clients/reducer/static/dist/img/flags/nl.png b/fedn/fedn/network/dashboard/static/dist/img/flags/nl.png similarity index 100% rename from fedn/fedn/clients/reducer/static/dist/img/flags/nl.png rename to fedn/fedn/network/dashboard/static/dist/img/flags/nl.png diff --git a/fedn/fedn/clients/reducer/static/dist/img/flags/no.png b/fedn/fedn/network/dashboard/static/dist/img/flags/no.png similarity index 100% rename from fedn/fedn/clients/reducer/static/dist/img/flags/no.png rename to fedn/fedn/network/dashboard/static/dist/img/flags/no.png diff --git a/fedn/fedn/clients/reducer/static/dist/img/flags/np.png b/fedn/fedn/network/dashboard/static/dist/img/flags/np.png similarity index 100% rename from fedn/fedn/clients/reducer/static/dist/img/flags/np.png rename to fedn/fedn/network/dashboard/static/dist/img/flags/np.png diff --git a/fedn/fedn/clients/reducer/static/dist/img/flags/nr.png b/fedn/fedn/network/dashboard/static/dist/img/flags/nr.png similarity index 100% rename from fedn/fedn/clients/reducer/static/dist/img/flags/nr.png rename to fedn/fedn/network/dashboard/static/dist/img/flags/nr.png diff --git a/fedn/fedn/clients/reducer/static/dist/img/flags/nu.png b/fedn/fedn/network/dashboard/static/dist/img/flags/nu.png similarity index 100% rename from fedn/fedn/clients/reducer/static/dist/img/flags/nu.png rename to fedn/fedn/network/dashboard/static/dist/img/flags/nu.png diff --git a/fedn/fedn/clients/reducer/static/dist/img/flags/nz.png b/fedn/fedn/network/dashboard/static/dist/img/flags/nz.png similarity index 100% rename from fedn/fedn/clients/reducer/static/dist/img/flags/nz.png rename to fedn/fedn/network/dashboard/static/dist/img/flags/nz.png diff --git a/fedn/fedn/clients/reducer/static/dist/img/flags/om.png b/fedn/fedn/network/dashboard/static/dist/img/flags/om.png similarity index 100% rename from fedn/fedn/clients/reducer/static/dist/img/flags/om.png rename to 
fedn/fedn/network/dashboard/static/dist/img/flags/om.png diff --git a/fedn/fedn/clients/reducer/static/dist/img/flags/pa.png b/fedn/fedn/network/dashboard/static/dist/img/flags/pa.png similarity index 100% rename from fedn/fedn/clients/reducer/static/dist/img/flags/pa.png rename to fedn/fedn/network/dashboard/static/dist/img/flags/pa.png diff --git a/fedn/fedn/clients/reducer/static/dist/img/flags/pe.png b/fedn/fedn/network/dashboard/static/dist/img/flags/pe.png similarity index 100% rename from fedn/fedn/clients/reducer/static/dist/img/flags/pe.png rename to fedn/fedn/network/dashboard/static/dist/img/flags/pe.png diff --git a/fedn/fedn/clients/reducer/static/dist/img/flags/pf.png b/fedn/fedn/network/dashboard/static/dist/img/flags/pf.png similarity index 100% rename from fedn/fedn/clients/reducer/static/dist/img/flags/pf.png rename to fedn/fedn/network/dashboard/static/dist/img/flags/pf.png diff --git a/fedn/fedn/clients/reducer/static/dist/img/flags/pg.png b/fedn/fedn/network/dashboard/static/dist/img/flags/pg.png similarity index 100% rename from fedn/fedn/clients/reducer/static/dist/img/flags/pg.png rename to fedn/fedn/network/dashboard/static/dist/img/flags/pg.png diff --git a/fedn/fedn/clients/reducer/static/dist/img/flags/ph.png b/fedn/fedn/network/dashboard/static/dist/img/flags/ph.png similarity index 100% rename from fedn/fedn/clients/reducer/static/dist/img/flags/ph.png rename to fedn/fedn/network/dashboard/static/dist/img/flags/ph.png diff --git a/fedn/fedn/clients/reducer/static/dist/img/flags/pk.png b/fedn/fedn/network/dashboard/static/dist/img/flags/pk.png similarity index 100% rename from fedn/fedn/clients/reducer/static/dist/img/flags/pk.png rename to fedn/fedn/network/dashboard/static/dist/img/flags/pk.png diff --git a/fedn/fedn/clients/reducer/static/dist/img/flags/pl.png b/fedn/fedn/network/dashboard/static/dist/img/flags/pl.png similarity index 100% rename from fedn/fedn/clients/reducer/static/dist/img/flags/pl.png rename to 
fedn/fedn/network/dashboard/static/dist/img/flags/pl.png diff --git a/fedn/fedn/clients/reducer/static/dist/img/flags/pm.png b/fedn/fedn/network/dashboard/static/dist/img/flags/pm.png similarity index 100% rename from fedn/fedn/clients/reducer/static/dist/img/flags/pm.png rename to fedn/fedn/network/dashboard/static/dist/img/flags/pm.png diff --git a/fedn/fedn/clients/reducer/static/dist/img/flags/pn.png b/fedn/fedn/network/dashboard/static/dist/img/flags/pn.png similarity index 100% rename from fedn/fedn/clients/reducer/static/dist/img/flags/pn.png rename to fedn/fedn/network/dashboard/static/dist/img/flags/pn.png diff --git a/fedn/fedn/clients/reducer/static/dist/img/flags/pr.png b/fedn/fedn/network/dashboard/static/dist/img/flags/pr.png similarity index 100% rename from fedn/fedn/clients/reducer/static/dist/img/flags/pr.png rename to fedn/fedn/network/dashboard/static/dist/img/flags/pr.png diff --git a/fedn/fedn/clients/reducer/static/dist/img/flags/ps.png b/fedn/fedn/network/dashboard/static/dist/img/flags/ps.png similarity index 100% rename from fedn/fedn/clients/reducer/static/dist/img/flags/ps.png rename to fedn/fedn/network/dashboard/static/dist/img/flags/ps.png diff --git a/fedn/fedn/clients/reducer/static/dist/img/flags/pt.png b/fedn/fedn/network/dashboard/static/dist/img/flags/pt.png similarity index 100% rename from fedn/fedn/clients/reducer/static/dist/img/flags/pt.png rename to fedn/fedn/network/dashboard/static/dist/img/flags/pt.png diff --git a/fedn/fedn/clients/reducer/static/dist/img/flags/pw.png b/fedn/fedn/network/dashboard/static/dist/img/flags/pw.png similarity index 100% rename from fedn/fedn/clients/reducer/static/dist/img/flags/pw.png rename to fedn/fedn/network/dashboard/static/dist/img/flags/pw.png diff --git a/fedn/fedn/clients/reducer/static/dist/img/flags/py.png b/fedn/fedn/network/dashboard/static/dist/img/flags/py.png similarity index 100% rename from fedn/fedn/clients/reducer/static/dist/img/flags/py.png rename to 
fedn/fedn/network/dashboard/static/dist/img/flags/py.png diff --git a/fedn/fedn/clients/reducer/static/dist/img/flags/qa.png b/fedn/fedn/network/dashboard/static/dist/img/flags/qa.png similarity index 100% rename from fedn/fedn/clients/reducer/static/dist/img/flags/qa.png rename to fedn/fedn/network/dashboard/static/dist/img/flags/qa.png diff --git a/fedn/fedn/clients/reducer/static/dist/img/flags/re.png b/fedn/fedn/network/dashboard/static/dist/img/flags/re.png similarity index 100% rename from fedn/fedn/clients/reducer/static/dist/img/flags/re.png rename to fedn/fedn/network/dashboard/static/dist/img/flags/re.png diff --git a/fedn/fedn/clients/reducer/static/dist/img/flags/ro.png b/fedn/fedn/network/dashboard/static/dist/img/flags/ro.png similarity index 100% rename from fedn/fedn/clients/reducer/static/dist/img/flags/ro.png rename to fedn/fedn/network/dashboard/static/dist/img/flags/ro.png diff --git a/fedn/fedn/clients/reducer/static/dist/img/flags/rs.png b/fedn/fedn/network/dashboard/static/dist/img/flags/rs.png similarity index 100% rename from fedn/fedn/clients/reducer/static/dist/img/flags/rs.png rename to fedn/fedn/network/dashboard/static/dist/img/flags/rs.png diff --git a/fedn/fedn/clients/reducer/static/dist/img/flags/ru.png b/fedn/fedn/network/dashboard/static/dist/img/flags/ru.png similarity index 100% rename from fedn/fedn/clients/reducer/static/dist/img/flags/ru.png rename to fedn/fedn/network/dashboard/static/dist/img/flags/ru.png diff --git a/fedn/fedn/clients/reducer/static/dist/img/flags/rw.png b/fedn/fedn/network/dashboard/static/dist/img/flags/rw.png similarity index 100% rename from fedn/fedn/clients/reducer/static/dist/img/flags/rw.png rename to fedn/fedn/network/dashboard/static/dist/img/flags/rw.png diff --git a/fedn/fedn/clients/reducer/static/dist/img/flags/sa.png b/fedn/fedn/network/dashboard/static/dist/img/flags/sa.png similarity index 100% rename from fedn/fedn/clients/reducer/static/dist/img/flags/sa.png rename to 
fedn/fedn/network/dashboard/static/dist/img/flags/sa.png diff --git a/fedn/fedn/clients/reducer/static/dist/img/flags/sb.png b/fedn/fedn/network/dashboard/static/dist/img/flags/sb.png similarity index 100% rename from fedn/fedn/clients/reducer/static/dist/img/flags/sb.png rename to fedn/fedn/network/dashboard/static/dist/img/flags/sb.png diff --git a/fedn/fedn/clients/reducer/static/dist/img/flags/sc.png b/fedn/fedn/network/dashboard/static/dist/img/flags/sc.png similarity index 100% rename from fedn/fedn/clients/reducer/static/dist/img/flags/sc.png rename to fedn/fedn/network/dashboard/static/dist/img/flags/sc.png diff --git a/fedn/fedn/clients/reducer/static/dist/img/flags/sd.png b/fedn/fedn/network/dashboard/static/dist/img/flags/sd.png similarity index 100% rename from fedn/fedn/clients/reducer/static/dist/img/flags/sd.png rename to fedn/fedn/network/dashboard/static/dist/img/flags/sd.png diff --git a/fedn/fedn/clients/reducer/static/dist/img/flags/se.png b/fedn/fedn/network/dashboard/static/dist/img/flags/se.png similarity index 100% rename from fedn/fedn/clients/reducer/static/dist/img/flags/se.png rename to fedn/fedn/network/dashboard/static/dist/img/flags/se.png diff --git a/fedn/fedn/clients/reducer/static/dist/img/flags/sg.png b/fedn/fedn/network/dashboard/static/dist/img/flags/sg.png similarity index 100% rename from fedn/fedn/clients/reducer/static/dist/img/flags/sg.png rename to fedn/fedn/network/dashboard/static/dist/img/flags/sg.png diff --git a/fedn/fedn/clients/reducer/static/dist/img/flags/sh.png b/fedn/fedn/network/dashboard/static/dist/img/flags/sh.png similarity index 100% rename from fedn/fedn/clients/reducer/static/dist/img/flags/sh.png rename to fedn/fedn/network/dashboard/static/dist/img/flags/sh.png diff --git a/fedn/fedn/clients/reducer/static/dist/img/flags/si.png b/fedn/fedn/network/dashboard/static/dist/img/flags/si.png similarity index 100% rename from fedn/fedn/clients/reducer/static/dist/img/flags/si.png rename to 
fedn/fedn/network/dashboard/static/dist/img/flags/si.png diff --git a/fedn/fedn/clients/reducer/static/dist/img/flags/sj.png b/fedn/fedn/network/dashboard/static/dist/img/flags/sj.png similarity index 100% rename from fedn/fedn/clients/reducer/static/dist/img/flags/sj.png rename to fedn/fedn/network/dashboard/static/dist/img/flags/sj.png diff --git a/fedn/fedn/clients/reducer/static/dist/img/flags/sk.png b/fedn/fedn/network/dashboard/static/dist/img/flags/sk.png similarity index 100% rename from fedn/fedn/clients/reducer/static/dist/img/flags/sk.png rename to fedn/fedn/network/dashboard/static/dist/img/flags/sk.png diff --git a/fedn/fedn/clients/reducer/static/dist/img/flags/sl.png b/fedn/fedn/network/dashboard/static/dist/img/flags/sl.png similarity index 100% rename from fedn/fedn/clients/reducer/static/dist/img/flags/sl.png rename to fedn/fedn/network/dashboard/static/dist/img/flags/sl.png diff --git a/fedn/fedn/clients/reducer/static/dist/img/flags/sm.png b/fedn/fedn/network/dashboard/static/dist/img/flags/sm.png similarity index 100% rename from fedn/fedn/clients/reducer/static/dist/img/flags/sm.png rename to fedn/fedn/network/dashboard/static/dist/img/flags/sm.png diff --git a/fedn/fedn/clients/reducer/static/dist/img/flags/sn.png b/fedn/fedn/network/dashboard/static/dist/img/flags/sn.png similarity index 100% rename from fedn/fedn/clients/reducer/static/dist/img/flags/sn.png rename to fedn/fedn/network/dashboard/static/dist/img/flags/sn.png diff --git a/fedn/fedn/clients/reducer/static/dist/img/flags/so.png b/fedn/fedn/network/dashboard/static/dist/img/flags/so.png similarity index 100% rename from fedn/fedn/clients/reducer/static/dist/img/flags/so.png rename to fedn/fedn/network/dashboard/static/dist/img/flags/so.png diff --git a/fedn/fedn/clients/reducer/static/dist/img/flags/sr.png b/fedn/fedn/network/dashboard/static/dist/img/flags/sr.png similarity index 100% rename from fedn/fedn/clients/reducer/static/dist/img/flags/sr.png rename to 
fedn/fedn/network/dashboard/static/dist/img/flags/sr.png diff --git a/fedn/fedn/clients/reducer/static/dist/img/flags/ss.png b/fedn/fedn/network/dashboard/static/dist/img/flags/ss.png similarity index 100% rename from fedn/fedn/clients/reducer/static/dist/img/flags/ss.png rename to fedn/fedn/network/dashboard/static/dist/img/flags/ss.png diff --git a/fedn/fedn/clients/reducer/static/dist/img/flags/st.png b/fedn/fedn/network/dashboard/static/dist/img/flags/st.png similarity index 100% rename from fedn/fedn/clients/reducer/static/dist/img/flags/st.png rename to fedn/fedn/network/dashboard/static/dist/img/flags/st.png diff --git a/fedn/fedn/clients/reducer/static/dist/img/flags/sv.png b/fedn/fedn/network/dashboard/static/dist/img/flags/sv.png similarity index 100% rename from fedn/fedn/clients/reducer/static/dist/img/flags/sv.png rename to fedn/fedn/network/dashboard/static/dist/img/flags/sv.png diff --git a/fedn/fedn/clients/reducer/static/dist/img/flags/sx.png b/fedn/fedn/network/dashboard/static/dist/img/flags/sx.png similarity index 100% rename from fedn/fedn/clients/reducer/static/dist/img/flags/sx.png rename to fedn/fedn/network/dashboard/static/dist/img/flags/sx.png diff --git a/fedn/fedn/clients/reducer/static/dist/img/flags/sy.png b/fedn/fedn/network/dashboard/static/dist/img/flags/sy.png similarity index 100% rename from fedn/fedn/clients/reducer/static/dist/img/flags/sy.png rename to fedn/fedn/network/dashboard/static/dist/img/flags/sy.png diff --git a/fedn/fedn/clients/reducer/static/dist/img/flags/sz.png b/fedn/fedn/network/dashboard/static/dist/img/flags/sz.png similarity index 100% rename from fedn/fedn/clients/reducer/static/dist/img/flags/sz.png rename to fedn/fedn/network/dashboard/static/dist/img/flags/sz.png diff --git a/fedn/fedn/clients/reducer/static/dist/img/flags/tc.png b/fedn/fedn/network/dashboard/static/dist/img/flags/tc.png similarity index 100% rename from fedn/fedn/clients/reducer/static/dist/img/flags/tc.png rename to 
fedn/fedn/network/dashboard/static/dist/img/flags/tc.png diff --git a/fedn/fedn/clients/reducer/static/dist/img/flags/td.png b/fedn/fedn/network/dashboard/static/dist/img/flags/td.png similarity index 100% rename from fedn/fedn/clients/reducer/static/dist/img/flags/td.png rename to fedn/fedn/network/dashboard/static/dist/img/flags/td.png diff --git a/fedn/fedn/clients/reducer/static/dist/img/flags/tf.png b/fedn/fedn/network/dashboard/static/dist/img/flags/tf.png similarity index 100% rename from fedn/fedn/clients/reducer/static/dist/img/flags/tf.png rename to fedn/fedn/network/dashboard/static/dist/img/flags/tf.png diff --git a/fedn/fedn/clients/reducer/static/dist/img/flags/tg.png b/fedn/fedn/network/dashboard/static/dist/img/flags/tg.png similarity index 100% rename from fedn/fedn/clients/reducer/static/dist/img/flags/tg.png rename to fedn/fedn/network/dashboard/static/dist/img/flags/tg.png diff --git a/fedn/fedn/clients/reducer/static/dist/img/flags/th.png b/fedn/fedn/network/dashboard/static/dist/img/flags/th.png similarity index 100% rename from fedn/fedn/clients/reducer/static/dist/img/flags/th.png rename to fedn/fedn/network/dashboard/static/dist/img/flags/th.png diff --git a/fedn/fedn/clients/reducer/static/dist/img/flags/tj.png b/fedn/fedn/network/dashboard/static/dist/img/flags/tj.png similarity index 100% rename from fedn/fedn/clients/reducer/static/dist/img/flags/tj.png rename to fedn/fedn/network/dashboard/static/dist/img/flags/tj.png diff --git a/fedn/fedn/clients/reducer/static/dist/img/flags/tk.png b/fedn/fedn/network/dashboard/static/dist/img/flags/tk.png similarity index 100% rename from fedn/fedn/clients/reducer/static/dist/img/flags/tk.png rename to fedn/fedn/network/dashboard/static/dist/img/flags/tk.png diff --git a/fedn/fedn/clients/reducer/static/dist/img/flags/tl.png b/fedn/fedn/network/dashboard/static/dist/img/flags/tl.png similarity index 100% rename from fedn/fedn/clients/reducer/static/dist/img/flags/tl.png rename to 
fedn/fedn/network/dashboard/static/dist/img/flags/tl.png diff --git a/fedn/fedn/clients/reducer/static/dist/img/flags/tm.png b/fedn/fedn/network/dashboard/static/dist/img/flags/tm.png similarity index 100% rename from fedn/fedn/clients/reducer/static/dist/img/flags/tm.png rename to fedn/fedn/network/dashboard/static/dist/img/flags/tm.png diff --git a/fedn/fedn/clients/reducer/static/dist/img/flags/tn.png b/fedn/fedn/network/dashboard/static/dist/img/flags/tn.png similarity index 100% rename from fedn/fedn/clients/reducer/static/dist/img/flags/tn.png rename to fedn/fedn/network/dashboard/static/dist/img/flags/tn.png diff --git a/fedn/fedn/clients/reducer/static/dist/img/flags/to.png b/fedn/fedn/network/dashboard/static/dist/img/flags/to.png similarity index 100% rename from fedn/fedn/clients/reducer/static/dist/img/flags/to.png rename to fedn/fedn/network/dashboard/static/dist/img/flags/to.png diff --git a/fedn/fedn/clients/reducer/static/dist/img/flags/tr.png b/fedn/fedn/network/dashboard/static/dist/img/flags/tr.png similarity index 100% rename from fedn/fedn/clients/reducer/static/dist/img/flags/tr.png rename to fedn/fedn/network/dashboard/static/dist/img/flags/tr.png diff --git a/fedn/fedn/clients/reducer/static/dist/img/flags/tt.png b/fedn/fedn/network/dashboard/static/dist/img/flags/tt.png similarity index 100% rename from fedn/fedn/clients/reducer/static/dist/img/flags/tt.png rename to fedn/fedn/network/dashboard/static/dist/img/flags/tt.png diff --git a/fedn/fedn/clients/reducer/static/dist/img/flags/tv.png b/fedn/fedn/network/dashboard/static/dist/img/flags/tv.png similarity index 100% rename from fedn/fedn/clients/reducer/static/dist/img/flags/tv.png rename to fedn/fedn/network/dashboard/static/dist/img/flags/tv.png diff --git a/fedn/fedn/clients/reducer/static/dist/img/flags/tw.png b/fedn/fedn/network/dashboard/static/dist/img/flags/tw.png similarity index 100% rename from fedn/fedn/clients/reducer/static/dist/img/flags/tw.png rename to 
fedn/fedn/network/dashboard/static/dist/img/flags/tw.png diff --git a/fedn/fedn/clients/reducer/static/dist/img/flags/tz.png b/fedn/fedn/network/dashboard/static/dist/img/flags/tz.png similarity index 100% rename from fedn/fedn/clients/reducer/static/dist/img/flags/tz.png rename to fedn/fedn/network/dashboard/static/dist/img/flags/tz.png diff --git a/fedn/fedn/clients/reducer/static/dist/img/flags/ua.png b/fedn/fedn/network/dashboard/static/dist/img/flags/ua.png similarity index 100% rename from fedn/fedn/clients/reducer/static/dist/img/flags/ua.png rename to fedn/fedn/network/dashboard/static/dist/img/flags/ua.png diff --git a/fedn/fedn/clients/reducer/static/dist/img/flags/ug.png b/fedn/fedn/network/dashboard/static/dist/img/flags/ug.png similarity index 100% rename from fedn/fedn/clients/reducer/static/dist/img/flags/ug.png rename to fedn/fedn/network/dashboard/static/dist/img/flags/ug.png diff --git a/fedn/fedn/clients/reducer/static/dist/img/flags/um.png b/fedn/fedn/network/dashboard/static/dist/img/flags/um.png similarity index 100% rename from fedn/fedn/clients/reducer/static/dist/img/flags/um.png rename to fedn/fedn/network/dashboard/static/dist/img/flags/um.png diff --git a/fedn/fedn/clients/reducer/static/dist/img/flags/us.png b/fedn/fedn/network/dashboard/static/dist/img/flags/us.png similarity index 100% rename from fedn/fedn/clients/reducer/static/dist/img/flags/us.png rename to fedn/fedn/network/dashboard/static/dist/img/flags/us.png diff --git a/fedn/fedn/clients/reducer/static/dist/img/flags/uy.png b/fedn/fedn/network/dashboard/static/dist/img/flags/uy.png similarity index 100% rename from fedn/fedn/clients/reducer/static/dist/img/flags/uy.png rename to fedn/fedn/network/dashboard/static/dist/img/flags/uy.png diff --git a/fedn/fedn/clients/reducer/static/dist/img/flags/uz.png b/fedn/fedn/network/dashboard/static/dist/img/flags/uz.png similarity index 100% rename from fedn/fedn/clients/reducer/static/dist/img/flags/uz.png rename to 
fedn/fedn/network/dashboard/static/dist/img/flags/uz.png diff --git a/fedn/fedn/clients/reducer/static/dist/img/flags/va.png b/fedn/fedn/network/dashboard/static/dist/img/flags/va.png similarity index 100% rename from fedn/fedn/clients/reducer/static/dist/img/flags/va.png rename to fedn/fedn/network/dashboard/static/dist/img/flags/va.png diff --git a/fedn/fedn/clients/reducer/static/dist/img/flags/vc.png b/fedn/fedn/network/dashboard/static/dist/img/flags/vc.png similarity index 100% rename from fedn/fedn/clients/reducer/static/dist/img/flags/vc.png rename to fedn/fedn/network/dashboard/static/dist/img/flags/vc.png diff --git a/fedn/fedn/clients/reducer/static/dist/img/flags/ve.png b/fedn/fedn/network/dashboard/static/dist/img/flags/ve.png similarity index 100% rename from fedn/fedn/clients/reducer/static/dist/img/flags/ve.png rename to fedn/fedn/network/dashboard/static/dist/img/flags/ve.png diff --git a/fedn/fedn/clients/reducer/static/dist/img/flags/vg.png b/fedn/fedn/network/dashboard/static/dist/img/flags/vg.png similarity index 100% rename from fedn/fedn/clients/reducer/static/dist/img/flags/vg.png rename to fedn/fedn/network/dashboard/static/dist/img/flags/vg.png diff --git a/fedn/fedn/clients/reducer/static/dist/img/flags/vi.png b/fedn/fedn/network/dashboard/static/dist/img/flags/vi.png similarity index 100% rename from fedn/fedn/clients/reducer/static/dist/img/flags/vi.png rename to fedn/fedn/network/dashboard/static/dist/img/flags/vi.png diff --git a/fedn/fedn/clients/reducer/static/dist/img/flags/vn.png b/fedn/fedn/network/dashboard/static/dist/img/flags/vn.png similarity index 100% rename from fedn/fedn/clients/reducer/static/dist/img/flags/vn.png rename to fedn/fedn/network/dashboard/static/dist/img/flags/vn.png diff --git a/fedn/fedn/clients/reducer/static/dist/img/flags/vu.png b/fedn/fedn/network/dashboard/static/dist/img/flags/vu.png similarity index 100% rename from fedn/fedn/clients/reducer/static/dist/img/flags/vu.png rename to 
fedn/fedn/network/dashboard/static/dist/img/flags/vu.png diff --git a/fedn/fedn/clients/reducer/static/dist/img/flags/wf.png b/fedn/fedn/network/dashboard/static/dist/img/flags/wf.png similarity index 100% rename from fedn/fedn/clients/reducer/static/dist/img/flags/wf.png rename to fedn/fedn/network/dashboard/static/dist/img/flags/wf.png diff --git a/fedn/fedn/clients/reducer/static/dist/img/flags/ws.png b/fedn/fedn/network/dashboard/static/dist/img/flags/ws.png similarity index 100% rename from fedn/fedn/clients/reducer/static/dist/img/flags/ws.png rename to fedn/fedn/network/dashboard/static/dist/img/flags/ws.png diff --git a/fedn/fedn/clients/reducer/static/dist/img/flags/xk.png b/fedn/fedn/network/dashboard/static/dist/img/flags/xk.png similarity index 100% rename from fedn/fedn/clients/reducer/static/dist/img/flags/xk.png rename to fedn/fedn/network/dashboard/static/dist/img/flags/xk.png diff --git a/fedn/fedn/clients/reducer/static/dist/img/flags/ye.png b/fedn/fedn/network/dashboard/static/dist/img/flags/ye.png similarity index 100% rename from fedn/fedn/clients/reducer/static/dist/img/flags/ye.png rename to fedn/fedn/network/dashboard/static/dist/img/flags/ye.png diff --git a/fedn/fedn/clients/reducer/static/dist/img/flags/yt.png b/fedn/fedn/network/dashboard/static/dist/img/flags/yt.png similarity index 100% rename from fedn/fedn/clients/reducer/static/dist/img/flags/yt.png rename to fedn/fedn/network/dashboard/static/dist/img/flags/yt.png diff --git a/fedn/fedn/clients/reducer/static/dist/img/flags/za.png b/fedn/fedn/network/dashboard/static/dist/img/flags/za.png similarity index 100% rename from fedn/fedn/clients/reducer/static/dist/img/flags/za.png rename to fedn/fedn/network/dashboard/static/dist/img/flags/za.png diff --git a/fedn/fedn/clients/reducer/static/dist/img/flags/zm.png b/fedn/fedn/network/dashboard/static/dist/img/flags/zm.png similarity index 100% rename from fedn/fedn/clients/reducer/static/dist/img/flags/zm.png rename to 
fedn/fedn/network/dashboard/static/dist/img/flags/zm.png diff --git a/fedn/fedn/clients/reducer/static/dist/img/flags/zw.png b/fedn/fedn/network/dashboard/static/dist/img/flags/zw.png similarity index 100% rename from fedn/fedn/clients/reducer/static/dist/img/flags/zw.png rename to fedn/fedn/network/dashboard/static/dist/img/flags/zw.png diff --git a/fedn/fedn/clients/reducer/static/dist/img/illustrations/customer-support.png b/fedn/fedn/network/dashboard/static/dist/img/illustrations/customer-support.png similarity index 100% rename from fedn/fedn/clients/reducer/static/dist/img/illustrations/customer-support.png rename to fedn/fedn/network/dashboard/static/dist/img/illustrations/customer-support.png diff --git a/fedn/fedn/clients/reducer/static/dist/img/illustrations/searching.png b/fedn/fedn/network/dashboard/static/dist/img/illustrations/searching.png similarity index 100% rename from fedn/fedn/clients/reducer/static/dist/img/illustrations/searching.png rename to fedn/fedn/network/dashboard/static/dist/img/illustrations/searching.png diff --git a/fedn/fedn/clients/reducer/static/dist/img/illustrations/social.png b/fedn/fedn/network/dashboard/static/dist/img/illustrations/social.png similarity index 100% rename from fedn/fedn/clients/reducer/static/dist/img/illustrations/social.png rename to fedn/fedn/network/dashboard/static/dist/img/illustrations/social.png diff --git a/fedn/fedn/clients/reducer/static/dist/img/logo.svg b/fedn/fedn/network/dashboard/static/dist/img/logo.svg similarity index 100% rename from fedn/fedn/clients/reducer/static/dist/img/logo.svg rename to fedn/fedn/network/dashboard/static/dist/img/logo.svg diff --git a/fedn/fedn/clients/reducer/static/dist/img/photos/unsplash-1.jpg b/fedn/fedn/network/dashboard/static/dist/img/photos/unsplash-1.jpg similarity index 100% rename from fedn/fedn/clients/reducer/static/dist/img/photos/unsplash-1.jpg rename to fedn/fedn/network/dashboard/static/dist/img/photos/unsplash-1.jpg diff --git 
a/fedn/fedn/clients/reducer/static/dist/img/photos/unsplash-2.jpg b/fedn/fedn/network/dashboard/static/dist/img/photos/unsplash-2.jpg similarity index 100% rename from fedn/fedn/clients/reducer/static/dist/img/photos/unsplash-2.jpg rename to fedn/fedn/network/dashboard/static/dist/img/photos/unsplash-2.jpg diff --git a/fedn/fedn/clients/reducer/static/dist/img/photos/unsplash-3.jpg b/fedn/fedn/network/dashboard/static/dist/img/photos/unsplash-3.jpg similarity index 100% rename from fedn/fedn/clients/reducer/static/dist/img/photos/unsplash-3.jpg rename to fedn/fedn/network/dashboard/static/dist/img/photos/unsplash-3.jpg diff --git a/fedn/fedn/clients/reducer/static/dist/img/screenshots/dashboard-analytics.jpg b/fedn/fedn/network/dashboard/static/dist/img/screenshots/dashboard-analytics.jpg similarity index 100% rename from fedn/fedn/clients/reducer/static/dist/img/screenshots/dashboard-analytics.jpg rename to fedn/fedn/network/dashboard/static/dist/img/screenshots/dashboard-analytics.jpg diff --git a/fedn/fedn/clients/reducer/static/dist/img/screenshots/dashboard-crypto.jpg b/fedn/fedn/network/dashboard/static/dist/img/screenshots/dashboard-crypto.jpg similarity index 100% rename from fedn/fedn/clients/reducer/static/dist/img/screenshots/dashboard-crypto.jpg rename to fedn/fedn/network/dashboard/static/dist/img/screenshots/dashboard-crypto.jpg diff --git a/fedn/fedn/clients/reducer/static/dist/img/screenshots/dashboard-default.jpg b/fedn/fedn/network/dashboard/static/dist/img/screenshots/dashboard-default.jpg similarity index 100% rename from fedn/fedn/clients/reducer/static/dist/img/screenshots/dashboard-default.jpg rename to fedn/fedn/network/dashboard/static/dist/img/screenshots/dashboard-default.jpg diff --git a/fedn/fedn/clients/reducer/static/dist/img/screenshots/dashboard-saas.jpg b/fedn/fedn/network/dashboard/static/dist/img/screenshots/dashboard-saas.jpg similarity index 100% rename from 
fedn/fedn/clients/reducer/static/dist/img/screenshots/dashboard-saas.jpg rename to fedn/fedn/network/dashboard/static/dist/img/screenshots/dashboard-saas.jpg diff --git a/fedn/fedn/clients/reducer/static/dist/img/screenshots/dashboard-social.jpg b/fedn/fedn/network/dashboard/static/dist/img/screenshots/dashboard-social.jpg similarity index 100% rename from fedn/fedn/clients/reducer/static/dist/img/screenshots/dashboard-social.jpg rename to fedn/fedn/network/dashboard/static/dist/img/screenshots/dashboard-social.jpg diff --git a/fedn/fedn/clients/reducer/static/dist/img/screenshots/mixed.jpg b/fedn/fedn/network/dashboard/static/dist/img/screenshots/mixed.jpg similarity index 100% rename from fedn/fedn/clients/reducer/static/dist/img/screenshots/mixed.jpg rename to fedn/fedn/network/dashboard/static/dist/img/screenshots/mixed.jpg diff --git a/fedn/fedn/clients/reducer/static/dist/img/screenshots/pages-projects-list.jpg b/fedn/fedn/network/dashboard/static/dist/img/screenshots/pages-projects-list.jpg similarity index 100% rename from fedn/fedn/clients/reducer/static/dist/img/screenshots/pages-projects-list.jpg rename to fedn/fedn/network/dashboard/static/dist/img/screenshots/pages-projects-list.jpg diff --git a/fedn/fedn/clients/reducer/static/dist/img/screenshots/sidebar-compact.jpg b/fedn/fedn/network/dashboard/static/dist/img/screenshots/sidebar-compact.jpg similarity index 100% rename from fedn/fedn/clients/reducer/static/dist/img/screenshots/sidebar-compact.jpg rename to fedn/fedn/network/dashboard/static/dist/img/screenshots/sidebar-compact.jpg diff --git a/fedn/fedn/clients/reducer/static/dist/img/screenshots/sidebar-right.jpg b/fedn/fedn/network/dashboard/static/dist/img/screenshots/sidebar-right.jpg similarity index 100% rename from fedn/fedn/clients/reducer/static/dist/img/screenshots/sidebar-right.jpg rename to fedn/fedn/network/dashboard/static/dist/img/screenshots/sidebar-right.jpg diff --git 
a/fedn/fedn/clients/reducer/static/dist/img/screenshots/theme-colored.jpg b/fedn/fedn/network/dashboard/static/dist/img/screenshots/theme-colored.jpg similarity index 100% rename from fedn/fedn/clients/reducer/static/dist/img/screenshots/theme-colored.jpg rename to fedn/fedn/network/dashboard/static/dist/img/screenshots/theme-colored.jpg diff --git a/fedn/fedn/clients/reducer/static/dist/img/screenshots/theme-dark.jpg b/fedn/fedn/network/dashboard/static/dist/img/screenshots/theme-dark.jpg similarity index 100% rename from fedn/fedn/clients/reducer/static/dist/img/screenshots/theme-dark.jpg rename to fedn/fedn/network/dashboard/static/dist/img/screenshots/theme-dark.jpg diff --git a/fedn/fedn/clients/reducer/static/dist/img/screenshots/theme-default.jpg b/fedn/fedn/network/dashboard/static/dist/img/screenshots/theme-default.jpg similarity index 100% rename from fedn/fedn/clients/reducer/static/dist/img/screenshots/theme-default.jpg rename to fedn/fedn/network/dashboard/static/dist/img/screenshots/theme-default.jpg diff --git a/fedn/fedn/clients/reducer/static/dist/img/screenshots/theme-light.jpg b/fedn/fedn/network/dashboard/static/dist/img/screenshots/theme-light.jpg similarity index 100% rename from fedn/fedn/clients/reducer/static/dist/img/screenshots/theme-light.jpg rename to fedn/fedn/network/dashboard/static/dist/img/screenshots/theme-light.jpg diff --git a/fedn/fedn/clients/reducer/static/dist/js/app.js b/fedn/fedn/network/dashboard/static/dist/js/app.js similarity index 100% rename from fedn/fedn/clients/reducer/static/dist/js/app.js rename to fedn/fedn/network/dashboard/static/dist/js/app.js diff --git a/fedn/fedn/clients/reducer/static/dist/js/app.js.LICENSE.txt b/fedn/fedn/network/dashboard/static/dist/js/app.js.LICENSE.txt similarity index 100% rename from fedn/fedn/clients/reducer/static/dist/js/app.js.LICENSE.txt rename to fedn/fedn/network/dashboard/static/dist/js/app.js.LICENSE.txt diff --git a/fedn/fedn/clients/reducer/static/dist/js/plot.js 
b/fedn/fedn/network/dashboard/static/dist/js/plot.js similarity index 100% rename from fedn/fedn/clients/reducer/static/dist/js/plot.js rename to fedn/fedn/network/dashboard/static/dist/js/plot.js diff --git a/fedn/fedn/clients/reducer/static/dist/js/plugins/sigma.exporters.svg.min.js b/fedn/fedn/network/dashboard/static/dist/js/plugins/sigma.exporters.svg.min.js similarity index 100% rename from fedn/fedn/clients/reducer/static/dist/js/plugins/sigma.exporters.svg.min.js rename to fedn/fedn/network/dashboard/static/dist/js/plugins/sigma.exporters.svg.min.js diff --git a/fedn/fedn/clients/reducer/static/dist/js/plugins/sigma.layout.forceAtlas2.min.js b/fedn/fedn/network/dashboard/static/dist/js/plugins/sigma.layout.forceAtlas2.min.js similarity index 100% rename from fedn/fedn/clients/reducer/static/dist/js/plugins/sigma.layout.forceAtlas2.min.js rename to fedn/fedn/network/dashboard/static/dist/js/plugins/sigma.layout.forceAtlas2.min.js diff --git a/fedn/fedn/clients/reducer/static/dist/js/plugins/sigma.layout.noverlap.min.js b/fedn/fedn/network/dashboard/static/dist/js/plugins/sigma.layout.noverlap.min.js similarity index 100% rename from fedn/fedn/clients/reducer/static/dist/js/plugins/sigma.layout.noverlap.min.js rename to fedn/fedn/network/dashboard/static/dist/js/plugins/sigma.layout.noverlap.min.js diff --git a/fedn/fedn/clients/reducer/static/dist/js/plugins/sigma.neo4j.cypher.min.js b/fedn/fedn/network/dashboard/static/dist/js/plugins/sigma.neo4j.cypher.min.js similarity index 100% rename from fedn/fedn/clients/reducer/static/dist/js/plugins/sigma.neo4j.cypher.min.js rename to fedn/fedn/network/dashboard/static/dist/js/plugins/sigma.neo4j.cypher.min.js diff --git a/fedn/fedn/clients/reducer/static/dist/js/plugins/sigma.parsers.gexf.min.js b/fedn/fedn/network/dashboard/static/dist/js/plugins/sigma.parsers.gexf.min.js similarity index 100% rename from fedn/fedn/clients/reducer/static/dist/js/plugins/sigma.parsers.gexf.min.js rename to 
fedn/fedn/network/dashboard/static/dist/js/plugins/sigma.parsers.gexf.min.js diff --git a/fedn/fedn/clients/reducer/static/dist/js/plugins/sigma.parsers.json.min.js b/fedn/fedn/network/dashboard/static/dist/js/plugins/sigma.parsers.json.min.js similarity index 100% rename from fedn/fedn/clients/reducer/static/dist/js/plugins/sigma.parsers.json.min.js rename to fedn/fedn/network/dashboard/static/dist/js/plugins/sigma.parsers.json.min.js diff --git a/fedn/fedn/clients/reducer/static/dist/js/plugins/sigma.pathfinding.astar.min.js b/fedn/fedn/network/dashboard/static/dist/js/plugins/sigma.pathfinding.astar.min.js similarity index 100% rename from fedn/fedn/clients/reducer/static/dist/js/plugins/sigma.pathfinding.astar.min.js rename to fedn/fedn/network/dashboard/static/dist/js/plugins/sigma.pathfinding.astar.min.js diff --git a/fedn/fedn/clients/reducer/static/dist/js/plugins/sigma.plugins.animate.min.js b/fedn/fedn/network/dashboard/static/dist/js/plugins/sigma.plugins.animate.min.js similarity index 100% rename from fedn/fedn/clients/reducer/static/dist/js/plugins/sigma.plugins.animate.min.js rename to fedn/fedn/network/dashboard/static/dist/js/plugins/sigma.plugins.animate.min.js diff --git a/fedn/fedn/clients/reducer/static/dist/js/plugins/sigma.plugins.dragNodes.min.js b/fedn/fedn/network/dashboard/static/dist/js/plugins/sigma.plugins.dragNodes.min.js similarity index 100% rename from fedn/fedn/clients/reducer/static/dist/js/plugins/sigma.plugins.dragNodes.min.js rename to fedn/fedn/network/dashboard/static/dist/js/plugins/sigma.plugins.dragNodes.min.js diff --git a/fedn/fedn/clients/reducer/static/dist/js/plugins/sigma.plugins.filter.min.js b/fedn/fedn/network/dashboard/static/dist/js/plugins/sigma.plugins.filter.min.js similarity index 100% rename from fedn/fedn/clients/reducer/static/dist/js/plugins/sigma.plugins.filter.min.js rename to fedn/fedn/network/dashboard/static/dist/js/plugins/sigma.plugins.filter.min.js diff --git 
a/fedn/fedn/clients/reducer/static/dist/js/plugins/sigma.plugins.neighborhoods.min.js b/fedn/fedn/network/dashboard/static/dist/js/plugins/sigma.plugins.neighborhoods.min.js similarity index 100% rename from fedn/fedn/clients/reducer/static/dist/js/plugins/sigma.plugins.neighborhoods.min.js rename to fedn/fedn/network/dashboard/static/dist/js/plugins/sigma.plugins.neighborhoods.min.js diff --git a/fedn/fedn/clients/reducer/static/dist/js/plugins/sigma.plugins.relativeSize.min.js b/fedn/fedn/network/dashboard/static/dist/js/plugins/sigma.plugins.relativeSize.min.js similarity index 100% rename from fedn/fedn/clients/reducer/static/dist/js/plugins/sigma.plugins.relativeSize.min.js rename to fedn/fedn/network/dashboard/static/dist/js/plugins/sigma.plugins.relativeSize.min.js diff --git a/fedn/fedn/clients/reducer/static/dist/js/plugins/sigma.renderers.customEdgeShapes.min.js b/fedn/fedn/network/dashboard/static/dist/js/plugins/sigma.renderers.customEdgeShapes.min.js similarity index 100% rename from fedn/fedn/clients/reducer/static/dist/js/plugins/sigma.renderers.customEdgeShapes.min.js rename to fedn/fedn/network/dashboard/static/dist/js/plugins/sigma.renderers.customEdgeShapes.min.js diff --git a/fedn/fedn/clients/reducer/static/dist/js/plugins/sigma.renderers.customShapes.min.js b/fedn/fedn/network/dashboard/static/dist/js/plugins/sigma.renderers.customShapes.min.js similarity index 100% rename from fedn/fedn/clients/reducer/static/dist/js/plugins/sigma.renderers.customShapes.min.js rename to fedn/fedn/network/dashboard/static/dist/js/plugins/sigma.renderers.customShapes.min.js diff --git a/fedn/fedn/clients/reducer/static/dist/js/plugins/sigma.renderers.edgeDots.min.js b/fedn/fedn/network/dashboard/static/dist/js/plugins/sigma.renderers.edgeDots.min.js similarity index 100% rename from fedn/fedn/clients/reducer/static/dist/js/plugins/sigma.renderers.edgeDots.min.js rename to fedn/fedn/network/dashboard/static/dist/js/plugins/sigma.renderers.edgeDots.min.js diff 
--git a/fedn/fedn/clients/reducer/static/dist/js/plugins/sigma.renderers.edgeLabels.min.js b/fedn/fedn/network/dashboard/static/dist/js/plugins/sigma.renderers.edgeLabels.min.js similarity index 100% rename from fedn/fedn/clients/reducer/static/dist/js/plugins/sigma.renderers.edgeLabels.min.js rename to fedn/fedn/network/dashboard/static/dist/js/plugins/sigma.renderers.edgeLabels.min.js diff --git a/fedn/fedn/clients/reducer/static/dist/js/plugins/sigma.renderers.parallelEdges.min.js b/fedn/fedn/network/dashboard/static/dist/js/plugins/sigma.renderers.parallelEdges.min.js similarity index 100% rename from fedn/fedn/clients/reducer/static/dist/js/plugins/sigma.renderers.parallelEdges.min.js rename to fedn/fedn/network/dashboard/static/dist/js/plugins/sigma.renderers.parallelEdges.min.js diff --git a/fedn/fedn/clients/reducer/static/dist/js/plugins/sigma.renderers.snapshot.min.js b/fedn/fedn/network/dashboard/static/dist/js/plugins/sigma.renderers.snapshot.min.js similarity index 100% rename from fedn/fedn/clients/reducer/static/dist/js/plugins/sigma.renderers.snapshot.min.js rename to fedn/fedn/network/dashboard/static/dist/js/plugins/sigma.renderers.snapshot.min.js diff --git a/fedn/fedn/clients/reducer/static/dist/js/plugins/sigma.statistics.HITS.min.js b/fedn/fedn/network/dashboard/static/dist/js/plugins/sigma.statistics.HITS.min.js similarity index 100% rename from fedn/fedn/clients/reducer/static/dist/js/plugins/sigma.statistics.HITS.min.js rename to fedn/fedn/network/dashboard/static/dist/js/plugins/sigma.statistics.HITS.min.js diff --git a/fedn/fedn/clients/reducer/static/dist/js/settings.js b/fedn/fedn/network/dashboard/static/dist/js/settings.js similarity index 100% rename from fedn/fedn/clients/reducer/static/dist/js/settings.js rename to fedn/fedn/network/dashboard/static/dist/js/settings.js diff --git a/fedn/fedn/clients/reducer/static/dist/js/sigma.min.js b/fedn/fedn/network/dashboard/static/dist/js/sigma.min.js similarity index 100% rename from 
fedn/fedn/clients/reducer/static/dist/js/sigma.min.js rename to fedn/fedn/network/dashboard/static/dist/js/sigma.min.js diff --git a/fedn/fedn/network/dashboard/templates/context.html b/fedn/fedn/network/dashboard/templates/context.html new file mode 100644 index 000000000..8f392082a --- /dev/null +++ b/fedn/fedn/network/dashboard/templates/context.html @@ -0,0 +1,34 @@ +{% extends "index.html" %} + +{% block content %} +
+
+
Upload and set compute package
+
{{ message }}
+
+
+
+
+ +
+ +
+ + +
+ +
+
+ +
+
+ +
+
+
+ + +{% endblock %} \ No newline at end of file diff --git a/fedn/fedn/clients/reducer/templates/dashboard.html b/fedn/fedn/network/dashboard/templates/dashboard.html similarity index 100% rename from fedn/fedn/clients/reducer/templates/dashboard.html rename to fedn/fedn/network/dashboard/templates/dashboard.html diff --git a/fedn/fedn/clients/reducer/templates/eula.html b/fedn/fedn/network/dashboard/templates/eula.html similarity index 100% rename from fedn/fedn/clients/reducer/templates/eula.html rename to fedn/fedn/network/dashboard/templates/eula.html diff --git a/fedn/fedn/network/dashboard/templates/events.html b/fedn/fedn/network/dashboard/templates/events.html new file mode 100644 index 000000000..1fb5fac74 --- /dev/null +++ b/fedn/fedn/network/dashboard/templates/events.html @@ -0,0 +1,46 @@ +{% extends "index.html" %} + +{% block content %} + + +
+
+
Events
+
+
+ + + + +
+ +
+
+ + +{% endblock %} \ No newline at end of file diff --git a/fedn/fedn/network/dashboard/templates/index.html b/fedn/fedn/network/dashboard/templates/index.html new file mode 100644 index 000000000..4ac2d182b --- /dev/null +++ b/fedn/fedn/network/dashboard/templates/index.html @@ -0,0 +1,386 @@ + + + + + + + + + + {% if refresh %} + + {% endif %} + + FEDn Reducer + + + + + + + + + + + + + + + + +
+ +
+ + +
+
+

+
+
+ {% if message %} + {% if message_type == 'WARNING' %} + +
+
+ + +
+
+ + + + + + \ No newline at end of file diff --git a/fedn/fedn/clients/reducer/templates/models.html b/fedn/fedn/network/dashboard/templates/models.html similarity index 80% rename from fedn/fedn/clients/reducer/templates/models.html rename to fedn/fedn/network/dashboard/templates/models.html index 0bf9efce7..2dfa4eb6b 100644 --- a/fedn/fedn/clients/reducer/templates/models.html +++ b/fedn/fedn/network/dashboard/templates/models.html @@ -46,21 +46,11 @@
Box plot showing the model validation distr
-
Models
+
Model trail
{{ message }}
- diff --git a/fedn/fedn/clients/reducer/templates/network.html b/fedn/fedn/network/dashboard/templates/network.html similarity index 100% rename from fedn/fedn/clients/reducer/templates/network.html rename to fedn/fedn/network/dashboard/templates/network.html diff --git a/fedn/fedn/clients/reducer/templates/setup.html b/fedn/fedn/network/dashboard/templates/setup.html similarity index 100% rename from fedn/fedn/clients/reducer/templates/setup.html rename to fedn/fedn/network/dashboard/templates/setup.html diff --git a/fedn/fedn/clients/reducer/templates/setup_model.html b/fedn/fedn/network/dashboard/templates/setup_model.html similarity index 100% rename from fedn/fedn/clients/reducer/templates/setup_model.html rename to fedn/fedn/network/dashboard/templates/setup_model.html diff --git a/fedn/fedn/network/loadbalancer/__init__.py b/fedn/fedn/network/loadbalancer/__init__.py new file mode 100644 index 000000000..d0e44bf3c --- /dev/null +++ b/fedn/fedn/network/loadbalancer/__init__.py @@ -0,0 +1 @@ +""" The loadbalancer package is responsible for loadbalancing the clients to the combiners. """ diff --git a/fedn/fedn/network/loadbalancer/firstavailable.py b/fedn/fedn/network/loadbalancer/firstavailable.py new file mode 100644 index 000000000..9d44d3fbd --- /dev/null +++ b/fedn/fedn/network/loadbalancer/firstavailable.py @@ -0,0 +1,20 @@ +from fedn.network.loadbalancer.loadbalancerbase import LoadBalancerBase + + +class LeastPacked(LoadBalancerBase): + """ Load balancer that selects the first available combiner. + + :param network: A handle to the network. + :type network: class: `fedn.network.api.network.Network` + """ + + def __init__(self, network): + super().__init__(network) + + def find_combiner(self): + """ Find the first available combiner. 
""" + + for combiner in self.network.get_combiners(): + if combiner.allowing_clients(): + return combiner + return None diff --git a/fedn/fedn/network/loadbalancer/leastpacked.py b/fedn/fedn/network/loadbalancer/leastpacked.py new file mode 100644 index 000000000..9e4aaba0d --- /dev/null +++ b/fedn/fedn/network/loadbalancer/leastpacked.py @@ -0,0 +1,36 @@ +from fedn.network.combiner.interfaces import CombinerUnavailableError +from fedn.network.loadbalancer.loadbalancerbase import LoadBalancerBase + + +class LeastPacked(LoadBalancerBase): + """ Load balancer that selects the combiner with the least number of attached clients. + + :param network: A handle to the network. + :type network: class: `fedn.network.api.network.Network` + """ + + def __init__(self, network): + super().__init__(network) + + def find_combiner(self): + """ + Find the combiner with the least number of attached clients. + + """ + min_clients = None + selected_combiner = None + + for combiner in self.network.get_combiners(): + try: + if combiner.allowing_clients(): + combiner_state = combiner.report() + if not min_clients: + min_clients = combiner_state['nr_active_clients'] + selected_combiner = combiner + elif combiner_state['nr_active_clients'] < min_clients: + min_clients = combiner_state['nr_active_clients'] + selected_combiner = combiner + except CombinerUnavailableError: + pass + + return selected_combiner diff --git a/fedn/fedn/network/loadbalancer/loadbalancerbase.py b/fedn/fedn/network/loadbalancer/loadbalancerbase.py new file mode 100644 index 000000000..ff1edfa9b --- /dev/null +++ b/fedn/fedn/network/loadbalancer/loadbalancerbase.py @@ -0,0 +1,18 @@ +from abc import ABC, abstractmethod + + +class LoadBalancerBase(ABC): + """ Abstract base class for load balancers. + + :param network: A handle to the network. 
+ :type network: class: `fedn.network.api.network.Network` + """ + + def __init__(self, network): + """ """ + self.network = network + + @abstractmethod + def find_combiner(self): + """ Find a combiner to connect to. """ + pass diff --git a/fedn/fedn/reducer.py b/fedn/fedn/network/reducer.py similarity index 79% rename from fedn/fedn/reducer.py rename to fedn/fedn/network/reducer.py index 271207854..d61186864 100644 --- a/fedn/fedn/reducer.py +++ b/fedn/fedn/network/reducer.py @@ -4,10 +4,10 @@ import time from datetime import datetime -from fedn.clients.reducer.control import ReducerControl -from fedn.clients.reducer.restservice import ReducerRestService -from fedn.clients.reducer.state import ReducerStateToString from fedn.common.security.certificatemanager import CertificateManager +from fedn.network.controller.control import Control +from fedn.network.dashboard.restservice import ReducerRestService +from fedn.network.state import ReducerStateToString VALID_NAME_REGEX = '^[a-zA-Z0-9_-]*$' @@ -23,16 +23,12 @@ class MissingReducerConfiguration(Exception): class Reducer: """ A class used to instantiate the Reducer service. - Start Reducer services. + :param statestore: The backend statestore object. + :type statestore: :class:`fedn.network.statestore.statestorebase.StateStoreBase` """ def __init__(self, statestore): - """ - Parameters - ---------- - statestore: dict - The backend statestore object. - """ + """ Constructor""" self.statestore = statestore config = self.statestore.get_reducer() @@ -40,7 +36,6 @@ def __init__(self, statestore): print("REDUCER: Failed to retrive Reducer config, exiting.") raise MissingReducerConfiguration() - print(config, flush=True) # Validate reducer name match = re.search(VALID_NAME_REGEX, config['name']) if not match: @@ -50,16 +45,15 @@ def __init__(self, statestore): # The certificate manager is a utility that generates (self-signed) certificates. 
self.certificate_manager = CertificateManager(os.getcwd() + "/certs/") - self.control = ReducerControl(self.statestore) + self.control = Control(self.statestore) self.rest = ReducerRestService( - config, self.control, self.certificate_manager) + config, self.control, self.statestore, self.certificate_manager) def run(self): """Start REST service and control loop.""" threading.Thread(target=self.control_loop, daemon=True).start() - self.rest.run() def control_loop(self): @@ -77,11 +71,10 @@ def control_loop(self): "Reducer in state {} for {} seconds. Entering {} state".format(ReducerStateToString(old_state), delta.seconds, ReducerStateToString( - self.control.state())), + self.control.state())), flush=True) t1 = datetime.now() old_state = self.control.state() - self.control.monitor() except (KeyboardInterrupt, SystemExit): print("Exiting..", flush=True) diff --git a/fedn/fedn/clients/reducer/state.py b/fedn/fedn/network/state.py similarity index 64% rename from fedn/fedn/clients/reducer/state.py rename to fedn/fedn/network/state.py index ab1e33e0f..9d18bc924 100644 --- a/fedn/fedn/clients/reducer/state.py +++ b/fedn/fedn/network/state.py @@ -2,6 +2,7 @@ class ReducerState(Enum): + """ Enum for representing the state of a reducer.""" setup = 1 idle = 2 instructing = 3 @@ -9,10 +10,12 @@ class ReducerState(Enum): def ReducerStateToString(state): - """ + """ Convert ReducerState to string. - :param state: - :return: + :param state: The state. + :type state: :class:`fedn.network.state.ReducerState` + :return: The state as string. + :rtype: str """ if state == ReducerState.setup: return "setup" @@ -27,10 +30,12 @@ def ReducerStateToString(state): def StringToReducerState(state): - """ + """ Convert string to ReducerState. - :param state: - :return: + :param state: The state as string. + :type state: str + :return: The state. 
+ :rtype: :class:`fedn.network.state.ReducerState` """ if state == "setup": return ReducerState.setup diff --git a/fedn/fedn/clients/reducer/__init__.py b/fedn/fedn/network/statestore/__init__.py similarity index 100% rename from fedn/fedn/clients/reducer/__init__.py rename to fedn/fedn/network/statestore/__init__.py diff --git a/fedn/fedn/network/statestore/mongostatestore.py b/fedn/fedn/network/statestore/mongostatestore.py new file mode 100644 index 000000000..19d514f59 --- /dev/null +++ b/fedn/fedn/network/statestore/mongostatestore.py @@ -0,0 +1,709 @@ +import copy +from datetime import datetime + +import pymongo + +from fedn.common.storage.db.mongo import connect_to_mongodb +from fedn.network.state import ReducerStateToString, StringToReducerState + +from .statestorebase import StateStoreBase + + +class MongoStateStore(StateStoreBase): + """Statestore implementation using MongoDB. + + :param network_id: The network id. + :type network_id: str + :param config: The statestore configuration. + :type config: dict + :param defaults: The default configuration. 
Given by config/settings-reducer.yaml.template + :type defaults: dict + """ + + def __init__(self, network_id, config, model_storage_config): + """Constructor.""" + self.__inited = False + try: + self.config = config + self.network_id = network_id + self.mdb = connect_to_mongodb(self.config, self.network_id) + + # FEDn network + self.network = self.mdb["network"] + self.reducer = self.network["reducer"] + self.combiners = self.network["combiners"] + self.clients = self.network["clients"] + self.storage = self.network["storage"] + + # Control + self.control = self.mdb["control"] + self.package = self.control["package"] + self.state = self.control["state"] + self.model = self.control["model"] + self.sessions = self.control["sessions"] + self.rounds = self.control["rounds"] + + # Logging + self.status = self.control["status"] + + self.__inited = True + except Exception as e: + print("FAILED TO CONNECT TO MONGODB, {}".format(e), flush=True) + self.state = None + self.model = None + self.control = None + self.network = None + self.combiners = None + self.clients = None + raise + + # Storage settings + self.set_storage_backend(model_storage_config) + self.__inited = True + + def is_inited(self): + """Check if the statestore is intialized. + + :return: True if initialized, else False. + :rtype: bool + """ + return self.__inited + + def get_config(self): + """Retrive the statestore config. + + :return: The statestore config. + :rtype: dict + """ + data = { + "type": "MongoDB", + "mongo_config": self.config, + "network_id": self.network_id, + } + return data + + def state(self): + """Get the current state. + + :return: The current state. + :rtype: str + """ + return StringToReducerState(self.state.find_one()["current_state"]) + + def transition(self, state): + """Transition to a new state. + + :param state: The new state. 
+ :type state: str + :return: + """ + old_state = self.state.find_one({"state": "current_state"}) + if old_state != state: + return self.state.update_one( + {"state": "current_state"}, + {"$set": {"state": ReducerStateToString(state)}}, + True, + ) + else: + print( + "Not updating state, already in {}".format( + ReducerStateToString(state) + ) + ) + + def get_sessions(self, limit=None, skip=None, sort_key="_id", sort_order=pymongo.DESCENDING): + """Get all sessions. + + :param limit: The maximum number of sessions to return. + :type limit: int + :param skip: The number of sessions to skip. + :type skip: int + :param sort_key: The key to sort by. + :type sort_key: str + :param sort_order: The sort order. + :type sort_order: pymongo.ASCENDING or pymongo.DESCENDING + :return: Dictionary of sessions in result (array of session objects) and count. + """ + + result = None + + if limit is not None and skip is not None: + limit = int(limit) + skip = int(skip) + + result = self.sessions.find().limit(limit).skip(skip).sort( + sort_key, sort_order + ) + else: + result = self.sessions.find().sort( + sort_key, sort_order + ) + + count = self.sessions.count_documents({}) + + return { + "result": result, + "count": count, + } + + def get_session(self, session_id): + """Get session with id. + + :param session_id: The session id. + :type session_id: str + :return: The session. + :rtype: ObjectID + """ + return self.sessions.find_one({"session_id": session_id}) + + def set_latest_model(self, model_id, session_id=None): + """Set the latest model id. + + :param model_id: The model id. 
+ :type model_id: str + :return: + """ + + committed_at = datetime.now() + + self.model.insert_one( + { + "key": "models", + "model": model_id, + "session_id": session_id, + "committed_at": committed_at, + } + ) + + self.model.update_one( + {"key": "current_model"}, {"$set": {"model": model_id}}, True + ) + self.model.update_one( + {"key": "model_trail"}, + { + "$push": { + "model": model_id, + "committed_at": str(committed_at), + } + }, + True, + ) + + def get_initial_model(self): + """Return model_id for the initial model in the model trail + + :return: The initial model id. None if no model is found. + :rtype: str + """ + + result = self.model.find_one( + {"key": "model_trail"}, sort=[("committed_at", pymongo.ASCENDING)] + ) + if result is None: + return None + + try: + model_id = result["model"] + if model_id == "" or model_id == " ": + return None + return model_id[0] + except (KeyError, IndexError): + return None + + def get_latest_model(self): + """Return model_id for the latest model in the model_trail + + :return: The latest model id. None if no model is found. + :rtype: str + """ + result = self.model.find_one({"key": "current_model"}) + if result is None: + return None + + try: + model_id = result["model"] + if model_id == "" or model_id == " ": + return None + return model_id + except (KeyError, IndexError): + return None + + def get_latest_round(self): + """Get the id of the most recent round. + + :return: The id of the most recent round. + :rtype: ObjectId + """ + + return self.rounds.find_one(sort=[("_id", pymongo.DESCENDING)]) + + def get_round(self, id): + """Get round with id. + + :param id: id of round to get + :type id: int + :return: round with id, reducer and combiners + :rtype: ObjectId + """ + + return self.rounds.find_one({"round_id": str(id)}) + + def get_rounds(self): + """Get all rounds. + + :return: All rounds. 
+ :rtype: ObjectId + """ + + return self.rounds.find() + + def get_validations(self, **kwargs): + """Get validations from the database. + + :param kwargs: query to filter validations + :type kwargs: dict + :return: validations matching query + :rtype: ObjectId + """ + + result = self.control.validations.find(kwargs) + return result + + def set_compute_package(self, filename): + """Set the active compute package in statestore. + + :param filename: The filename of the compute package. + :type filename: str + :return: True if successful. + :rtype: bool + """ + self.control.package.update_one( + {"key": "active"}, + { + "$set": { + "filename": filename, + "committed_at": str(datetime.now()), + } + }, + True, + ) + self.control.package.update_one( + {"key": "package_trail"}, + { + "$push": { + "filename": filename, + "committed_at": str(datetime.now()), + } + }, + True, + ) + return True + + def get_compute_package(self): + """Get the active compute package. + + :return: The active compute package. + :rtype: ObjectID + """ + ret = self.control.package.find({"key": "active"}) + try: + retcheck = ret[0] + if ( + retcheck is None or retcheck == "" or retcheck == " " + ): # ugly check for empty string + return None + return retcheck + except (KeyError, IndexError): + return None + + def set_helper(self, helper): + """Set the active helper package in statestore. + + :param helper: The name of the helper package. See helper.py for available helpers. + :type helper: str + :return: + """ + self.control.package.update_one( + {"key": "active"}, {"$set": {"helper": helper}}, True + ) + + def get_helper(self): + """Get the active helper package. + + :return: The active helper set for the package. 
+ :rtype: str + """ + ret = self.control.package.find_one({"key": "active"}) + # if local compute package used, then 'package' is None + # if not ret: + # get framework from round_config instead + # ret = self.control.config.find_one({'key': 'round_config'}) + try: + retcheck = ret["helper"] + if ( + retcheck == "" or retcheck == " " + ): # ugly check for empty string + return None + return retcheck + except (KeyError, IndexError): + return None + + def list_models( + self, + session_id=None, + limit=None, + skip=None, + sort_key="committed_at", + sort_order=pymongo.DESCENDING, + ): + """List all models in the statestore. + + :param session_id: The session id. + :type session_id: str + :param limit: The maximum number of models to return. + :type limit: int + :param skip: The number of models to skip. + :type skip: int + :return: List of models. + :rtype: list + """ + result = None + + find_option = ( + {"key": "models"} + if session_id is None + else {"key": "models", "session_id": session_id} + ) + + projection = {"_id": False, "key": False} + + if limit is not None and skip is not None: + limit = int(limit) + skip = int(skip) + + result = ( + self.model.find(find_option, projection) + .limit(limit) + .skip(skip) + .sort(sort_key, sort_order) + ) + + else: + result = self.model.find(find_option, projection).sort( + sort_key, sort_order + ) + + count = self.model.count_documents(find_option) + + return { + "result": result, + "count": count, + } + + def get_model_trail(self): + """Get the model trail. + + :return: dictionary of model_id: committed_at + :rtype: dict + """ + result = self.model.find_one({"key": "model_trail"}) + try: + if result is not None: + committed_at = result["committed_at"] + model = result["model"] + model_dictionary = dict(zip(model, committed_at)) + return model_dictionary + else: + return None + except (KeyError, IndexError): + return None + + def get_events(self, **kwargs): + """Get events from the database. 
+ + :param kwargs: query to filter events + :type kwargs: dict + :return: events matching query + :rtype: ObjectId + """ + # check if kwargs is empty + + result = None + count = None + projection = {"_id": False} + + if not kwargs: + result = self.control.status.find({}, projection).sort( + "timestamp", pymongo.DESCENDING + ) + count = self.control.status.count_documents({}) + else: + limit = kwargs.pop("limit", None) + skip = kwargs.pop("skip", None) + + if limit is not None and skip is not None: + limit = int(limit) + skip = int(skip) + result = ( + self.control.status.find(kwargs, projection) + .sort("timestamp", pymongo.DESCENDING) + .limit(limit) + .skip(skip) + ) + else: + result = self.control.status.find(kwargs, projection).sort( + "timestamp", pymongo.DESCENDING + ) + + count = self.control.status.count_documents(kwargs) + + return { + "result": result, + "count": count, + } + + def get_storage_backend(self): + """Get the storage backend. + + :return: The storage backend. + :rtype: ObjectID + """ + try: + ret = self.storage.find( + {"status": "enabled"}, projection={"_id": False} + ) + return ret[0] + except (KeyError, IndexError): + return None + + def set_storage_backend(self, config): + """Set the storage backend. + + :param config: The storage backend configuration. + :type config: dict + :return: + """ + config = copy.deepcopy(config) + config["updated_at"] = str(datetime.now()) + config["status"] = "enabled" + self.storage.update_one( + {"storage_type": config["storage_type"]}, {"$set": config}, True + ) + + def set_reducer(self, reducer_data): + """Set the reducer in the statestore. + + :param reducer_data: dictionary of reducer config. + :type reducer_data: dict + :return: + """ + reducer_data["updated_at"] = str(datetime.now()) + self.reducer.update_one( + {"name": reducer_data["name"]}, {"$set": reducer_data}, True + ) + + def get_reducer(self): + """Get reducer.config. + + return: reducer config. 
+ rtype: ObjectId + """ + try: + ret = self.reducer.find_one() + return ret + except Exception: + return None + + def get_combiner(self, name): + """Get combiner by name. + + :param name: name of combiner to get. + :type name: str + :return: The combiner. + :rtype: ObjectId + """ + try: + ret = self.combiners.find_one({"name": name}) + return ret + except Exception: + return None + + def get_combiners(self, limit=None, skip=None, sort_key="updated_at", sort_order=pymongo.DESCENDING, projection={}): + """Get all combiners. + + :param limit: The maximum number of combiners to return. + :type limit: int + :param skip: The number of combiners to skip. + :type skip: int + :param sort_key: The key to sort by. + :type sort_key: str + :param sort_order: The sort order. + :type sort_order: pymongo.ASCENDING or pymongo.DESCENDING + :param projection: The projection. + :type projection: dict + :return: Dictionary of combiners in result and count. + :rtype: dict + """ + + result = None + count = None + + try: + if limit is not None and skip is not None: + limit = int(limit) + skip = int(skip) + result = self.combiners.find({}, projection).limit(limit).skip(skip).sort(sort_key, sort_order) + else: + result = self.combiners.find({}, projection).sort(sort_key, sort_order) + + count = self.combiners.count_documents({}) + + except Exception: + return None + + return { + "result": result, + "count": count, + } + + def set_combiner(self, combiner_data): + """Set combiner in statestore. + + :param combiner_data: dictionary of combiner config + :type combiner_data: dict + :return: + """ + + combiner_data["updated_at"] = str(datetime.now()) + self.combiners.update_one( + {"name": combiner_data["name"]}, {"$set": combiner_data}, True + ) + + def delete_combiner(self, combiner): + """Delete a combiner from statestore. + + :param combiner: name of combiner to delete. 
+ :type combiner: str + :return: + """ + try: + self.combiners.delete_one({"name": combiner}) + except Exception: + print( + "WARNING, failed to delete combiner: {}".format(combiner), + flush=True, + ) + + def set_client(self, client_data): + """Set client in statestore. + + :param client_data: dictionary of client config. + :type client_data: dict + :return: + """ + client_data["updated_at"] = str(datetime.now()) + self.clients.update_one( + {"name": client_data["name"]}, {"$set": client_data}, True + ) + + def get_client(self, name): + """Get client by name. + + :param name: name of client to get. + :type name: str + :return: The client. None if not found. + :rtype: ObjectId + """ + try: + ret = self.clients.find({"key": name}) + if list(ret) == []: + return None + else: + return ret + except Exception: + return None + + def list_clients(self, limit=None, skip=None, status=None, sort_key="last_seen", sort_order=pymongo.DESCENDING): + """List all clients registered on the network. + + :param limit: The maximum number of clients to return. + :type limit: int + :param skip: The number of clients to skip. + :type skip: int + :param status: online | offline + :type status: str + :param sort_key: The key to sort by. + """ + + result = None + count = None + + try: + find = {} if status is None else {"status": status} + projection = {"_id": False, "updated_at": False} + + if limit is not None and skip is not None: + limit = int(limit) + skip = int(skip) + result = self.clients.find(find, projection).limit(limit).skip(skip).sort(sort_key, sort_order) + else: + result = self.clients.find(find, projection).sort(sort_key, sort_order) + + count = self.clients.count_documents(find) + + except Exception as e: + print("ERROR: {}".format(e), flush=True) + + return { + "result": result, + "count": count, + } + + def list_combiners_data(self, combiners, sort_key="count", sort_order=pymongo.DESCENDING): + """List all combiner data. 
+ + :param combiners: list of combiners to get data for. + :type combiners: list + :param sort_key: The key to sort by. + :type sort_key: str + :param sort_order: The sort order. + :type sort_order: pymongo.ASCENDING or pymongo.DESCENDING + :return: list of combiner data. + :rtype: list(ObjectId) + """ + + result = None + + try: + + pipeline = [ + {"$match": {"combiner": {"$in": combiners}, "status": "online"}}, + {"$group": {"_id": "$combiner", "count": {"$sum": 1}}}, + {"$sort": {sort_key: sort_order, "_id": pymongo.ASCENDING}} + ] if combiners is not None else [ + {"$group": {"_id": "$combiner", "count": {"$sum": 1}}}, + {"$sort": {sort_key: sort_order, "_id": pymongo.ASCENDING}} + ] + + result = self.clients.aggregate(pipeline) + + except Exception as e: + print("ERROR: {}".format(e), flush=True) + + return result + + def update_client_status(self, client_data, status, role): + """Set or update client status. + + :param client_data: dictionary of client config. + :type client_data: dict + :param status: status of client. + :type status: str + :param role: role of client. + :type role: str + :return: + """ + self.clients.update_one( + {"name": client_data["name"]}, + {"$set": {"status": status, "role": role}}, + ) diff --git a/fedn/fedn/network/statestore/statestorebase.py b/fedn/fedn/network/statestore/statestorebase.py new file mode 100644 index 000000000..f41e3c025 --- /dev/null +++ b/fedn/fedn/network/statestore/statestorebase.py @@ -0,0 +1,52 @@ +from abc import ABC, abstractmethod + + +class StateStoreBase(ABC): + """ + + """ + + def __init__(self): + pass + + @abstractmethod + def state(self): + """ Return the current state of the statestore. + """ + pass + + @abstractmethod + def transition(self, state): + """ Transition the statestore to a new state. + + :param state: The new state. + :type state: str + """ + pass + + @abstractmethod + def set_latest_model(self, model_id): + """ Set the latest model id in the statestore. 
+ + :param model_id: The model id. + :type model_id: str + """ + pass + + @abstractmethod + def get_latest_model(self): + """ Get the latest model id from the statestore. + + :return: The model object. + :rtype: ObjectId + """ + pass + + @abstractmethod + def is_inited(self): + """ Check if the statestore is initialized. + + :return: True if initialized, else False. + :rtype: bool + """ + pass diff --git a/fedn/fedn/tests/test_reducer_service.py b/fedn/fedn/tests/test_reducer_service.py index fc5ca8d9b..22e9d54af 100644 --- a/fedn/fedn/tests/test_reducer_service.py +++ b/fedn/fedn/tests/test_reducer_service.py @@ -1,7 +1,7 @@ import unittest from unittest.mock import patch -from fedn.clients.reducer.restservice import ReducerRestService +from fedn.network.dashboard.restservice import ReducerRestService class TestInit(unittest.TestCase): @@ -78,15 +78,15 @@ def test_check_compute_package(self): def test_check_initial_model(self): - self.restservice.control.get_latest_model.return_value = 'model-uid' + self.restservice.statestore.get_latest_model.return_value = 'model-uid' retval = self.restservice.check_initial_model() self.assertTrue(retval) - self.restservice.control.get_latest_model.return_value = None + self.restservice.statestore.get_latest_model.return_value = None retval = self.restservice.check_initial_model() self.assertFalse(retval) - self.restservice.control.get_latest_model.return_value = '' + self.restservice.statestore.get_latest_model.return_value = '' retval = self.restservice.check_initial_model() self.assertFalse(retval) diff --git a/fedn/fedn/utils/__init__.py b/fedn/fedn/utils/__init__.py index e69de29bb..dc63e07e9 100644 --- a/fedn/fedn/utils/__init__.py +++ b/fedn/fedn/utils/__init__.py @@ -0,0 +1,3 @@ +""" The utils package is responsible for providing utility functions for the FEDn framework. Such as logging, checksums and other model helper functions to aggregate models. 
+THe helper functions is there to support aggregating various models from different ML frameworks, such as Tensorflow, PyTorch and Keras.""" +# flake8: noqa diff --git a/fedn/fedn/utils/checksum.py b/fedn/fedn/utils/checksum.py index 99fb7f840..3c7bbd3ec 100644 --- a/fedn/fedn/utils/checksum.py +++ b/fedn/fedn/utils/checksum.py @@ -2,10 +2,12 @@ def sha(fname): - """ + """ Calculate the sha256 checksum of a file. Used for computing checksums of compute packages. - :param fname: - :return: + :param fname: The file path. + :type fname: str + :return: The sha256 checksum. + :rtype: :py:class:`hashlib.sha256` """ hash = hashlib.sha256() with open(fname, "rb") as f: diff --git a/fedn/fedn/utils/dispatcher.py b/fedn/fedn/utils/dispatcher.py index 986eb25bd..f4b2ee44a 100644 --- a/fedn/fedn/utils/dispatcher.py +++ b/fedn/fedn/utils/dispatcher.py @@ -7,18 +7,25 @@ class Dispatcher: - """ + """ Dispatcher class for compute packages. + :param config: The configuration. + :type config: dict + :param dir: The directory to dispatch to. + :type dir: str """ def __init__(self, config, dir): + """ Initialize the dispatcher.""" self.config = config self.project_dir = dir def run_cmd(self, cmd_type): - """ + """ Run a command. - :param cmd_type: + :param cmd_type: The command type. + :type cmd_type: str + :return: """ try: cmdsandargs = cmd_type.split(' ') diff --git a/fedn/fedn/utils/helpers.py b/fedn/fedn/utils/helpers.py index ac5a588aa..52379fd77 100644 --- a/fedn/fedn/utils/helpers.py +++ b/fedn/fedn/utils/helpers.py @@ -1,63 +1,43 @@ -from abc import ABC, abstractmethod +import importlib +import json +PLUGIN_PATH = "fedn.utils.plugins.{}" -class HelperBase(ABC): - """ Abstract class defining helpers. """ - def __init__(self): - """ """ - - @abstractmethod - def increment_average(self, model, model_next, n): - """ Compute one increment of incremental averaging. - n: the iteration index 1...N in the sequence. 
- """ - pass +def get_helper(helper_module_name): + """ Return an instance of the helper class. - @abstractmethod - def save_model(self, model, path): - """ - Serialize the model to file on disk on path. - The serialized model must be a single binary object. - """ - pass + :param helper_module_name: The name of the helper plugin module. + :type helper_module_name: str + :return: A helper instance. + :rtype: class: `fedn.utils.helpers.HelperBase` + """ + helper_plugin = PLUGIN_PATH.format(helper_module_name) + helper = importlib.import_module(helper_plugin) + return helper.Helper() - @abstractmethod - def load_model(self, path): - """ Load the model save with save_model from disk on path. """ - pass - @abstractmethod - def serialize_model_to_BytesIO(self, model): - """ Serialize a model to a BytesIO buffered object. """ - pass +def save_metadata(metadata, filename): + """ Save metadata to file. - @abstractmethod - def load_model_from_BytesIO(self, model_bytesio): - """ Load a model from a BytesIO buffered object. """ - pass + :param metadata: The metadata to save. + :type metadata: dict + :param filename: The name of the file to save to. + :type filename: str + """ + with open(filename+'-metadata', 'w') as outfile: + json.dump(metadata, outfile) - @abstractmethod - def get_tmp_path(self): - """ Return a temporary output path compatible with save_model, load_model. """ - pass +# Save metric data to file -def get_helper(helper_type): - """ Return an instance of the helper class. +def save_metrics(metrics, filename): + """ Save metrics to file. - :param helper_type (str): The helper type ('keras','pytorch') - :return: + :param metrics: The metrics to save. + :type metrics: dict + :param filename: The name of the file to save to. 
+ :type filename: str """ - if helper_type == 'numpyarray': - # TODO: refactor cyclical import to avoid this ugly line - """ noqa """; from fedn.utils.numpyarrayhelper import NumpyArrayHelper # autopep8: off # noqa: E702 - return NumpyArrayHelper() - elif helper_type == 'keras': - """ noqa """; from fedn.utils.kerashelper import KerasHelper # autopep8: off # noqa: E702 - return KerasHelper() - elif helper_type == 'pytorch': - """ noqa """; from fedn.utils.pytorchhelper import PytorchHelper # autopep8: off # noqa: E702 - return PytorchHelper() - else: - return None + with open(filename, 'w') as outfile: + json.dump(metrics, outfile) diff --git a/fedn/fedn/utils/kerashelper.py b/fedn/fedn/utils/kerashelper.py deleted file mode 100644 index be081e45e..000000000 --- a/fedn/fedn/utils/kerashelper.py +++ /dev/null @@ -1,106 +0,0 @@ -import os -import tempfile -from io import BytesIO - -import numpy as np - -from .helpers import HelperBase - - -class KerasHelper(HelperBase): - """ FEDn helper class for keras.Sequential. """ - - def average_weights(self, weights): - """ Average weights of Keras Sequential models. """ - - avg_w = [] - for i in range(len(weights[0])): - lay_l = np.array([w[i] for w in weights]) - weight_l_avg = np.mean(lay_l, 0) - avg_w.append(weight_l_avg) - - return avg_w - - def increment_average(self, weights, weights_next, n): - """ Update an incremental average. """ - w_prev = weights - w_next = weights_next - w = np.add(w_prev, (np.array(w_next) - np.array(w_prev)) / n) - return w - - def set_weights(self, weights_, weights): - """ - - :param weights_: - :param weights: - """ - weights_ = weights # noqa F841 - - def get_weights(self, weights): - """ - - :param weights: - :return: - """ - return weights - - def get_tmp_path(self): - """ Return a temporary output path compatible with save_model, load_model. 
""" - fd, path = tempfile.mkstemp(suffix='.npz') - os.close(fd) - return path - - def save_model(self, weights, path=None): - """ - - :param weights: - :param path: - :return: - """ - if not path: - path = self.get_tmp_path() - - weights_dict = {} - for i, w in enumerate(weights): - weights_dict[str(i)] = w - - np.savez_compressed(path, **weights_dict) - - return path - - def load_model(self, path="weights.npz"): - """ - - :param path: - :return: - """ - a = np.load(path) - weights = [] - for i in range(len(a.files)): - weights.append(a[str(i)]) - return weights - - def load_model_from_BytesIO(self, model_bytesio): - """ Load a model from a BytesIO object. """ - path = self.get_tmp_path() - with open(path, 'wb') as fh: - fh.write(model_bytesio) - fh.flush() - model = self.load_model(path) - os.unlink(path) - return model - - def serialize_model_to_BytesIO(self, model): - """ - - :param model: - :return: - """ - outfile_name = self.save_model(model) - - a = BytesIO() - a.seek(0, 0) - with open(outfile_name, 'rb') as f: - a.write(f.read()) - os.unlink(outfile_name) - return a diff --git a/fedn/fedn/utils/logger.py b/fedn/fedn/utils/logger.py index 08c2c94e1..563012996 100644 --- a/fedn/fedn/utils/logger.py +++ b/fedn/fedn/utils/logger.py @@ -3,11 +3,18 @@ class Logger: - """ + """ Logger class for Fedn. + :param log_level: The log level. + :type log_level: int + :param to_file: The name of the file to log to. + :type to_file: str + :param file_path: The path to the log file. 
+ :type file_path: str """ def __init__(self, log_level=logging.DEBUG, to_file='', file_path=os.getcwd()): + """ Initialize the logger.""" root = logging.getLogger() root.setLevel(log_level) diff --git a/fedn/fedn/utils/numpyarrayhelper.py b/fedn/fedn/utils/numpyarrayhelper.py deleted file mode 100644 index fee1ac42b..000000000 --- a/fedn/fedn/utils/numpyarrayhelper.py +++ /dev/null @@ -1,67 +0,0 @@ -import os -import tempfile -from io import BytesIO - -import numpy as np - -from .helpers import HelperBase - - -class NumpyArrayHelper(HelperBase): - """ FEDn helper class for numpy arrays. """ - - def increment_average(self, model, model_next, n): - """ Update an incremental average. """ - return np.add(model, (model_next - model) / n) - - def save_model(self, model, path=None): - """ - - :param model: - :param path: - :return: - """ - if not path: - _, path = tempfile.mkstemp() - np.savetxt(path, model) - return path - - def load_model(self, path): - """ - - :param path: - :return: - """ - model = np.loadtxt(path) - return model - - def serialize_model_to_BytesIO(self, model): - """ - - :param model: - :return: - """ - outfile_name = self.save_model(model) - - a = BytesIO() - a.seek(0, 0) - with open(outfile_name, 'rb') as f: - a.write(f.read()) - os.unlink(outfile_name) - return a - - def get_tmp_path(self): - """ Return a temporary output path compatible with save_model, load_model. """ - fd, path = tempfile.mkstemp() - os.close(fd) - return path - - def load_model_from_BytesIO(self, model_bytesio): - """ Load a model from a BytesIO object. 
""" - path = self.get_tmp_path() - with open(path, 'wb') as fh: - fh.write(model_bytesio) - fh.flush() - model = np.loadtxt(path) - os.unlink(path) - return model diff --git a/fedn/fedn/utils/plugins/__init__.py b/fedn/fedn/utils/plugins/__init__.py new file mode 100644 index 000000000..162a2d351 --- /dev/null +++ b/fedn/fedn/utils/plugins/__init__.py @@ -0,0 +1,3 @@ +""" The plugins package is responsible for loading model helper functions supporting different ML frameworks. The :class:`fedn.utils.plugins.helperbase.HelperBase` is +an abstract class which user can implement their own helper functions to support different ML frameworks. """ +# flake8: noqa diff --git a/fedn/fedn/utils/plugins/helperbase.py b/fedn/fedn/utils/plugins/helperbase.py new file mode 100644 index 000000000..6c9c147e6 --- /dev/null +++ b/fedn/fedn/utils/plugins/helperbase.py @@ -0,0 +1,52 @@ +import os +import tempfile +from abc import ABC, abstractmethod + + +class HelperBase(ABC): + """ Abstract class defining helpers. """ + + def __init__(self): + """ Initialize helper. """ + + self.name = self.__class__.__name__ + + @abstractmethod + def increment_average(self, model, model_next, a, W): + """ Compute one increment of incremental weighted averaging. + + :param model: Current model weights in array-like format. + :param model_next: New model weights in array-like format. + :param a: Number of examples in new model. + :param W: Total number of examples. + :return: Incremental weighted average of model weights. + """ + pass + + @abstractmethod + def save(self, model, path): + """Serialize weights to file. The serialized model must be a single binary object. + + :param model: Weights in array-like format. + :param path: Path to file. + + """ + pass + + @abstractmethod + def load(self, fh): + """ Load weights from file or filelike. + + :param fh: file path, filehandle, filelike. + :return: Weights in array-like format. 
+ """ + pass + + def get_tmp_path(self): + """ Return a temporary output path compatible with save_model, load_model. + + :return: Path to file. + """ + fd, path = tempfile.mkstemp(suffix='.npz') + os.close(fd) + return path diff --git a/fedn/fedn/utils/plugins/kerashelper.py b/fedn/fedn/utils/plugins/kerashelper.py new file mode 100644 index 000000000..195858c76 --- /dev/null +++ b/fedn/fedn/utils/plugins/kerashelper.py @@ -0,0 +1,85 @@ +import numpy as np + +from .helperbase import HelperBase + + +class Helper(HelperBase): + """ FEDn helper class for keras.Sequential. """ + + def __init__(self): + """ Initialize helper. """ + self.name = "kerashelper" + super().__init__() + + # function to calculate an incremental weighted average of the weights + def increment_average(self, model, model_next, num_examples, total_examples): + """ Incremental weighted average of model weights. + + :param model: Current model weights. + :type model: list of numpy arrays. + :param model_next: New model weights. + :type model_next: list of numpy arrays. + :param num_examples: Number of examples in new model. + :type num_examples: int + :param total_examples: Total number of examples. + :type total_examples: int + :return: Incremental weighted average of model weights. + :rtype: list of numpy arrays. + """ + # Incremental weighted average + w = num_examples / total_examples + weights = [] + for i in range(len(model)): + weights.append(w * model[i] + (1 - w) * model_next[i]) + + return weights + + # function to calculate an incremental weighted average of the weights using numpy.add + def increment_average_add(self, model, model_next, num_examples, total_examples): + """ Incremental weighted average of model weights. + + :param model: Current model weights. + :type model: list of numpy arrays. + :param model_next: New model weights. + :type model_next: list of numpy arrays. + :param num_examples: Number of examples in new model. 
+ :type num_examples: int + :param total_examples: Total number of examples. + :type total_examples: int + :return: Incremental weighted average of model weights. + :rtype: list of numpy arrays. + """ + # Incremental weighted average + w = np.add(model, num_examples*(np.array(model_next) - np.array(model)) / total_examples) + return w + + def save(self, weights, path=None): + """ Serialize weights to file. The serialized model must be a single binary object. + + :param weights: List of weights in numpy format. + :param path: Path to file. + :return: Path to file. + """ + if not path: + path = self.get_tmp_path() + + weights_dict = {} + for i, w in enumerate(weights): + weights_dict[str(i)] = w + + np.savez_compressed(path, **weights_dict) + + return path + + def load(self, fh): + """ Load weights from file or filelike. + + :param fh: file path, filehandle, filelike. + :return: List of weights in numpy format. + """ + a = np.load(fh) + + weights = [] + for i in range(len(a.files)): + weights.append(a[str(i)]) + return weights diff --git a/fedn/fedn/utils/plugins/numpyarrayhelper.py b/fedn/fedn/utils/plugins/numpyarrayhelper.py new file mode 100644 index 000000000..21bf979b8 --- /dev/null +++ b/fedn/fedn/utils/plugins/numpyarrayhelper.py @@ -0,0 +1,49 @@ +import tempfile + +import numpy as np + +from .helperbase import HelperBase + + +class Helper(HelperBase): + """ FEDn helper class for numpy arrays. """ + + def increment_average(self, model, model_next, n): + """ Update an incremental average. + + :param model: Current model weights. + :type model: numpy array. + :param model_next: New model weights. + :type model_next: numpy array. + :param n: Number of examples in new model. + :type n: int + :return: Incremental weighted average of model weights. + :rtype: :class:`numpy.array` + """ + return np.add(model, (model_next - model) / n) + + def save(self, model, path=None): + """Serialize weights/parameters to file. 
+ + :param model: Weights/parameters in numpy array format. + :type model: numpy array. + :param path: Path to file. + :type path: str + :return: Path to file. + :rtype: str + """ + if not path: + _, path = tempfile.mkstemp() + np.savetxt(path, model) + return path + + def load(self, path): + """Load weights/parameters from file or filelike. + + :param path: Path to file. + :type path: str + :return: Weights/parameters in numpy array format. + :rtype: :class:`numpy.array` + """ + model = np.loadtxt(path) + return model diff --git a/fedn/fedn/utils/plugins/pytorchhelper.py b/fedn/fedn/utils/plugins/pytorchhelper.py new file mode 100644 index 000000000..17b01200c --- /dev/null +++ b/fedn/fedn/utils/plugins/pytorchhelper.py @@ -0,0 +1,63 @@ +from collections import OrderedDict + +import numpy as np + +from .helperbase import HelperBase + + +class Helper(HelperBase): + """ FEDn helper class for pytorch. """ + + def __init__(self): + """ Initialize helper. """ + super().__init__() + self.name = "pytorchhelper" + + def increment_average(self, model, model_next, num_examples, total_examples): + """ Update a weighted incremental average of model weights. + + :param model: Current model weights with keys from torch state_dict. + :type model: OrderedDict + :param model_next: New model weights with keys from torch state_dict. + :type model_next: OrderedDict + :param num_examples: Number of examples in new model. + :type num_examples: int + :param total_examples: Total number of examples. + :type total_examples: int + :return: Incremental weighted average of model weights. + :rtype: OrderedDict + """ + w = OrderedDict() + for name in model.keys(): + tensorDiff = model_next[name] - model[name] + w[name] = model[name] + num_examples*tensorDiff / total_examples + return w + + def save(self, model, path=None): + """ Serialize weights to file. The serialized model must be a single binary object. + + :param model: Weights of model with keys from torch state_dict. 
+ :type model: OrderedDict + :param path: File path. + :type path: str + :return: Path to file (generated as tmp file unless path is set). + :rtype: str + """ + if not path: + path = self.get_tmp_path() + np.savez_compressed(path, **model) + return path + + def load(self, path): + """ Load weights from file or filelike. + + :param path: file path, filehandle, filelike. + :type path: str + :return: Weights of model with keys from torch state_dict. + :rtype: OrderedDict + """ + a = np.load(path) + weights_np = OrderedDict() + for i in a.files: + weights_np[i] = a[i] + return weights_np diff --git a/fedn/fedn/utils/plugins/tests/test_kerashelper.py b/fedn/fedn/utils/plugins/tests/test_kerashelper.py new file mode 100644 index 000000000..5e392b47c --- /dev/null +++ b/fedn/fedn/utils/plugins/tests/test_kerashelper.py @@ -0,0 +1,94 @@ +import os +import unittest + +import numpy as np + +from fedn.utils.plugins.kerashelper import Helper as KerasHelper + + +class TestKerasHelper(unittest.TestCase): + """Test the KerasHelper class.""" + + def setUp(self): + self.helper = KerasHelper() + + def test_increment_average(self): + """Test the increment_average method.""" + # Test with a list + model = [1, 2, 3] + model_next = [4, 5, 6] + a = 10 + W = 20 + + result = self.helper.increment_average(model, model_next, a, W) + + self.assertEqual(result, [2.5, 3.5, 4.5]) + + # Test with a numpy array + model = np.array([1, 2, 3]) + model_next = np.array([4, 5, 6]) + + result = self.helper.increment_average(model, model_next, a, W) + + np.testing.assert_array_equal(result, np.array([2.5, 3.5, 4.5])) + + # test with a list of numpy arrays + model = [np.array([1, 2, 3])] + model_next = [np.array([4, 5, 6])] + + result = self.helper.increment_average(model, model_next, a, W) + + np.testing.assert_array_equal(result, np.array([[2.5, 3.5, 4.5]])) + + def test_increment_average_add(self): + """Test the increment_average_add method.""" + model = [1, 2, 3] + model_next = [4, 5, 6] + a = 10 + W = 
20 + + result = self.helper.increment_average_add(model, model_next, a, W) + + np.testing.assert_array_equal(result, np.array([2.5, 3.5, 4.5])) + + # Test with a numpy array + model = np.array([1, 2, 3]) + model_next = np.array([4, 5, 6]) + + result = self.helper.increment_average_add(model, model_next, a, W) + + np.testing.assert_array_equal(result, np.array([2.5, 3.5, 4.5])) + + # test with a list of numpy arrays + model = [np.array([1, 2, 3])] + model_next = [np.array([4, 5, 6])] + + result = self.helper.increment_average_add(model, model_next, a, W) + + np.testing.assert_array_equal(result, np.array([[2.5, 3.5, 4.5]])) + + def test_save(self): + """Test the save method.""" + weights = [1, 2, 3] + + result = self.helper.save(weights, 'test.npz') + + self.assertEqual(result, 'test.npz') + + def test_load(self): + """Test the load method.""" + weights = [1, 2, 3] + + result = self.helper.save(weights, 'test.npz') + result = self.helper.load('test.npz') + + self.assertEqual(result, [1, 2, 3]) + + # Tear down method, remove test.npz + def tearDown(self): + if os.path.exists('test.npz'): + os.remove('test.npz') + + +if __name__ == '__main__': + unittest.main() diff --git a/fedn/fedn/utils/plugins/tests/test_pytorchhelper.py b/fedn/fedn/utils/plugins/tests/test_pytorchhelper.py new file mode 100644 index 000000000..4eb98c7f9 --- /dev/null +++ b/fedn/fedn/utils/plugins/tests/test_pytorchhelper.py @@ -0,0 +1,63 @@ +import os +import unittest + +import numpy as np + +from fedn.utils.plugins.pytorchhelper import Helper as PyTorchHelper + + +class TestPyTorchHelper(unittest.TestCase): + """Test the PyTorchHelper class.""" + + def setUp(self): + self.helper = PyTorchHelper() + + def test_increment_average(self): + """Test the increment_average method. 
The weights are stored as OrderedDicts.""" + + # Model as OrderedDict with keys as torch layers and values as numpy arrays + model = {'layer1': np.array([1, 2, 3])} + model_next = {'layer1': np.array([4, 5, 6])} + a = 10 + W = 20 + + result = self.helper.increment_average(model, model_next, a, W) + + # Check OrderedDict values match + np.testing.assert_array_equal(result['layer1'], np.array([2.5, 3.5, 4.5])) + + # Model as OrderedDict with keys as torch layers and values as lists + model = {'layer1': [1, 2, 3]} + model_next = {'layer1': [4, 5, 6]} + a = 10 + W = 20 + + # Catch TypeError: unsupported operand type(s) for -: 'list' and 'list' + with self.assertRaises(TypeError): + result = self.helper.increment_average(model, model_next, a, W) + + # Test save and load methods + def test_save_load(self): + """Test the save and load methods.""" + + # Create a model + model = {'layer1': np.array([1, 2, 3])} + + # Save the model + self.helper.save(model, 'test_model') + + # Check if the model file exists + self.assertTrue(os.path.exists('test_model.npz')) + + # Load the model + result = self.helper.load('test_model.npz') + + # Check OrderedDict values match + np.testing.assert_array_equal(result['layer1'], np.array([1, 2, 3])) + + # Remove the model file + os.remove('test_model.npz') + + +if __name__ == '__main__': + unittest.main() diff --git a/fedn/fedn/utils/process.py b/fedn/fedn/utils/process.py index 5a005e0cd..bd31f9441 100644 --- a/fedn/fedn/utils/process.py +++ b/fedn/fedn/utils/process.py @@ -5,18 +5,22 @@ def run_process(args, cwd): - """ + """ Run a process and log the output. - :param args: - :param cwd: + :param args: The arguments to the process. + :type args: list + :param cwd: The current working directory. + :type cwd: str + :return: """ status = subprocess.Popen( args, cwd=cwd, stdout=subprocess.PIPE, stderr=subprocess.STDOUT) # print(status) def check_io(): - """ + """ Check stdout/stderr of the child process. 
+ :return: """ while True: output = status.stdout.readline().decode() diff --git a/fedn/fedn/utils/pytorchhelper.py b/fedn/fedn/utils/pytorchhelper.py deleted file mode 100644 index fe1330b17..000000000 --- a/fedn/fedn/utils/pytorchhelper.py +++ /dev/null @@ -1,77 +0,0 @@ -import os -import tempfile -from collections import OrderedDict -from io import BytesIO - -import numpy as np - -from .helpers import HelperBase - - -class PytorchHelper(HelperBase): - - def increment_average(self, model, model_next, n): - """ Update an incremental average. """ - w = OrderedDict() - for name in model.keys(): - tensorDiff = model_next[name] - model[name] - w[name] = model[name] + tensorDiff / n - return w - - def get_tmp_path(self): - """ - - :return: - """ - fd, path = tempfile.mkstemp(suffix='.npz') - os.close(fd) - return path - - def save_model(self, weights_dict, path=None): - """ - - :param weights_dict: - :param path: - :return: - """ - if not path: - path = self.get_tmp_path() - np.savez_compressed(path, **weights_dict) - return path - - def load_model(self, path="weights.npz"): - """ - - :param path: - :return: - """ - b = np.load(path) - weights_np = OrderedDict() - for i in b.files: - weights_np[i] = b[i] - return weights_np - - def load_model_from_BytesIO(self, model_bytesio): - """ Load a model from a BytesIO object. 
""" - path = self.get_tmp_path() - with open(path, 'wb') as fh: - fh.write(model_bytesio) - fh.flush() - model = self.load_model(path) - os.unlink(path) - return model - - def serialize_model_to_BytesIO(self, model): - """ - - :param model: - :return: - """ - outfile_name = self.save_model(model) - - a = BytesIO() - a.seek(0, 0) - with open(outfile_name, 'rb') as f: - a.write(f.read()) - os.unlink(outfile_name) - return a diff --git a/fedn/fedn/utils/tests/test_helpers.py b/fedn/fedn/utils/tests/test_helpers.py new file mode 100644 index 000000000..9dfcdd36f --- /dev/null +++ b/fedn/fedn/utils/tests/test_helpers.py @@ -0,0 +1,57 @@ +import os +import unittest + +from fedn.utils.helpers import get_helper, save_metadata, save_metrics + + +class TestHelpers(unittest.TestCase): + + def test_get_helper(self): + helper = get_helper('pytorchhelper') + + # Check that helper is not None + self.assertTrue(helper is not None) + + # Check that helper nane is correct + self.assertTrue(helper.name == 'pytorchhelper') + + def test_save_metadata(self): + metadata = {'test': 'test'} + save_metadata(metadata, 'test') + + # Check that file exists + self.assertTrue(os.path.exists('test-metadata')) + + # Check that file is not empty + self.assertTrue(os.path.getsize('test-metadata') > 0) + + # Check that file contains the correct data + with open('test-metadata', 'r') as f: + data = f.read() + self.assertTrue(data == '{"test": "test"}') + + def test_save_metrics(self): + metrics = {'test': 'test'} + save_metrics(metrics, 'test_metrics.json') + + # Check that file exists + self.assertTrue(os.path.exists('test_metrics.json')) + + # Check that file is not empty + self.assertTrue(os.path.getsize('test_metrics.json') > 0) + + # Check that file contains the correct data + with open('test_metrics.json', 'r') as f: + data = f.read() + self.assertTrue(data == '{"test": "test"}') + + # Clean up (remove files) + def tearDown(self): + if os.path.exists('test-metadata'): + 
os.remove('test-metadata') + if os.path.exists('test_metrics.json'): + os.remove('test_metrics.json') + + +if __name__ == '__main__': + unittest.main() diff --git a/fedn/setup.py b/fedn/setup.py index 764b09170..62888ce09 100644 --- a/fedn/setup.py +++ b/fedn/setup.py @@ -2,10 +2,8 @@ setup( name='fedn', - version='0.4.1', + version='0.6.0', description="""Scaleout Federated Learning""", - long_description=open('README.md').read(), - long_description_content_type="text/markdown", author='Scaleout Systems AB', author_email='contact@scaleoutsystems.com', url='https://www.scaleoutsystems.com', @@ -17,9 +15,9 @@ "urllib3>=1.26.4", "minio", "python-slugify", - "grpcio~=1.47.0", + "grpcio~=1.48.0", "grpcio-tools", - "numpy>=1.21.6,<=1.22.2", + "numpy>=1.21.6", "protobuf", "pymongo", "Flask", @@ -47,5 +45,6 @@ 'Programming Language :: Python :: 3.7', 'Programming Language :: Python :: 3.8', 'Programming Language :: Python :: 3.9', + 'Programming Language :: Python :: 3.10', ], )