From ce258e68eaf1c80c4baaccedf7eb9e994381e84e Mon Sep 17 00:00:00 2001
From: Andreas Hellander
Date: Fri, 13 Dec 2024 23:54:21 +0100
Subject: [PATCH] test

---
 fedn/network/clients/grpc_handler.py  | 3 ---
 fedn/network/combiner/modelservice.py | 2 +-
 fedn/network/grpc/server.py           | 2 +-
 3 files changed, 2 insertions(+), 5 deletions(-)

diff --git a/fedn/network/clients/grpc_handler.py b/fedn/network/clients/grpc_handler.py
index e94f2d8f..9858c530 100644
--- a/fedn/network/clients/grpc_handler.py
+++ b/fedn/network/clients/grpc_handler.py
@@ -26,7 +26,6 @@
 MAX_CONNECTION_IDLE_MS = 30000
 MAX_CONNECTION_AGE_GRACE_MS = "INT_MAX"  # keep connection open indefinitely
 CLIENT_IDLE_TIMEOUT_MS = 30000
-MAX_MESSAGE_LENGTH = 10 * 1024 * 1024
 
 GRPC_OPTIONS = [
     ("grpc.keepalive_time_ms", KEEPALIVE_TIME_MS),
@@ -36,8 +35,6 @@
     ("grpc.max_connection_idle_ms", MAX_CONNECTION_IDLE_MS),
     ("grpc.max_connection_age_grace_ms", MAX_CONNECTION_AGE_GRACE_MS),
     ("grpc.client_idle_timeout_ms", CLIENT_IDLE_TIMEOUT_MS),
-    ('grpc.max_send_message_length', MAX_MESSAGE_LENGTH),
-    ('grpc.max_receive_message_length', MAX_MESSAGE_LENGTH),
 ]
 
 
diff --git a/fedn/network/combiner/modelservice.py b/fedn/network/combiner/modelservice.py
index 76702e94..ef5f9a75 100644
--- a/fedn/network/combiner/modelservice.py
+++ b/fedn/network/combiner/modelservice.py
@@ -9,7 +9,7 @@
 from fedn.common.log_config import logger
 from fedn.network.storage.models.tempmodelstorage import TempModelStorage
 
-CHUNK_SIZE = 4 * 1024 * 1024
+CHUNK_SIZE = 2 * 1024 * 1024
 
 
 def upload_request_generator(mdl, id):
diff --git a/fedn/network/grpc/server.py b/fedn/network/grpc/server.py
index a4e24f58..a581c16b 100644
--- a/fedn/network/grpc/server.py
+++ b/fedn/network/grpc/server.py
@@ -33,7 +33,7 @@ def __init__(self, servicer, config: ServerConfig):
         KEEPALIVE_TIMEOUT_MS = 20 * 1000
         # max idle time before server terminates the connection (5 minutes)
         MAX_CONNECTION_IDLE_MS = 5 * 60 * 1000
-        MAX_MESSAGE_LENGTH = 10 * 1024 * 1024
+        MAX_MESSAGE_LENGTH = 100 * 1024 * 1024
 
         self.server = grpc.server(
             futures.ThreadPoolExecutor(max_workers=350),
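
Note: the constants touched above interact. Each chunk produced by the streaming upload generator in modelservice.py is sent as the payload of one gRPC message, so CHUNK_SIZE has to stay below whatever receive limit the peer enforces. With the client-side grpc.max_receive_message_length option removed, the client falls back to gRPC's default receive limit of 4 MiB, which the new 2 MiB chunk size fits under, while the server's own cap is raised to 100 MiB. The sketch below is illustrative only and is not taken from the FEDn codebase; chunk_stream and the dummy payload are made-up names used to show the size relationship.

from io import BytesIO

CHUNK_SIZE = 2 * 1024 * 1024            # mirrors the new combiner setting above
MAX_MESSAGE_LENGTH = 100 * 1024 * 1024  # mirrors the new server setting above


def chunk_stream(model_bytes, chunk_size=CHUNK_SIZE):
    """Yield the payload as chunk-sized byte strings, the way a streaming upload would."""
    buf = BytesIO(model_bytes)
    while True:
        chunk = buf.read(chunk_size)
        if not chunk:
            break
        yield chunk


if __name__ == "__main__":
    fake_model = bytes(10 * 1024 * 1024)  # 10 MiB of zero bytes, standing in for a serialized model
    chunks = list(chunk_stream(fake_model))
    # every streamed message stays below both the 4 MiB gRPC default and the new 100 MiB server cap
    assert all(len(c) <= CHUNK_SIZE < MAX_MESSAGE_LENGTH for c in chunks)
    print(f"{len(chunks)} chunks of at most {CHUNK_SIZE} bytes each")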