From 96b6487ffef8faa15f89ee090117641269ec3ec0 Mon Sep 17 00:00:00 2001 From: Wen Guan Date: Mon, 5 Feb 2024 14:51:37 +0100 Subject: [PATCH] fix error in log --- Dockerfile | 7 +- Dockerfile.centos7 | 7 +- common/lib/idds/common/utils.py | 256 +++++++++++++++++- .../httpd-idds-443-py39-cc7.conf | 10 +- main/etc/idds/idds.cfg.template | 2 +- main/etc/sql/oracle_update.sql | 2 +- main/etc/sql/postgresql.sql | 1 + main/lib/idds/agents/common/baseagent.py | 5 +- .../53d0af715dab_add_site_throttler.py | 3 + main/lib/idds/tests/core_tests.py | 3 +- main/lib/idds/tests/panda_test.py | 6 +- main/lib/idds/tests/test_domapanda_big.py | 8 +- main/tools/env/setup_panda.sh | 2 +- start-daemon.sh | 14 +- 14 files changed, 303 insertions(+), 23 deletions(-) diff --git a/Dockerfile b/Dockerfile index 92dd8348..3a94cb44 100644 --- a/Dockerfile +++ b/Dockerfile @@ -28,7 +28,7 @@ RUN yum-config-manager --enable crb # RUN yum install -y httpd.x86_64 conda gridsite mod_ssl.x86_64 httpd-devel.x86_64 gcc.x86_64 supervisor.noarch fetch-crl.noarch lcg-CA postgresql postgresql-contrib postgresql-static postgresql-libs postgresql-devel && \ # yum clean all && \ # rm -rf /var/cache/yum -RUN yum install -y httpd.x86_64 which conda gridsite mod_ssl.x86_64 httpd-devel.x86_64 gcc.x86_64 supervisor.noarch fetch-crl.noarch redis syslog-ng procps passwd which && \ +RUN yum install -y httpd.x86_64 which conda gridsite mod_ssl.x86_64 httpd-devel.x86_64 gcc.x86_64 supervisor.noarch fetch-crl.noarch redis syslog-ng procps passwd which systemd-udev && \ yum clean all && \ rm -rf /var/cache/yum @@ -45,6 +45,11 @@ RUN yum install -y fetch-crl.noarch ca-policy-egi-core && \ yum clean all && \ rm -rf /var/cache/yum +# update network limitations +# RUN echo 4096 > /proc/sys/net/core/somaxconn +# RUN sysctl -w net.core.somaxconn=4096 +RUN echo 'net.core.somaxconn=4096' >> /etc/sysctl.d/999-net.somax.conf + # setup env RUN adduser atlpan RUN groupadd zp diff --git a/Dockerfile.centos7 b/Dockerfile.centos7 index 
dfc76e3a..4c2a18ed 100644 --- a/Dockerfile.centos7 +++ b/Dockerfile.centos7 @@ -28,7 +28,7 @@ RUN yum upgrade -y && \ # RUN yum install -y httpd.x86_64 conda gridsite mod_ssl.x86_64 httpd-devel.x86_64 gcc.x86_64 supervisor.noarch fetch-crl.noarch lcg-CA postgresql postgresql-contrib postgresql-static postgresql-libs postgresql-devel && \ # yum clean all && \ # rm -rf /var/cache/yum -RUN yum install -y httpd.x86_64 which conda gridsite mod_ssl.x86_64 httpd-devel.x86_64 gcc.x86_64 supervisor.noarch fetch-crl.noarch lcg-CA redis syslog-ng && \ +RUN yum install -y httpd.x86_64 which conda gridsite mod_ssl.x86_64 httpd-devel.x86_64 gcc.x86_64 supervisor.noarch fetch-crl.noarch lcg-CA redis syslog-ng systemd-udev && \ yum clean all && \ rm -rf /var/cache/yum @@ -42,6 +42,11 @@ RUN yum install -y fetch-crl.noarch lcg-CA ca-policy-egi-core && \ yum clean all && \ rm -rf /var/cache/yum +# update network limitations +# RUN echo 4096 > /proc/sys/net/core/somaxconn +# RUN sysctl -w net.core.somaxconn=4096 +RUN echo 'net.core.somaxconn=4096' >> /etc/sysctl.d/999-net.somax.conf + # setup env RUN adduser atlpan RUN groupadd zp diff --git a/common/lib/idds/common/utils.py b/common/lib/idds/common/utils.py index f361affe..34af0160 100644 --- a/common/lib/idds/common/utils.py +++ b/common/lib/idds/common/utils.py @@ -6,16 +6,19 @@ # http://www.apache.org/licenses/LICENSE-2.0OA # # Authors: -# - Wen Guan, , 2019 - 2023 +# - Wen Guan, , 2019 - 2024 +import base64 import errno import datetime +import importlib import logging import json import os import re import requests +import signal import subprocess import sys import tarfile @@ -27,6 +30,7 @@ from itertools import groupby from operator import itemgetter from packaging import version as packaging_version +from typing import Any, Callable from idds.common.config import (config_has_section, config_has_option, config_get, config_get_bool) @@ -234,15 +238,112 @@ def check_database(): return False -def run_process(cmd, stdout=None, 
stderr=None): +def kill_process_group(pgrp, nap=10): + """ + Kill the process group. + DO NOT MOVE TO PROCESSES.PY - will lead to circular import since execute() needs it as well. + :param pgrp: process group id (int). + :param nap: napping time between kill signals in seconds (int) + :return: boolean (True if SIGTERM followed by SIGKILL signalling was successful) + """ + + status = False + _sleep = True + + # kill the process gracefully + print(f"killing group process {pgrp}") + try: + os.killpg(pgrp, signal.SIGTERM) + except Exception as error: + print(f"exception thrown when killing child group process under SIGTERM: {error}") + _sleep = False + else: + print(f"SIGTERM sent to process group {pgrp}") + + if _sleep: + print(f"sleeping {nap} s to allow processes to exit") + time.sleep(nap) + + try: + os.killpg(pgrp, signal.SIGKILL) + except Exception as error: + print(f"exception thrown when killing child group process with SIGKILL: {error}") + else: + print(f"SIGKILL sent to process group {pgrp}") + status = True + + return status + + +def kill_all(process: Any) -> str: + """ + Kill all processes after a time-out exception in process.communication(). + + :param process: process object + :return: stderr (str). 
+ """ + + stderr = '' + try: + print('killing lingering subprocess and process group') + time.sleep(1) + # process.kill() + kill_process_group(os.getpgid(process.pid)) + except ProcessLookupError as exc: + stderr += f'\n(kill process group) ProcessLookupError={exc}' + except Exception as exc: + stderr += f'\n(kill_all 1) exception caught: {exc}' + try: + print('killing lingering process') + time.sleep(1) + os.kill(process.pid, signal.SIGTERM) + print('sleeping a bit before sending SIGKILL') + time.sleep(10) + os.kill(process.pid, signal.SIGKILL) + except ProcessLookupError as exc: + stderr += f'\n(kill process) ProcessLookupError={exc}' + except Exception as exc: + stderr += f'\n(kill_all 2) exception caught: {exc}' + print(f'sent soft kill signals - final stderr: {stderr}') + return stderr + + +def run_process(cmd, stdout=None, stderr=None, wait=False, timeout=7 * 24 * 3600): """ Runs a command in an out-of-procees shell. """ + print(f"To run command: {cmd}") if stdout and stderr: - process = subprocess.Popen(cmd, shell=True, stdout=stdout, stderr=stderr, preexec_fn=os.setsid) + process = subprocess.Popen(cmd, shell=True, stdout=stdout, stderr=stderr, preexec_fn=os.setsid, encoding='utf-8') + else: + process = subprocess.Popen(cmd, shell=True, preexec_fn=os.setsid, encoding='utf-8') + if not wait: + return process + + try: + print(f'subprocess.communicate() will use timeout={timeout} s') + process.communicate(timeout=timeout) + except subprocess.TimeoutExpired as ex: + stderr = f'subprocess communicate sent TimeoutExpired: {ex}' + print(stderr) + stderr = kill_all(process) + print(f'Killing process: {stderr}') + exit_code = -1 + except Exception as ex: + stderr = f'subprocess has an exception: {ex}' + print(stderr) + stderr = kill_all(process) + print(f'Killing process: {stderr}') + exit_code = -1 else: - process = subprocess.Popen(cmd, shell=True) - return process + exit_code = process.poll() + + try: + process.wait(timeout=60) + except subprocess.TimeoutExpired: 
+ print("process did not complete within the timeout of 60s - terminating") + process.terminate() + return exit_code def run_command(cmd): @@ -630,3 +731,148 @@ def group_list(input_list, key): update_groups[item_tuple] = {'keys': [], 'items': item} update_groups[item_tuple]['keys'].append(item_key) return update_groups + + +def import_fun(name: str) -> Callable[..., Any]: + """Returns a function from a dotted path name. Example: `path.to.module:func`. + + When the attribute we look for is a staticmethod, module name in its + dotted path is not the last-before-end word + + E.g.: package_a.package_b.module_a:ClassA.my_static_method + + Thus we remove the bits from the end of the name until we can import it + + Args: + name (str): The name (reference) to the path. + + Raises: + ValueError: If no module is found or invalid attribute name. + + Returns: + Any: An attribute (normally a Callable) + """ + name_bits = name.split(':') + module_name_bits, attribute_bits = name_bits[:-1], [name_bits[-1]] + module_name_bits = module_name_bits[0].split('.') + attribute_bits = attribute_bits[0].split('.') + module = None + while len(module_name_bits): + try: + module_name = '.'.join(module_name_bits) + module = importlib.import_module(module_name) + break + except ImportError: + attribute_bits.insert(0, module_name_bits.pop()) + + if module is None: + # maybe it's a builtin + try: + return __builtins__[name] + except KeyError: + raise ValueError('Invalid attribute name: %s' % name) + + attribute_name = '.'.join(attribute_bits) + if hasattr(module, attribute_name): + return getattr(module, attribute_name) + # staticmethods + attribute_name = attribute_bits.pop() + attribute_owner_name = '.'.join(attribute_bits) + try: + attribute_owner = getattr(module, attribute_owner_name) + except: # noqa + raise ValueError('Invalid attribute name: %s' % attribute_name) + + if not hasattr(attribute_owner, attribute_name): + raise ValueError('Invalid attribute name: %s' % name) + return 
getattr(attribute_owner, attribute_name) + + +def import_attribute(name: str) -> Callable[..., Any]: + """Returns an attribute from a dotted path name. Example: `path.to.func`. + + When the attribute we look for is a staticmethod, module name in its + dotted path is not the last-before-end word + + E.g.: package_a.package_b.module_a.ClassA.my_static_method + + Thus we remove the bits from the end of the name until we can import it + + Args: + name (str): The name (reference) to the path. + + Raises: + ValueError: If no module is found or invalid attribute name. + + Returns: + Any: An attribute (normally a Callable) + """ + name_bits = name.split('.') + module_name_bits, attribute_bits = name_bits[:-1], [name_bits[-1]] + module = None + while len(module_name_bits): + try: + module_name = '.'.join(module_name_bits) + module = importlib.import_module(module_name) + break + except ImportError: + attribute_bits.insert(0, module_name_bits.pop()) + + if module is None: + # maybe it's a builtin + try: + return __builtins__[name] + except KeyError: + raise ValueError('Invalid attribute name: %s' % name) + + attribute_name = '.'.join(attribute_bits) + if hasattr(module, attribute_name): + return getattr(module, attribute_name) + # staticmethods + attribute_name = attribute_bits.pop() + attribute_owner_name = '.'.join(attribute_bits) + try: + attribute_owner = getattr(module, attribute_owner_name) + except: # noqa + raise ValueError('Invalid attribute name: %s' % attribute_name) + + if not hasattr(attribute_owner, attribute_name): + raise ValueError('Invalid attribute name: %s' % name) + return getattr(attribute_owner, attribute_name) + + +def decode_base64(sb): + try: + if isinstance(sb, str): + sb_bytes = bytes(sb, 'ascii') + elif isinstance(sb, bytes): + sb_bytes = sb + else: + return sb + return base64.b64decode(sb_bytes).decode("utf-8") + except Exception as ex: + logging.error("decode_base64 %s: %s" % (sb, ex)) + return sb + + +def encode_base64(sb): + try: + if 
isinstance(sb, str): + sb_bytes = bytes(sb, 'ascii') + elif isinstance(sb, bytes): + sb_bytes = sb + return base64.b64encode(sb_bytes).decode("utf-8") + except Exception as ex: + logging.error("encode_base64 %s: %s" % (sb, ex)) + return sb + + +def create_archive_file(work_dir, archive_filename, files): + if not archive_filename.startswith("/"): + archive_filename = os.path.join(work_dir, archive_filename) + + with tarfile.open(archive_filename, "w:gz", dereference=True) as tar: + for local_file in files: + # base_name = os.path.basename(local_file) + tar.add(local_file, arcname=os.path.basename(local_file)) + return archive_filename diff --git a/main/config_default/httpd-idds-443-py39-cc7.conf b/main/config_default/httpd-idds-443-py39-cc7.conf index 4922fa16..8a980b74 100644 --- a/main/config_default/httpd-idds-443-py39-cc7.conf +++ b/main/config_default/httpd-idds-443-py39-cc7.conf @@ -32,7 +32,8 @@ MinSpareServers ${IDDS_SERVER_CONF_MIN_WORKERS} ServerLimit ${IDDS_SERVER_CONF_MAX_WORKERS} MaxSpareServers ${IDDS_SERVER_CONF_MAX_WORKERS} MaxClients ${IDDS_SERVER_CONF_MAX_WORKERS} -MaxRequestsPerChild 2000 +MaxRequestsPerChild 100 +ThreadsPerChild 100 @@ -41,14 +42,15 @@ MinSpareThreads ${IDDS_SERVER_CONF_MIN_WORKERS} ServerLimit ${IDDS_SERVER_CONF_MAX_WORKERS} MaxSpareThreads ${IDDS_SERVER_CONF_MAX_WORKERS} MaxRequestWorkers ${IDDS_SERVER_CONF_MAX_WORKERS} -MaxConnectionsPerChild 2000 +MaxConnectionsPerChild 100 +ThreadsPerChild 100 WSGIPythonHome /opt/idds WSGIPythonPath /opt/idds/lib/python3.9/site-packages - WSGIDaemonProcess idds_daemon processes=${IDDS_SERVER_CONF_NUM_WSGI} threads=2 request-timeout=600 queue-timeout=600 python-home=/opt/idds python-path=/opt/idds/lib/python3.9/site-packages python-path=/opt/idds python-path=/opt/idds/lib/python3.9/site-packages + WSGIDaemonProcess idds_daemon processes=${IDDS_SERVER_CONF_NUM_WSGI} threads=${IDDS_SERVER_CONF_NUM_WSGI_THREAD} request-timeout=600 queue-timeout=600 python-home=/opt/idds 
python-path=/opt/idds/lib/python3.9/site-packages python-path=/opt/idds python-path=/opt/idds/lib/python3.9/site-packages WSGIProcessGroup idds_daemon WSGIApplicationGroup %GLOBAL WSGIScriptAlias /idds /opt/idds/bin/idds.wsgi @@ -57,6 +59,8 @@ WSGIPythonPath /opt/idds/lib/python3.9/site-packages WSGIPassAuthorization On +ListenBackLog ${IDDS_SERVER_CONF_MAX_BACKLOG} + Listen 8443 Listen 8080 diff --git a/main/etc/idds/idds.cfg.template b/main/etc/idds/idds.cfg.template index d75942fb..f516f1b9 100755 --- a/main/etc/idds/idds.cfg.template +++ b/main/etc/idds/idds.cfg.template @@ -26,7 +26,7 @@ loglevel = DEBUG # idds atlas condor pool: aipanda101 # dev: aipanda104 # doma: aipanda105-107 -# +# idds-mon: aipanda108 [database] #default = mysql://idds:idds@pcuwvirt5.cern.ch/idds #default = mysql://idds:idds_passwd@aipanda182.cern.ch/idds diff --git a/main/etc/sql/oracle_update.sql b/main/etc/sql/oracle_update.sql index e3dc6cf0..0da5cb11 100644 --- a/main/etc/sql/oracle_update.sql +++ b/main/etc/sql/oracle_update.sql @@ -463,7 +463,7 @@ CREATE TABLE meta_info created_at DATE DEFAULT SYS_EXTRACT_UTC(systimestamp(0)), updated_at DATE DEFAULT SYS_EXTRACT_UTC(systimestamp(0)), description VARCHAR2(1000), - metadata CLOB, + meta_info CLOB, CONSTRAINT METAINFO_PK PRIMARY KEY (meta_id), -- USING INDEX LOCAL, CONSTRAINT METAINFO_NAME_UQ UNIQUE (name) ); diff --git a/main/etc/sql/postgresql.sql b/main/etc/sql/postgresql.sql index 6d4c02c1..792a94cf 100644 --- a/main/etc/sql/postgresql.sql +++ b/main/etc/sql/postgresql.sql @@ -311,6 +311,7 @@ CREATE TABLE doma_idds.events_archive ( CONSTRAINT "EVENTS_AR_PK" PRIMARY KEY (event_id) ); +CREATE SEQUENCE doma_idds."THROTTLER_ID_SEQ" START WITH 1 CREATE TABLE doma_idds.throttlers ( throttler_id BIGSERIAL NOT NULL, diff --git a/main/lib/idds/agents/common/baseagent.py b/main/lib/idds/agents/common/baseagent.py index 54275972..578b9721 100644 --- a/main/lib/idds/agents/common/baseagent.py +++ b/main/lib/idds/agents/common/baseagent.py @@ 
-229,7 +229,10 @@ def __call__(self): def stop(self): super(BaseAgent, self).stop() - self.event_bus.stop() + try: + self.event_bus.stop() + except Exception: + pass def terminate(self): self.stop() diff --git a/main/lib/idds/orm/base/alembic/versions/53d0af715dab_add_site_throttler.py b/main/lib/idds/orm/base/alembic/versions/53d0af715dab_add_site_throttler.py index 0557b6e7..44bb7a33 100644 --- a/main/lib/idds/orm/base/alembic/versions/53d0af715dab_add_site_throttler.py +++ b/main/lib/idds/orm/base/alembic/versions/53d0af715dab_add_site_throttler.py @@ -57,6 +57,8 @@ def upgrade() -> None: op.drop_constraint('THROTTLER_PK', table_name='throttlers', schema=schema) except: pass + # op.create_sequence(sa.Sequence('THROTTLER_ID_SEQ', schema=schema)) + op.execute(sa.schema.CreateSequence(sa.Sequence('THROTTLER_ID_SEQ', schema=schema))) op.create_table('throttlers', sa.Column('throttler_id', sa.BigInteger(), sa.Sequence('THROTTLER_ID_SEQ', schema=schema)), sa.Column('site', sa.String(50), nullable=False), @@ -92,3 +94,4 @@ def downgrade() -> None: op.drop_constraint('THROTTLER_SITE_UQ', table_name='throttlers', schema=schema) op.drop_constraint('THROTTLER_PK', table_name='throttlers', schema=schema) op.drop_table('throttlers', schema=schema) + op.drop_sequence('THROTTLER_ID_SEQ', schema=schema) diff --git a/main/lib/idds/tests/core_tests.py b/main/lib/idds/tests/core_tests.py index cd265c0e..c8e4770d 100644 --- a/main/lib/idds/tests/core_tests.py +++ b/main/lib/idds/tests/core_tests.py @@ -175,6 +175,7 @@ def print_workflow_template(workflow, layers=0): reqs = get_requests(request_id=479187, with_request=True, with_detail=False, with_metadata=True) reqs = get_requests(request_id=4498, with_request=True, with_detail=False, with_metadata=True) reqs = get_requests(request_id=3244, with_request=True, with_detail=False, with_metadata=True) +reqs = get_requests(request_id=6082, with_request=True, with_detail=False, with_metadata=True) # reqs = get_requests(request_id=589913, 
with_request=True, with_detail=False, with_metadata=True) for req in reqs: # print(req['request_id']) @@ -227,9 +228,9 @@ def print_workflow_template(workflow, layers=0): print(json_dumps(workflow.template, sort_keys=True, indent=4)) -""" sys.exit(0) +""" reqs = get_requests(request_id=28182323, with_request=False, with_detail=True, with_metadata=False) for req in reqs: print(json_dumps(req, sort_keys=True, indent=4)) diff --git a/main/lib/idds/tests/panda_test.py b/main/lib/idds/tests/panda_test.py index 0a45a4da..a7788ece 100644 --- a/main/lib/idds/tests/panda_test.py +++ b/main/lib/idds/tests/panda_test.py @@ -10,8 +10,8 @@ os.environ['PANDA_URL'] = 'http://rubin-panda-server-dev.slac.stanford.edu:80/server/panda' os.environ['PANDA_URL_SSL'] = 'https://rubin-panda-server-dev.slac.stanford.edu:8443/server/panda' -# os.environ['PANDA_URL'] = 'https://usdf-panda-server.slac.stanford.edu:8443/server/panda' -# os.environ['PANDA_URL_SSL'] = 'https://usdf-panda-server.slac.stanford.edu:8443/server/panda' +os.environ['PANDA_URL'] = 'https://usdf-panda-server.slac.stanford.edu:8443/server/panda' +os.environ['PANDA_URL_SSL'] = 'https://usdf-panda-server.slac.stanford.edu:8443/server/panda' from pandaclient import Client # noqa E402 @@ -49,6 +49,8 @@ task_ids = [i for i in range(166799, 167877)] task_ids = [i for i in range(167997, 168003)] task_ids = [688, 8686, 8695, 8696] +task_ids = [i for i in range(8958, 9634)] +task_ids = [i for i in range(8752, 8958)] for task_id in task_ids: print("Killing %s" % task_id) ret = Client.killTask(task_id, verbose=True) diff --git a/main/lib/idds/tests/test_domapanda_big.py b/main/lib/idds/tests/test_domapanda_big.py index a576af78..c9e78354 100644 --- a/main/lib/idds/tests/test_domapanda_big.py +++ b/main/lib/idds/tests/test_domapanda_big.py @@ -173,7 +173,7 @@ def setup_workflow(): taskN4.dependencies = [ {"name": "00004" + str(k), "dependencies": [], - "submitted": False} for k in range(10000) + "submitted": False} for k in 
range(100) ] taskN5 = PanDATask() @@ -182,7 +182,7 @@ def setup_workflow(): taskN5.dependencies = [ {"name": "00005" + str(k), "dependencies": [], - "submitted": False} for k in range(10000) + "submitted": False} for k in range(100) ] work1 = DomaPanDAWork(executable='echo; sleep 180', @@ -234,7 +234,7 @@ def setup_workflow(): work4 = DomaPanDAWork(executable='echo; sleep 180', primary_input_collection={'scope': 'pseudo_dataset', 'name': 'pseudo_input_collection#1'}, output_collections=[{'scope': 'pseudo_dataset', 'name': 'pseudo_output_collection#1'}], - log_collections=[], dependency_map=taskN1.dependencies, + log_collections=[], dependency_map=taskN4.dependencies, task_name=taskN4.name, task_queue=task_queue3, encode_command_line=True, task_priority=981, @@ -250,7 +250,7 @@ def setup_workflow(): work5 = DomaPanDAWork(executable='echo; sleep 180', primary_input_collection={'scope': 'pseudo_dataset', 'name': 'pseudo_input_collection#1'}, output_collections=[{'scope': 'pseudo_dataset', 'name': 'pseudo_output_collection#1'}], - log_collections=[], dependency_map=taskN1.dependencies, + log_collections=[], dependency_map=taskN5.dependencies, task_name=taskN5.name, task_queue=task_queue4, encode_command_line=True, task_priority=981, diff --git a/main/tools/env/setup_panda.sh b/main/tools/env/setup_panda.sh index bf38d973..37ef38ca 100644 --- a/main/tools/env/setup_panda.sh +++ b/main/tools/env/setup_panda.sh @@ -49,7 +49,7 @@ elif [ "$instance" == "usdf" ]; then export PANDA_URL=https://usdf-panda-server.slac.stanford.edu:8443/server/panda export PANDACACHE_URL=$PANDA_URL_SSL export PANDAMON_URL=https://usdf-panda-bigmon.slac.stanford.edu:8443/ - export PANDA_AUTH_VO=Rubin + export PANDA_AUTH_VO=Rubin:production export PANDACACHE_URL=$PANDA_URL_SSL export PANDA_SYS=/afs/cern.ch/user/w/wguan/workdisk/iDDS/.conda/iDDS/ diff --git a/start-daemon.sh b/start-daemon.sh index cadc96b6..9a8fe2bd 100755 --- a/start-daemon.sh +++ b/start-daemon.sh @@ -181,7 +181,7 @@ fi # min 
number of workers if [[ -z "${IDDS_SERVER_CONF_MIN_WORKERS}" ]]; then - export IDDS_SERVER_CONF_MIN_WORKERS=25 + export IDDS_SERVER_CONF_MIN_WORKERS=32 fi # max number of workers @@ -191,7 +191,17 @@ fi # max number of WSGI daemons if [[ -z "${IDDS_SERVER_CONF_NUM_WSGI}" ]]; then - export IDDS_SERVER_CONF_NUM_WSGI=25 + export IDDS_SERVER_CONF_NUM_WSGI=32 +fi + +# max number of WSGI daemons +if [[ -z "${IDDS_SERVER_CONF_MAX_BACKLOG}" ]]; then + export IDDS_SERVER_CONF_MAX_BACKLOG=511 +fi + +# max number of WSGI threads +if [[ -z "${IDDS_SERVER_CONF_NUM_WSGI_THREAD}" ]]; then + export IDDS_SERVER_CONF_NUM_WSGI_THREAD=32 fi # create database if not exists