diff --git a/.env-devel b/.env-devel
index ce26b5b581f..587cb23b55e 100644
--- a/.env-devel
+++ b/.env-devel
@@ -13,13 +13,12 @@ POSTGRES_PASSWORD=adminadmin
POSTGRES_PORT=5432
POSTGRES_USER=scu
-RABBITMQ_USER=admin
-RABBITMQ_LOG_CHANNEL=comp.backend.channels.log
-RABBITMQ_PASSWORD=adminadmin
-RABBITMQ_PROGRESS_CHANNEL=comp.backend.channels.progress
-
RABBIT_HOST=rabbit
+RABBIT_LOG_CHANNEL=comp.backend.channels.log
+RABBIT_PASSWORD=adminadmin
RABBIT_PORT=5672
+RABBIT_PROGRESS_CHANNEL=comp.backend.channels.progress
+RABBIT_USER=admin
REGISTRY_AUTH=True
REGISTRY_PW=adminadmin
@@ -43,3 +42,6 @@ WEBSERVER_LOGIN_REGISTRATION_INVITATION_REQUIRED=1
# python3 -c "from cryptography.fernet import Fernet; print(Fernet.generate_key())"
WEBSERVER_SESSION_SECRET_KEY=REPLACE ME with a key of at least length 32.
WEBSERVER_STUDIES_ACCESS_ENABLED=0
+WEBSERVER_PROMETHEUS_HOST=http://prometheus
+WEBSERVER_PROMETHEUS_PORT=9090
+WEBSERVER_PROMETHEUS_API_VERSION=v1
diff --git a/api/specs/webserver/v0/components/schemas/activity.yaml b/api/specs/webserver/v0/components/schemas/activity.yaml
new file mode 100644
index 00000000000..dc75293b8a1
--- /dev/null
+++ b/api/specs/webserver/v0/components/schemas/activity.yaml
@@ -0,0 +1,38 @@
+ActivityEnveloped:
+ type: object
+ required:
+ - data
+ properties:
+ data:
+ $ref: '#/Activity'
+ additionalProperties: true
+ error:
+ nullable: true
+ default: null
+
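+# Activity: queue state plus current usage (stats) and configured limits of a service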
+Activity:
+ type: object
+ properties:
+ stats:
+ $ref: '#/Status'
+ limits:
+ $ref: '#/Limits'
+ queued:
+ type: boolean
+
+Status:
+ type: object
+ properties:
+ cpuUsage:
+ type: number
+ minimum: 0
+ memoryUsage:
+ type: number
+
+Limits:
+ type: object
+ properties:
+ cpus:
+ type: number
+ mem:
+ type: number
\ No newline at end of file
diff --git a/api/specs/webserver/v0/openapi-activity.yaml b/api/specs/webserver/v0/openapi-activity.yaml
new file mode 100644
index 00000000000..5c52c112c59
--- /dev/null
+++ b/api/specs/webserver/v0/openapi-activity.yaml
@@ -0,0 +1,21 @@
+openapi: 3.0.0
+info:
+ title: activity management API
+ version: 0.1.0
+ description: API to be consumed by the activity manager to list and run actions on services
+servers:
+ - description: API server
+ url: '/v0'
+paths:
+ /activity/status:
+ get:
+ operationId: get_status
+ responses:
+ '200':
+ description: Object containing queuing, CPU and memory usage/limits information of services
+ content:
+ application/json:
+ schema:
+ $ref: './components/schemas/activity.yaml#/ActivityEnveloped'
+ default:
+ $ref: './openapi.yaml#/components/responses/DefaultErrorResponse'
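+# Example enveloped payload (illustrative values only):
+#   { "data": { "stats": { "cpuUsage": 3.5, "memoryUsage": 120.0 },
+#               "limits": { "cpus": 4, "mem": 2048 },
+#               "queued": false },
+#     "error": null }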
diff --git a/api/specs/webserver/v0/openapi-diagnostics.yaml b/api/specs/webserver/v0/openapi-diagnostics.yaml
index 5593b5513ad..1f4c8aae37f 100644
--- a/api/specs/webserver/v0/openapi-diagnostics.yaml
+++ b/api/specs/webserver/v0/openapi-diagnostics.yaml
@@ -31,6 +31,7 @@ paths:
schema:
type: string
- in: path
+ required: true
name: action
schema:
type: string
diff --git a/api/specs/webserver/v0/openapi.yaml b/api/specs/webserver/v0/openapi.yaml
index f089a1d2e29..4ff60504ae7 100644
--- a/api/specs/webserver/v0/openapi.yaml
+++ b/api/specs/webserver/v0/openapi.yaml
@@ -129,6 +129,10 @@ paths:
/nodes/{nodeInstanceUUID}/iframe:
$ref: './openapi-node-v0.0.1.yaml#/paths/~1nodes~1{nodeInstanceUUID}~1iframe'
+ # ACTIVITY -------------------------------------------------------------------------
+ /activity/status:
+ $ref: './openapi-activity.yaml#/paths/~1activity~1status'
+
components:
responses:
DefaultErrorResponse:
diff --git a/ci/travis/integration-testing/simcore-sdk b/ci/travis/integration-testing/simcore-sdk
index 9a22542e64b..4d0968d0a6d 100755
--- a/ci/travis/integration-testing/simcore-sdk
+++ b/ci/travis/integration-testing/simcore-sdk
@@ -27,7 +27,7 @@ install() {
before_script() {
if bash ci/travis/helpers/test_for_changes "${FOLDER_CHECKS[@]}";
then
- pip freeze
+ pip list -v
# pull the test images if registry is set up, else build the images
make pull-version || ((make pull-cache || true) && make build tag-version)
make info-images
diff --git a/ci/travis/system-testing/swarm-deploy b/ci/travis/system-testing/swarm-deploy
index 8b02d55bd9f..c6a69e32adc 100755
--- a/ci/travis/system-testing/swarm-deploy
+++ b/ci/travis/system-testing/swarm-deploy
@@ -26,12 +26,9 @@ install() {
before_script() {
pip list -v
make info-images
- make up-version
}
script() {
- # wait for a minute to let the swarm warm up...
- make info-swarm
pytest -v tests/swarm-deploy
}
diff --git a/packages/postgres-database/src/simcore_postgres_database/cli.py b/packages/postgres-database/src/simcore_postgres_database/cli.py
index 338489e2758..fa86e3bc64f 100644
--- a/packages/postgres-database/src/simcore_postgres_database/cli.py
+++ b/packages/postgres-database/src/simcore_postgres_database/cli.py
@@ -47,9 +47,13 @@ def safe_func(*args, **kargs):
#@retry(wait=wait_fixed(0.1), stop=stop_after_delay(60))
def _ping(url):
"""checks whether database is responsive"""
- engine = sa.create_engine(str(url))
- conn = engine.connect()
- conn.close()
+    engine = sa.create_engine(str(url))
+    try:
+        conn = engine.connect()
+        conn.close()
+    finally:
+        engine.dispose()
+
@safe(if_fails_return=None)
diff --git a/packages/service-library/src/servicelib/aiopg_utils.py b/packages/service-library/src/servicelib/aiopg_utils.py
index e8cd6eaebc1..577d22a4a2c 100644
--- a/packages/service-library/src/servicelib/aiopg_utils.py
+++ b/packages/service-library/src/servicelib/aiopg_utils.py
@@ -1,68 +1,11 @@
-"""
+""" Helpers for aiopg
-TODO: test!
+ - aiopg is used as client sdk to interact with postgres database asynchronously
-SEE https://aiopg.readthedocs.io/en/stable/
-SEE asyncpg https://magicstack.github.io/asyncpg/current/index.html
"""
-import aiopg.sa
-import attr
-import psycopg2
-import sqlalchemy as sa
-import logging
-import warnings
-
-logger = logging.getLogger(__name__)
-
-warnings.warn("DO NOT USE IN PRODUCTION, STILL UNDER DEVELOPMENT")
-
-@attr.s(auto_attribs=True)
-class AiopgExecutor:
- """
- Executes sa statements using aiopg Engine
-
- SEE https://github.com/aio-libs/aiopg/issues/321
- SEE http://docs.sqlalchemy.org/en/latest/faq/metadata_schema.html#how-can-i-get-the-create-table-drop-table-output-as-a-string)
- """
- engine: aiopg.sa.engine.Engine
- statement: str=None
- dsn: str=None # Data Source Name
-
- @property
- def sa_engine(self):
- return sa.create_engine(
- self.dsn,
- strategy="mock",
- executor=self._compile
- )
-
- def _compile(self, sql, *multiparams, **params):
- # pylint: disable=W0613, unused-argument
- self.statement = str(sql.compile(dialect=self.sa_engine.dialect))
-
- async def execute(self):
- async with self.engine.acquire() as conn:
- logger.debug(self.statement)
- resp = await conn.execute(self.statement)
- return resp
+from psycopg2 import Error as DBAPIError
-
-
-async def create_all(engine: aiopg.sa.engine.Engine, metadata: sa.MetaData, dsn: str):
- executor = AiopgExecutor(engine, dsn=dsn)
- metadata.create_all(executor.sa_engine, checkfirst=True)
- await executor.execute()
-
-
-async def drop_all(engine: aiopg.sa.engine.Engine, metadata: sa.MetaData):
- executor = AiopgExecutor(engine)
- metadata.drop_all(executor.sa_engine, checkfirst=True)
- await executor.execute()
-
-
-# EXCEPTIONS -------------------------------------
-#
# aiopg reuses DBAPI exceptions
#
# StandardError
@@ -80,11 +23,8 @@ async def drop_all(engine: aiopg.sa.engine.Engine, metadata: sa.MetaData):
# SEE https://aiopg.readthedocs.io/en/stable/core.html?highlight=Exception#exceptions
# SEE http://initd.org/psycopg/docs/module.html#dbapi-exceptions
-# alias add prefix DBAPI
-DBAPIError = psycopg2.Error
-__all__ = (
- 'create_all',
- 'drop_all'
-)
+__all__ = [
+ 'DBAPIError'
+]
diff --git a/packages/service-library/src/servicelib/application.py b/packages/service-library/src/servicelib/application.py
index 43996459b82..400a9cfb6c4 100644
--- a/packages/service-library/src/servicelib/application.py
+++ b/packages/service-library/src/servicelib/application.py
@@ -5,6 +5,13 @@
from .application_keys import APP_CONFIG_KEY
from .client_session import persistent_client_session
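+# NOTE: printed and flushed so the boot/shutdown markers show up immediately in docker logs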
+async def startup_info(app: web.Application):
+ print(f"STARTING UP {app}...", flush=True)
+
+
+async def shutdown_info(app: web.Application):
+ print(f"SHUTDOWN {app} ...", flush=True)
+
def create_safe_application(config: Optional[Dict]=None) -> web.Application:
app = web.Application()
@@ -12,10 +19,12 @@ def create_safe_application(config: Optional[Dict]=None) -> web.Application:
# Ensures config entry
app[APP_CONFIG_KEY] = config or {}
+ app.on_startup.append(startup_info)
+ app.on_cleanup.append(shutdown_info)
+
# Ensures persistent client session
# NOTE: Ensures client session context is run first,
# then any further get_client_sesions will be correctly closed
app.cleanup_ctx.append(persistent_client_session)
-
return app
diff --git a/packages/service-library/src/servicelib/application_keys.py b/packages/service-library/src/servicelib/application_keys.py
index 52c32573379..d19f14f6f37 100644
--- a/packages/service-library/src/servicelib/application_keys.py
+++ b/packages/service-library/src/servicelib/application_keys.py
@@ -11,24 +11,21 @@
# REQUIREMENTS:
# - guarantees all keys are unique
-# TODO: facilitate key generation
-# TODO: hierarchical classification
+# - one place for all common keys
+# - hierarchical classification
# TODO: should be read-only (frozen?)
+#
+# web.Application keys, i.e. app[APP_*_KEY]
+#
+APP_CONFIG_KEY = f'{__name__}.config'
+APP_OPENAPI_SPECS_KEY = f'{__name__}.openapi_specs'
+APP_JSONSCHEMA_SPECS_KEY = f'{__name__}.jsonschema_specs'
-# APP=application
-APP_CONFIG_KEY = __name__ + '.config'
-APP_OPENAPI_SPECS_KEY = __name__ + '.openapi_specs'
-APP_SESSION_SECRET_KEY = __name__ + '.session_secret'
-APP_JSONSCHEMA_SPECS_KEY = __name__ + '.jsonschema_specs'
+APP_DB_ENGINE_KEY = f'{__name__}.db_engine'
-APP_DB_ENGINE_KEY = __name__ + '.db_engine'
-APP_DB_SESSION_KEY = __name__ + '.db_session'
-APP_DB_POOL_KEY = __name__ + '.db_pool'
+APP_CLIENT_SESSION_KEY = f'{__name__}.session'
-APP_CLIENT_SESSION_KEY = f"{__name__ }.session"
-
-# RSP=response
-
-
-# TODO: tool to convert dotted __name__ to section in dict
+#
+# web.Response keys, i.e. resp[RSP_*_KEY]
+#
diff --git a/packages/service-library/src/servicelib/monitoring.py b/packages/service-library/src/servicelib/monitoring.py
index 547c500c450..5fe020354cb 100644
--- a/packages/service-library/src/servicelib/monitoring.py
+++ b/packages/service-library/src/servicelib/monitoring.py
@@ -16,12 +16,13 @@
from aiohttp import web
from prometheus_client import CONTENT_TYPE_LATEST, Counter, Gauge, Histogram
+
log = logging.getLogger(__name__)
def middleware_factory(app_name):
@web.middleware
- async def middleware_handler(request, handler):
+ async def middleware_handler(request: web.Request, handler):
# See https://prometheus.io/docs/concepts/metric_types
try:
request['start_time'] = time.time()
@@ -35,10 +36,17 @@ async def middleware_handler(request, handler):
resp = exc
raise
except Exception as exc: #pylint: disable=broad-except
- # Prevents issue #1025. FIXME: why middleware below is not non-http exception safe?
- log.exception("Unexpected exception. \
- Error middleware below should only raise web.HTTPExceptions.")
+ # Prevents issue #1025.
resp = web.HTTPInternalServerError(reason=str(exc))
+ resp_time = time.time() - request['start_time']
+
+ # NOTE: all access to the API (i.e. not other paths such as /socket, /x, etc.) shall return web.HTTPErrors, since those are processed by error_middleware_factory
+ log.exception('Unexpected server error "%s" from access: %s "%s %s" done in %3.2f secs. Responding with status %s',
+ type(exc),
+ request.remote, request.method, request.path,
+ resp_time,
+ resp.status
+ )
finally:
# metrics on the same request
resp_time = time.time() - request['start_time']
diff --git a/packages/service-library/src/servicelib/rest_middlewares.py b/packages/service-library/src/servicelib/rest_middlewares.py
index 200167e9347..1518469c033 100644
--- a/packages/service-library/src/servicelib/rest_middlewares.py
+++ b/packages/service-library/src/servicelib/rest_middlewares.py
@@ -27,15 +27,16 @@ def is_api_request(request: web.Request, api_version: str) -> bool:
return request.path.startswith(base_path)
-def _process_and_raise_unexpected_error(err):
+def _process_and_raise_unexpected_error(request: web.BaseRequest, err: Exception):
# TODO: send info + trace to client ONLY in debug mode!!!
- logger.exception("Unexpected exception on server side")
- exc = create_error_response(
- [err,],
- "Unexpected Server error",
- web.HTTPInternalServerError
- )
- raise exc
+ resp = create_error_response([err,], "Unexpected Server error", web.HTTPInternalServerError)
+
+ logger.exception('Unexpected server error "%s" from access: %s "%s %s". Responding with status %s',
+ type(err),
+ request.remote, request.method, request.path,
+ resp.status
+ )
+ raise resp
def error_middleware_factory(api_version: str = DEFAULT_API_VERSION):
@@ -78,7 +79,7 @@ async def _middleware(request: web.Request, handler):
payload = wrap_as_envelope(data=payload)
ex.text = json.dumps(payload)
except Exception as err: # pylint: disable=W0703
- _process_and_raise_unexpected_error(err)
+ _process_and_raise_unexpected_error(request, err)
raise ex
except web.HTTPRedirection as ex:
@@ -86,7 +87,7 @@ async def _middleware(request: web.Request, handler):
raise
except Exception as err: # pylint: disable=W0703
- _process_and_raise_unexpected_error(err)
+ _process_and_raise_unexpected_error(request, err)
return _middleware
diff --git a/packages/simcore-sdk/src/simcore_sdk/config/rabbit.py b/packages/simcore-sdk/src/simcore_sdk/config/rabbit.py
index be39d71bd85..419e672fa8c 100644
--- a/packages/simcore-sdk/src/simcore_sdk/config/rabbit.py
+++ b/packages/simcore-sdk/src/simcore_sdk/config/rabbit.py
@@ -10,7 +10,7 @@
# TODO: adapt all data below!
-# TODO: can use venv as defaults? e.g. $RABBITMQ_LOG_CHANNEL
+# TODO: can use env vars as defaults? e.g. $RABBIT_LOG_CHANNEL
CONFIG_SCHEMA = T.Dict({
T.Key("name", default="tasks", optional=True): T.String(),
T.Key("enabled", default=True, optional=True): T.Bool(),
@@ -75,12 +75,12 @@ def __init__(self, config=None):
else:
config = {}
- RABBITMQ_USER = env.get('RABBITMQ_USER','simcore')
- RABBITMQ_PASSWORD = env.get('RABBITMQ_PASSWORD','simcore')
- RABBITMQ_HOST=env.get('RABBITMQ_HOST','rabbit')
- RABBITMQ_PORT=int(env.get('RABBITMQ_PORT', 5672))
- RABBITMQ_LOG_CHANNEL = env.get('RABBITMQ_LOG_CHANNEL','comp.backend.channels.log')
- RABBITMQ_PROGRESS_CHANNEL = env.get('RABBITMQ_PROGRESS_CHANNEL','comp.backend.channels.progress')
+ RABBIT_USER = env.get('RABBIT_USER','simcore')
+ RABBIT_PASSWORD = env.get('RABBIT_PASSWORD','simcore')
+ RABBIT_HOST=env.get('RABBIT_HOST','rabbit')
+ RABBIT_PORT=int(env.get('RABBIT_PORT', 5672))
+ RABBIT_LOG_CHANNEL = env.get('RABBIT_LOG_CHANNEL','comp.backend.channels.log')
+ RABBIT_PROGRESS_CHANNEL = env.get('RABBIT_PROGRESS_CHANNEL','comp.backend.channels.progress')
CELERY_RESULT_BACKEND=env.get('CELERY_RESULT_BACKEND','rpc://')
# FIXME: get variables via config.get('') or
# rabbit
@@ -88,23 +88,23 @@ def __init__(self, config=None):
try:
self._broker_url = eval_broker(config)
except: # pylint: disable=W0702
- self._broker_url = 'amqp://{user}:{pw}@{url}:{port}'.format(user=RABBITMQ_USER, pw=RABBITMQ_PASSWORD, url=RABBITMQ_HOST, port=RABBITMQ_PORT)
+ self._broker_url = 'amqp://{user}:{pw}@{url}:{port}'.format(user=RABBIT_USER, pw=RABBIT_PASSWORD, url=RABBIT_HOST, port=RABBIT_PORT)
self._result_backend = config.get("celery", {}).get("result_backend") or CELERY_RESULT_BACKEND
self._module_name = config.get("name") or "tasks"
# pika
self._pika_credentials = pika.PlainCredentials(
- config.get("user") or RABBITMQ_USER,
- config.get("password") or RABBITMQ_PASSWORD)
+ config.get("user") or RABBIT_USER,
+ config.get("password") or RABBIT_PASSWORD)
self._pika_parameters = pika.ConnectionParameters(
- host=config.get("host") or RABBITMQ_HOST,
- port=config.get("port") or RABBITMQ_PORT,
+ host=config.get("host") or RABBIT_HOST,
+ port=config.get("port") or RABBIT_PORT,
credentials=self._pika_credentials,
connection_attempts=100)
- self._log_channel = config.get("celery", {}).get("result_backend") or RABBITMQ_LOG_CHANNEL
- self._progress_channel = config.get("celery", {}).get("result_backend") or RABBITMQ_PROGRESS_CHANNEL
+ self._log_channel = config.get("celery", {}).get("result_backend") or RABBIT_LOG_CHANNEL
+ self._progress_channel = config.get("celery", {}).get("result_backend") or RABBIT_PROGRESS_CHANNEL
@property
def parameters(self):
diff --git a/packages/simcore-sdk/src/simcore_sdk/models/pipeline_models.py b/packages/simcore-sdk/src/simcore_sdk/models/pipeline_models.py
index 7d5636b09eb..258534f2d08 100644
--- a/packages/simcore-sdk/src/simcore_sdk/models/pipeline_models.py
+++ b/packages/simcore-sdk/src/simcore_sdk/models/pipeline_models.py
@@ -13,7 +13,7 @@
# NOTE: All this file uses classical mapping to keep LEGACY
class Base:
- metadata = metadata #pylint: disable=self-assigning-variable
+ metadata = metadata
class ComputationalPipeline:
diff --git a/packages/simcore-sdk/src/simcore_sdk/node_ports/dbmanager.py b/packages/simcore-sdk/src/simcore_sdk/node_ports/dbmanager.py
index a307371c691..44d969c9869 100644
--- a/packages/simcore-sdk/src/simcore_sdk/node_ports/dbmanager.py
+++ b/packages/simcore-sdk/src/simcore_sdk/node_ports/dbmanager.py
@@ -32,10 +32,14 @@ def session_scope(session_factory):
class DbSettings:
def __init__(self):
self._db_settings_config = db_config()
- self.db = create_engine(self._db_settings_config.endpoint, client_encoding='utf8')
+ # FIXME: this is a SYNCHRONOUS engine! And it is not disposed!?
+ self.db = create_engine(
+ self._db_settings_config.endpoint + f"?application_name={__name__}_{id(self)}",
+ client_encoding='utf8')
self.Session = sessionmaker(self.db)
# self.session = self.Session()
+
class _NodeModelEncoder(json.JSONEncoder):
def default(self, o): # pylint: disable=E0202
log.debug("Encoding object: %s", o)
diff --git a/packages/simcore-sdk/tests/__init__.py b/packages/simcore-sdk/tests/__init__.py
deleted file mode 100644
index e69de29bb2d..00000000000
diff --git a/packages/simcore-sdk/tests/conftest.py b/packages/simcore-sdk/tests/conftest.py
index c96d120a5d9..f28902327b2 100644
--- a/packages/simcore-sdk/tests/conftest.py
+++ b/packages/simcore-sdk/tests/conftest.py
@@ -1,10 +1,7 @@
-import pytest
-import os
# pylint:disable=unused-argument
-pytest_plugins = ["tests.fixtures.postgres", "tests.fixtures.minio_fix", "tests.fixtures.storage"]
-
-@pytest.fixture(scope='session')
-def docker_compose_file(pytestconfig):
- my_path = os.path.join(os.path.dirname(__file__), 'docker-compose.yml')
- return my_path
+pytest_plugins = [
+ "fixtures.postgres",
+ "fixtures.minio_fix",
+ "fixtures.storage"
+]
diff --git a/packages/simcore-sdk/tests/fixtures/postgres.py b/packages/simcore-sdk/tests/fixtures/postgres.py
index 46eb2367321..6a3b673f770 100644
--- a/packages/simcore-sdk/tests/fixtures/postgres.py
+++ b/packages/simcore-sdk/tests/fixtures/postgres.py
@@ -3,7 +3,6 @@
import pytest
import sqlalchemy as sa
-from pytest_docker import docker_ip, docker_services # pylint:disable=W0611
from sqlalchemy import create_engine
from sqlalchemy.orm import sessionmaker
@@ -28,6 +27,8 @@ def is_responsive(url):
except sa.exc.OperationalError:
logging.exception("Connection to db failed")
return False
+ finally:
+ eng.dispose()
return True
diff --git a/packages/simcore-sdk/tests/node_ports/conftest.py b/packages/simcore-sdk/tests/node_ports/conftest.py
index 32baf7d285d..54214d809e5 100644
--- a/packages/simcore-sdk/tests/node_ports/conftest.py
+++ b/packages/simcore-sdk/tests/node_ports/conftest.py
@@ -1,17 +1,19 @@
#pylint: disable=W0621, unused-argument, too-many-arguments, no-name-in-module
import json
+import sys
import uuid
from pathlib import Path
from typing import Any, List, Tuple
import pytest
import yarl
-
-from helpers import helpers
from simcore_sdk.models.pipeline_models import (Base, ComputationalPipeline,
ComputationalTask)
from simcore_sdk.node_ports import node_config
+import np_helpers
+
+current_dir = Path(sys.argv[0] if __name__ == "__main__" else __file__).resolve().parent
@pytest.fixture
def user_id()->int:
@@ -20,7 +22,7 @@ def user_id()->int:
@pytest.fixture
def s3_simcore_location() ->str:
- yield helpers.SIMCORE_STORE
+ yield np_helpers.SIMCORE_STORE
@pytest.fixture
def filemanager_cfg(storage, user_id, bucket):
@@ -45,29 +47,20 @@ def create(file_path:Path, project:str=None, node:str=None):
project = project_id
if node is None:
node = node_uuid
- return helpers.file_uuid(file_path, project, node)
+ return np_helpers.file_uuid(file_path, project, node)
yield create
@pytest.fixture(scope='session')
-def here()->Path:
- yield Path(__file__).parent
-
-@pytest.fixture(scope='session')
-def docker_compose_file(bucket, pytestconfig, here): # pylint:disable=unused-argument
- my_path = here /'docker-compose.yml'
-
- yield my_path
-
-
-
+def docker_compose_file(bucket, pytestconfig): # pylint:disable=unused-argument
+ return current_dir / 'docker-compose.yml'
@pytest.fixture
-def default_configuration_file(here):
- yield here / "config" / "default_config.json"
+def default_configuration_file():
+ return current_dir / "config" / "default_config.json"
@pytest.fixture
-def empty_configuration_file(here):
- yield here / "config" / "empty_config.json"
+def empty_configuration_file():
+ return current_dir / "config" / "empty_config.json"
@pytest.fixture(scope='module')
def postgres(engine, session):
diff --git a/packages/simcore-sdk/tests/node_ports/helpers/__init__.py b/packages/simcore-sdk/tests/node_ports/helpers/__init__.py
deleted file mode 100644
index e69de29bb2d..00000000000
diff --git a/packages/simcore-sdk/tests/node_ports/helpers/helpers.py b/packages/simcore-sdk/tests/node_ports/np_helpers.py
similarity index 100%
rename from packages/simcore-sdk/tests/node_ports/helpers/helpers.py
rename to packages/simcore-sdk/tests/node_ports/np_helpers.py
diff --git a/packages/simcore-sdk/tests/node_ports/test_nodeports.py b/packages/simcore-sdk/tests/node_ports/test_nodeports.py
index d6361db24fc..6eeafc85f23 100644
--- a/packages/simcore-sdk/tests/node_ports/test_nodeports.py
+++ b/packages/simcore-sdk/tests/node_ports/test_nodeports.py
@@ -9,11 +9,10 @@
from pathlib import Path
import pytest
-
-from helpers import helpers # pylint: disable=no-name-in-module
from simcore_sdk import node_ports
from simcore_sdk.node_ports import exceptions
+import np_helpers # pylint: disable=no-name-in-module
def _check_port_valid(ports, config_dict: dict, port_type:str, key_name: str, key):
@@ -151,7 +150,7 @@ def test_adding_new_ports(special_configuration, session):
"displayOrder":2,
"type": "integer"}})
config_dict["inputs"].update({"in_15":15})
- helpers.update_configuration(session, project_id, node_uuid, config_dict) #pylint: disable=E1101
+ np_helpers.update_configuration(session, project_id, node_uuid, config_dict) #pylint: disable=E1101
check_config_valid(PORTS, config_dict)
# # replace the configuration now, add an output
@@ -161,7 +160,7 @@ def test_adding_new_ports(special_configuration, session):
"description": "a cool output",
"displayOrder":2,
"type": "boolean"}})
- helpers.update_configuration(session, project_id, node_uuid, config_dict) #pylint: disable=E1101
+ np_helpers.update_configuration(session, project_id, node_uuid, config_dict) #pylint: disable=E1101
check_config_valid(PORTS, config_dict)
@@ -175,12 +174,12 @@ def test_removing_ports(special_configuration, session):
# let's remove the first input
del config_dict["schema"]["inputs"]["in_14"]
del config_dict["inputs"]["in_14"]
- helpers.update_configuration(session, project_id, node_uuid, config_dict) #pylint: disable=E1101
+ np_helpers.update_configuration(session, project_id, node_uuid, config_dict) #pylint: disable=E1101
check_config_valid(PORTS, config_dict)
# let's do the same for the second output
del config_dict["schema"]["outputs"]["out_2"]
del config_dict["outputs"]["out_2"]
- helpers.update_configuration(session, project_id, node_uuid, config_dict) #pylint: disable=E1101
+ np_helpers.update_configuration(session, project_id, node_uuid, config_dict) #pylint: disable=E1101
check_config_valid(PORTS, config_dict)
@@ -239,7 +238,7 @@ async def test_get_file_from_previous_node_with_mapping_of_same_key_name(special
check_config_valid(PORTS, config_dict)
# add a filetokeymap
config_dict["schema"]["inputs"]["in_15"]["fileToKeyMap"] = {item_alias:"in_15"}
- helpers.update_configuration(session, project_id, this_node_uuid, config_dict) #pylint: disable=E1101
+ np_helpers.update_configuration(session, project_id, this_node_uuid, config_dict) #pylint: disable=E1101
check_config_valid(PORTS, config_dict)
file_path = await PORTS.inputs["in_15"].get()
assert isinstance(file_path, item_pytype)
@@ -266,7 +265,7 @@ async def test_file_mapping(special_configuration, project_id, node_uuid, filema
# add a filetokeymap
config_dict["schema"]["inputs"]["in_1"]["fileToKeyMap"] = {item_alias:"in_1"}
config_dict["schema"]["outputs"]["out_1"]["fileToKeyMap"] = {item_alias:"out_1"}
- helpers.update_configuration(session, project_id, node_uuid, config_dict) #pylint: disable=E1101
+ np_helpers.update_configuration(session, project_id, node_uuid, config_dict) #pylint: disable=E1101
check_config_valid(PORTS, config_dict)
file_path = await PORTS.inputs["in_1"].get()
assert isinstance(file_path, item_pytype)
@@ -283,5 +282,5 @@ async def test_file_mapping(special_configuration, project_id, node_uuid, filema
await PORTS.set_file_by_keymap(invalid_alias)
await PORTS.set_file_by_keymap(file_path)
- file_id = helpers.file_uuid(file_path, project_id, node_uuid)
+ file_id = np_helpers.file_uuid(file_path, project_id, node_uuid)
assert PORTS.outputs["out_1"].value == {"store":s3_simcore_location, "path": file_id}
diff --git a/packages/simcore-sdk/tests/test_alchemy.py b/packages/simcore-sdk/tests/test_alchemy.py
index a777a8f2cdc..389cd57fdba 100644
--- a/packages/simcore-sdk/tests/test_alchemy.py
+++ b/packages/simcore-sdk/tests/test_alchemy.py
@@ -1,14 +1,19 @@
-import pytest
+# pylint:disable=redefined-outer-name
# pylint:disable=unused-import
+
+import pytest
+# FIXME: Not sure why but if this import is removed pytest_docker
+# gets the docker_compose_file wrong in tests_nodes.
+# Somehow the fixture in packages/simcore-sdk/tests/node_ports/conftest.py
+# does not override the docker_compose_file fixture from pytest_docker!
from pytest_docker import docker_ip, docker_services
-from sqlalchemy import JSON, Column, Integer, String, create_engine
+from simcore_sdk.models.pipeline_models import (ComputationalPipeline,
+ ComputationalTask,
+ comp_pipeline, comp_tasks)
+from sqlalchemy import JSON, Column, Integer, String
from sqlalchemy.ext.declarative import declarative_base
-from sqlalchemy.orm import sessionmaker
from sqlalchemy.orm.attributes import flag_modified
-# pylint:disable=redefined-outer-name
-
-
BASE = declarative_base()
class User(BASE):
__tablename__ = 'users'
@@ -17,8 +22,6 @@ class User(BASE):
data = Column(JSON)
-
-
@pytest.mark.enable_travis
def test_alchemy(engine, session):
BASE.metadata.create_all(engine)
@@ -47,9 +50,6 @@ def test_alchemy(engine, session):
assert alpha2.data['counter'] == 42
-from simcore_sdk.models.pipeline_models import ComputationalPipeline, ComputationalTask, comp_pipeline, comp_tasks
-
-
def test_legacy_queries_with_mapper_adapter():
"""Checks to ensure that LEGACY queries still work with
mapper adapter
diff --git a/services/director/docker/boot.sh b/services/director/docker/boot.sh
index ec22a37467c..b57aae24f65 100755
--- a/services/director/docker/boot.sh
+++ b/services/director/docker/boot.sh
@@ -1,15 +1,17 @@
#!/bin/sh
#
+INFO="INFO: [`basename "$0"`] "
+ERROR="ERROR: [`basename "$0"`] "
# BOOTING application ---------------------------------------------
-echo "Booting in ${SC_BOOT_MODE} mode ..."
+echo $INFO "Booting in ${SC_BOOT_MODE} mode ..."
echo " User :`id $(whoami)`"
echo " Workdir :`pwd`"
LOG_LEVEL=info
if [[ ${SC_BUILD_TARGET} == "development" ]]
then
- echo " Environment :"
+ echo $INFO "Environment :"
printenv | sed 's/=/: /' | sed 's/^/ /' | sort
#--------------------
@@ -20,10 +22,10 @@ then
cd /devel
#--------------------
- echo " Python :"
+ echo $INFO "Python :"
python --version | sed 's/^/ /'
which python | sed 's/^/ /'
- echo " PIP :"
+ echo $INFO "PIP :"
$SC_PIP list | sed 's/^/ /'
fi
@@ -31,8 +33,9 @@ fi
if [[ ${SC_BOOT_MODE} == "debug-ptvsd" ]]
then
echo
- echo "PTVSD Debugger initializing in port 3004"
- python3 -m ptvsd --host 0.0.0.0 --port 3000 -m simcore_service_director --loglevel=$LOG_LEVEL
+ echo $INFO "PTVSD Debugger initializing in port 3004"
+ python3 -m ptvsd --host 0.0.0.0 --port 3000 -m \
+ simcore_service_director --loglevel=$LOG_LEVEL
else
- simcore-service-director --loglevel=$LOG_LEVEL
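+  # exec replaces this shell so the service receives container stop signals (e.g. SIGTERM) directly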
+ exec simcore-service-director --loglevel=$LOG_LEVEL
fi
diff --git a/services/director/docker/entrypoint.sh b/services/director/docker/entrypoint.sh
index b515f26b678..223ac9ef329 100755
--- a/services/director/docker/entrypoint.sh
+++ b/services/director/docker/entrypoint.sh
@@ -1,4 +1,7 @@
#!/bin/sh
+#
+INFO="INFO: [`basename "$0"`] "
+ERROR="ERROR: [`basename "$0"`] "
# This entrypoint script:
#
@@ -6,19 +9,18 @@
# - Notice that the container *starts* as --user [default root] but
# *runs* as non-root user [scu]
#
-echo "Entrypoint for stage ${SC_BUILD_TARGET} ..."
-echo " User :`id $(whoami)`"
-echo " Workdir :`pwd`"
+echo $INFO "Entrypoint for stage ${SC_BUILD_TARGET} ..."
+echo $INFO "User :`id $(whoami)`"
+echo $INFO "Workdir :`pwd`"
if [[ ${SC_BUILD_TARGET} == "development" ]]
then
-
# NOTE: expects docker run ... -v $(pwd):/devel/services/director
DEVEL_MOUNT=/devel/services/director
stat $DEVEL_MOUNT &> /dev/null || \
- (echo "ERROR: You must mount '$DEVEL_MOUNT' to deduce user and group ids" && exit 1) # FIXME: exit does not stop script
+ (echo $ERROR "You must mount '$DEVEL_MOUNT' to deduce user and group ids" && exit 1) # FIXME: exit does not stop script
USERID=$(stat -c %u $DEVEL_MOUNT)
GROUPID=$(stat -c %g $DEVEL_MOUNT)
@@ -67,4 +69,4 @@ then
addgroup scu $GROUPNAME
fi
-su-exec scu "$@"
+exec su-exec scu "$@"
diff --git a/services/director/src/simcore_service_director/producer.py b/services/director/src/simcore_service_director/producer.py
index 3df573216ab..dc3de526cfc 100644
--- a/services/director/src/simcore_service_director/producer.py
+++ b/services/director/src/simcore_service_director/producer.py
@@ -96,7 +96,12 @@ async def _create_docker_service_params(app: web.Application,
"SIMCORE_HOST_NAME": registry_proxy.get_service_last_names(service_key) + "_" + node_uuid
},
"Hosts": get_system_extra_hosts_raw(config.EXTRA_HOSTS_SUFFIX),
- "Init": True
+ "Init": True,
+ "Labels": {
+ "user_id": user_id,
+ "study_id": project_id,
+ "node_id": node_uuid
+ }
}
docker_params = {
"auth": await _create_auth() if config.REGISTRY_AUTH else {},
@@ -186,6 +191,11 @@ async def _create_docker_service_params(app: web.Application,
log.exception("Could not find swarm network")
log.debug("Converted labels to docker runtime parameters: %s", docker_params)
+
+ # set labels for CPU and Memory limits
+ container_spec["Labels"]["nano_cpus_limit"] = str(docker_params["task_template"]["Resources"]["Limits"]["NanoCPUs"])
+ container_spec["Labels"]["mem_limit"] = str(docker_params["task_template"]["Resources"]["Limits"]["MemoryBytes"])
+
return docker_params
diff --git a/services/docker-compose.yml b/services/docker-compose.yml
index 69978b5de28..0722aaa97aa 100644
--- a/services/docker-compose.yml
+++ b/services/docker-compose.yml
@@ -87,10 +87,10 @@ services:
- log:/home/scu/log
- /var/run/docker.sock:/var/run/docker.sock
environment:
- - RABBITMQ_USER=${RABBITMQ_USER}
- - RABBITMQ_PASSWORD=${RABBITMQ_PASSWORD}
- - RABBITMQ_LOG_CHANNEL=${RABBITMQ_LOG_CHANNEL}
- - RABBITMQ_PROGRESS_CHANNEL=${RABBITMQ_PROGRESS_CHANNEL}
+ - RABBIT_USER=${RABBIT_USER}
+ - RABBIT_PASSWORD=${RABBIT_PASSWORD}
+ - RABBIT_LOG_CHANNEL=${RABBIT_LOG_CHANNEL}
+ - RABBIT_PROGRESS_CHANNEL=${RABBIT_PROGRESS_CHANNEL}
- POSTGRES_ENDPOINT=${POSTGRES_ENDPOINT}
- POSTGRES_USER=${POSTGRES_USER}
- POSTGRES_PASSWORD=${POSTGRES_PASSWORD}
@@ -149,8 +149,8 @@ services:
image: itisfoundation/rabbitmq:3.8.0-management
init: true
environment:
- - RABBITMQ_DEFAULT_USER=${RABBITMQ_USER}
- - RABBITMQ_DEFAULT_PASS=${RABBITMQ_PASSWORD}
+ - RABBITMQ_DEFAULT_USER=${RABBIT_USER}
+ - RABBITMQ_DEFAULT_PASS=${RABBIT_PASSWORD}
networks:
- default
- computational_services_subnet
diff --git a/services/rabbit/README.md b/services/rabbit/README.md
index 1dade7b4ebb..5558d4d88f5 100644
--- a/services/rabbit/README.md
+++ b/services/rabbit/README.md
@@ -1,6 +1,6 @@
# service
-[RabbitMQ: Message broquer](https://www.rabbitmq.com/)
+[RabbitMQ: Message broker](https://www.rabbitmq.com/)
## special configuration
diff --git a/services/sidecar/docker/boot.sh b/services/sidecar/docker/boot.sh
index 2eea24a12d6..0eef90f9424 100755
--- a/services/sidecar/docker/boot.sh
+++ b/services/sidecar/docker/boot.sh
@@ -1,14 +1,16 @@
#!/bin/sh
#
+INFO="INFO: [`basename "$0"`] "
+ERROR="ERROR: [`basename "$0"`] "
# BOOTING application ---------------------------------------------
-echo "Booting in ${SC_BOOT_MODE} mode ..."
+echo $INFO "Booting in ${SC_BOOT_MODE} mode ..."
echo " User :`id $(whoami)`"
echo " Workdir :`pwd`"
if [[ ${SC_BUILD_TARGET} == "development" ]]
then
- echo " Environment :"
+ echo $INFO "Environment :"
printenv | sed 's/=/: /' | sed 's/^/ /' | sort
#--------------------
@@ -18,10 +20,10 @@ then
DEBUG_LEVEL=debug
#--------------------
- echo " Python :"
+ echo $INFO "Python :"
python --version | sed 's/^/ /'
which python | sed 's/^/ /'
- echo " PIP :"
+ echo $INFO "PIP :"
$SC_PIP list | sed 's/^/ /'
@@ -41,4 +43,4 @@ else
CONCURRENCY=2
fi
-celery worker --app sidecar.celery:app --concurrency ${CONCURRENCY} --loglevel=${DEBUG_LEVEL}
+exec celery worker --app sidecar.celery:app --concurrency ${CONCURRENCY} --loglevel=${DEBUG_LEVEL}
diff --git a/services/sidecar/docker/entrypoint.sh b/services/sidecar/docker/entrypoint.sh
index 5b8ed0ceb98..0002104f7e1 100755
--- a/services/sidecar/docker/entrypoint.sh
+++ b/services/sidecar/docker/entrypoint.sh
@@ -1,4 +1,7 @@
#!/bin/sh
+#
+INFO="INFO: [`basename "$0"`] "
+ERROR="ERROR: [`basename "$0"`] "
# This entrypoint script:
#
@@ -6,7 +9,7 @@
# - Notice that the container *starts* as --user [default root] but
# *runs* as non-root user [scu]
#
-echo "Entrypoint for stage ${SC_BUILD_TARGET} ..."
+echo $INFO "Entrypoint for stage ${SC_BUILD_TARGET} ..."
echo " User :`id $(whoami)`"
echo " Workdir :`pwd`"
echo " scuUser :`id scu`"
@@ -17,12 +20,12 @@ GROUPNAME=scu
if [[ ${SC_BUILD_TARGET} == "development" ]]
then
- echo "development mode detected..."
+ echo $INFO "development mode detected..."
# NOTE: expects docker run ... -v $(pwd):/devel/services/sidecar
DEVEL_MOUNT=/devel/services/sidecar
stat $DEVEL_MOUNT &> /dev/null || \
- (echo "ERROR: You must mount '$DEVEL_MOUNT' to deduce user and group ids" && exit 1) # FIXME: exit does not stop script
+ (echo $ERROR "You must mount '$DEVEL_MOUNT' to deduce user and group ids" && exit 1) # FIXME: exit does not stop script
USERID=$(stat -c %u $DEVEL_MOUNT)
GROUPID=$(stat -c %g $DEVEL_MOUNT)
@@ -30,17 +33,17 @@ then
if [[ $USERID -eq 0 ]]
then
- echo "mounted folder from root, adding scu to root..."
+ echo $INFO "mounted folder from root, adding scu to root..."
addgroup scu root
else
# take host's credentials in scu
if [[ -z "$GROUPNAME" ]]
then
- echo "mounted folder from $USERID, creating new group..."
+ echo $INFO "mounted folder from $USERID, creating new group..."
GROUPNAME=host_group
addgroup -g $GROUPID $GROUPNAME
else
- echo "mounted folder from $USERID, adding scu to $GROUPNAME..."
+ echo $INFO "mounted folder from $USERID, adding scu to $GROUPNAME..."
addgroup scu $GROUPNAME
fi
@@ -69,9 +72,9 @@ then
addgroup scu $GROUPNAME
fi
-echo "Starting boot ..."
+echo $INFO "Starting boot ..."
chown -R $USERNAME:$GROUPNAME /home/scu/input
chown -R $USERNAME:$GROUPNAME /home/scu/output
chown -R $USERNAME:$GROUPNAME /home/scu/log
-su-exec scu "$@"
+exec su-exec scu "$@"
diff --git a/services/sidecar/src/sidecar/celery.py b/services/sidecar/src/sidecar/celery.py
index 54b364f4b9b..9a0287a1264 100644
--- a/services/sidecar/src/sidecar/celery.py
+++ b/services/sidecar/src/sidecar/celery.py
@@ -1,27 +1,20 @@
-import logging
-
from celery import Celery
-from celery.utils.log import get_task_logger
from simcore_sdk.config.rabbit import Config as RabbitConfig
-# TODO: configure via command line or config file. Add in config.yaml
-logging.basicConfig(level=logging.DEBUG)
+from .celery_log_setup import get_task_logger
log = get_task_logger(__name__)
-log.setLevel(logging.DEBUG)
-
rabbit_config = RabbitConfig()
+log.info("Inititalizing celery app ...")
+
# TODO: make it a singleton?
app= Celery(rabbit_config.name,
broker=rabbit_config.broker,
backend=rabbit_config.backend)
-
-
-
__all__ = [
"rabbit_config",
"app"
diff --git a/services/sidecar/src/sidecar/celery_log_setup.py b/services/sidecar/src/sidecar/celery_log_setup.py
new file mode 100644
index 00000000000..98b22339e62
--- /dev/null
+++ b/services/sidecar/src/sidecar/celery_log_setup.py
@@ -0,0 +1,39 @@
+""" setup logging formatters to fit logspout's multiline pattern "^(ERROR|WARNING|INFO|DEBUG|CRITICAL)[:]"
+
+ NOTE: import to connect signals!
+
+ SEE https://github.com/ITISFoundation/osparc-ops/blob/master/services/graylog/docker-compose.yml#L113
+"""
+# NOTES:
+# https://docs.celeryproject.org/en/latest/userguide/signals.html#setup-logging
+# https://www.distributedpython.com/2018/08/28/celery-logging/
+# https://www.distributedpython.com/2018/11/06/celery-task-logger-format/
+
+import logging
+
+from celery.app.log import TaskFormatter
+from celery.signals import after_setup_logger, after_setup_task_logger
+from celery.utils.log import get_task_logger
+
+@after_setup_logger.connect
+def setup_loggers(logger, *_args, **_kwargs):
+ """ Customizes global loggers """
+ for handler in logger.handlers:
+ handler.setFormatter(logging.Formatter('%(levelname)s: [%(asctime)s/%(processName)s] %(message)s'))
+
+
+@after_setup_task_logger.connect
+def setup_task_logger(logger, *_args, **_kwargs):
+ """ Customizes task loggers """
+ for handler in logger.handlers:
+ handler.setFormatter(TaskFormatter('%(levelname)s: [%(asctime)s/%(processName)s][%(task_name)s(%(task_id)s)] %(message)s'))
+
+
+# TODO: configure via command line or config file. Add in config.yaml
+logging.basicConfig(level=logging.DEBUG)
+log = get_task_logger(__name__)
+log.info("Setting up loggers")
+
+__all__ = [
+ 'get_task_logger'
+]
diff --git a/services/sidecar/src/sidecar/core.py b/services/sidecar/src/sidecar/core.py
index c00b57dc30c..48a018dd288 100644
--- a/services/sidecar/src/sidecar/core.py
+++ b/services/sidecar/src/sidecar/core.py
@@ -325,7 +325,13 @@ def process(self):
environment=self._docker.env,
nano_cpus=config.SERVICES_MAX_NANO_CPUS,
mem_limit=config.SERVICES_MAX_MEMORY_BYTES,
- labels={'user_id': str(self._user_id), 'study_id': str(self._task.project_id), 'node_id': str(self._task.node_id)})
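+ # labels let e.g. the activity monitor relate the container to its owner and resource limits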
+ labels={
+ 'user_id': str(self._user_id),
+ 'study_id': str(self._task.project_id),
+ 'node_id': str(self._task.node_id),
+ 'nano_cpus_limit': str(config.SERVICES_MAX_NANO_CPUS),
+ 'mem_limit': str(config.SERVICES_MAX_MEMORY_BYTES)
+ })
except docker.errors.ImageNotFound:
log.exception("Run container: Image not found")
except docker.errors.APIError:
diff --git a/services/sidecar/src/sidecar/utils.py b/services/sidecar/src/sidecar/utils.py
index edad99f32c9..1e6b5e15ed7 100644
--- a/services/sidecar/src/sidecar/utils.py
+++ b/services/sidecar/src/sidecar/utils.py
@@ -4,17 +4,16 @@
import shutil
from concurrent.futures import ThreadPoolExecutor
-import tenacity
-from sqlalchemy import and_, create_engine
-from sqlalchemy.orm import sessionmaker
-
import docker
+import tenacity
from s3wrapper.s3_client import S3Client
from simcore_sdk.config.db import Config as db_config
from simcore_sdk.config.docker import Config as docker_config
from simcore_sdk.config.rabbit import Config as rabbit_config
from simcore_sdk.config.s3 import Config as s3_config
from simcore_sdk.models.pipeline_models import SUCCESS, ComputationalTask
+from sqlalchemy import and_, create_engine
+from sqlalchemy.orm import sessionmaker
def wrap_async_call(fct: asyncio.coroutine):
@@ -94,7 +93,10 @@ def __init__(self):
class DbSettings:
def __init__(self):
self._db_config = db_config()
- self.db = create_engine(self._db_config.endpoint, client_encoding='utf8', pool_pre_ping=True)
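+        # application_name makes these connections identifiable in pg_stat_activity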
+ self.db = create_engine(
+ self._db_config.endpoint + f"?application_name={__name__}_{id(self)}",
+ client_encoding='utf8',
+ pool_pre_ping=True)
self.Session = sessionmaker(self.db, expire_on_commit=False)
#self.session = self.Session()
diff --git a/services/sidecar/tests/conftest.py b/services/sidecar/tests/conftest.py
index 9a2b6f34f8b..3e5b28154b7 100644
--- a/services/sidecar/tests/conftest.py
+++ b/services/sidecar/tests/conftest.py
@@ -43,8 +43,8 @@ def docker_compose_file(here):
os.environ['POSTGRES_ENDPOINT']="FOO" # TODO: update config schema!!
os.environ['MINIO_ACCESS_KEY']=ACCESS_KEY
os.environ['MINIO_SECRET_KEY']=SECRET_KEY
- os.environ['RABBITMQ_USER']=RABBIT_USER
- os.environ['RABBITMQ_PASSWORD']=RABBIT_PWD
+ os.environ['RABBIT_USER']=RABBIT_USER
+ os.environ['RABBIT_PASSWORD']=RABBIT_PWD
dc_path = here / 'docker-compose.yml'
@@ -85,8 +85,8 @@ def postgres_service(docker_services, docker_ip):
@pytest.fixture(scope='session')
def rabbit_service(docker_services, docker_ip):
# set env var here that is explicitly used from sidecar
- os.environ['RABBITMQ_HOST'] = "{host}".format(host=docker_ip)
- os.environ['RABBITMQ_PORT'] = "{port}".format(port=docker_services.port_for('rabbit', 15672))
+ os.environ['RABBIT_HOST'] = "{host}".format(host=docker_ip)
+ os.environ['RABBIT_PORT'] = "{port}".format(port=docker_services.port_for('rabbit', 15672))
rabbit_service = "dummy"
return rabbit_service
diff --git a/services/sidecar/tests/docker-compose.yml b/services/sidecar/tests/docker-compose.yml
index 84378bb3c06..8ed270c4a0d 100644
--- a/services/sidecar/tests/docker-compose.yml
+++ b/services/sidecar/tests/docker-compose.yml
@@ -27,7 +27,7 @@ services:
rabbit:
image: rabbitmq:3-management
environment:
- - RABBITMQ_DEFAULT_USER=${RABBITMQ_USER:-rabbit}
- - RABBITMQ_DEFAULT_PASS=${RABBITMQ_PASSWORD:-carrot}
+ - RABBITMQ_DEFAULT_USER=${RABBIT_USER:-rabbit}
+ - RABBITMQ_DEFAULT_PASS=${RABBIT_PASSWORD:-carrot}
ports:
- "15672:15672"
diff --git a/services/sidecar/tests/utils.py b/services/sidecar/tests/utils.py
index 9d4ad5cf0ef..b831dccade9 100644
--- a/services/sidecar/tests/utils.py
+++ b/services/sidecar/tests/utils.py
@@ -48,17 +48,23 @@ def create_tables(url, engine=None):
client_encoding="utf8",
connect_args={"connect_timeout": 30},
pool_pre_ping=True)
+ Base.metadata.create_all(engine)
+ engine.dispose()
+ else:
+ Base.metadata.create_all(engine)
- Base.metadata.create_all(engine)
def drop_tables(url, engine=None):
- if not engine:
+ is_owned = not engine
+ if is_owned:
engine = create_engine(url,
client_encoding="utf8",
connect_args={"connect_timeout": 30},
pool_pre_ping=True)
Base.metadata.drop_all(engine)
+ if is_owned:
+ engine.dispose()
def setup_sleepers(url):
db_engine = create_engine(url,
diff --git a/services/storage/docker/boot.sh b/services/storage/docker/boot.sh
index b5b56a9de13..96bd6ce3a8e 100755
--- a/services/storage/docker/boot.sh
+++ b/services/storage/docker/boot.sh
@@ -1,15 +1,17 @@
#!/bin/sh
#
+INFO="INFO: [`basename "$0"`] "
+ERROR="ERROR: [`basename "$0"`] "
# BOOTING application ---------------------------------------------
-echo "Booting in ${SC_BOOT_MODE} mode ..."
+echo $INFO "Booting in ${SC_BOOT_MODE} mode ..."
if [[ ${SC_BUILD_TARGET} == "development" ]]
then
- echo " User :`id $(whoami)`"
- echo " Workdir :`pwd`"
- echo " Environment :"
+ echo $INFO "User :`id $(whoami)`"
+ echo $INFO "Workdir :`pwd`"
+ echo $INFO "Environment :"
printenv | sed 's/=/: /' | sed 's/^/ /' | sort
#--------------------
@@ -20,17 +22,20 @@ then
cd /devel
#--------------------
- echo " Python :"
+ echo $INFO "Python :"
python --version | sed 's/^/ /'
which python | sed 's/^/ /'
- echo " PIP :"
+ echo $INFO "PIP :"
$SC_PIP list | sed 's/^/ /'
+ #------------
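+  # watchmedo (from the watchdog package) restarts the service whenever a *.py file changes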
+ echo " setting entrypoint to use watchmedo autorestart..."
+ entrypoint='watchmedo auto-restart --recursive --pattern="*.py" --'
elif [[ ${SC_BUILD_TARGET} == "production" ]]
then
APP_CONFIG=docker-prod-config.yaml
-
+ entrypoint=''
fi
@@ -38,14 +43,15 @@ fi
if [[ ${SC_BOOT_MODE} == "debug-pdb" ]]
then
# NOTE: needs stdin_open: true and tty: true
- echo "Debugger attached: https://docs.python.org/3.6/library/pdb.html#debugger-commands ..."
- echo "Running: import pdb, simcore_service_storage.cli; pdb.run('simcore_service_storage.cli.main([\'-c\',\'${APP_CONFIG}\'])')"
- python -c "import pdb, simcore_service_storage.cli; \
+ echo $INFO "Debugger attached: https://docs.python.org/3.6/library/pdb.html#debugger-commands ..."
+ echo $INFO "Running: import pdb, simcore_service_storage.cli; pdb.run('simcore_service_storage.cli.main([\'-c\',\'${APP_CONFIG}\'])')"
+ eval $INFO "$entrypoint" python -c "import pdb, simcore_service_storage.cli; \
pdb.run('simcore_service_storage.cli.main([\'-c\',\'${APP_CONFIG}\'])')"
elif [[ ${SC_BOOT_MODE} == "debug-ptvsd" ]]
then
- echo "PTVSD Debugger initializing in port 3003 with ${APP_CONFIG}"
- python3 -m ptvsd --host 0.0.0.0 --port 3000 -m simcore_service_storage --config $APP_CONFIG
+ echo $INFO "PTVSD Debugger initializing in port 3003 with ${APP_CONFIG}"
+ eval "$entrypoint" python3 -m ptvsd --host 0.0.0.0 --port 3000 -m \
+ simcore_service_storage --config $APP_CONFIG
else
- simcore-service-storage --config $APP_CONFIG
+ exec simcore-service-storage --config $APP_CONFIG
fi
diff --git a/services/storage/docker/entrypoint.sh b/services/storage/docker/entrypoint.sh
index 7287ba763ec..b042885f16b 100755
--- a/services/storage/docker/entrypoint.sh
+++ b/services/storage/docker/entrypoint.sh
@@ -1,4 +1,7 @@
#!/bin/sh
+#
+INFO="INFO: [`basename "$0"`] "
+ERROR="ERROR: [`basename "$0"`] "
# This entrypoint script:
#
@@ -6,13 +9,13 @@
# - Notice that the container *starts* as --user [default root] but
# *runs* as non-root user [scu]
#
-echo "Entrypoint for stage ${SC_BUILD_TARGET} ..."
+echo $INFO "Entrypoint for stage ${SC_BUILD_TARGET} ..."
echo " User :`id $(whoami)`"
echo " Workdir :`pwd`"
-echo "updating certificates..."
+echo $INFO "updating certificates..."
update-ca-certificates
-echo "certificates updated"
+echo $INFO "certificates updated"
if [[ ${SC_BUILD_TARGET} == "development" ]]
then
@@ -20,7 +23,7 @@ then
DEVEL_MOUNT=/devel/services/storage
stat $DEVEL_MOUNT &> /dev/null || \
- (echo "ERROR: You must mount '$DEVEL_MOUNT' to deduce user and group ids" && exit 1) # FIXME: exit does not stop script
+ (echo $ERROR "You must mount '$DEVEL_MOUNT' to deduce user and group ids" && exit 1) # FIXME: exit does not stop script
USERID=$(stat -c %u $DEVEL_MOUNT)
GROUPID=$(stat -c %g $DEVEL_MOUNT)
@@ -50,5 +53,5 @@ then
python3 -m pip install ptvsd
fi
-echo "Starting boot ..."
-su-exec scu "$@"
+echo $INFO "Starting boot ..."
+exec su-exec scu "$@"
diff --git a/services/storage/requirements/dev.txt b/services/storage/requirements/dev.txt
index 6c5e1bd9203..02aae5125ff 100644
--- a/services/storage/requirements/dev.txt
+++ b/services/storage/requirements/dev.txt
@@ -6,6 +6,9 @@
# pip install -r requirements/dev.txt
#
+# installs watchdog utility
+watchdog[watchmedo]
+
# installs base + tests requirements
-r _test.txt
diff --git a/services/storage/src/simcore_service_storage/application.py b/services/storage/src/simcore_service_storage/application.py
index 4b185a1f878..4c12074f6cb 100644
--- a/services/storage/src/simcore_service_storage/application.py
+++ b/services/storage/src/simcore_service_storage/application.py
@@ -2,37 +2,35 @@
Functions to create, setup and run an aiohttp application provided a configuration object
"""
+import json
import logging
+from typing import Dict
from aiohttp import web
+
+from servicelib.application import create_safe_application
from servicelib.monitoring import setup_monitoring
-from servicelib.client_session import persistent_client_session
from .db import setup_db
from .dsm import setup_dsm
from .rest import setup_rest
from .s3 import setup_s3
-from .settings import APP_CONFIG_KEY
log = logging.getLogger(__name__)
-def create(config):
- log.debug("Creating and setting up application")
-
- app = web.Application()
- app[APP_CONFIG_KEY] = config
+def create(config: Dict) -> web.Application:
+ log.debug("Initializing app with config:\n%s",
+ json.dumps(config, indent=2, sort_keys=True))
- # NOTE: ensure client session is context is run first, then any further get_client_sesions will be correctly closed
- app.cleanup_ctx.append(persistent_client_session)
+ app = create_safe_application(config)
setup_db(app) # -> postgres service
setup_s3(app) # -> minio service
setup_dsm(app) # core subsystem. Needs s3 and db setups done
setup_rest(app) # lastly, we expose API to the world
- monitoring = config["main"]["monitoring_enabled"]
- if monitoring:
+ if config["main"].get("monitoring_enabled", False):
setup_monitoring(app, "simcore_service_storage")
return app
diff --git a/services/storage/src/simcore_service_storage/db.py b/services/storage/src/simcore_service_storage/db.py
index c8cd8da730c..ce601849053 100644
--- a/services/storage/src/simcore_service_storage/db.py
+++ b/services/storage/src/simcore_service_storage/db.py
@@ -8,7 +8,7 @@
from servicelib.aiopg_utils import DBAPIError
from .models import metadata
-from .settings import APP_CONFIG_KEY, APP_DB_ENGINE_KEY, APP_DB_SESSION_KEY
+from .settings import APP_CONFIG_KEY, APP_DB_ENGINE_KEY
log = logging.getLogger(__name__)
@@ -22,36 +22,30 @@
@retry( wait=wait_fixed(RETRY_WAIT_SECS),
stop=stop_after_attempt(RETRY_COUNT),
- before_sleep=before_sleep_log(log, logging.INFO) )
+ before_sleep=before_sleep_log(log, logging.INFO),
+ reraise=True)
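+# reraise=True re-raises the original exception once all retry attempts are exhausted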
async def __create_tables(**params):
- sa_engine = sa.create_engine(DSN.format(**params))
- metadata.create_all(sa_engine)
-
-async def pg_engine(app: web.Application):
- engine = None
try:
- cfg = app[APP_CONFIG_KEY][THIS_SERVICE_NAME]
- params = {k:cfg[k] for k in 'database user password host port'.split()}
- await __create_tables(**params)
- engine = await create_engine(**params)
+ url = DSN.format(**params) + f"?application_name={__name__}_init"
+ sa_engine = sa.create_engine(url)
+ metadata.create_all(sa_engine)
+ finally:
+ sa_engine.dispose()
- except Exception: # pylint: disable=W0703
- log.exception("Could not create engine")
+async def pg_engine(app: web.Application):
+ cfg = app[APP_CONFIG_KEY][THIS_SERVICE_NAME]
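+    # minsize/maxsize bound the aiopg connection pool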
+ params = {key:cfg[key] for key in 'database user password host port minsize maxsize'.split()}
- session = None
- app[APP_DB_ENGINE_KEY] = engine
- app[APP_DB_SESSION_KEY] = session
+ # TODO: set this as optional?
+ await __create_tables(**params)
- yield
+ async with create_engine(application_name=f'{__name__}_{id(app)}', **params) as engine:
+ app[APP_DB_ENGINE_KEY] = engine
- session = app.get(APP_DB_SESSION_KEY)
- if session:
- session.close()
+ yield # ----------
- engine = app.get(APP_DB_ENGINE_KEY)
- if engine:
- engine.close()
- await engine.wait_closed()
+ if engine is not app.get(APP_DB_ENGINE_KEY):
+ log.error("app does not hold right db engine")
async def is_service_responsive(app:web.Application):
""" Returns true if the app can connect to db service
@@ -68,17 +62,14 @@ async def is_service_responsive(app:web.Application):
return False
def setup_db(app: web.Application):
-
disable_services = app[APP_CONFIG_KEY].get("main", {}).get("disable_services",[])
if THIS_SERVICE_NAME in disable_services:
- app[APP_DB_ENGINE_KEY] = app[APP_DB_SESSION_KEY] = None
+ app[APP_DB_ENGINE_KEY] = None
log.warning("Service '%s' explicitly disabled in config", THIS_SERVICE_NAME)
return
app[APP_DB_ENGINE_KEY] = None
- app[APP_DB_SESSION_KEY] = None
-
# app is created at this point but not yet started
log.debug("Setting up %s [service: %s] ...", __name__, THIS_SERVICE_NAME)
diff --git a/services/storage/src/simcore_service_storage/dsm.py b/services/storage/src/simcore_service_storage/dsm.py
index 868db6408f8..2fe85dfb2ae 100644
--- a/services/storage/src/simcore_service_storage/dsm.py
+++ b/services/storage/src/simcore_service_storage/dsm.py
@@ -8,18 +8,19 @@
from pathlib import Path
from typing import Dict, List, Tuple
-import aiobotocore
import aiofiles
import attr
import sqlalchemy as sa
from aiohttp import web
from aiopg.sa import Engine
+from sqlalchemy.sql import and_
+from yarl import URL
+
+import aiobotocore
from blackfynn.base import UnauthorizedException
from s3wrapper.s3_client import S3Client
from servicelib.aiopg_utils import DBAPIError
from servicelib.client_session import get_client_session
-from sqlalchemy.sql import and_
-from yarl import URL
from .datcore_wrapper import DatcoreWrapper
from .models import (DatasetMetaData, FileMetaData, FileMetaDataEx,
diff --git a/services/storage/src/simcore_service_storage/settings.py b/services/storage/src/simcore_service_storage/settings.py
index 2b10069e48b..97b9f984947 100644
--- a/services/storage/src/simcore_service_storage/settings.py
+++ b/services/storage/src/simcore_service_storage/settings.py
@@ -63,7 +63,6 @@
# DATABASE ----------------------------
APP_DB_ENGINE_KEY = __name__ + '.db_engine'
-APP_DB_SESSION_KEY = __name__ + '.db_session'
# DATA STORAGE MANAGER ----------------------------------
diff --git a/services/storage/tests/conftest.py b/services/storage/tests/conftest.py
index 518c86766b6..b103bf99204 100644
--- a/services/storage/tests/conftest.py
+++ b/services/storage/tests/conftest.py
@@ -101,7 +101,9 @@ def postgres_service(docker_services, docker_ip):
'password': PASS,
'database': DATABASE,
'host': docker_ip,
- 'port': docker_services.port_for('postgres', 5432)
+ 'port': docker_services.port_for('postgres', 5432),
+ 'minsize':1,
+ 'maxsize':4
}
return postgres_service
diff --git a/services/storage/tests/utils.py b/services/storage/tests/utils.py
index c8057fcbce4..2fd099e65d9 100644
--- a/services/storage/tests/utils.py
+++ b/services/storage/tests/utils.py
@@ -101,6 +101,7 @@ def insert_metadata(url: str, fmd: FileMetaData):
engine = sa.create_engine(url)
conn = engine.connect()
conn.execute(ins)
+ engine.dispose()
def create_full_tables(url):
meta = sa.MetaData()
@@ -143,9 +144,11 @@ def create_full_tables(url):
# with open(csv_file, 'r') as file:
# data_df = pd.read_csv(file)
# data_df.to_sql(t, con=engine, index=False, index_label="id", if_exists='append')
+ engine.dispose()
def drop_all_tables(url):
meta = sa.MetaData()
engine = sa.create_engine(url)
meta.drop_all(bind=engine, tables=[file_meta_data, projects, user_to_projects, users])
+ engine.dispose()
diff --git a/services/web/client/source/class/osparc/component/filter/ServiceFilter.js b/services/web/client/source/class/osparc/component/filter/AutocompleteFilter.js
similarity index 75%
rename from services/web/client/source/class/osparc/component/filter/ServiceFilter.js
rename to services/web/client/source/class/osparc/component/filter/AutocompleteFilter.js
index b7d88455806..7239ae96e06 100644
--- a/services/web/client/source/class/osparc/component/filter/ServiceFilter.js
+++ b/services/web/client/source/class/osparc/component/filter/AutocompleteFilter.js
@@ -6,9 +6,9 @@
*/
/**
- * Filter used for filtering services. Gets the list of services and uses them as possible options for the dropdown.
+ * Filter with a dropdown and autocomplete
*/
-qx.Class.define("osparc.component.filter.ServiceFilter", {
+qx.Class.define("osparc.component.filter.AutocompleteFilter", {
extend: osparc.component.filter.UIFilter,
/**
@@ -20,18 +20,11 @@ qx.Class.define("osparc.component.filter.ServiceFilter", {
this.base(arguments, filterId, groupId);
this._setLayout(new qx.ui.layout.Canvas());
- this.__autocompleteField = this.getChildControl("autocompletefield").set({
- placeholder: this.tr("Filter by service")
- });
+ this.__autocompleteField = this.getChildControl("autocompletefield");
this.getChildControl("clearbutton");
- const services = osparc.store.Store.getInstance().getServices();
- const dropdownData = Object.keys(services).map(key => {
- const split = key.split("/");
- return split[split.length-1];
- });
- this.__autocompleteField.setModel(new qx.data.Array(dropdownData));
+ this.__attachEventHandlers();
},
properties: {
@@ -68,6 +61,14 @@ qx.Class.define("osparc.component.filter.ServiceFilter", {
break;
}
return control || this.base(arguments, id);
+ },
+
+ __attachEventHandlers: function() {
+ this.__autocompleteField.addListener("changeValue", e => this._filterChange(e.getData()), this);
+ },
+
+ buildMenu: function(menuData) {
+ this.__autocompleteField.setModel(new qx.data.Array(menuData));
}
}
});
diff --git a/services/web/client/source/class/osparc/component/filter/TagsFilter.js b/services/web/client/source/class/osparc/component/filter/TagsFilter.js
index 88e40b78e45..bae830295a3 100644
--- a/services/web/client/source/class/osparc/component/filter/TagsFilter.js
+++ b/services/web/client/source/class/osparc/component/filter/TagsFilter.js
@@ -98,6 +98,10 @@ qx.Class.define("osparc.component.filter.TagsFilter", {
this.__menu = new qx.ui.menu.Menu();
this._dropdown.setMenu(this.__menu);
}
+ if (this.__menu.getChildren().find(button => button.getLabel && button.getLabel() === tagName)) {
+ // Don't add repeated options
+ return;
+ }
const button = new qx.ui.menu.Button(tagName);
button.addListener("execute", e => this.__addTag(tagName, e.getTarget()));
this.__menu.add(button);
diff --git a/services/web/client/source/class/osparc/component/service/manager/ActivityManager.js b/services/web/client/source/class/osparc/component/service/manager/ActivityManager.js
index 0be284fb80f..7e97008a273 100644
--- a/services/web/client/source/class/osparc/component/service/manager/ActivityManager.js
+++ b/services/web/client/source/class/osparc/component/service/manager/ActivityManager.js
@@ -22,14 +22,24 @@ qx.Class.define("osparc.component.service.manager.ActivityManager", {
this.__createFiltersBar();
this.__createActivityTree();
+ this.__createFetchingView();
this.__createActionsBar();
- this.__updateTree();
+ this.__reloadButton.fireEvent("execute");
+ },
+
+ statics: {
+ itemTypes: {
+ STUDY: "study",
+ SERVICE: "service"
+ }
},
members: {
__tree: null,
__studyFilter: null,
+ __fetchingView: null,
+ __reloadButton: null,
/**
* Creates the top bar that holds the filtering widgets.
*/
@@ -41,55 +51,45 @@ qx.Class.define("osparc.component.service.manager.ActivityManager", {
const filtersContainer = new qx.ui.container.Composite(new qx.ui.layout.HBox());
const nameFilter = new osparc.component.filter.TextFilter("name", "activityMonitor");
const studyFilter = this.__studyFilter = new osparc.component.filter.StudyFilter("study", "activityMonitor");
- const serviceFilter = new osparc.component.filter.ServiceFilter("service", "activityMonitor");
filtersContainer.add(nameFilter);
filtersContainer.add(studyFilter);
- filtersContainer.add(serviceFilter);
filtersPart.add(filtersContainer);
this._add(toolbar);
nameFilter.getChildControl("textfield").setPlaceholder(this.tr("Filter by name"));
- // React to filter changes
- const msgName = osparc.utils.Utils.capitalize("activityMonitor", "filter");
- qx.event.message.Bus.getInstance().subscribe(msgName, msg => {
- const model = this.__tree.getDataModel();
- const filterText = msg.getData().name;
- const filterStudy = msg.getData().study;
- const filter = targetNode => {
- const nameFilterFn = node => {
- if (filterText && filterText.length) {
- if (node.type === qx.ui.treevirtual.MTreePrimitive.Type.BRANCH) {
- return true;
- } else if (node.label.indexOf(filterText) === -1) {
- return false;
- }
- }
- return true;
- };
- const studyFilterFn = node => {
- if (filterStudy && filterStudy.length) {
- if (node.type === qx.ui.treevirtual.MTreePrimitive.Type.LEAF) {
- return true;
- } else if (filterStudy.includes(node.label)) {
- return true;
- }
- return false;
- }
- return true;
- };
- return nameFilterFn(targetNode) && studyFilterFn(targetNode);
- };
- model.setFilter(filter);
- }, this);
+ osparc.data.Resources.get("studies")
+ .then(studies => studyFilter.buildMenu(studies));
},
/**
* Creates the main view, holding an instance of {osparc.component.service.manager.ActivityTree}.
*/
__createActivityTree: function() {
- const tree = this.__tree = new osparc.component.service.manager.ActivityTree();
- this._add(tree, {
+ this.__tree = new osparc.component.service.manager.ActivityTree();
+ this._add(this.__tree, {
+ flex: 1
+ });
+ this.__tree.addListener("treeUpdated", () => {
+ osparc.data.Resources.get("studies")
+ .then(studies => this.__studyFilter.buildMenu(studies));
+ }, this);
+ },
+
+ /**
+ * Creates a simple view with a fetching icon.
+ */
+ __createFetchingView: function() {
+ this.__fetchingView = new qx.ui.container.Composite(new qx.ui.layout.VBox().set({
+ alignX: "center",
+ alignY: "middle"
+ })).set({
+ visibility: "excluded"
+ });
+ const image = new qx.ui.basic.Image("@FontAwesome5Solid/circle-notch/26");
+ image.getContentElement().addClass("rotate");
+ this.__fetchingView.add(image);
+ this._add(this.__fetchingView, {
flex: 1
});
},
@@ -99,52 +99,42 @@ qx.Class.define("osparc.component.service.manager.ActivityManager", {
*/
__createActionsBar: function() {
const toolbar = new qx.ui.toolbar.ToolBar();
- const actionsPart = new qx.ui.toolbar.Part();
- toolbar.addSpacer();
- toolbar.add(actionsPart);
+ const tablePart = new qx.ui.toolbar.Part();
+ // const actionsPart = new qx.ui.toolbar.Part();
+ toolbar.add(tablePart);
+ // toolbar.addSpacer();
+ // toolbar.add(actionsPart);
+
+ const reloadButton = this.__reloadButton = new qx.ui.toolbar.Button(this.tr("Reload"), "@FontAwesome5Solid/sync-alt/14");
+ tablePart.add(reloadButton);
+ reloadButton.addListener("execute", () => {
+ this.__tree.exclude();
+ this.__fetchingView.show();
+ this.__tree.reset().then(() => {
+ this.__tree.show();
+ this.__fetchingView.exclude();
+ });
+ }, this);
+ /*
const runButton = new qx.ui.toolbar.Button(this.tr("Run"), "@FontAwesome5Solid/play/14");
actionsPart.add(runButton);
+ runButton.addListener("execute", () => osparc.component.message.FlashMessenger.getInstance().logAs("Not implemented"));
const stopButton = new qx.ui.toolbar.Button(this.tr("Stop"), "@FontAwesome5Solid/stop-circle/14");
actionsPart.add(stopButton);
+ stopButton.addListener("execute", () => osparc.component.message.FlashMessenger.getInstance().logAs("Not implemented"));
const infoButton = new qx.ui.toolbar.Button(this.tr("Info"), "@FontAwesome5Solid/info/14");
actionsPart.add(infoButton);
+ infoButton.addListener("execute", () => osparc.component.message.FlashMessenger.getInstance().logAs("Not implemented"));
- this._add(toolbar);
- },
+ [runButton, stopButton, infoButton].map(button => this.__tree.bind("selected", button, "enabled", {
+ converter: data => data.length > 0
+ }));
+ */
- /**
- * This functions updates the tree with the most recent data.
- */
- __updateTree: function() {
- const call = osparc.data.Resources.get("studies");
- call.then(studies => {
- const model = this.__tree.getDataModel();
- model.clearData();
- studies.forEach(study => {
- let parent = null;
- for (let key in study.workbench) {
- const node = study.workbench[key];
- const metadata = osparc.utils.Services.getNodeMetaData(node.key, node.version);
- if (metadata && metadata.type === "computational") {
- if (parent === null) {
- parent = model.addBranch(null, study.name, true);
- }
- const rowId = model.addLeaf(parent, node.label);
- if (metadata.key && metadata.key.length) {
- const splitted = metadata.key.split("/");
- model.setColumnData(rowId, 1, splitted[splitted.length-1]);
- }
- }
- }
- });
- model.setData();
- this.__studyFilter.buildMenu(studies);
- }).catch(e => {
- console.error(e);
- });
+ this._add(toolbar);
}
}
});
diff --git a/services/web/client/source/class/osparc/component/service/manager/ActivityTree.js b/services/web/client/source/class/osparc/component/service/manager/ActivityTree.js
index fdac1e6d1e9..de3da840709 100644
--- a/services/web/client/source/class/osparc/component/service/manager/ActivityTree.js
+++ b/services/web/client/source/class/osparc/component/service/manager/ActivityTree.js
@@ -8,29 +8,297 @@
/**
* This is a table that displays the status of running services in real time. In some cases it simulates the behavior of a tree.
* It has sorting and resizing capabilities, and its UI changes depending on the display mode, which in turn depends on the active sorting type.
- * WiP
*/
qx.Class.define("osparc.component.service.manager.ActivityTree", {
- extend: qx.ui.treevirtual.TreeVirtual,
+ extend: osparc.ui.table.Table,
/**
* Constructor sets the model and general look.
*/
construct: function() {
- this.base(arguments, [
- "Node",
- "Service",
- "Status",
- "CPU usage",
- "GPU usage"
- ], {
- treeDataCellRenderer: new qx.ui.treevirtual.SimpleTreeDataCellRenderer().set({
- useTreeLines: false
- })
- });
- this.set({
- decorator: "no-border",
- padding: 0
+ this.__model = new qx.ui.table.model.Simple();
+ this.__model.setColumns([
+ this.tr("Type"),
+ this.tr("Node"),
+ this.tr("Service"),
+ this.tr("Status"),
+ this.tr("CPU usage"),
+ this.tr("Memory usage")
+ ]);
+ this.base(arguments, this.__model, {
+ tableColumnModel: obj => new qx.ui.table.columnmodel.Resize(obj),
+ initiallyHiddenColumns: [0]
});
+ const columnModel = this.getTableColumnModel();
+ columnModel.getBehavior().setMinWidth(1, 80);
+ columnModel.getBehavior().setMinWidth(2, 80);
+
+ columnModel.setDataCellRenderer(4, new osparc.ui.table.cellrenderer.Percentage("#2c7cce"));
+ columnModel.setDataCellRenderer(5, new osparc.ui.table.cellrenderer.Percentage("#358475").set({
+ unit: "MB"
+ }));
+
+ this.getSelectionModel().setSelectionMode(qx.ui.table.selection.Model.MULTIPLE_INTERVAL_SELECTION_TOGGLE);
+
+ this._applyMode(this.getMode());
+
+ this.__filters = {};
+ this.__sorting = {};
+
+ this.__attachEventHandlers();
+ },
+
+ properties: {
+ mode: {
+ check: "String",
+ nullable: false,
+ init: "hierarchical",
+ apply: "_applyMode"
+ },
+ selected: {
+ check: "Array",
+ init: [],
+ event: "changeSelection"
+ },
+ alwaysUpdate: {
+ check: "Boolean",
+ init: true,
+ nullable: false
+ }
+ },
+
+ statics: {
+ modes: {
+ HIERARCHICAL: "hierarchical",
+ FLAT: "flat"
+ }
+ },
+
+ events: {
+ treeUpdated: "qx.event.type.Event"
+ },
+
+ members: {
+ __model: null,
+ __filters: null,
+ __sorting: null,
+ __serviceNames: null,
+
+ _applyMode: function(mode) {
+ const columnModel = this.getTableColumnModel();
+ switch (mode) {
+ case this.self().modes.HIERARCHICAL:
+ columnModel.setDataCellRenderer(1,
+ new qx.ui.table.cellrenderer.Dynamic(cellInfo => {
+ if (cellInfo.rowData[0] === osparc.component.service.manager.ActivityManager.itemTypes.SERVICE) {
+ return new osparc.ui.table.cellrenderer.Indented(1);
+ }
+ return new osparc.ui.table.cellrenderer.Indented(0);
+ })
+ );
+ break;
+ case this.self().modes.FLAT:
+ columnModel.setDataCellRenderer(1, new qx.ui.table.cellrenderer.Default());
+ break;
+ }
+ },
+
+ _applyFilters: function(filters) {
+ this.__filters = filters;
+ const filterText = filters.name;
+ const filterStudy = filters.study;
+ // Filtering function
+ // By text
+ const nameFilterFn = row => {
+ if (row[0] === osparc.component.service.manager.ActivityManager.itemTypes.STUDY) {
+ return true;
+ }
+ if (filterText && filterText.length > 1) {
+ const trimmedText = filterText.trim().toLowerCase();
+ return row[1].trim().toLowerCase()
+ .includes(trimmedText) ||
+ row[2].trim().toLowerCase()
+ .includes(trimmedText);
+ }
+ return true;
+ };
+ const studyFilterFn = (row, index, array) => {
+ if (row[0] === osparc.component.service.manager.ActivityManager.itemTypes.SERVICE) {
+ return true;
+ }
+ if (filterStudy && filterStudy.length && !filterStudy.includes(row[1])) {
+ // Remove also its services
+ let i = index + 1;
+ let next = array[i];
+ while (next && next[0] === osparc.component.service.manager.ActivityManager.itemTypes.SERVICE && i < array.length) {
+ array.splice(i, 1);
+ next = array[i];
+ }
+ return false;
+ }
+ return true;
+ };
+ // Apply filters (working on a copy of the data)
+ const filteredData = [...this.getData()].filter(studyFilterFn).filter(nameFilterFn);
+ this.getTableModel().setData(this.__removeEmptyStudies(filteredData), false);
+ if (this.__hasActiveSorting()) {
+ const {
+ columnIndex,
+ ascending
+ } = this.__sorting;
+ this.getTableModel().sortByColumn(columnIndex, ascending);
+ }
+ },
+
+ __removeEmptyStudies: function(data) {
+ return data.filter((item, index, array) => {
+ if (item[0] === osparc.component.service.manager.ActivityManager.itemTypes.STUDY) {
+ if (index === array.length-1) {
+ return false;
+ }
+ if (item[0] === array[index+1][0]) {
+ return false;
+ }
+ }
+ return true;
+ });
+ },
+
+ __removeStudies: function(data) {
+ return data.filter(item => item[0] !== osparc.component.service.manager.ActivityManager.itemTypes.STUDY);
+ },
+
+ /**
+     * This function updates the tree with the most recent data.
+ */
+ update: function() {
+ return Promise.all([osparc.data.Resources.get("studies"), osparc.data.Resources.getOne("activity")])
+ .then(async data => {
+ const studies = data[0] || {};
+ const activity = data[1] || {};
+
+ // Get service names
+ if (this.__serviceNames === null) {
+ this.__serviceNames = await osparc.data.Resources.get("servicesTodo");
+ }
+
+ const rows = [];
+ studies.forEach(study => {
+ let parentAdded = false;
+ for (var key in study.workbench) {
+ const node = study.workbench[key];
+ if (this.getMode() !== this.self().modes.FLAT && !parentAdded) {
+ rows.push([
+ osparc.component.service.manager.ActivityManager.itemTypes.STUDY,
+ study.name,
+ "",
+ "",
+ -1,
+ -1
+ ]);
+ parentAdded = true;
+ }
+ const row = [];
+ // type
+ row[0] = osparc.component.service.manager.ActivityManager.itemTypes.SERVICE;
+ // given name
+ row[1] = node.label;
+ // original name
+ if (this.__serviceNames[node.key]) {
+ row[2] = this.__serviceNames[node.key];
+ } else {
+ const splitted = node.key.split("/");
+ row[2] = splitted[splitted.length - 1];
+ }
+ if (Object.keys(activity).includes(key)) {
+ const stats = activity[key].stats;
+ const queued = activity[key].queued;
+ const limits = activity[key].limits;
+ if (stats) {
+ row[4] = stats.cpuUsage == null ? null : (Math.round(stats.cpuUsage * 10) / 10) + (limits && limits.cpus ? `/${limits.cpus * 100}` : ""); // eslint-disable-line no-eq-null
+ row[5] = stats.memUsage == null ? null : (Math.round(stats.memUsage * 10) / 10) + (limits && limits.mem ? `/${limits.mem}` : ""); // eslint-disable-line no-eq-null
+ row[3] = this.tr("Running");
+ }
+ if (queued) {
+ row[3] = this.tr("Queued");
+ }
+ } else {
+ row[3] = this.tr("Not running");
+ }
+ rows.push(row);
+ }
+ });
+ this.setData(rows);
+ if (this.__hasActiveFilters()) {
+ this._applyFilters(this.__filters);
+ }
+ if (this.__hasActiveSorting()) {
+ const {
+ columnIndex,
+ ascending
+ } = this.__sorting;
+ this.__model.sortByColumn(columnIndex, ascending);
+ }
+ this.fireEvent("treeUpdated");
+ })
+ .catch(e => {
+ console.error(e);
+ })
+ .then(() => {
+        // Give a 2-second delay
+ setTimeout(() => {
+ if (this.getAlwaysUpdate()) {
+ this.update();
+ }
+ }, 2000);
+ });
+ },
+
+ __hasActiveFilters: function() {
+ if (this.__filters.name && this.__filters.name.length) {
+ return true;
+ }
+ if (this.__filters.study && this.__filters.study.length) {
+ return true;
+ }
+ return false;
+ },
+
+ __hasActiveSorting: function() {
+ if (Object.keys(this.__sorting).length) {
+ return true;
+ }
+ return false;
+ },
+
+ reset: function() {
+ this.__sorting = {};
+ this.getTableModel().clearSorting();
+ this.setMode(this.self().modes.HIERARCHICAL);
+ return this.update();
+ },
+
+ __attachEventHandlers: function() {
+ // React to filter changes
+ const msgName = osparc.utils.Utils.capitalize("activityMonitor", "filter");
+ qx.event.message.Bus.getInstance().subscribe(msgName, msg => this._applyFilters(msg.getData()), this);
+
+ this.__model.addListener("sorted", e => {
+ this.__sorting = e.getData();
+ this.setMode(this.self().modes.FLAT);
+ this.getTableModel().setData(this.__removeStudies(this.getTableModel().getData()), false);
+ }, this);
+
+ this.getSelectionModel().addListener("changeSelection", e => {
+ this.setSelected(this.getSelection());
+ }, this);
+
+ this.addListener("disappear", () => {
+ this.setAlwaysUpdate(false);
+ }, this);
+ this.addListener("appear", () => {
+ this.resetAlwaysUpdate();
+ }, this);
+ }
}
});
diff --git a/services/web/client/source/class/osparc/data/Resources.js b/services/web/client/source/class/osparc/data/Resources.js
index 9efc0fff528..e3acaad9a0c 100644
--- a/services/web/client/source/class/osparc/data/Resources.js
+++ b/services/web/client/source/class/osparc/data/Resources.js
@@ -311,6 +311,18 @@ qx.Class.define("osparc.data.Resources", {
url: statics.API + "/storage/locations/{locationId}/files/{fileUuid}"
}
}
+ },
+ /*
+ * ACTIVITY
+ */
+ activity: {
+ usesCache: false,
+ endpoints: {
+ getOne: {
+ method: "GET",
+ url: statics.API + "/activity/status"
+ }
+ }
}
};
},
@@ -385,7 +397,7 @@ qx.Class.define("osparc.data.Resources", {
}
console.log(`Fetching ${resource} from server.`);
}
- return this.fetch(resource, "getOne", params);
+ return this.fetch(resource, "getOne", params || {});
},
/**
@@ -403,7 +415,7 @@ qx.Class.define("osparc.data.Resources", {
}
console.log(`Fetching ${resource} from server.`);
}
- return this.fetch(resource, "get", params);
+ return this.fetch(resource, "get", params || {});
},
/**
diff --git a/services/web/client/source/class/osparc/desktop/NavigationBar.js b/services/web/client/source/class/osparc/desktop/NavigationBar.js
index a2f87cfa795..244ea01b90f 100644
--- a/services/web/client/source/class/osparc/desktop/NavigationBar.js
+++ b/services/web/client/source/class/osparc/desktop/NavigationBar.js
@@ -190,10 +190,9 @@ qx.Class.define("osparc.desktop.NavigationBar", {
__createUserBtn: function() {
const menu = new qx.ui.menu.Menu();
- // Feature OFF
- // const activityManager = new qx.ui.menu.Button(this.tr("Activity manager"));
- // activityManager.addListener("execute", this.__openActivityManager, this);
- // menu.add(activityManager);
+ const activityManager = new qx.ui.menu.Button(this.tr("Activity manager"));
+ activityManager.addListener("execute", this.__openActivityManager, this);
+ menu.add(activityManager);
const preferences = new qx.ui.menu.Button(this.tr("Preferences"));
preferences.addListener("execute", this.__onOpenAccountSettings, this);
@@ -246,21 +245,20 @@ qx.Class.define("osparc.desktop.NavigationBar", {
win.center();
win.open();
}
- }
+ },
- // FEATURE OFF
- // __openActivityManager: function() {
- // const activityWindow = new qx.ui.window.Window(this.tr("Activity manager")).set({
- // height: 480,
- // width: 600,
- // layout: new qx.ui.layout.Grow(),
- // appearance: "service-window",
- // showMinimize: false,
- // contentPadding: 0
- // });
- // activityWindow.add(new osparc.component.service.manager.ActivityManager());
- // activityWindow.center();
- // activityWindow.open();
- // }
+ __openActivityManager: function() {
+ const activityWindow = new osparc.ui.window.SingletonWindow("activityManager", this.tr("Activity manager")).set({
+ height: 600,
+ width: 800,
+ layout: new qx.ui.layout.Grow(),
+ appearance: "service-window",
+ showMinimize: false,
+ contentPadding: 0
+ });
+ activityWindow.add(new osparc.component.service.manager.ActivityManager());
+ activityWindow.center();
+ activityWindow.open();
+ }
}
});
diff --git a/services/web/client/source/class/osparc/file/FilesTree.js b/services/web/client/source/class/osparc/file/FilesTree.js
index e00e7666173..afba3bf894d 100644
--- a/services/web/client/source/class/osparc/file/FilesTree.js
+++ b/services/web/client/source/class/osparc/file/FilesTree.js
@@ -136,10 +136,10 @@ qx.Class.define("osparc.file.FilesTree", {
populateTree: function(nodeId = null, locationId = null) {
if (nodeId) {
this.__populateNodeFiles(nodeId);
- } else if (locationId) {
- this.__populateMyLocation(locationId);
- } else {
+ } else if (locationId === null) {
this.__populateMyData();
+ } else {
+ this.__populateMyLocation(locationId);
}
this.getDelegate().configureItem = item => {
diff --git a/services/web/client/source/class/osparc/store/Data.js b/services/web/client/source/class/osparc/store/Data.js
index c9a5b181bb3..61f11bc9129 100644
--- a/services/web/client/source/class/osparc/store/Data.js
+++ b/services/web/client/source/class/osparc/store/Data.js
@@ -50,7 +50,8 @@ qx.Class.define("osparc.store.Data", {
"myDocuments": "qx.event.type.Data",
"nodeFiles": "qx.event.type.Data",
"fileCopied": "qx.event.type.Data",
- "deleteFile": "qx.event.type.Data"
+ "deleteFile": "qx.event.type.Data",
+ "presignedLink": "qx.event.type.Data"
},
members: {
diff --git a/services/web/client/source/class/osparc/ui/table/Table.js b/services/web/client/source/class/osparc/ui/table/Table.js
new file mode 100644
index 00000000000..70159dd6d5c
--- /dev/null
+++ b/services/web/client/source/class/osparc/ui/table/Table.js
@@ -0,0 +1,39 @@
+/*
+ * oSPARC - The SIMCORE frontend - https://osparc.io
+ * Copyright: 2019 IT'IS Foundation - https://itis.swiss
+ * License: MIT - https://opensource.org/licenses/MIT
+ * Authors: Ignacio Pascual (ignapas)
+ */
+
+/**
+ * Qooxdoo's table widget with some convenient methods.
+ */
+qx.Class.define("osparc.ui.table.Table", {
+ extend: qx.ui.table.Table,
+
+ properties: {
+ data: {
+ check: "Array",
+ apply: "_applyData"
+ }
+ },
+
+ members: {
+ getSelection: function() {
+ const ret = [];
+ const selectionRanges = this.getSelectionModel().getSelectedRanges();
+ if (selectionRanges.length > 0) {
+ selectionRanges.forEach(range => {
+ for (let i=range.minIndex; i<=range.maxIndex; i++) {
+ ret.push(this.getTableModel().getRowData(i));
+ }
+ });
+ }
+ return ret;
+ },
+
+ _applyData: function(data) {
+ this.getTableModel().setData(data, false);
+ }
+ }
+});
diff --git a/services/web/client/source/class/osparc/ui/table/cellrenderer/Indented.js b/services/web/client/source/class/osparc/ui/table/cellrenderer/Indented.js
new file mode 100644
index 00000000000..04bcf13870f
--- /dev/null
+++ b/services/web/client/source/class/osparc/ui/table/cellrenderer/Indented.js
@@ -0,0 +1,50 @@
+/*
+ * oSPARC - The SIMCORE frontend - https://osparc.io
+ * Copyright: 2019 IT'IS Foundation - https://itis.swiss
+ * License: MIT - https://opensource.org/licenses/MIT
+ * Authors: Ignacio Pascual (ignapas)
+ */
+
+qx.Class.define("osparc.ui.table.cellrenderer.Indented", {
+ extend: qx.ui.table.cellrenderer.Default,
+
+ construct: function(indentation) {
+ this.base(arguments);
+ if (indentation) {
+ this.setIndentation(indentation);
+ } else {
+ this.__updateIndentation();
+ }
+ },
+
+ statics: {
+ TAB_SIZE: 4
+ },
+
+ properties: {
+ indentation: {
+ check: "Integer",
+ nullable: false,
+ init: 0,
+ apply: "_applyIndentation"
+ }
+ },
+
+ members: {
+ __indentString: null,
+ // overridden
+ _getContentHtml: function(cellInfo) {
+ const pre = this.base(arguments, cellInfo);
+ return this.__indentString + pre;
+ },
+
+ _applyIndentation: function() {
+ this.__updateIndentation();
+ },
+
+ __updateIndentation: function() {
+ const tab = Array(this.self().TAB_SIZE + 1).join(" ");
+ this.__indentString = Array(this.getIndentation() + 1).join(tab);
+ }
+ }
+});
diff --git a/services/web/client/source/class/osparc/ui/table/cellrenderer/Percentage.js b/services/web/client/source/class/osparc/ui/table/cellrenderer/Percentage.js
new file mode 100644
index 00000000000..ea33b0af32b
--- /dev/null
+++ b/services/web/client/source/class/osparc/ui/table/cellrenderer/Percentage.js
@@ -0,0 +1,45 @@
+/*
+ * oSPARC - The SIMCORE frontend - https://osparc.io
+ * Copyright: 2019 IT'IS Foundation - https://itis.swiss
+ * License: MIT - https://opensource.org/licenses/MIT
+ * Authors: Ignacio Pascual (ignapas)
+ */
+
+qx.Class.define("osparc.ui.table.cellrenderer.Percentage", {
+ extend: qx.ui.table.cellrenderer.Html,
+
+ construct: function(color) {
+ this.base(arguments, "center");
+ this.setColor(color);
+ },
+
+ properties: {
+ color: {
+ check: "String",
+ nullable: false
+ },
+ unit: {
+ check: "String",
+ nullable: false,
+ init: "%"
+ }
+ },
+
+ members: {
+ // overridden
+ _getContentHtml: function(cellInfo) {
+ if (cellInfo.value == null || cellInfo.value < 0) { // eslint-disable-line no-eq-null
+ return "";
+ }
+ const splitted = cellInfo.value.split("/");
+      const width = !isNaN(parseFloat(splitted[0])) && splitted.length === 2 ? this._calculateWidthPercentage(splitted[0], splitted[1]) : 0;
+      return "" +
+        `<div style="position: absolute; left: 0; right: 0; text-align: center;">${splitted[0]} ${this.getUnit()}</div>` +
+        `<div style="height: 100%; width: ${width}%; background-color: ${this.getColor()};"></div>`;
+ },
+
+ _calculateWidthPercentage: function(value, limit) {
+ return (value / limit) * 100;
+ }
+ }
+});
diff --git a/services/web/client/source/class/osparc/ui/table/cellrenderer/Unit.js b/services/web/client/source/class/osparc/ui/table/cellrenderer/Unit.js
new file mode 100644
index 00000000000..a47a71540c5
--- /dev/null
+++ b/services/web/client/source/class/osparc/ui/table/cellrenderer/Unit.js
@@ -0,0 +1,32 @@
+/*
+ * oSPARC - The SIMCORE frontend - https://osparc.io
+ * Copyright: 2019 IT'IS Foundation - https://itis.swiss
+ * License: MIT - https://opensource.org/licenses/MIT
+ * Authors: Ignacio Pascual (ignapas)
+ */
+
+qx.Class.define("osparc.ui.table.cellrenderer.Unit", {
+ extend: qx.ui.table.cellrenderer.Html,
+
+ construct: function(unit) {
+ this.base(arguments, "center");
+ this.setUnit(unit);
+ },
+
+ properties: {
+ unit: {
+ check: "String",
+ nullable: false
+ }
+ },
+
+ members: {
+ // overridden
+ _getContentHtml: function(cellInfo) {
+ if (cellInfo.value == null || cellInfo.value < 0) { // eslint-disable-line no-eq-null
+ return "";
+ }
+ return `${cellInfo.value} ${this.getUnit()}`;
+ }
+ }
+});
diff --git a/services/web/client/source/class/osparc/ui/window/SingletonWindow.js b/services/web/client/source/class/osparc/ui/window/SingletonWindow.js
new file mode 100644
index 00000000000..280935ae996
--- /dev/null
+++ b/services/web/client/source/class/osparc/ui/window/SingletonWindow.js
@@ -0,0 +1,30 @@
+/*
+ * oSPARC - The SIMCORE frontend - https://osparc.io
+ * Copyright: 2019 IT'IS Foundation - https://itis.swiss
+ * License: MIT - https://opensource.org/licenses/MIT
+ * Authors: Ignacio Pascual (ignapas)
+ */
+
+qx.Class.define("osparc.ui.window.SingletonWindow", {
+ extend: qx.ui.window.Window,
+
+ construct: function(id, caption, icon) {
+ this.setId(id);
+ const singletonWindows = qx.core.Init.getApplication().getRoot()
+ .getChildren()
+ .filter(child => child.classname === this.classname);
+ const thisWindow = singletonWindows.find(win => win.getId() === id);
+ if (thisWindow) {
+ console.log(`Trying to create another SingletonWindow with id ${id}, disposing the old one...`);
+ thisWindow.dispose();
+ }
+ this.base(arguments, caption, icon);
+ },
+
+ properties: {
+ id: {
+ check: "String",
+ nullable: false
+ }
+ }
+});
diff --git a/services/web/server/Makefile b/services/web/server/Makefile
index e2e94d5ac93..5a2caebc2b7 100644
--- a/services/web/server/Makefile
+++ b/services/web/server/Makefile
@@ -10,30 +10,31 @@ ROOT_DIR = $(realpath $(CURDIR)/../../../)
VENV_DIR ?= $(realpath $(ROOT_DIR)/.venv)
-.PHONY: install-dev
-install-dev: ## install app in edit mode for development
+.PHONY: install
+install: ## install app in edit mode for development [DEV]
# installing in edit mode
@$(VENV_DIR)/bin/pip3 install -r requirements/dev.txt
+
.PHONY: tests
-tests: ## runs tests
+tests: ## runs all tests [DEV]
# running unit tests
- @$(VENV_DIR)/bin/pytest --cov=simcore_service_${APP_NAME} --cov-append -v -m "not travis" $(CURDIR)/tests/unit
+ @$(VENV_DIR)/bin/pytest -vv -x --ff --pdb $(CURDIR)/tests/unit
+ # running integration tests
+ @$(VENV_DIR)/bin/pytest -vv -x --ff --pdb $(CURDIR)/tests/integration
.PHONY: build
-build: ## builds docker image
+build: ## builds docker image (using main services/docker-compose-build.yml)
@$(MAKE) -C ${ROOT_DIR} target=${APP_NAME} build
-.PHONY: clean .check-clean
-.check-clean:
+.PHONY: clean
+clean: ## cleans all unversioned files in project and temp files created by this makefile
+ # Cleaning unversioned
@git clean -ndxf -e .vscode/
@echo -n "Are you sure? [y/N] " && read ans && [ $${ans:-N} = y ]
@echo -n "$(shell whoami), are you REALLY sure? [y/N] " && read ans && [ $${ans:-N} = y ]
-
-clean: .check-clean ## cleans all unversioned files in project and temp files create by this makefile
- # Cleaning unversioned
@git clean -dxf -e .vscode/
diff --git a/services/web/server/docker/boot.sh b/services/web/server/docker/boot.sh
index 7ba86f1031d..a29565bcdec 100755
--- a/services/web/server/docker/boot.sh
+++ b/services/web/server/docker/boot.sh
@@ -1,14 +1,17 @@
#!/bin/sh
#
+INFO="INFO: [`basename "$0"`] "
+ERROR="ERROR: [`basename "$0"`] "
+
# BOOTING application ---------------------------------------------
-echo "Booting in ${SC_BOOT_MODE} mode ..."
+echo $INFO "Booting in ${SC_BOOT_MODE} mode ..."
echo " User :`id $(whoami)`"
echo " Workdir :`pwd`"
if [[ ${SC_BUILD_TARGET} == "development" ]]
then
- echo " Environment :"
+ echo $INFO "Environment :"
printenv | sed 's/=/: /' | sed 's/^/ /' | sort
#------------
@@ -19,15 +22,20 @@ then
cd /devel
#------------
- echo " Python :"
+ echo $INFO "Python :"
python --version | sed 's/^/ /'
which python | sed 's/^/ /'
- echo " PIP :"
+ echo $INFO "PIP :"
$SC_PIP list | sed 's/^/ /'
+ #------------
+ echo $INFO "setting entrypoint to use watchmedo autorestart..."
+ entrypoint='watchmedo auto-restart --recursive --pattern="*.py" --'
+
elif [[ ${SC_BUILD_TARGET} == "production" ]]
then
APP_CONFIG=server-docker-prod.yaml
+ entrypoint=''
fi
@@ -35,15 +43,16 @@ fi
if [[ ${SC_BOOT_MODE} == "debug-pdb" ]]
then
# NOTE: needs stdin_open: true and tty: true
- echo "Debugger attached: https://docs.python.org/3.6/library/pdb.html#debugger-commands ..."
- echo "Running: import pdb, simcore_service_server.cli; pdb.run('simcore_service_server.cli.main([\'-c\',\'${APP_CONFIG}\'])')"
- python -c "import pdb, simcore_service_server.cli; \
+ echo $INFO "Debugger attached: https://docs.python.org/3.6/library/pdb.html#debugger-commands ..."
+ echo $INFO "Running: import pdb, simcore_service_server.cli; pdb.run('simcore_service_server.cli.main([\'-c\',\'${APP_CONFIG}\'])')"
+ eval "$entrypoint" python -c "import pdb, simcore_service_server.cli; \
pdb.run('simcore_service_server.cli.main([\'-c\',\'${APP_CONFIG}\'])')"
elif [[ ${SC_BOOT_MODE} == "debug-ptvsd" ]]
then
# NOTE: needs ptvsd installed
- echo "PTVSD Debugger initializing in port 3000 with ${APP_CONFIG}"
- python3 -m ptvsd --host 0.0.0.0 --port 3000 -m simcore_service_webserver --config $APP_CONFIG
+ echo $INFO "PTVSD Debugger initializing in port 3000 with ${APP_CONFIG}"
+ eval "$entrypoint" python3 -m ptvsd --host 0.0.0.0 --port 3000 -m \
+ simcore_service_webserver --config $APP_CONFIG
else
- simcore-service-webserver --config $APP_CONFIG
+ exec simcore-service-webserver --config $APP_CONFIG
fi
diff --git a/services/web/server/docker/entrypoint.sh b/services/web/server/docker/entrypoint.sh
index 2a9c4a8a1b9..2b5dee218b5 100755
--- a/services/web/server/docker/entrypoint.sh
+++ b/services/web/server/docker/entrypoint.sh
@@ -1,4 +1,7 @@
#!/bin/sh
+#
+INFO="INFO: [`basename "$0"`] "
+ERROR="ERROR: [`basename "$0"`] "
# This entrypoint script:
#
@@ -6,7 +9,7 @@
# - Notice that the container *starts* as --user [default root] but
# *runs* as non-root user [scu]
#
-echo "Entrypoint for stage ${SC_BUILD_TARGET} ..."
+echo $INFO "Entrypoint for stage ${SC_BUILD_TARGET} ..."
echo " User :`id $(whoami)`"
echo " Workdir :`pwd`"
@@ -17,7 +20,7 @@ then
DEVEL_MOUNT=/devel/services/web/server
stat $DEVEL_MOUNT &> /dev/null || \
- (echo "ERROR: You must mount '$DEVEL_MOUNT' to deduce user and group ids" && exit 1) # FIXME: exit does not stop script
+ (echo $ERROR "You must mount '$DEVEL_MOUNT' to deduce user and group ids" && exit 1) # FIXME: exit does not stop script
USERID=$(stat -c %u $DEVEL_MOUNT)
GROUPID=$(stat -c %g $DEVEL_MOUNT)
@@ -47,4 +50,4 @@ then
python3 -m pip install ptvsd
fi
-su-exec scu "$@"
+exec su-exec scu "$@"
diff --git a/services/web/server/requirements/_base.txt b/services/web/server/requirements/_base.txt
index ba7dcdea1c3..31252eae57c 100644
--- a/services/web/server/requirements/_base.txt
+++ b/services/web/server/requirements/_base.txt
@@ -2,7 +2,7 @@
# This file is autogenerated by pip-compile
# To update, run:
#
-# pip-compile _base.in
+# pip-compile --output-file=_base.txt _base.in
#
aadict==0.2.3 # via asset
aio-pika==2.9.0
diff --git a/services/web/server/requirements/_test.in b/services/web/server/requirements/_test.in
index bf6ae74766d..af23373a70e 100644
--- a/services/web/server/requirements/_test.in
+++ b/services/web/server/requirements/_test.in
@@ -20,7 +20,7 @@ pytest-runner
Faker
openapi-spec-validator # TODO: this library is limiting jsonschema<3 (see base.in)
tenacity
-docker # for integration tests
+docker
# tools
pylint
diff --git a/services/web/server/requirements/_test.txt b/services/web/server/requirements/_test.txt
index f061d3c86ed..7be7e044822 100644
--- a/services/web/server/requirements/_test.txt
+++ b/services/web/server/requirements/_test.txt
@@ -22,7 +22,7 @@ atomicwrites==1.3.0 # via pytest
attrs==19.1.0
billiard==3.6.0.0
celery==4.3.0
-certifi==2019.3.9 # via requests
+certifi==2019.9.11 # via requests
cffi==1.12.3
change-case==0.5.2
chardet==3.0.4
@@ -82,7 +82,7 @@ trafaret-config==2.0.2
trafaret==1.2.0
typed-ast==1.3.5 # via astroid
typing-extensions==3.7.2
-urllib3==1.25.3 # via requests
+urllib3==1.25.6 # via requests
vine==1.3.0
wcwidth==0.1.7 # via pytest
websocket-client==0.56.0 # via docker
diff --git a/services/web/server/requirements/dev.txt b/services/web/server/requirements/dev.txt
index 5c8b3eeb176..bd571f5a243 100644
--- a/services/web/server/requirements/dev.txt
+++ b/services/web/server/requirements/dev.txt
@@ -6,6 +6,9 @@
# pip install -r requirements/dev.txt
#
+# installs watchdog utility
+watchdog[watchmedo]
+
# installs base + tests requirements
-r _test.txt
diff --git a/services/web/server/src/simcore_service_webserver/activity/__init__.py b/services/web/server/src/simcore_service_webserver/activity/__init__.py
new file mode 100644
index 00000000000..7a641b323e3
--- /dev/null
+++ b/services/web/server/src/simcore_service_webserver/activity/__init__.py
@@ -0,0 +1,48 @@
+import asyncio
+import logging
+
+from aiohttp import web
+from servicelib.application_keys import APP_CONFIG_KEY
+from servicelib.application_setup import ModuleCategory, app_module_setup
+from servicelib.rest_routing import (get_handlers_from_namespace,
+ iter_path_operations,
+ map_handlers_with_operations)
+
+from ..rest_config import APP_OPENAPI_SPECS_KEY
+from . import handlers
+from .config import CONFIG_SECTION_NAME
+
+logger = logging.getLogger(__name__)
+
+@app_module_setup(
+ __name__,
+ category=ModuleCategory.ADDON,
+ depends=['simcore_service_webserver.rest'],
+ logger=logger)
+def setup(app: web.Application):
+
+ # setup routes ------------
+ specs = app[APP_OPENAPI_SPECS_KEY]
+
+ def include_path(tup_object):
+ _method, path, _operation_id = tup_object
+        return any(tail in path for tail in ['/activity/status'])
+
+ handlers_dict = {
+ 'get_status': handlers.get_status
+ }
+
+ routes = map_handlers_with_operations(
+ handlers_dict,
+ filter(include_path, iter_path_operations(specs)),
+ strict=True
+ )
+ app.router.add_routes(routes)
+
+
+# alias
+setup_activity = setup
+
+__all__ = (
+    'setup_activity',
+)
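The include_path predicate above keeps only the activity operations out of the (method, path, operation_id) tuples that iter_path_operations yields from the OpenAPI specs. A self-contained sketch of that filtering idea, with hand-written stand-in tuples:

ops = [
    ("GET", "/v0/activity/status", "get_status"),
    ("GET", "/v0/projects", "list_projects"),
]

def include_path(tup_object):
    _method, path, _operation_id = tup_object
    return any(tail in path for tail in ['/activity/status'])

# only the activity operation survives the filter
assert [op[2] for op in filter(include_path, ops)] == ["get_status"]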
diff --git a/services/web/server/src/simcore_service_webserver/activity/config.py b/services/web/server/src/simcore_service_webserver/activity/config.py
new file mode 100644
index 00000000000..0bc5d2a675f
--- /dev/null
+++ b/services/web/server/src/simcore_service_webserver/activity/config.py
@@ -0,0 +1,14 @@
+""" Activity manager configuration
+ - config-file schema
+ - prometheus endpoint information
+"""
+import trafaret as T
+
+CONFIG_SECTION_NAME = "activity"
+
+schema = T.Dict({
+ T.Key("enabled", default=True, optional=True): T.Bool(),
+ T.Key("prometheus_host", default='http://prometheus', optional=False): T.String(),
+ T.Key("prometheus_port", default=9090, optional=False): T.Int(),
+ T.Key("prometheus_api_version", default='v1', optional=False): T.String()
+})
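Trafaret schemas validate with .check(), which fills in the declared defaults and raises trafaret.DataError on bad input. A quick sketch of how the section above behaves:

import trafaret as T

schema = T.Dict({
    T.Key("enabled", default=True, optional=True): T.Bool(),
    T.Key("prometheus_host", default='http://prometheus'): T.String(),
    T.Key("prometheus_port", default=9090): T.Int(),
    T.Key("prometheus_api_version", default='v1'): T.String()
})

cfg = schema.check({"prometheus_port": 9091})
assert cfg["enabled"] is True          # default filled in
assert cfg["prometheus_port"] == 9091  # override kept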
diff --git a/services/web/server/src/simcore_service_webserver/activity/handlers.py b/services/web/server/src/simcore_service_webserver/activity/handlers.py
new file mode 100644
index 00000000000..3aa85a4140d
--- /dev/null
+++ b/services/web/server/src/simcore_service_webserver/activity/handlers.py
@@ -0,0 +1,115 @@
+import asyncio
+
+import aiohttp
+from servicelib.application_keys import APP_CONFIG_KEY
+from servicelib.client_session import get_client_session
+from servicelib.request_keys import RQT_USERID_KEY
+from yarl import URL
+
+from ..computation_handlers import get_celery
+from ..login.decorators import login_required
+
+
+async def query_prometheus(session, url, query):
+ async with session.get(url.with_query(query=query)) as resp:
+ result = await resp.json()
+ return result
+
+def celery_reserved(app):
+ return get_celery(app).control.inspect().reserved()
+
+#
+# Functions getting the data to be executed async
+#
+async def get_cpu_usage(session, url, user_id):
+ cpu_query = f'sum by (container_label_node_id) (irate(container_cpu_usage_seconds_total{{container_label_node_id=~".+", container_label_user_id="{user_id}"}}[20s])) * 100'
+ return await query_prometheus(session, url, cpu_query)
+
+async def get_memory_usage(session, url, user_id):
+ memory_query = f'container_memory_usage_bytes{{container_label_node_id=~".+", container_label_user_id="{user_id}"}} / 1000000'
+ return await query_prometheus(session, url, memory_query)
+
+async def get_celery_reserved(app):
+ return celery_reserved(app)
+
+async def get_container_metric_for_labels(session, url, user_id):
+ just_a_metric = f'container_cpu_user_seconds_total{{container_label_node_id=~".+", container_label_user_id="{user_id}"}}'
+ return await query_prometheus(session, url, just_a_metric)
+
+
+def get_prometheus_result_or_default(result, default):
+    if isinstance(result, Exception):
+        # NOTE: the exception is silently swallowed and the default returned
+        return default
+    return result['data']['result']
+
+
+@login_required
+async def get_status(request: aiohttp.web.Request):
+ session = get_client_session(request.app)
+
+ user_id = request.get(RQT_USERID_KEY, -1)
+
+ config = request.app[APP_CONFIG_KEY]['activity']
+ url = URL(config.get('prometheus_host')).with_port(config.get('prometheus_port')).with_path('api/' + config.get('prometheus_api_version') + '/query')
+ results = await asyncio.gather(
+ get_cpu_usage(session, url, user_id),
+ get_memory_usage(session, url, user_id),
+ get_celery_reserved(request.app),
+ get_container_metric_for_labels(session, url, user_id),
+ return_exceptions=True
+ )
+ cpu_usage = get_prometheus_result_or_default(results[0], [])
+ mem_usage = get_prometheus_result_or_default(results[1], [])
+ metric = get_prometheus_result_or_default(results[3], [])
+ celery_inspect = results[2]
+
+ res = {}
+ for node in cpu_usage:
+ node_id = node['metric']['container_label_node_id']
+ usage = float(node['value'][1])
+ res[node_id] = {
+ 'stats': {
+ 'cpuUsage': usage
+ }
+ }
+
+ for node in mem_usage:
+ node_id = node['metric']['container_label_node_id']
+ usage = float(node['value'][1])
+ if node_id in res:
+ res[node_id]['stats']['memUsage'] = usage
+ else:
+ res[node_id] = {
+ 'stats': {
+ 'memUsage': usage
+ }
+ }
+
+ for node in metric:
+ limits = {
+ 'cpus': 0,
+ 'mem': 0
+ }
+ metric_labels = node['metric']
+ limits['cpus'] = float(metric_labels.get('container_label_nano_cpus_limit', 0)) / pow(10, 9) # Nanocpus to cpus
+ limits['mem'] = float(metric_labels.get('container_label_mem_limit', 0)) / pow(1024, 2) # In MB
+ node_id = metric_labels.get('container_label_node_id')
+ res[node_id]['limits'] = limits
+
+    if hasattr(celery_inspect, 'items'):
+        for dummy_worker_id, worker in celery_inspect.items():
+            for task in worker:
+                if task['args'][1:-1].split(', ')[0] == str(user_id): # Extracts user_id from task's args
+                    node_id = task['args'][1:-1].split(', ')[2][1:-1] # Extracts node_id from task's args
+ if node_id in res:
+ res[node_id]['queued'] = True
+ else:
+ res[node_id] = {
+ 'queued': True
+ }
+
+    if not res:
+ raise aiohttp.web.HTTPNoContent
+
+ return res
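Seen from a client, the handler above merges the Prometheus and Celery results into a single dict keyed by node_id, which is what the frontend table consumes. A hedged sketch of fetching it, with an illustrative base URL:

import asyncio
import aiohttp

async def fetch_activity(base_url: str = "http://localhost:9081/v0"):
    async with aiohttp.ClientSession() as session:
        async with session.get(f"{base_url}/activity/status") as resp:
            if resp.status == 204:  # HTTPNoContent: nothing running or queued
                return {}
            payload = await resp.json()
            # e.g. {"<node_id>": {"stats": {"cpuUsage": 3.99, "memUsage": 177.66},
            #                     "limits": {"cpus": 4.0, "mem": 2048.0},
            #                     "queued": True}}
            return payload

# asyncio.run(fetch_activity())  # requires a running webserver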
diff --git a/services/web/server/src/simcore_service_webserver/application.py b/services/web/server/src/simcore_service_webserver/application.py
index dd8df96b65c..90c9c0d5468 100644
--- a/services/web/server/src/simcore_service_webserver/application.py
+++ b/services/web/server/src/simcore_service_webserver/application.py
@@ -11,6 +11,7 @@
from servicelib.monitoring import setup_monitoring
from servicelib.application_setup import app_module_setup, ModuleCategory
+from .activity import setup_activity
from .application_proxy import setup_app_proxy
from .computation import setup_computation
from .db import setup_db
@@ -69,6 +70,7 @@ def create_application(config: Dict) -> web.Application:
setup_users(app)
setup_projects(app) # needs storage
setup_studies_access(app)
+ setup_activity(app)
setup_app_proxy(app) # TODO: under development!!!
return app
diff --git a/services/web/server/src/simcore_service_webserver/application_config.py b/services/web/server/src/simcore_service_webserver/application_config.py
index 71675788853..59e9db8345e 100644
--- a/services/web/server/src/simcore_service_webserver/application_config.py
+++ b/services/web/server/src/simcore_service_webserver/application_config.py
@@ -25,6 +25,7 @@
from . import (computation_config, db_config, email_config, rest_config,
session_config, storage_config)
from .director import config as director_config
+from .activity import config as activity_config
from .login import config as login_config
from .projects import config as projects_config
from .resources import resources
@@ -68,6 +69,7 @@ def create_schema() -> T.Dict:
storage_config.CONFIG_SECTION_NAME: storage_config.schema,
addon_section(login_config.CONFIG_SECTION_NAME, optional=True): login_config.schema,
session_config.CONFIG_SECTION_NAME: session_config.schema,
+ activity_config.CONFIG_SECTION_NAME: activity_config.schema,
#TODO: s3_config.CONFIG_SECTION_NAME: s3_config.schema
#TODO: enable when sockets are refactored
# BELOW HERE minimal sections until more options are needed
diff --git a/services/web/server/src/simcore_service_webserver/config/server-defaults.yaml b/services/web/server/src/simcore_service_webserver/config/server-defaults.yaml
index 5637a010b77..94a8cfe177e 100644
--- a/services/web/server/src/simcore_service_webserver/config/server-defaults.yaml
+++ b/services/web/server/src/simcore_service_webserver/config/server-defaults.yaml
@@ -23,11 +23,14 @@ db:
port: 5432
user: simcore
rabbit:
+ enabled: True
+ host: rabbit
+ port: 5672
+ user: simcore
+ password: simcore
channels:
log: comp.backend.channels.log
progress: comp.backend.channels.progress
- password: simcore
- user: simcore
# s3:
# access_key: 'Q3AM3UQ867SPQQA43P2F'
# bucket_name: simcore
@@ -57,3 +60,8 @@ projects:
location: http://localhost:8043/api/specs/webserver/v0/components/schemas/project-v0.0.1.json
session:
secret_key: "TODO: Replace with a key of at least length 32"
+activity:
+ enabled: True
+ prometheus_host: http://prometheus
+ prometheus_port: 9090
+ prometheus_api_version: v1
diff --git a/services/web/server/src/simcore_service_webserver/config/server-docker-dev.yaml b/services/web/server/src/simcore_service_webserver/config/server-docker-dev.yaml
index c8184baaa5b..a182fd28fcf 100644
--- a/services/web/server/src/simcore_service_webserver/config/server-docker-dev.yaml
+++ b/services/web/server/src/simcore_service_webserver/config/server-docker-dev.yaml
@@ -25,16 +25,21 @@ db:
rabbit:
host: ${RABBIT_HOST}
port: ${RABBIT_PORT}
- user: ${RABBITMQ_USER}
- password: ${RABBITMQ_PASSWORD}
+ user: ${RABBIT_USER}
+ password: ${RABBIT_PASSWORD}
channels:
- progress: ${RABBITMQ_PROGRESS_CHANNEL}
- log: ${RABBITMQ_LOG_CHANNEL}
+ progress: ${RABBIT_PROGRESS_CHANNEL}
+ log: ${RABBIT_LOG_CHANNEL}
# s3:
# endpoint: ${S3_ENDPOINT}
# access_key: ${S3_ACCESS_KEY}
# secret_key: ${S3_SECRET_KEY}
# bucket_name: ${S3_BUCKET_NAME}
+activity:
+ enabled: True
+ prometheus_host: ${WEBSERVER_PROMETHEUS_HOST}
+ prometheus_port: ${WEBSERVER_PROMETHEUS_PORT}
+ prometheus_api_version: ${WEBSERVER_PROMETHEUS_API_VERSION}
login:
enabled: True
registration_invitation_required: False
diff --git a/services/web/server/src/simcore_service_webserver/config/server-docker-prod.yaml b/services/web/server/src/simcore_service_webserver/config/server-docker-prod.yaml
index a3a2d285522..838103380a3 100644
--- a/services/web/server/src/simcore_service_webserver/config/server-docker-prod.yaml
+++ b/services/web/server/src/simcore_service_webserver/config/server-docker-prod.yaml
@@ -23,11 +23,13 @@ db:
host: ${POSTGRES_HOST}
port: ${POSTGRES_PORT}
rabbit:
- user: ${RABBITMQ_USER}
- password: ${RABBITMQ_PASSWORD}
+ host: ${RABBIT_HOST}
+ port: ${RABBIT_PORT}
+ user: ${RABBIT_USER}
+ password: ${RABBIT_PASSWORD}
channels:
- progress: ${RABBITMQ_PROGRESS_CHANNEL}
- log: ${RABBITMQ_LOG_CHANNEL}
+ progress: ${RABBIT_PROGRESS_CHANNEL}
+ log: ${RABBIT_LOG_CHANNEL}
# s3:
# endpoint: ${S3_ENDPOINT}
# access_key: ${S3_ACCESS_KEY}
@@ -56,4 +58,9 @@ projects:
session:
# python3 -c "from cryptography.fernet import Fernet; print(Fernet.generate_key())"
secret_key: ${WEBSERVER_SESSION_SECRET_KEY}
+activity:
+ enabled: True
+ prometheus_host: ${WEBSERVER_PROMETHEUS_HOST}
+ prometheus_port: ${WEBSERVER_PROMETHEUS_PORT}
+ prometheus_api_version: ${WEBSERVER_PROMETHEUS_API_VERSION}
...
diff --git a/services/web/server/src/simcore_service_webserver/db.py b/services/web/server/src/simcore_service_webserver/db.py
index 745d93d2f64..ad7a00993ff 100644
--- a/services/web/server/src/simcore_service_webserver/db.py
+++ b/services/web/server/src/simcore_service_webserver/db.py
@@ -13,12 +13,11 @@
from servicelib.aiopg_utils import DBAPIError
from servicelib.application_keys import APP_CONFIG_KEY, APP_DB_ENGINE_KEY
-from servicelib.application_setup import app_module_setup,ModuleCategory
+from servicelib.application_setup import ModuleCategory, app_module_setup
from .db_config import CONFIG_SECTION_NAME
from .db_models import metadata
-# SETTINGS ----------------------------------------------------
THIS_MODULE_NAME = __name__.split(".")[-1]
THIS_SERVICE_NAME = 'postgres'
DSN = "postgresql://{user}:{password}@{host}:{port}/{database}" # Data Source Name. TODO: sync with config
@@ -26,8 +25,6 @@
RETRY_WAIT_SECS = 2
RETRY_COUNT = 20
CONNECT_TIMEOUT_SECS = 30
-# --------------------------------------------------------------
-
log = logging.getLogger(__name__)
@@ -38,33 +35,34 @@
reraise=True)
async def __create_tables(**params):
# TODO: move _init_db.metadata here!?
- sa_engine = sa.create_engine(DSN.format(**params))
- metadata.create_all(sa_engine)
+ try:
+ url = DSN.format(**params) + f"?application_name={__name__}_init"
+ sa_engine = sa.create_engine(url)
+ metadata.create_all(sa_engine)
+ finally:
+ sa_engine.dispose()
+
async def pg_engine(app: web.Application):
- engine = None
- try:
- cfg = app[APP_CONFIG_KEY][CONFIG_SECTION_NAME]
- params = {k:cfg["postgres"][k] for k in 'database user password host port minsize maxsize'.split()}
+ cfg = app[APP_CONFIG_KEY][CONFIG_SECTION_NAME]
+ params = {k:cfg["postgres"][k] for k in 'database user password host port minsize maxsize'.split()}
+
- if cfg.get("init_tables"):
+ if cfg.get("init_tables"):
+ try:
# TODO: get keys from __name__ (see notes in servicelib.application_keys)
await __create_tables(**params)
+ except DBAPIError:
+            log.exception("Could not init db. Stopping:\n %s", cfg)
+ raise
- engine = await create_engine(**params)
-
- except DBAPIError:
- log.exception("Could init db. Stopping :\n %s", cfg)
- raise
- else:
+ async with create_engine(application_name=f'{__name__}_{id(app)}', **params) as engine:
app[APP_DB_ENGINE_KEY] = engine
- yield
+ yield #-------------------
- engine = app.get(APP_DB_ENGINE_KEY)
- if engine:
- engine.close()
- await engine.wait_closed()
+ if engine is not app.get(APP_DB_ENGINE_KEY):
+        log.error("app does not hold the right db engine")
def is_service_enabled(app: web.Application):
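Tagging every engine with an application_name, as done twice above, makes each pool identifiable server-side. A small sketch of the DSN construction; all values are placeholders:

DSN = "postgresql://{user}:{password}@{host}:{port}/{database}"

params = dict(user="simcore", password="simcore", host="postgres",
              port=5432, database="simcoredb")
url = DSN.format(**params) + "?application_name=simcore_service_webserver.db_init"

# each connection then shows up in Postgres under that name:
#   SELECT application_name, state FROM pg_stat_activity;
print(url)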
diff --git a/services/web/server/src/simcore_service_webserver/db_config.py b/services/web/server/src/simcore_service_webserver/db_config.py
index 148ee2aec93..6747d130cd1 100644
--- a/services/web/server/src/simcore_service_webserver/db_config.py
+++ b/services/web/server/src/simcore_service_webserver/db_config.py
@@ -10,6 +10,18 @@
CONFIG_SECTION_NAME = 'db'
+# FIXME: database user password host port minsize maxsize
+#CONFIG_SCHEMA = T.Dict({
+# "database": T.String(),
+# "user": T.String(),
+# "password": T.String(),
+# "host": T.Or( T.String, T.Null),
+# "port": T.Or( T.Int, T.Null),
+# T.Key("minsize", default=1 ,optional=True): T.Int(),
+# T.Key("maxsize", default=4, optional=True): T.Int(),
+#})
+
+
schema = T.Dict({
T.Key("postgres"): _PG_SCHEMA,
T.Key("init_tables", default=False): T.Bool(),
diff --git a/services/web/server/src/simcore_service_webserver/login/__init__.py b/services/web/server/src/simcore_service_webserver/login/__init__.py
index 4696f4cfed9..37f9df83107 100644
--- a/services/web/server/src/simcore_service_webserver/login/__init__.py
+++ b/services/web/server/src/simcore_service_webserver/login/__init__.py
@@ -41,7 +41,14 @@ async def _setup_config_and_pgpool(app: web.Application):
db_cfg = app[APP_CONFIG_KEY][DB_SECTION]['postgres']
# db
- pool = await asyncpg.create_pool(dsn=DSN.format(**db_cfg), loop=asyncio.get_event_loop())
+ #TODO: setup lifetime of this pool?
+    #TODO: determine min/max size of the pool
+ pool = await asyncpg.create_pool(
+ dsn=DSN.format(**db_cfg) + f"?application_name={module_name}_{id(app)}",
+ min_size=db_cfg['minsize'],
+ max_size=db_cfg['maxsize'],
+ loop=asyncio.get_event_loop())
+
storage = AsyncpgStorage(pool) #NOTE: this key belongs to cfg, not settings!
# config
@@ -65,10 +72,12 @@ async def _setup_config_and_pgpool(app: web.Application):
app[APP_LOGIN_CONFIG] = cfg
- yield
+ yield # ----------------
+ if config["STORAGE"].pool is not pool:
+ log.error("Somebody has changed the db pool")
try:
- await asyncio.wait_for( pool.close(), timeout=TIMEOUT_SECS)
+ await asyncio.wait_for(pool.close(), timeout=TIMEOUT_SECS)
except asyncio.TimeoutError:
log.exception("Failed to close login storage loop")
diff --git a/services/web/server/src/simcore_service_webserver/security_authorization.py b/services/web/server/src/simcore_service_webserver/security_authorization.py
index 2baaaf66903..229181e34ee 100644
--- a/services/web/server/src/simcore_service_webserver/security_authorization.py
+++ b/services/web/server/src/simcore_service_webserver/security_authorization.py
@@ -1,11 +1,14 @@
import logging
-from typing import Dict, Tuple, Union
+from typing import Dict, Optional, Tuple, Union
import attr
+import psycopg2
import sqlalchemy as sa
from aiohttp import web
from aiohttp_security.abc import AbstractAuthorizationPolicy
from aiopg.sa import Engine
+from tenacity import (RetryCallState, after_log, retry,
+ retry_if_exception_type, stop_after_attempt, wait_fixed)
from servicelib.application_keys import APP_DB_ENGINE_KEY
@@ -15,6 +18,16 @@
log = logging.getLogger(__file__)
+
+def raise_http_unavailable_error(retry_state: RetryCallState):
+ # TODO: mark incident on db to determine the quality of service. E.g. next time we do not stop.
+ # TODO: add header with Retry-After
+ #obj, query = retry_state.args
+ #obj.app.register_incidents
+ # https://tools.ietf.org/html/rfc7231#section-7.1.3
+ raise web.HTTPServiceUnavailable()
+
+
@attr.s(auto_attribs=True, frozen=True)
class AuthorizationPolicy(AbstractAuthorizationPolicy):
app: web.Application
@@ -25,56 +38,57 @@ def engine(self) -> Engine:
"""Lazy getter since the database is not available upon setup
:return: database's engine
- :rtype: Engine
"""
# TODO: what if db is not available?
#return self.app.config_dict[APP_DB_ENGINE_KEY]
return self.app[APP_DB_ENGINE_KEY]
+ @retry(
+ retry=retry_if_exception_type(psycopg2.DatabaseError),
+ wait=wait_fixed(2),
+ stop=stop_after_attempt(3),
+ after=after_log(log, logging.ERROR),
+ retry_error_callback=raise_http_unavailable_error)
+ async def _safe_execute(self, query):
+ # NOTE: psycopg2.DatabaseError in #880 and #1160
+ # http://initd.org/psycopg/docs/module.html
+ async with self.engine.acquire() as conn:
+ ret = await conn.execute(query)
+ res = await ret.fetchone()
+ return res
- async def authorized_userid(self, identity: str):
+ async def authorized_userid(self, identity: str) -> Optional[str]:
""" Retrieve authorized user id.
Return the user_id of the user identified by the identity
or "None" if no user exists related to the identity.
"""
- # pylint: disable=E1120
- async with self.engine.acquire() as conn:
- # TODO: why users.c.user_login_key!=users.c.email
- query = users.select().where(
- sa.and_(users.c.email == identity,
- users.c.status != UserStatus.BANNED)
- )
- ret = await conn.execute(query)
- user = await ret.fetchone()
- return user["id"] if user else None
-
- async def permits(self, identity: str, permission: Union[str,Tuple], context: Dict=None):
+ # TODO: why users.c.user_login_key!=users.c.email
+ user = await self._safe_execute( users.select().where(
+ sa.and_(users.c.email == identity,
+ users.c.status != UserStatus.BANNED)
+ ))
+ return user["id"] if user else None
+
+ async def permits(self, identity: str, permission: Union[str,Tuple], context: Optional[Dict]=None) -> bool:
""" Determines whether an identified user has permission
:param identity: session identified corresponds to the user's email as defined in login.handlers.registration
- :type identity: str
:param permission: name of the operation that user wants to execute OR a tuple as (operator.and_|operator.or_, name1, name2, ...)
- :type permission: str or tuple
:param context: context of the operation, defaults to None
- :type context: Dict, optional
:return: True if user has permission to execute this operation within the given context
- :rtype: bool
"""
if identity is None or permission is None:
log.debug("Invalid indentity [%s] of permission [%s]. Denying access.", identity, permission)
return False
- async with self.engine.acquire() as conn:
- query = users.select().where(
- sa.and_(users.c.email == identity,
- users.c.status != UserStatus.BANNED)
+ user = await self._safe_execute( users.select().where(
+ sa.and_(users.c.email == identity,
+ users.c.status != UserStatus.BANNED)
)
- ret = await conn.execute(query)
- user = await ret.fetchone()
-
- if user:
- role = user.get('role')
- return await check_access(self.access_model, role, permission, context)
+ )
+ if user:
+ role = user.get('role')
+ return await check_access(self.access_model, role, permission, context)
return False
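The decorator above retries transient psycopg2.DatabaseError failures and, once the attempts are exhausted, converts the failure into a 503 via retry_error_callback instead of leaking tenacity's RetryError. A self-contained sketch of that policy, using stand-in exception types:

import logging
from tenacity import (after_log, retry, retry_if_exception_type,
                      stop_after_attempt, wait_fixed)

logging.basicConfig()
log = logging.getLogger(__name__)

class ServiceUnavailable(RuntimeError):
    """Stand-in for aiohttp's web.HTTPServiceUnavailable."""

def raise_unavailable(retry_state):
    # invoked after the last attempt; replaces tenacity's RetryError
    raise ServiceUnavailable()

@retry(retry=retry_if_exception_type(ConnectionError),
       wait=wait_fixed(0.1),
       stop=stop_after_attempt(3),
       after=after_log(log, logging.ERROR),
       retry_error_callback=raise_unavailable)
def flaky_query():
    raise ConnectionError("db down")

# flaky_query()  # logs each failed attempt, then raises ServiceUnavailable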
diff --git a/services/web/server/tests/conftest.py b/services/web/server/tests/conftest.py
new file mode 100644
index 00000000000..876d126aca9
--- /dev/null
+++ b/services/web/server/tests/conftest.py
@@ -0,0 +1,70 @@
+""" Main test configuration
+
+ EXPECTED: simcore_service_webserver installed
+
+"""
+# pylint: disable=unused-argument
+# pylint: disable=bare-except
+# pylint:disable=redefined-outer-name
+
+import logging
+import sys
+from pathlib import Path
+
+import pytest
+
+import simcore_service_webserver
+
+current_dir = Path(sys.argv[0] if __name__ == "__main__" else __file__).resolve().parent
+log = logging.getLogger(__name__)
+
+# mute noisy loggers
+logging.getLogger("openapi_spec_validator").setLevel(logging.WARNING)
+logging.getLogger("sqlalchemy").setLevel(logging.WARNING)
+
+## HELPERS
+sys.path.append(str(current_dir / 'helpers'))
+
+
+## FIXTURES: standard paths
+
+@pytest.fixture(scope='session')
+def package_dir() -> Path:
+ """ osparc-simcore installed directory """
+ dirpath = Path(simcore_service_webserver.__file__).resolve().parent
+ assert dirpath.exists()
+ return dirpath
+
+@pytest.fixture(scope='session')
+def osparc_simcore_root_dir() -> Path:
+ """ osparc-simcore repo root dir """
+ WILDCARD = "services/web/server"
+
+ root_dir = Path(current_dir)
+ while not any(root_dir.glob(WILDCARD)) and root_dir != Path("/"):
+ root_dir = root_dir.parent
+
+ msg = f"'{root_dir}' does not look like the git root directory of osparc-simcore"
+ assert root_dir.exists(), msg
+ assert any(root_dir.glob(WILDCARD)), msg
+ assert any(root_dir.glob(".git")), msg
+
+ return root_dir
+
+@pytest.fixture(scope="session")
+def env_devel_file(osparc_simcore_root_dir) -> Path:
+ env_devel_fpath = osparc_simcore_root_dir / ".env-devel"
+ assert env_devel_fpath.exists()
+ return env_devel_fpath
+
+@pytest.fixture(scope='session')
+def api_specs_dir(osparc_simcore_root_dir: Path) -> Path:
+ specs_dir = osparc_simcore_root_dir/ "api" / "specs" / "webserver"
+ assert specs_dir.exists()
+ return specs_dir
+
+@pytest.fixture(scope='session')
+def fake_data_dir() -> Path:
+ dirpath = (current_dir / "data").resolve()
+ assert dirpath.exists()
+ return dirpath
diff --git a/services/web/server/tests/unit/mock/parametrized_project.json b/services/web/server/tests/data/parametrized_project.json
similarity index 100%
rename from services/web/server/tests/unit/mock/parametrized_project.json
rename to services/web/server/tests/data/parametrized_project.json
diff --git a/services/web/server/tests/unit/mock/static/index.html b/services/web/server/tests/data/static/index.html
similarity index 100%
rename from services/web/server/tests/unit/mock/static/index.html
rename to services/web/server/tests/data/static/index.html
diff --git a/services/web/server/tests/unit/mock/static/osparc/.gitkeep b/services/web/server/tests/data/static/osparc/.gitkeep
similarity index 100%
rename from services/web/server/tests/unit/mock/static/osparc/.gitkeep
rename to services/web/server/tests/data/static/osparc/.gitkeep
diff --git a/services/web/server/tests/unit/mock/static/resource/.gitkeep b/services/web/server/tests/data/static/resource/.gitkeep
similarity index 100%
rename from services/web/server/tests/unit/mock/static/resource/.gitkeep
rename to services/web/server/tests/data/static/resource/.gitkeep
diff --git a/services/web/server/tests/unit/mock/static/transpiled/.gitkeep b/services/web/server/tests/data/static/transpiled/.gitkeep
similarity index 100%
rename from services/web/server/tests/unit/mock/static/transpiled/.gitkeep
rename to services/web/server/tests/data/static/transpiled/.gitkeep
diff --git a/services/web/server/tests/data/test_activity_config.yml b/services/web/server/tests/data/test_activity_config.yml
new file mode 100644
index 00000000000..8c155d9f027
--- /dev/null
+++ b/services/web/server/tests/data/test_activity_config.yml
@@ -0,0 +1,62 @@
+version: '1.0'
+main:
+ client_outdir: /usr/src/app/client
+ host: 127.0.0.1
+ log_level: DEBUG
+ port: 8080
+ testing: true
+ studies_access_enabled: True
+ monitoring_enabled: True
+director:
+ host: director
+ port: 8001
+ version: v0
+db:
+ init_tables: False
+ postgres:
+ database: simcoredb
+ endpoint: postgres:5432
+ host: postgres
+ maxsize: 5
+ minsize: 1
+ password: simcore
+ port: 5432
+ user: simcore
+rabbit:
+ channels:
+ log: comp.backend.channels.log
+ progress: comp.backend.channels.progress
+ password: simcore
+ user: simcore
+# s3:
+# access_key: 'Q3AM3UQ867SPQQA43P2F'
+# bucket_name: simcore
+# endpoint: play.minio.io:9000
+# secret_key: 'zuf+tfteSlswRu7BJ86wekitnifILbZam1KYY3TG'
+login:
+ enabled: False
+ registration_invitation_required: False
+ registration_confirmation_required: True
+smtp:
+ sender: 'OSPARC support '
+ host: mail.foo.com
+ port: 25
+ tls: False
+ username: Null
+ password: Null
+storage:
+ host: storage
+ port: 11111
+ version: v0
+rest:
+ version: v0
+ location: ${OSPARC_SIMCORE_REPO_ROOTDIR}/api/specs/webserver/v0/openapi.yaml
+projects:
+ location: ${OSPARC_SIMCORE_REPO_ROOTDIR}/api/specs/webserver/v0/components/schemas/project-v0.0.1.json
+session:
+ secret_key: "TODO: Replace with a key of at least length 32"
+activity:
+ enabled: True
+ prometheus_host: http://prometheus
+ prometheus_port: 9090
+ prometheus_api_version: v1
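The ${OSPARC_SIMCORE_REPO_ROOTDIR} placeholders above are substituted before the YAML is parsed. The unit tests in this patch use a plain str.replace; a minimal standalone sketch of the same idea with string.Template (file name as above, root dir assumed to be the current directory):

    import os
    from string import Template

    with open("test_activity_config.yml") as fh:
        raw = fh.read()
    resolved = Template(raw).substitute(OSPARC_SIMCORE_REPO_ROOTDIR=os.getcwd())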
diff --git a/services/web/server/tests/data/test_activity_data.json b/services/web/server/tests/data/test_activity_data.json
new file mode 100644
index 00000000000..d290f45edca
--- /dev/null
+++ b/services/web/server/tests/data/test_activity_data.json
@@ -0,0 +1,54 @@
+{
+ "prometheus": {
+ "cpu_return": {
+ "data": {
+ "result": [
+ {
+ "metric": {
+ "container_label_node_id": "894dd8d5-de3b-4767-950c-7c3ed8f51d8c"
+ },
+ "value": [
+ null,
+ "3.9952102200000006"
+ ]
+ }
+ ]
+ }
+ },
+ "memory_return": {
+ "data": {
+ "result": [
+ {
+ "metric": {
+ "container_label_node_id": "894dd8d5-de3b-4767-950c-7c3ed8f51d8c"
+ },
+ "value": [
+ null,
+ "177.664"
+ ]
+ }
+ ]
+ }
+ },
+ "labels_return": {
+ "data": {
+ "result": [
+ {
+ "metric": {
+ "container_label_node_id": "894dd8d5-de3b-4767-950c-7c3ed8f51d8c",
+ "container_label_nano_cpus_limit": "4000000000",
+ "container_label_mem_limit": "2147483648"
+ }
+ }
+ ]
+ }
+ }
+ },
+ "celery": {
+ "celery_return": {
+ "celery@b02a130fc2e1": [{
+ "args": "(-1, '5360fc76-09f0-11ea-a930-02420aff000a', '35f95ad4-67b8-4ed8-bd55-84a5d600e687')"
+ }]
+ }
+ }
+}
\ No newline at end of file
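The limits in the fixture above are raw docker labels. For reference, a small sketch of the unit conversions the activity unit test later asserts, assuming nano-CPUs and bytes respectively:

    nano_cpus_limit = 4000000000       # container_label_nano_cpus_limit
    mem_limit_bytes = 2147483648       # container_label_mem_limit

    cpus = nano_cpus_limit / 1e9       # -> 4.0, as asserted in test_activity.py
    mem_mib = mem_limit_bytes / 2**20  # -> 2048.0, as asserted in test_activity.py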
diff --git a/services/web/server/tests/helpers/utils_docker.py b/services/web/server/tests/helpers/utils_docker.py
new file mode 100644
index 00000000000..2b0bcb4de33
--- /dev/null
+++ b/services/web/server/tests/helpers/utils_docker.py
@@ -0,0 +1,87 @@
+
+import logging
+import os
+import shutil
+import subprocess
+import tempfile
+from pathlib import Path
+from typing import Dict, List, Optional, Union
+
+import docker
+import yaml
+from tenacity import after_log, retry, stop_after_attempt, wait_fixed
+
+log = logging.getLogger(__name__)
+
+@retry(
+ wait=wait_fixed(2),
+ stop=stop_after_attempt(10),
+ after=after_log(log, logging.WARN))
+def get_service_published_port(service_name: str, target_port: Optional[int]=None) -> str:
+ """
+ WARNING: ENSURE that the service exposes a port in its Dockerfile or in the docker-compose config file
+ """
+ # NOTE: retries since services can take some time to start
+ client = docker.from_env()
+
+ services = [x for x in client.services.list() if service_name in x.name]
+ if not services:
+ raise RuntimeError(f"Cannot find published port for service '{service_name}'. Probably services still not started.")
+
+ service_ports = services[0].attrs["Endpoint"].get("Ports")
+ if not service_ports:
+ raise RuntimeError(f"Cannot find published port for service '{service_name}' in endpoint. Probably services still not started.")
+
+ published_port = None
+ msg = ", ".join( f"{p.get('TargetPort')} -> {p.get('PublishedPort')}" for p in service_ports )
+
+ if target_port is None:
+ if len(service_ports)>1:
+ log.warning("Multiple ports published in service '%s': %s. Defaulting to first", service_name, msg)
+ published_port = service_ports[0]["PublishedPort"]
+
+ else:
+ target_port = int(target_port)
+ for p in service_ports:
+ if p['TargetPort'] == target_port:
+ published_port = p['PublishedPort']
+ break
+
+ if published_port is None:
+ raise RuntimeError(f"Cannot find published port for {target_port}. Got {msg}")
+
+ return str(published_port)
+
+
+def run_docker_compose_config(
+ docker_compose_paths: Union[List[Path], Path],
+ workdir: Path,
+ destination_path: Optional[Path]=None) -> Dict:
+ """ Runs docker-compose config to validate and resolve a compose file configuration
+
+ - Composes all configurations passed in 'docker_compose_paths'
+ - Takes 'workdir' as current working directory (i.e. all '.env' files there will be captured)
+ - Saves resolved output config to 'destination_path' (if given)
+ """
+
+ if not isinstance(docker_compose_paths, list):
+ docker_compose_paths = [docker_compose_paths, ]
+
+ temp_dir = None
+ if destination_path is None:
+ temp_dir = Path(tempfile.mkdtemp(prefix=''))
+ destination_path = temp_dir / 'docker-compose.yml'
+
+ config_paths = [ f"-f {os.path.relpath(docker_compose_path, workdir)}" for docker_compose_path in docker_compose_paths]
+ configs_prefix = " ".join(config_paths)
+
+ subprocess.run( f"docker-compose {configs_prefix} config > {destination_path}",
+ shell=True, check=True,
+ cwd=workdir)
+
+ with destination_path.open() as f:
+ config = yaml.safe_load(f)
+
+ if temp_dir:
+ shutil.rmtree(temp_dir) # NOTE: mkdtemp creates a directory; Path.unlink() cannot remove it
+
+ return config
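A minimal usage sketch of the two helpers above; paths and the service name are placeholders:

    from pathlib import Path

    from utils_docker import get_service_published_port, run_docker_compose_config

    # resolve and merge compose files, as 'docker-compose ... config' would
    config = run_docker_compose_config(
        [Path("services/docker-compose.yml"), Path("services/docker-compose.local.yml")],
        workdir=Path("."))

    # once a stack is deployed, find the host port the swarm published for postgres:5432
    postgres_port = get_service_published_port("postgres", target_port=5432)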
diff --git a/services/web/server/tests/helpers/utils_tokens.py b/services/web/server/tests/helpers/utils_tokens.py
index 51a62ec198c..e1f13fb55a7 100644
--- a/services/web/server/tests/helpers/utils_tokens.py
+++ b/services/web/server/tests/helpers/utils_tokens.py
@@ -18,6 +18,7 @@ def create_db_tables(**kargs):
url = DSN.format(**kargs)
engine = sa.create_engine(url, isolation_level="AUTOCOMMIT")
metadata.create_all(bind=engine, tables=[users, tokens], checkfirst=True)
+ engine.dispose()
return url
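The added dispose() matters because every create_engine call opens its own connection pool; without it, each test invocation would leak connections to the test database. The guarded pattern, as a sketch:

    engine = sa.create_engine(url, isolation_level="AUTOCOMMIT")
    try:
        metadata.create_all(bind=engine, tables=[users, tokens], checkfirst=True)
    finally:
        engine.dispose()  # return pooled connections so the DB can be torn down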
diff --git a/services/web/server/tests/integration/computation/test_computation.py b/services/web/server/tests/integration/computation/test_computation.py
index ef69f2a4676..193e599b6a4 100644
--- a/services/web/server/tests/integration/computation/test_computation.py
+++ b/services/web/server/tests/integration/computation/test_computation.py
@@ -1,5 +1,3 @@
-# pylint:disable=wildcard-import
-# pylint:disable=unused-import
# pylint:disable=unused-variable
# pylint:disable=unused-argument
# pylint:disable=redefined-outer-name
@@ -8,20 +6,16 @@
import sys
import time
import uuid
-from contextlib import contextmanager
from pathlib import Path
from pprint import pprint
-from typing import Dict
import pytest
-import yaml
from aiohttp import web
from yarl import URL
from servicelib.application import create_safe_application
from servicelib.application_keys import APP_CONFIG_KEY
-from servicelib.rest_responses import unwrap_envelope
-from simcore_sdk.models.pipeline_models import ( # uses legacy TODO: upgrade test
+from simcore_sdk.models.pipeline_models import (
SUCCESS, ComputationalPipeline, ComputationalTask)
from simcore_service_webserver.computation import setup_computation
from simcore_service_webserver.db import setup_db
@@ -31,11 +25,12 @@
from simcore_service_webserver.security import setup_security
from simcore_service_webserver.security_roles import UserRole
from simcore_service_webserver.session import setup_session
-from simcore_service_webserver.users import setup_users
from utils_assert import assert_status
from utils_login import LoggedUser
from utils_projects import NewProject
+current_dir = Path(sys.argv[0] if __name__ == "__main__" else __file__).resolve().parent
+
API_VERSION = "v0"
API_PREFIX = "/" + API_VERSION
@@ -52,28 +47,22 @@
]
ops_services = [
- 'minio'
+ 'minio',
# 'adminer',
# 'portainer'
]
-@pytest.fixture(scope='session')
-def here() -> Path:
- return Path(sys.argv[0] if __name__ == "__main__" else __file__).resolve().parent
@pytest.fixture
-def client(loop, aiohttp_unused_port, aiohttp_client, app_config, here, docker_compose_file):
- port = app_config["main"]["port"] = aiohttp_unused_port()
- host = app_config['main']['host'] = '127.0.0.1'
-
+def client(loop, aiohttp_client,
+ app_config, ## waits until swarm with *_services are up
+ ):
assert app_config["rest"]["version"] == API_VERSION
assert API_VERSION in app_config["rest"]["location"]
app_config['storage']['enabled'] = False
app_config["db"]["init_tables"] = True # inits postgres_service
- final_config_path = here / "config.app.yaml"
- with final_config_path.open('wt') as f:
- yaml.dump(app_config, f, default_flow_style=False)
+ pprint(app_config)
# fake config
app = create_safe_application()
@@ -90,33 +79,28 @@ def client(loop, aiohttp_unused_port, aiohttp_client, app_config, here, docker_c
setup_computation(app)
yield loop.run_until_complete(aiohttp_client(app, server_kwargs={
- 'port': port,
- 'host': 'localhost'
+ 'port': app_config["main"]["port"],
+ 'host': app_config['main']['host']
}))
- # cleanup
- final_config_path.unlink()
-
@pytest.fixture
def project_id() -> str:
return str(uuid.uuid4())
-
-
-@pytest.fixture
-def mock_workbench_payload(here):
- file_path = here / "workbench_sleeper_payload.json"
+@pytest.fixture(scope='session')
+def mock_workbench_payload():
+ file_path = current_dir / "workbench_sleeper_payload.json"
with file_path.open() as fp:
return json.load(fp)
-@pytest.fixture
-def mock_workbench_adjacency_list(here):
- file_path = here / "workbench_sleeper_dag_adjacency_list.json"
+@pytest.fixture(scope='session')
+def mock_workbench_adjacency_list():
+ file_path = current_dir / "workbench_sleeper_dag_adjacency_list.json"
with file_path.open() as fp:
return json.load(fp)
-@pytest.fixture
+@pytest.fixture(scope='session')
def mock_project(fake_data_dir, mock_workbench_payload):
with (fake_data_dir / "fake-project.json").open() as fp:
project = json.load(fp)
@@ -154,6 +138,7 @@ def assert_db_contents(project_id, postgres_session,
mock_workbench_payload, mock_workbench_adjacency_list,
check_outputs:bool
):
+ # pylint: disable=no-member
pipeline_db = postgres_session.query(ComputationalPipeline)\
.filter(ComputationalPipeline.project_id == project_id).one()
assert pipeline_db.project_id == project_id
@@ -179,6 +164,7 @@ def assert_db_contents(project_id, postgres_session,
assert task_db.image["tag"] == mock_pipeline[task_db.node_id]["version"]
def assert_sleeper_services_completed(project_id, postgres_session):
+ # pylint: disable=no-member
# we wait 15 secs before testing...
time.sleep(15)
pipeline_db = postgres_session.query(ComputationalPipeline)\
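Note that the payload fixtures above were promoted to session scope, so the dicts they return are shared across tests; a test that needs to mutate one should copy it first. A sketch (the node key is hypothetical):

    from copy import deepcopy

    def test_tweaked_payload(mock_workbench_payload):
        payload = deepcopy(mock_workbench_payload)  # keep the shared fixture intact
        payload["some-node-uuid"]["inputs"] = {}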
diff --git a/services/web/server/tests/integration/computation/test_rabbit.py b/services/web/server/tests/integration/computation/test_rabbit.py
index 51d38655724..1f511d25df7 100644
--- a/services/web/server/tests/integration/computation/test_rabbit.py
+++ b/services/web/server/tests/integration/computation/test_rabbit.py
@@ -1,4 +1,3 @@
-# pylint:disable=wildcard-import
# pylint:disable=unused-variable
# pylint:disable=unused-argument
# pylint:disable=redefined-outer-name
@@ -33,15 +32,14 @@ def here() -> Path:
return Path(sys.argv[0] if __name__ == "__main__" else __file__).resolve().parent
@pytest.fixture
-def webserver_service(loop, aiohttp_unused_port, aiohttp_server, app_config, rabbit_service):
- port = app_config["main"]["port"] = aiohttp_unused_port()
- host = app_config['main']['host'] = '127.0.0.1'
-
+def client(loop, aiohttp_client,
+ app_config, ## waits until swarm with *_services are up
+ rabbit_service ## waits until rabbit is responsive
+ ):
assert app_config["rest"]["version"] == API_VERSION
assert API_VERSION in app_config["rest"]["location"]
app_config['storage']['enabled'] = False
-
app_config["db"]["init_tables"] = True # inits postgres_service
# fake config
@@ -50,16 +48,12 @@ def webserver_service(loop, aiohttp_unused_port, aiohttp_server, app_config, rab
setup_computation(app)
- server = loop.run_until_complete(aiohttp_server(app, port=port))
- yield server
- # cleanup
+ yield loop.run_until_complete(aiohttp_client(app, server_kwargs={
+ 'port': app_config["main"]["port"],
+ 'host': app_config['main']['host']
+ }))
-@pytest.fixture
-def client(loop, webserver_service, aiohttp_client):
- client = loop.run_until_complete(aiohttp_client(webserver_service))
- return client
-
@pytest.fixture
def rabbit_config(app_config):
rb_config = app_config[CONFIG_SECTION_NAME]
diff --git a/services/web/server/tests/integration/conftest.py b/services/web/server/tests/integration/conftest.py
index 87f6a9ed465..7a69e9e8a28 100644
--- a/services/web/server/tests/integration/conftest.py
+++ b/services/web/server/tests/integration/conftest.py
@@ -1,27 +1,35 @@
+""" Configuration for integration testing
+
+ During integration testing,
+ - the app under test (i.e. the webserver) will be installed and started in the host
+ - every test module (i.e. integration/**/test_*.py) deploys a stack in a swarm fixture with a selection of core and ops-services
+ - the selection of core/ops services is listed in the 'core_services' and 'ops_services' variables in each test module
+
+ NOTE: services/web/server/tests/conftest.py is pre-loaded
+
+"""
# pylint: disable=unused-argument
# pylint: disable=bare-except
# pylint:disable=redefined-outer-name
-
import logging
import sys
+from copy import deepcopy
from pathlib import Path
from pprint import pprint
from typing import Dict
-import docker
import pytest
import trafaret_config
import yaml
-from tenacity import after_log, retry, stop_after_attempt, wait_fixed
from simcore_service_webserver.application_config import app_schema
from simcore_service_webserver.cli import create_environ
from simcore_service_webserver.resources import resources as app_resources
+from utils_docker import get_service_published_port
# imports the fixtures for the integration tests
pytest_plugins = [
- "fixtures.standard_directories",
"fixtures.docker_compose",
"fixtures.docker_swarm",
"fixtures.docker_registry",
@@ -30,24 +38,19 @@
"fixtures.postgres_service"
]
-log = logging.getLogger(__name__)
-
-# mute noisy loggers
-logging.getLogger("openapi_spec_validator").setLevel(logging.WARNING)
-logging.getLogger("sqlalchemy").setLevel(logging.WARNING)
-
-sys.path.append(str(Path(sys.argv[0] if __name__ == "__main__" else __file__).resolve().parent.parent / 'helpers'))
-API_VERSION = "v0"
-
+current_dir = Path(sys.argv[0] if __name__ == "__main__" else __file__).resolve().parent
-@pytest.fixture(scope='session')
-def here():
- return Path(sys.argv[0] if __name__ == "__main__" else __file__).resolve().parent
+log = logging.getLogger(__name__)
@pytest.fixture(scope="module")
-def webserver_environ(request, simcore_docker_compose, docker_stack) -> Dict[str, str]:
- """ Environment variables for the webserver application
+def webserver_environ(request, docker_stack: Dict, simcore_docker_compose: Dict) -> Dict[str, str]:
+ """
+ Swarm with the integration stack already started (via dependency on 'docker_stack')
+ Environment variables expected by the web-server application in
+ a test-integration context, i.e. the web-server runs in the host and the
+ remaining services (defined in the variable 'core_services') are deployed
+ in containers
"""
assert "webserver" not in docker_stack["services"]
@@ -71,72 +74,66 @@ def webserver_environ(request, simcore_docker_compose, docker_stack) -> Dict[str
if 'ports' in simcore_docker_compose['services'][name] ]
for name in services_with_published_ports:
+
+ host_key = f'{name.upper()}_HOST'
+ port_key = f'{name.upper()}_PORT'
+
# published port is sometimes dynamically defined by the swarm
- published_port = get_service_published_port(name)
+ assert host_key in environ, "Variable names are expected to be prefixed with service names in docker-compose"
+ assert port_key in environ
- environ['%s_HOST' % name.upper()] = '127.0.0.1'
- environ['%s_PORT' % name.upper()] = published_port
# to swarm boundary since webserver is installed in the host and therefore outside the swarm's network
+ published_port = get_service_published_port(name, int(environ.get(port_key)))
+ environ[host_key] = '127.0.0.1'
+ environ[port_key] = published_port
- pprint(environ)
+ pprint(environ) # NOTE: displayed only if error
return environ
@pytest.fixture(scope='module')
-def app_config(here, webserver_environ) -> Dict:
- config_file_path = here / "config.yaml"
- def _recreate_config_file():
- with app_resources.stream("config/server-docker-dev.yaml") as f:
- cfg = yaml.safe_load(f)
- # test webserver works in host
- cfg["main"]['host'] = '127.0.0.1'
+def _webserver_dev_config(webserver_environ: Dict, docker_stack: Dict) -> Dict:
+ """
+ Swarm with integration stack already started
+
+ Configuration for a webserver provided it runs in host
+
+ NOTE: Prefer the function-scoped fixture 'app_config' below instead of this one
+ """
+ config_file_path = current_dir / "webserver_dev_config.yaml"
- with config_file_path.open('wt') as f:
- yaml.dump(cfg, f, default_flow_style=False)
+ # recreate config-file
+ with app_resources.stream("config/server-docker-dev.yaml") as f:
+ cfg = yaml.safe_load(f)
+ # test webserver works in host
+ cfg["main"]['host'] = '127.0.0.1'
- _recreate_config_file()
+ with config_file_path.open('wt') as f:
+ yaml.dump(cfg, f, default_flow_style=False)
# Emulates cli
config_environ = {}
config_environ.update(webserver_environ)
config_environ.update( create_environ(skip_host_environ=True) ) # TODO: can be done by monkeypatching os.environ and calling create_environ as well
+
# validates
cfg_dict = trafaret_config.read_and_validate(config_file_path, app_schema, vars=config_environ)
+ # WARNING: changes to this fixture during testing propagate to other tests. Use cfg = deepcopy(cfg_dict)
+ # FIXME: freeze read/only json obj
yield cfg_dict
# clean up
# to debug the configuration, comment out the next line
config_file_path.unlink()
-## HELPERS
-def resolve_environ(service, environ):
- _environs = {}
- for item in service.get("environment", list()):
- key, value = item.split("=")
- if value.startswith("${") and value.endswith("}"):
- value = value[2:-1]
- if ":" in value:
- variable, default = value.split(":")
- value = environ.get(variable, default[1:])
- else:
- value = environ.get(value, value)
- _environs[key] = value
- return _environs
-
-
-
-@retry(wait=wait_fixed(2), stop=stop_after_attempt(10), after=after_log(log, logging.WARN))
-def get_service_published_port(service_name: str) -> str:
- # WARNING: ENSURE that service name defines a port
- # NOTE: retries since services can take some time to start
- client = docker.from_env()
- services = [x for x in client.services.list() if service_name in x.name]
- if not services:
- raise RuntimeError("Cannot find published port for service '%s'. Probably services still not up" % service_name)
- service_endpoint = services[0].attrs["Endpoint"]
-
- if "Ports" not in service_endpoint or not service_endpoint["Ports"]:
- raise RuntimeError("Cannot find published port for service '%s' in endpoint. Probably services still not up" % service_name)
-
- published_port = service_endpoint["Ports"][0]["PublishedPort"]
- return str(published_port)
+ return cfg_dict
+
+@pytest.fixture(scope="function")
+def app_config(_webserver_dev_config: Dict, aiohttp_unused_port) -> Dict:
+ """
+ Swarm with integration stack already started
+ This fixture can be safely modified during a test since it is recreated on every call
+ """
+ cfg = deepcopy(_webserver_dev_config)
+ cfg["main"]["port"] = aiohttp_unused_port()
+ return cfg
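The deepcopy in 'app_config' above is what makes it safe for the client fixtures in this patch to mutate the configuration per test, e.g.:

    # harmless: each test function receives a fresh copy of _webserver_dev_config
    app_config['storage']['enabled'] = False
    app_config["db"]["init_tables"] = True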
diff --git a/services/web/server/tests/integration/fixtures/__init__.py b/services/web/server/tests/integration/fixtures/__init__.py
new file mode 100644
index 00000000000..b472f27c93f
--- /dev/null
+++ b/services/web/server/tests/integration/fixtures/__init__.py
@@ -0,0 +1 @@
+# Collection of test fixtures for integration testing
diff --git a/services/web/server/tests/integration/fixtures/celery_service.py b/services/web/server/tests/integration/fixtures/celery_service.py
index 8428824d10d..8a51d5634ed 100644
--- a/services/web/server/tests/integration/fixtures/celery_service.py
+++ b/services/web/server/tests/integration/fixtures/celery_service.py
@@ -1,9 +1,9 @@
-# pylint:disable=wildcard-import
-# pylint:disable=unused-import
# pylint:disable=unused-variable
# pylint:disable=unused-argument
# pylint:disable=redefined-outer-name
+from copy import deepcopy
+
import celery
import celery.bin.base
import celery.bin.celery
@@ -13,8 +13,8 @@
@pytest.fixture(scope="module")
-def celery_service(app_config, docker_stack):
- cfg = app_config["rabbit"]
+def celery_service(_webserver_dev_config, docker_stack):
+ cfg = deepcopy(_webserver_dev_config["rabbit"])
host = cfg["host"]
port = cfg["port"]
user = cfg["user"]
diff --git a/services/web/server/tests/integration/fixtures/docker_compose.py b/services/web/server/tests/integration/fixtures/docker_compose.py
index d24ce60d951..a601d667f55 100644
--- a/services/web/server/tests/integration/fixtures/docker_compose.py
+++ b/services/web/server/tests/integration/fixtures/docker_compose.py
@@ -1,9 +1,17 @@
+"""
+
+ The main Makefile produces a set of docker-compose configuration files.
+ Here we can find fixtures for most of these configurations
+
+"""
+
# pylint:disable=wildcard-import
# pylint:disable=unused-import
# pylint:disable=unused-variable
# pylint:disable=unused-argument
# pylint:disable=redefined-outer-name
+
import os
import re
import shutil
@@ -13,14 +21,15 @@
from collections import defaultdict
from copy import deepcopy
from pathlib import Path
-from typing import Dict, List
+from typing import Dict, List, Union
import pytest
import yaml
+from utils_docker import run_docker_compose_config
@pytest.fixture("session")
-def devel_environ(env_devel_file) -> Dict[str, str]:
+def devel_environ(env_devel_file: Path) -> Dict[str, str]:
""" Loads and extends .env-devel
"""
@@ -50,16 +59,21 @@ def devel_environ(env_devel_file) -> Dict[str, str]:
return env_devel
+
@pytest.fixture(scope="module")
def temp_folder(request, tmpdir_factory) -> Path:
tmp = Path(tmpdir_factory.mktemp("docker_compose_{}".format(request.module.__name__)))
yield tmp
+
@pytest.fixture(scope="module")
-def env_file(osparc_simcore_root_dir, devel_environ):
- # ensures .env at git_root_dir
+def env_file(osparc_simcore_root_dir: Path, devel_environ: Dict[str, str]) -> Path:
+ """
+ Creates a .env file from the .env-devel
+ """
+ # preserves any pre-existing .env at git_root_dir after the test
env_path = osparc_simcore_root_dir / ".env"
- backup_path = osparc_simcore_root_dir / ".env-bak"
+ backup_path = osparc_simcore_root_dir / ".env.bak"
if env_path.exists():
shutil.copy(env_path, backup_path)
@@ -76,15 +90,18 @@ def env_file(osparc_simcore_root_dir, devel_environ):
backup_path.unlink()
+
@pytest.fixture("module")
-def simcore_docker_compose(osparc_simcore_root_dir, env_file, temp_folder) -> Dict:
+def simcore_docker_compose(osparc_simcore_root_dir: Path, env_file: Path, temp_folder: Path) -> Dict:
""" Resolves docker-compose for simcore stack in local host
+ Produces the same output as `make .stack-simcore-version.yml`, in a temporary folder
"""
COMPOSE_FILENAMES = [
"docker-compose.yml",
"docker-compose.local.yml"
]
+
# ensures .env at git_root_dir
assert env_file.exists()
assert env_file.parent == osparc_simcore_root_dir
@@ -94,19 +111,19 @@ def simcore_docker_compose(osparc_simcore_root_dir, env_file, temp_folder) -> Di
for filename in COMPOSE_FILENAMES]
assert all(docker_compose_path.exists() for docker_compose_path in docker_compose_paths)
- # path to resolved docker-compose
- destination_path = temp_folder / "simcore_docker_compose.yml"
+ config = run_docker_compose_config(docker_compose_paths,
+ workdir=env_file.parent,
+ destination_path=temp_folder / "simcore_docker_compose.yml")
- config = _run_docker_compose_config(docker_compose_paths, destination_path, osparc_simcore_root_dir)
return config
-
@pytest.fixture("module")
-def ops_docker_compose(osparc_simcore_root_dir, env_file, temp_folder) -> Dict:
+def ops_docker_compose(osparc_simcore_root_dir: Path, env_file: Path, temp_folder: Path) -> Dict:
""" Filters only services in docker-compose-ops.yml and returns yaml data
+ Produces the same output as `make .stack-ops.yml`, in a temporary folder
"""
- # ensures .env at git_root_dir
+ # ensures .env at git_root_dir, which will be used as current directory
assert env_file.exists()
assert env_file.parent == osparc_simcore_root_dir
@@ -114,24 +131,20 @@ def ops_docker_compose(osparc_simcore_root_dir, env_file, temp_folder) -> Dict:
docker_compose_path = osparc_simcore_root_dir / "services" / "docker-compose-ops.yml"
assert docker_compose_path.exists()
- # path to resolved docker-compose
- destination_path = temp_folder / "ops_docker_compose.yml"
-
- config = _run_docker_compose_config(docker_compose_path, destination_path, osparc_simcore_root_dir)
+ config = run_docker_compose_config(docker_compose_path,
+ workdir=env_file.parent,
+ destination_path=temp_folder / "ops_docker_compose.yml")
return config
-
@pytest.fixture(scope='module')
-def docker_compose_file(request, temp_folder, simcore_docker_compose):
- """ A copy of simcore_docker_compose filtered with services in core_services
-
- Creates a docker-compose.yml with services listed in 'core_services' module variable
+def core_services_config_file(request, temp_folder, simcore_docker_compose):
+ """ Creates a docker-compose config file for every stack of services in'core_services' module variable
File is created in a temp folder
-
- Overrides pytest-docker fixture
"""
- core_services = getattr(request.module, 'core_services', []) # TODO: PC->SAN could also be defined as a fixture (as with docker_compose)
+ core_services = getattr(request.module, 'core_services', []) # TODO: PC->SAN could also be defined as a fixture instead of a single variable (as with docker_compose)
+ assert core_services, f"Expected at least one service in 'core_services' within '{request.module.__name__}'"
+
docker_compose_path = Path(temp_folder / 'simcore_docker_compose.filtered.yml')
_filter_services_and_dump(core_services, simcore_docker_compose, docker_compose_path)
@@ -139,8 +152,8 @@ def docker_compose_file(request, temp_folder, simcore_docker_compose):
return docker_compose_path
@pytest.fixture(scope='module')
-def ops_docker_compose_file(request, temp_folder, ops_docker_compose):
- """ Creates a docker-compose.yml with services listed in 'ops_services' module variable
+def ops_services_config_file(request, temp_folder, ops_docker_compose):
+ """ Creates a docker-compose config file for every stack of services in 'ops_services' module variable
File is created in a temp folder
"""
ops_services = getattr(request.module, 'ops_services', [])
@@ -151,8 +164,6 @@ def ops_docker_compose_file(request, temp_folder, ops_docker_compose):
return docker_compose_path
-
-
# HELPERS ---------------------------------------------
def _get_ip()->str:
s = socket.socket(socket.AF_INET, socket.SOCK_DGRAM)
@@ -192,27 +203,3 @@ def _filter_services_and_dump(include: List, services_compose: Dict, docker_comp
# locally we have access to file
print(f"Saving config to '{docker_compose_path}'")
yaml.dump(content, fh, default_flow_style=False)
-
-
-
-def _run_docker_compose_config(docker_compose_paths, destination_path: Path, osparc_simcore_root_dir: Path) -> Dict:
- """
- Runs docker-compose config on multiple files 'docker_compose_paths' taking 'osparc_simcore_root_dir'
- as current working directory and saves the output to 'destination_path'
- """
-
-
- if not isinstance(docker_compose_paths, list):
- docker_compose_paths = [docker_compose_paths, ]
-
- config_paths = [ f"-f {os.path.relpath(docker_compose_path, osparc_simcore_root_dir)}" for docker_compose_path in docker_compose_paths]
- configs_prefix = " ".join(config_paths)
-
- # TODO: use instead python api of docker-compose!
- subprocess.run( f"docker-compose {configs_prefix} config > {destination_path}",
- shell=True, check=True,
- cwd=osparc_simcore_root_dir)
-
- with destination_path.open() as f:
- config = yaml.safe_load(f)
- return config
diff --git a/services/web/server/tests/integration/fixtures/docker_swarm.py b/services/web/server/tests/integration/fixtures/docker_swarm.py
index fb7b199bd21..ddb07a1e82c 100644
--- a/services/web/server/tests/integration/fixtures/docker_swarm.py
+++ b/services/web/server/tests/integration/fixtures/docker_swarm.py
@@ -26,17 +26,21 @@ def docker_swarm(docker_client):
# teardown
assert docker_client.swarm.leave(force=True)
+
@pytest.fixture(scope='module')
-def docker_stack(docker_swarm, docker_client, docker_compose_file: Path, ops_docker_compose_file: Path):
- stacks = ['simcore', 'ops' ]
+def docker_stack(docker_swarm, docker_client, core_services_config_file: Path, ops_services_config_file: Path):
+ stacks = {
+ 'simcore': core_services_config_file,
+ 'ops': ops_services_config_file
+ }
# make up-version
- subprocess.run( f"docker stack deploy -c {docker_compose_file.name} {stacks[0]}",
- shell=True, check=True,
- cwd=docker_compose_file.parent)
- subprocess.run( f"docker stack deploy -c {ops_docker_compose_file.name} {stacks[1]}",
- shell=True, check=True,
- cwd=ops_docker_compose_file.parent)
+ stacks_up = []
+ for stack_name, stack_config_file in stacks.items():
+ subprocess.run( f"docker stack deploy -c {stack_config_file.name} {stack_name}",
+ shell=True, check=True,
+ cwd=stack_config_file.parent)
+ stacks_up.append(stack_name)
def _print_services(msg):
from pprint import pprint
@@ -48,7 +52,7 @@ def _print_services(msg):
_print_services("[BEFORE TEST]")
yield {
- 'stacks':stacks,
+ 'stacks': stacks_up,
'services': [service.name for service in docker_client.services.list()]
}
@@ -70,14 +74,15 @@ def _print_services(msg):
# make down
# NOTE: remove them in reverse order since stacks share common networks
- stacks.reverse()
- for stack in stacks:
+ WAIT_BEFORE_RETRY_SECS = 1
+ stacks_up.reverse()
+ for stack in stacks_up:
subprocess.run(f"docker stack rm {stack}", shell=True, check=True)
while docker_client.services.list(filters={"label":f"com.docker.stack.namespace={stack}"}):
- time.sleep(1)
+ time.sleep(WAIT_BEFORE_RETRY_SECS)
while docker_client.networks.list(filters={"label":f"com.docker.stack.namespace={stack}"}):
- time.sleep(1)
+ time.sleep(WAIT_BEFORE_RETRY_SECS)
_print_services("[AFTER REMOVED]")
diff --git a/services/web/server/tests/integration/fixtures/postgres_service.py b/services/web/server/tests/integration/fixtures/postgres_service.py
index 9a4ade5d8cb..fda100a0df0 100644
--- a/services/web/server/tests/integration/fixtures/postgres_service.py
+++ b/services/web/server/tests/integration/fixtures/postgres_service.py
@@ -4,6 +4,8 @@
# pylint:disable=unused-argument
# pylint:disable=redefined-outer-name
+from copy import deepcopy
+
import pytest
import sqlalchemy as sa
import tenacity
@@ -14,8 +16,8 @@
@pytest.fixture(scope='module')
-def postgres_db(app_config, webserver_environ, docker_stack):
- cfg = app_config["db"]["postgres"]
+def postgres_db(_webserver_dev_config, webserver_environ, docker_stack):
+ cfg = deepcopy(_webserver_dev_config["db"]["postgres"])
url = DSN.format(**cfg)
# NOTE: Comment this to avoid postgres_service
diff --git a/services/web/server/tests/integration/fixtures/rabbit_service.py b/services/web/server/tests/integration/fixtures/rabbit_service.py
index a357c2fa8fe..bee2052a840 100644
--- a/services/web/server/tests/integration/fixtures/rabbit_service.py
+++ b/services/web/server/tests/integration/fixtures/rabbit_service.py
@@ -4,6 +4,7 @@
# pylint:disable=unused-argument
# pylint:disable=redefined-outer-name
+from copy import deepcopy
from typing import Dict
import aio_pika
@@ -12,8 +13,8 @@
@pytest.fixture(scope="function")
-async def rabbit_service(app_config: Dict, docker_stack):
- cfg = app_config["rabbit"]
+async def rabbit_service(_webserver_dev_config: Dict, docker_stack):
+ cfg = deepcopy(_webserver_dev_config["rabbit"])
host = cfg["host"]
port = cfg["port"]
user = cfg["user"]
diff --git a/services/web/server/tests/integration/fixtures/standard_directories.py b/services/web/server/tests/integration/fixtures/standard_directories.py
deleted file mode 100644
index 92b49e0b5f6..00000000000
--- a/services/web/server/tests/integration/fixtures/standard_directories.py
+++ /dev/null
@@ -1,89 +0,0 @@
-# pylint:disable=wildcard-import
-# pylint:disable=unused-import
-# pylint:disable=unused-variable
-# pylint:disable=unused-argument
-# pylint:disable=redefined-outer-name
-
-import sys
-from pathlib import Path
-
-import pytest
-
-import simcore_service_webserver
-
-
-@pytest.fixture(scope='session')
-def fixture_dir() -> Path:
- return Path(sys.argv[0] if __name__ == "__main__" else __file__).resolve().parent
-
-@pytest.fixture(scope='session')
-def package_dir() -> Path:
- dirpath = Path(simcore_service_webserver.__file__).resolve().parent
- assert dirpath.exists()
- return dirpath
-
-@pytest.fixture(scope='session')
-def osparc_simcore_root_dir(fixture_dir: Path) -> Path:
- root_dir = fixture_dir.parent.parent.parent.parent.parent.parent.resolve()
- assert root_dir.exists(), "Is this service within osparc-simcore repo?"
- assert any(root_dir.glob("services/web/server")), "%s not look like rootdir" % root_dir
- return root_dir
-
-@pytest.fixture(scope='session')
-def api_specs_dir(osparc_simcore_root_dir: Path) -> Path:
- specs_dir = osparc_simcore_root_dir/ "api" / "specs" / "webserver"
- assert specs_dir.exists()
- return specs_dir
-
-@pytest.fixture(scope='session')
-def integration_test_dir(fixture_dir: Path) -> Path:
- tests_dir = fixture_dir.parent.resolve()
- assert tests_dir.exists()
- return tests_dir
-
-@pytest.fixture(scope='session')
-def tests_dir(integration_test_dir: Path) -> Path:
- tests_dir = integration_test_dir.parent.resolve()
- assert tests_dir.exists()
- return tests_dir
-
-@pytest.fixture(scope='session')
-def fake_data_dir(tests_dir: Path) -> Path:
- fake_data_dir = tests_dir / "data"
- assert fake_data_dir.exists()
- return fake_data_dir
-
-# @pytest.fixture(scope='session')
-# def mock_dir(fixture_dir):
-# dirpath = fixture_dir / "mock"
-# assert dirpath.exists()
-# return dirpath
-
-# @pytest.fixture(scope='session')
-# def docker_compose_file(mock_dir):
-# """
-# Path to docker-compose configuration files used for testing
-
-# - fixture defined in pytest-docker
-# """
-# fpath = mock_dir / 'docker-compose.yml'
-# assert fpath.exists()
-# return str(fpath)
-
-@pytest.fixture(scope="session")
-def server_test_configfile(mock_dir):
- fpath = mock_dir / "configs/server-host-test.yaml"
- assert fpath.exists()
- return fpath
-
-@pytest.fixture(scope="session")
-def light_test_configfile(mock_dir):
- fpath = mock_dir / "configs/light-test.yaml"
- assert fpath.exists()
- return fpath
-
-@pytest.fixture("session")
-def env_devel_file(osparc_simcore_root_dir) -> Path:
- env_devel_fpath = osparc_simcore_root_dir / ".env-devel"
- assert env_devel_fpath.exists()
- return env_devel_fpath
diff --git a/services/web/server/tests/integration/test_project_workflow.py b/services/web/server/tests/integration/test_project_workflow.py
index 1961151403d..3889f8c57f1 100644
--- a/services/web/server/tests/integration/test_project_workflow.py
+++ b/services/web/server/tests/integration/test_project_workflow.py
@@ -3,27 +3,20 @@
e.g. run, pull, push, ... pipelines
This one here is too similar to unit/with_postgres/test_projects.py
"""
-
-# pylint:disable=wildcard-import
-# pylint:disable=unused-import
# pylint:disable=unused-variable
# pylint:disable=unused-argument
# pylint:disable=redefined-outer-name
import json
-import sys
from asyncio import Future
from copy import deepcopy
from pathlib import Path
-from pprint import pprint
from typing import Dict, List
import pytest
from aiohttp import web
from servicelib.application import create_safe_application
-from servicelib.application_keys import APP_CONFIG_KEY
-from servicelib.rest_responses import unwrap_envelope
from simcore_service_webserver.db import setup_db
from simcore_service_webserver.login import setup_login
from simcore_service_webserver.projects import setup_projects
@@ -48,9 +41,31 @@
# 'adminer'
]
-@pytest.fixture(scope='session')
-def here() -> Path:
- return Path(sys.argv[0] if __name__ == "__main__" else __file__).resolve().parent
+
+@pytest.fixture
+def client(loop, aiohttp_client,
+ app_config, ## waits until swarm with *_services are up
+ ):
+ assert app_config["rest"]["version"] == API_VERSION
+ assert API_VERSION in app_config["rest"]["location"]
+
+ app_config['storage']['enabled'] = False
+ app_config['rabbit']['enabled'] = False
+
+ app = create_safe_application(app_config)
+
+ setup_db(app)
+ setup_session(app)
+ setup_security(app)
+ setup_rest(app)
+ setup_login(app)
+ assert setup_projects(app)
+
+ yield loop.run_until_complete(aiohttp_client(app, server_kwargs={
+ 'port': app_config["main"]["port"],
+ 'host': app_config['main']['host']
+ }))
+
@pytest.fixture(scope="session")
def fake_template_projects(package_dir: Path) -> Dict:
@@ -84,35 +99,6 @@ def fake_project_data(fake_data_dir: Path) -> Dict:
with (fake_data_dir / "fake-project.json").open() as fp:
return json.load(fp)
-@pytest.fixture
-def webserver_service(loop, docker_stack, aiohttp_server, aiohttp_unused_port, api_specs_dir, app_config):
-# def webserver_service(loop, aiohttp_server, aiohttp_unused_port, api_specs_dir, app_config): # <<< DEVELOPMENT
- port = app_config["main"]["port"] = aiohttp_unused_port()
- app_config['main']['host'] = '127.0.0.1'
-
- assert app_config["rest"]["version"] == API_VERSION
- assert API_VERSION in app_config["rest"]["location"]
-
- app_config['storage']['enabled'] = False
- app_config['rabbit']['enabled'] = False
-
- app = create_safe_application(app_config)
-
- setup_db(app)
- setup_session(app)
- setup_security(app)
- setup_rest(app)
- setup_login(app)
- assert setup_projects(app)
-
- yield loop.run_until_complete( aiohttp_server(app, port=port) )
-
-@pytest.fixture
-def client(loop, webserver_service, aiohttp_client):
- client = loop.run_until_complete(aiohttp_client(webserver_service))
- yield client
-
-
@pytest.fixture
async def logged_user(client): #, role: UserRole):
""" adds a user in db and logs in with client
diff --git a/services/web/server/tests/integration-proxy/Makefile b/services/web/server/tests/sandbox/TODO - integration-proxy/Makefile
similarity index 100%
rename from services/web/server/tests/integration-proxy/Makefile
rename to services/web/server/tests/sandbox/TODO - integration-proxy/Makefile
diff --git a/services/web/server/tests/integration-proxy/conftest.py b/services/web/server/tests/sandbox/TODO - integration-proxy/conftest.py
similarity index 100%
rename from services/web/server/tests/integration-proxy/conftest.py
rename to services/web/server/tests/sandbox/TODO - integration-proxy/conftest.py
diff --git a/services/web/server/tests/integration-proxy/test_application_proxy.py b/services/web/server/tests/sandbox/TODO - integration-proxy/test_application_proxy.py
similarity index 100%
rename from services/web/server/tests/integration-proxy/test_application_proxy.py
rename to services/web/server/tests/sandbox/TODO - integration-proxy/test_application_proxy.py
diff --git a/services/web/server/tests/unit/conftest.py b/services/web/server/tests/unit/conftest.py
index 13ab8432a6b..8cf0b5f48ef 100644
--- a/services/web/server/tests/unit/conftest.py
+++ b/services/web/server/tests/unit/conftest.py
@@ -1,87 +1,39 @@
+""" Configuration for unit testing
+
+ - Any interaction with other apps MUST be emulated with fakes/mocks
+ - the ONLY external app allowed is postgres (see unit/with_postgres)
+"""
+
# pylint: disable=unused-argument
-# pylint: disable=unused-import
# pylint: disable=bare-except
# pylint:disable=redefined-outer-name
-import collections
import json
import logging
-import os
import sys
from pathlib import Path
from typing import Dict
import pytest
-import yaml
-import simcore_service_webserver
-from simcore_service_webserver.cli_config import read_and_validate
+## current directory
+current_dir = Path(sys.argv[0] if __name__ == "__main__" else __file__).resolve().parent
+## Log
log = logging.getLogger(__name__)
-# mute noisy loggers
-logging.getLogger("openapi_spec_validator").setLevel(logging.WARNING)
-logging.getLogger("sqlalchemy").setLevel(logging.WARNING)
-
-sys.path.append(str(Path(sys.argv[0] if __name__ == "__main__" else __file__).resolve().parent.parent / 'helpers'))
@pytest.fixture(scope='session')
def here():
- return Path(sys.argv[0] if __name__ == "__main__" else __file__).resolve().parent
+ cdir = Path(sys.argv[0] if __name__ == "__main__" else __file__).resolve().parent
+ assert cdir == current_dir, "Somebody changing current_dir?"
+ return cdir
-@pytest.fixture(scope='session')
-def package_dir(here):
- dirpath = Path(simcore_service_webserver.__file__).resolve().parent
- assert dirpath.exists()
- return dirpath
@pytest.fixture(scope='session')
-def osparc_simcore_root_dir(here):
- root_dir = here.parent.parent.parent.parent.parent.resolve()
- assert root_dir.exists(), "Is this service within osparc-simcore repo?"
- assert any(root_dir.glob("services/web/server")), "%s not look like rootdir" % root_dir
- return root_dir
+def fake_static_dir(fake_data_dir: Path) -> Path:
+ return fake_data_dir / "static"
-@pytest.fixture(scope='session')
-def api_specs_dir(osparc_simcore_root_dir):
- specs_dir = osparc_simcore_root_dir/ "api" / "specs" / "webserver"
- assert specs_dir.exists()
- return specs_dir
-
-@pytest.fixture(scope='session')
-def mock_dir(here):
- dirpath = here / "mock"
- assert dirpath.exists()
- return dirpath
-
-@pytest.fixture(scope='session')
-def fake_data_dir(here):
- dirpath = (here / "../data").resolve()
- assert dirpath.exists()
- return dirpath
-
-@pytest.fixture(scope='session')
-def docker_compose_file(mock_dir):
- """
- Path to docker-compose configuration files used for testing
-
- - fixture defined in pytest-docker
- """
- fpath = mock_dir / 'docker-compose.yml'
- assert fpath.exists()
- return str(fpath)
-
-@pytest.fixture(scope="session")
-def server_test_configfile(mock_dir):
- fpath = mock_dir / "configs/server-host-test.yaml"
- assert fpath.exists()
- return fpath
-
-@pytest.fixture(scope="session")
-def light_test_configfile(mock_dir):
- fpath = mock_dir / "configs/light-test.yaml"
- assert fpath.exists()
- return fpath
@pytest.fixture
def fake_project(fake_data_dir: Path) -> Dict:
@@ -92,3 +44,8 @@ def fake_project(fake_data_dir: Path) -> Dict:
@pytest.fixture
def project_schema_file(api_specs_dir: Path) -> Path:
return api_specs_dir / "v0/components/schemas/project-v0.0.1.json"
+
+@pytest.fixture
+def activity_data(fake_data_dir: Path) -> Dict:
+ with (fake_data_dir / "test_activity_data.json").open() as fp:
+ yield json.load(fp)
diff --git a/services/web/server/tests/unit/mock/configs/light-test.yaml b/services/web/server/tests/unit/mock/configs/light-test.yaml
deleted file mode 100644
index 5f47c870180..00000000000
--- a/services/web/server/tests/unit/mock/configs/light-test.yaml
+++ /dev/null
@@ -1,46 +0,0 @@
-# This config is used for testing on the host
----
-version: "1.0"
-main:
- host: 127.0.0.1
- port: 8080
- client_outdir: ../../../client/source-output
- log_level: DEBUG
- testing: True
-director:
- host: localhost
- port: 8001
-db:
- enabled: False
- postgres:
- database: test_db
- user: test_user
- password: test_pass
- host: localhost
- port: 0000
- # DEPRECATE OR add postgresql+psycopg2:// otherwise will fail sqlalchemy.exc.ArgumentError: Could not parse rfc1738 URL from string 'localhost:5432'
- endpoint: localhost:5432
-rabbit:
- enabled: False
- host: ${RABBIT_HOST}
- password: simcore
- user: simcore
- channels:
- log: comp.backend.channels.log
- progress: comp.backend.channels.progress
-# s3:
-# access_key: '12345678'
-# bucket_name: simcore
-# endpoint: localhost:9000
-# secret_key: '12345678'
-smtp:
- sender: 'OSPARC support '
- host: mail.foo.com
- port: 25
- tls: False
- username: None
- password: None
-rest:
- version: v0
- location: api/specs/webserver/v0/openapi.yaml
-...
diff --git a/services/web/server/tests/unit/mock/configs/minimum.yaml b/services/web/server/tests/unit/mock/configs/minimum.yaml
deleted file mode 100644
index ebb0ab90620..00000000000
--- a/services/web/server/tests/unit/mock/configs/minimum.yaml
+++ /dev/null
@@ -1,48 +0,0 @@
-# This config is used for testing on the host
----
-version: "1.0"
-main:
- host: 127.0.0.1
- port: 8080
- client_outdir: client/source-output
- log_level: DEBUG
- testing: True
-director:
- host: localhost
- port: 8001
-db:
- enabled: False
- postgres:
- database: test_db
- user: test_user
- password: test_pass
- host: localhost
- port: 0000
- # DEPRECATE OR add postgresql+psycopg2:// otherwise will fail sqlalchemy.exc.ArgumentError: Could not parse rfc1738 URL from string 'localhost:5432'
- endpoint: localhost:5432
-rabbit:
- enabled: False
- host: foo
- password: simcore
- user: simcore
- channels:
- log: comp.backend.channels.log
- progress: comp.backend.channels.progress
-s3:
- access_key: '12345678'
- bucket_name: simcore
- endpoint: localhost:9000
- secret_key: '12345678'
-smtp:
- sender: 'OSPARC support '
- host: mail.foo.com
- port: 25
- tls: False
- username: None
- password: None
-rest:
- version: v0
- location: api/specs/webserver/v0/openapi.yaml
-session:
- secret_key: 'Thirty two length bytes key.'
-...
diff --git a/services/web/server/tests/unit/mock/configs/server-host-test.yaml b/services/web/server/tests/unit/mock/configs/server-host-test.yaml
deleted file mode 100644
index 83ff8e6bb24..00000000000
--- a/services/web/server/tests/unit/mock/configs/server-host-test.yaml
+++ /dev/null
@@ -1,46 +0,0 @@
-# This config is used for testing on the host
----
-version: "1.0"
-main:
- host: 127.0.0.1
- port: 8080
- client_outdir: ../../../client/source-output
- log_level: DEBUG
- testing: True
-director:
- host: localhost
- port: 8001
-db:
- postgres:
- database: test_db
- user: test_user
- password: test_pass
- host: localhost
- port: ${POSTGRES_PORT}
- # DEPRECATE OR add postgresql+psycopg2:// otherwise will fail sqlalchemy.exc.ArgumentError: Could not parse rfc1738 URL from string 'localhost:5432'
- endpoint: localhost:5432
-rabbit:
- host: ${RABBIT_HOST}
- password: simcore
- user: simcore
- channels:
- log: comp.backend.channels.log
- progress: comp.backend.channels.progress
-s3:
- access_key: '12345678'
- bucket_name: simcore
- endpoint: localhost:9000
- secret_key: '12345678'
-smtp:
- sender: 'OSPARC support '
- host: mail.foo.com
- port: 25
- tls: False
- username: None
- password: None
-rest:
- version: v0
- location: api/specs/webserver/v0/openapi.yaml
-session:
- secret_key: "Thirty two length bytes key."
-...
diff --git a/services/web/server/tests/unit/mock/docker-compose.yml b/services/web/server/tests/unit/mock/docker-compose.yml
deleted file mode 100644
index 76eace917bc..00000000000
--- a/services/web/server/tests/unit/mock/docker-compose.yml
+++ /dev/null
@@ -1,24 +0,0 @@
-version: '3.4'
-services:
- postgres:
- image: postgres:10
- #volumes: TODO: make db persistent
- # - '.:/home/scu/client'
- ports:
- - '5432:5432'
- adminer:
- image: adminer
- ports:
- - 18080:8080
- depends_on:
- - postgres
- rabbit:
- image: rabbitmq:3-management
- environment:
- - RABBITMQ_DEFAULT_USER=simcore
- - RABBITMQ_DEFAULT_PASS=simcore
- ports:
- # NOTE: these need to added because test server runs in host!
- - "15672:15672"
- - "5671:5671"
- - "5672:5672"
diff --git a/services/web/server/tests/unit/test_activity.py b/services/web/server/tests/unit/test_activity.py
new file mode 100644
index 00000000000..2ce753905fa
--- /dev/null
+++ b/services/web/server/tests/unit/test_activity.py
@@ -0,0 +1,119 @@
+# pylint:disable=unused-variable
+# pylint:disable=unused-argument
+# pylint:disable=redefined-outer-name
+
+import importlib
+from asyncio import Future
+from pathlib import Path
+
+import yaml
+
+import pytest
+from aiohttp import web
+from aiohttp.client_exceptions import ClientConnectionError
+from servicelib.application import create_safe_application
+from simcore_service_webserver.activity import handlers, setup_activity
+from simcore_service_webserver.rest import setup_rest
+from simcore_service_webserver.security import setup_security
+from simcore_service_webserver.session import setup_session
+from utils_assert import assert_status
+
+
+def future_with_result(result):
+ f = Future()
+ f.set_result(result)
+ return f
+
+
+@pytest.fixture
+def mocked_login_required(mocker):
+ mock = mocker.patch(
+ 'simcore_service_webserver.login.decorators.login_required',
+ lambda h: h)
+ importlib.reload(handlers)
+ return mock
+
+@pytest.fixture
+def mocked_monitoring(loop, mocker, activity_data):
+ prometheus_data = activity_data.get('prometheus')
+ cpu_ret = prometheus_data.get('cpu_return')
+ mocker.patch('simcore_service_webserver.activity.handlers.get_cpu_usage',
+ return_value=future_with_result(cpu_ret))
+
+ mem_ret = prometheus_data.get('memory_return')
+ mocker.patch('simcore_service_webserver.activity.handlers.get_memory_usage',
+ return_value=future_with_result(mem_ret))
+
+ labels_ret = prometheus_data.get('labels_return')
+ mocker.patch('simcore_service_webserver.activity.handlers.get_container_metric_for_labels',
+ return_value=future_with_result(labels_ret))
+
+ celery_data = activity_data.get('celery')
+ celery_ret = celery_data.get('celery_return')
+ mocker.patch('simcore_service_webserver.activity.handlers.get_celery_reserved',
+ return_value=future_with_result(celery_ret))
+
+@pytest.fixture
+def mocked_monitoring_down(mocker):
+ mocker.patch(
+ 'simcore_service_webserver.activity.handlers.query_prometheus',
+ side_effect=ClientConnectionError)
+ mocker.patch(
+ 'simcore_service_webserver.activity.handlers.celery_reserved',
+ side_effect=ClientConnectionError)
+ return mocker
+
+@pytest.fixture
+def app_config(fake_data_dir: Path, osparc_simcore_root_dir: Path):
+ with open(fake_data_dir/"test_activity_config.yml") as fh:
+ content = fh.read()
+ config = content.replace("${OSPARC_SIMCORE_REPO_ROOTDIR}", str(osparc_simcore_root_dir))
+
+ return yaml.safe_load(config)
+
+@pytest.fixture
+def client(loop, aiohttp_client, app_config):
+ app = create_safe_application(app_config)
+
+ setup_session(app)
+ setup_security(app)
+ setup_rest(app)
+ setup_activity(app)
+
+ cli = loop.run_until_complete(aiohttp_client(app))
+ return cli
+
+
+async def test_has_login_required(client):
+ resp = await client.get('/v0/activity/status')
+ await assert_status(resp, web.HTTPUnauthorized)
+
+async def test_monitoring_up(mocked_login_required, mocked_monitoring, client):
+ QUEUED_NODE_ID = '35f95ad4-67b8-4ed8-bd55-84a5d600e687'
+ RUNNING_NODE_ID = '894dd8d5-de3b-4767-950c-7c3ed8f51d8c'
+
+ resp = await client.get('/v0/activity/status')
+ data, _ = await assert_status(resp, web.HTTPOk)
+ assert QUEUED_NODE_ID in data, 'Queued node not present'
+ assert RUNNING_NODE_ID in data, 'Running node not present'
+
+ celery = data.get(QUEUED_NODE_ID)
+ prometheus = data.get(RUNNING_NODE_ID)
+
+ assert 'queued' in celery, 'There is no queued key for queued node'
+ assert celery.get('queued'), 'Queued should be True for queued node'
+
+ assert 'limits' in prometheus, 'There is no limits key for running node'
+ assert 'stats' in prometheus, 'There is no stats key for running node'
+
+ limits = prometheus.get('limits')
+ assert limits.get('cpus') == 4.0, 'Incorrect value: Cpu limit'
+ assert limits.get('mem') == 2048.0, 'Incorrect value: Memory limit'
+
+ stats = prometheus.get('stats')
+ assert stats.get('cpuUsage') == 3.9952102200000006, 'Incorrect value: Cpu usage'
+ assert stats.get('memUsage') == 177.664, 'Incorrect value: Memory usage'
+
+async def test_monitoring_down(mocked_login_required, mocked_monitoring_down, client):
+ resp = await client.get('/v0/activity/status')
+ await assert_status(resp, web.HTTPNoContent)
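The monitoring mocks above return pre-resolved Futures so the handlers can await the patched calls. A standalone sketch of the pattern; all names here are illustrative:

    import asyncio
    from unittest import mock

    loop = asyncio.new_event_loop()

    def future_with_result(result):
        f = loop.create_future()
        f.set_result(result)
        return f

    async def consume(fetch):
        # the MagicMock call returns the Future; awaiting it yields the payload
        return await fetch()

    fetch = mock.MagicMock(return_value=future_with_result({"cpu": 3.99}))
    assert loop.run_until_complete(consume(fetch)) == {"cpu": 3.99}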
diff --git a/services/web/server/tests/unit/test_template_projects.py b/services/web/server/tests/unit/test_template_projects.py
index c043328173f..43d3e8ce575 100644
--- a/services/web/server/tests/unit/test_template_projects.py
+++ b/services/web/server/tests/unit/test_template_projects.py
@@ -40,8 +40,8 @@ def fake_db():
@pytest.fixture
-def mock_parametrized_project(mock_dir):
- path = mock_dir/"parametrized_project.json"
+def mock_parametrized_project(fake_data_dir):
+ path = fake_data_dir/"parametrized_project.json"
with path.open() as fh:
prj = json.load(fh)
diff --git a/services/web/server/tests/unit/with_postgres/config.yaml b/services/web/server/tests/unit/with_postgres/config.yaml
index a7be72569bb..1e3724ebcd7 100644
--- a/services/web/server/tests/unit/with_postgres/config.yaml
+++ b/services/web/server/tests/unit/with_postgres/config.yaml
@@ -1,6 +1,6 @@
version: '1.0'
main:
- client_outdir: ${OSPARC_SIMCORE_REPO_ROOTDIR}/services/web/server/tests/unit/mock/static
+ client_outdir: ${OSPARC_SIMCORE_REPO_ROOTDIR}/services/web/server/tests/data/static
host: 127.0.0.1
log_level: DEBUG
port: 8080
@@ -57,3 +57,5 @@ projects:
session:
# python -c "from cryptography.fernet import Fernet; print(Fernet.generate_key())"
secret_key: 'tjwiMSLe0Xd9dwMlAVQT9pYY9JEnr7rcH05fkUcukVs='
+activity:
+ enabled: False
diff --git a/services/web/server/tests/unit/with_postgres/conftest.py b/services/web/server/tests/unit/with_postgres/conftest.py
index e133fc14efd..e150f5d8684 100644
--- a/services/web/server/tests/unit/with_postgres/conftest.py
+++ b/services/web/server/tests/unit/with_postgres/conftest.py
@@ -1,27 +1,23 @@
-""" Fixtures for this folder's tests
+""" Configuration for unit testing with a postgress fixture
-Notice that fixtures in ../conftest.py are also accessible here
+ - Unit testing of the webserver app with a postgres service as fixture
+ - Starts the test session by running a postgres container as a fixture (see postgres_service)
+ IMPORTANT: remember that these are still unit-tests!
"""
-# pylint:disable=wildcard-import
-# pylint:disable=unused-import
# pylint:disable=unused-variable
# pylint:disable=unused-argument
# pylint:disable=redefined-outer-name
-
-import json
import os
import sys
from asyncio import Future
from copy import deepcopy
from pathlib import Path
-from typing import Dict
import pytest
import sqlalchemy as sa
import trafaret_config
-import yaml
import simcore_service_webserver.utils
from simcore_service_webserver.application import create_application
@@ -30,40 +26,14 @@
from simcore_service_webserver.db import DSN
from simcore_service_webserver.db_models import confirmations, metadata, users
-tests_folder = Path(sys.argv[0] if __name__ == "__main__" else __file__).resolve().parent.parent.parent
-sys.path.append(str(tests_folder/ 'helpers'))
-
-
-@pytest.fixture(scope="session")
-def here():
- return Path(sys.argv[0] if __name__ == "__main__" else __file__).resolve().parent
-
-@pytest.fixture(scope="session")
-def mock_dir(here):
- return here / "../mock"
-
-@pytest.fixture(scope='session')
-def fake_data_dir(here):
- dirpath = (here / "../../data").resolve()
- assert dirpath.exists()
- return dirpath
-
-@pytest.fixture
-def fake_project(fake_data_dir: Path) -> Dict:
- with (fake_data_dir / "fake-project.json").open() as fp:
- yield json.load(fp)
+## current directory
+current_dir = Path(sys.argv[0] if __name__ == "__main__" else __file__).resolve().parent
-@pytest.fixture(scope='session')
-def osparc_simcore_root_dir(here):
- root_dir = here.parent.parent.parent.parent.parent.parent.resolve()
- assert root_dir.exists(), "Is this service within osparc-simcore repo?"
- assert any(root_dir.glob("services/web/server")), "%s not look like rootdir" % root_dir
- return root_dir
@pytest.fixture(scope="session")
-def default_app_cfg(here, osparc_simcore_root_dir):
+def default_app_cfg(osparc_simcore_root_dir, fake_static_dir):
# NOTE: ONLY used at the session scopes
- cfg_path = here / "config.yaml"
+ cfg_path = current_dir / "config.yaml"
assert cfg_path.exists()
variables = dict(os.environ)
@@ -74,11 +44,12 @@ def default_app_cfg(here, osparc_simcore_root_dir):
# validates and fills all defaults/optional entries that normal load would not do
cfg_dict = trafaret_config.read_and_validate(cfg_path, app_schema, vars=variables)
+ assert Path(cfg_dict["main"]["client_outdir"]) == fake_static_dir
+
# WARNING: changes to this fixture during testing propagates to other tests. Use cfg = deepcopy(cfg_dict)
# FIXME: free cfg_dict but deepcopy shall be r/w
return cfg_dict
-
@pytest.fixture(scope="function")
def app_cfg(default_app_cfg, aiohttp_unused_port):
cfg = deepcopy(default_app_cfg)
@@ -90,10 +61,10 @@ def app_cfg(default_app_cfg, aiohttp_unused_port):
# this fixture can be safely modified during test since it is renovated on every call
return cfg
-
@pytest.fixture(scope='session')
-def docker_compose_file(here, default_app_cfg):
+def docker_compose_file(default_app_cfg):
""" Overrides pytest-docker fixture
+
"""
old = os.environ.copy()
@@ -104,13 +75,14 @@ def docker_compose_file(here, default_app_cfg):
os.environ['TEST_POSTGRES_USER']=cfg['user']
os.environ['TEST_POSTGRES_PASSWORD']=cfg['password']
- dc_path = here / 'docker-compose.yml'
+ dc_path = current_dir / 'docker-compose.yml'
assert dc_path.exists()
yield str(dc_path)
os.environ = old
+
@pytest.fixture(scope='session')
def postgres_service(docker_services, docker_ip, default_app_cfg):
cfg = deepcopy(default_app_cfg["db"]["postgres"])
@@ -125,8 +97,8 @@ def postgres_service(docker_services, docker_ip, default_app_cfg):
timeout=30.0,
pause=0.1,
)
- return url
+ return url
@pytest.fixture
def postgres_db(app_cfg, postgres_service):
@@ -145,21 +117,18 @@ def postgres_db(app_cfg, postgres_service):
metadata.drop_all(engine)
engine.dispose()
-
@pytest.fixture
-def server(loop, aiohttp_server, app_cfg, monkeypatch, postgres_db): #pylint: disable=R0913
+def web_server(loop, aiohttp_server, app_cfg, monkeypatch, postgres_db):
app = create_application(app_cfg)
path_mail(monkeypatch)
server = loop.run_until_complete( aiohttp_server(app, port=app_cfg["main"]["port"]) )
return server
-
@pytest.fixture
-def client(loop, aiohttp_client, server):
- client = loop.run_until_complete(aiohttp_client(server))
+def client(loop, aiohttp_client, web_server):
+ client = loop.run_until_complete(aiohttp_client(web_server))
return client
-
@pytest.fixture
async def storage_subsystem_mock(loop, mocker):
"""
@@ -180,7 +149,9 @@ async def _mock_copy_data_from_project(*args):
mock1.return_value.set_result("")
return mock, mock1
+
# helpers ---------------
+
def path_mail(monkeypatch):
async def send_mail(*args):
print('=== EMAIL TO: {}\n=== SUBJECT: {}\n=== BODY:\n{}'.format(*args))
diff --git a/services/web/server/tests/unit/with_postgres/docker-compose.debug.yml b/services/web/server/tests/unit/with_postgres/docker-compose.debug.yml
index fc1ef184537..d6eeb261d20 100644
--- a/services/web/server/tests/unit/with_postgres/docker-compose.debug.yml
+++ b/services/web/server/tests/unit/with_postgres/docker-compose.debug.yml
@@ -1,7 +1,7 @@
version: '3.4'
services:
postgres:
- image: postgres:10
+ image: postgres:10.10
restart: always
environment:
# defaults are the same as in conftest.yaml so we start compose from command line for debugging
diff --git a/services/web/server/tests/unit/with_postgres/docker-compose.yml b/services/web/server/tests/unit/with_postgres/docker-compose.yml
index e49fb5cdfd7..7b0c3fe4a51 100644
--- a/services/web/server/tests/unit/with_postgres/docker-compose.yml
+++ b/services/web/server/tests/unit/with_postgres/docker-compose.yml
@@ -1,7 +1,7 @@
version: '3.4'
services:
postgres:
- image: postgres:10
+ image: postgres:10.10
restart: always
environment:
POSTGRES_DB: ${TEST_POSTGRES_DB}
diff --git a/services/web/server/tests/unit/with_postgres/test_access_to_studies.py b/services/web/server/tests/unit/with_postgres/test_access_to_studies.py
index 2fbf94d26c4..70e38ad8d59 100644
--- a/services/web/server/tests/unit/with_postgres/test_access_to_studies.py
+++ b/services/web/server/tests/unit/with_postgres/test_access_to_studies.py
@@ -64,9 +64,6 @@ def client(loop, aiohttp_client, app_cfg, postgres_service, qx_client_outdir, mo
#def client(loop, aiohttp_client, app_cfg, qx_client_outdir, monkeypatch): # <<<< FOR DEVELOPMENT. DO NOT REMOVE.
cfg = deepcopy(app_cfg)
- port = cfg["main"]["port"]
- cfg['main']['host'] = '127.0.0.1'
-
cfg["db"]["init_tables"] = True # inits tables of postgres_service upon startup
cfg['projects']['enabled'] = True
cfg['storage']['enabled'] = False
@@ -86,8 +83,8 @@ def client(loop, aiohttp_client, app_cfg, postgres_service, qx_client_outdir, mo
# server and client
yield loop.run_until_complete(aiohttp_client(app, server_kwargs={
- 'port': port,
- 'host': 'localhost'
+ 'port': cfg["main"]["port"],
+ 'host': cfg['main']['host']
}))
diff --git a/services/web/server/tests/unit/with_postgres/test_db.py b/services/web/server/tests/unit/with_postgres/test_db.py
index 72dc0c04137..a7a216acaaf 100644
--- a/services/web/server/tests/unit/with_postgres/test_db.py
+++ b/services/web/server/tests/unit/with_postgres/test_db.py
@@ -1,8 +1,21 @@
-from simcore_service_webserver.db import is_service_enabled, is_service_responsive
+import io
+import yaml
+from simcore_service_webserver.db import (is_service_enabled,
+ is_service_responsive)
-async def test_responsive(server):
- app = server.app
+def test_uses_same_postgres_version(docker_compose_file, osparc_simcore_root_dir):
+ with io.open(docker_compose_file) as fh:
+ fixture = yaml.safe_load(fh)
+
+ with io.open(osparc_simcore_root_dir / "services" / "docker-compose.yml") as fh:
+ expected = yaml.safe_load(fh)
+
+ assert fixture['services']['postgres']['image'] == expected['services']['postgres']['image']
+
+
+async def test_responsive(web_server):
+ app = web_server.app
assert is_service_enabled(app)
assert await is_service_responsive(app)
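
The version-pinning test above could be generalized to every pinned image shared by the two compose files; a sketch under the assumption that both files follow the same services layout (the helper name is made up):

# Hypothetical extension of test_uses_same_postgres_version: compare image
# pins for all services present in both compose files.
import yaml

def image_pins(compose_path) -> dict:
    with open(compose_path) as fh:
        config = yaml.safe_load(fh)
    return {name: srv["image"]
            for name, srv in config["services"].items() if "image" in srv}

def test_pinned_images_match(docker_compose_file, osparc_simcore_root_dir):
    fixture = image_pins(docker_compose_file)
    expected = image_pins(osparc_simcore_root_dir / "services" / "docker-compose.yml")
    for name in set(fixture) & set(expected):
        assert fixture[name] == expected[name], f"{name} image drifted"
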
diff --git a/services/web/server/tests/unit/with_postgres/test_login.py b/services/web/server/tests/unit/with_postgres/test_login.py
index 1c2a959273c..128d58dfa03 100644
--- a/services/web/server/tests/unit/with_postgres/test_login.py
+++ b/services/web/server/tests/unit/with_postgres/test_login.py
@@ -79,7 +79,7 @@ async def test_login_inactive_user(client):
async def test_login_successfully(client):
url = client.app.router['auth_login'].url_for()
- r = await client.get(url)
+
async with NewUser() as user:
r = await client.post(url, json={
'email': user['email'],
@@ -91,7 +91,3 @@ async def test_login_successfully(client):
assert not error
assert data
assert cfg.MSG_LOGGED_IN in data['message']
-
-if __name__ == '__main__':
- import pytest
- pytest.main([__file__, '--maxfail=1'])
diff --git a/services/web/server/tests/unit/with_postgres/test_users.py b/services/web/server/tests/unit/with_postgres/test_users.py
index 140c83cdf7e..0515be683f0 100644
--- a/services/web/server/tests/unit/with_postgres/test_users.py
+++ b/services/web/server/tests/unit/with_postgres/test_users.py
@@ -8,10 +8,13 @@
import random
from copy import deepcopy
from itertools import repeat
+from unittest.mock import MagicMock
import faker
import pytest
from aiohttp import web
+from aiopg.sa.connection import SAConnection
+from psycopg2 import OperationalError
from yarl import URL
from servicelib.application import create_safe_application
@@ -140,7 +143,6 @@ async def test_get_profile(logged_user, client, role, expected):
assert data['last_name'] == ""
assert data['role'] == role.name.capitalize()
-
@pytest.mark.parametrize("role,expected", [
(UserRole.ANONYMOUS, web.HTTPUnauthorized),
(UserRole.GUEST, web.HTTPForbidden),
@@ -273,3 +275,43 @@ async def test_delete_token(client, logged_user, tokens_db, fake_tokens, role, e
if not error:
assert not (await get_token_from_db(tokens_db, token_service=sid))
+
+
+## BUG FIXES #######################################################
+
+@pytest.fixture
+def mock_failing_connection(mocker) -> MagicMock:
+ """
+ async with engine.acquire() as conn:
+ await conn.execute(query) --> will raise OperationalError
+ """
+ # See http://initd.org/psycopg/docs/module.html
+ conn_execute = mocker.patch.object(SAConnection, "execute")
+ conn_execute.side_effect=OperationalError("MOCK: server closed the connection unexpectedly")
+ return conn_execute
+
+@pytest.mark.parametrize("role,expected", [
+ (UserRole.USER, web.HTTPServiceUnavailable),
+])
+async def test_get_profile_with_failing_db_connection(logged_user, client,
+ mock_failing_connection: MagicMock,
+ role: UserRole,
+ expected: web.HTTPException):
+ """
+ Reproduces issue https://github.com/ITISFoundation/osparc-simcore/pull/1160
+
+    A logged-in user fails to get the profile because the database connection
+    drops during authentication,
+
+    i.e. conn.execute(query) will raise psycopg2.OperationalError: server closed the connection unexpectedly
+
+ ISSUES: #880, #1160
+ """
+ url = client.app.router["get_my_profile"].url_for()
+ assert str(url) == "/v0/me"
+
+ resp = await client.get(url)
+
+ NUM_RETRY = 3
+    assert mock_failing_connection.call_count == NUM_RETRY, "Expected mocked connection failure to be raised in AuthorizationPolicy.authorized_userid after several retries"
+
+ data, error = await assert_status(resp, expected)
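
The regression test above expects the authorization query to be retried NUM_RETRY times before the request fails with 503. A minimal sketch of such a retry policy, assuming tenacity (already used elsewhere in this repo); the actual AuthorizationPolicy.authorized_userid implementation may differ:

# Hypothetical sketch: retry a flaky DB query, then degrade to 503.
from aiohttp import web
from psycopg2 import OperationalError
from tenacity import (retry, retry_if_exception_type, stop_after_attempt,
                      wait_fixed)

@retry(retry=retry_if_exception_type(OperationalError),
       stop=stop_after_attempt(3),  # matches NUM_RETRY in the test above
       wait=wait_fixed(0.1),
       reraise=True)
async def _execute_with_retry(conn, query):
    return await conn.execute(query)

async def authorized_userid(conn, query):
    try:
        return await _execute_with_retry(conn, query)
    except OperationalError as err:
        raise web.HTTPServiceUnavailable(reason="database unavailable") from err
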
diff --git a/tests/swarm-deploy/Makefile b/tests/swarm-deploy/Makefile
new file mode 100644
index 00000000000..d4ce19fa715
--- /dev/null
+++ b/tests/swarm-deploy/Makefile
@@ -0,0 +1,30 @@
+.DEFAULT_GOAL := help
+
+ROOT_DIR = $(realpath $(CURDIR)/../../)
+VENV_DIR ?= $(realpath $(ROOT_DIR)/.venv)
+
+
+%.txt: %.in
+ # pip compiling $<
+ @$(VENV_DIR)/bin/pip-compile --output-file $@ $<
+
+
+.PHONY: install
+install: $(VENV_DIR) requirements.txt ## installs dependencies
+ # installing requirements
+	@$(VENV_DIR)/bin/pip install -r requirements.txt
diff --git a/tests/swarm-deploy/conftest.py b/tests/swarm-deploy/conftest.py
new file mode 100644
--- /dev/null
+++ b/tests/swarm-deploy/conftest.py
+# pylint:disable=unused-argument
+# pylint:disable=redefined-outer-name
+
+import os
+import subprocess
+import sys
+import time
+from pathlib import Path
+from typing import Dict
+
+import docker
+import pytest
+import yaml
+from docker import DockerClient
+
+current_dir = Path(sys.argv[0] if __name__ == "__main__" else __file__).resolve().parent
+
+
+@pytest.fixture(scope="session")
+def osparc_simcore_root_dir() -> Path:
+ WILDCARD = "services/web/server"
+
+ root_dir = Path(current_dir)
+ while not any(root_dir.glob(WILDCARD)) and root_dir != Path("/"):
+ root_dir = root_dir.parent
+
+ msg = f"'{root_dir}' does not look like the git root directory of osparc-simcore"
+ assert root_dir.exists(), msg
+ assert any(root_dir.glob(WILDCARD)), msg
+ assert any(root_dir.glob(".git")), msg
+
+ return root_dir
+
+
+@pytest.fixture(scope='session')
+def docker_client() -> DockerClient:
+ client = docker.from_env()
+ yield client
+
+
+@pytest.fixture(scope='session')
+def docker_swarm_node(docker_client: DockerClient) -> None:
+ # SAME node along ALL session
+ docker_client.swarm.init()
+ yield #--------------------
+ assert docker_client.swarm.leave(force=True)
+
+
+@pytest.fixture(scope='module')
+def osparc_deploy( osparc_simcore_root_dir: Path,
+ docker_client: DockerClient,
+ docker_swarm_node) -> Dict:
+
+ environ = dict(os.environ)
+ if "TRAVIS" not in environ:
+ environ["DOCKER_REGISTRY"] = "local"
+ environ["DOCKER_IMAGE_TAG"] = "production"
+
+ subprocess.run(
+ "make info up-version info-swarm",
+ shell=True, check=True, env=environ,
+ cwd=osparc_simcore_root_dir
+ )
+
+ with open( osparc_simcore_root_dir / ".stack-simcore-version.yml" ) as fh:
+ simcore_config = yaml.safe_load(fh)
+
+ with open( osparc_simcore_root_dir / ".stack-ops.yml" ) as fh:
+ ops_config = yaml.safe_load(fh)
+
+ stack_configs = {
+ 'simcore': simcore_config,
+ 'ops': ops_config
+ }
+
+ yield stack_configs #-------------------------------------------------
+
+ WAIT_BEFORE_RETRY_SECS = 1
+
+ subprocess.run(
+ "make down",
+ shell=True, check=True, env=environ,
+ cwd=osparc_simcore_root_dir
+ )
+
+    subprocess.run("docker network prune -f", shell=True, check=False)
+
+ for stack in stack_configs.keys():
+ while True:
+ online = docker_client.services.list(filters={"label":f"com.docker.stack.namespace={stack}"})
+ if online:
+ print(f"Waiting until {len(online)} services stop: {[s.name for s in online]}")
+ time.sleep(WAIT_BEFORE_RETRY_SECS)
+ else:
+ break
+
+ while True:
+ networks = docker_client.networks.list(filters={"label":f"com.docker.stack.namespace={stack}"})
+ if networks:
+ print(f"Waiting until {len(networks)} networks stop: {[n.name for n in networks]}")
+ time.sleep(WAIT_BEFORE_RETRY_SECS)
+ else:
+ break
+
+ (osparc_simcore_root_dir / ".stack-simcore-version.yml").unlink()
+ (osparc_simcore_root_dir / ".stack-ops.yml").unlink()
diff --git a/tests/swarm-deploy/test_service_restart.py b/tests/swarm-deploy/test_service_restart.py
new file mode 100644
index 00000000000..ecca0051021
--- /dev/null
+++ b/tests/swarm-deploy/test_service_restart.py
@@ -0,0 +1,114 @@
+# pylint:disable=unused-variable
+# pylint:disable=unused-argument
+# pylint:disable=redefined-outer-name
+
+import logging
+import subprocess
+import sys
+import time
+from pathlib import Path
+from pprint import pformat
+from typing import Dict, List
+
+import pytest
+from docker import DockerClient
+from docker.models.services import Service
+from tenacity import before_log, retry, stop_after_attempt, wait_fixed
+
+logger = logging.getLogger(__name__)
+
+current_dir = Path(sys.argv[0] if __name__ == "__main__" else __file__).resolve().parent
+
+
+
+# time measured from command 'up' finished until *all* tasks are running
+MAX_TIME_TO_DEPLOY_SECS = 60
+MAX_TIME_TO_RESTART_SERVICE = 5
+
+
+@pytest.fixture(scope="module")
+def deployed_simcore_stack(osparc_deploy: Dict, docker_client: DockerClient) -> List[Service]:
+    # NOTE: the goal here is NOT to test time-to-deploy but
+    # rather to guarantee that the framework is fully deployed before starting
+    # the tests. Obviously, if the framework is in a critical state, the
+    # fixture will fail
+
+ @retry( wait=wait_fixed(MAX_TIME_TO_DEPLOY_SECS),
+ stop=stop_after_attempt(3),
+ before=before_log(logger, logging.WARNING) )
+ def ensure_deployed():
+ for service in docker_client.services.list():
+ for task in service.tasks():
+ assert task['Status']['State'] == task['DesiredState'], \
+ f'{service.name} still not ready: {pformat(task)}'
+
+ try:
+ ensure_deployed()
+ finally:
+ # logs table like
+ # ID NAME IMAGE NODE DESIRED STATE CURRENT STATE ERROR
+ # xbrhmaygtb76 simcore_sidecar.1 itisfoundation/sidecar:latest crespo-wkstn Running Running 53 seconds ago
+ # zde7p8qdwk4j simcore_rabbit.1 itisfoundation/rabbitmq:3.8.0-management crespo-wkstn Running Running 59 seconds ago
+ # f2gxmhwq7hhk simcore_postgres.1 postgres:10.10 crespo-wkstn Running Running about a minute ago
+ # 1lh2hulxmc4q simcore_director.1 itisfoundation/director:latest crespo-wkstn Running Running 34 seconds ago
+ # ...
+ subprocess.run("docker stack ps simcore", shell=True, check=False)
+
+ return [service for service in docker_client.services.list()
+ if service.name.startswith("simcore_")]
+
+
+
+@pytest.mark.parametrize("service_name", [
+ 'simcore_webserver',
+ 'simcore_storage'
+])
+def test_graceful_restart_services(
+ service_name: str,
+ deployed_simcore_stack: List[Service],
+ osparc_deploy: Dict):
+ """
+    NOTE: needs to run AFTER test_core_service_running
+ """
+ service = next( s for s in deployed_simcore_stack if s.name == service_name )
+
+ # "Status": {
+ # "Timestamp": "2019-11-18T19:33:30.448132327Z",
+ # "State": "shutdown",
+ # "Message": "shutdown",
+ # "ContainerStatus": {
+ # "ContainerID": "f2921c983ad934b4daa0c514543bbfd1a9ea89189bd1ad98b67d63b9f98f05be",
+ # "PID": 0,
+ # "ExitCode": 143
+ # },
+ # "PortStatus": {}
+ # },
+ # "DesiredState": "shutdown",
+ assert all( task['Status']['State'] == "running" for task in service.tasks() )
+
+ assert service.force_update()
+
+ time.sleep(MAX_TIME_TO_RESTART_SERVICE)
+
+ shutdown_tasks = service.tasks(filters={'desired-state': 'shutdown'})
+ assert len(shutdown_tasks) == 1
+
+ task = shutdown_tasks[0]
+ assert task['Status']['ContainerStatus']['ExitCode'] == 0, pformat(task['Status'])
+
+ # TODO: check ps ax has TWO processes
+ ## name = core_service_name.name.replace("simcore_", "")
+ ## cmd = f"docker exec -it $(docker ps | grep {name} | awk '{{print $1}}') /bin/sh -c 'ps ax'"
+ # $ docker exec -it $(docker ps | grep storage | awk '{print $1}') /bin/sh -c 'ps ax'
+ # PID USER TIME COMMAND
+ # 1 root 0:00 /sbin/docker-init -- /bin/sh services/storage/docker/entry
+ # 6 scu 0:02 {simcore-service} /usr/local/bin/python /usr/local/bin/sim
+ # 54 root 0:00 ps ax
+
+ # $ docker exec -it $(docker ps | grep sidecar | awk '{print $1}') /bin/sh -c 'ps ax'
+ # PID USER TIME COMMAND
+ # 1 root 0:00 /sbin/docker-init -- /bin/sh services/sidecar/docker/entry
+ # 6 scu 0:00 {celery} /usr/local/bin/python /usr/local/bin/celery worke
+ # 26 scu 0:00 {celery} /usr/local/bin/python /usr/local/bin/celery worke
+ # 27 scu 0:00 {celery} /usr/local/bin/python /usr/local/bin/celery worke
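
The ExitCode == 0 assertion above is what distinguishes a graceful stop from the default SIGTERM death (exit 143 = 128 + SIGTERM, as in the commented task dump). A minimal sketch of the signal handling a long-running service needs to pass this check; this is the general pattern, not the webserver's or storage's actual shutdown code:

# Hypothetical sketch: trap SIGTERM and exit cleanly (code 0) instead of 143.
import asyncio
import signal

async def main() -> None:
    loop = asyncio.get_running_loop()
    stop = asyncio.Event()
    loop.add_signal_handler(signal.SIGTERM, stop.set)
    print("service up; waiting for SIGTERM ...")
    await stop.wait()
    print("draining connections, then exiting cleanly")  # process exits with 0

if __name__ == "__main__":
    asyncio.run(main())
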
diff --git a/tests/swarm-deploy/test_swarm_runs.py b/tests/swarm-deploy/test_swarm_runs.py
index a652830364b..d355d4766a8 100644
--- a/tests/swarm-deploy/test_swarm_runs.py
+++ b/tests/swarm-deploy/test_swarm_runs.py
@@ -1,34 +1,25 @@
-"""
-PRECONDITION:
- Assumes simcore stack is deployed, i.e. make ops_disabled=1 up-version
-
-SEE before_script() in ci/travis/system-testing/swarm-deploy
-"""
-
-# pylint:disable=wildcard-import
-# pylint:disable=unused-import
# pylint:disable=unused-variable
# pylint:disable=unused-argument
# pylint:disable=redefined-outer-name
import asyncio
-import datetime
import logging
import os
-import re
import sys
import urllib
from pathlib import Path
from pprint import pformat
-from typing import Dict
+from typing import Dict, List
-import docker
import pytest
-import tenacity
-import yaml
+from docker import DockerClient
+from docker.models.services import Service
logger = logging.getLogger(__name__)
+current_dir = Path(sys.argv[0] if __name__ == "__main__" else __file__).resolve().parent
+
+
WAIT_TIME_SECS = 20
RETRY_COUNT = 7
MAX_WAIT_TIME=240
@@ -46,68 +37,37 @@
stack_name = os.environ.get("SWARM_STACK_NAME", 'simcore')
-stack_service_names = sorted([ f"{stack_name}_{name}" for name in docker_compose_service_names ])
-
-
-
-# UTILS --------------------------------
-
-def get_tasks_summary(tasks):
- msg = ""
- for t in tasks:
- t["Status"].setdefault("Err", '')
- msg += "- task ID:{ID}, STATE: {Status[State]}, ERROR: '{Status[Err]}' \n".format(
- **t)
- return msg
-
-
-def get_failed_tasks_logs(service, docker_client):
- failed_states = ["COMPLETE", "FAILED",
- "SHUTDOWN", "REJECTED", "ORPHANED", "REMOVE"]
- failed_logs = ""
- for t in service.tasks():
- if t['Status']['State'].upper() in failed_states:
- cid = t['Status']['ContainerStatus']['ContainerID']
- failed_logs += "{2} {0} - {1} BEGIN {2}\n".format(
- service.name, t['ID'], "="*10)
- if cid:
- container = docker_client.containers.get(cid)
- failed_logs += container.logs().decode('utf-8')
- else:
- failed_logs += " log unavailable. container does not exists\n"
- failed_logs += "{2} {0} - {1} END {2}\n".format(
- service.name, t['ID'], "="*10)
-
- return failed_logs
-
-# FIXTURES -------------------------------------
-
-@pytest.fixture(scope="session")
-def here() -> Path:
- return Path(sys.argv[0] if __name__ == "__main__" else __file__).resolve().parent
+stack_service_names = sorted([ f"{stack_name}_{name}"
+ for name in docker_compose_service_names ])
+
+# wait if running pre-state
+# https://docs.docker.com/engine/swarm/how-swarm-mode-works/swarm-task-states/
+pre_states = [
+ "NEW",
+ "PENDING",
+ "ASSIGNED",
+ "PREPARING",
+ "STARTING"
+]
-def osparc_simcore_root_dir(here) -> Path:
- root_dir = here.parent.parent.resolve()
- assert root_dir.exists(), "Is this service within osparc-simcore repo?"
- assert any(root_dir.glob("services/web/server")), "%s not look like rootdir" % root_dir
- return root_dir
+failed_states = [
+ "COMPLETE",
+ "FAILED",
+ "SHUTDOWN",
+ "REJECTED",
+ "ORPHANED",
+ "REMOVE",
+ "CREATED"
+]
-@pytest.fixture(scope='session')
-def osparc_simcore_services_dir(osparc_simcore_root_dir) -> Path:
- services_dir = Path(osparc_simcore_root_dir) / "services"
- return services_dir
@pytest.fixture(scope="session", params=stack_service_names)
-def core_service_name(request):
+def core_service_name(request) -> str:
return str(request.param)
-@pytest.fixture(scope="function")
-def docker_client():
- client = docker.from_env()
- yield client
-@pytest.fixture(scope="function")
-def core_services_running(docker_client):
+@pytest.fixture
+def core_services_running(docker_client: DockerClient) -> List[Service]:
# Matches service names in stacks as e.g.
#
# 'mystack_director'
@@ -117,16 +77,27 @@ def core_services_running(docker_client):
# for a stack named 'mystack'
# maps service names in docker-compose with actual services
- running_services = [ s for s in docker_client.services.list() if s.name.startswith(stack_name) ]
+ running_services = [ s for s in docker_client.services.list()
+ if s.name.startswith(stack_name) ]
return running_services
-# TESTS -------------------------------
-def test_all_services_up(core_services_running):
+
+def test_all_services_up(core_services_running: List[Service], osparc_deploy: Dict):
running_services = sorted( [s.name for s in core_services_running] )
assert running_services == stack_service_names
+ expected = [ f'{stack_name}_{service_name}'
+ for service_name in osparc_deploy[stack_name]['services'].keys()
+ ]
+ assert running_services == sorted(expected)
-async def test_core_service_running(core_service_name, core_services_running, docker_client, loop):
+
+async def test_core_service_running(
+ core_service_name: str,
+ core_services_running: List[Service],
+ docker_client: DockerClient,
+ loop: asyncio.BaseEventLoop,
+ osparc_deploy: Dict ):
"""
NOTE: loop fixture makes this test async
"""
@@ -146,9 +117,6 @@ async def test_core_service_running(core_service_name, core_services_running, do
get_tasks_summary(tasks),
get_failed_tasks_logs(running_service, docker_client))
- # wait if running pre-state
- # https://docs.docker.com/engine/swarm/how-swarm-mode-works/swarm-task-states/
- pre_states = ["NEW", "PENDING", "ASSIGNED", "PREPARING", "STARTING"]
for n in range(RETRY_COUNT):
task = running_service.tasks()[0]
@@ -165,8 +133,7 @@ async def test_core_service_running(core_service_name, core_services_running, do
get_failed_tasks_logs(running_service, docker_client))
-async def test_check_serve_root():
- # TODO: this is
+async def test_check_serve_root(osparc_deploy: Dict):
req = urllib.request.Request("http://127.0.0.1:9081/")
try:
resp = urllib.request.urlopen(req)
@@ -180,3 +147,34 @@ async def test_check_serve_root():
pytest.fail("The server could not fulfill the request.\nError code {}".format(err.code))
except urllib.error.URLError as err:
pytest.fail("Failed reaching the server..\nError reason {}".format(err.reason))
+
+
+
+
+# UTILS --------------------------------
+
+def get_tasks_summary(tasks):
+ msg = ""
+ for t in tasks:
+ t["Status"].setdefault("Err", '')
+ msg += "- task ID:{ID}, STATE: {Status[State]}, ERROR: '{Status[Err]}' \n".format(
+ **t)
+ return msg
+
+
+def get_failed_tasks_logs(service, docker_client):
+ failed_logs = ""
+ for t in service.tasks():
+ if t['Status']['State'].upper() in failed_states:
+ cid = t['Status']['ContainerStatus']['ContainerID']
+ failed_logs += "{2} {0} - {1} BEGIN {2}\n".format(
+ service.name, t['ID'], "="*10)
+ if cid:
+ container = docker_client.containers.get(cid)
+ failed_logs += container.logs().decode('utf-8')
+ else:
+                failed_logs += " log unavailable. container does not exist\n"
+ failed_logs += "{2} {0} - {1} END {2}\n".format(
+ service.name, t['ID'], "="*10)
+
+ return failed_logs