diff --git a/.env-devel b/.env-devel index 0a1a7b210ea..52eae87d12c 100644 --- a/.env-devel +++ b/.env-devel @@ -8,6 +8,8 @@ BF_API_SECRET=none DOCKER_IMAGE_TAG=latest +MAINTENANCE_PASSWORD=z43 + PUBLISHED_HOST_NAME=localhost POSTGRES_ENDPOINT=postgres:5432 @@ -41,7 +43,7 @@ S3_SECURE=0 SMTP_HOST=mail.speag.com SMTP_PORT=25 -VENV2=.venv27/ +WEBSERVER_LOGIN_REGISTRATION_INVITATION_REQUIRED=1 # python3 -c "from cryptography.fernet import Fernet; print(Fernet.generate_key())" WEBSERVER_SESSION_SECRET_KEY=REPLACE ME with a key of at least length 32. diff --git a/.github/PULL_REQUEST_TEMPLATE.md b/.github/PULL_REQUEST_TEMPLATE.md index 35e1b45ef88..2c0e50eceba 100644 --- a/.github/PULL_REQUEST_TEMPLATE.md +++ b/.github/PULL_REQUEST_TEMPLATE.md @@ -10,6 +10,11 @@ +## How to test + + + + ## Checklist - [ ] I think the code is well written diff --git a/Makefile b/Makefile index d6410c0c9c8..db1525b1c2a 100644 --- a/Makefile +++ b/Makefile @@ -37,8 +37,8 @@ PY_FILES := $(strip $(shell find services packages -iname '*.py' \ -not -path "*web/server*")) TEMPCOMPOSE := $(shell mktemp) -SERVICES_LIST := apihub director sidecar storage webserver -CACHED_SERVICES_LIST := ${SERVICES_LIST} webclient +SERVICES_LIST := apihub director sidecar storage webserver maintenance +CACHED_SERVICES_LIST := apihub director sidecar storage webserver webclient CLIENT_WEB_OUTPUT:=$(CURDIR)/services/web/client/source-output export VCS_URL:=$(shell git config --get remote.origin.url) @@ -82,7 +82,13 @@ endif .PHONY: build # target: build: – Builds all core service images. build: .env .tmp-webclient-build - ${DOCKER_COMPOSE} -f services/docker-compose.yml build --parallel ${SERVICES_LIST}; + ${DOCKER_COMPOSE} -f services/docker-compose.yml build --parallel ${SERVICES_LIST} + +.PHONY: rebuild +# target: rebuild: – Rebuilds all core service images from scratch (no docker cache). +rebuild: .env .tmp-webclient-build + ${DOCKER_COMPOSE} -f services/docker-compose.yml build --no-cache --parallel ${SERVICES_LIST} + .PHONY: build-devel .tmp-webclient-build # target: build-devel, rebuild-devel: – Builds images of core services for development.
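For reviewers, a quick sanity check of the new target (a sketch, assuming a populated `.env` and a local docker setup):

```console
make build      # builds all core service images, reusing the docker build cache
make rebuild    # builds the same images from scratch with --no-cache
```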
@@ -129,12 +135,16 @@ up-devel: up-swarm-devel up-swarm: .env docker-swarm-check ${DOCKER} swarm init - ${DOCKER_COMPOSE} -f services/docker-compose.yml -f services/docker-compose.tools.yml config > $(TEMPCOMPOSE).tmp-compose.yml ; + ${DOCKER_COMPOSE} -f services/docker-compose.yml \ + -f services/docker-compose-tools.yml \ + config > $(TEMPCOMPOSE).tmp-compose.yml ; ${DOCKER} stack deploy -c $(TEMPCOMPOSE).tmp-compose.yml ${SWARM_STACK_NAME} up-swarm-devel: .env docker-swarm-check $(CLIENT_WEB_OUTPUT) ${DOCKER} swarm init - ${DOCKER_COMPOSE} -f services/docker-compose.yml -f services/docker-compose.devel.yml -f services/docker-compose.tools.yml config > $(TEMPCOMPOSE).tmp-compose.yml + ${DOCKER_COMPOSE} -f services/docker-compose.yml -f services/docker-compose.devel.yml \ + -f services/docker-compose-tools.yml \ + config > $(TEMPCOMPOSE).tmp-compose.yml ${DOCKER} stack deploy -c $(TEMPCOMPOSE).tmp-compose.yml ${SWARM_STACK_NAME} .PHONY: up-webclient-devel @@ -167,7 +177,8 @@ pull-cache: .env .PHONY: build-cache # target: build-cache – Builds service images and tags them as 'cache' build-cache: - ${DOCKER_COMPOSE} -f services/docker-compose.yml -f services/docker-compose.cache.yml build --parallel apihub director sidecar storage webclient + # WARNING: builds all images except webserver first, then webserver + ${DOCKER_COMPOSE} -f services/docker-compose.yml -f services/docker-compose.cache.yml build --parallel apihub director sidecar storage webclient maintenance ${DOCKER} tag ${DOCKER_REGISTRY}/webclient:cache services_webclient:build ${DOCKER_COMPOSE} -f services/docker-compose.yml -f services/docker-compose.cache.yml build webserver @@ -201,15 +212,18 @@ endif # target: push – Pushes images into a registry push: - ${DOCKER_COMPOSE} -f services/docker-compose.yml push ${SERVICES_LIST} + ${DOCKER_COMPOSE} -f services/docker-compose.yml \ + push ${SERVICES_LIST} # target: pull – Pulls images from a registry pull: .env - ${DOCKER_COMPOSE} -f services/docker-compose.yml pull ${SERVICES_LIST} + ${DOCKER_COMPOSE} -f services/docker-compose.yml \ + pull ${SERVICES_LIST} # target: create-stack-file – use as 'make create-stack-file output_file=stack.yaml' create-stack-file: - ${DOCKER_COMPOSE} -f services/docker-compose.yml config > $(output_file) + ${DOCKER_COMPOSE} -f services/docker-compose.yml \ + config > $(output_file) ## ------------------------------- # Tools @@ -273,12 +287,6 @@ setup-check: .env .vscode/settings.json .venv/bin/pip3 install pylint autopep8 virtualenv pip-tools @echo "To activate the venv, execute 'source .venv/bin/activate' or '.venv/Scripts/activate.bat' (WIN)" -.venv27: .venv -# target: .venv27 – Creates a python2.7 virtual environment with dev tools - @python2 --version - .venv/bin/virtualenv --python=python2 .venv27 - @echo "To activate the venv27, execute 'source .venv27/bin/activate' or '.venv27/Scripts/activate.bat' (WIN)" ## ------------------------------- # Auxiliary targets.
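For context, the reworked `create-stack-file` target is meant to feed `docker stack deploy`, mirroring what `up-swarm` does internally; a minimal sketch (assuming a running swarm and a populated `.env`; the stack name `simcore` is illustrative):

```console
make create-stack-file output_file=stack.yaml
docker stack deploy -c stack.yaml simcore
```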
diff --git a/api/specs/storage/v0/components/schemas/file_meta_data.yaml b/api/specs/storage/v0/components/schemas/file_meta_data.yaml index 29c0c26cd63..2c94ecf0c21 100644 --- a/api/specs/storage/v0/components/schemas/file_meta_data.yaml +++ b/api/specs/storage/v0/components/schemas/file_meta_data.yaml @@ -39,6 +39,18 @@ components: type: string user_name: type: string + file_id: + type: string + raw_file_path: + type: string + display_file_path: + type: string + created_at: + type: string + last_modified: + type: string + file_size: + type: integer example: file_uuid: 'simcore-testing/105/1000/3' location_id: "0" @@ -52,3 +64,9 @@ components: file_name: "example.txt" user_id: "12" user_name: "dennis" + file_id: "N:package:e263da07-2d89-45a6-8b0f-61061b913873" + raw_file_path: "Curation/derivatives/subjects/sourcedata/docs/samples/sam_1/sam_1.csv" + display_file_path: "Curation/derivatives/subjects/sourcedata/docs/samples/sam_1/sam_1.csv" + created_at: "2019-06-19T12:29:03.308611Z" + last_modified: "2019-06-19T12:29:03.78852Z" + file_size: 73 diff --git a/ops/db/docker-compose.yml b/ops/db/docker-compose.yml deleted file mode 100644 index 13583c41d06..00000000000 --- a/ops/db/docker-compose.yml +++ /dev/null @@ -1,20 +0,0 @@ -version: '3.4' -services: - postgres: - image: postgres:10 - environment: - - POSTGRES_USER=scu - - POSTGRES_PASSWORD=z43 - - POSTGRES_DB=simcoredb - volumes: - - postgres:/var/lib/postgresql/data - ports: - - "5432:5432" - adminer: - image: adminer - ports: - - 18080:8080 - depends_on: - - postgres -volumes: - postgres: diff --git a/ops/db/example/README.md b/ops/db/example/README.md deleted file mode 100644 index e2a7763520a..00000000000 --- a/ops/db/example/README.md +++ /dev/null @@ -1,159 +0,0 @@ -# Example for database migration - -Illustrates the workflow for database migration using `alembic` - -There are two folders: - - models: contains definition of tables in different stages - - migration: will contain files needed for migration, needs to be creted first. All `alembic` commands must be started in this directory. - -## Step 1 Initialize - -```bash -docker-compose up -d -pip install alembic -cd migration -alembic init alembic -```` - -This starts the database and creates an initial configuration for the migration. -We want to have a baseline state that basically contains only the table defintions. - -```bash -alembic revision -m "baseline" -``` - -This creates a file `uid_baseline.py` in `alembic/versions`. - -Go there and edit manually -```python - -"""baseline - -Revision ID: 100559f72c66 -Revises: -Create Date: 2018-07-11 14:28:59.084794 - -""" -from alembic import op -import sqlalchemy as sa - - -# revision identifiers, used by Alembic. -revision = '100559f72c66' -down_revision = None -branch_labels = None -depends_on = None - - -def upgrade(): - op.create_table( - 'A', - sa.Column('id', sa.Integer, primary_key=True), - sa.Column('name', sa.String(), nullable=False)) - - op.create_table( - 'B', - sa.Column('id', sa.Integer, primary_key=True), - sa.Column('name', sa.String(), nullable=False)) - - -def downgrade(): - op.drop_table('A') - op.drop_table('B') -``` - -Edit the file `alembic.ini` for the correct db url - -``` -sqlalchemy.url = postgresql+psycopg2://test:test@localhost:5432/test -``` - -Upgrade the database to the current state - -```bash -alembic upgrade head -``` - -Go and check `localhost:18080` - - -## Step 2 Track changes - -From now on we want to automatically track changes in the models. 
-Edit the file `migration/alembic/env.py` and import all models that shall be tracked. Obviously those guys need to be in the path. -Also change - -```python -target_metadata = Base.metadata -``` - -```python - -from __future__ import with_statement -from alembic import context -from sqlalchemy import engine_from_config, pool -from logging.config import fileConfig - -from base import Base -from a import A -from b import B - -# this is the Alembic Config object, which provides -# access to the values within the .ini file in use. -config = context.config - -# Interpret the config file for Python logging. -# This line sets up loggers basically. -fileConfig(config.config_file_name) - -# add your model's MetaData object here -# for 'autogenerate' support -# from myapp import mymodel -# target_metadata = mymodel.Base.metadata -target_metadata = Base.metadata - -# other values from the config, defined by the needs of env.py, -# can be acquired: -# my_important_option = config.get_main_option("my_important_option") -# ... etc. - -``` - -Now start changing the schemas: - -Copy `a.py.1` to `a.py` and let `alembic` do its magic - -```bash -alembic revision --autogenerate -m "Adds new thing to A" -``` -This creates an new script `uuid_adds_new_thing_to_a.py` - -Upgrade can be done with - -```bash -alembic upgrade head -``` - -Go and check `localhost:18080` - -Copy `b.py.1` to `b.py` and let `alembic` do its magic -```bash -alembic revision --autogenerate -m "Adds new thing to B" -``` -This creates an new script `uuid_adds_new_thing_to_b.py` - -Upgrade can be done with - -```bash -alembic upgrade head -``` - -Go and check `localhost:18080` - -From now on you can do the following - -```bash -alembic upgrade +1 -alembic downgrade -1 -alembic downgrade -2 -``` diff --git a/ops/db/example/models/__init__.py b/ops/db/example/models/__init__.py deleted file mode 100644 index e69de29bb2d..00000000000 diff --git a/ops/db/example/models/a.py b/ops/db/example/models/a.py deleted file mode 100644 index 03900920b3b..00000000000 --- a/ops/db/example/models/a.py +++ /dev/null @@ -1,9 +0,0 @@ -from sqlalchemy import Column, Integer, String -from base import Base - -class A(Base): - - __tablename__ = 'A' - - id = Column(Integer, primary_key=True) - name = Column(String) diff --git a/ops/db/example/models/a.py.0 b/ops/db/example/models/a.py.0 deleted file mode 100644 index 03900920b3b..00000000000 --- a/ops/db/example/models/a.py.0 +++ /dev/null @@ -1,9 +0,0 @@ -from sqlalchemy import Column, Integer, String -from base import Base - -class A(Base): - - __tablename__ = 'A' - - id = Column(Integer, primary_key=True) - name = Column(String) diff --git a/ops/db/example/models/a.py.1 b/ops/db/example/models/a.py.1 deleted file mode 100644 index 1addaadfd9c..00000000000 --- a/ops/db/example/models/a.py.1 +++ /dev/null @@ -1,10 +0,0 @@ -from sqlalchemy import Column, Integer, String -from base import Base - -class A(Base): - - __tablename__ = 'A' - - id = Column(Integer, primary_key=True) - name = Column(String) - new_item = Column(String) diff --git a/ops/db/example/models/b.py b/ops/db/example/models/b.py deleted file mode 100644 index ac0020b317c..00000000000 --- a/ops/db/example/models/b.py +++ /dev/null @@ -1,9 +0,0 @@ -from sqlalchemy import Column, Integer, String -from base import Base - -class B(Base): - - __tablename__ = 'B' - - id = Column(Integer, primary_key=True) - name = Column(String) diff --git a/ops/db/example/models/b.py.0 b/ops/db/example/models/b.py.0 deleted file mode 100644 index 
ac0020b317c..00000000000 --- a/ops/db/example/models/b.py.0 +++ /dev/null @@ -1,9 +0,0 @@ -from sqlalchemy import Column, Integer, String -from base import Base - -class B(Base): - - __tablename__ = 'B' - - id = Column(Integer, primary_key=True) - name = Column(String) diff --git a/ops/db/example/models/b.py.2 b/ops/db/example/models/b.py.2 deleted file mode 100644 index f4ee5ab6175..00000000000 --- a/ops/db/example/models/b.py.2 +++ /dev/null @@ -1,10 +0,0 @@ -from sqlalchemy import Column, Integer, String -from base import Base - -class B(Base): - - __tablename__ = 'B' - - id = Column(Integer, primary_key=True) - name = Column(String) - new_item = Column(String) diff --git a/ops/db/example/models/base.py b/ops/db/example/models/base.py deleted file mode 100644 index c64447da1a1..00000000000 --- a/ops/db/example/models/base.py +++ /dev/null @@ -1,2 +0,0 @@ -from sqlalchemy.ext.declarative import declarative_base -Base = declarative_base() diff --git a/ops/travis/system-testing/tests/test_swarm_runs.py b/ops/travis/system-testing/tests/test_swarm_runs.py index 09e76b03202..3d32a40e228 100644 --- a/ops/travis/system-testing/tests/test_swarm_runs.py +++ b/ops/travis/system-testing/tests/test_swarm_runs.py @@ -8,10 +8,10 @@ import datetime import logging import sys +import urllib from pathlib import Path from pprint import pformat from typing import Dict -import urllib import docker import pytest @@ -26,9 +26,58 @@ logger = logging.getLogger(__name__) +# UTILS -------------------------------- def _here() -> Path: return Path(sys.argv[0] if __name__ == "__main__" else __file__).resolve().parent + +def _load_yaml(path: Path) -> Dict: + content = {} + assert path.exists() + with path.open() as f: + content = yaml.safe_load(f) + return content + + +def _services_docker_compose(osparc_simcore_root_dir: Path) -> Dict[str, str]: + # TODO: pip install docker-compose and use + # https://github.com/docker/compose/blob/master/compose/cli/main.py#L328 + osparc_simcore_services_dir = osparc_simcore_root_dir / "services" + compose = {} + for name in ["docker-compose.yml", ]: + content = _load_yaml(osparc_simcore_services_dir / name) + compose.update(content) + return compose + +def get_tasks_summary(tasks): + msg = "" + for t in tasks: + t["Status"].setdefault("Err", '') + msg += "- task ID:{ID}, STATE: {Status[State]}, ERROR: '{Status[Err]}' \n".format( + **t) + return msg + + +def get_failed_tasks_logs(service, docker_client): + failed_states = ["COMPLETE", "FAILED", + "SHUTDOWN", "REJECTED", "ORPHANED", "REMOVE"] + failed_logs = "" + for t in service.tasks(): + if t['Status']['State'].upper() in failed_states: + cid = t['Status']['ContainerStatus']['ContainerID'] + failed_logs += "{2} {0} - {1} BEGIN {2}\n".format( + service.name, t['ID'], "="*10) + if cid: + container = docker_client.containers.get(cid) + failed_logs += container.logs().decode('utf-8') + else: + failed_logs += " log unavailable. 
container does not exist\n" + failed_logs += "{2} {0} - {1} END {2}\n".format( + service.name, t['ID'], "="*10) + + return failed_logs + +# FIXTURES ------------------------------------- @pytest.fixture(scope="session") def here() -> Path: return _here() @@ -43,15 +92,10 @@ def _osparc_simcore_root_dir(here) -> Path: def osparc_simcore_root_dir(here) -> Path: return _osparc_simcore_root_dir(here) - -def _services_docker_compose(osparc_simcore_root_dir) -> Dict[str, str]: - docker_compose_path = osparc_simcore_root_dir / "services" / "docker-compose.yml" - assert docker_compose_path.exists() - - content = {} - with docker_compose_path.open() as f: - content = yaml.safe_load(f) - return content +@pytest.fixture(scope='session') +def osparc_simcore_services_dir(osparc_simcore_root_dir) -> Path: + services_dir = Path(osparc_simcore_root_dir) / "services" + return services_dir @pytest.fixture("session") def services_docker_compose(osparc_simcore_root_dir) -> Dict[str, str]: @@ -60,12 +104,7 @@ def services_docker_compose(osparc_simcore_root_dir) -> Dict[str, str]: @pytest.fixture("session") def tools_docker_compose(osparc_simcore_root_dir) -> Dict[str, str]: - docker_compose_path = osparc_simcore_root_dir / "services" / "docker-compose.tools.yml" - assert docker_compose_path.exists() - - content = {} - with docker_compose_path.open() as f: - content = yaml.safe_load(f) + content = _load_yaml(osparc_simcore_root_dir / "services" / "docker-compose-tools.yml") return content def _list_core_services(): @@ -85,33 +124,7 @@ def docker_client(): yield client -# UTILS -------------------------------- - -def get_tasks_summary(tasks): - msg = "" - for t in tasks: - t["Status"].setdefault("Err", '') - msg += "- task ID:{ID}, STATE: {Status[State]}, ERROR: '{Status[Err]}' \n".format(**t) - return msg - -def get_failed_tasks_logs(service, docker_client): - failed_states = ["COMPLETE", "FAILED", "SHUTDOWN", "REJECTED", "ORPHANED", "REMOVE"] - failed_logs = "" - for t in service.tasks(): - if t['Status']['State'].upper() in failed_states: - cid = t['Status']['ContainerStatus']['ContainerID'] - failed_logs += "{2} {0} - {1} BEGIN {2}\n".format(service.name, t['ID'], "="*10) - if cid: - container = docker_client.containers.get(cid) - failed_logs += container.logs().decode('utf-8') - else: - failed_logs += " log unavailable. container does not exists\n" - failed_logs += "{2} {0} - {1} END {2}\n".format(service.name, t['ID'], "="*10) - - return failed_logs - # TESTS ------------------------------- - def test_all_services_up(docker_client, services_docker_compose, tools_docker_compose): """ NOTE: Assumes `make up-swarm` executed diff --git a/packages/postgres-database/README.md b/packages/postgres-database/README.md new file mode 100644 index 00000000000..15ad2273a97 --- /dev/null +++ b/packages/postgres-database/README.md @@ -0,0 +1,55 @@ +# simcore postgres database + +Contains database **models** served by the ``postgres`` service and adds an extension with **migration** tools (e.g. entrypoint that wraps [alembic]'s CLI in a similar way to [flask-migrate]). + + +To install the migration tools, add the ``[migration]`` extra +```console + pip install .[migration] +``` +and to call the CLI use +```console + simcore-postgres-database --help + + # or a short alias + + sc-pg --help +``` +This entrypoint wraps calls to [alembic] commands and customizes them for the ``simcore_postgres_database`` models and the online ``postgres`` database.
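For orientation, these are the subcommands the wrapper exposes (as defined in `cli.py` further down in this diff; descriptions are approximate):

```console
sc-pg discover    # find the database and cache its config
sc-pg info        # show discovered config and revision history
sc-pg clean       # clear the discovered config
sc-pg review      # autogenerate a new revision script
sc-pg upgrade     # upgrade the database to a given revision
sc-pg downgrade   # revert the database to a previous revision
sc-pg stamp       # mark a revision as applied without running migrations
```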
+ + +A typical workflow: + +### Discover + +```console + simcore-postgres-database discover -u simcore -p simcore +``` + +```console + simcore-postgres-database info +``` + +### Review + +```console + simcore-postgres-database review -m "some message about changes" +``` +Auto-generates some scripts under [migration/versions](packages/postgres-database/migration/versions). The migration script **needs to be reviewed and edited**, as Alembic currently does not detect every change you +make to your models. In particular, Alembic is currently unable to detect: +- table name changes, +- column name changes, +- or anonymously named constraints. +A detailed summary of limitations can be found in the Alembic autogenerate documentation. +Once finalized, the migration script also needs to be added to version control. + +### Upgrade + +Upgrades to a given revision (run ``info`` to check the history) +```console + simcore-postgres-database upgrade head +``` + + +[alembic]:https://alembic.sqlalchemy.org/en/latest/ +[flask-migrate]:https://flask-migrate.readthedocs.io/en/latest/ diff --git a/packages/postgres-database/VERSION b/packages/postgres-database/VERSION new file mode 100644 index 00000000000..6e8bf73aa55 --- /dev/null +++ b/packages/postgres-database/VERSION @@ -0,0 +1 @@ +0.1.0 diff --git a/packages/postgres-database/doc/database-migration.md b/packages/postgres-database/doc/database-migration.md new file mode 100644 index 00000000000..21ec44f8249 --- /dev/null +++ b/packages/postgres-database/doc/database-migration.md @@ -0,0 +1,58 @@ +# Migration of a database + +Issue 709 + +## Migration between schema updates + +- Database model schemas change with time based on new requirements or fixes +- Deployed databases already contain data that fulfills the current schema but not the new one +- We need to update these databases to the new schema while keeping their data +- This shall be done with minimal or no downtime of the running databases + + +- https://sqlalchemy-migrate.readthedocs.io/en/latest/ + - Migration environment templates: ``alembic list_templates`` + - multidb?? + - Multiple alembic environs from [one ini file](https://alembic.sqlalchemy.org/en/latest/cookbook.html#multiple-environments) + ``` + alembic init migration + alembic revision -m "baseline" + alembic upgrade head + alembic revision -m "first tables" + alembic upgrade head + alembic revision -m "add column" + + alembic info + alembic list_templates + alembic current + alembic downgrade -1 + alembic head + alembic upgrade head + alembic history +``` + +- what autogenerate [does NOT detect](https://alembic.sqlalchemy.org/en/latest/autogenerate.html#what-does-autogenerate-detect-and-what-does-it-not-detect) + +- https://stackoverflow.com/questions/42992256/how-do-you-add-migrate-an-existing-database-with-alembic-flask-migrate-if-you-di +```console +alembic revision --autogenerate -m "Init tables" # to an empty db + +# the real db already contains these tables +alembic stamp head + +# revision changes +alembic revision --autogenerate -m "Added column to file_meta_data" +alembic upgrade head +``` + +## Migration between major releases of PostgreSQL + +- A major release of PostgreSQL (first two digit groups; e.g. 9.0 and 9.1 are two consecutive major releases) might change the internal storage format + +- See https://www.postgresql.org/docs/9.0/migration.html + + +## Migration of database in production + +- Needs downtime? +- Collisions with ongoing requests during migration?
diff --git a/packages/postgres-database/doc/database-models.md b/packages/postgres-database/doc/database-models.md new file mode 100644 index 00000000000..aa2bf5bdbbd --- /dev/null +++ b/packages/postgres-database/doc/database-models.md @@ -0,0 +1,12 @@ +# Database Models + + +## Rationale + +- Every table in the database is maintained by a given service +- This maintainer service shall have all helpers associated with that table (e.g. extension functions over the raw metadata model) + + +- Isolate a package with all table schemas per database and service +- Models shall not be implemented by inheriting from ``Base``; use [explicit table definitions](https://docs.sqlalchemy.org/en/latest/orm/mapping_styles.html#classical-mappings) instead diff --git a/packages/postgres-database/requirements/Makefile b/packages/postgres-database/requirements/Makefile new file mode 100644 index 00000000000..a947354f2d6 --- /dev/null +++ b/packages/postgres-database/requirements/Makefile @@ -0,0 +1,29 @@ +.PHONY: all check clean help + +objects = $(wildcard *.in) +outputs := $(objects:.in=.txt) + +# target: all – pip-compiles all requirements/*.in -> requirements/*.txt +all: $(outputs) + +%.txt: %.in + pip-compile --output-file $@ $< + +_test.txt: _base.txt + +_migration.txt: _base.txt + +# target: check – Checks whether pip-compile is installed +check: + @which pip-compile > /dev/null + +# target: clean – Cleans all requirements/*.txt +clean: check + - rm $(outputs) + +# target: help – Display all callable targets +help: + @echo + @egrep "^\s*#\s*target\s*:\s*" [Mm]akefile \ + | sed -r "s/^\s*#\s*target\s*:\s*//g" + @echo diff --git a/packages/postgres-database/requirements/_base.in b/packages/postgres-database/requirements/_base.in new file mode 100644 index 00000000000..da6c74026c1 --- /dev/null +++ b/packages/postgres-database/requirements/_base.in @@ -0,0 +1,9 @@ +# +# Specifies third-party dependencies for 'simcore-postgres-database' +# + +# psycopg2-binary # enforces binary version +# sqlalchemy>=1.3.3 # https://nvd.nist.gov/vuln/detail/CVE-2019-7164 +sqlalchemy[postgresql_psycopg2binary]>=1.3.3 + +yarl diff --git a/packages/postgres-database/requirements/_base.txt b/packages/postgres-database/requirements/_base.txt new file mode 100644 index 00000000000..08220816da0 --- /dev/null +++ b/packages/postgres-database/requirements/_base.txt @@ -0,0 +1,11 @@ +# +# This file is autogenerated by pip-compile +# To update, run: +# +# pip-compile --output-file=_base.txt _base.in +# +idna==2.8 # via yarl +multidict==4.5.2 # via yarl +psycopg2-binary==2.8.3 # via sqlalchemy +sqlalchemy[postgresql_psycopg2binary]==1.3.5 +yarl==1.3.0 diff --git a/packages/postgres-database/requirements/_migration.in b/packages/postgres-database/requirements/_migration.in new file mode 100644 index 00000000000..331a5cf7fb7 --- /dev/null +++ b/packages/postgres-database/requirements/_migration.in @@ -0,0 +1,12 @@ +# Installs dependencies to migrate 'simcore-postgres-database' +# + +# frozen specs +-r _base.txt + +certifi==2019.6.16 # added constraint to fit pre-installation of jupyter/base-notebook:python-3.7.3 (cannot uninstall) + +alembic +click +docker # TODO: aiodocker +tenacity diff --git a/packages/postgres-database/requirements/_migration.txt b/packages/postgres-database/requirements/_migration.txt new file mode 100644 index 00000000000..5c2d44bf2e8 --- /dev/null +++ b/packages/postgres-database/requirements/_migration.txt @@ -0,0 +1,25 @@ +# +# This file is autogenerated by pip-compile +# To update, run: +# +# pip-compile
--output-file=_migration.txt _migration.in +# +alembic==1.0.8 +certifi==2019.6.16 +chardet==3.0.4 # via requests +click==7.0 +docker==4.0.2 +idna==2.8 +mako==1.0.12 # via alembic +markupsafe==1.1.1 # via mako +multidict==4.5.2 +psycopg2-binary==2.8.3 +python-dateutil==2.8.0 # via alembic +python-editor==1.0.4 # via alembic +requests==2.22.0 # via docker +six==1.12.0 # via docker, python-dateutil, tenacity, websocket-client +sqlalchemy[postgresql_psycopg2binary]==1.3.5 +tenacity==5.0.4 +urllib3==1.25.3 # via requests +websocket-client==0.56.0 # via docker +yarl==1.3.0 diff --git a/packages/postgres-database/requirements/_test.in b/packages/postgres-database/requirements/_test.in new file mode 100644 index 00000000000..8f8f72ffd35 --- /dev/null +++ b/packages/postgres-database/requirements/_test.in @@ -0,0 +1,14 @@ +# +# Specifies dependencies required to run tests on 'simcore-postgres-database' +# + +# frozen specs +-r _base.txt + +# testing +pytest +pytest-cov + +# tools +pylint +coveralls diff --git a/packages/postgres-database/requirements/_test.txt b/packages/postgres-database/requirements/_test.txt new file mode 100644 index 00000000000..ca2985f29e9 --- /dev/null +++ b/packages/postgres-database/requirements/_test.txt @@ -0,0 +1,38 @@ +# +# This file is autogenerated by pip-compile +# To update, run: +# +# pip-compile --output-file=_test.txt _test.in +# +astroid==2.2.5 # via pylint +atomicwrites==1.3.0 # via pytest +attrs==19.1.0 # via pytest +certifi==2019.6.16 # via requests +chardet==3.0.4 # via requests +coverage==4.5.3 # via coveralls, pytest-cov +coveralls==1.8.1 +docopt==0.6.2 # via coveralls +idna==2.8 +importlib-metadata==0.18 # via pluggy, pytest +isort==4.3.20 # via pylint +lazy-object-proxy==1.4.1 # via astroid +mccabe==0.6.1 # via pylint +more-itertools==7.0.0 # via pytest +multidict==4.5.2 +packaging==19.0 # via pytest +pluggy==0.12.0 # via pytest +psycopg2-binary==2.8.3 +py==1.8.0 # via pytest +pylint==2.3.1 +pyparsing==2.4.0 # via packaging +pytest-cov==2.7.1 +pytest==4.6.3 +requests==2.22.0 # via coveralls +six==1.12.0 # via astroid, packaging, pytest +sqlalchemy[postgresql_psycopg2binary]==1.3.5 +typed-ast==1.4.0 # via astroid +urllib3==1.25.3 # via requests +wcwidth==0.1.7 # via pytest +wrapt==1.11.2 # via astroid +yarl==1.3.0 +zipp==0.5.1 # via importlib-metadata diff --git a/packages/postgres-database/requirements/ci.txt b/packages/postgres-database/requirements/ci.txt new file mode 100644 index 00000000000..8a60c977a55 --- /dev/null +++ b/packages/postgres-database/requirements/ci.txt @@ -0,0 +1,13 @@ +# Shortcut to install all packages for the continuous integration (CI) of 'simcore-postgres-database' +# +# - Installs base + test requirements plus the current package +# +# Usage: +# pip install -r requirements/ci.txt +# + +# installs base + tests requirements +-r _test.txt + +# current module +.
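As a usage note, the `*.txt` pins above are produced by the small requirements Makefile introduced earlier in this diff; a sketch of the expected flow (assuming `pip-tools` is installed in the active environment):

```console
cd packages/postgres-database/requirements
make all     # pip-compiles every *.in into a pinned *.txt
make clean   # removes the generated *.txt files
```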
diff --git a/packages/postgres-database/requirements/dev.txt b/packages/postgres-database/requirements/dev.txt new file mode 100644 index 00000000000..cebccc971db --- /dev/null +++ b/packages/postgres-database/requirements/dev.txt @@ -0,0 +1,13 @@ +# Shortcut to install all packages needed to develop 'simcore-postgres-database' +# +# - As ci.txt but with current and repo packages in develop (edit) mode +# +# Usage: +# pip install -r requirements/dev.txt +# + +# installs base + tests requirements +-r _test.txt + +# current module +-e .[migration] diff --git a/packages/postgres-database/setup.py b/packages/postgres-database/setup.py new file mode 100644 index 00000000000..3ede4446855 --- /dev/null +++ b/packages/postgres-database/setup.py @@ -0,0 +1,61 @@ +import re +import sys +from pathlib import Path + +from setuptools import find_packages, setup + +here = Path(sys.argv[0] if __name__ == "__main__" else __file__).resolve().parent +readme = Path( here / "README.md" ).read_text() +version = Path(here/ "VERSION").read_text().strip() + +def read_reqs( reqs_path: Path): + return re.findall(r'(^[^#-][\w]+[-~>=<.\w]+)', reqs_path.read_text(), re.MULTILINE) + +# Weak dependencies +install_requirements = read_reqs( here / "requirements" / "_base.in" ) + +# Strong dependencies +migration_requirements = read_reqs( here / "requirements" / "_migration.txt" ) +test_requirements = read_reqs( here / "requirements" / "_test.txt" ) + + +setup( + name='simcore-postgres-database', + version=version, + author="Pedro Crespo (pcrespov)", + description="Database models served by the simcore 'postgres' core service", + classifiers=[ + 'Development Status :: 2 - Pre-Alpha', + 'Intended Audience :: Developers', + 'License :: OSI Approved :: MIT License', + 'Natural Language :: English', + 'Programming Language :: Python :: 3.6', + ], + long_description=readme, + license="MIT license", + packages=find_packages(where='src'), + package_dir={'': 'src'}, + test_suite='tests', + install_requires=install_requirements, + tests_require=test_requirements, + extras_require= { + 'migration': migration_requirements, + 'test': test_requirements + }, + include_package_data=True, + package_data={ + '': [ + '*.ini', + 'migration/*.py', + 'migration/*.mako', + 'migration/versions/*.py', + ] + }, + entry_points = { + 'console_scripts': [ + 'simcore-postgres-database=simcore_postgres_database.cli:main', + 'sc-pg=simcore_postgres_database.cli:main', + ] + }, + zip_safe=False +) diff --git a/packages/postgres-database/src/simcore_postgres_database/__init__.py b/packages/postgres-database/src/simcore_postgres_database/__init__.py new file mode 100644 index 00000000000..1f5831b9301 --- /dev/null +++ b/packages/postgres-database/src/simcore_postgres_database/__init__.py @@ -0,0 +1,8 @@ +from . import storage_models, webserver_models +from .models.base import metadata + +__all__ = [ + 'metadata', + 'webserver_models', + 'storage_models' +] diff --git a/packages/postgres-database/src/simcore_postgres_database/alembic.ini b/packages/postgres-database/src/simcore_postgres_database/alembic.ini new file mode 100644 index 00000000000..1c067c09635 --- /dev/null +++ b/packages/postgres-database/src/simcore_postgres_database/alembic.ini @@ -0,0 +1,74 @@ +# A generic, single database configuration. 
+ +[alembic] +# path to migration scripts +script_location = %(here)s/migration + +# template used to generate migration files +# file_template = %%(rev)s_%%(slug)s + +# timezone to use when rendering the date +# within the migration file as well as the filename. +# string value is passed to dateutil.tz.gettz() +# leave blank for localtime +timezone = UTC + +# max length of characters to apply to the +# "slug" field +# truncate_slug_length = 40 + +# set to 'true' to run the environment during +# the 'revision' command, regardless of autogenerate +# revision_environment = false + +# set to 'true' to allow .pyc and .pyo files without +# a source .py file to be detected as revisions in the +# versions/ directory +# sourceless = false + +# version location specification; this defaults +# to migration/versions. When using multiple version +# directories, initial revisions must be specified with --version-path +# version_locations = %(here)s/bar %(here)s/bat migration/versions + +# the output encoding used when revision files +# are written from script.py.mako +# output_encoding = utf-8 + +# url for testing. See packages/postgres-database/tests/docker-compose.yml +sqlalchemy.url = None + +# Logging configuration +[loggers] +keys = root,sqlalchemy,alembic + +[handlers] +keys = console + +[formatters] +keys = generic + +[logger_root] +level = WARN +handlers = console +qualname = + +[logger_sqlalchemy] +level = WARN +handlers = +qualname = sqlalchemy.engine + +[logger_alembic] +level = INFO +handlers = +qualname = alembic + +[handler_console] +class = StreamHandler +args = (sys.stderr,) +level = NOTSET +formatter = generic + +[formatter_generic] +format = %(levelname)-5.5s [%(name)s] %(message)s +datefmt = %H:%M:%S diff --git a/packages/postgres-database/src/simcore_postgres_database/cli.py b/packages/postgres-database/src/simcore_postgres_database/cli.py new file mode 100644 index 00000000000..338489e2758 --- /dev/null +++ b/packages/postgres-database/src/simcore_postgres_database/cli.py @@ -0,0 +1,263 @@ +""" command line interface for migration + +""" +#pylint: disable=broad-except + +import json +import logging +import os +import sys +from copy import deepcopy +from logging.config import fileConfig +from pathlib import Path + +import alembic.command +import click +import docker +import sqlalchemy as sa +from alembic import __version__ as __alembic_version__ +from alembic.config import Config as AlembicConfig + +from simcore_postgres_database.settings import build_url + +alembic_version = tuple([int(v) for v in __alembic_version__.split('.')[0:3]]) + +here = Path( sys.argv[0] if __name__ == "__main__" else __file__ ).parent.resolve() +default_ini = here / 'alembic.ini' +migration_dir = here / 'migration' +discovered_cache = os.path.expanduser("~/.simcore_postgres_database_cache.json") + +log = logging.getLogger('root') +fileConfig(default_ini) + +def safe(if_fails_return=False): + def decorate(func): + def safe_func(*args, **kargs): + try: + res = func(*args, **kargs) + return res + except RuntimeError as err: + log.info("%s failed: %s", func.__name__, str(err)) + except Exception: + log.info("%s failed unexpectedly", func.__name__, exc_info=True) + return deepcopy(if_fails_return) # avoid issues with default mutables + return safe_func + return decorate + +#@retry(wait=wait_fixed(0.1), stop=stop_after_delay(60)) +def _ping(url): + """checks whether database is responsive""" + engine = sa.create_engine(str(url)) + conn = engine.connect() + conn.close() + + +@safe(if_fails_return=None) +def 
_get_service_published_port(service_name: str) -> int: + client = docker.from_env() + services = [x for x in client.services.list() if service_name in x.name] + if not services: + raise RuntimeError("Cannot find published port for service '%s'. Probably services are still not up" % service_name) + service_endpoint = services[0].attrs["Endpoint"] + + if "Ports" not in service_endpoint or not service_endpoint["Ports"]: + raise RuntimeError("Cannot find published port for service '%s' in endpoint. Probably services are still not up" % service_name) + + published_port = service_endpoint["Ports"][0]["PublishedPort"] + return int(published_port) + +def _get_alembic_config(cfg=None): + try: + if not cfg: + cfg = _load_cache() or {} + url = build_url(**cfg) + except Exception: + click.echo("Invalid database config, please run discover", err=True) + _reset_cache() + return {} + + config = AlembicConfig(default_ini) + config.set_main_option('script_location', str(migration_dir)) + config.set_main_option('sqlalchemy.url', str(url)) + return config + +@safe(if_fails_return={}) +def _load_cache(): + with open(discovered_cache) as fh: + cfg = json.load(fh) + return cfg + +def _reset_cache(): + if os.path.exists(discovered_cache): + os.remove(discovered_cache) + click.echo("Removed %s" % discovered_cache) + +# CLI ----------------------------------------------- +DEFAULT_HOST = 'postgres' +DEFAULT_PORT = 5432 +DEFAULT_DB = 'simcoredb' + +@click.group() +def main(): + """ Simplified CLI for database migration with alembic """ + +@main.command() +@click.option('--user', '-u') +@click.option('--password', '-p') +@click.option('--host') +@click.option('--port', type=int) +@click.option('--database') +def discover(**cli_inputs): + """ Discovers databases and stores configs in ~/.simcore_postgres_database_cache.json """ + # NOTE: Do not add defaults to user, password so we get a chance to ping urls + # TODO: if multiple candidates online, then query user to select + + click.echo('Discovering database ...') + cli_cfg = {key:value for key, value in cli_inputs.items() if value is not None} + + def test_cached(): + """Tests cached configuration """ + cfg = _load_cache() or {} + if cfg: + cfg.update(cli_cfg) # overrides + return cfg + + def test_env(): + """Tests environ variables """ + cfg = { + 'user': os.getenv('POSTGRES_USER'), + 'password': os.getenv('POSTGRES_PASSWORD'), + 'host': os.getenv('POSTGRES_HOST', DEFAULT_HOST), + 'port': int(os.getenv('POSTGRES_PORT') or DEFAULT_PORT), + 'database': os.getenv('POSTGRES_DB', DEFAULT_DB) + } + cfg.update(cli_cfg) + return cfg + + + def test_swarm(): + """Tests published port in swarm from host """ + cfg = deepcopy(cli_cfg) + cfg['host'] = "127.0.0.1" + cfg['port'] = _get_service_published_port(cli_cfg.get('host', DEFAULT_HOST)) + cfg.setdefault('database', DEFAULT_DB) + return cfg + + + for test in [test_cached, test_env, test_swarm]: + try: + click.echo("-> {0.__name__}: {0.__doc__}".format(test)) + + cfg = test() + cfg.update(cli_cfg) # CLI always overrides + url = build_url(**cfg) + + click.echo(" ping {0.__name__}: {1} ...".format(test, url)) + + _ping(url) + + with open(discovered_cache, 'w') as fh: + json.dump(cfg, fh, sort_keys=True, indent=4) + + click.echo(f"Saved config at {discovered_cache}: {cfg}") + click.secho(f"{test.__name__} succeeded: {url} is online", + blink=True, bold=True, fg='green') + + return + + except Exception as err: + inline_msg = str(err).replace('\n','. 
') + click.echo("<- {0.__name__} failed : {1}".format(test, inline_msg)) + + _reset_cache() + click.secho("Sorry, database not found !!", blink=True, + bold=True, fg="red") + +@main.command() +def info(): + """ Displays discovered config and other alembic info""" + click.echo("Using alembic {}.{}.{}".format(*alembic_version)) + + cfg = _load_cache() + click.echo(f"Saved config: {cfg} @ {discovered_cache}") + config = _get_alembic_config(cfg) + if config: + click.echo("Revisions history ------------") + alembic.command.history(config) + click.echo("Current version: -------------") + alembic.command.current(config, verbose=True) + + +@main.command() +def clean(): + """ Clears discovered database """ + _reset_cache() + + +# Bypasses alembic CLI into a reduced version ------------ +# TODO: systematic bypass?? + +@main.command() +@click.option('-m', 'message') +def review(message): + """Auto-generates a new revision. Equivalent to `alembic revision --autogenerate -m "first tables"` + """ + click.echo('Auto-generates revision based on changes ') + + config = _get_alembic_config() + alembic.command.revision(config, message, + autogenerate=True, + sql=False, + head='head', + splice=False, + branch_label=None, + version_path=None, + rev_id=None) + +@main.command() +@click.argument('revision', default='head') +def upgrade(revision): + """Upgrades target database to a given revision + + Say we have revision ae1027a6acf + + Absolute migration: + sc-pg upgrade ae10 + + Relative to current: + sc-pg upgrade +2 + sc-pg downgrade -- -1 + sc-pg upgrade ae10+2 + + """ + click.echo(f'Upgrading database to {revision} ...') + config = _get_alembic_config() + alembic.command.upgrade(config, revision, sql=False, tag=None) + +@main.command() +@click.argument('revision', default='-1') +def downgrade(revision): + """Reverts target database to a given revision + + Say we have revision ae1027a6acf + + Absolute migration: + sc-pg upgrade ae10 + + Relative to current: + sc-pg upgrade +2 + sc-pg downgrade -- -1 + sc-pg upgrade ae10+2 + """ + # https://click.palletsprojects.com/en/3.x/arguments/#argument-like-options + click.echo(f'Downgrading database to current-{revision} ...') + config = _get_alembic_config() + alembic.command.downgrade(config, str(revision), sql=False, tag=None) + +@main.command() +@click.argument('revision', default='head') +def stamp(revision): + """Stamps the database with a given revision; does not run any migration""" + click.echo(f'Stamps db to {revision} ...') + config = _get_alembic_config() + alembic.command.stamp(config, revision, sql=False, tag=None) diff --git a/packages/postgres-database/src/simcore_postgres_database/migration/README.md b/packages/postgres-database/src/simcore_postgres_database/migration/README.md new file mode 100644 index 00000000000..41e0670f87e --- /dev/null +++ b/packages/postgres-database/src/simcore_postgres_database/migration/README.md @@ -0,0 +1,87 @@ +# ``postgres-database`` database migration + +Generic single-database configuration. + + +## Basic workflow + +Our database migration is based on [alembic] and emulates the [flask-migrate] plugin. The following steps assume that we start from scratch and aim to set up and run migration scripts for a new database. + + +### Init + +```command +alembic init migration +``` + +Will add a migrations folder to your application. The contents of this folder need to be added to version control along with your other source files.
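Note that plain `alembic` needs `sqlalchemy.url` configured (the shipped `alembic.ini` leaves it as `None`); the `sc-pg` wrapper from this package fills it in after a discovery step. A sketch, assuming a reachable local database with the default credentials:

```console
sc-pg discover -u simcore -p simcore   # locates the db and caches its url
sc-pg info                             # sanity-check the discovered config
```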
+ +### Revision + +```command +$ alembic revision --autogenerate -m "Adding storage service tables" +INFO [alembic.runtime.migration] Context impl PostgresqlImpl. +INFO [alembic.runtime.migration] Will assume transactional DDL. +INFO [alembic.autogenerate.compare] Detected added table 'file_meta_data' +INFO [alembic.ddl.postgresql] Detected sequence named 'user_to_projects_id_seq' as owned by integer column 'user_to_projects(id)', assuming SERIAL and omitting +INFO [alembic.ddl.postgresql] Detected sequence named 'tokens_token_id_seq' as owned by integer column 'tokens(token_id)', assuming SERIAL and omitting +INFO [alembic.ddl.postgresql] Detected sequence named 'comp_tasks_task_id_seq' as owned by integer column 'comp_tasks(task_id)', assuming SERIAL and omitting + Generating /home/crespo/devp/osparc-simcore/packages/postgres-database/migration/versions/86f +ca23596da_adding_storage_service_tables.py ... done +``` +Auto-generates some scripts under [migration/versions](packages/postgres-database/migration/versions). The migration script **needs to be reviewed and edited**, as Alembic currently does not detect every change you +make to your models. In particular, Alembic is currently unable to detect: +- table name changes, +- column name changes, +- or anonymously named constraints. +A detailed summary of limitations can be found in the Alembic autogenerate documentation. +Once finalized, the migration script also needs to be added to version control. + +### Upgrade + +Then you can apply the migration to the database: +```command +alembic upgrade head +``` +Then, each time the database models change, repeat the revision and upgrade commands. + +To sync the database in another system just refresh the migrations folder from source control and run the upgrade command. + + +[flask-migrate]:https://flask-migrate.readthedocs.io/en/latest/ +[alembic]:https://alembic.sqlalchemy.org/en/latest/ + + +## Migration of databases already online + +**Context:** +- There is a database already online and with data that we want to preserve +- There are **no** migration scripts + +```command +alembic init migration +# setup config + +alembic revision --autogenerate -m "Init tables" +alembic stamp head # stamps the database with the given revision but does not run migrations +alembic revision --autogenerate -m "Added column to file_meta_data" +alembic upgrade head +``` + + +## Use cases +**A table has been altered** + +We create a revision script for the change by using the local db as follows: + +```bash +pip install -r packages/postgres-database/requirements/dev.txt # install sc-pg package +docker-compose -f services/docker-compose.yml -f services/docker-compose-tools.yml up adminer # bring db and ui up +docker ps # find the published port for the db +sc-pg discover -u simcore -p simcore --port=32787 # discover the db +sc-pg info # what revision are we at?
+sc-pg upgrade head # upgrade to the latest revision if necessary +sc-pg review -m "Altered_table_why" # create a revision, note: the string will be part of the script +sc-pg upgrade head # apply the revision +sc-pg downgrade -- -1 # go back to the old revision if something went bananas +``` diff --git a/packages/postgres-database/src/simcore_postgres_database/migration/env.py b/packages/postgres-database/src/simcore_postgres_database/migration/env.py new file mode 100644 index 00000000000..6836d5ae008 --- /dev/null +++ b/packages/postgres-database/src/simcore_postgres_database/migration/env.py @@ -0,0 +1,76 @@ + +from logging.config import fileConfig + +from alembic import context +from sqlalchemy import engine_from_config, pool + +from simcore_postgres_database.settings import target_metadatas + +# this is the Alembic Config object, which provides +# access to the values within the .ini file in use. +config = context.config + +# Interpret the config file for Python logging. +# This line sets up loggers basically. +fileConfig(config.config_file_name) + +# add your model's MetaData object here +# for 'autogenerate' support +target_metadata = target_metadatas + +# other values from the config, defined by the needs of env.py, +# can be acquired: +# my_important_option = config.get_main_option("my_important_option") +# ... etc. + + +def run_migrations_offline(): +    """Run migrations in 'offline' mode. + + This configures the context with just a URL + and not an Engine, though an Engine is acceptable + here as well. By skipping the Engine creation + we don't even need a DBAPI to be available. + + Calls to context.execute() here emit the given string to the + script output. + + """ + # pylint: disable=no-member + url = config.get_main_option("sqlalchemy.url") + context.configure( + url=url, target_metadata=target_metadata, literal_binds=True + ) + + with context.begin_transaction(): + context.run_migrations() + + +def run_migrations_online(): + """Run migrations in 'online' mode. + + In this scenario we need to create an Engine + and associate a connection with the context. + + """ + # pylint: disable=no-member + connectable = engine_from_config( + config.get_section(config.config_ini_section), + prefix="sqlalchemy.", + poolclass=pool.NullPool, + ) + + with connectable.connect() as connection: + context.configure( + connection=connection, target_metadata=target_metadata + ) + + with context.begin_transaction(): + context.run_migrations() + + +# pylint: disable=no-member +if context.is_offline_mode(): + run_migrations_offline() +else: + run_migrations_online() diff --git a/packages/postgres-database/src/simcore_postgres_database/migration/script.py.mako b/packages/postgres-database/src/simcore_postgres_database/migration/script.py.mako new file mode 100644 index 00000000000..2c0156303a8 --- /dev/null +++ b/packages/postgres-database/src/simcore_postgres_database/migration/script.py.mako @@ -0,0 +1,24 @@ +"""${message} + +Revision ID: ${up_revision} +Revises: ${down_revision | comma,n} +Create Date: ${create_date} + +""" +from alembic import op +import sqlalchemy as sa +${imports if imports else ""} + +# revision identifiers, used by Alembic.
+revision = ${repr(up_revision)} +down_revision = ${repr(down_revision)} +branch_labels = ${repr(branch_labels)} +depends_on = ${repr(depends_on)} + + +def upgrade(): + ${upgrades if upgrades else "pass"} + + +def downgrade(): + ${downgrades if downgrades else "pass"} diff --git a/packages/postgres-database/src/simcore_postgres_database/migration/versions/20ec678d7dad_nullable_project_columns.py b/packages/postgres-database/src/simcore_postgres_database/migration/versions/20ec678d7dad_nullable_project_columns.py new file mode 100644 index 00000000000..ab4a2ff0e02 --- /dev/null +++ b/packages/postgres-database/src/simcore_postgres_database/migration/versions/20ec678d7dad_nullable_project_columns.py @@ -0,0 +1,38 @@ +"""Nullable project columns + +Revision ID: 20ec678d7dad +Revises: 99db5efc4548 +Create Date: 2019-07-04 08:44:35.901118+00:00 + +""" +from alembic import op +import sqlalchemy as sa + + +# revision identifiers, used by Alembic. +revision = '20ec678d7dad' +down_revision = '99db5efc4548' +branch_labels = None +depends_on = None + + +def upgrade(): + # ### commands auto generated by Alembic - please adjust! ### + op.alter_column('projects', 'description', + existing_type=sa.VARCHAR(), + nullable=True) + op.alter_column('projects', 'thumbnail', + existing_type=sa.VARCHAR(), + nullable=True) + # ### end Alembic commands ### + + +def downgrade(): + # ### commands auto generated by Alembic - please adjust! ### + op.alter_column('projects', 'thumbnail', + existing_type=sa.VARCHAR(), + nullable=False) + op.alter_column('projects', 'description', + existing_type=sa.VARCHAR(), + nullable=False) + # ### end Alembic commands ### diff --git a/packages/postgres-database/src/simcore_postgres_database/migration/versions/645807399320_init_tables.py b/packages/postgres-database/src/simcore_postgres_database/migration/versions/645807399320_init_tables.py new file mode 100644 index 00000000000..4e05b14d235 --- /dev/null +++ b/packages/postgres-database/src/simcore_postgres_database/migration/versions/645807399320_init_tables.py @@ -0,0 +1,123 @@ +"""Init tables + +Revision ID: 645807399320 +Revises: +Create Date: 2019-06-21 13:14:40.700747+00:00 + +""" +from alembic import op +import sqlalchemy as sa + + +# revision identifiers, used by Alembic. +revision = '645807399320' +down_revision = None +branch_labels = None +depends_on = None + + +def upgrade(): + # ### commands auto generated by Alembic - please adjust! 
### + op.create_table('comp_pipeline', + sa.Column('project_id', sa.String(), nullable=False), + sa.Column('dag_adjacency_list', sa.JSON(), nullable=True), + sa.Column('state', sa.String(), nullable=True), + sa.PrimaryKeyConstraint('project_id') + ) + op.create_table('projects', + sa.Column('id', sa.BigInteger(), nullable=False), + sa.Column('type', sa.Enum('TEMPLATE', 'STANDARD', name='projecttype'), nullable=False), + sa.Column('uuid', sa.String(), nullable=False), + sa.Column('name', sa.String(), nullable=False), + sa.Column('description', sa.String(), nullable=False), + sa.Column('thumbnail', sa.String(), nullable=False), + sa.Column('prj_owner', sa.String(), nullable=False), + sa.Column('creation_date', sa.DateTime(), nullable=False), + sa.Column('last_change_date', sa.DateTime(), nullable=False), + sa.Column('workbench', sa.JSON(), nullable=False), + sa.PrimaryKeyConstraint('id'), + sa.UniqueConstraint('uuid') + ) + op.create_table('users', + sa.Column('id', sa.BigInteger(), nullable=False), + sa.Column('name', sa.String(), nullable=False), + sa.Column('email', sa.String(), nullable=False), + sa.Column('password_hash', sa.String(), nullable=False), + sa.Column('status', sa.Enum('CONFIRMATION_PENDING', 'ACTIVE', 'BANNED', name='userstatus'), nullable=False), + sa.Column('role', sa.Enum('ANONYMOUS', 'GUEST', 'USER', 'TESTER', name='userrole'), nullable=False), + sa.Column('created_at', sa.DateTime(), nullable=False), + sa.Column('created_ip', sa.String(), nullable=True), + sa.PrimaryKeyConstraint('id', name='user_pkey'), + sa.UniqueConstraint('email', name='user_login_key') + ) + op.create_table('comp_tasks', + sa.Column('task_id', sa.Integer(), nullable=False), + sa.Column('project_id', sa.String(), nullable=True), + sa.Column('node_id', sa.String(), nullable=True), + sa.Column('job_id', sa.String(), nullable=True), + sa.Column('internal_id', sa.Integer(), nullable=True), + sa.Column('schema', sa.JSON(), nullable=True), + sa.Column('inputs', sa.JSON(), nullable=True), + sa.Column('outputs', sa.JSON(), nullable=True), + sa.Column('image', sa.JSON(), nullable=True), + sa.Column('state', sa.Integer(), nullable=True), + sa.Column('submit', sa.DateTime(), nullable=True), + sa.Column('start', sa.DateTime(), nullable=True), + sa.Column('end', sa.DateTime(), nullable=True), + sa.ForeignKeyConstraint(['project_id'], ['comp_pipeline.project_id'], ), + sa.PrimaryKeyConstraint('task_id') + ) + op.create_table('confirmations', + sa.Column('code', sa.Text(), nullable=False), + sa.Column('user_id', sa.BigInteger(), nullable=True), + sa.Column('action', sa.Enum('REGISTRATION', 'RESET_PASSWORD', 'CHANGE_EMAIL', 'INVITATION', name='confirmationaction'), nullable=False), + sa.Column('data', sa.Text(), nullable=True), + sa.Column('created_at', sa.DateTime(), nullable=False), + sa.ForeignKeyConstraint(['user_id'], ['users.id'], name='user_confirmation_fkey', ondelete='CASCADE'), + sa.PrimaryKeyConstraint('code', name='confirmation_code') + ) + op.create_table('tokens', + sa.Column('token_id', sa.BigInteger(), nullable=False), + sa.Column('user_id', sa.BigInteger(), nullable=False), + sa.Column('token_service', sa.String(), nullable=False), + sa.Column('token_data', sa.JSON(), nullable=False), + sa.ForeignKeyConstraint(['user_id'], ['users.id'], ), + sa.PrimaryKeyConstraint('token_id') + ) + op.create_table('user_to_projects', + sa.Column('id', sa.BigInteger(), nullable=False), + sa.Column('user_id', sa.BigInteger(), nullable=False), + sa.Column('project_id', sa.BigInteger(), nullable=False), + 
sa.ForeignKeyConstraint(['project_id'], ['projects.id'], ), + sa.ForeignKeyConstraint(['user_id'], ['users.id'], ), + sa.PrimaryKeyConstraint('id') + ) + op.create_table('file_meta_data', + sa.Column('file_uuid', sa.String(), nullable=False), + sa.Column('location_id', sa.String(), nullable=True), + sa.Column('location', sa.String(), nullable=True), + sa.Column('bucket_name', sa.String(), nullable=True), + sa.Column('object_name', sa.String(), nullable=True), + sa.Column('project_id', sa.String(), nullable=True), + sa.Column('project_name', sa.String(), nullable=True), + sa.Column('node_id', sa.String(), nullable=True), + sa.Column('node_name', sa.String(), nullable=True), + sa.Column('file_name', sa.String(), nullable=True), + sa.Column('user_id', sa.String(), nullable=True), + sa.Column('user_name', sa.String(), nullable=True), + sa.PrimaryKeyConstraint('file_uuid') + ) + # ### end Alembic commands ### + + +def downgrade(): + # ### commands auto generated by Alembic - please adjust! ### + op.drop_table('file_meta_data') + op.drop_table('user_to_projects') + op.drop_table('tokens') + op.drop_table('confirmations') + op.drop_table('comp_tasks') + op.drop_table('users') + op.drop_table('projects') + op.drop_table('comp_pipeline') + # ### end Alembic commands ### diff --git a/packages/postgres-database/src/simcore_postgres_database/migration/versions/99db5efc4548_added_column_to_file_meta_data.py b/packages/postgres-database/src/simcore_postgres_database/migration/versions/99db5efc4548_added_column_to_file_meta_data.py new file mode 100644 index 00000000000..3a57e96f025 --- /dev/null +++ b/packages/postgres-database/src/simcore_postgres_database/migration/versions/99db5efc4548_added_column_to_file_meta_data.py @@ -0,0 +1,38 @@ +"""Added column to file_meta_data + +Revision ID: 99db5efc4548 +Revises: 645807399320 +Create Date: 2019-06-21 13:22:23.214635+00:00 + +""" +from alembic import op +import sqlalchemy as sa + + +# revision identifiers, used by Alembic. +revision = '99db5efc4548' +down_revision = '645807399320' +branch_labels = None +depends_on = None + + +def upgrade(): + # ### commands auto generated by Alembic - please adjust! ### + op.add_column('file_meta_data', sa.Column('created_at', sa.String(), nullable=True)) + op.add_column('file_meta_data', sa.Column('display_file_path', sa.String(), nullable=True)) + op.add_column('file_meta_data', sa.Column('file_id', sa.String(), nullable=True)) + op.add_column('file_meta_data', sa.Column('file_size', sa.Integer(), nullable=True)) + op.add_column('file_meta_data', sa.Column('last_modified', sa.String(), nullable=True)) + op.add_column('file_meta_data', sa.Column('raw_file_path', sa.String(), nullable=True)) + # ### end Alembic commands ### + + +def downgrade(): + # ### commands auto generated by Alembic - please adjust! 
### + op.drop_column('file_meta_data', 'raw_file_path') + op.drop_column('file_meta_data', 'last_modified') + op.drop_column('file_meta_data', 'file_size') + op.drop_column('file_meta_data', 'file_id') + op.drop_column('file_meta_data', 'display_file_path') + op.drop_column('file_meta_data', 'created_at') + # ### end Alembic commands ### diff --git a/packages/postgres-database/src/simcore_postgres_database/migration/versions/add5f60f0f67_adds_published_colums_in_projects_table.py b/packages/postgres-database/src/simcore_postgres_database/migration/versions/add5f60f0f67_adds_published_colums_in_projects_table.py new file mode 100644 index 00000000000..d3cd58e7f26 --- /dev/null +++ b/packages/postgres-database/src/simcore_postgres_database/migration/versions/add5f60f0f67_adds_published_colums_in_projects_table.py @@ -0,0 +1,35 @@ +"""Adds published columns in projects table + +Revision ID: add5f60f0f67 +Revises: 20ec678d7dad +Create Date: 2019-07-07 19:04:09.502576+00:00 + +""" +from alembic import op +import sqlalchemy as sa + + +# revision identifiers, used by Alembic. +revision = 'add5f60f0f67' +down_revision = '20ec678d7dad' +branch_labels = None +depends_on = None + + +def upgrade(): + # ### commands auto generated by Alembic - please adjust! ### + op.add_column('projects', sa.Column('published', sa.Boolean(), default=True)) + # ### end Alembic commands ### + + # Applies the default to all existing rows + query = 'UPDATE "projects" SET published=false;' + op.execute(query) + # makes the column non-nullable + query = 'ALTER TABLE "projects" ALTER "published" SET NOT NULL;' + op.execute(query) + + +def downgrade(): + # ### commands auto generated by Alembic - please adjust! ### + op.drop_column('projects', 'published') + # ### end Alembic commands ### diff --git a/packages/postgres-database/src/simcore_postgres_database/models/__init__.py b/packages/postgres-database/src/simcore_postgres_database/models/__init__.py new file mode 100644 index 00000000000..dfa8b309243 --- /dev/null +++ b/packages/postgres-database/src/simcore_postgres_database/models/__init__.py @@ -0,0 +1,6 @@ +""" tables + + We use a classical Mapping w/o using a Declarative system. + + See https://docs.sqlalchemy.org/en/latest/orm/mapping_styles.html#classical-mappings + """ diff --git a/packages/postgres-database/src/simcore_postgres_database/models/base.py b/packages/postgres-database/src/simcore_postgres_database/models/base.py new file mode 100644 index 00000000000..707221ae332 --- /dev/null +++ b/packages/postgres-database/src/simcore_postgres_database/models/base.py @@ -0,0 +1,11 @@ +""" Contains model's metadata + + - Collects all table's schemas + - Metadata object needed to explicitly define table schemas +""" +from sqlalchemy.ext.declarative import declarative_base + +# DO NOT inherit from _base.
Use instead explicit table definitions +# See https://docs.sqlalchemy.org/en/latest/orm/mapping_styles.html#classical-mappings +_base = declarative_base() +metadata = _base.metadata diff --git a/packages/postgres-database/src/simcore_postgres_database/models/comp_pipeline.py b/packages/postgres-database/src/simcore_postgres_database/models/comp_pipeline.py new file mode 100644 index 00000000000..d81d67ebce7 --- /dev/null +++ b/packages/postgres-database/src/simcore_postgres_database/models/comp_pipeline.py @@ -0,0 +1,23 @@ +""" Computational Pipeline Table + +""" +import uuid + +import sqlalchemy as sa + +from .base import metadata + +UNKNOWN = 0 +PENDING = 1 +RUNNING = 2 +SUCCESS = 3 +FAILED = 4 + +def _new_uuid(): + return str(uuid.uuid4()) + +comp_pipeline = sa.Table("comp_pipeline", metadata, + sa.Column("project_id", sa.String, primary_key=True, default=_new_uuid), + sa.Column("dag_adjacency_list", sa.JSON), + sa.Column("state", sa.String, default=UNKNOWN) # TODO: MaG: should this not be a string? +) diff --git a/packages/postgres-database/src/simcore_postgres_database/models/comp_tasks.py b/packages/postgres-database/src/simcore_postgres_database/models/comp_tasks.py new file mode 100644 index 00000000000..77c7b85724a --- /dev/null +++ b/packages/postgres-database/src/simcore_postgres_database/models/comp_tasks.py @@ -0,0 +1,31 @@ + +""" Computational Tasks Table + +""" +import sqlalchemy as sa + +from .base import metadata +from .comp_pipeline import UNKNOWN + +comp_tasks = sa.Table("comp_tasks", metadata, + # this task db id + sa.Column("task_id", sa.Integer, primary_key=True), + sa.Column("project_id", sa.String, sa.ForeignKey('comp_pipeline.project_id')), + # dag node id + sa.Column("node_id", sa.String), + # celery task id + sa.Column("job_id", sa.String), + # internal id (better for debugging, nodes from 1 to N) + sa.Column("internal_id", sa.Integer), + + sa.Column("schema", sa.JSON), + sa.Column("inputs", sa.JSON), + sa.Column("outputs", sa.JSON), + sa.Column("image", sa.JSON), + sa.Column("state", sa.Integer, default=UNKNOWN), + + # utc timestamps for submission/start/end + sa.Column("submit", sa.DateTime), + sa.Column("start", sa.DateTime), + sa.Column("end", sa.DateTime), +) diff --git a/packages/postgres-database/src/simcore_postgres_database/models/confirmations.py b/packages/postgres-database/src/simcore_postgres_database/models/confirmations.py new file mode 100644 index 00000000000..ca7410cca3d --- /dev/null +++ b/packages/postgres-database/src/simcore_postgres_database/models/confirmations.py @@ -0,0 +1,39 @@ +""" User's confirmations table + + - Keeps a list of tokens to identify an action (registration, invitation, reset, etc.) authorized + by link to a user in the framework + - These tokens have an expiration date defined by configuration + +""" +import enum +import sqlalchemy as sa + +from .base import metadata +from .users import users + + +class ConfirmationAction(enum.Enum): + REGISTRATION = "REGISTRATION" + RESET_PASSWORD = "RESET_PASSWORD" + CHANGE_EMAIL = "CHANGE_EMAIL" + INVITATION = "INVITATION" + + + +confirmations = sa.Table("confirmations", metadata, + sa.Column("code", sa.Text), + sa.Column("user_id", sa.BigInteger), + sa.Column("action", + sa.Enum(ConfirmationAction), + nullable=False, + default=ConfirmationAction.REGISTRATION + ), + sa.Column("data", sa.Text), # TODO: json?
+ sa.Column("created_at", sa.DateTime, nullable=False), + + # + sa.PrimaryKeyConstraint("code", name="confirmation_code"), + sa.ForeignKeyConstraint(["user_id"], [users.c.id], + name="user_confirmation_fkey", + ondelete="CASCADE"), + ) diff --git a/packages/postgres-database/src/simcore_postgres_database/models/file_meta_data.py b/packages/postgres-database/src/simcore_postgres_database/models/file_meta_data.py new file mode 100644 index 00000000000..f95a37a18bc --- /dev/null +++ b/packages/postgres-database/src/simcore_postgres_database/models/file_meta_data.py @@ -0,0 +1,26 @@ +import sqlalchemy as sa + +from .base import metadata + +file_meta_data = sa.Table( + "file_meta_data", metadata, + sa.Column("file_uuid", sa.String, primary_key=True), + sa.Column("location_id", sa.String), + sa.Column("location", sa.String), + sa.Column("bucket_name", sa.String), + sa.Column("object_name", sa.String), + sa.Column("project_id", sa.String), + sa.Column("project_name", sa.String), + sa.Column("node_id", sa.String), + sa.Column("node_name", sa.String), + sa.Column("file_name", sa.String), + sa.Column("user_id", sa.String), + sa.Column("user_name", sa.String), + sa.Column("file_id", sa.String), + sa.Column("raw_file_path", sa.String), + sa.Column("display_file_path", sa.String), + sa.Column("created_at", sa.String), + sa.Column("last_modified", sa.String), + sa.Column("file_size", sa.Integer) +# sa.Column("state", sa.String()) +) diff --git a/packages/postgres-database/src/simcore_postgres_database/models/projects.py b/packages/postgres-database/src/simcore_postgres_database/models/projects.py new file mode 100644 index 00000000000..d1de2b6bd54 --- /dev/null +++ b/packages/postgres-database/src/simcore_postgres_database/models/projects.py @@ -0,0 +1,38 @@ +""" Projects table + + - Every row fits a project document schemed as api/specs/webserver/v0/components/schemas/project-v0.0.1.json + +""" +import enum +import logging +from datetime import datetime + +import sqlalchemy as sa + +from .base import metadata + +log = logging.getLogger(__name__) + + +class ProjectType(enum.Enum): + """ + template: template project + standard: standard project + """ + TEMPLATE = "template" + STANDARD = "standard" + + +projects = sa.Table("projects", metadata, + sa.Column("id", sa.BigInteger, nullable=False, primary_key=True), + sa.Column("type", sa.Enum(ProjectType), nullable=False, default=ProjectType.STANDARD), + sa.Column("uuid", sa.String, nullable=False, unique=True), + sa.Column("name", sa.String, nullable=False), + sa.Column("description", sa.String, nullable=True), + sa.Column("thumbnail", sa.String, nullable=True), + sa.Column("prj_owner", sa.String, nullable=False), + sa.Column("creation_date", sa.DateTime(), nullable=False, default=datetime.utcnow), + sa.Column("last_change_date", sa.DateTime(), nullable=False, default=datetime.utcnow), + sa.Column("workbench", sa.JSON, nullable=False), + sa.Column("published", sa.Boolean, nullable=False, default=False) +) diff --git a/packages/postgres-database/src/simcore_postgres_database/models/tokens.py b/packages/postgres-database/src/simcore_postgres_database/models/tokens.py new file mode 100644 index 00000000000..6cbef180981 --- /dev/null +++ b/packages/postgres-database/src/simcore_postgres_database/models/tokens.py @@ -0,0 +1,15 @@ +""" User Tokens table +""" +import sqlalchemy as sa + +from .base import metadata +from .users import users + +# NOTE: this is another way of of defining keys ... 
+tokens = sa.Table("tokens", metadata, + sa.Column("token_id", sa.BigInteger, nullable=False, primary_key=True), + sa.Column("user_id", sa.BigInteger, sa.ForeignKey(users.c.id), nullable=False), + sa.Column("token_service", sa.String, nullable=False), + sa.Column("token_data", sa.JSON, nullable=False), + +) diff --git a/packages/postgres-database/src/simcore_postgres_database/models/user_to_projects.py b/packages/postgres-database/src/simcore_postgres_database/models/user_to_projects.py new file mode 100644 index 00000000000..4c3dde718b0 --- /dev/null +++ b/packages/postgres-database/src/simcore_postgres_database/models/user_to_projects.py @@ -0,0 +1,11 @@ +import sqlalchemy as sa + +from .base import metadata +from .projects import projects +from .users import users + +user_to_projects = sa.Table("user_to_projects", metadata, + sa.Column("id", sa.BigInteger, nullable=False, primary_key=True), + sa.Column("user_id", sa.BigInteger, sa.ForeignKey(users.c.id), nullable=False), + sa.Column("project_id", sa.BigInteger, sa.ForeignKey(projects.c.id), nullable=False) +) diff --git a/packages/postgres-database/src/simcore_postgres_database/models/users.py b/packages/postgres-database/src/simcore_postgres_database/models/users.py new file mode 100644 index 00000000000..5410b11fb74 --- /dev/null +++ b/packages/postgres-database/src/simcore_postgres_database/models/users.py @@ -0,0 +1,76 @@ +""" Users table + + - List of users in the framework + - Users have a role within the framework that provides + them different access levels to it +""" +import itertools +from datetime import datetime +from enum import Enum + +import sqlalchemy as sa + +from .base import metadata + + +class UserRole(Enum): + """ SORTED enumeration of user roles + + A role defines a set of privileges granted to the user + Roles are sorted from lowest to highest privileges + USER is the role assigned by default. A user with a higher/lower role is denoted a super/infra user + + ANONYMOUS : The user is not logged in + GUEST : Temporary user with very limited access. Mainly used for demos and for a limited amount of time + USER : Registered user. Basic permissions to use the platform [default] + TESTER : Upgraded user. First level of super-user with privileges to test the framework.
+ Can use everything but cannot affect other users or actual data + + See security_access.py + """ + ANONYMOUS = "ANONYMOUS" + GUEST = "GUEST" + USER = "USER" + TESTER = "TESTER" + + @classmethod + def super_users(cls): + # roles with more privileges than the default USER + return list(itertools.dropwhile(lambda e: e!=cls.USER, cls))[1:] + + # TODO: add comparison https://portingguide.readthedocs.io/en/latest/comparisons.html + + + +class UserStatus(Enum): + """ + pending: user registered but not confirmed + active: user is confirmed and can use the platform + banned: user is not authorized + """ + CONFIRMATION_PENDING = "PENDING" + ACTIVE = "ACTIVE" + BANNED = "BANNED" + + + + +users = sa.Table("users", metadata, + sa.Column("id", sa.BigInteger, nullable=False), + sa.Column("name", sa.String, nullable=False), + sa.Column("email", sa.String, nullable=False), + sa.Column("password_hash", sa.String, nullable=False), + sa.Column("status", + sa.Enum(UserStatus), + nullable=False, + default=UserStatus.CONFIRMATION_PENDING), + sa.Column("role", + sa.Enum(UserRole), + nullable=False, + default=UserRole.USER), + sa.Column("created_at", sa.DateTime(), nullable=False, default=datetime.utcnow), + sa.Column("created_ip", sa.String(), nullable=True), + + # + sa.PrimaryKeyConstraint("id", name="user_pkey"), + sa.UniqueConstraint("email", name="user_login_key"), +) diff --git a/packages/postgres-database/src/simcore_postgres_database/settings.py b/packages/postgres-database/src/simcore_postgres_database/settings.py new file mode 100644 index 00000000000..734106b313f --- /dev/null +++ b/packages/postgres-database/src/simcore_postgres_database/settings.py @@ -0,0 +1,17 @@ +from . import metadata +from yarl import URL + +# Schemas metadata (pre-loaded in __init__.py) +target_metadatas = [metadata, ] + +def build_url(host, port, database, user=None, password=None) -> URL: + """ postgresql+psycopg2://{user}:{password}@{host}:{port}/{database} """ + return URL.build(scheme="postgresql+psycopg2", + user=user, password=password, + host=host, port=port, + path=f"/{database}") + +__all__ = [ + 'target_metadatas', + 'build_url' +] diff --git a/packages/postgres-database/src/simcore_postgres_database/storage_models.py b/packages/postgres-database/src/simcore_postgres_database/storage_models.py new file mode 100644 index 00000000000..c4f39cdabc8 --- /dev/null +++ b/packages/postgres-database/src/simcore_postgres_database/storage_models.py @@ -0,0 +1,20 @@ +""" Facade for storage service (tables manager) + + - Facade for direct access to models in the database by + the storage service +""" +from .models.base import metadata +from .models.file_meta_data import file_meta_data +from .models.projects import projects +from .models.tokens import tokens +from .models.user_to_projects import user_to_projects +from .models.users import users + +__all__ = [ + "tokens", + "file_meta_data", + "metadata", + "projects", + "user_to_projects", + "users" +] diff --git a/packages/postgres-database/src/simcore_postgres_database/webserver_models.py b/packages/postgres-database/src/simcore_postgres_database/webserver_models.py new file mode 100644 index 00000000000..c22298f5f08 --- /dev/null +++ b/packages/postgres-database/src/simcore_postgres_database/webserver_models.py @@ -0,0 +1,23 @@ +""" Facade for webserver service + + Facade for direct access to models in the database by + the webserver service + +""" +from .models.comp_pipeline import comp_pipeline +from .models.comp_tasks import comp_tasks +from .models.confirmations import ConfirmationAction, confirmations
+from .models.projects import ProjectType, projects +from .models.tokens import tokens +from .models.user_to_projects import user_to_projects +from .models.users import UserRole, UserStatus, users + +__all__ = [ + "users", "UserRole", "UserStatus", + "projects", "ProjectType", + "user_to_projects", + "confirmations", "ConfirmationAction", + "tokens", + "comp_tasks", + "comp_pipeline" +] diff --git a/ops/db/example/docker-compose.yml b/packages/postgres-database/tests/docker-compose.yml similarity index 61% rename from ops/db/example/docker-compose.yml rename to packages/postgres-database/tests/docker-compose.yml index 664ec292cf1..d495bca069d 100644 --- a/ops/db/example/docker-compose.yml +++ b/packages/postgres-database/tests/docker-compose.yml @@ -6,6 +6,10 @@ services: - POSTGRES_USER=test - POSTGRES_PASSWORD=test - POSTGRES_DB=test + - POSTGRES_HOST=localhost + - POSTGRES_PORT=5432 + volumes: + - test_postgres_database_volume:/var/lib/postgresql/data ports: - "5432:5432" adminer: @@ -14,3 +18,5 @@ services: - 18080:8080 depends_on: - postgres +volumes: + test_postgres_database_volume: diff --git a/packages/s3wrapper/src/s3wrapper/s3_client.py b/packages/s3wrapper/src/s3wrapper/s3_client.py index bd5dfe1d3a1..7ffc2307d86 100644 --- a/packages/s3wrapper/src/s3wrapper/s3_client.py +++ b/packages/s3wrapper/src/s3wrapper/s3_client.py @@ -16,6 +16,9 @@ class S3Client: def __init__(self, endpoint, access_key=None, secret_key=None, secure=False): self.__metadata_prefix = "x-amz-meta-" self.client = None + self.endpoint = endpoint + self.access_key = access_key + self.secret_key = secret_key try: self.client = Minio(endpoint, access_key=access_key, diff --git a/packages/service-library/src/servicelib/decorators.py b/packages/service-library/src/servicelib/decorators.py new file mode 100644 index 00000000000..ae8e156a284 --- /dev/null +++ b/packages/service-library/src/servicelib/decorators.py @@ -0,0 +1,34 @@ +""" General purpose decorators + +IMPORTANT: lowest level module + In order to avoid cyclic dependencies, please + DO NOT IMPORT ANYTHING from .
+""" +import logging +from copy import deepcopy +from functools import wraps + +log = logging.getLogger(__name__) + + + +def safe_return(if_fails_return=False, catch=None, logger=None): + # defaults + if catch is None: + catch = (RuntimeError, ) + if logger is None: + logger = log + + def decorate(func): + @wraps(func) + def safe_func(*args, **kargs): + try: + res = func(*args, **kargs) + return res + except catch as err: + logger.info("%s failed: %s", func.__name__, str(err)) + except Exception: #pylint: disable=broad-except + logger.info("%s failed unexpectedly", func.__name__, exc_info=True) + return deepcopy(if_fails_return) # avoid issues with default mutables + return safe_func + return decorate diff --git a/packages/service-library/tests/test_decorators.py b/packages/service-library/tests/test_decorators.py new file mode 100644 index 00000000000..69142b6c30c --- /dev/null +++ b/packages/service-library/tests/test_decorators.py @@ -0,0 +1,22 @@ +from servicelib.decorators import safe_return + + +def test_safe_return_decorator(): + class MyException(Exception): + pass + + @safe_return(if_fails_return=False, catch=(MyException,), logger=None) + def raise_my_exception(): + raise MyException() + + assert not raise_my_exception() + +def test_safe_return_mutables(): + some_mutable_return = ['some', 'defaults'] + + @safe_return(if_fails_return=some_mutable_return) + def return_mutable(): + raise RuntimeError("Runtime is default") + + assert return_mutable() == some_mutable_return # contains the same + assert not (return_mutable() is some_mutable_return) # but is not the same diff --git a/packages/simcore-sdk/requirements/ci.txt b/packages/simcore-sdk/requirements/ci.txt index a80098fd923..efdc6c4a751 100644 --- a/packages/simcore-sdk/requirements/ci.txt +++ b/packages/simcore-sdk/requirements/ci.txt @@ -10,6 +10,7 @@ -r _test.txt # installs this repo's packages +../postgres-database/ ../s3wrapper/ ../../services/storage/client-sdk/python/ # Needed ONLY for testing diff --git a/packages/simcore-sdk/requirements/dev.txt b/packages/simcore-sdk/requirements/dev.txt index 892e1da2c48..b43d0e601a0 100644 --- a/packages/simcore-sdk/requirements/dev.txt +++ b/packages/simcore-sdk/requirements/dev.txt @@ -10,6 +10,7 @@ -r _test.txt # installs this repo's packages +-e ../postgres-database/ -e ../s3wrapper/ ../../services/storage/client-sdk/python/ # Needed ONLY for testing diff --git a/packages/simcore-sdk/setup.py b/packages/simcore-sdk/setup.py index 59a2adc072b..18848fa2792 100644 --- a/packages/simcore-sdk/setup.py +++ b/packages/simcore-sdk/setup.py @@ -13,6 +13,7 @@ def read_reqs( reqs_path: Path): install_requirements = read_reqs( here / "requirements" / "_base.in" ) test_requirements = read_reqs( here / "requirements" / "_test.txt" ) + [ + "simcore-postgres-database==0.1.0", "s3wrapper==0.1.0", "simcore-service-storage-sdk==0.1.0" ] diff --git a/packages/simcore-sdk/src/simcore_sdk/models/DEPRECATED b/packages/simcore-sdk/src/simcore_sdk/models/DEPRECATED new file mode 100644 index 00000000000..d599b712cfb --- /dev/null +++ b/packages/simcore-sdk/src/simcore_sdk/models/DEPRECATED @@ -0,0 +1 @@ +To get models, use instead ``import simcore_postgres_database`` diff --git a/packages/simcore-sdk/src/simcore_sdk/models/base.py b/packages/simcore-sdk/src/simcore_sdk/models/base.py index d202c5683c0..2431525cfa3 100644 --- a/packages/simcore-sdk/src/simcore_sdk/models/base.py +++ b/packages/simcore-sdk/src/simcore_sdk/models/base.py @@ -1,7 +1,5 @@ -from sqlalchemy.ext.declarative import 
declarative_base +from simcore_postgres_database.models.base import metadata -# TODO: avoid inheriting from Base. Use instead explicit table definitions -# See https://docs.sqlalchemy.org/en/latest/orm/mapping_styles.html#classical-mappings -Base = declarative_base() - -metadata = Base.metadata +__all__ = [ + "metadata" +] diff --git a/packages/simcore-sdk/src/simcore_sdk/models/pipeline_models.py b/packages/simcore-sdk/src/simcore_sdk/models/pipeline_models.py index c91bb7236b1..258534f2d08 100644 --- a/packages/simcore-sdk/src/simcore_sdk/models/pipeline_models.py +++ b/packages/simcore-sdk/src/simcore_sdk/models/pipeline_models.py @@ -1,22 +1,27 @@ -import uuid +# DEPRECATED: Use instead postgres-database +from sqlalchemy.orm import mapper import networkx as nx -from sqlalchemy import JSON, Column, DateTime, ForeignKey, Integer, String +from simcore_postgres_database.models.comp_pipeline import (FAILED, PENDING, + RUNNING, SUCCESS, + UNKNOWN, + comp_pipeline) +from simcore_postgres_database.models.comp_tasks import comp_tasks -from .base import Base +from .base import metadata -UNKNOWN = 0 -PENDING = 1 -RUNNING = 2 -SUCCESS = 3 -FAILED = 4 -class ComputationalPipeline(Base): - __tablename__ = 'comp_pipeline' +# NOTE: All this file ises classical mapping to keep LEGACY +class Base: + metadata = metadata - project_id = Column(String, primary_key=True, default=str(uuid.uuid4())) - dag_adjacency_list = Column(JSON) - state = Column(String, default=UNKNOWN) + +class ComputationalPipeline: + #pylint: disable=no-member + def __init__(self, **kargs): + for key, value in kargs.items(): + assert key in ComputationalPipeline._sa_class_manager.keys() + setattr(self, key, value) @property def execution_graph(self): @@ -34,25 +39,26 @@ def execution_graph(self): def __repr__(self): return ''.format(self.id) -class ComputationalTask(Base): - __tablename__ = 'comp_tasks' - # this task db id - task_id = Column(Integer, primary_key=True) - project_id = Column(String, ForeignKey('comp_pipeline.project_id')) - # dag node id - node_id = Column(String) - # celery task id - job_id = Column(String) - # internal id (better for debugging, nodes from 1 to N) - internal_id = Column(Integer) - - schema = Column(JSON) - inputs = Column(JSON) - outputs = Column(JSON) - image = Column(JSON) - state = Column(Integer, default=UNKNOWN) - - # utc timestamps for submission/start/end - submit = Column(DateTime) - start = Column(DateTime) - end = Column(DateTime) +mapper(ComputationalPipeline, comp_pipeline) + + + + + +class ComputationalTask: + #pylint: disable=no-member + def __init__(self, **kargs): + for key, value in kargs.items(): + assert key in ComputationalTask._sa_class_manager.keys() + setattr(self, key, value) + + +mapper(ComputationalTask, comp_tasks) + + +__all__ = [ + "metadata", + "ComputationalPipeline", + "ComputationalTask", + "UNKNOWN", "PENDING", "RUNNING", "SUCCESS", "FAILED" +] diff --git a/packages/simcore-sdk/src/simcore_sdk/node_ports/dbmanager.py b/packages/simcore-sdk/src/simcore_sdk/node_ports/dbmanager.py index a870da6cf4d..a307371c691 100644 --- a/packages/simcore-sdk/src/simcore_sdk/node_ports/dbmanager.py +++ b/packages/simcore-sdk/src/simcore_sdk/node_ports/dbmanager.py @@ -55,6 +55,7 @@ def _get_node_from_db(node_uuid: str, session: sqlalchemy.orm.session.Session) - log.debug("Reading from database for node uuid %s", node_uuid) try: # project id should be also defined but was not the case before + # pylint: disable=no-member criteria = (NodeModel.node_id == node_uuid if config.PROJECT_ID == 
'undefined' else and_(NodeModel.node_id == node_uuid, NodeModel.project_id == config.PROJECT_ID)) return session.query(NodeModel).filter(criteria).one() except exc.NoResultFound: @@ -66,6 +67,7 @@ class DBManager: def __init__(self): self._db_settings = DbSettings() with session_scope(self._db_settings.Session) as session: + # pylint: disable=no-member # project id should be also defined but was not the case before criteria = (NodeModel.node_id == config.NODE_UUID if config.PROJECT_ID == 'undefined' else and_(NodeModel.node_id == config.NODE_UUID, NodeModel.project_id == config.PROJECT_ID)) node = session.query(NodeModel).filter(criteria).one() @@ -78,6 +80,7 @@ def write_ports_configuration(self, json_configuration: str, node_uuid: str): node_configuration = json.loads(json_configuration) with session_scope(self._db_settings.Session) as session: + # pylint: disable=no-member updated_node = NodeModel(schema=node_configuration["schema"], inputs=node_configuration["inputs"], outputs=node_configuration["outputs"]) node = _get_node_from_db(node_uuid=node_uuid, session=session) diff --git a/packages/simcore-sdk/tests/node_ports/conftest.py b/packages/simcore-sdk/tests/node_ports/conftest.py index e4adb3c8416..32baf7d285d 100644 --- a/packages/simcore-sdk/tests/node_ports/conftest.py +++ b/packages/simcore-sdk/tests/node_ports/conftest.py @@ -159,6 +159,7 @@ def create_config(prev_node_inputs: List[Tuple[str, str, Any]] =None, prev_node_ postgres.commit() def _create_new_pipeline(session, project:str)->str: + # pylint: disable=no-member new_Pipeline = ComputationalPipeline(project_id=project) session.add(new_Pipeline) session.commit() diff --git a/packages/simcore-sdk/tests/node_ports/helpers/helpers.py b/packages/simcore-sdk/tests/node_ports/helpers/helpers.py index 8c2a16a80a9..dbb799e4d77 100644 --- a/packages/simcore-sdk/tests/node_ports/helpers/helpers.py +++ b/packages/simcore-sdk/tests/node_ports/helpers/helpers.py @@ -9,14 +9,17 @@ def update_configuration(session, project_id, node_uuid, new_configuration): log.debug("Update configuration of pipeline %s, node %s, on session %s", project_id, node_uuid, session) - task = session.query(ComputationalTask).filter(ComputationalTask.project_id==str(project_id), ComputationalTask.node_id==str(node_uuid)) + # pylint: disable=no-member + task = session.query(ComputationalTask).filter( + ComputationalTask.project_id==str(project_id), + ComputationalTask.node_id==str(node_uuid)) task.update(dict(schema=new_configuration["schema"], inputs=new_configuration["inputs"], outputs=new_configuration["outputs"])) session.commit() log.debug("Updated configuration") def update_config_file(path, config): - + with open(path, "w") as json_file: json.dump(config, json_file) diff --git a/packages/simcore-sdk/tests/test_alchemy.py b/packages/simcore-sdk/tests/test_alchemy.py index 5e160ba57d7..a777a8f2cdc 100644 --- a/packages/simcore-sdk/tests/test_alchemy.py +++ b/packages/simcore-sdk/tests/test_alchemy.py @@ -23,7 +23,7 @@ class User(BASE): def test_alchemy(engine, session): BASE.metadata.create_all(engine) users = ['alpha', 'beta', 'gamma'] - + for u in users: data = {} data['counter'] = 0 @@ -45,3 +45,39 @@ def test_alchemy(engine, session): alpha2 = session.query(User).filter(User.name == 'alpha').one() assert alpha2.data['counter'] == 42 + + +from simcore_sdk.models.pipeline_models import ComputationalPipeline, ComputationalTask, comp_pipeline, comp_tasks + + +def test_legacy_queries_with_mapper_adapter(): + """Checks to ensure that LEGACY queries still work 
with + mapper adapter + + This test was added to ensure we could `disable no-member` + for ComputationalTask and ComputationalPipeline mapped classes + """ + column_type = type(User.name) + # pylint: disable=no-member + + assert hasattr(ComputationalTask, "node_id") + assert hasattr(ComputationalTask, "project_id") + assert isinstance(ComputationalTask.node_id, column_type) + assert isinstance(ComputationalTask.project_id, column_type) + + assert hasattr(ComputationalTask, "schema") + assert hasattr(ComputationalTask, "inputs") + assert hasattr(ComputationalTask, "outputs") + assert isinstance(ComputationalTask.schema, column_type) + assert isinstance(ComputationalTask.inputs, column_type) + assert isinstance(ComputationalTask.outputs, column_type) + + assert hasattr(ComputationalPipeline, "project_id") + assert isinstance(ComputationalPipeline.project_id, column_type) + + # pylint: disable=protected-access + column_names = set(c.name for c in comp_pipeline.c) + assert set(ComputationalPipeline._sa_class_manager.keys()) == column_names + + column_names = set(c.name for c in comp_tasks.c) + assert set(ComputationalTask._sa_class_manager.keys()) == column_names diff --git a/scripts/demo/confirmations-invitations.csv b/scripts/demo/confirmations-invitations.csv index 9293bf7257e..47c7128bbfa 100644 --- a/scripts/demo/confirmations-invitations.csv +++ b/scripts/demo/confirmations-invitations.csv @@ -2,20 +2,20 @@ code,user_id,action,data,created_at AOuAejUGDv34i9QtxYK61V7GZmCE4B,1,INVITATION,"{ ""guest"": ""inviteed@foo.com"" , ""host"" : ""inviter@osparc.io"" -}",2019-06-07 14:38:56.202844 +}",2019-07-08 13:18:16.368438 uQhnK20tuXWdleIRhZaBcmrWaIrb2p,1,INVITATION,"{ ""guest"": ""inviteed@foo.com"" , ""host"" : ""inviter@osparc.io"" -}",2019-06-07 14:38:56.202856 +}",2019-07-08 13:18:16.368446 weedI0YvR6tMA7XEpaxgJZT2Z8SCUy,1,INVITATION,"{ ""guest"": ""inviteed@foo.com"" , ""host"" : ""inviter@osparc.io"" -}",2019-06-07 14:38:56.202860 +}",2019-07-08 13:18:16.368449 Q9m5C98ALYZDr1BjilkaaXWSMKxU21,1,INVITATION,"{ ""guest"": ""inviteed@foo.com"" , ""host"" : ""inviter@osparc.io"" -}",2019-06-07 14:38:56.202864 +}",2019-07-08 13:18:16.368453 jvhSQfoAAfin4htKgvvRYi3pkYdPhM,1,INVITATION,"{ ""guest"": ""inviteed@foo.com"" , ""host"" : ""inviter@osparc.io"" -}",2019-06-07 14:38:56.202867 +}",2019-07-08 13:18:16.368459 diff --git a/scripts/demo/create_portal_markdown.py b/scripts/demo/create_portal_markdown.py index 0885eae01ea..043e9d4cca0 100644 --- a/scripts/demo/create_portal_markdown.py +++ b/scripts/demo/create_portal_markdown.py @@ -8,15 +8,15 @@ import json import logging import sys +from contextlib import contextmanager from datetime import datetime from pathlib import Path -from simcore_service_webserver.login.registration import (URL, - get_invitation_url) -from simcore_service_webserver.login.utils import get_random_string -from simcore_service_webserver.resources import resources -from contextlib import contextmanager +from yarl import URL +from simcore_service_webserver.login.registration import get_invitation_url +from simcore_service_webserver.login.utils import get_random_string +#from simcore_service_webserver.resources import resources CONFIRMATIONS_FILENAME = "confirmations-invitations.csv" @@ -38,10 +38,13 @@ ] current_path = Path( sys.argv[0] if __name__ == "__main__" else __file__).resolve() -logging.basicConfig(level=logging.INFO) +current_dir = current_path.parent +logging.basicConfig(level=logging.INFO) log = logging.getLogger(__name__) +params = {} 
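+# NOTE: maps a study uuid to extra query parameters that write_list() below appends to that study's URL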
+params["154fb4ad-4913-478f-af04-19725db901a7"] = {'stimulation_period_secs': '1200'} @contextmanager def _open(filepath): @@ -54,17 +57,25 @@ def _open(filepath): def write_list(hostname, url, data, fh): + origin = URL(url) + print("## studies available @{}".format(hostname), file=fh) print("", file=fh) for prj in data: - print("- [{name}]({base_url}/study/{uuid})".format(base_url=url, **prj), file=fh) + study_url = origin.with_path("study/{uuid}".format(**prj)) + if prj['uuid'] in params: + study_url = study_url.with_query(**params[prj['uuid']]) + print("- [{name}]({study_url})".format(study_url=str(study_url), **prj), file=fh) print("", file=fh) def main(mock_codes): + data = {} + #with resources.stream('data/fake-template-projects.isan.json') as fp: + # data['localhost'] = json.load(fp) - with resources.stream('data/fake-template-projects.isan.json') as fp: - data = json.load(fp) + with open(current_dir / "template-projects/templates_in_master.json") as fp: + data['master'] = json.load(fp) file_path = str(current_path.with_suffix(".md")).replace("create_", "") with _open(file_path) as fh: @@ -72,7 +83,7 @@ def main(mock_codes): print("# THE PORTAL Emulator\n", file=fh) print("This pages is for testing purposes for issue [#{1}]({0}{1})\n".format(ISSUE, 715), file=fh) for hostname, url in HOST_URLS_MAPS: - write_list(hostname, url, data, fh) + write_list(hostname, url, data.get(hostname, []), fh) print("---", file=fh) diff --git a/scripts/demo/portal_markdown.md b/scripts/demo/portal_markdown.md index 2902dc9f3f3..ba32078fcc6 100644 --- a/scripts/demo/portal_markdown.md +++ b/scripts/demo/portal_markdown.md @@ -1,35 +1,24 @@ - + # THE PORTAL Emulator This pages is for testing purposes for issue [#715](https://github.com/ITISFoundation/osparc-simcore/issues/715) ## studies available @localhost -- [ISAN: 2D Plot](http://127.0.0.1:9081/study/template-uuid-4d5e-b80e-401c8066782f) -- [ISAN: 3D Paraview](http://127.0.0.1:9081/study/template-uuid-4d5e-b80e-401c8066781f) -- [ISAN: MattWard use case](http://127.0.0.1:9081/study/template-uuid-420d-b82d-e80bfa272ebd) -- [ISAN: UCDavis use case: 0D](http://127.0.0.1:9081/study/template-uuid-1234-a1a7-f7d4f3a8f26b) ## studies available @master -- [ISAN: 2D Plot](http://master.osparc.io/study/template-uuid-4d5e-b80e-401c8066782f) -- [ISAN: 3D Paraview](http://master.osparc.io/study/template-uuid-4d5e-b80e-401c8066781f) -- [ISAN: MattWard use case](http://master.osparc.io/study/template-uuid-420d-b82d-e80bfa272ebd) -- [ISAN: UCDavis use case: 0D](http://master.osparc.io/study/template-uuid-1234-a1a7-f7d4f3a8f26b) +- [ISAN2019: 3D Paraview](http://master.osparc.io/study/template-uuid-518d-a25d-8887bcae93f8) +- [ISAN: MattWard use case](http://master.osparc.io/study/template-uuid-5a9e-9580-c53d92d18803) +- [ISAN2019 - opencor-py model](http://master.osparc.io/study/154fb4ad-4913-478f-af04-19725db901a7?stimulation_period_secs=1200) +- [ISAN: 2D Plot](http://master.osparc.io/study/template-uuid-5716-bedd-b409bb021760) +- [ISAN: UCDavis use case: 0D](http://master.osparc.io/study/template-uuid-5d82-b08d-d39c436ca738) ## studies available @staging -- [ISAN: 2D Plot](https://staging.osparc.io/study/template-uuid-4d5e-b80e-401c8066782f) -- [ISAN: 3D Paraview](https://staging.osparc.io/study/template-uuid-4d5e-b80e-401c8066781f) -- [ISAN: MattWard use case](https://staging.osparc.io/study/template-uuid-420d-b82d-e80bfa272ebd) -- [ISAN: UCDavis use case: 0D](https://staging.osparc.io/study/template-uuid-1234-a1a7-f7d4f3a8f26b) ## studies available @production -- 
[ISAN: 2D Plot](https://osparc.io/study/template-uuid-4d5e-b80e-401c8066782f) -- [ISAN: 3D Paraview](https://osparc.io/study/template-uuid-4d5e-b80e-401c8066781f) -- [ISAN: MattWard use case](https://osparc.io/study/template-uuid-420d-b82d-e80bfa272ebd) -- [ISAN: UCDavis use case: 0D](https://osparc.io/study/template-uuid-1234-a1a7-f7d4f3a8f26b) --- # INVITATIONS Samples: @@ -51,3 +40,4 @@ This pages is for testing purposes for issue [#715](https://github.com/ITISFound - [weedI0YvR6tMA7XEpaxgJZT2Z8SCUy](https://staging.osparc.io/#/registration/?invitation=weedI0YvR6tMA7XEpaxgJZT2Z8SCUy) - [Q9m5C98ALYZDr1BjilkaaXWSMKxU21](https://staging.osparc.io/#/registration/?invitation=Q9m5C98ALYZDr1BjilkaaXWSMKxU21) - [jvhSQfoAAfin4htKgvvRYi3pkYdPhM](https://staging.osparc.io/#/registration/?invitation=jvhSQfoAAfin4htKgvvRYi3pkYdPhM) + diff --git a/scripts/demo/template-projects/converter.py b/scripts/demo/template-projects/converter.py new file mode 100644 index 00000000000..21e190bd60b --- /dev/null +++ b/scripts/demo/template-projects/converter.py @@ -0,0 +1,74 @@ +import csv +import json +import sys +from pathlib import Path +from typing import Dict, List + +from simcore_service_webserver.projects.projects_db import \ + _convert_to_schema_names + +SEPARATOR=',' + +current_file = Path(sys.argv[0] if __name__ == "__main__" else __file__).resolve() +current_dir = current_file.parent + +def load_csv(csv_filepath: Path) -> List[Dict]: + headers, items = [], [] + with open(csv_filepath, mode='r', encoding='utf-8-sig') as fhandler: + reader = csv.reader(fhandler, delimiter=',', quotechar='"') + for row in reader: + if row: + if not headers: + headers = row + else: + item = { key: row[i] for i, key in enumerate(headers) } + items.append(item) + return items + + +def load_projects(csv_path:Path ): + """ Returns schema-compatible projects + + """ + db_projects = load_csv(csv_path) + _projects = [] + + # process + for db_prj in db_projects: + if int(db_prj['published'])==1: + prj = _convert_to_schema_names(db_prj) + + # jsonifies + dump = prj['workbench'] + # TODO: use Encoder instead? + dump = dump.replace("False", 'false') \ + .replace("True", 'true') \ + .replace("None", 'null') + try: + prj['workbench'] = json.loads(dump) + except json.decoder.JSONDecodeError as err: + print(err) + # import pdb; pdb.set_trace() + + # TODO: validate against project schema!! 
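+ # Hypothetical sketch of that validation (not wired in here), assuming the + # project json-schema were loaded as project_schema: + # import jsonschema + # jsonschema.validate(prj, project_schema)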
+ + _projects.append(prj) + else: + print("skipping {}".format(db_prj['name'])) + + return _projects + + +def main(): + """ + Converts csv exported from db into project schema-compatible json files + """ + for db_csv_export in current_dir.glob("template*.csv"): + data_projects = load_projects(db_csv_export) + json_path = db_csv_export.with_suffix('.json') + with open(json_path, 'w') as fh: + json.dump(data_projects, fh, indent=2) + + +if __name__ == "__main__": + main() diff --git a/scripts/demo/template-projects/templates_in_master.csv b/scripts/demo/template-projects/templates_in_master.csv new file mode 100644 index 00000000000..bd9b6f47909 --- /dev/null +++ b/scripts/demo/template-projects/templates_in_master.csv @@ -0,0 +1,9 @@ +type,uuid,name,description,thumbnail,prj_owner,creation_date,last_change_date,workbench,published +TEMPLATE,template-uuid-5203-915e-1ae8ae0c9991,Sleepers,5 sleepers interconnected,"",maiz@itis.swiss,2019-06-06 14:34:19.631,2019-06-06 14:34:28.647,"{""template-uuid-5f7e-92b0-5a14e84401e9"": {""key"": ""simcore/services/comp/itis/sleeper"", ""version"": ""1.0.0"", ""label"": ""sleeper 0"", ""inputs"": {""in_2"": 2}, ""inputAccess"": {""in_1"": ""Invisible"", ""in_2"": ""ReadOnly""}, ""inputNodes"": [], ""outputNode"": false, ""outputs"": {}, ""progress"": 0, ""thumbnail"": """", ""position"": {""x"": 50, ""y"": 300}}, ""template-uuid-5d8a-812c-44dacf56840e"": {""key"": ""simcore/services/comp/itis/sleeper"", ""version"": ""1.0.0"", ""label"": ""sleeper 1"", ""inputs"": {""in_1"": {""nodeUuid"": ""template-uuid-5f7e-92b0-5a14e84401e9"", ""output"": ""out_1""}, ""in_2"": 2}, ""inputNodes"": [""template-uuid-5f7e-92b0-5a14e84401e9""], ""outputNode"": false, ""outputs"": {}, ""progress"": 0, ""thumbnail"": """", ""position"": {""x"": 300, ""y"": 200}}, ""template-uuid-5706-b741-4073a4454f0d"": {""key"": ""simcore/services/comp/itis/sleeper"", ""version"": ""1.0.0"", ""label"": ""sleeper 2"", ""inputs"": {""in_1"": {""nodeUuid"": ""template-uuid-5d8a-812c-44dacf56840e"", ""output"": ""out_1""}, ""in_2"": {""nodeUuid"": ""template-uuid-5d8a-812c-44dacf56840e"", ""output"": ""out_2""}}, ""inputNodes"": [""template-uuid-5d8a-812c-44dacf56840e""], ""outputNode"": false, ""outputs"": {}, ""progress"": 0, ""thumbnail"": """", ""position"": {""x"": 550, ""y"": 200}}, ""template-uuid-5065-a079-a5a0476e3c10"": {""key"": ""simcore/services/comp/itis/sleeper"", ""version"": ""1.0.0"", ""label"": ""sleeper 3"", ""inputs"": {""in_2"": {""nodeUuid"": ""template-uuid-5f7e-92b0-5a14e84401e9"", ""output"": ""out_2""}}, ""inputNodes"": [""template-uuid-5f7e-92b0-5a14e84401e9""], ""outputNode"": false, ""outputs"": {}, ""progress"": 0, ""thumbnail"": """", ""position"": {""x"": 420, ""y"": 400}}, ""template-uuid-559d-aa19-dc9293e10e4c"": {""key"": ""simcore/services/comp/itis/sleeper"", ""version"": ""1.0.0"", ""label"": ""sleeper 4"", ""inputs"": {""in_1"": {""nodeUuid"": ""template-uuid-5706-b741-4073a4454f0d"", ""output"": ""out_1""}, ""in_2"": {""nodeUuid"": ""template-uuid-5065-a079-a5a0476e3c10"", ""output"": ""out_2""}}, ""inputNodes"": [""template-uuid-5706-b741-4073a4454f0d"", ""template-uuid-5065-a079-a5a0476e3c10""], ""outputNode"": false, ""outputs"": {}, ""progress"": 0, ""thumbnail"": """", ""position"": {""x"": 800, ""y"": 300}}}",0 +TEMPLATE,template-uuid-54fd-a9d2-d40fcfb89c5f,Kember use case,Kember Cordiac Model with PostPro Viewer,"",maiz@itis.swiss,2019-06-06 14:34:41.832,2019-06-06 14:34:44.981,"{""template-uuid-5f7f-af9c-e91b64c4989e"": {""key"": 
""simcore/services/comp/kember-cardiac-model"", ""version"": ""1.0.0"", ""label"": ""Kember cardiac model"", ""inputs"": {""dt"": 0.01, ""T"": 1000, ""forcing_factor"": 0}, ""inputNodes"": [], ""outputNode"": false, ""outputs"": {}, ""progress"": 0, ""thumbnail"": """", ""position"": {""x"": 50, ""y"": 100}}, ""template-uuid-50b8-8ae7-b2a10131cc8f"": {""key"": ""simcore/services/dynamic/kember-viewer"", ""version"": ""2.9.0"", ""label"": ""kember-viewer"", ""inputs"": {""outputController"": {""nodeUuid"": ""template-uuid-5f7f-af9c-e91b64c4989e"", ""output"": ""out_1""}}, ""inputNodes"": [""template-uuid-5f7f-af9c-e91b64c4989e""], ""outputNode"": false, ""outputs"": {}, ""progress"": 10, ""thumbnail"": """", ""position"": {""x"": 300, ""y"": 100}}}",0 +TEMPLATE,template-uuid-59c3-a22d-ce2979b88313,"UCDavis use cases: 1D, 2D",Colleen Clancy 1D and 2D use cases with a file picker and PostPro viewers,"",maiz@itis.swiss,2019-06-06 14:34:52.882,2019-06-06 14:34:54.863,"{""template-uuid-5d51-b24e-5f9a34f0478f"": {""key"": ""simcore/services/frontend/file-picker"", ""version"": ""1.0.0"", ""label"": ""File Picker 1&2 D"", ""inputs"": {}, ""inputNodes"": [], ""outputNode"": false, ""outputs"": {}, ""progress"": 0, ""thumbnail"": """", ""position"": {""x"": 50, ""y"": 300}}, ""template-uuid-547e-8e94-2229dcd0b912"": {""key"": ""simcore/services/frontend/nodes-group"", ""version"": ""1.0.0"", ""label"": ""CC 1D"", ""inputs"": {}, ""inputNodes"": [""template-uuid-5d51-b24e-5f9a34f0478f""], ""outputNode"": false, ""outputs"": {}, ""progress"": 0, ""thumbnail"": """", ""position"": {""x"": 300, ""y"": 200}}, ""template-uuid-5b74-aa0b-f646fd835066"": {""key"": ""simcore/services/frontend/nodes-group"", ""version"": ""1.0.0"", ""label"": ""CC 2D"", ""inputs"": {}, ""inputNodes"": [""template-uuid-5d51-b24e-5f9a34f0478f"", ""template-uuid-547e-8e94-2229dcd0b912""], ""outputNode"": false, ""outputs"": {}, ""progress"": 0, ""thumbnail"": """", ""position"": {""x"": 550, ""y"": 400}}, ""template-uuid-5b4b-8cdf-ff7770a9f98e"": {""key"": ""simcore/services/comp/ucdavis-1d-cardiac-model"", ""version"": ""1.0.0"", ""label"": ""DBP-Clancy-Rabbit-1-D solver"", ""inputs"": {""Na"": 0, ""Kr"": 0, ""BCL"": 10, ""NBeats"": 1, ""Ligand"": 0, ""cAMKII"": ""WT"", ""tw"": 5, ""tl"": 200, ""homogeneity"": ""heterogeneous"", ""in_10"": {""nodeUuid"": ""template-uuid-5d51-b24e-5f9a34f0478f"", ""output"": ""outFile""}}, ""inputNodes"": [""template-uuid-5d51-b24e-5f9a34f0478f""], ""outputNode"": true, ""outputs"": {}, ""parent"": ""template-uuid-547e-8e94-2229dcd0b912"", ""progress"": 0, ""thumbnail"": """", ""position"": {""x"": 100, ""y"": 350}}, ""template-uuid-5bb1-9ce9-8a7be6b86b63"": {""key"": ""simcore/services/dynamic/cc-1d-viewer"", ""version"": ""2.8.0"", ""label"": ""cc-1d-viewer"", ""inputs"": {""ECGs"": {""nodeUuid"": ""template-uuid-5b4b-8cdf-ff7770a9f98e"", ""output"": ""out_1""}, ""y_1D"": {""nodeUuid"": ""template-uuid-5b4b-8cdf-ff7770a9f98e"", ""output"": ""out_3""}}, ""inputNodes"": [""template-uuid-5b4b-8cdf-ff7770a9f98e""], ""outputNode"": false, ""outputs"": {}, ""parent"": ""template-uuid-547e-8e94-2229dcd0b912"", ""progress"": 100, ""thumbnail"": """", ""position"": {""x"": 300, ""y"": 200}}, ""template-uuid-56e8-90f3-d0df529a0016"": {""key"": ""simcore/services/comp/ucdavis-2d-cardiac-model"", ""version"": ""1.0.0"", ""label"": ""DBP-Clancy-Rabbit-2-D solver"", ""inputs"": {""Na"": 0, ""Kr"": 0, ""BCL"": 10, ""Ligand"": 0, ""cAMKII"": ""WT"", ""tw"": 5, ""tl"": 200, ""homogeneity"": ""heterogeneous""}, 
""inputNodes"": [""template-uuid-5d51-b24e-5f9a34f0478f"", ""template-uuid-547e-8e94-2229dcd0b912""], ""outputNode"": false, ""outputs"": {}, ""parent"": ""template-uuid-5b74-aa0b-f646fd835066"", ""progress"": 0, ""thumbnail"": """", ""position"": {""x"": 100, ""y"": 350}}, ""template-uuid-5b92-be80-a7fa7c25a69b"": {""key"": ""simcore/services/dynamic/cc-2d-viewer"", ""version"": ""2.8.0"", ""label"": ""cc-2d-viewer"", ""inputs"": {""ap"": {""nodeUuid"": ""template-uuid-56e8-90f3-d0df529a0016"", ""output"": ""out_1""}}, ""inputNodes"": [""template-uuid-56e8-90f3-d0df529a0016""], ""outputNode"": false, ""outputs"": {}, ""parent"": ""template-uuid-5b74-aa0b-f646fd835066"", ""progress"": 100, ""thumbnail"": """", ""position"": {""x"": 300, ""y"": 200}}}",0 +TEMPLATE,template-uuid-518d-a25d-8887bcae93f8,ISAN2019: 3D Paraview,3D Paraview viewer with two inputs,https://user-images.githubusercontent.com/33152403/60168939-073a5580-9806-11e9-8dad-8a7caa3eb5ab.png,maiz@itis.swiss,2019-06-06 14:33:43.065,2019-06-06 14:33:44.747,"{""template-uuid-5753-af37-e6aec8120bf2"": {""key"": ""simcore/services/frontend/file-picker"", ""version"": ""1.0.0"", ""label"": ""File Picker 1"", ""inputs"": {}, ""inputNodes"": [], ""outputNode"": false, ""outputs"": {""outFile"": {""store"": 1, ""path"": ""Shared Data/HField_Big.vtk""}}, ""progress"": 100, ""thumbnail"": """", ""position"": {""x"": 100, ""y"": 100}}, ""template-uuid-522c-a377-dd8d7cd1265b"": {""key"": ""simcore/services/frontend/file-picker"", ""version"": ""1.0.0"", ""label"": ""File Picker 2"", ""inputs"": {}, ""inputNodes"": [], ""outputNode"": false, ""outputs"": {""outFile"": {""store"": 1, ""path"": ""Shared Data/bunny.vtk""}}, ""progress"": 100, ""thumbnail"": """", ""position"": {""x"": 100, ""y"": 250}}, ""template-uuid-9b0f-67677a20996c"": {""key"": ""simcore/services/dynamic/3d-viewer"", ""version"": ""2.10.0"", ""label"": ""3D ParaViewer"", ""inputs"": {""A"": {""nodeUuid"": ""template-uuid-5753-af37-e6aec8120bf2"", ""output"": ""outFile""}, ""B"": {""nodeUuid"": ""template-uuid-522c-a377-dd8d7cd1265b"", ""output"": ""outFile""}}, ""inputNodes"": [""template-uuid-5753-af37-e6aec8120bf2"", ""template-uuid-522c-a377-dd8d7cd1265b""], ""outputNode"": false, ""outputs"": {}, ""progress"": 85, ""thumbnail"": """", ""position"": {""x"": 400, ""y"": 175}}}",1 +TEMPLATE,template-uuid-5a9e-9580-c53d92d18803,ISAN: MattWard use case,MattWard Solver/PostPro viewer,https://user-images.githubusercontent.com/33152403/60168942-073a5580-9806-11e9-9162-3683dcff0711.png,maiz@itis.swiss,2019-06-06 14:33:58.681,2019-06-06 14:34:01.617,"{""template-uuid-523c-8caa-4ca36c927ca2"": {""key"": ""simcore/services/dynamic/mattward-viewer"", ""version"": ""2.9.0"", ""label"": ""MattWard"", ""inputs"": {}, ""inputNodes"": [], ""outputNode"": false, ""outputs"": {}, ""progress"": 55, ""thumbnail"": """", ""position"": {""x"": 100, ""y"": 100}}}",1 +TEMPLATE,154fb4ad-4913-478f-af04-19725db901a7,ISAN2019 - opencor-py model,Computes the cardiac myocyte action potential for the sympathetic drive input setting on the MAP-Core portal.,https://opencor.ws/res/pics/logo.png,crespo@itis.swiss,2019-06-18 15:20:16.567,2019-06-18 15:59:21.708,"{""template-uuid-4179-bc47-0905e6726758"": {""key"": ""simcore/services/comp/osparc-opencor"", ""version"": ""0.2.1"", ""label"": ""osparc-opencor"", ""inputs"": {""stimulation_period"": ""{{stimulation_period_secs}}"" }, ""inputNodes"": [], ""outputNode"": false, ""outputs"": {}, ""progress"": 100, ""thumbnail"": """", ""position"": {""x"": 314, 
""y"": 263}}, ""template-uuid-4f4e-a1ae-429146d466b8"": {""key"": ""simcore/services/dynamic/jupyter-base-notebook"", ""version"": ""2.10.0"", ""label"": ""jupyter-base-notebook"", ""inputs"": {""input_1"": {""nodeUuid"": ""template-uuid-4179-bc47-0905e6726758"", ""output"": ""membrane_potential_json""}}, ""inputNodes"": [""template-uuid-4179-bc47-0905e6726758""], ""outputNode"": false, ""outputs"": {}, ""progress"": 100, ""thumbnail"": ""https://opencor.ws/res/pics/logo.png"", ""position"": {""x"": 631, ""y"": 259}}}",1 +TEMPLATE,template-uuid-5716-bedd-b409bb021760,ISAN: 2D Plot,2D RawGraphs viewer with one input,https://user-images.githubusercontent.com/33152403/60168938-06a1bf00-9806-11e9-99ff-20c52d851add.png,maiz@itis.swiss,2019-06-06 14:33:34.008,2019-06-06 14:33:35.825,"{""template-uuid-58e5-884e-2690b3d54f11"": {""key"": ""simcore/services/frontend/file-picker"", ""version"": ""1.0.0"", ""label"": ""File Picker"", ""inputs"": {}, ""inputNodes"": [], ""outputNode"": false, ""outputs"": {""outFile"": {""store"": 1, ""path"": ""Shared Data/Height-Weight""}}, ""progress"": 100, ""thumbnail"": """", ""position"": {""x"": 100, ""y"": 100}}, ""template-uuid-58a5-83d7-57d0feffbfea"": {""key"": ""simcore/services/dynamic/raw-graphs"", ""version"": ""2.8.0"", ""label"": ""2D plot"", ""inputs"": {""input_1"": {""nodeUuid"": ""template-uuid-58e5-884e-2690b3d54f11"", ""output"": ""outFile""}}, ""inputNodes"": [""template-uuid-58e5-884e-2690b3d54f11""], ""outputNode"": false, ""outputs"": {}, ""progress"": 90, ""thumbnail"": """", ""position"": {""x"": 400, ""y"": 100}}}",1 +TEMPLATE,template-uuid-5d82-b08d-d39c436ca738,ISAN: UCDavis use case: 0D,Colleen Clancy Single Cell solver with a file picker and PostPro viewer,https://user-images.githubusercontent.com/33152403/60168940-073a5580-9806-11e9-9a44-ae5266eeb020.png,maiz@itis.swiss,2019-06-06 14:33:51.94,2019-06-06 14:33:54.329,"{""template-uuid-59d6-b1a5-6e7b2773636b"": {""key"": ""simcore/services/frontend/file-picker"", ""version"": ""1.0.0"", ""label"": ""File Picker 0D"", ""inputs"": {}, ""inputNodes"": [], ""outputNode"": false, ""outputs"": {""outFile"": {""store"": 1, ""path"": ""Shared Data/initial_WStates""}}, ""progress"": 100, ""thumbnail"": """", ""position"": {""x"": 50, ""y"": 150}}, ""template-uuid-562f-afd1-cca5105c8844"": {""key"": ""simcore/services/comp/ucdavis-singlecell-cardiac-model"", ""version"": ""1.0.0"", ""label"": ""DBP-Clancy-Rabbit-Single-Cell solver"", ""inputs"": {""Na"": 0, ""Kr"": 0, ""BCL"": 200, ""NBeats"": 5, ""Ligand"": 0, ""cAMKII"": ""WT"", ""initfile"": {""nodeUuid"": ""template-uuid-59d6-b1a5-6e7b2773636b"", ""output"": ""outFile""}}, ""inputAccess"": {""Na"": ""ReadAndWrite"", ""Kr"": ""ReadOnly"", ""BCL"": ""ReadAndWrite"", ""NBeats"": ""ReadOnly"", ""Ligand"": ""Invisible"", ""cAMKII"": ""Invisible""}, ""inputNodes"": [""template-uuid-59d6-b1a5-6e7b2773636b""], ""outputNode"": false, ""outputs"": {}, ""progress"": 0, ""thumbnail"": """", ""position"": {""x"": 300, ""y"": 150}}, ""template-uuid-5fdd-9daa-cb03c51d8138"": {""key"": ""simcore/services/dynamic/cc-0d-viewer"", ""version"": ""2.8.0"", ""label"": ""cc-0d-viewer"", ""inputs"": {""vm_1Hz"": {""nodeUuid"": ""template-uuid-562f-afd1-cca5105c8844"", ""output"": ""out_4""}, ""all_results_1Hz"": {""nodeUuid"": ""template-uuid-562f-afd1-cca5105c8844"", ""output"": ""out_1""}}, ""inputNodes"": [""template-uuid-562f-afd1-cca5105c8844""], ""outputNode"": false, ""outputs"": {}, ""progress"": 20, ""thumbnail"": """", ""position"": {""x"": 550, ""y"": 
150}}}",1 diff --git a/scripts/demo/template-projects/templates_in_master.json b/scripts/demo/template-projects/templates_in_master.json new file mode 100644 index 00000000000..11566602d5d --- /dev/null +++ b/scripts/demo/template-projects/templates_in_master.json @@ -0,0 +1,302 @@ +[ + { + "uuid": "template-uuid-518d-a25d-8887bcae93f8", + "name": "ISAN2019: 3D Paraview", + "description": "3D Paraview viewer with two inputs", + "thumbnail": "https://user-images.githubusercontent.com/33152403/60168939-073a5580-9806-11e9-8dad-8a7caa3eb5ab.png", + "prjOwner": "maiz@itis.swiss", + "creationDate": "2019-06-06 14:33:43.065", + "lastChangeDate": "2019-06-06 14:33:44.747", + "workbench": { + "template-uuid-5753-af37-e6aec8120bf2": { + "key": "simcore/services/frontend/file-picker", + "version": "1.0.0", + "label": "File Picker 1", + "inputs": {}, + "inputNodes": [], + "outputNode": false, + "outputs": { + "outFile": { + "store": 1, + "path": "Shared Data/HField_Big.vtk" + } + }, + "progress": 100, + "thumbnail": "", + "position": { + "x": 100, + "y": 100 + } + }, + "template-uuid-522c-a377-dd8d7cd1265b": { + "key": "simcore/services/frontend/file-picker", + "version": "1.0.0", + "label": "File Picker 2", + "inputs": {}, + "inputNodes": [], + "outputNode": false, + "outputs": { + "outFile": { + "store": 1, + "path": "Shared Data/bunny.vtk" + } + }, + "progress": 100, + "thumbnail": "", + "position": { + "x": 100, + "y": 250 + } + }, + "template-uuid-9b0f-67677a20996c": { + "key": "simcore/services/dynamic/3d-viewer", + "version": "2.10.0", + "label": "3D ParaViewer", + "inputs": { + "A": { + "nodeUuid": "template-uuid-5753-af37-e6aec8120bf2", + "output": "outFile" + }, + "B": { + "nodeUuid": "template-uuid-522c-a377-dd8d7cd1265b", + "output": "outFile" + } + }, + "inputNodes": [ + "template-uuid-5753-af37-e6aec8120bf2", + "template-uuid-522c-a377-dd8d7cd1265b" + ], + "outputNode": false, + "outputs": {}, + "progress": 85, + "thumbnail": "", + "position": { + "x": 400, + "y": 175 + } + } + } + }, + { + "uuid": "template-uuid-5a9e-9580-c53d92d18803", + "name": "ISAN: MattWard use case", + "description": "MattWard Solver/PostPro viewer", + "thumbnail": "https://user-images.githubusercontent.com/33152403/60168942-073a5580-9806-11e9-9162-3683dcff0711.png", + "prjOwner": "maiz@itis.swiss", + "creationDate": "2019-06-06 14:33:58.681", + "lastChangeDate": "2019-06-06 14:34:01.617", + "workbench": { + "template-uuid-523c-8caa-4ca36c927ca2": { + "key": "simcore/services/dynamic/mattward-viewer", + "version": "2.9.0", + "label": "MattWard", + "inputs": {}, + "inputNodes": [], + "outputNode": false, + "outputs": {}, + "progress": 55, + "thumbnail": "", + "position": { + "x": 100, + "y": 100 + } + } + } + }, + { + "uuid": "154fb4ad-4913-478f-af04-19725db901a7", + "name": "ISAN2019 - opencor-py model", + "description": "Computes the cardiac myocyte action potential for the sympathetic drive input setting on the MAP-Core portal.", + "thumbnail": "https://opencor.ws/res/pics/logo.png", + "prjOwner": "crespo@itis.swiss", + "creationDate": "2019-06-18 15:20:16.567", + "lastChangeDate": "2019-06-18 15:59:21.708", + "workbench": { + "template-uuid-4179-bc47-0905e6726758": { + "key": "simcore/services/comp/osparc-opencor", + "version": "0.2.1", + "label": "osparc-opencor", + "inputs": { + "stimulation_period": "{{stimulation_period_secs}}" + }, + "inputNodes": [], + "outputNode": false, + "outputs": {}, + "progress": 100, + "thumbnail": "", + "position": { + "x": 314, + "y": 263 + } + }, + 
"template-uuid-4f4e-a1ae-429146d466b8": { + "key": "simcore/services/dynamic/jupyter-base-notebook", + "version": "2.10.0", + "label": "jupyter-base-notebook", + "inputs": { + "input_1": { + "nodeUuid": "template-uuid-4179-bc47-0905e6726758", + "output": "membrane_potential_json" + } + }, + "inputNodes": [ + "template-uuid-4179-bc47-0905e6726758" + ], + "outputNode": false, + "outputs": {}, + "progress": 100, + "thumbnail": "https://opencor.ws/res/pics/logo.png", + "position": { + "x": 631, + "y": 259 + } + } + } + }, + { + "uuid": "template-uuid-5716-bedd-b409bb021760", + "name": "ISAN: 2D Plot", + "description": "2D RawGraphs viewer with one input", + "thumbnail": "https://user-images.githubusercontent.com/33152403/60168938-06a1bf00-9806-11e9-99ff-20c52d851add.png", + "prjOwner": "maiz@itis.swiss", + "creationDate": "2019-06-06 14:33:34.008", + "lastChangeDate": "2019-06-06 14:33:35.825", + "workbench": { + "template-uuid-58e5-884e-2690b3d54f11": { + "key": "simcore/services/frontend/file-picker", + "version": "1.0.0", + "label": "File Picker", + "inputs": {}, + "inputNodes": [], + "outputNode": false, + "outputs": { + "outFile": { + "store": 1, + "path": "Shared Data/Height-Weight" + } + }, + "progress": 100, + "thumbnail": "", + "position": { + "x": 100, + "y": 100 + } + }, + "template-uuid-58a5-83d7-57d0feffbfea": { + "key": "simcore/services/dynamic/raw-graphs", + "version": "2.8.0", + "label": "2D plot", + "inputs": { + "input_1": { + "nodeUuid": "template-uuid-58e5-884e-2690b3d54f11", + "output": "outFile" + } + }, + "inputNodes": [ + "template-uuid-58e5-884e-2690b3d54f11" + ], + "outputNode": false, + "outputs": {}, + "progress": 90, + "thumbnail": "", + "position": { + "x": 400, + "y": 100 + } + } + } + }, + { + "uuid": "template-uuid-5d82-b08d-d39c436ca738", + "name": "ISAN: UCDavis use case: 0D", + "description": "Colleen Clancy Single Cell solver with a file picker and PostPro viewer", + "thumbnail": "https://user-images.githubusercontent.com/33152403/60168940-073a5580-9806-11e9-9a44-ae5266eeb020.png", + "prjOwner": "maiz@itis.swiss", + "creationDate": "2019-06-06 14:33:51.94", + "lastChangeDate": "2019-06-06 14:33:54.329", + "workbench": { + "template-uuid-59d6-b1a5-6e7b2773636b": { + "key": "simcore/services/frontend/file-picker", + "version": "1.0.0", + "label": "File Picker 0D", + "inputs": {}, + "inputNodes": [], + "outputNode": false, + "outputs": { + "outFile": { + "store": 1, + "path": "Shared Data/initial_WStates" + } + }, + "progress": 100, + "thumbnail": "", + "position": { + "x": 50, + "y": 150 + } + }, + "template-uuid-562f-afd1-cca5105c8844": { + "key": "simcore/services/comp/ucdavis-singlecell-cardiac-model", + "version": "1.0.0", + "label": "DBP-Clancy-Rabbit-Single-Cell solver", + "inputs": { + "Na": 0, + "Kr": 0, + "BCL": 200, + "NBeats": 5, + "Ligand": 0, + "cAMKII": "WT", + "initfile": { + "nodeUuid": "template-uuid-59d6-b1a5-6e7b2773636b", + "output": "outFile" + } + }, + "inputAccess": { + "Na": "ReadAndWrite", + "Kr": "ReadOnly", + "BCL": "ReadAndWrite", + "NBeats": "ReadOnly", + "Ligand": "Invisible", + "cAMKII": "Invisible" + }, + "inputNodes": [ + "template-uuid-59d6-b1a5-6e7b2773636b" + ], + "outputNode": false, + "outputs": {}, + "progress": 0, + "thumbnail": "", + "position": { + "x": 300, + "y": 150 + } + }, + "template-uuid-5fdd-9daa-cb03c51d8138": { + "key": "simcore/services/dynamic/cc-0d-viewer", + "version": "2.8.0", + "label": "cc-0d-viewer", + "inputs": { + "vm_1Hz": { + "nodeUuid": "template-uuid-562f-afd1-cca5105c8844", + "output": 
"out_4" + }, + "all_results_1Hz": { + "nodeUuid": "template-uuid-562f-afd1-cca5105c8844", + "output": "out_1" + } + }, + "inputNodes": [ + "template-uuid-562f-afd1-cca5105c8844" + ], + "outputNode": false, + "outputs": {}, + "progress": 20, + "thumbnail": "", + "position": { + "x": 550, + "y": 150 + } + } + } + } +] \ No newline at end of file diff --git a/scripts/template-projects/create_csv_table.py b/scripts/template-projects/create_csv_table.py deleted file mode 100644 index d699e30269e..00000000000 --- a/scripts/template-projects/create_csv_table.py +++ /dev/null @@ -1,46 +0,0 @@ -""" Produces csv with a table of projects that can be inserted in the postgres db by importing it via adminer website - -""" - -import json - -from change_case import ChangeCase - -from simcore_service_webserver.projects.projects_models import ProjectType, projects -from simcore_service_webserver.resources import resources - -TEMPLATE_STUDIES_NAME = "data/fake-template-projects.isan.json" -TEMPLATE_STUDIES_TABLE = "template-projects-table.csv" - -COLS = [c.name for c in projects.columns if c!=projects.c.id] #pylint: disable=not-an-iterable -PROJECT_KEYS = [ChangeCase.snake_to_camel(key) for key in COLS] -ROW = ",".join( ["{}", ]*len(PROJECT_KEYS) ) - -def normalize(key, value): - if key == "type": - return ProjectType.TEMPLATE.name - - if value is None: - return '""' - - value = str(value) - value = value.replace("'", '"') - value = value.replace('"', '""') - value = '"' + value + '"' - return value - - - -def main(): - with resources.stream(TEMPLATE_STUDIES_NAME) as fp: - data = json.load(fp) - - with open(TEMPLATE_STUDIES_TABLE, 'wt') as fh: - print(",".join(COLS), file=fh) - for project in data: - values = [normalize(key, project.get(key)) for key in PROJECT_KEYS] - print(ROW.format(*values), file=fh) - - -if __name__ == "__main__": - main() diff --git a/scripts/template-projects/template-projects-table.csv b/scripts/template-projects/template-projects-table.csv deleted file mode 100644 index 0ac617e2755..00000000000 --- a/scripts/template-projects/template-projects-table.csv +++ /dev/null @@ -1,5 +0,0 @@ -type,uuid,name,description,thumbnail,prj_owner,creation_date,last_change_date,workbench -TEMPLATE,"template-uuid-4d5e-b80e-401c8066782f","ISAN: 2D Plot","2D RawGraphs viewer with one input","","maiz","2019-05-24T10:36:57.813Z","2019-05-24T11:36:12.015Z","{""template-uuid-48eb-a9d2-aaad6b72400a"": {""key"": ""simcore/services/frontend/file-picker"", ""version"": ""1.0.0"", ""label"": ""File Picker"", ""inputs"": {}, ""inputNodes"": [], ""outputNode"": False, ""outputs"": {""outFile"": {""store"": 1, ""path"": ""Shared Data/Height-Weight""}}, ""progress"": 100, ""thumbnail"": """", ""position"": {""x"": 100, ""y"": 100}}, ""template-uuid-4c63-a705-03a2c339646c"": {""key"": ""simcore/services/dynamic/raw-graphs"", ""version"": ""2.8.0"", ""label"": ""2D plot"", ""inputs"": {""input_1"": {""nodeUuid"": ""template-uuid-48eb-a9d2-aaad6b72400a"", ""output"": ""outFile""}}, ""inputNodes"": [""template-uuid-48eb-a9d2-aaad6b72400a""], ""outputNode"": False, ""outputs"": {}, ""progress"": 0, ""thumbnail"": """", ""position"": {""x"": 400, ""y"": 100}}}" -TEMPLATE,"template-uuid-4d5e-b80e-401c8066781f","ISAN: 3D Paraview","3D Paraview viewer with two inputs","","maiz","2019-05-24T10:36:57.813Z"," 2019-05-24T10:38:12.888Z","{""template-uuid-403e-865a-8c5ca30671c6"": {""key"": ""simcore/services/frontend/file-picker"", ""version"": ""1.0.0"", ""label"": ""File Picker 1"", ""inputs"": {}, ""inputNodes"": [], 
""outputNode"": False, ""outputs"": {""outFile"": {""store"": 1, ""path"": ""Shared Data/HField_Big.vtk""}}, ""progress"": 100, ""thumbnail"": """", ""position"": {""x"": 100, ""y"": 100}}, ""template-uuid-421f-be24-d44d112cc5c1"": {""key"": ""simcore/services/frontend/file-picker"", ""version"": ""1.0.0"", ""label"": ""File Picker 2"", ""inputs"": {}, ""inputNodes"": [], ""outputNode"": False, ""outputs"": {""outFile"": {""store"": 1, ""path"": ""Shared Data/bunny.vtk""}}, ""progress"": 100, ""thumbnail"": """", ""position"": {""x"": 100, ""y"": 250}}, ""template-uuid-4ecd-9636-62e619a9ca69"": {""key"": ""simcore/services/dynamic/3d-viewer"", ""version"": ""2.10.0"", ""label"": ""3D ParaViewer"", ""inputs"": {""A"": {""nodeUuid"": ""template-uuid-403e-865a-8c5ca30671c6"", ""output"": ""outFile""}, ""B"": {""nodeUuid"": ""template-uuid-421f-be24-d44d112cc5c1"", ""output"": ""outFile""}}, ""inputNodes"": [""template-uuid-403e-865a-8c5ca30671c6"", ""template-uuid-421f-be24-d44d112cc5c1""], ""outputNode"": False, ""outputs"": {}, ""progress"": 0, ""thumbnail"": """", ""position"": {""x"": 400, ""y"": 175}}}" -TEMPLATE,"template-uuid-420d-b82d-e80bfa272ebd","ISAN: MattWard use case","MattWard Solver/PostPro viewer","","MattWard","2019-04-30T08:52:20.937Z","2019-04-30T08:59:26.090Z","{""template-uuid-4021-b2ef-b2e163bfbd16"": {""key"": ""simcore/services/dynamic/mattward-viewer"", ""version"": ""2.9.0"", ""label"": ""MattWard"", ""inputs"": {}, ""inputNodes"": [], ""outputNode"": False, ""outputs"": {}, ""progress"": 0, ""thumbnail"": """", ""position"": {""x"": 100, ""y"": 100}}}" -TEMPLATE,"template-uuid-1234-a1a7-f7d4f3a8f26b","ISAN: UCDavis use case: 0D","Colleen Clancy Single Cell solver with a file picker and PostPro viewer","https://placeimg.com/171/96/tech/grayscale/?18.jpg","Colleen Clancy","2018-10-22T09:13:13.360Z","2018-10-22T09:33:41.858Z","{""template-uuid-4674-b758-946151cae351"": {""key"": ""simcore/services/frontend/file-picker"", ""version"": ""1.0.0"", ""label"": ""File Picker 0D"", ""inputs"": {}, ""inputNodes"": [], ""outputNode"": False, ""outputs"": {""outFile"": {""store"": 1, ""path"": ""Shared Data/initial_WStates""}}, ""progress"": 100, ""parent"": None, ""position"": {""x"": 50, ""y"": 150}}, ""template-uuid-409d-998c-c1f04de67f8b"": {""key"": ""simcore/services/comp/ucdavis-singlecell-cardiac-model"", ""version"": ""1.0.0"", ""label"": ""DBP-Clancy-Rabbit-Single-Cell solver"", ""inputAccess"": {""Na"": ""ReadAndWrite"", ""Kr"": ""ReadOnly"", ""BCL"": ""ReadAndWrite"", ""NBeats"": ""ReadOnly"", ""Ligand"": ""Invisible"", ""cAMKII"": ""Invisible""}, ""inputs"": {""Na"": 0, ""Kr"": 0, ""BCL"": 200, ""NBeats"": 5, ""Ligand"": 0, ""cAMKII"": ""WT"", ""initfile"": {""nodeUuid"": ""template-uuid-4674-b758-946151cae351"", ""output"": ""outFile""}}, ""inputNodes"": [""template-uuid-4674-b758-946151cae351""], ""outputNode"": False, ""outputs"": {}, ""parent"": None, ""position"": {""x"": 300, ""y"": 150}}, ""template-uuid-43e7-9fda-cf9625e59986"": {""key"": ""simcore/services/dynamic/cc-0d-viewer"", ""version"": ""2.8.0"", ""label"": ""cc-0d-viewer"", ""inputs"": {""vm_1Hz"": {""nodeUuid"": ""template-uuid-409d-998c-c1f04de67f8b"", ""output"": ""out_4""}, ""all_results_1Hz"": {""nodeUuid"": ""template-uuid-409d-998c-c1f04de67f8b"", ""output"": ""out_1""}}, ""inputNodes"": [""template-uuid-409d-998c-c1f04de67f8b""], ""outputNode"": False, ""outputs"": {}, ""parent"": None, ""position"": {""x"": 550, ""y"": 150}}}" diff --git a/scripts/template-projects/templates_in_master.csv 
b/scripts/template-projects/templates_in_master.csv deleted file mode 100644 index 3a542f74e5d..00000000000 --- a/scripts/template-projects/templates_in_master.csv +++ /dev/null @@ -1,9 +0,0 @@ -type,uuid,name,description,thumbnail,prj_owner,creation_date,last_change_date,workbench -TEMPLATE,template-uuid-5716-bedd-b409bb021760,ISAN: 2D Plot,2D RawGraphs viewer with one input,"",maiz@itis.swiss,2019-06-06 14:33:34.008,2019-06-06 14:33:35.825,"{""dd332242-829a-58e5-884e-2690b3d54f11"": {""key"": ""simcore/services/frontend/file-picker"", ""version"": ""1.0.0"", ""label"": ""File Picker"", ""inputs"": {}, ""inputNodes"": [], ""outputNode"": false, ""outputs"": {""outFile"": {""store"": 1, ""path"": ""Shared Data/Height-Weight""}}, ""progress"": 100, ""thumbnail"": """", ""position"": {""x"": 100, ""y"": 100}}, ""e1ddb64b-2935-58a5-83d7-57d0feffbfea"": {""key"": ""simcore/services/dynamic/raw-graphs"", ""version"": ""2.8.0"", ""label"": ""2D plot"", ""inputs"": {""input_1"": {""nodeUuid"": ""dd332242-829a-58e5-884e-2690b3d54f11"", ""output"": ""outFile""}}, ""inputNodes"": [""dd332242-829a-58e5-884e-2690b3d54f11""], ""outputNode"": false, ""outputs"": {}, ""progress"": 90, ""thumbnail"": """", ""position"": {""x"": 400, ""y"": 100}}}" -TEMPLATE,template-uuid-518d-a25d-8887bcae93f8,ISAN: 3D Paraview,3D Paraview viewer with two inputs,"",maiz@itis.swiss,2019-06-06 14:33:43.065,2019-06-06 14:33:44.747,"{""5e9f85f4-5688-5753-af37-e6aec8120bf2"": {""key"": ""simcore/services/frontend/file-picker"", ""version"": ""1.0.0"", ""label"": ""File Picker 1"", ""inputs"": {}, ""inputNodes"": [], ""outputNode"": false, ""outputs"": {""outFile"": {""store"": 1, ""path"": ""Shared Data/HField_Big.vtk""}}, ""progress"": 100, ""thumbnail"": """", ""position"": {""x"": 100, ""y"": 100}}, ""2cab3689-cb00-522c-a377-dd8d7cd1265b"": {""key"": ""simcore/services/frontend/file-picker"", ""version"": ""1.0.0"", ""label"": ""File Picker 2"", ""inputs"": {}, ""inputNodes"": [], ""outputNode"": false, ""outputs"": {""outFile"": {""store"": 1, ""path"": ""Shared Data/bunny.vtk""}}, ""progress"": 100, ""thumbnail"": """", ""position"": {""x"": 100, ""y"": 250}}, ""556c0607-1d8d-5983-9b0f-67677a20996c"": {""key"": ""simcore/services/dynamic/3d-viewer"", ""version"": ""2.10.0"", ""label"": ""3D ParaViewer"", ""inputs"": {""A"": {""nodeUuid"": ""5e9f85f4-5688-5753-af37-e6aec8120bf2"", ""output"": ""outFile""}, ""B"": {""nodeUuid"": ""2cab3689-cb00-522c-a377-dd8d7cd1265b"", ""output"": ""outFile""}}, ""inputNodes"": [""5e9f85f4-5688-5753-af37-e6aec8120bf2"", ""2cab3689-cb00-522c-a377-dd8d7cd1265b""], ""outputNode"": false, ""outputs"": {}, ""progress"": 85, ""thumbnail"": """", ""position"": {""x"": 400, ""y"": 175}}}" -TEMPLATE,template-uuid-5d82-b08d-d39c436ca738,ISAN: UCDavis use case: 0D,Colleen Clancy Single Cell solver with a file picker and PostPro viewer,"",maiz@itis.swiss,2019-06-06 14:33:51.94,2019-06-06 14:33:54.329,"{""5ea0e1e8-1421-59d6-b1a5-6e7b2773636b"": {""key"": ""simcore/services/frontend/file-picker"", ""version"": ""1.0.0"", ""label"": ""File Picker 0D"", ""inputs"": {}, ""inputNodes"": [], ""outputNode"": false, ""outputs"": {""outFile"": {""store"": 1, ""path"": ""Shared Data/initial_WStates""}}, ""progress"": 100, ""thumbnail"": """", ""position"": {""x"": 50, ""y"": 150}}, ""b239ac4b-1967-562f-afd1-cca5105c8844"": {""key"": ""simcore/services/comp/ucdavis-singlecell-cardiac-model"", ""version"": ""1.0.0"", ""label"": ""DBP-Clancy-Rabbit-Single-Cell solver"", ""inputs"": {""Na"": 0, ""Kr"": 0, ""BCL"": 
200, ""NBeats"": 5, ""Ligand"": 0, ""cAMKII"": ""WT"", ""initfile"": {""nodeUuid"": ""5ea0e1e8-1421-59d6-b1a5-6e7b2773636b"", ""output"": ""outFile""}}, ""inputAccess"": {""Na"": ""ReadAndWrite"", ""Kr"": ""ReadOnly"", ""BCL"": ""ReadAndWrite"", ""NBeats"": ""ReadOnly"", ""Ligand"": ""Invisible"", ""cAMKII"": ""Invisible""}, ""inputNodes"": [""5ea0e1e8-1421-59d6-b1a5-6e7b2773636b""], ""outputNode"": false, ""outputs"": {}, ""progress"": 0, ""thumbnail"": """", ""position"": {""x"": 300, ""y"": 150}}, ""aa8cdc10-eab8-5fdd-9daa-cb03c51d8138"": {""key"": ""simcore/services/dynamic/cc-0d-viewer"", ""version"": ""2.8.0"", ""label"": ""cc-0d-viewer"", ""inputs"": {""vm_1Hz"": {""nodeUuid"": ""b239ac4b-1967-562f-afd1-cca5105c8844"", ""output"": ""out_4""}, ""all_results_1Hz"": {""nodeUuid"": ""b239ac4b-1967-562f-afd1-cca5105c8844"", ""output"": ""out_1""}}, ""inputNodes"": [""b239ac4b-1967-562f-afd1-cca5105c8844""], ""outputNode"": false, ""outputs"": {}, ""progress"": 20, ""thumbnail"": """", ""position"": {""x"": 550, ""y"": 150}}}" -TEMPLATE,template-uuid-5a9e-9580-c53d92d18803,ISAN: MattWard use case,MattWard Solver/PostPro viewer,"",maiz@itis.swiss,2019-06-06 14:33:58.681,2019-06-06 14:34:01.617,"{""4858919c-e0f1-523c-8caa-4ca36c927ca2"": {""key"": ""simcore/services/dynamic/mattward-viewer"", ""version"": ""2.9.0"", ""label"": ""MattWard"", ""inputs"": {}, ""inputNodes"": [], ""outputNode"": false, ""outputs"": {}, ""progress"": 55, ""thumbnail"": """", ""position"": {""x"": 100, ""y"": 100}}}" -TEMPLATE,template-uuid-5203-915e-1ae8ae0c9991,Sleepers,"5 sleepers interconnected","",maiz@itis.swiss,2019-06-06 14:34:19.631,2019-06-06 14:34:28.647,"{""f447dd35-b098-5f7e-92b0-5a14e84401e9"": {""key"": ""simcore/services/comp/itis/sleeper"", ""version"": ""1.0.0"", ""label"": ""sleeper 0"", ""inputs"": {""in_2"": 2}, ""inputAccess"": {""in_1"": ""Invisible"", ""in_2"": ""ReadOnly""}, ""inputNodes"": [], ""outputNode"": false, ""outputs"": {}, ""progress"": 0, ""thumbnail"": """", ""position"": {""x"": 50, ""y"": 300}}, ""2745b49f-4fc4-5d8a-812c-44dacf56840e"": {""key"": ""simcore/services/comp/itis/sleeper"", ""version"": ""1.0.0"", ""label"": ""sleeper 1"", ""inputs"": {""in_1"": {""nodeUuid"": ""f447dd35-b098-5f7e-92b0-5a14e84401e9"", ""output"": ""out_1""}, ""in_2"": 2}, ""inputNodes"": [""f447dd35-b098-5f7e-92b0-5a14e84401e9""], ""outputNode"": false, ""outputs"": {}, ""progress"": 0, ""thumbnail"": """", ""position"": {""x"": 300, ""y"": 200}}, ""d1448b77-84ea-5706-b741-4073a4454f0d"": {""key"": ""simcore/services/comp/itis/sleeper"", ""version"": ""1.0.0"", ""label"": ""sleeper 2"", ""inputs"": {""in_1"": {""nodeUuid"": ""2745b49f-4fc4-5d8a-812c-44dacf56840e"", ""output"": ""out_1""}, ""in_2"": {""nodeUuid"": ""2745b49f-4fc4-5d8a-812c-44dacf56840e"", ""output"": ""out_2""}}, ""inputNodes"": [""2745b49f-4fc4-5d8a-812c-44dacf56840e""], ""outputNode"": false, ""outputs"": {}, ""progress"": 0, ""thumbnail"": """", ""position"": {""x"": 550, ""y"": 200}}, ""ab0911be-162d-5065-a079-a5a0476e3c10"": {""key"": ""simcore/services/comp/itis/sleeper"", ""version"": ""1.0.0"", ""label"": ""sleeper 3"", ""inputs"": {""in_2"": {""nodeUuid"": ""f447dd35-b098-5f7e-92b0-5a14e84401e9"", ""output"": ""out_2""}}, ""inputNodes"": [""f447dd35-b098-5f7e-92b0-5a14e84401e9""], ""outputNode"": false, ""outputs"": {}, ""progress"": 0, ""thumbnail"": """", ""position"": {""x"": 420, ""y"": 400}}, ""bd0f303e-92c4-559d-aa19-dc9293e10e4c"": {""key"": ""simcore/services/comp/itis/sleeper"", ""version"": ""1.0.0"", 
""label"": ""sleeper 4"", ""inputs"": {""in_1"": {""nodeUuid"": ""d1448b77-84ea-5706-b741-4073a4454f0d"", ""output"": ""out_1""}, ""in_2"": {""nodeUuid"": ""ab0911be-162d-5065-a079-a5a0476e3c10"", ""output"": ""out_2""}}, ""inputNodes"": [""d1448b77-84ea-5706-b741-4073a4454f0d"", ""ab0911be-162d-5065-a079-a5a0476e3c10""], ""outputNode"": false, ""outputs"": {}, ""progress"": 0, ""thumbnail"": """", ""position"": {""x"": 800, ""y"": 300}}}" -TEMPLATE,template-uuid-54fd-a9d2-d40fcfb89c5f,Kember use case,Kember Cordiac Model with PostPro Viewer,"",maiz@itis.swiss,2019-06-06 14:34:41.832,2019-06-06 14:34:44.981,"{""32a2b643-42e5-5f7f-af9c-e91b64c4989e"": {""key"": ""simcore/services/comp/kember-cardiac-model"", ""version"": ""1.0.0"", ""label"": ""Kember cardiac model"", ""inputs"": {""dt"": 0.01, ""T"": 1000, ""forcing_factor"": 0}, ""inputNodes"": [], ""outputNode"": false, ""outputs"": {}, ""progress"": 0, ""thumbnail"": """", ""position"": {""x"": 50, ""y"": 100}}, ""df5e39cc-f31e-50b8-8ae7-b2a10131cc8f"": {""key"": ""simcore/services/dynamic/kember-viewer"", ""version"": ""2.9.0"", ""label"": ""kember-viewer"", ""inputs"": {""outputController"": {""nodeUuid"": ""32a2b643-42e5-5f7f-af9c-e91b64c4989e"", ""output"": ""out_1""}}, ""inputNodes"": [""32a2b643-42e5-5f7f-af9c-e91b64c4989e""], ""outputNode"": false, ""outputs"": {}, ""progress"": 10, ""thumbnail"": """", ""position"": {""x"": 300, ""y"": 100}}}" -TEMPLATE,template-uuid-59c3-a22d-ce2979b88313,"UCDavis use cases: 1D, 2D",Colleen Clancy 1D and 2D use cases with a file picker and PostPro viewers,"",maiz@itis.swiss,2019-06-06 14:34:52.882,2019-06-06 14:34:54.863,"{""919cc4b3-973b-5d51-b24e-5f9a34f0478f"": {""key"": ""simcore/services/frontend/file-picker"", ""version"": ""1.0.0"", ""label"": ""File Picker 1&2 D"", ""inputs"": {}, ""inputNodes"": [], ""outputNode"": false, ""outputs"": {}, ""progress"": 0, ""thumbnail"": """", ""position"": {""x"": 50, ""y"": 300}}, ""aaa65879-adb0-547e-8e94-2229dcd0b912"": {""key"": ""simcore/services/frontend/nodes-group"", ""version"": ""1.0.0"", ""label"": ""CC 1D"", ""inputs"": {}, ""inputNodes"": [""919cc4b3-973b-5d51-b24e-5f9a34f0478f""], ""outputNode"": false, ""outputs"": {}, ""progress"": 0, ""thumbnail"": """", ""position"": {""x"": 300, ""y"": 200}}, ""5bd83ddf-582b-5b74-aa0b-f646fd835066"": {""key"": ""simcore/services/frontend/nodes-group"", ""version"": ""1.0.0"", ""label"": ""CC 2D"", ""inputs"": {}, ""inputNodes"": [""919cc4b3-973b-5d51-b24e-5f9a34f0478f"", ""aaa65879-adb0-547e-8e94-2229dcd0b912""], ""outputNode"": false, ""outputs"": {}, ""progress"": 0, ""thumbnail"": """", ""position"": {""x"": 550, ""y"": 400}}, ""ea8426ea-f993-5b4b-8cdf-ff7770a9f98e"": {""key"": ""simcore/services/comp/ucdavis-1d-cardiac-model"", ""version"": ""1.0.0"", ""label"": ""DBP-Clancy-Rabbit-1-D solver"", ""inputs"": {""Na"": 0, ""Kr"": 0, ""BCL"": 10, ""NBeats"": 1, ""Ligand"": 0, ""cAMKII"": ""WT"", ""tw"": 5, ""tl"": 200, ""homogeneity"": ""heterogeneous"", ""in_10"": {""nodeUuid"": ""919cc4b3-973b-5d51-b24e-5f9a34f0478f"", ""output"": ""outFile""}}, ""inputNodes"": [""919cc4b3-973b-5d51-b24e-5f9a34f0478f""], ""outputNode"": true, ""outputs"": {}, ""parent"": ""aaa65879-adb0-547e-8e94-2229dcd0b912"", ""progress"": 0, ""thumbnail"": """", ""position"": {""x"": 100, ""y"": 350}}, ""b7fc93b3-bf51-5bb1-9ce9-8a7be6b86b63"": {""key"": ""simcore/services/dynamic/cc-1d-viewer"", ""version"": ""2.8.0"", ""label"": ""cc-1d-viewer"", ""inputs"": {""ECGs"": {""nodeUuid"": ""ea8426ea-f993-5b4b-8cdf-ff7770a9f98e"", 
""output"": ""out_1""}, ""y_1D"": {""nodeUuid"": ""ea8426ea-f993-5b4b-8cdf-ff7770a9f98e"", ""output"": ""out_3""}}, ""inputNodes"": [""ea8426ea-f993-5b4b-8cdf-ff7770a9f98e""], ""outputNode"": false, ""outputs"": {}, ""parent"": ""aaa65879-adb0-547e-8e94-2229dcd0b912"", ""progress"": 100, ""thumbnail"": """", ""position"": {""x"": 300, ""y"": 200}}, ""d4135bd5-02f8-56e8-90f3-d0df529a0016"": {""key"": ""simcore/services/comp/ucdavis-2d-cardiac-model"", ""version"": ""1.0.0"", ""label"": ""DBP-Clancy-Rabbit-2-D solver"", ""inputs"": {""Na"": 0, ""Kr"": 0, ""BCL"": 10, ""Ligand"": 0, ""cAMKII"": ""WT"", ""tw"": 5, ""tl"": 200, ""homogeneity"": ""heterogeneous""}, ""inputNodes"": [""919cc4b3-973b-5d51-b24e-5f9a34f0478f"", ""aaa65879-adb0-547e-8e94-2229dcd0b912""], ""outputNode"": false, ""outputs"": {}, ""parent"": ""5bd83ddf-582b-5b74-aa0b-f646fd835066"", ""progress"": 0, ""thumbnail"": """", ""position"": {""x"": 100, ""y"": 350}}, ""0d54e54e-94c7-5b92-be80-a7fa7c25a69b"": {""key"": ""simcore/services/dynamic/cc-2d-viewer"", ""version"": ""2.8.0"", ""label"": ""cc-2d-viewer"", ""inputs"": {""ap"": {""nodeUuid"": ""d4135bd5-02f8-56e8-90f3-d0df529a0016"", ""output"": ""out_1""}}, ""inputNodes"": [""d4135bd5-02f8-56e8-90f3-d0df529a0016""], ""outputNode"": false, ""outputs"": {}, ""parent"": ""5bd83ddf-582b-5b74-aa0b-f646fd835066"", ""progress"": 100, ""thumbnail"": """", ""position"": {""x"": 300, ""y"": 200}}}" -TEMPLATE,template-uuid-5a64-98c2-7912815f9036,4x Colleen Clancy 0Ds,4x Colleen Clancy 0Ds,"",maiz@itis.swiss,2019-06-06 14:34:59.293,2019-06-06 14:34:59.293,"{""4cb1e777-5472-5bd6-8e50-cb5fe6c66527"": {""key"": ""simcore/services/frontend/file-picker"", ""version"": ""1.0.0"", ""label"": ""File Picker 0D"", ""inputs"": {}, ""inputNodes"": [], ""outputNode"": false, ""outputs"": {}, ""parent"": null, ""position"": {""x"": 50, ""y"": 300}}, ""df846171-37fe-59a0-8cc8-1d45139b73ef"": {""key"": ""simcore/services/frontend/nodes-group"", ""version"": ""1.0.0"", ""label"": ""CC 0D (1)"", ""inputs"": {}, ""outputs"": {}, ""inputNodes"": [""4cb1e777-5472-5bd6-8e50-cb5fe6c66527""], ""outputNode"": false, ""position"": {""x"": 300, ""y"": 100}}, ""1eb2bd66-1ad4-5a46-b772-b5e23f733f97"": {""key"": ""simcore/services/frontend/nodes-group"", ""version"": ""1.0.0"", ""label"": ""CC 0D (2)"", ""inputs"": {}, ""outputs"": {}, ""inputNodes"": [""4cb1e777-5472-5bd6-8e50-cb5fe6c66527""], ""outputNode"": false, ""position"": {""x"": 300, ""y"": 250}}, ""5ffdbaa2-f8c6-5cb0-9abb-ed71c6cecb51"": {""key"": ""simcore/services/frontend/nodes-group"", ""version"": ""1.0.0"", ""label"": ""CC 0D (3)"", ""inputs"": {}, ""outputs"": {}, ""inputNodes"": [""4cb1e777-5472-5bd6-8e50-cb5fe6c66527""], ""outputNode"": false, ""position"": {""x"": 300, ""y"": 400}}, ""cd2b0a83-1f4a-5732-9b3b-4c687870c63f"": {""key"": ""simcore/services/frontend/nodes-group"", ""version"": ""1.0.0"", ""label"": ""CC 0D (4)"", ""inputs"": {}, ""outputs"": {}, ""inputNodes"": [""4cb1e777-5472-5bd6-8e50-cb5fe6c66527""], ""outputNode"": false, ""position"": {""x"": 300, ""y"": 550}}, ""ea8426ea-f993-5b4b-8cdf-ff7770a9f98e"": {""key"": ""simcore/services/comp/ucdavis-singlecell-cardiac-model"", ""version"": ""1.0.0"", ""label"": ""Clancy 0D solver (1)"", ""inputs"": {""Na"": 0, ""Kr"": 0, ""BCL"": 200, ""NBeats"": 5, ""Ligand"": 0, ""cAMKII"": ""WT"", ""initFile"": {""nodeUuid"": ""4cb1e777-5472-5bd6-8e50-cb5fe6c66527"", ""output"": ""outFile""}}, ""inputNodes"": [""4cb1e777-5472-5bd6-8e50-cb5fe6c66527""], ""outputNode"": false, ""outputs"": {}, 
""parent"": ""df846171-37fe-59a0-8cc8-1d45139b73ef"", ""position"": {""x"": 100, ""y"": 350}}, ""eaa182e3-8a33-57a8-acbc-43f0e5859e3c"": {""key"": ""simcore/services/dynamic/cc-0d-viewer"", ""version"": ""2.8.0"", ""label"": ""Clancy 0D Viewer (1)"", ""inputs"": {""vm_1Hz"": {""nodeUuid"": ""ea8426ea-f993-5b4b-8cdf-ff7770a9f98e"", ""output"": ""out_4""}, ""all_results_1Hz"": {""nodeUuid"": ""ea8426ea-f993-5b4b-8cdf-ff7770a9f98e"", ""output"": ""out_1""}}, ""inputNodes"": [""ea8426ea-f993-5b4b-8cdf-ff7770a9f98e""], ""outputNode"": true, ""outputs"": {}, ""parent"": ""df846171-37fe-59a0-8cc8-1d45139b73ef"", ""position"": {""x"": 300, ""y"": 200}}, ""bfc3dcda-1a13-556a-b3b4-44c600a20418"": {""key"": ""simcore/services/comp/ucdavis-singlecell-cardiac-model"", ""version"": ""1.0.0"", ""label"": ""Clancy 0D solver (2)"", ""inputs"": {""Na"": 5, ""Kr"": 0, ""BCL"": 200, ""NBeats"": 5, ""Ligand"": 0, ""cAMKII"": ""WT"", ""initFile"": {""nodeUuid"": ""4cb1e777-5472-5bd6-8e50-cb5fe6c66527"", ""output"": ""outFile""}}, ""inputNodes"": [""4cb1e777-5472-5bd6-8e50-cb5fe6c66527""], ""outputNode"": false, ""outputs"": {}, ""parent"": ""1eb2bd66-1ad4-5a46-b772-b5e23f733f97"", ""position"": {""x"": 100, ""y"": 350}}, ""cd64ca6f-00b5-5d67-b4af-caf5e07ccdfe"": {""key"": ""simcore/services/dynamic/cc-0d-viewer"", ""version"": ""2.8.0"", ""label"": ""Clancy 0D Viewer (2)"", ""inputs"": {""vm_1Hz"": {""nodeUuid"": ""bfc3dcda-1a13-556a-b3b4-44c600a20418"", ""output"": ""out_4""}, ""all_results_1Hz"": {""nodeUuid"": ""bfc3dcda-1a13-556a-b3b4-44c600a20418"", ""output"": ""out_1""}}, ""inputNodes"": [""bfc3dcda-1a13-556a-b3b4-44c600a20418""], ""outputNode"": true, ""outputs"": {}, ""parent"": ""1eb2bd66-1ad4-5a46-b772-b5e23f733f97"", ""position"": {""x"": 300, ""y"": 200}}, ""8f7639b7-4f55-5088-a795-2a61bc17f645"": {""key"": ""simcore/services/comp/ucdavis-singlecell-cardiac-model"", ""version"": ""1.0.0"", ""label"": ""Clancy 0D solver (3)"", ""inputs"": {""Na"": 10, ""Kr"": 0, ""BCL"": 200, ""NBeats"": 5, ""Ligand"": 0, ""cAMKII"": ""WT"", ""initFile"": {""nodeUuid"": ""4cb1e777-5472-5bd6-8e50-cb5fe6c66527"", ""output"": ""outFile""}}, ""inputNodes"": [""4cb1e777-5472-5bd6-8e50-cb5fe6c66527""], ""outputNode"": false, ""outputs"": {}, ""parent"": ""5ffdbaa2-f8c6-5cb0-9abb-ed71c6cecb51"", ""position"": {""x"": 100, ""y"": 350}}, ""5e788613-a331-55e1-952e-ca01919c509a"": {""key"": ""simcore/services/dynamic/cc-0d-viewer"", ""version"": ""2.8.0"", ""label"": ""Clancy 0D Viewer (3)"", ""inputs"": {""vm_1Hz"": {""nodeUuid"": ""8f7639b7-4f55-5088-a795-2a61bc17f645"", ""output"": ""out_4""}, ""all_results_1Hz"": {""nodeUuid"": ""8f7639b7-4f55-5088-a795-2a61bc17f645"", ""output"": ""out_1""}}, ""inputNodes"": [""8f7639b7-4f55-5088-a795-2a61bc17f645""], ""outputNode"": true, ""outputs"": {}, ""parent"": ""5ffdbaa2-f8c6-5cb0-9abb-ed71c6cecb51"", ""position"": {""x"": 300, ""y"": 200}}, ""68075437-61e0-5c44-98be-a372584408be"": {""key"": ""simcore/services/comp/ucdavis-singlecell-cardiac-model"", ""version"": ""1.0.0"", ""label"": ""Clancy 0D solver (4)"", ""inputs"": {""Na"": 15, ""Kr"": 0, ""BCL"": 200, ""NBeats"": 5, ""Ligand"": 0, ""cAMKII"": ""WT"", ""initFile"": {""nodeUuid"": ""4cb1e777-5472-5bd6-8e50-cb5fe6c66527"", ""output"": ""outFile""}}, ""inputNodes"": [""4cb1e777-5472-5bd6-8e50-cb5fe6c66527""], ""outputNode"": false, ""outputs"": {}, ""parent"": ""cd2b0a83-1f4a-5732-9b3b-4c687870c63f"", ""position"": {""x"": 100, ""y"": 350}}, ""86c8b7ec-fac9-5d45-8551-5a4bf6dbf594"": {""key"": 
""simcore/services/dynamic/cc-0d-viewer"", ""version"": ""2.8.0"", ""label"": ""Clancy 0D Viewer (4)"", ""inputs"": {""vm_1Hz"": {""nodeUuid"": ""68075437-61e0-5c44-98be-a372584408be"", ""output"": ""out_4""}, ""all_results_1Hz"": {""nodeUuid"": ""68075437-61e0-5c44-98be-a372584408be"", ""output"": ""out_1""}}, ""inputNodes"": [""68075437-61e0-5c44-98be-a372584408be""], ""outputNode"": true, ""outputs"": {}, ""parent"": ""cd2b0a83-1f4a-5732-9b3b-4c687870c63f"", ""position"": {""x"": 300, ""y"": 200}}, ""ff1b2149-7401-5d79-b369-ac73ec1e4713"": {""key"": ""simcore/services/frontend/multi-plot"", ""version"": ""1.0.0"", ""label"": ""Multi Plot"", ""inputs"": {}, ""inputNodes"": [""df846171-37fe-59a0-8cc8-1d45139b73ef"", ""1eb2bd66-1ad4-5a46-b772-b5e23f733f97"", ""5ffdbaa2-f8c6-5cb0-9abb-ed71c6cecb51"", ""cd2b0a83-1f4a-5732-9b3b-4c687870c63f""], ""outputNode"": false, ""outputs"": {}, ""parent"": null, ""position"": {""x"": 550, ""y"": 300}}, ""e10b62a8-ae5a-5eb2-9737-932276d0c777"": {""key"": ""simcore/services/dynamic/raw-graphs"", ""version"": ""2.8.0"", ""label"": ""2D plot (1)"", ""inputs"": {""input_1"": {""nodeUuid"": ""eaa182e3-8a33-57a8-acbc-43f0e5859e3c"", ""output"": ""out_1""}, ""input_2"": {""nodeUuid"": ""cd64ca6f-00b5-5d67-b4af-caf5e07ccdfe"", ""output"": ""out_1""}, ""input_3"": {""nodeUuid"": ""5e788613-a331-55e1-952e-ca01919c509a"", ""output"": ""out_1""}, ""input_4"": {""nodeUuid"": ""86c8b7ec-fac9-5d45-8551-5a4bf6dbf594"", ""output"": ""out_1""}}, ""inputNodes"": [""eaa182e3-8a33-57a8-acbc-43f0e5859e3c"", ""cd64ca6f-00b5-5d67-b4af-caf5e07ccdfe"", ""5e788613-a331-55e1-952e-ca01919c509a"", ""86c8b7ec-fac9-5d45-8551-5a4bf6dbf594""], ""outputNode"": true, ""outputs"": {}, ""parent"": ""ff1b2149-7401-5d79-b369-ac73ec1e4713"", ""position"": {""x"": 50, ""y"": 50}}, ""756dc773-fceb-5aa1-a0a2-a7f5c4f41eab"": {""key"": ""simcore/services/dynamic/raw-graphs"", ""version"": ""2.8.0"", ""label"": ""2D plot (2)"", ""inputs"": {""input_1"": {""nodeUuid"": ""eaa182e3-8a33-57a8-acbc-43f0e5859e3c"", ""output"": ""out_1""}, ""input_2"": {""nodeUuid"": ""cd64ca6f-00b5-5d67-b4af-caf5e07ccdfe"", ""output"": ""out_1""}, ""input_3"": {""nodeUuid"": ""5e788613-a331-55e1-952e-ca01919c509a"", ""output"": ""out_1""}, ""input_4"": {""nodeUuid"": ""86c8b7ec-fac9-5d45-8551-5a4bf6dbf594"", ""output"": ""out_1""}}, ""inputNodes"": [""eaa182e3-8a33-57a8-acbc-43f0e5859e3c"", ""cd64ca6f-00b5-5d67-b4af-caf5e07ccdfe"", ""5e788613-a331-55e1-952e-ca01919c509a"", ""86c8b7ec-fac9-5d45-8551-5a4bf6dbf594""], ""outputNode"": true, ""outputs"": {}, ""parent"": ""ff1b2149-7401-5d79-b369-ac73ec1e4713"", ""position"": {""x"": 50, ""y"": 50}}, ""540bf9d0-b2ee-5928-8ff9-e59bdf23a322"": {""key"": ""simcore/services/dynamic/raw-graphs"", ""version"": ""2.8.0"", ""label"": ""2D plot (3)"", ""inputs"": {""input_1"": {""nodeUuid"": ""eaa182e3-8a33-57a8-acbc-43f0e5859e3c"", ""output"": ""out_1""}, ""input_2"": {""nodeUuid"": ""cd64ca6f-00b5-5d67-b4af-caf5e07ccdfe"", ""output"": ""out_1""}, ""input_3"": {""nodeUuid"": ""5e788613-a331-55e1-952e-ca01919c509a"", ""output"": ""out_1""}, ""input_4"": {""nodeUuid"": ""86c8b7ec-fac9-5d45-8551-5a4bf6dbf594"", ""output"": ""out_1""}}, ""inputNodes"": [""eaa182e3-8a33-57a8-acbc-43f0e5859e3c"", ""cd64ca6f-00b5-5d67-b4af-caf5e07ccdfe"", ""5e788613-a331-55e1-952e-ca01919c509a"", ""86c8b7ec-fac9-5d45-8551-5a4bf6dbf594""], ""outputNode"": true, ""outputs"": {}, ""parent"": ""ff1b2149-7401-5d79-b369-ac73ec1e4713"", ""position"": {""x"": 50, ""y"": 50}}, ""4612f8df-2400-578f-8f42-e3bcd9788e46"": 
{""key"": ""simcore/services/dynamic/raw-graphs"", ""version"": ""2.8.0"", ""label"": ""2D plot (4)"", ""inputs"": {""input_1"": {""nodeUuid"": ""eaa182e3-8a33-57a8-acbc-43f0e5859e3c"", ""output"": ""out_1""}, ""input_2"": {""nodeUuid"": ""cd64ca6f-00b5-5d67-b4af-caf5e07ccdfe"", ""output"": ""out_1""}, ""input_3"": {""nodeUuid"": ""5e788613-a331-55e1-952e-ca01919c509a"", ""output"": ""out_1""}, ""input_4"": {""nodeUuid"": ""86c8b7ec-fac9-5d45-8551-5a4bf6dbf594"", ""output"": ""out_1""}}, ""inputNodes"": [""eaa182e3-8a33-57a8-acbc-43f0e5859e3c"", ""cd64ca6f-00b5-5d67-b4af-caf5e07ccdfe"", ""5e788613-a331-55e1-952e-ca01919c509a"", ""86c8b7ec-fac9-5d45-8551-5a4bf6dbf594""], ""outputNode"": true, ""outputs"": {}, ""parent"": ""ff1b2149-7401-5d79-b369-ac73ec1e4713"", ""position"": {""x"": 50, ""y"": 50}}}" diff --git a/scripts/template-projects/templates_in_production.csv b/scripts/template-projects/templates_in_production.csv deleted file mode 100644 index b3a80cdf335..00000000000 --- a/scripts/template-projects/templates_in_production.csv +++ /dev/null @@ -1,4 +0,0 @@ -type,uuid,name,description,thumbnail,prj_owner,creation_date,last_change_date,workbench -TEMPLATE,template-uuid-5716-bedd-b409bb021760,ISAN: 2D Plot,2D RawGraphs viewer with one input,"",maiz@itis.swiss,2019-06-06 14:33:34.008,2019-06-06 14:33:35.825,"{""dd332242-829a-58e5-884e-2690b3d54f11"": {""key"": ""simcore/services/frontend/file-picker"", ""version"": ""1.0.0"", ""label"": ""File Picker"", ""inputs"": {}, ""inputNodes"": [], ""outputNode"": false, ""outputs"": {""outFile"": {""store"": 1, ""path"": ""Shared Data/Height-Weight""}}, ""progress"": 100, ""thumbnail"": """", ""position"": {""x"": 100, ""y"": 100}}, ""e1ddb64b-2935-58a5-83d7-57d0feffbfea"": {""key"": ""simcore/services/dynamic/raw-graphs"", ""version"": ""2.8.0"", ""label"": ""2D plot"", ""inputs"": {""input_1"": {""nodeUuid"": ""dd332242-829a-58e5-884e-2690b3d54f11"", ""output"": ""outFile""}}, ""inputNodes"": [""dd332242-829a-58e5-884e-2690b3d54f11""], ""outputNode"": false, ""outputs"": {}, ""progress"": 90, ""thumbnail"": """", ""position"": {""x"": 400, ""y"": 100}}}" -TEMPLATE,template-uuid-5d82-b08d-d39c436ca738,ISAN: UCDavis use case: 0D,Colleen Clancy Single Cell solver with a file picker and PostPro viewer,"",maiz@itis.swiss,2019-06-06 14:33:51.94,2019-06-06 14:33:54.329,"{""5ea0e1e8-1421-59d6-b1a5-6e7b2773636b"": {""key"": ""simcore/services/frontend/file-picker"", ""version"": ""1.0.0"", ""label"": ""File Picker 0D"", ""inputs"": {}, ""inputNodes"": [], ""outputNode"": false, ""outputs"": {""outFile"": {""store"": 1, ""path"": ""Shared Data/initial_WStates""}}, ""progress"": 100, ""thumbnail"": """", ""position"": {""x"": 50, ""y"": 150}}, ""b239ac4b-1967-562f-afd1-cca5105c8844"": {""key"": ""simcore/services/comp/ucdavis-singlecell-cardiac-model"", ""version"": ""1.0.0"", ""label"": ""DBP-Clancy-Rabbit-Single-Cell solver"", ""inputs"": {""Na"": 0, ""Kr"": 0, ""BCL"": 200, ""NBeats"": 5, ""Ligand"": 0, ""cAMKII"": ""WT"", ""initfile"": {""nodeUuid"": ""5ea0e1e8-1421-59d6-b1a5-6e7b2773636b"", ""output"": ""outFile""}}, ""inputAccess"": {""Na"": ""ReadAndWrite"", ""Kr"": ""ReadOnly"", ""BCL"": ""ReadAndWrite"", ""NBeats"": ""ReadOnly"", ""Ligand"": ""Invisible"", ""cAMKII"": ""Invisible""}, ""inputNodes"": [""5ea0e1e8-1421-59d6-b1a5-6e7b2773636b""], ""outputNode"": false, ""outputs"": {}, ""progress"": 0, ""thumbnail"": """", ""position"": {""x"": 300, ""y"": 150}}, ""aa8cdc10-eab8-5fdd-9daa-cb03c51d8138"": {""key"": 
""simcore/services/dynamic/cc-0d-viewer"", ""version"": ""2.8.0"", ""label"": ""cc-0d-viewer"", ""inputs"": {""vm_1Hz"": {""nodeUuid"": ""b239ac4b-1967-562f-afd1-cca5105c8844"", ""output"": ""out_4""}, ""all_results_1Hz"": {""nodeUuid"": ""b239ac4b-1967-562f-afd1-cca5105c8844"", ""output"": ""out_1""}}, ""inputNodes"": [""b239ac4b-1967-562f-afd1-cca5105c8844""], ""outputNode"": false, ""outputs"": {}, ""progress"": 20, ""thumbnail"": """", ""position"": {""x"": 550, ""y"": 150}}}" -TEMPLATE,template-uuid-5a9e-9580-c53d92d18803,ISAN: MattWard use case,MattWard Solver/PostPro viewer,"",maiz@itis.swiss,2019-06-06 14:33:58.681,2019-06-06 14:34:01.617,"{""4858919c-e0f1-523c-8caa-4ca36c927ca2"": {""key"": ""simcore/services/dynamic/mattward-viewer"", ""version"": ""2.9.0"", ""label"": ""MattWard"", ""inputs"": {}, ""inputNodes"": [], ""outputNode"": false, ""outputs"": {}, ""progress"": 55, ""thumbnail"": """", ""position"": {""x"": 100, ""y"": 100}}}" diff --git a/scripts/template-projects/templates_in_staging.csv b/scripts/template-projects/templates_in_staging.csv deleted file mode 100644 index c7ae3a5182f..00000000000 --- a/scripts/template-projects/templates_in_staging.csv +++ /dev/null @@ -1,6 +0,0 @@ -type,uuid,name,description,thumbnail,prj_owner,creation_date,last_change_date,workbench -TEMPLATE,template-uuid-5716-bedd-b409bb021760,ISAN: 2D Plot,2D RawGraphs viewer with one input,"",maiz@itis.swiss,2019-06-06 14:33:34.008,2019-06-06 14:33:35.825,"{""dd332242-829a-58e5-884e-2690b3d54f11"": {""key"": ""simcore/services/frontend/file-picker"", ""version"": ""1.0.0"", ""label"": ""File Picker"", ""inputs"": {}, ""inputNodes"": [], ""outputNode"": false, ""outputs"": {""outFile"": {""store"": 1, ""path"": ""Shared Data/Height-Weight""}}, ""progress"": 100, ""thumbnail"": """", ""position"": {""x"": 100, ""y"": 100}}, ""e1ddb64b-2935-58a5-83d7-57d0feffbfea"": {""key"": ""simcore/services/dynamic/raw-graphs"", ""version"": ""2.8.0"", ""label"": ""2D plot"", ""inputs"": {""input_1"": {""nodeUuid"": ""dd332242-829a-58e5-884e-2690b3d54f11"", ""output"": ""outFile""}}, ""inputNodes"": [""dd332242-829a-58e5-884e-2690b3d54f11""], ""outputNode"": false, ""outputs"": {}, ""progress"": 90, ""thumbnail"": """", ""position"": {""x"": 400, ""y"": 100}}}" -TEMPLATE,template-uuid-5d82-b08d-d39c436ca738,ISAN: UCDavis use case: 0D,Colleen Clancy Single Cell solver with a file picker and PostPro viewer,"",maiz@itis.swiss,2019-06-06 14:33:51.94,2019-06-06 14:33:54.329,"{""5ea0e1e8-1421-59d6-b1a5-6e7b2773636b"": {""key"": ""simcore/services/frontend/file-picker"", ""version"": ""1.0.0"", ""label"": ""File Picker 0D"", ""inputs"": {}, ""inputNodes"": [], ""outputNode"": false, ""outputs"": {""outFile"": {""store"": 1, ""path"": ""Shared Data/initial_WStates""}}, ""progress"": 100, ""thumbnail"": """", ""position"": {""x"": 50, ""y"": 150}}, ""b239ac4b-1967-562f-afd1-cca5105c8844"": {""key"": ""simcore/services/comp/ucdavis-singlecell-cardiac-model"", ""version"": ""1.0.0"", ""label"": ""DBP-Clancy-Rabbit-Single-Cell solver"", ""inputs"": {""Na"": 0, ""Kr"": 0, ""BCL"": 200, ""NBeats"": 5, ""Ligand"": 0, ""cAMKII"": ""WT"", ""initfile"": {""nodeUuid"": ""5ea0e1e8-1421-59d6-b1a5-6e7b2773636b"", ""output"": ""outFile""}}, ""inputAccess"": {""Na"": ""ReadAndWrite"", ""Kr"": ""ReadOnly"", ""BCL"": ""ReadAndWrite"", ""NBeats"": ""ReadOnly"", ""Ligand"": ""Invisible"", ""cAMKII"": ""Invisible""}, ""inputNodes"": [""5ea0e1e8-1421-59d6-b1a5-6e7b2773636b""], ""outputNode"": false, ""outputs"": {}, ""progress"": 0, 
""thumbnail"": """", ""position"": {""x"": 300, ""y"": 150}}, ""aa8cdc10-eab8-5fdd-9daa-cb03c51d8138"": {""key"": ""simcore/services/dynamic/cc-0d-viewer"", ""version"": ""2.8.0"", ""label"": ""cc-0d-viewer"", ""inputs"": {""vm_1Hz"": {""nodeUuid"": ""b239ac4b-1967-562f-afd1-cca5105c8844"", ""output"": ""out_4""}, ""all_results_1Hz"": {""nodeUuid"": ""b239ac4b-1967-562f-afd1-cca5105c8844"", ""output"": ""out_1""}}, ""inputNodes"": [""b239ac4b-1967-562f-afd1-cca5105c8844""], ""outputNode"": false, ""outputs"": {}, ""progress"": 20, ""thumbnail"": """", ""position"": {""x"": 550, ""y"": 150}}}" -TEMPLATE,template-uuid-5a9e-9580-c53d92d18803,ISAN: MattWard use case,MattWard Solver/PostPro viewer,"",maiz@itis.swiss,2019-06-06 14:33:58.681,2019-06-06 14:34:01.617,"{""4858919c-e0f1-523c-8caa-4ca36c927ca2"": {""key"": ""simcore/services/dynamic/mattward-viewer"", ""version"": ""2.9.0"", ""label"": ""MattWard"", ""inputs"": {}, ""inputNodes"": [], ""outputNode"": false, ""outputs"": {}, ""progress"": 55, ""thumbnail"": """", ""position"": {""x"": 100, ""y"": 100}}}" -TEMPLATE,template-uuid-5203-915e-1ae8ae0c9991,Sleepers,"5 sleepers interconnected","",maiz@itis.swiss,2019-06-06 14:34:19.631,2019-06-06 14:34:28.647,"{""f447dd35-b098-5f7e-92b0-5a14e84401e9"": {""key"": ""simcore/services/comp/itis/sleeper"", ""version"": ""1.0.0"", ""label"": ""sleeper 0"", ""inputs"": {""in_2"": 2}, ""inputAccess"": {""in_1"": ""Invisible"", ""in_2"": ""ReadOnly""}, ""inputNodes"": [], ""outputNode"": false, ""outputs"": {}, ""progress"": 0, ""thumbnail"": """", ""position"": {""x"": 50, ""y"": 300}}, ""2745b49f-4fc4-5d8a-812c-44dacf56840e"": {""key"": ""simcore/services/comp/itis/sleeper"", ""version"": ""1.0.0"", ""label"": ""sleeper 1"", ""inputs"": {""in_1"": {""nodeUuid"": ""f447dd35-b098-5f7e-92b0-5a14e84401e9"", ""output"": ""out_1""}, ""in_2"": 2}, ""inputNodes"": [""f447dd35-b098-5f7e-92b0-5a14e84401e9""], ""outputNode"": false, ""outputs"": {}, ""progress"": 0, ""thumbnail"": """", ""position"": {""x"": 300, ""y"": 200}}, ""d1448b77-84ea-5706-b741-4073a4454f0d"": {""key"": ""simcore/services/comp/itis/sleeper"", ""version"": ""1.0.0"", ""label"": ""sleeper 2"", ""inputs"": {""in_1"": {""nodeUuid"": ""2745b49f-4fc4-5d8a-812c-44dacf56840e"", ""output"": ""out_1""}, ""in_2"": {""nodeUuid"": ""2745b49f-4fc4-5d8a-812c-44dacf56840e"", ""output"": ""out_2""}}, ""inputNodes"": [""2745b49f-4fc4-5d8a-812c-44dacf56840e""], ""outputNode"": false, ""outputs"": {}, ""progress"": 0, ""thumbnail"": """", ""position"": {""x"": 550, ""y"": 200}}, ""ab0911be-162d-5065-a079-a5a0476e3c10"": {""key"": ""simcore/services/comp/itis/sleeper"", ""version"": ""1.0.0"", ""label"": ""sleeper 3"", ""inputs"": {""in_2"": {""nodeUuid"": ""f447dd35-b098-5f7e-92b0-5a14e84401e9"", ""output"": ""out_2""}}, ""inputNodes"": [""f447dd35-b098-5f7e-92b0-5a14e84401e9""], ""outputNode"": false, ""outputs"": {}, ""progress"": 0, ""thumbnail"": """", ""position"": {""x"": 420, ""y"": 400}}, ""bd0f303e-92c4-559d-aa19-dc9293e10e4c"": {""key"": ""simcore/services/comp/itis/sleeper"", ""version"": ""1.0.0"", ""label"": ""sleeper 4"", ""inputs"": {""in_1"": {""nodeUuid"": ""d1448b77-84ea-5706-b741-4073a4454f0d"", ""output"": ""out_1""}, ""in_2"": {""nodeUuid"": ""ab0911be-162d-5065-a079-a5a0476e3c10"", ""output"": ""out_2""}}, ""inputNodes"": [""d1448b77-84ea-5706-b741-4073a4454f0d"", ""ab0911be-162d-5065-a079-a5a0476e3c10""], ""outputNode"": false, ""outputs"": {}, ""progress"": 0, ""thumbnail"": """", ""position"": {""x"": 800, ""y"": 300}}}" 
-TEMPLATE,template-uuid-54fd-a9d2-d40fcfb89c5f,Kember use case,Kember Cordiac Model with PostPro Viewer,"",maiz@itis.swiss,2019-06-06 14:34:41.832,2019-06-06 14:34:44.981,"{""32a2b643-42e5-5f7f-af9c-e91b64c4989e"": {""key"": ""simcore/services/comp/kember-cardiac-model"", ""version"": ""1.0.0"", ""label"": ""Kember cardiac model"", ""inputs"": {""dt"": 0.01, ""T"": 1000, ""forcing_factor"": 0}, ""inputNodes"": [], ""outputNode"": false, ""outputs"": {}, ""progress"": 0, ""thumbnail"": """", ""position"": {""x"": 50, ""y"": 100}}, ""df5e39cc-f31e-50b8-8ae7-b2a10131cc8f"": {""key"": ""simcore/services/dynamic/kember-viewer"", ""version"": ""2.9.0"", ""label"": ""kember-viewer"", ""inputs"": {""outputController"": {""nodeUuid"": ""32a2b643-42e5-5f7f-af9c-e91b64c4989e"", ""output"": ""out_1""}}, ""inputNodes"": [""32a2b643-42e5-5f7f-af9c-e91b64c4989e""], ""outputNode"": false, ""outputs"": {}, ""progress"": 10, ""thumbnail"": """", ""position"": {""x"": 300, ""y"": 100}}}" diff --git a/services/director/src/simcore_service_director/producer.py b/services/director/src/simcore_service_director/producer.py index ca91704af76..a3ee24f426e 100644 --- a/services/director/src/simcore_service_director/producer.py +++ b/services/director/src/simcore_service_director/producer.py @@ -407,18 +407,18 @@ async def _start_docker_service(app: aiohttp.web.Application, except exceptions.ServiceStartTimeoutError as err: log.exception("Service failed to start") - await _silent_service_cleanup(node_uuid) + await _silent_service_cleanup(app, node_uuid) raise except aiodocker.exceptions.DockerError as err: log.exception("Unexpected error") - await _silent_service_cleanup(node_uuid) + await _silent_service_cleanup(app, node_uuid) raise exceptions.ServiceNotAvailableError( service_key, service_tag) from err -async def _silent_service_cleanup(node_uuid): +async def _silent_service_cleanup(app: aiohttp.web.Application, node_uuid): try: - await stop_service(node_uuid) + await stop_service(app, node_uuid) except exceptions.DirectorException: pass @@ -544,7 +544,8 @@ async def get_service_details(app: aiohttp.web.Application, node_uuid: str) -> D "Error while accessing container", err) from err -async def stop_service(node_uuid: str): +async def stop_service(app: aiohttp.web.Application, node_uuid: str): + log.debug("stopping service with uuid %s", node_uuid) # get the docker client async with _docker_client() as client: # pylint: disable=not-async-context-manager try: @@ -557,11 +558,33 @@ async def stop_service(node_uuid: str): # error if no service with such an id exists if not list_running_services_with_uuid: raise exceptions.ServiceUUIDNotFoundError(node_uuid) + log.debug("found service(s) with uuid %s", list_running_services_with_uuid) + # save the state of the main service if it can + service_details = await get_service_details(app, node_uuid) + service_host_name = "{}:{}{}".format(service_details["service_host"], + service_details["service_port"] if service_details["service_port"] else "80", + service_details["service_basepath"]) + log.debug("saving state of service %s...", service_host_name) + try: + async with aiohttp.ClientSession() as session: + service_url = "http://{}/state".format(service_host_name) + async with session.post(service_url) as response: + if 200 <= response.status < 300: + log.debug("service %s successfully saved its state", service_host_name) + else: + log.warning("service %s does not allow saving state, answered %s", service_host_name, await response.text()) + except
aiohttp.ClientConnectionError: + log.exception("service %s could not be contacted, state not saved", service_host_name) + + # remove the services try: + log.debug("removing services...") for service in list_running_services_with_uuid: await client.services.delete(service["Spec"]["Name"]) + log.debug("removed services, now removing network...") except aiodocker.exceptions.DockerError as err: raise exceptions.GenericDockerError("Error while removing services", err) # remove network(s) await _remove_overlay_network_of_swarm(client, node_uuid) + log.debug("removed network") diff --git a/services/director/src/simcore_service_director/rest/handlers.py b/services/director/src/simcore_service_director/rest/handlers.py index e662a2691bf..fd041fb8553 100644 --- a/services/director/src/simcore_service_director/rest/handlers.py +++ b/services/director/src/simcore_service_director/rest/handlers.py @@ -83,7 +83,7 @@ async def running_interactive_services_get(request, service_uuid): # pylint:dis async def running_interactive_services_delete(request, service_uuid): # pylint:disable=unused-argument log.debug("Client does running_interactive_services_delete request %s with service_uuid %s", request, service_uuid) try: - await producer.stop_service(service_uuid) + await producer.stop_service(request.app, service_uuid) except exceptions.ServiceUUIDNotFoundError as err: raise web_exceptions.HTTPNotFound(reason=str(err)) except Exception as err: diff --git a/services/director/tests/fixtures/fake_services.py b/services/director/tests/fixtures/fake_services.py index c6f74a93e7b..0b7390dc8f4 100644 --- a/services/director/tests/fixtures/fake_services.py +++ b/services/director/tests/fixtures/fake_services.py @@ -83,7 +83,7 @@ def _build_push_image(docker_dir, registry_url, service_type, name, tag, depende additional_docker_labels = [{"name": "constraints", "type": "string", "value": ["node.role==manager"]}] internal_port = None if service_type == "dynamic": - internal_port = random.randint(1, 100000) + internal_port = random.randint(1, 65535) additional_docker_labels.append({"name": "ports", "type": "int", "value": internal_port}) docker_labels["simcore.service.settings"] = json.dumps(additional_docker_labels) if bad_json_format: diff --git a/services/director/tests/test_handlers.py b/services/director/tests/test_handlers.py index bbc1aa6b618..11a208b9e74 100644 --- a/services/director/tests/test_handlers.py +++ b/services/director/tests/test_handlers.py @@ -211,7 +211,8 @@ async def _start_get_stop_services(client, push_services, user_id, project_id): # stop the service web_response = await client.delete("/v0/running_interactive_services/{}".format(params["service_uuid"])) - assert web_response.status == 204 + text = await web_response.text() + assert web_response.status == 204, text assert web_response.content_type == "application/json" data = await web_response.json() assert data is None diff --git a/services/director/tests/test_producer.py b/services/director/tests/test_producer.py index 6a4659df383..f151c2f0932 100644 --- a/services/director/tests/test_producer.py +++ b/services/director/tests/test_producer.py @@ -62,7 +62,7 @@ async def push_start_services(number_comp, number_dyn, dependant=False): #teardown stop the services for service in started_services: service_uuid = service["service_uuid"] - await producer.stop_service(service_uuid) + await producer.stop_service(aiohttp_mock_app, service_uuid) with pytest.raises(exceptions.ServiceUUIDNotFoundError): await producer.get_service_details(aiohttp_mock_app, service_uuid) diff
--git a/services/docker-compose.tools.yml b/services/docker-compose-tools.yml similarity index 82% rename from services/docker-compose.tools.yml rename to services/docker-compose-tools.yml index eaa73a46e35..20a2c9ae621 100644 --- a/services/docker-compose.tools.yml +++ b/services/docker-compose-tools.yml @@ -1,6 +1,6 @@ -version: '3.4' +# Tooling stack +version: '3.7' services: - # Maintenance services #-------------------------------------------------------------------- # By default disabled: celery workers monitor flower: @@ -27,11 +27,5 @@ services: volumes: - /var/run/docker.sock:/var/run/docker.sock - portainer_data:/data - #---------------------------------------------- - # TODO: Pedro reviews - # wdb: - # image: kozea/wdb - # ports: - # - "1984:1984" volumes: portainer_data: diff --git a/services/docker-compose.cache.yml b/services/docker-compose.cache.yml index ccf268dc9e1..4ddd03e5f46 100644 --- a/services/docker-compose.cache.yml +++ b/services/docker-compose.cache.yml @@ -1,4 +1,4 @@ -version: '3.4' +version: '3.7' services: apihub: image: ${DOCKER_REGISTRY:-itisfoundation}/apihub:cache diff --git a/services/docker-compose.devel.yml b/services/docker-compose.devel.yml index 2b8c063da9e..918d552454e 100644 --- a/services/docker-compose.devel.yml +++ b/services/docker-compose.devel.yml @@ -1,5 +1,8 @@ -# USAGE: docker-compose -f docker-compose.yml -f docker-compose.devel.yml ... -version: '3.4' +# Overrides docker-compose.yml for development mode +# Usage: +# docker-compose -f docker-compose.yml -f docker-compose.devel.yml ... +# +version: '3.7' services: apihub: image: services_apihub:dev @@ -62,3 +65,8 @@ services: - ../packages:/devel/packages stdin_open: true tty: true + #-------------------------------------------------------------------- + maintenance: + image: services_maintenance:dev + volumes: + - ./maintenance/notebooks:/home/jovyan/work diff --git a/services/docker-compose.yml b/services/docker-compose.yml index bf827417372..a6847d006b6 100644 --- a/services/docker-compose.yml +++ b/services/docker-compose.yml @@ -1,4 +1,5 @@ -version: '3.4' +# osparc-simcore stack (framework stack) +version: '3.7' services: apihub: image: ${DOCKER_REGISTRY:-itisfoundation}/apihub:${DOCKER_IMAGE_TAG:-latest} @@ -248,6 +249,28 @@ services: ports: - "9000" command: server /data + #-------------------------------------------------------------------- + maintenance: + image: ${DOCKER_REGISTRY:-itisfoundation}/maintenance:${DOCKER_IMAGE_TAG:-latest} + build: + context: ../ + dockerfile: services/maintenance/Dockerfile + labels: + org.label-schema.schema-version: "1.0" + org.label-schema.build-date: "${BUILD_DATE}" + org.label-schema.vcs-url: "https://github.com/ITISFoundation/osparc-simcore" + org.label-schema.vcs-ref: "${VCS_REF}" + ports: + - "8888" + env_file: + - ../.env + depends_on: + - postgres + deploy: + placement: + constraints: + - node.platform.os == linux + - node.role == manager volumes: input: output: diff --git a/services/maintenance/Dockerfile b/services/maintenance/Dockerfile new file mode 100644 index 00000000000..693383573cb --- /dev/null +++ b/services/maintenance/Dockerfile @@ -0,0 +1,25 @@ +#FROM nbgallery/jupyter-alpine:7.8.4 +FROM jupyter/base-notebook:python-3.7.3 AS base + +COPY --chown=jovyan:users scripts/docker/healthcheck_curl_host.py /healthcheck/healthcheck_curl_host.py +HEALTHCHECK --interval=10s \ + --timeout=30s \ + --start-period=1s \ + --retries=3 \ + CMD ["python3", "/healthcheck/healthcheck_curl_host.py", "http://localhost:8888"] + + +# enable 
single-user server +RUN unset JUPYTERHUB_API_TOKEN + +COPY --chown=jovyan:users packages/postgres-database $HOME/postgres-database +RUN pip install --upgrade pip wheel setuptools && \ + pip install $HOME/postgres-database[migration] && \ + rm -rf $HOME/postgres-database + +WORKDIR $HOME/work +COPY --chown=jovyan:users services/maintenance/notebooks . +RUN jupyter trust $HOME/work/* + +COPY --chown=jovyan:users services/maintenance/boot.sh /usr/local/bin/ +CMD [ "boot.sh" ] diff --git a/services/maintenance/README.md b/services/maintenance/README.md new file mode 100644 index 00000000000..f20da627b49 --- /dev/null +++ b/services/maintenance/README.md @@ -0,0 +1,3 @@ +# Maintenance service + +TODO: light [jupyter](https://github.com/nbgallery/jupyter-alpine) notebook for maintenance operations diff --git a/services/maintenance/boot.sh b/services/maintenance/boot.sh new file mode 100755 index 00000000000..4687be8e171 --- /dev/null +++ b/services/maintenance/boot.sh @@ -0,0 +1 @@ +exec start-notebook.sh --NotebookApp.password=$(python -c "from notebook.auth import passwd; print(passwd('${MAINTENANCE_PASSWORD}'))") diff --git a/services/maintenance/notebooks/db-migration.ipynb b/services/maintenance/notebooks/db-migration.ipynb new file mode 100644 index 00000000000..70351c50bee --- /dev/null +++ b/services/maintenance/notebooks/db-migration.ipynb @@ -0,0 +1,146 @@ +{ + "cells": [ + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "# simcore postgres database\n", + "\n", + "Contains database **models** served by the ``postgres`` service and adds an extension with **migration** tools (e.g. entrypoint that wraps [alembic]'s CLI in a similar way to [flask-migrate]).\n", + "\n", + "\n", + "To install migration tools add ``[migration]`` extra\n", + "```console\n", + " pip install .[migration]\n", + "```\n", + "and to call the CLI use\n", + "```console\n", + " simcore-postgres-database --help\n", + "\n", + " # or a short alias\n", + "\n", + " sc-pg --help\n", + "```\n", + "This entrypoint wraps calls to [alembic] commands and customizes them for the ``simcore_postgres_database`` models and the `postgres` online database.\n", + "\n", + "\n", + "A typical workflow:\n", + "\n", + "### Discover\n", + "\n", + "```console\n", + " simcore-postgres-database discover -u simcore -p simcore\n", + "```\n", + "\n", + "```console\n", + " simcore-postgres-database info\n", + "```\n", + "\n", + "### Review\n", + "\n", + "```console\n", + " simcore-postgres-database review -m \"some message about changes\"\n", + "```\n", + "Auto-generates some scripts under [migration/versions](packages/postgres-database/migration/versions). The migration script **needs to be reviewed and edited**, as Alembic currently does not detect every change you\n", + "make to your models.
In particular, Alembic is currently unable to detect:\n", + "- table name changes,\n", + "- column name changes,\n", + "- or anonymously named constraints.\n", + "\nA detailed summary of limitations can be found in the Alembic autogenerate documentation.\n", + "Once finalized, the migration script also needs to be added to version control.\n", + "\n", + "### Upgrade\n", + "\n", + "Upgrades to a given revision (run ``info`` to check the history)\n", + "```console\n", + " simcore-postgres-database upgrade head\n", + "```\n", + "\n", + "\n", + "[alembic]:https://alembic.sqlalchemy.org/en/latest/\n", + "[flask-migrate]:https://flask-migrate.readthedocs.io/en/latest/\n" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "import os\n", + "import simcore_postgres_database as scpg\n", + "\n", + "alembic_ini = os.path.join( os.path.dirname(scpg.__file__), 'alembic.ini')" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "! echo $alembic_ini\n", + "!alembic -c $alembic_ini history\n", + "#!alembic --help\n", + "#" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "!alembic -c $alembic_ini stamp --sql 645807399320" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "!sc-pg --help" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "!sc-pg discover" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "!sc-pg upgrade head" + ] + } + ], + "metadata": { + "kernelspec": { + "display_name": "Python 3", + "language": "python", + "name": "python3" + }, + "language_info": { + "codemirror_mode": { + "name": "ipython", + "version": 3 + }, + "file_extension": ".py", + "mimetype": "text/x-python", + "name": "python", + "nbconvert_exporter": "python", + "pygments_lexer": "ipython3", + "version": "3.7.3" + } + }, + "nbformat": 4, + "nbformat_minor": 2 +} diff --git a/services/sidecar/requirements/dev.txt b/services/sidecar/requirements/dev.txt index 69b94b613d0..77ec3e359ba 100644 --- a/services/sidecar/requirements/dev.txt +++ b/services/sidecar/requirements/dev.txt @@ -12,6 +12,7 @@ # installs this repo's packages -e ../../services/storage/client-sdk/python/ -e ../../packages/s3wrapper/ +-e ../../packages/postgres-database/ -e ../../packages/simcore-sdk/ # installs current package diff --git a/services/sidecar/requirements/prod.txt b/services/sidecar/requirements/prod.txt index 533f86e6b1b..c26a540f09b 100644 --- a/services/sidecar/requirements/prod.txt +++ b/services/sidecar/requirements/prod.txt @@ -12,6 +12,7 @@ # installs this repo's packages ../../services/storage/client-sdk/python/ ../../packages/s3wrapper/ +../../packages/postgres-database/ ../../packages/simcore-sdk/ # installs current package diff --git a/services/sidecar/src/sidecar/_deprecated.py b/services/sidecar/src/sidecar/_deprecated.py index 56fb5bd71d3..31e9d694f39 100644 --- a/services/sidecar/src/sidecar/_deprecated.py +++ b/services/sidecar/src/sidecar/_deprecated.py @@ -95,8 +95,11 @@ def _process_task_input(self, port, input_ports): other_task = None _session = self._db.Session() try: - other_task =_session.query(ComputationalTask).filter(and_(ComputationalTask.node_id==other_node_id, - ComputationalTask.project_id==self._task.project_id)).one() + # pylint: disable=no-member +
other_task = _session.query(ComputationalTask).filter( + and_( ComputationalTask.node_id==other_node_id, + ComputationalTask.project_id==self._task.project_id ) + ).one() except exc.SQLAlchemyError: log.exception("Could not find other task") _session.rollback() @@ -392,8 +395,13 @@ def inspect(self, celery_task, project_id, node_id): do_process = True # find the for the current node_id, skip if there is already a job_id around # pylint: disable=assignment-from-no-return - query =_session.query(ComputationalTask).filter(and_(ComputationalTask.node_id==node_id, - ComputationalTask.project_id==project_id, ComputationalTask.job_id==None)) + # pylint: disable=no-member + query = _session.query(ComputationalTask).filter( + and_( + ComputationalTask.node_id==node_id, + ComputationalTask.project_id==project_id, + ComputationalTask.job_id==None) + ) # Use SELECT FOR UPDATE TO lock the row query.with_for_update() task = query.one() diff --git a/services/sidecar/src/sidecar/core.py b/services/sidecar/src/sidecar/core.py index e5428f48508..dcb29dacdaf 100644 --- a/services/sidecar/src/sidecar/core.py +++ b/services/sidecar/src/sidecar/core.py @@ -388,8 +388,12 @@ def inspect(self, celery_task, user_id, project_id, node_id): do_process = True # find the for the current node_id, skip if there is already a job_id around # pylint: disable=assignment-from-no-return - query =_session.query(ComputationalTask).filter(and_(ComputationalTask.node_id==node_id, - ComputationalTask.project_id==project_id, ComputationalTask.job_id==None)) + # pylint: disable=no-member + query = _session.query(ComputationalTask).filter( + and_( ComputationalTask.node_id==node_id, + ComputationalTask.project_id==project_id, + ComputationalTask.job_id==None ) + ) # Use SELECT FOR UPDATE TO lock the row query.with_for_update() task = query.one() diff --git a/services/sidecar/src/sidecar/utils.py b/services/sidecar/src/sidecar/utils.py index c4e99a53455..edad99f32c9 100644 --- a/services/sidecar/src/sidecar/utils.py +++ b/services/sidecar/src/sidecar/utils.py @@ -39,9 +39,11 @@ def find_entry_point(g): return result def is_node_ready(task, graph, _session, _logger): + # pylint: disable=no-member tasks = _session.query(ComputationalTask).filter(and_( ComputationalTask.node_id.in_(list(graph.predecessors(task.node_id))), ComputationalTask.project_id==task.project_id)).all() + _logger.debug("TASK %s ready?
for dep_task in tasks: job_id = dep_task.job_id diff --git a/services/sidecar/tests/utils.py b/services/sidecar/tests/utils.py index a3fdb5d0eb1..9d4ad5cf0ef 100644 --- a/services/sidecar/tests/utils.py +++ b/services/sidecar/tests/utils.py @@ -75,6 +75,7 @@ def setup_sleepers(url): pipeline = ComputationalPipeline(dag_adjacency_list=dag_adjacency_list, state=0) db_session.add(pipeline) db_session.flush() + # pylint: disable=no-member project_id = pipeline.project_id node_id_1 = "e609a68c-d743-4a12-9745-f31734d1b911" @@ -83,29 +84,29 @@ def setup_sleepers(url): node_schema = { "inputs":{ "in_1":{ - "label": "Number of seconds to sleep", - "description": "Number of seconds to sleep", + "label": "Number of seconds to sleep", + "description": "Number of seconds to sleep", "displayOrder":0, "type": "data:*/*" }, "in_2": { - "label": "Number of seconds to sleep", - "description": "Number of seconds to sleep", + "label": "Number of seconds to sleep", + "description": "Number of seconds to sleep", "displayOrder":1, - "type": "integer", + "type": "integer", "defaultValue": 4 } }, "outputs":{ "out_1":{ - "label": "Number of seconds to sleep", - "description": "Number of seconds to sleep", + "label": "Number of seconds to sleep", + "description": "Number of seconds to sleep", "displayOrder":0, "type": "data:*/*" }, "out_2": { - "label": "Number of seconds to sleep", - "description": "Number of seconds to sleep", + "label": "Number of seconds to sleep", + "description": "Number of seconds to sleep", "displayOrder":1, "type": "integer" } diff --git a/services/storage/Dockerfile b/services/storage/Dockerfile index e4716eeb65e..24f39c61d2d 100644 --- a/services/storage/Dockerfile +++ b/services/storage/Dockerfile @@ -1,4 +1,5 @@ -FROM python:3.6-alpine as base +#FROM python:3.6-alpine as base +FROM itisfoundation/python-with-pandas:3.6-alpine as base # # USAGE: # cd services/storage diff --git a/services/storage/README.md b/services/storage/README.md index fe14edcf893..03661c06084 100644 --- a/services/storage/README.md +++ b/services/storage/README.md @@ -5,3 +5,4 @@ [![](https://images.microbadger.com/badges/commit/itisfoundation/storage.svg)](https://microbadger.com/images/itisfoundation/storage "More on service image in registry") Service to manage data storage in simcore + diff --git a/services/storage/requirements/_base.in b/services/storage/requirements/_base.in index 1051e4c8a35..05797e2fce7 100644 --- a/services/storage/requirements/_base.in +++ b/services/storage/requirements/_base.in @@ -1,12 +1,15 @@ # # Specifies third-party dependencies for 'storage' # +-r python-with-pandas_89f709.txt # TODO: add reference to git@github.com:ITISFoundation/dockerfiles.git/requirements urllib3~=1.24.2 # See https://nvd.nist.gov/vuln/detail/CVE-2019-11324 psycopg2-binary~=2.7.5 # See http://initd.org/psycopg/docs/install.html#binary-install-from-pypi sqlalchemy~=1.3.3 # https://nvd.nist.gov/vuln/detail/CVE-2019-7164 jsonschema<3 # openapi-spec-validator from service-libs has requirement jsonschema<3 +boto3==1.9.91 # do not use the latest version: it would require botocore<1.13.0,>=1.12.179, but aiobotocore[boto3]==0.10.2 hardcodes boto3==1.9.91, which requires botocore<1.12.92,>=1.12.91 +aioboto3 aiohttp aiofiles aiopg[sa]~=0.15.0 # Fixes 0.16.0 https://github.com/aio-libs/aiopg/issues/535 diff --git a/services/storage/requirements/_base.txt b/services/storage/requirements/_base.txt index b5a7f5b3a94..ebe7485d662 100644 --- a/services/storage/requirements/_base.txt +++
b/services/storage/requirements/_base.txt @@ -4,15 +4,18 @@ # # pip-compile --output-file=_base.txt _base.in # +aioboto3==6.4.1 +aiobotocore[boto3]==0.10.2 # via aioboto3 aiofiles==0.4.0 aiohttp==3.5.4 aiopg[sa]==0.15.0 apipkg==1.5 # via execnet +async-generator==1.10 # via aiobotocore async-timeout==3.0.1 # via aiohttp attrs==19.1.0 # via aiohttp blackfynn==2.11.1 -boto3==1.9.153 # via blackfynn -botocore==1.12.153 # via boto3, s3transfer +boto3==1.9.91 +botocore==1.12.91 # via aiobotocore, boto3, s3transfer certifi==2019.3.9 # via requests chardet==3.0.4 # via aiohttp, requests configparser==3.7.4 # via blackfynn @@ -27,20 +30,20 @@ jmespath==0.9.4 # via boto3, botocore jsonschema==2.6.0 marshmallow==2.19.2 multidict==4.5.2 # via aiohttp, yarl -numpy==1.16.3 # via blackfynn, pandas -pandas==0.24.2 # via blackfynn +numpy==1.16.3 +pandas==0.24.2 protobuf==3.2.0 # via blackfynn psutil==5.6.2 # via blackfynn psycopg2-binary==2.7.7 psycopg2==2.8.2 # via aiopg -python-dateutil==2.8.0 # via botocore, pandas -pytz==2019.1 # via blackfynn, pandas +python-dateutil==2.8.0 +pytz==2019.1 pyyaml==5.1 # via trafaret-config requests==2.22.0 # via blackfynn s3transfer==0.2.0 # via boto3 semantic-version==2.6.0 semver==2.8.1 # via blackfynn -six==1.12.0 # via protobuf, python-dateutil, tenacity, websocket-client +six==1.12.0 sqlalchemy==1.3.3 tenacity==5.0.4 trafaret-config==2.0.2 @@ -48,5 +51,8 @@ trafaret==1.2.0 typing-extensions==3.7.2 # via aiohttp urllib3==1.24.3 websocket-client==0.56.0 # via blackfynn -wrapt==1.11.1 # via deprecated +wrapt==1.11.1 # via aiobotocore, deprecated yarl==1.3.0 # via aiohttp + +# The following packages are considered to be unsafe in a requirements file: +# setuptools==41.0.1 # via protobuf diff --git a/services/storage/requirements/_test.txt b/services/storage/requirements/_test.txt index a12a497adc5..ededf0a18d9 100644 --- a/services/storage/requirements/_test.txt +++ b/services/storage/requirements/_test.txt @@ -4,17 +4,20 @@ # # pip-compile --output-file=_test.txt _test.in # +aioboto3==6.4.1 +aiobotocore[boto3]==0.10.2 aiofiles==0.4.0 aiohttp==3.5.4 aiopg[sa]==0.15.0 apipkg==1.5 astroid==2.2.5 # via pylint +async-generator==1.10 async-timeout==3.0.1 atomicwrites==1.3.0 # via pytest attrs==19.1.0 blackfynn==2.11.1 -boto3==1.9.153 -botocore==1.12.153 +boto3==1.9.91 +botocore==1.12.91 certifi==2019.3.9 chardet==3.0.4 codecov==2.0.15 @@ -71,3 +74,6 @@ wcwidth==0.1.7 # via pytest websocket-client==0.56.0 wrapt==1.11.1 yarl==1.3.0 + +# The following packages are considered to be unsafe in a requirements file: +# setuptools==41.0.1 # via protobuf, pytest diff --git a/services/storage/requirements/ci.txt b/services/storage/requirements/ci.txt index a3eaeebdd85..57e41ff2850 100644 --- a/services/storage/requirements/ci.txt +++ b/services/storage/requirements/ci.txt @@ -11,6 +11,7 @@ # installs this repo's packages ../../packages/s3wrapper/ +../../packages/postgres-database/ ../../packages/simcore-sdk/ ../../packages/service-library/ diff --git a/services/storage/requirements/dev.txt b/services/storage/requirements/dev.txt index 38dc54c399b..6c5e1bd9203 100644 --- a/services/storage/requirements/dev.txt +++ b/services/storage/requirements/dev.txt @@ -11,6 +11,7 @@ # installs this repo's packages -e ../../packages/s3wrapper/ +-e ../../packages/postgres-database/ -e ../../packages/simcore-sdk/ -e ../../packages/service-library diff --git a/services/storage/requirements/prod.txt b/services/storage/requirements/prod.txt index 42b2cd08ea9..6457146ed07 100644 --- 
a/services/storage/requirements/prod.txt +++ b/services/storage/requirements/prod.txt @@ -11,6 +11,7 @@ # installs this repo's packages ../../packages/s3wrapper/ +../../packages/postgres-database/ ../../packages/simcore-sdk/ ../../packages/service-library diff --git a/services/storage/requirements/python-with-pandas_89f709.txt b/services/storage/requirements/python-with-pandas_89f709.txt new file mode 100644 index 00000000000..9adaccff774 --- /dev/null +++ b/services/storage/requirements/python-with-pandas_89f709.txt @@ -0,0 +1,8 @@ +# +# Taken from https://github.com/ITISFoundation/dockerfiles/blob/89f70938b66c0e4c1966c9a62da7a6dc89778f45/python-with-pandas/3.6-alpine/requirements.txt +# +numpy==1.16.3 +pandas==0.24.2 +python-dateutil==2.8.0 # via pandas +pytz==2019.1 # via pandas +six==1.12.0 # via python-dateutil diff --git a/services/storage/setup.py b/services/storage/setup.py index 8f05193ad76..be5f9090c40 100644 --- a/services/storage/setup.py +++ b/services/storage/setup.py @@ -16,6 +16,7 @@ def read_reqs( reqs_path: Path): install_requirements = read_reqs( here / "requirements" / "_base.txt" ) + [ "s3wrapper==0.1.0", + "simcore-postgres-database==0.1.0", "simcore-sdk==0.1.0", "simcore-service-library==0.1.0" ] diff --git a/services/storage/src/simcore_service_storage/config_schema.py b/services/storage/src/simcore_service_storage/config_schema.py index 4f3fd12db88..5f518b1650e 100644 --- a/services/storage/src/simcore_service_storage/config_schema.py +++ b/services/storage/src/simcore_service_storage/config_schema.py @@ -9,7 +9,6 @@ "port": T.Int(), "log_level": T.Enum("DEBUG", "WARNING", "INFO", "ERROR", "CRITICAL", "FATAL", "NOTSET"), "testing": T.Bool(), - "python2": T.String(), T.Key("max_workers", default=8, optional=True) : T.Int(), T.Key("test_datcore", optional=True): T.Dict({ "token_key": T.String(), diff --git a/services/storage/src/simcore_service_storage/data/docker-dev-config.yaml b/services/storage/src/simcore_service_storage/data/docker-dev-config.yaml index 5dfc18cc8c6..8f4ddb239f8 100644 --- a/services/storage/src/simcore_service_storage/data/docker-dev-config.yaml +++ b/services/storage/src/simcore_service_storage/data/docker-dev-config.yaml @@ -3,9 +3,8 @@ main: host: 0.0.0.0 log_level: INFO port: 8080 - testing: True + testing: False max_workers: 8 - python2: /usr/bin/python2 test_datcore: token_key: ${BF_API_KEY} token_secret: ${BF_API_SECRET} diff --git a/services/storage/src/simcore_service_storage/data/docker-prod-config.yaml b/services/storage/src/simcore_service_storage/data/docker-prod-config.yaml index 7339e702984..ed77ca0897f 100644 --- a/services/storage/src/simcore_service_storage/data/docker-prod-config.yaml +++ b/services/storage/src/simcore_service_storage/data/docker-prod-config.yaml @@ -3,8 +3,7 @@ main: host: 0.0.0.0 log_level: INFO port: 8080 - testing: True - python2: /usr/bin/python2 + testing: False test_datcore: token_key: ${BF_API_KEY} token_secret: ${BF_API_SECRET} diff --git a/services/storage/src/simcore_service_storage/data/host-dev-config.yaml b/services/storage/src/simcore_service_storage/data/host-dev-config.yaml index ad93bc1134a..690475f584c 100644 --- a/services/storage/src/simcore_service_storage/data/host-dev-config.yaml +++ b/services/storage/src/simcore_service_storage/data/host-dev-config.yaml @@ -4,8 +4,7 @@ main: host: 127.0.0.1 log_level: INFO port: 8080 - testing: true - python2: ${VENV2}/bin/python2 + testing: False test_datcore: token_key: ${BF_API_KEY} token_secret: ${BF_API_SECRET} diff --git 
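The `setup.py` hunk above shows only the call site of `read_reqs`; its body lies outside the visible context, so the following helper is an assumption of what it plausibly does, not the verbatim implementation:

```python
# Hypothetical read_reqs: collect requirement lines from a pip requirements
# file, skipping comments and pip options such as '-r' includes.
from pathlib import Path

def read_reqs(reqs_path: Path):
    return [
        line.strip()
        for line in reqs_path.read_text().splitlines()
        if line.strip() and not line.strip().startswith(("#", "-"))
    ]

here = Path(__file__).parent
install_requirements = read_reqs(here / "requirements" / "_base.txt") + [
    "simcore-postgres-database==0.1.0",  # in-repo package pinned above
]
```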
a/services/storage/src/simcore_service_storage/datcore.py b/services/storage/src/simcore_service_storage/datcore.py index 58e34cbdb06..a9785cb3bc4 100644 --- a/services/storage/src/simcore_service_storage/datcore.py +++ b/services/storage/src/simcore_service_storage/datcore.py @@ -6,8 +6,14 @@ # pylint: skip-file import os import urllib +from pathlib import Path +from typing import List from blackfynn import Blackfynn +from blackfynn.models import BaseCollection, Collection, DataPackage + +from simcore_service_storage.models import FileMetaData +from simcore_service_storage.settings import DATCORE_ID, DATCORE_STR #FIXME: W0611:Unused IOAPI imported from blackfynn.api.transfers #from blackfynn.api.transfers import IOAPI @@ -16,16 +22,29 @@ #FIXME: W0212:Access to a protected member _api of a client class # pylint: disable=W0212 +def _get_collection_id(folder: BaseCollection, _collections: List[str], collection_id: str)-> str: + if not len(_collections): + return collection_id + + current = _collections.pop(0) + found = False + for item in folder: + if isinstance(item, Collection) and item.name == current: + collection_id = item.id + folder = item + found = True + break + + if found: + return _get_collection_id(folder, _collections, collection_id) + + return "" + class DatcoreClient(object): def __init__(self, api_token=None, api_secret=None, host=None, streaming_host=None): self.client = Blackfynn(profile=None, api_token=api_token, api_secret=api_secret, host=host, streaming_host=streaming_host) - def _context(self): - """ - Returns current organizational context - """ - return self.client.context def profile(self): """ @@ -33,28 +52,68 @@ def profile(self): """ return self.client.profile - def organization(self): - """ - Returns organization name - """ - return self.client.context.name + def _collection_from_destination(self, destination: str): + destination_path = Path(destination) + parts = destination_path.parts - def list_datasets(self): - ds = [] - for item in self.client.datasets(): - ds.append(item.name) + dataset_name = parts[0] + dataset = self.get_dataset(dataset_name) + if dataset is None: + return None, None - return ds + collection_id = dataset.id + collection = dataset + collections = [] + if len(parts) > 1: + object_path = Path(*parts[1:]) + collections = list(object_path.parts) + collection_id = "" + collection_id = _get_collection_id(dataset, collections, collection_id) + collection = self.client.get(collection_id) + + return collection, collection_id - def list_files(self): + def list_files_recursively(self, dataset_filter: str=""): files = [] - for ds in self.client.datasets(): - for item in ds: - files.append(os.path.join(ds.name, item.name)) + + for dataset in self.client.datasets(): + if not dataset_filter or dataset_filter in dataset.name: + self.list_dataset_files_recursively(files, dataset, Path(dataset.name)) return files + def list_dataset_files_recursively(self, files: List[FileMetaData], base: BaseCollection, current_root: Path): + for item in base: + if isinstance(item, Collection): + _current_root = current_root / Path(item.name) + self.list_dataset_files_recursively(files, item, _current_root) + else: + parts = current_root.parts + bucket_name = parts[0] + file_name = item.name + file_size = 0 + # lets assume we have only one file + if item.files: + file_name = Path(item.files[0].as_dict()['content']['s3key']).name + file_size = item.files[0].as_dict()['content']['size'] + # if this is in the root directory, the object_name is the filename only + if 
len(parts) > 1: + object_name = str(Path(*list(parts)[1:])/ Path(file_name)) + else: + object_name = str(Path(file_name)) + + file_uuid = str(Path(bucket_name) / Path(object_name)) + file_id = item.id + created_at = item.created_at + last_modified = item.updated_at + fmd = FileMetaData(bucket_name=bucket_name, file_name=file_name, object_name=object_name, + location=DATCORE_STR, location_id=DATCORE_ID, file_uuid=file_uuid, file_id=file_id, + raw_file_path=file_uuid, display_file_path=file_uuid, created_at=created_at, + last_modified=last_modified, file_size=file_size) + files.append(fmd) + + def create_dataset(self, ds_name, force_delete=False): """ Creates a new dataset for the current user and returns it. Returns existing one @@ -123,9 +182,9 @@ def exists_dataset(self, ds_name): ds = self.get_dataset(ds_name) return ds is not None - def upload_file(self, dataset, filepath, meta_data = None): + def upload_file(self, destination: str, filepath: str, meta_data = None): """ - Uploads a file to a given dataset given its filepath on the host. Optionally + Uploads a file to a given dataset/collection given its filepath on the host. Optionally adds some meta data Args: @@ -135,23 +194,27 @@ def upload_file(self, dataset, filepath, meta_data = None): Note: Blackfynn postprocesses data based on file endings. If it can do that - the filenames on the server change. This makes it difficult to retrieve - them back by name (see get_sources below). Also, for now we assume we have - only single file data. + the filenames on the server change. """ + # parse the destination and try to find the package_id to upload to + collection, collection_id = self._collection_from_destination(destination) - + if collection is None: + return False files = [filepath] # pylint: disable = E1101 - self.client._api.io.upload_files(dataset, files, display_progress=True) - dataset.update() + self.client._api.io.upload_files(collection, files, display_progress=True) + collection.update() if meta_data is not None: - filename = os.path.basename(filepath) - package = self.get_package(dataset, filename) - if package is not None: - self._update_meta_data(package, meta_data) + for f in files: + filename = os.path.basename(f) + package = self.get_package(collection, filename) + if package is not None: + self._update_meta_data(package, meta_data) + + return True def _update_meta_data(self, package, meta_data): """ @@ -186,24 +249,24 @@ def download_file(self, source, filename, destination_path): return True return False - def download_link(self, source, filename): + def download_link(self, destination, filename): """ - returns presigned url for download, source is a dataset + Returns a presigned url for download; destination is a dataset or collection """ - + collection, collection_id = self._collection_from_destination(destination) # pylint: disable = E1101 - - for item in source: - if item.name == filename: - file_desc = self.client._api.packages.get_sources(item.id)[0] - url = self.client._api.packages.get_presigned_url_for_file(item.id, file_desc.id) - return url + for item in collection: + if isinstance(item, DataPackage): + if Path(item.files[0].as_dict()['content']['s3key']).name == filename: + file_desc = self.client._api.packages.get_sources(item.id)[0] + url = self.client._api.packages.get_presigned_url_for_file(item.id, file_desc.id) + return url return "" - def exists_file(self, source, filename): + def get_package(self, source, filename): """ - Checks if file exists in source + Returns the package with the given name from source, if it exists
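Taken together, `_collection_from_destination()` and the rewritten upload/download methods let callers address nested collections with a path-like destination; a hypothetical round trip (dataset, folder, and file names invented for illustration):

```python
# Hypothetical usage of the destination-based API above: "MyDataset/level1"
# is split into the dataset name plus the collection chain that
# _get_collection_id() walks one segment at a time.
import os

from simcore_service_storage.datcore import DatcoreClient

client = DatcoreClient(api_token=os.environ["BF_API_KEY"],
                       api_secret=os.environ["BF_API_SECRET"],
                       host="https://api.blackfynn.io")

if client.upload_file("MyDataset/level1", "/tmp/data.txt"):
    url = client.download_link("MyDataset/level1", "data.txt")
```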
Args: source (dataset/collection): The dataset or collection to download from @@ -213,49 +276,56 @@ source.update() for item in source: if item.name == filename: - return True + return item - return False + return None - def get_package(self, source, filename): + def delete_file(self, destination, filename): """ - Returns package from source by name if exists + Deletes a file by name from the given destination Args: - source (dataset/collection): The dataset or collection to donwload from + destination (dataset/collection): The dataset or collection to delete from filename (str): Name of the file """ + collection, collection_id = self._collection_from_destination(destination) - source.update() - for item in source: - if item.name == filename: - return item + if collection is None: + return False - return None + collection.update() + for item in collection: + if isinstance(item, DataPackage): + if Path(item.files[0].as_dict()['content']['s3key']).name == filename: + self.client.delete(item) + return True - def delete_file(self, source, filename): + return False + + def delete_file_by_id(self, id: str): """ - Deletes file by name from source by name + Deletes file by id Args: - source (dataset/collection): The dataset or collection to donwload from - filename (str): Name of the file + id (str): datcore id of the file """ - source.update() - for item in source: - if item.name == filename: - self.client.delete(item) + self.client.delete(id) - def delete_files(self, source): + def delete_files(self, destination): """ - Deletes all files in source + Deletes all files in destination Args: - source (dataset/collection): The dataset or collection to donwload from + destination (dataset/collection): The dataset or collection to delete from """ - source.update() - for item in source: + collection, collection_id = self._collection_from_destination(destination) + + if collection is None: + return False + + collection.update() + for item in collection: self.client.delete(item) def update_meta_data(self, dataset, filename, meta_data): diff --git a/services/storage/src/simcore_service_storage/datcore_wrapper.py b/services/storage/src/simcore_service_storage/datcore_wrapper.py index f51053bb96a..89b3af5f92d 100644 --- a/services/storage/src/simcore_service_storage/datcore_wrapper.py +++ b/services/storage/src/simcore_service_storage/datcore_wrapper.py @@ -1,57 +1,26 @@ import asyncio import json import logging -import os from concurrent.futures import ThreadPoolExecutor -from functools import partial, wraps +from functools import wraps from pathlib import Path -from textwrap import dedent from typing import List import attr -import execnet +from .datcore import DatcoreClient from .models import FileMetaData -from .settings import DATCORE_ID, DATCORE_STR FileMetaDataVec = List[FileMetaData] CURRENT_DIR = Path(__file__).resolve().parent logger = logging.getLogger(__name__) +#FIXME: W0703: Catching too general exception Exception (broad-except) + # pylint: disable=W0703 -#TODO: Use async callbacks for retreival of progress and pass via rabbit to server - -def call_python_2(module, function, args, python_exec: Path): - """ calls a module::function from python2 with the arguments list - """ - # pylint: disable=E1101 - # "E1101:Module 'execnet' has no 'makegateway' member", - gw = execnet.makegateway("popen//python=%s" % python_exec) - channel = gw.remote_exec(""" - from %s import %s as the_function - channel.send(the_function(*channel.receive())) - """ % (module, function)) -
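With the execnet bridge removed, each wrapper method below calls the blackfynn client directly and relies on the module's `make_async` decorator to push the blocking call onto the thread pool. The decorator's body falls outside the visible hunks, so this is a minimal sketch of what such a decorator can look like, not the verbatim implementation:

```python
# Sketch of a make_async-style decorator: run the blocking blackfynn call in
# the wrapper's ThreadPoolExecutor so aiohttp handlers can await it.
from functools import partial, wraps

def make_async(func):
    @wraps(func)
    async def async_wrapper(self, *args, **kwargs):
        # run_in_executor forwards positional args only, hence the partial
        return await self.loop.run_in_executor(
            self.pool, partial(func, self, *args, **kwargs))
    return async_wrapper
```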
channel.send(args) - return channel.receive() - -def call_python_2_script(script: str, python_exec: Path): - """ calls an arbitrary script with remote interpreter - - MaG: I wonder how secure it is to pass the tokens that way... - - """ - prefix = "import sys\n" \ - "sys.path.append('%s')\n" % CURRENT_DIR - script = prefix + dedent(script) - - # pylint: disable=E1101 - # for now keep the Pool mechanims for asynchronizity but switch to python3 - python_exec = "python3" - gw = execnet.makegateway("popen//python=%s" % python_exec) - channel = gw.remote_exec(script) - return channel.receive() +#TODO: Use async callbacks for retreival of progress and pass via rabbit to server def make_async(func): @wraps(func) async def async_wrapper(self, *args, **kwargs): @@ -65,213 +34,95 @@ async def async_wrapper(self, *args, **kwargs): class DatcoreWrapper: """ Wrapper to call the python2 api from datcore - Assumes that python 2 is installed in a virtual env + This can go away now. Next cleanup round... """ # pylint: disable=R0913 # Too many arguments - def __init__(self, api_token: str, api_secret: str, python2_exec: Path, loop: object, pool: ThreadPoolExecutor): + def __init__(self, api_token: str, api_secret: str, loop: object, pool: ThreadPoolExecutor): self.api_token = api_token self.api_secret = api_secret self.loop = loop self.pool = pool - #TODO: guarantee that python2_exec is a valid - self._py2_call = partial(call_python_2_script, python_exec=python2_exec) + self.d_client = DatcoreClient(api_token=api_token, api_secret=api_secret, + host='https://api.blackfynn.io') @make_async - def list_files(self, regex = "", sortby = "")->FileMetaDataVec: #pylint: disable=W0613 - # FIXME: W0613:Unused argument 'regex', sortby!!! - script = """ - from datcore import DatcoreClient - try: - api_token = "%s" - api_secret = "%s" - - d_client = DatcoreClient(api_token=api_token, api_secret=api_secret, - host='https://api.blackfynn.io') - - files = d_client.list_files() - - channel.send(files) - except Exception as e: - channel.send([]) - - """%(self.api_token, self.api_secret) + def list_files_recursively(self, regex = "", sortby = "")->FileMetaDataVec: #pylint: disable=W0613 + files = [] + try: + files = self.d_client.list_files_recursively() + except Exception as e: + logger.exception("Error listing datcore files %s", e) - - files = self._py2_call(script) - - data = [] - for f in files: - # extract bucket name, object name and filename - parts = f.strip("/").split("/") - file_name = parts[-1] - if len(parts) > 1: - bucket_name = parts[0] - object_name = "/".join(parts[1:]) - else: - bucket_name = "" - object_name = file_name - - file_uuid = os.path.join(bucket_name, object_name) - # at the moment, no metadata there - fmd = FileMetaData(bucket_name=bucket_name, file_name=file_name, object_name=object_name, - location=DATCORE_STR, location_id=DATCORE_ID, file_uuid=file_uuid) - data.append(fmd) - - return data + return files @make_async - def delete_file(self, dataset: str, filename: str): + def delete_file(self, destination: str, filename: str): # the object can be found in dataset/filename <-> bucket_name/object_name - script = """ - from datcore import DatcoreClient - - api_token = "{0}" - api_secret = "{1}" - try: - d_client = DatcoreClient(api_token=api_token, api_secret=api_secret, - host='https://api.blackfynn.io') - - ds = d_client.get_dataset("{2}") - if ds is not None: - d_client.delete_file(ds, "{3}") - - channel.send(True) - - except Exception as e: - channel.send(False) - """.format(self.api_token, 
self.api_secret, dataset, filename) - - return self._py2_call(script) + try: + self.d_client.delete_file(destination, filename) + except Exception as e: + logger.exception("Error deleting datcore file %s", e) @make_async - def download_link(self, dataset: str, filename: str): - script = """ - from datcore import DatcoreClient - - api_token = "{0}" - api_secret = "{1}" - try: - d_client = DatcoreClient(api_token=api_token, api_secret=api_secret, - host='https://api.blackfynn.io') - - ds = d_client.get_dataset("{2}") - url = "" - if ds is not None: - url = d_client.download_link(ds, "{3}") - - channel.send(url) + def download_link(self, destination: str, filename: str): + url = "" + try: + url = self.d_client.download_link(destination, filename) + except Exception as e: + logger.exception("Error getting datcore download link %s", e) - except Exception as e: - channel.send("") - """.format(self.api_token, self.api_secret, dataset, filename) + return url - return self._py2_call(script) @make_async def create_test_dataset(self, dataset): - script = """ - from datcore import DatcoreClient - - api_token = "{0}" - api_secret = "{1}" - try: - d_client = DatcoreClient(api_token=api_token, api_secret=api_secret, - host='https://api.blackfynn.io') - - ds = d_client.get_dataset("{2}") - if ds is not None: - d_client.delete_files(ds) - else: - d_client.create_dataset("{2}") - - channel.send(None) - except Exception as e: - channel.send(False) - """.format(self.api_token, self.api_secret, dataset) + try: + ds = self.d_client.get_dataset(dataset) + if ds is not None: + self.d_client.delete_files(dataset) + else: + self.d_client.create_dataset(dataset) + except Exception as e: + logger.exception("Error creating test dataset %s", e) - return self._py2_call(script) @make_async def delete_test_dataset(self, dataset): - script = """ - from datcore import DatcoreClient - - api_token = "{0}" - api_secret = "{1}" - try: - d_client = DatcoreClient(api_token=api_token, api_secret=api_secret, - host='https://api.blackfynn.io') - - ds = d_client.get_dataset("{2}") - if ds is not None: - d_client.delete_files(ds) - - channel.send(True) - except Exception as e: - channel.send(False) - - """.format(self.api_token, self.api_secret, dataset) - - return self._py2_call(script) + try: + ds = self.d_client.get_dataset(dataset) + if ds is not None: + self.d_client.delete_files(dataset) + except Exception as e: + logger.exception("Error deleting test dataset %s", e) @make_async - def upload_file(self, dataset: str, local_path: str, meta_data: FileMetaData = None): + def upload_file(self, destination: str, local_path: str, meta_data: FileMetaData = None): json_meta = "" if meta_data: json_meta = json.dumps(attr.asdict(meta_data)) - - script = """ - from datcore import DatcoreClient - import json - - api_token = "{0}" - api_secret = "{1}" - - try: - d_client = DatcoreClient(api_token=api_token, api_secret=api_secret, - host='https://api.blackfynn.io') - - ds = d_client.get_dataset("{2}") - - str_meta = '{4}' - if str_meta : - meta_data = json.loads(str_meta) - d_client.upload_file(ds, "{3}", meta_data) - else: - d_client.upload_file(ds, "{3}") - - channel.send(True) - - except Exception as e: - channel.send(False) - - """.format(self.api_token, self.api_secret, dataset, local_path, json_meta) - - return self._py2_call(script) + try: + str_meta = json_meta + result = False + if str_meta : + meta_data = json.loads(str_meta) + result = self.d_client.upload_file(destination, local_path, meta_data) + else: + result = 
self.d_client.upload_file(destination, local_path) + return result + except Exception as e: + logger.exception("Error uploading file to datcore %s", e) + return False @make_async def ping(self): - script = """ - from datcore import DatcoreClient - - api_token = "{0}" - api_secret = "{1}" - - try: - d_client = DatcoreClient(api_token=api_token, api_secret=api_secret, - host='https://api.blackfynn.io') - - profile = d_client.profile() - ok = profile is not None - channel.send(ok) - - except Exception as e: - channel.send(False) - - """.format(self.api_token, self.api_secret) - - return self._py2_call(script) + try: + profile = self.d_client.profile() + ok = profile is not None + return ok + except Exception as e: + logger.exception("Error pinging %s", e) + return False diff --git a/services/storage/src/simcore_service_storage/db_tokens.py b/services/storage/src/simcore_service_storage/db_tokens.py index 2880d66e057..b3f31f8f2f9 100644 --- a/services/storage/src/simcore_service_storage/db_tokens.py +++ b/services/storage/src/simcore_service_storage/db_tokens.py @@ -7,6 +7,7 @@ from psycopg2 import Error as DbApiError from tenacity import before_sleep_log, retry, stop_after_attempt, wait_random +from .models import tokens from .settings import APP_CONFIG_KEY, APP_DB_ENGINE_KEY log = logging.getLogger(__name__) @@ -14,16 +15,6 @@ RETRY_WAIT_SECS = {"min":1, "max":3} RETRY_COUNT = 3 -# FIXME: this is a temporary solution DO NOT USE. This table needs to be in sync -# with services/web/server/src/simcore_service_webserver/db_models.py -_metadata = sa.MetaData() -_tokens = sa.Table("tokens", _metadata, - sa.Column("token_id", sa.BigInteger, nullable=False, primary_key=True), - sa.Column("user_id", sa.BigInteger, nullable=False), - sa.Column("token_service", sa.String, nullable=False), - sa.Column("token_data", sa.JSON, nullable=False), -) - @retry(wait=wait_random(**RETRY_WAIT_SECS), stop=stop_after_attempt(RETRY_COUNT), @@ -31,7 +22,7 @@ reraise=True) async def _get_tokens_from_db(engine, userid): async with engine.acquire() as conn: - stmt = sa.select([_tokens, ]).where(_tokens.c.user_id == userid) + stmt = sa.select([tokens, ]).where(tokens.c.user_id == userid) result = await conn.execute(stmt) row = await result.first() data = dict(row) if row else {} diff --git a/services/storage/src/simcore_service_storage/dsm.py b/services/storage/src/simcore_service_storage/dsm.py index 8bcf25b55bb..b9a284d7a8b 100644 --- a/services/storage/src/simcore_service_storage/dsm.py +++ b/services/storage/src/simcore_service_storage/dsm.py @@ -1,3 +1,4 @@ +import asyncio import logging import os import re @@ -6,12 +7,12 @@ from concurrent.futures import ThreadPoolExecutor from operator import itemgetter from pathlib import Path -from typing import List, Tuple, Dict +from typing import Dict, List, Tuple +import aiobotocore import aiofiles import aiohttp import attr -import asyncio import sqlalchemy as sa from aiohttp import web from aiopg.sa import Engine @@ -19,16 +20,16 @@ from yarl import URL from s3wrapper.s3_client import S3Client +from servicelib.aiopg_utils import DBAPIError from .datcore_wrapper import DatcoreWrapper from .models import (FileMetaData, _location_from_id, _parse_datcore, - file_meta_data) + file_meta_data, projects, user_to_projects) +from .s3 import get_config_s3 from .settings import (APP_CONFIG_KEY, APP_DB_ENGINE_KEY, APP_DSM_KEY, APP_S3_KEY, DATCORE_ID, DATCORE_STR, SIMCORE_S3_ID, SIMCORE_S3_STR) -from .s3 import get_config_s3 - #pylint: disable=W0212 #FIXME: W0212:Access to a protected 
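The token lookup in `db_tokens.py` now reads the shared `tokens` table through aiopg, wrapped in a standard tenacity retry; isolated from the module, the pattern is (constants mirror `RETRY_WAIT_SECS`/`RETRY_COUNT` above, the helper name is hypothetical):

```python
# Standalone sketch of the retried aiopg lookup in _get_tokens_from_db.
import logging

import sqlalchemy as sa
from tenacity import before_sleep_log, retry, stop_after_attempt, wait_random

log = logging.getLogger(__name__)

@retry(wait=wait_random(min=1, max=3),
       stop=stop_after_attempt(3),
       before_sleep=before_sleep_log(log, logging.WARNING),
       reraise=True)
async def get_tokens(engine, userid, tokens):
    async with engine.acquire() as conn:
        stmt = sa.select([tokens]).where(tokens.c.user_id == userid)
        result = await conn.execute(stmt)
        row = await result.first()
        return dict(row) if row else {}
```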
member _result_proxy of a client class @@ -42,10 +43,8 @@ async def _setup_dsm(app: web.Application): cfg = app[APP_CONFIG_KEY] - main_cfg = cfg["main"] main_cfg = cfg["main"] - python27_exec = Path(main_cfg["python2"]) engine = app.get(APP_DB_ENGINE_KEY) loop = asyncio.get_event_loop() @@ -57,7 +56,8 @@ async def _setup_dsm(app: web.Application): s3_cfg = get_config_s3(app) bucket_name = s3_cfg["bucket_name"] - dsm = DataStorageManager(s3_client, python27_exec, engine, loop, pool, bucket_name) + testing = main_cfg["testing"] + dsm = DataStorageManager(s3_client, engine, loop, pool, bucket_name, not testing) app[APP_DSM_KEY] = dsm @@ -108,11 +108,12 @@ class DataStorageManager: https://docs.minio.io/docs/minio-bucket-notification-guide.html """ s3_client: S3Client - python27_exec: Path engine: Engine loop: object pool: ThreadPoolExecutor simcore_bucket_name: str + has_project_db: bool + datcore_tokens: Dict[str, DatCoreApiToken]=attr.Factory(dict) # TODO: perhaps can be used a cache? add a lifetime? @@ -155,13 +156,15 @@ async def ping_datcore(self, user_id: str) -> bool: api_token, api_secret = self._get_datcore_tokens(user_id) logger.info("token: %s, secret %s", api_token, api_secret) if api_token: - dcw = DatcoreWrapper(api_token, api_secret, self.python27_exec, self.loop, self.pool) + dcw = DatcoreWrapper(api_token, api_secret, self.loop, self.pool) profile = await dcw.ping() if profile: return True return False # pylint: disable=too-many-arguments + # pylint: disable=too-many-branches + # pylint: disable=too-many-statements async def list_files(self, user_id: str, location: str, uuid_filter: str ="", regex: str="", sortby: str="") -> FileMetaDataVec: """ Returns a list of file paths @@ -183,10 +186,81 @@ async def list_files(self, user_id: str, location: str, uuid_filter: str ="", re result_dict = dict(zip(row._result_proxy.keys, row._row)) d = FileMetaData(**result_dict) data.append(d) + + if self.has_project_db: + uuid_name_dict = {} + # now parse the project to search for node/project names + try: + async with self.engine.acquire() as conn: + joint_table = user_to_projects.join(projects) + query = sa.select([projects]).select_from(joint_table)\ + .where(user_to_projects.c.user_id == user_id) + + async for row in conn.execute(query): + proj_data = {key:value for key,value in row.items()} + + uuid_name_dict[proj_data["uuid"]] = proj_data["name"] + wb = proj_data['workbench'] + for node in wb.keys(): + uuid_name_dict[node] = wb[node]['label'] + except DBAPIError as _err: + logger.exception("Error querying database for project names") + + if not uuid_name_dict: + # there seems to be no project whatsoever for user_id + return [] + + # only keep files from non-deleted project --> This needs to be fixed + clean_data = [] + for d in data: + if d.project_id in uuid_name_dict: + d.project_name = uuid_name_dict[d.project_id] + if d.node_id in uuid_name_dict: + d.node_name = uuid_name_dict[d.node_id] + + d.raw_file_path = str(Path(d.project_id) / Path(d.node_id) / Path(d.file_name)) + d.display_file_path = d.raw_file_path + if d.node_name and d.project_name: + d.display_file_path = str(Path(d.project_name) / Path(d.node_name) / Path(d.file_name)) + async with self.engine.acquire() as conn: + query = file_meta_data.update().\ + where(and_(file_meta_data.c.node_id==d.node_id, + file_meta_data.c.user_id==d.user_id)).\ + values(project_name=d.project_name, + node_name = d.node_name, + raw_file_path=d.raw_file_path, + display_file_path=d.display_file_path) + await conn.execute(query) + 
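The project/node renaming added to `list_files()` hinges on a single uuid-to-name lookup over the joined tables; as an isolated sketch (same tables as above, hypothetical helper name):

```python
# Sketch of the uuid -> display-name map built inside list_files() above.
import sqlalchemy as sa

async def uuid_to_name_map(engine, user_id, projects, user_to_projects):
    names = {}
    async with engine.acquire() as conn:
        joint_table = user_to_projects.join(projects)
        query = (sa.select([projects])
                 .select_from(joint_table)
                 .where(user_to_projects.c.user_id == user_id))
        async for row in conn.execute(query):
            proj = dict(row.items())
            names[proj["uuid"]] = proj["name"]            # project uuid -> name
            for node_id, node in proj["workbench"].items():
                names[node_id] = node["label"]            # node uuid -> label
    return names
```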
clean_data.append(d) + + data = clean_data + + # same as above, make sure file is physically present on s3 + clean_data = [] + # MaG: This is inefficient: Do this automatically when file is modified + _loop = asyncio.get_event_loop() + session = aiobotocore.get_session(loop=_loop) + async with session.create_client('s3', endpoint_url="http://"+self.s3_client.endpoint, aws_access_key_id=self.s3_client.access_key, + aws_secret_access_key=self.s3_client.secret_key) as client: + responses = await asyncio.gather(*[client.list_objects_v2(Bucket=d.bucket_name, Prefix=_d) for _d in [__d.object_name for __d in data]]) + for d, resp in zip(data, responses): + if 'Contents' in resp: + clean_data.append(d) + d.file_size = resp['Contents'][0]['Size'] + d.last_modified = str(resp['Contents'][0]['LastModified']) + async with self.engine.acquire() as conn: + query = file_meta_data.update().\ + where(and_(file_meta_data.c.node_id==d.node_id, + file_meta_data.c.user_id==d.user_id)).\ + values(file_size=d.file_size, + last_modified=d.last_modified) + await conn.execute(query) + data = clean_data + elif location == DATCORE_STR: api_token, api_secret = self._get_datcore_tokens(user_id) - dcw = DatcoreWrapper(api_token, api_secret, self.python27_exec, self.loop, self.pool) - data = await dcw.list_files() + dcw = DatcoreWrapper(api_token, api_secret, self.loop, self.pool) + data = await dcw.list_files_recursively() if sortby: data = sorted(data, key=itemgetter(sortby)) @@ -226,8 +300,8 @@ async def list_file(self, user_id: str, location: str, file_uuid: str) -> FileMe return d elif location == DATCORE_STR: api_token, api_secret = self._get_datcore_tokens(user_id) - _dcw = DatcoreWrapper(api_token, api_secret, self.python27_exec, self.loop, self.pool) - data = await _dcw.list_files + _dcw = DatcoreWrapper(api_token, api_secret, self.loop, self.pool) + data = [] #await _dcw.list_file(file_uuid) return data async def delete_file(self, user_id: str, location: str, file_uuid: str): @@ -255,16 +329,15 @@ async def delete_file(self, user_id: str, location: str, file_uuid: str): elif location == DATCORE_STR: api_token, api_secret = self._get_datcore_tokens(user_id) - dcw = DatcoreWrapper(api_token, api_secret, self.python27_exec, self.loop, self.pool) - dataset, filename = _parse_datcore(file_uuid) -# return await dcw.delete_file(dataset=dataset, filename=filename) - return await dcw.delete_file(dataset, filename) + dcw = DatcoreWrapper(api_token, api_secret, self.loop, self.pool) + destination, filename = _parse_datcore(file_uuid) + return await dcw.delete_file(destination, filename) - async def upload_file_to_datcore(self, user_id: str, local_file_path: str, datcore_bucket: str, fmd: FileMetaData = None): # pylint: disable=W0613 + async def upload_file_to_datcore(self, user_id: str, local_file_path: str, destination: str, fmd: FileMetaData = None): # pylint: disable=W0613 # uploads a locally available file to dat core given the storage path, optionally attached some meta data api_token, api_secret = self._get_datcore_tokens(user_id) - dcw = DatcoreWrapper(api_token, api_secret, self.python27_exec, self.loop, self.pool) - await dcw.upload_file(datcore_bucket, local_file_path, fmd) + dcw = DatcoreWrapper(api_token, api_secret, self.loop, self.pool) + await dcw.upload_file(destination, local_file_path, fmd) # actually we have to query the master db async def upload_link(self, user_id: str, file_uuid: str): @@ -289,10 +362,9 @@ async def copy_file(self, user_id: str, dest_location: str, dest_uuid: str, sour # source is 
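The S3 consistency pass above fires one `list_objects_v2` per file and gathers the calls concurrently; self-contained, with placeholder endpoint and credentials, the check reads:

```python
# Sketch of the concurrent existence/size check used in list_files() above;
# a prefix whose response has no 'Contents' key has no object behind it.
import asyncio

import aiobotocore

async def object_sizes(endpoint_url, access_key, secret_key, bucket, prefixes):
    session = aiobotocore.get_session()
    async with session.create_client(
            "s3", endpoint_url=endpoint_url,
            aws_access_key_id=access_key,
            aws_secret_access_key=secret_key) as client:
        responses = await asyncio.gather(
            *[client.list_objects_v2(Bucket=bucket, Prefix=p) for p in prefixes])
    return {p: r["Contents"][0]["Size"]
            for p, r in zip(prefixes, responses) if "Contents" in r}
```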
s3, get link and copy to datcore bucket_name = self.simcore_bucket_name object_name = source_uuid - datcore_bucket, file_path = _parse_datcore(dest_uuid) - filename = file_path.split("/")[-1] + destination, filename = _parse_datcore(dest_uuid) tmp_dirpath = tempfile.mkdtemp() - local_file_path = os.path.join(tmp_dirpath,filename) + local_file_path = os.path.join(tmp_dirpath, filename) url = self.s3_client.create_presigned_get_url(bucket_name, object_name) async with aiohttp.ClientSession() as session: async with session.get(url) as resp: @@ -302,7 +374,7 @@ async def copy_file(self, user_id: str, dest_location: str, dest_uuid: str, sour await f.close() # and then upload await self.upload_file_to_datcore(user_id=user_id, local_file_path=local_file_path, - datcore_bucket=datcore_bucket) + destination=destination) shutil.rmtree(tmp_dirpath) elif dest_location == SIMCORE_S3_STR: # source is s3, location is s3 @@ -350,7 +422,7 @@ async def download_link(self, user_id: str, location: str, file_uuid: str)->str: link = self.s3_client.create_presigned_get_url(bucket_name, object_name) elif location == DATCORE_STR: api_token, api_secret = self._get_datcore_tokens(user_id) - dcw = DatcoreWrapper(api_token, api_secret, self.python27_exec, self.loop, self.pool) - dataset, filename = _parse_datcore(file_uuid) - link = await dcw.download_link(dataset, filename) + dcw = DatcoreWrapper(api_token, api_secret, self.loop, self.pool) + destination, filename = _parse_datcore(file_uuid) + link = await dcw.download_link(destination, filename) return link diff --git a/services/storage/src/simcore_service_storage/models.py b/services/storage/src/simcore_service_storage/models.py index c3abcd7fbea..2c6f8b2b9bf 100644 --- a/services/storage/src/simcore_service_storage/models.py +++ b/services/storage/src/simcore_service_storage/models.py @@ -1,12 +1,17 @@ """ Database models """ +import datetime +import uuid +from pathlib import Path from typing import Tuple import attr -import sqlalchemy as sa -from .settings import DATCORE_STR, SIMCORE_S3_ID, SIMCORE_S3_STR +from simcore_postgres_database.storage_models import (file_meta_data, metadata, + projects, tokens, user_to_projects, users) +from simcore_service_storage.settings import (DATCORE_STR, SIMCORE_S3_ID, + SIMCORE_S3_STR) #FIXME: W0611:Unused UUID imported from sqlalchemy.dialects.postgresql #from sqlalchemy.dialects.postgresql import UUID @@ -14,40 +19,15 @@ #FIXME: R0902: Too many instance attributes (11/7) (too-many-instance-attributes) #pylint: disable=R0902 -metadata = sa.MetaData() - -# File meta data -file_meta_data = sa.Table( - "file_meta_data", metadata, - sa.Column("file_uuid", sa.String, primary_key=True), - sa.Column("location_id", sa.String), - sa.Column("location", sa.String), - sa.Column("bucket_name", sa.String), - sa.Column("object_name", sa.String), - sa.Column("project_id", sa.String), - sa.Column("project_name", sa.String), - sa.Column("node_id", sa.String), - sa.Column("node_name", sa.String), - sa.Column("file_name", sa.String), - sa.Column("user_id", sa.String), - sa.Column("user_name", sa.String) -# sa.Column("state", sa.String()) -) - def _parse_datcore(file_uuid: str) -> Tuple[str, str]: - # we should have 12/123123123/111.txt - - object_name = "invalid" - dataset_name = "invalid" - - parts = file_uuid.split("/") + # we should have 12/123123123/111.txt and return (12/123123123, 111.txt) - if len(parts) > 1: - dataset_name = parts[0] - object_name = "/".join(parts[1:]) + file_path = Path(file_uuid) + destination = file_path.parent + 
file_name = file_path.name - return dataset_name, object_name + return destination, file_name def _locations(): # TODO: so far this is hardcoded @@ -81,7 +61,6 @@ def _location_from_str(location : str) ->str: return intstr -@attr.s(auto_attribs=True) class FileMetaData: """ This is a proposal; probably not everything is needed. It is actually overkill @@ -102,23 +81,32 @@ class FileMetaData: bucket_name/project_id/node_id/file_name = /bucket_name/object_name + file_id : unique uuid for the file + + simcore.s3: uuid created upon insertion + datcore: datcore uuid + + raw_file_path : raw path to file + + simcore.s3: proj_id/node_id/filename.ending + emailaddress/... + datcore: dataset/collection/filename.ending + + display_file_path: human readable path to file + + simcore.s3: proj_name/node_name/filename.ending + my_documents/... + datcore: dataset/collection/filename.ending + + created_at : time stamp + last_modified : time stamp + file_size : size in bytes + + TODO: state: one of OK, UPLOADING, DELETED """ - file_uuid: str="" - location_id: str="" - location: str="" - bucket_name: str="" - object_name: str="" - project_id: str="" - project_name: str="" - node_id: str="" - node_name: str="" - file_name: str="" - user_id: str="" - user_name: str="" - + #pylint: disable=attribute-defined-outside-init def simcore_from_uuid(self, file_uuid: str, bucket_name: str): parts = file_uuid.split("/") assert len(parts) == 3 @@ -131,3 +119,33 @@ def simcore_from_uuid(self, file_uuid: str, bucket_name: str): self.project_id = parts[0] self.node_id = parts[1] self.file_uuid = file_uuid + self.file_id = str(uuid.uuid4()) + self.raw_file_path = self.file_uuid + self.display_file_path = str(Path("not") / Path("yet") / Path("implemented")) + self.created_at = str(datetime.datetime.now()) + self.last_modified = self.created_at + self.file_size = -1 + + def __str__(self): + d = attr.asdict(self) + _str ="" + for _d in d: + _str += " {0: <25}: {1}\n".format(_d, str(d[_d])) + return _str + + +attr.s( + these={c.name:attr.ib(default=None) for c in file_meta_data.c}, + init=True, + kw_only=True)(FileMetaData) + + +__all__ = [ + "file_meta_data", + "tokens", + "metadata", + "FileMetaData", + "projects", + "users", + "user_to_projects" +] diff --git a/services/storage/tests/_test_rawdatcore.py b/services/storage/tests/_test_rawdatcore.py new file mode 100644 index 00000000000..c4e4930319d --- /dev/null +++ b/services/storage/tests/_test_rawdatcore.py @@ -0,0 +1,83 @@ +import os +import typing +from pathlib import Path + +from blackfynn import Blackfynn +from blackfynn.models import Collection + +import utils +from simcore_service_storage.datcore import DatcoreClient +from simcore_service_storage.models import FileMetaData + +import tempfile + +dir_path = os.path.dirname(os.path.realpath(__file__)) +api_token = os.environ.get("BF_API_KEY") +api_secret = os.environ.get("BF_API_SECRET") + + +if utils.has_datcore_tokens(): + client = DatcoreClient(api_token=api_token, api_secret=api_secret) + api_secret = os.environ.get("BF_API_SECRET", "none") + destination = str(Path("MaG/level1/level2")) + fd, path = tempfile.mkstemp() + + try: + with os.fdopen(fd, 'w') as tmp: + # do stuff with temp file + tmp.write('stuff') + + f = client.upload_file(destination, path) + f = client.delete_file(destination, Path(path).name) + finally: + os.remove(path) + + files = [] + if True: + dataset = client.get_dataset("mag") + # dataset.print_tree() + client.list_dataset_files_recursively(files, dataset, Path(dataset.name)) + else: + files
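Rather than maintaining the attribute list by hand, `FileMetaData` is now decorated after its class body with one `attr.ib` per column of the `file_meta_data` table; the same trick on a toy table:

```python
# Sketch of the table-driven attrs decoration applied to FileMetaData above.
import attr
import sqlalchemy as sa

metadata = sa.MetaData()
demo = sa.Table("demo", metadata,
                sa.Column("file_uuid", sa.String, primary_key=True),
                sa.Column("file_size", sa.BigInteger))

class Record:
    pass

# one keyword-only attribute per table column, each defaulting to None
attr.s(these={c.name: attr.ib(default=None) for c in demo.c},
       init=True, kw_only=True)(Record)

record = Record(file_uuid="proj/node/file.txt")  # file_size stays None
```

This keeps the model and the database schema from drifting apart, at the cost of attributes pylint can no longer see, hence the attribute-defined-outside-init disable above.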
= client.list_files_recursively() + + fd, path = tempfile.mkstemp() + + try: + with os.fdopen(fd, 'w') as tmp: + # do stuff with temp file + tmp.write('stuff') + + + print(fd,path) + destination_path = Path("mag/level1/level2/bla.txt") + parts = destination_path.parts + assert len(parts) > 1 + dataset_name = parts[0] + object_path = Path(*parts[1:]) + file_name = object_path.name + collections = list(object_path.parent.parts) + destination = client.get_dataset(dataset_name) + + # check if dataset exists + def _get_collection_id(folder, _collections, collection_id): + if not len(_collections): + return collection_id + + current = _collections.pop(0) + for item in folder: + if isinstance(item, Collection) and item.name == current: + collection_id = item.id + folder = item + break + + return _get_collection_id(folder, _collections, collection_id) + + my_id = "" + my_id =_get_collection_id(destination, collections, my_id) + package = client.client.get(my_id) + client.upload_file(package, path) + print(my_id) + + finally: + os.remove(path) diff --git a/services/storage/tests/conftest.py b/services/storage/tests/conftest.py index e64bcc3afa9..49966ead860 100644 --- a/services/storage/tests/conftest.py +++ b/services/storage/tests/conftest.py @@ -6,6 +6,7 @@ # pylint:disable=unsupported-assignment-operation import asyncio +import datetime import os import subprocess import sys @@ -25,8 +26,8 @@ from simcore_service_storage.models import FileMetaData from simcore_service_storage.settings import (DATCORE_ID, DATCORE_STR, SIMCORE_S3_ID, SIMCORE_S3_STR) -from utils import ACCESS_KEY, BUCKET_NAME, DATABASE, PASS, SECRET_KEY, USER_ID, USER - +from utils import (ACCESS_KEY, BUCKET_NAME, DATABASE, PASS, SECRET_KEY, USER, + USER_ID) @pytest.fixture(scope='session') @@ -54,17 +55,6 @@ def osparc_api_specs_dir(osparc_simcore_root_dir): assert dirpath.exists() return dirpath -@pytest.fixture(scope='session') -def python27_exec(osparc_simcore_root_dir, tmpdir_factory, here): - python27_exec = "python3" - return python27_exec - - -@pytest.fixture(scope='session') -def python27_path(python27_exec): - return Path(python27_exec).parent.parent - # Assumes already created with make .venv27 - @pytest.fixture(scope='session') def docker_compose_file(here): @@ -189,6 +179,22 @@ def _create_files(count): return filepaths return _create_files +@pytest.fixture(scope="function") +def dsm_mockup_complete_db(postgres_service_url, s3_client): + utils.create_full_tables(url=postgres_service_url) + bucket_name = BUCKET_NAME + s3_client.create_bucket(bucket_name, delete_contents_if_exists=True) + + f = utils.data_dir() /Path("outputController.dat") + object_name = "161b8782-b13e-5840-9ae2-e2250c231001/ad9bda7f-1dc5-5480-ab22-5fef4fc53eac/outputController.dat" + s3_client.upload_file(bucket_name, object_name, f) + + f = utils.data_dir() /Path("notebooks.zip") + object_name = "161b8782-b13e-5840-9ae2-e2250c231001/a3941ea0-37c4-5c1d-a7b3-01b5fd8a80c8/notebooks.zip" + s3_client.upload_file(bucket_name, object_name, f) + yield + utils.drop_all_tables(url=postgres_service_url) + @pytest.fixture(scope="function") def dsm_mockup_db(postgres_service_url, s3_client, mock_files_factory): @@ -226,7 +232,10 @@ def dsm_mockup_db(postgres_service_url, s3_client, mock_files_factory): object_name = Path(str(project_id), str( node_id), str(counter)).as_posix() file_uuid = Path(object_name).as_posix() - + raw_file_path = file_uuid + display_file_path = str(Path(project_name)/Path(node)/Path(file_name)) + created_at = str(datetime.datetime.now()) 
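The new `dsm_mockup_complete_db` fixture above follows the usual build/yield/teardown shape; schematically (helper names as in `tests/utils.py`), with the code after `yield` running as fixture finalization whether or not the test passes:

```python
# Schematic of the csv-backed fixture above: create and seed, hand control
# to the test, then drop the tables again during teardown.
import pytest

@pytest.fixture(scope="function")
def mockup_complete_db(postgres_service_url, s3_client):
    utils.create_full_tables(url=postgres_service_url)   # loads the csv data
    s3_client.create_bucket(BUCKET_NAME, delete_contents_if_exists=True)
    yield
    utils.drop_all_tables(url=postgres_service_url)      # always cleaned up
```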
+ file_size = 1234 assert s3_client.upload_file(bucket_name, object_name, _file) d = {'file_uuid': file_uuid, @@ -240,7 +249,13 @@ def dsm_mockup_db(postgres_service_url, s3_client, mock_files_factory): 'node_name': node, 'file_name': file_name, 'user_id': str(user_id), - 'user_name': user_name + 'user_name': user_name, + "file_id": str(uuid.uuid4()), + "raw_file_path": file_uuid, + "display_file_path": display_file_path, + "created_at": created_at, + "last_modified": created_at, + "file_size": file_size, } counter = counter + 1 @@ -251,6 +266,7 @@ def dsm_mockup_db(postgres_service_url, s3_client, mock_files_factory): utils.insert_metadata(postgres_service_url, data[object_name]) + total_count = 0 for _obj in s3_client.list_objects_v2(bucket_name, recursive=True): total_count = total_count + 1 @@ -266,41 +282,33 @@ def dsm_mockup_db(postgres_service_url, s3_client, mock_files_factory): @pytest.fixture(scope="function") -async def datcore_testbucket(loop, python27_exec, mock_files_factory): +async def datcore_testbucket(loop, mock_files_factory): # TODO: what if I do not have an app to the the config from? api_token = os.environ.get("BF_API_KEY") api_secret = os.environ.get("BF_API_SECRET") + if api_secret is None: yield "no_bucket" return pool = ThreadPoolExecutor(2) - dcw = DatcoreWrapper(api_token, api_secret, python27_exec, loop, pool) + dcw = DatcoreWrapper(api_token, api_secret, loop, pool) await dcw.create_test_dataset(BUCKET_NAME) - tmp_files = mock_files_factory(2) for f in tmp_files: await dcw.upload_file(BUCKET_NAME, os.path.normpath(f)) - ready = False - counter = 0 - while not ready and counter < 5: - data = await dcw.list_files() - ready = len(data) == 2 - await asyncio.sleep(10) - counter = counter + 1 - yield BUCKET_NAME await dcw.delete_test_dataset(BUCKET_NAME) @pytest.fixture(scope="function") -def dsm_fixture(s3_client, python27_exec, postgres_engine, loop): +def dsm_fixture(s3_client, postgres_engine, loop): pool = ThreadPoolExecutor(3) dsm_fixture = DataStorageManager( - s3_client, python27_exec, postgres_engine, loop, pool, BUCKET_NAME) + s3_client, postgres_engine, loop, pool, BUCKET_NAME, False) api_token = os.environ.get("BF_API_KEY", "none") api_secret = os.environ.get("BF_API_SECRET", "none") diff --git a/services/storage/tests/data/file_meta_data.csv b/services/storage/tests/data/file_meta_data.csv new file mode 100644 index 00000000000..e9a8fb3f582 --- /dev/null +++ b/services/storage/tests/data/file_meta_data.csv @@ -0,0 +1,3 @@ +file_uuid,location_id,location,bucket_name,object_name,project_id,project_name,node_id,node_name,file_name,user_id,user_name +161b8782-b13e-5840-9ae2-e2250c231001/ad9bda7f-1dc5-5480-ab22-5fef4fc53eac/outputController.dat,0,simcore.s3,simcore-testing,161b8782-b13e-5840-9ae2-e2250c231001/ad9bda7f-1dc5-5480-ab22-5fef4fc53eac/outputController.dat,161b8782-b13e-5840-9ae2-e2250c231001,"",ad9bda7f-1dc5-5480-ab22-5fef4fc53eac,"",outputController.dat,21,"" +161b8782-b13e-5840-9ae2-e2250c231001/a3941ea0-37c4-5c1d-a7b3-01b5fd8a80c8/notebooks.zip,0,simcore.s3,simcore-testing,161b8782-b13e-5840-9ae2-e2250c231001/a3941ea0-37c4-5c1d-a7b3-01b5fd8a80c8/notebooks.zip,161b8782-b13e-5840-9ae2-e2250c231001,"",a3941ea0-37c4-5c1d-a7b3-01b5fd8a80c8,"",notebooks.zip,21,"" diff --git a/services/storage/tests/data/notebooks.zip b/services/storage/tests/data/notebooks.zip new file mode 100644 index 00000000000..94ebaf90016 --- /dev/null +++ b/services/storage/tests/data/notebooks.zip @@ -0,0 +1,4 @@ +1 +2 +3 +4 diff --git 
a/services/storage/tests/data/outputController.dat b/services/storage/tests/data/outputController.dat new file mode 100644 index 00000000000..94ebaf90016 --- /dev/null +++ b/services/storage/tests/data/outputController.dat @@ -0,0 +1,4 @@ +1 +2 +3 +4 diff --git a/services/storage/tests/data/projects.csv b/services/storage/tests/data/projects.csv new file mode 100644 index 00000000000..c233e10d2a3 --- /dev/null +++ b/services/storage/tests/data/projects.csv @@ -0,0 +1,3 @@ +id,type,uuid,name,description,thumbnail,prj_owner,creation_date,last_change_date,workbench,published +151,STANDARD,161b8782-b13e-5840-9ae2-e2250c231001,Kember use case,Kember Cordiac Model with PostPro Viewer,"",devops@itis.swiss,2019-06-27 11:42:03.168,2019-06-27 11:43:49.128,"{""ad9bda7f-1dc5-5480-ab22-5fef4fc53eac"": {""key"": ""simcore/services/comp/kember-cardiac-model"", ""version"": ""1.0.0"", ""label"": ""Kember cardiac model"", ""inputs"": {""dt"": 0.01, ""T"": 1000, ""forcing_factor"": 0}, ""inputNodes"": [], ""outputNode"": false, ""outputs"": {}, ""progress"": 100, ""thumbnail"": """", ""position"": {""x"": 50, ""y"": 100}}, ""a3941ea0-37c4-5c1d-a7b3-01b5fd8a80c8"": {""key"": ""simcore/services/dynamic/kember-viewer"", ""version"": ""2.9.0"", ""label"": ""kember-viewer"", ""inputs"": {""outputController"": {""nodeUuid"": ""ad9bda7f-1dc5-5480-ab22-5fef4fc53eac"", ""output"": ""out_1""}}, ""inputNodes"": [""ad9bda7f-1dc5-5480-ab22-5fef4fc53eac""], ""outputNode"": false, ""outputs"": {}, ""progress"": 100, ""thumbnail"": """", ""position"": {""x"": 300, ""y"": 100}}}",False +150,STANDARD,69f16620-eeb3-520a-a3b8-08d78e36c70d,"UCDavis use cases: 1D, 2D",Colleen Clancy 1D and 2D use cases with a file picker and PostPro viewers,"",devops@itis.swiss,2019-06-27 11:36:29.605,2019-06-27 11:41:57.861,"{""78b56cc9-020b-5d89-bbf2-75922360d255"": {""key"": ""simcore/services/frontend/file-picker"", ""version"": ""1.0.0"", ""label"": ""File Picker 1&2 D"", ""inputs"": {}, ""inputNodes"": [], ""outputNode"": false, ""outputs"": {""outFile"": {""store"": 1, ""path"": ""Shared Data/initial_WTstates""}}, ""progress"": 100, ""thumbnail"": """", ""position"": {""x"": 177, ""y"": 282}}, ""eb9793d9-c80b-5b67-a41c-66a09bb2ee8f"": {""key"": ""simcore/services/frontend/nodes-group"", ""version"": ""1.0.0"", ""label"": ""CC 1D"", ""inputs"": {}, ""inputNodes"": [""78b56cc9-020b-5d89-bbf2-75922360d255""], ""outputNode"": false, ""outputs"": {}, ""progress"": 0, ""thumbnail"": """", ""position"": {""x"": 477, ""y"": 145}}, ""8a0fbea5-0a2a-58e5-b27c-8d41d51726fb"": {""key"": ""simcore/services/frontend/nodes-group"", ""version"": ""1.0.0"", ""label"": ""CC 2D"", ""inputs"": {}, ""inputNodes"": [""78b56cc9-020b-5d89-bbf2-75922360d255"", ""eb9793d9-c80b-5b67-a41c-66a09bb2ee8f""], ""outputNode"": false, ""outputs"": {}, ""progress"": 0, ""thumbnail"": """", ""position"": {""x"": 645, ""y"": 370}}, ""83761fa1-4572-5660-b069-9f6f3ddda3d9"": {""key"": ""simcore/services/comp/ucdavis-1d-cardiac-model"", ""version"": ""1.0.0"", ""label"": ""DBP-Clancy-Rabbit-1-D solver"", ""inputs"": {""Na"": 0, ""Kr"": 0, ""BCL"": 10, ""NBeats"": 1, ""Ligand"": 0, ""cAMKII"": ""WT"", ""tw"": 5, ""tl"": 200, ""homogeneity"": ""heterogeneous"", ""in_10"": {""nodeUuid"": ""78b56cc9-020b-5d89-bbf2-75922360d255"", ""output"": ""outFile""}}, ""inputNodes"": [""78b56cc9-020b-5d89-bbf2-75922360d255""], ""outputNode"": true, ""outputs"": {}, ""parent"": ""eb9793d9-c80b-5b67-a41c-66a09bb2ee8f"", ""progress"": 0, ""thumbnail"": """", ""position"": {""x"": 100, ""y"": 350}}, 
""e7e903ad-d6ad-522f-90de-8e4098bdf1e6"": {""key"": ""simcore/services/dynamic/cc-1d-viewer"", ""version"": ""2.8.0"", ""label"": ""cc-1d-viewer"", ""inputs"": {""ECGs"": {""nodeUuid"": ""83761fa1-4572-5660-b069-9f6f3ddda3d9"", ""output"": ""out_1""}, ""y_1D"": {""nodeUuid"": ""83761fa1-4572-5660-b069-9f6f3ddda3d9"", ""output"": ""out_3""}}, ""inputNodes"": [""83761fa1-4572-5660-b069-9f6f3ddda3d9""], ""outputNode"": false, ""outputs"": {}, ""parent"": ""eb9793d9-c80b-5b67-a41c-66a09bb2ee8f"", ""progress"": 100, ""thumbnail"": """", ""position"": {""x"": 300, ""y"": 200}}, ""8b632db9-d1ca-57b2-aed1-9cbe35fece93"": {""key"": ""simcore/services/comp/ucdavis-2d-cardiac-model"", ""version"": ""1.0.0"", ""label"": ""DBP-Clancy-Rabbit-2-D solver"", ""inputs"": {""Na"": 0, ""Kr"": 0, ""BCL"": 10, ""Ligand"": 0, ""cAMKII"": ""WT"", ""tw"": 5, ""tl"": 200, ""homogeneity"": ""heterogeneous""}, ""inputNodes"": [""78b56cc9-020b-5d89-bbf2-75922360d255"", ""eb9793d9-c80b-5b67-a41c-66a09bb2ee8f""], ""outputNode"": false, ""outputs"": {}, ""parent"": ""8a0fbea5-0a2a-58e5-b27c-8d41d51726fb"", ""progress"": 0, ""thumbnail"": """", ""position"": {""x"": 100, ""y"": 350}}, ""dcbcb225-f79a-5606-a4cd-fe0e327c7bc0"": {""key"": ""simcore/services/dynamic/cc-2d-viewer"", ""version"": ""2.8.0"", ""label"": ""cc-2d-viewer"", ""inputs"": {""ap"": {""nodeUuid"": ""8b632db9-d1ca-57b2-aed1-9cbe35fece93"", ""output"": ""out_1""}}, ""inputNodes"": [""8b632db9-d1ca-57b2-aed1-9cbe35fece93""], ""outputNode"": false, ""outputs"": {}, ""parent"": ""8a0fbea5-0a2a-58e5-b27c-8d41d51726fb"", ""progress"": 100, ""thumbnail"": """", ""position"": {""x"": 300, ""y"": 200}}}",False diff --git a/services/storage/tests/data/user_to_projects.csv b/services/storage/tests/data/user_to_projects.csv new file mode 100644 index 00000000000..1e2821bfb4b --- /dev/null +++ b/services/storage/tests/data/user_to_projects.csv @@ -0,0 +1,3 @@ +id,user_id,project_id +121,21,150 +122,21,151 diff --git a/services/storage/tests/data/users.csv b/services/storage/tests/data/users.csv new file mode 100644 index 00000000000..58476f95d52 --- /dev/null +++ b/services/storage/tests/data/users.csv @@ -0,0 +1,2 @@ +id,name,email,password_hash,status,role,created_at,created_ip +21,devops,devops@itis.swiss,$5$rounds=1000$jjUWjHSG5F2dMKw.$9VRlE4YLl4bPfIrWkDz/8GtEx1XkzTpuZzyc/uiBFE4,ACTIVE,USER,2019-06-27 11:35:44.828696,172.16.8.64 diff --git a/services/storage/tests/test_configs.py b/services/storage/tests/test_configs.py index 5ac4bbc674d..e22bcc3e385 100644 --- a/services/storage/tests/test_configs.py +++ b/services/storage/tests/test_configs.py @@ -54,7 +54,6 @@ def container_environ(services_docker_compose_file, devel_environ, osparc_simcor container_environ = create_environ(skip_system_environ=True) container_environ.update({ - 'VENV2': '/home/scu/.venv27/', # defined in Dockerfile 'OSPARC_SIMCORE_REPO_ROOTDIR':str(osparc_simcore_root_dir) }) diff --git a/services/storage/tests/test_datcore.py b/services/storage/tests/test_datcore.py index 1dc7d51016f..946755a196d 100644 --- a/services/storage/tests/test_datcore.py +++ b/services/storage/tests/test_datcore.py @@ -7,29 +7,45 @@ from concurrent.futures import ThreadPoolExecutor import pytest -import utils +import utils from simcore_service_storage.datcore_wrapper import DatcoreWrapper +from pathlib import Path -async def test_datcore_list_files(loop, python27_exec): +async def test_datcore_ping(loop): if not utils.has_datcore_tokens(): return api_token = os.environ.get("BF_API_KEY", "none") api_secret = 
os.environ.get("BF_API_SECRET", "none") pool = ThreadPoolExecutor(2) - dcw = DatcoreWrapper(api_token, api_secret, python27_exec, loop, pool) - f = await dcw.list_files() - print(f) + dcw = DatcoreWrapper(api_token, api_secret, loop, pool) + responsive = await dcw.ping() + assert responsive -async def test_datcore_ping(loop, python27_exec): +async def test_datcore_list_files_recursively(loop): if not utils.has_datcore_tokens(): return api_token = os.environ.get("BF_API_KEY", "none") api_secret = os.environ.get("BF_API_SECRET", "none") pool = ThreadPoolExecutor(2) - dcw = DatcoreWrapper(api_token, api_secret, python27_exec, loop, pool) - f = await dcw.ping() - print(f) + dcw = DatcoreWrapper(api_token, api_secret, loop, pool) + f = await dcw.list_files_recursively() + assert len(f) + + +async def test_datcore_nested_download_link(loop): + if not utils.has_datcore_tokens(): + return + + api_token = os.environ.get("BF_API_KEY", "none") + api_secret = os.environ.get("BF_API_SECRET", "none") + pool = ThreadPoolExecutor(2) + dcw = DatcoreWrapper(api_token, api_secret, loop, pool) + destination = str(Path("Shared Data/ISAN/UCDavis use case 0D/inputs/")) + filename = "initial_WTstates.txt" + + f = await dcw.download_link(destination, filename) + assert(f) diff --git a/services/storage/tests/test_dsm.py b/services/storage/tests/test_dsm.py index 37ac590dced..19dc6372ba4 100644 --- a/services/storage/tests/test_dsm.py +++ b/services/storage/tests/test_dsm.py @@ -5,6 +5,7 @@ # pylint:disable=redefined-outer-name # pylint: disable=too-many-arguments +import datetime import filecmp import io import json @@ -22,7 +23,7 @@ from simcore_service_storage.models import FileMetaData from simcore_service_storage.settings import (DATCORE_STR, SIMCORE_S3_ID, SIMCORE_S3_STR) -from utils import BUCKET_NAME, has_datcore_tokens, USER_ID +from utils import BUCKET_NAME, USER_ID, has_datcore_tokens def test_mockup(dsm_mockup_db): @@ -95,6 +96,7 @@ async def test_dsm_s3(dsm_mockup_db, dsm_fixture): assert len(dsm_mockup_db) == new_size + len(bobs_biostromy_files) assert len(dsm_mockup_db) == new_size + len(bobs_biostromy_files) + def _create_file_meta_for_s3(postgres_url, s3_client, tmp_file): utils.create_tables(url=postgres_url) bucket_name = BUCKET_NAME @@ -104,9 +106,14 @@ def _create_file_meta_for_s3(postgres_url, s3_client, tmp_file): # create file and upload filename = os.path.basename(tmp_file) project_id = "22" + project_name = "battlestar" + node_name = "galactica" node_id = "1006" file_name = filename file_uuid = os.path.join(str(project_id), str(node_id), str(file_name)) + display_name = os.path.join(str(project_name), str(node_name), str(file_name)) + created_at = str(datetime.datetime.now()) + file_size = 1234 d = { 'object_name' : os.path.join(str(project_id), str(node_id), str(file_name)), 'bucket_name' : bucket_name, @@ -114,11 +121,18 @@ def _create_file_meta_for_s3(postgres_url, s3_client, tmp_file): 'user_id' : USER_ID, 'user_name' : "starbucks", 'location' : SIMCORE_S3_STR, + 'location_id' : SIMCORE_S3_ID, 'project_id' : project_id, - 'project_name' : "battlestar", + 'project_name' : project_name, 'node_id' : node_id, - 'node_name' : "this is the name of the node", - 'file_uuid' : file_uuid + 'node_name' : node_name, + 'file_uuid' : file_uuid, + 'file_id' : str(uuid.uuid4()), + 'raw_file_path' : file_uuid, + 'display_file_path' : display_name, + 'created_at' : created_at, + 'last_modified' : created_at, + 'file_size' : file_size } fmd = FileMetaData(**d) @@ -228,7 +242,7 @@ async def 
test_dsm_s3_to_datcore(postgres_service_url, s3_client, mock_files_fac urllib.request.urlretrieve(down_url, tmp_file2) assert filecmp.cmp(tmp_file2, tmp_file) # now we have the file locally, upload the file - await dsm.upload_file_to_datcore(user_id, tmp_file2, datcore_testbucket, fmd) + await dsm.upload_file_to_datcore(user_id=user_id, local_file_path=tmp_file2, destination=datcore_testbucket, fmd=fmd) data = await dsm.list_files(user_id=user_id, location=DATCORE_STR, uuid_filter=BUCKET_NAME) @@ -336,3 +350,17 @@ def test_fmd_build(): assert fmd.location == SIMCORE_S3_STR assert fmd.location_id == SIMCORE_S3_ID assert fmd.bucket_name == "test-bucket" + + +async def test_dsm_complete_db(dsm_fixture, dsm_mockup_complete_db): + dsm = dsm_fixture + _id = "21" + dsm.has_project_db = True + data = await dsm.list_files(user_id=_id, location=SIMCORE_S3_STR) + + assert len(data) == 2 + for d in data: + assert d.display_file_path + assert d.node_name + assert d.project_name + assert d.raw_file_path diff --git a/services/storage/tests/test_rest.py b/services/storage/tests/test_rest.py index b8485646898..8b058d2d389 100644 --- a/services/storage/tests/test_rest.py +++ b/services/storage/tests/test_rest.py @@ -28,7 +28,7 @@ def parse_db(dsm_mockup_db): return id_file_count, id_name_map @pytest.fixture -def client(loop, aiohttp_unused_port, aiohttp_client, python27_path, postgres_service, minio_service, osparc_api_specs_dir): +def client(loop, aiohttp_unused_port, aiohttp_client, postgres_service, minio_service, osparc_api_specs_dir): app = web.Application() api_token = os.environ.get("BF_API_KEY", "none") @@ -37,8 +37,8 @@ def client(loop, aiohttp_unused_port, aiohttp_client, python27_path, postgres_se main_cfg = { 'port': aiohttp_unused_port(), 'host': 'localhost', - 'python2': python27_path, "max_workers" : 4, + "testing" : True, "test_datcore" : { 'api_token' : api_token, 'api_secret' : api_secret} } rest_cfg = { diff --git a/services/storage/tests/utils.py b/services/storage/tests/utils.py index 1196f00a089..8b71c4957e4 100644 --- a/services/storage/tests/utils.py +++ b/services/storage/tests/utils.py @@ -1,11 +1,15 @@ import logging import os +from pathlib import Path +import pandas as pd import pytest import requests import sqlalchemy as sa +import sys -from simcore_service_storage.models import FileMetaData, file_meta_data +from simcore_service_storage.models import (FileMetaData, file_meta_data, + projects, user_to_projects, users) log = logging.getLogger(__name__) @@ -20,6 +24,12 @@ BUCKET_NAME ="simcore-testing" USER_ID = '0' +def current_dir(): + return Path(sys.argv[0] if __name__ == "__main__" else __file__).resolve().parent + +def data_dir(): + return current_dir() / Path("data") + def has_datcore_tokens()->bool: token = os.environ.get("BF_API_KEY", "none") if token == "none": @@ -79,8 +89,63 @@ def insert_metadata(url: str, fmd: FileMetaData): node_name = fmd.node_name, file_name = fmd.file_name, user_id = fmd.user_id, - user_name= fmd.user_name) + user_name= fmd.user_name, + file_id = fmd.file_id, + raw_file_path = fmd.raw_file_path, + display_file_path = fmd.display_file_path, + created_at = fmd.created_at, + last_modified = fmd.last_modified, + file_size = fmd.file_size) + engine = sa.create_engine(url) conn = engine.connect() conn.execute(ins) + +def create_full_tables(url): + meta = sa.MetaData() + engine = sa.create_engine(url) + + meta.drop_all(bind=engine, tables=[file_meta_data, projects, user_to_projects, users]) + meta.create_all(bind=engine, tables=[file_meta_data, projects, 
user_to_projects, users]) + + for t in ["file_meta_data", "projects", "users", "user_to_projects"]: + filename = t + ".csv" + csv_file = str(data_dir() / Path(filename)) + with open(csv_file, 'r') as file: + data_df = pd.read_csv(file) + data_df.to_sql(t, con=engine, index=False, index_label="id", if_exists='append') + + # Leave here as a reference + # import psycopg2 + # conn = psycopg2.connect(url) + # cur = conn.cursor() + # columns = [["file_uuid","location_id","location","bucket_name","object_name","project_id","project_name","node_id","node_name","file_name","user_id","user_name"],[],[],[]] + # if False: + # import pdb; pdb.set_trace() + # for t in ["file_meta_data", "projects", "users", "user_to_projects"]: + # filename = t + ".sql" + # sqlfile = str(data_dir() / Path(filename)) + # cur.execute(open(sqlfile, "r").read()) + # else: + # for t in ["file_meta_data", "projects", "users", "user_to_projects"]: + # filename = t + ".csv" + # csv_file = str(data_dir() / Path(filename)) + # if False: + # with open(csv_file, 'r') as file: + # next(file) + # if t == "file_meta_data": + # cur.copy_from(file, t, sep=',', columns=columns[0]) + # else: + # cur.copy_from(file, t, sep=',') + # conn.commit() + # else: + # with open(csv_file, 'r') as file: + # data_df = pd.read_csv(file) + # data_df.to_sql(t, con=engine, index=False, index_label="id", if_exists='append') + +def drop_all_tables(url): + meta = sa.MetaData() + engine = sa.create_engine(url) + + meta.drop_all(bind=engine, tables=[file_meta_data, projects, user_to_projects, users]) diff --git a/services/web/client/compile.json b/services/web/client/compile.json index 0fa76ee68e9..0ada6b85cb9 100644 --- a/services/web/client/compile.json +++ b/services/web/client/compile.json @@ -73,6 +73,12 @@ "add-css": [ { "uri": "resource/jsontreeviewer/jsonTree.css" + }, + { + "uri": "resource/hint/hint.css" + }, + { + "uri": "resource/common/common.css" } ], "add-script": [ diff --git a/services/web/client/source/class/qxapp/Application.js b/services/web/client/source/class/qxapp/Application.js index 1c875b3521d..ba5e1ee936d 100644 --- a/services/web/client/source/class/qxapp/Application.js +++ b/services/web/client/source/class/qxapp/Application.js @@ -21,6 +21,7 @@ * This is the main application class of "qxapp" * * @asset(qxapp/*) + * @asset(common/common.css) */ qx.Class.define("qxapp.Application", { @@ -68,6 +69,7 @@ qx.Class.define("qxapp.Application", { }, this); this.__initRouting(); + this.__loadCommonCss(); }, __initRouting: function() { @@ -200,6 +202,11 @@ qx.Class.define("qxapp.Application", { "transition: background-color 0s linear 100000s, color 0s linear 100000s" ); } + }, + + __loadCommonCss: function() { + const commonCssUri = qx.util.ResourceManager.getInstance().toUri("common/common.css"); + qx.module.Css.includeStylesheet(commonCssUri); } } }); diff --git a/services/web/client/source/class/qxapp/auth/LoginPage.js b/services/web/client/source/class/qxapp/auth/LoginPage.js index 41a6299d8ee..7e5139fb301 100644 --- a/services/web/client/source/class/qxapp/auth/LoginPage.js +++ b/services/web/client/source/class/qxapp/auth/LoginPage.js @@ -32,7 +32,7 @@ qx.Class.define("qxapp.auth.LoginPage", { this.base(arguments); // Layout guarantees it gets centered in parent's page - let layout = new qx.ui.layout.Grid(); + const layout = new qx.ui.layout.Grid(); layout.setRowFlex(0, 1); layout.setColumnFlex(0, 1); this._setLayout(layout);
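// Usage sketch (not part of the patch): the stylesheet-loading pattern that
// Application.js introduces above, shown in isolation. Both calls are standard
// qooxdoo API; the resource id must be declared via @asset, as done above for
// "common/common.css".
const cssUri = qx.util.ResourceManager.getInstance().toUri("common/common.css");
qx.module.Css.includeStylesheet(cssUri); // appends a <link> tag for the resolved URI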
diff --git a/services/web/client/source/class/qxapp/component/filter/TextFilter.js b/services/web/client/source/class/qxapp/component/filter/TextFilter.js index 1a91c0152f2..08e817f947e 100644 --- a/services/web/client/source/class/qxapp/component/filter/TextFilter.js +++ b/services/web/client/source/class/qxapp/component/filter/TextFilter.js @@ -49,6 +49,9 @@ qx.Class.define("qxapp.component.filter.TextFilter", { members: { __textField: null, + /** + * Function that resets the field and dispatches the update. + */ reset: function() { this.__textField.resetValue(); this.__textField.fireDataEvent("input", ""); diff --git a/services/web/client/source/class/qxapp/component/form/Auto.js b/services/web/client/source/class/qxapp/component/form/Auto.js index d3fe5965545..e89709dda4d 100644 --- a/services/web/client/source/class/qxapp/component/form/Auto.js +++ b/services/web/client/source/class/qxapp/component/form/Auto.js @@ -573,6 +573,7 @@ qx.Class.define("qxapp.component.form.Auto", { control.set(s.set); } control.key = key; + control.description = s.description; this.__ctrlMap[key] = control; let controlLink = new qx.ui.form.TextField().set({ diff --git a/services/web/client/source/class/qxapp/component/form/FieldWHint.js b/services/web/client/source/class/qxapp/component/form/FieldWHint.js new file mode 100644 index 00000000000..ff369a5dd12 --- /dev/null +++ b/services/web/client/source/class/qxapp/component/form/FieldWHint.js @@ -0,0 +1,94 @@ +/* ************************************************************************ + + qxapp - the simcore frontend + + https://osparc.io + + Copyright: + 2019 IT'IS Foundation, https://itis.swiss + + License: + MIT: https://opensource.org/licenses/MIT + + Authors: + * Ignacio Pascual (ignapas) + +************************************************************************ */ + +/** + * @asset(hint/hint.css) + */ + +qx.Class.define("qxapp.component.form.FieldWHint", { + extend: qx.ui.core.Widget, + + /** + * Text field with a hint tooltip + * + * @extends qx.ui.core.Widget + */ + construct: function(value, hint, field) { + this.base(arguments); + this._setLayout(new qx.ui.layout.Canvas()); + + const hintCssUri = qx.util.ResourceManager.getInstance().toUri("hint/hint.css"); + qx.module.Css.includeStylesheet(hintCssUri); + + this.__field = field || new qx.ui.form.TextField(); + if (value) { + this.__field.setValue(value); + } + this.getContentElement().addClass("hint-input"); + this.__field.getContentElement().addClass("hint-field"); + this._add(this.__field, { + top: 0, + right: 0, + bottom: 0, + left: 0 + }); + + if (hint) { + this.__hintText = hint; + } + this.__infoButton = this.getChildControl("infobutton"); + + this.__attachEventHandlers(); + }, + + members: { + __field: null, + __hint: null, + __hintText: null, + __infoButton: null, + + _createChildControlImpl: function(id) { + let control; + switch (id) { + case "infobutton": + control = new qxapp.component.form.IconButton("@FontAwesome5Solid/info-circle/14"); + control.getContentElement().addClass("hint-button"); + this._add(control, { + right: 0, + bottom: 5 + }); + break; + } + return control || this.base(arguments, id); + }, + + __attachEventHandlers: function() { + if (this.__hintText) { + this.addListener("mouseover", () => this.__field.setPaddingRight(18), this); + this.addListener("mouseout", () => this.__field.resetPaddingRight(), this); + this.__infoButton.addListener("mouseover", () => this.__hint = new qxapp.ui.hint.Hint(this.__infoButton, this.__hintText), this); + this.__infoButton.addListener("mouseout", () => this.__hint.destroy(), this); + 
this.__field.bind("visibility", this, "visibility"); + } + }, + + getField: function() { + return this.__field; + } + } +}); diff --git a/services/web/client/source/class/qxapp/component/form/IconButton.js b/services/web/client/source/class/qxapp/component/form/IconButton.js index 9fd8e770058..85b3bd8c5d9 100644 --- a/services/web/client/source/class/qxapp/component/form/IconButton.js +++ b/services/web/client/source/class/qxapp/component/form/IconButton.js @@ -29,9 +29,11 @@ qx.Class.define("qxapp.component.form.IconButton", { * @param {function} cb Callback function to be executed on tap. * @param {object} context Execution context (this) of the callback function. */ - construct: function(icon, cb, context = null) { + construct: function(icon, cb, context) { this.base(arguments, icon); - this.addListener("tap", cb, context); + if (cb) { + this.addListener("tap", cb, context); + } this.setCursor("pointer"); } }); diff --git a/services/web/client/source/class/qxapp/component/form/renderer/PropForm.js b/services/web/client/source/class/qxapp/component/form/renderer/PropForm.js index b58b13505e2..e45635e2cf8 100644 --- a/services/web/client/source/class/qxapp/component/form/renderer/PropForm.js +++ b/services/web/client/source/class/qxapp/component/form/renderer/PropForm.js @@ -84,7 +84,7 @@ qx.Class.define("qxapp.component.form.renderer.PropForm", { column: 0 }); label.setBuddy(item); - this._add(item, { + this._add(new qxapp.component.form.FieldWHint(null, item.description, item), { row: this._row, column: 1 }); @@ -147,16 +147,14 @@ qx.Class.define("qxapp.component.form.renderer.PropForm", { let children = this._getChildren(); for (let i=0; i<children.length; i++) { ... } - return this._getChildren().filter(child => child.nodeId && this.__arePortsCompatible(output.nodeId, output.portId, child.nodeId, child.portId)); + return this._getChildren().filter(child => child.getField && this.__arePortsCompatible(output.nodeId, output.portId, child.getField().nodeId, child.getField().portId)); }, __highlightCompatibles: function(output) { const inputs = this.__getCompatibleInputs(output); for (let i in inputs) { - const input = inputs[i]; + const input = inputs[i].getField(); input.setDecorator("material-textfield-focused"); } }, __unhighlightAll: function() { - const inputs = this._getChildren().filter(child => child.nodeId); + const inputs = this._getChildren().filter(child => child.getField); for (let i in inputs) { const input = inputs[i]; - input.resetDecorator(); + input.getField().resetDecorator(); } },
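// Usage sketch (not part of the patch): how PropForm consumes the new FieldWHint
// wrapper above. The (value, hint, field) constructor signature comes from
// FieldWHint.js in this diff; the hint text here is a made-up placeholder.
const item = new qx.ui.form.TextField();
const wrapped = new qxapp.component.form.FieldWHint(null, "Stimulation frequency in Hz", item);
// Since the wrapper, not the field, is added to the grid, sibling code must
// unwrap it before reading port metadata -- hence the child.getField checks above:
const field = wrapped.getField(); // === item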
diff --git a/services/web/client/source/class/qxapp/component/metadata/ServiceInfo.js b/services/web/client/source/class/qxapp/component/metadata/ServiceInfo.js new file mode 100644 index 00000000000..d1f41404e89 --- /dev/null +++ b/services/web/client/source/class/qxapp/component/metadata/ServiceInfo.js @@ -0,0 +1,97 @@ +/* + * oSPARC - The SIMCORE frontend - https://osparc.io + * Copyright: 2019 IT'IS Foundation - https://itis.swiss + * License: MIT - https://opensource.org/licenses/MIT + * Authors: Ignacio Pascual (ignapas) + */ + +qx.Class.define("qxapp.component.metadata.ServiceInfo", { + extend: qx.ui.core.Widget, + construct: function(metadata) { + this.base(arguments); + + this.set({ + padding: 5, + backgroundColor: "background-main" + }); + this._setLayout(new qx.ui.layout.VBox(8)); + + this.__metadata = metadata; + + const main = new qx.ui.container.Composite(new qx.ui.layout.HBox(8)); + main.add(this.__createServiceThumbnail()); + main.add(this.__createMainInfo(), { + flex: 1 + }); + this._add(main); + + const authors = this.__createAuthors(); + this._add(authors); + + const rawMetadata = this.__createRawMetadata(); + const more = new qxapp.desktop.PanelView(this.tr("raw metadata"), rawMetadata).set({ + caretSize: 14 + }); + this._add(more, { + flex: 1 + }); + more.setCollapsed(true); + more.getChildControl("title").setFont("text-12"); + }, + + members: { + __service: null, + __metadata: null, + + __createMainInfo: function() { + const container = new qx.ui.container.Composite(new qx.ui.layout.VBox(8).set({ + alignY: "middle" + })); + + const title = new qx.ui.basic.Label(this.__metadata.name).set({ + font: "title-16", + rich: true + }); + container.add(title); + + const description = new qx.ui.basic.Label(this.__metadata.description).set({ + rich: true + }); + container.add(description); + + const author = new qx.ui.basic.Label(this.tr("Contact") + ": " + this.__metadata.contact).set({ + rich: true + }); + container.add(author); + + return container; + }, + + __createServiceThumbnail: function() { + return new qx.ui.basic.Image(this.__metadata.thumbnail || qxapp.utils.Utils.getThumbnailFromString(this.__metadata.key)).set({ + scale: true, + width: 200, + height: 120 + }); + }, + + __createRawMetadata: function() { + const container = new qx.ui.container.Scroll(); + container.add(new qxapp.component.widget.JsonTreeWidget(this.__metadata)); + return container; + }, + + __createAuthors: function() { + const container = new qx.ui.container.Composite(new qx.ui.layout.VBox(5)); + container.add(new qx.ui.basic.Label(this.tr("Authors")).set({ + font: "title-12" + })); + for (let i in this.__metadata.authors) { + const author = this.__metadata.authors[i]; + const authorLine = `${author.name} · ${author.affiliation} · ${author.email}`; + container.add(new qx.ui.basic.Label(authorLine)); + } + return container; + } + } +}); diff --git a/services/web/client/source/class/qxapp/component/metadata/ServiceInfoWindow.js b/services/web/client/source/class/qxapp/component/metadata/ServiceInfoWindow.js new file mode 100644 index 00000000000..97732712006 --- /dev/null +++ b/services/web/client/source/class/qxapp/component/metadata/ServiceInfoWindow.js @@ -0,0 +1,30 @@ +/* + * oSPARC - The SIMCORE frontend - https://osparc.io + * Copyright: 2019 IT'IS Foundation - https://itis.swiss + * License: MIT - https://opensource.org/licenses/MIT + * Authors: Ignacio Pascual (ignapas) + */ + +qx.Class.define("qxapp.component.metadata.ServiceInfoWindow", { + extend: qx.ui.window.Window, + construct: function(metadata) { + this.base(arguments, this.tr("Service information") + " · " + metadata.name); + + this.set({ + layout: new qx.ui.layout.Grow(), + contentPadding: 0, + showMinimize: false, + resizable: false, + modal: true + }); + + this.add(new qxapp.component.metadata.ServiceInfo(metadata)); + }, + + properties: { + appearance: { + refine: true, + init: "info-service-window" + } + } +}); diff --git a/services/web/client/source/class/qxapp/component/metadata/StudyInfo.js b/services/web/client/source/class/qxapp/component/metadata/StudyInfo.js new file mode 100644 index 00000000000..de9d8442160 --- /dev/null +++ b/services/web/client/source/class/qxapp/component/metadata/StudyInfo.js @@ -0,0 +1,104 @@ +/* + * oSPARC - The SIMCORE frontend - https://osparc.io + * Copyright: 2019 IT'IS Foundation - https://itis.swiss + * License: MIT - https://opensource.org/licenses/MIT + * Authors: Ignacio Pascual (ignapas) + */ + +qx.Class.define("qxapp.component.metadata.StudyInfo", { + extend: qx.ui.core.Widget, + construct: function(study) 
{ + this.base(arguments); + + this.set({ + padding: 5, + backgroundColor: "background-main" + }); + this._setLayout(new qx.ui.layout.VBox(8)); + + this.__study = study; + + const main = new qx.ui.container.Composite(new qx.ui.layout.HBox(8)); + main.add(this.__createStudyThumbnail()); + main.add(this.__createMainInfo(), { + flex: 1 + }); + this._add(main); + + const extraInfo = this.__createExtraInfo(); + const more = new qxapp.desktop.PanelView(this.tr("more information"), extraInfo).set({ + caretSize: 14, + collapsed: true + }); + more.getChildControl("title").setFont("text-12"); + this._add(more); + }, + + members: { + __study: null, + + __createMainInfo: function() { + const container = new qx.ui.container.Composite(new qx.ui.layout.VBox(8).set({ + alignY: "middle" + })); + + const title = new qx.ui.basic.Label(this.__study.getName()).set({ + font: "title-16", + rich: true + }); + container.add(title); + + const description = new qx.ui.basic.Label(this.__study.getDescription()).set({ + rich: true + }); + container.add(description); + + const author = new qx.ui.basic.Label(this.tr("Owner") + ": " + this.__study.getPrjOwner()).set({ + rich: true + }); + container.add(author); + + return container; + }, + + __createStudyThumbnail: function() { + return new qx.ui.basic.Image(qxapp.utils.Utils.getThumbnailFromUuid(this.__study.getUuid())).set({ + scale: true, + width: 200, + height: 120 + }); + }, + + __createExtraInfo: function() { + const layout = new qx.ui.layout.Grid(8); + layout.setColumnAlign(0, "right", "middle"); + layout.setColumnAlign(1, "left", "middle"); + + const container = new qx.ui.container.Composite(layout); + + const dateFormatter = date => date.toLocaleString(); + + container.add(new qx.ui.basic.Label(this.tr("Creation date")), { + column: 0, + row: 0 + }); + const creation = new qx.ui.basic.Label(dateFormatter(this.__study.getCreationDate())); + container.add(creation, { + column: 1, + row: 0 + }); + + container.add(new qx.ui.basic.Label(this.tr("Last modified")), { + column: 0, + row: 1 + }); + const last = new qx.ui.basic.Label(dateFormatter(this.__study.getLastChangeDate())); + container.add(last, { + column: 1, + row: 1 + }); + + return container; + } + } +});
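// Usage sketch (not part of the patch): opening the new metadata viewers.
// `metadata` stands for a service-metadata object (key, name, description,
// contact, authors) as consumed by ServiceInfo above; center() and open() are
// standard qx.ui.window.Window methods. StudyInfo, by contrast, is a plain
// widget meant to be embedded in an existing layout.
const win = new qxapp.component.metadata.ServiceInfoWindow(metadata);
win.center();
win.open();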
((currentClass || "") + " " + className).trim()); + } + }, + + __removeClass: function(element, className) { + const currentClass = element.getAttribute("class"); + if (currentClass) { + const regex = new RegExp(className.trim(), "g"); + element.setAttribute("class", currentClass.replace(regex, "")); + } + }, + + __setupInteractive: function() { + this.__node.bind("serviceUrl", this.__label, "value", { + converter: url => { + if (url) { + return this.tr("Ready"); + } + return this.tr("Loading..."); + } + }); + + this.__node.bind("serviceUrl", this.__icon, "source", { + converter: url => { + if (url) { + return "@FontAwesome5Solid/check/12"; + } + return "@FontAwesome5Solid/circle-notch/12"; + }, + onUpdate: (source, target) => { + if (source.getServiceUrl()) { + this.__removeClass(this.__icon.getContentElement(), "rotate"); + target.setTextColor("ready-green"); + } else { + this.__addClass(this.__icon.getContentElement(), "rotate"); + target.resetTextColor(); + } + } + }); + }, + + __setupFilepicker: function() { + const node = this.__node; + this.__node.bind("progress", this.__icon, "source", { + converter: progress => { + if (progress === 100) { + return "@FontAwesome5Solid/check/12"; + } + return "@FontAwesome5Solid/file/12"; + }, + onUpdate: (source, target) => { + if (source.getProgress() === 100) { + target.setTextColor("ready-green"); + } else { + target.resetTextColor(); + } + } + }); + + this.__node.bind("progress", this.__label, "value", { + converter: progress => { + if (progress === 100) { + const file = node.getOutputValues().outFile.path; + const splitFilename = file.split("/"); + return splitFilename[splitFilename.length-1]; + } + return this.tr("Select a file"); + } + }); + } + } +}); diff --git a/services/web/client/source/class/qxapp/component/widget/JsonTreeWidget.js b/services/web/client/source/class/qxapp/component/widget/JsonTreeWidget.js index f896ff43254..c2f5fc84e2c 100644 --- a/services/web/client/source/class/qxapp/component/widget/JsonTreeWidget.js +++ b/services/web/client/source/class/qxapp/component/widget/JsonTreeWidget.js @@ -12,6 +12,7 @@ Authors: * Odei Maiz (odeimaiz) + * Ignacio Pascual (ignapas) ************************************************************************ */ @@ -23,28 +24,22 @@ * Here is a little example of how to use the widget. * *
- *   let jsonTreeWidget = new qxapp.component.widget.JsonTreeWidget(data, "elemId");
+ *   let jsonTreeWidget = new qxapp.component.widget.JsonTreeWidget(data);
  *   this.getRoot().add(jsonTreeWidget);
  *   </pre>
 */ qx.Class.define("qxapp.component.widget.JsonTreeWidget", { - extend: qx.ui.core.Widget, + extend: qx.ui.basic.Label, /** - * @param data {Object} Json object to be displayed by JsonTreeViewer - * @param elemId {String} Element id to set it as dom attribute - */ - construct: function(data, elemId) { - this.base(); - - this.addListenerOnce("appear", () => { - let elem = this.getContentElement().getDomElement(); - qx.bom.element.Attribute.set(elem, "id", elemId); - let jsonTreeViewer = qxapp.wrapper.JsonTreeViewer.getInstance(); - if (jsonTreeViewer.getLibReady()) { - jsonTreeViewer.print(data, elem); - } + * @param data {Object} Json object to be displayed by JsonTreeViewer + */ + construct: function(data) { + const prettyJson = JSON.stringify(data, null, " ").replace(/\n/ig, "<br/>
"); + this.base(arguments, prettyJson); + this.set({ + rich: true }); } }); diff --git a/services/web/client/source/class/qxapp/component/widget/NewGHIssue.js b/services/web/client/source/class/qxapp/component/widget/NewGHIssue.js index 1776247b809..93a60970c76 100644 --- a/services/web/client/source/class/qxapp/component/widget/NewGHIssue.js +++ b/services/web/client/source/class/qxapp/component/widget/NewGHIssue.js @@ -55,6 +55,8 @@ qx.Class.define("qxapp.component.widget.NewGHIssue", { ## Steps to reproduce +Note: your environment was attached but will not be displayed +