Skip to content

Commit

Permalink
Prepare Release 1.1.1 (#512)
Browse files Browse the repository at this point in the history
* Bump version 1.1.1

* add module for tween factories
add maintenance mode tween factory

* fix variable

* add test for maintenance mode

* add settings for maintenance mode

* Revert "add settings for maintenance mode"

This reverts commit ba2ad3f.

* Revert "add test for maintenance mode"

This reverts commit feb0f15.

* Revert "fix variable"

This reverts commit 29d03ca.

* Revert "add module for tween factories"

This reverts commit a67241a.

* Revert "Bump version 1.1.1"

This reverts commit dd790b6.

* Bump version 1.1.1 (#510)

* Update README (instructions and animation) (#508)

* Update deployment instructions

* update # CHANGEME tag to # CHANGE THIS

* Update deploy/compose_minimal/docker-compose.yml

Co-authored-by: Camill Kaipf <[email protected]>

* update compose files
TFA-related settings were not added yet

---------

Co-authored-by: Camill Kaipf <[email protected]>
Co-authored-by: ckaipf <[email protected]>

* Handle MetaDatum unique but not mandatory (#509)

* remove not used argument

* don't count values which are `None` while ensuring
the unique constraint.
If a MetaDatum is not mandatory,
several MetaDatumRecords will have
their value set to `None`

* add empty line

* add fixtures for test unique but not mandatory MetaDatum

* rename file

* add simplified submission test, to submit MetaDataSets
with a unique and
not mandatory MetaDatum

* mypy is not happy here with reassigning this
variable,
refactored it

* fix var name
wrote too often None in the last hour

* one MetaDataSet should be in another Submission for the global constraint

* list to generator

Co-authored-by: Leon Kuchenbecker <[email protected]>

* flatten nested if

---------

Co-authored-by: Leon Kuchenbecker <[email protected]>

---------

Co-authored-by: Leon Kuchenbecker <[email protected]>
  • Loading branch information
ckaipf and lkuchenb authored Feb 21, 2023
1 parent f6a0564 commit d774c22
Show file tree
Hide file tree
Showing 11 changed files with 281 additions and 28 deletions.
23 changes: 13 additions & 10 deletions README.md
Original file line number Diff line number Diff line change
Expand Up @@ -10,9 +10,20 @@ users!

## Quick Installation

1. Create a directory for the datameta configuration (of your choice)
```
mkdir /usr/local/lib/datameta
cd /usr/local/lib/datameta
```

1. Edit the configuration file

The fields that require changing are marked with `# CHANGE THIS`. You may want
to perform additional adjustments to the compose file to fit your needs.

1. Download the Docker compose file
```
curl -LO https://raw.githubusercontent.com/ghga-de/datameta/main/datameta.compose.yml
curl -LO https://datameta.org/minimal/docker-compose.yml
```

1. Create the Docker volumes for persistent file and database storage
Expand All @@ -23,13 +34,5 @@ users!

1. Start up your DataMeta Instance
```
docker stack deploy --compose-file datameta.compose.yml datameta
docker-compose up -d
```

1. Connect to your DataMeta instance at http://localhost:9950 and log in with the default
account `[email protected]`. The initial password can be obtained using
`docker logs {your_app_container_id}`.

## Full Installation Instructions

Detailed installation instructions can be found [here](./docs).
18 changes: 11 additions & 7 deletions datameta/validation.py
Original file line number Diff line number Diff line change
Expand Up @@ -21,6 +21,7 @@
from .security import authz
from .api.metadata import get_all_metadata
from .utils import get_record_from_metadataset
from typing import Dict


def validate_submission_access(db, db_files, db_msets, auth_user):
Expand Down Expand Up @@ -120,11 +121,14 @@ def validate_submission_association(db_files, db_msets, ignore_submitted_metadat
return f_names_obj, ref_fnames, errors


def validate_submission_uniquekeys(db, db_files, db_msets):
def validate_submission_uniquekeys(
db,
db_msets: Dict[str, MetaDataSet],
):
errors = []

# Submission unique keys (includes those that are globally unique)
keys_submission_unique = [ md.name for md in db.query(MetaDatum).filter(or_(MetaDatum.submission_unique.is_(True), MetaDatum.site_unique.is_(True))) ]
keys_submission_unique = [ md.name for md in db.query(MetaDatum).filter(or_(MetaDatum.submission_unique.is_(True), MetaDatum.site_unique.is_(True))) ]
# Globally unique keys
keys_site_unique = [ md.name for md in db.query(MetaDatum).filter(MetaDatum.site_unique.is_(True)) ]

Expand All @@ -134,20 +138,20 @@ def validate_submission_uniquekeys(db, db_files, db_msets):
# Associate all values for that key with the metadatasets it occurs in
for db_mset in db_msets.values():
for mdatrec in db_mset.metadatumrecords:
if mdatrec.metadatum.name == key:
if mdatrec.metadatum.name == key and mdatrec.value:
value_msets[mdatrec.value].append(db_mset)
# Reduce to those values that occur in more than one metadatast
value_msets = { k: v for k, v in value_msets.items() if len(v) > 1 }
not_unique = ( v for v in value_msets.values() if len(v) > 1 )
# Produce errrors
errors += [ (db_mset, key, "Violation of intra-submission unique constraint") for msets in value_msets.values() for db_mset in msets ]
errors += [ (db_mset, key, "Violation of intra-submission unique constraint") for msets in not_unique for db_mset in msets ]

# Validate the set of metadatasets with regard to site-wise unique key constraints
for key in keys_site_unique:
value_msets = defaultdict(list)
# Associate all values for that key with the metadatasets it occurs in
for db_mset in db_msets.values():
for mdatrec in db_mset.metadatumrecords:
if mdatrec.metadatum.name == key:
if mdatrec.metadatum.name == key and mdatrec.value:
value_msets[mdatrec.value].append(db_mset)

# Query the database for the supplied values
Expand Down Expand Up @@ -196,7 +200,7 @@ def validate_submission(request, auth_user):
val_errors += [ (db_msets[mset_id], mset_error['field'], mset_error['message']) for mset_error in mset_errors ]

# Validate unique field constraints
val_errors += validate_submission_uniquekeys(db, db_files, db_msets)
val_errors += validate_submission_uniquekeys(db, db_msets)

# If we collected any val_errors, raise 400
if val_errors:
Expand Down
29 changes: 19 additions & 10 deletions deploy/compose_minimal/docker-compose.yml
Original file line number Diff line number Diff line change
Expand Up @@ -17,19 +17,21 @@ services:
appserver:
image: "datameta/datameta:dev"
restart: "always"
volumes:
- datameta-filestorage:/var/datameta/storage
environment:
SQLALCHEMY_URL: postgresql://datameta:datameta@dbserver/datameta
SESSION_URL: sessionserver:11211
SESSION_KEY: datameta
SESSION_SECRET: dummy # TODO: Insert 64 character random string here
SESSION_SECRET: dummy # CHANGE THIS: Insert 64 character random string here
DATAMETA_STORAGE_PATH: /var/datameta/storage

# The initial values can be changed once the DataMeta instance was
# deployed.
DATAMETA_INITIAL_FULLNAME: Joe user # TODO: Insert initial user fullname here
DATAMETA_INITIAL_EMAIL: joe@user # TODO: Insert initial user email address here
DATAMETA_INITIAL_PASS: j03us3r # TODO: Insert initial user password here
DATAMETA_INITIAL_GROUPNAME: Joe's lab # TODO: Insert intial user group name here
DATAMETA_INITIAL_FULLNAME: Joe User # CHANGE THIS: Insert initial user fullname here
DATAMETA_INITIAL_EMAIL: joe@user # CHANGE THIS: Insert initial user email address here
DATAMETA_INITIAL_PASS: j03us3r # CHANGE THIS: Insert initial user password here
DATAMETA_INITIAL_GROUPNAME: Joe's lab # CHANGE THIS: Insert initial user group name here

# The maximum number of days an API key can be valid
DATAMETA_API_KEYS_MAX_EXPIRE_DAYS: 365
Expand All @@ -45,12 +47,12 @@ services:
# DataMeta sends out emails for password forgot tokens, registration
# confirmations and to notify admins about new registrations. Configure
# an SMTP server for outgoing email below.
DATAMETA_SMTP_HOST: # TODO: Insert SMTP server address here
DATAMETA_SMTP_HOST: # CHANGE THIS: Insert SMTP server address here
DATAMETA_SMTP_PORT: 587 # Adjust if needed
DATAMETA_SMTP_USER: # TODO: Insert SMTP user here
DATAMETA_SMTP_PASS: # TODO: Insert SMTP password here
DATAMETA_SMTP_USER: # CHANGE THIS: Insert SMTP user here
DATAMETA_SMTP_PASS: # CHANGE THIS: Insert SMTP password here
DATAMETA_SMTP_TLS: "true"
DATAMETA_SMTP_FROM: # TODO: Specify SMTP FROM header here, format 'Example Support <[email protected]>'
DATAMETA_SMTP_FROM: # CHANGE THIS: Specify SMTP FROM header here, format 'Example Support <[email protected]>'

# Site ID prefixes and lengths
# The entites 'user', 'group', 'submission', 'metadataset' and 'file' are
Expand All @@ -71,6 +73,11 @@ services:
DATAMETA_SITE_ID_PREFIX_FILES: "DMF-"
DATAMETA_SITE_ID_PREFIX_SERVICES: "DMP-"

# Two factor authentication settings
DATAMETA_TFA_ENABLED: # CHANGE THIS: Insert bool to enable
DATAMETA_TFA_ENCRYPT_KEY: # CHANGE THIS: Insert secret here
DATAMETA_TFA_OTP_ISSUER: # CHANGE THIS: Insert OTP issuer name here

GUNICORN_WORKERS: 4 # Adjust to your needs. 2-4x #CPUs
GUNICORN_PROC_NAME: datameta
GUNICORN_FORWARDED_ALLOW_IPS: "*"
Expand All @@ -82,4 +89,6 @@ services:

volumes: # TODO: Create external volumes before first launch
datameta-db:
external: false
external: true
datameta-filestorage:
external: true
5 changes: 5 additions & 0 deletions deploy/compose_multiprocess/docker-compose.yml
Original file line number Diff line number Diff line change
Expand Up @@ -71,6 +71,11 @@ services:
DATAMETA_SITE_ID_PREFIX_FILES: "DMF-"
DATAMETA_SITE_ID_PREFIX_SERVICES: "DMP-"

# Two factor authentication settings
DATAMETA_TFA_ENABLED: # CHANGE THIS: Insert bool to enable
DATAMETA_TFA_ENCRYPT_KEY: # CHANGE THIS: Insert secret here
DATAMETA_TFA_OTP_ISSUER: # CHANGE THIS: Insert OTP issuer name here

GUNICORN_WORKERS: 75 # Adjust to your needs. 2-4x #CPUs
GUNICORN_PROC_NAME: datameta
GUNICORN_FORWARDED_ALLOW_IPS: "*"
Expand Down
5 changes: 5 additions & 0 deletions deploy/compose_scale/docker-compose.yml
Original file line number Diff line number Diff line change
Expand Up @@ -16,6 +16,11 @@ services:
# The maximum number of days an API key can be valid
DATAMETA_API_KEYS_MAX_EXPIRE_DAYS: 365

# Two factor authentication settings
DATAMETA_TFA_ENABLED: # CHANGE THIS: Insert bool to enable
DATAMETA_TFA_ENCRYPT_KEY: # CHANGE THIS: Insert secret here
DATAMETA_TFA_OTP_ISSUER: # CHANGE THIS: Insert OTP issuer name here

LETSENCRYPT_HOST: # Picked up by nginx-proxy / letsencrypt. Comma separated list of hostnames.
VIRTUAL_HOST: # Picked up by nginx-proxy / letsencrypt. Comma separated list of hostnames. Every hostname in LETSENCRYPT_HOST must appear also here, otherwise cert acquisition will fail
WAITRESS_MAX_REQUEST_BODY_SIZE: 10737418240 # 10 GB
Expand Down
5 changes: 5 additions & 0 deletions deploy/compose_testing/docker-compose.yml
Original file line number Diff line number Diff line change
Expand Up @@ -68,6 +68,11 @@ services:
DATAMETA_SITE_ID_PREFIX_FILES: "DMF-"
DATAMETA_SITE_ID_PREFIX_SERVICES: "DMP-"

# Two factor authentication settings
DATAMETA_TFA_ENABLED: # CHANGE THIS: Insert bool to enable
DATAMETA_TFA_ENCRYPT_KEY: # CHANGE THIS: Insert secret here
DATAMETA_TFA_OTP_ISSUER: # CHANGE THIS: Insert OTP issuer name here

GUNICORN_WORKERS: 4 # Adjust to your needs. 2-4x #CPUs
GUNICORN_PROC_NAME: datameta
GUNICORN_FORWARDED_ALLOW_IPS: "*"
Expand Down
Binary file modified img/datameta.demo.gif
Loading
Sorry, something went wrong. Reload?
Sorry, we cannot display this file.
Sorry, this file is invalid so it cannot be displayed.
2 changes: 1 addition & 1 deletion setup.py
Original file line number Diff line number Diff line change
Expand Up @@ -67,7 +67,7 @@

setup(
name = 'datameta',
version = '1.1.0',
version = '1.1.1',
description = 'DataMeta - submission server for data and associated metadata',
long_description = README + '\n\n' + CHANGES,
author = 'Leon Kuchenbecker',
Expand Down
61 changes: 61 additions & 0 deletions tests/integration/fixtures/metadatasets_unique_not_mandatory.yaml
Original file line number Diff line number Diff line change
@@ -0,0 +1,61 @@
# Copyright 2021 Universität Tübingen, DKFZ and EMBL for the German Human Genome-Phenome Archive (GHGA)
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# https://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

metadataset_unique_not_mandatory_0:
class: MetaDataSet
attributes:
site_id: metadataset_unique_not_mandatory_0
records:
unique_not_mandatory_id: null
references:
user:
fixtureset: users
name: user_a
submission:
fixtureset: submissions
name: submission_a
fixtureOnly:
- records

metadataset_unique_not_mandatory_1:
class: MetaDataSet
attributes:
site_id: metadataset_unique_not_mandatory_1
records:
unique_not_mandatory_id: null
references:
user:
fixtureset: users
name: user_a
submission:
fixtureset: submissions
name: submission_a
fixtureOnly:
- records

metadataset_unique_not_mandatory_2:
class: MetaDataSet
attributes:
site_id: metadataset_unique_not_mandatory_2
records:
unique_not_mandatory_id: null
references:
user:
fixtureset: users
name: user_a
submission:
fixtureset: submissions
name: submission_b
fixtureOnly:
- records
24 changes: 24 additions & 0 deletions tests/integration/fixtures/metadatum_unique_not_mandatory.yaml
Original file line number Diff line number Diff line change
@@ -0,0 +1,24 @@
# Copyright 2021 Universität Tübingen, DKFZ and EMBL for the German Human Genome-Phenome Archive (GHGA)
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# https://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

unique_not_mandatory_id:
class: MetaDatum
attributes:
name: unique_not_mandatory_id
mandatory: false
example: "ID123"
order: 101
isfile: false
submission_unique: true
site_unique: true
Loading

0 comments on commit d774c22

Please sign in to comment.