diff --git a/cms/data/team.yml b/cms/data/team.yml index ce175c0667..fe599e2120 100644 --- a/cms/data/team.yml +++ b/cms/data/team.yml @@ -6,7 +6,6 @@ photo: alejandra.png bio: 'Alejandra has a Bachelor’s degree in Information Science and a Master’s degree in Digital Media. She has around ten years of experience in information management, knowledge management and scholarly communication at national and international level.' coi: - 2016: https://drive.google.com/file/d/0ByRf6PVViI-mVFhjdkZWaFJZOTQ/view?usp=sharing&resourcekey=0-gMdxxHXyyJB9zFZIuh99QQ 2018: https://drive.google.com/file/d/0ByRf6PVViI-mRlRYTDBPRlZiWTRxQ3VMTUZpQnZ5ZkwyLVQ4/view?usp=sharing&resourcekey=0-mlQ6rSCEnr6RfpCwh_4SMw 2020: https://drive.google.com/file/d/1PF7Cc9vGAwWGqpqDo7nRULjxWIF2NR_Q/view?usp=sharing 2022: https://drive.google.com/file/d/1E45ycyctDfYkh65ZCM8PMtQsYym_eP6L/view?usp=sharing @@ -16,7 +15,6 @@ photo: cenyu.jpg bio: 'Cenyu holds a PhD in Information Systems Science at Hanken School of Economics in Finland. She has spent around seven years on Open Access research with a particular focus on gold OA publishing concerning journals and publishers outside the mainstream. She was one of three DOAJ ambassadors for China from 2016 to 2017.' coi: - 2016: https://drive.google.com/file/d/0B0fPCpIPjZlmNHZCQmxpUmN6bUEtYUx2VHZnbjVySS1fRTlr/view?usp=sharing&resourcekey=0-1TRIV1MEQMhdGbmCd7CbOA 2020: https://drive.google.com/file/d/1rm9fjOF3OHJ9lR9wEUyQBQTO2KdoNQcE/view?usp=sharing 2022: https://drive.google.com/file/d/1Mn_CR0twKxyFbbHxsLSrgeU984BNOLlS/view?usp=sharing @@ -25,7 +23,6 @@ photo: clara.jpg bio: 'Clara has 10 years experience in the scholarly publishing industry. She worked at Cambridge University Press as an Open Access Project Manager until 2015. She also works in science communication as a freelancer at the University Pompeu Fabra, Barcelona. Clara speaks Spanish, Catalan, English and some German and French. She loves cetaceans, freediving, cycling, and is an enthusiastic cook.' coi: - 2016: https://drive.google.com/file/d/0ByRf6PVViI-mbDFybndLbldEbFE/view?usp=sharing&resourcekey=0-lKZNFwvUNdVAGKatvnKiPg 2018: https://drive.google.com/file/d/1LHmZSZ6bwf6U71fNvIibJa6R1lquNhfR/view?usp=sharing 2020: https://drive.google.com/file/d/1v4duxnoTcNo4UbL_GBa5D1T8JtTl7oY1/view?usp=sharing 2022: https://drive.google.com/file/d/1hevYxG1102llDy-_i-onwKbDuOlBguA_/view?usp=sharing @@ -33,9 +30,8 @@ - name: Dominic Mitchell role: Operations Manager photo: dominic.jpg - bio: 'Dominic has over 25 years experience working with publisher and library communities. He is responsible for operations and development of the DOAJ platform. He acts as Committee chair for the Think. Check. Submit. initiative, of which DOAJ is a founding organisation. He represents DOAJ in Project JASPER, a cross-industry project working to ensure that journals are preserved for the long term. He also sits on the OASPA Board of Directors and serves as Secretary. Outside of work, he is reluctantly becoming an expert in the playparks of Stockholm with his twin sons.' + bio: 'Dominic has over 25 years of experience working with publisher and library communities. He is responsible for operations and development of the DOAJ platform. He acts as Committee chair for the Think. Check. Submit. initiative, of which DOAJ is a founding organisation. He represents DOAJ in Project JASPER, a cross-industry project working to ensure that journals are preserved for the long term. He also sits on the OASPA Board of Directors and serves as Secretary. 
Outside of work, he is reluctantly becoming an expert in the playparks of Stockholm with his twin sons.' coi: - 2016: https://drive.google.com/file/d/0ByRf6PVViI-mWmU0UHZqZm1xcDQ/view?usp=sharing&resourcekey=0-BmQKwWn6Vb9ot73Xie66aA 2018: https://drive.google.com/file/d/13XX_GUrw2xRmXARjRrTxegULPT8Redka/view?usp=sharing 2020: https://drive.google.com/file/d/1nxFOuAdXLb8A-LulhNpz9i5vSmr5DBwF/view?usp=sharing 2022: https://drive.google.com/file/d/1HBF9RLaIt3lFNG6WDcV08fQSMS_C6zwA/view?usp=sharing @@ -43,7 +39,7 @@ - name: Gala García Reátegui role: Managing Editor photo: gala.jpg - bio: 'Gala holds a Masters Degree in Information and Documentation from Lyon 3 University in France. Prior to joining DOAJ, she worked for the Digital Strategy and Data Directorate at The French National Research Agency (ANR) and for the Open Archive HAL at the Center for Direct Scientific Communication (CCSD). Gala is Peruvian but lived for more than ten years in France. Today, she is based in Denmark. She loves meeting people from other cultures, trying local dishes or experiences: currently Gala goes winter bathing in the Limfjord, Denmark! She also loves running.' + bio: "Gala holds a Master's Degree in Information and Documentation from Lyon 3 University in France. Prior to joining DOAJ, she worked for the Digital Strategy and Data Directorate at The French National Research Agency (ANR) and for the Open Archive HAL at the Center for Direct Scientific Communication (CCSD). Gala is Peruvian but lived for more than ten years in France. Today, she is based in Denmark. She loves meeting people from other cultures, trying local dishes or experiences. Currently Gala goes winter bathing in the Limfjord, Denmark! She also loves running." coi: 2023: https://drive.google.com/file/d/1R7XquFauefdmtjPIsfGfAWQoAw7NLIci/view?usp=sharing @@ -58,16 +54,15 @@ - name: Joanna Ball role: Managing Director photo: joba.jpg - bio: 'Joanna has had over 25 years experience of working within research libraries in the UK and Denmark, most recently as Head of Roskilde University Library, before joining DOAJ in 2022. She has also been involved with UKSG as Chair of Insights Editorial Board and a Trustee, and is currently Vice Chair. Joanna lives with her family in Roskilde and enjoys running in her spare time.' + bio: 'Joanna has over 25 years of experience working within research libraries in the UK and Denmark, most recently as Head of Roskilde University Library, before joining DOAJ in 2022. She has also been involved with UKSG as Chair of Insights Editorial Board and a Trustee, and is currently Vice Chair. Joanna lives with her family in Roskilde and enjoys running in her spare time.' coi: 2022: https://drive.google.com/file/d/1-3xzwkHMclREgLhj_XNF5n6Nr4q2_bnw/view?usp=sharing - name: Judith Barnsby - role: Senior Managing Editor + role: Head of Editorial photo: judith.jpg - bio: 'Judith has 25 years experience in the scholarly publishing industry, working for a range of non-profit society publishers and service providers before joining DOAJ. She has a keen interest in publishing standards and protocols, and has served on the board of CLOCKSS and as chair of the PALS (publisher and library solutions) working group in the UK. Judith loves books, especially detective fiction, and volunteers in her local library.' + bio: 'Judith has 25 years of experience in the scholarly publishing industry, working for a range of non-profit society publishers and service providers before joining DOAJ. 
She has a keen interest in publishing standards and protocols, and has served on the board of CLOCKSS and as chair of the PALS (publisher and library solutions) working group in the UK. Judith loves books, especially detective fiction, and volunteers in her local library.' coi: - 2016: https://drive.google.com/file/d/0B0fPCpIPjZlmb3JmVkFYbjN5aTh1OUhLd2lZaEV0ZlFwbTZV/view?usp=sharing&resourcekey=0-o_PXKLk5UFbPk_-4B61jVA 2018: https://drive.google.com/file/d/0ByRf6PVViI-mV2lfMjByQjYxUkpMcXhuc2l5Q3ZDWlpiYUtZ/view?usp=sharing&resourcekey=0-6eiGIRal00eXvgJUTeN_lw 2020: https://drive.google.com/file/d/18MWTsze4cDQQRPHJl2XrYgHQvlxhsPZa/view?usp=sharing 2023: https://drive.google.com/file/d/1hUsVIY09N6WceSx1edTM-h516CJGkHcu/view?usp=share_link @@ -77,7 +72,6 @@ photo: Kamel.jpg bio: 'Kamel is Full Professor of Chemistry at the University of Bejaia, Algeria (ORCID). He gained his PhD in Process Engineering and Chemistry of Materials Science at the University of Setif, Algeria. Kamel joined DOAJ in 2016 as an Ambassador for North Africa. He is currently Creative Commons Algeria Chapter lead, director of the Laboratory of Organic Materials at the University of Bejaia and editor-in-chief of Algerian Journal of Natural Products. His scientific activity is focused on chemistry of Natural Products, scholarly communications and new developments in academic publishing. Father of 3 daughters, he likes travelling, healthy local foods & home-made snacks.' coi: - 2016: https://drive.google.com/file/d/0B0fPCpIPjZlmVEN4X1Q0RDdCams1NXhveW1HQmtMYU56bDE4/view?usp=sharing&resourcekey=0-wA1CGAbjB6FAX33gCDQmrA 2018: https://drive.google.com/file/d/1JdF2kh-fLXz8kPGN_3ijDt5y9K6s0hOQ/view?usp=sharing 2020: https://drive.google.com/file/d/1iXrjwLTNBXwKD2TwrPD9ApKL7O6uZ8Z7/view?usp=sharing 2022: https://drive.google.com/file/d/1cl18h_mYnNogYs8Rk-fhBTW6WKOTC2IF/view?usp=sharing @@ -85,7 +79,7 @@ - name: Katrine Sundsbø role: Community Manager photo: katrine.jpeg - bio: 'Katrine holds a Master’s degree in Cognitive Neuroscience, and has five years of experience in the field of scholarly communications. She has been an advocate for open access and visibility of research through various working groups, projects and through gamification of scholarly communications. Though Katrine is half Danish and half Norwegian, her son is named after a Swedish singer - and her British husband suggested the name!' + bio: "Katrine holds a Master’s degree in Cognitive Neuroscience, and has five years of experience in the field of scholarly communications. She has been an advocate for open access and visibility of research through various working groups, projects and through gamification of scholarly communications. Though Katrine is half Danish and half Norwegian, her son is named after a Swedish singer - and her British husband suggested the name!" coi: 2023: https://drive.google.com/file/d/1yqK-Znq62T_QR_JjtcpQl6W_Ian2Ti4F/view?usp=share_link @@ -94,7 +88,6 @@ photo: lars.jpg bio: 'Lars worked at Danish university libraries for two decades and was Director of Libraries at Lund University, Sweden from 2001 to 2011. He founded the DOAJ in 2003, and was Managing Director from 2013-2021. He has vast experience in change management, re-engineering of academic libraries, and development of information services for research & higher education. For two decades Lars has been a strong advocate of open access and for providing services to the open access movement. He is co-founder of OpenDOAR, the Directory of Open Access Books and Think. Check. Submit. 
Lars lives outside Copenhagen, and is married with 4 children and 4 grandchildren. He enjoys vegetable gardening, growing cacti and succulents, and playing internet chess.' coi: - 2016: https://drive.google.com/file/d/0ByRf6PVViI-mbmo2aU9NWkx5dGs/view?usp=sharing&resourcekey=0-mpdRgVU9UlFjC614-woDvg 2018: https://drive.google.com/file/d/1mm1a8nbY5MQX9loqIs2ZQuVN-73RfPuN/view?usp=sharing 2021: https://drive.google.com/file/d/1bNj5sqUsu4sRLmm_YOuh3JCSMERzQ1Ro/view?usp=sharing 2022: https://drive.google.com/file/d/1fRJtvci2_j4vad0C5N1pfqm2sHZQkFz3/view?usp=sharing @@ -104,7 +97,6 @@ photo: leena.jpg bio: "Leena joined the DOAJ team in 2016 as an Ambassador for India before becoming a Managing Editor. Prior to joining DOAJ she worked as a science librarian at Nanyang Technological University, Singapore, where she developed a keen interest in scholarly communication & open science. A recent addition to her interests is artificial intelligence in scholarly communication. Leena holds a Master’s degree in Information Studies and lives in Singapore. She loves watching sci-fi shows and is enthusiastic about travelling to new places." coi: - 2016: https://drive.google.com/file/d/0B0fPCpIPjZlmTHZuaEtMSDNIeUpKT2Fid19jVjVFTkRoUmdj/view?usp=sharing&resourcekey=0-KqvRVa30bQEUfqO-YA1L-g 2018: https://drive.google.com/file/d/1tifEjAIlU3txBw9DjIcRW9cZL7YG7_nU/view?usp=sharing 2020: https://drive.google.com/file/d/1zU-lLB5W54E_QUm5uto5tqB6cZl83TAJ/view?usp=sharing 2022: https://drive.google.com/file/d/19rw-naMJqHkI5T7aDIDPUkwPutBdDpDm/view?usp=sharing @@ -151,7 +143,6 @@ photo: Rikard.jpg bio: 'Rikard has a Bachelor of Arts degree with a Major in Cultural Sciences and a specialization in publishing. He enjoys reading about philosophy and religion.' coi: - 2016: https://drive.google.com/file/d/0ByRf6PVViI-mdnJPdldOM0hUMFU/view?usp=sharing&resourcekey=0-8dJAtvm2n7vXV9NhqZYckw 2018: https://drive.google.com/file/d/1tOnW8L6TwolyLpIXwMKTITf9wGh_ukLb/view?usp=sharing 2020: https://drive.google.com/file/d/14c0RgpyD2Slzyh5s8LGvj5OwWbL4H8NX/view?usp=sharing 2023: https://drive.google.com/file/d/1HQIh1DlfhEutTWniXDGLYFVa9VxJ4OT9/view?usp=share_link @@ -161,7 +152,6 @@ photo: sonja.jpg bio: 'Sonja is a former Information Librarian from Lund University Library. She has a B.A. in English, Bulgarian and Russian from Lund University and specialises in applications for journals in the Slavonic languages.' coi: - 2016: https://drive.google.com/file/d/0ByRf6PVViI-mNUFoZWV4YnZ3bDg/view?usp=sharing&resourcekey=0-1JRid_DHRMKbgdzmVYL7NQ 2018: https://drive.google.com/file/d/1M5AGEDP79uk2olCcmVYjKCsmzL7tG2Vc/view?usp=sharing 2020: https://drive.google.com/file/d/1-4RJYScTs_zMBeD5zESNvCoIBCWTOWHR/view?usp=sharing 2022: https://drive.google.com/file/d/1soZtiW6gyVJPl7P_J60j2TL2Fqzl0QAs/view?usp=sharing @@ -178,7 +168,6 @@ photo: tom.jpg bio: 'Tom has a PhD in molecular microbiology and spent several years in Africa doing research on malaria, sleeping sickness and meningococcal epidemics. He has been actively advocating open access and open science since 2012 when he joined the Open Knowledge community and became a member of the DOAJ advisory board. His current research interests are development of quality systems for the assessment of scholarly journals and articles, and research in the area of soil microbiology in relation to soil health and human health.' 
coi: - 2016: https://drive.google.com/file/d/0ByRf6PVViI-mYUFZNDRISTZodUU/view?usp=sharing&resourcekey=0-g13FJaUJpdR_t2rMLEyzEQ 2018: https://drive.google.com/file/d/1x0w-a1TWQdJDKPtQpGhmDZSdA4BhFSpI/view?usp=sharing 2020: https://drive.google.com/file/d/1VyirUdc6FBNOujl938bHf1JCL1jLNwXV/view?usp=sharing 2022: https://drive.google.com/file/d/1ww7WHQEg1395bPn20Arb7LJn9lIROdBl/view?usp=sharing diff --git a/cms/sass/components/_form.scss b/cms/sass/components/_form.scss index b203ff5988..fd97423f9f 100644 --- a/cms/sass/components/_form.scss +++ b/cms/sass/components/_form.scss @@ -82,7 +82,7 @@ border-left: 1px solid $sanguine; } -.form__long-help { +.form__long-help, .form__click-to-copy { cursor: pointer; &:hover { diff --git a/cms/sass/components/_tag.scss b/cms/sass/components/_tag.scss index 1f24ebce92..836e55c7ef 100644 --- a/cms/sass/components/_tag.scss +++ b/cms/sass/components/_tag.scss @@ -90,3 +90,8 @@ color: $white; } } + +.tag--confirmation { + background: $dark-green; + color: $white; +} diff --git a/deploy/doaj_gunicorn_config.py b/deploy/doaj_gunicorn_config.py index f9425de5e5..a08dd6ef62 100644 --- a/deploy/doaj_gunicorn_config.py +++ b/deploy/doaj_gunicorn_config.py @@ -1,7 +1,7 @@ import multiprocessing bind = "0.0.0.0:5050" -workers = multiprocessing.cpu_count() * 8 + 1 +workers = multiprocessing.cpu_count() * 6 + 1 proc_name = 'doaj' max_requests = 1000 @@ -13,4 +13,4 @@ max_requests_jitter = 100 timeout = 40 -graceful_timeout = 40 \ No newline at end of file +graceful_timeout = 40 diff --git a/deploy/lambda/alert_backups_missing.py b/deploy/lambda/alert_backups_missing.py index 38a9edbc2e..566a361b9a 100644 --- a/deploy/lambda/alert_backups_missing.py +++ b/deploy/lambda/alert_backups_missing.py @@ -1,5 +1,5 @@ #!/usr/bin/env python3 -""" Steven Eardley 2020-02-07 for DOAJ - uploaded manually (todo: we should upload this in the release script) """ +""" Steven Eardley 2023-09-15 for DOAJ - uploaded manually (todo: we should upload this in the release script) """ # ~~BackupsMissing:Monitoring->Lambda:Technology~~ @@ -8,23 +8,25 @@ import json from datetime import datetime, timezone, timedelta -from portality.lib.dates import FMT_DATETIME_STD - s3 = boto3.client('s3') # Check the doaj elasticsearch snapshot bucket has been updated today (should happen daily at 0600 via background job) -buckets = ['doaj-index-backups'] +buckets = ['doaj-index-ipt-backups'] + # Check the doaj-nginx logs bucket has been updated today (should happen daily at 0630 via cron logrotate) -buckets += ['doaj-nginx-logs'] +# buckets += ['doaj-nginx-logs'] def lambda_handler(event, context): """ The main function executed by Lambda""" + start = datetime.utcnow() summary = {'success': [], 'fail': []} for b in buckets: + print('Checking bucket {0} was updated today'.format(b)) + # First check the bucket actually exists try: s3.head_bucket(Bucket=b) @@ -32,11 +34,13 @@ def lambda_handler(event, context): error_code = int(e.response['Error']['Code']) if error_code == 404: send_alert_email(b, last_mod=None) + raise # Then check the expected entry exists in the bucket's objects. files = list_bucket_keys(bucket_name=b) old_to_new = sorted(files, key=lambda f: f['LastModified']) newest = old_to_new[-1] + print('Latest backup is', newest) # If the newest file is older than 1 day old, our backups are not up to date. 
if datetime.now(timezone.utc) - newest['LastModified'] > timedelta(days=1): @@ -47,6 +51,8 @@ def lambda_handler(event, context): summary['success'].append(b) print(summary) # For the CloudWatch logs + print('Completed in', str(datetime.utcnow() - start)) + return str(summary) @@ -86,8 +92,8 @@ def send_alert_email(bucket, last_mod): msg = 'AWS backup error: bucket {b} is missing.'.format(b=bucket) else: msg = 'AWS backup error: bucket {b} has not been updated today - it was last modified on {t}.' \ - '\nYou may wish to check the corresponding logs.'.format(b=bucket, - t=last_mod.strftime(FMT_DATETIME_STD)) + '\nYou may wish to check the corresponding logs.'.format(b=bucket, t=last_mod.strftime( + '%Y-%m-%dT%H:%M:%SZ')) r = botocore.vendored.requests.post('https://api.mailgun.net/v3/doaj.org/messages', auth=('api', credentials.get('ERROR_MAIL_API_KEY', '')), diff --git a/deploy/nginx/doaj b/deploy/nginx/doaj index b52db3aea0..4e6c3b0576 100644 --- a/deploy/nginx/doaj +++ b/deploy/nginx/doaj @@ -36,17 +36,27 @@ map $http_user_agent $block_ua { ~*curl 1; } +# the public server (deprecated, use failover) upstream doaj_apps { - server 10.131.191.139:5050; + server 10.131.191.139:5050; #doaj-public-app-1 } + +# Background server runs async tasks upstream doaj_bg_apps { - #server 10.131.56.133:5050; #old bg machine - server 10.131.12.33:5050; + server 10.131.12.33:5050; #doaj-background-app-1 +} + +# Editor and admin site components +upstream doaj_ed_failover { + server 10.131.56.133:5050; #doaj-editor-app-1 + server 10.131.12.33:5050 backup; #doaj-background-app-1 } + +# For public site components, try all servers upstream doaj_apps_failover { - server 10.131.191.139:5050; - #server 10.131.56.133:5050 backup; #old bg machine - server 10.131.12.33:5050 backup; + server 10.131.191.139:5050; #doaj-public-app-1 + server 10.131.12.33:5050 backup; #doaj-background-app-1 + server 10.131.56.133:5050 backup; #doaj-editor-app-1 } upstream doaj_index { server 10.131.191.132:9200; @@ -121,6 +131,7 @@ server { proxy_set_header X-Forwarded-Proto $scheme; proxy_buffering off; } + location /search { if ($block_ua) {return 403;} limit_req zone=general burst=10 nodelay; @@ -144,9 +155,7 @@ server { proxy_buffering off; } - # for now we are going to send all login functions to the bg machine - # technically ONLY the routes that require file upload need to go to the bg machine - # but we think it is handy to separate them out, and later we could send them to other machines + # technically only the routes that require file upload need to go to the bg machine, but separate for consistency location /account { limit_req zone=general burst=10 nodelay; proxy_pass http://doaj_bg_apps; @@ -157,6 +166,19 @@ server { proxy_set_header X-Forwarded-Proto $scheme; proxy_buffering off; } + + # prefer the editor machine for application form work (but application_quick_reject goes to background async) + location ~* /admin/application/ { + limit_req zone=general burst=10 nodelay; + proxy_pass http://doaj_ed_failover; + proxy_redirect off; + proxy_set_header Host $host; + proxy_set_header X-Real-IP $remote_addr; + proxy_set_header X-Forwarded-For $proxy_add_x_forwarded_for; + proxy_set_header X-Forwarded-Proto $scheme; + proxy_buffering off; + } + location /admin { # there are admin bulk actions that MUST go to bg machine limit_req zone=general burst=10 nodelay; proxy_pass http://doaj_bg_apps; @@ -167,9 +189,10 @@ server { proxy_set_header X-Forwarded-Proto $scheme; proxy_buffering off; } + location /editor { limit_req zone=general 
burst=10 nodelay; - proxy_pass http://doaj_bg_apps; + proxy_pass http://doaj_ed_failover; proxy_redirect off; proxy_set_header Host $host; proxy_set_header X-Real-IP $remote_addr; @@ -177,9 +200,10 @@ server { proxy_set_header X-Forwarded-Proto $scheme; proxy_buffering off; } + location /journal/readonly { limit_req zone=general burst=10 nodelay; - proxy_pass http://doaj_bg_apps; + proxy_pass http://doaj_ed_failover; proxy_redirect off; proxy_set_header Host $host; proxy_set_header X-Real-IP $remote_addr; @@ -187,7 +211,8 @@ server { proxy_set_header X-Forwarded-Proto $scheme; proxy_buffering off; } - location /publisher { # only /publisher/uploadfile MUST go to bg, and /publisher/uploadFile + + location /publisher { # only /publisher/uploadfile MUST go to background limit_req zone=general burst=10 nodelay; proxy_pass http://doaj_bg_apps; proxy_redirect off; @@ -197,7 +222,8 @@ server { proxy_set_header X-Forwarded-Proto $scheme; proxy_buffering off; } - location /service { + + location /service { # performs locks etc - handle on the background server limit_req zone=general burst=10 nodelay; proxy_pass http://doaj_bg_apps; proxy_redirect off; @@ -221,6 +247,7 @@ server { proxy_set_header X-Forwarded-Proto $scheme; proxy_buffering off; } + location /csv { limit_req zone=general burst=10 nodelay; proxy_pass http://doaj_bg_apps; @@ -235,6 +262,7 @@ server { location =/robots.txt { alias /home/cloo/doaj/src/doaj/deploy/robots-production.txt; } + location /static/ { alias /home/cloo/doaj/src/doaj/portality/static/; autoindex off; diff --git a/doajtest/matrices/article_create_article/issn_validation_against_journal.matrix.csv b/doajtest/matrices/article_create_article/issn_validation_against_journal.matrix.csv new file mode 100644 index 0000000000..0d2f704aba --- /dev/null +++ b/doajtest/matrices/article_create_article/issn_validation_against_journal.matrix.csv @@ -0,0 +1,17 @@ +test_id,eissn,pissn,validated +1,eissn_in_doaj,pissn_in_doaj,yes +2,eissn_in_doaj,eissn_not_in_doaj, +3,eissn_in_doaj,pissn_not_in_doaj, +4,eissn_in_doaj,!eissn_in_doaj, +5,pissn_in_doaj,eissn_in_doaj, +6,pissn_in_doaj,eissn_not_in_doaj, +7,pissn_in_doaj,pissn_not_in_doaj, +8,pissn_in_doaj,!pissn_in_doaj, +9,eissn_not_in_doaj,eissn_in_doaj, +10,eissn_not_in_doaj,pissn_in_doaj, +11,eissn_not_in_doaj,pissn_not_in_doaj, +12,eissn_not_in_doaj,!eissn_not_in_doaj, +13,pissn_not_in_doaj,eissn_in_doaj, +14,pissn_not_in_doaj,pissn_in_doaj, +15,pissn_not_in_doaj,eissn_not_in_doaj, +16,pissn_not_in_doaj,!pissn_not_in_doaj, diff --git a/doajtest/matrices/article_create_article/issn_validation_against_journal.settings.csv b/doajtest/matrices/article_create_article/issn_validation_against_journal.settings.csv new file mode 100644 index 0000000000..a8eab3f4ce --- /dev/null +++ b/doajtest/matrices/article_create_article/issn_validation_against_journal.settings.csv @@ -0,0 +1,19 @@ +field,test_id,eissn,pissn,validated +type,index,generated,generated,conditional +deafult,,,,no +,,,, +values,,eissn_in_doaj,eissn_in_doaj,yes +values,,pissn_in_doaj,pissn_in_doaj,no +values,,eissn_not_in_doaj,eissn_not_in_doaj, +values,,pissn_not_in_doaj,pissn_not_in_doaj, +,,,, +,,,, +conditional validated,,eissn_in_doaj,pissn_in_doaj,yes +constraint eissn,,eissn_in_doaj,!eissn_in_doaj, +constraint eissn,,eissn_not_in_doaj,!eissn_not_in_doaj, +constraint eissn,,pissn_not_in_doaj,!pissn_not_in_doaj, +constraint eissn,,pissn_in_doaj,!pissn_in_doaj, +constraint pissn,,eissn_in_doaj,!eissn_in_doaj, +constraint pissn,,eissn_not_in_doaj,!eissn_not_in_doaj, 
+constraint pissn,,pissn_not_in_doaj,!pissn_not_in_doaj, +constraint pissn,,pissn_in_doaj,!pissn_in_doaj, \ No newline at end of file diff --git a/doajtest/matrices/article_create_article/issn_validation_against_journal.settings.json b/doajtest/matrices/article_create_article/issn_validation_against_journal.settings.json new file mode 100644 index 0000000000..11d1012a96 --- /dev/null +++ b/doajtest/matrices/article_create_article/issn_validation_against_journal.settings.json @@ -0,0 +1,119 @@ +{ + "parameters": [ + { + "name": "test_id", + "type": "index" + }, + { + "name": "eissn", + "type": "generated", + "values": { + "eissn_in_doaj": { + "constraints": { + "pissn": { + "nor": [ + "eissn_in_doaj" + ] + } + } + }, + "pissn_in_doaj": { + "constraints": { + "pissn": { + "nor": [ + "pissn_in_doaj" + ] + } + } + }, + "eissn_not_in_doaj": { + "constraints": { + "pissn": { + "nor": [ + "eissn_not_in_doaj" + ] + } + } + }, + "pissn_not_in_doaj": { + "constraints": { + "pissn": { + "nor": [ + "pissn_not_in_doaj" + ] + } + } + } + } + }, + { + "name": "pissn", + "type": "generated", + "values": { + "eissn_in_doaj": {}, + "pissn_in_doaj": {}, + "eissn_not_in_doaj": {}, + "pissn_not_in_doaj": {}, + "!eissn_in_doaj": { + "constraints": { + "eissn": { + "or": [ + "eissn_in_doaj" + ] + } + } + }, + "!eissn_not_in_doaj": { + "constraints": { + "eissn": { + "or": [ + "eissn_not_in_doaj" + ] + } + } + }, + "!pissn_not_in_doaj": { + "constraints": { + "eissn": { + "or": [ + "pissn_not_in_doaj" + ] + } + } + }, + "!pissn_in_doaj": { + "constraints": { + "eissn": { + "or": [ + "pissn_in_doaj" + ] + } + } + } + } + }, + { + "name": "validated", + "type": "conditional", + "values": { + "yes": { + "conditions": [ + { + "eissn": { + "or": [ + "eissn_in_doaj" + ] + }, + "pissn": { + "or": [ + "pissn_in_doaj" + ] + } + } + ] + }, + "no": {} + } + } + ] +} \ No newline at end of file diff --git a/doajtest/testbook/journal_form/associate_form.yml b/doajtest/testbook/journal_form/associate_form.yml index cd333cbc2d..8a9e68b11c 100644 --- a/doajtest/testbook/journal_form/associate_form.yml +++ b/doajtest/testbook/journal_form/associate_form.yml @@ -70,4 +70,10 @@ tests: - step: Attempt to click the "Remove" button results: - You are unable to delete the note + - step: Click "copy" button next to one of the fields (eg. Title) + results: + - Confirmation with fields value is displayed for 3 seconds + - step: Attempt to paste the value (use separate editor) + results: + - Correct value is pasted diff --git a/doajtest/testbook/journal_form/editor_form.yml b/doajtest/testbook/journal_form/editor_form.yml index 16b78e2c77..747bd3f81d 100644 --- a/doajtest/testbook/journal_form/editor_form.yml +++ b/doajtest/testbook/journal_form/editor_form.yml @@ -80,3 +80,9 @@ tests: - step: Attempt to click the "Remove" button results: - You are unable to delete the note + - step: Click "copy" button next to one of the fields (eg. Title) + results: + - Confirmation with fields value is displayed for 3 seconds + - step: Attempt to paste the value (use separate editor) + results: + - Correct value is pasted diff --git a/doajtest/testbook/journal_form/maned_form.yml b/doajtest/testbook/journal_form/maned_form.yml index 0a05f70530..935fe4504f 100644 --- a/doajtest/testbook/journal_form/maned_form.yml +++ b/doajtest/testbook/journal_form/maned_form.yml @@ -120,3 +120,9 @@ tests: - step: Attempt to click the "Remove" button results: - You are unable to delete the note + - step: Click "copy" button next to one of the fields (eg. 
Title) + results: + - Confirmation with fields value is displayed for 3 seconds + - step: Attempt to paste the value (use separate editor) + results: + - Correct value is pasted diff --git a/doajtest/testbook/new_application_form/associate_editor_form.yml b/doajtest/testbook/new_application_form/associate_editor_form.yml index 366fac92c4..f9f9fd4619 100644 --- a/doajtest/testbook/new_application_form/associate_editor_form.yml +++ b/doajtest/testbook/new_application_form/associate_editor_form.yml @@ -63,3 +63,9 @@ tests: - step: Attempt to click the "Remove" button results: - You are unable to delete the note + - step: Click "copy" button next to one of the fields (eg. Title) + results: + - Confirmation with fields value is displayed for 3 seconds + - step: Attempt to paste the value (use separate editor) + results: + - Correct value is pasted diff --git a/doajtest/testbook/new_application_form/editor_form.yml b/doajtest/testbook/new_application_form/editor_form.yml index cd9b8edf3d..b9db0066f7 100644 --- a/doajtest/testbook/new_application_form/editor_form.yml +++ b/doajtest/testbook/new_application_form/editor_form.yml @@ -64,4 +64,10 @@ tests: - you are unable to edit the note - step: Attempt to click the "Remove" button results: - - You are unable to delete the note \ No newline at end of file + - You are unable to delete the note + - step: Click "copy" button next to one of the fields (eg. Title) + results: + - Confirmation with fields value is displayed for 3 seconds + - step: Attempt to paste the value (use separate editor) + results: + - Correct value is pasted \ No newline at end of file diff --git a/doajtest/testbook/new_application_form/maned_form.yml b/doajtest/testbook/new_application_form/maned_form.yml index 907791691b..98dc0211f6 100644 --- a/doajtest/testbook/new_application_form/maned_form.yml +++ b/doajtest/testbook/new_application_form/maned_form.yml @@ -95,3 +95,9 @@ tests: - step: Attempt to click the "Remove" button results: - You are unable to delete the note + - step: Click "copy" button next to one of the fields (eg. 
Title) + results: + - Confirmation with fields value is displayed for 3 seconds + - step: Attempt to paste the value (use separate editor) + results: + - Correct value is pasted diff --git a/doajtest/unit/application_processors/test_application_processor_emails.py b/doajtest/unit/application_processors/test_application_processor_emails.py index 15df1fd727..036c86c68a 100644 --- a/doajtest/unit/application_processors/test_application_processor_emails.py +++ b/doajtest/unit/application_processors/test_application_processor_emails.py @@ -65,13 +65,14 @@ def editor_account_pull(self, _id): ACTUAL_ACCOUNT_PULL = models.Account.pull # A regex string for searching the log entries -email_log_regex = 'template.*%s.*to:\[u{0,1}\'%s.*subject:.*%s' +email_log_regex = r'template.*%s.*to:\[u{0,1}\'%s.*subject:.*%s' # A string present in each email log entry (for counting them) email_count_string = 'Email template' NOTIFICATIONS_INTERCEPT = [] + class TestPublicApplicationEmails(DoajTestCase): def setUp(self): super(TestPublicApplicationEmails, self).setUp() diff --git a/doajtest/unit/test_article_acceptable_and_permissions.py b/doajtest/unit/test_article_acceptable_and_permissions.py index eb4c04d4fb..5e0328635f 100644 --- a/doajtest/unit/test_article_acceptable_and_permissions.py +++ b/doajtest/unit/test_article_acceptable_and_permissions.py @@ -14,6 +14,11 @@ def is_acceptable_load_cases(): "test_id", {"test_id": []}) +def issn_validation_against_journal_load_sets(): + return load_parameter_sets(rel2abs(__file__, "..", "matrices", "article_create_article"), "issn_validation_against_journal", + "test_id", + {"test_id": []}) + class TestBLLPrepareUpdatePublisher(DoajTestCase): @@ -110,4 +115,73 @@ def test_has_permissions(self): assert failed_result["unowned"].sort() == [pissn, eissn].sort() # assert failed_result == {'success': 0, 'fail': 1, 'update': 0, 'new': 0, 'shared': [], # 'unowned': [pissn, eissn], - # 'unmatched': []}, "received: {}".format(failed_result) \ No newline at end of file + # 'unmatched': []}, "received: {}".format(failed_result) + + + @parameterized.expand(issn_validation_against_journal_load_sets) + def test_issn_validation_against_journal_load_sets(self, value, kwargs): + kwpissn = kwargs.get("pissn") + kweissn = kwargs.get("eissn") + validated = kwargs.get("validated") + + js = JournalFixtureFactory.make_many_journal_sources(2) + journal_in_doaj = Journal(**js[0]) + journal_in_doaj.set_in_doaj(True) + journal_in_doaj.bibjson().pissn = "1111-1111" + journal_in_doaj.bibjson().eissn = "2222-2222" + journal_in_doaj.save(blocking=True) + + journal_not_in_doaj = Journal(**js[1]) + journal_not_in_doaj.set_in_doaj(False) + journal_not_in_doaj.bibjson().pissn = "3333-3333" + journal_not_in_doaj.bibjson().eissn = "4444-4444" + journal_not_in_doaj.save(blocking=True) + + if (kwpissn == "pissn_in_doaj"): + pissn = journal_in_doaj.bibjson().pissn + elif (kwpissn == "eissn_in_doaj"): + pissn = journal_in_doaj.bibjson().eissn + elif (kwpissn == "pissn_not_in_doaj"): + pissn = journal_not_in_doaj.bibjson().pissn + else: + pissn = journal_not_in_doaj.bibjson().eissn + + if (kweissn == "pissn_in_doaj"): + eissn = journal_in_doaj.bibjson().pissn + elif (kweissn == "eissn_in_doaj"): + eissn = journal_in_doaj.bibjson().eissn + elif (kweissn == "pissn_not_in_doaj"): + eissn = journal_not_in_doaj.bibjson().pissn + else: + eissn = journal_not_in_doaj.bibjson().eissn + + + art_source = ArticleFixtureFactory.make_article_source(pissn=pissn, eissn=eissn) + article = Article(**art_source) + + if validated: 
+ self.assertIsNone(self.svc.is_acceptable(article)) + + else: + with self.assertRaises(exceptions.ArticleNotAcceptable): + self.svc.is_acceptable(article) + + def test_check_validation_for_2_journals(self): + + js = JournalFixtureFactory.make_many_journal_sources(2, in_doaj=True) + journal_in_doaj = Journal(**js[0]) + journal_in_doaj.bibjson().pissn = "1111-1111" + journal_in_doaj.bibjson().eissn = "2222-2222" + journal_in_doaj.save(blocking=True) + + journal_not_in_doaj = Journal(**js[1]) + journal_not_in_doaj.bibjson().pissn = "3333-3333" + journal_not_in_doaj.bibjson().eissn = "4444-4444" + journal_not_in_doaj.save(blocking=True) + + + art_source = ArticleFixtureFactory.make_article_source(pissn="1111-1111", eissn="4444-4444") + article = Article(**art_source) + + with self.assertRaises(exceptions.ArticleNotAcceptable): + self.svc.is_acceptable(article) \ No newline at end of file diff --git a/doajtest/unit/test_formrender.py b/doajtest/unit/test_formrender.py index 1d655420af..f57ee83a61 100644 --- a/doajtest/unit/test_formrender.py +++ b/doajtest/unit/test_formrender.py @@ -6,11 +6,16 @@ # Form context for basic test ################################################################ + class TestForm(Form): + __test__ = False # Prevent collection by PyTest one = StringField("One") two = StringField("Two") + class TestRenderer(Renderer): + __test__ = False # Prevent collection by PyTest + def __init__(self): super(TestRenderer, self).__init__() self.FIELD_GROUPS = { @@ -20,7 +25,10 @@ def __init__(self): ] } + class TestContext(FormContext): + __test__ = False # Prevent collection by PyTest + def data2form(self): self.form = TestForm(formdata=self.form_data) diff --git a/doajtest/unit/test_models.py b/doajtest/unit/test_models.py index 5551cdcf5d..06175e6d76 100644 --- a/doajtest/unit/test_models.py +++ b/doajtest/unit/test_models.py @@ -1661,3 +1661,30 @@ def test_get_name_safe(self): # account does not exist assert models.Account.get_name_safe('not existing account id') == '' + def test_11_find_by_issn(self): + js = JournalFixtureFactory.make_many_journal_sources(2, in_doaj=True) + j1 = models.Journal(**js[0]) + j1.bibjson().pissn = "1111-1111" + j1.bibjson().eissn = "2222-2222" + j1.save(blocking=True) + + j2 = models.Journal(**js[1]) + j2.bibjson().pissn = "3333-3333" + j2.bibjson().eissn = "4444-4444" + j2.save(blocking=True) + + journals = models.Journal.find_by_issn(["1111-1111", "2222-2222"], True) + assert len(journals) == 1 + assert journals[0].id == j1.id + + journals = models.Journal.find_by_issn(["1111-1111", "3333-3333"], True) + assert len(journals) == 2 + assert journals[0].id == j1.id + assert journals[1].id == j2.id + + journals = models.Journal.find_by_issn_exact(["1111-1111", "2222-2222"], True) + assert len(journals) == 1 + assert journals[0].id == j1.id + + journals = models.Journal.find_by_issn_exact(["1111-1111", "3333-3333"], True) + assert len(journals) == 0 \ No newline at end of file diff --git a/doajtest/unit/test_oaipmh.py b/doajtest/unit/test_oaipmh.py index b65d319bd0..bab8102499 100644 --- a/doajtest/unit/test_oaipmh.py +++ b/doajtest/unit/test_oaipmh.py @@ -245,7 +245,7 @@ def test_06_identify(self): records = t.xpath('/oai:OAI-PMH/oai:Identify', namespaces=self.oai_ns) assert len(records) == 1 assert records[0].xpath('//oai:repositoryName', namespaces=self.oai_ns)[0].text == 'Directory of Open Access Journals' - assert records[0].xpath('//oai:adminEmail', namespaces=self.oai_ns)[0].text == 'sysadmin@cottagelabs.com' + assert 
records[0].xpath('//oai:adminEmail', namespaces=self.oai_ns)[0].text == 'helpdesk+oai@doaj.org' assert records[0].xpath('//oai:granularity', namespaces=self.oai_ns)[0].text == 'YYYY-MM-DDThh:mm:ssZ' def test_07_bad_verb(self): diff --git a/portality/bll/services/article.py b/portality/bll/services/article.py index 7b55894d24..777d499636 100644 --- a/portality/bll/services/article.py +++ b/portality/bll/services/article.py @@ -159,9 +159,6 @@ def _validate_issns(article_bibjson: models.ArticleBibJSON): if len(pissn) > 1 or len(eissn) > 1: raise exceptions.ArticleNotAcceptable(message=Messages.EXCEPTION_TOO_MANY_ISSNS) - pissn = article_bibjson.get_one_identifier("pissn") - eissn = article_bibjson.get_one_identifier("eissn") - # no pissn or eissn if not pissn and not eissn: raise exceptions.ArticleNotAcceptable(message=Messages.EXCEPTION_NO_ISSNS) @@ -252,7 +249,8 @@ def has_permissions(self, account, article, limit_to_account): def is_acceptable(self, article: models.Article): """ conduct some deep validation on the article to make sure we will accept it - or the moment, this just means making sure it has a DOI and a fulltext + this just means making sure it has a DOI and a fulltext, and that its ISSNs + match a single journal """ try: bj = article.bibjson() @@ -266,12 +264,39 @@ def is_acceptable(self, article: models.Article): raise exceptions.ArticleNotAcceptable(message=Messages.EXCEPTION_NO_DOI_NO_FULLTEXT) self._validate_issns(bj) + journal = self.match_journal_with_validation(bj) # is journal in doaj (we do this check last as it has more performance impact) - journal = article.get_journal() if journal is None or not journal.is_in_doaj(): raise exceptions.ArticleNotAcceptable(message=Messages.EXCEPTION_ADDING_ARTICLE_TO_WITHDRAWN_JOURNAL) + @staticmethod + def match_journal_with_validation(article_bibjson: models.ArticleBibJSON): + pissn = article_bibjson.get_one_identifier("pissn") + eissn = article_bibjson.get_one_identifier("eissn") + + issns = [] + + if pissn is not None: + issns.append(pissn) + if eissn is not None: + issns.append(eissn) + + journal = models.Journal.find_by_issn_exact(issns, True) + + # check if only one journal matches pissn and eissn and if they are in the correct fields + # no need to check eissn, if pissn matches, pissn and eissn are different and only 1 journal has been found - then eissn matches too + if len(journal) != 1: + raise exceptions.ArticleNotAcceptable(message=Messages.EXCEPTION_MISMATCHED_ISSNS) + if pissn is not None: + if journal[0].bibjson().pissn != pissn: + raise exceptions.ArticleNotAcceptable(message=Messages.EXCEPTION_MISMATCHED_ISSNS) + if eissn is not None: + if journal[0].bibjson().eissn != eissn: + raise exceptions.ArticleNotAcceptable(message=Messages.EXCEPTION_MISMATCHED_ISSNS) + + return journal[0] + @staticmethod def is_legitimate_owner(article, owner): """ diff --git a/portality/bll/services/background_task_status.py b/portality/bll/services/background_task_status.py index 486fdb1d84..ae0c6b7908 100644 --- a/portality/bll/services/background_task_status.py +++ b/portality/bll/services/background_task_status.py @@ -95,7 +95,7 @@ def create_queues_status(self, queue_name) -> dict: # prepare for err_msgs limited_sec = app.config.get('BG_MONITOR_LAST_COMPLETED', {}).get(queue_name) if limited_sec is None: - app.logger.warn(f'BG_MONITOR_LAST_COMPLETED for {queue_name} not found ') + app.logger.warning(f'BG_MONITOR_LAST_COMPLETED for {queue_name} not found ') err_msgs = [] if limited_sec is not None and last_completed_date: diff --git 
a/portality/core.py b/portality/core.py index 83ce502a59..c74493a9a0 100644 --- a/portality/core.py +++ b/portality/core.py @@ -227,11 +227,11 @@ def initialise_index(app, conn, only_mappings=None): :return: """ if not app.config['INITIALISE_INDEX']: - app.logger.warn('INITIALISE_INDEX config var is not True, initialise_index command cannot run') + app.logger.warning('INITIALISE_INDEX config var is not True, initialise_index command cannot run') return if app.config.get("READ_ONLY_MODE", False) and app.config.get("SCRIPTS_READ_ONLY_MODE", False): - app.logger.warn("System is in READ-ONLY mode, initialise_index command cannot run") + app.logger.warning("System is in READ-ONLY mode, initialise_index command cannot run") return # get the app mappings diff --git a/portality/dao.py b/portality/dao.py index 14c5ad125f..40f14adc7c 100644 --- a/portality/dao.py +++ b/portality/dao.py @@ -136,7 +136,7 @@ def save(self, retries=0, back_off_factor=1, differentiate=False, blocking=False :return: """ if app.config.get("READ_ONLY_MODE", False) and app.config.get("SCRIPTS_READ_ONLY_MODE", False): - app.logger.warn("System is in READ-ONLY mode, save command cannot run") + app.logger.warning("System is in READ-ONLY mode, save command cannot run") return if retries > app.config.get("ES_RETRY_HARD_LIMIT", 1000): # an arbitrary large number @@ -220,7 +220,7 @@ def save(self, retries=0, back_off_factor=1, differentiate=False, blocking=False def delete(self): if app.config.get("READ_ONLY_MODE", False) and app.config.get("SCRIPTS_READ_ONLY_MODE", False): - app.logger.warn("System is in READ-ONLY mode, delete command cannot run") + app.logger.warning("System is in READ-ONLY mode, delete command cannot run") return # r = requests.delete(self.target() + self.id) @@ -313,7 +313,7 @@ def bulk(cls, documents: List[dict], idkey='id', refresh=False, action='index', """ # ~~->ReadOnlyMode:Feature~~ if app.config.get("READ_ONLY_MODE", False) and app.config.get("SCRIPTS_READ_ONLY_MODE", False): - app.logger.warn("System is in READ-ONLY mode, bulk command cannot run") + app.logger.warning("System is in READ-ONLY mode, bulk command cannot run") return if action not in ['index', 'update', 'delete']: @@ -363,7 +363,7 @@ def refresh(cls): :return: """ if app.config.get("READ_ONLY_MODE", False) and app.config.get("SCRIPTS_READ_ONLY_MODE", False): - app.logger.warn("System is in READ-ONLY mode, refresh command cannot run") + app.logger.warning("System is in READ-ONLY mode, refresh command cannot run") return # r = requests.post(cls.target() + '_refresh', headers=CONTENT_TYPE_JSON) @@ -449,7 +449,7 @@ def send_query(cls, qobj, retry=50, **kwargs): @classmethod def remove_by_id(cls, id): if app.config.get("READ_ONLY_MODE", False) and app.config.get("SCRIPTS_READ_ONLY_MODE", False): - app.logger.warn("System is in READ-ONLY mode, delete_by_id command cannot run") + app.logger.warning("System is in READ-ONLY mode, delete_by_id command cannot run") return # r = requests.delete(cls.target() + id) @@ -461,7 +461,7 @@ def remove_by_id(cls, id): @classmethod def delete_by_query(cls, query): if app.config.get("READ_ONLY_MODE", False) and app.config.get("SCRIPTS_READ_ONLY_MODE", False): - app.logger.warn("System is in READ-ONLY mode, delete_by_query command cannot run") + app.logger.warning("System is in READ-ONLY mode, delete_by_query command cannot run") return #r = requests.delete(cls.target() + "_query", data=json.dumps(query)) @@ -472,7 +472,7 @@ def delete_by_query(cls, query): @classmethod def destroy_index(cls): if 
app.config.get("READ_ONLY_MODE", False) and app.config.get("SCRIPTS_READ_ONLY_MODE", False): - app.logger.warn("System is in READ-ONLY mode, destroy_index command cannot run") + app.logger.warning("System is in READ-ONLY mode, destroy_index command cannot run") return # if app.config['ELASTIC_SEARCH_INDEX_PER_TYPE']: diff --git a/portality/forms/application_forms.py b/portality/forms/application_forms.py index 0065b765fc..d1f8a44d20 100644 --- a/portality/forms/application_forms.py +++ b/portality/forms/application_forms.py @@ -167,11 +167,22 @@ class FieldDefinitions: "full_contents" # ~~^->FullContents:FormWidget~~ ], "contexts": { + "admin": { + "widgets": [ + "click_to_copy", # ~~^-> ClickToCopy:FormWidget~~ + ] + }, "editor": { - "disabled": True + "disabled": True, + "widgets": [ + "click_to_copy", # ~~^-> ClickToCopy:FormWidget~~ + ] }, "associate_editor": { - "disabled": True + "disabled": True, + "widgets": [ + "click_to_copy", # ~~^-> ClickToCopy:FormWidget~~ + ] }, "update_request": { "disabled": True @@ -198,6 +209,21 @@ class FieldDefinitions: "contexts": { "update_request": { "disabled": True + }, + "admin": { + "widgets": [ + "click_to_copy", # ~~^-> ClickToCopy:FormWidget~~ + ] + }, + "associate_editor": { + "widgets": [ + "click_to_copy", # ~~^-> ClickToCopy:FormWidget~~ + ] + }, + "editor": { + "widgets": [ + "click_to_copy", # ~~^-> ClickToCopy:FormWidget~~ + ] } } } @@ -457,6 +483,21 @@ class FieldDefinitions: "contexts" : { "bulk_edit" : { "validate" : [] + }, + "admin": { + "widgets": [ + "click_to_copy", # ~~^-> ClickToCopy:FormWidget~~ + ] + }, + "associate_editor": { + "widgets": [ + "click_to_copy", # ~~^-> ClickToCopy:FormWidget~~ + ] + }, + "editor": { + "widgets": [ + "click_to_copy", # ~~^-> ClickToCopy:FormWidget~~ + ] } } } @@ -505,6 +546,23 @@ class FieldDefinitions: "a society or other type of institution, enter that here."], "placeholder": "Type or select the society or institution’s name" }, + "contexts" : { + "admin": { + "widgets": [ + "click_to_copy", # ~~^-> ClickToCopy:FormWidget~~ + ] + }, + "associate_editor": { + "widgets": [ + "click_to_copy", # ~~^-> ClickToCopy:FormWidget~~ + ] + }, + "editor": { + "widgets": [ + "click_to_copy", # ~~^-> ClickToCopy:FormWidget~~ + ] + } + }, "widgets": [ "trim_whitespace", # ~~^-> TrimWhitespace:FormWidget~~ {"autocomplete": {"type" : "journal", "field": "bibjson.institution.name.exact"}}, @@ -770,7 +828,7 @@ class FieldDefinitions: } ], "widgets" : [ - "trim_whitespace", # ~~^-> TrimWhitespace:FormWidget~~ + "trim_whitespace" # ~~^-> TrimWhitespace:FormWidget~~ ], "asynchronous_warning": [ {"warn_on_value": {"value": "None"}} @@ -1376,7 +1434,7 @@ class FieldDefinitions: ], "widgets": [ "trim_whitespace", # ~~^-> TrimWhitespace:FormWidget~~ - "clickable_url" # ~~^-> ClickableURL:FormWidget~~ + "clickable_url", # ~~^-> ClickableURL:FormWidget~~ ], "contexts" : { "public" : { @@ -2936,6 +2994,7 @@ def wtforms(field, settings): JAVASCRIPT_FUNCTIONS = { "clickable_url": "formulaic.widgets.newClickableUrl", # ~~-> ClickableURL:FormWidget~~ + "click_to_copy": "formulaic.widgets.newClickToCopy", # ~~-> ClickToCopy:FormWidget~~ "clickable_owner": "formulaic.widgets.newClickableOwner", # ~~-> ClickableOwner:FormWidget~~ "select": "formulaic.widgets.newSelect", # ~~-> SelectBox:FormWidget~~ "taglist": "formulaic.widgets.newTagList", # ~~-> TagList:FormWidget~~ diff --git a/portality/forms/application_processors.py b/portality/forms/application_processors.py index 13a294d14d..1cd426c1f6 100644 --- 
a/portality/forms/application_processors.py +++ b/portality/forms/application_processors.py @@ -198,8 +198,11 @@ def _patch_target_note_id(self): for note in self.target.notes: note_date = dates.parse(note['date']) if not note.get('author_id') and note_date > dates.before_now(60): - note['author_id'] = current_user.id - + try: + note['author_id'] = current_user.id + except AttributeError: + # Skip if we don't have a current_user + pass class NewApplication(ApplicationProcessor): @@ -307,7 +310,6 @@ def patch_target(self): if (self.target.owner is None or self.target.owner == "") and (self.source.owner is not None): self.target.set_owner(self.source.owner) - def finalise(self, account, save_target=True, email_alert=True): """ account is the administrator account carrying out the action @@ -326,7 +328,6 @@ def finalise(self, account, save_target=True, email_alert=True): elif not j.is_in_doaj(): raise Exception(Messages.EXCEPTION_EDITING_WITHDRAWN_JOURNAL) - # if we are allowed to finalise, kick this up to the superclass super(AdminApplication, self).finalise() @@ -813,7 +814,6 @@ def patch_target(self): if (self.target.owner is None or self.target.owner == "") and (self.source.owner is not None): self.target.set_owner(self.source.owner) - def finalise(self): # FIXME: this first one, we ought to deal with outside the form context, but for the time being this # can be carried over from the old implementation diff --git a/portality/lib/plausible.py b/portality/lib/plausible.py index 2aa602d986..90b1b8f46b 100644 --- a/portality/lib/plausible.py +++ b/portality/lib/plausible.py @@ -62,7 +62,7 @@ def send_event(goal: str, on_completed=None, **props_kwargs): def _send(): resp = requests.post(plausible_api_url, json=payload, headers=headers) if resp.status_code >= 300: - logger.warning(f'send plausible event api fail. [{resp.status_code}][{resp.text}]') + logger.warning(f'Send plausible event API fail. 
snd: [{resp.url}] [{headers}] [{payload}] rcv: [{resp.status_code}] [{resp.text}]') if on_completed: on_completed(resp) diff --git a/portality/models/v2/journal.py b/portality/models/v2/journal.py index 9d8a797377..d5cbca46ef 100644 --- a/portality/models/v2/journal.py +++ b/portality/models/v2/journal.py @@ -70,6 +70,22 @@ def find_by_issn(cls, issns, in_doaj=None, max=10): records = [cls(**r.get("_source")) for r in result.get("hits", {}).get("hits", [])] return records + @classmethod + def find_by_issn_exact(cls, issns, in_doaj=None, max=2): + """ + Finds journal that matches given issns exactly - if no data problems should always be only 1 + """ + if not isinstance(issns, list): + issns = [issns] + if len(issns) > 2: + return [] + q = JournalQuery() + q.find_by_issn_exact(issns, in_doaj=in_doaj, max=max) + result = cls.query(q=q.query) + # create an array of objects, using cls rather than Journal, which means subclasses can use it too + records = [cls(**r.get("_source")) for r in result.get("hits", {}).get("hits", [])] + return records + @classmethod def issns_by_owner(cls, owner, in_doaj=None): q = IssnQuery(owner, in_doaj=in_doaj) @@ -925,6 +941,16 @@ class JournalQuery(object): } } + must_query = { + "track_total_hits": True, + "query": { + "bool": { + "must": [ + ] + } + } + } + all_doaj = { "track_total_hits": True, "query": { @@ -950,6 +976,14 @@ def find_by_issn(self, issns, in_doaj=None, max=10): self.query["query"]["bool"]["must"].append({"term": {"admin.in_doaj": in_doaj}}) self.query["size"] = max + def find_by_issn_exact(self, issns, in_doaj=None, max=10): + self.query = deepcopy(self.must_query) + for issn in issns: + self.query["query"]["bool"]["must"].append({"term": {"index.issn.exact": issn}}) + if in_doaj is not None: + self.query["query"]["bool"]["must"].append({"term": {"admin.in_doaj": in_doaj}}) + self.query["size"] = max + def all_in_doaj(self): q = deepcopy(self.all_doaj) if self.minified: diff --git a/portality/scripts/230609_find_articles_with_invalid_issns.py b/portality/scripts/230609_find_articles_with_invalid_issns.py new file mode 100644 index 0000000000..8b857faa01 --- /dev/null +++ b/portality/scripts/230609_find_articles_with_invalid_issns.py @@ -0,0 +1,56 @@ +from portality import models +from portality.bll.services import article as articlesvc +from portality.bll import exceptions +import csv + +IN_DOAJ = { + "query": { + "bool": { + "must": [ + {"term": {"admin.in_doaj": True}} + ] + } + } +} + + +if __name__ == "__main__": + + import argparse + + parser = argparse.ArgumentParser() + parser.add_argument("-o", "--out", help="output file path", required=True) + args = parser.parse_args() + + with open(args.out, "w", encoding="utf-8") as f: + writer = csv.writer(f) + writer.writerow(["ID", "PISSN", "EISSN", "Journals found with article's PISSN", "In doaj?", "Journals found with article's EISSN", "In doaj?", "Error"]) + + for a in models.Article.iterate(q=IN_DOAJ, page_size=100, keepalive='5m'): + article = models.Article(_source=a) + bibjson = article.bibjson() + try: + articlesvc.ArticleService._validate_issns(bibjson) + except exceptions.ArticleNotAcceptable as e: + id = article.id + pissn = bibjson.get_identifiers("pissn") + eissn = bibjson.get_identifiers("eissn") + j_p = [j["id"] for j in models.Journal.find_by_issn(pissn)] + j_p_in_doaj = [] + if (j_p): + for j in j_p: + jobj = models.Journal.pull(j) + if (jobj): + j_p_in_doaj.append(jobj.is_in_doaj()) + else: + j_p_in_doaj.append("n/a") + j_e = [j["id"] for j in 
models.Journal.find_by_issn(eissn)]
+            j_e_in_doaj = []
+            if (j_e):
+                for j in j_e:
+                    jobj = models.Journal.pull(j)
+                    if (jobj):
+                        j_e_in_doaj.append(jobj.is_in_doaj())
+                    else:
+                        j_e_in_doaj.append("n/a")
+            writer.writerow([id, pissn, eissn, j_p, j_p_in_doaj, j_e, j_e_in_doaj, str(e)])
diff --git a/portality/scripts/journals_update_via_csv.py b/portality/scripts/journals_update_via_csv.py
index 298b7c817b..c696068a85 100644
--- a/portality/scripts/journals_update_via_csv.py
+++ b/portality/scripts/journals_update_via_csv.py
@@ -82,6 +82,7 @@
         reader = csv.DictReader(g, fieldnames=header_row)
 
         # verify header row with current CSV headers, report errors
+        # TODO: Include 'Owner' field - but we should probably base this process off the AdminCSV too.
         expected_headers = JournalFixtureFactory.csv_headers()
 
         # Always perform a match check on supplied headers, not counting order
@@ -155,6 +156,14 @@
         if len(updates) > 0:
             [print(upd) for upd in updates]
 
+            # Check we have the expected owner (if supplied) before proceeding to create an update request
+            own = row.get('Owner')
+            if own is not None:
+                if own.strip().lower() != j.owner.strip().lower():
+                    print('ABORTING - supplied owner {0} mismatches journal owner {1}.'.format(own, j.owner))
+                    writer.writerow([j.id, ' | '.join(updates), 'COULD NOT UPDATE - Owner mismatch. Expected {0} Got {1}'.format(own, j.owner)])
+                    continue
+
             # Create an update request for this journal
             update_req = None
             jlock = None
@@ -204,7 +213,7 @@
 
             # Add note to UR if supplied
             if note:
-                fc.target.add_note(note)
+                fc.target.add_note(note, author_id=sys_acc.id)
 
             if not args.manual_review:
                 # This is the update request, in 'update request' state
diff --git a/portality/settings.py b/portality/settings.py
index 206648caf9..41a18c7ad8 100644
--- a/portality/settings.py
+++ b/portality/settings.py
@@ -9,7 +9,7 @@
 
 # Application Version information
 # ~~->API:Feature~~
-DOAJ_VERSION = "6.3.15"
+DOAJ_VERSION = "6.3.17"
 API_VERSION = "3.0.1"
 
 ######################################
@@ -427,7 +427,7 @@ HUEY_SCHEDULE = {
     "sitemap": {"month": "*", "day": "*", "day_of_week": "*", "hour": "8", "minute": "0"},
     "reporting": {"month": "*", "day": "1", "day_of_week": "*", "hour": "0", "minute": "0"},
-    "journal_csv": {"month": "*", "day": "*", "day_of_week": "*", "hour": "*", "minute": "35"},
+    "journal_csv": {"month": "*", "day": "*", "day_of_week": "*", "hour": "*/2", "minute": "20"},
     "read_news": {"month": "*", "day": "*", "day_of_week": "*", "hour": "*", "minute": "30"},
     "article_cleanup_sync": {"month": "*", "day": "2", "day_of_week": "*", "hour": "0", "minute": "0"},
     "async_workflow_notifications": {"month": "*", "day": "*", "day_of_week": "1", "hour": "5", "minute": "0"},
@@ -1014,6 +1014,8 @@
 
 # OAI-PMH SETTINGS
 # ~~->OAIPMH:Feature~~
+OAI_ADMIN_EMAIL = 'helpdesk+oai@doaj.org'
+
 # ~~->OAIAriticleXML:Crosswalk~~
 # ~~->OAIJournalXML:Crosswalk~~
 OAI_DC_METADATA_FORMAT = {
diff --git a/portality/static/js/formulaic.js b/portality/static/js/formulaic.js
index 99367a6c91..c970e12c18 100644
--- a/portality/static/js/formulaic.js
+++ b/portality/static/js/formulaic.js
@@ -1195,7 +1195,6 @@ var formulaic = {
             this.init();
         },
-
 
         newClickableOwner : function(params) {
             return edges.instantiate(formulaic.widgets.ClickableOwner, params)
         },
@@ -1238,7 +1237,27 @@ var formulaic = {
             this.init();
         },
 
+        newClickToCopy : function(params) {
+            return edges.instantiate(formulaic.widgets.ClickToCopy, params)
+        },
+
+        ClickToCopy : function(params) {
+            this.fieldDef = params.fieldDef;
+
+            this.init = function() {
+                var elements = $("#click-to-copy--" + this.fieldDef.name);
+                edges.on(elements, "click", this, "copy");
+            };
+
+            this.copy = function(element) {
+                let form = new doaj.af.BaseApplicationForm()
+                let value = form.determineFieldsValue(this.fieldDef.name)
+                let value_to_copy = form.convertValueToText(value);
+                navigator.clipboard.writeText(value_to_copy)
+                var confirmation = $("#copy-confirmation--" + this.fieldDef.name);
+                confirmation.text("Copied: " + value_to_copy);
+                confirmation.show().delay(3000).fadeOut();
+            };
+
+            this.init();
+        },
+
         newTrimWhitespace : function(params) {
             return edges.instantiate(formulaic.widgets.TrimWhitespace, params)
         },
diff --git a/portality/tasks/async_workflow_notifications.py b/portality/tasks/async_workflow_notifications.py
index 4bea0b1104..b0236af7ec 100644
--- a/portality/tasks/async_workflow_notifications.py
+++ b/portality/tasks/async_workflow_notifications.py
@@ -333,7 +333,7 @@ def associate_editor_notifications(emails_dict, limit=None):
             assoc_email = assoc.email
         except AttributeError:
             # There isn't an account for that id
-            app.logger.warn("No account found for ID {0}".format(assoc_id))
+            app.logger.warning("No account found for ID {0}".format(assoc_id))
             continue
 
         text = render_template('email/workflow_reminder_fragments/assoc_ed_age_frag', num_idle=idle, x_days=X_DAYS, num_very_idle=very_idle, y_weeks=Y_WEEKS, url=url)
diff --git a/portality/tasks/harvester_helpers/epmc/client.py b/portality/tasks/harvester_helpers/epmc/client.py
index fb742b0714..2957e9fe62 100644
--- a/portality/tasks/harvester_helpers/epmc/client.py
+++ b/portality/tasks/harvester_helpers/epmc/client.py
@@ -37,9 +37,9 @@ def check_epmc_version(resp_json):
         received_ver = resp_json['version']
         configured_ver = app.config.get("EPMC_TARGET_VERSION")
         if received_ver != configured_ver:
-            app.logger.warn("Mismatching EPMC API version; recommend checking for changes. Expected '{0}' Found '{1}'".format(configured_ver, received_ver))
+            app.logger.warning("Mismatching EPMC API version; recommend checking for changes. Expected '{0}' Found '{1}'".format(configured_ver, received_ver))
     except KeyError:
-        app.logger.warn("Couldn't check EPMC API version; did not find 'version' key in response. Proceed with caution as the EPMC API may have changed.")
+        app.logger.warning("Couldn't check EPMC API version; did not find 'version' key in response. Proceed with caution as the EPMC API may have changed.")
 
 
 def to_keywords(s):
diff --git a/portality/tasks/helpers/background_helper.py b/portality/tasks/helpers/background_helper.py
index 2790475729..66a15343e8 100644
--- a/portality/tasks/helpers/background_helper.py
+++ b/portality/tasks/helpers/background_helper.py
@@ -26,7 +26,7 @@ def get_queue_id_by_task_queue(task_queue: RedisHuey):
     elif task_queue.name == main_queue.name:
         return constants.BGJOB_QUEUE_ID_MAIN
     else:
-        app.logger.warn(f'unknown task_queue[{task_queue}]')
+        app.logger.warning(f'unknown task_queue[{task_queue}]')
         return constants.BGJOB_QUEUE_ID_UNKNOWN
 
 
@@ -141,7 +141,7 @@
         return _mi.module_finder.find_spec(_mi.name).loader.load_module(_mi.name)
     except RuntimeError as e:
         if 'No configuration for scheduled action' in str(e):
-            app.logger.warn(f'config for {_mi.name} not found')
+            app.logger.warning(f'config for {_mi.name} not found')
             return None
         raise e
 
diff --git a/portality/templates/application_form/_field.html b/portality/templates/application_form/_field.html
index 8ebbaa9fe9..2e6ce601a5 100644
--- a/portality/templates/application_form/_field.html
+++ b/portality/templates/application_form/_field.html
@@ -8,6 +8,10 @@
         {% if f.help("long_help") %}
             More help
         {% endif %}
+        {% if f.has_widget("click_to_copy") %}
+            Copy value
+
+        {% endif %}
         {% if f.optional %}(Optional){% endif %}
     {% endset %}
diff --git a/portality/templates/application_form/_list.html b/portality/templates/application_form/_list.html
index bd75ea5795..02567676cc 100644
--- a/portality/templates/application_form/_list.html
+++ b/portality/templates/application_form/_list.html
@@ -7,6 +7,10 @@
         {% if f.help("long_help") %}
             More help
         {% endif %}
+        {% if f.has_widget("click_to_copy") %}
+            Copy value
+
+        {% endif %}
         {% if f.optional %}(Optional){% endif %}
     {% if f.get("hint") %}
         {{ f.hint | safe }}
     {% endif %}
diff --git a/portality/templates/includes/contribution_rates.html b/portality/templates/includes/contribution_rates.html
index 29f28a8426..821fefede9 100644
--- a/portality/templates/includes/contribution_rates.html
+++ b/portality/templates/includes/contribution_rates.html
@@ -260,6 +260,7 @@
 
     For institutions that are members of:
 
     The definition as to Large/Small is at the discretion of the consortium.
 
+
diff --git a/portality/templates/publisher/help.html b/portality/templates/publisher/help.html
index ebd3d91571..9f4daddeb4 100644
--- a/portality/templates/publisher/help.html
+++ b/portality/templates/publisher/help.html
@@ -204,10 +204,17 @@
 
     Failed XML uploads explained
 
     A journal may have two ISSNs: an ISSN for the print version and an ISSN for the electronic version. Sometimes the ISSNs of the journal have changed.
     The print and online ISSNs you have supplied are identical. If you supply 2 ISSNs they must be different: an ISSN for the print version and an ISSN for the electronic version.
+
+    ISSNs provided don't match any journal. We do not have a record of one or both of those ISSNs in DOAJ.
+
     Check that all the Article ISSNs in the file are correct
 
+    Check that the journal to which you are trying to upload article metadata is indexed in DOAJ.
+
+    Check that the ISSNs in the metadata are both seen on the DOAJ journal record.
+
     If you need to have the ISSNs of your DOAJ record updated, please contact us and we will check that the ISSNs are registered at the ISSN Portal and will then update the record accordingly.
 
     If you believe all the ISSNs for the articles are correct, please contact us with the relevant details.
diff --git a/portality/ui/messages.py b/portality/ui/messages.py
index ac7f9163bc..8eabd73f80 100644
--- a/portality/ui/messages.py
+++ b/portality/ui/messages.py
@@ -61,6 +61,7 @@ class Messages(object):
     EXCEPTION_NO_CONTRIBUTORS_EXPLANATION = "DOAJ requires at least one author for each article."
 
     EXCEPTION_TOO_MANY_ISSNS = "Too many ISSNs. Only 2 ISSNs are allowed: one Print ISSN and one Online ISSN."
+    EXCEPTION_MISMATCHED_ISSNS = "ISSNs provided don't match any journal."
     EXCEPTION_ISSNS_OF_THE_SAME_TYPE = "Both ISSNs have the same type: {type}"
     EXCEPTION_IDENTICAL_PISSN_AND_EISSN = "The Print and Online ISSNs supplied are identical. If you supply 2 ISSNs they must be different."
     EXCEPTION_NO_ISSNS = "Neither Print ISSN nor Online ISSN has been supplied. DOAJ requires at least one ISSN."
diff --git a/portality/view/admin.py b/portality/view/admin.py
index 9e39473b03..010907c3b1 100644
--- a/portality/view/admin.py
+++ b/portality/view/admin.py
@@ -68,7 +68,7 @@ def journals_list():
         try:
             query = json.loads(request.values.get("q"))
         except:
-            app.logger.warn("Bad Request at admin/journals: " + str(request.values.get("q")))
+            app.logger.warning("Bad Request at admin/journals: " + str(request.values.get("q")))
             abort(400)
 
         # get the total number of journals to be affected
@@ -89,7 +89,7 @@ def journals_list():
         try:
             query = json.loads(request.data)
         except:
-            app.logger.warn("Bad Request at admin/journals: " + str(request.data))
+            app.logger.warning("Bad Request at admin/journals: " + str(request.data))
             abort(400)
 
         # get only the query part
@@ -123,7 +123,7 @@ def articles_list():
         try:
             query = json.loads(request.data)
         except:
-            app.logger.warn("Bad Request at admin/journals: " + str(request.data))
+            app.logger.warning("Bad Request at admin/journals: " + str(request.data))
             abort(400)
 
         # get only the query part
diff --git a/portality/view/doaj.py b/portality/view/doaj.py
index 5ac2f67f69..1292728293 100644
--- a/portality/view/doaj.py
+++ b/portality/view/doaj.py
@@ -198,8 +198,13 @@ def public_data_dump_redirect(record_type):
     if not current_user.has_role(constants.ROLE_PUBLIC_DATA_DUMP):
         abort(404)
 
-    target_data = models.Cache.get_public_data_dump().get(record_type, {})
-    if target_data is None:
+    # Make sure the PDD exists
+    pdd = models.Cache.get_public_data_dump()
+    if pdd is None:
+        abort(404)
+
+    target_data = pdd.get(record_type, {})
+    if not target_data:
         abort(404)
 
     main_store = store.StoreFactory.get(constants.STORE__SCOPE__PUBLIC_DATA_DUMP)
diff --git a/portality/view/oaipmh.py b/portality/view/oaipmh.py
index 5006c13f02..73a5158dfe 100644
--- a/portality/view/oaipmh.py
+++ b/portality/view/oaipmh.py
@@ -305,7 +305,7 @@ def get_record(dao, base_url, specified_oai_endpoint, identifier=None, metadata_
 
 def identify(dao, base_url):
     repo_name = app.config.get("SERVICE_NAME")
-    admin_email = app.config.get("ADMIN_EMAIL")
+    admin_email = app.config.get("OAI_ADMIN_EMAIL", app.config.get("ADMIN_EMAIL"))
     idobj = Identify(base_url, repo_name, admin_email)
     idobj.earliest_datestamp = dao.earliest_datestamp()
     return idobj
diff --git a/production.cfg b/production.cfg
index 897ff34004..d9d501f253 100644
--- a/production.cfg
+++ b/production.cfg
@@ -5,11 +5,9 @@ ELASTIC_SEARCH_HOST = "http://10.131.191.132:9200"
 # doaj-ind
 ELASTICSEARCH_HOSTS = [{'host': '10.131.191.132', 'port': 9200}, {'host': '10.131.191.133', 'port': 9200}]
 INDEX_PER_TYPE_SUBSTITUTE = '_doc'
 
-# doaj-public-app-1 doaj-background-app-1
-APP_MACHINES_INTERNAL_IPS = ['10.131.191.139:5050', '10.131.12.33:5050']
+# doaj-public-app-1 doaj-background-app-1 doaj-editor-app-1
+APP_MACHINES_INTERNAL_IPS = ['10.131.191.139:5050', '10.131.12.33:5050', '10.131.56.133:5050']
 
-# doaj-public-app-1 doaj-bg-app-1 doaj-background-app-1
-#APP_MACHINES_INTERNAL_IPS = ['10.131.191.139:5050', '10.131.56.133:5050', '10.131.12.33:5050']
 # The app is served via nginx / cloudlflare - they handle SSL
 SSL = False
diff --git a/setup.py b/setup.py
index b97eee8d8b..fbcac71bc0 100644
--- a/setup.py
+++ b/setup.py
@@ -5,7 +5,7 @@
 
 setup(
     name='doaj',
-    version='6.3.15',
+    version='6.3.17',
     packages=find_packages(),
     install_requires=[
         "awscli==1.20.50",