diff --git a/cms/assets/img/ambassadors/johndove.jpg b/cms/assets/img/team/johndove.jpg similarity index 100% rename from cms/assets/img/ambassadors/johndove.jpg rename to cms/assets/img/team/johndove.jpg diff --git a/cms/data/ambassadors.yml b/cms/data/ambassadors.yml index 3f797bcd2d..f2a949b4f0 100644 --- a/cms/data/ambassadors.yml +++ b/cms/data/ambassadors.yml @@ -62,13 +62,6 @@ photo: "ivonne.jpg" coi: 2022: https://drive.google.com/file/d/1HnGhYbvbzL34guWOmIqcthcwAN8NADX1/view?usp=sharing - -- name: John G. Dove - region: North America - bio: "John has had a career in executive management, and is now an independent consultant and open access advocate who works with organisations seeking to accelerate their transition to open access. He advises both for-profits and non-profits, and has a particular interest in identifying the steps necessary to flip an entire discipline’s scholarly record to open access. His ambassador activities focus on increasing the support to DOAJ from the community. He served for six years on NISO’s Information Discovery and Interchange Topic Committee, and has written for Learned Publishing, Against the Grain, and Scholarly Kitchen. John serves on the Board of Trustees of his local public library in Revere, Massachusetts. He has a B.A. in Mathematics from Oberlin College." 
- photo: "johndove.jpg" - coi: - 2022: https://drive.google.com/file/d/1cWijl2xdmVjshsvaGTABOvC_chIIfuVA/view?usp=sharing - name: Mahmoud Khalifa region: Middle East and Persian Gulf diff --git a/cms/data/nav.yml b/cms/data/nav.yml index 8c5aa278ee..ddd6b7cf7e 100644 --- a/cms/data/nav.yml +++ b/cms/data/nav.yml @@ -56,9 +56,9 @@ entries: secondary_mobile: true route: doaj.support # ~~->Support:WebRoute~~ entries: - - label: Support DOAJ + - label: Institutions and libraries route: doaj.support # ~~->Support:WebRoute~~ - - label: Publisher supporters + - label: Publishers route: doaj.publisher_supporters # ~~->PublisherSupporters:WebRoute~~ - label: Supporters route: doaj.supporters # ~~->Supporters:WebRoute~~ diff --git a/cms/data/team.yml b/cms/data/team.yml index 781c84737e..e3a0b3df8e 100644 --- a/cms/data/team.yml +++ b/cms/data/team.yml @@ -6,17 +6,15 @@ photo: alejandra.png bio: 'Alejandra has a Bachelor’s degree in Information Science and a Master’s degree in Digital Media. She has around ten years of experience in information management, knowledge management and scholarly communication at national and international level.' coi: - 2016: https://drive.google.com/file/d/0ByRf6PVViI-mVFhjdkZWaFJZOTQ/view?usp=sharing&resourcekey=0-gMdxxHXyyJB9zFZIuh99QQ 2018: https://drive.google.com/file/d/0ByRf6PVViI-mRlRYTDBPRlZiWTRxQ3VMTUZpQnZ5ZkwyLVQ4/view?usp=sharing&resourcekey=0-mlQ6rSCEnr6RfpCwh_4SMw 2020: https://drive.google.com/file/d/1PF7Cc9vGAwWGqpqDo7nRULjxWIF2NR_Q/view?usp=sharing 2022: https://drive.google.com/file/d/1E45ycyctDfYkh65ZCM8PMtQsYym_eP6L/view?usp=sharing - name: Cenyu Shen - role: Quality Team Lead and Managing Editor + role: Deputy Head of Editorial (Quality) photo: cenyu.jpg bio: 'Cenyu holds a PhD in Information Systems Science at Hanken School of Economics in Finland. She has spent around seven years on Open Access research with a particular focus on gold OA publishing concerning journals and publishers outside the mainstream. 
She was one of three DOAJ ambassadors for China from 2016 to 2017.' coi: - 2016: https://drive.google.com/file/d/0B0fPCpIPjZlmNHZCQmxpUmN6bUEtYUx2VHZnbjVySS1fRTlr/view?usp=sharing&resourcekey=0-1TRIV1MEQMhdGbmCd7CbOA 2020: https://drive.google.com/file/d/1rm9fjOF3OHJ9lR9wEUyQBQTO2KdoNQcE/view?usp=sharing 2022: https://drive.google.com/file/d/1Mn_CR0twKxyFbbHxsLSrgeU984BNOLlS/view?usp=sharing @@ -25,7 +23,6 @@ photo: clara.jpg bio: 'Clara has 10 years experience in the scholarly publishing industry. She worked at Cambridge University Press as an Open Access Project Manager until 2015. She also works in science communication as a freelancer at the University Pompeu Fabra, Barcelona. Clara speaks Spanish, Catalan, English and some German and French. She loves cetaceans, freediving, cycling, and is an enthusiastic cook.' coi: - 2016: https://drive.google.com/file/d/0ByRf6PVViI-mbDFybndLbldEbFE/view?usp=sharing&resourcekey=0-lKZNFwvUNdVAGKatvnKiPg 2018: https://drive.google.com/file/d/1LHmZSZ6bwf6U71fNvIibJa6R1lquNhfR/view?usp=sharing 2020: https://drive.google.com/file/d/1v4duxnoTcNo4UbL_GBa5D1T8JtTl7oY1/view?usp=sharing 2022: https://drive.google.com/file/d/1hevYxG1102llDy-_i-onwKbDuOlBguA_/view?usp=sharing @@ -35,7 +32,6 @@ photo: dominic.jpg bio: 'Dominic has over 25 years of experience working with publisher and library communities. He is responsible for operations and development of the DOAJ platform. He acts as Committee chair for the Think. Check. Submit. initiative, of which DOAJ is a founding organisation. He represents DOAJ in Project JASPER, a cross-industry project working to ensure that journals are preserved for the long term. He also sits on the OASPA Board of Directors and serves as Secretary. Outside of work, he is reluctantly becoming an expert in the playparks of Stockholm with his twin sons.' 
coi: - 2016: https://drive.google.com/file/d/0ByRf6PVViI-mWmU0UHZqZm1xcDQ/view?usp=sharing&resourcekey=0-BmQKwWn6Vb9ot73Xie66aA 2018: https://drive.google.com/file/d/13XX_GUrw2xRmXARjRrTxegULPT8Redka/view?usp=sharing 2020: https://drive.google.com/file/d/1nxFOuAdXLb8A-LulhNpz9i5vSmr5DBwF/view?usp=sharing 2022: https://drive.google.com/file/d/1HBF9RLaIt3lFNG6WDcV08fQSMS_C6zwA/view?usp=sharing @@ -62,12 +58,18 @@ coi: 2022: https://drive.google.com/file/d/1-3xzwkHMclREgLhj_XNF5n6Nr4q2_bnw/view?usp=sharing +- name: John G. Dove + role: Advisor + photo: johndove.jpg + bio: "John has had a career in executive management, and is now an independent consultant and open access advocate who works with organisations seeking to accelerate their transition to open access. He advises both for-profits and non-profits, and has a particular interest in identifying the steps necessary to flip an entire discipline’s scholarly record to open access. His ambassador activities focus on increasing the support to DOAJ from the community. He served for six years on NISO’s Information Discovery and Interchange Topic Committee, and has written for Learned Publishing, Against the Grain, and Scholarly Kitchen. John serves on the Board of Trustees of his local public library in Revere, Massachusetts. He has a B.A. in Mathematics from Oberlin College." + coi: + 2022: https://drive.google.com/file/d/1cWijl2xdmVjshsvaGTABOvC_chIIfuVA/view?usp=sharing + - name: Judith Barnsby role: Head of Editorial photo: judith.jpg bio: 'Judith has 25 years of experience in the scholarly publishing industry, working for a range of non-profit society publishers and service providers before joining DOAJ. She has a keen interest in publishing standards and protocols, and has served on the board of CLOCKSS and as chair of the PALS (publisher and library solutions) working group in the UK. Judith loves books, especially detective fiction, and volunteers in her local library.' 
coi: - 2016: https://drive.google.com/file/d/0B0fPCpIPjZlmb3JmVkFYbjN5aTh1OUhLd2lZaEV0ZlFwbTZV/view?usp=sharing&resourcekey=0-o_PXKLk5UFbPk_-4B61jVA 2018: https://drive.google.com/file/d/0ByRf6PVViI-mV2lfMjByQjYxUkpMcXhuc2l5Q3ZDWlpiYUtZ/view?usp=sharing&resourcekey=0-6eiGIRal00eXvgJUTeN_lw 2020: https://drive.google.com/file/d/18MWTsze4cDQQRPHJl2XrYgHQvlxhsPZa/view?usp=sharing 2023: https://drive.google.com/file/d/1hUsVIY09N6WceSx1edTM-h516CJGkHcu/view?usp=share_link @@ -77,7 +79,6 @@ photo: Kamel.jpg bio: 'Kamel is Full Professor of Chemistry at the University of Bejaia, Algeria (ORCID). He gained his PhD in Process Engineering and Chemistry of Materials Science at the University of Setif, Algeria. Kamel joined DOAJ in 2016 as an Ambassador for North Africa. He is currently Creative Commons Algeria Chapter lead, director of the Laboratory of Organic Materials at the University of Bejaia and editor-in-chief of Algerian Journal of Natural Products. His scientific activity is focused on chemistry of Natural Products, scholarly communications and new developments in academic publishing. Father of 3 daughters, he likes travelling, healthy local foods & home-made snacks.' coi: - 2016: https://drive.google.com/file/d/0B0fPCpIPjZlmVEN4X1Q0RDdCams1NXhveW1HQmtMYU56bDE4/view?usp=sharing&resourcekey=0-wA1CGAbjB6FAX33gCDQmrA 2018: https://drive.google.com/file/d/1JdF2kh-fLXz8kPGN_3ijDt5y9K6s0hOQ/view?usp=sharing 2020: https://drive.google.com/file/d/1iXrjwLTNBXwKD2TwrPD9ApKL7O6uZ8Z7/view?usp=sharing 2022: https://drive.google.com/file/d/1cl18h_mYnNogYs8Rk-fhBTW6WKOTC2IF/view?usp=sharing @@ -94,28 +95,19 @@ photo: lars.jpg bio: 'Lars worked at Danish university libraries for two decades and was Director of Libraries at Lund University, Sweden from 2001 to 2011. He founded the DOAJ in 2003, and was Managing Director from 2013-2021. 
He has vast experience in change management, re-engineering of academic libraries, and development of information services for research & higher education. For two decades Lars has been a strong advocate of open access and for providing services to the open access movement. He is co-founder of OpenDOAR, the Directory of Open Access Books and Think. Check. Submit. Lars lives outside Copenhagen, and is married with 4 children and 4 grandchildren. He enjoys vegetable gardening, growing cacti and succulents, and playing internet chess.' coi: - 2016: https://drive.google.com/file/d/0ByRf6PVViI-mbmo2aU9NWkx5dGs/view?usp=sharing&resourcekey=0-mpdRgVU9UlFjC614-woDvg 2018: https://drive.google.com/file/d/1mm1a8nbY5MQX9loqIs2ZQuVN-73RfPuN/view?usp=sharing 2021: https://drive.google.com/file/d/1bNj5sqUsu4sRLmm_YOuh3JCSMERzQ1Ro/view?usp=sharing 2022: https://drive.google.com/file/d/1fRJtvci2_j4vad0C5N1pfqm2sHZQkFz3/view?usp=sharing - name: Leena Shah - role: Managing Editor and Ambassador + role: Deputy Head of Editorial (Workflow) and Ambassador photo: leena.jpg bio: "Leena joined the DOAJ team in 2016 as an Ambassador for India before becoming a Managing Editor. Prior to joining DOAJ she worked as a science librarian at Nanyang Technological University, Singapore, where she developed a keen interest in scholarly communication & open science. A recent addition to her interests is artificial intelligence in scholarly communication. Leena holds a Master’s degree in Information Studies and lives in Singapore. She loves watching sci-fi shows and is enthusiastic about travelling to new places." 
coi: - 2016: https://drive.google.com/file/d/0B0fPCpIPjZlmTHZuaEtMSDNIeUpKT2Fid19jVjVFTkRoUmdj/view?usp=sharing&resourcekey=0-KqvRVa30bQEUfqO-YA1L-g 2018: https://drive.google.com/file/d/1tifEjAIlU3txBw9DjIcRW9cZL7YG7_nU/view?usp=sharing 2020: https://drive.google.com/file/d/1zU-lLB5W54E_QUm5uto5tqB6cZl83TAJ/view?usp=sharing 2022: https://drive.google.com/file/d/19rw-naMJqHkI5T7aDIDPUkwPutBdDpDm/view?usp=sharing -- name: Luis Montilla - role: Managing Editor - photo: luis.jpeg - bio: "Luis is a marine ecologist with a passion for improving the quality of scientific publishing. After finishing his Masters in Venezuela, he spent three years in Italy completing his PhD studying marine microbial symbioses in seagrass beds. In his free time, he enjoys reading and watching movies." - coi: - 2023: https://drive.google.com/file/d/1IJhnV2Ht5t5jilaCAFzpuFdYk7UMOjN3/view?usp=sharing - - name: Mahmoud Khalifa role: Managing Editor and Ambassador photo: mahmoud-new.jpg @@ -151,7 +143,6 @@ photo: Rikard.jpg bio: 'Rikard has a Bachelor of Arts degree with a Major in Cultural Sciences and a specialization in publishing. He enjoys reading about philosophy and religion.' coi: - 2016: https://drive.google.com/file/d/0ByRf6PVViI-mdnJPdldOM0hUMFU/view?usp=sharing&resourcekey=0-8dJAtvm2n7vXV9NhqZYckw 2018: https://drive.google.com/file/d/1tOnW8L6TwolyLpIXwMKTITf9wGh_ukLb/view?usp=sharing 2020: https://drive.google.com/file/d/14c0RgpyD2Slzyh5s8LGvj5OwWbL4H8NX/view?usp=sharing 2023: https://drive.google.com/file/d/1HQIh1DlfhEutTWniXDGLYFVa9VxJ4OT9/view?usp=share_link @@ -161,7 +152,6 @@ photo: sonja.jpg bio: 'Sonja is a former Information Librarian from Lund University Library. She has a B.A. in English, Bulgarian and Russian from Lund University and specialises in applications for journals in the Slavonic languages.' 
coi: - 2016: https://drive.google.com/file/d/0ByRf6PVViI-mNUFoZWV4YnZ3bDg/view?usp=sharing&resourcekey=0-1JRid_DHRMKbgdzmVYL7NQ 2018: https://drive.google.com/file/d/1M5AGEDP79uk2olCcmVYjKCsmzL7tG2Vc/view?usp=sharing 2020: https://drive.google.com/file/d/1-4RJYScTs_zMBeD5zESNvCoIBCWTOWHR/view?usp=sharing 2022: https://drive.google.com/file/d/1soZtiW6gyVJPl7P_J60j2TL2Fqzl0QAs/view?usp=sharing @@ -178,7 +168,6 @@ photo: tom.jpg bio: 'Tom has a PhD in molecular microbiology and spent several years in Africa doing research on malaria, sleeping sickness and meningococcal epidemics. He has been actively advocating open access and open science since 2012 when he joined the Open Knowledge community and became a member of the DOAJ advisory board. His current research interests are development of quality systems for the assessment of scholarly journals and articles, and research in the area of soil microbiology in relation to soil health and human health.' coi: - 2016: https://drive.google.com/file/d/0ByRf6PVViI-mYUFZNDRISTZodUU/view?usp=sharing&resourcekey=0-g13FJaUJpdR_t2rMLEyzEQ 2018: https://drive.google.com/file/d/1x0w-a1TWQdJDKPtQpGhmDZSdA4BhFSpI/view?usp=sharing 2020: https://drive.google.com/file/d/1VyirUdc6FBNOujl938bHf1JCL1jLNwXV/view?usp=sharing 2022: https://drive.google.com/file/d/1ww7WHQEg1395bPn20Arb7LJn9lIROdBl/view?usp=sharing diff --git a/cms/pages/legal/terms.md b/cms/pages/legal/terms.md index ad872453eb..1b27b66e99 100644 --- a/cms/pages/legal/terms.md +++ b/cms/pages/legal/terms.md @@ -32,7 +32,7 @@ DOAJ uses a variety of licenses for the different parts of its website and the c + In our [OAI-PMH feed](/docs/oai-pmh) + In the [full data dump of all article metadata](/docs/public-data-dump/). -4. The *open source software* that DOAJ is built with is licensed under [an Apache license Version 2](https://github.com/DOAJ/doaj/blob/a6fc2bee499b5a8a1f24fb098acfb8e10bd72503/portality/static/vendor/select2-3.5.4/LICENSE). +4. 
The *open source software* that DOAJ is built with is licensed under [an Apache license Version 2](https://github.com/DOAJ/doaj/blob/develop/LICENSE). --- diff --git a/cms/pages/support/index.md b/cms/pages/support/index.md index 8c9ad71dc4..cc39fbfd44 100644 --- a/cms/pages/support/index.md +++ b/cms/pages/support/index.md @@ -2,7 +2,7 @@ layout: sidenav sidenav_include: /includes/_sidenav_donation.html include: /includes/contribution_rates.html -title: Support DOAJ +title: Institutional and library supporter model section: Support sticky_sidenav: true featuremap: @@ -11,10 +11,37 @@ featuremap: --- -Support of DOAJ by academic organisations is vital and we are proud to acknowledge that over 80% of our support comes to us this way. We are very grateful to all our supporting academic organisations from around the world. +Support of DOAJ by academic organisations is vital, and we are proud to acknowledge that over 80% of our support comes to us this way. We are very grateful to all our supporting academic organisations worldwide. -The suggested contributions for academic organisations are below. Use the table to find the most appropriate option for your organisation. [Send an email](mailto:joanna@doaj.org) to Joanna Ball, Managing Director, with the details of the support level you have chosen. Alternatively, you can use our invoice-free one-time donation button to send us an amount of your choosing. +### 2024 pricing -(Publishers interested in supporting us should read the [publisher supporters](/support/publisher-supporters/) page.) +For 2024, we have revised and simplified our supporter model to align with the levels recommended by SCOSS. This new model enables us to invest in the organisation's future and to continue to provide a high-quality service to our community. 
+ +| | Euros(€) | USDs($) | GBPs(£) | +|---------------------|----------|---------|---------| +| Large organisations | 4,000 | 4,400 | 3,440 | +| Small organisations | 2,000 | 2,200 | 1,720 | +| Organisations from [low- and middle-income countries](https://datatopics.worldbank.org/world-development-indicators/the-world-by-income-and-region.html) | 500 | 550 | 430 | + +A 30% discount will be applied to institutions supporting via a billing consortium. Please contact [supporters@doaj.org](mailto:supporters@doaj.org) for further information. + +We always have a wishlist of development projects for which we require additional funding. Please contact us if you would like to support us over and above our standard rates. + +### Why you should support us + +- We are community-led and -governed. Your support enables our commitment to being 100% independent. +- Supporting open infrastructure is a strategic choice for libraries and institutions, demonstrating your commitment to open research and sustaining open infrastructure. +- We are seeing a steady increase in demand: the number of applications we receive each year has increased by 60% since 2018, and our investigations into questionable publishing practices are becoming more complex. +- Help us deliver our role in driving standards and best practice in open access publishing, for example through the [Principles of transparency and best practice in scholarly publishing](/apply/transparency/) and the [OA Journals Toolkit](https://www.oajournals-toolkit.org/). +- You rely extensively on our metadata as a source of trusted journals, integrating it into discovery systems and open access services. + +By supporting us, your organisation will join [a growing family of like-minded institutions](/support/supporters/) committed to ensuring quality content is available online for everyone. Supporting DOAJ is a statement of belief in equitable open knowledge and science. 
+ +### Benefits for institutional and library supporters + +- We will add your institution’s name to [our Supporters page](/support/supporters/) +- you can include details of your DOAJ support in marketing activities +- you can use our logo on your institution’s websites and in other communications +- you can integrate into your services the DOAJ metadata via our OAI/PMH service, our API or the public data dump --- diff --git a/cms/pages/support/supporters.md b/cms/pages/support/supporters.md index c7d08ddda8..6cf98e6b0f 100644 --- a/cms/pages/support/supporters.md +++ b/cms/pages/support/supporters.md @@ -7,9 +7,9 @@ featuremap: ~~Supporters:Fragment~~ --- -We are proud that over 80% of DOAJ's funding comes from academic organisations (libraries, library consortia, universities, research centres). Without this vital support, we wouldn't be able to continue the high levels of service that the research community expects of us. We are grateful for the trust shown in us by our supporters. +We are proud that over 80% of our funding comes from academic organisations (libraries, library consortia, universities, research centres). Without this vital support, we couldn't deliver the services the research community expects of us. We are grateful for the trust shown in us by our supporters. - Check [our support page](/support/) for more information on supporter levels and categories. +Check [our Institutions and libraries support page](/support/) for pricing and benefits. 
--- diff --git a/cms/sass/components/_skip-to-main-content.scss b/cms/sass/components/_skip-to-main-content.scss new file mode 100644 index 0000000000..3542adb62c --- /dev/null +++ b/cms/sass/components/_skip-to-main-content.scss @@ -0,0 +1,35 @@ +/* Back to main content button */ + +.skip-to-main { + position: absolute; + z-index: 10000; + display: flex; + flex-direction: row; + align-items: center; + min-width: min-content; + padding: 5px; + top: 10px; + left: 10px; + background-color: $grapefruit; + + svg { + display: block; + margin: 0 auto; + stroke: $warm-black; + margin-rigth: 10px; + } + &:hover, &:focus { + svg { + margin-right: 10px; + } + } + &:hover:after, &:focus:after { + content: " Skip to main content"; + color: $warm-black; + vertical-align: bottom; + -webkit-font-feature-settings: 'liga' 1; + -moz-font-feature-settings: 'liga' 1; + font-feature-settings: 'liga' 1; + transition: 0.5 smooth; + } +} \ No newline at end of file diff --git a/cms/sass/main.scss b/cms/sass/main.scss index cdd22133b8..17ec6552f5 100644 --- a/cms/sass/main.scss +++ b/cms/sass/main.scss @@ -52,6 +52,7 @@ "components/review-table", "components/select2", "components/search-results", + "components/skip-to-main-content", "components/stat", "components/stretch-list", "components/tabs", diff --git a/deploy/doaj_gunicorn_config.py b/deploy/doaj_gunicorn_config.py index f9425de5e5..a08dd6ef62 100644 --- a/deploy/doaj_gunicorn_config.py +++ b/deploy/doaj_gunicorn_config.py @@ -1,7 +1,7 @@ import multiprocessing bind = "0.0.0.0:5050" -workers = multiprocessing.cpu_count() * 8 + 1 +workers = multiprocessing.cpu_count() * 6 + 1 proc_name = 'doaj' max_requests = 1000 @@ -13,4 +13,4 @@ max_requests_jitter = 100 timeout = 40 -graceful_timeout = 40 \ No newline at end of file +graceful_timeout = 40 diff --git a/deploy/lambda/alert_backups_missing.py b/deploy/lambda/alert_backups_missing.py index 38a9edbc2e..566a361b9a 100644 --- a/deploy/lambda/alert_backups_missing.py +++ 
b/deploy/lambda/alert_backups_missing.py @@ -1,5 +1,5 @@ #!/usr/bin/env python3 -""" Steven Eardley 2020-02-07 for DOAJ - uploaded manually (todo: we should upload this in the release script) """ +""" Steven Eardley 2023-09-15 for DOAJ - uploaded manually (todo: we should upload this in the release script) """ # ~~BackupsMissing:Monitoring->Lambda:Technology~~ @@ -8,23 +8,25 @@ import json from datetime import datetime, timezone, timedelta -from portality.lib.dates import FMT_DATETIME_STD - s3 = boto3.client('s3') # Check the doaj elasticsearch snapshot bucket has been updated today (should happen daily at 0600 via background job) -buckets = ['doaj-index-backups'] +buckets = ['doaj-index-ipt-backups'] + # Check the doaj-nginx logs bucket has been updated today (should happen daily at 0630 via cron logrotate) -buckets += ['doaj-nginx-logs'] +# buckets += ['doaj-nginx-logs'] def lambda_handler(event, context): """ The main function executed by Lambda""" + start = datetime.utcnow() summary = {'success': [], 'fail': []} for b in buckets: + print('Checking bucket {0} was updated today'.format(b)) + # First check the bucket actually exists try: s3.head_bucket(Bucket=b) @@ -32,11 +34,13 @@ def lambda_handler(event, context): error_code = int(e.response['Error']['Code']) if error_code == 404: send_alert_email(b, last_mod=None) + raise # Then check the expected entry exists in the bucket's objects. files = list_bucket_keys(bucket_name=b) old_to_new = sorted(files, key=lambda f: f['LastModified']) newest = old_to_new[-1] + print('Latest backup is', newest) # If the newest file is older than 1 day old, our backups are not up to date. 
if datetime.now(timezone.utc) - newest['LastModified'] > timedelta(days=1): @@ -47,6 +51,8 @@ def lambda_handler(event, context): summary['success'].append(b) print(summary) # For the CloudWatch logs + print('Completed in', str(datetime.utcnow() - start)) + return str(summary) @@ -86,8 +92,8 @@ def send_alert_email(bucket, last_mod): msg = 'AWS backup error: bucket {b} is missing.'.format(b=bucket) else: msg = 'AWS backup error: bucket {b} has not been updated today - it was last modified on {t}.' \ - '\nYou may wish to check the corresponding logs.'.format(b=bucket, - t=last_mod.strftime(FMT_DATETIME_STD)) + '\nYou may wish to check the corresponding logs.'.format(b=bucket, t=last_mod.strftime( + '%Y-%m-%dT%H:%M:%SZ')) r = botocore.vendored.requests.post('https://api.mailgun.net/v3/doaj.org/messages', auth=('api', credentials.get('ERROR_MAIL_API_KEY', '')), diff --git a/doajtest/matrices/article_create_article/issn_validation_against_journal.matrix.csv b/doajtest/matrices/article_create_article/issn_validation_against_journal.matrix.csv new file mode 100644 index 0000000000..0d2f704aba --- /dev/null +++ b/doajtest/matrices/article_create_article/issn_validation_against_journal.matrix.csv @@ -0,0 +1,17 @@ +test_id,eissn,pissn,validated +1,eissn_in_doaj,pissn_in_doaj,yes +2,eissn_in_doaj,eissn_not_in_doaj, +3,eissn_in_doaj,pissn_not_in_doaj, +4,eissn_in_doaj,!eissn_in_doaj, +5,pissn_in_doaj,eissn_in_doaj, +6,pissn_in_doaj,eissn_not_in_doaj, +7,pissn_in_doaj,pissn_not_in_doaj, +8,pissn_in_doaj,!pissn_in_doaj, +9,eissn_not_in_doaj,eissn_in_doaj, +10,eissn_not_in_doaj,pissn_in_doaj, +11,eissn_not_in_doaj,pissn_not_in_doaj, +12,eissn_not_in_doaj,!eissn_not_in_doaj, +13,pissn_not_in_doaj,eissn_in_doaj, +14,pissn_not_in_doaj,pissn_in_doaj, +15,pissn_not_in_doaj,eissn_not_in_doaj, +16,pissn_not_in_doaj,!pissn_not_in_doaj, diff --git a/doajtest/matrices/article_create_article/issn_validation_against_journal.settings.csv 
b/doajtest/matrices/article_create_article/issn_validation_against_journal.settings.csv new file mode 100644 index 0000000000..a8eab3f4ce --- /dev/null +++ b/doajtest/matrices/article_create_article/issn_validation_against_journal.settings.csv @@ -0,0 +1,19 @@ +field,test_id,eissn,pissn,validated +type,index,generated,generated,conditional +deafult,,,,no +,,,, +values,,eissn_in_doaj,eissn_in_doaj,yes +values,,pissn_in_doaj,pissn_in_doaj,no +values,,eissn_not_in_doaj,eissn_not_in_doaj, +values,,pissn_not_in_doaj,pissn_not_in_doaj, +,,,, +,,,, +conditional validated,,eissn_in_doaj,pissn_in_doaj,yes +constraint eissn,,eissn_in_doaj,!eissn_in_doaj, +constraint eissn,,eissn_not_in_doaj,!eissn_not_in_doaj, +constraint eissn,,pissn_not_in_doaj,!pissn_not_in_doaj, +constraint eissn,,pissn_in_doaj,!pissn_in_doaj, +constraint pissn,,eissn_in_doaj,!eissn_in_doaj, +constraint pissn,,eissn_not_in_doaj,!eissn_not_in_doaj, +constraint pissn,,pissn_not_in_doaj,!pissn_not_in_doaj, +constraint pissn,,pissn_in_doaj,!pissn_in_doaj, \ No newline at end of file diff --git a/doajtest/matrices/article_create_article/issn_validation_against_journal.settings.json b/doajtest/matrices/article_create_article/issn_validation_against_journal.settings.json new file mode 100644 index 0000000000..11d1012a96 --- /dev/null +++ b/doajtest/matrices/article_create_article/issn_validation_against_journal.settings.json @@ -0,0 +1,119 @@ +{ + "parameters": [ + { + "name": "test_id", + "type": "index" + }, + { + "name": "eissn", + "type": "generated", + "values": { + "eissn_in_doaj": { + "constraints": { + "pissn": { + "nor": [ + "eissn_in_doaj" + ] + } + } + }, + "pissn_in_doaj": { + "constraints": { + "pissn": { + "nor": [ + "pissn_in_doaj" + ] + } + } + }, + "eissn_not_in_doaj": { + "constraints": { + "pissn": { + "nor": [ + "eissn_not_in_doaj" + ] + } + } + }, + "pissn_not_in_doaj": { + "constraints": { + "pissn": { + "nor": [ + "pissn_not_in_doaj" + ] + } + } + } + } + }, + { + "name": "pissn", + 
"type": "generated", + "values": { + "eissn_in_doaj": {}, + "pissn_in_doaj": {}, + "eissn_not_in_doaj": {}, + "pissn_not_in_doaj": {}, + "!eissn_in_doaj": { + "constraints": { + "eissn": { + "or": [ + "eissn_in_doaj" + ] + } + } + }, + "!eissn_not_in_doaj": { + "constraints": { + "eissn": { + "or": [ + "eissn_not_in_doaj" + ] + } + } + }, + "!pissn_not_in_doaj": { + "constraints": { + "eissn": { + "or": [ + "pissn_not_in_doaj" + ] + } + } + }, + "!pissn_in_doaj": { + "constraints": { + "eissn": { + "or": [ + "pissn_in_doaj" + ] + } + } + } + } + }, + { + "name": "validated", + "type": "conditional", + "values": { + "yes": { + "conditions": [ + { + "eissn": { + "or": [ + "eissn_in_doaj" + ] + }, + "pissn": { + "or": [ + "pissn_in_doaj" + ] + } + } + ] + }, + "no": {} + } + } + ] +} \ No newline at end of file diff --git a/doajtest/testbook/public_site/home_page.yml b/doajtest/testbook/public_site/home_page.yml index c2261786cc..625df716f7 100644 --- a/doajtest/testbook/public_site/home_page.yml +++ b/doajtest/testbook/public_site/home_page.yml @@ -120,3 +120,22 @@ tests: bottom right-hand corner. 
results: - You are returned to the top of the home page +- title: Skip to main content button (Accessibility) + context: + role: anonymous + steps: + - step: Refresh the page + - step: Click tab key on the keyboard once + results: + - Skip to the main content button is unfolded and focused + - step: Click enter + results: + - Focus is moved to the main content + - step: Turn on screen reader + - step: With the keyboard navigate to Skip to main content button + results: + - Screen reader reads the button title + - step: Click enter + results: + - Focus is moved to the main content + diff --git a/doajtest/unit/resources/harvester_resp.json b/doajtest/unit/resources/harvester_resp.json index dc24cb7dd9..133fedaf24 100644 --- a/doajtest/unit/resources/harvester_resp.json +++ b/doajtest/unit/resources/harvester_resp.json @@ -45,8 +45,8 @@ "journal": { "title": "My Journal", "medlineAbbreviation": "My Jour", - "essn": "1234-5678", - "issn": "9876-5432", + "issn": "1234-5678", + "essn": "9876-5432", "isoabbreviation": "My Jour", "nlmid": "123456789" } @@ -143,8 +143,8 @@ "journal": { "title": "My Journal", "medlineAbbreviation": "My Jour", - "essn": "1234-5678", - "issn": "9876-5432", + "issn": "1234-5678", + "essn": "9876-5432", "isoabbreviation": "My Jour", "nlmid": "123456789" } diff --git a/doajtest/unit/test_article_acceptable_and_permissions.py b/doajtest/unit/test_article_acceptable_and_permissions.py index eb4c04d4fb..5e0328635f 100644 --- a/doajtest/unit/test_article_acceptable_and_permissions.py +++ b/doajtest/unit/test_article_acceptable_and_permissions.py @@ -14,6 +14,11 @@ def is_acceptable_load_cases(): "test_id", {"test_id": []}) +def issn_validation_against_journal_load_sets(): + return load_parameter_sets(rel2abs(__file__, "..", "matrices", "article_create_article"), "issn_validation_against_journal", + "test_id", + {"test_id": []}) + class TestBLLPrepareUpdatePublisher(DoajTestCase): @@ -110,4 +115,73 @@ def test_has_permissions(self): assert 
failed_result["unowned"].sort() == [pissn, eissn].sort() # assert failed_result == {'success': 0, 'fail': 1, 'update': 0, 'new': 0, 'shared': [], # 'unowned': [pissn, eissn], - # 'unmatched': []}, "received: {}".format(failed_result) \ No newline at end of file + # 'unmatched': []}, "received: {}".format(failed_result) + + + @parameterized.expand(issn_validation_against_journal_load_sets) + def test_issn_validation_against_journal_load_sets(self, value, kwargs): + kwpissn = kwargs.get("pissn") + kweissn = kwargs.get("eissn") + validated = kwargs.get("validated") + + js = JournalFixtureFactory.make_many_journal_sources(2) + journal_in_doaj = Journal(**js[0]) + journal_in_doaj.set_in_doaj(True) + journal_in_doaj.bibjson().pissn = "1111-1111" + journal_in_doaj.bibjson().eissn = "2222-2222" + journal_in_doaj.save(blocking=True) + + journal_not_in_doaj = Journal(**js[1]) + journal_not_in_doaj.set_in_doaj(False) + journal_not_in_doaj.bibjson().pissn = "3333-3333" + journal_not_in_doaj.bibjson().eissn = "4444-4444" + journal_not_in_doaj.save(blocking=True) + + if (kwpissn == "pissn_in_doaj"): + pissn = journal_in_doaj.bibjson().pissn + elif (kwpissn == "eissn_in_doaj"): + pissn = journal_in_doaj.bibjson().eissn + elif (kwpissn == "pissn_not_in_doaj"): + pissn = journal_not_in_doaj.bibjson().pissn + else: + pissn = journal_not_in_doaj.bibjson().eissn + + if (kweissn == "pissn_in_doaj"): + eissn = journal_in_doaj.bibjson().pissn + elif (kweissn == "eissn_in_doaj"): + eissn = journal_in_doaj.bibjson().eissn + elif (kweissn == "pissn_not_in_doaj"): + eissn = journal_not_in_doaj.bibjson().pissn + else: + eissn = journal_not_in_doaj.bibjson().eissn + + + art_source = ArticleFixtureFactory.make_article_source(pissn=pissn, eissn=eissn) + article = Article(**art_source) + + if validated: + self.assertIsNone(self.svc.is_acceptable(article)) + + else: + with self.assertRaises(exceptions.ArticleNotAcceptable): + self.svc.is_acceptable(article) + + def 
test_check_validation_for_2_journals(self): + + js = JournalFixtureFactory.make_many_journal_sources(2, in_doaj=True) + journal_in_doaj = Journal(**js[0]) + journal_in_doaj.bibjson().pissn = "1111-1111" + journal_in_doaj.bibjson().eissn = "2222-2222" + journal_in_doaj.save(blocking=True) + + journal_not_in_doaj = Journal(**js[1]) + journal_not_in_doaj.bibjson().pissn = "3333-3333" + journal_not_in_doaj.bibjson().eissn = "4444-4444" + journal_not_in_doaj.save(blocking=True) + + + art_source = ArticleFixtureFactory.make_article_source(pissn="1111-1111", eissn="4444-4444") + article = Article(**art_source) + + with self.assertRaises(exceptions.ArticleNotAcceptable): + self.svc.is_acceptable(article) \ No newline at end of file diff --git a/doajtest/unit/test_bll_article_batch_create_article.py b/doajtest/unit/test_bll_article_batch_create_article.py index 6cda9ee82c..34f537c7a8 100644 --- a/doajtest/unit/test_bll_article_batch_create_article.py +++ b/doajtest/unit/test_bll_article_batch_create_article.py @@ -5,7 +5,7 @@ from doajtest.helpers import DoajTestCase from portality.bll import DOAJ from portality.bll import exceptions -from portality.models import Article, Account,Journal +from portality.models import Article, Account, Journal from portality.lib.paths import rel2abs from doajtest.mocks.bll_article import BLLArticleMockFactory from doajtest.mocks.model_Article import ModelArticleMockFactory @@ -37,12 +37,14 @@ def setUp(self): self._get_duplicate = self.svc.get_duplicate self._issn_ownership_status = self.svc.issn_ownership_status self._get_journal = Article.get_journal + self._find_by_issn_exact = Journal.find_by_issn_exact def tearDown(self): self.svc.is_legitimate_owner = self._is_legitimate_owner self.svc.get_duplicate = self._get_duplicate self.svc.issn_ownership_status = self._issn_ownership_status Article.get_journal = self._get_journal + Journal.find_by_issn_exact = self._find_by_issn_exact super(TestBLLArticleBatchCreateArticle, self).tearDown() 
@parameterized.expand(load_cases) @@ -118,8 +120,8 @@ def test_01_batch_create_article(self, name, kwargs): article = Article(**source) article.set_id() articles.append(article) - if add_journal_info: - journal_specs.append({"title" : "0", "pissn" : "0000-0000", "eissn" : "0000-0001"}) + # We always need a journal to exist for an article to be created + journal_specs.append({"title" : "0", "pissn" : "0000-0000", "eissn" : "0000-0001"}) # another with a DOI and no fulltext source = ArticleFixtureFactory.make_article_source( @@ -132,8 +134,7 @@ def test_01_batch_create_article(self, name, kwargs): article = Article(**source) article.set_id() articles.append(article) - if add_journal_info: - journal_specs.append({"title" : "1", "pissn" : "1111-1112", "eissn" : "1111-1111"}) + journal_specs.append({"title" : "1", "pissn" : "1111-1112", "eissn" : "1111-1111"}) # one with a fulltext and no DOI source = ArticleFixtureFactory.make_article_source( @@ -146,8 +147,7 @@ def test_01_batch_create_article(self, name, kwargs): article = Article(**source) article.set_id() articles.append(article) - if add_journal_info: - journal_specs.append({"title" : "2", "pissn" : "2222-2222", "eissn" : "2222-2223"}) + journal_specs.append({"title" : "2", "pissn" : "2222-2222", "eissn" : "2222-2223"}) # another one with a fulltext and no DOI source = ArticleFixtureFactory.make_article_source( @@ -160,8 +160,7 @@ def test_01_batch_create_article(self, name, kwargs): article = Article(**source) article.set_id() articles.append(article) - if add_journal_info: - journal_specs.append({"title" : "3", "pissn" : "3333-3333", "eissn" : "3333-3334"}) + journal_specs.append({"title" : "3", "pissn" : "3333-3333", "eissn" : "3333-3334"}) last_issn = "3333-3333" last_doi = "10.123/abc/1" @@ -180,8 +179,7 @@ def test_01_batch_create_article(self, name, kwargs): article = Article(**source) article.set_id() articles.append(article) - if add_journal_info: - journal_specs.append({"title" : "4", "pissn" : 
"4444-4444", "eissn" : "4444-4445"}) + journal_specs.append({"title" : "4", "pissn" : "4444-4444", "eissn" : "4444-4445"}) # one with a duplicated Fulltext source = ArticleFixtureFactory.make_article_source( @@ -194,8 +192,7 @@ def test_01_batch_create_article(self, name, kwargs): article = Article(**source) article.set_id() articles.append(article) - if add_journal_info: - journal_specs.append({"title" : "5", "pissn" : "5555-5555", "eissn" : "5555-5556"}) + journal_specs.append({"title" : "5", "pissn" : "5555-5555", "eissn" : "5555-5556"}) ilo_mock = None if account_arg == "owner": @@ -224,6 +221,18 @@ def test_01_batch_create_article(self, name, kwargs): gj_mock = ModelArticleMockFactory.get_journal(journal_specs, in_doaj=journal_in_doaj) Article.get_journal = gj_mock + # We need the journal to be in the index for the ArticleAcceptable checks FIXME: too slow, mock this + #[Journal(**js['instance']).save(blocking=True) for js in journal_specs] + + # We need to retrieve the correct Journal by its ISSNs + def mock_find(issns: list, in_doaj=None, max=2): + for j in journal_specs: + if sorted([j['eissn'], j['pissn']]) == sorted(issns): + return [j['instance']] + return [] + + Journal.find_by_issn_exact = mock_find + ########################################################### # Execution diff --git a/doajtest/unit/test_bll_article_create_article.py b/doajtest/unit/test_bll_article_create_article.py index f595a1b96e..d9d524efe7 100644 --- a/doajtest/unit/test_bll_article_create_article.py +++ b/doajtest/unit/test_bll_article_create_article.py @@ -35,7 +35,6 @@ def setUp(self): self.prepare_update_admin = self.svc._prepare_update_admin self.prepare_update_publisher = self.svc._prepare_update_publisher - def tearDown(self): super(TestBLLArticleCreateArticle, self).tearDown() diff --git a/doajtest/unit/test_models.py b/doajtest/unit/test_models.py index 5551cdcf5d..06175e6d76 100644 --- a/doajtest/unit/test_models.py +++ b/doajtest/unit/test_models.py @@ -1661,3 +1661,30 
@@ def test_get_name_safe(self): # account does not exist assert models.Account.get_name_safe('not existing account id') == '' + def test_11_find_by_issn(self): + js = JournalFixtureFactory.make_many_journal_sources(2, in_doaj=True) + j1 = models.Journal(**js[0]) + j1.bibjson().pissn = "1111-1111" + j1.bibjson().eissn = "2222-2222" + j1.save(blocking=True) + + j2 = models.Journal(**js[1]) + j2.bibjson().pissn = "3333-3333" + j2.bibjson().eissn = "4444-4444" + j2.save(blocking=True) + + journals = models.Journal.find_by_issn(["1111-1111", "2222-2222"], True) + assert len(journals) == 1 + assert journals[0].id == j1.id + + journals = models.Journal.find_by_issn(["1111-1111", "3333-3333"], True) + assert len(journals) == 2 + assert journals[0].id == j1.id + assert journals[1].id == j2.id + + journals = models.Journal.find_by_issn_exact(["1111-1111", "2222-2222"], True) + assert len(journals) == 1 + assert journals[0].id == j1.id + + journals = models.Journal.find_by_issn_exact(["1111-1111", "3333-3333"], True) + assert len(journals) == 0 \ No newline at end of file diff --git a/doajtest/unit/test_tasks_ingestCrossref442Articles.py b/doajtest/unit/test_tasks_ingestCrossref442Articles.py index 2714b33644..ed2236552c 100644 --- a/doajtest/unit/test_tasks_ingestCrossref442Articles.py +++ b/doajtest/unit/test_tasks_ingestCrossref442Articles.py @@ -1315,11 +1315,11 @@ def test_40_crossref_2_journals_different_owners_issn_each_fail(self): found = [a for a in models.Article.find_by_issns(["1234-5678", "9876-5432"])] assert len(found) == 0 - def test_41_crossref_2_journals_same_owner_issn_each_success(self): + def test_41_crossref_2_journals_same_owner_issn_each_fail(self): etree.XMLSchema = self.mock_load_schema # Create 2 journals with the same owner, each with one different issn. 
The article's 2 issns # match each of these issns - # We expect a successful article ingest + # We expect a failed ingest - an article must match with only ONE journal j1 = models.Journal() j1.set_owner("testowner") @@ -1365,19 +1365,19 @@ def test_41_crossref_2_journals_same_owner_issn_each_success(self): fu = models.FileUpload.pull(id) assert fu is not None - assert fu.status == "processed" - assert fu.imported == 1 + assert fu.status == "failed" + assert fu.imported == 0 assert fu.updates == 0 - assert fu.new == 1 + assert fu.new == 0 fr = fu.failure_reasons + assert len(fr) > 0 assert len(fr.get("shared", [])) == 0 assert len(fr.get("unowned", [])) == 0 - assert len(fr.get("unmatched", [])) == 0 + assert len(fr.get("unmatched", [])) == 2 found = [a for a in models.Article.find_by_issns(["1234-5678", "9876-5432"])] - assert len(found) == 1 - + assert len(found) == 0 def test_42_crossref_2_journals_different_owners_different_issns_mixed_article_fail(self): etree.XMLSchema = self.mock_load_schema diff --git a/doajtest/unit/test_tasks_ingestCrossref531Articles.py b/doajtest/unit/test_tasks_ingestCrossref531Articles.py index 27308a3d22..09edcf1b1d 100644 --- a/doajtest/unit/test_tasks_ingestCrossref531Articles.py +++ b/doajtest/unit/test_tasks_ingestCrossref531Articles.py @@ -624,7 +624,7 @@ def test_23_crossref_process_success(self): j.set_owner("testowner") bj = j.bibjson() bj.add_identifier(bj.P_ISSN, "1234-5678") - j.save() + j.save(blocking=True) asource = AccountFixtureFactory.make_publisher_source() account = models.Account(**asource) @@ -634,6 +634,7 @@ def test_23_crossref_process_success(self): # push an article to initialise the mappings source = ArticleFixtureFactory.make_article_source() article = models.Article(**source) + article.bibjson().add_identifier(bj.P_ISSN, "1234-5678") article.save(blocking=True) article.delete() models.Article.blockdeleted(article.id) diff --git a/doajtest/unit/test_tasks_ingestDOAJarticles.py 
b/doajtest/unit/test_tasks_ingestDOAJarticles.py index 2872124a47..a2eb5f2be9 100644 --- a/doajtest/unit/test_tasks_ingestDOAJarticles.py +++ b/doajtest/unit/test_tasks_ingestDOAJarticles.py @@ -1260,10 +1260,10 @@ def test_40_doaj_2_journals_different_owners_issn_each_fail(self): found = [a for a in models.Article.find_by_issns(["1234-5678", "9876-5432"])] assert len(found) == 0 - def test_41_doaj_2_journals_same_owner_issn_each_success(self): + def test_41_doaj_2_journals_same_owner_issn_each_fail(self): # Create 2 journals with the same owner, each with one different issn. The article's 2 issns # match each of these issns - # We expect a successful article ingest + # We expect a failed article ingest - articles must match only ONE journal j1 = models.Journal() j1.set_owner("testowner") bj1 = j1.bibjson() @@ -1301,18 +1301,18 @@ def test_41_doaj_2_journals_same_owner_issn_each_success(self): fu = models.FileUpload.pull(id) assert fu is not None - assert fu.status == "processed" - assert fu.imported == 1 + assert fu.status == "failed" + assert fu.imported == 0 assert fu.updates == 0 - assert fu.new == 1 + assert fu.new == 0 fr = fu.failure_reasons assert len(fr.get("shared", [])) == 0 assert len(fr.get("unowned", [])) == 0 - assert len(fr.get("unmatched", [])) == 0 + assert len(fr.get("unmatched", [])) == 2 # error message for each article found = [a for a in models.Article.find_by_issns(["1234-5678", "9876-5432"])] - assert len(found) == 1 + assert len(found) == 0 def test_42_doaj_2_journals_different_owners_different_issns_mixed_article_fail(self): # Create 2 different journals with different owners and different issns (2 each). 
diff --git a/portality/bll/exceptions.py b/portality/bll/exceptions.py index 3bb676f984..005ad7f31c 100644 --- a/portality/bll/exceptions.py +++ b/portality/bll/exceptions.py @@ -66,6 +66,7 @@ class ArticleNotAcceptable(Exception): """ def __init__(self, *args, **kwargs): self.message = kwargs.get("message", "") + self.result = kwargs.get("result", {}) super(ArticleNotAcceptable, self).__init__(*args) def __str__(self): diff --git a/portality/bll/services/article.py b/portality/bll/services/article.py index 7b55894d24..b5e829cd24 100644 --- a/portality/bll/services/article.py +++ b/portality/bll/services/article.py @@ -56,6 +56,9 @@ def batch_create_articles(self, articles, account, duplicate_check=True, merge_d all_unowned = set() all_unmatched = set() + # Hold on to the exception so we can raise it later + e_not_acceptable = None + for article in articles: try: # ~~!ArticleBatchCreate:Feature->ArticleCreate:Feature~~ @@ -67,6 +70,10 @@ def batch_create_articles(self, articles, account, duplicate_check=True, merge_d dry_run=True) except (exceptions.ArticleMergeConflict, exceptions.ConfigurationException): raise exceptions.IngestException(message=Messages.EXCEPTION_ARTICLE_BATCH_CONFLICT) + except exceptions.ArticleNotAcceptable as e: + # The ArticleNotAcceptable exception is a superset of reasons we can't match a journal to this article + e_not_acceptable = e + result = {'fail': 1, 'unmatched': set(article.bibjson().issns())} success += result.get("success", 0) fail += result.get("fail", 0) @@ -90,6 +97,8 @@ def batch_create_articles(self, articles, account, duplicate_check=True, merge_d # return some stats on the import return report else: + if e_not_acceptable is not None: + raise exceptions.ArticleNotAcceptable(message=e_not_acceptable.message, result=report) raise exceptions.IngestException(message=Messages.EXCEPTION_ARTICLE_BATCH_FAIL, result=report) @staticmethod @@ -159,9 +168,6 @@ def _validate_issns(article_bibjson: models.ArticleBibJSON): if len(pissn) > 
1 or len(eissn) > 1: raise exceptions.ArticleNotAcceptable(message=Messages.EXCEPTION_TOO_MANY_ISSNS) - pissn = article_bibjson.get_one_identifier("pissn") - eissn = article_bibjson.get_one_identifier("eissn") - # no pissn or eissn if not pissn and not eissn: raise exceptions.ArticleNotAcceptable(message=Messages.EXCEPTION_NO_ISSNS) @@ -204,18 +210,18 @@ def create_article(self, article, account, duplicate_check=True, merge_duplicate {"arg": update_article_id, "instance": str, "allow_none": True, "arg_name": "update_article_id"} ], exceptions.ArgumentException) - # quickly validate that the article is acceptable - it must have a DOI and/or a fulltext - # this raises an exception if the article is not acceptable, containing all the relevant validation details + has_permissions_result = self.has_permissions(account, article, limit_to_account) + if isinstance(has_permissions_result, dict): + return has_permissions_result + # Validate that the article is acceptable: it must have a DOI and/or a fulltext & match only one in_doaj journal + # this raises an exception if the article is not acceptable, containing all the relevant validation details + # We do this after the permissions check because that gives a detailed result whereas this throws an exception try: self.is_acceptable(article) except Exception as e: raise e - has_permissions_result = self.has_permissions(account, article, limit_to_account) - if isinstance(has_permissions_result,dict): - return has_permissions_result - is_update = 0 if duplicate_check: # ~~!ArticleCreate:Feature->ArticleDeduplication:Feature~~ @@ -252,7 +258,8 @@ def has_permissions(self, account, article, limit_to_account): def is_acceptable(self, article: models.Article): """ conduct some deep validation on the article to make sure we will accept it - or the moment, this just means making sure it has a DOI and a fulltext + this just means making sure it has a DOI and a fulltext, and that its ISSNs + match a single journal """ try: bj = 
article.bibjson() @@ -266,12 +273,40 @@ def is_acceptable(self, article: models.Article): raise exceptions.ArticleNotAcceptable(message=Messages.EXCEPTION_NO_DOI_NO_FULLTEXT) self._validate_issns(bj) + journal = self.match_journal_with_validation(bj) # is journal in doaj (we do this check last as it has more performance impact) - journal = article.get_journal() if journal is None or not journal.is_in_doaj(): raise exceptions.ArticleNotAcceptable(message=Messages.EXCEPTION_ADDING_ARTICLE_TO_WITHDRAWN_JOURNAL) + @staticmethod + def match_journal_with_validation(article_bibjson: models.ArticleBibJSON): + pissn = article_bibjson.get_one_identifier("pissn") + eissn = article_bibjson.get_one_identifier("eissn") + + issns = [] + + if pissn is not None: + issns.append(pissn) + if eissn is not None: + issns.append(eissn) + + # Find an exact match, whether in_doaj or not + journal = models.Journal.find_by_issn_exact(issns) + + # Check that exactly one journal matches, and that the article's pissn / eissn each appear + # in the corresponding field of that journal (i.e. the ISSNs are not swapped or mismatched) + if len(journal) != 1: + raise exceptions.ArticleNotAcceptable(message=Messages.EXCEPTION_MISMATCHED_ISSNS) + if pissn is not None: + if journal[0].bibjson().pissn != pissn: + raise exceptions.ArticleNotAcceptable(message=Messages.EXCEPTION_MISMATCHED_ISSNS) + if eissn is not None: + if journal[0].bibjson().eissn != eissn: + raise exceptions.ArticleNotAcceptable(message=Messages.EXCEPTION_MISMATCHED_ISSNS) + + return journal[0] + @staticmethod def is_legitimate_owner(article, owner): """ @@ -369,6 +404,10 @@ def issn_ownership_status(article, owner): issns = b.get_identifiers(b.P_ISSN) issns += b.get_identifiers(b.E_ISSN) + # FIXME: Duplicate check due to inconsistent control flow (result vs exception) + if len(issns) == 0: + raise exceptions.ArticleNotAcceptable(message=Messages.EXCEPTION_NO_ISSNS) + owned = [] shared = [] 
unowned = [] diff --git a/portality/forms/application_processors.py b/portality/forms/application_processors.py index 13a294d14d..1cd426c1f6 100644 --- a/portality/forms/application_processors.py +++ b/portality/forms/application_processors.py @@ -198,8 +198,11 @@ def _patch_target_note_id(self): for note in self.target.notes: note_date = dates.parse(note['date']) if not note.get('author_id') and note_date > dates.before_now(60): - note['author_id'] = current_user.id - + try: + note['author_id'] = current_user.id + except AttributeError: + # Skip if we don't have a current_user + pass class NewApplication(ApplicationProcessor): @@ -307,7 +310,6 @@ def patch_target(self): if (self.target.owner is None or self.target.owner == "") and (self.source.owner is not None): self.target.set_owner(self.source.owner) - def finalise(self, account, save_target=True, email_alert=True): """ account is the administrator account carrying out the action @@ -326,7 +328,6 @@ def finalise(self, account, save_target=True, email_alert=True): elif not j.is_in_doaj(): raise Exception(Messages.EXCEPTION_EDITING_WITHDRAWN_JOURNAL) - # if we are allowed to finalise, kick this up to the superclass super(AdminApplication, self).finalise() @@ -813,7 +814,6 @@ def patch_target(self): if (self.target.owner is None or self.target.owner == "") and (self.source.owner is not None): self.target.set_owner(self.source.owner) - def finalise(self): # FIXME: this first one, we ought to deal with outside the form context, but for the time being this # can be carried over from the old implementation diff --git a/portality/lib/plausible.py b/portality/lib/plausible.py index 2aa602d986..90b1b8f46b 100644 --- a/portality/lib/plausible.py +++ b/portality/lib/plausible.py @@ -62,7 +62,7 @@ def send_event(goal: str, on_completed=None, **props_kwargs): def _send(): resp = requests.post(plausible_api_url, json=payload, headers=headers) if resp.status_code >= 300: - logger.warning(f'send plausible event api fail. 
[{resp.status_code}][{resp.text}]') + logger.warning(f'Send plausible event API fail. snd: [{resp.url}] [{headers}] [{payload}] rcv: [{resp.status_code}] [{resp.text}]') if on_completed: on_completed(resp) diff --git a/portality/models/v2/journal.py b/portality/models/v2/journal.py index ac1ce42585..efa6aa53e4 100644 --- a/portality/models/v2/journal.py +++ b/portality/models/v2/journal.py @@ -70,6 +70,22 @@ def find_by_issn(cls, issns, in_doaj=None, max=10): records = [cls(**r.get("_source")) for r in result.get("hits", {}).get("hits", [])] return records + @classmethod + def find_by_issn_exact(cls, issns, in_doaj=None, max=2): + """ + Find journals matching the given ISSNs exactly - barring data problems, there should be at most one + """ + if not isinstance(issns, list): + issns = [issns] + if len(issns) > 2: + return [] + q = JournalQuery() + q.find_by_issn_exact(issns, in_doaj=in_doaj, max=max) + result = cls.query(q=q.query) + # create an array of objects, using cls rather than Journal, which means subclasses can use it too + records = [cls(**r.get("_source")) for r in result.get("hits", {}).get("hits", [])] + return records + @classmethod def issns_by_owner(cls, owner, in_doaj=None): q = IssnQuery(owner, in_doaj=in_doaj) @@ -922,6 +938,16 @@ class JournalQuery(object): } } + must_query = { + "track_total_hits": True, + "query": { + "bool": { + "must": [ + ] + } + } + } + all_doaj = { "track_total_hits": True, "query": { @@ -947,6 +973,14 @@ def find_by_issn(self, issns, in_doaj=None, max=10): self.query["query"]["bool"]["must"].append({"term": {"admin.in_doaj": in_doaj}}) self.query["size"] = max + def find_by_issn_exact(self, issns, in_doaj=None, max=10): + self.query = deepcopy(self.must_query) + for issn in issns: + self.query["query"]["bool"]["must"].append({"term": {"index.issn.exact": issn}}) + if in_doaj is not None: + self.query["query"]["bool"]["must"].append({"term": {"admin.in_doaj": in_doaj}}) + self.query["size"] = max + def all_in_doaj(self): q = 
deepcopy(self.all_doaj) if self.minified: diff --git a/portality/scripts/journals_update_via_csv.py b/portality/scripts/journals_update_via_csv.py index 298b7c817b..c696068a85 100644 --- a/portality/scripts/journals_update_via_csv.py +++ b/portality/scripts/journals_update_via_csv.py @@ -82,6 +82,7 @@ reader = csv.DictReader(g, fieldnames=header_row) # verify header row with current CSV headers, report errors + # TODO: Include 'Owner' field - but we should probably base this process off the AdminCSV too. expected_headers = JournalFixtureFactory.csv_headers() # Always perform a match check on supplied headers, not counting order @@ -155,6 +156,14 @@ if len(updates) > 0: [print(upd) for upd in updates] + # Check we have the expected owner (if supplied) before proceeding to create an update request + own = row.get('Owner') + if own is not None: + if own.strip().lower() != j.owner.strip().lower(): + print('ABORTING - supplied owner {0} mismatches journal owner {1}.'.format(own, j.owner)) + writer.writerow([j.id, ' | '.join(updates), 'COULD NOT UPDATE - Owner mismatch. 
Expected {0} Got {1}'.format(own, j.owner)]) + continue + # Create an update request for this journal update_req = None jlock = None @@ -204,7 +213,7 @@ # Add note to UR if supplied if note: - fc.target.add_note(note) + fc.target.add_note(note, author_id=sys_acc.id) if not args.manual_review: # This is the update request, in 'update request' state diff --git a/portality/scripts/manage_background_jobs.py b/portality/scripts/manage_background_jobs.py index fbfa648f8b..4faa18193d 100644 --- a/portality/scripts/manage_background_jobs.py +++ b/portality/scripts/manage_background_jobs.py @@ -22,45 +22,64 @@ from portality.lib import dates from portality.lib.dates import DEFAULT_TIMESTAMP_VAL +from portality.tasks.anon_export import AnonExportBackgroundTask +from portality.tasks.article_bulk_delete import ArticleBulkDeleteBackgroundTask +from portality.tasks.article_cleanup_sync import ArticleCleanupSyncBackgroundTask +from portality.tasks.article_duplicate_report import ArticleDuplicateReportBackgroundTask +from portality.tasks.async_workflow_notifications import AsyncWorkflowBackgroundTask +from portality.tasks.check_latest_es_backup import CheckLatestESBackupBackgroundTask +# from portality.tasks.find_discontinued_soon import FindDiscontinuedSoonBackgroundTask +from portality.tasks.harvester import HarvesterBackgroundTask from portality.tasks.ingestarticles import IngestArticlesBackgroundTask -from portality.tasks.preservation import PreservationBackgroundTask -from portality.tasks.suggestion_bulk_edit import SuggestionBulkEditBackgroundTask -from portality.tasks.sitemap import SitemapBackgroundTask -from portality.tasks.read_news import ReadNewsBackgroundTask +from portality.tasks.journal_bulk_delete import JournalBulkDeleteBackgroundTask +from portality.tasks.journal_bulk_edit import JournalBulkEditBackgroundTask from portality.tasks.journal_csv import JournalCSVBackgroundTask -from portality.tasks.article_cleanup_sync import ArticleCleanupSyncBackgroundTask from 
portality.tasks.journal_in_out_doaj import SetInDOAJBackgroundTask -from portality.tasks.check_latest_es_backup import CheckLatestESBackupBackgroundTask +from portality.tasks.preservation import PreservationBackgroundTask from portality.tasks.prune_es_backups import PruneESBackupsBackgroundTask from portality.tasks.public_data_dump import PublicDataDumpBackgroundTask -from portality.tasks.harvester import HarvesterBackgroundTask -from portality.tasks.anon_export import AnonExportBackgroundTask +from portality.tasks.read_news import ReadNewsBackgroundTask +from portality.tasks.reporting import ReportingBackgroundTask +from portality.tasks.sitemap import SitemapBackgroundTask +from portality.tasks.suggestion_bulk_edit import SuggestionBulkEditBackgroundTask + +from portality.background import BackgroundApi # dict of {task_name: task_class} so we can interact with the jobs HANDLERS = { - PreservationBackgroundTask.__action__:PreservationBackgroundTask, + AnonExportBackgroundTask.__action__: AnonExportBackgroundTask, + ArticleBulkDeleteBackgroundTask.__action__: ArticleBulkDeleteBackgroundTask, + ArticleCleanupSyncBackgroundTask.__action__: ArticleCleanupSyncBackgroundTask, + ArticleDuplicateReportBackgroundTask.__action__: ArticleDuplicateReportBackgroundTask, + AsyncWorkflowBackgroundTask.__action__: AsyncWorkflowBackgroundTask, + CheckLatestESBackupBackgroundTask.__action__: CheckLatestESBackupBackgroundTask, + # FindDiscontinuedSoonBackgroundTask.__action__: FindDiscontinuedSoonBackgroundTask, + HarvesterBackgroundTask.__action__: HarvesterBackgroundTask, IngestArticlesBackgroundTask.__action__: IngestArticlesBackgroundTask, - SuggestionBulkEditBackgroundTask.__action__: SuggestionBulkEditBackgroundTask, - SitemapBackgroundTask.__action__: SitemapBackgroundTask, - ReadNewsBackgroundTask.__action__: ReadNewsBackgroundTask, + JournalBulkDeleteBackgroundTask.__action__: JournalBulkDeleteBackgroundTask, + JournalBulkEditBackgroundTask.__action__: 
JournalBulkEditBackgroundTask, JournalCSVBackgroundTask.__action__: JournalCSVBackgroundTask, - ArticleCleanupSyncBackgroundTask.__action__: ArticleCleanupSyncBackgroundTask, SetInDOAJBackgroundTask.__action__: SetInDOAJBackgroundTask, - CheckLatestESBackupBackgroundTask.__action__: CheckLatestESBackupBackgroundTask, + PreservationBackgroundTask.__action__:PreservationBackgroundTask, PruneESBackupsBackgroundTask.__action__: PruneESBackupsBackgroundTask, PublicDataDumpBackgroundTask.__action__: PublicDataDumpBackgroundTask, - HarvesterBackgroundTask.__action__: HarvesterBackgroundTask, - AnonExportBackgroundTask.__action__: AnonExportBackgroundTask, + ReadNewsBackgroundTask.__action__: ReadNewsBackgroundTask, + ReportingBackgroundTask.__action__: ReportingBackgroundTask, + SitemapBackgroundTask.__action__: SitemapBackgroundTask, + SuggestionBulkEditBackgroundTask.__action__: SuggestionBulkEditBackgroundTask } -def manage_jobs(verb, action, status, from_date, to_date): +def manage_jobs(verb, action, status, from_date, to_date, prompt=True): q = JobsQuery(action, status, from_date, to_date) jobs = models.BackgroundJob.q2obj(q=q.query()) print('You are about to {verb} {count} job(s)'.format(verb=verb, count=len(jobs))) - doit = input('Proceed? [y\\N] ') + + doit = "y" + if prompt: + doit = input('Proceed? 
[y\\N] ') if doit.lower() == 'y': print('Please wait...') @@ -70,7 +89,7 @@ def manage_jobs(verb, action, status, from_date, to_date): continue job.add_audit_message("Job {pp} from job management script.".format( - pp={'requeue': 'requeued', 'cancel': 'cancelled'}[verb])) + pp={'requeue': 'requeued', 'cancel': 'cancelled', "process": "processed"}[verb])) if verb == 'requeue': # Re-queue and execute immediately job.queue() @@ -78,18 +97,24 @@ def manage_jobs(verb, action, status, from_date, to_date): elif verb == 'cancel': # Just apply cancelled status job.cancel() job.save() + elif verb == 'process': + task = HANDLERS[job.action](job) # Just execute immediately without going through huey + BackgroundApi.execute(task) print('done.') else: print('No action.') -def requeue_jobs(action, status, from_date, to_date): - manage_jobs('requeue', action, status, from_date, to_date) +def requeue_jobs(action, status, from_date, to_date, prompt=True): + manage_jobs('requeue', action, status, from_date, to_date, prompt=prompt) + +def cancel_jobs(action, status, from_date, to_date, prompt=True): + manage_jobs('cancel', action, status, from_date, to_date, prompt=prompt) -def cancel_jobs(action, status, from_date, to_date): - manage_jobs('cancel', action, status, from_date, to_date) +def process_jobs(action, status, from_date, to_date, prompt=True): + manage_jobs("process", action, status, from_date, to_date, prompt=prompt) class JobsQuery(object): @@ -127,6 +152,8 @@ def query(self): help='Add these jobs back on the job queue for processing', action='store_true') parser.add_argument('-c', '--cancel', help='Cancel these jobs (set their status to "cancelled")', action='store_true') + parser.add_argument("-p", "--process", + help="Immediately process these jobs on the command line", action="store_true") parser.add_argument('-s', '--status', help='Filter for job status. 
Default is "queued"', default='queued') @@ -139,15 +166,18 @@ def query(self): parser.add_argument('-t', '--to_date', help='Date to which to look for jobs in the given type and status', default=dates.now_str()) + parser.add_argument("-y", "--yes", help="Answer yes to all prompts", action="store_true") args = parser.parse_args() if args.requeue and args.cancel: print('Use only --requeue OR --cancel, not both.') exit(1) elif args.requeue: - requeue_jobs(args.action, args.status, args.from_date, args.to_date) + requeue_jobs(args.action, args.status, args.from_date, args.to_date, prompt=False if args.yes else True) elif args.cancel: - cancel_jobs(args.action, args.status, args.from_date, args.to_date) + cancel_jobs(args.action, args.status, args.from_date, args.to_date, prompt=False if args.yes else True) + elif args.process: + process_jobs(args.action, args.status, args.from_date, args.to_date, prompt=False if args.yes else True) else: - print('You must supply one of --requeue or --cancel to run this script') + print('You must supply one of --requeue, --cancel or --process to run this script') exit(1) diff --git a/portality/settings.py b/portality/settings.py index 2bffdbab8a..df889cbd14 100644 --- a/portality/settings.py +++ b/portality/settings.py @@ -9,7 +9,7 @@ # Application Version information # ~~->API:Feature~~ -DOAJ_VERSION = "6.3.16" +DOAJ_VERSION = "6.4.2" API_VERSION = "3.0.1" ###################################### @@ -427,7 +427,7 @@ HUEY_SCHEDULE = { "sitemap": {"month": "*", "day": "*", "day_of_week": "*", "hour": "8", "minute": "0"}, "reporting": {"month": "*", "day": "1", "day_of_week": "*", "hour": "0", "minute": "0"}, - "journal_csv": {"month": "*", "day": "*", "day_of_week": "*", "hour": "*", "minute": "35"}, + "journal_csv": {"month": "*", "day": "*", "day_of_week": "*", "hour": "*", "minute": "20"}, "read_news": {"month": "*", "day": "*", "day_of_week": "*", "hour": "*", "minute": "30"}, "article_cleanup_sync": {"month": "*", "day": "2", 
"day_of_week": "*", "hour": "0", "minute": "0"}, "async_workflow_notifications": {"month": "*", "day": "*", "day_of_week": "1", "hour": "5", "minute": "0"}, diff --git a/portality/tasks/ingestarticles.py b/portality/tasks/ingestarticles.py index de6991ab40..e798f4005d 100644 --- a/portality/tasks/ingestarticles.py +++ b/portality/tasks/ingestarticles.py @@ -312,11 +312,16 @@ def _process(self, file_upload: models.FileUpload): for article in articles: article.set_upload_id(file_upload.id) result = articleService.batch_create_articles(articles, account, add_journal_info=True) - except (IngestException, CrosswalkException) as e: - job.add_audit_message("IngestException: {msg}. Inner message: {inner}. Stack: {x}" - .format(msg=e.message, inner=e.inner_message, x=e.trace())) + except (IngestException, CrosswalkException, ArticleNotAcceptable) as e: + if hasattr(e, 'inner_message'): + job.add_audit_message("{exception}: {msg}. Inner message: {inner}. Stack: {x}" + .format(exception=e.__class__.__name__, msg=e.message, inner=e.inner_message, x=e.trace())) + file_upload.failed(e.message, e.inner_message) + else: + job.add_audit_message("{exception}: {msg}.".format(exception=e.__class__.__name__, msg=e.message)) + file_upload.failed(e.message) + job.outcome_fail() - file_upload.failed(e.message, e.inner_message) result = e.result try: file_failed(path) @@ -324,7 +329,7 @@ def _process(self, file_upload: models.FileUpload): except: job.add_audit_message("Error cleaning up file which caused IngestException: {x}" .format(x=traceback.format_exc())) - except (DuplicateArticleException, ArticleNotAcceptable) as e: + except DuplicateArticleException as e: job.add_audit_message(str(e)) job.outcome_fail() file_upload.failed(str(e)) diff --git a/portality/templates/account/forgot.html b/portality/templates/account/forgot.html index 241525adfd..d8f5e9c837 100644 --- a/portality/templates/account/forgot.html +++ b/portality/templates/account/forgot.html @@ -3,7 +3,7 @@ {% block 
page_title %}Reset your password{% endblock %} {% block content %} -
+
@@ -23,5 +23,5 @@

Reset your password

-
+ {% endblock %} diff --git a/portality/templates/account/login.html b/portality/templates/account/login.html index 247149641a..726831e9e3 100644 --- a/portality/templates/account/login.html +++ b/portality/templates/account/login.html @@ -3,7 +3,7 @@ {% block page_title %}Login to your account{% endblock %} {% block content %} -
+
@@ -17,5 +17,5 @@

Login

-
+ {% endblock %} diff --git a/portality/templates/account/login_to_apply.html b/portality/templates/account/login_to_apply.html index 5d9ee3e8ed..556fbb71cb 100644 --- a/portality/templates/account/login_to_apply.html +++ b/portality/templates/account/login_to_apply.html @@ -3,7 +3,7 @@ {% block page_title %}Login to apply{% endblock %} {% block content %} -
+
@@ -46,5 +46,5 @@

Related help

-
+ {% endblock %} diff --git a/portality/templates/account/register.html b/portality/templates/account/register.html index 4f91d8250a..a497bd396a 100644 --- a/portality/templates/account/register.html +++ b/portality/templates/account/register.html @@ -12,7 +12,7 @@ {% endblock %} {% block content %} -
+
@@ -30,7 +30,7 @@

Register

-
+ {% endblock %} {% block extra_js_bottom %} diff --git a/portality/templates/account/reset.html b/portality/templates/account/reset.html index 2b459de104..fdacc27620 100644 --- a/portality/templates/account/reset.html +++ b/portality/templates/account/reset.html @@ -4,7 +4,7 @@ {% block content %} -
+
@@ -20,6 +20,6 @@

Hi {{ account.name or account.email }}

-
+ {% endblock %} diff --git a/portality/templates/api/current/api_docs.html b/portality/templates/api/current/api_docs.html index 9c5a2bc8e5..42d3d14588 100644 --- a/portality/templates/api/current/api_docs.html +++ b/portality/templates/api/current/api_docs.html @@ -14,7 +14,7 @@ {% endblock %} {% block content %} -
+
{# todo: this nav was bumping into swagger @@ -58,7 +58,7 @@

API

-
+ {% endblock %} {% block extra_js_bottom %} diff --git a/portality/templates/application_form/public_application.html b/portality/templates/application_form/public_application.html index af0e05d63e..31439b94d3 100644 --- a/portality/templates/application_form/public_application.html +++ b/portality/templates/application_form/public_application.html @@ -23,7 +23,7 @@ {% block content scoped %} -
+
{% include "application_form/_backend_validation.html" %}
@@ -64,7 +64,7 @@
-
+ {% endblock %} diff --git a/portality/templates/application_form/readonly_journal.html b/portality/templates/application_form/readonly_journal.html index 2fed49db61..fe429eea08 100644 --- a/portality/templates/application_form/readonly_journal.html +++ b/portality/templates/application_form/readonly_journal.html @@ -20,7 +20,7 @@ {% block content scoped %} -
+
@@ -47,7 +47,7 @@
-
+ {% endblock %} diff --git a/portality/templates/doaj/article.html b/portality/templates/doaj/article.html index 67720a8585..91f3f80200 100644 --- a/portality/templates/doaj/article.html +++ b/portality/templates/doaj/article.html @@ -67,7 +67,7 @@ {% set doi = bibjson.get_one_identifier("doi") %} {% set normalised_doi = article.get_normalised_doi() %} -
+

@@ -224,5 +224,5 @@

Published in {{jtitle}}

-
+ {% endblock %} diff --git a/portality/templates/doaj/articles_search.html b/portality/templates/doaj/articles_search.html index cea8977897..4a518fb3ae 100644 --- a/portality/templates/doaj/articles_search.html +++ b/portality/templates/doaj/articles_search.html @@ -10,10 +10,10 @@ {%- block meta_twitter_description -%}Find open access articles in DOAJ.{%- endblock -%} {% block content %} -
+
{% include "includes/search-help-modal.html" %} -
+ {% endblock %} {% block extra_js_bottom %} diff --git a/portality/templates/doaj/contact.html b/portality/templates/doaj/contact.html index 2a4739dec8..2ffb86985f 100644 --- a/portality/templates/doaj/contact.html +++ b/portality/templates/doaj/contact.html @@ -1,7 +1,7 @@ {% extends "layouts/public_base.html" %} {% block content %} -
+

Submit your feedback and questions here. Feedback submitted about a particular journal is treated as confidential.

@@ -52,7 +52,7 @@
-
+ {% endblock %} diff --git a/portality/templates/doaj/index.html b/portality/templates/doaj/index.html index 02bb78b878..f24f55571b 100644 --- a/portality/templates/doaj/index.html +++ b/portality/templates/doaj/index.html @@ -74,7 +74,7 @@

DOAJ in numbers

{% endblock %} {% block content %} -
+
@@ -246,6 +246,6 @@

Recently-added journals

-
+ {% endblock %} diff --git a/portality/templates/doaj/journals_search.html b/portality/templates/doaj/journals_search.html index e38bedd18e..8eef0d2c63 100644 --- a/portality/templates/doaj/journals_search.html +++ b/portality/templates/doaj/journals_search.html @@ -10,10 +10,10 @@ {%- block meta_twitter_description -%}Find open access journals in DOAJ.{%- endblock -%} {% block content %} -
+
{% include "includes/search-help-modal.html" %} -
+ {% endblock %} {% block extra_js_bottom %} diff --git a/portality/templates/editor/editor_base.html b/portality/templates/editor/editor_base.html index 1fc7bfbc81..c116f3d622 100644 --- a/portality/templates/editor/editor_base.html +++ b/portality/templates/editor/editor_base.html @@ -9,10 +9,15 @@ {% endblock %} {% block content %} -
- {% block editor_content %} - {% endblock %} -
+
+

Editor dashboard

+ {% include 'editor/nav.html' %} + +
+ {% block editor_content %} + {% endblock %} +
+
{% include "includes/_hotjar.html" %} {% endblock %} diff --git a/portality/templates/includes/contribution_rates.html b/portality/templates/includes/contribution_rates.html index 29f28a8426..2449e24e3c 100644 --- a/portality/templates/includes/contribution_rates.html +++ b/portality/templates/includes/contribution_rates.html @@ -1,7 +1,7 @@ - + {% endblock %} diff --git a/portality/templates/openurl/help.html b/portality/templates/openurl/help.html index 75332c5637..c1a4eb4424 100644 --- a/portality/templates/openurl/help.html +++ b/portality/templates/openurl/help.html @@ -1,7 +1,7 @@ {% extends "layouts/public_base.html" %} {% block content %} -
+

Help

@@ -33,5 +33,5 @@

Supported OpenURL version

-
+ {% endblock %} diff --git a/portality/templates/publisher/preservation.html b/portality/templates/publisher/preservation.html index c87a2fcb97..611eb380af 100644 --- a/portality/templates/publisher/preservation.html +++ b/portality/templates/publisher/preservation.html @@ -25,10 +25,10 @@

Guidance before uploading your file

  1. Only the full text of articles whose metadata is already uploaded to DOAJ can be sent to us. Check that your article metadata appears in DOAJ first.
  2. Only articles for journals indexed in DOAJ can be uploaded. -
  3. Collect the full texts into a package consisting of folders and files.
  4. -
  5. Compress the package into a ZIP file.
  6. -
  7. Upload the zipped package (on this page).
  8. -
  9. Check that the file has uploaded correctly in the History of Uploads section and is not bigger than 50MB.
  10. +
  11. Collect the full text files into a package containing folders and files.
  12. +
  13. Compress the package into a ZIP file. Keep the name of the file simple: avoid spaces, hyphens, underscores, special characters, etc
  14. +
  15. Upload the zipped package (on this page). It may not be bigger than 50MB.
  16. +
  17. Check that the file has uploaded correctly in the History of Uploads section.

The package must have the following structure:

diff --git a/portality/templates/publisher/publisher_base.html b/portality/templates/publisher/publisher_base.html index d4873f1fb5..96ba47ca72 100644 --- a/portality/templates/publisher/publisher_base.html +++ b/portality/templates/publisher/publisher_base.html @@ -7,14 +7,14 @@ {% block page_title %}Publisher dashboard{% endblock %} {% block content %} -
+

Publisher dashboard

{% include 'publisher/nav.html' %} {% block publisher_content %} {% endblock %} -
+ {% include "includes/_hotjar.html" %} {% endblock %} diff --git a/portality/ui/messages.py b/portality/ui/messages.py index ac7f9163bc..8eabd73f80 100644 --- a/portality/ui/messages.py +++ b/portality/ui/messages.py @@ -61,6 +61,7 @@ class Messages(object): EXCEPTION_NO_CONTRIBUTORS_EXPLANATION = "DOAJ requires at least one author for each article." EXCEPTION_TOO_MANY_ISSNS = "Too many ISSNs. Only 2 ISSNs are allowed: one Print ISSN and one Online ISSN." + EXCEPTION_MISMATCHED_ISSNS = "ISSNs provided don't match any journal." EXCEPTION_ISSNS_OF_THE_SAME_TYPE = "Both ISSNs have the same type: {type}" EXCEPTION_IDENTICAL_PISSN_AND_EISSN = "The Print and Online ISSNs supplied are identical. If you supply 2 ISSNs they must be different." EXCEPTION_NO_ISSNS = "Neither Print ISSN nor Online ISSN has been supplied. DOAJ requires at least one ISSN." diff --git a/portality/view/doaj.py b/portality/view/doaj.py index 80ff0b2195..4baac018ec 100644 --- a/portality/view/doaj.py +++ b/portality/view/doaj.py @@ -198,8 +198,13 @@ def public_data_dump_redirect(record_type): if not current_user.has_role(constants.ROLE_PUBLIC_DATA_DUMP): abort(404) - target_data = models.Cache.get_public_data_dump().get(record_type, {}) - if target_data is None: + # Make sure the PDD exists + pdd = models.Cache.get_public_data_dump() + if pdd is None: + abort(404) + + target_data = pdd.get(record_type, {}) + if not target_data: abort(404) main_store = store.StoreFactory.get(constants.STORE__SCOPE__PUBLIC_DATA_DUMP) diff --git a/setup.py b/setup.py index a83bf6daf2..8d951cb4eb 100644 --- a/setup.py +++ b/setup.py @@ -5,7 +5,7 @@ setup( name='doaj', - version='6.3.16', + version='6.4.2', packages=find_packages(), install_requires=[ "awscli==1.20.50",