diff --git a/cms/assets/img/sponsors/Degruyter.svg b/cms/assets/img/sponsors/Degruyter.svg new file mode 100644 index 0000000000..4fcff995f5 --- /dev/null +++ b/cms/assets/img/sponsors/Degruyter.svg @@ -0,0 +1,3 @@ + + + diff --git a/cms/assets/img/ambassadors/johndove.jpg b/cms/assets/img/team/johndove.jpg similarity index 100% rename from cms/assets/img/ambassadors/johndove.jpg rename to cms/assets/img/team/johndove.jpg diff --git a/cms/data/ambassadors.yml b/cms/data/ambassadors.yml index 3f797bcd2d..f2a949b4f0 100644 --- a/cms/data/ambassadors.yml +++ b/cms/data/ambassadors.yml @@ -62,13 +62,6 @@ photo: "ivonne.jpg" coi: 2022: https://drive.google.com/file/d/1HnGhYbvbzL34guWOmIqcthcwAN8NADX1/view?usp=sharing - -- name: John G. Dove - region: North America - bio: "John has had a career in executive management, and is now an independent consultant and open access advocate who works with organisations seeking to accelerate their transition to open access. He advises both for-profits and non-profits, and has a particular interest in identifying the steps necessary to flip an entire discipline’s scholarly record to open access. His ambassador activities focus on increasing the support to DOAJ from the community. He served for six years on NISO’s Information Discovery and Interchange Topic Committee, and has written for Learned Publishing, Against the Grain, and Scholarly Kitchen. John serves on the Board of Trustees of his local public library in Revere, Massachusetts. He has a B.A. in Mathematics from Oberlin College." 
- photo: "johndove.jpg" - coi: - 2022: https://drive.google.com/file/d/1cWijl2xdmVjshsvaGTABOvC_chIIfuVA/view?usp=sharing - name: Mahmoud Khalifa region: Middle East and Persian Gulf diff --git a/cms/data/nav.yml b/cms/data/nav.yml index 8c5aa278ee..e8ec84fc22 100644 --- a/cms/data/nav.yml +++ b/cms/data/nav.yml @@ -56,11 +56,11 @@ entries: secondary_mobile: true route: doaj.support # ~~->Support:WebRoute~~ entries: - - label: Support DOAJ + - label: Institutions and libraries route: doaj.support # ~~->Support:WebRoute~~ - - label: Publisher supporters + - label: Publishers route: doaj.publisher_supporters # ~~->PublisherSupporters:WebRoute~~ - - label: Supporters + - label: Institutional and library supporters route: doaj.supporters # ~~->Supporters:WebRoute~~ - id: apply label: Apply diff --git a/cms/data/sponsors.yml b/cms/data/sponsors.yml index 379c51cf69..62f4700a39 100644 --- a/cms/data/sponsors.yml +++ b/cms/data/sponsors.yml @@ -1,17 +1,14 @@ # List of sponsors separated by tiers (premier, sustaining, basic) # ~~Sponsors:Data~~ -gold: - name: Royal Society of Chemistry url: https://www.rsc.org/ logo: rsc.png - + - name: Georg Thieme Verlag KG url: https://www.thieme.com/ logo: thieme.svg - -silver: - name: AOSIS url: https://aosis.co.za/ logo: aosis.png @@ -19,7 +16,7 @@ silver: - name: Cappelen Damm Akademisk url: https://www.cappelendammundervisning.no/ logo: cda.jpg - + - name: Copernicus Publications url: https://publications.copernicus.org logo: copernicus.svg @@ -31,7 +28,7 @@ silver: - name: Frontiers url: https://www.frontiersin.org/ logo: frontiers.svg - + - name: Knowledge E url: https://knowledgee.com/ logo: knowledgee.png @@ -43,7 +40,7 @@ silver: - name: OA.Works url: https://oa.works/ logo: oaworks.png - + - name: SAGE Publishing url: https://www.sagepublications.com/ logo: sage.svg @@ -51,21 +48,19 @@ silver: - name: Taylor & Francis Group url: https://www.taylorandfrancisgroup.com/ logo: tf.svg - + - name: John Wiley and Sons LTD url: 
https://www.wiley.com/en-us logo: Wiley_Wordmark_black.png - -bronze: - name: American Chemical Society url: https://pubs.acs.org/ logo: acs.jpg - + - name: American Psychological Association url: https://www.apa.org/pubs logo: apa.png - + - name: Cambridge University Press url: https://www.cambridge.org/ logo: cambridge.svg @@ -74,6 +69,10 @@ bronze: url: https://www.digital-science.com/ logo: ds.svg +- name: De Gruyter + url: https://www.degruyter.com/ + logo: Degruyter.svg + - name: eLife Sciences Publications url: https://elifesciences.org/ logo: elife.svg @@ -81,7 +80,7 @@ bronze: - name: Elsevier url: https://www.elsevier.com/ logo: elsevier.svg - + - name: Emerald Publishing url: https://emeraldpublishing.com/ logo: emerald.svg @@ -89,15 +88,15 @@ bronze: - name: IEEE url: https://www.ieee.org/ logo: ieee.png - + - name: Institute of Physics url: https://www.iop.org/ logo: iop.jpg - + - name: International Union for Conservation of Nature url: https://iucn.org/ logo: IUCN.svg - + - name: JMIR Publications url: https://jmirpublications.com/ logo: jmir.svg @@ -129,12 +128,11 @@ bronze: - name: SciFree url: https://scifree.se/ logo: scifree.svg - + - name: The IET url: https://www.theiet.org/ logo: iet.svg - name: Ubiquity Press url: https://www.ubiquitypress.com/ - logo: ubiquity.svg - + logo: ubiquity.svg diff --git a/cms/data/team.yml b/cms/data/team.yml index 781c84737e..e3a0b3df8e 100644 --- a/cms/data/team.yml +++ b/cms/data/team.yml @@ -6,17 +6,15 @@ photo: alejandra.png bio: 'Alejandra has a Bachelor’s degree in Information Science and a Master’s degree in Digital Media. She has around ten years of experience in information management, knowledge management and scholarly communication at national and international level.' 
coi: - 2016: https://drive.google.com/file/d/0ByRf6PVViI-mVFhjdkZWaFJZOTQ/view?usp=sharing&resourcekey=0-gMdxxHXyyJB9zFZIuh99QQ 2018: https://drive.google.com/file/d/0ByRf6PVViI-mRlRYTDBPRlZiWTRxQ3VMTUZpQnZ5ZkwyLVQ4/view?usp=sharing&resourcekey=0-mlQ6rSCEnr6RfpCwh_4SMw 2020: https://drive.google.com/file/d/1PF7Cc9vGAwWGqpqDo7nRULjxWIF2NR_Q/view?usp=sharing 2022: https://drive.google.com/file/d/1E45ycyctDfYkh65ZCM8PMtQsYym_eP6L/view?usp=sharing - name: Cenyu Shen - role: Quality Team Lead and Managing Editor + role: Deputy Head of Editorial (Quality) photo: cenyu.jpg bio: 'Cenyu holds a PhD in Information Systems Science at Hanken School of Economics in Finland. She has spent around seven years on Open Access research with a particular focus on gold OA publishing concerning journals and publishers outside the mainstream. She was one of three DOAJ ambassadors for China from 2016 to 2017.' coi: - 2016: https://drive.google.com/file/d/0B0fPCpIPjZlmNHZCQmxpUmN6bUEtYUx2VHZnbjVySS1fRTlr/view?usp=sharing&resourcekey=0-1TRIV1MEQMhdGbmCd7CbOA 2020: https://drive.google.com/file/d/1rm9fjOF3OHJ9lR9wEUyQBQTO2KdoNQcE/view?usp=sharing 2022: https://drive.google.com/file/d/1Mn_CR0twKxyFbbHxsLSrgeU984BNOLlS/view?usp=sharing @@ -25,7 +23,6 @@ photo: clara.jpg bio: 'Clara has 10 years experience in the scholarly publishing industry. She worked at Cambridge University Press as an Open Access Project Manager until 2015. She also works in science communication as a freelancer at the University Pompeu Fabra, Barcelona. Clara speaks Spanish, Catalan, English and some German and French. She loves cetaceans, freediving, cycling, and is an enthusiastic cook.' 
coi: - 2016: https://drive.google.com/file/d/0ByRf6PVViI-mbDFybndLbldEbFE/view?usp=sharing&resourcekey=0-lKZNFwvUNdVAGKatvnKiPg 2018: https://drive.google.com/file/d/1LHmZSZ6bwf6U71fNvIibJa6R1lquNhfR/view?usp=sharing 2020: https://drive.google.com/file/d/1v4duxnoTcNo4UbL_GBa5D1T8JtTl7oY1/view?usp=sharing 2022: https://drive.google.com/file/d/1hevYxG1102llDy-_i-onwKbDuOlBguA_/view?usp=sharing @@ -35,7 +32,6 @@ photo: dominic.jpg bio: 'Dominic has over 25 years of experience working with publisher and library communities. He is responsible for operations and development of the DOAJ platform. He acts as Committee chair for the Think. Check. Submit. initiative, of which DOAJ is a founding organisation. He represents DOAJ in Project JASPER, a cross-industry project working to ensure that journals are preserved for the long term. He also sits on the OASPA Board of Directors and serves as Secretary. Outside of work, he is reluctantly becoming an expert in the playparks of Stockholm with his twin sons.' coi: - 2016: https://drive.google.com/file/d/0ByRf6PVViI-mWmU0UHZqZm1xcDQ/view?usp=sharing&resourcekey=0-BmQKwWn6Vb9ot73Xie66aA 2018: https://drive.google.com/file/d/13XX_GUrw2xRmXARjRrTxegULPT8Redka/view?usp=sharing 2020: https://drive.google.com/file/d/1nxFOuAdXLb8A-LulhNpz9i5vSmr5DBwF/view?usp=sharing 2022: https://drive.google.com/file/d/1HBF9RLaIt3lFNG6WDcV08fQSMS_C6zwA/view?usp=sharing @@ -62,12 +58,18 @@ coi: 2022: https://drive.google.com/file/d/1-3xzwkHMclREgLhj_XNF5n6Nr4q2_bnw/view?usp=sharing +- name: John G. Dove + role: Advisor + photo: johndove.jpg + bio: "John has had a career in executive management, and is now an independent consultant and open access advocate who works with organisations seeking to accelerate their transition to open access. He advises both for-profits and non-profits, and has a particular interest in identifying the steps necessary to flip an entire discipline’s scholarly record to open access. 
His ambassador activities focus on increasing the support to DOAJ from the community. He served for six years on NISO’s Information Discovery and Interchange Topic Committee, and has written for Learned Publishing, Against the Grain, and Scholarly Kitchen. John serves on the Board of Trustees of his local public library in Revere, Massachusetts. He has a B.A. in Mathematics from Oberlin College." + coi: + 2022: https://drive.google.com/file/d/1cWijl2xdmVjshsvaGTABOvC_chIIfuVA/view?usp=sharing + - name: Judith Barnsby role: Head of Editorial photo: judith.jpg bio: 'Judith has 25 years of experience in the scholarly publishing industry, working for a range of non-profit society publishers and service providers before joining DOAJ. She has a keen interest in publishing standards and protocols, and has served on the board of CLOCKSS and as chair of the PALS (publisher and library solutions) working group in the UK. Judith loves books, especially detective fiction, and volunteers in her local library.' coi: - 2016: https://drive.google.com/file/d/0B0fPCpIPjZlmb3JmVkFYbjN5aTh1OUhLd2lZaEV0ZlFwbTZV/view?usp=sharing&resourcekey=0-o_PXKLk5UFbPk_-4B61jVA 2018: https://drive.google.com/file/d/0ByRf6PVViI-mV2lfMjByQjYxUkpMcXhuc2l5Q3ZDWlpiYUtZ/view?usp=sharing&resourcekey=0-6eiGIRal00eXvgJUTeN_lw 2020: https://drive.google.com/file/d/18MWTsze4cDQQRPHJl2XrYgHQvlxhsPZa/view?usp=sharing 2023: https://drive.google.com/file/d/1hUsVIY09N6WceSx1edTM-h516CJGkHcu/view?usp=share_link @@ -77,7 +79,6 @@ photo: Kamel.jpg bio: 'Kamel is Full Professor of Chemistry at the University of Bejaia, Algeria (ORCID). He gained his PhD in Process Engineering and Chemistry of Materials Science at the University of Setif, Algeria. Kamel joined DOAJ in 2016 as an Ambassador for North Africa. He is currently Creative Commons Algeria Chapter lead, director of the Laboratory of Organic Materials at the University of Bejaia and editor-in-chief of Algerian Journal of Natural Products. 
His scientific activity is focused on chemistry of Natural Products, scholarly communications and new developments in academic publishing. Father of 3 daughters, he likes travelling, healthy local foods & home-made snacks.' coi: - 2016: https://drive.google.com/file/d/0B0fPCpIPjZlmVEN4X1Q0RDdCams1NXhveW1HQmtMYU56bDE4/view?usp=sharing&resourcekey=0-wA1CGAbjB6FAX33gCDQmrA 2018: https://drive.google.com/file/d/1JdF2kh-fLXz8kPGN_3ijDt5y9K6s0hOQ/view?usp=sharing 2020: https://drive.google.com/file/d/1iXrjwLTNBXwKD2TwrPD9ApKL7O6uZ8Z7/view?usp=sharing 2022: https://drive.google.com/file/d/1cl18h_mYnNogYs8Rk-fhBTW6WKOTC2IF/view?usp=sharing @@ -94,28 +95,19 @@ photo: lars.jpg bio: 'Lars worked at Danish university libraries for two decades and was Director of Libraries at Lund University, Sweden from 2001 to 2011. He founded the DOAJ in 2003, and was Managing Director from 2013-2021. He has vast experience in change management, re-engineering of academic libraries, and development of information services for research & higher education. For two decades Lars has been a strong advocate of open access and for providing services to the open access movement. He is co-founder of OpenDOAR, the Directory of Open Access Books and Think. Check. Submit. Lars lives outside Copenhagen, and is married with 4 children and 4 grandchildren. He enjoys vegetable gardening, growing cacti and succulents, and playing internet chess.' 
coi: - 2016: https://drive.google.com/file/d/0ByRf6PVViI-mbmo2aU9NWkx5dGs/view?usp=sharing&resourcekey=0-mpdRgVU9UlFjC614-woDvg 2018: https://drive.google.com/file/d/1mm1a8nbY5MQX9loqIs2ZQuVN-73RfPuN/view?usp=sharing 2021: https://drive.google.com/file/d/1bNj5sqUsu4sRLmm_YOuh3JCSMERzQ1Ro/view?usp=sharing 2022: https://drive.google.com/file/d/1fRJtvci2_j4vad0C5N1pfqm2sHZQkFz3/view?usp=sharing - name: Leena Shah - role: Managing Editor and Ambassador + role: Deputy Head of Editorial (Workflow) and Ambassador photo: leena.jpg bio: "Leena joined the DOAJ team in 2016 as an Ambassador for India before becoming a Managing Editor. Prior to joining DOAJ she worked as a science librarian at Nanyang Technological University, Singapore, where she developed a keen interest in scholarly communication & open science. A recent addition to her interests is artificial intelligence in scholarly communication. Leena holds a Master’s degree in Information Studies and lives in Singapore. She loves watching sci-fi shows and is enthusiastic about travelling to new places." coi: - 2016: https://drive.google.com/file/d/0B0fPCpIPjZlmTHZuaEtMSDNIeUpKT2Fid19jVjVFTkRoUmdj/view?usp=sharing&resourcekey=0-KqvRVa30bQEUfqO-YA1L-g 2018: https://drive.google.com/file/d/1tifEjAIlU3txBw9DjIcRW9cZL7YG7_nU/view?usp=sharing 2020: https://drive.google.com/file/d/1zU-lLB5W54E_QUm5uto5tqB6cZl83TAJ/view?usp=sharing 2022: https://drive.google.com/file/d/19rw-naMJqHkI5T7aDIDPUkwPutBdDpDm/view?usp=sharing -- name: Luis Montilla - role: Managing Editor - photo: luis.jpeg - bio: "Luis is a marine ecologist with a passion for improving the quality of scientific publishing. After finishing his Masters in Venezuela, he spent three years in Italy completing his PhD studying marine microbial symbioses in seagrass beds. In his free time, he enjoys reading and watching movies." 
- coi: - 2023: https://drive.google.com/file/d/1IJhnV2Ht5t5jilaCAFzpuFdYk7UMOjN3/view?usp=sharing - - name: Mahmoud Khalifa role: Managing Editor and Ambassador photo: mahmoud-new.jpg @@ -151,7 +143,6 @@ photo: Rikard.jpg bio: 'Rikard has a Bachelor of Arts degree with a Major in Cultural Sciences and a specialization in publishing. He enjoys reading about philosophy and religion.' coi: - 2016: https://drive.google.com/file/d/0ByRf6PVViI-mdnJPdldOM0hUMFU/view?usp=sharing&resourcekey=0-8dJAtvm2n7vXV9NhqZYckw 2018: https://drive.google.com/file/d/1tOnW8L6TwolyLpIXwMKTITf9wGh_ukLb/view?usp=sharing 2020: https://drive.google.com/file/d/14c0RgpyD2Slzyh5s8LGvj5OwWbL4H8NX/view?usp=sharing 2023: https://drive.google.com/file/d/1HQIh1DlfhEutTWniXDGLYFVa9VxJ4OT9/view?usp=share_link @@ -161,7 +152,6 @@ photo: sonja.jpg bio: 'Sonja is a former Information Librarian from Lund University Library. She has a B.A. in English, Bulgarian and Russian from Lund University and specialises in applications for journals in the Slavonic languages.' coi: - 2016: https://drive.google.com/file/d/0ByRf6PVViI-mNUFoZWV4YnZ3bDg/view?usp=sharing&resourcekey=0-1JRid_DHRMKbgdzmVYL7NQ 2018: https://drive.google.com/file/d/1M5AGEDP79uk2olCcmVYjKCsmzL7tG2Vc/view?usp=sharing 2020: https://drive.google.com/file/d/1-4RJYScTs_zMBeD5zESNvCoIBCWTOWHR/view?usp=sharing 2022: https://drive.google.com/file/d/1soZtiW6gyVJPl7P_J60j2TL2Fqzl0QAs/view?usp=sharing @@ -178,7 +168,6 @@ photo: tom.jpg bio: 'Tom has a PhD in molecular microbiology and spent several years in Africa doing research on malaria, sleeping sickness and meningococcal epidemics. He has been actively advocating open access and open science since 2012 when he joined the Open Knowledge community and became a member of the DOAJ advisory board. His current research interests are development of quality systems for the assessment of scholarly journals and articles, and research in the area of soil microbiology in relation to soil health and human health.' 
coi: - 2016: https://drive.google.com/file/d/0ByRf6PVViI-mYUFZNDRISTZodUU/view?usp=sharing&resourcekey=0-g13FJaUJpdR_t2rMLEyzEQ 2018: https://drive.google.com/file/d/1x0w-a1TWQdJDKPtQpGhmDZSdA4BhFSpI/view?usp=sharing 2020: https://drive.google.com/file/d/1VyirUdc6FBNOujl938bHf1JCL1jLNwXV/view?usp=sharing 2022: https://drive.google.com/file/d/1ww7WHQEg1395bPn20Arb7LJn9lIROdBl/view?usp=sharing diff --git a/cms/pages/about/at-20.md b/cms/pages/about/at-20.md index 1678adfe2a..6d0e339928 100644 --- a/cms/pages/about/at-20.md +++ b/cms/pages/about/at-20.md @@ -31,15 +31,15 @@ There is also an opportunity for you to [support DOAJ during its 20th year](/at- - Event Time: 13:00 UTC - Duration: 90 mins - {% include "includes/svg/at-20/theme_global.svg" %} - - **[Registration is open](https://us02web.zoom.us/webinar/register/WN_fu42oi59S7GZ366rjyAUGg#/registration)** + - **[Recording is available](https://www.youtube.com/watch?v=TRjtc-7tg8w)** - Name: _DOAJ at 20: Global_ - Date: _28th September 2023_ - - Event Time: 13:00 UTC ([Check the event time](https://www.timeanddate.com/worldclock/fixedtime.html?iso=20230928T13&ah=1&am=30) where you are.) + - Event Time: 13:00 UTC - Duration: 2 hours - {% include "includes/svg/at-20/theme_trusted.svg" %} - Name: _DOAJ at 20: Trusted_ - Date: _7th December 2023_ - - Event Time: to be confirmed + - Event Time: 14:00 UTC ([Check the event time](https://www.timeanddate.com/worldclock/fixedtime.html?msg=DOAJ+at+20%3A+Trusted&iso=20231207T14&p1=1440&ah=1&am=30) where you are.) - Duration: 90 mins ## Open diff --git a/cms/pages/apply/seal.md b/cms/pages/apply/seal.md index b9c4968cca..7fc7f4d414 100644 --- a/cms/pages/apply/seal.md +++ b/cms/pages/apply/seal.md @@ -12,7 +12,7 @@ The DOAJ Seal is awarded to journals that demonstrate best practice in open acce **Journals do not need to meet the Seal criteria to be accepted into DOAJ.** -There are seven criteria which a journal must meet to be eligible for the DOAJ Seal. 
These relate to best practice in long term preservation, use of persistent identifiers, discoverability, reuse policies and authors' rights. +There are seven criteria which a journal must meet to be eligible for the DOAJ Seal. These relate to best practices in long-term preservation, use of persistent identifiers, discoverability, reuse policies and authors' rights. --- @@ -21,29 +21,29 @@ There are seven criteria which a journal must meet to be eligible for the DOAJ S All seven criteria must be met for a journal to be awarded the Seal. Failure to maintain the best practice and standards described in these criteria may lead to removal of the Seal. {:.tabular-list .tabular-list--ordered} -1. Digital preservation +1. Digital preservation (Archiving policy) - The journal content must be continuously deposited in one of these archives: - any archiving agency included in [Keepers Registry](https://keepers.issn.org/keepers) - Internet Archive - PubMed Central -2. Persistent article identifiers +2. Self-archiving (Repository policy) + - Authors must be permitted to deposit all versions of their paper in an institutional or subject repository. + - Preprint + - Author's Accepted Manuscript + - Published article (Version of Record) + - An embargo may not be applied. +3. Persistent article identifiers (Unique identifiers) - Articles must use persistent article identifiers. DOI, ARK or Handle are the most commonly used. - All persistent links must resolve correctly. -3. Metadata supply to DOAJ +4. Metadata supply to DOAJ - Article metadata must be uploaded to DOAJ regularly. -4. License type +5. License type - The journal must permit the use of a Creative Commons license that allows the creation of derivative products. - CC BY - CC BY-SA - CC BY-NC - CC BY-NC-SA -5. License information in articles +6. License information in articles - Creative Commons licensing information must be displayed in all full-text article formats. -6. Copyright and publishing rights +7. 
Copyright and publishing rights - Authors must retain unrestricted copyright and all publishing rights when publishing under any license permitted by the journal. -7. Self-archiving policy - - Authors must be permitted to deposit all versions of their paper in an institutional or subject repository. - - Preprint - - Author's Accepted Manuscript - - Published article (Version of Record) - - An embargo may not be applied. diff --git a/cms/pages/apply/transparency.md b/cms/pages/apply/transparency.md index 0bda2aae3d..913d061882 100644 --- a/cms/pages/apply/transparency.md +++ b/cms/pages/apply/transparency.md @@ -23,13 +23,14 @@ These principles also acknowledge that publishers and editors are responsible fo ### JOURNAL CONTENT -#### 1. Name of journal +**1. Name of journal** + The journal's name should: - Be unique and not be one that is easily confused with another journal. - Not mislead potential authors and readers about the journal's origin, scope, or association with other journals and organisations. -#### 2. Website +**2. Website** - Websites should be properly supported and maintained, with particular attention given to security aspects that help protect users from viruses and malware. - As a minimum, websites should use https and not http, and all traffic should be redirected through https. Those responsible for the website should apply web standards and best ethical practices to the website's content, presentation, and application. @@ -45,20 +46,22 @@ In addition to the requirements outlined above, the following items should be cl - Authorship criteria. - ISSNs (separate for print and electronic versions). -#### 3. Publishing schedule +**3. Publishing schedule** + A journal's publishing frequency should be clearly described, and the journal must keep to its publishing schedule unless there are exceptional circumstances. -#### 4. Archiving +**4. 
Archiving** + A journal's plan for electronic backup and long term digital preservation of the journal content, in the event that the journal and/or publisher stops operating, should be clearly indicated. Examples include PMC and those listed in [the Keepers Registry](https://keepers.issn.org/). -#### 5. Copyright +**5. Copyright** - The copyright terms for published content should be clearly stated on the website and in the content. - The copyright terms should be separate and distinct from the copyright of the website. - The copyright holder should be named on the full text of all published articles (HTML and PDF). - If the copyright terms are described in a separate form, this should be easy to find on the website and available to all. -#### 6. Licencing +**6. Licencing** - Licencing information should be clearly described on the website. - Licencing terms should be indicated on the full text of all published articles (HTML and PDF). @@ -69,7 +72,8 @@ If Creative Commons licences are used, then the terms of that licence should als ### JOURNAL PRACTICES -#### 7. Publication ethics and related editorial policies +**7. Publication ethics and related editorial policies** + A journal should have policies on publication ethics (for example, [COPE's Core Practice guidance](https://publicationethics.org/core-practices)). These should be visible on its website, and should refer to: - Journal's policies on [authorship and contributorship](https://publicationethics.org/authorship). @@ -84,7 +88,8 @@ A journal should have policies on publication ethics (for example, [COPE's Core Editors and publishers are responsible for ensuring the integrity of the scholarly literature in their journals and should ensure they outline their policies and procedures for handling such issues when they arise. These issues include plagiarism, citation manipulation, and data falsification/fabrication, among others. 
Neither the journal’s policies nor the statements of its editors should encourage such misconduct, or knowingly allow such misconduct to take place. In the event that a journal's editors or publisher are made aware of any allegation of research misconduct relating to a submitted or published article in their journal, the editor or publisher should follow [COPE's guidance](https://publicationethics.org/guidance) (or equivalent) in dealing with allegations. -#### 8. Peer review +**8. Peer review** + Peer review is defined as obtaining advice on manuscripts from reviewers/experts in the manuscript’s subject area. Those individuals should not be part of the journal's editorial team. However, the specific elements of peer review may differ by journal and discipline, so the following should be clearly stated on the website: - Whether or not the content is peer reviewed. @@ -105,30 +110,31 @@ Journals should not guarantee acceptance of initial manuscript submissions. Stat The date of publication should be published with all published research. Dates of submission and acceptance are preferred as well. -#### 9. Access +**9. Access** + If any of the online content is not freely accessible to everyone, the method of gaining access (for example, registration, subscription, or pay-per-view fees) should be clearly described. If offline versions (for example, print) are available, this should be clearly described along with any associated charges. ### ORGANISATION -#### 10. Ownership and management +**10. Ownership and management** - Information about the ownership and management of a journal should be clearly indicated on the journal's website. - Organisational names should not be used in a way that could mislead potential authors and editors about the nature of the journal's owner. - If a journal is affiliated with a society, institution, or sponsor, links to their website(s) should be provided where available. -#### 11. Advisory body +**11. 
Advisory body** Journals should have editorial boards or other advisory bodies whose members are recognised experts in the subject areas stated in the journal's aims and scope. - The full names and affiliations of the members should be provided on the journal's website. - The list should be up to date, and members must agree to serve. - To avoid being associated with predatory or deceptive journals, journals should periodically review their board to ensure it is still relevant and appropriate. -#### 12. Editorial team/contact information +**12. Editorial team/contact information** Journals should provide the full names and affiliations of their editors as well as contact information for the editorial office, including a full mailing address, on the journal’s website. ### BUSINESS PRACTICES -#### 13. Author fees +**13. Author fees** - If author fees are charged (such as article processing charges, page charges, editorial processing charges, language editing fees, colour charges, submission fees, membership fees, or other supplementary charges), then the fees should be clearly stated on the website. - If there are no such fees, this should be clearly stated. @@ -141,14 +147,16 @@ Journals should provide the full names and affiliations of their editors as well - When and how to apply for a waiver. - Author fees or waiver status should not influence editorial decision making, and this should be clearly stated. -#### 14. Other revenue +**14. Other revenue** + Business models or revenue sources should be clearly stated on the journal's website. Examples include author fees (see section 13), subscriptions, sponsorships and subsidies, advertising (see section 15), reprints, supplements, or special issues. Business models or revenue sources (for example, reprint income, supplements, special issues, sponsorships) should not influence editorial decision making. -#### 15. Advertising +**15. Advertising** + Journals should state whether they accept advertising. 
If they do, they should state their advertising policy, including: - Which types of advertisements will be considered. @@ -157,7 +165,8 @@ Journals should state whether they accept advertising. If they do, they should s Advertisements should not be related in any way to editorial decision making and should be kept separate from the published content. -#### 16. Direct marketing +**16. Direct marketing** + Any direct marketing activities, including solicitation of manuscripts, that are conducted on behalf of the journal should be appropriate, well targeted, and unobtrusive. Information provided about the publisher or journal should be truthful and not misleading for readers or authors. ## Version history diff --git a/cms/pages/legal/terms.md b/cms/pages/legal/terms.md index ad872453eb..1b27b66e99 100644 --- a/cms/pages/legal/terms.md +++ b/cms/pages/legal/terms.md @@ -32,7 +32,7 @@ DOAJ uses a variety of licenses for the different parts of its website and the c + In our [OAI-PMH feed](/docs/oai-pmh) + In the [full data dump of all article metadata](/docs/public-data-dump/). -4. The *open source software* that DOAJ is built with is licensed under [an Apache license Version 2](https://github.com/DOAJ/doaj/blob/a6fc2bee499b5a8a1f24fb098acfb8e10bd72503/portality/static/vendor/select2-3.5.4/LICENSE). +4. The *open source software* that DOAJ is built with is licensed under [an Apache license Version 2](https://github.com/DOAJ/doaj/blob/develop/LICENSE). 
--- diff --git a/cms/pages/support/index.md b/cms/pages/support/index.md index 8c9ad71dc4..cc39fbfd44 100644 --- a/cms/pages/support/index.md +++ b/cms/pages/support/index.md @@ -2,7 +2,7 @@ layout: sidenav sidenav_include: /includes/_sidenav_donation.html include: /includes/contribution_rates.html -title: Support DOAJ +title: Institutional and library supporter model section: Support sticky_sidenav: true featuremap: @@ -11,10 +11,37 @@ featuremap: --- -Support of DOAJ by academic organisations is vital and we are proud to acknowledge that over 80% of our support comes to us this way. We are very grateful to all our supporting academic organisations from around the world. +Support of DOAJ by academic organisations is vital, and we are proud to acknowledge that over 80% of our support comes to us this way. We are very grateful to all our supporting academic organisations worldwide. -The suggested contributions for academic organisations are below. Use the table to find the most appropriate option for your organisation. [Send an email](mailto:joanna@doaj.org) to Joanna Ball, Managing Director, with the details of the support level you have chosen. Alternatively, you can use our invoice-free one-time donation button to send us an amount of your choosing. +### 2024 pricing -(Publishers interested in supporting us should read the [publisher supporters](/support/publisher-supporters/) page.) +For 2024, we have revised and simplified our supporter model to align with the levels recommended by SCOSS. This new model enables us to invest in the organisation's future and to continue to provide a high-quality service to our community. 
+ +| | Euros(€) | USDs($) | GBPs(£) | +|---------------------|----------|---------|---------| +| Large organisations | 4,000 | 4,400 | 3,440 | +| Small organisations | 2,000 | 2,200 | 1,720 | +| Organisations from [low- and middle-income countries](https://datatopics.worldbank.org/world-development-indicators/the-world-by-income-and-region.html) | 500 | 550 | 430 | + +A 30% discount will be applied to institutions supporting via a billing consortium. Please contact [supporters@doaj.org](mailto:supporters@doaj.org) for further information. + +We always have a wishlist of development projects for which we require additional funding. Please contact us if you would like to support us over and above our standard rates. + +### Why you should support us + +- We are community-led and -governed. Your support enables our commitment to being 100% independent. +- Supporting open infrastructure is a strategic choice for libraries and institutions, demonstrating your commitment to open research and sustaining open infrastructure. +- We are seeing a steady increase in demand: the number of applications we receive each year has increased by 60% since 2018, and our investigations into questionable publishing practices are becoming more complex. +- Help us deliver our role in driving standards and best practice in open access publishing, for example through the [Principles of transparency and best practice in scholarly publishing](/apply/transparency/) and the [OA Journals Toolkit](https://www.oajournals-toolkit.org/). +- You rely extensively on our metadata as a source of trusted journals, integrating it into discovery systems and open access services. + +By supporting us, your organisation will join [a growing family of like-minded institutions](/support/supporters/) committed to ensuring quality content is available online for everyone. Supporting DOAJ is a statement of belief in equitable open knowledge and science. 
+ +### Benefits for institutional and library supporters + +- We will add your institution’s name to [our Supporters page](/support/supporters/) +- you can include details of your DOAJ support in marketing activities +- you can use our logo on your institution’s websites and in other communications +- you can integrate into your services the DOAJ metadata via our OAI/PMH service, our API or the public data dump --- diff --git a/cms/pages/support/publisher-supporters.md b/cms/pages/support/publisher-supporters.md index 1d82e26b2b..f010872aba 100644 --- a/cms/pages/support/publisher-supporters.md +++ b/cms/pages/support/publisher-supporters.md @@ -1,7 +1,7 @@ --- layout: sidenav include: /data/publisher-supporters.html -title: Publisher supporters +title: Publisher supporter model section: Support sticky_sidenav: true toc: true @@ -10,15 +10,62 @@ featuremap: - ~~->PublisherSupportersData:Template~~ --- -The publishers on this page have chosen to show their commitment to quality, peer-reviewed open access by supporting DOAJ. We thank them! Without them, our work would not be possible. +DOAJ relies on the support of publishers and [libraries](/support/) to ensure that its metadata and services remain free for all. The publishers on this page have chosen to show their commitment to quality, peer-reviewed open access by supporting DOAJ. We thank them as without them, our work would not be possible. -**To become a publisher supporter**, send an email to [our Help desk](mailto:helpdesk@doaj.org) and we will provide with details on how to support us. Your organisation will be listed on this page. +## 2024 pricing -'Premier' and 'Sustaining' publishers have committed to supporting DOAJ for a three-year period. 'Basic' publishers support us for one year. 
+We are introducing a revised and simplified model for publishers to support DOAJ for 2024 and publishing this openly in line with [our commitment to the Principles of Open Scholarly Infrastructure](https://blog.doaj.org/2022/10/06/doaj-commits-to-the-principles-of-open-scholarly-infrastructure-posi/). We are also relaunching the set of benefits for publishers choosing to support us. -
{% include '/data/sponsors.html' %}
+We only accept support through our publisher supporter model from publishers with journals already indexed in DOAJ. Other routes to support DOAJ are as [an institution](/support/) or as [an individual via Paypal](https://www.paypal.com/donate/?campaign_id=4VXR4TJ69MDJJ). Non-commercial/institutional rates are only available to community-led, smaller publishers with limited funding. Please contact [supporters@doaj.org](mailto:supporters@doaj.org) if unsure which category applies. + +Please contact [supporters@doaj.org](mailto:supporters@doaj.org) if you want to contribute to DOAJ’s operating costs as a publisher supporter. + +### Commercial publishers + +| Band | Number of journals in DOAJ | GBPs (£)* | +|------|----------------------------|-----------| +| A | 600+ | 25,000 | +| B | 400-599 | 20,000 | +| C | 150-399 | 17,000 | +| D | 100-149 | 14,000 | +| E | 50-99 | 8000 | +| F | 30-49 | 6000 | +| G | 10-29 | 5000 | +| H | 1-9 | 3500 | + +### Non-commercial / institutional publishers + +| Band | Number of journals in DOAJ | GBPs (£)* | +|------|----------------------------|-----------| +| C | 150-399 | 3500 | +| D | 100-149 | 3000 | +| E | 50-99 | 2500 | +| F | 30-49 | 2000 | +| G | 10-29 | 1500 | +| H | 1-9 | 1000 | + +*A 50% discount is available for supporters in Low- and Middle-Income Countries according to the World Bank classification. + +## 2024 publisher benefits -## Benefits for contributing publishers and aggregators +1. Your logo on the DOAJ website +2. A post from all our social media platforms (Twitter, Facebook, LinkedIn, Mastodon, Instagram) acknowledging your organisation as a Supporter +3. A blog post at the start of the year introducing our new supporters +4. Our DOAJ Supporter logo which you can use for your website +5. Access to our Public Data Dump +6. 
For supporters from Bands A-E, or those contributing over the suggested amounts, a personal DOAJ contact to whom all enquiries regarding your applications and updates can be directed + +## Sponsorship opportunities + +We are particularly grateful to those publishers who can contribute over and above these amounts. In these cases, we can offer sponsorship opportunities that enhance our services and support open access globally, for example: + +- Specific technical developments +- Ambassador programme +- Webinar programmes and events + +Please get in touch to discuss. + +## 2023 benefits for publisher supporters ([A downloadable version](https://docs.google.com/document/d/1xTVxUvqLkh2-r53cYlWdSIHsPGSnhcE7gi7bRFCaJik/edit?usp=sharing) of these benefits is available.) @@ -33,4 +80,6 @@ The publishers on this page have chosen to show their commitment to quality, pee | | | A CSV file, generated annually, for recording changes in and which DOAJ updates your live records with. | | | | Exposure across all our social media channels: Twitter, Instagram, LinkedIn, Facebook, WeChat. (Stats available.) | +
{% include '/data/sponsors.html' %}
+ ## Other publisher supporters diff --git a/cms/pages/support/supporters.md b/cms/pages/support/supporters.md index c7d08ddda8..6cf98e6b0f 100644 --- a/cms/pages/support/supporters.md +++ b/cms/pages/support/supporters.md @@ -7,9 +7,9 @@ featuremap: ~~Supporters:Fragment~~ --- -We are proud that over 80% of DOAJ's funding comes from academic organisations (libraries, library consortia, universities, research centres). Without this vital support, we wouldn't be able to continue the high levels of service that the research community expects of us. We are grateful for the trust shown in us by our supporters. +We are proud that over 80% of our funding comes from academic organisations (libraries, library consortia, universities, research centres). Without this vital support, we couldn't deliver the services the research community expects of us. We are grateful for the trust shown in us by our supporters. - Check [our support page](/support/) for more information on supporter levels and categories. +Check [our Institutions and libraries support page](/support/) for pricing and benefits. 
--- diff --git a/cms/sass/components/_accordion.scss b/cms/sass/components/_accordion.scss new file mode 100644 index 0000000000..e066ee02f6 --- /dev/null +++ b/cms/sass/components/_accordion.scss @@ -0,0 +1,3 @@ +.accordion:focus-within { + border: $grapefruit solid; +} \ No newline at end of file diff --git a/cms/sass/components/_buttons.scss b/cms/sass/components/_buttons.scss index 1e71d3aceb..061c75c454 100644 --- a/cms/sass/components/_buttons.scss +++ b/cms/sass/components/_buttons.scss @@ -117,3 +117,10 @@ button[type="submit"].button--secondary { color: currentColor; } } + +button.aria-button { + all: inherit; + -webkit-appearance: none; + -moz-appearance: none; + appearance: none; +} diff --git a/cms/sass/components/_filters.scss b/cms/sass/components/_filters.scss index e82883841b..fddb1e6e07 100644 --- a/cms/sass/components/_filters.scss +++ b/cms/sass/components/_filters.scss @@ -4,6 +4,24 @@ margin-bottom: $spacing-04; border: 0; @include typescale-06; + + input[type="checkbox"], + input[type="radio"] { + display: unset; + opacity: 0; + width: 0.8em; + height: 0.8em; + + &:focus + label { + outline: dashed 2px lightgrey; + outline-offset: 1px; + } + + &:focus:not(:focus-visible){ + outline: none; + } + } + } .filters__heading { @@ -50,6 +68,8 @@ max-height: $spacing-07; height: auto; overflow-y: auto; + padding-top: $spacing-01; + @include unstyled-list; li { diff --git a/cms/sass/components/_skip-to-main-content.scss b/cms/sass/components/_skip-to-main-content.scss new file mode 100644 index 0000000000..3542adb62c --- /dev/null +++ b/cms/sass/components/_skip-to-main-content.scss @@ -0,0 +1,35 @@ +/* Back to main content button */ + +.skip-to-main { + position: absolute; + z-index: 10000; + display: flex; + flex-direction: row; + align-items: center; + min-width: min-content; + padding: 5px; + top: 10px; + left: 10px; + background-color: $grapefruit; + + svg { + display: block; + margin: 0 auto; + stroke: $warm-black; + margin-right: 10px; + } + 
&:hover, &:focus { + svg { + margin-right: 10px; + } + } + &:hover:after, &:focus:after { + content: " Skip to main content"; + color: $warm-black; + vertical-align: bottom; + -webkit-font-feature-settings: 'liga' 1; + -moz-font-feature-settings: 'liga' 1; + font-feature-settings: 'liga' 1; + transition: 0.5s ease; + } +} \ No newline at end of file diff --git a/cms/sass/main.scss b/cms/sass/main.scss index cdd22133b8..661f2a4ae6 100644 --- a/cms/sass/main.scss +++ b/cms/sass/main.scss @@ -28,6 +28,7 @@ "layout/sidenav", "components/alert", + "components/accordion", "components/back-to-top", "components/buttons", "components/card", @@ -52,6 +53,7 @@ "components/review-table", "components/select2", "components/search-results", + "components/skip-to-main-content", "components/stat", "components/stretch-list", "components/tabs", diff --git a/deploy/doaj_gunicorn_config.py b/deploy/doaj_gunicorn_config.py index f9425de5e5..a08dd6ef62 100644 --- a/deploy/doaj_gunicorn_config.py +++ b/deploy/doaj_gunicorn_config.py @@ -1,7 +1,7 @@ import multiprocessing bind = "0.0.0.0:5050" -workers = multiprocessing.cpu_count() * 8 + 1 +workers = multiprocessing.cpu_count() * 6 + 1 proc_name = 'doaj' max_requests = 1000 @@ -13,4 +13,4 @@ max_requests_jitter = 100 timeout = 40 -graceful_timeout = 40 \ No newline at end of file +graceful_timeout = 40 diff --git a/deploy/lambda/alert_backups_missing.py b/deploy/lambda/alert_backups_missing.py index 38a9edbc2e..566a361b9a 100644 --- a/deploy/lambda/alert_backups_missing.py +++ b/deploy/lambda/alert_backups_missing.py @@ -1,5 +1,5 @@ #!/usr/bin/env python3 -""" Steven Eardley 2020-02-07 for DOAJ - uploaded manually (todo: we should upload this in the release script) """ +""" Steven Eardley 2023-09-15 for DOAJ - uploaded manually (todo: we should upload this in the release script) """ # ~~BackupsMissing:Monitoring->Lambda:Technology~~ @@ -8,23 +8,25 @@ import json from datetime import datetime, timezone, timedelta -from portality.lib.dates 
import FMT_DATETIME_STD - s3 = boto3.client('s3') # Check the doaj elasticsearch snapshot bucket has been updated today (should happen daily at 0600 via background job) -buckets = ['doaj-index-backups'] +buckets = ['doaj-index-ipt-backups'] + # Check the doaj-nginx logs bucket has been updated today (should happen daily at 0630 via cron logrotate) -buckets += ['doaj-nginx-logs'] +# buckets += ['doaj-nginx-logs'] def lambda_handler(event, context): """ The main function executed by Lambda""" + start = datetime.utcnow() summary = {'success': [], 'fail': []} for b in buckets: + print('Checking bucket {0} was updated today'.format(b)) + # First check the bucket actually exists try: s3.head_bucket(Bucket=b) @@ -32,11 +34,13 @@ def lambda_handler(event, context): error_code = int(e.response['Error']['Code']) if error_code == 404: send_alert_email(b, last_mod=None) + raise # Then check the expected entry exists in the bucket's objects. files = list_bucket_keys(bucket_name=b) old_to_new = sorted(files, key=lambda f: f['LastModified']) newest = old_to_new[-1] + print('Latest backup is', newest) # If the newest file is older than 1 day old, our backups are not up to date. if datetime.now(timezone.utc) - newest['LastModified'] > timedelta(days=1): @@ -47,6 +51,8 @@ def lambda_handler(event, context): summary['success'].append(b) print(summary) # For the CloudWatch logs + print('Completed in', str(datetime.utcnow() - start)) + return str(summary) @@ -86,8 +92,8 @@ def send_alert_email(bucket, last_mod): msg = 'AWS backup error: bucket {b} is missing.'.format(b=bucket) else: msg = 'AWS backup error: bucket {b} has not been updated today - it was last modified on {t}.' 
\ - '\nYou may wish to check the corresponding logs.'.format(b=bucket, - t=last_mod.strftime(FMT_DATETIME_STD)) + '\nYou may wish to check the corresponding logs.'.format(b=bucket, t=last_mod.strftime( + '%Y-%m-%dT%H:%M:%SZ')) r = botocore.vendored.requests.post('https://api.mailgun.net/v3/doaj.org/messages', auth=('api', credentials.get('ERROR_MAIL_API_KEY', '')), diff --git a/doajtest/matrices/article_create_article/issn_validation_against_journal.matrix.csv b/doajtest/matrices/article_create_article/issn_validation_against_journal.matrix.csv new file mode 100644 index 0000000000..0d2f704aba --- /dev/null +++ b/doajtest/matrices/article_create_article/issn_validation_against_journal.matrix.csv @@ -0,0 +1,17 @@ +test_id,eissn,pissn,validated +1,eissn_in_doaj,pissn_in_doaj,yes +2,eissn_in_doaj,eissn_not_in_doaj, +3,eissn_in_doaj,pissn_not_in_doaj, +4,eissn_in_doaj,!eissn_in_doaj, +5,pissn_in_doaj,eissn_in_doaj, +6,pissn_in_doaj,eissn_not_in_doaj, +7,pissn_in_doaj,pissn_not_in_doaj, +8,pissn_in_doaj,!pissn_in_doaj, +9,eissn_not_in_doaj,eissn_in_doaj, +10,eissn_not_in_doaj,pissn_in_doaj, +11,eissn_not_in_doaj,pissn_not_in_doaj, +12,eissn_not_in_doaj,!eissn_not_in_doaj, +13,pissn_not_in_doaj,eissn_in_doaj, +14,pissn_not_in_doaj,pissn_in_doaj, +15,pissn_not_in_doaj,eissn_not_in_doaj, +16,pissn_not_in_doaj,!pissn_not_in_doaj, diff --git a/doajtest/matrices/article_create_article/issn_validation_against_journal.settings.csv b/doajtest/matrices/article_create_article/issn_validation_against_journal.settings.csv new file mode 100644 index 0000000000..a8eab3f4ce --- /dev/null +++ b/doajtest/matrices/article_create_article/issn_validation_against_journal.settings.csv @@ -0,0 +1,19 @@ +field,test_id,eissn,pissn,validated +type,index,generated,generated,conditional +deafult,,,,no +,,,, +values,,eissn_in_doaj,eissn_in_doaj,yes +values,,pissn_in_doaj,pissn_in_doaj,no +values,,eissn_not_in_doaj,eissn_not_in_doaj, +values,,pissn_not_in_doaj,pissn_not_in_doaj, +,,,, +,,,, 
+conditional validated,,eissn_in_doaj,pissn_in_doaj,yes +constraint eissn,,eissn_in_doaj,!eissn_in_doaj, +constraint eissn,,eissn_not_in_doaj,!eissn_not_in_doaj, +constraint eissn,,pissn_not_in_doaj,!pissn_not_in_doaj, +constraint eissn,,pissn_in_doaj,!pissn_in_doaj, +constraint pissn,,eissn_in_doaj,!eissn_in_doaj, +constraint pissn,,eissn_not_in_doaj,!eissn_not_in_doaj, +constraint pissn,,pissn_not_in_doaj,!pissn_not_in_doaj, +constraint pissn,,pissn_in_doaj,!pissn_in_doaj, \ No newline at end of file diff --git a/doajtest/matrices/article_create_article/issn_validation_against_journal.settings.json b/doajtest/matrices/article_create_article/issn_validation_against_journal.settings.json new file mode 100644 index 0000000000..11d1012a96 --- /dev/null +++ b/doajtest/matrices/article_create_article/issn_validation_against_journal.settings.json @@ -0,0 +1,119 @@ +{ + "parameters": [ + { + "name": "test_id", + "type": "index" + }, + { + "name": "eissn", + "type": "generated", + "values": { + "eissn_in_doaj": { + "constraints": { + "pissn": { + "nor": [ + "eissn_in_doaj" + ] + } + } + }, + "pissn_in_doaj": { + "constraints": { + "pissn": { + "nor": [ + "pissn_in_doaj" + ] + } + } + }, + "eissn_not_in_doaj": { + "constraints": { + "pissn": { + "nor": [ + "eissn_not_in_doaj" + ] + } + } + }, + "pissn_not_in_doaj": { + "constraints": { + "pissn": { + "nor": [ + "pissn_not_in_doaj" + ] + } + } + } + } + }, + { + "name": "pissn", + "type": "generated", + "values": { + "eissn_in_doaj": {}, + "pissn_in_doaj": {}, + "eissn_not_in_doaj": {}, + "pissn_not_in_doaj": {}, + "!eissn_in_doaj": { + "constraints": { + "eissn": { + "or": [ + "eissn_in_doaj" + ] + } + } + }, + "!eissn_not_in_doaj": { + "constraints": { + "eissn": { + "or": [ + "eissn_not_in_doaj" + ] + } + } + }, + "!pissn_not_in_doaj": { + "constraints": { + "eissn": { + "or": [ + "pissn_not_in_doaj" + ] + } + } + }, + "!pissn_in_doaj": { + "constraints": { + "eissn": { + "or": [ + "pissn_in_doaj" + ] + } + } + } + } + 
}, + { + "name": "validated", + "type": "conditional", + "values": { + "yes": { + "conditions": [ + { + "eissn": { + "or": [ + "eissn_in_doaj" + ] + }, + "pissn": { + "or": [ + "pissn_in_doaj" + ] + } + } + ] + }, + "no": {} + } + } + ] +} \ No newline at end of file diff --git a/doajtest/testbook/public_site/home_page.yml b/doajtest/testbook/public_site/home_page.yml index c2261786cc..625df716f7 100644 --- a/doajtest/testbook/public_site/home_page.yml +++ b/doajtest/testbook/public_site/home_page.yml @@ -120,3 +120,22 @@ tests: bottom right-hand corner. results: - You are returned to the top of the home page +- title: Skip to main content button (Accessibility) + context: + role: anonymous + steps: + - step: Refresh the page + - step: Click tab key on the keyboard once + results: + - Skip to the main content button is unfolded and focused + - step: Click enter + results: + - Focus is moved to the main content + - step: Turn on screen reader + - step: With the keyboard navigate to Skip to main content button + results: + - Screen reader reads the button title + - step: Click enter + results: + - Focus is moved to the main content + diff --git a/doajtest/testbook/public_site/public_search.yml b/doajtest/testbook/public_site/public_search.yml index 1bce101f8c..6b47834e85 100644 --- a/doajtest/testbook/public_site/public_search.yml +++ b/doajtest/testbook/public_site/public_search.yml @@ -166,3 +166,25 @@ tests: results: - You are taken to the full text of this article on the Web. 
It opens in a new tab +- title: 'Test Public Search Results Display: Accessibility' + context: + role: anonymous + steps: + - step: Go to the DOAJ search page at /search/articles + - step: Turn on a screen reader + results: + - Extendable facets are focusable and focus is marked with an orange solid border + - The screenreader gives the header role ("button") + - The screenreader gives the state of the facet ("extended" or "folded") + - step: click spacebar to fold/unfold the facet + results: + - screenreader gives correct state of the facet ("extended" or "folded") + - step: click tab + results: + - focus is on the list of checkboxes + - focus is clearly marked by the outline + + - step: click spacebar to check the filter + results: + - filter is applied + diff --git a/doajtest/unit/resources/harvester_resp.json b/doajtest/unit/resources/harvester_resp.json index dc24cb7dd9..133fedaf24 100644 --- a/doajtest/unit/resources/harvester_resp.json +++ b/doajtest/unit/resources/harvester_resp.json @@ -45,8 +45,8 @@ "journal": { "title": "My Journal", "medlineAbbreviation": "My Jour", - "essn": "1234-5678", - "issn": "9876-5432", + "issn": "1234-5678", + "essn": "9876-5432", "isoabbreviation": "My Jour", "nlmid": "123456789" } @@ -143,8 +143,8 @@ "journal": { "title": "My Journal", "medlineAbbreviation": "My Jour", - "essn": "1234-5678", - "issn": "9876-5432", + "issn": "1234-5678", + "essn": "9876-5432", "isoabbreviation": "My Jour", "nlmid": "123456789" } diff --git a/doajtest/unit/test_article_acceptable_and_permissions.py b/doajtest/unit/test_article_acceptable_and_permissions.py index eb4c04d4fb..5e0328635f 100644 --- a/doajtest/unit/test_article_acceptable_and_permissions.py +++ b/doajtest/unit/test_article_acceptable_and_permissions.py @@ -14,6 +14,11 @@ def is_acceptable_load_cases(): "test_id", {"test_id": []}) +def issn_validation_against_journal_load_sets(): + return load_parameter_sets(rel2abs(__file__, "..", "matrices", "article_create_article"), 
"issn_validation_against_journal", + "test_id", + {"test_id": []}) + class TestBLLPrepareUpdatePublisher(DoajTestCase): @@ -110,4 +115,73 @@ def test_has_permissions(self): assert failed_result["unowned"].sort() == [pissn, eissn].sort() # assert failed_result == {'success': 0, 'fail': 1, 'update': 0, 'new': 0, 'shared': [], # 'unowned': [pissn, eissn], - # 'unmatched': []}, "received: {}".format(failed_result) \ No newline at end of file + # 'unmatched': []}, "received: {}".format(failed_result) + + + @parameterized.expand(issn_validation_against_journal_load_sets) + def test_issn_validation_against_journal_load_sets(self, value, kwargs): + kwpissn = kwargs.get("pissn") + kweissn = kwargs.get("eissn") + validated = kwargs.get("validated") + + js = JournalFixtureFactory.make_many_journal_sources(2) + journal_in_doaj = Journal(**js[0]) + journal_in_doaj.set_in_doaj(True) + journal_in_doaj.bibjson().pissn = "1111-1111" + journal_in_doaj.bibjson().eissn = "2222-2222" + journal_in_doaj.save(blocking=True) + + journal_not_in_doaj = Journal(**js[1]) + journal_not_in_doaj.set_in_doaj(False) + journal_not_in_doaj.bibjson().pissn = "3333-3333" + journal_not_in_doaj.bibjson().eissn = "4444-4444" + journal_not_in_doaj.save(blocking=True) + + if (kwpissn == "pissn_in_doaj"): + pissn = journal_in_doaj.bibjson().pissn + elif (kwpissn == "eissn_in_doaj"): + pissn = journal_in_doaj.bibjson().eissn + elif (kwpissn == "pissn_not_in_doaj"): + pissn = journal_not_in_doaj.bibjson().pissn + else: + pissn = journal_not_in_doaj.bibjson().eissn + + if (kweissn == "pissn_in_doaj"): + eissn = journal_in_doaj.bibjson().pissn + elif (kweissn == "eissn_in_doaj"): + eissn = journal_in_doaj.bibjson().eissn + elif (kweissn == "pissn_not_in_doaj"): + eissn = journal_not_in_doaj.bibjson().pissn + else: + eissn = journal_not_in_doaj.bibjson().eissn + + + art_source = ArticleFixtureFactory.make_article_source(pissn=pissn, eissn=eissn) + article = Article(**art_source) + + if validated: + 
self.assertIsNone(self.svc.is_acceptable(article)) + + else: + with self.assertRaises(exceptions.ArticleNotAcceptable): + self.svc.is_acceptable(article) + + def test_check_validation_for_2_journals(self): + + js = JournalFixtureFactory.make_many_journal_sources(2, in_doaj=True) + journal_in_doaj = Journal(**js[0]) + journal_in_doaj.bibjson().pissn = "1111-1111" + journal_in_doaj.bibjson().eissn = "2222-2222" + journal_in_doaj.save(blocking=True) + + journal_not_in_doaj = Journal(**js[1]) + journal_not_in_doaj.bibjson().pissn = "3333-3333" + journal_not_in_doaj.bibjson().eissn = "4444-4444" + journal_not_in_doaj.save(blocking=True) + + + art_source = ArticleFixtureFactory.make_article_source(pissn="1111-1111", eissn="4444-4444") + article = Article(**art_source) + + with self.assertRaises(exceptions.ArticleNotAcceptable): + self.svc.is_acceptable(article) \ No newline at end of file diff --git a/doajtest/unit/test_bll_article_batch_create_article.py b/doajtest/unit/test_bll_article_batch_create_article.py index 6cda9ee82c..34f537c7a8 100644 --- a/doajtest/unit/test_bll_article_batch_create_article.py +++ b/doajtest/unit/test_bll_article_batch_create_article.py @@ -5,7 +5,7 @@ from doajtest.helpers import DoajTestCase from portality.bll import DOAJ from portality.bll import exceptions -from portality.models import Article, Account,Journal +from portality.models import Article, Account, Journal from portality.lib.paths import rel2abs from doajtest.mocks.bll_article import BLLArticleMockFactory from doajtest.mocks.model_Article import ModelArticleMockFactory @@ -37,12 +37,14 @@ def setUp(self): self._get_duplicate = self.svc.get_duplicate self._issn_ownership_status = self.svc.issn_ownership_status self._get_journal = Article.get_journal + self._find_by_issn_exact = Journal.find_by_issn_exact def tearDown(self): self.svc.is_legitimate_owner = self._is_legitimate_owner self.svc.get_duplicate = self._get_duplicate self.svc.issn_ownership_status = 
self._issn_ownership_status Article.get_journal = self._get_journal + Journal.find_by_issn_exact = self._find_by_issn_exact super(TestBLLArticleBatchCreateArticle, self).tearDown() @parameterized.expand(load_cases) @@ -118,8 +120,8 @@ def test_01_batch_create_article(self, name, kwargs): article = Article(**source) article.set_id() articles.append(article) - if add_journal_info: - journal_specs.append({"title" : "0", "pissn" : "0000-0000", "eissn" : "0000-0001"}) + # We always need a journal to exist for an article to be created + journal_specs.append({"title" : "0", "pissn" : "0000-0000", "eissn" : "0000-0001"}) # another with a DOI and no fulltext source = ArticleFixtureFactory.make_article_source( @@ -132,8 +134,7 @@ def test_01_batch_create_article(self, name, kwargs): article = Article(**source) article.set_id() articles.append(article) - if add_journal_info: - journal_specs.append({"title" : "1", "pissn" : "1111-1112", "eissn" : "1111-1111"}) + journal_specs.append({"title" : "1", "pissn" : "1111-1112", "eissn" : "1111-1111"}) # one with a fulltext and no DOI source = ArticleFixtureFactory.make_article_source( @@ -146,8 +147,7 @@ def test_01_batch_create_article(self, name, kwargs): article = Article(**source) article.set_id() articles.append(article) - if add_journal_info: - journal_specs.append({"title" : "2", "pissn" : "2222-2222", "eissn" : "2222-2223"}) + journal_specs.append({"title" : "2", "pissn" : "2222-2222", "eissn" : "2222-2223"}) # another one with a fulltext and no DOI source = ArticleFixtureFactory.make_article_source( @@ -160,8 +160,7 @@ def test_01_batch_create_article(self, name, kwargs): article = Article(**source) article.set_id() articles.append(article) - if add_journal_info: - journal_specs.append({"title" : "3", "pissn" : "3333-3333", "eissn" : "3333-3334"}) + journal_specs.append({"title" : "3", "pissn" : "3333-3333", "eissn" : "3333-3334"}) last_issn = "3333-3333" last_doi = "10.123/abc/1" @@ -180,8 +179,7 @@ def 
test_01_batch_create_article(self, name, kwargs): article = Article(**source) article.set_id() articles.append(article) - if add_journal_info: - journal_specs.append({"title" : "4", "pissn" : "4444-4444", "eissn" : "4444-4445"}) + journal_specs.append({"title" : "4", "pissn" : "4444-4444", "eissn" : "4444-4445"}) # one with a duplicated Fulltext source = ArticleFixtureFactory.make_article_source( @@ -194,8 +192,7 @@ def test_01_batch_create_article(self, name, kwargs): article = Article(**source) article.set_id() articles.append(article) - if add_journal_info: - journal_specs.append({"title" : "5", "pissn" : "5555-5555", "eissn" : "5555-5556"}) + journal_specs.append({"title" : "5", "pissn" : "5555-5555", "eissn" : "5555-5556"}) ilo_mock = None if account_arg == "owner": @@ -224,6 +221,18 @@ def test_01_batch_create_article(self, name, kwargs): gj_mock = ModelArticleMockFactory.get_journal(journal_specs, in_doaj=journal_in_doaj) Article.get_journal = gj_mock + # We need the journal to be in the index for the ArticleAcceptable checks FIXME: too slow, mock this + #[Journal(**js['instance']).save(blocking=True) for js in journal_specs] + + # We need to retrieve the correct Journal by its ISSNs + def mock_find(issns: list, in_doaj=None, max=2): + for j in journal_specs: + if sorted([j['eissn'], j['pissn']]) == sorted(issns): + return [j['instance']] + return [] + + Journal.find_by_issn_exact = mock_find + ########################################################### # Execution diff --git a/doajtest/unit/test_bll_article_create_article.py b/doajtest/unit/test_bll_article_create_article.py index f595a1b96e..d9d524efe7 100644 --- a/doajtest/unit/test_bll_article_create_article.py +++ b/doajtest/unit/test_bll_article_create_article.py @@ -35,7 +35,6 @@ def setUp(self): self.prepare_update_admin = self.svc._prepare_update_admin self.prepare_update_publisher = self.svc._prepare_update_publisher - def tearDown(self): super(TestBLLArticleCreateArticle, self).tearDown() diff 
--git a/doajtest/unit/test_models.py b/doajtest/unit/test_models.py index 5551cdcf5d..06175e6d76 100644 --- a/doajtest/unit/test_models.py +++ b/doajtest/unit/test_models.py @@ -1661,3 +1661,30 @@ def test_get_name_safe(self): # account does not exist assert models.Account.get_name_safe('not existing account id') == '' + def test_11_find_by_issn(self): + js = JournalFixtureFactory.make_many_journal_sources(2, in_doaj=True) + j1 = models.Journal(**js[0]) + j1.bibjson().pissn = "1111-1111" + j1.bibjson().eissn = "2222-2222" + j1.save(blocking=True) + + j2 = models.Journal(**js[1]) + j2.bibjson().pissn = "3333-3333" + j2.bibjson().eissn = "4444-4444" + j2.save(blocking=True) + + journals = models.Journal.find_by_issn(["1111-1111", "2222-2222"], True) + assert len(journals) == 1 + assert journals[0].id == j1.id + + journals = models.Journal.find_by_issn(["1111-1111", "3333-3333"], True) + assert len(journals) == 2 + assert journals[0].id == j1.id + assert journals[1].id == j2.id + + journals = models.Journal.find_by_issn_exact(["1111-1111", "2222-2222"], True) + assert len(journals) == 1 + assert journals[0].id == j1.id + + journals = models.Journal.find_by_issn_exact(["1111-1111", "3333-3333"], True) + assert len(journals) == 0 \ No newline at end of file diff --git a/doajtest/unit/test_tasks_ingestCrossref442Articles.py b/doajtest/unit/test_tasks_ingestCrossref442Articles.py index 2714b33644..ed2236552c 100644 --- a/doajtest/unit/test_tasks_ingestCrossref442Articles.py +++ b/doajtest/unit/test_tasks_ingestCrossref442Articles.py @@ -1315,11 +1315,11 @@ def test_40_crossref_2_journals_different_owners_issn_each_fail(self): found = [a for a in models.Article.find_by_issns(["1234-5678", "9876-5432"])] assert len(found) == 0 - def test_41_crossref_2_journals_same_owner_issn_each_success(self): + def test_41_crossref_2_journals_same_owner_issn_each_fail(self): etree.XMLSchema = self.mock_load_schema # Create 2 journals with the same owner, each with one different issn. 
The article's 2 issns # match each of these issns - # We expect a successful article ingest + # We expect a failed ingest - an article must match with only ONE journal j1 = models.Journal() j1.set_owner("testowner") @@ -1365,19 +1365,19 @@ def test_41_crossref_2_journals_same_owner_issn_each_success(self): fu = models.FileUpload.pull(id) assert fu is not None - assert fu.status == "processed" - assert fu.imported == 1 + assert fu.status == "failed" + assert fu.imported == 0 assert fu.updates == 0 - assert fu.new == 1 + assert fu.new == 0 fr = fu.failure_reasons + assert len(fr) > 0 assert len(fr.get("shared", [])) == 0 assert len(fr.get("unowned", [])) == 0 - assert len(fr.get("unmatched", [])) == 0 + assert len(fr.get("unmatched", [])) == 2 found = [a for a in models.Article.find_by_issns(["1234-5678", "9876-5432"])] - assert len(found) == 1 - + assert len(found) == 0 def test_42_crossref_2_journals_different_owners_different_issns_mixed_article_fail(self): etree.XMLSchema = self.mock_load_schema diff --git a/doajtest/unit/test_tasks_ingestCrossref531Articles.py b/doajtest/unit/test_tasks_ingestCrossref531Articles.py index 27308a3d22..09edcf1b1d 100644 --- a/doajtest/unit/test_tasks_ingestCrossref531Articles.py +++ b/doajtest/unit/test_tasks_ingestCrossref531Articles.py @@ -624,7 +624,7 @@ def test_23_crossref_process_success(self): j.set_owner("testowner") bj = j.bibjson() bj.add_identifier(bj.P_ISSN, "1234-5678") - j.save() + j.save(blocking=True) asource = AccountFixtureFactory.make_publisher_source() account = models.Account(**asource) @@ -634,6 +634,7 @@ def test_23_crossref_process_success(self): # push an article to initialise the mappings source = ArticleFixtureFactory.make_article_source() article = models.Article(**source) + article.bibjson().add_identifier(bj.P_ISSN, "1234-5678") article.save(blocking=True) article.delete() models.Article.blockdeleted(article.id) diff --git a/doajtest/unit/test_tasks_ingestDOAJarticles.py 
b/doajtest/unit/test_tasks_ingestDOAJarticles.py index 2872124a47..a2eb5f2be9 100644 --- a/doajtest/unit/test_tasks_ingestDOAJarticles.py +++ b/doajtest/unit/test_tasks_ingestDOAJarticles.py @@ -1260,10 +1260,10 @@ def test_40_doaj_2_journals_different_owners_issn_each_fail(self): found = [a for a in models.Article.find_by_issns(["1234-5678", "9876-5432"])] assert len(found) == 0 - def test_41_doaj_2_journals_same_owner_issn_each_success(self): + def test_41_doaj_2_journals_same_owner_issn_each_fail(self): # Create 2 journals with the same owner, each with one different issn. The article's 2 issns # match each of these issns - # We expect a successful article ingest + # We expect a failed article ingest - articles must match only ONE journal j1 = models.Journal() j1.set_owner("testowner") bj1 = j1.bibjson() @@ -1301,18 +1301,18 @@ def test_41_doaj_2_journals_same_owner_issn_each_success(self): fu = models.FileUpload.pull(id) assert fu is not None - assert fu.status == "processed" - assert fu.imported == 1 + assert fu.status == "failed" + assert fu.imported == 0 assert fu.updates == 0 - assert fu.new == 1 + assert fu.new == 0 fr = fu.failure_reasons assert len(fr.get("shared", [])) == 0 assert len(fr.get("unowned", [])) == 0 - assert len(fr.get("unmatched", [])) == 0 + assert len(fr.get("unmatched", [])) == 2 # error message for each article found = [a for a in models.Article.find_by_issns(["1234-5678", "9876-5432"])] - assert len(found) == 1 + assert len(found) == 0 def test_42_doaj_2_journals_different_owners_different_issns_mixed_article_fail(self): # Create 2 different journals with different owners and different issns (2 each). 
diff --git a/portality/bll/exceptions.py b/portality/bll/exceptions.py index 3bb676f984..005ad7f31c 100644 --- a/portality/bll/exceptions.py +++ b/portality/bll/exceptions.py @@ -66,6 +66,7 @@ class ArticleNotAcceptable(Exception): """ def __init__(self, *args, **kwargs): self.message = kwargs.get("message", "") + self.result = kwargs.get("result", {}) super(ArticleNotAcceptable, self).__init__(*args) def __str__(self): diff --git a/portality/bll/services/article.py b/portality/bll/services/article.py index 7b55894d24..b5e829cd24 100644 --- a/portality/bll/services/article.py +++ b/portality/bll/services/article.py @@ -56,6 +56,9 @@ def batch_create_articles(self, articles, account, duplicate_check=True, merge_d all_unowned = set() all_unmatched = set() + # Hold on to the exception so we can raise it later + e_not_acceptable = None + for article in articles: try: # ~~!ArticleBatchCreate:Feature->ArticleCreate:Feature~~ @@ -67,6 +70,10 @@ def batch_create_articles(self, articles, account, duplicate_check=True, merge_d dry_run=True) except (exceptions.ArticleMergeConflict, exceptions.ConfigurationException): raise exceptions.IngestException(message=Messages.EXCEPTION_ARTICLE_BATCH_CONFLICT) + except exceptions.ArticleNotAcceptable as e: + # The ArticleNotAcceptable exception is a superset of reasons we can't match a journal to this article + e_not_acceptable = e + result = {'fail': 1, 'unmatched': set(article.bibjson().issns())} success += result.get("success", 0) fail += result.get("fail", 0) @@ -90,6 +97,8 @@ def batch_create_articles(self, articles, account, duplicate_check=True, merge_d # return some stats on the import return report else: + if e_not_acceptable is not None: + raise exceptions.ArticleNotAcceptable(message=e_not_acceptable.message, result=report) raise exceptions.IngestException(message=Messages.EXCEPTION_ARTICLE_BATCH_FAIL, result=report) @staticmethod @@ -159,9 +168,6 @@ def _validate_issns(article_bibjson: models.ArticleBibJSON): if len(pissn) > 
1 or len(eissn) > 1: raise exceptions.ArticleNotAcceptable(message=Messages.EXCEPTION_TOO_MANY_ISSNS) - pissn = article_bibjson.get_one_identifier("pissn") - eissn = article_bibjson.get_one_identifier("eissn") - # no pissn or eissn if not pissn and not eissn: raise exceptions.ArticleNotAcceptable(message=Messages.EXCEPTION_NO_ISSNS) @@ -204,18 +210,18 @@ def create_article(self, article, account, duplicate_check=True, merge_duplicate {"arg": update_article_id, "instance": str, "allow_none": True, "arg_name": "update_article_id"} ], exceptions.ArgumentException) - # quickly validate that the article is acceptable - it must have a DOI and/or a fulltext - # this raises an exception if the article is not acceptable, containing all the relevant validation details + has_permissions_result = self.has_permissions(account, article, limit_to_account) + if isinstance(has_permissions_result, dict): + return has_permissions_result + # Validate that the article is acceptable: it must have a DOI and/or a fulltext & match only one in_doaj journal + # this raises an exception if the article is not acceptable, containing all the relevant validation details + # We do this after the permissions check because that gives a detailed result whereas this throws an exception try: self.is_acceptable(article) except Exception as e: raise e - has_permissions_result = self.has_permissions(account, article, limit_to_account) - if isinstance(has_permissions_result,dict): - return has_permissions_result - is_update = 0 if duplicate_check: # ~~!ArticleCreate:Feature->ArticleDeduplication:Feature~~ @@ -252,7 +258,8 @@ def has_permissions(self, account, article, limit_to_account): def is_acceptable(self, article: models.Article): """ conduct some deep validation on the article to make sure we will accept it - or the moment, this just means making sure it has a DOI and a fulltext + this just means making sure it has a DOI and a fulltext, and that its ISSNs + match a single journal """ try: bj = 
article.bibjson() @@ -266,12 +273,40 @@ def is_acceptable(self, article: models.Article): raise exceptions.ArticleNotAcceptable(message=Messages.EXCEPTION_NO_DOI_NO_FULLTEXT) self._validate_issns(bj) + journal = self.match_journal_with_validation(bj) # is journal in doaj (we do this check last as it has more performance impact) - journal = article.get_journal() if journal is None or not journal.is_in_doaj(): raise exceptions.ArticleNotAcceptable(message=Messages.EXCEPTION_ADDING_ARTICLE_TO_WITHDRAWN_JOURNAL) + @staticmethod + def match_journal_with_validation(article_bibjson: models.ArticleBibJSON): + pissn = article_bibjson.get_one_identifier("pissn") + eissn = article_bibjson.get_one_identifier("eissn") + + issns = [] + + if pissn is not None: + issns.append(pissn) + if eissn is not None: + issns.append(eissn) + + # Find an exact match, whether in_doaj or not + journal = models.Journal.find_by_issn_exact(issns) + + # check if only one journal matches pissn and eissn and if they are in the correct fields + # no need to check eissn, if pissn matches, pissn and eissn are different and only 1 journal has been found - then eissn matches too + if len(journal) != 1: + raise exceptions.ArticleNotAcceptable(message=Messages.EXCEPTION_MISMATCHED_ISSNS) + if pissn is not None: + if journal[0].bibjson().pissn != pissn: + raise exceptions.ArticleNotAcceptable(message=Messages.EXCEPTION_MISMATCHED_ISSNS) + if eissn is not None: + if journal[0].bibjson().eissn != eissn: + raise exceptions.ArticleNotAcceptable(message=Messages.EXCEPTION_MISMATCHED_ISSNS) + + return journal[0] + @staticmethod def is_legitimate_owner(article, owner): """ @@ -369,6 +404,10 @@ def issn_ownership_status(article, owner): issns = b.get_identifiers(b.P_ISSN) issns += b.get_identifiers(b.E_ISSN) + # FIXME: Duplicate check due to inconsistent control flow (result vs exception) + if len(issns) == 0: + raise exceptions.ArticleNotAcceptable(message=Messages.EXCEPTION_NO_ISSNS) + owned = [] shared = [] 
unowned = [] diff --git a/portality/bll/services/journal.py b/portality/bll/services/journal.py index 763ad2d132..fdad582080 100644 --- a/portality/bll/services/journal.py +++ b/portality/bll/services/journal.py @@ -1,27 +1,25 @@ -import csv import logging -import random -import re -import string -from datetime import datetime -from portality import lock +from portality.lib.argvalidate import argvalidate +from portality.lib import dates from portality import models, constants from portality.bll import exceptions -from portality.bll.doaj import DOAJ from portality.core import app -from portality.crosswalks.journal_questions import Journal2QuestionXwalk -from portality.lib import dates -from portality.lib.argvalidate import argvalidate +from portality import lock +from portality.bll.doaj import DOAJ from portality.lib.dates import FMT_DATETIME_SHORT -from portality.store import StoreFactory, prune_container +from portality.store import StoreFactory, prune_container, StoreException +from portality.crosswalks.journal_questions import Journal2QuestionXwalk +from portality.util import no_op + +from datetime import datetime, timedelta +import re, csv, random, string class JournalService(object): """ ~~Journal:Service~~ """ - def journal_2_application(self, journal, account=None, keep_editors=False): """ Function to convert a given journal into an application object. 
@@ -40,8 +38,8 @@ def journal_2_application(self, journal, account=None, keep_editors=False): # first validate the incoming arguments to ensure that we've got the right thing argvalidate("journal_2_application", [ - {"arg": journal, "instance": models.Journal, "allow_none": False, "arg_name": "journal"}, - {"arg": account, "instance": models.Account, "arg_name": "account"} + {"arg": journal, "instance" : models.Journal, "allow_none" : False, "arg_name" : "journal"}, + {"arg" : account, "instance" : models.Account, "arg_name" : "account"} ], exceptions.ArgumentException) if app.logger.isEnabledFor(logging.DEBUG): app.logger.debug("Entering journal_2_application") @@ -52,10 +50,9 @@ def journal_2_application(self, journal, account=None, keep_editors=False): # if an account is specified, check that it is allowed to perform this action if account is not None: try: - authService.can_create_update_request(account, journal) # throws exception if not allowed + authService.can_create_update_request(account, journal) # throws exception if not allowed except exceptions.AuthoriseException as e: - msg = "Account {x} is not permitted to create an update request on journal {y}".format(x=account.id, - y=journal.id) + msg = "Account {x} is not permitted to create an update request on journal {y}".format(x=account.id, y=journal.id) app.logger.info(msg) e.args += (msg,) raise @@ -64,7 +61,7 @@ def journal_2_application(self, journal, account=None, keep_editors=False): bj = journal.bibjson() notes = journal.notes - application = models.Suggestion() # ~~-> Application:Model~~ + application = models.Suggestion() # ~~-> Application:Model~~ application.set_application_status(constants.APPLICATION_STATUS_UPDATE_REQUEST) application.set_current_journal(journal.id) if keep_editors is True: @@ -82,8 +79,7 @@ def journal_2_application(self, journal, account=None, keep_editors=False): application.set_bibjson(bj) application.date_applied = dates.now_str() - if 
app.logger.isEnabledFor(logging.DEBUG): app.logger.debug( - "Completed journal_2_application; return application object") + if app.logger.isEnabledFor(logging.DEBUG): app.logger.debug("Completed journal_2_application; return application object") return application def journal(self, journal_id, lock_journal=False, lock_account=None, lock_timeout=None): @@ -100,10 +96,10 @@ def journal(self, journal_id, lock_journal=False, lock_account=None, lock_timeou """ # first validate the incoming arguments to ensure that we've got the right thing argvalidate("journal", [ - {"arg": journal_id, "allow_none": False, "arg_name": "journal_id"}, - {"arg": lock_journal, "instance": bool, "allow_none": False, "arg_name": "lock_journal"}, - {"arg": lock_account, "instance": models.Account, "allow_none": True, "arg_name": "lock_account"}, - {"arg": lock_timeout, "instance": int, "allow_none": True, "arg_name": "lock_timeout"} + {"arg": journal_id, "allow_none" : False, "arg_name" : "journal_id"}, + {"arg": lock_journal, "instance" : bool, "allow_none" : False, "arg_name" : "lock_journal"}, + {"arg": lock_account, "instance" : models.Account, "allow_none" : True, "arg_name" : "lock_account"}, + {"arg": lock_timeout, "instance" : int, "allow_none" : True, "arg_name" : "lock_timeout"} ], exceptions.ArgumentException) # retrieve the journal @@ -116,12 +112,11 @@ def journal(self, journal_id, lock_journal=False, lock_account=None, lock_timeou # ~~->Lock:Feature~~ the_lock = lock.lock(constants.LOCK_JOURNAL, journal_id, lock_account.id, lock_timeout) else: - raise exceptions.ArgumentException( - "If you specify lock_journal on journal retrieval, you must also provide lock_account") + raise exceptions.ArgumentException("If you specify lock_journal on journal retrieval, you must also provide lock_account") return journal, the_lock - def csv(self, prune=True): + def csv(self, prune=True, logger=None): """ Generate the Journal CSV @@ -133,43 +128,55 @@ def csv(self, prune=True): """ # first 
validate the incoming arguments to ensure that we've got the right thing argvalidate("csv", [ - {"arg": prune, "allow_none": False, "arg_name": "prune"} + {"arg": prune, "allow_none" : False, "arg_name" : "prune"}, + {"arg": logger, "allow_none": True, "arg_name": "logger"} ], exceptions.ArgumentException) + # None isn't executable, so convert logger to NO-OP + if logger is None: + logger = no_op + # ~~->FileStoreTemp:Feature~~ filename = 'journalcsv__doaj_' + dates.now_str(FMT_DATETIME_SHORT) + '_utf8.csv' container_id = app.config.get("STORE_CACHE_CONTAINER") tmpStore = StoreFactory.tmp() - out = tmpStore.path(container_id, filename, create_container=True, must_exist=False) + try: + out = tmpStore.path(container_id, filename, create_container=True, must_exist=False) + logger("Temporary CSV will be written to {x}".format(x=out)) + except StoreException as e: + logger("Could not create temporary CSV file: {x}".format(x=e)) + raise e with open(out, 'w', encoding='utf-8') as csvfile: - self._make_journals_csv(csvfile) + self._make_journals_csv(csvfile, logger=logger) + logger("Wrote CSV to output file {x}".format(x=out)) # ~~->FileStore:Feature~~ mainStore = StoreFactory.get("cache") try: mainStore.store(container_id, filename, source_path=out) url = mainStore.url(container_id, filename) + logger("Stored CSV in main cache store at {x}".format(x=url)) finally: - tmpStore.delete_file(container_id, - filename) # don't delete the container, just in case someone else is writing to it + tmpStore.delete_file(container_id, filename) # don't delete the container, just in case someone else is writing to it + logger("Deleted file from tmp store") action_register = [] if prune: + logger("Pruning old CSVs from store") def sort(filelist): rx = "journalcsv__doaj_(.+?)_utf8.csv" - return sorted(filelist, - key=lambda x: datetime.strptime(re.match(rx, x).groups(1)[0], FMT_DATETIME_SHORT), - reverse=True) + return sorted(filelist, key=lambda x: datetime.strptime(re.match(rx, 
x).groups(1)[0], FMT_DATETIME_SHORT), reverse=True) def _filter(f_name): return f_name.startswith("journalcsv__") - - action_register = prune_container(mainStore, container_id, sort, filter=_filter, keep=2) + action_register = prune_container(mainStore, container_id, sort, filter=_filter, keep=2, logger=logger) + logger("Pruned old CSVs from store") # update the ES record to point to the new file # ~~-> Cache:Model~~ models.Cache.cache_csv(url) + logger("Stored CSV URL in ES Cache") return url, action_register def admin_csv(self, file_path, account_sub_length=8, obscure_accounts=True, add_sensitive_account_info=False): @@ -193,9 +200,7 @@ def usernames(j): if o in unmap: sub = unmap[o] else: - sub = "".join( - random.choice(string.ascii_lowercase + string.ascii_uppercase + string.digits) for i in - range(account_sub_length)) + sub = "".join(random.choice(string.ascii_lowercase + string.ascii_uppercase + string.digits) for i in range(account_sub_length)) unmap[o] = sub return [("Owner", sub)] else: @@ -219,11 +224,12 @@ def acc_email(j): self._make_journals_csv(f, extra_cols) @staticmethod - def _make_journals_csv(file_object, additional_columns=None): + def _make_journals_csv(file_object, additional_columns=None, logger=None): """ Make a CSV file of information for all journals. :param file_object: a utf8 encoded file object. 
""" + logger = logger if logger is not None else lambda x: x YES_NO = {True: 'Yes', False: 'No', None: '', '': ''} def _get_doaj_meta_kvs(journal): @@ -254,29 +260,46 @@ def _get_article_kvs(journal): # ~~!JournalCSV:Feature->Journal:Model~~ cols = {} - for j in models.Journal.all_in_doaj(page_size=1000): # Fixme: limited by ES, this may not be sufficient + for j in models.Journal.all_in_doaj(page_size=1000): #Fixme: limited by ES, this may not be sufficient + export_start = datetime.utcnow() + logger("Exporting journal {x}".format(x=j.id)) + + time_log = [] bj = j.bibjson() issn = bj.get_one_identifier(idtype=bj.P_ISSN) if issn is None: issn = bj.get_one_identifier(idtype=bj.E_ISSN) + time_log.append("{x} - got issn".format(x=datetime.utcnow())) + if issn is None: continue # ~~!JournalCSV:Feature->JournalQuestions:Crosswalk~~ kvs = Journal2QuestionXwalk.journal2question(j) + time_log.append("{x} - crosswalked questions".format(x=datetime.utcnow())) meta_kvs = _get_doaj_meta_kvs(j) + time_log.append("{x} - got meta kvs".format(x=datetime.utcnow())) article_kvs = _get_article_kvs(j) + time_log.append("{x} - got article kvs".format(x=datetime.utcnow())) additionals = [] if additional_columns is not None: for col in additional_columns: additionals += col(j) + time_log.append("{x} - got additionals".format(x=datetime.utcnow())) cols[issn] = kvs + meta_kvs + article_kvs + additionals # Get the toc URL separately from the meta kvs because it needs to be inserted earlier in the CSV # ~~-> ToC:WebRoute~~ toc_kv = _get_doaj_toc_kv(j) cols[issn].insert(2, toc_kv) + time_log.append("{x} - got toc kvs".format(x=datetime.utcnow())) + export_end = datetime.utcnow() + if export_end - export_start > timedelta(seconds=10): + for l in time_log: + logger(l) + + logger("All journals exported") issns = cols.keys() csvwriter = csv.writer(file_object) @@ -287,3 +310,5 @@ def _get_article_kvs(journal): csvwriter.writerow(qs) vs = [v for _, v in cols[i]] csvwriter.writerow(vs) + 
logger("CSV Written") + diff --git a/portality/forms/application_forms.py b/portality/forms/application_forms.py index d1f8a44d20..33cd4c1eb7 100644 --- a/portality/forms/application_forms.py +++ b/portality/forms/application_forms.py @@ -169,18 +169,21 @@ class FieldDefinitions: "contexts": { "admin": { "widgets": [ + "trim_whitespace", # ~~^-> TrimWhitespace:FormWidget~~ "click_to_copy", # ~~^-> ClickToCopy:FormWidget~~ ] }, "editor": { "disabled": True, "widgets": [ + "trim_whitespace", # ~~^-> TrimWhitespace:FormWidget~~ "click_to_copy", # ~~^-> ClickToCopy:FormWidget~~ ] }, "associate_editor": { "disabled": True, "widgets": [ + "trim_whitespace", # ~~^-> TrimWhitespace:FormWidget~~ "click_to_copy", # ~~^-> ClickToCopy:FormWidget~~ ] }, @@ -212,16 +215,19 @@ class FieldDefinitions: }, "admin": { "widgets": [ + "trim_whitespace", # ~~^-> TrimWhitespace:FormWidget~~ "click_to_copy", # ~~^-> ClickToCopy:FormWidget~~ ] }, "associate_editor": { "widgets": [ + "trim_whitespace", # ~~^-> TrimWhitespace:FormWidget~~ "click_to_copy", # ~~^-> ClickToCopy:FormWidget~~ ] }, "editor": { "widgets": [ + "trim_whitespace", # ~~^-> TrimWhitespace:FormWidget~~ "click_to_copy", # ~~^-> ClickToCopy:FormWidget~~ ] } @@ -474,7 +480,7 @@ class FieldDefinitions: ], "widgets": [ "trim_whitespace", # ~~^-> TrimWhitespace:FormWidget~~ - {"autocomplete": {"type" : "journal", "field": "bibjson.publisher.name.exact"}}, + {"autocomplete": {"type" : "journal", "field": "bibjson.publisher.name.exact"}}, # ~~^-> Autocomplete:FormWidget~~ "full_contents" # ~~^->FullContents:FormWidget~~ ], "help": { @@ -486,16 +492,22 @@ class FieldDefinitions: }, "admin": { "widgets": [ + "trim_whitespace", # ~~^-> TrimWhitespace:FormWidget~~ + {"autocomplete": {"type": "journal", "field": "bibjson.publisher.name.exact"}}, # ~~^-> Autocomplete:FormWidget~~ "click_to_copy", # ~~^-> ClickToCopy:FormWidget~~ ] }, "associate_editor": { "widgets": [ + "trim_whitespace", # ~~^-> TrimWhitespace:FormWidget~~ + 
{"autocomplete": {"type": "journal", "field": "bibjson.publisher.name.exact"}}, # ~~^-> Autocomplete:FormWidget~~ "click_to_copy", # ~~^-> ClickToCopy:FormWidget~~ ] }, "editor": { "widgets": [ + "trim_whitespace", # ~~^-> TrimWhitespace:FormWidget~~ + {"autocomplete": {"type": "journal", "field": "bibjson.publisher.name.exact"}}, # ~~^-> Autocomplete:FormWidget~~ "click_to_copy", # ~~^-> ClickToCopy:FormWidget~~ ] } @@ -565,7 +577,7 @@ class FieldDefinitions: }, "widgets": [ "trim_whitespace", # ~~^-> TrimWhitespace:FormWidget~~ - {"autocomplete": {"type" : "journal", "field": "bibjson.institution.name.exact"}}, + {"autocomplete": {"type" : "journal", "field": "bibjson.institution.name.exact"}}, # ~~^-> Autocomplete:FormWidget~~ "full_contents" # ~~^->FullContents:FormWidget~~ ] } @@ -1639,7 +1651,7 @@ class FieldDefinitions: "owner_exists" ], "widgets": [ - {"autocomplete": {"type" : "account", "field": "id", "include" : False}}, + {"autocomplete": {"type" : "account", "field": "id", "include" : False}}, # ~~^-> Autocomplete:FormWidget~~ "clickable_owner" ], "contexts" : { @@ -1697,7 +1709,7 @@ class FieldDefinitions: "label": "Group", "input": "text", "widgets": [ - {"autocomplete": {"type" : "editor_group", "field": "name", "include" : False}} + {"autocomplete": {"type" : "editor_group", "field": "name", "include" : False}} # ~~^-> Autocomplete:FormWidget~~ ], "contexts" : { "editor" : { @@ -1705,7 +1717,7 @@ class FieldDefinitions: }, "admin" : { "widgets" : [ - {"autocomplete": {"type": "editor_group", "field": "name", "include" : False}}, + {"autocomplete": {"type": "editor_group", "field": "name", "include" : False}}, # ~~^-> Autocomplete:FormWidget~~ {"load_editors" : {"field" : "editor"}} ] } diff --git a/portality/forms/application_processors.py b/portality/forms/application_processors.py index 13a294d14d..1cd426c1f6 100644 --- a/portality/forms/application_processors.py +++ b/portality/forms/application_processors.py @@ -198,8 +198,11 @@ def 
_patch_target_note_id(self): for note in self.target.notes: note_date = dates.parse(note['date']) if not note.get('author_id') and note_date > dates.before_now(60): - note['author_id'] = current_user.id - + try: + note['author_id'] = current_user.id + except AttributeError: + # Skip if we don't have a current_user + pass class NewApplication(ApplicationProcessor): @@ -307,7 +310,6 @@ def patch_target(self): if (self.target.owner is None or self.target.owner == "") and (self.source.owner is not None): self.target.set_owner(self.source.owner) - def finalise(self, account, save_target=True, email_alert=True): """ account is the administrator account carrying out the action @@ -326,7 +328,6 @@ def finalise(self, account, save_target=True, email_alert=True): elif not j.is_in_doaj(): raise Exception(Messages.EXCEPTION_EDITING_WITHDRAWN_JOURNAL) - # if we are allowed to finalise, kick this up to the superclass super(AdminApplication, self).finalise() @@ -813,7 +814,6 @@ def patch_target(self): if (self.target.owner is None or self.target.owner == "") and (self.source.owner is not None): self.target.set_owner(self.source.owner) - def finalise(self): # FIXME: this first one, we ought to deal with outside the form context, but for the time being this # can be carried over from the old implementation diff --git a/portality/lib/plausible.py b/portality/lib/plausible.py index 2aa602d986..90b1b8f46b 100644 --- a/portality/lib/plausible.py +++ b/portality/lib/plausible.py @@ -62,7 +62,7 @@ def send_event(goal: str, on_completed=None, **props_kwargs): def _send(): resp = requests.post(plausible_api_url, json=payload, headers=headers) if resp.status_code >= 300: - logger.warning(f'send plausible event api fail. [{resp.status_code}][{resp.text}]') + logger.warning(f'Send plausible event API fail. 
snd: [{resp.url}] [{headers}] [{payload}] rcv: [{resp.status_code}] [{resp.text}]') if on_completed: on_completed(resp) diff --git a/portality/models/background.py b/portality/models/background.py index ac3d3bfc65..604eccc95d 100644 --- a/portality/models/background.py +++ b/portality/models/background.py @@ -152,13 +152,16 @@ def pretty_audit(self): class StdOutBackgroundJob(BackgroundJob): - def __init__(self, inner): + def __init__(self, inner, force_logging=False): super(StdOutBackgroundJob, self).__init__(**inner.data) + self._force_logging = force_logging def add_audit_message(self, msg, timestamp=None): super(StdOutBackgroundJob, self).add_audit_message(msg, timestamp) - if app.config.get("DOAJENV") == 'dev': - print(msg) + if app.config.get("DOAJENV") == 'dev' or self._force_logging: + if timestamp is None: + timestamp = dates.now_str_with_microseconds() + print("[" + timestamp + "] " + msg) # ~~-> DataObj:Library~~ diff --git a/portality/models/v2/journal.py b/portality/models/v2/journal.py index d83be0d1ce..0160fb0682 100644 --- a/portality/models/v2/journal.py +++ b/portality/models/v2/journal.py @@ -70,6 +70,22 @@ def find_by_issn(cls, issns, in_doaj=None, max=10): records = [cls(**r.get("_source")) for r in result.get("hits", {}).get("hits", [])] return records + @classmethod + def find_by_issn_exact(cls, issns, in_doaj=None, max=2): + """ + Finds journal that matches given issns exactly - if no data problems should always be only 1 + """ + if not isinstance(issns, list): + issns = [issns] + if len(issns) > 2: + return [] + q = JournalQuery() + q.find_by_issn_exact(issns, in_doaj=in_doaj, max=max) + result = cls.query(q=q.query) + # create an array of objects, using cls rather than Journal, which means subclasses can use it too + records = [cls(**r.get("_source")) for r in result.get("hits", {}).get("hits", [])] + return records + @classmethod def issns_by_owner(cls, owner, in_doaj=None): q = IssnQuery(owner, in_doaj=in_doaj) @@ -920,6 +936,16 @@ class 
JournalQuery(object): } } + must_query = { + "track_total_hits": True, + "query": { + "bool": { + "must": [ + ] + } + } + } + all_doaj = { "track_total_hits": True, "query": { @@ -945,6 +971,14 @@ def find_by_issn(self, issns, in_doaj=None, max=10): self.query["query"]["bool"]["must"].append({"term": {"admin.in_doaj": in_doaj}}) self.query["size"] = max + def find_by_issn_exact(self, issns, in_doaj=None, max=10): + self.query = deepcopy(self.must_query) + for issn in issns: + self.query["query"]["bool"]["must"].append({"term": {"index.issn.exact": issn}}) + if in_doaj is not None: + self.query["query"]["bool"]["must"].append({"term": {"admin.in_doaj": in_doaj}}) + self.query["size"] = max + def all_in_doaj(self): q = deepcopy(self.all_doaj) if self.minified: diff --git a/portality/scripts/journalcsv.py b/portality/scripts/journalcsv.py index 7c00cbdf41..dedfb51c9b 100644 --- a/portality/scripts/journalcsv.py +++ b/portality/scripts/journalcsv.py @@ -9,10 +9,17 @@ exit() user = app.config.get("SYSTEM_USERNAME") + print("Running journal CSV export for user {}".format(user)) + job = journal_csv.JournalCSVBackgroundTask.prepare(user) - job = StdOutBackgroundJob(job) + job = StdOutBackgroundJob(job, force_logging=True) + print("Background Job prepared with id {}".format(job.id)) + task = journal_csv.JournalCSVBackgroundTask(job) + print("Background task created") + BackgroundApi.execute(task) + print("Finished journal CSV export for user {}".format(user)) diff --git a/portality/scripts/journals_update_via_csv.py b/portality/scripts/journals_update_via_csv.py index 298b7c817b..c696068a85 100644 --- a/portality/scripts/journals_update_via_csv.py +++ b/portality/scripts/journals_update_via_csv.py @@ -82,6 +82,7 @@ reader = csv.DictReader(g, fieldnames=header_row) # verify header row with current CSV headers, report errors + # TODO: Include 'Owner' field - but we should probably base this process off the AdminCSV too. 
expected_headers = JournalFixtureFactory.csv_headers() # Always perform a match check on supplied headers, not counting order @@ -155,6 +156,14 @@ if len(updates) > 0: [print(upd) for upd in updates] + # Check we have the expected owner (if supplied) before proceeding to create an update request + own = row.get('Owner') + if own is not None: + if own.strip().lower() != j.owner.strip().lower(): + print('ABORTING - supplied owner {0} mismatches journal owner {1}.'.format(own, j.owner)) + writer.writerow([j.id, ' | '.join(updates), 'COULD NOT UPDATE - Owner mismatch. Expected {0} Got {1}'.format(own, j.owner)]) + continue + # Create an update request for this journal update_req = None jlock = None @@ -204,7 +213,7 @@ # Add note to UR if supplied if note: - fc.target.add_note(note) + fc.target.add_note(note, author_id=sys_acc.id) if not args.manual_review: # This is the update request, in 'update request' state diff --git a/portality/scripts/manage_background_jobs.py b/portality/scripts/manage_background_jobs.py index fbfa648f8b..4faa18193d 100644 --- a/portality/scripts/manage_background_jobs.py +++ b/portality/scripts/manage_background_jobs.py @@ -22,45 +22,64 @@ from portality.lib import dates from portality.lib.dates import DEFAULT_TIMESTAMP_VAL +from portality.tasks.anon_export import AnonExportBackgroundTask +from portality.tasks.article_bulk_delete import ArticleBulkDeleteBackgroundTask +from portality.tasks.article_cleanup_sync import ArticleCleanupSyncBackgroundTask +from portality.tasks.article_duplicate_report import ArticleDuplicateReportBackgroundTask +from portality.tasks.async_workflow_notifications import AsyncWorkflowBackgroundTask +from portality.tasks.check_latest_es_backup import CheckLatestESBackupBackgroundTask +# from portality.tasks.find_discontinued_soon import FindDiscontinuedSoonBackgroundTask +from portality.tasks.harvester import HarvesterBackgroundTask from portality.tasks.ingestarticles import IngestArticlesBackgroundTask -from 
portality.tasks.preservation import PreservationBackgroundTask -from portality.tasks.suggestion_bulk_edit import SuggestionBulkEditBackgroundTask -from portality.tasks.sitemap import SitemapBackgroundTask -from portality.tasks.read_news import ReadNewsBackgroundTask +from portality.tasks.journal_bulk_delete import JournalBulkDeleteBackgroundTask +from portality.tasks.journal_bulk_edit import JournalBulkEditBackgroundTask from portality.tasks.journal_csv import JournalCSVBackgroundTask -from portality.tasks.article_cleanup_sync import ArticleCleanupSyncBackgroundTask from portality.tasks.journal_in_out_doaj import SetInDOAJBackgroundTask -from portality.tasks.check_latest_es_backup import CheckLatestESBackupBackgroundTask +from portality.tasks.preservation import PreservationBackgroundTask from portality.tasks.prune_es_backups import PruneESBackupsBackgroundTask from portality.tasks.public_data_dump import PublicDataDumpBackgroundTask -from portality.tasks.harvester import HarvesterBackgroundTask -from portality.tasks.anon_export import AnonExportBackgroundTask +from portality.tasks.read_news import ReadNewsBackgroundTask +from portality.tasks.reporting import ReportingBackgroundTask +from portality.tasks.sitemap import SitemapBackgroundTask +from portality.tasks.suggestion_bulk_edit import SuggestionBulkEditBackgroundTask + +from portality.background import BackgroundApi # dict of {task_name: task_class} so we can interact with the jobs HANDLERS = { - PreservationBackgroundTask.__action__:PreservationBackgroundTask, + AnonExportBackgroundTask.__action__: AnonExportBackgroundTask, + ArticleBulkDeleteBackgroundTask.__action__: ArticleBulkDeleteBackgroundTask, + ArticleCleanupSyncBackgroundTask.__action__: ArticleCleanupSyncBackgroundTask, + ArticleDuplicateReportBackgroundTask.__action__: ArticleDuplicateReportBackgroundTask, + AsyncWorkflowBackgroundTask.__action__: AsyncWorkflowBackgroundTask, + CheckLatestESBackupBackgroundTask.__action__: 
CheckLatestESBackupBackgroundTask, + # FindDiscontinuedSoonBackgroundTask.__action__: FindDiscontinuedSoonBackgroundTask, + HarvesterBackgroundTask.__action__: HarvesterBackgroundTask, IngestArticlesBackgroundTask.__action__: IngestArticlesBackgroundTask, - SuggestionBulkEditBackgroundTask.__action__: SuggestionBulkEditBackgroundTask, - SitemapBackgroundTask.__action__: SitemapBackgroundTask, - ReadNewsBackgroundTask.__action__: ReadNewsBackgroundTask, + JournalBulkDeleteBackgroundTask.__action__: JournalBulkDeleteBackgroundTask, + JournalBulkEditBackgroundTask.__action__: JournalBulkEditBackgroundTask, JournalCSVBackgroundTask.__action__: JournalCSVBackgroundTask, - ArticleCleanupSyncBackgroundTask.__action__: ArticleCleanupSyncBackgroundTask, SetInDOAJBackgroundTask.__action__: SetInDOAJBackgroundTask, - CheckLatestESBackupBackgroundTask.__action__: CheckLatestESBackupBackgroundTask, + PreservationBackgroundTask.__action__:PreservationBackgroundTask, PruneESBackupsBackgroundTask.__action__: PruneESBackupsBackgroundTask, PublicDataDumpBackgroundTask.__action__: PublicDataDumpBackgroundTask, - HarvesterBackgroundTask.__action__: HarvesterBackgroundTask, - AnonExportBackgroundTask.__action__: AnonExportBackgroundTask, + ReadNewsBackgroundTask.__action__: ReadNewsBackgroundTask, + ReportingBackgroundTask.__action__: ReportingBackgroundTask, + SitemapBackgroundTask.__action__: SitemapBackgroundTask, + SuggestionBulkEditBackgroundTask.__action__: SuggestionBulkEditBackgroundTask } -def manage_jobs(verb, action, status, from_date, to_date): +def manage_jobs(verb, action, status, from_date, to_date, prompt=True): q = JobsQuery(action, status, from_date, to_date) jobs = models.BackgroundJob.q2obj(q=q.query()) print('You are about to {verb} {count} job(s)'.format(verb=verb, count=len(jobs))) - doit = input('Proceed? [y\\N] ') + + doit = "y" + if prompt: + doit = input('Proceed? 
[y\\N] ') if doit.lower() == 'y': print('Please wait...') @@ -70,7 +89,7 @@ def manage_jobs(verb, action, status, from_date, to_date): continue job.add_audit_message("Job {pp} from job management script.".format( - pp={'requeue': 'requeued', 'cancel': 'cancelled'}[verb])) + pp={'requeue': 'requeued', 'cancel': 'cancelled', "process": "processed"}[verb])) if verb == 'requeue': # Re-queue and execute immediately job.queue() @@ -78,18 +97,24 @@ def manage_jobs(verb, action, status, from_date, to_date): elif verb == 'cancel': # Just apply cancelled status job.cancel() job.save() + elif verb == 'process': + task = HANDLERS[job.action](job) # Just execute immediately without going through huey + BackgroundApi.execute(task) print('done.') else: print('No action.') -def requeue_jobs(action, status, from_date, to_date): - manage_jobs('requeue', action, status, from_date, to_date) +def requeue_jobs(action, status, from_date, to_date, prompt=True): + manage_jobs('requeue', action, status, from_date, to_date, prompt=prompt) + +def cancel_jobs(action, status, from_date, to_date, prompt=True): + manage_jobs('cancel', action, status, from_date, to_date, prompt=prompt) -def cancel_jobs(action, status, from_date, to_date): - manage_jobs('cancel', action, status, from_date, to_date) +def process_jobs(action, status, from_date, to_date, prompt=True): + manage_jobs("process", action, status, from_date, to_date, prompt=prompt) class JobsQuery(object): @@ -127,6 +152,8 @@ def query(self): help='Add these jobs back on the job queue for processing', action='store_true') parser.add_argument('-c', '--cancel', help='Cancel these jobs (set their status to "cancelled")', action='store_true') + parser.add_argument("-p", "--process", + help="Immediately process these jobs on the command line", action="store_true") parser.add_argument('-s', '--status', help='Filter for job status. 
Default is "queued"', default='queued') @@ -139,15 +166,18 @@ def query(self): parser.add_argument('-t', '--to_date', help='Date to which to look for jobs in the given type and status', default=dates.now_str()) + parser.add_argument("-y", "--yes", help="Answer yes to all prompts", action="store_true") args = parser.parse_args() if args.requeue and args.cancel: print('Use only --requeue OR --cancel, not both.') exit(1) elif args.requeue: - requeue_jobs(args.action, args.status, args.from_date, args.to_date) + requeue_jobs(args.action, args.status, args.from_date, args.to_date, prompt=False if args.yes else True) elif args.cancel: - cancel_jobs(args.action, args.status, args.from_date, args.to_date) + cancel_jobs(args.action, args.status, args.from_date, args.to_date, prompt=False if args.yes else True) + elif args.process: + process_jobs(args.action, args.status, args.from_date, args.to_date, prompt=False if args.yes else True) else: - print('You must supply one of --requeue or --cancel to run this script') + print('You must supply one of --requeue, --cancel or --process to run this script') exit(1) diff --git a/portality/settings.py b/portality/settings.py index 2bffdbab8a..cf19eaaaa2 100644 --- a/portality/settings.py +++ b/portality/settings.py @@ -9,7 +9,7 @@ # Application Version information # ~~->API:Feature~~ -DOAJ_VERSION = "6.3.16" +DOAJ_VERSION = "6.4.4" API_VERSION = "3.0.1" ###################################### @@ -427,7 +427,7 @@ HUEY_SCHEDULE = { "sitemap": {"month": "*", "day": "*", "day_of_week": "*", "hour": "8", "minute": "0"}, "reporting": {"month": "*", "day": "1", "day_of_week": "*", "hour": "0", "minute": "0"}, - "journal_csv": {"month": "*", "day": "*", "day_of_week": "*", "hour": "*", "minute": "35"}, + "journal_csv": CRON_NEVER, # {"month": "*", "day": "*", "day_of_week": "*", "hour": "*", "minute": "20"}, "read_news": {"month": "*", "day": "*", "day_of_week": "*", "hour": "*", "minute": "30"}, "article_cleanup_sync": {"month": "*", "day": 
"2", "day_of_week": "*", "hour": "0", "minute": "0"}, "async_workflow_notifications": {"month": "*", "day": "*", "day_of_week": "1", "hour": "5", "minute": "0"}, diff --git a/portality/static/js/doaj.fieldrender.edges.js b/portality/static/js/doaj.fieldrender.edges.js index 169ce93133..49faf4b543 100644 --- a/portality/static/js/doaj.fieldrender.edges.js +++ b/portality/static/js/doaj.fieldrender.edges.js @@ -645,13 +645,13 @@ $.extend(true, doaj, { toggle = ''; } var placeholder = 'Search ' + this.component.nodeCount + ' subjects'; - var frag = '

' + this.title + toggle + '

\ - '; // substitute in the component parts frag = frag.replace(/{{FILTERS}}/g, treeFrag); @@ -1832,10 +1832,10 @@ $.extend(true, doaj, { if (this.togglable) { toggle = ''; } - var frag = '

' + this.component.display + toggle + '

\ - '; // substitute in the component parts frag = frag.replace(/{{FILTERS}}/g, filterFrag + results); @@ -2083,10 +2083,10 @@ $.extend(true, doaj, { if (this.togglable) { toggle = ''; } - var frag = '

' + this.component.display + toggle + '

\ - '; // substitute in the component parts frag = frag.replace(/{{FILTERS}}/g, filterFrag + results); diff --git a/portality/store.py b/portality/store.py index 90300fb1aa..2d0935ee19 100644 --- a/portality/store.py +++ b/portality/store.py @@ -292,7 +292,8 @@ def list_container_ids(self): return [x for x in os.listdir(self.dir) if os.path.isdir(os.path.join(self.dir, x))] -def prune_container(storage, container_id, sort, filter=None, keep=1): +def prune_container(storage, container_id, sort, filter=None, keep=1, logger=None): + logger = logger if logger is not None else lambda x: x action_register = [] filelist = storage.list(container_id) @@ -316,7 +317,9 @@ def prune_container(storage, container_id, sort, filter=None, keep=1): #action_register.append("Considering files for retention in the following order: " + ", ".join(filtered_sorted)) remove = filtered_sorted[keep:] - action_register.append("Removed old files: " + ", ".join(remove)) + msg = "Removed old files: " + ", ".join(remove) + action_register.append(msg) + logger(msg) for fn in remove: storage.delete_file(container_id, fn) diff --git a/portality/tasks/ingestarticles.py b/portality/tasks/ingestarticles.py index de6991ab40..e798f4005d 100644 --- a/portality/tasks/ingestarticles.py +++ b/portality/tasks/ingestarticles.py @@ -312,11 +312,16 @@ def _process(self, file_upload: models.FileUpload): for article in articles: article.set_upload_id(file_upload.id) result = articleService.batch_create_articles(articles, account, add_journal_info=True) - except (IngestException, CrosswalkException) as e: - job.add_audit_message("IngestException: {msg}. Inner message: {inner}. Stack: {x}" - .format(msg=e.message, inner=e.inner_message, x=e.trace())) + except (IngestException, CrosswalkException, ArticleNotAcceptable) as e: + if hasattr(e, 'inner_message'): + job.add_audit_message("{exception}: {msg}. Inner message: {inner}. 
Stack: {x}" + .format(exception=e.__class__.__name__, msg=e.message, inner=e.inner_message, x=e.trace())) + file_upload.failed(e.message, e.inner_message) + else: + job.add_audit_message("{exception}: {msg}.".format(exception=e.__class__.__name__, msg=e.message)) + file_upload.failed(e.message) + job.outcome_fail() - file_upload.failed(e.message, e.inner_message) result = e.result try: file_failed(path) @@ -324,7 +329,7 @@ def _process(self, file_upload: models.FileUpload): except: job.add_audit_message("Error cleaning up file which caused IngestException: {x}" .format(x=traceback.format_exc())) - except (DuplicateArticleException, ArticleNotAcceptable) as e: + except DuplicateArticleException as e: job.add_audit_message(str(e)) job.outcome_fail() file_upload.failed(str(e)) diff --git a/portality/tasks/journal_csv.py b/portality/tasks/journal_csv.py index e863aeb9c4..9b5b74269d 100644 --- a/portality/tasks/journal_csv.py +++ b/portality/tasks/journal_csv.py @@ -15,12 +15,16 @@ def run(self): Execute the task as specified by the background_job :return: """ + + def logger(msg): + self.background_job.add_audit_message(msg) + job = self.background_job journalService = DOAJ.journalService() - url, action_register = journalService.csv() - for ar in action_register: - job.add_audit_message(ar) + url, action_register = journalService.csv(logger=logger) + # for ar in action_register: + # job.add_audit_message(ar) job.add_audit_message("CSV generated; will be served from {y}".format(y=url)) def cleanup(self): diff --git a/portality/templates/account/forgot.html b/portality/templates/account/forgot.html index 241525adfd..d8f5e9c837 100644 --- a/portality/templates/account/forgot.html +++ b/portality/templates/account/forgot.html @@ -3,7 +3,7 @@ {% block page_title %}Reset your password{% endblock %} {% block content %} -
+
@@ -23,5 +23,5 @@

Reset your password

-
+ {% endblock %} diff --git a/portality/templates/account/login.html b/portality/templates/account/login.html index 247149641a..726831e9e3 100644 --- a/portality/templates/account/login.html +++ b/portality/templates/account/login.html @@ -3,7 +3,7 @@ {% block page_title %}Login to your account{% endblock %} {% block content %} -
+
@@ -17,5 +17,5 @@

Login

-
+ {% endblock %} diff --git a/portality/templates/account/login_to_apply.html b/portality/templates/account/login_to_apply.html index 5d9ee3e8ed..556fbb71cb 100644 --- a/portality/templates/account/login_to_apply.html +++ b/portality/templates/account/login_to_apply.html @@ -3,7 +3,7 @@ {% block page_title %}Login to apply{% endblock %} {% block content %} -
+
@@ -46,5 +46,5 @@

Related help

-
+ {% endblock %} diff --git a/portality/templates/account/register.html b/portality/templates/account/register.html index 4f91d8250a..a497bd396a 100644 --- a/portality/templates/account/register.html +++ b/portality/templates/account/register.html @@ -12,7 +12,7 @@ {% endblock %} {% block content %} -
+
@@ -30,7 +30,7 @@

Register

-
+ {% endblock %} {% block extra_js_bottom %} diff --git a/portality/templates/account/reset.html b/portality/templates/account/reset.html index 2b459de104..fdacc27620 100644 --- a/portality/templates/account/reset.html +++ b/portality/templates/account/reset.html @@ -4,7 +4,7 @@ {% block content %} -
+
@@ -20,6 +20,6 @@

Hi {{ account.name or account.email }}

-
+ {% endblock %} diff --git a/portality/templates/api/current/api_docs.html b/portality/templates/api/current/api_docs.html index 9c5a2bc8e5..42d3d14588 100644 --- a/portality/templates/api/current/api_docs.html +++ b/portality/templates/api/current/api_docs.html @@ -14,7 +14,7 @@ {% endblock %} {% block content %} -
+
{# todo: this nav was bumping into swagger @@ -58,7 +58,7 @@

API

-
+ {% endblock %} {% block extra_js_bottom %} diff --git a/portality/templates/application_form/public_application.html b/portality/templates/application_form/public_application.html index af0e05d63e..31439b94d3 100644 --- a/portality/templates/application_form/public_application.html +++ b/portality/templates/application_form/public_application.html @@ -23,7 +23,7 @@ {% block content scoped %} -
+
{% include "application_form/_backend_validation.html" %}
@@ -64,7 +64,7 @@
-
+ {% endblock %} diff --git a/portality/templates/application_form/readonly_journal.html b/portality/templates/application_form/readonly_journal.html index 2fed49db61..fe429eea08 100644 --- a/portality/templates/application_form/readonly_journal.html +++ b/portality/templates/application_form/readonly_journal.html @@ -20,7 +20,7 @@ {% block content scoped %} -
+
@@ -47,7 +47,7 @@
-
+ {% endblock %} diff --git a/portality/templates/data/sponsors.html b/portality/templates/data/sponsors.html index 5dfee23989..a9723ad4d5 100644 --- a/portality/templates/data/sponsors.html +++ b/portality/templates/data/sponsors.html @@ -1,35 +1,7 @@ -{% if data.sponsors.gold %} -

Premier contributors

-
- {% for i in data.sponsors.gold %} - - {% endfor %} -
-{% endif %} - -
- -

Sustaining contributors

+

Contributors

- {% for i in data.sponsors.silver %} - - {% endfor %} -
- -
- -

Basic contributors

-
- {% for i in data.sponsors.bronze %} - {% endblock %} {% block extra_js_bottom %} diff --git a/portality/templates/doaj/contact.html b/portality/templates/doaj/contact.html index 2a4739dec8..2ffb86985f 100644 --- a/portality/templates/doaj/contact.html +++ b/portality/templates/doaj/contact.html @@ -1,7 +1,7 @@ {% extends "layouts/public_base.html" %} {% block content %} -
+

Submit your feedback and questions here. Feedback submitted about a particular journal is treated as confidential.

@@ -52,7 +52,7 @@
-
+
{% endblock %} diff --git a/portality/templates/doaj/index.html b/portality/templates/doaj/index.html index 02bb78b878..f24f55571b 100644 --- a/portality/templates/doaj/index.html +++ b/portality/templates/doaj/index.html @@ -74,7 +74,7 @@

DOAJ in numbers

{% endblock %} {% block content %} -
+
@@ -246,6 +246,6 @@

Recently-added journals

-
+ {% endblock %} diff --git a/portality/templates/doaj/journals_search.html b/portality/templates/doaj/journals_search.html index e38bedd18e..8eef0d2c63 100644 --- a/portality/templates/doaj/journals_search.html +++ b/portality/templates/doaj/journals_search.html @@ -10,10 +10,10 @@ {%- block meta_twitter_description -%}Find open access journals in DOAJ.{%- endblock -%} {% block content %} -
+
{% include "includes/search-help-modal.html" %} -
+ {% endblock %} {% block extra_js_bottom %} diff --git a/portality/templates/doaj/toc.html b/portality/templates/doaj/toc.html index 276d9392be..adcb4c5ad9 100644 --- a/portality/templates/doaj/toc.html +++ b/portality/templates/doaj/toc.html @@ -42,7 +42,7 @@ } %} -
+
{% if journal.last_manually_updated_since(days=30) %} @@ -450,7 +450,7 @@

Journal metadata

-
+ {% include "includes/_hotjar.html" %} {% endblock %} diff --git a/portality/templates/editor/editor_base.html b/portality/templates/editor/editor_base.html index 1fc7bfbc81..c116f3d622 100644 --- a/portality/templates/editor/editor_base.html +++ b/portality/templates/editor/editor_base.html @@ -9,10 +9,15 @@ {% endblock %} {% block content %} -
- {% block editor_content %} - {% endblock %} -
+
+

Editor dashboard

+ {% include 'editor/nav.html' %} + +
+ {% block editor_content %} + {% endblock %} +
+
{% include "includes/_hotjar.html" %} {% endblock %} diff --git a/portality/templates/includes/contribution_rates.html b/portality/templates/includes/contribution_rates.html index 29f28a8426..2449e24e3c 100644 --- a/portality/templates/includes/contribution_rates.html +++ b/portality/templates/includes/contribution_rates.html @@ -1,7 +1,7 @@ - + {% endblock %} diff --git a/portality/templates/openurl/help.html b/portality/templates/openurl/help.html index 75332c5637..c1a4eb4424 100644 --- a/portality/templates/openurl/help.html +++ b/portality/templates/openurl/help.html @@ -1,7 +1,7 @@ {% extends "layouts/public_base.html" %} {% block content %} -
+

Help

@@ -33,5 +33,5 @@

Supported OpenURL version

-
+ {% endblock %} diff --git a/portality/templates/publisher/help.html b/portality/templates/publisher/help.html index 9f4daddeb4..abf09a4cff 100644 --- a/portality/templates/publisher/help.html +++ b/portality/templates/publisher/help.html @@ -6,7 +6,7 @@

Help for publishers

-

Uploading metadata/article content a file

+

Uploading metadata/article content

There are three ways to upload article metadata to DOAJ:

@@ -20,6 +20,8 @@

Uploading metadata/article content a file

There are instructions on how to prepare and upload your XML file on our XML documentation page.

+

Are you receiving an error about one of your ISSNs that you haven't seen before? We recently changed the rules for uploading article metadata. We now require that a Print ISSN is in an issn tag and the Online ISSN is in an eissn tag. [See our sample XML file](https://doaj.org/docs/xml/#example-doaj-xml-file) for more information.

+

Failed XML uploads explained

This section explains the error messages that you may see when you upload article XML. Use the message in the 'Notes' column of your History of uploads table to correct your XML.

diff --git a/portality/templates/publisher/preservation.html b/portality/templates/publisher/preservation.html index c87a2fcb97..611eb380af 100644 --- a/portality/templates/publisher/preservation.html +++ b/portality/templates/publisher/preservation.html @@ -25,10 +25,10 @@

Guidance before uploading your file

  1. Only the full text of articles whose metadata is already uploaded to DOAJ can be sent to us. Check that your article metadata appears in DOAJ first.
  2. Only articles for journals indexed in DOAJ can be uploaded. -
  3. Collect the full texts into a package consisting of folders and files.
  4. -
  5. Compress the package into a ZIP file.
  6. -
  7. Upload the zipped package (on this page).
  8. -
  9. Check that the file has uploaded correctly in the History of Uploads section and is not bigger than 50MB.
  10. +
  11. Collect the full text files into a package containing folders and files.
  12. +
  13. Compress the package into a ZIP file. Keep the name of the file simple: avoid spaces, hyphens, underscores, special characters, etc
  14. +
  15. Upload the zipped package (on this page). It may not be bigger than 50MB.
  16. +
  17. Check that the file has uploaded correctly in the History of Uploads section.

The package must have the following structure:

diff --git a/portality/templates/publisher/publisher_base.html b/portality/templates/publisher/publisher_base.html index d4873f1fb5..96ba47ca72 100644 --- a/portality/templates/publisher/publisher_base.html +++ b/portality/templates/publisher/publisher_base.html @@ -7,14 +7,14 @@ {% block page_title %}Publisher dashboard{% endblock %} {% block content %} -
+

Publisher dashboard

{% include 'publisher/nav.html' %} {% block publisher_content %} {% endblock %} -
+ {% include "includes/_hotjar.html" %} {% endblock %} diff --git a/portality/ui/messages.py b/portality/ui/messages.py index ac7f9163bc..8eabd73f80 100644 --- a/portality/ui/messages.py +++ b/portality/ui/messages.py @@ -61,6 +61,7 @@ class Messages(object): EXCEPTION_NO_CONTRIBUTORS_EXPLANATION = "DOAJ requires at least one author for each article." EXCEPTION_TOO_MANY_ISSNS = "Too many ISSNs. Only 2 ISSNs are allowed: one Print ISSN and one Online ISSN." + EXCEPTION_MISMATCHED_ISSNS = "ISSNs provided don't match any journal." EXCEPTION_ISSNS_OF_THE_SAME_TYPE = "Both ISSNs have the same type: {type}" EXCEPTION_IDENTICAL_PISSN_AND_EISSN = "The Print and Online ISSNs supplied are identical. If you supply 2 ISSNs they must be different." EXCEPTION_NO_ISSNS = "Neither Print ISSN nor Online ISSN has been supplied. DOAJ requires at least one ISSN." diff --git a/portality/util.py b/portality/util.py index 84423e1b91..2a4e1f36f3 100644 --- a/portality/util.py +++ b/portality/util.py @@ -186,3 +186,7 @@ def get_full_url_safe(endpoint): except werkzeug.routing.BuildError: app.logger.warning(f'endpoint not found -- [{endpoint}]') return None + +def no_op(*args, **kwargs): + """ noop (no operation) function """ + pass \ No newline at end of file diff --git a/portality/view/publisher.py b/portality/view/publisher.py index 2410a8323a..c525ce176c 100644 --- a/portality/view/publisher.py +++ b/portality/view/publisher.py @@ -4,7 +4,7 @@ from portality.app_email import EmailException from portality import models -from portality.bll.exceptions import AuthoriseException, ArticleMergeConflict, DuplicateArticleException +from portality.bll.exceptions import AuthoriseException, ArticleMergeConflict, DuplicateArticleException, ArticleNotAcceptable from portality.decorators import ssl_required, restrict_to_role, write_required from portality.dao import ESMappingMissingError from portality.forms.application_forms import ApplicationFormFactory @@ -362,7 +362,8 @@ def metadata(): 
Messages.flash(Messages.ARTICLE_METADATA_MERGE_CONFLICT) except DuplicateArticleException: Messages.flash(Messages.ARTICLE_METADATA_UPDATE_CONFLICT) - + except ArticleNotAcceptable as e: + Messages.flash_with_param(e.message, "error") return fc.render_template(validated=validated) diff --git a/setup.py b/setup.py index a83bf6daf2..90bc90514a 100644 --- a/setup.py +++ b/setup.py @@ -5,7 +5,7 @@ setup( name='doaj', - version='6.3.16', + version='6.4.4', packages=find_packages(), install_requires=[ "awscli==1.20.50",