From 11b6ec88ad23c877475a96b7c631f0c5eb8f2a30 Mon Sep 17 00:00:00 2001
From: Jim Balhoff
Date: Wed, 23 Feb 2022 15:37:45 -0500
Subject: [PATCH] Optimize imports and reformat everything with IntelliJ defaults. (#458)

* Optimize imports and reformat everything with IntelliJ defaults.
* Fix test to use correct root type for complexes.
* Fix lookup test for new protein complex classification.
---
 .github/workflows/maven.yml | 18 +-
 INSTRUCTIONS.md | 73 +-
 README.md | 18 +-
 configuration.md | 6 +-
 minerva-cli/pom.xml | 206 +-
 .../minerva/cli/CommandLineInterface.java | 2671 ++++++-----
 .../src/main/resources/log4j.properties | 4 +-
 minerva-converter/pom.xml | 86 +-
 .../minerva/evidence/FindGoCodes.java | 103 +-
 .../minerva/explanation/Explanation.java | 55 +-
 .../minerva/explanation/ExplanationRule.java | 75 +-
 .../minerva/explanation/ExplanationTerm.java | 72 +-
 .../explanation/ExplanationTriple.java | 111 +-
 .../minerva/explanation/ModelExplanation.java | 71 +-
 .../minerva/legacy/LegoModelWalker.java | 837 ++--
 .../legacy/sparql/AnnotationExtension.java | 50 +-
 .../minerva/legacy/sparql/BasicGPADData.java | 140 +-
 .../legacy/sparql/DefaultGPADData.java | 274 +-
 .../legacy/sparql/ExportExplanation.java | 382 +-
 .../minerva/legacy/sparql/GPADData.java | 77 +-
 .../minerva/legacy/sparql/GPADEvidence.java | 160 +-
 .../legacy/sparql/GPADOperatorStatus.java | 6 +-
 .../minerva/legacy/sparql/GPADRenderer.java | 239 +-
 .../legacy/sparql/GPADSPARQLExport.java | 759 ++-
 .../minerva/taxon/FindTaxonTool.java | 139 +-
 .../minerva/evidence/FindGoCodesTest.java | 76 +-
 .../minerva/legacy/sparql/GPADSPARQLTest.java | 299 +-
 .../minerva/taxon/FindTaxonToolTest.java | 60 +-
 .../src/test/resources/catalog-v001.xml | 2 +-
 .../src/test/resources/log4j.properties | 4 +-
 .../BlazegraphMolecularModelManager.java | 1980 ++++----
 .../minerva/BlazegraphOntologyManager.java | 1640 ++++---
 .../minerva/CoreMolecularModelManager.java | 2919 ++++++------
 .../minerva/MinervaOWLGraphWrapper.java | 1290 +++--
 .../geneontology/minerva/ModelContainer.java | 324 +-
 .../minerva/MolecularModelManager.java | 1145 +++--
 .../UndoAwareMolecularModelManager.java | 596 ++-
 .../minerva/curie/CurieHandler.java | 37 +-
 .../minerva/curie/CurieMappings.java | 28 +-
 .../minerva/curie/CurieMappingsJsonld.java | 121 +-
 .../minerva/curie/DefaultCurieHandler.java | 122 +-
 .../minerva/curie/MappedCurieHandler.java | 232 +-
 .../minerva/json/InferenceProvider.java | 16 +-
 .../geneontology/minerva/json/JsonTools.java | 187 +-
 .../json/MolecularModelJsonRenderer.java | 1156 +++--
 .../minerva/model/ActivityUnit.java | 448 +-
 .../minerva/model/AnatomicalEntity.java | 8 +-
 .../minerva/model/BiologicalProcessUnit.java | 19 +-
 .../minerva/model/GoCamEntity.java | 338 +-
 .../minerva/model/GoCamModel.java | 611 ++-
 .../minerva/model/GoCamModelStats.java | 400 +-
 .../minerva/model/GoCamOccurent.java | 131 +-
 .../minerva/model/PhysicalEntity.java | 8 +-
 .../minerva/model/ProvenanceAnnotated.java | 85 +-
 .../minerva/util/AmigoContextGenerator.java | 194 +-
 .../minerva/util/AnnotationShorthand.java | 136 +-
 .../util/BlazegraphMutationCounter.java | 45 +-
 .../geneontology/minerva/util/DebugTools.java | 24 +-
 .../minerva/util/JenaOwlTool.java | 62 +-
 .../minerva/util/ReverseChangeGenerator.java | 52 +-
 .../minerva/validation/Enricher.java | 344 +-
 .../validation/ModelValidationReport.java | 146 +-
 .../validation/ModelValidationResult.java | 69 +-
 .../validation/OWLValidationReport.java | 22 +-
 .../minerva/validation/ShexConstraint.java | 205 +-
.../minerva/validation/ShexExplanation.java | 70 +- .../validation/ShexValidationReport.java | 134 +- .../minerva/validation/ShexValidator.java | 1980 ++++---- .../minerva/validation/ShexViolation.java | 44 +- .../geneontology/minerva/validation/Util.java | 296 +- .../validation/ValidationResultSet.java | 96 +- .../minerva/validation/Violation.java | 33 +- .../BatchPipelineValidationReport.java | 253 +- .../validation/pipeline/ErrorMessage.java | 54 +- .../src/main/java/owltools/cli/Opts.java | 338 +- .../main/java/owltools/gaf/eco/EcoMapper.java | 116 +- .../owltools/gaf/eco/EcoMapperFactory.java | 1080 +++-- .../java/owltools/gaf/eco/EcoMapperImpl.java | 86 +- .../owltools/gaf/eco/SimpleEcoMapper.java | 14 +- .../owltools/gaf/eco/SimpleEcoMapperImpl.java | 67 +- .../owltools/gaf/eco/TraversingEcoMapper.java | 126 +- .../gaf/eco/TraversingEcoMapperImpl.java | 210 +- .../java/owltools/io/CatalogXmlIRIMapper.java | 334 +- .../main/java/owltools/io/ParserWrapper.java | 35 +- .../main/java/owltools/util/OwlHelper.java | 631 ++- .../java/owltools/version/VersionInfo.java | 73 +- .../java/owltools/vocab/OBONamespaces.java | 19 +- .../owltools/vocab/OBOUpperVocabulary.java | 124 +- .../BlazegraphMolecularModelManagerTest.java | 649 ++- .../BlazegraphOntologyManagerTest.java | 478 +- .../CoreMolecularModelManagerTest.java | 178 +- .../minerva/MolecularModelManagerTest.java | 373 +- .../UndoAwareMolecularModelManagerTest.java | 149 +- .../curie/DefaultCurieHandlerTest.java | 108 +- .../json/MolecularModelJsonRendererTest.java | 589 ++- .../minerva/util/AnnotationShortHandTest.java | 34 +- .../minerva/validation/ShexValidatorTest.java | 190 +- .../src/test/resources/log4j.properties | 4 +- .../src/test/resources/mmg/catalog-v001.xml | 2 +- minerva-json/pom.xml | 30 +- .../minerva/json/JsonAnnotatedObject.java | 62 +- .../minerva/json/JsonAnnotation.java | 136 +- .../minerva/json/JsonEvidenceInfo.java | 6 +- .../geneontology/minerva/json/JsonModel.java | 70 +- .../minerva/json/JsonOwlFact.java | 104 +- .../minerva/json/JsonOwlIndividual.java | 88 +- .../minerva/json/JsonOwlObject.java | 284 +- .../minerva/json/JsonRelationInfo.java | 6 +- minerva-lookup/pom.xml | 70 +- .../bbop/golr/java/AbstractRetrieveGolr.java | 603 ++- .../golr/java/RetrieveGolrAnnotations.java | 407 +- .../golr/java/RetrieveGolrBioentities.java | 172 +- .../golr/java/RetrieveGolrOntologyClass.java | 279 +- .../lookup/CachingExternalLookupService.java | 156 +- .../lookup/CombinedExternalLookupService.java | 108 +- .../minerva/lookup/ExternalLookupService.java | 123 +- .../lookup/GolrExternalLookupService.java | 273 +- .../lookup/MonarchExternalLookupService.java | 172 +- .../minerva/lookup/TableLookupService.java | 107 +- .../lookup/GolrExternalLookupServiceTest.java | 376 +- minerva-server/pom.xml | 360 +- .../src/main/assembly/minerva-server.xml | 60 +- .../minerva/ModelReaderHelper.java | 160 +- .../server/AuthorizationRequestFilter.java | 69 +- .../server/GsonMessageBodyHandler.java | 181 +- .../LoggingApplicationEventListener.java | 175 +- .../minerva/server/RequireJsonpFilter.java | 27 +- .../minerva/server/StartUpTool.java | 824 ++-- .../handler/JsonOrJsonpBatchHandler.java | 571 ++- .../server/handler/M3BatchHandler.java | 484 +- .../server/handler/M3ExpressionParser.java | 255 +- .../minerva/server/handler/M3SeedHandler.java | 236 +- .../server/handler/MinervaRequest.java | 16 +- .../server/handler/MinervaResponse.java | 130 +- .../server/handler/ModelARTHandler.java | 236 +- .../minerva/server/handler/ModelCreator.java 
| 418 +- .../server/handler/ModelSearchHandler.java | 1192 +++-- .../server/handler/OperationsImpl.java | 1496 +++--- .../server/handler/OperationsTools.java | 215 +- .../server/handler/PacketIdGenerator.java | 22 +- .../minerva/server/handler/StatusHandler.java | 203 +- .../minerva/server/handler/TaxonHandler.java | 160 +- .../CachingInferenceProviderCreatorImpl.java | 158 +- .../inferences/InferenceProviderCreator.java | 2 +- .../InferenceProviderCreatorImpl.java | 437 +- .../inferences/MapInferenceProvider.java | 209 +- .../validation/BeforeSaveModelValidator.java | 102 +- .../validation/MinervaShexValidator.java | 65 +- .../resources/ModelSearchQueryTemplate.rq | 47 +- .../src/main/resources/log4j.properties | 5 +- .../server/handler/ARTHandlerTest.java | 728 ++- .../server/handler/BatchModelHandlerTest.java | 4159 ++++++++--------- .../server/handler/BatchTestTools.java | 433 +- .../server/handler/DataPropertyTest.java | 342 +- .../server/handler/LocalServerTest.java | 208 +- .../handler/M3ExpressionParserTest.java | 397 +- .../minerva/server/handler/ModelEditTest.java | 460 +- .../server/handler/ModelReasonerTest.java | 188 +- .../handler/ModelSearchHandlerTest.java | 1286 +++-- .../handler/ParallelModelReasonerTest.java | 398 +- .../server/handler/TaxonHandlerTest.java | 384 +- .../server/validation/ValidationTest.java | 512 +- .../ontology/catalog-for-validation.xml | 5 +- pom.xml | 765 +-- specs/README.md | 4 +- specs/gaf-to-lego.md | 20 +- specs/owl-model.md | 193 +- 167 files changed, 26409 insertions(+), 27400 deletions(-) diff --git a/.github/workflows/maven.yml b/.github/workflows/maven.yml index 8494a61e..b09c8149 100644 --- a/.github/workflows/maven.yml +++ b/.github/workflows/maven.yml @@ -9,7 +9,7 @@ on: - master - dev pull_request: - branches: + branches: - master - dev @@ -19,11 +19,11 @@ jobs: runs-on: ubuntu-latest steps: - - uses: actions/checkout@v2 - - name: Set up JDK 11 - uses: actions/setup-java@v2 - with: - java-version: '11' - distribution: 'adopt' - - name: Build with Maven - run: mvn test + - uses: actions/checkout@v2 + - name: Set up JDK 11 + uses: actions/setup-java@v2 + with: + java-version: '11' + distribution: 'adopt' + - name: Build with Maven + run: mvn test diff --git a/INSTRUCTIONS.md b/INSTRUCTIONS.md index e3cfc017..59ef9391 100644 --- a/INSTRUCTIONS.md +++ b/INSTRUCTIONS.md @@ -1,24 +1,24 @@ - [About this document](#about-this-document) - - [Building the server](#building-the-server) - - [Prerequisites to build the code](#prerequisites-to-build-the-code) - - [Building the Minerva Server](#building-the-minerva-server) - - [Running the Minerva Server](#running-the-minerva-server) - - [Prerequisites](#prerequisites) - - [Start the MolecularModelManager server from the command line](#start-the-molecularmodelmanager-server-from-the-command-line) - - [Start Server via Eclipse:](#start-server-via-eclipse) - - [Running Tests](#running-tests) - - [Failing Tests](#failing-tests) - - [Quick Test via `curl`](#quick-test-via-curl) - - [Sonarqube](#Sonarqube) - - [Obtaining `owl-models` and `go-lego.owl`](#obtaining-owl-models-and-go-legoowl) - - [Useful source files for learning](#useful-source-files-for-learning) - - [Using the Blazegraph model store](#using-the-blazegraph-model-store) - - [Create a new Blazegraph journal from a directory of existing model files](#create-a-new-blazegraph-journal-from-a-directory-of-existing-model-files) - - [Dump model files from a Blazegraph journal that is not in 
use](#dump-model-files-from-a-blazegraph-journal-that-is-not-in-use) - - [Start the Minerva Server with configuration for Blazegraph journal and model dump folder](#start-the-minerva-server-with-configuration-for-blazegraph-journal-and-model-dump-folder) - - [Request an OWL dump of all models from a running Minerva Server](#request-an-owl-dump-of-all-models-from-a-running-minerva-server) + - [Building the server](#building-the-server) + - [Prerequisites to build the code](#prerequisites-to-build-the-code) + - [Building the Minerva Server](#building-the-minerva-server) + - [Running the Minerva Server](#running-the-minerva-server) + - [Prerequisites](#prerequisites) + - [Start the MolecularModelManager server from the command line](#start-the-molecularmodelmanager-server-from-the-command-line) + - [Start Server via Eclipse:](#start-server-via-eclipse) + - [Running Tests](#running-tests) + - [Failing Tests](#failing-tests) + - [Quick Test via `curl`](#quick-test-via-curl) + - [Sonarqube](#Sonarqube) + - [Obtaining `owl-models` and `go-lego.owl`](#obtaining-owl-models-and-go-legoowl) + - [Useful source files for learning](#useful-source-files-for-learning) + - [Using the Blazegraph model store](#using-the-blazegraph-model-store) + - [Create a new Blazegraph journal from a directory of existing model files](#create-a-new-blazegraph-journal-from-a-directory-of-existing-model-files) + - [Dump model files from a Blazegraph journal that is not in use](#dump-model-files-from-a-blazegraph-journal-that-is-not-in-use) + - [Start the Minerva Server with configuration for Blazegraph journal and model dump folder](#start-the-minerva-server-with-configuration-for-blazegraph-journal-and-model-dump-folder) + - [Request an OWL dump of all models from a running Minerva Server](#request-an-owl-dump-of-all-models-from-a-running-minerva-server) @@ -30,8 +30,8 @@ This is a quick overview on how to setup a Java server for the MolecularModelMan ### Prerequisites to build the code - * Java (JDK 1.8 or later) as compiler - * Maven (3.0.x) Build-Tool +* Java (JDK 1.8 or later) as compiler +* Maven (3.0.x) Build-Tool ### Building the Minerva Server @@ -79,8 +79,8 @@ start-m3-server.sh -c go-trunk/ontology/extensions/catalog-v001.xml \ ### Automatically create a catalog file pointing to local copies of the imported ontologies -If you have [ROBOT](http://robot.obolibrary.org) installed, you can easily create a local mirror of an OWL imports chain, so that large -imported ontologies don't need to be repeatedly downloaded while you are developing locally: +If you have [ROBOT](http://robot.obolibrary.org) installed, you can easily create a local mirror of an OWL imports +chain, so that large imported ontologies don't need to be repeatedly downloaded while you are developing locally: `robot mirror --input my-ontology.owl --directory my-cache --output my-catalog.xml` @@ -102,7 +102,6 @@ https://raw.githubusercontent.com/evidenceontology/evidenceontology/master/gaf-e [Maven CLI](http://maven.apache.org/ref/3.3.9/maven-embedder/cli.html) - ### Quick Test via `curl` This assumes you are in the `minerva/` directory, which is the parent of `minerva-server/`. @@ -113,7 +112,8 @@ curl localhost:6800/`cat minerva-server/src/test/resources/server-test/long-get. 
### Sonarqube -Run sonarqube server locally using docker and ensure it is up and running by visiting [http://localhost:9000](http://localhost:9000) +Run sonarqube server locally using docker and ensure it is up and running by +visiting [http://localhost:9000](http://localhost:9000) ``` docker run -d --rm --name sonarqube -p 9000:9000 sonarqube:7.9.6-community @@ -131,7 +131,8 @@ For static analysis and code coverage: mvn clean package sonar:sonar ``` -Stopping sonarqube docker container. This would automatically remove the container since the --rm option was used above. +Stopping sonarqube docker container. This would automatically remove the container since the --rm option was used +above. ``` docker stop sonarqube @@ -139,7 +140,8 @@ docker stop sonarqube ## Obtaining `owl-models` and `go-lego.owl` -See [Monarch Ontology](https://github.com/monarch-initiative/monarch-ontology) and use the instructions there to generate a `catalog-v001.xml`. +See [Monarch Ontology](https://github.com/monarch-initiative/monarch-ontology) and use the instructions there to +generate a `catalog-v001.xml`. - ftp://ftp.geneontology.org/pub/go//experimental/lego/server/owl-models - ftp://ftp.geneontology.org/pub/go//ontology/extensions/go-lego.owl @@ -148,7 +150,6 @@ See [Monarch Ontology](https://github.com/monarch-initiative/monarch-ontology) a - `/minerva-server/src/main/java/org/geneontology/minerva/server/handler/M3BatchHandler.java` - ## Using the Blazegraph model store ### Create a new Blazegraph journal from a directory of existing model files @@ -163,7 +164,8 @@ See [Monarch Ontology](https://github.com/monarch-initiative/monarch-ontology) a `java "-Xmx$MINERVA_MEMORY" -jar minerva-server.jar -c catalog-v001.xml -g http://purl.obolibrary.org/obo/go/extensions/go-lego.owl -f blazegraph.jnl --export-folder exported-models --port 9999 --use-request-logging --slme-elk --skip-class-id-validation --set-important-relation-parent http://purl.obolibrary.org/obo/LEGOREL_0000000` -Note the options `-f blazegraph.jnl` for specifying the journal file and `--export-folder exported-models` for specifying where to write OWL models in response to a `export-all` operation request. +Note the options `-f blazegraph.jnl` for specifying the journal file and `--export-folder exported-models` for +specifying where to write OWL models in response to a `export-all` operation request. ### Request an OWL dump of all models from a running Minerva Server @@ -173,9 +175,13 @@ This will output to the folder configured in the startup arguments. ### Run a SPARQL Update against the triples in the database -*This should be handled with care since direct changes to triples will bypass any validations that typically occur when data are edited via the standard Minerva server API.* +*This should be handled with care since direct changes to triples will bypass any validations that typically occur when +data are edited via the standard Minerva server API.* -[SPARQL Update](http://www.w3.org/TR/sparql11-update/) is useful for various bulk maintenance operations that may periodically be necessary, e.g. updating all uses of an obsolete property to the current preferred IRI. Before running the update, the server should be stopped, since the Blazegraph journal can only be used from one Java process at a time. Then simply run the command like this: +[SPARQL Update](http://www.w3.org/TR/sparql11-update/) is useful for various bulk maintenance operations that may +periodically be necessary, e.g. 
updating all uses of an obsolete property to the current preferred IRI. Before running +the update, the server should be stopped, since the Blazegraph journal can only be used from one Java process at a time. +Then simply run the command like this: ```bash java -jar minerva-cli.jar --sparql-update -j blazegraph.jnl -f update.rq @@ -199,8 +205,11 @@ WHERE { ## SPARQL endpoint service -Minerva provides a read-only SPARQL query service at the `/sparql` path. Using GET, a URL-encoded query can be submitted as a value for the `query` parameter. Alternatively, POST can be used to submit form data with a `query` parameter, or to submit a SPARQL query directly, using the `application/sparql-query` MIME type. +Minerva provides a read-only SPARQL query service at the `/sparql` path. Using GET, a URL-encoded query can be submitted +as a value for the `query` parameter. Alternatively, POST can be used to submit form data with a `query` parameter, or +to submit a SPARQL query directly, using the `application/sparql-query` MIME type. ### SPARQL endpoint configuration -The only configurable aspect of the SPARQL endpoint is the query timeout. This can be set with a command-line option to the Minerva server at startup: `--sparql-endpoint-timeout 10`. The value is the time in seconds; the default is `10`. +The only configurable aspect of the SPARQL endpoint is the query timeout. This can be set with a command-line option to +the Minerva server at startup: `--sparql-endpoint-timeout 10`. The value is the time in seconds; the default is `10`. diff --git a/README.md b/README.md index f02745a5..82490bd7 100644 --- a/README.md +++ b/README.md @@ -8,18 +8,16 @@ To build and launch a server, see [INSTRUCTIONS.md](INSTRUCTIONS.md) ## About -Minerva is a wrapper and server for the OWL API and a triplestore (currently -blazegraph) that serves as the -back end for Noctua. It communicates with Noctua via Barista. It gains -its knowledge of the world through a Golr instance. +Minerva is a wrapper and server for the OWL API and a triplestore (currently blazegraph) that serves as the back end for +Noctua. It communicates with Noctua via Barista. It gains its knowledge of the world through a Golr instance. For specifications, see [specs/](specs) ## Code - * minerva-core : core logic - * minerva-json : conversion to and from the JSON-LD esque transport and model exchange format - * minerva-converter : converter to/from other formats. Primarily GAF/GPAD - * minerva-lookup : To be deprecated? Non-generic functions for looking up genes in golr - * minerva-server : JAX-RS server - * minerva-cli : command line interface +* minerva-core : core logic +* minerva-json : conversion to and from the JSON-LD esque transport and model exchange format +* minerva-converter : converter to/from other formats. Primarily GAF/GPAD +* minerva-lookup : To be deprecated? Non-generic functions for looking up genes in golr +* minerva-server : JAX-RS server +* minerva-cli : command line interface diff --git a/configuration.md b/configuration.md index 2258133c..12234cf0 100644 --- a/configuration.md +++ b/configuration.md @@ -1,7 +1,11 @@ # Configurable Minerva options + This document covers some of the configurable aspects of Minerva. ## Model ID prefix -The model ID prefix is used when constructing IRIs to name new models and individuals (which are based on their containing model ID). The default is `http://model.geneontology.org/`, however this can be changed via a command-line argument for most CLI commands and the server startup. E.g. 
`--model-id-prefix 'http://model.myproject.org/'`. + +The model ID prefix is used when constructing IRIs to name new models and individuals (which are based on their +containing model ID). The default is `http://model.geneontology.org/`, however this can be changed via a command-line +argument for most CLI commands and the server startup. E.g. `--model-id-prefix 'http://model.myproject.org/'`. *TODO: check consistency of argument names across CLI commands.* diff --git a/minerva-cli/pom.xml b/minerva-cli/pom.xml index c7a58e9b..e579b42d 100644 --- a/minerva-cli/pom.xml +++ b/minerva-cli/pom.xml @@ -1,105 +1,109 @@ - 4.0.0 - - minerva - org.geneontology - 0.6.1 - - minerva-cli - Minerva-CommandLineInterface - - - - - pl.project13.maven - git-commit-id-plugin - 4.0.3 - - - git-commit-id - - revision - - validate - - false - - false - - - - - - - org.apache.maven.plugins - maven-shade-plugin - - - package - - shade - - - - - *:* - - META-INF/*.SF - META-INF/*.DSA - META-INF/*.RSA - - - - bin/minerva-cli.jar - false - - - - - org.geneontology.minerva.cli.CommandLineInterface - ${git.commit.id} - https://github.com/geneontology/minerva/commit/${git.commit.id} - ${git.branch} - ${git.dirty} - - - - - - - - - + xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd"> + 4.0.0 + + minerva + org.geneontology + 0.6.1 + + minerva-cli + Minerva-CommandLineInterface - - - org.geneontology - minerva-converter - ${project.parent.version} - - - org.geneontology - minerva-server - ${project.parent.version} - - - org.geneontology - whelk_2.12 - 0.1.3 - - - org.obolibrary.robot - robot-core - - - org.apache.logging.log4j - log4j-core - - - org.apache.logging.log4j - log4j-1.2-api - - + + + + pl.project13.maven + git-commit-id-plugin + 4.0.3 + + + git-commit-id + + revision + + validate + + false + + false + + + + + + + org.apache.maven.plugins + maven-shade-plugin + + + package + + shade + + + + + *:* + + META-INF/*.SF + META-INF/*.DSA + META-INF/*.RSA + + + + bin/minerva-cli.jar + false + + + + + org.geneontology.minerva.cli.CommandLineInterface + ${git.commit.id} + + https://github.com/geneontology/minerva/commit/${git.commit.id} + + ${git.branch} + ${git.dirty} + + + + + + + + + + + + + org.geneontology + minerva-converter + ${project.parent.version} + + + org.geneontology + minerva-server + ${project.parent.version} + + + org.geneontology + whelk_2.12 + 0.1.3 + + + org.obolibrary.robot + robot-core + + + org.apache.logging.log4j + log4j-core + + + org.apache.logging.log4j + log4j-1.2-api + + diff --git a/minerva-cli/src/main/java/org/geneontology/minerva/cli/CommandLineInterface.java b/minerva-cli/src/main/java/org/geneontology/minerva/cli/CommandLineInterface.java index 34aab436..aeafabca 100644 --- a/minerva-cli/src/main/java/org/geneontology/minerva/cli/CommandLineInterface.java +++ b/minerva-cli/src/main/java/org/geneontology/minerva/cli/CommandLineInterface.java @@ -1,46 +1,20 @@ package org.geneontology.minerva.cli; -import java.io.File; -import java.io.FileNotFoundException; -import java.io.FileOutputStream; -import java.io.FileWriter; -import java.io.IOException; -import java.io.OutputStream; -import java.io.OutputStreamWriter; -import java.io.Writer; -import java.net.MalformedURLException; -import java.net.URL; -import java.nio.charset.StandardCharsets; -import java.nio.file.Paths; -import java.util.Collection; -import java.util.Collections; -import java.util.Comparator; -import java.util.HashMap; -import java.util.HashSet; -import java.util.List; -import 
java.util.Map; -import java.util.Properties; -import java.util.Set; - -import org.apache.commons.cli.CommandLine; -import org.apache.commons.cli.CommandLineParser; -import org.apache.commons.cli.DefaultParser; -import org.apache.commons.cli.Option; -import org.apache.commons.cli.OptionGroup; -import org.apache.commons.cli.Options; -import org.apache.commons.cli.ParseException; +import com.bigdata.rdf.sail.BigdataSail; +import com.bigdata.rdf.sail.BigdataSailRepository; +import com.bigdata.rdf.sail.BigdataSailRepositoryConnection; +import com.google.common.base.Optional; +import com.google.common.collect.Sets; +import com.google.gson.Gson; +import com.google.gson.GsonBuilder; +import org.apache.commons.cli.*; import org.apache.commons.io.FileUtils; import org.apache.commons.io.IOUtils; import org.apache.commons.lang3.StringUtils; -import org.apache.jena.rdf.model.Model; import org.apache.log4j.Level; import org.apache.log4j.Logger; -import org.geneontology.minerva.BlazegraphMolecularModelManager; -import org.geneontology.minerva.BlazegraphOntologyManager; -import org.geneontology.minerva.CoreMolecularModelManager; -import org.geneontology.minerva.ModelContainer; -import org.geneontology.minerva.UndoAwareMolecularModelManager; +import org.geneontology.minerva.*; import org.geneontology.minerva.curie.CurieHandler; import org.geneontology.minerva.curie.CurieMappings; import org.geneontology.minerva.curie.DefaultCurieHandler; @@ -50,9 +24,6 @@ import org.geneontology.minerva.json.MolecularModelJsonRenderer; import org.geneontology.minerva.legacy.sparql.GPADData; import org.geneontology.minerva.legacy.sparql.GPADSPARQLExport; -import org.geneontology.minerva.lookup.GolrExternalLookupService; -import org.geneontology.minerva.lookup.ExternalLookupService; -import org.geneontology.minerva.lookup.ExternalLookupService.LookupEntry; import org.geneontology.minerva.model.ActivityUnit; import org.geneontology.minerva.model.GoCamModel; import org.geneontology.minerva.model.GoCamModelStats; @@ -60,15 +31,11 @@ import org.geneontology.minerva.server.inferences.InferenceProviderCreator; import org.geneontology.minerva.server.validation.MinervaShexValidator; import org.geneontology.minerva.util.BlazegraphMutationCounter; -import org.geneontology.minerva.validation.Enricher; import org.geneontology.minerva.validation.ShexValidationReport; -import org.geneontology.minerva.validation.ShexValidator; import org.geneontology.minerva.validation.ValidationResultSet; import org.geneontology.minerva.validation.Violation; import org.geneontology.minerva.validation.pipeline.BatchPipelineValidationReport; import org.geneontology.minerva.validation.pipeline.ErrorMessage; -import org.geneontology.whelk.owlapi.WhelkOWLReasoner; -import org.geneontology.whelk.owlapi.WhelkOWLReasonerFactory; import org.obolibrary.robot.CatalogXmlIRIMapper; import org.openrdf.query.MalformedQueryException; import org.openrdf.query.QueryLanguage; @@ -76,1336 +43,1320 @@ import org.openrdf.repository.RepositoryException; import org.openrdf.rio.RDFHandlerException; import org.openrdf.rio.RDFParseException; -import org.semanticweb.elk.owlapi.ElkReasonerFactory; import org.semanticweb.owlapi.apibinding.OWLManager; import org.semanticweb.owlapi.formats.TurtleDocumentFormat; import org.semanticweb.owlapi.io.IRIDocumentSource; -import org.semanticweb.owlapi.model.AxiomType; -import org.semanticweb.owlapi.model.IRI; -import org.semanticweb.owlapi.model.OWLAnnotation; -import org.semanticweb.owlapi.model.OWLAnnotationProperty; -import 
org.semanticweb.owlapi.model.OWLAxiom; -import org.semanticweb.owlapi.model.OWLClass; -import org.semanticweb.owlapi.model.OWLClassAssertionAxiom; -import org.semanticweb.owlapi.model.OWLClassExpression; -import org.semanticweb.owlapi.model.OWLDataFactory; -import org.semanticweb.owlapi.model.OWLDocumentFormat; -import org.semanticweb.owlapi.model.OWLImportsDeclaration; -import org.semanticweb.owlapi.model.OWLLiteral; -import org.semanticweb.owlapi.model.OWLNamedIndividual; -import org.semanticweb.owlapi.model.OWLOntology; -import org.semanticweb.owlapi.model.OWLOntologyAlreadyExistsException; -import org.semanticweb.owlapi.model.OWLOntologyCreationException; -import org.semanticweb.owlapi.model.OWLOntologyIRIMapper; -import org.semanticweb.owlapi.model.OWLOntologyManager; -import org.semanticweb.owlapi.model.OWLOntologyStorageException; +import org.semanticweb.owlapi.model.*; import org.semanticweb.owlapi.reasoner.InconsistentOntologyException; import org.semanticweb.owlapi.reasoner.OWLReasoner; import org.semanticweb.owlapi.reasoner.OWLReasonerFactory; import org.semanticweb.owlapi.reasoner.structural.StructuralReasonerFactory; import org.semanticweb.owlapi.search.EntitySearcher; import org.semanticweb.owlapi.util.InferredOntologyGenerator; - -import com.bigdata.rdf.sail.BigdataSail; -import com.bigdata.rdf.sail.BigdataSailRepository; -import com.bigdata.rdf.sail.BigdataSailRepositoryConnection; -import com.google.common.base.Optional; -import com.google.common.collect.Sets; -import com.google.gson.Gson; -import com.google.gson.GsonBuilder; - -import fr.inria.lille.shexjava.schema.ShexSchema; -import fr.inria.lille.shexjava.schema.parsing.GenParser; -import owltools.cli.Opts; import owltools.io.ParserWrapper; +import java.io.*; +import java.net.MalformedURLException; +import java.net.URL; +import java.nio.charset.StandardCharsets; +import java.nio.file.Paths; +import java.util.*; + public class CommandLineInterface { - private static final Logger LOGGER = Logger.getLogger(CommandLineInterface.class); - - public static void main(String[] args) { - - reportSystemParams(); - Options main_options = new Options(); - OptionGroup methods = new OptionGroup(); - methods.setRequired(true); - Option dump = Option.builder() - .longOpt("dump-owl-models") - .desc("export OWL GO-CAM models from journal") - .hasArg(false) - .build(); - methods.addOption(dump); - - Option merge_ontologies = Option.builder() - .longOpt("merge-ontologies") - .desc("Merge owl ontologies") - .hasArg(false) - .build(); - methods.addOption(merge_ontologies); - Option import_owl = Option.builder() - .longOpt("import-owl-models") - .desc("import OWL GO-CAM models into journal") - .hasArg(false) - .build(); - methods.addOption(import_owl); - Option import_tbox_ontologies = Option.builder() - .longOpt("import-tbox-ontologies") - .desc("import OWL tbox ontologies into journal") - .hasArg(false) - .build(); - methods.addOption(import_tbox_ontologies); - - Option add_taxon_metadata = Option.builder() - .longOpt("add-taxon-metadata") - .desc("add taxon associated with genes in each model as an annotation on the model") - .hasArg(false) - .build(); - methods.addOption(add_taxon_metadata); - - Option clean_gocams = Option.builder() - .longOpt("clean-gocams") - .desc("remove import statements, add property declarations, remove json-model annotation") - .hasArg(false) - .build(); - methods.addOption(clean_gocams); - - Option sparql = Option.builder() - .longOpt("sparql-update") - .desc("update the blazegraph journal with the given sparql 
statement") - .hasArg(false) - .build(); - methods.addOption(sparql); - Option json = Option.builder() - .longOpt("owl-lego-to-json") - .desc("Given a GO-CAM OWL file, make its minerva json represention") - .hasArg(false) - .build(); - methods.addOption(json); - Option gpad = Option.builder() - .longOpt("lego-to-gpad-sparql") - .desc("Given a GO-CAM journal, export GPAD representation for all the go-cams") - .hasArg(false) - .build(); - methods.addOption(gpad); - Option version = Option.builder() - .longOpt("version") - .desc("Print the version of the minerva stack used here. Extracts this from JAR file.") - .hasArg(false) - .build(); - methods.addOption(version); - Option validate = Option.builder() - .longOpt("validate-go-cams") - .desc("Check a collection of go-cam files or a journal for valid semantics (owl) and structure (shex)") - .hasArg(false) - .build(); - methods.addOption(validate); - - main_options.addOptionGroup(methods); - - CommandLineParser parser = new DefaultParser(); - try { - CommandLine cmd = parser.parse( main_options, args, true); - - if(cmd.hasOption("add-taxon-metadata")) { - Options add_taxon_options = new Options(); - add_taxon_options.addOption(add_taxon_metadata); - add_taxon_options.addOption("j", "journal", true, "This is the go-cam journal that will be updated with taxon annotations."); - add_taxon_options.addOption("ontojournal", "ontojournal", true, "Specify a blazegraph journal file containing the merged, pre-reasoned tbox aka go-lego.owl"); - cmd = parser.parse( add_taxon_options, args, false); - String journalFilePath = cmd.getOptionValue("j"); //--journal - String ontojournal = cmd.getOptionValue("ontojournal"); //--folder - addTaxonMetaData(journalFilePath, ontojournal); - } - - if(cmd.hasOption("clean-gocams")) { - Options clean_options = new Options(); - clean_options.addOption(clean_gocams); - clean_options.addOption("i", "input", true, "This is the directory of gocam files to clean."); - clean_options.addOption("o", "output", true, "This is the directory of cleaned gocam files that are produced."); - cmd = parser.parse(clean_options, args, false); - cleanGoCams(cmd.getOptionValue("i"), cmd.getOptionValue("o")); - } - - if(cmd.hasOption("import-tbox-ontologies")) { - Options import_tbox_options = new Options(); - import_tbox_options.addOption(import_tbox_ontologies); - import_tbox_options.addOption("j", "journal", true, "Sets the Blazegraph journal file for the database"); - import_tbox_options.addOption("f", "file", true, "Sets the input file containing the ontology to load"); - import_tbox_options.addOption("r", "reset", false, "If present, will clear out the journal, otherwise adds to it"); - cmd = parser.parse( import_tbox_options, args, false); - String journalFilePath = cmd.getOptionValue("j"); //--journal - String inputFile = cmd.getOptionValue("f"); //--folder - importOWLOntologyIntoJournal(journalFilePath, inputFile, cmd.hasOption("r")); - } - if(cmd.hasOption("merge-ontologies")) { - Options merge_options = new Options(); - merge_options.addOption(merge_ontologies); - merge_options.addOption("i", "input", true, "The input folder containing ontologies to merge"); - merge_options.addOption("o", "output", true, "The file to write the ontology to"); - merge_options.addOption("u", "iri", true, "The base iri for the merged ontology"); - merge_options.addOption("r", "reason", false, "Add inferences to the merged ontology"); - cmd = parser.parse(merge_options, args, false); - buildMergedOwlOntology(cmd.getOptionValue("i"), 
cmd.getOptionValue("o"), cmd.getOptionValue("u"), cmd.hasOption("r")); - } - - if(cmd.hasOption("dump-owl-models")) { - Options dump_options = new Options(); - dump_options.addOption(dump); - dump_options.addOption("j", "journal", true, "Sets the Blazegraph journal file for the database"); - dump_options.addOption("f", "folder", true, "Sets the output folder the GO-CAM model files"); - dump_options.addOption("p", "model-id-prefix", true, "prefix for GO-CAM model ids"); - cmd = parser.parse( dump_options, args, false); - String journalFilePath = cmd.getOptionValue("j"); //--journal - String outputFolder = cmd.getOptionValue("f"); //--folder - String modelIdPrefix = cmd.getOptionValue("p"); //--prefix - modelsToOWL(journalFilePath, outputFolder, modelIdPrefix); - }else if(cmd.hasOption("import-owl-models")) { - Options import_options = new Options(); - import_options.addOption(import_owl); - import_options.addOption("j", "journal", true, "Sets the Blazegraph journal file for the database"); - import_options.addOption("f", "folder", true, "Sets the input folder the GO-CAM model files"); - cmd = parser.parse( import_options, args, false); - String journalFilePath = cmd.getOptionValue("j"); //--journal - String outputFolder = cmd.getOptionValue("f"); //--folder - importOWLModels(journalFilePath, outputFolder); - }else if(cmd.hasOption("sparql-update")) { - Options sparql_options = new Options(); - sparql_options.addOption(sparql); - sparql_options.addOption("j", "journal", true, "Sets the Blazegraph journal file for the database"); - sparql_options.addOption("f", "file", true, "Sets the file containing a SPARQL update"); - cmd = parser.parse( sparql_options, args, false); - String journalFilePath = cmd.getOptionValue("j"); //--journal - String file = cmd.getOptionValue("f"); - sparqlUpdate(journalFilePath, file); - }else if(cmd.hasOption("owl-lego-to-json")) { - Options json_options = new Options(); - json_options.addOption(json); - json_options.addOption("i", "OWLFile", true, "Input GO-CAM OWL file"); - json_options.addOption("o", "JSONFILE", true, "Output JSON file"); - OptionGroup format = new OptionGroup(); - Option pretty = Option.builder() - .longOpt("pretty-json") - .desc("pretty json format") - .hasArg(false) - .build(); - format.addOption(pretty); - Option compact = Option.builder() - .longOpt("compact-json") - .desc("compact json format") - .hasArg(false) - .build(); - format.addOption(compact); - json_options.addOptionGroup(format); - cmd = parser.parse( json_options, args, false); - String input = cmd.getOptionValue("i"); - String output = cmd.getOptionValue("o"); - boolean usePretty = true; - if(cmd.hasOption("compact-json")) { - usePretty = false; - } - owl2LegoJson(input, output, usePretty); - }else if(cmd.hasOption("lego-to-gpad-sparql")) { - Options gpad_options = new Options(); - gpad_options.addOption(gpad); - gpad_options.addOption("i", "input", true, "Sets the Blazegraph journal file for the database"); - gpad_options.addOption("o", "gpad-output", true, "Sets the output location for the GPAD"); - gpad_options.addOption("p", "model-id-prefix", true, "prefix for GO-CAM model ids"); - gpad_options.addOption("c", "model-id-curie", true, "prefix for GO-CAM curies"); - gpad_options.addOption("ont", "ontology", true, "IRI of tbox ontology for classification - usually default go-lego.owl"); - gpad_options.addOption("cat", "catalog", true, "Catalog file for tbox ontology. 
" + - "Use this to specify local copies of the ontology and or its imports to " + - "speed and control the process. If not used, will download the tbox and all its imports."); - gpad_options.addOption("ontojournal", "ontojournal", true, "Specify a blazegraph journal file containing the merged, pre-reasoned tbox aka go-lego.owl"); - cmd = parser.parse(gpad_options, args, false); - String inputDB = cmd.getOptionValue("input"); - String gpadOutputFolder = cmd.getOptionValue("gpad-output"); - String modelIdPrefix = cmd.getOptionValue("model-id-prefix"); - String modelIdcurie = cmd.getOptionValue("model-id-curie"); - String ontologyIRI = cmd.getOptionValue("ontology"); - String catalog = cmd.getOptionValue("catalog"); - String go_lego_journal_file = null; - if(cmd.hasOption("ontojournal")) { - go_lego_journal_file = cmd.getOptionValue("ontojournal"); - } - if(go_lego_journal_file==null) { - System.err.println("Missing -- ontojournal . Need to specify location for blazegraph journal file containing the merged go-lego tbox (neo, GO-plus, etc..). If a journal does not exist at that location, the tbox ontology will be used to initialize one."); - System.exit(-1); - } - legoToAnnotationsSPARQL(modelIdPrefix, modelIdcurie, inputDB, gpadOutputFolder, ontologyIRI, catalog, go_lego_journal_file); - }else if(cmd.hasOption("version")) { - printVersion(); - }else if(cmd.hasOption("validate-go-cams")) { - Options validate_options = new Options(); - validate_options.addOption(validate); - validate_options.addOption("i", "input", true, "Either a blazegraph journal or a folder with go-cams in it"); - validate_options.addOption("shex", "shex", false, "If present, will execute shex validation"); - validate_options.addOption("owl", "owl", false, "If present, will execute shex validation"); - validate_options.addOption("r", "report-folder", true, "Folder where output files will appear"); - validate_options.addOption("p", "model-id-prefix", true, "prefix for GO-CAM model ids"); - validate_options.addOption("cu", "model-id-curie", true, "prefix for GO-CAM curies"); - validate_options.addOption("ont", "ontology", true, "IRI of tbox ontology - usually default go-lego.owl"); - validate_options.addOption("c", "catalog", true, "Catalog file for tbox ontology. " - + "Use this to specify local copies of the ontology and or its imports to " - + "speed and control the process. If not used, will download the tbox and all its imports."); - validate_options.addOption("shouldfail", "shouldfail", false, "When used in travis mode for tests, shouldfail " - + "parameter will allow a successful run on a folder that only contains incorrect models."); - validate_options.addOption("t", "travis", false, "If travis, then the program will stop upon a failed " - + "validation and report an error. Otherwise it will continue to test all the models."); - validate_options.addOption("m", "shapemap", true, "Specify a shapemap file. Otherwise will download from go_shapes repo."); - validate_options.addOption("s", "shexpath", true, "Specify a shex schema file. Otherwise will download from go_shapes repo."); - validate_options.addOption("ontojournal", "ontojournal", true, "Specify a blazegraph journal file containing the merged, pre-reasoned tbox aka go-lego.owl"); - validate_options.addOption("reasoner_report", "reasoner_report", false, "Add a report with reasoning results to the output of the validation. 
"); - - - cmd = parser.parse(validate_options, args, false); - String input = cmd.getOptionValue("input"); - String outputFolder = cmd.getOptionValue("report-folder"); - String shexpath = cmd.getOptionValue("s"); - String shapemappath = cmd.getOptionValue("shapemap"); - - String ontologyIRI = "http://purl.obolibrary.org/obo/go/extensions/go-lego.owl"; - if(cmd.hasOption("ontology")) { - ontologyIRI = cmd.getOptionValue("ontology"); - } - String catalog = cmd.getOptionValue("catalog"); - String modelIdPrefix = "http://model.geneontology.org/"; - if(cmd.hasOption("model-id-prefix")) { - modelIdPrefix = cmd.getOptionValue("model-id-prefix"); - } - String modelIdcurie = "gomodel"; - if(cmd.hasOption("model-id-curie")) { - modelIdcurie = cmd.getOptionValue("model-id-curie"); - } - boolean travisMode = false; - if(cmd.hasOption("travis")) { - travisMode = true; - } - boolean shouldFail = false; - if(cmd.hasOption("shouldfail")) { - shouldFail = true; - } - boolean checkShex = false; - if(cmd.hasOption("shex")) { - checkShex = true; - } - String go_lego_journal_file = null; - if(cmd.hasOption("ontojournal")) { - go_lego_journal_file = cmd.getOptionValue("ontojournal"); - } - if(go_lego_journal_file==null) { - System.err.println("Missing -- ontojournal . Need to specify blazegraph journal file containing the merged go-lego tbox (neo, GO-plus, etc..)"); - System.exit(-1); - } - boolean run_reasoner_report = false; - if(cmd.hasOption("reasoner_report")) { - run_reasoner_report = true; - } - validateGoCams(input, outputFolder, ontologyIRI, catalog, modelIdPrefix, modelIdcurie, shexpath, shapemappath, travisMode, shouldFail, checkShex, go_lego_journal_file, run_reasoner_report); - } - }catch( ParseException exp ) { - System.out.println( "Parameter parse exception. Note that the first parameter must be one of: " - + "[--validate-go-cams, --dump-owl-models, --import-owl-models, --sparql-update, --owl-lego-to-json, --lego-to-gpad-sparql, --version, --update-gene-product-types]" - + "\nSubsequent parameters are specific to each top level command. " - + "\nError message: " + exp.getMessage() ); - System.exit(-1); - } catch (Exception e) { - e.printStackTrace(); - //explicitly exiting to inform travis of failure. - System.exit(-1); - } - } - - /** - * Given a blazegraph journal with go-cams in it, write them all out as OWL files. 
- * cli --dump-owl-models - * @param journalFilePath - * @param outputFolder - * @param modelIdPrefix - * @throws Exception - */ - public static void modelsToOWL(String journalFilePath, String outputFolder, String modelIdPrefix) throws Exception { - if(modelIdPrefix==null) { - modelIdPrefix = "http://model.geneontology.org/"; - } - - // minimal inputs - if (journalFilePath == null) { - System.err.println("No journal file was configured."); - System.exit(-1); - return; - } - if (outputFolder == null) { - System.err.println("No output folder was configured."); - System.exit(-1); - return; - } - - OWLOntology dummy = OWLManager.createOWLOntologyManager().createOntology(IRI.create("http://example.org/dummy")); - CurieHandler curieHandler = new MappedCurieHandler(); - BlazegraphMolecularModelManager m3 = new BlazegraphMolecularModelManager<>(dummy, curieHandler, modelIdPrefix, journalFilePath, outputFolder, null, false); - m3.dumpAllStoredModels(); - m3.dispose(); - } - - /** - * Load the go-cam files in the input folder into the journal - * cli import-owl-models - * @param journalFilePath - * @param inputFolder - * @throws Exception - */ - public static void importOWLModels(String journalFilePath, String inputFolder) throws Exception { - // minimal inputs - if (journalFilePath == null) { - System.err.println("No journal file was configured."); - System.exit(-1); - return; - } - if (inputFolder == null) { - System.err.println("No input folder was configured."); - System.exit(-1); - return; - } - int total_files = 0; - OWLOntology dummy = OWLManager.createOWLOntologyManager().createOntology(IRI.create("http://example.org/dummy")); - String modelIdPrefix = "http://model.geneontology.org/"; // this will not be used for anything - CurieHandler curieHandler = new MappedCurieHandler(); - BlazegraphMolecularModelManager m3 = new BlazegraphMolecularModelManager<>(dummy, curieHandler, modelIdPrefix, journalFilePath, null, null, false); - //in case of update rather than whole new journal - Set stored = new HashSet(m3.getStoredModelIds()); - LOGGER.info("loading gocams from "+inputFolder); - //for (File file : FileUtils.listFiles(new File(inputFolder), null, true)) { - File i = new File(inputFolder); - if(i.exists()) { - if(i.isDirectory()) { - total_files = i.listFiles().length; - FileUtils.listFiles(i, null, true).parallelStream().parallel().forEach(file-> { - if(file.getName().endsWith("ttl")){ - java.util.Optional irio; - try { - irio = m3.scanForOntologyIRI(file); - IRI iri = null; - if(irio.isPresent()) { - iri = IRI.create(irio.get()); - } - //is it in there already? 
- if(stored.contains(iri)) { - LOGGER.error("Attempted to load gocam ttl file into database but gocam with that iri already exists, skipping "+ file+" "+iri); - }else { - stored.add(iri); - m3.importModelToDatabase(file, true); - } - } catch (RDFParseException | RDFHandlerException | IOException e1) { - // TODO Auto-generated catch block - e1.printStackTrace(); - } catch (OWLOntologyCreationException e) { - // TODO Auto-generated catch block - e.printStackTrace(); - } catch (RepositoryException e) { - // TODO Auto-generated catch block - e.printStackTrace(); - } - }else { - LOGGER.info("Ignored for not ending with .ttl" + file); - } - }); - }} - m3.dispose(); - LOGGER.info("done loading gocams, loaded: "+stored.size()+" out of: "+total_files+" files"); - } - - /** - * - * @param journalFilePath - * @param inputFolder - * @throws Exception - */ - public static void buildMergedOwlOntology(String inputFolder, String outputfile, String base_iri, boolean addInferences) throws Exception { - // minimal inputs - if (outputfile == null) { - System.err.println("No output file was configured."); - System.exit(-1); - return; - } - if (inputFolder == null) { - System.err.println("No input folder was configured."); - System.exit(-1); - return; - } - if (base_iri == null) { - System.err.println("No base iri was configured."); - System.exit(-1); - return; - } - OWLOntologyManager ontman = OWLManager.createOWLOntologyManager(); - OWLDataFactory df = ontman.getOWLDataFactory(); - OWLOntology merged = ontman.createOntology(IRI.create(base_iri)); - for (File file : FileUtils.listFiles(new File(inputFolder), null, true)) { - LOGGER.info("Loading " + file); - if(file.getName().endsWith("ttl")||file.getName().endsWith("owl")) { - try { - OWLOntology ont = ontman.loadOntologyFromOntologyDocument(file); - ontman.addAxioms(merged, ont.getAxioms()); - }catch(OWLOntologyAlreadyExistsException e) { - LOGGER.error("error loading already loaded ontology: "+file); - } - } else { - LOGGER.info("Ignored for not ending with .ttl or .owl " + file); - } - } - if(addInferences) { - LOGGER.info("Running reasoner"); - //OWLReasonerFactory reasonerFactory = new WhelkOWLReasonerFactory(); - //WhelkOWLReasoner reasoner = (WhelkOWLReasoner)reasonerFactory.createReasoner(merged); - OWLReasonerFactory reasonerFactory = new StructuralReasonerFactory(); - OWLReasoner reasoner = reasonerFactory.createReasoner(merged); - InferredOntologyGenerator gen = new InferredOntologyGenerator(reasoner); - gen.fillOntology(df, merged); - } - try { - ontman.saveOntology(merged, new FileOutputStream(new File(outputfile))); - } catch (OWLOntologyStorageException e) { - // TODO Auto-generated catch block - e.printStackTrace(); - } catch (FileNotFoundException e) { - // TODO Auto-generated catch block - e.printStackTrace(); - } - } - - /** - * Load the go-cam files in the input folder into the journal - * cli import-owl-models - * @param journalFilePath - * @param inputFolder - * @throws Exception - */ - public static void importOWLOntologyIntoJournal(String journalFilePath, String inputFile, boolean reset) throws Exception { - // minimal inputs - if (journalFilePath == null) { - System.err.println("No journal file was configured."); - System.exit(-1); - return; - } - if (inputFile == null) { - System.err.println("No input file was configured."); - System.exit(-1); - return; - } - - BlazegraphOntologyManager man = new BlazegraphOntologyManager(journalFilePath, false); - String iri_for_ontology_graph = "http://geneontology.org/go-lego-graph"; - 
man.loadRepositoryFromOWLFile(new File(inputFile), iri_for_ontology_graph, reset); - } - - /** - * Updates the journal with the provided update sparql statement. - * cli parameter --sparql-update - * @param journalFilePath - * @param updateFile - * @throws OWLOntologyCreationException - * @throws IOException - * @throws RepositoryException - * @throws MalformedQueryException - * @throws UpdateExecutionException - */ - public static void sparqlUpdate(String journalFilePath, String updateFile) throws OWLOntologyCreationException, IOException, RepositoryException, MalformedQueryException, UpdateExecutionException { - // minimal inputs - if (journalFilePath == null) { - System.err.println("No journal file was configured."); - System.exit(-1); - return; - } - if (updateFile == null) { - System.err.println("No update file was configured."); - System.exit(-1); - return; - } - - String update = FileUtils.readFileToString(new File(updateFile), StandardCharsets.UTF_8); - Properties properties = new Properties(); - properties.load(CommandLineInterface.class.getResourceAsStream("/org/geneontology/minerva/blazegraph.properties")); - properties.setProperty(com.bigdata.journal.Options.FILE, journalFilePath); - - BigdataSail sail = new BigdataSail(properties); - BigdataSailRepository repository = new BigdataSailRepository(sail); - repository.initialize(); - BigdataSailRepositoryConnection conn = repository.getUnisolatedConnection(); - BlazegraphMutationCounter counter = new BlazegraphMutationCounter(); - conn.addChangeLog(counter); - conn.prepareUpdate(QueryLanguage.SPARQL, update).execute(); - int changes = counter.mutationCount(); - conn.removeChangeLog(counter); - System.out.println("\nApplied " + changes + " changes"); - conn.close(); - } - - /** - * Convert a GO-CAM owl file to a minerva json structure - * --owl-lego-to-json - * @param input - * @param output - * @param usePretty - * @throws Exception - */ - public static void owl2LegoJson(String input, String output, boolean usePretty) throws Exception { - - // minimal inputs - if (input == null) { - System.err.println("No input model was configured."); - System.exit(-1); - return; - } - if (output == null) { - System.err.println("No output file was configured."); - System.exit(-1); - return; - } - - // configuration - CurieHandler curieHandler = DefaultCurieHandler.getDefaultHandler(); - GsonBuilder gsonBuilder = new GsonBuilder(); - if (usePretty) { - gsonBuilder.setPrettyPrinting(); - } - Gson gson = gsonBuilder.create(); - - // process each model - if (LOGGER.isInfoEnabled()) { - LOGGER.info("Loading model from file: "+input); - } - OWLOntology model = null; - final JsonModel jsonModel; - ParserWrapper pw = new ParserWrapper(); - try { - - // load model - model = pw.parseOWL(IRI.create(new File(input).getCanonicalFile())); - InferenceProvider inferenceProvider = null; // TODO decide if we need reasoning - String modelId = null; - Optional ontologyIRI = model.getOntologyID().getOntologyIRI(); - if (ontologyIRI.isPresent()) { - modelId = curieHandler.getCuri(ontologyIRI.get()); - } - - // render json - final MolecularModelJsonRenderer renderer = new MolecularModelJsonRenderer(modelId, model, inferenceProvider, curieHandler); - jsonModel = renderer.renderModel(); - } - finally { - if (model != null) { - pw.getManager().removeOntology(model); - model = null; - } - } - - // save as json string - final String json = gson.toJson(jsonModel); - final File outputFile = new File(output).getCanonicalFile(); - try (OutputStream outputStream = new 
FileOutputStream(outputFile)) { - if (LOGGER.isInfoEnabled()) { - LOGGER.info("Saving json to file: "+outputFile); - } - IOUtils.write(json, outputStream); - } - } - - /** - * Output GPAD files via inference+SPARQL - * cli --lego-to-gpad-sparql - * @param modelIdPrefix - * @param modelIdcurie - * @param inputDB - * @param gpadOutputFolder - * @param ontologyIRI - * @throws Exception - */ - public static void legoToAnnotationsSPARQL(String modelIdPrefix, String modelIdcurie, String inputDB, String gpadOutputFolder, String ontologyIRI, String catalog, String go_lego_journal_file) throws Exception { - if(modelIdPrefix==null) { - modelIdPrefix = "http://model.geneontology.org/"; - } - if(modelIdcurie==null) { - modelIdcurie = "gomodel"; - } - if(inputDB==null) { - inputDB = "blazegraph.jnl"; - } - if(gpadOutputFolder==null) { - gpadOutputFolder = null; - } - if(ontologyIRI==null) { - ontologyIRI = "http://purl.obolibrary.org/obo/go/extensions/go-lego.owl"; - } - OWLOntologyManager ontman = OWLManager.createOWLOntologyManager(); - if(catalog!=null) { - LOGGER.info("using catalog: "+catalog); - ontman.setIRIMappers(Sets.newHashSet(new owltools.io.CatalogXmlIRIMapper(catalog))); - }else { - LOGGER.info("no catalog, resolving all ontology uris directly"); - } - - OWLOntology ontology = ontman.loadOntology(IRI.create(ontologyIRI)); - CurieMappings localMappings = new CurieMappings.SimpleCurieMappings(Collections.singletonMap(modelIdcurie, modelIdPrefix)); - CurieHandler curieHandler = new MappedCurieHandler(DefaultCurieHandler.loadDefaultMappings(), localMappings); - boolean loadTboxIntoOntJournal = (!(new File(go_lego_journal_file)).exists()); - BlazegraphMolecularModelManager m3 = new BlazegraphMolecularModelManager<>(ontology, curieHandler, modelIdPrefix, inputDB, null, go_lego_journal_file, false); - if (loadTboxIntoOntJournal) { - m3.getGolego_repo().loadRepositoryFromOntology(ontology, "http://example.org/", true); - } - final String immutableModelIdPrefix = modelIdPrefix; - final String immutableGpadOutputFolder = gpadOutputFolder; - m3.getAvailableModelIds().stream().parallel().forEach(modelIRI -> { - try { - //TODO investigate whether changing to a neo-lite model has an impact on this - may need to make use of ontology journal - String gpad = new GPADSPARQLExport(curieHandler, m3.getLegacyRelationShorthandIndex(), m3.getTboxShorthandIndex(), m3.getGolego_repo().regulatorsToRegulated).exportGPAD(m3.createInferredModel(modelIRI), modelIRI); - String fileName = StringUtils.replaceOnce(modelIRI.toString(), immutableModelIdPrefix, "") + ".gpad"; - Writer writer = new OutputStreamWriter(new FileOutputStream(Paths.get(immutableGpadOutputFolder, fileName).toFile()), StandardCharsets.UTF_8); - writer.write(gpad); - writer.close(); - } catch (InconsistentOntologyException e) { - LOGGER.error("Inconsistent ontology: " + modelIRI); - } catch (IOException e) { - LOGGER.error("Couldn't export GPAD for: " + modelIRI, e); - } - }); - m3.dispose(); - } - - - /** - * --validate-go-cams - * -i /GitHub/GO_Shapes/test_ttl/go_cams/should_pass/ - * -c ./catalog-no-import.xml - * @param input - * @param basicOutputFile - * @param explanationOutputFile - * @param ontologyIRI - * @param catalog - * @param modelIdPrefix - * @param modelIdcurie - * @param shexpath - * @param shapemappath - * @param travisMode - * @param shouldPass - * @throws IOException - * @throws OWLOntologyCreationException - */ - public static void validateGoCams(String input, String outputFolder, - String ontologyIRI, String catalog, String 
modelIdPrefix, String modelIdcurie, - String shexpath, String shapemappath, boolean travisMode, boolean shouldFail, boolean checkShex, - String go_lego_journal_file, boolean run_reasoner_report) throws OWLOntologyCreationException, IOException { - LOGGER.setLevel(Level.INFO); - String inputDB = "blazegraph.jnl"; - String shexFileUrl = "https://raw.githubusercontent.com/geneontology/go-shapes/master/shapes/go-cam-shapes.shex"; - String goshapemapFileUrl = "https://raw.githubusercontent.com/geneontology/go-shapes/master/shapes/go-cam-shapes.shapeMap"; - CurieMappings localMappings = new CurieMappings.SimpleCurieMappings(Collections.singletonMap(modelIdcurie, modelIdPrefix)); - CurieHandler curieHandler = new MappedCurieHandler(DefaultCurieHandler.loadDefaultMappings(), localMappings); - Map modelid_filename = new HashMap(); - - if(outputFolder==null) { - LOGGER.error("please specify an output folder with -r "); - System.exit(-1); - }else if(!outputFolder.endsWith("/")) { - outputFolder+="/"; - } - - if(input==null) { - LOGGER.error("please provide an input file - either a directory of ttl files or a blazegraph journal"); - System.exit(-1); - } - - LOGGER.info("loading tbox ontology: "+ontologyIRI); - OWLOntologyManager ontman = OWLManager.createOWLOntologyManager(); - if(catalog!=null) { - LOGGER.info("using catalog: "+catalog); - try { - ontman.setIRIMappers(Sets.newHashSet(new CatalogXmlIRIMapper(catalog))); - } catch (IOException e) { - // TODO Auto-generated catch block - e.printStackTrace(); - } - }else { - LOGGER.info("no catalog, resolving all ontology uris directly"); - } - - OWLOntology tbox_ontology = null; - try { - tbox_ontology = ontman.loadOntology(IRI.create(ontologyIRI)); - LOGGER.info("tbox ontology axioms loaded: "+tbox_ontology.getAxiomCount()); - } catch (OWLOntologyCreationException e1) { - // TODO Auto-generated catch block - e1.printStackTrace(); - } - //either load directly from existing journal - if(input.endsWith(".jnl")) { - inputDB = input; - }else { - //or make sure that the journal file provided is cleared out and ready - File i = new File(input); - if(i.exists()) { - //remove anything that existed earlier - File bgdb = new File(inputDB); - if(bgdb.exists()) { - bgdb.delete(); - } - } - } - //make the manager - LOGGER.info("Setting up model manager and initializing rules for Arachne reasoner"); - UndoAwareMolecularModelManager m3 = new UndoAwareMolecularModelManager(tbox_ontology, curieHandler, modelIdPrefix, inputDB, null, go_lego_journal_file, true); - //if provided a directory as input, load them ttl files into the manager - File i = new File(input); - if(i.exists()&&!input.endsWith(".jnl")) { - if(i.isDirectory()) { - LOGGER.info("Loading models from " + i.getAbsolutePath()); - Set model_iris = new HashSet(); - FileUtils.listFiles(i, null, true).parallelStream().parallel().forEach(file-> { - if(file.getName().endsWith(".ttl")||file.getName().endsWith("owl")) { - try { - String modeluri = m3.importModelToDatabase(file, true); - if(modeluri==null) { - LOGGER.error("Null model IRI: "+modeluri+" file: "+file); - } - else if(!model_iris.add(modeluri)) { - LOGGER.error("Multiple models with same IRI: "+modeluri+" file: "+file+" file: "+modelid_filename.get(modeluri)); - }else { - modelid_filename.put(modeluri, file.getName()); - } - } catch (OWLOntologyCreationException | RepositoryException | RDFParseException - | RDFHandlerException | IOException e) { - // TODO Auto-generated catch block - e.printStackTrace(); - } - } - }); - }else {//just load the one provided 
- LOGGER.info("Loading " + i); - try { - m3.importModelToDatabase(i, true); - } catch (OWLOntologyCreationException | RepositoryException | RDFParseException - | RDFHandlerException | IOException e) { - // TODO Auto-generated catch block - e.printStackTrace(); - } - } - LOGGER.info("loaded files into blazegraph journal: "+input); - } - //models ready - //now set up shex validator - if(shexpath==null) { - //fall back on downloading from shapes repo - URL shex_schema_url; - try { - shex_schema_url = new URL(shexFileUrl); - shexpath = "./go-cam-schema.shex"; - File shex_schema_file = new File(shexpath); - org.apache.commons.io.FileUtils.copyURLToFile(shex_schema_url, shex_schema_file); - System.err.println("-s .No shex schema provided, using: "+shexFileUrl); - } catch (MalformedURLException e) { - // TODO Auto-generated catch block - e.printStackTrace(); - } catch (IOException e) { - // TODO Auto-generated catch block - e.printStackTrace(); - } - } - if(shapemappath==null) { - URL shex_map_url; - try { - shex_map_url = new URL(goshapemapFileUrl); - shapemappath = "./go-cam-shapes.shapeMap"; - File shex_map_file = new File(shapemappath); - org.apache.commons.io.FileUtils.copyURLToFile(shex_map_url, shex_map_file); - System.err.println("-m .No shape map file provided, using: "+goshapemapFileUrl); - } catch (MalformedURLException e) { - // TODO Auto-generated catch block - e.printStackTrace(); - } catch (IOException e) { - // TODO Auto-generated catch block - e.printStackTrace(); - } - } - LOGGER.info("making shex validator: "+shexpath+" "+shapemappath+" "+curieHandler+" "); - MinervaShexValidator shex = null; - try { - shex = new MinervaShexValidator(shexpath, shapemappath, curieHandler, m3.getGolego_repo()); - } catch (Exception e1) { - // TODO Auto-generated catch block - e1.printStackTrace(); - } - - if(checkShex) { - shex.setActive(true); - }else { - shex.setActive(false); - } - - //shex validator is ready, now build the inference provider (which provides access to the shex validator and provides inferences useful for shex) - String reasonerOpt = "arachne"; - LOGGER.info("Building OWL inference provider: "+reasonerOpt); - InferenceProviderCreator ipc = StartUpTool.createInferenceProviderCreator(reasonerOpt, m3, shex); - LOGGER.info("Validating models: "+reasonerOpt); - - //Set up all the report files. 
- String basic_output_file = outputFolder+"main_report.txt"; - String explanations_file = outputFolder+"explanations.txt"; - String activity_output_file = outputFolder+"activity_report.txt"; - if(outputFolder!=null) { - try { - //valid or not - FileWriter basic_shex_output = new FileWriter(basic_output_file, false); - basic_shex_output.write("filename\tmodel_title\tmodel_url\tmodelstate\tcontributor\tprovider\tdate\tOWL_consistent\tshex_valid\tshex_meta_problem\tshex_data_problem\tvalidation_time_milliseconds\taxioms\tn_rows_gpad\t"); - basic_shex_output.write(GoCamModelStats.statsHeader()+"\n"); - basic_shex_output.close(); - //tab delimited explanations for failures - FileWriter explanations = new FileWriter(explanations_file, false); - explanations.write("filename\tmodel_title\tmodel_iri\tnode\tNode_types\tproperty\tIntended_range_shapes\tobject\tObject_types\tObject_shapes\n"); - explanations.close(); - //tab delimited summary of properties of activity units - FileWriter activity_output = new FileWriter(activity_output_file, false); - activity_output.write("filename\tmodel_title\tmodel_url\tmodelstate\tcontributor\tprovider\tdate\tactivity_iri\tactivity_xref\tactivity_label\tcomplete\tinputs\toutputs\tenablers\tlocations\tcausal upstream\tcausal downstream\tpart of n BP\tMF\tBP\n"); - activity_output.close(); - } catch (IOException e) { - // TODO Auto-generated catch block - e.printStackTrace(); - } - } - //this will generate the json file used for the go rules report for the pipeline - BatchPipelineValidationReport pipe_report = null; - Set owl_errors = new HashSet(); - Set shex_errors = new HashSet(); - pipe_report = new BatchPipelineValidationReport(); - try { - pipe_report.setNumber_of_models(m3.getAvailableModelIds().size()); - } catch (IOException e1) { - // TODO Auto-generated catch block - e1.printStackTrace(); - } - int bad_models = 0; int good_models = 0; - final boolean shex_output = checkShex; - - //only used if OWL reasoning report is requested - ReasonerReport reasoner_report = null; - if(run_reasoner_report) { - reasoner_report = initReasonerReport(outputFolder); - } - //now process each gocam - try { - for(IRI modelIRI : m3.getAvailableModelIds()) { - long start = System.currentTimeMillis(); - String filename = modelid_filename.get(modelIRI.toString()); - boolean isConsistent = true; //OWL - boolean isConformant = true; //shex - if(filename !=null) { - LOGGER.info("processing "+filename+"\t"+modelIRI); - }else { - LOGGER.info("processing \t"+modelIRI); - } - //this is where everything actually happens - ModelContainer mc = m3.getModel(modelIRI); - OWLOntology gocam = mc.getAboxOntology(); - try { - //if a model does not have an import statement that links in an ontology that defines all of its classes and object properties - //or if the model does not define the classes and object properties itself, parsing problems will prevail - //this step makes sure that does not happen - gocam = CoreMolecularModelManager.fixBrokenObjectPropertiesAndAxioms(gocam); - } catch (OWLOntologyCreationException e) { - // TODO Auto-generated catch block - e.printStackTrace(); - } - LOGGER.info("preparing model stats..."); - //The GoCamModel code is used to capture model-level statistics such as 'how many causal relations are there?' 
- //This might be an area for a speed improvement if needed - GoCamModel gcm = new GoCamModel(gocam, m3); - String title = "title"; - if(gcm.getTitle()!=null) { - title = makeColSafe(gcm.getTitle()); - }else { - LOGGER.error("no title for "+filename); - } - //this is to make clickable links in reports - String link = modelIRI.toString().replace("http://model.geneontology.org/", "http://noctua.geneontology.org/editor/graph/gomodel:"); - if(modelIRI.toString().contains("R-HSA")) { - link = link.replace("noctua.geneontology", "noctua-dev.berkeleybop"); - } - String modelstate = makeColSafe(gcm.getModelstate()); - String contributor = makeColSafe(gcm.getContributors().toString()); - String date = makeColSafe(gcm.getDate()); - String provider = makeColSafe(gcm.getProvided_by().toString()); - pipe_report.setTaxa(gcm.getIn_taxon()); - LOGGER.info("model stats done for title: "+title); - int axioms = gocam.getAxiomCount(); - //add activity level statistics as a default - FileWriter activity_output = new FileWriter(activity_output_file, true); - for(ActivityUnit unit : gcm.getActivities()){ - activity_output.write(filename+"\t"+title+"\t"+link+"\t"+modelstate+"\t"+contributor+"\t"+provider+"\t"+date+"\t"+unit.getIndividual().getIRI().toString()+"\t"+unit.getXref()+"\t"+unit.getLabel()+"\t"); - activity_output.write(unit.isComplete()+"\t"+unit.getInputs().size()+"\t"+unit.getOutputs().size()+"\t"+unit.getEnablers().size()+"\t"+unit.getLocations().size()+ - "\t"+unit.getCausal_in().size()+"\t"+unit.getCausal_out().size()+"\t"+unit.getContaining_processes().size()+"\t"+unit.stringForClasses(unit.getDirect_types())+"\t"+unit.getURIsForConnectedBPs()+"\n"); - } - activity_output.close(); - - InferenceProvider ip = ipc.create(mc); - isConsistent = ip.isConsistent(); - //TODO re-use reasoner object from ip - //TODO this is another area that could be touched/removed for speed improvement - int n_rows_gpad = 0; - if(isConsistent) { - try { - Set gpad = new GPADSPARQLExport(curieHandler, m3.getLegacyRelationShorthandIndex(), m3.getTboxShorthandIndex(), m3.getGolego_repo().regulatorsToRegulated).getGPAD(m3.createInferredModel(modelIRI), modelIRI); - if(gpad!=null) { - n_rows_gpad = gpad.size(); - } - }catch(InconsistentOntologyException e) { - LOGGER.error("inconsistent ontology, can't make gpad"); - } - } - long done = System.currentTimeMillis(); - long milliseconds = (done-start); - //for rules report in pipeline - if(!ip.isConsistent()) { - String level = "ERROR"; - String model_id = curieHandler.getCuri(modelIRI); - String message = BatchPipelineValidationReport.getOwlMessage(); - int rule = BatchPipelineValidationReport.getOwlRule(); - ErrorMessage owl = new ErrorMessage(level, model_id, gcm.getIn_taxon(), message, rule); - owl_errors.add(owl); - } - if(!isConsistent) { - FileWriter explanations = new FileWriter(explanations_file, true); - explanations.write(filename+"\t"+title+"\t"+modelIRI+"\tOWL fail explanation: "+ip.getValidation_results().getOwlvalidation().getAsText()+"\n"); - explanations.close(); - } - //travis mode causes the system to exit when an invalid model is detected (unless shouldFail is on) - if(travisMode&&!isConsistent) { - if(!shouldFail) { - LOGGER.error(filename+"\t"+title+"\t"+modelIRI+"\tOWL:is inconsistent, quitting"); - System.exit(-1); - } - } - //basic is just one row per model - did it validate or not - FileWriter basic= new FileWriter(basic_output_file, true); - if(!shex_output) { - if(ip.isConsistent()) { - good_models++; - }else { - bad_models++; - } - }else{ - 
ValidationResultSet validations = ip.getValidation_results(); - isConformant = validations.allConformant(); - if(isConformant) { - good_models++; - }else { - bad_models++; - } - if(!validations.getShexvalidation().isConformant()) { - String level = "WARNING"; - String model_id = curieHandler.getCuri(modelIRI); - String message = BatchPipelineValidationReport.getShexMessage(); - int rule = BatchPipelineValidationReport.getShexRule(); - ErrorMessage shex_message = new ErrorMessage(level, model_id, gcm.getIn_taxon(), message, rule); - boolean include_explanations_in_json = true; //TODO set as a parameter - if(include_explanations_in_json) { - shex_message.setExplanations(validations); - } - shex_errors.add(shex_message); - FileWriter explanations = new FileWriter(explanations_file, true); - explanations.write(ip.getValidation_results().getShexvalidation().getAsTab(filename+"\t"+title+"\t"+modelIRI)); - explanations.close(); - } - if(travisMode) { - if(!isConformant&&!shouldFail) { - LOGGER.error(filename+"\t"+title+"\t"+modelIRI+"\tshex is nonconformant, quitting, explanation:\n"+ip.getValidation_results().getShexvalidation().getAsText()); - System.exit(-1); - }else if(isConformant&&shouldFail) { - LOGGER.error(filename+"\t"+title+"\t"+modelIRI+"\tshex validates, but it should not be, quitting"); - System.exit(-1); - } - } - //is it a metadata violation or data ? - boolean shex_meta_problem = false; - boolean shex_data_problem = false; - if(!validations.getShexvalidation().isConformant()) { - String model_curie = curieHandler.getCuri(modelIRI); - ValidationResultSet validationset = ip.getValidation_results(); - ShexValidationReport shex_report = validationset.getShexvalidation(); - Set violations = shex_report.getViolations(); - if(violations!=null) { - for(Violation v : violations) { - if(v.getNode().equals(model_curie)){ - shex_meta_problem = true; - }else { - shex_data_problem = true; - } - } - }else { - LOGGER.error("Invalid model but no violations reported"); - } - } - LOGGER.info(filename+"\t"+title+"\t"+modelIRI+"\tOWL:"+isConsistent+"\tshex:"+isConformant); - basic.write(filename+"\t"+title+"\t"+link+"\t"+modelstate+"\t"+contributor+"\t"+provider+"\t"+date+"\t"+isConsistent+"\t"+isConformant+"\t"+shex_meta_problem+"\t"+shex_data_problem+"\t"+milliseconds+"\t"+axioms+"\t"+ - n_rows_gpad+"\t"+ gcm.getGoCamModelStats().stats2cols()+"\n"); - } - basic.close(); - if(run_reasoner_report) { - addReasonerReport(outputFolder, gocam, ip, title, reasoner_report); - } - } - } catch (Exception e) { - // TODO Auto-generated catch block - e.printStackTrace(); - } - if(run_reasoner_report) { - summarizeReasonerReport(outputFolder, reasoner_report); - } - - pipe_report.setNumber_of_correct_models(good_models); - pipe_report.setNumber_of_models_in_error(bad_models); - pipe_report.getMessages().put(BatchPipelineValidationReport.getShexRuleString(), shex_errors); - pipe_report.getMessages().put(BatchPipelineValidationReport.getOwlRuleString(), owl_errors); - GsonBuilder builder = new GsonBuilder(); - Gson gson = builder.setPrettyPrinting().create(); - String json = gson.toJson(pipe_report); - try { - FileWriter pipe_json = new FileWriter(outputFolder+"gorules_report.json", false); - pipe_json.write(json); - pipe_json.close(); - } catch (IOException e) { - // TODO Auto-generated catch block - e.printStackTrace(); - } - m3.dispose(); - LOGGER.info("done with validation"); - } - - static class ReasonerReport { - Map term_asserted_instances_mapped = new HashMap(); - Map term_deepened_instances_mapped = new 
HashMap(); - Map term_asserted_instances_created = new HashMap(); - Map term_deepened_instances_created = new HashMap(); - } - - - private static ReasonerReport initReasonerReport(String outputFolder) { - String reasoner_report_file = outputFolder+"reasoner_report_all.txt"; - FileWriter reasoner_report; - try { - reasoner_report = new FileWriter(reasoner_report_file, false); - reasoner_report.write("title\tindividual\txref\tasserted\tinferred\n"); - reasoner_report.close(); - } catch (IOException e) { - // TODO Auto-generated catch block - e.printStackTrace(); - } - ReasonerReport report = new ReasonerReport(); - return report; - } - - private static ReasonerReport addReasonerReport(String outputFolder, OWLOntology gocam, InferenceProvider ip, String title, ReasonerReport report) throws IOException { - String reasoner_report_file = outputFolder+"reasoner_report_all.txt"; - FileWriter reasoner_report = new FileWriter(reasoner_report_file, true); - Set individuals = gocam.getIndividualsInSignature(); - for (OWLNamedIndividual individual : individuals) { - //what kind of individual - mapped or created. mapped have xrefs, created do not. - String xref = "none"; - for(OWLAnnotation anno : EntitySearcher.getAnnotations(individual, gocam)){ - if(anno.getProperty().getIRI().toString().equals("http://www.geneontology.org/formats/oboInOwl#hasDbXref")) { - xref = anno.getValue().asLiteral().get().getLiteral(); - } - } - - Collection asserted_ce = EntitySearcher.getTypes(individual, gocam); - Set asserted = new HashSet(); - for(OWLClassExpression ce : asserted_ce) { - if(!ce.isAnonymous()) { - OWLClass a = ce.asOWLClass(); - if(a.isBuiltIn() == false) { - asserted.add(a); - } - } - } - Set inferred_direct = new HashSet<>(); - Set flattened = ip.getTypes(individual); - for (OWLClass cls : flattened) { - if (cls.isBuiltIn() == false) { - inferred_direct.add(cls); - } - } - inferred_direct.removeAll(asserted); - reasoner_report.write(title+"\t"+individual.getIRI()+"\t"+xref+"\t"+asserted+"\t"+inferred_direct+"\n"); - if(asserted!=null) { - for(OWLClass go : asserted) { - if(xref.equals("none")) { - Integer n = report.term_asserted_instances_created.get(go.toString()); - if(n==null) { - n = 0; - } - n = n+1; - report.term_asserted_instances_created.put(go.toString(), n); - - if(inferred_direct!=null&&inferred_direct.size()>0) { - Integer deepened = report.term_deepened_instances_created.get(go.toString()); - if(deepened==null) { - deepened = 0; - } - deepened = deepened+1; - report.term_deepened_instances_created.put(go.toString(), deepened); - } - }else { - Integer n = report.term_asserted_instances_mapped.get(go.toString()); - if(n==null) { - n = 0; - } - n = n+1; - report.term_asserted_instances_mapped.put(go.toString(), n); - - if(inferred_direct!=null&&inferred_direct.size()>0) { - Integer deepened = report.term_deepened_instances_mapped.get(go.toString()); - if(deepened==null) { - deepened = 0; - } - deepened = deepened+1; - report.term_deepened_instances_mapped.put(go.toString(), deepened); - } - } - } - } - } - reasoner_report.close(); - return report; - } - - private static void summarizeReasonerReport(String outputFolder, ReasonerReport report) { - String reasoner_report_summary_file = outputFolder+"reasoner_report_summary.txt"; - FileWriter reasoner_report_summary; - try { - reasoner_report_summary = new FileWriter(reasoner_report_summary_file, false); - reasoner_report_summary.write("asserted GO term\tmapped individual count\tmapped N deepened\tcreated individual count\tcreated N deepened\n"); 
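The per-term bookkeeping in addReasonerReport above is done with null-checked Integer maps. As a side note, not part of the patch, the same counters can be written more compactly with Map.merge; a self-contained sketch of that idiom:

import java.util.HashMap;
import java.util.Map;

class ReasonerReportCounters {
    // Same bookkeeping as ReasonerReport above, one map per report column.
    final Map<String, Integer> assertedMapped = new HashMap<>();
    final Map<String, Integer> deepenedMapped = new HashMap<>();

    // Called once per asserted GO class on a mapped (xref-bearing) individual.
    void countMapped(String goTerm, boolean wasDeepened) {
        assertedMapped.merge(goTerm, 1, Integer::sum);
        if (wasDeepened) {
            deepenedMapped.merge(goTerm, 1, Integer::sum);
        }
    }
}

The created-individual counters would follow the same pattern.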
- Set terms = new HashSet(); - terms.addAll(report.term_asserted_instances_mapped.keySet()); - terms.addAll(report.term_asserted_instances_created.keySet()); - for(String goterm : terms) { - int n_deepened_mapped = 0; int n_mapped = 0; - if(report.term_asserted_instances_mapped.containsKey(goterm)) { - n_mapped = report.term_asserted_instances_mapped.get(goterm); - } - - if(report.term_deepened_instances_mapped.get(goterm)!=null) { - n_deepened_mapped = report.term_deepened_instances_mapped.get(goterm); - } - int n_deepened_created = 0; int n_created = 0; - if(report.term_asserted_instances_created.containsKey(goterm)) { - n_created = report.term_asserted_instances_created.get(goterm); - } - if(report.term_deepened_instances_created.get(goterm)!=null) { - n_deepened_created = report.term_deepened_instances_created.get(goterm); - } - reasoner_report_summary.write(goterm+"\t"+n_mapped+"\t"+n_deepened_mapped+"\t"+n_created+"\t"+n_deepened_created+"\n"); - } - reasoner_report_summary.close(); - } catch (IOException e) { - // TODO Auto-generated catch block - e.printStackTrace(); - } - } - - - private static String makeColSafe(String text) { - text = text.replaceAll("\n", " "); - text = text.replaceAll("\r", " "); - text = text.replaceAll("\t", " "); - return text; - } - - public static void addTaxonMetaData(String go_cam_journal, String go_lego_journal_file) throws OWLOntologyCreationException, IOException { - String modelIdPrefix = "http://model.geneontology.org/"; - OWLOntology dummy = OWLManager.createOWLOntologyManager().createOntology(IRI.create("http://example.org/dummy")); - CurieHandler curieHandler = new MappedCurieHandler(); - BlazegraphMolecularModelManager m3 = new BlazegraphMolecularModelManager<>(dummy, curieHandler, modelIdPrefix, go_cam_journal, null, go_lego_journal_file, true); - m3.addTaxonMetadata(); - } - - public static void cleanGoCams(String input_dir, String output_dir) { - OWLOntologyManager m = OWLManager.createOWLOntologyManager(); - File directory = new File(input_dir); - boolean ignore_imports = true; - if(directory.isDirectory()) { - for(File file : directory.listFiles()) { - if(file.getName().endsWith("ttl")) { - System.out.println("fixing "+file.getAbsolutePath()); - final IRI modelFile = IRI.create(file.getAbsoluteFile()); - OWLOntology o; - try { - o = CoreMolecularModelManager.loadOntologyDocumentSource(new IRIDocumentSource(modelFile), ignore_imports, m); - //in case the reader was confused by the missing import, fix declarations - o = CoreMolecularModelManager.fixBrokenObjectPropertiesAndAxioms(o); - //clean the model - OWLOntology cleaned_ont = CoreMolecularModelManager.removeDeadAnnotationsAndImports(o); - //saved the blessed ontology - OWLDocumentFormat owlFormat = new TurtleDocumentFormat(); - m.setOntologyFormat(cleaned_ont, owlFormat); - String cleaned_ont_file = output_dir+file.getName(); - try { - m.saveOntology(cleaned_ont, new FileOutputStream(cleaned_ont_file)); - } catch (OWLOntologyStorageException | FileNotFoundException e) { - // TODO Auto-generated catch block - e.printStackTrace(); - } - } catch (OWLOntologyCreationException e1) { - // TODO Auto-generated catch block - e1.printStackTrace(); - } - } - } - } - } - - - public static void printVersion() throws Exception { - printManifestEntry("git-revision-sha1", "UNKNOWN"); - printManifestEntry("git-revision-url", "UNKNOWN"); - printManifestEntry("git-branch", "UNKNOWN"); - printManifestEntry("git-dirty", "UNKNOWN"); - } - - private static String printManifestEntry(String key, String 
defaultValue) { - String value = owltools.version.VersionInfo.getManifestVersion(key); - if (value == null || value.isEmpty()) { - value = defaultValue; - } - System.out.println(key+"\t"+value); - return value; - } - - public static void reportSystemParams() { - /* Total number of processors or cores available to the JVM */ - LOGGER.info("Available processors (cores): " + - Runtime.getRuntime().availableProcessors()); - - /* Total amount of free memory available to the JVM */ - LOGGER.info("Free memory (m bytes): " + - Runtime.getRuntime().freeMemory()/1048576); - - /* This will return Long.MAX_VALUE if there is no preset limit */ - long maxMemory = Runtime.getRuntime().maxMemory()/1048576; - /* Maximum amount of memory the JVM will attempt to use */ - LOGGER.info("Maximum memory (m bytes): " + - (maxMemory == Long.MAX_VALUE ? "no limit" : maxMemory)); - - /* Total memory currently in use by the JVM */ - LOGGER.info("Total memory (m bytes): " + - Runtime.getRuntime().totalMemory()/1048576); - - /* Get a list of all filesystem roots on this system */ - File[] roots = File.listRoots(); - - /* For each filesystem root, print some info */ - for (File root : roots) { - LOGGER.info("File system root: " + root.getAbsolutePath()); - LOGGER.info("Total space (bytes): " + root.getTotalSpace()); - LOGGER.info("Free space (bytes): " + root.getFreeSpace()); - LOGGER.info("Usable space (bytes): " + root.getUsableSpace()); - } - } + private static final Logger LOGGER = Logger.getLogger(CommandLineInterface.class); + + public static void main(String[] args) { + + reportSystemParams(); + Options main_options = new Options(); + OptionGroup methods = new OptionGroup(); + methods.setRequired(true); + Option dump = Option.builder() + .longOpt("dump-owl-models") + .desc("export OWL GO-CAM models from journal") + .hasArg(false) + .build(); + methods.addOption(dump); + + Option merge_ontologies = Option.builder() + .longOpt("merge-ontologies") + .desc("Merge owl ontologies") + .hasArg(false) + .build(); + methods.addOption(merge_ontologies); + Option import_owl = Option.builder() + .longOpt("import-owl-models") + .desc("import OWL GO-CAM models into journal") + .hasArg(false) + .build(); + methods.addOption(import_owl); + Option import_tbox_ontologies = Option.builder() + .longOpt("import-tbox-ontologies") + .desc("import OWL tbox ontologies into journal") + .hasArg(false) + .build(); + methods.addOption(import_tbox_ontologies); + + Option add_taxon_metadata = Option.builder() + .longOpt("add-taxon-metadata") + .desc("add taxon associated with genes in each model as an annotation on the model") + .hasArg(false) + .build(); + methods.addOption(add_taxon_metadata); + + Option clean_gocams = Option.builder() + .longOpt("clean-gocams") + .desc("remove import statements, add property declarations, remove json-model annotation") + .hasArg(false) + .build(); + methods.addOption(clean_gocams); + + Option sparql = Option.builder() + .longOpt("sparql-update") + .desc("update the blazegraph journal with the given sparql statement") + .hasArg(false) + .build(); + methods.addOption(sparql); + Option json = Option.builder() + .longOpt("owl-lego-to-json") + .desc("Given a GO-CAM OWL file, make its minerva json represention") + .hasArg(false) + .build(); + methods.addOption(json); + Option gpad = Option.builder() + .longOpt("lego-to-gpad-sparql") + .desc("Given a GO-CAM journal, export GPAD representation for all the go-cams") + .hasArg(false) + .build(); + methods.addOption(gpad); + Option version = Option.builder() + 
.longOpt("version") + .desc("Print the version of the minerva stack used here. Extracts this from JAR file.") + .hasArg(false) + .build(); + methods.addOption(version); + Option validate = Option.builder() + .longOpt("validate-go-cams") + .desc("Check a collection of go-cam files or a journal for valid semantics (owl) and structure (shex)") + .hasArg(false) + .build(); + methods.addOption(validate); + + main_options.addOptionGroup(methods); + + CommandLineParser parser = new DefaultParser(); + try { + CommandLine cmd = parser.parse(main_options, args, true); + + if (cmd.hasOption("add-taxon-metadata")) { + Options add_taxon_options = new Options(); + add_taxon_options.addOption(add_taxon_metadata); + add_taxon_options.addOption("j", "journal", true, "This is the go-cam journal that will be updated with taxon annotations."); + add_taxon_options.addOption("ontojournal", "ontojournal", true, "Specify a blazegraph journal file containing the merged, pre-reasoned tbox aka go-lego.owl"); + cmd = parser.parse(add_taxon_options, args, false); + String journalFilePath = cmd.getOptionValue("j"); //--journal + String ontojournal = cmd.getOptionValue("ontojournal"); //--folder + addTaxonMetaData(journalFilePath, ontojournal); + } + + if (cmd.hasOption("clean-gocams")) { + Options clean_options = new Options(); + clean_options.addOption(clean_gocams); + clean_options.addOption("i", "input", true, "This is the directory of gocam files to clean."); + clean_options.addOption("o", "output", true, "This is the directory of cleaned gocam files that are produced."); + cmd = parser.parse(clean_options, args, false); + cleanGoCams(cmd.getOptionValue("i"), cmd.getOptionValue("o")); + } + + if (cmd.hasOption("import-tbox-ontologies")) { + Options import_tbox_options = new Options(); + import_tbox_options.addOption(import_tbox_ontologies); + import_tbox_options.addOption("j", "journal", true, "Sets the Blazegraph journal file for the database"); + import_tbox_options.addOption("f", "file", true, "Sets the input file containing the ontology to load"); + import_tbox_options.addOption("r", "reset", false, "If present, will clear out the journal, otherwise adds to it"); + cmd = parser.parse(import_tbox_options, args, false); + String journalFilePath = cmd.getOptionValue("j"); //--journal + String inputFile = cmd.getOptionValue("f"); //--folder + importOWLOntologyIntoJournal(journalFilePath, inputFile, cmd.hasOption("r")); + } + if (cmd.hasOption("merge-ontologies")) { + Options merge_options = new Options(); + merge_options.addOption(merge_ontologies); + merge_options.addOption("i", "input", true, "The input folder containing ontologies to merge"); + merge_options.addOption("o", "output", true, "The file to write the ontology to"); + merge_options.addOption("u", "iri", true, "The base iri for the merged ontology"); + merge_options.addOption("r", "reason", false, "Add inferences to the merged ontology"); + cmd = parser.parse(merge_options, args, false); + buildMergedOwlOntology(cmd.getOptionValue("i"), cmd.getOptionValue("o"), cmd.getOptionValue("u"), cmd.hasOption("r")); + } + + if (cmd.hasOption("dump-owl-models")) { + Options dump_options = new Options(); + dump_options.addOption(dump); + dump_options.addOption("j", "journal", true, "Sets the Blazegraph journal file for the database"); + dump_options.addOption("f", "folder", true, "Sets the output folder the GO-CAM model files"); + dump_options.addOption("p", "model-id-prefix", true, "prefix for GO-CAM model ids"); + cmd = parser.parse(dump_options, args, false); + 
String journalFilePath = cmd.getOptionValue("j"); //--journal + String outputFolder = cmd.getOptionValue("f"); //--folder + String modelIdPrefix = cmd.getOptionValue("p"); //--prefix + modelsToOWL(journalFilePath, outputFolder, modelIdPrefix); + } else if (cmd.hasOption("import-owl-models")) { + Options import_options = new Options(); + import_options.addOption(import_owl); + import_options.addOption("j", "journal", true, "Sets the Blazegraph journal file for the database"); + import_options.addOption("f", "folder", true, "Sets the input folder the GO-CAM model files"); + cmd = parser.parse(import_options, args, false); + String journalFilePath = cmd.getOptionValue("j"); //--journal + String outputFolder = cmd.getOptionValue("f"); //--folder + importOWLModels(journalFilePath, outputFolder); + } else if (cmd.hasOption("sparql-update")) { + Options sparql_options = new Options(); + sparql_options.addOption(sparql); + sparql_options.addOption("j", "journal", true, "Sets the Blazegraph journal file for the database"); + sparql_options.addOption("f", "file", true, "Sets the file containing a SPARQL update"); + cmd = parser.parse(sparql_options, args, false); + String journalFilePath = cmd.getOptionValue("j"); //--journal + String file = cmd.getOptionValue("f"); + sparqlUpdate(journalFilePath, file); + } else if (cmd.hasOption("owl-lego-to-json")) { + Options json_options = new Options(); + json_options.addOption(json); + json_options.addOption("i", "OWLFile", true, "Input GO-CAM OWL file"); + json_options.addOption("o", "JSONFILE", true, "Output JSON file"); + OptionGroup format = new OptionGroup(); + Option pretty = Option.builder() + .longOpt("pretty-json") + .desc("pretty json format") + .hasArg(false) + .build(); + format.addOption(pretty); + Option compact = Option.builder() + .longOpt("compact-json") + .desc("compact json format") + .hasArg(false) + .build(); + format.addOption(compact); + json_options.addOptionGroup(format); + cmd = parser.parse(json_options, args, false); + String input = cmd.getOptionValue("i"); + String output = cmd.getOptionValue("o"); + boolean usePretty = true; + if (cmd.hasOption("compact-json")) { + usePretty = false; + } + owl2LegoJson(input, output, usePretty); + } else if (cmd.hasOption("lego-to-gpad-sparql")) { + Options gpad_options = new Options(); + gpad_options.addOption(gpad); + gpad_options.addOption("i", "input", true, "Sets the Blazegraph journal file for the database"); + gpad_options.addOption("o", "gpad-output", true, "Sets the output location for the GPAD"); + gpad_options.addOption("p", "model-id-prefix", true, "prefix for GO-CAM model ids"); + gpad_options.addOption("c", "model-id-curie", true, "prefix for GO-CAM curies"); + gpad_options.addOption("ont", "ontology", true, "IRI of tbox ontology for classification - usually default go-lego.owl"); + gpad_options.addOption("cat", "catalog", true, "Catalog file for tbox ontology. " + + "Use this to specify local copies of the ontology and or its imports to " + + "speed and control the process. 
If not used, will download the tbox and all its imports."); + gpad_options.addOption("ontojournal", "ontojournal", true, "Specify a blazegraph journal file containing the merged, pre-reasoned tbox aka go-lego.owl"); + cmd = parser.parse(gpad_options, args, false); + String inputDB = cmd.getOptionValue("input"); + String gpadOutputFolder = cmd.getOptionValue("gpad-output"); + String modelIdPrefix = cmd.getOptionValue("model-id-prefix"); + String modelIdcurie = cmd.getOptionValue("model-id-curie"); + String ontologyIRI = cmd.getOptionValue("ontology"); + String catalog = cmd.getOptionValue("catalog"); + String go_lego_journal_file = null; + if (cmd.hasOption("ontojournal")) { + go_lego_journal_file = cmd.getOptionValue("ontojournal"); + } + if (go_lego_journal_file == null) { + System.err.println("Missing -- ontojournal . Need to specify location for blazegraph journal file containing the merged go-lego tbox (neo, GO-plus, etc..). If a journal does not exist at that location, the tbox ontology will be used to initialize one."); + System.exit(-1); + } + legoToAnnotationsSPARQL(modelIdPrefix, modelIdcurie, inputDB, gpadOutputFolder, ontologyIRI, catalog, go_lego_journal_file); + } else if (cmd.hasOption("version")) { + printVersion(); + } else if (cmd.hasOption("validate-go-cams")) { + Options validate_options = new Options(); + validate_options.addOption(validate); + validate_options.addOption("i", "input", true, "Either a blazegraph journal or a folder with go-cams in it"); + validate_options.addOption("shex", "shex", false, "If present, will execute shex validation"); + validate_options.addOption("owl", "owl", false, "If present, will execute shex validation"); + validate_options.addOption("r", "report-folder", true, "Folder where output files will appear"); + validate_options.addOption("p", "model-id-prefix", true, "prefix for GO-CAM model ids"); + validate_options.addOption("cu", "model-id-curie", true, "prefix for GO-CAM curies"); + validate_options.addOption("ont", "ontology", true, "IRI of tbox ontology - usually default go-lego.owl"); + validate_options.addOption("c", "catalog", true, "Catalog file for tbox ontology. " + + "Use this to specify local copies of the ontology and or its imports to " + + "speed and control the process. If not used, will download the tbox and all its imports."); + validate_options.addOption("shouldfail", "shouldfail", false, "When used in travis mode for tests, shouldfail " + + "parameter will allow a successful run on a folder that only contains incorrect models."); + validate_options.addOption("t", "travis", false, "If travis, then the program will stop upon a failed " + + "validation and report an error. Otherwise it will continue to test all the models."); + validate_options.addOption("m", "shapemap", true, "Specify a shapemap file. Otherwise will download from go_shapes repo."); + validate_options.addOption("s", "shexpath", true, "Specify a shex schema file. Otherwise will download from go_shapes repo."); + validate_options.addOption("ontojournal", "ontojournal", true, "Specify a blazegraph journal file containing the merged, pre-reasoned tbox aka go-lego.owl"); + validate_options.addOption("reasoner_report", "reasoner_report", false, "Add a report with reasoning results to the output of the validation. 
"); + + + cmd = parser.parse(validate_options, args, false); + String input = cmd.getOptionValue("input"); + String outputFolder = cmd.getOptionValue("report-folder"); + String shexpath = cmd.getOptionValue("s"); + String shapemappath = cmd.getOptionValue("shapemap"); + + String ontologyIRI = "http://purl.obolibrary.org/obo/go/extensions/go-lego.owl"; + if (cmd.hasOption("ontology")) { + ontologyIRI = cmd.getOptionValue("ontology"); + } + String catalog = cmd.getOptionValue("catalog"); + String modelIdPrefix = "http://model.geneontology.org/"; + if (cmd.hasOption("model-id-prefix")) { + modelIdPrefix = cmd.getOptionValue("model-id-prefix"); + } + String modelIdcurie = "gomodel"; + if (cmd.hasOption("model-id-curie")) { + modelIdcurie = cmd.getOptionValue("model-id-curie"); + } + boolean travisMode = false; + if (cmd.hasOption("travis")) { + travisMode = true; + } + boolean shouldFail = false; + if (cmd.hasOption("shouldfail")) { + shouldFail = true; + } + boolean checkShex = false; + if (cmd.hasOption("shex")) { + checkShex = true; + } + String go_lego_journal_file = null; + if (cmd.hasOption("ontojournal")) { + go_lego_journal_file = cmd.getOptionValue("ontojournal"); + } + if (go_lego_journal_file == null) { + System.err.println("Missing -- ontojournal . Need to specify blazegraph journal file containing the merged go-lego tbox (neo, GO-plus, etc..)"); + System.exit(-1); + } + boolean run_reasoner_report = false; + if (cmd.hasOption("reasoner_report")) { + run_reasoner_report = true; + } + validateGoCams(input, outputFolder, ontologyIRI, catalog, modelIdPrefix, modelIdcurie, shexpath, shapemappath, travisMode, shouldFail, checkShex, go_lego_journal_file, run_reasoner_report); + } + } catch (ParseException exp) { + System.out.println("Parameter parse exception. Note that the first parameter must be one of: " + + "[--validate-go-cams, --dump-owl-models, --import-owl-models, --sparql-update, --owl-lego-to-json, --lego-to-gpad-sparql, --version, --update-gene-product-types]" + + "\nSubsequent parameters are specific to each top level command. " + + "\nError message: " + exp.getMessage()); + System.exit(-1); + } catch (Exception e) { + e.printStackTrace(); + //explicitly exiting to inform travis of failure. + System.exit(-1); + } + } + + /** + * Given a blazegraph journal with go-cams in it, write them all out as OWL files. 
+ * cli --dump-owl-models + * + * @param journalFilePath + * @param outputFolder + * @param modelIdPrefix + * @throws Exception + */ + public static void modelsToOWL(String journalFilePath, String outputFolder, String modelIdPrefix) throws Exception { + if (modelIdPrefix == null) { + modelIdPrefix = "http://model.geneontology.org/"; + } + + // minimal inputs + if (journalFilePath == null) { + System.err.println("No journal file was configured."); + System.exit(-1); + return; + } + if (outputFolder == null) { + System.err.println("No output folder was configured."); + System.exit(-1); + return; + } + + OWLOntology dummy = OWLManager.createOWLOntologyManager().createOntology(IRI.create("http://example.org/dummy")); + CurieHandler curieHandler = new MappedCurieHandler(); + BlazegraphMolecularModelManager m3 = new BlazegraphMolecularModelManager<>(dummy, curieHandler, modelIdPrefix, journalFilePath, outputFolder, null, false); + m3.dumpAllStoredModels(); + m3.dispose(); + } + + /** + * Load the go-cam files in the input folder into the journal + * cli import-owl-models + * + * @param journalFilePath + * @param inputFolder + * @throws Exception + */ + public static void importOWLModels(String journalFilePath, String inputFolder) throws Exception { + // minimal inputs + if (journalFilePath == null) { + System.err.println("No journal file was configured."); + System.exit(-1); + return; + } + if (inputFolder == null) { + System.err.println("No input folder was configured."); + System.exit(-1); + return; + } + int total_files = 0; + OWLOntology dummy = OWLManager.createOWLOntologyManager().createOntology(IRI.create("http://example.org/dummy")); + String modelIdPrefix = "http://model.geneontology.org/"; // this will not be used for anything + CurieHandler curieHandler = new MappedCurieHandler(); + BlazegraphMolecularModelManager m3 = new BlazegraphMolecularModelManager<>(dummy, curieHandler, modelIdPrefix, journalFilePath, null, null, false); + //in case of update rather than whole new journal + Set stored = new HashSet(m3.getStoredModelIds()); + LOGGER.info("loading gocams from " + inputFolder); + //for (File file : FileUtils.listFiles(new File(inputFolder), null, true)) { + File i = new File(inputFolder); + if (i.exists()) { + if (i.isDirectory()) { + total_files = i.listFiles().length; + FileUtils.listFiles(i, null, true).parallelStream().parallel().forEach(file -> { + if (file.getName().endsWith("ttl")) { + java.util.Optional irio; + try { + irio = m3.scanForOntologyIRI(file); + IRI iri = null; + if (irio.isPresent()) { + iri = IRI.create(irio.get()); + } + //is it in there already? 
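The import loop above tracks already-imported model IRIs in a plain HashSet while iterating over the files with a parallel stream, and the "is it in there already?" check reads and writes that set from multiple threads. As a side note, not part of this patch, a concurrent set makes that check safe; the class and method names below are made up for illustration:

import java.util.Set;
import java.util.concurrent.ConcurrentHashMap;

final class ImportDedup {
    // Thread-safe record of model IRIs seen so far; safe to share across
    // the parallelStream() used by importOWLModels.
    private final Set<String> stored = ConcurrentHashMap.newKeySet();

    /** Returns true exactly once per IRI, so callers can skip duplicates. */
    boolean firstTimeSeen(String modelIri) {
        return stored.add(modelIri);
    }
}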
+ if (stored.contains(iri)) { + LOGGER.error("Attempted to load gocam ttl file into database but gocam with that iri already exists, skipping " + file + " " + iri); + } else { + stored.add(iri); + m3.importModelToDatabase(file, true); + } + } catch (RDFParseException | RDFHandlerException | IOException e1) { + // TODO Auto-generated catch block + e1.printStackTrace(); + } catch (OWLOntologyCreationException e) { + // TODO Auto-generated catch block + e.printStackTrace(); + } catch (RepositoryException e) { + // TODO Auto-generated catch block + e.printStackTrace(); + } + } else { + LOGGER.info("Ignored for not ending with .ttl" + file); + } + }); + } + } + m3.dispose(); + LOGGER.info("done loading gocams, loaded: " + stored.size() + " out of: " + total_files + " files"); + } + + /** + * @param journalFilePath + * @param inputFolder + * @throws Exception + */ + public static void buildMergedOwlOntology(String inputFolder, String outputfile, String base_iri, boolean addInferences) throws Exception { + // minimal inputs + if (outputfile == null) { + System.err.println("No output file was configured."); + System.exit(-1); + return; + } + if (inputFolder == null) { + System.err.println("No input folder was configured."); + System.exit(-1); + return; + } + if (base_iri == null) { + System.err.println("No base iri was configured."); + System.exit(-1); + return; + } + OWLOntologyManager ontman = OWLManager.createOWLOntologyManager(); + OWLDataFactory df = ontman.getOWLDataFactory(); + OWLOntology merged = ontman.createOntology(IRI.create(base_iri)); + for (File file : FileUtils.listFiles(new File(inputFolder), null, true)) { + LOGGER.info("Loading " + file); + if (file.getName().endsWith("ttl") || file.getName().endsWith("owl")) { + try { + OWLOntology ont = ontman.loadOntologyFromOntologyDocument(file); + ontman.addAxioms(merged, ont.getAxioms()); + } catch (OWLOntologyAlreadyExistsException e) { + LOGGER.error("error loading already loaded ontology: " + file); + } + } else { + LOGGER.info("Ignored for not ending with .ttl or .owl " + file); + } + } + if (addInferences) { + LOGGER.info("Running reasoner"); + //OWLReasonerFactory reasonerFactory = new WhelkOWLReasonerFactory(); + //WhelkOWLReasoner reasoner = (WhelkOWLReasoner)reasonerFactory.createReasoner(merged); + OWLReasonerFactory reasonerFactory = new StructuralReasonerFactory(); + OWLReasoner reasoner = reasonerFactory.createReasoner(merged); + InferredOntologyGenerator gen = new InferredOntologyGenerator(reasoner); + gen.fillOntology(df, merged); + } + try { + ontman.saveOntology(merged, new FileOutputStream(new File(outputfile))); + } catch (OWLOntologyStorageException e) { + // TODO Auto-generated catch block + e.printStackTrace(); + } catch (FileNotFoundException e) { + // TODO Auto-generated catch block + e.printStackTrace(); + } + } + + /** + * Load the go-cam files in the input folder into the journal + * cli import-owl-models + * + * @param journalFilePath + * @param inputFolder + * @throws Exception + */ + public static void importOWLOntologyIntoJournal(String journalFilePath, String inputFile, boolean reset) throws Exception { + // minimal inputs + if (journalFilePath == null) { + System.err.println("No journal file was configured."); + System.exit(-1); + return; + } + if (inputFile == null) { + System.err.println("No input file was configured."); + System.exit(-1); + return; + } + + BlazegraphOntologyManager man = new BlazegraphOntologyManager(journalFilePath, false); + String iri_for_ontology_graph = 
"http://geneontology.org/go-lego-graph"; + man.loadRepositoryFromOWLFile(new File(inputFile), iri_for_ontology_graph, reset); + } + + /** + * Updates the journal with the provided update sparql statement. + * cli parameter --sparql-update + * + * @param journalFilePath + * @param updateFile + * @throws OWLOntologyCreationException + * @throws IOException + * @throws RepositoryException + * @throws MalformedQueryException + * @throws UpdateExecutionException + */ + public static void sparqlUpdate(String journalFilePath, String updateFile) throws OWLOntologyCreationException, IOException, RepositoryException, MalformedQueryException, UpdateExecutionException { + // minimal inputs + if (journalFilePath == null) { + System.err.println("No journal file was configured."); + System.exit(-1); + return; + } + if (updateFile == null) { + System.err.println("No update file was configured."); + System.exit(-1); + return; + } + + String update = FileUtils.readFileToString(new File(updateFile), StandardCharsets.UTF_8); + Properties properties = new Properties(); + properties.load(CommandLineInterface.class.getResourceAsStream("/org/geneontology/minerva/blazegraph.properties")); + properties.setProperty(com.bigdata.journal.Options.FILE, journalFilePath); + + BigdataSail sail = new BigdataSail(properties); + BigdataSailRepository repository = new BigdataSailRepository(sail); + repository.initialize(); + BigdataSailRepositoryConnection conn = repository.getUnisolatedConnection(); + BlazegraphMutationCounter counter = new BlazegraphMutationCounter(); + conn.addChangeLog(counter); + conn.prepareUpdate(QueryLanguage.SPARQL, update).execute(); + int changes = counter.mutationCount(); + conn.removeChangeLog(counter); + System.out.println("\nApplied " + changes + " changes"); + conn.close(); + } + + /** + * Convert a GO-CAM owl file to a minerva json structure + * --owl-lego-to-json + * + * @param input + * @param output + * @param usePretty + * @throws Exception + */ + public static void owl2LegoJson(String input, String output, boolean usePretty) throws Exception { + + // minimal inputs + if (input == null) { + System.err.println("No input model was configured."); + System.exit(-1); + return; + } + if (output == null) { + System.err.println("No output file was configured."); + System.exit(-1); + return; + } + + // configuration + CurieHandler curieHandler = DefaultCurieHandler.getDefaultHandler(); + GsonBuilder gsonBuilder = new GsonBuilder(); + if (usePretty) { + gsonBuilder.setPrettyPrinting(); + } + Gson gson = gsonBuilder.create(); + + // process each model + if (LOGGER.isInfoEnabled()) { + LOGGER.info("Loading model from file: " + input); + } + OWLOntology model = null; + final JsonModel jsonModel; + ParserWrapper pw = new ParserWrapper(); + try { + + // load model + model = pw.parseOWL(IRI.create(new File(input).getCanonicalFile())); + InferenceProvider inferenceProvider = null; // TODO decide if we need reasoning + String modelId = null; + Optional ontologyIRI = model.getOntologyID().getOntologyIRI(); + if (ontologyIRI.isPresent()) { + modelId = curieHandler.getCuri(ontologyIRI.get()); + } + + // render json + final MolecularModelJsonRenderer renderer = new MolecularModelJsonRenderer(modelId, model, inferenceProvider, curieHandler); + jsonModel = renderer.renderModel(); + } finally { + if (model != null) { + pw.getManager().removeOntology(model); + model = null; + } + } + + // save as json string + final String json = gson.toJson(jsonModel); + final File outputFile = new File(output).getCanonicalFile(); 
+ try (OutputStream outputStream = new FileOutputStream(outputFile)) { + if (LOGGER.isInfoEnabled()) { + LOGGER.info("Saving json to file: " + outputFile); + } + IOUtils.write(json, outputStream); + } + } + + /** + * Output GPAD files via inference+SPARQL + * cli --lego-to-gpad-sparql + * + * @param modelIdPrefix + * @param modelIdcurie + * @param inputDB + * @param gpadOutputFolder + * @param ontologyIRI + * @throws Exception + */ + public static void legoToAnnotationsSPARQL(String modelIdPrefix, String modelIdcurie, String inputDB, String gpadOutputFolder, String ontologyIRI, String catalog, String go_lego_journal_file) throws Exception { + if (modelIdPrefix == null) { + modelIdPrefix = "http://model.geneontology.org/"; + } + if (modelIdcurie == null) { + modelIdcurie = "gomodel"; + } + if (inputDB == null) { + inputDB = "blazegraph.jnl"; + } + if (gpadOutputFolder == null) { + gpadOutputFolder = null; + } + if (ontologyIRI == null) { + ontologyIRI = "http://purl.obolibrary.org/obo/go/extensions/go-lego.owl"; + } + OWLOntologyManager ontman = OWLManager.createOWLOntologyManager(); + if (catalog != null) { + LOGGER.info("using catalog: " + catalog); + ontman.setIRIMappers(Sets.newHashSet(new owltools.io.CatalogXmlIRIMapper(catalog))); + } else { + LOGGER.info("no catalog, resolving all ontology uris directly"); + } + + OWLOntology ontology = ontman.loadOntology(IRI.create(ontologyIRI)); + CurieMappings localMappings = new CurieMappings.SimpleCurieMappings(Collections.singletonMap(modelIdcurie, modelIdPrefix)); + CurieHandler curieHandler = new MappedCurieHandler(DefaultCurieHandler.loadDefaultMappings(), localMappings); + boolean loadTboxIntoOntJournal = (!(new File(go_lego_journal_file)).exists()); + BlazegraphMolecularModelManager m3 = new BlazegraphMolecularModelManager<>(ontology, curieHandler, modelIdPrefix, inputDB, null, go_lego_journal_file, false); + if (loadTboxIntoOntJournal) { + m3.getGolego_repo().loadRepositoryFromOntology(ontology, "http://example.org/", true); + } + final String immutableModelIdPrefix = modelIdPrefix; + final String immutableGpadOutputFolder = gpadOutputFolder; + m3.getAvailableModelIds().stream().parallel().forEach(modelIRI -> { + try { + //TODO investigate whether changing to a neo-lite model has an impact on this - may need to make use of ontology journal + String gpad = new GPADSPARQLExport(curieHandler, m3.getLegacyRelationShorthandIndex(), m3.getTboxShorthandIndex(), m3.getGolego_repo().regulatorsToRegulated).exportGPAD(m3.createInferredModel(modelIRI), modelIRI); + String fileName = StringUtils.replaceOnce(modelIRI.toString(), immutableModelIdPrefix, "") + ".gpad"; + Writer writer = new OutputStreamWriter(new FileOutputStream(Paths.get(immutableGpadOutputFolder, fileName).toFile()), StandardCharsets.UTF_8); + writer.write(gpad); + writer.close(); + } catch (InconsistentOntologyException e) { + LOGGER.error("Inconsistent ontology: " + modelIRI); + } catch (IOException e) { + LOGGER.error("Couldn't export GPAD for: " + modelIRI, e); + } + }); + m3.dispose(); + } + + + /** + * --validate-go-cams + * -i /GitHub/GO_Shapes/test_ttl/go_cams/should_pass/ + * -c ./catalog-no-import.xml + * + * @param input + * @param basicOutputFile + * @param explanationOutputFile + * @param ontologyIRI + * @param catalog + * @param modelIdPrefix + * @param modelIdcurie + * @param shexpath + * @param shapemappath + * @param travisMode + * @param shouldPass + * @throws IOException + * @throws OWLOntologyCreationException + */ + public static void validateGoCams(String input, 
String outputFolder, + String ontologyIRI, String catalog, String modelIdPrefix, String modelIdcurie, + String shexpath, String shapemappath, boolean travisMode, boolean shouldFail, boolean checkShex, + String go_lego_journal_file, boolean run_reasoner_report) throws OWLOntologyCreationException, IOException { + LOGGER.setLevel(Level.INFO); + String inputDB = "blazegraph.jnl"; + String shexFileUrl = "https://raw.githubusercontent.com/geneontology/go-shapes/master/shapes/go-cam-shapes.shex"; + String goshapemapFileUrl = "https://raw.githubusercontent.com/geneontology/go-shapes/master/shapes/go-cam-shapes.shapeMap"; + CurieMappings localMappings = new CurieMappings.SimpleCurieMappings(Collections.singletonMap(modelIdcurie, modelIdPrefix)); + CurieHandler curieHandler = new MappedCurieHandler(DefaultCurieHandler.loadDefaultMappings(), localMappings); + Map modelid_filename = new HashMap(); + + if (outputFolder == null) { + LOGGER.error("please specify an output folder with -r "); + System.exit(-1); + } else if (!outputFolder.endsWith("/")) { + outputFolder += "/"; + } + + if (input == null) { + LOGGER.error("please provide an input file - either a directory of ttl files or a blazegraph journal"); + System.exit(-1); + } + + LOGGER.info("loading tbox ontology: " + ontologyIRI); + OWLOntologyManager ontman = OWLManager.createOWLOntologyManager(); + if (catalog != null) { + LOGGER.info("using catalog: " + catalog); + try { + ontman.setIRIMappers(Sets.newHashSet(new CatalogXmlIRIMapper(catalog))); + } catch (IOException e) { + // TODO Auto-generated catch block + e.printStackTrace(); + } + } else { + LOGGER.info("no catalog, resolving all ontology uris directly"); + } + + OWLOntology tbox_ontology = null; + try { + tbox_ontology = ontman.loadOntology(IRI.create(ontologyIRI)); + LOGGER.info("tbox ontology axioms loaded: " + tbox_ontology.getAxiomCount()); + } catch (OWLOntologyCreationException e1) { + // TODO Auto-generated catch block + e1.printStackTrace(); + } + //either load directly from existing journal + if (input.endsWith(".jnl")) { + inputDB = input; + } else { + //or make sure that the journal file provided is cleared out and ready + File i = new File(input); + if (i.exists()) { + //remove anything that existed earlier + File bgdb = new File(inputDB); + if (bgdb.exists()) { + bgdb.delete(); + } + } + } + //make the manager + LOGGER.info("Setting up model manager and initializing rules for Arachne reasoner"); + UndoAwareMolecularModelManager m3 = new UndoAwareMolecularModelManager(tbox_ontology, curieHandler, modelIdPrefix, inputDB, null, go_lego_journal_file, true); + //if provided a directory as input, load them ttl files into the manager + File i = new File(input); + if (i.exists() && !input.endsWith(".jnl")) { + if (i.isDirectory()) { + LOGGER.info("Loading models from " + i.getAbsolutePath()); + Set model_iris = new HashSet(); + FileUtils.listFiles(i, null, true).parallelStream().parallel().forEach(file -> { + if (file.getName().endsWith(".ttl") || file.getName().endsWith("owl")) { + try { + String modeluri = m3.importModelToDatabase(file, true); + if (modeluri == null) { + LOGGER.error("Null model IRI: " + modeluri + " file: " + file); + } else if (!model_iris.add(modeluri)) { + LOGGER.error("Multiple models with same IRI: " + modeluri + " file: " + file + " file: " + modelid_filename.get(modeluri)); + } else { + modelid_filename.put(modeluri, file.getName()); + } + } catch (OWLOntologyCreationException | RepositoryException | RDFParseException + | RDFHandlerException | IOException 
e) { + // TODO Auto-generated catch block + e.printStackTrace(); + } + } + }); + } else {//just load the one provided + LOGGER.info("Loading " + i); + try { + m3.importModelToDatabase(i, true); + } catch (OWLOntologyCreationException | RepositoryException | RDFParseException + | RDFHandlerException | IOException e) { + // TODO Auto-generated catch block + e.printStackTrace(); + } + } + LOGGER.info("loaded files into blazegraph journal: " + input); + } + //models ready + //now set up shex validator + if (shexpath == null) { + //fall back on downloading from shapes repo + URL shex_schema_url; + try { + shex_schema_url = new URL(shexFileUrl); + shexpath = "./go-cam-schema.shex"; + File shex_schema_file = new File(shexpath); + org.apache.commons.io.FileUtils.copyURLToFile(shex_schema_url, shex_schema_file); + System.err.println("-s .No shex schema provided, using: " + shexFileUrl); + } catch (MalformedURLException e) { + // TODO Auto-generated catch block + e.printStackTrace(); + } catch (IOException e) { + // TODO Auto-generated catch block + e.printStackTrace(); + } + } + if (shapemappath == null) { + URL shex_map_url; + try { + shex_map_url = new URL(goshapemapFileUrl); + shapemappath = "./go-cam-shapes.shapeMap"; + File shex_map_file = new File(shapemappath); + org.apache.commons.io.FileUtils.copyURLToFile(shex_map_url, shex_map_file); + System.err.println("-m .No shape map file provided, using: " + goshapemapFileUrl); + } catch (MalformedURLException e) { + // TODO Auto-generated catch block + e.printStackTrace(); + } catch (IOException e) { + // TODO Auto-generated catch block + e.printStackTrace(); + } + } + LOGGER.info("making shex validator: " + shexpath + " " + shapemappath + " " + curieHandler + " "); + MinervaShexValidator shex = null; + try { + shex = new MinervaShexValidator(shexpath, shapemappath, curieHandler, m3.getGolego_repo()); + } catch (Exception e1) { + // TODO Auto-generated catch block + e1.printStackTrace(); + } + + if (checkShex) { + shex.setActive(true); + } else { + shex.setActive(false); + } + + //shex validator is ready, now build the inference provider (which provides access to the shex validator and provides inferences useful for shex) + String reasonerOpt = "arachne"; + LOGGER.info("Building OWL inference provider: " + reasonerOpt); + InferenceProviderCreator ipc = StartUpTool.createInferenceProviderCreator(reasonerOpt, m3, shex); + LOGGER.info("Validating models: " + reasonerOpt); + + //Set up all the report files. 
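Before the report files are created below, a note on the CURIE machinery built at the top of this method: the local mapping binds the model-id curie (gomodel by default) to the model-id prefix, layered on top of the standard mappings, and it is what turns model IRIs into the short ids used in the reports. A small sketch, reusing this class's imports; the model IRI is hypothetical:

// Sketch of the CURIE handling set up above.
CurieMappings local = new CurieMappings.SimpleCurieMappings(
        Collections.singletonMap("gomodel", "http://model.geneontology.org/"));
CurieHandler curieHandler = new MappedCurieHandler(DefaultCurieHandler.loadDefaultMappings(), local);
String curie = curieHandler.getCuri(IRI.create("http://model.geneontology.org/5f46c3b700000001"));
// curie contracts to the "gomodel:" form used in the report rows and rule messages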
+ String basic_output_file = outputFolder + "main_report.txt"; + String explanations_file = outputFolder + "explanations.txt"; + String activity_output_file = outputFolder + "activity_report.txt"; + if (outputFolder != null) { + try { + //valid or not + FileWriter basic_shex_output = new FileWriter(basic_output_file, false); + basic_shex_output.write("filename\tmodel_title\tmodel_url\tmodelstate\tcontributor\tprovider\tdate\tOWL_consistent\tshex_valid\tshex_meta_problem\tshex_data_problem\tvalidation_time_milliseconds\taxioms\tn_rows_gpad\t"); + basic_shex_output.write(GoCamModelStats.statsHeader() + "\n"); + basic_shex_output.close(); + //tab delimited explanations for failures + FileWriter explanations = new FileWriter(explanations_file, false); + explanations.write("filename\tmodel_title\tmodel_iri\tnode\tNode_types\tproperty\tIntended_range_shapes\tobject\tObject_types\tObject_shapes\n"); + explanations.close(); + //tab delimited summary of properties of activity units + FileWriter activity_output = new FileWriter(activity_output_file, false); + activity_output.write("filename\tmodel_title\tmodel_url\tmodelstate\tcontributor\tprovider\tdate\tactivity_iri\tactivity_xref\tactivity_label\tcomplete\tinputs\toutputs\tenablers\tlocations\tcausal upstream\tcausal downstream\tpart of n BP\tMF\tBP\n"); + activity_output.close(); + } catch (IOException e) { + // TODO Auto-generated catch block + e.printStackTrace(); + } + } + //this will generate the json file used for the go rules report for the pipeline + BatchPipelineValidationReport pipe_report = null; + Set owl_errors = new HashSet(); + Set shex_errors = new HashSet(); + pipe_report = new BatchPipelineValidationReport(); + try { + pipe_report.setNumber_of_models(m3.getAvailableModelIds().size()); + } catch (IOException e1) { + // TODO Auto-generated catch block + e1.printStackTrace(); + } + int bad_models = 0; + int good_models = 0; + final boolean shex_output = checkShex; + + //only used if OWL reasoning report is requested + ReasonerReport reasoner_report = null; + if (run_reasoner_report) { + reasoner_report = initReasonerReport(outputFolder); + } + //now process each gocam + try { + for (IRI modelIRI : m3.getAvailableModelIds()) { + long start = System.currentTimeMillis(); + String filename = modelid_filename.get(modelIRI.toString()); + boolean isConsistent = true; //OWL + boolean isConformant = true; //shex + if (filename != null) { + LOGGER.info("processing " + filename + "\t" + modelIRI); + } else { + LOGGER.info("processing \t" + modelIRI); + } + //this is where everything actually happens + ModelContainer mc = m3.getModel(modelIRI); + OWLOntology gocam = mc.getAboxOntology(); + try { + //if a model does not have an import statement that links in an ontology that defines all of its classes and object properties + //or if the model does not define the classes and object properties itself, parsing problems will prevail + //this step makes sure that does not happen + gocam = CoreMolecularModelManager.fixBrokenObjectPropertiesAndAxioms(gocam); + } catch (OWLOntologyCreationException e) { + // TODO Auto-generated catch block + e.printStackTrace(); + } + LOGGER.info("preparing model stats..."); + //The GoCamModel code is used to capture model-level statistics such as 'how many causal relations are there?' 
+ //This might be an area for a speed improvement if needed + GoCamModel gcm = new GoCamModel(gocam, m3); + String title = "title"; + if (gcm.getTitle() != null) { + title = makeColSafe(gcm.getTitle()); + } else { + LOGGER.error("no title for " + filename); + } + //this is to make clickable links in reports + String link = modelIRI.toString().replace("http://model.geneontology.org/", "http://noctua.geneontology.org/editor/graph/gomodel:"); + if (modelIRI.toString().contains("R-HSA")) { + link = link.replace("noctua.geneontology", "noctua-dev.berkeleybop"); + } + String modelstate = makeColSafe(gcm.getModelstate()); + String contributor = makeColSafe(gcm.getContributors().toString()); + String date = makeColSafe(gcm.getDate()); + String provider = makeColSafe(gcm.getProvided_by().toString()); + pipe_report.setTaxa(gcm.getIn_taxon()); + LOGGER.info("model stats done for title: " + title); + int axioms = gocam.getAxiomCount(); + //add activity level statistics as a default + FileWriter activity_output = new FileWriter(activity_output_file, true); + for (ActivityUnit unit : gcm.getActivities()) { + activity_output.write(filename + "\t" + title + "\t" + link + "\t" + modelstate + "\t" + contributor + "\t" + provider + "\t" + date + "\t" + unit.getIndividual().getIRI().toString() + "\t" + unit.getXref() + "\t" + unit.getLabel() + "\t"); + activity_output.write(unit.isComplete() + "\t" + unit.getInputs().size() + "\t" + unit.getOutputs().size() + "\t" + unit.getEnablers().size() + "\t" + unit.getLocations().size() + + "\t" + unit.getCausal_in().size() + "\t" + unit.getCausal_out().size() + "\t" + unit.getContaining_processes().size() + "\t" + unit.stringForClasses(unit.getDirect_types()) + "\t" + unit.getURIsForConnectedBPs() + "\n"); + } + activity_output.close(); + + InferenceProvider ip = ipc.create(mc); + isConsistent = ip.isConsistent(); + //TODO re-use reasoner object from ip + //TODO this is another area that could be touched/removed for speed improvement + int n_rows_gpad = 0; + if (isConsistent) { + try { + Set gpad = new GPADSPARQLExport(curieHandler, m3.getLegacyRelationShorthandIndex(), m3.getTboxShorthandIndex(), m3.getGolego_repo().regulatorsToRegulated).getGPAD(m3.createInferredModel(modelIRI), modelIRI); + if (gpad != null) { + n_rows_gpad = gpad.size(); + } + } catch (InconsistentOntologyException e) { + LOGGER.error("inconsistent ontology, can't make gpad"); + } + } + long done = System.currentTimeMillis(); + long milliseconds = (done - start); + //for rules report in pipeline + if (!ip.isConsistent()) { + String level = "ERROR"; + String model_id = curieHandler.getCuri(modelIRI); + String message = BatchPipelineValidationReport.getOwlMessage(); + int rule = BatchPipelineValidationReport.getOwlRule(); + ErrorMessage owl = new ErrorMessage(level, model_id, gcm.getIn_taxon(), message, rule); + owl_errors.add(owl); + } + if (!isConsistent) { + FileWriter explanations = new FileWriter(explanations_file, true); + explanations.write(filename + "\t" + title + "\t" + modelIRI + "\tOWL fail explanation: " + ip.getValidation_results().getOwlvalidation().getAsText() + "\n"); + explanations.close(); + } + //travis mode causes the system to exit when an invalid model is detected (unless shouldFail is on) + if (travisMode && !isConsistent) { + if (!shouldFail) { + LOGGER.error(filename + "\t" + title + "\t" + modelIRI + "\tOWL:is inconsistent, quitting"); + System.exit(-1); + } + } + //basic is just one row per model - did it validate or not + FileWriter basic = new 
FileWriter(basic_output_file, true); + if (!shex_output) { + if (ip.isConsistent()) { + good_models++; + } else { + bad_models++; + } + } else { + ValidationResultSet validations = ip.getValidation_results(); + isConformant = validations.allConformant(); + if (isConformant) { + good_models++; + } else { + bad_models++; + } + if (!validations.getShexvalidation().isConformant()) { + String level = "WARNING"; + String model_id = curieHandler.getCuri(modelIRI); + String message = BatchPipelineValidationReport.getShexMessage(); + int rule = BatchPipelineValidationReport.getShexRule(); + ErrorMessage shex_message = new ErrorMessage(level, model_id, gcm.getIn_taxon(), message, rule); + boolean include_explanations_in_json = true; //TODO set as a parameter + if (include_explanations_in_json) { + shex_message.setExplanations(validations); + } + shex_errors.add(shex_message); + FileWriter explanations = new FileWriter(explanations_file, true); + explanations.write(ip.getValidation_results().getShexvalidation().getAsTab(filename + "\t" + title + "\t" + modelIRI)); + explanations.close(); + } + if (travisMode) { + if (!isConformant && !shouldFail) { + LOGGER.error(filename + "\t" + title + "\t" + modelIRI + "\tshex is nonconformant, quitting, explanation:\n" + ip.getValidation_results().getShexvalidation().getAsText()); + System.exit(-1); + } else if (isConformant && shouldFail) { + LOGGER.error(filename + "\t" + title + "\t" + modelIRI + "\tshex validates, but it should not be, quitting"); + System.exit(-1); + } + } + //is it a metadata violation or data ? + boolean shex_meta_problem = false; + boolean shex_data_problem = false; + if (!validations.getShexvalidation().isConformant()) { + String model_curie = curieHandler.getCuri(modelIRI); + ValidationResultSet validationset = ip.getValidation_results(); + ShexValidationReport shex_report = validationset.getShexvalidation(); + Set violations = shex_report.getViolations(); + if (violations != null) { + for (Violation v : violations) { + if (v.getNode().equals(model_curie)) { + shex_meta_problem = true; + } else { + shex_data_problem = true; + } + } + } else { + LOGGER.error("Invalid model but no violations reported"); + } + } + LOGGER.info(filename + "\t" + title + "\t" + modelIRI + "\tOWL:" + isConsistent + "\tshex:" + isConformant); + basic.write(filename + "\t" + title + "\t" + link + "\t" + modelstate + "\t" + contributor + "\t" + provider + "\t" + date + "\t" + isConsistent + "\t" + isConformant + "\t" + shex_meta_problem + "\t" + shex_data_problem + "\t" + milliseconds + "\t" + axioms + "\t" + + n_rows_gpad + "\t" + gcm.getGoCamModelStats().stats2cols() + "\n"); + } + basic.close(); + if (run_reasoner_report) { + addReasonerReport(outputFolder, gocam, ip, title, reasoner_report); + } + } + } catch (Exception e) { + // TODO Auto-generated catch block + e.printStackTrace(); + } + if (run_reasoner_report) { + summarizeReasonerReport(outputFolder, reasoner_report); + } + + pipe_report.setNumber_of_correct_models(good_models); + pipe_report.setNumber_of_models_in_error(bad_models); + pipe_report.getMessages().put(BatchPipelineValidationReport.getShexRuleString(), shex_errors); + pipe_report.getMessages().put(BatchPipelineValidationReport.getOwlRuleString(), owl_errors); + GsonBuilder builder = new GsonBuilder(); + Gson gson = builder.setPrettyPrinting().create(); + String json = gson.toJson(pipe_report); + try { + FileWriter pipe_json = new FileWriter(outputFolder + "gorules_report.json", false); + pipe_json.write(json); + pipe_json.close(); + } catch 
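// ---------------------------------------------------------------------------
// Editor's sketch (illustrative only, not part of the patch): the code around
// this point serializes the BatchPipelineValidationReport with Gson and writes
// it to gorules_report.json. The same step can stream directly into the writer
// under try-with-resources so the file handle is always closed; the class and
// method names below are hypothetical.
import com.google.gson.Gson;
import com.google.gson.GsonBuilder;
import java.io.FileWriter;
import java.io.IOException;
import java.io.Writer;

final class RulesReportJsonSketch {
    static void write(Object report, String path) throws IOException {
        Gson gson = new GsonBuilder().setPrettyPrinting().create();
        try (Writer out = new FileWriter(path, false)) {
            gson.toJson(report, out);   // Gson serializes straight into the Appendable
        }
    }
}
// ---------------------------------------------------------------------------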
(IOException e) { + // TODO Auto-generated catch block + e.printStackTrace(); + } + m3.dispose(); + LOGGER.info("done with validation"); + } + + static class ReasonerReport { + Map term_asserted_instances_mapped = new HashMap(); + Map term_deepened_instances_mapped = new HashMap(); + Map term_asserted_instances_created = new HashMap(); + Map term_deepened_instances_created = new HashMap(); + } + + + private static ReasonerReport initReasonerReport(String outputFolder) { + String reasoner_report_file = outputFolder + "reasoner_report_all.txt"; + FileWriter reasoner_report; + try { + reasoner_report = new FileWriter(reasoner_report_file, false); + reasoner_report.write("title\tindividual\txref\tasserted\tinferred\n"); + reasoner_report.close(); + } catch (IOException e) { + // TODO Auto-generated catch block + e.printStackTrace(); + } + ReasonerReport report = new ReasonerReport(); + return report; + } + + private static ReasonerReport addReasonerReport(String outputFolder, OWLOntology gocam, InferenceProvider ip, String title, ReasonerReport report) throws IOException { + String reasoner_report_file = outputFolder + "reasoner_report_all.txt"; + FileWriter reasoner_report = new FileWriter(reasoner_report_file, true); + Set individuals = gocam.getIndividualsInSignature(); + for (OWLNamedIndividual individual : individuals) { + //what kind of individual - mapped or created. mapped have xrefs, created do not. + String xref = "none"; + for (OWLAnnotation anno : EntitySearcher.getAnnotations(individual, gocam)) { + if (anno.getProperty().getIRI().toString().equals("http://www.geneontology.org/formats/oboInOwl#hasDbXref")) { + xref = anno.getValue().asLiteral().get().getLiteral(); + } + } + + Collection asserted_ce = EntitySearcher.getTypes(individual, gocam); + Set asserted = new HashSet(); + for (OWLClassExpression ce : asserted_ce) { + if (!ce.isAnonymous()) { + OWLClass a = ce.asOWLClass(); + if (a.isBuiltIn() == false) { + asserted.add(a); + } + } + } + Set inferred_direct = new HashSet<>(); + Set flattened = ip.getTypes(individual); + for (OWLClass cls : flattened) { + if (cls.isBuiltIn() == false) { + inferred_direct.add(cls); + } + } + inferred_direct.removeAll(asserted); + reasoner_report.write(title + "\t" + individual.getIRI() + "\t" + xref + "\t" + asserted + "\t" + inferred_direct + "\n"); + if (asserted != null) { + for (OWLClass go : asserted) { + if (xref.equals("none")) { + Integer n = report.term_asserted_instances_created.get(go.toString()); + if (n == null) { + n = 0; + } + n = n + 1; + report.term_asserted_instances_created.put(go.toString(), n); + + if (inferred_direct != null && inferred_direct.size() > 0) { + Integer deepened = report.term_deepened_instances_created.get(go.toString()); + if (deepened == null) { + deepened = 0; + } + deepened = deepened + 1; + report.term_deepened_instances_created.put(go.toString(), deepened); + } + } else { + Integer n = report.term_asserted_instances_mapped.get(go.toString()); + if (n == null) { + n = 0; + } + n = n + 1; + report.term_asserted_instances_mapped.put(go.toString(), n); + + if (inferred_direct != null && inferred_direct.size() > 0) { + Integer deepened = report.term_deepened_instances_mapped.get(go.toString()); + if (deepened == null) { + deepened = 0; + } + deepened = deepened + 1; + report.term_deepened_instances_mapped.put(go.toString(), deepened); + } + } + } + } + } + reasoner_report.close(); + return report; + } + + private static void summarizeReasonerReport(String outputFolder, ReasonerReport report) { + String 
reasoner_report_summary_file = outputFolder + "reasoner_report_summary.txt"; + FileWriter reasoner_report_summary; + try { + reasoner_report_summary = new FileWriter(reasoner_report_summary_file, false); + reasoner_report_summary.write("asserted GO term\tmapped individual count\tmapped N deepened\tcreated individual count\tcreated N deepened\n"); + Set terms = new HashSet(); + terms.addAll(report.term_asserted_instances_mapped.keySet()); + terms.addAll(report.term_asserted_instances_created.keySet()); + for (String goterm : terms) { + int n_deepened_mapped = 0; + int n_mapped = 0; + if (report.term_asserted_instances_mapped.containsKey(goterm)) { + n_mapped = report.term_asserted_instances_mapped.get(goterm); + } + + if (report.term_deepened_instances_mapped.get(goterm) != null) { + n_deepened_mapped = report.term_deepened_instances_mapped.get(goterm); + } + int n_deepened_created = 0; + int n_created = 0; + if (report.term_asserted_instances_created.containsKey(goterm)) { + n_created = report.term_asserted_instances_created.get(goterm); + } + if (report.term_deepened_instances_created.get(goterm) != null) { + n_deepened_created = report.term_deepened_instances_created.get(goterm); + } + reasoner_report_summary.write(goterm + "\t" + n_mapped + "\t" + n_deepened_mapped + "\t" + n_created + "\t" + n_deepened_created + "\n"); + } + reasoner_report_summary.close(); + } catch (IOException e) { + // TODO Auto-generated catch block + e.printStackTrace(); + } + } + + + private static String makeColSafe(String text) { + text = text.replaceAll("\n", " "); + text = text.replaceAll("\r", " "); + text = text.replaceAll("\t", " "); + return text; + } + + public static void addTaxonMetaData(String go_cam_journal, String go_lego_journal_file) throws OWLOntologyCreationException, IOException { + String modelIdPrefix = "http://model.geneontology.org/"; + OWLOntology dummy = OWLManager.createOWLOntologyManager().createOntology(IRI.create("http://example.org/dummy")); + CurieHandler curieHandler = new MappedCurieHandler(); + BlazegraphMolecularModelManager m3 = new BlazegraphMolecularModelManager<>(dummy, curieHandler, modelIdPrefix, go_cam_journal, null, go_lego_journal_file, true); + m3.addTaxonMetadata(); + } + + public static void cleanGoCams(String input_dir, String output_dir) { + OWLOntologyManager m = OWLManager.createOWLOntologyManager(); + File directory = new File(input_dir); + boolean ignore_imports = true; + if (directory.isDirectory()) { + for (File file : directory.listFiles()) { + if (file.getName().endsWith("ttl")) { + System.out.println("fixing " + file.getAbsolutePath()); + final IRI modelFile = IRI.create(file.getAbsoluteFile()); + OWLOntology o; + try { + o = CoreMolecularModelManager.loadOntologyDocumentSource(new IRIDocumentSource(modelFile), ignore_imports, m); + //in case the reader was confused by the missing import, fix declarations + o = CoreMolecularModelManager.fixBrokenObjectPropertiesAndAxioms(o); + //clean the model + OWLOntology cleaned_ont = CoreMolecularModelManager.removeDeadAnnotationsAndImports(o); + //saved the blessed ontology + OWLDocumentFormat owlFormat = new TurtleDocumentFormat(); + m.setOntologyFormat(cleaned_ont, owlFormat); + String cleaned_ont_file = output_dir + file.getName(); + try { + m.saveOntology(cleaned_ont, new FileOutputStream(cleaned_ont_file)); + } catch (OWLOntologyStorageException | FileNotFoundException e) { + // TODO Auto-generated catch block + e.printStackTrace(); + } + } catch (OWLOntologyCreationException e1) { + // TODO Auto-generated 
catch block + e1.printStackTrace(); + } + } + } + } + } + + + public static void printVersion() throws Exception { + printManifestEntry("git-revision-sha1", "UNKNOWN"); + printManifestEntry("git-revision-url", "UNKNOWN"); + printManifestEntry("git-branch", "UNKNOWN"); + printManifestEntry("git-dirty", "UNKNOWN"); + } + + private static String printManifestEntry(String key, String defaultValue) { + String value = owltools.version.VersionInfo.getManifestVersion(key); + if (value == null || value.isEmpty()) { + value = defaultValue; + } + System.out.println(key + "\t" + value); + return value; + } + + public static void reportSystemParams() { + /* Total number of processors or cores available to the JVM */ + LOGGER.info("Available processors (cores): " + + Runtime.getRuntime().availableProcessors()); + + /* Total amount of free memory available to the JVM */ + LOGGER.info("Free memory (m bytes): " + + Runtime.getRuntime().freeMemory() / 1048576); + + /* This will return Long.MAX_VALUE if there is no preset limit */ + long maxMemory = Runtime.getRuntime().maxMemory() / 1048576; + /* Maximum amount of memory the JVM will attempt to use */ + LOGGER.info("Maximum memory (m bytes): " + + (maxMemory == Long.MAX_VALUE ? "no limit" : maxMemory)); + + /* Total memory currently in use by the JVM */ + LOGGER.info("Total memory (m bytes): " + + Runtime.getRuntime().totalMemory() / 1048576); + + /* Get a list of all filesystem roots on this system */ + File[] roots = File.listRoots(); + + /* For each filesystem root, print some info */ + for (File root : roots) { + LOGGER.info("File system root: " + root.getAbsolutePath()); + LOGGER.info("Total space (bytes): " + root.getTotalSpace()); + LOGGER.info("Free space (bytes): " + root.getFreeSpace()); + LOGGER.info("Usable space (bytes): " + root.getUsableSpace()); + } + } } diff --git a/minerva-cli/src/main/resources/log4j.properties b/minerva-cli/src/main/resources/log4j.properties index 59f57563..854f81f2 100644 --- a/minerva-cli/src/main/resources/log4j.properties +++ b/minerva-cli/src/main/resources/log4j.properties @@ -1,9 +1,7 @@ log4j.appender.console=org.apache.log4j.ConsoleAppender log4j.appender.console.layout=org.apache.log4j.PatternLayout log4j.appender.console.layout.ConversionPattern=%d %-5p (%c:%L) %m\n - -log4j.logger.org.semanticweb.elk = ERROR +log4j.logger.org.semanticweb.elk=ERROR log4j.logger.org.obolibrary.obo2owl=OFF log4j.logger.org.semanticweb.owlapi=error - log4j.rootLogger=INFO, console diff --git a/minerva-converter/pom.xml b/minerva-converter/pom.xml index 0d3e2d5e..1f1e25c3 100644 --- a/minerva-converter/pom.xml +++ b/minerva-converter/pom.xml @@ -1,48 +1,48 @@ - 4.0.0 - - minerva - org.geneontology - 0.6.1 - - minerva-converter - Minerva-Converter + xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd"> + 4.0.0 + + minerva + org.geneontology + 0.6.1 + + minerva-converter + Minerva-Converter - - - - org.apache.maven.plugins - maven-surefire-plugin - - - **/FooTest.java - - - - - org.jacoco - jacoco-maven-plugin - - - + + + + org.apache.maven.plugins + maven-surefire-plugin + + + **/FooTest.java + + + + + org.jacoco + jacoco-maven-plugin + + + - - - org.geneontology - minerva-lookup - ${project.parent.version} - - - org.geneontology - minerva-json - ${project.parent.version} - test - - - com.google.code.gson - gson - - + + + org.geneontology + minerva-lookup + ${project.parent.version} + + + org.geneontology + minerva-json + ${project.parent.version} + test + + + com.google.code.gson + 
gson + + diff --git a/minerva-converter/src/main/java/org/geneontology/minerva/evidence/FindGoCodes.java b/minerva-converter/src/main/java/org/geneontology/minerva/evidence/FindGoCodes.java index 3a797732..6222e8e4 100644 --- a/minerva-converter/src/main/java/org/geneontology/minerva/evidence/FindGoCodes.java +++ b/minerva-converter/src/main/java/org/geneontology/minerva/evidence/FindGoCodes.java @@ -1,71 +1,70 @@ package org.geneontology.minerva.evidence; -import java.io.IOException; -import java.util.HashSet; -import java.util.LinkedList; -import java.util.Queue; -import java.util.Set; - import org.apache.commons.lang3.tuple.Pair; import org.geneontology.minerva.curie.CurieHandler; import org.semanticweb.owlapi.model.OWLClass; import org.semanticweb.owlapi.model.OWLOntology; import org.semanticweb.owlapi.model.OWLSubClassOfAxiom; import org.semanticweb.owlapi.util.OWLClassExpressionVisitorAdapter; - import owltools.gaf.eco.EcoMapperFactory; import owltools.gaf.eco.SimpleEcoMapper; +import java.io.IOException; +import java.util.HashSet; +import java.util.LinkedList; +import java.util.Queue; +import java.util.Set; + public class FindGoCodes { - private final SimpleEcoMapper mapper; - private final CurieHandler curieHandler; + private final SimpleEcoMapper mapper; + private final CurieHandler curieHandler; - public FindGoCodes(CurieHandler curieHandler) throws IOException { - this(EcoMapperFactory.createSimple(), curieHandler); - } + public FindGoCodes(CurieHandler curieHandler) throws IOException { + this(EcoMapperFactory.createSimple(), curieHandler); + } - public FindGoCodes(SimpleEcoMapper mapper, CurieHandler curieHandler) { - this.mapper = mapper; - this.curieHandler = curieHandler; - } + public FindGoCodes(SimpleEcoMapper mapper, CurieHandler curieHandler) { + this.mapper = mapper; + this.curieHandler = curieHandler; + } - public Pair findShortEvidence(OWLClass eco, String ecoId, OWLOntology model) { - Pair pair = mapper.getGoCode(ecoId); - if (pair == null) { - // try to find a GO-Code mapping in the named super classes - // mini walker code, with cycle detection - final Set done = new HashSet<>(); - final Queue queue = new LinkedList<>(); - queue.addAll(getNamedDirectSuperClasses(eco, model)); - done.add(eco); - while (queue.isEmpty() == false && pair == null) { - OWLClass current = queue.poll(); - pair = mapper.getGoCode(curieHandler.getCuri(current)); - if (done.add(current) && pair == null) { - queue.addAll(getNamedDirectSuperClasses(current, model)) ; - } - } - } - return pair; - } + public Pair findShortEvidence(OWLClass eco, String ecoId, OWLOntology model) { + Pair pair = mapper.getGoCode(ecoId); + if (pair == null) { + // try to find a GO-Code mapping in the named super classes + // mini walker code, with cycle detection + final Set done = new HashSet<>(); + final Queue queue = new LinkedList<>(); + queue.addAll(getNamedDirectSuperClasses(eco, model)); + done.add(eco); + while (queue.isEmpty() == false && pair == null) { + OWLClass current = queue.poll(); + pair = mapper.getGoCode(curieHandler.getCuri(current)); + if (done.add(current) && pair == null) { + queue.addAll(getNamedDirectSuperClasses(current, model)); + } + } + } + return pair; + } - private Set getNamedDirectSuperClasses(OWLClass current, OWLOntology model) { - final Set dedup = new HashSet(); - Set closure = model.getImportsClosure(); - for (OWLOntology ont : closure) { - for(OWLSubClassOfAxiom ax : ont.getSubClassAxiomsForSubClass(current)) { - ax.getSuperClass().accept(new OWLClassExpressionVisitorAdapter(){ 
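// ---------------------------------------------------------------------------
// Editor's sketch (illustrative only, not part of the patch): findShortEvidence()
// above falls back to a breadth-first walk over the named superclasses of an ECO
// class, using a "done" set as a cycle guard, until a GO evidence code mapping is
// found. The generic shape of that walk, with the lookup and neighbour functions
// as hypothetical parameters:
import java.util.ArrayDeque;
import java.util.HashSet;
import java.util.Queue;
import java.util.Set;
import java.util.function.Function;

final class BreadthFirstLookupSketch {
    /** Walk outward from 'start', breadth-first, until 'lookup' yields a non-null result. */
    static <T, R> R find(T start, Function<T, R> lookup, Function<T, Set<T>> neighbours) {
        R result = lookup.apply(start);
        Set<T> done = new HashSet<>();
        Queue<T> queue = new ArrayDeque<>(neighbours.apply(start));
        done.add(start);
        while (result == null && !queue.isEmpty()) {
            T current = queue.poll();
            result = lookup.apply(current);
            if (done.add(current) && result == null) {
                queue.addAll(neighbours.apply(current));   // only expand unvisited, unresolved nodes
            }
        }
        return result;
    }
}
// ---------------------------------------------------------------------------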
+ private Set getNamedDirectSuperClasses(OWLClass current, OWLOntology model) { + final Set dedup = new HashSet(); + Set closure = model.getImportsClosure(); + for (OWLOntology ont : closure) { + for (OWLSubClassOfAxiom ax : ont.getSubClassAxiomsForSubClass(current)) { + ax.getSuperClass().accept(new OWLClassExpressionVisitorAdapter() { - @Override - public void visit(OWLClass cls) { - if (cls.isBuiltIn() == false) { - dedup.add(cls); - } - } - }); - } - } - return dedup; - } + @Override + public void visit(OWLClass cls) { + if (cls.isBuiltIn() == false) { + dedup.add(cls); + } + } + }); + } + } + return dedup; + } } diff --git a/minerva-converter/src/main/java/org/geneontology/minerva/explanation/Explanation.java b/minerva-converter/src/main/java/org/geneontology/minerva/explanation/Explanation.java index 0717ba7c..90eee627 100644 --- a/minerva-converter/src/main/java/org/geneontology/minerva/explanation/Explanation.java +++ b/minerva-converter/src/main/java/org/geneontology/minerva/explanation/Explanation.java @@ -3,32 +3,33 @@ import java.util.Arrays; public class Explanation { - - public String[] triples; - public String[] rules; - - @Override - public int hashCode() { - final int prime = 31; - int result = 1; - result = prime * result + Arrays.hashCode(rules); - result = prime * result + Arrays.hashCode(triples); - return result; - } - @Override - public boolean equals(Object obj) { - if (this == obj) - return true; - if (obj == null) - return false; - if (getClass() != obj.getClass()) - return false; - Explanation other = (Explanation) obj; - if (!Arrays.equals(rules, other.rules)) - return false; - if (!Arrays.equals(triples, other.triples)) - return false; - return true; - } + + public String[] triples; + public String[] rules; + + @Override + public int hashCode() { + final int prime = 31; + int result = 1; + result = prime * result + Arrays.hashCode(rules); + result = prime * result + Arrays.hashCode(triples); + return result; + } + + @Override + public boolean equals(Object obj) { + if (this == obj) + return true; + if (obj == null) + return false; + if (getClass() != obj.getClass()) + return false; + Explanation other = (Explanation) obj; + if (!Arrays.equals(rules, other.rules)) + return false; + if (!Arrays.equals(triples, other.triples)) + return false; + return true; + } } diff --git a/minerva-converter/src/main/java/org/geneontology/minerva/explanation/ExplanationRule.java b/minerva-converter/src/main/java/org/geneontology/minerva/explanation/ExplanationRule.java index e01c8805..01a86e53 100644 --- a/minerva-converter/src/main/java/org/geneontology/minerva/explanation/ExplanationRule.java +++ b/minerva-converter/src/main/java/org/geneontology/minerva/explanation/ExplanationRule.java @@ -1,44 +1,45 @@ package org.geneontology.minerva.explanation; -import java.util.Arrays; - import com.google.gson.annotations.SerializedName; +import java.util.Arrays; + public class ExplanationRule { - - @SerializedName("@id") - public String id; - public ExplanationTriple[] body; - public ExplanationTriple[] head; - - @Override - public int hashCode() { - final int prime = 31; - int result = 1; - result = prime * result + Arrays.hashCode(body); - result = prime * result + Arrays.hashCode(head); - result = prime * result + ((id == null) ? 
0 : id.hashCode()); - return result; - } - @Override - public boolean equals(Object obj) { - if (this == obj) - return true; - if (obj == null) - return false; - if (getClass() != obj.getClass()) - return false; - ExplanationRule other = (ExplanationRule) obj; - if (!Arrays.equals(body, other.body)) - return false; - if (!Arrays.equals(head, other.head)) - return false; - if (id == null) { - if (other.id != null) - return false; - } else if (!id.equals(other.id)) - return false; - return true; - } + + @SerializedName("@id") + public String id; + public ExplanationTriple[] body; + public ExplanationTriple[] head; + + @Override + public int hashCode() { + final int prime = 31; + int result = 1; + result = prime * result + Arrays.hashCode(body); + result = prime * result + Arrays.hashCode(head); + result = prime * result + ((id == null) ? 0 : id.hashCode()); + return result; + } + + @Override + public boolean equals(Object obj) { + if (this == obj) + return true; + if (obj == null) + return false; + if (getClass() != obj.getClass()) + return false; + ExplanationRule other = (ExplanationRule) obj; + if (!Arrays.equals(body, other.body)) + return false; + if (!Arrays.equals(head, other.head)) + return false; + if (id == null) { + if (other.id != null) + return false; + } else if (!id.equals(other.id)) + return false; + return true; + } } diff --git a/minerva-converter/src/main/java/org/geneontology/minerva/explanation/ExplanationTerm.java b/minerva-converter/src/main/java/org/geneontology/minerva/explanation/ExplanationTerm.java index 0a3255f9..417758a6 100644 --- a/minerva-converter/src/main/java/org/geneontology/minerva/explanation/ExplanationTerm.java +++ b/minerva-converter/src/main/java/org/geneontology/minerva/explanation/ExplanationTerm.java @@ -3,41 +3,41 @@ import com.google.gson.annotations.SerializedName; public class ExplanationTerm { - - @SerializedName("@id") - public String id; - public String label; - - @Override - public int hashCode() { - final int prime = 31; - int result = 1; - result = prime * result + ((id == null) ? 0 : id.hashCode()); - result = prime * result + ((label == null) ? 0 : label.hashCode()); - return result; - } - @Override - public boolean equals(Object obj) { - if (this == obj) - return true; - if (obj == null) - return false; - if (getClass() != obj.getClass()) - return false; - ExplanationTerm other = (ExplanationTerm) obj; - if (id == null) { - if (other.id != null) - return false; - } else if (!id.equals(other.id)) - return false; - if (label == null) { - if (other.label != null) - return false; - } else if (!label.equals(other.label)) - return false; - return true; - } - - + + @SerializedName("@id") + public String id; + public String label; + + @Override + public int hashCode() { + final int prime = 31; + int result = 1; + result = prime * result + ((id == null) ? 0 : id.hashCode()); + result = prime * result + ((label == null) ? 
0 : label.hashCode()); + return result; + } + + @Override + public boolean equals(Object obj) { + if (this == obj) + return true; + if (obj == null) + return false; + if (getClass() != obj.getClass()) + return false; + ExplanationTerm other = (ExplanationTerm) obj; + if (id == null) { + if (other.id != null) + return false; + } else if (!id.equals(other.id)) + return false; + if (label == null) { + if (other.label != null) + return false; + } else if (!label.equals(other.label)) + return false; + return true; + } + } diff --git a/minerva-converter/src/main/java/org/geneontology/minerva/explanation/ExplanationTriple.java b/minerva-converter/src/main/java/org/geneontology/minerva/explanation/ExplanationTriple.java index 5284fa67..a7b725a7 100644 --- a/minerva-converter/src/main/java/org/geneontology/minerva/explanation/ExplanationTriple.java +++ b/minerva-converter/src/main/java/org/geneontology/minerva/explanation/ExplanationTriple.java @@ -3,60 +3,61 @@ import com.google.gson.annotations.SerializedName; public class ExplanationTriple { - - @SerializedName("@id") - public String id; - public String subject; - public String predicate; - public String object; - public Explanation explanation; - - @Override - public int hashCode() { - final int prime = 31; - int result = 1; - result = prime * result + ((explanation == null) ? 0 : explanation.hashCode()); - result = prime * result + ((id == null) ? 0 : id.hashCode()); - result = prime * result + ((object == null) ? 0 : object.hashCode()); - result = prime * result + ((predicate == null) ? 0 : predicate.hashCode()); - result = prime * result + ((subject == null) ? 0 : subject.hashCode()); - return result; - } - @Override - public boolean equals(Object obj) { - if (this == obj) - return true; - if (obj == null) - return false; - if (getClass() != obj.getClass()) - return false; - ExplanationTriple other = (ExplanationTriple) obj; - if (explanation == null) { - if (other.explanation != null) - return false; - } else if (!explanation.equals(other.explanation)) - return false; - if (id == null) { - if (other.id != null) - return false; - } else if (!id.equals(other.id)) - return false; - if (object == null) { - if (other.object != null) - return false; - } else if (!object.equals(other.object)) - return false; - if (predicate == null) { - if (other.predicate != null) - return false; - } else if (!predicate.equals(other.predicate)) - return false; - if (subject == null) { - if (other.subject != null) - return false; - } else if (!subject.equals(other.subject)) - return false; - return true; - } + + @SerializedName("@id") + public String id; + public String subject; + public String predicate; + public String object; + public Explanation explanation; + + @Override + public int hashCode() { + final int prime = 31; + int result = 1; + result = prime * result + ((explanation == null) ? 0 : explanation.hashCode()); + result = prime * result + ((id == null) ? 0 : id.hashCode()); + result = prime * result + ((object == null) ? 0 : object.hashCode()); + result = prime * result + ((predicate == null) ? 0 : predicate.hashCode()); + result = prime * result + ((subject == null) ? 
0 : subject.hashCode()); + return result; + } + + @Override + public boolean equals(Object obj) { + if (this == obj) + return true; + if (obj == null) + return false; + if (getClass() != obj.getClass()) + return false; + ExplanationTriple other = (ExplanationTriple) obj; + if (explanation == null) { + if (other.explanation != null) + return false; + } else if (!explanation.equals(other.explanation)) + return false; + if (id == null) { + if (other.id != null) + return false; + } else if (!id.equals(other.id)) + return false; + if (object == null) { + if (other.object != null) + return false; + } else if (!object.equals(other.object)) + return false; + if (predicate == null) { + if (other.predicate != null) + return false; + } else if (!predicate.equals(other.predicate)) + return false; + if (subject == null) { + if (other.subject != null) + return false; + } else if (!subject.equals(other.subject)) + return false; + return true; + } } diff --git a/minerva-converter/src/main/java/org/geneontology/minerva/explanation/ModelExplanation.java b/minerva-converter/src/main/java/org/geneontology/minerva/explanation/ModelExplanation.java index 530d458f..6b4d17cb 100644 --- a/minerva-converter/src/main/java/org/geneontology/minerva/explanation/ModelExplanation.java +++ b/minerva-converter/src/main/java/org/geneontology/minerva/explanation/ModelExplanation.java @@ -3,40 +3,41 @@ import java.util.Arrays; public class ModelExplanation { - - public ExplanationTerm[] terms; - public ExplanationTriple[] assertions; - public ExplanationTriple[] inferences; - public ExplanationRule[] rules; - - @Override - public int hashCode() { - final int prime = 31; - int result = 1; - result = prime * result + Arrays.hashCode(assertions); - result = prime * result + Arrays.hashCode(inferences); - result = prime * result + Arrays.hashCode(rules); - result = prime * result + Arrays.hashCode(terms); - return result; - } - @Override - public boolean equals(Object obj) { - if (this == obj) - return true; - if (obj == null) - return false; - if (getClass() != obj.getClass()) - return false; - ModelExplanation other = (ModelExplanation) obj; - if (!Arrays.equals(assertions, other.assertions)) - return false; - if (!Arrays.equals(inferences, other.inferences)) - return false; - if (!Arrays.equals(rules, other.rules)) - return false; - if (!Arrays.equals(terms, other.terms)) - return false; - return true; - } + + public ExplanationTerm[] terms; + public ExplanationTriple[] assertions; + public ExplanationTriple[] inferences; + public ExplanationRule[] rules; + + @Override + public int hashCode() { + final int prime = 31; + int result = 1; + result = prime * result + Arrays.hashCode(assertions); + result = prime * result + Arrays.hashCode(inferences); + result = prime * result + Arrays.hashCode(rules); + result = prime * result + Arrays.hashCode(terms); + return result; + } + + @Override + public boolean equals(Object obj) { + if (this == obj) + return true; + if (obj == null) + return false; + if (getClass() != obj.getClass()) + return false; + ModelExplanation other = (ModelExplanation) obj; + if (!Arrays.equals(assertions, other.assertions)) + return false; + if (!Arrays.equals(inferences, other.inferences)) + return false; + if (!Arrays.equals(rules, other.rules)) + return false; + if (!Arrays.equals(terms, other.terms)) + return false; + return true; + } } diff --git a/minerva-converter/src/main/java/org/geneontology/minerva/legacy/LegoModelWalker.java 
b/minerva-converter/src/main/java/org/geneontology/minerva/legacy/LegoModelWalker.java index fcefd33e..d288084a 100644 --- a/minerva-converter/src/main/java/org/geneontology/minerva/legacy/LegoModelWalker.java +++ b/minerva-converter/src/main/java/org/geneontology/minerva/legacy/LegoModelWalker.java @@ -1,448 +1,417 @@ package org.geneontology.minerva.legacy; -import java.util.Collection; -import java.util.HashMap; -import java.util.HashSet; -import java.util.List; -import java.util.Map; -import java.util.Set; - import org.geneontology.minerva.MinervaOWLGraphWrapper; import org.geneontology.minerva.MolecularModelManager.UnknownIdentifierException; import org.geneontology.minerva.lookup.ExternalLookupService; import org.geneontology.minerva.util.AnnotationShorthand; import org.obolibrary.obo2owl.Obo2OWLConstants; -import org.semanticweb.owlapi.model.AxiomType; -import org.semanticweb.owlapi.model.IRI; -import org.semanticweb.owlapi.model.OWLAnnotation; -import org.semanticweb.owlapi.model.OWLAnnotationAssertionAxiom; -import org.semanticweb.owlapi.model.OWLAnnotationProperty; -import org.semanticweb.owlapi.model.OWLAnnotationValue; -import org.semanticweb.owlapi.model.OWLAnnotationValueVisitorEx; -import org.semanticweb.owlapi.model.OWLAnonymousIndividual; -import org.semanticweb.owlapi.model.OWLClass; -import org.semanticweb.owlapi.model.OWLClassAssertionAxiom; -import org.semanticweb.owlapi.model.OWLClassExpression; -import org.semanticweb.owlapi.model.OWLDataFactory; -import org.semanticweb.owlapi.model.OWLIndividual; -import org.semanticweb.owlapi.model.OWLLiteral; -import org.semanticweb.owlapi.model.OWLNamedIndividual; -import org.semanticweb.owlapi.model.OWLObjectProperty; -import org.semanticweb.owlapi.model.OWLObjectPropertyAssertionAxiom; -import org.semanticweb.owlapi.model.OWLObjectPropertyExpression; -import org.semanticweb.owlapi.model.OWLObjectSomeValuesFrom; -import org.semanticweb.owlapi.model.OWLOntology; +import org.semanticweb.owlapi.model.*; import org.semanticweb.owlapi.util.OWLClassExpressionVisitorAdapter; - import owltools.vocab.OBOUpperVocabulary; +import java.util.*; + abstract class LegoModelWalker { - protected final OWLObjectProperty partOf; - protected final OWLObjectProperty occursIn; - protected final OWLObjectProperty enabledBy; - protected final OWLObjectProperty hasSupportingRef; - protected final OWLObjectProperty withSupportFrom; - - protected final OWLAnnotationProperty source_old; - protected final OWLAnnotationProperty contributor; - protected final OWLAnnotationProperty group; - protected final OWLAnnotationProperty date; - protected final OWLAnnotationProperty evidenceOld; - protected final OWLAnnotationProperty axiomHasEvidence; - protected final OWLAnnotationProperty with_old; - - private final OWLAnnotationProperty shortIdProp; - - protected final OWLDataFactory f; - - protected LegoModelWalker(OWLDataFactory df) { - this.f = df; - - partOf = OBOUpperVocabulary.BFO_part_of.getObjectProperty(f); - occursIn = OBOUpperVocabulary.BFO_occurs_in.getObjectProperty(f); - - enabledBy = OBOUpperVocabulary.GOREL_enabled_by.getObjectProperty(f); - - shortIdProp = df.getOWLAnnotationProperty(IRI.create(Obo2OWLConstants.OIOVOCAB_IRI_PREFIX+"id")); - - contributor = f.getOWLAnnotationProperty(AnnotationShorthand.contributor.getAnnotationProperty()); - date = f.getOWLAnnotationProperty(AnnotationShorthand.date.getAnnotationProperty()); - group = f.getOWLAnnotationProperty(IRI.create("http://geneontology.org/lego/group")); // TODO place holder - - 
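// ---------------------------------------------------------------------------
// Editor's sketch (illustrative only, not part of the patch): the LegoModelWalker
// constructor shown in this hunk caches typed property handles for fixed relation
// IRIs (the BFO/GOREL ones via OBOUpperVocabulary, the evidence-related ones via
// the raw OWLDataFactory calls). A minimal standalone version of the factory-based
// setup, with a hypothetical class name and only two of the IRIs copied from the
// patch:
import org.semanticweb.owlapi.apibinding.OWLManager;
import org.semanticweb.owlapi.model.IRI;
import org.semanticweb.owlapi.model.OWLAnnotationProperty;
import org.semanticweb.owlapi.model.OWLDataFactory;
import org.semanticweb.owlapi.model.OWLObjectProperty;

final class LegoVocabularySketch {
    final OWLObjectProperty hasSupportingRef;     // mirrors the hasSupportingRef field
    final OWLAnnotationProperty axiomHasEvidence; // mirrors the axiomHasEvidence field

    LegoVocabularySketch(OWLDataFactory f) {
        hasSupportingRef = f.getOWLObjectProperty(IRI.create("http://purl.obolibrary.org/obo/SEPIO_0000124"));
        axiomHasEvidence = f.getOWLAnnotationProperty(IRI.create("http://purl.obolibrary.org/obo/RO_0002612"));
    }

    public static void main(String[] args) {
        LegoVocabularySketch v = new LegoVocabularySketch(OWLManager.getOWLDataFactory());
        System.out.println(v.hasSupportingRef.getIRI() + " / " + v.axiomHasEvidence.getIRI());
    }
}
// ---------------------------------------------------------------------------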
axiomHasEvidence = f.getOWLAnnotationProperty(IRI.create("http://purl.obolibrary.org/obo/RO_0002612")); - hasSupportingRef = f.getOWLObjectProperty(IRI.create("http://purl.obolibrary.org/obo/SEPIO_0000124")); - withSupportFrom = f.getOWLObjectProperty(IRI.create("http://purl.obolibrary.org/obo/RO_0002614")); - - evidenceOld = f.getOWLAnnotationProperty(IRI.create("http://geneontology.org/lego/evidence")); - source_old = f.getOWLAnnotationProperty(AnnotationShorthand.source.getAnnotationProperty()); - with_old = f.getOWLAnnotationProperty(IRI.create("http://geneontology.org/lego/evidence-with")); - } - - protected static class Entry { - T value; - Metadata metadata; - List evidences; - Set expressions; - // TODO multi-species interactions - } - - protected static class Evidence { - OWLClass evidenceCls = null; - String source = null; - String with = null; - - Evidence copy() { - Evidence evidence = new Evidence(); - evidence.evidenceCls = this.evidenceCls; - evidence.source = this.source; - evidence.with = this.with; - return evidence; - } - } - - protected static class Metadata { - - String modelId = null; - Set individualIds = null; - Set contributors = null; - Set groups = null; - String date = null; - } - - public void walkModel(OWLOntology model, ExternalLookupService lookup, Collection allPayloads) throws UnknownIdentifierException { - final MinervaOWLGraphWrapper modelGraph = new MinervaOWLGraphWrapper(model); - - String modelId = null; - for(OWLAnnotation modelAnnotation : model.getAnnotations()) { - if (shortIdProp.equals(modelAnnotation.getProperty())) { - modelId = modelAnnotation.getValue().accept(new OWLAnnotationValueVisitorEx() { - - @Override - public String visit(IRI iri) { - return null; - } - - @Override - public String visit(OWLAnonymousIndividual individual) { - return null; - } - - @Override - public String visit(OWLLiteral literal) { - return literal.getLiteral(); - } - }); - } - } - - final Set annotationIndividuals = new HashSet(); - final Map evidenceIndividuals = new HashMap(); - - for(OWLNamedIndividual individual : model.getIndividualsInSignature()) { - Set individualTypes = getTypes(individual, model); - OWLClass eco = getEco(individualTypes); - if (eco != null) { - // is eco - Evidence evidence = assembleEvidence(individual, eco, model); - evidenceIndividuals.put(individual.getIRI(), evidence); - } - else if (isAnnotationIndividual(individual, individualTypes)) { - annotationIndividuals.add(individual); - } - } - - final Map allMetadata = new HashMap(); - for(OWLNamedIndividual individual : annotationIndividuals) { - Metadata metadata = extractMetadata(individual, modelGraph, modelId); - allMetadata.put(individual, metadata); - } - - for (OWLObjectPropertyAssertionAxiom axiom : model.getAxioms(AxiomType.OBJECT_PROPERTY_ASSERTION)) { - final OWLObjectPropertyExpression p = axiom.getProperty(); - if (enabledBy.equals(p)) { - // gene/protein/complex - final OWLNamedIndividual object = axiom.getObject().asOWLNamedIndividual(); - Set expressions = getSvfTypes(object, model); - Set objectTypes = getTypes(object, model); - for (OWLClass objectType : objectTypes) { - final PAYLOAD payload = initPayload(object, objectType, model, modelGraph, lookup); - allPayloads.add(payload); - - final OWLNamedIndividual subject = axiom.getSubject().asOWLNamedIndividual(); - - // get associated meta data - final Metadata linkMetadata = extractMetadata(axiom.getAnnotations(), modelGraph, modelId); - final Set linkEvidences = getEvidences(axiom, evidenceIndividuals); - - // get all 
OWLObjectPropertyAssertionAxiom for subject - Set subjectAxioms = model.getObjectPropertyAssertionAxioms(subject); - for(OWLObjectPropertyAssertionAxiom current : subjectAxioms) { - final Metadata currentMetadata = extractMetadata(current.getAnnotations(), modelGraph, modelId); - final Set currentEvidences = getEvidences(current, evidenceIndividuals); - final OWLObjectPropertyExpression currentP = current.getProperty(); - final OWLNamedIndividual currentObj = current.getObject().asOWLNamedIndividual(); - - if (occursIn.equals(currentP)) { - // check for cc for subject (occurs in) - for(OWLClass cls : getTypes(currentObj, model)) { - boolean added = handleCC(payload, cls, currentMetadata, currentEvidences, getExpressions(currentObj, model)); - if (!added) { - expressions.add(createSvf(occursIn, cls)); - } - } - } - else if (partOf.equals(currentP)) { - // check for bp for subject (part_of) - for(OWLClass cls : getTypes(currentObj, model)) { - boolean added = handleBP(payload, cls, currentMetadata, currentEvidences, getExpressions(currentObj, model));; - if (!added) { - expressions.add(createSvf(partOf, cls)); - } - } - - }else if (enabledBy.equals(currentP)) { - // do nothing - } - else { - Set types = getTypes(currentObj, model); - for (OWLClass cls : types) { - expressions.add(createSvf(currentP, cls)); - } - } - } - - // handle types - for(OWLClass cls : getTypes(subject, model)) { - handleMF(payload, cls, linkMetadata, linkEvidences, expressions); - } - } - } - } - } - - private Evidence assembleEvidence(OWLNamedIndividual individual, OWLClass eco, OWLOntology model) { - Evidence evidence = new Evidence(); - evidence.evidenceCls = eco; - evidence.source = null; - evidence.with = null; - Set evidenceLinks = model.getObjectPropertyAssertionAxioms(individual); - for(OWLObjectPropertyAssertionAxiom ax : evidenceLinks) { - OWLObjectPropertyExpression p = ax.getProperty(); - if (hasSupportingRef.equals(p)) { - OWLIndividual object = ax.getObject(); - if (object.isNamed()) { - OWLNamedIndividual namedIndividual = object.asOWLNamedIndividual(); - evidence.source = getShortHand(namedIndividual.getIRI()); - } - } - else if (withSupportFrom.equals(p)) { - OWLIndividual object = ax.getObject(); - if (object.isNamed()) { - Set types = getTypes(object.asOWLNamedIndividual(), model); - for (OWLClass cls : types) { - evidence.with = getShortHand(cls.getIRI()); - } - } - } - } - if (evidence.source == null) { - // check old type of modelling as annotations - for (OWLAnnotationAssertionAxiom annotation : model.getAnnotationAssertionAxioms(individual.getIRI())) { - OWLAnnotationProperty p = annotation.getProperty(); - if (source_old.equals(p)) { - evidence.source = getStringValue(annotation); - } - else if (with_old.equals(p)) { - evidence.with = getStringValue(annotation); - } - } - } - - return evidence; - } - - private String getStringValue(OWLAnnotationAssertionAxiom ax) { - OWLAnnotationValue value = ax.getValue(); - String stringValue = value.accept(new OWLAnnotationValueVisitorEx() { - - @Override - public String visit(IRI iri) { - return iri.toString(); - } - - @Override - public String visit(OWLAnonymousIndividual individual) { - return null; - } - - @Override - public String visit(OWLLiteral literal) { - return literal.getLiteral(); - } - }); - return stringValue; - } - - private Set getEvidences(OWLObjectPropertyAssertionAxiom axiom, Map evidenceIndividuals) { - Set evidences = new HashSet<>(); - for (OWLAnnotation annotation : axiom.getAnnotations()) { - OWLAnnotationProperty property = 
annotation.getProperty(); - if (evidenceOld.equals(property) || hasSupportingRef.equals(property)) { - IRI iri = annotation.getValue().accept(new OWLAnnotationValueVisitorEx() { - - @Override - public IRI visit(IRI iri) { - return iri; - } - - @Override - public IRI visit(OWLAnonymousIndividual individual) { - return null; - } - - @Override - public IRI visit(OWLLiteral literal) { - return null; - } - }); - if (iri != null) { - Evidence evidence = evidenceIndividuals.get(iri); - if (evidence != null) { - evidences.add(evidence); - } - } - } - } - return evidences; - } - - private Set getSvfTypes(OWLNamedIndividual i, OWLOntology model) { - Set axioms = model.getClassAssertionAxioms(i); - final Set svfs = new HashSet(); - for (OWLClassAssertionAxiom axiom : axioms) { - axiom.getClassExpression().accept(new OWLClassExpressionVisitorAdapter(){ - - @Override - public void visit(OWLObjectSomeValuesFrom svf) { - svfs.add(svf); - } - }); - } - return svfs; - } - - protected abstract boolean isEco(OWLClass cls); - - protected abstract boolean isAnnotationIndividual(OWLNamedIndividual i, Set types); - - private OWLClass getEco(Set set) { - for (OWLClass cls : set) { - if (isEco(cls)) { - return cls; - } - } - return null; - } - - private Set getTypes(OWLNamedIndividual i, OWLOntology model) { - Set axioms = model.getClassAssertionAxioms(i); - Set types = new HashSet(); - for (OWLClassAssertionAxiom axiom : axioms) { - OWLClassExpression ce = axiom.getClassExpression(); - if (ce instanceof OWLClass) { - OWLClass cls = ce.asOWLClass(); - if (cls.isBuiltIn() == false) { - types.add(cls); - } - } - } - return types; - } - - private Set getExpressions(OWLNamedIndividual i, OWLOntology model) { - Set result = new HashSet(); - Set axioms = model.getObjectPropertyAssertionAxioms(i); - for (OWLObjectPropertyAssertionAxiom ax : axioms) { - if (enabledBy.equals(ax.getProperty())) { - continue; - } - OWLIndividual object = ax.getObject(); - if (object.isNamed()) { - Set types = getTypes(object.asOWLNamedIndividual(), model); - for (OWLClass cls : types) { - result.add(createSvf(ax.getProperty(), cls)); - } - } - } - return result; - } - - protected abstract PAYLOAD initPayload(OWLNamedIndividual object, OWLClass objectType, OWLOntology model, MinervaOWLGraphWrapper modelGraph, ExternalLookupService lookup) throws UnknownIdentifierException; - - protected abstract boolean handleCC(PAYLOAD payload, OWLClass cls, Metadata metadata, Set evidences, Set expressions); - - protected abstract boolean handleMF(PAYLOAD payload, OWLClass cls, Metadata metadata, Set evidences, Set expressions); - - protected abstract boolean handleBP(PAYLOAD payload, OWLClass cls, Metadata metadata, Set evidences, Set expressions); - - protected abstract String getShortHand(IRI iri); - - private OWLObjectSomeValuesFrom createSvf(OWLObjectPropertyExpression p, OWLClass c) { - return f.getOWLObjectSomeValuesFrom(p, c); - } - - private Metadata extractMetadata(OWLNamedIndividual individual, MinervaOWLGraphWrapper modelGraph, String modelId) { - Metadata metadata = new Metadata(); - metadata.modelId = modelId; - metadata.individualIds = new HashSet(); - metadata.individualIds.add(individual.getIRI()); - Set assertionAxioms = modelGraph.getSourceOntology().getAnnotationAssertionAxioms(individual.getIRI()); - for (OWLAnnotationAssertionAxiom axiom : assertionAxioms) { - OWLAnnotationProperty currentProperty = axiom.getProperty(); - OWLAnnotationValue value = axiom.getValue(); - extractMetadata(currentProperty, value, metadata); - } - return 
metadata; - } - - private void extractMetadata(OWLAnnotationProperty p, OWLAnnotationValue v, final Metadata metadata) { - if (this.contributor.equals(p)) { - if (v instanceof OWLLiteral) { - String contributor = ((OWLLiteral) v).getLiteral(); - if (metadata.contributors == null) { - metadata.contributors = new HashSet<>(); - } - metadata.contributors.add(contributor); - } - } - else if (this.date.equals(p)) { - if (v instanceof OWLLiteral) { - metadata.date = ((OWLLiteral) v).getLiteral(); - } - } - else if (this.group.equals(p)) { - if (v instanceof OWLLiteral) { - String group = ((OWLLiteral) v).getLiteral(); - if(metadata.groups == null) { - metadata.groups = new HashSet<>(); - } - metadata.groups.add(group); - } - } - } - - private Metadata extractMetadata(Collection annotations, MinervaOWLGraphWrapper modelGraph, String modelId) { - Metadata metadata = new Metadata(); - metadata.modelId = modelId; - if (annotations != null && !annotations.isEmpty()) { - for (OWLAnnotation owlAnnotation : annotations) { - OWLAnnotationProperty currentProperty = owlAnnotation.getProperty(); - OWLAnnotationValue value = owlAnnotation.getValue(); - extractMetadata(currentProperty, value, metadata); - } - } - return metadata; - } + protected final OWLObjectProperty partOf; + protected final OWLObjectProperty occursIn; + protected final OWLObjectProperty enabledBy; + protected final OWLObjectProperty hasSupportingRef; + protected final OWLObjectProperty withSupportFrom; + + protected final OWLAnnotationProperty source_old; + protected final OWLAnnotationProperty contributor; + protected final OWLAnnotationProperty group; + protected final OWLAnnotationProperty date; + protected final OWLAnnotationProperty evidenceOld; + protected final OWLAnnotationProperty axiomHasEvidence; + protected final OWLAnnotationProperty with_old; + + private final OWLAnnotationProperty shortIdProp; + + protected final OWLDataFactory f; + + protected LegoModelWalker(OWLDataFactory df) { + this.f = df; + + partOf = OBOUpperVocabulary.BFO_part_of.getObjectProperty(f); + occursIn = OBOUpperVocabulary.BFO_occurs_in.getObjectProperty(f); + + enabledBy = OBOUpperVocabulary.GOREL_enabled_by.getObjectProperty(f); + + shortIdProp = df.getOWLAnnotationProperty(IRI.create(Obo2OWLConstants.OIOVOCAB_IRI_PREFIX + "id")); + + contributor = f.getOWLAnnotationProperty(AnnotationShorthand.contributor.getAnnotationProperty()); + date = f.getOWLAnnotationProperty(AnnotationShorthand.date.getAnnotationProperty()); + group = f.getOWLAnnotationProperty(IRI.create("http://geneontology.org/lego/group")); // TODO place holder + + axiomHasEvidence = f.getOWLAnnotationProperty(IRI.create("http://purl.obolibrary.org/obo/RO_0002612")); + hasSupportingRef = f.getOWLObjectProperty(IRI.create("http://purl.obolibrary.org/obo/SEPIO_0000124")); + withSupportFrom = f.getOWLObjectProperty(IRI.create("http://purl.obolibrary.org/obo/RO_0002614")); + + evidenceOld = f.getOWLAnnotationProperty(IRI.create("http://geneontology.org/lego/evidence")); + source_old = f.getOWLAnnotationProperty(AnnotationShorthand.source.getAnnotationProperty()); + with_old = f.getOWLAnnotationProperty(IRI.create("http://geneontology.org/lego/evidence-with")); + } + + protected static class Entry { + T value; + Metadata metadata; + List evidences; + Set expressions; + // TODO multi-species interactions + } + + protected static class Evidence { + OWLClass evidenceCls = null; + String source = null; + String with = null; + + Evidence copy() { + Evidence evidence = new Evidence(); + 
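// ---------------------------------------------------------------------------
// Editor's sketch (illustrative only, not part of the patch): getStringValue()
// and extractMetadata() elsewhere in this class only ever need the literal text
// of an annotation value. Where a full OWLAnnotationValueVisitorEx is not
// required, the value can also be unwrapped with asLiteral(), the same call the
// CLI's reasoner report above uses for hasDbXref. The helper below is
// hypothetical and returns null for IRI or anonymous-individual values.
import org.semanticweb.owlapi.model.OWLAnnotation;

final class AnnotationTextSketch {
    static String literalOrNull(OWLAnnotation annotation) {
        if (annotation.getValue().asLiteral().isPresent()) {
            return annotation.getValue().asLiteral().get().getLiteral();
        }
        return null;  // value was an IRI or an anonymous individual
    }
}
// ---------------------------------------------------------------------------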
evidence.evidenceCls = this.evidenceCls; + evidence.source = this.source; + evidence.with = this.with; + return evidence; + } + } + + protected static class Metadata { + + String modelId = null; + Set individualIds = null; + Set contributors = null; + Set groups = null; + String date = null; + } + + public void walkModel(OWLOntology model, ExternalLookupService lookup, Collection allPayloads) throws UnknownIdentifierException { + final MinervaOWLGraphWrapper modelGraph = new MinervaOWLGraphWrapper(model); + + String modelId = null; + for (OWLAnnotation modelAnnotation : model.getAnnotations()) { + if (shortIdProp.equals(modelAnnotation.getProperty())) { + modelId = modelAnnotation.getValue().accept(new OWLAnnotationValueVisitorEx() { + + @Override + public String visit(IRI iri) { + return null; + } + + @Override + public String visit(OWLAnonymousIndividual individual) { + return null; + } + + @Override + public String visit(OWLLiteral literal) { + return literal.getLiteral(); + } + }); + } + } + + final Set annotationIndividuals = new HashSet(); + final Map evidenceIndividuals = new HashMap(); + + for (OWLNamedIndividual individual : model.getIndividualsInSignature()) { + Set individualTypes = getTypes(individual, model); + OWLClass eco = getEco(individualTypes); + if (eco != null) { + // is eco + Evidence evidence = assembleEvidence(individual, eco, model); + evidenceIndividuals.put(individual.getIRI(), evidence); + } else if (isAnnotationIndividual(individual, individualTypes)) { + annotationIndividuals.add(individual); + } + } + + final Map allMetadata = new HashMap(); + for (OWLNamedIndividual individual : annotationIndividuals) { + Metadata metadata = extractMetadata(individual, modelGraph, modelId); + allMetadata.put(individual, metadata); + } + + for (OWLObjectPropertyAssertionAxiom axiom : model.getAxioms(AxiomType.OBJECT_PROPERTY_ASSERTION)) { + final OWLObjectPropertyExpression p = axiom.getProperty(); + if (enabledBy.equals(p)) { + // gene/protein/complex + final OWLNamedIndividual object = axiom.getObject().asOWLNamedIndividual(); + Set expressions = getSvfTypes(object, model); + Set objectTypes = getTypes(object, model); + for (OWLClass objectType : objectTypes) { + final PAYLOAD payload = initPayload(object, objectType, model, modelGraph, lookup); + allPayloads.add(payload); + + final OWLNamedIndividual subject = axiom.getSubject().asOWLNamedIndividual(); + + // get associated meta data + final Metadata linkMetadata = extractMetadata(axiom.getAnnotations(), modelGraph, modelId); + final Set linkEvidences = getEvidences(axiom, evidenceIndividuals); + + // get all OWLObjectPropertyAssertionAxiom for subject + Set subjectAxioms = model.getObjectPropertyAssertionAxioms(subject); + for (OWLObjectPropertyAssertionAxiom current : subjectAxioms) { + final Metadata currentMetadata = extractMetadata(current.getAnnotations(), modelGraph, modelId); + final Set currentEvidences = getEvidences(current, evidenceIndividuals); + final OWLObjectPropertyExpression currentP = current.getProperty(); + final OWLNamedIndividual currentObj = current.getObject().asOWLNamedIndividual(); + + if (occursIn.equals(currentP)) { + // check for cc for subject (occurs in) + for (OWLClass cls : getTypes(currentObj, model)) { + boolean added = handleCC(payload, cls, currentMetadata, currentEvidences, getExpressions(currentObj, model)); + if (!added) { + expressions.add(createSvf(occursIn, cls)); + } + } + } else if (partOf.equals(currentP)) { + // check for bp for subject (part_of) + for (OWLClass cls : 
getTypes(currentObj, model)) { + boolean added = handleBP(payload, cls, currentMetadata, currentEvidences, getExpressions(currentObj, model)); + ; + if (!added) { + expressions.add(createSvf(partOf, cls)); + } + } + + } else if (enabledBy.equals(currentP)) { + // do nothing + } else { + Set types = getTypes(currentObj, model); + for (OWLClass cls : types) { + expressions.add(createSvf(currentP, cls)); + } + } + } + + // handle types + for (OWLClass cls : getTypes(subject, model)) { + handleMF(payload, cls, linkMetadata, linkEvidences, expressions); + } + } + } + } + } + + private Evidence assembleEvidence(OWLNamedIndividual individual, OWLClass eco, OWLOntology model) { + Evidence evidence = new Evidence(); + evidence.evidenceCls = eco; + evidence.source = null; + evidence.with = null; + Set evidenceLinks = model.getObjectPropertyAssertionAxioms(individual); + for (OWLObjectPropertyAssertionAxiom ax : evidenceLinks) { + OWLObjectPropertyExpression p = ax.getProperty(); + if (hasSupportingRef.equals(p)) { + OWLIndividual object = ax.getObject(); + if (object.isNamed()) { + OWLNamedIndividual namedIndividual = object.asOWLNamedIndividual(); + evidence.source = getShortHand(namedIndividual.getIRI()); + } + } else if (withSupportFrom.equals(p)) { + OWLIndividual object = ax.getObject(); + if (object.isNamed()) { + Set types = getTypes(object.asOWLNamedIndividual(), model); + for (OWLClass cls : types) { + evidence.with = getShortHand(cls.getIRI()); + } + } + } + } + if (evidence.source == null) { + // check old type of modelling as annotations + for (OWLAnnotationAssertionAxiom annotation : model.getAnnotationAssertionAxioms(individual.getIRI())) { + OWLAnnotationProperty p = annotation.getProperty(); + if (source_old.equals(p)) { + evidence.source = getStringValue(annotation); + } else if (with_old.equals(p)) { + evidence.with = getStringValue(annotation); + } + } + } + + return evidence; + } + + private String getStringValue(OWLAnnotationAssertionAxiom ax) { + OWLAnnotationValue value = ax.getValue(); + String stringValue = value.accept(new OWLAnnotationValueVisitorEx() { + + @Override + public String visit(IRI iri) { + return iri.toString(); + } + + @Override + public String visit(OWLAnonymousIndividual individual) { + return null; + } + + @Override + public String visit(OWLLiteral literal) { + return literal.getLiteral(); + } + }); + return stringValue; + } + + private Set getEvidences(OWLObjectPropertyAssertionAxiom axiom, Map evidenceIndividuals) { + Set evidences = new HashSet<>(); + for (OWLAnnotation annotation : axiom.getAnnotations()) { + OWLAnnotationProperty property = annotation.getProperty(); + if (evidenceOld.equals(property) || hasSupportingRef.equals(property)) { + IRI iri = annotation.getValue().accept(new OWLAnnotationValueVisitorEx() { + + @Override + public IRI visit(IRI iri) { + return iri; + } + + @Override + public IRI visit(OWLAnonymousIndividual individual) { + return null; + } + + @Override + public IRI visit(OWLLiteral literal) { + return null; + } + }); + if (iri != null) { + Evidence evidence = evidenceIndividuals.get(iri); + if (evidence != null) { + evidences.add(evidence); + } + } + } + } + return evidences; + } + + private Set getSvfTypes(OWLNamedIndividual i, OWLOntology model) { + Set axioms = model.getClassAssertionAxioms(i); + final Set svfs = new HashSet(); + for (OWLClassAssertionAxiom axiom : axioms) { + axiom.getClassExpression().accept(new OWLClassExpressionVisitorAdapter() { + + @Override + public void visit(OWLObjectSomeValuesFrom svf) { + 
svfs.add(svf); + } + }); + } + return svfs; + } + + protected abstract boolean isEco(OWLClass cls); + + protected abstract boolean isAnnotationIndividual(OWLNamedIndividual i, Set types); + + private OWLClass getEco(Set set) { + for (OWLClass cls : set) { + if (isEco(cls)) { + return cls; + } + } + return null; + } + + private Set getTypes(OWLNamedIndividual i, OWLOntology model) { + Set axioms = model.getClassAssertionAxioms(i); + Set types = new HashSet(); + for (OWLClassAssertionAxiom axiom : axioms) { + OWLClassExpression ce = axiom.getClassExpression(); + if (ce instanceof OWLClass) { + OWLClass cls = ce.asOWLClass(); + if (cls.isBuiltIn() == false) { + types.add(cls); + } + } + } + return types; + } + + private Set getExpressions(OWLNamedIndividual i, OWLOntology model) { + Set result = new HashSet(); + Set axioms = model.getObjectPropertyAssertionAxioms(i); + for (OWLObjectPropertyAssertionAxiom ax : axioms) { + if (enabledBy.equals(ax.getProperty())) { + continue; + } + OWLIndividual object = ax.getObject(); + if (object.isNamed()) { + Set types = getTypes(object.asOWLNamedIndividual(), model); + for (OWLClass cls : types) { + result.add(createSvf(ax.getProperty(), cls)); + } + } + } + return result; + } + + protected abstract PAYLOAD initPayload(OWLNamedIndividual object, OWLClass objectType, OWLOntology model, MinervaOWLGraphWrapper modelGraph, ExternalLookupService lookup) throws UnknownIdentifierException; + + protected abstract boolean handleCC(PAYLOAD payload, OWLClass cls, Metadata metadata, Set evidences, Set expressions); + + protected abstract boolean handleMF(PAYLOAD payload, OWLClass cls, Metadata metadata, Set evidences, Set expressions); + + protected abstract boolean handleBP(PAYLOAD payload, OWLClass cls, Metadata metadata, Set evidences, Set expressions); + + protected abstract String getShortHand(IRI iri); + + private OWLObjectSomeValuesFrom createSvf(OWLObjectPropertyExpression p, OWLClass c) { + return f.getOWLObjectSomeValuesFrom(p, c); + } + + private Metadata extractMetadata(OWLNamedIndividual individual, MinervaOWLGraphWrapper modelGraph, String modelId) { + Metadata metadata = new Metadata(); + metadata.modelId = modelId; + metadata.individualIds = new HashSet(); + metadata.individualIds.add(individual.getIRI()); + Set assertionAxioms = modelGraph.getSourceOntology().getAnnotationAssertionAxioms(individual.getIRI()); + for (OWLAnnotationAssertionAxiom axiom : assertionAxioms) { + OWLAnnotationProperty currentProperty = axiom.getProperty(); + OWLAnnotationValue value = axiom.getValue(); + extractMetadata(currentProperty, value, metadata); + } + return metadata; + } + + private void extractMetadata(OWLAnnotationProperty p, OWLAnnotationValue v, final Metadata metadata) { + if (this.contributor.equals(p)) { + if (v instanceof OWLLiteral) { + String contributor = ((OWLLiteral) v).getLiteral(); + if (metadata.contributors == null) { + metadata.contributors = new HashSet<>(); + } + metadata.contributors.add(contributor); + } + } else if (this.date.equals(p)) { + if (v instanceof OWLLiteral) { + metadata.date = ((OWLLiteral) v).getLiteral(); + } + } else if (this.group.equals(p)) { + if (v instanceof OWLLiteral) { + String group = ((OWLLiteral) v).getLiteral(); + if (metadata.groups == null) { + metadata.groups = new HashSet<>(); + } + metadata.groups.add(group); + } + } + } + + private Metadata extractMetadata(Collection annotations, MinervaOWLGraphWrapper modelGraph, String modelId) { + Metadata metadata = new Metadata(); + metadata.modelId = modelId; + if 
(annotations != null && !annotations.isEmpty()) { + for (OWLAnnotation owlAnnotation : annotations) { + OWLAnnotationProperty currentProperty = owlAnnotation.getProperty(); + OWLAnnotationValue value = owlAnnotation.getValue(); + extractMetadata(currentProperty, value, metadata); + } + } + return metadata; + } } diff --git a/minerva-converter/src/main/java/org/geneontology/minerva/legacy/sparql/AnnotationExtension.java b/minerva-converter/src/main/java/org/geneontology/minerva/legacy/sparql/AnnotationExtension.java index a559478b..5536cb2d 100644 --- a/minerva-converter/src/main/java/org/geneontology/minerva/legacy/sparql/AnnotationExtension.java +++ b/minerva-converter/src/main/java/org/geneontology/minerva/legacy/sparql/AnnotationExtension.java @@ -1,33 +1,33 @@ package org.geneontology.minerva.legacy.sparql; -import javax.annotation.Nonnull; - import org.apache.jena.graph.Triple; import org.semanticweb.owlapi.model.IRI; +import javax.annotation.Nonnull; + public class AnnotationExtension { - - private final Triple triple; - private final IRI valueType; - - public AnnotationExtension(Triple triple, IRI valueType) { - this.triple = triple; - this.valueType = valueType; - } - - @Nonnull - public Triple getTriple() { - return triple; - } - - @Nonnull - public IRI getValueType() { - return valueType; - } - - @Override - public String toString() { - return this.getTriple() + " " + this.getValueType(); - } + + private final Triple triple; + private final IRI valueType; + + public AnnotationExtension(Triple triple, IRI valueType) { + this.triple = triple; + this.valueType = valueType; + } + + @Nonnull + public Triple getTriple() { + return triple; + } + + @Nonnull + public IRI getValueType() { + return valueType; + } + + @Override + public String toString() { + return this.getTriple() + " " + this.getValueType(); + } } diff --git a/minerva-converter/src/main/java/org/geneontology/minerva/legacy/sparql/BasicGPADData.java b/minerva-converter/src/main/java/org/geneontology/minerva/legacy/sparql/BasicGPADData.java index cd17687c..a352ff82 100644 --- a/minerva-converter/src/main/java/org/geneontology/minerva/legacy/sparql/BasicGPADData.java +++ b/minerva-converter/src/main/java/org/geneontology/minerva/legacy/sparql/BasicGPADData.java @@ -4,79 +4,81 @@ import org.semanticweb.owlapi.model.IRI; public class BasicGPADData { - private final Node objectNode; - private final IRI object; - private GPADOperatorStatus operator; - private final IRI qualifier; - private final Node ontologyClassNode; - private final IRI ontologyClass; - - public BasicGPADData(Node objectNode, IRI object, IRI qualifier, Node ontologyClassNode, IRI ontologyClass) { - this.object = object; - this.operator = GPADOperatorStatus.NONE; - this.qualifier = qualifier; - this.ontologyClass = ontologyClass; - this.objectNode = objectNode; - this.ontologyClassNode = ontologyClassNode; - } - - public IRI getObject() { - return this.object; - } + private final Node objectNode; + private final IRI object; + private GPADOperatorStatus operator; + private final IRI qualifier; + private final Node ontologyClassNode; + private final IRI ontologyClass; - public void setOperator(GPADOperatorStatus operator) { - this.operator = operator; - } - - public GPADOperatorStatus getOperator() { - return operator; - } - - public IRI getQualifier() { - return this.qualifier; - } + public BasicGPADData(Node objectNode, IRI object, IRI qualifier, Node ontologyClassNode, IRI ontologyClass) { + this.object = object; + this.operator = GPADOperatorStatus.NONE; + 
this.qualifier = qualifier; + this.ontologyClass = ontologyClass; + this.objectNode = objectNode; + this.ontologyClassNode = ontologyClassNode; + } - public IRI getOntologyClass() { - return this.ontologyClass; - } + public IRI getObject() { + return this.object; + } - public Node getObjectNode() { - return this.objectNode; - } + public void setOperator(GPADOperatorStatus operator) { + this.operator = operator; + } - public Node getOntologyClassNode() { - return this.ontologyClassNode; - } + public GPADOperatorStatus getOperator() { + return operator; + } - @Override - public boolean equals(Object other) { - if (this == other) { return true; } - else if (!(other instanceof BasicGPADData)) { return false; } - else { - BasicGPADData otherData = (BasicGPADData)other; - return this.getObject().equals(otherData.getObject()) - && this.getOperator().equals(otherData.getOperator()) - && this.getQualifier().equals(otherData.getQualifier()) - && this.getOntologyClass().equals(otherData.getOntologyClass()) - && this.getObjectNode().equals(otherData.getObjectNode()) - && this.getOntologyClassNode().equals(otherData.getOntologyClassNode()); - } - } + public IRI getQualifier() { + return this.qualifier; + } - @Override - public int hashCode() { - int result = 17; - result = 37 * result + this.getObject().hashCode(); - result = 37 * result + this.getOperator().hashCode(); - result = 37 * result + this.getQualifier().hashCode(); - result = 37 * result + this.getOntologyClass().hashCode(); - result = 37 * result + this.getObjectNode().hashCode(); - result = 37 * result + this.getOntologyClassNode().hashCode(); - return result; - } - - @Override - public String toString() { - return this.object.toString() + ", " + this.operator.toString() + "," + this.qualifier.toString() + ", " + this.ontologyClass.toString(); - } + public IRI getOntologyClass() { + return this.ontologyClass; + } + + public Node getObjectNode() { + return this.objectNode; + } + + public Node getOntologyClassNode() { + return this.ontologyClassNode; + } + + @Override + public boolean equals(Object other) { + if (this == other) { + return true; + } else if (!(other instanceof BasicGPADData)) { + return false; + } else { + BasicGPADData otherData = (BasicGPADData) other; + return this.getObject().equals(otherData.getObject()) + && this.getOperator().equals(otherData.getOperator()) + && this.getQualifier().equals(otherData.getQualifier()) + && this.getOntologyClass().equals(otherData.getOntologyClass()) + && this.getObjectNode().equals(otherData.getObjectNode()) + && this.getOntologyClassNode().equals(otherData.getOntologyClassNode()); + } + } + + @Override + public int hashCode() { + int result = 17; + result = 37 * result + this.getObject().hashCode(); + result = 37 * result + this.getOperator().hashCode(); + result = 37 * result + this.getQualifier().hashCode(); + result = 37 * result + this.getOntologyClass().hashCode(); + result = 37 * result + this.getObjectNode().hashCode(); + result = 37 * result + this.getOntologyClassNode().hashCode(); + return result; + } + + @Override + public String toString() { + return this.object.toString() + ", " + this.operator.toString() + "," + this.qualifier.toString() + ", " + this.ontologyClass.toString(); + } } \ No newline at end of file diff --git a/minerva-converter/src/main/java/org/geneontology/minerva/legacy/sparql/DefaultGPADData.java b/minerva-converter/src/main/java/org/geneontology/minerva/legacy/sparql/DefaultGPADData.java index 9e70cdd0..a27517c4 100644 --- 
a/minerva-converter/src/main/java/org/geneontology/minerva/legacy/sparql/DefaultGPADData.java +++ b/minerva-converter/src/main/java/org/geneontology/minerva/legacy/sparql/DefaultGPADData.java @@ -1,144 +1,146 @@ package org.geneontology.minerva.legacy.sparql; -import java.util.Optional; -import java.util.Set; - import org.apache.commons.lang3.tuple.Pair; import org.semanticweb.owlapi.model.IRI; +import java.util.Optional; +import java.util.Set; + public class DefaultGPADData implements GPADData { - private final IRI object; - private GPADOperatorStatus operator; - private final IRI qualifier; - private final IRI ontologyClass; - private final Set annotationExtensions; - private final String reference; - private final IRI evidence; - private final Optional withOrFrom; - private final Optional interactingTaxon; - private final String modificationDate; - private final String assignedBy; - private final Set> annotations; - - public DefaultGPADData(IRI object, IRI qualifier, IRI ontologyClass, Set annotationExtensions, - String reference, IRI evidence, Optional withOrFrom, Optional interactingTaxon, - String modificationDate, String assignedBy, Set> annotations) { - this.object = object; - this.qualifier = qualifier; - this.ontologyClass = ontologyClass; - this.annotationExtensions = annotationExtensions; - this.reference = reference; - this.evidence = evidence; - this.withOrFrom = withOrFrom; - this.interactingTaxon = interactingTaxon; - this.modificationDate = modificationDate; - this.assignedBy = assignedBy; - this.annotations = annotations; - } - - @Override - public IRI getObject() { - return this.object; - } - - public void setOperator(GPADOperatorStatus operator) { - this.operator = operator; - } - - @Override - public GPADOperatorStatus getOperator() { - return operator; - } - - @Override - public IRI getQualifier() { - return this.qualifier; - } - - @Override - public IRI getOntologyClass() { - return this.ontologyClass; - } - - @Override - public Set getAnnotationExtensions() { - return this.annotationExtensions; - } - - - @Override - public String getReference() { - return this.reference; - } - - @Override - public IRI getEvidence() { - return this.evidence; - } - - @Override - public Optional getWithOrFrom() { - return this.withOrFrom; - } - - @Override - public Optional getInteractingTaxonID() { - return this.interactingTaxon; - } - - @Override - public String getModificationDate() { - return this.modificationDate; - } - - @Override - public String getAssignedBy() { - return this.assignedBy; - } - - @Override - public Set> getAnnotations() { - return this.annotations; - } - - @Override - public boolean equals(Object other) { - if (this == other) { return true; } - else if (!(other instanceof DefaultGPADData)) { return false; } - else { - DefaultGPADData otherData = (DefaultGPADData)other; - return this.getObject().equals(otherData.getObject()) - && this.getOperator().equals(otherData.getOperator()) - && this.getQualifier().equals(otherData.getQualifier()) - && this.getOntologyClass().equals(otherData.getOntologyClass()) - && this.getAnnotationExtensions().equals(otherData.getAnnotationExtensions()) - && this.getReference().equals(otherData.getReference()) - && this.getEvidence().equals(otherData.getEvidence()) - && this.getWithOrFrom().equals(otherData.getWithOrFrom()) - && this.getInteractingTaxonID().equals(otherData.getInteractingTaxonID()) - && this.getModificationDate().equals(otherData.getModificationDate()) - && this.getAssignedBy().equals(otherData.getAssignedBy()) - && 
this.getAnnotations().equals(otherData.getAnnotations()); - } - } - - @Override - public int hashCode() { - int result = 17; - result = 37 * result + this.getObject().hashCode(); - result = 37 * result + this.getOperator().hashCode(); - result = 37 * result + this.getQualifier().hashCode(); - result = 37 * result + this.getOntologyClass().hashCode(); - result = 37 * result + this.getAnnotationExtensions().hashCode(); - result = 37 * result + this.getReference().hashCode(); - result = 37 * result + this.getEvidence().hashCode(); - result = 37 * result + this.getReference().hashCode(); - result = 37 * result + this.getWithOrFrom().hashCode(); - result = 37 * result + this.getInteractingTaxonID().hashCode(); - result = 37 * result + this.getModificationDate().hashCode(); - result = 37 * result + this.getAssignedBy().hashCode(); - result = 37 * result + this.getAnnotations().hashCode(); - return result; - } + private final IRI object; + private GPADOperatorStatus operator; + private final IRI qualifier; + private final IRI ontologyClass; + private final Set annotationExtensions; + private final String reference; + private final IRI evidence; + private final Optional withOrFrom; + private final Optional interactingTaxon; + private final String modificationDate; + private final String assignedBy; + private final Set> annotations; + + public DefaultGPADData(IRI object, IRI qualifier, IRI ontologyClass, Set annotationExtensions, + String reference, IRI evidence, Optional withOrFrom, Optional interactingTaxon, + String modificationDate, String assignedBy, Set> annotations) { + this.object = object; + this.qualifier = qualifier; + this.ontologyClass = ontologyClass; + this.annotationExtensions = annotationExtensions; + this.reference = reference; + this.evidence = evidence; + this.withOrFrom = withOrFrom; + this.interactingTaxon = interactingTaxon; + this.modificationDate = modificationDate; + this.assignedBy = assignedBy; + this.annotations = annotations; + } + + @Override + public IRI getObject() { + return this.object; + } + + public void setOperator(GPADOperatorStatus operator) { + this.operator = operator; + } + + @Override + public GPADOperatorStatus getOperator() { + return operator; + } + + @Override + public IRI getQualifier() { + return this.qualifier; + } + + @Override + public IRI getOntologyClass() { + return this.ontologyClass; + } + + @Override + public Set getAnnotationExtensions() { + return this.annotationExtensions; + } + + + @Override + public String getReference() { + return this.reference; + } + + @Override + public IRI getEvidence() { + return this.evidence; + } + + @Override + public Optional getWithOrFrom() { + return this.withOrFrom; + } + + @Override + public Optional getInteractingTaxonID() { + return this.interactingTaxon; + } + + @Override + public String getModificationDate() { + return this.modificationDate; + } + + @Override + public String getAssignedBy() { + return this.assignedBy; + } + + @Override + public Set> getAnnotations() { + return this.annotations; + } + + @Override + public boolean equals(Object other) { + if (this == other) { + return true; + } else if (!(other instanceof DefaultGPADData)) { + return false; + } else { + DefaultGPADData otherData = (DefaultGPADData) other; + return this.getObject().equals(otherData.getObject()) + && this.getOperator().equals(otherData.getOperator()) + && this.getQualifier().equals(otherData.getQualifier()) + && this.getOntologyClass().equals(otherData.getOntologyClass()) + && 
this.getAnnotationExtensions().equals(otherData.getAnnotationExtensions()) + && this.getReference().equals(otherData.getReference()) + && this.getEvidence().equals(otherData.getEvidence()) + && this.getWithOrFrom().equals(otherData.getWithOrFrom()) + && this.getInteractingTaxonID().equals(otherData.getInteractingTaxonID()) + && this.getModificationDate().equals(otherData.getModificationDate()) + && this.getAssignedBy().equals(otherData.getAssignedBy()) + && this.getAnnotations().equals(otherData.getAnnotations()); + } + } + + @Override + public int hashCode() { + int result = 17; + result = 37 * result + this.getObject().hashCode(); + result = 37 * result + this.getOperator().hashCode(); + result = 37 * result + this.getQualifier().hashCode(); + result = 37 * result + this.getOntologyClass().hashCode(); + result = 37 * result + this.getAnnotationExtensions().hashCode(); + result = 37 * result + this.getReference().hashCode(); + result = 37 * result + this.getEvidence().hashCode(); + result = 37 * result + this.getReference().hashCode(); + result = 37 * result + this.getWithOrFrom().hashCode(); + result = 37 * result + this.getInteractingTaxonID().hashCode(); + result = 37 * result + this.getModificationDate().hashCode(); + result = 37 * result + this.getAssignedBy().hashCode(); + result = 37 * result + this.getAnnotations().hashCode(); + return result; + } } diff --git a/minerva-converter/src/main/java/org/geneontology/minerva/legacy/sparql/ExportExplanation.java b/minerva-converter/src/main/java/org/geneontology/minerva/legacy/sparql/ExportExplanation.java index a4e57f1a..72acd27a 100644 --- a/minerva-converter/src/main/java/org/geneontology/minerva/legacy/sparql/ExportExplanation.java +++ b/minerva-converter/src/main/java/org/geneontology/minerva/legacy/sparql/ExportExplanation.java @@ -1,18 +1,7 @@ package org.geneontology.minerva.legacy.sparql; -import java.io.IOException; -import java.nio.charset.StandardCharsets; -import java.util.ArrayList; -import java.util.HashMap; -import java.util.HashSet; -import java.util.List; -import java.util.Map; -import java.util.Optional; -import java.util.Set; -import java.util.function.Function; -import java.util.stream.Collectors; -import java.util.stream.Stream; - +import com.google.gson.Gson; +import com.google.gson.GsonBuilder; import org.apache.commons.io.IOUtils; import org.apache.jena.query.QueryExecution; import org.apache.jena.query.QueryExecutionFactory; @@ -26,204 +15,199 @@ import org.geneontology.minerva.explanation.ExplanationTerm; import org.geneontology.minerva.explanation.ExplanationTriple; import org.geneontology.minerva.explanation.ModelExplanation; -import org.geneontology.minerva.lookup.ExternalLookupService; -import org.geneontology.minerva.lookup.ExternalLookupService.LookupEntry; -import org.geneontology.rules.engine.Explanation; -import org.geneontology.rules.engine.Node; -import org.geneontology.rules.engine.Rule; -import org.geneontology.rules.engine.Triple; -import org.geneontology.rules.engine.TriplePattern; -import org.geneontology.rules.engine.URI; -import org.geneontology.rules.engine.Variable; -import org.geneontology.rules.engine.WorkingMemory; +import org.geneontology.rules.engine.*; import org.geneontology.rules.util.Bridge; import org.semanticweb.owlapi.model.IRI; - -import com.google.gson.Gson; -import com.google.gson.GsonBuilder; - import scala.collection.JavaConverters; +import java.io.IOException; +import java.nio.charset.StandardCharsets; +import java.util.*; +import java.util.function.Function; +import 
java.util.stream.Collectors; +import java.util.stream.Stream; + public class ExportExplanation { - private static final Logger LOG = Logger.getLogger(ExportExplanation.class); - private static String mainQuery; - static { - try { - mainQuery = IOUtils.toString(ExportExplanation.class.getResourceAsStream("ExplanationTriples.rq"), StandardCharsets.UTF_8); - } catch (IOException e) { - LOG.error("Could not load SPARQL query from jar", e); - } - } + private static final Logger LOG = Logger.getLogger(ExportExplanation.class); + private static String mainQuery; + + static { + try { + mainQuery = IOUtils.toString(ExportExplanation.class.getResourceAsStream("ExplanationTriples.rq"), StandardCharsets.UTF_8); + } catch (IOException e) { + LOG.error("Could not load SPARQL query from jar", e); + } + } - public static String exportExplanation(WorkingMemory wm, BlazegraphOntologyManager go_lego, Map labelMap) { - Set triples = new HashSet<>(); - Model model = ModelFactory.createDefaultModel(); - model.add(toJava(wm.facts()).stream().map(t -> model.asStatement(Bridge.jenaFromTriple(t))).collect(Collectors.toList())); - QueryExecution qe = QueryExecutionFactory.create(mainQuery, model); - ResultSet results = qe.execSelect(); - while (results.hasNext()) { - QuerySolution qs = results.next(); - triples.add(new Triple(new URI(qs.getResource("s").getURI()), new URI(qs.getResource("p").getURI()), new URI(qs.getResource("o").getURI()))); - } - qe.close(); - // Make sure all the asserted triples are included, in case they got filtered out as indirect by the first query - Model assertedModel = ModelFactory.createDefaultModel(); - assertedModel.add(toJava(wm.asserted()).stream().map(t -> assertedModel.asStatement(Bridge.jenaFromTriple(t))).collect(Collectors.toList())); - QueryExecution aqe = QueryExecutionFactory.create(mainQuery, assertedModel); - ResultSet assertedResults = aqe.execSelect(); - while (assertedResults.hasNext()) { - QuerySolution qs = assertedResults.next(); - triples.add(new Triple(new URI(qs.getResource("s").getURI()), new URI(qs.getResource("p").getURI()), new URI(qs.getResource("o").getURI()))); - } - aqe.close(); - Set asserted = triples.stream().filter(t -> wm.asserted().contains(t)).collect(Collectors.toSet()); - Set inferred = triples.stream().filter(t -> !wm.asserted().contains(t)).collect(Collectors.toSet()); - Map> allExplanations = inferred.stream().collect(Collectors.toMap(Function.identity(), s -> toJava(wm.explain(s)))); - Set allRules = allExplanations.values().stream().flatMap(es -> es.stream().flatMap(e -> toJava(e.rules()).stream())).collect(Collectors.toSet()); - Stream subjects = triples.stream().map(t -> (URI)(t.s())); - Stream predicates = triples.stream().map(t -> (URI)(t.p())); - Stream objects = triples.stream().map(t -> (URI)(t.o())); - Set allTerms = new HashSet<>(); - for (Rule rule : allRules) { - for (TriplePattern tp : toJavaList(rule.body())) { - if (tp.s() instanceof URI) allTerms.add((URI)tp.s()); - if (tp.p() instanceof URI) allTerms.add((URI)tp.p()); - if (tp.o() instanceof URI) allTerms.add((URI)tp.o()); - } - for (TriplePattern tp : toJavaList(rule.head())) { - if (tp.s() instanceof URI) allTerms.add((URI)tp.s()); - if (tp.p() instanceof URI) allTerms.add((URI)tp.p()); - if (tp.o() instanceof URI) allTerms.add((URI)tp.o()); - } - } - allTerms.addAll(subjects.collect(Collectors.toSet())); - allTerms.addAll(predicates.collect(Collectors.toSet())); - allTerms.addAll(objects.collect(Collectors.toSet())); - Map labels = findLabels(allTerms, asserted, go_lego, 
labelMap); - int currentBlankNode = 0; - Map assertedForJSON = new HashMap<>(); - for (Triple t : asserted) { - ExplanationTriple et = new ExplanationTriple(); - et.id = "_:" + currentBlankNode++; - et.subject = ((URI)(t.s())).uri(); - et.predicate = ((URI)(t.p())).uri(); - et.object = ((URI)(t.o())).uri(); - assertedForJSON.put(t, et); - } - Map rulesForJSON = new HashMap<>(); - for (Rule r : allRules) { - ExplanationRule er = new ExplanationRule(); - er.id = "_:" + currentBlankNode++; - List body = new ArrayList<>(); - List head = new ArrayList<>(); - for (TriplePattern t : toJavaList(r.body())) { - ExplanationTriple et = new ExplanationTriple(); - et.subject = patternNodeToString(t.s()); - et.predicate = patternNodeToString(t.p()); - et.object = patternNodeToString(t.o()); - body.add(et); - } - for (TriplePattern t : toJavaList(r.head())) { - ExplanationTriple et = new ExplanationTriple(); - et.subject = patternNodeToString(t.s()); - et.predicate = patternNodeToString(t.p()); - et.object = patternNodeToString(t.o()); - head.add(et); - } - er.body = body.toArray(new ExplanationTriple[] {}); - er.head = head.toArray(new ExplanationTriple[] {}); - rulesForJSON.put(r, er); - } - Map inferredForJSON = new HashMap<>(); - for (Triple t : inferred) { - ExplanationTriple et = new ExplanationTriple(); - et.subject = ((URI)(t.s())).uri(); - et.predicate = ((URI)(t.p())).uri(); - et.object = ((URI)(t.o())).uri(); - Explanation explanation = allExplanations.get(t).iterator().next(); - org.geneontology.minerva.explanation.Explanation ex = new org.geneontology.minerva.explanation.Explanation(); - ex.triples = toJava(explanation.facts()).stream().map(f -> assertedForJSON.get(f).id).toArray(String[]::new); - ex.rules = toJava(explanation.rules()).stream().map(r -> rulesForJSON.get(r).id).toArray(String[]::new); - et.explanation = ex; - inferredForJSON.put(t, et); - } - ModelExplanation me = new ModelExplanation(); - me.terms = labels.keySet().stream().map(uri -> { - ExplanationTerm et = new ExplanationTerm(); - et.id = uri.uri(); - et.label = labels.get(uri); - return et; - }).toArray(ExplanationTerm[]::new); - me.assertions = assertedForJSON.values().toArray(new ExplanationTriple[] {}); - me.rules = rulesForJSON.values().toArray(new ExplanationRule[] {}); - me.inferences = inferredForJSON.values().toArray(new ExplanationTriple[] {}); - GsonBuilder builder = new GsonBuilder(); - builder = builder.setPrettyPrinting(); - Gson gson = builder.create(); - String json = gson.toJson(me); - return json; - } + public static String exportExplanation(WorkingMemory wm, BlazegraphOntologyManager go_lego, Map labelMap) { + Set triples = new HashSet<>(); + Model model = ModelFactory.createDefaultModel(); + model.add(toJava(wm.facts()).stream().map(t -> model.asStatement(Bridge.jenaFromTriple(t))).collect(Collectors.toList())); + QueryExecution qe = QueryExecutionFactory.create(mainQuery, model); + ResultSet results = qe.execSelect(); + while (results.hasNext()) { + QuerySolution qs = results.next(); + triples.add(new Triple(new URI(qs.getResource("s").getURI()), new URI(qs.getResource("p").getURI()), new URI(qs.getResource("o").getURI()))); + } + qe.close(); + // Make sure all the asserted triples are included, in case they got filtered out as indirect by the first query + Model assertedModel = ModelFactory.createDefaultModel(); + assertedModel.add(toJava(wm.asserted()).stream().map(t -> assertedModel.asStatement(Bridge.jenaFromTriple(t))).collect(Collectors.toList())); + QueryExecution aqe = 
QueryExecutionFactory.create(mainQuery, assertedModel); + ResultSet assertedResults = aqe.execSelect(); + while (assertedResults.hasNext()) { + QuerySolution qs = assertedResults.next(); + triples.add(new Triple(new URI(qs.getResource("s").getURI()), new URI(qs.getResource("p").getURI()), new URI(qs.getResource("o").getURI()))); + } + aqe.close(); + Set asserted = triples.stream().filter(t -> wm.asserted().contains(t)).collect(Collectors.toSet()); + Set inferred = triples.stream().filter(t -> !wm.asserted().contains(t)).collect(Collectors.toSet()); + Map> allExplanations = inferred.stream().collect(Collectors.toMap(Function.identity(), s -> toJava(wm.explain(s)))); + Set allRules = allExplanations.values().stream().flatMap(es -> es.stream().flatMap(e -> toJava(e.rules()).stream())).collect(Collectors.toSet()); + Stream subjects = triples.stream().map(t -> (URI) (t.s())); + Stream predicates = triples.stream().map(t -> (URI) (t.p())); + Stream objects = triples.stream().map(t -> (URI) (t.o())); + Set allTerms = new HashSet<>(); + for (Rule rule : allRules) { + for (TriplePattern tp : toJavaList(rule.body())) { + if (tp.s() instanceof URI) allTerms.add((URI) tp.s()); + if (tp.p() instanceof URI) allTerms.add((URI) tp.p()); + if (tp.o() instanceof URI) allTerms.add((URI) tp.o()); + } + for (TriplePattern tp : toJavaList(rule.head())) { + if (tp.s() instanceof URI) allTerms.add((URI) tp.s()); + if (tp.p() instanceof URI) allTerms.add((URI) tp.p()); + if (tp.o() instanceof URI) allTerms.add((URI) tp.o()); + } + } + allTerms.addAll(subjects.collect(Collectors.toSet())); + allTerms.addAll(predicates.collect(Collectors.toSet())); + allTerms.addAll(objects.collect(Collectors.toSet())); + Map labels = findLabels(allTerms, asserted, go_lego, labelMap); + int currentBlankNode = 0; + Map assertedForJSON = new HashMap<>(); + for (Triple t : asserted) { + ExplanationTriple et = new ExplanationTriple(); + et.id = "_:" + currentBlankNode++; + et.subject = ((URI) (t.s())).uri(); + et.predicate = ((URI) (t.p())).uri(); + et.object = ((URI) (t.o())).uri(); + assertedForJSON.put(t, et); + } + Map rulesForJSON = new HashMap<>(); + for (Rule r : allRules) { + ExplanationRule er = new ExplanationRule(); + er.id = "_:" + currentBlankNode++; + List body = new ArrayList<>(); + List head = new ArrayList<>(); + for (TriplePattern t : toJavaList(r.body())) { + ExplanationTriple et = new ExplanationTriple(); + et.subject = patternNodeToString(t.s()); + et.predicate = patternNodeToString(t.p()); + et.object = patternNodeToString(t.o()); + body.add(et); + } + for (TriplePattern t : toJavaList(r.head())) { + ExplanationTriple et = new ExplanationTriple(); + et.subject = patternNodeToString(t.s()); + et.predicate = patternNodeToString(t.p()); + et.object = patternNodeToString(t.o()); + head.add(et); + } + er.body = body.toArray(new ExplanationTriple[]{}); + er.head = head.toArray(new ExplanationTriple[]{}); + rulesForJSON.put(r, er); + } + Map inferredForJSON = new HashMap<>(); + for (Triple t : inferred) { + ExplanationTriple et = new ExplanationTriple(); + et.subject = ((URI) (t.s())).uri(); + et.predicate = ((URI) (t.p())).uri(); + et.object = ((URI) (t.o())).uri(); + Explanation explanation = allExplanations.get(t).iterator().next(); + org.geneontology.minerva.explanation.Explanation ex = new org.geneontology.minerva.explanation.Explanation(); + ex.triples = toJava(explanation.facts()).stream().map(f -> assertedForJSON.get(f).id).toArray(String[]::new); + ex.rules = toJava(explanation.rules()).stream().map(r -> 
rulesForJSON.get(r).id).toArray(String[]::new); + et.explanation = ex; + inferredForJSON.put(t, et); + } + ModelExplanation me = new ModelExplanation(); + me.terms = labels.keySet().stream().map(uri -> { + ExplanationTerm et = new ExplanationTerm(); + et.id = uri.uri(); + et.label = labels.get(uri); + return et; + }).toArray(ExplanationTerm[]::new); + me.assertions = assertedForJSON.values().toArray(new ExplanationTriple[]{}); + me.rules = rulesForJSON.values().toArray(new ExplanationRule[]{}); + me.inferences = inferredForJSON.values().toArray(new ExplanationTriple[]{}); + GsonBuilder builder = new GsonBuilder(); + builder = builder.setPrettyPrinting(); + Gson gson = builder.create(); + String json = gson.toJson(me); + return json; + } - private static String patternNodeToString(Node node) { - if (node instanceof URI) { - return ((URI)node).uri(); - } else { - return ((Variable)node).name(); - } - } + private static String patternNodeToString(Node node) { + if (node instanceof URI) { + return ((URI) node).uri(); + } else { + return ((Variable) node).name(); + } + } - private static Map findLabels(Set uris, Set assertions, BlazegraphOntologyManager go_lego, Map labelMap) { - final URI rdfType = new URI("http://www.w3.org/1999/02/22-rdf-syntax-ns#type"); - Map labels = new HashMap<>(); - labels.put(rdfType, "type"); - for (URI uri : uris) { - Optional possibleLabel = lookup(uri, go_lego, labelMap, labels); - if (possibleLabel.isPresent()) { - labels.put(uri, possibleLabel.get()); - } else { - Optional type = assertions.stream().filter(t -> t.s().equals(uri) && t.p().equals(rdfType)).map(t -> (URI)(t.o())).findAny(); - if (type.isPresent()) { - Optional possibleTypeLabel = lookup(type.get(), go_lego, labelMap, labels); - if (possibleTypeLabel.isPresent()) { - labels.put(uri, possibleTypeLabel.get() + "#" + uri.uri().substring(uri.uri().lastIndexOf("/") + 1)); - } else { - labels.put(uri, uri.uri()); - } - } - } + private static Map findLabels(Set uris, Set assertions, BlazegraphOntologyManager go_lego, Map labelMap) { + final URI rdfType = new URI("http://www.w3.org/1999/02/22-rdf-syntax-ns#type"); + Map labels = new HashMap<>(); + labels.put(rdfType, "type"); + for (URI uri : uris) { + Optional possibleLabel = lookup(uri, go_lego, labelMap, labels); + if (possibleLabel.isPresent()) { + labels.put(uri, possibleLabel.get()); + } else { + Optional type = assertions.stream().filter(t -> t.s().equals(uri) && t.p().equals(rdfType)).map(t -> (URI) (t.o())).findAny(); + if (type.isPresent()) { + Optional possibleTypeLabel = lookup(type.get(), go_lego, labelMap, labels); + if (possibleTypeLabel.isPresent()) { + labels.put(uri, possibleTypeLabel.get() + "#" + uri.uri().substring(uri.uri().lastIndexOf("/") + 1)); + } else { + labels.put(uri, uri.uri()); + } + } + } - } - return labels; - } + } + return labels; + } - private static Optional lookup(URI uri, BlazegraphOntologyManager go_lego, Map labelMap, Map previous) { - if (previous.containsKey(uri)) { - return Optional.of(previous.get(uri)); - } else if (labelMap.containsKey(IRI.create(uri.uri()))) { - return Optional.of(labelMap.get(IRI.create(uri.uri()))); - } else { - String label = null; - try { - label = go_lego.getLabel(uri.uri()); - } catch (IOException e) { - // TODO Auto-generated catch block - e.printStackTrace(); - } - if (null == label) { - return Optional.empty(); - } else { - return Optional.ofNullable(label); - } - } - } + private static Optional lookup(URI uri, BlazegraphOntologyManager go_lego, Map labelMap, Map previous) { + if 
(previous.containsKey(uri)) { + return Optional.of(previous.get(uri)); + } else if (labelMap.containsKey(IRI.create(uri.uri()))) { + return Optional.of(labelMap.get(IRI.create(uri.uri()))); + } else { + String label = null; + try { + label = go_lego.getLabel(uri.uri()); + } catch (IOException e) { + // TODO Auto-generated catch block + e.printStackTrace(); + } + if (null == label) { + return Optional.empty(); + } else { + return Optional.ofNullable(label); + } + } + } - private static Set toJava(scala.collection.Set scalaSet) { - return JavaConverters.setAsJavaSetConverter(scalaSet).asJava(); - } + private static Set toJava(scala.collection.Set scalaSet) { + return JavaConverters.setAsJavaSetConverter(scalaSet).asJava(); + } - private static List toJavaList(scala.collection.Seq scalaList) { - return JavaConverters.seqAsJavaListConverter(scalaList).asJava(); - } + private static List toJavaList(scala.collection.Seq scalaList) { + return JavaConverters.seqAsJavaListConverter(scalaList).asJava(); + } } diff --git a/minerva-converter/src/main/java/org/geneontology/minerva/legacy/sparql/GPADData.java b/minerva-converter/src/main/java/org/geneontology/minerva/legacy/sparql/GPADData.java index f58ea0d6..22ae9c47 100644 --- a/minerva-converter/src/main/java/org/geneontology/minerva/legacy/sparql/GPADData.java +++ b/minerva-converter/src/main/java/org/geneontology/minerva/legacy/sparql/GPADData.java @@ -1,63 +1,62 @@ package org.geneontology.minerva.legacy.sparql; -import java.util.Optional; -import java.util.Set; - -import javax.annotation.Nonnull; - import org.apache.commons.lang3.tuple.Pair; import org.semanticweb.owlapi.model.IRI; +import javax.annotation.Nonnull; +import java.util.Optional; +import java.util.Set; + /** * Standard data needed to render a GPAD file at IRI level. * Adding labels or curie transformations can build from this. - * This is not meant to be a fully general representation of GPAD; + * This is not meant to be a fully general representation of GPAD; * just the information expected to be provided for a GPAD annotation * extraction from a LEGO model. 
*/ public interface GPADData { - @Nonnull - public IRI getObject(); + @Nonnull + public IRI getObject(); + + @Nonnull + public GPADOperatorStatus getOperator(); + + @Nonnull + public IRI getQualifier(); + + @Nonnull + public IRI getOntologyClass(); + + @Nonnull + public Set getAnnotationExtensions(); + + @Nonnull + public String getReference(); - @Nonnull - public GPADOperatorStatus getOperator(); - - @Nonnull - public IRI getQualifier(); + @Nonnull + public IRI getEvidence(); - @Nonnull - public IRI getOntologyClass(); + @Nonnull + public Optional getWithOrFrom(); - @Nonnull - public Set getAnnotationExtensions(); + @Nonnull + public Optional getInteractingTaxonID(); - @Nonnull - public String getReference(); - - @Nonnull - public IRI getEvidence(); + @Nonnull + public String getModificationDate(); - @Nonnull - public Optional getWithOrFrom(); + @Nonnull + public String getAssignedBy(); - @Nonnull - public Optional getInteractingTaxonID(); - - @Nonnull - public String getModificationDate(); + @Nonnull + public Set> getAnnotations(); - @Nonnull - public String getAssignedBy(); - - @Nonnull - public Set> getAnnotations(); - - public static interface ConjunctiveExpression { + public static interface ConjunctiveExpression { - public IRI getRelation(); + public IRI getRelation(); - public IRI getFiller(); + public IRI getFiller(); - } + } } \ No newline at end of file diff --git a/minerva-converter/src/main/java/org/geneontology/minerva/legacy/sparql/GPADEvidence.java b/minerva-converter/src/main/java/org/geneontology/minerva/legacy/sparql/GPADEvidence.java index a4f0b275..8d8d3330 100644 --- a/minerva-converter/src/main/java/org/geneontology/minerva/legacy/sparql/GPADEvidence.java +++ b/minerva-converter/src/main/java/org/geneontology/minerva/legacy/sparql/GPADEvidence.java @@ -1,88 +1,90 @@ package org.geneontology.minerva.legacy.sparql; -import java.util.Optional; -import java.util.Set; - import org.apache.commons.lang3.tuple.Pair; import org.semanticweb.owlapi.model.IRI; +import java.util.Optional; +import java.util.Set; + public class GPADEvidence { - private final IRI evidenceType; - private final String reference; - private final Optional withOrFrom; - private final String date; - private final String assignedBy; - private final Set> annotations; - private final Optional interactingTaxon; - - - public GPADEvidence(IRI evidenceType, String ref, Optional withOrFrom, String date, String assignedBy, Set> annotations, Optional interactingTaxon) { - this.evidenceType = evidenceType; - this.reference = ref; - this.withOrFrom = withOrFrom; - this.date = date; - this.assignedBy = assignedBy; - this.annotations = annotations; - this.interactingTaxon = interactingTaxon; - } - - public String getReference() { - return reference; - } - - public IRI getEvidence() { - return evidenceType; - } - - public Optional getWithOrFrom() { - return withOrFrom; - } - - public Optional getInteractingTaxonID() { - return interactingTaxon; - } - - public String getModificationDate() { - return this.date; - } - - public String getAssignedBy() { - return this.assignedBy; - } - - public Set> getAnnotations() { - return this.annotations; - } - - @Override - public boolean equals(Object other) { - if (this == other) { return true; } - else if (!(other instanceof GPADEvidence)) { return false; } - else { - GPADEvidence otherEvidence = (GPADEvidence)other; - return this.getReference().equals(otherEvidence.getReference()) - && this.getEvidence().equals(otherEvidence.getEvidence()) - && 
this.getWithOrFrom().equals(otherEvidence.getWithOrFrom()) - && this.getInteractingTaxonID().equals(otherEvidence.getInteractingTaxonID()) - && this.getModificationDate().equals(otherEvidence.getModificationDate()) - && this.getAssignedBy().equals(otherEvidence.getAssignedBy()) - && this.getAnnotations().equals(otherEvidence.getAnnotations()); - } - } - - @Override - public int hashCode() { - int result = 17; - result = 37 * result + this.getReference().hashCode(); - result = 37 * result + this.getEvidence().hashCode(); - result = 37 * result + this.getReference().hashCode(); - result = 37 * result + this.getWithOrFrom().hashCode(); - result = 37 * result + this.getInteractingTaxonID().hashCode(); - result = 37 * result + this.getModificationDate().hashCode(); - result = 37 * result + this.getAssignedBy().hashCode(); - result = 37 * result + this.getAnnotations().hashCode(); - return result; - } + private final IRI evidenceType; + private final String reference; + private final Optional withOrFrom; + private final String date; + private final String assignedBy; + private final Set> annotations; + private final Optional interactingTaxon; + + + public GPADEvidence(IRI evidenceType, String ref, Optional withOrFrom, String date, String assignedBy, Set> annotations, Optional interactingTaxon) { + this.evidenceType = evidenceType; + this.reference = ref; + this.withOrFrom = withOrFrom; + this.date = date; + this.assignedBy = assignedBy; + this.annotations = annotations; + this.interactingTaxon = interactingTaxon; + } + + public String getReference() { + return reference; + } + + public IRI getEvidence() { + return evidenceType; + } + + public Optional getWithOrFrom() { + return withOrFrom; + } + + public Optional getInteractingTaxonID() { + return interactingTaxon; + } + + public String getModificationDate() { + return this.date; + } + + public String getAssignedBy() { + return this.assignedBy; + } + + public Set> getAnnotations() { + return this.annotations; + } + + @Override + public boolean equals(Object other) { + if (this == other) { + return true; + } else if (!(other instanceof GPADEvidence)) { + return false; + } else { + GPADEvidence otherEvidence = (GPADEvidence) other; + return this.getReference().equals(otherEvidence.getReference()) + && this.getEvidence().equals(otherEvidence.getEvidence()) + && this.getWithOrFrom().equals(otherEvidence.getWithOrFrom()) + && this.getInteractingTaxonID().equals(otherEvidence.getInteractingTaxonID()) + && this.getModificationDate().equals(otherEvidence.getModificationDate()) + && this.getAssignedBy().equals(otherEvidence.getAssignedBy()) + && this.getAnnotations().equals(otherEvidence.getAnnotations()); + } + } + + @Override + public int hashCode() { + int result = 17; + result = 37 * result + this.getReference().hashCode(); + result = 37 * result + this.getEvidence().hashCode(); + result = 37 * result + this.getReference().hashCode(); + result = 37 * result + this.getWithOrFrom().hashCode(); + result = 37 * result + this.getInteractingTaxonID().hashCode(); + result = 37 * result + this.getModificationDate().hashCode(); + result = 37 * result + this.getAssignedBy().hashCode(); + result = 37 * result + this.getAnnotations().hashCode(); + return result; + } } diff --git a/minerva-converter/src/main/java/org/geneontology/minerva/legacy/sparql/GPADOperatorStatus.java b/minerva-converter/src/main/java/org/geneontology/minerva/legacy/sparql/GPADOperatorStatus.java index 9fe682d8..d328c228 100644 --- 
a/minerva-converter/src/main/java/org/geneontology/minerva/legacy/sparql/GPADOperatorStatus.java +++ b/minerva-converter/src/main/java/org/geneontology/minerva/legacy/sparql/GPADOperatorStatus.java @@ -2,7 +2,7 @@ /* See also: http://www.geneontology.org/page/gene-product-association-data-gpad-format */ public enum GPADOperatorStatus { - NONE, - NOT, - ALWAYS; + NONE, + NOT, + ALWAYS; } \ No newline at end of file diff --git a/minerva-converter/src/main/java/org/geneontology/minerva/legacy/sparql/GPADRenderer.java b/minerva-converter/src/main/java/org/geneontology/minerva/legacy/sparql/GPADRenderer.java index bebc7b9a..785dc651 100644 --- a/minerva-converter/src/main/java/org/geneontology/minerva/legacy/sparql/GPADRenderer.java +++ b/minerva-converter/src/main/java/org/geneontology/minerva/legacy/sparql/GPADRenderer.java @@ -1,138 +1,133 @@ package org.geneontology.minerva.legacy.sparql; -import java.util.ArrayList; -import java.util.Collection; -import java.util.Comparator; -import java.util.List; -import java.util.Map; -import java.util.Set; -import java.util.stream.Collectors; - import org.geneontology.minerva.curie.CurieHandler; import org.geneontology.minerva.legacy.sparql.GPADData.ConjunctiveExpression; import org.semanticweb.owlapi.model.IRI; +import java.util.*; +import java.util.stream.Collectors; + public class GPADRenderer { - private final CurieHandler curieHandler; - private final Map relationShorthandIndex; - - public static final String HEADER = "!gpa-version: 1.1"; - public static final String ATTRIBUTE = "!DB DBObjectID Qualifier GOID DB:References EvidenceCode With(Or)From InteractingTaxonID AssignedBy AnnoExt AnnoProp"; - - public GPADRenderer(CurieHandler handler, Map shorthandIndex) { - this.curieHandler = handler; - this.relationShorthandIndex = shorthandIndex; - } - - public String renderAll(Collection data) { - StringBuilder sb = new StringBuilder(); - sb.append(HEADER); - sb.append("\n"); - - /* Added for debugging. Can be removed if this would be a problem */ + private final CurieHandler curieHandler; + private final Map relationShorthandIndex; + + public static final String HEADER = "!gpa-version: 1.1"; + public static final String ATTRIBUTE = "!DB DBObjectID Qualifier GOID DB:References EvidenceCode With(Or)From InteractingTaxonID AssignedBy AnnoExt AnnoProp"; + + public GPADRenderer(CurieHandler handler, Map shorthandIndex) { + this.curieHandler = handler; + this.relationShorthandIndex = shorthandIndex; + } + + public String renderAll(Collection data) { + StringBuilder sb = new StringBuilder(); + sb.append(HEADER); + sb.append("\n"); + + /* Added for debugging. 
Can be removed if this would be a problem */ /* sb.append(ATTRIBUTE); sb.append("\n"); */ - - for (GPADData annotation : data) { - sb.append(render(annotation)); - sb.append("\n"); - } - return sb.toString(); - } - - public String render(GPADData data) { - try { - List columns = new ArrayList<>(); - columns.add(dbForObject(data.getObject())); - columns.add(localIDForObject(data.getObject())); - columns.add(symbolForRelation(data.getQualifier(), data.getOperator())); - columns.add(curieHandler.getCuri(data.getOntologyClass())); - columns.add(data.getReference()); - columns.add(curieHandler.getCuri(data.getEvidence())); - columns.add(data.getWithOrFrom().orElse("")); - columns.add(""); // not using interacting taxon in LEGO models - columns.add(formatDate(data.getModificationDate())); - columns.add(data.getAssignedBy()); - columns.add(formatAnnotationExtensions(data.getAnnotationExtensions())); - columns.add(data.getAnnotations().stream() - .map(a -> a.getLeft() + "=" + a.getRight()) - .collect(Collectors.joining("|"))); - return String.join("\t", columns); - } catch (Exception e) { - e.printStackTrace(); - throw e; - } - } - - private String localIDForObject(IRI iri) { - String curie = curieHandler.getCuri(iri); - if (curie.startsWith("http")) { - return curie; - } else { - return curie.split(":", 2)[1]; //TODO temporary? - } - } - - private String dbForObject(IRI iri) { - String curie = curieHandler.getCuri(iri); - if (curie.startsWith("http")) { - return ""; - } else { - return curie.split(":", 2)[0]; //TODO temporary? - } - } - - private String symbolForRelation(IRI iri) { - return symbolForRelation(iri, null); - } - - private String symbolForRelation(IRI iri, GPADOperatorStatus operator) { - // Property labels don't seem to be in external lookup service? + + for (GPADData annotation : data) { + sb.append(render(annotation)); + sb.append("\n"); + } + return sb.toString(); + } + + public String render(GPADData data) { + try { + List columns = new ArrayList<>(); + columns.add(dbForObject(data.getObject())); + columns.add(localIDForObject(data.getObject())); + columns.add(symbolForRelation(data.getQualifier(), data.getOperator())); + columns.add(curieHandler.getCuri(data.getOntologyClass())); + columns.add(data.getReference()); + columns.add(curieHandler.getCuri(data.getEvidence())); + columns.add(data.getWithOrFrom().orElse("")); + columns.add(""); // not using interacting taxon in LEGO models + columns.add(formatDate(data.getModificationDate())); + columns.add(data.getAssignedBy()); + columns.add(formatAnnotationExtensions(data.getAnnotationExtensions())); + columns.add(data.getAnnotations().stream() + .map(a -> a.getLeft() + "=" + a.getRight()) + .collect(Collectors.joining("|"))); + return String.join("\t", columns); + } catch (Exception e) { + e.printStackTrace(); + throw e; + } + } + + private String localIDForObject(IRI iri) { + String curie = curieHandler.getCuri(iri); + if (curie.startsWith("http")) { + return curie; + } else { + return curie.split(":", 2)[1]; //TODO temporary? + } + } + + private String dbForObject(IRI iri) { + String curie = curieHandler.getCuri(iri); + if (curie.startsWith("http")) { + return ""; + } else { + return curie.split(":", 2)[0]; //TODO temporary? + } + } + + private String symbolForRelation(IRI iri) { + return symbolForRelation(iri, null); + } + + private String symbolForRelation(IRI iri, GPADOperatorStatus operator) { + // Property labels don't seem to be in external lookup service? 
// Optional labelOpt = Optional.ofNullable(lookupService.lookup(iri)).orElse(Collections.emptyList()).stream() // .filter(e -> e.label != null).findAny().map(e -> e.label.replaceAll(" ", "_")); // return labelOpt.orElse(curieHandler.getCuri(iri)); - if (operator == null || operator.equals(GPADOperatorStatus.NONE)) { - if (relationShorthandIndex.containsKey(iri)) { - return relationShorthandIndex.get(iri); - } else { - return curieHandler.getCuri(iri); - } - } else { - if (relationShorthandIndex.containsKey(iri)) { - return relationShorthandIndex.get(iri) + "|" + operator.name(); - } else { - return curieHandler.getCuri(iri) + "|" + operator.name(); - } - } - } - - /** - * Convert "2016-12-26" to "20161226" - */ - private String formatDate(String date) { - return date.replaceAll("-", ""); - } - - private String formatAnnotationExtensions(Set extensions) { - return extensions.stream() - .sorted(extensionComparator) - .map(ce -> this.renderConjunctiveExpression(ce)) - .collect(Collectors.joining(",")); - } - - private static Comparator extensionComparator = new Comparator() { - @Override - public int compare(ConjunctiveExpression a, ConjunctiveExpression b) { - return (a.getRelation().toString() + a.getFiller().toString()).compareTo(b.getRelation().toString() + b.getFiller().toString()); - } - }; - - private String renderConjunctiveExpression(ConjunctiveExpression ce) { - String relation = symbolForRelation(ce.getRelation()); - String filler = curieHandler.getCuri(ce.getFiller()); - return relation + "(" + filler + ")"; - } + if (operator == null || operator.equals(GPADOperatorStatus.NONE)) { + if (relationShorthandIndex.containsKey(iri)) { + return relationShorthandIndex.get(iri); + } else { + return curieHandler.getCuri(iri); + } + } else { + if (relationShorthandIndex.containsKey(iri)) { + return relationShorthandIndex.get(iri) + "|" + operator.name(); + } else { + return curieHandler.getCuri(iri) + "|" + operator.name(); + } + } + } + + /** + * Convert "2016-12-26" to "20161226" + */ + private String formatDate(String date) { + return date.replaceAll("-", ""); + } + + private String formatAnnotationExtensions(Set extensions) { + return extensions.stream() + .sorted(extensionComparator) + .map(ce -> this.renderConjunctiveExpression(ce)) + .collect(Collectors.joining(",")); + } + + private static Comparator extensionComparator = new Comparator() { + @Override + public int compare(ConjunctiveExpression a, ConjunctiveExpression b) { + return (a.getRelation().toString() + a.getFiller().toString()).compareTo(b.getRelation().toString() + b.getFiller().toString()); + } + }; + + private String renderConjunctiveExpression(ConjunctiveExpression ce) { + String relation = symbolForRelation(ce.getRelation()); + String filler = curieHandler.getCuri(ce.getFiller()); + return relation + "(" + filler + ")"; + } } \ No newline at end of file diff --git a/minerva-converter/src/main/java/org/geneontology/minerva/legacy/sparql/GPADSPARQLExport.java b/minerva-converter/src/main/java/org/geneontology/minerva/legacy/sparql/GPADSPARQLExport.java index 1f41d189..3f58ec37 100644 --- a/minerva-converter/src/main/java/org/geneontology/minerva/legacy/sparql/GPADSPARQLExport.java +++ b/minerva-converter/src/main/java/org/geneontology/minerva/legacy/sparql/GPADSPARQLExport.java @@ -1,31 +1,11 @@ package org.geneontology.minerva.legacy.sparql; -import java.io.IOException; -import java.nio.charset.StandardCharsets; -import java.util.ArrayList; -import java.util.Arrays; -import java.util.Collections; -import 
java.util.HashMap; -import java.util.HashSet; -import java.util.List; -import java.util.Map; -import java.util.Optional; -import java.util.Set; -import java.util.function.Function; -import java.util.stream.Collectors; -import java.util.stream.Stream; - import org.apache.commons.io.IOUtils; import org.apache.commons.lang3.tuple.Pair; import org.apache.jena.graph.Node; import org.apache.jena.graph.NodeFactory; import org.apache.jena.graph.Triple; -import org.apache.jena.query.Query; -import org.apache.jena.query.QueryExecution; -import org.apache.jena.query.QueryExecutionFactory; -import org.apache.jena.query.QueryFactory; -import org.apache.jena.query.QuerySolution; -import org.apache.jena.query.ResultSet; +import org.apache.jena.query.*; import org.apache.jena.rdf.model.Literal; import org.apache.jena.rdf.model.Model; import org.apache.jena.rdf.model.ModelFactory; @@ -43,371 +23,390 @@ import org.geneontology.rules.util.Bridge; import org.semanticweb.owlapi.model.IRI; import org.semanticweb.owlapi.reasoner.InconsistentOntologyException; - import scala.collection.JavaConverters; +import java.io.IOException; +import java.nio.charset.StandardCharsets; +import java.util.*; +import java.util.function.Function; +import java.util.stream.Collectors; +import java.util.stream.Stream; + import static java.util.stream.Collectors.mapping; import static java.util.stream.Collectors.toSet; /* Note: the example GPAD files are available at this link: http://www.informatics.jax.org/downloads/reports/mgi.gpa.gz */ public class GPADSPARQLExport { - private static final Logger LOG = Logger.getLogger(GPADSPARQLExport.class); - private static final String ND = "http://purl.obolibrary.org/obo/ECO_0000307"; - private static final String MF = "http://purl.obolibrary.org/obo/GO_0003674"; - private static final String BP = "http://purl.obolibrary.org/obo/GO_0008150"; - private static final String CC = "http://purl.obolibrary.org/obo/GO_0005575"; - private static final Set rootTerms = new HashSet<>(Arrays.asList(MF, BP, CC)); - private static final String ENABLES = "http://purl.obolibrary.org/obo/RO_0002327"; - private static final String CONTRIBUTES_TO = "http://purl.obolibrary.org/obo/RO_0002326"; - private static final Set functionRelations = new HashSet<>(Arrays.asList(ENABLES, CONTRIBUTES_TO)); - private static final String EMAPA_NAMESPACE = "http://purl.obolibrary.org/obo/EMAPA_"; - private static final String UBERON_NAMESPACE = "http://purl.obolibrary.org/obo/UBERON_"; - private static final String inconsistentQuery = - "PREFIX rdf: " + - "PREFIX owl: " + - "ASK WHERE { ?s rdf:type owl:Nothing . 
} "; - - private static String mainQuery; - static { - try { - mainQuery = IOUtils.toString(GPADSPARQLExport.class.getResourceAsStream("gpad-basic.rq"), StandardCharsets.UTF_8); - } catch (IOException e) { - LOG.error("Could not load SPARQL query from jar", e); - } - } - private static String multipleEvidenceQuery; - static { - try { - multipleEvidenceQuery = IOUtils.toString(GPADSPARQLExport.class.getResourceAsStream("gpad-relation-evidence-multiple.rq"), StandardCharsets.UTF_8); - } catch (IOException e) { - LOG.error("Could not load SPARQL query from jar", e); - } - } - private static String extensionsQuery; - static { - try { - extensionsQuery = IOUtils.toString(GPADSPARQLExport.class.getResourceAsStream("gpad-extensions.rq"), StandardCharsets.UTF_8); - } catch (IOException e) { - LOG.error("Could not load SPARQL query from jar", e); - } - } - private static String modelAnnotationsQuery; - static { - try { - modelAnnotationsQuery = IOUtils.toString(GPADSPARQLExport.class.getResourceAsStream("gpad-model-level-annotations.rq"), StandardCharsets.UTF_8); - } catch (IOException e) { - LOG.error("Could not load SPARQL query from jar", e); - } - } - private final CurieHandler curieHandler; - private final Map relationShorthandIndex; - private final Map tboxShorthandIndex; - private final Map> regulators; - - public GPADSPARQLExport(CurieHandler handler, Map shorthandIndex, Map tboxShorthandIndex, Map> regulators) { - this.curieHandler = handler; - this.relationShorthandIndex = shorthandIndex; - this.tboxShorthandIndex = tboxShorthandIndex; - this.regulators = regulators; - } - - public String exportGPAD(WorkingMemory wm, IRI modelIRI) throws InconsistentOntologyException { - Set annotations = getGPAD(wm, modelIRI); - return new GPADRenderer(curieHandler, relationShorthandIndex).renderAll(annotations); - } - - /* This is a bit convoluted in order to minimize redundant queries, for performance reasons. */ - public Set getGPAD(WorkingMemory wm, IRI modelIRI) throws InconsistentOntologyException { - Model model = ModelFactory.createDefaultModel(); - model.add(JavaConverters.setAsJavaSetConverter(wm.facts()).asJava().stream().map(t -> model.asStatement(Bridge.jenaFromTriple(t))).collect(Collectors.toList())); - if (!isConsistent(model)) throw new InconsistentOntologyException(); - Map modelLevelAnnotations = getModelAnnotations(model); - /* The first step of constructing GPAD records is to construct candidate/basic GPAD records by running gpad-basic.rq. */ - QueryExecution qe = QueryExecutionFactory.create(mainQuery, model); - Set annotations = new HashSet<>(); - //this is unpredictable if more than one - //String modelID = model.listResourcesWithProperty(RDF.type, OWL.Ontology).mapWith(r -> curieHandler.getCuri(IRI.create(r.getURI()))).next(); - String modelID = curieHandler.getCuri(modelIRI); - ResultSet results = qe.execSelect(); - Set basicAnnotations = new HashSet<>(); - while (results.hasNext()) { - QuerySolution qs = results.next(); - BasicGPADData basicGPADData = new BasicGPADData(qs.getResource("pr").asNode(), IRI.create(qs.getResource("pr_type").getURI()), IRI.create(qs.getResource("rel").getURI()), qs.getResource("target").asNode(), IRI.create(qs.getResource("target_type").getURI())); - - /* See whether the query answer contains not-null blank nodes, which are only set if the matching subgraph - * contains the property ComplementOf. If we see such cases, we set the operator field as NOT so that NOT value - * can be printed in GPAD. 
*/ - if (qs.getResource("blank_comp") != null) basicGPADData.setOperator(GPADOperatorStatus.NOT); - basicAnnotations.add(basicGPADData); - } - qe.close(); - - /* The bindings of ?pr_type, ?rel, ?target_type are candidate mappings or values for the final GPAD records - * (i.e. not every mapping is used for building the final records of GPAD file; many of them are filtered out later). - * The mappings are - * ?pr_type: DB Object ID (2nd in GPAD), ?rel: Qualifier(3rd), ?target_type: GO ID(4th) - * The rest of fields in GPAD are then constructed by joining the candidate mappings with mappings describing evidences and so on. - * If the output of this exporter (i.e. GPAD files) does not contain the values you expect, - * dump the above "QuerySolution qs" variable and see whether they are included in the dump. */ - Set possibleExtensions = possibleExtensions(basicAnnotations, model); - Set statementsToExplain = new HashSet<>(); - basicAnnotations.forEach(ba -> statementsToExplain.add(Triple.create(ba.getObjectNode(), NodeFactory.createURI(ba.getQualifier().toString()), ba.getOntologyClassNode()))); - possibleExtensions.forEach(ae -> statementsToExplain.add(ae.getTriple())); - Map> allExplanations = statementsToExplain.stream().collect(Collectors.toMap(Function.identity(), s -> toJava(wm.explain(Bridge.tripleFromJena(s))))); - - Map> allEvidences = evidencesForFacts(allExplanations.values().stream().flatMap(es -> es.stream()).flatMap(e -> toJava(e.facts()).stream().map(t -> Bridge.jenaFromTriple(t))).collect(toSet()), model, modelID, modelLevelAnnotations); - Set gpsWithAnyMFNotRootMF = basicAnnotations.stream().filter(a -> functionRelations.contains(a.getQualifier().toString())).filter(a -> !a.getOntologyClass().toString().equals(MF)).map(a -> a.getObject()).collect(toSet()); - Map> nodesToOntologyClasses = basicAnnotations.stream().collect(Collectors.groupingBy(BasicGPADData::getObjectNode, mapping(BasicGPADData::getOntologyClass, toSet()))); - for (BasicGPADData annotation : basicAnnotations) { - Set termsRegulatedByAnnotationsForThisGPNode = nodesToOntologyClasses.get(annotation.getObjectNode()).stream().flatMap(term -> regulators.getOrDefault(term, Collections.emptySet()).stream()).collect(toSet()); - boolean regulationViolation = termsRegulatedByAnnotationsForThisGPNode.contains(annotation.getOntologyClass()); - if (regulationViolation) continue; - for (Explanation explanation : allExplanations.get(Triple.create(annotation.getObjectNode(), NodeFactory.createURI(annotation.getQualifier().toString()), annotation.getOntologyClassNode()))) { - Set requiredFacts = toJava(explanation.facts()).stream().map(t -> Bridge.jenaFromTriple(t)).collect(toSet()); - // Every statement in the explanation must have at least one evidence, unless the statement is a class assertion - if (requiredFacts.stream().filter(t -> !t.getPredicate().getURI().equals(RDF.type.getURI())).allMatch(f -> !(allEvidences.get(f).isEmpty()))) { - // The evidence used for the annotation must be on an edge to or from the target node - Stream annotationEvidences = requiredFacts.stream() - .filter(f -> (f.getSubject().equals(annotation.getOntologyClassNode()) || f.getObject().equals(annotation.getOntologyClassNode()))) - .flatMap(f -> allEvidences.getOrDefault(f, Collections.emptySet()).stream()); - annotationEvidences.forEach(currentEvidence -> { - String reference = currentEvidence.getReference(); - Set goodExtensions = new HashSet<>(); - for (AnnotationExtension extension : possibleExtensions) { - if 
(extension.getTriple().getSubject().equals(annotation.getOntologyClassNode()) && !(extension.getTriple().getObject().equals(annotation.getObjectNode()))) { - for (Explanation expl : allExplanations.get(extension.getTriple())) { - boolean allFactsOfExplanationHaveRefMatchingAnnotation = toJava(expl.facts()).stream().map(fact -> allEvidences.getOrDefault(Bridge.jenaFromTriple(fact), Collections.emptySet())).allMatch(evidenceSet -> - evidenceSet.stream().anyMatch(ev -> ev.getReference().equals(reference))); - if (allFactsOfExplanationHaveRefMatchingAnnotation) { - goodExtensions.add(new DefaultConjunctiveExpression(IRI.create(extension.getTriple().getPredicate().getURI()), extension.getValueType())); - } - } - } - } - // Handle special case of EMAPA; don't include Uberon extensions - final boolean isMouseExtension = goodExtensions.stream().anyMatch(e -> e.getFiller().toString().startsWith(EMAPA_NAMESPACE)); - if (isMouseExtension) goodExtensions.removeIf(e -> e.getFiller().toString().startsWith(UBERON_NAMESPACE)); - final boolean rootViolation; - if (rootTerms.contains(annotation.getOntologyClass().toString())) { - rootViolation = !ND.equals(currentEvidence.getEvidence().toString()); - } else { rootViolation = false; } - final boolean rootMFWithOtherMF = annotation.getOntologyClass().toString().equals(MF) && gpsWithAnyMFNotRootMF.contains(annotation.getObject()); - if (!rootViolation && !rootMFWithOtherMF) { - DefaultGPADData defaultGPADData = new DefaultGPADData(annotation.getObject(), annotation.getQualifier(), annotation.getOntologyClass(), goodExtensions, - reference, currentEvidence.getEvidence(), currentEvidence.getWithOrFrom(), Optional.empty(), currentEvidence.getModificationDate(), - currentEvidence.getAssignedBy(), currentEvidence.getAnnotations()); - defaultGPADData.setOperator(annotation.getOperator()); - annotations.add(defaultGPADData); - } - }); - } - } - } - return annotations; - } - - private Map getModelAnnotations(Model model) { - QueryExecution qe = QueryExecutionFactory.create(modelAnnotationsQuery, model); - ResultSet result = qe.execSelect(); - Map modelAnnotations = new HashMap<>(); - while (result.hasNext()) { - QuerySolution qs = result.next(); - if (qs.get("model_state") != null) { - String modelState = qs.getLiteral("model_state").getLexicalForm(); - modelAnnotations.put("model-state", modelState); - } - if (qs.get("provided_by") != null) { - String providedBy = qs.getLiteral("provided_by").getLexicalForm(); - modelAnnotations.put("assigned-by", providedBy); - } - //break; - } - return modelAnnotations; - } - - /** - * Given a set of triples extracted/generated from the result/answer of query gpad-basic.rq, we find matching evidence subgraphs. - * In other words, if there are no matching evidence (i.e. no bindings for evidence_type), we discard (basic) GPAD instance. - * - * The parameter "facts" consists of triples constructed from a binding of ?pr, ?rel, ?target in gpad_basic.rq. - * (The codes that constructing these triples are executed right before this method is called). - * - * These triples are then decomposed into values used as the parameters/bindings for objects of the following patterns. - * ?axiom owl:annotatedSource ?subject (i.e. ?pr in gpad_basic.rq) - * ?axiom owl:annotatedProperty ?predicate (i.e., ?rel in gpad_basic.rq, which denotes qualifier in GPAD) - * ?axiom owl:annotatedTarget ?object (i.e., ?target in gpad_basic.rq) - * - * If we find the bindings of ?axioms and the values of these bindings have some rdf:type triples, we proceed. 
(If not, we discard). - * The bindings of the query gpad-relation-evidence-multiple.rq are then used for filling up fields in GPAD records/tuples. - */ - private Map> evidencesForFacts(Set facts, Model model, String modelID, Map modelLevelAnnotations) { - Query query = QueryFactory.create(multipleEvidenceQuery); - Var subject = Var.alloc("subject"); - Var predicate = Var.alloc("predicate"); - Var object = Var.alloc("object"); - List variables = new ArrayList<>(); - variables.add(subject); - variables.add(predicate); - variables.add(object); - Stream bindings = facts.stream().map(f -> createBinding(Pair.of(subject, f.getSubject()), Pair.of(predicate, f.getPredicate()), Pair.of(object, f.getObject()))); - query.setValuesDataBlock(variables, bindings.collect(Collectors.toList())); - QueryExecution evidenceExecution = QueryExecutionFactory.create(query, model); - ResultSet evidenceResults = evidenceExecution.execSelect(); - Map> allEvidences = facts.stream().collect(Collectors.toMap(Function.identity(), f -> new HashSet())); - while (evidenceResults.hasNext()) { - QuerySolution eqs = evidenceResults.next(); - if (eqs.get("evidence_type") != null) { - Triple statement = Triple.create(eqs.getResource("subject").asNode(), eqs.getResource("predicate").asNode(), eqs.getResource("object").asNode()); - IRI evidenceType = IRI.create(eqs.getResource("evidence_type").getURI()); - Optional with = Optional.ofNullable(eqs.getLiteral("with")).map(Literal::getLexicalForm); - Set> annotationAnnotations = new HashSet<>(); - annotationAnnotations.add(Pair.of("noctua-model-id", modelID)); - annotationAnnotations.addAll(getContributors(eqs).stream().map(c -> Pair.of("contributor", c)).collect(toSet())); - String modificationDate = eqs.getLiteral("modification_date").getLexicalForm(); - Optional creationDate = Optional.ofNullable(eqs.getLiteral("creation_date")).map(Literal::getLexicalForm); - // Add this back after announced to consortium; also re-enable tests - //creationDate.ifPresent(date -> annotationAnnotations.add(Pair.of("creation-date", date))); - String reference = eqs.getLiteral("source").getLexicalForm(); - final String usableAssignedBy; - Optional assignedByIRIOpt = getAnnotationAssignedBy(eqs); - if (assignedByIRIOpt.isPresent()) { - String usableAssignedByIRI = assignedByIRIOpt.get(); - usableAssignedBy = this.tboxShorthandIndex.getOrDefault(IRI.create(usableAssignedByIRI), usableAssignedByIRI); - } else if (modelLevelAnnotations.containsKey("assigned-by")) { - String usableAssignedByIRI = modelLevelAnnotations.get("assigned-by"); - usableAssignedBy = this.tboxShorthandIndex.getOrDefault(IRI.create(usableAssignedByIRI), usableAssignedByIRI); - } else { usableAssignedBy = "GO_Noctua"; } - if (modelLevelAnnotations.containsKey("model-state")) { - annotationAnnotations.add(Pair.of("model-state", modelLevelAnnotations.get("model-state"))); - } - allEvidences.get(statement).add(new GPADEvidence(evidenceType, reference, with, modificationDate, usableAssignedBy, annotationAnnotations, Optional.empty())); - } - } - evidenceExecution.close(); - return allEvidences; - } - - @SafeVarargs - private final Binding createBinding(Pair... 
bindings) { - BindingMap map = BindingFactory.create(); - for (Pair binding : bindings) { - map.add(binding.getLeft(), binding.getRight()); - } - return map; - } - - private Set possibleExtensions(Set basicAnnotations, Model model) { - Set possibleExtensions = new HashSet<>(); - Var targetVar = Var.alloc("target"); - List bindings = basicAnnotations.stream().map(ba -> createBinding(Pair.of(targetVar, ba.getOntologyClassNode()))).collect(Collectors.toList()); - Query query = QueryFactory.create(extensionsQuery); - query.setValuesDataBlock(Arrays.asList(targetVar), bindings); - QueryExecution qe = QueryExecutionFactory.create(query, model); - ResultSet results = qe.execSelect(); - while (results.hasNext()) { - QuerySolution result = results.next(); - Triple statement = Triple.create(result.getResource("target").asNode(), result.getResource("extension_rel").asNode(), result.getResource("extension").asNode()); - IRI extensionType = IRI.create(result.getResource("extension_type").getURI()); - possibleExtensions.add(new AnnotationExtension(statement, extensionType)); - } - qe.close(); - return possibleExtensions; - } - - private Set getContributors(QuerySolution result) { - Set contributors = new HashSet<>(); - if (result.getLiteral("contributors") != null) { - for (String contributor : result.getLiteral("contributors").getLexicalForm().split("\\|")) { - contributors.add(contributor); - } - } - return Collections.unmodifiableSet(contributors); - } - - private Optional getAnnotationAssignedBy(QuerySolution result) { - if (result.getLiteral("provided_bys") != null) { - for (String group : result.getLiteral("provided_bys").getLexicalForm().split("\\|")) { - return Optional.of(group); - } - } - return Optional.empty(); - } - - private boolean isConsistent(Model model) { - QueryExecution qe = QueryExecutionFactory.create(inconsistentQuery, model); - boolean inconsistent = qe.execAsk(); - qe.close(); - if(inconsistent) { - String sparql_why = "PREFIX rdf: " - + "PREFIX owl: " - + "SELECT ?s WHERE { ?s rdf:type owl:Nothing . } "; - qe = QueryExecutionFactory.create(sparql_why, model); - ResultSet result = qe.execSelect(); - while (result.hasNext()) { - QuerySolution qs = result.next(); - Resource bad = qs.getResource("s"); - LOG.info("owl nothing instance: "+bad.getURI()); - } - } - - return !inconsistent; - } - - private static Set toJava(scala.collection.Set scalaSet) { - return JavaConverters.setAsJavaSetConverter(scalaSet).asJava(); - } - - private static class DefaultConjunctiveExpression implements ConjunctiveExpression { - - private final IRI relation; - private final IRI filler; - - public DefaultConjunctiveExpression(IRI rel, IRI fill) { - this.relation = rel; - this.filler = fill; - } - - @Override - public IRI getRelation() { - return relation; - } - - @Override - public IRI getFiller() { - return filler; - } - - @Override - public int hashCode() { - final int prime = 31; - int result = 1; - result = prime * result + ((filler == null) ? 0 : filler.hashCode()); - result = prime * result + ((relation == null) ? 
0 : relation.hashCode()); - return result; - } - - @Override - public boolean equals(Object obj) { - if (this == obj) - return true; - if (obj == null) - return false; - if (getClass() != obj.getClass()) - return false; - DefaultConjunctiveExpression other = (DefaultConjunctiveExpression) obj; - if (filler == null) { - if (other.filler != null) - return false; - } else if (!filler.equals(other.filler)) - return false; - if (relation == null) { - if (other.relation != null) - return false; - } else if (!relation.equals(other.relation)) - return false; - return true; - } - - } + private static final Logger LOG = Logger.getLogger(GPADSPARQLExport.class); + private static final String ND = "http://purl.obolibrary.org/obo/ECO_0000307"; + private static final String MF = "http://purl.obolibrary.org/obo/GO_0003674"; + private static final String BP = "http://purl.obolibrary.org/obo/GO_0008150"; + private static final String CC = "http://purl.obolibrary.org/obo/GO_0005575"; + private static final Set rootTerms = new HashSet<>(Arrays.asList(MF, BP, CC)); + private static final String ENABLES = "http://purl.obolibrary.org/obo/RO_0002327"; + private static final String CONTRIBUTES_TO = "http://purl.obolibrary.org/obo/RO_0002326"; + private static final Set functionRelations = new HashSet<>(Arrays.asList(ENABLES, CONTRIBUTES_TO)); + private static final String EMAPA_NAMESPACE = "http://purl.obolibrary.org/obo/EMAPA_"; + private static final String UBERON_NAMESPACE = "http://purl.obolibrary.org/obo/UBERON_"; + private static final String inconsistentQuery = + "PREFIX rdf: " + + "PREFIX owl: " + + "ASK WHERE { ?s rdf:type owl:Nothing . } "; + + private static String mainQuery; + + static { + try { + mainQuery = IOUtils.toString(GPADSPARQLExport.class.getResourceAsStream("gpad-basic.rq"), StandardCharsets.UTF_8); + } catch (IOException e) { + LOG.error("Could not load SPARQL query from jar", e); + } + } + + private static String multipleEvidenceQuery; + + static { + try { + multipleEvidenceQuery = IOUtils.toString(GPADSPARQLExport.class.getResourceAsStream("gpad-relation-evidence-multiple.rq"), StandardCharsets.UTF_8); + } catch (IOException e) { + LOG.error("Could not load SPARQL query from jar", e); + } + } + + private static String extensionsQuery; + + static { + try { + extensionsQuery = IOUtils.toString(GPADSPARQLExport.class.getResourceAsStream("gpad-extensions.rq"), StandardCharsets.UTF_8); + } catch (IOException e) { + LOG.error("Could not load SPARQL query from jar", e); + } + } + + private static String modelAnnotationsQuery; + + static { + try { + modelAnnotationsQuery = IOUtils.toString(GPADSPARQLExport.class.getResourceAsStream("gpad-model-level-annotations.rq"), StandardCharsets.UTF_8); + } catch (IOException e) { + LOG.error("Could not load SPARQL query from jar", e); + } + } + + private final CurieHandler curieHandler; + private final Map relationShorthandIndex; + private final Map tboxShorthandIndex; + private final Map> regulators; + + public GPADSPARQLExport(CurieHandler handler, Map shorthandIndex, Map tboxShorthandIndex, Map> regulators) { + this.curieHandler = handler; + this.relationShorthandIndex = shorthandIndex; + this.tboxShorthandIndex = tboxShorthandIndex; + this.regulators = regulators; + } + + public String exportGPAD(WorkingMemory wm, IRI modelIRI) throws InconsistentOntologyException { + Set annotations = getGPAD(wm, modelIRI); + return new GPADRenderer(curieHandler, relationShorthandIndex).renderAll(annotations); + } + + /* This is a bit convoluted in order to minimize 
redundant queries, for performance reasons. */ + public Set getGPAD(WorkingMemory wm, IRI modelIRI) throws InconsistentOntologyException { + Model model = ModelFactory.createDefaultModel(); + model.add(JavaConverters.setAsJavaSetConverter(wm.facts()).asJava().stream().map(t -> model.asStatement(Bridge.jenaFromTriple(t))).collect(Collectors.toList())); + if (!isConsistent(model)) throw new InconsistentOntologyException(); + Map modelLevelAnnotations = getModelAnnotations(model); + /* The first step of constructing GPAD records is to construct candidate/basic GPAD records by running gpad-basic.rq. */ + QueryExecution qe = QueryExecutionFactory.create(mainQuery, model); + Set annotations = new HashSet<>(); + //this is unpredictable if more than one + //String modelID = model.listResourcesWithProperty(RDF.type, OWL.Ontology).mapWith(r -> curieHandler.getCuri(IRI.create(r.getURI()))).next(); + String modelID = curieHandler.getCuri(modelIRI); + ResultSet results = qe.execSelect(); + Set basicAnnotations = new HashSet<>(); + while (results.hasNext()) { + QuerySolution qs = results.next(); + BasicGPADData basicGPADData = new BasicGPADData(qs.getResource("pr").asNode(), IRI.create(qs.getResource("pr_type").getURI()), IRI.create(qs.getResource("rel").getURI()), qs.getResource("target").asNode(), IRI.create(qs.getResource("target_type").getURI())); + + /* See whether the query answer contains not-null blank nodes, which are only set if the matching subgraph + * contains the property ComplementOf. If we see such cases, we set the operator field as NOT so that NOT value + * can be printed in GPAD. */ + if (qs.getResource("blank_comp") != null) basicGPADData.setOperator(GPADOperatorStatus.NOT); + basicAnnotations.add(basicGPADData); + } + qe.close(); + + /* The bindings of ?pr_type, ?rel, ?target_type are candidate mappings or values for the final GPAD records + * (i.e. not every mapping is used for building the final records of GPAD file; many of them are filtered out later). + * The mappings are + * ?pr_type: DB Object ID (2nd in GPAD), ?rel: Qualifier(3rd), ?target_type: GO ID(4th) + * The rest of fields in GPAD are then constructed by joining the candidate mappings with mappings describing evidences and so on. + * If the output of this exporter (i.e. GPAD files) does not contain the values you expect, + * dump the above "QuerySolution qs" variable and see whether they are included in the dump. 
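The comment above suggests dumping the "QuerySolution qs" variable when the exported GPAD is missing expected values; the helper below is a hypothetical sketch of such a dump using only the public Jena API, and is not part of Minerva.

import org.apache.jena.query.QuerySolution;

import java.util.Iterator;

final class QuerySolutionDump {
    // Print every variable bound in one solution, e.g. ?pr, ?pr_type, ?rel, ?target, ?target_type.
    static void dump(QuerySolution qs) {
        Iterator<String> vars = qs.varNames();
        while (vars.hasNext()) {
            String name = vars.next();
            System.out.println("?" + name + " = " + qs.get(name));
        }
    }
}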
*/ + Set possibleExtensions = possibleExtensions(basicAnnotations, model); + Set statementsToExplain = new HashSet<>(); + basicAnnotations.forEach(ba -> statementsToExplain.add(Triple.create(ba.getObjectNode(), NodeFactory.createURI(ba.getQualifier().toString()), ba.getOntologyClassNode()))); + possibleExtensions.forEach(ae -> statementsToExplain.add(ae.getTriple())); + Map> allExplanations = statementsToExplain.stream().collect(Collectors.toMap(Function.identity(), s -> toJava(wm.explain(Bridge.tripleFromJena(s))))); + + Map> allEvidences = evidencesForFacts(allExplanations.values().stream().flatMap(es -> es.stream()).flatMap(e -> toJava(e.facts()).stream().map(t -> Bridge.jenaFromTriple(t))).collect(toSet()), model, modelID, modelLevelAnnotations); + Set gpsWithAnyMFNotRootMF = basicAnnotations.stream().filter(a -> functionRelations.contains(a.getQualifier().toString())).filter(a -> !a.getOntologyClass().toString().equals(MF)).map(a -> a.getObject()).collect(toSet()); + Map> nodesToOntologyClasses = basicAnnotations.stream().collect(Collectors.groupingBy(BasicGPADData::getObjectNode, mapping(BasicGPADData::getOntologyClass, toSet()))); + for (BasicGPADData annotation : basicAnnotations) { + Set termsRegulatedByAnnotationsForThisGPNode = nodesToOntologyClasses.get(annotation.getObjectNode()).stream().flatMap(term -> regulators.getOrDefault(term, Collections.emptySet()).stream()).collect(toSet()); + boolean regulationViolation = termsRegulatedByAnnotationsForThisGPNode.contains(annotation.getOntologyClass()); + if (regulationViolation) continue; + for (Explanation explanation : allExplanations.get(Triple.create(annotation.getObjectNode(), NodeFactory.createURI(annotation.getQualifier().toString()), annotation.getOntologyClassNode()))) { + Set requiredFacts = toJava(explanation.facts()).stream().map(t -> Bridge.jenaFromTriple(t)).collect(toSet()); + // Every statement in the explanation must have at least one evidence, unless the statement is a class assertion + if (requiredFacts.stream().filter(t -> !t.getPredicate().getURI().equals(RDF.type.getURI())).allMatch(f -> !(allEvidences.get(f).isEmpty()))) { + // The evidence used for the annotation must be on an edge to or from the target node + Stream annotationEvidences = requiredFacts.stream() + .filter(f -> (f.getSubject().equals(annotation.getOntologyClassNode()) || f.getObject().equals(annotation.getOntologyClassNode()))) + .flatMap(f -> allEvidences.getOrDefault(f, Collections.emptySet()).stream()); + annotationEvidences.forEach(currentEvidence -> { + String reference = currentEvidence.getReference(); + Set goodExtensions = new HashSet<>(); + for (AnnotationExtension extension : possibleExtensions) { + if (extension.getTriple().getSubject().equals(annotation.getOntologyClassNode()) && !(extension.getTriple().getObject().equals(annotation.getObjectNode()))) { + for (Explanation expl : allExplanations.get(extension.getTriple())) { + boolean allFactsOfExplanationHaveRefMatchingAnnotation = toJava(expl.facts()).stream().map(fact -> allEvidences.getOrDefault(Bridge.jenaFromTriple(fact), Collections.emptySet())).allMatch(evidenceSet -> + evidenceSet.stream().anyMatch(ev -> ev.getReference().equals(reference))); + if (allFactsOfExplanationHaveRefMatchingAnnotation) { + goodExtensions.add(new DefaultConjunctiveExpression(IRI.create(extension.getTriple().getPredicate().getURI()), extension.getValueType())); + } + } + } + } + // Handle special case of EMAPA; don't include Uberon extensions + final boolean isMouseExtension = 
goodExtensions.stream().anyMatch(e -> e.getFiller().toString().startsWith(EMAPA_NAMESPACE)); + if (isMouseExtension) + goodExtensions.removeIf(e -> e.getFiller().toString().startsWith(UBERON_NAMESPACE)); + final boolean rootViolation; + if (rootTerms.contains(annotation.getOntologyClass().toString())) { + rootViolation = !ND.equals(currentEvidence.getEvidence().toString()); + } else { + rootViolation = false; + } + final boolean rootMFWithOtherMF = annotation.getOntologyClass().toString().equals(MF) && gpsWithAnyMFNotRootMF.contains(annotation.getObject()); + if (!rootViolation && !rootMFWithOtherMF) { + DefaultGPADData defaultGPADData = new DefaultGPADData(annotation.getObject(), annotation.getQualifier(), annotation.getOntologyClass(), goodExtensions, + reference, currentEvidence.getEvidence(), currentEvidence.getWithOrFrom(), Optional.empty(), currentEvidence.getModificationDate(), + currentEvidence.getAssignedBy(), currentEvidence.getAnnotations()); + defaultGPADData.setOperator(annotation.getOperator()); + annotations.add(defaultGPADData); + } + }); + } + } + } + return annotations; + } + + private Map getModelAnnotations(Model model) { + QueryExecution qe = QueryExecutionFactory.create(modelAnnotationsQuery, model); + ResultSet result = qe.execSelect(); + Map modelAnnotations = new HashMap<>(); + while (result.hasNext()) { + QuerySolution qs = result.next(); + if (qs.get("model_state") != null) { + String modelState = qs.getLiteral("model_state").getLexicalForm(); + modelAnnotations.put("model-state", modelState); + } + if (qs.get("provided_by") != null) { + String providedBy = qs.getLiteral("provided_by").getLexicalForm(); + modelAnnotations.put("assigned-by", providedBy); + } + //break; + } + return modelAnnotations; + } + + /** + * Given a set of triples extracted/generated from the result/answer of query gpad-basic.rq, we find matching evidence subgraphs. + * In other words, if there are no matching evidence (i.e. no bindings for evidence_type), we discard (basic) GPAD instance. + *
<p>
+ * The parameter "facts" consists of the triples constructed from a binding of ?pr, ?rel, ?target in gpad_basic.rq. + * (The code that constructs these triples runs immediately before this method is called.) + *
<p>
+ * These triples are then decomposed into the values that are bound to the variables of the following patterns: + * ?axiom owl:annotatedSource ?subject (i.e. ?pr in gpad_basic.rq) + * ?axiom owl:annotatedProperty ?predicate (i.e. ?rel in gpad_basic.rq, which supplies the GPAD qualifier) + * ?axiom owl:annotatedTarget ?object (i.e. ?target in gpad_basic.rq) + *
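Restated purely as an assumed sketch for quick reference (the authoritative pattern is whatever gpad-relation-evidence-multiple.rq actually contains), the OWL annotated-axiom reification pattern described above looks roughly like this:

final class EvidenceAxiomPatternSketch {
    // The ?axiom pattern named in the Javadoc above, written as a SPARQL fragment in a Java constant.
    static final String AXIOM_PATTERN =
            "?axiom owl:annotatedSource   ?subject .\n" +   // ?pr in gpad-basic.rq
            "?axiom owl:annotatedProperty ?predicate .\n" + // ?rel, which supplies the GPAD qualifier
            "?axiom owl:annotatedTarget   ?object .\n";     // ?target in gpad-basic.rq
}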
<p>
+ * If we find the bindings of ?axioms and the values of these bindings have some rdf:type triples, we proceed. (If not, we discard). + * The bindings of the query gpad-relation-evidence-multiple.rq are then used for filling up fields in GPAD records/tuples. + */ + private Map> evidencesForFacts(Set facts, Model model, String modelID, Map modelLevelAnnotations) { + Query query = QueryFactory.create(multipleEvidenceQuery); + Var subject = Var.alloc("subject"); + Var predicate = Var.alloc("predicate"); + Var object = Var.alloc("object"); + List variables = new ArrayList<>(); + variables.add(subject); + variables.add(predicate); + variables.add(object); + Stream bindings = facts.stream().map(f -> createBinding(Pair.of(subject, f.getSubject()), Pair.of(predicate, f.getPredicate()), Pair.of(object, f.getObject()))); + query.setValuesDataBlock(variables, bindings.collect(Collectors.toList())); + QueryExecution evidenceExecution = QueryExecutionFactory.create(query, model); + ResultSet evidenceResults = evidenceExecution.execSelect(); + Map> allEvidences = facts.stream().collect(Collectors.toMap(Function.identity(), f -> new HashSet())); + while (evidenceResults.hasNext()) { + QuerySolution eqs = evidenceResults.next(); + if (eqs.get("evidence_type") != null) { + Triple statement = Triple.create(eqs.getResource("subject").asNode(), eqs.getResource("predicate").asNode(), eqs.getResource("object").asNode()); + IRI evidenceType = IRI.create(eqs.getResource("evidence_type").getURI()); + Optional with = Optional.ofNullable(eqs.getLiteral("with")).map(Literal::getLexicalForm); + Set> annotationAnnotations = new HashSet<>(); + annotationAnnotations.add(Pair.of("noctua-model-id", modelID)); + annotationAnnotations.addAll(getContributors(eqs).stream().map(c -> Pair.of("contributor", c)).collect(toSet())); + String modificationDate = eqs.getLiteral("modification_date").getLexicalForm(); + Optional creationDate = Optional.ofNullable(eqs.getLiteral("creation_date")).map(Literal::getLexicalForm); + // Add this back after announced to consortium; also re-enable tests + //creationDate.ifPresent(date -> annotationAnnotations.add(Pair.of("creation-date", date))); + String reference = eqs.getLiteral("source").getLexicalForm(); + final String usableAssignedBy; + Optional assignedByIRIOpt = getAnnotationAssignedBy(eqs); + if (assignedByIRIOpt.isPresent()) { + String usableAssignedByIRI = assignedByIRIOpt.get(); + usableAssignedBy = this.tboxShorthandIndex.getOrDefault(IRI.create(usableAssignedByIRI), usableAssignedByIRI); + } else if (modelLevelAnnotations.containsKey("assigned-by")) { + String usableAssignedByIRI = modelLevelAnnotations.get("assigned-by"); + usableAssignedBy = this.tboxShorthandIndex.getOrDefault(IRI.create(usableAssignedByIRI), usableAssignedByIRI); + } else { + usableAssignedBy = "GO_Noctua"; + } + if (modelLevelAnnotations.containsKey("model-state")) { + annotationAnnotations.add(Pair.of("model-state", modelLevelAnnotations.get("model-state"))); + } + allEvidences.get(statement).add(new GPADEvidence(evidenceType, reference, with, modificationDate, usableAssignedBy, annotationAnnotations, Optional.empty())); + } + } + evidenceExecution.close(); + return allEvidences; + } + + @SafeVarargs + private final Binding createBinding(Pair... 
bindings) { + BindingMap map = BindingFactory.create(); + for (Pair binding : bindings) { + map.add(binding.getLeft(), binding.getRight()); + } + return map; + } + + private Set possibleExtensions(Set basicAnnotations, Model model) { + Set possibleExtensions = new HashSet<>(); + Var targetVar = Var.alloc("target"); + List bindings = basicAnnotations.stream().map(ba -> createBinding(Pair.of(targetVar, ba.getOntologyClassNode()))).collect(Collectors.toList()); + Query query = QueryFactory.create(extensionsQuery); + query.setValuesDataBlock(Arrays.asList(targetVar), bindings); + QueryExecution qe = QueryExecutionFactory.create(query, model); + ResultSet results = qe.execSelect(); + while (results.hasNext()) { + QuerySolution result = results.next(); + Triple statement = Triple.create(result.getResource("target").asNode(), result.getResource("extension_rel").asNode(), result.getResource("extension").asNode()); + IRI extensionType = IRI.create(result.getResource("extension_type").getURI()); + possibleExtensions.add(new AnnotationExtension(statement, extensionType)); + } + qe.close(); + return possibleExtensions; + } + + private Set getContributors(QuerySolution result) { + Set contributors = new HashSet<>(); + if (result.getLiteral("contributors") != null) { + for (String contributor : result.getLiteral("contributors").getLexicalForm().split("\\|")) { + contributors.add(contributor); + } + } + return Collections.unmodifiableSet(contributors); + } + + private Optional getAnnotationAssignedBy(QuerySolution result) { + if (result.getLiteral("provided_bys") != null) { + for (String group : result.getLiteral("provided_bys").getLexicalForm().split("\\|")) { + return Optional.of(group); + } + } + return Optional.empty(); + } + + private boolean isConsistent(Model model) { + QueryExecution qe = QueryExecutionFactory.create(inconsistentQuery, model); + boolean inconsistent = qe.execAsk(); + qe.close(); + if (inconsistent) { + String sparql_why = "PREFIX rdf: " + + "PREFIX owl: " + + "SELECT ?s WHERE { ?s rdf:type owl:Nothing . } "; + qe = QueryExecutionFactory.create(sparql_why, model); + ResultSet result = qe.execSelect(); + while (result.hasNext()) { + QuerySolution qs = result.next(); + Resource bad = qs.getResource("s"); + LOG.info("owl nothing instance: " + bad.getURI()); + } + } + + return !inconsistent; + } + + private static Set toJava(scala.collection.Set scalaSet) { + return JavaConverters.setAsJavaSetConverter(scalaSet).asJava(); + } + + private static class DefaultConjunctiveExpression implements ConjunctiveExpression { + + private final IRI relation; + private final IRI filler; + + public DefaultConjunctiveExpression(IRI rel, IRI fill) { + this.relation = rel; + this.filler = fill; + } + + @Override + public IRI getRelation() { + return relation; + } + + @Override + public IRI getFiller() { + return filler; + } + + @Override + public int hashCode() { + final int prime = 31; + int result = 1; + result = prime * result + ((filler == null) ? 0 : filler.hashCode()); + result = prime * result + ((relation == null) ? 
0 : relation.hashCode()); + return result; + } + + @Override + public boolean equals(Object obj) { + if (this == obj) + return true; + if (obj == null) + return false; + if (getClass() != obj.getClass()) + return false; + DefaultConjunctiveExpression other = (DefaultConjunctiveExpression) obj; + if (filler == null) { + if (other.filler != null) + return false; + } else if (!filler.equals(other.filler)) + return false; + if (relation == null) { + if (other.relation != null) + return false; + } else if (!relation.equals(other.relation)) + return false; + return true; + } + + } } \ No newline at end of file diff --git a/minerva-converter/src/main/java/org/geneontology/minerva/taxon/FindTaxonTool.java b/minerva-converter/src/main/java/org/geneontology/minerva/taxon/FindTaxonTool.java index f3e8c850..e3ffe2e2 100644 --- a/minerva-converter/src/main/java/org/geneontology/minerva/taxon/FindTaxonTool.java +++ b/minerva-converter/src/main/java/org/geneontology/minerva/taxon/FindTaxonTool.java @@ -1,87 +1,78 @@ package org.geneontology.minerva.taxon; -import java.util.HashSet; -import java.util.Set; - import org.apache.commons.io.IOUtils; import org.geneontology.minerva.MinervaOWLGraphWrapper; import org.geneontology.minerva.MolecularModelManager.UnknownIdentifierException; import org.geneontology.minerva.curie.CurieHandler; import org.obolibrary.obo2owl.Obo2OWLConstants; -import org.semanticweb.owlapi.model.IRI; -import org.semanticweb.owlapi.model.OWLAnnotation; -import org.semanticweb.owlapi.model.OWLAxiom; -import org.semanticweb.owlapi.model.OWLClass; -import org.semanticweb.owlapi.model.OWLClassExpression; -import org.semanticweb.owlapi.model.OWLDataFactory; -import org.semanticweb.owlapi.model.OWLObjectProperty; -import org.semanticweb.owlapi.model.OWLObjectSomeValuesFrom; -import org.semanticweb.owlapi.model.OWLOntology; -import org.semanticweb.owlapi.model.OWLSubClassOfAxiom; +import org.semanticweb.owlapi.model.*; import org.semanticweb.owlapi.util.OWLClassExpressionVisitorExAdapter; +import java.util.HashSet; +import java.util.Set; + public class FindTaxonTool { - - public static final IRI IN_TAXON_IRI = IRI.create(Obo2OWLConstants.DEFAULT_IRI_PREFIX+"RO_0002162"); - - private final OWLObjectProperty inTaxon; - private final CurieHandler curieHandler; - - public FindTaxonTool(CurieHandler curieHandler, OWLDataFactory df) { - this.curieHandler = curieHandler; - inTaxon = df.getOWLObjectProperty(IN_TAXON_IRI); - } - - public String getEntityTaxon(String curie, OWLOntology model) throws UnknownIdentifierException { - if (curie == null || curie.isEmpty()) { - return null; - } - OWLDataFactory df = model.getOWLOntologyManager().getOWLDataFactory(); - OWLClass cls = df.getOWLClass(curieHandler.getIRI(curie)); - String taxon = getEntityTaxon(cls, model); - if (taxon == null) { - MinervaOWLGraphWrapper g = new MinervaOWLGraphWrapper(model); - cls = g.getOWLClassByIdentifier(curie); - if (cls != null) { - taxon = getEntityTaxon(cls, model); - } - IOUtils.closeQuietly(g); - } - return taxon; - } - String getEntityTaxon(OWLClass entity, OWLOntology model) { - Set axioms = new HashSet(); - for(OWLOntology ont : model.getImportsClosure()) { - axioms.addAll(ont.getSubClassAxiomsForSubClass(entity)); - } - for (OWLSubClassOfAxiom axiom : axioms) { - OWLClassExpression ce = axiom.getSuperClass(); - if (ce instanceof OWLObjectSomeValuesFrom) { - OWLObjectSomeValuesFrom svf = (OWLObjectSomeValuesFrom) ce; - if (inTaxon.equals(svf.getProperty())) { - OWLClassExpression filler = svf.getFiller(); - OWLClass c 
= filler.accept(new OWLClassExpressionVisitorExAdapter(null) { + public static final IRI IN_TAXON_IRI = IRI.create(Obo2OWLConstants.DEFAULT_IRI_PREFIX + "RO_0002162"); + + private final OWLObjectProperty inTaxon; + private final CurieHandler curieHandler; + + public FindTaxonTool(CurieHandler curieHandler, OWLDataFactory df) { + this.curieHandler = curieHandler; + inTaxon = df.getOWLObjectProperty(IN_TAXON_IRI); + } + + public String getEntityTaxon(String curie, OWLOntology model) throws UnknownIdentifierException { + if (curie == null || curie.isEmpty()) { + return null; + } + OWLDataFactory df = model.getOWLOntologyManager().getOWLDataFactory(); + OWLClass cls = df.getOWLClass(curieHandler.getIRI(curie)); + String taxon = getEntityTaxon(cls, model); + if (taxon == null) { + MinervaOWLGraphWrapper g = new MinervaOWLGraphWrapper(model); + cls = g.getOWLClassByIdentifier(curie); + if (cls != null) { + taxon = getEntityTaxon(cls, model); + } + IOUtils.closeQuietly(g); + } + return taxon; + } + + String getEntityTaxon(OWLClass entity, OWLOntology model) { + Set axioms = new HashSet(); + for (OWLOntology ont : model.getImportsClosure()) { + axioms.addAll(ont.getSubClassAxiomsForSubClass(entity)); + } + for (OWLSubClassOfAxiom axiom : axioms) { + OWLClassExpression ce = axiom.getSuperClass(); + if (ce instanceof OWLObjectSomeValuesFrom) { + OWLObjectSomeValuesFrom svf = (OWLObjectSomeValuesFrom) ce; + if (inTaxon.equals(svf.getProperty())) { + OWLClassExpression filler = svf.getFiller(); + OWLClass c = filler.accept(new OWLClassExpressionVisitorExAdapter(null) { + + @Override + public OWLClass visit(OWLClass c) { + return c; + } + }); + if (c != null) { + return curieHandler.getCuri(c); + } + } + } + } + return null; + } + + public OWLAxiom createTaxonAxiom(OWLClass entity, String taxon, OWLOntology model, Set tags) throws UnknownIdentifierException { + OWLDataFactory df = model.getOWLOntologyManager().getOWLDataFactory(); + OWLClass taxonCls = df.getOWLClass(curieHandler.getIRI(taxon)); + OWLAxiom axiom = df.getOWLSubClassOfAxiom(entity, df.getOWLObjectSomeValuesFrom(inTaxon, taxonCls), tags); - @Override - public OWLClass visit(OWLClass c) { - return c; - } - }); - if (c != null) { - return curieHandler.getCuri(c); - } - } - } - } - return null; - } - - public OWLAxiom createTaxonAxiom(OWLClass entity, String taxon, OWLOntology model, Set tags) throws UnknownIdentifierException { - OWLDataFactory df = model.getOWLOntologyManager().getOWLDataFactory(); - OWLClass taxonCls = df.getOWLClass(curieHandler.getIRI(taxon)); - OWLAxiom axiom = df.getOWLSubClassOfAxiom(entity, df.getOWLObjectSomeValuesFrom(inTaxon, taxonCls), tags); - - return axiom; - } + return axiom; + } } diff --git a/minerva-converter/src/test/java/org/geneontology/minerva/evidence/FindGoCodesTest.java b/minerva-converter/src/test/java/org/geneontology/minerva/evidence/FindGoCodesTest.java index e3495f58..802d7fa2 100644 --- a/minerva-converter/src/test/java/org/geneontology/minerva/evidence/FindGoCodesTest.java +++ b/minerva-converter/src/test/java/org/geneontology/minerva/evidence/FindGoCodesTest.java @@ -1,7 +1,5 @@ package org.geneontology.minerva.evidence; -import static org.junit.Assert.*; - import org.apache.commons.lang3.tuple.Pair; import org.geneontology.minerva.MolecularModelManager.UnknownIdentifierException; import org.geneontology.minerva.curie.CurieHandler; @@ -11,44 +9,46 @@ import org.semanticweb.owlapi.model.IRI; import org.semanticweb.owlapi.model.OWLClass; import org.semanticweb.owlapi.model.OWLOntology; - 
import owltools.io.ParserWrapper; +import static org.junit.Assert.assertEquals; +import static org.junit.Assert.assertNotNull; + public class FindGoCodesTest { - - private static CurieHandler curieHandler; - private static OWLOntology eco; - private static FindGoCodes codes; - - @BeforeClass - public static void beforeClass() throws Exception { - curieHandler = DefaultCurieHandler.getDefaultHandler(); - codes = new FindGoCodes(curieHandler); - ParserWrapper pw = new ParserWrapper(); - eco = pw.parseOWL(IRI.create("http://purl.obolibrary.org/obo/eco.owl")); - } - - @Test - public void testFindShortEvidence() throws Exception { - // ECO:0000305 (IC) <- ECO:0000306 <- ECO:0001828 - Pair pair0 = lookup("ECO:0000305"); - assertNotNull(pair0); - assertEquals("IC", pair0.getLeft()); - - Pair pair1 = lookup("ECO:0000306"); - assertNotNull(pair1); - assertEquals("IC", pair1.getLeft()); - - Pair pair2 = lookup("ECO:0000269"); - assertNotNull(pair2); - assertEquals("EXP", pair2.getLeft()); - } - - private Pair lookup(String testId) throws UnknownIdentifierException { - IRI testIRI = curieHandler.getIRI(testId); - OWLClass testOwlClass = eco.getOWLOntologyManager().getOWLDataFactory().getOWLClass(testIRI); - Pair pair = codes.findShortEvidence(testOwlClass, testId, eco); - return pair; - } + + private static CurieHandler curieHandler; + private static OWLOntology eco; + private static FindGoCodes codes; + + @BeforeClass + public static void beforeClass() throws Exception { + curieHandler = DefaultCurieHandler.getDefaultHandler(); + codes = new FindGoCodes(curieHandler); + ParserWrapper pw = new ParserWrapper(); + eco = pw.parseOWL(IRI.create("http://purl.obolibrary.org/obo/eco.owl")); + } + + @Test + public void testFindShortEvidence() throws Exception { + // ECO:0000305 (IC) <- ECO:0000306 <- ECO:0001828 + Pair pair0 = lookup("ECO:0000305"); + assertNotNull(pair0); + assertEquals("IC", pair0.getLeft()); + + Pair pair1 = lookup("ECO:0000306"); + assertNotNull(pair1); + assertEquals("IC", pair1.getLeft()); + + Pair pair2 = lookup("ECO:0000269"); + assertNotNull(pair2); + assertEquals("EXP", pair2.getLeft()); + } + + private Pair lookup(String testId) throws UnknownIdentifierException { + IRI testIRI = curieHandler.getIRI(testId); + OWLClass testOwlClass = eco.getOWLOntologyManager().getOWLDataFactory().getOWLClass(testIRI); + Pair pair = codes.findShortEvidence(testOwlClass, testId, eco); + return pair; + } } diff --git a/minerva-converter/src/test/java/org/geneontology/minerva/legacy/sparql/GPADSPARQLTest.java b/minerva-converter/src/test/java/org/geneontology/minerva/legacy/sparql/GPADSPARQLTest.java index ec69d505..d0756d0d 100644 --- a/minerva-converter/src/test/java/org/geneontology/minerva/legacy/sparql/GPADSPARQLTest.java +++ b/minerva-converter/src/test/java/org/geneontology/minerva/legacy/sparql/GPADSPARQLTest.java @@ -29,154 +29,155 @@ import java.util.stream.Collectors; public class GPADSPARQLTest { - private static RuleEngine arachne; - private static GPADSPARQLExport exporter; - - @BeforeClass - public static void setupRules() throws OWLOntologyCreationException { - OWLOntologyManager manager = OWLManager.createOWLOntologyManager(); - OWLOntology ont = manager.loadOntologyFromOntologyDocument(GPADSPARQLTest.class.getResourceAsStream("/ro-merged-2017-10-02.ofn")); - Set rules = new HashSet<>(); - rules.addAll(JavaConverters.setAsJavaSetConverter(OWLtoRules.translate(ont, Imports.INCLUDED, true, true, true, true)).asJava()); - 
rules.addAll(JavaConverters.setAsJavaSetConverter(OWLtoRules.indirectRules(ont)).asJava()); - arachne = new RuleEngine(Bridge.rulesFromJena(JavaConverters.asScalaSetConverter(rules).asScala()), true); - } - - @BeforeClass - public static void setupExporter() { - JenaSystem.init(); - exporter = new GPADSPARQLExport(DefaultCurieHandler.getDefaultHandler(), new HashMap(), new HashMap(), new HashMap<>()); - } - - @Test - public void testGPADOutput() throws Exception { - Model model = ModelFactory.createDefaultModel(); - model.read(this.getClass().getResourceAsStream("/581e072c00000473.ttl"), "", "ttl"); - Set triples = model.listStatements().toList().stream().map(s -> Bridge.tripleFromJena(s.asTriple())).collect(Collectors.toSet()); - WorkingMemory mem = arachne.processTriples(JavaConverters.asScalaSetConverter(triples).asScala()); - String gpad = exporter.exportGPAD(mem, IRI.create("http://test.org")); - int lines = gpad.split("\n", -1).length; - //TODO test contents of annotations; dumb test for now - Assert.assertTrue(gpad.contains("model-state=production")); - Assert.assertTrue("Should produce annotations", lines > 2); - } - - - /** - * This test needs improvements; the current background axioms used in the tests are resulting in the Uberon inference we're trying to avoid - * @throws Exception - */ - @Test - public void testSuppressUberonExtensionsWhenEMAPA() throws Exception { - Model model = ModelFactory.createDefaultModel(); - model.read(this.getClass().getResourceAsStream("/no_uberon_with_emapa.ttl"), "", "ttl"); - Set triples = model.listStatements().toList().stream().map(s -> Bridge.tripleFromJena(s.asTriple())).collect(Collectors.toSet()); - WorkingMemory mem = arachne.processTriples(JavaConverters.asScalaSetConverter(triples).asScala()); - Set annotations = exporter.getGPAD(mem, IRI.create("http://test.org")); - Assert.assertTrue(annotations.stream().anyMatch(a -> a.getAnnotationExtensions().stream().anyMatch(e -> e.getFiller().toString().startsWith("http://purl.obolibrary.org/obo/EMAPA_")))); - Assert.assertTrue(annotations.stream().noneMatch(a -> a.getAnnotationExtensions().stream().anyMatch(e -> e.getFiller().toString().startsWith("http://purl.obolibrary.org/obo/UBERON_")))); - } - - /** - * Test whether the GPAD output contains all required entries and rows without any spurious results. - * Example Input file: the owl dump from http://noctua-dev.berkeleybop.org/editor/graph/gomodel:59d1072300000074 - * - * Note on the GPAD file format and its contents: - * 1. the number of entries in the GPAD output from this owl dump should be 6, not 7 (although there are 7 individuals/boxes) - * because the edge/relationship "molecular_function" is a trivial one, which is supposed to be removed from the output. - * 2. the 4th columns, which consists of the list of GO IDs attributed to the DB object ID (These should be GO:0005634, GO:0007267, GO:0007507, GO:0016301) - * 3. the 2nd columns: the rest of entities in the noctua screen, i.e. 
S000028630 (YFR032C-B Scer) or S000004724(SHH3 Scer) - * - * @throws Exception - */ - @Test - public void testGPADOutputWithNegation() throws Exception { - Model model = ModelFactory.createDefaultModel(); - model.read(this.getClass().getResourceAsStream("/59d1072300000074.ttl"), "", "ttl"); - Set triples = model.listStatements().toList().stream().map(s -> Bridge.tripleFromJena(s.asTriple())).collect(Collectors.toSet()); - WorkingMemory mem = arachne.processTriples(JavaConverters.asScalaSetConverter(triples).asScala()); - String gpad = exporter.exportGPAD(mem, IRI.create("http://test.org")); - - /* Check the number of rows in GPAD output */ - String gpadOutputArr[] = gpad.split("\n", -1); - /* 1 for header and 6 for the rest of the rows. the length should be 7 or 8.*/ - Assert.assertTrue("Should produce annotations", gpadOutputArr.length >= 1 + 6); - - /* Compare the output with the GPAD file that contains sample answers */ - List lines = FileUtils.readLines(new File("src/test/resources/59d1072300000074.gpad"), "UTF-8"); - /* The order of the rows in the GPAD file can be different, so we compare rows by rows */ - for (String gpadOutputRow : gpadOutputArr) { - /* Additionally check all rows's qualifier contains |NOT substring inside */ - String gpadRowArr[] = gpadOutputRow.split("\t"); - /* Skip checking the header; all rows need to contain NOT in its qualifier */ - if (gpadRowArr.length > 2) { - Assert.assertTrue(gpadRowArr[2].contains("|NOT")); - } - } - } - - @Test - @Ignore - public void testGPADContainsAcceptedAndCreatedDates() throws Exception { - Model model = ModelFactory.createDefaultModel(); - model.read(this.getClass().getResourceAsStream("/created-date-test.ttl"), "", "ttl"); - Set triples = model.listStatements().toList().stream().map(s -> Bridge.tripleFromJena(s.asTriple())).collect(Collectors.toSet()); - WorkingMemory mem = arachne.processTriples(JavaConverters.asScalaSetConverter(triples).asScala()); - Set annotations = exporter.getGPAD(mem, IRI.create("http://test.org")); - IRI gene = IRI.create("http://identifiers.org/wormbase/WBGene00001326"); - Pair creationDate = Pair.of("creation-date", "2021-05-13"); - Assert.assertTrue(annotations.stream().anyMatch(a -> a.getObject().equals(gene) && a.getAnnotations().contains(creationDate))); - } - - @Test - public void testFilterRootMFWhenOtherMF() throws Exception { - IRI rootMF = IRI.create("http://purl.obolibrary.org/obo/GO_0003674"); - IRI rootBP = IRI.create("http://purl.obolibrary.org/obo/GO_0008150"); - IRI rootCC = IRI.create("http://purl.obolibrary.org/obo/GO_0005575"); - - Model model = ModelFactory.createDefaultModel(); - model.read(this.getClass().getResourceAsStream("/test_root_mf_filter.ttl"), "", "ttl"); - Set triples = model.listStatements().toList().stream().map(s -> Bridge.tripleFromJena(s.asTriple())).collect(Collectors.toSet()); - WorkingMemory mem = arachne.processTriples(JavaConverters.asScalaSetConverter(triples).asScala()); - Set annotations = exporter.getGPAD(mem, IRI.create("http://test.org")); - IRI gene = IRI.create("http://identifiers.org/mgi/MGI:2153470"); - Assert.assertTrue(annotations.stream().noneMatch(a -> a.getObject().equals(gene) && a.getOntologyClass().equals(rootMF))); - - Model model2 = ModelFactory.createDefaultModel(); - model2.read(this.getClass().getResourceAsStream("/test_root_mf_filter2.ttl"), "", "ttl"); - Set triples2 = model2.listStatements().toList().stream().map(s -> Bridge.tripleFromJena(s.asTriple())).collect(Collectors.toSet()); - WorkingMemory mem2 = 
arachne.processTriples(JavaConverters.asScalaSetConverter(triples2).asScala()); - Set annotations2 = exporter.getGPAD(mem2, IRI.create("http://test.org")); - IRI gene2 = IRI.create("http://identifiers.org/mgi/MGI:98392"); - Assert.assertTrue(annotations2.stream().anyMatch(a -> a.getObject().equals(gene2) && a.getOntologyClass().equals(rootMF))); - Assert.assertTrue(annotations2.stream().anyMatch(a -> a.getObject().equals(gene2) && a.getOntologyClass().equals(rootBP))); - - Model model3 = ModelFactory.createDefaultModel(); - model3.read(this.getClass().getResourceAsStream("/test_root_mf_filter3.ttl"), "", "ttl"); - Set triples3 = model3.listStatements().toList().stream().map(s -> Bridge.tripleFromJena(s.asTriple())).collect(Collectors.toSet()); - WorkingMemory mem3 = arachne.processTriples(JavaConverters.asScalaSetConverter(triples3).asScala()); - Set annotations3 = exporter.getGPAD(mem3, IRI.create("http://test.org")); - IRI gene3 = IRI.create("http://identifiers.org/sgd/S000002650"); - Assert.assertTrue(annotations3.stream().anyMatch(a -> a.getObject().equals(gene3) && a.getOntologyClass().equals(rootMF))); - Assert.assertTrue(annotations3.stream().anyMatch(a -> a.getObject().equals(gene3) && a.getOntologyClass().equals(rootBP))); - Assert.assertTrue(annotations3.stream().anyMatch(a -> a.getObject().equals(gene3) && a.getOntologyClass().equals(rootCC))); - } - - @Test - public void testFilterAnnotationsToRegulatedProcess() throws Exception { - HashMap> regulators = new HashMap<>(); - regulators.put(IRI.create("http://purl.obolibrary.org/obo/GO_0030511"), Collections.singleton(IRI.create("http://purl.obolibrary.org/obo/GO_0007179"))); - GPADSPARQLExport exporter = new GPADSPARQLExport(DefaultCurieHandler.getDefaultHandler(), new HashMap(), new HashMap(), regulators); - Model model = ModelFactory.createDefaultModel(); - model.read(this.getClass().getResourceAsStream("/test_filter_regulated_process.ttl"), "", "ttl"); - Set triples = model.listStatements().toList().stream().map(s -> Bridge.tripleFromJena(s.asTriple())).collect(Collectors.toSet()); - WorkingMemory mem = arachne.processTriples(JavaConverters.asScalaSetConverter(triples).asScala()); - Set annotations = exporter.getGPAD(mem, IRI.create("http://test.org")); - IRI gene = IRI.create("http://identifiers.org/mgi/MGI:2148811"); - IRI regulator = IRI.create("http://purl.obolibrary.org/obo/GO_0030511"); - IRI regulated = IRI.create("http://purl.obolibrary.org/obo/GO_0007179"); - Assert.assertTrue(annotations.stream().anyMatch(a -> a.getObject().equals(gene) && a.getOntologyClass().equals(regulator))); - Assert.assertTrue(annotations.stream().noneMatch(a -> a.getObject().equals(gene) && a.getOntologyClass().equals(regulated))); - } + private static RuleEngine arachne; + private static GPADSPARQLExport exporter; + + @BeforeClass + public static void setupRules() throws OWLOntologyCreationException { + OWLOntologyManager manager = OWLManager.createOWLOntologyManager(); + OWLOntology ont = manager.loadOntologyFromOntologyDocument(GPADSPARQLTest.class.getResourceAsStream("/ro-merged-2017-10-02.ofn")); + Set rules = new HashSet<>(); + rules.addAll(JavaConverters.setAsJavaSetConverter(OWLtoRules.translate(ont, Imports.INCLUDED, true, true, true, true)).asJava()); + rules.addAll(JavaConverters.setAsJavaSetConverter(OWLtoRules.indirectRules(ont)).asJava()); + arachne = new RuleEngine(Bridge.rulesFromJena(JavaConverters.asScalaSetConverter(rules).asScala()), true); + } + + @BeforeClass + public static void setupExporter() { + JenaSystem.init(); + 
exporter = new GPADSPARQLExport(DefaultCurieHandler.getDefaultHandler(), new HashMap(), new HashMap(), new HashMap<>()); + } + + @Test + public void testGPADOutput() throws Exception { + Model model = ModelFactory.createDefaultModel(); + model.read(this.getClass().getResourceAsStream("/581e072c00000473.ttl"), "", "ttl"); + Set triples = model.listStatements().toList().stream().map(s -> Bridge.tripleFromJena(s.asTriple())).collect(Collectors.toSet()); + WorkingMemory mem = arachne.processTriples(JavaConverters.asScalaSetConverter(triples).asScala()); + String gpad = exporter.exportGPAD(mem, IRI.create("http://test.org")); + int lines = gpad.split("\n", -1).length; + //TODO test contents of annotations; dumb test for now + Assert.assertTrue(gpad.contains("model-state=production")); + Assert.assertTrue("Should produce annotations", lines > 2); + } + + + /** + * This test needs improvements; the current background axioms used in the tests are resulting in the Uberon inference we're trying to avoid + * + * @throws Exception + */ + @Test + public void testSuppressUberonExtensionsWhenEMAPA() throws Exception { + Model model = ModelFactory.createDefaultModel(); + model.read(this.getClass().getResourceAsStream("/no_uberon_with_emapa.ttl"), "", "ttl"); + Set triples = model.listStatements().toList().stream().map(s -> Bridge.tripleFromJena(s.asTriple())).collect(Collectors.toSet()); + WorkingMemory mem = arachne.processTriples(JavaConverters.asScalaSetConverter(triples).asScala()); + Set annotations = exporter.getGPAD(mem, IRI.create("http://test.org")); + Assert.assertTrue(annotations.stream().anyMatch(a -> a.getAnnotationExtensions().stream().anyMatch(e -> e.getFiller().toString().startsWith("http://purl.obolibrary.org/obo/EMAPA_")))); + Assert.assertTrue(annotations.stream().noneMatch(a -> a.getAnnotationExtensions().stream().anyMatch(e -> e.getFiller().toString().startsWith("http://purl.obolibrary.org/obo/UBERON_")))); + } + + /** + * Test whether the GPAD output contains all required entries and rows without any spurious results. + * Example Input file: the owl dump from http://noctua-dev.berkeleybop.org/editor/graph/gomodel:59d1072300000074 + *

+ * Note on the GPAD file format and its contents: + * 1. the number of entries in the GPAD output from this owl dump should be 6, not 7 (although there are 7 individuals/boxes) + * because the edge/relationship "molecular_function" is a trivial one, which is supposed to be removed from the output. + * 2. the 4th columns, which consists of the list of GO IDs attributed to the DB object ID (These should be GO:0005634, GO:0007267, GO:0007507, GO:0016301) + * 3. the 2nd columns: the rest of entities in the noctua screen, i.e. S000028630 (YFR032C-B Scer) or S000004724(SHH3 Scer) + * + * @throws Exception + */ + @Test + public void testGPADOutputWithNegation() throws Exception { + Model model = ModelFactory.createDefaultModel(); + model.read(this.getClass().getResourceAsStream("/59d1072300000074.ttl"), "", "ttl"); + Set triples = model.listStatements().toList().stream().map(s -> Bridge.tripleFromJena(s.asTriple())).collect(Collectors.toSet()); + WorkingMemory mem = arachne.processTriples(JavaConverters.asScalaSetConverter(triples).asScala()); + String gpad = exporter.exportGPAD(mem, IRI.create("http://test.org")); + + /* Check the number of rows in GPAD output */ + String gpadOutputArr[] = gpad.split("\n", -1); + /* 1 for header and 6 for the rest of the rows. the length should be 7 or 8.*/ + Assert.assertTrue("Should produce annotations", gpadOutputArr.length >= 1 + 6); + + /* Compare the output with the GPAD file that contains sample answers */ + List lines = FileUtils.readLines(new File("src/test/resources/59d1072300000074.gpad"), "UTF-8"); + /* The order of the rows in the GPAD file can be different, so we compare rows by rows */ + for (String gpadOutputRow : gpadOutputArr) { + /* Additionally check all rows's qualifier contains |NOT substring inside */ + String gpadRowArr[] = gpadOutputRow.split("\t"); + /* Skip checking the header; all rows need to contain NOT in its qualifier */ + if (gpadRowArr.length > 2) { + Assert.assertTrue(gpadRowArr[2].contains("|NOT")); + } + } + } + + @Test + @Ignore + public void testGPADContainsAcceptedAndCreatedDates() throws Exception { + Model model = ModelFactory.createDefaultModel(); + model.read(this.getClass().getResourceAsStream("/created-date-test.ttl"), "", "ttl"); + Set triples = model.listStatements().toList().stream().map(s -> Bridge.tripleFromJena(s.asTriple())).collect(Collectors.toSet()); + WorkingMemory mem = arachne.processTriples(JavaConverters.asScalaSetConverter(triples).asScala()); + Set annotations = exporter.getGPAD(mem, IRI.create("http://test.org")); + IRI gene = IRI.create("http://identifiers.org/wormbase/WBGene00001326"); + Pair creationDate = Pair.of("creation-date", "2021-05-13"); + Assert.assertTrue(annotations.stream().anyMatch(a -> a.getObject().equals(gene) && a.getAnnotations().contains(creationDate))); + } + + @Test + public void testFilterRootMFWhenOtherMF() throws Exception { + IRI rootMF = IRI.create("http://purl.obolibrary.org/obo/GO_0003674"); + IRI rootBP = IRI.create("http://purl.obolibrary.org/obo/GO_0008150"); + IRI rootCC = IRI.create("http://purl.obolibrary.org/obo/GO_0005575"); + + Model model = ModelFactory.createDefaultModel(); + model.read(this.getClass().getResourceAsStream("/test_root_mf_filter.ttl"), "", "ttl"); + Set triples = model.listStatements().toList().stream().map(s -> Bridge.tripleFromJena(s.asTriple())).collect(Collectors.toSet()); + WorkingMemory mem = arachne.processTriples(JavaConverters.asScalaSetConverter(triples).asScala()); + Set annotations = exporter.getGPAD(mem, 
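For reference, the column positions relied on in the notes and assertions above (DB object id in column 2, qualifier in column 3, GO id in column 4) follow the GPAD 1.2 layout; a minimal sketch of reading them back out of one tab-separated row, where the sample row content is invented for illustration rather than taken from the test resources:

    // Hedged sketch only; the row below is made up, but its shape matches the indexes the test checks.
    String row = "SGD\tS000028630\tinvolved_in|NOT\tGO:0007267\tPMID:12345\tECO:0000314\t\t\t20171002\tGO_Noctua\t\tmodel-state=production";
    String[] cols = row.split("\t", -1);
    String objectId = cols[1];   // 2nd column: DB object id
    String qualifier = cols[2];  // 3rd column: qualifier; negated rows carry "|NOT" here
    String goId = cols[3];       // 4th column: GO class id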
IRI.create("http://test.org")); + IRI gene = IRI.create("http://identifiers.org/mgi/MGI:2153470"); + Assert.assertTrue(annotations.stream().noneMatch(a -> a.getObject().equals(gene) && a.getOntologyClass().equals(rootMF))); + + Model model2 = ModelFactory.createDefaultModel(); + model2.read(this.getClass().getResourceAsStream("/test_root_mf_filter2.ttl"), "", "ttl"); + Set triples2 = model2.listStatements().toList().stream().map(s -> Bridge.tripleFromJena(s.asTriple())).collect(Collectors.toSet()); + WorkingMemory mem2 = arachne.processTriples(JavaConverters.asScalaSetConverter(triples2).asScala()); + Set annotations2 = exporter.getGPAD(mem2, IRI.create("http://test.org")); + IRI gene2 = IRI.create("http://identifiers.org/mgi/MGI:98392"); + Assert.assertTrue(annotations2.stream().anyMatch(a -> a.getObject().equals(gene2) && a.getOntologyClass().equals(rootMF))); + Assert.assertTrue(annotations2.stream().anyMatch(a -> a.getObject().equals(gene2) && a.getOntologyClass().equals(rootBP))); + + Model model3 = ModelFactory.createDefaultModel(); + model3.read(this.getClass().getResourceAsStream("/test_root_mf_filter3.ttl"), "", "ttl"); + Set triples3 = model3.listStatements().toList().stream().map(s -> Bridge.tripleFromJena(s.asTriple())).collect(Collectors.toSet()); + WorkingMemory mem3 = arachne.processTriples(JavaConverters.asScalaSetConverter(triples3).asScala()); + Set annotations3 = exporter.getGPAD(mem3, IRI.create("http://test.org")); + IRI gene3 = IRI.create("http://identifiers.org/sgd/S000002650"); + Assert.assertTrue(annotations3.stream().anyMatch(a -> a.getObject().equals(gene3) && a.getOntologyClass().equals(rootMF))); + Assert.assertTrue(annotations3.stream().anyMatch(a -> a.getObject().equals(gene3) && a.getOntologyClass().equals(rootBP))); + Assert.assertTrue(annotations3.stream().anyMatch(a -> a.getObject().equals(gene3) && a.getOntologyClass().equals(rootCC))); + } + + @Test + public void testFilterAnnotationsToRegulatedProcess() throws Exception { + HashMap> regulators = new HashMap<>(); + regulators.put(IRI.create("http://purl.obolibrary.org/obo/GO_0030511"), Collections.singleton(IRI.create("http://purl.obolibrary.org/obo/GO_0007179"))); + GPADSPARQLExport exporter = new GPADSPARQLExport(DefaultCurieHandler.getDefaultHandler(), new HashMap(), new HashMap(), regulators); + Model model = ModelFactory.createDefaultModel(); + model.read(this.getClass().getResourceAsStream("/test_filter_regulated_process.ttl"), "", "ttl"); + Set triples = model.listStatements().toList().stream().map(s -> Bridge.tripleFromJena(s.asTriple())).collect(Collectors.toSet()); + WorkingMemory mem = arachne.processTriples(JavaConverters.asScalaSetConverter(triples).asScala()); + Set annotations = exporter.getGPAD(mem, IRI.create("http://test.org")); + IRI gene = IRI.create("http://identifiers.org/mgi/MGI:2148811"); + IRI regulator = IRI.create("http://purl.obolibrary.org/obo/GO_0030511"); + IRI regulated = IRI.create("http://purl.obolibrary.org/obo/GO_0007179"); + Assert.assertTrue(annotations.stream().anyMatch(a -> a.getObject().equals(gene) && a.getOntologyClass().equals(regulator))); + Assert.assertTrue(annotations.stream().noneMatch(a -> a.getObject().equals(gene) && a.getOntologyClass().equals(regulated))); + } } diff --git a/minerva-converter/src/test/java/org/geneontology/minerva/taxon/FindTaxonToolTest.java b/minerva-converter/src/test/java/org/geneontology/minerva/taxon/FindTaxonToolTest.java index 3700fb77..881f3532 100644 --- 
a/minerva-converter/src/test/java/org/geneontology/minerva/taxon/FindTaxonToolTest.java +++ b/minerva-converter/src/test/java/org/geneontology/minerva/taxon/FindTaxonToolTest.java @@ -1,45 +1,43 @@ package org.geneontology.minerva.taxon; -import static org.junit.Assert.*; - import org.geneontology.minerva.curie.CurieHandler; import org.geneontology.minerva.curie.DefaultCurieHandler; import org.junit.BeforeClass; -import org.junit.Test; import org.semanticweb.owlapi.model.IRI; import org.semanticweb.owlapi.model.OWLClass; import org.semanticweb.owlapi.model.OWLDataFactory; import org.semanticweb.owlapi.model.OWLOntology; - import owltools.io.ParserWrapper; +import static org.junit.Assert.assertNotNull; + public class FindTaxonToolTest { - - private static OWLOntology NEO = null; - private static CurieHandler curieHandler = null; - - @BeforeClass - public static void beforeClass() throws Exception { - ParserWrapper pw = new ParserWrapper(); - NEO = pw.parse("http://purl.obolibrary.org/obo/go/noctua/neo.owl"); - curieHandler = DefaultCurieHandler.getDefaultHandler(); - } - - //FIXME @Test //loads all of neo... make reduced copy - public void test1() throws Exception { - OWLDataFactory df = NEO.getOWLOntologyManager().getOWLDataFactory(); - FindTaxonTool tool = new FindTaxonTool(curieHandler , df); - OWLClass zfin1 = df.getOWLClass(IRI.create("http://purl.obolibrary.org/obo/ZFIN_ZDB-GENE-991124-7")); - String taxon1 = tool.getEntityTaxon(zfin1 , NEO); - assertNotNull(taxon1); - } - - //FIXME @Test //loads all of neo... make reduced copy - public void test2() throws Exception { - OWLDataFactory df = NEO.getOWLOntologyManager().getOWLDataFactory(); - FindTaxonTool tool = new FindTaxonTool(curieHandler, df); - String taxon1 = tool.getEntityTaxon("ZFIN:ZDB-GENE-991124-7" , NEO); - assertNotNull(taxon1); - } + + private static OWLOntology NEO = null; + private static CurieHandler curieHandler = null; + + @BeforeClass + public static void beforeClass() throws Exception { + ParserWrapper pw = new ParserWrapper(); + NEO = pw.parse("http://purl.obolibrary.org/obo/go/noctua/neo.owl"); + curieHandler = DefaultCurieHandler.getDefaultHandler(); + } + + //FIXME @Test //loads all of neo... make reduced copy + public void test1() throws Exception { + OWLDataFactory df = NEO.getOWLOntologyManager().getOWLDataFactory(); + FindTaxonTool tool = new FindTaxonTool(curieHandler, df); + OWLClass zfin1 = df.getOWLClass(IRI.create("http://purl.obolibrary.org/obo/ZFIN_ZDB-GENE-991124-7")); + String taxon1 = tool.getEntityTaxon(zfin1, NEO); + assertNotNull(taxon1); + } + + //FIXME @Test //loads all of neo... 
make reduced copy + public void test2() throws Exception { + OWLDataFactory df = NEO.getOWLOntologyManager().getOWLDataFactory(); + FindTaxonTool tool = new FindTaxonTool(curieHandler, df); + String taxon1 = tool.getEntityTaxon("ZFIN:ZDB-GENE-991124-7", NEO); + assertNotNull(taxon1); + } } diff --git a/minerva-converter/src/test/resources/catalog-v001.xml b/minerva-converter/src/test/resources/catalog-v001.xml index 0973a563..1a25445f 100644 --- a/minerva-converter/src/test/resources/catalog-v001.xml +++ b/minerva-converter/src/test/resources/catalog-v001.xml @@ -1,4 +1,4 @@ - + diff --git a/minerva-converter/src/test/resources/log4j.properties b/minerva-converter/src/test/resources/log4j.properties index 064d09c5..bc86eb1f 100644 --- a/minerva-converter/src/test/resources/log4j.properties +++ b/minerva-converter/src/test/resources/log4j.properties @@ -1,9 +1,7 @@ log4j.appender.console=org.apache.log4j.ConsoleAppender log4j.appender.console.layout=org.apache.log4j.PatternLayout log4j.appender.console.layout.ConversionPattern=%d %-5p (%c{1}:%L) %m\n - -log4j.logger.org.semanticweb.elk = ERROR +log4j.logger.org.semanticweb.elk=ERROR log4j.logger.org.obolibrary.obo2owl=OFF log4j.logger.org.semanticweb.owlapi=error - log4j.rootLogger=INFO, console diff --git a/minerva-core/src/main/java/org/geneontology/minerva/BlazegraphMolecularModelManager.java b/minerva-core/src/main/java/org/geneontology/minerva/BlazegraphMolecularModelManager.java index 7bedb58d..73a69b81 100644 --- a/minerva-core/src/main/java/org/geneontology/minerva/BlazegraphMolecularModelManager.java +++ b/minerva-core/src/main/java/org/geneontology/minerva/BlazegraphMolecularModelManager.java @@ -1,22 +1,13 @@ package org.geneontology.minerva; -import java.io.*; -import java.util.ArrayList; -import java.util.Collections; -import java.util.HashMap; -import java.util.HashSet; -import java.util.List; -import java.util.Map; -import java.util.Map.Entry; -import java.util.Properties; -import java.util.Set; -import java.util.stream.Collectors; - -import com.bigdata.rdf.rio.json.BigdataSPARQLResultsJSONWriter; +import com.bigdata.journal.Options; +import com.bigdata.rdf.sail.BigdataSail; +import com.bigdata.rdf.sail.BigdataSailRepository; +import com.bigdata.rdf.sail.BigdataSailRepositoryConnection; +import info.aduna.iteration.Iterations; import org.apache.commons.io.FileUtils; import org.apache.commons.lang3.StringUtils; import org.apache.log4j.Logger; -import org.geneontology.minerva.ModelContainer.ModelChangeListener; import org.geneontology.minerva.MolecularModelManager.UnknownIdentifierException; import org.geneontology.minerva.curie.CurieHandler; import org.geneontology.minerva.util.AnnotationShorthand; @@ -27,1014 +18,981 @@ import org.openrdf.model.vocabulary.OWL; import org.openrdf.model.vocabulary.RDF; import org.openrdf.query.*; -import org.openrdf.query.impl.TupleQueryResultBuilder; -import org.openrdf.query.parser.QueryParser; -import org.openrdf.query.parser.QueryParserRegistry; -import org.openrdf.query.parser.QueryParserUtil; import org.openrdf.query.parser.QueryPrologLexer; import org.openrdf.repository.RepositoryException; import org.openrdf.repository.RepositoryResult; -import org.openrdf.rio.RDFFormat; -import org.openrdf.rio.RDFHandlerException; -import org.openrdf.rio.RDFParseException; -import org.openrdf.rio.RDFParser; -import org.openrdf.rio.RDFWriter; -import org.openrdf.rio.Rio; +import org.openrdf.rio.*; import org.openrdf.rio.helpers.RDFHandlerBase; import org.openrdf.rio.helpers.StatementCollector; import 
org.semanticweb.owlapi.apibinding.OWLManager; -import org.semanticweb.owlapi.formats.FunctionalSyntaxDocumentFormat; -import org.semanticweb.owlapi.formats.ManchesterSyntaxDocumentFormat; -import org.semanticweb.owlapi.formats.OWLXMLDocumentFormat; -import org.semanticweb.owlapi.formats.RDFXMLDocumentFormat; -import org.semanticweb.owlapi.formats.TurtleDocumentFormat; -import org.semanticweb.owlapi.model.AddImport; -import org.semanticweb.owlapi.model.IRI; -import org.semanticweb.owlapi.model.OWLAnnotation; -import org.semanticweb.owlapi.model.OWLAnnotationProperty; -import org.semanticweb.owlapi.model.OWLClass; -import org.semanticweb.owlapi.model.OWLDataFactory; -import org.semanticweb.owlapi.model.OWLDeclarationAxiom; -import org.semanticweb.owlapi.model.OWLDocumentFormat; -import org.semanticweb.owlapi.model.OWLImportsDeclaration; -import org.semanticweb.owlapi.model.OWLObjectProperty; -import org.semanticweb.owlapi.model.OWLOntology; -import org.semanticweb.owlapi.model.OWLOntologyAlreadyExistsException; -import org.semanticweb.owlapi.model.OWLOntologyChange; -import org.semanticweb.owlapi.model.OWLOntologyCreationException; -import org.semanticweb.owlapi.model.OWLOntologyDocumentAlreadyExistsException; -import org.semanticweb.owlapi.model.OWLOntologyID; -import org.semanticweb.owlapi.model.OWLOntologyIRIMapper; -import org.semanticweb.owlapi.model.OWLOntologyManager; -import org.semanticweb.owlapi.model.OWLOntologyStorageException; -import org.semanticweb.owlapi.model.OWLSubClassOfAxiom; -import org.semanticweb.owlapi.model.RemoveImport; +import org.semanticweb.owlapi.formats.*; +import org.semanticweb.owlapi.model.*; import org.semanticweb.owlapi.rio.RioMemoryTripleSource; import org.semanticweb.owlapi.rio.RioRenderer; -import com.bigdata.journal.Options; -import com.bigdata.rdf.sail.BigdataSail; -import com.bigdata.rdf.sail.BigdataSailRepository; -import com.bigdata.rdf.sail.BigdataSailRepositoryConnection; -import com.google.common.base.Optional; - -import info.aduna.iteration.Iterations; +import java.io.*; +import java.util.*; +import java.util.Map.Entry; +import java.util.stream.Collectors; public class BlazegraphMolecularModelManager extends CoreMolecularModelManager { - private static Logger LOG = Logger - .getLogger(BlazegraphMolecularModelManager.class); - - boolean isPrecomputePropertyClassCombinations = false; - - final String pathToOWLStore; - final String pathToExportFolder; - private final BigdataSailRepository repo; - private final CurieHandler curieHandler; - - private final String modelIdPrefix; - - OWLDocumentFormat ontologyFormat = new TurtleDocumentFormat(); - - private final List preFileSaveHandlers = new ArrayList(); - private final List postLoadOntologyFilters = new ArrayList(); - - - /** - * @param tbox - * @param modelIdPrefix - * @param pathToJournal Path to Blazegraph journal file to use. - * Only one instance of Blazegraph can use this file at a time. 
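As a usage sketch for the constructor documented here, with every path, the model id prefix, and the METADATA type parameter standing in as placeholders rather than values taken from this repository (checked exceptions elided):

    OWLOntology tbox = OWLManager.createOWLOntologyManager()
            .loadOntologyFromOntologyDocument(new File("/data/go-lego.owl"));   // placeholder tbox location
    BlazegraphMolecularModelManager<Void> m3 = new BlazegraphMolecularModelManager<>(
            tbox,
            DefaultCurieHandler.getDefaultHandler(),   // a handler with CURIE mappings is required (see the checks below)
            "http://model.geneontology.org/",          // modelIdPrefix (placeholder)
            "/data/blazegraph.jnl",                    // pathToJournal; only one process may open it at a time
            "/data/exports",                           // pathToExportFolder (placeholder)
            "/data/blazegraph-ontology.jnl",           // pathToOntologyJournal (placeholder)
            false);                                    // downloadOntologyJournal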
- * @throws OWLOntologyCreationException - * @throws IOException - */ - public BlazegraphMolecularModelManager(OWLOntology tbox, CurieHandler curieHandler, String modelIdPrefix, String pathToJournal, String pathToExportFolder, String pathToOntologyJournal, boolean downloadOntologyJournal) - throws OWLOntologyCreationException, IOException { - super(tbox, pathToOntologyJournal, downloadOntologyJournal); - if(curieHandler==null) { - LOG.error("curie handler required for blazegraph model manager startup "); - System.exit(-1); - }else if(curieHandler.getMappings()==null) { - LOG.error("curie handler WITH MAPPINGS required for blazegraph model manager startup "); - System.exit(-1); - } - this.modelIdPrefix = modelIdPrefix; - this.curieHandler = curieHandler; - this.pathToOWLStore = pathToJournal; - this.pathToExportFolder = pathToExportFolder; - this.repo = initializeRepository(this.pathToOWLStore); - } - - /** - * Note this may move to an implementation-specific subclass in future - * - * @return path to owl on server - */ - public String getPathToOWLStore() { - return pathToOWLStore; - } - - /** - * @return the curieHandler - */ - public CurieHandler getCuriHandler() { - return curieHandler; - } - - private BigdataSailRepository initializeRepository(String pathToJournal) { - try { - Properties properties = new Properties(); - properties.load(this.getClass().getResourceAsStream("blazegraph.properties")); - properties.setProperty(Options.FILE, pathToJournal); - BigdataSail sail = new BigdataSail(properties); - BigdataSailRepository repository = new BigdataSailRepository(sail); - - repository.initialize(); - return repository; - } catch (RepositoryException e) { - LOG.fatal("Could not create Blazegraph sail", e); - return null; - } catch (IOException e) { - LOG.fatal("Could not create Blazegraph sail", e); - return null; - } - } - - /** - * Generates a blank model - * - * @param metadata - * @return modelId - * @throws OWLOntologyCreationException - */ - public ModelContainer generateBlankModel(METADATA metadata) - throws OWLOntologyCreationException { - - // Create an arbitrary unique ID and add it to the system. - IRI modelId = generateId(modelIdPrefix); - if (modelMap.containsKey(modelId)) { - throw new OWLOntologyCreationException( - "A model already exists for this db: " + modelId); - } - LOG.info("Generating blank model for new modelId: " + modelId); - - // create empty ontology, use model id as ontology IRI - final OWLOntologyManager m = tbox.getOWLOntologyManager(); - OWLOntology abox = null; - ModelContainer model = null; - try { - abox = m.createOntology(modelId); - // generate model - model = new ModelContainer(modelId, tbox, abox); - } catch (OWLOntologyCreationException exception) { - if (abox != null) { - m.removeOntology(abox); - } - throw exception; - } - // add to internal map - modelMap.put(modelId, model); - return model; - } - - /** - * Save all models to disk. The optional annotations may be used to set - * saved_by and other meta data. 
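Continuing that sketch, creating and persisting a model pairs generateBlankModel with saveModel; the null metadata and the empty annotation set are placeholders for whatever METADATA type and saved_by annotations a real deployment would pass (checked exceptions elided):

    // Hedged sketch: create an empty ABox whose ontology IRI is the generated model id,
    // then write it to the journal.
    ModelContainer model = m3.generateBlankModel(null);
    m3.saveModel(model, Collections.<OWLAnnotation>emptySet(), null);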
- * - * @param annotations - * @param metadata - * - * @throws OWLOntologyStorageException - * @throws OWLOntologyCreationException - * @throws IOException - * @throws RepositoryException - * @throws UnknownIdentifierException - */ - public void saveAllModels(Set annotations, METADATA metadata) - throws OWLOntologyStorageException, OWLOntologyCreationException, - IOException, RepositoryException, UnknownIdentifierException { - for (Entry entry : modelMap.entrySet()) { - saveModel(entry.getValue(), annotations, metadata); - } - } - - /** - * Save a model to the database. - * - * @param m - * @param annotations - * @param metadata - * - * @throws OWLOntologyStorageException - * @throws OWLOntologyCreationException - * @throws IOException - * @throws RepositoryException - * @throws UnknownIdentifierException - */ - public void saveModel(ModelContainer m, - Set annotations, METADATA metadata) - throws OWLOntologyStorageException, OWLOntologyCreationException, - IOException, RepositoryException, UnknownIdentifierException { - IRI modelId = m.getModelId(); - OWLOntology ont2save = m.getAboxOntology(); - Set taxa = getTaxonsForModel(modelId.toString()); - if(taxa!=null) { - for(String taxon : taxa) { - ont2save = getGolego_repo().addTaxonModelMetaData(ont2save, IRI.create(taxon)); - } - } - final OWLOntology ont = ont2save; - final OWLOntologyManager manager = ont.getOWLOntologyManager(); - List changes = preSaveFileHandler(ont); - synchronized(ont) { - try { - this.writeModelToDatabase(ont, modelId); - // reset modified flag for abox after successful save - m.setAboxModified(false); - } finally { - if (changes != null) { - List invertedChanges = ReverseChangeGenerator - .invertChanges(changes); - if (invertedChanges != null && !invertedChanges.isEmpty()) { - manager.applyChanges(invertedChanges); - } - } - } - } - } - - - private void writeModelToDatabase(OWLOntology model, IRI modelId) throws RepositoryException, IOException { - // Only one thread at a time can use the unisolated connection. - synchronized(repo) { - final BigdataSailRepositoryConnection connection = repo.getUnisolatedConnection(); - try { - connection.begin(); - try { - URI graph = new URIImpl(modelId.toString()); - connection.clear(graph); - StatementCollector collector = new StatementCollector(); - RioRenderer renderer = new RioRenderer(model, collector, null); - renderer.render(); - connection.add(collector.getStatements(), graph); - connection.commit(); - } catch (Exception e) { - connection.rollback(); - throw e; - } - } finally { - connection.close(); - } - } - } - - private List preSaveFileHandler(OWLOntology model) throws UnknownIdentifierException { - List allChanges = null; - for (PreFileSaveHandler handler : preFileSaveHandlers) { - List changes = handler.handle(model); - if (changes != null && !changes.isEmpty()) { - if (allChanges == null) { - allChanges = new ArrayList( - changes.size()); - } - allChanges.addAll(changes); - } - } - return allChanges; - } - - public static interface PreFileSaveHandler { - - public List handle(OWLOntology model) throws UnknownIdentifierException; - - } - - public void addPreFileSaveHandler(PreFileSaveHandler handler) { - if (handler != null) { - preFileSaveHandlers.add(handler); - } - } - - /** - * Export the ABox for the given modelId in the default - * {@link OWLDocumentFormat}. 
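Since the handler contract is easy to misread: saveModel only inverts whatever changes the handlers return (via ReverseChangeGenerator), so a handler is expected to apply its own changes and hand the list back. A minimal sketch of such a handler, assuming the returned List carries OWLOntologyChange objects and using an invented annotation property and subject:

    m3.addPreFileSaveHandler(new BlazegraphMolecularModelManager.PreFileSaveHandler() {
        @Override
        public List<OWLOntologyChange> handle(OWLOntology model) {
            OWLDataFactory df = model.getOWLOntologyManager().getOWLDataFactory();
            OWLAnnotationProperty p = df.getOWLAnnotationProperty(
                    IRI.create("http://example.org/export-note"));                     // invented property
            OWLAxiom note = df.getOWLAnnotationAssertionAxiom(
                    p, IRI.create("http://example.org/some-individual"), df.getOWLLiteral("exported")); // invented subject
            OWLOntologyChange change = new AddAxiom(model, note);
            model.getOWLOntologyManager().applyChange(change);   // the handler applies its change itself
            return Collections.singletonList(change);            // returned so saveModel can undo it after the write
        }
    });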
- * - * @param model - * @return modelContent - * @throws OWLOntologyStorageException - */ - public String exportModel(ModelContainer model) - throws OWLOntologyStorageException { - return exportModel(model, ontologyFormat); - } - - /** - * Export the ABox for the given modelId in the given ontology format.
- * Warning: The mapping from String to {@link OWLDocumentFormat} does not - * map every format! - * - * @param model - * @param format - * @return modelContent - * @throws OWLOntologyStorageException - */ - public String exportModel(ModelContainer model, String format) - throws OWLOntologyStorageException { - OWLDocumentFormat ontologyFormat = getOWLOntologyFormat(format); - if (ontologyFormat == null) { - ontologyFormat = this.ontologyFormat; - } - - return exportModel(model, ontologyFormat); - } - - private OWLDocumentFormat getOWLOntologyFormat(String fmt) { - OWLDocumentFormat ofmt = null; - if (fmt != null) { - fmt = fmt.toLowerCase(); - if (fmt.equals("rdfxml")) - ofmt = new RDFXMLDocumentFormat(); - else if (fmt.equals("owl")) - ofmt = new RDFXMLDocumentFormat(); - else if (fmt.equals("rdf")) - ofmt = new RDFXMLDocumentFormat(); - else if (fmt.equals("owx")) - ofmt = new OWLXMLDocumentFormat(); - else if (fmt.equals("owf")) - ofmt = new FunctionalSyntaxDocumentFormat(); - else if (fmt.equals("owm")) - ofmt = new ManchesterSyntaxDocumentFormat(); - } - return ofmt; - } - - /** - * Retrieve a collection of all file/stored model ids found in the repo.
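In practice the keys handled here are the whole mapping: "rdfxml", "owl" and "rdf" select RDF/XML, "owx" OWL/XML, "owf" functional syntax, "owm" Manchester syntax, and anything else falls through to the manager's default Turtle format. A brief usage sketch:

    String turtle = m3.exportModel(model);          // default ontologyFormat (Turtle)
    String owlXml = m3.exportModel(model, "owx");   // OWL/XML
    String fallback = m3.exportModel(model, "xyz"); // unrecognized key, falls back to the default format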
- * Note: Models may not be loaded at this point. - * - * @return set of modelids. - * @throws IOException - */ - public Set getStoredModelIds() throws IOException { - try { - BigdataSailRepositoryConnection connection = repo.getReadOnlyConnection(); - try { - RepositoryResult graphs = connection.getContextIDs(); - Set modelIds = new HashSet<>(); - while (graphs.hasNext()) { - modelIds.add(IRI.create(graphs.next().stringValue())); - } - graphs.close(); - return Collections.unmodifiableSet(modelIds); - } finally { - connection.close(); - } - } catch (RepositoryException e) { - throw new IOException(e); - } - } - - /** - * Retrieve all model ids currently in memory in long and short form.
- * - * @return set of modelids. - * @throws IOException - */ - public Set getCurrentModelIds() throws IOException { - return new HashSet(modelMap.keySet()); - } - - /** - * Retrieve a collection of all available model ids.
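The three id lookups differ only in scope, which a short sketch makes explicit:

    Set<IRI> stored = m3.getStoredModelIds();     // named graphs present in the journal (possibly not loaded)
    Set<IRI> loaded = m3.getCurrentModelIds();    // models currently held in modelMap
    Set<IRI> all = m3.getAvailableModelIds();     // union of the two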
- * Note: Models may not be loaded at this point. - * - * @return set of modelids. - * @throws IOException - */ - public Set getAvailableModelIds() throws IOException { - Set allModelIds = new HashSet<>(); - allModelIds.addAll(this.getStoredModelIds()); - allModelIds.addAll(this.getCurrentModelIds()); - return allModelIds; - } - - public Map> getAllModelAnnotations() throws IOException { - Map> annotations = new HashMap<>(); - // First get annotations from all the stored ontologies - try { - BigdataSailRepositoryConnection connection = repo.getReadOnlyConnection(); - try { - String query = "PREFIX owl: " + - "PREFIX rdf: " + - "SELECT ?model ?p ?o " + - "WHERE { " + - "?model a owl:Ontology . " + - "?model ?p ?o . " + - "FILTER(?p NOT IN (owl:imports, rdf:type, )) " + - "} "; - TupleQuery tupleQuery = connection.prepareTupleQuery(QueryLanguage.SPARQL, query); - TupleQueryResult result = tupleQuery.evaluate(); - OWLDataFactory factory = OWLManager.getOWLDataFactory(); - while (result.hasNext()) { - BindingSet binding = result.next(); - Value model = binding.getValue("model"); - Value predicate = binding.getValue("p"); - String value = binding.getValue("o").stringValue(); - if ((model instanceof URI) && (predicate instanceof URI)) { - IRI modelId = IRI.create(((URI)model).toString()); - OWLAnnotationProperty property = factory - .getOWLAnnotationProperty(IRI.create(((URI)predicate).toString())); - OWLAnnotation annotation = factory.getOWLAnnotation(property, factory.getOWLLiteral(value)); - Set modelAnnotations = annotations.getOrDefault(modelId, new HashSet<>()); - modelAnnotations.add(annotation); - annotations.put(modelId, modelAnnotations); - } - } - } catch (MalformedQueryException e) { - throw new IOException(e); - } catch (QueryEvaluationException e) { - throw new IOException(e); - } finally { - connection.close(); - } - } catch (RepositoryException e) { - throw new IOException(e); - } - // Next get annotations from ontologies that may not be stored, replacing any stored annotations - modelMap.values().stream().filter(mc -> mc.isModified()).forEach(mc -> { - annotations.put(mc.getModelId(), mc.getAboxOntology().getAnnotations()); - }); - return annotations; - } - - public QueryResult executeSPARQLQuery(String queryText, int timeout) throws MalformedQueryException, QueryEvaluationException, RepositoryException { - BigdataSailRepositoryConnection connection = repo.getReadOnlyConnection(); - try { - List tokens = QueryPrologLexer.lex(queryText); - Set declaredPrefixes = tokens.stream().filter(token -> token.getType().equals(QueryPrologLexer.TokenType.PREFIX)).map(token -> token.getStringValue()).collect(Collectors.toSet()); - StringBuffer queryWithDefaultPrefixes = new StringBuffer(); - for (Entry entry : getCuriHandler().getMappings().entrySet()) { - if (!declaredPrefixes.contains(entry.getKey())) { - queryWithDefaultPrefixes.append("PREFIX " + entry.getKey() + ": <" + entry.getValue() + ">"); - queryWithDefaultPrefixes.append("\n"); - } - } - queryWithDefaultPrefixes.append(queryText); - Query query = connection.prepareQuery(QueryLanguage.SPARQL, queryWithDefaultPrefixes.toString()); - query.setMaxQueryTime(timeout); - if (query instanceof TupleQuery) { - TupleQuery tupleQuery = (TupleQuery) query; - return tupleQuery.evaluate(); - } else if (query instanceof GraphQuery) { - GraphQuery graphQuery = (GraphQuery) query; - return graphQuery.evaluate(); - } else if (query instanceof BooleanQuery) { - throw new UnsupportedOperationException("Unsupported query type."); //FIXME - } else { - 
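To make the prefix handling in executeSPARQLQuery concrete: every CURIE prefix known to the CurieHandler but not declared in the incoming query is prepended as a PREFIX line, so callers can use prefixed names directly. A sketch with one hypothetical mapping:

    // Hedged illustration only; assumes the handler maps "GO" -> "http://purl.obolibrary.org/obo/GO_".
    String queryText = "SELECT ?s WHERE { ?s a GO:0003674 }";
    // executeSPARQLQuery then effectively evaluates:
    String expanded = "PREFIX GO: <http://purl.obolibrary.org/obo/GO_>\n" + queryText;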
throw new UnsupportedOperationException("Unsupported query type."); - } - } finally { - connection.close(); - } - } - - public QueryResult executeSPARQLQueryWithoutPrefixManipulation(String queryText, int timeout) throws MalformedQueryException, QueryEvaluationException, RepositoryException { - BigdataSailRepositoryConnection connection = repo.getReadOnlyConnection(); - try { - Query query = connection.prepareQuery(QueryLanguage.SPARQL, queryText.toString()); - query.setMaxQueryTime(timeout); - if (query instanceof TupleQuery) { - TupleQuery tupleQuery = (TupleQuery) query; - return tupleQuery.evaluate(); - } else if (query instanceof GraphQuery) { - GraphQuery graphQuery = (GraphQuery) query; - return graphQuery.evaluate(); - } else if (query instanceof BooleanQuery) { - throw new UnsupportedOperationException("Unsupported query type."); //FIXME - } else { - throw new UnsupportedOperationException("Unsupported query type."); - } - } finally { - connection.close(); - } - } - - @Override - public void loadModel(IRI modelId, boolean isOverride) throws OWLOntologyCreationException { - if (modelMap.containsKey(modelId)) { - if (!isOverride) { - throw new OWLOntologyCreationException("Model already exists: " + modelId); - } - unlinkModel(modelId); - } - try { - BigdataSailRepositoryConnection connection = repo.getReadOnlyConnection(); - try { - RepositoryResult graphs = connection.getContextIDs(); - if (!Iterations.asSet(graphs).contains(new URIImpl(modelId.toString()))) { - throw new OWLOntologyCreationException("No such model in datastore: " + modelId); - } - graphs.close(); - RepositoryResult statements = - connection.getStatements(null, null, null, false, new URIImpl(modelId.toString())); - //setting minimal = false will load the abox with the tbox ontology manager, allowing for OWL understanding of tbox content - boolean minimal = false; - OWLOntology abox = loadOntologyDocumentSource(new RioMemoryTripleSource(statements), minimal); - statements.close(); - abox = postLoadFileFilter(abox); - ModelContainer model = addModel(modelId, abox); - } finally { - connection.close(); - } - } catch (RepositoryException e) { - throw new OWLOntologyCreationException(e); - } - } - - @Override - public OWLOntology loadModelABox(IRI modelId) throws OWLOntologyCreationException { - return loadModelABox(modelId, null); - } - @Override - public OWLOntology loadModelABox(IRI modelId, OWLOntologyManager manager) throws OWLOntologyCreationException { - LOG.info("Load model abox: " + modelId + " from database"); - try { - BigdataSailRepositoryConnection connection = repo.getReadOnlyConnection(); - try { - //TODO repeated code with loadModel - RepositoryResult graphs = connection.getContextIDs(); - if (!Iterations.asSet(graphs).contains(new URIImpl(modelId.toString()))) { - throw new OWLOntologyCreationException("No such model in datastore: " + modelId); - } - graphs.close(); - RepositoryResult statements = - connection.getStatements(null, null, null, false, new URIImpl(modelId.toString())); - //setting minimal to true will give an OWL abox with triples that won't be connected to the tbox, hence e.g. object properties might not be recognized. 
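The difference the two "minimal" comments describe comes down to which OWLOntologyManager receives the parsed triples; a usage sketch, with modelId standing in for a stored model IRI:

    m3.loadModel(modelId, false);                      // minimal = false: ABox shares the tbox manager (isOverride = false)
    OWLOntology detached = m3.loadModelABox(modelId);  // minimal = true: standalone ABox, tbox terms may come back untyped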
- boolean minimal = true; - OWLOntology abox; - if(manager ==null) { - abox = loadOntologyDocumentSource(new RioMemoryTripleSource(statements), minimal); - } else { - abox = loadOntologyDocumentSource(new RioMemoryTripleSource(statements), minimal, manager); - } - - statements.close(); - abox = postLoadFileFilter(abox); - return abox; - } finally { - connection.close(); - } - } catch (RepositoryException e) { - throw new OWLOntologyCreationException(e); - } - } - - private OWLOntology postLoadFileFilter(OWLOntology model) { - for (PostLoadOntologyFilter filter : postLoadOntologyFilters) { - model = filter.filter(model); - } - return model; - } - - public static interface PostLoadOntologyFilter { - - OWLOntology filter(OWLOntology model); - } - - public void addPostLoadOntologyFilter(PostLoadOntologyFilter filter) { - if (filter != null) { - postLoadOntologyFilters.add(filter); - } - } - - /** - * Imports ontology RDF directly to database. Will remove any import statements in the ontology (because GO-CAMs should not have any as of now) - * @param file - * @throws OWLOntologyCreationException - * @throws IOException - * @throws RepositoryException - */ - public String importModelToDatabase(File file, boolean skipMarkedDelete) throws OWLOntologyCreationException, RepositoryException, IOException, RDFParseException, RDFHandlerException { - final boolean delete; - if (skipMarkedDelete) { - delete = scanForIsDelete(file); - } else { - delete = false; - } - String modeliri = null; - if (!delete) { - java.util.Optional ontIRIOpt = scanForOntologyIRI(file).map(id -> new URIImpl(id)); - if (ontIRIOpt.isPresent()) { - java.util.Optional importOpt = scanForImport(file).map(id -> new URIImpl(id)); - if(importOpt.isPresent()) { - modeliri = ontIRIOpt.get().stringValue(); - //need to remove the imports before loading. 
- //if the imports are large, this gets slow - //consider 1) loading the model as below 2) running a SPARQL update to get rid of the imports - OWLOntologyManager ontman = OWLManager.createOWLOntologyManager(); - OWLOntology cam = ontman.loadOntologyFromOntologyDocument(file); - Set imports = cam.getImportsDeclarations(); - for(OWLImportsDeclaration impdec : imports) { - RemoveImport rm = new RemoveImport(cam, impdec); - ontman.applyChange(rm); - } - //write it - this.writeModelToDatabase(cam, IRI.create(ontIRIOpt.get().stringValue())); - }else { //otherwise just load it all up as rdf (faster because avoids owl api) - synchronized(repo) { - final BigdataSailRepositoryConnection connection = repo.getUnisolatedConnection(); - try { - connection.begin(); - try { - URI graph = ontIRIOpt.get(); - connection.clear(graph); - //FIXME Turtle format is hard-coded here - if(file.getName().endsWith(".ttl")) { - connection.add(file, "", RDFFormat.TURTLE, graph); - }else if(file.getName().endsWith(".owl")) { - connection.add(file, "", RDFFormat.RDFXML, graph); - } - connection.commit(); - modeliri = graph.toString(); - } catch (Exception e) { - connection.rollback(); - throw e; - } - } finally { - connection.close(); - } - } - } - }else { - throw new OWLOntologyCreationException("Detected anonymous ontology; must have IRI"); - } - }else { - System.err.println("skipping "+file.getName()); - } - return modeliri; - - } - - /** - * checks an OWLRDF (ttl) file for owl import statements - * @param file - * @return - * @throws RDFParseException - * @throws RDFHandlerException - * @throws IOException - */ - private java.util.Optional scanForImport(File file) throws RDFParseException, RDFHandlerException, IOException { - RDFHandlerBase handler = new RDFHandlerBase() { - public void handleStatement(Statement statement) { - if (statement.getPredicate().stringValue().equals("http://www.w3.org/2002/07/owl#imports")) throw new FoundTripleException(statement); - } - }; - InputStream inputStream = new FileInputStream(file); - try { - //FIXME Turtle format is hard-coded here - RDFParser parser = Rio.createParser(RDFFormat.RDFXML); - if(file.getName().endsWith(".ttl")) { - parser = Rio.createParser(RDFFormat.TURTLE); - } - parser.setRDFHandler(handler); - parser.parse(inputStream, ""); - // If an import triple is found, it will be thrown out - // in an exception. Otherwise, return empty. - return java.util.Optional.empty(); - } catch (FoundTripleException fte) { - Statement statement = fte.getStatement(); - return java.util.Optional.of(statement.getObject().stringValue()); - } finally { - inputStream.close(); - } - } - - /** - * Tries to efficiently find the ontology IRI triple without loading the whole file. 
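The comment above suggests, as a faster alternative for models with heavyweight imports, loading the RDF as-is and then stripping the owl:imports triples with a SPARQL update, using the same prepareUpdate pattern this class already uses for taxon metadata. A hedged sketch (modelIri is a placeholder, rollback handling and checked exceptions elided):

    String dropImports = "DELETE WHERE { GRAPH <" + modelIri + "> { "
            + "?s <http://www.w3.org/2002/07/owl#imports> ?o } }";
    synchronized (repo) {
        BigdataSailRepositoryConnection conn = repo.getUnisolatedConnection();
        try {
            conn.begin();
            conn.prepareUpdate(QueryLanguage.SPARQL, dropImports).execute();
            conn.commit();
        } finally {
            conn.close();
        }
    }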
- * @throws IOException - * @throws RDFHandlerException - * @throws RDFParseException - */ - public java.util.Optional scanForOntologyIRI(File file) throws RDFParseException, RDFHandlerException, IOException { - RDFHandlerBase handler = new RDFHandlerBase() { - public void handleStatement(Statement statement) { - if (statement.getObject().stringValue().equals("http://www.w3.org/2002/07/owl#Ontology") && - statement.getPredicate().stringValue().equals("http://www.w3.org/1999/02/22-rdf-syntax-ns#type")) throw new FoundTripleException(statement); - } - }; - InputStream inputStream = new FileInputStream(file); - try { - //FIXME Turtle format is hard-coded here - RDFParser parser = Rio.createParser(RDFFormat.RDFXML); - if(file.getName().endsWith(".ttl")) { - parser = Rio.createParser(RDFFormat.TURTLE); - } - parser.setRDFHandler(handler); - parser.parse(inputStream, ""); - // If an ontology IRI triple is found, it will be thrown out - // in an exception. Otherwise, return empty. - return java.util.Optional.empty(); - } catch (FoundTripleException fte) { - Statement statement = fte.getStatement(); - if (statement.getSubject() instanceof BNode ) { - LOG.warn("Blank node subject for ontology triple: " + statement); - return java.util.Optional.empty(); - } else { - return java.util.Optional.of(statement.getSubject().stringValue()); - } - } finally { - inputStream.close(); - } - } - - private boolean scanForIsDelete(File file) throws RDFParseException, RDFHandlerException, IOException { - RDFHandlerBase handler = new RDFHandlerBase() { - - public void handleStatement(Statement statement) { - if (statement.getPredicate().stringValue().equals(AnnotationShorthand.modelstate.getAnnotationProperty().toString()) && - statement.getObject().stringValue().equals("delete")) throw new FoundTripleException(statement); - } - }; - InputStream inputStream = new FileInputStream(file); - try { - //FIXME Turtle format is hard-coded here - RDFParser parser = Rio.createParser(RDFFormat.TURTLE); - parser.setRDFHandler(handler); - parser.parse(inputStream, ""); - // If an ontology IRI triple is found, it will be thrown out - // in an exception. Otherwise, return false. - return false; - } catch (FoundTripleException fte) { - return true; - } finally { - inputStream.close(); - } - } - - private static class FoundTripleException extends RuntimeException { - - private static final long serialVersionUID = 8366509854229115430L; - private final Statement statement; - - public FoundTripleException(Statement statement) { - this.statement = statement; - } - - public Statement getStatement() { - return this.statement; - } - } - - private static class EmptyOntologyIRIMapper implements OWLOntologyIRIMapper { - - private static final long serialVersionUID = 8432563430320023805L; - - public static IRI emptyOntologyIRI = IRI.create("http://example.org/empty"); - - @Override - public IRI getDocumentIRI(IRI ontologyIRI) { - return emptyOntologyIRI; - } - - } - - /** - * Export all models to disk. - * - * @throws OWLOntologyStorageException - * @throws OWLOntologyCreationException - * @throws IOException - */ - public void dumpAllStoredModels() throws OWLOntologyStorageException, OWLOntologyCreationException, IOException { - File folder = new File(this.pathToExportFolder); - for (IRI modelId : this.getStoredModelIds()) { - dumpStoredModel(modelId, folder); - } - } - - /** - * Save a model to disk. 
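dumpStoredModel (below) writes each graph as "<local id>.ttl", i.e. the model IRI with the configured modelIdPrefix stripped, under the given folder; an illustrative call with placeholder values:

    // With modelIdPrefix = "http://model.geneontology.org/" (example value), this writes
    // /data/exports/581e072c00000473.ttl (checked IOException elided).
    m3.dumpStoredModel(IRI.create("http://model.geneontology.org/581e072c00000473"),
            new File("/data/exports"));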
- * - * @throws OWLOntologyStorageException - * @throws OWLOntologyCreationException - * @throws IOException - */ - public void dumpStoredModel(IRI modelId, File folder) throws IOException { - // preliminary checks for the target file - String fileName = StringUtils.replaceOnce(modelId.toString(), modelIdPrefix, "") + ".ttl"; - File targetFile = new File(folder, fileName).getAbsoluteFile(); - if (targetFile.exists()) { - if (targetFile.isFile() == false) { - throw new IOException("For modelId: '"+modelId+"', the resulting path is not a file: " + targetFile.getAbsolutePath()); - } - if (targetFile.canWrite() == false) { - throw new IOException("For modelId: '"+modelId+"', Cannot write to the file: " + targetFile.getAbsolutePath()); - } - } - else { - File targetFolder = targetFile.getParentFile(); - FileUtils.forceMkdir(targetFolder); - } - File tempFile = null; - try { - // create tempFile - String prefix = modelId.toString(); // TODO escape - tempFile = File.createTempFile(prefix, ".ttl"); - try { - BigdataSailRepositoryConnection connection = repo.getReadOnlyConnection(); - OutputStream out = new FileOutputStream(tempFile); - try { - // Workaround for order dependence of RDF reading by OWL API - // Need to output ontology triple first until this bug is fixed: - // https://github.com/owlcs/owlapi/issues/574 - ValueFactory factory = connection.getValueFactory(); - Statement ontologyDeclaration = factory.createStatement(factory.createURI(modelId.toString()), RDF.TYPE, OWL.ONTOLOGY); - Rio.write(Collections.singleton(ontologyDeclaration), out, RDFFormat.TURTLE); - // end workaround - RDFWriter writer = Rio.createWriter(RDFFormat.TURTLE, out); - connection.export(writer, new URIImpl(modelId.toString())); - // copy temp file to the finalFile - FileUtils.copyFile(tempFile, targetFile); - } finally { - out.close(); - connection.close(); - } - } catch (RepositoryException e) { - throw new IOException(e); - } catch (RDFHandlerException e) { - throw new IOException(e); - } - } finally { - // delete temp file - FileUtils.deleteQuietly(tempFile); - } - } - - public void dispose() { - super.dispose(); - try { - if(repo.getSail().isOpen()) { - repo.shutDown(); - } - if(this.getGolego_repo()!=null) { - if(this.getGolego_repo().getGo_lego_repo().getSail().isOpen()) { - getGolego_repo().dispose(); - } - } - } catch (RepositoryException e) { - LOG.error("Failed to shutdown Blazegraph sail.", e); - } - } - - public Map> buildTaxonModelMap() throws IOException { - Map> model_genes = buildModelGeneMap(); - Map> taxon_models = new HashMap>(); - for(String model : model_genes.keySet()) { - Set genes = model_genes.get(model); - Set taxa = this.getGolego_repo().getTaxaByGenes(genes); - for(String taxon : taxa) { - Set models = taxon_models.get(taxon); - if(models==null) { - models = new HashSet(); - } - models.add(model); - taxon_models.put(taxon, models); - } - } - return taxon_models; - } - - public Map> buildModelGeneMap(){ - Map> model_genes = new HashMap>(); - TupleQueryResult result; - String sparql = "SELECT ?id (GROUP_CONCAT(DISTINCT ?type;separator=\";\") AS ?types) WHERE {\n" + - " GRAPH ?id { \n" + - "?i rdf:type ?type .\n" + - "FILTER (?type != \n" + - " && ?type != \n" + - " && ?type != \n" + - " && ?type != \n" + - " && ?type != \n" + - " && ?type != \n" + - " && ?type != ) . 
\n" + - "FILTER (!regex(str(?type), \"http://purl.obolibrary.org/obo/\" ) ) \n" + - " }\n" + - " } \n" + - " \n" + - "GROUP BY ?id"; - try { - result = (TupleQueryResult) executeSPARQLQueryWithoutPrefixManipulation(sparql, 1000); - while(result.hasNext()) { - BindingSet bs = result.next(); - String model = bs.getBinding("id").getValue().stringValue(); - String genes = bs.getBinding("types").getValue().stringValue(); - Set g = new HashSet(); - if(genes!=null) { - String[] geness = genes.split(";"); - for(String gene : geness) { - g.add(gene); - } - } - model_genes.put(model, g); - } - } catch (MalformedQueryException | QueryEvaluationException | RepositoryException e) { - e.printStackTrace(); - } - return model_genes; - } - - public Set getTaxonsForModel(String model_id) throws IOException { - Set genes = getModelGenes(model_id); - if(genes.isEmpty()) { - return null; - } - Set taxa = this.getGolego_repo().getTaxaByGenes(genes); - return taxa; - - } - - public Set getModelGenes(String model_id){ - Set g = new HashSet(); - TupleQueryResult result; - String sparql = "SELECT ?type WHERE {\n" + - " GRAPH <"+model_id+"> { \n" + - " ?i rdf:type ?type .\n" + - "FILTER (?type != \n" + - " && ?type != \n" + - " && ?type != \n" + - " && ?type != \n" + - " && ?type != \n" + - " && ?type != \n" + - " && ?type != ) . \n" + - //this one cuts out all the reacto genes - // "FILTER (!regex(str(?type), \"http://purl.obolibrary.org/obo/\" ) ) \n" + - //this will probably let a few past but the effect would only be a slight slow down when looking up taxa - "FILTER (!regex(str(?type), \"http://purl.obolibrary.org/obo/ECO_\" ) ) . \n" + - "FILTER (!regex(str(?type), \"http://purl.obolibrary.org/obo/GO_\" ) ) " + - " }\n" + - " } \n" + - " \n"; - try { - result = (TupleQueryResult) executeSPARQLQueryWithoutPrefixManipulation(sparql, 10); - - while(result.hasNext()) { - BindingSet bs = result.next(); - String gene = bs.getBinding("type").getValue().stringValue(); - g.add(gene); - } - } catch (MalformedQueryException | QueryEvaluationException | RepositoryException e) { - e.printStackTrace(); - } - return g; - } - - - public void addTaxonMetadata() throws IOException { - Map> taxon_models = buildTaxonModelMap(); - LOG.info("Ready to update "+taxon_models.keySet().size()+" "+taxon_models.keySet()); - for(String taxon : taxon_models.keySet()) { - LOG.info("Updating models in taxon "+taxon); - Set models = taxon_models.get(taxon); - models.stream().parallel().forEach(model -> { - //fine for a few thousand models, but ends up eating massive ram for many - //addTaxonWithOWL(IRI.create(model), IRI.create(taxon)); - try { - addTaxonToDatabaseWithSparql(IRI.create(model), IRI.create(taxon)); - } catch (RepositoryException | UpdateExecutionException | MalformedQueryException e) { - // TODO Auto-generated catch block - e.printStackTrace(); - } catch (InterruptedException e) { - // TODO Auto-generated catch block - e.printStackTrace(); - } - }); - } - } - - //now try with sparql insert - public int addTaxonToDatabaseWithSparql(IRI model_iri, IRI taxon_iri) throws RepositoryException, UpdateExecutionException, MalformedQueryException, InterruptedException { - int changes = 0; - String update = - "INSERT DATA\n" + - "{ GRAPH <"+model_iri.toString()+"> { "+ - " <"+model_iri.toString()+"> <"+BlazegraphOntologyManager.in_taxon_uri+"> <"+taxon_iri.toString()+">" + - "} }"; - - synchronized(repo) { - final BigdataSailRepositoryConnection conn = repo.getUnisolatedConnection(); - try { - conn.begin(); - BlazegraphMutationCounter 
counter = new BlazegraphMutationCounter(); - conn.addChangeLog(counter); - conn.prepareUpdate(QueryLanguage.SPARQL, update).execute(); - changes = counter.mutationCount(); - conn.removeChangeLog(counter); - conn.commit(); - } finally { - conn.close(); - } - } - return changes; - } + private static Logger LOG = Logger + .getLogger(BlazegraphMolecularModelManager.class); + + boolean isPrecomputePropertyClassCombinations = false; + + final String pathToOWLStore; + final String pathToExportFolder; + private final BigdataSailRepository repo; + private final CurieHandler curieHandler; + + private final String modelIdPrefix; + + OWLDocumentFormat ontologyFormat = new TurtleDocumentFormat(); + + private final List preFileSaveHandlers = new ArrayList(); + private final List postLoadOntologyFilters = new ArrayList(); + + + /** + * @param tbox + * @param modelIdPrefix + * @param pathToJournal Path to Blazegraph journal file to use. + * Only one instance of Blazegraph can use this file at a time. + * @throws OWLOntologyCreationException + * @throws IOException + */ + public BlazegraphMolecularModelManager(OWLOntology tbox, CurieHandler curieHandler, String modelIdPrefix, String pathToJournal, String pathToExportFolder, String pathToOntologyJournal, boolean downloadOntologyJournal) + throws OWLOntologyCreationException, IOException { + super(tbox, pathToOntologyJournal, downloadOntologyJournal); + if (curieHandler == null) { + LOG.error("curie handler required for blazegraph model manager startup "); + System.exit(-1); + } else if (curieHandler.getMappings() == null) { + LOG.error("curie handler WITH MAPPINGS required for blazegraph model manager startup "); + System.exit(-1); + } + this.modelIdPrefix = modelIdPrefix; + this.curieHandler = curieHandler; + this.pathToOWLStore = pathToJournal; + this.pathToExportFolder = pathToExportFolder; + this.repo = initializeRepository(this.pathToOWLStore); + } + + /** + * Note this may move to an implementation-specific subclass in future + * + * @return path to owl on server + */ + public String getPathToOWLStore() { + return pathToOWLStore; + } + + /** + * @return the curieHandler + */ + public CurieHandler getCuriHandler() { + return curieHandler; + } + + private BigdataSailRepository initializeRepository(String pathToJournal) { + try { + Properties properties = new Properties(); + properties.load(this.getClass().getResourceAsStream("blazegraph.properties")); + properties.setProperty(Options.FILE, pathToJournal); + BigdataSail sail = new BigdataSail(properties); + BigdataSailRepository repository = new BigdataSailRepository(sail); + + repository.initialize(); + return repository; + } catch (RepositoryException e) { + LOG.fatal("Could not create Blazegraph sail", e); + return null; + } catch (IOException e) { + LOG.fatal("Could not create Blazegraph sail", e); + return null; + } + } + + /** + * Generates a blank model + * + * @param metadata + * @return modelId + * @throws OWLOntologyCreationException + */ + public ModelContainer generateBlankModel(METADATA metadata) + throws OWLOntologyCreationException { + + // Create an arbitrary unique ID and add it to the system. 
+ IRI modelId = generateId(modelIdPrefix); + if (modelMap.containsKey(modelId)) { + throw new OWLOntologyCreationException( + "A model already exists for this db: " + modelId); + } + LOG.info("Generating blank model for new modelId: " + modelId); + + // create empty ontology, use model id as ontology IRI + final OWLOntologyManager m = tbox.getOWLOntologyManager(); + OWLOntology abox = null; + ModelContainer model = null; + try { + abox = m.createOntology(modelId); + // generate model + model = new ModelContainer(modelId, tbox, abox); + } catch (OWLOntologyCreationException exception) { + if (abox != null) { + m.removeOntology(abox); + } + throw exception; + } + // add to internal map + modelMap.put(modelId, model); + return model; + } + + /** + * Save all models to disk. The optional annotations may be used to set + * saved_by and other meta data. + * + * @param annotations + * @param metadata + * @throws OWLOntologyStorageException + * @throws OWLOntologyCreationException + * @throws IOException + * @throws RepositoryException + * @throws UnknownIdentifierException + */ + public void saveAllModels(Set annotations, METADATA metadata) + throws OWLOntologyStorageException, OWLOntologyCreationException, + IOException, RepositoryException, UnknownIdentifierException { + for (Entry entry : modelMap.entrySet()) { + saveModel(entry.getValue(), annotations, metadata); + } + } + + /** + * Save a model to the database. + * + * @param m + * @param annotations + * @param metadata + * @throws OWLOntologyStorageException + * @throws OWLOntologyCreationException + * @throws IOException + * @throws RepositoryException + * @throws UnknownIdentifierException + */ + public void saveModel(ModelContainer m, + Set annotations, METADATA metadata) + throws OWLOntologyStorageException, OWLOntologyCreationException, + IOException, RepositoryException, UnknownIdentifierException { + IRI modelId = m.getModelId(); + OWLOntology ont2save = m.getAboxOntology(); + Set taxa = getTaxonsForModel(modelId.toString()); + if (taxa != null) { + for (String taxon : taxa) { + ont2save = getGolego_repo().addTaxonModelMetaData(ont2save, IRI.create(taxon)); + } + } + final OWLOntology ont = ont2save; + final OWLOntologyManager manager = ont.getOWLOntologyManager(); + List changes = preSaveFileHandler(ont); + synchronized (ont) { + try { + this.writeModelToDatabase(ont, modelId); + // reset modified flag for abox after successful save + m.setAboxModified(false); + } finally { + if (changes != null) { + List invertedChanges = ReverseChangeGenerator + .invertChanges(changes); + if (invertedChanges != null && !invertedChanges.isEmpty()) { + manager.applyChanges(invertedChanges); + } + } + } + } + } + + + private void writeModelToDatabase(OWLOntology model, IRI modelId) throws RepositoryException, IOException { + // Only one thread at a time can use the unisolated connection. 
+ synchronized (repo) { + final BigdataSailRepositoryConnection connection = repo.getUnisolatedConnection(); + try { + connection.begin(); + try { + URI graph = new URIImpl(modelId.toString()); + connection.clear(graph); + StatementCollector collector = new StatementCollector(); + RioRenderer renderer = new RioRenderer(model, collector, null); + renderer.render(); + connection.add(collector.getStatements(), graph); + connection.commit(); + } catch (Exception e) { + connection.rollback(); + throw e; + } + } finally { + connection.close(); + } + } + } + + private List preSaveFileHandler(OWLOntology model) throws UnknownIdentifierException { + List allChanges = null; + for (PreFileSaveHandler handler : preFileSaveHandlers) { + List changes = handler.handle(model); + if (changes != null && !changes.isEmpty()) { + if (allChanges == null) { + allChanges = new ArrayList( + changes.size()); + } + allChanges.addAll(changes); + } + } + return allChanges; + } + + public static interface PreFileSaveHandler { + + public List handle(OWLOntology model) throws UnknownIdentifierException; + + } + + public void addPreFileSaveHandler(PreFileSaveHandler handler) { + if (handler != null) { + preFileSaveHandlers.add(handler); + } + } + + /** + * Export the ABox for the given modelId in the default + * {@link OWLDocumentFormat}. + * + * @param model + * @return modelContent + * @throws OWLOntologyStorageException + */ + public String exportModel(ModelContainer model) + throws OWLOntologyStorageException { + return exportModel(model, ontologyFormat); + } + + /** + * Export the ABox for the given modelId in the given ontology format.
+ * Warning: The mapping from String to {@link OWLDocumentFormat} does not + * map every format! + * + * @param model + * @param format + * @return modelContent + * @throws OWLOntologyStorageException + */ + public String exportModel(ModelContainer model, String format) + throws OWLOntologyStorageException { + OWLDocumentFormat ontologyFormat = getOWLOntologyFormat(format); + if (ontologyFormat == null) { + ontologyFormat = this.ontologyFormat; + } + + return exportModel(model, ontologyFormat); + } + + private OWLDocumentFormat getOWLOntologyFormat(String fmt) { + OWLDocumentFormat ofmt = null; + if (fmt != null) { + fmt = fmt.toLowerCase(); + if (fmt.equals("rdfxml")) + ofmt = new RDFXMLDocumentFormat(); + else if (fmt.equals("owl")) + ofmt = new RDFXMLDocumentFormat(); + else if (fmt.equals("rdf")) + ofmt = new RDFXMLDocumentFormat(); + else if (fmt.equals("owx")) + ofmt = new OWLXMLDocumentFormat(); + else if (fmt.equals("owf")) + ofmt = new FunctionalSyntaxDocumentFormat(); + else if (fmt.equals("owm")) + ofmt = new ManchesterSyntaxDocumentFormat(); + } + return ofmt; + } + + /** + * Retrieve a collection of all file/stored model ids found in the repo.
+     * Note: Models may not be loaded at this point.
+     *
+     * @return set of model ids.
+     * @throws IOException
+     */
+    public Set<IRI> getStoredModelIds() throws IOException {
+        try {
+            BigdataSailRepositoryConnection connection = repo.getReadOnlyConnection();
+            try {
+                RepositoryResult<Resource> graphs = connection.getContextIDs();
+                Set<IRI> modelIds = new HashSet<>();
+                while (graphs.hasNext()) {
+                    modelIds.add(IRI.create(graphs.next().stringValue()));
+                }
+                graphs.close();
+                return Collections.unmodifiableSet(modelIds);
+            } finally {
+                connection.close();
+            }
+        } catch (RepositoryException e) {
+            throw new IOException(e);
+        }
+    }
+
+    /**
+     * Retrieve all model ids currently in memory in long and short form.
+     *
+     * @return set of model ids.
+     * @throws IOException
+     */
+    public Set<IRI> getCurrentModelIds() throws IOException {
+        return new HashSet<>(modelMap.keySet());
+    }
+
+    /**
+     * Retrieve a collection of all available model ids.
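+     * <p>Effectively the union of {@link #getStoredModelIds()} and {@link #getCurrentModelIds()},
+     * as implemented below.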
+ * Note: Models may not be loaded at this point. + * + * @return set of modelids. + * @throws IOException + */ + public Set getAvailableModelIds() throws IOException { + Set allModelIds = new HashSet<>(); + allModelIds.addAll(this.getStoredModelIds()); + allModelIds.addAll(this.getCurrentModelIds()); + return allModelIds; + } + + public Map> getAllModelAnnotations() throws IOException { + Map> annotations = new HashMap<>(); + // First get annotations from all the stored ontologies + try { + BigdataSailRepositoryConnection connection = repo.getReadOnlyConnection(); + try { + String query = "PREFIX owl: " + + "PREFIX rdf: " + + "SELECT ?model ?p ?o " + + "WHERE { " + + "?model a owl:Ontology . " + + "?model ?p ?o . " + + "FILTER(?p NOT IN (owl:imports, rdf:type, )) " + + "} "; + TupleQuery tupleQuery = connection.prepareTupleQuery(QueryLanguage.SPARQL, query); + TupleQueryResult result = tupleQuery.evaluate(); + OWLDataFactory factory = OWLManager.getOWLDataFactory(); + while (result.hasNext()) { + BindingSet binding = result.next(); + Value model = binding.getValue("model"); + Value predicate = binding.getValue("p"); + String value = binding.getValue("o").stringValue(); + if ((model instanceof URI) && (predicate instanceof URI)) { + IRI modelId = IRI.create(((URI) model).toString()); + OWLAnnotationProperty property = factory + .getOWLAnnotationProperty(IRI.create(((URI) predicate).toString())); + OWLAnnotation annotation = factory.getOWLAnnotation(property, factory.getOWLLiteral(value)); + Set modelAnnotations = annotations.getOrDefault(modelId, new HashSet<>()); + modelAnnotations.add(annotation); + annotations.put(modelId, modelAnnotations); + } + } + } catch (MalformedQueryException e) { + throw new IOException(e); + } catch (QueryEvaluationException e) { + throw new IOException(e); + } finally { + connection.close(); + } + } catch (RepositoryException e) { + throw new IOException(e); + } + // Next get annotations from ontologies that may not be stored, replacing any stored annotations + modelMap.values().stream().filter(mc -> mc.isModified()).forEach(mc -> { + annotations.put(mc.getModelId(), mc.getAboxOntology().getAnnotations()); + }); + return annotations; + } + + public QueryResult executeSPARQLQuery(String queryText, int timeout) throws MalformedQueryException, QueryEvaluationException, RepositoryException { + BigdataSailRepositoryConnection connection = repo.getReadOnlyConnection(); + try { + List tokens = QueryPrologLexer.lex(queryText); + Set declaredPrefixes = tokens.stream().filter(token -> token.getType().equals(QueryPrologLexer.TokenType.PREFIX)).map(token -> token.getStringValue()).collect(Collectors.toSet()); + StringBuffer queryWithDefaultPrefixes = new StringBuffer(); + for (Entry entry : getCuriHandler().getMappings().entrySet()) { + if (!declaredPrefixes.contains(entry.getKey())) { + queryWithDefaultPrefixes.append("PREFIX " + entry.getKey() + ": <" + entry.getValue() + ">"); + queryWithDefaultPrefixes.append("\n"); + } + } + queryWithDefaultPrefixes.append(queryText); + Query query = connection.prepareQuery(QueryLanguage.SPARQL, queryWithDefaultPrefixes.toString()); + query.setMaxQueryTime(timeout); + if (query instanceof TupleQuery) { + TupleQuery tupleQuery = (TupleQuery) query; + return tupleQuery.evaluate(); + } else if (query instanceof GraphQuery) { + GraphQuery graphQuery = (GraphQuery) query; + return graphQuery.evaluate(); + } else if (query instanceof BooleanQuery) { + throw new UnsupportedOperationException("Unsupported query type."); //FIXME + } else { + 
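+                // Only three openrdf query forms exist (TupleQuery for SELECT, GraphQuery for
+                // CONSTRUCT/DESCRIBE, BooleanQuery for ASK), so this branch is a defensive fallback.
+                // Caller-side sketch for the SELECT path ("selectQuery" is an assumed variable):
+                //   TupleQueryResult rs = (TupleQueryResult) executeSPARQLQuery(selectQuery, 10);
+                //   while (rs.hasNext()) { BindingSet b = rs.next(); /* b.getValue("...") */ }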
throw new UnsupportedOperationException("Unsupported query type."); + } + } finally { + connection.close(); + } + } + + public QueryResult executeSPARQLQueryWithoutPrefixManipulation(String queryText, int timeout) throws MalformedQueryException, QueryEvaluationException, RepositoryException { + BigdataSailRepositoryConnection connection = repo.getReadOnlyConnection(); + try { + Query query = connection.prepareQuery(QueryLanguage.SPARQL, queryText.toString()); + query.setMaxQueryTime(timeout); + if (query instanceof TupleQuery) { + TupleQuery tupleQuery = (TupleQuery) query; + return tupleQuery.evaluate(); + } else if (query instanceof GraphQuery) { + GraphQuery graphQuery = (GraphQuery) query; + return graphQuery.evaluate(); + } else if (query instanceof BooleanQuery) { + throw new UnsupportedOperationException("Unsupported query type."); //FIXME + } else { + throw new UnsupportedOperationException("Unsupported query type."); + } + } finally { + connection.close(); + } + } + + @Override + public void loadModel(IRI modelId, boolean isOverride) throws OWLOntologyCreationException { + if (modelMap.containsKey(modelId)) { + if (!isOverride) { + throw new OWLOntologyCreationException("Model already exists: " + modelId); + } + unlinkModel(modelId); + } + try { + BigdataSailRepositoryConnection connection = repo.getReadOnlyConnection(); + try { + RepositoryResult graphs = connection.getContextIDs(); + if (!Iterations.asSet(graphs).contains(new URIImpl(modelId.toString()))) { + throw new OWLOntologyCreationException("No such model in datastore: " + modelId); + } + graphs.close(); + RepositoryResult statements = + connection.getStatements(null, null, null, false, new URIImpl(modelId.toString())); + //setting minimal = false will load the abox with the tbox ontology manager, allowing for OWL understanding of tbox content + boolean minimal = false; + OWLOntology abox = loadOntologyDocumentSource(new RioMemoryTripleSource(statements), minimal); + statements.close(); + abox = postLoadFileFilter(abox); + ModelContainer model = addModel(modelId, abox); + } finally { + connection.close(); + } + } catch (RepositoryException e) { + throw new OWLOntologyCreationException(e); + } + } + + @Override + public OWLOntology loadModelABox(IRI modelId) throws OWLOntologyCreationException { + return loadModelABox(modelId, null); + } + + @Override + public OWLOntology loadModelABox(IRI modelId, OWLOntologyManager manager) throws OWLOntologyCreationException { + LOG.info("Load model abox: " + modelId + " from database"); + try { + BigdataSailRepositoryConnection connection = repo.getReadOnlyConnection(); + try { + //TODO repeated code with loadModel + RepositoryResult graphs = connection.getContextIDs(); + if (!Iterations.asSet(graphs).contains(new URIImpl(modelId.toString()))) { + throw new OWLOntologyCreationException("No such model in datastore: " + modelId); + } + graphs.close(); + RepositoryResult statements = + connection.getStatements(null, null, null, false, new URIImpl(modelId.toString())); + //setting minimal to true will give an OWL abox with triples that won't be connected to the tbox, hence e.g. object properties might not be recognized. 
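+            // For example (sketch), a statement such as
+            //   <individual> <http://purl.obolibrary.org/obo/RO_0002333> <gene-product>
+            // uses a property the standalone manager has no declaration for, so callers expecting a
+            // recognized OWLObjectProperty might not find one in the returned abox.
+            // Use loadModel() above when tbox-aware parsing is required.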
+ boolean minimal = true; + OWLOntology abox; + if (manager == null) { + abox = loadOntologyDocumentSource(new RioMemoryTripleSource(statements), minimal); + } else { + abox = loadOntologyDocumentSource(new RioMemoryTripleSource(statements), minimal, manager); + } + + statements.close(); + abox = postLoadFileFilter(abox); + return abox; + } finally { + connection.close(); + } + } catch (RepositoryException e) { + throw new OWLOntologyCreationException(e); + } + } + + private OWLOntology postLoadFileFilter(OWLOntology model) { + for (PostLoadOntologyFilter filter : postLoadOntologyFilters) { + model = filter.filter(model); + } + return model; + } + + public static interface PostLoadOntologyFilter { + + OWLOntology filter(OWLOntology model); + } + + public void addPostLoadOntologyFilter(PostLoadOntologyFilter filter) { + if (filter != null) { + postLoadOntologyFilters.add(filter); + } + } + + /** + * Imports ontology RDF directly to database. Will remove any import statements in the ontology (because GO-CAMs should not have any as of now) + * + * @param file + * @throws OWLOntologyCreationException + * @throws IOException + * @throws RepositoryException + */ + public String importModelToDatabase(File file, boolean skipMarkedDelete) throws OWLOntologyCreationException, RepositoryException, IOException, RDFParseException, RDFHandlerException { + final boolean delete; + if (skipMarkedDelete) { + delete = scanForIsDelete(file); + } else { + delete = false; + } + String modeliri = null; + if (!delete) { + java.util.Optional ontIRIOpt = scanForOntologyIRI(file).map(id -> new URIImpl(id)); + if (ontIRIOpt.isPresent()) { + java.util.Optional importOpt = scanForImport(file).map(id -> new URIImpl(id)); + if (importOpt.isPresent()) { + modeliri = ontIRIOpt.get().stringValue(); + //need to remove the imports before loading. 
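+                    // A sketch of the SPARQL-update alternative mentioned just below (assumes the raw
+                    // RDF was already loaded and "unisolated" is an open unisolated connection):
+                    //   String dropImports = "DELETE WHERE { GRAPH <" + modeliri + "> "
+                    //           + "{ ?s <http://www.w3.org/2002/07/owl#imports> ?o } }";
+                    //   unisolated.prepareUpdate(QueryLanguage.SPARQL, dropImports).execute();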
+ //if the imports are large, this gets slow + //consider 1) loading the model as below 2) running a SPARQL update to get rid of the imports + OWLOntologyManager ontman = OWLManager.createOWLOntologyManager(); + OWLOntology cam = ontman.loadOntologyFromOntologyDocument(file); + Set imports = cam.getImportsDeclarations(); + for (OWLImportsDeclaration impdec : imports) { + RemoveImport rm = new RemoveImport(cam, impdec); + ontman.applyChange(rm); + } + //write it + this.writeModelToDatabase(cam, IRI.create(ontIRIOpt.get().stringValue())); + } else { //otherwise just load it all up as rdf (faster because avoids owl api) + synchronized (repo) { + final BigdataSailRepositoryConnection connection = repo.getUnisolatedConnection(); + try { + connection.begin(); + try { + URI graph = ontIRIOpt.get(); + connection.clear(graph); + //FIXME Turtle format is hard-coded here + if (file.getName().endsWith(".ttl")) { + connection.add(file, "", RDFFormat.TURTLE, graph); + } else if (file.getName().endsWith(".owl")) { + connection.add(file, "", RDFFormat.RDFXML, graph); + } + connection.commit(); + modeliri = graph.toString(); + } catch (Exception e) { + connection.rollback(); + throw e; + } + } finally { + connection.close(); + } + } + } + } else { + throw new OWLOntologyCreationException("Detected anonymous ontology; must have IRI"); + } + } else { + System.err.println("skipping " + file.getName()); + } + return modeliri; + + } + + /** + * checks an OWLRDF (ttl) file for owl import statements + * + * @param file + * @return + * @throws RDFParseException + * @throws RDFHandlerException + * @throws IOException + */ + private java.util.Optional scanForImport(File file) throws RDFParseException, RDFHandlerException, IOException { + RDFHandlerBase handler = new RDFHandlerBase() { + public void handleStatement(Statement statement) { + if (statement.getPredicate().stringValue().equals("http://www.w3.org/2002/07/owl#imports")) + throw new FoundTripleException(statement); + } + }; + InputStream inputStream = new FileInputStream(file); + try { + //FIXME Turtle format is hard-coded here + RDFParser parser = Rio.createParser(RDFFormat.RDFXML); + if (file.getName().endsWith(".ttl")) { + parser = Rio.createParser(RDFFormat.TURTLE); + } + parser.setRDFHandler(handler); + parser.parse(inputStream, ""); + // If an import triple is found, it will be thrown out + // in an exception. Otherwise, return empty. + return java.util.Optional.empty(); + } catch (FoundTripleException fte) { + Statement statement = fte.getStatement(); + return java.util.Optional.of(statement.getObject().stringValue()); + } finally { + inputStream.close(); + } + } + + /** + * Tries to efficiently find the ontology IRI triple without loading the whole file. 
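+     * <p>The streaming parse is aborted early by the handler throwing {@code FoundTripleException}
+     * on the first {@code rdf:type owl:Ontology} statement, which is then caught and unwrapped
+     * (sketch of the pattern used below):
+     * <pre>{@code
+     * try {
+     *     parser.parse(inputStream, "");
+     *     return java.util.Optional.empty();   // no ontology triple found
+     * } catch (FoundTripleException fte) {
+     *     return java.util.Optional.of(fte.getStatement().getSubject().stringValue());
+     * }
+     * }</pre>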
+ * + * @throws IOException + * @throws RDFHandlerException + * @throws RDFParseException + */ + public java.util.Optional scanForOntologyIRI(File file) throws RDFParseException, RDFHandlerException, IOException { + RDFHandlerBase handler = new RDFHandlerBase() { + public void handleStatement(Statement statement) { + if (statement.getObject().stringValue().equals("http://www.w3.org/2002/07/owl#Ontology") && + statement.getPredicate().stringValue().equals("http://www.w3.org/1999/02/22-rdf-syntax-ns#type")) + throw new FoundTripleException(statement); + } + }; + InputStream inputStream = new FileInputStream(file); + try { + //FIXME Turtle format is hard-coded here + RDFParser parser = Rio.createParser(RDFFormat.RDFXML); + if (file.getName().endsWith(".ttl")) { + parser = Rio.createParser(RDFFormat.TURTLE); + } + parser.setRDFHandler(handler); + parser.parse(inputStream, ""); + // If an ontology IRI triple is found, it will be thrown out + // in an exception. Otherwise, return empty. + return java.util.Optional.empty(); + } catch (FoundTripleException fte) { + Statement statement = fte.getStatement(); + if (statement.getSubject() instanceof BNode) { + LOG.warn("Blank node subject for ontology triple: " + statement); + return java.util.Optional.empty(); + } else { + return java.util.Optional.of(statement.getSubject().stringValue()); + } + } finally { + inputStream.close(); + } + } + + private boolean scanForIsDelete(File file) throws RDFParseException, RDFHandlerException, IOException { + RDFHandlerBase handler = new RDFHandlerBase() { + + public void handleStatement(Statement statement) { + if (statement.getPredicate().stringValue().equals(AnnotationShorthand.modelstate.getAnnotationProperty().toString()) && + statement.getObject().stringValue().equals("delete")) throw new FoundTripleException(statement); + } + }; + InputStream inputStream = new FileInputStream(file); + try { + //FIXME Turtle format is hard-coded here + RDFParser parser = Rio.createParser(RDFFormat.TURTLE); + parser.setRDFHandler(handler); + parser.parse(inputStream, ""); + // If an ontology IRI triple is found, it will be thrown out + // in an exception. Otherwise, return false. + return false; + } catch (FoundTripleException fte) { + return true; + } finally { + inputStream.close(); + } + } + + private static class FoundTripleException extends RuntimeException { + + private static final long serialVersionUID = 8366509854229115430L; + private final Statement statement; + + public FoundTripleException(Statement statement) { + this.statement = statement; + } + + public Statement getStatement() { + return this.statement; + } + } + + private static class EmptyOntologyIRIMapper implements OWLOntologyIRIMapper { + + private static final long serialVersionUID = 8432563430320023805L; + + public static IRI emptyOntologyIRI = IRI.create("http://example.org/empty"); + + @Override + public IRI getDocumentIRI(IRI ontologyIRI) { + return emptyOntologyIRI; + } + + } + + /** + * Export all models to disk. + * + * @throws OWLOntologyStorageException + * @throws OWLOntologyCreationException + * @throws IOException + */ + public void dumpAllStoredModels() throws OWLOntologyStorageException, OWLOntologyCreationException, IOException { + File folder = new File(this.pathToExportFolder); + for (IRI modelId : this.getStoredModelIds()) { + dumpStoredModel(modelId, folder); + } + } + + /** + * Save a model to disk. 
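+     * <p>Illustrative use (sketch; {@code m3} stands for an initialized manager and {@code modelId}
+     * for a stored model IRI):
+     * <pre>{@code
+     * m3.dumpStoredModel(modelId, new File("exported-models"));   // writes model id minus modelIdPrefix, plus ".ttl"
+     * }</pre>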
+ * + * @throws OWLOntologyStorageException + * @throws OWLOntologyCreationException + * @throws IOException + */ + public void dumpStoredModel(IRI modelId, File folder) throws IOException { + // preliminary checks for the target file + String fileName = StringUtils.replaceOnce(modelId.toString(), modelIdPrefix, "") + ".ttl"; + File targetFile = new File(folder, fileName).getAbsoluteFile(); + if (targetFile.exists()) { + if (targetFile.isFile() == false) { + throw new IOException("For modelId: '" + modelId + "', the resulting path is not a file: " + targetFile.getAbsolutePath()); + } + if (targetFile.canWrite() == false) { + throw new IOException("For modelId: '" + modelId + "', Cannot write to the file: " + targetFile.getAbsolutePath()); + } + } else { + File targetFolder = targetFile.getParentFile(); + FileUtils.forceMkdir(targetFolder); + } + File tempFile = null; + try { + // create tempFile + String prefix = modelId.toString(); // TODO escape + tempFile = File.createTempFile(prefix, ".ttl"); + try { + BigdataSailRepositoryConnection connection = repo.getReadOnlyConnection(); + OutputStream out = new FileOutputStream(tempFile); + try { + // Workaround for order dependence of RDF reading by OWL API + // Need to output ontology triple first until this bug is fixed: + // https://github.com/owlcs/owlapi/issues/574 + ValueFactory factory = connection.getValueFactory(); + Statement ontologyDeclaration = factory.createStatement(factory.createURI(modelId.toString()), RDF.TYPE, OWL.ONTOLOGY); + Rio.write(Collections.singleton(ontologyDeclaration), out, RDFFormat.TURTLE); + // end workaround + RDFWriter writer = Rio.createWriter(RDFFormat.TURTLE, out); + connection.export(writer, new URIImpl(modelId.toString())); + // copy temp file to the finalFile + FileUtils.copyFile(tempFile, targetFile); + } finally { + out.close(); + connection.close(); + } + } catch (RepositoryException e) { + throw new IOException(e); + } catch (RDFHandlerException e) { + throw new IOException(e); + } + } finally { + // delete temp file + FileUtils.deleteQuietly(tempFile); + } + } + + public void dispose() { + super.dispose(); + try { + if (repo.getSail().isOpen()) { + repo.shutDown(); + } + if (this.getGolego_repo() != null) { + if (this.getGolego_repo().getGo_lego_repo().getSail().isOpen()) { + getGolego_repo().dispose(); + } + } + } catch (RepositoryException e) { + LOG.error("Failed to shutdown Blazegraph sail.", e); + } + } + + public Map> buildTaxonModelMap() throws IOException { + Map> model_genes = buildModelGeneMap(); + Map> taxon_models = new HashMap>(); + for (String model : model_genes.keySet()) { + Set genes = model_genes.get(model); + Set taxa = this.getGolego_repo().getTaxaByGenes(genes); + for (String taxon : taxa) { + Set models = taxon_models.get(taxon); + if (models == null) { + models = new HashSet(); + } + models.add(model); + taxon_models.put(taxon, models); + } + } + return taxon_models; + } + + public Map> buildModelGeneMap() { + Map> model_genes = new HashMap>(); + TupleQueryResult result; + String sparql = "SELECT ?id (GROUP_CONCAT(DISTINCT ?type;separator=\";\") AS ?types) WHERE {\n" + + " GRAPH ?id { \n" + + "?i rdf:type ?type .\n" + + "FILTER (?type != \n" + + " && ?type != \n" + + " && ?type != \n" + + " && ?type != \n" + + " && ?type != \n" + + " && ?type != \n" + + " && ?type != ) . 
\n" + + "FILTER (!regex(str(?type), \"http://purl.obolibrary.org/obo/\" ) ) \n" + + " }\n" + + " } \n" + + " \n" + + "GROUP BY ?id"; + try { + result = (TupleQueryResult) executeSPARQLQueryWithoutPrefixManipulation(sparql, 1000); + while (result.hasNext()) { + BindingSet bs = result.next(); + String model = bs.getBinding("id").getValue().stringValue(); + String genes = bs.getBinding("types").getValue().stringValue(); + Set g = new HashSet(); + if (genes != null) { + String[] geness = genes.split(";"); + for (String gene : geness) { + g.add(gene); + } + } + model_genes.put(model, g); + } + } catch (MalformedQueryException | QueryEvaluationException | RepositoryException e) { + e.printStackTrace(); + } + return model_genes; + } + + public Set getTaxonsForModel(String model_id) throws IOException { + Set genes = getModelGenes(model_id); + if (genes.isEmpty()) { + return null; + } + Set taxa = this.getGolego_repo().getTaxaByGenes(genes); + return taxa; + + } + + public Set getModelGenes(String model_id) { + Set g = new HashSet(); + TupleQueryResult result; + String sparql = "SELECT ?type WHERE {\n" + + " GRAPH <" + model_id + "> { \n" + + " ?i rdf:type ?type .\n" + + "FILTER (?type != \n" + + " && ?type != \n" + + " && ?type != \n" + + " && ?type != \n" + + " && ?type != \n" + + " && ?type != \n" + + " && ?type != ) . \n" + + //this one cuts out all the reacto genes + // "FILTER (!regex(str(?type), \"http://purl.obolibrary.org/obo/\" ) ) \n" + + //this will probably let a few past but the effect would only be a slight slow down when looking up taxa + "FILTER (!regex(str(?type), \"http://purl.obolibrary.org/obo/ECO_\" ) ) . \n" + + "FILTER (!regex(str(?type), \"http://purl.obolibrary.org/obo/GO_\" ) ) " + + " }\n" + + " } \n" + + " \n"; + try { + result = (TupleQueryResult) executeSPARQLQueryWithoutPrefixManipulation(sparql, 10); + + while (result.hasNext()) { + BindingSet bs = result.next(); + String gene = bs.getBinding("type").getValue().stringValue(); + g.add(gene); + } + } catch (MalformedQueryException | QueryEvaluationException | RepositoryException e) { + e.printStackTrace(); + } + return g; + } + + + public void addTaxonMetadata() throws IOException { + Map> taxon_models = buildTaxonModelMap(); + LOG.info("Ready to update " + taxon_models.keySet().size() + " " + taxon_models.keySet()); + for (String taxon : taxon_models.keySet()) { + LOG.info("Updating models in taxon " + taxon); + Set models = taxon_models.get(taxon); + models.stream().parallel().forEach(model -> { + //fine for a few thousand models, but ends up eating massive ram for many + //addTaxonWithOWL(IRI.create(model), IRI.create(taxon)); + try { + addTaxonToDatabaseWithSparql(IRI.create(model), IRI.create(taxon)); + } catch (RepositoryException | UpdateExecutionException | MalformedQueryException e) { + // TODO Auto-generated catch block + e.printStackTrace(); + } catch (InterruptedException e) { + // TODO Auto-generated catch block + e.printStackTrace(); + } + }); + } + } + + //now try with sparql insert + public int addTaxonToDatabaseWithSparql(IRI model_iri, IRI taxon_iri) throws RepositoryException, UpdateExecutionException, MalformedQueryException, InterruptedException { + int changes = 0; + String update = + "INSERT DATA\n" + + "{ GRAPH <" + model_iri.toString() + "> { " + + " <" + model_iri.toString() + "> <" + BlazegraphOntologyManager.in_taxon_uri + "> <" + taxon_iri.toString() + ">" + + "} }"; + + synchronized (repo) { + final BigdataSailRepositoryConnection conn = repo.getUnisolatedConnection(); + try { + 
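+                // For a hypothetical model <http://model.geneontology.org/0000> and taxon
+                // NCBITaxon_9606, the update built above reads (sketch):
+                //   INSERT DATA { GRAPH <http://model.geneontology.org/0000> {
+                //     <http://model.geneontology.org/0000>
+                //       <https://w3id.org/biolink/vocab/in_taxon>
+                //       <http://purl.obolibrary.org/obo/NCBITaxon_9606> } }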
conn.begin(); + BlazegraphMutationCounter counter = new BlazegraphMutationCounter(); + conn.addChangeLog(counter); + conn.prepareUpdate(QueryLanguage.SPARQL, update).execute(); + changes = counter.mutationCount(); + conn.removeChangeLog(counter); + conn.commit(); + } finally { + conn.close(); + } + } + return changes; + } } diff --git a/minerva-core/src/main/java/org/geneontology/minerva/BlazegraphOntologyManager.java b/minerva-core/src/main/java/org/geneontology/minerva/BlazegraphOntologyManager.java index d012db49..02b169ec 100644 --- a/minerva-core/src/main/java/org/geneontology/minerva/BlazegraphOntologyManager.java +++ b/minerva-core/src/main/java/org/geneontology/minerva/BlazegraphOntologyManager.java @@ -1,34 +1,18 @@ /** - * + * */ package org.geneontology.minerva; -import java.io.File; -import java.io.FileInputStream; -import java.io.FileOutputStream; -import java.io.IOException; -import java.net.URL; -import java.nio.charset.StandardCharsets; -import java.util.Collection; -import java.util.HashMap; -import java.util.HashSet; -import java.util.Map; -import java.util.Properties; -import java.util.Set; -import java.util.zip.GZIPInputStream; - +import com.bigdata.journal.Options; +import com.bigdata.rdf.sail.BigdataSail; +import com.bigdata.rdf.sail.BigdataSailRepository; +import com.bigdata.rdf.sail.BigdataSailRepositoryConnection; import org.apache.commons.io.IOUtils; -import org.apache.commons.lang3.tuple.Pair; import org.apache.log4j.Logger; import org.openrdf.model.URI; import org.openrdf.model.Value; import org.openrdf.model.impl.URIImpl; -import org.openrdf.query.BindingSet; -import org.openrdf.query.MalformedQueryException; -import org.openrdf.query.QueryEvaluationException; -import org.openrdf.query.QueryLanguage; -import org.openrdf.query.TupleQuery; -import org.openrdf.query.TupleQueryResult; +import org.openrdf.query.*; import org.openrdf.repository.RepositoryException; import org.openrdf.rio.RDFFormat; import org.openrdf.rio.RDFHandlerException; @@ -38,815 +22,819 @@ import org.semanticweb.owlapi.model.*; import org.semanticweb.owlapi.rio.RioRenderer; import org.semanticweb.owlapi.search.EntitySearcher; -import org.semarglproject.vocab.OWL; -import com.bigdata.journal.Options; -import com.bigdata.rdf.sail.BigdataSail; -import com.bigdata.rdf.sail.BigdataSailRepository; -import com.bigdata.rdf.sail.BigdataSailRepositoryConnection; +import java.io.File; +import java.io.FileInputStream; +import java.io.FileOutputStream; +import java.io.IOException; +import java.net.URL; +import java.nio.charset.StandardCharsets; +import java.util.*; +import java.util.zip.GZIPInputStream; /** * @author benjamingood * */ public class BlazegraphOntologyManager { - private static Logger LOG = Logger.getLogger(BlazegraphOntologyManager.class); - private final BigdataSailRepository go_lego_repo; - private final static String public_blazegraph_url = "http://skyhook.berkeleybop.org/blazegraph-go-lego-reacto-neo.jnl.gz"; - //TODO this should probably go somewhere else - like an ontology file - this was missing.. 
- public static String in_taxon_uri = "https://w3id.org/biolink/vocab/in_taxon"; - public static OWLAnnotationProperty in_taxon = OWLManager.getOWLDataFactory().getOWLAnnotationProperty(IRI.create(in_taxon_uri)); - private static final Set root_types; - public final Map> regulatorsToRegulated; - public Map class_depth; - static { - root_types = new HashSet(); - root_types.add("http://purl.obolibrary.org/obo/GO_0008150"); //BP - root_types.add("http://purl.obolibrary.org/obo/GO_0003674"); //MF - root_types.add("http://purl.obolibrary.org/obo/go/extensions/reacto.owl#molecular_event");//ME - root_types.add("http://purl.obolibrary.org/obo/GO_0005575"); //CC - root_types.add("http://purl.obolibrary.org/obo/GO_0032991"); //Complex - root_types.add("http://purl.obolibrary.org/obo/CHEBI_36080"); //protein - root_types.add("http://purl.obolibrary.org/obo/CHEBI_33695"); //information biomacromolecule - root_types.add("http://purl.obolibrary.org/obo/CHEBI_50906"); //chemical role - root_types.add("http://purl.obolibrary.org/obo/CHEBI_24431"); //chemical entity - root_types.add("http://purl.obolibrary.org/obo/UBERON_0001062"); //anatomical entity - root_types.add("http://purl.obolibrary.org/obo/GO_0110165"); //cellular anatomical entity - root_types.add("http://purl.obolibrary.org/obo/CARO_0000000"); // root root anatomical entity - root_types.add("http://purl.obolibrary.org/obo/UBERON_0000105"); // life cycle stage - root_types.add("http://purl.obolibrary.org/obo/PO_0009012"); // plant structure development stage - root_types.add("http://purl.obolibrary.org/obo/ECO_0000000"); //evidence root. - } - - public BlazegraphOntologyManager(String go_lego_repo_file, boolean downloadJournal) throws IOException { - if (!new File(go_lego_repo_file).exists() && downloadJournal) { - LOG.info("No blazegraph tbox journal found at " + go_lego_repo_file + " . 
Downloading from " + public_blazegraph_url + " and putting there."); - URL blazegraph_url = new URL(public_blazegraph_url); - File go_lego_repo_local = new File(go_lego_repo_file); - if (public_blazegraph_url.endsWith(".gz")) { - go_lego_repo_local = new File(go_lego_repo_file + ".gz"); - } - org.apache.commons.io.FileUtils.copyURLToFile(blazegraph_url, go_lego_repo_local); - if (public_blazegraph_url.endsWith(".gz")) { - unGunzipFile(go_lego_repo_file + ".gz", go_lego_repo_file); - } - } - go_lego_repo = initializeRepository(go_lego_repo_file); - class_depth = buildClassDepthMap("http://purl.obolibrary.org/obo/GO_0003674"); - class_depth.putAll(buildClassDepthMap("http://purl.obolibrary.org/obo/GO_0008150")); - class_depth.putAll(buildClassDepthMap("http://purl.obolibrary.org/obo/GO_0005575")); - class_depth.put("http://purl.obolibrary.org/obo/GO_0008150", 0); - class_depth.put("http://purl.obolibrary.org/obo/GO_0003674", 0); - class_depth.put("http://purl.obolibrary.org/obo/GO_0005575", 0); - class_depth.put("http://purl.obolibrary.org/obo/go/extensions/reacto.owl#molecular_event", 0); - regulatorsToRegulated = buildRegulationMap(); - } - - public BigdataSailRepository getGo_lego_repo() { - return go_lego_repo; - } - - public OWLOntology addTaxonModelMetaData(OWLOntology model, IRI taxon_iri) { - OWLOntologyManager ontman = model.getOWLOntologyManager(); - OWLDataFactory df = ontman.getOWLDataFactory(); - OWLAnnotation taxon_anno = df.getOWLAnnotation(in_taxon, taxon_iri); - OWLAxiom taxonannoaxiom = df.getOWLAnnotationAssertionAxiom(model.getOntologyID().getOntologyIRI().get(), taxon_anno); - ontman.addAxiom(model, taxonannoaxiom); - return model; - } - - public void unGunzipFile(String compressedFile, String decompressedFile) { - byte[] buffer = new byte[1024]; - try { - FileInputStream fileIn = new FileInputStream(compressedFile); - GZIPInputStream gZIPInputStream = new GZIPInputStream(fileIn); - FileOutputStream fileOutputStream = new FileOutputStream(decompressedFile); - int bytes_read; - while ((bytes_read = gZIPInputStream.read(buffer)) > 0) { - fileOutputStream.write(buffer, 0, bytes_read); - } - gZIPInputStream.close(); - fileOutputStream.close(); - } catch (IOException ex) { - ex.printStackTrace(); - } - } - - - private BigdataSailRepository initializeRepository(String pathToJournal) { - try { - Properties properties = new Properties(); - properties.load(this.getClass().getResourceAsStream("onto-blazegraph.properties")); - properties.setProperty(Options.FILE, pathToJournal); - BigdataSail sail = new BigdataSail(properties); - BigdataSailRepository repository = new BigdataSailRepository(sail); - - repository.initialize(); - return repository; - } catch (RepositoryException e) { - LOG.fatal("Could not create Blazegraph sail", e); - return null; - } catch (IOException e) { - LOG.fatal("Could not create Blazegraph sail", e); - return null; - } - } - - public void loadRepositoryFromOWLFile(File file, String iri, boolean reset) throws OWLOntologyCreationException, RepositoryException, IOException, RDFParseException, RDFHandlerException { - synchronized(go_lego_repo) { - final BigdataSailRepositoryConnection connection = go_lego_repo.getUnisolatedConnection(); - try { - connection.begin(); - try { - URI graph = new URIImpl(iri); - if(reset) { - connection.clear(graph); - } - if(file.getName().endsWith(".ttl")) { - connection.add(file, "", RDFFormat.TURTLE, graph); - }else if(file.getName().endsWith(".owl")) { - connection.add(file, "", RDFFormat.RDFXML, graph); - } - 
connection.commit(); - } catch (Exception e) { - connection.rollback(); - throw e; - } - } finally { - connection.close(); - } - } - } - - public void loadRepositoryFromOntology(OWLOntology ontology, String iri, boolean reset) throws OWLOntologyCreationException, RepositoryException, IOException, RDFParseException, RDFHandlerException { - synchronized(go_lego_repo) { - final BigdataSailRepositoryConnection connection = go_lego_repo.getUnisolatedConnection(); - try { - connection.begin(); - try { - URI graph = new URIImpl(iri); - if(reset) { - connection.clear(graph); - } - StatementCollector collector = new StatementCollector(); - RioRenderer renderer = new RioRenderer(ontology, collector, null); - renderer.render(); - connection.add(collector.getStatements(), graph); - connection.commit(); - } catch (Exception e) { - connection.rollback(); - throw e; - } - } finally { - connection.close(); - } - } - } - - public Set getAllSuperClasses(String uri) throws IOException { - Set supers = new HashSet(); - try { - BigdataSailRepositoryConnection connection = go_lego_repo.getReadOnlyConnection(); - try { - String query = "PREFIX owl: " + - "PREFIX rdf: " - + "PREFIX rdfs: " + - "SELECT ?super " + - "WHERE { " + - "<"+uri+"> rdfs:subClassOf* ?super . " + - "} "; - TupleQuery tupleQuery = connection.prepareTupleQuery(QueryLanguage.SPARQL, query); - TupleQueryResult result = tupleQuery.evaluate(); - while (result.hasNext()) { - BindingSet binding = result.next(); - Value v = binding.getValue("super"); - //ignore anonymous super classes - if ( v instanceof URI ) { - String superclass = binding.getValue("super").stringValue(); - supers.add(superclass); - } - } - } catch (MalformedQueryException e) { - throw new IOException(e); - } catch (QueryEvaluationException e) { - throw new IOException(e); - } finally { - connection.close(); - } - } catch (RepositoryException e) { - throw new IOException(e); - } - return supers; - } - public Set getAllSubClasses(String uri) throws IOException { - Set supers = new HashSet(); - try { - BigdataSailRepositoryConnection connection = go_lego_repo.getReadOnlyConnection(); - try { - String query = "PREFIX owl: " + - "PREFIX rdf: " - + "PREFIX rdfs: " + - "SELECT ?sub " + - "WHERE { " + - "?sub rdfs:subClassOf* <"+uri+"> . " + - "} "; - TupleQuery tupleQuery = connection.prepareTupleQuery(QueryLanguage.SPARQL, query); - TupleQueryResult result = tupleQuery.evaluate(); - while (result.hasNext()) { - BindingSet binding = result.next(); - Value v = binding.getValue("sub"); - //ignore anonymous sub classes - if ( v instanceof URI ) { - String superclass = binding.getValue("sub").stringValue(); - supers.add(superclass); - } - } - } catch (MalformedQueryException e) { - throw new IOException(e); - } catch (QueryEvaluationException e) { - throw new IOException(e); - } finally { - connection.close(); - } - } catch (RepositoryException e) { - throw new IOException(e); - } - return supers; - } - - - public Map buildClassDepthMap(String root_term) throws IOException { - Map class_depth = new HashMap(); - try { - BigdataSailRepositoryConnection connection = go_lego_repo.getReadOnlyConnection(); - try { - String query = "PREFIX owl: " + - "PREFIX rdf: " - + "PREFIX rdfs: " + - "SELECT ?class (count(?mid) as ?depth) " + - "WHERE { " - + "?class rdfs:subClassOf* ?mid . " - + "values ?root_term {<"+root_term+">} . " - +" ?mid rdfs:subClassOf* ?root_term ." 
+ - "filter ( ?class != ?mid )}" - + "group by ?class " + - " order by ?depth"; - TupleQuery tupleQuery = connection.prepareTupleQuery(QueryLanguage.SPARQL, query); - TupleQueryResult result = tupleQuery.evaluate(); - while (result.hasNext()) { - BindingSet binding = result.next(); - Value v = binding.getValue("depth"); - Integer depth = Integer.parseInt(v.stringValue()); - String c = binding.getValue("class").stringValue(); - Integer k = class_depth.get(c); - if((k==null)||(depth " - + "PREFIX rdfs: " + - "SELECT ?class (count(?mid) as ?depth) " + - "WHERE { " - + "values ?class {<"+term+">} . " - + "?class rdfs:subClassOf* ?mid . " + - " ?mid rdfs:subClassOf* <"+root_term+"> ." + - "filter ( ?class != ?mid )}" - + "group by ?class " + - " order by ?depth"; - TupleQuery tupleQuery = connection.prepareTupleQuery(QueryLanguage.SPARQL, query); - TupleQueryResult result = tupleQuery.evaluate(); - while (result.hasNext()) { - BindingSet binding = result.next(); - Value v = binding.getValue("depth"); - depth = Integer.parseInt(v.stringValue()); - } - } catch (MalformedQueryException e) { - throw new IOException(e); - } catch (QueryEvaluationException e) { - throw new IOException(e); - } finally { - connection.close(); - } - } catch (RepositoryException e) { - throw new IOException(e); - } - return depth; - } - - private Map> buildRegulationMap() throws IOException { - String regulationTargetsQuery = IOUtils.toString(BlazegraphOntologyManager.class.getResourceAsStream("regulation_targets.rq"), StandardCharsets.UTF_8); - try { - BigdataSailRepositoryConnection connection = go_lego_repo.getReadOnlyConnection(); - try { - TupleQuery tupleQuery = connection.prepareTupleQuery(QueryLanguage.SPARQL, regulationTargetsQuery); - TupleQueryResult result = tupleQuery.evaluate(); - Map> regulators = new HashMap<>(); - while (result.hasNext()) { - BindingSet binding = result.next(); - IRI regulator = IRI.create(binding.getValue("subjectDown").stringValue()); - IRI regulated = IRI.create(binding.getValue("fillerUp").stringValue()); - if (!regulators.containsKey(regulator)) { - regulators.put(regulator, new HashSet<>()); - } - regulators.get(regulator).add(regulated); - } - return regulators; - } catch (MalformedQueryException e) { - throw new IOException(e); - } catch (QueryEvaluationException e) { - throw new IOException(e); - } finally { - connection.close(); - } - } catch (RepositoryException e) { - throw new IOException(e); - } - } - - public Map> getSuperCategoryMapForIndividuals(Set inds, OWLOntology ont, boolean fix_deprecated) throws IOException{ - Map> ind_roots = new HashMap>(); - Set all_types = new HashSet(); - Map> ind_types = new HashMap>(); - for (OWLNamedIndividual ind : inds) { - Set types = new HashSet(); - for(OWLClassExpression oc : EntitySearcher.getTypes(ind, ont)) { - if (oc.isNamed()) { - types.add(oc.asOWLClass().getIRI().toString()); - } else if (oc instanceof OWLObjectComplementOf) { - types.add(((OWLObjectComplementOf)oc).getOperand().asOWLClass().getIRI().toString()); - } - } - all_types.addAll(types); - if(fix_deprecated) { - ind_types.put(ind, replaceDeprecated(types)); - }else { - ind_types.put(ind, types); - } - } - if(fix_deprecated) { - all_types = replaceDeprecated(all_types); - } - //just one query.. 
- Map> type_roots = getSuperCategoryMap(all_types); - for(OWLNamedIndividual ind : inds) { - Set types = ind_types.get(ind); - for(String type : types) { - Set roots = type_roots.get(type); - ind_roots.put(ind, roots); - } - } - return ind_roots; - } - - public Set replaceDeprecated(Set uris){ - Set fixed = new HashSet(); - Map old_new = mapDeprecated(uris); - for(String t : uris) { - if(old_new.get(t)!=null) { - fixed.add(old_new.get(t)); - }else { - fixed.add(t); - } - } - return fixed; - } - - public Set replaceDeprecated(Set uris, Map old_new){ - Set fixed = new HashSet(); - for(String t : uris) { - if(old_new.get(t)!=null) { - fixed.add(old_new.get(t)); - }else { - fixed.add(t); - } - } - return fixed; - } - - public Map mapDeprecated(Set uris){ - Map old_new = new HashMap(); - BigdataSailRepositoryConnection connection; - try { - connection = go_lego_repo.getReadOnlyConnection(); - try { - String q = "VALUES ?c {"; - for(String uri : uris) { - if(uri.startsWith("http")) { - q+="<"+uri+"> \n"; - } - } - q+="} . " ; - - String query = - "SELECT ?c ?replacement " + - "WHERE { " + q - + "?c ?replacement . " + - "} "; - TupleQuery tupleQuery = connection.prepareTupleQuery(QueryLanguage.SPARQL, query); - TupleQueryResult result = tupleQuery.evaluate(); - while (result.hasNext()) { - BindingSet binding = result.next(); - Value c = binding.getValue("c"); - Value replacement = binding.getValue("replacement"); - old_new.put(c.stringValue(),replacement.stringValue()); - } - } catch (MalformedQueryException e) { - // TODO Auto-generated catch block - e.printStackTrace(); - } catch (QueryEvaluationException e) { - // TODO Auto-generated catch block - e.printStackTrace(); - } finally { - connection.close(); - } - } catch (RepositoryException e1) { - // TODO Auto-generated catch block - e1.printStackTrace(); - } - return old_new; - } - - public Map> getSuperCategoryMap(Set uris) throws IOException { - Map> sub_supers = new HashMap>(); - try { - BigdataSailRepositoryConnection connection = go_lego_repo.getReadOnlyConnection(); - try { - String q = "VALUES ?sub {"; - for(String uri : uris) { - if(uri.startsWith("http")) { - q+="<"+uri+"> "; - } - } - q+="} . " ; - - String categories = "VALUES ?super {"; - for(String c : root_types) { - categories += "<"+c+"> "; - } - categories +="} . "; - String query = "PREFIX owl: " + - "PREFIX rdf: " - + "PREFIX rdfs: " + - "SELECT ?sub ?super " + - "WHERE { " + q + categories - + "?sub rdfs:subClassOf* ?super . 
" + - "} "; - TupleQuery tupleQuery = connection.prepareTupleQuery(QueryLanguage.SPARQL, query); - TupleQueryResult result = tupleQuery.evaluate(); - while (result.hasNext()) { - BindingSet binding = result.next(); - Value parent = binding.getValue("super"); - Value child = binding.getValue("sub"); - //System.out.println(child +" "+parent); - //ignore anonymous super classes - if ( parent instanceof URI && child instanceof URI) { - String superclass = binding.getValue("super").stringValue(); - String subclass = binding.getValue("sub").stringValue(); - Set supers = sub_supers.get(subclass); - if(supers==null) { - supers = new HashSet(); - } - supers.add(superclass); - sub_supers.put(subclass, supers); - } - } - } catch (MalformedQueryException e) { - throw new IOException(e); - } catch (QueryEvaluationException e) { - throw new IOException(e); - } finally { - connection.close(); - } - } catch (RepositoryException e) { - throw new IOException(e); - } - return sub_supers; - } - - /** - * This reproduces the results of the golr lookup service for gene product typing - * @param uris - * @return - * @throws IOException - */ - public Map> getNeoRoots(Set uris) throws IOException { - Map> all = getSuperClassMap(uris); - Map> roots = new HashMap>(); - //only do what the golr was doing and working - for(String term : all.keySet()) { - Set isa_closure = all.get(term); - String direct_parent_iri = null; - if(isa_closure.contains("http://purl.obolibrary.org/obo/CHEBI_36080")) { - //protein - direct_parent_iri = "http://purl.obolibrary.org/obo/CHEBI_36080"; - }else if(isa_closure.contains("http://purl.obolibrary.org/obo/CHEBI_33695")) { - //information biomacrolecule (gene, complex) - direct_parent_iri = "http://purl.obolibrary.org/obo/CHEBI_33695"; - } - if(direct_parent_iri!=null) { - Set r = new HashSet(); - r.add(direct_parent_iri); - roots.put(term, r); - } - } - return roots; - } - - - public Map> getSuperClassMap(Set uris) throws IOException { - Map> sub_supers = new HashMap>(); - try { - BigdataSailRepositoryConnection connection = go_lego_repo.getReadOnlyConnection(); - try { - String q = "VALUES ?sub {"; - for(String uri : uris) { - q+="<"+uri+"> "; - } - q+="} . " ; - String query = "PREFIX owl: " + - "PREFIX rdf: " - + "PREFIX rdfs: " + - "SELECT ?sub ?super " + - "WHERE { " + q - + "?sub rdfs:subClassOf* ?super . 
" + - "} "; - TupleQuery tupleQuery = connection.prepareTupleQuery(QueryLanguage.SPARQL, query); - TupleQueryResult result = tupleQuery.evaluate(); - while (result.hasNext()) { - BindingSet binding = result.next(); - Value parent = binding.getValue("super"); - Value child = binding.getValue("sub"); - //System.out.println(child +" "+parent); - //ignore anonymous super classes - if ( parent instanceof URI && child instanceof URI) { - String superclass = binding.getValue("super").stringValue(); - String subclass = binding.getValue("sub").stringValue(); - Set supers = sub_supers.get(subclass); - if(supers==null) { - supers = new HashSet(); - } - supers.add(superclass); - sub_supers.put(subclass, supers); - } - } - } catch (MalformedQueryException e) { - throw new IOException(e); - } catch (QueryEvaluationException e) { - throw new IOException(e); - } finally { - connection.close(); - } - } catch (RepositoryException e) { - throw new IOException(e); - } - return sub_supers; - } - - - public Set getGenesByTaxid(String ncbi_tax_id) throws IOException { - Set genes = new HashSet(); - try { - BigdataSailRepositoryConnection connection = go_lego_repo.getReadOnlyConnection(); - try { - String query = - "select ?gene \n" + - "where { \n" + - " ?gene rdfs:subClassOf ?taxon_restriction .\n" + - " ?taxon_restriction owl:onProperty .\n" + - " ?taxon_restriction owl:someValuesFrom \n" + - "}"; - - TupleQuery tupleQuery = connection.prepareTupleQuery(QueryLanguage.SPARQL, query); - TupleQueryResult result = tupleQuery.evaluate(); - while (result.hasNext()) { - BindingSet binding = result.next(); - Value v = binding.getValue("gene"); - //ignore anonymous sub classes - if ( v instanceof URI ) { - String gene = binding.getValue("gene").stringValue(); - genes.add(gene); - } - } - } catch (MalformedQueryException e) { - throw new IOException(e); - } catch (QueryEvaluationException e) { - throw new IOException(e); - } finally { - connection.close(); - } - } catch (RepositoryException e) { - throw new IOException(e); - } - return genes; - } - - public Set getAllTaxaWithGenes() throws IOException { - Set taxa = new HashSet(); - try { - BigdataSailRepositoryConnection connection = go_lego_repo.getReadOnlyConnection(); - try { - String query = - "select distinct ?taxon \n" + - "where { \n" + - " ?gene rdfs:subClassOf ?taxon_restriction .\n" + - " ?taxon_restriction owl:onProperty .\n" + - " ?taxon_restriction owl:someValuesFrom ?taxon \n" + - "\n" + - "}"; - - TupleQuery tupleQuery = connection.prepareTupleQuery(QueryLanguage.SPARQL, query); - TupleQueryResult result = tupleQuery.evaluate(); - while (result.hasNext()) { - BindingSet binding = result.next(); - Value v = binding.getValue("taxon"); - //ignore anonymous sub classes - if ( v instanceof URI ) { - String taxon = binding.getValue("taxon").stringValue(); - taxa.add(taxon); - } - } - } catch (MalformedQueryException e) { - throw new IOException(e); - } catch (QueryEvaluationException e) { - throw new IOException(e); - } finally { - connection.close(); - } - } catch (RepositoryException e) { - throw new IOException(e); - } - return taxa; - } - - - - public void dispose() { - try { - go_lego_repo.shutDown(); - } catch (RepositoryException e) { - LOG.error("Failed to shutdown Lego Blazegraph sail.", e); - } - } - public Set getTaxaByGenes(Set genes) throws IOException { - String expansion = "VALUES ?gene { "; - for(String gene : genes) { - expansion += "<"+gene+"> \n"; - } - expansion+= " } . 
\n"; - Set taxa = new HashSet(); - try { - BigdataSailRepositoryConnection connection = go_lego_repo.getReadOnlyConnection(); - try { - String query = - "select distinct ?taxon \n" + - "where { \n" + expansion + - " ?gene rdfs:subClassOf ?taxon_restriction .\n" + - " ?taxon_restriction owl:onProperty .\n" + - " ?taxon_restriction owl:someValuesFrom ?taxon \n" + - "\n" + - "}"; - - TupleQuery tupleQuery = connection.prepareTupleQuery(QueryLanguage.SPARQL, query); - TupleQueryResult result = tupleQuery.evaluate(); - while (result.hasNext()) { - BindingSet binding = result.next(); - Value v = binding.getValue("taxon"); - //ignore anonymous sub classes - if ( v instanceof URI ) { - String taxon = binding.getValue("taxon").stringValue(); - taxa.add(taxon); - } - } - } catch (MalformedQueryException e) { - throw new IOException(e); - } catch (QueryEvaluationException e) { - throw new IOException(e); - } finally { - connection.close(); - } - } catch (RepositoryException e) { - throw new IOException(e); - } - return taxa; - } - - - public String getLabel(OWLNamedObject i) throws IOException { - String entity = i.getIRI().toString(); - return getLabel(entity); - } - - public String getLabel(String entity) throws IOException { - String label = null; - - String query = "select ?label where { <"+entity+"> rdfs:label ?label } limit 1"; - try { - BigdataSailRepositoryConnection connection = go_lego_repo.getReadOnlyConnection(); - try { - TupleQuery tupleQuery = connection.prepareTupleQuery(QueryLanguage.SPARQL, query); - TupleQueryResult result = tupleQuery.evaluate(); - if (result.hasNext()) { - BindingSet binding = result.next(); - Value v = binding.getValue("label"); - label = v.stringValue(); - } - } catch (MalformedQueryException e) { - throw new IOException(e); - } catch (QueryEvaluationException e) { - throw new IOException(e); - } finally { - connection.close(); - } - } catch (RepositoryException e) { - throw new IOException(e); - } - return label; - } - - - - public boolean exists(String entity) throws IOException { - boolean exists = false; - String query = "select * " - + "WHERE {" + - "{<"+entity+"> ?p ?o . } " + - "UNION " + - "{?s ?p <"+entity+"> . }" + - "} limit 1"; - try { - BigdataSailRepositoryConnection connection = go_lego_repo.getReadOnlyConnection(); - try { - TupleQuery tupleQuery = connection.prepareTupleQuery(QueryLanguage.SPARQL, query); - TupleQueryResult result = tupleQuery.evaluate(); - if (result.hasNext()) { - exists = true; - return exists; - } - } catch (MalformedQueryException e) { - throw new IOException(e); - } catch (QueryEvaluationException e) { - throw new IOException(e); - } finally { - connection.close(); - } - } catch (RepositoryException e) { - throw new IOException(e); - } - return exists; - } - - public Map getLabels(Set entities) throws IOException { - Map uri_label = new HashMap(); - - String values = "VALUES ?entity {"; - for(String uri : entities) { - values+="<"+uri+"> "; - } - values+="} . 
" ; - - String query = "select ?entity ?label where { "+values+" ?entity rdfs:label ?label }"; - try { - BigdataSailRepositoryConnection connection = go_lego_repo.getReadOnlyConnection(); - try { - TupleQuery tupleQuery = connection.prepareTupleQuery(QueryLanguage.SPARQL, query); - TupleQueryResult result = tupleQuery.evaluate(); - while (result.hasNext()) { - BindingSet binding = result.next(); - Value v = binding.getValue("label"); - String label = v.stringValue(); - Value ev = binding.getValue("entity"); - String entity = ev.stringValue(); - uri_label.put(entity, label); - } - } catch (MalformedQueryException e) { - throw new IOException(e); - } catch (QueryEvaluationException e) { - throw new IOException(e); - } finally { - connection.close(); - } - } catch (RepositoryException e) { - throw new IOException(e); - } - return uri_label; - } + private static Logger LOG = Logger.getLogger(BlazegraphOntologyManager.class); + private final BigdataSailRepository go_lego_repo; + private final static String public_blazegraph_url = "http://skyhook.berkeleybop.org/blazegraph-go-lego-reacto-neo.jnl.gz"; + //TODO this should probably go somewhere else - like an ontology file - this was missing.. + public static String in_taxon_uri = "https://w3id.org/biolink/vocab/in_taxon"; + public static OWLAnnotationProperty in_taxon = OWLManager.getOWLDataFactory().getOWLAnnotationProperty(IRI.create(in_taxon_uri)); + private static final Set root_types; + public final Map> regulatorsToRegulated; + public Map class_depth; + + static { + root_types = new HashSet(); + root_types.add("http://purl.obolibrary.org/obo/GO_0008150"); //BP + root_types.add("http://purl.obolibrary.org/obo/GO_0003674"); //MF + root_types.add("http://purl.obolibrary.org/obo/go/extensions/reacto.owl#molecular_event");//ME + root_types.add("http://purl.obolibrary.org/obo/GO_0005575"); //CC + root_types.add("http://purl.obolibrary.org/obo/GO_0032991"); //Complex + root_types.add("http://purl.obolibrary.org/obo/CHEBI_36080"); //protein + root_types.add("http://purl.obolibrary.org/obo/CHEBI_33695"); //information biomacromolecule + root_types.add("http://purl.obolibrary.org/obo/CHEBI_50906"); //chemical role + root_types.add("http://purl.obolibrary.org/obo/CHEBI_24431"); //chemical entity + root_types.add("http://purl.obolibrary.org/obo/UBERON_0001062"); //anatomical entity + root_types.add("http://purl.obolibrary.org/obo/GO_0110165"); //cellular anatomical entity + root_types.add("http://purl.obolibrary.org/obo/CARO_0000000"); // root root anatomical entity + root_types.add("http://purl.obolibrary.org/obo/UBERON_0000105"); // life cycle stage + root_types.add("http://purl.obolibrary.org/obo/PO_0009012"); // plant structure development stage + root_types.add("http://purl.obolibrary.org/obo/ECO_0000000"); //evidence root. + } + + public BlazegraphOntologyManager(String go_lego_repo_file, boolean downloadJournal) throws IOException { + if (!new File(go_lego_repo_file).exists() && downloadJournal) { + LOG.info("No blazegraph tbox journal found at " + go_lego_repo_file + " . 
Downloading from " + public_blazegraph_url + " and putting there."); + URL blazegraph_url = new URL(public_blazegraph_url); + File go_lego_repo_local = new File(go_lego_repo_file); + if (public_blazegraph_url.endsWith(".gz")) { + go_lego_repo_local = new File(go_lego_repo_file + ".gz"); + } + org.apache.commons.io.FileUtils.copyURLToFile(blazegraph_url, go_lego_repo_local); + if (public_blazegraph_url.endsWith(".gz")) { + unGunzipFile(go_lego_repo_file + ".gz", go_lego_repo_file); + } + } + go_lego_repo = initializeRepository(go_lego_repo_file); + class_depth = buildClassDepthMap("http://purl.obolibrary.org/obo/GO_0003674"); + class_depth.putAll(buildClassDepthMap("http://purl.obolibrary.org/obo/GO_0008150")); + class_depth.putAll(buildClassDepthMap("http://purl.obolibrary.org/obo/GO_0005575")); + class_depth.put("http://purl.obolibrary.org/obo/GO_0008150", 0); + class_depth.put("http://purl.obolibrary.org/obo/GO_0003674", 0); + class_depth.put("http://purl.obolibrary.org/obo/GO_0005575", 0); + class_depth.put("http://purl.obolibrary.org/obo/go/extensions/reacto.owl#molecular_event", 0); + regulatorsToRegulated = buildRegulationMap(); + } + + public BigdataSailRepository getGo_lego_repo() { + return go_lego_repo; + } + + public OWLOntology addTaxonModelMetaData(OWLOntology model, IRI taxon_iri) { + OWLOntologyManager ontman = model.getOWLOntologyManager(); + OWLDataFactory df = ontman.getOWLDataFactory(); + OWLAnnotation taxon_anno = df.getOWLAnnotation(in_taxon, taxon_iri); + OWLAxiom taxonannoaxiom = df.getOWLAnnotationAssertionAxiom(model.getOntologyID().getOntologyIRI().get(), taxon_anno); + ontman.addAxiom(model, taxonannoaxiom); + return model; + } + + public void unGunzipFile(String compressedFile, String decompressedFile) { + byte[] buffer = new byte[1024]; + try { + FileInputStream fileIn = new FileInputStream(compressedFile); + GZIPInputStream gZIPInputStream = new GZIPInputStream(fileIn); + FileOutputStream fileOutputStream = new FileOutputStream(decompressedFile); + int bytes_read; + while ((bytes_read = gZIPInputStream.read(buffer)) > 0) { + fileOutputStream.write(buffer, 0, bytes_read); + } + gZIPInputStream.close(); + fileOutputStream.close(); + } catch (IOException ex) { + ex.printStackTrace(); + } + } + + + private BigdataSailRepository initializeRepository(String pathToJournal) { + try { + Properties properties = new Properties(); + properties.load(this.getClass().getResourceAsStream("onto-blazegraph.properties")); + properties.setProperty(Options.FILE, pathToJournal); + BigdataSail sail = new BigdataSail(properties); + BigdataSailRepository repository = new BigdataSailRepository(sail); + + repository.initialize(); + return repository; + } catch (RepositoryException e) { + LOG.fatal("Could not create Blazegraph sail", e); + return null; + } catch (IOException e) { + LOG.fatal("Could not create Blazegraph sail", e); + return null; + } + } + + public void loadRepositoryFromOWLFile(File file, String iri, boolean reset) throws OWLOntologyCreationException, RepositoryException, IOException, RDFParseException, RDFHandlerException { + synchronized (go_lego_repo) { + final BigdataSailRepositoryConnection connection = go_lego_repo.getUnisolatedConnection(); + try { + connection.begin(); + try { + URI graph = new URIImpl(iri); + if (reset) { + connection.clear(graph); + } + if (file.getName().endsWith(".ttl")) { + connection.add(file, "", RDFFormat.TURTLE, graph); + } else if (file.getName().endsWith(".owl")) { + connection.add(file, "", RDFFormat.RDFXML, graph); + } + 
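+                    // Note (sketch): only file names ending in .ttl or .owl are recognized above;
+                    // any other extension adds nothing before the commit below.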
connection.commit(); + } catch (Exception e) { + connection.rollback(); + throw e; + } + } finally { + connection.close(); + } + } + } + + public void loadRepositoryFromOntology(OWLOntology ontology, String iri, boolean reset) throws OWLOntologyCreationException, RepositoryException, IOException, RDFParseException, RDFHandlerException { + synchronized (go_lego_repo) { + final BigdataSailRepositoryConnection connection = go_lego_repo.getUnisolatedConnection(); + try { + connection.begin(); + try { + URI graph = new URIImpl(iri); + if (reset) { + connection.clear(graph); + } + StatementCollector collector = new StatementCollector(); + RioRenderer renderer = new RioRenderer(ontology, collector, null); + renderer.render(); + connection.add(collector.getStatements(), graph); + connection.commit(); + } catch (Exception e) { + connection.rollback(); + throw e; + } + } finally { + connection.close(); + } + } + } + + public Set getAllSuperClasses(String uri) throws IOException { + Set supers = new HashSet(); + try { + BigdataSailRepositoryConnection connection = go_lego_repo.getReadOnlyConnection(); + try { + String query = "PREFIX owl: " + + "PREFIX rdf: " + + "PREFIX rdfs: " + + "SELECT ?super " + + "WHERE { " + + "<" + uri + "> rdfs:subClassOf* ?super . " + + "} "; + TupleQuery tupleQuery = connection.prepareTupleQuery(QueryLanguage.SPARQL, query); + TupleQueryResult result = tupleQuery.evaluate(); + while (result.hasNext()) { + BindingSet binding = result.next(); + Value v = binding.getValue("super"); + //ignore anonymous super classes + if (v instanceof URI) { + String superclass = binding.getValue("super").stringValue(); + supers.add(superclass); + } + } + } catch (MalformedQueryException e) { + throw new IOException(e); + } catch (QueryEvaluationException e) { + throw new IOException(e); + } finally { + connection.close(); + } + } catch (RepositoryException e) { + throw new IOException(e); + } + return supers; + } + + public Set getAllSubClasses(String uri) throws IOException { + Set supers = new HashSet(); + try { + BigdataSailRepositoryConnection connection = go_lego_repo.getReadOnlyConnection(); + try { + String query = "PREFIX owl: " + + "PREFIX rdf: " + + "PREFIX rdfs: " + + "SELECT ?sub " + + "WHERE { " + + "?sub rdfs:subClassOf* <" + uri + "> . " + + "} "; + TupleQuery tupleQuery = connection.prepareTupleQuery(QueryLanguage.SPARQL, query); + TupleQueryResult result = tupleQuery.evaluate(); + while (result.hasNext()) { + BindingSet binding = result.next(); + Value v = binding.getValue("sub"); + //ignore anonymous sub classes + if (v instanceof URI) { + String superclass = binding.getValue("sub").stringValue(); + supers.add(superclass); + } + } + } catch (MalformedQueryException e) { + throw new IOException(e); + } catch (QueryEvaluationException e) { + throw new IOException(e); + } finally { + connection.close(); + } + } catch (RepositoryException e) { + throw new IOException(e); + } + return supers; + } + + + public Map buildClassDepthMap(String root_term) throws IOException { + Map class_depth = new HashMap(); + try { + BigdataSailRepositoryConnection connection = go_lego_repo.getReadOnlyConnection(); + try { + String query = "PREFIX owl: " + + "PREFIX rdf: " + + "PREFIX rdfs: " + + "SELECT ?class (count(?mid) as ?depth) " + + "WHERE { " + + "?class rdfs:subClassOf* ?mid . " + + "values ?root_term {<" + root_term + ">} . " + + " ?mid rdfs:subClassOf* ?root_term ." 
+ + "filter ( ?class != ?mid )}" + + "group by ?class " + + " order by ?depth"; + TupleQuery tupleQuery = connection.prepareTupleQuery(QueryLanguage.SPARQL, query); + TupleQueryResult result = tupleQuery.evaluate(); + while (result.hasNext()) { + BindingSet binding = result.next(); + Value v = binding.getValue("depth"); + Integer depth = Integer.parseInt(v.stringValue()); + String c = binding.getValue("class").stringValue(); + Integer k = class_depth.get(c); + if ((k == null) || (depth < k)) { + class_depth.put(c, depth); + } + } + } catch (MalformedQueryException e) { + throw new IOException(e); + } catch (QueryEvaluationException e) { + throw new IOException(e); + } finally { + connection.close(); + } + } catch (RepositoryException e) { + throw new IOException(e); + } + return class_depth; + } + + public int getClassDepth(String term, String root_term) throws IOException { + int depth = -1; + try { + BigdataSailRepositoryConnection connection = go_lego_repo.getReadOnlyConnection(); + try { + String query = "PREFIX owl: " + + "PREFIX rdf: " + + "PREFIX rdfs: " + + "SELECT ?class (count(?mid) as ?depth) " + + "WHERE { " + + "values ?class {<" + term + ">} . " + + "?class rdfs:subClassOf* ?mid . " + + " ?mid rdfs:subClassOf* <" + root_term + "> ." + + "filter ( ?class != ?mid )}" + + "group by ?class " + + " order by ?depth"; + TupleQuery tupleQuery = connection.prepareTupleQuery(QueryLanguage.SPARQL, query); + TupleQueryResult result = tupleQuery.evaluate(); + while (result.hasNext()) { + BindingSet binding = result.next(); + Value v = binding.getValue("depth"); + depth = Integer.parseInt(v.stringValue()); + } + } catch (MalformedQueryException e) { + throw new IOException(e); + } catch (QueryEvaluationException e) { + throw new IOException(e); + } finally { + connection.close(); + } + } catch (RepositoryException e) { + throw new IOException(e); + } + return depth; + } + + private Map> buildRegulationMap() throws IOException { + String regulationTargetsQuery = IOUtils.toString(BlazegraphOntologyManager.class.getResourceAsStream("regulation_targets.rq"), StandardCharsets.UTF_8); + try { + BigdataSailRepositoryConnection connection = go_lego_repo.getReadOnlyConnection(); + try { + TupleQuery tupleQuery = connection.prepareTupleQuery(QueryLanguage.SPARQL, regulationTargetsQuery); + TupleQueryResult result = tupleQuery.evaluate(); + Map> regulators = new HashMap<>(); + while (result.hasNext()) { + BindingSet binding = result.next(); + IRI regulator = IRI.create(binding.getValue("subjectDown").stringValue()); + IRI regulated = IRI.create(binding.getValue("fillerUp").stringValue()); + if (!regulators.containsKey(regulator)) { + regulators.put(regulator, new HashSet<>()); + } + regulators.get(regulator).add(regulated); + } + return regulators; + } catch (MalformedQueryException e) { + throw new IOException(e); + } catch (QueryEvaluationException e) { + throw new IOException(e); + } finally { + connection.close(); + } + } catch (RepositoryException e) { + throw new IOException(e); + } + } + + public Map> getSuperCategoryMapForIndividuals(Set inds, OWLOntology ont, boolean fix_deprecated) throws IOException { + Map> ind_roots = new HashMap>(); + Set all_types = new HashSet(); + Map> ind_types = new HashMap>(); + for (OWLNamedIndividual ind : inds) { + Set types = new HashSet(); + for (OWLClassExpression oc : EntitySearcher.getTypes(ind, ont)) { + if (oc.isNamed()) { + types.add(oc.asOWLClass().getIRI().toString()); + } else if (oc instanceof OWLObjectComplementOf) { + 
types.add(((OWLObjectComplementOf) oc).getOperand().asOWLClass().getIRI().toString()); + } + } + all_types.addAll(types); + if (fix_deprecated) { + ind_types.put(ind, replaceDeprecated(types)); + } else { + ind_types.put(ind, types); + } + } + if (fix_deprecated) { + all_types = replaceDeprecated(all_types); + } + //just one query.. + Map> type_roots = getSuperCategoryMap(all_types); + for (OWLNamedIndividual ind : inds) { + Set types = ind_types.get(ind); + for (String type : types) { + Set roots = type_roots.get(type); + ind_roots.put(ind, roots); + } + } + return ind_roots; + } + + public Set replaceDeprecated(Set uris) { + Set fixed = new HashSet(); + Map old_new = mapDeprecated(uris); + for (String t : uris) { + if (old_new.get(t) != null) { + fixed.add(old_new.get(t)); + } else { + fixed.add(t); + } + } + return fixed; + } + + public Set replaceDeprecated(Set uris, Map old_new) { + Set fixed = new HashSet(); + for (String t : uris) { + if (old_new.get(t) != null) { + fixed.add(old_new.get(t)); + } else { + fixed.add(t); + } + } + return fixed; + } + + public Map mapDeprecated(Set uris) { + Map old_new = new HashMap(); + BigdataSailRepositoryConnection connection; + try { + connection = go_lego_repo.getReadOnlyConnection(); + try { + String q = "VALUES ?c {"; + for (String uri : uris) { + if (uri.startsWith("http")) { + q += "<" + uri + "> \n"; + } + } + q += "} . "; + + String query = + "SELECT ?c ?replacement " + + "WHERE { " + q + + "?c ?replacement . " + + "} "; + TupleQuery tupleQuery = connection.prepareTupleQuery(QueryLanguage.SPARQL, query); + TupleQueryResult result = tupleQuery.evaluate(); + while (result.hasNext()) { + BindingSet binding = result.next(); + Value c = binding.getValue("c"); + Value replacement = binding.getValue("replacement"); + old_new.put(c.stringValue(), replacement.stringValue()); + } + } catch (MalformedQueryException e) { + // TODO Auto-generated catch block + e.printStackTrace(); + } catch (QueryEvaluationException e) { + // TODO Auto-generated catch block + e.printStackTrace(); + } finally { + connection.close(); + } + } catch (RepositoryException e1) { + // TODO Auto-generated catch block + e1.printStackTrace(); + } + return old_new; + } + + public Map> getSuperCategoryMap(Set uris) throws IOException { + Map> sub_supers = new HashMap>(); + try { + BigdataSailRepositoryConnection connection = go_lego_repo.getReadOnlyConnection(); + try { + String q = "VALUES ?sub {"; + for (String uri : uris) { + if (uri.startsWith("http")) { + q += "<" + uri + "> "; + } + } + q += "} . "; + + String categories = "VALUES ?super {"; + for (String c : root_types) { + categories += "<" + c + "> "; + } + categories += "} . "; + String query = "PREFIX owl: " + + "PREFIX rdf: " + + "PREFIX rdfs: " + + "SELECT ?sub ?super " + + "WHERE { " + q + categories + + "?sub rdfs:subClassOf* ?super . 
" + + "} "; + TupleQuery tupleQuery = connection.prepareTupleQuery(QueryLanguage.SPARQL, query); + TupleQueryResult result = tupleQuery.evaluate(); + while (result.hasNext()) { + BindingSet binding = result.next(); + Value parent = binding.getValue("super"); + Value child = binding.getValue("sub"); + //System.out.println(child +" "+parent); + //ignore anonymous super classes + if (parent instanceof URI && child instanceof URI) { + String superclass = binding.getValue("super").stringValue(); + String subclass = binding.getValue("sub").stringValue(); + Set supers = sub_supers.get(subclass); + if (supers == null) { + supers = new HashSet(); + } + supers.add(superclass); + sub_supers.put(subclass, supers); + } + } + } catch (MalformedQueryException e) { + throw new IOException(e); + } catch (QueryEvaluationException e) { + throw new IOException(e); + } finally { + connection.close(); + } + } catch (RepositoryException e) { + throw new IOException(e); + } + return sub_supers; + } + + /** + * This reproduces the results of the golr lookup service for gene product typing + * @param uris + * @return + * @throws IOException + */ + public Map> getNeoRoots(Set uris) throws IOException { + Map> all = getSuperClassMap(uris); + Map> roots = new HashMap>(); + //only do what the golr was doing and working + for (String term : all.keySet()) { + Set isa_closure = all.get(term); + String direct_parent_iri = null; + if (isa_closure.contains("http://purl.obolibrary.org/obo/CHEBI_36080")) { + //protein + direct_parent_iri = "http://purl.obolibrary.org/obo/CHEBI_36080"; + } else if (isa_closure.contains("http://purl.obolibrary.org/obo/CHEBI_33695")) { + //information biomacrolecule (gene, complex) + direct_parent_iri = "http://purl.obolibrary.org/obo/CHEBI_33695"; + } + if (direct_parent_iri != null) { + Set r = new HashSet(); + r.add(direct_parent_iri); + roots.put(term, r); + } + } + return roots; + } + + + public Map> getSuperClassMap(Set uris) throws IOException { + Map> sub_supers = new HashMap>(); + try { + BigdataSailRepositoryConnection connection = go_lego_repo.getReadOnlyConnection(); + try { + String q = "VALUES ?sub {"; + for (String uri : uris) { + q += "<" + uri + "> "; + } + q += "} . "; + String query = "PREFIX owl: " + + "PREFIX rdf: " + + "PREFIX rdfs: " + + "SELECT ?sub ?super " + + "WHERE { " + q + + "?sub rdfs:subClassOf* ?super . 
" + + "} "; + TupleQuery tupleQuery = connection.prepareTupleQuery(QueryLanguage.SPARQL, query); + TupleQueryResult result = tupleQuery.evaluate(); + while (result.hasNext()) { + BindingSet binding = result.next(); + Value parent = binding.getValue("super"); + Value child = binding.getValue("sub"); + //System.out.println(child +" "+parent); + //ignore anonymous super classes + if (parent instanceof URI && child instanceof URI) { + String superclass = binding.getValue("super").stringValue(); + String subclass = binding.getValue("sub").stringValue(); + Set supers = sub_supers.get(subclass); + if (supers == null) { + supers = new HashSet(); + } + supers.add(superclass); + sub_supers.put(subclass, supers); + } + } + } catch (MalformedQueryException e) { + throw new IOException(e); + } catch (QueryEvaluationException e) { + throw new IOException(e); + } finally { + connection.close(); + } + } catch (RepositoryException e) { + throw new IOException(e); + } + return sub_supers; + } + + + public Set getGenesByTaxid(String ncbi_tax_id) throws IOException { + Set genes = new HashSet(); + try { + BigdataSailRepositoryConnection connection = go_lego_repo.getReadOnlyConnection(); + try { + String query = + "select ?gene \n" + + "where { \n" + + " ?gene rdfs:subClassOf ?taxon_restriction .\n" + + " ?taxon_restriction owl:onProperty .\n" + + " ?taxon_restriction owl:someValuesFrom \n" + + "}"; + + TupleQuery tupleQuery = connection.prepareTupleQuery(QueryLanguage.SPARQL, query); + TupleQueryResult result = tupleQuery.evaluate(); + while (result.hasNext()) { + BindingSet binding = result.next(); + Value v = binding.getValue("gene"); + //ignore anonymous sub classes + if (v instanceof URI) { + String gene = binding.getValue("gene").stringValue(); + genes.add(gene); + } + } + } catch (MalformedQueryException e) { + throw new IOException(e); + } catch (QueryEvaluationException e) { + throw new IOException(e); + } finally { + connection.close(); + } + } catch (RepositoryException e) { + throw new IOException(e); + } + return genes; + } + + public Set getAllTaxaWithGenes() throws IOException { + Set taxa = new HashSet(); + try { + BigdataSailRepositoryConnection connection = go_lego_repo.getReadOnlyConnection(); + try { + String query = + "select distinct ?taxon \n" + + "where { \n" + + " ?gene rdfs:subClassOf ?taxon_restriction .\n" + + " ?taxon_restriction owl:onProperty .\n" + + " ?taxon_restriction owl:someValuesFrom ?taxon \n" + + "\n" + + "}"; + + TupleQuery tupleQuery = connection.prepareTupleQuery(QueryLanguage.SPARQL, query); + TupleQueryResult result = tupleQuery.evaluate(); + while (result.hasNext()) { + BindingSet binding = result.next(); + Value v = binding.getValue("taxon"); + //ignore anonymous sub classes + if (v instanceof URI) { + String taxon = binding.getValue("taxon").stringValue(); + taxa.add(taxon); + } + } + } catch (MalformedQueryException e) { + throw new IOException(e); + } catch (QueryEvaluationException e) { + throw new IOException(e); + } finally { + connection.close(); + } + } catch (RepositoryException e) { + throw new IOException(e); + } + return taxa; + } + + + public void dispose() { + try { + go_lego_repo.shutDown(); + } catch (RepositoryException e) { + LOG.error("Failed to shutdown Lego Blazegraph sail.", e); + } + } + + public Set getTaxaByGenes(Set genes) throws IOException { + String expansion = "VALUES ?gene { "; + for (String gene : genes) { + expansion += "<" + gene + "> \n"; + } + expansion += " } . 
\n"; + Set taxa = new HashSet(); + try { + BigdataSailRepositoryConnection connection = go_lego_repo.getReadOnlyConnection(); + try { + String query = + "select distinct ?taxon \n" + + "where { \n" + expansion + + " ?gene rdfs:subClassOf ?taxon_restriction .\n" + + " ?taxon_restriction owl:onProperty .\n" + + " ?taxon_restriction owl:someValuesFrom ?taxon \n" + + "\n" + + "}"; + + TupleQuery tupleQuery = connection.prepareTupleQuery(QueryLanguage.SPARQL, query); + TupleQueryResult result = tupleQuery.evaluate(); + while (result.hasNext()) { + BindingSet binding = result.next(); + Value v = binding.getValue("taxon"); + //ignore anonymous sub classes + if (v instanceof URI) { + String taxon = binding.getValue("taxon").stringValue(); + taxa.add(taxon); + } + } + } catch (MalformedQueryException e) { + throw new IOException(e); + } catch (QueryEvaluationException e) { + throw new IOException(e); + } finally { + connection.close(); + } + } catch (RepositoryException e) { + throw new IOException(e); + } + return taxa; + } + + + public String getLabel(OWLNamedObject i) throws IOException { + String entity = i.getIRI().toString(); + return getLabel(entity); + } + + public String getLabel(String entity) throws IOException { + String label = null; + + String query = "select ?label where { <" + entity + "> rdfs:label ?label } limit 1"; + try { + BigdataSailRepositoryConnection connection = go_lego_repo.getReadOnlyConnection(); + try { + TupleQuery tupleQuery = connection.prepareTupleQuery(QueryLanguage.SPARQL, query); + TupleQueryResult result = tupleQuery.evaluate(); + if (result.hasNext()) { + BindingSet binding = result.next(); + Value v = binding.getValue("label"); + label = v.stringValue(); + } + } catch (MalformedQueryException e) { + throw new IOException(e); + } catch (QueryEvaluationException e) { + throw new IOException(e); + } finally { + connection.close(); + } + } catch (RepositoryException e) { + throw new IOException(e); + } + return label; + } + + + public boolean exists(String entity) throws IOException { + boolean exists = false; + String query = "select * " + + "WHERE {" + + "{<" + entity + "> ?p ?o . } " + + "UNION " + + "{?s ?p <" + entity + "> . }" + + "} limit 1"; + try { + BigdataSailRepositoryConnection connection = go_lego_repo.getReadOnlyConnection(); + try { + TupleQuery tupleQuery = connection.prepareTupleQuery(QueryLanguage.SPARQL, query); + TupleQueryResult result = tupleQuery.evaluate(); + if (result.hasNext()) { + exists = true; + return exists; + } + } catch (MalformedQueryException e) { + throw new IOException(e); + } catch (QueryEvaluationException e) { + throw new IOException(e); + } finally { + connection.close(); + } + } catch (RepositoryException e) { + throw new IOException(e); + } + return exists; + } + + public Map getLabels(Set entities) throws IOException { + Map uri_label = new HashMap(); + + String values = "VALUES ?entity {"; + for (String uri : entities) { + values += "<" + uri + "> "; + } + values += "} . 
"; + + String query = "select ?entity ?label where { " + values + " ?entity rdfs:label ?label }"; + try { + BigdataSailRepositoryConnection connection = go_lego_repo.getReadOnlyConnection(); + try { + TupleQuery tupleQuery = connection.prepareTupleQuery(QueryLanguage.SPARQL, query); + TupleQueryResult result = tupleQuery.evaluate(); + while (result.hasNext()) { + BindingSet binding = result.next(); + Value v = binding.getValue("label"); + String label = v.stringValue(); + Value ev = binding.getValue("entity"); + String entity = ev.stringValue(); + uri_label.put(entity, label); + } + } catch (MalformedQueryException e) { + throw new IOException(e); + } catch (QueryEvaluationException e) { + throw new IOException(e); + } finally { + connection.close(); + } + } catch (RepositoryException e) { + throw new IOException(e); + } + return uri_label; + } } diff --git a/minerva-core/src/main/java/org/geneontology/minerva/CoreMolecularModelManager.java b/minerva-core/src/main/java/org/geneontology/minerva/CoreMolecularModelManager.java index 01e6e94f..f99eafe1 100644 --- a/minerva-core/src/main/java/org/geneontology/minerva/CoreMolecularModelManager.java +++ b/minerva-core/src/main/java/org/geneontology/minerva/CoreMolecularModelManager.java @@ -1,19 +1,6 @@ package org.geneontology.minerva; -import java.io.ByteArrayOutputStream; -import java.io.File; -import java.io.IOException; -import java.util.ArrayList; -import java.util.Collection; -import java.util.Collections; -import java.util.HashMap; -import java.util.HashSet; -import java.util.List; -import java.util.Map; -import java.util.Set; -import java.util.concurrent.atomic.AtomicLong; -import java.util.stream.Collectors; - +import com.google.common.base.Optional; import org.apache.commons.lang3.tuple.Pair; import org.apache.jena.rdf.model.Statement; import org.apache.jena.reasoner.rulesys.Rule; @@ -31,49 +18,7 @@ import org.semanticweb.owlapi.io.OWLOntologyDocumentSource; import org.semanticweb.owlapi.io.OWLParserFactory; import org.semanticweb.owlapi.io.StringDocumentSource; -import org.semanticweb.owlapi.model.AddAxiom; -import org.semanticweb.owlapi.model.AddImport; -import org.semanticweb.owlapi.model.AddOntologyAnnotation; -import org.semanticweb.owlapi.model.AxiomType; -import org.semanticweb.owlapi.model.IRI; -import org.semanticweb.owlapi.model.OWLAnnotation; -import org.semanticweb.owlapi.model.OWLAnnotationAssertionAxiom; -import org.semanticweb.owlapi.model.OWLAnnotationProperty; -import org.semanticweb.owlapi.model.OWLAnnotationSubject; -import org.semanticweb.owlapi.model.OWLAnnotationSubjectVisitor; -import org.semanticweb.owlapi.model.OWLAnnotationValueVisitor; -import org.semanticweb.owlapi.model.OWLAnonymousIndividual; -import org.semanticweb.owlapi.model.OWLAxiom; -import org.semanticweb.owlapi.model.OWLClass; -import org.semanticweb.owlapi.model.OWLClassAssertionAxiom; -import org.semanticweb.owlapi.model.OWLClassExpression; -import org.semanticweb.owlapi.model.OWLDataFactory; -import org.semanticweb.owlapi.model.OWLDataProperty; -import org.semanticweb.owlapi.model.OWLDataPropertyAssertionAxiom; -import org.semanticweb.owlapi.model.OWLDeclarationAxiom; -import org.semanticweb.owlapi.model.OWLDocumentFormat; -import org.semanticweb.owlapi.model.OWLImportsDeclaration; -import org.semanticweb.owlapi.model.OWLIndividual; -import org.semanticweb.owlapi.model.OWLLiteral; -import org.semanticweb.owlapi.model.OWLNamedIndividual; -import org.semanticweb.owlapi.model.OWLObjectProperty; -import 
org.semanticweb.owlapi.model.OWLObjectPropertyAssertionAxiom; -import org.semanticweb.owlapi.model.OWLObjectPropertyExpression; -import org.semanticweb.owlapi.model.OWLObjectSomeValuesFrom; -import org.semanticweb.owlapi.model.OWLOntology; -import org.semanticweb.owlapi.model.OWLOntologyAlreadyExistsException; -import org.semanticweb.owlapi.model.OWLOntologyChange; -import org.semanticweb.owlapi.model.OWLOntologyCreationException; -import org.semanticweb.owlapi.model.OWLOntologyID; -import org.semanticweb.owlapi.model.OWLOntologyIRIMapper; -import org.semanticweb.owlapi.model.OWLOntologyLoaderConfiguration; -import org.semanticweb.owlapi.model.OWLOntologyManager; -import org.semanticweb.owlapi.model.OWLOntologyStorageException; -import org.semanticweb.owlapi.model.OWLSubClassOfAxiom; -import org.semanticweb.owlapi.model.RemoveAxiom; -import org.semanticweb.owlapi.model.RemoveImport; -import org.semanticweb.owlapi.model.RemoveOntologyAnnotation; -import org.semanticweb.owlapi.model.SetOntologyID; +import org.semanticweb.owlapi.model.*; import org.semanticweb.owlapi.model.parameters.Imports; import org.semanticweb.owlapi.model.parameters.OntologyCopy; import org.semanticweb.owlapi.oboformat.OBOFormatOWLAPIParserFactory; @@ -85,1446 +30,1444 @@ import org.semanticweb.owlapi.search.EntitySearcher; import org.semanticweb.owlapi.util.PriorityCollection; import org.semanticweb.owlapi.vocab.OWLRDFVocabulary; - -import com.google.common.base.Optional; - import owltools.vocab.OBOUpperVocabulary; import scala.collection.JavaConverters; +import java.io.ByteArrayOutputStream; +import java.io.IOException; +import java.util.*; +import java.util.concurrent.atomic.AtomicLong; +import java.util.stream.Collectors; + /** * Manager and core operations for in memory MolecularModels (aka lego diagrams). - * + *

* Any number of models can be loaded at any time
* TODO - impose some limit to avoid using too much memory - * - * Each model is an OWLOntology, see {@link ModelContainer}. - * + *

+ * Each model is an OWLOntology, see {@link ModelContainer}. + * * @param object for holding meta data associated with each operation */ public abstract class CoreMolecularModelManager { - private static Logger LOG = Logger.getLogger(CoreMolecularModelManager.class); - - // axiom has evidence RO:0002612 - private static final IRI HAS_EVIDENCE_IRI = IRI.create("http://purl.obolibrary.org/obo/RO_0002612"); - // legacy - private static final IRI HAS_EVIDENCE_IRI_OLD = AnnotationShorthand.evidence.getAnnotationProperty(); - - private static final OWLAnnotationProperty HAS_SHORTHAND = OWLManager.getOWLDataFactory().getOWLAnnotationProperty(IRI.create("http://www.geneontology.org/formats/oboInOwl#shorthand")); - private static final OWLAnnotationProperty IN_SUBSET = OWLManager.getOWLDataFactory().getOWLAnnotationProperty(IRI.create("http://www.geneontology.org/formats/oboInOwl#inSubset")); - private static final Set DO_NOT_ANNOTATE_SUBSETS = new HashSet<>(); - static { - DO_NOT_ANNOTATE_SUBSETS.add(IRI.create("http://purl.obolibrary.org/obo/go#gocheck_do_not_annotate")); - DO_NOT_ANNOTATE_SUBSETS.add(IRI.create("http://purl.obolibrary.org/obo/go#gocheck_do_not_manually_annotate")); - } - - final OWLOntology tbox; - // final OWLReasonerFactory rf; - // final OWLReasoner tbox_reasoner; - //replacing tbox_reasoner structural reasoner functionality with blazegraph queries over pre-inferred relations.. - private BlazegraphOntologyManager go_lego_repo; - private final IRI tboxIRI; - - final Map modelMap = new HashMap(); - Set additionalImports; - - private final RuleEngine ruleEngine; - private final Map legacyRelationIndex = new HashMap(); - private final Map tboxLabelIndex = new HashMap(); - private final Map tboxShorthandIndex = new HashMap(); - private final Set doNotAnnotateSubset = new HashSet<>(); - - - /** - * Use start up time to create a unique prefix for id generation - */ - static String uniqueTop = Long.toHexString(Math.abs((System.currentTimeMillis()/1000))); - static final AtomicLong instanceCounter = new AtomicLong(0L); - - /** - * Generate a new id from the unique server prefix and a global counter - * - * @return id - */ - private static String localUnique(){ - final long counterValue = instanceCounter.getAndIncrement(); - String unique = uniqueTop + String.format("%08d", counterValue); - return unique; - } - - /** - * Check that the given string looks similar to a local unique id - * - * @param s - * @return true if the string looks like a generated id - */ - static boolean isLocalUnique(String s) { - boolean result = false; - if (s != null && s.length() > 8) { - result = true; - for (int i = 0; i < s.length(); i++) { - char c = s.charAt(i); - if (isHex(c) == false) { - result = false; - break; - } - } - } - return result; - } - - private static boolean isHex(char c) { - // check that char is a digit or a-e - boolean result = false; - if (Character.isDigit(c)) { - result = true; - } - else if (c == 'a' || c == 'b' || c == 'c' || c == 'd' || c == 'e' || c == 'f') { - result = true; - } - return result; - } - - /** - * Generate an id and prepend the given prefixes. - * - * This method must should be used for model identifiers and individual identifiers. 
- * - * @param prefixes - * @return id - */ - static IRI generateId(CharSequence...prefixes) { - StringBuilder sb = new StringBuilder(); - for (CharSequence prefix : prefixes) { - sb.append(prefix); - } - sb.append(localUnique()); - return IRI.create(sb.toString()); - } - - /** - * @param tbox - * @throws OWLOntologyCreationException - * @throws IOException - */ - public CoreMolecularModelManager(OWLOntology tbox, String go_lego_repo_file, boolean downloadOntologyJournal) throws OWLOntologyCreationException, IOException { - super(); - this.tbox = tbox; - tboxIRI = getTboxIRI(tbox); - this.ruleEngine = initializeRuleEngine(); - initializeLegacyRelationIndex(); - initializeTboxLabelIndex(); - initializeTboxShorthandIndex(); - initializeDoNotAnnotateSubset(); - if(go_lego_repo_file!=null) { - this.go_lego_repo = new BlazegraphOntologyManager(go_lego_repo_file, downloadOntologyJournal); - } - init(); - } - - - private OWLReasoner initializeTboxReasoner(OWLOntology tbox) { - OWLReasonerFactory reasonerFactory = new StructuralReasonerFactory(); - OWLReasoner r = reasonerFactory.createReasoner(tbox); - return r; - } - - private static synchronized Set removeOBOParserFactories(OWLOntologyManager m) { - // hacky workaround: remove the too liberal OBO parser - PriorityCollection factories = m.getOntologyParsers(); - Set copied = new HashSet<>(); - for (OWLParserFactory factory : factories) { - copied.add(factory); - } - for (OWLParserFactory factory : copied) { - Class cls = factory.getClass(); - boolean remove = false; - if (OBOFormatOWLAPIParserFactory.class.equals(cls)) { - remove = true; - } - if (remove) { - factories.remove(factory); - } - } - return copied; - } - - private static synchronized void resetOBOParserFactories(OWLOntologyManager m, Set factories) { - m.setOntologyParsers(factories); - } - - /** - * Executed before the init call {@link #init()}. - * - * @param tbox - * @return IRI, never null - * @throws OWLOntologyCreationException - */ - protected IRI getTboxIRI(OWLOntology tbox) throws OWLOntologyCreationException { - OWLOntologyID ontologyID = tbox.getOntologyID(); - if (ontologyID != null) { - Optional ontologyIRI = ontologyID.getOntologyIRI(); - if (ontologyIRI.isPresent()) { - return ontologyIRI.get(); - } - } - throw new OWLOntologyCreationException("No ontology id available for tbox. 
An ontology IRI is required for the import into the abox."); - } - - /** - * @throws OWLOntologyCreationException - */ - protected void init() throws OWLOntologyCreationException { - // set default imports - additionalImports = new HashSet(); - } - - /** - * @return core/source ontology - */ - public OWLOntology getOntology() { - return tbox; - } - - public Map getLegacyRelationShorthandIndex() { - return Collections.unmodifiableMap(this.legacyRelationIndex); - } - - public Map getTboxLabelIndex() { - return Collections.unmodifiableMap(this.tboxLabelIndex); - } - - public Map getTboxShorthandIndex() { - return Collections.unmodifiableMap(this.tboxShorthandIndex); - } - - public Set getDoNotAnnotateSubset() { - return Collections.unmodifiableSet(this.doNotAnnotateSubset); - } - - public RuleEngine getRuleEngine() { - return ruleEngine; - } - - private RuleEngine initializeRuleEngine() { - Set rules = new HashSet<>(); - rules.addAll(JavaConverters.setAsJavaSetConverter(OWLtoRules.translate(getOntology(), Imports.INCLUDED, true, true, true, true)).asJava()); - rules.addAll(JavaConverters.setAsJavaSetConverter(OWLtoRules.indirectRules(getOntology())).asJava()); - return new RuleEngine(Bridge.rulesFromJena(JavaConverters.asScalaSetConverter(rules).asScala()), true); - } - - public WorkingMemory createInferredModel(OWLOntology abox, IRI modelId) { - Set statements = JavaConverters.setAsJavaSetConverter(SesameJena.ontologyAsTriples(abox)).asJava(); - Set triples = statements.stream().map(s -> Bridge.tripleFromJena(s.asTriple())).collect(Collectors.toSet()); - try { - // Using model's ontology IRI so that a spurious different ontology declaration triple isn't added - // OWLOntology schemaOntology = OWLManager.createOWLOntologyManager().createOntology(getOntology().getRBoxAxioms(Imports.INCLUDED), modelId); - // I think the re-use of the model IRI as the IRI of the rule ontology has some weird effects on the model in question, rendering its contents inaccesible. - OWLOntologyManager tmp_man = OWLManager.createOWLOntologyManager(); - OWLOntology schemaOntology = tmp_man.createOntology(IRI.create("http://therules.org")); - Set owl_rules = getOntology().getRBoxAxioms(Imports.INCLUDED); - tmp_man.addAxioms(schemaOntology, owl_rules); - // - Set schemaStatements = JavaConverters.setAsJavaSetConverter(SesameJena.ontologyAsTriples(schemaOntology)).asJava(); - triples.addAll(schemaStatements.stream().map(s -> Bridge.tripleFromJena(s.asTriple())).collect(Collectors.toSet())); - } catch (OWLOntologyCreationException e) { - LOG.error("Couldn't add rbox statements to data model.", e); - } - return getRuleEngine().processTriples(JavaConverters.asScalaSetConverter(triples).asScala()); - - } - - /** - * Return Arachne working memory representing LEGO model combined with inference rules. - * This model will not remain synchronized with changes to data. 
- * @param modelId - * @return Jena model - */ - public WorkingMemory createInferredModel(IRI modelId) { - return createInferredModel(getModelAbox(modelId), modelId); - } - - public WorkingMemory createCanonicalInferredModel(IRI modelId) { - //swap out any non-canonical types - OWLOntology source_abox = getModelAbox(modelId); - OWLOntologyManager aman = OWLManager.createOWLOntologyManager(); - OWLDataFactory df = aman.getOWLDataFactory(); - OWLAnnotationProperty canonical_record = df.getOWLAnnotationProperty(IRI.create("http://geneontology.org/lego/canonical_record")); - OWLReasonerFactory reasonerFactory = new StructuralReasonerFactory(); - try { - OWLOntology abox = aman.copyOntology(source_abox, OntologyCopy.DEEP); - OWLReasoner abox_reasoner = reasonerFactory.createReasoner(abox); - //convert to canonical wherever possible - abox.getIndividualsInSignature().forEach(i->{ - Set types = abox_reasoner.getTypes(i, true).getFlattened(); - for(OWLClass type : types) { - Collection canons = EntitySearcher.getAnnotationObjects(type, tbox, canonical_record); - //adding multiple types to an instance of a set object is - //probably not kosher.. but seems to work for now. - //more correct to create new instances for each - if(canons!=null&&canons.size()>0) { - for(OWLAnnotation canon : canons) { - if(canon.getValue().asIRI().isPresent()) { - OWLClass canonical = df.getOWLClass(canon.getValue().asIRI().get()); - //direct swap - //remove the old one - OWLClassAssertionAxiom original = df.getOWLClassAssertionAxiom(type, i); - aman.removeAxiom(abox, original); - //add the new one - OWLClassAssertionAxiom canonical_type = df.getOWLClassAssertionAxiom(canonical, i); - aman.addAxiom(abox, canonical_type); - } - } - } - } - }); - WorkingMemory inferred = createInferredModel(abox, modelId); - abox_reasoner.dispose(); - aman.removeOntology(abox); - return inferred; - } catch (OWLOntologyCreationException e) { - // TODO Auto-generated catch block - e.printStackTrace(); - return createInferredModel(source_abox, modelId); - } - } - - private void initializeLegacyRelationIndex() { - synchronized(legacyRelationIndex) { - OWLAnnotationProperty rdfsLabel = OWLManager.getOWLDataFactory().getRDFSLabel(); - for (OWLOntology ont : this.getOntology().getImportsClosure()) { - for (OWLObjectProperty prop : ont.getObjectPropertiesInSignature()) { - for (OWLAnnotationAssertionAxiom axiom : ont.getAnnotationAssertionAxioms(prop.getIRI())) { - if (axiom.getProperty().equals(rdfsLabel)) { - Optional literalOpt = axiom.getValue().asLiteral(); - if (literalOpt.isPresent()) { - String label = literalOpt.get().getLiteral(); - legacyRelationIndex.put(prop.getIRI(), label.replaceAll(" ", "_").replaceAll(",", "")); - } - } - } - } - } - } - } - - private void initializeTboxLabelIndex() { - synchronized(tboxLabelIndex) { - OWLAnnotationProperty rdfsLabel = OWLManager.getOWLDataFactory().getRDFSLabel(); - for (OWLAnnotationAssertionAxiom axiom : this.getOntology().getAxioms(AxiomType.ANNOTATION_ASSERTION, Imports.INCLUDED)) { - if (axiom.getProperty().equals(rdfsLabel) && (axiom.getSubject() instanceof IRI) && axiom.getValue() instanceof OWLLiteral) { - IRI subject = (IRI)(axiom.getSubject()); - String label = axiom.getValue().asLiteral().get().getLiteral(); - tboxLabelIndex.put(subject, label); - } - } - } - } - - private void initializeTboxShorthandIndex() { - synchronized(tboxShorthandIndex) { - for (OWLAnnotationAssertionAxiom axiom : this.getOntology().getAxioms(AxiomType.ANNOTATION_ASSERTION, Imports.INCLUDED)) { - if 
(axiom.getProperty().equals(HAS_SHORTHAND) && (axiom.getSubject() instanceof IRI) && axiom.getValue() instanceof OWLLiteral) { - IRI subject = (IRI)(axiom.getSubject()); - String shorthand = axiom.getValue().asLiteral().get().getLiteral(); - tboxShorthandIndex.put(subject, shorthand); - } - } - } - } - - private void initializeDoNotAnnotateSubset() { - synchronized(doNotAnnotateSubset) { - for (OWLAnnotationAssertionAxiom axiom : this.getOntology().getAxioms(AxiomType.ANNOTATION_ASSERTION, Imports.INCLUDED)) { - if (axiom.getProperty().equals(IN_SUBSET) && (axiom.getSubject() instanceof IRI) && DO_NOT_ANNOTATE_SUBSETS.contains(axiom.getValue())) { - doNotAnnotateSubset.add((IRI)(axiom.getSubject())); - } - } - } - } - - /** - * Add additional import declarations for any newly generated model. - * - * @param imports - */ - public void addImports(Iterable imports) { - if (imports != null) { - for (String importIRIString : imports) { - additionalImports.add(IRI.create(importIRIString)); - } - } - } - - public Collection getImports() { - Set allImports = new HashSet(); - allImports.add(tboxIRI); - allImports.addAll(additionalImports); - return allImports; - } - - /** - * - * @param modelId - * @return all individuals in the model - */ - public Set getIndividuals(IRI modelId) { - ModelContainer mod = getModel(modelId); - return mod.getAboxOntology().getIndividualsInSignature(); - } - - - // /** - // * @param mod - // * @param q - // * @return all individuals in the model that satisfy q - // */ - // public Set getIndividualsByQuery(ModelContainer mod, OWLClassExpression q) { - // return mod.getReasoner().getInstances(q, false).getFlattened(); - // } - - /** - * @param model - * @param ce - * @param metadata - * @return individual - */ - public OWLNamedIndividual createIndividual(ModelContainer model, OWLClassExpression ce, METADATA metadata) { - OWLNamedIndividual individual = createIndividual(model, ce, null, metadata); - return individual; - } - - OWLNamedIndividual createIndividual(ModelContainer model, OWLClassExpression ce, Set annotations, METADATA metadata) { - Pair> pair = createIndividual(model.getModelId(), model.getAboxOntology(), ce, annotations); - addAxioms(model, pair.getRight(), metadata); - return pair.getLeft(); - } - - OWLNamedIndividual createIndividualWithIRI(ModelContainer model, IRI individualIRI, Set annotations, METADATA metadata) { - Pair> pair = createIndividualInternal(individualIRI, model.getAboxOntology(), null, annotations); - addAxioms(model, pair.getRight(), metadata); - return pair.getLeft(); - } - - public static Pair> createIndividual(IRI modelId, OWLOntology abox, OWLClassExpression ce, Set annotations) { - IRI iri = generateId(modelId, "/"); - return createIndividualInternal(iri, abox, ce, annotations); - } - - private static Pair> createIndividualInternal(IRI iri, OWLOntology abox, OWLClassExpression ce, Set annotations) { - LOG.info("Generating individual for IRI: "+iri); - OWLDataFactory f = abox.getOWLOntologyManager().getOWLDataFactory(); - OWLNamedIndividual i = f.getOWLNamedIndividual(iri); - - // create axioms - Set axioms = new HashSet(); - // declaration - axioms.add(f.getOWLDeclarationAxiom(i)); - // annotation assertions - if(annotations != null) { - for(OWLAnnotation annotation : annotations) { - axioms.add(f.getOWLAnnotationAssertionAxiom(iri, annotation)); - } - } - - if (ce != null) { - OWLClassAssertionAxiom typeAxiom = createType(f, i, ce); - if (typeAxiom != null) { - axioms.add(typeAxiom); - } - } - - return Pair.of(i, axioms); - } - - 
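For orientation, a minimal sketch of how the static createIndividual helper above can be used to mint a new individual in a model A-box. The model IRI and class IRI below are hypothetical placeholders, and the generic signature (Pair<OWLNamedIndividual, Set<OWLAxiom>>) is inferred from the method body, since type parameters are not visible in this rendering.

import org.apache.commons.lang3.tuple.Pair;
import org.geneontology.minerva.CoreMolecularModelManager;
import org.semanticweb.owlapi.apibinding.OWLManager;
import org.semanticweb.owlapi.model.*;

import java.util.Set;

public class CreateIndividualSketch {
    public static void main(String[] args) throws OWLOntologyCreationException {
        OWLOntologyManager man = OWLManager.createOWLOntologyManager();
        OWLDataFactory df = man.getOWLDataFactory();
        // hypothetical model IRI; real identifiers are minted by the server via generateId
        IRI modelId = IRI.create("http://model.geneontology.org/0000000000000001");
        OWLOntology abox = man.createOntology(modelId);
        OWLClass type = df.getOWLClass(IRI.create("http://purl.obolibrary.org/obo/GO_0003674"));
        // returns the new individual plus the declaration/type axioms that still need to be added to the A-box
        Pair<OWLNamedIndividual, Set<OWLAxiom>> pair =
                CoreMolecularModelManager.createIndividual(modelId, abox, type, null);
        man.addAxioms(abox, pair.getRight());
        System.out.println("minted " + pair.getLeft().getIRI());
    }
}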
public static class DeleteInformation { - public final Set usedIRIs = new HashSet(); - public final Set updated = new HashSet(); - public final Set touched = new HashSet(); - } - - /** - * Deletes an individual and return all IRIs used as an annotation value. - * Also tries to delete all annotations (OWLObjectPropertyAssertionAxiom - * annotations and OWLAnnotationAssertionAxiom) with the individual IRI as - * value. - * - * @param model - * @param i - * @param metadata - * @return set of IRIs used in annotations - */ - public DeleteInformation deleteIndividual(ModelContainer model, OWLNamedIndividual i, METADATA metadata) { - Set toRemoveAxioms = new HashSet(); - final DeleteInformation deleteInformation = new DeleteInformation(); - - final OWLOntology ont = model.getAboxOntology(); - final OWLDataFactory f = model.getOWLDataFactory(); - - // Declaration axiom - toRemoveAxioms.add(model.getOWLDataFactory().getOWLDeclarationAxiom(i)); - - // Logic axiom - for (OWLAxiom ax : ont.getAxioms(i, Imports.EXCLUDED)) { - extractEvidenceIRIValues(ax.getAnnotations(), deleteInformation.usedIRIs); - toRemoveAxioms.add(ax); - } - - // OWLObjectPropertyAssertionAxiom - Set allAssertions = ont.getAxioms(AxiomType.OBJECT_PROPERTY_ASSERTION); - final IRI iIRI = i.getIRI(); - for (OWLObjectPropertyAssertionAxiom ax : allAssertions) { - if (toRemoveAxioms.contains(ax) == false) { - Set currentIndividuals = ax.getIndividualsInSignature(); - if (currentIndividuals.contains(i)) { - extractEvidenceIRIValues(ax.getAnnotations(), deleteInformation.usedIRIs); - toRemoveAxioms.add(ax); - continue; - } - // check annotations for deleted individual IRI - Set annotations = ax.getAnnotations(); - Set removeAnnotations = new HashSet(); - for (OWLAnnotation annotation : annotations) { - if (iIRI.equals(annotation.getValue())) { - removeAnnotations.add(annotation); - } - } - // if there is an annotations that needs to be removed, - // recreate axiom with cleaned annotation set - if (removeAnnotations.isEmpty() == false) { - annotations.removeAll(removeAnnotations); - toRemoveAxioms.add(ax); - deleteInformation.updated.add(f. 
- getOWLObjectPropertyAssertionAxiom( - ax.getProperty(), ax.getSubject(), ax.getObject(), annotations)); - } - } - } - // OWLAnnotationAssertionAxiom - Set annotationAssertionAxioms = ont.getAnnotationAssertionAxioms(i.getIRI()); - for (OWLAnnotationAssertionAxiom axiom : annotationAssertionAxioms) { - extractEvidenceIRIValues(axiom.getAnnotation(), deleteInformation.usedIRIs); - toRemoveAxioms.add(axiom); - } - - // search for all annotations which use individual IRI as value - Set axioms = ont.getAxioms(AxiomType.ANNOTATION_ASSERTION); - for (OWLAnnotationAssertionAxiom ax : axioms) { - if (toRemoveAxioms.contains(ax) == false) { - if (iIRI.equals(ax.getValue())) { - toRemoveAxioms.add(ax); - OWLAnnotationSubject subject = ax.getSubject(); - subject.accept(new OWLAnnotationSubjectVisitor() { - - @Override - public void visit(OWLAnonymousIndividual individual) { - // do nothing - } - - @Override - public void visit(IRI iri) { - // check if they subject is a declared named individual - if (ont.containsIndividualInSignature(iri)) { - deleteInformation.touched.add(iri); - } - } - }); - } - } - } - - removeAxioms(model, toRemoveAxioms, metadata); - if (deleteInformation.updated.isEmpty() == false) { - addAxioms(model, deleteInformation.updated, metadata); - } - - return deleteInformation; - } - - public static Set extractEvidenceIRIValues(Set annotations) { - if (annotations == null || annotations.isEmpty()) { - return Collections.emptySet(); - } - Set iriSet = new HashSet(); - extractEvidenceIRIValues(annotations, iriSet); - return iriSet; - } - - private static void extractEvidenceIRIValues(Set annotations, final Set iriSet) { - if (annotations != null) { - for (OWLAnnotation annotation : annotations) { - extractEvidenceIRIValues(annotation, iriSet); - } - } - } - - private static void extractEvidenceIRIValues(OWLAnnotation annotation, final Set iriSet) { - if (annotation != null) { - OWLAnnotationProperty property = annotation.getProperty(); - if (HAS_EVIDENCE_IRI.equals(property.getIRI()) || HAS_EVIDENCE_IRI_OLD.equals(property.getIRI())){ - annotation.getValue().accept(new OWLAnnotationValueVisitor() { - - @Override - public void visit(OWLLiteral literal) { - // ignore - } - - @Override - public void visit(OWLAnonymousIndividual individual) { - // ignore - } - - @Override - public void visit(IRI iri) { - iriSet.add(iri); - } - }); - } - } - } - - public void addAnnotations(ModelContainer model, OWLNamedIndividual i, Collection annotations, METADATA metadata) { - addAnnotations(model, i.getIRI(), annotations, metadata); - } - - public void addAnnotations(ModelContainer model, IRI subject, Collection annotations, METADATA metadata) { - Set axioms = new HashSet(); - OWLDataFactory f = model.getOWLDataFactory(); - for (OWLAnnotation annotation : annotations) { - axioms.add(f.getOWLAnnotationAssertionAxiom(subject, annotation)); - } - addAxioms(model, axioms, metadata); - } - - public void updateAnnotation(ModelContainer model, IRI subject, OWLAnnotation update, METADATA metadata) { - Set removeAxioms = new HashSet(); - OWLDataFactory f = model.getOWLDataFactory(); - Set existing = model.getAboxOntology().getAnnotationAssertionAxioms(subject); - OWLAnnotationProperty target = update.getProperty(); - for (OWLAnnotationAssertionAxiom axiom : existing) { - if (target.equals(axiom.getProperty())) { - removeAxioms.add(axiom); - } - } - removeAxioms(model, removeAxioms, metadata); - addAxiom(model, f.getOWLAnnotationAssertionAxiom(subject, update), metadata); - } - - public void 
addModelAnnotations(ModelContainer model, Collection annotations, METADATA metadata) { - OWLOntology aBox = model.getAboxOntology(); - List changes = new ArrayList(); - for (OWLAnnotation annotation : annotations) { - changes.add(new AddOntologyAnnotation(aBox, annotation)); - } - applyChanges(model, changes, metadata); - } - - public void updateAnnotation(ModelContainer model, OWLAnnotation update, METADATA metadata) { - OWLOntology aBox = model.getAboxOntology(); - List changes = new ArrayList(); - Set existing = model.getAboxOntology().getAnnotations(); - OWLAnnotationProperty target = update.getProperty(); - for (OWLAnnotation annotation : existing) { - if (target.equals(annotation.getProperty())) { - changes.add(new RemoveOntologyAnnotation(aBox, annotation)); - } - } - changes.add(new AddOntologyAnnotation(aBox, update)); - applyChanges(model, changes, metadata); - } - - public void removeAnnotations(ModelContainer model, OWLNamedIndividual i, Collection annotations, METADATA metadata) { - removeAnnotations(model, i.getIRI(), annotations, metadata); - } - - void removeAnnotations(ModelContainer model, IRI subject, Collection annotations, METADATA metadata) { - OWLOntology ont = model.getAboxOntology(); - Set toRemove = new HashSet(); - Set candidates = ont.getAnnotationAssertionAxioms(subject); - for (OWLAnnotationAssertionAxiom axiom : candidates) { - OWLAnnotation annotation = axiom.getAnnotation(); - if (annotations.contains(annotation)) { - toRemove.add(axiom); - } - } - removeAxioms(model, toRemove, metadata); - } - - public void removeAnnotations(ModelContainer model, Collection annotations, METADATA metadata) { - OWLOntology aBox = model.getAboxOntology(); - List changes = new ArrayList(); - for (OWLAnnotation annotation : annotations) { - changes.add(new RemoveOntologyAnnotation(aBox, annotation)); - } - applyChanges(model, changes, metadata); - } - - public void addDataProperty(ModelContainer model, - OWLNamedIndividual i, OWLDataProperty prop, OWLLiteral literal, - METADATA metadata) { - OWLAxiom axiom = model.getOWLDataFactory().getOWLDataPropertyAssertionAxiom(prop, i, literal); - addAxiom(model, axiom, metadata); - } - - public void removeDataProperty(ModelContainer model, - OWLNamedIndividual i, OWLDataProperty prop, OWLLiteral literal, - METADATA metadata) { - OWLAxiom toRemove = null; - Set existing = model.getAboxOntology().getDataPropertyAssertionAxioms(i); - for (OWLDataPropertyAssertionAxiom ax : existing) { - if (prop.equals(ax.getProperty()) && literal.equals(ax.getObject())) { - toRemove = ax; - break; - } - } - - if (toRemove != null) { - removeAxiom(model, toRemove, metadata); - } - } - - /** - * Fetches a model by its Id - * - * @param id - * @return wrapped model - */ - public ModelContainer getModel(IRI id) { - synchronized (modelMap) { - // synchronized to avoid race condition for simultaneous loads of the same model - if (!modelMap.containsKey(id)) { - try { - loadModel(id, false); - } catch (OWLOntologyCreationException e) { - LOG.info("Could not load model with id: "+id, e); - } - } - return modelMap.get(id); - } - } - - /** - * Retrieve the abox ontology. May skip loading the imports. - * This method is mostly intended to read metadata from a model. 
- * - * @param id - * @return abox, maybe without any imports loaded - */ - public OWLOntology getModelAbox(IRI id) { - ModelContainer model = modelMap.get(id); - if (model != null) { - return model.getAboxOntology(); - } - OWLOntology abox = null; - try { - abox = loadModelABox(id); - } catch (OWLOntologyCreationException e) { - LOG.info("Could not load model with id: "+id, e); - } - return abox; - } - - public boolean isModelModified(IRI modelId) { - ModelContainer model = modelMap.get(modelId); - if (model != null) { - // ask model about modification - return model.isModified(); - } - // non in-memory models are considered not modified. - return false; - } - - /** - * @param modelId - * @return ontology - * @throws OWLOntologyCreationException - */ - protected abstract OWLOntology loadModelABox(IRI modelId) throws OWLOntologyCreationException; - - /** - * @param modelId - * @param manager - * @return ontology - * @throws OWLOntologyCreationException - */ - protected abstract OWLOntology loadModelABox(IRI modelId, OWLOntologyManager manager) throws OWLOntologyCreationException; - - /** - * @param id - */ - public void unlinkModel(IRI id) { - ModelContainer model = modelMap.get(id); - model.dispose(); - modelMap.remove(id); - } - - /** - * @return ids for all loaded models - */ - public Set getModelIds() { - return modelMap.keySet(); - } - - /** - * internal method to cleanup this instance - */ - public void dispose() { - Set ids = new HashSet(getModelIds()); - for (IRI id : ids) { - unlinkModel(id); - } - } - - /** - * Export the ABox, will try to set the ontologyID to the given modelId (to - * ensure import assumptions are met) - * - * @param model - * @param ontologyFormat - * @return modelContent - * @throws OWLOntologyStorageException - */ - public String exportModel(ModelContainer model, OWLDocumentFormat ontologyFormat) throws OWLOntologyStorageException { - final OWLOntology aBox = model.getAboxOntology(); - final OWLOntologyManager manager = aBox.getOWLOntologyManager(); - - // make sure the exported ontology has an ontologyId and that it maps to the modelId - final IRI expectedABoxIRI = model.getModelId(); - Optional currentABoxIRI = aBox.getOntologyID().getOntologyIRI(); - if (currentABoxIRI.isPresent() == false) { - manager.applyChange(new SetOntologyID(aBox, expectedABoxIRI)); - } - else { - if (expectedABoxIRI.equals(currentABoxIRI) == false) { - OWLOntologyID ontologyID = new OWLOntologyID(Optional.of(expectedABoxIRI), Optional.of(expectedABoxIRI)); - manager.applyChange(new SetOntologyID(aBox, ontologyID)); - } - } - - // write the model into a buffer - ByteArrayOutputStream outputStream = new ByteArrayOutputStream(); - if (ontologyFormat != null) { - manager.saveOntology(aBox, ontologyFormat, outputStream); - } - else { - manager.saveOntology(aBox, outputStream); - } - - // extract the string from the buffer - String modelString = outputStream.toString(); - return modelString; - } - - /** - * Try to load (or replace) a model with the given ontology. It is expected - * that the content is an A-Box ontology, which imports the T-BOX. Also the - * ontology ID is used to extract the modelId.
- *
- * This method will currently NOT work due to a bug in the OWL-API. - * The functional syntax parser does not properly report the exceptions and - * will return an ontology with an wrong ontology ID! - * - * @param modelData - * @return modelId - * @throws OWLOntologyCreationException - */ - public ModelContainer importModel(String modelData) throws OWLOntologyCreationException { - // load data from String - final OWLOntologyManager manager = tbox.getOWLOntologyManager(); - final OWLOntologyDocumentSource documentSource = new StringDocumentSource(modelData); - OWLOntology modelOntology; - final Set originalFactories = removeOBOParserFactories(manager); - try { - modelOntology = manager.loadOntologyFromOntologyDocument(documentSource); - } - catch (OWLOntologyAlreadyExistsException e) { - // exception is thrown if there is an ontology with the same ID already in memory - OWLOntologyID id = e.getOntologyID(); - IRI existingModelId = id.getOntologyIRI().orNull(); - - // remove the existing memory model - unlinkModel(existingModelId); - - // try loading the import version (again) - modelOntology = manager.loadOntologyFromOntologyDocument(documentSource); - } - finally { - resetOBOParserFactories(manager, originalFactories); - } - - // try to extract modelId - IRI modelId = null; - Optional ontologyIRI = modelOntology.getOntologyID().getOntologyIRI(); - if (ontologyIRI.isPresent()) { - modelId = ontologyIRI.get(); - } - if (modelId == null) { - throw new OWLOntologyCreationException("Could not extract the modelId from the given model"); - } - // paranoia check - ModelContainer existingModel = modelMap.get(modelId); - if (existingModel != null) { - unlinkModel(modelId); - } - - // add to internal model - ModelContainer newModel = addModel(modelId, modelOntology); - - return newModel; - } - - protected abstract void loadModel(IRI modelId, boolean isOverride) throws OWLOntologyCreationException; - - ModelContainer addModel(IRI modelId, OWLOntology abox) throws OWLOntologyCreationException { - ModelContainer m = new ModelContainer(modelId, tbox, abox); - modelMap.put(modelId, m); - return m; - } - - /** - * Adds ClassAssertion(c,i) to specified model - * - * @param modelId - * @param i - * @param c - * @param metadata - */ - public void addType(IRI modelId, OWLNamedIndividual i, OWLClass c, METADATA metadata) { - ModelContainer model = getModel(modelId); - addType(model, i, c, metadata); - } - - /** - * Adds ClassAssertion(c,i) to specified model - * - * @param model - * @param i - * @param c - * @param metadata - */ - public void addType(ModelContainer model, OWLIndividual i, - OWLClassExpression c, METADATA metadata) { - OWLClassAssertionAxiom axiom = createType(model.getOWLDataFactory(), i, c); - addAxiom(model, axiom, metadata); - } - - /** - * @param f - * @param i - * @param c - * @return axiom - */ - public static OWLClassAssertionAxiom createType(OWLDataFactory f, OWLIndividual i, OWLClassExpression c) { - OWLClassAssertionAxiom axiom = f.getOWLClassAssertionAxiom(c,i); - return axiom; - } - - /** - * Adds a ClassAssertion, where the class expression instantiated is an - * ObjectSomeValuesFrom expression - * - * Example: Individual: i Type: enabledBy some PRO_123 - * - * @param modelId - * @param i - * @param p - * @param filler - * @param metadata - */ - public void addType(IRI modelId, - OWLNamedIndividual i, - OWLObjectPropertyExpression p, - OWLClassExpression filler, - METADATA metadata) { - ModelContainer model = getModel(modelId); - addType(model, i, p, filler, metadata); - } - - 
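As a concrete illustration of the javadoc example ("Individual: i Type: enabledBy some PRO_123"), the sketch below builds roughly the axiom that addType assembles via the OWL-API. The individual IRI and the PRO class IRI are hypothetical placeholders, and RO_0002333 ("enabled by") stands in for the enabledBy relation.

import org.semanticweb.owlapi.apibinding.OWLManager;
import org.semanticweb.owlapi.model.*;

public class SomeValuesFromTypeSketch {
    public static void main(String[] args) {
        OWLDataFactory f = OWLManager.getOWLDataFactory();
        // hypothetical individual and filler IRIs, for illustration only
        OWLNamedIndividual i = f.getOWLNamedIndividual(IRI.create("http://model.geneontology.org/0000000000000001/i1"));
        OWLObjectProperty enabledBy = f.getOWLObjectProperty(IRI.create("http://purl.obolibrary.org/obo/RO_0002333"));
        OWLClass filler = f.getOWLClass(IRI.create("http://purl.obolibrary.org/obo/PR_000000123"));
        // Individual: i Type: enabledBy some <filler>
        OWLObjectSomeValuesFrom ce = f.getOWLObjectSomeValuesFrom(enabledBy, filler);
        OWLClassAssertionAxiom ax = f.getOWLClassAssertionAxiom(ce, i);
        System.out.println(ax);
    }
}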
/** - * Adds a ClassAssertion, where the class expression instantiated is an - * ObjectSomeValuesFrom expression - * - * Example: Individual: i Type: enabledBy some PRO_123 - * - * @param model - * @param i - * @param p - * @param filler - * @param metadata - */ - void addType(ModelContainer model, - OWLIndividual i, - OWLObjectPropertyExpression p, - OWLClassExpression filler, - METADATA metadata) { - if (LOG.isDebugEnabled()) { - LOG.debug("Adding "+i+ " type "+p+" some "+filler); - } - OWLDataFactory f = model.getOWLDataFactory(); - OWLObjectSomeValuesFrom c = f.getOWLObjectSomeValuesFrom(p, filler); - OWLClassAssertionAxiom axiom = f.getOWLClassAssertionAxiom(c, i); - addAxiom(model, axiom, metadata); - } - - /** - * remove ClassAssertion(c,i) from the model - * - * @param model - * @param i - * @param ce - * @param metadata - */ - public void removeType(ModelContainer model, OWLIndividual i, - OWLClassExpression ce, METADATA metadata) { - Set allAxioms = model.getAboxOntology().getClassAssertionAxioms(i); - // use search to remove also axioms with annotations - for (OWLClassAssertionAxiom ax : allAxioms) { - if (ce.equals(ax.getClassExpression())) { - removeAxiom(model, ax, metadata); - } - } - - } - - void removeType(ModelContainer model, - OWLIndividual i, - OWLObjectPropertyExpression p, - OWLClassExpression filler, - METADATA metadata) { - OWLDataFactory f = model.getOWLDataFactory(); - OWLClassAssertionAxiom axiom = f.getOWLClassAssertionAxiom(f.getOWLObjectSomeValuesFrom(p, filler), i); - removeAxiom(model, axiom, metadata); - } - - public void addFact(ModelContainer model, OBOUpperVocabulary vocabElement, - OWLNamedIndividual i, OWLNamedIndividual j, Set annotations, METADATA metadata) { - OWLObjectProperty p = vocabElement.getObjectProperty(model.getAboxOntology()); - addFact(model, p, i, j, annotations, metadata); - } - - public void addFact(ModelContainer model, OWLObjectPropertyExpression p, - OWLIndividual i, OWLIndividual j, Set annotations, METADATA metadata) { - OWLObjectPropertyAssertionAxiom axiom = createFact(model.getOWLDataFactory(), p, i, j, annotations); - addAxiom(model, axiom, metadata); - } - - /** - * @param f - * @param p - * @param i - * @param j - * @param annotations - * @return axiom - */ - public static OWLObjectPropertyAssertionAxiom createFact(OWLDataFactory f, - OWLObjectPropertyExpression p, OWLIndividual i, OWLIndividual j, - Set annotations) { - final OWLObjectPropertyAssertionAxiom axiom; - if (annotations != null && !annotations.isEmpty()) { - axiom = f.getOWLObjectPropertyAssertionAxiom(p, i, j, annotations); - } - else { - axiom = f.getOWLObjectPropertyAssertionAxiom(p, i, j); - } - return axiom; - } - - public Set removeFact(ModelContainer model, OWLObjectPropertyExpression p, - OWLIndividual i, OWLIndividual j, METADATA metadata) { - OWLDataFactory f = model.getOWLDataFactory(); - - OWLOntology ont = model.getAboxOntology(); - OWLAxiom toRemove = null; - Set iriSet = new HashSet(); - Set candidates = ont.getObjectPropertyAssertionAxioms(i); - for (OWLObjectPropertyAssertionAxiom axiom : candidates) { - if (p.equals(axiom.getProperty()) && j.equals(axiom.getObject())) { - toRemove = axiom; - extractEvidenceIRIValues(axiom.getAnnotations(), iriSet); - break; - } - } - if (toRemove == null) { - // fall back solution - toRemove = f.getOWLObjectPropertyAssertionAxiom(p, i, j); - } - removeAxiom(model, toRemove, metadata); - return iriSet; - } - - public void addAnnotations(ModelContainer model, OWLObjectPropertyExpression p, - OWLNamedIndividual 
i, OWLNamedIndividual j, Set annotations, - METADATA metadata) { - OWLOntology ont = model.getAboxOntology(); - Set axioms = ont.getObjectPropertyAssertionAxioms(i); - OWLObjectPropertyAssertionAxiom toModify = null; - for (OWLObjectPropertyAssertionAxiom axiom : axioms) { - if (p.equals(axiom.getProperty()) && j.equals(axiom.getObject())) { - toModify = axiom; - break; - } - } - addAnnotations(model, toModify, annotations, metadata); - } - - void addAnnotations(ModelContainer model, OWLObjectPropertyAssertionAxiom toModify, - Set annotations, METADATA metadata) { - if (toModify != null) { - Set combindedAnnotations = new HashSet(annotations); - combindedAnnotations.addAll(toModify.getAnnotations()); - modifyAnnotations(toModify, combindedAnnotations, model, metadata); - } - } - - public void updateAnnotation(ModelContainer model, OWLObjectPropertyExpression p, - OWLNamedIndividual i, OWLNamedIndividual j, OWLAnnotation update, - METADATA metadata) { - OWLOntology ont = model.getAboxOntology(); - Set axioms = ont.getObjectPropertyAssertionAxioms(i); - OWLObjectPropertyAssertionAxiom toModify = null; - for (OWLObjectPropertyAssertionAxiom axiom : axioms) { - if (p.equals(axiom.getProperty()) && j.equals(axiom.getObject())) { - toModify = axiom; - break; - } - } - updateAnnotation(model, toModify, update, metadata); - } - - OWLObjectPropertyAssertionAxiom updateAnnotation(ModelContainer model, - OWLObjectPropertyAssertionAxiom toModify, OWLAnnotation update, - METADATA metadata) { - OWLObjectPropertyAssertionAxiom newAxiom = null; - if (toModify != null) { - Set combindedAnnotations = new HashSet(); - OWLAnnotationProperty target = update.getProperty(); - for(OWLAnnotation existing : toModify.getAnnotations()) { - if (target.equals(existing.getProperty()) == false) { - combindedAnnotations.add(existing); - } - } - combindedAnnotations.add(update); - newAxiom = modifyAnnotations(toModify, combindedAnnotations, model, metadata); - } - return newAxiom; - } - - public OWLObjectPropertyAssertionAxiom removeAnnotations(ModelContainer model, OWLObjectPropertyExpression p, - OWLNamedIndividual i, OWLNamedIndividual j, Set annotations, METADATA metadata) { - OWLOntology ont = model.getAboxOntology(); - Set axioms = ont.getObjectPropertyAssertionAxioms(i); - OWLObjectPropertyAssertionAxiom toModify = null; - for (OWLObjectPropertyAssertionAxiom axiom : axioms) { - if (p.equals(axiom.getProperty()) && j.equals(axiom.getObject())) { - toModify = axiom; - break; - } - } - OWLObjectPropertyAssertionAxiom newAxiom = null; - if (toModify != null) { - Set combindedAnnotations = new HashSet(toModify.getAnnotations()); - combindedAnnotations.removeAll(annotations); - newAxiom = modifyAnnotations(toModify, combindedAnnotations, model, metadata); - } - return newAxiom; - } - - private OWLObjectPropertyAssertionAxiom modifyAnnotations(OWLObjectPropertyAssertionAxiom axiom, - Set replacement, - ModelContainer model, METADATA metadata) { - OWLOntology ont = model.getAboxOntology(); - OWLDataFactory f = model.getOWLDataFactory(); - List changes = new ArrayList(2); - changes.add(new RemoveAxiom(ont, axiom)); - OWLObjectPropertyAssertionAxiom newAxiom = - f.getOWLObjectPropertyAssertionAxiom(axiom.getProperty(), axiom.getSubject(), axiom.getObject(), replacement); - changes.add(new AddAxiom(ont, newAxiom)); - applyChanges(model, changes, metadata); - return newAxiom; - } - - public void addAxiom(ModelContainer model, OWLAxiom axiom, METADATA metadata) { - OWLOntology ont = model.getAboxOntology(); - List changes = 
Collections.singletonList(new AddAxiom(ont, axiom)); - synchronized (ont) { - /* - * all changes to the ontology are synchronized via the ontology object - */ - applyChanges(model, ont.getOWLOntologyManager(), changes, metadata); - } - } - - void addAxioms(ModelContainer model, Set axioms, METADATA metadata) { - OWLOntology ont = model.getAboxOntology(); - List changes = new ArrayList(axioms.size()); - for(OWLAxiom axiom : axioms) { - changes.add(new AddAxiom(ont, axiom)); - } - synchronized (ont) { - /* - * all changes to the ontology are synchronized via the ontology object - */ - applyChanges(model, ont.getOWLOntologyManager(), changes, metadata); - } - } - - void removeAxiom(ModelContainer model, OWLAxiom axiom, METADATA metadata) { - OWLOntology ont = model.getAboxOntology(); - List changes = Collections.singletonList(new RemoveAxiom(ont, axiom)); - synchronized (ont) { - /* - * all changes to the ontology are synchronized via the ontology object - */ - applyChanges(model, ont.getOWLOntologyManager(), changes, metadata); - } - } - - void removeAxioms(IRI modelId, Set axioms, METADATA metadata) { - ModelContainer model = getModel(modelId); - removeAxioms(model, axioms, metadata); - } - - void removeAxioms(ModelContainer model, Set axioms, METADATA metadata) { - OWLOntology ont = model.getAboxOntology(); - List changes = new ArrayList(axioms.size()); - for(OWLAxiom axiom : axioms) { - changes.add(new RemoveAxiom(ont, axiom)); - } - synchronized (ont) { - /* - * all changes to the ontology are synchronized via the ontology object - */ - applyChanges(model, ont.getOWLOntologyManager(), changes, metadata); - } - } - - private void applyChanges(ModelContainer model, List changes, METADATA metadata) { - OWLOntology ont = model.getAboxOntology(); - synchronized (ont) { - /* - * all changes to the ontology are synchronized via the ontology object - */ - applyChanges(model, ont.getOWLOntologyManager(), changes, metadata); - } - } - - private void applyChanges(ModelContainer model, OWLOntologyManager m, - List changes, METADATA metadata) { - List appliedChanges = model.applyChanges(changes); - addToHistory(model, appliedChanges, metadata); - } - - /** - * Hook for implementing an undo and redo. - * - * @param model - * @param appliedChanges - * @param metadata - */ - protected void addToHistory(ModelContainer model, - List appliedChanges, METADATA metadata) { - // do nothing, for now - } - - protected OWLOntology loadOntologyDocumentSource(final OWLOntologyDocumentSource source, boolean minimal) throws OWLOntologyCreationException { - return loadOntologyDocumentSource(source, minimal, tbox.getOWLOntologyManager()); - } - - public static OWLOntology loadOntologyDocumentSource(final OWLOntologyDocumentSource source, boolean minimal, OWLOntologyManager manager) throws OWLOntologyCreationException { - // silence the OBO parser in the OWL-API - java.util.logging.Logger.getLogger("org.obolibrary").setLevel(java.util.logging.Level.SEVERE); - final Set originalFactories = removeOBOParserFactories(manager); - try { - // load model from source - if (minimal == false) { - //this gets the model to load all the OWL properly because it is using the tbox manager - //otherwise it doesn't understand the object properties. 
- OWLOntology abox_tbox_manager = loadOWLOntologyDocumentSource(source, manager); - //unfortunately it bizarrely does not retrieve the http://purl.org/dc/elements/1.1/title annotation - return abox_tbox_manager; - } - else { - // only load the model, skip imports - // approach: return an empty ontology IRI for any IRI mapping request using. - final OWLOntologyManager m = OWLManager.createOWLOntologyManager(); - final Set emptyOntologies = new HashSet(); - m.getIRIMappers().add(new OWLOntologyIRIMapper() { - - // generated - private static final long serialVersionUID = -8200679663396870351L; - - @Override - public IRI getDocumentIRI(IRI ontologyIRI) { - - // quick check: - // do nothing for the original IRI and known empty ontologies - if (source.getDocumentIRI().equals(ontologyIRI) || emptyOntologies.contains(ontologyIRI)) { - return null; - } - emptyOntologies.add(ontologyIRI); - try { - OWLOntology emptyOntology = m.createOntology(ontologyIRI); - return emptyOntology.getOntologyID().getDefaultDocumentIRI().orNull(); - } catch (OWLOntologyCreationException e) { - throw new RuntimeException(e); - } - } - }); - OWLOntology minimalAbox = loadOWLOntologyDocumentSource(source, m); - return minimalAbox; - } - } finally { - resetOBOParserFactories(manager, originalFactories); - } - } - - private static OWLOntology loadOWLOntologyDocumentSource(final OWLOntologyDocumentSource source, final OWLOntologyManager manager) throws OWLOntologyCreationException { - final OWLOntology ontology; - if (source instanceof RioMemoryTripleSource) { - RioParserImpl parser = new RioParserImpl(new RioRDFXMLDocumentFormatFactory()); - ontology = manager.createOntology(); - OWLOntologyLoaderConfiguration config = new OWLOntologyLoaderConfiguration(); - try { - parser.parse(source, ontology, config); - } catch (IOException e) { - throw new OWLOntologyCreationException(e); - } - } else { - ontology = manager.loadOntologyFromOntologyDocument(source); - } - return ontology; - } - - public OWLOntology getTbox() { - return tbox; - } - - public BlazegraphOntologyManager getGolego_repo() { - return go_lego_repo; - } - - - /** - * even if the manager has loaded a property before, and should know what kind it is, - if the next ontology doesn't include an import statement or otherwise declare the properties used, the loader will guess that - object properties are annotation properties and screw up. 
- This purifies the gocam - * @param ont - * @return - * @throws OWLOntologyCreationException - */ - public static OWLOntology fixBrokenObjectPropertiesAndAxioms(OWLOntology ont) throws OWLOntologyCreationException { - OWLOntologyManager newman = OWLManager.createOWLOntologyManager(); - OWLOntology frank = newman.createOntology(ont.getOntologyID()); - OWLDataFactory df = newman.getOWLDataFactory(); - - //declare known annotation properties - OWLAnnotationProperty title_prop = df.getOWLAnnotationProperty(IRI.create("http://purl.org/dc/elements/1.1/title")); - OWLDeclarationAxiom title_prop_declaration = df.getOWLDeclarationAxiom(title_prop); - newman.addAxiom(frank, title_prop_declaration); - OWLAnnotationProperty title_prop2 = df.getOWLAnnotationProperty(IRI.create("http://purl.org/dc/terms/title")); - OWLDeclarationAxiom title_prop2_declaration = df.getOWLDeclarationAxiom(title_prop2); - newman.addAxiom(frank, title_prop2_declaration); - OWLAnnotationProperty skos_note = df.getOWLAnnotationProperty(IRI.create("http://www.w3.org/2004/02/skos/core#note")); - newman.addAxiom(frank, df.getOWLDeclarationAxiom(skos_note)); - OWLAnnotationProperty version_info = df.getOWLAnnotationProperty(IRI.create(OWL.versionInfo.getURI())); - newman.addAxiom(frank, df.getOWLDeclarationAxiom(version_info)); - OWLAnnotationProperty contributor_prop = df.getOWLAnnotationProperty(IRI.create("http://purl.org/dc/elements/1.1/contributor")); - newman.addAxiom(frank, df.getOWLDeclarationAxiom(contributor_prop)); - OWLAnnotationProperty date_prop = df.getOWLAnnotationProperty(IRI.create("http://purl.org/dc/elements/1.1/date")); - newman.addAxiom(frank, df.getOWLDeclarationAxiom(date_prop)); - OWLAnnotationProperty source_prop = df.getOWLAnnotationProperty(IRI.create("http://purl.org/dc/elements/1.1/source")); - newman.addAxiom(frank, df.getOWLDeclarationAxiom(source_prop)); - OWLAnnotationProperty state_prop = df.getOWLAnnotationProperty(IRI.create("http://geneontology.org/lego/modelstate")); - newman.addAxiom(frank, df.getOWLDeclarationAxiom(state_prop)); - OWLAnnotationProperty evidence_prop = df.getOWLAnnotationProperty(IRI.create("http://geneontology.org/lego/evidence")); - newman.addAxiom(frank, df.getOWLDeclarationAxiom(evidence_prop)); - OWLAnnotationProperty provided_by_prop = df.getOWLAnnotationProperty(IRI.create("http://purl.org/pav/providedBy")); - newman.addAxiom(frank, df.getOWLDeclarationAxiom(provided_by_prop)); - OWLAnnotationProperty x_prop = df.getOWLAnnotationProperty(IRI.create("http://geneontology.org/lego/hint/layout/x")); - newman.addAxiom(frank, df.getOWLDeclarationAxiom(x_prop)); - OWLAnnotationProperty y_prop = df.getOWLAnnotationProperty(IRI.create("http://geneontology.org/lego/hint/layout/y")); - newman.addAxiom(frank, df.getOWLDeclarationAxiom(y_prop)); - OWLAnnotationProperty rdfs_label = df.getOWLAnnotationProperty(OWLRDFVocabulary.RDFS_LABEL.getIRI()); - newman.addAxiom(frank, df.getOWLDeclarationAxiom(rdfs_label)); - OWLAnnotationProperty rdfs_comment = df.getOWLAnnotationProperty(OWLRDFVocabulary.RDFS_COMMENT.getIRI()); - newman.addAxiom(frank, df.getOWLDeclarationAxiom(rdfs_comment)); - OWLAnnotationProperty rdfs_seealso = df.getOWLAnnotationProperty(OWLRDFVocabulary.RDFS_SEE_ALSO.getIRI()); - newman.addAxiom(frank, df.getOWLDeclarationAxiom(rdfs_seealso)); - OWLAnnotationProperty skos_exact_match = df.getOWLAnnotationProperty(IRI.create("http://www.w3.org/2004/02/skos/core#exactMatch")); - newman.addAxiom(frank, df.getOWLDeclarationAxiom(skos_exact_match)); - OWLAnnotationProperty 
skos_altlabel = df.getOWLAnnotationProperty(IRI.create("http://www.w3.org/2004/02/skos/core#altLabel")); - newman.addAxiom(frank, df.getOWLDeclarationAxiom(skos_altlabel)); - OWLAnnotationProperty definition = df.getOWLAnnotationProperty(IRI.create("http://purl.obolibrary.org/obo/IAO_0000115")); - newman.addAxiom(frank, df.getOWLDeclarationAxiom(definition)); - OWLAnnotationProperty database_cross_reference = df.getOWLAnnotationProperty(IRI.create("http://www.geneontology.org/formats/oboInOwl#hasDbXref")); - newman.addAxiom(frank, df.getOWLDeclarationAxiom(database_cross_reference)); - OWLAnnotationProperty canonical_record = df.getOWLAnnotationProperty(IRI.create("http://geneontology.org/lego/canonical_record")); - newman.addAxiom(frank, df.getOWLDeclarationAxiom(canonical_record)); - OWLAnnotationProperty iuphar_id = df.getOWLAnnotationProperty(IRI.create("http://geneontology.org/lego/iuphar_id")); - newman.addAxiom(frank, df.getOWLDeclarationAxiom(iuphar_id)); - OWLAnnotationProperty in_taxon = df.getOWLAnnotationProperty(IRI.create("https://w3id.org/biolink/vocab/in_taxon")); - newman.addAxiom(frank, df.getOWLDeclarationAxiom(in_taxon)); - - //copy over ontology annotations - for(OWLAnnotation anno : ont.getAnnotations()) { - AddOntologyAnnotation add = new AddOntologyAnnotation(frank, anno); - newman.applyChange(add); - } - - //add correct property declarations - Set anno_properties = ont.getAnnotationPropertiesInSignature(); - Set bad_props = new HashSet(); - for(OWLAnnotationProperty anno_prop : anno_properties) { - if(anno_prop.getIRI().toString().contains("http://purl.obolibrary.org/obo/RO_")|| - anno_prop.getIRI().toString().contains("http://purl.obolibrary.org/obo/BFO_")) { - bad_props.add(anno_prop.getIRI().toString()); - OWLObjectProperty object_prop = df.getOWLObjectProperty(anno_prop.getIRI()); - OWLDeclarationAxiom object_prop_declaration = df.getOWLDeclarationAxiom(object_prop); - newman.addAxiom(frank, object_prop_declaration); - } - } - //fix screwed up axioms, collect the rest - for(OWLAxiom axiom : ont.getAxioms()) { - if(axiom.isOfType(AxiomType.ANNOTATION_ASSERTION)) { - OWLAnnotationAssertionAxiom a = (OWLAnnotationAssertionAxiom)axiom; - String prop_iri = a.getProperty().getIRI().toString(); - if(bad_props.contains(prop_iri)) { - Set annos = a.getAnnotations(); - OWLObjectProperty p = df.getOWLObjectProperty(IRI.create(prop_iri)); - IRI object = a.getValue().asIRI().get(); - IRI subject = IRI.create(a.getSubject().toString()); - OWLObjectPropertyAssertionAxiom new_ass = df.getOWLObjectPropertyAssertionAxiom(p, df.getOWLNamedIndividual(subject), df.getOWLNamedIndividual(object), annos); - newman.addAxiom(frank, new_ass); - }else { - newman.addAxiom(frank, axiom); - } - }else { - newman.addAxiom(frank, axiom); - } - } - //return new fixed ontology - return frank; - } - - public static OWLOntology removeDeadAnnotationsAndImports(OWLOntology ont) throws OWLOntologyCreationException { - OWLOntologyManager m = OWLManager.createOWLOntologyManager(); - OWLAnnotationProperty json_model_prop = m.getOWLDataFactory().getOWLAnnotationProperty(IRI.create("http://geneontology.org/lego/json-model")); - //get rid of all imports - Set imports = ont.getImportsDeclarations(); - for(OWLImportsDeclaration import_declaration : imports) { - m.applyChange(new RemoveImport(ont, import_declaration)); - } - //get rid of the json annotations lurking about - for(OWLAnnotation anno : ont.getAnnotations()) { - if(anno.getProperty().equals(json_model_prop)) { - RemoveOntologyAnnotation rm = new 
RemoveOntologyAnnotation(ont, anno); - m.applyChange(rm); - } - } - //purify of the json annotation property as well - OWLDeclarationAxiom json_prop_declaration = m.getOWLDataFactory().getOWLDeclarationAxiom(json_model_prop); - m.removeAxiom(ont, json_prop_declaration); - return ont; - } + private static Logger LOG = Logger.getLogger(CoreMolecularModelManager.class); + + // axiom has evidence RO:0002612 + private static final IRI HAS_EVIDENCE_IRI = IRI.create("http://purl.obolibrary.org/obo/RO_0002612"); + // legacy + private static final IRI HAS_EVIDENCE_IRI_OLD = AnnotationShorthand.evidence.getAnnotationProperty(); + + private static final OWLAnnotationProperty HAS_SHORTHAND = OWLManager.getOWLDataFactory().getOWLAnnotationProperty(IRI.create("http://www.geneontology.org/formats/oboInOwl#shorthand")); + private static final OWLAnnotationProperty IN_SUBSET = OWLManager.getOWLDataFactory().getOWLAnnotationProperty(IRI.create("http://www.geneontology.org/formats/oboInOwl#inSubset")); + private static final Set DO_NOT_ANNOTATE_SUBSETS = new HashSet<>(); + + static { + DO_NOT_ANNOTATE_SUBSETS.add(IRI.create("http://purl.obolibrary.org/obo/go#gocheck_do_not_annotate")); + DO_NOT_ANNOTATE_SUBSETS.add(IRI.create("http://purl.obolibrary.org/obo/go#gocheck_do_not_manually_annotate")); + } + + final OWLOntology tbox; + // final OWLReasonerFactory rf; + // final OWLReasoner tbox_reasoner; + //replacing tbox_reasoner structural reasoner functionality with blazegraph queries over pre-inferred relations.. + private BlazegraphOntologyManager go_lego_repo; + private final IRI tboxIRI; + + final Map modelMap = new HashMap(); + Set additionalImports; + + private final RuleEngine ruleEngine; + private final Map legacyRelationIndex = new HashMap(); + private final Map tboxLabelIndex = new HashMap(); + private final Map tboxShorthandIndex = new HashMap(); + private final Set doNotAnnotateSubset = new HashSet<>(); + + + /** + * Use start up time to create a unique prefix for id generation + */ + static String uniqueTop = Long.toHexString(Math.abs((System.currentTimeMillis() / 1000))); + static final AtomicLong instanceCounter = new AtomicLong(0L); + + /** + * Generate a new id from the unique server prefix and a global counter + * + * @return id + */ + private static String localUnique() { + final long counterValue = instanceCounter.getAndIncrement(); + String unique = uniqueTop + String.format("%08d", counterValue); + return unique; + } + + /** + * Check that the given string looks similar to a local unique id + * + * @param s + * @return true if the string looks like a generated id + */ + static boolean isLocalUnique(String s) { + boolean result = false; + if (s != null && s.length() > 8) { + result = true; + for (int i = 0; i < s.length(); i++) { + char c = s.charAt(i); + if (isHex(c) == false) { + result = false; + break; + } + } + } + return result; + } + + private static boolean isHex(char c) { + // check that char is a digit or a-e + boolean result = false; + if (Character.isDigit(c)) { + result = true; + } else if (c == 'a' || c == 'b' || c == 'c' || c == 'd' || c == 'e' || c == 'f') { + result = true; + } + return result; + } + + /** + * Generate an id and prepend the given prefixes. + *
<p>
+ * This method must should be used for model identifiers and individual identifiers. + * + * @param prefixes + * @return id + */ + static IRI generateId(CharSequence... prefixes) { + StringBuilder sb = new StringBuilder(); + for (CharSequence prefix : prefixes) { + sb.append(prefix); + } + sb.append(localUnique()); + return IRI.create(sb.toString()); + } + + /** + * @param tbox + * @throws OWLOntologyCreationException + * @throws IOException + */ + public CoreMolecularModelManager(OWLOntology tbox, String go_lego_repo_file, boolean downloadOntologyJournal) throws OWLOntologyCreationException, IOException { + super(); + this.tbox = tbox; + tboxIRI = getTboxIRI(tbox); + this.ruleEngine = initializeRuleEngine(); + initializeLegacyRelationIndex(); + initializeTboxLabelIndex(); + initializeTboxShorthandIndex(); + initializeDoNotAnnotateSubset(); + if (go_lego_repo_file != null) { + this.go_lego_repo = new BlazegraphOntologyManager(go_lego_repo_file, downloadOntologyJournal); + } + init(); + } + + + private OWLReasoner initializeTboxReasoner(OWLOntology tbox) { + OWLReasonerFactory reasonerFactory = new StructuralReasonerFactory(); + OWLReasoner r = reasonerFactory.createReasoner(tbox); + return r; + } + + private static synchronized Set removeOBOParserFactories(OWLOntologyManager m) { + // hacky workaround: remove the too liberal OBO parser + PriorityCollection factories = m.getOntologyParsers(); + Set copied = new HashSet<>(); + for (OWLParserFactory factory : factories) { + copied.add(factory); + } + for (OWLParserFactory factory : copied) { + Class cls = factory.getClass(); + boolean remove = false; + if (OBOFormatOWLAPIParserFactory.class.equals(cls)) { + remove = true; + } + if (remove) { + factories.remove(factory); + } + } + return copied; + } + + private static synchronized void resetOBOParserFactories(OWLOntologyManager m, Set factories) { + m.setOntologyParsers(factories); + } + + /** + * Executed before the init call {@link #init()}. + * + * @param tbox + * @return IRI, never null + * @throws OWLOntologyCreationException + */ + protected IRI getTboxIRI(OWLOntology tbox) throws OWLOntologyCreationException { + OWLOntologyID ontologyID = tbox.getOntologyID(); + if (ontologyID != null) { + Optional ontologyIRI = ontologyID.getOntologyIRI(); + if (ontologyIRI.isPresent()) { + return ontologyIRI.get(); + } + } + throw new OWLOntologyCreationException("No ontology id available for tbox. 
An ontology IRI is required for the import into the abox."); + } + + /** + * @throws OWLOntologyCreationException + */ + protected void init() throws OWLOntologyCreationException { + // set default imports + additionalImports = new HashSet(); + } + + /** + * @return core/source ontology + */ + public OWLOntology getOntology() { + return tbox; + } + + public Map getLegacyRelationShorthandIndex() { + return Collections.unmodifiableMap(this.legacyRelationIndex); + } + + public Map getTboxLabelIndex() { + return Collections.unmodifiableMap(this.tboxLabelIndex); + } + + public Map getTboxShorthandIndex() { + return Collections.unmodifiableMap(this.tboxShorthandIndex); + } + + public Set getDoNotAnnotateSubset() { + return Collections.unmodifiableSet(this.doNotAnnotateSubset); + } + + public RuleEngine getRuleEngine() { + return ruleEngine; + } + + private RuleEngine initializeRuleEngine() { + Set rules = new HashSet<>(); + rules.addAll(JavaConverters.setAsJavaSetConverter(OWLtoRules.translate(getOntology(), Imports.INCLUDED, true, true, true, true)).asJava()); + rules.addAll(JavaConverters.setAsJavaSetConverter(OWLtoRules.indirectRules(getOntology())).asJava()); + return new RuleEngine(Bridge.rulesFromJena(JavaConverters.asScalaSetConverter(rules).asScala()), true); + } + + public WorkingMemory createInferredModel(OWLOntology abox, IRI modelId) { + Set statements = JavaConverters.setAsJavaSetConverter(SesameJena.ontologyAsTriples(abox)).asJava(); + Set triples = statements.stream().map(s -> Bridge.tripleFromJena(s.asTriple())).collect(Collectors.toSet()); + try { + // Using model's ontology IRI so that a spurious different ontology declaration triple isn't added + // OWLOntology schemaOntology = OWLManager.createOWLOntologyManager().createOntology(getOntology().getRBoxAxioms(Imports.INCLUDED), modelId); + // I think the re-use of the model IRI as the IRI of the rule ontology has some weird effects on the model in question, rendering its contents inaccesible. + OWLOntologyManager tmp_man = OWLManager.createOWLOntologyManager(); + OWLOntology schemaOntology = tmp_man.createOntology(IRI.create("http://therules.org")); + Set owl_rules = getOntology().getRBoxAxioms(Imports.INCLUDED); + tmp_man.addAxioms(schemaOntology, owl_rules); + // + Set schemaStatements = JavaConverters.setAsJavaSetConverter(SesameJena.ontologyAsTriples(schemaOntology)).asJava(); + triples.addAll(schemaStatements.stream().map(s -> Bridge.tripleFromJena(s.asTriple())).collect(Collectors.toSet())); + } catch (OWLOntologyCreationException e) { + LOG.error("Couldn't add rbox statements to data model.", e); + } + return getRuleEngine().processTriples(JavaConverters.asScalaSetConverter(triples).asScala()); + + } + + /** + * Return Arachne working memory representing LEGO model combined with inference rules. + * This model will not remain synchronized with changes to data. 
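For illustration, a minimal usage sketch of this inference entry point; modelManager stands in for a concrete CoreMolecularModelManager subclass and the model IRI is a placeholder, while WorkingMemory is the Arachne rule-engine class already used above.

    // Sketch only, under the assumptions named above.
    IRI modelIri = IRI.create("http://model.geneontology.org/0000000000000001"); // illustrative IRI
    // Combines the model's triples with rules translated from the tbox and runs Arachne.
    // The returned working memory is a snapshot; it is not updated when the model changes.
    WorkingMemory wm = modelManager.createInferredModel(modelIri);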
+ * + * @param modelId + * @return Jena model + */ + public WorkingMemory createInferredModel(IRI modelId) { + return createInferredModel(getModelAbox(modelId), modelId); + } + + public WorkingMemory createCanonicalInferredModel(IRI modelId) { + //swap out any non-canonical types + OWLOntology source_abox = getModelAbox(modelId); + OWLOntologyManager aman = OWLManager.createOWLOntologyManager(); + OWLDataFactory df = aman.getOWLDataFactory(); + OWLAnnotationProperty canonical_record = df.getOWLAnnotationProperty(IRI.create("http://geneontology.org/lego/canonical_record")); + OWLReasonerFactory reasonerFactory = new StructuralReasonerFactory(); + try { + OWLOntology abox = aman.copyOntology(source_abox, OntologyCopy.DEEP); + OWLReasoner abox_reasoner = reasonerFactory.createReasoner(abox); + //convert to canonical wherever possible + abox.getIndividualsInSignature().forEach(i -> { + Set types = abox_reasoner.getTypes(i, true).getFlattened(); + for (OWLClass type : types) { + Collection canons = EntitySearcher.getAnnotationObjects(type, tbox, canonical_record); + //adding multiple types to an instance of a set object is + //probably not kosher.. but seems to work for now. + //more correct to create new instances for each + if (canons != null && canons.size() > 0) { + for (OWLAnnotation canon : canons) { + if (canon.getValue().asIRI().isPresent()) { + OWLClass canonical = df.getOWLClass(canon.getValue().asIRI().get()); + //direct swap + //remove the old one + OWLClassAssertionAxiom original = df.getOWLClassAssertionAxiom(type, i); + aman.removeAxiom(abox, original); + //add the new one + OWLClassAssertionAxiom canonical_type = df.getOWLClassAssertionAxiom(canonical, i); + aman.addAxiom(abox, canonical_type); + } + } + } + } + }); + WorkingMemory inferred = createInferredModel(abox, modelId); + abox_reasoner.dispose(); + aman.removeOntology(abox); + return inferred; + } catch (OWLOntologyCreationException e) { + // TODO Auto-generated catch block + e.printStackTrace(); + return createInferredModel(source_abox, modelId); + } + } + + private void initializeLegacyRelationIndex() { + synchronized (legacyRelationIndex) { + OWLAnnotationProperty rdfsLabel = OWLManager.getOWLDataFactory().getRDFSLabel(); + for (OWLOntology ont : this.getOntology().getImportsClosure()) { + for (OWLObjectProperty prop : ont.getObjectPropertiesInSignature()) { + for (OWLAnnotationAssertionAxiom axiom : ont.getAnnotationAssertionAxioms(prop.getIRI())) { + if (axiom.getProperty().equals(rdfsLabel)) { + Optional literalOpt = axiom.getValue().asLiteral(); + if (literalOpt.isPresent()) { + String label = literalOpt.get().getLiteral(); + legacyRelationIndex.put(prop.getIRI(), label.replaceAll(" ", "_").replaceAll(",", "")); + } + } + } + } + } + } + } + + private void initializeTboxLabelIndex() { + synchronized (tboxLabelIndex) { + OWLAnnotationProperty rdfsLabel = OWLManager.getOWLDataFactory().getRDFSLabel(); + for (OWLAnnotationAssertionAxiom axiom : this.getOntology().getAxioms(AxiomType.ANNOTATION_ASSERTION, Imports.INCLUDED)) { + if (axiom.getProperty().equals(rdfsLabel) && (axiom.getSubject() instanceof IRI) && axiom.getValue() instanceof OWLLiteral) { + IRI subject = (IRI) (axiom.getSubject()); + String label = axiom.getValue().asLiteral().get().getLiteral(); + tboxLabelIndex.put(subject, label); + } + } + } + } + + private void initializeTboxShorthandIndex() { + synchronized (tboxShorthandIndex) { + for (OWLAnnotationAssertionAxiom axiom : this.getOntology().getAxioms(AxiomType.ANNOTATION_ASSERTION, 
Imports.INCLUDED)) { + if (axiom.getProperty().equals(HAS_SHORTHAND) && (axiom.getSubject() instanceof IRI) && axiom.getValue() instanceof OWLLiteral) { + IRI subject = (IRI) (axiom.getSubject()); + String shorthand = axiom.getValue().asLiteral().get().getLiteral(); + tboxShorthandIndex.put(subject, shorthand); + } + } + } + } + + private void initializeDoNotAnnotateSubset() { + synchronized (doNotAnnotateSubset) { + for (OWLAnnotationAssertionAxiom axiom : this.getOntology().getAxioms(AxiomType.ANNOTATION_ASSERTION, Imports.INCLUDED)) { + if (axiom.getProperty().equals(IN_SUBSET) && (axiom.getSubject() instanceof IRI) && DO_NOT_ANNOTATE_SUBSETS.contains(axiom.getValue())) { + doNotAnnotateSubset.add((IRI) (axiom.getSubject())); + } + } + } + } + + /** + * Add additional import declarations for any newly generated model. + * + * @param imports + */ + public void addImports(Iterable imports) { + if (imports != null) { + for (String importIRIString : imports) { + additionalImports.add(IRI.create(importIRIString)); + } + } + } + + public Collection getImports() { + Set allImports = new HashSet(); + allImports.add(tboxIRI); + allImports.addAll(additionalImports); + return allImports; + } + + /** + * @param modelId + * @return all individuals in the model + */ + public Set getIndividuals(IRI modelId) { + ModelContainer mod = getModel(modelId); + return mod.getAboxOntology().getIndividualsInSignature(); + } + + + // /** + // * @param mod + // * @param q + // * @return all individuals in the model that satisfy q + // */ + // public Set getIndividualsByQuery(ModelContainer mod, OWLClassExpression q) { + // return mod.getReasoner().getInstances(q, false).getFlattened(); + // } + + /** + * @param model + * @param ce + * @param metadata + * @return individual + */ + public OWLNamedIndividual createIndividual(ModelContainer model, OWLClassExpression ce, METADATA metadata) { + OWLNamedIndividual individual = createIndividual(model, ce, null, metadata); + return individual; + } + + OWLNamedIndividual createIndividual(ModelContainer model, OWLClassExpression ce, Set annotations, METADATA metadata) { + Pair> pair = createIndividual(model.getModelId(), model.getAboxOntology(), ce, annotations); + addAxioms(model, pair.getRight(), metadata); + return pair.getLeft(); + } + + OWLNamedIndividual createIndividualWithIRI(ModelContainer model, IRI individualIRI, Set annotations, METADATA metadata) { + Pair> pair = createIndividualInternal(individualIRI, model.getAboxOntology(), null, annotations); + addAxioms(model, pair.getRight(), metadata); + return pair.getLeft(); + } + + public static Pair> createIndividual(IRI modelId, OWLOntology abox, OWLClassExpression ce, Set annotations) { + IRI iri = generateId(modelId, "/"); + return createIndividualInternal(iri, abox, ce, annotations); + } + + private static Pair> createIndividualInternal(IRI iri, OWLOntology abox, OWLClassExpression ce, Set annotations) { + LOG.info("Generating individual for IRI: " + iri); + OWLDataFactory f = abox.getOWLOntologyManager().getOWLDataFactory(); + OWLNamedIndividual i = f.getOWLNamedIndividual(iri); + + // create axioms + Set axioms = new HashSet(); + // declaration + axioms.add(f.getOWLDeclarationAxiom(i)); + // annotation assertions + if (annotations != null) { + for (OWLAnnotation annotation : annotations) { + axioms.add(f.getOWLAnnotationAssertionAxiom(iri, annotation)); + } + } + + if (ce != null) { + OWLClassAssertionAxiom typeAxiom = createType(f, i, ce); + if (typeAxiom != null) { + axioms.add(typeAxiom); + } + } + + return 
Pair.of(i, axioms); + } + + public static class DeleteInformation { + public final Set usedIRIs = new HashSet(); + public final Set updated = new HashSet(); + public final Set touched = new HashSet(); + } + + /** + * Deletes an individual and return all IRIs used as an annotation value. + * Also tries to delete all annotations (OWLObjectPropertyAssertionAxiom + * annotations and OWLAnnotationAssertionAxiom) with the individual IRI as + * value. + * + * @param model + * @param i + * @param metadata + * @return set of IRIs used in annotations + */ + public DeleteInformation deleteIndividual(ModelContainer model, OWLNamedIndividual i, METADATA metadata) { + Set toRemoveAxioms = new HashSet(); + final DeleteInformation deleteInformation = new DeleteInformation(); + + final OWLOntology ont = model.getAboxOntology(); + final OWLDataFactory f = model.getOWLDataFactory(); + + // Declaration axiom + toRemoveAxioms.add(model.getOWLDataFactory().getOWLDeclarationAxiom(i)); + + // Logic axiom + for (OWLAxiom ax : ont.getAxioms(i, Imports.EXCLUDED)) { + extractEvidenceIRIValues(ax.getAnnotations(), deleteInformation.usedIRIs); + toRemoveAxioms.add(ax); + } + + // OWLObjectPropertyAssertionAxiom + Set allAssertions = ont.getAxioms(AxiomType.OBJECT_PROPERTY_ASSERTION); + final IRI iIRI = i.getIRI(); + for (OWLObjectPropertyAssertionAxiom ax : allAssertions) { + if (toRemoveAxioms.contains(ax) == false) { + Set currentIndividuals = ax.getIndividualsInSignature(); + if (currentIndividuals.contains(i)) { + extractEvidenceIRIValues(ax.getAnnotations(), deleteInformation.usedIRIs); + toRemoveAxioms.add(ax); + continue; + } + // check annotations for deleted individual IRI + Set annotations = ax.getAnnotations(); + Set removeAnnotations = new HashSet(); + for (OWLAnnotation annotation : annotations) { + if (iIRI.equals(annotation.getValue())) { + removeAnnotations.add(annotation); + } + } + // if there is an annotations that needs to be removed, + // recreate axiom with cleaned annotation set + if (removeAnnotations.isEmpty() == false) { + annotations.removeAll(removeAnnotations); + toRemoveAxioms.add(ax); + deleteInformation.updated.add(f. 
+ getOWLObjectPropertyAssertionAxiom( + ax.getProperty(), ax.getSubject(), ax.getObject(), annotations)); + } + } + } + // OWLAnnotationAssertionAxiom + Set annotationAssertionAxioms = ont.getAnnotationAssertionAxioms(i.getIRI()); + for (OWLAnnotationAssertionAxiom axiom : annotationAssertionAxioms) { + extractEvidenceIRIValues(axiom.getAnnotation(), deleteInformation.usedIRIs); + toRemoveAxioms.add(axiom); + } + + // search for all annotations which use individual IRI as value + Set axioms = ont.getAxioms(AxiomType.ANNOTATION_ASSERTION); + for (OWLAnnotationAssertionAxiom ax : axioms) { + if (toRemoveAxioms.contains(ax) == false) { + if (iIRI.equals(ax.getValue())) { + toRemoveAxioms.add(ax); + OWLAnnotationSubject subject = ax.getSubject(); + subject.accept(new OWLAnnotationSubjectVisitor() { + + @Override + public void visit(OWLAnonymousIndividual individual) { + // do nothing + } + + @Override + public void visit(IRI iri) { + // check if they subject is a declared named individual + if (ont.containsIndividualInSignature(iri)) { + deleteInformation.touched.add(iri); + } + } + }); + } + } + } + + removeAxioms(model, toRemoveAxioms, metadata); + if (deleteInformation.updated.isEmpty() == false) { + addAxioms(model, deleteInformation.updated, metadata); + } + + return deleteInformation; + } + + public static Set extractEvidenceIRIValues(Set annotations) { + if (annotations == null || annotations.isEmpty()) { + return Collections.emptySet(); + } + Set iriSet = new HashSet(); + extractEvidenceIRIValues(annotations, iriSet); + return iriSet; + } + + private static void extractEvidenceIRIValues(Set annotations, final Set iriSet) { + if (annotations != null) { + for (OWLAnnotation annotation : annotations) { + extractEvidenceIRIValues(annotation, iriSet); + } + } + } + + private static void extractEvidenceIRIValues(OWLAnnotation annotation, final Set iriSet) { + if (annotation != null) { + OWLAnnotationProperty property = annotation.getProperty(); + if (HAS_EVIDENCE_IRI.equals(property.getIRI()) || HAS_EVIDENCE_IRI_OLD.equals(property.getIRI())) { + annotation.getValue().accept(new OWLAnnotationValueVisitor() { + + @Override + public void visit(OWLLiteral literal) { + // ignore + } + + @Override + public void visit(OWLAnonymousIndividual individual) { + // ignore + } + + @Override + public void visit(IRI iri) { + iriSet.add(iri); + } + }); + } + } + } + + public void addAnnotations(ModelContainer model, OWLNamedIndividual i, Collection annotations, METADATA metadata) { + addAnnotations(model, i.getIRI(), annotations, metadata); + } + + public void addAnnotations(ModelContainer model, IRI subject, Collection annotations, METADATA metadata) { + Set axioms = new HashSet(); + OWLDataFactory f = model.getOWLDataFactory(); + for (OWLAnnotation annotation : annotations) { + axioms.add(f.getOWLAnnotationAssertionAxiom(subject, annotation)); + } + addAxioms(model, axioms, metadata); + } + + public void updateAnnotation(ModelContainer model, IRI subject, OWLAnnotation update, METADATA metadata) { + Set removeAxioms = new HashSet(); + OWLDataFactory f = model.getOWLDataFactory(); + Set existing = model.getAboxOntology().getAnnotationAssertionAxioms(subject); + OWLAnnotationProperty target = update.getProperty(); + for (OWLAnnotationAssertionAxiom axiom : existing) { + if (target.equals(axiom.getProperty())) { + removeAxioms.add(axiom); + } + } + removeAxioms(model, removeAxioms, metadata); + addAxiom(model, f.getOWLAnnotationAssertionAxiom(subject, update), metadata); + } + + public void 
addModelAnnotations(ModelContainer model, Collection annotations, METADATA metadata) { + OWLOntology aBox = model.getAboxOntology(); + List changes = new ArrayList(); + for (OWLAnnotation annotation : annotations) { + changes.add(new AddOntologyAnnotation(aBox, annotation)); + } + applyChanges(model, changes, metadata); + } + + public void updateAnnotation(ModelContainer model, OWLAnnotation update, METADATA metadata) { + OWLOntology aBox = model.getAboxOntology(); + List changes = new ArrayList(); + Set existing = model.getAboxOntology().getAnnotations(); + OWLAnnotationProperty target = update.getProperty(); + for (OWLAnnotation annotation : existing) { + if (target.equals(annotation.getProperty())) { + changes.add(new RemoveOntologyAnnotation(aBox, annotation)); + } + } + changes.add(new AddOntologyAnnotation(aBox, update)); + applyChanges(model, changes, metadata); + } + + public void removeAnnotations(ModelContainer model, OWLNamedIndividual i, Collection annotations, METADATA metadata) { + removeAnnotations(model, i.getIRI(), annotations, metadata); + } + + void removeAnnotations(ModelContainer model, IRI subject, Collection annotations, METADATA metadata) { + OWLOntology ont = model.getAboxOntology(); + Set toRemove = new HashSet(); + Set candidates = ont.getAnnotationAssertionAxioms(subject); + for (OWLAnnotationAssertionAxiom axiom : candidates) { + OWLAnnotation annotation = axiom.getAnnotation(); + if (annotations.contains(annotation)) { + toRemove.add(axiom); + } + } + removeAxioms(model, toRemove, metadata); + } + + public void removeAnnotations(ModelContainer model, Collection annotations, METADATA metadata) { + OWLOntology aBox = model.getAboxOntology(); + List changes = new ArrayList(); + for (OWLAnnotation annotation : annotations) { + changes.add(new RemoveOntologyAnnotation(aBox, annotation)); + } + applyChanges(model, changes, metadata); + } + + public void addDataProperty(ModelContainer model, + OWLNamedIndividual i, OWLDataProperty prop, OWLLiteral literal, + METADATA metadata) { + OWLAxiom axiom = model.getOWLDataFactory().getOWLDataPropertyAssertionAxiom(prop, i, literal); + addAxiom(model, axiom, metadata); + } + + public void removeDataProperty(ModelContainer model, + OWLNamedIndividual i, OWLDataProperty prop, OWLLiteral literal, + METADATA metadata) { + OWLAxiom toRemove = null; + Set existing = model.getAboxOntology().getDataPropertyAssertionAxioms(i); + for (OWLDataPropertyAssertionAxiom ax : existing) { + if (prop.equals(ax.getProperty()) && literal.equals(ax.getObject())) { + toRemove = ax; + break; + } + } + + if (toRemove != null) { + removeAxiom(model, toRemove, metadata); + } + } + + /** + * Fetches a model by its Id + * + * @param id + * @return wrapped model + */ + public ModelContainer getModel(IRI id) { + synchronized (modelMap) { + // synchronized to avoid race condition for simultaneous loads of the same model + if (!modelMap.containsKey(id)) { + try { + loadModel(id, false); + } catch (OWLOntologyCreationException e) { + LOG.info("Could not load model with id: " + id, e); + } + } + return modelMap.get(id); + } + } + + /** + * Retrieve the abox ontology. May skip loading the imports. + * This method is mostly intended to read metadata from a model. 
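As a usage sketch for the annotation helpers above, updating the model-state annotation on a model could look like the following; modelManager, model, metadata and the literal value "production" are placeholders, and the modelstate property IRI is the one declared further down in this class.

    OWLDataFactory df = model.getOWLDataFactory();
    OWLAnnotationProperty modelState =
            df.getOWLAnnotationProperty(IRI.create("http://geneontology.org/lego/modelstate"));
    // updateAnnotation(ModelContainer, OWLAnnotation, METADATA) first removes any existing
    // ontology annotation with the same property, then adds the new value.
    modelManager.updateAnnotation(model, df.getOWLAnnotation(modelState, df.getOWLLiteral("production")), metadata);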
+ * + * @param id + * @return abox, maybe without any imports loaded + */ + public OWLOntology getModelAbox(IRI id) { + ModelContainer model = modelMap.get(id); + if (model != null) { + return model.getAboxOntology(); + } + OWLOntology abox = null; + try { + abox = loadModelABox(id); + } catch (OWLOntologyCreationException e) { + LOG.info("Could not load model with id: " + id, e); + } + return abox; + } + + public boolean isModelModified(IRI modelId) { + ModelContainer model = modelMap.get(modelId); + if (model != null) { + // ask model about modification + return model.isModified(); + } + // non in-memory models are considered not modified. + return false; + } + + /** + * @param modelId + * @return ontology + * @throws OWLOntologyCreationException + */ + protected abstract OWLOntology loadModelABox(IRI modelId) throws OWLOntologyCreationException; + + /** + * @param modelId + * @param manager + * @return ontology + * @throws OWLOntologyCreationException + */ + protected abstract OWLOntology loadModelABox(IRI modelId, OWLOntologyManager manager) throws OWLOntologyCreationException; + + /** + * @param id + */ + public void unlinkModel(IRI id) { + ModelContainer model = modelMap.get(id); + model.dispose(); + modelMap.remove(id); + } + + /** + * @return ids for all loaded models + */ + public Set getModelIds() { + return modelMap.keySet(); + } + + /** + * internal method to cleanup this instance + */ + public void dispose() { + Set ids = new HashSet(getModelIds()); + for (IRI id : ids) { + unlinkModel(id); + } + } + + /** + * Export the ABox, will try to set the ontologyID to the given modelId (to + * ensure import assumptions are met) + * + * @param model + * @param ontologyFormat + * @return modelContent + * @throws OWLOntologyStorageException + */ + public String exportModel(ModelContainer model, OWLDocumentFormat ontologyFormat) throws OWLOntologyStorageException { + final OWLOntology aBox = model.getAboxOntology(); + final OWLOntologyManager manager = aBox.getOWLOntologyManager(); + + // make sure the exported ontology has an ontologyId and that it maps to the modelId + final IRI expectedABoxIRI = model.getModelId(); + Optional currentABoxIRI = aBox.getOntologyID().getOntologyIRI(); + if (currentABoxIRI.isPresent() == false) { + manager.applyChange(new SetOntologyID(aBox, expectedABoxIRI)); + } else { + if (expectedABoxIRI.equals(currentABoxIRI) == false) { + OWLOntologyID ontologyID = new OWLOntologyID(Optional.of(expectedABoxIRI), Optional.of(expectedABoxIRI)); + manager.applyChange(new SetOntologyID(aBox, ontologyID)); + } + } + + // write the model into a buffer + ByteArrayOutputStream outputStream = new ByteArrayOutputStream(); + if (ontologyFormat != null) { + manager.saveOntology(aBox, ontologyFormat, outputStream); + } else { + manager.saveOntology(aBox, outputStream); + } + + // extract the string from the buffer + String modelString = outputStream.toString(); + return modelString; + } + + /** + * Try to load (or replace) a model with the given ontology. It is expected + * that the content is an A-Box ontology, which imports the T-BOX. Also the + * ontology ID is used to extract the modelId.
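A short, hedged sketch of exportModel in use; the Turtle format choice and the variable names are illustrative. The method first rewrites the ontology ID to the model IRI, so the serialized abox keeps its import assumptions.

    // Assumes OWLAPI's org.semanticweb.owlapi.formats.TurtleDocumentFormat is on the classpath.
    String turtle = modelManager.exportModel(model, new TurtleDocumentFormat());
    // Passing null keeps the ontology's current document format.
    String asLoaded = modelManager.exportModel(model, null);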
+ * <p>
+ * This method will currently NOT work due to a bug in the OWL-API. + * The functional syntax parser does not properly report the exceptions and + * will return an ontology with an wrong ontology ID! + * + * @param modelData + * @return modelId + * @throws OWLOntologyCreationException + */ + public ModelContainer importModel(String modelData) throws OWLOntologyCreationException { + // load data from String + final OWLOntologyManager manager = tbox.getOWLOntologyManager(); + final OWLOntologyDocumentSource documentSource = new StringDocumentSource(modelData); + OWLOntology modelOntology; + final Set originalFactories = removeOBOParserFactories(manager); + try { + modelOntology = manager.loadOntologyFromOntologyDocument(documentSource); + } catch (OWLOntologyAlreadyExistsException e) { + // exception is thrown if there is an ontology with the same ID already in memory + OWLOntologyID id = e.getOntologyID(); + IRI existingModelId = id.getOntologyIRI().orNull(); + + // remove the existing memory model + unlinkModel(existingModelId); + + // try loading the import version (again) + modelOntology = manager.loadOntologyFromOntologyDocument(documentSource); + } finally { + resetOBOParserFactories(manager, originalFactories); + } + + // try to extract modelId + IRI modelId = null; + Optional ontologyIRI = modelOntology.getOntologyID().getOntologyIRI(); + if (ontologyIRI.isPresent()) { + modelId = ontologyIRI.get(); + } + if (modelId == null) { + throw new OWLOntologyCreationException("Could not extract the modelId from the given model"); + } + // paranoia check + ModelContainer existingModel = modelMap.get(modelId); + if (existingModel != null) { + unlinkModel(modelId); + } + + // add to internal model + ModelContainer newModel = addModel(modelId, modelOntology); + + return newModel; + } + + protected abstract void loadModel(IRI modelId, boolean isOverride) throws OWLOntologyCreationException; + + ModelContainer addModel(IRI modelId, OWLOntology abox) throws OWLOntologyCreationException { + ModelContainer m = new ModelContainer(modelId, tbox, abox); + modelMap.put(modelId, m); + return m; + } + + /** + * Adds ClassAssertion(c,i) to specified model + * + * @param modelId + * @param i + * @param c + * @param metadata + */ + public void addType(IRI modelId, OWLNamedIndividual i, OWLClass c, METADATA metadata) { + ModelContainer model = getModel(modelId); + addType(model, i, c, metadata); + } + + /** + * Adds ClassAssertion(c,i) to specified model + * + * @param model + * @param i + * @param c + * @param metadata + */ + public void addType(ModelContainer model, OWLIndividual i, + OWLClassExpression c, METADATA metadata) { + OWLClassAssertionAxiom axiom = createType(model.getOWLDataFactory(), i, c); + addAxiom(model, axiom, metadata); + } + + /** + * @param f + * @param i + * @param c + * @return axiom + */ + public static OWLClassAssertionAxiom createType(OWLDataFactory f, OWLIndividual i, OWLClassExpression c) { + OWLClassAssertionAxiom axiom = f.getOWLClassAssertionAxiom(c, i); + return axiom; + } + + /** + * Adds a ClassAssertion, where the class expression instantiated is an + * ObjectSomeValuesFrom expression + *
<p>
+ * Example: Individual: i Type: enabledBy some PRO_123 + * + * @param modelId + * @param i + * @param p + * @param filler + * @param metadata + */ + public void addType(IRI modelId, + OWLNamedIndividual i, + OWLObjectPropertyExpression p, + OWLClassExpression filler, + METADATA metadata) { + ModelContainer model = getModel(modelId); + addType(model, i, p, filler, metadata); + } + + /** + * Adds a ClassAssertion, where the class expression instantiated is an + * ObjectSomeValuesFrom expression + *
<p>
+ * Example: Individual: i Type: enabledBy some PRO_123 + * + * @param model + * @param i + * @param p + * @param filler + * @param metadata + */ + void addType(ModelContainer model, + OWLIndividual i, + OWLObjectPropertyExpression p, + OWLClassExpression filler, + METADATA metadata) { + if (LOG.isDebugEnabled()) { + LOG.debug("Adding " + i + " type " + p + " some " + filler); + } + OWLDataFactory f = model.getOWLDataFactory(); + OWLObjectSomeValuesFrom c = f.getOWLObjectSomeValuesFrom(p, filler); + OWLClassAssertionAxiom axiom = f.getOWLClassAssertionAxiom(c, i); + addAxiom(model, axiom, metadata); + } + + /** + * remove ClassAssertion(c,i) from the model + * + * @param model + * @param i + * @param ce + * @param metadata + */ + public void removeType(ModelContainer model, OWLIndividual i, + OWLClassExpression ce, METADATA metadata) { + Set allAxioms = model.getAboxOntology().getClassAssertionAxioms(i); + // use search to remove also axioms with annotations + for (OWLClassAssertionAxiom ax : allAxioms) { + if (ce.equals(ax.getClassExpression())) { + removeAxiom(model, ax, metadata); + } + } + + } + + void removeType(ModelContainer model, + OWLIndividual i, + OWLObjectPropertyExpression p, + OWLClassExpression filler, + METADATA metadata) { + OWLDataFactory f = model.getOWLDataFactory(); + OWLClassAssertionAxiom axiom = f.getOWLClassAssertionAxiom(f.getOWLObjectSomeValuesFrom(p, filler), i); + removeAxiom(model, axiom, metadata); + } + + public void addFact(ModelContainer model, OBOUpperVocabulary vocabElement, + OWLNamedIndividual i, OWLNamedIndividual j, Set annotations, METADATA metadata) { + OWLObjectProperty p = vocabElement.getObjectProperty(model.getAboxOntology()); + addFact(model, p, i, j, annotations, metadata); + } + + public void addFact(ModelContainer model, OWLObjectPropertyExpression p, + OWLIndividual i, OWLIndividual j, Set annotations, METADATA metadata) { + OWLObjectPropertyAssertionAxiom axiom = createFact(model.getOWLDataFactory(), p, i, j, annotations); + addAxiom(model, axiom, metadata); + } + + /** + * @param f + * @param p + * @param i + * @param j + * @param annotations + * @return axiom + */ + public static OWLObjectPropertyAssertionAxiom createFact(OWLDataFactory f, + OWLObjectPropertyExpression p, OWLIndividual i, OWLIndividual j, + Set annotations) { + final OWLObjectPropertyAssertionAxiom axiom; + if (annotations != null && !annotations.isEmpty()) { + axiom = f.getOWLObjectPropertyAssertionAxiom(p, i, j, annotations); + } else { + axiom = f.getOWLObjectPropertyAssertionAxiom(p, i, j); + } + return axiom; + } + + public Set removeFact(ModelContainer model, OWLObjectPropertyExpression p, + OWLIndividual i, OWLIndividual j, METADATA metadata) { + OWLDataFactory f = model.getOWLDataFactory(); + + OWLOntology ont = model.getAboxOntology(); + OWLAxiom toRemove = null; + Set iriSet = new HashSet(); + Set candidates = ont.getObjectPropertyAssertionAxioms(i); + for (OWLObjectPropertyAssertionAxiom axiom : candidates) { + if (p.equals(axiom.getProperty()) && j.equals(axiom.getObject())) { + toRemove = axiom; + extractEvidenceIRIValues(axiom.getAnnotations(), iriSet); + break; + } + } + if (toRemove == null) { + // fall back solution + toRemove = f.getOWLObjectPropertyAssertionAxiom(p, i, j); + } + removeAxiom(model, toRemove, metadata); + return iriSet; + } + + public void addAnnotations(ModelContainer model, OWLObjectPropertyExpression p, + OWLNamedIndividual i, OWLNamedIndividual j, Set annotations, + METADATA metadata) { + OWLOntology ont = model.getAboxOntology(); 
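Spelling out the "enabledBy some PRO_123" Javadoc example above as OWLAPI calls; model, modelId, activity, metadata and the PR accession are placeholders, and RO:0002333 is the "enabled by" relation.

    OWLDataFactory f = model.getOWLDataFactory();
    OWLObjectProperty enabledBy = f.getOWLObjectProperty(IRI.create("http://purl.obolibrary.org/obo/RO_0002333"));
    OWLClass filler = f.getOWLClass(IRI.create("http://purl.obolibrary.org/obo/PR_000000123")); // placeholder accession
    // Asserts ClassAssertion(ObjectSomeValuesFrom(enabledBy, filler), activity) in the model's abox.
    modelManager.addType(modelId, activity, enabledBy, filler, metadata);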
+ Set axioms = ont.getObjectPropertyAssertionAxioms(i); + OWLObjectPropertyAssertionAxiom toModify = null; + for (OWLObjectPropertyAssertionAxiom axiom : axioms) { + if (p.equals(axiom.getProperty()) && j.equals(axiom.getObject())) { + toModify = axiom; + break; + } + } + addAnnotations(model, toModify, annotations, metadata); + } + + void addAnnotations(ModelContainer model, OWLObjectPropertyAssertionAxiom toModify, + Set annotations, METADATA metadata) { + if (toModify != null) { + Set combindedAnnotations = new HashSet(annotations); + combindedAnnotations.addAll(toModify.getAnnotations()); + modifyAnnotations(toModify, combindedAnnotations, model, metadata); + } + } + + public void updateAnnotation(ModelContainer model, OWLObjectPropertyExpression p, + OWLNamedIndividual i, OWLNamedIndividual j, OWLAnnotation update, + METADATA metadata) { + OWLOntology ont = model.getAboxOntology(); + Set axioms = ont.getObjectPropertyAssertionAxioms(i); + OWLObjectPropertyAssertionAxiom toModify = null; + for (OWLObjectPropertyAssertionAxiom axiom : axioms) { + if (p.equals(axiom.getProperty()) && j.equals(axiom.getObject())) { + toModify = axiom; + break; + } + } + updateAnnotation(model, toModify, update, metadata); + } + + OWLObjectPropertyAssertionAxiom updateAnnotation(ModelContainer model, + OWLObjectPropertyAssertionAxiom toModify, OWLAnnotation update, + METADATA metadata) { + OWLObjectPropertyAssertionAxiom newAxiom = null; + if (toModify != null) { + Set combindedAnnotations = new HashSet(); + OWLAnnotationProperty target = update.getProperty(); + for (OWLAnnotation existing : toModify.getAnnotations()) { + if (target.equals(existing.getProperty()) == false) { + combindedAnnotations.add(existing); + } + } + combindedAnnotations.add(update); + newAxiom = modifyAnnotations(toModify, combindedAnnotations, model, metadata); + } + return newAxiom; + } + + public OWLObjectPropertyAssertionAxiom removeAnnotations(ModelContainer model, OWLObjectPropertyExpression p, + OWLNamedIndividual i, OWLNamedIndividual j, Set annotations, METADATA metadata) { + OWLOntology ont = model.getAboxOntology(); + Set axioms = ont.getObjectPropertyAssertionAxioms(i); + OWLObjectPropertyAssertionAxiom toModify = null; + for (OWLObjectPropertyAssertionAxiom axiom : axioms) { + if (p.equals(axiom.getProperty()) && j.equals(axiom.getObject())) { + toModify = axiom; + break; + } + } + OWLObjectPropertyAssertionAxiom newAxiom = null; + if (toModify != null) { + Set combindedAnnotations = new HashSet(toModify.getAnnotations()); + combindedAnnotations.removeAll(annotations); + newAxiom = modifyAnnotations(toModify, combindedAnnotations, model, metadata); + } + return newAxiom; + } + + private OWLObjectPropertyAssertionAxiom modifyAnnotations(OWLObjectPropertyAssertionAxiom axiom, + Set replacement, + ModelContainer model, METADATA metadata) { + OWLOntology ont = model.getAboxOntology(); + OWLDataFactory f = model.getOWLDataFactory(); + List changes = new ArrayList(2); + changes.add(new RemoveAxiom(ont, axiom)); + OWLObjectPropertyAssertionAxiom newAxiom = + f.getOWLObjectPropertyAssertionAxiom(axiom.getProperty(), axiom.getSubject(), axiom.getObject(), replacement); + changes.add(new AddAxiom(ont, newAxiom)); + applyChanges(model, changes, metadata); + return newAxiom; + } + + public void addAxiom(ModelContainer model, OWLAxiom axiom, METADATA metadata) { + OWLOntology ont = model.getAboxOntology(); + List changes = Collections.singletonList(new AddAxiom(ont, axiom)); + synchronized (ont) { + /* + * all changes to the ontology 
are synchronized via the ontology object + */ + applyChanges(model, ont.getOWLOntologyManager(), changes, metadata); + } + } + + void addAxioms(ModelContainer model, Set axioms, METADATA metadata) { + OWLOntology ont = model.getAboxOntology(); + List changes = new ArrayList(axioms.size()); + for (OWLAxiom axiom : axioms) { + changes.add(new AddAxiom(ont, axiom)); + } + synchronized (ont) { + /* + * all changes to the ontology are synchronized via the ontology object + */ + applyChanges(model, ont.getOWLOntologyManager(), changes, metadata); + } + } + + void removeAxiom(ModelContainer model, OWLAxiom axiom, METADATA metadata) { + OWLOntology ont = model.getAboxOntology(); + List changes = Collections.singletonList(new RemoveAxiom(ont, axiom)); + synchronized (ont) { + /* + * all changes to the ontology are synchronized via the ontology object + */ + applyChanges(model, ont.getOWLOntologyManager(), changes, metadata); + } + } + + void removeAxioms(IRI modelId, Set axioms, METADATA metadata) { + ModelContainer model = getModel(modelId); + removeAxioms(model, axioms, metadata); + } + + void removeAxioms(ModelContainer model, Set axioms, METADATA metadata) { + OWLOntology ont = model.getAboxOntology(); + List changes = new ArrayList(axioms.size()); + for (OWLAxiom axiom : axioms) { + changes.add(new RemoveAxiom(ont, axiom)); + } + synchronized (ont) { + /* + * all changes to the ontology are synchronized via the ontology object + */ + applyChanges(model, ont.getOWLOntologyManager(), changes, metadata); + } + } + + private void applyChanges(ModelContainer model, List changes, METADATA metadata) { + OWLOntology ont = model.getAboxOntology(); + synchronized (ont) { + /* + * all changes to the ontology are synchronized via the ontology object + */ + applyChanges(model, ont.getOWLOntologyManager(), changes, metadata); + } + } + + private void applyChanges(ModelContainer model, OWLOntologyManager m, + List changes, METADATA metadata) { + List appliedChanges = model.applyChanges(changes); + addToHistory(model, appliedChanges, metadata); + } + + /** + * Hook for implementing an undo and redo. + * + * @param model + * @param appliedChanges + * @param metadata + */ + protected void addToHistory(ModelContainer model, + List appliedChanges, METADATA metadata) { + // do nothing, for now + } + + protected OWLOntology loadOntologyDocumentSource(final OWLOntologyDocumentSource source, boolean minimal) throws OWLOntologyCreationException { + return loadOntologyDocumentSource(source, minimal, tbox.getOWLOntologyManager()); + } + + public static OWLOntology loadOntologyDocumentSource(final OWLOntologyDocumentSource source, boolean minimal, OWLOntologyManager manager) throws OWLOntologyCreationException { + // silence the OBO parser in the OWL-API + java.util.logging.Logger.getLogger("org.obolibrary").setLevel(java.util.logging.Level.SEVERE); + final Set originalFactories = removeOBOParserFactories(manager); + try { + // load model from source + if (minimal == false) { + //this gets the model to load all the OWL properly because it is using the tbox manager + //otherwise it doesn't understand the object properties. + OWLOntology abox_tbox_manager = loadOWLOntologyDocumentSource(source, manager); + //unfortunately it bizarrely does not retrieve the http://purl.org/dc/elements/1.1/title annotation + return abox_tbox_manager; + } else { + // only load the model, skip imports + // approach: return an empty ontology IRI for any IRI mapping request using. 
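    // Clarification of the approach above: the IRI mapper installed below creates an empty
    // in-memory ontology for every imported ontology IRI it is asked to resolve, returning null
    // for the source document's own IRI and for IRIs it has already handled. Imports therefore
    // resolve to empty placeholders inside this scratch manager, so the abox parses with its
    // import declarations intact while the potentially large tbox import closure is never
    // actually loaded.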
+ final OWLOntologyManager m = OWLManager.createOWLOntologyManager(); + final Set emptyOntologies = new HashSet(); + m.getIRIMappers().add(new OWLOntologyIRIMapper() { + + // generated + private static final long serialVersionUID = -8200679663396870351L; + + @Override + public IRI getDocumentIRI(IRI ontologyIRI) { + + // quick check: + // do nothing for the original IRI and known empty ontologies + if (source.getDocumentIRI().equals(ontologyIRI) || emptyOntologies.contains(ontologyIRI)) { + return null; + } + emptyOntologies.add(ontologyIRI); + try { + OWLOntology emptyOntology = m.createOntology(ontologyIRI); + return emptyOntology.getOntologyID().getDefaultDocumentIRI().orNull(); + } catch (OWLOntologyCreationException e) { + throw new RuntimeException(e); + } + } + }); + OWLOntology minimalAbox = loadOWLOntologyDocumentSource(source, m); + return minimalAbox; + } + } finally { + resetOBOParserFactories(manager, originalFactories); + } + } + + private static OWLOntology loadOWLOntologyDocumentSource(final OWLOntologyDocumentSource source, final OWLOntologyManager manager) throws OWLOntologyCreationException { + final OWLOntology ontology; + if (source instanceof RioMemoryTripleSource) { + RioParserImpl parser = new RioParserImpl(new RioRDFXMLDocumentFormatFactory()); + ontology = manager.createOntology(); + OWLOntologyLoaderConfiguration config = new OWLOntologyLoaderConfiguration(); + try { + parser.parse(source, ontology, config); + } catch (IOException e) { + throw new OWLOntologyCreationException(e); + } + } else { + ontology = manager.loadOntologyFromOntologyDocument(source); + } + return ontology; + } + + public OWLOntology getTbox() { + return tbox; + } + + public BlazegraphOntologyManager getGolego_repo() { + return go_lego_repo; + } + + + /** + * even if the manager has loaded a property before, and should know what kind it is, + * if the next ontology doesn't include an import statement or otherwise declare the properties used, the loader will guess that + * object properties are annotation properties and screw up. 
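A hedged usage sketch for the static loader above; the document source, the file path and the tboxManager variable are placeholders. With minimal set to false the abox is parsed by the tbox's manager so that object properties are recognized; with minimal set to true imports are stubbed out as described, which is enough for reading model metadata.

    OWLOntologyDocumentSource src = new IRIDocumentSource(IRI.create("file:///tmp/example-model.owl")); // illustrative
    // full load, sharing the tbox manager so property kinds are already declared
    OWLOntology full = CoreMolecularModelManager.loadOntologyDocumentSource(src, false, tboxManager);
    // metadata-only load in a scratch manager; imports resolve to empty placeholder ontologies
    OWLOntology minimal = CoreMolecularModelManager.loadOntologyDocumentSource(src, true, OWLManager.createOWLOntologyManager());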
+ * This purifies the gocam + * + * @param ont + * @return + * @throws OWLOntologyCreationException + */ + public static OWLOntology fixBrokenObjectPropertiesAndAxioms(OWLOntology ont) throws OWLOntologyCreationException { + OWLOntologyManager newman = OWLManager.createOWLOntologyManager(); + OWLOntology frank = newman.createOntology(ont.getOntologyID()); + OWLDataFactory df = newman.getOWLDataFactory(); + + //declare known annotation properties + OWLAnnotationProperty title_prop = df.getOWLAnnotationProperty(IRI.create("http://purl.org/dc/elements/1.1/title")); + OWLDeclarationAxiom title_prop_declaration = df.getOWLDeclarationAxiom(title_prop); + newman.addAxiom(frank, title_prop_declaration); + OWLAnnotationProperty title_prop2 = df.getOWLAnnotationProperty(IRI.create("http://purl.org/dc/terms/title")); + OWLDeclarationAxiom title_prop2_declaration = df.getOWLDeclarationAxiom(title_prop2); + newman.addAxiom(frank, title_prop2_declaration); + OWLAnnotationProperty skos_note = df.getOWLAnnotationProperty(IRI.create("http://www.w3.org/2004/02/skos/core#note")); + newman.addAxiom(frank, df.getOWLDeclarationAxiom(skos_note)); + OWLAnnotationProperty version_info = df.getOWLAnnotationProperty(IRI.create(OWL.versionInfo.getURI())); + newman.addAxiom(frank, df.getOWLDeclarationAxiom(version_info)); + OWLAnnotationProperty contributor_prop = df.getOWLAnnotationProperty(IRI.create("http://purl.org/dc/elements/1.1/contributor")); + newman.addAxiom(frank, df.getOWLDeclarationAxiom(contributor_prop)); + OWLAnnotationProperty date_prop = df.getOWLAnnotationProperty(IRI.create("http://purl.org/dc/elements/1.1/date")); + newman.addAxiom(frank, df.getOWLDeclarationAxiom(date_prop)); + OWLAnnotationProperty source_prop = df.getOWLAnnotationProperty(IRI.create("http://purl.org/dc/elements/1.1/source")); + newman.addAxiom(frank, df.getOWLDeclarationAxiom(source_prop)); + OWLAnnotationProperty state_prop = df.getOWLAnnotationProperty(IRI.create("http://geneontology.org/lego/modelstate")); + newman.addAxiom(frank, df.getOWLDeclarationAxiom(state_prop)); + OWLAnnotationProperty evidence_prop = df.getOWLAnnotationProperty(IRI.create("http://geneontology.org/lego/evidence")); + newman.addAxiom(frank, df.getOWLDeclarationAxiom(evidence_prop)); + OWLAnnotationProperty provided_by_prop = df.getOWLAnnotationProperty(IRI.create("http://purl.org/pav/providedBy")); + newman.addAxiom(frank, df.getOWLDeclarationAxiom(provided_by_prop)); + OWLAnnotationProperty x_prop = df.getOWLAnnotationProperty(IRI.create("http://geneontology.org/lego/hint/layout/x")); + newman.addAxiom(frank, df.getOWLDeclarationAxiom(x_prop)); + OWLAnnotationProperty y_prop = df.getOWLAnnotationProperty(IRI.create("http://geneontology.org/lego/hint/layout/y")); + newman.addAxiom(frank, df.getOWLDeclarationAxiom(y_prop)); + OWLAnnotationProperty rdfs_label = df.getOWLAnnotationProperty(OWLRDFVocabulary.RDFS_LABEL.getIRI()); + newman.addAxiom(frank, df.getOWLDeclarationAxiom(rdfs_label)); + OWLAnnotationProperty rdfs_comment = df.getOWLAnnotationProperty(OWLRDFVocabulary.RDFS_COMMENT.getIRI()); + newman.addAxiom(frank, df.getOWLDeclarationAxiom(rdfs_comment)); + OWLAnnotationProperty rdfs_seealso = df.getOWLAnnotationProperty(OWLRDFVocabulary.RDFS_SEE_ALSO.getIRI()); + newman.addAxiom(frank, df.getOWLDeclarationAxiom(rdfs_seealso)); + OWLAnnotationProperty skos_exact_match = df.getOWLAnnotationProperty(IRI.create("http://www.w3.org/2004/02/skos/core#exactMatch")); + newman.addAxiom(frank, df.getOWLDeclarationAxiom(skos_exact_match)); + 
OWLAnnotationProperty skos_altlabel = df.getOWLAnnotationProperty(IRI.create("http://www.w3.org/2004/02/skos/core#altLabel")); + newman.addAxiom(frank, df.getOWLDeclarationAxiom(skos_altlabel)); + OWLAnnotationProperty definition = df.getOWLAnnotationProperty(IRI.create("http://purl.obolibrary.org/obo/IAO_0000115")); + newman.addAxiom(frank, df.getOWLDeclarationAxiom(definition)); + OWLAnnotationProperty database_cross_reference = df.getOWLAnnotationProperty(IRI.create("http://www.geneontology.org/formats/oboInOwl#hasDbXref")); + newman.addAxiom(frank, df.getOWLDeclarationAxiom(database_cross_reference)); + OWLAnnotationProperty canonical_record = df.getOWLAnnotationProperty(IRI.create("http://geneontology.org/lego/canonical_record")); + newman.addAxiom(frank, df.getOWLDeclarationAxiom(canonical_record)); + OWLAnnotationProperty iuphar_id = df.getOWLAnnotationProperty(IRI.create("http://geneontology.org/lego/iuphar_id")); + newman.addAxiom(frank, df.getOWLDeclarationAxiom(iuphar_id)); + OWLAnnotationProperty in_taxon = df.getOWLAnnotationProperty(IRI.create("https://w3id.org/biolink/vocab/in_taxon")); + newman.addAxiom(frank, df.getOWLDeclarationAxiom(in_taxon)); + + //copy over ontology annotations + for (OWLAnnotation anno : ont.getAnnotations()) { + AddOntologyAnnotation add = new AddOntologyAnnotation(frank, anno); + newman.applyChange(add); + } + + //add correct property declarations + Set anno_properties = ont.getAnnotationPropertiesInSignature(); + Set bad_props = new HashSet(); + for (OWLAnnotationProperty anno_prop : anno_properties) { + if (anno_prop.getIRI().toString().contains("http://purl.obolibrary.org/obo/RO_") || + anno_prop.getIRI().toString().contains("http://purl.obolibrary.org/obo/BFO_")) { + bad_props.add(anno_prop.getIRI().toString()); + OWLObjectProperty object_prop = df.getOWLObjectProperty(anno_prop.getIRI()); + OWLDeclarationAxiom object_prop_declaration = df.getOWLDeclarationAxiom(object_prop); + newman.addAxiom(frank, object_prop_declaration); + } + } + //fix screwed up axioms, collect the rest + for (OWLAxiom axiom : ont.getAxioms()) { + if (axiom.isOfType(AxiomType.ANNOTATION_ASSERTION)) { + OWLAnnotationAssertionAxiom a = (OWLAnnotationAssertionAxiom) axiom; + String prop_iri = a.getProperty().getIRI().toString(); + if (bad_props.contains(prop_iri)) { + Set annos = a.getAnnotations(); + OWLObjectProperty p = df.getOWLObjectProperty(IRI.create(prop_iri)); + IRI object = a.getValue().asIRI().get(); + IRI subject = IRI.create(a.getSubject().toString()); + OWLObjectPropertyAssertionAxiom new_ass = df.getOWLObjectPropertyAssertionAxiom(p, df.getOWLNamedIndividual(subject), df.getOWLNamedIndividual(object), annos); + newman.addAxiom(frank, new_ass); + } else { + newman.addAxiom(frank, axiom); + } + } else { + newman.addAxiom(frank, axiom); + } + } + //return new fixed ontology + return frank; + } + + public static OWLOntology removeDeadAnnotationsAndImports(OWLOntology ont) throws OWLOntologyCreationException { + OWLOntologyManager m = OWLManager.createOWLOntologyManager(); + OWLAnnotationProperty json_model_prop = m.getOWLDataFactory().getOWLAnnotationProperty(IRI.create("http://geneontology.org/lego/json-model")); + //get rid of all imports + Set imports = ont.getImportsDeclarations(); + for (OWLImportsDeclaration import_declaration : imports) { + m.applyChange(new RemoveImport(ont, import_declaration)); + } + //get rid of the json annotations lurking about + for (OWLAnnotation anno : ont.getAnnotations()) { + if (anno.getProperty().equals(json_model_prop)) { + 
RemoveOntologyAnnotation rm = new RemoveOntologyAnnotation(ont, anno); + m.applyChange(rm); + } + } + //purify of the json annotation property as well + OWLDeclarationAxiom json_prop_declaration = m.getOWLDataFactory().getOWLDeclarationAxiom(json_model_prop); + m.removeAxiom(ont, json_prop_declaration); + return ont; + } } \ No newline at end of file diff --git a/minerva-core/src/main/java/org/geneontology/minerva/MinervaOWLGraphWrapper.java b/minerva-core/src/main/java/org/geneontology/minerva/MinervaOWLGraphWrapper.java index e6f6744e..6ed3274a 100644 --- a/minerva-core/src/main/java/org/geneontology/minerva/MinervaOWLGraphWrapper.java +++ b/minerva-core/src/main/java/org/geneontology/minerva/MinervaOWLGraphWrapper.java @@ -1,94 +1,68 @@ package org.geneontology.minerva; -import java.io.Closeable; -import java.io.IOException; -import java.io.UnsupportedEncodingException; -import java.util.ArrayList; -import java.util.Collection; -import java.util.Collections; -import java.util.HashMap; -import java.util.HashSet; -import java.util.Map; -import java.util.Set; - +import com.google.common.collect.Sets; +import gnu.trove.set.hash.THashSet; import org.apache.commons.lang.SerializationUtils; import org.apache.log4j.Logger; import org.obolibrary.obo2owl.Obo2OWLConstants; import org.obolibrary.obo2owl.Obo2OWLConstants.Obo2OWLVocabulary; import org.obolibrary.oboformat.parser.OBOFormatConstants.OboFormatTag; -import org.semanticweb.owlapi.model.AxiomType; -import org.semanticweb.owlapi.model.IRI; -import org.semanticweb.owlapi.model.OWLAnnotation; -import org.semanticweb.owlapi.model.OWLAnnotationAssertionAxiom; -import org.semanticweb.owlapi.model.OWLAnnotationProperty; -import org.semanticweb.owlapi.model.OWLAnnotationSubject; -import org.semanticweb.owlapi.model.OWLAnnotationValue; -import org.semanticweb.owlapi.model.OWLClass; -import org.semanticweb.owlapi.model.OWLDataFactory; -import org.semanticweb.owlapi.model.OWLDeclarationAxiom; -import org.semanticweb.owlapi.model.OWLEntity; -import org.semanticweb.owlapi.model.OWLLiteral; -import org.semanticweb.owlapi.model.OWLNamedIndividual; -import org.semanticweb.owlapi.model.OWLNamedObject; -import org.semanticweb.owlapi.model.OWLObject; -import org.semanticweb.owlapi.model.OWLObjectProperty; -import org.semanticweb.owlapi.model.OWLOntology; -import org.semanticweb.owlapi.model.OWLOntologyCreationException; -import org.semanticweb.owlapi.model.OWLOntologyManager; +import org.semanticweb.owlapi.model.*; import org.semanticweb.owlapi.vocab.OWLRDFVocabulary; - -import com.google.common.collect.Sets; - -import gnu.trove.set.hash.THashSet; import owltools.io.ParserWrapper; import owltools.util.OwlHelper; +import java.io.Closeable; +import java.io.IOException; +import java.io.UnsupportedEncodingException; +import java.util.*; + /** * Consolidation of methods actually used in Minerva from the OWLTools OWLGraphWrapper class */ public class MinervaOWLGraphWrapper implements Closeable { - private static final Logger LOG = Logger.getLogger(MinervaOWLGraphWrapper.class); - private Map altIdMap = null; - private String defaultIDSpace = ""; - final Map idSpaceMap; - public OWLOntology sourceOntology; // graph is seeded from this ontology. 
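The two static helpers completed in the hunk above, fixBrokenObjectPropertiesAndAxioms and removeDeadAnnotationsAndImports, can be chained before serializing a GO-CAM ABox. A minimal sketch, assuming the helpers live on CoreMolecularModelManager (the enclosing class is not named in this hunk) and using placeholder file names:

    import org.geneontology.minerva.CoreMolecularModelManager; // assumed home of the helpers above
    import org.semanticweb.owlapi.apibinding.OWLManager;
    import org.semanticweb.owlapi.model.OWLOntology;
    import org.semanticweb.owlapi.model.OWLOntologyManager;

    import java.io.File;
    import java.io.FileOutputStream;

    // Sketch only; not part of the patch. File names are placeholders.
    public class GoCamRepairSketch {
        public static void main(String[] args) throws Exception {
            OWLOntologyManager m = OWLManager.createOWLOntologyManager();
            OWLOntology abox = m.loadOntologyFromOntologyDocument(new File("go-cam-model.owl"));
            // Re-type RO/BFO assertions that the loader guessed to be annotation assertions.
            OWLOntology repaired = CoreMolecularModelManager.fixBrokenObjectPropertiesAndAxioms(abox);
            // Drop owl:imports and the lego/json-model ontology annotation before export.
            OWLOntology cleaned = CoreMolecularModelManager.removeDeadAnnotationsAndImports(repaired);
            try (FileOutputStream out = new FileOutputStream("go-cam-clean.owl")) {
                cleaned.getOWLOntologyManager().saveOntology(cleaned, out);
            }
        }
    }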
- public static Map annotationPropertyMap = initAnnotationPropertyMap(); - public Set supportOntologySet = new HashSet(); - - public MinervaOWLGraphWrapper(OWLOntology ontology) { - super(); - idSpaceMap = new HashMap(); - sourceOntology = ontology; - } - - public MinervaOWLGraphWrapper(String iri) throws OWLOntologyCreationException { - super(); - idSpaceMap = new HashMap(); - ParserWrapper pw = new ParserWrapper(); - OWLOntologyManager manager = pw.getManager(); - sourceOntology = manager.createOntology(IRI.create(iri)); - } - - public static final String DEFAULT_IRI_PREFIX = Obo2OWLConstants.DEFAULT_IRI_PREFIX; - - /** - * Table 5.8 Translation of Annotation Vocabulary. - * - * @return property map - */ - private static HashMap initAnnotationPropertyMap() { - - HashMap map = new HashMap(); - map.put(OboFormatTag.TAG_IS_OBSELETE.getTag(),OWLRDFVocabulary.OWL_DEPRECATED.getIRI()); - map.put(OboFormatTag.TAG_NAME.getTag(),OWLRDFVocabulary.RDFS_LABEL.getIRI()); - map.put(OboFormatTag.TAG_COMMENT.getTag(),OWLRDFVocabulary.RDFS_COMMENT.getIRI()); - - for(Obo2OWLVocabulary vac: Obo2OWLVocabulary.values()){ - map.put(vac.getMappedTag(), vac.getIRI()); - } + private static final Logger LOG = Logger.getLogger(MinervaOWLGraphWrapper.class); + private Map altIdMap = null; + private String defaultIDSpace = ""; + final Map idSpaceMap; + public OWLOntology sourceOntology; // graph is seeded from this ontology. + public static Map annotationPropertyMap = initAnnotationPropertyMap(); + public Set supportOntologySet = new HashSet(); + + public MinervaOWLGraphWrapper(OWLOntology ontology) { + super(); + idSpaceMap = new HashMap(); + sourceOntology = ontology; + } + + public MinervaOWLGraphWrapper(String iri) throws OWLOntologyCreationException { + super(); + idSpaceMap = new HashMap(); + ParserWrapper pw = new ParserWrapper(); + OWLOntologyManager manager = pw.getManager(); + sourceOntology = manager.createOntology(IRI.create(iri)); + } + + public static final String DEFAULT_IRI_PREFIX = Obo2OWLConstants.DEFAULT_IRI_PREFIX; + + /** + * Table 5.8 Translation of Annotation Vocabulary. + * + * @return property map + */ + private static HashMap initAnnotationPropertyMap() { + + HashMap map = new HashMap(); + map.put(OboFormatTag.TAG_IS_OBSELETE.getTag(), OWLRDFVocabulary.OWL_DEPRECATED.getIRI()); + map.put(OboFormatTag.TAG_NAME.getTag(), OWLRDFVocabulary.RDFS_LABEL.getIRI()); + map.put(OboFormatTag.TAG_COMMENT.getTag(), OWLRDFVocabulary.RDFS_COMMENT.getIRI()); + + for (Obo2OWLVocabulary vac : Obo2OWLVocabulary.values()) { + map.put(vac.getMappedTag(), vac.getIRI()); + } /* map.put("expand_expression_to",Obo2OWLVocabulary.IRI_IAO_0000424.getIRI()); map.put("expand_assertion_to",Obo2OWLVocabulary.IRI_IAO_0000425.getIRI()); @@ -97,379 +71,371 @@ private static HashMap initAnnotationPropertyMap() { map.put("is_anti_symmetric",Obo2OWLVocabulary.IRI_IAO_0000427.getIRI()); map.put("replaced_by", Obo2OWLVocabulary.IRI_IAO_0100001.getIRI());*/ - return map; - } - - /** - * Returns an OWLClass given an IRI - *
<p>
- * the class must be declared in either the source ontology, or in a support ontology, - * otherwise null is returned - * - * @param iri - * @return {@link OWLClass} - */ - public OWLClass getOWLClass(IRI iri) { - OWLClass c = getDataFactory().getOWLClass(iri); - return c; - //there used to be a check here to ensure that the class IRI existed in a tbox ontology - //as there is no way to create a class using the UI without getting one out of the tbox ontology - //I think it is probably safe to remove this check. To add it, use BlazegraphOntologyManager.exists() - } - - - /** - * Returns the OWLObjectProperty with this IRI - *
<p>
- * Must have been declared in one of the ontologies - * - * @param iri - * @return {@link OWLObjectProperty} - */ - public OWLObjectProperty getOWLObjectProperty(String iri) { - return getOWLObjectProperty(IRI.create(iri)); - } - - public OWLObjectProperty getOWLObjectProperty(IRI iri) { - OWLObjectProperty p = getDataFactory().getOWLObjectProperty(iri); + return map; + } + + /** + * Returns an OWLClass given an IRI + *
<p>
+ * the class must be declared in either the source ontology, or in a support ontology, + * otherwise null is returned + * + * @param iri + * @return {@link OWLClass} + */ + public OWLClass getOWLClass(IRI iri) { + OWLClass c = getDataFactory().getOWLClass(iri); + return c; + //there used to be a check here to ensure that the class IRI existed in a tbox ontology + //as there is no way to create a class using the UI without getting one out of the tbox ontology + //I think it is probably safe to remove this check. To add it, use BlazegraphOntologyManager.exists() + } + + + /** + * Returns the OWLObjectProperty with this IRI + *
<p>
+ * Must have been declared in one of the ontologies + * + * @param iri + * @return {@link OWLObjectProperty} + */ + public OWLObjectProperty getOWLObjectProperty(String iri) { + return getOWLObjectProperty(IRI.create(iri)); + } + + public OWLObjectProperty getOWLObjectProperty(IRI iri) { + OWLObjectProperty p = getDataFactory().getOWLObjectProperty(iri); //TODO re-instate checks using blazegraph ontology // for (OWLOntology o : getAllOntologies()) { // if (o.getDeclarationAxioms(p).size() > 0) { - return p; + return p; // } // } // return null; - } - - /** - * fetches the rdfs:label for an OWLObject - *
<p>
- * assumes zero or one rdfs:label - * - * @param c - * @return label - */ - public String getLabel(OWLObject c) { - return getAnnotationValue(c, getDataFactory().getRDFSLabel()); - } - - /** - * fetches the value of a single-valued annotation property for an OWLObject - *
<p>
- * TODO: provide a flag that determines behavior in the case of >1 value - * - * @param c - * @param lap - * @return value - */ - public String getAnnotationValue(OWLObject c, OWLAnnotationProperty lap) { - Setanns = new HashSet(); - if (c instanceof OWLEntity) { - for (OWLOntology ont : getAllOntologies()) { - anns.addAll(OwlHelper.getAnnotations((OWLEntity) c, lap, ont)); - } - } - else { - return null; - } - for (OWLAnnotation a : anns) { - if (a.getValue() instanceof OWLLiteral) { - OWLLiteral val = (OWLLiteral) a.getValue(); - return (String) SerializationUtils.clone(val.getLiteral()); // return first - TODO - check zero or one - } - } - - return null; - } - - /** - * Every OWLGraphWrapper objects wraps zero or one source ontologies. - * - * @return ontology - */ - public OWLOntology getSourceOntology() { - return sourceOntology; - } - - public void setSourceOntology(OWLOntology sourceOntology) { - this.sourceOntology = sourceOntology; - } - - - public OWLOntologyManager getManager() { - return sourceOntology.getOWLOntologyManager(); - } - - - public void addSupportOntology(OWLOntology o) { - this.supportOntologySet.add(o); - } - public void removeSupportOntology(OWLOntology o) { - this.supportOntologySet.remove(o); - } - - - /** - * in general application code need not call this - it is mostly used internally - * - * @return union of source ontology plus all supporting ontologies plus their import closures - */ - public Set getAllOntologies() { - Set all = new HashSet(getSupportOntologySet()); - for (OWLOntology o : getSupportOntologySet()) { - all.addAll(o.getImportsClosure()); - } - all.add(getSourceOntology()); - all.addAll(getSourceOntology().getImportsClosure()); - return all; - } - - /** - * all operations are over a set of ontologies - the source ontology plus - * any number of supporting ontologies. The supporting ontologies may be drawn - * from the imports closure of the source ontology, although this need not be the case. - * - * @return set of support ontologies - */ - public Set getSupportOntologySet() { - return supportOntologySet; - } - -// @Override - public synchronized void close() throws IOException { + } + + /** + * fetches the rdfs:label for an OWLObject + *
<p>
+ * assumes zero or one rdfs:label + * + * @param c + * @return label + */ + public String getLabel(OWLObject c) { + return getAnnotationValue(c, getDataFactory().getRDFSLabel()); + } + + /** + * fetches the value of a single-valued annotation property for an OWLObject + *
<p>
+ * TODO: provide a flag that determines behavior in the case of >1 value + * + * @param c + * @param lap + * @return value + */ + public String getAnnotationValue(OWLObject c, OWLAnnotationProperty lap) { + Set anns = new HashSet(); + if (c instanceof OWLEntity) { + for (OWLOntology ont : getAllOntologies()) { + anns.addAll(OwlHelper.getAnnotations((OWLEntity) c, lap, ont)); + } + } else { + return null; + } + for (OWLAnnotation a : anns) { + if (a.getValue() instanceof OWLLiteral) { + OWLLiteral val = (OWLLiteral) a.getValue(); + return (String) SerializationUtils.clone(val.getLiteral()); // return first - TODO - check zero or one + } + } + + return null; + } + + /** + * Every OWLGraphWrapper objects wraps zero or one source ontologies. + * + * @return ontology + */ + public OWLOntology getSourceOntology() { + return sourceOntology; + } + + public void setSourceOntology(OWLOntology sourceOntology) { + this.sourceOntology = sourceOntology; + } + + + public OWLOntologyManager getManager() { + return sourceOntology.getOWLOntologyManager(); + } + + + public void addSupportOntology(OWLOntology o) { + this.supportOntologySet.add(o); + } + + public void removeSupportOntology(OWLOntology o) { + this.supportOntologySet.remove(o); + } + + + /** + * in general application code need not call this - it is mostly used internally + * + * @return union of source ontology plus all supporting ontologies plus their import closures + */ + public Set getAllOntologies() { + Set all = new HashSet(getSupportOntologySet()); + for (OWLOntology o : getSupportOntologySet()) { + all.addAll(o.getImportsClosure()); + } + all.add(getSourceOntology()); + all.addAll(getSourceOntology().getImportsClosure()); + return all; + } + + /** + * all operations are over a set of ontologies - the source ontology plus + * any number of supporting ontologies. The supporting ontologies may be drawn + * from the imports closure of the source ontology, although this need not be the case. + * + * @return set of support ontologies + */ + public Set getSupportOntologySet() { + return supportOntologySet; + } + + // @Override + public synchronized void close() throws IOException { // if (reasoner != null) { // reasoner.dispose(); // reasoner = null; // isSynchronized = false; // } // neighborAxioms = null; - } - - - /** - * Fetch all {@link OWLClass} objects from all ontologies. - * This set is a copy. Changes are not reflected in the ontologies. - * - * @return set of all {@link OWLClass} - */ - public Set getAllOWLClasses() { - Set owlClasses = new THashSet(); - for (OWLOntology o : getAllOntologies()) { - owlClasses.addAll(o.getClassesInSignature()); - } - return owlClasses; - } - - - /** - * Given an OBO-style ID, return the corresponding OWLClass, if it is declared - otherwise null - * - * @param id - e.g. GO:0008150 - * @return OWLClass with id or null - */ - public OWLClass getOWLClassByIdentifier(String id) { - return getOWLClassByIdentifier(id, false); - } - - /** - * - * As {@link #getOWLClassByIdentifier(String)} but include pre-resolution step - * using altId map. - * - * Currently this additional boolean option is obo-format specific; in OBO, - * when a class A is merged into B, the OBO-ID of A is preserved with an hasAlternateId - * annotation on the IRI of B. Using this method, with isAutoResolve=true, a query for - * the OBO ID of A will return class B. - * - * In future, analogous options will be added to IRI-based access to classes. 
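A short usage sketch for the alt-id lookup described in the javadoc above; goOntology stands in for an already loaded GO tbox, and GO:0000004 is used purely as an illustration of an alternate id of a merged term:

    import org.geneontology.minerva.MinervaOWLGraphWrapper;
    import org.semanticweb.owlapi.model.OWLClass;
    import org.semanticweb.owlapi.model.OWLOntology;

    // Sketch only; not part of the patch.
    class AltIdLookupSketch {
        static OWLClass resolve(OWLOntology goOntology) {
            MinervaOWLGraphWrapper graph = new MinervaOWLGraphWrapper(goOntology);
            // Plain OBO-style lookup: "GO:0008150" expands to http://purl.obolibrary.org/obo/GO_0008150
            OWLClass bp = graph.getOWLClassByIdentifier("GO:0008150");
            // With isAutoResolve = true the id is first checked against the alt_id map, so an
            // alternate id of a merged term resolves to the surviving class (null if unknown).
            OWLClass merged = graph.getOWLClassByIdentifier("GO:0000004", true);
            return merged != null ? merged : bp;
        }
    }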
- * - * @param id - * @param isAutoResolve - * @return OWLClass with id or null - */ - public OWLClass getOWLClassByIdentifier(String id, boolean isAutoResolve) { - IRI iri = getIRIByIdentifier(id, isAutoResolve); - if (iri != null) - return getOWLClass(iri); - return null; - } - - public IRI getIRIByIdentifier(String id, boolean isAutoResolve) { - if (isAutoResolve) { - OWLObject obj = this.getObjectByAltId(id); - if (obj != null) { - return ((OWLNamedObject) obj).getIRI(); - } - } - - // special magic for finding IRIs from a non-standard identifier - // This is the case for relations (OWLObject properties) with a short hand - // or for relations with a non identifiers with-out a colon, e.g. negative_regulation - // we first collect all candidate matching properties in candIRISet. - Set candIRISet = Sets.newHashSet(); - if (!id.contains(":")) { - final OWLAnnotationProperty shortHand = getDataFactory().getOWLAnnotationProperty(Obo2OWLVocabulary.IRI_OIO_shorthand.getIRI()); - final OWLAnnotationProperty oboIdInOwl = getDataFactory().getOWLAnnotationProperty(trTagToIRI(OboFormatTag.TAG_ID.getTag())); - for (OWLOntology o : getAllOntologies()) { - for(OWLObjectProperty p : o.getObjectPropertiesInSignature()) { - // check for short hand or obo ID in owl - Set annotations = OwlHelper.getAnnotations(p, o); - if (annotations != null) { - for (OWLAnnotation owlAnnotation : annotations) { - OWLAnnotationProperty property = owlAnnotation.getProperty(); - if ((shortHand != null && shortHand.equals(property)) - || (oboIdInOwl != null && oboIdInOwl.equals(property))) - { - OWLAnnotationValue value = owlAnnotation.getValue(); - if (value != null && value instanceof OWLLiteral) { - OWLLiteral literal = (OWLLiteral) value; - String shortHandLabel = literal.getLiteral(); - if (id.equals(shortHandLabel)) { - candIRISet.add(p.getIRI()); - } - } - } - } - } - } - } - } - - // In the case where we find multiple candidate IRIs, we give priorities for IRIs from BFO or RO ontologies. - IRI returnIRI = null; - for (IRI iri: candIRISet) { - String iriStr = iri.toString(); - if (iriStr.contains("BFO") || iriStr.contains("RO")) { - returnIRI = iri; - } - } - - // If we were not able to find RO/BFO candidate IRIs for id - if (returnIRI == null) { - // We return it only if we have only one candidate. - if (candIRISet.size() == 1) - return new ArrayList(candIRISet).get(0); - // This is the unexpected case. Multiple non-RO/BPO properties are mapped to given id and it's not clear what to return. - else if (candIRISet.size() > 1) - throw new RuntimeException("Multiple candidate IRIs are found for id: " + id + ". None of them are from BFO or RO."); - } - // If we were able to find the property from RO/BFO, just return it. 
- else { - return returnIRI; - } - - // otherwise use the obo2owl method - //Obo2Owl b = new Obo2Owl(getManager()); // re-use manager, creating a new one can be expensive as this is a highly used code path - //b.setObodoc(new OBODoc()); - return oboIdToIRI(id); - } - - public static IRI trTagToIRI(String tag){ - IRI iri = null; - if (annotationPropertyMap.containsKey(tag)) { - iri = annotationPropertyMap.get(tag); - } - else { - //iri = IRI.create(Obo2OWLConstants.DEFAULT_IRI_PREFIX+"IAO_"+tag); - iri = IRI.create(Obo2OWLConstants.OIOVOCAB_IRI_PREFIX+tag); - - } - - return iri; - - } - - public IRI oboIdToIRI(String id) { - if (id.contains(" ")) { - LOG.error("id contains space: \""+id+"\""); - //throw new UnsupportedEncodingException(); - return null; - } - - // No conversion is required if this is already an IRI (ID-as-URI rule) - if (id.startsWith("http:")) { // TODO - roundtrip from other schemes - return IRI.create(id); - } - else if (id.startsWith("https:")) { // TODO - roundtrip from other schemes - return IRI.create(id); - } - else if (id.startsWith("ftp:")) { // TODO - roundtrip from other schemes - return IRI.create(id); - } - else if (id.startsWith("urn:")) { // TODO - roundtrip from other schemes - return IRI.create(id); - } - - // TODO - treat_xrefs_as_equivalent - // special case rule for relation xrefs: - // 5.9.3. Special Rules for Relations - if (!id.contains(":")) { - String xid = translateShorthandIdToExpandedId(id); - if (!xid.equals(id)) - return oboIdToIRI(xid); - } - - String[] idParts = id.split(":", 2); - String db; - String localId; - if (idParts.length > 1) { - db = idParts[0]; - localId = idParts[1]; - if(localId.contains("_")){ - db += "#_"; // NonCanonical-Prefixed-ID - }else - db += "_"; - } - else if (idParts.length == 0) { - db = getDefaultIDSpace()+"#"; - localId = id; - } - else { // == 1 - // todo use owlOntology IRI - db = getDefaultIDSpace()+"#"; - // if(id.contains("_")) - // db += "_"; - - localId = idParts[0]; // Unprefixed-ID - } - - - String uriPrefix = Obo2OWLConstants.DEFAULT_IRI_PREFIX+db; - if (idSpaceMap.containsKey(db)) { - uriPrefix = idSpaceMap.get(db); - } - - String safeId; - try { - safeId = java.net.URLEncoder.encode(localId,"US-ASCII"); - } catch (UnsupportedEncodingException e1) { - // TODO Auto-generated catch block - return null; - } - - if (safeId.contains(" ")) - safeId = safeId.replace(" ", "_"); - IRI iri = null; - try { - iri = IRI.create(uriPrefix + safeId); - } catch (IllegalArgumentException e) { - // TODO - define new exception class for this - // throw new UnsupportedEncodingException(); - return null; - } - - return iri; - } - - // 5.9.3. Special Rules for Relations - private String translateShorthandIdToExpandedId(String id) { - if (id.contains(":")) { - return id; - }else { - System.err.println("line 467 translateShorthandIdToExpandedId fail on need for obo"); - System.exit(-1); - } - return null; + } + + + /** + * Fetch all {@link OWLClass} objects from all ontologies. + * This set is a copy. Changes are not reflected in the ontologies. + * + * @return set of all {@link OWLClass} + */ + public Set getAllOWLClasses() { + Set owlClasses = new THashSet(); + for (OWLOntology o : getAllOntologies()) { + owlClasses.addAll(o.getClassesInSignature()); + } + return owlClasses; + } + + + /** + * Given an OBO-style ID, return the corresponding OWLClass, if it is declared - otherwise null + * + * @param id - e.g. 
GO:0008150 + * @return OWLClass with id or null + */ + public OWLClass getOWLClassByIdentifier(String id) { + return getOWLClassByIdentifier(id, false); + } + + /** + * As {@link #getOWLClassByIdentifier(String)} but include pre-resolution step + * using altId map. + *
<p>
+ * Currently this additional boolean option is obo-format specific; in OBO, + * when a class A is merged into B, the OBO-ID of A is preserved with an hasAlternateId + * annotation on the IRI of B. Using this method, with isAutoResolve=true, a query for + * the OBO ID of A will return class B. + *
<p>
+ * In future, analogous options will be added to IRI-based access to classes. + * + * @param id + * @param isAutoResolve + * @return OWLClass with id or null + */ + public OWLClass getOWLClassByIdentifier(String id, boolean isAutoResolve) { + IRI iri = getIRIByIdentifier(id, isAutoResolve); + if (iri != null) + return getOWLClass(iri); + return null; + } + + public IRI getIRIByIdentifier(String id, boolean isAutoResolve) { + if (isAutoResolve) { + OWLObject obj = this.getObjectByAltId(id); + if (obj != null) { + return ((OWLNamedObject) obj).getIRI(); + } + } + + // special magic for finding IRIs from a non-standard identifier + // This is the case for relations (OWLObject properties) with a short hand + // or for relations with a non identifiers with-out a colon, e.g. negative_regulation + // we first collect all candidate matching properties in candIRISet. + Set candIRISet = Sets.newHashSet(); + if (!id.contains(":")) { + final OWLAnnotationProperty shortHand = getDataFactory().getOWLAnnotationProperty(Obo2OWLVocabulary.IRI_OIO_shorthand.getIRI()); + final OWLAnnotationProperty oboIdInOwl = getDataFactory().getOWLAnnotationProperty(trTagToIRI(OboFormatTag.TAG_ID.getTag())); + for (OWLOntology o : getAllOntologies()) { + for (OWLObjectProperty p : o.getObjectPropertiesInSignature()) { + // check for short hand or obo ID in owl + Set annotations = OwlHelper.getAnnotations(p, o); + if (annotations != null) { + for (OWLAnnotation owlAnnotation : annotations) { + OWLAnnotationProperty property = owlAnnotation.getProperty(); + if ((shortHand != null && shortHand.equals(property)) + || (oboIdInOwl != null && oboIdInOwl.equals(property))) { + OWLAnnotationValue value = owlAnnotation.getValue(); + if (value != null && value instanceof OWLLiteral) { + OWLLiteral literal = (OWLLiteral) value; + String shortHandLabel = literal.getLiteral(); + if (id.equals(shortHandLabel)) { + candIRISet.add(p.getIRI()); + } + } + } + } + } + } + } + } + + // In the case where we find multiple candidate IRIs, we give priorities for IRIs from BFO or RO ontologies. + IRI returnIRI = null; + for (IRI iri : candIRISet) { + String iriStr = iri.toString(); + if (iriStr.contains("BFO") || iriStr.contains("RO")) { + returnIRI = iri; + } + } + + // If we were not able to find RO/BFO candidate IRIs for id + if (returnIRI == null) { + // We return it only if we have only one candidate. + if (candIRISet.size() == 1) + return new ArrayList(candIRISet).get(0); + // This is the unexpected case. Multiple non-RO/BPO properties are mapped to given id and it's not clear what to return. + else if (candIRISet.size() > 1) + throw new RuntimeException("Multiple candidate IRIs are found for id: " + id + ". None of them are from BFO or RO."); + } + // If we were able to find the property from RO/BFO, just return it. 
+ else { + return returnIRI; + } + + // otherwise use the obo2owl method + //Obo2Owl b = new Obo2Owl(getManager()); // re-use manager, creating a new one can be expensive as this is a highly used code path + //b.setObodoc(new OBODoc()); + return oboIdToIRI(id); + } + + public static IRI trTagToIRI(String tag) { + IRI iri = null; + if (annotationPropertyMap.containsKey(tag)) { + iri = annotationPropertyMap.get(tag); + } else { + //iri = IRI.create(Obo2OWLConstants.DEFAULT_IRI_PREFIX+"IAO_"+tag); + iri = IRI.create(Obo2OWLConstants.OIOVOCAB_IRI_PREFIX + tag); + + } + + return iri; + + } + + public IRI oboIdToIRI(String id) { + if (id.contains(" ")) { + LOG.error("id contains space: \"" + id + "\""); + //throw new UnsupportedEncodingException(); + return null; + } + + // No conversion is required if this is already an IRI (ID-as-URI rule) + if (id.startsWith("http:")) { // TODO - roundtrip from other schemes + return IRI.create(id); + } else if (id.startsWith("https:")) { // TODO - roundtrip from other schemes + return IRI.create(id); + } else if (id.startsWith("ftp:")) { // TODO - roundtrip from other schemes + return IRI.create(id); + } else if (id.startsWith("urn:")) { // TODO - roundtrip from other schemes + return IRI.create(id); + } + + // TODO - treat_xrefs_as_equivalent + // special case rule for relation xrefs: + // 5.9.3. Special Rules for Relations + if (!id.contains(":")) { + String xid = translateShorthandIdToExpandedId(id); + if (!xid.equals(id)) + return oboIdToIRI(xid); + } + + String[] idParts = id.split(":", 2); + String db; + String localId; + if (idParts.length > 1) { + db = idParts[0]; + localId = idParts[1]; + if (localId.contains("_")) { + db += "#_"; // NonCanonical-Prefixed-ID + } else + db += "_"; + } else if (idParts.length == 0) { + db = getDefaultIDSpace() + "#"; + localId = id; + } else { // == 1 + // todo use owlOntology IRI + db = getDefaultIDSpace() + "#"; + // if(id.contains("_")) + // db += "_"; + + localId = idParts[0]; // Unprefixed-ID + } + + + String uriPrefix = Obo2OWLConstants.DEFAULT_IRI_PREFIX + db; + if (idSpaceMap.containsKey(db)) { + uriPrefix = idSpaceMap.get(db); + } + + String safeId; + try { + safeId = java.net.URLEncoder.encode(localId, "US-ASCII"); + } catch (UnsupportedEncodingException e1) { + // TODO Auto-generated catch block + return null; + } + + if (safeId.contains(" ")) + safeId = safeId.replace(" ", "_"); + IRI iri = null; + try { + iri = IRI.create(uriPrefix + safeId); + } catch (IllegalArgumentException e) { + // TODO - define new exception class for this + // throw new UnsupportedEncodingException(); + return null; + } + + return iri; + } + + // 5.9.3. 
Special Rules for Relations + private String translateShorthandIdToExpandedId(String id) { + if (id.contains(":")) { + return id; + } else { + System.err.println("line 467 translateShorthandIdToExpandedId fail on need for obo"); + System.exit(-1); + } + return null; /* Frame tdf = obodoc.getTypedefFrame(id); if (tdf == null) return id; @@ -500,121 +466,121 @@ private String translateShorthandIdToExpandedId(String id) { //System.err.println(" ID:"+id+" matching:"+matchingExpandedId); return matchingExpandedId; */ - } - - private String getDefaultIDSpace() { - return defaultIDSpace; - } - - /** - * @param altId - * @return OWLObject that has matching altId, or null if not found - */ - public OWLObject getObjectByAltId(String altId) { - Map m = getAltIdMap(false); - if (m.containsKey(altId)) - return m.get(altId); - else - return null; - } - - private Map getAltIdMap(boolean isReset) { - if (isReset) - altIdMap = null; - if (altIdMap == null) { - altIdMap = getAllOWLObjectsByAltId(); - } - return altIdMap; - } - - /** - * Given an OBO-style ID, return the corresponding OWLObject, if it is declared - otherwise null - * - * @param id - e.g. GO:0008150 - * @return object with id or null - */ - public OWLObject getOWLObjectByIdentifier(String id) { - IRI iri = getIRIByIdentifier(id); - if (iri != null) - return getOWLObject(iri); - return null; - } - - /** - * Returns the OWLObject with this IRI - *
<p>
- * Must have been declared in one of the ontologies - *
<p>
- * Currently OWLObject must be one of OWLClass, OWLObjectProperty or OWLNamedIndividual - *
<p>
- * If the ontology employs punning and there different entities with the same IRI, then - * the order of precedence is OWLClass then OWLObjectProperty then OWLNamedIndividual - * - * @param s entity IRI - * @return {@link OWLObject} - */ - public OWLObject getOWLObject(IRI s) { - OWLObject o; - o = getOWLClass(s); - if (o == null) { - o = getOWLIndividual(s); - } - if (o == null) { - o = getOWLObjectProperty(s); - } - if (o == null) { - o = getOWLAnnotationProperty(s); - } - return o; - } - - public OWLAnnotationProperty getOWLAnnotationProperty(IRI iri) { - OWLAnnotationProperty p = getDataFactory().getOWLAnnotationProperty(iri); + } + + private String getDefaultIDSpace() { + return defaultIDSpace; + } + + /** + * @param altId + * @return OWLObject that has matching altId, or null if not found + */ + public OWLObject getObjectByAltId(String altId) { + Map m = getAltIdMap(false); + if (m.containsKey(altId)) + return m.get(altId); + else + return null; + } + + private Map getAltIdMap(boolean isReset) { + if (isReset) + altIdMap = null; + if (altIdMap == null) { + altIdMap = getAllOWLObjectsByAltId(); + } + return altIdMap; + } + + /** + * Given an OBO-style ID, return the corresponding OWLObject, if it is declared - otherwise null + * + * @param id - e.g. GO:0008150 + * @return object with id or null + */ + public OWLObject getOWLObjectByIdentifier(String id) { + IRI iri = getIRIByIdentifier(id); + if (iri != null) + return getOWLObject(iri); + return null; + } + + /** + * Returns the OWLObject with this IRI + *
<p>
+ * Must have been declared in one of the ontologies + *
<p>
+ * Currently OWLObject must be one of OWLClass, OWLObjectProperty or OWLNamedIndividual + *
<p>
+ * If the ontology employs punning and there different entities with the same IRI, then + * the order of precedence is OWLClass then OWLObjectProperty then OWLNamedIndividual + * + * @param s entity IRI + * @return {@link OWLObject} + */ + public OWLObject getOWLObject(IRI s) { + OWLObject o; + o = getOWLClass(s); + if (o == null) { + o = getOWLIndividual(s); + } + if (o == null) { + o = getOWLObjectProperty(s); + } + if (o == null) { + o = getOWLAnnotationProperty(s); + } + return o; + } + + public OWLAnnotationProperty getOWLAnnotationProperty(IRI iri) { + OWLAnnotationProperty p = getDataFactory().getOWLAnnotationProperty(iri); //TODO reinstate this check with new blazegraph pattern // for (OWLOntology o : getAllOntologies()) { // if (o.getDeclarationAxioms(p).size() > 0) { - return p; + return p; // } // } // return null; - } - - /** - * Returns an OWLNamedIndividual with this IRI if it has been declared - * in the source or support ontologies. Returns null otherwise. - * - * @param iri - * @return {@link OWLNamedIndividual} - */ - public OWLNamedIndividual getOWLIndividual(IRI iri) { - OWLNamedIndividual c = getDataFactory().getOWLNamedIndividual(iri); - for (OWLOntology o : getAllOntologies()) { - for (OWLDeclarationAxiom da : o.getDeclarationAxioms(c)) { - if (da.getEntity() instanceof OWLNamedIndividual) { - return (OWLNamedIndividual) da.getEntity(); - } - } - } - return null; - } - - public OWLDataFactory getDataFactory() { - return getManager().getOWLDataFactory(); - } - - - /** - * Given an OBO-style ID, return the corresponding OWLObjectProperty, if it is declared - otherwise null - * - * @param id - e.g. GO:0008150 - * @return OWLObjectProperty with id or null - */ - public OWLObjectProperty getOWLObjectPropertyByIdentifier(String id) { - IRI iri = getIRIByIdentifier(id); - if (iri != null) - return getOWLObjectProperty(iri); - return null; - } + } + + /** + * Returns an OWLNamedIndividual with this IRI if it has been declared + * in the source or support ontologies. Returns null otherwise. + * + * @param iri + * @return {@link OWLNamedIndividual} + */ + public OWLNamedIndividual getOWLIndividual(IRI iri) { + OWLNamedIndividual c = getDataFactory().getOWLNamedIndividual(iri); + for (OWLOntology o : getAllOntologies()) { + for (OWLDeclarationAxiom da : o.getDeclarationAxioms(c)) { + if (da.getEntity() instanceof OWLNamedIndividual) { + return (OWLNamedIndividual) da.getEntity(); + } + } + } + return null; + } + + public OWLDataFactory getDataFactory() { + return getManager().getOWLDataFactory(); + } + + + /** + * Given an OBO-style ID, return the corresponding OWLObjectProperty, if it is declared - otherwise null + * + * @param id - e.g. GO:0008150 + * @return OWLObjectProperty with id or null + */ + public OWLObjectProperty getOWLObjectPropertyByIdentifier(String id) { + IRI iri = getIRIByIdentifier(id); + if (iri != null) + return getOWLObjectProperty(iri); + return null; + } // public void mergeImportClosure(boolean b) { @@ -623,126 +589,124 @@ public OWLObjectProperty getOWLObjectPropertyByIdentifier(String id) { // } - /** - * Find all corresponding {@link OWLObject}s with an OBO-style alternate identifier. - *
<p>
- * WARNING: This methods scans all object annotations in all ontologies. - * This is an expensive method. - * - * @return map of altId to OWLObject (never null) - */ - public Map getAllOWLObjectsByAltId() { - final Map results = new HashMap(); - final OWLAnnotationProperty altIdProperty = getAnnotationProperty(OboFormatTag.TAG_ALT_ID.getTag()); - if (altIdProperty == null) { - return Collections.emptyMap(); - } - for (OWLOntology o : getAllOntologies()) { - Set aas = o.getAxioms(AxiomType.ANNOTATION_ASSERTION); - for (OWLAnnotationAssertionAxiom aa : aas) { - OWLAnnotationValue v = aa.getValue(); - OWLAnnotationProperty property = aa.getProperty(); - if (altIdProperty.equals(property) && v instanceof OWLLiteral) { - String altId = ((OWLLiteral)v).getLiteral(); - OWLAnnotationSubject subject = aa.getSubject(); - if (subject instanceof IRI) { - OWLObject obj = getOWLObject((IRI) subject); - if (obj != null) { - results.put(altId, obj); - } - } - } - } - } - return results; - } - - /** - * It translates a oboformat tag into an OWL annotation property - * - * @param tag - * @return {@link OWLAnnotationProperty} - */ - public OWLAnnotationProperty getAnnotationProperty(String tag){ - return getDataFactory().getOWLAnnotationProperty(trTagToIRI(tag)); - } - - /** - * fetches an OWL Object by rdfs:label - *
<p>
- * if there is >1 match, return the first one encountered - * - * @param label - * @return object or null - */ - public OWLObject getOWLObjectByLabel(String label) { - IRI iri = getIRIByLabel(label); - if (iri != null) - return getOWLObject(iri); - return null; - } - - /** - * fetches an OWL IRI by rdfs:label - * - * @param label - * @return IRI or null - */ - public IRI getIRIByLabel(String label) { - try { - return getIRIByLabel(label, false); - } catch (Exception e) { - // note that it should be impossible to reach this point - // if getIRIByLabel is called with isEnforceUnivocal = false - e.printStackTrace(); - return null; - } - } - - /** - * fetches an OWL IRI by rdfs:label, optionally testing for uniqueness - *
<p>
- * TODO: index labels. This currently scans all labels in the ontology, which is expensive - * - * @param label - * @param isEnforceUnivocal - * @return IRI or null - * @throws SharedLabelException if >1 IRI shares input label - */ - public IRI getIRIByLabel(String label, boolean isEnforceUnivocal) throws Exception { - IRI iri = null; - for (OWLOntology o : getAllOntologies()) { - Set aas = o.getAxioms(AxiomType.ANNOTATION_ASSERTION); - for (OWLAnnotationAssertionAxiom aa : aas) { - OWLAnnotationValue v = aa.getValue(); - OWLAnnotationProperty property = aa.getProperty(); - if (property.isLabel() && v instanceof OWLLiteral) { - if (label.equals( ((OWLLiteral)v).getLiteral())) { - OWLAnnotationSubject subject = aa.getSubject(); - if (subject instanceof IRI) { - if (isEnforceUnivocal) { - if (iri != null && !iri.equals((IRI)subject)) { - throw new Exception(); - } - iri = (IRI)subject; - } - else { - return (IRI)subject; - } - } - else { - //return null; - } - } - } - } - } - return iri; - } - - public IRI getIRIByIdentifier(String id) { - return getIRIByIdentifier(id, false); - } + /** + * Find all corresponding {@link OWLObject}s with an OBO-style alternate identifier. + *
<p>
+ * WARNING: This methods scans all object annotations in all ontologies. + * This is an expensive method. + * + * @return map of altId to OWLObject (never null) + */ + public Map getAllOWLObjectsByAltId() { + final Map results = new HashMap(); + final OWLAnnotationProperty altIdProperty = getAnnotationProperty(OboFormatTag.TAG_ALT_ID.getTag()); + if (altIdProperty == null) { + return Collections.emptyMap(); + } + for (OWLOntology o : getAllOntologies()) { + Set aas = o.getAxioms(AxiomType.ANNOTATION_ASSERTION); + for (OWLAnnotationAssertionAxiom aa : aas) { + OWLAnnotationValue v = aa.getValue(); + OWLAnnotationProperty property = aa.getProperty(); + if (altIdProperty.equals(property) && v instanceof OWLLiteral) { + String altId = ((OWLLiteral) v).getLiteral(); + OWLAnnotationSubject subject = aa.getSubject(); + if (subject instanceof IRI) { + OWLObject obj = getOWLObject((IRI) subject); + if (obj != null) { + results.put(altId, obj); + } + } + } + } + } + return results; + } + + /** + * It translates a oboformat tag into an OWL annotation property + * + * @param tag + * @return {@link OWLAnnotationProperty} + */ + public OWLAnnotationProperty getAnnotationProperty(String tag) { + return getDataFactory().getOWLAnnotationProperty(trTagToIRI(tag)); + } + + /** + * fetches an OWL Object by rdfs:label + *
<p>
+ * if there is >1 match, return the first one encountered + * + * @param label + * @return object or null + */ + public OWLObject getOWLObjectByLabel(String label) { + IRI iri = getIRIByLabel(label); + if (iri != null) + return getOWLObject(iri); + return null; + } + + /** + * fetches an OWL IRI by rdfs:label + * + * @param label + * @return IRI or null + */ + public IRI getIRIByLabel(String label) { + try { + return getIRIByLabel(label, false); + } catch (Exception e) { + // note that it should be impossible to reach this point + // if getIRIByLabel is called with isEnforceUnivocal = false + e.printStackTrace(); + return null; + } + } + + /** + * fetches an OWL IRI by rdfs:label, optionally testing for uniqueness + *
<p>
+ * TODO: index labels. This currently scans all labels in the ontology, which is expensive + * + * @param label + * @param isEnforceUnivocal + * @return IRI or null + * @throws SharedLabelException if >1 IRI shares input label + */ + public IRI getIRIByLabel(String label, boolean isEnforceUnivocal) throws Exception { + IRI iri = null; + for (OWLOntology o : getAllOntologies()) { + Set aas = o.getAxioms(AxiomType.ANNOTATION_ASSERTION); + for (OWLAnnotationAssertionAxiom aa : aas) { + OWLAnnotationValue v = aa.getValue(); + OWLAnnotationProperty property = aa.getProperty(); + if (property.isLabel() && v instanceof OWLLiteral) { + if (label.equals(((OWLLiteral) v).getLiteral())) { + OWLAnnotationSubject subject = aa.getSubject(); + if (subject instanceof IRI) { + if (isEnforceUnivocal) { + if (iri != null && !iri.equals((IRI) subject)) { + throw new Exception(); + } + iri = (IRI) subject; + } else { + return (IRI) subject; + } + } else { + //return null; + } + } + } + } + } + return iri; + } + + public IRI getIRIByIdentifier(String id) { + return getIRIByIdentifier(id, false); + } } diff --git a/minerva-core/src/main/java/org/geneontology/minerva/ModelContainer.java b/minerva-core/src/main/java/org/geneontology/minerva/ModelContainer.java index fe8f2d4f..ac329292 100644 --- a/minerva-core/src/main/java/org/geneontology/minerva/ModelContainer.java +++ b/minerva-core/src/main/java/org/geneontology/minerva/ModelContainer.java @@ -1,175 +1,167 @@ package org.geneontology.minerva; -import java.util.ArrayList; -import java.util.List; -import java.util.concurrent.CopyOnWriteArrayList; - +import com.google.common.base.Optional; import org.apache.log4j.Logger; -import org.semanticweb.owlapi.model.AddImport; -import org.semanticweb.owlapi.model.IRI; -import org.semanticweb.owlapi.model.OWLDataFactory; -import org.semanticweb.owlapi.model.OWLOntology; -import org.semanticweb.owlapi.model.OWLOntologyChange; -import org.semanticweb.owlapi.model.OWLOntologyCreationException; -import org.semanticweb.owlapi.model.OWLOntologyManager; +import org.semanticweb.owlapi.model.*; import org.semanticweb.owlapi.model.parameters.ChangeApplied; -import org.semanticweb.owlapi.reasoner.OWLReasoner; -import com.google.common.base.Optional; +import java.util.ArrayList; +import java.util.List; +import java.util.concurrent.CopyOnWriteArrayList; public class ModelContainer { - private static Logger LOG = Logger.getLogger(ModelContainer.class); - - private final IRI modelId; - private OWLOntology aboxOntology = null; - private boolean aboxModified = false; - private OWLOntology tboxOntology = null; - //private OWLReasoner tboxReasoner = null; - - private final List listeners = new CopyOnWriteArrayList<>(); - - /** - * The container is seeded with a tbox (i.e. ontology). An abox will be created - * automatically. - * - * @param modelId - * @param tbox - * @throws OWLOntologyCreationException - */ - public ModelContainer(IRI modelId, OWLOntology tbox) throws OWLOntologyCreationException { - tboxOntology = tbox; - this.modelId = modelId; - init(); - } - - /** - * Creates a container with a pre-defined tbox (ontology) and abox (instance store). - * Note the abox should import the tbox (directly or indirectly). 
- * - * The abox may be identical to the tbox, in which case individuals are added to - * the same ontology - * - * @param modelId - * @param tbox - * @param abox - * @throws OWLOntologyCreationException - */ - public ModelContainer(IRI modelId, OWLOntology tbox, OWLOntology abox) throws OWLOntologyCreationException { - tboxOntology = tbox; - aboxOntology = abox; - this.modelId = modelId; - init(); - } - - /** - * Initialization consists of setting aboxOntology, if not set - defaults to a new ontology using tbox. - * @throws OWLOntologyCreationException - */ - private void init() throws OWLOntologyCreationException { - // abox -> tbox - if (aboxOntology == null) { - LOG.debug("Creating abox ontology. mgr = "+getOWLOntologyManager()); - Optional tBoxIRI = tboxOntology.getOntologyID().getOntologyIRI(); - if (tBoxIRI.isPresent()) { - IRI ontologyIRI = IRI.create(tBoxIRI.get()+"__abox"); - aboxOntology = getOWLOntologyManager().getOntology(ontologyIRI); - if (aboxOntology != null) { - LOG.warn("Clearing existing abox ontology"); - getOWLOntologyManager().removeOntology(aboxOntology); - } - aboxOntology = getOWLOntologyManager().createOntology(ontologyIRI); - AddImport ai = new AddImport(aboxOntology, - getOWLDataFactory().getOWLImportsDeclaration(tBoxIRI.get())); - getOWLOntologyManager().applyChange(ai); - } - else { - aboxOntology = getOWLOntologyManager().createOntology(); - } - } - if (LOG.isDebugEnabled()) { - LOG.debug(modelId+" manager(T) = "+tboxOntology.getOWLOntologyManager()); - LOG.debug(modelId+" manager(A) = "+aboxOntology.getOWLOntologyManager()); - LOG.debug(modelId+" id(T) = "+tboxOntology.getOntologyID()); - LOG.debug(modelId+" id(A) = "+aboxOntology.getOntologyID()); - } - } - - public IRI getModelId() { - return modelId; - } - - public OWLOntologyManager getOWLOntologyManager() { - return aboxOntology.getOWLOntologyManager(); - } - - public OWLDataFactory getOWLDataFactory() { - return getOWLOntologyManager().getOWLDataFactory(); - } - - public void dispose() { - final OWLOntologyManager m = getOWLOntologyManager(); - if (aboxOntology != null) { - m.removeOntology(aboxOntology); - } - - for(ModelChangeListener listener : listeners) { - listener.dispose(); - } - listeners.clear(); - } - - public OWLOntology getTboxOntology() { - return tboxOntology; - } - - public OWLOntology getAboxOntology() { - return aboxOntology; - } - - public static interface ModelChangeListener { - - public void handleChange(List changes); - - public void dispose(); - } - - public void registerListener(ModelChangeListener listener) { - if (listener != null) { - listeners.add(listener); - } - } - - public void unRegisterListener(ModelChangeListener listener) { - if (listener != null) { - listeners.remove(listener); - } - } - - public List applyChanges(List changes) { - ChangeApplied applied = getOWLOntologyManager().applyChanges(changes); - if (applied == ChangeApplied.SUCCESSFULLY) { - List relevantChanges = new ArrayList<>(); - for (OWLOntologyChange change : changes) { - if (aboxOntology.equals(change.getOntology())) { - aboxModified = true; - relevantChanges.add(change); - } - } - if (relevantChanges.isEmpty() == false) { - for(ModelChangeListener listener : listeners) { - listener.handleChange(relevantChanges); - } - } - } - return new ArrayList(changes); - } - - public boolean isModified() { - return aboxModified; - } - - void setAboxModified(boolean modified) { - aboxModified = modified; - } + private static Logger LOG = Logger.getLogger(ModelContainer.class); + + private final IRI modelId; + 
private OWLOntology aboxOntology = null; + private boolean aboxModified = false; + private OWLOntology tboxOntology = null; + //private OWLReasoner tboxReasoner = null; + + private final List listeners = new CopyOnWriteArrayList<>(); + + /** + * The container is seeded with a tbox (i.e. ontology). An abox will be created + * automatically. + * + * @param modelId + * @param tbox + * @throws OWLOntologyCreationException + */ + public ModelContainer(IRI modelId, OWLOntology tbox) throws OWLOntologyCreationException { + tboxOntology = tbox; + this.modelId = modelId; + init(); + } + + /** + * Creates a container with a pre-defined tbox (ontology) and abox (instance store). + * Note the abox should import the tbox (directly or indirectly). + *
<p>
+ * The abox may be identical to the tbox, in which case individuals are added to + * the same ontology + * + * @param modelId + * @param tbox + * @param abox + * @throws OWLOntologyCreationException + */ + public ModelContainer(IRI modelId, OWLOntology tbox, OWLOntology abox) throws OWLOntologyCreationException { + tboxOntology = tbox; + aboxOntology = abox; + this.modelId = modelId; + init(); + } + + /** + * Initialization consists of setting aboxOntology, if not set - defaults to a new ontology using tbox. + * + * @throws OWLOntologyCreationException + */ + private void init() throws OWLOntologyCreationException { + // abox -> tbox + if (aboxOntology == null) { + LOG.debug("Creating abox ontology. mgr = " + getOWLOntologyManager()); + Optional tBoxIRI = tboxOntology.getOntologyID().getOntologyIRI(); + if (tBoxIRI.isPresent()) { + IRI ontologyIRI = IRI.create(tBoxIRI.get() + "__abox"); + aboxOntology = getOWLOntologyManager().getOntology(ontologyIRI); + if (aboxOntology != null) { + LOG.warn("Clearing existing abox ontology"); + getOWLOntologyManager().removeOntology(aboxOntology); + } + aboxOntology = getOWLOntologyManager().createOntology(ontologyIRI); + AddImport ai = new AddImport(aboxOntology, + getOWLDataFactory().getOWLImportsDeclaration(tBoxIRI.get())); + getOWLOntologyManager().applyChange(ai); + } else { + aboxOntology = getOWLOntologyManager().createOntology(); + } + } + if (LOG.isDebugEnabled()) { + LOG.debug(modelId + " manager(T) = " + tboxOntology.getOWLOntologyManager()); + LOG.debug(modelId + " manager(A) = " + aboxOntology.getOWLOntologyManager()); + LOG.debug(modelId + " id(T) = " + tboxOntology.getOntologyID()); + LOG.debug(modelId + " id(A) = " + aboxOntology.getOntologyID()); + } + } + + public IRI getModelId() { + return modelId; + } + + public OWLOntologyManager getOWLOntologyManager() { + return aboxOntology.getOWLOntologyManager(); + } + + public OWLDataFactory getOWLDataFactory() { + return getOWLOntologyManager().getOWLDataFactory(); + } + + public void dispose() { + final OWLOntologyManager m = getOWLOntologyManager(); + if (aboxOntology != null) { + m.removeOntology(aboxOntology); + } + + for (ModelChangeListener listener : listeners) { + listener.dispose(); + } + listeners.clear(); + } + + public OWLOntology getTboxOntology() { + return tboxOntology; + } + + public OWLOntology getAboxOntology() { + return aboxOntology; + } + + public static interface ModelChangeListener { + + public void handleChange(List changes); + + public void dispose(); + } + + public void registerListener(ModelChangeListener listener) { + if (listener != null) { + listeners.add(listener); + } + } + + public void unRegisterListener(ModelChangeListener listener) { + if (listener != null) { + listeners.remove(listener); + } + } + + public List applyChanges(List changes) { + ChangeApplied applied = getOWLOntologyManager().applyChanges(changes); + if (applied == ChangeApplied.SUCCESSFULLY) { + List relevantChanges = new ArrayList<>(); + for (OWLOntologyChange change : changes) { + if (aboxOntology.equals(change.getOntology())) { + aboxModified = true; + relevantChanges.add(change); + } + } + if (relevantChanges.isEmpty() == false) { + for (ModelChangeListener listener : listeners) { + listener.handleChange(relevantChanges); + } + } + } + return new ArrayList(changes); + } + + public boolean isModified() { + return aboxModified; + } + + void setAboxModified(boolean modified) { + aboxModified = modified; + } } diff --git 
a/minerva-core/src/main/java/org/geneontology/minerva/MolecularModelManager.java b/minerva-core/src/main/java/org/geneontology/minerva/MolecularModelManager.java index 62a87003..55339af5 100644 --- a/minerva-core/src/main/java/org/geneontology/minerva/MolecularModelManager.java +++ b/minerva-core/src/main/java/org/geneontology/minerva/MolecularModelManager.java @@ -1,345 +1,330 @@ package org.geneontology.minerva; -import java.io.IOException; -import java.util.Arrays; -import java.util.Collection; -import java.util.HashSet; -import java.util.List; -import java.util.Map; -import java.util.Map.Entry; -import java.util.Set; - import org.geneontology.minerva.curie.CurieHandler; -import org.semanticweb.owlapi.model.IRI; -import org.semanticweb.owlapi.model.OWLAnnotation; -import org.semanticweb.owlapi.model.OWLClass; -import org.semanticweb.owlapi.model.OWLClassExpression; -import org.semanticweb.owlapi.model.OWLDataProperty; -import org.semanticweb.owlapi.model.OWLEntity; -import org.semanticweb.owlapi.model.OWLLiteral; -import org.semanticweb.owlapi.model.OWLNamedIndividual; -import org.semanticweb.owlapi.model.OWLObjectProperty; -import org.semanticweb.owlapi.model.OWLObjectPropertyAssertionAxiom; -import org.semanticweb.owlapi.model.OWLObjectPropertyExpression; -import org.semanticweb.owlapi.model.OWLOntology; -import org.semanticweb.owlapi.model.OWLOntologyCreationException; - -//import owltools.graph.OWLGraphWrapper; +import org.semanticweb.owlapi.model.*; import owltools.vocab.OBOUpperVocabulary; +import java.io.IOException; +import java.util.*; +import java.util.Map.Entry; + /** * Convenience layer for operations on collections of MolecularModels (aka lego diagrams) - * + *
<p>
* This manager is intended to be used within a web server. Multiple clients can * contact the same manager instance through services - * - * @param + * + * @param * @see CoreMolecularModelManager */ -public class MolecularModelManager extends BlazegraphMolecularModelManager { - - public static class UnknownIdentifierException extends Exception { - - // generated - private static final long serialVersionUID = -847970910712518838L; - - /** - * @param message - * @param cause - */ - public UnknownIdentifierException(String message, Throwable cause) { - super(message, cause); - } - - /** - * @param message - */ - public UnknownIdentifierException(String message) { - super(message); - } - - } - - /** - * @param tbox - * @param curieHandler - * @param modelIdPrefix - * @throws OWLOntologyCreationException - * @throws IOException - */ - public MolecularModelManager(OWLOntology tbox, CurieHandler curieHandler, String modelIdPrefix, String pathToJournal, String pathToExportFolder, String pathToOntologyJournal, boolean downloadOntologyJournal) throws OWLOntologyCreationException, IOException { - super(tbox, curieHandler, modelIdPrefix, pathToJournal, pathToExportFolder, pathToOntologyJournal, downloadOntologyJournal); - } - - /** - * Shortcut for {@link CoreMolecularModelManager#createIndividual} - * - * @param modelId - * @param cid - * @param annotations - * @param metadata - * @return id and individual - * @throws UnknownIdentifierException - */ - public OWLNamedIndividual createIndividual(IRI modelId, String cid, Set annotations, METADATA metadata) throws UnknownIdentifierException { - ModelContainer model = checkModelId(modelId); - OWLClass cls = getClass(cid, model); - if (cls == null) { - throw new UnknownIdentifierException("Could not find a class for id: "+cid); - } - OWLNamedIndividual i = createIndividual(model, cls, annotations , metadata); - return i; - } - - - /** - * Shortcut for {@link CoreMolecularModelManager#createIndividual}. - * - * @param modelId - * @param cid - * @param annotations - * @param metadata - * @return id and created individual - * @throws UnknownIdentifierException - */ - public OWLNamedIndividual createIndividualNonReasoning(IRI modelId, String cid, Set annotations, METADATA metadata) throws UnknownIdentifierException { - ModelContainer model = checkModelId(modelId); - OWLClass cls = getClass(cid, model); - if (cls == null) { - throw new UnknownIdentifierException("Could not find a class for id: "+cid); - } - return createIndividualNonReasoning(modelId, cls, annotations, metadata); - } - - /** - * Shortcut for {@link CoreMolecularModelManager#createIndividual}. - * - * @param modelId - * @param ce - * @param annotations - * @param metadata - * @return id and created individual - * @throws UnknownIdentifierException - */ - public OWLNamedIndividual createIndividualNonReasoning(IRI modelId, OWLClassExpression ce, Set annotations, METADATA metadata) throws UnknownIdentifierException { - ModelContainer model = checkModelId(modelId); - OWLNamedIndividual i = createIndividual(model, ce, annotations, metadata); - return i; - } - - /** - * Shortcut for {@link CoreMolecularModelManager#createIndividual}. 
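A wiring sketch for the convenience layer described above; the journal paths, model-id prefix and CURIE are illustrative values, and the CurieHandler and tbox are assumed to be configured elsewhere:

    import org.geneontology.minerva.MolecularModelManager;
    import org.geneontology.minerva.curie.CurieHandler;
    import org.semanticweb.owlapi.model.IRI;
    import org.semanticweb.owlapi.model.OWLNamedIndividual;
    import org.semanticweb.owlapi.model.OWLOntology;

    import java.util.Collections;

    // Sketch only; constructor arguments are placeholders, not values from the patch.
    class M3Sketch {
        static OWLNamedIndividual addMolecularFunction(OWLOntology tbox, CurieHandler curieHandler,
                                                       IRI modelId) throws Exception {
            MolecularModelManager<Void> m3 = new MolecularModelManager<>(
                    tbox, curieHandler, "http://model.geneontology.org/",
                    "/tmp/blazegraph.jnl", "/tmp/minerva-export", "/tmp/blazegraph-ontology.jnl", false);
            // The class CURIE is resolved through the CurieHandler; modelId must identify a model
            // already known to the manager, otherwise createIndividual raises
            // MolecularModelManager.UnknownIdentifierException, as it does for an unknown class id.
            return m3.createIndividual(modelId, "GO:0003674", Collections.emptySet(), null);
        }
    }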
- * - * @param model - * @param annotations - * @param metadata - * @return id and created individual - */ - public OWLNamedIndividual createIndividualNonReasoning(ModelContainer model, Set annotations, METADATA metadata) { - OWLNamedIndividual i = createIndividual(model, (OWLClassExpression)null, annotations, metadata); - return i; - } - - /** - * Shortcut for {@link CoreMolecularModelManager#createIndividual}. - * - * @param model - * @param individualIRI - * @param annotations - * @param metadata - * @return id and created individual - * @throws UnknownIdentifierException - */ - public OWLNamedIndividual createIndividualNonReasoning(ModelContainer model, IRI individualIRI, Set annotations, METADATA metadata) throws UnknownIdentifierException { - OWLNamedIndividual i = createIndividualWithIRI(model, individualIRI, annotations, metadata); - return i; - } - - public OWLNamedIndividual getNamedIndividual(ModelContainer model, String iid) throws UnknownIdentifierException { - OWLNamedIndividual i = getIndividual(iid, model); - if (i == null) { - throw new UnknownIdentifierException("Could not find a individual for id: "+iid); - } - return i; - } - - /** - * Deletes an individual and return all IRIs used as an annotation value - * - * @param modelId - * @param iid - * @param metadata - * @return delete information - * @throws UnknownIdentifierException - */ - public DeleteInformation deleteIndividual(IRI modelId, String iid, METADATA metadata) throws UnknownIdentifierException { - ModelContainer model = checkModelId(modelId); - OWLNamedIndividual i = getIndividual(iid, model); - if (i == null) { - throw new UnknownIdentifierException("Could not find a individual for id: "+iid); - } - return deleteIndividual(model, i, metadata); - } - - /** - * Deletes an individual and return all IRIs used as an annotation value - * - * @param modelId - * @param i - * @param metadata - * @return delete information - * @throws UnknownIdentifierException - */ - public DeleteInformation deleteIndividual(IRI modelId, OWLNamedIndividual i, METADATA metadata) throws UnknownIdentifierException { - ModelContainer model = checkModelId(modelId); - return deleteIndividual(model, i, metadata); - } - - /** - * Deletes an individual - * - * @param modelId - * @param iri - * @param metadata - * @throws UnknownIdentifierException - */ - public void deleteIndividualNonReasoning(IRI modelId, IRI iri, METADATA metadata) throws UnknownIdentifierException { - ModelContainer model = checkModelId(modelId); - OWLNamedIndividual i = getIndividual(iri, model); - if (i == null) { - throw new UnknownIdentifierException("Could not find a individual for id: "+iri); - } - deleteIndividual(model, i, metadata); - } - - public OWLNamedIndividual addAnnotations(IRI modelId, String iid, - Set annotations, METADATA metadata) throws UnknownIdentifierException { - ModelContainer model = checkModelId(modelId); - OWLNamedIndividual i = getIndividual(iid, model); - if (i == null) { - throw new UnknownIdentifierException("Could not find a individual for id: "+iid); - } - if (annotations != null && !annotations.isEmpty()) { - addAnnotations(model, i.getIRI(), annotations, metadata); - } - return i; - } - - public void addAnnotations(IRI modelId, IRI subject, - Set annotations, METADATA metadata) throws UnknownIdentifierException { - if (annotations != null && !annotations.isEmpty()) { - ModelContainer model = checkModelId(modelId); - addAnnotations(model, subject, annotations, metadata); - } - } - - public OWLNamedIndividual 
updateAnnotation(ModelContainer model, OWLNamedIndividual i, - OWLAnnotation annotation, METADATA metadata) { - if (annotation != null) { - updateAnnotation(model, i.getIRI(), annotation, metadata); - } - return i; - } - - public void updateAnnotation(IRI modelId, IRI subject, - OWLAnnotation annotation, METADATA metadata) throws UnknownIdentifierException { - ModelContainer model = checkModelId(modelId); - if (annotation != null) { - updateAnnotation(model, subject, annotation, metadata); - } - } - - public OWLNamedIndividual removeAnnotations(ModelContainer model, OWLNamedIndividual i, - Set annotations, METADATA metadata) { - if (annotations != null && !annotations.isEmpty()) { - removeAnnotations(model, i.getIRI(), annotations, metadata); - } - return i; - } - - /** - * @param model - * @return true, if the model was removed - */ - public boolean deleteModel(ModelContainer model) { - return deleteModel(model.getModelId()); - } - - /** - * @param modelId - * @return true, if the model was removed - */ - public boolean deleteModel(IRI modelId) { - ModelContainer model = modelMap.remove(modelId); - if (model != null) { - model.dispose(); - return true; - } - return false; - } - - private OWLNamedIndividual getIndividual(String indId, ModelContainer model) throws UnknownIdentifierException { - IRI iri = getCuriHandler().getIRI(indId); - return getIndividual(iri, model); - } - public OWLNamedIndividual getIndividual(IRI iri, ModelContainer model) { - // check that individual is actually declared - boolean containsIRI = model.getAboxOntology().containsEntityInSignature(iri); - if (containsIRI == false) { - return null; - } - OWLNamedIndividual individual = model.getOWLDataFactory().getOWLNamedIndividual(iri); - return individual; - } - private OWLClass getClass(String cid, ModelContainer model) throws UnknownIdentifierException { - MinervaOWLGraphWrapper graph = new MinervaOWLGraphWrapper(model.getAboxOntology()); - return getClass(cid, graph); - } - private OWLClass getClass(String cid, MinervaOWLGraphWrapper graph) throws UnknownIdentifierException { - IRI iri = getCuriHandler().getIRI(cid); - return graph.getOWLClass(iri); - } - public OWLObjectProperty getObjectProperty(String pid, ModelContainer model) throws UnknownIdentifierException { - MinervaOWLGraphWrapper graph = new MinervaOWLGraphWrapper(model.getAboxOntology()); - IRI iri = getCuriHandler().getIRI(pid); - return graph.getOWLObjectProperty(iri); - } - - public ModelContainer checkModelId(IRI modelId) throws UnknownIdentifierException { - ModelContainer model = getModel(modelId); - if (model == null) { - throw new UnknownIdentifierException("Could not find a model for id: "+modelId); - } - return model; - } - - private OWLObjectPropertyExpression getObjectProperty(OBOUpperVocabulary vocabElement, - ModelContainer model) { - return vocabElement.getObjectProperty(model.getAboxOntology()); - } - - /** - * Convenience wrapper for {@link CoreMolecularModelManager#addType} - * - * @param modelId - * @param iid - * @param cid - * @param metadata - * @throws UnknownIdentifierException - */ - public void addType(IRI modelId, String iid, String cid, METADATA metadata) throws UnknownIdentifierException { - ModelContainer model = checkModelId(modelId); - OWLNamedIndividual individual = getIndividual(iid, model); - if (individual == null) { - throw new UnknownIdentifierException("Could not find a individual for id: "+iid); - } - OWLClass cls = getClass(cid, model); - if (cls == null) { - throw new UnknownIdentifierException("Could not find a 
class for id: "+cid); - } - addType(model, individual, cls, metadata); - } - +public class MolecularModelManager extends BlazegraphMolecularModelManager { + + public static class UnknownIdentifierException extends Exception { + + // generated + private static final long serialVersionUID = -847970910712518838L; + + /** + * @param message + * @param cause + */ + public UnknownIdentifierException(String message, Throwable cause) { + super(message, cause); + } + + /** + * @param message + */ + public UnknownIdentifierException(String message) { + super(message); + } + + } + + /** + * @param tbox + * @param curieHandler + * @param modelIdPrefix + * @throws OWLOntologyCreationException + * @throws IOException + */ + public MolecularModelManager(OWLOntology tbox, CurieHandler curieHandler, String modelIdPrefix, String pathToJournal, String pathToExportFolder, String pathToOntologyJournal, boolean downloadOntologyJournal) throws OWLOntologyCreationException, IOException { + super(tbox, curieHandler, modelIdPrefix, pathToJournal, pathToExportFolder, pathToOntologyJournal, downloadOntologyJournal); + } + + /** + * Shortcut for {@link CoreMolecularModelManager#createIndividual} + * + * @param modelId + * @param cid + * @param annotations + * @param metadata + * @return id and individual + * @throws UnknownIdentifierException + */ + public OWLNamedIndividual createIndividual(IRI modelId, String cid, Set annotations, METADATA metadata) throws UnknownIdentifierException { + ModelContainer model = checkModelId(modelId); + OWLClass cls = getClass(cid, model); + if (cls == null) { + throw new UnknownIdentifierException("Could not find a class for id: " + cid); + } + OWLNamedIndividual i = createIndividual(model, cls, annotations, metadata); + return i; + } + + + /** + * Shortcut for {@link CoreMolecularModelManager#createIndividual}. + * + * @param modelId + * @param cid + * @param annotations + * @param metadata + * @return id and created individual + * @throws UnknownIdentifierException + */ + public OWLNamedIndividual createIndividualNonReasoning(IRI modelId, String cid, Set annotations, METADATA metadata) throws UnknownIdentifierException { + ModelContainer model = checkModelId(modelId); + OWLClass cls = getClass(cid, model); + if (cls == null) { + throw new UnknownIdentifierException("Could not find a class for id: " + cid); + } + return createIndividualNonReasoning(modelId, cls, annotations, metadata); + } + + /** + * Shortcut for {@link CoreMolecularModelManager#createIndividual}. + * + * @param modelId + * @param ce + * @param annotations + * @param metadata + * @return id and created individual + * @throws UnknownIdentifierException + */ + public OWLNamedIndividual createIndividualNonReasoning(IRI modelId, OWLClassExpression ce, Set annotations, METADATA metadata) throws UnknownIdentifierException { + ModelContainer model = checkModelId(modelId); + OWLNamedIndividual i = createIndividual(model, ce, annotations, metadata); + return i; + } + + /** + * Shortcut for {@link CoreMolecularModelManager#createIndividual}. + * + * @param model + * @param annotations + * @param metadata + * @return id and created individual + */ + public OWLNamedIndividual createIndividualNonReasoning(ModelContainer model, Set annotations, METADATA metadata) { + OWLNamedIndividual i = createIndividual(model, (OWLClassExpression) null, annotations, metadata); + return i; + } + + /** + * Shortcut for {@link CoreMolecularModelManager#createIndividual}. 
+ * + * @param model + * @param individualIRI + * @param annotations + * @param metadata + * @return id and created individual + * @throws UnknownIdentifierException + */ + public OWLNamedIndividual createIndividualNonReasoning(ModelContainer model, IRI individualIRI, Set annotations, METADATA metadata) throws UnknownIdentifierException { + OWLNamedIndividual i = createIndividualWithIRI(model, individualIRI, annotations, metadata); + return i; + } + + public OWLNamedIndividual getNamedIndividual(ModelContainer model, String iid) throws UnknownIdentifierException { + OWLNamedIndividual i = getIndividual(iid, model); + if (i == null) { + throw new UnknownIdentifierException("Could not find a individual for id: " + iid); + } + return i; + } + + /** + * Deletes an individual and return all IRIs used as an annotation value + * + * @param modelId + * @param iid + * @param metadata + * @return delete information + * @throws UnknownIdentifierException + */ + public DeleteInformation deleteIndividual(IRI modelId, String iid, METADATA metadata) throws UnknownIdentifierException { + ModelContainer model = checkModelId(modelId); + OWLNamedIndividual i = getIndividual(iid, model); + if (i == null) { + throw new UnknownIdentifierException("Could not find a individual for id: " + iid); + } + return deleteIndividual(model, i, metadata); + } + + /** + * Deletes an individual and return all IRIs used as an annotation value + * + * @param modelId + * @param i + * @param metadata + * @return delete information + * @throws UnknownIdentifierException + */ + public DeleteInformation deleteIndividual(IRI modelId, OWLNamedIndividual i, METADATA metadata) throws UnknownIdentifierException { + ModelContainer model = checkModelId(modelId); + return deleteIndividual(model, i, metadata); + } + + /** + * Deletes an individual + * + * @param modelId + * @param iri + * @param metadata + * @throws UnknownIdentifierException + */ + public void deleteIndividualNonReasoning(IRI modelId, IRI iri, METADATA metadata) throws UnknownIdentifierException { + ModelContainer model = checkModelId(modelId); + OWLNamedIndividual i = getIndividual(iri, model); + if (i == null) { + throw new UnknownIdentifierException("Could not find a individual for id: " + iri); + } + deleteIndividual(model, i, metadata); + } + + public OWLNamedIndividual addAnnotations(IRI modelId, String iid, + Set annotations, METADATA metadata) throws UnknownIdentifierException { + ModelContainer model = checkModelId(modelId); + OWLNamedIndividual i = getIndividual(iid, model); + if (i == null) { + throw new UnknownIdentifierException("Could not find a individual for id: " + iid); + } + if (annotations != null && !annotations.isEmpty()) { + addAnnotations(model, i.getIRI(), annotations, metadata); + } + return i; + } + + public void addAnnotations(IRI modelId, IRI subject, + Set annotations, METADATA metadata) throws UnknownIdentifierException { + if (annotations != null && !annotations.isEmpty()) { + ModelContainer model = checkModelId(modelId); + addAnnotations(model, subject, annotations, metadata); + } + } + + public OWLNamedIndividual updateAnnotation(ModelContainer model, OWLNamedIndividual i, + OWLAnnotation annotation, METADATA metadata) { + if (annotation != null) { + updateAnnotation(model, i.getIRI(), annotation, metadata); + } + return i; + } + + public void updateAnnotation(IRI modelId, IRI subject, + OWLAnnotation annotation, METADATA metadata) throws UnknownIdentifierException { + ModelContainer model = checkModelId(modelId); + if (annotation != null) 
{ + updateAnnotation(model, subject, annotation, metadata); + } + } + + public OWLNamedIndividual removeAnnotations(ModelContainer model, OWLNamedIndividual i, + Set annotations, METADATA metadata) { + if (annotations != null && !annotations.isEmpty()) { + removeAnnotations(model, i.getIRI(), annotations, metadata); + } + return i; + } + + /** + * @param model + * @return true, if the model was removed + */ + public boolean deleteModel(ModelContainer model) { + return deleteModel(model.getModelId()); + } + + /** + * @param modelId + * @return true, if the model was removed + */ + public boolean deleteModel(IRI modelId) { + ModelContainer model = modelMap.remove(modelId); + if (model != null) { + model.dispose(); + return true; + } + return false; + } + + private OWLNamedIndividual getIndividual(String indId, ModelContainer model) throws UnknownIdentifierException { + IRI iri = getCuriHandler().getIRI(indId); + return getIndividual(iri, model); + } + + public OWLNamedIndividual getIndividual(IRI iri, ModelContainer model) { + // check that individual is actually declared + boolean containsIRI = model.getAboxOntology().containsEntityInSignature(iri); + if (containsIRI == false) { + return null; + } + OWLNamedIndividual individual = model.getOWLDataFactory().getOWLNamedIndividual(iri); + return individual; + } + + private OWLClass getClass(String cid, ModelContainer model) throws UnknownIdentifierException { + MinervaOWLGraphWrapper graph = new MinervaOWLGraphWrapper(model.getAboxOntology()); + return getClass(cid, graph); + } + + private OWLClass getClass(String cid, MinervaOWLGraphWrapper graph) throws UnknownIdentifierException { + IRI iri = getCuriHandler().getIRI(cid); + return graph.getOWLClass(iri); + } + + public OWLObjectProperty getObjectProperty(String pid, ModelContainer model) throws UnknownIdentifierException { + MinervaOWLGraphWrapper graph = new MinervaOWLGraphWrapper(model.getAboxOntology()); + IRI iri = getCuriHandler().getIRI(pid); + return graph.getOWLObjectProperty(iri); + } + + public ModelContainer checkModelId(IRI modelId) throws UnknownIdentifierException { + ModelContainer model = getModel(modelId); + if (model == null) { + throw new UnknownIdentifierException("Could not find a model for id: " + modelId); + } + return model; + } + + private OWLObjectPropertyExpression getObjectProperty(OBOUpperVocabulary vocabElement, + ModelContainer model) { + return vocabElement.getObjectProperty(model.getAboxOntology()); + } + + /** + * Convenience wrapper for {@link CoreMolecularModelManager#addType} + * + * @param modelId + * @param iid + * @param cid + * @param metadata + * @throws UnknownIdentifierException + */ + public void addType(IRI modelId, String iid, String cid, METADATA metadata) throws UnknownIdentifierException { + ModelContainer model = checkModelId(modelId); + OWLNamedIndividual individual = getIndividual(iid, model); + if (individual == null) { + throw new UnknownIdentifierException("Could not find a individual for id: " + iid); + } + OWLClass cls = getClass(cid, model); + if (cls == null) { + throw new UnknownIdentifierException("Could not find a class for id: " + cid); + } + addType(model, individual, cls, metadata); + } + // /** // * @param model // * @param individual @@ -351,257 +336,257 @@ public void addType(IRI modelId, String iid, String cid, METADATA metadata) thro // addType(model, individual, clsExp, metadata); // return individual; // } - - /** - * Convenience wrapper for {@link CoreMolecularModelManager#addType}. 
- * - * @param modelId - * @param iid - * @param pid - * @param cid - * @param metadata - * @throws UnknownIdentifierException - */ - public void addType(IRI modelId, - String iid, String pid, String cid, METADATA metadata) throws UnknownIdentifierException { - ModelContainer model = checkModelId(modelId); - OWLNamedIndividual individual = getIndividual(iid, model); - if (individual == null) { - throw new UnknownIdentifierException("Could not find a individual for id: "+iid); - } - OWLObjectProperty property = getObjectProperty(pid, model); - if (property == null) { - throw new UnknownIdentifierException("Could not find a property for id: "+pid); - } - OWLClass cls = getClass(cid, model); - if (cls == null) { - throw new UnknownIdentifierException("Could not find a class for id: "+cid); - } - addType(model, individual, property, cls, metadata); - } - - public OWLNamedIndividual addType(IRI modelId, - String iid, String pid, OWLClassExpression ce, METADATA metadata) throws UnknownIdentifierException { - ModelContainer model = checkModelId(modelId); - OWLNamedIndividual individual = getIndividual(iid, model); - if (individual == null) { - throw new UnknownIdentifierException("Could not find a individual for id: "+iid); - } - OWLObjectProperty property = getObjectProperty(pid, model); - if (property == null) { - throw new UnknownIdentifierException("Could not find a property for id: "+pid); - } - addType(model, individual, property, ce, metadata); - return individual; - } - - /** - * Convenience wrapper for {@link CoreMolecularModelManager#removeType} - * - * @param modelId - * @param iid - * @param cid - * @param metadata - * @throws UnknownIdentifierException - */ - public void removeType(IRI modelId, String iid, String cid, METADATA metadata) throws UnknownIdentifierException { - ModelContainer model = checkModelId(modelId); - OWLNamedIndividual individual = getIndividual(iid, model); - if (individual == null) { - throw new UnknownIdentifierException("Could not find a individual for id: "+iid); - } - OWLClass cls = getClass(cid, model); - if (cls == null) { - throw new UnknownIdentifierException("Could not find a class for id: "+cid); - } - removeType(model, individual, cls, metadata); - } - + + /** + * Convenience wrapper for {@link CoreMolecularModelManager#addType}. 
+ * + * @param modelId + * @param iid + * @param pid + * @param cid + * @param metadata + * @throws UnknownIdentifierException + */ + public void addType(IRI modelId, + String iid, String pid, String cid, METADATA metadata) throws UnknownIdentifierException { + ModelContainer model = checkModelId(modelId); + OWLNamedIndividual individual = getIndividual(iid, model); + if (individual == null) { + throw new UnknownIdentifierException("Could not find a individual for id: " + iid); + } + OWLObjectProperty property = getObjectProperty(pid, model); + if (property == null) { + throw new UnknownIdentifierException("Could not find a property for id: " + pid); + } + OWLClass cls = getClass(cid, model); + if (cls == null) { + throw new UnknownIdentifierException("Could not find a class for id: " + cid); + } + addType(model, individual, property, cls, metadata); + } + + public OWLNamedIndividual addType(IRI modelId, + String iid, String pid, OWLClassExpression ce, METADATA metadata) throws UnknownIdentifierException { + ModelContainer model = checkModelId(modelId); + OWLNamedIndividual individual = getIndividual(iid, model); + if (individual == null) { + throw new UnknownIdentifierException("Could not find a individual for id: " + iid); + } + OWLObjectProperty property = getObjectProperty(pid, model); + if (property == null) { + throw new UnknownIdentifierException("Could not find a property for id: " + pid); + } + addType(model, individual, property, ce, metadata); + return individual; + } + + /** + * Convenience wrapper for {@link CoreMolecularModelManager#removeType} + * + * @param modelId + * @param iid + * @param cid + * @param metadata + * @throws UnknownIdentifierException + */ + public void removeType(IRI modelId, String iid, String cid, METADATA metadata) throws UnknownIdentifierException { + ModelContainer model = checkModelId(modelId); + OWLNamedIndividual individual = getIndividual(iid, model); + if (individual == null) { + throw new UnknownIdentifierException("Could not find a individual for id: " + iid); + } + OWLClass cls = getClass(cid, model); + if (cls == null) { + throw new UnknownIdentifierException("Could not find a class for id: " + cid); + } + removeType(model, individual, cls, metadata); + } + // public OWLNamedIndividual removeTypeNonReasoning(ModelContainer model, OWLNamedIndividual individual, OWLClassExpression clsExp, METADATA metadata) { // removeType(model, individual, clsExp, false, metadata); // return individual; // } - - /** - * Convenience wrapper for {@link CoreMolecularModelManager#addFact} - * - * @param modelId - * @param pid - * @param iid - * @param jid - * @param annotations - * @param metadata - * @return relevant individuals - * @throws UnknownIdentifierException - */ - public List addFact(IRI modelId, String pid, String iid, String jid, - Set annotations, METADATA metadata) throws UnknownIdentifierException { - ModelContainer model = checkModelId(modelId); - OWLObjectProperty property = getObjectProperty(pid, model); - if (property == null) { - throw new UnknownIdentifierException("Could not find a property for id: "+pid); - } - OWLNamedIndividual individual1 = getIndividual(iid, model); - if (individual1 == null) { - throw new UnknownIdentifierException("Could not find a individual (1) for id: "+iid); - } - OWLNamedIndividual individual2 = getIndividual(jid, model); - if (individual2 == null) { - throw new UnknownIdentifierException("Could not find a individual (2) for id: "+jid); - } - addFact(model, property, individual1, individual2, annotations, 
metadata); - return Arrays.asList(individual1, individual2); - } - - /** - * Convenience wrapper for {@link CoreMolecularModelManager#addFact} - * - * @param model - * @param pid - * @param iid - * @param jid - * @param annotations - * @param metadata - * @return relevant individuals - * @throws UnknownIdentifierException - */ - public List addFact(ModelContainer model, String pid, String iid, String jid, - Set annotations, METADATA metadata) throws UnknownIdentifierException { - OWLObjectProperty property = getObjectProperty(pid, model); - if (property == null) { - throw new UnknownIdentifierException("Could not find a property for id: "+pid); - } - OWLNamedIndividual individual1 = getIndividual(iid, model); - if (individual1 == null) { - throw new UnknownIdentifierException("Could not find a individual (1) for id: "+iid); - } - OWLNamedIndividual individual2 = getIndividual(jid, model); - if (individual2 == null) { - throw new UnknownIdentifierException("Could not find a individual (2) for id: "+jid); - } - addFact(model, property, individual1, individual2, annotations, metadata); - return Arrays.asList(individual1, individual2); - } - - /** - * Convenience wrapper for {@link CoreMolecularModelManager#addFact} - * - * @param modelId - * @param vocabElement - * @param iid - * @param jid - * @param annotations - * @param metadata - * @return relevant individuals - * @throws UnknownIdentifierException - */ - public List addFact(IRI modelId, OBOUpperVocabulary vocabElement, - String iid, String jid, Set annotations, METADATA metadata) throws UnknownIdentifierException { - ModelContainer model = checkModelId(modelId); - OWLObjectPropertyExpression property = getObjectProperty(vocabElement, model); - if (property == null) { - throw new UnknownIdentifierException("Could not find a individual for id: "+vocabElement); - } - OWLNamedIndividual individual1 = getIndividual(iid, model); - if (individual1 == null) { - throw new UnknownIdentifierException("Could not find a individual for id: "+iid); - } - OWLNamedIndividual individual2 = getIndividual(jid, model); - if (individual2 == null) { - throw new UnknownIdentifierException("Could not find a individual for id: "+jid); - } - addFact(model, property, individual1, individual2, annotations, metadata); - return Arrays.asList(individual1, individual2); - } - - /** - * @param modelId - * @param pid - * @param iid - * @param jid - * @param metadata - * @return response info - * @throws UnknownIdentifierException - */ - public List removeFact(IRI modelId, String pid, - String iid, String jid, METADATA metadata) throws UnknownIdentifierException { - ModelContainer model = checkModelId(modelId); - OWLObjectProperty property = getObjectProperty(pid, model); - if (property == null) { - throw new UnknownIdentifierException("Could not find a individual for id: "+pid); - } - OWLNamedIndividual individual1 = getIndividual(iid, model); - if (individual1 == null) { - throw new UnknownIdentifierException("Could not find a individual for id: "+iid); - } - OWLNamedIndividual individual2 = getIndividual(jid, model); - if (individual2 == null) { - throw new UnknownIdentifierException("Could not find a individual for id: "+jid); - } - removeFact(model, property, individual1, individual2, metadata); - return Arrays.asList(individual1, individual2); - } - - public List addAnnotations(ModelContainer model, String pid, - String iid, String jid, Set annotations, METADATA metadata) throws UnknownIdentifierException { - OWLObjectProperty property = getObjectProperty(pid, 
model); - if (property == null) { - throw new UnknownIdentifierException("Could not find a property for id: "+pid); - } - OWLNamedIndividual individual1 = getIndividual(iid, model); - if (individual1 == null) { - throw new UnknownIdentifierException("Could not find a individual (1) for id: "+iid); - } - OWLNamedIndividual individual2 = getIndividual(jid, model); - if (individual2 == null) { - throw new UnknownIdentifierException("Could not find a individual (2) for id: "+jid); - } - addAnnotations(model, property, individual1, individual2, annotations, metadata); - - return Arrays.asList(individual1, individual2); - } - - public void addAnnotations(ModelContainer model, Set axioms, Set annotations, METADATA metadata) { - for (OWLObjectPropertyAssertionAxiom axiom : axioms) { - addAnnotations(model, axiom, annotations, metadata); - } - } - - public Set updateAnnotation(ModelContainer model, Set axioms, OWLAnnotation annotation, METADATA metadata) { - Set newAxioms = new HashSet(); - for (OWLObjectPropertyAssertionAxiom axiom : axioms) { - OWLObjectPropertyAssertionAxiom newAxiom = - updateAnnotation(model, axiom, annotation, metadata); - if (newAxiom != null) { - newAxioms.add(newAxiom); - } - } - return newAxioms; - } - - public OWLNamedIndividual addDataProperties(ModelContainer model, OWLNamedIndividual i, - Map> dataProperties, METADATA token) { - if (dataProperties != null && !dataProperties.isEmpty()) { - for(Entry> entry : dataProperties.entrySet()) { - for(OWLLiteral literal : entry.getValue()) { - addDataProperty(model, i, entry.getKey(), literal, token); - } - } - } - return i; - } - - public OWLNamedIndividual removeDataProperties(ModelContainer model, OWLNamedIndividual i, - Map> dataProperties, METADATA token) { - if (dataProperties != null && !dataProperties.isEmpty()) { - for(Entry> entry : dataProperties.entrySet()) { - for(OWLLiteral literal : entry.getValue()) { - removeDataProperty(model, i, entry.getKey(), literal, token); - } - } - } - return i; - } - - + + /** + * Convenience wrapper for {@link CoreMolecularModelManager#addFact} + * + * @param modelId + * @param pid + * @param iid + * @param jid + * @param annotations + * @param metadata + * @return relevant individuals + * @throws UnknownIdentifierException + */ + public List addFact(IRI modelId, String pid, String iid, String jid, + Set annotations, METADATA metadata) throws UnknownIdentifierException { + ModelContainer model = checkModelId(modelId); + OWLObjectProperty property = getObjectProperty(pid, model); + if (property == null) { + throw new UnknownIdentifierException("Could not find a property for id: " + pid); + } + OWLNamedIndividual individual1 = getIndividual(iid, model); + if (individual1 == null) { + throw new UnknownIdentifierException("Could not find a individual (1) for id: " + iid); + } + OWLNamedIndividual individual2 = getIndividual(jid, model); + if (individual2 == null) { + throw new UnknownIdentifierException("Could not find a individual (2) for id: " + jid); + } + addFact(model, property, individual1, individual2, annotations, metadata); + return Arrays.asList(individual1, individual2); + } + + /** + * Convenience wrapper for {@link CoreMolecularModelManager#addFact} + * + * @param model + * @param pid + * @param iid + * @param jid + * @param annotations + * @param metadata + * @return relevant individuals + * @throws UnknownIdentifierException + */ + public List addFact(ModelContainer model, String pid, String iid, String jid, + Set annotations, METADATA metadata) throws 
UnknownIdentifierException { + OWLObjectProperty property = getObjectProperty(pid, model); + if (property == null) { + throw new UnknownIdentifierException("Could not find a property for id: " + pid); + } + OWLNamedIndividual individual1 = getIndividual(iid, model); + if (individual1 == null) { + throw new UnknownIdentifierException("Could not find a individual (1) for id: " + iid); + } + OWLNamedIndividual individual2 = getIndividual(jid, model); + if (individual2 == null) { + throw new UnknownIdentifierException("Could not find a individual (2) for id: " + jid); + } + addFact(model, property, individual1, individual2, annotations, metadata); + return Arrays.asList(individual1, individual2); + } + + /** + * Convenience wrapper for {@link CoreMolecularModelManager#addFact} + * + * @param modelId + * @param vocabElement + * @param iid + * @param jid + * @param annotations + * @param metadata + * @return relevant individuals + * @throws UnknownIdentifierException + */ + public List addFact(IRI modelId, OBOUpperVocabulary vocabElement, + String iid, String jid, Set annotations, METADATA metadata) throws UnknownIdentifierException { + ModelContainer model = checkModelId(modelId); + OWLObjectPropertyExpression property = getObjectProperty(vocabElement, model); + if (property == null) { + throw new UnknownIdentifierException("Could not find a individual for id: " + vocabElement); + } + OWLNamedIndividual individual1 = getIndividual(iid, model); + if (individual1 == null) { + throw new UnknownIdentifierException("Could not find a individual for id: " + iid); + } + OWLNamedIndividual individual2 = getIndividual(jid, model); + if (individual2 == null) { + throw new UnknownIdentifierException("Could not find a individual for id: " + jid); + } + addFact(model, property, individual1, individual2, annotations, metadata); + return Arrays.asList(individual1, individual2); + } + + /** + * @param modelId + * @param pid + * @param iid + * @param jid + * @param metadata + * @return response info + * @throws UnknownIdentifierException + */ + public List removeFact(IRI modelId, String pid, + String iid, String jid, METADATA metadata) throws UnknownIdentifierException { + ModelContainer model = checkModelId(modelId); + OWLObjectProperty property = getObjectProperty(pid, model); + if (property == null) { + throw new UnknownIdentifierException("Could not find a individual for id: " + pid); + } + OWLNamedIndividual individual1 = getIndividual(iid, model); + if (individual1 == null) { + throw new UnknownIdentifierException("Could not find a individual for id: " + iid); + } + OWLNamedIndividual individual2 = getIndividual(jid, model); + if (individual2 == null) { + throw new UnknownIdentifierException("Could not find a individual for id: " + jid); + } + removeFact(model, property, individual1, individual2, metadata); + return Arrays.asList(individual1, individual2); + } + + public List addAnnotations(ModelContainer model, String pid, + String iid, String jid, Set annotations, METADATA metadata) throws UnknownIdentifierException { + OWLObjectProperty property = getObjectProperty(pid, model); + if (property == null) { + throw new UnknownIdentifierException("Could not find a property for id: " + pid); + } + OWLNamedIndividual individual1 = getIndividual(iid, model); + if (individual1 == null) { + throw new UnknownIdentifierException("Could not find a individual (1) for id: " + iid); + } + OWLNamedIndividual individual2 = getIndividual(jid, model); + if (individual2 == null) { + throw new 
UnknownIdentifierException("Could not find a individual (2) for id: " + jid); + } + addAnnotations(model, property, individual1, individual2, annotations, metadata); + + return Arrays.asList(individual1, individual2); + } + + public void addAnnotations(ModelContainer model, Set axioms, Set annotations, METADATA metadata) { + for (OWLObjectPropertyAssertionAxiom axiom : axioms) { + addAnnotations(model, axiom, annotations, metadata); + } + } + + public Set updateAnnotation(ModelContainer model, Set axioms, OWLAnnotation annotation, METADATA metadata) { + Set newAxioms = new HashSet(); + for (OWLObjectPropertyAssertionAxiom axiom : axioms) { + OWLObjectPropertyAssertionAxiom newAxiom = + updateAnnotation(model, axiom, annotation, metadata); + if (newAxiom != null) { + newAxioms.add(newAxiom); + } + } + return newAxioms; + } + + public OWLNamedIndividual addDataProperties(ModelContainer model, OWLNamedIndividual i, + Map> dataProperties, METADATA token) { + if (dataProperties != null && !dataProperties.isEmpty()) { + for (Entry> entry : dataProperties.entrySet()) { + for (OWLLiteral literal : entry.getValue()) { + addDataProperty(model, i, entry.getKey(), literal, token); + } + } + } + return i; + } + + public OWLNamedIndividual removeDataProperties(ModelContainer model, OWLNamedIndividual i, + Map> dataProperties, METADATA token) { + if (dataProperties != null && !dataProperties.isEmpty()) { + for (Entry> entry : dataProperties.entrySet()) { + for (OWLLiteral literal : entry.getValue()) { + removeDataProperty(model, i, entry.getKey(), literal, token); + } + } + } + return i; + } + + } diff --git a/minerva-core/src/main/java/org/geneontology/minerva/UndoAwareMolecularModelManager.java b/minerva-core/src/main/java/org/geneontology/minerva/UndoAwareMolecularModelManager.java index e631db27..e185fc0d 100644 --- a/minerva-core/src/main/java/org/geneontology/minerva/UndoAwareMolecularModelManager.java +++ b/minerva-core/src/main/java/org/geneontology/minerva/UndoAwareMolecularModelManager.java @@ -1,319 +1,309 @@ package org.geneontology.minerva; -import java.io.IOException; -import java.util.ArrayList; -import java.util.Collections; -import java.util.Deque; -import java.util.HashMap; -import java.util.LinkedList; -import java.util.List; -import java.util.Map; -import java.util.concurrent.atomic.AtomicLong; - import org.apache.commons.lang3.tuple.Pair; import org.geneontology.minerva.UndoAwareMolecularModelManager.UndoMetadata; import org.geneontology.minerva.curie.CurieHandler; import org.geneontology.minerva.util.ReverseChangeGenerator; -import org.geneontology.rules.engine.WorkingMemory; import org.semanticweb.owlapi.model.IRI; import org.semanticweb.owlapi.model.OWLOntology; import org.semanticweb.owlapi.model.OWLOntologyChange; import org.semanticweb.owlapi.model.OWLOntologyCreationException; -import org.semanticweb.owlapi.model.OWLOntologyManager; + +import java.io.IOException; +import java.util.*; +import java.util.concurrent.atomic.AtomicLong; /** * Provide undo and redo operations for the {@link MolecularModelManager}. 
*/ public class UndoAwareMolecularModelManager extends MolecularModelManager { - - private final Map allChanges = new HashMap<>(); - - private static class UndoRedo { - final Deque undoBuffer = new LinkedList<>(); - final Deque redoBuffer = new LinkedList<>(); - private UndoMetadata token = null; - - void addUndo(List changes, UndoMetadata metadata) { - addUndo(new ChangeEvent(metadata.userId, changes, System.currentTimeMillis()), metadata); - } - - void addUndo(List changes, String userId) { - token = null; - undoBuffer.push(new ChangeEvent(userId, changes, System.currentTimeMillis())); - } - - void addUndo(ChangeEvent changes, UndoMetadata token) { - if (this.token == null || this.token.equals(token) == false) { - // new event or different event - undoBuffer.push(changes); - this.token = token; - } - else { - // append to last event - ChangeEvent current = undoBuffer.peek(); - if (current != null) { - current.getChanges().addAll(changes.getChanges()); - } - else { - undoBuffer.push(changes); - } - } - } - - ChangeEvent getUndo() { - if (undoBuffer.peek() != null) { - return undoBuffer.pop(); - } - return null; - } - - void addRedo(List changes, String userId) { - addRedo(new ChangeEvent(userId, changes, System.currentTimeMillis())); - } - - void addRedo(ChangeEvent changes) { - redoBuffer.push(changes); - this.token = null; - } - - ChangeEvent getRedo() { - if (redoBuffer.peek() != null) { - return redoBuffer.pop(); - } - return null; - } - - void clearRedo() { - redoBuffer.clear(); - } - } - - public static class UndoMetadata { - private static final AtomicLong instanceCounter = new AtomicLong(0L); - - public final String userId; - public final long requestToken; - - /** - * @param userId - */ - public UndoMetadata(String userId) { - this.userId = userId; - this.requestToken = instanceCounter.getAndIncrement(); - } - - @Override - public int hashCode() { - final int prime = 31; - int result = 1; - result = prime * result - + (int) (requestToken ^ (requestToken >>> 32)); - result = prime * result - + ((userId == null) ? 0 : userId.hashCode()); - return result; - } - - @Override - public boolean equals(Object obj) { - if (this == obj) { - return true; - } - if (obj == null) { - return false; - } - if (getClass() != obj.getClass()) { - return false; - } - UndoMetadata other = (UndoMetadata) obj; - if (requestToken != other.requestToken) { - return false; - } - if (userId == null) { - if (other.userId != null) { - return false; - } - } else if (!userId.equals(other.userId)) { - return false; - } - return true; - } - } - - /** - * Details for a change in a model. 
- */ - public static class ChangeEvent { - final String userId; - final List changes; - final long time; - - /** - * @param userId - * @param changes - * @param time - */ - public ChangeEvent(String userId, List changes, long time) { - this.userId = userId; - this.changes = new ArrayList(changes); - this.time = time; - } - - public String getUserId() { - return userId; - } - - public List getChanges() { - return changes; - } - - public long getTime() { - return time; - } - } - - public UndoAwareMolecularModelManager(OWLOntology tbox, - CurieHandler curieHandler, String modelIdLongFormPrefix, String pathToJournal, String pathToExportFolder, String pathToOntologyJournal, boolean downloadOntologyJournal) throws OWLOntologyCreationException, IOException { - super(tbox, curieHandler, modelIdLongFormPrefix, pathToJournal, pathToExportFolder, pathToOntologyJournal, downloadOntologyJournal); - } - - @Override - protected void addToHistory(ModelContainer model, List appliedChanges, UndoMetadata metadata) { - if (appliedChanges == null || appliedChanges.isEmpty()) { - // do nothing - return; - } - UndoRedo undoRedo; - synchronized (allChanges) { - IRI modelId = model.getModelId(); - undoRedo = allChanges.get(modelId); - if (undoRedo == null) { - undoRedo = new UndoRedo(); - allChanges.put(modelId, undoRedo); - } - } - synchronized (undoRedo) { - // append to undo - undoRedo.addUndo(appliedChanges, metadata); - // clear redo - undoRedo.clearRedo(); - } - } - - /** - * Undo latest change for the given model. - * - * @param model - * @param userId - * @return true if the undo was successful - */ - public boolean undo(ModelContainer model, String userId) { - UndoRedo undoRedo; - synchronized (allChanges) { - undoRedo = allChanges.get(model.getModelId()); - } - if (undoRedo != null) { - final OWLOntology abox = model.getAboxOntology(); - synchronized (abox) { - /* - * WARNING multiple locks (undoRedo and abox) always lock ontology first - * to avoid deadlocks! - */ - synchronized (undoRedo) { - // pop from undo - ChangeEvent event = undoRedo.getUndo(); - if (event == null) { - return false; - } - - // invert and apply changes - List invertedChanges = ReverseChangeGenerator.invertChanges(event.getChanges()); - applyChanges(model, invertedChanges); - - // push to redo - undoRedo.addRedo(event.changes, userId); - return true; - } - } - } - return false; - } - - /** - * Redo latest change for the given model. - * - * @param model - * @param userId - * @return true if the redo was successful - */ - public boolean redo(ModelContainer model, String userId) { - UndoRedo undoRedo; - synchronized (allChanges) { - undoRedo = allChanges.get(model.getModelId()); - } - if (undoRedo != null) { - final OWLOntology abox = model.getAboxOntology(); - synchronized (abox) { - /* - * WARNING multiple locks (undoRedo and abox) always lock ontology first - * to avoid deadlocks! - */ - synchronized (undoRedo) { - // pop() from redo - ChangeEvent event = undoRedo.getRedo(); - if (event == null) { - return false; - } - - // apply changes - applyChanges(model, event.getChanges()); - - // push() to undo - undoRedo.addUndo(event.getChanges(), userId); - return true; - } - } - } - return false; - } - - /** - * Retrieve the current available undo and redo events. 
- * - * @param modelId - * @return pair of undo (left) and redo (right) events - */ - public Pair, List> getUndoRedoEvents(IRI modelId) { - UndoRedo undoRedo = null; - synchronized (allChanges) { - undoRedo = allChanges.get(modelId); - } - if (undoRedo == null) { - // return empty of no data is available - return Pair.of(Collections.emptyList(), Collections.emptyList()); - } - synchronized (undoRedo) { - // copy the current lists - List undoList = new ArrayList(undoRedo.undoBuffer); - List redoList = new ArrayList(undoRedo.redoBuffer); - return Pair.of(undoList, redoList); - } - } - - public void clearUndoHistory(IRI modelId) { - UndoRedo undoRedo = null; - synchronized (allChanges) { - undoRedo = allChanges.get(modelId); - } - if (undoRedo != null) { - synchronized (undoRedo) { - undoRedo.undoBuffer.clear(); - } - } - } - - protected void applyChanges(ModelContainer model, List changes) { - model.applyChanges(changes); - } - - + + private final Map allChanges = new HashMap<>(); + + private static class UndoRedo { + final Deque undoBuffer = new LinkedList<>(); + final Deque redoBuffer = new LinkedList<>(); + private UndoMetadata token = null; + + void addUndo(List changes, UndoMetadata metadata) { + addUndo(new ChangeEvent(metadata.userId, changes, System.currentTimeMillis()), metadata); + } + + void addUndo(List changes, String userId) { + token = null; + undoBuffer.push(new ChangeEvent(userId, changes, System.currentTimeMillis())); + } + + void addUndo(ChangeEvent changes, UndoMetadata token) { + if (this.token == null || this.token.equals(token) == false) { + // new event or different event + undoBuffer.push(changes); + this.token = token; + } else { + // append to last event + ChangeEvent current = undoBuffer.peek(); + if (current != null) { + current.getChanges().addAll(changes.getChanges()); + } else { + undoBuffer.push(changes); + } + } + } + + ChangeEvent getUndo() { + if (undoBuffer.peek() != null) { + return undoBuffer.pop(); + } + return null; + } + + void addRedo(List changes, String userId) { + addRedo(new ChangeEvent(userId, changes, System.currentTimeMillis())); + } + + void addRedo(ChangeEvent changes) { + redoBuffer.push(changes); + this.token = null; + } + + ChangeEvent getRedo() { + if (redoBuffer.peek() != null) { + return redoBuffer.pop(); + } + return null; + } + + void clearRedo() { + redoBuffer.clear(); + } + } + + public static class UndoMetadata { + private static final AtomicLong instanceCounter = new AtomicLong(0L); + + public final String userId; + public final long requestToken; + + /** + * @param userId + */ + public UndoMetadata(String userId) { + this.userId = userId; + this.requestToken = instanceCounter.getAndIncrement(); + } + + @Override + public int hashCode() { + final int prime = 31; + int result = 1; + result = prime * result + + (int) (requestToken ^ (requestToken >>> 32)); + result = prime * result + + ((userId == null) ? 0 : userId.hashCode()); + return result; + } + + @Override + public boolean equals(Object obj) { + if (this == obj) { + return true; + } + if (obj == null) { + return false; + } + if (getClass() != obj.getClass()) { + return false; + } + UndoMetadata other = (UndoMetadata) obj; + if (requestToken != other.requestToken) { + return false; + } + if (userId == null) { + if (other.userId != null) { + return false; + } + } else if (!userId.equals(other.userId)) { + return false; + } + return true; + } + } + + /** + * Details for a change in a model. 
+ */ + public static class ChangeEvent { + final String userId; + final List changes; + final long time; + + /** + * @param userId + * @param changes + * @param time + */ + public ChangeEvent(String userId, List changes, long time) { + this.userId = userId; + this.changes = new ArrayList(changes); + this.time = time; + } + + public String getUserId() { + return userId; + } + + public List getChanges() { + return changes; + } + + public long getTime() { + return time; + } + } + + public UndoAwareMolecularModelManager(OWLOntology tbox, + CurieHandler curieHandler, String modelIdLongFormPrefix, String pathToJournal, String pathToExportFolder, String pathToOntologyJournal, boolean downloadOntologyJournal) throws OWLOntologyCreationException, IOException { + super(tbox, curieHandler, modelIdLongFormPrefix, pathToJournal, pathToExportFolder, pathToOntologyJournal, downloadOntologyJournal); + } + + @Override + protected void addToHistory(ModelContainer model, List appliedChanges, UndoMetadata metadata) { + if (appliedChanges == null || appliedChanges.isEmpty()) { + // do nothing + return; + } + UndoRedo undoRedo; + synchronized (allChanges) { + IRI modelId = model.getModelId(); + undoRedo = allChanges.get(modelId); + if (undoRedo == null) { + undoRedo = new UndoRedo(); + allChanges.put(modelId, undoRedo); + } + } + synchronized (undoRedo) { + // append to undo + undoRedo.addUndo(appliedChanges, metadata); + // clear redo + undoRedo.clearRedo(); + } + } + + /** + * Undo latest change for the given model. + * + * @param model + * @param userId + * @return true if the undo was successful + */ + public boolean undo(ModelContainer model, String userId) { + UndoRedo undoRedo; + synchronized (allChanges) { + undoRedo = allChanges.get(model.getModelId()); + } + if (undoRedo != null) { + final OWLOntology abox = model.getAboxOntology(); + synchronized (abox) { + /* + * WARNING multiple locks (undoRedo and abox) always lock ontology first + * to avoid deadlocks! + */ + synchronized (undoRedo) { + // pop from undo + ChangeEvent event = undoRedo.getUndo(); + if (event == null) { + return false; + } + + // invert and apply changes + List invertedChanges = ReverseChangeGenerator.invertChanges(event.getChanges()); + applyChanges(model, invertedChanges); + + // push to redo + undoRedo.addRedo(event.changes, userId); + return true; + } + } + } + return false; + } + + /** + * Redo latest change for the given model. + * + * @param model + * @param userId + * @return true if the redo was successful + */ + public boolean redo(ModelContainer model, String userId) { + UndoRedo undoRedo; + synchronized (allChanges) { + undoRedo = allChanges.get(model.getModelId()); + } + if (undoRedo != null) { + final OWLOntology abox = model.getAboxOntology(); + synchronized (abox) { + /* + * WARNING multiple locks (undoRedo and abox) always lock ontology first + * to avoid deadlocks! + */ + synchronized (undoRedo) { + // pop() from redo + ChangeEvent event = undoRedo.getRedo(); + if (event == null) { + return false; + } + + // apply changes + applyChanges(model, event.getChanges()); + + // push() to undo + undoRedo.addUndo(event.getChanges(), userId); + return true; + } + } + } + return false; + } + + /** + * Retrieve the current available undo and redo events. 
+ * + * @param modelId + * @return pair of undo (left) and redo (right) events + */ + public Pair, List> getUndoRedoEvents(IRI modelId) { + UndoRedo undoRedo = null; + synchronized (allChanges) { + undoRedo = allChanges.get(modelId); + } + if (undoRedo == null) { + // return empty of no data is available + return Pair.of(Collections.emptyList(), Collections.emptyList()); + } + synchronized (undoRedo) { + // copy the current lists + List undoList = new ArrayList(undoRedo.undoBuffer); + List redoList = new ArrayList(undoRedo.redoBuffer); + return Pair.of(undoList, redoList); + } + } + + public void clearUndoHistory(IRI modelId) { + UndoRedo undoRedo = null; + synchronized (allChanges) { + undoRedo = allChanges.get(modelId); + } + if (undoRedo != null) { + synchronized (undoRedo) { + undoRedo.undoBuffer.clear(); + } + } + } + + protected void applyChanges(ModelContainer model, List changes) { + model.applyChanges(changes); + } + + } diff --git a/minerva-core/src/main/java/org/geneontology/minerva/curie/CurieHandler.java b/minerva-core/src/main/java/org/geneontology/minerva/curie/CurieHandler.java index 40b5d640..326fe6e2 100644 --- a/minerva-core/src/main/java/org/geneontology/minerva/curie/CurieHandler.java +++ b/minerva-core/src/main/java/org/geneontology/minerva/curie/CurieHandler.java @@ -1,31 +1,26 @@ package org.geneontology.minerva.curie; import org.geneontology.minerva.MolecularModelManager.UnknownIdentifierException; -import org.semanticweb.owlapi.model.IRI; -import org.semanticweb.owlapi.model.OWLAnnotationProperty; -import org.semanticweb.owlapi.model.OWLClass; -import org.semanticweb.owlapi.model.OWLDataProperty; -import org.semanticweb.owlapi.model.OWLNamedIndividual; -import org.semanticweb.owlapi.model.OWLObjectProperty; +import org.semanticweb.owlapi.model.*; import java.util.Map; public interface CurieHandler { - public String getCuri(OWLClass cls); - - public String getCuri(OWLNamedIndividual i); - - public String getCuri(OWLObjectProperty p); - - public String getCuri(OWLDataProperty p); - - public String getCuri(OWLAnnotationProperty p); - - public String getCuri(IRI iri); - - public IRI getIRI(String curi) throws UnknownIdentifierException; - - public Map getMappings(); + public String getCuri(OWLClass cls); + + public String getCuri(OWLNamedIndividual i); + + public String getCuri(OWLObjectProperty p); + + public String getCuri(OWLDataProperty p); + + public String getCuri(OWLAnnotationProperty p); + + public String getCuri(IRI iri); + + public IRI getIRI(String curi) throws UnknownIdentifierException; + + public Map getMappings(); } diff --git a/minerva-core/src/main/java/org/geneontology/minerva/curie/CurieMappings.java b/minerva-core/src/main/java/org/geneontology/minerva/curie/CurieMappings.java index 70d100ae..edf619c8 100644 --- a/minerva-core/src/main/java/org/geneontology/minerva/curie/CurieMappings.java +++ b/minerva-core/src/main/java/org/geneontology/minerva/curie/CurieMappings.java @@ -5,20 +5,20 @@ public interface CurieMappings { - public Map getMappings(); - - static final CurieMappings EMPTY = new SimpleCurieMappings(Collections.emptyMap()); - - static class SimpleCurieMappings implements CurieMappings { - private final Map mappings; + public Map getMappings(); - public SimpleCurieMappings(Map mappings) { - this.mappings = mappings; - } + static final CurieMappings EMPTY = new SimpleCurieMappings(Collections.emptyMap()); - @Override - public Map getMappings() { - return mappings; - } - } + static class SimpleCurieMappings implements CurieMappings { + 
private final Map mappings; + + public SimpleCurieMappings(Map mappings) { + this.mappings = mappings; + } + + @Override + public Map getMappings() { + return mappings; + } + } } diff --git a/minerva-core/src/main/java/org/geneontology/minerva/curie/CurieMappingsJsonld.java b/minerva-core/src/main/java/org/geneontology/minerva/curie/CurieMappingsJsonld.java index b5efd473..5c45fb15 100644 --- a/minerva-core/src/main/java/org/geneontology/minerva/curie/CurieMappingsJsonld.java +++ b/minerva-core/src/main/java/org/geneontology/minerva/curie/CurieMappingsJsonld.java @@ -1,73 +1,72 @@ package org.geneontology.minerva.curie; +import com.google.gson.Gson; +import com.google.gson.JsonSyntaxException; +import org.apache.commons.io.IOUtils; +import org.apache.log4j.Logger; + import java.io.IOException; import java.io.InputStream; import java.util.HashMap; import java.util.Map; import java.util.Map.Entry; -import org.apache.commons.io.IOUtils; -import org.apache.log4j.Logger; +public final class CurieMappingsJsonld { -import com.google.gson.Gson; -import com.google.gson.JsonSyntaxException; + private static final Logger LOG = Logger.getLogger(CurieMappingsJsonld.class); -public final class CurieMappingsJsonld { + private CurieMappingsJsonld() { + // no instance + } + + public static CurieMappings loadJsonLdContext(InputStream inputStream) { + try { + String jsonld = IOUtils.toString(inputStream); + return loadJsonLdContext(jsonld); + } catch (IOException e) { + LOG.error("Could not load JsonLD from input stream.", e); + } + return CurieMappings.EMPTY; + } + + + @SuppressWarnings({"rawtypes", "unchecked"}) + private static CurieMappings loadJsonLdContext(String jsonldContent) { + try { + Gson gson = new Gson(); + Map topLevelMap = gson.fromJson(jsonldContent, Map.class); + Map parseMappings = new HashMap(); + if (topLevelMap.containsKey("@context")) { + Object jsonContext = topLevelMap.get("@context"); + if (jsonContext instanceof Map) { + parseEntries((Map) jsonContext, parseMappings); + } + } + parseEntries(topLevelMap, parseMappings); + return new CurieMappings.SimpleCurieMappings(parseMappings); + } catch (JsonSyntaxException e) { + LOG.error("Could not parse JsonLD due to a JSON syntax problem.", e); + } + return CurieMappings.EMPTY; + } - private static final Logger LOG = Logger.getLogger(CurieMappingsJsonld.class); - - private CurieMappingsJsonld() { - // no instance - } - - public static CurieMappings loadJsonLdContext(InputStream inputStream) { - try { - String jsonld = IOUtils.toString(inputStream); - return loadJsonLdContext(jsonld); - } catch (IOException e) { - LOG.error("Could not load JsonLD from input stream.", e); - } - return CurieMappings.EMPTY; - } - - - @SuppressWarnings({ "rawtypes", "unchecked" }) - private static CurieMappings loadJsonLdContext(String jsonldContent) { - try { - Gson gson = new Gson(); - Map topLevelMap = gson.fromJson(jsonldContent, Map.class); - Map parseMappings = new HashMap(); - if (topLevelMap.containsKey("@context")) { - Object jsonContext = topLevelMap.get("@context"); - if (jsonContext instanceof Map) { - parseEntries((Map)jsonContext, parseMappings); - } - } - parseEntries(topLevelMap, parseMappings); - return new CurieMappings.SimpleCurieMappings(parseMappings); - } catch (JsonSyntaxException e) { - LOG.error("Could not parse JsonLD due to a JSON syntax problem.", e); - } - return CurieMappings.EMPTY; - } - - private static void parseEntries(Map json, Map parsedMappings) { - for(Entry e : json.entrySet()){ - Object key = e.getKey(); - Object value = 
e.getValue(); - if (key != null && value != null) { - String shortPrefix = key.toString(); - if (shortPrefix.isEmpty() || shortPrefix.startsWith("@")) { - continue; - } - if (value instanceof CharSequence) { - String longPrefix = value.toString(); - if (longPrefix.isEmpty()) { - continue; - } - parsedMappings.put(shortPrefix, longPrefix); - } - } - } - } + private static void parseEntries(Map json, Map parsedMappings) { + for (Entry e : json.entrySet()) { + Object key = e.getKey(); + Object value = e.getValue(); + if (key != null && value != null) { + String shortPrefix = key.toString(); + if (shortPrefix.isEmpty() || shortPrefix.startsWith("@")) { + continue; + } + if (value instanceof CharSequence) { + String longPrefix = value.toString(); + if (longPrefix.isEmpty()) { + continue; + } + parsedMappings.put(shortPrefix, longPrefix); + } + } + } + } } diff --git a/minerva-core/src/main/java/org/geneontology/minerva/curie/DefaultCurieHandler.java b/minerva-core/src/main/java/org/geneontology/minerva/curie/DefaultCurieHandler.java index 9c19adfa..f9407812 100644 --- a/minerva-core/src/main/java/org/geneontology/minerva/curie/DefaultCurieHandler.java +++ b/minerva-core/src/main/java/org/geneontology/minerva/curie/DefaultCurieHandler.java @@ -1,5 +1,7 @@ package org.geneontology.minerva.curie; +import org.apache.log4j.Logger; + import java.io.File; import java.io.FileInputStream; import java.io.FileNotFoundException; @@ -7,71 +9,67 @@ import java.util.HashMap; import java.util.Map; -import org.apache.log4j.Logger; - public class DefaultCurieHandler { - - private static final Logger LOG = Logger.getLogger(DefaultCurieHandler.class); - private DefaultCurieHandler() { - // no instances - } - - public static synchronized CurieHandler getDefaultHandler() { - return new MappedCurieHandler(loadDefaultMappings()); - } - - public static CurieHandler getHandlerForFile(File jsonld) throws FileNotFoundException { - return new MappedCurieHandler(loadMappingsFromFile(jsonld)); - } - - public static CurieMappings loadMappingsFromFile(File jsonld) throws FileNotFoundException { - final Map curieMap = new HashMap(); - loadJsonldStream(new FileInputStream(jsonld), curieMap); - return new CurieMappings.SimpleCurieMappings(curieMap); - } - - public static CurieMappings loadDefaultMappings() { - final Map curieMap = new HashMap(); + private static final Logger LOG = Logger.getLogger(DefaultCurieHandler.class); + + private DefaultCurieHandler() { + // no instances + } + + public static synchronized CurieHandler getDefaultHandler() { + return new MappedCurieHandler(loadDefaultMappings()); + } + + public static CurieHandler getHandlerForFile(File jsonld) throws FileNotFoundException { + return new MappedCurieHandler(loadMappingsFromFile(jsonld)); + } + + public static CurieMappings loadMappingsFromFile(File jsonld) throws FileNotFoundException { + final Map curieMap = new HashMap(); + loadJsonldStream(new FileInputStream(jsonld), curieMap); + return new CurieMappings.SimpleCurieMappings(curieMap); + } + + public static CurieMappings loadDefaultMappings() { + final Map curieMap = new HashMap(); // TODO: we believe we only need obo_context and go_context // See: https://github.com/geneontology/go-site/issues/617 - loadJsonldResource("obo_context.jsonld", curieMap); + loadJsonldResource("obo_context.jsonld", curieMap); loadJsonldResource("go_context.jsonld", curieMap); - //loadJsonldResource("monarch_context.jsonld", curieMap); - //loadJsonldResource("amigo_context_gen.jsonld", curieMap); - 
//loadJsonldResource("amigo_context_manual.jsonld", curieMap); - return new CurieMappings.SimpleCurieMappings(curieMap); - } - - public static void loadJsonldStream(InputStream stream, Map curieMap) { - CurieMappings jsonldContext = CurieMappingsJsonld.loadJsonLdContext(stream); - curieMap.putAll(jsonldContext.getMappings()); - } - - public static void loadJsonldResource(String resource, Map curieMap) { - InputStream stream = loadResourceAsStream(resource); - if (stream != null) { - loadJsonldStream(stream, curieMap); - } - else { - LOG.error("Could not find resource for default curie map: " + stream); - } - } - - // package private for testing purposes - static InputStream loadResourceAsStream(String resource) { - InputStream stream = DefaultCurieHandler.class.getResourceAsStream(resource); - if (stream != null) { - return stream; - } - stream = ClassLoader.getSystemResourceAsStream(resource); - if (stream != null) { - return stream; - } - else if (resource.startsWith("/") == false) { - return loadResourceAsStream("/"+resource); - } - return stream; - } - + //loadJsonldResource("monarch_context.jsonld", curieMap); + //loadJsonldResource("amigo_context_gen.jsonld", curieMap); + //loadJsonldResource("amigo_context_manual.jsonld", curieMap); + return new CurieMappings.SimpleCurieMappings(curieMap); + } + + public static void loadJsonldStream(InputStream stream, Map curieMap) { + CurieMappings jsonldContext = CurieMappingsJsonld.loadJsonLdContext(stream); + curieMap.putAll(jsonldContext.getMappings()); + } + + public static void loadJsonldResource(String resource, Map curieMap) { + InputStream stream = loadResourceAsStream(resource); + if (stream != null) { + loadJsonldStream(stream, curieMap); + } else { + LOG.error("Could not find resource for default curie map: " + stream); + } + } + + // package private for testing purposes + static InputStream loadResourceAsStream(String resource) { + InputStream stream = DefaultCurieHandler.class.getResourceAsStream(resource); + if (stream != null) { + return stream; + } + stream = ClassLoader.getSystemResourceAsStream(resource); + if (stream != null) { + return stream; + } else if (resource.startsWith("/") == false) { + return loadResourceAsStream("/" + resource); + } + return stream; + } + } diff --git a/minerva-core/src/main/java/org/geneontology/minerva/curie/MappedCurieHandler.java b/minerva-core/src/main/java/org/geneontology/minerva/curie/MappedCurieHandler.java index c5b5815c..1fff42be 100644 --- a/minerva-core/src/main/java/org/geneontology/minerva/curie/MappedCurieHandler.java +++ b/minerva-core/src/main/java/org/geneontology/minerva/curie/MappedCurieHandler.java @@ -1,131 +1,123 @@ package org.geneontology.minerva.curie; +import com.google.common.collect.ImmutableBiMap; +import org.apache.commons.lang3.StringUtils; +import org.geneontology.minerva.MolecularModelManager.UnknownIdentifierException; +import org.semanticweb.owlapi.model.*; + import java.util.Collections; import java.util.HashMap; import java.util.Map; import java.util.Map.Entry; -import org.apache.commons.lang3.StringUtils; -import org.geneontology.minerva.MolecularModelManager.UnknownIdentifierException; -import org.semanticweb.owlapi.model.IRI; -import org.semanticweb.owlapi.model.OWLAnnotationProperty; -import org.semanticweb.owlapi.model.OWLClass; -import org.semanticweb.owlapi.model.OWLDataProperty; -import org.semanticweb.owlapi.model.OWLNamedIndividual; -import org.semanticweb.owlapi.model.OWLObjectProperty; +public class MappedCurieHandler implements CurieHandler { 
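// Editor's note (illustrative sketch, not part of this reformatting patch): the
// MappedCurieHandler hunk that continues below contracts IRIs to CURIEs via a
// longest-prefix match over an ImmutableBiMap and expands CURIEs back into IRIs.
// A minimal round-trip using only the constructor and methods shown in this file;
// the single "GO" prefix entry is a hypothetical stand-in for the JSON-LD contexts
// normally assembled by DefaultCurieHandler (previous hunk).
import java.util.Collections;
import org.geneontology.minerva.MolecularModelManager.UnknownIdentifierException;
import org.geneontology.minerva.curie.MappedCurieHandler;
import org.semanticweb.owlapi.model.IRI;

public class CurieRoundTripSketch {
    public static void main(String[] args) throws UnknownIdentifierException {
        MappedCurieHandler handler = new MappedCurieHandler(
                Collections.singletonMap("GO", "http://purl.obolibrary.org/obo/GO_"));
        IRI expanded = handler.getIRI("GO:0008150");   // known prefix -> full OBO IRI
        String contracted = handler.getCuri(expanded); // longest-prefix match -> "GO:0008150"
        System.out.println(expanded + " <-> " + contracted);
    }
}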
-import com.google.common.collect.ImmutableBiMap; + private final ImmutableBiMap curieMap; -public class MappedCurieHandler implements CurieHandler { + public MappedCurieHandler(CurieMappings... mappings) { + this(merge(mappings)); + } + + private static Map merge(CurieMappings[] mappings) { + if (mappings.length == 0) { + return Collections.emptyMap(); + } else if (mappings.length == 1) { + return mappings[0].getMappings(); + } else { + Map curieMap = new HashMap(); + for (CurieMappings mapping : mappings) { + curieMap.putAll(mapping.getMappings()); + } + return curieMap; + } + } + + public MappedCurieHandler(Map curieMap) { + super(); + this.curieMap = ImmutableBiMap.copyOf(curieMap); + } + + @Override + public String getCuri(OWLClass cls) { + return getCuri(cls.getIRI()); + } + + @Override + public String getCuri(OWLNamedIndividual i) { + return getCuri(i.getIRI()); + } + + @Override + public String getCuri(OWLObjectProperty p) { + return getCuri(p.getIRI()); + } + + @Override + public String getCuri(OWLDataProperty p) { + return getCuri(p.getIRI()); + } + + @Override + public String getCuri(OWLAnnotationProperty p) { + return getCuri(p.getIRI()); + } + + @Override + public String getCuri(IRI iri) { + String iriString = iri.toString(); + String curi = iriString; + + String longPrefix = null; + String shortPrefix = null; + // iterate over inverted map, find longest prefix match + for (Entry e : curieMap.inverse().entrySet()) { + String currentLongPrefix = e.getKey(); + int currentLongprefixLength = currentLongPrefix.length(); + if (iriString.startsWith(currentLongPrefix) && + iriString.length() > currentLongprefixLength) { + if (longPrefix == null || currentLongprefixLength > longPrefix.length()) { + longPrefix = currentLongPrefix; + shortPrefix = e.getValue(); + } + } + } + if (longPrefix != null) { + return shortPrefix + ":" + iriString.substring(longPrefix.length()); + } + return curi; + } + + @Override + public IRI getIRI(String curi) throws UnknownIdentifierException { + if (!curi.contains(":")) { + throw new UnknownIdentifierException("Relative IRIs are not allowed: " + curi); + } + String[] parts = StringUtils.split(curi, ":", 2); + if (parts.length == 2) { + String prefix = parts[0]; + String longPrefix = curieMap.get(prefix); + if (longPrefix != null) { + return IRI.create(longPrefix + curi.substring(prefix.length() + 1)); + } + } + if (curi.startsWith("http:") || curi.startsWith("https:") || curi.startsWith("urn:") || curi.startsWith("mailto:")) { + return IRI.create(curi); + } else { + throw new UnknownIdentifierException("Unknown URI protocol: " + curi); + } + } + + @Override + public Map getMappings() { + return curieMap; + } - private final ImmutableBiMap curieMap; - - public MappedCurieHandler(CurieMappings...mappings) { - this(merge(mappings)); - } - - private static Map merge(CurieMappings[] mappings) { - if (mappings.length == 0) { - return Collections.emptyMap(); - } - else if (mappings.length == 1) { - return mappings[0].getMappings(); - } - else { - Map curieMap = new HashMap(); - for (CurieMappings mapping : mappings) { - curieMap.putAll(mapping.getMappings()); - } - return curieMap; - } - } - - public MappedCurieHandler(Map curieMap) { - super(); - this.curieMap = ImmutableBiMap.copyOf(curieMap); - } - - @Override - public String getCuri(OWLClass cls) { - return getCuri(cls.getIRI()); - } - - @Override - public String getCuri(OWLNamedIndividual i) { - return getCuri(i.getIRI()); - } - - @Override - public String getCuri(OWLObjectProperty p) { - return 
getCuri(p.getIRI()); - } - - @Override - public String getCuri(OWLDataProperty p) { - return getCuri(p.getIRI()); - } - - @Override - public String getCuri(OWLAnnotationProperty p) { - return getCuri(p.getIRI()); - } - - @Override - public String getCuri(IRI iri) { - String iriString = iri.toString(); - String curi = iriString; - - String longPrefix = null; - String shortPrefix = null; - // iterate over inverted map, find longest prefix match - for (Entry e : curieMap.inverse().entrySet()) { - String currentLongPrefix = e.getKey(); - int currentLongprefixLength = currentLongPrefix.length(); - if (iriString.startsWith(currentLongPrefix) && - iriString.length() > currentLongprefixLength) { - if (longPrefix == null || currentLongprefixLength > longPrefix.length()) { - longPrefix = currentLongPrefix; - shortPrefix = e.getValue(); - } - } - } - if (longPrefix != null) { - return shortPrefix + ":" + iriString.substring(longPrefix.length()); - } - return curi; - } - - @Override - public IRI getIRI(String curi) throws UnknownIdentifierException { - if (!curi.contains(":")) { - throw new UnknownIdentifierException("Relative IRIs are not allowed: " + curi); - } - String[] parts = StringUtils.split(curi, ":", 2); - if (parts.length == 2) { - String prefix = parts[0]; - String longPrefix = curieMap.get(prefix); - if (longPrefix != null) { - return IRI.create(longPrefix + curi.substring(prefix.length() + 1)); - } - } - if (curi.startsWith("http:") || curi.startsWith("https:") || curi.startsWith("urn:") || curi.startsWith("mailto:")) { - return IRI.create(curi); - } else { - throw new UnknownIdentifierException("Unknown URI protocol: " + curi); - } - } - - @Override - public Map getMappings() { - return curieMap; - } - - /** - * package private for internal test purposes. - * - * @return map - */ - Map getInternalMappings() { - return curieMap; - } + /** + * package private for internal test purposes. 
+ * + * @return map + */ + Map getInternalMappings() { + return curieMap; + } } diff --git a/minerva-core/src/main/java/org/geneontology/minerva/json/InferenceProvider.java b/minerva-core/src/main/java/org/geneontology/minerva/json/InferenceProvider.java index cb1164cb..9d159874 100644 --- a/minerva-core/src/main/java/org/geneontology/minerva/json/InferenceProvider.java +++ b/minerva-core/src/main/java/org/geneontology/minerva/json/InferenceProvider.java @@ -1,18 +1,18 @@ package org.geneontology.minerva.json; -import java.util.Set; - import org.geneontology.minerva.validation.ValidationResultSet; import org.semanticweb.owlapi.model.OWLClass; import org.semanticweb.owlapi.model.OWLNamedIndividual; +import java.util.Set; + public interface InferenceProvider { - public boolean isConsistent(); - - public Set getTypes(OWLNamedIndividual i); - - public ValidationResultSet getValidation_results(); + public boolean isConsistent(); + + public Set getTypes(OWLNamedIndividual i); + + public ValidationResultSet getValidation_results(); - Set getAllTypes(OWLNamedIndividual i); + Set getAllTypes(OWLNamedIndividual i); } diff --git a/minerva-core/src/main/java/org/geneontology/minerva/json/JsonTools.java b/minerva-core/src/main/java/org/geneontology/minerva/json/JsonTools.java index 70565b83..d80b76fc 100644 --- a/minerva-core/src/main/java/org/geneontology/minerva/json/JsonTools.java +++ b/minerva-core/src/main/java/org/geneontology/minerva/json/JsonTools.java @@ -2,111 +2,100 @@ import org.geneontology.minerva.curie.CurieHandler; import org.geneontology.minerva.util.AnnotationShorthand; -import org.semanticweb.owlapi.model.IRI; -import org.semanticweb.owlapi.model.OWLAnnotationProperty; -import org.semanticweb.owlapi.model.OWLAnnotationValue; -import org.semanticweb.owlapi.model.OWLAnnotationValueVisitorEx; -import org.semanticweb.owlapi.model.OWLAnonymousIndividual; -import org.semanticweb.owlapi.model.OWLDataFactory; -import org.semanticweb.owlapi.model.OWLDataProperty; -import org.semanticweb.owlapi.model.OWLDatatype; -import org.semanticweb.owlapi.model.OWLLiteral; +import org.semanticweb.owlapi.model.*; import org.semanticweb.owlapi.vocab.OWL2Datatype; public class JsonTools { - private static final String VALUE_TYPE_IRI = "IRI"; - - public static JsonAnnotation create(OWLAnnotationProperty p, OWLAnnotationValue value, String label, CurieHandler curieHandler) { - AnnotationShorthand annotationShorthand = AnnotationShorthand.getShorthand(p.getIRI()); - if (annotationShorthand != null) { - // try to shorten IRIs for shorthand annotations - return create(annotationShorthand.getShorthand(), value, label, curieHandler); - } - return create(curieHandler.getCuri(p), value, label, curieHandler); - } - - public static JsonAnnotation create(OWLDataProperty p, OWLLiteral value, String label, CurieHandler curieHandler) { - String type = getType(value); - return JsonAnnotation.create(curieHandler.getCuri(p), value.getLiteral(), type, label); - } - - private static String getType(OWLLiteral literal) { - OWLDatatype datatype = literal.getDatatype(); - String type = null; - if (datatype.isString() || datatype.isRDFPlainLiteral()) { - // do nothing - } - else if (datatype.isBuiltIn()) { - type = datatype.getBuiltInDatatype().getPrefixedName(); - } - return type; - } - - private static JsonAnnotation create(final String key, OWLAnnotationValue value, String label, final CurieHandler curieHandler) { - return value.accept(new OWLAnnotationValueVisitorEx() { + private static final String VALUE_TYPE_IRI = "IRI"; - 
@Override - public JsonAnnotation visit(IRI iri) { - String iriString = curieHandler.getCuri(iri); - return JsonAnnotation.create(key, iriString, VALUE_TYPE_IRI, label); - } + public static JsonAnnotation create(OWLAnnotationProperty p, OWLAnnotationValue value, String label, CurieHandler curieHandler) { + AnnotationShorthand annotationShorthand = AnnotationShorthand.getShorthand(p.getIRI()); + if (annotationShorthand != null) { + // try to shorten IRIs for shorthand annotations + return create(annotationShorthand.getShorthand(), value, label, curieHandler); + } + return create(curieHandler.getCuri(p), value, label, curieHandler); + } - @Override - public JsonAnnotation visit(OWLAnonymousIndividual individual) { - return null; // do nothing - } + public static JsonAnnotation create(OWLDataProperty p, OWLLiteral value, String label, CurieHandler curieHandler) { + String type = getType(value); + return JsonAnnotation.create(curieHandler.getCuri(p), value.getLiteral(), type, label); + } - @Override - public JsonAnnotation visit(OWLLiteral literal) { - return JsonAnnotation.create(key, literal.getLiteral(), getType(literal), label); - } - }); - } - - public static JsonAnnotation create(AnnotationShorthand key, String value, String label) { - return JsonAnnotation.create(key.getShorthand(), value, null, label); - } - - private static boolean isIRIValue(JsonAnnotation ann) { - return VALUE_TYPE_IRI.equalsIgnoreCase(ann.valueType); - } - - public static OWLAnnotationValue createAnnotationValue(JsonAnnotation ann, OWLDataFactory f) { - OWLAnnotationValue annotationValue; - if (isIRIValue(ann)) { - annotationValue = IRI.create(ann.value); - } - else { - annotationValue = createLiteralInternal(ann, f); - } - return annotationValue; - } - - public static OWLLiteral createLiteral(JsonAnnotation ann, OWLDataFactory f) { - OWLLiteral literal = null; - if (isIRIValue(ann) == false) { - literal = createLiteralInternal(ann, f); - } - return literal; - } + private static String getType(OWLLiteral literal) { + OWLDatatype datatype = literal.getDatatype(); + String type = null; + if (datatype.isString() || datatype.isRDFPlainLiteral()) { + // do nothing + } else if (datatype.isBuiltIn()) { + type = datatype.getBuiltInDatatype().getPrefixedName(); + } + return type; + } - private static OWLLiteral createLiteralInternal(JsonAnnotation ann, OWLDataFactory f) { - OWLLiteral literal; - OWL2Datatype datatype = null; - for(OWL2Datatype current : OWL2Datatype.values()) { - if (current.getPrefixedName().equalsIgnoreCase(ann.valueType) - || current.getShortForm().equalsIgnoreCase(ann.valueType)) { - datatype = current; - break; - } - } - if (datatype != null) { - literal = f.getOWLLiteral(ann.value, datatype); - } - else { - literal = f.getOWLLiteral(ann.value); - } - return literal; - } + private static JsonAnnotation create(final String key, OWLAnnotationValue value, String label, final CurieHandler curieHandler) { + return value.accept(new OWLAnnotationValueVisitorEx() { + + @Override + public JsonAnnotation visit(IRI iri) { + String iriString = curieHandler.getCuri(iri); + return JsonAnnotation.create(key, iriString, VALUE_TYPE_IRI, label); + } + + @Override + public JsonAnnotation visit(OWLAnonymousIndividual individual) { + return null; // do nothing + } + + @Override + public JsonAnnotation visit(OWLLiteral literal) { + return JsonAnnotation.create(key, literal.getLiteral(), getType(literal), label); + } + }); + } + + public static JsonAnnotation create(AnnotationShorthand key, String value, String label) { + 
return JsonAnnotation.create(key.getShorthand(), value, null, label); + } + + private static boolean isIRIValue(JsonAnnotation ann) { + return VALUE_TYPE_IRI.equalsIgnoreCase(ann.valueType); + } + + public static OWLAnnotationValue createAnnotationValue(JsonAnnotation ann, OWLDataFactory f) { + OWLAnnotationValue annotationValue; + if (isIRIValue(ann)) { + annotationValue = IRI.create(ann.value); + } else { + annotationValue = createLiteralInternal(ann, f); + } + return annotationValue; + } + + public static OWLLiteral createLiteral(JsonAnnotation ann, OWLDataFactory f) { + OWLLiteral literal = null; + if (isIRIValue(ann) == false) { + literal = createLiteralInternal(ann, f); + } + return literal; + } + + private static OWLLiteral createLiteralInternal(JsonAnnotation ann, OWLDataFactory f) { + OWLLiteral literal; + OWL2Datatype datatype = null; + for (OWL2Datatype current : OWL2Datatype.values()) { + if (current.getPrefixedName().equalsIgnoreCase(ann.valueType) + || current.getShortForm().equalsIgnoreCase(ann.valueType)) { + datatype = current; + break; + } + } + if (datatype != null) { + literal = f.getOWLLiteral(ann.value, datatype); + } else { + literal = f.getOWLLiteral(ann.value); + } + return literal; + } } diff --git a/minerva-core/src/main/java/org/geneontology/minerva/json/MolecularModelJsonRenderer.java b/minerva-core/src/main/java/org/geneontology/minerva/json/MolecularModelJsonRenderer.java index e2e70967..8a4600c9 100644 --- a/minerva-core/src/main/java/org/geneontology/minerva/json/MolecularModelJsonRenderer.java +++ b/minerva-core/src/main/java/org/geneontology/minerva/json/MolecularModelJsonRenderer.java @@ -1,18 +1,7 @@ package org.geneontology.minerva.json; -import java.io.IOException; -import java.lang.reflect.Type; -import java.text.DateFormat; -import java.text.SimpleDateFormat; -import java.util.ArrayList; -import java.util.Collection; -import java.util.Collections; -import java.util.HashMap; -import java.util.HashSet; -import java.util.List; -import java.util.Map; -import java.util.Set; - +import com.google.gson.Gson; +import com.google.gson.GsonBuilder; import org.apache.commons.io.IOUtils; import org.apache.commons.lang3.tuple.Pair; import org.apache.log4j.Logger; @@ -21,603 +10,574 @@ import org.geneontology.minerva.ModelContainer; import org.geneontology.minerva.MolecularModelManager; import org.geneontology.minerva.curie.CurieHandler; -import org.semanticweb.owlapi.apibinding.OWLManager; -import org.semanticweb.owlapi.model.AxiomType; -import org.semanticweb.owlapi.model.IRI; -import org.semanticweb.owlapi.model.OWLAnnotation; -import org.semanticweb.owlapi.model.OWLAnnotationAssertionAxiom; -import org.semanticweb.owlapi.model.OWLAnnotationProperty; -import org.semanticweb.owlapi.model.OWLClass; -import org.semanticweb.owlapi.model.OWLClassExpression; -import org.semanticweb.owlapi.model.OWLDataProperty; -import org.semanticweb.owlapi.model.OWLDataPropertyAssertionAxiom; -import org.semanticweb.owlapi.model.OWLException; -import org.semanticweb.owlapi.model.OWLIndividual; -import org.semanticweb.owlapi.model.OWLNamedIndividual; -import org.semanticweb.owlapi.model.OWLNamedObject; -import org.semanticweb.owlapi.model.OWLObjectComplementOf; -import org.semanticweb.owlapi.model.OWLObjectIntersectionOf; -import org.semanticweb.owlapi.model.OWLObjectProperty; -import org.semanticweb.owlapi.model.OWLObjectPropertyAssertionAxiom; -import org.semanticweb.owlapi.model.OWLObjectPropertyExpression; -import org.semanticweb.owlapi.model.OWLObjectSomeValuesFrom; -import 
org.semanticweb.owlapi.model.OWLObjectUnionOf; -import org.semanticweb.owlapi.model.OWLOntology; -import org.semanticweb.owlapi.model.OWLOntologyAlreadyExistsException; -import org.semanticweb.owlapi.model.OWLOntologyCreationException; -import org.semanticweb.owlapi.model.OWLOntologyDocumentAlreadyExistsException; -import org.semanticweb.owlapi.model.OWLOntologyID; -import org.semanticweb.owlapi.model.OWLOntologyManager; +import org.semanticweb.owlapi.model.*; import org.semanticweb.owlapi.search.EntitySearcher; - -import com.google.gson.Gson; -import com.google.gson.GsonBuilder; - import owltools.gaf.eco.EcoMapper; import owltools.gaf.eco.EcoMapperFactory; import owltools.gaf.eco.EcoMapperFactory.OntologyMapperPair; import owltools.util.OwlHelper; +import java.io.IOException; +import java.lang.reflect.Type; +import java.text.DateFormat; +import java.text.SimpleDateFormat; +import java.util.*; + /** * A Renderer that takes a MolecularModel (an OWL ABox) and generates Map objects * that can be translated to JSON using Gson. - * + * * @author cjm */ public class MolecularModelJsonRenderer { - private static Logger LOG = Logger.getLogger(MolecularModelJsonRenderer.class); - - private final String modelId; - private final OWLOntology ont; - //TODO get rid of this graph entity - private MinervaOWLGraphWrapper graph; - private final CurieHandler curieHandler; - private final InferenceProvider inferenceProvider; - private BlazegraphOntologyManager go_lego_repo; - private Map> type_roots; - private Map class_label; - - public static final ThreadLocal AnnotationTypeDateFormat = new ThreadLocal(){ - - @Override - protected DateFormat initialValue() { - return new SimpleDateFormat("yyyy-MM-dd"); - } - - }; - - public MolecularModelJsonRenderer(ModelContainer model, InferenceProvider inferenceProvider, CurieHandler curieHandler) { - this(curieHandler.getCuri(model.getModelId()), - model.getAboxOntology(), - new MinervaOWLGraphWrapper(model.getAboxOntology()), - inferenceProvider, curieHandler); - } - - public MolecularModelJsonRenderer(String modelId, OWLOntology ontology, InferenceProvider inferenceProvider, CurieHandler curieHandler) { - this(modelId, ontology, new MinervaOWLGraphWrapper(ontology), inferenceProvider, curieHandler); - } - - public MolecularModelJsonRenderer(String modelId, MinervaOWLGraphWrapper graph, InferenceProvider inferenceProvider, CurieHandler curieHandler) { - this(modelId, graph.getSourceOntology(), graph, inferenceProvider, curieHandler); - } - - private MolecularModelJsonRenderer(String modelId, OWLOntology ont, MinervaOWLGraphWrapper graph, InferenceProvider inferenceProvider, CurieHandler curieHandler) { - super(); - this.modelId = modelId; - this.ont = ont; - this.graph = graph; - this.inferenceProvider = inferenceProvider; - this.curieHandler = curieHandler; - } - - public MolecularModelJsonRenderer(String modelId, OWLOntology ont, BlazegraphOntologyManager go_lego_repo, - InferenceProvider inferenceProvider, CurieHandler curieHandler) { - super(); - this.modelId = modelId; - this.ont = ont; - this.go_lego_repo = go_lego_repo; - this.inferenceProvider = inferenceProvider; - this.curieHandler = curieHandler; - } - - /** - * @return Map to be passed to Gson - */ - public JsonModel renderModel() { - JsonModel json = new JsonModel(); - json.modelId = modelId; - // per-Individual - //TODO this loop is the slowest part of the service response time. 
- List iObjs = new ArrayList(); - Set individuals = ont.getIndividualsInSignature(); - - if(go_lego_repo!=null) { - try { - type_roots = go_lego_repo.getSuperCategoryMapForIndividuals(individuals, ont, true); - } catch (IOException e) { - // TODO Auto-generated catch block - e.printStackTrace(); - } - //get all the labels ready for the ontology terms in the model - Set all_classes = new HashSet(); - for(OWLNamedIndividual ind : individuals) { - Collection ocs = EntitySearcher.getTypes(ind, ont); - if(ocs!=null) { - for(OWLClassExpression oc : ocs) { - if(!oc.isAnonymous()) { - all_classes.add(oc.asOWLClass().getIRI().toString()); - } - } - } - } - //also the root terms - if(type_roots!=null&&type_roots.values()!=null) { - for(Set roots : type_roots.values()) { - if(roots!=null) { - all_classes.addAll(roots); - } - } - } - if(all_classes!=null) { - try { - class_label = go_lego_repo.getLabels(all_classes); - } catch (IOException e) { - // TODO Auto-generated catch block - e.printStackTrace(); - } - } - } - for (OWLNamedIndividual i : individuals) { - iObjs.add(renderObject(i)); - } - json.individuals = iObjs.toArray(new JsonOwlIndividual[iObjs.size()]); - // per-Assertion - Set usedProps = new HashSet(); - List aObjs = new ArrayList(); - for (OWLObjectPropertyAssertionAxiom opa : ont.getAxioms(AxiomType.OBJECT_PROPERTY_ASSERTION)) { - JsonOwlFact fact = renderObject(opa); - if (fact != null) { - aObjs.add(fact); - usedProps.addAll(opa.getObjectPropertiesInSignature()); - } - } - json.facts = aObjs.toArray(new JsonOwlFact[aObjs.size()]); - JsonAnnotation[] anObjs = renderAnnotations(ont.getAnnotations(), curieHandler); - if (anObjs != null && anObjs.length > 0) { - json.annotations = anObjs; - } - return json; - - } - - public static JsonAnnotation[] renderModelAnnotations(OWLOntology ont, CurieHandler curieHandler) { - JsonAnnotation[] anObjs = renderAnnotations(ont.getAnnotations(), curieHandler); - return anObjs; - } - - private static JsonAnnotation[] renderAnnotations(Set annotations, CurieHandler curieHandler) { - List anObjs = new ArrayList(); - for (OWLAnnotation annotation : annotations) { - JsonAnnotation json = JsonTools.create(annotation.getProperty(), annotation.getValue(), null, curieHandler); - if (json != null) { - anObjs.add(json); - } - } - return anObjs.toArray(new JsonAnnotation[anObjs.size()]); - } - - public Pair renderIndividuals(Collection individuals) { - - //add root types in case these are new to the model - if(go_lego_repo!=null) { - try { - if(type_roots==null) { - type_roots = new HashMap>(); - } - Map> t_r = go_lego_repo.getSuperCategoryMapForIndividuals(new HashSet(individuals), ont, true); - if(t_r!=null) { - type_roots.putAll(t_r); - } - } catch (IOException e) { - // TODO Auto-generated catch block - e.printStackTrace(); - } - } - - List iObjs = new ArrayList(); - Set individualIds = new HashSet(); - final Set opAxioms = new HashSet(); - for (OWLIndividual i : individuals) { - if (i instanceof OWLNamedIndividual) { - OWLNamedIndividual named = (OWLNamedIndividual)i; - iObjs.add(renderObject(named)); - individualIds.add(named); - } - } - - // filter object property axioms. 
Only retain axioms which use individuals from the given subset - for (OWLNamedIndividual i : individualIds) { - Set axioms = ont.getObjectPropertyAssertionAxioms(i); - for (OWLObjectPropertyAssertionAxiom opa : axioms) { - OWLIndividual object = opa.getObject(); - if (individualIds.contains(object)) { - opAxioms.add(opa); - } - } - } - List aObjs = new ArrayList(); - for (OWLObjectPropertyAssertionAxiom opa : opAxioms) { - JsonOwlFact fact = renderObject(opa); - if (fact != null) { - aObjs.add(fact); - } - } - - return Pair.of(iObjs.toArray(new JsonOwlIndividual[iObjs.size()]), - aObjs.toArray(new JsonOwlFact[aObjs.size()])); - } - - /** - //TODO this is slow, speed it up. The slowest part of the service, including reasoning and validation. - * @param i - * @return Map to be passed to Gson - */ - public JsonOwlIndividual renderObject(OWLNamedIndividual i) { - JsonOwlIndividual json = new JsonOwlIndividual(); - json.id = curieHandler.getCuri(i); - List typeObjs = new ArrayList(); - Set assertedTypes = OwlHelper.getTypes(i, ont); - for (OWLClassExpression x : assertedTypes) { - typeObjs.add(renderObject(x)); - } - json.type = typeObjs.toArray(new JsonOwlObject[typeObjs.size()]); - - //if we have it, add the root type for the individual - List rootTypes = new ArrayList(); - if(type_roots!=null&&(type_roots.get(i)!=null)) { - for(String root_type : type_roots.get(i)) { - OWLClass root_class = ont.getOWLOntologyManager().getOWLDataFactory().getOWLClass(IRI.create(root_type)); - //this takes a lot of time... - rootTypes.add(renderObject(root_class)); - } - } - json.rootType = rootTypes.toArray(new JsonOwlObject[rootTypes.size()]); - - //add direct inferred type information - if (inferenceProvider != null && inferenceProvider.isConsistent()) { - List inferredTypeObjs = new ArrayList(); - Set inferredTypes = inferenceProvider.getTypes(i); - // optimization, do not render inferences, if they are equal to the asserted ones - if (assertedTypes.equals(inferredTypes) == false) { - for(OWLClass c : inferredTypes) { - if (c.isBuiltIn() == false) { - inferredTypeObjs.add(renderObject(c)); - } - } - } - if (inferredTypeObjs.isEmpty() == false) { - json.inferredType = inferredTypeObjs.toArray(new JsonOwlObject[inferredTypeObjs.size()]); - } - //testing approach to adding additional type information to response - //this works but ends up going extremely slowly when a lot of inferences are happening - //since its not being consumed anywhere now, leaving it out speeds things up considerably - // List inferredTypeObjsWithAll = new ArrayList(); - // //TODO this is particularly slow as there can be a lot of inferred types - // Set inferredTypesWithAll = inferenceProvider.getAllTypes(i); - // // optimization, do not render inferences, if they are equal to the asserted ones - // if (assertedTypes.equals(inferredTypesWithAll) == false) { - // for(OWLClass c : inferredTypesWithAll) { - // if (c.isBuiltIn() == false) { - // inferredTypeObjsWithAll.add(renderObject(c)); - // } - // } - // } - // if (inferredTypeObjsWithAll.isEmpty() == false) { - // json.inferredTypeWithAll = inferredTypeObjsWithAll.toArray(new JsonOwlObject[inferredTypeObjsWithAll.size()]); - // } - - - } - final List anObjs = new ArrayList(); - Set annotationAxioms = ont.getAnnotationAssertionAxioms(i.getIRI()); - for (OWLAnnotationAssertionAxiom ax : annotationAxioms) { - JsonAnnotation jsonAnn = JsonTools.create(ax.getProperty(), ax.getValue(), null, curieHandler); - if (jsonAnn != null) { - anObjs.add(jsonAnn); - } - } - Set dataPropertyAxioms = 
ont.getDataPropertyAssertionAxioms(i); - for (OWLDataPropertyAssertionAxiom ax : dataPropertyAxioms) { - OWLDataProperty property = ax.getProperty().asOWLDataProperty(); - JsonAnnotation jsonAnn = JsonTools.create(property, ax.getObject(), null, curieHandler); - if (jsonAnn != null) { - anObjs.add(jsonAnn); - } - } - - if (anObjs.isEmpty() == false) { - json.annotations = anObjs.toArray(new JsonAnnotation[anObjs.size()]); - } - return json; - } - - /** - * @param opa - * @return Map to be passed to Gson - */ - public JsonOwlFact renderObject(OWLObjectPropertyAssertionAxiom opa) { - OWLNamedIndividual subject; - OWLObjectProperty property; - OWLNamedIndividual object; - - JsonOwlFact fact = null; - if (opa.getSubject().isNamed() && opa.getObject().isNamed() && opa.getProperty().isAnonymous() == false) { - subject = opa.getSubject().asOWLNamedIndividual(); - property = opa.getProperty().asOWLObjectProperty(); - object = opa.getObject().asOWLNamedIndividual(); - - fact = new JsonOwlFact(); - fact.subject = curieHandler.getCuri(subject); - fact.property = curieHandler.getCuri(property); - if(graph==null&&go_lego_repo!=null) { - try { - fact.propertyLabel = go_lego_repo.getLabel(property); - } catch (IOException e) { - // TODO Auto-generated catch block - e.printStackTrace(); - } - }else { - fact.propertyLabel = graph.getLabel(property); - } - if(fact.propertyLabel==null) { - fact.propertyLabel = curieHandler.getCuri(property); - } - fact.object = curieHandler.getCuri(object); - - JsonAnnotation[] anObjs = renderAnnotations(opa.getAnnotations(), curieHandler); - if (anObjs != null && anObjs.length > 0) { - fact.annotations = anObjs; - } - } - return fact; - } - - public JsonOwlObject renderObject(OWLObjectProperty p) { - String id = curieHandler.getCuri(p); - String label = getLabel(p, id); - JsonOwlObject json = JsonOwlObject.createProperty(id, label); - return json; - } - - private JsonOwlObject renderObject(OWLObjectPropertyExpression p) { - if (p.isAnonymous()) { - return null; - } - return renderObject(p.asOWLObjectProperty()); - } - /** - * @param x - * @return Object to be passed to Gson - */ - private JsonOwlObject renderObject(OWLClassExpression x) { - if (x.isAnonymous()) { - JsonOwlObject json = null; - if (x instanceof OWLObjectIntersectionOf) { - List expressions = new ArrayList(); - for (OWLClassExpression y : ((OWLObjectIntersectionOf)x).getOperands()) { - expressions.add(renderObject(y)); - } - json = JsonOwlObject.createIntersection(expressions); - } - else if (x instanceof OWLObjectUnionOf) { - List expressions = new ArrayList(); - for (OWLClassExpression y : ((OWLObjectUnionOf)x).getOperands()) { - expressions.add(renderObject(y)); - } - json = JsonOwlObject.createUnion(expressions); - } - else if (x instanceof OWLObjectSomeValuesFrom) { - OWLObjectSomeValuesFrom svf = (OWLObjectSomeValuesFrom)x; - JsonOwlObject prop = renderObject(svf.getProperty()); - JsonOwlObject filler = renderObject(svf.getFiller()); - if (prop != null && filler != null) { - json = JsonOwlObject.createSvf(prop, filler); - } - } - else if (x instanceof OWLObjectComplementOf) { - OWLObjectComplementOf comp = (OWLObjectComplementOf) x; - OWLClassExpression operand = comp.getOperand(); - JsonOwlObject operandJson = renderObject(operand); - if (operandJson != null) { - json = JsonOwlObject.createComplement(operandJson); - } - } - else { - // TODO - } - return json; - } - else { - return renderObject(x.asOWLClass()); - } - } - - private JsonOwlObject renderObject(OWLClass cls) { - String id = 
curieHandler.getCuri(cls); - JsonOwlObject json = JsonOwlObject.createCls(id, getLabel(cls, id)); - return json; - } - - protected String getLabel(OWLNamedObject i, String id) { - String label = null; - if(class_label!=null&&class_label.containsKey(i.getIRI().toString())) { - label = class_label.get(i.getIRI().toString()); - } else if(graph!=null) { - label = graph.getLabel(i); - } else if(go_lego_repo!=null) { - try { - label = go_lego_repo.getLabel(i); - } catch (IOException e) { - // TODO Auto-generated catch block - e.printStackTrace(); - } - } - return label; - } - - - public static Pair,List> renderProperties(MolecularModelManager mmm, Set importantRelations, CurieHandler curieHandler) throws OWLOntologyCreationException { - /* [{ - * id: {String} - * label: {String} - * relevant: {boolean} // flag to indicate if this is a relation to be used in the model - * ?color: {String} // TODO in the future? - * ?glyph: {String} // TODO in the future? - * }] - */ - // retrieve (or load) all ontologies - // put in a new wrapper - MinervaOWLGraphWrapper wrapper = new MinervaOWLGraphWrapper(mmm.getOntology()); - Collection imports = mmm.getImports(); - OWLOntologyManager manager = wrapper.getManager(); - for (IRI iri : imports) { - OWLOntology ontology = manager.getOntology(iri); - if (ontology == null) { - // only try to load it, if it isn't already loaded - try { - ontology = manager.loadOntology(iri); - } catch (OWLOntologyDocumentAlreadyExistsException e) { - IRI existing = e.getOntologyDocumentIRI(); - ontology = manager.getOntology(existing); - } catch (OWLOntologyAlreadyExistsException e) { - OWLOntologyID id = e.getOntologyID(); - ontology = manager.getOntology(id); - } - } - if (ontology == null) { - LOG.warn("Could not find an ontology for IRI: "+iri); - } - else { - wrapper.addSupportOntology(ontology); - } - } - - // get all properties from all loaded ontologies - Set properties = new HashSet(); - Set dataProperties = new HashSet(); - Set allOntologies = wrapper.getAllOntologies(); - for(OWLOntology o : allOntologies) { - properties.addAll(o.getObjectPropertiesInSignature()); - dataProperties.addAll(o.getDataPropertiesInSignature()); - } - - // sort properties - List propertyList = new ArrayList(properties); - List dataPropertyList = new ArrayList(dataProperties); - Collections.sort(propertyList); - Collections.sort(dataPropertyList); - - // retrieve id and label for all properties - List relList = new ArrayList(); - for (OWLObjectProperty p : propertyList) { - if (p.isBuiltIn()) { - // skip owl:topObjectProperty - continue; - } - JsonRelationInfo json = new JsonRelationInfo(); - json.id = curieHandler.getCuri(p); - json.label = wrapper.getLabel(p); - if (importantRelations != null && (importantRelations.contains(p))) { - json.relevant = true; - } - else { - json.relevant = false; - } - relList.add(json); - } - - // retrieve id and label for all data properties - List dataList = new ArrayList(); - for(OWLDataProperty p : dataPropertyList) { - if(p.isBuiltIn()) { - continue; - } - JsonRelationInfo json = new JsonRelationInfo(); - json.id = curieHandler.getCuri(p); - json.label = wrapper.getLabel(p); - dataList.add(json); - } - IOUtils.closeQuietly(wrapper); - return Pair.of(relList, dataList); - } - - public static List renderEvidences(MolecularModelManager mmm, CurieHandler curieHandler) throws OWLException, IOException { - return renderEvidences(mmm.getOntology().getOWLOntologyManager(), curieHandler); - } - - private static final Object ecoMutex = new Object(); - private static 
volatile OntologyMapperPair eco = null; - - public static List renderEvidences(OWLOntologyManager manager, CurieHandler curieHandler) throws OWLException, IOException { - // TODO remove the hard coded ECO dependencies - OntologyMapperPair pair; - synchronized (ecoMutex) { - if (eco == null) { - eco = EcoMapperFactory.createEcoMapper(manager); - } - pair = eco; - } - final MinervaOWLGraphWrapper graph = pair.getGraph(); - final EcoMapper mapper = pair.getMapper(); - Set ecoClasses = graph.getAllOWLClasses(); - Map codesForEcoClasses = mapper.getCodesForEcoClasses(); - List relList = new ArrayList(); - for (OWLClass ecoClass : ecoClasses) { - if (ecoClass.isBuiltIn()) { - continue; - } - JsonEvidenceInfo json = new JsonEvidenceInfo(); - json.id = curieHandler.getCuri(ecoClass); - json.label = graph.getLabel(ecoClass); - String code = codesForEcoClasses.get(ecoClass); - if (code != null) { - json.code = code; - } - relList.add(json); - } - return relList; - } - - public static String renderToJson(String modelId, OWLOntology ont, InferenceProvider inferenceProvider, CurieHandler curieHandler) { - return renderToJson(modelId, ont, inferenceProvider, curieHandler, false); - } - - public static String renderToJson(String modelId, OWLOntology ont, InferenceProvider inferenceProvider, CurieHandler curieHandler, boolean prettyPrint) { - MolecularModelJsonRenderer r = new MolecularModelJsonRenderer(modelId, ont, inferenceProvider, curieHandler); - JsonModel model = r.renderModel(); - return renderToJson(model, prettyPrint); - } - - public static String renderToJson(Object model, boolean prettyPrint) { - GsonBuilder builder = new GsonBuilder(); - if (prettyPrint) { - builder = builder.setPrettyPrinting(); - } - Gson gson = builder.create(); - String json = gson.toJson(model); - return json; - } - - public static T parseFromJson(String json, Class type) { - Gson gson = new GsonBuilder().create(); - T result = gson.fromJson(json, type); - return result; - } - - public static T[] parseFromJson(String requestString, Type requestType) { - Gson gson = new GsonBuilder().create(); - return gson.fromJson(requestString, requestType); - } + private static Logger LOG = Logger.getLogger(MolecularModelJsonRenderer.class); + + private final String modelId; + private final OWLOntology ont; + //TODO get rid of this graph entity + private MinervaOWLGraphWrapper graph; + private final CurieHandler curieHandler; + private final InferenceProvider inferenceProvider; + private BlazegraphOntologyManager go_lego_repo; + private Map> type_roots; + private Map class_label; + + public static final ThreadLocal AnnotationTypeDateFormat = new ThreadLocal() { + + @Override + protected DateFormat initialValue() { + return new SimpleDateFormat("yyyy-MM-dd"); + } + + }; + + public MolecularModelJsonRenderer(ModelContainer model, InferenceProvider inferenceProvider, CurieHandler curieHandler) { + this(curieHandler.getCuri(model.getModelId()), + model.getAboxOntology(), + new MinervaOWLGraphWrapper(model.getAboxOntology()), + inferenceProvider, curieHandler); + } + + public MolecularModelJsonRenderer(String modelId, OWLOntology ontology, InferenceProvider inferenceProvider, CurieHandler curieHandler) { + this(modelId, ontology, new MinervaOWLGraphWrapper(ontology), inferenceProvider, curieHandler); + } + + public MolecularModelJsonRenderer(String modelId, MinervaOWLGraphWrapper graph, InferenceProvider inferenceProvider, CurieHandler curieHandler) { + this(modelId, graph.getSourceOntology(), graph, inferenceProvider, curieHandler); + } + 
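// Editor's note (illustrative sketch, not part of this reformatting patch): the
// constructors above and the static renderToJson(...) helpers near the end of this
// file are the renderer's public entry points. A minimal invocation, assuming an
// empty ABox ontology, a null InferenceProvider (the renderer null-checks it), and
// the default CURIE handler; the model IRI and "gomodel:example" id are hypothetical.
import org.geneontology.minerva.curie.CurieHandler;
import org.geneontology.minerva.curie.DefaultCurieHandler;
import org.geneontology.minerva.json.MolecularModelJsonRenderer;
import org.semanticweb.owlapi.apibinding.OWLManager;
import org.semanticweb.owlapi.model.IRI;
import org.semanticweb.owlapi.model.OWLOntology;
import org.semanticweb.owlapi.model.OWLOntologyCreationException;

public class RenderModelSketch {
    public static void main(String[] args) throws OWLOntologyCreationException {
        OWLOntology abox = OWLManager.createOWLOntologyManager()
                .createOntology(IRI.create("http://model.geneontology.org/example"));
        CurieHandler curies = DefaultCurieHandler.getDefaultHandler();
        String json = MolecularModelJsonRenderer.renderToJson(
                "gomodel:example", abox, null, curies, true);
        System.out.println(json); // pretty-printed JSON with modelId, individuals, facts
    }
}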
+ private MolecularModelJsonRenderer(String modelId, OWLOntology ont, MinervaOWLGraphWrapper graph, InferenceProvider inferenceProvider, CurieHandler curieHandler) { + super(); + this.modelId = modelId; + this.ont = ont; + this.graph = graph; + this.inferenceProvider = inferenceProvider; + this.curieHandler = curieHandler; + } + + public MolecularModelJsonRenderer(String modelId, OWLOntology ont, BlazegraphOntologyManager go_lego_repo, + InferenceProvider inferenceProvider, CurieHandler curieHandler) { + super(); + this.modelId = modelId; + this.ont = ont; + this.go_lego_repo = go_lego_repo; + this.inferenceProvider = inferenceProvider; + this.curieHandler = curieHandler; + } + + /** + * @return Map to be passed to Gson + */ + public JsonModel renderModel() { + JsonModel json = new JsonModel(); + json.modelId = modelId; + // per-Individual + //TODO this loop is the slowest part of the service response time. + List iObjs = new ArrayList(); + Set individuals = ont.getIndividualsInSignature(); + + if (go_lego_repo != null) { + try { + type_roots = go_lego_repo.getSuperCategoryMapForIndividuals(individuals, ont, true); + } catch (IOException e) { + // TODO Auto-generated catch block + e.printStackTrace(); + } + //get all the labels ready for the ontology terms in the model + Set all_classes = new HashSet(); + for (OWLNamedIndividual ind : individuals) { + Collection ocs = EntitySearcher.getTypes(ind, ont); + if (ocs != null) { + for (OWLClassExpression oc : ocs) { + if (!oc.isAnonymous()) { + all_classes.add(oc.asOWLClass().getIRI().toString()); + } + } + } + } + //also the root terms + if (type_roots != null && type_roots.values() != null) { + for (Set roots : type_roots.values()) { + if (roots != null) { + all_classes.addAll(roots); + } + } + } + if (all_classes != null) { + try { + class_label = go_lego_repo.getLabels(all_classes); + } catch (IOException e) { + // TODO Auto-generated catch block + e.printStackTrace(); + } + } + } + for (OWLNamedIndividual i : individuals) { + iObjs.add(renderObject(i)); + } + json.individuals = iObjs.toArray(new JsonOwlIndividual[iObjs.size()]); + // per-Assertion + Set usedProps = new HashSet(); + List aObjs = new ArrayList(); + for (OWLObjectPropertyAssertionAxiom opa : ont.getAxioms(AxiomType.OBJECT_PROPERTY_ASSERTION)) { + JsonOwlFact fact = renderObject(opa); + if (fact != null) { + aObjs.add(fact); + usedProps.addAll(opa.getObjectPropertiesInSignature()); + } + } + json.facts = aObjs.toArray(new JsonOwlFact[aObjs.size()]); + JsonAnnotation[] anObjs = renderAnnotations(ont.getAnnotations(), curieHandler); + if (anObjs != null && anObjs.length > 0) { + json.annotations = anObjs; + } + return json; + + } + + public static JsonAnnotation[] renderModelAnnotations(OWLOntology ont, CurieHandler curieHandler) { + JsonAnnotation[] anObjs = renderAnnotations(ont.getAnnotations(), curieHandler); + return anObjs; + } + + private static JsonAnnotation[] renderAnnotations(Set annotations, CurieHandler curieHandler) { + List anObjs = new ArrayList(); + for (OWLAnnotation annotation : annotations) { + JsonAnnotation json = JsonTools.create(annotation.getProperty(), annotation.getValue(), null, curieHandler); + if (json != null) { + anObjs.add(json); + } + } + return anObjs.toArray(new JsonAnnotation[anObjs.size()]); + } + + public Pair renderIndividuals(Collection individuals) { + + //add root types in case these are new to the model + if (go_lego_repo != null) { + try { + if (type_roots == null) { + type_roots = new HashMap>(); + } + Map> t_r = 
go_lego_repo.getSuperCategoryMapForIndividuals(new HashSet(individuals), ont, true); + if (t_r != null) { + type_roots.putAll(t_r); + } + } catch (IOException e) { + // TODO Auto-generated catch block + e.printStackTrace(); + } + } + + List iObjs = new ArrayList(); + Set individualIds = new HashSet(); + final Set opAxioms = new HashSet(); + for (OWLIndividual i : individuals) { + if (i instanceof OWLNamedIndividual) { + OWLNamedIndividual named = (OWLNamedIndividual) i; + iObjs.add(renderObject(named)); + individualIds.add(named); + } + } + + // filter object property axioms. Only retain axioms which use individuals from the given subset + for (OWLNamedIndividual i : individualIds) { + Set axioms = ont.getObjectPropertyAssertionAxioms(i); + for (OWLObjectPropertyAssertionAxiom opa : axioms) { + OWLIndividual object = opa.getObject(); + if (individualIds.contains(object)) { + opAxioms.add(opa); + } + } + } + List aObjs = new ArrayList(); + for (OWLObjectPropertyAssertionAxiom opa : opAxioms) { + JsonOwlFact fact = renderObject(opa); + if (fact != null) { + aObjs.add(fact); + } + } + + return Pair.of(iObjs.toArray(new JsonOwlIndividual[iObjs.size()]), + aObjs.toArray(new JsonOwlFact[aObjs.size()])); + } + + /** + * //TODO this is slow, speed it up. The slowest part of the service, including reasoning and validation. + * + * @param i + * @return Map to be passed to Gson + */ + public JsonOwlIndividual renderObject(OWLNamedIndividual i) { + JsonOwlIndividual json = new JsonOwlIndividual(); + json.id = curieHandler.getCuri(i); + List typeObjs = new ArrayList(); + Set assertedTypes = OwlHelper.getTypes(i, ont); + for (OWLClassExpression x : assertedTypes) { + typeObjs.add(renderObject(x)); + } + json.type = typeObjs.toArray(new JsonOwlObject[typeObjs.size()]); + + //if we have it, add the root type for the individual + List rootTypes = new ArrayList(); + if (type_roots != null && (type_roots.get(i) != null)) { + for (String root_type : type_roots.get(i)) { + OWLClass root_class = ont.getOWLOntologyManager().getOWLDataFactory().getOWLClass(IRI.create(root_type)); + //this takes a lot of time... 
+ rootTypes.add(renderObject(root_class)); + } + } + json.rootType = rootTypes.toArray(new JsonOwlObject[rootTypes.size()]); + + //add direct inferred type information + if (inferenceProvider != null && inferenceProvider.isConsistent()) { + List inferredTypeObjs = new ArrayList(); + Set inferredTypes = inferenceProvider.getTypes(i); + // optimization, do not render inferences, if they are equal to the asserted ones + if (assertedTypes.equals(inferredTypes) == false) { + for (OWLClass c : inferredTypes) { + if (c.isBuiltIn() == false) { + inferredTypeObjs.add(renderObject(c)); + } + } + } + if (inferredTypeObjs.isEmpty() == false) { + json.inferredType = inferredTypeObjs.toArray(new JsonOwlObject[inferredTypeObjs.size()]); + } + //testing approach to adding additional type information to response + //this works but ends up going extremely slowly when a lot of inferences are happening + //since its not being consumed anywhere now, leaving it out speeds things up considerably + // List inferredTypeObjsWithAll = new ArrayList(); + // //TODO this is particularly slow as there can be a lot of inferred types + // Set inferredTypesWithAll = inferenceProvider.getAllTypes(i); + // // optimization, do not render inferences, if they are equal to the asserted ones + // if (assertedTypes.equals(inferredTypesWithAll) == false) { + // for(OWLClass c : inferredTypesWithAll) { + // if (c.isBuiltIn() == false) { + // inferredTypeObjsWithAll.add(renderObject(c)); + // } + // } + // } + // if (inferredTypeObjsWithAll.isEmpty() == false) { + // json.inferredTypeWithAll = inferredTypeObjsWithAll.toArray(new JsonOwlObject[inferredTypeObjsWithAll.size()]); + // } + + + } + final List anObjs = new ArrayList(); + Set annotationAxioms = ont.getAnnotationAssertionAxioms(i.getIRI()); + for (OWLAnnotationAssertionAxiom ax : annotationAxioms) { + JsonAnnotation jsonAnn = JsonTools.create(ax.getProperty(), ax.getValue(), null, curieHandler); + if (jsonAnn != null) { + anObjs.add(jsonAnn); + } + } + Set dataPropertyAxioms = ont.getDataPropertyAssertionAxioms(i); + for (OWLDataPropertyAssertionAxiom ax : dataPropertyAxioms) { + OWLDataProperty property = ax.getProperty().asOWLDataProperty(); + JsonAnnotation jsonAnn = JsonTools.create(property, ax.getObject(), null, curieHandler); + if (jsonAnn != null) { + anObjs.add(jsonAnn); + } + } + + if (anObjs.isEmpty() == false) { + json.annotations = anObjs.toArray(new JsonAnnotation[anObjs.size()]); + } + return json; + } + + /** + * @param opa + * @return Map to be passed to Gson + */ + public JsonOwlFact renderObject(OWLObjectPropertyAssertionAxiom opa) { + OWLNamedIndividual subject; + OWLObjectProperty property; + OWLNamedIndividual object; + + JsonOwlFact fact = null; + if (opa.getSubject().isNamed() && opa.getObject().isNamed() && opa.getProperty().isAnonymous() == false) { + subject = opa.getSubject().asOWLNamedIndividual(); + property = opa.getProperty().asOWLObjectProperty(); + object = opa.getObject().asOWLNamedIndividual(); + + fact = new JsonOwlFact(); + fact.subject = curieHandler.getCuri(subject); + fact.property = curieHandler.getCuri(property); + if (graph == null && go_lego_repo != null) { + try { + fact.propertyLabel = go_lego_repo.getLabel(property); + } catch (IOException e) { + // TODO Auto-generated catch block + e.printStackTrace(); + } + } else { + fact.propertyLabel = graph.getLabel(property); + } + if (fact.propertyLabel == null) { + fact.propertyLabel = curieHandler.getCuri(property); + } + fact.object = curieHandler.getCuri(object); + + JsonAnnotation[] 
anObjs = renderAnnotations(opa.getAnnotations(), curieHandler); + if (anObjs != null && anObjs.length > 0) { + fact.annotations = anObjs; + } + } + return fact; + } + + public JsonOwlObject renderObject(OWLObjectProperty p) { + String id = curieHandler.getCuri(p); + String label = getLabel(p, id); + JsonOwlObject json = JsonOwlObject.createProperty(id, label); + return json; + } + + private JsonOwlObject renderObject(OWLObjectPropertyExpression p) { + if (p.isAnonymous()) { + return null; + } + return renderObject(p.asOWLObjectProperty()); + } + + /** + * @param x + * @return Object to be passed to Gson + */ + private JsonOwlObject renderObject(OWLClassExpression x) { + if (x.isAnonymous()) { + JsonOwlObject json = null; + if (x instanceof OWLObjectIntersectionOf) { + List expressions = new ArrayList(); + for (OWLClassExpression y : ((OWLObjectIntersectionOf) x).getOperands()) { + expressions.add(renderObject(y)); + } + json = JsonOwlObject.createIntersection(expressions); + } else if (x instanceof OWLObjectUnionOf) { + List expressions = new ArrayList(); + for (OWLClassExpression y : ((OWLObjectUnionOf) x).getOperands()) { + expressions.add(renderObject(y)); + } + json = JsonOwlObject.createUnion(expressions); + } else if (x instanceof OWLObjectSomeValuesFrom) { + OWLObjectSomeValuesFrom svf = (OWLObjectSomeValuesFrom) x; + JsonOwlObject prop = renderObject(svf.getProperty()); + JsonOwlObject filler = renderObject(svf.getFiller()); + if (prop != null && filler != null) { + json = JsonOwlObject.createSvf(prop, filler); + } + } else if (x instanceof OWLObjectComplementOf) { + OWLObjectComplementOf comp = (OWLObjectComplementOf) x; + OWLClassExpression operand = comp.getOperand(); + JsonOwlObject operandJson = renderObject(operand); + if (operandJson != null) { + json = JsonOwlObject.createComplement(operandJson); + } + } else { + // TODO + } + return json; + } else { + return renderObject(x.asOWLClass()); + } + } + + private JsonOwlObject renderObject(OWLClass cls) { + String id = curieHandler.getCuri(cls); + JsonOwlObject json = JsonOwlObject.createCls(id, getLabel(cls, id)); + return json; + } + + protected String getLabel(OWLNamedObject i, String id) { + String label = null; + if (class_label != null && class_label.containsKey(i.getIRI().toString())) { + label = class_label.get(i.getIRI().toString()); + } else if (graph != null) { + label = graph.getLabel(i); + } else if (go_lego_repo != null) { + try { + label = go_lego_repo.getLabel(i); + } catch (IOException e) { + // TODO Auto-generated catch block + e.printStackTrace(); + } + } + return label; + } + + + public static Pair, List> renderProperties(MolecularModelManager mmm, Set importantRelations, CurieHandler curieHandler) throws OWLOntologyCreationException { + /* [{ + * id: {String} + * label: {String} + * relevant: {boolean} // flag to indicate if this is a relation to be used in the model + * ?color: {String} // TODO in the future? + * ?glyph: {String} // TODO in the future? 
+ * }] + */ + // retrieve (or load) all ontologies + // put in a new wrapper + MinervaOWLGraphWrapper wrapper = new MinervaOWLGraphWrapper(mmm.getOntology()); + Collection imports = mmm.getImports(); + OWLOntologyManager manager = wrapper.getManager(); + for (IRI iri : imports) { + OWLOntology ontology = manager.getOntology(iri); + if (ontology == null) { + // only try to load it, if it isn't already loaded + try { + ontology = manager.loadOntology(iri); + } catch (OWLOntologyDocumentAlreadyExistsException e) { + IRI existing = e.getOntologyDocumentIRI(); + ontology = manager.getOntology(existing); + } catch (OWLOntologyAlreadyExistsException e) { + OWLOntologyID id = e.getOntologyID(); + ontology = manager.getOntology(id); + } + } + if (ontology == null) { + LOG.warn("Could not find an ontology for IRI: " + iri); + } else { + wrapper.addSupportOntology(ontology); + } + } + + // get all properties from all loaded ontologies + Set properties = new HashSet(); + Set dataProperties = new HashSet(); + Set allOntologies = wrapper.getAllOntologies(); + for (OWLOntology o : allOntologies) { + properties.addAll(o.getObjectPropertiesInSignature()); + dataProperties.addAll(o.getDataPropertiesInSignature()); + } + + // sort properties + List propertyList = new ArrayList(properties); + List dataPropertyList = new ArrayList(dataProperties); + Collections.sort(propertyList); + Collections.sort(dataPropertyList); + + // retrieve id and label for all properties + List relList = new ArrayList(); + for (OWLObjectProperty p : propertyList) { + if (p.isBuiltIn()) { + // skip owl:topObjectProperty + continue; + } + JsonRelationInfo json = new JsonRelationInfo(); + json.id = curieHandler.getCuri(p); + json.label = wrapper.getLabel(p); + if (importantRelations != null && (importantRelations.contains(p))) { + json.relevant = true; + } else { + json.relevant = false; + } + relList.add(json); + } + + // retrieve id and label for all data properties + List dataList = new ArrayList(); + for (OWLDataProperty p : dataPropertyList) { + if (p.isBuiltIn()) { + continue; + } + JsonRelationInfo json = new JsonRelationInfo(); + json.id = curieHandler.getCuri(p); + json.label = wrapper.getLabel(p); + dataList.add(json); + } + IOUtils.closeQuietly(wrapper); + return Pair.of(relList, dataList); + } + + public static List renderEvidences(MolecularModelManager mmm, CurieHandler curieHandler) throws OWLException, IOException { + return renderEvidences(mmm.getOntology().getOWLOntologyManager(), curieHandler); + } + + private static final Object ecoMutex = new Object(); + private static volatile OntologyMapperPair eco = null; + + public static List renderEvidences(OWLOntologyManager manager, CurieHandler curieHandler) throws OWLException, IOException { + // TODO remove the hard coded ECO dependencies + OntologyMapperPair pair; + synchronized (ecoMutex) { + if (eco == null) { + eco = EcoMapperFactory.createEcoMapper(manager); + } + pair = eco; + } + final MinervaOWLGraphWrapper graph = pair.getGraph(); + final EcoMapper mapper = pair.getMapper(); + Set ecoClasses = graph.getAllOWLClasses(); + Map codesForEcoClasses = mapper.getCodesForEcoClasses(); + List relList = new ArrayList(); + for (OWLClass ecoClass : ecoClasses) { + if (ecoClass.isBuiltIn()) { + continue; + } + JsonEvidenceInfo json = new JsonEvidenceInfo(); + json.id = curieHandler.getCuri(ecoClass); + json.label = graph.getLabel(ecoClass); + String code = codesForEcoClasses.get(ecoClass); + if (code != null) { + json.code = code; + } + relList.add(json); + } + return 
relList; + } + + public static String renderToJson(String modelId, OWLOntology ont, InferenceProvider inferenceProvider, CurieHandler curieHandler) { + return renderToJson(modelId, ont, inferenceProvider, curieHandler, false); + } + + public static String renderToJson(String modelId, OWLOntology ont, InferenceProvider inferenceProvider, CurieHandler curieHandler, boolean prettyPrint) { + MolecularModelJsonRenderer r = new MolecularModelJsonRenderer(modelId, ont, inferenceProvider, curieHandler); + JsonModel model = r.renderModel(); + return renderToJson(model, prettyPrint); + } + + public static String renderToJson(Object model, boolean prettyPrint) { + GsonBuilder builder = new GsonBuilder(); + if (prettyPrint) { + builder = builder.setPrettyPrinting(); + } + Gson gson = builder.create(); + String json = gson.toJson(model); + return json; + } + + public static T parseFromJson(String json, Class type) { + Gson gson = new GsonBuilder().create(); + T result = gson.fromJson(json, type); + return result; + } + + public static T[] parseFromJson(String requestString, Type requestType) { + Gson gson = new GsonBuilder().create(); + return gson.fromJson(requestString, requestType); + } } diff --git a/minerva-core/src/main/java/org/geneontology/minerva/model/ActivityUnit.java b/minerva-core/src/main/java/org/geneontology/minerva/model/ActivityUnit.java index 71337a8b..80a1a7e0 100644 --- a/minerva-core/src/main/java/org/geneontology/minerva/model/ActivityUnit.java +++ b/minerva-core/src/main/java/org/geneontology/minerva/model/ActivityUnit.java @@ -1,246 +1,232 @@ /** - * + * */ package org.geneontology.minerva.model; +import org.apache.log4j.Logger; +import org.semanticweb.owlapi.model.*; +import org.semanticweb.owlapi.search.EntitySearcher; + import java.util.Collection; import java.util.HashMap; import java.util.HashSet; -import java.util.Map; import java.util.Set; -import org.apache.log4j.Logger; -import org.geneontology.minerva.CoreMolecularModelManager; -import org.semanticweb.owlapi.model.AxiomType; -import org.semanticweb.owlapi.model.OWLAxiom; -import org.semanticweb.owlapi.model.OWLClass; -import org.semanticweb.owlapi.model.OWLIndividual; -import org.semanticweb.owlapi.model.OWLNamedIndividual; -import org.semanticweb.owlapi.model.OWLObjectProperty; -import org.semanticweb.owlapi.model.OWLObjectPropertyAssertionAxiom; -import org.semanticweb.owlapi.model.OWLObjectPropertyExpression; -import org.semanticweb.owlapi.model.OWLOntology; -import org.semanticweb.owlapi.search.EntitySearcher; - -import com.google.common.collect.Multimap; - /** * @author benjamingood * */ -public class ActivityUnit extends GoCamOccurent{ - Set containing_processes; - Set enablers; - - private static Logger LOG = Logger.getLogger(ActivityUnit.class); - - public ActivityUnit(OWLNamedIndividual ind, OWLOntology ont, GoCamModel model) { - super(ind, ont, model); - enablers = new HashSet(); - containing_processes = new HashSet(); - causal_out = new HashMap>(); - causal_in = new HashMap>(); - inputs = new HashSet(); - outputs = new HashSet(); - regulating_entities = new HashSet(); - locations = new HashSet(); - transport_locations = new HashSet(); - //FYI this doesn't work unless the gocam ontology either imports the declarations e.g. for all the object properties and classes - //or includes the declarations. 
- Collection ref_axioms = EntitySearcher.getReferencingAxioms(ind, ont); - for(OWLAxiom axiom : ref_axioms) { - if(axiom.getAxiomType().equals(AxiomType.OBJECT_PROPERTY_ASSERTION)) { - OWLObjectPropertyAssertionAxiom a = (OWLObjectPropertyAssertionAxiom) axiom; - OWLObjectProperty prop = (OWLObjectProperty) a.getProperty(); - if(a.getSubject().equals(ind)) { - OWLNamedIndividual object = a.getObject().asOWLNamedIndividual(); - if(prop.getIRI().toString().equals("http://purl.obolibrary.org/obo/RO_0002333")) { - enablers.add(new PhysicalEntity(object, ont, model)); - } - else if(prop.getIRI().toString().equals("http://purl.obolibrary.org/obo/BFO_0000050")) { - containing_processes.add(new BiologicalProcessUnit(object, ont, model)); - }else if(prop.getIRI().toString().equals("http://purl.obolibrary.org/obo/RO_0002233")) { - inputs.add(new PhysicalEntity(object, ont, model)); - }else if(prop.getIRI().toString().equals("http://purl.obolibrary.org/obo/RO_0002234")) { - outputs.add(new PhysicalEntity(object, ont, model)); - } - else if(prop.getIRI().toString().equals("http://purl.obolibrary.org/obo/BFO_0000066")) { - locations.add(new AnatomicalEntity(object, ont, model)); - }else if(prop.getIRI().toString().equals("http://purl.obolibrary.org/obo/RO_0002339")) { - transport_locations.add(new AnatomicalEntity(object, ont, model)); - }else if(prop.getIRI().toString().equals("http://purl.obolibrary.org/obo/RO_0002338")) { - transport_locations.add(new AnatomicalEntity(object, ont, model)); - }else if(prop.getIRI().toString().equals("http://purl.obolibrary.org/obo/RO_0002313")) { - transport_locations.add(new AnatomicalEntity(object, ont, model)); - }else if(prop.getIRI().toString().equals("http://purl.obolibrary.org/obo/RO_0002429")) { - regulating_entities.add(new PhysicalEntity(object, ont, model)); - }else if(prop.getIRI().toString().equals("http://purl.obolibrary.org/obo/RO_0002430")) { - regulating_entities.add(new PhysicalEntity(object, ont, model)); - } - //all other properties now assumed to be causal relations - else { - GoCamOccurent object_event = getOccurent(object, ont, model); - if(object_event!=null) { - Set objects = causal_out.get(prop); - if(objects==null) { - objects = new HashSet(); - } - objects.add(object_event); - causal_out.put(prop, objects); - }else { - LOG.error("Linked prop "+prop+" Object Not an occurent "+object+ " in "+model.getIri()+" "+model.getTitle()); - } - } - //above we have contextual information for this activity - //here we check for causal relations linked to this activity from another one. 
- }else if(a.getObject().equals(ind)) { - //the source - OWLNamedIndividual source = a.getSubject().asOWLNamedIndividual(); - if(prop.getIRI().toString().equals("http://purl.obolibrary.org/obo/RO_0002429")) { - regulating_entities.add(new PhysicalEntity(source, ont, model)); - }else if(prop.getIRI().toString().equals("http://purl.obolibrary.org/obo/RO_0002430")) { - regulating_entities.add(new PhysicalEntity(source, ont, model)); - }else { - GoCamEntity source_event = getOccurent(source, ont, model); - if(source_event !=null) { - Set sources = causal_in.get(prop); - if(sources==null) { - sources = new HashSet(); - } - sources.add((GoCamOccurent)source_event); - causal_in.put(prop, sources); - } - } - } - } - } - } - - //causal_out = new HashMap>(); - - public Set getDownstream(GoCamOccurent activity, Set down){ - if(down==null) { - down = new HashSet(); - } - if(activity.causal_out!=null) { - for(Set nextsteps : activity.causal_out.values()) { - for(GoCamOccurent nextstep : nextsteps) { - if(nextstep!=activity&&!down.contains(nextstep)) { - down.add(nextstep); - down = getDownstream(nextstep, down); - } - } - } - } - return down; - } - - - private GoCamOccurent getOccurent(OWLNamedIndividual object, OWLOntology ont, GoCamModel model) { - GoCamEntity e = model.ind_entity.get(object); - if(e!=null) { - if(e instanceof GoCamOccurent) { - return (GoCamOccurent)e; - }else { - LOG.error("Tried to get physical entity as occurent "+object+ " in "+model.getIri()+" "+model.getTitle()); - return null; - } - } - Set types = model.ind_types.get(object); - GoCamOccurent object_event = null; - if(types==null) { - LOG.error("No types found for "+object+ " in "+model.getIri()+" "+model.getTitle()); - }else { - if(types.contains("http://purl.obolibrary.org/obo/GO_0008150")) { - object_event = new BiologicalProcessUnit(object, ont, model); - }else if(types.contains("http://purl.obolibrary.org/obo/GO_0003674")|| - types.contains("http://purl.obolibrary.org/obo/go/extensions/reacto.owl#molecular_event")) { - object_event = new ActivityUnit(object, ont, model); - }else { - LOG.error("Tried to get physical entity as occurent "+object+ " in "+model.getIri()+" "+model.getTitle()); - return null; - } - } - if(object_event!=null) { - model.ind_entity.put(object, object_event); - } - return object_event; - } - - public String toString() { - String g = ""; - if(label!=null) { - g += "label:"+label; - } - g+="\nIRI:"+individual.toString()+"\ntypes: "+this.stringForClasses(this.direct_types); - if(comments!=null) { - g+="\ncomments: "+comments+"\n"; - } - if(notes!=null) { - g+="\nnotes:"+notes; - } - if(enablers!=null) { - g+="\nenabled by "+enablers; - } - if(locations!=null) { - g+="\noccurs in "+locations; - } - if(containing_processes!=null) { - g+="\npart of "+containing_processes; - } - if(inputs!=null) { - g+="\nhas inputs "+inputs; - } - if(outputs!=null) { - g+="\nhas outputs "+outputs; - } - return g; - } - - public Set getContaining_processes() { - return containing_processes; - } - - public void setContaining_processes(Set containing_processes) { - this.containing_processes = containing_processes; - } - - public Set getEnablers() { - return enablers; - } - - public void setEnablers(Set enablers) { - this.enablers = enablers; - } - - public String getURIsForConnectedBPs() { - String bp_iris = ""; - Set bps = new HashSet(); - for(BiologicalProcessUnit bpu : getContaining_processes()) { - bps.addAll(bpu.direct_types); - } - if(bps.size()>0) { - bp_iris = this.stringForClasses(bps); - } - return bp_iris; - } - 
- /** - * Definition of a 'complete' activity unit - * @return - */ - public boolean isComplete() { - boolean complete = false; - if(this.getEnablers().size()==1&& - this.getLocations().size()==1&& - this.getContaining_processes().size()==1&& - this.getDirect_types().size()==1) { - OWLClass type = this.getDirect_types().iterator().next(); - if(!type.equals(this.in_model.mf)&&!type.equals(this.in_model.me)) { - complete = true; - } - } - return complete; - } +public class ActivityUnit extends GoCamOccurent { + Set containing_processes; + Set enablers; + + private static Logger LOG = Logger.getLogger(ActivityUnit.class); + + public ActivityUnit(OWLNamedIndividual ind, OWLOntology ont, GoCamModel model) { + super(ind, ont, model); + enablers = new HashSet(); + containing_processes = new HashSet(); + causal_out = new HashMap>(); + causal_in = new HashMap>(); + inputs = new HashSet(); + outputs = new HashSet(); + regulating_entities = new HashSet(); + locations = new HashSet(); + transport_locations = new HashSet(); + //FYI this doesn't work unless the gocam ontology either imports the declarations e.g. for all the object properties and classes + //or includes the declarations. + Collection ref_axioms = EntitySearcher.getReferencingAxioms(ind, ont); + for (OWLAxiom axiom : ref_axioms) { + if (axiom.getAxiomType().equals(AxiomType.OBJECT_PROPERTY_ASSERTION)) { + OWLObjectPropertyAssertionAxiom a = (OWLObjectPropertyAssertionAxiom) axiom; + OWLObjectProperty prop = (OWLObjectProperty) a.getProperty(); + if (a.getSubject().equals(ind)) { + OWLNamedIndividual object = a.getObject().asOWLNamedIndividual(); + if (prop.getIRI().toString().equals("http://purl.obolibrary.org/obo/RO_0002333")) { + enablers.add(new PhysicalEntity(object, ont, model)); + } else if (prop.getIRI().toString().equals("http://purl.obolibrary.org/obo/BFO_0000050")) { + containing_processes.add(new BiologicalProcessUnit(object, ont, model)); + } else if (prop.getIRI().toString().equals("http://purl.obolibrary.org/obo/RO_0002233")) { + inputs.add(new PhysicalEntity(object, ont, model)); + } else if (prop.getIRI().toString().equals("http://purl.obolibrary.org/obo/RO_0002234")) { + outputs.add(new PhysicalEntity(object, ont, model)); + } else if (prop.getIRI().toString().equals("http://purl.obolibrary.org/obo/BFO_0000066")) { + locations.add(new AnatomicalEntity(object, ont, model)); + } else if (prop.getIRI().toString().equals("http://purl.obolibrary.org/obo/RO_0002339")) { + transport_locations.add(new AnatomicalEntity(object, ont, model)); + } else if (prop.getIRI().toString().equals("http://purl.obolibrary.org/obo/RO_0002338")) { + transport_locations.add(new AnatomicalEntity(object, ont, model)); + } else if (prop.getIRI().toString().equals("http://purl.obolibrary.org/obo/RO_0002313")) { + transport_locations.add(new AnatomicalEntity(object, ont, model)); + } else if (prop.getIRI().toString().equals("http://purl.obolibrary.org/obo/RO_0002429")) { + regulating_entities.add(new PhysicalEntity(object, ont, model)); + } else if (prop.getIRI().toString().equals("http://purl.obolibrary.org/obo/RO_0002430")) { + regulating_entities.add(new PhysicalEntity(object, ont, model)); + } + //all other properties now assumed to be causal relations + else { + GoCamOccurent object_event = getOccurent(object, ont, model); + if (object_event != null) { + Set objects = causal_out.get(prop); + if (objects == null) { + objects = new HashSet(); + } + objects.add(object_event); + causal_out.put(prop, objects); + } else { + LOG.error("Linked prop " + 
prop + " Object Not an occurent " + object + " in " + model.getIri() + " " + model.getTitle()); + } + } + //above we have contextual information for this activity + //here we check for causal relations linked to this activity from another one. + } else if (a.getObject().equals(ind)) { + //the source + OWLNamedIndividual source = a.getSubject().asOWLNamedIndividual(); + if (prop.getIRI().toString().equals("http://purl.obolibrary.org/obo/RO_0002429")) { + regulating_entities.add(new PhysicalEntity(source, ont, model)); + } else if (prop.getIRI().toString().equals("http://purl.obolibrary.org/obo/RO_0002430")) { + regulating_entities.add(new PhysicalEntity(source, ont, model)); + } else { + GoCamEntity source_event = getOccurent(source, ont, model); + if (source_event != null) { + Set sources = causal_in.get(prop); + if (sources == null) { + sources = new HashSet(); + } + sources.add((GoCamOccurent) source_event); + causal_in.put(prop, sources); + } + } + } + } + } + } + + //causal_out = new HashMap>(); + + public Set getDownstream(GoCamOccurent activity, Set down) { + if (down == null) { + down = new HashSet(); + } + if (activity.causal_out != null) { + for (Set nextsteps : activity.causal_out.values()) { + for (GoCamOccurent nextstep : nextsteps) { + if (nextstep != activity && !down.contains(nextstep)) { + down.add(nextstep); + down = getDownstream(nextstep, down); + } + } + } + } + return down; + } + + + private GoCamOccurent getOccurent(OWLNamedIndividual object, OWLOntology ont, GoCamModel model) { + GoCamEntity e = model.ind_entity.get(object); + if (e != null) { + if (e instanceof GoCamOccurent) { + return (GoCamOccurent) e; + } else { + LOG.error("Tried to get physical entity as occurent " + object + " in " + model.getIri() + " " + model.getTitle()); + return null; + } + } + Set types = model.ind_types.get(object); + GoCamOccurent object_event = null; + if (types == null) { + LOG.error("No types found for " + object + " in " + model.getIri() + " " + model.getTitle()); + } else { + if (types.contains("http://purl.obolibrary.org/obo/GO_0008150")) { + object_event = new BiologicalProcessUnit(object, ont, model); + } else if (types.contains("http://purl.obolibrary.org/obo/GO_0003674") || + types.contains("http://purl.obolibrary.org/obo/go/extensions/reacto.owl#molecular_event")) { + object_event = new ActivityUnit(object, ont, model); + } else { + LOG.error("Tried to get physical entity as occurent " + object + " in " + model.getIri() + " " + model.getTitle()); + return null; + } + } + if (object_event != null) { + model.ind_entity.put(object, object_event); + } + return object_event; + } + + public String toString() { + String g = ""; + if (label != null) { + g += "label:" + label; + } + g += "\nIRI:" + individual.toString() + "\ntypes: " + this.stringForClasses(this.direct_types); + if (comments != null) { + g += "\ncomments: " + comments + "\n"; + } + if (notes != null) { + g += "\nnotes:" + notes; + } + if (enablers != null) { + g += "\nenabled by " + enablers; + } + if (locations != null) { + g += "\noccurs in " + locations; + } + if (containing_processes != null) { + g += "\npart of " + containing_processes; + } + if (inputs != null) { + g += "\nhas inputs " + inputs; + } + if (outputs != null) { + g += "\nhas outputs " + outputs; + } + return g; + } + + public Set getContaining_processes() { + return containing_processes; + } + + public void setContaining_processes(Set containing_processes) { + this.containing_processes = containing_processes; + } + + public Set getEnablers() { + 
return enablers; + } + + public void setEnablers(Set enablers) { + this.enablers = enablers; + } + + public String getURIsForConnectedBPs() { + String bp_iris = ""; + Set bps = new HashSet(); + for (BiologicalProcessUnit bpu : getContaining_processes()) { + bps.addAll(bpu.direct_types); + } + if (bps.size() > 0) { + bp_iris = this.stringForClasses(bps); + } + return bp_iris; + } + + /** + * Definition of a 'complete' activity unit + * @return + */ + public boolean isComplete() { + boolean complete = false; + if (this.getEnablers().size() == 1 && + this.getLocations().size() == 1 && + this.getContaining_processes().size() == 1 && + this.getDirect_types().size() == 1) { + OWLClass type = this.getDirect_types().iterator().next(); + if (!type.equals(this.in_model.mf) && !type.equals(this.in_model.me)) { + complete = true; + } + } + return complete; + } } diff --git a/minerva-core/src/main/java/org/geneontology/minerva/model/AnatomicalEntity.java b/minerva-core/src/main/java/org/geneontology/minerva/model/AnatomicalEntity.java index 49d08866..b673653b 100644 --- a/minerva-core/src/main/java/org/geneontology/minerva/model/AnatomicalEntity.java +++ b/minerva-core/src/main/java/org/geneontology/minerva/model/AnatomicalEntity.java @@ -3,10 +3,10 @@ import org.semanticweb.owlapi.model.OWLNamedIndividual; import org.semanticweb.owlapi.model.OWLOntology; -public class AnatomicalEntity extends GoCamEntity{ +public class AnatomicalEntity extends GoCamEntity { - public AnatomicalEntity(OWLNamedIndividual loc_ind, OWLOntology ont, GoCamModel model) { - super(loc_ind, ont, model); - } + public AnatomicalEntity(OWLNamedIndividual loc_ind, OWLOntology ont, GoCamModel model) { + super(loc_ind, ont, model); + } } diff --git a/minerva-core/src/main/java/org/geneontology/minerva/model/BiologicalProcessUnit.java b/minerva-core/src/main/java/org/geneontology/minerva/model/BiologicalProcessUnit.java index 76336cb4..09e15b38 100644 --- a/minerva-core/src/main/java/org/geneontology/minerva/model/BiologicalProcessUnit.java +++ b/minerva-core/src/main/java/org/geneontology/minerva/model/BiologicalProcessUnit.java @@ -1,16 +1,17 @@ package org.geneontology.minerva.model; -import java.util.Set; - import org.semanticweb.owlapi.model.OWLNamedIndividual; import org.semanticweb.owlapi.model.OWLOntology; +import java.util.Set; + + +public class BiologicalProcessUnit extends GoCamOccurent { + public BiologicalProcessUnit(OWLNamedIndividual enabler_ind, OWLOntology ont, GoCamModel model) { + super(enabler_ind, ont, model); + } -public class BiologicalProcessUnit extends GoCamOccurent{ - public BiologicalProcessUnit(OWLNamedIndividual enabler_ind, OWLOntology ont, GoCamModel model) { - super(enabler_ind, ont, model); - } - Set transports; - AnatomicalEntity start_location; - AnatomicalEntity end_location; + Set transports; + AnatomicalEntity start_location; + AnatomicalEntity end_location; } diff --git a/minerva-core/src/main/java/org/geneontology/minerva/model/GoCamEntity.java b/minerva-core/src/main/java/org/geneontology/minerva/model/GoCamEntity.java index 8aec7f39..f3975636 100644 --- a/minerva-core/src/main/java/org/geneontology/minerva/model/GoCamEntity.java +++ b/minerva-core/src/main/java/org/geneontology/minerva/model/GoCamEntity.java @@ -1,182 +1,174 @@ package org.geneontology.minerva.model; +import org.semanticweb.owlapi.model.*; +import org.semanticweb.owlapi.search.EntitySearcher; + import java.io.IOException; import java.util.Collection; import java.util.HashSet; import java.util.Set; -import 
org.semanticweb.owlapi.model.OWLAnnotation; -import org.semanticweb.owlapi.model.OWLClass; -import org.semanticweb.owlapi.model.OWLClassExpression; -import org.semanticweb.owlapi.model.OWLNamedIndividual; -import org.semanticweb.owlapi.model.OWLOntology; -import org.semanticweb.owlapi.search.EntitySearcher; +public class GoCamEntity extends ProvenanceAnnotated { + String xref; + String label; + String exact_match; + OWLNamedIndividual individual; + Set direct_types; + Set indirect_types; + String derived_from; + GoCamModel in_model; + + public GoCamEntity(OWLNamedIndividual ind, OWLOntology ont, GoCamModel model) { + in_model = model; + addAnnotations(ind, ont); + individual = ind; + direct_types = new HashSet(); + for (OWLClassExpression ce : EntitySearcher.getTypes(ind, ont)) { + if (!ce.isAnonymous()) { + direct_types.add(ce.asOWLClass()); + } + } + model.ind_entity.put(ind, this); + } + + private void addAnnotations(OWLNamedIndividual ind, OWLOntology ont) { + + Collection annos = EntitySearcher.getAnnotations(ind, ont); + comments = new HashSet(); + notes = new HashSet(); + contributors = new HashSet(); + provided_by = new HashSet(); + ; + for (OWLAnnotation anno : annos) { + if (anno.getProperty().getIRI().toString().equals("http://purl.org/dc/elements/1.1/contributor")) { + contributors.add(anno.getValue().asLiteral().get().getLiteral()); + } else if (anno.getProperty().getIRI().toString().equals("http://purl.org/dc/elements/1.1/date")) { + date = anno.getValue().asLiteral().get().getLiteral(); + } else if (anno.getProperty().getIRI().toString().equals("http://purl.org/pav/providedBy")) { + provided_by.add(anno.getValue().asLiteral().get().getLiteral()); + } else if (anno.getProperty().getIRI().toString().equals("http://www.w3.org/2000/01/rdf-schema#comment")) { + String comment = anno.getValue().asLiteral().get().toString(); + comments.add(comment); + } else if (anno.getProperty().getIRI().toString().equals("http://www.w3.org/2004/02/skos/core#note")) { + String note = anno.getValue().asLiteral().get().toString(); + notes.add(note); + } else if (anno.getProperty().getIRI().toString().equals("http://www.geneontology.org/formats/oboInOwl#hasDbXref")) { + xref = anno.getValue().asLiteral().get().toString(); + } else if (anno.getProperty().getIRI().toString().equals("http://www.w3.org/2000/01/rdf-schema#label")) { + label = anno.getValue().asLiteral().get().toString(); + } else if (anno.getProperty().getIRI().toString().equals("http://www.w3.org/2004/02/skos/core#exactMatch")) { + exact_match = anno.getValue().asIRI().get().toString(); + } + } + + } + + public String stringForClasses(Set types) { + if (types == null) { + return ""; + } + String c = ""; + for (OWLClass type : types) { + try { + String label = in_model.go_lego.getLabel(type); + if (label != null) { + c += label + " "; + } else { + //it could be deprecated + Set c1 = new HashSet(); + c1.add(type.getIRI().toString()); + Set fixed = in_model.go_lego.replaceDeprecated(c1); + if (fixed != c1 && fixed.size() == 1) { + label = in_model.go_lego.getLabel(fixed.iterator().next()); + c += label + " (replacing deprecated " + type.getIRI() + ")"; + } + } + } catch (IOException e) { + // TODO Auto-generated catch block + e.printStackTrace(); + } + } + return c; + } + + public String toString() { + String g = ""; + if (label != null) { + g += "label:" + label; + } + g += "\nIRI:" + individual.toString() + "\ntypes: " + this.stringForClasses(this.direct_types); + if (comments != null) { + g += "\ncomments: " + comments + "\n"; + } + if 
(notes != null) { + g += "\nnotes:" + notes; + } + + return g; + } + + public String getXref() { + return xref; + } + + public void setXref(String xref) { + this.xref = xref; + } + + public String getLabel() { + return label; + } + + public void setLabel(String label) { + this.label = label; + } + + public String getExact_match() { + return exact_match; + } + + public void setExact_match(String exact_match) { + this.exact_match = exact_match; + } + + public OWLNamedIndividual getIndividual() { + return individual; + } + + public void setIndividual(OWLNamedIndividual individual) { + this.individual = individual; + } + + public Set getDirect_types() { + return direct_types; + } + + public void setDirect_types(Set direct_types) { + this.direct_types = direct_types; + } + + public Set getIndirect_types() { + return indirect_types; + } + + public void setIndirect_types(Set indirect_types) { + this.indirect_types = indirect_types; + } + + public String getDerived_from() { + return derived_from; + } + + public void setDerived_from(String derived_from) { + this.derived_from = derived_from; + } + + public GoCamModel getIn_model() { + return in_model; + } + + public void setIn_model(GoCamModel in_model) { + this.in_model = in_model; + } -public class GoCamEntity extends ProvenanceAnnotated{ - String xref; - String label; - String exact_match; - OWLNamedIndividual individual; - Set direct_types; - Set indirect_types; - String derived_from; - GoCamModel in_model; - - public GoCamEntity(OWLNamedIndividual ind, OWLOntology ont, GoCamModel model) { - in_model = model; - addAnnotations(ind, ont); - individual = ind; - direct_types = new HashSet(); - for(OWLClassExpression ce : EntitySearcher.getTypes(ind, ont)) { - if(!ce.isAnonymous()) { - direct_types.add(ce.asOWLClass()); - } - } - model.ind_entity.put(ind, this); - } - - private void addAnnotations(OWLNamedIndividual ind, OWLOntology ont) { - - Collection annos = EntitySearcher.getAnnotations(ind, ont); - comments = new HashSet(); - notes = new HashSet(); - contributors = new HashSet(); - provided_by = new HashSet();; - for(OWLAnnotation anno : annos) { - if(anno.getProperty().getIRI().toString().equals("http://purl.org/dc/elements/1.1/contributor")) { - contributors.add(anno.getValue().asLiteral().get().getLiteral()); - } - else if(anno.getProperty().getIRI().toString().equals("http://purl.org/dc/elements/1.1/date")) { - date = anno.getValue().asLiteral().get().getLiteral(); - } - else if(anno.getProperty().getIRI().toString().equals("http://purl.org/pav/providedBy")) { - provided_by.add(anno.getValue().asLiteral().get().getLiteral()); - } - else if(anno.getProperty().getIRI().toString().equals("http://www.w3.org/2000/01/rdf-schema#comment")) { - String comment = anno.getValue().asLiteral().get().toString(); - comments.add(comment); - } - else if(anno.getProperty().getIRI().toString().equals("http://www.w3.org/2004/02/skos/core#note")) { - String note = anno.getValue().asLiteral().get().toString(); - notes.add(note); - } - else if(anno.getProperty().getIRI().toString().equals("http://www.geneontology.org/formats/oboInOwl#hasDbXref")) { - xref = anno.getValue().asLiteral().get().toString(); - } - else if(anno.getProperty().getIRI().toString().equals("http://www.w3.org/2000/01/rdf-schema#label")) { - label = anno.getValue().asLiteral().get().toString(); - } - else if(anno.getProperty().getIRI().toString().equals("http://www.w3.org/2004/02/skos/core#exactMatch")) { - exact_match = anno.getValue().asIRI().get().toString(); - } - } - - } - public String 
stringForClasses(Set types) { - if(types==null) { - return ""; - } - String c = ""; - for(OWLClass type : types) { - try { - String label = in_model.go_lego.getLabel(type); - if(label!=null) { - c+=label+" "; - }else { - //it could be deprecated - Set c1 = new HashSet(); c1.add(type.getIRI().toString()); - Set fixed = in_model.go_lego.replaceDeprecated(c1); - if(fixed!=c1&&fixed.size()==1) { - label = in_model.go_lego.getLabel(fixed.iterator().next()); - c+=label+" (replacing deprecated "+type.getIRI()+")"; - } - } - } catch (IOException e) { - // TODO Auto-generated catch block - e.printStackTrace(); - } - } - return c; - } - - public String toString(){ - String g = ""; - if(label!=null) { - g += "label:"+label; - } - g+="\nIRI:"+individual.toString()+"\ntypes: "+this.stringForClasses(this.direct_types); - if(comments!=null) { - g+="\ncomments: "+comments+"\n"; - } - if(notes!=null) { - g+="\nnotes:"+notes; - } - - return g; - } - - public String getXref() { - return xref; - } - - public void setXref(String xref) { - this.xref = xref; - } - - public String getLabel() { - return label; - } - - public void setLabel(String label) { - this.label = label; - } - - public String getExact_match() { - return exact_match; - } - - public void setExact_match(String exact_match) { - this.exact_match = exact_match; - } - - public OWLNamedIndividual getIndividual() { - return individual; - } - - public void setIndividual(OWLNamedIndividual individual) { - this.individual = individual; - } - - public Set getDirect_types() { - return direct_types; - } - - public void setDirect_types(Set direct_types) { - this.direct_types = direct_types; - } - - public Set getIndirect_types() { - return indirect_types; - } - - public void setIndirect_types(Set indirect_types) { - this.indirect_types = indirect_types; - } - - public String getDerived_from() { - return derived_from; - } - - public void setDerived_from(String derived_from) { - this.derived_from = derived_from; - } - - public GoCamModel getIn_model() { - return in_model; - } - - public void setIn_model(GoCamModel in_model) { - this.in_model = in_model; - } - } diff --git a/minerva-core/src/main/java/org/geneontology/minerva/model/GoCamModel.java b/minerva-core/src/main/java/org/geneontology/minerva/model/GoCamModel.java index 2b0c5aca..02197ee3 100644 --- a/minerva-core/src/main/java/org/geneontology/minerva/model/GoCamModel.java +++ b/minerva-core/src/main/java/org/geneontology/minerva/model/GoCamModel.java @@ -1,325 +1,324 @@ package org.geneontology.minerva.model; -import java.io.IOException; -import java.util.HashMap; -import java.util.HashSet; -import java.util.Map; -import java.util.Set; - -import org.apache.commons.math3.stat.descriptive.DescriptiveStatistics; import org.apache.log4j.Logger; import org.geneontology.minerva.BlazegraphMolecularModelManager; import org.geneontology.minerva.BlazegraphOntologyManager; -import org.geneontology.minerva.CoreMolecularModelManager; import org.openrdf.query.BindingSet; import org.openrdf.query.MalformedQueryException; import org.openrdf.query.QueryEvaluationException; import org.openrdf.query.TupleQueryResult; import org.openrdf.repository.RepositoryException; -import org.semanticweb.owlapi.model.IRI; -import org.semanticweb.owlapi.model.OWLAnnotation; -import org.semanticweb.owlapi.model.OWLClass; -import org.semanticweb.owlapi.model.OWLNamedIndividual; -import org.semanticweb.owlapi.model.OWLObjectProperty; -import org.semanticweb.owlapi.model.OWLOntology; - -public class GoCamModel extends ProvenanceAnnotated{ 
- private static Logger LOG = Logger.getLogger(GoCamModel.class); - BlazegraphOntologyManager go_lego; - String modelstate; - Set in_taxon; - String title; - Set imports; - String oboinowlid; - String iri; - //the whole thing - OWLOntology ont; - //the discretized bits of activity flow - Set activities; - Map> ind_types; - Map ind_entity; - OWLClass mf; OWLClass bp; OWLClass cc; OWLClass me; - GoCamModelStats stats; - Map causal_count; - - public GoCamModel(OWLOntology abox, BlazegraphMolecularModelManager m3) throws IOException, MalformedQueryException, QueryEvaluationException, RepositoryException { - ont = abox; - me = ont.getOWLOntologyManager().getOWLDataFactory().getOWLClass(IRI.create("http://purl.obolibrary.org/obo/go/extensions/reacto.owl#molecular_event")); - mf = ont.getOWLOntologyManager().getOWLDataFactory().getOWLClass(IRI.create("http://purl.obolibrary.org/obo/GO_0003674")); - bp = ont.getOWLOntologyManager().getOWLDataFactory().getOWLClass(IRI.create("http://purl.obolibrary.org/obo/GO_0008150")); - cc = ont.getOWLOntologyManager().getOWLDataFactory().getOWLClass(IRI.create("http://purl.obolibrary.org/obo/GO_0005575")); - causal_count = new HashMap(); - go_lego = m3.getGolego_repo(); - iri = abox.getOntologyID().getOntologyIRI().get().toString(); - ind_entity = new HashMap(); - addAnnotations(); - //setIndTypesWithOwl(); - setIndTypesWithSparql(m3, iri); - addActivities(); - this.setGoCamModelStats(); - } - - private void setIndTypesWithSparql(BlazegraphMolecularModelManager m3, String graph_id) throws MalformedQueryException, QueryEvaluationException, RepositoryException, IOException { - Map> iTypesAndComplementTypes = new HashMap>(); - Set all_types = new HashSet(); - TupleQueryResult r = (TupleQueryResult) m3.executeSPARQLQuery("" - + "PREFIX rdf: " - + "select ?instance ?type where {" - + "GRAPH <"+graph_id+"> { " - + "?instance rdf:type ." - + "{ ?instance rdf:type ?type . } UNION { ?instance rdf:type ?complement . ?complement owl:complementOf ?type . 
}" - + "FILTER (isIRI(?type)) " - + "FILTER (?type != ) " - + "}}", 100); - while(r.hasNext()) { - BindingSet bs = r.next(); - String instance = bs.getBinding("instance").getValue().stringValue(); - String type = bs.getBinding("type").getValue().stringValue(); - OWLNamedIndividual i = ont.getOWLOntologyManager().getOWLDataFactory().getOWLNamedIndividual(IRI.create(instance)); - if (!iTypesAndComplementTypes.containsKey(i)) { - iTypesAndComplementTypes.put(i, new HashSet()); - } - Set types = iTypesAndComplementTypes.get(i); - types.add(type); - all_types.add(type); - } - r.close(); - Map old_new = go_lego.mapDeprecated(all_types); - Set corrected_types = go_lego.replaceDeprecated(all_types, old_new); - Map> type_roots = go_lego.getSuperCategoryMap(corrected_types); - //set global - ind_types = new HashMap>(); - for(OWLNamedIndividual ind : iTypesAndComplementTypes.keySet()) { - //fix deprecated - Set types = go_lego.replaceDeprecated(iTypesAndComplementTypes.get(ind), old_new); - //convert to root types - Set roots = new HashSet(); - for(String type : types) { - if(type_roots.get(type)!=null) { - roots.addAll(type_roots.get(type)); - } - } - ind_types.put(ind, roots); - } - } - - private void setIndTypesWithOwl() throws IOException { - boolean fix_deprecated = true; - Set inds = ont.getIndividualsInSignature(); - ind_types = go_lego.getSuperCategoryMapForIndividuals(inds, ont, fix_deprecated); - } - - private void addActivities() throws IOException { - activities = new HashSet (); - for(OWLNamedIndividual ind : ind_types.keySet()) { - Set types = ind_types.get(ind); - if(types!=null) { - if(types.contains(mf.getIRI().toString())||types.contains(me.getIRI().toString())) { - ActivityUnit unit = new ActivityUnit(ind, ont, this); - - boolean skip = false; - for(String comment : unit.getComments()){ - if(comment.contains("reaction from external pathway")) { - skip = true; - break; - } - } - if(!skip) { - activities.add(unit); - ind_entity.put(ind, unit); - for(OWLObjectProperty prop : unit.causal_out.keySet()) { - Integer np = causal_count.get(prop); - if(np==null) { - np = 0; - } - np++; - causal_count.put(prop, np); - } - } - } - } - } - } - - private void addAnnotations() { - Set annos = ont.getAnnotations(); - in_taxon = new HashSet(); - comments = new HashSet(); - notes = new HashSet(); - contributors = new HashSet();; - provided_by = new HashSet();; - for(OWLAnnotation anno : annos) { - if(anno.getProperty().getIRI().toString().equals("http://purl.org/dc/elements/1.1/title")) { - title = anno.getValue().asLiteral().get().getLiteral(); - } - if(anno.getProperty().getIRI().toString().equals("http://geneontology.org/lego/modelstate")) { - modelstate = anno.getValue().asLiteral().get().getLiteral(); - } - if(anno.getProperty().getIRI().toString().equals("http://purl.org/dc/elements/1.1/contributor")) { - contributors.add(anno.getValue().asLiteral().get().getLiteral()); - } - if(anno.getProperty().getIRI().toString().equals("http://purl.org/dc/elements/1.1/date")) { - date = anno.getValue().asLiteral().get().getLiteral(); - } - if(anno.getProperty().getIRI().toString().equals("http://purl.org/pav/providedBy")) { - provided_by.add(anno.getValue().asLiteral().get().getLiteral()); - } - if(anno.getProperty().getIRI().toString().equals("https://w3id.org/biolink/vocab/in_taxon")) { - if(anno.getValue().asIRI().isPresent()) { - String taxon = anno.getValue().toString(); - in_taxon.add(taxon); - } - } - if(anno.getProperty().getIRI().toString().equals("http://www.w3.org/2000/01/rdf-schema#comment")) 
{ - String comment = anno.getValue().asLiteral().get().toString(); - comments.add(comment); - } - if(anno.getProperty().getIRI().toString().equals("http://www.w3.org/2004/02/skos/core#note")) { - String note = anno.getValue().asLiteral().get().toString(); - notes.add(note); - } - } - - } - - public String toString() { - String g = title+"\n"+iri+"\n"+modelstate+"\n"+contributors+"\n"+date+"\n"+provided_by+"\n"+in_taxon+"\n"; - return g; - } - - public void setGoCamModelStats() { - this.stats = new GoCamModelStats(this); - } - public GoCamModelStats getGoCamModelStats() { - return this.stats; - } - - public BlazegraphOntologyManager getGo_lego() { - return go_lego; - } - - public void setGo_lego(BlazegraphOntologyManager go_lego) { - this.go_lego = go_lego; - } - - public String getModelstate() { - return modelstate; - } - - public void setModelstate(String modelstate) { - this.modelstate = modelstate; - } - - public Set getIn_taxon() { - return in_taxon; - } - - public void setIn_taxon(Set in_taxon) { - this.in_taxon = in_taxon; - } - - public String getTitle() { - return title; - } - - public void setTitle(String title) { - this.title = title; - } - - public Set getImports() { - return imports; - } - - public void setImports(Set imports) { - this.imports = imports; - } - - public String getOboinowlid() { - return oboinowlid; - } - - public void setOboinowlid(String oboinowlid) { - this.oboinowlid = oboinowlid; - } - - public String getIri() { - return iri; - } - - public void setIri(String iri) { - this.iri = iri; - } - - public OWLOntology getOnt() { - return ont; - } - - public void setOnt(OWLOntology ont) { - this.ont = ont; - } - - public Set getActivities() { - return activities; - } - - public void setActivities(Set activities) { - this.activities = activities; - } - - public Map> getInd_types() { - return ind_types; - } - - public void setInd_types(Map> ind_types) { - this.ind_types = ind_types; - } - - public Map getInd_entity() { - return ind_entity; - } - - public void setInd_entity(Map ind_entity) { - this.ind_entity = ind_entity; - } - - public OWLClass getMf() { - return mf; - } - - public void setMf(OWLClass mf) { - this.mf = mf; - } - - public OWLClass getBp() { - return bp; - } +import org.semanticweb.owlapi.model.*; + +import java.io.IOException; +import java.util.HashMap; +import java.util.HashSet; +import java.util.Map; +import java.util.Set; + +public class GoCamModel extends ProvenanceAnnotated { + private static Logger LOG = Logger.getLogger(GoCamModel.class); + BlazegraphOntologyManager go_lego; + String modelstate; + Set in_taxon; + String title; + Set imports; + String oboinowlid; + String iri; + //the whole thing + OWLOntology ont; + //the discretized bits of activity flow + Set activities; + Map> ind_types; + Map ind_entity; + OWLClass mf; + OWLClass bp; + OWLClass cc; + OWLClass me; + GoCamModelStats stats; + Map causal_count; + + public GoCamModel(OWLOntology abox, BlazegraphMolecularModelManager m3) throws IOException, MalformedQueryException, QueryEvaluationException, RepositoryException { + ont = abox; + me = ont.getOWLOntologyManager().getOWLDataFactory().getOWLClass(IRI.create("http://purl.obolibrary.org/obo/go/extensions/reacto.owl#molecular_event")); + mf = ont.getOWLOntologyManager().getOWLDataFactory().getOWLClass(IRI.create("http://purl.obolibrary.org/obo/GO_0003674")); + bp = ont.getOWLOntologyManager().getOWLDataFactory().getOWLClass(IRI.create("http://purl.obolibrary.org/obo/GO_0008150")); + cc = 
ont.getOWLOntologyManager().getOWLDataFactory().getOWLClass(IRI.create("http://purl.obolibrary.org/obo/GO_0005575")); + causal_count = new HashMap(); + go_lego = m3.getGolego_repo(); + iri = abox.getOntologyID().getOntologyIRI().get().toString(); + ind_entity = new HashMap(); + addAnnotations(); + //setIndTypesWithOwl(); + setIndTypesWithSparql(m3, iri); + addActivities(); + this.setGoCamModelStats(); + } + + private void setIndTypesWithSparql(BlazegraphMolecularModelManager m3, String graph_id) throws MalformedQueryException, QueryEvaluationException, RepositoryException, IOException { + Map> iTypesAndComplementTypes = new HashMap>(); + Set all_types = new HashSet(); + TupleQueryResult r = (TupleQueryResult) m3.executeSPARQLQuery("" + + "PREFIX rdf: " + + "select ?instance ?type where {" + + "GRAPH <" + graph_id + "> { " + + "?instance rdf:type ." + + "{ ?instance rdf:type ?type . } UNION { ?instance rdf:type ?complement . ?complement owl:complementOf ?type . }" + + "FILTER (isIRI(?type)) " + + "FILTER (?type != ) " + + "}}", 100); + while (r.hasNext()) { + BindingSet bs = r.next(); + String instance = bs.getBinding("instance").getValue().stringValue(); + String type = bs.getBinding("type").getValue().stringValue(); + OWLNamedIndividual i = ont.getOWLOntologyManager().getOWLDataFactory().getOWLNamedIndividual(IRI.create(instance)); + if (!iTypesAndComplementTypes.containsKey(i)) { + iTypesAndComplementTypes.put(i, new HashSet()); + } + Set types = iTypesAndComplementTypes.get(i); + types.add(type); + all_types.add(type); + } + r.close(); + Map old_new = go_lego.mapDeprecated(all_types); + Set corrected_types = go_lego.replaceDeprecated(all_types, old_new); + Map> type_roots = go_lego.getSuperCategoryMap(corrected_types); + //set global + ind_types = new HashMap>(); + for (OWLNamedIndividual ind : iTypesAndComplementTypes.keySet()) { + //fix deprecated + Set types = go_lego.replaceDeprecated(iTypesAndComplementTypes.get(ind), old_new); + //convert to root types + Set roots = new HashSet(); + for (String type : types) { + if (type_roots.get(type) != null) { + roots.addAll(type_roots.get(type)); + } + } + ind_types.put(ind, roots); + } + } + + private void setIndTypesWithOwl() throws IOException { + boolean fix_deprecated = true; + Set inds = ont.getIndividualsInSignature(); + ind_types = go_lego.getSuperCategoryMapForIndividuals(inds, ont, fix_deprecated); + } + + private void addActivities() throws IOException { + activities = new HashSet(); + for (OWLNamedIndividual ind : ind_types.keySet()) { + Set types = ind_types.get(ind); + if (types != null) { + if (types.contains(mf.getIRI().toString()) || types.contains(me.getIRI().toString())) { + ActivityUnit unit = new ActivityUnit(ind, ont, this); + + boolean skip = false; + for (String comment : unit.getComments()) { + if (comment.contains("reaction from external pathway")) { + skip = true; + break; + } + } + if (!skip) { + activities.add(unit); + ind_entity.put(ind, unit); + for (OWLObjectProperty prop : unit.causal_out.keySet()) { + Integer np = causal_count.get(prop); + if (np == null) { + np = 0; + } + np++; + causal_count.put(prop, np); + } + } + } + } + } + } + + private void addAnnotations() { + Set annos = ont.getAnnotations(); + in_taxon = new HashSet(); + comments = new HashSet(); + notes = new HashSet(); + contributors = new HashSet(); + ; + provided_by = new HashSet(); + ; + for (OWLAnnotation anno : annos) { + if (anno.getProperty().getIRI().toString().equals("http://purl.org/dc/elements/1.1/title")) { + title = 
anno.getValue().asLiteral().get().getLiteral(); + } + if (anno.getProperty().getIRI().toString().equals("http://geneontology.org/lego/modelstate")) { + modelstate = anno.getValue().asLiteral().get().getLiteral(); + } + if (anno.getProperty().getIRI().toString().equals("http://purl.org/dc/elements/1.1/contributor")) { + contributors.add(anno.getValue().asLiteral().get().getLiteral()); + } + if (anno.getProperty().getIRI().toString().equals("http://purl.org/dc/elements/1.1/date")) { + date = anno.getValue().asLiteral().get().getLiteral(); + } + if (anno.getProperty().getIRI().toString().equals("http://purl.org/pav/providedBy")) { + provided_by.add(anno.getValue().asLiteral().get().getLiteral()); + } + if (anno.getProperty().getIRI().toString().equals("https://w3id.org/biolink/vocab/in_taxon")) { + if (anno.getValue().asIRI().isPresent()) { + String taxon = anno.getValue().toString(); + in_taxon.add(taxon); + } + } + if (anno.getProperty().getIRI().toString().equals("http://www.w3.org/2000/01/rdf-schema#comment")) { + String comment = anno.getValue().asLiteral().get().toString(); + comments.add(comment); + } + if (anno.getProperty().getIRI().toString().equals("http://www.w3.org/2004/02/skos/core#note")) { + String note = anno.getValue().asLiteral().get().toString(); + notes.add(note); + } + } + + } + + public String toString() { + String g = title + "\n" + iri + "\n" + modelstate + "\n" + contributors + "\n" + date + "\n" + provided_by + "\n" + in_taxon + "\n"; + return g; + } + + public void setGoCamModelStats() { + this.stats = new GoCamModelStats(this); + } + + public GoCamModelStats getGoCamModelStats() { + return this.stats; + } + + public BlazegraphOntologyManager getGo_lego() { + return go_lego; + } + + public void setGo_lego(BlazegraphOntologyManager go_lego) { + this.go_lego = go_lego; + } + + public String getModelstate() { + return modelstate; + } + + public void setModelstate(String modelstate) { + this.modelstate = modelstate; + } + + public Set getIn_taxon() { + return in_taxon; + } + + public void setIn_taxon(Set in_taxon) { + this.in_taxon = in_taxon; + } + + public String getTitle() { + return title; + } + + public void setTitle(String title) { + this.title = title; + } + + public Set getImports() { + return imports; + } + + public void setImports(Set imports) { + this.imports = imports; + } + + public String getOboinowlid() { + return oboinowlid; + } + + public void setOboinowlid(String oboinowlid) { + this.oboinowlid = oboinowlid; + } + + public String getIri() { + return iri; + } + + public void setIri(String iri) { + this.iri = iri; + } + + public OWLOntology getOnt() { + return ont; + } + + public void setOnt(OWLOntology ont) { + this.ont = ont; + } + + public Set getActivities() { + return activities; + } + + public void setActivities(Set activities) { + this.activities = activities; + } + + public Map> getInd_types() { + return ind_types; + } + + public void setInd_types(Map> ind_types) { + this.ind_types = ind_types; + } + + public Map getInd_entity() { + return ind_entity; + } + + public void setInd_entity(Map ind_entity) { + this.ind_entity = ind_entity; + } + + public OWLClass getMf() { + return mf; + } + + public void setMf(OWLClass mf) { + this.mf = mf; + } + + public OWLClass getBp() { + return bp; + } - public void setBp(OWLClass bp) { - this.bp = bp; - } + public void setBp(OWLClass bp) { + this.bp = bp; + } - public OWLClass getCc() { - return cc; - } + public OWLClass getCc() { + return cc; + } - public void setCc(OWLClass cc) { - this.cc = cc; - } + public 
void setCc(OWLClass cc) { + this.cc = cc; + } - public GoCamModelStats getStats() { - return stats; - } + public GoCamModelStats getStats() { + return stats; + } - public void setStats(GoCamModelStats stats) { - this.stats = stats; - } + public void setStats(GoCamModelStats stats) { + this.stats = stats; + } - public Map getCausal_count() { - return causal_count; - } + public Map getCausal_count() { + return causal_count; + } - public void setCausal_count(Map causal_count) { - this.causal_count = causal_count; - } + public void setCausal_count(Map causal_count) { + this.causal_count = causal_count; + } } diff --git a/minerva-core/src/main/java/org/geneontology/minerva/model/GoCamModelStats.java b/minerva-core/src/main/java/org/geneontology/minerva/model/GoCamModelStats.java index e210f9d3..4741a419 100644 --- a/minerva-core/src/main/java/org/geneontology/minerva/model/GoCamModelStats.java +++ b/minerva-core/src/main/java/org/geneontology/minerva/model/GoCamModelStats.java @@ -1,211 +1,213 @@ package org.geneontology.minerva.model; -import java.io.IOException; -import java.util.HashSet; -import java.util.Set; - import org.apache.commons.math3.stat.descriptive.DescriptiveStatistics; import org.semanticweb.owlapi.model.OWLClass; import org.semanticweb.owlapi.model.OWLObjectProperty; +import java.io.IOException; +import java.util.HashSet; +import java.util.Set; + public class GoCamModelStats { - int n_activity_units = 0; - int n_complete_activity_units = 0; - int n_connected_processes = 0; - int n_causal_in_relation_assertions = 0; - int n_unconnected = 0; - int n_unconnected_out = 0; - int n_unconnected_in = 0; - int n_raw_mf = 0; - int n_raw_bp = 0; - int n_raw_cc = 0; - int n_no_enabler = 0; - int n_no_location = 0; - int n_no_bp = 0; - int max_connected_graph = 0; - DescriptiveStatistics mf_depth = new DescriptiveStatistics(); - DescriptiveStatistics cc_depth = new DescriptiveStatistics(); - DescriptiveStatistics bp_depth = new DescriptiveStatistics(); + int n_activity_units = 0; + int n_complete_activity_units = 0; + int n_connected_processes = 0; + int n_causal_in_relation_assertions = 0; + int n_unconnected = 0; + int n_unconnected_out = 0; + int n_unconnected_in = 0; + int n_raw_mf = 0; + int n_raw_bp = 0; + int n_raw_cc = 0; + int n_no_enabler = 0; + int n_no_location = 0; + int n_no_bp = 0; + int max_connected_graph = 0; + DescriptiveStatistics mf_depth = new DescriptiveStatistics(); + DescriptiveStatistics cc_depth = new DescriptiveStatistics(); + DescriptiveStatistics bp_depth = new DescriptiveStatistics(); + + public GoCamModelStats(GoCamModel model) { + if (model.activities == null) { + return; + } + for (ActivityUnit a : model.activities) { + n_activity_units++; + Set downstream = a.getDownstream(a, null); + for (OWLClass oc : a.direct_types) { + try { + int depth = -1; + if (model.go_lego.class_depth != null) { + if (model.go_lego.class_depth.get(oc.getIRI().toString()) != null) { + depth = model.go_lego.class_depth.get(oc.getIRI().toString()); + } else { + //the class is probably deprecated + try { + Set prob = new HashSet(); + prob.add(oc.getIRI().toString()); + Set fixed = model.go_lego.replaceDeprecated(prob); + if (fixed != prob && fixed.size() == 1) { + depth = model.go_lego.class_depth.get(fixed.iterator().next()); + } + } catch (Exception e) { + System.err.println("problem calculating depth for " + oc); + } + } + } else { + depth = model.go_lego.getClassDepth(oc.getIRI().toString(), "http://purl.obolibrary.org/obo/GO_0003674"); + } + if (depth != -1) { + 
mf_depth.addValue(depth); + } + } catch (IOException e) { + //TODO Auto-generated catch block + e.printStackTrace(); + } + } + if (downstream.size() > max_connected_graph) { + max_connected_graph = downstream.size(); + } + if (a.direct_types.contains(model.mf)) { + n_raw_mf++; + } + if (a.enablers.size() == 0) { + n_no_enabler++; + } + if (a.locations.size() == 0) { + n_no_location++; + } + for (AnatomicalEntity ae : a.locations) { + if (ae.direct_types.contains(model.cc)) { + n_raw_cc++; + } + for (OWLClass oc : ae.direct_types) { + if (oc == null || oc.isAnonymous()) { + continue; + } + try { + int depth = -1; + if (model.go_lego.class_depth != null) { + Integer d = model.go_lego.class_depth.get(oc.getIRI().toString()); + if (d != null) { + depth = d; + } + } else { + depth = model.go_lego.getClassDepth(oc.getIRI().toString(), "http://purl.obolibrary.org/obo/GO_0005575"); + } + if (depth != -1) { + cc_depth.addValue(depth); + } + } catch (IOException e) { + //TODO Auto-generated catch block + e.printStackTrace(); + } + } + } + if (a.containing_processes.size() == 0) { + n_no_bp++; + } + for (BiologicalProcessUnit bpu : a.containing_processes) { + if (bpu.direct_types.contains(model.bp)) { + n_raw_bp++; + } + for (OWLClass bp : bpu.direct_types) { + try { + int depth = -1; + if (model.go_lego.class_depth != null) { + Integer d = model.go_lego.class_depth.get(bp.getIRI().toString()); + if (d != null) { + depth = d; + } else { + System.out.println("missing " + bp.getIRI()); + } + } else { + depth = model.go_lego.getClassDepth(bp.getIRI().toString(), "http://purl.obolibrary.org/obo/GO_0008150"); + } + if (depth != -1) { + bp_depth.addValue(depth); + } + } catch (IOException e) { + //TODO Auto-generated catch block + e.printStackTrace(); + } + } + } + if (a.causal_out.size() == 0) { + n_unconnected_out++; + } + if (a.causal_in.size() == 0) { + n_unconnected_in++; + } + if (a.causal_in.size() == 0 && a.causal_out.size() == 0) { + n_unconnected++; + } + if ((a.containing_processes.size() == 1) && + (a.enablers.size() == 1) && + (a.locations.size() == 1) && + (!a.direct_types.contains(model.mf))) { + n_complete_activity_units++; + } + Set p = new HashSet(); + if (a.containing_processes != null) { + for (BiologicalProcessUnit bpu : a.containing_processes) { + p.add(bpu.individual.toString()); + } + } + n_connected_processes = p.size(); + if (a.causal_in != null) { + for (OWLObjectProperty prop : a.causal_in.keySet()) { + Set ocs = a.causal_in.get(prop); + for (GoCamOccurent oc : ocs) { + n_causal_in_relation_assertions++; + } + } + } + } + } - public GoCamModelStats(GoCamModel model) { - if(model.activities==null) { - return; - } - for(ActivityUnit a : model.activities) { - n_activity_units++; - Set downstream = a.getDownstream(a, null); - for(OWLClass oc : a.direct_types) { - try { - int depth = -1; - if(model.go_lego.class_depth!=null) { - if(model.go_lego.class_depth.get(oc.getIRI().toString())!=null) { - depth = model.go_lego.class_depth.get(oc.getIRI().toString()); - }else { - //the class is probably deprecated - try { - Set prob = new HashSet(); prob.add(oc.getIRI().toString()); - Set fixed = model.go_lego.replaceDeprecated(prob); - if(fixed!=prob&&fixed.size()==1) { - depth = model.go_lego.class_depth.get(fixed.iterator().next()); - } - }catch(Exception e) { - System.err.println("problem calculating depth for "+oc); - } - } - }else { - depth = model.go_lego.getClassDepth(oc.getIRI().toString(), "http://purl.obolibrary.org/obo/GO_0003674"); - } - if(depth!=-1) { - mf_depth.addValue(depth); 
- } - } catch (IOException e) { - //TODO Auto-generated catch block - e.printStackTrace(); - } - } - if(downstream.size()>max_connected_graph) { - max_connected_graph = downstream.size(); - } - if(a.direct_types.contains(model.mf)) { - n_raw_mf++; - } - if(a.enablers.size()==0) { - n_no_enabler++; - } - if(a.locations.size()==0) { - n_no_location++; - } - for(AnatomicalEntity ae : a.locations) { - if(ae.direct_types.contains(model.cc)) { - n_raw_cc++; - } - for(OWLClass oc : ae.direct_types) { - if(oc==null||oc.isAnonymous()) { - continue; - } - try { - int depth = -1; - if(model.go_lego.class_depth!=null) { - Integer d = model.go_lego.class_depth.get(oc.getIRI().toString()); - if(d!=null) { - depth = d; - } - }else { - depth = model.go_lego.getClassDepth(oc.getIRI().toString(), "http://purl.obolibrary.org/obo/GO_0005575"); - } - if(depth!=-1) { - cc_depth.addValue(depth); - } - } catch (IOException e) { - //TODO Auto-generated catch block - e.printStackTrace(); - } - } - } - if(a.containing_processes.size()==0) { - n_no_bp++; - } - for(BiologicalProcessUnit bpu : a.containing_processes) { - if(bpu.direct_types.contains(model.bp)) { - n_raw_bp++; - } - for(OWLClass bp : bpu.direct_types) { - try { - int depth = -1; - if(model.go_lego.class_depth!=null) { - Integer d = model.go_lego.class_depth.get(bp.getIRI().toString()); - if(d!=null) { - depth = d; - }else { - System.out.println("missing "+bp.getIRI()); - } - }else { - depth = model.go_lego.getClassDepth(bp.getIRI().toString(), "http://purl.obolibrary.org/obo/GO_0008150"); - } - if(depth!=-1) { - bp_depth.addValue(depth); - } - } catch (IOException e) { - //TODO Auto-generated catch block - e.printStackTrace(); - } - } - } - if(a.causal_out.size()==0) { - n_unconnected_out++; - } - if(a.causal_in.size()==0) { - n_unconnected_in++; - } - if(a.causal_in.size()==0&&a.causal_out.size()==0) { - n_unconnected++; - } - if((a.containing_processes.size()==1)&& - (a.enablers.size()==1)&& - (a.locations.size()==1)&& - (!a.direct_types.contains(model.mf))) { - n_complete_activity_units++; - } - Set p = new HashSet(); - if(a.containing_processes!=null) { - for(BiologicalProcessUnit bpu : a.containing_processes) { - p.add(bpu.individual.toString()); - } - } - n_connected_processes = p.size(); - if(a.causal_in!=null) { - for(OWLObjectProperty prop : a.causal_in.keySet()) { - Set ocs = a.causal_in.get(prop); - for(GoCamOccurent oc : ocs ) { - n_causal_in_relation_assertions++; - } - } - } - } - } + public String stats2string(DescriptiveStatistics stats) { + String g = ""; + g += "\t N:" + stats.getN() + "\n"; + g += "\t mean:" + stats.getMean() + "\n"; + g += "\t median:" + stats.getPercentile(50) + "\n"; + g += "\t max:" + stats.getMax() + "\n"; + g += "\t min:" + stats.getMin() + "\n"; + return g; + } - public String stats2string(DescriptiveStatistics stats) { - String g = ""; - g+="\t N:"+stats.getN()+"\n"; - g+="\t mean:"+stats.getMean()+"\n"; - g+="\t median:"+stats.getPercentile(50)+"\n"; - g+="\t max:"+stats.getMax()+"\n"; - g+="\t min:"+stats.getMin()+"\n"; - return g; - } + public String toString() { + String g = " activity units " + n_activity_units + "\n"; + g += " n complete activity units " + n_complete_activity_units + "\n"; + g += " n root MF activity units " + n_raw_mf + "\n"; + g += " n root BP process " + n_raw_bp + "\n"; + g += " n root CC locations " + n_raw_cc + "\n"; + g += " n unenabled activity units " + n_no_enabler + "\n"; + g += " n unlocated activity units " + n_no_location + "\n"; + g += " n activity units unconnected to a 
BP " + n_no_bp + "\n"; + g += " n connected biological processes " + n_connected_processes + "\n"; + g += " n causal relation assertions " + n_causal_in_relation_assertions + "\n"; + g += " n unconnected activities " + n_unconnected + "\n"; + g += " n activities with no outgoing connections " + n_unconnected_out + "\n"; + g += " n activities with no incoming connections " + n_unconnected_in + "\n"; + g += " max length of connected causal subgraph " + max_connected_graph + "\n"; + g += " descriptive statistics for depth in ontology for MF terms defining activity units \n" + stats2string(mf_depth); + g += " descriptive statistics for depth in ontology for BP terms containing activity units \n" + stats2string(bp_depth); + g += " descriptive statistics for depth in ontology for CC terms used as locations for activity units \n" + stats2string(cc_depth); + return g; + } - public String toString() { - String g =" activity units "+n_activity_units+"\n"; - g+=" n complete activity units "+n_complete_activity_units+"\n"; - g+=" n root MF activity units "+n_raw_mf+"\n"; - g+=" n root BP process "+n_raw_bp+"\n"; - g+=" n root CC locations "+n_raw_cc+"\n"; - g+=" n unenabled activity units "+n_no_enabler+"\n"; - g+=" n unlocated activity units "+n_no_location+"\n"; - g+=" n activity units unconnected to a BP "+n_no_bp+"\n"; - g+=" n connected biological processes "+n_connected_processes+"\n"; - g+=" n causal relation assertions "+n_causal_in_relation_assertions+"\n"; - g+=" n unconnected activities "+n_unconnected+"\n"; - g+=" n activities with no outgoing connections "+n_unconnected_out+"\n"; - g+=" n activities with no incoming connections "+n_unconnected_in+"\n"; - g+=" max length of connected causal subgraph "+max_connected_graph+"\n"; - g+=" descriptive statistics for depth in ontology for MF terms defining activity units \n"+stats2string(mf_depth); - g+=" descriptive statistics for depth in ontology for BP terms containing activity units \n"+stats2string(bp_depth); - g+=" descriptive statistics for depth in ontology for CC terms used as locations for activity units \n"+stats2string(cc_depth); - return g; - } + public String stats2cols() { + String r = n_activity_units + "\t" + n_complete_activity_units + "\t" + n_raw_mf + "\t" + n_raw_bp + "\t" + n_raw_cc + "\t" + n_no_enabler + "\t" + n_no_location + "\t" + n_no_bp + + "\t" + n_connected_processes + "\t" + n_causal_in_relation_assertions + "\t" + n_unconnected + "\t" + n_unconnected_out + "\t" + n_unconnected_in + "\t" + max_connected_graph + + "\t" + mf_depth.getPercentile(50) + "\t" + bp_depth.getPercentile(50) + "\t" + cc_depth.getPercentile(50); + return r; + } - public String stats2cols() { - String r = n_activity_units+"\t"+n_complete_activity_units+"\t"+n_raw_mf+"\t"+n_raw_bp+"\t"+n_raw_cc+"\t"+n_no_enabler+"\t"+n_no_location+"\t"+n_no_bp+ - "\t"+n_connected_processes+"\t"+n_causal_in_relation_assertions+"\t"+n_unconnected+"\t"+n_unconnected_out+"\t"+n_unconnected_in+"\t"+max_connected_graph+ - "\t"+mf_depth.getPercentile(50)+"\t"+bp_depth.getPercentile(50)+"\t"+cc_depth.getPercentile(50); - return r; - } - public static String statsHeader() { - String h = "activity units\tn complete activity units\tn root MF activity units\tn root BP process\tn root CC locations" - + "\tn unenabled activity units\tn unlocated activity units\tn activity units unconnected to a BP\tn connected biological processes" - + "\tn causal relation assertions\tn unconnected activities\tn activities with no outgoing connections\tn activities with no incoming 
connections" - + "\tmax length of connected causal subgraph\tmedian_depth_MF\tmedian_depth_BP\tmedian_depth_cc"; - return h; - } + public static String statsHeader() { + String h = "activity units\tn complete activity units\tn root MF activity units\tn root BP process\tn root CC locations" + + "\tn unenabled activity units\tn unlocated activity units\tn activity units unconnected to a BP\tn connected biological processes" + + "\tn causal relation assertions\tn unconnected activities\tn activities with no outgoing connections\tn activities with no incoming connections" + + "\tmax length of connected causal subgraph\tmedian_depth_MF\tmedian_depth_BP\tmedian_depth_cc"; + return h; + } } diff --git a/minerva-core/src/main/java/org/geneontology/minerva/model/GoCamOccurent.java b/minerva-core/src/main/java/org/geneontology/minerva/model/GoCamOccurent.java index 3d8924f0..afdbf8bf 100644 --- a/minerva-core/src/main/java/org/geneontology/minerva/model/GoCamOccurent.java +++ b/minerva-core/src/main/java/org/geneontology/minerva/model/GoCamOccurent.java @@ -1,65 +1,80 @@ package org.geneontology.minerva.model; -import java.util.Map; -import java.util.Set; - import org.semanticweb.owlapi.model.OWLNamedIndividual; import org.semanticweb.owlapi.model.OWLObjectProperty; import org.semanticweb.owlapi.model.OWLOntology; -public class GoCamOccurent extends GoCamEntity{ - public GoCamOccurent(OWLNamedIndividual ind, OWLOntology ont, GoCamModel model) { - super(ind, ont, model); - } - Set outputs; - Set inputs; - Set regulating_entities; - Set locations; - Set transport_locations; - //all causal links to other activities or processes - Map> causal_out; - Map> causal_in; - public Set getOutputs() { - return outputs; - } - public void setOutputs(Set outputs) { - this.outputs = outputs; - } - public Set getInputs() { - return inputs; - } - public void setInputs(Set inputs) { - this.inputs = inputs; - } - public Set getLocations() { - return locations; - } - public void setLocations(Set locations) { - this.locations = locations; - } - public Set getTransport_locations() { - return transport_locations; - } - public void setTransport_locations(Set transport_locations) { - this.transport_locations = transport_locations; - } - public Map> getCausal_out() { - return causal_out; - } - public void setCausal_out(Map> causal_out) { - this.causal_out = causal_out; - } - public Map> getCausal_in() { - return causal_in; - } - public void setCausal_in(Map> causal_in) { - this.causal_in = causal_in; - } - public Set getRegulating_entities() { - return regulating_entities; - } - public void setRegulating_entities(Set regulating_entities) { - this.regulating_entities = regulating_entities; - } - +import java.util.Map; +import java.util.Set; + +public class GoCamOccurent extends GoCamEntity { + public GoCamOccurent(OWLNamedIndividual ind, OWLOntology ont, GoCamModel model) { + super(ind, ont, model); + } + + Set outputs; + Set inputs; + Set regulating_entities; + Set locations; + Set transport_locations; + //all causal links to other activities or processes + Map> causal_out; + Map> causal_in; + + public Set getOutputs() { + return outputs; + } + + public void setOutputs(Set outputs) { + this.outputs = outputs; + } + + public Set getInputs() { + return inputs; + } + + public void setInputs(Set inputs) { + this.inputs = inputs; + } + + public Set getLocations() { + return locations; + } + + public void setLocations(Set locations) { + this.locations = locations; + } + + public Set getTransport_locations() { + return 
transport_locations; + } + + public void setTransport_locations(Set transport_locations) { + this.transport_locations = transport_locations; + } + + public Map> getCausal_out() { + return causal_out; + } + + public void setCausal_out(Map> causal_out) { + this.causal_out = causal_out; + } + + public Map> getCausal_in() { + return causal_in; + } + + public void setCausal_in(Map> causal_in) { + this.causal_in = causal_in; + } + + public Set getRegulating_entities() { + return regulating_entities; + } + + public void setRegulating_entities(Set regulating_entities) { + this.regulating_entities = regulating_entities; + } + } diff --git a/minerva-core/src/main/java/org/geneontology/minerva/model/PhysicalEntity.java b/minerva-core/src/main/java/org/geneontology/minerva/model/PhysicalEntity.java index eab55f6e..28e1b562 100644 --- a/minerva-core/src/main/java/org/geneontology/minerva/model/PhysicalEntity.java +++ b/minerva-core/src/main/java/org/geneontology/minerva/model/PhysicalEntity.java @@ -3,10 +3,10 @@ import org.semanticweb.owlapi.model.OWLNamedIndividual; import org.semanticweb.owlapi.model.OWLOntology; -public class PhysicalEntity extends GoCamEntity{ +public class PhysicalEntity extends GoCamEntity { - public PhysicalEntity(OWLNamedIndividual enabler_ind, OWLOntology ont, GoCamModel model) { - super(enabler_ind, ont, model); - } + public PhysicalEntity(OWLNamedIndividual enabler_ind, OWLOntology ont, GoCamModel model) { + super(enabler_ind, ont, model); + } } diff --git a/minerva-core/src/main/java/org/geneontology/minerva/model/ProvenanceAnnotated.java b/minerva-core/src/main/java/org/geneontology/minerva/model/ProvenanceAnnotated.java index d5c5bfe3..c2bc2ee9 100644 --- a/minerva-core/src/main/java/org/geneontology/minerva/model/ProvenanceAnnotated.java +++ b/minerva-core/src/main/java/org/geneontology/minerva/model/ProvenanceAnnotated.java @@ -4,43 +4,52 @@ public class ProvenanceAnnotated { - Set contributors; - String date; - Set provided_by; - Set comments; - Set notes; - - public String getDate() { - return date; - } - public void setDate(String date) { - this.date = date; - } - public Set getComments() { - return comments; - } - public void setComments(Set comments) { - this.comments = comments; - } - public Set getNotes() { - return notes; - } - public void setNotes(Set notes) { - this.notes = notes; - } - public Set getContributors() { - return contributors; - } - public void setContributors(Set contributors) { - this.contributors = contributors; - } - public Set getProvided_by() { - return provided_by; - } - public void setProvided_by(Set provided_by) { - this.provided_by = provided_by; - } - - + Set contributors; + String date; + Set provided_by; + Set comments; + Set notes; + + public String getDate() { + return date; + } + + public void setDate(String date) { + this.date = date; + } + + public Set getComments() { + return comments; + } + + public void setComments(Set comments) { + this.comments = comments; + } + + public Set getNotes() { + return notes; + } + + public void setNotes(Set notes) { + this.notes = notes; + } + + public Set getContributors() { + return contributors; + } + + public void setContributors(Set contributors) { + this.contributors = contributors; + } + + public Set getProvided_by() { + return provided_by; + } + + public void setProvided_by(Set provided_by) { + this.provided_by = provided_by; + } + + } diff --git a/minerva-core/src/main/java/org/geneontology/minerva/util/AmigoContextGenerator.java 
b/minerva-core/src/main/java/org/geneontology/minerva/util/AmigoContextGenerator.java index 974b32bf..702c6640 100644 --- a/minerva-core/src/main/java/org/geneontology/minerva/util/AmigoContextGenerator.java +++ b/minerva-core/src/main/java/org/geneontology/minerva/util/AmigoContextGenerator.java @@ -1,19 +1,14 @@ package org.geneontology.minerva.util; +import com.google.gson.Gson; +import org.apache.commons.io.IOUtils; +import org.geneontology.minerva.curie.DefaultCurieHandler; + import java.io.BufferedWriter; import java.io.FileWriter; import java.io.InputStreamReader; import java.net.URL; -import java.util.ArrayList; -import java.util.Collections; -import java.util.HashMap; -import java.util.List; -import java.util.Map; - -import org.apache.commons.io.IOUtils; -import org.geneontology.minerva.curie.DefaultCurieHandler; - -import com.google.gson.Gson; +import java.util.*; /** * generate: amigo_context_gen.jsonld from the json source: @@ -21,97 +16,94 @@ */ public class AmigoContextGenerator { - private final String sourceJson; - private Map existing; - - public AmigoContextGenerator(String sourceJson, Map existing) { - this.sourceJson = sourceJson; - this.existing = existing; - } - - @SuppressWarnings("unchecked") - public Map extract() throws Exception { - Map extracted = new HashMap<>(); - Gson gson = new Gson(); - List jsonList = gson.fromJson(new InputStreamReader(new URL(sourceJson).openStream()), List.class); - for (Object object : jsonList) { - if (object instanceof Map) { - Map topMap = (Map) object; - String database = (String) topMap.get("database"); - if (database != null) { - // skip existing - if (existing.containsKey(database)) { - continue; - } - List> entity_types = (List>) topMap.get("entity_types"); - if (entity_types != null) { - if (entity_types.size() == 1) { - Map entity_type = entity_types.get(0); - String url_syntax = (String) entity_type.get("url_syntax"); - if (url_syntax != null) { - int pos = url_syntax.indexOf("[example_id]"); - if (pos > 0) { - String longPrefix = url_syntax.substring(0, pos); - if (existing.containsValue(longPrefix)) { - System.out.println("Skipping: '"+database+"' conflicting longPrefix: "+longPrefix); - continue; - } - if (extracted.containsValue(longPrefix)) { - System.out.println("Skipping: '"+database+"' conflicting longPrefix: "+longPrefix); - continue; - } - extracted.put(database, longPrefix); - } - } - else { - System.out.println("Missing url_syntax for: "+database); - } - } - else { - System.out.println("Manual mapping required for: "+database); - } - - } - } - } - } - return extracted; - } - - static void writeJsonLdContext(String file, Map extracted) throws Exception { - BufferedWriter writer = null; - try { - writer = new BufferedWriter(new FileWriter(file)); - writer.append("{\n \"@context\": {\n"); - List sortedKeys = new ArrayList<>(extracted.keySet()); - Collections.sort(sortedKeys); - for (int i = 0; i < sortedKeys.size(); i++) { - if (i > 0) { - writer.append(','); - writer.newLine(); - } - String key = sortedKeys.get(i); - String value = extracted.get(key); - writer.append(" ").append('"').append(key).append("\" : \"").append(value).append('"'); - } - writer.newLine(); - writer.append(" }\n}"); - } - finally { - IOUtils.closeQuietly(writer); - } - } - - public static void main(String[] args) throws Exception { - Map existing = new HashMap<>(); - DefaultCurieHandler.loadJsonldResource("obo_context.jsonld", existing); - DefaultCurieHandler.loadJsonldResource("monarch_context.jsonld", existing); - String url = 
"http://build.berkeleybop.org/job/db-xrefs-yaml2json/lastSuccessfulBuild/artifact/db-xrefs.json"; - AmigoContextGenerator gen = new AmigoContextGenerator(url, existing); - Map extracted = gen.extract(); - writeJsonLdContext("src/main/resources/amigo_context_gen.jsonld", extracted); - + private final String sourceJson; + private Map existing; + + public AmigoContextGenerator(String sourceJson, Map existing) { + this.sourceJson = sourceJson; + this.existing = existing; + } + + @SuppressWarnings("unchecked") + public Map extract() throws Exception { + Map extracted = new HashMap<>(); + Gson gson = new Gson(); + List jsonList = gson.fromJson(new InputStreamReader(new URL(sourceJson).openStream()), List.class); + for (Object object : jsonList) { + if (object instanceof Map) { + Map topMap = (Map) object; + String database = (String) topMap.get("database"); + if (database != null) { + // skip existing + if (existing.containsKey(database)) { + continue; + } + List> entity_types = (List>) topMap.get("entity_types"); + if (entity_types != null) { + if (entity_types.size() == 1) { + Map entity_type = entity_types.get(0); + String url_syntax = (String) entity_type.get("url_syntax"); + if (url_syntax != null) { + int pos = url_syntax.indexOf("[example_id]"); + if (pos > 0) { + String longPrefix = url_syntax.substring(0, pos); + if (existing.containsValue(longPrefix)) { + System.out.println("Skipping: '" + database + "' conflicting longPrefix: " + longPrefix); + continue; + } + if (extracted.containsValue(longPrefix)) { + System.out.println("Skipping: '" + database + "' conflicting longPrefix: " + longPrefix); + continue; + } + extracted.put(database, longPrefix); + } + } else { + System.out.println("Missing url_syntax for: " + database); + } + } else { + System.out.println("Manual mapping required for: " + database); + } + + } + } + } + } + return extracted; + } + + static void writeJsonLdContext(String file, Map extracted) throws Exception { + BufferedWriter writer = null; + try { + writer = new BufferedWriter(new FileWriter(file)); + writer.append("{\n \"@context\": {\n"); + List sortedKeys = new ArrayList<>(extracted.keySet()); + Collections.sort(sortedKeys); + for (int i = 0; i < sortedKeys.size(); i++) { + if (i > 0) { + writer.append(','); + writer.newLine(); + } + String key = sortedKeys.get(i); + String value = extracted.get(key); + writer.append(" ").append('"').append(key).append("\" : \"").append(value).append('"'); + } + writer.newLine(); + writer.append(" }\n}"); + } finally { + IOUtils.closeQuietly(writer); + } + } + + public static void main(String[] args) throws Exception { + Map existing = new HashMap<>(); + DefaultCurieHandler.loadJsonldResource("obo_context.jsonld", existing); + DefaultCurieHandler.loadJsonldResource("monarch_context.jsonld", existing); + String url = "http://build.berkeleybop.org/job/db-xrefs-yaml2json/lastSuccessfulBuild/artifact/db-xrefs.json"; + AmigoContextGenerator gen = new AmigoContextGenerator(url, existing); + Map extracted = gen.extract(); + writeJsonLdContext("src/main/resources/amigo_context_gen.jsonld", extracted); + - } + } } diff --git a/minerva-core/src/main/java/org/geneontology/minerva/util/AnnotationShorthand.java b/minerva-core/src/main/java/org/geneontology/minerva/util/AnnotationShorthand.java index e3e40390..d993e680 100644 --- a/minerva-core/src/main/java/org/geneontology/minerva/util/AnnotationShorthand.java +++ b/minerva-core/src/main/java/org/geneontology/minerva/util/AnnotationShorthand.java @@ -10,77 +10,75 @@ * need to be), as full 
IRIs can still be used.
*
* This improves readability and reduces clutter in the JSON and related JS. - * */ public enum AnnotationShorthand { - - x(IRI.create("http://geneontology.org/lego/hint/layout/x"), "hint-layout-x"), - y(IRI.create("http://geneontology.org/lego/hint/layout/y"), "hint-layout-y"), - comment(OWLRDFVocabulary.RDFS_COMMENT.getIRI()), // arbitrary String - // TODO replace with annotation property "http://purl.obolibrary.org/obo/RO_0002612" 'axiom has evidence' - evidence(IRI.create("http://geneontology.org/lego/evidence")), // eco class iri + x(IRI.create("http://geneontology.org/lego/hint/layout/x"), "hint-layout-x"), + y(IRI.create("http://geneontology.org/lego/hint/layout/y"), "hint-layout-y"), + comment(OWLRDFVocabulary.RDFS_COMMENT.getIRI()), // arbitrary String - with(IRI.create("http://geneontology.org/lego/evidence-with")), // annotation prop for 'with' literals - date(IRI.create("http://purl.org/dc/elements/1.1/date")), // arbitrary string at the moment, define date format? - // DC recommends http://www.w3.org/TR/NOTE-datetime, one example format is YYYY-MM-DD - source(IRI.create("http://purl.org/dc/elements/1.1/source")), // arbitrary string, such as PMID:000000 - contributor(IRI.create("http://purl.org/dc/elements/1.1/contributor")), // who contributed to the annotation - providedBy(IRI.create("http://purl.org/pav/providedBy")), // organization supporting the annotation - title(IRI.create("http://purl.org/dc/elements/1.1/title")), // title (of the model) - deprecated(OWLRDFVocabulary.OWL_DEPRECATED.getIRI()), // model annotation to indicate deprecated models - templatestate(IRI.create("http://geneontology.org/lego/templatestate"), "template"), // designate a model as a template - modelstate(IRI.create("http://geneontology.org/lego/modelstate"), "state"); - - - private final IRI annotationProperty; - private final String othername; - - AnnotationShorthand(IRI annotationProperty) { - this(annotationProperty, null); - } - - AnnotationShorthand(IRI annotationProperty, String othername) { - this.annotationProperty = annotationProperty; - this.othername = othername; - } - - public IRI getAnnotationProperty() { - return annotationProperty; - } - - public String getShorthand() { - return othername != null ? othername : name(); - } - - public static AnnotationShorthand getShorthand(IRI iri) { - for (AnnotationShorthand type : AnnotationShorthand.values()) { - if (type.annotationProperty.equals(iri)) { - return type; - } - } - return null; - } - - public static AnnotationShorthand getShorthand(String name, CurieHandler curieHandler) { - if (name != null) { - for (AnnotationShorthand type : AnnotationShorthand.values()) { - if (type.name().equals(name) || (type.othername != null && type.othername.equals(name))) { - return type; - } - else { - final IRI iri; - try { - iri = curieHandler.getIRI(name); - if (iri.equals(type.annotationProperty)) { - return type; - } - } catch (UnknownIdentifierException e) { - continue; - } - } - } - } - return null; - } + // TODO replace with annotation property "http://purl.obolibrary.org/obo/RO_0002612" 'axiom has evidence' + evidence(IRI.create("http://geneontology.org/lego/evidence")), // eco class iri + + with(IRI.create("http://geneontology.org/lego/evidence-with")), // annotation prop for 'with' literals + date(IRI.create("http://purl.org/dc/elements/1.1/date")), // arbitrary string at the moment, define date format? 
+ // DC recommends http://www.w3.org/TR/NOTE-datetime, one example format is YYYY-MM-DD + source(IRI.create("http://purl.org/dc/elements/1.1/source")), // arbitrary string, such as PMID:000000 + contributor(IRI.create("http://purl.org/dc/elements/1.1/contributor")), // who contributed to the annotation + providedBy(IRI.create("http://purl.org/pav/providedBy")), // organization supporting the annotation + title(IRI.create("http://purl.org/dc/elements/1.1/title")), // title (of the model) + deprecated(OWLRDFVocabulary.OWL_DEPRECATED.getIRI()), // model annotation to indicate deprecated models + templatestate(IRI.create("http://geneontology.org/lego/templatestate"), "template"), // designate a model as a template + modelstate(IRI.create("http://geneontology.org/lego/modelstate"), "state"); + + + private final IRI annotationProperty; + private final String othername; + + AnnotationShorthand(IRI annotationProperty) { + this(annotationProperty, null); + } + + AnnotationShorthand(IRI annotationProperty, String othername) { + this.annotationProperty = annotationProperty; + this.othername = othername; + } + + public IRI getAnnotationProperty() { + return annotationProperty; + } + + public String getShorthand() { + return othername != null ? othername : name(); + } + + public static AnnotationShorthand getShorthand(IRI iri) { + for (AnnotationShorthand type : AnnotationShorthand.values()) { + if (type.annotationProperty.equals(iri)) { + return type; + } + } + return null; + } + + public static AnnotationShorthand getShorthand(String name, CurieHandler curieHandler) { + if (name != null) { + for (AnnotationShorthand type : AnnotationShorthand.values()) { + if (type.name().equals(name) || (type.othername != null && type.othername.equals(name))) { + return type; + } else { + final IRI iri; + try { + iri = curieHandler.getIRI(name); + if (iri.equals(type.annotationProperty)) { + return type; + } + } catch (UnknownIdentifierException e) { + continue; + } + } + } + } + return null; + } } \ No newline at end of file diff --git a/minerva-core/src/main/java/org/geneontology/minerva/util/BlazegraphMutationCounter.java b/minerva-core/src/main/java/org/geneontology/minerva/util/BlazegraphMutationCounter.java index 62c7fd85..2c925784 100644 --- a/minerva-core/src/main/java/org/geneontology/minerva/util/BlazegraphMutationCounter.java +++ b/minerva-core/src/main/java/org/geneontology/minerva/util/BlazegraphMutationCounter.java @@ -4,31 +4,36 @@ import com.bigdata.rdf.changesets.IChangeRecord; public class BlazegraphMutationCounter implements IChangeLog { - - private int records = 0; - - public int mutationCount() { - return records; - } - @Override - public void changeEvent(IChangeRecord record) { - records++; - } + private int records = 0; - @Override - public void close() {} + public int mutationCount() { + return records; + } - @Override - public void transactionAborted() {} + @Override + public void changeEvent(IChangeRecord record) { + records++; + } - @Override - public void transactionBegin() {} + @Override + public void close() { + } - @Override - public void transactionCommited(long commitTime) {} + @Override + public void transactionAborted() { + } - @Override - public void transactionPrepare() {} + @Override + public void transactionBegin() { + } + + @Override + public void transactionCommited(long commitTime) { + } + + @Override + public void transactionPrepare() { + } } diff --git a/minerva-core/src/main/java/org/geneontology/minerva/util/DebugTools.java 
b/minerva-core/src/main/java/org/geneontology/minerva/util/DebugTools.java index 245bca5f..485f7c8f 100644 --- a/minerva-core/src/main/java/org/geneontology/minerva/util/DebugTools.java +++ b/minerva-core/src/main/java/org/geneontology/minerva/util/DebugTools.java @@ -3,24 +3,20 @@ import org.apache.log4j.Logger; /** - * - * * @author dbk - * */ public class DebugTools { - private static Logger LOG = Logger.getLogger(DebugTools.class); - private static Runtime runtime = Runtime.getRuntime(); + private static Logger LOG = Logger.getLogger(DebugTools.class); + private static Runtime runtime = Runtime.getRuntime(); - public static void logMemory( - String title) - { - double bytesToMB = 1.0 / (double)(1024L*1024L); - String statusText = "DebugTools[memory] " + title; + public static void logMemory( + String title) { + double bytesToMB = 1.0 / (double) (1024L * 1024L); + String statusText = "DebugTools[memory] " + title; - runtime.gc(); - statusText += " freeMemory: " + (int) (runtime.freeMemory() * bytesToMB); - LOG.info(statusText); - } + runtime.gc(); + statusText += " freeMemory: " + (int) (runtime.freeMemory() * bytesToMB); + LOG.info(statusText); + } } diff --git a/minerva-core/src/main/java/org/geneontology/minerva/util/JenaOwlTool.java b/minerva-core/src/main/java/org/geneontology/minerva/util/JenaOwlTool.java index 284b10dc..fe792428 100644 --- a/minerva-core/src/main/java/org/geneontology/minerva/util/JenaOwlTool.java +++ b/minerva-core/src/main/java/org/geneontology/minerva/util/JenaOwlTool.java @@ -1,51 +1,51 @@ /** - * + * */ package org.geneontology.minerva.util; -import java.io.IOException; -import java.io.PipedInputStream; -import java.io.PipedOutputStream; - import org.apache.jena.rdf.model.Model; import org.apache.jena.rdf.model.ModelFactory; import org.semanticweb.owlapi.formats.TurtleDocumentFormat; import org.semanticweb.owlapi.model.OWLOntology; import org.semanticweb.owlapi.model.OWLOntologyStorageException; +import java.io.IOException; +import java.io.PipedInputStream; +import java.io.PipedOutputStream; + /** * @author bgood * */ public class JenaOwlTool { - /** - * - */ - public JenaOwlTool() { - // TODO Auto-generated constructor stub - } + /** + * + */ + public JenaOwlTool() { + // TODO Auto-generated constructor stub + } - public static Model getJenaModel(OWLOntology ontology) { - Model model = ModelFactory.createDefaultModel(); + public static Model getJenaModel(OWLOntology ontology) { + Model model = ModelFactory.createDefaultModel(); - try (PipedInputStream is = new PipedInputStream(); PipedOutputStream os = new PipedOutputStream(is)) { - new Thread(new Runnable() { - @Override - public void run() { - try { - ontology.getOWLOntologyManager().saveOntology(ontology, new TurtleDocumentFormat(), os); - os.close(); - } catch (OWLOntologyStorageException | IOException e) { - e.printStackTrace(); - } - } - }).start(); - model.read(is, null, "TURTLE"); - return model; - } catch (Exception e) { - throw new RuntimeException("Could not convert OWL API ontology to JENA API model.", e); - } - } + try (PipedInputStream is = new PipedInputStream(); PipedOutputStream os = new PipedOutputStream(is)) { + new Thread(new Runnable() { + @Override + public void run() { + try { + ontology.getOWLOntologyManager().saveOntology(ontology, new TurtleDocumentFormat(), os); + os.close(); + } catch (OWLOntologyStorageException | IOException e) { + e.printStackTrace(); + } + } + }).start(); + model.read(is, null, "TURTLE"); + return model; + } catch (Exception e) { + throw new 
RuntimeException("Could not convert OWL API ontology to JENA API model.", e); + } + } } diff --git a/minerva-core/src/main/java/org/geneontology/minerva/util/ReverseChangeGenerator.java b/minerva-core/src/main/java/org/geneontology/minerva/util/ReverseChangeGenerator.java index 0f0a3f03..02257ea9 100644 --- a/minerva-core/src/main/java/org/geneontology/minerva/util/ReverseChangeGenerator.java +++ b/minerva-core/src/main/java/org/geneontology/minerva/util/ReverseChangeGenerator.java @@ -1,28 +1,20 @@ package org.geneontology.minerva.util; +import org.semanticweb.owlapi.model.*; + import java.util.LinkedList; import java.util.List; -import org.semanticweb.owlapi.model.AddAxiom; -import org.semanticweb.owlapi.model.AddImport; -import org.semanticweb.owlapi.model.AddOntologyAnnotation; -import org.semanticweb.owlapi.model.OWLOntologyChange; -import org.semanticweb.owlapi.model.OWLOntologyChangeVisitorEx; -import org.semanticweb.owlapi.model.RemoveAxiom; -import org.semanticweb.owlapi.model.RemoveImport; -import org.semanticweb.owlapi.model.RemoveOntologyAnnotation; -import org.semanticweb.owlapi.model.SetOntologyID; - /** * Create the reverse of an {@link OWLOntologyChange}. */ public class ReverseChangeGenerator implements OWLOntologyChangeVisitorEx { - public static final ReverseChangeGenerator INSTANCE = new ReverseChangeGenerator(); - - private ReverseChangeGenerator() { - // only one instance - } + public static final ReverseChangeGenerator INSTANCE = new ReverseChangeGenerator(); + + private ReverseChangeGenerator() { + // only one instance + } public OWLOntologyChange visit(AddAxiom change) { return new RemoveAxiom(change.getOntology(), change.getAxiom()); @@ -35,38 +27,38 @@ public OWLOntologyChange visit(RemoveAxiom change) { public OWLOntologyChange visit(SetOntologyID change) { - return new SetOntologyID(change.getOntology(), change.getOriginalOntologyID()); + return new SetOntologyID(change.getOntology(), change.getOriginalOntologyID()); } public OWLOntologyChange visit(AddImport addImport) { - return new RemoveImport(addImport.getOntology(), addImport.getImportDeclaration()); + return new RemoveImport(addImport.getOntology(), addImport.getImportDeclaration()); } public OWLOntologyChange visit(RemoveImport removeImport) { - return new AddImport(removeImport.getOntology(), removeImport.getImportDeclaration()); + return new AddImport(removeImport.getOntology(), removeImport.getImportDeclaration()); } public OWLOntologyChange visit(AddOntologyAnnotation addOntologyAnnotation) { - return new RemoveOntologyAnnotation(addOntologyAnnotation.getOntology(), addOntologyAnnotation.getAnnotation()); + return new RemoveOntologyAnnotation(addOntologyAnnotation.getOntology(), addOntologyAnnotation.getAnnotation()); } public OWLOntologyChange visit(RemoveOntologyAnnotation removeOntologyAnnotation) { - return new AddOntologyAnnotation(removeOntologyAnnotation.getOntology(), removeOntologyAnnotation.getAnnotation()); + return new AddOntologyAnnotation(removeOntologyAnnotation.getOntology(), removeOntologyAnnotation.getAnnotation()); } - + public static List invertChanges(List originalChanges) { - final LinkedList invertedChanges = new LinkedList(); - for (OWLOntologyChange originalChange : originalChanges) { - OWLOntologyChange invertedChange = originalChange.accept(ReverseChangeGenerator.INSTANCE); - invertedChanges.push(invertedChange); - } - if (invertedChanges.isEmpty()) { - return null; - } - return invertedChanges; + final LinkedList invertedChanges = new LinkedList(); + for 
(OWLOntologyChange originalChange : originalChanges) { + OWLOntologyChange invertedChange = originalChange.accept(ReverseChangeGenerator.INSTANCE); + invertedChanges.push(invertedChange); + } + if (invertedChanges.isEmpty()) { + return null; + } + return invertedChanges; } } diff --git a/minerva-core/src/main/java/org/geneontology/minerva/validation/Enricher.java b/minerva-core/src/main/java/org/geneontology/minerva/validation/Enricher.java index 66759a7b..6720f2c8 100644 --- a/minerva-core/src/main/java/org/geneontology/minerva/validation/Enricher.java +++ b/minerva-core/src/main/java/org/geneontology/minerva/validation/Enricher.java @@ -1,10 +1,19 @@ /** - * + * */ package org.geneontology.minerva.validation; +import org.apache.jena.query.*; +import org.apache.jena.rdf.model.Model; +import org.apache.jena.rdf.model.ModelFactory; +import org.apache.jena.rdf.model.Resource; +import org.semanticweb.owlapi.apibinding.OWLManager; +import org.semanticweb.owlapi.model.*; +import org.semanticweb.owlapi.reasoner.OWLReasoner; +import org.semanticweb.owlapi.reasoner.OWLReasonerFactory; +import org.semanticweb.owlapi.reasoner.structural.StructuralReasonerFactory; + import java.io.File; -import java.io.FileNotFoundException; import java.io.FileOutputStream; import java.io.FilenameFilter; import java.io.IOException; @@ -13,196 +22,169 @@ import java.util.Map; import java.util.Set; -import org.apache.jena.query.Query; -import org.apache.jena.query.QueryExecution; -import org.apache.jena.query.QueryExecutionFactory; -import org.apache.jena.query.QueryFactory; -import org.apache.jena.query.QueryParseException; -import org.apache.jena.query.QuerySolution; -import org.apache.jena.query.ResultSet; -import org.apache.jena.rdf.model.Literal; -import org.apache.jena.rdf.model.Model; -import org.apache.jena.rdf.model.ModelFactory; -import org.apache.jena.rdf.model.RDFNode; -import org.apache.jena.rdf.model.Resource; -import org.apache.jena.rdf.model.Statement; -import org.apache.jena.vocabulary.DC; -import org.semanticweb.owlapi.apibinding.OWLManager; -import org.semanticweb.owlapi.model.AddImport; -import org.semanticweb.owlapi.model.IRI; -import org.semanticweb.owlapi.model.OWLAxiom; -import org.semanticweb.owlapi.model.OWLClass; -import org.semanticweb.owlapi.model.OWLDataFactory; -import org.semanticweb.owlapi.model.OWLImportsDeclaration; -import org.semanticweb.owlapi.model.OWLOntology; -import org.semanticweb.owlapi.model.OWLOntologyCreationException; -import org.semanticweb.owlapi.model.OWLOntologyManager; -import org.semanticweb.owlapi.reasoner.OWLReasoner; -import org.semanticweb.owlapi.reasoner.OWLReasonerFactory; -import org.semanticweb.owlapi.reasoner.structural.StructuralReasonerFactory; - /** * @author bgood * */ public class Enricher { - public static final String go_endpoint = "http://rdf.geneontology.org/blazegraph/sparql"; - public String extra_info_endpoint = null; - public OWLReasoner tbox_reasoner; - /** - * - */ - public Enricher(String extra_endpoint, OWLReasoner reasoner) { - if(extra_endpoint != null) { - extra_info_endpoint = extra_endpoint; - } - if(reasoner != null) { - tbox_reasoner = reasoner; - } - } + public static final String go_endpoint = "http://rdf.geneontology.org/blazegraph/sparql"; + public String extra_info_endpoint = null; + public OWLReasoner tbox_reasoner; + + /** + * + */ + public Enricher(String extra_endpoint, OWLReasoner reasoner) { + if (extra_endpoint != null) { + extra_info_endpoint = extra_endpoint; + } + if (reasoner != null) { + tbox_reasoner = reasoner; + } 
+ } + + /** + * @param args + * @throws IOException + * @throws OWLOntologyCreationException + */ + public static void main(String[] args) throws IOException, OWLOntologyCreationException { + String dir = "/Users/bgood/Desktop/test/go_cams/reactome/reactome-homosapiens-SLBP_independent_Processing_of_Histone_Pre-mRNAs.ttl"; + Map name_model = loadRDF(dir); + System.out.println("Start on " + name_model.size() + " models " + System.currentTimeMillis() / 1000); - /** - * @param args - * @throws IOException - * @throws OWLOntologyCreationException - */ - public static void main(String[] args) throws IOException, OWLOntologyCreationException { - String dir = "/Users/bgood/Desktop/test/go_cams/reactome/reactome-homosapiens-SLBP_independent_Processing_of_Histone_Pre-mRNAs.ttl"; - Map name_model = loadRDF(dir); - System.out.println("Start on "+name_model.size()+" models "+System.currentTimeMillis()/1000); - - String tbox_file_2 = "/Users/bgood/gocam_ontology/REO.owl"; - String tbox_file = "/Users/bgood/gocam_ontology/go-lego-merged-9-23-2019.owl"; - OWLOntologyManager ontman = OWLManager.createOWLOntologyManager(); - System.out.println("loading ontology"); - OWLOntology tbox = ontman.loadOntologyFromOntologyDocument(new File(tbox_file)); - System.out.println("done loading "+tbox_file); - OWLOntology tbox2 = ontman.loadOntologyFromOntologyDocument(new File(tbox_file_2)); - System.out.println("done loading "+tbox_file_2); - for(OWLAxiom a : tbox2.getAxioms()) { - ontman.addAxiom(tbox, a); - } - System.out.println("done adding axioms from "+tbox_file_2); - System.out.println("done loading, building reasoner"); - OWLReasonerFactory reasonerFactory = new StructuralReasonerFactory(); - OWLReasoner reasoner = reasonerFactory.createReasoner(tbox); + String tbox_file_2 = "/Users/bgood/gocam_ontology/REO.owl"; + String tbox_file = "/Users/bgood/gocam_ontology/go-lego-merged-9-23-2019.owl"; + OWLOntologyManager ontman = OWLManager.createOWLOntologyManager(); + System.out.println("loading ontology"); + OWLOntology tbox = ontman.loadOntologyFromOntologyDocument(new File(tbox_file)); + System.out.println("done loading " + tbox_file); + OWLOntology tbox2 = ontman.loadOntologyFromOntologyDocument(new File(tbox_file_2)); + System.out.println("done loading " + tbox_file_2); + for (OWLAxiom a : tbox2.getAxioms()) { + ontman.addAxiom(tbox, a); + } + System.out.println("done adding axioms from " + tbox_file_2); + System.out.println("done loading, building reasoner"); + OWLReasonerFactory reasonerFactory = new StructuralReasonerFactory(); + OWLReasoner reasoner = reasonerFactory.createReasoner(tbox); - System.out.println("done building reasoner, enriching"); - Enricher e = new Enricher(null, reasoner); - for(String name : name_model.keySet()) { - Model model = name_model.get(name); - model = e.enrichSuperClasses(model); - write(model, "/Users/bgood/Desktop/test/shex/enriched_go_lego_"+name); - System.out.println("done with "+name+" "+System.currentTimeMillis()/1000); - } - System.out.println("Finish on "+name_model.size()+" models "+System.currentTimeMillis()/1000); + System.out.println("done building reasoner, enriching"); + Enricher e = new Enricher(null, reasoner); + for (String name : name_model.keySet()) { + Model model = name_model.get(name); + model = e.enrichSuperClasses(model); + write(model, "/Users/bgood/Desktop/test/shex/enriched_go_lego_" + name); + System.out.println("done with " + name + " " + System.currentTimeMillis() / 1000); + } + System.out.println("Finish on " + name_model.size() + " models " + 
System.currentTimeMillis() / 1000); - } + } - public Model enrichSuperClasses(Model model) { - String getOntTerms = - "PREFIX owl: " - + "SELECT DISTINCT ?term " + - " WHERE { " + - " ?ind a owl:NamedIndividual . " + - " ?ind a ?term . " + - " FILTER(?term != owl:NamedIndividual)" + - " FILTER(isIRI(?term)) ." + - " }"; - String terms = ""; - Set term_set = new HashSet(); - try{ - QueryExecution qe = QueryExecutionFactory.create(getOntTerms, model); - ResultSet results = qe.execSelect(); + public Model enrichSuperClasses(Model model) { + String getOntTerms = + "PREFIX owl: " + + "SELECT DISTINCT ?term " + + " WHERE { " + + " ?ind a owl:NamedIndividual . " + + " ?ind a ?term . " + + " FILTER(?term != owl:NamedIndividual)" + + " FILTER(isIRI(?term)) ." + + " }"; + String terms = ""; + Set term_set = new HashSet(); + try { + QueryExecution qe = QueryExecutionFactory.create(getOntTerms, model); + ResultSet results = qe.execSelect(); - while (results.hasNext()) { - QuerySolution qs = results.next(); - Resource term = qs.getResource("term"); - terms+=("<"+term.getURI()+"> "); - term_set.add(term.getURI()); - } - qe.close(); - } catch(QueryParseException e){ - e.printStackTrace(); - } - //either get the superclasses from a reasoner here - if(tbox_reasoner!=null) { - for(String term : term_set) { - OWLClass c = - tbox_reasoner. - getRootOntology(). - getOWLOntologyManager(). - getOWLDataFactory().getOWLClass(IRI.create(term)); - Resource child = model.createResource(term); - Set supers = tbox_reasoner.getSuperClasses(c, false).getFlattened(); - for(OWLClass parent_class : supers) { - Resource parent = model.createResource(parent_class.getIRI().toString()); - model.add(model.createStatement(child, org.apache.jena.vocabulary.RDFS.subClassOf, child)); - model.add(model.createStatement(child, org.apache.jena.vocabulary.RDFS.subClassOf, parent)); - model.add(model.createStatement(child, org.apache.jena.vocabulary.RDF.type, org.apache.jena.vocabulary.OWL.Class)); - } - } - //or get them from the remote endpoint(s) - }else { - String superQuery = "" - + "PREFIX owl: " - + "PREFIX rdfs: " - + "CONSTRUCT { " + - " ?term rdfs:subClassOf ?superclass ." + - " ?term a owl:Class ." + - " }" + - " WHERE {" + - " VALUES ?term { "+terms+" } " + - " ?term rdfs:subClassOf* ?superclass ." + - " FILTER(isIRI(?superclass)) ." + - " }"; + while (results.hasNext()) { + QuerySolution qs = results.next(); + Resource term = qs.getResource("term"); + terms += ("<" + term.getURI() + "> "); + term_set.add(term.getURI()); + } + qe.close(); + } catch (QueryParseException e) { + e.printStackTrace(); + } + //either get the superclasses from a reasoner here + if (tbox_reasoner != null) { + for (String term : term_set) { + OWLClass c = + tbox_reasoner. + getRootOntology(). + getOWLOntologyManager(). 
+ getOWLDataFactory().getOWLClass(IRI.create(term)); + Resource child = model.createResource(term); + Set supers = tbox_reasoner.getSuperClasses(c, false).getFlattened(); + for (OWLClass parent_class : supers) { + Resource parent = model.createResource(parent_class.getIRI().toString()); + model.add(model.createStatement(child, org.apache.jena.vocabulary.RDFS.subClassOf, child)); + model.add(model.createStatement(child, org.apache.jena.vocabulary.RDFS.subClassOf, parent)); + model.add(model.createStatement(child, org.apache.jena.vocabulary.RDF.type, org.apache.jena.vocabulary.OWL.Class)); + } + } + //or get them from the remote endpoint(s) + } else { + String superQuery = "" + + "PREFIX owl: " + + "PREFIX rdfs: " + + "CONSTRUCT { " + + " ?term rdfs:subClassOf ?superclass ." + + " ?term a owl:Class ." + + " }" + + " WHERE {" + + " VALUES ?term { " + terms + " } " + + " ?term rdfs:subClassOf* ?superclass ." + + " FILTER(isIRI(?superclass)) ." + + " }"; - Query query = QueryFactory.create(superQuery); - try ( - QueryExecution qexec = QueryExecutionFactory.sparqlService(go_endpoint, query) ) { - qexec.execConstruct(model); - qexec.close(); - } catch(QueryParseException e){ - e.printStackTrace(); - } - if(extra_info_endpoint!=null) { - try ( - QueryExecution qexec = QueryExecutionFactory.sparqlService(extra_info_endpoint, query) ) { - qexec.execConstruct(model); - qexec.close(); - } catch(QueryParseException e){ - e.printStackTrace(); - } - } - } - return model; - } + Query query = QueryFactory.create(superQuery); + try ( + QueryExecution qexec = QueryExecutionFactory.sparqlService(go_endpoint, query)) { + qexec.execConstruct(model); + qexec.close(); + } catch (QueryParseException e) { + e.printStackTrace(); + } + if (extra_info_endpoint != null) { + try ( + QueryExecution qexec = QueryExecutionFactory.sparqlService(extra_info_endpoint, query)) { + qexec.execConstruct(model); + qexec.close(); + } catch (QueryParseException e) { + e.printStackTrace(); + } + } + } + return model; + } - public static void write(Model model, String outfilename) throws IOException { - FileOutputStream o = new FileOutputStream(outfilename); - model.write(o, "TURTLE"); - o.close(); - } + public static void write(Model model, String outfilename) throws IOException { + FileOutputStream o = new FileOutputStream(outfilename); + model.write(o, "TURTLE"); + o.close(); + } - public static Map loadRDF(String model_dir){ - Map name_model = new HashMap(); - File good_dir = new File(model_dir); - if(good_dir.isDirectory()) { - File[] good_files = good_dir.listFiles(new FilenameFilter() { - public boolean accept(File dir, String name) { - return name.endsWith(".ttl"); - } - }); - for(File good_file : good_files) { - Model model = ModelFactory.createDefaultModel() ; - model.read(good_file.getAbsolutePath()) ; - name_model.put(good_file.getName(), model); - } - }else if(good_dir.getName().endsWith(".ttl")){ - Model model = ModelFactory.createDefaultModel() ; - model.read(good_dir.getAbsolutePath()) ; - name_model.put(good_dir.getName(), model); - } - return name_model; - } + public static Map loadRDF(String model_dir) { + Map name_model = new HashMap(); + File good_dir = new File(model_dir); + if (good_dir.isDirectory()) { + File[] good_files = good_dir.listFiles(new FilenameFilter() { + public boolean accept(File dir, String name) { + return name.endsWith(".ttl"); + } + }); + for (File good_file : good_files) { + Model model = ModelFactory.createDefaultModel(); + model.read(good_file.getAbsolutePath()); + 
name_model.put(good_file.getName(), model); + } + } else if (good_dir.getName().endsWith(".ttl")) { + Model model = ModelFactory.createDefaultModel(); + model.read(good_dir.getAbsolutePath()); + name_model.put(good_dir.getName(), model); + } + return name_model; + } } diff --git a/minerva-core/src/main/java/org/geneontology/minerva/validation/ModelValidationReport.java b/minerva-core/src/main/java/org/geneontology/minerva/validation/ModelValidationReport.java index a641ce95..b3ea3a2d 100644 --- a/minerva-core/src/main/java/org/geneontology/minerva/validation/ModelValidationReport.java +++ b/minerva-core/src/main/java/org/geneontology/minerva/validation/ModelValidationReport.java @@ -1,86 +1,86 @@ /** - * + * */ package org.geneontology.minerva.validation; +import com.google.gson.annotations.SerializedName; + import java.util.HashSet; import java.util.Set; -import com.google.gson.annotations.SerializedName; - /** * @author bgood * */ public class ModelValidationReport { - final String id; - - @SerializedName("is-conformant") - boolean conformant; - final String tracker; - - @SerializedName("rule-file") - final String rulefile; - Set violations; - @SerializedName("error-message") - String error_message; - - /** - * - */ - public ModelValidationReport(String id, String tracker, String rulefile) { - this.id = id; - this.tracker = tracker; - this.rulefile = rulefile; - } - - public String getId() { - return id; - } - - public Set getViolations() { - return violations; - } - - public void setViolations(Set violations) { - this.violations = violations; - } - - public void addViolation(Violation violation) { - if(this.violations==null) { - this.violations = new HashSet(); - } - this.violations.add(violation); - } - - public boolean isConformant() { - return conformant; - } - - public void setConformant(boolean conformant) { - this.conformant = conformant; - } - - public String getTracker() { - return tracker; - } - - public String getRulefile() { - return rulefile; - } - - public String getError_message() { - return error_message; - } - - public void setError_message(String error_message) { - this.error_message = error_message; - } - - public void addViolations(Set violations) { - if(this.violations==null) { - this.violations = new HashSet(); - } - this.violations.addAll(violations); - } + final String id; + + @SerializedName("is-conformant") + boolean conformant; + final String tracker; + + @SerializedName("rule-file") + final String rulefile; + Set violations; + @SerializedName("error-message") + String error_message; + + /** + * + */ + public ModelValidationReport(String id, String tracker, String rulefile) { + this.id = id; + this.tracker = tracker; + this.rulefile = rulefile; + } + + public String getId() { + return id; + } + + public Set getViolations() { + return violations; + } + + public void setViolations(Set violations) { + this.violations = violations; + } + + public void addViolation(Violation violation) { + if (this.violations == null) { + this.violations = new HashSet(); + } + this.violations.add(violation); + } + + public boolean isConformant() { + return conformant; + } + + public void setConformant(boolean conformant) { + this.conformant = conformant; + } + + public String getTracker() { + return tracker; + } + + public String getRulefile() { + return rulefile; + } + + public String getError_message() { + return error_message; + } + + public void setError_message(String error_message) { + this.error_message = error_message; + } + + public void addViolations(Set violations) { + if 
(this.violations == null) { + this.violations = new HashSet(); + } + this.violations.addAll(violations); + } } diff --git a/minerva-core/src/main/java/org/geneontology/minerva/validation/ModelValidationResult.java b/minerva-core/src/main/java/org/geneontology/minerva/validation/ModelValidationResult.java index e2ba5d7e..d6d431f7 100644 --- a/minerva-core/src/main/java/org/geneontology/minerva/validation/ModelValidationResult.java +++ b/minerva-core/src/main/java/org/geneontology/minerva/validation/ModelValidationResult.java @@ -1,12 +1,8 @@ /** - * + * */ package org.geneontology.minerva.validation; -import java.util.HashSet; -import java.util.Map; -import java.util.Set; - import org.apache.jena.query.QueryExecution; import org.apache.jena.query.QueryExecutionFactory; import org.apache.jena.query.QuerySolution; @@ -14,41 +10,44 @@ import org.apache.jena.rdf.model.Literal; import org.apache.jena.rdf.model.Model; import org.apache.jena.rdf.model.Resource; -import org.apache.jena.vocabulary.DC; + +import java.util.Map; +import java.util.Set; /** * @author bgood * */ public class ModelValidationResult { - boolean model_is_valid; - boolean model_is_consistent; - Map> node_shapes; - Map> node_types; - Map node_report; - Map node_is_valid; - Map node_is_consistent; - String model_report; - String model_id; - String model_title; - /** - * - */ - public ModelValidationResult(Model model) { - String q = "select ?cam ?title where {" - + "?cam ?title }"; - // + "?cam <"+DC.description.getURI()+"> ?title }"; - QueryExecution qe = QueryExecutionFactory.create(q, model); - ResultSet results = qe.execSelect(); - if (results.hasNext()) { - QuerySolution qs = results.next(); - Resource id = qs.getResource("cam"); - Literal title = qs.getLiteral("title"); - model_id = id.getURI(); - model_title = title.getString(); - } - qe.close(); - model_report = "shape id\tnode uri\tvalidation status\n"; - } + boolean model_is_valid; + boolean model_is_consistent; + Map> node_shapes; + Map> node_types; + Map node_report; + Map node_is_valid; + Map node_is_consistent; + String model_report; + String model_id; + String model_title; + + /** + * + */ + public ModelValidationResult(Model model) { + String q = "select ?cam ?title where {" + + "?cam ?title }"; + // + "?cam <"+DC.description.getURI()+"> ?title }"; + QueryExecution qe = QueryExecutionFactory.create(q, model); + ResultSet results = qe.execSelect(); + if (results.hasNext()) { + QuerySolution qs = results.next(); + Resource id = qs.getResource("cam"); + Literal title = qs.getLiteral("title"); + model_id = id.getURI(); + model_title = title.getString(); + } + qe.close(); + model_report = "shape id\tnode uri\tvalidation status\n"; + } } diff --git a/minerva-core/src/main/java/org/geneontology/minerva/validation/OWLValidationReport.java b/minerva-core/src/main/java/org/geneontology/minerva/validation/OWLValidationReport.java index 86d15721..f130b5e2 100644 --- a/minerva-core/src/main/java/org/geneontology/minerva/validation/OWLValidationReport.java +++ b/minerva-core/src/main/java/org/geneontology/minerva/validation/OWLValidationReport.java @@ -2,21 +2,19 @@ public class OWLValidationReport extends ModelValidationReport { - public static final String report_type_id = "OWL_REASONER"; - public static final String tracker = "https://github.com/geneontology/helpdesk/issues"; - public static final String rulefile = "https://github.com/geneontology/go-ontology"; - + public static final String report_type_id = "OWL_REASONER"; + public static final String tracker = 
"https://github.com/geneontology/helpdesk/issues"; + public static final String rulefile = "https://github.com/geneontology/go-ontology"; - - public OWLValidationReport() { - super(report_type_id, tracker, rulefile); - } + public OWLValidationReport() { + super(report_type_id, tracker, rulefile); + } - public String getAsText() { - String e = "A human readable explanation of any OWL inconsistencies ought to go here."; - return e; - } + public String getAsText() { + String e = "A human readable explanation of any OWL inconsistencies ought to go here."; + return e; + } } diff --git a/minerva-core/src/main/java/org/geneontology/minerva/validation/ShexConstraint.java b/minerva-core/src/main/java/org/geneontology/minerva/validation/ShexConstraint.java index e6d9aec6..a46d4dc7 100644 --- a/minerva-core/src/main/java/org/geneontology/minerva/validation/ShexConstraint.java +++ b/minerva-core/src/main/java/org/geneontology/minerva/validation/ShexConstraint.java @@ -1,117 +1,116 @@ /** - * + * */ package org.geneontology.minerva.validation; -import java.util.Set; - import com.google.gson.annotations.SerializedName; -import fr.inria.lille.shexjava.util.Interval; +import java.util.Set; /** * @author bgood * */ public class ShexConstraint { - String object; - String property; - Set node_types; - Set object_types; - String cardinality; - int nobjects; - - @SerializedName("matched_range_shapes") - Set matched_range_shapes; - - - - public String getCardinality() { - return cardinality; - } - - public void setCardinality(String cardinality) { - this.cardinality = cardinality; - } - - public int getNobjects() { - return nobjects; - } - - public void setNobjects(int nobjects) { - this.nobjects = nobjects; - } - - public Set getMatched_range_shapes() { - return matched_range_shapes; - } - - public void setMatched_range_shapes(Set matched_range_shapes) { - this.matched_range_shapes = matched_range_shapes; - } - - public Set getObject_types() { - return object_types; - } - - public void setObject_types(Set object_types) { - this.object_types = object_types; - } - - public Set getNode_types() { - return node_types; - } - - public void setNode_types(Set node_types) { - this.node_types = node_types; - } - - @SerializedName("intended-range-shapes") - Set intended_range_shapes; - /** - * @param node_types - * @param object_types - * - */ - - - public ShexConstraint(String object, String property, Set intended_range_shapes, Set node_types, Set object_types) { - super(); - this.object = object; - this.property = property; - this.intended_range_shapes = intended_range_shapes; - this.node_types = node_types; - this.object_types = object_types; - } - - public ShexConstraint(String property, String cardinality, int nobjects) { - super(); - this.property = property; - this.cardinality = cardinality; - this.nobjects = nobjects; - } - - public Set getIntended_range_shapes() { - return intended_range_shapes; - } - public void setIntended_range_shapes(Set intended_range_shapes) { - this.intended_range_shapes = intended_range_shapes; - } - - public String getObject() { - return object; - } - - public void setObject(String object) { - this.object = object; - } - - public String getProperty() { - return property; - } - - public void setProperty(String property) { - this.property = property; - } + String object; + String property; + Set node_types; + Set object_types; + String cardinality; + int nobjects; + + @SerializedName("matched_range_shapes") + Set matched_range_shapes; + + + public String getCardinality() { + return 
cardinality; + } + + public void setCardinality(String cardinality) { + this.cardinality = cardinality; + } + + public int getNobjects() { + return nobjects; + } + + public void setNobjects(int nobjects) { + this.nobjects = nobjects; + } + + public Set getMatched_range_shapes() { + return matched_range_shapes; + } + + public void setMatched_range_shapes(Set matched_range_shapes) { + this.matched_range_shapes = matched_range_shapes; + } + + public Set getObject_types() { + return object_types; + } + + public void setObject_types(Set object_types) { + this.object_types = object_types; + } + + public Set getNode_types() { + return node_types; + } + + public void setNode_types(Set node_types) { + this.node_types = node_types; + } + + @SerializedName("intended-range-shapes") + Set intended_range_shapes; + + /** + * @param node_types + * @param object_types + * + */ + + + public ShexConstraint(String object, String property, Set intended_range_shapes, Set node_types, Set object_types) { + super(); + this.object = object; + this.property = property; + this.intended_range_shapes = intended_range_shapes; + this.node_types = node_types; + this.object_types = object_types; + } + + public ShexConstraint(String property, String cardinality, int nobjects) { + super(); + this.property = property; + this.cardinality = cardinality; + this.nobjects = nobjects; + } + + public Set getIntended_range_shapes() { + return intended_range_shapes; + } + + public void setIntended_range_shapes(Set intended_range_shapes) { + this.intended_range_shapes = intended_range_shapes; + } + + public String getObject() { + return object; + } + + public void setObject(String object) { + this.object = object; + } + + public String getProperty() { + return property; + } + + public void setProperty(String property) { + this.property = property; + } } diff --git a/minerva-core/src/main/java/org/geneontology/minerva/validation/ShexExplanation.java b/minerva-core/src/main/java/org/geneontology/minerva/validation/ShexExplanation.java index 08bc71b2..006725c9 100644 --- a/minerva-core/src/main/java/org/geneontology/minerva/validation/ShexExplanation.java +++ b/minerva-core/src/main/java/org/geneontology/minerva/validation/ShexExplanation.java @@ -1,5 +1,5 @@ /** - * + * */ package org.geneontology.minerva.validation; @@ -11,34 +11,42 @@ * */ public class ShexExplanation { - String shape; - Set constraints; - String errorMessage; - /** - * - */ - public ShexExplanation() { - constraints = new HashSet(); - } - public String getShape() { - return shape; - } - public void setShape(String shape) { - this.shape = shape; - } - public Set getConstraints() { - return constraints; - } - public void setConstraints(Set constraints) { - this.constraints = constraints; - } - public void addConstraint(ShexConstraint constraint) { - this.constraints.add(constraint); - } - public String getErrorMessage() { - return errorMessage; - } - public void setErrorMessage(String error) { - this.errorMessage = error; - } + String shape; + Set constraints; + String errorMessage; + + /** + * + */ + public ShexExplanation() { + constraints = new HashSet(); + } + + public String getShape() { + return shape; + } + + public void setShape(String shape) { + this.shape = shape; + } + + public Set getConstraints() { + return constraints; + } + + public void setConstraints(Set constraints) { + this.constraints = constraints; + } + + public void addConstraint(ShexConstraint constraint) { + this.constraints.add(constraint); + } + + public String getErrorMessage() { + return 
errorMessage; + } + + public void setErrorMessage(String error) { + this.errorMessage = error; + } } diff --git a/minerva-core/src/main/java/org/geneontology/minerva/validation/ShexValidationReport.java b/minerva-core/src/main/java/org/geneontology/minerva/validation/ShexValidationReport.java index 56015a8e..4b5e10dd 100644 --- a/minerva-core/src/main/java/org/geneontology/minerva/validation/ShexValidationReport.java +++ b/minerva-core/src/main/java/org/geneontology/minerva/validation/ShexValidationReport.java @@ -1,90 +1,84 @@ /** - * + * */ package org.geneontology.minerva.validation; +import com.google.gson.annotations.SerializedName; + import java.util.HashMap; import java.util.Map; import java.util.Set; -import org.apache.jena.query.QueryExecution; -import org.apache.jena.query.QueryExecutionFactory; -import org.apache.jena.query.QuerySolution; -import org.apache.jena.query.ResultSet; -import org.apache.jena.rdf.model.Literal; -import org.apache.jena.rdf.model.Model; -import org.apache.jena.rdf.model.Resource; - -import com.google.gson.annotations.SerializedName; - /** * @author bgood * */ -public class ShexValidationReport extends ModelValidationReport{ - @SerializedName("report-type") - public static final String report_type_id = "SHEX_CORE_SCHEMA"; - public static final String tracker = "https://github.com/geneontology/go-shapes/issues"; +public class ShexValidationReport extends ModelValidationReport { + @SerializedName("report-type") + public static final String report_type_id = "SHEX_CORE_SCHEMA"; + public static final String tracker = "https://github.com/geneontology/go-shapes/issues"; + + @SerializedName("rule-file") + public static final String rulefile = "https://github.com/geneontology/go-shapes/blob/master/shapes/go-cam-shapes.shex"; + + @SerializedName("node-matched-shapes") + public Map> node_matched_shapes = new HashMap>(); - @SerializedName("rule-file") - public static final String rulefile = "https://github.com/geneontology/go-shapes/blob/master/shapes/go-cam-shapes.shex"; + /** + * + */ + public ShexValidationReport() { + super(null, tracker, rulefile); + } - @SerializedName("node-matched-shapes") - public Map> node_matched_shapes = new HashMap>(); - /** - * - */ - public ShexValidationReport() { - super(null, tracker, rulefile); - } + public String getAsText() { + String report = "report type id = " + report_type_id + "\nrulefile = " + rulefile + "\ntracker = " + tracker + "\n"; + if (conformant) { + report += "No errors detected"; + return report; + } + report += getViolations().size() + " noncomformant nodes detected:\n"; + for (Violation violation : getViolations()) { + report += "node: " + violation.getNode() + " "; + ShexViolation sv = (ShexViolation) violation; + for (ShexExplanation e : sv.getExplanations()) { + report += "was expected to match shape: " + e.shape; + report += " but did not fit the following constraints:"; + for (ShexConstraint c : e.getConstraints()) { + report += "\n\tthe objects of assertions made with " + c.getProperty() + " should be nodes that fit the one of these shapes: "; + report += "\n\t\t" + c.getIntended_range_shapes(); + report += "\n\t\tbut, sadly, the object " + c.getObject() + " of one such assertion emanating from the failing node here did not.\n"; + } + } + } + return report; + } - public String getAsText() { - String report = "report type id = "+report_type_id+"\nrulefile = "+rulefile+"\ntracker = "+tracker+"\n"; - if(conformant) { - report+="No errors detected"; - return report; - } - report+=getViolations().size()+" 
noncomformant nodes detected:\n"; - for(Violation violation : getViolations()) { - report+="node: "+violation.getNode()+" "; - ShexViolation sv = (ShexViolation) violation; - for(ShexExplanation e : sv.getExplanations()) { - report+="was expected to match shape: "+e.shape; - report+=" but did not fit the following constraints:"; - for(ShexConstraint c : e.getConstraints()) { - report+="\n\tthe objects of assertions made with "+c.getProperty()+" should be nodes that fit the one of these shapes: "; - report+="\n\t\t"+c.getIntended_range_shapes(); - report+="\n\t\tbut, sadly, the object "+c.getObject()+" of one such assertion emanating from the failing node here did not.\n"; - } - } - } - return report; - } - public String getAsTab(String prefix) { - if(conformant) { - return "conformant\n"; - } - String report = ""; - if(getViolations()==null) { - return "noncomformant (no explanation)\n"; - } - for(Violation violation : getViolations()) { - ShexViolation sv = (ShexViolation) violation; - for(ShexExplanation e : sv.getExplanations()) { - String error = e.getErrorMessage(); - if(error!=null) { - report+=prefix+"\t"+violation.getNode()+"\t"+error+"\t\t\t\t\t\n"; + public String getAsTab(String prefix) { + if (conformant) { + return "conformant\n"; + } + String report = ""; + if (getViolations() == null) { + return "noncomformant (no explanation)\n"; + } + for (Violation violation : getViolations()) { + ShexViolation sv = (ShexViolation) violation; + for (ShexExplanation e : sv.getExplanations()) { + String error = e.getErrorMessage(); + if (error != null) { + report += prefix + "\t" + violation.getNode() + "\t" + error + "\t\t\t\t\t\n"; - }else { - for(ShexConstraint c : e.getConstraints()) { - report+=prefix+"\t"+violation.getNode()+"\t"+c.getNode_types()+"\t"+c.getProperty()+"\t"+c.getIntended_range_shapes()+"\t"+c.getObject()+"\t"+c.getObject_types()+"\t"+c.getMatched_range_shapes()+"\n"; - } - } - } - } - return report; - } + } else { + for (ShexConstraint c : e.getConstraints()) { + report += prefix + "\t" + violation.getNode() + "\t" + c.getNode_types() + "\t" + c.getProperty() + "\t" + c.getIntended_range_shapes() + "\t" + c.getObject() + "\t" + c.getObject_types() + "\t" + c.getMatched_range_shapes() + "\n"; + } + } + } + } + return report; + } } diff --git a/minerva-core/src/main/java/org/geneontology/minerva/validation/ShexValidator.java b/minerva-core/src/main/java/org/geneontology/minerva/validation/ShexValidator.java index 0d8c5dee..404cf49c 100644 --- a/minerva-core/src/main/java/org/geneontology/minerva/validation/ShexValidator.java +++ b/minerva-core/src/main/java/org/geneontology/minerva/validation/ShexValidator.java @@ -1,1031 +1,993 @@ /** - * + * */ package org.geneontology.minerva.validation; -import java.io.BufferedReader; -import java.io.File; -import java.io.FileOutputStream; -import java.io.FileReader; -import java.io.IOException; -import java.util.HashMap; -import java.util.HashSet; -import java.util.Map; -import java.util.Set; -import java.util.concurrent.ExecutionException; -import java.util.concurrent.ExecutorService; -import java.util.concurrent.Executors; -import java.util.concurrent.Future; -import java.util.concurrent.TimeUnit; -import java.util.concurrent.TimeoutException; - +import fr.inria.lille.shexjava.schema.Label; +import fr.inria.lille.shexjava.schema.ShexSchema; +import fr.inria.lille.shexjava.schema.abstrsynt.*; +import fr.inria.lille.shexjava.schema.parsing.GenParser; +import fr.inria.lille.shexjava.util.Interval; +import 
fr.inria.lille.shexjava.util.Pair; +import fr.inria.lille.shexjava.validation.RecursiveValidationWithMemorization; +import fr.inria.lille.shexjava.validation.RefineValidation; +import fr.inria.lille.shexjava.validation.Status; +import fr.inria.lille.shexjava.validation.Typing; import org.apache.commons.lang3.StringUtils; import org.apache.commons.rdf.api.RDF; import org.apache.commons.rdf.api.RDFTerm; import org.apache.commons.rdf.jena.JenaGraph; import org.apache.commons.rdf.jena.JenaRDF; import org.apache.commons.rdf.simple.SimpleRDF; -import org.apache.jena.query.Query; -import org.apache.jena.query.QueryExecution; -import org.apache.jena.query.QueryExecutionFactory; -import org.apache.jena.query.QueryFactory; -import org.apache.jena.query.QueryParseException; -import org.apache.jena.query.QuerySolution; -import org.apache.jena.query.ResultSet; -import org.apache.jena.rdf.model.Literal; -import org.apache.jena.rdf.model.Model; -import org.apache.jena.rdf.model.Property; -import org.apache.jena.rdf.model.RDFNode; -import org.apache.jena.rdf.model.Resource; -import org.apache.jena.rdf.model.Statement; -import org.apache.jena.rdf.model.StmtIterator; -import org.apache.jena.vocabulary.RDFS; +import org.apache.jena.query.*; +import org.apache.jena.rdf.model.*; import org.apache.log4j.Logger; import org.geneontology.minerva.BlazegraphOntologyManager; import org.geneontology.minerva.curie.CurieHandler; import org.semanticweb.owlapi.model.IRI; -import org.semanticweb.owlapi.model.OWLClass; -import org.semanticweb.owlapi.reasoner.OWLReasoner; -import fr.inria.lille.shexjava.schema.Label; -import fr.inria.lille.shexjava.schema.ShexSchema; -import fr.inria.lille.shexjava.schema.abstrsynt.EachOf; -import fr.inria.lille.shexjava.schema.abstrsynt.NodeConstraint; -import fr.inria.lille.shexjava.schema.abstrsynt.RepeatedTripleExpression; -import fr.inria.lille.shexjava.schema.abstrsynt.Shape; -import fr.inria.lille.shexjava.schema.abstrsynt.ShapeAnd; -import fr.inria.lille.shexjava.schema.abstrsynt.ShapeExpr; -import fr.inria.lille.shexjava.schema.abstrsynt.ShapeExprRef; -import fr.inria.lille.shexjava.schema.abstrsynt.ShapeOr; -import fr.inria.lille.shexjava.schema.abstrsynt.TCProperty; -import fr.inria.lille.shexjava.schema.abstrsynt.TripleConstraint; -import fr.inria.lille.shexjava.schema.abstrsynt.TripleExpr; -import fr.inria.lille.shexjava.schema.abstrsynt.TripleExprRef; -import fr.inria.lille.shexjava.schema.parsing.GenParser; -import fr.inria.lille.shexjava.util.Interval; -import fr.inria.lille.shexjava.util.Pair; -import fr.inria.lille.shexjava.validation.RecursiveValidation; -import fr.inria.lille.shexjava.validation.RecursiveValidationWithMemorization; -import fr.inria.lille.shexjava.validation.RefineValidation; -import fr.inria.lille.shexjava.validation.Status; -import fr.inria.lille.shexjava.validation.Typing; +import java.io.BufferedReader; +import java.io.File; +import java.io.FileReader; +import java.io.IOException; +import java.util.HashMap; +import java.util.HashSet; +import java.util.Map; +import java.util.Set; +import java.util.concurrent.*; /** * @author bgood * */ public class ShexValidator { - private static final Logger LOGGER = Logger.getLogger(ShexValidator.class); - public ShexSchema schema; - public Map GoQueryMap; - // public OWLReasoner tbox_reasoner; - private BlazegraphOntologyManager go_lego_repo; - public static final String endpoint = "http://rdf.geneontology.org/blazegraph/sparql"; - public Map>> shape_expected_property_ranges; - public Map> 
shape_expected_property_cardinality; - Map tripexprlabel_cardinality; - public CurieHandler curieHandler; - public RDF rdfFactory; - public final int timeout_mill = 30000; - - /** - * @throws Exception - * - */ - public ShexValidator(String shexpath, String goshapemappath, BlazegraphOntologyManager go_lego, CurieHandler curieHandler_) throws Exception { - init(new File(shexpath), new File(goshapemappath), go_lego, curieHandler_); - } - - public ShexValidator(File shex_schema_file, File shex_map_file, BlazegraphOntologyManager go_lego, CurieHandler curieHandler_) throws Exception { - init(shex_schema_file, shex_map_file, go_lego, curieHandler_); - } - - public void init(File shex_schema_file, File shex_map_file, BlazegraphOntologyManager go_lego, CurieHandler curieHandler_) throws Exception { - schema = GenParser.parseSchema(shex_schema_file.toPath()); - GoQueryMap = makeGoQueryMap(shex_map_file.getAbsolutePath()); - //tbox_reasoner = tbox_reasoner_; - setGo_lego_repo(go_lego); - shape_expected_property_ranges = new HashMap>>(); - shape_expected_property_cardinality = new HashMap>(); - tripexprlabel_cardinality = new HashMap(); - curieHandler = curieHandler_; - rdfFactory = new SimpleRDF(); - for(String shapelabel : GoQueryMap.keySet()) { - if(shapelabel.equals("http://purl.obolibrary.org/obo/go/shapes/AnnotatedEdge")) { - continue; - } - Label shape_label = new Label(rdfFactory.createIRI(shapelabel)); - ShapeExpr rule = schema.getRules().get(shape_label); - Map> expected_property_ranges = getPropertyRangeMap(shape_label, rule, null); - shape_expected_property_ranges.put(shape_label, expected_property_ranges); - } - LOGGER.info("shex validator ready"); - } - - public static Map makeGoQueryMap(String shapemap_file) throws IOException{ - Map shapelabel_sparql = new HashMap(); - BufferedReader reader = new BufferedReader(new FileReader(shapemap_file)); - String line = reader.readLine(); - String all = line; - while(line!=null) { - all+=line; - line = reader.readLine(); - } - reader.close(); - String[] maps = all.split(","); - for(String map : maps) { - String sparql = StringUtils.substringBetween(map, "'", "'"); - sparql = sparql.replace("a/", " ?c . 
?c "); - String[] shapemaprow = map.split("@"); - String shapelabel = shapemaprow[1]; - shapelabel = shapelabel.replace(">", ""); - shapelabel = shapelabel.replace("<", ""); - shapelabel = shapelabel.trim(); - shapelabel_sparql.put(shapelabel, sparql); - } - return shapelabel_sparql; - } - - public ShexValidationReport runShapeMapValidation(Model test_model) { - boolean explain = true; - ShexValidationReport r = new ShexValidationReport(); - JenaRDF jr = new JenaRDF(); - //this shex implementation likes to use the commons JenaRDF interface, nothing exciting here - JenaGraph shexy_graph = jr.asGraph(test_model); - boolean all_good = true; - try { - Typing all_typed = runRefineWithTimeout(shexy_graph); - if(all_typed!=null) { - //filter to most specific tests - Map> node_s_shapes = getShapesToTestForEachResource(test_model); - for(Resource node : node_s_shapes.keySet()) { - Set shapes = node_s_shapes.get(node); - for(String shapelabel : shapes) { - Label shape_label = new Label(rdfFactory.createIRI(shapelabel)); - RDFTerm focus_node = null; - if(node.isURIResource()) { - focus_node = rdfFactory.createIRI(node.getURI()); - }else { - focus_node = rdfFactory.createBlankNode(node.getId().getLabelString()); - } - if(!all_typed.isConformant(focus_node, shape_label)) { - //something didn't match expectations - all_good = false; - //try to explain the mismatch - if(explain) { - Violation violation; - try { - violation = getViolationForMismatch(shape_label, node, all_typed, test_model); - r.addViolation(violation); - } catch (IOException e) { - // TODO Auto-generated catch block - e.printStackTrace(); - } - } - } - //run our local CLOSE check - //TODO remove if we implement closed directly - if(explain) { - Set extra_violations; - try { - extra_violations = checkForExtraProperties(node, test_model, shape_label, all_typed); - if(extra_violations!=null&&!extra_violations.isEmpty()) { - r.addViolations(extra_violations); - all_good = false; - } - } catch (IOException e) { - // TODO Auto-generated catch block - e.printStackTrace(); - } - } - } - } - }else { - //validation failed - all_good = false; - r.setError_message("validation (with Refine algorithm) failed or timed out for this model"); - } - }finally { - try { - //make sure to free up resources here. 
- shexy_graph.close(); - } catch (Exception e) { - // TODO Auto-generated catch block - e.printStackTrace(); - } - } - r.conformant = all_good; - return r; - } - - private Violation getViolationForMismatch(Label shape_label, Resource focus_node, Typing typing, Model test_model) throws IOException { - - RDFTerm rdfterm = null; - if(focus_node.isURIResource()) { - rdfterm = rdfFactory.createIRI(focus_node.getURI()); - }else { - rdfterm = rdfFactory.createIRI(focus_node.toString()); - } - Status status = typing.getStatus(rdfterm, shape_label); - if(status.equals(Status.NONCONFORMANT)) { - //implementing a start on a generic violation report structure here - ShexViolation violation = new ShexViolation(getCurie(focus_node.toString())); - ShexExplanation explanation = new ShexExplanation(); - String shape_curie = getCurie(shape_label.stringValue()); - explanation.setShape(shape_curie); - Set unmet_constraints = getUnmetConstraints(focus_node, shape_label, test_model, typing); - if(unmet_constraints!=null) { - for(ShexConstraint constraint : unmet_constraints) { - explanation.addConstraint(constraint); - violation.addExplanation(explanation); - } - }else { - explanation.setErrorMessage("explanation computation timed out"); - violation.addExplanation(explanation); - } - return violation; - }else if(status.equals(Status.NOTCOMPUTED)) { - //if any of these are not computed, there is a problem - String error = focus_node+" was not tested against "+shape_label; - LOGGER.error(error); - }else if(status.equals(Status.CONFORMANT)) { - LOGGER.error("node is valid, should not be here trying to make a violation"); - } - - // else { - LOGGER.error("tried to explain shape violation on anonymous node: "+shape_label+" "+focus_node); - StmtIterator node_statements = test_model.listStatements(focus_node.asResource(), null, (RDFNode) null); - if(node_statements.hasNext()) { - while(node_statements.hasNext()) { - Statement s = node_statements.next(); - } - } - StmtIterator literal_statements = test_model.listStatements(focus_node.asResource(), null, (Literal) null); - if(literal_statements.hasNext()) { - while(literal_statements.hasNext()) { - Statement s = literal_statements.next(); - } - } - // } - return null; - } - - public Violation getTimeoutViolation(String node, String shapelabel) { - ShexViolation violation = new ShexViolation(node); - ShexExplanation explanation = new ShexExplanation(); - String shape_curie = getCurie(shapelabel); - explanation.setShape(shape_curie); - explanation.setErrorMessage("validation timed out"); - violation.addExplanation(explanation); - return violation; - } - - - public ShexValidationReport runShapeMapValidationWithRecursiveSingleNodeValidation(Model test_model, boolean stream_output) throws Exception { - ShexValidationReport r = new ShexValidationReport(); - JenaRDF jr = new JenaRDF(); - //this shex implementation likes to use the commons JenaRDF interface, nothing exciting here - JenaGraph shexy_graph = jr.asGraph(test_model); - //recursive only checks the focus node against the chosen shape. - RecursiveValidationWithMemorization shex_model_validator = new RecursiveValidationWithMemorization(schema, shexy_graph); - //for each shape in the query map (e.g. MF, BP, CC, etc.) 
- - boolean all_good = true; - Map> node_s_shapes = getShapesToTestForEachResource(test_model); - - for(Resource focus_node_resource : node_s_shapes.keySet()) { - Set shape_nodes = node_s_shapes.get(focus_node_resource); - - for(String shapelabel : shape_nodes) { - Label shape_label = new Label(rdfFactory.createIRI(shapelabel)); - if(focus_node_resource==null) { - System.out.println("null focus node for shape "+shape_label); - continue; - } - //check for use of properties not defined for this shape (okay if OPEN, not if CLOSED) - Typing typing = validateNodeWithTimeout(shex_model_validator, focus_node_resource, shape_label); - - if(typing!=null) { - Set extra_prop_violations = checkForExtraProperties(focus_node_resource, test_model, shape_label, typing); - if(extra_prop_violations != null && !extra_prop_violations.isEmpty()) { - for(Violation v : extra_prop_violations) { - r.addViolation(v); - } - all_good = false; - } - //run the validation on the node if possible.. - RDFTerm focus_node = null; - String focus_node_id = ""; - if(focus_node_resource.isURIResource()) { - focus_node = rdfFactory.createIRI(focus_node_resource.getURI()); - focus_node_id = focus_node_resource.getURI(); - }else { - focus_node = rdfFactory.createBlankNode(focus_node_resource.getId().getLabelString()); - focus_node_id = focus_node_resource.getId().getLabelString(); - } - //deal with curies for output - String node = focus_node_id; - node = getCurie(focus_node_id); - Status status = typing.getStatus(focus_node, shape_label); - if(status.equals(Status.CONFORMANT)) { - Set shape_ids = r.node_matched_shapes.get(node); - if(shape_ids==null) { - shape_ids = new HashSet(); - } - shape_ids.add(shapelabel); - r.node_matched_shapes.put(node, shape_ids); - }else if(status.equals(Status.NONCONFORMANT)) { - all_good = false; - //implementing a start on a generic violation report structure here - ShexViolation violation = new ShexViolation(node); - ShexExplanation explanation = new ShexExplanation(); - String shape_curie = getCurie(shapelabel); - explanation.setShape(shape_curie); - Set unmet_constraints = getUnmetConstraints(focus_node_resource, shape_label, test_model, typing); - if(unmet_constraints!=null) { - for(ShexConstraint constraint : unmet_constraints) { - explanation.addConstraint(constraint); - violation.addExplanation(explanation); - } - }else { - explanation.setErrorMessage("explanation computation timed out"); - violation.addExplanation(explanation); - } - r.addViolation(violation); - }else if(status.equals(Status.NOTCOMPUTED)) { - //if any of these are not computed, there is a problem - String error = focus_node_id+" was not tested against "+shapelabel; - LOGGER.error(error); - } - }else { - LOGGER.info("shex validation failed for node "+focus_node_resource.getURI()); - all_good = false; - ShexViolation violation = new ShexViolation(focus_node_resource.getURI()); - ShexExplanation explanation = new ShexExplanation(); - explanation.setErrorMessage("Validating this node was canceled because it took more then "+timeout_mill+" milliseconds"); - String shape_curie = getCurie(shapelabel); - explanation.setShape(shape_curie); - violation.addExplanation(explanation); - r.addViolation(violation); - } - } - } - if(all_good) { - r.conformant = true; - }else { - r.conformant = false; - } - return r; - } - - private Map> getShapesToTestForEachResource(Model test_model) { - Map> node_shapes = new HashMap>(); - for(String shapelabel : GoQueryMap.keySet()) { - //not quite the same pattern as the other shapes - //TODO needs more 
work - if(shapelabel.equals("http://purl.obolibrary.org/obo/go/shapes/AnnotatedEdge")) { - continue; - } - //get the nodes in this model that SHOULD match the shape - Set focus_nodes = getFocusNodesBySparql(test_model, GoQueryMap.get(shapelabel)); - //shape_nodes.put(shapelabel, focus_nodes); - - for(Resource focus_node : focus_nodes) { - Set shapes = node_shapes.get(focus_node); - if(shapes==null) { - shapes = new HashSet(); - } - shapes.add(shapelabel); - node_shapes.put(focus_node, shapes); - } - } - //prune to only test the most specific shapes - //TODO - do it once up front - Map> node_s_shapes = new HashMap>(); - for(Resource node : node_shapes.keySet()) { - Set shapes = node_shapes.get(node); - Set shapes_to_remove = new HashSet(); - for(String shape1 : shapes) { - Set shape1_nodes = getFocusNodesBySparql(test_model, GoQueryMap.get(shape1)); - for(String shape2 : shapes) { - if(shape1.equals(shape2)) { - continue; - } - Set shape2_nodes = getFocusNodesBySparql(test_model, GoQueryMap.get(shape2)); - //if shape1 contains all of shape2 - e.g. mf would contain all transporter activity - if(shape1_nodes.containsAll(shape2_nodes)) { - //then remove shape1 from this resource (as shape2 is more specific). - shapes_to_remove.add(shape1); - } - } - } - shapes.removeAll(shapes_to_remove); - node_s_shapes.put(node, shapes); - } - return node_s_shapes; - } - - private Typing runRefineWithTimeout(JenaGraph shexy_graph) { - final ExecutorService service = Executors.newSingleThreadExecutor(); - try { - final Future f = service.submit(() -> { - RefineValidation refine = new RefineValidation(schema, shexy_graph); - refine.validate(); - Typing all = refine.getTyping(); - return all; - }); - Typing typing = f.get(timeout_mill, TimeUnit.MILLISECONDS); - return typing; - - } catch (final TimeoutException e) { - LOGGER.error("shex refine all validation took to long "); - service.shutdownNow(); - return null; - } catch (InterruptedException e) { - LOGGER.error("And we have Refine an interrupted exception: "); - e.printStackTrace(); - service.shutdownNow(); - return null; - } catch (ExecutionException e) { - LOGGER.error("And we have a Refine execution exception: "); - e.printStackTrace(); - service.shutdownNow(); - return null; - } finally { - service.shutdown(); - } - } - - public Typing validateNodeWithTimeout(RecursiveValidationWithMemorization shex_model_validator, Resource focus_node_resource, Label shape_label) { - RDFTerm focus_node = null; - String focus_node_id = ""; - if(focus_node_resource.isURIResource()) { - focus_node = rdfFactory.createIRI(focus_node_resource.getURI()); - focus_node_id = focus_node_resource.getURI(); - }else { - focus_node = rdfFactory.createBlankNode(focus_node_resource.getId().getLabelString()); - focus_node_id = focus_node_resource.getId().getLabelString(); - } - //deal with curies for output - String node = focus_node_id; - node = getCurie(focus_node_id); - //this can take a while - give up if it gets stuck - //limit total time to avoid service death on some weird edge case - final ExecutorService service = Executors.newSingleThreadExecutor(); - final RDFTerm test_node = focus_node; - try { - final Future f = service.submit(() -> { - boolean is_valid = shex_model_validator.validate(test_node, shape_label); - if(is_valid) { - return shex_model_validator.getTyping(); - }else { - return null; - } - - }); - Typing typing = f.get(timeout_mill, TimeUnit.MILLISECONDS); - return typing; - - } catch (final TimeoutException e) { - LOGGER.error("shex validation took to long for 
"+focus_node_resource); - service.shutdownNow(); - return null; - } catch (InterruptedException e) { - LOGGER.error("And we have an interrupted exception: "+test_node+" "+shape_label); - e.printStackTrace(); - service.shutdownNow(); - return null; - } catch (ExecutionException e) { - LOGGER.error("And we have an execution exception: "+test_node+" "+shape_label); - e.printStackTrace(); - service.shutdownNow(); - return null; - } finally { - service.shutdown(); - } - } - - public static Set getFocusNodesBySparql(Model model, String sparql){ - Set nodes = new HashSet(); - QueryExecution qe = QueryExecutionFactory.create(sparql, model); - ResultSet results = qe.execSelect(); - while (results.hasNext()) { - QuerySolution qs = results.next(); - Resource node = qs.getResource("x"); - nodes.add(node); - } - qe.close(); - return nodes; - } - - /** - * Check each focus node for the use of any properties that don't appear in the shape - * @param focus_nodes - * @param model - * @param shape_label - * @return - * @throws IOException - */ - public Set checkForExtraProperties(Resource node_r, Model model, Label shape_label, Typing typing) throws IOException{ - Set violations = new HashSet(); - Set allowed_properties = this.shape_expected_property_ranges.get(shape_label).keySet(); - Set actual_properties = new HashSet(); - Map prop_value = new HashMap(); //don't really care if there are multiple values, one will do. - String sparql = "select distinct ?prop ?value where{ <"+node_r.getURI()+"> ?prop ?value }"; - QueryExecution qe = QueryExecutionFactory.create(sparql, model); - ResultSet results = qe.execSelect(); - while (results.hasNext()) { - QuerySolution qs = results.next(); - Resource prop = qs.getResource("prop"); - RDFNode value = qs.get("value"); - actual_properties.add(prop.getURI()); - prop_value.put(prop.getURI(), value); - } - qe.close(); - actual_properties.removeAll(allowed_properties); - if(!actual_properties.isEmpty()) { - ShexViolation extra = new ShexViolation(getCurie(node_r.getURI())); - Set explanations = new HashSet(); - for(String prop : actual_properties) { - String value = "value"; - boolean value_is_uri = false; - if(prop_value.get(prop).isResource()) { - value = prop_value.get(prop).asResource().getURI(); - value_is_uri = true; - }else if(prop_value.get(prop).isLiteral()) { - value = prop_value.get(prop).asLiteral().getString(); - } - ShexExplanation extra_explain = new ShexExplanation(); - extra_explain.setShape(getCurie(shape_label.stringValue())); - Set intended_range_shapes = new HashSet(); - //For this CLOSED test, no shape fits in intended. Any use of the property here would be incorrect. - intended_range_shapes.add("owl:Nothing"); - Set node_types = getNodeTypes(model, node_r.getURI()); - Set object_types = null; - //TODO consider here. extra info but not really meaningful - anything in the range would be wrong. 
- Set matched_range_shapes = null; - if(value_is_uri) { - object_types = getNodeTypes(model, value); - RDFTerm node = rdfFactory.createIRI(value); - matched_range_shapes = getAllMatchedShapes(node, typing); - } - String report_prop = getCurie(prop); - ShexConstraint c = new ShexConstraint(value, report_prop, intended_range_shapes, node_types, object_types); - c.setMatched_range_shapes(matched_range_shapes); - Set cs = new HashSet(); - cs.add(c); - extra_explain.setConstraints(cs); - explanations.add(extra_explain); - } - extra.setExplanations(explanations); - violations.add(extra); - } - return violations; - - } - - public Model enrichSuperClasses(Model model) throws IOException { - LOGGER.info("model size before reasoner expansion: "+model.size()); - String getOntTerms = - "PREFIX owl: " - + "SELECT DISTINCT ?term " + - " WHERE { " + - " { " + - " ?ind a owl:NamedIndividual . " + - " ?ind a ?term . " + - " } " + - " UNION" + - " {" + - " ?term a owl:Class ." + - " }" + - " FILTER(?term != owl:NamedIndividual)" + - " FILTER(isIRI(?term)) ." + - " }"; - String terms = ""; - Set term_set = new HashSet(); - try{ - QueryExecution qe = QueryExecutionFactory.create(getOntTerms, model); - ResultSet results = qe.execSelect(); - - while (results.hasNext()) { - QuerySolution qs = results.next(); - Resource term = qs.getResource("term"); - terms+=("<"+term.getURI()+"> "); - term_set.add(term.getURI()); - } - qe.close(); - } catch(QueryParseException e){ - e.printStackTrace(); - } - if(getGo_lego_repo()!=null) { - Map> term_parents = getGo_lego_repo().getSuperClassMap(term_set); - for(String term : term_set) { - Resource child = model.createResource(term); - for(String parent_class : term_parents.get(term)) { - Resource parent = model.createResource(parent_class); - model.add(model.createStatement(child, org.apache.jena.vocabulary.RDFS.subClassOf, child)); - model.add(model.createStatement(child, org.apache.jena.vocabulary.RDFS.subClassOf, parent)); - model.add(model.createStatement(child, org.apache.jena.vocabulary.RDF.type, org.apache.jena.vocabulary.OWL.Class)); - } - } - } - // if(tbox_reasoner!=null) { - // for(String term : term_set) { - // OWLClass c = - // tbox_reasoner. - // getRootOntology(). - // getOWLOntologyManager(). - // getOWLDataFactory().getOWLClass(IRI.create(term)); - // Resource child = model.createResource(term); - // Set supers = tbox_reasoner.getSuperClasses(c, false).getFlattened(); - // for(OWLClass parent_class : supers) { - // Resource parent = model.createResource(parent_class.getIRI().toString()); - // model.add(model.createStatement(child, org.apache.jena.vocabulary.RDFS.subClassOf, child)); - // model.add(model.createStatement(child, org.apache.jena.vocabulary.RDFS.subClassOf, parent)); - // model.add(model.createStatement(child, org.apache.jena.vocabulary.RDF.type, org.apache.jena.vocabulary.OWL.Class)); - // } - // } - // } - // else { - // String superQuery = "" - // + "PREFIX owl: " - // + "PREFIX rdfs: " - // + "CONSTRUCT { " + - // " ?term rdfs:subClassOf ?superclass ." + - // " ?term a owl:Class ." + - // " }" + - // " WHERE {" + - // " VALUES ?term { "+terms+" } " + - // " ?term rdfs:subClassOf* ?superclass ." + - // " FILTER(isIRI(?superclass)) ." 
+ - // " }"; - // - // Query query = QueryFactory.create(superQuery); - // try ( - // QueryExecution qexec = QueryExecutionFactory.sparqlService(endpoint, query) ) { - // qexec.execConstruct(model); - // qexec.close(); - // } catch(QueryParseException e){ - // e.printStackTrace(); - // } - // } - //LOGGER.info("model size after reasoner expansion: "+model.size()); - return model; - } - - public Set getNodeTypes(Model model, String node_uri) throws IOException { - - String getOntTerms = - "PREFIX rdf: " - + "PREFIX owl: " - + "SELECT DISTINCT ?type ?is_negated " + - " WHERE { " + - "{ <"+node_uri+"> rdf:type ?type . FILTER(isIRI(?type)) VALUES ?is_negated { false } }" + - "UNION" + - "{ <"+node_uri+"> rdf:type ?blank . ?blank owl:complementOf ?type . FILTER(isBlank(?blank) && isIRI(?type)) VALUES ?is_negated { true } }" + - "FILTER(?type != owl:NamedIndividual)" + - " }"; - Set types = new HashSet(); - try{ - QueryExecution qe = QueryExecutionFactory.create(getOntTerms, model); - ResultSet results = qe.execSelect(); - while (results.hasNext()) { - QuerySolution qs = results.next(); - Resource type = qs.getResource("type"); - boolean isNegated = qs.getLiteral("is_negated").getBoolean(); - String typeCurie = getCurie(type.getURI()); - if (isNegated) { - types.add("NOT(" + typeCurie + ")"); - } else types.add(typeCurie); - // OWLClass t = tbox_reasoner.getRootOntology().getOWLOntologyManager().getOWLDataFactory().getOWLClass(IRI.create(type.getURI())); - // for(OWLClass p : tbox_reasoner.getSuperClasses(t, false).getFlattened()) { - // String type_curie = getCurie(p.getIRI().toString()); - // types.add(type_curie); - // } - //this slows it down a lot... - //should not be needed - //Set supers = getGo_lego_repo().getSuperClasses(type.getURI()); - //for(String s : supers) { - // String type_curie = getCurie(s); - // types.add(type_curie); - //} - - } - qe.close(); - } catch(QueryParseException e){ - LOGGER.error(getOntTerms); - e.printStackTrace(); - } - - return types; - } - - /** - * We no there is a problem with the focus node and the shape here. This tries to figure out the constraints that caused the problem and provide some explanation. - * @param focus_node - * @param shape_label - * @param model - * @param typing - * @return - * @throws IOException - */ - private Set getUnmetConstraints(Resource focus_node, Label shape_label, Model model, Typing typing) throws IOException { - Set unmet_constraints = new HashSet(); - Set node_types = getNodeTypes(model, focus_node.getURI()); - Map> expected_property_ranges = shape_expected_property_ranges.get(shape_label); - //get a map from properties to actual shapes of the asserted objects - // JenaRDF jr = new JenaRDF(); - // JenaGraph shexy_graph = jr.asGraph(model); - // RecursiveValidationWithMemorization shex_model_validator = new RecursiveValidationWithMemorization(schema, shexy_graph); - - //get the focus node in the rdf model - //check for assertions with properties in the target shape - for(String prop_uri : expected_property_ranges.keySet()) { - Property prop = model.getProperty(prop_uri); - Interval cardinality = shape_expected_property_cardinality.get(shape_label).get(prop_uri); - //checking on objects of this property for the problem node. 
- int n_objects = 0; - for (StmtIterator i = focus_node.listProperties(prop); i.hasNext(); ) { - while(i.hasNext()) { - n_objects++; - RDFNode obj = i.nextStatement().getObject(); - //check the computed shapes for this individual - if(!obj.isResource()) { - continue; - //no checks on literal values at this time - }else if(prop_uri.equals("http://www.w3.org/1999/02/22-rdf-syntax-ns#type")&&obj.asResource().getURI().equals("http://www.w3.org/2002/07/owl#NamedIndividual")) { - continue; //ignore type owl individual - } - RDFTerm range_obj = rdfFactory.createIRI(obj.asResource().getURI()); - //does it hit any allowable shapes? - boolean good = false; - //TODO many property ranges are MISSING from previous step - //e.g. any OR will not show up here. - Set expected_ranges = expected_property_ranges.get(prop_uri); - for(String target_shape_uri : expected_ranges) { - if(target_shape_uri.equals(".")) { - //anything is fine - good = true; - //break; - }else if(target_shape_uri.trim().equals("")) { - //ignore syntax type checking for now - good = true; - //break; - } - Label target_shape_label = new Label(rdfFactory.createIRI(target_shape_uri)); - // Typing typing = validateNodeWithTimeout(shex_model_validator, obj.asResource(), shape_label); - if(typing!=null) { - //capture the result - //Typing shape_test = shex_model_validator.getTyping(); - //Pair p = new Pair(range_obj, target_shape_label); - //Status r = shape_test.getStatusMap().get(p); - Status r = typing.getStatus(range_obj, target_shape_label); - if(r!=null&&r.equals(Status.CONFORMANT)) { - good = true; - //break; - } - }else { - good = false; - } - } - if(!good) { //add violated range constraint to explanation - if(obj.isURIResource()) { - String object = obj.toString(); - Set object_types = getNodeTypes(model, obj.toString()); - - String property = prop.toString(); - object = getCurie(object); - property = getCurie(property); - Set expected = new HashSet(); - for(String e : expected_property_ranges.get(prop_uri)) { - String curie_e = getCurie(e); - expected.add(curie_e); - } - ShexConstraint constraint = new ShexConstraint(object, property, expected, node_types, object_types); - //return all shapes that are matched by this node for explanation - Set obj_matched_shapes = getAllMatchedShapes(range_obj, typing); - constraint.setMatched_range_shapes(obj_matched_shapes); - unmet_constraints.add(constraint); - }else { - ShexConstraint constraint = new ShexConstraint(obj.toString(), getCurie(prop.toString()), null, node_types, null); - //return all shapes that are matched by this node for explanation - Set obj_matched_shapes = getAllMatchedShapes(range_obj, typing); - constraint.setMatched_range_shapes(obj_matched_shapes); - unmet_constraints.add(constraint); - } - } - } - } - //check for cardinality violations - if(!prop_uri.contentEquals("http://www.w3.org/1999/02/22-rdf-syntax-ns#type")) { //skip types - should always allow multiple.. - if(!cardinality.contains(n_objects)) { - System.out.println("cardinality violation!"); - System.out.println("problem node "+focus_node); - System.out.println("prop "+prop); - System.out.println("Intended Interval "+cardinality.toString()); - System.out.println("Actual "+n_objects); - ShexConstraint constraint = new ShexConstraint(getCurie(prop.toString()), cardinality.toString(), n_objects); - unmet_constraints.add(constraint); - } - } - } - - return unmet_constraints; - } - - public Set getAllMatchedShapes(RDFTerm value, Typing typing){ - Set

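The two timeout wrappers above, runRefineWithTimeout and validateNodeWithTimeout, both guard the shexjava call with a single-thread executor: the validation runs as a Future, the caller waits at most timeout_mill (30 seconds), and a null Typing tells the reporting code that validation failed or timed out, so one pathological model cannot hang the whole service. A minimal self-contained sketch of that pattern follows; the class name and method layout are illustrative, while ShexSchema, RefineValidation, and Typing are the same shexjava types used in the patch.

import fr.inria.lille.shexjava.schema.ShexSchema;
import fr.inria.lille.shexjava.validation.RefineValidation;
import fr.inria.lille.shexjava.validation.Typing;
import org.apache.commons.rdf.jena.JenaGraph;

import java.util.concurrent.*;

// Sketch only: the timeout wrapper used around long-running ShEx validation calls.
class RefineWithTimeoutSketch {
    static final int TIMEOUT_MILL = 30000; // same 30-second budget as ShexValidator.timeout_mill

    // Returns the computed Typing, or null when validation fails or exceeds the budget.
    static Typing runRefineWithTimeout(ShexSchema schema, JenaGraph graph) {
        final ExecutorService service = Executors.newSingleThreadExecutor();
        try {
            Future<Typing> f = service.submit(() -> {
                RefineValidation refine = new RefineValidation(schema, graph);
                refine.validate();          // can be slow on large models
                return refine.getTyping();
            });
            return f.get(TIMEOUT_MILL, TimeUnit.MILLISECONDS);
        } catch (TimeoutException | InterruptedException | ExecutionException e) {
            service.shutdownNow();          // stop the stuck worker thread
            return null;                    // callers treat null as "failed or timed out"
        } finally {
            service.shutdown();
        }
    }
}

The per-node path uses the same wrapper, only with shex_model_validator.validate(test_node, shape_label) inside the Callable.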
+ * Uses System.err to print the error message. + */ + public void fail() { + System.err.println("cannot process: " + args[i]); + System.exit(1); + + } + + /** + * Write an info message. WARNING: This will terminate the VM. + * Do NOT use in a framework. + *
<p>
+ * Uses System.out to print the message. + * + * @param params + * @param desc + */ + public void info(String params, String desc) { + if (this.nextArgIsHelp()) { + System.out.println(args[i - 2] + " " + params + "\t " + desc); + System.exit(0); + } + } } \ No newline at end of file diff --git a/minerva-core/src/main/java/owltools/gaf/eco/EcoMapper.java b/minerva-core/src/main/java/owltools/gaf/eco/EcoMapper.java index 1c727678..1bf5a902 100644 --- a/minerva-core/src/main/java/owltools/gaf/eco/EcoMapper.java +++ b/minerva-core/src/main/java/owltools/gaf/eco/EcoMapper.java @@ -1,66 +1,66 @@ package owltools.gaf.eco; -import java.util.Map; -import java.util.Set; - import org.semanticweb.owlapi.model.IRI; import org.semanticweb.owlapi.model.OWLClass; +import java.util.Map; +import java.util.Set; + public interface EcoMapper { - - /** - * Permanent URL for the evidence code ontology (ECO) owl file. - */ - public static final String ECO_PURL = "http://purl.obolibrary.org/obo/eco.owl"; - - /** - * IRI for the evidence code ontology (ECO) owl file. - */ - public static final IRI ECO_PURL_IRI = IRI.create(ECO_PURL); - - /** - * Permanent URL for the mapping of GO evidence codes to ECO classes - */ - public static final String ECO_MAPPING_PURL = "http://purl.obolibrary.org/obo/eco/gaf-eco-mapping.txt"; - /** - * Retrieve the equivalent ECO class for the given GO evidence code. Assume, that the reference is 'default'. - * - * @param code - * @return {@link OWLClass} or null - */ - public OWLClass getEcoClassForCode(String code); - - /** - * Retrieve the ECO classes for the given GO evidence code. Include the classes to be used with more specific references. - * - * @param code - * @return set of classes, never null - */ - public Set getAllEcoClassesForCode(String code); - - /** - * Retrieve the ECO class for the given GO evidence code and reference. If reference is null, assume default. - * - * @param code - * @param refCode - * @return {@link OWLClass} or null - */ - public OWLClass getEcoClassForCode(String code, String refCode); - - - /** - * Check that the given GO code is a valid code with an existing mapping to ECO - * - * @param code - * @return true if the code is a valid - */ - public boolean isGoEvidenceCode(String code); - - /** - * Retrieve the mapping from ECO classes to GO evidence codes. - * - * @return mapping - */ - public Map getCodesForEcoClasses(); + /** + * Permanent URL for the evidence code ontology (ECO) owl file. + */ + public static final String ECO_PURL = "http://purl.obolibrary.org/obo/eco.owl"; + + /** + * IRI for the evidence code ontology (ECO) owl file. + */ + public static final IRI ECO_PURL_IRI = IRI.create(ECO_PURL); + + /** + * Permanent URL for the mapping of GO evidence codes to ECO classes + */ + public static final String ECO_MAPPING_PURL = "http://purl.obolibrary.org/obo/eco/gaf-eco-mapping.txt"; + + /** + * Retrieve the equivalent ECO class for the given GO evidence code. Assume, that the reference is 'default'. + * + * @param code + * @return {@link OWLClass} or null + */ + public OWLClass getEcoClassForCode(String code); + + /** + * Retrieve the ECO classes for the given GO evidence code. Include the classes to be used with more specific references. + * + * @param code + * @return set of classes, never null + */ + public Set getAllEcoClassesForCode(String code); + + /** + * Retrieve the ECO class for the given GO evidence code and reference. If reference is null, assume default. 
+ * + * @param code + * @param refCode + * @return {@link OWLClass} or null + */ + public OWLClass getEcoClassForCode(String code, String refCode); + + + /** + * Check that the given GO code is a valid code with an existing mapping to ECO + * + * @param code + * @return true if the code is a valid + */ + public boolean isGoEvidenceCode(String code); + + /** + * Retrieve the mapping from ECO classes to GO evidence codes. + * + * @return mapping + */ + public Map getCodesForEcoClasses(); } diff --git a/minerva-core/src/main/java/owltools/gaf/eco/EcoMapperFactory.java b/minerva-core/src/main/java/owltools/gaf/eco/EcoMapperFactory.java index 0b6e95ca..eb36943b 100644 --- a/minerva-core/src/main/java/owltools/gaf/eco/EcoMapperFactory.java +++ b/minerva-core/src/main/java/owltools/gaf/eco/EcoMapperFactory.java @@ -1,575 +1,537 @@ package owltools.gaf.eco; -import java.io.File; -import java.io.FileReader; -import java.io.IOException; -import java.io.InputStream; -import java.io.Reader; -import java.io.StringReader; -import java.net.HttpURLConnection; -import java.net.URL; -import java.util.HashMap; -import java.util.HashSet; -import java.util.List; -import java.util.Map; -import java.util.Map.Entry; -import java.util.Set; - +import com.google.common.base.Optional; import org.apache.commons.io.IOUtils; import org.apache.commons.lang3.StringUtils; import org.apache.commons.lang3.tuple.Pair; import org.geneontology.minerva.MinervaOWLGraphWrapper; import org.semanticweb.elk.owlapi.ElkReasonerFactory; -import org.semanticweb.owlapi.model.IRI; -import org.semanticweb.owlapi.model.OWLClass; -import org.semanticweb.owlapi.model.OWLException; -import org.semanticweb.owlapi.model.OWLOntology; -import org.semanticweb.owlapi.model.OWLOntologyID; -import org.semanticweb.owlapi.model.OWLOntologyManager; +import org.semanticweb.owlapi.model.*; import org.semanticweb.owlapi.reasoner.OWLReasoner; import org.semanticweb.owlapi.reasoner.OWLReasonerFactory; - -import com.google.common.base.Optional; - import owltools.io.ParserWrapper; +import java.io.*; +import java.net.HttpURLConnection; +import java.net.URL; +import java.util.*; +import java.util.Map.Entry; + /** * Factory to create instances of {@link EcoMapper} and {@link TraversingEcoMapper}. */ public class EcoMapperFactory { - - private static final OWLReasonerFactory reasonerFactor = new ElkReasonerFactory(); - - private EcoMapperFactory() { - // private constructor, no instances allowed - } - - public static class OntologyMapperPair { - - private final MinervaOWLGraphWrapper graph; - private final MAPPER mapper; - - /** - * @param graph - * @param mapper - */ - OntologyMapperPair(MinervaOWLGraphWrapper graph, MAPPER mapper) { - this.graph = graph; - this.mapper = mapper; - } - - /** - * @return the graph - */ - public MinervaOWLGraphWrapper getGraph() { - return graph; - } - - /** - * @return the mapper - */ - public MAPPER getMapper() { - return mapper; - } - } - - /** - * Create a new {@link SimpleEcoMapper} with from the mapping loaded from - * the PURL. - * - * @return mapper - * @throws IOException - * - * @see EcoMapper#ECO_MAPPING_PURL - */ - public static SimpleEcoMapper createSimple() throws IOException { - return createSimple(EcoMapper.ECO_MAPPING_PURL); - } - - /** - * Create a new {@link SimpleEcoMapper} with from the mapping loaded from - * the given source. 
- * - * @param source - * @return mapper - * @throws IOException - */ - public static SimpleEcoMapper createSimple(String source) throws IOException { - return createSimpleMapper(createReader(source)); - } - - /** - * Create an instance of a {@link EcoMapper}. Uses a separate parser. Load - * the ECO and mappings using their PURLs. - * - * @return mapper pair - * @throws OWLException - * @throws IOException - * - * @see EcoMapper#ECO_PURL - * @see EcoMapper#ECO_MAPPING_PURL - */ - public static OntologyMapperPair createEcoMapper() throws OWLException, IOException { - return createEcoMapper(new ParserWrapper()); - } - - /** - * Create an instance of a {@link EcoMapper}. Uses a the manager to load ECO via the - * PURL. Load mappings using the PURL. - * @param m - * - * @return mapper pair - * @throws OWLException - * @throws IOException - * - * @see EcoMapper#ECO_PURL - * @see EcoMapper#ECO_MAPPING_PURL - */ - public static OntologyMapperPair createEcoMapper(OWLOntologyManager m) throws OWLException, IOException { - ParserWrapper p = new ParserWrapper(); - p.setManager(m); - return createEcoMapper(p); - } - - /** - * Create an instance of a {@link EcoMapper}. Uses the given - * {@link ParserWrapper} to load the ontology. Retrieves ECO and the - * mappings using their PURLs. - * - * @param p - * @return mapper pair - * @throws OWLException - * @throws IOException - * - * @see EcoMapper#ECO_PURL - * @see EcoMapper#ECO_MAPPING_PURL - */ - public static OntologyMapperPair createEcoMapper(ParserWrapper p) throws OWLException, IOException { - return createEcoMapper(p, EcoMapper.ECO_PURL); - } - - /** - * Create an instance of a {@link EcoMapper}. Uses the given - * {@link ParserWrapper} to load the ontology. Retrieves ECO from the given location and the - * mapping from the PURL. - * - * @param p - * @param location - * @return mapper pair - * @throws OWLException - * @throws IOException - * - * @see EcoMapper#ECO_MAPPING_PURL - */ - public static OntologyMapperPair createEcoMapper(ParserWrapper p, String location) throws OWLException, IOException { - final OWLOntology eco = p.parseOWL(location); - final MinervaOWLGraphWrapper graph = new MinervaOWLGraphWrapper(eco); - final EcoMapper mapper = createEcoMapper(graph); - final OntologyMapperPair pair = new OntologyMapperPair(graph, mapper); - return pair ; - } - - /** - * Create an instance of a {@link EcoMapper}. Retrieves the mappings using - * the PURL. - * - * @param graph graph containing ECO - * @return mapper - * @throws IOException - * - * @see EcoMapper#ECO_MAPPING_PURL - */ - public static EcoMapper createEcoMapper(MinervaOWLGraphWrapper graph) throws IOException { - Reader reader = null; - try { - reader = createReader(EcoMapper.ECO_MAPPING_PURL); - EcoMappings mappings = loadEcoMappings(reader, graph); - return createEcoMapper(mappings); - } - finally { - IOUtils.closeQuietly(reader); - } - } - - static EcoMapper createEcoMapper(EcoMappings mappings) { - return new EcoMapperImpl(mappings); - } - - /** - * Create a {@link TraversingEcoMapper} instance using a new - * {@link ParserWrapper} to load ECO. ECO and the mappings are retrieved - * using their PURLs. - *
<p>
- * Creates an ELK reasoner to be used in the traversal methods. Use - * {@link TraversingEcoMapper#dispose()} to ensure proper cleanup of the ELK - * worker thread pool. - * - * @return mapper pair - * @throws OWLException - * @throws IOException - * - * @see EcoMapper#ECO_PURL - * @see EcoMapper#ECO_MAPPING_PURL - */ - public static OntologyMapperPair createTraversingEcoMapper() throws OWLException, IOException { - return createTraversingEcoMapper(new ParserWrapper()); - } - - /** - * Create a {@link TraversingEcoMapper} instance using the given - * {@link ParserWrapper} to load ECO. ECO and the mappings are retrieved - * using their PURLs. - *
<p>
- * Creates an ELK reasoner to be used in the traversal methods. Use - * {@link TraversingEcoMapper#dispose()} to ensure proper cleanup of the ELK - * worker thread pool. - * - * @param p - * @return mapper - * @throws OWLException - * @throws IOException - * - * @see EcoMapper#ECO_PURL - * @see EcoMapper#ECO_MAPPING_PURL - */ - public static OntologyMapperPair createTraversingEcoMapper(ParserWrapper p) throws OWLException, IOException { - return createTraversingEcoMapper(p, EcoMapper.ECO_PURL); - } - - /** - * Create a {@link TraversingEcoMapper} instance using the given - * {@link ParserWrapper} to load ECO from the given location. The mappings - * are retrieved using the PURL. - *
<p>
- * Creates an ELK reasoner to be used in the traversal methods. Use - * {@link TraversingEcoMapper#dispose()} to ensure proper cleanup of the ELK - * worker thread pool. - * - * @param p - * @param location - * @return mapper - * @throws OWLException - * @throws IOException - * - * @see EcoMapper#ECO_MAPPING_PURL - */ - public static OntologyMapperPair createTraversingEcoMapper(ParserWrapper p, String location) throws OWLException, IOException { - OWLOntology eco = p.parseOWL(EcoMapper.ECO_PURL_IRI); - OWLReasoner reasoner = reasonerFactor.createReasoner(eco); - Reader reader = null; - try { - MinervaOWLGraphWrapper ecoGraph = new MinervaOWLGraphWrapper(eco); - reader = createReader(EcoMapper.ECO_MAPPING_PURL); - final TraversingEcoMapper mapper = createTraversingEcoMapper(reader, ecoGraph, reasoner, true); - return new OntologyMapperPair(ecoGraph, mapper); - } - finally { - IOUtils.closeQuietly(reader); - } - } - - /** - * Create a {@link TraversingEcoMapper} instance using the given - * {@link MinervaOWLGraphWrapper}. It is assumed that ECO can be retrieved from the - * graph using its default IRI. The mappings are retrieved using the PURL. - *
<p>
- * Uses the given reasoner in the traversal methods. If disposeReasoner is - * set to true, dispose also the reasoner, while calling - * {@link TraversingEcoMapper#dispose()}. - * - * @param all - * graph containing all ontologies, including ECO - * @param reasoner - * reasoner capable of traversing ECO - * @param disposeReasoner - * set to true if the reasoner should be disposed, when calling - * {@link TraversingEcoMapper#dispose()} - * @return mapper - * @throws IOException - * @throws OWLException - * @throws IllegalArgumentException - * throw when the reasoner is null, or the - * {@link MinervaOWLGraphWrapper} does not contain ECO. - * - * @see EcoMapper#ECO_PURL_IRI - * @see EcoMapper#ECO_MAPPING_PURL - */ - public static TraversingEcoMapper createTraversingEcoMapper(MinervaOWLGraphWrapper all, OWLReasoner reasoner, boolean disposeReasoner) throws IOException, OWLException { - - // This has bitten me, so let's try and be specific... - if( reasoner == null ) { - throw new IllegalArgumentException("No reasoner was specified for use with the EcoTools. Add a reasoner for the command line"); - } - - OWLOntology eco = null; - - // assume the graph wrapper is more than eco - // try to find ECO by its purl - Set allOntologies = all.getAllOntologies(); - for (OWLOntology owlOntology : allOntologies) { - OWLOntologyID id = owlOntology.getOntologyID(); - Optional ontologyIRI = id.getOntologyIRI(); - if (ontologyIRI.isPresent()) { - if (EcoMapper.ECO_PURL_IRI.equals(ontologyIRI.get())) { - eco = owlOntology; - } - } - } - if (eco == null) { - throw new IllegalArgumentException("The specified graph did not contain ECO with the IRI: "+EcoMapper.ECO_PURL_IRI); - } - - MinervaOWLGraphWrapper ecoGraph = new MinervaOWLGraphWrapper(eco); - Reader reader = null; - try { - reader = createReader(EcoMapper.ECO_MAPPING_PURL); - EcoMappings mappings = loadEcoMappings(reader, ecoGraph); - return new TraversingEcoMapperImpl(mappings, reasoner, disposeReasoner); - } - finally { - IOUtils.closeQuietly(reader); - } - } - - static Reader createReader(String src) throws IOException { - if (src.indexOf(':') > 0) { - // assume its an url - URL url = new URL(src); - return loadUrl(url); - } - - // treat as file - File file = new File(src); - return new FileReader(file); - } - - private static Reader loadUrl(URL url) throws IOException { - final HttpURLConnection connection; - InputStream response = null; - // setup and open (actual connection) - try { - connection = (HttpURLConnection) url.openConnection(); - connection.setInstanceFollowRedirects(true); // warning does not follow redirects from http to https - response = connection.getInputStream(); // opens the connection to the server - } - catch (IOException e) { - IOUtils.closeQuietly(response); - throw e; - } - // check status code - final int status; - try { - status = connection.getResponseCode(); - } catch (IOException e) { - IOUtils.closeQuietly(response); - throw e; - } - if (HttpURLConnection.HTTP_MOVED_PERM == status || HttpURLConnection.HTTP_MOVED_TEMP == status) { - String location; - try { - location = connection.getHeaderField("Location"); - } finally { - IOUtils.closeQuietly(response); - } - if (location == null) { - throw new IOException("Could not follow redirect, missing header/no value for header 'Location'"); - } - URL next = new URL(url, location); // Deal with relative URLs - - return loadUrl(next); - } - // handle unexpected status code - if (status != 200) { - // try to check error stream - String errorMsg = getErrorMsg(connection); - - // 
construct message for exception - StringBuilder sb = new StringBuilder("Unexpected HTTP status code: "+status); - - if (errorMsg != null) { - sb.append(" Details: "); - sb.append(errorMsg); - } - throw new IOException(sb.toString()); - } - - // try to detect charset - String contentType = connection.getHeaderField("Content-Type"); - String charset = null; - - if (contentType != null) { - for (String param : contentType.replace(" ", "").split(";")) { - if (param.startsWith("charset=")) { - charset = param.split("=", 2)[1]; - break; - } - } - } - - // get string response from stream - String string; - try { - if (charset != null) { - string = IOUtils.toString(response, charset); - } - else { - string = IOUtils.toString(response); - } - } catch (IOException e) { - throw e; - } - finally { - IOUtils.closeQuietly(response); - } - return new StringReader(string); - } - - private static String getErrorMsg(HttpURLConnection connection) { - String errorMsg = null; - InputStream errorStream = null; - try { - errorStream = connection.getErrorStream(); - if (errorStream != null) { - errorMsg =IOUtils.toString(errorStream); - } - errorMsg = StringUtils.trimToNull(errorMsg); - } - catch (IOException e) { - // ignore errors, while trying to retrieve the error message - } - finally { - IOUtils.closeQuietly(errorStream); - } - return errorMsg; - } - - static TraversingEcoMapper createTraversingEcoMapper(Reader mappingsReader, MinervaOWLGraphWrapper eco, OWLReasoner reasoner, boolean disposeReasoner) throws IOException, OWLException { - EcoMappings mappings = loadEcoMappings(mappingsReader, eco); - return new TraversingEcoMapperImpl(mappings, reasoner, disposeReasoner); - } - - private static EcoMappings loadEcoMappings(Reader mappingsReader, MinervaOWLGraphWrapper eco) throws IOException { - EcoMappings mappings = new EcoMappings(); - List lines = IOUtils.readLines(mappingsReader); - for (String line : lines) { - line = StringUtils.trimToNull(line); - if (line != null) { - char c = line.charAt(0); - if ('#' != c) { - String[] split = StringUtils.split(line, '\t'); - if (split.length == 3) { - String code = split[0]; - String ref = split[1]; - String ecoId = split[2]; - OWLClass cls = eco.getOWLClassByIdentifier(ecoId); - if (cls != null) { - mappings.add(code, ref, cls); - } - } - } - } - } - return mappings; - } - - private static SimpleEcoMapper createSimpleMapper(Reader mappingsReader) throws IOException { - EcoMappings mappings = loadEcoMappings(mappingsReader); - return new SimpleEcoMapperImpl(mappings); - } - - private static EcoMappings loadEcoMappings(Reader mappingsReader) throws IOException { - EcoMappings mappings = new EcoMappings(); - List lines = IOUtils.readLines(mappingsReader); - for (String line : lines) { - line = StringUtils.trimToNull(line); - if (line != null) { - char c = line.charAt(0); - if ('#' != c) { - String[] split = StringUtils.split(line, '\t'); - if (split.length == 3) { - String code = split[0]; - String ref = split[1]; - String ecoId = split[2]; - mappings.add(code, ref, ecoId); - } - } - } - } - return mappings; - } - - /** - * Helper to access the mapping for ECO codes. ECO codes should always have - * a 'Default' mapping. Optionally, they have additional mappings for - * specific annotation references. 
- * - * @param - */ - static class EcoMappings { - - static final String DEFAULT_REF = "Default"; - - private final Map> allMappings = new HashMap>(); - - void add(String code, String ref, T cls) { - Map codeMap = allMappings.get(code); - if (codeMap == null) { - codeMap = new HashMap(); - allMappings.put(code, codeMap); - } - if (ref == null) { - ref = DEFAULT_REF; - } - codeMap.put(ref, cls); - } - - T get(String code, String ref) { - T result = null; - if (code != null) { - Map codeMap = allMappings.get(code); - if (codeMap != null) { - if (ref == null) { - ref = DEFAULT_REF; - } - result = codeMap.get(ref); - } - } - return result; - } - - T get(String code) { - return get(code, DEFAULT_REF); - } - - Set getAll(String code) { - Set result = new HashSet(); - if (code != null) { - Map codeMap = allMappings.get(code); - if (codeMap != null) { - result.addAll(codeMap.values()); - } - } - return result; - } - - boolean hasCode(String code) { - return allMappings.containsKey(code); - } - - Map> getReverseMap() { - Map> reverseMap = new HashMap>(); - for(Entry> e : allMappings.entrySet()) { - Map codeMap = e.getValue(); - for(Entry codeEntry : codeMap.entrySet()) { - T eco = codeEntry.getValue(); - String ref = codeEntry.getKey(); - if (DEFAULT_REF.equals(ref)) { - ref = null; - } - reverseMap.put(eco, Pair.of(e.getKey(), ref)); - } - } - return reverseMap; - } - } + + private static final OWLReasonerFactory reasonerFactor = new ElkReasonerFactory(); + + private EcoMapperFactory() { + // private constructor, no instances allowed + } + + public static class OntologyMapperPair { + + private final MinervaOWLGraphWrapper graph; + private final MAPPER mapper; + + /** + * @param graph + * @param mapper + */ + OntologyMapperPair(MinervaOWLGraphWrapper graph, MAPPER mapper) { + this.graph = graph; + this.mapper = mapper; + } + + /** + * @return the graph + */ + public MinervaOWLGraphWrapper getGraph() { + return graph; + } + + /** + * @return the mapper + */ + public MAPPER getMapper() { + return mapper; + } + } + + /** + * Create a new {@link SimpleEcoMapper} with the mapping loaded from + * the PURL. + * + * @return mapper + * @throws IOException + * @see EcoMapper#ECO_MAPPING_PURL + */ + public static SimpleEcoMapper createSimple() throws IOException { + return createSimple(EcoMapper.ECO_MAPPING_PURL); + } + + /** + * Create a new {@link SimpleEcoMapper} with the mapping loaded from + * the given source. + * + * @param source + * @return mapper + * @throws IOException + */ + public static SimpleEcoMapper createSimple(String source) throws IOException { + return createSimpleMapper(createReader(source)); + } + + /** + * Create an instance of a {@link EcoMapper}. Uses a separate parser. Load + * the ECO and mappings using their PURLs. + * + * @return mapper pair + * @throws OWLException + * @throws IOException + * @see EcoMapper#ECO_PURL + * @see EcoMapper#ECO_MAPPING_PURL + */ + public static OntologyMapperPair createEcoMapper() throws OWLException, IOException { + return createEcoMapper(new ParserWrapper()); + } + + /** + * Create an instance of a {@link EcoMapper}. Uses the given manager to load ECO via the + * PURL. Load mappings using the PURL.
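As a reading aid for the createSimple() factory methods above, a minimal usage sketch; it is not part of the commit, it assumes the mapping PURL is reachable at runtime, and the class name is illustrative:

    import java.io.IOException;

    import owltools.gaf.eco.EcoMapperFactory;
    import owltools.gaf.eco.SimpleEcoMapper;

    public class SimpleEcoLookupExample {
        public static void main(String[] args) throws IOException {
            // downloads the evidence-code-to-ECO mapping from EcoMapper.ECO_MAPPING_PURL
            SimpleEcoMapper mapper = EcoMapperFactory.createSimple();
            // a null reference falls back to the 'Default' mapping for the code;
            // the cast selects the getEco(String, String) overload over the Collection one
            String eco = mapper.getEco("IDA", (String) null);
            System.out.println("IDA -> " + eco);
        }
    }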
+ * + * @param m + * @return mapper pair + * @throws OWLException + * @throws IOException + * @see EcoMapper#ECO_PURL + * @see EcoMapper#ECO_MAPPING_PURL + */ + public static OntologyMapperPair createEcoMapper(OWLOntologyManager m) throws OWLException, IOException { + ParserWrapper p = new ParserWrapper(); + p.setManager(m); + return createEcoMapper(p); + } + + /** + * Create an instance of a {@link EcoMapper}. Uses the given + * {@link ParserWrapper} to load the ontology. Retrieves ECO and the + * mappings using their PURLs. + * + * @param p + * @return mapper pair + * @throws OWLException + * @throws IOException + * @see EcoMapper#ECO_PURL + * @see EcoMapper#ECO_MAPPING_PURL + */ + public static OntologyMapperPair createEcoMapper(ParserWrapper p) throws OWLException, IOException { + return createEcoMapper(p, EcoMapper.ECO_PURL); + } + + /** + * Create an instance of a {@link EcoMapper}. Uses the given + * {@link ParserWrapper} to load the ontology. Retrieves ECO from the given location and the + * mapping from the PURL. + * + * @param p + * @param location + * @return mapper pair + * @throws OWLException + * @throws IOException + * @see EcoMapper#ECO_MAPPING_PURL + */ + public static OntologyMapperPair createEcoMapper(ParserWrapper p, String location) throws OWLException, IOException { + final OWLOntology eco = p.parseOWL(location); + final MinervaOWLGraphWrapper graph = new MinervaOWLGraphWrapper(eco); + final EcoMapper mapper = createEcoMapper(graph); + final OntologyMapperPair pair = new OntologyMapperPair(graph, mapper); + return pair; + } + + /** + * Create an instance of a {@link EcoMapper}. Retrieves the mappings using + * the PURL. + * + * @param graph graph containing ECO + * @return mapper + * @throws IOException + * @see EcoMapper#ECO_MAPPING_PURL + */ + public static EcoMapper createEcoMapper(MinervaOWLGraphWrapper graph) throws IOException { + Reader reader = null; + try { + reader = createReader(EcoMapper.ECO_MAPPING_PURL); + EcoMappings mappings = loadEcoMappings(reader, graph); + return createEcoMapper(mappings); + } finally { + IOUtils.closeQuietly(reader); + } + } + + static EcoMapper createEcoMapper(EcoMappings mappings) { + return new EcoMapperImpl(mappings); + } + + /** + * Create a {@link TraversingEcoMapper} instance using a new + * {@link ParserWrapper} to load ECO. ECO and the mappings are retrieved + * using their PURLs. + *
<p>
+ * Creates an ELK reasoner to be used in the traversal methods. Use + * {@link TraversingEcoMapper#dispose()} to ensure proper cleanup of the ELK + * worker thread pool. + * + * @return mapper pair + * @throws OWLException + * @throws IOException + * @see EcoMapper#ECO_PURL + * @see EcoMapper#ECO_MAPPING_PURL + */ + public static OntologyMapperPair createTraversingEcoMapper() throws OWLException, IOException { + return createTraversingEcoMapper(new ParserWrapper()); + } + + /** + * Create a {@link TraversingEcoMapper} instance using the given + * {@link ParserWrapper} to load ECO. ECO and the mappings are retrieved + * using their PURLs. + *
<p>
+ * Creates an ELK reasoner to be used in the traversal methods. Use + * {@link TraversingEcoMapper#dispose()} to ensure proper cleanup of the ELK + * worker thread pool. + * + * @param p + * @return mapper + * @throws OWLException + * @throws IOException + * @see EcoMapper#ECO_PURL + * @see EcoMapper#ECO_MAPPING_PURL + */ + public static OntologyMapperPair createTraversingEcoMapper(ParserWrapper p) throws OWLException, IOException { + return createTraversingEcoMapper(p, EcoMapper.ECO_PURL); + } + + /** + * Create a {@link TraversingEcoMapper} instance using the given + * {@link ParserWrapper} to load ECO from the given location. The mappings + * are retrieved using the PURL. + *
<p>
+ * Creates an ELK reasoner to be used in the traversal methods. Use + * {@link TraversingEcoMapper#dispose()} to ensure proper cleanup of the ELK + * worker thread pool. + * + * @param p + * @param location + * @return mapper + * @throws OWLException + * @throws IOException + * @see EcoMapper#ECO_MAPPING_PURL + */ + public static OntologyMapperPair createTraversingEcoMapper(ParserWrapper p, String location) throws OWLException, IOException { + OWLOntology eco = p.parseOWL(EcoMapper.ECO_PURL_IRI); + OWLReasoner reasoner = reasonerFactor.createReasoner(eco); + Reader reader = null; + try { + MinervaOWLGraphWrapper ecoGraph = new MinervaOWLGraphWrapper(eco); + reader = createReader(EcoMapper.ECO_MAPPING_PURL); + final TraversingEcoMapper mapper = createTraversingEcoMapper(reader, ecoGraph, reasoner, true); + return new OntologyMapperPair(ecoGraph, mapper); + } finally { + IOUtils.closeQuietly(reader); + } + } + + /** + * Create a {@link TraversingEcoMapper} instance using the given + * {@link MinervaOWLGraphWrapper}. It is assumed that ECO can be retrieved from the + * graph using its default IRI. The mappings are retrieved using the PURL. + *
<p>
+ * Uses the given reasoner in the traversal methods. If disposeReasoner is + * set to true, the reasoner is also disposed when calling + * {@link TraversingEcoMapper#dispose()}. + * + * @param all graph containing all ontologies, including ECO + * @param reasoner reasoner capable of traversing ECO + * @param disposeReasoner set to true if the reasoner should be disposed when calling + * {@link TraversingEcoMapper#dispose()} + * @return mapper + * @throws IOException + * @throws OWLException + * @throws IllegalArgumentException thrown when the reasoner is null or the + * {@link MinervaOWLGraphWrapper} does not contain ECO. + * @see EcoMapper#ECO_PURL_IRI + * @see EcoMapper#ECO_MAPPING_PURL + */ + public static TraversingEcoMapper createTraversingEcoMapper(MinervaOWLGraphWrapper all, OWLReasoner reasoner, boolean disposeReasoner) throws IOException, OWLException { + + // This has bitten me, so let's try and be specific... + if (reasoner == null) { + throw new IllegalArgumentException("No reasoner was specified for use with the EcoTools. Add a reasoner for the command line"); + } + + OWLOntology eco = null; + + // assume the graph wrapper is more than eco + // try to find ECO by its purl + Set allOntologies = all.getAllOntologies(); + for (OWLOntology owlOntology : allOntologies) { + OWLOntologyID id = owlOntology.getOntologyID(); + Optional ontologyIRI = id.getOntologyIRI(); + if (ontologyIRI.isPresent()) { + if (EcoMapper.ECO_PURL_IRI.equals(ontologyIRI.get())) { + eco = owlOntology; + } + } + } + if (eco == null) { + throw new IllegalArgumentException("The specified graph did not contain ECO with the IRI: " + EcoMapper.ECO_PURL_IRI); + } + + MinervaOWLGraphWrapper ecoGraph = new MinervaOWLGraphWrapper(eco); + Reader reader = null; + try { + reader = createReader(EcoMapper.ECO_MAPPING_PURL); + EcoMappings mappings = loadEcoMappings(reader, ecoGraph); + return new TraversingEcoMapperImpl(mappings, reasoner, disposeReasoner); + } finally { + IOUtils.closeQuietly(reader); + } + } + + static Reader createReader(String src) throws IOException { + if (src.indexOf(':') > 0) { + // assume it is a URL + URL url = new URL(src); + return loadUrl(url); + } + + // treat as file + File file = new File(src); + return new FileReader(file); + } + + private static Reader loadUrl(URL url) throws IOException { + final HttpURLConnection connection; + InputStream response = null; + // setup and open (actual connection) + try { + connection = (HttpURLConnection) url.openConnection(); + connection.setInstanceFollowRedirects(true); // warning does not follow redirects from http to https + response = connection.getInputStream(); // opens the connection to the server + } catch (IOException e) { + IOUtils.closeQuietly(response); + throw e; + } + // check status code + final int status; + try { + status = connection.getResponseCode(); + } catch (IOException e) { + IOUtils.closeQuietly(response); + throw e; + } + if (HttpURLConnection.HTTP_MOVED_PERM == status || HttpURLConnection.HTTP_MOVED_TEMP == status) { + String location; + try { + location = connection.getHeaderField("Location"); + } finally { + IOUtils.closeQuietly(response); + } + if (location == null) { + throw new IOException("Could not follow redirect, missing header/no value for header 'Location'"); + } + URL next = new URL(url, location); // Deal with relative URLs + + return loadUrl(next); + } + // handle unexpected status code + if (status != 200) { + // try to check error stream + String errorMsg = getErrorMsg(connection); + + // construct message for 
exception + StringBuilder sb = new StringBuilder("Unexpected HTTP status code: " + status); + + if (errorMsg != null) { + sb.append(" Details: "); + sb.append(errorMsg); + } + throw new IOException(sb.toString()); + } + + // try to detect charset + String contentType = connection.getHeaderField("Content-Type"); + String charset = null; + + if (contentType != null) { + for (String param : contentType.replace(" ", "").split(";")) { + if (param.startsWith("charset=")) { + charset = param.split("=", 2)[1]; + break; + } + } + } + + // get string response from stream + String string; + try { + if (charset != null) { + string = IOUtils.toString(response, charset); + } else { + string = IOUtils.toString(response); + } + } catch (IOException e) { + throw e; + } finally { + IOUtils.closeQuietly(response); + } + return new StringReader(string); + } + + private static String getErrorMsg(HttpURLConnection connection) { + String errorMsg = null; + InputStream errorStream = null; + try { + errorStream = connection.getErrorStream(); + if (errorStream != null) { + errorMsg = IOUtils.toString(errorStream); + } + errorMsg = StringUtils.trimToNull(errorMsg); + } catch (IOException e) { + // ignore errors, while trying to retrieve the error message + } finally { + IOUtils.closeQuietly(errorStream); + } + return errorMsg; + } + + static TraversingEcoMapper createTraversingEcoMapper(Reader mappingsReader, MinervaOWLGraphWrapper eco, OWLReasoner reasoner, boolean disposeReasoner) throws IOException, OWLException { + EcoMappings mappings = loadEcoMappings(mappingsReader, eco); + return new TraversingEcoMapperImpl(mappings, reasoner, disposeReasoner); + } + + private static EcoMappings loadEcoMappings(Reader mappingsReader, MinervaOWLGraphWrapper eco) throws IOException { + EcoMappings mappings = new EcoMappings(); + List lines = IOUtils.readLines(mappingsReader); + for (String line : lines) { + line = StringUtils.trimToNull(line); + if (line != null) { + char c = line.charAt(0); + if ('#' != c) { + String[] split = StringUtils.split(line, '\t'); + if (split.length == 3) { + String code = split[0]; + String ref = split[1]; + String ecoId = split[2]; + OWLClass cls = eco.getOWLClassByIdentifier(ecoId); + if (cls != null) { + mappings.add(code, ref, cls); + } + } + } + } + } + return mappings; + } + + private static SimpleEcoMapper createSimpleMapper(Reader mappingsReader) throws IOException { + EcoMappings mappings = loadEcoMappings(mappingsReader); + return new SimpleEcoMapperImpl(mappings); + } + + private static EcoMappings loadEcoMappings(Reader mappingsReader) throws IOException { + EcoMappings mappings = new EcoMappings(); + List lines = IOUtils.readLines(mappingsReader); + for (String line : lines) { + line = StringUtils.trimToNull(line); + if (line != null) { + char c = line.charAt(0); + if ('#' != c) { + String[] split = StringUtils.split(line, '\t'); + if (split.length == 3) { + String code = split[0]; + String ref = split[1]; + String ecoId = split[2]; + mappings.add(code, ref, ecoId); + } + } + } + } + return mappings; + } + + /** + * Helper to access the mapping for ECO codes. ECO codes should always have + * a 'Default' mapping. Optionally, they have additional mappings for + * specific annotation references. 
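The 'Default' fallback described above is easiest to see in code. A small behavioural sketch, not part of the commit, assuming caller code in the same owltools.gaf.eco package (EcoMappings is package-private) and writing the generic value type explicitly; the ECO identifiers are illustrative:

    EcoMapperFactory.EcoMappings<String> mappings = new EcoMapperFactory.EcoMappings<String>();
    // entries as loadEcoMappings() would add them from tab-separated lines: code, reference, ECO id
    mappings.add("IEA", null, "ECO:0000501");             // a null reference is stored under "Default"
    mappings.add("IEA", "GO_REF:0000002", "ECO:0000256"); // reference-specific mapping

    mappings.get("IEA");                   // "ECO:0000501" -- the Default mapping
    mappings.get("IEA", "GO_REF:0000002"); // "ECO:0000256"
    mappings.get("IEA", "GO_REF:0000999"); // null -- get(code, ref) does not fall back to Default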
+ * + * @param + */ + static class EcoMappings { + + static final String DEFAULT_REF = "Default"; + + private final Map> allMappings = new HashMap>(); + + void add(String code, String ref, T cls) { + Map codeMap = allMappings.get(code); + if (codeMap == null) { + codeMap = new HashMap(); + allMappings.put(code, codeMap); + } + if (ref == null) { + ref = DEFAULT_REF; + } + codeMap.put(ref, cls); + } + + T get(String code, String ref) { + T result = null; + if (code != null) { + Map codeMap = allMappings.get(code); + if (codeMap != null) { + if (ref == null) { + ref = DEFAULT_REF; + } + result = codeMap.get(ref); + } + } + return result; + } + + T get(String code) { + return get(code, DEFAULT_REF); + } + + Set getAll(String code) { + Set result = new HashSet(); + if (code != null) { + Map codeMap = allMappings.get(code); + if (codeMap != null) { + result.addAll(codeMap.values()); + } + } + return result; + } + + boolean hasCode(String code) { + return allMappings.containsKey(code); + } + + Map> getReverseMap() { + Map> reverseMap = new HashMap>(); + for (Entry> e : allMappings.entrySet()) { + Map codeMap = e.getValue(); + for (Entry codeEntry : codeMap.entrySet()) { + T eco = codeEntry.getValue(); + String ref = codeEntry.getKey(); + if (DEFAULT_REF.equals(ref)) { + ref = null; + } + reverseMap.put(eco, Pair.of(e.getKey(), ref)); + } + } + return reverseMap; + } + } } diff --git a/minerva-core/src/main/java/owltools/gaf/eco/EcoMapperImpl.java b/minerva-core/src/main/java/owltools/gaf/eco/EcoMapperImpl.java index bd597737..d48f6b5d 100644 --- a/minerva-core/src/main/java/owltools/gaf/eco/EcoMapperImpl.java +++ b/minerva-core/src/main/java/owltools/gaf/eco/EcoMapperImpl.java @@ -1,53 +1,53 @@ package owltools.gaf.eco; +import org.apache.commons.lang3.tuple.Pair; +import org.semanticweb.owlapi.model.OWLClass; + import java.util.HashMap; import java.util.Map; import java.util.Map.Entry; import java.util.Set; -import org.apache.commons.lang3.tuple.Pair; -import org.semanticweb.owlapi.model.OWLClass; - public class EcoMapperImpl implements EcoMapper { - - private final EcoMapperFactory.EcoMappings mappings; - - EcoMapperImpl(EcoMapperFactory.EcoMappings mappings) { - this.mappings = mappings; - } - - @Override - public OWLClass getEcoClassForCode(String code) { - return mappings.get(code); - } - - @Override - public Set getAllEcoClassesForCode(String code) { - return mappings.getAll(code); - } - - @Override - public OWLClass getEcoClassForCode(String code, String refCode) { - return mappings.get(code, refCode); - } - - @Override - public boolean isGoEvidenceCode(String code) { - return mappings.hasCode(code); - } - - @Override - public Map getCodesForEcoClasses() { - Map> fullReverseMap = mappings.getReverseMap(); - Map simpleReverseMap = new HashMap(); - for(Entry> e : fullReverseMap.entrySet()) { - String ref = e.getValue().getRight(); - if (ref == null) { - simpleReverseMap.put(e.getKey(), e.getValue().getLeft()); - } - - } - return simpleReverseMap; - } + + private final EcoMapperFactory.EcoMappings mappings; + + EcoMapperImpl(EcoMapperFactory.EcoMappings mappings) { + this.mappings = mappings; + } + + @Override + public OWLClass getEcoClassForCode(String code) { + return mappings.get(code); + } + + @Override + public Set getAllEcoClassesForCode(String code) { + return mappings.getAll(code); + } + + @Override + public OWLClass getEcoClassForCode(String code, String refCode) { + return mappings.get(code, refCode); + } + + @Override + public boolean isGoEvidenceCode(String code) { + return 
mappings.hasCode(code); + } + + @Override + public Map getCodesForEcoClasses() { + Map> fullReverseMap = mappings.getReverseMap(); + Map simpleReverseMap = new HashMap(); + for (Entry> e : fullReverseMap.entrySet()) { + String ref = e.getValue().getRight(); + if (ref == null) { + simpleReverseMap.put(e.getKey(), e.getValue().getLeft()); + } + + } + return simpleReverseMap; + } } diff --git a/minerva-core/src/main/java/owltools/gaf/eco/SimpleEcoMapper.java b/minerva-core/src/main/java/owltools/gaf/eco/SimpleEcoMapper.java index 61b82bcf..fd4368e9 100644 --- a/minerva-core/src/main/java/owltools/gaf/eco/SimpleEcoMapper.java +++ b/minerva-core/src/main/java/owltools/gaf/eco/SimpleEcoMapper.java @@ -1,14 +1,14 @@ package owltools.gaf.eco; -import java.util.Collection; - import org.apache.commons.lang3.tuple.Pair; +import java.util.Collection; + public interface SimpleEcoMapper { - public String getEco(String goCode, String ref); - - public String getEco(String goCode, Collection allRefs); - - public Pair getGoCode(String eco); + public String getEco(String goCode, String ref); + + public String getEco(String goCode, Collection allRefs); + + public Pair getGoCode(String eco); } diff --git a/minerva-core/src/main/java/owltools/gaf/eco/SimpleEcoMapperImpl.java b/minerva-core/src/main/java/owltools/gaf/eco/SimpleEcoMapperImpl.java index 939c2c09..d56f5c25 100644 --- a/minerva-core/src/main/java/owltools/gaf/eco/SimpleEcoMapperImpl.java +++ b/minerva-core/src/main/java/owltools/gaf/eco/SimpleEcoMapperImpl.java @@ -1,42 +1,41 @@ package owltools.gaf.eco; -import java.util.Collection; -import java.util.Map; - import org.apache.commons.lang3.tuple.Pair; - import owltools.gaf.eco.EcoMapperFactory.EcoMappings; +import java.util.Collection; +import java.util.Map; + public class SimpleEcoMapperImpl implements SimpleEcoMapper { - - private final EcoMapperFactory.EcoMappings mappings; - private final Map> reverseMap; - - SimpleEcoMapperImpl(EcoMappings mappings) { - this.mappings = mappings; - reverseMap = mappings.getReverseMap(); - } - - @Override - public String getEco(String goCode, String ref) { - return mappings.get(goCode, ref); - } - - @Override - public String getEco(String goCode, Collection allRefs) { - String eco = null; - for (String ref : allRefs) { - eco = mappings.get(goCode, ref); - if (eco != null) { - break; - } - } - return eco; - } - - @Override - public Pair getGoCode(String eco) { - return reverseMap.get(eco); - } + + private final EcoMapperFactory.EcoMappings mappings; + private final Map> reverseMap; + + SimpleEcoMapperImpl(EcoMappings mappings) { + this.mappings = mappings; + reverseMap = mappings.getReverseMap(); + } + + @Override + public String getEco(String goCode, String ref) { + return mappings.get(goCode, ref); + } + + @Override + public String getEco(String goCode, Collection allRefs) { + String eco = null; + for (String ref : allRefs) { + eco = mappings.get(goCode, ref); + if (eco != null) { + break; + } + } + return eco; + } + + @Override + public Pair getGoCode(String eco) { + return reverseMap.get(eco); + } } diff --git a/minerva-core/src/main/java/owltools/gaf/eco/TraversingEcoMapper.java b/minerva-core/src/main/java/owltools/gaf/eco/TraversingEcoMapper.java index f8f632fe..c0150b4f 100644 --- a/minerva-core/src/main/java/owltools/gaf/eco/TraversingEcoMapper.java +++ b/minerva-core/src/main/java/owltools/gaf/eco/TraversingEcoMapper.java @@ -1,70 +1,70 @@ package owltools.gaf.eco; -import java.util.Set; - import org.semanticweb.owlapi.model.OWLClass; +import 
java.util.Set; + public interface TraversingEcoMapper extends EcoMapper { - /** - * Traversing method for the ECO ontology. - * - * @param sources - * @param reflexive - * @return set of super classes - */ - public Set getAncestors(Set sources, boolean reflexive); - - /** - * Traversing method for the ECO ontology. - * - * @param source - * @param reflexive - * @return set of super classes - */ - public Set getAncestors(OWLClass source, boolean reflexive); - - /** - * Traversing method for the ECO ontology. - * - * @param sources - * @param reflexive - * @return set of sub classes - */ - public Set getDescendents(Set sources, boolean reflexive); - - /** - * Traversing method for the ECO ontology. - * - * @param source - * @param reflexive - * @return set of sub classes - */ - public Set getDescendents(OWLClass source, boolean reflexive); - - - /** - * Get all strings which are valid identifiers for a given evidence code. - * This includes, the the codes itself and valid OBO-style identifier from ECO. - * - * @param code - * @param includeChildren - * @return set of ids - */ - public Set getAllValidEvidenceIds(String code, boolean includeChildren); - - /** - * Get all strings which are valid identifiers for the given evidence codes. - * This includes, the the codes itself and valid OBO-style identifier from ECO. - * - * @param codes - * @param includeChildren - * @return set of ids - */ - public Set getAllValidEvidenceIds(Set codes, boolean includeChildren); - - /** - * Dispose this instance - */ - public void dispose(); + /** + * Traversing method for the ECO ontology. + * + * @param sources + * @param reflexive + * @return set of super classes + */ + public Set getAncestors(Set sources, boolean reflexive); + + /** + * Traversing method for the ECO ontology. + * + * @param source + * @param reflexive + * @return set of super classes + */ + public Set getAncestors(OWLClass source, boolean reflexive); + + /** + * Traversing method for the ECO ontology. + * + * @param sources + * @param reflexive + * @return set of sub classes + */ + public Set getDescendents(Set sources, boolean reflexive); + + /** + * Traversing method for the ECO ontology. + * + * @param source + * @param reflexive + * @return set of sub classes + */ + public Set getDescendents(OWLClass source, boolean reflexive); + + + /** + * Get all strings which are valid identifiers for a given evidence code. + * This includes, the the codes itself and valid OBO-style identifier from ECO. + * + * @param code + * @param includeChildren + * @return set of ids + */ + public Set getAllValidEvidenceIds(String code, boolean includeChildren); + + /** + * Get all strings which are valid identifiers for the given evidence codes. + * This includes, the the codes itself and valid OBO-style identifier from ECO. 
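A typical call sequence for the traversing variant, sketched for orientation; it is not part of the commit, the generic parameter on OntologyMapperPair is inferred, the ECO PURLs are assumed reachable, and the fragment would live in a method declaring OWLException and IOException:

    EcoMapperFactory.OntologyMapperPair<TraversingEcoMapper> pair =
            EcoMapperFactory.createTraversingEcoMapper(); // loads ECO and creates an ELK reasoner
    TraversingEcoMapper eco = pair.getMapper();
    try {
        // the code itself, its OBO-style ECO id, and -- with includeChildren -- all ECO descendants
        Set<String> ids = eco.getAllValidEvidenceIds("IDA", true);
        System.out.println(ids);
    } finally {
        eco.dispose(); // shuts down the ELK worker pool created by the factory
    }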
+ * + * @param codes + * @param includeChildren + * @return set of ids + */ + public Set getAllValidEvidenceIds(Set codes, boolean includeChildren); + + /** + * Dispose this instance + */ + public void dispose(); } diff --git a/minerva-core/src/main/java/owltools/gaf/eco/TraversingEcoMapperImpl.java b/minerva-core/src/main/java/owltools/gaf/eco/TraversingEcoMapperImpl.java index 4b0064f3..2a635ba4 100644 --- a/minerva-core/src/main/java/owltools/gaf/eco/TraversingEcoMapperImpl.java +++ b/minerva-core/src/main/java/owltools/gaf/eco/TraversingEcoMapperImpl.java @@ -1,125 +1,119 @@ package owltools.gaf.eco; -import java.util.Collections; -import java.util.HashMap; -import java.util.HashSet; -import java.util.Map; -import java.util.Set; - -//import org.obolibrary.obo2owl.Owl2Obo; import org.semanticweb.owlapi.model.OWLClass; import org.semanticweb.owlapi.reasoner.OWLReasoner; - import owltools.gaf.eco.EcoMapperFactory.EcoMappings; import owltools.util.OwlHelper; +import java.util.*; + public class TraversingEcoMapperImpl extends EcoMapperImpl implements TraversingEcoMapper { - private final OWLReasoner reasoner; - private final boolean disposeReasoner; - - private final Map> mappingCache = new HashMap>(); - - TraversingEcoMapperImpl(EcoMappings mappings, OWLReasoner reasoner, boolean disposeReasoner) { - super(mappings); - this.reasoner = reasoner; - this.disposeReasoner = disposeReasoner; - } + private final OWLReasoner reasoner; + private final boolean disposeReasoner; + + private final Map> mappingCache = new HashMap>(); + + TraversingEcoMapperImpl(EcoMappings mappings, OWLReasoner reasoner, boolean disposeReasoner) { + super(mappings); + this.reasoner = reasoner; + this.disposeReasoner = disposeReasoner; + } + + @Override + public Set getAncestors(Set sources, boolean reflexive) { + if (sources == null || sources.isEmpty()) { + return Collections.emptySet(); + } + Set result = new HashSet(); + for (OWLClass source : sources) { + Set set = reasoner.getSuperClasses(source, false).getFlattened(); + for (OWLClass cls : set) { + if (cls.isBuiltIn() == false) { + result.add(cls); + } + } + } + if (reflexive) { + result.addAll(sources); + } + if (result.isEmpty()) { + return Collections.emptySet(); + } + return result; + } + + @Override + public Set getAncestors(OWLClass source, boolean reflexive) { + return getAncestors(Collections.singleton(source), reflexive); + } - @Override - public Set getAncestors(Set sources, boolean reflexive) { - if (sources == null || sources.isEmpty()) { - return Collections.emptySet(); - } - Set result = new HashSet(); - for (OWLClass source : sources) { - Set set = reasoner.getSuperClasses(source, false).getFlattened(); - for (OWLClass cls : set) { - if (cls.isBuiltIn() == false) { - result.add(cls); - } - } - } - if (reflexive) { - result.addAll(sources); - } - if (result.isEmpty()) { - return Collections.emptySet(); - } - return result; - } + @Override + public Set getDescendents(Set sources, boolean reflexive) { + if (sources == null || sources.isEmpty()) { + return Collections.emptySet(); + } + Set result = new HashSet(); + for (OWLClass source : sources) { + Set set = reasoner.getSubClasses(source, false).getFlattened(); + for (OWLClass cls : set) { + if (cls.isBuiltIn() == false) { + result.add(cls); + } + } + } + if (reflexive) { + result.addAll(sources); + } + if (result.isEmpty()) { + return Collections.emptySet(); + } + return result; + } - @Override - public Set getAncestors(OWLClass source, boolean reflexive) { - return 
getAncestors(Collections.singleton(source), reflexive); - } + @Override + public Set getDescendents(OWLClass source, boolean reflexive) { + return getDescendents(Collections.singleton(source), reflexive); + } - @Override - public Set getDescendents(Set sources, boolean reflexive) { - if (sources == null || sources.isEmpty()) { - return Collections.emptySet(); - } - Set result = new HashSet(); - for (OWLClass source : sources) { - Set set = reasoner.getSubClasses(source, false).getFlattened(); - for (OWLClass cls : set) { - if (cls.isBuiltIn() == false) { - result.add(cls); - } - } - } - if (reflexive) { - result.addAll(sources); - } - if (result.isEmpty()) { - return Collections.emptySet(); - } - return result; - } + @Override + public Set getAllValidEvidenceIds(String code, boolean includeChildren) { + return getAllValidEvidenceIds(Collections.singleton(code), includeChildren); + } - @Override - public Set getDescendents(OWLClass source, boolean reflexive) { - return getDescendents(Collections.singleton(source), reflexive); - } + @Override + public Set getAllValidEvidenceIds(Set codes, boolean includeChildren) { + if (codes == null || codes.isEmpty()) { + return Collections.emptySet(); + } + Set result = new HashSet(); + for (String code : codes) { + Set classes = getAllEcoClassesForCode(code); + for (OWLClass owlClass : classes) { + result.add(getId(owlClass)); + } + if (includeChildren) { + Set descendents = getDescendents(classes, false); + for (OWLClass owlClass : descendents) { + result.add(getId(owlClass)); + } + } + } + result.addAll(codes); + return result; + } - @Override - public Set getAllValidEvidenceIds(String code, boolean includeChildren) { - return getAllValidEvidenceIds(Collections.singleton(code), includeChildren); - } + private String getId(OWLClass cls) { + //return Owl2Obo.getIdentifier(cls.getIRI()); + return OwlHelper.getIdentifier(cls.getIRI(), null); + } - @Override - public Set getAllValidEvidenceIds(Set codes, boolean includeChildren) { - if (codes == null || codes.isEmpty()) { - return Collections.emptySet(); - } - Set result = new HashSet(); - for(String code : codes) { - Set classes = getAllEcoClassesForCode(code); - for (OWLClass owlClass : classes) { - result.add(getId(owlClass)); - } - if (includeChildren) { - Set descendents = getDescendents(classes, false); - for (OWLClass owlClass : descendents) { - result.add(getId(owlClass)); - } - } - } - result.addAll(codes); - return result; - } - - private String getId(OWLClass cls) { - //return Owl2Obo.getIdentifier(cls.getIRI()); - return OwlHelper.getIdentifier(cls.getIRI(), null); - } + @Override + public void dispose() { + mappingCache.clear(); + if (disposeReasoner) { + reasoner.dispose(); + } + } - @Override - public void dispose() { - mappingCache.clear(); - if (disposeReasoner) { - reasoner.dispose(); - } - } - } diff --git a/minerva-core/src/main/java/owltools/io/CatalogXmlIRIMapper.java b/minerva-core/src/main/java/owltools/io/CatalogXmlIRIMapper.java index 4aa45e21..c742b4b3 100644 --- a/minerva-core/src/main/java/owltools/io/CatalogXmlIRIMapper.java +++ b/minerva-core/src/main/java/owltools/io/CatalogXmlIRIMapper.java @@ -1,5 +1,15 @@ package owltools.io; +import org.apache.log4j.Logger; +import org.semanticweb.owlapi.model.IRI; +import org.semanticweb.owlapi.model.OWLOntologyIRIMapper; +import org.xml.sax.Attributes; +import org.xml.sax.SAXException; +import org.xml.sax.helpers.DefaultHandler; + +import javax.xml.parsers.ParserConfigurationException; +import javax.xml.parsers.SAXParser; +import 
javax.xml.parsers.SAXParserFactory; import java.io.File; import java.io.FileInputStream; import java.io.IOException; @@ -10,178 +20,164 @@ import java.util.HashMap; import java.util.Map; -import javax.xml.parsers.ParserConfigurationException; -import javax.xml.parsers.SAXParser; -import javax.xml.parsers.SAXParserFactory; - -import org.apache.log4j.Logger; -import org.semanticweb.owlapi.model.IRI; -import org.semanticweb.owlapi.model.OWLOntologyIRIMapper; -import org.xml.sax.Attributes; -import org.xml.sax.SAXException; -import org.xml.sax.helpers.DefaultHandler; - /** * {@link OWLOntologyIRIMapper} using the mappings from a catalog.xml file. */ public class CatalogXmlIRIMapper implements OWLOntologyIRIMapper { - private static final Logger logger = Logger.getLogger(CatalogXmlIRIMapper.class); - - private final Map mappings; - - CatalogXmlIRIMapper(Map mappings) { - this.mappings = mappings; - } - - /** - * Create an CatalogXmlIRIMapper from the given catalog.xml file. - * Assume, that relative paths are relative to the catalog file location. - * - * @param catalogFile - * @throws IOException - */ - public CatalogXmlIRIMapper(String catalogFile) throws IOException { - this(new File(catalogFile).getAbsoluteFile()); - } - - /** - * Create an CatalogXmlIRIMapper from the given catalog.xml file. - * Assume, that relative paths are relative to the catalog file location. - * - * @param catalogFile - * @throws IOException - */ - public CatalogXmlIRIMapper(File catalogFile) throws IOException { - this(catalogFile, catalogFile.getAbsoluteFile().getParentFile()); - } - - /** - * Create an CatalogXmlIRIMapper from the given catalog.xml file. - * Use the parentFolder to resolve relative paths from the catalog file. - * - * @param catalogFile - * @param parentFolder - * @throws IOException - */ - public CatalogXmlIRIMapper(File catalogFile, File parentFolder) throws IOException { - this(parseCatalogXml(new FileInputStream(catalogFile), parentFolder)); - } - - /** - * Create an CatalogXmlIRIMapper from the given catalog URL. - * Assume, there are no relative paths in the catalog file. - * - * @param catalogURL - * @throws IOException - */ - public CatalogXmlIRIMapper(URL catalogURL) throws IOException { - if ("file".equals(catalogURL.getProtocol())) { - try { - File catalogFile = new File(catalogURL.toURI()); - mappings = parseCatalogXml(new FileInputStream(catalogFile), catalogFile.getParentFile()); - } catch (URISyntaxException e) { - throw new IOException(e); - } - } - else { - mappings = parseCatalogXml(catalogURL.openStream(), null); - } - } - - /** - * Create an CatalogXmlIRIMapper from the given catalog URL. - * Use the parentFolder to resolve relative paths from the catalog file. - * - * @param catalogURL - * @param parentFolder - * @throws IOException - */ - public CatalogXmlIRIMapper(URL catalogURL, File parentFolder) throws IOException { - this(parseCatalogXml(catalogURL.openStream(), parentFolder)); - } - - @Override - public IRI getDocumentIRI(IRI ontologyIRI) { - return mappings.get(ontologyIRI); - } - - public Map getMappings() { - return Collections.unmodifiableMap(mappings); - } - - /** - * Parse the inputStream as a catalog.xml file and extract IRI mappings. - * - * Optional: Resolve relative file paths with the given parent folder. 
- * - * @param inputStream input stream (never null) - * @param parentFolder folder or null - * @return mappings - * @throws IOException - * @throws IllegalArgumentException if input stream is null - */ - static Map parseCatalogXml(InputStream inputStream, final File parentFolder) throws IOException { - if (inputStream == null) { - throw new IllegalArgumentException("InputStream should never be null, missing resource?"); - } - - // use the Java built-in SAX parser - SAXParserFactory factory = SAXParserFactory.newInstance(); - factory.setValidating(false); - - try { - final Map mappings = new HashMap(); - SAXParser saxParser = factory.newSAXParser(); - saxParser.parse(inputStream, new DefaultHandler(){ - - @Override - public void startElement(String uri, String localName, - String qName, Attributes attributes) - throws SAXException - { - // only look at 'uri' tags - // does not check any parent tags - if ("uri".equals(qName)) { - IRI original = null; - IRI mapped = null; - String nameString = attributes.getValue("name"); - if (nameString != null) { - original = IRI.create(nameString); - } - String mappedString = attributes.getValue("uri"); - if (mappedString != null) { - if (parentFolder != null && mappedString.indexOf(":") < 0) { - // there is a parent folder and the mapping is not an IRI or URL - File file = new File(mappedString); - if (!file.isAbsolute()) { - file = new File(parentFolder, mappedString); - } - try { - file = file.getCanonicalFile(); - mapped = IRI.create(file); - } catch (IOException e) { - logger.warn("Skipping mapping: "+nameString+" "+mappedString, e); - } - } - else { - mapped = IRI.create(mappedString); - } - } - - if (original != null && mapped != null) { - mappings.put(original, mapped); - } - } - } - }); - return mappings; - } catch (ParserConfigurationException e) { - throw new IOException(e); - } catch (SAXException e) { - throw new IOException(e); - } finally { - inputStream.close(); - } - } + private static final Logger logger = Logger.getLogger(CatalogXmlIRIMapper.class); + + private final Map mappings; + + CatalogXmlIRIMapper(Map mappings) { + this.mappings = mappings; + } + + /** + * Create an CatalogXmlIRIMapper from the given catalog.xml file. + * Assume, that relative paths are relative to the catalog file location. + * + * @param catalogFile + * @throws IOException + */ + public CatalogXmlIRIMapper(String catalogFile) throws IOException { + this(new File(catalogFile).getAbsoluteFile()); + } + + /** + * Create an CatalogXmlIRIMapper from the given catalog.xml file. + * Assume, that relative paths are relative to the catalog file location. + * + * @param catalogFile + * @throws IOException + */ + public CatalogXmlIRIMapper(File catalogFile) throws IOException { + this(catalogFile, catalogFile.getAbsoluteFile().getParentFile()); + } + + /** + * Create an CatalogXmlIRIMapper from the given catalog.xml file. + * Use the parentFolder to resolve relative paths from the catalog file. + * + * @param catalogFile + * @param parentFolder + * @throws IOException + */ + public CatalogXmlIRIMapper(File catalogFile, File parentFolder) throws IOException { + this(parseCatalogXml(new FileInputStream(catalogFile), parentFolder)); + } + + /** + * Create an CatalogXmlIRIMapper from the given catalog URL. + * Assume, there are no relative paths in the catalog file. 
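To make a catalog take effect during loading, the mapper is registered before parsing, typically through ParserWrapper. A short sketch, not part of the commit; the catalog path and ontology IRI are placeholders, and the surrounding method would declare IOException and OWLOntologyCreationException:

    ParserWrapper pw = new ParserWrapper();
    // relative paths inside the catalog resolve against the catalog file's folder
    pw.addIRIMapper(new CatalogXmlIRIMapper("/data/ontologies/catalog-v001.xml"));
    // if the catalog lists a local copy for this IRI, it is loaded instead of the remote document
    OWLOntology eco = pw.parseOWL("http://purl.obolibrary.org/obo/eco.owl");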
+ * + * @param catalogURL + * @throws IOException + */ + public CatalogXmlIRIMapper(URL catalogURL) throws IOException { + if ("file".equals(catalogURL.getProtocol())) { + try { + File catalogFile = new File(catalogURL.toURI()); + mappings = parseCatalogXml(new FileInputStream(catalogFile), catalogFile.getParentFile()); + } catch (URISyntaxException e) { + throw new IOException(e); + } + } else { + mappings = parseCatalogXml(catalogURL.openStream(), null); + } + } + + /** + * Create an CatalogXmlIRIMapper from the given catalog URL. + * Use the parentFolder to resolve relative paths from the catalog file. + * + * @param catalogURL + * @param parentFolder + * @throws IOException + */ + public CatalogXmlIRIMapper(URL catalogURL, File parentFolder) throws IOException { + this(parseCatalogXml(catalogURL.openStream(), parentFolder)); + } + + @Override + public IRI getDocumentIRI(IRI ontologyIRI) { + return mappings.get(ontologyIRI); + } + + public Map getMappings() { + return Collections.unmodifiableMap(mappings); + } + + /** + * Parse the inputStream as a catalog.xml file and extract IRI mappings. + *
<p>
+ * Optional: Resolve relative file paths with the given parent folder. + * + * @param inputStream input stream (never null) + * @param parentFolder folder or null + * @return mappings + * @throws IOException + * @throws IllegalArgumentException if input stream is null + */ + static Map parseCatalogXml(InputStream inputStream, final File parentFolder) throws IOException { + if (inputStream == null) { + throw new IllegalArgumentException("InputStream should never be null, missing resource?"); + } + + // use the Java built-in SAX parser + SAXParserFactory factory = SAXParserFactory.newInstance(); + factory.setValidating(false); + + try { + final Map mappings = new HashMap(); + SAXParser saxParser = factory.newSAXParser(); + saxParser.parse(inputStream, new DefaultHandler() { + + @Override + public void startElement(String uri, String localName, + String qName, Attributes attributes) + throws SAXException { + // only look at 'uri' tags + // does not check any parent tags + if ("uri".equals(qName)) { + IRI original = null; + IRI mapped = null; + String nameString = attributes.getValue("name"); + if (nameString != null) { + original = IRI.create(nameString); + } + String mappedString = attributes.getValue("uri"); + if (mappedString != null) { + if (parentFolder != null && mappedString.indexOf(":") < 0) { + // there is a parent folder and the mapping is not an IRI or URL + File file = new File(mappedString); + if (!file.isAbsolute()) { + file = new File(parentFolder, mappedString); + } + try { + file = file.getCanonicalFile(); + mapped = IRI.create(file); + } catch (IOException e) { + logger.warn("Skipping mapping: " + nameString + " " + mappedString, e); + } + } else { + mapped = IRI.create(mappedString); + } + } + + if (original != null && mapped != null) { + mappings.put(original, mapped); + } + } + } + }); + return mappings; + } catch (ParserConfigurationException e) { + throw new IOException(e); + } catch (SAXException e) { + throw new IOException(e); + } finally { + inputStream.close(); + } + } } diff --git a/minerva-core/src/main/java/owltools/io/ParserWrapper.java b/minerva-core/src/main/java/owltools/io/ParserWrapper.java index db248b2f..7da26860 100644 --- a/minerva-core/src/main/java/owltools/io/ParserWrapper.java +++ b/minerva-core/src/main/java/owltools/io/ParserWrapper.java @@ -20,8 +20,8 @@ /** * Convenience class wrapping org.oboformat that abstracts away underlying details of ontology format or location - * @author cjm * + * @author cjm */ public class ParserWrapper { @@ -43,14 +43,14 @@ public ParserWrapper() { public void startedLoadingOntology(LoadingStartedEvent event) { IRI id = event.getOntologyID().getOntologyIRI().orNull(); IRI source = event.getDocumentIRI(); - LOG.info("Start loading ontology: "+id+" from: "+source); + LOG.info("Start loading ontology: " + id + " from: " + source); } @Override public void finishedLoadingOntology(LoadingFinishedEvent event) { IRI id = event.getOntologyID().getOntologyIRI().orNull(); IRI source = event.getDocumentIRI(); - LOG.info("Finished loading ontology: "+id+" from: "+source); + LOG.info("Finished loading ontology: " + id + " from: " + source); } }; manager.addOntologyLoaderListener(listener); @@ -59,6 +59,7 @@ public void finishedLoadingOntology(LoadingFinishedEvent event) { public OWLOntologyManager getManager() { return manager; } + public void setManager(OWLOntologyManager manager) { this.manager = manager; } @@ -75,13 +76,16 @@ public void addIRIMapper(OWLOntologyIRIMapper mapper) { manager.getIRIMappers().add(mapper); 
mappers.add(0, mapper); } + public void removeIRIMapper(OWLOntologyIRIMapper mapper) { manager.getIRIMappers().remove(mapper); mappers.remove(mapper); } + public List getIRIMappers() { return Collections.unmodifiableList(mappers); } + public void addIRIMappers(List mappers) { List reverse = new ArrayList(mappers); Collections.reverse(reverse); @@ -91,7 +95,7 @@ public void addIRIMappers(List mappers) { } public MinervaOWLGraphWrapper parseToOWLGraph(String iriString) throws OWLOntologyCreationException, IOException { - return new MinervaOWLGraphWrapper(parse(iriString)); + return new MinervaOWLGraphWrapper(parse(iriString)); } public OWLOntology parse(String iriString) throws OWLOntologyCreationException, IOException { @@ -105,12 +109,11 @@ public OWLOntology parseOBO(String source) throws IOException, OWLOntologyCreati public OWLOntology parseOWL(String iriString) throws OWLOntologyCreationException { IRI iri; if (LOG.isDebugEnabled()) { - LOG.debug("parsing: "+iriString); + LOG.debug("parsing: " + iriString); } if (isIRI(iriString)) { iri = IRI.create(iriString); - } - else { + } else { iri = IRI.create(new File(iriString)); } return parseOWL(iri); @@ -122,7 +125,7 @@ private boolean isIRI(String iriString) { public OWLOntology parseOWL(IRI iri) throws OWLOntologyCreationException { if (LOG.isDebugEnabled()) { - LOG.debug("parsing: "+iri.toString()+" using "+manager); + LOG.debug("parsing: " + iri.toString() + " using " + manager); } OWLOntology ont; try { @@ -136,16 +139,16 @@ public OWLOntology parseOWL(IRI iri) throws OWLOntologyCreationException { // never return null ontology throw e; } - LOG.info("Skip already loaded ontology: "+iri); + LOG.info("Skip already loaded ontology: " + iri); } catch (OWLOntologyDocumentAlreadyExistsException e) { // Trying to recover from exception IRI duplicate = e.getOntologyDocumentIRI(); ont = manager.getOntology(duplicate); if (ont == null) { - for(OWLOntology managed : manager.getOntologies()) { + for (OWLOntology managed : manager.getOntologies()) { Optional managedIRI = managed.getOntologyID().getOntologyIRI(); - if(managedIRI.isPresent() && duplicate.equals(managedIRI.get())) { - LOG.info("Skip already loaded ontology: "+iri); + if (managedIRI.isPresent() && duplicate.equals(managedIRI.get())) { + LOG.info("Skip already loaded ontology: " + iri); ont = managed; break; } @@ -263,9 +266,9 @@ public OWLOntology parseOWL(IRI iri) throws OWLOntologyCreationException { /** * Provide names for the {@link OBOFormatWriter} using an * {@link MinervaOWLGraphWrapper}. - * + * * @see OboAndOwlNameProvider use the {@link OboAndOwlNameProvider}, the - * pure OWL lookup is problematic for relations. + * pure OWL lookup is problematic for relations. */ public static class OWLGraphWrapperNameProvider implements NameProvider { private final MinervaOWLGraphWrapper graph; @@ -294,9 +297,7 @@ public OWLGraphWrapperNameProvider(MinervaOWLGraphWrapper graph, String defaultO /** * @param graph - * @param oboDoc - * - * If an {@link OBODoc} is available use {@link OboAndOwlNameProvider}. + * @param oboDoc If an {@link OBODoc} is available use {@link OboAndOwlNameProvider}. 
*/ @Deprecated public OWLGraphWrapperNameProvider(MinervaOWLGraphWrapper graph, OBODoc oboDoc) { diff --git a/minerva-core/src/main/java/owltools/util/OwlHelper.java b/minerva-core/src/main/java/owltools/util/OwlHelper.java index 7c90fa5f..b2e97004 100644 --- a/minerva-core/src/main/java/owltools/util/OwlHelper.java +++ b/minerva-core/src/main/java/owltools/util/OwlHelper.java @@ -1,292 +1,258 @@ package owltools.util; -import java.util.Collections; -import java.util.HashMap; -import java.util.HashSet; -import java.util.Map; -import java.util.Set; - -import org.semanticweb.owlapi.model.AxiomType; -import org.semanticweb.owlapi.model.IRI; -import org.semanticweb.owlapi.model.OWLAnnotation; -import org.semanticweb.owlapi.model.OWLAnnotationAssertionAxiom; -import org.semanticweb.owlapi.model.OWLAnnotationProperty; -import org.semanticweb.owlapi.model.OWLClass; -import org.semanticweb.owlapi.model.OWLClassAssertionAxiom; -import org.semanticweb.owlapi.model.OWLClassExpression; -import org.semanticweb.owlapi.model.OWLEntity; -import org.semanticweb.owlapi.model.OWLEquivalentClassesAxiom; -import org.semanticweb.owlapi.model.OWLIndividual; -import org.semanticweb.owlapi.model.OWLNamedIndividual; -import org.semanticweb.owlapi.model.OWLObjectPropertyAssertionAxiom; -import org.semanticweb.owlapi.model.OWLObjectPropertyExpression; -import org.semanticweb.owlapi.model.OWLOntology; -import org.semanticweb.owlapi.model.OWLSubAnnotationPropertyOfAxiom; -import org.semanticweb.owlapi.model.OWLSubClassOfAxiom; -import org.semanticweb.owlapi.model.OWLSubObjectPropertyOfAxiom; -import org.semanticweb.owlapi.model.OWLSubPropertyAxiom; +import org.semanticweb.owlapi.model.*; + +import java.util.*; public class OwlHelper { - private OwlHelper() { - // no instances - } - - public static Set getAnnotations(OWLEntity e, OWLAnnotationProperty property, OWLOntology ont) { - Set annotations; - if (e != null && property != null && ont != null) { - annotations = new HashSet<>(); - for (OWLAnnotationAssertionAxiom ax : ont.getAnnotationAssertionAxioms(e.getIRI())) { - if (property.equals(ax.getProperty())) { - annotations.add(ax.getAnnotation()); - } - } - } - else { - annotations = Collections.emptySet(); - } - return annotations; - } - - public static Set getAnnotations(OWLEntity e, OWLOntology ont) { - Set annotations; - if (e != null && ont != null) { - Set axioms = ont.getAnnotationAssertionAxioms(e.getIRI()); - annotations = new HashSet<>(axioms.size()); - for(OWLAnnotationAssertionAxiom ax : axioms) { - annotations.add(ax.getAnnotation()); - } - } - else { - annotations = Collections.emptySet(); - } - return annotations; - } - - public static Set getAnnotations(OWLEntity e, Set ontolgies) { - Set annotations; - if (e != null && ontolgies != null && !ontolgies.isEmpty()) { - annotations = new HashSet<>(); - for(OWLOntology ont : ontolgies) { - annotations.addAll(getAnnotations(e, ont)); - } - } - else { - annotations = Collections.emptySet(); - } - return annotations; - } - - public static Set getEquivalentClasses(OWLClass cls, OWLOntology ont) { - Set expressions; - if (cls != null && ont != null) { - Set axioms = ont.getEquivalentClassesAxioms(cls); - expressions = new HashSet<>(axioms.size()); - for(OWLEquivalentClassesAxiom ax : axioms) { - expressions.addAll(ax.getClassExpressions()); - } - expressions.remove(cls); // set should not contain the query cls - } - else { - expressions = Collections.emptySet(); - } - return expressions; - } - - public static Set getEquivalentClasses(OWLClass cls, Set 
ontologies) { - Set expressions; - if (cls != null && ontologies != null && ontologies.isEmpty() == false) { - expressions = new HashSet<>(); - for(OWLOntology ont : ontologies) { - expressions.addAll(getEquivalentClasses(cls, ont)); - } - } - else { - expressions = Collections.emptySet(); - } - return expressions; - } - - public static Set getSuperClasses(OWLClass subCls, OWLOntology ont) { - Set result; - if (subCls != null && ont != null) { - result = new HashSet<>(); - Set axioms = ont.getSubClassAxiomsForSubClass(subCls); - for (OWLSubClassOfAxiom axiom : axioms) { - result.add(axiom.getSuperClass()); - } - } - else { - result = Collections.emptySet(); - } - return result; - } - - public static Set getSuperClasses(OWLClass subCls, Set ontologies) { - Set result; - if (subCls != null && ontologies != null && ontologies.isEmpty() == false) { - result = new HashSet<>(); - for(OWLOntology ont : ontologies) { - result.addAll(getSuperClasses(subCls, ont)); - } - } - else { - result = Collections.emptySet(); - } - return result; - } - - public static Set getSubClasses(OWLClass superCls, OWLOntology ont) { - Set result; - if (superCls != null && ont != null) { - result = new HashSet<>(); - Set axioms = ont.getSubClassAxiomsForSuperClass(superCls); - for (OWLSubClassOfAxiom axiom : axioms) { - result.add(axiom.getSubClass()); - } - } - else { - result = Collections.emptySet(); - } - return result; - } - - public static Set getSubClasses(OWLClass superCls, Set ontologies) { - Set result; - if (superCls != null && ontologies != null && ontologies.isEmpty() == false) { - result = new HashSet<>(); - for(OWLOntology ont : ontologies) { - result.addAll(getSubClasses(superCls, ont)); - } - } - else { - result = Collections.emptySet(); - } - return result; - } - - public static Set getTypes(OWLIndividual i, OWLOntology ont) { - Set types; - if (ont != null && i != null && i.isNamed()) { - types = getTypes(i.asOWLNamedIndividual(), ont); - } - else { - types = Collections.emptySet(); - } - return types; - } - - public static Set getTypes(OWLNamedIndividual i, OWLOntology ont) { - Set types; - if (i != null && ont != null) { - types = new HashSet<>(); - for (OWLClassAssertionAxiom axiom : ont.getClassAssertionAxioms(i)) { - types.add(axiom.getClassExpression()); - } - } - else { - types = Collections.emptySet(); - } - return types; - } - - public static Set getTypes(OWLNamedIndividual i, Set ontologies) { - Set types; - if (i != null && ontologies != null && ontologies.isEmpty() == false) { - types = new HashSet<>(); - for(OWLOntology ont : ontologies) { - types.addAll(getTypes(i, ont)); - } - } - else { - types = Collections.emptySet(); - } - return types; - } - - public static Map> getObjectPropertyValues(OWLIndividual i, OWLOntology ont) { - Set axioms = ont.getObjectPropertyAssertionAxioms(i); - Map> result = new HashMap<>(); - for(OWLObjectPropertyAssertionAxiom ax : axioms) { - Set inds = result.get(ax.getProperty()); - if (inds == null) { - inds = new HashSet<>(); - result.put(ax.getProperty(), inds); - } - inds.add(ax.getObject()); - } - return result; - } - - public static boolean isTransitive(OWLObjectPropertyExpression property, OWLOntology ontology) { - return !ontology.getTransitiveObjectPropertyAxioms(property).isEmpty(); - } - - public static boolean isTransitive(OWLObjectPropertyExpression property, Set ontologies) { - for (OWLOntology ont : ontologies) { - if (isTransitive(property, ont)) { - return true; - } - } - return false; - } - - public static Set 
getSubProperties(OWLAnnotationProperty superProp, OWLOntology ont) { - return getSubProperties(superProp, Collections.singleton(ont)); - } - - public static Set getSubProperties(OWLAnnotationProperty superProp, Set ontologies) { - Set result = new HashSet(); - for (OWLOntology ont : ontologies) { - for (OWLSubAnnotationPropertyOfAxiom ax : ont.getAxioms(AxiomType.SUB_ANNOTATION_PROPERTY_OF)) { - if (ax.getSuperProperty().equals(superProp)) { - result.add(ax.getSubProperty()); - } - } - } - return result; - } - - public static Set getSuperProperties(OWLAnnotationProperty subProp, OWLOntology ont) { - return getSuperProperties(subProp, Collections.singleton(ont)); - } - - public static Set getSuperProperties(OWLAnnotationProperty subProp, Set ontologies) { - Set result = new HashSet(); - for (OWLOntology ont : ontologies) { - for (OWLSubAnnotationPropertyOfAxiom ax : ont.getAxioms(AxiomType.SUB_ANNOTATION_PROPERTY_OF)) { - if (ax.getSubProperty().equals(subProp)) { - result.add(ax.getSuperProperty()); - } - } - } - return result; - } - - public static Set getSuperProperties(OWLObjectPropertyExpression prop, OWLOntology ont) { - Set result = new HashSet<>(); - Set axioms = ont.getObjectSubPropertyAxiomsForSubProperty(prop); - for (OWLSubPropertyAxiom axiom : axioms) { - result.add(axiom.getSuperProperty()); - } - return result; - } - - public static Set getSubProperties(OWLObjectPropertyExpression prop, OWLOntology ont) { - Set results = new HashSet<>(); - Set axioms = ont.getObjectSubPropertyAxiomsForSuperProperty(prop); - for (OWLSubObjectPropertyOfAxiom axiom : axioms) { - results.add(axiom.getSubProperty()); - } - return results; - } - - public static String getIdentifier(IRI iriId, OWLOntology baseOntology) { + private OwlHelper() { + // no instances + } + + public static Set getAnnotations(OWLEntity e, OWLAnnotationProperty property, OWLOntology ont) { + Set annotations; + if (e != null && property != null && ont != null) { + annotations = new HashSet<>(); + for (OWLAnnotationAssertionAxiom ax : ont.getAnnotationAssertionAxioms(e.getIRI())) { + if (property.equals(ax.getProperty())) { + annotations.add(ax.getAnnotation()); + } + } + } else { + annotations = Collections.emptySet(); + } + return annotations; + } + + public static Set getAnnotations(OWLEntity e, OWLOntology ont) { + Set annotations; + if (e != null && ont != null) { + Set axioms = ont.getAnnotationAssertionAxioms(e.getIRI()); + annotations = new HashSet<>(axioms.size()); + for (OWLAnnotationAssertionAxiom ax : axioms) { + annotations.add(ax.getAnnotation()); + } + } else { + annotations = Collections.emptySet(); + } + return annotations; + } + + public static Set getAnnotations(OWLEntity e, Set ontolgies) { + Set annotations; + if (e != null && ontolgies != null && !ontolgies.isEmpty()) { + annotations = new HashSet<>(); + for (OWLOntology ont : ontolgies) { + annotations.addAll(getAnnotations(e, ont)); + } + } else { + annotations = Collections.emptySet(); + } + return annotations; + } + + public static Set getEquivalentClasses(OWLClass cls, OWLOntology ont) { + Set expressions; + if (cls != null && ont != null) { + Set axioms = ont.getEquivalentClassesAxioms(cls); + expressions = new HashSet<>(axioms.size()); + for (OWLEquivalentClassesAxiom ax : axioms) { + expressions.addAll(ax.getClassExpressions()); + } + expressions.remove(cls); // set should not contain the query cls + } else { + expressions = Collections.emptySet(); + } + return expressions; + } + + public static Set getEquivalentClasses(OWLClass cls, Set 
ontologies) { + Set expressions; + if (cls != null && ontologies != null && ontologies.isEmpty() == false) { + expressions = new HashSet<>(); + for (OWLOntology ont : ontologies) { + expressions.addAll(getEquivalentClasses(cls, ont)); + } + } else { + expressions = Collections.emptySet(); + } + return expressions; + } + + public static Set getSuperClasses(OWLClass subCls, OWLOntology ont) { + Set result; + if (subCls != null && ont != null) { + result = new HashSet<>(); + Set axioms = ont.getSubClassAxiomsForSubClass(subCls); + for (OWLSubClassOfAxiom axiom : axioms) { + result.add(axiom.getSuperClass()); + } + } else { + result = Collections.emptySet(); + } + return result; + } + + public static Set getSuperClasses(OWLClass subCls, Set ontologies) { + Set result; + if (subCls != null && ontologies != null && ontologies.isEmpty() == false) { + result = new HashSet<>(); + for (OWLOntology ont : ontologies) { + result.addAll(getSuperClasses(subCls, ont)); + } + } else { + result = Collections.emptySet(); + } + return result; + } + + public static Set getSubClasses(OWLClass superCls, OWLOntology ont) { + Set result; + if (superCls != null && ont != null) { + result = new HashSet<>(); + Set axioms = ont.getSubClassAxiomsForSuperClass(superCls); + for (OWLSubClassOfAxiom axiom : axioms) { + result.add(axiom.getSubClass()); + } + } else { + result = Collections.emptySet(); + } + return result; + } + + public static Set getSubClasses(OWLClass superCls, Set ontologies) { + Set result; + if (superCls != null && ontologies != null && ontologies.isEmpty() == false) { + result = new HashSet<>(); + for (OWLOntology ont : ontologies) { + result.addAll(getSubClasses(superCls, ont)); + } + } else { + result = Collections.emptySet(); + } + return result; + } + + public static Set getTypes(OWLIndividual i, OWLOntology ont) { + Set types; + if (ont != null && i != null && i.isNamed()) { + types = getTypes(i.asOWLNamedIndividual(), ont); + } else { + types = Collections.emptySet(); + } + return types; + } + + public static Set getTypes(OWLNamedIndividual i, OWLOntology ont) { + Set types; + if (i != null && ont != null) { + types = new HashSet<>(); + for (OWLClassAssertionAxiom axiom : ont.getClassAssertionAxioms(i)) { + types.add(axiom.getClassExpression()); + } + } else { + types = Collections.emptySet(); + } + return types; + } - if(iriId == null) - return null; + public static Set getTypes(OWLNamedIndividual i, Set ontologies) { + Set types; + if (i != null && ontologies != null && ontologies.isEmpty() == false) { + types = new HashSet<>(); + for (OWLOntology ont : ontologies) { + types.addAll(getTypes(i, ont)); + } + } else { + types = Collections.emptySet(); + } + return types; + } - String iri = iriId.toString(); + public static Map> getObjectPropertyValues(OWLIndividual i, OWLOntology ont) { + Set axioms = ont.getObjectPropertyAssertionAxioms(i); + Map> result = new HashMap<>(); + for (OWLObjectPropertyAssertionAxiom ax : axioms) { + Set inds = result.get(ax.getProperty()); + if (inds == null) { + inds = new HashSet<>(); + result.put(ax.getProperty(), inds); + } + inds.add(ax.getObject()); + } + return result; + } + + public static boolean isTransitive(OWLObjectPropertyExpression property, OWLOntology ontology) { + return !ontology.getTransitiveObjectPropertyAxioms(property).isEmpty(); + } + + public static boolean isTransitive(OWLObjectPropertyExpression property, Set ontologies) { + for (OWLOntology ont : ontologies) { + if (isTransitive(property, ont)) { + return true; + } + } + return false; + } + 
+ public static Set getSubProperties(OWLAnnotationProperty superProp, OWLOntology ont) { + return getSubProperties(superProp, Collections.singleton(ont)); + } + + public static Set getSubProperties(OWLAnnotationProperty superProp, Set ontologies) { + Set result = new HashSet(); + for (OWLOntology ont : ontologies) { + for (OWLSubAnnotationPropertyOfAxiom ax : ont.getAxioms(AxiomType.SUB_ANNOTATION_PROPERTY_OF)) { + if (ax.getSuperProperty().equals(superProp)) { + result.add(ax.getSubProperty()); + } + } + } + return result; + } + + public static Set getSuperProperties(OWLAnnotationProperty subProp, OWLOntology ont) { + return getSuperProperties(subProp, Collections.singleton(ont)); + } + + public static Set getSuperProperties(OWLAnnotationProperty subProp, Set ontologies) { + Set result = new HashSet(); + for (OWLOntology ont : ontologies) { + for (OWLSubAnnotationPropertyOfAxiom ax : ont.getAxioms(AxiomType.SUB_ANNOTATION_PROPERTY_OF)) { + if (ax.getSubProperty().equals(subProp)) { + result.add(ax.getSuperProperty()); + } + } + } + return result; + } + + public static Set getSuperProperties(OWLObjectPropertyExpression prop, OWLOntology ont) { + Set result = new HashSet<>(); + Set axioms = ont.getObjectSubPropertyAxiomsForSubProperty(prop); + for (OWLSubPropertyAxiom axiom : axioms) { + result.add(axiom.getSuperProperty()); + } + return result; + } + + public static Set getSubProperties(OWLObjectPropertyExpression prop, OWLOntology ont) { + Set results = new HashSet<>(); + Set axioms = ont.getObjectSubPropertyAxiomsForSuperProperty(prop); + for (OWLSubObjectPropertyOfAxiom axiom : axioms) { + results.add(axiom.getSubProperty()); + } + return results; + } + + public static String getIdentifier(IRI iriId, OWLOntology baseOntology) { + + if (iriId == null) + return null; + + String iri = iriId.toString(); /* // canonical IRIs @@ -295,37 +261,37 @@ public static String getIdentifier(IRI iriId, OWLOntology baseOntology) { } */ - int indexSlash = iri.lastIndexOf("/"); + int indexSlash = iri.lastIndexOf("/"); - String prefixURI = null; - String id = null; + String prefixURI = null; + String id = null; - if(indexSlash>-1){ - prefixURI = iri.substring(0, indexSlash+1); - id = iri.substring(indexSlash+1); - }else - id = iri; + if (indexSlash > -1) { + prefixURI = iri.substring(0, indexSlash + 1); + id = iri.substring(indexSlash + 1); + } else + id = iri; - String s[]= id.split("#_"); + String s[] = id.split("#_"); - // table 5.9.2 row 2 - NonCanonical-Prefixed-ID - if(s.length>1){ - return s[0] + ":" + s[1]; - } + // table 5.9.2 row 2 - NonCanonical-Prefixed-ID + if (s.length > 1) { + return s[0] + ":" + s[1]; + } - // row 3 - Unprefixed-ID - s= id.split("#"); - if(s.length>1){ - // prefixURI = prefixURI + s[0] + "#"; + // row 3 - Unprefixed-ID + s = id.split("#"); + if (s.length > 1) { + // prefixURI = prefixURI + s[0] + "#"; - // if(!(s[1].contains("#") || s[1].contains("_"))){ - String prefix = ""; + // if(!(s[1].contains("#") || s[1].contains("_"))){ + String prefix = ""; - if("owl".equals(s[0]) || "rdf".equals(s[0]) || "rdfs".equals(s[0])){ - prefix = s[0] + ":"; - } - // TODO: the following implements behavior in current spec, but this leads to undesirable results + if ("owl".equals(s[0]) || "rdf".equals(s[0]) || "rdfs".equals(s[0])) { + prefix = s[0] + ":"; + } + // TODO: the following implements behavior in current spec, but this leads to undesirable results /* else if (baseOntology != null) { String oid = getOntologyId(baseOntology); // OBO-style ID @@ -338,36 +304,35 @@ else if 
(baseOntology != null) { } */ - return prefix + s[1]; - } + return prefix + s[1]; + } - // row 1 - Canonical-Prefixed-ID - s= id.split("_"); + // row 1 - Canonical-Prefixed-ID + s = id.split("_"); + + if (s.length == 2 && !id.contains("#") && !s[1].contains("_")) { + String localId = java.net.URLDecoder.decode(s[1]); + return s[0] + ":" + localId; + } + if (s.length > 2 && !id.contains("#")) { + if (s[s.length - 1].replaceAll("[0-9]", "").length() == 0) { + StringBuffer sb = new StringBuffer(); + for (int i = 0; i < s.length; i++) { + if (i > 0) { + if (i == s.length - 1) { + sb.append(":"); + } else { + sb.append("_"); + } + } + sb.append(s[i]); + } + return sb.toString(); + } + } - if(s.length==2 && !id.contains("#") && !s[1].contains("_")){ - String localId = java.net.URLDecoder.decode(s[1]); - return s[0] + ":" + localId; - } - if(s.length > 2 && !id.contains("#")) { - if (s[s.length-1].replaceAll("[0-9]","").length() == 0) { - StringBuffer sb = new StringBuffer(); - for (int i=0; i < s.length; i++) { - if (i > 0) { - if (i == s.length -1) { - sb.append(":"); - } - else { - sb.append("_"); - } - } - sb.append(s[i]); - } - return sb.toString(); - } - } + return iri; + } - return iri; - } - } diff --git a/minerva-core/src/main/java/owltools/version/VersionInfo.java b/minerva-core/src/main/java/owltools/version/VersionInfo.java index 6e00f473..f75ba3a3 100644 --- a/minerva-core/src/main/java/owltools/version/VersionInfo.java +++ b/minerva-core/src/main/java/owltools/version/VersionInfo.java @@ -10,41 +10,40 @@ public class VersionInfo { - private VersionInfo() { - // make constructor private - } - - /** - * Try to retrieve the value for the given key from a manifest file. - * Returns the first match or null, if it does not exist. - * - * @param key - * @return string value or null - */ - public static String getManifestVersion(String key) { - Enumeration resEnum; - try { - resEnum = Thread.currentThread().getContextClassLoader().getResources(JarFile.MANIFEST_NAME); - while (resEnum.hasMoreElements()) { - try { - URL url = resEnum.nextElement(); - InputStream is = url.openStream(); - if (is != null) { - Manifest manifest = new Manifest(is); - Attributes mainAttribs = manifest.getMainAttributes(); - String version = mainAttribs.getValue(key); - if(version != null) { - return version; - } - } - } - catch (Exception exception) { - // Silently ignore problematic manifests in classpath - } - } - } catch (IOException ioException) { - // Silently ignore any IO issues with manifests - } - return null; - } + private VersionInfo() { + // make constructor private + } + + /** + * Try to retrieve the value for the given key from a manifest file. + * Returns the first match or null, if it does not exist. 
+ * + * @param key + * @return string value or null + */ + public static String getManifestVersion(String key) { + Enumeration resEnum; + try { + resEnum = Thread.currentThread().getContextClassLoader().getResources(JarFile.MANIFEST_NAME); + while (resEnum.hasMoreElements()) { + try { + URL url = resEnum.nextElement(); + InputStream is = url.openStream(); + if (is != null) { + Manifest manifest = new Manifest(is); + Attributes mainAttribs = manifest.getMainAttributes(); + String version = mainAttribs.getValue(key); + if (version != null) { + return version; + } + } + } catch (Exception exception) { + // Silently ignore problematic manifests in classpath + } + } + } catch (IOException ioException) { + // Silently ignore any IO issues with manifests + } + return null; + } } diff --git a/minerva-core/src/main/java/owltools/vocab/OBONamespaces.java b/minerva-core/src/main/java/owltools/vocab/OBONamespaces.java index b816f4f8..17e8c484 100644 --- a/minerva-core/src/main/java/owltools/vocab/OBONamespaces.java +++ b/minerva-core/src/main/java/owltools/vocab/OBONamespaces.java @@ -1,13 +1,14 @@ package owltools.vocab; public enum OBONamespaces { - GO("GO"), - BFO("BFO"), - GOREL("GOREL"), - RO("RO"); - - final String ns; - OBONamespaces(String ns) { - this.ns = ns; - } + GO("GO"), + BFO("BFO"), + GOREL("GOREL"), + RO("RO"); + + final String ns; + + OBONamespaces(String ns) { + this.ns = ns; + } } diff --git a/minerva-core/src/main/java/owltools/vocab/OBOUpperVocabulary.java b/minerva-core/src/main/java/owltools/vocab/OBOUpperVocabulary.java index 3fd88ede..57964c8a 100644 --- a/minerva-core/src/main/java/owltools/vocab/OBOUpperVocabulary.java +++ b/minerva-core/src/main/java/owltools/vocab/OBOUpperVocabulary.java @@ -1,74 +1,68 @@ package owltools.vocab; -import org.semanticweb.owlapi.model.IRI; -import org.semanticweb.owlapi.model.OWLClass; -import org.semanticweb.owlapi.model.OWLDataFactory; -import org.semanticweb.owlapi.model.OWLObjectProperty; -import org.semanticweb.owlapi.model.OWLOntology; +import org.semanticweb.owlapi.model.*; public enum OBOUpperVocabulary { - - /** - * - */ - GO_molecular_function(OBONamespaces.GO, "0003674"), - GO_biological_process(OBONamespaces.GO, "0008150"), - GO_cellular_process(OBONamespaces.GO, "0009987"), - BFO_part_of(OBONamespaces.BFO, "0000050"), - BFO_has_part(OBONamespaces.BFO, "0000051"), - BFO_occurs_in(OBONamespaces.BFO, "0000066"), - RO_regulates(OBONamespaces.RO, "0002211"), - RO_negatively_regulates(OBONamespaces.RO, "0002212"), - RO_positively_regulates(OBONamespaces.RO, "0002213"), - RO_starts(OBONamespaces.RO, "0002223"), - RO_ends(OBONamespaces.RO, "0002229"), - RO_gene_product_of(OBONamespaces.RO, "0002204"), - RO_involved_in(OBONamespaces.RO, "0002331"), - GOREL_enabled_by(OBONamespaces.RO, "0002333"), - GOREL_directly_provides_input_for(OBONamespaces.RO, "0002413"); - - - - - final IRI iri; - final OBONamespaces namespace; - final String id; - - public static final String OBO = "http://purl.obolibrary.org/obo/"; - - OBOUpperVocabulary(OBONamespaces ns, String id) { - this.namespace = ns; - this.id = id; - iri = IRI.create(OBO + ns + "_" + id); - } - - - - public IRI getIRI() { - return iri; - } - - - public OWLClass getOWLClass(OWLDataFactory f) { - return f.getOWLClass(iri); - } - public OWLClass getOWLClass(OWLOntology o) { - return getOWLClass(o.getOWLOntologyManager().getOWLDataFactory()); - } - - public OWLObjectProperty getObjectProperty(OWLDataFactory f) { - return f.getOWLObjectProperty(iri); - } - public OWLObjectProperty 
getObjectProperty(OWLOntology o) { - return getObjectProperty(o.getOWLOntologyManager().getOWLDataFactory()); - } - - @Override - public String toString() { - return iri.toString(); - } + /** + * + */ + GO_molecular_function(OBONamespaces.GO, "0003674"), + GO_biological_process(OBONamespaces.GO, "0008150"), + GO_cellular_process(OBONamespaces.GO, "0009987"), + BFO_part_of(OBONamespaces.BFO, "0000050"), + BFO_has_part(OBONamespaces.BFO, "0000051"), + BFO_occurs_in(OBONamespaces.BFO, "0000066"), + RO_regulates(OBONamespaces.RO, "0002211"), + RO_negatively_regulates(OBONamespaces.RO, "0002212"), + RO_positively_regulates(OBONamespaces.RO, "0002213"), + RO_starts(OBONamespaces.RO, "0002223"), + RO_ends(OBONamespaces.RO, "0002229"), + RO_gene_product_of(OBONamespaces.RO, "0002204"), + RO_involved_in(OBONamespaces.RO, "0002331"), + GOREL_enabled_by(OBONamespaces.RO, "0002333"), + GOREL_directly_provides_input_for(OBONamespaces.RO, "0002413"); + + + final IRI iri; + final OBONamespaces namespace; + final String id; + + public static final String OBO = "http://purl.obolibrary.org/obo/"; + + OBOUpperVocabulary(OBONamespaces ns, String id) { + this.namespace = ns; + this.id = id; + iri = IRI.create(OBO + ns + "_" + id); + } + + + public IRI getIRI() { + return iri; + } + + + public OWLClass getOWLClass(OWLDataFactory f) { + return f.getOWLClass(iri); + } + + public OWLClass getOWLClass(OWLOntology o) { + return getOWLClass(o.getOWLOntologyManager().getOWLDataFactory()); + } + + public OWLObjectProperty getObjectProperty(OWLDataFactory f) { + return f.getOWLObjectProperty(iri); + } + + public OWLObjectProperty getObjectProperty(OWLOntology o) { + return getObjectProperty(o.getOWLOntologyManager().getOWLDataFactory()); + } + + @Override + public String toString() { + return iri.toString(); + } } diff --git a/minerva-core/src/test/java/org/geneontology/minerva/BlazegraphMolecularModelManagerTest.java b/minerva-core/src/test/java/org/geneontology/minerva/BlazegraphMolecularModelManagerTest.java index e1c1591f..83c4a91e 100644 --- a/minerva-core/src/test/java/org/geneontology/minerva/BlazegraphMolecularModelManagerTest.java +++ b/minerva-core/src/test/java/org/geneontology/minerva/BlazegraphMolecularModelManagerTest.java @@ -18,7 +18,6 @@ import org.openrdf.query.TupleQueryResult; import org.semanticweb.owlapi.apibinding.OWLManager; import org.semanticweb.owlapi.model.*; -//import owltools.OWLToolsTestBasics; import java.io.File; import java.io.FileInputStream; @@ -27,328 +26,328 @@ import static org.junit.Assert.*; -public class BlazegraphMolecularModelManagerTest { - private final CurieHandler curieHandler = DefaultCurieHandler.getDefaultHandler(); - static final String go_lego_journal_file = "/tmp/test-go-lego-blazegraph.jnl"; - - @Rule - public TemporaryFolder folder = new TemporaryFolder(); - - /** - * Test whether the revised import function properly digests turtle files. We import a ttl file - * into the BlazegraphMolecularModel, dump the model into files, and then compare these files. 
- * Check this pull request for more information: https://github.com/geneontology/minerva/pull/144/files - * - * @throws Exception - */ - @Test - public void testImportDump() throws Exception { - /* I used the file from one of the turtle file in https://github.com/geneontology/noctua-models/blob/master/models/0000000300000001.ttl */ - String sourceModelPath = "src/test/resources/dummy-noctua-model.ttl"; - BlazegraphMolecularModelManager m3 = createBlazegraphMolecularModelManager(); - - /* Import the test turtle file */ - m3.importModelToDatabase(new File(sourceModelPath), false); - /* Dump back triples in the model to temporary files */ - for (IRI modelId : m3.getStoredModelIds()) - m3.dumpStoredModel(modelId, folder.getRoot()); - - compareDumpUsingJena(new File(sourceModelPath), folder.getRoot(), null); - m3.dispose(); - } - - @Test - public void testRemoveImportsDuringImport() throws Exception { - String sourceModelPath = "src/test/resources/dummy-noctua-modelwith-import.ttl"; - OWLOntologyManager ontman = OWLManager.createOWLOntologyManager(); - OWLOntology cam = ontman.loadOntologyFromOntologyDocument(new File(sourceModelPath)); - int axioms = cam.getAxiomCount(); - Set imports = cam.getImportsDeclarations(); - assertFalse(imports.size()==0); - - BlazegraphMolecularModelManager m3 = createBlazegraphMolecularModelManager(); - /* Import the test turtle file */ - String modelId = m3.importModelToDatabase(new File(sourceModelPath), false); - // read it back out and show it is imports free - OWLOntology loaded = m3.loadModelABox(IRI.create(modelId)); - Set shouldbenone = loaded.getImportsDeclarations(); - int loadedaxioms = loaded.getAxiomCount(); - assertTrue(shouldbenone.size()==0); - assertTrue(axioms==loadedaxioms); - m3.dispose(); - } - - /** - * Test the whole cycle of data processing using Blazegraph. 
- * Check this pull request: https://github.com/geneontology/minerva/issues/143 - * - * @throws Exception - */ - @Test - public void testFullCycle() throws Exception { - BlazegraphMolecularModelManager m3 = createBlazegraphMolecularModelManager(); - try { - ModelContainer model = m3.generateBlankModel(null); - testModelSaveLoad(m3, model); - m3.unlinkModel(model.getModelId()); - assertEquals(m3.getModelIds().size(), 0); - - model = m3.generateBlankModel(null); - testModelAddRemove(m3, model); - testModelImport(m3, model); - }finally { - m3.dispose(); - } - } - - @Test - public void testModelStateDelete() throws Exception { - BlazegraphMolecularModelManager m3 = createBlazegraphMolecularModelManager(); - try { - final OWLDataFactory df = m3.getOntology().getOWLOntologyManager().getOWLDataFactory(); - final OWLObjectProperty partOf = df.getOWLObjectProperty(curieHandler.getIRI("BFO:0000050")); - final OWLAnnotationProperty modelState = df.getOWLAnnotationProperty(AnnotationShorthand.modelstate.getAnnotationProperty()); - - ModelContainer model1 = m3.generateBlankModel(null); - OWLNamedIndividual i1 = m3.createIndividualWithIRI(model1, curieHandler.getIRI("GO:0000001"), null, null); - OWLNamedIndividual i2 = m3.createIndividualWithIRI(model1, curieHandler.getIRI("GO:0000002"), null, null); - m3.addFact(model1, partOf, i1, i2, Collections.emptySet(), null); - - ModelContainer model2 = m3.generateBlankModel(null); - OWLNamedIndividual i3 = m3.createIndividualWithIRI(model2, curieHandler.getIRI("GO:0000001"), null, null); - OWLNamedIndividual i4 = m3.createIndividualWithIRI(model2, curieHandler.getIRI("GO:0000002"), null, null); - m3.addFact(model2, partOf, i3, i4, Collections.emptySet(), null); - m3.addModelAnnotations(model2, Collections.singleton(df.getOWLAnnotation(modelState, df.getOWLLiteral("delete"))), null); - m3.saveAllModels(null, null); - - File dir = folder.newFolder(); - m3.dumpStoredModel(model1.getModelId(), dir); - m3.dumpStoredModel(model2.getModelId(), dir); - m3.dispose(); - - BlazegraphMolecularModelManager m3b = createBlazegraphMolecularModelManager(); - try { - assertEquals(2, dir.list().length); - for (File file : dir.listFiles()) { - m3b.importModelToDatabase(file, true); - } - assertEquals(1, m3b.getStoredModelIds().size()); - }finally { - m3b.dispose(); - } - }finally { - m3.dispose(); - } - } - - @Test - public void testSPARQLQuery() throws Exception { - String sourceModelPath = "src/test/resources/dummy-noctua-model.ttl"; - BlazegraphMolecularModelManager m3 = createBlazegraphMolecularModelManager(); - try { - /* Import the test turtle file */ - m3.importModelToDatabase(new File(sourceModelPath), false); - QueryResult selectResult = m3.executeSPARQLQuery("SELECT DISTINCT ?g WHERE { GRAPH ?g { ?s ?p ?o } }", 10); - assertTrue(selectResult instanceof TupleQueryResult); - assertEquals("http://model.geneontology.org/0000000300000001", ((TupleQueryResult) selectResult).next().getBinding("g").getValue().stringValue()); - QueryResult constructResult = m3.executeSPARQLQuery("CONSTRUCT { ?s ?g } WHERE { GRAPH ?g { ?s ?p ?o } }", 10); - assertTrue(constructResult instanceof GraphQueryResult); - assertEquals("http://model.geneontology.org/0000000300000001", ((GraphQueryResult) constructResult).next().getObject().stringValue()); - SPARQLResultJSONRenderer renderer = new SPARQLResultJSONRenderer(m3.getCuriHandler()); - String gValue = renderer.renderResults((TupleQueryResult)m3.executeSPARQLQuery("SELECT DISTINCT ?g WHERE { GRAPH ?g { ?s ?p ?o } }", 10)) - 
.getAsJsonObject("results") - .getAsJsonArray("bindings").get(0) - .getAsJsonObject().getAsJsonObject("g").getAsJsonPrimitive("value").getAsString(); - assertEquals("gomodel:0000000300000001", gValue); - JsonObject graphJson = renderer.renderGraph((GraphQueryResult)m3.executeSPARQLQuery("CONSTRUCT { ?s ?g } WHERE { GRAPH ?g { ?s ?p ?o } }", 10)); - String objectValue = graphJson.getAsJsonObject("GO:0000981").getAsJsonArray("ex:subject_in").get(0).getAsJsonObject().getAsJsonPrimitive("value").getAsString(); - assertEquals("gomodel:0000000300000001", objectValue); - }finally { - m3.dispose(); - } - } - - /** - * Test the process that adds some individuals, saves them and then loads them back into the model. - * - * @param m3 - * @param model - * @throws Exception - */ - private void testModelSaveLoad(BlazegraphMolecularModelManager m3, ModelContainer model) throws Exception { - IRI modelID = model.getModelId(); - final OWLObjectProperty partOf = m3.getOntology().getOWLOntologyManager(). - getOWLDataFactory().getOWLObjectProperty(curieHandler.getIRI("BFO:0000050")); - OWLNamedIndividual i1 = m3.createIndividualWithIRI(model, curieHandler.getIRI("GO:0000001"), null, null); - OWLNamedIndividual i2 = m3.createIndividualWithIRI(model, curieHandler.getIRI("GO:0000002"), null, null); - - m3.addFact(model, partOf, i1, i2, Collections.emptySet(), null); - m3.saveModel(model, null, null); - m3.unlinkModel(modelID); - - /* getModel internally calls the loadModel method */ - model = m3.getModel(modelID); - Collection loaded = m3.getIndividuals(model.getModelId()); - assertTrue(loaded.contains(i1)); - assertTrue(loaded.contains(i2)); - } - - /** - * Repeatedly add and remove individuals/facts and check what happens. - * Also check the case whether individuals can be added after the Blazegraph instance is shutdown. 
- * - * @param m3 - * @param model - * @throws Exception - */ - private void testModelAddRemove(BlazegraphMolecularModelManager m3, ModelContainer model) throws Exception { - final OWLObjectProperty partOf = m3.getOntology().getOWLOntologyManager().getOWLDataFactory().getOWLObjectProperty(curieHandler.getIRI("BFO:0000050")); - OWLNamedIndividual i1 = m3.createIndividualWithIRI(model, curieHandler.getIRI("GO:0000001"), null, null); - OWLNamedIndividual i2 = m3.createIndividualWithIRI(model, curieHandler.getIRI("GO:0000002"), null, null); - OWLNamedIndividual i3 = m3.createIndividualWithIRI(model, curieHandler.getIRI("GO:0000003"), null, null); - OWLNamedIndividual i4 = m3.createIndividualWithIRI(model, curieHandler.getIRI("GO:0000004"), null, null); - - /* Add four individuals */ - m3.addFact(model, partOf, i1, i2, Collections.emptySet(), null); - m3.addFact(model, partOf, i3, i4, Collections.emptySet(), null); - m3.saveModel(model, null, null); - Collection loaded = m3.getIndividuals(model.getModelId()); - assertTrue(loaded.contains(i1) && loaded.contains(i2) && loaded.contains(i3) && loaded.contains(i4)); - - /* Remove the partOf triple that connects i1 and i2 */ - m3.removeFact(model, partOf, i1, i2, null); - m3.saveModel(model, null, null); - loaded = m3.getIndividuals(model.getModelId()); - assertTrue(loaded.contains(i1) && loaded.contains(i2) && loaded.contains(i3) && loaded.contains(i4)); - - /* Remove the i1 and i2 */ - m3.deleteIndividual(model, i1, null); - m3.deleteIndividual(model, i2, null); - m3.saveModel(model, null, null); - loaded = m3.getIndividuals(model.getModelId()); - assertTrue(!loaded.contains(i1) && !loaded.contains(i2) && loaded.contains(i3) && loaded.contains(i4)); - - /* Trying to remove the fact that is already removed */ - m3.deleteIndividual(model, i1, null); - m3.saveModel(model, null, null); - loaded = m3.getIndividuals(model.getModelId()); - assertTrue(!loaded.contains(i1) && !loaded.contains(i2) && loaded.contains(i3) && loaded.contains(i4)); - - /* Re-add the i1 */ - i1 = m3.createIndividualWithIRI(model, curieHandler.getIRI("GO:0000001"), - null, null); - loaded = m3.getIndividuals(model.getModelId()); - assertTrue(loaded.contains(i1) && !loaded.contains(i2) && loaded.contains(i3) && loaded.contains(i4)); - m3.saveModel(model, null, null); - assertEquals(m3.getModelIds().size(), 1); - - m3.unlinkModel(model.getModelId()); - - /* i5 should not be added; createIndividualWithIRI should throw java.lang.IllegalStateException */ - try { - OWLNamedIndividual i5 = m3.createIndividualWithIRI(model, curieHandler.getIRI("GO:0000005"), null, null); - m3.saveModel(model, null, null); - fail("Creating individual after disposing the model manager should not be allowed."); - } catch (IllegalStateException e) { - } - } - - /** - * Dump stored model and and read back the dumped ttl files; check whether the model is properly reconstructed - * from ttl files. Double-check whether the model is properly dumped using Jena. 
- * - * @param m3 - * @param model - * @throws Exception - */ - private void testModelImport(BlazegraphMolecularModelManager m3, ModelContainer model) throws Exception { - IRI modelId = model.getModelId(); - /* Dump the specific model that match model's Id */ - m3.dumpStoredModel(modelId, folder.getRoot()); - /* So far we created and saved two models */ - assertEquals(m3.getAvailableModelIds().size(), 2); - /* Shutdown the database instance */ - m3.dispose(); - - /* Create the instance again */ - m3 = createBlazegraphMolecularModelManager(); - /* Import the dumped ttl files */ - String[] extensions = new String[]{"ttl"}; - List files = (List) FileUtils.listFiles(folder.getRoot(), extensions, true); - for (File file : files) - m3.importModelToDatabase(file, false); - - /* Check whether the model contains all individuals we created before */ - for (OWLNamedIndividual ind : m3.getIndividuals(modelId)) { - IRI iri = ind.getIRI(); - assertTrue(iri.equals(curieHandler.getIRI("GO:0000001")) || iri.equals(curieHandler.getIRI("GO:0000003")) || iri.equals(curieHandler.getIRI("GO:0000004"))); - assertFalse(iri.equals(curieHandler.getIRI("GO:0000002"))); - } - - /* Compare the model constructed from dump files with the model constructed using pre-dumped files */ - compareDumpUsingJena(new File("src/test/resources/mmg/basic-fullcycle-dump.ttl"), folder.getRoot(), modelId.toString()); - m3.dispose(); - } - - /** - * @return the instance of BlazegraphMolecularModelManager - * @throws Exception - */ - private BlazegraphMolecularModelManager createBlazegraphMolecularModelManager() throws Exception { - /* A path of the temporary journal file for Blazegraph storage system */ - String journalPath = folder.newFile().getAbsolutePath(); - /* A root path of the temporary directory */ - String tempRootPath = folder.getRoot().getAbsolutePath(); - /* Delete the journal file if exists */ - FileUtils.deleteQuietly(new File(journalPath)); - OWLOntology tbox = OWLManager.createOWLOntologyManager().loadOntology(IRI.create(new File("src/test/resources/mmg/basic-tbox.omn"))); - Map prefixes = new HashMap<>(); - prefixes.put("gomodel", "http://model.geneontology.org/"); - prefixes.put("ex", "http://example.org/"); - prefixes.put("GO", "http://purl.obolibrary.org/obo/GO_"); - CurieHandler curieHandler = new MappedCurieHandler(prefixes); - BlazegraphMolecularModelManager m3 = new BlazegraphMolecularModelManager<>(tbox, curieHandler,"http://model.geneontology.org/", journalPath, tempRootPath, go_lego_journal_file, true); - return m3; - } - - /** - * Compare two sets of turtle files and check whether they are equivalent. - * Dump files often have different orders of triples compared with the ones in the original file, - * thus one-by-one comparison is obviously not working here. We therefore leverage Jena's model, i.e., - * import original file and dump files using Jena and then compare them using Jena's isIsomorphicWith function. 
- * - * @param sourceFile - * @param targetFile - * @param targetModelIdStr - * @throws IOException - */ - private void compareDumpUsingJena(File sourceFile, File targetFile, String targetModelIdStr) throws IOException { - /* Read triples from a single source file */ - Model sourceModel = ModelFactory.createDefaultModel(); - sourceModel.read(new FileInputStream(sourceFile), null, "TURTLE"); - - /* Read triples from a directory */ - String[] extensions = new String[]{"ttl"}; - Model targetModel = ModelFactory.createDefaultModel(); - extensions = new String[]{"ttl"}; - List files = (List) FileUtils.listFiles(targetFile, extensions, true); - for (File file : files) - targetModel.read(file.getCanonicalPath()); - - /* - * The modelId is randomly generated for every time we create a new model and the modelId - * is also added as resources in dump files. Therefore, when we run this test code, - * the same model with the different Id is generated every time, so Jena think these models - * are different models due to the difference of the modelId (although other triples are equivalent). - * We therefore remove triples containing modelId before we compare the models using isIsomorphicWith. - */ - if (targetModelIdStr != null) { - Resource modelIdRes = targetModel.createResource(targetModelIdStr); - targetModel.removeAll(modelIdRes, null, null); - } - - /* Does the dumped file contain all triples from the source file (and vice versa)? */ - if (sourceModel.isIsomorphicWith(targetModel) != true) - fail("Source graphs and target graphs are not isomorphic."); - } +public class BlazegraphMolecularModelManagerTest { + private final CurieHandler curieHandler = DefaultCurieHandler.getDefaultHandler(); + static final String go_lego_journal_file = "/tmp/test-go-lego-blazegraph.jnl"; + + @Rule + public TemporaryFolder folder = new TemporaryFolder(); + + /** + * Test whether the revised import function properly digests turtle files. We import a ttl file + * into the BlazegraphMolecularModel, dump the model into files, and then compare these files. 
+ * Check this pull request for more information: https://github.com/geneontology/minerva/pull/144/files + * + * @throws Exception + */ + @Test + public void testImportDump() throws Exception { + /* I used the file from one of the turtle file in https://github.com/geneontology/noctua-models/blob/master/models/0000000300000001.ttl */ + String sourceModelPath = "src/test/resources/dummy-noctua-model.ttl"; + BlazegraphMolecularModelManager m3 = createBlazegraphMolecularModelManager(); + + /* Import the test turtle file */ + m3.importModelToDatabase(new File(sourceModelPath), false); + /* Dump back triples in the model to temporary files */ + for (IRI modelId : m3.getStoredModelIds()) + m3.dumpStoredModel(modelId, folder.getRoot()); + + compareDumpUsingJena(new File(sourceModelPath), folder.getRoot(), null); + m3.dispose(); + } + + @Test + public void testRemoveImportsDuringImport() throws Exception { + String sourceModelPath = "src/test/resources/dummy-noctua-modelwith-import.ttl"; + OWLOntologyManager ontman = OWLManager.createOWLOntologyManager(); + OWLOntology cam = ontman.loadOntologyFromOntologyDocument(new File(sourceModelPath)); + int axioms = cam.getAxiomCount(); + Set imports = cam.getImportsDeclarations(); + assertFalse(imports.size() == 0); + + BlazegraphMolecularModelManager m3 = createBlazegraphMolecularModelManager(); + /* Import the test turtle file */ + String modelId = m3.importModelToDatabase(new File(sourceModelPath), false); + // read it back out and show it is imports free + OWLOntology loaded = m3.loadModelABox(IRI.create(modelId)); + Set shouldbenone = loaded.getImportsDeclarations(); + int loadedaxioms = loaded.getAxiomCount(); + assertTrue(shouldbenone.size() == 0); + assertTrue(axioms == loadedaxioms); + m3.dispose(); + } + + /** + * Test the whole cycle of data processing using Blazegraph. 
+ * Check this pull request: https://github.com/geneontology/minerva/issues/143 + * + * @throws Exception + */ + @Test + public void testFullCycle() throws Exception { + BlazegraphMolecularModelManager m3 = createBlazegraphMolecularModelManager(); + try { + ModelContainer model = m3.generateBlankModel(null); + testModelSaveLoad(m3, model); + m3.unlinkModel(model.getModelId()); + assertEquals(m3.getModelIds().size(), 0); + + model = m3.generateBlankModel(null); + testModelAddRemove(m3, model); + testModelImport(m3, model); + } finally { + m3.dispose(); + } + } + + @Test + public void testModelStateDelete() throws Exception { + BlazegraphMolecularModelManager m3 = createBlazegraphMolecularModelManager(); + try { + final OWLDataFactory df = m3.getOntology().getOWLOntologyManager().getOWLDataFactory(); + final OWLObjectProperty partOf = df.getOWLObjectProperty(curieHandler.getIRI("BFO:0000050")); + final OWLAnnotationProperty modelState = df.getOWLAnnotationProperty(AnnotationShorthand.modelstate.getAnnotationProperty()); + + ModelContainer model1 = m3.generateBlankModel(null); + OWLNamedIndividual i1 = m3.createIndividualWithIRI(model1, curieHandler.getIRI("GO:0000001"), null, null); + OWLNamedIndividual i2 = m3.createIndividualWithIRI(model1, curieHandler.getIRI("GO:0000002"), null, null); + m3.addFact(model1, partOf, i1, i2, Collections.emptySet(), null); + + ModelContainer model2 = m3.generateBlankModel(null); + OWLNamedIndividual i3 = m3.createIndividualWithIRI(model2, curieHandler.getIRI("GO:0000001"), null, null); + OWLNamedIndividual i4 = m3.createIndividualWithIRI(model2, curieHandler.getIRI("GO:0000002"), null, null); + m3.addFact(model2, partOf, i3, i4, Collections.emptySet(), null); + m3.addModelAnnotations(model2, Collections.singleton(df.getOWLAnnotation(modelState, df.getOWLLiteral("delete"))), null); + m3.saveAllModels(null, null); + + File dir = folder.newFolder(); + m3.dumpStoredModel(model1.getModelId(), dir); + m3.dumpStoredModel(model2.getModelId(), dir); + m3.dispose(); + + BlazegraphMolecularModelManager m3b = createBlazegraphMolecularModelManager(); + try { + assertEquals(2, dir.list().length); + for (File file : dir.listFiles()) { + m3b.importModelToDatabase(file, true); + } + assertEquals(1, m3b.getStoredModelIds().size()); + } finally { + m3b.dispose(); + } + } finally { + m3.dispose(); + } + } + + @Test + public void testSPARQLQuery() throws Exception { + String sourceModelPath = "src/test/resources/dummy-noctua-model.ttl"; + BlazegraphMolecularModelManager m3 = createBlazegraphMolecularModelManager(); + try { + /* Import the test turtle file */ + m3.importModelToDatabase(new File(sourceModelPath), false); + QueryResult selectResult = m3.executeSPARQLQuery("SELECT DISTINCT ?g WHERE { GRAPH ?g { ?s ?p ?o } }", 10); + assertTrue(selectResult instanceof TupleQueryResult); + assertEquals("http://model.geneontology.org/0000000300000001", ((TupleQueryResult) selectResult).next().getBinding("g").getValue().stringValue()); + QueryResult constructResult = m3.executeSPARQLQuery("CONSTRUCT { ?s ?g } WHERE { GRAPH ?g { ?s ?p ?o } }", 10); + assertTrue(constructResult instanceof GraphQueryResult); + assertEquals("http://model.geneontology.org/0000000300000001", ((GraphQueryResult) constructResult).next().getObject().stringValue()); + SPARQLResultJSONRenderer renderer = new SPARQLResultJSONRenderer(m3.getCuriHandler()); + String gValue = renderer.renderResults((TupleQueryResult) m3.executeSPARQLQuery("SELECT DISTINCT ?g WHERE { GRAPH ?g { ?s ?p ?o } }", 10)) + 
.getAsJsonObject("results") + .getAsJsonArray("bindings").get(0) + .getAsJsonObject().getAsJsonObject("g").getAsJsonPrimitive("value").getAsString(); + assertEquals("gomodel:0000000300000001", gValue); + JsonObject graphJson = renderer.renderGraph((GraphQueryResult) m3.executeSPARQLQuery("CONSTRUCT { ?s ?g } WHERE { GRAPH ?g { ?s ?p ?o } }", 10)); + String objectValue = graphJson.getAsJsonObject("GO:0000981").getAsJsonArray("ex:subject_in").get(0).getAsJsonObject().getAsJsonPrimitive("value").getAsString(); + assertEquals("gomodel:0000000300000001", objectValue); + } finally { + m3.dispose(); + } + } + + /** + * Test the process that adds some individuals, saves them and then loads them back into the model. + * + * @param m3 + * @param model + * @throws Exception + */ + private void testModelSaveLoad(BlazegraphMolecularModelManager m3, ModelContainer model) throws Exception { + IRI modelID = model.getModelId(); + final OWLObjectProperty partOf = m3.getOntology().getOWLOntologyManager(). + getOWLDataFactory().getOWLObjectProperty(curieHandler.getIRI("BFO:0000050")); + OWLNamedIndividual i1 = m3.createIndividualWithIRI(model, curieHandler.getIRI("GO:0000001"), null, null); + OWLNamedIndividual i2 = m3.createIndividualWithIRI(model, curieHandler.getIRI("GO:0000002"), null, null); + + m3.addFact(model, partOf, i1, i2, Collections.emptySet(), null); + m3.saveModel(model, null, null); + m3.unlinkModel(modelID); + + /* getModel internally calls the loadModel method */ + model = m3.getModel(modelID); + Collection loaded = m3.getIndividuals(model.getModelId()); + assertTrue(loaded.contains(i1)); + assertTrue(loaded.contains(i2)); + } + + /** + * Repeatedly add and remove individuals/facts and check what happens. + * Also check the case whether individuals can be added after the Blazegraph instance is shutdown. 
+ * + * @param m3 + * @param model + * @throws Exception + */ + private void testModelAddRemove(BlazegraphMolecularModelManager m3, ModelContainer model) throws Exception { + final OWLObjectProperty partOf = m3.getOntology().getOWLOntologyManager().getOWLDataFactory().getOWLObjectProperty(curieHandler.getIRI("BFO:0000050")); + OWLNamedIndividual i1 = m3.createIndividualWithIRI(model, curieHandler.getIRI("GO:0000001"), null, null); + OWLNamedIndividual i2 = m3.createIndividualWithIRI(model, curieHandler.getIRI("GO:0000002"), null, null); + OWLNamedIndividual i3 = m3.createIndividualWithIRI(model, curieHandler.getIRI("GO:0000003"), null, null); + OWLNamedIndividual i4 = m3.createIndividualWithIRI(model, curieHandler.getIRI("GO:0000004"), null, null); + + /* Add four individuals */ + m3.addFact(model, partOf, i1, i2, Collections.emptySet(), null); + m3.addFact(model, partOf, i3, i4, Collections.emptySet(), null); + m3.saveModel(model, null, null); + Collection loaded = m3.getIndividuals(model.getModelId()); + assertTrue(loaded.contains(i1) && loaded.contains(i2) && loaded.contains(i3) && loaded.contains(i4)); + + /* Remove the partOf triple that connects i1 and i2 */ + m3.removeFact(model, partOf, i1, i2, null); + m3.saveModel(model, null, null); + loaded = m3.getIndividuals(model.getModelId()); + assertTrue(loaded.contains(i1) && loaded.contains(i2) && loaded.contains(i3) && loaded.contains(i4)); + + /* Remove the i1 and i2 */ + m3.deleteIndividual(model, i1, null); + m3.deleteIndividual(model, i2, null); + m3.saveModel(model, null, null); + loaded = m3.getIndividuals(model.getModelId()); + assertTrue(!loaded.contains(i1) && !loaded.contains(i2) && loaded.contains(i3) && loaded.contains(i4)); + + /* Trying to remove the fact that is already removed */ + m3.deleteIndividual(model, i1, null); + m3.saveModel(model, null, null); + loaded = m3.getIndividuals(model.getModelId()); + assertTrue(!loaded.contains(i1) && !loaded.contains(i2) && loaded.contains(i3) && loaded.contains(i4)); + + /* Re-add the i1 */ + i1 = m3.createIndividualWithIRI(model, curieHandler.getIRI("GO:0000001"), + null, null); + loaded = m3.getIndividuals(model.getModelId()); + assertTrue(loaded.contains(i1) && !loaded.contains(i2) && loaded.contains(i3) && loaded.contains(i4)); + m3.saveModel(model, null, null); + assertEquals(m3.getModelIds().size(), 1); + + m3.unlinkModel(model.getModelId()); + + /* i5 should not be added; createIndividualWithIRI should throw java.lang.IllegalStateException */ + try { + OWLNamedIndividual i5 = m3.createIndividualWithIRI(model, curieHandler.getIRI("GO:0000005"), null, null); + m3.saveModel(model, null, null); + fail("Creating individual after disposing the model manager should not be allowed."); + } catch (IllegalStateException e) { + } + } + + /** + * Dump stored model and and read back the dumped ttl files; check whether the model is properly reconstructed + * from ttl files. Double-check whether the model is properly dumped using Jena. 
+ * + * @param m3 + * @param model + * @throws Exception + */ + private void testModelImport(BlazegraphMolecularModelManager m3, ModelContainer model) throws Exception { + IRI modelId = model.getModelId(); + /* Dump the specific model that match model's Id */ + m3.dumpStoredModel(modelId, folder.getRoot()); + /* So far we created and saved two models */ + assertEquals(m3.getAvailableModelIds().size(), 2); + /* Shutdown the database instance */ + m3.dispose(); + + /* Create the instance again */ + m3 = createBlazegraphMolecularModelManager(); + /* Import the dumped ttl files */ + String[] extensions = new String[]{"ttl"}; + List files = (List) FileUtils.listFiles(folder.getRoot(), extensions, true); + for (File file : files) + m3.importModelToDatabase(file, false); + + /* Check whether the model contains all individuals we created before */ + for (OWLNamedIndividual ind : m3.getIndividuals(modelId)) { + IRI iri = ind.getIRI(); + assertTrue(iri.equals(curieHandler.getIRI("GO:0000001")) || iri.equals(curieHandler.getIRI("GO:0000003")) || iri.equals(curieHandler.getIRI("GO:0000004"))); + assertFalse(iri.equals(curieHandler.getIRI("GO:0000002"))); + } + + /* Compare the model constructed from dump files with the model constructed using pre-dumped files */ + compareDumpUsingJena(new File("src/test/resources/mmg/basic-fullcycle-dump.ttl"), folder.getRoot(), modelId.toString()); + m3.dispose(); + } + + /** + * @return the instance of BlazegraphMolecularModelManager + * @throws Exception + */ + private BlazegraphMolecularModelManager createBlazegraphMolecularModelManager() throws Exception { + /* A path of the temporary journal file for Blazegraph storage system */ + String journalPath = folder.newFile().getAbsolutePath(); + /* A root path of the temporary directory */ + String tempRootPath = folder.getRoot().getAbsolutePath(); + /* Delete the journal file if exists */ + FileUtils.deleteQuietly(new File(journalPath)); + OWLOntology tbox = OWLManager.createOWLOntologyManager().loadOntology(IRI.create(new File("src/test/resources/mmg/basic-tbox.omn"))); + Map prefixes = new HashMap<>(); + prefixes.put("gomodel", "http://model.geneontology.org/"); + prefixes.put("ex", "http://example.org/"); + prefixes.put("GO", "http://purl.obolibrary.org/obo/GO_"); + CurieHandler curieHandler = new MappedCurieHandler(prefixes); + BlazegraphMolecularModelManager m3 = new BlazegraphMolecularModelManager<>(tbox, curieHandler, "http://model.geneontology.org/", journalPath, tempRootPath, go_lego_journal_file, true); + return m3; + } + + /** + * Compare two sets of turtle files and check whether they are equivalent. + * Dump files often have different orders of triples compared with the ones in the original file, + * thus one-by-one comparison is obviously not working here. We therefore leverage Jena's model, i.e., + * import original file and dump files using Jena and then compare them using Jena's isIsomorphicWith function. 
+ * + * @param sourceFile + * @param targetFile + * @param targetModelIdStr + * @throws IOException + */ + private void compareDumpUsingJena(File sourceFile, File targetFile, String targetModelIdStr) throws IOException { + /* Read triples from a single source file */ + Model sourceModel = ModelFactory.createDefaultModel(); + sourceModel.read(new FileInputStream(sourceFile), null, "TURTLE"); + + /* Read triples from a directory */ + String[] extensions = new String[]{"ttl"}; + Model targetModel = ModelFactory.createDefaultModel(); + extensions = new String[]{"ttl"}; + List files = (List) FileUtils.listFiles(targetFile, extensions, true); + for (File file : files) + targetModel.read(file.getCanonicalPath()); + + /* + * The modelId is randomly generated for every time we create a new model and the modelId + * is also added as resources in dump files. Therefore, when we run this test code, + * the same model with the different Id is generated every time, so Jena think these models + * are different models due to the difference of the modelId (although other triples are equivalent). + * We therefore remove triples containing modelId before we compare the models using isIsomorphicWith. + */ + if (targetModelIdStr != null) { + Resource modelIdRes = targetModel.createResource(targetModelIdStr); + targetModel.removeAll(modelIdRes, null, null); + } + + /* Does the dumped file contain all triples from the source file (and vice versa)? */ + if (sourceModel.isIsomorphicWith(targetModel) != true) + fail("Source graphs and target graphs are not isomorphic."); + } } \ No newline at end of file diff --git a/minerva-core/src/test/java/org/geneontology/minerva/BlazegraphOntologyManagerTest.java b/minerva-core/src/test/java/org/geneontology/minerva/BlazegraphOntologyManagerTest.java index d496d96c..e094549c 100644 --- a/minerva-core/src/test/java/org/geneontology/minerva/BlazegraphOntologyManagerTest.java +++ b/minerva-core/src/test/java/org/geneontology/minerva/BlazegraphOntologyManagerTest.java @@ -1,130 +1,130 @@ /** - * + * */ package org.geneontology.minerva; -import static org.junit.Assert.*; +import org.junit.AfterClass; +import org.junit.BeforeClass; +import org.junit.Test; import java.io.IOException; import java.util.HashSet; import java.util.Map; import java.util.Set; -import org.junit.AfterClass; -import org.junit.BeforeClass; -import org.junit.Test; +import static org.junit.Assert.assertTrue; /** * @author benjamingood * */ public class BlazegraphOntologyManagerTest { - //if the file isn't there, it will try to download it from - //BlazegraphOntologyManager.http://skyhook.berkeleybop.org/issue-35-neo-test/products/blazegraph/blazegraph-go-lego.jnl.gz - //can override the download by providing the file at the specified location - static final String ontology_journal_file = "/tmp/test-go-lego-blazegraph.jnl"; - static BlazegraphOntologyManager onto_repo; - - /** - * @throws java.lang.Exception - */ - @BeforeClass - public static void setUpBeforeClass() throws Exception { - onto_repo = new BlazegraphOntologyManager(ontology_journal_file, true); - } + //if the file isn't there, it will try to download it from + //BlazegraphOntologyManager.http://skyhook.berkeleybop.org/issue-35-neo-test/products/blazegraph/blazegraph-go-lego.jnl.gz + //can override the download by providing the file at the specified location + static final String ontology_journal_file = "/tmp/test-go-lego-blazegraph.jnl"; + static BlazegraphOntologyManager onto_repo; + + /** + * @throws java.lang.Exception + */ + @BeforeClass + public 
static void setUpBeforeClass() throws Exception { + onto_repo = new BlazegraphOntologyManager(ontology_journal_file, true); + } + + /** + * @throws java.lang.Exception + */ + @AfterClass + public static void tearDownAfterClass() throws Exception { + onto_repo.dispose(); + } - /** - * @throws java.lang.Exception - */ - @AfterClass - public static void tearDownAfterClass() throws Exception { - onto_repo.dispose(); - } - - /** - * Test method for {@link org.geneontology.minerva.BlazegraphOntologyManager#getAllTaxaWithGenes()}. - * @throws IOException - */ - @Test - public void testGetLabels() throws IOException { - String thing = "http://purl.obolibrary.org/obo/NCBITaxon_44689"; - String label = onto_repo.getLabel(thing); - assertTrue(label!=null); - System.out.println(thing+" "+label); - thing = "http://identifiers.org/zfin/ZDB-GENE-010410-3"; - label = onto_repo.getLabel(thing); - assertTrue(label!=null); - System.out.println(thing+" "+label); - thing = "http://purl.obolibrary.org/obo/ECO_0000314"; - label = onto_repo.getLabel(thing); - assertTrue(label!=null); - System.out.println(thing+" "+label); - thing = "http://purl.obolibrary.org/obo/GO_0110165"; - label = onto_repo.getLabel(thing); - assertTrue(label!=null); - System.out.println(thing+" "+label); - thing = "http://purl.obolibrary.org/obo/GO_0060090"; - label = onto_repo.getLabel(thing); - assertTrue(label!=null); - System.out.println(thing+" "+label); - } - - /** - * Test method for {@link org.geneontology.minerva.BlazegraphOntologyManager#getAllTaxaWithGenes()}. - * @throws IOException - */ - @Test - public void testGetAllTaxaWithGenes() throws IOException { - Set taxa = onto_repo.getAllTaxaWithGenes(); - assertTrue("taxa has more than a hundred entries", taxa.size()>100); - assertTrue("taxa contains NCBITaxon_44689", taxa.contains("http://purl.obolibrary.org/obo/NCBITaxon_44689")); - assertTrue("taxa contains NCBITaxon_9606", taxa.contains("http://purl.obolibrary.org/obo/NCBITaxon_9606")); - } - - /** - * Test method for {@link org.geneontology.minerva.BlazegraphOntologyManager#getSubClasses(java.lang.String)}. - * @throws IOException - */ - @Test - public void testGetGenesByTaxonId() throws IOException { - //zfin - String ncbi_tax_id = "7955"; //zfin - Set genes = onto_repo.getGenesByTaxid(ncbi_tax_id); - assertTrue("http://identifiers.org/zfin/ZDB-GENE-010410-3 not returned for taxon 7955 zfin", genes.contains("http://identifiers.org/zfin/ZDB-GENE-010410-3")); - } - - - /** - * Test method for {@link org.geneontology.minerva.BlazegraphOntologyManager#getSubClasses(java.lang.String)}. 
- * @throws IOException - */ - @Test - public void testGetSubClasses() throws IOException { - //make sure its possible to get from leaf to root for the key classes - //Evidence - String uri = "http://purl.obolibrary.org/obo/ECO_0000000"; - Set subs = onto_repo.getAllSubClasses(uri); - assertTrue("ECO_0000314 not subclass of ECO_0000000", subs.contains("http://purl.obolibrary.org/obo/ECO_0000314")); - //Anatomy - //worm anatomy - note that it needs parts of the cl ontology in there - uri = "http://purl.obolibrary.org/obo/CL_0000003"; - subs = onto_repo.getAllSubClasses(uri); - //GO native cell - used a lot in shex - assertTrue("WBbt_0005753 not subclass of CL_0000003", subs.contains("http://purl.obolibrary.org/obo/WBbt_0005753")); - //Cell component - uri = "http://purl.obolibrary.org/obo/GO_0110165"; - subs = onto_repo.getAllSubClasses(uri); - assertTrue("GO_0000776 not subclass of GO_0110165 'cellular anatomical entity'", subs.contains("http://purl.obolibrary.org/obo/GO_0000776")); - //biological process - uri = "http://purl.obolibrary.org/obo/GO_0008150"; - subs = onto_repo.getAllSubClasses(uri); - assertTrue("GO_0022607 not subclass of GO_000815 biological process", subs.contains("http://purl.obolibrary.org/obo/GO_0022607")); - //molecular function - uri = "http://purl.obolibrary.org/obo/GO_0003674"; - subs = onto_repo.getAllSubClasses(uri); - assertTrue("GO_0060090 not subclass of molecular function GO_0003674", subs.contains("http://purl.obolibrary.org/obo/GO_0060090")); - //Gene products + /** + * Test method for {@link org.geneontology.minerva.BlazegraphOntologyManager#getAllTaxaWithGenes()}. + * @throws IOException + */ + @Test + public void testGetLabels() throws IOException { + String thing = "http://purl.obolibrary.org/obo/NCBITaxon_44689"; + String label = onto_repo.getLabel(thing); + assertTrue(label != null); + System.out.println(thing + " " + label); + thing = "http://identifiers.org/zfin/ZDB-GENE-010410-3"; + label = onto_repo.getLabel(thing); + assertTrue(label != null); + System.out.println(thing + " " + label); + thing = "http://purl.obolibrary.org/obo/ECO_0000314"; + label = onto_repo.getLabel(thing); + assertTrue(label != null); + System.out.println(thing + " " + label); + thing = "http://purl.obolibrary.org/obo/GO_0110165"; + label = onto_repo.getLabel(thing); + assertTrue(label != null); + System.out.println(thing + " " + label); + thing = "http://purl.obolibrary.org/obo/GO_0060090"; + label = onto_repo.getLabel(thing); + assertTrue(label != null); + System.out.println(thing + " " + label); + } + + /** + * Test method for {@link org.geneontology.minerva.BlazegraphOntologyManager#getAllTaxaWithGenes()}. + * @throws IOException + */ + @Test + public void testGetAllTaxaWithGenes() throws IOException { + Set taxa = onto_repo.getAllTaxaWithGenes(); + assertTrue("taxa has more than a hundred entries", taxa.size() > 100); + assertTrue("taxa contains NCBITaxon_44689", taxa.contains("http://purl.obolibrary.org/obo/NCBITaxon_44689")); + assertTrue("taxa contains NCBITaxon_9606", taxa.contains("http://purl.obolibrary.org/obo/NCBITaxon_9606")); + } + + /** + * Test method for {@link org.geneontology.minerva.BlazegraphOntologyManager#getSubClasses(java.lang.String)}. 
+ * @throws IOException + */ + @Test + public void testGetGenesByTaxonId() throws IOException { + //zfin + String ncbi_tax_id = "7955"; //zfin + Set genes = onto_repo.getGenesByTaxid(ncbi_tax_id); + assertTrue("http://identifiers.org/zfin/ZDB-GENE-010410-3 not returned for taxon 7955 zfin", genes.contains("http://identifiers.org/zfin/ZDB-GENE-010410-3")); + } + + + /** + * Test method for {@link org.geneontology.minerva.BlazegraphOntologyManager#getSubClasses(java.lang.String)}. + * @throws IOException + */ + @Test + public void testGetSubClasses() throws IOException { + //make sure its possible to get from leaf to root for the key classes + //Evidence + String uri = "http://purl.obolibrary.org/obo/ECO_0000000"; + Set subs = onto_repo.getAllSubClasses(uri); + assertTrue("ECO_0000314 not subclass of ECO_0000000", subs.contains("http://purl.obolibrary.org/obo/ECO_0000314")); + //Anatomy + //worm anatomy - note that it needs parts of the cl ontology in there + uri = "http://purl.obolibrary.org/obo/CL_0000003"; + subs = onto_repo.getAllSubClasses(uri); + //GO native cell - used a lot in shex + assertTrue("WBbt_0005753 not subclass of CL_0000003", subs.contains("http://purl.obolibrary.org/obo/WBbt_0005753")); + //Cell component + uri = "http://purl.obolibrary.org/obo/GO_0110165"; + subs = onto_repo.getAllSubClasses(uri); + assertTrue("GO_0000776 not subclass of GO_0110165 'cellular anatomical entity'", subs.contains("http://purl.obolibrary.org/obo/GO_0000776")); + //biological process + uri = "http://purl.obolibrary.org/obo/GO_0008150"; + subs = onto_repo.getAllSubClasses(uri); + assertTrue("GO_0022607 not subclass of GO_000815 biological process", subs.contains("http://purl.obolibrary.org/obo/GO_0022607")); + //molecular function + uri = "http://purl.obolibrary.org/obo/GO_0003674"; + subs = onto_repo.getAllSubClasses(uri); + assertTrue("GO_0060090 not subclass of molecular function GO_0003674", subs.contains("http://purl.obolibrary.org/obo/GO_0060090")); + //Gene products //this is a little extreme.. it works but takes a minute. Should never have to do this in a live search system // //uniprot // uri = "http://purl.obolibrary.org/obo/CHEBI_36080"; @@ -141,134 +141,134 @@ public void testGetSubClasses() throws IOException { // subs = onto_repo.getAllSubClasses(uri); // assertTrue("WBGene00000275 not subclass of CHEBI_24431 chemical entity", subs.contains("http://identifiers.org/wormbase/WBGene00000275")); - } - - /** - * Test method for {@link org.geneontology.minerva.BlazegraphOntologyManager#getSuperClasses(java.lang.String)}. 
- * @throws IOException - */ - @Test - public void testGetSuperClasses() throws IOException { - //make sure its possible to get from leaf to root for the key classes - //Evidence - String uri = "http://purl.obolibrary.org/obo/ECO_0000314"; - Set supers = onto_repo.getAllSuperClasses(uri); - assertTrue("ECO_0000314 not subclass of ECO_0000000", supers.contains("http://purl.obolibrary.org/obo/ECO_0000000")); - //Anatomy - //worm anatomy - note that it needs parts of the cl ontology in there - uri = "http://purl.obolibrary.org/obo/WBbt_0005753"; - supers = onto_repo.getAllSuperClasses(uri); - //GO native cell - used a lot in shex - assertTrue("WBbt_0005753 not subclass of CL_0000003", supers.contains("http://purl.obolibrary.org/obo/CL_0000003")); - //anatomy - also used a lot in shex - assertTrue("WBbt_0005753 not subclass of CARO_0000000", supers.contains("http://purl.obolibrary.org/obo/CARO_0000000")); - //Cell component - uri = "http://purl.obolibrary.org/obo/GO_0000776"; - supers = onto_repo.getAllSuperClasses(uri); - assertTrue("GO_0000776 not subclass of GO_0110165 'cellular anatomical entity'", supers.contains("http://purl.obolibrary.org/obo/GO_0110165")); - assertTrue("GO_0000776 not subclass of GO_0005575 'cellular component'", supers.contains("http://purl.obolibrary.org/obo/GO_0005575")); - //biological process - uri = "http://purl.obolibrary.org/obo/GO_0022607"; - supers = onto_repo.getAllSuperClasses(uri); - assertTrue("GO_0022607 not subclass of GO_000815 biological process", supers.contains("http://purl.obolibrary.org/obo/GO_0008150")); - //molecular function - uri = "http://purl.obolibrary.org/obo/GO_0060090"; - supers = onto_repo.getAllSuperClasses(uri); - assertTrue("GO_0060090 not subclass of molecular function GO_0003674", supers.contains("http://purl.obolibrary.org/obo/GO_0003674")); - //Gene products - //uniprot - uri = "http://identifiers.org/uniprot/Q13253"; - supers = onto_repo.getAllSuperClasses(uri); - //protein - assertTrue("uniprot/Q13253 not subclass of CHEBI_36080 protein", supers.contains("http://purl.obolibrary.org/obo/CHEBI_36080")); - assertTrue("uniprot/Q13253 not subclass of CHEBI_36695 information biomacromolecule", supers.contains("http://purl.obolibrary.org/obo/CHEBI_33695")); - assertTrue("uniprot/Q13253 not subclass of CHEBI_24431 chemical entity", supers.contains("http://purl.obolibrary.org/obo/CHEBI_24431")); - //"gene".. 
- //zfin - uri = "http://identifiers.org/zfin/ZDB-GENE-010410-3"; - supers = onto_repo.getAllSuperClasses(uri); - assertTrue("ZDB-GENE-010410-3 not subclass of CHEBI_36695 information biomacromolecule", supers.contains("http://purl.obolibrary.org/obo/CHEBI_33695")); - assertTrue("ZDB-GENE-010410-3 not subclass of CHEBI_24431 chemical entity", supers.contains("http://purl.obolibrary.org/obo/CHEBI_24431")); - //wormbase - uri = "http://identifiers.org/wormbase/WBGene00000275"; - supers = onto_repo.getAllSuperClasses(uri); - assertTrue("WBGene00000275 not subclass of CHEBI_36695 information biomacromolecule", supers.contains("http://purl.obolibrary.org/obo/CHEBI_33695")); - assertTrue("WBGene00000275 not subclass of CHEBI_24431 chemical entity", supers.contains("http://purl.obolibrary.org/obo/CHEBI_24431")); - } - - @Test - public void testGetUpperTypes() throws IOException { - //make sure its possible to get from leaf to root for the key classes - Set uris = new HashSet(); - String eco = "http://purl.obolibrary.org/obo/ECO_0000314"; - String wbbt = "http://purl.obolibrary.org/obo/WBbt_0005753"; - String cc = "http://purl.obolibrary.org/obo/GO_0000776"; - String bp = "http://purl.obolibrary.org/obo/GO_0022607"; - String mf = "http://purl.obolibrary.org/obo/GO_0060090"; - String human_protein = "http://identifiers.org/uniprot/Q13253"; - String zfin_protein = "http://identifiers.org/zfin/ZDB-GENE-010410-3"; - String worm_gene = "http://identifiers.org/wormbase/WBGene00000275"; - uris.add(eco); - uris.add(wbbt); - uris.add(cc); - uris.add(bp); - uris.add(mf); - uris.add(human_protein); - uris.add(zfin_protein); - uris.add(worm_gene); - - Map> uri_roots = onto_repo.getSuperCategoryMap(uris); - //Evidence - Set supers = uri_roots.get(eco); - assertTrue("ECO_0000314 not subclass of ECO_0000000", supers.contains("http://purl.obolibrary.org/obo/ECO_0000000")); - //Anatomy - //worm anatomy - note that it needs parts of the cl ontology in there - supers = uri_roots.get(wbbt); - //GO native cell - used a lot in shex - //assertTrue("WBbt_0005753 not subclass of CL_0000003", supers.contains("http://purl.obolibrary.org/obo/CL_0000003")); - //anatomy - also used a lot in shex - assertTrue("WBbt_0005753 not subclass of CARO_0000000", supers.contains("http://purl.obolibrary.org/obo/CARO_0000000")); - //Cell component - supers = uri_roots.get(cc); - assertTrue("GO_0000776 not subclass of GO_0110165 'cellular anatomical entity'", supers.contains("http://purl.obolibrary.org/obo/GO_0110165")); - assertTrue("GO_0000776 not subclass of GO_0005575 'cellular component'", supers.contains("http://purl.obolibrary.org/obo/GO_0005575")); - //biological process - supers = uri_roots.get(bp); - assertTrue("GO_0022607 not subclass of GO_000815 biological process", supers.contains("http://purl.obolibrary.org/obo/GO_0008150")); - //molecular function - supers = uri_roots.get(mf); - assertTrue("GO_0060090 not subclass of molecular function GO_0003674", supers.contains("http://purl.obolibrary.org/obo/GO_0003674")); - //Gene products - //uniprot - supers = uri_roots.get(human_protein); - //protein - assertTrue("uniprot/Q13253 not subclass of CHEBI_36080 protein", supers.contains("http://purl.obolibrary.org/obo/CHEBI_36080")); - assertTrue("uniprot/Q13253 not subclass of CHEBI_36695 information biomacromolecule", supers.contains("http://purl.obolibrary.org/obo/CHEBI_33695")); - assertTrue("uniprot/Q13253 not subclass of CHEBI_24431 chemical entity", supers.contains("http://purl.obolibrary.org/obo/CHEBI_24431")); - //"gene".. 
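
(Illustrative aside, not part of the patch.) For the batch lookup used by testGetUpperTypes, a hedged sketch of getSuperCategoryMap; it assumes a pre-built manager and a Map<String, Set<String>> return shape, since the test itself uses raw types:

    import java.io.IOException;
    import java.util.Collections;
    import java.util.Map;
    import java.util.Set;
    import org.geneontology.minerva.BlazegraphOntologyManager;

    class SuperCategorySketch {
        // Sketch only: does the given term URI roll up to molecular function (GO:0003674)?
        static boolean rollsUpToMolecularFunction(BlazegraphOntologyManager repo, String termUri) throws IOException {
            Map<String, Set<String>> roots = repo.getSuperCategoryMap(Collections.singleton(termUri));
            Set<String> categories = roots.get(termUri);
            return categories != null && categories.contains("http://purl.obolibrary.org/obo/GO_0003674");
        }
    }
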
- //zfin - supers = uri_roots.get(zfin_protein); - assertTrue("ZDB-GENE-010410-3 not subclass of CHEBI_36695 information biomacromolecule", supers.contains("http://purl.obolibrary.org/obo/CHEBI_33695")); - assertTrue("ZDB-GENE-010410-3 not subclass of CHEBI_24431 chemical entity", supers.contains("http://purl.obolibrary.org/obo/CHEBI_24431")); - //wormbase - supers = uri_roots.get(worm_gene); - assertTrue("WBGene00000275 not subclass of CHEBI_36695 information biomacromolecule", supers.contains("http://purl.obolibrary.org/obo/CHEBI_33695")); - assertTrue("WBGene00000275 not subclass of CHEBI_24431 chemical entity", supers.contains("http://purl.obolibrary.org/obo/CHEBI_24431")); - } - - @Test - public void testGetComplexPortalTypes() throws IOException { - //make sure its possible to get from leaf to root for the key classes - Set uris = new HashSet(); - String cp1 = "https://www.ebi.ac.uk/complexportal/complex/CPX-9"; - String cp2 = "https://www.ebi.ac.uk/complexportal/complex/CPX-4082"; - uris.add(cp1); - uris.add(cp2); - Map> uri_roots = onto_repo.getSuperCategoryMap(uris); - Set supers = uri_roots.get(cp1); - assertTrue("ComplexPortal_CPX-9 should be a protein-containing complex", supers.contains("http://purl.obolibrary.org/obo/GO_0032991")); - supers = uri_roots.get(cp2); - assertTrue("ComplexPortal_CPX-4082 should be a protein-containing complex", supers.contains("http://purl.obolibrary.org/obo/GO_0032991")); - } - + } + + /** + * Test method for {@link org.geneontology.minerva.BlazegraphOntologyManager#getSuperClasses(java.lang.String)}. + * @throws IOException + */ + @Test + public void testGetSuperClasses() throws IOException { + //make sure its possible to get from leaf to root for the key classes + //Evidence + String uri = "http://purl.obolibrary.org/obo/ECO_0000314"; + Set supers = onto_repo.getAllSuperClasses(uri); + assertTrue("ECO_0000314 not subclass of ECO_0000000", supers.contains("http://purl.obolibrary.org/obo/ECO_0000000")); + //Anatomy + //worm anatomy - note that it needs parts of the cl ontology in there + uri = "http://purl.obolibrary.org/obo/WBbt_0005753"; + supers = onto_repo.getAllSuperClasses(uri); + //GO native cell - used a lot in shex + assertTrue("WBbt_0005753 not subclass of CL_0000003", supers.contains("http://purl.obolibrary.org/obo/CL_0000003")); + //anatomy - also used a lot in shex + assertTrue("WBbt_0005753 not subclass of CARO_0000000", supers.contains("http://purl.obolibrary.org/obo/CARO_0000000")); + //Cell component + uri = "http://purl.obolibrary.org/obo/GO_0000776"; + supers = onto_repo.getAllSuperClasses(uri); + assertTrue("GO_0000776 not subclass of GO_0110165 'cellular anatomical entity'", supers.contains("http://purl.obolibrary.org/obo/GO_0110165")); + assertTrue("GO_0000776 not subclass of GO_0005575 'cellular component'", supers.contains("http://purl.obolibrary.org/obo/GO_0005575")); + //biological process + uri = "http://purl.obolibrary.org/obo/GO_0022607"; + supers = onto_repo.getAllSuperClasses(uri); + assertTrue("GO_0022607 not subclass of GO_000815 biological process", supers.contains("http://purl.obolibrary.org/obo/GO_0008150")); + //molecular function + uri = "http://purl.obolibrary.org/obo/GO_0060090"; + supers = onto_repo.getAllSuperClasses(uri); + assertTrue("GO_0060090 not subclass of molecular function GO_0003674", supers.contains("http://purl.obolibrary.org/obo/GO_0003674")); + //Gene products + //uniprot + uri = "http://identifiers.org/uniprot/Q13253"; + supers = onto_repo.getAllSuperClasses(uri); + //protein + 
assertTrue("uniprot/Q13253 not subclass of CHEBI_36080 protein", supers.contains("http://purl.obolibrary.org/obo/CHEBI_36080")); + assertTrue("uniprot/Q13253 not subclass of CHEBI_36695 information biomacromolecule", supers.contains("http://purl.obolibrary.org/obo/CHEBI_33695")); + assertTrue("uniprot/Q13253 not subclass of CHEBI_24431 chemical entity", supers.contains("http://purl.obolibrary.org/obo/CHEBI_24431")); + //"gene".. + //zfin + uri = "http://identifiers.org/zfin/ZDB-GENE-010410-3"; + supers = onto_repo.getAllSuperClasses(uri); + assertTrue("ZDB-GENE-010410-3 not subclass of CHEBI_36695 information biomacromolecule", supers.contains("http://purl.obolibrary.org/obo/CHEBI_33695")); + assertTrue("ZDB-GENE-010410-3 not subclass of CHEBI_24431 chemical entity", supers.contains("http://purl.obolibrary.org/obo/CHEBI_24431")); + //wormbase + uri = "http://identifiers.org/wormbase/WBGene00000275"; + supers = onto_repo.getAllSuperClasses(uri); + assertTrue("WBGene00000275 not subclass of CHEBI_36695 information biomacromolecule", supers.contains("http://purl.obolibrary.org/obo/CHEBI_33695")); + assertTrue("WBGene00000275 not subclass of CHEBI_24431 chemical entity", supers.contains("http://purl.obolibrary.org/obo/CHEBI_24431")); + } + + @Test + public void testGetUpperTypes() throws IOException { + //make sure its possible to get from leaf to root for the key classes + Set uris = new HashSet(); + String eco = "http://purl.obolibrary.org/obo/ECO_0000314"; + String wbbt = "http://purl.obolibrary.org/obo/WBbt_0005753"; + String cc = "http://purl.obolibrary.org/obo/GO_0000776"; + String bp = "http://purl.obolibrary.org/obo/GO_0022607"; + String mf = "http://purl.obolibrary.org/obo/GO_0060090"; + String human_protein = "http://identifiers.org/uniprot/Q13253"; + String zfin_protein = "http://identifiers.org/zfin/ZDB-GENE-010410-3"; + String worm_gene = "http://identifiers.org/wormbase/WBGene00000275"; + uris.add(eco); + uris.add(wbbt); + uris.add(cc); + uris.add(bp); + uris.add(mf); + uris.add(human_protein); + uris.add(zfin_protein); + uris.add(worm_gene); + + Map> uri_roots = onto_repo.getSuperCategoryMap(uris); + //Evidence + Set supers = uri_roots.get(eco); + assertTrue("ECO_0000314 not subclass of ECO_0000000", supers.contains("http://purl.obolibrary.org/obo/ECO_0000000")); + //Anatomy + //worm anatomy - note that it needs parts of the cl ontology in there + supers = uri_roots.get(wbbt); + //GO native cell - used a lot in shex + //assertTrue("WBbt_0005753 not subclass of CL_0000003", supers.contains("http://purl.obolibrary.org/obo/CL_0000003")); + //anatomy - also used a lot in shex + assertTrue("WBbt_0005753 not subclass of CARO_0000000", supers.contains("http://purl.obolibrary.org/obo/CARO_0000000")); + //Cell component + supers = uri_roots.get(cc); + assertTrue("GO_0000776 not subclass of GO_0110165 'cellular anatomical entity'", supers.contains("http://purl.obolibrary.org/obo/GO_0110165")); + assertTrue("GO_0000776 not subclass of GO_0005575 'cellular component'", supers.contains("http://purl.obolibrary.org/obo/GO_0005575")); + //biological process + supers = uri_roots.get(bp); + assertTrue("GO_0022607 not subclass of GO_000815 biological process", supers.contains("http://purl.obolibrary.org/obo/GO_0008150")); + //molecular function + supers = uri_roots.get(mf); + assertTrue("GO_0060090 not subclass of molecular function GO_0003674", supers.contains("http://purl.obolibrary.org/obo/GO_0003674")); + //Gene products + //uniprot + supers = uri_roots.get(human_protein); + //protein + 
assertTrue("uniprot/Q13253 not subclass of CHEBI_36080 protein", supers.contains("http://purl.obolibrary.org/obo/CHEBI_36080")); + assertTrue("uniprot/Q13253 not subclass of CHEBI_36695 information biomacromolecule", supers.contains("http://purl.obolibrary.org/obo/CHEBI_33695")); + assertTrue("uniprot/Q13253 not subclass of CHEBI_24431 chemical entity", supers.contains("http://purl.obolibrary.org/obo/CHEBI_24431")); + //"gene".. + //zfin + supers = uri_roots.get(zfin_protein); + assertTrue("ZDB-GENE-010410-3 not subclass of CHEBI_36695 information biomacromolecule", supers.contains("http://purl.obolibrary.org/obo/CHEBI_33695")); + assertTrue("ZDB-GENE-010410-3 not subclass of CHEBI_24431 chemical entity", supers.contains("http://purl.obolibrary.org/obo/CHEBI_24431")); + //wormbase + supers = uri_roots.get(worm_gene); + assertTrue("WBGene00000275 not subclass of CHEBI_36695 information biomacromolecule", supers.contains("http://purl.obolibrary.org/obo/CHEBI_33695")); + assertTrue("WBGene00000275 not subclass of CHEBI_24431 chemical entity", supers.contains("http://purl.obolibrary.org/obo/CHEBI_24431")); + } + + @Test + public void testGetComplexPortalTypes() throws IOException { + //make sure its possible to get from leaf to root for the key classes + Set uris = new HashSet(); + String cp1 = "https://www.ebi.ac.uk/complexportal/complex/CPX-9"; + String cp2 = "https://www.ebi.ac.uk/complexportal/complex/CPX-4082"; + uris.add(cp1); + uris.add(cp2); + Map> uri_roots = onto_repo.getSuperCategoryMap(uris); + Set supers = uri_roots.get(cp1); + assertTrue("ComplexPortal_CPX-9 should be a protein-containing complex", supers.contains("http://purl.obolibrary.org/obo/GO_0032991")); + supers = uri_roots.get(cp2); + assertTrue("ComplexPortal_CPX-4082 should be a protein-containing complex", supers.contains("http://purl.obolibrary.org/obo/GO_0032991")); + } + } diff --git a/minerva-core/src/test/java/org/geneontology/minerva/CoreMolecularModelManagerTest.java b/minerva-core/src/test/java/org/geneontology/minerva/CoreMolecularModelManagerTest.java index bb419889..12cb305b 100644 --- a/minerva-core/src/test/java/org/geneontology/minerva/CoreMolecularModelManagerTest.java +++ b/minerva-core/src/test/java/org/geneontology/minerva/CoreMolecularModelManagerTest.java @@ -1,115 +1,93 @@ package org.geneontology.minerva; -import static org.junit.Assert.*; +import org.junit.Test; +import org.semanticweb.owlapi.apibinding.OWLManager; +import org.semanticweb.owlapi.formats.TurtleDocumentFormat; +import org.semanticweb.owlapi.io.IRIDocumentSource; +import org.semanticweb.owlapi.model.*; import java.io.File; import java.io.FileNotFoundException; import java.io.FileOutputStream; -import java.io.IOException; -import java.util.HashSet; import java.util.Set; -import org.apache.jena.rdf.model.Model; -import org.apache.jena.rdf.model.ModelFactory; -import org.geneontology.minerva.CoreMolecularModelManager; -import org.geneontology.minerva.validation.ShexValidationReport; -import org.junit.Test; -import org.semanticweb.owlapi.apibinding.OWLManager; -import org.semanticweb.owlapi.formats.RDFXMLDocumentFormat; -import org.semanticweb.owlapi.formats.TurtleDocumentFormat; -import org.semanticweb.owlapi.io.IRIDocumentSource; -import org.semanticweb.owlapi.io.UnparsableOntologyException; -import org.semanticweb.owlapi.model.AddImport; -import org.semanticweb.owlapi.model.AxiomType; -import org.semanticweb.owlapi.model.IRI; -import org.semanticweb.owlapi.model.OWLAnnotation; -import 
org.semanticweb.owlapi.model.OWLAnnotationProperty; -import org.semanticweb.owlapi.model.OWLDataFactory; -import org.semanticweb.owlapi.model.OWLDeclarationAxiom; -import org.semanticweb.owlapi.model.OWLDocumentFormat; -import org.semanticweb.owlapi.model.OWLImportsDeclaration; -import org.semanticweb.owlapi.model.OWLOntology; -import org.semanticweb.owlapi.model.OWLOntologyCreationException; -import org.semanticweb.owlapi.model.OWLOntologyManager; -import org.semanticweb.owlapi.model.OWLOntologyStorageException; -import org.semanticweb.owlapi.model.RemoveImport; -import org.semanticweb.owlapi.model.RemoveOntologyAnnotation; +import static org.junit.Assert.assertTrue; public class CoreMolecularModelManagerTest { -// @Test(expected=UnparsableOntologyException.class) - public void testSyntaxErrorModel() throws Exception { - OWLOntologyManager m = OWLManager.createOWLOntologyManager(); - final IRI modelFile = IRI.create(new File("src/test/resources/syntax-error/5667fdd400000802").getAbsoluteFile()); - CoreMolecularModelManager.loadOntologyDocumentSource(new IRIDocumentSource(modelFile), false, m); - } + // @Test(expected=UnparsableOntologyException.class) + public void testSyntaxErrorModel() throws Exception { + OWLOntologyManager m = OWLManager.createOWLOntologyManager(); + final IRI modelFile = IRI.create(new File("src/test/resources/syntax-error/5667fdd400000802").getAbsoluteFile()); + CoreMolecularModelManager.loadOntologyDocumentSource(new IRIDocumentSource(modelFile), false, m); + } + + @Test + public void testCleanOntology() throws OWLOntologyCreationException { + OWLOntologyManager m = OWLManager.createOWLOntologyManager(); + File directory = new File("src/test/resources/broken-ontologies/"); + boolean ignore_imports = true; + if (directory.isDirectory()) { + for (File file : directory.listFiles()) { + if (file.getName().endsWith("ttl")) { + System.out.println("fixing " + file.getAbsolutePath()); + final IRI modelFile = IRI.create(file.getAbsoluteFile()); + OWLOntology o; + try { + o = CoreMolecularModelManager.loadOntologyDocumentSource(new IRIDocumentSource(modelFile), ignore_imports, m); + //in case the reader was confused by the missing import, fix declarations + o = CoreMolecularModelManager.fixBrokenObjectPropertiesAndAxioms(o); + //check on what came in + int obj_prop_assertions_in = o.getAxiomCount(AxiomType.OBJECT_PROPERTY_ASSERTION); + int anno_prop_assertions_in = o.getAxiomCount(AxiomType.ANNOTATION_ASSERTION); + String title_in = getTitle(o); + //clean the model + OWLOntology cleaned_ont = CoreMolecularModelManager.removeDeadAnnotationsAndImports(o); + //saved the blessed ontology + OWLDocumentFormat owlFormat = new TurtleDocumentFormat(); + m.setOntologyFormat(cleaned_ont, owlFormat); + String cleaned_ont_file = "src/test/resources/broken-ontologies/fixed/fixed_" + file.getName(); + System.out.println("Saving " + title_in + " from file " + cleaned_ont_file); + try { + m.saveOntology(cleaned_ont, new FileOutputStream(cleaned_ont_file)); + } catch (OWLOntologyStorageException | FileNotFoundException e) { + // TODO Auto-generated catch block + e.printStackTrace(); + } + //read the ontology back in and check that it makes sense + File newfile = new File(cleaned_ont_file); + final IRI cleaned_iri = IRI.create(newfile.getAbsoluteFile()); + OWLOntology cleaned = CoreMolecularModelManager.loadOntologyDocumentSource(new IRIDocumentSource(cleaned_iri), false, m); + //no imports + Set cleaned_imports = cleaned.getImportsDeclarations(); + assertTrue("found an import where we 
shouldn't in " + cleaned_ont_file, cleaned_imports.size() == 0); + //same number of object prop and annotation assertions + int obj_prop_assertions_out = cleaned.getAxiomCount(AxiomType.OBJECT_PROPERTY_ASSERTION); + int anno_prop_assertions_out = cleaned.getAxiomCount(AxiomType.ANNOTATION_ASSERTION); + assertTrue("lost some object property assertions in " + cleaned_ont_file, obj_prop_assertions_in == obj_prop_assertions_out); + assertTrue("lost some annotation property assertions in " + cleaned_ont_file, anno_prop_assertions_in == anno_prop_assertions_out); + //check on ontology annotatins + String title_out = getTitle(cleaned); + assertTrue("lost some ontology annotations in " + cleaned_ont_file, title_in.equals(title_out)); + } catch (OWLOntologyCreationException e1) { + // TODO Auto-generated catch block + e1.printStackTrace(); + } + } + } + } + } - @Test - public void testCleanOntology() throws OWLOntologyCreationException { - OWLOntologyManager m = OWLManager.createOWLOntologyManager(); - File directory = new File("src/test/resources/broken-ontologies/"); - boolean ignore_imports = true; - if(directory.isDirectory()) { - for(File file : directory.listFiles()) { - if(file.getName().endsWith("ttl")) { - System.out.println("fixing "+file.getAbsolutePath()); - final IRI modelFile = IRI.create(file.getAbsoluteFile()); - OWLOntology o; - try { - o = CoreMolecularModelManager.loadOntologyDocumentSource(new IRIDocumentSource(modelFile), ignore_imports, m); - //in case the reader was confused by the missing import, fix declarations - o = CoreMolecularModelManager.fixBrokenObjectPropertiesAndAxioms(o); - //check on what came in - int obj_prop_assertions_in = o.getAxiomCount(AxiomType.OBJECT_PROPERTY_ASSERTION); - int anno_prop_assertions_in = o.getAxiomCount(AxiomType.ANNOTATION_ASSERTION); - String title_in = getTitle(o); - //clean the model - OWLOntology cleaned_ont = CoreMolecularModelManager.removeDeadAnnotationsAndImports(o); - //saved the blessed ontology - OWLDocumentFormat owlFormat = new TurtleDocumentFormat(); - m.setOntologyFormat(cleaned_ont, owlFormat); - String cleaned_ont_file = "src/test/resources/broken-ontologies/fixed/fixed_"+file.getName(); - System.out.println("Saving "+title_in+" from file "+cleaned_ont_file); - try { - m.saveOntology(cleaned_ont, new FileOutputStream(cleaned_ont_file)); - } catch (OWLOntologyStorageException | FileNotFoundException e) { - // TODO Auto-generated catch block - e.printStackTrace(); - } - //read the ontology back in and check that it makes sense - File newfile = new File(cleaned_ont_file); - final IRI cleaned_iri = IRI.create(newfile.getAbsoluteFile()); - OWLOntology cleaned = CoreMolecularModelManager.loadOntologyDocumentSource(new IRIDocumentSource(cleaned_iri), false, m); - //no imports - Set cleaned_imports = cleaned.getImportsDeclarations(); - assertTrue("found an import where we shouldn't in "+cleaned_ont_file, cleaned_imports.size()==0); - //same number of object prop and annotation assertions - int obj_prop_assertions_out = cleaned.getAxiomCount(AxiomType.OBJECT_PROPERTY_ASSERTION); - int anno_prop_assertions_out = cleaned.getAxiomCount(AxiomType.ANNOTATION_ASSERTION); - assertTrue("lost some object property assertions in "+cleaned_ont_file, obj_prop_assertions_in==obj_prop_assertions_out); - assertTrue("lost some annotation property assertions in "+cleaned_ont_file, anno_prop_assertions_in==anno_prop_assertions_out); - //check on ontology annotatins - String title_out = getTitle(cleaned); - assertTrue("lost some ontology annotations 
in "+cleaned_ont_file, title_in.equals(title_out)); - } catch (OWLOntologyCreationException e1) { - // TODO Auto-generated catch block - e1.printStackTrace(); - } - } - } - } - } - - private String getTitle(OWLOntology ont) { - String title = ""; - for(OWLAnnotation anno : ont.getAnnotations()) { - if(anno.getProperty().getIRI().toString().equals("http://purl.org/dc/elements/1.1/title")) { - title = anno.getValue().asLiteral().get().getLiteral(); - break; - } - } - return title; - } + private String getTitle(OWLOntology ont) { + String title = ""; + for (OWLAnnotation anno : ont.getAnnotations()) { + if (anno.getProperty().getIRI().toString().equals("http://purl.org/dc/elements/1.1/title")) { + title = anno.getValue().asLiteral().get().getLiteral(); + break; + } + } + return title; + } } diff --git a/minerva-core/src/test/java/org/geneontology/minerva/MolecularModelManagerTest.java b/minerva-core/src/test/java/org/geneontology/minerva/MolecularModelManagerTest.java index 36e22042..4764cf7e 100644 --- a/minerva-core/src/test/java/org/geneontology/minerva/MolecularModelManagerTest.java +++ b/minerva-core/src/test/java/org/geneontology/minerva/MolecularModelManagerTest.java @@ -1,16 +1,5 @@ package org.geneontology.minerva; -import static org.junit.Assert.assertEquals; -import static org.junit.Assert.assertFalse; -import static org.junit.Assert.assertNotNull; -import static org.junit.Assert.assertTrue; - -import java.io.File; -import java.io.IOException; -import java.util.Collection; -import java.util.Collections; -import java.util.Set; - import org.apache.log4j.Level; import org.apache.log4j.Logger; import org.geneontology.minerva.MolecularModelManager.UnknownIdentifierException; @@ -22,184 +11,192 @@ import org.semanticweb.owlapi.apibinding.OWLManager; import org.semanticweb.owlapi.model.*; +import java.io.File; +import java.io.IOException; +import java.util.Collection; +import java.util.Collections; +import java.util.Set; + +import static org.junit.Assert.*; + public class MolecularModelManagerTest { - static final String go_lego_journal_file = "/tmp/test-go-lego-blazegraph.jnl"; - - // JUnit way of creating a temporary test folder - // will be deleted after the test has run, by JUnit. - @Rule + static final String go_lego_journal_file = "/tmp/test-go-lego-blazegraph.jnl"; + + // JUnit way of creating a temporary test folder + // will be deleted after the test has run, by JUnit. + @Rule public TemporaryFolder folder = new TemporaryFolder(); - private final CurieHandler curieHandler = DefaultCurieHandler.getDefaultHandler(); - - static{ - Logger.getLogger("org.semanticweb.elk").setLevel(Level.ERROR); - } - - private MolecularModelManager createM3(OWLOntology tbox, File journal) throws OWLOntologyCreationException, IOException { - return new MolecularModelManager(tbox, curieHandler, "http://testmodel.geneontology.org/", journal.getAbsolutePath(), null, go_lego_journal_file, true); - } - - @Test - public void testDeleteIndividual() throws Exception { - OWLOntology tbox = OWLManager.createOWLOntologyManager().loadOntologyFromOntologyDocument(this.getClass().getResourceAsStream("/go-mgi-signaling-test.obo")); - - // GO:0038024 ! cargo receptor activity - // GO:0042803 ! 
protein homodimerization activity - - MolecularModelManager mmm = createM3(tbox, folder.newFile()); - - ModelContainer model = mmm.generateBlankModel(null); - OWLNamedIndividual i1 = mmm.createIndividual(model.getModelId(), "GO:0038024", null, null); - - OWLNamedIndividual i2 = mmm.createIndividual(model.getModelId(), "GO:0042803", null, null); - - addPartOf(model, i1, i2, mmm); - - // String js = renderJSON(modelId); - // System.out.println("-------------"); - // System.out.println("INDS:" + js); - // - // System.out.println("-------------"); - - mmm.deleteIndividual(model, i2, null); - - // js = renderJSON(modelId); - // System.out.println("INDS:" + js); - // System.out.println("-------------"); - - Set individuals = mmm.getIndividuals(model.getModelId()); - assertEquals(1, individuals.size()); - mmm.dispose(); - } - - @Test - public void testExportImport() throws Exception { - OWLOntology tbox = OWLManager.createOWLOntologyManager().loadOntologyFromOntologyDocument(this.getClass().getResourceAsStream("/go-mgi-signaling-test.obo")); - - // GO:0038024 ! cargo receptor activity - // GO:0042803 ! protein homodimerization activity - // GO:0008233 ! peptidase activity - - File journalFile = folder.newFile(); - MolecularModelManager mmm = createM3(tbox, journalFile); - - final ModelContainer model = mmm.generateBlankModel(null); - final OWLNamedIndividual i1 = mmm.createIndividual(model.getModelId(), "GO:0038024", null, null); - - final OWLNamedIndividual i2 = mmm.createIndividual(model.getModelId(), "GO:0042803", null, null); - - addPartOf(model, i1, i2, mmm); - - // export - final String modelContent = mmm.exportModel(model); - final IRI modelId1 = model.getModelId(); - - // add an additional individual to model after export - final OWLNamedIndividual i3 = mmm.createIndividual(model.getModelId(), "GO:0008233", null, null); - assertEquals(3, mmm.getIndividuals(model.getModelId()).size()); - - - // import - final ModelContainer model2 = mmm.importModel(modelContent); - - final String modelContent2 = mmm.exportModel(model2); - assertEquals(modelContent, modelContent2); - - assertEquals(modelId1, model2.getModelId()); - Set loaded = mmm.getIndividuals(model2.getModelId()); - assertEquals(2, loaded.size()); - for (OWLNamedIndividual i : loaded) { - IRI iri = i.getIRI(); - // check that the model only contains the individuals created before the export - assertTrue(iri.equals(i1.getIRI()) || iri.equals(i2.getIRI())); - assertFalse(iri.equals(i3.getIRI())); - } - mmm.dispose(); - } - - @Test - public void testSaveModel() throws Exception { - OWLOntology tbox = OWLManager.createOWLOntologyManager().loadOntologyFromOntologyDocument(this.getClass().getResourceAsStream("/go-mgi-signaling-test.obo")); - - File journalFile = folder.newFile(); - MolecularModelManager mmm = createM3(tbox, journalFile); - - // GO:0038024 ! cargo receptor activity - // GO:0042803 ! protein homodimerization activity - // GO:0008233 ! 
peptidase activity - - final ModelContainer model = mmm.generateBlankModel(null); - final OWLNamedIndividual i1 = mmm.createIndividual(model.getModelId(), "GO:0038024", null, null); - - final OWLNamedIndividual i2 = mmm.createIndividual(model.getModelId(), "GO:0042803", null, null); - - addPartOf(model, i1, i2, mmm); - - // save - mmm.saveModel(model, null, null); - - // add an additional individual to model after export - final OWLNamedIndividual i3 = mmm.createIndividual(model.getModelId(), "GO:0008233", null, null); - assertEquals(3, mmm.getIndividuals(model.getModelId()).size()); - - // discard mmm - mmm.dispose(); - mmm = null; - - OWLOntology tbox2 = OWLManager.createOWLOntologyManager().loadOntologyFromOntologyDocument(this.getClass().getResourceAsStream("/go-mgi-signaling-test.obo")); - - - mmm = createM3(tbox2, journalFile); - - Set availableModelIds = mmm.getAvailableModelIds(); - assertTrue(availableModelIds.contains(model.getModelId())); - - final ModelContainer model2 = mmm.getModel(model.getModelId()); - assertNotNull(model2); - - Collection loaded = mmm.getIndividuals(model2.getModelId()); - assertEquals(2, loaded.size()); - for (OWLNamedIndividual i : loaded) { - IRI iri = i.getIRI(); - // check that the model only contains the individuals created before the save - assertTrue(iri.equals(i1.getIRI()) || iri.equals(i2.getIRI())); - assertFalse(iri.equals(i3.getIRI())); - } - mmm.dispose(); - } - - @Test - public void testInferredType() throws Exception { - OWLOntology tbox = OWLManager.createOWLOntologyManager().loadOntologyFromOntologyDocument(this.getClass().getResourceAsStream("/go-mgi-signaling-test.obo")); - - - // GO:0038024 ! cargo receptor activity - // GO:0042803 ! protein homodimerization activity - - File journalFile = folder.newFile(); - MolecularModelManager mmm = createM3(tbox, journalFile); - - ModelContainer model = mmm.generateBlankModel(null); - OWLNamedIndividual cc = mmm.createIndividual(model.getModelId(), "GO:0004872", null, null); // receptor activity - - - OWLNamedIndividual mit = mmm.createIndividual(model.getModelId(), "GO:0007166", null, null); // cell surface receptor signaling pathway - - addPartOf(model, mit, cc, mmm); - - // we expect inference to be to: GO:0038023 signaling receptor activity - // See discussion here: https://github.com/kltm/go-mme/issues/3 - - //List> gson = mmm.getIndividualObjects(modelId); - //assertEquals(1, individuals.size()); - mmm.dispose(); - } - - private void addPartOf(ModelContainer model, OWLNamedIndividual i1, OWLNamedIndividual i2, - MolecularModelManager m3) throws UnknownIdentifierException { - IRI partOfIRI = curieHandler.getIRI("BFO:0000050"); - final OWLObjectProperty partOf = model.getOWLDataFactory().getOWLObjectProperty(partOfIRI); - m3.addFact(model, partOf, i1, i2, Collections.emptySet(), null); - } + private final CurieHandler curieHandler = DefaultCurieHandler.getDefaultHandler(); + + static { + Logger.getLogger("org.semanticweb.elk").setLevel(Level.ERROR); + } + + private MolecularModelManager createM3(OWLOntology tbox, File journal) throws OWLOntologyCreationException, IOException { + return new MolecularModelManager(tbox, curieHandler, "http://testmodel.geneontology.org/", journal.getAbsolutePath(), null, go_lego_journal_file, true); + } + + @Test + public void testDeleteIndividual() throws Exception { + OWLOntology tbox = OWLManager.createOWLOntologyManager().loadOntologyFromOntologyDocument(this.getClass().getResourceAsStream("/go-mgi-signaling-test.obo")); + + // GO:0038024 ! 
cargo receptor activity + // GO:0042803 ! protein homodimerization activity + + MolecularModelManager mmm = createM3(tbox, folder.newFile()); + + ModelContainer model = mmm.generateBlankModel(null); + OWLNamedIndividual i1 = mmm.createIndividual(model.getModelId(), "GO:0038024", null, null); + + OWLNamedIndividual i2 = mmm.createIndividual(model.getModelId(), "GO:0042803", null, null); + + addPartOf(model, i1, i2, mmm); + + // String js = renderJSON(modelId); + // System.out.println("-------------"); + // System.out.println("INDS:" + js); + // + // System.out.println("-------------"); + + mmm.deleteIndividual(model, i2, null); + + // js = renderJSON(modelId); + // System.out.println("INDS:" + js); + // System.out.println("-------------"); + + Set individuals = mmm.getIndividuals(model.getModelId()); + assertEquals(1, individuals.size()); + mmm.dispose(); + } + + @Test + public void testExportImport() throws Exception { + OWLOntology tbox = OWLManager.createOWLOntologyManager().loadOntologyFromOntologyDocument(this.getClass().getResourceAsStream("/go-mgi-signaling-test.obo")); + + // GO:0038024 ! cargo receptor activity + // GO:0042803 ! protein homodimerization activity + // GO:0008233 ! peptidase activity + + File journalFile = folder.newFile(); + MolecularModelManager mmm = createM3(tbox, journalFile); + + final ModelContainer model = mmm.generateBlankModel(null); + final OWLNamedIndividual i1 = mmm.createIndividual(model.getModelId(), "GO:0038024", null, null); + + final OWLNamedIndividual i2 = mmm.createIndividual(model.getModelId(), "GO:0042803", null, null); + + addPartOf(model, i1, i2, mmm); + + // export + final String modelContent = mmm.exportModel(model); + final IRI modelId1 = model.getModelId(); + + // add an additional individual to model after export + final OWLNamedIndividual i3 = mmm.createIndividual(model.getModelId(), "GO:0008233", null, null); + assertEquals(3, mmm.getIndividuals(model.getModelId()).size()); + + + // import + final ModelContainer model2 = mmm.importModel(modelContent); + + final String modelContent2 = mmm.exportModel(model2); + assertEquals(modelContent, modelContent2); + + assertEquals(modelId1, model2.getModelId()); + Set loaded = mmm.getIndividuals(model2.getModelId()); + assertEquals(2, loaded.size()); + for (OWLNamedIndividual i : loaded) { + IRI iri = i.getIRI(); + // check that the model only contains the individuals created before the export + assertTrue(iri.equals(i1.getIRI()) || iri.equals(i2.getIRI())); + assertFalse(iri.equals(i3.getIRI())); + } + mmm.dispose(); + } + + @Test + public void testSaveModel() throws Exception { + OWLOntology tbox = OWLManager.createOWLOntologyManager().loadOntologyFromOntologyDocument(this.getClass().getResourceAsStream("/go-mgi-signaling-test.obo")); + + File journalFile = folder.newFile(); + MolecularModelManager mmm = createM3(tbox, journalFile); + + // GO:0038024 ! cargo receptor activity + // GO:0042803 ! protein homodimerization activity + // GO:0008233 ! 
peptidase activity + + final ModelContainer model = mmm.generateBlankModel(null); + final OWLNamedIndividual i1 = mmm.createIndividual(model.getModelId(), "GO:0038024", null, null); + + final OWLNamedIndividual i2 = mmm.createIndividual(model.getModelId(), "GO:0042803", null, null); + + addPartOf(model, i1, i2, mmm); + + // save + mmm.saveModel(model, null, null); + + // add an additional individual to model after export + final OWLNamedIndividual i3 = mmm.createIndividual(model.getModelId(), "GO:0008233", null, null); + assertEquals(3, mmm.getIndividuals(model.getModelId()).size()); + + // discard mmm + mmm.dispose(); + mmm = null; + + OWLOntology tbox2 = OWLManager.createOWLOntologyManager().loadOntologyFromOntologyDocument(this.getClass().getResourceAsStream("/go-mgi-signaling-test.obo")); + + + mmm = createM3(tbox2, journalFile); + + Set availableModelIds = mmm.getAvailableModelIds(); + assertTrue(availableModelIds.contains(model.getModelId())); + + final ModelContainer model2 = mmm.getModel(model.getModelId()); + assertNotNull(model2); + + Collection loaded = mmm.getIndividuals(model2.getModelId()); + assertEquals(2, loaded.size()); + for (OWLNamedIndividual i : loaded) { + IRI iri = i.getIRI(); + // check that the model only contains the individuals created before the save + assertTrue(iri.equals(i1.getIRI()) || iri.equals(i2.getIRI())); + assertFalse(iri.equals(i3.getIRI())); + } + mmm.dispose(); + } + + @Test + public void testInferredType() throws Exception { + OWLOntology tbox = OWLManager.createOWLOntologyManager().loadOntologyFromOntologyDocument(this.getClass().getResourceAsStream("/go-mgi-signaling-test.obo")); + + + // GO:0038024 ! cargo receptor activity + // GO:0042803 ! protein homodimerization activity + + File journalFile = folder.newFile(); + MolecularModelManager mmm = createM3(tbox, journalFile); + + ModelContainer model = mmm.generateBlankModel(null); + OWLNamedIndividual cc = mmm.createIndividual(model.getModelId(), "GO:0004872", null, null); // receptor activity + + + OWLNamedIndividual mit = mmm.createIndividual(model.getModelId(), "GO:0007166", null, null); // cell surface receptor signaling pathway + + addPartOf(model, mit, cc, mmm); + + // we expect inference to be to: GO:0038023 signaling receptor activity + // See discussion here: https://github.com/kltm/go-mme/issues/3 + + //List> gson = mmm.getIndividualObjects(modelId); + //assertEquals(1, individuals.size()); + mmm.dispose(); + } + + private void addPartOf(ModelContainer model, OWLNamedIndividual i1, OWLNamedIndividual i2, + MolecularModelManager m3) throws UnknownIdentifierException { + IRI partOfIRI = curieHandler.getIRI("BFO:0000050"); + final OWLObjectProperty partOf = model.getOWLDataFactory().getOWLObjectProperty(partOfIRI); + m3.addFact(model, partOf, i1, i2, Collections.emptySet(), null); + } } diff --git a/minerva-core/src/test/java/org/geneontology/minerva/UndoAwareMolecularModelManagerTest.java b/minerva-core/src/test/java/org/geneontology/minerva/UndoAwareMolecularModelManagerTest.java index 2b066837..5c548d82 100644 --- a/minerva-core/src/test/java/org/geneontology/minerva/UndoAwareMolecularModelManagerTest.java +++ b/minerva-core/src/test/java/org/geneontology/minerva/UndoAwareMolecularModelManagerTest.java @@ -1,12 +1,5 @@ package org.geneontology.minerva; -import static org.junit.Assert.assertEquals; -import static org.junit.Assert.assertFalse; -import static org.junit.Assert.assertTrue; - -import java.io.File; -import java.util.List; - import org.apache.commons.lang3.tuple.Pair; import 
org.geneontology.minerva.UndoAwareMolecularModelManager.ChangeEvent; import org.geneontology.minerva.UndoAwareMolecularModelManager.UndoMetadata; @@ -14,86 +7,84 @@ import org.geneontology.minerva.curie.DefaultCurieHandler; import org.geneontology.minerva.json.JsonOwlIndividual; import org.geneontology.minerva.json.MolecularModelJsonRenderer; -import org.junit.AfterClass; -import org.junit.BeforeClass; import org.junit.Rule; import org.junit.Test; import org.junit.rules.TemporaryFolder; import org.semanticweb.owlapi.apibinding.OWLManager; -import org.semanticweb.owlapi.model.IRI; import org.semanticweb.owlapi.model.OWLNamedIndividual; - import org.semanticweb.owlapi.model.OWLOntology; -import owltools.io.ParserWrapper; +import java.util.List; + +import static org.junit.Assert.*; + +public class UndoAwareMolecularModelManagerTest { -public class UndoAwareMolecularModelManagerTest { + static MinervaOWLGraphWrapper g = null; + static CurieHandler curieHandler = DefaultCurieHandler.getDefaultHandler(); + static UndoAwareMolecularModelManager m3 = null; + static final String go_lego_journal_file = "/tmp/test-go-lego-blazegraph.jnl"; - static MinervaOWLGraphWrapper g = null; - static CurieHandler curieHandler = DefaultCurieHandler.getDefaultHandler(); - static UndoAwareMolecularModelManager m3 = null; - static final String go_lego_journal_file = "/tmp/test-go-lego-blazegraph.jnl"; - - @Rule + @Rule public TemporaryFolder folder = new TemporaryFolder(); - - @Test - public void testUndoRedo() throws Exception { - OWLOntology tbox = OWLManager.createOWLOntologyManager().loadOntologyFromOntologyDocument(this.getClass().getResourceAsStream("/go-mgi-signaling-test.obo")); - m3 = new UndoAwareMolecularModelManager(tbox, curieHandler, "http://testmodel.geneontology.org/", folder.newFile().getAbsolutePath(), null, go_lego_journal_file, true); - - String userId = "test-user-id"; - ModelContainer model = m3.generateBlankModel(null); - // GO:0001158 ! enhancer sequence-specific DNA binding - OWLNamedIndividual bindingIdividual = m3.createIndividual(model.getModelId(), "GO:0001158", null, new UndoMetadata(userId)); - String bindingId = bindingIdividual.getIRI().toString(); - // BFO:0000066 GO:0005654 ! occurs_in nucleoplasm - m3.addType(model.getModelId(), bindingId, "BFO:0000066", "GO:0005654", new UndoMetadata(userId)); - - MolecularModelJsonRenderer renderer = new MolecularModelJsonRenderer(model, null, curieHandler); - JsonOwlIndividual render1 = renderer.renderObject(bindingIdividual); - assertEquals(2, render1.type.length); - - // check event count - Pair,List> undoRedoEvents = m3.getUndoRedoEvents(model.getModelId()); - List undoEvents = undoRedoEvents.getLeft(); - List redoEvents = undoRedoEvents.getRight(); - assertEquals(0, redoEvents.size()); - assertEquals(2, undoEvents.size()); - - // undo - assertTrue(m3.undo(model, userId)); - - JsonOwlIndividual render2 = renderer.renderObject(bindingIdividual); - assertEquals(1, render2.type.length); - - // redo - assertTrue(m3.redo(model, userId)); - JsonOwlIndividual render3 = renderer.renderObject(bindingIdividual); - assertEquals(2, render3.type.length); - - // undo again - assertTrue(m3.undo(model, userId)); - JsonOwlIndividual render4 = renderer.renderObject(bindingIdividual); - assertEquals(1, render4.type.length); - - // add new type - // GO:0001664 ! 
G-protein coupled receptor binding - m3.addType(model.getModelId(), bindingId, "GO:0001664", new UndoMetadata(userId)); - - // redo again, should fail - assertFalse(m3.redo(model, userId)); - - if (m3 != null) { - m3.dispose(); - } - } - - static void printToJson(Object obj) { - String json = MolecularModelJsonRenderer.renderToJson(obj, true); - System.out.println("---------"); - System.out.println(json); - System.out.println("---------"); - } - + + @Test + public void testUndoRedo() throws Exception { + OWLOntology tbox = OWLManager.createOWLOntologyManager().loadOntologyFromOntologyDocument(this.getClass().getResourceAsStream("/go-mgi-signaling-test.obo")); + m3 = new UndoAwareMolecularModelManager(tbox, curieHandler, "http://testmodel.geneontology.org/", folder.newFile().getAbsolutePath(), null, go_lego_journal_file, true); + + String userId = "test-user-id"; + ModelContainer model = m3.generateBlankModel(null); + // GO:0001158 ! enhancer sequence-specific DNA binding + OWLNamedIndividual bindingIdividual = m3.createIndividual(model.getModelId(), "GO:0001158", null, new UndoMetadata(userId)); + String bindingId = bindingIdividual.getIRI().toString(); + // BFO:0000066 GO:0005654 ! occurs_in nucleoplasm + m3.addType(model.getModelId(), bindingId, "BFO:0000066", "GO:0005654", new UndoMetadata(userId)); + + MolecularModelJsonRenderer renderer = new MolecularModelJsonRenderer(model, null, curieHandler); + JsonOwlIndividual render1 = renderer.renderObject(bindingIdividual); + assertEquals(2, render1.type.length); + + // check event count + Pair, List> undoRedoEvents = m3.getUndoRedoEvents(model.getModelId()); + List undoEvents = undoRedoEvents.getLeft(); + List redoEvents = undoRedoEvents.getRight(); + assertEquals(0, redoEvents.size()); + assertEquals(2, undoEvents.size()); + + // undo + assertTrue(m3.undo(model, userId)); + + JsonOwlIndividual render2 = renderer.renderObject(bindingIdividual); + assertEquals(1, render2.type.length); + + // redo + assertTrue(m3.redo(model, userId)); + JsonOwlIndividual render3 = renderer.renderObject(bindingIdividual); + assertEquals(2, render3.type.length); + + // undo again + assertTrue(m3.undo(model, userId)); + JsonOwlIndividual render4 = renderer.renderObject(bindingIdividual); + assertEquals(1, render4.type.length); + + // add new type + // GO:0001664 ! 
G-protein coupled receptor binding + m3.addType(model.getModelId(), bindingId, "GO:0001664", new UndoMetadata(userId)); + + // redo again, should fail + assertFalse(m3.redo(model, userId)); + + if (m3 != null) { + m3.dispose(); + } + } + + static void printToJson(Object obj) { + String json = MolecularModelJsonRenderer.renderToJson(obj, true); + System.out.println("---------"); + System.out.println(json); + System.out.println("---------"); + } + } diff --git a/minerva-core/src/test/java/org/geneontology/minerva/curie/DefaultCurieHandlerTest.java b/minerva-core/src/test/java/org/geneontology/minerva/curie/DefaultCurieHandlerTest.java index 6aa43f63..a3091397 100644 --- a/minerva-core/src/test/java/org/geneontology/minerva/curie/DefaultCurieHandlerTest.java +++ b/minerva-core/src/test/java/org/geneontology/minerva/curie/DefaultCurieHandlerTest.java @@ -1,30 +1,30 @@ package org.geneontology.minerva.curie; -import static org.junit.Assert.*; - -import java.io.InputStream; - import org.geneontology.minerva.MolecularModelManager.UnknownIdentifierException; import org.junit.Test; import org.semanticweb.owlapi.apibinding.OWLManager; import org.semanticweb.owlapi.model.IRI; import org.semanticweb.owlapi.model.OWLDataFactory; +import java.io.InputStream; + +import static org.junit.Assert.*; + public class DefaultCurieHandlerTest { - @Test - public void testAll() { - MappedCurieHandler handler = (MappedCurieHandler) DefaultCurieHandler.getDefaultHandler(); - assertFalse(handler.getInternalMappings().isEmpty()); - assertTrue(handler.getInternalMappings().containsKey("BFO")); - assertTrue(handler.getInternalMappings().containsKey("GO")); - assertTrue(handler.getInternalMappings().containsKey("IAO")); - assertTrue(handler.getInternalMappings().containsKey("MGI")); - assertTrue(handler.getInternalMappings().containsKey("ECO")); - assertTrue(handler.getInternalMappings().containsKey("PMID")); - assertFalse(handler.getInternalMappings().containsKey("BLABLA")); - } - + @Test + public void testAll() { + MappedCurieHandler handler = (MappedCurieHandler) DefaultCurieHandler.getDefaultHandler(); + assertFalse(handler.getInternalMappings().isEmpty()); + assertTrue(handler.getInternalMappings().containsKey("BFO")); + assertTrue(handler.getInternalMappings().containsKey("GO")); + assertTrue(handler.getInternalMappings().containsKey("IAO")); + assertTrue(handler.getInternalMappings().containsKey("MGI")); + assertTrue(handler.getInternalMappings().containsKey("ECO")); + assertTrue(handler.getInternalMappings().containsKey("PMID")); + assertFalse(handler.getInternalMappings().containsKey("BLABLA")); + } + @Test public void testGo() { InputStream stream = DefaultCurieHandler.loadResourceAsStream("go_context.jsonld"); @@ -32,7 +32,7 @@ public void testGo() { CurieMappings mappings = CurieMappingsJsonld.loadJsonLdContext(stream); assertFalse(mappings.getMappings().isEmpty()); } - + @Test public void testObo() { InputStream stream = DefaultCurieHandler.loadResourceAsStream("obo_context.jsonld"); @@ -40,40 +40,40 @@ public void testObo() { CurieMappings mappings = CurieMappingsJsonld.loadJsonLdContext(stream); assertFalse(mappings.getMappings().isEmpty()); } - - @Test - public void testMonarch() { - InputStream stream = DefaultCurieHandler.loadResourceAsStream("monarch_context.jsonld"); - assertNotNull(stream); - CurieMappings mappings = CurieMappingsJsonld.loadJsonLdContext(stream); - assertFalse(mappings.getMappings().isEmpty()); - } - - @Test - public void testConversions() throws UnknownIdentifierException { - 
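
(Illustrative aside, not part of the patch.) The CURIE round trip exercised by testConversions below can be summarized in a small hedged sketch; it uses only calls that appear in that test (DefaultCurieHandler.getDefaultHandler, getIRI, getCuri) and relies on getIRI throwing UnknownIdentifierException for unknown prefixes:

    import org.geneontology.minerva.MolecularModelManager.UnknownIdentifierException;
    import org.geneontology.minerva.curie.CurieHandler;
    import org.geneontology.minerva.curie.DefaultCurieHandler;
    import org.semanticweb.owlapi.apibinding.OWLManager;
    import org.semanticweb.owlapi.model.IRI;
    import org.semanticweb.owlapi.model.OWLDataFactory;

    class CurieRoundTripSketch {
        // Sketch only: expand a CURIE (e.g. "ECO:0000217") to its IRI, then contract it back.
        static String roundTrip(String curie) throws UnknownIdentifierException {
            CurieHandler handler = DefaultCurieHandler.getDefaultHandler();
            OWLDataFactory f = OWLManager.createOWLOntologyManager().getOWLDataFactory();
            IRI iri = handler.getIRI(curie);
            return handler.getCuri(f.getOWLClass(iri));
        }
    }
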
CurieHandler handler = DefaultCurieHandler.getDefaultHandler(); - final OWLDataFactory f = OWLManager.createOWLOntologyManager().getOWLDataFactory(); - - IRI longBFO = IRI.create("http://purl.obolibrary.org/obo/BFO_0000050"); - - assertEquals(longBFO, handler.getIRI("BFO:0000050")); - assertEquals("BFO:0000050", handler.getCuri(f.getOWLAnnotationProperty(longBFO))); - - - IRI longEco = IRI.create("http://purl.obolibrary.org/obo/ECO_0000217"); - assertEquals(longEco, handler.getIRI("ECO:0000217")); - assertEquals("ECO:0000217", handler.getCuri(f.getOWLClass(longEco))); - - - IRI longPmid = IRI.create("http://www.ncbi.nlm.nih.gov/pubmed/0000"); - assertEquals(longPmid, handler.getIRI("PMID:0000")); - assertEquals("PMID:0000", handler.getCuri(f.getOWLClass(longPmid))); - - // test failure for non existing prefix - try { - handler.getIRI("BLABLA:000001"); - fail("Expected an UnknownIdentifierException to be thrown"); - } catch (UnknownIdentifierException e) { - } - } + + @Test + public void testMonarch() { + InputStream stream = DefaultCurieHandler.loadResourceAsStream("monarch_context.jsonld"); + assertNotNull(stream); + CurieMappings mappings = CurieMappingsJsonld.loadJsonLdContext(stream); + assertFalse(mappings.getMappings().isEmpty()); + } + + @Test + public void testConversions() throws UnknownIdentifierException { + CurieHandler handler = DefaultCurieHandler.getDefaultHandler(); + final OWLDataFactory f = OWLManager.createOWLOntologyManager().getOWLDataFactory(); + + IRI longBFO = IRI.create("http://purl.obolibrary.org/obo/BFO_0000050"); + + assertEquals(longBFO, handler.getIRI("BFO:0000050")); + assertEquals("BFO:0000050", handler.getCuri(f.getOWLAnnotationProperty(longBFO))); + + + IRI longEco = IRI.create("http://purl.obolibrary.org/obo/ECO_0000217"); + assertEquals(longEco, handler.getIRI("ECO:0000217")); + assertEquals("ECO:0000217", handler.getCuri(f.getOWLClass(longEco))); + + + IRI longPmid = IRI.create("http://www.ncbi.nlm.nih.gov/pubmed/0000"); + assertEquals(longPmid, handler.getIRI("PMID:0000")); + assertEquals("PMID:0000", handler.getCuri(f.getOWLClass(longPmid))); + + // test failure for non existing prefix + try { + handler.getIRI("BLABLA:000001"); + fail("Expected an UnknownIdentifierException to be thrown"); + } catch (UnknownIdentifierException e) { + } + } } diff --git a/minerva-core/src/test/java/org/geneontology/minerva/json/MolecularModelJsonRendererTest.java b/minerva-core/src/test/java/org/geneontology/minerva/json/MolecularModelJsonRendererTest.java index 0e8d2f9a..c2931ed6 100644 --- a/minerva-core/src/test/java/org/geneontology/minerva/json/MolecularModelJsonRendererTest.java +++ b/minerva-core/src/test/java/org/geneontology/minerva/json/MolecularModelJsonRendererTest.java @@ -1,12 +1,5 @@ package org.geneontology.minerva.json; -import static org.junit.Assert.*; - -import java.io.File; -import java.util.Arrays; -import java.util.HashSet; -import java.util.Set; - import org.apache.commons.io.IOUtils; import org.apache.commons.lang3.StringUtils; import org.apache.commons.lang3.tuple.Pair; @@ -14,312 +7,296 @@ import org.geneontology.minerva.MolecularModelManager.UnknownIdentifierException; import org.geneontology.minerva.curie.CurieHandler; import org.geneontology.minerva.curie.DefaultCurieHandler; -import org.geneontology.minerva.json.JsonOwlIndividual; -import org.geneontology.minerva.json.JsonOwlObject; -import org.geneontology.minerva.json.MolecularModelJsonRenderer; import org.geneontology.minerva.json.JsonOwlObject.JsonOwlObjectType; import 
org.geneontology.minerva.util.AnnotationShorthand; import org.junit.AfterClass; import org.junit.BeforeClass; import org.junit.Test; -import org.semanticweb.owlapi.model.AddImport; -import org.semanticweb.owlapi.model.IRI; -import org.semanticweb.owlapi.model.OWLAnnotation; -import org.semanticweb.owlapi.model.OWLAnnotationProperty; -import org.semanticweb.owlapi.model.OWLClass; -import org.semanticweb.owlapi.model.OWLClassExpression; -import org.semanticweb.owlapi.model.OWLDataFactory; -import org.semanticweb.owlapi.model.OWLImportsDeclaration; -import org.semanticweb.owlapi.model.OWLNamedIndividual; -import org.semanticweb.owlapi.model.OWLObjectProperty; -import org.semanticweb.owlapi.model.OWLObjectSomeValuesFrom; -import org.semanticweb.owlapi.model.OWLOntology; -import org.semanticweb.owlapi.model.OWLOntologyManager; - +import org.semanticweb.owlapi.model.*; import owltools.io.ParserWrapper; +import java.io.File; +import java.util.Arrays; +import java.util.HashSet; +import java.util.Set; + +import static org.junit.Assert.*; + public class MolecularModelJsonRendererTest { - private static MinervaOWLGraphWrapper g = null; - private static CurieHandler curieHandler = DefaultCurieHandler.getDefaultHandler(); - private static OWLOntologyManager m = null; - private static OWLDataFactory f = null; - private static OWLObjectProperty partOf = null; - - @BeforeClass - public static void setUpBeforeClass() throws Exception { - ParserWrapper pw = new ParserWrapper(); - File file = new File("src/test/resources/mgi-go.obo").getCanonicalFile(); - OWLOntology ont = pw.parseOWL(IRI.create(file)); - g = new MinervaOWLGraphWrapper(ont); - f = g.getDataFactory(); - m = g.getManager(); - partOf = g.getOWLObjectPropertyByIdentifier("BFO:0000050"); - } - - @AfterClass - public static void tearDownAfterClass() throws Exception { - IOUtils.closeQuietly(g); - } - - @Test - public void testIRIConversion() throws Exception { - IRI evidenceIRI = AnnotationShorthand.evidence.getAnnotationProperty(); - OWLAnnotationProperty p = f.getOWLAnnotationProperty(evidenceIRI); - IRI iriValue = IRI.generateDocumentIRI(); - OWLAnnotation owlAnnotation = f.getOWLAnnotation(p, iriValue); - JsonAnnotation json = JsonTools.create(p, owlAnnotation.getValue(), null, curieHandler); - assertEquals(AnnotationShorthand.evidence.name(), json.key); - assertEquals(curieHandler.getCuri(iriValue), json.value); - assertEquals("IRI", json.valueType); - } - - @Test - public void testSimpleClass() throws Exception { - testSimpleClassExpression(g.getOWLClassByIdentifier("GO:0000003"), "class"); - } - - @Test - public void testSimpleSVF() throws Exception { - OWLObjectSomeValuesFrom svf = f.getOWLObjectSomeValuesFrom(g.getOWLObjectPropertyByIdentifier("BFO:0000050"), g.getOWLClassByIdentifier("GO:0000003")); - testSimpleClassExpression(svf, "svf"); - } - - @Test - public void testSimpleUnion() throws Exception { - OWLObjectSomeValuesFrom svf = f.getOWLObjectSomeValuesFrom(g.getOWLObjectPropertyByIdentifier("BFO:0000050"), g.getOWLClassByIdentifier("GO:0000003")); - OWLClass cls = g.getOWLClassByIdentifier("GO:0000122"); - testSimpleClassExpression(f.getOWLObjectUnionOf(cls, svf), "union"); - } - - @Test - public void testSimpleIntersection() throws Exception { - OWLObjectSomeValuesFrom svf = f.getOWLObjectSomeValuesFrom(g.getOWLObjectPropertyByIdentifier("BFO:0000050"), g.getOWLClassByIdentifier("GO:0000003")); - OWLClass cls = g.getOWLClassByIdentifier("GO:0000122"); - testSimpleClassExpression(f.getOWLObjectIntersectionOf(cls, svf), 
"intersection"); - } - - @Test - public void testAnnotations() throws Exception { - // setup test model/ontology - OWLOntology o = m.createOntology(); - OWLImportsDeclaration importDeclaration = f.getOWLImportsDeclaration(g.getSourceOntology().getOntologyID().getOntologyIRI().get()); - m.applyChange(new AddImport(o, importDeclaration)); - - final IRI i1IRI = IRI.generateDocumentIRI(); - final OWLNamedIndividual ni1 = f.getOWLNamedIndividual(i1IRI); - // declare individual - m.addAxiom(o, f.getOWLDeclarationAxiom(ni1)); - // add annotations - m.addAxiom(o, f.getOWLAnnotationAssertionAxiom(i1IRI, - f.getOWLAnnotation(f.getOWLAnnotationProperty( - AnnotationShorthand.comment.getAnnotationProperty()), - f.getOWLLiteral("Comment 1")))); - m.addAxiom(o, f.getOWLAnnotationAssertionAxiom(i1IRI, - f.getOWLAnnotation(f.getOWLAnnotationProperty( - AnnotationShorthand.comment.getAnnotationProperty()), - f.getOWLLiteral("Comment 2")))); - // declare type - m.addAxiom(o, f.getOWLClassAssertionAxiom(g.getOWLClassByIdentifier("GO:0000003"), ni1)); - - MolecularModelJsonRenderer r = new MolecularModelJsonRenderer(null, o, null, curieHandler); - - JsonOwlIndividual jsonOwlIndividualOriginal = r.renderObject(ni1); - assertEquals(2, jsonOwlIndividualOriginal.annotations.length); - - String json = MolecularModelJsonRenderer.renderToJson(jsonOwlIndividualOriginal, true); - - JsonOwlIndividual jsonOwlIndividualParse = MolecularModelJsonRenderer.parseFromJson(json, JsonOwlIndividual.class); - - assertNotNull(jsonOwlIndividualParse); - assertEquals(jsonOwlIndividualOriginal, jsonOwlIndividualParse); - } - - private void testSimpleClassExpression(OWLClassExpression ce, String expectedJsonType) throws Exception { - // setup test model/ontology - OWLOntology o = m.createOntology(); - OWLImportsDeclaration importDeclaration = f.getOWLImportsDeclaration(g.getSourceOntology().getOntologyID().getOntologyIRI().get()); - m.applyChange(new AddImport(o, importDeclaration)); - - // create indivdual with a ce type - final IRI i1IRI = IRI.generateDocumentIRI(); - final OWLNamedIndividual ni1 = f.getOWLNamedIndividual(i1IRI); - // declare individual - m.addAxiom(o, f.getOWLDeclarationAxiom(ni1)); - // declare type - m.addAxiom(o, f.getOWLClassAssertionAxiom(ce, ni1)); - - - MolecularModelJsonRenderer r = new MolecularModelJsonRenderer(null, o, null, curieHandler); - - JsonOwlIndividual jsonOwlIndividualOriginal = r.renderObject(ni1); - - String json = MolecularModelJsonRenderer.renderToJson(jsonOwlIndividualOriginal, true); - assertTrue(json, json.contains("\"type\": \""+expectedJsonType+"\"")); - - JsonOwlIndividual jsonOwlIndividualParse = MolecularModelJsonRenderer.parseFromJson(json, JsonOwlIndividual.class); - - assertNotNull(jsonOwlIndividualParse); - assertEquals(jsonOwlIndividualOriginal, jsonOwlIndividualParse); - - Set ces = TestJsonOwlObjectParser.parse(new MinervaOWLGraphWrapper(o), jsonOwlIndividualParse.type); - assertEquals(1, ces.size()); - assertEquals(ce, ces.iterator().next()); - } - - @Test - public void testPartialRenderer() throws Exception { - OWLOntology o = m.createOntology(); - OWLImportsDeclaration importDeclaration = f.getOWLImportsDeclaration(g.getSourceOntology().getOntologyID().getOntologyIRI().get()); - m.applyChange(new AddImport(o, importDeclaration)); - - // individuals - final OWLNamedIndividual a = addIndividual(o, "A", null); - final OWLNamedIndividual b = addIndividual(o, "B", null); - final OWLNamedIndividual c = addIndividual(o, "C", null); - final OWLNamedIndividual d = addIndividual(o, 
"D", null); - final OWLNamedIndividual e = addIndividual(o, "E", null); - final OWLNamedIndividual f = addIndividual(o, "F", null); - - // links - addFact(o, a, b, partOf); - addFact(o, b, a, partOf); - - addFact(o, b, c, partOf); - addFact(o, d, b, partOf); - addFact(o, e, a, partOf); - addFact(o, a, f, partOf); - - MolecularModelJsonRenderer r = new MolecularModelJsonRenderer(null, o, null, curieHandler); - - final String aId = curieHandler.getCuri(a); - final String bId = curieHandler.getCuri(b); - - Pair pair = r.renderIndividuals(Arrays.asList(a, b)); - assertEquals(2, pair.getLeft().length); - assertEquals(2, pair.getRight().length); - boolean foundAB = false; - boolean foundBA = false; - for(JsonOwlFact fact : pair.getRight()) { - if (aId.equals(fact.subject) && bId.equals(fact.object)) { - foundAB = true; - } - if (bId.equals(fact.subject) && aId.equals(fact.object)) { - foundBA = true; - } - } - assertTrue(foundAB); - assertTrue(foundBA); - } - - - private static OWLNamedIndividual addIndividual(OWLOntology o, String name, OWLClass typeCls) { - final IRI iri = IRI.generateDocumentIRI(); - final OWLNamedIndividual ni = f.getOWLNamedIndividual(iri); - // declare individual - m.addAxiom(o, f.getOWLDeclarationAxiom(ni)); - if (typeCls != null) { - m.addAxiom(o, f.getOWLClassAssertionAxiom(typeCls, ni)); - } - m.addAxiom(o, f.getOWLAnnotationAssertionAxiom(iri, f.getOWLAnnotation(f.getRDFSLabel(), f.getOWLLiteral(name)))); - - return ni; - } - - private static void addFact(OWLOntology o, OWLNamedIndividual source, OWLNamedIndividual target, OWLObjectProperty property) { - m.addAxiom(o, f.getOWLObjectPropertyAssertionAxiom(property, source, target)); - } - - static class TestJsonOwlObjectParser { - static OWLClassExpression parse(MinervaOWLGraphWrapper g, JsonOwlObject expression) - throws Exception { - if (expression == null) { - throw new Exception("Missing expression: null is not a valid expression."); - } - if (expression.type == null) { - throw new Exception("An expression type is required."); - } - if (JsonOwlObjectType.Class == expression.type) { - if (expression.id == null) { - throw new Exception("Missing literal for expression of type 'class'"); - } - if (StringUtils.containsWhitespace(expression.id)) { - throw new Exception("Identifiers may not contain whitespaces: '"+expression.id+"'"); - } - OWLClass cls = g.getOWLClassByIdentifier(expression.id); - if (cls == null) { - throw new Exception("Could not retrieve a class for id: "+expression.id); - } - return cls; - } - else if (JsonOwlObjectType.SomeValueFrom == expression.type) { - if (expression.property == null) { - throw new Exception("Missing property for expression of type 'svf'"); - } - if (expression.property.type != JsonOwlObjectType.ObjectProperty) { - throw new Exception("Unexpected type for Property in 'svf': "+expression.property.type); - } - if (expression.property.id == null) { - throw new Exception("Missing property id for expression of type 'svf'"); - } - OWLObjectProperty p = g.getOWLObjectPropertyByIdentifier(expression.property.id); - if (p == null) { - throw new UnknownIdentifierException("Could not find a property for: "+expression.property); - } - if (expression.filler != null) { - OWLClassExpression ce = parse(g, expression.filler); - return g.getDataFactory().getOWLObjectSomeValuesFrom(p, ce); - } - else { - throw new Exception("Missing literal or expression for expression of type 'svf'."); - } - } - else if (JsonOwlObjectType.IntersectionOf == expression.type) { - return parse(g, 
expression.expressions, JsonOwlObjectType.IntersectionOf); - } - else if (JsonOwlObjectType.UnionOf == expression.type) { - return parse(g, expression.expressions, JsonOwlObjectType.UnionOf); - } - else { - throw new UnknownIdentifierException("Unknown expression type: "+expression.type); - } - } - - static OWLClassExpression parse(MinervaOWLGraphWrapper g, JsonOwlObject[] expressions, JsonOwlObjectType type) - throws Exception { - if (expressions.length == 0) { - throw new Exception("Missing expressions: empty expression list is not allowed."); - } - if (expressions.length == 1) { - return parse(g, expressions[0]); - } - Set clsExpressions = new HashSet(); - for (JsonOwlObject m3Expression : expressions) { - OWLClassExpression ce = parse(g, m3Expression); - clsExpressions.add(ce); - } - if (type == JsonOwlObjectType.UnionOf) { - return g.getDataFactory().getOWLObjectUnionOf(clsExpressions); - } - else if (type == JsonOwlObjectType.IntersectionOf) { - return g.getDataFactory().getOWLObjectIntersectionOf(clsExpressions); - } - else { - throw new UnknownIdentifierException("Unsupported expression type: "+type); - } - } - - static Set parse(MinervaOWLGraphWrapper g, JsonOwlObject[] expressions) - throws Exception { - if (expressions.length == 0) { - throw new Exception("Missing expressions: empty expression list is not allowed."); - } - Set clsExpressions = new HashSet(); - for (JsonOwlObject m3Expression : expressions) { - OWLClassExpression ce = parse(g, m3Expression); - clsExpressions.add(ce); - } - return clsExpressions; - } - } + private static MinervaOWLGraphWrapper g = null; + private static CurieHandler curieHandler = DefaultCurieHandler.getDefaultHandler(); + private static OWLOntologyManager m = null; + private static OWLDataFactory f = null; + private static OWLObjectProperty partOf = null; + + @BeforeClass + public static void setUpBeforeClass() throws Exception { + ParserWrapper pw = new ParserWrapper(); + File file = new File("src/test/resources/mgi-go.obo").getCanonicalFile(); + OWLOntology ont = pw.parseOWL(IRI.create(file)); + g = new MinervaOWLGraphWrapper(ont); + f = g.getDataFactory(); + m = g.getManager(); + partOf = g.getOWLObjectPropertyByIdentifier("BFO:0000050"); + } + + @AfterClass + public static void tearDownAfterClass() throws Exception { + IOUtils.closeQuietly(g); + } + + @Test + public void testIRIConversion() throws Exception { + IRI evidenceIRI = AnnotationShorthand.evidence.getAnnotationProperty(); + OWLAnnotationProperty p = f.getOWLAnnotationProperty(evidenceIRI); + IRI iriValue = IRI.generateDocumentIRI(); + OWLAnnotation owlAnnotation = f.getOWLAnnotation(p, iriValue); + JsonAnnotation json = JsonTools.create(p, owlAnnotation.getValue(), null, curieHandler); + assertEquals(AnnotationShorthand.evidence.name(), json.key); + assertEquals(curieHandler.getCuri(iriValue), json.value); + assertEquals("IRI", json.valueType); + } + + @Test + public void testSimpleClass() throws Exception { + testSimpleClassExpression(g.getOWLClassByIdentifier("GO:0000003"), "class"); + } + + @Test + public void testSimpleSVF() throws Exception { + OWLObjectSomeValuesFrom svf = f.getOWLObjectSomeValuesFrom(g.getOWLObjectPropertyByIdentifier("BFO:0000050"), g.getOWLClassByIdentifier("GO:0000003")); + testSimpleClassExpression(svf, "svf"); + } + + @Test + public void testSimpleUnion() throws Exception { + OWLObjectSomeValuesFrom svf = f.getOWLObjectSomeValuesFrom(g.getOWLObjectPropertyByIdentifier("BFO:0000050"), g.getOWLClassByIdentifier("GO:0000003")); + OWLClass cls = 
g.getOWLClassByIdentifier("GO:0000122"); + testSimpleClassExpression(f.getOWLObjectUnionOf(cls, svf), "union"); + } + + @Test + public void testSimpleIntersection() throws Exception { + OWLObjectSomeValuesFrom svf = f.getOWLObjectSomeValuesFrom(g.getOWLObjectPropertyByIdentifier("BFO:0000050"), g.getOWLClassByIdentifier("GO:0000003")); + OWLClass cls = g.getOWLClassByIdentifier("GO:0000122"); + testSimpleClassExpression(f.getOWLObjectIntersectionOf(cls, svf), "intersection"); + } + + @Test + public void testAnnotations() throws Exception { + // setup test model/ontology + OWLOntology o = m.createOntology(); + OWLImportsDeclaration importDeclaration = f.getOWLImportsDeclaration(g.getSourceOntology().getOntologyID().getOntologyIRI().get()); + m.applyChange(new AddImport(o, importDeclaration)); + + final IRI i1IRI = IRI.generateDocumentIRI(); + final OWLNamedIndividual ni1 = f.getOWLNamedIndividual(i1IRI); + // declare individual + m.addAxiom(o, f.getOWLDeclarationAxiom(ni1)); + // add annotations + m.addAxiom(o, f.getOWLAnnotationAssertionAxiom(i1IRI, + f.getOWLAnnotation(f.getOWLAnnotationProperty( + AnnotationShorthand.comment.getAnnotationProperty()), + f.getOWLLiteral("Comment 1")))); + m.addAxiom(o, f.getOWLAnnotationAssertionAxiom(i1IRI, + f.getOWLAnnotation(f.getOWLAnnotationProperty( + AnnotationShorthand.comment.getAnnotationProperty()), + f.getOWLLiteral("Comment 2")))); + // declare type + m.addAxiom(o, f.getOWLClassAssertionAxiom(g.getOWLClassByIdentifier("GO:0000003"), ni1)); + + MolecularModelJsonRenderer r = new MolecularModelJsonRenderer(null, o, null, curieHandler); + + JsonOwlIndividual jsonOwlIndividualOriginal = r.renderObject(ni1); + assertEquals(2, jsonOwlIndividualOriginal.annotations.length); + + String json = MolecularModelJsonRenderer.renderToJson(jsonOwlIndividualOriginal, true); + + JsonOwlIndividual jsonOwlIndividualParse = MolecularModelJsonRenderer.parseFromJson(json, JsonOwlIndividual.class); + + assertNotNull(jsonOwlIndividualParse); + assertEquals(jsonOwlIndividualOriginal, jsonOwlIndividualParse); + } + + private void testSimpleClassExpression(OWLClassExpression ce, String expectedJsonType) throws Exception { + // setup test model/ontology + OWLOntology o = m.createOntology(); + OWLImportsDeclaration importDeclaration = f.getOWLImportsDeclaration(g.getSourceOntology().getOntologyID().getOntologyIRI().get()); + m.applyChange(new AddImport(o, importDeclaration)); + + // create indivdual with a ce type + final IRI i1IRI = IRI.generateDocumentIRI(); + final OWLNamedIndividual ni1 = f.getOWLNamedIndividual(i1IRI); + // declare individual + m.addAxiom(o, f.getOWLDeclarationAxiom(ni1)); + // declare type + m.addAxiom(o, f.getOWLClassAssertionAxiom(ce, ni1)); + + + MolecularModelJsonRenderer r = new MolecularModelJsonRenderer(null, o, null, curieHandler); + + JsonOwlIndividual jsonOwlIndividualOriginal = r.renderObject(ni1); + + String json = MolecularModelJsonRenderer.renderToJson(jsonOwlIndividualOriginal, true); + assertTrue(json, json.contains("\"type\": \"" + expectedJsonType + "\"")); + + JsonOwlIndividual jsonOwlIndividualParse = MolecularModelJsonRenderer.parseFromJson(json, JsonOwlIndividual.class); + + assertNotNull(jsonOwlIndividualParse); + assertEquals(jsonOwlIndividualOriginal, jsonOwlIndividualParse); + + Set ces = TestJsonOwlObjectParser.parse(new MinervaOWLGraphWrapper(o), jsonOwlIndividualParse.type); + assertEquals(1, ces.size()); + assertEquals(ce, ces.iterator().next()); + } + + @Test + public void testPartialRenderer() throws Exception { + 
OWLOntology o = m.createOntology(); + OWLImportsDeclaration importDeclaration = f.getOWLImportsDeclaration(g.getSourceOntology().getOntologyID().getOntologyIRI().get()); + m.applyChange(new AddImport(o, importDeclaration)); + + // individuals + final OWLNamedIndividual a = addIndividual(o, "A", null); + final OWLNamedIndividual b = addIndividual(o, "B", null); + final OWLNamedIndividual c = addIndividual(o, "C", null); + final OWLNamedIndividual d = addIndividual(o, "D", null); + final OWLNamedIndividual e = addIndividual(o, "E", null); + final OWLNamedIndividual f = addIndividual(o, "F", null); + + // links + addFact(o, a, b, partOf); + addFact(o, b, a, partOf); + + addFact(o, b, c, partOf); + addFact(o, d, b, partOf); + addFact(o, e, a, partOf); + addFact(o, a, f, partOf); + + MolecularModelJsonRenderer r = new MolecularModelJsonRenderer(null, o, null, curieHandler); + + final String aId = curieHandler.getCuri(a); + final String bId = curieHandler.getCuri(b); + + Pair pair = r.renderIndividuals(Arrays.asList(a, b)); + assertEquals(2, pair.getLeft().length); + assertEquals(2, pair.getRight().length); + boolean foundAB = false; + boolean foundBA = false; + for (JsonOwlFact fact : pair.getRight()) { + if (aId.equals(fact.subject) && bId.equals(fact.object)) { + foundAB = true; + } + if (bId.equals(fact.subject) && aId.equals(fact.object)) { + foundBA = true; + } + } + assertTrue(foundAB); + assertTrue(foundBA); + } + + + private static OWLNamedIndividual addIndividual(OWLOntology o, String name, OWLClass typeCls) { + final IRI iri = IRI.generateDocumentIRI(); + final OWLNamedIndividual ni = f.getOWLNamedIndividual(iri); + // declare individual + m.addAxiom(o, f.getOWLDeclarationAxiom(ni)); + if (typeCls != null) { + m.addAxiom(o, f.getOWLClassAssertionAxiom(typeCls, ni)); + } + m.addAxiom(o, f.getOWLAnnotationAssertionAxiom(iri, f.getOWLAnnotation(f.getRDFSLabel(), f.getOWLLiteral(name)))); + + return ni; + } + + private static void addFact(OWLOntology o, OWLNamedIndividual source, OWLNamedIndividual target, OWLObjectProperty property) { + m.addAxiom(o, f.getOWLObjectPropertyAssertionAxiom(property, source, target)); + } + + static class TestJsonOwlObjectParser { + static OWLClassExpression parse(MinervaOWLGraphWrapper g, JsonOwlObject expression) + throws Exception { + if (expression == null) { + throw new Exception("Missing expression: null is not a valid expression."); + } + if (expression.type == null) { + throw new Exception("An expression type is required."); + } + if (JsonOwlObjectType.Class == expression.type) { + if (expression.id == null) { + throw new Exception("Missing literal for expression of type 'class'"); + } + if (StringUtils.containsWhitespace(expression.id)) { + throw new Exception("Identifiers may not contain whitespaces: '" + expression.id + "'"); + } + OWLClass cls = g.getOWLClassByIdentifier(expression.id); + if (cls == null) { + throw new Exception("Could not retrieve a class for id: " + expression.id); + } + return cls; + } else if (JsonOwlObjectType.SomeValueFrom == expression.type) { + if (expression.property == null) { + throw new Exception("Missing property for expression of type 'svf'"); + } + if (expression.property.type != JsonOwlObjectType.ObjectProperty) { + throw new Exception("Unexpected type for Property in 'svf': " + expression.property.type); + } + if (expression.property.id == null) { + throw new Exception("Missing property id for expression of type 'svf'"); + } + OWLObjectProperty p = g.getOWLObjectPropertyByIdentifier(expression.property.id); + if 
(p == null) {
+ throw new UnknownIdentifierException("Could not find a property for: " + expression.property);
+ }
+ if (expression.filler != null) {
+ OWLClassExpression ce = parse(g, expression.filler);
+ return g.getDataFactory().getOWLObjectSomeValuesFrom(p, ce);
+ } else {
+ throw new Exception("Missing literal or expression for expression of type 'svf'.");
+ }
+ } else if (JsonOwlObjectType.IntersectionOf == expression.type) {
+ return parse(g, expression.expressions, JsonOwlObjectType.IntersectionOf);
+ } else if (JsonOwlObjectType.UnionOf == expression.type) {
+ return parse(g, expression.expressions, JsonOwlObjectType.UnionOf);
+ } else {
+ throw new UnknownIdentifierException("Unknown expression type: " + expression.type);
+ }
+ }
+
+ static OWLClassExpression parse(MinervaOWLGraphWrapper g, JsonOwlObject[] expressions, JsonOwlObjectType type)
+ throws Exception {
+ if (expressions.length == 0) {
+ throw new Exception("Missing expressions: empty expression list is not allowed.");
+ }
+ if (expressions.length == 1) {
+ return parse(g, expressions[0]);
+ }
+ Set clsExpressions = new HashSet();
+ for (JsonOwlObject m3Expression : expressions) {
+ OWLClassExpression ce = parse(g, m3Expression);
+ clsExpressions.add(ce);
+ }
+ if (type == JsonOwlObjectType.UnionOf) {
+ return g.getDataFactory().getOWLObjectUnionOf(clsExpressions);
+ } else if (type == JsonOwlObjectType.IntersectionOf) {
+ return g.getDataFactory().getOWLObjectIntersectionOf(clsExpressions);
+ } else {
+ throw new UnknownIdentifierException("Unsupported expression type: " + type);
+ }
+ }
+
+ static Set parse(MinervaOWLGraphWrapper g, JsonOwlObject[] expressions)
+ throws Exception {
+ if (expressions.length == 0) {
+ throw new Exception("Missing expressions: empty expression list is not allowed.");
+ }
+ Set clsExpressions = new HashSet();
+ for (JsonOwlObject m3Expression : expressions) {
+ OWLClassExpression ce = parse(g, m3Expression);
+ clsExpressions.add(ce);
+ }
+ return clsExpressions;
+ }
+ }
 }
diff --git a/minerva-core/src/test/java/org/geneontology/minerva/util/AnnotationShortHandTest.java b/minerva-core/src/test/java/org/geneontology/minerva/util/AnnotationShortHandTest.java
index 96cc2044..b28418f4 100644
--- a/minerva-core/src/test/java/org/geneontology/minerva/util/AnnotationShortHandTest.java
+++ b/minerva-core/src/test/java/org/geneontology/minerva/util/AnnotationShortHandTest.java
@@ -1,26 +1,26 @@
 package org.geneontology.minerva.util;
-import static org.junit.Assert.assertEquals;
-
 import org.geneontology.minerva.curie.CurieHandler;
 import org.geneontology.minerva.curie.DefaultCurieHandler;
 import org.junit.Test;
+import static org.junit.Assert.assertEquals;
+
 public class AnnotationShortHandTest {
- @Test
- public void testRoundTrip() throws Exception {
- CurieHandler curieHandler = DefaultCurieHandler.getDefaultHandler();
- for (AnnotationShorthand sh : AnnotationShorthand.values()) {
- String iriString = sh.getAnnotationProperty().toString();
- String curie = curieHandler.getCuri(sh.getAnnotationProperty());
- String json = sh.getShorthand();
- AnnotationShorthand roundTrip = AnnotationShorthand.getShorthand(json, curieHandler);
- AnnotationShorthand resolvedByIRI = AnnotationShorthand.getShorthand(iriString, curieHandler);
- AnnotationShorthand resolvedByCurie = AnnotationShorthand.getShorthand(curie, curieHandler);
- assertEquals(sh, roundTrip);
- assertEquals(sh, resolvedByIRI);
- assertEquals(sh, resolvedByCurie);
- }
- }
+ @Test
+ public void testRoundTrip() throws Exception {
+ CurieHandler curieHandler = DefaultCurieHandler.getDefaultHandler();
+ for (AnnotationShorthand sh : AnnotationShorthand.values()) {
+ String iriString = sh.getAnnotationProperty().toString();
+ String curie = curieHandler.getCuri(sh.getAnnotationProperty());
+ String json = sh.getShorthand();
+ AnnotationShorthand roundTrip = AnnotationShorthand.getShorthand(json, curieHandler);
+ AnnotationShorthand resolvedByIRI = AnnotationShorthand.getShorthand(iriString, curieHandler);
+ AnnotationShorthand resolvedByCurie = AnnotationShorthand.getShorthand(curie, curieHandler);
+ assertEquals(sh, roundTrip);
+ assertEquals(sh, resolvedByIRI);
+ assertEquals(sh, resolvedByCurie);
+ }
+ }
 }
diff --git a/minerva-core/src/test/java/org/geneontology/minerva/validation/ShexValidatorTest.java b/minerva-core/src/test/java/org/geneontology/minerva/validation/ShexValidatorTest.java
index 9e87a7f0..9692bc9a 100644
--- a/minerva-core/src/test/java/org/geneontology/minerva/validation/ShexValidatorTest.java
+++ b/minerva-core/src/test/java/org/geneontology/minerva/validation/ShexValidatorTest.java
@@ -1,11 +1,7 @@
 package org.geneontology.minerva.validation;
-import static org.junit.Assert.*;
-
-import java.io.File;
-import java.io.IOException;
-import java.util.Map;
-
+import fr.inria.lille.shexjava.schema.ShexSchema;
+import fr.inria.lille.shexjava.schema.parsing.GenParser;
 import org.apache.jena.rdf.model.Model;
 import org.apache.jena.rdf.model.ModelFactory;
 import org.geneontology.minerva.BlazegraphOntologyManager;
@@ -15,96 +11,100 @@
 import org.junit.BeforeClass;
 import org.junit.Test;
-import fr.inria.lille.shexjava.schema.ShexSchema;
-import fr.inria.lille.shexjava.schema.parsing.GenParser;
+import java.io.File;
+import java.io.IOException;
+import java.util.Map;
+
+import static org.junit.Assert.assertFalse;
+import static org.junit.Assert.assertTrue;
 public class ShexValidatorTest {
- //TODO set up some kind of a configuration file that encapsulates these files
- static final String go_lego_journal_file = "/tmp/test-go-lego-blazegraph.jnl";
- static final String schemaFile = "src/test/resources/validation/go-cam-shapes.shex";
- static final String metadataSchemaFile = "src/test/resources/validation/metadata-shapes.shex";
- static final String metadataShapemapFile = "src/test/resources/validation/metadata.shapemap";
- static final String mainShapemapFile = "src/test/resources/validation/go-cam-shapes.shapeMap";
- static ShexValidator shex;
- static ShexValidator shexMeta;
- static BlazegraphOntologyManager go_lego;
-
- @BeforeClass
- public static void setUpBeforeClass() throws Exception {
- go_lego = new BlazegraphOntologyManager(go_lego_journal_file, true);
- CurieHandler curieHandler = DefaultCurieHandler.getDefaultHandler();
- shex = new ShexValidator(schemaFile, mainShapemapFile, go_lego, curieHandler);
- shexMeta = new ShexValidator(metadataSchemaFile, metadataShapemapFile, go_lego, curieHandler);
- }
-
- @AfterClass
- public static void tearDownAfterClass() throws Exception {
- go_lego.dispose();
- }
-
- @Test
- public void testSchemaParse() throws Exception {
- ShexSchema schema = GenParser.parseSchema(new File(schemaFile).toPath());
- }
-
- @Test
- public void testMainMapParse() throws IOException {
- Map query_map = ShexValidator.makeGoQueryMap(mainShapemapFile);
- }
-
- @Test
- public void testMetaMapParse() throws IOException {
- Map query_map = ShexValidator.makeGoQueryMap(metadataShapemapFile);
- System.out.println(query_map);
- }
-
- @Test
- public void testShexShouldPass() throws Exception {
- boolean
should_be_valid = true; - validate("src/test/resources/validation/should_pass/", shex, should_be_valid); - } - - @Test - public void testShexShouldFail() throws Exception { - boolean should_be_valid = false; - validate("src/test/resources/validation/should_fail/", shex, should_be_valid); - } - - @Test - public void testShexMetadata() throws IOException { - boolean should_be_valid = true; - validate("src/test/resources/validation/should_pass/", shexMeta, should_be_valid); - } - - @Test - public void testHandleNegatedTermInReport() throws Exception { - validate(new File("src/test/resources/validation/model_test/ZFIN_ZDB-GENE-030131-514.ttl"), shex, true); - } - - public void validate(String dir, ShexValidator shex, boolean shouldBeValid) throws IOException { - File directory = new File(dir); - if(directory.isDirectory()) { - for(File file : directory.listFiles()) { - validate(file, shex, shouldBeValid); - } - } - } - - private void validate(File file, ShexValidator shex, boolean shouldBeValid) throws IOException { - if(file.getName().endsWith("ttl")) { - Model test_model = ModelFactory.createDefaultModel(); - test_model.read(file.getAbsolutePath()); - System.out.println("validating "+file.getAbsolutePath()+" size "+test_model.size()); - //Note that in the live system, Arachne is executed on the model prior to this step, potentially adding inferred classes that are missed with this. - //this is faster and useful for debugging the shex though. See org.geneontology.minerva.server.validation.ValidationTest in the Test branch of Minerva server for a more complete test - test_model = shex.enrichSuperClasses(test_model); - ShexValidationReport report = shex.runShapeMapValidation(test_model); - if(shouldBeValid) { - assertTrue(file+" not conformant "+report.getAsText()+"\n"+report.getAsTab(""), report.isConformant()); - }else { - assertFalse(file+" is conformant (should not be)", report.isConformant()); - } - } - } + //TODO set up some kind of a configuration file that encapsulates these files + static final String go_lego_journal_file = "/tmp/test-go-lego-blazegraph.jnl"; + static final String schemaFile = "src/test/resources/validation/go-cam-shapes.shex"; + static final String metadataSchemaFile = "src/test/resources/validation/metadata-shapes.shex"; + static final String metadataShapemapFile = "src/test/resources/validation/metadata.shapemap"; + static final String mainShapemapFile = "src/test/resources/validation/go-cam-shapes.shapeMap"; + static ShexValidator shex; + static ShexValidator shexMeta; + static BlazegraphOntologyManager go_lego; + + @BeforeClass + public static void setUpBeforeClass() throws Exception { + go_lego = new BlazegraphOntologyManager(go_lego_journal_file, true); + CurieHandler curieHandler = DefaultCurieHandler.getDefaultHandler(); + shex = new ShexValidator(schemaFile, mainShapemapFile, go_lego, curieHandler); + shexMeta = new ShexValidator(metadataSchemaFile, metadataShapemapFile, go_lego, curieHandler); + } + + @AfterClass + public static void tearDownAfterClass() throws Exception { + go_lego.dispose(); + } + + @Test + public void testSchemaParse() throws Exception { + ShexSchema schema = GenParser.parseSchema(new File(schemaFile).toPath()); + } + + @Test + public void testMainMapParse() throws IOException { + Map query_map = ShexValidator.makeGoQueryMap(mainShapemapFile); + } + + @Test + public void testMetaMapParse() throws IOException { + Map query_map = ShexValidator.makeGoQueryMap(metadataShapemapFile); + System.out.println(query_map); + } + + @Test + public void 
testShexShouldPass() throws Exception { + boolean should_be_valid = true; + validate("src/test/resources/validation/should_pass/", shex, should_be_valid); + } + + @Test + public void testShexShouldFail() throws Exception { + boolean should_be_valid = false; + validate("src/test/resources/validation/should_fail/", shex, should_be_valid); + } + + @Test + public void testShexMetadata() throws IOException { + boolean should_be_valid = true; + validate("src/test/resources/validation/should_pass/", shexMeta, should_be_valid); + } + + @Test + public void testHandleNegatedTermInReport() throws Exception { + validate(new File("src/test/resources/validation/model_test/ZFIN_ZDB-GENE-030131-514.ttl"), shex, true); + } + + public void validate(String dir, ShexValidator shex, boolean shouldBeValid) throws IOException { + File directory = new File(dir); + if (directory.isDirectory()) { + for (File file : directory.listFiles()) { + validate(file, shex, shouldBeValid); + } + } + } + + private void validate(File file, ShexValidator shex, boolean shouldBeValid) throws IOException { + if (file.getName().endsWith("ttl")) { + Model test_model = ModelFactory.createDefaultModel(); + test_model.read(file.getAbsolutePath()); + System.out.println("validating " + file.getAbsolutePath() + " size " + test_model.size()); + //Note that in the live system, Arachne is executed on the model prior to this step, potentially adding inferred classes that are missed with this. + //this is faster and useful for debugging the shex though. See org.geneontology.minerva.server.validation.ValidationTest in the Test branch of Minerva server for a more complete test + test_model = shex.enrichSuperClasses(test_model); + ShexValidationReport report = shex.runShapeMapValidation(test_model); + if (shouldBeValid) { + assertTrue(file + " not conformant " + report.getAsText() + "\n" + report.getAsTab(""), report.isConformant()); + } else { + assertFalse(file + " is conformant (should not be)", report.isConformant()); + } + } + } } diff --git a/minerva-core/src/test/resources/log4j.properties b/minerva-core/src/test/resources/log4j.properties index 6336feba..d1ca3d76 100644 --- a/minerva-core/src/test/resources/log4j.properties +++ b/minerva-core/src/test/resources/log4j.properties @@ -1,11 +1,9 @@ log4j.appender.console=org.apache.log4j.ConsoleAppender log4j.appender.console.layout=org.apache.log4j.PatternLayout log4j.appender.console.layout.ConversionPattern=%d %-5p (%c{1}:%L) %m\n - -log4j.logger.org.semanticweb.elk = ERROR +log4j.logger.org.semanticweb.elk=ERROR log4j.logger.org.obolibrary.obo2owl=OFF log4j.logger.org.semanticweb.owlapi=ERROR log4j.logger.org.apache.jena=ERROR - log4j.rootLogger=INFO, console diff --git a/minerva-core/src/test/resources/mmg/catalog-v001.xml b/minerva-core/src/test/resources/mmg/catalog-v001.xml index 9f20a5ab..058b8b53 100644 --- a/minerva-core/src/test/resources/mmg/catalog-v001.xml +++ b/minerva-core/src/test/resources/mmg/catalog-v001.xml @@ -1,4 +1,4 @@ - + diff --git a/minerva-json/pom.xml b/minerva-json/pom.xml index 773392dc..1c601382 100644 --- a/minerva-json/pom.xml +++ b/minerva-json/pom.xml @@ -1,18 +1,18 @@ - 4.0.0 - - minerva - org.geneontology - 0.6.1 - - minerva-json - Minerva-JSON-Model + xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd"> + 4.0.0 + + minerva + org.geneontology + 0.6.1 + + minerva-json + Minerva-JSON-Model - - - com.google.code.gson - gson - - + + + com.google.code.gson + gson + + diff --git 
a/minerva-json/src/main/java/org/geneontology/minerva/json/JsonAnnotatedObject.java b/minerva-json/src/main/java/org/geneontology/minerva/json/JsonAnnotatedObject.java index a2b1050d..7ef8cc97 100644 --- a/minerva-json/src/main/java/org/geneontology/minerva/json/JsonAnnotatedObject.java +++ b/minerva-json/src/main/java/org/geneontology/minerva/json/JsonAnnotatedObject.java @@ -4,38 +4,38 @@ abstract class JsonAnnotatedObject { - public JsonAnnotation[] annotations; + public JsonAnnotation[] annotations; - /* (non-Javadoc) - * @see java.lang.Object#hashCode() - */ - @Override - public int hashCode() { - final int prime = 31; - int result = 1; - result = prime * result + Arrays.hashCode(annotations); - return result; - } + /* (non-Javadoc) + * @see java.lang.Object#hashCode() + */ + @Override + public int hashCode() { + final int prime = 31; + int result = 1; + result = prime * result + Arrays.hashCode(annotations); + return result; + } - /* (non-Javadoc) - * @see java.lang.Object#equals(java.lang.Object) - */ - @Override - public boolean equals(Object obj) { - if (this == obj) { - return true; - } - if (obj == null) { - return false; - } - if (getClass() != obj.getClass()) { - return false; - } - JsonAnnotatedObject other = (JsonAnnotatedObject) obj; - if (!Arrays.equals(annotations, other.annotations)) { - return false; - } - return true; - } + /* (non-Javadoc) + * @see java.lang.Object#equals(java.lang.Object) + */ + @Override + public boolean equals(Object obj) { + if (this == obj) { + return true; + } + if (obj == null) { + return false; + } + if (getClass() != obj.getClass()) { + return false; + } + JsonAnnotatedObject other = (JsonAnnotatedObject) obj; + if (!Arrays.equals(annotations, other.annotations)) { + return false; + } + return true; + } } \ No newline at end of file diff --git a/minerva-json/src/main/java/org/geneontology/minerva/json/JsonAnnotation.java b/minerva-json/src/main/java/org/geneontology/minerva/json/JsonAnnotation.java index 31f7cb98..3783b9e9 100644 --- a/minerva-json/src/main/java/org/geneontology/minerva/json/JsonAnnotation.java +++ b/minerva-json/src/main/java/org/geneontology/minerva/json/JsonAnnotation.java @@ -3,74 +3,74 @@ import com.google.gson.annotations.SerializedName; public class JsonAnnotation { - - public String key; - public String value; - @SerializedName("value-type") - public String valueType; // optional, defaults to OWL string literal for null - public String label; // optional, a label for the value, which may be a String-form IRI - - static JsonAnnotation create(String key, String value, String type, String label) { - JsonAnnotation a = new JsonAnnotation(); - a.key = key; - a.value = value; - a.valueType = type; - a.label = label; - return a; - } - - @Override - public int hashCode() { - final int prime = 31; - int result = 1; - result = prime * result + ((key == null) ? 0 : key.hashCode()); - result = prime * result + ((value == null) ? 0 : value.hashCode()); - result = prime * result + ((valueType == null) ? 0 : valueType.hashCode()); - result = prime * result + ((label == null) ? 
0 : label.hashCode()); - return result; - } - @Override - public boolean equals(Object obj) { - if (this == obj) { - return true; - } - if (obj == null) { - return false; - } - if (getClass() != obj.getClass()) { - return false; - } - JsonAnnotation other = (JsonAnnotation) obj; - if (key == null) { - if (other.key != null) { - return false; - } - } else if (!key.equals(other.key)) { - return false; - } - if (value == null) { - if (other.value != null) { - return false; - } - } else if (!value.equals(other.value)) { - return false; - } - if (valueType == null) { - if (other.valueType != null) { - return false; - } - } else if (!valueType.equals(other.valueType)) { - return false; - } - if (label == null) { - if (other.label != null) { - return false; - } - } else if (!label.equals(other.label)) { - return false; - } - return true; - } + public String key; + public String value; + @SerializedName("value-type") + public String valueType; // optional, defaults to OWL string literal for null + public String label; // optional, a label for the value, which may be a String-form IRI + + static JsonAnnotation create(String key, String value, String type, String label) { + JsonAnnotation a = new JsonAnnotation(); + a.key = key; + a.value = value; + a.valueType = type; + a.label = label; + return a; + } + + @Override + public int hashCode() { + final int prime = 31; + int result = 1; + result = prime * result + ((key == null) ? 0 : key.hashCode()); + result = prime * result + ((value == null) ? 0 : value.hashCode()); + result = prime * result + ((valueType == null) ? 0 : valueType.hashCode()); + result = prime * result + ((label == null) ? 0 : label.hashCode()); + return result; + } + + @Override + public boolean equals(Object obj) { + if (this == obj) { + return true; + } + if (obj == null) { + return false; + } + if (getClass() != obj.getClass()) { + return false; + } + JsonAnnotation other = (JsonAnnotation) obj; + if (key == null) { + if (other.key != null) { + return false; + } + } else if (!key.equals(other.key)) { + return false; + } + if (value == null) { + if (other.value != null) { + return false; + } + } else if (!value.equals(other.value)) { + return false; + } + if (valueType == null) { + if (other.valueType != null) { + return false; + } + } else if (!valueType.equals(other.valueType)) { + return false; + } + if (label == null) { + if (other.label != null) { + return false; + } + } else if (!label.equals(other.label)) { + return false; + } + return true; + } } \ No newline at end of file diff --git a/minerva-json/src/main/java/org/geneontology/minerva/json/JsonEvidenceInfo.java b/minerva-json/src/main/java/org/geneontology/minerva/json/JsonEvidenceInfo.java index 2ad94d83..8b62cc2b 100644 --- a/minerva-json/src/main/java/org/geneontology/minerva/json/JsonEvidenceInfo.java +++ b/minerva-json/src/main/java/org/geneontology/minerva/json/JsonEvidenceInfo.java @@ -1,7 +1,7 @@ package org.geneontology.minerva.json; public class JsonEvidenceInfo { - public String id; - public String label; - public String code; + public String id; + public String label; + public String code; } \ No newline at end of file diff --git a/minerva-json/src/main/java/org/geneontology/minerva/json/JsonModel.java b/minerva-json/src/main/java/org/geneontology/minerva/json/JsonModel.java index cd2b84d0..52cb6f71 100644 --- a/minerva-json/src/main/java/org/geneontology/minerva/json/JsonModel.java +++ b/minerva-json/src/main/java/org/geneontology/minerva/json/JsonModel.java @@ -1,43 +1,43 @@ package 
org.geneontology.minerva.json; -import java.util.Arrays; - import com.google.gson.annotations.SerializedName; +import java.util.Arrays; + public class JsonModel extends JsonAnnotatedObject { - - @SerializedName("id") - public String modelId; - public JsonOwlIndividual[] individuals; - public JsonOwlFact[] facts; - @Override - public int hashCode() { - final int prime = 31; - int result = super.hashCode(); - result = prime * result + Arrays.hashCode(facts); - result = prime * result + Arrays.hashCode(individuals); - return result; - } + @SerializedName("id") + public String modelId; + public JsonOwlIndividual[] individuals; + public JsonOwlFact[] facts; + + @Override + public int hashCode() { + final int prime = 31; + int result = super.hashCode(); + result = prime * result + Arrays.hashCode(facts); + result = prime * result + Arrays.hashCode(individuals); + return result; + } - @Override - public boolean equals(Object obj) { - if (this == obj) { - return true; - } - if (!super.equals(obj)) { - return false; - } - if (getClass() != obj.getClass()) { - return false; - } - JsonModel other = (JsonModel) obj; - if (!Arrays.equals(facts, other.facts)) { - return false; - } - if (!Arrays.equals(individuals, other.individuals)) { - return false; - } - return true; - } + @Override + public boolean equals(Object obj) { + if (this == obj) { + return true; + } + if (!super.equals(obj)) { + return false; + } + if (getClass() != obj.getClass()) { + return false; + } + JsonModel other = (JsonModel) obj; + if (!Arrays.equals(facts, other.facts)) { + return false; + } + if (!Arrays.equals(individuals, other.individuals)) { + return false; + } + return true; + } } \ No newline at end of file diff --git a/minerva-json/src/main/java/org/geneontology/minerva/json/JsonOwlFact.java b/minerva-json/src/main/java/org/geneontology/minerva/json/JsonOwlFact.java index 79462ead..aa3df6ae 100644 --- a/minerva-json/src/main/java/org/geneontology/minerva/json/JsonOwlFact.java +++ b/minerva-json/src/main/java/org/geneontology/minerva/json/JsonOwlFact.java @@ -3,58 +3,58 @@ import com.google.gson.annotations.SerializedName; public class JsonOwlFact extends JsonAnnotatedObject { - public String subject; - public String property; - - @SerializedName("property-label") - public String propertyLabel; - public String object; + public String subject; + public String property; - @Override - public int hashCode() { - final int prime = 31; - int result = super.hashCode(); - result = prime * result + ((object == null) ? 0 : object.hashCode()); - result = prime * result - + ((property == null) ? 0 : property.hashCode()); - result = prime * result + ((subject == null) ? 0 : subject.hashCode()); - return result; - } + @SerializedName("property-label") + public String propertyLabel; + public String object; + + @Override + public int hashCode() { + final int prime = 31; + int result = super.hashCode(); + result = prime * result + ((object == null) ? 0 : object.hashCode()); + result = prime * result + + ((property == null) ? 0 : property.hashCode()); + result = prime * result + ((subject == null) ? 
0 : subject.hashCode()); + return result; + } + + @Override + public boolean equals(Object obj) { + if (this == obj) { + return true; + } + if (!super.equals(obj)) { + return false; + } + if (getClass() != obj.getClass()) { + return false; + } + JsonOwlFact other = (JsonOwlFact) obj; + if (object == null) { + if (other.object != null) { + return false; + } + } else if (!object.equals(other.object)) { + return false; + } + if (property == null) { + if (other.property != null) { + return false; + } + } else if (!property.equals(other.property)) { + return false; + } + if (subject == null) { + if (other.subject != null) { + return false; + } + } else if (!subject.equals(other.subject)) { + return false; + } + return true; + } - @Override - public boolean equals(Object obj) { - if (this == obj) { - return true; - } - if (!super.equals(obj)) { - return false; - } - if (getClass() != obj.getClass()) { - return false; - } - JsonOwlFact other = (JsonOwlFact) obj; - if (object == null) { - if (other.object != null) { - return false; - } - } else if (!object.equals(other.object)) { - return false; - } - if (property == null) { - if (other.property != null) { - return false; - } - } else if (!property.equals(other.property)) { - return false; - } - if (subject == null) { - if (other.subject != null) { - return false; - } - } else if (!subject.equals(other.subject)) { - return false; - } - return true; - } - } \ No newline at end of file diff --git a/minerva-json/src/main/java/org/geneontology/minerva/json/JsonOwlIndividual.java b/minerva-json/src/main/java/org/geneontology/minerva/json/JsonOwlIndividual.java index 692b07ea..7b49bb20 100644 --- a/minerva-json/src/main/java/org/geneontology/minerva/json/JsonOwlIndividual.java +++ b/minerva-json/src/main/java/org/geneontology/minerva/json/JsonOwlIndividual.java @@ -1,52 +1,52 @@ package org.geneontology.minerva.json; -import java.util.Arrays; - import com.google.gson.annotations.SerializedName; +import java.util.Arrays; + public class JsonOwlIndividual extends JsonAnnotatedObject { - public String id; - public JsonOwlObject[] type; - - @SerializedName("inferred-type") - public JsonOwlObject[] inferredType; - - @SerializedName("root-type") - public JsonOwlObject[] rootType; - - @SerializedName("inferred-type-with-all") - public JsonOwlObject[] inferredTypeWithAll; - - @Override - public int hashCode() { - final int prime = 31; - int result = super.hashCode(); - result = prime * result + ((id == null) ? 0 : id.hashCode()); - result = prime * result + Arrays.hashCode(inferredType); - result = prime * result + Arrays.hashCode(type); - return result; - } - - @Override - public boolean equals(Object obj) { - if (this == obj) - return true; - if (!super.equals(obj)) - return false; - if (getClass() != obj.getClass()) - return false; - JsonOwlIndividual other = (JsonOwlIndividual) obj; - if (id == null) { - if (other.id != null) - return false; - } else if (!id.equals(other.id)) - return false; - if (!Arrays.equals(inferredType, other.inferredType)) - return false; - if (!Arrays.equals(type, other.type)) - return false; - return true; - } + public String id; + public JsonOwlObject[] type; + + @SerializedName("inferred-type") + public JsonOwlObject[] inferredType; + + @SerializedName("root-type") + public JsonOwlObject[] rootType; + + @SerializedName("inferred-type-with-all") + public JsonOwlObject[] inferredTypeWithAll; + + @Override + public int hashCode() { + final int prime = 31; + int result = super.hashCode(); + result = prime * result + ((id == null) ? 
0 : id.hashCode()); + result = prime * result + Arrays.hashCode(inferredType); + result = prime * result + Arrays.hashCode(type); + return result; + } + + @Override + public boolean equals(Object obj) { + if (this == obj) + return true; + if (!super.equals(obj)) + return false; + if (getClass() != obj.getClass()) + return false; + JsonOwlIndividual other = (JsonOwlIndividual) obj; + if (id == null) { + if (other.id != null) + return false; + } else if (!id.equals(other.id)) + return false; + if (!Arrays.equals(inferredType, other.inferredType)) + return false; + if (!Arrays.equals(type, other.type)) + return false; + return true; + } } \ No newline at end of file diff --git a/minerva-json/src/main/java/org/geneontology/minerva/json/JsonOwlObject.java b/minerva-json/src/main/java/org/geneontology/minerva/json/JsonOwlObject.java index 47db2947..22c53558 100644 --- a/minerva-json/src/main/java/org/geneontology/minerva/json/JsonOwlObject.java +++ b/minerva-json/src/main/java/org/geneontology/minerva/json/JsonOwlObject.java @@ -1,149 +1,149 @@ package org.geneontology.minerva.json; +import com.google.gson.annotations.SerializedName; + import java.util.Arrays; import java.util.List; -import com.google.gson.annotations.SerializedName; - public final class JsonOwlObject extends JsonAnnotatedObject { - - public static enum JsonOwlObjectType { - @SerializedName("svf") - SomeValueFrom, - - @SerializedName("property") - ObjectProperty, - - @SerializedName("class") - Class, - - @SerializedName("intersection") - IntersectionOf, - - @SerializedName("union") - UnionOf, - - @SerializedName("complement") - ComplementOf - - } - - public JsonOwlObject.JsonOwlObjectType type; - public String id; - public String label; - public JsonOwlObject[] expressions; // union, intersection - public JsonOwlObject property; - public JsonOwlObject filler; - - static JsonOwlObject createCls(String id, String label) { - JsonOwlObject json = new JsonOwlObject(); - json.type = JsonOwlObjectType.Class; - json.id = id; - json.label = label; - return json; - } - - static JsonOwlObject createProperty(String id, String label) { - JsonOwlObject json = new JsonOwlObject(); - json.type = JsonOwlObjectType.ObjectProperty; - json.id = id; - json.label = label; - return json; - } - - public static JsonOwlObject createIntersection(List expressions) { - JsonOwlObject json = new JsonOwlObject(); - json.type = JsonOwlObjectType.IntersectionOf; - if (expressions != null && !expressions.isEmpty()) { - json.expressions = expressions.toArray(new JsonOwlObject[expressions.size()]); - } - return json; - } - - public static JsonOwlObject createUnion(List expressions) { - JsonOwlObject json = new JsonOwlObject(); - json.type = JsonOwlObjectType.UnionOf; - if (expressions != null && !expressions.isEmpty()) { - json.expressions = expressions.toArray(new JsonOwlObject[expressions.size()]); - } - return json; - } - - public static JsonOwlObject createSvf(JsonOwlObject prop, JsonOwlObject filler) { - JsonOwlObject json = new JsonOwlObject(); - json.type = JsonOwlObjectType.SomeValueFrom; - json.property = prop; - json.filler = filler; - return json; - } - - public static JsonOwlObject createComplement(JsonOwlObject filler) { - JsonOwlObject json = new JsonOwlObject(); - json.type = JsonOwlObjectType.ComplementOf; - json.filler = filler; - return json; - } - - @Override - public int hashCode() { - final int prime = 31; - int result = super.hashCode(); - result = prime * result + Arrays.hashCode(expressions); - result = prime * result + ((filler == null) ? 
0 : filler.hashCode()); - result = prime * result + ((id == null) ? 0 : id.hashCode()); - result = prime * result + ((label == null) ? 0 : label.hashCode()); - result = prime * result - + ((property == null) ? 0 : property.hashCode()); - result = prime * result + ((type == null) ? 0 : type.hashCode()); - return result; - } - - @Override - public boolean equals(Object obj) { - if (this == obj) { - return true; - } - if (!super.equals(obj)) { - return false; - } - if (getClass() != obj.getClass()) { - return false; - } - JsonOwlObject other = (JsonOwlObject) obj; - if (!Arrays.equals(expressions, other.expressions)) { - return false; - } - if (filler == null) { - if (other.filler != null) { - return false; - } - } else if (!filler.equals(other.filler)) { - return false; - } - if (id == null) { - if (other.id != null) { - return false; - } - } else if (!id.equals(other.id)) { - return false; - } - if (label == null) { - if (other.label != null) { - return false; - } - } else if (!label.equals(other.label)) { - return false; - } - if (property == null) { - if (other.property != null) { - return false; - } - } else if (!property.equals(other.property)) { - return false; - } - if (type != other.type) { - return false; - } - return true; - } + + public static enum JsonOwlObjectType { + @SerializedName("svf") + SomeValueFrom, + + @SerializedName("property") + ObjectProperty, + + @SerializedName("class") + Class, + + @SerializedName("intersection") + IntersectionOf, + + @SerializedName("union") + UnionOf, + + @SerializedName("complement") + ComplementOf + + } + + public JsonOwlObject.JsonOwlObjectType type; + public String id; + public String label; + public JsonOwlObject[] expressions; // union, intersection + public JsonOwlObject property; + public JsonOwlObject filler; + + static JsonOwlObject createCls(String id, String label) { + JsonOwlObject json = new JsonOwlObject(); + json.type = JsonOwlObjectType.Class; + json.id = id; + json.label = label; + return json; + } + + static JsonOwlObject createProperty(String id, String label) { + JsonOwlObject json = new JsonOwlObject(); + json.type = JsonOwlObjectType.ObjectProperty; + json.id = id; + json.label = label; + return json; + } + + public static JsonOwlObject createIntersection(List expressions) { + JsonOwlObject json = new JsonOwlObject(); + json.type = JsonOwlObjectType.IntersectionOf; + if (expressions != null && !expressions.isEmpty()) { + json.expressions = expressions.toArray(new JsonOwlObject[expressions.size()]); + } + return json; + } + + public static JsonOwlObject createUnion(List expressions) { + JsonOwlObject json = new JsonOwlObject(); + json.type = JsonOwlObjectType.UnionOf; + if (expressions != null && !expressions.isEmpty()) { + json.expressions = expressions.toArray(new JsonOwlObject[expressions.size()]); + } + return json; + } + + public static JsonOwlObject createSvf(JsonOwlObject prop, JsonOwlObject filler) { + JsonOwlObject json = new JsonOwlObject(); + json.type = JsonOwlObjectType.SomeValueFrom; + json.property = prop; + json.filler = filler; + return json; + } + + public static JsonOwlObject createComplement(JsonOwlObject filler) { + JsonOwlObject json = new JsonOwlObject(); + json.type = JsonOwlObjectType.ComplementOf; + json.filler = filler; + return json; + } + + @Override + public int hashCode() { + final int prime = 31; + int result = super.hashCode(); + result = prime * result + Arrays.hashCode(expressions); + result = prime * result + ((filler == null) ? 
0 : filler.hashCode()); + result = prime * result + ((id == null) ? 0 : id.hashCode()); + result = prime * result + ((label == null) ? 0 : label.hashCode()); + result = prime * result + + ((property == null) ? 0 : property.hashCode()); + result = prime * result + ((type == null) ? 0 : type.hashCode()); + return result; + } + + @Override + public boolean equals(Object obj) { + if (this == obj) { + return true; + } + if (!super.equals(obj)) { + return false; + } + if (getClass() != obj.getClass()) { + return false; + } + JsonOwlObject other = (JsonOwlObject) obj; + if (!Arrays.equals(expressions, other.expressions)) { + return false; + } + if (filler == null) { + if (other.filler != null) { + return false; + } + } else if (!filler.equals(other.filler)) { + return false; + } + if (id == null) { + if (other.id != null) { + return false; + } + } else if (!id.equals(other.id)) { + return false; + } + if (label == null) { + if (other.label != null) { + return false; + } + } else if (!label.equals(other.label)) { + return false; + } + if (property == null) { + if (other.property != null) { + return false; + } + } else if (!property.equals(other.property)) { + return false; + } + if (type != other.type) { + return false; + } + return true; + } } \ No newline at end of file diff --git a/minerva-json/src/main/java/org/geneontology/minerva/json/JsonRelationInfo.java b/minerva-json/src/main/java/org/geneontology/minerva/json/JsonRelationInfo.java index dc1ac42b..abd11bb8 100644 --- a/minerva-json/src/main/java/org/geneontology/minerva/json/JsonRelationInfo.java +++ b/minerva-json/src/main/java/org/geneontology/minerva/json/JsonRelationInfo.java @@ -1,7 +1,7 @@ package org.geneontology.minerva.json; public class JsonRelationInfo { - public String id; - public String label; - public boolean relevant; + public String id; + public String label; + public boolean relevant; } \ No newline at end of file diff --git a/minerva-lookup/pom.xml b/minerva-lookup/pom.xml index 39d17f3c..cb124316 100644 --- a/minerva-lookup/pom.xml +++ b/minerva-lookup/pom.xml @@ -1,40 +1,40 @@ - 4.0.0 - - minerva - org.geneontology - 0.6.1 - - minerva-lookup - Minerva-Lookup + xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd"> + 4.0.0 + + minerva + org.geneontology + 0.6.1 + + minerva-lookup + Minerva-Lookup - - - - org.apache.maven.plugins - maven-surefire-plugin - - - - - - - - org.jacoco - jacoco-maven-plugin - - - + + + + org.apache.maven.plugins + maven-surefire-plugin + + + + + + + + org.jacoco + jacoco-maven-plugin + + + - - - org.geneontology - minerva-core - ${project.parent.version} - - + + + org.geneontology + minerva-core + ${project.parent.version} + + diff --git a/minerva-lookup/src/main/java/org/bbop/golr/java/AbstractRetrieveGolr.java b/minerva-lookup/src/main/java/org/bbop/golr/java/AbstractRetrieveGolr.java index 75fe18e0..b858b50a 100644 --- a/minerva-lookup/src/main/java/org/bbop/golr/java/AbstractRetrieveGolr.java +++ b/minerva-lookup/src/main/java/org/bbop/golr/java/AbstractRetrieveGolr.java @@ -1,17 +1,8 @@ package org.bbop.golr.java; -import java.io.IOException; -import java.io.InputStream; -import java.io.UnsupportedEncodingException; -import java.net.HttpURLConnection; -import java.net.URI; -import java.net.URISyntaxException; -import java.net.URL; -import java.util.ArrayList; -import java.util.List; -import java.util.Random; - -import org.apache.commons.httpclient.NameValuePair; +import com.google.gson.Gson; +import com.google.gson.GsonBuilder; +import 
com.google.gson.JsonSyntaxException; import org.apache.commons.io.IOUtils; import org.apache.commons.lang3.StringUtils; import org.apache.http.client.entity.UrlEncodedFormEntity; @@ -23,302 +14,302 @@ import org.apache.http.message.BasicNameValuePair; import org.apache.http.util.EntityUtils; -import com.google.gson.Gson; -import com.google.gson.GsonBuilder; -import com.google.gson.JsonSyntaxException; +import java.io.IOException; +import java.io.InputStream; +import java.io.UnsupportedEncodingException; +import java.net.HttpURLConnection; +import java.net.URI; +import java.net.URISyntaxException; +import java.net.URL; +import java.util.ArrayList; +import java.util.List; +import java.util.Random; public abstract class AbstractRetrieveGolr { - protected static final Gson GSON = new GsonBuilder().create(); - - private final String server; - private int retryCount; - - public AbstractRetrieveGolr(String server) { - this(server, 3); - } - - public AbstractRetrieveGolr(String server, int retryCount) { - this.server = server; - this.retryCount = retryCount; - } - - protected abstract boolean isIndentJson(); - - protected abstract List getRelevantFields(); - - /* - http://noctua-golr.berkeleybop.org/select?indent=on - &wt=json&rows=10&start=0 - &fl=id,isa_closure - &json.nl=arrarr - &q=*:* - &fq=document_category:%22ontology_class%22 - &fq=is_obsolete:%22false%22 - &fq=id:%22UniProtKB:P32241-1%22 - - */ - URI createGolrRequest(List tagvalues, String category, int start, int pagination) throws IOException { - try { - URIBuilder builder = new URIBuilder(server); - String currentPath = StringUtils.trimToEmpty(builder.getPath()); - builder.setPath(currentPath+"/select"); - builder.addParameter("defType", "edismax"); - builder.addParameter("qt", "standard"); - builder.addParameter("wt", "json"); - if (isIndentJson()) { - builder.addParameter("indent","on"); - } - builder.addParameter("fl",StringUtils.join(getRelevantFields(), ',')); - builder.addParameter("facet","false"); - builder.addParameter("json.nl","arrarr"); - builder.addParameter("q","*:*"); - builder.addParameter("rows", Integer.toString(pagination)); - builder.addParameter("start", Integer.toString(start)); - builder.addParameter("fq", "document_category:\""+category+"\""); - for (String [] tagvalue : tagvalues) { - if (tagvalue.length == 2) { - builder.addParameter("fq", tagvalue[0]+":\""+tagvalue[1]+"\""); - } - else if (tagvalue.length > 2) { - // if there is more than one value, assume that this is an OR query - StringBuilder value = new StringBuilder(); - value.append(tagvalue[0]).append(":("); - for (int i = 1; i < tagvalue.length; i++) { - if (i > 1) { - value.append(" OR "); - } - value.append('"').append(tagvalue[i]).append('"'); - } - value.append(')'); - builder.addParameter("fq", value.toString()); - } - } - return builder.build(); - } catch (URISyntaxException e) { - throw new IOException("Could not build URI for Golr request", e); - } - } - - - HttpPost createGolrPostRequest(List tagvalues, String category, int start, int pagination) throws UnsupportedEncodingException { - HttpPost post = new HttpPost(server+"/select"); - List urlParameters = new ArrayList<>(); - urlParameters.add(new BasicNameValuePair("username", "abc")); - - urlParameters.add(new BasicNameValuePair("defType", "edismax")); - urlParameters.add(new BasicNameValuePair("qt", "standard")); - urlParameters.add(new BasicNameValuePair("wt", "json")); - if (isIndentJson()) { - urlParameters.add(new BasicNameValuePair("indent","on")); - } - urlParameters.add(new 
BasicNameValuePair("fl",StringUtils.join(getRelevantFields(), ','))); - urlParameters.add(new BasicNameValuePair("facet","false")); - urlParameters.add(new BasicNameValuePair("json.nl","arrarr")); - urlParameters.add(new BasicNameValuePair("q","*:*")); - urlParameters.add(new BasicNameValuePair("rows", Integer.toString(pagination))); - urlParameters.add(new BasicNameValuePair("start", Integer.toString(start))); - urlParameters.add(new BasicNameValuePair("fq", "document_category:\""+category+"\"")); - for (String [] tagvalue : tagvalues) { - if (tagvalue.length == 2) { - urlParameters.add(new BasicNameValuePair("fq", tagvalue[0]+":\""+tagvalue[1]+"\"")); - } - else if (tagvalue.length > 2) { - // if there is more than one value, assume that this is an OR query - StringBuilder value = new StringBuilder(); - value.append(tagvalue[0]).append(":("); - for (int i = 1; i < tagvalue.length; i++) { - if (i > 1) { - value.append(" OR "); - } - value.append('"').append(tagvalue[i]).append('"'); - } - value.append(')'); - urlParameters.add(new BasicNameValuePair("fq", value.toString())); - } - } - post.setEntity(new UrlEncodedFormEntity(urlParameters)); - return post; - } - - - //TODO add retry for failed request - protected String getJsonStringFromPost(HttpPost post) throws IOException { - - CloseableHttpClient httpClient = HttpClients.createDefault(); - CloseableHttpResponse response = httpClient.execute(post); - String json = EntityUtils.toString(response.getEntity()); - - return json; - } - - protected String getJsonStringFromUri(URI uri) throws IOException { - logRequest(uri); - return getJsonStringFromUri(uri, retryCount); - } - - protected String getJsonStringFromUri(URI uri, int retryCount) throws IOException { - final URL url = uri.toURL(); - final HttpURLConnection connection; - InputStream response = null; - // setup and open (actual connection) - try { - connection = (HttpURLConnection) url.openConnection(); - // connection.setRequestMethod("POST"); - connection.setInstanceFollowRedirects(true); // warning does not follow redirects from http to https - response = connection.getInputStream(); // opens the connection to the server - } - catch (IOException e) { - IOUtils.closeQuietly(response); - return retryRequest(uri, e, retryCount); - } - // check status code - final int status; - try { - status = connection.getResponseCode(); - } catch (IOException e) { - IOUtils.closeQuietly(response); - return retryRequest(uri, e, retryCount); - } - // handle unexpected status code - if (status != 200) { - // try to check error stream - String errorMsg = getErrorMsg(connection); - - // construct message for exception - StringBuilder sb = new StringBuilder("Unexpected HTTP status code: "+status); - - if (errorMsg != null) { - sb.append(" Details: "); - sb.append(errorMsg); - } - IOException e = new IOException(sb.toString()); - return retryRequest(uri, e, retryCount); - } - - // try to detect charset - String contentType = connection.getHeaderField("Content-Type"); - String charset = null; - - if (contentType != null) { - for (String param : contentType.replace(" ", "").split(";")) { - if (param.startsWith("charset=")) { - charset = param.split("=", 2)[1]; - break; - } - } - } - - // get string response from stream - String json; - try { - if (charset != null) { - json = IOUtils.toString(response, charset); - } - else { - json = IOUtils.toString(response); - } - } catch (IOException e) { - return retryRequest(uri, e, retryCount); - } - finally { - IOUtils.closeQuietly(response); - } - return json; - } - 
- protected String retryRequest(URI uri, IOException e, int retryCount) throws IOException { - if (retryCount > 0) { - int remaining = retryCount - 1; - defaultRandomWait(); - logRetry(uri, e, remaining); - return getJsonStringFromUri(uri, remaining); - } - logRequestError(uri, e); - throw e; - } - - private static String getErrorMsg(HttpURLConnection connection) { - String errorMsg = null; - InputStream errorStream = null; - try { - errorStream = connection.getErrorStream(); - if (errorStream != null) { - errorMsg =IOUtils.toString(errorStream); - } - errorMsg = StringUtils.trimToNull(errorMsg); - } - catch (IOException e) { - // ignore errors, while trying to retrieve the error message - } - finally { - IOUtils.closeQuietly(errorStream); - } - return errorMsg; - } - - protected void defaultRandomWait() { - // wait a random interval between 400 and 1500 ms - randomWait(400, 1500); - } - - protected void randomWait(int min, int max) { - Random random = new Random(System.currentTimeMillis()); - long wait = min + random.nextInt((max - min)); - try { - Thread.sleep(wait); - } catch (InterruptedException exception) { - // ignore - } - } - - - protected void logRequest(URI uri) { - // do nothing - // hook to implement logging of requests - } - - protected void logRequestError(URI uri, IOException exception) { - // do nothing - // hook to implement logging of request errors - } - - protected void logRetry(URI uri, IOException exception, int remaining) { - // do nothing - // hook to implement logging of a retry - } - - - protected > T parseGolrResponse(String response, Class clazz) throws IOException { - try { - T envelope = GSON.fromJson(response, clazz); - if (envelope == null || envelope.response == null || envelope.responseHeader == null) { - throw new IOException("Unexpected response content in GOLR response."); - } - if ("0".equals(envelope.responseHeader.status) == false) { - throw new IOException("Unexpected response status in GOLR response header: "+envelope.responseHeader.status); - } - return envelope; - } catch (JsonSyntaxException e) { - throw new IOException("Could not parse JSON response.", e); - } - } - - static class GolrEnvelope { - GolrResponseHeader responseHeader; - GolrResponse response; - } - - static class GolrResponseHeader { - String status; - String QTime; - Object params; - } - - static class GolrResponse { - int numFound; - int start; - T[] docs; - } + protected static final Gson GSON = new GsonBuilder().create(); + + private final String server; + private int retryCount; + + public AbstractRetrieveGolr(String server) { + this(server, 3); + } + + public AbstractRetrieveGolr(String server, int retryCount) { + this.server = server; + this.retryCount = retryCount; + } + + protected abstract boolean isIndentJson(); + + protected abstract List getRelevantFields(); + + /* + http://noctua-golr.berkeleybop.org/select?indent=on + &wt=json&rows=10&start=0 + &fl=id,isa_closure + &json.nl=arrarr + &q=*:* + &fq=document_category:%22ontology_class%22 + &fq=is_obsolete:%22false%22 + &fq=id:%22UniProtKB:P32241-1%22 + + */ + URI createGolrRequest(List tagvalues, String category, int start, int pagination) throws IOException { + try { + URIBuilder builder = new URIBuilder(server); + String currentPath = StringUtils.trimToEmpty(builder.getPath()); + builder.setPath(currentPath + "/select"); + builder.addParameter("defType", "edismax"); + builder.addParameter("qt", "standard"); + builder.addParameter("wt", "json"); + if (isIndentJson()) { + builder.addParameter("indent", "on"); + } + 
builder.addParameter("fl", StringUtils.join(getRelevantFields(), ',')); + builder.addParameter("facet", "false"); + builder.addParameter("json.nl", "arrarr"); + builder.addParameter("q", "*:*"); + builder.addParameter("rows", Integer.toString(pagination)); + builder.addParameter("start", Integer.toString(start)); + builder.addParameter("fq", "document_category:\"" + category + "\""); + for (String[] tagvalue : tagvalues) { + if (tagvalue.length == 2) { + builder.addParameter("fq", tagvalue[0] + ":\"" + tagvalue[1] + "\""); + } else if (tagvalue.length > 2) { + // if there is more than one value, assume that this is an OR query + StringBuilder value = new StringBuilder(); + value.append(tagvalue[0]).append(":("); + for (int i = 1; i < tagvalue.length; i++) { + if (i > 1) { + value.append(" OR "); + } + value.append('"').append(tagvalue[i]).append('"'); + } + value.append(')'); + builder.addParameter("fq", value.toString()); + } + } + return builder.build(); + } catch (URISyntaxException e) { + throw new IOException("Could not build URI for Golr request", e); + } + } + + + HttpPost createGolrPostRequest(List tagvalues, String category, int start, int pagination) throws UnsupportedEncodingException { + HttpPost post = new HttpPost(server + "/select"); + List urlParameters = new ArrayList<>(); + urlParameters.add(new BasicNameValuePair("username", "abc")); + + urlParameters.add(new BasicNameValuePair("defType", "edismax")); + urlParameters.add(new BasicNameValuePair("qt", "standard")); + urlParameters.add(new BasicNameValuePair("wt", "json")); + if (isIndentJson()) { + urlParameters.add(new BasicNameValuePair("indent", "on")); + } + urlParameters.add(new BasicNameValuePair("fl", StringUtils.join(getRelevantFields(), ','))); + urlParameters.add(new BasicNameValuePair("facet", "false")); + urlParameters.add(new BasicNameValuePair("json.nl", "arrarr")); + urlParameters.add(new BasicNameValuePair("q", "*:*")); + urlParameters.add(new BasicNameValuePair("rows", Integer.toString(pagination))); + urlParameters.add(new BasicNameValuePair("start", Integer.toString(start))); + urlParameters.add(new BasicNameValuePair("fq", "document_category:\"" + category + "\"")); + for (String[] tagvalue : tagvalues) { + if (tagvalue.length == 2) { + urlParameters.add(new BasicNameValuePair("fq", tagvalue[0] + ":\"" + tagvalue[1] + "\"")); + } else if (tagvalue.length > 2) { + // if there is more than one value, assume that this is an OR query + StringBuilder value = new StringBuilder(); + value.append(tagvalue[0]).append(":("); + for (int i = 1; i < tagvalue.length; i++) { + if (i > 1) { + value.append(" OR "); + } + value.append('"').append(tagvalue[i]).append('"'); + } + value.append(')'); + urlParameters.add(new BasicNameValuePair("fq", value.toString())); + } + } + post.setEntity(new UrlEncodedFormEntity(urlParameters)); + return post; + } + + + //TODO add retry for failed request + protected String getJsonStringFromPost(HttpPost post) throws IOException { + + CloseableHttpClient httpClient = HttpClients.createDefault(); + CloseableHttpResponse response = httpClient.execute(post); + String json = EntityUtils.toString(response.getEntity()); + + return json; + } + + protected String getJsonStringFromUri(URI uri) throws IOException { + logRequest(uri); + return getJsonStringFromUri(uri, retryCount); + } + + protected String getJsonStringFromUri(URI uri, int retryCount) throws IOException { + final URL url = uri.toURL(); + final HttpURLConnection connection; + InputStream response = null; + // setup and open (actual 
connection) + try { + connection = (HttpURLConnection) url.openConnection(); + // connection.setRequestMethod("POST"); + connection.setInstanceFollowRedirects(true); // warning does not follow redirects from http to https + response = connection.getInputStream(); // opens the connection to the server + } catch (IOException e) { + IOUtils.closeQuietly(response); + return retryRequest(uri, e, retryCount); + } + // check status code + final int status; + try { + status = connection.getResponseCode(); + } catch (IOException e) { + IOUtils.closeQuietly(response); + return retryRequest(uri, e, retryCount); + } + // handle unexpected status code + if (status != 200) { + // try to check error stream + String errorMsg = getErrorMsg(connection); + + // construct message for exception + StringBuilder sb = new StringBuilder("Unexpected HTTP status code: " + status); + + if (errorMsg != null) { + sb.append(" Details: "); + sb.append(errorMsg); + } + IOException e = new IOException(sb.toString()); + return retryRequest(uri, e, retryCount); + } + + // try to detect charset + String contentType = connection.getHeaderField("Content-Type"); + String charset = null; + + if (contentType != null) { + for (String param : contentType.replace(" ", "").split(";")) { + if (param.startsWith("charset=")) { + charset = param.split("=", 2)[1]; + break; + } + } + } + + // get string response from stream + String json; + try { + if (charset != null) { + json = IOUtils.toString(response, charset); + } else { + json = IOUtils.toString(response); + } + } catch (IOException e) { + return retryRequest(uri, e, retryCount); + } finally { + IOUtils.closeQuietly(response); + } + return json; + } + + protected String retryRequest(URI uri, IOException e, int retryCount) throws IOException { + if (retryCount > 0) { + int remaining = retryCount - 1; + defaultRandomWait(); + logRetry(uri, e, remaining); + return getJsonStringFromUri(uri, remaining); + } + logRequestError(uri, e); + throw e; + } + + private static String getErrorMsg(HttpURLConnection connection) { + String errorMsg = null; + InputStream errorStream = null; + try { + errorStream = connection.getErrorStream(); + if (errorStream != null) { + errorMsg = IOUtils.toString(errorStream); + } + errorMsg = StringUtils.trimToNull(errorMsg); + } catch (IOException e) { + // ignore errors, while trying to retrieve the error message + } finally { + IOUtils.closeQuietly(errorStream); + } + return errorMsg; + } + + protected void defaultRandomWait() { + // wait a random interval between 400 and 1500 ms + randomWait(400, 1500); + } + + protected void randomWait(int min, int max) { + Random random = new Random(System.currentTimeMillis()); + long wait = min + random.nextInt((max - min)); + try { + Thread.sleep(wait); + } catch (InterruptedException exception) { + // ignore + } + } + + + protected void logRequest(URI uri) { + // do nothing + // hook to implement logging of requests + } + + protected void logRequestError(URI uri, IOException exception) { + // do nothing + // hook to implement logging of request errors + } + + protected void logRetry(URI uri, IOException exception, int remaining) { + // do nothing + // hook to implement logging of a retry + } + + + protected > T parseGolrResponse(String response, Class clazz) throws IOException { + try { + T envelope = GSON.fromJson(response, clazz); + if (envelope == null || envelope.response == null || envelope.responseHeader == null) { + throw new IOException("Unexpected response content in GOLR response."); + } + if 
("0".equals(envelope.responseHeader.status) == false) { + throw new IOException("Unexpected response status in GOLR response header: " + envelope.responseHeader.status); + } + return envelope; + } catch (JsonSyntaxException e) { + throw new IOException("Could not parse JSON response.", e); + } + } + + static class GolrEnvelope { + GolrResponseHeader responseHeader; + GolrResponse response; + } + + static class GolrResponseHeader { + String status; + String QTime; + Object params; + } + + static class GolrResponse { + int numFound; + int start; + T[] docs; + } } diff --git a/minerva-lookup/src/main/java/org/bbop/golr/java/RetrieveGolrAnnotations.java b/minerva-lookup/src/main/java/org/bbop/golr/java/RetrieveGolrAnnotations.java index d30bc505..4644aade 100644 --- a/minerva-lookup/src/main/java/org/bbop/golr/java/RetrieveGolrAnnotations.java +++ b/minerva-lookup/src/main/java/org/bbop/golr/java/RetrieveGolrAnnotations.java @@ -1,55 +1,46 @@ package org.bbop.golr.java; +import com.google.gson.Gson; +import com.google.gson.GsonBuilder; +import org.bbop.golr.java.RetrieveGolrAnnotations.GolrAnnotationExtension.GolrAnnotationExtensionEntry.GolrAnnotationExtensionRelation; + import java.io.IOException; import java.net.URI; import java.util.ArrayList; import java.util.Arrays; import java.util.Collections; -import java.util.HashMap; import java.util.List; -import java.util.Map; -import org.bbop.golr.java.RetrieveGolrAnnotations.GolrAnnotationExtension.GolrAnnotationExtensionEntry.GolrAnnotationExtensionRelation; +public class RetrieveGolrAnnotations extends AbstractRetrieveGolr { -//import owltools.gaf.Bioentity; -//import owltools.gaf.ExtensionExpression; -//import owltools.gaf.GafDocument; -//import owltools.gaf.GeneAnnotation; + static boolean JSON_INDENT_FLAG = false; + static int PAGINATION_CHUNK_SIZE = 100; -import com.google.gson.Gson; -import com.google.gson.GsonBuilder; -import com.google.gson.JsonSyntaxException; + private static final Gson GSON = new GsonBuilder().create(); + + /* + * This flag indicates that missing c16 data, due to malformed JSON is acceptable. + */ + private final boolean ignoreC16ParseErrors; + + public RetrieveGolrAnnotations(String server) { + this(server, 3, false); + } -public class RetrieveGolrAnnotations extends AbstractRetrieveGolr{ - - static boolean JSON_INDENT_FLAG = false; - static int PAGINATION_CHUNK_SIZE = 100; - - private static final Gson GSON = new GsonBuilder().create(); - - /* - * This flag indicates that missing c16 data, due to malformed JSON is acceptable. 
- */ - private final boolean ignoreC16ParseErrors; + public RetrieveGolrAnnotations(String server, int retryCount, boolean ignoreC16ParseErrors) { + super(server, retryCount); + this.ignoreC16ParseErrors = ignoreC16ParseErrors; + } - public RetrieveGolrAnnotations(String server) { - this(server, 3, false); - } - - public RetrieveGolrAnnotations(String server, int retryCount, boolean ignoreC16ParseErrors) { - super(server, retryCount); - this.ignoreC16ParseErrors = ignoreC16ParseErrors; - } - - @Override - protected boolean isIndentJson() { - return JSON_INDENT_FLAG; - } + @Override + protected boolean isIndentJson() { + return JSON_INDENT_FLAG; + } - @Override - protected List getRelevantFields() { - return GolrAnnotationDocument.getRelevantFields(); - } + @Override + protected List getRelevantFields() { + return GolrAnnotationDocument.getRelevantFields(); + } // public GafDocument convert(List golrAnnotationDocuments) throws IOException { // Map entities = new HashMap(); @@ -142,178 +133,178 @@ protected List getRelevantFields() { // annotation.setExtensionExpressions(expressions); // }; // } - - private String extractRelation(GolrAnnotationExtension extension) { - StringBuilder sb = new StringBuilder(); - for(GolrAnnotationExtensionRelation rel : extension.relationship.relation) { - if (sb.length() > 0) { - sb.append(" o "); - } - sb.append(rel.id); - } - if (sb.length() > 0) { - return sb.toString(); - } - return null; - } - - public List getGolrAnnotationsForGenes(List ids) throws IOException { - return getGolrAnnotationsForGenes(ids, false); - } - - public List getGolrAnnotationsForGenes(List ids, boolean noIEAs) throws IOException { - List tagvalues = new ArrayList(); - String [] tagvalue = new String[ids.size() + 1]; - tagvalue[0] = "bioentity"; - for (int i = 0; i < ids.size(); i++) { - tagvalue[i+1] = ids.get(i); - } - tagvalues.add(tagvalue); - if (noIEAs) { - // add negative filter for IEAs - tagvalues.add(new String[]{"-evidence_type", "IEA"}); - } - final List documents = getGolrAnnotations(tagvalues); - return documents; - } - public List getGolrAnnotationsForGene(String id) throws IOException { - List tagvalues = new ArrayList(); - String [] tagvalue = new String[2]; - tagvalue[0] = "bioentity"; - tagvalue[1] = id; - tagvalues.add(tagvalue); - final List documents = getGolrAnnotations(tagvalues); - return documents; - } - - public List getGolrAnnotationsForSynonym(String source, String synonym) throws IOException { - return getGolrAnnotationsForSynonym(source, Collections.singletonList(synonym)); - } - - public List getGolrAnnotationsForSynonym(String source, List synonyms) throws IOException { - return getGolrAnnotationsForSynonym(source, synonyms, false); - } - - public List getGolrAnnotationsForSynonym(String source, List synonyms, boolean noIEAs) throws IOException { - List tagvalues = new ArrayList(); - String [] param1 = new String[2]; - param1[0] = "source"; - param1[1] = source; - tagvalues.add(param1); - String [] param2 = new String[synonyms.size() + 1]; - param2[0] = "synonym"; - for (int i = 0; i < synonyms.size(); i++) { - param2[i+1] = synonyms.get(i); - } - tagvalues.add(param2); - if (noIEAs) { - // add negative filter for IEAs - tagvalues.add(new String[]{"-evidence_type", "IEA"}); - } - final List documents = getGolrAnnotations(tagvalues); + private String extractRelation(GolrAnnotationExtension extension) { + StringBuilder sb = new StringBuilder(); + for (GolrAnnotationExtensionRelation rel : extension.relationship.relation) { + if (sb.length() > 0) { + 
sb.append(" o "); + } + sb.append(rel.id); + } + if (sb.length() > 0) { + return sb.toString(); + } + return null; + } + + public List getGolrAnnotationsForGenes(List ids) throws IOException { + return getGolrAnnotationsForGenes(ids, false); + } + + public List getGolrAnnotationsForGenes(List ids, boolean noIEAs) throws IOException { + List tagvalues = new ArrayList(); + String[] tagvalue = new String[ids.size() + 1]; + tagvalue[0] = "bioentity"; + for (int i = 0; i < ids.size(); i++) { + tagvalue[i + 1] = ids.get(i); + } + tagvalues.add(tagvalue); + if (noIEAs) { + // add negative filter for IEAs + tagvalues.add(new String[]{"-evidence_type", "IEA"}); + } + final List documents = getGolrAnnotations(tagvalues); + return documents; + } + + public List getGolrAnnotationsForGene(String id) throws IOException { + List tagvalues = new ArrayList(); + String[] tagvalue = new String[2]; + tagvalue[0] = "bioentity"; + tagvalue[1] = id; + tagvalues.add(tagvalue); + final List documents = getGolrAnnotations(tagvalues); + return documents; + } + + public List getGolrAnnotationsForSynonym(String source, String synonym) throws IOException { + return getGolrAnnotationsForSynonym(source, Collections.singletonList(synonym)); + } + + public List getGolrAnnotationsForSynonym(String source, List synonyms) throws IOException { + return getGolrAnnotationsForSynonym(source, synonyms, false); + } + + public List getGolrAnnotationsForSynonym(String source, List synonyms, boolean noIEAs) throws IOException { + List tagvalues = new ArrayList(); + String[] param1 = new String[2]; + param1[0] = "source"; + param1[1] = source; + tagvalues.add(param1); + String[] param2 = new String[synonyms.size() + 1]; + param2[0] = "synonym"; + for (int i = 0; i < synonyms.size(); i++) { + param2[i + 1] = synonyms.get(i); + } + tagvalues.add(param2); + if (noIEAs) { + // add negative filter for IEAs + tagvalues.add(new String[]{"-evidence_type", "IEA"}); + } + final List documents = getGolrAnnotations(tagvalues); + + return documents; + } + + public List getGolrAnnotations(List tagvalues) throws IOException { + JSON_INDENT_FLAG = true; + final URI uri = createGolrRequest(tagvalues, "annotation", 0, PAGINATION_CHUNK_SIZE); + final String jsonString = getJsonStringFromUri(uri); + final GolrResponse response = parseGolrResponse(jsonString); + final List documents = new ArrayList(response.numFound); + documents.addAll(Arrays.asList(response.docs)); + if (response.numFound > PAGINATION_CHUNK_SIZE) { + // fetch remaining documents + int start = PAGINATION_CHUNK_SIZE; + int end = response.numFound / PAGINATION_CHUNK_SIZE; + if (response.numFound % PAGINATION_CHUNK_SIZE != 0) { + end += 1; + } + end = end * PAGINATION_CHUNK_SIZE; + while (start <= end) { + URI uriPagination = createGolrRequest(tagvalues, "annotation", start, PAGINATION_CHUNK_SIZE); + String jsonStringPagination = getJsonStringFromUri(uriPagination); + GolrResponse responsePagination = parseGolrResponse(jsonStringPagination); + documents.addAll(Arrays.asList(responsePagination.docs)); + start += PAGINATION_CHUNK_SIZE; + } + } + return documents; + } + + private static class GolrAnnotationResponse extends GolrEnvelope { + // empty + } + + public static class GolrAnnotationDocument { + String source; + String bioentity; + String bioentity_internal_id; + String bioentity_label; + String bioentity_name; + String annotation_class; + String annotation_class_label; + String evidence_type; + String aspect; + String type; + String taxon; + String taxon_label; + String date; + String 
assigned_by; + String bioentity_isoform; + String panther_family; + String panther_family_label; + List annotation_extension_json; + List synonym; + List evidence_with; + List reference; + List qualifier; + + static List getRelevantFields() { + // explicit list of fields, avoid "*" retrieval of unused fields + return Arrays.asList("source", + "qualifier", + "bioentity", + "bioentity_internal_id", + "bioentity_label", + "bioentity_name", + "annotation_class", + "annotation_class_label", + "evidence_type", + "aspect", + "type", + "taxon", + "taxon_label", + "date", + "assigned_by", + "bioentity_isoform", + "panther_family", + "panther_family_label", + "annotation_extension_json", + "synonym", + "evidence_with", + "reference"); + } + } + + public static class GolrAnnotationExtension { + + GolrAnnotationExtensionEntry relationship; - return documents; - } - - public List getGolrAnnotations(List tagvalues) throws IOException { - JSON_INDENT_FLAG = true; - final URI uri = createGolrRequest(tagvalues, "annotation", 0, PAGINATION_CHUNK_SIZE); - final String jsonString = getJsonStringFromUri(uri); - final GolrResponse response = parseGolrResponse(jsonString); - final List documents = new ArrayList(response.numFound); - documents.addAll(Arrays.asList(response.docs)); - if (response.numFound > PAGINATION_CHUNK_SIZE) { - // fetch remaining documents - int start = PAGINATION_CHUNK_SIZE; - int end = response.numFound / PAGINATION_CHUNK_SIZE; - if (response.numFound % PAGINATION_CHUNK_SIZE != 0) { - end += 1; - } - end = end * PAGINATION_CHUNK_SIZE; - while (start <= end) { - URI uriPagination = createGolrRequest(tagvalues, "annotation", start, PAGINATION_CHUNK_SIZE); - String jsonStringPagination = getJsonStringFromUri(uriPagination); - GolrResponse responsePagination = parseGolrResponse(jsonStringPagination); - documents.addAll(Arrays.asList(responsePagination.docs)); - start += PAGINATION_CHUNK_SIZE; - } - } - return documents; - } - - private static class GolrAnnotationResponse extends GolrEnvelope { - // empty - } - - public static class GolrAnnotationDocument { - String source; - String bioentity; - String bioentity_internal_id; - String bioentity_label; - String bioentity_name; - String annotation_class; - String annotation_class_label; - String evidence_type; - String aspect; - String type; - String taxon; - String taxon_label; - String date; - String assigned_by; - String bioentity_isoform; - String panther_family; - String panther_family_label; - List annotation_extension_json; - List synonym; - List evidence_with; - List reference; - List qualifier; - - static List getRelevantFields() { - // explicit list of fields, avoid "*" retrieval of unused fields - return Arrays.asList("source", - "qualifier", - "bioentity", - "bioentity_internal_id", - "bioentity_label", - "bioentity_name", - "annotation_class", - "annotation_class_label", - "evidence_type", - "aspect", - "type", - "taxon", - "taxon_label", - "date", - "assigned_by", - "bioentity_isoform", - "panther_family", - "panther_family_label", - "annotation_extension_json", - "synonym", - "evidence_with", - "reference"); - } - } + public static class GolrAnnotationExtensionEntry { + List relation; // list represents a property chain + String id; + String label; - public static class GolrAnnotationExtension { - - GolrAnnotationExtensionEntry relationship; - - public static class GolrAnnotationExtensionEntry { - List relation; // list represents a property chain - String id; - String label; - - public static class GolrAnnotationExtensionRelation { 
- String id; - String label; - } - } - } + public static class GolrAnnotationExtensionRelation { + String id; + String label; + } + } + } - private GolrResponse parseGolrResponse(String jsonString) throws IOException { - return parseGolrResponse(jsonString, GolrAnnotationResponse.class).response; - } + private GolrResponse parseGolrResponse(String jsonString) throws IOException { + return parseGolrResponse(jsonString, GolrAnnotationResponse.class).response; + } } diff --git a/minerva-lookup/src/main/java/org/bbop/golr/java/RetrieveGolrBioentities.java b/minerva-lookup/src/main/java/org/bbop/golr/java/RetrieveGolrBioentities.java index 6a02828d..431b1774 100644 --- a/minerva-lookup/src/main/java/org/bbop/golr/java/RetrieveGolrBioentities.java +++ b/minerva-lookup/src/main/java/org/bbop/golr/java/RetrieveGolrBioentities.java @@ -8,93 +8,93 @@ public class RetrieveGolrBioentities extends AbstractRetrieveGolr { - static int PAGINATION_CHUNK_SIZE = 100; - - private final List relevantFields; - - public RetrieveGolrBioentities(String server, int retryCount) { - super(server, retryCount); - relevantFields = GolrBioentityDocument.getRelevantFields(); - } + static int PAGINATION_CHUNK_SIZE = 100; - @Override - protected boolean isIndentJson() { - return true; - } + private final List relevantFields; - @Override - protected List getRelevantFields() { - return relevantFields; - } + public RetrieveGolrBioentities(String server, int retryCount) { + super(server, retryCount); + relevantFields = GolrBioentityDocument.getRelevantFields(); + } - public List getGolrBioentites(String id) throws IOException { - List tagvalues = new ArrayList(); - String [] tagvalue = new String[2]; - tagvalue[0] = "bioentity"; - tagvalue[1] = id; - tagvalues.add(tagvalue); - final List documents = getGolrBioentities(tagvalues); - return documents; - } - - public List getGolrBioentities(List tagvalues) throws IOException { - final URI uri = createGolrRequest(tagvalues, "bioentity", 0, PAGINATION_CHUNK_SIZE); - final String jsonString = getJsonStringFromUri(uri); - final GolrResponse response = parseGolrResponse(jsonString); - final List documents = new ArrayList(response.numFound); - documents.addAll(Arrays.asList(response.docs)); - if (response.numFound > PAGINATION_CHUNK_SIZE) { - // fetch remaining documents - int start = PAGINATION_CHUNK_SIZE; - int end = response.numFound / PAGINATION_CHUNK_SIZE; - if (response.numFound % PAGINATION_CHUNK_SIZE != 0) { - end += 1; - } - end = end * PAGINATION_CHUNK_SIZE; - while (start <= end) { - URI uriPagination = createGolrRequest(tagvalues, "bioentity", start, PAGINATION_CHUNK_SIZE); - String jsonStringPagination = getJsonStringFromUri(uriPagination); - GolrResponse responsePagination = parseGolrResponse(jsonStringPagination); - documents.addAll(Arrays.asList(responsePagination.docs)); - start += PAGINATION_CHUNK_SIZE; - } - } - return documents; - } - - private static class GolrBioentityResponse extends GolrEnvelope { - // empty - } - - public static class GolrBioentityDocument { - - public String document_category; - public String id; - public String bioentity; - public String bioentity_label; - public String bioentity_name; - public String source; - public String type; - public String taxon; - public String taxon_label; - public List synonym; - - static List getRelevantFields() { - // explicit list of fields, avoid "*" retrieval of unused fields - return Arrays.asList("document_category", - "id", - "bioentity", - "bioentity_label", - "bioentity_name", - "source", - "type", - 
"taxon", - "taxon_label", - "synonym"); - } - } - - private GolrResponse parseGolrResponse(String jsonString) throws IOException { - return parseGolrResponse(jsonString, GolrBioentityResponse.class).response; - } + @Override + protected boolean isIndentJson() { + return true; + } + + @Override + protected List getRelevantFields() { + return relevantFields; + } + + public List getGolrBioentites(String id) throws IOException { + List tagvalues = new ArrayList(); + String[] tagvalue = new String[2]; + tagvalue[0] = "bioentity"; + tagvalue[1] = id; + tagvalues.add(tagvalue); + final List documents = getGolrBioentities(tagvalues); + return documents; + } + + public List getGolrBioentities(List tagvalues) throws IOException { + final URI uri = createGolrRequest(tagvalues, "bioentity", 0, PAGINATION_CHUNK_SIZE); + final String jsonString = getJsonStringFromUri(uri); + final GolrResponse response = parseGolrResponse(jsonString); + final List documents = new ArrayList(response.numFound); + documents.addAll(Arrays.asList(response.docs)); + if (response.numFound > PAGINATION_CHUNK_SIZE) { + // fetch remaining documents + int start = PAGINATION_CHUNK_SIZE; + int end = response.numFound / PAGINATION_CHUNK_SIZE; + if (response.numFound % PAGINATION_CHUNK_SIZE != 0) { + end += 1; + } + end = end * PAGINATION_CHUNK_SIZE; + while (start <= end) { + URI uriPagination = createGolrRequest(tagvalues, "bioentity", start, PAGINATION_CHUNK_SIZE); + String jsonStringPagination = getJsonStringFromUri(uriPagination); + GolrResponse responsePagination = parseGolrResponse(jsonStringPagination); + documents.addAll(Arrays.asList(responsePagination.docs)); + start += PAGINATION_CHUNK_SIZE; + } + } + return documents; + } + + private static class GolrBioentityResponse extends GolrEnvelope { + // empty + } + + public static class GolrBioentityDocument { + + public String document_category; + public String id; + public String bioentity; + public String bioentity_label; + public String bioentity_name; + public String source; + public String type; + public String taxon; + public String taxon_label; + public List synonym; + + static List getRelevantFields() { + // explicit list of fields, avoid "*" retrieval of unused fields + return Arrays.asList("document_category", + "id", + "bioentity", + "bioentity_label", + "bioentity_name", + "source", + "type", + "taxon", + "taxon_label", + "synonym"); + } + } + + private GolrResponse parseGolrResponse(String jsonString) throws IOException { + return parseGolrResponse(jsonString, GolrBioentityResponse.class).response; + } } diff --git a/minerva-lookup/src/main/java/org/bbop/golr/java/RetrieveGolrOntologyClass.java b/minerva-lookup/src/main/java/org/bbop/golr/java/RetrieveGolrOntologyClass.java index 2c3456ff..3aaec701 100644 --- a/minerva-lookup/src/main/java/org/bbop/golr/java/RetrieveGolrOntologyClass.java +++ b/minerva-lookup/src/main/java/org/bbop/golr/java/RetrieveGolrOntologyClass.java @@ -1,153 +1,148 @@ package org.bbop.golr.java; +import org.apache.http.client.methods.HttpPost; + import java.io.IOException; import java.net.URI; -import java.util.ArrayList; -import java.util.Arrays; -import java.util.HashMap; -import java.util.List; -import java.util.Map; -import java.util.Set; - -import org.apache.http.client.methods.HttpPost; +import java.util.*; public class RetrieveGolrOntologyClass extends AbstractRetrieveGolr { - static int PAGINATION_CHUNK_SIZE = 1000; - - private final List relevantFields; - - public RetrieveGolrOntologyClass(String server, int retryCount) { - 
super(server, retryCount); - relevantFields = GolrOntologyClassDocument.getRelevantFields(); - } - - @Override - protected boolean isIndentJson() { - return true; - } - - @Override - protected List getRelevantFields() { - return relevantFields; - } - - - public Map> getGolrOntologyCls(Set curies) throws IOException { - List tagvalues = new ArrayList(); - String [] tagvalue = new String[1+curies.size()]; - int i = 0; - tagvalue[i] = "annotation_class"; - for(String curie : curies) { - i++; - tagvalue[i] = curie; - } - tagvalues.add(tagvalue); - final List documents = getGolrOntologyCls(tagvalues); - //remap - Map> curie_response = new HashMap>(); - for(GolrOntologyClassDocument ontdoc : documents) { - String id = ontdoc.annotation_class; - List docs = curie_response.get(id); - if(docs==null) { - docs = new ArrayList(); - } - docs.add(ontdoc); - curie_response.put(id, docs); - } - - return curie_response; - } - - public List getGolrOntologyCls(String id) throws IOException { - List tagvalues = new ArrayList(); - String [] tagvalue = new String[2]; - tagvalue[0] = "annotation_class"; - tagvalue[1] = id; - tagvalues.add(tagvalue); - final List documents = getGolrOntologyCls(tagvalues); - return documents; - } - - public List getGolrOntologyCls(List tagvalues) throws IOException { + static int PAGINATION_CHUNK_SIZE = 1000; + + private final List relevantFields; + + public RetrieveGolrOntologyClass(String server, int retryCount) { + super(server, retryCount); + relevantFields = GolrOntologyClassDocument.getRelevantFields(); + } + + @Override + protected boolean isIndentJson() { + return true; + } + + @Override + protected List getRelevantFields() { + return relevantFields; + } + + + public Map> getGolrOntologyCls(Set curies) throws IOException { + List tagvalues = new ArrayList(); + String[] tagvalue = new String[1 + curies.size()]; + int i = 0; + tagvalue[i] = "annotation_class"; + for (String curie : curies) { + i++; + tagvalue[i] = curie; + } + tagvalues.add(tagvalue); + final List documents = getGolrOntologyCls(tagvalues); + //remap + Map> curie_response = new HashMap>(); + for (GolrOntologyClassDocument ontdoc : documents) { + String id = ontdoc.annotation_class; + List docs = curie_response.get(id); + if (docs == null) { + docs = new ArrayList(); + } + docs.add(ontdoc); + curie_response.put(id, docs); + } + + return curie_response; + } + + public List getGolrOntologyCls(String id) throws IOException { + List tagvalues = new ArrayList(); + String[] tagvalue = new String[2]; + tagvalue[0] = "annotation_class"; + tagvalue[1] = id; + tagvalues.add(tagvalue); + final List documents = getGolrOntologyCls(tagvalues); + return documents; + } + + public List getGolrOntologyCls(List tagvalues) throws IOException { // final URI uri = createGolrRequest(tagvalues, "ontology_class", 0, PAGINATION_CHUNK_SIZE); // final String jsonString = getJsonStringFromUri(uri); - - final HttpPost post = createGolrPostRequest(tagvalues, "ontology_class", 0, PAGINATION_CHUNK_SIZE); - final String jsonString = getJsonStringFromPost(post); - - final GolrResponse response = parseGolrResponse(jsonString); - final List documents = new ArrayList(response.numFound); - documents.addAll(Arrays.asList(response.docs)); - if (response.numFound > PAGINATION_CHUNK_SIZE) { - // fetch remaining documents - int start = PAGINATION_CHUNK_SIZE; - int end = response.numFound / PAGINATION_CHUNK_SIZE; - if (response.numFound % PAGINATION_CHUNK_SIZE != 0) { - end += 1; - } - end = end * PAGINATION_CHUNK_SIZE; - while (start <= end) { - URI 
uriPagination = createGolrRequest(tagvalues, "ontology_class", start, PAGINATION_CHUNK_SIZE); - String jsonStringPagination = getJsonStringFromUri(uriPagination); - GolrResponse responsePagination = parseGolrResponse(jsonStringPagination); - documents.addAll(Arrays.asList(responsePagination.docs)); - start += PAGINATION_CHUNK_SIZE; - } - } - return documents; - } - - private static class GolrOntologyClassResponse extends GolrEnvelope { - // empty - } - - public static class GolrOntologyClassDocument { - - public String document_category; - public String annotation_class; - public String annotation_class_label; - public String description; - public String source; - public String is_obsolete; - public List alternate_id; - public List replaced_by; - public List consider; - public List synonym; - public List subset; - public List definition_xref; - public List database_xref; - //public List isa_partof_closure; - public List isa_closure; - public List regulates_closure; - public String only_in_taxon; - public List only_in_taxon_closure; - - - static List getRelevantFields() { - // explicit list of fields, avoid "*" retrieval of unused fields - return Arrays.asList("document_category", - "annotation_class", - "annotation_class_label", - "description", - "source", - "is_obsolete", - "alternate_id", - "replaced_by", - "synonym", - "subset", - "definition_xref", - "database_xref", - //"isa_partof_closure", - "isa_closure", - "regulates_closure", - "only_in_taxon", - "only_in_taxon_closure"); - } - } - - private GolrResponse parseGolrResponse(String jsonString) throws IOException { - return parseGolrResponse(jsonString, GolrOntologyClassResponse.class).response; - } + + final HttpPost post = createGolrPostRequest(tagvalues, "ontology_class", 0, PAGINATION_CHUNK_SIZE); + final String jsonString = getJsonStringFromPost(post); + + final GolrResponse response = parseGolrResponse(jsonString); + final List documents = new ArrayList(response.numFound); + documents.addAll(Arrays.asList(response.docs)); + if (response.numFound > PAGINATION_CHUNK_SIZE) { + // fetch remaining documents + int start = PAGINATION_CHUNK_SIZE; + int end = response.numFound / PAGINATION_CHUNK_SIZE; + if (response.numFound % PAGINATION_CHUNK_SIZE != 0) { + end += 1; + } + end = end * PAGINATION_CHUNK_SIZE; + while (start <= end) { + URI uriPagination = createGolrRequest(tagvalues, "ontology_class", start, PAGINATION_CHUNK_SIZE); + String jsonStringPagination = getJsonStringFromUri(uriPagination); + GolrResponse responsePagination = parseGolrResponse(jsonStringPagination); + documents.addAll(Arrays.asList(responsePagination.docs)); + start += PAGINATION_CHUNK_SIZE; + } + } + return documents; + } + + private static class GolrOntologyClassResponse extends GolrEnvelope { + // empty + } + + public static class GolrOntologyClassDocument { + + public String document_category; + public String annotation_class; + public String annotation_class_label; + public String description; + public String source; + public String is_obsolete; + public List alternate_id; + public List replaced_by; + public List consider; + public List synonym; + public List subset; + public List definition_xref; + public List database_xref; + //public List isa_partof_closure; + public List isa_closure; + public List regulates_closure; + public String only_in_taxon; + public List only_in_taxon_closure; + + + static List getRelevantFields() { + // explicit list of fields, avoid "*" retrieval of unused fields + return Arrays.asList("document_category", + "annotation_class", 
+ "annotation_class_label", + "description", + "source", + "is_obsolete", + "alternate_id", + "replaced_by", + "synonym", + "subset", + "definition_xref", + "database_xref", + //"isa_partof_closure", + "isa_closure", + "regulates_closure", + "only_in_taxon", + "only_in_taxon_closure"); + } + } + + private GolrResponse parseGolrResponse(String jsonString) throws IOException { + return parseGolrResponse(jsonString, GolrOntologyClassResponse.class).response; + } } diff --git a/minerva-lookup/src/main/java/org/geneontology/minerva/lookup/CachingExternalLookupService.java b/minerva-lookup/src/main/java/org/geneontology/minerva/lookup/CachingExternalLookupService.java index 5c965f95..1dc5af02 100644 --- a/minerva-lookup/src/main/java/org/geneontology/minerva/lookup/CachingExternalLookupService.java +++ b/minerva-lookup/src/main/java/org/geneontology/minerva/lookup/CachingExternalLookupService.java @@ -1,98 +1,94 @@ package org.geneontology.minerva.lookup; -import java.util.ArrayList; +import com.google.common.cache.CacheBuilder; +import com.google.common.cache.CacheLoader; +import com.google.common.cache.LoadingCache; +import com.google.common.util.concurrent.ExecutionError; +import com.google.common.util.concurrent.UncheckedExecutionException; +import org.semanticweb.owlapi.model.IRI; + import java.util.Arrays; -import java.util.HashMap; import java.util.List; import java.util.Map; import java.util.Set; import java.util.concurrent.ExecutionException; import java.util.concurrent.TimeUnit; -import org.semanticweb.owlapi.model.IRI; +public class CachingExternalLookupService implements ExternalLookupService { -import com.google.common.cache.CacheBuilder; -import com.google.common.cache.CacheLoader; -import com.google.common.cache.LoadingCache; -import com.google.common.util.concurrent.ExecutionError; -import com.google.common.util.concurrent.UncheckedExecutionException; + private final LoadingCache> cache; + private final ExternalLookupService service; -public class CachingExternalLookupService implements ExternalLookupService { - - private final LoadingCache> cache; - private final ExternalLookupService service; - - public CachingExternalLookupService(ExternalLookupService service, int size, long duration, TimeUnit unit) { - this.service = service; - cache = CacheBuilder.newBuilder() - .expireAfterWrite(duration, unit) - .maximumSize(size) - .build(new CacheLoader>() { + public CachingExternalLookupService(ExternalLookupService service, int size, long duration, TimeUnit unit) { + this.service = service; + cache = CacheBuilder.newBuilder() + .expireAfterWrite(duration, unit) + .maximumSize(size) + .build(new CacheLoader>() { + + @Override + public List load(IRI key) throws Exception { + List lookup = CachingExternalLookupService.this.service.lookup(key); + if (lookup == null || lookup.isEmpty()) { + throw new Exception("No legal value for key."); + } + return lookup; + } + }); + } + + public CachingExternalLookupService(Iterable services, int size, long duration, TimeUnit unit) { + this(new CombinedExternalLookupService(services), size, duration, unit); + } + + public CachingExternalLookupService(int size, long duration, TimeUnit unit, ExternalLookupService... 
services) { + this(Arrays.asList(services), size, duration, unit); + } - @Override - public List load(IRI key) throws Exception { - List lookup = CachingExternalLookupService.this.service.lookup(key); - if (lookup == null || lookup.isEmpty()) { - throw new Exception("No legal value for key."); - } - return lookup; - } - }); - } - - public CachingExternalLookupService(Iterable services, int size, long duration, TimeUnit unit) { - this(new CombinedExternalLookupService(services), size, duration, unit); - } + @Override + public List lookup(IRI id) { + try { + return cache.get(id); + } catch (ExecutionException e) { + return null; + } catch (UncheckedExecutionException e) { + return null; + } catch (ExecutionError e) { + return null; + } + } - public CachingExternalLookupService(int size, long duration, TimeUnit unit, ExternalLookupService...services) { - this(Arrays.asList(services), size, duration, unit); - } - - @Override - public List lookup(IRI id) { - try { - return cache.get(id); - } catch (ExecutionException e) { - return null; - } catch (UncheckedExecutionException e) { - return null; - } catch (ExecutionError e) { - return null; - } - } + @Override + public LookupEntry lookup(IRI id, String taxon) { + LookupEntry entry = null; + List list = cache.getUnchecked(id); + for (LookupEntry current : list) { + if (taxon.equals(current.taxon)) { + entry = current; + break; + } + } + return entry; + } - @Override - public LookupEntry lookup(IRI id, String taxon) { - LookupEntry entry = null; - List list = cache.getUnchecked(id); - for (LookupEntry current : list) { - if (taxon.equals(current.taxon)) { - entry = current; - break; - } - } - return entry; - } + @Override + public String toString() { + return "Caching(" + service.toString() + ")"; + } - @Override - public String toString() { - return "Caching("+service.toString()+")"; - } + @Override + public Map> lookupBatch(Set to_look_up) { + try { + Map> id_lookups = cache.getAll(to_look_up); + return id_lookups; + } catch (ExecutionException e) { + return null; + } catch (UncheckedExecutionException e) { + return null; + } catch (ExecutionError e) { + return null; + } + } - @Override - public Map> lookupBatch(Set to_look_up) { - try { - Map> id_lookups = cache.getAll(to_look_up); - return id_lookups; - } catch (ExecutionException e) { - return null; - } catch (UncheckedExecutionException e) { - return null; - } catch (ExecutionError e) { - return null; - } - } - - } diff --git a/minerva-lookup/src/main/java/org/geneontology/minerva/lookup/CombinedExternalLookupService.java b/minerva-lookup/src/main/java/org/geneontology/minerva/lookup/CombinedExternalLookupService.java index 438cc957..070c700e 100644 --- a/minerva-lookup/src/main/java/org/geneontology/minerva/lookup/CombinedExternalLookupService.java +++ b/minerva-lookup/src/main/java/org/geneontology/minerva/lookup/CombinedExternalLookupService.java @@ -1,66 +1,62 @@ package org.geneontology.minerva.lookup; -import java.util.ArrayList; -import java.util.Arrays; -import java.util.List; -import java.util.Map; -import java.util.Set; - import org.apache.commons.lang3.StringUtils; import org.semanticweb.owlapi.model.IRI; -public class CombinedExternalLookupService implements ExternalLookupService { - - private final Iterable services; - - /** - * @param services - */ - public CombinedExternalLookupService(ExternalLookupService...services) { - this(Arrays.asList(services)); - } - - /** - * @param services - */ - public CombinedExternalLookupService(Iterable services) { - this.services = services; 
- } +import java.util.*; - @Override - public List lookup(IRI id) { - List result = new ArrayList(); - for (ExternalLookupService service : services) { - List cResult = service.lookup(id); - if (cResult != null && !cResult.isEmpty()) { - result.addAll(cResult); - } - } - return result; - } - - @Override - public LookupEntry lookup(IRI id, String taxon) { - LookupEntry result = null; - for (ExternalLookupService service : services) { - result = service.lookup(id, taxon); - if (result != null) { - break; - } - } - return result; - } +public class CombinedExternalLookupService implements ExternalLookupService { - @Override - public String toString() { - return "["+StringUtils.join(services, "|")+"]"; - } + private final Iterable services; + + /** + * @param services + */ + public CombinedExternalLookupService(ExternalLookupService... services) { + this(Arrays.asList(services)); + } + + /** + * @param services + */ + public CombinedExternalLookupService(Iterable services) { + this.services = services; + } + + @Override + public List lookup(IRI id) { + List result = new ArrayList(); + for (ExternalLookupService service : services) { + List cResult = service.lookup(id); + if (cResult != null && !cResult.isEmpty()) { + result.addAll(cResult); + } + } + return result; + } + + @Override + public LookupEntry lookup(IRI id, String taxon) { + LookupEntry result = null; + for (ExternalLookupService service : services) { + result = service.lookup(id, taxon); + if (result != null) { + break; + } + } + return result; + } + + @Override + public String toString() { + return "[" + StringUtils.join(services, "|") + "]"; + } + + @Override + public Map> lookupBatch(Set to_look_up) { + // TODO Auto-generated method stub + return null; + } - @Override - public Map> lookupBatch(Set to_look_up) { - // TODO Auto-generated method stub - return null; - } - } diff --git a/minerva-lookup/src/main/java/org/geneontology/minerva/lookup/ExternalLookupService.java b/minerva-lookup/src/main/java/org/geneontology/minerva/lookup/ExternalLookupService.java index 32f72263..f2ed63ae 100644 --- a/minerva-lookup/src/main/java/org/geneontology/minerva/lookup/ExternalLookupService.java +++ b/minerva-lookup/src/main/java/org/geneontology/minerva/lookup/ExternalLookupService.java @@ -1,77 +1,76 @@ package org.geneontology.minerva.lookup; +import org.semanticweb.owlapi.model.IRI; + import java.util.List; import java.util.Map; import java.util.Set; -import org.semanticweb.owlapi.model.IRI; - /** * Interface for wrapping a service to lookup information for a given identifier. */ public interface ExternalLookupService { - /** - * Result of an id lookup. - */ - public static class LookupEntry { + /** + * Result of an id lookup. 
+     */
+    public static class LookupEntry {
+
+        public final IRI id;
+        public final String label;
+        public final String type;
+        public final String taxon;
+        public final List<String> isa_closure;
+        public final String direct_parent_iri;
+
+        /**
+         * @param id
+         * @param label
+         * @param type
+         * @param taxon
+         */
+        public LookupEntry(IRI id, String label, String type, String taxon, List<String> isa_closure) {
+            this.id = id;
+            this.label = label;
+            this.type = type;
+            this.taxon = taxon;
+            this.isa_closure = isa_closure;
+            if (isa_closure != null) {
+                if (isa_closure.contains("CHEBI:36080") || isa_closure.contains("PR:000000001")) {
+                    //protein
+                    //direct_parent_iri = "http://purl.obolibrary.org/obo/CHEBI_36080";
+                    direct_parent_iri = "http://purl.obolibrary.org/obo/PR_000000001";
+                } else if (isa_closure.contains("CHEBI:33695")) {
+                    //information biomacrolecule (gene, complex)
+                    direct_parent_iri = "http://purl.obolibrary.org/obo/CHEBI_33695";
+                } else {
+                    direct_parent_iri = null;
+                }
+            } else {
+                direct_parent_iri = null;
+            }
+        }
+    }
+
+    /**
+     * Lookup the information for the given identifier. This is not a search.
+     *
+     * @param id
+     * @return entries
+     */
+    public List<LookupEntry> lookup(IRI id);
+
+    /**
+     * Lookup the information for the given identifier and taxon. This is not a
+     * search.
+     *
+     * @param id
+     * @param taxon
+     * @return entry
+     */
+    public LookupEntry lookup(IRI id, String taxon);
-		public final IRI id;
-		public final String label;
-		public final String type;
-		public final String taxon;
-		public final List<String> isa_closure;
-		public final String direct_parent_iri;
-
-		/**
-		 * @param id
-		 * @param label
-		 * @param type
-		 * @param taxon
-		 */
-		public LookupEntry(IRI id, String label, String type, String taxon, List<String> isa_closure) {
-			this.id = id;
-			this.label = label;
-			this.type = type;
-			this.taxon = taxon;
-			this.isa_closure = isa_closure;
-			if(isa_closure!=null) {
-				if(isa_closure.contains("CHEBI:36080")||isa_closure.contains("PR:000000001")) {
-					//protein
-					//direct_parent_iri = "http://purl.obolibrary.org/obo/CHEBI_36080";
-					direct_parent_iri = "http://purl.obolibrary.org/obo/PR_000000001";
-				}else if(isa_closure.contains("CHEBI:33695")) {
-					//information biomacrolecule (gene, complex)
-					direct_parent_iri = "http://purl.obolibrary.org/obo/CHEBI_33695";
-				}else {
-					direct_parent_iri = null;
-				}
-			}else {
-				direct_parent_iri = null;
-			}
-		}
-	}
-
-	/**
-	 * Lookup the information for the given identifier. This is not a search.
-	 *
-	 * @param id
-	 * @return entries
-	 */
-	public List<LookupEntry> lookup(IRI id);
-
-	/**
-	 * Lookup the information for the given identifier and taxon. This is not a
-	 * search.
- * - * @param id - * @param taxon - * @return entry - */ - public LookupEntry lookup(IRI id, String taxon); + public Map> lookupBatch(Set to_look_up); - public Map> lookupBatch(Set to_look_up); - - } diff --git a/minerva-lookup/src/main/java/org/geneontology/minerva/lookup/GolrExternalLookupService.java b/minerva-lookup/src/main/java/org/geneontology/minerva/lookup/GolrExternalLookupService.java index bb4976c0..99222494 100644 --- a/minerva-lookup/src/main/java/org/geneontology/minerva/lookup/GolrExternalLookupService.java +++ b/minerva-lookup/src/main/java/org/geneontology/minerva/lookup/GolrExternalLookupService.java @@ -1,153 +1,142 @@ package org.geneontology.minerva.lookup; -import java.io.IOException; -import java.net.URI; -import java.util.ArrayList; -import java.util.HashMap; -import java.util.HashSet; -import java.util.List; -import java.util.Map; -import java.util.Set; - import org.apache.log4j.Logger; import org.bbop.golr.java.RetrieveGolrBioentities; -import org.bbop.golr.java.RetrieveGolrOntologyClass; import org.bbop.golr.java.RetrieveGolrBioentities.GolrBioentityDocument; +import org.bbop.golr.java.RetrieveGolrOntologyClass; import org.bbop.golr.java.RetrieveGolrOntologyClass.GolrOntologyClassDocument; import org.geneontology.minerva.curie.CurieHandler; -import org.geneontology.minerva.lookup.ExternalLookupService.LookupEntry; import org.semanticweb.owlapi.model.IRI; +import java.io.IOException; +import java.net.URI; +import java.util.*; + public class GolrExternalLookupService implements ExternalLookupService { - - private final static Logger LOG = Logger.getLogger(GolrExternalLookupService.class); - - private final RetrieveGolrBioentities bioentityClient; - private final RetrieveGolrOntologyClass ontologyClient; - - private final String golrUrl; - - private final CurieHandler curieHandler; - - public GolrExternalLookupService(String golrUrl, CurieHandler curieHandler) { - this(golrUrl, curieHandler, false); - } - - public GolrExternalLookupService(String golrUrl, CurieHandler curieHandler, final boolean logGolrRequests) { - this(golrUrl, new RetrieveGolrBioentities(golrUrl, 2){ - - @Override - protected void logRequest(URI uri) { - if(logGolrRequests) { - LOG.info("Golr bioentity request: "+uri); - } - } - - }, new RetrieveGolrOntologyClass(golrUrl, 2) { - - @Override - protected void logRequest(URI uri) { - if(logGolrRequests) { - LOG.info("Golr ontology cls request: "+uri); - } - } - - }, curieHandler); - LOG.info("Creating Golr lookup service for minerva: "+golrUrl); - } - - protected GolrExternalLookupService(String golrUrl, RetrieveGolrBioentities bioentityClient, - RetrieveGolrOntologyClass ontologyClient, CurieHandler curieHandler) { - this.bioentityClient = bioentityClient; - this.ontologyClient = ontologyClient; - this.golrUrl = golrUrl; - this.curieHandler = curieHandler; - } - - @Override - public Map> lookupBatch(Set to_look_up){ - Map> iri_lookups = new HashMap>(); - - Set curies = new HashSet(); - Map curie_iri = new HashMap(); - for(IRI iri : to_look_up) { - String curie = curieHandler.getCuri(iri); - curies.add(curie); - curie_iri.put(curie, iri); - } - - try { - Map> ontologyEntities = ontologyClient.getGolrOntologyCls(curies); - for(String id : ontologyEntities.keySet()) { - List result = new ArrayList(); - for(GolrOntologyClassDocument doc : ontologyEntities.get(id)) { - result.add(new LookupEntry(curie_iri.get(id), doc.annotation_class_label, "ontology_class", doc.only_in_taxon, doc.isa_closure)); - } - iri_lookups.put(curie_iri.get(id), result); - } - 
} catch(IOException exception) { - //if (LOG.isDebugEnabled()) { - LOG.debug("Error during retrieval for id: "+curies+" GOLR-URL: "+golrUrl, exception); - //} - return null; - } - catch (Throwable exception) { - LOG.warn("Unexpected problem during Golr lookup for id: "+curies, exception); - throw exception; - } - - return iri_lookups; - } - - @Override - public List lookup(IRI id) { - String curie = curieHandler.getCuri(id); - if (LOG.isDebugEnabled()) { - LOG.debug("Golr look up for id: "+id+" curie: "+curie); - } - List result = new ArrayList(); - try { - //in current noctua, minerva context, this is never used. noctua.golr loads everything as ontologies - List bioentites = bioentityClient.getGolrBioentites(curie); - if (bioentites != null && !bioentites.isEmpty()) { - result = new ArrayList(bioentites.size()); - for(GolrBioentityDocument doc : bioentites) { - result.add(new LookupEntry(id, doc.bioentity_label, doc.type, doc.taxon, null)); - } - } - else - if (ontologyClient != null){ - List ontologyEntities = ontologyClient.getGolrOntologyCls(curie); - if (ontologyEntities != null && !ontologyEntities.isEmpty()) { - result = new ArrayList(ontologyEntities.size()); - for(GolrOntologyClassDocument doc : ontologyEntities) { - result.add(new LookupEntry(id, doc.annotation_class_label, "ontology_class", doc.only_in_taxon, doc.isa_closure)); - } - } - } - } - catch(IOException exception) { - if (LOG.isDebugEnabled()) { - LOG.debug("Error during retrieval for id: "+id+" GOLR-URL: "+golrUrl, exception); - } - return null; - } - catch (Throwable exception) { - LOG.warn("Unexpected problem during Golr lookup for id: "+id, exception); - throw exception; - } - return result; - } - - @Override - public LookupEntry lookup(IRI id, String taxon) { - throw new RuntimeException("This method is not implemented."); - } - - @Override - public String toString() { - return "Golr: "+golrUrl; - } - - + + private final static Logger LOG = Logger.getLogger(GolrExternalLookupService.class); + + private final RetrieveGolrBioentities bioentityClient; + private final RetrieveGolrOntologyClass ontologyClient; + + private final String golrUrl; + + private final CurieHandler curieHandler; + + public GolrExternalLookupService(String golrUrl, CurieHandler curieHandler) { + this(golrUrl, curieHandler, false); + } + + public GolrExternalLookupService(String golrUrl, CurieHandler curieHandler, final boolean logGolrRequests) { + this(golrUrl, new RetrieveGolrBioentities(golrUrl, 2) { + + @Override + protected void logRequest(URI uri) { + if (logGolrRequests) { + LOG.info("Golr bioentity request: " + uri); + } + } + + }, new RetrieveGolrOntologyClass(golrUrl, 2) { + + @Override + protected void logRequest(URI uri) { + if (logGolrRequests) { + LOG.info("Golr ontology cls request: " + uri); + } + } + + }, curieHandler); + LOG.info("Creating Golr lookup service for minerva: " + golrUrl); + } + + protected GolrExternalLookupService(String golrUrl, RetrieveGolrBioentities bioentityClient, + RetrieveGolrOntologyClass ontologyClient, CurieHandler curieHandler) { + this.bioentityClient = bioentityClient; + this.ontologyClient = ontologyClient; + this.golrUrl = golrUrl; + this.curieHandler = curieHandler; + } + + @Override + public Map> lookupBatch(Set to_look_up) { + Map> iri_lookups = new HashMap>(); + + Set curies = new HashSet(); + Map curie_iri = new HashMap(); + for (IRI iri : to_look_up) { + String curie = curieHandler.getCuri(iri); + curies.add(curie); + curie_iri.put(curie, iri); + } + + try { + Map> ontologyEntities = 
ontologyClient.getGolrOntologyCls(curies); + for (String id : ontologyEntities.keySet()) { + List result = new ArrayList(); + for (GolrOntologyClassDocument doc : ontologyEntities.get(id)) { + result.add(new LookupEntry(curie_iri.get(id), doc.annotation_class_label, "ontology_class", doc.only_in_taxon, doc.isa_closure)); + } + iri_lookups.put(curie_iri.get(id), result); + } + } catch (IOException exception) { + //if (LOG.isDebugEnabled()) { + LOG.debug("Error during retrieval for id: " + curies + " GOLR-URL: " + golrUrl, exception); + //} + return null; + } catch (Throwable exception) { + LOG.warn("Unexpected problem during Golr lookup for id: " + curies, exception); + throw exception; + } + + return iri_lookups; + } + + @Override + public List lookup(IRI id) { + String curie = curieHandler.getCuri(id); + if (LOG.isDebugEnabled()) { + LOG.debug("Golr look up for id: " + id + " curie: " + curie); + } + List result = new ArrayList(); + try { + //in current noctua, minerva context, this is never used. noctua.golr loads everything as ontologies + List bioentites = bioentityClient.getGolrBioentites(curie); + if (bioentites != null && !bioentites.isEmpty()) { + result = new ArrayList(bioentites.size()); + for (GolrBioentityDocument doc : bioentites) { + result.add(new LookupEntry(id, doc.bioentity_label, doc.type, doc.taxon, null)); + } + } else if (ontologyClient != null) { + List ontologyEntities = ontologyClient.getGolrOntologyCls(curie); + if (ontologyEntities != null && !ontologyEntities.isEmpty()) { + result = new ArrayList(ontologyEntities.size()); + for (GolrOntologyClassDocument doc : ontologyEntities) { + result.add(new LookupEntry(id, doc.annotation_class_label, "ontology_class", doc.only_in_taxon, doc.isa_closure)); + } + } + } + } catch (IOException exception) { + if (LOG.isDebugEnabled()) { + LOG.debug("Error during retrieval for id: " + id + " GOLR-URL: " + golrUrl, exception); + } + return null; + } catch (Throwable exception) { + LOG.warn("Unexpected problem during Golr lookup for id: " + id, exception); + throw exception; + } + return result; + } + + @Override + public LookupEntry lookup(IRI id, String taxon) { + throw new RuntimeException("This method is not implemented."); + } + + @Override + public String toString() { + return "Golr: " + golrUrl; + } + + } diff --git a/minerva-lookup/src/main/java/org/geneontology/minerva/lookup/MonarchExternalLookupService.java b/minerva-lookup/src/main/java/org/geneontology/minerva/lookup/MonarchExternalLookupService.java index 62cfc476..f3742642 100644 --- a/minerva-lookup/src/main/java/org/geneontology/minerva/lookup/MonarchExternalLookupService.java +++ b/minerva-lookup/src/main/java/org/geneontology/minerva/lookup/MonarchExternalLookupService.java @@ -1,5 +1,11 @@ package org.geneontology.minerva.lookup; +import org.apache.log4j.Logger; +import org.bbop.golr.java.RetrieveGolrOntologyClass; +import org.bbop.golr.java.RetrieveGolrOntologyClass.GolrOntologyClassDocument; +import org.geneontology.minerva.curie.CurieHandler; +import org.semanticweb.owlapi.model.IRI; + import java.io.IOException; import java.net.URI; import java.util.ArrayList; @@ -7,92 +13,84 @@ import java.util.Map; import java.util.Set; -import org.apache.log4j.Logger; -import org.bbop.golr.java.RetrieveGolrOntologyClass; -import org.bbop.golr.java.RetrieveGolrOntologyClass.GolrOntologyClassDocument; -import org.geneontology.minerva.curie.CurieHandler; -import org.semanticweb.owlapi.model.IRI; - public class MonarchExternalLookupService implements ExternalLookupService { 
- - private final static Logger LOG = Logger.getLogger(MonarchExternalLookupService.class); - - private final RetrieveGolrOntologyClass monarchClient; - - private final String monarchUrl; - - private final CurieHandler curieHandler; - - public MonarchExternalLookupService(String monarchUrl, CurieHandler curieHandler) { - this(monarchUrl, curieHandler, false); - } - - public MonarchExternalLookupService(String monarchUrl, CurieHandler curieHandler, final boolean logGolrRequests) { - this(monarchUrl, new RetrieveGolrOntologyClass(monarchUrl, 2) { - - @Override - protected void logRequest(URI uri) { - if(logGolrRequests) { - LOG.info("Golr ontology cls request: "+uri); - } - } - - }, curieHandler); - LOG.info("Creating Golr lookup service for minerva: "+monarchUrl); - } - - protected MonarchExternalLookupService(String golrUrl, RetrieveGolrOntologyClass ontologyClient, CurieHandler curieHandler) { - this.monarchClient = ontologyClient; - this.monarchUrl = golrUrl; - this.curieHandler = curieHandler; - } - - @Override - public List lookup(IRI id) { - String curie = curieHandler.getCuri(id); - if (LOG.isDebugEnabled()) { - LOG.debug("Monarch Golr look up for id: "+id+" curie: "+curie); - } - List result = new ArrayList(); - try { - if (monarchClient != null){ - List entities = monarchClient.getGolrOntologyCls(curie); - if (entities != null && !entities.isEmpty()) { - result = new ArrayList(entities.size()); - for(GolrOntologyClassDocument doc : entities) { - result.add(new LookupEntry(id, doc.annotation_class_label, "ontology_class", null, doc.isa_closure)); - } - } - } - } - catch(IOException exception) { - if (LOG.isDebugEnabled()) { - LOG.debug("Error during retrieval for id: "+id+" GOLR-URL: "+monarchUrl, exception); - } - return null; - } - catch (Throwable exception) { - LOG.warn("Unexpected problem during Golr lookup for id: "+id, exception); - throw exception; - } - return result; - } - - @Override - public LookupEntry lookup(IRI id, String taxon) { - throw new RuntimeException("This method is not implemented."); - } - - @Override - public String toString() { - return "Monarch: "+monarchUrl; - } - - @Override - public Map> lookupBatch(Set to_look_up) { - // TODO Auto-generated method stub - return null; - } - - + + private final static Logger LOG = Logger.getLogger(MonarchExternalLookupService.class); + + private final RetrieveGolrOntologyClass monarchClient; + + private final String monarchUrl; + + private final CurieHandler curieHandler; + + public MonarchExternalLookupService(String monarchUrl, CurieHandler curieHandler) { + this(monarchUrl, curieHandler, false); + } + + public MonarchExternalLookupService(String monarchUrl, CurieHandler curieHandler, final boolean logGolrRequests) { + this(monarchUrl, new RetrieveGolrOntologyClass(monarchUrl, 2) { + + @Override + protected void logRequest(URI uri) { + if (logGolrRequests) { + LOG.info("Golr ontology cls request: " + uri); + } + } + + }, curieHandler); + LOG.info("Creating Golr lookup service for minerva: " + monarchUrl); + } + + protected MonarchExternalLookupService(String golrUrl, RetrieveGolrOntologyClass ontologyClient, CurieHandler curieHandler) { + this.monarchClient = ontologyClient; + this.monarchUrl = golrUrl; + this.curieHandler = curieHandler; + } + + @Override + public List lookup(IRI id) { + String curie = curieHandler.getCuri(id); + if (LOG.isDebugEnabled()) { + LOG.debug("Monarch Golr look up for id: " + id + " curie: " + curie); + } + List result = new ArrayList(); + try { + if (monarchClient != null) { + List 
entities = monarchClient.getGolrOntologyCls(curie); + if (entities != null && !entities.isEmpty()) { + result = new ArrayList(entities.size()); + for (GolrOntologyClassDocument doc : entities) { + result.add(new LookupEntry(id, doc.annotation_class_label, "ontology_class", null, doc.isa_closure)); + } + } + } + } catch (IOException exception) { + if (LOG.isDebugEnabled()) { + LOG.debug("Error during retrieval for id: " + id + " GOLR-URL: " + monarchUrl, exception); + } + return null; + } catch (Throwable exception) { + LOG.warn("Unexpected problem during Golr lookup for id: " + id, exception); + throw exception; + } + return result; + } + + @Override + public LookupEntry lookup(IRI id, String taxon) { + throw new RuntimeException("This method is not implemented."); + } + + @Override + public String toString() { + return "Monarch: " + monarchUrl; + } + + @Override + public Map> lookupBatch(Set to_look_up) { + // TODO Auto-generated method stub + return null; + } + + } diff --git a/minerva-lookup/src/main/java/org/geneontology/minerva/lookup/TableLookupService.java b/minerva-lookup/src/main/java/org/geneontology/minerva/lookup/TableLookupService.java index 52bb7929..ebac5ed2 100644 --- a/minerva-lookup/src/main/java/org/geneontology/minerva/lookup/TableLookupService.java +++ b/minerva-lookup/src/main/java/org/geneontology/minerva/lookup/TableLookupService.java @@ -1,63 +1,58 @@ package org.geneontology.minerva.lookup; -import java.util.ArrayList; -import java.util.Collections; -import java.util.HashMap; -import java.util.List; -import java.util.Map; -import java.util.Set; - import org.semanticweb.owlapi.model.IRI; +import java.util.*; + public class TableLookupService implements ExternalLookupService { - - private final Map> entries; - - public TableLookupService(Iterable dataProvider) { - entries = new HashMap<>(); - for (LookupEntry entry : dataProvider) { - List list = entries.get(entry.id); - if (list == null) { - list = new ArrayList(); - entries.put(entry.id, list); - } - list.add(entry); - } - } - - @Override - public List lookup(IRI id) { - List list = entries.get(id); - if (list == null) { - list = Collections.emptyList(); - } - return list; - } - - @Override - public LookupEntry lookup(IRI id, String taxon) { - LookupEntry entry = null; - List list = entries.get(id); - if (list != null) { - for (LookupEntry current : list) { - if (taxon.equals(current.taxon)) { - entry = current; - break; - } - } - } - return entry; - } - - @Override - public String toString() { - return "table: "+entries.size(); - } - - @Override - public Map> lookupBatch(Set to_look_up) { - // TODO Auto-generated method stub - return null; - } + + private final Map> entries; + + public TableLookupService(Iterable dataProvider) { + entries = new HashMap<>(); + for (LookupEntry entry : dataProvider) { + List list = entries.get(entry.id); + if (list == null) { + list = new ArrayList(); + entries.put(entry.id, list); + } + list.add(entry); + } + } + + @Override + public List lookup(IRI id) { + List list = entries.get(id); + if (list == null) { + list = Collections.emptyList(); + } + return list; + } + + @Override + public LookupEntry lookup(IRI id, String taxon) { + LookupEntry entry = null; + List list = entries.get(id); + if (list != null) { + for (LookupEntry current : list) { + if (taxon.equals(current.taxon)) { + entry = current; + break; + } + } + } + return entry; + } + + @Override + public String toString() { + return "table: " + entries.size(); + } + + @Override + public Map> lookupBatch(Set to_look_up) { + 
// TODO Auto-generated method stub + return null; + } } diff --git a/minerva-lookup/src/test/java/org/geneontology/minerva/lookup/GolrExternalLookupServiceTest.java b/minerva-lookup/src/test/java/org/geneontology/minerva/lookup/GolrExternalLookupServiceTest.java index 4e9e588d..e258b1ca 100644 --- a/minerva-lookup/src/test/java/org/geneontology/minerva/lookup/GolrExternalLookupServiceTest.java +++ b/minerva-lookup/src/test/java/org/geneontology/minerva/lookup/GolrExternalLookupServiceTest.java @@ -1,16 +1,5 @@ package org.geneontology.minerva.lookup; -import static org.junit.Assert.assertEquals; -import static org.junit.Assert.assertTrue; - -import java.net.URI; -import java.util.ArrayList; -import java.util.HashSet; -import java.util.List; -import java.util.Map; -import java.util.Set; -import java.util.concurrent.TimeUnit; - import org.apache.log4j.Level; import org.apache.log4j.Logger; import org.bbop.golr.java.RetrieveGolrBioentities; @@ -18,194 +7,197 @@ import org.geneontology.minerva.MolecularModelManager.UnknownIdentifierException; import org.geneontology.minerva.curie.CurieHandler; import org.geneontology.minerva.curie.DefaultCurieHandler; -import org.geneontology.minerva.lookup.CachingExternalLookupService; -import org.geneontology.minerva.lookup.ExternalLookupService; -import org.geneontology.minerva.lookup.GolrExternalLookupService; import org.geneontology.minerva.lookup.ExternalLookupService.LookupEntry; -import org.junit.Ignore; import org.junit.Test; import org.semanticweb.owlapi.model.IRI; +import java.net.URI; +import java.util.*; +import java.util.concurrent.TimeUnit; + +import static org.junit.Assert.assertEquals; +import static org.junit.Assert.assertTrue; + public class GolrExternalLookupServiceTest { - private static final String golrUrl = "http://noctua-golr.berkeleybop.org"; - //noting that tests written assumed "http://golr.berkeleybop.org/solr"; - private final CurieHandler handler = DefaultCurieHandler.getDefaultHandler(); - - @Test - public void testLookupString1() throws Exception { - GolrExternalLookupService s = new GolrExternalLookupService(golrUrl, handler, true); - String testCurie = "SGD:S000004529"; - List lookup = s.lookup(handler.getIRI(testCurie)); - assertEquals(1, lookup.size()); - assertEquals("TEM1 Scer", lookup.get(0).label); - - IRI testIRI = IRI.create("http://identifiers.org/sgd/S000004529"); - assertEquals(testCurie, handler.getCuri(testIRI)); - List lookup2 = s.lookup(testIRI); - assertEquals(1, lookup2.size()); - assertEquals("TEM1 Scer", lookup2.get(0).label); - } - - @Test - public void testLookupString2() throws Exception { - GolrExternalLookupService s = new GolrExternalLookupService(golrUrl, handler); - List lookup = s.lookup(handler.getIRI("SGD:S000004328")); - assertEquals(1, lookup.size()); - assertEquals("SGD1 Scer", lookup.get(0).label); - } - - @Test - public void testLookupString3() throws Exception { - GolrExternalLookupService s = new GolrExternalLookupService(golrUrl, handler); - String testCurie = "SGD:S000005952"; - List lookup = s.lookup(handler.getIRI(testCurie)); - assertEquals(1, lookup.size()); - assertEquals("PHO85 Scer", lookup.get(0).label); - //assertEquals("gene", lookup.get(0).type); - - IRI testIRI = IRI.create("http://identifiers.org/sgd/S000005952"); - String gCuri = handler.getCuri(testIRI); - List lookup2 = s.lookup(testIRI); - assertEquals(testCurie, gCuri); - assertEquals(1, lookup2.size()); - assertEquals("PHO85 Scer", lookup2.get(0).label); - //assertEquals("gene", lookup2.get(0).type); - - } - - @Test - 
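A minimal usage sketch of the lookup API reorganized above, not part of the patch itself. It assumes the signatures List<LookupEntry> lookup(IRI) and Map<IRI, List<LookupEntry>> lookupBatch(Set<IRI>) with the generic parameters spelled out, uses a placeholder Golr URL, and notes that lookupBatch returns null when the underlying Golr request fails with an IOException.

import org.geneontology.minerva.curie.CurieHandler;
import org.geneontology.minerva.curie.DefaultCurieHandler;
import org.geneontology.minerva.lookup.ExternalLookupService.LookupEntry;
import org.geneontology.minerva.lookup.GolrExternalLookupService;
import org.semanticweb.owlapi.model.IRI;

import java.util.*;

public class LookupLabelsExample {
    public static void main(String[] args) throws Exception {
        CurieHandler curieHandler = DefaultCurieHandler.getDefaultHandler();
        // Placeholder endpoint; any noctua-golr style instance is queried the same way.
        GolrExternalLookupService lookup =
                new GolrExternalLookupService("http://noctua-golr.example.org", curieHandler);

        Set<IRI> ids = new HashSet<>();
        ids.add(curieHandler.getIRI("GO:0003700"));
        ids.add(curieHandler.getIRI("SGD:S000004529"));

        // One batch round trip; the keys of the result map are the original IRIs.
        Map<IRI, List<LookupEntry>> labels = lookup.lookupBatch(ids);
        if (labels != null) {
            for (Map.Entry<IRI, List<LookupEntry>> entry : labels.entrySet()) {
                for (LookupEntry hit : entry.getValue()) {
                    System.out.println(entry.getKey() + "\t" + hit.label);
                }
            }
        }
    }
}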
public void testLookupStringCls() throws Exception { - GolrExternalLookupService s = new GolrExternalLookupService(golrUrl, handler); - List lookup = s.lookup(handler.getIRI("GO:0140312")); - //I don't believe we are loading PO:0001040 anymore - assertEquals(1, lookup.size()); - assertEquals("cargo adaptor activity", lookup.get(0).label); - } - - @Test - public void testLookupStringCls2() throws Exception { - Logger.getLogger(GolrExternalLookupService.class).setLevel(Level.DEBUG); - GolrExternalLookupService s = new GolrExternalLookupService(golrUrl, handler); - List lookup = s.lookup(handler.getIRI("WBbt:0006748")); - assertEquals(1, lookup.size()); - assertEquals("vulva", lookup.get(0).label); - //suspect this has fallen out of scope - // List lookup = s.lookup(handler.getIRI("UBERON:0010403")); - // assertEquals(1, lookup.size()); - // assertEquals("brain marginal zone", lookup.get(0).label); - } - - @Test - public void testLookupGeneProductCls() throws Exception { - Logger.getLogger(GolrExternalLookupService.class).setLevel(Level.DEBUG); - GolrExternalLookupService s = new GolrExternalLookupService(golrUrl, handler); - IRI gp_iri = handler.getIRI("UniProtKB:P32241-1"); - List lookup = s.lookup(gp_iri); - assertEquals(1, lookup.size()); - LookupEntry e = lookup.get(0); - assertEquals("VIPR1 Hsap", e.label); - assertEquals(24, e.isa_closure.size()); - assertTrue(e.isa_closure.contains("PR:000000001")); + private static final String golrUrl = "http://noctua-golr.berkeleybop.org"; + //noting that tests written assumed "http://golr.berkeleybop.org/solr"; + private final CurieHandler handler = DefaultCurieHandler.getDefaultHandler(); + + @Test + public void testLookupString1() throws Exception { + GolrExternalLookupService s = new GolrExternalLookupService(golrUrl, handler, true); + String testCurie = "SGD:S000004529"; + List lookup = s.lookup(handler.getIRI(testCurie)); + assertEquals(1, lookup.size()); + assertEquals("TEM1 Scer", lookup.get(0).label); + + IRI testIRI = IRI.create("http://identifiers.org/sgd/S000004529"); + assertEquals(testCurie, handler.getCuri(testIRI)); + List lookup2 = s.lookup(testIRI); + assertEquals(1, lookup2.size()); + assertEquals("TEM1 Scer", lookup2.get(0).label); + } + + @Test + public void testLookupString2() throws Exception { + GolrExternalLookupService s = new GolrExternalLookupService(golrUrl, handler); + List lookup = s.lookup(handler.getIRI("SGD:S000004328")); + assertEquals(1, lookup.size()); + assertEquals("SGD1 Scer", lookup.get(0).label); + } + + @Test + public void testLookupString3() throws Exception { + GolrExternalLookupService s = new GolrExternalLookupService(golrUrl, handler); + String testCurie = "SGD:S000005952"; + List lookup = s.lookup(handler.getIRI(testCurie)); + assertEquals(1, lookup.size()); + assertEquals("PHO85 Scer", lookup.get(0).label); + //assertEquals("gene", lookup.get(0).type); + + IRI testIRI = IRI.create("http://identifiers.org/sgd/S000005952"); + String gCuri = handler.getCuri(testIRI); + List lookup2 = s.lookup(testIRI); + assertEquals(testCurie, gCuri); + assertEquals(1, lookup2.size()); + assertEquals("PHO85 Scer", lookup2.get(0).label); + //assertEquals("gene", lookup2.get(0).type); + + } + + @Test + public void testLookupStringCls() throws Exception { + GolrExternalLookupService s = new GolrExternalLookupService(golrUrl, handler); + List lookup = s.lookup(handler.getIRI("GO:0140312")); + //I don't believe we are loading PO:0001040 anymore + assertEquals(1, lookup.size()); + assertEquals("cargo adaptor activity", 
lookup.get(0).label); + } + + @Test + public void testLookupStringCls2() throws Exception { + Logger.getLogger(GolrExternalLookupService.class).setLevel(Level.DEBUG); + GolrExternalLookupService s = new GolrExternalLookupService(golrUrl, handler); + List lookup = s.lookup(handler.getIRI("WBbt:0006748")); + assertEquals(1, lookup.size()); + assertEquals("vulva", lookup.get(0).label); + //suspect this has fallen out of scope + // List lookup = s.lookup(handler.getIRI("UBERON:0010403")); + // assertEquals(1, lookup.size()); + // assertEquals("brain marginal zone", lookup.get(0).label); + } + + @Test + public void testLookupGeneProductCls() throws Exception { + Logger.getLogger(GolrExternalLookupService.class).setLevel(Level.DEBUG); + GolrExternalLookupService s = new GolrExternalLookupService(golrUrl, handler); + IRI gp_iri = handler.getIRI("UniProtKB:P32241-1"); + List lookup = s.lookup(gp_iri); + assertEquals(1, lookup.size()); + LookupEntry e = lookup.get(0); + assertEquals("VIPR1 Hsap", e.label); + assertEquals(24, e.isa_closure.size()); + assertTrue(e.isa_closure.contains("PR:000000001")); // - gp_iri = handler.getIRI("SGD:S000005952"); - lookup = s.lookup(gp_iri); - assertEquals(1, lookup.size()); - e = lookup.get(0); - assertEquals("PHO85 Scer", e.label); - assertTrue(e.isa_closure.size() >= 15); - assertTrue(e.isa_closure.contains("CHEBI:33695")); + gp_iri = handler.getIRI("SGD:S000005952"); + lookup = s.lookup(gp_iri); + assertEquals(1, lookup.size()); + e = lookup.get(0); + assertEquals("PHO85 Scer", e.label); + assertTrue(e.isa_closure.size() >= 15); + assertTrue(e.isa_closure.contains("CHEBI:33695")); // // //example non-gene obo:ComplexPortal_CPX-900 https://www.ebi.ac.uk/complexportal/complex/CPX-900 - gp_iri = handler.getIRI("ComplexPortal:CPX-900"); - lookup = s.lookup(gp_iri); - assertEquals(1, lookup.size()); - e = lookup.get(0); - assertEquals("saga-kat2a_human Hsap", e.label); - assertEquals(7, e.isa_closure.size()); - assertTrue(e.isa_closure.contains("GO:0032991")); - } - - @Test - public void testCachedGolrLookup() throws Exception { - final List requests = new ArrayList(); - GolrExternalLookupService golr = new GolrExternalLookupService(golrUrl, - new RetrieveGolrBioentities(golrUrl, 2){ - - @Override - protected void logRequest(URI uri) { - requests.add(uri); - } - - }, new RetrieveGolrOntologyClass(golrUrl, 2){ - @Override - protected void logRequest(URI uri) { - requests.add(uri); - } - }, handler); - ExternalLookupService s = new CachingExternalLookupService(golr, 1000, 24l, TimeUnit.HOURS); - - List lookup1 = s.lookup(handler.getIRI("SGD:S000004529")); - assertEquals(1, lookup1.size()); - assertEquals("TEM1 Scer", lookup1.get(0).label); - int count = requests.size(); - - List lookup2 = s.lookup(handler.getIRI("SGD:S000004529")); - assertEquals(1, lookup2.size()); - assertEquals("TEM1 Scer", lookup2.get(0).label); - - // there should be no new request to Golr, that's what the cache is for! 
- assertEquals(count, requests.size()); - } - - @Test - public void testCachedGolrBatchLookup() throws Exception { - final List requests = new ArrayList(); - GolrExternalLookupService golr = new GolrExternalLookupService(golrUrl, - new RetrieveGolrBioentities(golrUrl, 2){ - - @Override - protected void logRequest(URI uri) { - requests.add(uri); - } - - }, new RetrieveGolrOntologyClass(golrUrl, 2){ - @Override - protected void logRequest(URI uri) { - requests.add(uri); - } - }, handler); - ExternalLookupService s = new CachingExternalLookupService(golr, 1000, 24l, TimeUnit.HOURS); - Set ids = new HashSet(); - ids.add(handler.getIRI("SGD:S000004529")); - ids.add(handler.getIRI("CHEBI:33695")); - ids.add(handler.getIRI("ComplexPortal:CPX-900")); - ids.add(handler.getIRI("UniProtKB:P32241-1")); - ids.add(handler.getIRI("GO:0003700")); - - Map> lookups = s.lookupBatch(ids); - assertEquals(5, lookups.size()); - assertEquals("TEM1 Scer", lookups.get(handler.getIRI("SGD:S000004529")).get(0).label); - int count = requests.size(); - - Map> lookups2 = s.lookupBatch(ids); - assertEquals(5, lookups2.size()); - assertEquals("TEM1 Scer", lookups2.get(handler.getIRI("SGD:S000004529")).get(0).label); - - // there should be no new request to Golr, that's what the cache is for! - assertEquals(count, requests.size()); - } - - - public void printListTermLabels(ExternalLookupService s, List terms) throws UnknownIdentifierException { - for(String id : terms) { - if(id.contains("CHEBI")||id.contains("PR")||id.contains("BFO")) { - List elookup = s.lookup(handler.getIRI(id)); - for(LookupEntry l : elookup) { - System.out.println(l.id+"\t"+l.label); - } - } - } - } - + gp_iri = handler.getIRI("ComplexPortal:CPX-900"); + lookup = s.lookup(gp_iri); + assertEquals(1, lookup.size()); + e = lookup.get(0); + assertEquals("saga-kat2a_human Hsap", e.label); + assertEquals(7, e.isa_closure.size()); + assertTrue(e.isa_closure.contains("GO:0032991")); + } + + @Test + public void testCachedGolrLookup() throws Exception { + final List requests = new ArrayList(); + GolrExternalLookupService golr = new GolrExternalLookupService(golrUrl, + new RetrieveGolrBioentities(golrUrl, 2) { + + @Override + protected void logRequest(URI uri) { + requests.add(uri); + } + + }, new RetrieveGolrOntologyClass(golrUrl, 2) { + @Override + protected void logRequest(URI uri) { + requests.add(uri); + } + }, handler); + ExternalLookupService s = new CachingExternalLookupService(golr, 1000, 24l, TimeUnit.HOURS); + + List lookup1 = s.lookup(handler.getIRI("SGD:S000004529")); + assertEquals(1, lookup1.size()); + assertEquals("TEM1 Scer", lookup1.get(0).label); + int count = requests.size(); + + List lookup2 = s.lookup(handler.getIRI("SGD:S000004529")); + assertEquals(1, lookup2.size()); + assertEquals("TEM1 Scer", lookup2.get(0).label); + + // there should be no new request to Golr, that's what the cache is for! 
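The assertion that follows depends on the caching wrapper created a few lines earlier: the first lookup reaches the Golr delegate, the repeat is answered from memory, so the request counter does not move. The sketch below is only an illustrative stand-in for that behaviour, not the CachingExternalLookupService implementation; the real class is also configured with a maximum size (1000) and an expiry (24 hours), which a plain map does not express.

import org.geneontology.minerva.lookup.ExternalLookupService;
import org.geneontology.minerva.lookup.ExternalLookupService.LookupEntry;
import org.semanticweb.owlapi.model.IRI;

import java.util.List;
import java.util.Map;
import java.util.concurrent.ConcurrentHashMap;

// Illustrative memoizing wrapper (hypothetical class, not part of minerva-lookup).
class MemoizingLookup {
    private final ExternalLookupService delegate;
    private final Map<IRI, List<LookupEntry>> cache = new ConcurrentHashMap<>();

    MemoizingLookup(ExternalLookupService delegate) {
        this.delegate = delegate;
    }

    List<LookupEntry> lookup(IRI id) {
        // Only a cache miss reaches the delegate, i.e. triggers a Golr request.
        return cache.computeIfAbsent(id, delegate::lookup);
    }
}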
+ assertEquals(count, requests.size()); + } + + @Test + public void testCachedGolrBatchLookup() throws Exception { + final List requests = new ArrayList(); + GolrExternalLookupService golr = new GolrExternalLookupService(golrUrl, + new RetrieveGolrBioentities(golrUrl, 2) { + + @Override + protected void logRequest(URI uri) { + requests.add(uri); + } + + }, new RetrieveGolrOntologyClass(golrUrl, 2) { + @Override + protected void logRequest(URI uri) { + requests.add(uri); + } + }, handler); + ExternalLookupService s = new CachingExternalLookupService(golr, 1000, 24l, TimeUnit.HOURS); + Set ids = new HashSet(); + ids.add(handler.getIRI("SGD:S000004529")); + ids.add(handler.getIRI("CHEBI:33695")); + ids.add(handler.getIRI("ComplexPortal:CPX-900")); + ids.add(handler.getIRI("UniProtKB:P32241-1")); + ids.add(handler.getIRI("GO:0003700")); + + Map> lookups = s.lookupBatch(ids); + assertEquals(5, lookups.size()); + assertEquals("TEM1 Scer", lookups.get(handler.getIRI("SGD:S000004529")).get(0).label); + int count = requests.size(); + + Map> lookups2 = s.lookupBatch(ids); + assertEquals(5, lookups2.size()); + assertEquals("TEM1 Scer", lookups2.get(handler.getIRI("SGD:S000004529")).get(0).label); + + // there should be no new request to Golr, that's what the cache is for! + assertEquals(count, requests.size()); + } + + + public void printListTermLabels(ExternalLookupService s, List terms) throws UnknownIdentifierException { + for (String id : terms) { + if (id.contains("CHEBI") || id.contains("PR") || id.contains("BFO")) { + List elookup = s.lookup(handler.getIRI(id)); + for (LookupEntry l : elookup) { + System.out.println(l.id + "\t" + l.label); + } + } + } + } + } diff --git a/minerva-server/pom.xml b/minerva-server/pom.xml index 62d1fce7..03e7df10 100644 --- a/minerva-server/pom.xml +++ b/minerva-server/pom.xml @@ -1,182 +1,186 @@ - 4.0.0 - - minerva - org.geneontology - 0.6.1 - - minerva-server - Minerva-Server + xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd"> + 4.0.0 + + minerva + org.geneontology + 0.6.1 + + minerva-server + Minerva-Server - - - - org.apache.maven.plugins - maven-surefire-plugin - - - - **/GolrExternalLookupServiceTest.java - **/SeedHandlerTest.java - - - - - org.jacoco - jacoco-maven-plugin - - - - org.apache.maven.plugins - maven-shade-plugin - - - package - - shade - - - - - *:* - - META-INF/*.SF - META-INF/*.DSA - META-INF/*.RSA - - - - bin/minerva-server.jar - false - - - - - org.geneontology.minerva.server.StartUpTool - ${git.commit.id} - https://github.com/geneontology/minerva/commit/${git.commit.id} - ${git.branch} - ${git.dirty} - - - - - - - - - + + + + org.apache.maven.plugins + maven-surefire-plugin + + + + **/GolrExternalLookupServiceTest.java + **/SeedHandlerTest.java + + + + + org.jacoco + jacoco-maven-plugin + + + + org.apache.maven.plugins + maven-shade-plugin + + + package + + shade + + + + + *:* + + META-INF/*.SF + META-INF/*.DSA + META-INF/*.RSA + + + + bin/minerva-server.jar + false + + + + + org.geneontology.minerva.server.StartUpTool + ${git.commit.id} + + https://github.com/geneontology/minerva/commit/${git.commit.id} + + ${git.branch} + ${git.dirty} + + + + + + + + + - - - org.geneontology - minerva-core - ${project.parent.version} - - - org.geneontology - minerva-json - ${project.parent.version} - - - org.geneontology - minerva-lookup - ${project.parent.version} - - - org.geneontology - minerva-converter - ${project.parent.version} - - - org.apache.ant - ant - 1.10.11 - - - org.eclipse.jetty - 
jetty-server - - - org.eclipse.jetty - jetty-servlet - - - org.eclipse.jetty - jetty-util - - - org.eclipse.jetty - jetty-io - - - org.eclipse.jetty - jetty-jmx - - - org.eclipse.jetty - jetty-jndi - - - org.eclipse.jetty - jetty-rewrite - - - org.eclipse.jetty - jetty-webapp - - - org.eclipse.jetty - jetty-xml - - - org.glassfish.jersey.containers - jersey-container-servlet-core - - - org.glassfish.jersey.core - jersey-common - - - org.glassfish.jersey.inject - jersey-hk2 - - - org.apache.logging.log4j - log4j-core - - - org.apache.logging.log4j - log4j-1.2-api - - + + + org.geneontology + minerva-core + ${project.parent.version} + + + org.geneontology + minerva-json + ${project.parent.version} + + + org.geneontology + minerva-lookup + ${project.parent.version} + + + org.geneontology + minerva-converter + ${project.parent.version} + + + org.apache.ant + ant + 1.10.11 + + + org.eclipse.jetty + jetty-server + + + org.eclipse.jetty + jetty-servlet + + + org.eclipse.jetty + jetty-util + + + org.eclipse.jetty + jetty-io + + + org.eclipse.jetty + jetty-jmx + + + org.eclipse.jetty + jetty-jndi + + + org.eclipse.jetty + jetty-rewrite + + + org.eclipse.jetty + jetty-webapp + + + org.eclipse.jetty + jetty-xml + + + org.glassfish.jersey.containers + jersey-container-servlet-core + + + org.glassfish.jersey.core + jersey-common + + + org.glassfish.jersey.inject + jersey-hk2 + + + org.apache.logging.log4j + log4j-core + + + org.apache.logging.log4j + log4j-1.2-api + + diff --git a/minerva-server/src/main/assembly/minerva-server.xml b/minerva-server/src/main/assembly/minerva-server.xml index 67c923c8..3151e28b 100644 --- a/minerva-server/src/main/assembly/minerva-server.xml +++ b/minerva-server/src/main/assembly/minerva-server.xml @@ -1,32 +1,32 @@ - minerva-server - - jar - - false - - - / - true - true - - - META-INF - META-INF/** - log4j.properties - *.txt - README - - - runtime - - - - - ${project.build.outputDirectory}/log4j.properties - - + xmlns="http://maven.apache.org/plugins/maven-assembly-plugin/assembly/1.1.0" + xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" + xsi:schemaLocation="http://maven.apache.org/plugins/maven-assembly-plugin/assembly/1.1.0 http://maven.apache.org/xsd/assembly-1.1.0.xsd"> + minerva-server + + jar + + false + + + / + true + true + + + META-INF + META-INF/** + log4j.properties + *.txt + README + + + runtime + + + + + ${project.build.outputDirectory}/log4j.properties + + \ No newline at end of file diff --git a/minerva-server/src/main/java/org/geneontology/minerva/ModelReaderHelper.java b/minerva-server/src/main/java/org/geneontology/minerva/ModelReaderHelper.java index 819a7bea..b9551d8c 100644 --- a/minerva-server/src/main/java/org/geneontology/minerva/ModelReaderHelper.java +++ b/minerva-server/src/main/java/org/geneontology/minerva/ModelReaderHelper.java @@ -1,100 +1,88 @@ package org.geneontology.minerva; +import org.geneontology.minerva.BlazegraphMolecularModelManager.PostLoadOntologyFilter; +import org.semanticweb.owlapi.model.*; + import java.util.ArrayList; import java.util.List; import java.util.Set; -import org.geneontology.minerva.BlazegraphMolecularModelManager.PostLoadOntologyFilter; -import org.semanticweb.owlapi.model.IRI; -import org.semanticweb.owlapi.model.OWLAnnotation; -import org.semanticweb.owlapi.model.OWLAnnotationProperty; -import org.semanticweb.owlapi.model.OWLAnnotationValueVisitorEx; -import org.semanticweb.owlapi.model.OWLAnonymousIndividual; -import org.semanticweb.owlapi.model.OWLAxiom; -import 
org.semanticweb.owlapi.model.OWLDataFactory; -import org.semanticweb.owlapi.model.OWLLiteral; -import org.semanticweb.owlapi.model.OWLOntology; -import org.semanticweb.owlapi.model.OWLOntologyChange; -import org.semanticweb.owlapi.model.OWLOntologyManager; -import org.semanticweb.owlapi.model.RemoveAxiom; -import org.semanticweb.owlapi.model.RemoveOntologyAnnotation; - /** * The axioms tagged with the lego:derived property are removed upon reading in the model. * This system is not really used anymore, but this class should still be active while * some models still contain these annotated axioms. */ public class ModelReaderHelper implements PostLoadOntologyFilter { - - public static final ModelReaderHelper INSTANCE = new ModelReaderHelper(); - public static final IRI DERIVED_IRI = IRI.create("http://geneontology.org/lego/derived"); - public static final String DERIVED_VALUE = "true"; - - private ModelReaderHelper() { - // no public constructor, use instance - } - - @Override - public OWLOntology filter(OWLOntology model) { - final OWLOntologyManager m = model.getOWLOntologyManager(); - final OWLDataFactory f = m.getOWLDataFactory(); - final OWLAnnotationProperty derivedProperty = f.getOWLAnnotationProperty(DERIVED_IRI); - - List allChanges = new ArrayList(); - - // handle model annotations - Set modelAnnotations = model.getAnnotations(); - for (OWLAnnotation modelAnnotation : modelAnnotations) { - boolean isTagged = isTagged(modelAnnotation.getAnnotations(), derivedProperty); - if (isTagged) { - allChanges.add(new RemoveOntologyAnnotation(model, modelAnnotation)); - } - } - - // handle axioms - for(OWLAxiom ax : model.getAxioms()) { - boolean isTagged = isTagged(ax.getAnnotations(), derivedProperty); - if (isTagged) { - allChanges.add(new RemoveAxiom(model, ax)); - } - } - - // execute changes as batch to minimize change event generation in the owl-api - if (allChanges.isEmpty() == false) { - m.applyChanges(allChanges); - } - - return model; - } - - static boolean isTagged(Set annotations, OWLAnnotationProperty p) { - if (annotations != null && !annotations.isEmpty()) { - for (OWLAnnotation annotation : annotations) { - if (p.equals(annotation.getProperty())) { - String value = annotation.getValue().accept(new OWLAnnotationValueVisitorEx() { - - @Override - public String visit(IRI iri) { - return null; - } - - @Override - public String visit(OWLAnonymousIndividual individual) { - return null; - } - - @Override - public String visit(OWLLiteral literal) { - return literal.getLiteral(); - } - }); - if (value != null && DERIVED_VALUE.equalsIgnoreCase(value)) { - return true; - } - } - } - } - return false; - } + + public static final ModelReaderHelper INSTANCE = new ModelReaderHelper(); + public static final IRI DERIVED_IRI = IRI.create("http://geneontology.org/lego/derived"); + public static final String DERIVED_VALUE = "true"; + + private ModelReaderHelper() { + // no public constructor, use instance + } + + @Override + public OWLOntology filter(OWLOntology model) { + final OWLOntologyManager m = model.getOWLOntologyManager(); + final OWLDataFactory f = m.getOWLDataFactory(); + final OWLAnnotationProperty derivedProperty = f.getOWLAnnotationProperty(DERIVED_IRI); + + List allChanges = new ArrayList(); + + // handle model annotations + Set modelAnnotations = model.getAnnotations(); + for (OWLAnnotation modelAnnotation : modelAnnotations) { + boolean isTagged = isTagged(modelAnnotation.getAnnotations(), derivedProperty); + if (isTagged) { + allChanges.add(new RemoveOntologyAnnotation(model, 
modelAnnotation)); + } + } + + // handle axioms + for (OWLAxiom ax : model.getAxioms()) { + boolean isTagged = isTagged(ax.getAnnotations(), derivedProperty); + if (isTagged) { + allChanges.add(new RemoveAxiom(model, ax)); + } + } + + // execute changes as batch to minimize change event generation in the owl-api + if (allChanges.isEmpty() == false) { + m.applyChanges(allChanges); + } + + return model; + } + + static boolean isTagged(Set annotations, OWLAnnotationProperty p) { + if (annotations != null && !annotations.isEmpty()) { + for (OWLAnnotation annotation : annotations) { + if (p.equals(annotation.getProperty())) { + String value = annotation.getValue().accept(new OWLAnnotationValueVisitorEx() { + + @Override + public String visit(IRI iri) { + return null; + } + + @Override + public String visit(OWLAnonymousIndividual individual) { + return null; + } + + @Override + public String visit(OWLLiteral literal) { + return literal.getLiteral(); + } + }); + if (value != null && DERIVED_VALUE.equalsIgnoreCase(value)) { + return true; + } + } + } + } + return false; + } } diff --git a/minerva-server/src/main/java/org/geneontology/minerva/server/AuthorizationRequestFilter.java b/minerva-server/src/main/java/org/geneontology/minerva/server/AuthorizationRequestFilter.java index b2c7dc3a..36eb655c 100644 --- a/minerva-server/src/main/java/org/geneontology/minerva/server/AuthorizationRequestFilter.java +++ b/minerva-server/src/main/java/org/geneontology/minerva/server/AuthorizationRequestFilter.java @@ -1,49 +1,48 @@ package org.geneontology.minerva.server; -import java.io.IOException; -import java.util.Collections; -import java.util.List; -import java.util.Set; - import javax.ws.rs.container.ContainerRequestContext; import javax.ws.rs.container.ContainerRequestFilter; import javax.ws.rs.core.MultivaluedMap; import javax.ws.rs.core.Response; import javax.ws.rs.core.Response.ResponseBuilder; import javax.ws.rs.core.UriInfo; +import java.io.IOException; +import java.util.Collections; +import java.util.List; +import java.util.Set; @Deprecated public class AuthorizationRequestFilter implements ContainerRequestFilter { - @Override - public void filter(ContainerRequestContext requestContext) throws IOException { - UriInfo uriInfo = requestContext.getUriInfo(); - List matchedURIs = uriInfo.getMatchedURIs(); - if (matchedURIs.contains("m3StoreModel")) { - boolean hasSecurityToken = hasSecurityToken(requestContext); - if (!hasSecurityToken) { - abort(requestContext); - } - } - } + @Override + public void filter(ContainerRequestContext requestContext) throws IOException { + UriInfo uriInfo = requestContext.getUriInfo(); + List matchedURIs = uriInfo.getMatchedURIs(); + if (matchedURIs.contains("m3StoreModel")) { + boolean hasSecurityToken = hasSecurityToken(requestContext); + if (!hasSecurityToken) { + abort(requestContext); + } + } + } + + private void abort(ContainerRequestContext requestContext) { + ResponseBuilder builder = Response.status(Response.Status.UNAUTHORIZED); + builder = builder.entity("User cannot access the resource."); + requestContext.abortWith(builder.build()); + } + + private boolean hasSecurityToken(ContainerRequestContext requestContext) { + UriInfo uriInfo = requestContext.getUriInfo(); + MultivaluedMap queryParameters = uriInfo.getQueryParameters(); + String secToken = queryParameters.getFirst("security-token"); + Set required = getCurrentSecurityTokens(); + boolean hasToken = required.contains(secToken); + return hasToken; + } - private void abort(ContainerRequestContext 
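For orientation on what the ModelReaderHelper above actually strips, here is a hedged OWL API sketch, not taken from the patch, of an axiom carrying the lego:derived "true" tag that isTagged() matches; the class IRIs are placeholders.

import org.geneontology.minerva.ModelReaderHelper;
import org.semanticweb.owlapi.apibinding.OWLManager;
import org.semanticweb.owlapi.model.*;

import java.util.Collections;

public class DerivedTagExample {
    public static void main(String[] args) {
        OWLDataFactory f = OWLManager.createOWLOntologyManager().getOWLDataFactory();
        OWLAnnotationProperty derived = f.getOWLAnnotationProperty(ModelReaderHelper.DERIVED_IRI);
        OWLAnnotation tag = f.getOWLAnnotation(derived, f.getOWLLiteral(ModelReaderHelper.DERIVED_VALUE));

        // Placeholder classes; any axiom type can carry the annotation.
        OWLClass a = f.getOWLClass(IRI.create("http://example.org/A"));
        OWLClass b = f.getOWLClass(IRI.create("http://example.org/B"));

        // An axiom annotated this way is removed by ModelReaderHelper.filter()
        // when the model is read back in.
        OWLAxiom taggedAxiom = f.getOWLSubClassOfAxiom(a, b, Collections.singleton(tag));
        System.out.println(taggedAxiom);
    }
}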
requestContext) { - ResponseBuilder builder = Response.status(Response.Status.UNAUTHORIZED); - builder = builder.entity("User cannot access the resource."); - requestContext.abortWith(builder.build()); - } - - private boolean hasSecurityToken(ContainerRequestContext requestContext) { - UriInfo uriInfo = requestContext.getUriInfo(); - MultivaluedMap queryParameters = uriInfo.getQueryParameters(); - String secToken = queryParameters.getFirst("security-token"); - Set required = getCurrentSecurityTokens(); - boolean hasToken = required.contains(secToken); - return hasToken; - } - - private Set getCurrentSecurityTokens() { - // TODO - return Collections.singleton("0815"); - } + private Set getCurrentSecurityTokens() { + // TODO + return Collections.singleton("0815"); + } } diff --git a/minerva-server/src/main/java/org/geneontology/minerva/server/GsonMessageBodyHandler.java b/minerva-server/src/main/java/org/geneontology/minerva/server/GsonMessageBodyHandler.java index f624f06d..283b1f52 100644 --- a/minerva-server/src/main/java/org/geneontology/minerva/server/GsonMessageBodyHandler.java +++ b/minerva-server/src/main/java/org/geneontology/minerva/server/GsonMessageBodyHandler.java @@ -1,13 +1,8 @@ package org.geneontology.minerva.server; -import java.io.IOException; -import java.io.InputStream; -import java.io.InputStreamReader; -import java.io.OutputStream; -import java.io.OutputStreamWriter; -import java.io.UnsupportedEncodingException; -import java.lang.annotation.Annotation; -import java.lang.reflect.Type; +import com.google.gson.Gson; +import com.google.gson.GsonBuilder; +import org.apache.log4j.Logger; import javax.ws.rs.Consumes; import javax.ws.rs.Produces; @@ -17,104 +12,98 @@ import javax.ws.rs.ext.MessageBodyReader; import javax.ws.rs.ext.MessageBodyWriter; import javax.ws.rs.ext.Provider; - -import org.apache.log4j.Logger; - -import com.google.gson.Gson; -import com.google.gson.GsonBuilder; +import java.io.*; +import java.lang.annotation.Annotation; +import java.lang.reflect.Type; @Provider @Produces({MediaType.APPLICATION_JSON + ";charset=utf-8", "text/json"}) @Consumes({MediaType.APPLICATION_JSON, "text/json"}) public final class GsonMessageBodyHandler implements MessageBodyWriter, MessageBodyReader { - private static Logger LOG = Logger.getLogger(GsonMessageBodyHandler.class); - private static final String UTF_8 = "UTF-8"; + private static Logger LOG = Logger.getLogger(GsonMessageBodyHandler.class); + private static final String UTF_8 = "UTF-8"; - private Gson gson; + private Gson gson; - private Gson getGson() { - if (gson == null) { - final GsonBuilder gsonBuilder = new GsonBuilder(); - gson = gsonBuilder.create(); - } - return gson; - } + private Gson getGson() { + if (gson == null) { + final GsonBuilder gsonBuilder = new GsonBuilder(); + gson = gsonBuilder.create(); + } + return gson; + } - @Override - public boolean isReadable(Class type, - Type genericType, - java.lang.annotation.Annotation[] annotations, - MediaType mediaType) - { - return true; - } + @Override + public boolean isReadable(Class type, + Type genericType, + java.lang.annotation.Annotation[] annotations, + MediaType mediaType) { + return true; + } - @Override - public Object readFrom(Class type, Type genericType, - Annotation[] annotations, MediaType mediaType, - MultivaluedMap httpHeaders, InputStream entityStream) { - InputStreamReader streamReader = null; - try { - streamReader = new InputStreamReader(entityStream, UTF_8); - } catch (UnsupportedEncodingException e) { - LOG.error(e.getMessage()); - } - try { 
- Type jsonType; - if (type.equals(genericType)) { - jsonType = type; - } else { - jsonType = genericType; - } - return getGson().fromJson(streamReader, jsonType); - } finally { - try { - streamReader.close(); - } catch (IOException e) { - LOG.error(e.getMessage()); - } - } - } + @Override + public Object readFrom(Class type, Type genericType, + Annotation[] annotations, MediaType mediaType, + MultivaluedMap httpHeaders, InputStream entityStream) { + InputStreamReader streamReader = null; + try { + streamReader = new InputStreamReader(entityStream, UTF_8); + } catch (UnsupportedEncodingException e) { + LOG.error(e.getMessage()); + } + try { + Type jsonType; + if (type.equals(genericType)) { + jsonType = type; + } else { + jsonType = genericType; + } + return getGson().fromJson(streamReader, jsonType); + } finally { + try { + streamReader.close(); + } catch (IOException e) { + LOG.error(e.getMessage()); + } + } + } - @Override - public boolean isWriteable(Class type, - Type genericType, - Annotation[] annotations, - MediaType mediaType) - { - return true; - } + @Override + public boolean isWriteable(Class type, + Type genericType, + Annotation[] annotations, + MediaType mediaType) { + return true; + } - @Override - public long getSize(Object object, - Class type, - Type genericType, - Annotation[] annotations, - MediaType mediaType) - { - return -1; - } + @Override + public long getSize(Object object, + Class type, + Type genericType, + Annotation[] annotations, + MediaType mediaType) { + return -1; + } - @Override - public void writeTo(Object object, - Class type, - Type genericType, - Annotation[] annotations, - MediaType mediaType, - MultivaluedMap httpHeaders, - OutputStream entityStream) throws IOException, WebApplicationException - { - OutputStreamWriter writer = new OutputStreamWriter(entityStream, UTF_8); - try { - Type jsonType; - if (type.equals(genericType)) { - jsonType = type; - } else { - jsonType = genericType; - } - getGson().toJson(object, jsonType, writer); - } finally { - writer.close(); - } - } + @Override + public void writeTo(Object object, + Class type, + Type genericType, + Annotation[] annotations, + MediaType mediaType, + MultivaluedMap httpHeaders, + OutputStream entityStream) throws IOException, WebApplicationException { + OutputStreamWriter writer = new OutputStreamWriter(entityStream, UTF_8); + try { + Type jsonType; + if (type.equals(genericType)) { + jsonType = type; + } else { + jsonType = genericType; + } + getGson().toJson(object, jsonType, writer); + } finally { + writer.close(); + } + } } diff --git a/minerva-server/src/main/java/org/geneontology/minerva/server/LoggingApplicationEventListener.java b/minerva-server/src/main/java/org/geneontology/minerva/server/LoggingApplicationEventListener.java index 414538b9..9effb1ab 100644 --- a/minerva-server/src/main/java/org/geneontology/minerva/server/LoggingApplicationEventListener.java +++ b/minerva-server/src/main/java/org/geneontology/minerva/server/LoggingApplicationEventListener.java @@ -1,11 +1,5 @@ package org.geneontology.minerva.server; -import java.io.IOException; -import java.util.List; -import java.util.Map.Entry; - -import javax.ws.rs.core.MultivaluedMap; - import org.apache.commons.io.IOUtils; import org.apache.log4j.Logger; import org.apache.tools.ant.filters.StringInputStream; @@ -17,94 +11,99 @@ import org.glassfish.jersey.server.monitoring.RequestEvent; import org.glassfish.jersey.server.monitoring.RequestEventListener; +import javax.ws.rs.core.MultivaluedMap; +import java.io.IOException; 
+import java.util.List; +import java.util.Map.Entry; + public class LoggingApplicationEventListener implements ApplicationEventListener { - - private static final Logger LOG = Logger.getLogger(LoggingApplicationEventListener.class); - private volatile long requestCounter = 0; - - @Override - public void onEvent(ApplicationEvent event) { - switch (event.getType()) { - case INITIALIZATION_FINISHED: - LOG.info("Application " + event.getResourceConfig().getApplicationName() - + " initialization finished."); - break; - case DESTROY_FINISHED: - LOG.info("Application "+ event.getResourceConfig().getApplicationName()+" destroyed."); - break; - default: - break; - } - } + private static final Logger LOG = Logger.getLogger(LoggingApplicationEventListener.class); + + private volatile long requestCounter = 0; + + @Override + public void onEvent(ApplicationEvent event) { + switch (event.getType()) { + case INITIALIZATION_FINISHED: + LOG.info("Application " + event.getResourceConfig().getApplicationName() + + " initialization finished."); + break; + case DESTROY_FINISHED: + LOG.info("Application " + event.getResourceConfig().getApplicationName() + " destroyed."); + break; + default: + break; + } + } + + @Override + public RequestEventListener onRequest(RequestEvent requestEvent) { + requestCounter++; + LOG.info("Request " + requestCounter + " started."); + return new LoggingRequestEventListener(requestCounter); + } + + private static class LoggingRequestEventListener implements RequestEventListener { + + private final long requestNumber; + private final long startTime; - @Override - public RequestEventListener onRequest(RequestEvent requestEvent) { - requestCounter++; - LOG.info("Request " + requestCounter + " started."); - return new LoggingRequestEventListener(requestCounter); - } + public LoggingRequestEventListener(long requestNumber) { + this.requestNumber = requestNumber; + startTime = System.currentTimeMillis(); + } - private static class LoggingRequestEventListener implements RequestEventListener { + @Override + public void onEvent(RequestEvent event) { + switch (event.getType()) { + case RESOURCE_METHOD_START: + ExtendedUriInfo uriInfo = event.getUriInfo(); + ResourceMethod method = uriInfo.getMatchedResourceMethod(); + ContainerRequest containerRequest = event.getContainerRequest(); + LOG.info(requestNumber + " Resource method " + method.getHttpMethod() + " started for request " + requestNumber); + LOG.info(requestNumber + " Headers: " + render(containerRequest.getHeaders())); + LOG.info(requestNumber + " Path: " + uriInfo.getPath()); + LOG.info(requestNumber + " PathParameters: " + render(uriInfo.getPathParameters())); + LOG.info(requestNumber + " QueryParameters: " + render(uriInfo.getQueryParameters())); + LOG.info(requestNumber + " Body: " + getBody(containerRequest)); + break; + case FINISHED: + LOG.info("Request " + requestNumber + " finished. 
Processing time " + + (System.currentTimeMillis() - startTime) + " ms."); + break; + default: + break; + } - private final long requestNumber; - private final long startTime; + } - public LoggingRequestEventListener(long requestNumber) { - this.requestNumber = requestNumber; - startTime = System.currentTimeMillis(); - } + } - @Override - public void onEvent(RequestEvent event) { - switch (event.getType()) { - case RESOURCE_METHOD_START: - ExtendedUriInfo uriInfo = event.getUriInfo(); - ResourceMethod method = uriInfo.getMatchedResourceMethod(); - ContainerRequest containerRequest = event.getContainerRequest(); - LOG.info(requestNumber+" Resource method " + method.getHttpMethod() + " started for request " + requestNumber); - LOG.info(requestNumber+" Headers: "+ render(containerRequest.getHeaders())); - LOG.info(requestNumber+" Path: "+uriInfo.getPath()); - LOG.info(requestNumber+" PathParameters: "+ render(uriInfo.getPathParameters())); - LOG.info(requestNumber+" QueryParameters: "+ render(uriInfo.getQueryParameters())); - LOG.info(requestNumber+" Body: "+getBody(containerRequest)); - break; - case FINISHED: - LOG.info("Request " + requestNumber + " finished. Processing time " - + (System.currentTimeMillis() - startTime) + " ms."); - break; - default: - break; - } - - } + private static CharSequence getBody(ContainerRequest request) { + String body = null; + try { + body = IOUtils.toString(request.getEntityStream()); + // reading the stream consumes it, need to re-create it for the real thing + request.setEntityStream(new StringInputStream(body)); + } catch (IOException e) { + LOG.warn("Couldn't ready body.", e); + } + return body; + } - } - - private static CharSequence getBody(ContainerRequest request) { - String body = null; - try { - body = IOUtils.toString(request.getEntityStream()); - // reading the stream consumes it, need to re-create it for the real thing - request.setEntityStream(new StringInputStream(body)); - } catch (IOException e) { - LOG.warn("Couldn't ready body.", e); - } - return body; - } - - private static CharSequence render(MultivaluedMap map) { - StringBuilder sb = new StringBuilder(); - int count = 0; - sb.append('['); - for (Entry> entry : map.entrySet()) { - if (count > 0) { - sb.append(','); - } - sb.append('{').append(entry.getKey()).append(',').append(entry.getValue()).append('}'); - count += 1; - } - sb.append(']'); - return sb; - } + private static CharSequence render(MultivaluedMap map) { + StringBuilder sb = new StringBuilder(); + int count = 0; + sb.append('['); + for (Entry> entry : map.entrySet()) { + if (count > 0) { + sb.append(','); + } + sb.append('{').append(entry.getKey()).append(',').append(entry.getValue()).append('}'); + count += 1; + } + sb.append(']'); + return sb; + } } diff --git a/minerva-server/src/main/java/org/geneontology/minerva/server/RequireJsonpFilter.java b/minerva-server/src/main/java/org/geneontology/minerva/server/RequireJsonpFilter.java index 7136bd4b..37874982 100644 --- a/minerva-server/src/main/java/org/geneontology/minerva/server/RequireJsonpFilter.java +++ b/minerva-server/src/main/java/org/geneontology/minerva/server/RequireJsonpFilter.java @@ -1,14 +1,13 @@ package org.geneontology.minerva.server; -import java.io.IOException; +import org.geneontology.minerva.server.handler.JsonOrJsonpBatchHandler; import javax.ws.rs.container.ContainerRequestContext; import javax.ws.rs.container.ContainerRequestFilter; import javax.ws.rs.container.PreMatching; import javax.ws.rs.core.MultivaluedMap; import javax.ws.rs.core.UriInfo; - 
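A short sketch, not part of the patch, of how the LoggingApplicationEventListener above is typically enabled: Jersey event listeners are registered on the ResourceConfig, which is presumably what StartUpTool does when its useRequestLogging flag is set.

import org.geneontology.minerva.server.LoggingApplicationEventListener;
import org.glassfish.jersey.server.ResourceConfig;

public class RequestLoggingSetup {
    // Registers the listener so Jersey calls onRequest(...) for every incoming request.
    public static ResourceConfig enableRequestLogging(ResourceConfig resourceConfig) {
        return resourceConfig.register(new LoggingApplicationEventListener());
    }
}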
-import org.geneontology.minerva.server.handler.JsonOrJsonpBatchHandler; +import java.io.IOException; /** * Replace the accepted request type, if there is a 'json.wrf' query parameter.
@@ -19,16 +18,16 @@ @PreMatching public class RequireJsonpFilter implements ContainerRequestFilter { - @Override - public void filter(ContainerRequestContext requestContext) throws IOException { - UriInfo uriInfo = requestContext.getUriInfo(); - MultivaluedMap queryParameters = uriInfo.getQueryParameters(); - for(String param : queryParameters.keySet()) { - if (JsonOrJsonpBatchHandler.JSONP_DEFAULT_OVERWRITE.equals(param)) { - MultivaluedMap headers = requestContext.getHeaders(); - headers.putSingle("Accept", "application/javascript"); - } - } - } + @Override + public void filter(ContainerRequestContext requestContext) throws IOException { + UriInfo uriInfo = requestContext.getUriInfo(); + MultivaluedMap queryParameters = uriInfo.getQueryParameters(); + for (String param : queryParameters.keySet()) { + if (JsonOrJsonpBatchHandler.JSONP_DEFAULT_OVERWRITE.equals(param)) { + MultivaluedMap headers = requestContext.getHeaders(); + headers.putSingle("Accept", "application/javascript"); + } + } + } } diff --git a/minerva-server/src/main/java/org/geneontology/minerva/server/StartUpTool.java b/minerva-server/src/main/java/org/geneontology/minerva/server/StartUpTool.java index 9b410fc4..8de4b403 100644 --- a/minerva-server/src/main/java/org/geneontology/minerva/server/StartUpTool.java +++ b/minerva-server/src/main/java/org/geneontology/minerva/server/StartUpTool.java @@ -5,7 +5,6 @@ import org.eclipse.jetty.server.HttpConnectionFactory; import org.eclipse.jetty.server.Server; import org.eclipse.jetty.server.ServerConnector; -//import org.eclipse.jetty.server.nio.SelectChannelConnector; import org.eclipse.jetty.servlet.ServletContextHandler; import org.eclipse.jetty.servlet.ServletHolder; import org.geneontology.minerva.MinervaOWLGraphWrapper; @@ -15,10 +14,7 @@ import org.geneontology.minerva.curie.CurieMappings; import org.geneontology.minerva.curie.DefaultCurieHandler; import org.geneontology.minerva.curie.MappedCurieHandler; -import org.geneontology.minerva.lookup.CachingExternalLookupService; import org.geneontology.minerva.lookup.ExternalLookupService; -import org.geneontology.minerva.lookup.GolrExternalLookupService; -import org.geneontology.minerva.lookup.MonarchExternalLookupService; import org.geneontology.minerva.server.handler.*; import org.geneontology.minerva.server.inferences.CachingInferenceProviderCreatorImpl; import org.geneontology.minerva.server.inferences.InferenceProviderCreator; @@ -26,9 +22,6 @@ import org.glassfish.jersey.server.ResourceConfig; import org.glassfish.jersey.servlet.ServletContainer; import org.semanticweb.owlapi.model.*; -import org.semanticweb.owlapi.search.EntitySearcher; -import org.semanticweb.owlapi.search.Searcher; - import owltools.cli.Opts; import owltools.gaf.eco.EcoMapperFactory; import owltools.gaf.eco.SimpleEcoMapper; @@ -39,215 +32,180 @@ import java.net.URL; import java.time.LocalDate; import java.time.LocalTime; -import java.util.Collection; -import java.util.Collections; -import java.util.HashMap; -import java.util.HashSet; -import java.util.Map; -import java.util.Set; +import java.util.*; import java.util.concurrent.TimeUnit; public class StartUpTool { - private static final Logger LOGGER = Logger.getLogger(StartUpTool.class); - - public static class MinervaStartUpConfig { - // data configuration - public String ontology = null; - public String catalog = null; - public String journalFile = null; - public String exportFolder = null; - public String modelIdPrefix = "http://model.geneontology.org/"; - public String modelIdcurie = "gomodel"; - - 
public String defaultModelState = "development"; - - public String golrUrl = null; - public String monarchUrl = null; - public String golrSeedUrl = null; - public int golrCacheSize = 100000; - public long golrCacheDuration = 24l; - public TimeUnit golrCacheDurationUnit = TimeUnit.HOURS; - public ExternalLookupService lookupService = null; - public boolean checkLiteralIds = true; - - public String reasonerOpt = null; - - public CurieHandler curieHandler; - - // The subset of highly relevant relations is configured using super property - // all direct children (asserted) are considered important - public String importantRelationParent = null; - public Set importantRelations = null; - - // server configuration - public int port = 6800; - public String contextPrefix = null; // root context by default - public String contextString = null; - - // increase default size to deal with large HTTP GET requests - public int requestHeaderSize = 64*1024; - public int requestBufferSize = 128*1024; - - public boolean useRequestLogging = false; - - public boolean useGolrUrlLogging = false; - - public String prefixesFile = null; - - public int sparqlEndpointTimeout = 100; - - public String shexFileUrl = "https://raw.githubusercontent.com/geneontology/go-shapes/master/shapes/go-cam-shapes.shex"; - public String goshapemapFileUrl = "https://raw.githubusercontent.com/geneontology/go-shapes/master/shapes/go-cam-shapes.shapeMap"; - public MinervaShexValidator shex; - public String pathToOntologyJournal; - - } - - public static void main(String[] args) throws Exception { - Opts opts = new Opts(args); - MinervaStartUpConfig conf = new MinervaStartUpConfig(); - - while (opts.hasArgs()) { - if (opts.nextEq("-g|--graph")) { - conf.ontology = opts.nextOpt(); - } - else if (opts.nextEq("-c|--catalog")) { - conf.catalog = opts.nextOpt(); - } - else if (opts.nextEq("-f|--journal-file")) { - conf.journalFile = opts.nextOpt(); - } - else if (opts.nextEq("--export-folder")) { - conf.exportFolder = opts.nextOpt(); - } - else if (opts.nextEq("--model-id-prefix")) { - conf.modelIdPrefix = opts.nextOpt(); - } - else if (opts.nextEq("--model-id-curie")) { - conf.modelIdcurie = opts.nextOpt(); - } - else if (opts.nextEq("-p|--protein-folder")) { - System.err.println("specific protein ontologies are no longer supported"); - System.exit(-1); - } - else if (opts.nextEq("--gaf-folder")) { - System.err.println("--gaf-folder is not longer supported"); - System.exit(-1); - } - else if (opts.nextEq("--context-prefix")) { - conf.contextPrefix = opts.nextOpt(); - } - else if (opts.nextEq("--port")) { - conf.port = Integer.parseInt(opts.nextOpt()); - } - else if (opts.nextEq("-i|--import|--additional-import")) { - System.err.println("-i|--import|--additional-import is no longer supported, all imports are expected to be in the source ontology '-g|--graph'"); - System.exit(-1); - } - else if (opts.nextEq("--obsolete-import")) { - System.err.println("--obsolete-import is no longer supported"); - System.exit(-1); - } - else if (opts.nextEq("--set-relevant-relations")) { - System.err.println("--set-relevant-relations is no longer supported, use '--set-important-relation-parent' instead"); - System.exit(-1); - } - else if (opts.nextEq("--add-relevant-relations")) { - System.err.println("--add-relevant-relations is no longer supported, use '--set-important-relation-parent' instead"); - System.exit(-1); - } - else if (opts.nextEq("--add-relevant-relation")) { - System.err.println("--add-relevant-relation is no longer supported, use 
'--set-important-relation-parent' instead"); - System.exit(-1); - } - else if (opts.nextEq("--set-important-relation-parent")) { - conf.importantRelationParent = opts.nextOpt(); - } - else if (opts.nextEq("--skip-class-id-validation")) { - conf.checkLiteralIds = false; - } - else if (opts.nextEq("--golr-cache-size")) { - String sizeString = opts.nextOpt(); - conf.golrCacheSize = Integer.parseInt(sizeString); - } - //--golr-labels http://noctua-golr.berkeleybop.org/ - else if (opts.nextEq("--golr-labels")) { - conf.golrUrl = opts.nextOpt(); - } - else if (opts.nextEq("--monarch-labels")) { - conf.monarchUrl = opts.nextOpt(); - } - else if (opts.nextEq("--golr-seed")) { - conf.golrSeedUrl = opts.nextOpt(); - } - else if (opts.nextEq("--no-reasoning|--no-reasoner")) { - conf.reasonerOpt = null; - } - else if (opts.nextEq("--slme-hermit")) { - conf.reasonerOpt = "slme-hermit"; - } - else if (opts.nextEq("--slme-elk")) { - conf.reasonerOpt = "slme-elk"; - } - else if (opts.nextEq("--elk")) { - conf.reasonerOpt = "elk"; - } - else if (opts.nextEq("--arachne")) { - conf.reasonerOpt = "arachne"; - } - else if (opts.nextEq("--use-request-logging|--request-logging")) { - conf.useRequestLogging = true; - } - else if (opts.nextEq("--use-golr-url-logging|--golr-url-logging")) { - conf.useGolrUrlLogging = true; - } - else if (opts.nextEq("--prefix-mappings")) { - conf.prefixesFile = opts.nextOpt(); - } - else if (opts.nextEq("--sparql-endpoint-timeout")) { - conf.sparqlEndpointTimeout = Integer.parseInt(opts.nextOpt()); - } - else if (opts.nextEq("--ontojournal")) { - conf.pathToOntologyJournal = opts.nextOpt(); - } - else { - break; - } - } - if (conf.pathToOntologyJournal == null) { - System.err.println("No blazegraph journal containing tbox ontology provided. 
exit."); - System.exit(-1); - } - if (conf.ontology == null) { - System.err.println("No ontology graph available"); - System.exit(-1); - } - if (conf.journalFile == null) { - System.err.println("No journal file available"); - System.exit(-1); - } + private static final Logger LOGGER = Logger.getLogger(StartUpTool.class); + + public static class MinervaStartUpConfig { + // data configuration + public String ontology = null; + public String catalog = null; + public String journalFile = null; + public String exportFolder = null; + public String modelIdPrefix = "http://model.geneontology.org/"; + public String modelIdcurie = "gomodel"; + + public String defaultModelState = "development"; + + public String golrUrl = null; + public String monarchUrl = null; + public String golrSeedUrl = null; + public int golrCacheSize = 100000; + public long golrCacheDuration = 24l; + public TimeUnit golrCacheDurationUnit = TimeUnit.HOURS; + public ExternalLookupService lookupService = null; + public boolean checkLiteralIds = true; + + public String reasonerOpt = null; + + public CurieHandler curieHandler; + + // The subset of highly relevant relations is configured using super property + // all direct children (asserted) are considered important + public String importantRelationParent = null; + public Set importantRelations = null; + + // server configuration + public int port = 6800; + public String contextPrefix = null; // root context by default + public String contextString = null; + + // increase default size to deal with large HTTP GET requests + public int requestHeaderSize = 64 * 1024; + public int requestBufferSize = 128 * 1024; + + public boolean useRequestLogging = false; + + public boolean useGolrUrlLogging = false; + + public String prefixesFile = null; + + public int sparqlEndpointTimeout = 100; + + public String shexFileUrl = "https://raw.githubusercontent.com/geneontology/go-shapes/master/shapes/go-cam-shapes.shex"; + public String goshapemapFileUrl = "https://raw.githubusercontent.com/geneontology/go-shapes/master/shapes/go-cam-shapes.shapeMap"; + public MinervaShexValidator shex; + public String pathToOntologyJournal; + + } + + public static void main(String[] args) throws Exception { + Opts opts = new Opts(args); + MinervaStartUpConfig conf = new MinervaStartUpConfig(); + + while (opts.hasArgs()) { + if (opts.nextEq("-g|--graph")) { + conf.ontology = opts.nextOpt(); + } else if (opts.nextEq("-c|--catalog")) { + conf.catalog = opts.nextOpt(); + } else if (opts.nextEq("-f|--journal-file")) { + conf.journalFile = opts.nextOpt(); + } else if (opts.nextEq("--export-folder")) { + conf.exportFolder = opts.nextOpt(); + } else if (opts.nextEq("--model-id-prefix")) { + conf.modelIdPrefix = opts.nextOpt(); + } else if (opts.nextEq("--model-id-curie")) { + conf.modelIdcurie = opts.nextOpt(); + } else if (opts.nextEq("-p|--protein-folder")) { + System.err.println("specific protein ontologies are no longer supported"); + System.exit(-1); + } else if (opts.nextEq("--gaf-folder")) { + System.err.println("--gaf-folder is not longer supported"); + System.exit(-1); + } else if (opts.nextEq("--context-prefix")) { + conf.contextPrefix = opts.nextOpt(); + } else if (opts.nextEq("--port")) { + conf.port = Integer.parseInt(opts.nextOpt()); + } else if (opts.nextEq("-i|--import|--additional-import")) { + System.err.println("-i|--import|--additional-import is no longer supported, all imports are expected to be in the source ontology '-g|--graph'"); + System.exit(-1); + } else if (opts.nextEq("--obsolete-import")) { + 
System.err.println("--obsolete-import is no longer supported"); + System.exit(-1); + } else if (opts.nextEq("--set-relevant-relations")) { + System.err.println("--set-relevant-relations is no longer supported, use '--set-important-relation-parent' instead"); + System.exit(-1); + } else if (opts.nextEq("--add-relevant-relations")) { + System.err.println("--add-relevant-relations is no longer supported, use '--set-important-relation-parent' instead"); + System.exit(-1); + } else if (opts.nextEq("--add-relevant-relation")) { + System.err.println("--add-relevant-relation is no longer supported, use '--set-important-relation-parent' instead"); + System.exit(-1); + } else if (opts.nextEq("--set-important-relation-parent")) { + conf.importantRelationParent = opts.nextOpt(); + } else if (opts.nextEq("--skip-class-id-validation")) { + conf.checkLiteralIds = false; + } else if (opts.nextEq("--golr-cache-size")) { + String sizeString = opts.nextOpt(); + conf.golrCacheSize = Integer.parseInt(sizeString); + } + //--golr-labels http://noctua-golr.berkeleybop.org/ + else if (opts.nextEq("--golr-labels")) { + conf.golrUrl = opts.nextOpt(); + } else if (opts.nextEq("--monarch-labels")) { + conf.monarchUrl = opts.nextOpt(); + } else if (opts.nextEq("--golr-seed")) { + conf.golrSeedUrl = opts.nextOpt(); + } else if (opts.nextEq("--no-reasoning|--no-reasoner")) { + conf.reasonerOpt = null; + } else if (opts.nextEq("--slme-hermit")) { + conf.reasonerOpt = "slme-hermit"; + } else if (opts.nextEq("--slme-elk")) { + conf.reasonerOpt = "slme-elk"; + } else if (opts.nextEq("--elk")) { + conf.reasonerOpt = "elk"; + } else if (opts.nextEq("--arachne")) { + conf.reasonerOpt = "arachne"; + } else if (opts.nextEq("--use-request-logging|--request-logging")) { + conf.useRequestLogging = true; + } else if (opts.nextEq("--use-golr-url-logging|--golr-url-logging")) { + conf.useGolrUrlLogging = true; + } else if (opts.nextEq("--prefix-mappings")) { + conf.prefixesFile = opts.nextOpt(); + } else if (opts.nextEq("--sparql-endpoint-timeout")) { + conf.sparqlEndpointTimeout = Integer.parseInt(opts.nextOpt()); + } else if (opts.nextEq("--ontojournal")) { + conf.pathToOntologyJournal = opts.nextOpt(); + } else { + break; + } + } + if (conf.pathToOntologyJournal == null) { + System.err.println("No blazegraph journal containing tbox ontology provided. exit."); + System.exit(-1); + } + if (conf.ontology == null) { + System.err.println("No ontology graph available"); + System.exit(-1); + } + if (conf.journalFile == null) { + System.err.println("No journal file available"); + System.exit(-1); + } // if (conf.golrUrl == null) { // conf.golrUrl = "http://noctua-golr.berkeleybop.org/"; // System.err.println("No GOLR service configured. This is required, please add e.g. 
--golr-labels http://noctua-golr.berkeleybop.org/ to start up parameters "); // //System.exit(-1); // } - conf.contextString = "/"; - if (conf.contextPrefix != null) { - conf.contextString = "/"+conf.contextPrefix; - } - - // set curie handler - final CurieMappings mappings; - if (conf.prefixesFile != null) { - mappings = DefaultCurieHandler.loadMappingsFromFile(new File(conf.prefixesFile)); - } else { - mappings = DefaultCurieHandler.loadDefaultMappings(); - } - CurieMappings localMappings = new CurieMappings.SimpleCurieMappings(Collections.singletonMap(conf.modelIdcurie, conf.modelIdPrefix)); - conf.curieHandler = new MappedCurieHandler(mappings, localMappings); - // wrap the Golr service with a cache + conf.contextString = "/"; + if (conf.contextPrefix != null) { + conf.contextString = "/" + conf.contextPrefix; + } + + // set curie handler + final CurieMappings mappings; + if (conf.prefixesFile != null) { + mappings = DefaultCurieHandler.loadMappingsFromFile(new File(conf.prefixesFile)); + } else { + mappings = DefaultCurieHandler.loadDefaultMappings(); + } + CurieMappings localMappings = new CurieMappings.SimpleCurieMappings(Collections.singletonMap(conf.modelIdcurie, conf.modelIdPrefix)); + conf.curieHandler = new MappedCurieHandler(mappings, localMappings); + // wrap the Golr service with a cache //get rid of external look ups altogether. // if (conf.golrUrl != null) { // conf.lookupService = new GolrExternalLookupService(conf.golrUrl, conf.curieHandler, conf.useGolrUrlLogging); @@ -263,230 +221,232 @@ else if (opts.nextEq("--ontojournal")) { // " use url logging: "+conf.useGolrUrlLogging); // conf.lookupService = new CachingExternalLookupService(conf.lookupService, conf.golrCacheSize, conf.golrCacheDuration, conf.golrCacheDurationUnit); // } - - //TODO maybe make these command line parameters - URL shex_schema_url = new URL(conf.shexFileUrl); - File shex_schema_file = new File("./target/shex-schema.shex"); - org.apache.commons.io.FileUtils.copyURLToFile(shex_schema_url, shex_schema_file); - URL shex_map_url = new URL(conf.goshapemapFileUrl); - File shex_map_file = new File("./target/go-cam-shapes.shapeMap"); - org.apache.commons.io.FileUtils.copyURLToFile(shex_map_url, shex_map_file); - //reasoner set in next phase after ontologies loaded - conf.shex = new MinervaShexValidator(shex_schema_file, shex_map_file, conf.curieHandler, null); - - Server server = startUp(conf); - try { - server.join(); - } - finally { - server.stop(); - server.destroy(); - } - } - - /** - * Try to resolve the given string into an {@link OWLObjectProperty}. - * - * @param rel - * @param g - * @return property or null - */ - public static OWLObjectProperty getRelation(String rel, MinervaOWLGraphWrapper g) { - if (rel == null || rel.isEmpty()) { - return null; - } - if (rel.startsWith("http://")) { - IRI iri = IRI.create(rel); - return g.getDataFactory().getOWLObjectProperty(iri); - } - // try to find property - OWLObjectProperty p = g.getOWLObjectPropertyByIdentifier(rel); - if (p == null) { - // could not find by id, search by label - OWLObject owlObject = g.getOWLObjectByLabel(rel); - if (owlObject instanceof OWLObjectProperty) { - p = (OWLObjectProperty) owlObject; - } - } - return p; - } - - /** - * Find all asserted direct sub properties of the parent property. 
- * - * @param parent - * @param g - * @return set - */ - public static Set getAssertedSubProperties(OWLObjectProperty parent, MinervaOWLGraphWrapper g) { - Set properties = new HashSet(); - for(OWLOntology ont : g.getAllOntologies()) { - Set axioms = ont.getObjectSubPropertyAxiomsForSuperProperty(parent); - for (OWLSubObjectPropertyOfAxiom axiom : axioms) { - OWLObjectPropertyExpression subProperty = axiom.getSubProperty(); - if (subProperty instanceof OWLObjectProperty) { - properties.add(subProperty.asOWLObjectProperty()); - } - } - } - return properties; - } - - public static Server startUp(final MinervaStartUpConfig conf) - throws Exception { - // load ontology - LOGGER.info("Start loading ontology: "+conf.ontology); - ParserWrapper pw = new ParserWrapper(); - // if available, set catalog - if (conf.catalog != null) { - LOGGER.info("Adding catalog xml: "+conf.catalog); - pw.addIRIMapper(new CatalogXmlIRIMapper(conf.catalog)); - } - MinervaOWLGraphWrapper graph = pw.parseToOWLGraph(conf.ontology); - - //grab ontology metadata and store for status service - Map> ont_annos = new HashMap>(); - for(OWLOntology ont : graph.getAllOntologies()) { - ont_annos.put(ont.getOWLOntologyManager().getOntologyDocumentIRI(ont), ont.getAnnotations()); - } - - OWLOntology full_tbox = forceMergeImports(graph.getSourceOntology(), graph.getAllOntologies()); - graph.setSourceOntology(full_tbox); - - if (conf.importantRelationParent != null) { - // try to find parent property - OWLObjectProperty parentProperty = getRelation(conf.importantRelationParent, graph); - if (parentProperty != null) { - // find all asserted direct sub properties of the parent property - conf.importantRelations = getAssertedSubProperties(parentProperty, graph); - if (conf.importantRelations.isEmpty()) { - LOGGER.warn("Could not find any asserted sub properties for parent: "+conf.importantRelationParent); - } - } - else { - LOGGER.warn("Could not find a property for rel: "+conf.importantRelationParent); - } - } - - // set folder to models - LOGGER.info("Model path: "+conf.journalFile); - - // create model manager - LOGGER.info("Start initializing Minerva"); - UndoAwareMolecularModelManager models = new UndoAwareMolecularModelManager(graph.getSourceOntology(), - conf.curieHandler, conf.modelIdPrefix, conf.journalFile, conf.exportFolder, conf.pathToOntologyJournal, true); - // set pre and post file handlers - models.addPostLoadOntologyFilter(ModelReaderHelper.INSTANCE); - // conf.shex.tbox_reasoner = models.getTbox_reasoner(); - conf.shex.setGo_lego_repo(models.getGolego_repo()); - conf.shex.curieHandler = conf.curieHandler; - // start server - Server server = startUp(models, conf, ont_annos); - return server; - } - - public static OWLOntology forceMergeImports(OWLOntology sourceOntology, Set import_set) { - - //In some cases, go-lego is not pre-merged and parseToOWLgraph keeps the imports separate - //most OWL API methods have an include-imports option that makes this work - //but EntitySearcher methods that deal with annotation assertions do not. - //The current pattern for mapping external ontologies to local ones (e.g. reactome to uniprot) - //involves the use of an annotation property.. To get that to work, - //need to pre-merge the ontologies. 
- OWLOntology full_tbox = sourceOntology; - if(import_set!=null) { - for(OWLOntology ont : import_set) { - if(!ont.equals(full_tbox)) { - full_tbox.getOWLOntologyManager().addAxioms(full_tbox, ont.getAxioms()); - full_tbox.getOWLOntologyManager().removeOntology(ont); - } - } - } - return full_tbox; - } - - public static InferenceProviderCreator createInferenceProviderCreator(String reasonerOpt, UndoAwareMolecularModelManager models, MinervaShexValidator shex) { - switch(reasonerOpt) { - // case ("slme-hermit"): return CachingInferenceProviderCreatorImpl.createHermiT(shex); - case ("slme-elk"): return CachingInferenceProviderCreatorImpl.createElk(true, shex); - case ("elk"): return CachingInferenceProviderCreatorImpl.createElk(false, shex); - case ("arachne"): return CachingInferenceProviderCreatorImpl.createArachne(models.getRuleEngine(), shex); - default: return null; - } - } - - public static Server startUp(UndoAwareMolecularModelManager models, MinervaStartUpConfig conf, Map> ont_annos) - throws Exception { - - LOGGER.info("Setup Jetty config."); - // Configuration: Use an already existing handler instance - // Configuration: Use custom JSON renderer (GSON) - ResourceConfig resourceConfig = new ResourceConfig(); - resourceConfig.register(GsonMessageBodyHandler.class); - resourceConfig.register(RequireJsonpFilter.class); - resourceConfig.register(SPARQLResultsMessageBodyWriter.class); - resourceConfig.register(SPARQLGraphMessageBodyWriter.class); - if (conf.useRequestLogging) { - resourceConfig.register(LoggingApplicationEventListener.class); - } - //resourceConfig.register(AuthorizationRequestFilter.class); - - LOGGER.info("BatchHandler config inference provider: "+conf.reasonerOpt); - LOGGER.info("BatchHandler config importantRelations: "+conf.importantRelations); - LOGGER.info("BatchHandler config lookupService: "+conf.lookupService); - LOGGER.info("BatchHandler config checkLiteralIds: "+conf.checkLiteralIds); - LOGGER.info("BatchHandler config useRequestLogging: "+conf.useRequestLogging); - if (conf.golrSeedUrl == null) { - // default fall back to normal golr URL - conf.golrSeedUrl = conf.golrUrl; - } - LOGGER.info("SeedHandler config golrUrl: "+conf.golrSeedUrl); - - InferenceProviderCreator ipc = createInferenceProviderCreator(conf.reasonerOpt, models, conf.shex); - JsonOrJsonpBatchHandler batchHandler = new JsonOrJsonpBatchHandler(models, conf.defaultModelState, - ipc, conf.importantRelations, conf.lookupService); - batchHandler.CHECK_LITERAL_IDENTIFIERS = false; //conf.checkLiteralIds; - - SimpleEcoMapper ecoMapper = EcoMapperFactory.createSimple(); - // JsonOrJsonpSeedHandler seedHandler = new JsonOrJsonpSeedHandler(models, conf.defaultModelState, conf.golrSeedUrl, ecoMapper ); - // SPARQLHandler sparqlHandler = new SPARQLHandler(models, conf.sparqlEndpointTimeout); - ModelSearchHandler searchHandler = new ModelSearchHandler(models); - ModelARTHandler artHandler = new ModelARTHandler(models, ipc); - - LocalDate d = LocalDate.now(); - LocalTime t = LocalTime.now(); - String startup = d.toString()+" "+t.toString(); - StatusHandler statusHandler = new StatusHandler(conf, ont_annos, startup); - TaxonHandler taxonHandler = new TaxonHandler(models); - resourceConfig = resourceConfig.registerInstances(batchHandler, searchHandler,artHandler, statusHandler, taxonHandler); - - // setup jetty server port, buffers and context path - Server server = new Server(); - // create connector with port and custom buffer sizes - //old jetty - //SelectChannelConnector connector = new 
SelectChannelConnector(); - //old jetty - they must be configured somewhere else in new jetty - //connector.setRequestHeaderSize(conf.requestHeaderSize); - //connector.setRequestBufferSize(conf.requestBufferSize); - //new jetty - does not have setRequestBufferSize at all - //seems to push defaults harder here. - //to change request header size need to create a new connector and manipulate httpconfiguration - HttpConfiguration http_config = new HttpConfiguration(); - http_config.setRequestHeaderSize(conf.requestHeaderSize); - ServerConnector connector = new ServerConnector(server, new HttpConnectionFactory(http_config)); - connector.setPort(conf.port); - - server.addConnector(connector); - - ServletContextHandler context = new ServletContextHandler(ServletContextHandler.SESSIONS); - context.setContextPath(conf.contextString); - server.setHandler(context); - ServletHolder h = new ServletHolder(new ServletContainer(resourceConfig)); - context.addServlet(h, "/*"); - - // start jetty server - LOGGER.info("Start server on port: "+conf.port+" context: "+conf.contextString); - server.start(); - return server; - } + + //TODO maybe make these command line parameters + URL shex_schema_url = new URL(conf.shexFileUrl); + File shex_schema_file = new File("./target/shex-schema.shex"); + org.apache.commons.io.FileUtils.copyURLToFile(shex_schema_url, shex_schema_file); + URL shex_map_url = new URL(conf.goshapemapFileUrl); + File shex_map_file = new File("./target/go-cam-shapes.shapeMap"); + org.apache.commons.io.FileUtils.copyURLToFile(shex_map_url, shex_map_file); + //reasoner set in next phase after ontologies loaded + conf.shex = new MinervaShexValidator(shex_schema_file, shex_map_file, conf.curieHandler, null); + + Server server = startUp(conf); + try { + server.join(); + } finally { + server.stop(); + server.destroy(); + } + } + + /** + * Try to resolve the given string into an {@link OWLObjectProperty}. + * + * @param rel + * @param g + * @return property or null + */ + public static OWLObjectProperty getRelation(String rel, MinervaOWLGraphWrapper g) { + if (rel == null || rel.isEmpty()) { + return null; + } + if (rel.startsWith("http://")) { + IRI iri = IRI.create(rel); + return g.getDataFactory().getOWLObjectProperty(iri); + } + // try to find property + OWLObjectProperty p = g.getOWLObjectPropertyByIdentifier(rel); + if (p == null) { + // could not find by id, search by label + OWLObject owlObject = g.getOWLObjectByLabel(rel); + if (owlObject instanceof OWLObjectProperty) { + p = (OWLObjectProperty) owlObject; + } + } + return p; + } + + /** + * Find all asserted direct sub properties of the parent property. 
+ * + * @param parent + * @param g + * @return set + */ + public static Set getAssertedSubProperties(OWLObjectProperty parent, MinervaOWLGraphWrapper g) { + Set properties = new HashSet(); + for (OWLOntology ont : g.getAllOntologies()) { + Set axioms = ont.getObjectSubPropertyAxiomsForSuperProperty(parent); + for (OWLSubObjectPropertyOfAxiom axiom : axioms) { + OWLObjectPropertyExpression subProperty = axiom.getSubProperty(); + if (subProperty instanceof OWLObjectProperty) { + properties.add(subProperty.asOWLObjectProperty()); + } + } + } + return properties; + } + + public static Server startUp(final MinervaStartUpConfig conf) + throws Exception { + // load ontology + LOGGER.info("Start loading ontology: " + conf.ontology); + ParserWrapper pw = new ParserWrapper(); + // if available, set catalog + if (conf.catalog != null) { + LOGGER.info("Adding catalog xml: " + conf.catalog); + pw.addIRIMapper(new CatalogXmlIRIMapper(conf.catalog)); + } + MinervaOWLGraphWrapper graph = pw.parseToOWLGraph(conf.ontology); + + //grab ontology metadata and store for status service + Map> ont_annos = new HashMap>(); + for (OWLOntology ont : graph.getAllOntologies()) { + ont_annos.put(ont.getOWLOntologyManager().getOntologyDocumentIRI(ont), ont.getAnnotations()); + } + + OWLOntology full_tbox = forceMergeImports(graph.getSourceOntology(), graph.getAllOntologies()); + graph.setSourceOntology(full_tbox); + + if (conf.importantRelationParent != null) { + // try to find parent property + OWLObjectProperty parentProperty = getRelation(conf.importantRelationParent, graph); + if (parentProperty != null) { + // find all asserted direct sub properties of the parent property + conf.importantRelations = getAssertedSubProperties(parentProperty, graph); + if (conf.importantRelations.isEmpty()) { + LOGGER.warn("Could not find any asserted sub properties for parent: " + conf.importantRelationParent); + } + } else { + LOGGER.warn("Could not find a property for rel: " + conf.importantRelationParent); + } + } + + // set folder to models + LOGGER.info("Model path: " + conf.journalFile); + + // create model manager + LOGGER.info("Start initializing Minerva"); + UndoAwareMolecularModelManager models = new UndoAwareMolecularModelManager(graph.getSourceOntology(), + conf.curieHandler, conf.modelIdPrefix, conf.journalFile, conf.exportFolder, conf.pathToOntologyJournal, true); + // set pre and post file handlers + models.addPostLoadOntologyFilter(ModelReaderHelper.INSTANCE); + // conf.shex.tbox_reasoner = models.getTbox_reasoner(); + conf.shex.setGo_lego_repo(models.getGolego_repo()); + conf.shex.curieHandler = conf.curieHandler; + // start server + Server server = startUp(models, conf, ont_annos); + return server; + } + + public static OWLOntology forceMergeImports(OWLOntology sourceOntology, Set import_set) { + + //In some cases, go-lego is not pre-merged and parseToOWLgraph keeps the imports separate + //most OWL API methods have an include-imports option that makes this work + //but EntitySearcher methods that deal with annotation assertions do not. + //The current pattern for mapping external ontologies to local ones (e.g. reactome to uniprot) + //involves the use of an annotation property.. To get that to work, + //need to pre-merge the ontologies. 
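// Editor's aside - illustrative sketch only, NOT part of this patch. Per the comment
// above, the EntitySearcher helpers for annotation assertions have no include-imports
// option, so the imports closure is folded into a single ontology before those lookups
// run. A minimal stand-alone version of that fold, using only stock OWL API calls,
// might look like this (all names are local to the sketch):
static OWLOntology mergeIntoOne(OWLOntologyManager manager, Set<OWLOntology> parts)
        throws OWLOntologyCreationException {
    OWLOntology merged = manager.createOntology();
    for (OWLOntology part : parts) {
        manager.addAxioms(merged, part.getAxioms()); // copy each import's axioms into the merged ontology
    }
    return merged;
}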
+ OWLOntology full_tbox = sourceOntology; + if (import_set != null) { + for (OWLOntology ont : import_set) { + if (!ont.equals(full_tbox)) { + full_tbox.getOWLOntologyManager().addAxioms(full_tbox, ont.getAxioms()); + full_tbox.getOWLOntologyManager().removeOntology(ont); + } + } + } + return full_tbox; + } + + public static InferenceProviderCreator createInferenceProviderCreator(String reasonerOpt, UndoAwareMolecularModelManager models, MinervaShexValidator shex) { + switch (reasonerOpt) { + // case ("slme-hermit"): return CachingInferenceProviderCreatorImpl.createHermiT(shex); + case ("slme-elk"): + return CachingInferenceProviderCreatorImpl.createElk(true, shex); + case ("elk"): + return CachingInferenceProviderCreatorImpl.createElk(false, shex); + case ("arachne"): + return CachingInferenceProviderCreatorImpl.createArachne(models.getRuleEngine(), shex); + default: + return null; + } + } + + public static Server startUp(UndoAwareMolecularModelManager models, MinervaStartUpConfig conf, Map> ont_annos) + throws Exception { + + LOGGER.info("Setup Jetty config."); + // Configuration: Use an already existing handler instance + // Configuration: Use custom JSON renderer (GSON) + ResourceConfig resourceConfig = new ResourceConfig(); + resourceConfig.register(GsonMessageBodyHandler.class); + resourceConfig.register(RequireJsonpFilter.class); + resourceConfig.register(SPARQLResultsMessageBodyWriter.class); + resourceConfig.register(SPARQLGraphMessageBodyWriter.class); + if (conf.useRequestLogging) { + resourceConfig.register(LoggingApplicationEventListener.class); + } + //resourceConfig.register(AuthorizationRequestFilter.class); + + LOGGER.info("BatchHandler config inference provider: " + conf.reasonerOpt); + LOGGER.info("BatchHandler config importantRelations: " + conf.importantRelations); + LOGGER.info("BatchHandler config lookupService: " + conf.lookupService); + LOGGER.info("BatchHandler config checkLiteralIds: " + conf.checkLiteralIds); + LOGGER.info("BatchHandler config useRequestLogging: " + conf.useRequestLogging); + if (conf.golrSeedUrl == null) { + // default fall back to normal golr URL + conf.golrSeedUrl = conf.golrUrl; + } + LOGGER.info("SeedHandler config golrUrl: " + conf.golrSeedUrl); + + InferenceProviderCreator ipc = createInferenceProviderCreator(conf.reasonerOpt, models, conf.shex); + JsonOrJsonpBatchHandler batchHandler = new JsonOrJsonpBatchHandler(models, conf.defaultModelState, + ipc, conf.importantRelations, conf.lookupService); + batchHandler.CHECK_LITERAL_IDENTIFIERS = false; //conf.checkLiteralIds; + + SimpleEcoMapper ecoMapper = EcoMapperFactory.createSimple(); + // JsonOrJsonpSeedHandler seedHandler = new JsonOrJsonpSeedHandler(models, conf.defaultModelState, conf.golrSeedUrl, ecoMapper ); + // SPARQLHandler sparqlHandler = new SPARQLHandler(models, conf.sparqlEndpointTimeout); + ModelSearchHandler searchHandler = new ModelSearchHandler(models); + ModelARTHandler artHandler = new ModelARTHandler(models, ipc); + + LocalDate d = LocalDate.now(); + LocalTime t = LocalTime.now(); + String startup = d.toString() + " " + t.toString(); + StatusHandler statusHandler = new StatusHandler(conf, ont_annos, startup); + TaxonHandler taxonHandler = new TaxonHandler(models); + resourceConfig = resourceConfig.registerInstances(batchHandler, searchHandler, artHandler, statusHandler, taxonHandler); + + // setup jetty server port, buffers and context path + Server server = new Server(); + // create connector with port and custom buffer sizes + //old jetty + //SelectChannelConnector 
connector = new SelectChannelConnector(); + //old jetty - they must be configured somewhere else in new jetty + //connector.setRequestHeaderSize(conf.requestHeaderSize); + //connector.setRequestBufferSize(conf.requestBufferSize); + //new jetty - does not have setRequestBufferSize at all + //seems to push defaults harder here. + //to change request header size need to create a new connector and manipulate httpconfiguration + HttpConfiguration http_config = new HttpConfiguration(); + http_config.setRequestHeaderSize(conf.requestHeaderSize); + ServerConnector connector = new ServerConnector(server, new HttpConnectionFactory(http_config)); + connector.setPort(conf.port); + + server.addConnector(connector); + + ServletContextHandler context = new ServletContextHandler(ServletContextHandler.SESSIONS); + context.setContextPath(conf.contextString); + server.setHandler(context); + ServletHolder h = new ServletHolder(new ServletContainer(resourceConfig)); + context.addServlet(h, "/*"); + + // start jetty server + LOGGER.info("Start server on port: " + conf.port + " context: " + conf.contextString); + server.start(); + return server; + } } diff --git a/minerva-server/src/main/java/org/geneontology/minerva/server/handler/JsonOrJsonpBatchHandler.java b/minerva-server/src/main/java/org/geneontology/minerva/server/handler/JsonOrJsonpBatchHandler.java index fd85461b..66f913b8 100644 --- a/minerva-server/src/main/java/org/geneontology/minerva/server/handler/JsonOrJsonpBatchHandler.java +++ b/minerva-server/src/main/java/org/geneontology/minerva/server/handler/JsonOrJsonpBatchHandler.java @@ -1,7 +1,6 @@ package org.geneontology.minerva.server.handler; import com.google.common.reflect.TypeToken; - import org.apache.commons.lang3.StringUtils; import org.apache.commons.lang3.tuple.Pair; import org.apache.log4j.Logger; @@ -24,290 +23,288 @@ public class JsonOrJsonpBatchHandler extends OperationsImpl implements M3BatchHandler { - public static final String JSONP_DEFAULT_CALLBACK = "jsonp"; - public static final String JSONP_DEFAULT_OVERWRITE = "json.wrf"; - - - public static boolean VALIDATE_BEFORE_SAVE = true; - public static boolean CHECK_LITERAL_IDENTIFIERS = false;//TODO turning this off because it depends on external lookup service - need to rewire to get rid of external lookup service entirely. 
- - private static final Logger logger = Logger.getLogger(JsonOrJsonpBatchHandler.class); - - private final InferenceProviderCreator inferenceProviderCreator; - - public JsonOrJsonpBatchHandler(UndoAwareMolecularModelManager models, - String defaultModelState, - InferenceProviderCreator inferenceProviderCreator, - Set importantRelations, - ExternalLookupService externalLookupService) { - super(models, importantRelations, defaultModelState); - this.inferenceProviderCreator = inferenceProviderCreator; - } - - private final Type requestType = new TypeToken(){ - - // generated - private static final long serialVersionUID = 5452629810143143422L; - - }.getType(); - - @Override - boolean checkLiteralIdentifiers() { - return CHECK_LITERAL_IDENTIFIERS; - } - - @Override - boolean validateBeforeSave() { - return VALIDATE_BEFORE_SAVE; - } - - @Override - @JSONP(callback = JSONP_DEFAULT_CALLBACK, queryParam = JSONP_DEFAULT_OVERWRITE) - public M3BatchResponse m3BatchGet(String intention, String packetId, String requestString, String useReasoner) { - return m3Batch(null, Collections.emptySet(), intention, packetId, requestString, useReasoner, false); - } - - @Override - @JSONP(callback = JSONP_DEFAULT_CALLBACK, queryParam = JSONP_DEFAULT_OVERWRITE) - public M3BatchResponse m3BatchGetPrivileged(String uid, Set providerGroups, String intention, String packetId, String requestString, String useReasoner) { - return m3Batch(uid, providerGroups, intention, packetId, requestString, useReasoner, true); - } - - @Override - @JSONP(callback = JSONP_DEFAULT_CALLBACK, queryParam = JSONP_DEFAULT_OVERWRITE) - public M3BatchResponse m3BatchPost(String intention, String packetId, String requestString, String useReasoner) { - return m3Batch(null, Collections.emptySet(), intention, packetId, requestString, useReasoner, false); - } - - @Override - @JSONP(callback = JSONP_DEFAULT_CALLBACK, queryParam = JSONP_DEFAULT_OVERWRITE) - public M3BatchResponse m3BatchPostPrivileged(String uid, Set providerGroups, String intention, String packetId, String requestString, String useReasoner) { - return m3Batch(uid, providerGroups, intention, packetId, requestString, useReasoner, true); - } - - private static String checkPacketId(String packetId) { - if (packetId == null) { - packetId = PacketIdGenerator.generateId(); - } - return packetId; - } - - @Override - public M3BatchResponse m3Batch(String uid, Set providerGroups, String intention, String packetId, M3Request[] requests, boolean useReasoner, boolean isPrivileged) { - M3BatchResponse response = new M3BatchResponse(uid, providerGroups, intention, checkPacketId(packetId)); - if (requests == null) { - return error(response, "The batch contains no requests: null value for request array", null); - } - try { - return m3Batch(response, requests, uid, providerGroups, useReasoner, isPrivileged); - } catch (InsufficientPermissionsException e) { - return error(response, e.getMessage(), null); - } catch (Exception e) { - return error(response, "Could not successfully complete batch request.", e); - } catch (Throwable t) { - logger.error("A critical error occured.", t); - return error(response, "An internal error occured at the server level.", t); - } - } - - private M3BatchResponse m3Batch(String uid, Set providerGroups, String intention, String packetId, String requestString, String useReasonerString, boolean isPrivileged) { - boolean useReasoner = false; - if (inferenceProviderCreator != null) { - useReasonerString = StringUtils.trimToNull(useReasonerString); - useReasoner = 
"true".equalsIgnoreCase(useReasonerString); - } - M3BatchResponse response = new M3BatchResponse(uid, providerGroups, intention, checkPacketId(packetId)); - requestString = StringUtils.trimToNull(requestString); - if (requestString == null) { - return error(response, "The batch contains no requests: null value for request", null); - } - try { - M3Request[] requests = MolecularModelJsonRenderer.parseFromJson(requestString, requestType); - return m3Batch(response, requests, uid, providerGroups, useReasoner, isPrivileged); - } catch (Exception e) { - return error(response, "Could not successfully handle batch request.", e); - } catch (Throwable t) { - logger.error("A critical error occured.", t); - return error(response, "An internal error occured at the server level.", t); - } - } - - private M3BatchResponse m3Batch(M3BatchResponse response, M3Request[] requests, String userId, Set providerGroups, boolean useReasoner, boolean isPrivileged) throws InsufficientPermissionsException, Exception { - userId = normalizeUserId(userId); - UndoMetadata token = new UndoMetadata(userId); - - final BatchHandlerValues values = new BatchHandlerValues(); - for (M3Request request : requests) { - requireNotNull(request, "request"); - requireNotNull(request.entity, "entity"); - requireNotNull(request.operation, "operation"); - final Entity entity = request.entity; - final Operation operation = request.operation; - checkPermissions(entity, operation, isPrivileged); - - // individual - if (Entity.individual == entity) { - String error = handleRequestForIndividual(request, operation, userId, providerGroups, token, values); - if (error != null) { - return error(response, error, null); - } - } - // edge - else if (Entity.edge == entity) { - String error = handleRequestForEdge(request, operation, userId, providerGroups, token, values); - if (error != null) { - return error(response, error, null); - } - } - //model - else if (Entity.model == entity) { - String error = handleRequestForModel(request, response, operation, userId, providerGroups, token, values); - if (error != null) { - return error(response, error, null); - } - } - // meta (e.g. 
relations, model ids, evidence) - else if (Entity.meta == entity) { - if (Operation.get == operation){ - if (values.nonMeta) { - // can only be used with other "meta" operations in batch mode, otherwise it would lead to conflicts in the returned signal - return error(response, "Get meta entity can only be combined with other meta operations.", null); - } - getMeta(response, userId, providerGroups); - } else if (Operation.exportAll == operation) { - exportAllModels(); - response.messageType = "success"; - response.signal = "meta"; - response.message = "Dumped all models to folder"; - return response; - } else if (Operation.sparql == operation) { - handleSPARQLRequest(request, response); - } else { - return error(response, "Unknown operation: "+operation, null); - } - } - else { - return error(response, "Unknown entity: "+entity, null); - } - } - if (M3BatchResponse.SIGNAL_META.equals(response.signal)) { - return response; - } - if (values.model == null) { - return error(response, "Empty batch calls are not supported, at least one request is required.", null); - } - // update reasoner - // report state - InferenceProvider inferenceProvider = null; - boolean isConsistent = true; - boolean isConformant = true; - if (inferenceProviderCreator != null && useReasoner) { - inferenceProvider = inferenceProviderCreator.create(values.model); - isConsistent = inferenceProvider.isConsistent(); - response.setReasoned(true); - values.renderBulk = true; // to ensure that all individuals are in the response - org.geneontology.minerva.validation.ValidationResultSet validations = inferenceProvider.getValidation_results(); - isConformant = validations.allConformant(); - } - - // create response.data - response.data = new ResponseData(); - //final MolecularModelJsonRenderer renderer = createModelRenderer(values.model, externalLookupService, inferenceProvider, curieHandler); - //working towards zero use of external look up service.. which is both slow and confusing. - final MolecularModelJsonRenderer renderer = createModelRenderer(values.model, m3.getGolego_repo(), inferenceProvider, curieHandler); - if (values.renderBulk) { - // render complete model - JsonModel jsonModel = renderer.renderModel(); - initResponseData(jsonModel, response.data); - response.signal = M3BatchResponse.SIGNAL_REBUILD; - } - else { - response.signal = M3BatchResponse.SIGNAL_MERGE; - // render individuals - if (values.relevantIndividuals.isEmpty() == false) { - Pair pair = renderer.renderIndividuals(values.relevantIndividuals); - response.data.individuals = pair.getLeft(); - response.data.facts = pair.getRight(); - } - // add model annotations - response.data.annotations = MolecularModelJsonRenderer.renderModelAnnotations(values.model.getAboxOntology(), curieHandler); - response.data.modelId = curieHandler.getCuri(values.model.getModelId()); - } - // add other infos to data - if (!isConsistent) { - response.data.inconsistentFlag = Boolean.TRUE; - } - if(!isConformant) { - response.data.validation_results = inferenceProvider.getValidation_results(); - } - response.data.diffResult = values.diffResult; - response.data.modifiedFlag = Boolean.valueOf(values.model.isModified()); - // These are required for an "okay" response. 
- response.messageType = M3BatchResponse.MESSAGE_TYPE_SUCCESS; - if( response.message == null ){ - response.message = "success"; - } - return response; - } - - public static void initResponseData(JsonModel jsonModel, ResponseData data) { - data.modelId = jsonModel.modelId; - data.individuals = jsonModel.individuals; - data.facts = jsonModel.facts; - data.annotations = jsonModel.annotations; - } - - /* - * commentary is now to be a string, not an unknown multi-leveled object. - */ - private M3BatchResponse error(M3BatchResponse state, String msg, Throwable e) { - state.messageType = "error"; - state.message = msg; - if (e != null) { - - // Add in the exception name if possible. - String ename = e.getClass().getName(); - if( ename != null ){ - state.message = state.message + " Exception: " + ename + "."; - } - - // And the exception message. - String emsg = e.getMessage(); - if( emsg != null ){ - state.message = state.message + " " + emsg; - } - - // Add the stack trace as commentary. - StringWriter stacktrace = new StringWriter(); - e.printStackTrace(new PrintWriter(stacktrace)); - state.commentary = stacktrace.toString(); - } - return state; - } - - protected void checkPermissions(Entity entity, Operation operation, boolean isPrivileged) throws InsufficientPermissionsException { - // TODO make this configurable - if (isPrivileged == false) { - switch (operation) { - case get: - case sparql: - case exportModel: - case exportModelLegacy: - case exportAll: - // positive list, all other operation require a privileged call - break; - default : - throw new InsufficientPermissionsException("Insufficient permissions for the operation "+operation+" on entity: "+entity); - } - } - } - - static class InsufficientPermissionsException extends Exception { - - private static final long serialVersionUID = -3751573576960618428L; - - InsufficientPermissionsException(String msg) { - super(msg); - } - } + public static final String JSONP_DEFAULT_CALLBACK = "jsonp"; + public static final String JSONP_DEFAULT_OVERWRITE = "json.wrf"; + + + public static boolean VALIDATE_BEFORE_SAVE = true; + public static boolean CHECK_LITERAL_IDENTIFIERS = false;//TODO turning this off because it depends on external lookup service - need to rewire to get rid of external lookup service entirely. 
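// Editor's aside - illustrative sketch only, NOT part of this patch. The two JSONP
// constants above cooperate with RequireJsonpFilter: when a GET carries a
// "json.wrf=<callback>" query parameter, the filter forces the Accept header to
// application/javascript and Jersey's @JSONP support wraps the JSON reply in that
// callback. A hypothetical meta/get call against a local server on the default port
// could be addressed like this (host, port, callback name and packet id are assumptions):
static String exampleJsonpUrl() {
    String callback = "handleMinervaResponse"; // hypothetical client-side callback
    // URL-encoded form of [{"entity":"meta","operation":"get"}]
    String requests = "%5B%7B%22entity%22%3A%22meta%22%2C%22operation%22%3A%22get%22%7D%5D";
    return "http://localhost:6800/m3Batch"
            + "?intention=query&packet-id=demo-1"
            + "&requests=" + requests
            + "&" + JSONP_DEFAULT_OVERWRITE + "=" + callback; // i.e. json.wrf=handleMinervaResponse
}
// The response body then arrives as: handleMinervaResponse({...});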
+ + private static final Logger logger = Logger.getLogger(JsonOrJsonpBatchHandler.class); + + private final InferenceProviderCreator inferenceProviderCreator; + + public JsonOrJsonpBatchHandler(UndoAwareMolecularModelManager models, + String defaultModelState, + InferenceProviderCreator inferenceProviderCreator, + Set importantRelations, + ExternalLookupService externalLookupService) { + super(models, importantRelations, defaultModelState); + this.inferenceProviderCreator = inferenceProviderCreator; + } + + private final Type requestType = new TypeToken() { + + // generated + private static final long serialVersionUID = 5452629810143143422L; + + }.getType(); + + @Override + boolean checkLiteralIdentifiers() { + return CHECK_LITERAL_IDENTIFIERS; + } + + @Override + boolean validateBeforeSave() { + return VALIDATE_BEFORE_SAVE; + } + + @Override + @JSONP(callback = JSONP_DEFAULT_CALLBACK, queryParam = JSONP_DEFAULT_OVERWRITE) + public M3BatchResponse m3BatchGet(String intention, String packetId, String requestString, String useReasoner) { + return m3Batch(null, Collections.emptySet(), intention, packetId, requestString, useReasoner, false); + } + + @Override + @JSONP(callback = JSONP_DEFAULT_CALLBACK, queryParam = JSONP_DEFAULT_OVERWRITE) + public M3BatchResponse m3BatchGetPrivileged(String uid, Set providerGroups, String intention, String packetId, String requestString, String useReasoner) { + return m3Batch(uid, providerGroups, intention, packetId, requestString, useReasoner, true); + } + + @Override + @JSONP(callback = JSONP_DEFAULT_CALLBACK, queryParam = JSONP_DEFAULT_OVERWRITE) + public M3BatchResponse m3BatchPost(String intention, String packetId, String requestString, String useReasoner) { + return m3Batch(null, Collections.emptySet(), intention, packetId, requestString, useReasoner, false); + } + + @Override + @JSONP(callback = JSONP_DEFAULT_CALLBACK, queryParam = JSONP_DEFAULT_OVERWRITE) + public M3BatchResponse m3BatchPostPrivileged(String uid, Set providerGroups, String intention, String packetId, String requestString, String useReasoner) { + return m3Batch(uid, providerGroups, intention, packetId, requestString, useReasoner, true); + } + + private static String checkPacketId(String packetId) { + if (packetId == null) { + packetId = PacketIdGenerator.generateId(); + } + return packetId; + } + + @Override + public M3BatchResponse m3Batch(String uid, Set providerGroups, String intention, String packetId, M3Request[] requests, boolean useReasoner, boolean isPrivileged) { + M3BatchResponse response = new M3BatchResponse(uid, providerGroups, intention, checkPacketId(packetId)); + if (requests == null) { + return error(response, "The batch contains no requests: null value for request array", null); + } + try { + return m3Batch(response, requests, uid, providerGroups, useReasoner, isPrivileged); + } catch (InsufficientPermissionsException e) { + return error(response, e.getMessage(), null); + } catch (Exception e) { + return error(response, "Could not successfully complete batch request.", e); + } catch (Throwable t) { + logger.error("A critical error occured.", t); + return error(response, "An internal error occured at the server level.", t); + } + } + + private M3BatchResponse m3Batch(String uid, Set providerGroups, String intention, String packetId, String requestString, String useReasonerString, boolean isPrivileged) { + boolean useReasoner = false; + if (inferenceProviderCreator != null) { + useReasonerString = StringUtils.trimToNull(useReasonerString); + useReasoner = 
"true".equalsIgnoreCase(useReasonerString); + } + M3BatchResponse response = new M3BatchResponse(uid, providerGroups, intention, checkPacketId(packetId)); + requestString = StringUtils.trimToNull(requestString); + if (requestString == null) { + return error(response, "The batch contains no requests: null value for request", null); + } + try { + M3Request[] requests = MolecularModelJsonRenderer.parseFromJson(requestString, requestType); + return m3Batch(response, requests, uid, providerGroups, useReasoner, isPrivileged); + } catch (Exception e) { + return error(response, "Could not successfully handle batch request.", e); + } catch (Throwable t) { + logger.error("A critical error occured.", t); + return error(response, "An internal error occured at the server level.", t); + } + } + + private M3BatchResponse m3Batch(M3BatchResponse response, M3Request[] requests, String userId, Set providerGroups, boolean useReasoner, boolean isPrivileged) throws InsufficientPermissionsException, Exception { + userId = normalizeUserId(userId); + UndoMetadata token = new UndoMetadata(userId); + + final BatchHandlerValues values = new BatchHandlerValues(); + for (M3Request request : requests) { + requireNotNull(request, "request"); + requireNotNull(request.entity, "entity"); + requireNotNull(request.operation, "operation"); + final Entity entity = request.entity; + final Operation operation = request.operation; + checkPermissions(entity, operation, isPrivileged); + + // individual + if (Entity.individual == entity) { + String error = handleRequestForIndividual(request, operation, userId, providerGroups, token, values); + if (error != null) { + return error(response, error, null); + } + } + // edge + else if (Entity.edge == entity) { + String error = handleRequestForEdge(request, operation, userId, providerGroups, token, values); + if (error != null) { + return error(response, error, null); + } + } + //model + else if (Entity.model == entity) { + String error = handleRequestForModel(request, response, operation, userId, providerGroups, token, values); + if (error != null) { + return error(response, error, null); + } + } + // meta (e.g. 
relations, model ids, evidence) + else if (Entity.meta == entity) { + if (Operation.get == operation) { + if (values.nonMeta) { + // can only be used with other "meta" operations in batch mode, otherwise it would lead to conflicts in the returned signal + return error(response, "Get meta entity can only be combined with other meta operations.", null); + } + getMeta(response, userId, providerGroups); + } else if (Operation.exportAll == operation) { + exportAllModels(); + response.messageType = "success"; + response.signal = "meta"; + response.message = "Dumped all models to folder"; + return response; + } else if (Operation.sparql == operation) { + handleSPARQLRequest(request, response); + } else { + return error(response, "Unknown operation: " + operation, null); + } + } else { + return error(response, "Unknown entity: " + entity, null); + } + } + if (M3BatchResponse.SIGNAL_META.equals(response.signal)) { + return response; + } + if (values.model == null) { + return error(response, "Empty batch calls are not supported, at least one request is required.", null); + } + // update reasoner + // report state + InferenceProvider inferenceProvider = null; + boolean isConsistent = true; + boolean isConformant = true; + if (inferenceProviderCreator != null && useReasoner) { + inferenceProvider = inferenceProviderCreator.create(values.model); + isConsistent = inferenceProvider.isConsistent(); + response.setReasoned(true); + values.renderBulk = true; // to ensure that all individuals are in the response + org.geneontology.minerva.validation.ValidationResultSet validations = inferenceProvider.getValidation_results(); + isConformant = validations.allConformant(); + } + + // create response.data + response.data = new ResponseData(); + //final MolecularModelJsonRenderer renderer = createModelRenderer(values.model, externalLookupService, inferenceProvider, curieHandler); + //working towards zero use of external look up service.. which is both slow and confusing. + final MolecularModelJsonRenderer renderer = createModelRenderer(values.model, m3.getGolego_repo(), inferenceProvider, curieHandler); + if (values.renderBulk) { + // render complete model + JsonModel jsonModel = renderer.renderModel(); + initResponseData(jsonModel, response.data); + response.signal = M3BatchResponse.SIGNAL_REBUILD; + } else { + response.signal = M3BatchResponse.SIGNAL_MERGE; + // render individuals + if (values.relevantIndividuals.isEmpty() == false) { + Pair pair = renderer.renderIndividuals(values.relevantIndividuals); + response.data.individuals = pair.getLeft(); + response.data.facts = pair.getRight(); + } + // add model annotations + response.data.annotations = MolecularModelJsonRenderer.renderModelAnnotations(values.model.getAboxOntology(), curieHandler); + response.data.modelId = curieHandler.getCuri(values.model.getModelId()); + } + // add other infos to data + if (!isConsistent) { + response.data.inconsistentFlag = Boolean.TRUE; + } + if (!isConformant) { + response.data.validation_results = inferenceProvider.getValidation_results(); + } + response.data.diffResult = values.diffResult; + response.data.modifiedFlag = Boolean.valueOf(values.model.isModified()); + // These are required for an "okay" response. 
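// Editor's aside - illustrative sketch only, NOT part of this patch. Driving the typed
// m3Batch overload directly (for example from a test) could look like the lines below;
// "batchHandler" is an assumed JsonOrJsonpBatchHandler instance, the field access on
// M3Request assumes same-package visibility as in the dispatch loop above, and all
// literal values are hypothetical:
M3Request metaRequest = new M3Request();
metaRequest.entity = Entity.meta;
metaRequest.operation = Operation.get;
M3BatchResponse reply = batchHandler.m3Batch(
        "demo-user", Collections.emptySet(), "query", null,
        new M3Request[]{metaRequest}, false, false);
// reply.messageType then reports whether the batch succeeded.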
+ response.messageType = M3BatchResponse.MESSAGE_TYPE_SUCCESS; + if (response.message == null) { + response.message = "success"; + } + return response; + } + + public static void initResponseData(JsonModel jsonModel, ResponseData data) { + data.modelId = jsonModel.modelId; + data.individuals = jsonModel.individuals; + data.facts = jsonModel.facts; + data.annotations = jsonModel.annotations; + } + + /* + * commentary is now to be a string, not an unknown multi-leveled object. + */ + private M3BatchResponse error(M3BatchResponse state, String msg, Throwable e) { + state.messageType = "error"; + state.message = msg; + if (e != null) { + + // Add in the exception name if possible. + String ename = e.getClass().getName(); + if (ename != null) { + state.message = state.message + " Exception: " + ename + "."; + } + + // And the exception message. + String emsg = e.getMessage(); + if (emsg != null) { + state.message = state.message + " " + emsg; + } + + // Add the stack trace as commentary. + StringWriter stacktrace = new StringWriter(); + e.printStackTrace(new PrintWriter(stacktrace)); + state.commentary = stacktrace.toString(); + } + return state; + } + + protected void checkPermissions(Entity entity, Operation operation, boolean isPrivileged) throws InsufficientPermissionsException { + // TODO make this configurable + if (isPrivileged == false) { + switch (operation) { + case get: + case sparql: + case exportModel: + case exportModelLegacy: + case exportAll: + // positive list, all other operation require a privileged call + break; + default: + throw new InsufficientPermissionsException("Insufficient permissions for the operation " + operation + " on entity: " + entity); + } + } + } + + static class InsufficientPermissionsException extends Exception { + + private static final long serialVersionUID = -3751573576960618428L; + + InsufficientPermissionsException(String msg) { + super(msg); + } + } } diff --git a/minerva-server/src/main/java/org/geneontology/minerva/server/handler/M3BatchHandler.java b/minerva-server/src/main/java/org/geneontology/minerva/server/handler/M3BatchHandler.java index 37829ea1..0a0d2e94 100644 --- a/minerva-server/src/main/java/org/geneontology/minerva/server/handler/M3BatchHandler.java +++ b/minerva-server/src/main/java/org/geneontology/minerva/server/handler/M3BatchHandler.java @@ -4,7 +4,6 @@ import com.google.gson.annotations.SerializedName; import org.geneontology.minerva.json.*; import org.geneontology.minerva.validation.ValidationResultSet; -import org.geneontology.owl.differ.Differ; import javax.ws.rs.*; import java.util.List; @@ -14,246 +13,247 @@ @Path("/") public interface M3BatchHandler { - public static class M3Request extends MinervaRequest { - // wrapper to conform to minerva request standard - } - - public static enum Entity { - individual, - edge, - model, - meta; - } - - public static enum Operation { - // generic operations - get, - - @SerializedName("export-all") - exportAll, - - @SerializedName("add-type") - addType, - - @SerializedName("remove-type") - removeType, - - add, - - remove, - - @SerializedName("add-annotation") - addAnnotation, - - @SerializedName("remove-annotation") - removeAnnotation, - - // model specific operations - @SerializedName("export") - exportModel, - - @SerializedName("export-legacy") - exportModelLegacy, - - @SerializedName("import") - importModel, - - @SerializedName("store") - storeModel, - - @SerializedName("reset") - resetModel, - - @SerializedName("diff") - diffModel, - - @SerializedName("update-imports") - 
updateImports, - - // undo operations for models - undo, // undo the latest op - redo, // redo the latest undo - @SerializedName("get-undo-redo") - getUndoRedo, // get a list of all currently available undo and redo for a model - - sparql - - } - - public static class M3Argument extends MinervaRequest.MinervaArgument { - - @SerializedName("model-id") - String modelId; - String subject; - String object; - String predicate; - String individual; - - @SerializedName("individual-iri") - String individualIRI; - - @SerializedName("taxon-id") - String taxonId; - - @SerializedName("import-model") - String importModel; - String format; - - @SerializedName("assign-to-variable") - String assignToVariable; - - JsonOwlObject[] expressions; - JsonAnnotation[] values; - - String query; - } - - public static class M3BatchResponse extends MinervaResponse{ - - public static class ResponseData extends JsonModel { - - @SerializedName("inconsistent-p") - public Boolean inconsistentFlag; - - @SerializedName("modified-p") - public Boolean modifiedFlag; - - //TODO starting out here with raw result from robot - @SerializedName("diff-result") - public String diffResult; - - public Object undo; - public Object redo; - - @SerializedName("export-model") - public String exportModel; - - public MetaResponse meta; - - @SerializedName("sparql-result") - public JsonObject sparqlResult; - - @SerializedName("validation-results") - public ValidationResultSet validation_results; - - } - - public static class MetaResponse { - public JsonRelationInfo[] relations; - - @SerializedName("data-properties") - public JsonRelationInfo[] dataProperties; - - public JsonEvidenceInfo[] evidence; - - @SerializedName("models-meta") - public Map> modelsMeta; - - @SerializedName("models-meta-read-only") - public Map> modelsReadOnly; - } - - /** - * @param uid - * @param intention - * @param packetId - */ - public M3BatchResponse(String uid, Set providerGroups, String intention, String packetId) { - super(uid, providerGroups, intention, packetId); - } - - } - - - /** - * Process a batch request. The parameters uid and intention are round-tripped for the JSONP. - * - * @param uid user id, JSONP relevant - * @param intention JSONP relevant - * @param packetId response relevant, may be null - * @param requests batch request - * @param useReasoner - * @param isPrivileged true, if the access is privileged - * @return response object, never null - */ - public M3BatchResponse m3Batch(String uid, Set providerGroups, String intention, String packetId, M3Request[] requests, boolean useReasoner, boolean isPrivileged); - - /** - * Jersey REST method for POST with three form parameters. - * - * @param intention JSONP relevant - * @param packetId - * @param requests JSON string of the batch request - * @param useReasoner - * @return response convertible to JSON(P) - */ - @Path("m3Batch") - @POST - @Consumes("application/x-www-form-urlencoded") - public M3BatchResponse m3BatchPost( - @FormParam("intention") String intention, - @FormParam("packet-id") String packetId, - @FormParam("requests") String requests, - @FormParam("use-reasoner") String useReasoner); - - /** - * Jersey REST method for POST with three form parameters with privileged rights. 
- * - * @param uid user id, JSONP relevant - * @param providerGroups user groups, JSONP relevant - * @param intention JSONP relevant - * @param packetId - * @param requests JSON string of the batch request - * @param useReasoner - * @return response convertible to JSON(P) - */ - @Path("m3BatchPrivileged") - @POST - @Consumes("application/x-www-form-urlencoded") - public M3BatchResponse m3BatchPostPrivileged( - @FormParam("uid") String uid, - @FormParam("provided-by") Set providerGroups, - @FormParam("intention") String intention, - @FormParam("packet-id") String packetId, - @FormParam("requests") String requests, - @FormParam("use-reasoner") String useReasoner); - - - /** - * Jersey REST method for GET with three query parameters. - * - * @param intention JSONP relevant - * @param packetId - * @param requests JSON string of the batch request - * @param useReasoner - * @return response convertible to JSON(P) - */ - @Path("m3Batch") - @GET - public M3BatchResponse m3BatchGet( - @QueryParam("intention") String intention, - @QueryParam("packet-id") String packetId, - @QueryParam("requests") String requests, - @QueryParam("use-reasoner") String useReasoner); - /** - * Jersey REST method for GET with three query parameters with privileged rights. - * - * @param uid user id, JSONP relevant - * @param providerGroups user groups, JSONP relevant - * @param intention JSONP relevant - * @param packetId - * @param requests JSON string of the batch request - * @param useReasoner - * @return response convertible to JSON(P) - */ - @Path("m3BatchPrivileged") - @GET - public M3BatchResponse m3BatchGetPrivileged( - @QueryParam("uid") String uid, - @QueryParam("provided-by") Set providerGroups, - @QueryParam("intention") String intention, - @QueryParam("packet-id") String packetId, - @QueryParam("requests") String requests, - @QueryParam("use-reasoner") String useReasoner); + public static class M3Request extends MinervaRequest { + // wrapper to conform to minerva request standard + } + + public static enum Entity { + individual, + edge, + model, + meta; + } + + public static enum Operation { + // generic operations + get, + + @SerializedName("export-all") + exportAll, + + @SerializedName("add-type") + addType, + + @SerializedName("remove-type") + removeType, + + add, + + remove, + + @SerializedName("add-annotation") + addAnnotation, + + @SerializedName("remove-annotation") + removeAnnotation, + + // model specific operations + @SerializedName("export") + exportModel, + + @SerializedName("export-legacy") + exportModelLegacy, + + @SerializedName("import") + importModel, + + @SerializedName("store") + storeModel, + + @SerializedName("reset") + resetModel, + + @SerializedName("diff") + diffModel, + + @SerializedName("update-imports") + updateImports, + + // undo operations for models + undo, // undo the latest op + redo, // redo the latest undo + @SerializedName("get-undo-redo") + getUndoRedo, // get a list of all currently available undo and redo for a model + + sparql + + } + + public static class M3Argument extends MinervaRequest.MinervaArgument { + + @SerializedName("model-id") + String modelId; + String subject; + String object; + String predicate; + String individual; + + @SerializedName("individual-iri") + String individualIRI; + + @SerializedName("taxon-id") + String taxonId; + + @SerializedName("import-model") + String importModel; + String format; + + @SerializedName("assign-to-variable") + String assignToVariable; + + JsonOwlObject[] expressions; + JsonAnnotation[] values; + + String query; + } + + 
public static class M3BatchResponse extends MinervaResponse { + + public static class ResponseData extends JsonModel { + + @SerializedName("inconsistent-p") + public Boolean inconsistentFlag; + + @SerializedName("modified-p") + public Boolean modifiedFlag; + + //TODO starting out here with raw result from robot + @SerializedName("diff-result") + public String diffResult; + + public Object undo; + public Object redo; + + @SerializedName("export-model") + public String exportModel; + + public MetaResponse meta; + + @SerializedName("sparql-result") + public JsonObject sparqlResult; + + @SerializedName("validation-results") + public ValidationResultSet validation_results; + + } + + public static class MetaResponse { + public JsonRelationInfo[] relations; + + @SerializedName("data-properties") + public JsonRelationInfo[] dataProperties; + + public JsonEvidenceInfo[] evidence; + + @SerializedName("models-meta") + public Map> modelsMeta; + + @SerializedName("models-meta-read-only") + public Map> modelsReadOnly; + } + + /** + * @param uid + * @param intention + * @param packetId + */ + public M3BatchResponse(String uid, Set providerGroups, String intention, String packetId) { + super(uid, providerGroups, intention, packetId); + } + + } + + + /** + * Process a batch request. The parameters uid and intention are round-tripped for the JSONP. + * + * @param uid user id, JSONP relevant + * @param intention JSONP relevant + * @param packetId response relevant, may be null + * @param requests batch request + * @param useReasoner + * @param isPrivileged true, if the access is privileged + * @return response object, never null + */ + public M3BatchResponse m3Batch(String uid, Set providerGroups, String intention, String packetId, M3Request[] requests, boolean useReasoner, boolean isPrivileged); + + /** + * Jersey REST method for POST with three form parameters. + * + * @param intention JSONP relevant + * @param packetId + * @param requests JSON string of the batch request + * @param useReasoner + * @return response convertible to JSON(P) + */ + @Path("m3Batch") + @POST + @Consumes("application/x-www-form-urlencoded") + public M3BatchResponse m3BatchPost( + @FormParam("intention") String intention, + @FormParam("packet-id") String packetId, + @FormParam("requests") String requests, + @FormParam("use-reasoner") String useReasoner); + + /** + * Jersey REST method for POST with three form parameters with privileged rights. + * + * @param uid user id, JSONP relevant + * @param providerGroups user groups, JSONP relevant + * @param intention JSONP relevant + * @param packetId + * @param requests JSON string of the batch request + * @param useReasoner + * @return response convertible to JSON(P) + */ + @Path("m3BatchPrivileged") + @POST + @Consumes("application/x-www-form-urlencoded") + public M3BatchResponse m3BatchPostPrivileged( + @FormParam("uid") String uid, + @FormParam("provided-by") Set providerGroups, + @FormParam("intention") String intention, + @FormParam("packet-id") String packetId, + @FormParam("requests") String requests, + @FormParam("use-reasoner") String useReasoner); + + + /** + * Jersey REST method for GET with three query parameters. 
+ * + * @param intention JSONP relevant + * @param packetId + * @param requests JSON string of the batch request + * @param useReasoner + * @return response convertible to JSON(P) + */ + @Path("m3Batch") + @GET + public M3BatchResponse m3BatchGet( + @QueryParam("intention") String intention, + @QueryParam("packet-id") String packetId, + @QueryParam("requests") String requests, + @QueryParam("use-reasoner") String useReasoner); + + /** + * Jersey REST method for GET with three query parameters with privileged rights. + * + * @param uid user id, JSONP relevant + * @param providerGroups user groups, JSONP relevant + * @param intention JSONP relevant + * @param packetId + * @param requests JSON string of the batch request + * @param useReasoner + * @return response convertible to JSON(P) + */ + @Path("m3BatchPrivileged") + @GET + public M3BatchResponse m3BatchGetPrivileged( + @QueryParam("uid") String uid, + @QueryParam("provided-by") Set providerGroups, + @QueryParam("intention") String intention, + @QueryParam("packet-id") String packetId, + @QueryParam("requests") String requests, + @QueryParam("use-reasoner") String useReasoner); } diff --git a/minerva-server/src/main/java/org/geneontology/minerva/server/handler/M3ExpressionParser.java b/minerva-server/src/main/java/org/geneontology/minerva/server/handler/M3ExpressionParser.java index 61ae030f..0ba33f24 100644 --- a/minerva-server/src/main/java/org/geneontology/minerva/server/handler/M3ExpressionParser.java +++ b/minerva-server/src/main/java/org/geneontology/minerva/server/handler/M3ExpressionParser.java @@ -1,9 +1,5 @@ package org.geneontology.minerva.server.handler; -import java.util.HashSet; -import java.util.List; -import java.util.Set; - import org.apache.commons.lang3.StringUtils; import org.geneontology.minerva.MinervaOWLGraphWrapper; import org.geneontology.minerva.ModelContainer; @@ -14,137 +10,128 @@ import org.geneontology.minerva.lookup.ExternalLookupService; import org.geneontology.minerva.lookup.ExternalLookupService.LookupEntry; import org.geneontology.minerva.server.handler.OperationsTools.MissingParameterException; -import org.semanticweb.owlapi.model.IRI; -import org.semanticweb.owlapi.model.OWLClass; -import org.semanticweb.owlapi.model.OWLClassExpression; -import org.semanticweb.owlapi.model.OWLException; -import org.semanticweb.owlapi.model.OWLObjectProperty; +import org.semanticweb.owlapi.model.*; + +import java.util.HashSet; +import java.util.List; +import java.util.Set; public class M3ExpressionParser { - - private final boolean checkLiteralIds; - private final CurieHandler curieHandler; - - M3ExpressionParser(boolean checkLiteralIds, CurieHandler curieHandler) { - this.checkLiteralIds = checkLiteralIds; - this.curieHandler = curieHandler; - } - - M3ExpressionParser(CurieHandler curieHandler) { - this(false, curieHandler); - } - OWLClassExpression parse(ModelContainer model, JsonOwlObject expression, - ExternalLookupService externalLookupService) - throws MissingParameterException, UnknownIdentifierException, OWLException { - MinervaOWLGraphWrapper g = new MinervaOWLGraphWrapper(model.getAboxOntology()); - return parse(g, expression, externalLookupService); - } - - OWLClassExpression parse(MinervaOWLGraphWrapper g, JsonOwlObject expression, - ExternalLookupService externalLookupService) - throws MissingParameterException, UnknownIdentifierException, OWLException { - if (expression == null) { - throw new MissingParameterException("Missing expression: null is not a valid expression."); - } - if (expression.type == 
null) { - throw new MissingParameterException("An expression type is required."); - } - if (JsonOwlObjectType.Class == expression.type) { - if (expression.id == null) { - throw new MissingParameterException("Missing literal for expression of type 'class'"); - } - if (StringUtils.containsWhitespace(expression.id)) { - throw new UnknownIdentifierException("Identifiers may not contain whitespaces: '"+expression.id+"'"); - } - IRI clsIRI = curieHandler.getIRI(expression.id); - OWLClass cls; - if (checkLiteralIds) { - cls = g.getOWLClass(clsIRI); - if (cls == null && externalLookupService != null) { - List lookup = externalLookupService.lookup(clsIRI); - if (lookup == null || lookup.isEmpty()) { - throw new UnknownIdentifierException("Could not validate the id: "+expression.id); - } - cls = createClass(clsIRI, g); - } - if (cls == null) { - throw new UnknownIdentifierException("Could not retrieve a class for id: "+expression.id); - } - } - else { - cls = createClass(clsIRI, g); - } - return cls; - } - else if (JsonOwlObjectType.SomeValueFrom == expression.type) { - if (expression.property == null) { - throw new MissingParameterException("Missing property for expression of type 'svf'"); - } - if (expression.property.id == null) { - throw new MissingParameterException("Missing property id for expression of type 'svf'"); - } - if (expression.property.type != JsonOwlObjectType.ObjectProperty) { - throw new MissingParameterException("Unexpected type for property in 'svf': "+expression.property.type); - } - IRI propIRI = curieHandler.getIRI(expression.property.id); - OWLObjectProperty p = g.getOWLObjectProperty(propIRI); - if (p == null) { - throw new UnknownIdentifierException("Could not find a property for: "+expression.property); - } - if (expression.filler != null) { - OWLClassExpression ce = parse(g, expression.filler, externalLookupService); - return g.getDataFactory().getOWLObjectSomeValuesFrom(p, ce); - } - else { - throw new MissingParameterException("Missing literal or expression for expression of type 'svf'."); - } - } - else if (JsonOwlObjectType.IntersectionOf == expression.type) { - return parse(g, expression.expressions, externalLookupService, JsonOwlObjectType.IntersectionOf); - } - else if (JsonOwlObjectType.UnionOf == expression.type) { - return parse(g, expression.expressions, externalLookupService, JsonOwlObjectType.UnionOf); - } - else if (JsonOwlObjectType.ComplementOf == expression.type) { - if (expression.filler == null) { - throw new MissingParameterException("Missing filler for expression of type 'complement'"); - } - OWLClassExpression filler = parse(g, expression.filler, externalLookupService); - return g.getDataFactory().getOWLObjectComplementOf(filler); - } - else { - throw new UnknownIdentifierException("Unknown expression type: "+expression.type); - } - } - - private OWLClassExpression parse(MinervaOWLGraphWrapper g, JsonOwlObject[] expressions, - ExternalLookupService externalLookupService, JsonOwlObjectType type) - throws MissingParameterException, UnknownIdentifierException, OWLException { - if (expressions.length == 0) { - throw new MissingParameterException("Missing expressions: empty expression list is not allowed."); - } - if (expressions.length == 1) { - return parse(g, expressions[0], externalLookupService); - } - Set clsExpressions = new HashSet(); - for (JsonOwlObject m3Expression : expressions) { - OWLClassExpression ce = parse(g, m3Expression, externalLookupService); - clsExpressions.add(ce); - } - if (type == JsonOwlObjectType.UnionOf) { - return 
g.getDataFactory().getOWLObjectUnionOf(clsExpressions); - } - else if (type == JsonOwlObjectType.IntersectionOf) { - return g.getDataFactory().getOWLObjectIntersectionOf(clsExpressions); - } - else { - throw new UnknownIdentifierException("Unsupported expression type: "+type); - } - } - - private OWLClass createClass(IRI iri, MinervaOWLGraphWrapper g) { - return g.getDataFactory().getOWLClass(iri); - } - + private final boolean checkLiteralIds; + private final CurieHandler curieHandler; + + M3ExpressionParser(boolean checkLiteralIds, CurieHandler curieHandler) { + this.checkLiteralIds = checkLiteralIds; + this.curieHandler = curieHandler; + } + + M3ExpressionParser(CurieHandler curieHandler) { + this(false, curieHandler); + } + + OWLClassExpression parse(ModelContainer model, JsonOwlObject expression, + ExternalLookupService externalLookupService) + throws MissingParameterException, UnknownIdentifierException, OWLException { + MinervaOWLGraphWrapper g = new MinervaOWLGraphWrapper(model.getAboxOntology()); + return parse(g, expression, externalLookupService); + } + + OWLClassExpression parse(MinervaOWLGraphWrapper g, JsonOwlObject expression, + ExternalLookupService externalLookupService) + throws MissingParameterException, UnknownIdentifierException, OWLException { + if (expression == null) { + throw new MissingParameterException("Missing expression: null is not a valid expression."); + } + if (expression.type == null) { + throw new MissingParameterException("An expression type is required."); + } + if (JsonOwlObjectType.Class == expression.type) { + if (expression.id == null) { + throw new MissingParameterException("Missing literal for expression of type 'class'"); + } + if (StringUtils.containsWhitespace(expression.id)) { + throw new UnknownIdentifierException("Identifiers may not contain whitespaces: '" + expression.id + "'"); + } + IRI clsIRI = curieHandler.getIRI(expression.id); + OWLClass cls; + if (checkLiteralIds) { + cls = g.getOWLClass(clsIRI); + if (cls == null && externalLookupService != null) { + List lookup = externalLookupService.lookup(clsIRI); + if (lookup == null || lookup.isEmpty()) { + throw new UnknownIdentifierException("Could not validate the id: " + expression.id); + } + cls = createClass(clsIRI, g); + } + if (cls == null) { + throw new UnknownIdentifierException("Could not retrieve a class for id: " + expression.id); + } + } else { + cls = createClass(clsIRI, g); + } + return cls; + } else if (JsonOwlObjectType.SomeValueFrom == expression.type) { + if (expression.property == null) { + throw new MissingParameterException("Missing property for expression of type 'svf'"); + } + if (expression.property.id == null) { + throw new MissingParameterException("Missing property id for expression of type 'svf'"); + } + if (expression.property.type != JsonOwlObjectType.ObjectProperty) { + throw new MissingParameterException("Unexpected type for property in 'svf': " + expression.property.type); + } + IRI propIRI = curieHandler.getIRI(expression.property.id); + OWLObjectProperty p = g.getOWLObjectProperty(propIRI); + if (p == null) { + throw new UnknownIdentifierException("Could not find a property for: " + expression.property); + } + if (expression.filler != null) { + OWLClassExpression ce = parse(g, expression.filler, externalLookupService); + return g.getDataFactory().getOWLObjectSomeValuesFrom(p, ce); + } else { + throw new MissingParameterException("Missing literal or expression for expression of type 'svf'."); + } + } else if (JsonOwlObjectType.IntersectionOf == 
expression.type) { + return parse(g, expression.expressions, externalLookupService, JsonOwlObjectType.IntersectionOf); + } else if (JsonOwlObjectType.UnionOf == expression.type) { + return parse(g, expression.expressions, externalLookupService, JsonOwlObjectType.UnionOf); + } else if (JsonOwlObjectType.ComplementOf == expression.type) { + if (expression.filler == null) { + throw new MissingParameterException("Missing filler for expression of type 'complement'"); + } + OWLClassExpression filler = parse(g, expression.filler, externalLookupService); + return g.getDataFactory().getOWLObjectComplementOf(filler); + } else { + throw new UnknownIdentifierException("Unknown expression type: " + expression.type); + } + } + + private OWLClassExpression parse(MinervaOWLGraphWrapper g, JsonOwlObject[] expressions, + ExternalLookupService externalLookupService, JsonOwlObjectType type) + throws MissingParameterException, UnknownIdentifierException, OWLException { + if (expressions.length == 0) { + throw new MissingParameterException("Missing expressions: empty expression list is not allowed."); + } + if (expressions.length == 1) { + return parse(g, expressions[0], externalLookupService); + } + Set clsExpressions = new HashSet(); + for (JsonOwlObject m3Expression : expressions) { + OWLClassExpression ce = parse(g, m3Expression, externalLookupService); + clsExpressions.add(ce); + } + if (type == JsonOwlObjectType.UnionOf) { + return g.getDataFactory().getOWLObjectUnionOf(clsExpressions); + } else if (type == JsonOwlObjectType.IntersectionOf) { + return g.getDataFactory().getOWLObjectIntersectionOf(clsExpressions); + } else { + throw new UnknownIdentifierException("Unsupported expression type: " + type); + } + } + + private OWLClass createClass(IRI iri, MinervaOWLGraphWrapper g) { + return g.getDataFactory().getOWLClass(iri); + } + } diff --git a/minerva-server/src/main/java/org/geneontology/minerva/server/handler/M3SeedHandler.java b/minerva-server/src/main/java/org/geneontology/minerva/server/handler/M3SeedHandler.java index 0ffe4b54..b8bbaf1e 100644 --- a/minerva-server/src/main/java/org/geneontology/minerva/server/handler/M3SeedHandler.java +++ b/minerva-server/src/main/java/org/geneontology/minerva/server/handler/M3SeedHandler.java @@ -1,133 +1,125 @@ package org.geneontology.minerva.server.handler; -import java.util.Set; - -import javax.ws.rs.Consumes; -import javax.ws.rs.FormParam; -import javax.ws.rs.GET; -import javax.ws.rs.POST; -import javax.ws.rs.Path; -import javax.ws.rs.QueryParam; - +import com.google.gson.annotations.SerializedName; import org.geneontology.minerva.server.handler.MinervaRequest.MinervaArgument; -import com.google.gson.annotations.SerializedName; +import javax.ws.rs.*; +import java.util.Set; /** * Alpha version interface for seeding a model. 
- * */ @Path("/seed") public interface M3SeedHandler { - - public static class SeedRequest extends MinervaRequest { - // wrapper to conform to minerva request standard - } - - public static class SeedRequestArgument extends MinervaArgument { - - String process; - String taxon; - - /* - * use the label, as this is the used as a restriction in the - * 'evidence_type_closure' Golr field - */ - @SerializedName("evidence-restriction") - String[] evidenceRestriction = new String[]{"experimental evidence"}; - - @SerializedName("location-roots") - String[] locationRoots = new String[]{"CL:0000003", "GO:0005575"}; // native cell, CC - - @SerializedName("ignore-classes") - String[] ignoreList = new String[]{"GO:0005515"}; // protein binding - } - - public static class SeedResponse extends MinervaResponse { - - public static class SeedResponseData { - - public String id; - } - - /** - * @param uid - * @param intention - * @param packetId - */ - public SeedResponse(String uid, Set providerGroups, String intention, String packetId) { - super(uid, providerGroups, intention, packetId); - } - } - - /** - * Jersey REST method for POST with three form parameters. - * - * @param intention JSONP relevant - * @param packetId - * @param requestString seed request - * @return response convertible to JSON(P) - */ - @Path("fromProcess") - @POST - @Consumes("application/x-www-form-urlencoded") - public SeedResponse fromProcessPost( - @FormParam("intention") String intention, - @FormParam("packet-id") String packetId, - @FormParam("requests") String requestString); - - /** - * Jersey REST method for POST with three form parameters with privileged rights. - * - * @param uid user id, JSONP relevant - * @param providerGroups user groups, JSONP relevant - * @param intention JSONP relevant - * @param packetId - * @param requestString seed request - * @return response convertible to JSON(P) - */ - @Path("fromProcessPrivileged") - @POST - @Consumes("application/x-www-form-urlencoded") - public SeedResponse fromProcessPostPrivileged( - @FormParam("uid") String uid, - @FormParam("provided-by") Set providerGroups, - @FormParam("intention") String intention, - @FormParam("packet-id") String packetId, - @FormParam("requests") String requestString); - - - /** - * Jersey REST method for GET with three query parameters. - * - * @param intention JSONP relevant - * @param packetId - * @param requestString seed request - * @return response convertible to JSON(P) - */ - @Path("fromProcess") - @GET - public SeedResponse fromProcessGet( - @QueryParam("intention") String intention, - @QueryParam("packet-id") String packetId, - @QueryParam("requests") String requestString); - - /** - * Jersey REST method for GET with three query parameters with privileged rights. 
- * - * @param uid user id, JSONP relevant - * @param providerGroups user groups, JSONP relevant - * @param intention JSONP relevant - * @param packetId - * @param requestString seed request - * @return response convertible to JSON(P) - */ - @Path("fromProcessPrivileged") - @GET - public SeedResponse fromProcessGetPrivileged( - @QueryParam("uid") String uid, - @QueryParam("provided-by") Set providerGroups, - @QueryParam("intention") String intention, - @QueryParam("packet-id") String packetId, - @QueryParam("requests") String requestString); + + public static class SeedRequest extends MinervaRequest { + // wrapper to conform to minerva request standard + } + + public static class SeedRequestArgument extends MinervaArgument { + + String process; + String taxon; + + /* + * use the label, as this is the used as a restriction in the + * 'evidence_type_closure' Golr field + */ + @SerializedName("evidence-restriction") + String[] evidenceRestriction = new String[]{"experimental evidence"}; + + @SerializedName("location-roots") + String[] locationRoots = new String[]{"CL:0000003", "GO:0005575"}; // native cell, CC + + @SerializedName("ignore-classes") + String[] ignoreList = new String[]{"GO:0005515"}; // protein binding + } + + public static class SeedResponse extends MinervaResponse { + + public static class SeedResponseData { + + public String id; + } + + /** + * @param uid + * @param intention + * @param packetId + */ + public SeedResponse(String uid, Set providerGroups, String intention, String packetId) { + super(uid, providerGroups, intention, packetId); + } + } + + /** + * Jersey REST method for POST with three form parameters. + * + * @param intention JSONP relevant + * @param packetId + * @param requestString seed request + * @return response convertible to JSON(P) + */ + @Path("fromProcess") + @POST + @Consumes("application/x-www-form-urlencoded") + public SeedResponse fromProcessPost( + @FormParam("intention") String intention, + @FormParam("packet-id") String packetId, + @FormParam("requests") String requestString); + + /** + * Jersey REST method for POST with three form parameters with privileged rights. + * + * @param uid user id, JSONP relevant + * @param providerGroups user groups, JSONP relevant + * @param intention JSONP relevant + * @param packetId + * @param requestString seed request + * @return response convertible to JSON(P) + */ + @Path("fromProcessPrivileged") + @POST + @Consumes("application/x-www-form-urlencoded") + public SeedResponse fromProcessPostPrivileged( + @FormParam("uid") String uid, + @FormParam("provided-by") Set providerGroups, + @FormParam("intention") String intention, + @FormParam("packet-id") String packetId, + @FormParam("requests") String requestString); + + + /** + * Jersey REST method for GET with three query parameters. + * + * @param intention JSONP relevant + * @param packetId + * @param requestString seed request + * @return response convertible to JSON(P) + */ + @Path("fromProcess") + @GET + public SeedResponse fromProcessGet( + @QueryParam("intention") String intention, + @QueryParam("packet-id") String packetId, + @QueryParam("requests") String requestString); + + /** + * Jersey REST method for GET with three query parameters with privileged rights. 
+ * + * @param uid user id, JSONP relevant + * @param providerGroups user groups, JSONP relevant + * @param intention JSONP relevant + * @param packetId + * @param requestString seed request + * @return response convertible to JSON(P) + */ + @Path("fromProcessPrivileged") + @GET + public SeedResponse fromProcessGetPrivileged( + @QueryParam("uid") String uid, + @QueryParam("provided-by") Set providerGroups, + @QueryParam("intention") String intention, + @QueryParam("packet-id") String packetId, + @QueryParam("requests") String requestString); } diff --git a/minerva-server/src/main/java/org/geneontology/minerva/server/handler/MinervaRequest.java b/minerva-server/src/main/java/org/geneontology/minerva/server/handler/MinervaRequest.java index 717a5be9..14c6023c 100644 --- a/minerva-server/src/main/java/org/geneontology/minerva/server/handler/MinervaRequest.java +++ b/minerva-server/src/main/java/org/geneontology/minerva/server/handler/MinervaRequest.java @@ -10,12 +10,12 @@ */ public abstract class MinervaRequest { - ENTITY entity; - OPERATION operation; - ARGUMENT arguments; - - public abstract static class MinervaArgument { - // empty for now - // content depends on application - } + ENTITY entity; + OPERATION operation; + ARGUMENT arguments; + + public abstract static class MinervaArgument { + // empty for now + // content depends on application + } } diff --git a/minerva-server/src/main/java/org/geneontology/minerva/server/handler/MinervaResponse.java b/minerva-server/src/main/java/org/geneontology/minerva/server/handler/MinervaResponse.java index 31601a51..60c5170c 100644 --- a/minerva-server/src/main/java/org/geneontology/minerva/server/handler/MinervaResponse.java +++ b/minerva-server/src/main/java/org/geneontology/minerva/server/handler/MinervaResponse.java @@ -1,74 +1,74 @@ package org.geneontology.minerva.server.handler; +import com.google.gson.annotations.SerializedName; + import java.util.Collections; import java.util.Set; -import com.google.gson.annotations.SerializedName; - public abstract class MinervaResponse { - @SerializedName("packet-id") - final String packetId; // generated or pass-through - final String uid; // pass-through - @SerializedName("provided-by") - final Set providerGroups; // pass-through - - @SerializedName("is-reasoned") - boolean isReasoned = false; - - /* - * pass-through; model: - * "query", "action" //, "location" - */ - final String intention; - - public static final String SIGNAL_MERGE = "merge"; - public static final String SIGNAL_REBUILD = "rebuild"; - public static final String SIGNAL_META = "meta"; - /* - * "merge", "rebuild", "meta" //, "location"? - */ - String signal; - - public static final String MESSAGE_TYPE_SUCCESS = "success"; - public static final String MESSAGE_TYPE_ERROR = "error"; - /* - * "error", "success", //"warning" - */ - @SerializedName("message-type") - String messageType; - /* - * "e.g.: server done borked" - */ - String message; - /* - * Now degraded to just a String, not an Object. 
- */ - //Map commentary = null; - String commentary; - - DATA data; - - /** - * @param uid - * @param intention - * @param packetId - */ - public MinervaResponse(String uid, Set providerGroups, String intention, String packetId) { - this.uid = uid; - if (providerGroups != null) { - this.providerGroups = providerGroups; - } else { - this.providerGroups = Collections.emptySet(); - } - this.intention = intention; - this.packetId = packetId; - } + @SerializedName("packet-id") + final String packetId; // generated or pass-through + final String uid; // pass-through + @SerializedName("provided-by") + final Set providerGroups; // pass-through + + @SerializedName("is-reasoned") + boolean isReasoned = false; + + /* + * pass-through; model: + * "query", "action" //, "location" + */ + final String intention; + + public static final String SIGNAL_MERGE = "merge"; + public static final String SIGNAL_REBUILD = "rebuild"; + public static final String SIGNAL_META = "meta"; + /* + * "merge", "rebuild", "meta" //, "location"? + */ + String signal; + + public static final String MESSAGE_TYPE_SUCCESS = "success"; + public static final String MESSAGE_TYPE_ERROR = "error"; + /* + * "error", "success", //"warning" + */ + @SerializedName("message-type") + String messageType; + /* + * "e.g.: server done borked" + */ + String message; + /* + * Now degraded to just a String, not an Object. + */ + //Map commentary = null; + String commentary; + + DATA data; + + /** + * @param uid + * @param intention + * @param packetId + */ + public MinervaResponse(String uid, Set providerGroups, String intention, String packetId) { + this.uid = uid; + if (providerGroups != null) { + this.providerGroups = providerGroups; + } else { + this.providerGroups = Collections.emptySet(); + } + this.intention = intention; + this.packetId = packetId; + } - /** - * @param isReasoned the isReasoned to set - */ - public void setReasoned(boolean isReasoned) { - this.isReasoned = isReasoned; - } + /** + * @param isReasoned the isReasoned to set + */ + public void setReasoned(boolean isReasoned) { + this.isReasoned = isReasoned; + } } diff --git a/minerva-server/src/main/java/org/geneontology/minerva/server/handler/ModelARTHandler.java b/minerva-server/src/main/java/org/geneontology/minerva/server/handler/ModelARTHandler.java index 7e25f30e..c38e1c89 100644 --- a/minerva-server/src/main/java/org/geneontology/minerva/server/handler/ModelARTHandler.java +++ b/minerva-server/src/main/java/org/geneontology/minerva/server/handler/ModelARTHandler.java @@ -1,60 +1,29 @@ /** - * + * */ package org.geneontology.minerva.server.handler; -import static org.geneontology.minerva.server.handler.OperationsTools.createModelRenderer; - -import java.io.IOException; -import java.nio.charset.StandardCharsets; -import java.util.ArrayList; -import java.util.Date; -import java.util.HashMap; -import java.util.HashSet; -import java.util.LinkedHashMap; -import java.util.LinkedHashSet; -import java.util.List; -import java.util.Map; -import java.util.Set; - -import javax.ws.rs.Consumes; -import javax.ws.rs.FormParam; -import javax.ws.rs.GET; -import javax.ws.rs.POST; -import javax.ws.rs.Path; -import javax.ws.rs.Produces; -import javax.ws.rs.QueryParam; -import javax.ws.rs.core.MediaType; - -import org.apache.commons.io.IOUtils; import org.geneontology.minerva.BlazegraphMolecularModelManager; import org.geneontology.minerva.BlazegraphOntologyManager; import org.geneontology.minerva.ModelContainer; -import 
org.geneontology.minerva.MolecularModelManager.UnknownIdentifierException; import org.geneontology.minerva.curie.CurieHandler; import org.geneontology.minerva.json.InferenceProvider; import org.geneontology.minerva.json.JsonModel; import org.geneontology.minerva.json.MolecularModelJsonRenderer; -import org.geneontology.minerva.server.handler.M3BatchHandler.M3BatchResponse; -import org.geneontology.minerva.server.handler.M3BatchHandler.M3BatchResponse.ResponseData; import org.geneontology.minerva.server.inferences.InferenceProviderCreator; -import org.openrdf.query.Binding; -import org.openrdf.query.BindingSet; -import org.openrdf.query.MalformedQueryException; -import org.openrdf.query.QueryEvaluationException; -import org.openrdf.query.QueryResult; -import org.openrdf.query.TupleQueryResult; -import org.openrdf.repository.RepositoryException; import org.semanticweb.owlapi.apibinding.OWLManager; import org.semanticweb.owlapi.model.IRI; -import org.semanticweb.owlapi.model.OWLAxiom; -import org.semanticweb.owlapi.model.OWLNamedIndividual; import org.semanticweb.owlapi.model.OWLOntology; -import org.semanticweb.owlapi.model.OWLOntologyCreationException; import org.semanticweb.owlapi.model.OWLOntologyManager; -import org.semanticweb.owlapi.model.parameters.OntologyCopy; -import com.google.gson.annotations.SerializedName; +import javax.ws.rs.GET; +import javax.ws.rs.Path; +import javax.ws.rs.Produces; +import javax.ws.rs.QueryParam; +import javax.ws.rs.core.MediaType; +import java.util.Set; + +import static org.geneontology.minerva.server.handler.OperationsTools.createModelRenderer; /** * Gets Model readonly data for Annotation Review Tool @@ -65,97 +34,98 @@ @Path("/search/stored") //using store endpoint temporarily because thats what barista allows public class ModelARTHandler { - private final BlazegraphMolecularModelManager m3; - private final BlazegraphOntologyManager go_lego; - private final CurieHandler curieHandler; - private final InferenceProviderCreator ipc; - /** - * - */ - public ModelARTHandler(BlazegraphMolecularModelManager m3, InferenceProviderCreator ipc) { - this.m3 = m3; - this.go_lego = m3.getGolego_repo(); - this.curieHandler = m3.getCuriHandler(); - this.ipc = ipc; - } - - public class ModelARTResult { - private String id; - private JsonModel storedModel; - private JsonModel activeModel; - private JsonModel diffModel; - - public String getId() { - return id; - } - - public void setId(String id) { - this.id = id; - } - - public void setStoredModel(JsonModel storedModel) { - this.storedModel = storedModel; - } - - public JsonModel getStoredModel() { - return this.storedModel; - } - - public void setActiveModel(JsonModel activeModel) { - this.activeModel = activeModel; - } - - public JsonModel getActiveModel() { - return this.activeModel; - } - } - - - @GET - @Produces(MediaType.APPLICATION_JSON) - public ModelARTResult storedGet( - @QueryParam("id") Set id - ) throws Exception{ - ModelARTResult result = new ModelARTResult(); - result = stored(id); - return result; - } - - public ModelARTResult stored(Set ids) throws Exception { - ModelARTResult result = new ModelARTResult(); - - for(String mid : ids) { - addToModel(mid, result); - } - - return result; - } - - private void addToModel(String modelId, ModelARTResult result) throws Exception { - - IRI modelIri = curieHandler.getIRI(modelId); - OWLOntologyManager manager = OWLManager.createOWLOntologyManager(); - OWLOntology currentOntology = m3.getModelAbox(modelIri); - OWLOntology storedOntology = m3.loadModelABox(modelIri, 
manager); - - //OWLOntology stored_ontology = man1.copyOntology(storedOntology, OntologyCopy.DEEP); - ModelContainer storedMC = new ModelContainer(modelIri, null, storedOntology); - final MolecularModelJsonRenderer storedRenderer = createModelRenderer(storedMC, go_lego, null, curieHandler); - JsonModel jsonStoredModel = storedRenderer.renderModel(); - - ModelContainer activeMC = new ModelContainer(modelIri, null, currentOntology); - InferenceProvider inferenceProvider = ipc.create(activeMC); - final MolecularModelJsonRenderer renderer = createModelRenderer(activeMC, go_lego, inferenceProvider, curieHandler); - JsonModel jsonActiveModel = renderer.renderModel(); - - result.storedModel = jsonStoredModel; - result.activeModel = jsonActiveModel; - result.diffModel = getDiff(jsonStoredModel, jsonActiveModel); - - } - - private JsonModel getDiff(JsonModel storedOntology, JsonModel activeOntology) { - return new JsonModel(); - } - + private final BlazegraphMolecularModelManager m3; + private final BlazegraphOntologyManager go_lego; + private final CurieHandler curieHandler; + private final InferenceProviderCreator ipc; + + /** + * + */ + public ModelARTHandler(BlazegraphMolecularModelManager m3, InferenceProviderCreator ipc) { + this.m3 = m3; + this.go_lego = m3.getGolego_repo(); + this.curieHandler = m3.getCuriHandler(); + this.ipc = ipc; + } + + public class ModelARTResult { + private String id; + private JsonModel storedModel; + private JsonModel activeModel; + private JsonModel diffModel; + + public String getId() { + return id; + } + + public void setId(String id) { + this.id = id; + } + + public void setStoredModel(JsonModel storedModel) { + this.storedModel = storedModel; + } + + public JsonModel getStoredModel() { + return this.storedModel; + } + + public void setActiveModel(JsonModel activeModel) { + this.activeModel = activeModel; + } + + public JsonModel getActiveModel() { + return this.activeModel; + } + } + + + @GET + @Produces(MediaType.APPLICATION_JSON) + public ModelARTResult storedGet( + @QueryParam("id") Set id + ) throws Exception { + ModelARTResult result = new ModelARTResult(); + result = stored(id); + return result; + } + + public ModelARTResult stored(Set ids) throws Exception { + ModelARTResult result = new ModelARTResult(); + + for (String mid : ids) { + addToModel(mid, result); + } + + return result; + } + + private void addToModel(String modelId, ModelARTResult result) throws Exception { + + IRI modelIri = curieHandler.getIRI(modelId); + OWLOntologyManager manager = OWLManager.createOWLOntologyManager(); + OWLOntology currentOntology = m3.getModelAbox(modelIri); + OWLOntology storedOntology = m3.loadModelABox(modelIri, manager); + + //OWLOntology stored_ontology = man1.copyOntology(storedOntology, OntologyCopy.DEEP); + ModelContainer storedMC = new ModelContainer(modelIri, null, storedOntology); + final MolecularModelJsonRenderer storedRenderer = createModelRenderer(storedMC, go_lego, null, curieHandler); + JsonModel jsonStoredModel = storedRenderer.renderModel(); + + ModelContainer activeMC = new ModelContainer(modelIri, null, currentOntology); + InferenceProvider inferenceProvider = ipc.create(activeMC); + final MolecularModelJsonRenderer renderer = createModelRenderer(activeMC, go_lego, inferenceProvider, curieHandler); + JsonModel jsonActiveModel = renderer.renderModel(); + + result.storedModel = jsonStoredModel; + result.activeModel = jsonActiveModel; + result.diffModel = getDiff(jsonStoredModel, jsonActiveModel); + + } + + private JsonModel getDiff(JsonModel 
storedOntology, JsonModel activeOntology) { + return new JsonModel(); + } + } diff --git a/minerva-server/src/main/java/org/geneontology/minerva/server/handler/ModelCreator.java b/minerva-server/src/main/java/org/geneontology/minerva/server/handler/ModelCreator.java index 1e3e1f79..09179d7f 100644 --- a/minerva-server/src/main/java/org/geneontology/minerva/server/handler/ModelCreator.java +++ b/minerva-server/src/main/java/org/geneontology/minerva/server/handler/ModelCreator.java @@ -1,236 +1,220 @@ package org.geneontology.minerva.server.handler; -import java.util.Collections; -import java.util.Date; -import java.util.HashMap; -import java.util.HashSet; -import java.util.Map; -import java.util.Set; - import org.geneontology.minerva.ModelContainer; import org.geneontology.minerva.MolecularModelManager; -import org.geneontology.minerva.UndoAwareMolecularModelManager; import org.geneontology.minerva.MolecularModelManager.UnknownIdentifierException; +import org.geneontology.minerva.UndoAwareMolecularModelManager; import org.geneontology.minerva.UndoAwareMolecularModelManager.UndoMetadata; import org.geneontology.minerva.curie.CurieHandler; import org.geneontology.minerva.json.JsonAnnotation; import org.geneontology.minerva.json.JsonTools; import org.geneontology.minerva.json.MolecularModelJsonRenderer; import org.geneontology.minerva.util.AnnotationShorthand; -import org.semanticweb.owlapi.model.IRI; -import org.semanticweb.owlapi.model.OWLAnnotation; -import org.semanticweb.owlapi.model.OWLAnnotationProperty; -import org.semanticweb.owlapi.model.OWLAnnotationValue; -import org.semanticweb.owlapi.model.OWLDataFactory; -import org.semanticweb.owlapi.model.OWLDataProperty; -import org.semanticweb.owlapi.model.OWLLiteral; -import org.semanticweb.owlapi.model.OWLNamedIndividual; -import org.semanticweb.owlapi.model.OWLObjectProperty; -import org.semanticweb.owlapi.model.OWLOntologyCreationException; +import org.semanticweb.owlapi.model.*; import org.semanticweb.owlapi.model.parameters.Imports; +import java.util.*; + /** * Methods for creating a new model. 
This handles also all the default * annotations for models and provides methods to update date annotations */ abstract class ModelCreator { - - final UndoAwareMolecularModelManager m3; - final CurieHandler curieHandler; - private final String defaultModelState; - private final Set dataPropertyIRIs; - - static interface VariableResolver { - public boolean notVariable(String id); - public OWLNamedIndividual getVariableValue(String id) throws UnknownIdentifierException; - - static final VariableResolver EMPTY = new VariableResolver() { - - @Override - public boolean notVariable(String id) { - return true; - } - - @Override - public OWLNamedIndividual getVariableValue(String id) { - return null; - } - }; - } - - ModelCreator(UndoAwareMolecularModelManager models, String defaultModelState) { - this.m3 = models; - this.curieHandler = models.getCuriHandler(); - this.defaultModelState = defaultModelState; - Set dataPropertyIRIs = new HashSet(); - for(OWLDataProperty p : m3.getOntology().getDataPropertiesInSignature(Imports.INCLUDED)) { - dataPropertyIRIs.add(p.getIRI()); - } - this.dataPropertyIRIs = Collections.unmodifiableSet(dataPropertyIRIs); - } - - ModelContainer createModel(String userId, Set providerGroups, UndoMetadata token, VariableResolver resolver, JsonAnnotation[] annotationValues) throws UnknownIdentifierException, OWLOntologyCreationException { - ModelContainer model = m3.generateBlankModel(token); - Set annotations = extract(annotationValues, userId, providerGroups, resolver, model); - annotations = addDefaultModelState(annotations, model.getOWLDataFactory()); - if (annotations != null) { - m3.addModelAnnotations(model, annotations, token); - } - updateModelAnnotations(model, userId, providerGroups, token, m3); - // Disallow undo of initial annotations - m3.clearUndoHistory(model.getModelId()); - return model; - } - - boolean deleteModel(ModelContainer model) { - if (model != null) { - return m3.deleteModel(model); - } - return false; - } - - private Set addDefaultModelState(Set existing, OWLDataFactory f) { - IRI iri = AnnotationShorthand.modelstate.getAnnotationProperty(); - OWLAnnotationProperty property = f.getOWLAnnotationProperty(iri); - OWLAnnotation ann = f.getOWLAnnotation(property, f.getOWLLiteral(defaultModelState)); - if (existing == null || existing.isEmpty()) { - return Collections.singleton(ann); - } - existing.add(ann); - return existing; - } - - Set extract(JsonAnnotation[] values, String userId, Set providerGroups, VariableResolver batchValues, ModelContainer model) throws UnknownIdentifierException { - Set result = new HashSet(); - OWLDataFactory f = model.getOWLDataFactory(); - if (values != null) { - for (JsonAnnotation jsonAnn : values) { - if (jsonAnn.key != null && jsonAnn.value != null) { - AnnotationShorthand shorthand = AnnotationShorthand.getShorthand(jsonAnn.key, curieHandler); - if (shorthand != null) { - if (AnnotationShorthand.evidence == shorthand) { - IRI evidenceIRI; - if (batchValues.notVariable(jsonAnn.value)) { - evidenceIRI = curieHandler.getIRI(jsonAnn.value); - } - else { - evidenceIRI = batchValues.getVariableValue(jsonAnn.value).getIRI(); - } - result.add(create(f, shorthand, evidenceIRI)); - } - else { - result.add(create(f, shorthand, JsonTools.createAnnotationValue(jsonAnn, f))); - } - } - else { - IRI pIRI = curieHandler.getIRI(jsonAnn.key); - if (dataPropertyIRIs.contains(pIRI) == false) { - OWLAnnotationValue annotationValue = JsonTools.createAnnotationValue(jsonAnn, f); - 
result.add(f.getOWLAnnotation(f.getOWLAnnotationProperty(pIRI), annotationValue)); - } - } - } - } - } - addGeneratedAnnotations(userId, providerGroups, result, f); - return result; - } - - Map> extractDataProperties(JsonAnnotation[] values, ModelContainer model) throws UnknownIdentifierException { - Map> result = new HashMap>(); - - if (values != null && values.length > 0) { - OWLDataFactory f = model.getOWLDataFactory(); - for (JsonAnnotation jsonAnn : values) { - if (jsonAnn.key != null && jsonAnn.value != null) { - AnnotationShorthand shorthand = AnnotationShorthand.getShorthand(jsonAnn.key, curieHandler); - if (shorthand == null) { - IRI pIRI = curieHandler.getIRI(jsonAnn.key); - if (dataPropertyIRIs.contains(pIRI)) { - OWLLiteral literal = JsonTools.createLiteral(jsonAnn, f); - if (literal != null) { - OWLDataProperty property = f.getOWLDataProperty(pIRI); - Set literals = result.get(property); - if (literals == null) { - literals = new HashSet(); - result.put(property, literals); - } - literals.add(literal); - } - } - } - } - } - } - - return result; - } - - void updateDate(ModelContainer model, OWLNamedIndividual individual, UndoMetadata token, UndoAwareMolecularModelManager m3) throws UnknownIdentifierException { - final OWLDataFactory f = model.getOWLDataFactory(); - m3.updateAnnotation(model, individual, createDateAnnotation(f), token); - } - - void updateModelAnnotations(ModelContainer model, String userId, Set providerGroups, UndoMetadata token, MolecularModelManager m3) throws UnknownIdentifierException { - final OWLDataFactory f = model.getOWLDataFactory(); - if (userId != null) { - Set annotations = new HashSet(); - annotations.add(create(f, AnnotationShorthand.contributor, userId)); - m3.addModelAnnotations(model, annotations, token); - } - for (String provider : providerGroups) { - Set annotations = new HashSet(); - annotations.add(create(f, AnnotationShorthand.providedBy, provider)); - m3.addModelAnnotations(model, annotations, token); - } - m3.updateAnnotation(model, createDateAnnotation(f), token); - } - - void addGeneratedAnnotations(String userId, Set providerGroups, Set annotations, OWLDataFactory f) { - if (userId != null) { - annotations.add(create(f, AnnotationShorthand.contributor, userId)); - } - for (String provider : providerGroups) { - annotations.add(create(f, AnnotationShorthand.providedBy, provider)); - } - } - - void addDateAnnotation(Set annotations, OWLDataFactory f) { - annotations.add(createDateAnnotation(f)); - } - - OWLAnnotation createDateAnnotation(OWLDataFactory f) { - return create(f, AnnotationShorthand.date, generateDateString()); - } - - /** - * separate method, intended to be overridden during test. 
- * - * @return date string, never null - */ - protected String generateDateString() { - String dateString = MolecularModelJsonRenderer.AnnotationTypeDateFormat.get().format(new Date()); - return dateString; - } - - Set createGeneratedAnnotations(ModelContainer model, String userId, Set providerGroups) { - Set annotations = new HashSet(); - OWLDataFactory f = model.getOWLDataFactory(); - addGeneratedAnnotations(userId, providerGroups, annotations, f); - return annotations; - } - - void updateDate(ModelContainer model, OWLObjectProperty predicate, OWLNamedIndividual subject, OWLNamedIndividual object, UndoMetadata token, UndoAwareMolecularModelManager m3) throws UnknownIdentifierException { - final OWLDataFactory f = model.getOWLDataFactory(); - m3.updateAnnotation(model, predicate, subject, object, createDateAnnotation(f), token); - } - - static OWLAnnotation create(OWLDataFactory f, AnnotationShorthand s, String literal) { - return create(f, s, f.getOWLLiteral(literal)); - } - - static OWLAnnotation create(OWLDataFactory f, AnnotationShorthand s, OWLAnnotationValue v) { - final OWLAnnotationProperty p = f.getOWLAnnotationProperty(s.getAnnotationProperty()); - return f.getOWLAnnotation(p, v); - } + + final UndoAwareMolecularModelManager m3; + final CurieHandler curieHandler; + private final String defaultModelState; + private final Set dataPropertyIRIs; + + static interface VariableResolver { + public boolean notVariable(String id); + + public OWLNamedIndividual getVariableValue(String id) throws UnknownIdentifierException; + + static final VariableResolver EMPTY = new VariableResolver() { + + @Override + public boolean notVariable(String id) { + return true; + } + + @Override + public OWLNamedIndividual getVariableValue(String id) { + return null; + } + }; + } + + ModelCreator(UndoAwareMolecularModelManager models, String defaultModelState) { + this.m3 = models; + this.curieHandler = models.getCuriHandler(); + this.defaultModelState = defaultModelState; + Set dataPropertyIRIs = new HashSet(); + for (OWLDataProperty p : m3.getOntology().getDataPropertiesInSignature(Imports.INCLUDED)) { + dataPropertyIRIs.add(p.getIRI()); + } + this.dataPropertyIRIs = Collections.unmodifiableSet(dataPropertyIRIs); + } + + ModelContainer createModel(String userId, Set providerGroups, UndoMetadata token, VariableResolver resolver, JsonAnnotation[] annotationValues) throws UnknownIdentifierException, OWLOntologyCreationException { + ModelContainer model = m3.generateBlankModel(token); + Set annotations = extract(annotationValues, userId, providerGroups, resolver, model); + annotations = addDefaultModelState(annotations, model.getOWLDataFactory()); + if (annotations != null) { + m3.addModelAnnotations(model, annotations, token); + } + updateModelAnnotations(model, userId, providerGroups, token, m3); + // Disallow undo of initial annotations + m3.clearUndoHistory(model.getModelId()); + return model; + } + + boolean deleteModel(ModelContainer model) { + if (model != null) { + return m3.deleteModel(model); + } + return false; + } + + private Set addDefaultModelState(Set existing, OWLDataFactory f) { + IRI iri = AnnotationShorthand.modelstate.getAnnotationProperty(); + OWLAnnotationProperty property = f.getOWLAnnotationProperty(iri); + OWLAnnotation ann = f.getOWLAnnotation(property, f.getOWLLiteral(defaultModelState)); + if (existing == null || existing.isEmpty()) { + return Collections.singleton(ann); + } + existing.add(ann); + return existing; + } + + Set extract(JsonAnnotation[] values, String userId, Set 
providerGroups, VariableResolver batchValues, ModelContainer model) throws UnknownIdentifierException { + Set result = new HashSet(); + OWLDataFactory f = model.getOWLDataFactory(); + if (values != null) { + for (JsonAnnotation jsonAnn : values) { + if (jsonAnn.key != null && jsonAnn.value != null) { + AnnotationShorthand shorthand = AnnotationShorthand.getShorthand(jsonAnn.key, curieHandler); + if (shorthand != null) { + if (AnnotationShorthand.evidence == shorthand) { + IRI evidenceIRI; + if (batchValues.notVariable(jsonAnn.value)) { + evidenceIRI = curieHandler.getIRI(jsonAnn.value); + } else { + evidenceIRI = batchValues.getVariableValue(jsonAnn.value).getIRI(); + } + result.add(create(f, shorthand, evidenceIRI)); + } else { + result.add(create(f, shorthand, JsonTools.createAnnotationValue(jsonAnn, f))); + } + } else { + IRI pIRI = curieHandler.getIRI(jsonAnn.key); + if (dataPropertyIRIs.contains(pIRI) == false) { + OWLAnnotationValue annotationValue = JsonTools.createAnnotationValue(jsonAnn, f); + result.add(f.getOWLAnnotation(f.getOWLAnnotationProperty(pIRI), annotationValue)); + } + } + } + } + } + addGeneratedAnnotations(userId, providerGroups, result, f); + return result; + } + + Map> extractDataProperties(JsonAnnotation[] values, ModelContainer model) throws UnknownIdentifierException { + Map> result = new HashMap>(); + + if (values != null && values.length > 0) { + OWLDataFactory f = model.getOWLDataFactory(); + for (JsonAnnotation jsonAnn : values) { + if (jsonAnn.key != null && jsonAnn.value != null) { + AnnotationShorthand shorthand = AnnotationShorthand.getShorthand(jsonAnn.key, curieHandler); + if (shorthand == null) { + IRI pIRI = curieHandler.getIRI(jsonAnn.key); + if (dataPropertyIRIs.contains(pIRI)) { + OWLLiteral literal = JsonTools.createLiteral(jsonAnn, f); + if (literal != null) { + OWLDataProperty property = f.getOWLDataProperty(pIRI); + Set literals = result.get(property); + if (literals == null) { + literals = new HashSet(); + result.put(property, literals); + } + literals.add(literal); + } + } + } + } + } + } + + return result; + } + + void updateDate(ModelContainer model, OWLNamedIndividual individual, UndoMetadata token, UndoAwareMolecularModelManager m3) throws UnknownIdentifierException { + final OWLDataFactory f = model.getOWLDataFactory(); + m3.updateAnnotation(model, individual, createDateAnnotation(f), token); + } + + void updateModelAnnotations(ModelContainer model, String userId, Set providerGroups, UndoMetadata token, MolecularModelManager m3) throws UnknownIdentifierException { + final OWLDataFactory f = model.getOWLDataFactory(); + if (userId != null) { + Set annotations = new HashSet(); + annotations.add(create(f, AnnotationShorthand.contributor, userId)); + m3.addModelAnnotations(model, annotations, token); + } + for (String provider : providerGroups) { + Set annotations = new HashSet(); + annotations.add(create(f, AnnotationShorthand.providedBy, provider)); + m3.addModelAnnotations(model, annotations, token); + } + m3.updateAnnotation(model, createDateAnnotation(f), token); + } + + void addGeneratedAnnotations(String userId, Set providerGroups, Set annotations, OWLDataFactory f) { + if (userId != null) { + annotations.add(create(f, AnnotationShorthand.contributor, userId)); + } + for (String provider : providerGroups) { + annotations.add(create(f, AnnotationShorthand.providedBy, provider)); + } + } + + void addDateAnnotation(Set annotations, OWLDataFactory f) { + annotations.add(createDateAnnotation(f)); + } + + OWLAnnotation 
createDateAnnotation(OWLDataFactory f) { + return create(f, AnnotationShorthand.date, generateDateString()); + } + + /** + * separate method, intended to be overridden during test. + * + * @return date string, never null + */ + protected String generateDateString() { + String dateString = MolecularModelJsonRenderer.AnnotationTypeDateFormat.get().format(new Date()); + return dateString; + } + + Set createGeneratedAnnotations(ModelContainer model, String userId, Set providerGroups) { + Set annotations = new HashSet(); + OWLDataFactory f = model.getOWLDataFactory(); + addGeneratedAnnotations(userId, providerGroups, annotations, f); + return annotations; + } + + void updateDate(ModelContainer model, OWLObjectProperty predicate, OWLNamedIndividual subject, OWLNamedIndividual object, UndoMetadata token, UndoAwareMolecularModelManager m3) throws UnknownIdentifierException { + final OWLDataFactory f = model.getOWLDataFactory(); + m3.updateAnnotation(model, predicate, subject, object, createDateAnnotation(f), token); + } + + static OWLAnnotation create(OWLDataFactory f, AnnotationShorthand s, String literal) { + return create(f, s, f.getOWLLiteral(literal)); + } + + static OWLAnnotation create(OWLDataFactory f, AnnotationShorthand s, OWLAnnotationValue v) { + final OWLAnnotationProperty p = f.getOWLAnnotationProperty(s.getAnnotationProperty()); + return f.getOWLAnnotation(p, v); + } } diff --git a/minerva-server/src/main/java/org/geneontology/minerva/server/handler/ModelSearchHandler.java b/minerva-server/src/main/java/org/geneontology/minerva/server/handler/ModelSearchHandler.java index e6a3dc58..615e0da2 100644 --- a/minerva-server/src/main/java/org/geneontology/minerva/server/handler/ModelSearchHandler.java +++ b/minerva-server/src/main/java/org/geneontology/minerva/server/handler/ModelSearchHandler.java @@ -1,621 +1,613 @@ package org.geneontology.minerva.server.handler; -import java.io.IOException; -import java.nio.charset.StandardCharsets; -import java.util.*; - -import javax.ws.rs.Consumes; -import javax.ws.rs.FormParam; -import javax.ws.rs.GET; -import javax.ws.rs.POST; -import javax.ws.rs.Path; -import javax.ws.rs.Produces; -import javax.ws.rs.QueryParam; -import javax.ws.rs.core.MediaType; - +import com.google.gson.annotations.SerializedName; import org.apache.commons.io.IOUtils; import org.geneontology.minerva.BlazegraphMolecularModelManager; import org.geneontology.minerva.BlazegraphOntologyManager; -import org.geneontology.minerva.ModelContainer; import org.geneontology.minerva.MolecularModelManager.UnknownIdentifierException; import org.geneontology.minerva.curie.CurieHandler; -import org.openrdf.query.Binding; -import org.openrdf.query.BindingSet; -import org.openrdf.query.MalformedQueryException; -import org.openrdf.query.QueryEvaluationException; -import org.openrdf.query.QueryResult; -import org.openrdf.query.TupleQueryResult; +import org.openrdf.query.*; import org.openrdf.repository.RepositoryException; import org.semanticweb.owlapi.model.IRI; -import com.google.gson.annotations.SerializedName; +import javax.ws.rs.*; +import javax.ws.rs.core.MediaType; +import java.io.IOException; +import java.nio.charset.StandardCharsets; +import java.util.*; /** * Respond to queries for models in the running blazegraph instance backing minerva * Uses Jersey + JSONP - * */ @Path("/search/models") public class ModelSearchHandler { - private final BlazegraphMolecularModelManager m3; - private final BlazegraphOntologyManager go_lego; - /** - * - */ - public 
ModelSearchHandler(BlazegraphMolecularModelManager m3) { - this.m3 = m3; - this.go_lego = m3.getGolego_repo(); - } - - public class ModelSearchResult { - private Integer n; - private LinkedHashSet models; - private String message; - private String error; - private String sparql; - public Integer getN() { - return n; - } - public void setN(Integer n) { - this.n = n; - } - public LinkedHashSet getModels() { - return models; - } - public void setModels(LinkedHashSet models) { - this.models = models; - } - public String getMessage() { - return message; - } - public void setMessage(String message) { - this.message = message; - } - public String getError() { - return error; - } - public void setError(String error) { - this.error = error; - } - public String getSparql() { - return sparql; - } - public void setSparql(String sparql) { - this.sparql = sparql; - } - - - } - - static public class ModelMeta{ - private String id; - private String date; - private String title; - private String state; - private Set contributors; - private Set groups; - private HashMap> query_match; - - @SerializedName("modified-p") - private boolean modified; - - public ModelMeta(String id, String date, String title, String state, Set contributors, Set groups, boolean modified) { - this.id = id; - this.date = date; - this.title = title; - this.state = state; - this.contributors = contributors; - this.groups = groups; - this.modified = modified; - query_match = new HashMap>(); - } - - - - public boolean isModified() { - return modified; - } - - - - public void setModified(boolean modified) { - this.modified = modified; - } - - - - public String getId() { - return id; - } - - public void setId(String id) { - this.id = id; - } - - public String getDate() { - return date; - } - - public void setDate(String date) { - this.date = date; - } - - public String getTitle() { - return title; - } - - public void setTitle(String title) { - this.title = title; - } - - public String getState() { - return state; - } - - public void setState(String state) { - this.state = state; - } - - public Set getContributors() { - return contributors; - } - - public void setContributors(Set contributors) { - this.contributors = contributors; - } - - public Set getGroups() { - return groups; - } - - public void setGroups(Set groups) { - this.groups = groups; - } - - public HashMap> getQuery_match() { - return query_match; - } - - public void setQuery_match(HashMap> query_match) { - this.query_match = query_match; - } - - - } - - - @GET - @Produces(MediaType.APPLICATION_JSON) - public ModelSearchResult searchGet( - @QueryParam("taxon") Set taxa, - @QueryParam("gp") Set gene_product_class_uris, - @QueryParam("term") Set terms, - @QueryParam("expand") String expand, - @QueryParam("pmid") Set pmids, - @QueryParam("title") String title, - @QueryParam("state") Set state, - @QueryParam("contributor") Set contributor, - @QueryParam("group") Set group, - @QueryParam("exactdate") String exactdate, - @QueryParam("date") String date, - @QueryParam("dateend") String datend, - @QueryParam("offset") int offset, - @QueryParam("limit") int limit, - @QueryParam("count") String count, - @QueryParam("debug") String debug, - @QueryParam("id") Set id - ){ - ModelSearchResult result = new ModelSearchResult(); - result = search(taxa, gene_product_class_uris, terms, expand, pmids, title, state, contributor, group, exactdate, date, datend, offset, limit, count, debug, id); - return result; - } - - //examples - //http://127.0.0.1:6800/search/? 
- //?gp=http://identifiers.org/uniprot/P15822-3 - //?term=http://purl.obolibrary.org/obo/GO_0003677 - // - // - //?gp=http://identifiers.org/mgi/MGI:1328355 - //&gp=http://identifiers.org/mgi/MGI:87986 - //&term=http://purl.obolibrary.org/obo/GO_0030968 - //&title=mouse - //&pmid=PMID:19911006 - //&state=development&state=review {development, production, closed, review, delete} or operator - //&count - //127.0.0.1:6800/search/?contributor=http://orcid.org/0000-0002-1706-4196 - public ModelSearchResult search(Set taxa, - Set gene_product_ids, Set terms, String expand, Setpmids, - String title_search,Set state_search, Set contributor_search, Set group_search, - String exactdate, String date_search, String datend, - int offset, int limit, String count, String debug, Set id) { - ModelSearchResult r = new ModelSearchResult(); - Set go_type_ids = new HashSet(); - Set gene_type_ids = new HashSet(); - if(gene_product_ids!=null) { - gene_type_ids.addAll(gene_product_ids); - } - if(terms!=null) { - go_type_ids.addAll(terms); - } - CurieHandler curie_handler = m3.getCuriHandler(); - Set go_type_uris = new HashSet(); - Set gene_type_uris = new HashSet(); - for(String curi : go_type_ids) { - if(curi.startsWith("http")) { - go_type_uris.add(curi); - }else { - try { - IRI iri = curie_handler.getIRI(curi); - if(iri!=null) { - go_type_uris.add(iri.toString()); - } - } catch (UnknownIdentifierException e) { - r.error += e.getMessage()+" \n "; - e.printStackTrace(); - return r; - } - } - } - for(String curi : gene_type_ids) { - if(curi.startsWith("http")) { - gene_type_uris.add(curi); - }else { - try { - IRI iri = curie_handler.getIRI(curi); - if(iri!=null) { - gene_type_uris.add(iri.toString()); - } - } catch (UnknownIdentifierException e) { - r.error += e.getMessage()+" \n "; - e.printStackTrace(); - return r; - } - } - } - Map id_model = new LinkedHashMap(); - String sparql=""; - try { - sparql = IOUtils.toString(ModelSearchHandler.class.getResourceAsStream("/ModelSearchQueryTemplate.rq"), StandardCharsets.UTF_8); - } catch (IOException e) { - // TODO Auto-generated catch block - e.printStackTrace(); - } - Map ind_return = new HashMap(); - String ind_return_list = ""; // - String types = ""; // - int n = 0; - for(String type_uri : gene_type_uris) { - n++; - ind_return.put("?ind"+n, type_uri); - ind_return_list = ind_return_list +" (GROUP_CONCAT(?ind" + n + " ; separator=\" \") AS ?inds" + n + ")"; - types = types+"?ind"+n+" rdf:type <"+type_uri+"> . \n"; - } - if(expand!=null) { - for(String go_type_uri : go_type_uris) { - n++; - ind_return.put("?ind"+n, go_type_uri); - ind_return_list = ind_return_list +" (GROUP_CONCAT(?ind" + n + " ; separator=\" \") AS ?inds" + n + ")"; - String expansion = "VALUES ?term"+n+" { "; - try { - Set subclasses = go_lego.getAllSubClasses(go_type_uri); - for(String sub : subclasses) { - expansion+="<"+sub+"> \n"; - } - expansion+= "} . \n"; - types = types+" "+expansion+" ?ind"+n+" rdf:type ?term"+n+" . \n"; - } catch (IOException e) { - // TODO Auto-generated catch block - e.printStackTrace(); - } - } - }else { - for(String go_type_uri : go_type_uris) { - n++; - ind_return.put("?ind"+n, go_type_uri); - ind_return_list = ind_return_list +" (GROUP_CONCAT(?ind" + n + " ; separator=\" \") AS ?inds" + n + ")"; - types = types+"?ind"+n+" rdf:type <"+go_type_uri+"> . 
\n"; - } - } - String id_constraint = ""; - if(id!=null&&id.size()>0) { - String id_list = ""; - for(String mid : id) { - if(!mid.contains("http")) { - String[] curie = mid.split(":"); - if(curie!=null&&curie.length==2) { - mid = "http://model.geneontology.org/"+curie[1]; - } - //TODO figure this out and add it to standard curie collection - // try { - // IRI iri = curie_handler.getIRI(id); - // id = iri.toString(); - // } catch (UnknownIdentifierException e) { - // // TODO Auto-generated catch block - // e.printStackTrace(); - // } - } - id_list += "<"+mid+"> "; - } - id_constraint = " values ?id { "+id_list+" } "; - } - String pmid_constraints = ""; // - if(pmids!=null) { - for(String pmid : pmids) { - n++; - ind_return.put("?ind"+n, pmid); - ind_return_list = ind_return_list +" (GROUP_CONCAT(?ind" + n + " ; separator=\" \") AS ?inds" + n + ")"; - pmid_constraints = pmid_constraints+"?ind"+n+" ?pmid FILTER (?pmid=\""+pmid+"\"^^xsd:string) .\n"; - } - } - String taxa_constraint = ""; - if(taxa!=null&&!taxa.isEmpty()) { - for(String taxon : taxa) { - if(taxon.startsWith("NCBITaxon:")) { - taxon = taxon.replace(":", "_"); - taxon = "http://purl.obolibrary.org/obo/"+taxon; - } - else if(!taxon.startsWith("http://purl.obolibrary.org/obo/NCBITaxon_")) { - taxon = "http://purl.obolibrary.org/obo/NCBITaxon_"+taxon; - } - taxa_constraint += "?id <"+BlazegraphOntologyManager.in_taxon_uri+"> <"+taxon+"> . \n"; - } - } - - - // if(taxa!=null&&!taxa.isEmpty()) { - // String model_filter = " VALUES ?id { \n"; - // for(String taxon : taxa) { - // if(taxon.startsWith("NCBITaxon:")) { - // taxon = taxon.replace(":", "_"); - // taxon = "http://purl.obolibrary.org/obo/"+taxon; - // } - // else if(!taxon.startsWith("http://purl.obolibrary.org/obo/NCBITaxon_")) { - // taxon = "http://purl.obolibrary.org/obo/NCBITaxon_"+taxon; - // } - // Set models = taxon_models.get(taxon); - // if(models!=null) { - // for(String model : models) { - // model_filter+="<"+model+"> \n"; - // } - // } - // } - // model_filter += "} . \n"; - // taxa_constraint = model_filter; - // } - String title_search_constraint = ""; - if(title_search!=null) { - title_search_constraint = "?title \""+title_search+"\" .\n"; - if(!title_search.contains("*")) { - title_search_constraint+=" ?title \""+"true"+"\" . \n"; - } - // if(exact_match) { - // title_search_constraint+=" ?title \""+"true"+"\" . \n"; - // } - } - String state_search_constraint = ""; - if(state_search!=null&&state_search.size()>0) { - String allowed_states = ""; - int c = 0; - for(String s : state_search) { - c++; - allowed_states+="\""+s+"\""; - if(c0) { - String allowed_contributors = ""; - int c = 0; - for(String contributor : contributor_search) { - c++; - allowed_contributors+="\""+contributor+"\""; - if(c ?test_contributor . \n" - + " FILTER (?test_contributor IN ("+allowed_contributors+")) . 
\n"; - } - String group_search_constraint = ""; - if(group_search!=null&&group_search.size()>0) { - String allowed_group = ""; - int c = 0; - for(String group : group_search) { - c++; - allowed_group+="\""+group+"\""; - if(c", return_block); - sparql = sparql.replaceAll("", id_constraint); - sparql = sparql.replaceAll("", group_by_constraint); - sparql = sparql.replaceAll("", ind_return_list); - sparql = sparql.replaceAll("", types); - sparql = sparql.replaceAll("", pmid_constraints); - sparql = sparql.replaceAll("", title_search_constraint); - sparql = sparql.replaceAll("", state_search_constraint); - sparql = sparql.replaceAll("", contributor_search_constraint); - sparql = sparql.replaceAll("", group_search_constraint); - sparql = sparql.replaceAll("", date_constraint); - sparql = sparql.replaceAll("", limit_constraint); - sparql = sparql.replaceAll("", offset_constraint); - sparql = sparql.replaceAll("", taxa_constraint); - if(debug!=null) { - r.sparql = sparql; - }else { - r.sparql = "add 'debug' parameter to see sparql request"; - } - TupleQueryResult result; - try { - result = (TupleQueryResult) m3.executeSPARQLQuery(sparql, 1000); - } catch (MalformedQueryException | QueryEvaluationException | RepositoryException e) { - if(e instanceof MalformedQueryException) { - r.message = "Malformed Query"; - }else if(e instanceof QueryEvaluationException) { - r.message = "Query Evaluation Problem - probably a time out"; - }else if(e instanceof RepositoryException) { - r.message = "Repository Exception"; - } - r.error = e.getMessage(); - e.printStackTrace(); - return r; - } - String n_count = null; - try { - while(result.hasNext()) { - BindingSet bs = result.next(); - if(count!=null) { - n_count = bs.getBinding("count").getValue().stringValue(); - }else { - //model meta - String model_iri_string = bs.getBinding("id").getValue().stringValue(); - IRI model_iri = IRI.create(model_iri_string); - String model_curie = null; - try { - model_curie = curie_handler.getCuri(IRI.create(model_iri_string)); - if(model_curie==null) { - model_curie = model_iri_string; - } - } catch (Exception e) { - r.error += e.getMessage()+" \n "; - e.printStackTrace(); - return r; - } - String date = bs.getBinding("mindate").getValue().stringValue(); - String title = bs.getBinding("mintitle").getValue().stringValue(); - String contribs = bs.getBinding("contributors").getValue().stringValue(); - //optional values (some are empty) - Binding state_binding = bs.getBinding("minstate"); - String state = ""; - if(state_binding!=null) { - state = state_binding.getValue().stringValue(); - } - Binding group_binding = bs.getBinding("groups"); - String groups_ = ""; - if(group_binding!=null) { - groups_ = group_binding.getValue().stringValue(); - } - Set contributors = new HashSet(Arrays.asList(contribs.split(";"))); - Set groups = new HashSet(); - if(groups_!=null) { - groups.addAll(Arrays.asList(groups_.split(";"))); - } - ModelMeta mm = id_model.get(model_curie); - if(mm==null) { - //look up model in in-memory cache to check edit state - boolean is_modified = m3.isModelModified(model_iri); - mm = new ModelMeta(model_curie, date, title, state, contributors, groups, is_modified); - } - //matching - for(String ind : ind_return.keySet()) { - String bindingName = ind.replace("?ind", "inds"); - String[] ind_class_matches = bs.getBinding(bindingName).getValue().stringValue().split(" ", -1); - for (String ind_class_match : ind_class_matches) { - Set matching_inds = mm.query_match.get(ind_return.get(ind)); - if(matching_inds==null) { - 
matching_inds = new HashSet(); - } - matching_inds.add(ind_class_match); - mm.query_match.put(ind_return.get(ind), matching_inds); - } - } - id_model.put(model_curie, mm); - } - } - } catch (QueryEvaluationException e) { - r.message = "Query Evaluation Problem - probably a time out"; - r.error = e.getMessage(); - e.printStackTrace(); - return r; - } - if(n_count!=null) { - r.n = Integer.parseInt(n_count); - }else { - r.n = id_model.size(); - r.models = new LinkedHashSet(id_model.values()); - } - try { - result.close(); - } catch (QueryEvaluationException e) { - r.message = "Query Evaluation Problem - can't close result set"; - r.error = e.getMessage(); - e.printStackTrace(); - return r; - } - //test - //http://127.0.0.1:6800/modelsearch/?query=bla - return r; - } - - - @POST - @Produces(MediaType.APPLICATION_JSON) - @Consumes(MediaType.APPLICATION_FORM_URLENCODED) - public ModelSearchResult searchPostForm( - @FormParam("taxon") Set taxa, - @FormParam("gp") Set gene_product_class_uris, - @FormParam("term") Set terms, - @FormParam("expand") String expand, - @FormParam("pmid") Set pmids, - @FormParam("title") String title, - @FormParam("state") Set state, - @FormParam("contributor") Set contributor, - @FormParam("group") Set group, - @FormParam("exactdate") String exactdate, - @FormParam("date") String date, - @FormParam("dateend") String datend, - @FormParam("offset") int offset, - @FormParam("limit") int limit, - @FormParam("count") String count, - @FormParam("debug") String debug, - @FormParam("debug") Set id) { - ModelSearchResult result = new ModelSearchResult(); - result = search(taxa, gene_product_class_uris, terms, expand, pmids, title, state, contributor, group, exactdate, date, datend, offset, limit, count, debug, id); - return result; - } + private final BlazegraphMolecularModelManager m3; + private final BlazegraphOntologyManager go_lego; + + /** + * + */ + public ModelSearchHandler(BlazegraphMolecularModelManager m3) { + this.m3 = m3; + this.go_lego = m3.getGolego_repo(); + } + + public class ModelSearchResult { + private Integer n; + private LinkedHashSet models; + private String message; + private String error; + private String sparql; + + public Integer getN() { + return n; + } + + public void setN(Integer n) { + this.n = n; + } + + public LinkedHashSet getModels() { + return models; + } + + public void setModels(LinkedHashSet models) { + this.models = models; + } + + public String getMessage() { + return message; + } + + public void setMessage(String message) { + this.message = message; + } + + public String getError() { + return error; + } + + public void setError(String error) { + this.error = error; + } + + public String getSparql() { + return sparql; + } + + public void setSparql(String sparql) { + this.sparql = sparql; + } + + + } + + static public class ModelMeta { + private String id; + private String date; + private String title; + private String state; + private Set contributors; + private Set groups; + private HashMap> query_match; + + @SerializedName("modified-p") + private boolean modified; + + public ModelMeta(String id, String date, String title, String state, Set contributors, Set groups, boolean modified) { + this.id = id; + this.date = date; + this.title = title; + this.state = state; + this.contributors = contributors; + this.groups = groups; + this.modified = modified; + query_match = new HashMap>(); + } + + + public boolean isModified() { + return modified; + } + + + public void setModified(boolean modified) { + this.modified = modified; + } + + + public 
String getId() { + return id; + } + + public void setId(String id) { + this.id = id; + } + + public String getDate() { + return date; + } + + public void setDate(String date) { + this.date = date; + } + + public String getTitle() { + return title; + } + + public void setTitle(String title) { + this.title = title; + } + + public String getState() { + return state; + } + + public void setState(String state) { + this.state = state; + } + + public Set getContributors() { + return contributors; + } + + public void setContributors(Set contributors) { + this.contributors = contributors; + } + + public Set getGroups() { + return groups; + } + + public void setGroups(Set groups) { + this.groups = groups; + } + + public HashMap> getQuery_match() { + return query_match; + } + + public void setQuery_match(HashMap> query_match) { + this.query_match = query_match; + } + + + } + + + @GET + @Produces(MediaType.APPLICATION_JSON) + public ModelSearchResult searchGet( + @QueryParam("taxon") Set taxa, + @QueryParam("gp") Set gene_product_class_uris, + @QueryParam("term") Set terms, + @QueryParam("expand") String expand, + @QueryParam("pmid") Set pmids, + @QueryParam("title") String title, + @QueryParam("state") Set state, + @QueryParam("contributor") Set contributor, + @QueryParam("group") Set group, + @QueryParam("exactdate") String exactdate, + @QueryParam("date") String date, + @QueryParam("dateend") String datend, + @QueryParam("offset") int offset, + @QueryParam("limit") int limit, + @QueryParam("count") String count, + @QueryParam("debug") String debug, + @QueryParam("id") Set id + ) { + ModelSearchResult result = new ModelSearchResult(); + result = search(taxa, gene_product_class_uris, terms, expand, pmids, title, state, contributor, group, exactdate, date, datend, offset, limit, count, debug, id); + return result; + } + + //examples + //http://127.0.0.1:6800/search/? 
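The example URLs in this comment block translate directly into client code. A minimal usage sketch (editorial illustration, not part of this patch): it assumes a local Minerva server on port 6800 as in the URLs above, uses parameter names taken from the @QueryParam annotations on searchGet, and relies on the fact that the debug parameter only needs to be present for the generated SPARQL to be echoed back in the response. The class name here is hypothetical.

    import java.net.URI;
    import java.net.http.HttpClient;
    import java.net.http.HttpRequest;
    import java.net.http.HttpResponse;

    public class ModelSearchExample {
        public static void main(String[] args) throws Exception {
            // Ask for models mentioning a GO term (CURIEs are resolved by the server's curie handler),
            // cap the result set, and echo the generated SPARQL by including the debug parameter.
            HttpClient client = HttpClient.newHttpClient();
            HttpRequest request = HttpRequest.newBuilder()
                    .uri(URI.create("http://127.0.0.1:6800/search/models?term=GO:0030968&limit=10&debug=true"))
                    .GET()
                    .build();
            HttpResponse<String> response = client.send(request, HttpResponse.BodyHandlers.ofString());
            System.out.println(response.body()); // JSON-serialized ModelSearchResult
        }
    }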
+ //?gp=http://identifiers.org/uniprot/P15822-3 + //?term=http://purl.obolibrary.org/obo/GO_0003677 + // + // + //?gp=http://identifiers.org/mgi/MGI:1328355 + //&gp=http://identifiers.org/mgi/MGI:87986 + //&term=http://purl.obolibrary.org/obo/GO_0030968 + //&title=mouse + //&pmid=PMID:19911006 + //&state=development&state=review {development, production, closed, review, delete} or operator + //&count + //127.0.0.1:6800/search/?contributor=http://orcid.org/0000-0002-1706-4196 + public ModelSearchResult search(Set taxa, + Set gene_product_ids, Set terms, String expand, Set pmids, + String title_search, Set state_search, Set contributor_search, Set group_search, + String exactdate, String date_search, String datend, + int offset, int limit, String count, String debug, Set id) { + ModelSearchResult r = new ModelSearchResult(); + Set go_type_ids = new HashSet(); + Set gene_type_ids = new HashSet(); + if (gene_product_ids != null) { + gene_type_ids.addAll(gene_product_ids); + } + if (terms != null) { + go_type_ids.addAll(terms); + } + CurieHandler curie_handler = m3.getCuriHandler(); + Set go_type_uris = new HashSet(); + Set gene_type_uris = new HashSet(); + for (String curi : go_type_ids) { + if (curi.startsWith("http")) { + go_type_uris.add(curi); + } else { + try { + IRI iri = curie_handler.getIRI(curi); + if (iri != null) { + go_type_uris.add(iri.toString()); + } + } catch (UnknownIdentifierException e) { + r.error += e.getMessage() + " \n "; + e.printStackTrace(); + return r; + } + } + } + for (String curi : gene_type_ids) { + if (curi.startsWith("http")) { + gene_type_uris.add(curi); + } else { + try { + IRI iri = curie_handler.getIRI(curi); + if (iri != null) { + gene_type_uris.add(iri.toString()); + } + } catch (UnknownIdentifierException e) { + r.error += e.getMessage() + " \n "; + e.printStackTrace(); + return r; + } + } + } + Map id_model = new LinkedHashMap(); + String sparql = ""; + try { + sparql = IOUtils.toString(ModelSearchHandler.class.getResourceAsStream("/ModelSearchQueryTemplate.rq"), StandardCharsets.UTF_8); + } catch (IOException e) { + // TODO Auto-generated catch block + e.printStackTrace(); + } + Map ind_return = new HashMap(); + String ind_return_list = ""; // + String types = ""; // + int n = 0; + for (String type_uri : gene_type_uris) { + n++; + ind_return.put("?ind" + n, type_uri); + ind_return_list = ind_return_list + " (GROUP_CONCAT(?ind" + n + " ; separator=\" \") AS ?inds" + n + ")"; + types = types + "?ind" + n + " rdf:type <" + type_uri + "> . \n"; + } + if (expand != null) { + for (String go_type_uri : go_type_uris) { + n++; + ind_return.put("?ind" + n, go_type_uri); + ind_return_list = ind_return_list + " (GROUP_CONCAT(?ind" + n + " ; separator=\" \") AS ?inds" + n + ")"; + String expansion = "VALUES ?term" + n + " { "; + try { + Set subclasses = go_lego.getAllSubClasses(go_type_uri); + for (String sub : subclasses) { + expansion += "<" + sub + "> \n"; + } + expansion += "} . \n"; + types = types + " " + expansion + " ?ind" + n + " rdf:type ?term" + n + " . \n"; + } catch (IOException e) { + // TODO Auto-generated catch block + e.printStackTrace(); + } + } + } else { + for (String go_type_uri : go_type_uris) { + n++; + ind_return.put("?ind" + n, go_type_uri); + ind_return_list = ind_return_list + " (GROUP_CONCAT(?ind" + n + " ; separator=\" \") AS ?inds" + n + ")"; + types = types + "?ind" + n + " rdf:type <" + go_type_uri + "> . 
\n"; + } + } + String id_constraint = ""; + if (id != null && id.size() > 0) { + String id_list = ""; + for (String mid : id) { + if (!mid.contains("http")) { + String[] curie = mid.split(":"); + if (curie != null && curie.length == 2) { + mid = "http://model.geneontology.org/" + curie[1]; + } + //TODO figure this out and add it to standard curie collection + // try { + // IRI iri = curie_handler.getIRI(id); + // id = iri.toString(); + // } catch (UnknownIdentifierException e) { + // // TODO Auto-generated catch block + // e.printStackTrace(); + // } + } + id_list += "<" + mid + "> "; + } + id_constraint = " values ?id { " + id_list + " } "; + } + String pmid_constraints = ""; // + if (pmids != null) { + for (String pmid : pmids) { + n++; + ind_return.put("?ind" + n, pmid); + ind_return_list = ind_return_list + " (GROUP_CONCAT(?ind" + n + " ; separator=\" \") AS ?inds" + n + ")"; + pmid_constraints = pmid_constraints + "?ind" + n + " ?pmid FILTER (?pmid=\"" + pmid + "\"^^xsd:string) .\n"; + } + } + String taxa_constraint = ""; + if (taxa != null && !taxa.isEmpty()) { + for (String taxon : taxa) { + if (taxon.startsWith("NCBITaxon:")) { + taxon = taxon.replace(":", "_"); + taxon = "http://purl.obolibrary.org/obo/" + taxon; + } else if (!taxon.startsWith("http://purl.obolibrary.org/obo/NCBITaxon_")) { + taxon = "http://purl.obolibrary.org/obo/NCBITaxon_" + taxon; + } + taxa_constraint += "?id <" + BlazegraphOntologyManager.in_taxon_uri + "> <" + taxon + "> . \n"; + } + } + + + // if(taxa!=null&&!taxa.isEmpty()) { + // String model_filter = " VALUES ?id { \n"; + // for(String taxon : taxa) { + // if(taxon.startsWith("NCBITaxon:")) { + // taxon = taxon.replace(":", "_"); + // taxon = "http://purl.obolibrary.org/obo/"+taxon; + // } + // else if(!taxon.startsWith("http://purl.obolibrary.org/obo/NCBITaxon_")) { + // taxon = "http://purl.obolibrary.org/obo/NCBITaxon_"+taxon; + // } + // Set models = taxon_models.get(taxon); + // if(models!=null) { + // for(String model : models) { + // model_filter+="<"+model+"> \n"; + // } + // } + // } + // model_filter += "} . \n"; + // taxa_constraint = model_filter; + // } + String title_search_constraint = ""; + if (title_search != null) { + title_search_constraint = "?title \"" + title_search + "\" .\n"; + if (!title_search.contains("*")) { + title_search_constraint += " ?title \"" + "true" + "\" . \n"; + } + // if(exact_match) { + // title_search_constraint+=" ?title \""+"true"+"\" . \n"; + // } + } + String state_search_constraint = ""; + if (state_search != null && state_search.size() > 0) { + String allowed_states = ""; + int c = 0; + for (String s : state_search) { + c++; + allowed_states += "\"" + s + "\""; + if (c < state_search.size()) { + allowed_states += ","; + } + } + // FILTER (?state IN ("production", , "development", "review", "closed", "delete" )) + state_search_constraint = "FILTER (?state IN (" + allowed_states + ")) . \n"; + } + String contributor_search_constraint = ""; + if (contributor_search != null && contributor_search.size() > 0) { + String allowed_contributors = ""; + int c = 0; + for (String contributor : contributor_search) { + c++; + allowed_contributors += "\"" + contributor + "\""; + if (c < contributor_search.size()) { + allowed_contributors += ","; + } + } + contributor_search_constraint = + " ?id ?test_contributor . \n" + + " FILTER (?test_contributor IN (" + allowed_contributors + ")) . 
\n"; + } + String group_search_constraint = ""; + if (group_search != null && group_search.size() > 0) { + String allowed_group = ""; + int c = 0; + for (String group : group_search) { + c++; + allowed_group += "\"" + group + "\""; + if (c < group_search.size()) { + allowed_group += ","; + } + } + group_search_constraint = " ?id ?test_group . \n" + + "FILTER (?test_group IN (" + allowed_group + ")) . \n"; + } + String date_constraint = ""; + if (exactdate != null && exactdate.length() == 10) { + date_constraint = "FILTER (?date = '" + exactdate + "') \n"; + } else if (date_search != null && date_search.length() == 10) { + //e.g. 2019-06-26 + date_constraint = "FILTER (?date > '" + date_search + "') \n"; + if (datend != null && datend.length() == 10) { + date_constraint = "FILTER (?date > '" + date_search + "' && ?date < '" + datend + "') \n"; + } + } + String offset_constraint = ""; + if (offset != 0) { + offset_constraint = "OFFSET " + offset + "\n"; + } + String limit_constraint = ""; + if (limit != 0) { + limit_constraint = "LIMIT " + limit + "\n"; + } + if (offset == 0 && limit == 0) { + limit_constraint = "LIMIT 1000\n"; + } + //default group by + String group_by_constraint = "GROUP BY ?id"; + //default return block + //TODO investigate need to add DISTINCT to GROUP_CONCAT here + String return_block = "?id (MIN(?date) AS ?mindate) (MIN(?title) AS ?mintitle) (MIN(?state) AS ?minstate) (GROUP_CONCAT(DISTINCT ?contributor;separator=\";\") AS ?contributors) (GROUP_CONCAT(DISTINCT ?group;separator=\";\") AS ?groups)"; + if (count != null) { + return_block = "(count(distinct ?id) as ?count)"; + limit_constraint = ""; + offset_constraint = ""; + group_by_constraint = ""; + } + sparql = sparql.replaceAll("", return_block); + sparql = sparql.replaceAll("", id_constraint); + sparql = sparql.replaceAll("", group_by_constraint); + sparql = sparql.replaceAll("", ind_return_list); + sparql = sparql.replaceAll("", types); + sparql = sparql.replaceAll("", pmid_constraints); + sparql = sparql.replaceAll("", title_search_constraint); + sparql = sparql.replaceAll("", state_search_constraint); + sparql = sparql.replaceAll("", contributor_search_constraint); + sparql = sparql.replaceAll("", group_search_constraint); + sparql = sparql.replaceAll("", date_constraint); + sparql = sparql.replaceAll("", limit_constraint); + sparql = sparql.replaceAll("", offset_constraint); + sparql = sparql.replaceAll("", taxa_constraint); + if (debug != null) { + r.sparql = sparql; + } else { + r.sparql = "add 'debug' parameter to see sparql request"; + } + TupleQueryResult result; + try { + result = (TupleQueryResult) m3.executeSPARQLQuery(sparql, 1000); + } catch (MalformedQueryException | QueryEvaluationException | RepositoryException e) { + if (e instanceof MalformedQueryException) { + r.message = "Malformed Query"; + } else if (e instanceof QueryEvaluationException) { + r.message = "Query Evaluation Problem - probably a time out"; + } else if (e instanceof RepositoryException) { + r.message = "Repository Exception"; + } + r.error = e.getMessage(); + e.printStackTrace(); + return r; + } + String n_count = null; + try { + while (result.hasNext()) { + BindingSet bs = result.next(); + if (count != null) { + n_count = bs.getBinding("count").getValue().stringValue(); + } else { + //model meta + String model_iri_string = bs.getBinding("id").getValue().stringValue(); + IRI model_iri = IRI.create(model_iri_string); + String model_curie = null; + try { + model_curie = curie_handler.getCuri(IRI.create(model_iri_string)); + if 
(model_curie == null) { + model_curie = model_iri_string; + } + } catch (Exception e) { + r.error += e.getMessage() + " \n "; + e.printStackTrace(); + return r; + } + String date = bs.getBinding("mindate").getValue().stringValue(); + String title = bs.getBinding("mintitle").getValue().stringValue(); + String contribs = bs.getBinding("contributors").getValue().stringValue(); + //optional values (some are empty) + Binding state_binding = bs.getBinding("minstate"); + String state = ""; + if (state_binding != null) { + state = state_binding.getValue().stringValue(); + } + Binding group_binding = bs.getBinding("groups"); + String groups_ = ""; + if (group_binding != null) { + groups_ = group_binding.getValue().stringValue(); + } + Set contributors = new HashSet(Arrays.asList(contribs.split(";"))); + Set groups = new HashSet(); + if (groups_ != null) { + groups.addAll(Arrays.asList(groups_.split(";"))); + } + ModelMeta mm = id_model.get(model_curie); + if (mm == null) { + //look up model in in-memory cache to check edit state + boolean is_modified = m3.isModelModified(model_iri); + mm = new ModelMeta(model_curie, date, title, state, contributors, groups, is_modified); + } + //matching + for (String ind : ind_return.keySet()) { + String bindingName = ind.replace("?ind", "inds"); + String[] ind_class_matches = bs.getBinding(bindingName).getValue().stringValue().split(" ", -1); + for (String ind_class_match : ind_class_matches) { + Set matching_inds = mm.query_match.get(ind_return.get(ind)); + if (matching_inds == null) { + matching_inds = new HashSet(); + } + matching_inds.add(ind_class_match); + mm.query_match.put(ind_return.get(ind), matching_inds); + } + } + id_model.put(model_curie, mm); + } + } + } catch (QueryEvaluationException e) { + r.message = "Query Evaluation Problem - probably a time out"; + r.error = e.getMessage(); + e.printStackTrace(); + return r; + } + if (n_count != null) { + r.n = Integer.parseInt(n_count); + } else { + r.n = id_model.size(); + r.models = new LinkedHashSet(id_model.values()); + } + try { + result.close(); + } catch (QueryEvaluationException e) { + r.message = "Query Evaluation Problem - can't close result set"; + r.error = e.getMessage(); + e.printStackTrace(); + return r; + } + //test + //http://127.0.0.1:6800/modelsearch/?query=bla + return r; + } + + + @POST + @Produces(MediaType.APPLICATION_JSON) + @Consumes(MediaType.APPLICATION_FORM_URLENCODED) + public ModelSearchResult searchPostForm( + @FormParam("taxon") Set taxa, + @FormParam("gp") Set gene_product_class_uris, + @FormParam("term") Set terms, + @FormParam("expand") String expand, + @FormParam("pmid") Set pmids, + @FormParam("title") String title, + @FormParam("state") Set state, + @FormParam("contributor") Set contributor, + @FormParam("group") Set group, + @FormParam("exactdate") String exactdate, + @FormParam("date") String date, + @FormParam("dateend") String datend, + @FormParam("offset") int offset, + @FormParam("limit") int limit, + @FormParam("count") String count, + @FormParam("debug") String debug, + @FormParam("debug") Set id) { + ModelSearchResult result = new ModelSearchResult(); + result = search(taxa, gene_product_class_uris, terms, expand, pmids, title, state, contributor, group, exactdate, date, datend, offset, limit, count, debug, id); + return result; + } } diff --git a/minerva-server/src/main/java/org/geneontology/minerva/server/handler/OperationsImpl.java b/minerva-server/src/main/java/org/geneontology/minerva/server/handler/OperationsImpl.java index d318faa6..da68aee2 100644 --- 
a/minerva-server/src/main/java/org/geneontology/minerva/server/handler/OperationsImpl.java +++ b/minerva-server/src/main/java/org/geneontology/minerva/server/handler/OperationsImpl.java @@ -22,7 +22,6 @@ import org.geneontology.minerva.server.handler.M3BatchHandler.Operation; import org.geneontology.minerva.server.handler.OperationsTools.MissingParameterException; import org.geneontology.minerva.server.validation.BeforeSaveModelValidator; -import org.geneontology.owl.differ.Differ; import org.geneontology.rules.engine.WorkingMemory; import org.obolibrary.robot.DiffOperation; import org.obolibrary.robot.IOHelper; @@ -52,758 +51,745 @@ */ abstract class OperationsImpl extends ModelCreator { - final Set importantRelations; - final BeforeSaveModelValidator beforeSaveValidator; - private final OWLAnnotationProperty contributor = OWLManager.getOWLDataFactory().getOWLAnnotationProperty(IRI.create("http://purl.org/dc/elements/1.1/contributor")); - - private static final Logger LOG = Logger.getLogger(OperationsImpl.class); - public static final int SPARQL_QUERY_TIMEOUT = 20; - - OperationsImpl(UndoAwareMolecularModelManager models, - Set importantRelations, - String defaultModelState) { - super(models, defaultModelState); - this.importantRelations = importantRelations; - this.beforeSaveValidator = new BeforeSaveModelValidator(); - } - - abstract boolean checkLiteralIdentifiers(); - - abstract boolean validateBeforeSave(); - - static class BatchHandlerValues implements VariableResolver { - - final Set relevantIndividuals = new HashSet<>(); - boolean renderBulk = false; - boolean nonMeta = false; - ModelContainer model = null; - Map individualVariable = new HashMap<>(); - String diffResult = null; - - @Override - public boolean notVariable(String id) { - return individualVariable.containsKey(id) == false; - } - - @Override - public OWLNamedIndividual getVariableValue(String id) throws UnknownIdentifierException { - if (individualVariable.containsKey(id)) { - OWLNamedIndividual individual = individualVariable.get(id); - if (individual == null) { - throw new UnknownIdentifierException("Variable "+id+" has a null value."); - } - return individual; - } - return null; - } - - public void addVariableValue(String id, OWLNamedIndividual i) throws UnknownIdentifierException { - if (id != null) { - individualVariable.put(id, i); - } - } - } - - - private OWLNamedIndividual getIndividual(String id, BatchHandlerValues values) throws UnknownIdentifierException { - if (values.notVariable(id)) { - IRI iri = curieHandler.getIRI(id); - OWLNamedIndividual i = m3.getIndividual(iri, values.model); - if (i == null) { - throw new UnknownIdentifierException("No individual found for id: '"+id+"' and IRI: "+iri+" in model: "+values.model.getModelId()); - } - return i; - } - return values.getVariableValue(id); - } - - /** - * Handle the request for an operation regarding an individual. 
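Individual operations can bind the individual they create to a request variable (assignToVariable) so that later operations in the same batch refer to it by that variable instead of an IRI; BatchHandlerValues resolves such references via notVariable/getVariableValue. A minimal standalone sketch of that resolution pattern, with hypothetical names (this is an illustration of the idea, not the Minerva API):

    import java.util.HashMap;
    import java.util.Map;

    public class VariableBindingSketch {
        public static void main(String[] args) {
            // An earlier 'add' operation bound its new individual to the variable "gp1".
            Map<String, String> variables = new HashMap<>();
            variables.put("gp1", "http://model.geneontology.org/0000/0001");

            // A later operation names "gp1" as its subject; resolve it to the bound IRI if it is
            // a known variable, otherwise treat it as a literal identifier.
            String subject = "gp1";
            String resolved = variables.containsKey(subject) ? variables.get(subject) : subject;
            System.out.println(resolved); // IRI of the individual created earlier in the batch
        }
    }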
- * - * @param request - * @param operation - * @param userId - * @param token - * @param values - * @return error or null - * @throws Exception - */ - String handleRequestForIndividual(M3Request request, Operation operation, String userId, Set providerGroups, UndoMetadata token, BatchHandlerValues values) throws Exception { - values.nonMeta = true; - requireNotNull(request.arguments, "request.arguments"); - values.model = checkModelId(values.model, request); - - // get info, no modification - if (Operation.get == operation) { - requireNotNull(request.arguments.individual, "request.arguments.individual"); - OWLNamedIndividual i = getIndividual(request.arguments.individual, values); - values.relevantIndividuals.add(i); - } - // create individual (look-up variable first) and add type - else if (Operation.add == operation) { - // required: expression - // optional: more expressions, values - requireNotNull(request.arguments.expressions, "request.arguments.expressions"); - Set annotations = extract(request.arguments.values, userId, providerGroups, values, values.model); - Map> dataProperties = extractDataProperties(request.arguments.values, values.model); - OWLNamedIndividual individual; - List clsExpressions = new ArrayList(request.arguments.expressions.length); - for(JsonOwlObject expression : request.arguments.expressions) { - OWLClassExpression cls = parseM3Expression(expression, values); - clsExpressions.add(cls); - } - if (values.notVariable(request.arguments.individual)) { - // create indivdual - if (request.arguments.individualIRI != null) { - IRI iri = curieHandler.getIRI(request.arguments.individualIRI); - individual = m3.createIndividualNonReasoning(values.model, iri, annotations, token); - } - else { - individual = m3.createIndividualNonReasoning(values.model, annotations, token); - } - - // add to render list and set variable - values.relevantIndividuals.add(individual); - values.addVariableValue(request.arguments.assignToVariable, individual); - } - else { - individual = values.getVariableValue(request.arguments.individual); - } - if (individual != null) { - // add types - for (OWLClassExpression clsExpression : clsExpressions) { - m3.addType(values.model, individual, clsExpression, token); - } - - if (dataProperties.isEmpty() == false) { - m3.addDataProperties(values.model, individual, dataProperties, token); - } - updateDate(values.model, individual, token, m3); - } - updateModelAnnotations(values.model, userId, providerGroups, token, m3); - } - // remove individual (and all axioms using it) - else if (Operation.remove == operation){ - // required: modelId, individual - requireNotNull(request.arguments.individual, "request.arguments.individual"); - OWLNamedIndividual i = getIndividual(request.arguments.individual, values); - - DeleteInformation dInfo = m3.deleteIndividual(values.model, i, token); - handleRemovedAnnotationIRIs(dInfo.usedIRIs, values.model, token); - updateAnnotationsForDelete(dInfo, values.model, userId, providerGroups, token, m3); - updateModelAnnotations(values.model, userId, providerGroups, token, m3); - values.renderBulk = true; - } - // add type / named class assertion - else if (Operation.addType == operation){ - // required: individual, expressions - requireNotNull(request.arguments.individual, "request.arguments.individual"); - requireNotNull(request.arguments.expressions, "request.arguments.expressions"); - - Set annotations = createGeneratedAnnotations(values.model, userId, providerGroups); - OWLNamedIndividual i = 
getIndividual(request.arguments.individual, values); - - for(JsonOwlObject expression : request.arguments.expressions) { - OWLClassExpression cls = parseM3Expression(expression, values); - m3.addType(values.model, i, cls, token); - values.relevantIndividuals.add(i); - values.addVariableValue(request.arguments.assignToVariable, i); - m3.addAnnotations(values.model, i, annotations, token); - } - updateDate(values.model, i, token, m3); - updateModelAnnotations(values.model, userId, providerGroups, token, m3); - } - // remove type / named class assertion - else if (Operation.removeType == operation){ - // required: individual, expressions - requireNotNull(request.arguments.individual, "request.arguments.individual"); - requireNotNull(request.arguments.expressions, "request.arguments.expressions"); - - Set annotations = createGeneratedAnnotations(values.model, userId, providerGroups); - OWLNamedIndividual i = getIndividual(request.arguments.individual, values); - - for(JsonOwlObject expression : request.arguments.expressions) { - OWLClassExpression cls = parseM3Expression(expression, values); - m3.removeType(values.model, i, cls, token); - values.relevantIndividuals.add(i); - values.addVariableValue(request.arguments.assignToVariable, i); - m3.addAnnotations(values.model, i, annotations, token); - } - updateDate(values.model, i, token, m3); - updateModelAnnotations(values.model, userId, providerGroups, token, m3); - } - // add annotation - else if (Operation.addAnnotation == operation){ - // required: individual, values - requireNotNull(request.arguments.individual, "request.arguments.individual"); - requireNotNull(request.arguments.values, "request.arguments.values"); - - Set annotations = extract(request.arguments.values, userId, providerGroups, values, values.model); - Map> dataProperties = extractDataProperties(request.arguments.values, values.model); - OWLNamedIndividual i = getIndividual(request.arguments.individual, values); - - values.relevantIndividuals.add(i); - if (annotations.isEmpty() == false) { - m3.addAnnotations(values.model, i, annotations, token); - } - if (dataProperties.isEmpty() == false) { - m3.addDataProperties(values.model, i, dataProperties, token); - } - values.addVariableValue(request.arguments.assignToVariable, i); - updateDate(values.model, i, token, m3); - updateModelAnnotations(values.model, userId, providerGroups, token, m3); - } - // remove annotation - else if (Operation.removeAnnotation == operation){ - // required: individual, values - requireNotNull(request.arguments.individual, "request.arguments.individual"); - requireNotNull(request.arguments.values, "request.arguments.values"); - - Set annotations = extract(request.arguments.values, null, Collections.emptySet(), values, values.model); - Map> dataProperties = extractDataProperties(request.arguments.values, values.model); - OWLNamedIndividual i = getIndividual(request.arguments.individual, values); - - Set evidenceIRIs = MolecularModelManager.extractEvidenceIRIValues(annotations); - - values.relevantIndividuals.add(i); - if (annotations.isEmpty() == false) { - m3.removeAnnotations(values.model, i, annotations, token); - - } - if (dataProperties.isEmpty() == false) { - m3.removeDataProperties(values.model, i, dataProperties, token); - } - values.addVariableValue(request.arguments.assignToVariable, i); - - handleRemovedAnnotationIRIs(evidenceIRIs, values.model, token); - updateDate(values.model, i, token, m3); - updateModelAnnotations(values.model, userId, providerGroups, token, m3); - } - else { - return 
"Unknown operation: "+operation; - } - return null; - } - - private void handleRemovedAnnotationIRIs(Set evidenceIRIs, ModelContainer model, UndoMetadata token) { - if (evidenceIRIs != null) { - for (IRI evidenceIRI : evidenceIRIs) { - OWLNamedIndividual i = m3.getIndividual(evidenceIRI, model); - if (i != null) { - m3.deleteIndividual(model, i, token); - } - // ignoring undefined IRIs - } - } - } - - //TODO likely dead code here. - private OWLClassExpression parseM3Expression(JsonOwlObject expression, BatchHandlerValues values) - throws MissingParameterException, UnknownIdentifierException, OWLException { - M3ExpressionParser p = new M3ExpressionParser(checkLiteralIdentifiers(), curieHandler); - return p.parse(values.model, expression, null); - } - - private OWLObjectProperty getProperty(String id, BatchHandlerValues values) throws UnknownIdentifierException { - OWLObjectProperty p = m3.getObjectProperty(id, values.model); - if (p == null) { - throw new UnknownIdentifierException("Could not find a property for id: "+id); - } - return p; - } - - /** - * Handle the request for an operation regarding an edge. - * - * @param request - * @param operation - * @param userId - * @param token - * @param values - * @return error or null - * @throws Exception - */ - String handleRequestForEdge(M3Request request, Operation operation, String userId, Set providerGroups, UndoMetadata token, BatchHandlerValues values) throws Exception { - values.nonMeta = true; - requireNotNull(request.arguments, "request.arguments"); - values.model = checkModelId(values.model, request); - // required: subject, predicate, object - requireNotNull(request.arguments.subject, "request.arguments.subject"); - requireNotNull(request.arguments.predicate, "request.arguments.predicate"); - requireNotNull(request.arguments.object, "request.arguments.object"); - // check for variables - final OWLNamedIndividual s = getIndividual(request.arguments.subject, values); - final OWLNamedIndividual o = getIndividual(request.arguments.object, values); - final OWLObjectProperty p = getProperty(request.arguments.predicate, values); - values.relevantIndividuals.addAll(Arrays.asList(s, o)); - - // add edge - if (Operation.add == operation){ - // optional: values - Set annotations = extract(request.arguments.values, userId, providerGroups, values, values.model); - addDateAnnotation(annotations, values.model.getOWLDataFactory()); - m3.addFact(values.model, p, s, o, annotations, token); - updateModelAnnotations(values.model, userId, providerGroups, token, m3); - } - // remove edge - else if (Operation.remove == operation){ - Set removedIRIs = m3.removeFact(values.model, p, s, o, token); - if (removedIRIs != null && removedIRIs.isEmpty() == false) { - // only render bulk, iff there were additional deletes (i.e. 
evidence removal) - values.renderBulk = true; - handleRemovedAnnotationIRIs(removedIRIs, values.model, token); - } - updateModelAnnotations(values.model, userId, providerGroups, token, m3); - } - // add annotation - else if (Operation.addAnnotation == operation){ - requireNotNull(request.arguments.values, "request.arguments.values"); - - m3.addAnnotations(values.model, p, s, o, - extract(request.arguments.values, userId, providerGroups, values, values.model), token); - updateDate(values.model, p, s, o, token, m3); - updateModelAnnotations(values.model, userId, providerGroups, token, m3); - } - // remove annotation - else if (Operation.removeAnnotation == operation){ - requireNotNull(request.arguments.values, "request.arguments.values"); - - Set annotations = extract(request.arguments.values, null, Collections.emptySet(), values, values.model); - Set evidenceIRIs = MolecularModelManager.extractEvidenceIRIValues(annotations); - m3.removeAnnotations(values.model, p, s, o, annotations, token); - handleRemovedAnnotationIRIs(evidenceIRIs, values.model, token); - updateDate(values.model, p, s, o, token, m3); - updateModelAnnotations(values.model, userId, providerGroups, token, m3); - } - else { - return "Unknown operation: "+operation; - } - return null; - } - - /** - * Handle the request for an operation regarding a model. - * - * @param request - * @param response - * @param operation - * @param userId - * @param token - * @param values - * @return error or null - * @throws Exception - */ - String handleRequestForModel(M3Request request, M3BatchResponse response, Operation operation, String userId, Set providerGroups, UndoMetadata token, BatchHandlerValues values) throws Exception { - // get model - if (Operation.get == operation){ - values.nonMeta = true; - requireNotNull(request.arguments, "request.arguments"); - values.model = checkModelId(values.model, request); - values.renderBulk = true; - } - else if (Operation.updateImports == operation){ - values.nonMeta = true; - requireNotNull(request.arguments, "request.arguments"); - values.model = checkModelId(values.model, request); - values.renderBulk = true; - } - // add an empty model - else if (Operation.add == operation) { - values.nonMeta = true; - values.renderBulk = true; - - if (request.arguments != null) { - values.model = createModel(userId, providerGroups, token, values, request.arguments.values); - } - else { - values.model = createModel(userId, providerGroups, token, values, null); - } - } - else if (Operation.addAnnotation == operation) { - values.nonMeta = true; - requireNotNull(request.arguments, "request.arguments"); - requireNotNull(request.arguments.values, "request.arguments.values"); - values.model = checkModelId(values.model, request); - Set annotations = extract(request.arguments.values, userId, providerGroups, values, values.model); - if (annotations != null) { - m3.addModelAnnotations(values.model, annotations, token); - } - updateModelAnnotations(values.model, userId, providerGroups, token, m3); - } - else if (Operation.removeAnnotation == operation) { - values.nonMeta = true; - requireNotNull(request.arguments, "request.arguments"); - requireNotNull(request.arguments.values, "request.arguments.values"); - values.model = checkModelId(values.model, request); - Set annotations = extract(request.arguments.values, null, Collections.emptySet(), values, values.model); - if (annotations != null) { - m3.removeAnnotations(values.model, annotations, token); - } - updateModelAnnotations(values.model, userId, providerGroups, token, 
m3); - values.renderBulk = true; - } - else if (Operation.exportModel == operation) { - if (values.nonMeta) { - // can only be used with other "meta" operations in batch mode, otherwise it would lead to conflicts in the returned signal - return "Export model can only be combined with other meta operations."; - } - requireNotNull(request.arguments, "request.arguments"); - values.model = checkModelId(values.model, request); - export(response, values.model, userId, providerGroups); - } - else if (Operation.exportModelLegacy == operation) { - if (values.nonMeta) { - // can only be used with other "meta" operations in batch mode, otherwise it would lead to conflicts in the returned signal - return "Export legacy model can only be combined with other meta operations."; - } - requireNotNull(request.arguments, "request.arguments"); - values.model = checkModelId(values.model, request); - exportLegacy(response, values.model, request.arguments.format, userId); - } - else if (Operation.importModel == operation) { - values.nonMeta = true; - requireNotNull(request.arguments, "request.arguments"); - requireNotNull(request.arguments.importModel, "request.arguments.importModel"); - //this is documented as not working... - values.model = m3.importModel(request.arguments.importModel); - - Set annotations = extract(request.arguments.values, userId, providerGroups, values, values.model); - if (annotations != null) { - m3.addModelAnnotations(values.model, annotations, token); - } - updateModelAnnotations(values.model, userId, providerGroups, token, m3); - values.renderBulk = true; - } - else if (Operation.storeModel == operation) { - values.nonMeta = true; - requireNotNull(request.arguments, "request.arguments"); - values.model = checkModelId(values.model, request); - Set annotations = extract(request.arguments.values, userId, providerGroups, values, values.model); - if (validateBeforeSave()) { - List issues = beforeSaveValidator.validateBeforeSave(values.model); - if (issues != null && !issues.isEmpty()) { - StringBuilder commentary = new StringBuilder(); - for (Iterator it = issues.iterator(); it.hasNext();) { - String issue = it.next(); - commentary.append(issue); - if (it.hasNext()) { - commentary.append('\n'); - } - } - response.commentary = commentary.toString(); - return "Save model failed: validation error(s) before save"; - } - } - m3.saveModel(values.model, annotations, token); - values.renderBulk = true; - } - else if (Operation.resetModel == operation) { - values.nonMeta = true; - requireNotNull(request.arguments, "request.arguments"); - values.model = checkModelId(values.model, request); - //drop in memory model and reload - IRI model_iri = values.model.getModelId(); - boolean drop_cached = true; - //load will reload from db if override - m3.loadModel(model_iri, drop_cached); - //ensure the change queue is gone to avoid downstream confusion. - m3.clearUndoHistory(model_iri); - //reset model values - values.model = checkModelId(null, request); - values.renderBulk = true; - }else if (Operation.diffModel == operation) { - values.nonMeta = true; - requireNotNull(request.arguments, "request.arguments"); - //this won't change - values.model = checkModelId(values.model, request); - IRI model_iri = values.model.getModelId(); - //run diff - OWLOntologyManager man1 = OWLManager.createOWLOntologyManager(); - //do we have an ontology in the datastore with that id? 
- OWLOntology stored_ontology = null; - if(m3.getStoredModelIds().contains(model_iri)) { - stored_ontology = m3.loadModelABox(model_iri); - }else { - //could error out here, but maybe this is more useful - stored_ontology = man1.createOntology(); - } - OWLOntology active_ontology = man1.copyOntology(values.model.getAboxOntology(), OntologyCopy.DEEP); - - //TODO refine representation of diff result.. - StringWriter writer = new StringWriter(); - // boolean actual = DiffOperation.compare(active_ontology, stored_ontology, writer); - Map options = new HashMap<>(); - options.put("labels", "true"); - options.put("format", "pretty"); //plain, pretty, html, markdown - DiffOperation.compare(stored_ontology, active_ontology, new IOHelper(), writer, options); - values.diffResult = writer.toString(); - writer.close(); - values.renderBulk = true; - } - else if (Operation.undo == operation) { - values.nonMeta = true; - requireNotNull(request.arguments, "request.arguments"); - values.model = checkModelId(values.model, request); - m3.undo(values.model, userId); - values.renderBulk = true; - } - else if (Operation.redo == operation) { - values.nonMeta = true; - requireNotNull(request.arguments, "request.arguments"); - values.model = checkModelId(values.model, request); - m3.redo(values.model, userId); - values.renderBulk = true; - } - else if (Operation.getUndoRedo == operation) { - if (values.nonMeta) { - // can only be used with other "meta" operations in batch mode, otherwise it would lead to conflicts in the returned signal - return operation+" cannot be combined with other operations."; - } - requireNotNull(request.arguments, "request.arguments"); - values.model = checkModelId(values.model, request); - getCurrentUndoRedoForModel(response, values.model.getModelId(), userId); - } - else { - return "Unknown operation: "+operation; - } - return null; - } - - void handleSPARQLRequest(M3Request request, M3BatchResponse response) throws IOException, OWLException, QueryEvaluationException, RDFHandlerException, TupleQueryResultHandlerException, RepositoryException, MalformedQueryException { - String query = request.arguments.query; - if (query != null) { - QueryResult result = m3.executeSPARQLQuery(query, SPARQL_QUERY_TIMEOUT); - - final JsonObject jsonObject; - if (result instanceof GraphQueryResult) { - //RDFWriter writer = new RDFJSONWriterFactory().getWriter(stream); - ByteArrayOutputStream stream = new ByteArrayOutputStream(); - RDFWriter writer = new SesameJSONLDWriterFactory().getWriter(stream); - writer.handleNamespace("gomodel", "http://model.geneontology.org/"); - Rio.write(QueryResults.asModel((GraphQueryResult)result), writer); - String json = stream.toString("UTF-8"); - stream.close(); - jsonObject = new Gson().fromJson(json, JsonObject.class); - //QueryResultIO.write((GraphQueryResult) result, RDFFormat.RDFJSON, stream); - } else if (result instanceof TupleQueryResult) { - jsonObject = new SPARQLResultJSONRenderer(curieHandler).renderResults((TupleQueryResult) result); - //QueryResultIO.write((TupleQueryResult) result, TupleQueryResultFormat.JSON, stream); - } else { - throw new UnsupportedOperationException("Query type not supported."); - } - initMetaResponse(response); - response.data.sparqlResult = jsonObject; - } - } - - private void getCurrentUndoRedoForModel(M3BatchResponse response, IRI modelId, String userId) { - Pair,List> undoRedoEvents = m3.getUndoRedoEvents(modelId); - initMetaResponse(response); - List> undos = new ArrayList>(); - List> redos = new ArrayList>(); - final long 
currentTime = System.currentTimeMillis(); - for(ChangeEvent undo : undoRedoEvents.getLeft()) { - Map data = new HashMap(3); - data.put("user-id", undo.getUserId()); - data.put("time", Long.valueOf(currentTime-undo.getTime())); - // TODO add a summary of the change? axiom count? - undos.add(data); - } - for(ChangeEvent redo : undoRedoEvents.getRight()) { - Map data = new HashMap(3); - data.put("user-id", redo.getUserId()); - data.put("time", Long.valueOf(currentTime-redo.getTime())); - // TODO add a summary of the change? axiom count? - redos.add(data); - } - response.data.undo = undos; - response.data.redo = redos; - } - - private void initMetaResponse(M3BatchResponse response) { - if (response.data == null) { - response.data = new ResponseData(); - response.messageType = M3BatchResponse.MESSAGE_TYPE_SUCCESS; - response.message = "success: 0"; - response.signal = M3BatchResponse.SIGNAL_META; - } - } - - /** - * Handle the request for the meta properties. - * - * @param response - * @param userId - * @throws IOException - * @throws OWLException - */ - void getMeta(M3BatchResponse response, String userId, Set providerGroups) throws IOException, OWLException { - // init - initMetaResponse(response); - if (response.data.meta == null) { - response.data.meta = new MetaResponse(); - } - - // relations - Pair, List> propPair = MolecularModelJsonRenderer.renderProperties(m3, importantRelations, curieHandler); - final List relList = propPair.getLeft(); - if (relList != null) { - response.data.meta.relations = relList.toArray(new JsonRelationInfo[relList.size()]); - } - - // data properties - final List propList = propPair.getRight(); - if (propList != null) { - response.data.meta.dataProperties = propList.toArray(new JsonRelationInfo[propList.size()]); - } - - // evidence - final List evidencesList = MolecularModelJsonRenderer.renderEvidences(m3, curieHandler); - if (evidencesList != null) { - response.data.meta.evidence = evidencesList.toArray(new JsonEvidenceInfo[evidencesList.size()]); - } - - // model ids - // and model annotations - final Set allModelIds = m3.getAvailableModelIds(); - final Map> allModelAnnotations = new HashMap<>(); - final Map> allModelAnnotationsReadOnly = new HashMap<>(); - final Map> annotationsForAllModels = m3.getAllModelAnnotations(); - for (IRI modelId : allModelIds) { - String curie = curieHandler.getCuri(modelId); - List modelAnnotations = new ArrayList<>(); - allModelAnnotations.put(curie, modelAnnotations); - // Iterate through the model's a. 
- Set annotations = annotationsForAllModels.get(modelId); - if (annotations != null) { - for (OWLAnnotation an : annotations) { - final String label; - if (an.getProperty().equals(contributor)) { - final IRI iri; - if (an.getValue() instanceof IRI) { - iri = an.getValue().asIRI().get(); - } else if (an.getValue() instanceof OWLLiteral) { - iri = IRI.create(an.getValue().asLiteral().get().getLiteral()); - } else { iri = null; } - if (iri != null) { label = m3.getTboxLabelIndex().getOrDefault(iri, null); } - else { label = null; } - } else { - label = null; - } - JsonAnnotation json = JsonTools.create(an.getProperty(), an.getValue(), label, curieHandler); - if (json != null) { - modelAnnotations.add(json); - } - } - } else { - LOG.error("No annotations found for model: " + modelId); - } - // handle read-only information, currently only the modification flag - // check modification status - boolean modified = m3.isModelModified(modelId); - Map readOnly = Collections.singletonMap("modified-p", Boolean.valueOf(modified)); - allModelAnnotationsReadOnly.put(curie, readOnly); - } - response.data.meta.modelsMeta = allModelAnnotations; - response.data.meta.modelsReadOnly = allModelAnnotationsReadOnly; - } - - void exportAllModels() throws OWLOntologyStorageException, OWLOntologyCreationException, IOException { - m3.dumpAllStoredModels(); - } - - private void export(M3BatchResponse response, ModelContainer model, String userId, Set providerGroups) throws OWLOntologyStorageException, UnknownIdentifierException { - String exportModel = m3.exportModel(model); - initMetaResponse(response); - response.data.exportModel = exportModel; - } - - private void exportLegacy(M3BatchResponse response, ModelContainer model, String format, String userId) throws IOException, OWLOntologyCreationException, UnknownIdentifierException { - if ("gpad".equals(format)) { - initMetaResponse(response); - try { - GPADSPARQLExport exporter = new GPADSPARQLExport(curieHandler, m3.getLegacyRelationShorthandIndex(), m3.getTboxShorthandIndex(), m3.getGolego_repo().regulatorsToRegulated); - WorkingMemory wm = m3.createCanonicalInferredModel(model.getModelId()); - response.data.exportModel = exporter.exportGPAD(wm, model.getModelId()); - // response.data.exportModel = new GPADSPARQLExport(curieHandler, m3.getLegacyRelationShorthandIndex(), m3.getTboxShorthandIndex(), m3.getDoNotAnnotateSubset()).exportGPAD( - // m3.createCanonicalInferredModel(model.getModelId())); - } catch (InconsistentOntologyException e) { - response.messageType = MinervaResponse.MESSAGE_TYPE_ERROR; - response.message = "The model is inconsistent; a GPAD cannot be created."; - } - } else if ("explanations".equals(format)) { - initMetaResponse(response); - response.data.exportModel = ExportExplanation.exportExplanation(m3.createInferredModel(model.getModelId()), m3.getGolego_repo(), m3.getLegacyRelationShorthandIndex()); - } else { - // final GafExportTool exportTool = GafExportTool.getInstance(); - // if (format == null) { - // format = "gaf"; // set a default format, if necessary - // } - // Map allExported = exportTool.exportModelLegacy(model, curieHandler, externalLookupService, Collections.singleton(format)); - // String exported = allExported.get(format); - // if (exported == null) { - throw new IOException("Unknown export format: "+format); - // } - // initMetaResponse(response); - // response.data.exportModel = exported; - } - } - - - /** - * @param model - * @param request - * @return modelId - * @throws MissingParameterException - * @throws 
MultipleModelIdsParameterException - * @throws UnknownIdentifierException - */ - public ModelContainer checkModelId(ModelContainer model, M3Request request) - throws MissingParameterException, MultipleModelIdsParameterException, UnknownIdentifierException { - - if (model == null) { - final String currentModelId = request.arguments.modelId; - requireNotNull(currentModelId, "request.arguments.modelId"); - model = m3.checkModelId(curieHandler.getIRI(currentModelId)); - } - else { - final String currentModelId = request.arguments.modelId; - if (currentModelId != null) { - IRI modelId = curieHandler.getIRI(currentModelId); - if (model.getModelId().equals(modelId) == false) { - throw new MultipleModelIdsParameterException("Using multiple modelIds in one batch call is not supported."); - } - } - } - return model; - } - - private void updateAnnotationsForDelete(DeleteInformation info, ModelContainer model, String userId, Set providerGroups, UndoMetadata token, UndoAwareMolecularModelManager m3) throws UnknownIdentifierException { - final OWLDataFactory f = model.getOWLDataFactory(); - final OWLAnnotation annotation = createDateAnnotation(f); - final Set generated = new HashSet(); - addGeneratedAnnotations(userId, providerGroups, generated, f); - for(IRI subject : info.touched) { - m3.updateAnnotation(model, subject, annotation, token); - m3.addAnnotations(model, subject, generated, token); - } - if (info.updated.isEmpty() == false) { - Set newAxioms = - m3.updateAnnotation(model, info.updated, annotation, token); - m3.addAnnotations(model, newAxioms, generated, token); - } - } - - static class MultipleModelIdsParameterException extends Exception { - - private static final long serialVersionUID = 4362299465121954598L; - - /** - * @param message - */ - MultipleModelIdsParameterException(String message) { - super(message); - } - - } + final Set importantRelations; + final BeforeSaveModelValidator beforeSaveValidator; + private final OWLAnnotationProperty contributor = OWLManager.getOWLDataFactory().getOWLAnnotationProperty(IRI.create("http://purl.org/dc/elements/1.1/contributor")); + + private static final Logger LOG = Logger.getLogger(OperationsImpl.class); + public static final int SPARQL_QUERY_TIMEOUT = 20; + + OperationsImpl(UndoAwareMolecularModelManager models, + Set importantRelations, + String defaultModelState) { + super(models, defaultModelState); + this.importantRelations = importantRelations; + this.beforeSaveValidator = new BeforeSaveModelValidator(); + } + + abstract boolean checkLiteralIdentifiers(); + + abstract boolean validateBeforeSave(); + + static class BatchHandlerValues implements VariableResolver { + + final Set relevantIndividuals = new HashSet<>(); + boolean renderBulk = false; + boolean nonMeta = false; + ModelContainer model = null; + Map individualVariable = new HashMap<>(); + String diffResult = null; + + @Override + public boolean notVariable(String id) { + return individualVariable.containsKey(id) == false; + } + + @Override + public OWLNamedIndividual getVariableValue(String id) throws UnknownIdentifierException { + if (individualVariable.containsKey(id)) { + OWLNamedIndividual individual = individualVariable.get(id); + if (individual == null) { + throw new UnknownIdentifierException("Variable " + id + " has a null value."); + } + return individual; + } + return null; + } + + public void addVariableValue(String id, OWLNamedIndividual i) throws UnknownIdentifierException { + if (id != null) { + individualVariable.put(id, i); + } + } + } + + + private OWLNamedIndividual 
getIndividual(String id, BatchHandlerValues values) throws UnknownIdentifierException { + if (values.notVariable(id)) { + IRI iri = curieHandler.getIRI(id); + OWLNamedIndividual i = m3.getIndividual(iri, values.model); + if (i == null) { + throw new UnknownIdentifierException("No individual found for id: '" + id + "' and IRI: " + iri + " in model: " + values.model.getModelId()); + } + return i; + } + return values.getVariableValue(id); + } + + /** + * Handle the request for an operation regarding an individual. + * + * @param request + * @param operation + * @param userId + * @param token + * @param values + * @return error or null + * @throws Exception + */ + String handleRequestForIndividual(M3Request request, Operation operation, String userId, Set providerGroups, UndoMetadata token, BatchHandlerValues values) throws Exception { + values.nonMeta = true; + requireNotNull(request.arguments, "request.arguments"); + values.model = checkModelId(values.model, request); + + // get info, no modification + if (Operation.get == operation) { + requireNotNull(request.arguments.individual, "request.arguments.individual"); + OWLNamedIndividual i = getIndividual(request.arguments.individual, values); + values.relevantIndividuals.add(i); + } + // create individual (look-up variable first) and add type + else if (Operation.add == operation) { + // required: expression + // optional: more expressions, values + requireNotNull(request.arguments.expressions, "request.arguments.expressions"); + Set annotations = extract(request.arguments.values, userId, providerGroups, values, values.model); + Map> dataProperties = extractDataProperties(request.arguments.values, values.model); + OWLNamedIndividual individual; + List clsExpressions = new ArrayList(request.arguments.expressions.length); + for (JsonOwlObject expression : request.arguments.expressions) { + OWLClassExpression cls = parseM3Expression(expression, values); + clsExpressions.add(cls); + } + if (values.notVariable(request.arguments.individual)) { + // create indivdual + if (request.arguments.individualIRI != null) { + IRI iri = curieHandler.getIRI(request.arguments.individualIRI); + individual = m3.createIndividualNonReasoning(values.model, iri, annotations, token); + } else { + individual = m3.createIndividualNonReasoning(values.model, annotations, token); + } + + // add to render list and set variable + values.relevantIndividuals.add(individual); + values.addVariableValue(request.arguments.assignToVariable, individual); + } else { + individual = values.getVariableValue(request.arguments.individual); + } + if (individual != null) { + // add types + for (OWLClassExpression clsExpression : clsExpressions) { + m3.addType(values.model, individual, clsExpression, token); + } + + if (dataProperties.isEmpty() == false) { + m3.addDataProperties(values.model, individual, dataProperties, token); + } + updateDate(values.model, individual, token, m3); + } + updateModelAnnotations(values.model, userId, providerGroups, token, m3); + } + // remove individual (and all axioms using it) + else if (Operation.remove == operation) { + // required: modelId, individual + requireNotNull(request.arguments.individual, "request.arguments.individual"); + OWLNamedIndividual i = getIndividual(request.arguments.individual, values); + + DeleteInformation dInfo = m3.deleteIndividual(values.model, i, token); + handleRemovedAnnotationIRIs(dInfo.usedIRIs, values.model, token); + updateAnnotationsForDelete(dInfo, values.model, userId, providerGroups, token, m3); + 
updateModelAnnotations(values.model, userId, providerGroups, token, m3); + values.renderBulk = true; + } + // add type / named class assertion + else if (Operation.addType == operation) { + // required: individual, expressions + requireNotNull(request.arguments.individual, "request.arguments.individual"); + requireNotNull(request.arguments.expressions, "request.arguments.expressions"); + + Set annotations = createGeneratedAnnotations(values.model, userId, providerGroups); + OWLNamedIndividual i = getIndividual(request.arguments.individual, values); + + for (JsonOwlObject expression : request.arguments.expressions) { + OWLClassExpression cls = parseM3Expression(expression, values); + m3.addType(values.model, i, cls, token); + values.relevantIndividuals.add(i); + values.addVariableValue(request.arguments.assignToVariable, i); + m3.addAnnotations(values.model, i, annotations, token); + } + updateDate(values.model, i, token, m3); + updateModelAnnotations(values.model, userId, providerGroups, token, m3); + } + // remove type / named class assertion + else if (Operation.removeType == operation) { + // required: individual, expressions + requireNotNull(request.arguments.individual, "request.arguments.individual"); + requireNotNull(request.arguments.expressions, "request.arguments.expressions"); + + Set annotations = createGeneratedAnnotations(values.model, userId, providerGroups); + OWLNamedIndividual i = getIndividual(request.arguments.individual, values); + + for (JsonOwlObject expression : request.arguments.expressions) { + OWLClassExpression cls = parseM3Expression(expression, values); + m3.removeType(values.model, i, cls, token); + values.relevantIndividuals.add(i); + values.addVariableValue(request.arguments.assignToVariable, i); + m3.addAnnotations(values.model, i, annotations, token); + } + updateDate(values.model, i, token, m3); + updateModelAnnotations(values.model, userId, providerGroups, token, m3); + } + // add annotation + else if (Operation.addAnnotation == operation) { + // required: individual, values + requireNotNull(request.arguments.individual, "request.arguments.individual"); + requireNotNull(request.arguments.values, "request.arguments.values"); + + Set annotations = extract(request.arguments.values, userId, providerGroups, values, values.model); + Map> dataProperties = extractDataProperties(request.arguments.values, values.model); + OWLNamedIndividual i = getIndividual(request.arguments.individual, values); + + values.relevantIndividuals.add(i); + if (annotations.isEmpty() == false) { + m3.addAnnotations(values.model, i, annotations, token); + } + if (dataProperties.isEmpty() == false) { + m3.addDataProperties(values.model, i, dataProperties, token); + } + values.addVariableValue(request.arguments.assignToVariable, i); + updateDate(values.model, i, token, m3); + updateModelAnnotations(values.model, userId, providerGroups, token, m3); + } + // remove annotation + else if (Operation.removeAnnotation == operation) { + // required: individual, values + requireNotNull(request.arguments.individual, "request.arguments.individual"); + requireNotNull(request.arguments.values, "request.arguments.values"); + + Set annotations = extract(request.arguments.values, null, Collections.emptySet(), values, values.model); + Map> dataProperties = extractDataProperties(request.arguments.values, values.model); + OWLNamedIndividual i = getIndividual(request.arguments.individual, values); + + Set evidenceIRIs = MolecularModelManager.extractEvidenceIRIValues(annotations); + + 
values.relevantIndividuals.add(i); + if (annotations.isEmpty() == false) { + m3.removeAnnotations(values.model, i, annotations, token); + + } + if (dataProperties.isEmpty() == false) { + m3.removeDataProperties(values.model, i, dataProperties, token); + } + values.addVariableValue(request.arguments.assignToVariable, i); + + handleRemovedAnnotationIRIs(evidenceIRIs, values.model, token); + updateDate(values.model, i, token, m3); + updateModelAnnotations(values.model, userId, providerGroups, token, m3); + } else { + return "Unknown operation: " + operation; + } + return null; + } + + private void handleRemovedAnnotationIRIs(Set evidenceIRIs, ModelContainer model, UndoMetadata token) { + if (evidenceIRIs != null) { + for (IRI evidenceIRI : evidenceIRIs) { + OWLNamedIndividual i = m3.getIndividual(evidenceIRI, model); + if (i != null) { + m3.deleteIndividual(model, i, token); + } + // ignoring undefined IRIs + } + } + } + + //TODO likely dead code here. + private OWLClassExpression parseM3Expression(JsonOwlObject expression, BatchHandlerValues values) + throws MissingParameterException, UnknownIdentifierException, OWLException { + M3ExpressionParser p = new M3ExpressionParser(checkLiteralIdentifiers(), curieHandler); + return p.parse(values.model, expression, null); + } + + private OWLObjectProperty getProperty(String id, BatchHandlerValues values) throws UnknownIdentifierException { + OWLObjectProperty p = m3.getObjectProperty(id, values.model); + if (p == null) { + throw new UnknownIdentifierException("Could not find a property for id: " + id); + } + return p; + } + + /** + * Handle the request for an operation regarding an edge. + * + * @param request + * @param operation + * @param userId + * @param token + * @param values + * @return error or null + * @throws Exception + */ + String handleRequestForEdge(M3Request request, Operation operation, String userId, Set providerGroups, UndoMetadata token, BatchHandlerValues values) throws Exception { + values.nonMeta = true; + requireNotNull(request.arguments, "request.arguments"); + values.model = checkModelId(values.model, request); + // required: subject, predicate, object + requireNotNull(request.arguments.subject, "request.arguments.subject"); + requireNotNull(request.arguments.predicate, "request.arguments.predicate"); + requireNotNull(request.arguments.object, "request.arguments.object"); + // check for variables + final OWLNamedIndividual s = getIndividual(request.arguments.subject, values); + final OWLNamedIndividual o = getIndividual(request.arguments.object, values); + final OWLObjectProperty p = getProperty(request.arguments.predicate, values); + values.relevantIndividuals.addAll(Arrays.asList(s, o)); + + // add edge + if (Operation.add == operation) { + // optional: values + Set annotations = extract(request.arguments.values, userId, providerGroups, values, values.model); + addDateAnnotation(annotations, values.model.getOWLDataFactory()); + m3.addFact(values.model, p, s, o, annotations, token); + updateModelAnnotations(values.model, userId, providerGroups, token, m3); + } + // remove edge + else if (Operation.remove == operation) { + Set removedIRIs = m3.removeFact(values.model, p, s, o, token); + if (removedIRIs != null && removedIRIs.isEmpty() == false) { + // only render bulk, iff there were additional deletes (i.e. 
evidence removal) + values.renderBulk = true; + handleRemovedAnnotationIRIs(removedIRIs, values.model, token); + } + updateModelAnnotations(values.model, userId, providerGroups, token, m3); + } + // add annotation + else if (Operation.addAnnotation == operation) { + requireNotNull(request.arguments.values, "request.arguments.values"); + + m3.addAnnotations(values.model, p, s, o, + extract(request.arguments.values, userId, providerGroups, values, values.model), token); + updateDate(values.model, p, s, o, token, m3); + updateModelAnnotations(values.model, userId, providerGroups, token, m3); + } + // remove annotation + else if (Operation.removeAnnotation == operation) { + requireNotNull(request.arguments.values, "request.arguments.values"); + + Set annotations = extract(request.arguments.values, null, Collections.emptySet(), values, values.model); + Set evidenceIRIs = MolecularModelManager.extractEvidenceIRIValues(annotations); + m3.removeAnnotations(values.model, p, s, o, annotations, token); + handleRemovedAnnotationIRIs(evidenceIRIs, values.model, token); + updateDate(values.model, p, s, o, token, m3); + updateModelAnnotations(values.model, userId, providerGroups, token, m3); + } else { + return "Unknown operation: " + operation; + } + return null; + } + + /** + * Handle the request for an operation regarding a model. + * + * @param request + * @param response + * @param operation + * @param userId + * @param token + * @param values + * @return error or null + * @throws Exception + */ + String handleRequestForModel(M3Request request, M3BatchResponse response, Operation operation, String userId, Set providerGroups, UndoMetadata token, BatchHandlerValues values) throws Exception { + // get model + if (Operation.get == operation) { + values.nonMeta = true; + requireNotNull(request.arguments, "request.arguments"); + values.model = checkModelId(values.model, request); + values.renderBulk = true; + } else if (Operation.updateImports == operation) { + values.nonMeta = true; + requireNotNull(request.arguments, "request.arguments"); + values.model = checkModelId(values.model, request); + values.renderBulk = true; + } + // add an empty model + else if (Operation.add == operation) { + values.nonMeta = true; + values.renderBulk = true; + + if (request.arguments != null) { + values.model = createModel(userId, providerGroups, token, values, request.arguments.values); + } else { + values.model = createModel(userId, providerGroups, token, values, null); + } + } else if (Operation.addAnnotation == operation) { + values.nonMeta = true; + requireNotNull(request.arguments, "request.arguments"); + requireNotNull(request.arguments.values, "request.arguments.values"); + values.model = checkModelId(values.model, request); + Set annotations = extract(request.arguments.values, userId, providerGroups, values, values.model); + if (annotations != null) { + m3.addModelAnnotations(values.model, annotations, token); + } + updateModelAnnotations(values.model, userId, providerGroups, token, m3); + } else if (Operation.removeAnnotation == operation) { + values.nonMeta = true; + requireNotNull(request.arguments, "request.arguments"); + requireNotNull(request.arguments.values, "request.arguments.values"); + values.model = checkModelId(values.model, request); + Set annotations = extract(request.arguments.values, null, Collections.emptySet(), values, values.model); + if (annotations != null) { + m3.removeAnnotations(values.model, annotations, token); + } + updateModelAnnotations(values.model, userId, providerGroups, token, 
m3); + values.renderBulk = true; + } else if (Operation.exportModel == operation) { + if (values.nonMeta) { + // can only be used with other "meta" operations in batch mode, otherwise it would lead to conflicts in the returned signal + return "Export model can only be combined with other meta operations."; + } + requireNotNull(request.arguments, "request.arguments"); + values.model = checkModelId(values.model, request); + export(response, values.model, userId, providerGroups); + } else if (Operation.exportModelLegacy == operation) { + if (values.nonMeta) { + // can only be used with other "meta" operations in batch mode, otherwise it would lead to conflicts in the returned signal + return "Export legacy model can only be combined with other meta operations."; + } + requireNotNull(request.arguments, "request.arguments"); + values.model = checkModelId(values.model, request); + exportLegacy(response, values.model, request.arguments.format, userId); + } else if (Operation.importModel == operation) { + values.nonMeta = true; + requireNotNull(request.arguments, "request.arguments"); + requireNotNull(request.arguments.importModel, "request.arguments.importModel"); + //this is documented as not working... + values.model = m3.importModel(request.arguments.importModel); + + Set annotations = extract(request.arguments.values, userId, providerGroups, values, values.model); + if (annotations != null) { + m3.addModelAnnotations(values.model, annotations, token); + } + updateModelAnnotations(values.model, userId, providerGroups, token, m3); + values.renderBulk = true; + } else if (Operation.storeModel == operation) { + values.nonMeta = true; + requireNotNull(request.arguments, "request.arguments"); + values.model = checkModelId(values.model, request); + Set annotations = extract(request.arguments.values, userId, providerGroups, values, values.model); + if (validateBeforeSave()) { + List issues = beforeSaveValidator.validateBeforeSave(values.model); + if (issues != null && !issues.isEmpty()) { + StringBuilder commentary = new StringBuilder(); + for (Iterator it = issues.iterator(); it.hasNext(); ) { + String issue = it.next(); + commentary.append(issue); + if (it.hasNext()) { + commentary.append('\n'); + } + } + response.commentary = commentary.toString(); + return "Save model failed: validation error(s) before save"; + } + } + m3.saveModel(values.model, annotations, token); + values.renderBulk = true; + } else if (Operation.resetModel == operation) { + values.nonMeta = true; + requireNotNull(request.arguments, "request.arguments"); + values.model = checkModelId(values.model, request); + //drop in memory model and reload + IRI model_iri = values.model.getModelId(); + boolean drop_cached = true; + //load will reload from db if override + m3.loadModel(model_iri, drop_cached); + //ensure the change queue is gone to avoid downstream confusion. + m3.clearUndoHistory(model_iri); + //reset model values + values.model = checkModelId(null, request); + values.renderBulk = true; + } else if (Operation.diffModel == operation) { + values.nonMeta = true; + requireNotNull(request.arguments, "request.arguments"); + //this won't change + values.model = checkModelId(values.model, request); + IRI model_iri = values.model.getModelId(); + //run diff + OWLOntologyManager man1 = OWLManager.createOWLOntologyManager(); + //do we have an ontology in the datastore with that id? 
+ OWLOntology stored_ontology = null; + if (m3.getStoredModelIds().contains(model_iri)) { + stored_ontology = m3.loadModelABox(model_iri); + } else { + //could error out here, but maybe this is more useful + stored_ontology = man1.createOntology(); + } + OWLOntology active_ontology = man1.copyOntology(values.model.getAboxOntology(), OntologyCopy.DEEP); + + //TODO refine representation of diff result.. + StringWriter writer = new StringWriter(); + // boolean actual = DiffOperation.compare(active_ontology, stored_ontology, writer); + Map options = new HashMap<>(); + options.put("labels", "true"); + options.put("format", "pretty"); //plain, pretty, html, markdown + DiffOperation.compare(stored_ontology, active_ontology, new IOHelper(), writer, options); + values.diffResult = writer.toString(); + writer.close(); + values.renderBulk = true; + } else if (Operation.undo == operation) { + values.nonMeta = true; + requireNotNull(request.arguments, "request.arguments"); + values.model = checkModelId(values.model, request); + m3.undo(values.model, userId); + values.renderBulk = true; + } else if (Operation.redo == operation) { + values.nonMeta = true; + requireNotNull(request.arguments, "request.arguments"); + values.model = checkModelId(values.model, request); + m3.redo(values.model, userId); + values.renderBulk = true; + } else if (Operation.getUndoRedo == operation) { + if (values.nonMeta) { + // can only be used with other "meta" operations in batch mode, otherwise it would lead to conflicts in the returned signal + return operation + " cannot be combined with other operations."; + } + requireNotNull(request.arguments, "request.arguments"); + values.model = checkModelId(values.model, request); + getCurrentUndoRedoForModel(response, values.model.getModelId(), userId); + } else { + return "Unknown operation: " + operation; + } + return null; + } + + void handleSPARQLRequest(M3Request request, M3BatchResponse response) throws IOException, OWLException, QueryEvaluationException, RDFHandlerException, TupleQueryResultHandlerException, RepositoryException, MalformedQueryException { + String query = request.arguments.query; + if (query != null) { + QueryResult result = m3.executeSPARQLQuery(query, SPARQL_QUERY_TIMEOUT); + + final JsonObject jsonObject; + if (result instanceof GraphQueryResult) { + //RDFWriter writer = new RDFJSONWriterFactory().getWriter(stream); + ByteArrayOutputStream stream = new ByteArrayOutputStream(); + RDFWriter writer = new SesameJSONLDWriterFactory().getWriter(stream); + writer.handleNamespace("gomodel", "http://model.geneontology.org/"); + Rio.write(QueryResults.asModel((GraphQueryResult) result), writer); + String json = stream.toString("UTF-8"); + stream.close(); + jsonObject = new Gson().fromJson(json, JsonObject.class); + //QueryResultIO.write((GraphQueryResult) result, RDFFormat.RDFJSON, stream); + } else if (result instanceof TupleQueryResult) { + jsonObject = new SPARQLResultJSONRenderer(curieHandler).renderResults((TupleQueryResult) result); + //QueryResultIO.write((TupleQueryResult) result, TupleQueryResultFormat.JSON, stream); + } else { + throw new UnsupportedOperationException("Query type not supported."); + } + initMetaResponse(response); + response.data.sparqlResult = jsonObject; + } + } + + private void getCurrentUndoRedoForModel(M3BatchResponse response, IRI modelId, String userId) { + Pair, List> undoRedoEvents = m3.getUndoRedoEvents(modelId); + initMetaResponse(response); + List> undos = new ArrayList>(); + List> redos = new ArrayList>(); + final long 
currentTime = System.currentTimeMillis(); + for (ChangeEvent undo : undoRedoEvents.getLeft()) { + Map data = new HashMap(3); + data.put("user-id", undo.getUserId()); + data.put("time", Long.valueOf(currentTime - undo.getTime())); + // TODO add a summary of the change? axiom count? + undos.add(data); + } + for (ChangeEvent redo : undoRedoEvents.getRight()) { + Map data = new HashMap(3); + data.put("user-id", redo.getUserId()); + data.put("time", Long.valueOf(currentTime - redo.getTime())); + // TODO add a summary of the change? axiom count? + redos.add(data); + } + response.data.undo = undos; + response.data.redo = redos; + } + + private void initMetaResponse(M3BatchResponse response) { + if (response.data == null) { + response.data = new ResponseData(); + response.messageType = M3BatchResponse.MESSAGE_TYPE_SUCCESS; + response.message = "success: 0"; + response.signal = M3BatchResponse.SIGNAL_META; + } + } + + /** + * Handle the request for the meta properties. + * + * @param response + * @param userId + * @throws IOException + * @throws OWLException + */ + void getMeta(M3BatchResponse response, String userId, Set providerGroups) throws IOException, OWLException { + // init + initMetaResponse(response); + if (response.data.meta == null) { + response.data.meta = new MetaResponse(); + } + + // relations + Pair, List> propPair = MolecularModelJsonRenderer.renderProperties(m3, importantRelations, curieHandler); + final List relList = propPair.getLeft(); + if (relList != null) { + response.data.meta.relations = relList.toArray(new JsonRelationInfo[relList.size()]); + } + + // data properties + final List propList = propPair.getRight(); + if (propList != null) { + response.data.meta.dataProperties = propList.toArray(new JsonRelationInfo[propList.size()]); + } + + // evidence + final List evidencesList = MolecularModelJsonRenderer.renderEvidences(m3, curieHandler); + if (evidencesList != null) { + response.data.meta.evidence = evidencesList.toArray(new JsonEvidenceInfo[evidencesList.size()]); + } + + // model ids + // and model annotations + final Set allModelIds = m3.getAvailableModelIds(); + final Map> allModelAnnotations = new HashMap<>(); + final Map> allModelAnnotationsReadOnly = new HashMap<>(); + final Map> annotationsForAllModels = m3.getAllModelAnnotations(); + for (IRI modelId : allModelIds) { + String curie = curieHandler.getCuri(modelId); + List modelAnnotations = new ArrayList<>(); + allModelAnnotations.put(curie, modelAnnotations); + // Iterate through the model's a. 
+ Set annotations = annotationsForAllModels.get(modelId); + if (annotations != null) { + for (OWLAnnotation an : annotations) { + final String label; + if (an.getProperty().equals(contributor)) { + final IRI iri; + if (an.getValue() instanceof IRI) { + iri = an.getValue().asIRI().get(); + } else if (an.getValue() instanceof OWLLiteral) { + iri = IRI.create(an.getValue().asLiteral().get().getLiteral()); + } else { + iri = null; + } + if (iri != null) { + label = m3.getTboxLabelIndex().getOrDefault(iri, null); + } else { + label = null; + } + } else { + label = null; + } + JsonAnnotation json = JsonTools.create(an.getProperty(), an.getValue(), label, curieHandler); + if (json != null) { + modelAnnotations.add(json); + } + } + } else { + LOG.error("No annotations found for model: " + modelId); + } + // handle read-only information, currently only the modification flag + // check modification status + boolean modified = m3.isModelModified(modelId); + Map readOnly = Collections.singletonMap("modified-p", Boolean.valueOf(modified)); + allModelAnnotationsReadOnly.put(curie, readOnly); + } + response.data.meta.modelsMeta = allModelAnnotations; + response.data.meta.modelsReadOnly = allModelAnnotationsReadOnly; + } + + void exportAllModels() throws OWLOntologyStorageException, OWLOntologyCreationException, IOException { + m3.dumpAllStoredModels(); + } + + private void export(M3BatchResponse response, ModelContainer model, String userId, Set providerGroups) throws OWLOntologyStorageException, UnknownIdentifierException { + String exportModel = m3.exportModel(model); + initMetaResponse(response); + response.data.exportModel = exportModel; + } + + private void exportLegacy(M3BatchResponse response, ModelContainer model, String format, String userId) throws IOException, OWLOntologyCreationException, UnknownIdentifierException { + if ("gpad".equals(format)) { + initMetaResponse(response); + try { + GPADSPARQLExport exporter = new GPADSPARQLExport(curieHandler, m3.getLegacyRelationShorthandIndex(), m3.getTboxShorthandIndex(), m3.getGolego_repo().regulatorsToRegulated); + WorkingMemory wm = m3.createCanonicalInferredModel(model.getModelId()); + response.data.exportModel = exporter.exportGPAD(wm, model.getModelId()); + // response.data.exportModel = new GPADSPARQLExport(curieHandler, m3.getLegacyRelationShorthandIndex(), m3.getTboxShorthandIndex(), m3.getDoNotAnnotateSubset()).exportGPAD( + // m3.createCanonicalInferredModel(model.getModelId())); + } catch (InconsistentOntologyException e) { + response.messageType = MinervaResponse.MESSAGE_TYPE_ERROR; + response.message = "The model is inconsistent; a GPAD cannot be created."; + } + } else if ("explanations".equals(format)) { + initMetaResponse(response); + response.data.exportModel = ExportExplanation.exportExplanation(m3.createInferredModel(model.getModelId()), m3.getGolego_repo(), m3.getLegacyRelationShorthandIndex()); + } else { + // final GafExportTool exportTool = GafExportTool.getInstance(); + // if (format == null) { + // format = "gaf"; // set a default format, if necessary + // } + // Map allExported = exportTool.exportModelLegacy(model, curieHandler, externalLookupService, Collections.singleton(format)); + // String exported = allExported.get(format); + // if (exported == null) { + throw new IOException("Unknown export format: " + format); + // } + // initMetaResponse(response); + // response.data.exportModel = exported; + } + } + + + /** + * @param model + * @param request + * @return modelId + * @throws MissingParameterException + * @throws 
MultipleModelIdsParameterException + * @throws UnknownIdentifierException + */ + public ModelContainer checkModelId(ModelContainer model, M3Request request) + throws MissingParameterException, MultipleModelIdsParameterException, UnknownIdentifierException { + + if (model == null) { + final String currentModelId = request.arguments.modelId; + requireNotNull(currentModelId, "request.arguments.modelId"); + model = m3.checkModelId(curieHandler.getIRI(currentModelId)); + } else { + final String currentModelId = request.arguments.modelId; + if (currentModelId != null) { + IRI modelId = curieHandler.getIRI(currentModelId); + if (model.getModelId().equals(modelId) == false) { + throw new MultipleModelIdsParameterException("Using multiple modelIds in one batch call is not supported."); + } + } + } + return model; + } + + private void updateAnnotationsForDelete(DeleteInformation info, ModelContainer model, String userId, Set providerGroups, UndoMetadata token, UndoAwareMolecularModelManager m3) throws UnknownIdentifierException { + final OWLDataFactory f = model.getOWLDataFactory(); + final OWLAnnotation annotation = createDateAnnotation(f); + final Set generated = new HashSet(); + addGeneratedAnnotations(userId, providerGroups, generated, f); + for (IRI subject : info.touched) { + m3.updateAnnotation(model, subject, annotation, token); + m3.addAnnotations(model, subject, generated, token); + } + if (info.updated.isEmpty() == false) { + Set newAxioms = + m3.updateAnnotation(model, info.updated, annotation, token); + m3.addAnnotations(model, newAxioms, generated, token); + } + } + + static class MultipleModelIdsParameterException extends Exception { + + private static final long serialVersionUID = 4362299465121954598L; + + /** + * @param message + */ + MultipleModelIdsParameterException(String message) { + super(message); + } + + } } diff --git a/minerva-server/src/main/java/org/geneontology/minerva/server/handler/OperationsTools.java b/minerva-server/src/main/java/org/geneontology/minerva/server/handler/OperationsTools.java index 3970f4c3..1625ce6d 100644 --- a/minerva-server/src/main/java/org/geneontology/minerva/server/handler/OperationsTools.java +++ b/minerva-server/src/main/java/org/geneontology/minerva/server/handler/OperationsTools.java @@ -1,7 +1,5 @@ package org.geneontology.minerva.server.handler; -import java.util.List; - import org.apache.commons.lang3.StringUtils; import org.geneontology.minerva.BlazegraphOntologyManager; import org.geneontology.minerva.ModelContainer; @@ -13,112 +11,113 @@ import org.semanticweb.owlapi.model.OWLNamedObject; import org.semanticweb.owlapi.model.OWLOntology; +import java.util.List; + public class OperationsTools { - static void requireNotNull(Object value, String msg) throws MissingParameterException { - if (value == null) { - throw new MissingParameterException("Expected non-null value for: "+msg); - } - } - - - static class MissingParameterException extends Exception { - - private static final long serialVersionUID = 4362299465121954598L; - - /** - * @param message - */ - MissingParameterException(String message) { - super(message); - } - - } - - /** - * Normalize the userId. - * - * @param userId - * @return normalized id or null - */ - static String normalizeUserId(String userId) { - if (userId != null) { - userId = StringUtils.trimToNull(userId); - // quick hack, may be removed once all users are required to have a user id. 
- if ("anonymous".equalsIgnoreCase(userId)) { - return null; - } - } - return userId; - } - - /** - * @param modelId - * @param model - * @param externalLookupService - * @param inferenceProvider - * @param curieHandler - * @return renderer - */ - public static MolecularModelJsonRenderer createModelRenderer( - final String modelId, - final OWLOntology model, - final ExternalLookupService externalLookupService, - final InferenceProvider inferenceProvider, - final CurieHandler curieHandler) { - - MolecularModelJsonRenderer renderer; - if (externalLookupService != null) { - renderer = new MolecularModelJsonRenderer(modelId, model, inferenceProvider, curieHandler) { - - @Override - protected String getLabel(OWLNamedObject i, String id) { - String label = super.getLabel(i, id); - if (label == null ) { - List lookup = externalLookupService.lookup(i.getIRI()); - if (lookup != null && !lookup.isEmpty()) { - LookupEntry entry = lookup.iterator().next(); - label = entry.label; - } - } - return label; - } - }; - } - else { - renderer = new MolecularModelJsonRenderer(modelId, model, inferenceProvider, curieHandler); - } - return renderer; - } - - /** - * @param model - * @param externalLookupService - * @param inferenceProvider - * @param curieHandler - * @return renderer - */ - static MolecularModelJsonRenderer createModelRenderer( - final ModelContainer model, - final ExternalLookupService externalLookupService, - final InferenceProvider inferenceProvider, - final CurieHandler curieHandler) { - return createModelRenderer(curieHandler.getCuri(model.getModelId()), model.getAboxOntology(), - externalLookupService, inferenceProvider, curieHandler); - } - - //BlazegraphOntologyManager - static MolecularModelJsonRenderer createModelRenderer( - final ModelContainer model, - final BlazegraphOntologyManager go_lego_repo, - final InferenceProvider inferenceProvider, - final CurieHandler curieHandler) { - - MolecularModelJsonRenderer renderer = new MolecularModelJsonRenderer(curieHandler.getCuri(model.getModelId()), model.getAboxOntology(), - go_lego_repo, inferenceProvider, curieHandler); - - return renderer; - } - - + static void requireNotNull(Object value, String msg) throws MissingParameterException { + if (value == null) { + throw new MissingParameterException("Expected non-null value for: " + msg); + } + } + + + static class MissingParameterException extends Exception { + + private static final long serialVersionUID = 4362299465121954598L; + + /** + * @param message + */ + MissingParameterException(String message) { + super(message); + } + + } + + /** + * Normalize the userId. + * + * @param userId + * @return normalized id or null + */ + static String normalizeUserId(String userId) { + if (userId != null) { + userId = StringUtils.trimToNull(userId); + // quick hack, may be removed once all users are required to have a user id. 
+ if ("anonymous".equalsIgnoreCase(userId)) { + return null; + } + } + return userId; + } + + /** + * @param modelId + * @param model + * @param externalLookupService + * @param inferenceProvider + * @param curieHandler + * @return renderer + */ + public static MolecularModelJsonRenderer createModelRenderer( + final String modelId, + final OWLOntology model, + final ExternalLookupService externalLookupService, + final InferenceProvider inferenceProvider, + final CurieHandler curieHandler) { + + MolecularModelJsonRenderer renderer; + if (externalLookupService != null) { + renderer = new MolecularModelJsonRenderer(modelId, model, inferenceProvider, curieHandler) { + + @Override + protected String getLabel(OWLNamedObject i, String id) { + String label = super.getLabel(i, id); + if (label == null) { + List lookup = externalLookupService.lookup(i.getIRI()); + if (lookup != null && !lookup.isEmpty()) { + LookupEntry entry = lookup.iterator().next(); + label = entry.label; + } + } + return label; + } + }; + } else { + renderer = new MolecularModelJsonRenderer(modelId, model, inferenceProvider, curieHandler); + } + return renderer; + } + + /** + * @param model + * @param externalLookupService + * @param inferenceProvider + * @param curieHandler + * @return renderer + */ + static MolecularModelJsonRenderer createModelRenderer( + final ModelContainer model, + final ExternalLookupService externalLookupService, + final InferenceProvider inferenceProvider, + final CurieHandler curieHandler) { + return createModelRenderer(curieHandler.getCuri(model.getModelId()), model.getAboxOntology(), + externalLookupService, inferenceProvider, curieHandler); + } + + //BlazegraphOntologyManager + static MolecularModelJsonRenderer createModelRenderer( + final ModelContainer model, + final BlazegraphOntologyManager go_lego_repo, + final InferenceProvider inferenceProvider, + final CurieHandler curieHandler) { + + MolecularModelJsonRenderer renderer = new MolecularModelJsonRenderer(curieHandler.getCuri(model.getModelId()), model.getAboxOntology(), + go_lego_repo, inferenceProvider, curieHandler); + + return renderer; + } + + } diff --git a/minerva-server/src/main/java/org/geneontology/minerva/server/handler/PacketIdGenerator.java b/minerva-server/src/main/java/org/geneontology/minerva/server/handler/PacketIdGenerator.java index 9667686d..451d399d 100644 --- a/minerva-server/src/main/java/org/geneontology/minerva/server/handler/PacketIdGenerator.java +++ b/minerva-server/src/main/java/org/geneontology/minerva/server/handler/PacketIdGenerator.java @@ -2,15 +2,15 @@ public class PacketIdGenerator { - private static int modCounter = 0; - - public synchronized static String generateId() { - modCounter += 1; - StringBuilder sb = new StringBuilder(Long.toHexString((System.nanoTime()))); - sb.append(Integer.toHexString(modCounter)); - // keep mod counter below 10000 - modCounter = modCounter % 10000; - return sb.toString(); - } - + private static int modCounter = 0; + + public synchronized static String generateId() { + modCounter += 1; + StringBuilder sb = new StringBuilder(Long.toHexString((System.nanoTime()))); + sb.append(Integer.toHexString(modCounter)); + // keep mod counter below 10000 + modCounter = modCounter % 10000; + return sb.toString(); + } + } diff --git a/minerva-server/src/main/java/org/geneontology/minerva/server/handler/StatusHandler.java b/minerva-server/src/main/java/org/geneontology/minerva/server/handler/StatusHandler.java index 956bb68a..45170334 100644 --- 
a/minerva-server/src/main/java/org/geneontology/minerva/server/handler/StatusHandler.java +++ b/minerva-server/src/main/java/org/geneontology/minerva/server/handler/StatusHandler.java @@ -1,116 +1,109 @@ package org.geneontology.minerva.server.handler; +import org.geneontology.minerva.server.StartUpTool.MinervaStartUpConfig; +import org.semanticweb.owlapi.model.IRI; +import org.semanticweb.owlapi.model.OWLAnnotation; +import org.semanticweb.owlapi.model.OWLObjectProperty; +import javax.ws.rs.*; +import javax.ws.rs.core.MediaType; import java.util.Map; import java.util.Set; -import javax.ws.rs.Consumes; -import javax.ws.rs.GET; -import javax.ws.rs.POST; -import javax.ws.rs.Path; -import javax.ws.rs.Produces; -import javax.ws.rs.core.MediaType; -import org.geneontology.minerva.server.StartUpTool.MinervaStartUpConfig; -import org.semanticweb.owlapi.model.IRI; -import org.semanticweb.owlapi.model.OWLAnnotation; -import org.semanticweb.owlapi.model.OWLObjectProperty; +/** + * Respond to queries for system status + */ +@Path("/status") +public class StatusHandler { + + private final MinervaStartUpConfig conf; + private final Map> ont_annosa; + private final String started_at; + + public class Status { + public String startup_date = started_at; + // data configuration + public String ontology; + public String catalog; + public String journalFile; + public String exportFolder; + public String modelIdPrefix; + public String modelIdcurie; + public String defaultModelState; + public String golrUrl; + public String monarchUrl; + public String golrSeedUrl; + public int golrCacheSize; + public long golrCacheDuration; + public String reasonerOpt; + public String importantRelationParent = "not set"; + public Set importantRelations; + public int port; + public String contextPrefix; + public String contextString; + public int requestHeaderSize; + public int requestBufferSize; + public boolean useRequestLogging; + public boolean useGolrUrlLogging; + public String prefixesFile; + public int sparqlEndpointTimeout; + public String shexFileUrl; + public String goshapemapFileUrl; + public Map> ont_annos = ont_annosa; + + public Status(MinervaStartUpConfig conf) { + this.ontology = conf.ontology; + this.catalog = conf.catalog; + this.journalFile = conf.journalFile; + this.exportFolder = conf.exportFolder; + this.modelIdPrefix = conf.modelIdPrefix; + this.modelIdcurie = conf.modelIdcurie; + this.defaultModelState = conf.defaultModelState; + this.golrUrl = conf.golrUrl; + this.monarchUrl = conf.monarchUrl; + this.golrSeedUrl = conf.golrSeedUrl; + this.golrCacheSize = conf.golrCacheSize; + this.golrCacheDuration = conf.golrCacheDuration; + this.reasonerOpt = conf.reasonerOpt; + this.importantRelationParent = conf.importantRelationParent; + this.importantRelations = conf.importantRelations; + this.port = conf.port; + this.contextPrefix = conf.contextPrefix; + this.contextString = conf.contextString; + this.requestHeaderSize = conf.requestHeaderSize; + this.requestBufferSize = conf.requestBufferSize; + this.useRequestLogging = conf.useRequestLogging; + this.useGolrUrlLogging = conf.useGolrUrlLogging; + this.prefixesFile = conf.prefixesFile; + this.sparqlEndpointTimeout = conf.sparqlEndpointTimeout; + this.shexFileUrl = conf.shexFileUrl; + this.goshapemapFileUrl = conf.goshapemapFileUrl; + + } + } - - /** - * Respond to queries for system status - * - */ - @Path("/status") - public class StatusHandler { + /** + * @param ont_annos + * @param started_at + */ + public StatusHandler(MinervaStartUpConfig conf, Map> 
ont_annos, String started_at) { + this.ont_annosa = ont_annos; + this.conf = conf; + this.started_at = started_at; + } - private final MinervaStartUpConfig conf; - private final Map> ont_annosa; - private final String started_at; - - public class Status { - public String startup_date = started_at; - // data configuration - public String ontology; - public String catalog; - public String journalFile; - public String exportFolder; - public String modelIdPrefix; - public String modelIdcurie; - public String defaultModelState; - public String golrUrl; - public String monarchUrl; - public String golrSeedUrl; - public int golrCacheSize; - public long golrCacheDuration; - public String reasonerOpt; - public String importantRelationParent = "not set"; - public Set importantRelations; - public int port; - public String contextPrefix; - public String contextString; - public int requestHeaderSize; - public int requestBufferSize; - public boolean useRequestLogging; - public boolean useGolrUrlLogging; - public String prefixesFile; - public int sparqlEndpointTimeout; - public String shexFileUrl; - public String goshapemapFileUrl; - public Map> ont_annos = ont_annosa; - - public Status(MinervaStartUpConfig conf) { - this.ontology = conf.ontology; - this.catalog = conf.catalog; - this.journalFile = conf.journalFile; - this.exportFolder = conf.exportFolder; - this.modelIdPrefix = conf.modelIdPrefix; - this.modelIdcurie = conf.modelIdcurie; - this.defaultModelState = conf.defaultModelState; - this.golrUrl = conf.golrUrl; - this.monarchUrl = conf.monarchUrl; - this.golrSeedUrl = conf.golrSeedUrl; - this.golrCacheSize = conf.golrCacheSize; - this.golrCacheDuration = conf.golrCacheDuration; - this.reasonerOpt = conf.reasonerOpt; - this.importantRelationParent = conf.importantRelationParent; - this.importantRelations = conf.importantRelations; - this.port = conf.port; - this.contextPrefix = conf.contextPrefix; - this.contextString = conf.contextString; - this.requestHeaderSize = conf.requestHeaderSize; - this.requestBufferSize = conf.requestBufferSize; - this.useRequestLogging = conf.useRequestLogging; - this.useGolrUrlLogging = conf.useGolrUrlLogging; - this.prefixesFile = conf.prefixesFile; - this.sparqlEndpointTimeout = conf.sparqlEndpointTimeout; - this.shexFileUrl = conf.shexFileUrl; - this.goshapemapFileUrl = conf.goshapemapFileUrl; - - } - } - /** - * @param ont_annos - * @param started_at - * - */ - public StatusHandler(MinervaStartUpConfig conf, Map> ont_annos, String started_at) { - this.ont_annosa = ont_annos; - this.conf = conf; - this.started_at = started_at; - } + @GET + @Produces(MediaType.APPLICATION_JSON) + public Status get() { + return new Status(conf); + } - @GET - @Produces(MediaType.APPLICATION_JSON) - public Status get() { - return new Status(conf); - } - - @POST - @Consumes(MediaType.APPLICATION_FORM_URLENCODED) - @Produces(MediaType.APPLICATION_JSON) - public MinervaStartUpConfig post() { - return conf; - } - } \ No newline at end of file + @POST + @Consumes(MediaType.APPLICATION_FORM_URLENCODED) + @Produces(MediaType.APPLICATION_JSON) + public MinervaStartUpConfig post() { + return conf; + } +} \ No newline at end of file diff --git a/minerva-server/src/main/java/org/geneontology/minerva/server/handler/TaxonHandler.java b/minerva-server/src/main/java/org/geneontology/minerva/server/handler/TaxonHandler.java index e22348ec..d0d34877 100644 --- a/minerva-server/src/main/java/org/geneontology/minerva/server/handler/TaxonHandler.java +++ 
b/minerva-server/src/main/java/org/geneontology/minerva/server/handler/TaxonHandler.java @@ -1,16 +1,5 @@ package org.geneontology.minerva.server.handler; -import java.io.IOException; -import java.util.HashMap; -import java.util.HashSet; -import java.util.Map; -import java.util.Set; - -import javax.ws.rs.GET; -import javax.ws.rs.Path; -import javax.ws.rs.Produces; -import javax.ws.rs.core.MediaType; - import org.geneontology.minerva.BlazegraphMolecularModelManager; import org.geneontology.minerva.BlazegraphOntologyManager; import org.openrdf.query.BindingSet; @@ -18,76 +7,87 @@ import org.openrdf.query.QueryEvaluationException; import org.openrdf.query.TupleQueryResult; import org.openrdf.repository.RepositoryException; -import org.semanticweb.owlapi.model.IRI; - - /** - * Respond to queries about taxa in minerva world - * - */ - @Path("/search/taxa") - public class TaxonHandler { - private final BlazegraphMolecularModelManager m3; - /** - * @param ont_annos - * @param started_at - * - */ - public TaxonHandler(BlazegraphMolecularModelManager m3) { - this.m3 = m3; - } +import javax.ws.rs.GET; +import javax.ws.rs.Path; +import javax.ws.rs.Produces; +import javax.ws.rs.core.MediaType; +import java.io.IOException; +import java.util.HashMap; +import java.util.HashSet; +import java.util.Map; +import java.util.Set; + + +/** + * Respond to queries about taxa in minerva world + */ +@Path("/search/taxa") +public class TaxonHandler { + private final BlazegraphMolecularModelManager m3; + + /** + * @param ont_annos + * @param started_at + */ + public TaxonHandler(BlazegraphMolecularModelManager m3) { + this.m3 = m3; + } + + public class Taxa { + class Taxon { + String id; + String label; + + public Taxon(String id, String label) { + super(); + this.id = id; + this.label = label; + } + } + + public Set taxa; + + public Taxa(Map id_label) { + if (id_label != null) { + taxa = new HashSet(); + for (String id : id_label.keySet()) { + Taxon t = new Taxon(id, id_label.get(id)); + taxa.add(t); + } + } + } + } + + @GET + @Produces(MediaType.APPLICATION_JSON) + public Taxa get() { + Map id_label = new HashMap(); + + String sparql = "select distinct ?taxon where { ?model <" + BlazegraphOntologyManager.in_taxon_uri + "> ?taxon }"; + + TupleQueryResult result; + try { + result = (TupleQueryResult) m3.executeSPARQLQuery(sparql, 1000); + while (result.hasNext()) { + BindingSet bs = result.next(); + String taxon = bs.getBinding("taxon").getValue().stringValue(); + String label = m3.getGolego_repo().getLabel(taxon); + String tcurie = taxon.replace("http://purl.obolibrary.org/obo/NCBITaxon_", "NCBITaxon:"); + id_label.put(tcurie, label); + } + } catch (MalformedQueryException | QueryEvaluationException | RepositoryException e) { + e.printStackTrace(); + } catch (IOException e) { + // TODO Auto-generated catch block + e.printStackTrace(); + } - public class Taxa { - class Taxon { - String id; - String label; - public Taxon(String id, String label) { - super(); - this.id = id; - this.label = label; - } - } - public Set taxa; - public Taxa(Map id_label) { - if(id_label!=null) { - taxa = new HashSet(); - for(String id : id_label.keySet()) { - Taxon t = new Taxon(id, id_label.get(id)); - taxa.add(t); - } - } - } - } - - @GET - @Produces(MediaType.APPLICATION_JSON) - public Taxa get() { - Map id_label = new HashMap(); - - String sparql = "select distinct ?taxon where { ?model <"+BlazegraphOntologyManager.in_taxon_uri+"> ?taxon }"; - - TupleQueryResult result; - try { - result = (TupleQueryResult) m3.executeSPARQLQuery(sparql, 
1000); - while(result.hasNext()) { - BindingSet bs = result.next(); - String taxon = bs.getBinding("taxon").getValue().stringValue(); - String label = m3.getGolego_repo().getLabel(taxon); - String tcurie = taxon.replace("http://purl.obolibrary.org/obo/NCBITaxon_", "NCBITaxon:"); - id_label.put(tcurie, label); - } - } catch (MalformedQueryException | QueryEvaluationException | RepositoryException e) { - e.printStackTrace(); - } catch (IOException e) { - // TODO Auto-generated catch block - e.printStackTrace(); - } + return new Taxa(id_label); + } - return new Taxa(id_label); - } + public BlazegraphMolecularModelManager getM3() { + return m3; + } - public BlazegraphMolecularModelManager getM3() { - return m3; - } - - } \ No newline at end of file +} \ No newline at end of file diff --git a/minerva-server/src/main/java/org/geneontology/minerva/server/inferences/CachingInferenceProviderCreatorImpl.java b/minerva-server/src/main/java/org/geneontology/minerva/server/inferences/CachingInferenceProviderCreatorImpl.java index aa724e8a..b19de735 100644 --- a/minerva-server/src/main/java/org/geneontology/minerva/server/inferences/CachingInferenceProviderCreatorImpl.java +++ b/minerva-server/src/main/java/org/geneontology/minerva/server/inferences/CachingInferenceProviderCreatorImpl.java @@ -1,10 +1,5 @@ package org.geneontology.minerva.server.inferences; -import java.io.IOException; -import java.util.List; -import java.util.Map; -import java.util.concurrent.ConcurrentHashMap; - import org.geneontology.minerva.ModelContainer; import org.geneontology.minerva.ModelContainer.ModelChangeListener; import org.geneontology.minerva.json.InferenceProvider; @@ -16,24 +11,28 @@ import org.semanticweb.owlapi.model.OWLOntologyCreationException; import org.semanticweb.owlapi.reasoner.OWLReasonerFactory; +import java.io.IOException; +import java.util.List; +import java.util.Map; +import java.util.concurrent.ConcurrentHashMap; + public class CachingInferenceProviderCreatorImpl extends InferenceProviderCreatorImpl { - - private final Map inferenceCache = new ConcurrentHashMap<>(); - - protected CachingInferenceProviderCreatorImpl(OWLReasonerFactory rf, int maxConcurrent, boolean useSLME, String name, MinervaShexValidator shex) { - super(rf, maxConcurrent, useSLME, name, shex); - } - - public static InferenceProviderCreator createElk(boolean useSLME, MinervaShexValidator shex) { - String name; - if (useSLME) { - name = "Caching ELK-SLME"; - } - else { - name = "Caching ELK"; - } - return new CachingInferenceProviderCreatorImpl(new ElkReasonerFactory(), 1, useSLME, name, shex); - } + + private final Map inferenceCache = new ConcurrentHashMap<>(); + + protected CachingInferenceProviderCreatorImpl(OWLReasonerFactory rf, int maxConcurrent, boolean useSLME, String name, MinervaShexValidator shex) { + super(rf, maxConcurrent, useSLME, name, shex); + } + + public static InferenceProviderCreator createElk(boolean useSLME, MinervaShexValidator shex) { + String name; + if (useSLME) { + name = "Caching ELK-SLME"; + } else { + name = "Caching ELK"; + } + return new CachingInferenceProviderCreatorImpl(new ElkReasonerFactory(), 1, useSLME, name, shex); + } //TODO current Hermit doesn't provide a reasonerfactory ? //Not using hermit anyway, can probably just delete. 
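A minimal usage sketch follows (illustrative only, not part of this patch), assuming the caller already has a ModelContainer and a MinervaShexValidator from the server startup code; it relies only on the createElk factory and the InferenceProviderCreator.create signature visible in this file, and the CachingInferenceExample class and its method names are hypothetical.

package org.geneontology.minerva.server.inferences;

import org.geneontology.minerva.ModelContainer;
import org.geneontology.minerva.json.InferenceProvider;
import org.geneontology.minerva.server.validation.MinervaShexValidator;

// Hypothetical example class, for illustration only.
class CachingInferenceExample {

    private final InferenceProviderCreator creator;

    CachingInferenceExample(MinervaShexValidator shex) {
        // useSLME = false selects the plain "Caching ELK" configuration built by createElk above.
        this.creator = CachingInferenceProviderCreatorImpl.createElk(false, shex);
    }

    // Repeat calls for the same, unchanged model are served from the creator's cache;
    // the cached InferenceProvider is evicted when the model's abox changes
    // (see ModelChangeListenerImplementation later in this file).
    InferenceProvider infer(ModelContainer model) throws Exception {
        return creator.create(model);
    }
}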
@@ -41,66 +40,65 @@ public static InferenceProviderCreator createElk(boolean useSLME, MinervaShexVal // int maxConcurrent = Runtime.getRuntime().availableProcessors(); // return createHermiT(maxConcurrent, shex); // } - + // public static InferenceProviderCreator createHermiT(int maxConcurrent, MinervaShexValidator shex) { // return new CachingInferenceProviderCreatorImpl(new org.semanticweb.HermiT.ReasonerFactory(), // maxConcurrent, true, "Caching Hermit-SLME", shex); // } - - public static InferenceProviderCreator createArachne(RuleEngine arachne, MinervaShexValidator shex) { - return new CachingInferenceProviderCreatorImpl(new ArachneOWLReasonerFactory(arachne), 1, false, "Caching Arachne", shex); - } - - @Override - public InferenceProvider create(final ModelContainer model) throws OWLOntologyCreationException, InterruptedException, IOException { - synchronized (model.getAboxOntology()) { - InferenceProvider inferenceProvider = inferenceCache.get(model); - if (inferenceProvider == null) { - addMiss(); - inferenceProvider = super.create(model); - model.registerListener(new ModelChangeListenerImplementation(model)); - inferenceCache.put(model, inferenceProvider); - } - else { - addHit(); - } - return inferenceProvider; - } - } - - protected void addHit() { - // do nothing, hook for debugging - } - - protected void addMiss() { - // do nothing, hook for debugging - } - - protected void clear() { - inferenceCache.clear(); - } - - private final class ModelChangeListenerImplementation implements ModelChangeListener { - private final ModelContainer model; - - private ModelChangeListenerImplementation(ModelContainer model) { - this.model = model; - } - - @Override - public void handleChange(List changes) { - synchronized (model.getAboxOntology()) { - inferenceCache.remove(model); - model.unRegisterListener(this); - } - } - - @Override - public void dispose() { - synchronized (model.getAboxOntology()) { - inferenceCache.remove(model); - model.unRegisterListener(this); - } - } - } + + public static InferenceProviderCreator createArachne(RuleEngine arachne, MinervaShexValidator shex) { + return new CachingInferenceProviderCreatorImpl(new ArachneOWLReasonerFactory(arachne), 1, false, "Caching Arachne", shex); + } + + @Override + public InferenceProvider create(final ModelContainer model) throws OWLOntologyCreationException, InterruptedException, IOException { + synchronized (model.getAboxOntology()) { + InferenceProvider inferenceProvider = inferenceCache.get(model); + if (inferenceProvider == null) { + addMiss(); + inferenceProvider = super.create(model); + model.registerListener(new ModelChangeListenerImplementation(model)); + inferenceCache.put(model, inferenceProvider); + } else { + addHit(); + } + return inferenceProvider; + } + } + + protected void addHit() { + // do nothing, hook for debugging + } + + protected void addMiss() { + // do nothing, hook for debugging + } + + protected void clear() { + inferenceCache.clear(); + } + + private final class ModelChangeListenerImplementation implements ModelChangeListener { + private final ModelContainer model; + + private ModelChangeListenerImplementation(ModelContainer model) { + this.model = model; + } + + @Override + public void handleChange(List changes) { + synchronized (model.getAboxOntology()) { + inferenceCache.remove(model); + model.unRegisterListener(this); + } + } + + @Override + public void dispose() { + synchronized (model.getAboxOntology()) { + inferenceCache.remove(model); + model.unRegisterListener(this); + } + } + } } diff --git 
a/minerva-server/src/main/java/org/geneontology/minerva/server/inferences/InferenceProviderCreator.java b/minerva-server/src/main/java/org/geneontology/minerva/server/inferences/InferenceProviderCreator.java index c9c72789..ea48f1af 100644 --- a/minerva-server/src/main/java/org/geneontology/minerva/server/inferences/InferenceProviderCreator.java +++ b/minerva-server/src/main/java/org/geneontology/minerva/server/inferences/InferenceProviderCreator.java @@ -5,5 +5,5 @@ public interface InferenceProviderCreator { - public InferenceProvider create(ModelContainer model) throws Exception; + public InferenceProvider create(ModelContainer model) throws Exception; } diff --git a/minerva-server/src/main/java/org/geneontology/minerva/server/inferences/InferenceProviderCreatorImpl.java b/minerva-server/src/main/java/org/geneontology/minerva/server/inferences/InferenceProviderCreatorImpl.java index e0b60cfd..8e81b992 100644 --- a/minerva-server/src/main/java/org/geneontology/minerva/server/inferences/InferenceProviderCreatorImpl.java +++ b/minerva-server/src/main/java/org/geneontology/minerva/server/inferences/InferenceProviderCreatorImpl.java @@ -1,14 +1,5 @@ package org.geneontology.minerva.server.inferences; -import java.io.IOException; -import java.util.Collection; -import java.util.HashMap; -import java.util.HashSet; -import java.util.List; -import java.util.Map; -import java.util.Set; -import java.util.concurrent.Semaphore; - import org.apache.log4j.Logger; import org.geneontology.minerva.ModelContainer; import org.geneontology.minerva.json.InferenceProvider; @@ -17,237 +8,221 @@ import org.geneontology.minerva.server.validation.MinervaShexValidator; import org.semanticweb.elk.owlapi.ElkReasonerFactory; import org.semanticweb.owlapi.apibinding.OWLManager; -import org.semanticweb.owlapi.model.IRI; -import org.semanticweb.owlapi.model.OWLAnnotation; -import org.semanticweb.owlapi.model.OWLAnnotationAssertionAxiom; -import org.semanticweb.owlapi.model.OWLAxiom; -import org.semanticweb.owlapi.model.OWLClass; -import org.semanticweb.owlapi.model.OWLClassAssertionAxiom; -import org.semanticweb.owlapi.model.OWLClassExpression; -import org.semanticweb.owlapi.model.OWLDataFactory; -import org.semanticweb.owlapi.model.OWLEntity; -import org.semanticweb.owlapi.model.OWLIndividual; -import org.semanticweb.owlapi.model.OWLNamedIndividual; -import org.semanticweb.owlapi.model.OWLOntology; -import org.semanticweb.owlapi.model.OWLOntologyCreationException; -import org.semanticweb.owlapi.model.OWLOntologyManager; -import org.semanticweb.owlapi.model.OWLSubClassOfAxiom; +import org.semanticweb.owlapi.model.*; import org.semanticweb.owlapi.model.parameters.OntologyCopy; import org.semanticweb.owlapi.reasoner.OWLReasoner; import org.semanticweb.owlapi.reasoner.OWLReasonerFactory; import org.semanticweb.owlapi.search.EntitySearcher; - -import com.sun.org.apache.xml.internal.utils.URI; - import uk.ac.manchester.cs.owlapi.modularity.ModuleType; import uk.ac.manchester.cs.owlapi.modularity.SyntacticLocalityModuleExtractor; +import java.io.IOException; +import java.util.*; +import java.util.concurrent.Semaphore; + public class InferenceProviderCreatorImpl implements InferenceProviderCreator { - private final static Logger LOG = Logger.getLogger(InferenceProviderCreatorImpl.class); - - private final OWLReasonerFactory rf; - private final Semaphore concurrentLock; - private final boolean useSLME; - private final String name; - private final MinervaShexValidator shex; - - - InferenceProviderCreatorImpl(OWLReasonerFactory 
rf, int maxConcurrent, boolean useSLME, String name, MinervaShexValidator shex) { - super(); - this.rf = rf; - this.useSLME = useSLME; - this.name = name; - this.concurrentLock = new Semaphore(maxConcurrent); - this.shex = shex; - } - - public static InferenceProviderCreator createElk(boolean useSLME, MinervaShexValidator shex) { - String name; - if (useSLME) { - name = "ELK-SLME"; - } - else { - name = "ELK"; - } - return new InferenceProviderCreatorImpl(new ElkReasonerFactory(), 1, useSLME, name, shex); - } - - // public static InferenceProviderCreator createHermiT(MinervaShexValidator shex) { - // int maxConcurrent = Runtime.getRuntime().availableProcessors(); - // return createHermiT(maxConcurrent, shex); - // } - - // public static InferenceProviderCreator createHermiT(int maxConcurrent, MinervaShexValidator shex) { - // return new InferenceProviderCreatorImpl(new org.semanticweb.HermiT.ReasonerFactory(), maxConcurrent, true, "Hermit-SLME", shex); - // } - - @Override - public InferenceProvider create(ModelContainer model) throws OWLOntologyCreationException, InterruptedException, IOException { - OWLOntology ont = model.getAboxOntology(); - final OWLOntologyManager m = ont.getOWLOntologyManager(); - OWLOntology module = null; - OWLReasoner reasoner = null; - OWLOntology temp_ont = null; - try { - InferenceProvider provider; - synchronized (ont) { - concurrentLock.acquire(); - try { - if (useSLME) { - LOG.info("Creating for module: "+model.getModelId()); - ModuleType mtype = ModuleType.BOT; - SyntacticLocalityModuleExtractor sme = new SyntacticLocalityModuleExtractor(m, ont, mtype); - Set seeds = new HashSet(ont.getIndividualsInSignature()); - module = ont = sme.extractAsOntology(seeds, IRI.generateDocumentIRI()); - LOG.info("Done creating module: "+model.getModelId()); - } - //add root types for gene products. 
- //TODO investigate performance impact - //tradefoff these queries versus loading all possible genes into tbox - //temp_ont = addRootTypesToCopy(ont, shex.externalLookupService); - temp_ont = addAllInferredTypesToCopyLocalOntoBlazegraph(ont); - //do reasoning and validation on the enhanced model - reasoner = rf.createReasoner(temp_ont); - provider = MapInferenceProvider.create(reasoner, temp_ont, shex); - } - finally { - concurrentLock.release(); - } - } - return provider; - } - finally { - if (reasoner != null) { - reasoner.dispose(); - } - if (module != null) { - m.removeOntology(module); - } - if (temp_ont != null) { - temp_ont.getOWLOntologyManager().removeOntology(temp_ont); - } - } - - } - - - public OWLOntology addAllInferredTypesToCopyLocalOntoBlazegraph(OWLOntology asserted_ont) throws OWLOntologyCreationException, IOException { - OWLOntologyManager ontman = OWLManager.createOWLOntologyManager(); - OWLDataFactory df = ontman.getOWLDataFactory(); - OWLOntology temp_ont = ontman.copyOntology(asserted_ont, OntologyCopy.SHALLOW); - for(OWLAnnotation a : asserted_ont.getAnnotations()) { - OWLAxiom annoaxiom = df.getOWLAnnotationAssertionAxiom(temp_ont.getOntologyID().getOntologyIRI().get(), a); - ontman.addAxiom(temp_ont, annoaxiom); - } - Set individuals = temp_ont.getIndividualsInSignature(); - Map> sub_supers = new HashMap>(); - Set uris = new HashSet(); - Map> individual_asserted_types = new HashMap>(); - for (OWLNamedIndividual individual : individuals) { - Collection asserted_types = EntitySearcher.getTypes(individual, asserted_ont); - for(OWLClassExpression cls : asserted_types) { - if(cls.isAnonymous()) { - continue; - } - IRI class_iri = cls.asOWLClass().getIRI(); - if(class_iri.toString().contains("ECO")) { - continue; //this only deals with genes, chemicals, proteins, and complexes. - } - uris.add(class_iri.toString()); - } - individual_asserted_types.put(individual, asserted_types); - } - sub_supers = shex.getGo_lego_repo().getNeoRoots(uris); - Set new_parent_types = new HashSet(); - //for all individuals - for(OWLNamedIndividual i : individual_asserted_types.keySet()) { - //for all asserted types - for(OWLClassExpression asserted : individual_asserted_types.get(i)) { - //add types for all their parents - if(asserted.isAnonymous()) { - continue; - } - OWLClass sub = asserted.asOWLClass(); - Set supers = sub_supers.get(sub.getIRI().toString()); - if(supers!=null) { - for(String s : supers) { - OWLClass parent_class = ontman.getOWLDataFactory().getOWLClass(IRI.create(s)); - if(!parent_class.isBuiltIn()&&(!parent_class.isAnonymous())) { - OWLClassAssertionAxiom add_parent_type = df.getOWLClassAssertionAxiom(parent_class, i); - new_parent_types.add(add_parent_type); - } - //add everything into the tbox - (for use later in shex validator) - //OWLSubClassOfAxiom subSuper = df.getOWLSubClassOfAxiom(sub, parent_class); - //new_parent_types.add(subSuper); - //make sure the parent is a subclass of itself.. needed for shex to find it. - //OWLSubClassOfAxiom superSuper = df.getOWLSubClassOfAxiom(parent_class, parent_class); - //new_parent_types.add(superSuper); - } - } - } - } - if(!new_parent_types.isEmpty()) { - ontman.addAxioms(temp_ont, new_parent_types); - } - - return temp_ont; - } - - - public static OWLOntology addRootTypesToCopyViaGolr(OWLOntology asserted_ont, ExternalLookupService externalLookupService) throws OWLOntologyCreationException { - if(externalLookupService==null) { - return asserted_ont; //should probably throw some kind of exception here.. 
- } - OWLOntology temp_ont = asserted_ont.getOWLOntologyManager().createOntology(); - temp_ont.getOWLOntologyManager().addAxioms(temp_ont, asserted_ont.getAxioms()); - Set individuals = temp_ont.getIndividualsInSignature(); - Set to_look_up = new HashSet(); - Map> individual_types = new HashMap>(); - for (OWLNamedIndividual individual : individuals) { - Collection asserted_types = EntitySearcher.getTypes(individual, asserted_ont); - Set ind_types = new HashSet(); - for(OWLClassExpression cls : asserted_types) { - if(cls.isAnonymous()) { - continue; - } - IRI class_iri = cls.asOWLClass().getIRI(); - if(class_iri.toString().contains("ECO")) { - continue; //this only deals with genes, chemicals, proteins, and complexes. - } - to_look_up.add(class_iri); - ind_types.add(class_iri); - } - individual_types.put(individual, ind_types); - } - //look up all at once - Map> iri_lookup = externalLookupService.lookupBatch(to_look_up); - - if(iri_lookup!=null) { - //add the identified root types on to the individuals in the model - for(OWLNamedIndividual i : individual_types.keySet()) { - for(IRI asserted_type : individual_types.get(i)) { - if(asserted_type==null) { - continue; - } - List lookup = iri_lookup.get(asserted_type); - if(lookup!=null&&!lookup.isEmpty()&&lookup.get(0).direct_parent_iri!=null) { - OWLClass parent_class = temp_ont.getOWLOntologyManager().getOWLDataFactory().getOWLClass(IRI.create(lookup.get(0).direct_parent_iri)); - OWLClassAssertionAxiom add_root = temp_ont.getOWLOntologyManager().getOWLDataFactory().getOWLClassAssertionAxiom(parent_class, i); - temp_ont.getOWLOntologyManager().addAxiom(temp_ont, add_root); - } - } - } - }else { - LOG.error("external lookup at failed for batch: "+to_look_up); - } - return temp_ont; - } - - @Override - public String toString() { - return "InferenceProviderCreator: " + name; - } + private final static Logger LOG = Logger.getLogger(InferenceProviderCreatorImpl.class); + + private final OWLReasonerFactory rf; + private final Semaphore concurrentLock; + private final boolean useSLME; + private final String name; + private final MinervaShexValidator shex; + + + InferenceProviderCreatorImpl(OWLReasonerFactory rf, int maxConcurrent, boolean useSLME, String name, MinervaShexValidator shex) { + super(); + this.rf = rf; + this.useSLME = useSLME; + this.name = name; + this.concurrentLock = new Semaphore(maxConcurrent); + this.shex = shex; + } + + public static InferenceProviderCreator createElk(boolean useSLME, MinervaShexValidator shex) { + String name; + if (useSLME) { + name = "ELK-SLME"; + } else { + name = "ELK"; + } + return new InferenceProviderCreatorImpl(new ElkReasonerFactory(), 1, useSLME, name, shex); + } + + // public static InferenceProviderCreator createHermiT(MinervaShexValidator shex) { + // int maxConcurrent = Runtime.getRuntime().availableProcessors(); + // return createHermiT(maxConcurrent, shex); + // } + + // public static InferenceProviderCreator createHermiT(int maxConcurrent, MinervaShexValidator shex) { + // return new InferenceProviderCreatorImpl(new org.semanticweb.HermiT.ReasonerFactory(), maxConcurrent, true, "Hermit-SLME", shex); + // } + + @Override + public InferenceProvider create(ModelContainer model) throws OWLOntologyCreationException, InterruptedException, IOException { + OWLOntology ont = model.getAboxOntology(); + final OWLOntologyManager m = ont.getOWLOntologyManager(); + OWLOntology module = null; + OWLReasoner reasoner = null; + OWLOntology temp_ont = null; + try { + InferenceProvider provider; + synchronized (ont) 
{ + concurrentLock.acquire(); + try { + if (useSLME) { + LOG.info("Creating for module: " + model.getModelId()); + ModuleType mtype = ModuleType.BOT; + SyntacticLocalityModuleExtractor sme = new SyntacticLocalityModuleExtractor(m, ont, mtype); + Set seeds = new HashSet(ont.getIndividualsInSignature()); + module = ont = sme.extractAsOntology(seeds, IRI.generateDocumentIRI()); + LOG.info("Done creating module: " + model.getModelId()); + } + //add root types for gene products. + //TODO investigate performance impact + //tradefoff these queries versus loading all possible genes into tbox + //temp_ont = addRootTypesToCopy(ont, shex.externalLookupService); + temp_ont = addAllInferredTypesToCopyLocalOntoBlazegraph(ont); + //do reasoning and validation on the enhanced model + reasoner = rf.createReasoner(temp_ont); + provider = MapInferenceProvider.create(reasoner, temp_ont, shex); + } finally { + concurrentLock.release(); + } + } + return provider; + } finally { + if (reasoner != null) { + reasoner.dispose(); + } + if (module != null) { + m.removeOntology(module); + } + if (temp_ont != null) { + temp_ont.getOWLOntologyManager().removeOntology(temp_ont); + } + } + + } + + + public OWLOntology addAllInferredTypesToCopyLocalOntoBlazegraph(OWLOntology asserted_ont) throws OWLOntologyCreationException, IOException { + OWLOntologyManager ontman = OWLManager.createOWLOntologyManager(); + OWLDataFactory df = ontman.getOWLDataFactory(); + OWLOntology temp_ont = ontman.copyOntology(asserted_ont, OntologyCopy.SHALLOW); + for (OWLAnnotation a : asserted_ont.getAnnotations()) { + OWLAxiom annoaxiom = df.getOWLAnnotationAssertionAxiom(temp_ont.getOntologyID().getOntologyIRI().get(), a); + ontman.addAxiom(temp_ont, annoaxiom); + } + Set individuals = temp_ont.getIndividualsInSignature(); + Map> sub_supers = new HashMap>(); + Set uris = new HashSet(); + Map> individual_asserted_types = new HashMap>(); + for (OWLNamedIndividual individual : individuals) { + Collection asserted_types = EntitySearcher.getTypes(individual, asserted_ont); + for (OWLClassExpression cls : asserted_types) { + if (cls.isAnonymous()) { + continue; + } + IRI class_iri = cls.asOWLClass().getIRI(); + if (class_iri.toString().contains("ECO")) { + continue; //this only deals with genes, chemicals, proteins, and complexes. + } + uris.add(class_iri.toString()); + } + individual_asserted_types.put(individual, asserted_types); + } + sub_supers = shex.getGo_lego_repo().getNeoRoots(uris); + Set new_parent_types = new HashSet(); + //for all individuals + for (OWLNamedIndividual i : individual_asserted_types.keySet()) { + //for all asserted types + for (OWLClassExpression asserted : individual_asserted_types.get(i)) { + //add types for all their parents + if (asserted.isAnonymous()) { + continue; + } + OWLClass sub = asserted.asOWLClass(); + Set supers = sub_supers.get(sub.getIRI().toString()); + if (supers != null) { + for (String s : supers) { + OWLClass parent_class = ontman.getOWLDataFactory().getOWLClass(IRI.create(s)); + if (!parent_class.isBuiltIn() && (!parent_class.isAnonymous())) { + OWLClassAssertionAxiom add_parent_type = df.getOWLClassAssertionAxiom(parent_class, i); + new_parent_types.add(add_parent_type); + } + //add everything into the tbox - (for use later in shex validator) + //OWLSubClassOfAxiom subSuper = df.getOWLSubClassOfAxiom(sub, parent_class); + //new_parent_types.add(subSuper); + //make sure the parent is a subclass of itself.. needed for shex to find it. 
+ //OWLSubClassOfAxiom superSuper = df.getOWLSubClassOfAxiom(parent_class, parent_class); + //new_parent_types.add(superSuper); + } + } + } + } + if (!new_parent_types.isEmpty()) { + ontman.addAxioms(temp_ont, new_parent_types); + } + + return temp_ont; + } + + + public static OWLOntology addRootTypesToCopyViaGolr(OWLOntology asserted_ont, ExternalLookupService externalLookupService) throws OWLOntologyCreationException { + if (externalLookupService == null) { + return asserted_ont; //should probably throw some kind of exception here.. + } + OWLOntology temp_ont = asserted_ont.getOWLOntologyManager().createOntology(); + temp_ont.getOWLOntologyManager().addAxioms(temp_ont, asserted_ont.getAxioms()); + Set individuals = temp_ont.getIndividualsInSignature(); + Set to_look_up = new HashSet(); + Map> individual_types = new HashMap>(); + for (OWLNamedIndividual individual : individuals) { + Collection asserted_types = EntitySearcher.getTypes(individual, asserted_ont); + Set ind_types = new HashSet(); + for (OWLClassExpression cls : asserted_types) { + if (cls.isAnonymous()) { + continue; + } + IRI class_iri = cls.asOWLClass().getIRI(); + if (class_iri.toString().contains("ECO")) { + continue; //this only deals with genes, chemicals, proteins, and complexes. + } + to_look_up.add(class_iri); + ind_types.add(class_iri); + } + individual_types.put(individual, ind_types); + } + //look up all at once + Map> iri_lookup = externalLookupService.lookupBatch(to_look_up); + + if (iri_lookup != null) { + //add the identified root types on to the individuals in the model + for (OWLNamedIndividual i : individual_types.keySet()) { + for (IRI asserted_type : individual_types.get(i)) { + if (asserted_type == null) { + continue; + } + List lookup = iri_lookup.get(asserted_type); + if (lookup != null && !lookup.isEmpty() && lookup.get(0).direct_parent_iri != null) { + OWLClass parent_class = temp_ont.getOWLOntologyManager().getOWLDataFactory().getOWLClass(IRI.create(lookup.get(0).direct_parent_iri)); + OWLClassAssertionAxiom add_root = temp_ont.getOWLOntologyManager().getOWLDataFactory().getOWLClassAssertionAxiom(parent_class, i); + temp_ont.getOWLOntologyManager().addAxiom(temp_ont, add_root); + } + } + } + } else { + LOG.error("external lookup at failed for batch: " + to_look_up); + } + return temp_ont; + } + + @Override + public String toString() { + return "InferenceProviderCreator: " + name; + } } diff --git a/minerva-server/src/main/java/org/geneontology/minerva/server/inferences/MapInferenceProvider.java b/minerva-server/src/main/java/org/geneontology/minerva/server/inferences/MapInferenceProvider.java index 27e06b47..bca89e2b 100644 --- a/minerva-server/src/main/java/org/geneontology/minerva/server/inferences/MapInferenceProvider.java +++ b/minerva-server/src/main/java/org/geneontology/minerva/server/inferences/MapInferenceProvider.java @@ -1,14 +1,5 @@ package org.geneontology.minerva.server.inferences; -import java.io.File; -import java.io.FileOutputStream; -import java.io.IOException; -import java.util.Collections; -import java.util.HashMap; -import java.util.HashSet; -import java.util.Map; -import java.util.Set; - import org.apache.jena.rdf.model.Model; import org.apache.log4j.Logger; import org.geneontology.minerva.json.InferenceProvider; @@ -18,121 +9,119 @@ import org.geneontology.minerva.validation.ShexValidationReport; import org.geneontology.minerva.validation.ValidationResultSet; import org.geneontology.minerva.validation.Violation; -import org.semanticweb.owlapi.formats.TurtleDocumentFormat; 
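/*
 * The create() method of InferenceProviderCreatorImpl above bounds concurrent reasoner
 * construction with a Semaphore(maxConcurrent) inside a block synchronized on the abox
 * ontology, releasing the permit in a finally clause before the optional SLME module and the
 * temporary enriched ontology are cleaned up. The sketch below shows only that
 * acquire/try/finally guard with a placeholder work step; it is not the Minerva
 * implementation, and the class and method names here are invented for illustration.
 */
import java.util.concurrent.Semaphore;
import java.util.function.Supplier;

class GuardedCreatorSketch<T> {

    private final Semaphore concurrentLock;

    GuardedCreatorSketch(int maxConcurrent) {
        this.concurrentLock = new Semaphore(maxConcurrent);
    }

    T create(Object modelLock, Supplier<T> work) throws InterruptedException {
        synchronized (modelLock) {          // one caller per model, like synchronized (ont)
            concurrentLock.acquire();       // bound how many reasoners are built at once
            try {
                return work.get();          // stands in for module extraction + reasoning + validation
            } finally {
                concurrentLock.release();   // always release, mirroring the finally block above
            }
        }
    }

    public static void main(String[] args) throws InterruptedException {
        GuardedCreatorSketch<String> sketch = new GuardedCreatorSketch<>(1);
        System.out.println(sketch.create(new Object(), () -> "inference-provider"));
    }
}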
-import org.semanticweb.owlapi.io.FileDocumentTarget; -import org.semanticweb.owlapi.model.OWLAnnotation; import org.semanticweb.owlapi.model.OWLClass; import org.semanticweb.owlapi.model.OWLNamedIndividual; import org.semanticweb.owlapi.model.OWLOntology; import org.semanticweb.owlapi.model.OWLOntologyCreationException; -import org.semanticweb.owlapi.model.OWLOntologyStorageException; import org.semanticweb.owlapi.reasoner.OWLReasoner; +import java.io.IOException; +import java.util.*; + public class MapInferenceProvider implements InferenceProvider { - private static final Logger LOGGER = Logger.getLogger(InferenceProvider.class); - private final boolean isConsistent; - private final Map> inferredTypes; - private final Map> inferredTypesWithIndirects; + private static final Logger LOGGER = Logger.getLogger(InferenceProvider.class); + private final boolean isConsistent; + private final Map> inferredTypes; + private final Map> inferredTypesWithIndirects; + + //for shex and other validation + private ValidationResultSet validation_results; - //for shex and other validation - private ValidationResultSet validation_results; + MapInferenceProvider(boolean isConsistent, Map> inferredTypes, Map> inferredTypesWithIndirects, ValidationResultSet validation_reports) { + this.isConsistent = isConsistent; + this.inferredTypes = inferredTypes; + this.inferredTypesWithIndirects = inferredTypesWithIndirects; + this.validation_results = validation_reports; + } - MapInferenceProvider(boolean isConsistent, Map> inferredTypes, Map> inferredTypesWithIndirects, ValidationResultSet validation_reports) { - this.isConsistent = isConsistent; - this.inferredTypes = inferredTypes; - this.inferredTypesWithIndirects = inferredTypesWithIndirects; - this.validation_results = validation_reports; - } - - public static InferenceProvider create(OWLReasoner r, OWLOntology ont, MinervaShexValidator shex) throws OWLOntologyCreationException, IOException { - Map> inferredTypes = new HashMap<>(); - Map> inferredTypesWithIndirects = new HashMap<>(); - boolean isConsistent = r.isConsistent(); - if (isConsistent) { - Set individuals = ont.getIndividualsInSignature(); - for (OWLNamedIndividual individual : individuals) { - Set inferred = new HashSet<>(); - Set flattened = r.getTypes(individual, true).getFlattened(); - for (OWLClass cls : flattened) { - if (cls.isBuiltIn() == false) { - inferred.add(cls); - } - } - inferredTypes.put(individual, inferred); - //adding the rest of the types - //TODO consider filtering down to root types - depending on use cases - Set all_inferred = new HashSet<>(); - Set all_flattened = r.getTypes(individual, false).getFlattened(); - for (OWLClass cls : all_flattened) { - if (cls.isBuiltIn() == false) { - all_inferred.add(cls); - } - } - inferredTypesWithIndirects.put(individual, all_inferred); - } - } - //reasoner - OWLValidationReport reasoner_validation = new OWLValidationReport(); - reasoner_validation.setConformant(isConsistent); - if(!isConsistent) { - Violation i_v = new Violation("id of inconsistent node"); - reasoner_validation.addViolation(i_v); - } - //shex - ShexValidationReport shex_validation = new ShexValidationReport(); - if(shex.isActive()) { - //generate an RDF model - Model model = JenaOwlTool.getJenaModel(ont); - //add superclasses to types used in model - needed for shex to find everything - //model may now have additional inferred assertions from Arachne - model = shex.enrichSuperClasses(model); - try { - LOGGER.info("Running shex validation - model (enriched with superclass 
hierarchy) size:"+model.size()); - shex_validation = shex.runShapeMapValidation(model); - LOGGER.info("Done with shex validation. model is conformant is: "+shex_validation.isConformant()); - } catch (Exception e) { - // TODO Auto-generated catch block - e.printStackTrace(); - } - } - ValidationResultSet all_validations = new ValidationResultSet(reasoner_validation, shex_validation); - return new MapInferenceProvider(isConsistent, inferredTypes, inferredTypesWithIndirects, all_validations); - } + public static InferenceProvider create(OWLReasoner r, OWLOntology ont, MinervaShexValidator shex) throws OWLOntologyCreationException, IOException { + Map> inferredTypes = new HashMap<>(); + Map> inferredTypesWithIndirects = new HashMap<>(); + boolean isConsistent = r.isConsistent(); + if (isConsistent) { + Set individuals = ont.getIndividualsInSignature(); + for (OWLNamedIndividual individual : individuals) { + Set inferred = new HashSet<>(); + Set flattened = r.getTypes(individual, true).getFlattened(); + for (OWLClass cls : flattened) { + if (cls.isBuiltIn() == false) { + inferred.add(cls); + } + } + inferredTypes.put(individual, inferred); + //adding the rest of the types + //TODO consider filtering down to root types - depending on use cases + Set all_inferred = new HashSet<>(); + Set all_flattened = r.getTypes(individual, false).getFlattened(); + for (OWLClass cls : all_flattened) { + if (cls.isBuiltIn() == false) { + all_inferred.add(cls); + } + } + inferredTypesWithIndirects.put(individual, all_inferred); + } + } + //reasoner + OWLValidationReport reasoner_validation = new OWLValidationReport(); + reasoner_validation.setConformant(isConsistent); + if (!isConsistent) { + Violation i_v = new Violation("id of inconsistent node"); + reasoner_validation.addViolation(i_v); + } + //shex + ShexValidationReport shex_validation = new ShexValidationReport(); + if (shex.isActive()) { + //generate an RDF model + Model model = JenaOwlTool.getJenaModel(ont); + //add superclasses to types used in model - needed for shex to find everything + //model may now have additional inferred assertions from Arachne + model = shex.enrichSuperClasses(model); + try { + LOGGER.info("Running shex validation - model (enriched with superclass hierarchy) size:" + model.size()); + shex_validation = shex.runShapeMapValidation(model); + LOGGER.info("Done with shex validation. 
model is conformant is: " + shex_validation.isConformant()); + } catch (Exception e) { + // TODO Auto-generated catch block + e.printStackTrace(); + } + } + ValidationResultSet all_validations = new ValidationResultSet(reasoner_validation, shex_validation); + return new MapInferenceProvider(isConsistent, inferredTypes, inferredTypesWithIndirects, all_validations); + } - - @Override - public boolean isConsistent() { - return isConsistent; - } + @Override + public boolean isConsistent() { + return isConsistent; + } - @Override - public Set getTypes(OWLNamedIndividual i) { - Set result = Collections.emptySet(); - if (isConsistent && i != null) { - Set inferences = inferredTypes.get(i); - if (inferences != null) { - result = Collections.unmodifiableSet(inferences); - } - } - return result; - } - @Override - public Set getAllTypes(OWLNamedIndividual i) { - Set result = Collections.emptySet(); - if (isConsistent && i != null) { - Set inferences = inferredTypesWithIndirects.get(i); - if (inferences != null) { - result = Collections.unmodifiableSet(inferences); - } - } - return result; - } + @Override + public Set getTypes(OWLNamedIndividual i) { + Set result = Collections.emptySet(); + if (isConsistent && i != null) { + Set inferences = inferredTypes.get(i); + if (inferences != null) { + result = Collections.unmodifiableSet(inferences); + } + } + return result; + } - public ValidationResultSet getValidation_results() { - return validation_results; - } + @Override + public Set getAllTypes(OWLNamedIndividual i) { + Set result = Collections.emptySet(); + if (isConsistent && i != null) { + Set inferences = inferredTypesWithIndirects.get(i); + if (inferences != null) { + result = Collections.unmodifiableSet(inferences); + } + } + return result; + } + public ValidationResultSet getValidation_results() { + return validation_results; + } } diff --git a/minerva-server/src/main/java/org/geneontology/minerva/server/validation/BeforeSaveModelValidator.java b/minerva-server/src/main/java/org/geneontology/minerva/server/validation/BeforeSaveModelValidator.java index 1c753f01..52f79b54 100644 --- a/minerva-server/src/main/java/org/geneontology/minerva/server/validation/BeforeSaveModelValidator.java +++ b/minerva-server/src/main/java/org/geneontology/minerva/server/validation/BeforeSaveModelValidator.java @@ -1,61 +1,57 @@ package org.geneontology.minerva.server.validation; +import org.geneontology.minerva.ModelContainer; +import org.geneontology.minerva.util.AnnotationShorthand; +import org.semanticweb.owlapi.model.*; + import java.util.ArrayList; import java.util.List; import java.util.Set; -import org.geneontology.minerva.ModelContainer; -import org.geneontology.minerva.util.AnnotationShorthand; -import org.semanticweb.owlapi.model.OWLAnnotation; -import org.semanticweb.owlapi.model.OWLAnnotationProperty; -import org.semanticweb.owlapi.model.OWLNamedIndividual; -import org.semanticweb.owlapi.model.OWLOntology; -import org.semanticweb.owlapi.model.OWLOntologyCreationException; - public class BeforeSaveModelValidator { - - public List validateBeforeSave(ModelContainer model) throws OWLOntologyCreationException { - // get model - List errors = new ArrayList(3); - // check that model has required meta data - OWLOntology aboxOntology = model.getAboxOntology(); - boolean hasTitle = false; - boolean hasContributor = false; - - // get ontology annotations - Set annotations = aboxOntology.getAnnotations(); - for (OWLAnnotation annotation : annotations) { - OWLAnnotationProperty p = annotation.getProperty(); - 
AnnotationShorthand legoType = AnnotationShorthand.getShorthand(p.getIRI()); - if (legoType != null) { - // check for title - if (AnnotationShorthand.title.equals(legoType)) { - hasTitle = true; - } - // check for contributor - else if (AnnotationShorthand.contributor.equals(legoType)) { - hasContributor = true; - } - } - } - - if (hasTitle == false) { - errors.add("The model has no title. All models must have a human readable title."); - } - if (hasContributor == false) { - errors.add("The model has no contributors. All models must have an association with their contributors."); - } - - // require at least one declared instance - Set individuals = aboxOntology.getIndividualsInSignature(); - if (individuals.isEmpty()) { - errors.add("The model has no individuals. Empty models should not be saved."); - } - - // avoid returning empty list - if (errors.isEmpty()) { - errors = null; - } - return errors; - } + + public List validateBeforeSave(ModelContainer model) throws OWLOntologyCreationException { + // get model + List errors = new ArrayList(3); + // check that model has required meta data + OWLOntology aboxOntology = model.getAboxOntology(); + boolean hasTitle = false; + boolean hasContributor = false; + + // get ontology annotations + Set annotations = aboxOntology.getAnnotations(); + for (OWLAnnotation annotation : annotations) { + OWLAnnotationProperty p = annotation.getProperty(); + AnnotationShorthand legoType = AnnotationShorthand.getShorthand(p.getIRI()); + if (legoType != null) { + // check for title + if (AnnotationShorthand.title.equals(legoType)) { + hasTitle = true; + } + // check for contributor + else if (AnnotationShorthand.contributor.equals(legoType)) { + hasContributor = true; + } + } + } + + if (hasTitle == false) { + errors.add("The model has no title. All models must have a human readable title."); + } + if (hasContributor == false) { + errors.add("The model has no contributors. All models must have an association with their contributors."); + } + + // require at least one declared instance + Set individuals = aboxOntology.getIndividualsInSignature(); + if (individuals.isEmpty()) { + errors.add("The model has no individuals. 
Empty models should not be saved."); + } + + // avoid returning empty list + if (errors.isEmpty()) { + errors = null; + } + return errors; + } } diff --git a/minerva-server/src/main/java/org/geneontology/minerva/server/validation/MinervaShexValidator.java b/minerva-server/src/main/java/org/geneontology/minerva/server/validation/MinervaShexValidator.java index f54f9cbd..1c43e3fc 100644 --- a/minerva-server/src/main/java/org/geneontology/minerva/server/validation/MinervaShexValidator.java +++ b/minerva-server/src/main/java/org/geneontology/minerva/server/validation/MinervaShexValidator.java @@ -1,17 +1,13 @@ /** - * + * */ package org.geneontology.minerva.server.validation; -import java.io.File; - -import org.apache.jena.rdf.model.Resource; import org.geneontology.minerva.BlazegraphOntologyManager; import org.geneontology.minerva.curie.CurieHandler; -import org.geneontology.minerva.lookup.ExternalLookupService; import org.geneontology.minerva.validation.ShexValidator; -import org.semanticweb.owlapi.model.IRI; -import org.semanticweb.owlapi.reasoner.OWLReasoner; + +import java.io.File; /** * @author bgood @@ -19,32 +15,33 @@ */ public class MinervaShexValidator extends ShexValidator { - boolean active = true; - - public boolean isActive() { - return active; - } - - public void setActive(boolean active) { - this.active = active; - } - - /** - * @param shexpath - * @param goshapemappath - * @throws Exception - */ - public MinervaShexValidator(String shexpath, String goshapemappath, CurieHandler curieHandler, BlazegraphOntologyManager go_lego) throws Exception { - super(shexpath, goshapemappath, go_lego, curieHandler); - } - /** - * @param shex_schema_file - * @param shex_map_file - * @throws Exception - */ - public MinervaShexValidator(File shex_schema_file, File shex_map_file, CurieHandler curieHandler, BlazegraphOntologyManager go_lego) throws Exception { - super(shex_schema_file, shex_map_file, go_lego, curieHandler); - } - + boolean active = true; + + public boolean isActive() { + return active; + } + + public void setActive(boolean active) { + this.active = active; + } + + /** + * @param shexpath + * @param goshapemappath + * @throws Exception + */ + public MinervaShexValidator(String shexpath, String goshapemappath, CurieHandler curieHandler, BlazegraphOntologyManager go_lego) throws Exception { + super(shexpath, goshapemappath, go_lego, curieHandler); + } + + /** + * @param shex_schema_file + * @param shex_map_file + * @throws Exception + */ + public MinervaShexValidator(File shex_schema_file, File shex_map_file, CurieHandler curieHandler, BlazegraphOntologyManager go_lego) throws Exception { + super(shex_schema_file, shex_map_file, go_lego, curieHandler); + } + } diff --git a/minerva-server/src/main/resources/ModelSearchQueryTemplate.rq b/minerva-server/src/main/resources/ModelSearchQueryTemplate.rq index f0a6d601..6521f41e 100644 --- a/minerva-server/src/main/resources/ModelSearchQueryTemplate.rq +++ b/minerva-server/src/main/resources/ModelSearchQueryTemplate.rq @@ -1,8 +1,8 @@ -PREFIX owl: -PREFIX rdf: +PREFIX owl: +PREFIX rdf: #model metadata PREFIX metago: -PREFIX lego: +PREFIX lego: #model data PREFIX part_of: PREFIX occurs_in: @@ -12,28 +12,27 @@ PREFIX has_output: PREFIX causally_upstream_of: PREFIX provides_direct_input_for: PREFIX directly_positively_regulates: - -SELECT +SELECT WHERE { - - GRAPH ?id { - ?id ?title ; - ?date ; - ?contributor ; - optional{?id ?group } . - optional{?id lego:modelstate ?state } . 
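/*
 * BeforeSaveModelValidator above blocks a save when the model lacks a title annotation, lacks a
 * contributor annotation, or contains no individuals, and returns null when there is nothing to
 * report. The sketch below mirrors only that decision logic over pre-computed booleans and a
 * count; extracting the annotations via AnnotationShorthand is left out, so this is an
 * illustration of the rule set rather than the validator itself.
 */
import java.util.ArrayList;
import java.util.List;

class BeforeSaveChecksSketch {

    static List<String> validate(boolean hasTitle, boolean hasContributor, int individualCount) {
        List<String> errors = new ArrayList<>(3);
        if (!hasTitle) {
            errors.add("The model has no title. All models must have a human readable title.");
        }
        if (!hasContributor) {
            errors.add("The model has no contributors. All models must have an association with their contributors.");
        }
        if (individualCount == 0) {
            errors.add("The model has no individuals. Empty models should not be saved.");
        }
        return errors.isEmpty() ? null : errors; // null signals "nothing to report", as in the validator
    }

    public static void main(String[] args) {
        System.out.println(validate(true, true, 5));   // null
        System.out.println(validate(false, true, 0));  // title error + empty-model error
    }
}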
- - - - - - - - - } - } - - ORDER BY desc(?mindate) desc(?id) + +GRAPH ?id { +?id ?title ; + ?date ; + ?contributor ; +optional{?id ?group } . +optional{?id lego:modelstate ?state } . + + + + + + + + +} +} + +ORDER BY desc(?mindate) desc(?id) \ No newline at end of file diff --git a/minerva-server/src/main/resources/log4j.properties b/minerva-server/src/main/resources/log4j.properties index dcccb7f9..350808e3 100644 --- a/minerva-server/src/main/resources/log4j.properties +++ b/minerva-server/src/main/resources/log4j.properties @@ -1,13 +1,10 @@ log4j.appender.console=org.apache.log4j.ConsoleAppender log4j.appender.console.layout=org.apache.log4j.PatternLayout log4j.appender.console.layout.ConversionPattern=%d %-5p (%c:%L) %m\n - -log4j.logger.org.semanticweb.elk = ERROR +log4j.logger.org.semanticweb.elk=ERROR log4j.logger.org.obolibrary.obo2owl=OFF log4j.logger.org.semanticweb.owlapi=error - # uncomment to enable GOLR lookup request messages #log4j.logger.org.geneontology.minerva.server.external.GolrExternalLookupService=DEBUG - log4j.rootLogger=INFO, console diff --git a/minerva-server/src/test/java/org/geneontology/minerva/server/handler/ARTHandlerTest.java b/minerva-server/src/test/java/org/geneontology/minerva/server/handler/ARTHandlerTest.java index a15a4be2..772b1d16 100644 --- a/minerva-server/src/test/java/org/geneontology/minerva/server/handler/ARTHandlerTest.java +++ b/minerva-server/src/test/java/org/geneontology/minerva/server/handler/ARTHandlerTest.java @@ -1,33 +1,16 @@ /** - * + * */ package org.geneontology.minerva.server.handler; -import static org.junit.Assert.*; - -import java.io.File; -import java.io.IOException; -import java.io.InputStream; -import java.net.HttpURLConnection; -import java.net.URI; -import java.net.URISyntaxException; -import java.net.URL; -import java.time.LocalDate; -import java.time.LocalTime; -import java.util.ArrayList; -import java.util.Arrays; -import java.util.Collections; -import java.util.List; - +import com.google.gson.Gson; import org.apache.commons.io.FileUtils; import org.apache.commons.io.IOUtils; -import org.apache.http.client.entity.UrlEncodedFormEntity; import org.apache.http.client.methods.CloseableHttpResponse; import org.apache.http.client.methods.HttpPost; import org.apache.http.client.utils.URIBuilder; import org.apache.http.impl.client.CloseableHttpClient; import org.apache.http.impl.client.HttpClients; -import org.apache.http.message.BasicNameValuePair; import org.apache.http.util.EntityUtils; import org.apache.log4j.Logger; import org.eclipse.jetty.server.HttpConfiguration; @@ -43,7 +26,6 @@ import org.geneontology.minerva.curie.CurieMappings; import org.geneontology.minerva.curie.DefaultCurieHandler; import org.geneontology.minerva.curie.MappedCurieHandler; -import org.geneontology.minerva.json.JsonAnnotation; import org.geneontology.minerva.json.JsonModel; import org.geneontology.minerva.json.JsonOwlIndividual; import org.geneontology.minerva.lookup.ExternalLookupService; @@ -51,379 +33,373 @@ import org.geneontology.minerva.server.RequireJsonpFilter; import org.geneontology.minerva.server.StartUpTool; import org.geneontology.minerva.server.StartUpTool.MinervaStartUpConfig; -import org.geneontology.minerva.server.handler.M3BatchHandler.Entity; -import org.geneontology.minerva.server.handler.M3BatchHandler.M3Argument; -import org.geneontology.minerva.server.handler.M3BatchHandler.M3BatchResponse; -import org.geneontology.minerva.server.handler.M3BatchHandler.M3Request; -import 
org.geneontology.minerva.server.handler.M3BatchHandler.Operation; +import org.geneontology.minerva.server.handler.M3BatchHandler.*; import org.geneontology.minerva.server.handler.ModelARTHandler.ModelARTResult; import org.geneontology.minerva.server.inferences.InferenceProviderCreator; import org.geneontology.minerva.server.validation.MinervaShexValidator; import org.glassfish.jersey.server.ResourceConfig; import org.glassfish.jersey.servlet.ServletContainer; -import org.junit.After; -import org.junit.AfterClass; -import org.junit.Before; -import org.junit.BeforeClass; -import org.junit.ClassRule; -import org.junit.Test; +import org.junit.*; import org.junit.rules.TemporaryFolder; import org.openrdf.repository.RepositoryException; import org.openrdf.rio.RDFHandlerException; import org.openrdf.rio.RDFParseException; import org.semanticweb.owlapi.apibinding.OWLManager; -import org.semanticweb.owlapi.model.IRI; -import org.semanticweb.owlapi.model.OWLObjectProperty; -import org.semanticweb.owlapi.model.OWLOntology; -import org.semanticweb.owlapi.model.OWLOntologyCreationException; -import org.semanticweb.owlapi.model.OWLOntologyManager; -import org.semanticweb.owlapi.model.OWLOntologyStorageException; +import org.semanticweb.owlapi.model.*; -import com.google.gson.Gson; +import java.io.File; +import java.io.IOException; +import java.io.InputStream; +import java.net.HttpURLConnection; +import java.net.URI; +import java.net.URISyntaxException; +import java.net.URL; +import java.util.Collections; +import java.util.List; -import owltools.gaf.eco.EcoMapperFactory; -import owltools.gaf.eco.SimpleEcoMapper; +import static org.junit.Assert.assertFalse; +import static org.junit.Assert.assertTrue; /** * @author tremaynemushayahama * */ public class ARTHandlerTest { - private static final Logger LOGGER = Logger.getLogger(ARTHandlerTest.class); - static Server server; - static final String ontologyIRI = "http://purl.obolibrary.org/obo/go/extensions/go-lego.owl"; - static final String modelIdcurie = "http://model.geneontology.org/"; - static final String modelIdPrefix = "gomodel"; - static final String go_lego_journal_file = "/tmp/test-go-lego-blazegraph.jnl"; - static final String valid_model_folder = "src/test/resources/models/art-simple/"; - static final String model_save = "src/test/resources/models/tmp/"; - - static OWLOntology tbox_ontology; - static CurieHandler curieHandler; - static UndoAwareMolecularModelManager models; - private static JsonOrJsonpBatchHandler handler; - - @ClassRule - public static TemporaryFolder tmp = new TemporaryFolder(); - - /** - * @throws java.lang.Exception - */ - @BeforeClass - public static void setUpBeforeClass() throws Exception { - - LOGGER.info("Setup shex."); - File shex_schema_file = new File("src/test/resources/validate.shex"); - File shex_map_file = new File("src/test/resources/validate.shapemap"); - - LOGGER.info("Set up molecular model manager - loading files into a journal"); - // set curie handler - final CurieMappings localMappings = new CurieMappings.SimpleCurieMappings(Collections.singletonMap(modelIdcurie, modelIdPrefix)); - curieHandler = new MappedCurieHandler(DefaultCurieHandler.loadDefaultMappings(), localMappings); - String inputDB = makeBlazegraphJournal(valid_model_folder); - OWLOntologyManager ontman = OWLManager.createOWLOntologyManager(); - tbox_ontology = ontman.createOntology(IRI.create("http://example.org/dummy"));//empty tbox - models = new UndoAwareMolecularModelManager(tbox_ontology, curieHandler, modelIdPrefix, inputDB, model_save, 
go_lego_journal_file, true); - models.addTaxonMetadata(); - - LOGGER.info("Setup Jetty config."); - ResourceConfig resourceConfig = new ResourceConfig(); - resourceConfig.register(GsonMessageBodyHandler.class); - resourceConfig.register(RequireJsonpFilter.class); - - MinervaShexValidator shex = new MinervaShexValidator(shex_schema_file, shex_map_file, curieHandler, models.getGolego_repo()); - shex.setActive(true); - - //setup the config for the startup tool. - MinervaStartUpConfig conf = new MinervaStartUpConfig(); - conf.reasonerOpt = "arachne"; - conf.shex = shex; - conf.port = 6800; - conf.contextString = "/"; - - InferenceProviderCreator ipc = StartUpTool.createInferenceProviderCreator(conf.reasonerOpt, models, conf.shex); - - ModelARTHandler artHandler = new ModelARTHandler(models, ipc); - //set up a handler for testing with M3BatchRequest service - handler = new JsonOrJsonpBatchHandler(models, "development", ipc, - Collections.emptySet(), (ExternalLookupService) null); - - resourceConfig = resourceConfig.registerInstances(artHandler); - - // setup jetty server port, buffers and context path - server = new Server(); - // create connector with port and custom buffer sizes - - HttpConfiguration http_config = new HttpConfiguration(); - - http_config.setRequestHeaderSize(conf.requestHeaderSize); - ServerConnector connector = new ServerConnector(server, new HttpConnectionFactory(http_config)); - connector.setPort(conf.port); - server.addConnector(connector); - ServletContextHandler context = new ServletContextHandler(ServletContextHandler.SESSIONS); - context.setContextPath(conf.contextString); - server.setHandler(context); - ServletHolder h = new ServletHolder(new ServletContainer(resourceConfig)); - context.addServlet(h, "/*"); - - // start jetty server - LOGGER.info("Start server on port: "+conf.port+" context: "+conf.contextString); - server.start(); - - } - - /** - * @throws java.lang.Exception - */ - @AfterClass - public static void tearDownAfterClass() throws Exception { - models.dispose(); - server.stop(); - if (handler != null) { - handler = null; - } - } - - /** - * @throws java.lang.Exception - */ - @Before - public void setUp() throws Exception { - } - - /** - * @throws java.lang.Exception - */ - @After - public void tearDown() throws Exception { - } - - @Test - public final void testGetModel() throws URISyntaxException, IOException, OWLOntologyStorageException, OWLOntologyCreationException, RepositoryException, UnknownIdentifierException { - //get a hold of a test model - String mid = "5fbeae9c00000008"; - final String modelId = "http://model.geneontology.org/"+mid; - M3BatchResponse response = BatchTestTools.getModel(handler, modelId, false); - - ModelARTResult result = getStoredModel(mid); - //Repeat it after - response = BatchTestTools.getModel(handler, modelId, false); - - assertFalse("Model should not be modified", response.data.modifiedFlag); - assertTrue("Active Model should be the same as stored model", equalJsonSize(result.getActiveModel(), result.getStoredModel())); - - //Set up m3Batch response to compare active models - JsonModel m3JsonModel = m3ResponseToJsonModel(response); - assertTrue("Active Model should be the same as m3Batch active model", equalJsonSize(result.getActiveModel(), m3JsonModel)); - } - - @Test - public final void testStoredModelModified() throws URISyntaxException, IOException, OWLOntologyStorageException, OWLOntologyCreationException, RepositoryException, UnknownIdentifierException { - //get a hold of a test model - String mid = 
"5fbeae9c00000008"; - final String modelId = "http://model.geneontology.org/"+mid; - - M3Request r = BatchTestTools.addIndividual(modelId, "GO:0003674"); - List batch = Collections.singletonList(r); - M3BatchResponse response = handler.m3Batch("test-user", Collections.emptySet(), "test-intention", "foo-packet-id", - batch.toArray(new M3Request[batch.size()]), false, true); - - //Get the model - response = BatchTestTools.getModel(handler, modelId, false); - ModelARTResult result = getStoredModel(mid); - //Repeat - response = BatchTestTools.getModel(handler, modelId, false); - - int activeIndividualCount = result.getActiveModel().individuals.length; - int storedIndividualCount = result.getStoredModel().individuals.length; - - assertTrue("Model should be modified", response.data.modifiedFlag); - assertFalse("Active model should not be the same as stored model", equalJsonSize(result.getActiveModel(), result.getStoredModel())); - - //Since we added one individual - assertTrue("Active individuals should be one more stored individual count", activeIndividualCount==storedIndividualCount+1); - - //Set up m3Batch response to compare active models - JsonModel m3JsonModel = m3ResponseToJsonModel(response); - assertTrue("Active Model should be the same as m3Batch active model", equalJsonSize(result.getActiveModel(), m3JsonModel)); - - //Store the model - response = storeModel(modelId); - result = getStoredModel(mid); - - assertFalse("Model should not be modified", response.data.modifiedFlag); - assertTrue("Active model should be the same as stored model", equalJsonSize(result.getActiveModel(), result.getStoredModel())); - - //Set up m3Batch response to compare active models - m3JsonModel = m3ResponseToJsonModel(response); - assertTrue("Active Model should be the same as m3Batch active model", equalJsonSize(result.getActiveModel(), m3JsonModel)); - - //After store active should be equal to stored - storedIndividualCount = result.getStoredModel().individuals.length; - assertTrue("Active individuals should be one more stored individual count", activeIndividualCount==storedIndividualCount); - - - //Get the model - response = BatchTestTools.getModel(handler, modelId, false); - - assertFalse("Model should not be modified", response.data.modifiedFlag); - - //Set up m3Batch response to compare active models - m3JsonModel = m3ResponseToJsonModel(response); - assertTrue("Active Model should be the same as m3Batch active model", equalJsonSize(result.getActiveModel(), m3JsonModel)); - - } - - @Test - public final void testStoredModelReset() throws URISyntaxException, IOException, OWLOntologyStorageException, OWLOntologyCreationException, RepositoryException, UnknownIdentifierException { - //get a hold of a test model - String mid = "5fbeae9c00000008"; - final String modelId = "http://model.geneontology.org/"+mid; - - M3Request r = BatchTestTools.addIndividual(modelId, "GO:0003674"); - List batch = Collections.singletonList(r); - M3BatchResponse response = handler.m3Batch("test-user", Collections.emptySet(), "test-intention", "foo-packet-id", - batch.toArray(new M3Request[batch.size()]), false, true); - //Reset the model - response = resetModel(modelId); - - //Get the model - response = BatchTestTools.getModel(handler, modelId, false); - ModelARTResult result = getStoredModel(mid); - - assertFalse("Model should not be modified", response.data.modifiedFlag); - assertTrue("Active Model should be the same as stored model", equalJsonSize(result.getActiveModel(), result.getStoredModel())); - - //Set up m3Batch response 
to compare active models - JsonModel m3JsonModel = m3ResponseToJsonModel(response); - assertTrue("Active Model should be the same as m3Batch active model", equalJsonSize(result.getActiveModel(), m3JsonModel)); - - } - - - public static ModelARTResult getStoredModel(String modelId) throws URISyntaxException, IOException { - URIBuilder builder = new URIBuilder("http://127.0.0.1:6800/search/stored"); - builder.addParameter("id", "gomodel:"+modelId); - URI artUrl = builder.build(); - String json_result = getJsonStringFromUri(artUrl); - Gson g = new Gson(); - ModelARTResult result = g.fromJson(json_result, ModelARTResult.class); - - LOGGER.info("Model Stored Model "+json_result); - - return result; - } - - private static M3BatchResponse storeModel(String modelId) { - M3Request r = new M3Request(); - r.entity = Entity.model; - r.operation = Operation.storeModel; - r.arguments = new M3Argument(); - r.arguments.modelId = modelId; - List batch = Collections.singletonList(r); - M3BatchResponse response = handler.m3Batch("test-user", Collections.emptySet(), "test-intention", "foo-packet-id", - batch.toArray(new M3Request[batch.size()]), false, true); - - return response; - } - - private static M3BatchResponse resetModel(String modelId) { - M3Request r = new M3Request(); - r.entity = Entity.model; - r.operation = Operation.resetModel; - r.arguments = new M3Argument(); - r.arguments.modelId = modelId; - List batch = Collections.singletonList(r); - M3BatchResponse response = handler.m3Batch("test-user", Collections.emptySet(), "test-intention", "foo-packet-id", - batch.toArray(new M3Request[batch.size()]), false, true); - - return response; - } - - - private static String makeBlazegraphJournal(String input_folder) throws IOException, OWLOntologyCreationException, RepositoryException, RDFParseException, RDFHandlerException { - String inputDB = tmp.newFile().getAbsolutePath(); - File i = new File(input_folder); - if(i.exists()) { - //remove anything that existed earlier - File bgdb = new File(inputDB); - if(bgdb.exists()) { - bgdb.delete(); - } - //load everything into a bg journal - OWLOntology dummy = OWLManager.createOWLOntologyManager().createOntology(IRI.create("http://example.org/dummy")); - BlazegraphMolecularModelManager m3 = new BlazegraphMolecularModelManager<>(dummy, curieHandler, modelIdPrefix, inputDB, null, go_lego_journal_file, true); - if(i.isDirectory()) { - FileUtils.listFiles(i, null, true).parallelStream().parallel().forEach(file-> { - if(file.getName().endsWith(".ttl")||file.getName().endsWith("owl")) { - LOGGER.info("Loading " + file); - try { - String modeluri = m3.importModelToDatabase(file, true); - } catch (OWLOntologyCreationException | RepositoryException | RDFParseException - | RDFHandlerException | IOException e) { - // TODO Auto-generated catch block - e.printStackTrace(); - } - } - }); - }else { - LOGGER.info("Loading " + i); - m3.importModelToDatabase(i, true); - } - LOGGER.info("loaded files into blazegraph journal: "+input_folder); - m3.dispose(); - } - return inputDB; - } - - private static String getJsonStringFromUri(URI uri) throws IOException { - final URL url = uri.toURL(); - final HttpURLConnection connection; - InputStream response = null; - // setup and open (actual connection) - connection = (HttpURLConnection) url.openConnection(); - connection.setInstanceFollowRedirects(true); // warning does not follow redirects from http to https - response = connection.getInputStream(); // opens the connection to the server - // get string response from stream - String json 
= IOUtils.toString(response); - - return json; - } - - - private static String getJsonStringFromPost(HttpPost post) throws IOException { - - CloseableHttpClient httpClient = HttpClients.createDefault(); - CloseableHttpResponse response = httpClient.execute(post); - String json = EntityUtils.toString(response.getEntity()); - - return json; - } - - private static JsonModel m3ResponseToJsonModel(M3BatchResponse response) { - JsonModel jsonModel = new JsonModel(); - jsonModel.annotations = response.data.annotations; - jsonModel.individuals = response.data.individuals; - jsonModel.facts = response.data.facts; - - return jsonModel; - } - - /* - * Removes Inferred Types from JsonModel - */ - private static void cleanJsonModel(JsonModel dirty) { - for (JsonOwlIndividual annotation : dirty.individuals) { - annotation.inferredType = null; - } - } - - private static boolean equalJsonSize(JsonModel a, JsonModel b) { - - cleanJsonModel(a); - cleanJsonModel(b); - - return a.equals(b); - } + private static final Logger LOGGER = Logger.getLogger(ARTHandlerTest.class); + static Server server; + static final String ontologyIRI = "http://purl.obolibrary.org/obo/go/extensions/go-lego.owl"; + static final String modelIdcurie = "http://model.geneontology.org/"; + static final String modelIdPrefix = "gomodel"; + static final String go_lego_journal_file = "/tmp/test-go-lego-blazegraph.jnl"; + static final String valid_model_folder = "src/test/resources/models/art-simple/"; + static final String model_save = "src/test/resources/models/tmp/"; + + static OWLOntology tbox_ontology; + static CurieHandler curieHandler; + static UndoAwareMolecularModelManager models; + private static JsonOrJsonpBatchHandler handler; + + @ClassRule + public static TemporaryFolder tmp = new TemporaryFolder(); + + /** + * @throws java.lang.Exception + */ + @BeforeClass + public static void setUpBeforeClass() throws Exception { + + LOGGER.info("Setup shex."); + File shex_schema_file = new File("src/test/resources/validate.shex"); + File shex_map_file = new File("src/test/resources/validate.shapemap"); + + LOGGER.info("Set up molecular model manager - loading files into a journal"); + // set curie handler + final CurieMappings localMappings = new CurieMappings.SimpleCurieMappings(Collections.singletonMap(modelIdcurie, modelIdPrefix)); + curieHandler = new MappedCurieHandler(DefaultCurieHandler.loadDefaultMappings(), localMappings); + String inputDB = makeBlazegraphJournal(valid_model_folder); + OWLOntologyManager ontman = OWLManager.createOWLOntologyManager(); + tbox_ontology = ontman.createOntology(IRI.create("http://example.org/dummy"));//empty tbox + models = new UndoAwareMolecularModelManager(tbox_ontology, curieHandler, modelIdPrefix, inputDB, model_save, go_lego_journal_file, true); + models.addTaxonMetadata(); + + LOGGER.info("Setup Jetty config."); + ResourceConfig resourceConfig = new ResourceConfig(); + resourceConfig.register(GsonMessageBodyHandler.class); + resourceConfig.register(RequireJsonpFilter.class); + + MinervaShexValidator shex = new MinervaShexValidator(shex_schema_file, shex_map_file, curieHandler, models.getGolego_repo()); + shex.setActive(true); + + //setup the config for the startup tool. 
+ MinervaStartUpConfig conf = new MinervaStartUpConfig(); + conf.reasonerOpt = "arachne"; + conf.shex = shex; + conf.port = 6800; + conf.contextString = "/"; + + InferenceProviderCreator ipc = StartUpTool.createInferenceProviderCreator(conf.reasonerOpt, models, conf.shex); + + ModelARTHandler artHandler = new ModelARTHandler(models, ipc); + //set up a handler for testing with M3BatchRequest service + handler = new JsonOrJsonpBatchHandler(models, "development", ipc, + Collections.emptySet(), (ExternalLookupService) null); + + resourceConfig = resourceConfig.registerInstances(artHandler); + + // setup jetty server port, buffers and context path + server = new Server(); + // create connector with port and custom buffer sizes + + HttpConfiguration http_config = new HttpConfiguration(); + + http_config.setRequestHeaderSize(conf.requestHeaderSize); + ServerConnector connector = new ServerConnector(server, new HttpConnectionFactory(http_config)); + connector.setPort(conf.port); + server.addConnector(connector); + ServletContextHandler context = new ServletContextHandler(ServletContextHandler.SESSIONS); + context.setContextPath(conf.contextString); + server.setHandler(context); + ServletHolder h = new ServletHolder(new ServletContainer(resourceConfig)); + context.addServlet(h, "/*"); + + // start jetty server + LOGGER.info("Start server on port: " + conf.port + " context: " + conf.contextString); + server.start(); + + } + + /** + * @throws java.lang.Exception + */ + @AfterClass + public static void tearDownAfterClass() throws Exception { + models.dispose(); + server.stop(); + if (handler != null) { + handler = null; + } + } + + /** + * @throws java.lang.Exception + */ + @Before + public void setUp() throws Exception { + } + + /** + * @throws java.lang.Exception + */ + @After + public void tearDown() throws Exception { + } + + @Test + public final void testGetModel() throws URISyntaxException, IOException, OWLOntologyStorageException, OWLOntologyCreationException, RepositoryException, UnknownIdentifierException { + //get a hold of a test model + String mid = "5fbeae9c00000008"; + final String modelId = "http://model.geneontology.org/" + mid; + M3BatchResponse response = BatchTestTools.getModel(handler, modelId, false); + + ModelARTResult result = getStoredModel(mid); + //Repeat it after + response = BatchTestTools.getModel(handler, modelId, false); + + assertFalse("Model should not be modified", response.data.modifiedFlag); + assertTrue("Active Model should be the same as stored model", equalJsonSize(result.getActiveModel(), result.getStoredModel())); + + //Set up m3Batch response to compare active models + JsonModel m3JsonModel = m3ResponseToJsonModel(response); + assertTrue("Active Model should be the same as m3Batch active model", equalJsonSize(result.getActiveModel(), m3JsonModel)); + } + + @Test + public final void testStoredModelModified() throws URISyntaxException, IOException, OWLOntologyStorageException, OWLOntologyCreationException, RepositoryException, UnknownIdentifierException { + //get a hold of a test model + String mid = "5fbeae9c00000008"; + final String modelId = "http://model.geneontology.org/" + mid; + + M3Request r = BatchTestTools.addIndividual(modelId, "GO:0003674"); + List batch = Collections.singletonList(r); + M3BatchResponse response = handler.m3Batch("test-user", Collections.emptySet(), "test-intention", "foo-packet-id", + batch.toArray(new M3Request[batch.size()]), false, true); + + //Get the model + response = BatchTestTools.getModel(handler, modelId, false); + 
ModelARTResult result = getStoredModel(mid); + //Repeat + response = BatchTestTools.getModel(handler, modelId, false); + + int activeIndividualCount = result.getActiveModel().individuals.length; + int storedIndividualCount = result.getStoredModel().individuals.length; + + assertTrue("Model should be modified", response.data.modifiedFlag); + assertFalse("Active model should not be the same as stored model", equalJsonSize(result.getActiveModel(), result.getStoredModel())); + + //Since we added one individual + assertTrue("Active individual count should be one more than the stored individual count", activeIndividualCount == storedIndividualCount + 1); + + //Set up m3Batch response to compare active models + JsonModel m3JsonModel = m3ResponseToJsonModel(response); + assertTrue("Active Model should be the same as m3Batch active model", equalJsonSize(result.getActiveModel(), m3JsonModel)); + + //Store the model + response = storeModel(modelId); + result = getStoredModel(mid); + + assertFalse("Model should not be modified", response.data.modifiedFlag); + assertTrue("Active model should be the same as stored model", equalJsonSize(result.getActiveModel(), result.getStoredModel())); + + //Set up m3Batch response to compare active models + m3JsonModel = m3ResponseToJsonModel(response); + assertTrue("Active Model should be the same as m3Batch active model", equalJsonSize(result.getActiveModel(), m3JsonModel)); + + //After store active should be equal to stored + storedIndividualCount = result.getStoredModel().individuals.length; + assertTrue("Active individual count should equal the stored individual count", activeIndividualCount == storedIndividualCount); + + + //Get the model + response = BatchTestTools.getModel(handler, modelId, false); + + assertFalse("Model should not be modified", response.data.modifiedFlag); + + //Set up m3Batch response to compare active models + m3JsonModel = m3ResponseToJsonModel(response); + assertTrue("Active Model should be the same as m3Batch active model", equalJsonSize(result.getActiveModel(), m3JsonModel)); + + } + + @Test + public final void testStoredModelReset() throws URISyntaxException, IOException, OWLOntologyStorageException, OWLOntologyCreationException, RepositoryException, UnknownIdentifierException { + //get a hold of a test model + String mid = "5fbeae9c00000008"; + final String modelId = "http://model.geneontology.org/" + mid; + + M3Request r = BatchTestTools.addIndividual(modelId, "GO:0003674"); + List batch = Collections.singletonList(r); + M3BatchResponse response = handler.m3Batch("test-user", Collections.emptySet(), "test-intention", "foo-packet-id", + batch.toArray(new M3Request[batch.size()]), false, true); + //Reset the model + response = resetModel(modelId); + + //Get the model + response = BatchTestTools.getModel(handler, modelId, false); + ModelARTResult result = getStoredModel(mid); + + assertFalse("Model should not be modified", response.data.modifiedFlag); + assertTrue("Active Model should be the same as stored model", equalJsonSize(result.getActiveModel(), result.getStoredModel())); + + //Set up m3Batch response to compare active models + JsonModel m3JsonModel = m3ResponseToJsonModel(response); + assertTrue("Active Model should be the same as m3Batch active model", equalJsonSize(result.getActiveModel(), m3JsonModel)); + + } + + + public static ModelARTResult getStoredModel(String modelId) throws URISyntaxException, IOException { + URIBuilder builder = new URIBuilder("http://127.0.0.1:6800/search/stored"); + builder.addParameter("id", "gomodel:" + modelId);
+ URI artUrl = builder.build(); + String json_result = getJsonStringFromUri(artUrl); + Gson g = new Gson(); + ModelARTResult result = g.fromJson(json_result, ModelARTResult.class); + + LOGGER.info("Model Stored Model " + json_result); + + return result; + } + + private static M3BatchResponse storeModel(String modelId) { + M3Request r = new M3Request(); + r.entity = Entity.model; + r.operation = Operation.storeModel; + r.arguments = new M3Argument(); + r.arguments.modelId = modelId; + List batch = Collections.singletonList(r); + M3BatchResponse response = handler.m3Batch("test-user", Collections.emptySet(), "test-intention", "foo-packet-id", + batch.toArray(new M3Request[batch.size()]), false, true); + + return response; + } + + private static M3BatchResponse resetModel(String modelId) { + M3Request r = new M3Request(); + r.entity = Entity.model; + r.operation = Operation.resetModel; + r.arguments = new M3Argument(); + r.arguments.modelId = modelId; + List batch = Collections.singletonList(r); + M3BatchResponse response = handler.m3Batch("test-user", Collections.emptySet(), "test-intention", "foo-packet-id", + batch.toArray(new M3Request[batch.size()]), false, true); + + return response; + } + + + private static String makeBlazegraphJournal(String input_folder) throws IOException, OWLOntologyCreationException, RepositoryException, RDFParseException, RDFHandlerException { + String inputDB = tmp.newFile().getAbsolutePath(); + File i = new File(input_folder); + if (i.exists()) { + //remove anything that existed earlier + File bgdb = new File(inputDB); + if (bgdb.exists()) { + bgdb.delete(); + } + //load everything into a bg journal + OWLOntology dummy = OWLManager.createOWLOntologyManager().createOntology(IRI.create("http://example.org/dummy")); + BlazegraphMolecularModelManager m3 = new BlazegraphMolecularModelManager<>(dummy, curieHandler, modelIdPrefix, inputDB, null, go_lego_journal_file, true); + if (i.isDirectory()) { + FileUtils.listFiles(i, null, true).parallelStream().parallel().forEach(file -> { + if (file.getName().endsWith(".ttl") || file.getName().endsWith("owl")) { + LOGGER.info("Loading " + file); + try { + String modeluri = m3.importModelToDatabase(file, true); + } catch (OWLOntologyCreationException | RepositoryException | RDFParseException + | RDFHandlerException | IOException e) { + // TODO Auto-generated catch block + e.printStackTrace(); + } + } + }); + } else { + LOGGER.info("Loading " + i); + m3.importModelToDatabase(i, true); + } + LOGGER.info("loaded files into blazegraph journal: " + input_folder); + m3.dispose(); + } + return inputDB; + } + + private static String getJsonStringFromUri(URI uri) throws IOException { + final URL url = uri.toURL(); + final HttpURLConnection connection; + InputStream response = null; + // setup and open (actual connection) + connection = (HttpURLConnection) url.openConnection(); + connection.setInstanceFollowRedirects(true); // warning does not follow redirects from http to https + response = connection.getInputStream(); // opens the connection to the server + // get string response from stream + String json = IOUtils.toString(response); + + return json; + } + + + private static String getJsonStringFromPost(HttpPost post) throws IOException { + + CloseableHttpClient httpClient = HttpClients.createDefault(); + CloseableHttpResponse response = httpClient.execute(post); + String json = EntityUtils.toString(response.getEntity()); + + return json; + } + + private static JsonModel m3ResponseToJsonModel(M3BatchResponse response) { + JsonModel 
jsonModel = new JsonModel(); + jsonModel.annotations = response.data.annotations; + jsonModel.individuals = response.data.individuals; + jsonModel.facts = response.data.facts; + + return jsonModel; + } + + /* + * Removes Inferred Types from JsonModel + */ + private static void cleanJsonModel(JsonModel dirty) { + for (JsonOwlIndividual annotation : dirty.individuals) { + annotation.inferredType = null; + } + } + + private static boolean equalJsonSize(JsonModel a, JsonModel b) { + + cleanJsonModel(a); + cleanJsonModel(b); + + return a.equals(b); + } } diff --git a/minerva-server/src/test/java/org/geneontology/minerva/server/handler/BatchModelHandlerTest.java b/minerva-server/src/test/java/org/geneontology/minerva/server/handler/BatchModelHandlerTest.java index 47c3f2b3..f55ece7a 100644 --- a/minerva-server/src/test/java/org/geneontology/minerva/server/handler/BatchModelHandlerTest.java +++ b/minerva-server/src/test/java/org/geneontology/minerva/server/handler/BatchModelHandlerTest.java @@ -1,7 +1,7 @@ package org.geneontology.minerva.server.handler; -import org.geneontology.minerva.MolecularModelManager.UnknownIdentifierException; import org.geneontology.minerva.MinervaOWLGraphWrapper; +import org.geneontology.minerva.MolecularModelManager.UnknownIdentifierException; import org.geneontology.minerva.UndoAwareMolecularModelManager; import org.geneontology.minerva.curie.CurieHandler; import org.geneontology.minerva.curie.CurieMappings; @@ -27,7 +27,6 @@ import org.semanticweb.owlapi.model.OWLObjectProperty; import org.semanticweb.owlapi.model.OWLOntologyCreationException; import org.semanticweb.owlapi.model.parameters.Imports; - import owltools.io.ParserWrapper; import java.io.IOException; @@ -38,2102 +37,2076 @@ @SuppressWarnings("unchecked") public class BatchModelHandlerTest { - @ClassRule - public static TemporaryFolder folder = new TemporaryFolder(); - - private static CurieHandler curieHandler = null; - private static JsonOrJsonpBatchHandler handler = null; - private static UndoAwareMolecularModelManager models = null; - private static Set importantRelations = null; - private final static DateGenerator dateGenerator = new DateGenerator(); - static final String ontology_journal_file = "/tmp/test-go-lego-blazegraph.jnl"; - static final String uid = "test-user"; - static final Set providedBy = Collections.singleton("test-provider"); - static final String intention = "test-intention"; - private static final String packetId = "foo-packet-id"; - - private static ExternalLookupService lookupService; - - @BeforeClass - public static void setUpBeforeClass() throws Exception { - init(new ParserWrapper()); - } - - static void init(ParserWrapper pw) throws OWLOntologyCreationException, IOException, UnknownIdentifierException { - final MinervaOWLGraphWrapper graph = pw.parseToOWLGraph("src/test/resources/go-lego-minimal.owl"); - final OWLObjectProperty legorelParent = StartUpTool.getRelation("http://purl.obolibrary.org/obo/LEGOREL_0000000", graph); - assertNotNull(legorelParent); - importantRelations = StartUpTool.getAssertedSubProperties(legorelParent, graph); - assertFalse(importantRelations.isEmpty()); - // curie handler - final String modelIdcurie = "gomodel"; - final String modelIdPrefix = "http://model.geneontology.org/"; - final CurieMappings localMappings = new CurieMappings.SimpleCurieMappings(Collections.singletonMap(modelIdcurie, modelIdPrefix)); - curieHandler = new MappedCurieHandler(DefaultCurieHandler.loadDefaultMappings(), localMappings); - InferenceProviderCreator ipc = 
CachingInferenceProviderCreatorImpl.createElk(false, null); - models = new UndoAwareMolecularModelManager(graph.getSourceOntology(), curieHandler, modelIdPrefix, folder.newFile().getAbsolutePath(), null, ontology_journal_file, true); - lookupService = createTestProteins(curieHandler); - handler = new JsonOrJsonpBatchHandler(models, "development", ipc, importantRelations, lookupService) { - - @Override - protected String generateDateString() { - // hook for overriding the date generation with a custom counter - if (dateGenerator.useCounter) { - int count = dateGenerator.counter; - dateGenerator.counter += 1; - return Integer.toString(count); - } - return super.generateDateString(); - } - }; - JsonOrJsonpBatchHandler.VALIDATE_BEFORE_SAVE = true; - } - - private static ExternalLookupService createTestProteins(CurieHandler curieHandler) throws UnknownIdentifierException { - List testEntries = new ArrayList(); - testEntries.add(new LookupEntry(curieHandler.getIRI("UniProtKB:P0000"), - "P0000", "protein", "fake-taxon-id", null)); - testEntries.add(new LookupEntry(curieHandler.getIRI("UniProtKB:P0001"), - "P0001", "protein", "fake-taxon-id", null)); - testEntries.add(new LookupEntry(curieHandler.getIRI("UniProtKB:P0002"), - "P0002", "protein", "fake-taxon-id", null)); - testEntries.add(new LookupEntry(curieHandler.getIRI("UniProtKB:P0003"), - "P0003", "protein", "fake-taxon-id", null)); - return new TableLookupService(testEntries); - } - - @AfterClass - public static void tearDownAfterClass() throws Exception { - if (handler != null) { - handler = null; - } - if (models != null) { - models.dispose(); - } - } - - @Test - public void testTypeOperations() throws Exception { - final String modelId = generateBlankModel(); - - List batch = new ArrayList(); - M3Request r = BatchTestTools.addIndividual(modelId, "GO:0006915"); // apoptotic process - r.arguments.assignToVariable = "i1"; - r.arguments.values = new JsonAnnotation[2]; - r.arguments.values[0] = JsonTools.create(AnnotationShorthand.comment, "comment 1", null); - r.arguments.values[1] = JsonTools.create(AnnotationShorthand.comment, "comment 2", null); - batch.add(r); - - r = new M3Request(); - r.entity = Entity.individual; - r.operation = Operation.addType; - r.arguments = new M3Argument(); - r.arguments.modelId = modelId; - r.arguments.individual = "i1"; - r.arguments.expressions = new JsonOwlObject[1]; - r.arguments.expressions[0] = BatchTestTools.createSvf("BFO:0000066", "GO:0005623"); // occurs_in, cell - batch.add(r); - - r = new M3Request(); - r.entity = Entity.individual; - r.operation = Operation.addType; - r.arguments = new M3Argument(); - r.arguments.modelId = modelId; - r.arguments.individual = "i1"; - r.arguments.expressions = new JsonOwlObject[1]; - r.arguments.expressions[0] = new JsonOwlObject(); - r.arguments.expressions[0].type = JsonOwlObjectType.SomeValueFrom; - r.arguments.expressions[0].property = new JsonOwlObject(); - r.arguments.expressions[0].property.type = JsonOwlObjectType.ObjectProperty; - r.arguments.expressions[0].property.id = "RO:0002333"; // enabled_by - // "GO:0043234 and (('has part' some UniProtKB:P0002) OR ('has part' some UniProtKB:P0003))"; - r.arguments.expressions[0].filler = createComplexExpr(); - batch.add(r); - - r = BatchTestTools.addIndividual(modelId, "GO:0043276", - BatchTestTools.createSvf("RO:0002333", "GO:0043234")); // enabled_by - batch.add(r); - - M3BatchResponse resp2 = executeBatch(batch, false); - String individual1 = null; - String individual2 = null; - JsonOwlIndividual[] iObjs = 
BatchTestTools.responseIndividuals(resp2); - assertEquals(2, iObjs.length); - for(JsonOwlIndividual iObj : iObjs) { - String clsId = null; - for(JsonOwlObject currentType : iObj.type) { - if (currentType.type == JsonOwlObjectType.Class) { - clsId = currentType.id; - } - } - if (clsId.contains("6915")) { - individual1 = iObj.id; - assertEquals(3, iObj.type.length); - } - else { - individual2 = iObj.id; - assertEquals(2, iObj.type.length); - } - } - assertNotNull(individual1); - assertNotNull(individual2); - - // create fact - final M3Request r3 = new M3Request(); - r3.entity = Entity.edge; - r3.operation = Operation.add; - r3.arguments = new M3Argument(); - r3.arguments.modelId = modelId; - r3.arguments.subject = individual1; - r3.arguments.object = individual2; - r3.arguments.predicate = "BFO:0000050"; // part_of - - execute(r3, false); - - // delete complex expression type - final M3Request r4 = new M3Request(); - r4.entity = Entity.individual; - r4.operation = Operation.removeType; - r4.arguments = new M3Argument(); - r4.arguments.modelId = modelId; - r4.arguments.individual = individual1; - r4.arguments.expressions = new JsonOwlObject[1]; - r4.arguments.expressions[0] = new JsonOwlObject(); - r4.arguments.expressions[0].type = JsonOwlObjectType.SomeValueFrom; - r4.arguments.expressions[0].property = new JsonOwlObject(); - r4.arguments.expressions[0].property.type = JsonOwlObjectType.ObjectProperty; - r4.arguments.expressions[0].property.id = "RO:0002333"; // enabled_by - // "GO:0043234 and (('has part' some UniProtKB:P0002) OR ('has part' some UniProtKB:P0003))"; - r4.arguments.expressions[0].filler = createComplexExpr(); - - M3BatchResponse resp4 = execute(r4, false); - JsonOwlIndividual[] iObjs4 = BatchTestTools.responseIndividuals(resp4); - assertEquals(1, iObjs4.length); - JsonOwlObject[] types = iObjs4[0].type; - assertEquals(2, types.length); - } - - private static JsonOwlObject createComplexExpr() { - // "GO:0043234 and (('has part' some UniProtKB:P0002) OR ('has part' some UniProtKB:P0003))"; - JsonOwlObject expr = new JsonOwlObject(); - expr.type = JsonOwlObjectType.IntersectionOf; - expr.expressions = new JsonOwlObject[2]; - - // GO:0043234 - expr.expressions[0] = new JsonOwlObject(); - expr.expressions[0].type = JsonOwlObjectType.Class; - expr.expressions[0].id = "GO:0043234"; - - // OR - expr.expressions[1] = new JsonOwlObject(); - expr.expressions[1].type = JsonOwlObjectType.UnionOf; - expr.expressions[1].expressions = new JsonOwlObject[2]; - - //'has part' some UniProtKB:P0002 - expr.expressions[1].expressions[0] = BatchTestTools.createSvf("BFO:0000051", "UniProtKB:P0002"); - - // 'has part' some UniProtKB:P0003 - expr.expressions[1].expressions[1] = BatchTestTools.createSvf("BFO:0000051", "UniProtKB:P0003"); - - return expr; - } - - - - @Test - public void testAddIndividual() throws Exception { - final String modelId = generateBlankModel(); - - // create one individuals - final M3Request r = new M3Request(); - r.entity = Entity.individual; - r.operation = Operation.add; - r.arguments = new M3Argument(); - r.arguments.modelId = modelId; - r.arguments.expressions = new JsonOwlObject[1]; - r.arguments.expressions[0] = new JsonOwlObject(); - r.arguments.expressions[0].type = JsonOwlObjectType.Class; - r.arguments.expressions[0].id = "GO:0006915"; // apoptotic process - - M3BatchResponse resp = execute(r, false); - assertEquals(resp.message, M3BatchResponse.MESSAGE_TYPE_SUCCESS, resp.messageType); - JsonOwlIndividual[] iObjs = BatchTestTools.responseIndividuals(resp); - 
assertEquals(1, iObjs.length); - } - - @Test - public void testModelAnnotations() throws Exception { - final String modelId = generateBlankModel(); - - final JsonAnnotation[] annotations1 = getModelAnnotations(modelId); - // creation date - // user id - // providedBy - // model state - assertEquals(4, annotations1.length); - - // create annotations - final M3Request r1 = new M3Request(); - r1.entity = Entity.model; - r1.operation = Operation.addAnnotation; - r1.arguments = new M3Argument(); - r1.arguments.modelId = modelId; - - r1.arguments.values = new JsonAnnotation[2]; - r1.arguments.values[0] = new JsonAnnotation(); - r1.arguments.values[0].key = AnnotationShorthand.comment.name(); - r1.arguments.values[0].value = "comment 1"; - r1.arguments.values[1] = new JsonAnnotation(); - r1.arguments.values[1].key = AnnotationShorthand.comment.name(); - r1.arguments.values[1].value = "comment 2"; - - execute(r1, false); - - final JsonAnnotation[] annotations2 = getModelAnnotations(modelId); - assertNotNull(annotations2); - assertEquals(6, annotations2.length); - - - // remove one annotation - final M3Request r2 = new M3Request(); - r2.entity = Entity.model; - r2.operation = Operation.removeAnnotation; - r2.arguments = new M3Argument(); - r2.arguments.modelId = modelId; - - r2.arguments.values = new JsonAnnotation[1]; - r2.arguments.values[0] = new JsonAnnotation(); - r2.arguments.values[0].key = AnnotationShorthand.comment.name(); - r2.arguments.values[0].value = "comment 1"; - - execute(r2, false); - - final JsonAnnotation[] annotations3 = getModelAnnotations(modelId); - assertNotNull(annotations3); - assertEquals(5, annotations3.length); - } - - @Test - public void testModelAnnotationsTemplate() throws Exception { - final String modelId = generateBlankModel(); - final JsonAnnotation[] annotations1 = getModelAnnotations(modelId); - // creation date - // user id - // providedBy - // model state - assertEquals(4, annotations1.length); - - // create template annotation - final M3Request r1 = new M3Request(); - r1.entity = Entity.model; - r1.operation = Operation.addAnnotation; - r1.arguments = new M3Argument(); - r1.arguments.modelId = modelId; - - r1.arguments.values = new JsonAnnotation[1]; - r1.arguments.values[0] = new JsonAnnotation(); - r1.arguments.values[0].key = AnnotationShorthand.templatestate.getShorthand(); - r1.arguments.values[0].value = Boolean.TRUE.toString(); - - execute(r1, false); - - final JsonAnnotation[] annotations2 = getModelAnnotations(modelId); - assertNotNull(annotations2); - assertEquals(5, annotations2.length); - - // remove one annotation - final M3Request r2 = new M3Request(); - r2.entity = Entity.model; - r2.operation = Operation.removeAnnotation; - r2.arguments = new M3Argument(); - r2.arguments.modelId = modelId; - - r2.arguments.values = new JsonAnnotation[1]; - r2.arguments.values[0] = new JsonAnnotation(); - r2.arguments.values[0].key = AnnotationShorthand.modelstate.getShorthand(); - r2.arguments.values[0].value = "development"; - - final M3Request r3 = new M3Request(); - r3.entity = Entity.model; - r3.operation = Operation.addAnnotation; - r3.arguments = new M3Argument(); - r3.arguments.modelId = modelId; - - r3.arguments.values = new JsonAnnotation[1]; - r3.arguments.values[0] = new JsonAnnotation(); - r3.arguments.values[0].key = AnnotationShorthand.modelstate.getShorthand(); - r3.arguments.values[0].value = "review"; - - executeBatch(Arrays.asList(r2, r3), false); - - final JsonAnnotation[] annotations3 = getModelAnnotations(modelId); - 
assertNotNull(annotations3); - assertEquals(5, annotations3.length); - String foundModelState = null; - for (JsonAnnotation annotation : annotations3) { - if (AnnotationShorthand.modelstate.getShorthand().equals(annotation.key)) { - assertNull("Multiple model states are not allowed", foundModelState); - foundModelState = annotation.value; - } - } - assertEquals("review", foundModelState); - } - - @Test - public void testMultipleMeta() throws Exception { - //models.dispose(); - - // get meta - final M3Request r = new M3Request(); - r.entity = Entity.meta; - r.operation = Operation.get; - - M3BatchResponse response = execute(r, false); - final JsonRelationInfo[] relations = BatchTestTools.responseRelations(response); - final OWLObjectProperty part_of = OWLManager.getOWLDataFactory().getOWLObjectProperty(IRI.create("http://purl.obolibrary.org/obo/BFO_0000050")); - assertNotNull(part_of); - final String partOfJsonId = models.getCuriHandler().getCuri(part_of); - boolean hasPartOf = false; - for (JsonRelationInfo info : relations) { - String id = info.id; - assertNotNull(id); - if (partOfJsonId.equals(id)) { - assertEquals(true, info.relevant); - hasPartOf = true; - } - } - assertTrue(relations.length > 20); - assertTrue(hasPartOf); - - final JsonEvidenceInfo[] evidences = BatchTestTools.responseEvidences(response); - assertTrue(evidences.length > 100); - - final Map> modelIds = BatchTestTools.responseModelsMeta(response); - assertFalse(modelIds.size()==0); - } - - @Test - public void testFailOnMetaAndChange() throws Exception { + @ClassRule + public static TemporaryFolder folder = new TemporaryFolder(); + + private static CurieHandler curieHandler = null; + private static JsonOrJsonpBatchHandler handler = null; + private static UndoAwareMolecularModelManager models = null; + private static Set importantRelations = null; + private final static DateGenerator dateGenerator = new DateGenerator(); + static final String ontology_journal_file = "/tmp/test-go-lego-blazegraph.jnl"; + static final String uid = "test-user"; + static final Set providedBy = Collections.singleton("test-provider"); + static final String intention = "test-intention"; + private static final String packetId = "foo-packet-id"; + + private static ExternalLookupService lookupService; + + @BeforeClass + public static void setUpBeforeClass() throws Exception { + init(new ParserWrapper()); + } + + static void init(ParserWrapper pw) throws OWLOntologyCreationException, IOException, UnknownIdentifierException { + final MinervaOWLGraphWrapper graph = pw.parseToOWLGraph("src/test/resources/go-lego-minimal.owl"); + final OWLObjectProperty legorelParent = StartUpTool.getRelation("http://purl.obolibrary.org/obo/LEGOREL_0000000", graph); + assertNotNull(legorelParent); + importantRelations = StartUpTool.getAssertedSubProperties(legorelParent, graph); + assertFalse(importantRelations.isEmpty()); + // curie handler + final String modelIdcurie = "gomodel"; + final String modelIdPrefix = "http://model.geneontology.org/"; + final CurieMappings localMappings = new CurieMappings.SimpleCurieMappings(Collections.singletonMap(modelIdcurie, modelIdPrefix)); + curieHandler = new MappedCurieHandler(DefaultCurieHandler.loadDefaultMappings(), localMappings); + InferenceProviderCreator ipc = CachingInferenceProviderCreatorImpl.createElk(false, null); + models = new UndoAwareMolecularModelManager(graph.getSourceOntology(), curieHandler, modelIdPrefix, folder.newFile().getAbsolutePath(), null, ontology_journal_file, true); + lookupService = 
createTestProteins(curieHandler); + handler = new JsonOrJsonpBatchHandler(models, "development", ipc, importantRelations, lookupService) { + + @Override + protected String generateDateString() { + // hook for overriding the date generation with a custom counter + if (dateGenerator.useCounter) { + int count = dateGenerator.counter; + dateGenerator.counter += 1; + return Integer.toString(count); + } + return super.generateDateString(); + } + }; + JsonOrJsonpBatchHandler.VALIDATE_BEFORE_SAVE = true; + } + + private static ExternalLookupService createTestProteins(CurieHandler curieHandler) throws UnknownIdentifierException { + List testEntries = new ArrayList(); + testEntries.add(new LookupEntry(curieHandler.getIRI("UniProtKB:P0000"), + "P0000", "protein", "fake-taxon-id", null)); + testEntries.add(new LookupEntry(curieHandler.getIRI("UniProtKB:P0001"), + "P0001", "protein", "fake-taxon-id", null)); + testEntries.add(new LookupEntry(curieHandler.getIRI("UniProtKB:P0002"), + "P0002", "protein", "fake-taxon-id", null)); + testEntries.add(new LookupEntry(curieHandler.getIRI("UniProtKB:P0003"), + "P0003", "protein", "fake-taxon-id", null)); + return new TableLookupService(testEntries); + } + + @AfterClass + public static void tearDownAfterClass() throws Exception { + if (handler != null) { + handler = null; + } + if (models != null) { + models.dispose(); + } + } + + @Test + public void testTypeOperations() throws Exception { + final String modelId = generateBlankModel(); + + List batch = new ArrayList(); + M3Request r = BatchTestTools.addIndividual(modelId, "GO:0006915"); // apoptotic process + r.arguments.assignToVariable = "i1"; + r.arguments.values = new JsonAnnotation[2]; + r.arguments.values[0] = JsonTools.create(AnnotationShorthand.comment, "comment 1", null); + r.arguments.values[1] = JsonTools.create(AnnotationShorthand.comment, "comment 2", null); + batch.add(r); + + r = new M3Request(); + r.entity = Entity.individual; + r.operation = Operation.addType; + r.arguments = new M3Argument(); + r.arguments.modelId = modelId; + r.arguments.individual = "i1"; + r.arguments.expressions = new JsonOwlObject[1]; + r.arguments.expressions[0] = BatchTestTools.createSvf("BFO:0000066", "GO:0005623"); // occurs_in, cell + batch.add(r); + + r = new M3Request(); + r.entity = Entity.individual; + r.operation = Operation.addType; + r.arguments = new M3Argument(); + r.arguments.modelId = modelId; + r.arguments.individual = "i1"; + r.arguments.expressions = new JsonOwlObject[1]; + r.arguments.expressions[0] = new JsonOwlObject(); + r.arguments.expressions[0].type = JsonOwlObjectType.SomeValueFrom; + r.arguments.expressions[0].property = new JsonOwlObject(); + r.arguments.expressions[0].property.type = JsonOwlObjectType.ObjectProperty; + r.arguments.expressions[0].property.id = "RO:0002333"; // enabled_by + // "GO:0043234 and (('has part' some UniProtKB:P0002) OR ('has part' some UniProtKB:P0003))"; + r.arguments.expressions[0].filler = createComplexExpr(); + batch.add(r); + + r = BatchTestTools.addIndividual(modelId, "GO:0043276", + BatchTestTools.createSvf("RO:0002333", "GO:0043234")); // enabled_by + batch.add(r); + + M3BatchResponse resp2 = executeBatch(batch, false); + String individual1 = null; + String individual2 = null; + JsonOwlIndividual[] iObjs = BatchTestTools.responseIndividuals(resp2); + assertEquals(2, iObjs.length); + for (JsonOwlIndividual iObj : iObjs) { + String clsId = null; + for (JsonOwlObject currentType : iObj.type) { + if (currentType.type == JsonOwlObjectType.Class) { + clsId = 
currentType.id; + } + } + if (clsId.contains("6915")) { + individual1 = iObj.id; + assertEquals(3, iObj.type.length); + } else { + individual2 = iObj.id; + assertEquals(2, iObj.type.length); + } + } + assertNotNull(individual1); + assertNotNull(individual2); + + // create fact + final M3Request r3 = new M3Request(); + r3.entity = Entity.edge; + r3.operation = Operation.add; + r3.arguments = new M3Argument(); + r3.arguments.modelId = modelId; + r3.arguments.subject = individual1; + r3.arguments.object = individual2; + r3.arguments.predicate = "BFO:0000050"; // part_of + + execute(r3, false); + + // delete complex expression type + final M3Request r4 = new M3Request(); + r4.entity = Entity.individual; + r4.operation = Operation.removeType; + r4.arguments = new M3Argument(); + r4.arguments.modelId = modelId; + r4.arguments.individual = individual1; + r4.arguments.expressions = new JsonOwlObject[1]; + r4.arguments.expressions[0] = new JsonOwlObject(); + r4.arguments.expressions[0].type = JsonOwlObjectType.SomeValueFrom; + r4.arguments.expressions[0].property = new JsonOwlObject(); + r4.arguments.expressions[0].property.type = JsonOwlObjectType.ObjectProperty; + r4.arguments.expressions[0].property.id = "RO:0002333"; // enabled_by + // "GO:0043234 and (('has part' some UniProtKB:P0002) OR ('has part' some UniProtKB:P0003))"; + r4.arguments.expressions[0].filler = createComplexExpr(); + + M3BatchResponse resp4 = execute(r4, false); + JsonOwlIndividual[] iObjs4 = BatchTestTools.responseIndividuals(resp4); + assertEquals(1, iObjs4.length); + JsonOwlObject[] types = iObjs4[0].type; + assertEquals(2, types.length); + } + + private static JsonOwlObject createComplexExpr() { + // "GO:0043234 and (('has part' some UniProtKB:P0002) OR ('has part' some UniProtKB:P0003))"; + JsonOwlObject expr = new JsonOwlObject(); + expr.type = JsonOwlObjectType.IntersectionOf; + expr.expressions = new JsonOwlObject[2]; + + // GO:0043234 + expr.expressions[0] = new JsonOwlObject(); + expr.expressions[0].type = JsonOwlObjectType.Class; + expr.expressions[0].id = "GO:0043234"; + + // OR + expr.expressions[1] = new JsonOwlObject(); + expr.expressions[1].type = JsonOwlObjectType.UnionOf; + expr.expressions[1].expressions = new JsonOwlObject[2]; + + //'has part' some UniProtKB:P0002 + expr.expressions[1].expressions[0] = BatchTestTools.createSvf("BFO:0000051", "UniProtKB:P0002"); + + // 'has part' some UniProtKB:P0003 + expr.expressions[1].expressions[1] = BatchTestTools.createSvf("BFO:0000051", "UniProtKB:P0003"); + + return expr; + } + + + @Test + public void testAddIndividual() throws Exception { + final String modelId = generateBlankModel(); + + // create one individual + final M3Request r = new M3Request(); + r.entity = Entity.individual; + r.operation = Operation.add; + r.arguments = new M3Argument(); + r.arguments.modelId = modelId; + r.arguments.expressions = new JsonOwlObject[1]; + r.arguments.expressions[0] = new JsonOwlObject(); + r.arguments.expressions[0].type = JsonOwlObjectType.Class; + r.arguments.expressions[0].id = "GO:0006915"; // apoptotic process + + M3BatchResponse resp = execute(r, false); + assertEquals(resp.message, M3BatchResponse.MESSAGE_TYPE_SUCCESS, resp.messageType); + JsonOwlIndividual[] iObjs = BatchTestTools.responseIndividuals(resp); + assertEquals(1, iObjs.length); + } + + @Test + public void testModelAnnotations() throws Exception { + final String modelId = generateBlankModel(); + + final JsonAnnotation[] annotations1 = getModelAnnotations(modelId); + // creation date + // user id + //
providedBy + // model state + assertEquals(4, annotations1.length); + + // create annotations + final M3Request r1 = new M3Request(); + r1.entity = Entity.model; + r1.operation = Operation.addAnnotation; + r1.arguments = new M3Argument(); + r1.arguments.modelId = modelId; + + r1.arguments.values = new JsonAnnotation[2]; + r1.arguments.values[0] = new JsonAnnotation(); + r1.arguments.values[0].key = AnnotationShorthand.comment.name(); + r1.arguments.values[0].value = "comment 1"; + r1.arguments.values[1] = new JsonAnnotation(); + r1.arguments.values[1].key = AnnotationShorthand.comment.name(); + r1.arguments.values[1].value = "comment 2"; + + execute(r1, false); + + final JsonAnnotation[] annotations2 = getModelAnnotations(modelId); + assertNotNull(annotations2); + assertEquals(6, annotations2.length); + + + // remove one annotation + final M3Request r2 = new M3Request(); + r2.entity = Entity.model; + r2.operation = Operation.removeAnnotation; + r2.arguments = new M3Argument(); + r2.arguments.modelId = modelId; + + r2.arguments.values = new JsonAnnotation[1]; + r2.arguments.values[0] = new JsonAnnotation(); + r2.arguments.values[0].key = AnnotationShorthand.comment.name(); + r2.arguments.values[0].value = "comment 1"; + + execute(r2, false); + + final JsonAnnotation[] annotations3 = getModelAnnotations(modelId); + assertNotNull(annotations3); + assertEquals(5, annotations3.length); + } + + @Test + public void testModelAnnotationsTemplate() throws Exception { + final String modelId = generateBlankModel(); + final JsonAnnotation[] annotations1 = getModelAnnotations(modelId); + // creation date + // user id + // providedBy + // model state + assertEquals(4, annotations1.length); + + // create template annotation + final M3Request r1 = new M3Request(); + r1.entity = Entity.model; + r1.operation = Operation.addAnnotation; + r1.arguments = new M3Argument(); + r1.arguments.modelId = modelId; + + r1.arguments.values = new JsonAnnotation[1]; + r1.arguments.values[0] = new JsonAnnotation(); + r1.arguments.values[0].key = AnnotationShorthand.templatestate.getShorthand(); + r1.arguments.values[0].value = Boolean.TRUE.toString(); + + execute(r1, false); + + final JsonAnnotation[] annotations2 = getModelAnnotations(modelId); + assertNotNull(annotations2); + assertEquals(5, annotations2.length); + + // remove one annotation + final M3Request r2 = new M3Request(); + r2.entity = Entity.model; + r2.operation = Operation.removeAnnotation; + r2.arguments = new M3Argument(); + r2.arguments.modelId = modelId; + + r2.arguments.values = new JsonAnnotation[1]; + r2.arguments.values[0] = new JsonAnnotation(); + r2.arguments.values[0].key = AnnotationShorthand.modelstate.getShorthand(); + r2.arguments.values[0].value = "development"; + + final M3Request r3 = new M3Request(); + r3.entity = Entity.model; + r3.operation = Operation.addAnnotation; + r3.arguments = new M3Argument(); + r3.arguments.modelId = modelId; + + r3.arguments.values = new JsonAnnotation[1]; + r3.arguments.values[0] = new JsonAnnotation(); + r3.arguments.values[0].key = AnnotationShorthand.modelstate.getShorthand(); + r3.arguments.values[0].value = "review"; + + executeBatch(Arrays.asList(r2, r3), false); + + final JsonAnnotation[] annotations3 = getModelAnnotations(modelId); + assertNotNull(annotations3); + assertEquals(5, annotations3.length); + String foundModelState = null; + for (JsonAnnotation annotation : annotations3) { + if (AnnotationShorthand.modelstate.getShorthand().equals(annotation.key)) { + assertNull("Multiple model states are not 
allowed", foundModelState); + foundModelState = annotation.value; + } + } + assertEquals("review", foundModelState); + } + + @Test + public void testMultipleMeta() throws Exception { + //models.dispose(); + + // get meta + final M3Request r = new M3Request(); + r.entity = Entity.meta; + r.operation = Operation.get; + + M3BatchResponse response = execute(r, false); + final JsonRelationInfo[] relations = BatchTestTools.responseRelations(response); + final OWLObjectProperty part_of = OWLManager.getOWLDataFactory().getOWLObjectProperty(IRI.create("http://purl.obolibrary.org/obo/BFO_0000050")); + assertNotNull(part_of); + final String partOfJsonId = models.getCuriHandler().getCuri(part_of); + boolean hasPartOf = false; + for (JsonRelationInfo info : relations) { + String id = info.id; + assertNotNull(id); + if (partOfJsonId.equals(id)) { + assertEquals(true, info.relevant); + hasPartOf = true; + } + } + assertTrue(relations.length > 20); + assertTrue(hasPartOf); + + final JsonEvidenceInfo[] evidences = BatchTestTools.responseEvidences(response); + assertTrue(evidences.length > 100); + + final Map> modelIds = BatchTestTools.responseModelsMeta(response); + assertFalse(modelIds.size() == 0); + } + + @Test + public void testFailOnMetaAndChange() throws Exception { // models.dispose(); - - final String modelId = generateBlankModel(); - - final List batch1 = new ArrayList(); - batch1.add(BatchTestTools.addIndividual(modelId, "GO:0008150")); // biological process - - M3Request r = new M3Request(); - r.entity = Entity.meta; - r.operation = Operation.get; - batch1.add(r); - - M3BatchResponse response = handler.m3Batch(uid, providedBy, intention, packetId, batch1.toArray(new M3Request[batch1.size()]), false, true); - assertEquals(uid, response.uid); - assertEquals(intention, response.intention); - - assertEquals(M3BatchResponse.MESSAGE_TYPE_ERROR, response.messageType); - } - - @Test - public void testSaveAsNonMeta() throws Exception { - //models.dispose(); - - final String modelId = generateBlankModel(); - - final List batch1 = new ArrayList(); - batch1.add(BatchTestTools.addIndividual(modelId, "GO:0008150")); // biological process - - - M3Request r = new M3Request(); - r.entity = Entity.model; - r.operation = Operation.addAnnotation; - r.arguments = new M3Argument(); - r.arguments.modelId = modelId; - r.arguments.values = BatchTestTools.singleAnnotation(AnnotationShorthand.title, "foo"); - batch1.add(r); - - r = new M3Request(); - r.entity = Entity.model; - r.operation = Operation.storeModel; - r.arguments = new M3Argument(); - r.arguments.modelId = modelId; - batch1.add(r); - - M3BatchResponse response = executeBatch(batch1, false); - JsonOwlIndividual[] responseIndividuals = BatchTestTools.responseIndividuals(response); - assertEquals(1, responseIndividuals.length); - } - - @Test - public void testAddBlankModel() throws Exception { + + final String modelId = generateBlankModel(); + + final List batch1 = new ArrayList(); + batch1.add(BatchTestTools.addIndividual(modelId, "GO:0008150")); // biological process + + M3Request r = new M3Request(); + r.entity = Entity.meta; + r.operation = Operation.get; + batch1.add(r); + + M3BatchResponse response = handler.m3Batch(uid, providedBy, intention, packetId, batch1.toArray(new M3Request[batch1.size()]), false, true); + assertEquals(uid, response.uid); + assertEquals(intention, response.intention); + + assertEquals(M3BatchResponse.MESSAGE_TYPE_ERROR, response.messageType); + } + + @Test + public void testSaveAsNonMeta() throws Exception { + //models.dispose(); + 
+ final String modelId = generateBlankModel(); + + final List batch1 = new ArrayList(); + batch1.add(BatchTestTools.addIndividual(modelId, "GO:0008150")); // biological process + + + M3Request r = new M3Request(); + r.entity = Entity.model; + r.operation = Operation.addAnnotation; + r.arguments = new M3Argument(); + r.arguments.modelId = modelId; + r.arguments.values = BatchTestTools.singleAnnotation(AnnotationShorthand.title, "foo"); + batch1.add(r); + + r = new M3Request(); + r.entity = Entity.model; + r.operation = Operation.storeModel; + r.arguments = new M3Argument(); + r.arguments.modelId = modelId; + batch1.add(r); + + M3BatchResponse response = executeBatch(batch1, false); + JsonOwlIndividual[] responseIndividuals = BatchTestTools.responseIndividuals(response); + assertEquals(1, responseIndividuals.length); + } + + @Test + public void testAddBlankModel() throws Exception { // models.dispose(); - - final M3Request r1 = new M3Request(); - r1.entity = Entity.model; - r1.operation = Operation.add; - r1.arguments = new M3Argument(); - - M3BatchResponse response1 = execute(r1, false); - final String modelId1 = BatchTestTools.responseId(response1); - - final M3Request r2 = new M3Request(); - r2.entity = Entity.model; - r2.operation = Operation.add; - r2.arguments = new M3Argument(); - - M3BatchResponse response2 = execute(r2, false); - final String modelId2 = BatchTestTools.responseId(response2); - - assertNotEquals(modelId1, modelId2); - - final M3Request batch3 = new M3Request(); - batch3.entity = Entity.model; - batch3.operation = Operation.add; - batch3.arguments = new M3Argument(); - - M3BatchResponse response3 = execute(batch3, false); - final String modelId3 = BatchTestTools.responseId(response3); - - assertNotEquals(modelId1, modelId3); - assertNotEquals(modelId2, modelId3); - } - - @Test - public void testDelete() throws Exception { - //models.dispose(); - - final String modelId = generateBlankModel(); - - // create - final M3Request r1 = BatchTestTools.addIndividual(modelId, "GO:0008104", // protein localization - BatchTestTools.createSvf("RO:0002333", "UniProtKB:P0000"), // enabled_by - BatchTestTools.createSvf("BFO:0000050", "GO:0006915")); // part_of apoptotic process - - final M3BatchResponse response1 = execute(r1, false); - - JsonOwlIndividual[] iObjs1 = BatchTestTools.responseIndividuals(response1); - assertEquals(1, iObjs1.length); - JsonOwlIndividual individual1 = iObjs1[0]; - assertNotNull(individual1); - assertNotNull(individual1.id); - - JsonOwlObject[] types1 = individual1.type; - assertEquals(3, types1.length); - String apopId = null; - for(JsonOwlObject e : types1) { - if (JsonOwlObjectType.SomeValueFrom == e.type) { - if (e.filler.id.equals("GO:0006915")) { - apopId = e.filler.id; - break; - } - } - } - assertNotNull(apopId); - - // delete - final M3Request r2 = new M3Request(); - r2.entity = Entity.individual; - r2.operation = Operation.removeType; - r2.arguments = new M3Argument(); - r2.arguments.modelId = modelId; - r2.arguments.individual = individual1.id; - - r2.arguments.expressions = new JsonOwlObject[1]; - r2.arguments.expressions[0] = BatchTestTools.createSvf("BFO:0000050", apopId); // part_of - - final M3BatchResponse response2 = execute(r2, false); - - JsonOwlIndividual[] iObjs2 = BatchTestTools.responseIndividuals(response2); - assertEquals(1, iObjs2.length); - JsonOwlIndividual individual2 = iObjs2[0]; - assertNotNull(individual2); - JsonOwlObject[] types2 = individual2.type; - assertEquals(2, types2.length); - } - - @Test - public void 
testDeleteEdge() throws Exception { - //models.dispose(); - final String modelId = generateBlankModel(); - - // setup model - // simple three individuals (mf, bp, gene) with two facts: bp -p-> mf, mf -enabled_by-> gene - final List batch1 = new ArrayList(); - - // activity/mf - M3Request r = BatchTestTools.addIndividual(modelId, "GO:0003674"); // molecular function - r.arguments.assignToVariable = "mf"; - batch1.add(r); - - // process - r = BatchTestTools.addIndividual(modelId, "GO:0008150"); // biological process - r.arguments.assignToVariable = "bp"; - batch1.add(r); - - // gene - r = BatchTestTools.addIndividual(modelId, "UniProtKB:P0000"); // gene - r.arguments.assignToVariable = "gene"; - batch1.add(r); - - // activity -> process - r = BatchTestTools.addEdge(modelId, "mf", "BFO:0000050", "bp"); // part_of - batch1.add(r); // part_of - - // mf -enabled_by-> gene - r = BatchTestTools.addEdge(modelId, "mf", "RO:0002333", "gene"); // enabled_by - batch1.add(r); // part_of - - final M3BatchResponse response1 = executeBatch(batch1, false); - JsonOwlIndividual[] iObjs1 = BatchTestTools.responseIndividuals(response1); - assertEquals(3, iObjs1.length); - - String mf = null; - String bp = null; - for (JsonOwlIndividual iObj : iObjs1) { - String id = iObj.id; - assertNotNull(id); - JsonOwlObject[] types = iObj.type; - assertNotNull(types); - assertEquals(1, types.length); - JsonOwlObject typeObj = types[0]; - String typeId = typeObj.id; - assertNotNull(typeId); - if ("GO:0003674".equals(typeId)) { - mf = id; - } - else if ("GO:0008150".equals(typeId)) { - bp = id; - } - } - assertNotNull(mf); - assertNotNull(bp); - - - final List batch2 = new ArrayList(); - r = BatchTestTools.deleteEdge(modelId, mf, "BFO:0000050", bp); - batch2.add(r); - - final M3BatchResponse response2 = executeBatch(batch2, false); - assertEquals(M3BatchResponse.SIGNAL_MERGE, response2.signal); - JsonOwlIndividual[] iObjs2 = BatchTestTools.responseIndividuals(response2); - assertEquals(2, iObjs2.length); - } - - @Test - public void testDeleteEvidenceIndividuals() throws Exception { - //models.dispose(); - final String modelId = generateBlankModel(); - - // setup model - // simple four individuals (mf, bp, 2 evidences) with a fact in between bp and mf - final List batch1 = new ArrayList(); - - // evidence1 - M3Request r = BatchTestTools.addIndividual(modelId, "ECO:0000000"); // evidence from ECO - r.arguments.assignToVariable = "evidence-var1"; - r.arguments.values = BatchTestTools.singleAnnotation(AnnotationShorthand.source, "PMID:000000"); - batch1.add(r); - - // evidence2 - r = BatchTestTools.addIndividual(modelId, "ECO:0000001"); // evidence from ECO - r.arguments.assignToVariable = "evidence-var2"; - r.arguments.values = BatchTestTools.singleAnnotation(AnnotationShorthand.source, "PMID:000001"); - batch1.add(r); - - // evidence3 - r = BatchTestTools.addIndividual(modelId, "ECO:0000002"); // evidence from ECO - r.arguments.assignToVariable = "evidence-var3"; - r.arguments.values = BatchTestTools.singleAnnotation(AnnotationShorthand.source, "PMID:000002"); - batch1.add(r); - - // activity/mf - r = BatchTestTools.addIndividual(modelId, "GO:0003674"); // molecular function - r.arguments.assignToVariable = "mf"; - r.arguments.values = BatchTestTools.singleAnnotation(AnnotationShorthand.evidence, "evidence-var1"); - batch1.add(r); - - // process - r = BatchTestTools.addIndividual(modelId, "GO:0008150"); // biological process - r.arguments.values = BatchTestTools.singleAnnotation(AnnotationShorthand.evidence, "evidence-var3"); - 
r.arguments.assignToVariable = "bp"; - batch1.add(r); - - // activity -> process - r = BatchTestTools.addEdge(modelId, "mf", "BFO:0000050", "bp"); // part_of - r.arguments.values = BatchTestTools.singleAnnotation(AnnotationShorthand.evidence, "evidence-var2"); - batch1.add(r); // part_of - - final M3BatchResponse response1 = executeBatch(batch1, false); - - //run diff to show changes - //test diff command for comparison - M3Request dr = new M3Request(); - dr.entity = Entity.model; - dr.operation = Operation.diffModel; - dr.arguments = new M3Argument(); - dr.arguments.modelId = modelId; - M3BatchResponse diffresp = execute(dr, false); - String diff = diffresp.data.diffResult; - assertFalse(diff.equals("Ontologies are identical\n")); - - - // find individuals - JsonOwlIndividual[] iObjs1 = BatchTestTools.responseIndividuals(response1); - assertEquals(5, iObjs1.length); - String evidence1 = null; - String evidence2 = null; - String evidence3 = null; - String mf = null; - String bp = null; - for (JsonOwlIndividual iObj : iObjs1) { - String id = iObj.id; - assertNotNull(id); - JsonOwlObject[] types = iObj.type; - assertNotNull(types); - assertEquals(1, types.length); - JsonOwlObject typeObj = types[0]; - String typeId = typeObj.id; - assertNotNull(typeId); - if ("GO:0003674".equals(typeId)) { - mf = id; - } - else if ("GO:0008150".equals(typeId)) { - bp = id; - } - else if ("ECO:0000000".equals(typeId)) { - evidence1 = id; - } - else if ("ECO:0000001".equals(typeId)) { - evidence2 = id; - } - else if ("ECO:0000002".equals(typeId)) { - evidence3 = id; - } - } - assertNotNull(evidence1); - assertNotNull(evidence2); - assertNotNull(evidence3); - assertNotNull(mf); - assertNotNull(bp); - - // one edge - JsonOwlFact[] facts1 = BatchTestTools.responseFacts(response1); - assertEquals(1, facts1.length); - assertEquals(4, facts1[0].annotations.length); // evidence, date, contributor, provider - - // remove fact evidence - final List batch2 = new ArrayList(); - r = BatchTestTools.removeIndividual(modelId, evidence2); - batch2.add(r); - - executeBatch(batch2, false); - - final M3BatchResponse response3 = checkCounts(modelId, 4, 1); - JsonOwlFact[] factsObjs = BatchTestTools.responseFacts(response3); - assertEquals(3, factsObjs[0].annotations.length); // date and contributor remain - - // delete bp evidence instance - final List batch4 = new ArrayList(); - r = BatchTestTools.removeIndividual(modelId, evidence3); - batch4.add(r); - - executeBatch(batch4, false); - - final M3BatchResponse response5 = checkCounts(modelId, 3, 1); - JsonOwlIndividual[] indivdualObjs5 = BatchTestTools.responseIndividuals(response5); - boolean found = false; - for (JsonOwlIndividual iObj : indivdualObjs5) { - String id = iObj.id; - assertNotNull(id); - JsonOwlObject[] types = iObj.type; - assertNotNull(types); - assertEquals(1, types.length); - JsonOwlObject typeObj = types[0]; - String typeId = typeObj.id; - assertNotNull(typeId); - if ("GO:0008150".equals(typeId)) { - found = true; - assertTrue(iObj.annotations.length == 3); // date and contributor and provider remain - } - } - assertTrue(found); - - - // delete mf instance -> delete also mf evidence instance and fact - final List batch6 = new ArrayList(); - r = BatchTestTools.removeIndividual(modelId, mf); - batch6.add(r); - - executeBatch(batch6, false); - - M3BatchResponse response7 = checkCounts(modelId, 1, 0); - JsonOwlIndividual[] iObjs7 = BatchTestTools.responseIndividuals(response7); - assertEquals(bp, iObjs7[0].id); - } - - //FIXME @Test - public void 
testInconsistentModel() throws Exception { - //models.dispose(); - - final String modelId = generateBlankModel(); - - // create - final M3Request r = BatchTestTools.addIndividual(modelId, "GO:0009653", // anatomical structure morphogenesis - BatchTestTools.createClass("GO:0048856")); // anatomical structure development - - final M3BatchResponse response = execute(r, true); - assertTrue(response.isReasoned); - Boolean inconsistentFlag = BatchTestTools.responseInconsistent(response); - assertEquals(Boolean.TRUE, inconsistentFlag); - } - - //FIXME @Test - public void testInferencesRedundant() throws Exception { - //models.dispose(); - final String modelId = generateBlankModel(); - - // GO:0009826 ! unidimensional cell growth - // GO:0000902 ! cell morphogenesis - // should infer only one type: 'unidimensional cell growth' - // 'cell morphogenesis' is a super-class and redundant - - // create - final M3Request r = BatchTestTools.addIndividual(modelId, "GO:0000902", // cell morphogenesis - BatchTestTools.createClass("GO:0009826")); // unidimensional cell growth - - final M3BatchResponse response = execute(r, true); - assertTrue(response.isReasoned); - assertNull("Model should not be inconsistent", BatchTestTools.responseInconsistent(response)); - JsonOwlIndividual[] inferred = BatchTestTools.responseIndividuals(response); - assertNotNull(inferred); - assertEquals(1, inferred.length); - JsonOwlIndividual inferredData = inferred[0]; - JsonOwlObject[] types = inferredData.inferredType; - assertEquals(1, types.length); - JsonOwlObject type = types[0]; - assertEquals(JsonOwlObjectType.Class, type.type); - assertEquals("GO:0009826", type.id); - } - - //FIXME @Test - public void testTrivialInferences() throws Exception { - //models.dispose(); - - final String modelId = generateBlankModel(); - // create - final M3Request r = BatchTestTools.addIndividual(modelId, "GO:0051231"); // spindle elongation - - final M3BatchResponse response = execute(r, true); - assertTrue(response.isReasoned); - assertNull("Model should not be inconsistent", BatchTestTools.responseInconsistent(response)); - JsonOwlIndividual[] inferred = BatchTestTools.responseIndividuals(response); - assertNotNull(inferred); - assertEquals(1, inferred.length); - JsonOwlIndividual inferredData = inferred[0]; - assertNull(inferredData.inferredType); - } - - //FIXME @Test - public void testInferencesAdditional() throws Exception { - //models.dispose(); - - final String modelId = generateBlankModel(); - - // GO:0051231 ! spindle elongation - // part_of GO:0000278 ! mitotic cell cycle - // should infer one new type: GO:0000022 ! 
mitotic spindle elongation - - // create - final M3Request r = BatchTestTools.addIndividual(modelId, "GO:0051231", // spindle elongation - BatchTestTools.createSvf("BFO:0000050", "GO:0000278")); // part_of, mitotic cell cycle - - final M3BatchResponse response = execute(r, true); - assertTrue(response.isReasoned); - assertNull("Model should not be inconsistent", BatchTestTools.responseInconsistent(response)); - JsonOwlIndividual[] inferred = BatchTestTools.responseIndividuals(response); - assertNotNull(inferred); - assertEquals(1, inferred.length); - JsonOwlIndividual inferredData = inferred[0]; - JsonOwlObject[] types = inferredData.inferredType; - assertEquals(1, types.length); - JsonOwlObject type = types[0]; - assertEquals(JsonOwlObjectType.Class, type.type); - assertEquals("GO:0000022", type.id); - } - - @Test - public void testValidationBeforeSave() throws Exception { - assertTrue(JsonOrJsonpBatchHandler.VALIDATE_BEFORE_SAVE); - //models.dispose(); - - final String modelId = generateBlankModel(); - - // try to save - M3Request[] batch = new M3Request[1]; - batch[0] = new M3Request(); - batch[0].entity = Entity.model; - batch[0].operation = Operation.storeModel; - batch[0].arguments = new M3Argument(); - batch[0].arguments.modelId = modelId; - M3BatchResponse resp1 = handler.m3Batch(uid, providedBy, intention, packetId, batch, false, true); - assertEquals("This operation must fail as the model has no title or individuals", M3BatchResponse.MESSAGE_TYPE_ERROR, resp1.messageType); - assertNotNull(resp1.commentary); - assertTrue(resp1.commentary.contains("title")); - } - - @Test - public void testPrivileged() throws Exception { - M3Request[] batch = new M3Request[1]; - batch[0] = new M3Request(); - batch[0].entity = Entity.model; - batch[0].operation = Operation.add; - M3BatchResponse resp1 = handler.m3Batch(uid, providedBy, intention, packetId, batch, false, false); - assertEquals(M3BatchResponse.MESSAGE_TYPE_ERROR, resp1.messageType); - assertTrue(resp1.message.contains("Insufficient")); - } - - //FIXME @Test - public void testExportLegacy() throws Exception { - final String modelId = generateBlankModel(); - - // create - final M3Request r1 = BatchTestTools.addIndividual(modelId, "GO:0008104", // protein localization - BatchTestTools.createSvf("RO:0002333", "UniProtKB:P0000"), // enabled_by - BatchTestTools.createSvf("BFO:0000050", "GO:0006915")); // part_of - - execute(r1, false); - - - final M3Request r2 = new M3Request(); - r2.operation = Operation.exportModelLegacy; - r2.entity = Entity.model; - r2.arguments = new M3Argument(); - r2.arguments.modelId = modelId; + + final M3Request r1 = new M3Request(); + r1.entity = Entity.model; + r1.operation = Operation.add; + r1.arguments = new M3Argument(); + + M3BatchResponse response1 = execute(r1, false); + final String modelId1 = BatchTestTools.responseId(response1); + + final M3Request r2 = new M3Request(); + r2.entity = Entity.model; + r2.operation = Operation.add; + r2.arguments = new M3Argument(); + + M3BatchResponse response2 = execute(r2, false); + final String modelId2 = BatchTestTools.responseId(response2); + + assertNotEquals(modelId1, modelId2); + + final M3Request batch3 = new M3Request(); + batch3.entity = Entity.model; + batch3.operation = Operation.add; + batch3.arguments = new M3Argument(); + + M3BatchResponse response3 = execute(batch3, false); + final String modelId3 = BatchTestTools.responseId(response3); + + assertNotEquals(modelId1, modelId3); + assertNotEquals(modelId2, modelId3); + } + + @Test + public void 
testDelete() throws Exception { + //models.dispose(); + + final String modelId = generateBlankModel(); + + // create + final M3Request r1 = BatchTestTools.addIndividual(modelId, "GO:0008104", // protein localization + BatchTestTools.createSvf("RO:0002333", "UniProtKB:P0000"), // enabled_by + BatchTestTools.createSvf("BFO:0000050", "GO:0006915")); // part_of apoptotic process + + final M3BatchResponse response1 = execute(r1, false); + + JsonOwlIndividual[] iObjs1 = BatchTestTools.responseIndividuals(response1); + assertEquals(1, iObjs1.length); + JsonOwlIndividual individual1 = iObjs1[0]; + assertNotNull(individual1); + assertNotNull(individual1.id); + + JsonOwlObject[] types1 = individual1.type; + assertEquals(3, types1.length); + String apopId = null; + for (JsonOwlObject e : types1) { + if (JsonOwlObjectType.SomeValueFrom == e.type) { + if (e.filler.id.equals("GO:0006915")) { + apopId = e.filler.id; + break; + } + } + } + assertNotNull(apopId); + + // delete + final M3Request r2 = new M3Request(); + r2.entity = Entity.individual; + r2.operation = Operation.removeType; + r2.arguments = new M3Argument(); + r2.arguments.modelId = modelId; + r2.arguments.individual = individual1.id; + + r2.arguments.expressions = new JsonOwlObject[1]; + r2.arguments.expressions[0] = BatchTestTools.createSvf("BFO:0000050", apopId); // part_of + + final M3BatchResponse response2 = execute(r2, false); + + JsonOwlIndividual[] iObjs2 = BatchTestTools.responseIndividuals(response2); + assertEquals(1, iObjs2.length); + JsonOwlIndividual individual2 = iObjs2[0]; + assertNotNull(individual2); + JsonOwlObject[] types2 = individual2.type; + assertEquals(2, types2.length); + } + + @Test + public void testDeleteEdge() throws Exception { + //models.dispose(); + final String modelId = generateBlankModel(); + + // setup model + // simple three individuals (mf, bp, gene) with two facts: bp -p-> mf, mf -enabled_by-> gene + final List batch1 = new ArrayList(); + + // activity/mf + M3Request r = BatchTestTools.addIndividual(modelId, "GO:0003674"); // molecular function + r.arguments.assignToVariable = "mf"; + batch1.add(r); + + // process + r = BatchTestTools.addIndividual(modelId, "GO:0008150"); // biological process + r.arguments.assignToVariable = "bp"; + batch1.add(r); + + // gene + r = BatchTestTools.addIndividual(modelId, "UniProtKB:P0000"); // gene + r.arguments.assignToVariable = "gene"; + batch1.add(r); + + // activity -> process + r = BatchTestTools.addEdge(modelId, "mf", "BFO:0000050", "bp"); // part_of + batch1.add(r); // part_of + + // mf -enabled_by-> gene + r = BatchTestTools.addEdge(modelId, "mf", "RO:0002333", "gene"); // enabled_by + batch1.add(r); // part_of + + final M3BatchResponse response1 = executeBatch(batch1, false); + JsonOwlIndividual[] iObjs1 = BatchTestTools.responseIndividuals(response1); + assertEquals(3, iObjs1.length); + + String mf = null; + String bp = null; + for (JsonOwlIndividual iObj : iObjs1) { + String id = iObj.id; + assertNotNull(id); + JsonOwlObject[] types = iObj.type; + assertNotNull(types); + assertEquals(1, types.length); + JsonOwlObject typeObj = types[0]; + String typeId = typeObj.id; + assertNotNull(typeId); + if ("GO:0003674".equals(typeId)) { + mf = id; + } else if ("GO:0008150".equals(typeId)) { + bp = id; + } + } + assertNotNull(mf); + assertNotNull(bp); + + + final List batch2 = new ArrayList(); + r = BatchTestTools.deleteEdge(modelId, mf, "BFO:0000050", bp); + batch2.add(r); + + final M3BatchResponse response2 = executeBatch(batch2, false); + 
assertEquals(M3BatchResponse.SIGNAL_MERGE, response2.signal); + JsonOwlIndividual[] iObjs2 = BatchTestTools.responseIndividuals(response2); + assertEquals(2, iObjs2.length); + } + + @Test + public void testDeleteEvidenceIndividuals() throws Exception { + //models.dispose(); + final String modelId = generateBlankModel(); + + // setup model + // simple four individuals (mf, bp, 2 evidences) with a fact in between bp and mf + final List batch1 = new ArrayList(); + + // evidence1 + M3Request r = BatchTestTools.addIndividual(modelId, "ECO:0000000"); // evidence from ECO + r.arguments.assignToVariable = "evidence-var1"; + r.arguments.values = BatchTestTools.singleAnnotation(AnnotationShorthand.source, "PMID:000000"); + batch1.add(r); + + // evidence2 + r = BatchTestTools.addIndividual(modelId, "ECO:0000001"); // evidence from ECO + r.arguments.assignToVariable = "evidence-var2"; + r.arguments.values = BatchTestTools.singleAnnotation(AnnotationShorthand.source, "PMID:000001"); + batch1.add(r); + + // evidence3 + r = BatchTestTools.addIndividual(modelId, "ECO:0000002"); // evidence from ECO + r.arguments.assignToVariable = "evidence-var3"; + r.arguments.values = BatchTestTools.singleAnnotation(AnnotationShorthand.source, "PMID:000002"); + batch1.add(r); + + // activity/mf + r = BatchTestTools.addIndividual(modelId, "GO:0003674"); // molecular function + r.arguments.assignToVariable = "mf"; + r.arguments.values = BatchTestTools.singleAnnotation(AnnotationShorthand.evidence, "evidence-var1"); + batch1.add(r); + + // process + r = BatchTestTools.addIndividual(modelId, "GO:0008150"); // biological process + r.arguments.values = BatchTestTools.singleAnnotation(AnnotationShorthand.evidence, "evidence-var3"); + r.arguments.assignToVariable = "bp"; + batch1.add(r); + + // activity -> process + r = BatchTestTools.addEdge(modelId, "mf", "BFO:0000050", "bp"); // part_of + r.arguments.values = BatchTestTools.singleAnnotation(AnnotationShorthand.evidence, "evidence-var2"); + batch1.add(r); // part_of + + final M3BatchResponse response1 = executeBatch(batch1, false); + + //run diff to show changes + //test diff command for comparison + M3Request dr = new M3Request(); + dr.entity = Entity.model; + dr.operation = Operation.diffModel; + dr.arguments = new M3Argument(); + dr.arguments.modelId = modelId; + M3BatchResponse diffresp = execute(dr, false); + String diff = diffresp.data.diffResult; + assertFalse(diff.equals("Ontologies are identical\n")); + + + // find individuals + JsonOwlIndividual[] iObjs1 = BatchTestTools.responseIndividuals(response1); + assertEquals(5, iObjs1.length); + String evidence1 = null; + String evidence2 = null; + String evidence3 = null; + String mf = null; + String bp = null; + for (JsonOwlIndividual iObj : iObjs1) { + String id = iObj.id; + assertNotNull(id); + JsonOwlObject[] types = iObj.type; + assertNotNull(types); + assertEquals(1, types.length); + JsonOwlObject typeObj = types[0]; + String typeId = typeObj.id; + assertNotNull(typeId); + if ("GO:0003674".equals(typeId)) { + mf = id; + } else if ("GO:0008150".equals(typeId)) { + bp = id; + } else if ("ECO:0000000".equals(typeId)) { + evidence1 = id; + } else if ("ECO:0000001".equals(typeId)) { + evidence2 = id; + } else if ("ECO:0000002".equals(typeId)) { + evidence3 = id; + } + } + assertNotNull(evidence1); + assertNotNull(evidence2); + assertNotNull(evidence3); + assertNotNull(mf); + assertNotNull(bp); + + // one edge + JsonOwlFact[] facts1 = BatchTestTools.responseFacts(response1); + assertEquals(1, facts1.length); + 
assertEquals(4, facts1[0].annotations.length); // evidence, date, contributor, provider + + // remove fact evidence + final List batch2 = new ArrayList(); + r = BatchTestTools.removeIndividual(modelId, evidence2); + batch2.add(r); + + executeBatch(batch2, false); + + final M3BatchResponse response3 = checkCounts(modelId, 4, 1); + JsonOwlFact[] factsObjs = BatchTestTools.responseFacts(response3); + assertEquals(3, factsObjs[0].annotations.length); // date and contributor remain + + // delete bp evidence instance + final List batch4 = new ArrayList(); + r = BatchTestTools.removeIndividual(modelId, evidence3); + batch4.add(r); + + executeBatch(batch4, false); + + final M3BatchResponse response5 = checkCounts(modelId, 3, 1); + JsonOwlIndividual[] indivdualObjs5 = BatchTestTools.responseIndividuals(response5); + boolean found = false; + for (JsonOwlIndividual iObj : indivdualObjs5) { + String id = iObj.id; + assertNotNull(id); + JsonOwlObject[] types = iObj.type; + assertNotNull(types); + assertEquals(1, types.length); + JsonOwlObject typeObj = types[0]; + String typeId = typeObj.id; + assertNotNull(typeId); + if ("GO:0008150".equals(typeId)) { + found = true; + assertTrue(iObj.annotations.length == 3); // date and contributor and provider remain + } + } + assertTrue(found); + + + // delete mf instance -> delete also mf evidence instance and fact + final List batch6 = new ArrayList(); + r = BatchTestTools.removeIndividual(modelId, mf); + batch6.add(r); + + executeBatch(batch6, false); + + M3BatchResponse response7 = checkCounts(modelId, 1, 0); + JsonOwlIndividual[] iObjs7 = BatchTestTools.responseIndividuals(response7); + assertEquals(bp, iObjs7[0].id); + } + + //FIXME @Test + public void testInconsistentModel() throws Exception { + //models.dispose(); + + final String modelId = generateBlankModel(); + + // create + final M3Request r = BatchTestTools.addIndividual(modelId, "GO:0009653", // anatomical structure morphogenesis + BatchTestTools.createClass("GO:0048856")); // anatomical structure development + + final M3BatchResponse response = execute(r, true); + assertTrue(response.isReasoned); + Boolean inconsistentFlag = BatchTestTools.responseInconsistent(response); + assertEquals(Boolean.TRUE, inconsistentFlag); + } + + //FIXME @Test + public void testInferencesRedundant() throws Exception { + //models.dispose(); + final String modelId = generateBlankModel(); + + // GO:0009826 ! unidimensional cell growth + // GO:0000902 ! 
cell morphogenesis + // should infer only one type: 'unidimensional cell growth' + // 'cell morphogenesis' is a super-class and redundant + + // create + final M3Request r = BatchTestTools.addIndividual(modelId, "GO:0000902", // cell morphogenesis + BatchTestTools.createClass("GO:0009826")); // unidimensional cell growth + + final M3BatchResponse response = execute(r, true); + assertTrue(response.isReasoned); + assertNull("Model should not be inconsistent", BatchTestTools.responseInconsistent(response)); + JsonOwlIndividual[] inferred = BatchTestTools.responseIndividuals(response); + assertNotNull(inferred); + assertEquals(1, inferred.length); + JsonOwlIndividual inferredData = inferred[0]; + JsonOwlObject[] types = inferredData.inferredType; + assertEquals(1, types.length); + JsonOwlObject type = types[0]; + assertEquals(JsonOwlObjectType.Class, type.type); + assertEquals("GO:0009826", type.id); + } + + //FIXME @Test + public void testTrivialInferences() throws Exception { + //models.dispose(); + + final String modelId = generateBlankModel(); + // create + final M3Request r = BatchTestTools.addIndividual(modelId, "GO:0051231"); // spindle elongation + + final M3BatchResponse response = execute(r, true); + assertTrue(response.isReasoned); + assertNull("Model should not be inconsistent", BatchTestTools.responseInconsistent(response)); + JsonOwlIndividual[] inferred = BatchTestTools.responseIndividuals(response); + assertNotNull(inferred); + assertEquals(1, inferred.length); + JsonOwlIndividual inferredData = inferred[0]; + assertNull(inferredData.inferredType); + } + + //FIXME @Test + public void testInferencesAdditional() throws Exception { + //models.dispose(); + + final String modelId = generateBlankModel(); + + // GO:0051231 ! spindle elongation + // part_of GO:0000278 ! mitotic cell cycle + // should infer one new type: GO:0000022 ! 
mitotic spindle elongation + + // create + final M3Request r = BatchTestTools.addIndividual(modelId, "GO:0051231", // spindle elongation + BatchTestTools.createSvf("BFO:0000050", "GO:0000278")); // part_of, mitotic cell cycle + + final M3BatchResponse response = execute(r, true); + assertTrue(response.isReasoned); + assertNull("Model should not be inconsistent", BatchTestTools.responseInconsistent(response)); + JsonOwlIndividual[] inferred = BatchTestTools.responseIndividuals(response); + assertNotNull(inferred); + assertEquals(1, inferred.length); + JsonOwlIndividual inferredData = inferred[0]; + JsonOwlObject[] types = inferredData.inferredType; + assertEquals(1, types.length); + JsonOwlObject type = types[0]; + assertEquals(JsonOwlObjectType.Class, type.type); + assertEquals("GO:0000022", type.id); + } + + @Test + public void testValidationBeforeSave() throws Exception { + assertTrue(JsonOrJsonpBatchHandler.VALIDATE_BEFORE_SAVE); + //models.dispose(); + + final String modelId = generateBlankModel(); + + // try to save + M3Request[] batch = new M3Request[1]; + batch[0] = new M3Request(); + batch[0].entity = Entity.model; + batch[0].operation = Operation.storeModel; + batch[0].arguments = new M3Argument(); + batch[0].arguments.modelId = modelId; + M3BatchResponse resp1 = handler.m3Batch(uid, providedBy, intention, packetId, batch, false, true); + assertEquals("This operation must fail as the model has no title or individuals", M3BatchResponse.MESSAGE_TYPE_ERROR, resp1.messageType); + assertNotNull(resp1.commentary); + assertTrue(resp1.commentary.contains("title")); + } + + @Test + public void testPrivileged() throws Exception { + M3Request[] batch = new M3Request[1]; + batch[0] = new M3Request(); + batch[0].entity = Entity.model; + batch[0].operation = Operation.add; + M3BatchResponse resp1 = handler.m3Batch(uid, providedBy, intention, packetId, batch, false, false); + assertEquals(M3BatchResponse.MESSAGE_TYPE_ERROR, resp1.messageType); + assertTrue(resp1.message.contains("Insufficient")); + } + + //FIXME @Test + public void testExportLegacy() throws Exception { + final String modelId = generateBlankModel(); + + // create + final M3Request r1 = BatchTestTools.addIndividual(modelId, "GO:0008104", // protein localization + BatchTestTools.createSvf("RO:0002333", "UniProtKB:P0000"), // enabled_by + BatchTestTools.createSvf("BFO:0000050", "GO:0006915")); // part_of + + execute(r1, false); + + + final M3Request r2 = new M3Request(); + r2.operation = Operation.exportModelLegacy; + r2.entity = Entity.model; + r2.arguments = new M3Argument(); + r2.arguments.modelId = modelId; // batch2.arguments.format = "gpad"; // optional, default is gaf - - final M3BatchResponse response2 = execute(r2, false); - String exportString = BatchTestTools.responseExport(response2); + + final M3BatchResponse response2 = execute(r2, false); + String exportString = BatchTestTools.responseExport(response2); // System.out.println("----------------"); // System.out.println(exportString); // System.out.println("----------------"); - assertNotNull(exportString); - } - - //FIXME @Test - public void testUndoRedo() throws Exception { - final String modelId = generateBlankModel(); - - // create - final M3Request r1 = BatchTestTools.addIndividual(modelId, "GO:0008104", // protein localization - BatchTestTools.createSvf("RO:0002333", "UniProtKB:P0000"), // enabled_by - BatchTestTools.createSvf("BFO:0000050", "GO:0006915")); // part_of apoptotic process - - final M3BatchResponse response1 = execute(r1, false); - JsonOwlIndividual[] 
iObjs1 = BatchTestTools.responseIndividuals(response1); - assertEquals(1, iObjs1.length); - JsonOwlIndividual individual1 = iObjs1[0]; - assertNotNull(individual1); - final String individualId = individual1.id; - assertNotNull(individualId); - - JsonOwlObject[] types1 = individual1.type; - assertEquals(3, types1.length); - String apopId = null; - for(JsonOwlObject e : types1) { - if (JsonOwlObjectType.SomeValueFrom == e.type) { - if (e.filler.id.equals("GO:0006915")) { - apopId = e.filler.id; - break; - } - } - } - assertNotNull(apopId); - - // check undo redo list - final M3Request r2 = new M3Request(); - r2.entity = Entity.model; - r2.operation = Operation.getUndoRedo; - r2.arguments = new M3Argument(); - r2.arguments.modelId = modelId; - final M3BatchResponse response2 = execute(r2, false); - List undo2 = (List) response2.data.undo; - List redo2 = (List) response2.data.redo; - assertTrue(undo2.size() > 1); - assertTrue(redo2.isEmpty()); - - // delete - final M3Request r3 = new M3Request(); - r3.entity = Entity.individual; - r3.operation = Operation.removeType; - r3.arguments = new M3Argument(); - r3.arguments.modelId = modelId; - r3.arguments.individual = individualId; - r3.arguments.expressions = new JsonOwlObject[]{ BatchTestTools.createSvf("BFO:0000050", apopId)}; - - final M3BatchResponse response3 = execute(r3, false); - JsonOwlIndividual[] iObjs3 = BatchTestTools.responseIndividuals(response3); - assertEquals(1, iObjs3.length); - JsonOwlIndividual individual3 = iObjs3[0]; - assertNotNull(individual3); - JsonOwlObject[] types3 = individual3.type; - assertEquals(2, types3.length); - - // check undo redo list - final M3Request r4 = new M3Request(); - r4.entity = Entity.model; - r4.operation = Operation.getUndoRedo; - r4.arguments = new M3Argument(); - r4.arguments.modelId = modelId; - - final M3BatchResponse response4 = execute(r4, false); - List undo4 = (List) response4.data.undo; - List redo4 = (List) response4.data.redo; - assertTrue(undo4.size() > 1); - assertTrue(redo4.isEmpty()); - - // undo - final M3Request r5 = new M3Request(); - r5.entity = Entity.model; - r5.operation = Operation.undo; - r5.arguments = new M3Argument(); - r5.arguments.modelId = modelId; - - execute(r5, false); - - // check undo redo list - final M3Request r6 = new M3Request(); - r6.entity = Entity.model; - r6.operation = Operation.getUndoRedo; - r6.arguments = new M3Argument(); - r6.arguments.modelId = modelId; - - final M3BatchResponse response6 = execute(r6, false); - List undo6 = (List) response6.data.undo; - List redo6 = (List) response6.data.redo; - assertTrue(undo6.size() > 1); - assertTrue(redo6.size() == 1); - - } - - //FIXME @Test - public void testAllIndividualEvidenceDelete() throws Exception { - /* - * create three individuals, two facts and two evidence individuals - */ - // blank model - final String modelId = generateBlankModel(); - final List batch1 = new ArrayList(); - - // evidence1 - M3Request r = BatchTestTools.addIndividual(modelId, "ECO:0000000"); // evidence from ECO - r.arguments.assignToVariable = "evidence-var1"; - r.arguments.values = BatchTestTools.singleAnnotation(AnnotationShorthand.source, "PMID:000000"); - batch1.add(r); - - // evidence2 - r = BatchTestTools.addIndividual(modelId, "ECO:0000001"); // evidence from ECO - r.arguments.assignToVariable = "evidence-var2"; - r.arguments.values = BatchTestTools.singleAnnotation(AnnotationShorthand.source, "PMID:000001"); - batch1.add(r); - - // activity/mf - r = BatchTestTools.addIndividual(modelId, "GO:0003674"); // molecular 
function - r.arguments.assignToVariable = "mf"; - batch1.add(r); - - // process - r = BatchTestTools.addIndividual(modelId, "GO:0008150"); // biological process - r.arguments.assignToVariable = "bp"; - batch1.add(r); - - // location/cc - r = BatchTestTools.addIndividual(modelId, "GO:0005575"); // cellular component - r.arguments.assignToVariable = "cc"; - batch1.add(r); - - // activity -> process - r = BatchTestTools.addEdge(modelId, "mf", "BFO:0000050", "bp"); // part_of - r.arguments.values = BatchTestTools.singleAnnotation(AnnotationShorthand.evidence, "evidence-var1"); - batch1.add(r); // part_of - - // activity -> cc - r = BatchTestTools.addEdge(modelId, "mf", "BFO:0000066", "cc"); // occurs_in - r.arguments.values = BatchTestTools.singleAnnotation(AnnotationShorthand.evidence, "evidence-var2"); - batch1.add(r); - - final M3BatchResponse response1 = executeBatch(batch1, false); - - // find individuals - JsonOwlIndividual[] iObjs1 = BatchTestTools.responseIndividuals(response1); - assertEquals(5, iObjs1.length); - String evidence1 = null; - String evidence2 = null; - String mf = null; - String bp = null; - String cc = null; - for (JsonOwlIndividual iObj : iObjs1) { - String id = iObj.id; - assertNotNull(id); - JsonOwlObject[] types = iObj.type; - assertNotNull(types); - assertEquals(1, types.length); - JsonOwlObject typeObj = types[0]; - String typeId = typeObj.id; - assertNotNull(typeId); - if ("GO:0003674".equals(typeId)) { - mf = id; - } - else if ("GO:0008150".equals(typeId)) { - bp = id; - } - else if ("GO:0005575".equals(typeId)) { - cc = id; - } - else if ("ECO:0000000".equals(typeId)) { - evidence1 = id; - } - else if ("ECO:0000001".equals(typeId)) { - evidence2 = id; - } - } - assertNotNull(evidence1); - assertNotNull(evidence2); - assertNotNull(mf); - assertNotNull(bp); - assertNotNull(cc); - - // two edges - JsonOwlFact[] facts1 = BatchTestTools.responseFacts(response1); - assertEquals(2, facts1.length); - - /* - * delete one fact and expect that the associated evidence is also deleted - */ - // delete: mf -part_of-> bp - r = BatchTestTools.deleteEdge(modelId, mf, "BFO:0000050", bp); - final M3BatchResponse response2 = execute(r, false); - - JsonOwlIndividual[] iObjs2 = BatchTestTools.responseIndividuals(response2); - assertEquals(4, iObjs2.length); // should return the whole model, due to the delete of the evidence! 
- - // get the whole model to check global counts - checkCounts(modelId, 4, 1); - - /* - * delete one individuals of an fact and expect a cascading delete, including the evidence - */ - r = BatchTestTools.removeIndividual(modelId, cc); - M3BatchResponse response3 = execute(r, false); - - JsonOwlIndividual[] iObjs3 = BatchTestTools.responseIndividuals(response3); - assertEquals(2, iObjs3.length); - JsonOwlFact[] facts3 = BatchTestTools.responseFacts(response3); - assertEquals(0, facts3.length); - - checkCounts(modelId, 2, 0); - } - - private M3BatchResponse checkCounts(String modelId, int individuals, int facts) { - final M3BatchResponse response = BatchTestTools.getModel(handler, modelId, false); - assertEquals(response.message, M3BatchResponse.MESSAGE_TYPE_SUCCESS, response.messageType); - JsonOwlIndividual[] iObjs = BatchTestTools.responseIndividuals(response); - assertEquals(individuals, iObjs.length); - JsonOwlFact[] factsObjs = BatchTestTools.responseFacts(response); - assertEquals(facts, factsObjs.length); - return response; - } - - private JsonAnnotation[] getModelAnnotations(String modelId) { - final M3BatchResponse response = BatchTestTools.getModel(handler, modelId, false); - return response.data.annotations; - } - - @Test - public void testAllIndividualUseCase() throws Exception { - /* - * Create a full set of individuals for an activity diagram of a gene. - */ - // blank model - final String modelId = generateBlankModel(); - List batch = new ArrayList(); - - // evidence - M3Request r = BatchTestTools.addIndividual(modelId, "ECO:0000000"); // evidence from ECO - r.arguments.assignToVariable = "evidence-var"; - r.arguments.values = BatchTestTools.singleAnnotation(AnnotationShorthand.source, "PMID:000000"); - batch.add(r); - - // activity/mf - r = BatchTestTools.addIndividual(modelId, "GO:0003674"); // molecular function - r.arguments.assignToVariable = "mf"; - batch.add(r); - - // process - r = BatchTestTools.addIndividual(modelId, "GO:0008150"); // biological process - r.arguments.assignToVariable = "bp"; - batch.add(r); - - // location/cc - r = BatchTestTools.addIndividual(modelId, "GO:0005575"); // cellular component - r.arguments.assignToVariable = "cc"; - batch.add(r); - - // gene - r = BatchTestTools.addIndividual(modelId, "MGI:000000"); // fake gene (not in the test set of known genes!) 
- r.arguments.assignToVariable = "gene"; - batch.add(r); - - // relations - // activity -> gene - r = BatchTestTools.addEdge(modelId, "mf", "RO:0002333", "gene"); // enabled_by - r.arguments.values = BatchTestTools.singleAnnotation(AnnotationShorthand.evidence, "evidence-var"); - batch.add(r); - - // activity -> process - r = BatchTestTools.addEdge(modelId, "mf", "BFO:0000050", "bp"); // part_of - r.arguments.values = BatchTestTools.singleAnnotation(AnnotationShorthand.evidence, "evidence-var"); - batch.add(r); // part_of - - // activity -> cc - r = BatchTestTools.addEdge(modelId, "mf", "BFO:0000066", "cc"); // occurs_in - r.arguments.values = BatchTestTools.singleAnnotation(AnnotationShorthand.evidence, "evidence-var"); - batch.add(r); - - /* - * Test for annoying work-around until the external validation is more stable - */ - boolean defaultIdPolicy = handler.CHECK_LITERAL_IDENTIFIERS; - try { - handler.CHECK_LITERAL_IDENTIFIERS = false; - executeBatch(batch, false); - } - finally { - handler.CHECK_LITERAL_IDENTIFIERS = defaultIdPolicy; - } - } - - //FIXME @Test - public void testVariables1() throws Exception { - /* - * TASK: create three individuals (mf,bp,cc) and a directed relation - * between the new instances - */ - final String modelId = generateBlankModel(); - final M3Request[] batch = new M3Request[5]; - batch[0] = new M3Request(); - batch[0].entity = Entity.individual; - batch[0].operation = Operation.add; - batch[0].arguments = new M3Argument(); - batch[0].arguments.modelId = modelId; - BatchTestTools.setExpressionClass(batch[0].arguments, "GO:0003674"); // molecular function - batch[0].arguments.assignToVariable = "mf"; - - batch[1] = new M3Request(); - batch[1].entity = Entity.individual; - batch[1].operation = Operation.add; - batch[1].arguments = new M3Argument(); - batch[1].arguments.modelId = modelId; - BatchTestTools.setExpressionClass(batch[1].arguments, "GO:0008150"); // biological process - batch[1].arguments.assignToVariable = "bp"; - - batch[2] = new M3Request(); - batch[2].entity = Entity.edge; - batch[2].operation = Operation.add; - batch[2].arguments = new M3Argument(); - batch[2].arguments.modelId = modelId; - batch[2].arguments.subject = "mf"; - batch[2].arguments.predicate = "BFO:0000050"; // part_of - batch[2].arguments.object = "bp"; - - batch[3] = new M3Request(); - batch[3].entity = Entity.individual; - batch[3].operation = Operation.add; - batch[3].arguments = new M3Argument(); - batch[3].arguments.modelId = modelId; - BatchTestTools.setExpressionClass(batch[3].arguments, "GO:0005575"); // cellular component - batch[3].arguments.assignToVariable = "cc"; - - batch[4] = new M3Request(); - batch[4].entity = Entity.edge; - batch[4].operation = Operation.add; - batch[4].arguments = new M3Argument(); - batch[4].arguments.modelId = modelId; - batch[4].arguments.subject = "mf"; - batch[4].arguments.predicate = "BFO:0000066"; // occurs_in - batch[4].arguments.object = "cc"; - - M3BatchResponse response = handler.m3Batch(uid, providedBy, intention, packetId, batch, false, true); - assertEquals(uid, response.uid); - assertEquals(intention, response.intention); - assertEquals(response.message, M3BatchResponse.MESSAGE_TYPE_SUCCESS, response.messageType); - - JsonOwlIndividual[] iObjs = BatchTestTools.responseIndividuals(response); - assertEquals(3, iObjs.length); - String mf = null; - String bp = null; - String cc = null; - for (JsonOwlIndividual iObj : iObjs) { - String id = iObj.id; - assertNotNull(id); - JsonOwlObject[] types = iObj.type; - assertNotNull(types); - 
assertEquals(1, types.length); - JsonOwlObject typeObj = types[0]; - String typeId = typeObj.id; - assertNotNull(typeId); - if ("GO:0003674".equals(typeId)) { - mf = id; - } - else if ("GO:0008150".equals(typeId)) { - bp = id; - } - else if ("GO:0005575".equals(typeId)) { - cc = id; - } - } - assertNotNull(mf); - assertNotNull(bp); - assertNotNull(cc); - - JsonOwlFact[] facts = BatchTestTools.responseFacts(response); - assertEquals(2, facts.length); - boolean mfbp = false; - boolean mfcc = false; - for (JsonOwlFact fact : facts) { - String subject = fact.subject; - String property = fact.property; - String object = fact.object; - assertNotNull(subject); - assertNotNull(property); - assertNotNull(object); - if (mf.equals(subject) && "BFO:0000050".equals(property) && bp.equals(object)) { - mfbp = true; - } - if (mf.equals(subject) && "BFO:0000066".equals(property) && cc.equals(object)) { - mfcc = true; - } - } - assertTrue(mfbp); - assertTrue(mfcc); - } - - //FIXME @Test - public void testVariables2() throws Exception { - /* - * TASK: try to use an undefined variable - */ - final String modelId = generateBlankModel(); - final M3Request[] batch = new M3Request[2]; - batch[0] = new M3Request(); - batch[0].entity = Entity.individual; - batch[0].operation = Operation.add; - batch[0].arguments = new M3Argument(); - batch[0].arguments.modelId = modelId; - BatchTestTools.setExpressionClass(batch[0].arguments, "GO:0003674"); // molecular function - batch[0].arguments.assignToVariable = "mf"; - - batch[1] = new M3Request(); - batch[1].entity = Entity.edge; - batch[1].operation = Operation.add; - batch[1].arguments = new M3Argument(); - batch[1].arguments.modelId = modelId; - batch[1].arguments.subject = "mf"; - batch[1].arguments.predicate = "BFO:0000050"; // part_of - batch[1].arguments.object = "foo"; - - M3BatchResponse response = handler.m3Batch(uid, providedBy, intention, packetId, batch, false, true); - assertEquals(uid, response.uid); - assertEquals(intention, response.intention); - assertEquals("The operation should fail with an unknown identifier exception", - M3BatchResponse.MESSAGE_TYPE_ERROR, response.messageType); - assertTrue(response.message, response.message.contains("UnknownIdentifierException")); - assertTrue(response.message, response.message.contains("foo")); // unknown - } - - @Test - public void testDeprecatedModel() throws Exception { + assertNotNull(exportString); + } + + //FIXME @Test + public void testUndoRedo() throws Exception { + final String modelId = generateBlankModel(); + + // create + final M3Request r1 = BatchTestTools.addIndividual(modelId, "GO:0008104", // protein localization + BatchTestTools.createSvf("RO:0002333", "UniProtKB:P0000"), // enabled_by + BatchTestTools.createSvf("BFO:0000050", "GO:0006915")); // part_of apoptotic process + + final M3BatchResponse response1 = execute(r1, false); + JsonOwlIndividual[] iObjs1 = BatchTestTools.responseIndividuals(response1); + assertEquals(1, iObjs1.length); + JsonOwlIndividual individual1 = iObjs1[0]; + assertNotNull(individual1); + final String individualId = individual1.id; + assertNotNull(individualId); + + JsonOwlObject[] types1 = individual1.type; + assertEquals(3, types1.length); + String apopId = null; + for (JsonOwlObject e : types1) { + if (JsonOwlObjectType.SomeValueFrom == e.type) { + if (e.filler.id.equals("GO:0006915")) { + apopId = e.filler.id; + break; + } + } + } + assertNotNull(apopId); + + // check undo redo list + final M3Request r2 = new M3Request(); + r2.entity = Entity.model; + r2.operation = 
Operation.getUndoRedo; + r2.arguments = new M3Argument(); + r2.arguments.modelId = modelId; + final M3BatchResponse response2 = execute(r2, false); + List undo2 = (List) response2.data.undo; + List redo2 = (List) response2.data.redo; + assertTrue(undo2.size() > 1); + assertTrue(redo2.isEmpty()); + + // delete + final M3Request r3 = new M3Request(); + r3.entity = Entity.individual; + r3.operation = Operation.removeType; + r3.arguments = new M3Argument(); + r3.arguments.modelId = modelId; + r3.arguments.individual = individualId; + r3.arguments.expressions = new JsonOwlObject[]{BatchTestTools.createSvf("BFO:0000050", apopId)}; + + final M3BatchResponse response3 = execute(r3, false); + JsonOwlIndividual[] iObjs3 = BatchTestTools.responseIndividuals(response3); + assertEquals(1, iObjs3.length); + JsonOwlIndividual individual3 = iObjs3[0]; + assertNotNull(individual3); + JsonOwlObject[] types3 = individual3.type; + assertEquals(2, types3.length); + + // check undo redo list + final M3Request r4 = new M3Request(); + r4.entity = Entity.model; + r4.operation = Operation.getUndoRedo; + r4.arguments = new M3Argument(); + r4.arguments.modelId = modelId; + + final M3BatchResponse response4 = execute(r4, false); + List undo4 = (List) response4.data.undo; + List redo4 = (List) response4.data.redo; + assertTrue(undo4.size() > 1); + assertTrue(redo4.isEmpty()); + + // undo + final M3Request r5 = new M3Request(); + r5.entity = Entity.model; + r5.operation = Operation.undo; + r5.arguments = new M3Argument(); + r5.arguments.modelId = modelId; + + execute(r5, false); + + // check undo redo list + final M3Request r6 = new M3Request(); + r6.entity = Entity.model; + r6.operation = Operation.getUndoRedo; + r6.arguments = new M3Argument(); + r6.arguments.modelId = modelId; + + final M3BatchResponse response6 = execute(r6, false); + List undo6 = (List) response6.data.undo; + List redo6 = (List) response6.data.redo; + assertTrue(undo6.size() > 1); + assertTrue(redo6.size() == 1); + + } + + //FIXME @Test + public void testAllIndividualEvidenceDelete() throws Exception { + /* + * create three individuals, two facts and two evidence individuals + */ + // blank model + final String modelId = generateBlankModel(); + final List batch1 = new ArrayList(); + + // evidence1 + M3Request r = BatchTestTools.addIndividual(modelId, "ECO:0000000"); // evidence from ECO + r.arguments.assignToVariable = "evidence-var1"; + r.arguments.values = BatchTestTools.singleAnnotation(AnnotationShorthand.source, "PMID:000000"); + batch1.add(r); + + // evidence2 + r = BatchTestTools.addIndividual(modelId, "ECO:0000001"); // evidence from ECO + r.arguments.assignToVariable = "evidence-var2"; + r.arguments.values = BatchTestTools.singleAnnotation(AnnotationShorthand.source, "PMID:000001"); + batch1.add(r); + + // activity/mf + r = BatchTestTools.addIndividual(modelId, "GO:0003674"); // molecular function + r.arguments.assignToVariable = "mf"; + batch1.add(r); + + // process + r = BatchTestTools.addIndividual(modelId, "GO:0008150"); // biological process + r.arguments.assignToVariable = "bp"; + batch1.add(r); + + // location/cc + r = BatchTestTools.addIndividual(modelId, "GO:0005575"); // cellular component + r.arguments.assignToVariable = "cc"; + batch1.add(r); + + // activity -> process + r = BatchTestTools.addEdge(modelId, "mf", "BFO:0000050", "bp"); // part_of + r.arguments.values = BatchTestTools.singleAnnotation(AnnotationShorthand.evidence, "evidence-var1"); + batch1.add(r); // part_of + + // activity -> cc + r = 
BatchTestTools.addEdge(modelId, "mf", "BFO:0000066", "cc"); // occurs_in + r.arguments.values = BatchTestTools.singleAnnotation(AnnotationShorthand.evidence, "evidence-var2"); + batch1.add(r); + + final M3BatchResponse response1 = executeBatch(batch1, false); + + // find individuals + JsonOwlIndividual[] iObjs1 = BatchTestTools.responseIndividuals(response1); + assertEquals(5, iObjs1.length); + String evidence1 = null; + String evidence2 = null; + String mf = null; + String bp = null; + String cc = null; + for (JsonOwlIndividual iObj : iObjs1) { + String id = iObj.id; + assertNotNull(id); + JsonOwlObject[] types = iObj.type; + assertNotNull(types); + assertEquals(1, types.length); + JsonOwlObject typeObj = types[0]; + String typeId = typeObj.id; + assertNotNull(typeId); + if ("GO:0003674".equals(typeId)) { + mf = id; + } else if ("GO:0008150".equals(typeId)) { + bp = id; + } else if ("GO:0005575".equals(typeId)) { + cc = id; + } else if ("ECO:0000000".equals(typeId)) { + evidence1 = id; + } else if ("ECO:0000001".equals(typeId)) { + evidence2 = id; + } + } + assertNotNull(evidence1); + assertNotNull(evidence2); + assertNotNull(mf); + assertNotNull(bp); + assertNotNull(cc); + + // two edges + JsonOwlFact[] facts1 = BatchTestTools.responseFacts(response1); + assertEquals(2, facts1.length); + + /* + * delete one fact and expect that the associated evidence is also deleted + */ + // delete: mf -part_of-> bp + r = BatchTestTools.deleteEdge(modelId, mf, "BFO:0000050", bp); + final M3BatchResponse response2 = execute(r, false); + + JsonOwlIndividual[] iObjs2 = BatchTestTools.responseIndividuals(response2); + assertEquals(4, iObjs2.length); // should return the whole model, due to the delete of the evidence! + + // get the whole model to check global counts + checkCounts(modelId, 4, 1); + + /* + * delete one individuals of an fact and expect a cascading delete, including the evidence + */ + r = BatchTestTools.removeIndividual(modelId, cc); + M3BatchResponse response3 = execute(r, false); + + JsonOwlIndividual[] iObjs3 = BatchTestTools.responseIndividuals(response3); + assertEquals(2, iObjs3.length); + JsonOwlFact[] facts3 = BatchTestTools.responseFacts(response3); + assertEquals(0, facts3.length); + + checkCounts(modelId, 2, 0); + } + + private M3BatchResponse checkCounts(String modelId, int individuals, int facts) { + final M3BatchResponse response = BatchTestTools.getModel(handler, modelId, false); + assertEquals(response.message, M3BatchResponse.MESSAGE_TYPE_SUCCESS, response.messageType); + JsonOwlIndividual[] iObjs = BatchTestTools.responseIndividuals(response); + assertEquals(individuals, iObjs.length); + JsonOwlFact[] factsObjs = BatchTestTools.responseFacts(response); + assertEquals(facts, factsObjs.length); + return response; + } + + private JsonAnnotation[] getModelAnnotations(String modelId) { + final M3BatchResponse response = BatchTestTools.getModel(handler, modelId, false); + return response.data.annotations; + } + + @Test + public void testAllIndividualUseCase() throws Exception { + /* + * Create a full set of individuals for an activity diagram of a gene. 
+ */ + // blank model + final String modelId = generateBlankModel(); + List batch = new ArrayList(); + + // evidence + M3Request r = BatchTestTools.addIndividual(modelId, "ECO:0000000"); // evidence from ECO + r.arguments.assignToVariable = "evidence-var"; + r.arguments.values = BatchTestTools.singleAnnotation(AnnotationShorthand.source, "PMID:000000"); + batch.add(r); + + // activity/mf + r = BatchTestTools.addIndividual(modelId, "GO:0003674"); // molecular function + r.arguments.assignToVariable = "mf"; + batch.add(r); + + // process + r = BatchTestTools.addIndividual(modelId, "GO:0008150"); // biological process + r.arguments.assignToVariable = "bp"; + batch.add(r); + + // location/cc + r = BatchTestTools.addIndividual(modelId, "GO:0005575"); // cellular component + r.arguments.assignToVariable = "cc"; + batch.add(r); + + // gene + r = BatchTestTools.addIndividual(modelId, "MGI:000000"); // fake gene (not in the test set of known genes!) + r.arguments.assignToVariable = "gene"; + batch.add(r); + + // relations + // activity -> gene + r = BatchTestTools.addEdge(modelId, "mf", "RO:0002333", "gene"); // enabled_by + r.arguments.values = BatchTestTools.singleAnnotation(AnnotationShorthand.evidence, "evidence-var"); + batch.add(r); + + // activity -> process + r = BatchTestTools.addEdge(modelId, "mf", "BFO:0000050", "bp"); // part_of + r.arguments.values = BatchTestTools.singleAnnotation(AnnotationShorthand.evidence, "evidence-var"); + batch.add(r); // part_of + + // activity -> cc + r = BatchTestTools.addEdge(modelId, "mf", "BFO:0000066", "cc"); // occurs_in + r.arguments.values = BatchTestTools.singleAnnotation(AnnotationShorthand.evidence, "evidence-var"); + batch.add(r); + + /* + * Test for annoying work-around until the external validation is more stable + */ + boolean defaultIdPolicy = handler.CHECK_LITERAL_IDENTIFIERS; + try { + handler.CHECK_LITERAL_IDENTIFIERS = false; + executeBatch(batch, false); + } finally { + handler.CHECK_LITERAL_IDENTIFIERS = defaultIdPolicy; + } + } + + //FIXME @Test + public void testVariables1() throws Exception { + /* + * TASK: create three individuals (mf,bp,cc) and a directed relation + * between the new instances + */ + final String modelId = generateBlankModel(); + final M3Request[] batch = new M3Request[5]; + batch[0] = new M3Request(); + batch[0].entity = Entity.individual; + batch[0].operation = Operation.add; + batch[0].arguments = new M3Argument(); + batch[0].arguments.modelId = modelId; + BatchTestTools.setExpressionClass(batch[0].arguments, "GO:0003674"); // molecular function + batch[0].arguments.assignToVariable = "mf"; + + batch[1] = new M3Request(); + batch[1].entity = Entity.individual; + batch[1].operation = Operation.add; + batch[1].arguments = new M3Argument(); + batch[1].arguments.modelId = modelId; + BatchTestTools.setExpressionClass(batch[1].arguments, "GO:0008150"); // biological process + batch[1].arguments.assignToVariable = "bp"; + + batch[2] = new M3Request(); + batch[2].entity = Entity.edge; + batch[2].operation = Operation.add; + batch[2].arguments = new M3Argument(); + batch[2].arguments.modelId = modelId; + batch[2].arguments.subject = "mf"; + batch[2].arguments.predicate = "BFO:0000050"; // part_of + batch[2].arguments.object = "bp"; + + batch[3] = new M3Request(); + batch[3].entity = Entity.individual; + batch[3].operation = Operation.add; + batch[3].arguments = new M3Argument(); + batch[3].arguments.modelId = modelId; + BatchTestTools.setExpressionClass(batch[3].arguments, "GO:0005575"); // cellular component + 
batch[3].arguments.assignToVariable = "cc"; + + batch[4] = new M3Request(); + batch[4].entity = Entity.edge; + batch[4].operation = Operation.add; + batch[4].arguments = new M3Argument(); + batch[4].arguments.modelId = modelId; + batch[4].arguments.subject = "mf"; + batch[4].arguments.predicate = "BFO:0000066"; // occurs_in + batch[4].arguments.object = "cc"; + + M3BatchResponse response = handler.m3Batch(uid, providedBy, intention, packetId, batch, false, true); + assertEquals(uid, response.uid); + assertEquals(intention, response.intention); + assertEquals(response.message, M3BatchResponse.MESSAGE_TYPE_SUCCESS, response.messageType); + + JsonOwlIndividual[] iObjs = BatchTestTools.responseIndividuals(response); + assertEquals(3, iObjs.length); + String mf = null; + String bp = null; + String cc = null; + for (JsonOwlIndividual iObj : iObjs) { + String id = iObj.id; + assertNotNull(id); + JsonOwlObject[] types = iObj.type; + assertNotNull(types); + assertEquals(1, types.length); + JsonOwlObject typeObj = types[0]; + String typeId = typeObj.id; + assertNotNull(typeId); + if ("GO:0003674".equals(typeId)) { + mf = id; + } else if ("GO:0008150".equals(typeId)) { + bp = id; + } else if ("GO:0005575".equals(typeId)) { + cc = id; + } + } + assertNotNull(mf); + assertNotNull(bp); + assertNotNull(cc); + + JsonOwlFact[] facts = BatchTestTools.responseFacts(response); + assertEquals(2, facts.length); + boolean mfbp = false; + boolean mfcc = false; + for (JsonOwlFact fact : facts) { + String subject = fact.subject; + String property = fact.property; + String object = fact.object; + assertNotNull(subject); + assertNotNull(property); + assertNotNull(object); + if (mf.equals(subject) && "BFO:0000050".equals(property) && bp.equals(object)) { + mfbp = true; + } + if (mf.equals(subject) && "BFO:0000066".equals(property) && cc.equals(object)) { + mfcc = true; + } + } + assertTrue(mfbp); + assertTrue(mfcc); + } + + //FIXME @Test + public void testVariables2() throws Exception { + /* + * TASK: try to use an undefined variable + */ + final String modelId = generateBlankModel(); + final M3Request[] batch = new M3Request[2]; + batch[0] = new M3Request(); + batch[0].entity = Entity.individual; + batch[0].operation = Operation.add; + batch[0].arguments = new M3Argument(); + batch[0].arguments.modelId = modelId; + BatchTestTools.setExpressionClass(batch[0].arguments, "GO:0003674"); // molecular function + batch[0].arguments.assignToVariable = "mf"; + + batch[1] = new M3Request(); + batch[1].entity = Entity.edge; + batch[1].operation = Operation.add; + batch[1].arguments = new M3Argument(); + batch[1].arguments.modelId = modelId; + batch[1].arguments.subject = "mf"; + batch[1].arguments.predicate = "BFO:0000050"; // part_of + batch[1].arguments.object = "foo"; + + M3BatchResponse response = handler.m3Batch(uid, providedBy, intention, packetId, batch, false, true); + assertEquals(uid, response.uid); + assertEquals(intention, response.intention); + assertEquals("The operation should fail with an unknown identifier exception", + M3BatchResponse.MESSAGE_TYPE_ERROR, response.messageType); + assertTrue(response.message, response.message.contains("UnknownIdentifierException")); + assertTrue(response.message, response.message.contains("foo")); // unknown + } + + @Test + public void testDeprecatedModel() throws Exception { // models.dispose(); - - final String modelId1 = generateBlankModel(); - final String modelId2 = generateBlankModel(); - - // add deprecated annotation to model 2 - final M3Request batch1 = new M3Request(); 
- batch1.entity = Entity.model; - batch1.operation = Operation.addAnnotation; - batch1.arguments = new M3Argument(); - batch1.arguments.modelId = modelId2; - batch1.arguments.values = new JsonAnnotation[1]; - batch1.arguments.values[0] = new JsonAnnotation(); - batch1.arguments.values[0].key = AnnotationShorthand.deprecated.name(); - batch1.arguments.values[0].value = Boolean.TRUE.toString(); - - execute(batch1, false); - - final M3Request batch2 = new M3Request(); - batch2.entity = Entity.meta; - batch2.operation = Operation.get; - - final M3BatchResponse response2 = execute(batch2, false); - - Map> meta = BatchTestTools.responseModelsMeta(response2); - //assertEquals(2, meta.size()); //FIXME should not reuse models after dispose; need to change these tests to make a fresh m3 - // model 1 - List modelData = meta.get(modelId1); - assertNotNull(modelData); - for (JsonAnnotation json : modelData) { - if (json.key.equals(AnnotationShorthand.deprecated.name())) { - fail("the model should not have a deprecation annotation"); - } - } - - // model 2, deprecated - modelData = meta.get(modelId2); - assertNotNull(modelData); - boolean found = false; - for (JsonAnnotation json : modelData) { - if (json.key.equals(AnnotationShorthand.deprecated.name())) { - found = true; - assertEquals("true", json.value); - } - } - assertTrue("the model must have a deprecation annotation", found); - - } - - //FIXME @Test - public void testAutoAnnotationsForAddType() throws Exception { - /* - * test that if a type is added or removed from an individual also - * updates the contributes annotation - */ - final String modelId = generateBlankModel(); - // create individual - final M3Request[] batch1 = new M3Request[1]; - batch1[0] = new M3Request(); - batch1[0].entity = Entity.individual; - batch1[0].operation = Operation.add; - batch1[0].arguments = new M3Argument(); - batch1[0].arguments.modelId = modelId; - BatchTestTools.setExpressionClass(batch1[0].arguments, "GO:0003674"); // molecular function - - String uid1 = "1"; - Set providedBy1 = Collections.singleton("provider1"); - M3BatchResponse response1 = handler.m3Batch(uid1, providedBy1, intention, packetId, batch1, false, true); - assertEquals(uid1, response1.uid); - assertEquals(intention, response1.intention); - assertEquals(response1.message, M3BatchResponse.MESSAGE_TYPE_SUCCESS, response1.messageType); - - // find contributor - JsonOwlIndividual[] individuals1 = BatchTestTools.responseIndividuals(response1); - assertEquals(1, individuals1.length); - - final String id = individuals1[0].id; - - JsonAnnotation[] annotations1 = individuals1[0].annotations; - assertEquals(2, annotations1.length); - String contrib1 = null; - for (JsonAnnotation annotation : annotations1) { - if (AnnotationShorthand.contributor.name().equals(annotation.key)) { - contrib1 = annotation.value; - } - } - assertNotNull(contrib1); - assertEquals(uid1, contrib1); - - // remove type - final M3Request[] batch2 = new M3Request[1]; - batch2[0] = new M3Request(); - batch2[0].entity = Entity.individual; - batch2[0].operation = Operation.removeType; - batch2[0].arguments = new M3Argument(); - batch2[0].arguments.modelId = modelId; - batch2[0].arguments.individual = id; - BatchTestTools.setExpressionClass(batch2[0].arguments, "GO:0003674"); // molecular function - - String uid2 = "2"; - Set providedBy2 = Collections.singleton("provider2"); - M3BatchResponse response2 = handler.m3Batch(uid2, providedBy2, intention, packetId, batch2, false, true); - assertEquals(uid2, response2.uid); - 
assertEquals(intention, response2.intention); - assertEquals(response2.message, M3BatchResponse.MESSAGE_TYPE_SUCCESS, response2.messageType); - - // find contributor and compare with prev - JsonOwlIndividual[] individuals2 = BatchTestTools.responseIndividuals(response2); - assertEquals(1, individuals2.length); - - JsonAnnotation[] annotations2 = individuals2[0].annotations; - assertEquals(3, annotations2.length); - Set contribSet1 = new HashSet(); - for (JsonAnnotation annotation : annotations2) { - if (AnnotationShorthand.contributor.name().equals(annotation.key)) { - contribSet1.add(annotation.value); - } - } - assertEquals(2, contribSet1.size()); - assertTrue(contribSet1.contains(uid1)); - assertTrue(contribSet1.contains(uid2)); - - // add type - final M3Request[] batch3 = new M3Request[1]; - batch3[0] = new M3Request(); - batch3[0].entity = Entity.individual; - batch3[0].operation = Operation.addType; - batch3[0].arguments = new M3Argument(); - batch3[0].arguments.modelId = modelId; - batch3[0].arguments.individual = id; - BatchTestTools.setExpressionClass(batch3[0].arguments, "GO:0003674"); // molecular function - - String uid3 = "3"; - Set providedBy3 = Collections.singleton("provider3"); - M3BatchResponse response3 = handler.m3Batch(uid3, providedBy3, intention, packetId, batch3, false, true); - assertEquals(uid3, response3.uid); - assertEquals(intention, response3.intention); - assertEquals(response3.message, M3BatchResponse.MESSAGE_TYPE_SUCCESS, response3.messageType); - - // find contributor and compare with prev - JsonOwlIndividual[] individuals3 = BatchTestTools.responseIndividuals(response3); - assertEquals(1, individuals3.length); - - JsonAnnotation[] annotations3 = individuals3[0].annotations; - assertEquals(4, annotations3.length); - Set contribSet2 = new HashSet(); - for (JsonAnnotation annotation : annotations3) { - if (AnnotationShorthand.contributor.name().equals(annotation.key)) { - contribSet2.add(annotation.value); - } - } - assertEquals(3, contribSet2.size()); - assertTrue(contribSet2.contains(uid1)); - assertTrue(contribSet2.contains(uid2)); - assertTrue(contribSet2.contains(uid3)); - } - - static class DateGenerator { - - boolean useCounter = false; - int counter = 0; - } - - //FIXME @Test - public void testUpdateDateAnnotation() throws Exception { - /* - * test that the last modification date is update for every change of an - * individual or fact - */ - try { - dateGenerator.counter = 0; - dateGenerator.useCounter = true; - - // test update with add/remove annotation of a fact - final String modelId = generateBlankModel(); - - // setup initial fact with two individuals - final M3Request[] batch1 = new M3Request[3]; - batch1[0] = new M3Request(); - batch1[0].entity = Entity.individual; - batch1[0].operation = Operation.add; - batch1[0].arguments = new M3Argument(); - batch1[0].arguments.modelId = modelId; - BatchTestTools.setExpressionClass(batch1[0].arguments, "GO:0003674"); // molecular function - batch1[0].arguments.assignToVariable = "mf"; - - batch1[1] = new M3Request(); - batch1[1].entity = Entity.individual; - batch1[1].operation = Operation.add; - batch1[1].arguments = new M3Argument(); - batch1[1].arguments.modelId = modelId; - BatchTestTools.setExpressionClass(batch1[1].arguments, "GO:0008150"); // biological process - batch1[1].arguments.assignToVariable = "bp"; - - batch1[2] = new M3Request(); - batch1[2].entity = Entity.edge; - batch1[2].operation = Operation.add; - batch1[2].arguments = new M3Argument(); - batch1[2].arguments.modelId = modelId; - 
batch1[2].arguments.subject = "mf"; - batch1[2].arguments.predicate = "BFO:0000050"; // part_of - batch1[2].arguments.object = "bp"; - - M3BatchResponse response1 = handler.m3Batch(uid, providedBy, intention, packetId, batch1, false, true); - assertEquals(uid, response1.uid); - assertEquals(intention, response1.intention); - assertEquals(response1.message, M3BatchResponse.MESSAGE_TYPE_SUCCESS, response1.messageType); - - // find fact and date annotation - String prevDate = null; - { - JsonOwlFact[] responseFacts = BatchTestTools.responseFacts(response1); - assertEquals(1, responseFacts.length); - Set dates = new HashSet(); - for(JsonAnnotation ann : responseFacts[0].annotations) { - if (AnnotationShorthand.date.name().equals(ann.key)) { - dates.add(ann.value); - } - } - assertEquals(1, dates.size()); - prevDate = dates.iterator().next(); - assertNotNull(prevDate); - } - String mf = null; - String bp = null; - { - JsonOwlIndividual[] responseIndividuals = BatchTestTools.responseIndividuals(response1); - assertEquals(2, responseIndividuals.length); - for (JsonOwlIndividual iObj : responseIndividuals) { - String id = iObj.id; - assertNotNull(id); - JsonOwlObject[] types = iObj.type; - assertNotNull(types); - assertEquals(1, types.length); - JsonOwlObject typeObj = types[0]; - String typeId = typeObj.id; - assertNotNull(typeId); - if ("GO:0003674".equals(typeId)) { - mf = id; - } - else if ("GO:0008150".equals(typeId)) { - bp = id; - } - } - } - assertNotNull(mf); - assertNotNull(bp); - - // add comment to fact - final M3Request[] batch2 = new M3Request[1]; - batch2[0] = new M3Request(); - batch2[0].entity = Entity.edge; - batch2[0].operation = Operation.addAnnotation; - batch2[0].arguments = new M3Argument(); - batch2[0].arguments.modelId = modelId; - batch2[0].arguments.subject = mf; - batch2[0].arguments.predicate = "BFO:0000050"; // part_of - batch2[0].arguments.object = bp; - batch2[0].arguments.values = BatchTestTools.singleAnnotation(AnnotationShorthand.comment, "foo"); - - M3BatchResponse response2 = handler.m3Batch(uid, providedBy, intention, packetId, batch2, false, true); - assertEquals(uid, response2.uid); - assertEquals(intention, response2.intention); - assertEquals(response2.message, M3BatchResponse.MESSAGE_TYPE_SUCCESS, response2.messageType); - - // find fact and compare date with prev - { - JsonOwlFact[] responseFacts = BatchTestTools.responseFacts(response2); - assertEquals(1, responseFacts.length); - Set dates = new HashSet(); - for(JsonAnnotation ann : responseFacts[0].annotations) { - if (AnnotationShorthand.date.name().equals(ann.key)) { - dates.add(ann.value); - } - } - assertEquals(1, dates.size()); - String currentDate = dates.iterator().next(); - assertNotNull(currentDate); - assertNotEquals(prevDate, currentDate); - prevDate = currentDate; - } - - // remove comment from fact - final M3Request[] batch3 = new M3Request[1]; - batch3[0] = new M3Request(); - batch3[0].entity = Entity.edge; - batch3[0].operation = Operation.removeAnnotation; - batch3[0].arguments = new M3Argument(); - batch3[0].arguments.modelId = modelId; - batch3[0].arguments.subject = mf; - batch3[0].arguments.predicate = "BFO:0000050"; // part_of - batch3[0].arguments.object = bp; - batch3[0].arguments.values = BatchTestTools.singleAnnotation(AnnotationShorthand.comment, "foo"); - - M3BatchResponse response3 = handler.m3Batch(uid, providedBy, intention, packetId, batch3, false, true); - assertEquals(uid, response3.uid); - assertEquals(intention, response3.intention); - assertEquals(response3.message, 
M3BatchResponse.MESSAGE_TYPE_SUCCESS, response3.messageType); - - // find fact and compare date with prev - { - JsonOwlFact[] responseFacts = BatchTestTools.responseFacts(response3); - assertEquals(1, responseFacts.length); - Set dates = new HashSet(); - for(JsonAnnotation ann : responseFacts[0].annotations) { - if (AnnotationShorthand.date.name().equals(ann.key)) { - dates.add(ann.value); - } - } - assertEquals(1, dates.size()); - String currentDate = dates.iterator().next(); - assertNotNull(currentDate); - assertNotEquals(prevDate, currentDate); - prevDate = currentDate; - } - - - // test update with add/remove type of an individual - // find individual and date annotation - - String individualId = null; - { - JsonOwlIndividual[] individuals1 = BatchTestTools.responseIndividuals(response1); - assertEquals(2, individuals1.length); - final Set dates = new HashSet(); - for (JsonOwlIndividual individual : individuals1) { - individualId = individual.id; - assertNotNull(individualId); - JsonOwlObject[] types = individual.type; - assertNotNull(types); - assertEquals(1, types.length); - JsonOwlObject typeObj = types[0]; - String typeId = typeObj.id; - assertNotNull(typeId); - if ("GO:0003674".equals(typeId)) { - for(JsonAnnotation annotation : individual.annotations) { - if (AnnotationShorthand.date.name().equals(annotation.key)) { - dates.add(annotation.value); - } - } - } - } - assertEquals(1, dates.size()); - prevDate = dates.iterator().next(); - assertNotNull(prevDate); - } - - // remove type - final M3Request[] batch4 = new M3Request[1]; - batch4[0] = new M3Request(); - batch4[0].entity = Entity.individual; - batch4[0].operation = Operation.removeType; - batch4[0].arguments = new M3Argument(); - batch4[0].arguments.modelId = modelId; - batch4[0].arguments.individual = individualId; - BatchTestTools.setExpressionClass(batch4[0].arguments, "GO:0003674"); - - M3BatchResponse response4 = handler.m3Batch(uid, providedBy, intention, packetId, batch4, false, true); - assertEquals(uid, response4.uid); - assertEquals(intention, response4.intention); - assertEquals(response4.message, M3BatchResponse.MESSAGE_TYPE_SUCCESS, response4.messageType); - - // find individual and compare date with prev - { - JsonOwlIndividual[] responseIndividuals = BatchTestTools.responseIndividuals(response4); - assertEquals(1, responseIndividuals.length); - final Set dates = new HashSet(); - for(JsonAnnotation annotation : responseIndividuals[0].annotations) { - if (AnnotationShorthand.date.name().equals(annotation.key)) { - dates.add(annotation.value); - } - } - assertEquals(1, dates.size()); - String currentDate = dates.iterator().next(); - assertNotNull(currentDate); - assertNotEquals(prevDate, currentDate); - prevDate = currentDate; - } - - // add type - final M3Request[] batch5 = new M3Request[1]; - batch5[0] = new M3Request(); - batch5[0].entity = Entity.individual; - batch5[0].operation = Operation.addType; - batch5[0].arguments = new M3Argument(); - batch5[0].arguments.modelId = modelId; - batch5[0].arguments.individual = individualId; - BatchTestTools.setExpressionClass(batch5[0].arguments, "GO:0003674"); - - M3BatchResponse response5 = handler.m3Batch(uid, providedBy, intention, packetId, batch5, false, true); - assertEquals(uid, response5.uid); - assertEquals(intention, response5.intention); - assertEquals(response5.message, M3BatchResponse.MESSAGE_TYPE_SUCCESS, response5.messageType); - - // find individual and compare date with prev - { - JsonOwlIndividual[] responseIndividuals = 
BatchTestTools.responseIndividuals(response5); - assertEquals(1, responseIndividuals.length); - final Set dates = new HashSet(); - for(JsonAnnotation annotation : responseIndividuals[0].annotations) { - if (AnnotationShorthand.date.name().equals(annotation.key)) { - dates.add(annotation.value); - } - } - assertEquals(1, dates.size()); - assertEquals(1, dates.size()); - String currentDate = dates.iterator().next(); - assertNotNull(currentDate); - assertNotEquals(prevDate, currentDate); - } - } - finally { - dateGenerator.useCounter = false; - } - } - - //FIXME @Test - public void testUpdateDateAnnotationEvidence() throws Exception { - try { - dateGenerator.counter = 0; - dateGenerator.useCounter = true; - - // test update with add/remove annotation of an evidence individuals - final String modelId = generateBlankModel(); - - // setup initial fact with two individuals - List batch1 = new ArrayList(); - - // evidence1 - M3Request r = BatchTestTools.addIndividual(modelId, "ECO:0000000"); // evidence from ECO - r.arguments.assignToVariable = "evidence-var1"; - r.arguments.values = BatchTestTools.singleAnnotation(AnnotationShorthand.source, "PMID:000000"); - batch1.add(r); - - // evidence2 - r = BatchTestTools.addIndividual(modelId, "ECO:0000001"); // evidence from ECO - r.arguments.assignToVariable = "evidence-var2"; - r.arguments.values = BatchTestTools.singleAnnotation(AnnotationShorthand.source, "PMID:000001"); - batch1.add(r); - - r = BatchTestTools.addIndividual(modelId, "GO:0003674"); // molecular function - r.arguments.assignToVariable = "mf"; - r.arguments.values = BatchTestTools.singleAnnotation(AnnotationShorthand.evidence, "evidence-var1"); - batch1.add(r); - - r = BatchTestTools.addIndividual(modelId, "GO:0008150"); // biological process - r.arguments.assignToVariable = "bp"; - batch1.add(r); - - r = BatchTestTools.addEdge(modelId, "mf", "BFO:0000050", "bp"); // part_of - r.arguments.values = BatchTestTools.singleAnnotation(AnnotationShorthand.evidence, "evidence-var2"); - batch1.add(r); - - M3BatchResponse response1 = executeBatch(batch1, "FOO:1", false); - - // find all the individual ids - // find date for mf - String evidence1 = null; - String evidence2 = null; - String mf = null; - String dateMf = null; - String bp = null; - { - JsonOwlIndividual[] iObjs1 = BatchTestTools.responseIndividuals(response1); - assertEquals(4, iObjs1.length); - for (JsonOwlIndividual iObj : iObjs1) { - String id = iObj.id; - assertNotNull(id); - JsonOwlObject[] types = iObj.type; - assertNotNull(types); - assertEquals(1, types.length); - JsonOwlObject typeObj = types[0]; - String typeId = typeObj.id; - assertNotNull(typeId); - if ("GO:0003674".equals(typeId)) { - mf = id; - for(JsonAnnotation ann: iObj.annotations) { - if (AnnotationShorthand.date.name().equals(ann.key)) { - dateMf = ann.value; - } - } - } - else if ("GO:0008150".equals(typeId)) { - bp = id; - } - else if ("ECO:0000000".equals(typeId)) { - evidence1 = id; - } - else if ("ECO:0000001".equals(typeId)) { - evidence2 = id; - } - } - assertNotNull(evidence1); - assertNotNull(evidence2); - assertNotNull(mf); - assertNotNull(dateMf); - assertNotNull(bp); - } - - // delete evidence1 and expect a date update and contrib for mf - - final List batch2 = new ArrayList(); - r = BatchTestTools.removeIndividual(modelId, evidence1); - batch2.add(r); - - { - M3BatchResponse response2 = executeBatch(batch2, "FOO:2", false); - - JsonOwlIndividual[] individuals = BatchTestTools.responseIndividuals(response2); - Set currentDates = new HashSet(); - Set 
contrib = new HashSet(); - for (JsonOwlIndividual individual : individuals) { - if (mf.equals(individual.id)) { - for(JsonAnnotation annotation : individual.annotations) { - if (AnnotationShorthand.date.name().equals(annotation.key)) { - currentDates.add(annotation.value); - } - else if (AnnotationShorthand.contributor.name().equals(annotation.key)) { - contrib.add(annotation.value); - } - } - } - } - assertEquals(1, currentDates.size()); - assertFalse(currentDates.contains(dateMf)); // prev Date - dateMf = currentDates.iterator().next(); - - assertEquals(2, contrib.size()); - assertTrue(contrib.contains("FOO:1")); - assertTrue(contrib.contains("FOO:2")); - } - - // delete evidence2 and expect a date update and contrib for fact - - final List batch3 = new ArrayList(); - r = BatchTestTools.removeIndividual(modelId, evidence2); - batch3.add(r); - - { - M3BatchResponse response3 = executeBatch(batch3, "FOO:3", false); - JsonOwlFact[] facts = BatchTestTools.responseFacts(response3); - assertEquals(1, facts.length); - Set currentDates = new HashSet(); - Set contrib = new HashSet(); - for(JsonAnnotation annotation : facts[0].annotations) { - if (AnnotationShorthand.date.name().equals(annotation.key)) { - currentDates.add(annotation.value); - } - else if (AnnotationShorthand.contributor.name().equals(annotation.key)) { - contrib.add(annotation.value); - } - } - assertEquals(1, currentDates.size()); - assertFalse(currentDates.contains(dateMf)); // prev Date - - assertEquals(2, contrib.size()); - assertTrue(contrib.contains("FOO:1")); - assertTrue(contrib.contains("FOO:3")); - } - } - finally { - dateGenerator.useCounter = false; - } - } - - //FIXME @Test - public void testCoordinateRoundTrip() throws Exception { - //models.dispose(); - - String modelId = generateBlankModel(); - - M3Request r; - final List batch1 = new ArrayList(); - r = BatchTestTools.addIndividual(modelId, "GO:0008150"); // biological process - r.arguments.values = new JsonAnnotation[2]; - r.arguments.values[0] = JsonTools.create(AnnotationShorthand.x, "100", null); - r.arguments.values[1] = JsonTools.create(AnnotationShorthand.y, "200", null); - batch1.add(r); - - - r = new M3Request(); - r.entity = Entity.model; - r.operation = Operation.addAnnotation; - r.arguments = new M3Argument(); - r.arguments.modelId = modelId; - r.arguments.values = BatchTestTools.singleAnnotation(AnnotationShorthand.title, "foo"); - batch1.add(r); - - r = new M3Request(); - r.entity = Entity.model; - r.operation = Operation.storeModel; - r.arguments = new M3Argument(); - r.arguments.modelId = modelId; - batch1.add(r); - - final M3BatchResponse response1 = executeBatch(batch1, false); - JsonOwlIndividual[] responseIndividuals = BatchTestTools.responseIndividuals(response1); - assertEquals(1, responseIndividuals.length); - - //models.dispose(); - assertTrue(models.getCurrentModelIds().isEmpty()); - - Set availableModelIds = models.getAvailableModelIds(); - assertEquals(1, availableModelIds.size()); - - r = new M3Request(); - r.arguments = new M3Argument(); - r.arguments.modelId = modelId; - r.entity = Entity.model; - r.operation = Operation.get; - - final M3BatchResponse response2 = executeBatch(Collections.singletonList(r), false); - JsonOwlIndividual[] responseIndividuals2 = BatchTestTools.responseIndividuals(response2); - assertEquals(1, responseIndividuals2.length); - JsonOwlIndividual ind = responseIndividuals2[0]; - boolean foundX = false; - boolean foundY = false; - for(JsonAnnotation ann : ind.annotations) { - if 
(ann.key.equals(AnnotationShorthand.x.getShorthand())) { - foundX = "100".equals(ann.value); - } - else if (ann.key.equals(AnnotationShorthand.y.getShorthand())) { - foundY = "200".equals(ann.value); - } - } - assertTrue(foundX); - assertTrue(foundY); - } - - @Test - public void testPmidIRIIndividual() throws Exception { - String modelId = generateBlankModel(); - - M3Request r; - final List batch1 = new ArrayList(); - r = new M3Request(); - r.entity = Entity.individual; - r.operation = Operation.add; - r.arguments = new M3Argument(); - r.arguments.modelId = modelId; - r.arguments.individualIRI = "PMID:0000"; - BatchTestTools.setExpressionClass(r.arguments, "IAO:0000311"); - batch1.add(r); - - // de-activate check as "IAO:0000311" is currently not in the import chain - boolean defaultIdPolicy = handler.CHECK_LITERAL_IDENTIFIERS; - M3BatchResponse response1; - try { - handler.CHECK_LITERAL_IDENTIFIERS = false; - response1 = executeBatch(batch1, false); - } - finally { - handler.CHECK_LITERAL_IDENTIFIERS = defaultIdPolicy; - } - - JsonOwlIndividual[] individuals1 = BatchTestTools.responseIndividuals(response1); - assertEquals(1, individuals1.length); - assertEquals("PMID:0000", individuals1[0].id); - - // de-activate check as "IAO:0000311" is currently not in the import chain - // execute second request to test behavior for multiple adds with the same PMID - defaultIdPolicy = handler.CHECK_LITERAL_IDENTIFIERS; - M3BatchResponse response2; - try { - handler.CHECK_LITERAL_IDENTIFIERS = false; - response2 = executeBatch(batch1, false); - } - finally { - handler.CHECK_LITERAL_IDENTIFIERS = defaultIdPolicy; - } - - JsonOwlIndividual[] individuals2 = BatchTestTools.responseIndividuals(response2); - assertEquals(1, individuals2.length); - assertEquals("PMID:0000", individuals2[0].id); - } - - @Test - public void testUnknownIdentifier() throws Exception { - String modelId = generateBlankModel(); - - M3Request r; - final List batch1 = new ArrayList(); - r = new M3Request(); - r.entity = Entity.individual; - r.operation = Operation.add; - r.arguments = new M3Argument(); - r.arguments.modelId = modelId; - BatchTestTools.setExpressionClass(r.arguments, "IDA"); - batch1.add(r); - - boolean defaultIdPolicy = handler.CHECK_LITERAL_IDENTIFIERS; - M3BatchResponse response; - try { - handler.CHECK_LITERAL_IDENTIFIERS = true; - response = handler.m3Batch(uid, providedBy, intention, packetId, batch1.toArray(new M3Request[batch1.size()]), false, true); - } - finally { - handler.CHECK_LITERAL_IDENTIFIERS = defaultIdPolicy; - } - assertEquals(uid, response.uid); - assertEquals(intention, response.intention); - - // this has to fail as IDA is *not* a known identifier - assertEquals(M3BatchResponse.MESSAGE_TYPE_ERROR, response.messageType); - } - - //FIXME @Test - public void testRelationLabels() throws Exception { - //models.dispose(); - - // find test relation - Set properties = models.getOntology().getObjectPropertiesInSignature(Imports.INCLUDED); - OWLObjectProperty gorel0002006 = null; - for (OWLObjectProperty p : properties) { - IRI iri = p.getIRI(); - if (iri.toString().endsWith("http://purl.obolibrary.org/obo/GOREL_0002006")) { - gorel0002006 = p; - } - } - assertNotNull(gorel0002006); - String gorel0002006Curie = curieHandler.getCuri(gorel0002006); - - // check meta - M3Request r = new M3Request(); - r.entity = Entity.meta; - r.operation = Operation.get; - - M3BatchResponse response1 = execute(r, false); - final JsonRelationInfo[] relations = BatchTestTools.responseRelations(response1); - JsonRelationInfo 
gorel0002006Info = null; - for(JsonRelationInfo rel : relations) { - if (rel.id.equals(gorel0002006Curie)) { - gorel0002006Info = rel; - } - } - assertNotNull(gorel0002006Info); - assertEquals("results_in_organization_of", gorel0002006Info.label); - - - // use relation and check that response also contains relation label - String modelId = generateBlankModel(); - - r = new M3Request(); - r.entity = Entity.individual; - r.operation = Operation.add; - r.arguments = new M3Argument(); - r.arguments.modelId = modelId; - r.arguments.expressions = new JsonOwlObject[1]; - r.arguments.expressions[0] = BatchTestTools.createSvf(gorel0002006Curie, "GO:0003674"); - - M3BatchResponse response2 = execute(r, false); - JsonOwlIndividual[] individuals = BatchTestTools.responseIndividuals(response2); - assertEquals(1, individuals.length); - JsonOwlIndividual individual = individuals[0]; - JsonOwlObject[] types = individual.type; - assertEquals(1, types.length); - JsonOwlObject property = types[0].property; - assertEquals(gorel0002006Curie, property.id); - assertEquals("results_in_organization_of", property.label); - - } - - private M3BatchResponse execute(M3Request r, boolean useReasoner) { - return executeBatch(Collections.singletonList(r), useReasoner); - } - - private M3BatchResponse executeBatch(List batch, boolean useReasoner) { - return executeBatch(batch, uid, useReasoner); - } - - private M3BatchResponse executeBatch(List batch, String uid, boolean useReasoner) { - M3BatchResponse response = handler.m3Batch(uid, providedBy, intention, packetId, batch.toArray(new M3Request[batch.size()]), useReasoner, true); - assertEquals(uid, response.uid); - assertEquals(intention, response.intention); - assertEquals(response.message, M3BatchResponse.MESSAGE_TYPE_SUCCESS, response.messageType); - return response; - } - - /** - * @return modelId - */ - private String generateBlankModel() { - String modelId = BatchTestTools.generateBlankModel(handler); - return modelId; - } + + final String modelId1 = generateBlankModel(); + final String modelId2 = generateBlankModel(); + + // add deprecated annotation to model 2 + final M3Request batch1 = new M3Request(); + batch1.entity = Entity.model; + batch1.operation = Operation.addAnnotation; + batch1.arguments = new M3Argument(); + batch1.arguments.modelId = modelId2; + batch1.arguments.values = new JsonAnnotation[1]; + batch1.arguments.values[0] = new JsonAnnotation(); + batch1.arguments.values[0].key = AnnotationShorthand.deprecated.name(); + batch1.arguments.values[0].value = Boolean.TRUE.toString(); + + execute(batch1, false); + + final M3Request batch2 = new M3Request(); + batch2.entity = Entity.meta; + batch2.operation = Operation.get; + + final M3BatchResponse response2 = execute(batch2, false); + + Map> meta = BatchTestTools.responseModelsMeta(response2); + //assertEquals(2, meta.size()); //FIXME should not reuse models after dispose; need to change these tests to make a fresh m3 + // model 1 + List modelData = meta.get(modelId1); + assertNotNull(modelData); + for (JsonAnnotation json : modelData) { + if (json.key.equals(AnnotationShorthand.deprecated.name())) { + fail("the model should not have a deprecation annotation"); + } + } + + // model 2, deprecated + modelData = meta.get(modelId2); + assertNotNull(modelData); + boolean found = false; + for (JsonAnnotation json : modelData) { + if (json.key.equals(AnnotationShorthand.deprecated.name())) { + found = true; + assertEquals("true", json.value); + } + } + assertTrue("the model must have a deprecation annotation", 
found); + + } + + //FIXME @Test + public void testAutoAnnotationsForAddType() throws Exception { + /* + * test that if a type is added or removed from an individual also + * updates the contributes annotation + */ + final String modelId = generateBlankModel(); + // create individual + final M3Request[] batch1 = new M3Request[1]; + batch1[0] = new M3Request(); + batch1[0].entity = Entity.individual; + batch1[0].operation = Operation.add; + batch1[0].arguments = new M3Argument(); + batch1[0].arguments.modelId = modelId; + BatchTestTools.setExpressionClass(batch1[0].arguments, "GO:0003674"); // molecular function + + String uid1 = "1"; + Set providedBy1 = Collections.singleton("provider1"); + M3BatchResponse response1 = handler.m3Batch(uid1, providedBy1, intention, packetId, batch1, false, true); + assertEquals(uid1, response1.uid); + assertEquals(intention, response1.intention); + assertEquals(response1.message, M3BatchResponse.MESSAGE_TYPE_SUCCESS, response1.messageType); + + // find contributor + JsonOwlIndividual[] individuals1 = BatchTestTools.responseIndividuals(response1); + assertEquals(1, individuals1.length); + + final String id = individuals1[0].id; + + JsonAnnotation[] annotations1 = individuals1[0].annotations; + assertEquals(2, annotations1.length); + String contrib1 = null; + for (JsonAnnotation annotation : annotations1) { + if (AnnotationShorthand.contributor.name().equals(annotation.key)) { + contrib1 = annotation.value; + } + } + assertNotNull(contrib1); + assertEquals(uid1, contrib1); + + // remove type + final M3Request[] batch2 = new M3Request[1]; + batch2[0] = new M3Request(); + batch2[0].entity = Entity.individual; + batch2[0].operation = Operation.removeType; + batch2[0].arguments = new M3Argument(); + batch2[0].arguments.modelId = modelId; + batch2[0].arguments.individual = id; + BatchTestTools.setExpressionClass(batch2[0].arguments, "GO:0003674"); // molecular function + + String uid2 = "2"; + Set providedBy2 = Collections.singleton("provider2"); + M3BatchResponse response2 = handler.m3Batch(uid2, providedBy2, intention, packetId, batch2, false, true); + assertEquals(uid2, response2.uid); + assertEquals(intention, response2.intention); + assertEquals(response2.message, M3BatchResponse.MESSAGE_TYPE_SUCCESS, response2.messageType); + + // find contributor and compare with prev + JsonOwlIndividual[] individuals2 = BatchTestTools.responseIndividuals(response2); + assertEquals(1, individuals2.length); + + JsonAnnotation[] annotations2 = individuals2[0].annotations; + assertEquals(3, annotations2.length); + Set contribSet1 = new HashSet(); + for (JsonAnnotation annotation : annotations2) { + if (AnnotationShorthand.contributor.name().equals(annotation.key)) { + contribSet1.add(annotation.value); + } + } + assertEquals(2, contribSet1.size()); + assertTrue(contribSet1.contains(uid1)); + assertTrue(contribSet1.contains(uid2)); + + // add type + final M3Request[] batch3 = new M3Request[1]; + batch3[0] = new M3Request(); + batch3[0].entity = Entity.individual; + batch3[0].operation = Operation.addType; + batch3[0].arguments = new M3Argument(); + batch3[0].arguments.modelId = modelId; + batch3[0].arguments.individual = id; + BatchTestTools.setExpressionClass(batch3[0].arguments, "GO:0003674"); // molecular function + + String uid3 = "3"; + Set providedBy3 = Collections.singleton("provider3"); + M3BatchResponse response3 = handler.m3Batch(uid3, providedBy3, intention, packetId, batch3, false, true); + assertEquals(uid3, response3.uid); + assertEquals(intention, 
response3.intention); + assertEquals(response3.message, M3BatchResponse.MESSAGE_TYPE_SUCCESS, response3.messageType); + + // find contributor and compare with prev + JsonOwlIndividual[] individuals3 = BatchTestTools.responseIndividuals(response3); + assertEquals(1, individuals3.length); + + JsonAnnotation[] annotations3 = individuals3[0].annotations; + assertEquals(4, annotations3.length); + Set contribSet2 = new HashSet(); + for (JsonAnnotation annotation : annotations3) { + if (AnnotationShorthand.contributor.name().equals(annotation.key)) { + contribSet2.add(annotation.value); + } + } + assertEquals(3, contribSet2.size()); + assertTrue(contribSet2.contains(uid1)); + assertTrue(contribSet2.contains(uid2)); + assertTrue(contribSet2.contains(uid3)); + } + + static class DateGenerator { + + boolean useCounter = false; + int counter = 0; + } + + //FIXME @Test + public void testUpdateDateAnnotation() throws Exception { + /* + * test that the last modification date is update for every change of an + * individual or fact + */ + try { + dateGenerator.counter = 0; + dateGenerator.useCounter = true; + + // test update with add/remove annotation of a fact + final String modelId = generateBlankModel(); + + // setup initial fact with two individuals + final M3Request[] batch1 = new M3Request[3]; + batch1[0] = new M3Request(); + batch1[0].entity = Entity.individual; + batch1[0].operation = Operation.add; + batch1[0].arguments = new M3Argument(); + batch1[0].arguments.modelId = modelId; + BatchTestTools.setExpressionClass(batch1[0].arguments, "GO:0003674"); // molecular function + batch1[0].arguments.assignToVariable = "mf"; + + batch1[1] = new M3Request(); + batch1[1].entity = Entity.individual; + batch1[1].operation = Operation.add; + batch1[1].arguments = new M3Argument(); + batch1[1].arguments.modelId = modelId; + BatchTestTools.setExpressionClass(batch1[1].arguments, "GO:0008150"); // biological process + batch1[1].arguments.assignToVariable = "bp"; + + batch1[2] = new M3Request(); + batch1[2].entity = Entity.edge; + batch1[2].operation = Operation.add; + batch1[2].arguments = new M3Argument(); + batch1[2].arguments.modelId = modelId; + batch1[2].arguments.subject = "mf"; + batch1[2].arguments.predicate = "BFO:0000050"; // part_of + batch1[2].arguments.object = "bp"; + + M3BatchResponse response1 = handler.m3Batch(uid, providedBy, intention, packetId, batch1, false, true); + assertEquals(uid, response1.uid); + assertEquals(intention, response1.intention); + assertEquals(response1.message, M3BatchResponse.MESSAGE_TYPE_SUCCESS, response1.messageType); + + // find fact and date annotation + String prevDate = null; + { + JsonOwlFact[] responseFacts = BatchTestTools.responseFacts(response1); + assertEquals(1, responseFacts.length); + Set dates = new HashSet(); + for (JsonAnnotation ann : responseFacts[0].annotations) { + if (AnnotationShorthand.date.name().equals(ann.key)) { + dates.add(ann.value); + } + } + assertEquals(1, dates.size()); + prevDate = dates.iterator().next(); + assertNotNull(prevDate); + } + String mf = null; + String bp = null; + { + JsonOwlIndividual[] responseIndividuals = BatchTestTools.responseIndividuals(response1); + assertEquals(2, responseIndividuals.length); + for (JsonOwlIndividual iObj : responseIndividuals) { + String id = iObj.id; + assertNotNull(id); + JsonOwlObject[] types = iObj.type; + assertNotNull(types); + assertEquals(1, types.length); + JsonOwlObject typeObj = types[0]; + String typeId = typeObj.id; + assertNotNull(typeId); + if ("GO:0003674".equals(typeId)) { + 
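(A sketch that is not part of this patch.) The three-request shape just above — two individuals bound to temporary variables plus one edge that refers to them — is the core idiom of these tests; the same batch can be built with the BatchTestTools helpers that appear later in this patch. modelId, handler, uid, providedBy, intention and packetId are assumed to be the fixtures of this test class.

    // Create mf and bp individuals and connect them with part_of, using
    // "mf"/"bp" as placeholders for the not-yet-known individual IRIs.
    List<M3Request> batch = new ArrayList<M3Request>();

    M3Request mf = BatchTestTools.addIndividual(modelId, "GO:0003674"); // molecular function
    mf.arguments.assignToVariable = "mf";
    batch.add(mf);

    M3Request bp = BatchTestTools.addIndividual(modelId, "GO:0008150"); // biological process
    bp.arguments.assignToVariable = "bp";
    batch.add(bp);

    batch.add(BatchTestTools.addEdge(modelId, "mf", "BFO:0000050", "bp")); // part_of

    M3BatchResponse response = handler.m3Batch(uid, providedBy, intention, packetId,
            batch.toArray(new M3Request[batch.size()]), false, true);
    assertEquals(M3BatchResponse.MESSAGE_TYPE_SUCCESS, response.messageType);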
mf = id; + } else if ("GO:0008150".equals(typeId)) { + bp = id; + } + } + } + assertNotNull(mf); + assertNotNull(bp); + + // add comment to fact + final M3Request[] batch2 = new M3Request[1]; + batch2[0] = new M3Request(); + batch2[0].entity = Entity.edge; + batch2[0].operation = Operation.addAnnotation; + batch2[0].arguments = new M3Argument(); + batch2[0].arguments.modelId = modelId; + batch2[0].arguments.subject = mf; + batch2[0].arguments.predicate = "BFO:0000050"; // part_of + batch2[0].arguments.object = bp; + batch2[0].arguments.values = BatchTestTools.singleAnnotation(AnnotationShorthand.comment, "foo"); + + M3BatchResponse response2 = handler.m3Batch(uid, providedBy, intention, packetId, batch2, false, true); + assertEquals(uid, response2.uid); + assertEquals(intention, response2.intention); + assertEquals(response2.message, M3BatchResponse.MESSAGE_TYPE_SUCCESS, response2.messageType); + + // find fact and compare date with prev + { + JsonOwlFact[] responseFacts = BatchTestTools.responseFacts(response2); + assertEquals(1, responseFacts.length); + Set dates = new HashSet(); + for (JsonAnnotation ann : responseFacts[0].annotations) { + if (AnnotationShorthand.date.name().equals(ann.key)) { + dates.add(ann.value); + } + } + assertEquals(1, dates.size()); + String currentDate = dates.iterator().next(); + assertNotNull(currentDate); + assertNotEquals(prevDate, currentDate); + prevDate = currentDate; + } + + // remove comment from fact + final M3Request[] batch3 = new M3Request[1]; + batch3[0] = new M3Request(); + batch3[0].entity = Entity.edge; + batch3[0].operation = Operation.removeAnnotation; + batch3[0].arguments = new M3Argument(); + batch3[0].arguments.modelId = modelId; + batch3[0].arguments.subject = mf; + batch3[0].arguments.predicate = "BFO:0000050"; // part_of + batch3[0].arguments.object = bp; + batch3[0].arguments.values = BatchTestTools.singleAnnotation(AnnotationShorthand.comment, "foo"); + + M3BatchResponse response3 = handler.m3Batch(uid, providedBy, intention, packetId, batch3, false, true); + assertEquals(uid, response3.uid); + assertEquals(intention, response3.intention); + assertEquals(response3.message, M3BatchResponse.MESSAGE_TYPE_SUCCESS, response3.messageType); + + // find fact and compare date with prev + { + JsonOwlFact[] responseFacts = BatchTestTools.responseFacts(response3); + assertEquals(1, responseFacts.length); + Set dates = new HashSet(); + for (JsonAnnotation ann : responseFacts[0].annotations) { + if (AnnotationShorthand.date.name().equals(ann.key)) { + dates.add(ann.value); + } + } + assertEquals(1, dates.size()); + String currentDate = dates.iterator().next(); + assertNotNull(currentDate); + assertNotEquals(prevDate, currentDate); + prevDate = currentDate; + } + + + // test update with add/remove type of an individual + // find individual and date annotation + + String individualId = null; + { + JsonOwlIndividual[] individuals1 = BatchTestTools.responseIndividuals(response1); + assertEquals(2, individuals1.length); + final Set dates = new HashSet(); + for (JsonOwlIndividual individual : individuals1) { + individualId = individual.id; + assertNotNull(individualId); + JsonOwlObject[] types = individual.type; + assertNotNull(types); + assertEquals(1, types.length); + JsonOwlObject typeObj = types[0]; + String typeId = typeObj.id; + assertNotNull(typeId); + if ("GO:0003674".equals(typeId)) { + for (JsonAnnotation annotation : individual.annotations) { + if (AnnotationShorthand.date.name().equals(annotation.key)) { + dates.add(annotation.value); + } + 
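(A sketch that is not part of this patch.) The date checks in this test repeat the same collect-and-compare loop for facts and individuals; a hypothetical helper — singleDate is not in BatchTestTools, it is only illustrative — captures the pattern.

    // Return the single AnnotationShorthand.date value, failing if the
    // annotation is missing or ambiguous; mirrors the loops used above.
    private static String singleDate(JsonAnnotation[] annotations) {
        Set<String> dates = new HashSet<String>();
        for (JsonAnnotation ann : annotations) {
            if (AnnotationShorthand.date.name().equals(ann.key)) {
                dates.add(ann.value);
            }
        }
        assertEquals(1, dates.size());
        return dates.iterator().next();
    }

    // Usage, as in the assertions above:
    //   String currentDate = singleDate(BatchTestTools.responseFacts(response2)[0].annotations);
    //   assertNotEquals(prevDate, currentDate);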
} + } + } + assertEquals(1, dates.size()); + prevDate = dates.iterator().next(); + assertNotNull(prevDate); + } + + // remove type + final M3Request[] batch4 = new M3Request[1]; + batch4[0] = new M3Request(); + batch4[0].entity = Entity.individual; + batch4[0].operation = Operation.removeType; + batch4[0].arguments = new M3Argument(); + batch4[0].arguments.modelId = modelId; + batch4[0].arguments.individual = individualId; + BatchTestTools.setExpressionClass(batch4[0].arguments, "GO:0003674"); + + M3BatchResponse response4 = handler.m3Batch(uid, providedBy, intention, packetId, batch4, false, true); + assertEquals(uid, response4.uid); + assertEquals(intention, response4.intention); + assertEquals(response4.message, M3BatchResponse.MESSAGE_TYPE_SUCCESS, response4.messageType); + + // find individual and compare date with prev + { + JsonOwlIndividual[] responseIndividuals = BatchTestTools.responseIndividuals(response4); + assertEquals(1, responseIndividuals.length); + final Set dates = new HashSet(); + for (JsonAnnotation annotation : responseIndividuals[0].annotations) { + if (AnnotationShorthand.date.name().equals(annotation.key)) { + dates.add(annotation.value); + } + } + assertEquals(1, dates.size()); + String currentDate = dates.iterator().next(); + assertNotNull(currentDate); + assertNotEquals(prevDate, currentDate); + prevDate = currentDate; + } + + // add type + final M3Request[] batch5 = new M3Request[1]; + batch5[0] = new M3Request(); + batch5[0].entity = Entity.individual; + batch5[0].operation = Operation.addType; + batch5[0].arguments = new M3Argument(); + batch5[0].arguments.modelId = modelId; + batch5[0].arguments.individual = individualId; + BatchTestTools.setExpressionClass(batch5[0].arguments, "GO:0003674"); + + M3BatchResponse response5 = handler.m3Batch(uid, providedBy, intention, packetId, batch5, false, true); + assertEquals(uid, response5.uid); + assertEquals(intention, response5.intention); + assertEquals(response5.message, M3BatchResponse.MESSAGE_TYPE_SUCCESS, response5.messageType); + + // find individual and compare date with prev + { + JsonOwlIndividual[] responseIndividuals = BatchTestTools.responseIndividuals(response5); + assertEquals(1, responseIndividuals.length); + final Set dates = new HashSet(); + for (JsonAnnotation annotation : responseIndividuals[0].annotations) { + if (AnnotationShorthand.date.name().equals(annotation.key)) { + dates.add(annotation.value); + } + } + assertEquals(1, dates.size()); + assertEquals(1, dates.size()); + String currentDate = dates.iterator().next(); + assertNotNull(currentDate); + assertNotEquals(prevDate, currentDate); + } + } finally { + dateGenerator.useCounter = false; + } + } + + //FIXME @Test + public void testUpdateDateAnnotationEvidence() throws Exception { + try { + dateGenerator.counter = 0; + dateGenerator.useCounter = true; + + // test update with add/remove annotation of an evidence individuals + final String modelId = generateBlankModel(); + + // setup initial fact with two individuals + List batch1 = new ArrayList(); + + // evidence1 + M3Request r = BatchTestTools.addIndividual(modelId, "ECO:0000000"); // evidence from ECO + r.arguments.assignToVariable = "evidence-var1"; + r.arguments.values = BatchTestTools.singleAnnotation(AnnotationShorthand.source, "PMID:000000"); + batch1.add(r); + + // evidence2 + r = BatchTestTools.addIndividual(modelId, "ECO:0000001"); // evidence from ECO + r.arguments.assignToVariable = "evidence-var2"; + r.arguments.values = 
BatchTestTools.singleAnnotation(AnnotationShorthand.source, "PMID:000001"); + batch1.add(r); + + r = BatchTestTools.addIndividual(modelId, "GO:0003674"); // molecular function + r.arguments.assignToVariable = "mf"; + r.arguments.values = BatchTestTools.singleAnnotation(AnnotationShorthand.evidence, "evidence-var1"); + batch1.add(r); + + r = BatchTestTools.addIndividual(modelId, "GO:0008150"); // biological process + r.arguments.assignToVariable = "bp"; + batch1.add(r); + + r = BatchTestTools.addEdge(modelId, "mf", "BFO:0000050", "bp"); // part_of + r.arguments.values = BatchTestTools.singleAnnotation(AnnotationShorthand.evidence, "evidence-var2"); + batch1.add(r); + + M3BatchResponse response1 = executeBatch(batch1, "FOO:1", false); + + // find all the individual ids + // find date for mf + String evidence1 = null; + String evidence2 = null; + String mf = null; + String dateMf = null; + String bp = null; + { + JsonOwlIndividual[] iObjs1 = BatchTestTools.responseIndividuals(response1); + assertEquals(4, iObjs1.length); + for (JsonOwlIndividual iObj : iObjs1) { + String id = iObj.id; + assertNotNull(id); + JsonOwlObject[] types = iObj.type; + assertNotNull(types); + assertEquals(1, types.length); + JsonOwlObject typeObj = types[0]; + String typeId = typeObj.id; + assertNotNull(typeId); + if ("GO:0003674".equals(typeId)) { + mf = id; + for (JsonAnnotation ann : iObj.annotations) { + if (AnnotationShorthand.date.name().equals(ann.key)) { + dateMf = ann.value; + } + } + } else if ("GO:0008150".equals(typeId)) { + bp = id; + } else if ("ECO:0000000".equals(typeId)) { + evidence1 = id; + } else if ("ECO:0000001".equals(typeId)) { + evidence2 = id; + } + } + assertNotNull(evidence1); + assertNotNull(evidence2); + assertNotNull(mf); + assertNotNull(dateMf); + assertNotNull(bp); + } + + // delete evidence1 and expect a date update and contrib for mf + + final List batch2 = new ArrayList(); + r = BatchTestTools.removeIndividual(modelId, evidence1); + batch2.add(r); + + { + M3BatchResponse response2 = executeBatch(batch2, "FOO:2", false); + + JsonOwlIndividual[] individuals = BatchTestTools.responseIndividuals(response2); + Set currentDates = new HashSet(); + Set contrib = new HashSet(); + for (JsonOwlIndividual individual : individuals) { + if (mf.equals(individual.id)) { + for (JsonAnnotation annotation : individual.annotations) { + if (AnnotationShorthand.date.name().equals(annotation.key)) { + currentDates.add(annotation.value); + } else if (AnnotationShorthand.contributor.name().equals(annotation.key)) { + contrib.add(annotation.value); + } + } + } + } + assertEquals(1, currentDates.size()); + assertFalse(currentDates.contains(dateMf)); // prev Date + dateMf = currentDates.iterator().next(); + + assertEquals(2, contrib.size()); + assertTrue(contrib.contains("FOO:1")); + assertTrue(contrib.contains("FOO:2")); + } + + // delete evidence2 and expect a date update and contrib for fact + + final List batch3 = new ArrayList(); + r = BatchTestTools.removeIndividual(modelId, evidence2); + batch3.add(r); + + { + M3BatchResponse response3 = executeBatch(batch3, "FOO:3", false); + JsonOwlFact[] facts = BatchTestTools.responseFacts(response3); + assertEquals(1, facts.length); + Set currentDates = new HashSet(); + Set contrib = new HashSet(); + for (JsonAnnotation annotation : facts[0].annotations) { + if (AnnotationShorthand.date.name().equals(annotation.key)) { + currentDates.add(annotation.value); + } else if (AnnotationShorthand.contributor.name().equals(annotation.key)) { + contrib.add(annotation.value); 
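(A sketch that is not part of this patch.) What the evidence tests are asserting: deleting an evidence individual as a second user must refresh the date and extend the contributor list of whatever referenced that evidence. The names modelId, evidence1 and mf are the ones resolved in the test above; executeBatch is the private helper defined at the end of this class.

    // Remove the evidence as user "FOO:2" and check that the referencing mf
    // individual now carries both contributors.
    List<M3Request> batch = new ArrayList<M3Request>();
    batch.add(BatchTestTools.removeIndividual(modelId, evidence1));
    M3BatchResponse response = executeBatch(batch, "FOO:2", false);

    Set<String> contributors = new HashSet<String>();
    for (JsonOwlIndividual individual : BatchTestTools.responseIndividuals(response)) {
        if (mf.equals(individual.id)) {
            for (JsonAnnotation annotation : individual.annotations) {
                if (AnnotationShorthand.contributor.name().equals(annotation.key)) {
                    contributors.add(annotation.value);
                }
            }
        }
    }
    assertTrue(contributors.contains("FOO:1")); // original creator
    assertTrue(contributors.contains("FOO:2")); // user who removed the evidence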
+ } + } + assertEquals(1, currentDates.size()); + assertFalse(currentDates.contains(dateMf)); // prev Date + + assertEquals(2, contrib.size()); + assertTrue(contrib.contains("FOO:1")); + assertTrue(contrib.contains("FOO:3")); + } + } finally { + dateGenerator.useCounter = false; + } + } + + //FIXME @Test + public void testCoordinateRoundTrip() throws Exception { + //models.dispose(); + + String modelId = generateBlankModel(); + + M3Request r; + final List batch1 = new ArrayList(); + r = BatchTestTools.addIndividual(modelId, "GO:0008150"); // biological process + r.arguments.values = new JsonAnnotation[2]; + r.arguments.values[0] = JsonTools.create(AnnotationShorthand.x, "100", null); + r.arguments.values[1] = JsonTools.create(AnnotationShorthand.y, "200", null); + batch1.add(r); + + + r = new M3Request(); + r.entity = Entity.model; + r.operation = Operation.addAnnotation; + r.arguments = new M3Argument(); + r.arguments.modelId = modelId; + r.arguments.values = BatchTestTools.singleAnnotation(AnnotationShorthand.title, "foo"); + batch1.add(r); + + r = new M3Request(); + r.entity = Entity.model; + r.operation = Operation.storeModel; + r.arguments = new M3Argument(); + r.arguments.modelId = modelId; + batch1.add(r); + + final M3BatchResponse response1 = executeBatch(batch1, false); + JsonOwlIndividual[] responseIndividuals = BatchTestTools.responseIndividuals(response1); + assertEquals(1, responseIndividuals.length); + + //models.dispose(); + assertTrue(models.getCurrentModelIds().isEmpty()); + + Set availableModelIds = models.getAvailableModelIds(); + assertEquals(1, availableModelIds.size()); + + r = new M3Request(); + r.arguments = new M3Argument(); + r.arguments.modelId = modelId; + r.entity = Entity.model; + r.operation = Operation.get; + + final M3BatchResponse response2 = executeBatch(Collections.singletonList(r), false); + JsonOwlIndividual[] responseIndividuals2 = BatchTestTools.responseIndividuals(response2); + assertEquals(1, responseIndividuals2.length); + JsonOwlIndividual ind = responseIndividuals2[0]; + boolean foundX = false; + boolean foundY = false; + for (JsonAnnotation ann : ind.annotations) { + if (ann.key.equals(AnnotationShorthand.x.getShorthand())) { + foundX = "100".equals(ann.value); + } else if (ann.key.equals(AnnotationShorthand.y.getShorthand())) { + foundY = "200".equals(ann.value); + } + } + assertTrue(foundX); + assertTrue(foundY); + } + + @Test + public void testPmidIRIIndividual() throws Exception { + String modelId = generateBlankModel(); + + M3Request r; + final List batch1 = new ArrayList(); + r = new M3Request(); + r.entity = Entity.individual; + r.operation = Operation.add; + r.arguments = new M3Argument(); + r.arguments.modelId = modelId; + r.arguments.individualIRI = "PMID:0000"; + BatchTestTools.setExpressionClass(r.arguments, "IAO:0000311"); + batch1.add(r); + + // de-activate check as "IAO:0000311" is currently not in the import chain + boolean defaultIdPolicy = handler.CHECK_LITERAL_IDENTIFIERS; + M3BatchResponse response1; + try { + handler.CHECK_LITERAL_IDENTIFIERS = false; + response1 = executeBatch(batch1, false); + } finally { + handler.CHECK_LITERAL_IDENTIFIERS = defaultIdPolicy; + } + + JsonOwlIndividual[] individuals1 = BatchTestTools.responseIndividuals(response1); + assertEquals(1, individuals1.length); + assertEquals("PMID:0000", individuals1[0].id); + + // de-activate check as "IAO:0000311" is currently not in the import chain + // execute second request to test behavior for multiple adds with the same PMID + defaultIdPolicy = 
handler.CHECK_LITERAL_IDENTIFIERS; + M3BatchResponse response2; + try { + handler.CHECK_LITERAL_IDENTIFIERS = false; + response2 = executeBatch(batch1, false); + } finally { + handler.CHECK_LITERAL_IDENTIFIERS = defaultIdPolicy; + } + + JsonOwlIndividual[] individuals2 = BatchTestTools.responseIndividuals(response2); + assertEquals(1, individuals2.length); + assertEquals("PMID:0000", individuals2[0].id); + } + + @Test + public void testUnknownIdentifier() throws Exception { + String modelId = generateBlankModel(); + + M3Request r; + final List batch1 = new ArrayList(); + r = new M3Request(); + r.entity = Entity.individual; + r.operation = Operation.add; + r.arguments = new M3Argument(); + r.arguments.modelId = modelId; + BatchTestTools.setExpressionClass(r.arguments, "IDA"); + batch1.add(r); + + boolean defaultIdPolicy = handler.CHECK_LITERAL_IDENTIFIERS; + M3BatchResponse response; + try { + handler.CHECK_LITERAL_IDENTIFIERS = true; + response = handler.m3Batch(uid, providedBy, intention, packetId, batch1.toArray(new M3Request[batch1.size()]), false, true); + } finally { + handler.CHECK_LITERAL_IDENTIFIERS = defaultIdPolicy; + } + assertEquals(uid, response.uid); + assertEquals(intention, response.intention); + + // this has to fail as IDA is *not* a known identifier + assertEquals(M3BatchResponse.MESSAGE_TYPE_ERROR, response.messageType); + } + + //FIXME @Test + public void testRelationLabels() throws Exception { + //models.dispose(); + + // find test relation + Set properties = models.getOntology().getObjectPropertiesInSignature(Imports.INCLUDED); + OWLObjectProperty gorel0002006 = null; + for (OWLObjectProperty p : properties) { + IRI iri = p.getIRI(); + if (iri.toString().endsWith("http://purl.obolibrary.org/obo/GOREL_0002006")) { + gorel0002006 = p; + } + } + assertNotNull(gorel0002006); + String gorel0002006Curie = curieHandler.getCuri(gorel0002006); + + // check meta + M3Request r = new M3Request(); + r.entity = Entity.meta; + r.operation = Operation.get; + + M3BatchResponse response1 = execute(r, false); + final JsonRelationInfo[] relations = BatchTestTools.responseRelations(response1); + JsonRelationInfo gorel0002006Info = null; + for (JsonRelationInfo rel : relations) { + if (rel.id.equals(gorel0002006Curie)) { + gorel0002006Info = rel; + } + } + assertNotNull(gorel0002006Info); + assertEquals("results_in_organization_of", gorel0002006Info.label); + + + // use relation and check that response also contains relation label + String modelId = generateBlankModel(); + + r = new M3Request(); + r.entity = Entity.individual; + r.operation = Operation.add; + r.arguments = new M3Argument(); + r.arguments.modelId = modelId; + r.arguments.expressions = new JsonOwlObject[1]; + r.arguments.expressions[0] = BatchTestTools.createSvf(gorel0002006Curie, "GO:0003674"); + + M3BatchResponse response2 = execute(r, false); + JsonOwlIndividual[] individuals = BatchTestTools.responseIndividuals(response2); + assertEquals(1, individuals.length); + JsonOwlIndividual individual = individuals[0]; + JsonOwlObject[] types = individual.type; + assertEquals(1, types.length); + JsonOwlObject property = types[0].property; + assertEquals(gorel0002006Curie, property.id); + assertEquals("results_in_organization_of", property.label); + + } + + private M3BatchResponse execute(M3Request r, boolean useReasoner) { + return executeBatch(Collections.singletonList(r), useReasoner); + } + + private M3BatchResponse executeBatch(List batch, boolean useReasoner) { + return executeBatch(batch, uid, useReasoner); + } + + private 
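(A sketch that is not part of this patch.) The relation-label lookup above can also be written against the BatchTestTools.getMeta helper defined later in this patch; MetaResponse is the M3BatchResponse.MetaResponse type that helper returns, and its relations field is assumed from the responseRelations accessor. gorel0002006Curie is the CURIE computed in the test above.

    // Fetch the handler's meta data and find one relation by CURIE.
    MetaResponse meta = BatchTestTools.getMeta(handler);
    JsonRelationInfo found = null;
    for (JsonRelationInfo relation : meta.relations) {
        if (gorel0002006Curie.equals(relation.id)) {
            found = relation;
        }
    }
    assertNotNull(found);
    assertEquals("results_in_organization_of", found.label);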
M3BatchResponse executeBatch(List batch, String uid, boolean useReasoner) { + M3BatchResponse response = handler.m3Batch(uid, providedBy, intention, packetId, batch.toArray(new M3Request[batch.size()]), useReasoner, true); + assertEquals(uid, response.uid); + assertEquals(intention, response.intention); + assertEquals(response.message, M3BatchResponse.MESSAGE_TYPE_SUCCESS, response.messageType); + return response; + } + + /** + * @return modelId + */ + private String generateBlankModel() { + String modelId = BatchTestTools.generateBlankModel(handler); + return modelId; + } } diff --git a/minerva-server/src/test/java/org/geneontology/minerva/server/handler/BatchTestTools.java b/minerva-server/src/test/java/org/geneontology/minerva/server/handler/BatchTestTools.java index 26cfebbc..6cadbd69 100644 --- a/minerva-server/src/test/java/org/geneontology/minerva/server/handler/BatchTestTools.java +++ b/minerva-server/src/test/java/org/geneontology/minerva/server/handler/BatchTestTools.java @@ -1,233 +1,222 @@ package org.geneontology.minerva.server.handler; -import static org.junit.Assert.assertEquals; -import static org.junit.Assert.assertNotNull; +import org.geneontology.minerva.json.*; +import org.geneontology.minerva.json.JsonOwlObject.JsonOwlObjectType; +import org.geneontology.minerva.server.handler.M3BatchHandler.*; +import org.geneontology.minerva.server.handler.M3BatchHandler.M3BatchResponse.MetaResponse; +import org.geneontology.minerva.util.AnnotationShorthand; import java.util.List; import java.util.Map; -import org.geneontology.minerva.json.JsonAnnotation; -import org.geneontology.minerva.json.JsonEvidenceInfo; -import org.geneontology.minerva.json.JsonOwlFact; -import org.geneontology.minerva.json.JsonOwlIndividual; -import org.geneontology.minerva.json.JsonOwlObject; -import org.geneontology.minerva.json.JsonOwlObject.JsonOwlObjectType; -import org.geneontology.minerva.json.JsonRelationInfo; -import org.geneontology.minerva.json.JsonTools; -import org.geneontology.minerva.json.MolecularModelJsonRenderer; -import org.geneontology.minerva.server.handler.M3BatchHandler.Entity; -import org.geneontology.minerva.server.handler.M3BatchHandler.M3Argument; -import org.geneontology.minerva.server.handler.M3BatchHandler.M3BatchResponse; -import org.geneontology.minerva.server.handler.M3BatchHandler.M3BatchResponse.MetaResponse; -import org.geneontology.minerva.server.handler.M3BatchHandler.M3Request; -import org.geneontology.minerva.server.handler.M3BatchHandler.Operation; -import org.geneontology.minerva.util.AnnotationShorthand; +import static org.junit.Assert.assertEquals; +import static org.junit.Assert.assertNotNull; public class BatchTestTools { - static M3Request addIndividual(String modelId, String cls, JsonOwlObject...expressions) { - M3Request r = new M3Request(); - r.entity = Entity.individual; - r.operation = Operation.add; - r.arguments = new M3Argument(); - r.arguments.modelId = modelId; - BatchTestTools.setExpressionClass(r.arguments, cls); - if (expressions != null && expressions.length > 0) { - JsonOwlObject[] temp = new JsonOwlObject[expressions.length+1]; - temp[0] = r.arguments.expressions[0]; - for (int i = 0; i < expressions.length; i++) { - temp[i+1] = expressions[i]; - } - r.arguments.expressions = temp; - } - - return r; - } - - static M3Request removeIndividual(String modelId, String individual) { - M3Request r = new M3Request(); - r.entity = Entity.individual; - r.operation = Operation.remove; - r.arguments = new M3Argument(); - r.arguments.modelId = modelId; - 
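(A sketch that is not part of this patch.) How the private helpers above and the BatchTestTools class below fit together in a typical test; the method name is hypothetical and only illustrates the harness.

    @Test
    public void sketchMinimalBatch() throws Exception {
        // Blank model, one individual, success and content checks.
        String modelId = generateBlankModel();
        List<M3Request> batch = new ArrayList<M3Request>();
        batch.add(BatchTestTools.addIndividual(modelId, "GO:0008150")); // biological process
        M3BatchResponse response = executeBatch(batch, false);
        JsonOwlIndividual[] individuals = BatchTestTools.responseIndividuals(response);
        assertEquals(1, individuals.length);
        assertEquals("GO:0008150", individuals[0].type[0].id);
    }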
r.arguments.individual = individual; - return r; - } - - static M3Request removeIndividualAnnotation(String modelId, String individual, AnnotationShorthand key, String value) { - M3Request r = new M3Request(); - r.entity = Entity.individual; - r.operation = Operation.removeAnnotation; - r.arguments = new M3Argument(); - r.arguments.modelId = modelId; - r.arguments.individual = individual; - r.arguments.values = singleAnnotation(key, value); - return r; - } - - static M3Request addEdge(String modelId, String sub, String pred, String obj) { - M3Request r = new M3Request(); - r.entity = Entity.edge; - r.operation = Operation.add; - r.arguments = new M3Argument(); - r.arguments.modelId = modelId; - r.arguments.subject = sub; - r.arguments.predicate = pred; - r.arguments.object = obj; - return r; - } - - static M3Request deleteEdge(String modelId, String sub, String pred, String obj) { - M3Request r = new M3Request(); - r.entity = Entity.edge; - r.operation = Operation.remove; - r.arguments = new M3Argument(); - r.arguments.modelId = modelId; - r.arguments.subject = sub; - r.arguments.predicate = pred; - r.arguments.object = obj; - return r; - } - - static void setExpressionClass(M3Argument arg, String cls) { - arg.expressions = new JsonOwlObject[1]; - arg.expressions[0] = new JsonOwlObject(); - arg.expressions[0].type = JsonOwlObjectType.Class; - arg.expressions[0].id = cls; - } - - static JsonOwlObject createClass(String cls) { - JsonOwlObject json = new JsonOwlObject(); - json.type = JsonOwlObjectType.Class; - json.id = cls; - return json; - } - - static JsonOwlObject createSvf(String prop, String filler) { - JsonOwlObject json = new JsonOwlObject(); - json.type = JsonOwlObjectType.SomeValueFrom; - json.property = new JsonOwlObject(); - json.property.type = JsonOwlObjectType.ObjectProperty; - json.property.id = prop; - json.filler = new JsonOwlObject(); - json.filler.type = JsonOwlObjectType.Class; - json.filler.id = filler; - return json; - } - - static void printJson(Object resp) { - String json = MolecularModelJsonRenderer.renderToJson(resp, true); - System.out.println("---------"); - System.out.println(json); - System.out.println("---------"); - } - - static JsonOwlIndividual[] responseIndividuals(M3BatchResponse response) { - assertNotNull(response); - assertNotNull(response.data); - return response.data.individuals; - } - - static JsonOwlFact[] responseFacts(M3BatchResponse response) { - assertNotNull(response); - assertNotNull(response.data); - return response.data.facts; - } - - static JsonAnnotation[] responseAnnotations(M3BatchResponse response) { - assertNotNull(response); - assertNotNull(response.data); - return response.data.annotations; - } - - static String responseId(M3BatchResponse response) { - assertNotNull(response); - assertNotNull(response.data); - return response.data.modelId; - } - - static JsonRelationInfo[] responseRelations(M3BatchResponse response) { - assertNotNull(response); - assertNotNull(response.data); - assertNotNull(response.data.meta); - return response.data.meta.relations; - } - - static JsonRelationInfo[] responseDataProperties(M3BatchResponse response) { - assertNotNull(response); - assertNotNull(response.data); - assertNotNull(response.data.meta); - return response.data.meta.dataProperties; - } - - static Boolean responseInconsistent(M3BatchResponse response) { - assertNotNull(response); - assertNotNull(response.data); - return response.data.inconsistentFlag; - } - - static Map> responseModelsMeta(M3BatchResponse response) { - assertNotNull(response); 
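(A sketch that is not part of this patch.) The response accessors in this class assert response.data (and data.meta) before dereferencing, so a failed batch shows up as an assertion failure rather than a NullPointerException. Note that generic type parameters were stripped in the rendering of this patch; responseModelsMeta, for example, presumably returns a Map<String, List<JsonAnnotation>> keyed by model id, which is how the deprecation test earlier in this file consumes it.

    // Presumed shape of the models-meta payload (the generics are an assumption,
    // not visible in the patch text); response and modelId come from a meta get.
    Map<String, List<JsonAnnotation>> modelsMeta = BatchTestTools.responseModelsMeta(response);
    List<JsonAnnotation> annotations = modelsMeta.get(modelId);
    assertNotNull(annotations);
    boolean deprecated = false;
    for (JsonAnnotation annotation : annotations) {
        if (AnnotationShorthand.deprecated.name().equals(annotation.key)) {
            deprecated = Boolean.parseBoolean(annotation.value);
        }
    }
    assertTrue(deprecated);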
- assertNotNull(response.data); - assertNotNull(response.data.meta); - return response.data.meta.modelsMeta; - } - - static JsonEvidenceInfo[] responseEvidences(M3BatchResponse response) { - assertNotNull(response); - assertNotNull(response.data); - assertNotNull(response.data.meta); - return response.data.meta.evidence; - } - - static String responseExport(M3BatchResponse response) { - assertNotNull(response); - assertNotNull(response.data); - return response.data.exportModel; - } - - static String generateBlankModel(JsonOrJsonpBatchHandler handler) { - // create blank model - M3Request[] batch = new M3Request[1]; - batch[0] = new M3Request(); - batch[0].entity = Entity.model; - batch[0].operation = Operation.add; - M3BatchResponse resp = handler.m3Batch(BatchModelHandlerTest.uid, BatchModelHandlerTest.providedBy, BatchModelHandlerTest.intention, null, batch, false, true); - assertEquals(resp.message, M3BatchResponse.MESSAGE_TYPE_SUCCESS, resp.messageType); - assertNotNull(resp.packetId); - String modelId = responseId(resp); - assertNotNull(modelId); - return modelId; - } - - static MetaResponse getMeta(JsonOrJsonpBatchHandler handler) { - M3Request[] batch = new M3Request[1]; - batch[0] = new M3Request(); - batch[0].entity = Entity.meta; - batch[0].operation = Operation.get; - M3BatchResponse resp = handler.m3Batch(BatchModelHandlerTest.uid, BatchModelHandlerTest.providedBy, BatchModelHandlerTest.intention, null, batch, false, true); - assertEquals(resp.message, M3BatchResponse.MESSAGE_TYPE_SUCCESS, resp.messageType); - assertNotNull(resp.packetId); - assertNotNull(resp.data); - assertNotNull(resp.data.meta); - return resp.data.meta; - } - - static M3BatchResponse getModel(JsonOrJsonpBatchHandler handler, String modelId, boolean useReasoner) { - M3Request[] batch = new M3Request[1]; - batch[0] = new M3Request(); - batch[0].entity = Entity.model; - batch[0].operation = Operation.get; - batch[0].arguments = new M3Argument(); - batch[0].arguments.modelId = modelId; - M3BatchResponse resp = handler.m3Batch(BatchModelHandlerTest.uid, BatchModelHandlerTest.providedBy, BatchModelHandlerTest.intention, null, batch, useReasoner, true); - assertEquals(resp.message, M3BatchResponse.MESSAGE_TYPE_SUCCESS, resp.messageType); - assertNotNull(resp.packetId); - return resp; - } - - static JsonAnnotation[] singleAnnotation(AnnotationShorthand sh, String value) { - return new JsonAnnotation[]{ JsonTools.create(sh, value, null)}; - } + static M3Request addIndividual(String modelId, String cls, JsonOwlObject... 
expressions) { + M3Request r = new M3Request(); + r.entity = Entity.individual; + r.operation = Operation.add; + r.arguments = new M3Argument(); + r.arguments.modelId = modelId; + BatchTestTools.setExpressionClass(r.arguments, cls); + if (expressions != null && expressions.length > 0) { + JsonOwlObject[] temp = new JsonOwlObject[expressions.length + 1]; + temp[0] = r.arguments.expressions[0]; + for (int i = 0; i < expressions.length; i++) { + temp[i + 1] = expressions[i]; + } + r.arguments.expressions = temp; + } + + return r; + } + + static M3Request removeIndividual(String modelId, String individual) { + M3Request r = new M3Request(); + r.entity = Entity.individual; + r.operation = Operation.remove; + r.arguments = new M3Argument(); + r.arguments.modelId = modelId; + r.arguments.individual = individual; + return r; + } + + static M3Request removeIndividualAnnotation(String modelId, String individual, AnnotationShorthand key, String value) { + M3Request r = new M3Request(); + r.entity = Entity.individual; + r.operation = Operation.removeAnnotation; + r.arguments = new M3Argument(); + r.arguments.modelId = modelId; + r.arguments.individual = individual; + r.arguments.values = singleAnnotation(key, value); + return r; + } + + static M3Request addEdge(String modelId, String sub, String pred, String obj) { + M3Request r = new M3Request(); + r.entity = Entity.edge; + r.operation = Operation.add; + r.arguments = new M3Argument(); + r.arguments.modelId = modelId; + r.arguments.subject = sub; + r.arguments.predicate = pred; + r.arguments.object = obj; + return r; + } + + static M3Request deleteEdge(String modelId, String sub, String pred, String obj) { + M3Request r = new M3Request(); + r.entity = Entity.edge; + r.operation = Operation.remove; + r.arguments = new M3Argument(); + r.arguments.modelId = modelId; + r.arguments.subject = sub; + r.arguments.predicate = pred; + r.arguments.object = obj; + return r; + } + + static void setExpressionClass(M3Argument arg, String cls) { + arg.expressions = new JsonOwlObject[1]; + arg.expressions[0] = new JsonOwlObject(); + arg.expressions[0].type = JsonOwlObjectType.Class; + arg.expressions[0].id = cls; + } + + static JsonOwlObject createClass(String cls) { + JsonOwlObject json = new JsonOwlObject(); + json.type = JsonOwlObjectType.Class; + json.id = cls; + return json; + } + + static JsonOwlObject createSvf(String prop, String filler) { + JsonOwlObject json = new JsonOwlObject(); + json.type = JsonOwlObjectType.SomeValueFrom; + json.property = new JsonOwlObject(); + json.property.type = JsonOwlObjectType.ObjectProperty; + json.property.id = prop; + json.filler = new JsonOwlObject(); + json.filler.type = JsonOwlObjectType.Class; + json.filler.id = filler; + return json; + } + + static void printJson(Object resp) { + String json = MolecularModelJsonRenderer.renderToJson(resp, true); + System.out.println("---------"); + System.out.println(json); + System.out.println("---------"); + } + + static JsonOwlIndividual[] responseIndividuals(M3BatchResponse response) { + assertNotNull(response); + assertNotNull(response.data); + return response.data.individuals; + } + + static JsonOwlFact[] responseFacts(M3BatchResponse response) { + assertNotNull(response); + assertNotNull(response.data); + return response.data.facts; + } + + static JsonAnnotation[] responseAnnotations(M3BatchResponse response) { + assertNotNull(response); + assertNotNull(response.data); + return response.data.annotations; + } + + static String responseId(M3BatchResponse response) { + 
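(A sketch that is not part of this patch.) setExpressionClass and createSvf are the two ways these tests build type expressions; addIndividual prepends the named class and appends any extra expressions passed as varargs.

    // An individual typed as GO:0003674 and additionally as (BFO:0000050 some GO:0008150).
    JsonOwlObject partOfSomeBp = BatchTestTools.createSvf("BFO:0000050", "GO:0008150");
    M3Request request = BatchTestTools.addIndividual(modelId, "GO:0003674", partOfSomeBp);
    // request.arguments.expressions now holds the class expression first,
    // followed by the SomeValueFrom expression.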
assertNotNull(response); + assertNotNull(response.data); + return response.data.modelId; + } + + static JsonRelationInfo[] responseRelations(M3BatchResponse response) { + assertNotNull(response); + assertNotNull(response.data); + assertNotNull(response.data.meta); + return response.data.meta.relations; + } + + static JsonRelationInfo[] responseDataProperties(M3BatchResponse response) { + assertNotNull(response); + assertNotNull(response.data); + assertNotNull(response.data.meta); + return response.data.meta.dataProperties; + } + + static Boolean responseInconsistent(M3BatchResponse response) { + assertNotNull(response); + assertNotNull(response.data); + return response.data.inconsistentFlag; + } + + static Map> responseModelsMeta(M3BatchResponse response) { + assertNotNull(response); + assertNotNull(response.data); + assertNotNull(response.data.meta); + return response.data.meta.modelsMeta; + } + + static JsonEvidenceInfo[] responseEvidences(M3BatchResponse response) { + assertNotNull(response); + assertNotNull(response.data); + assertNotNull(response.data.meta); + return response.data.meta.evidence; + } + + static String responseExport(M3BatchResponse response) { + assertNotNull(response); + assertNotNull(response.data); + return response.data.exportModel; + } + + static String generateBlankModel(JsonOrJsonpBatchHandler handler) { + // create blank model + M3Request[] batch = new M3Request[1]; + batch[0] = new M3Request(); + batch[0].entity = Entity.model; + batch[0].operation = Operation.add; + M3BatchResponse resp = handler.m3Batch(BatchModelHandlerTest.uid, BatchModelHandlerTest.providedBy, BatchModelHandlerTest.intention, null, batch, false, true); + assertEquals(resp.message, M3BatchResponse.MESSAGE_TYPE_SUCCESS, resp.messageType); + assertNotNull(resp.packetId); + String modelId = responseId(resp); + assertNotNull(modelId); + return modelId; + } + + static MetaResponse getMeta(JsonOrJsonpBatchHandler handler) { + M3Request[] batch = new M3Request[1]; + batch[0] = new M3Request(); + batch[0].entity = Entity.meta; + batch[0].operation = Operation.get; + M3BatchResponse resp = handler.m3Batch(BatchModelHandlerTest.uid, BatchModelHandlerTest.providedBy, BatchModelHandlerTest.intention, null, batch, false, true); + assertEquals(resp.message, M3BatchResponse.MESSAGE_TYPE_SUCCESS, resp.messageType); + assertNotNull(resp.packetId); + assertNotNull(resp.data); + assertNotNull(resp.data.meta); + return resp.data.meta; + } + + static M3BatchResponse getModel(JsonOrJsonpBatchHandler handler, String modelId, boolean useReasoner) { + M3Request[] batch = new M3Request[1]; + batch[0] = new M3Request(); + batch[0].entity = Entity.model; + batch[0].operation = Operation.get; + batch[0].arguments = new M3Argument(); + batch[0].arguments.modelId = modelId; + M3BatchResponse resp = handler.m3Batch(BatchModelHandlerTest.uid, BatchModelHandlerTest.providedBy, BatchModelHandlerTest.intention, null, batch, useReasoner, true); + assertEquals(resp.message, M3BatchResponse.MESSAGE_TYPE_SUCCESS, resp.messageType); + assertNotNull(resp.packetId); + return resp; + } + + static JsonAnnotation[] singleAnnotation(AnnotationShorthand sh, String value) { + return new JsonAnnotation[]{JsonTools.create(sh, value, null)}; + } } diff --git a/minerva-server/src/test/java/org/geneontology/minerva/server/handler/DataPropertyTest.java b/minerva-server/src/test/java/org/geneontology/minerva/server/handler/DataPropertyTest.java index fe756d7c..cb41017f 100644 --- 
a/minerva-server/src/test/java/org/geneontology/minerva/server/handler/DataPropertyTest.java +++ b/minerva-server/src/test/java/org/geneontology/minerva/server/handler/DataPropertyTest.java @@ -26,175 +26,175 @@ import static org.junit.Assert.assertNotNull; public class DataPropertyTest { - - @Rule - public TemporaryFolder folder = new TemporaryFolder(); - - private final CurieHandler curieHandler = DefaultCurieHandler.getDefaultHandler(); - static final String go_lego_journal_file = "/tmp/test-go-lego-blazegraph.jnl"; - - private UndoAwareMolecularModelManager createM3(OWLOntology tbox) throws OWLOntologyCreationException, IOException { - UndoAwareMolecularModelManager mmm = new UndoAwareMolecularModelManager(tbox, curieHandler, - "http://model.geneontology.org/", folder.newFile().getAbsolutePath(), null, go_lego_journal_file, true); - return mmm; - } - - @Test - public void testDataPropertyMetadata() throws Exception { - OWLOntologyManager m = OWLManager.createOWLOntologyManager(); - OWLOntology ontology = m.createOntology(IRI.generateDocumentIRI()); - { - // create a test ontology with one data property - OWLDataFactory f = m.getOWLDataFactory(); - IRI propIRI = IRI.generateDocumentIRI(); - OWLDataProperty prop = f.getOWLDataProperty(propIRI); - m.addAxiom(ontology, f.getOWLDeclarationAxiom(prop)); - m.addAxiom(ontology, f.getOWLAnnotationAssertionAxiom(propIRI, f.getOWLAnnotation(f.getRDFSLabel(), f.getOWLLiteral("fake-data-property")))); - } - MolecularModelManager mmm = createM3(ontology); - Pair,List> pair = MolecularModelJsonRenderer.renderProperties(mmm, null, curieHandler); - List dataProperties = pair.getRight(); - assertEquals(1, dataProperties.size()); - mmm.dispose(); - } - - @Test - public void testDataProperyRenderer() throws Exception { - OWLOntologyManager m = OWLManager.createOWLOntologyManager(); - OWLOntology ontology = m.createOntology(IRI.generateDocumentIRI()); - final IRI clsIRI = IRI.generateDocumentIRI(); - final IRI propIRI = IRI.generateDocumentIRI(); - - // create a test ontology with one data property and one class - OWLDataFactory f = m.getOWLDataFactory(); - OWLDataProperty prop = f.getOWLDataProperty(propIRI); - m.addAxiom(ontology, f.getOWLDeclarationAxiom(prop)); - m.addAxiom(ontology, f.getOWLAnnotationAssertionAxiom(propIRI, f.getOWLAnnotation(f.getRDFSLabel(), f.getOWLLiteral("fake-data-property")))); - - OWLClass cls = f.getOWLClass(clsIRI); - m.addAxiom(ontology, f.getOWLDeclarationAxiom(cls)); - m.addAxiom(ontology, f.getOWLAnnotationAssertionAxiom(clsIRI, f.getOWLAnnotation(f.getRDFSLabel(), f.getOWLLiteral("fake-cls")))); - - // graph and m3 - final UndoMetadata metadata = new UndoMetadata("foo-user"); - UndoAwareMolecularModelManager m3 = createM3(ontology); - - final ModelContainer model = m3.generateBlankModel(metadata); - final OWLNamedIndividual individual = m3.createIndividual(model, cls, metadata); - m3.addDataProperty(model, individual, prop, f.getOWLLiteral(10), metadata); - - MolecularModelJsonRenderer r = new MolecularModelJsonRenderer(model, null, curieHandler); - final JsonModel jsonModel = r.renderModel(); - assertEquals(1, jsonModel.individuals.length); - assertEquals(1, jsonModel.individuals[0].annotations.length); - { - JsonAnnotation ann = jsonModel.individuals[0].annotations[0]; - assertEquals(propIRI.toString(), ann.key); - assertEquals("10", ann.value); - assertEquals("xsd:integer", ann.valueType); - } - m3.dispose(); - } - - @Test - public void testDataPropertyBatch() throws Exception { - OWLOntologyManager m = 
OWLManager.createOWLOntologyManager(); - OWLOntology ontology = m.createOntology(IRI.generateDocumentIRI()); - final IRI clsIRI = IRI.create("http://purl.obolibrary.org/obo/GO_0001"); - final IRI propIRI = IRI.create("http://purl.obolibrary.org/obo/RO_0001"); - - // create a test ontology with one data property and one class - OWLDataFactory f = m.getOWLDataFactory(); - OWLDataProperty prop = f.getOWLDataProperty(propIRI); - m.addAxiom(ontology, f.getOWLDeclarationAxiom(prop)); - m.addAxiom(ontology, f.getOWLAnnotationAssertionAxiom(propIRI, f.getOWLAnnotation(f.getRDFSLabel(), f.getOWLLiteral("fake-data-property")))); - - OWLClass cls = f.getOWLClass(clsIRI); - m.addAxiom(ontology, f.getOWLDeclarationAxiom(cls)); - m.addAxiom(ontology, f.getOWLAnnotationAssertionAxiom(clsIRI, f.getOWLAnnotation(f.getRDFSLabel(), f.getOWLLiteral("fake-cls")))); - - // graph and m3 - UndoAwareMolecularModelManager m3 = createM3(ontology); - - // handler - InferenceProviderCreator ipc = null; - JsonOrJsonpBatchHandler handler = new JsonOrJsonpBatchHandler(m3, "development", ipc, null, null); - - // empty model - final ModelContainer model = m3.generateBlankModel(new UndoMetadata("foo-user")); - - // create individual with annotations, including one data property - M3Request r1 = BatchTestTools.addIndividual(curieHandler.getCuri(model.getModelId()), "GO:0001"); - r1.arguments.values = new JsonAnnotation[2]; - r1.arguments.values[0] = new JsonAnnotation(); - r1.arguments.values[0].key = AnnotationShorthand.comment.name(); - r1.arguments.values[0].value = "foo-comment"; - r1.arguments.values[1] = new JsonAnnotation(); - r1.arguments.values[1].key = curieHandler.getCuri(propIRI); - r1.arguments.values[1].value = "10"; - r1.arguments.values[1].valueType = "xsd:integer"; - - M3BatchResponse response1 = exec(handler, Collections.singletonList(r1)); - - final String individualsId; - // check for data property as annotation - { - assertEquals(1, response1.data.individuals.length); - JsonOwlIndividual i = response1.data.individuals[0]; - assertEquals(4, i.annotations.length); - individualsId = i.id; - JsonAnnotation dataPropAnnotation = null; - for(JsonAnnotation ann : i.annotations) { - if (curieHandler.getCuri(propIRI).equals(ann.key)) { - dataPropAnnotation = ann; - } - } - assertNotNull(dataPropAnnotation); - } - assertNotNull(individualsId); - - // check underlying owl model for usage of OWLDataProperty - { - Set axioms = model.getAboxOntology().getAxioms(AxiomType.DATA_PROPERTY_ASSERTION); - assertEquals(1, axioms.size()); - OWLDataPropertyAssertionAxiom ax = axioms.iterator().next(); - OWLLiteral literal = ax.getObject(); - assertEquals(prop, ax.getProperty()); - assertEquals(f.getOWLLiteral(10), literal); - } - - // delete data property - M3Request r2 = new M3Request(); - r2.entity = Entity.individual; - r2.operation = Operation.removeAnnotation; - r2.arguments = new M3Argument(); - r2.arguments.individual = individualsId; - r2.arguments.modelId = curieHandler.getCuri(model.getModelId()); - r2.arguments.values = new JsonAnnotation[1]; - r2.arguments.values[0] = new JsonAnnotation(); - r2.arguments.values[0].key = propIRI.toString(); - r2.arguments.values[0].value = "10"; - r2.arguments.values[0].valueType = "xsd:integer"; - - M3BatchResponse response2 = exec(handler, Collections.singletonList(r2)); - // check for deleted property as annotation - { - assertEquals(1, response2.data.individuals.length); - JsonOwlIndividual i = response2.data.individuals[0]; - assertEquals(3, i.annotations.length); - } - 
m3.dispose(); - } - - private M3BatchResponse exec(JsonOrJsonpBatchHandler handler, List requests) { - String uid = "foo-user"; - String intention = "generated"; - String packetId = "0"; - M3BatchResponse response = handler.m3Batch(uid, Collections.emptySet(), intention, packetId, requests.toArray(new M3Request[requests.size()]), false, true); - assertEquals(uid, response.uid); - assertEquals(intention, response.intention); - assertEquals(packetId, response.packetId); - assertEquals(response.message, M3BatchResponse.MESSAGE_TYPE_SUCCESS, response.messageType); - return response; - } + + @Rule + public TemporaryFolder folder = new TemporaryFolder(); + + private final CurieHandler curieHandler = DefaultCurieHandler.getDefaultHandler(); + static final String go_lego_journal_file = "/tmp/test-go-lego-blazegraph.jnl"; + + private UndoAwareMolecularModelManager createM3(OWLOntology tbox) throws OWLOntologyCreationException, IOException { + UndoAwareMolecularModelManager mmm = new UndoAwareMolecularModelManager(tbox, curieHandler, + "http://model.geneontology.org/", folder.newFile().getAbsolutePath(), null, go_lego_journal_file, true); + return mmm; + } + + @Test + public void testDataPropertyMetadata() throws Exception { + OWLOntologyManager m = OWLManager.createOWLOntologyManager(); + OWLOntology ontology = m.createOntology(IRI.generateDocumentIRI()); + { + // create a test ontology with one data property + OWLDataFactory f = m.getOWLDataFactory(); + IRI propIRI = IRI.generateDocumentIRI(); + OWLDataProperty prop = f.getOWLDataProperty(propIRI); + m.addAxiom(ontology, f.getOWLDeclarationAxiom(prop)); + m.addAxiom(ontology, f.getOWLAnnotationAssertionAxiom(propIRI, f.getOWLAnnotation(f.getRDFSLabel(), f.getOWLLiteral("fake-data-property")))); + } + MolecularModelManager mmm = createM3(ontology); + Pair, List> pair = MolecularModelJsonRenderer.renderProperties(mmm, null, curieHandler); + List dataProperties = pair.getRight(); + assertEquals(1, dataProperties.size()); + mmm.dispose(); + } + + @Test + public void testDataProperyRenderer() throws Exception { + OWLOntologyManager m = OWLManager.createOWLOntologyManager(); + OWLOntology ontology = m.createOntology(IRI.generateDocumentIRI()); + final IRI clsIRI = IRI.generateDocumentIRI(); + final IRI propIRI = IRI.generateDocumentIRI(); + + // create a test ontology with one data property and one class + OWLDataFactory f = m.getOWLDataFactory(); + OWLDataProperty prop = f.getOWLDataProperty(propIRI); + m.addAxiom(ontology, f.getOWLDeclarationAxiom(prop)); + m.addAxiom(ontology, f.getOWLAnnotationAssertionAxiom(propIRI, f.getOWLAnnotation(f.getRDFSLabel(), f.getOWLLiteral("fake-data-property")))); + + OWLClass cls = f.getOWLClass(clsIRI); + m.addAxiom(ontology, f.getOWLDeclarationAxiom(cls)); + m.addAxiom(ontology, f.getOWLAnnotationAssertionAxiom(clsIRI, f.getOWLAnnotation(f.getRDFSLabel(), f.getOWLLiteral("fake-cls")))); + + // graph and m3 + final UndoMetadata metadata = new UndoMetadata("foo-user"); + UndoAwareMolecularModelManager m3 = createM3(ontology); + + final ModelContainer model = m3.generateBlankModel(metadata); + final OWLNamedIndividual individual = m3.createIndividual(model, cls, metadata); + m3.addDataProperty(model, individual, prop, f.getOWLLiteral(10), metadata); + + MolecularModelJsonRenderer r = new MolecularModelJsonRenderer(model, null, curieHandler); + final JsonModel jsonModel = r.renderModel(); + assertEquals(1, jsonModel.individuals.length); + assertEquals(1, jsonModel.individuals[0].annotations.length); + { + JsonAnnotation 
ann = jsonModel.individuals[0].annotations[0];
+ assertEquals(propIRI.toString(), ann.key);
+ assertEquals("10", ann.value);
+ assertEquals("xsd:integer", ann.valueType);
+ }
+ m3.dispose();
+ }
+
+ @Test
+ public void testDataPropertyBatch() throws Exception {
+ OWLOntologyManager m = OWLManager.createOWLOntologyManager();
+ OWLOntology ontology = m.createOntology(IRI.generateDocumentIRI());
+ final IRI clsIRI = IRI.create("http://purl.obolibrary.org/obo/GO_0001");
+ final IRI propIRI = IRI.create("http://purl.obolibrary.org/obo/RO_0001");
+
+ // create a test ontology with one data property and one class
+ OWLDataFactory f = m.getOWLDataFactory();
+ OWLDataProperty prop = f.getOWLDataProperty(propIRI);
+ m.addAxiom(ontology, f.getOWLDeclarationAxiom(prop));
+ m.addAxiom(ontology, f.getOWLAnnotationAssertionAxiom(propIRI, f.getOWLAnnotation(f.getRDFSLabel(), f.getOWLLiteral("fake-data-property"))));
+
+ OWLClass cls = f.getOWLClass(clsIRI);
+ m.addAxiom(ontology, f.getOWLDeclarationAxiom(cls));
+ m.addAxiom(ontology, f.getOWLAnnotationAssertionAxiom(clsIRI, f.getOWLAnnotation(f.getRDFSLabel(), f.getOWLLiteral("fake-cls"))));
+
+ // graph and m3
+ UndoAwareMolecularModelManager m3 = createM3(ontology);
+
+ // handler
+ InferenceProviderCreator ipc = null;
+ JsonOrJsonpBatchHandler handler = new JsonOrJsonpBatchHandler(m3, "development", ipc, null, null);
+
+ // empty model
+ final ModelContainer model = m3.generateBlankModel(new UndoMetadata("foo-user"));
+
+ // create individual with annotations, including one data property
+ M3Request r1 = BatchTestTools.addIndividual(curieHandler.getCuri(model.getModelId()), "GO:0001");
+ r1.arguments.values = new JsonAnnotation[2];
+ r1.arguments.values[0] = new JsonAnnotation();
+ r1.arguments.values[0].key = AnnotationShorthand.comment.name();
+ r1.arguments.values[0].value = "foo-comment";
+ r1.arguments.values[1] = new JsonAnnotation();
+ r1.arguments.values[1].key = curieHandler.getCuri(propIRI);
+ r1.arguments.values[1].value = "10";
+ r1.arguments.values[1].valueType = "xsd:integer";
+
+ M3BatchResponse response1 = exec(handler, Collections.singletonList(r1));
+
+ final String individualsId;
+ // check for data property as annotation
+ {
+ assertEquals(1, response1.data.individuals.length);
+ JsonOwlIndividual i = response1.data.individuals[0];
+ assertEquals(4, i.annotations.length);
+ individualsId = i.id;
+ JsonAnnotation dataPropAnnotation = null;
+ for (JsonAnnotation ann : i.annotations) {
+ if (curieHandler.getCuri(propIRI).equals(ann.key)) {
+ dataPropAnnotation = ann;
+ }
+ }
+ assertNotNull(dataPropAnnotation);
+ }
+ assertNotNull(individualsId);
+
+ // check underlying owl model for usage of OWLDataProperty
+ {
+ Set<OWLDataPropertyAssertionAxiom> axioms = model.getAboxOntology().getAxioms(AxiomType.DATA_PROPERTY_ASSERTION);
+ assertEquals(1, axioms.size());
+ OWLDataPropertyAssertionAxiom ax = axioms.iterator().next();
+ OWLLiteral literal = ax.getObject();
+ assertEquals(prop, ax.getProperty());
+ assertEquals(f.getOWLLiteral(10), literal);
+ }
+
+ // delete data property
+ M3Request r2 = new M3Request();
+ r2.entity = Entity.individual;
+ r2.operation = Operation.removeAnnotation;
+ r2.arguments = new M3Argument();
+ r2.arguments.individual = individualsId;
+ r2.arguments.modelId = curieHandler.getCuri(model.getModelId());
+ r2.arguments.values = new JsonAnnotation[1];
+ r2.arguments.values[0] = new JsonAnnotation();
+ r2.arguments.values[0].key = propIRI.toString();
+ r2.arguments.values[0].value = "10";
+ r2.arguments.values[0].valueType = "xsd:integer";
+
+ 
M3BatchResponse response2 = exec(handler, Collections.singletonList(r2)); + // check for deleted property as annotation + { + assertEquals(1, response2.data.individuals.length); + JsonOwlIndividual i = response2.data.individuals[0]; + assertEquals(3, i.annotations.length); + } + m3.dispose(); + } + + private M3BatchResponse exec(JsonOrJsonpBatchHandler handler, List requests) { + String uid = "foo-user"; + String intention = "generated"; + String packetId = "0"; + M3BatchResponse response = handler.m3Batch(uid, Collections.emptySet(), intention, packetId, requests.toArray(new M3Request[requests.size()]), false, true); + assertEquals(uid, response.uid); + assertEquals(intention, response.intention); + assertEquals(packetId, response.packetId); + assertEquals(response.message, M3BatchResponse.MESSAGE_TYPE_SUCCESS, response.messageType); + return response; + } } diff --git a/minerva-server/src/test/java/org/geneontology/minerva/server/handler/LocalServerTest.java b/minerva-server/src/test/java/org/geneontology/minerva/server/handler/LocalServerTest.java index 1170efa8..1af7b48a 100644 --- a/minerva-server/src/test/java/org/geneontology/minerva/server/handler/LocalServerTest.java +++ b/minerva-server/src/test/java/org/geneontology/minerva/server/handler/LocalServerTest.java @@ -33,108 +33,108 @@ public class LocalServerTest { - @ClassRule - public static TemporaryFolder folder = new TemporaryFolder(); - - private static CurieHandler curieHandler = null; - private static UndoAwareMolecularModelManager models = null; - private static Server server = null; - private static String urlPrefix; - static final String go_lego_journal_file = "/tmp/test-go-lego-blazegraph.jnl"; - - - @BeforeClass - public static void setUpBeforeClass() throws Exception { - init(new ParserWrapper()); - } - - @AfterClass - public static void afterClass() throws Exception { - models.dispose(); - server.stop(); - server.destroy(); - } - - @After - public void after() { - } - - static void init(ParserWrapper pw) throws Exception { - final OWLOntology tbox = OWLManager.createOWLOntologyManager().loadOntology(IRI.create(new File("src/test/resources/go-lego-minimal.owl"))); - // curie handler - final String modelIdcurie = "gomodel"; - final String modelIdPrefix = "http://model.geneontology.org/"; - final CurieMappings localMappings = new CurieMappings.SimpleCurieMappings(Collections.singletonMap(modelIdcurie, modelIdPrefix)); - curieHandler = new MappedCurieHandler(DefaultCurieHandler.loadDefaultMappings(), localMappings); - models = new UndoAwareMolecularModelManager(tbox, curieHandler, modelIdPrefix, folder.newFile().getAbsolutePath(), null, go_lego_journal_file, true); - - MinervaStartUpConfig conf = new MinervaStartUpConfig(); - conf.reasonerOpt = "elk"; - conf.useRequestLogging = true; - conf.checkLiteralIds = false; - conf.lookupService = null; - conf.importantRelations = null; - conf.port = 6800; - conf.contextString = "/"; - server = StartUpTool.startUp(models, conf, null); - urlPrefix = "http://localhost:"+conf.port+conf.contextString; - } - - @Test - public void testLongGet() throws Exception { - String longGetSuffix = FileUtils.readFileToString(new File("src/test/resources/server-test/long-get.txt")); - String urlString = urlPrefix + longGetSuffix; - URL url = new URL(urlString); - String responseString = IOUtils.toString(url.openStream()); - M3BatchResponse response = parseResponse(responseString); - assertEquals(response.message, M3BatchResponse.MESSAGE_TYPE_SUCCESS, response.messageType); - } - - @Test - public 
void testPost() throws Exception {
- String urlString = urlPrefix + "m3Batch";
- final URL url = new URL(urlString);
-
- final Map<String, String> params = new LinkedHashMap<>();
- params.put("uid", "uid-1");
- params.put("intention", "query");
- params.put("requests", createMetaGetRequest());
-
- StringBuilder postData = new StringBuilder();
- for (Map.Entry<String, String> param : params.entrySet()) {
- if (postData.length() != 0) postData.append('&');
- postData.append(URLEncoder.encode(param.getKey(), "UTF-8"));
- postData.append('=');
- postData.append(URLEncoder.encode(param.getValue(), "UTF-8"));
- }
- byte[] postDataBytes = postData.toString().getBytes("UTF-8");
-
- HttpURLConnection conn = (HttpURLConnection)url.openConnection();
- conn.setRequestMethod("POST");
- conn.setRequestProperty("Content-Type", "application/x-www-form-urlencoded");
- conn.setRequestProperty("Content-Length", String.valueOf(postDataBytes.length));
- conn.setDoOutput(true);
- conn.getOutputStream().write(postDataBytes);
-
- String responseString = IOUtils.toString(conn.getInputStream(), "UTF-8");
- M3BatchResponse response = parseResponse(responseString);
- assertEquals(response.message, M3BatchResponse.MESSAGE_TYPE_SUCCESS, response.messageType);
-
- }
-
- private M3BatchResponse parseResponse(String responseString) {
- Gson gson = new GsonBuilder().create();
- M3BatchResponse response = gson.fromJson(responseString, M3BatchResponse.class);
- return response;
- }
-
- private String createMetaGetRequest() {
- M3Request r = new M3Request();
- r.entity = Entity.meta;
- r.operation = Operation.get;
- r.arguments = new M3Argument();
- String json = MolecularModelJsonRenderer.renderToJson(new M3Request[]{r}, false);
- return json;
-
- }
+ @ClassRule
+ public static TemporaryFolder folder = new TemporaryFolder();
+
+ private static CurieHandler curieHandler = null;
+ private static UndoAwareMolecularModelManager models = null;
+ private static Server server = null;
+ private static String urlPrefix;
+ static final String go_lego_journal_file = "/tmp/test-go-lego-blazegraph.jnl";
+
+
+ @BeforeClass
+ public static void setUpBeforeClass() throws Exception {
+ init(new ParserWrapper());
+ }
+
+ @AfterClass
+ public static void afterClass() throws Exception {
+ models.dispose();
+ server.stop();
+ server.destroy();
+ }
+
+ @After
+ public void after() {
+ }
+
+ static void init(ParserWrapper pw) throws Exception {
+ final OWLOntology tbox = OWLManager.createOWLOntologyManager().loadOntology(IRI.create(new File("src/test/resources/go-lego-minimal.owl")));
+ // curie handler
+ final String modelIdcurie = "gomodel";
+ final String modelIdPrefix = "http://model.geneontology.org/";
+ final CurieMappings localMappings = new CurieMappings.SimpleCurieMappings(Collections.singletonMap(modelIdcurie, modelIdPrefix));
+ curieHandler = new MappedCurieHandler(DefaultCurieHandler.loadDefaultMappings(), localMappings);
+ models = new UndoAwareMolecularModelManager(tbox, curieHandler, modelIdPrefix, folder.newFile().getAbsolutePath(), null, go_lego_journal_file, true);
+
+ MinervaStartUpConfig conf = new MinervaStartUpConfig();
+ conf.reasonerOpt = "elk";
+ conf.useRequestLogging = true;
+ conf.checkLiteralIds = false;
+ conf.lookupService = null;
+ conf.importantRelations = null;
+ conf.port = 6800;
+ conf.contextString = "/";
+ server = StartUpTool.startUp(models, conf, null);
+ urlPrefix = "http://localhost:" + conf.port + conf.contextString;
+ }
+
+ @Test
+ public void testLongGet() throws Exception {
+ String longGetSuffix = FileUtils.readFileToString(new File("src/test/resources/server-test/long-get.txt"));
+ String urlString = urlPrefix + longGetSuffix;
+ URL url = new URL(urlString);
+ String responseString = IOUtils.toString(url.openStream());
+ M3BatchResponse response = parseResponse(responseString);
+ assertEquals(response.message, M3BatchResponse.MESSAGE_TYPE_SUCCESS, response.messageType);
+ }
+
+ @Test
+ public void testPost() throws Exception {
+ String urlString = urlPrefix + "m3Batch";
+ final URL url = new URL(urlString);
+
+ final Map<String, String> params = new LinkedHashMap<>();
+ params.put("uid", "uid-1");
+ params.put("intention", "query");
+ params.put("requests", createMetaGetRequest());
+
+ StringBuilder postData = new StringBuilder();
+ for (Map.Entry<String, String> param : params.entrySet()) {
+ if (postData.length() != 0) postData.append('&');
+ postData.append(URLEncoder.encode(param.getKey(), "UTF-8"));
+ postData.append('=');
+ postData.append(URLEncoder.encode(param.getValue(), "UTF-8"));
+ }
+ byte[] postDataBytes = postData.toString().getBytes("UTF-8");
+
+ HttpURLConnection conn = (HttpURLConnection) url.openConnection();
+ conn.setRequestMethod("POST");
+ conn.setRequestProperty("Content-Type", "application/x-www-form-urlencoded");
+ conn.setRequestProperty("Content-Length", String.valueOf(postDataBytes.length));
+ conn.setDoOutput(true);
+ conn.getOutputStream().write(postDataBytes);
+
+ String responseString = IOUtils.toString(conn.getInputStream(), "UTF-8");
+ M3BatchResponse response = parseResponse(responseString);
+ assertEquals(response.message, M3BatchResponse.MESSAGE_TYPE_SUCCESS, response.messageType);
+
+ }
+
+ private M3BatchResponse parseResponse(String responseString) {
+ Gson gson = new GsonBuilder().create();
+ M3BatchResponse response = gson.fromJson(responseString, M3BatchResponse.class);
+ return response;
+ }
+
+ private String createMetaGetRequest() {
+ M3Request r = new M3Request();
+ r.entity = Entity.meta;
+ r.operation = Operation.get;
+ r.arguments = new M3Argument();
+ String json = MolecularModelJsonRenderer.renderToJson(new M3Request[]{r}, false);
+ return json;
+
+ }
 }
diff --git a/minerva-server/src/test/java/org/geneontology/minerva/server/handler/M3ExpressionParserTest.java b/minerva-server/src/test/java/org/geneontology/minerva/server/handler/M3ExpressionParserTest.java
index 4e94662b..3be39d4f 100644
--- a/minerva-server/src/test/java/org/geneontology/minerva/server/handler/M3ExpressionParserTest.java
+++ b/minerva-server/src/test/java/org/geneontology/minerva/server/handler/M3ExpressionParserTest.java
@@ -1,126 +1,119 @@
 package org.geneontology.minerva.server.handler;
-import static org.junit.Assert.*;
-
-import java.io.File;
-import java.io.IOException;
-
 import org.geneontology.minerva.MinervaOWLGraphWrapper;
 import org.geneontology.minerva.MolecularModelManager.UnknownIdentifierException;
 import org.geneontology.minerva.curie.CurieHandler;
 import org.geneontology.minerva.curie.DefaultCurieHandler;
 import org.geneontology.minerva.json.JsonOwlObject;
 import org.geneontology.minerva.json.JsonOwlObject.JsonOwlObjectType;
-import org.geneontology.minerva.server.handler.M3ExpressionParser;
 import org.geneontology.minerva.server.handler.OperationsTools.MissingParameterException;
 import org.junit.BeforeClass;
 import org.junit.Test;
-import org.semanticweb.owlapi.model.IRI;
-import org.semanticweb.owlapi.model.OWLClass;
-import org.semanticweb.owlapi.model.OWLClassExpression;
-import org.semanticweb.owlapi.model.OWLDataFactory;
-import org.semanticweb.owlapi.model.OWLObjectComplementOf;
-import 
org.semanticweb.owlapi.model.OWLObjectSomeValuesFrom; -import org.semanticweb.owlapi.model.OWLOntologyCreationException; - +import org.semanticweb.owlapi.model.*; import owltools.io.ParserWrapper; +import java.io.File; +import java.io.IOException; + +import static org.junit.Assert.assertEquals; +import static org.junit.Assert.assertNotNull; + public class M3ExpressionParserTest { - private static final CurieHandler curieHandler = DefaultCurieHandler.getDefaultHandler(); - private static MinervaOWLGraphWrapper graph; - - // these are present in the test module + private static final CurieHandler curieHandler = DefaultCurieHandler.getDefaultHandler(); + private static MinervaOWLGraphWrapper graph; + + // these are present in the test module private static final String CELL_MORPHOGENESIS = "GO:0000902"; private static final String NUCLEUS = "GO:0005623"; private static final String OCCURS_IN = "BFO:0000066"; - - @BeforeClass - public static void setUpBeforeClass() throws Exception { - init(new ParserWrapper()); - } - - static void init(ParserWrapper pw) throws OWLOntologyCreationException, IOException { - /* File file = new File("src/test/resources/go-lego-module.omn.gz").getCanonicalFile(); */ - File file = new File("src/test/resources/go-lego-module-compact.omn.gz").getCanonicalFile(); - graph = new MinervaOWLGraphWrapper(pw.parseOWL(IRI.create(file))); - } - - @Test(expected=MissingParameterException.class) - public void testMissing0() throws Exception { - JsonOwlObject expression = null; - new M3ExpressionParser(curieHandler).parse(graph, expression, null); - } - - @Test(expected=MissingParameterException.class) - public void testMissing1() throws Exception { - JsonOwlObject expression = new JsonOwlObject(); - new M3ExpressionParser(curieHandler).parse(graph, expression, null); - } - - @Test(expected=MissingParameterException.class) - public void testMissing2() throws Exception { - JsonOwlObject expression = new JsonOwlObject(); - expression.type = JsonOwlObjectType.Class; - new M3ExpressionParser(curieHandler).parse(graph, expression, null); - } - - @Test(expected=MissingParameterException.class) - public void testMissing3() throws Exception { - JsonOwlObject expression = new JsonOwlObject(); - expression.type = JsonOwlObjectType.SomeValueFrom; - expression.property = new JsonOwlObject(); - expression.property.type = JsonOwlObjectType.ObjectProperty; - expression.property.id = OCCURS_IN; // occurs_in - new M3ExpressionParser(curieHandler).parse(graph, expression, null); - } - - @Test(expected=MissingParameterException.class) - public void testMissing4() throws Exception { - JsonOwlObject expression = new JsonOwlObject(); - expression.type = JsonOwlObjectType.SomeValueFrom; - expression.property = new JsonOwlObject(); - expression.property.type = JsonOwlObjectType.ObjectProperty; - expression.property.id = OCCURS_IN; // occurs_in - expression.filler = new JsonOwlObject(); - new M3ExpressionParser(curieHandler).parse(graph, expression, null); - } - - @Test(expected=MissingParameterException.class) - public void testMissing5() throws Exception { - JsonOwlObject expression = new JsonOwlObject(); - expression.type = JsonOwlObjectType.SomeValueFrom; - expression.property = new JsonOwlObject(); - expression.property.type = JsonOwlObjectType.ObjectProperty; - expression.property.id = OCCURS_IN; // occurs_in - expression.filler = new JsonOwlObject(); - expression.filler.type = JsonOwlObjectType.Class; - new M3ExpressionParser(curieHandler).parse(graph, expression, null); - } - - 
@Test(expected=MissingParameterException.class) - public void testMissing6() throws Exception { - JsonOwlObject expression = new JsonOwlObject(); - expression.type = JsonOwlObjectType.SomeValueFrom; - expression.property = new JsonOwlObject(); - expression.property.type = JsonOwlObjectType.ObjectProperty; - expression.property.id = OCCURS_IN; // occurs_in - expression.filler = new JsonOwlObject(); - expression.filler.id = NUCLEUS; - new M3ExpressionParser(curieHandler).parse(graph, expression, null); - } - - @Test - public void testParseClazz() throws Exception { - - JsonOwlObject expression = new JsonOwlObject(); - expression.type = JsonOwlObjectType.Class; - expression.id = CELL_MORPHOGENESIS; - - OWLClassExpression ce = new M3ExpressionParser(curieHandler).parse(graph, expression, null); - assertEquals(graph.getOWLClassByIdentifier(CELL_MORPHOGENESIS), ce); - } - + + @BeforeClass + public static void setUpBeforeClass() throws Exception { + init(new ParserWrapper()); + } + + static void init(ParserWrapper pw) throws OWLOntologyCreationException, IOException { + /* File file = new File("src/test/resources/go-lego-module.omn.gz").getCanonicalFile(); */ + File file = new File("src/test/resources/go-lego-module-compact.omn.gz").getCanonicalFile(); + graph = new MinervaOWLGraphWrapper(pw.parseOWL(IRI.create(file))); + } + + @Test(expected = MissingParameterException.class) + public void testMissing0() throws Exception { + JsonOwlObject expression = null; + new M3ExpressionParser(curieHandler).parse(graph, expression, null); + } + + @Test(expected = MissingParameterException.class) + public void testMissing1() throws Exception { + JsonOwlObject expression = new JsonOwlObject(); + new M3ExpressionParser(curieHandler).parse(graph, expression, null); + } + + @Test(expected = MissingParameterException.class) + public void testMissing2() throws Exception { + JsonOwlObject expression = new JsonOwlObject(); + expression.type = JsonOwlObjectType.Class; + new M3ExpressionParser(curieHandler).parse(graph, expression, null); + } + + @Test(expected = MissingParameterException.class) + public void testMissing3() throws Exception { + JsonOwlObject expression = new JsonOwlObject(); + expression.type = JsonOwlObjectType.SomeValueFrom; + expression.property = new JsonOwlObject(); + expression.property.type = JsonOwlObjectType.ObjectProperty; + expression.property.id = OCCURS_IN; // occurs_in + new M3ExpressionParser(curieHandler).parse(graph, expression, null); + } + + @Test(expected = MissingParameterException.class) + public void testMissing4() throws Exception { + JsonOwlObject expression = new JsonOwlObject(); + expression.type = JsonOwlObjectType.SomeValueFrom; + expression.property = new JsonOwlObject(); + expression.property.type = JsonOwlObjectType.ObjectProperty; + expression.property.id = OCCURS_IN; // occurs_in + expression.filler = new JsonOwlObject(); + new M3ExpressionParser(curieHandler).parse(graph, expression, null); + } + + @Test(expected = MissingParameterException.class) + public void testMissing5() throws Exception { + JsonOwlObject expression = new JsonOwlObject(); + expression.type = JsonOwlObjectType.SomeValueFrom; + expression.property = new JsonOwlObject(); + expression.property.type = JsonOwlObjectType.ObjectProperty; + expression.property.id = OCCURS_IN; // occurs_in + expression.filler = new JsonOwlObject(); + expression.filler.type = JsonOwlObjectType.Class; + new M3ExpressionParser(curieHandler).parse(graph, expression, null); + } + + @Test(expected = 
MissingParameterException.class) + public void testMissing6() throws Exception { + JsonOwlObject expression = new JsonOwlObject(); + expression.type = JsonOwlObjectType.SomeValueFrom; + expression.property = new JsonOwlObject(); + expression.property.type = JsonOwlObjectType.ObjectProperty; + expression.property.id = OCCURS_IN; // occurs_in + expression.filler = new JsonOwlObject(); + expression.filler.id = NUCLEUS; + new M3ExpressionParser(curieHandler).parse(graph, expression, null); + } + + @Test + public void testParseClazz() throws Exception { + + JsonOwlObject expression = new JsonOwlObject(); + expression.type = JsonOwlObjectType.Class; + expression.id = CELL_MORPHOGENESIS; + + OWLClassExpression ce = new M3ExpressionParser(curieHandler).parse(graph, expression, null); + assertEquals(graph.getOWLClassByIdentifier(CELL_MORPHOGENESIS), ce); + } + @Test public void testParseClazzNegated() throws Exception { @@ -139,7 +132,7 @@ public void testParseClazzNegated() throws Exception { @Test public void testParseClazzNegatedExpression() throws Exception { - JsonOwlObject svf = new JsonOwlObject(); + JsonOwlObject svf = new JsonOwlObject(); svf.type = JsonOwlObjectType.SomeValueFrom; svf.property = new JsonOwlObject(); svf.property.type = JsonOwlObjectType.ObjectProperty; @@ -151,74 +144,74 @@ public void testParseClazzNegatedExpression() throws Exception { JsonOwlObject expression = new JsonOwlObject(); expression.type = JsonOwlObjectType.ComplementOf; expression.filler = svf; - + OWLDataFactory df = graph.getDataFactory(); OWLClassExpression ce = new M3ExpressionParser(curieHandler).parse(graph, expression, null); - + OWLClass nucleus = graph.getOWLClassByIdentifier(NUCLEUS); OWLObjectSomeValuesFrom svfx = df.getOWLObjectSomeValuesFrom(graph.getOWLObjectPropertyByIdentifier(OCCURS_IN), nucleus); OWLObjectComplementOf ceExpected = df.getOWLObjectComplementOf(svfx); assertEquals(ceExpected, ce); } - - /** + + /** * test that Default expression parser will throw UnknownIdentifierException * when confronted with a non-CURIE - * + * * @throws Exception */ - @Test(expected=UnknownIdentifierException.class) + @Test(expected = UnknownIdentifierException.class) public void testBadCurieFail() throws Exception { - + JsonOwlObject expression = new JsonOwlObject(); expression.type = JsonOwlObjectType.Class; expression.id = "ABC"; // not a CURIE - + OWLClassExpression ce = new M3ExpressionParser(curieHandler).parse(graph, expression, null); } /** * test that Default expression parser will throw UnknownIdentifierException * when confronted with an ID with an unknown prefix - * + * * @throws Exception */ - @Test(expected=UnknownIdentifierException.class) + @Test(expected = UnknownIdentifierException.class) public void testParseClazzFail() throws Exception { - + JsonOwlObject expression = new JsonOwlObject(); expression.type = JsonOwlObjectType.Class; expression.id = "FO:0006915"; - + OWLClassExpression ce = new M3ExpressionParser(curieHandler).parse(graph, expression, null); } /** * test that unknown prefixes cannot be entered even with id-literal checking off - * + * * @throws Exception */ - @Test(expected=UnknownIdentifierException.class) + @Test(expected = UnknownIdentifierException.class) public void testParseClazzFailNoCheckLiteralIds() throws Exception { - + JsonOwlObject expression = new JsonOwlObject(); expression.type = JsonOwlObjectType.Class; expression.id = "THISISNOTAPREFIX:0006915"; - + OWLClassExpression ce = new M3ExpressionParser(false, curieHandler).parse(graph, expression, null); } - + 
@Test public void testParseClazzNoCheckLiteralIds() throws Exception { - + JsonOwlObject expression = new JsonOwlObject(); expression.type = JsonOwlObjectType.Class; expression.id = "GO:23"; // valid prefix, not a known class - + // create a parser that explicitly disables checking so-called literal ids OWLClassExpression ce = new M3ExpressionParser(false, curieHandler).parse(graph, expression, null); - + // check the retrieved class is the same as the input // note: we don't use the owltools getClass method directly, as that depends on the class // being known @@ -227,80 +220,80 @@ public void testParseClazzNoCheckLiteralIds() throws Exception { } - @Test - public void testParseSvf() throws Exception { - - JsonOwlObject expression = new JsonOwlObject(); - expression.type = JsonOwlObjectType.SomeValueFrom; - expression.property = new JsonOwlObject(); - expression.property.type = JsonOwlObjectType.ObjectProperty; - expression.property.id = OCCURS_IN; // occurs_in - expression.filler = new JsonOwlObject(); - expression.filler.type = JsonOwlObjectType.Class; - expression.filler.id = NUCLEUS; - - OWLClassExpression ce = new M3ExpressionParser(curieHandler).parse(graph, expression, null); - assertNotNull(ce); - } - - @Test(expected=UnknownIdentifierException.class) - public void testParseSvfFail1() throws Exception { - - JsonOwlObject expression = new JsonOwlObject(); - expression.type = JsonOwlObjectType.SomeValueFrom; - expression.property = new JsonOwlObject(); - expression.property.type = JsonOwlObjectType.ObjectProperty; - expression.property.id = OCCURS_IN; // occurs_in - expression.filler = new JsonOwlObject(); - expression.filler.type = JsonOwlObjectType.Class; - expression.filler.id = "FO:0005623"; // error - - new M3ExpressionParser(curieHandler).parse(graph, expression, null); - } - - @Test(expected=UnknownIdentifierException.class) - public void testParseSvfFailNoCheckLiteralIds1() throws Exception { - - JsonOwlObject expression = new JsonOwlObject(); - expression.type = JsonOwlObjectType.SomeValueFrom; - expression.property = new JsonOwlObject(); - expression.property.type = JsonOwlObjectType.ObjectProperty; - expression.property.id = OCCURS_IN; // occurs_in - expression.filler = new JsonOwlObject(); - expression.filler.type = JsonOwlObjectType.Class; - expression.filler.id = "DEFINITELYNOTAPREFIX:0005623"; // error - - new M3ExpressionParser(false, curieHandler).parse(graph, expression, null); - } - - @Test(expected=UnknownIdentifierException.class) - public void testParseSvfFailNoCheckLiteralIds2() throws Exception { - - JsonOwlObject expression = new JsonOwlObject(); - expression.type = JsonOwlObjectType.SomeValueFrom; - expression.property = new JsonOwlObject(); - expression.property.type = JsonOwlObjectType.ObjectProperty; - expression.property.id = "NOTARELATIONPREFIX:123"; - expression.filler = new JsonOwlObject(); - expression.filler.type = JsonOwlObjectType.Class; - expression.filler.id = NUCLEUS; // error - - new M3ExpressionParser(false, curieHandler).parse(graph, expression, null); - } - - @Test(expected=UnknownIdentifierException.class) - public void testParseSvfFail2() throws Exception { - - JsonOwlObject expression = new JsonOwlObject(); - expression.type = JsonOwlObjectType.SomeValueFrom; - expression.property = new JsonOwlObject(); - expression.property.type = JsonOwlObjectType.ObjectProperty; - expression.property.id = "FFO:0000066"; // error - expression.filler = new JsonOwlObject(); - expression.filler.type = JsonOwlObjectType.Class; - expression.filler.id = 
NUCLEUS; - - new M3ExpressionParser(curieHandler).parse(graph, expression, null); - } - + @Test + public void testParseSvf() throws Exception { + + JsonOwlObject expression = new JsonOwlObject(); + expression.type = JsonOwlObjectType.SomeValueFrom; + expression.property = new JsonOwlObject(); + expression.property.type = JsonOwlObjectType.ObjectProperty; + expression.property.id = OCCURS_IN; // occurs_in + expression.filler = new JsonOwlObject(); + expression.filler.type = JsonOwlObjectType.Class; + expression.filler.id = NUCLEUS; + + OWLClassExpression ce = new M3ExpressionParser(curieHandler).parse(graph, expression, null); + assertNotNull(ce); + } + + @Test(expected = UnknownIdentifierException.class) + public void testParseSvfFail1() throws Exception { + + JsonOwlObject expression = new JsonOwlObject(); + expression.type = JsonOwlObjectType.SomeValueFrom; + expression.property = new JsonOwlObject(); + expression.property.type = JsonOwlObjectType.ObjectProperty; + expression.property.id = OCCURS_IN; // occurs_in + expression.filler = new JsonOwlObject(); + expression.filler.type = JsonOwlObjectType.Class; + expression.filler.id = "FO:0005623"; // error + + new M3ExpressionParser(curieHandler).parse(graph, expression, null); + } + + @Test(expected = UnknownIdentifierException.class) + public void testParseSvfFailNoCheckLiteralIds1() throws Exception { + + JsonOwlObject expression = new JsonOwlObject(); + expression.type = JsonOwlObjectType.SomeValueFrom; + expression.property = new JsonOwlObject(); + expression.property.type = JsonOwlObjectType.ObjectProperty; + expression.property.id = OCCURS_IN; // occurs_in + expression.filler = new JsonOwlObject(); + expression.filler.type = JsonOwlObjectType.Class; + expression.filler.id = "DEFINITELYNOTAPREFIX:0005623"; // error + + new M3ExpressionParser(false, curieHandler).parse(graph, expression, null); + } + + @Test(expected = UnknownIdentifierException.class) + public void testParseSvfFailNoCheckLiteralIds2() throws Exception { + + JsonOwlObject expression = new JsonOwlObject(); + expression.type = JsonOwlObjectType.SomeValueFrom; + expression.property = new JsonOwlObject(); + expression.property.type = JsonOwlObjectType.ObjectProperty; + expression.property.id = "NOTARELATIONPREFIX:123"; + expression.filler = new JsonOwlObject(); + expression.filler.type = JsonOwlObjectType.Class; + expression.filler.id = NUCLEUS; // error + + new M3ExpressionParser(false, curieHandler).parse(graph, expression, null); + } + + @Test(expected = UnknownIdentifierException.class) + public void testParseSvfFail2() throws Exception { + + JsonOwlObject expression = new JsonOwlObject(); + expression.type = JsonOwlObjectType.SomeValueFrom; + expression.property = new JsonOwlObject(); + expression.property.type = JsonOwlObjectType.ObjectProperty; + expression.property.id = "FFO:0000066"; // error + expression.filler = new JsonOwlObject(); + expression.filler.type = JsonOwlObjectType.Class; + expression.filler.id = NUCLEUS; + + new M3ExpressionParser(curieHandler).parse(graph, expression, null); + } + } diff --git a/minerva-server/src/test/java/org/geneontology/minerva/server/handler/ModelEditTest.java b/minerva-server/src/test/java/org/geneontology/minerva/server/handler/ModelEditTest.java index f93ea12b..f2817e51 100644 --- a/minerva-server/src/test/java/org/geneontology/minerva/server/handler/ModelEditTest.java +++ b/minerva-server/src/test/java/org/geneontology/minerva/server/handler/ModelEditTest.java @@ -16,7 +16,6 @@ import 
org.semanticweb.owlapi.apibinding.OWLManager; import org.semanticweb.owlapi.model.*; import org.semanticweb.owlapi.model.parameters.OntologyCopy; - import owltools.io.ParserWrapper; import java.io.File; @@ -29,234 +28,233 @@ public class ModelEditTest { - @ClassRule - public static TemporaryFolder folder = new TemporaryFolder(); - - private static CurieHandler curieHandler = null; - private static JsonOrJsonpBatchHandler handler = null; - private static UndoAwareMolecularModelManager models = null; - static final String go_lego_journal_file = "/tmp/test-go-lego-blazegraph.jnl"; - - @BeforeClass - public static void setUpBeforeClass() throws Exception { - init(new ParserWrapper()); - } - - static void init(ParserWrapper pw) throws OWLOntologyCreationException, IOException { - //This includes only the needed terms for the test to pass - final OWLOntology tbox = OWLManager.createOWLOntologyManager().loadOntology(IRI.create(new File("src/test/resources/edit-test/go-lego-empty.owl"))); - // curie handler - final String modelIdcurie = "gomodel"; - final String modelIdPrefix = "http://model.geneontology.org/"; - final CurieMappings localMappings = new CurieMappings.SimpleCurieMappings(Collections.singletonMap(modelIdcurie, modelIdPrefix)); - curieHandler = new MappedCurieHandler(DefaultCurieHandler.loadDefaultMappings(), localMappings); - - models = new UndoAwareMolecularModelManager(tbox, curieHandler, modelIdPrefix, folder.newFile().getAbsolutePath(), null, go_lego_journal_file, true); - InferenceProviderCreator ipc = null; - handler = new JsonOrJsonpBatchHandler(models, "development", ipc, - Collections.emptySet(), (ExternalLookupService) null); - } - - @AfterClass - public static void tearDownAfterClass() throws Exception { - if (handler != null) { - handler = null; - } - if (models != null) { - models.dispose(); - } - } - - @Before - public void before() throws Exception { - StringWriter writer = new StringWriter(); - IOUtils.copy(this.getClass().getResourceAsStream("/edit-test/5437882f00000024"), writer, "utf-8"); - models.importModel(writer.toString()); - } - - @Test - public void testAddEdgeAsBatch() throws Exception { - List batch = new ArrayList<>(); - M3Request r; - - final String individualId = "http://model.geneontology.org/5437882f00000024/5437882f0000032"; - final IRI individualIRI = IRI.create(individualId); - final String individualIdCurie = curieHandler.getCuri(individualIRI); - final String modelId = "http://model.geneontology.org/5437882f00000024"; - final ModelContainer model = models.getModel(IRI.create(modelId)); - assertNotNull(model); - boolean found = false; - boolean foundCurie = false; - Set individuals = model.getAboxOntology().getIndividualsInSignature(); - for (OWLNamedIndividual individual : individuals) { - if (individualIRI.equals(individual.getIRI())) { - found = true; - foundCurie = individualIdCurie.equals(curieHandler.getCuri(individual.getIRI())); - } - } - assertTrue(found); - assertTrue(foundCurie); - - - // create new individual - r = BatchTestTools.addIndividual(modelId, "GO:0003674"); - r.arguments.assignToVariable = "VAR1"; - batch.add(r); - - // add link to existing individual (converted from old model) - r = BatchTestTools.addEdge(modelId, "VAR1", "BFO:0000050", individualId); - batch.add(r); - - r = BatchTestTools.addEdge(modelId, "VAR1", "RO:0002333", individualId); - batch.add(r); - - executeBatch(batch); - } - - - @Test - public void testModelReset() throws Exception { - - final String modelId = "http://model.geneontology.org/5437882f00000024"; - 
M3Request r; - - models.saveModel(models.getModel(IRI.create(modelId)), Collections.emptySet(), null); - //cache a version of the initial model. - OWLOntologyManager man1 = OWLManager.createOWLOntologyManager(); - OWLOntology startModel = man1.copyOntology(models.getModelAbox(IRI.create(modelId)), OntologyCopy.DEEP); - Set start_axioms = startModel.getABoxAxioms(null); - // get model, check that the model is indicated as not modified - M3BatchResponse resp1 = BatchTestTools.getModel(handler, modelId, false); - assertFalse(resp1.data.modifiedFlag); - - // modify model - // create new individual - r = BatchTestTools.addIndividual(modelId, "GO:0003674"); - M3BatchResponse resp2 = executeBatch(r); - // check that response indicates modified - assertTrue(resp2.data.modifiedFlag); - - //compare and show they are different - OWLOntologyManager man2 = OWLManager.createOWLOntologyManager(); - OWLOntology midModel = man2.copyOntology(models.getModelAbox(IRI.create(modelId)), OntologyCopy.DEEP); - Set mid_axioms = midModel.getABoxAxioms(null); - assertFalse(mid_axioms.equals(start_axioms)); - - //test diff command for comparison - r = new M3Request(); - r.entity = Entity.model; - r.operation = Operation.diffModel; - r.arguments = new M3Argument(); - r.arguments.modelId = modelId; - M3BatchResponse diffresp = executeBatch(r); - String dr = diffresp.data.diffResult; - assertFalse(dr.equals("Ontologies are identical\n")); - - //now reset the model - r = new M3Request(); - r.entity = Entity.model; - r.operation = Operation.resetModel; - r.arguments = new M3Argument(); - r.arguments.modelId = modelId; - M3BatchResponse resp3 = executeBatch(r); - - // check that response indicates not modified - assertFalse(resp3.data.modifiedFlag); - - //compare and show they are the same - OWLOntologyManager man3 = OWLManager.createOWLOntologyManager(); - OWLOntology endModel = man3.copyOntology(models.getModelAbox(IRI.create(modelId)), OntologyCopy.DEEP); - Set end_axioms = endModel.getABoxAxioms(null); - assertTrue(start_axioms.equals(end_axioms)); - - //test diff command for comparison - r = new M3Request(); - r.entity = Entity.model; - r.operation = Operation.diffModel; - r.arguments = new M3Argument(); - r.arguments.modelId = modelId; - diffresp = executeBatch(r); - dr = diffresp.data.diffResult; - assertTrue(dr.equals("Ontologies are identical\n")); - } - - @Test - public void testModifiedFlag() throws Exception { - - final String modelId = "http://model.geneontology.org/5437882f00000024"; - final String curie = curieHandler.getCuri(IRI.create(modelId)); - M3Request r; - - models.saveModel(models.getModel(IRI.create(modelId)), Collections.emptySet(), null); - - // get meta, check that the model shows up as not modified - MetaResponse meta1 = BatchTestTools.getMeta(handler); - assertNotNull(meta1.modelsReadOnly); - assertFalse(meta1.modelsReadOnly.isEmpty()); - for(Entry> entity : meta1.modelsReadOnly.entrySet()) { - boolean modifiedFlag = (Boolean) entity.getValue().get("modified-p"); - assertFalse(modifiedFlag); - } - - // get model, check that the model indicated as not modified - M3BatchResponse resp1 = BatchTestTools.getModel(handler, modelId, false); - assertFalse(resp1.data.modifiedFlag); - - // modify model - // create new individual - r = BatchTestTools.addIndividual(modelId, "GO:0003674"); - M3BatchResponse resp2 = executeBatch(r); - - // check that response indicates modified - assertTrue(resp2.data.modifiedFlag); - - // get meta, check that the model shows up as modified - MetaResponse meta2 = 
BatchTestTools.getMeta(handler); - assertNotNull(meta2.modelsReadOnly); - Map> readOnly = (Map>) meta2.modelsReadOnly; - assertFalse(readOnly.isEmpty()); - for(Entry> entity : readOnly.entrySet()) { - boolean modifiedFlag = (Boolean) entity.getValue().get("modified-p"); - if(entity.getKey().equals(curie)) { - assertTrue(modifiedFlag); - } - else { - assertFalse(modifiedFlag); - } - } - - // save - r = new M3Request(); - r.entity = Entity.model; - r.operation = Operation.storeModel; - r.arguments = new M3Argument(); - r.arguments.modelId = modelId; - M3BatchResponse resp3 = executeBatch(r); - - // check that response indicates not modified - assertFalse(resp3.data.modifiedFlag); - - // get meta, check that the model shows up as not modified - MetaResponse meta3 = BatchTestTools.getMeta(handler); - assertNotNull(meta3.modelsReadOnly); - Map> modelsModified3 = (Map>) meta3.modelsReadOnly; - assertFalse(modelsModified3.isEmpty()); - for(Entry> entity : modelsModified3.entrySet()) { - boolean modifiedFlag = (Boolean) entity.getValue().get("modified-p"); - assertFalse(modifiedFlag); - } - } - - private M3BatchResponse executeBatch(M3Request r) { - return executeBatch(Collections.singletonList(r)); - } - - private M3BatchResponse executeBatch(List batch) { - M3BatchResponse response = handler.m3Batch("test-user", Collections.emptySet(), "test-intention", "foo-packet-id", - batch.toArray(new M3Request[batch.size()]), false, true); - assertEquals("test-user", response.uid); - assertEquals("test-intention", response.intention); - //assertEquals(response.message, M3BatchResponse.MESSAGE_TYPE_SUCCESS, response.messageType); - return response; - } + @ClassRule + public static TemporaryFolder folder = new TemporaryFolder(); + + private static CurieHandler curieHandler = null; + private static JsonOrJsonpBatchHandler handler = null; + private static UndoAwareMolecularModelManager models = null; + static final String go_lego_journal_file = "/tmp/test-go-lego-blazegraph.jnl"; + + @BeforeClass + public static void setUpBeforeClass() throws Exception { + init(new ParserWrapper()); + } + + static void init(ParserWrapper pw) throws OWLOntologyCreationException, IOException { + //This includes only the needed terms for the test to pass + final OWLOntology tbox = OWLManager.createOWLOntologyManager().loadOntology(IRI.create(new File("src/test/resources/edit-test/go-lego-empty.owl"))); + // curie handler + final String modelIdcurie = "gomodel"; + final String modelIdPrefix = "http://model.geneontology.org/"; + final CurieMappings localMappings = new CurieMappings.SimpleCurieMappings(Collections.singletonMap(modelIdcurie, modelIdPrefix)); + curieHandler = new MappedCurieHandler(DefaultCurieHandler.loadDefaultMappings(), localMappings); + + models = new UndoAwareMolecularModelManager(tbox, curieHandler, modelIdPrefix, folder.newFile().getAbsolutePath(), null, go_lego_journal_file, true); + InferenceProviderCreator ipc = null; + handler = new JsonOrJsonpBatchHandler(models, "development", ipc, + Collections.emptySet(), (ExternalLookupService) null); + } + + @AfterClass + public static void tearDownAfterClass() throws Exception { + if (handler != null) { + handler = null; + } + if (models != null) { + models.dispose(); + } + } + + @Before + public void before() throws Exception { + StringWriter writer = new StringWriter(); + IOUtils.copy(this.getClass().getResourceAsStream("/edit-test/5437882f00000024"), writer, "utf-8"); + models.importModel(writer.toString()); + } + + @Test + public void testAddEdgeAsBatch() throws 
Exception { + List batch = new ArrayList<>(); + M3Request r; + + final String individualId = "http://model.geneontology.org/5437882f00000024/5437882f0000032"; + final IRI individualIRI = IRI.create(individualId); + final String individualIdCurie = curieHandler.getCuri(individualIRI); + final String modelId = "http://model.geneontology.org/5437882f00000024"; + final ModelContainer model = models.getModel(IRI.create(modelId)); + assertNotNull(model); + boolean found = false; + boolean foundCurie = false; + Set individuals = model.getAboxOntology().getIndividualsInSignature(); + for (OWLNamedIndividual individual : individuals) { + if (individualIRI.equals(individual.getIRI())) { + found = true; + foundCurie = individualIdCurie.equals(curieHandler.getCuri(individual.getIRI())); + } + } + assertTrue(found); + assertTrue(foundCurie); + + + // create new individual + r = BatchTestTools.addIndividual(modelId, "GO:0003674"); + r.arguments.assignToVariable = "VAR1"; + batch.add(r); + + // add link to existing individual (converted from old model) + r = BatchTestTools.addEdge(modelId, "VAR1", "BFO:0000050", individualId); + batch.add(r); + + r = BatchTestTools.addEdge(modelId, "VAR1", "RO:0002333", individualId); + batch.add(r); + + executeBatch(batch); + } + + + @Test + public void testModelReset() throws Exception { + + final String modelId = "http://model.geneontology.org/5437882f00000024"; + M3Request r; + + models.saveModel(models.getModel(IRI.create(modelId)), Collections.emptySet(), null); + //cache a version of the initial model. + OWLOntologyManager man1 = OWLManager.createOWLOntologyManager(); + OWLOntology startModel = man1.copyOntology(models.getModelAbox(IRI.create(modelId)), OntologyCopy.DEEP); + Set start_axioms = startModel.getABoxAxioms(null); + // get model, check that the model is indicated as not modified + M3BatchResponse resp1 = BatchTestTools.getModel(handler, modelId, false); + assertFalse(resp1.data.modifiedFlag); + + // modify model + // create new individual + r = BatchTestTools.addIndividual(modelId, "GO:0003674"); + M3BatchResponse resp2 = executeBatch(r); + // check that response indicates modified + assertTrue(resp2.data.modifiedFlag); + + //compare and show they are different + OWLOntologyManager man2 = OWLManager.createOWLOntologyManager(); + OWLOntology midModel = man2.copyOntology(models.getModelAbox(IRI.create(modelId)), OntologyCopy.DEEP); + Set mid_axioms = midModel.getABoxAxioms(null); + assertFalse(mid_axioms.equals(start_axioms)); + + //test diff command for comparison + r = new M3Request(); + r.entity = Entity.model; + r.operation = Operation.diffModel; + r.arguments = new M3Argument(); + r.arguments.modelId = modelId; + M3BatchResponse diffresp = executeBatch(r); + String dr = diffresp.data.diffResult; + assertFalse(dr.equals("Ontologies are identical\n")); + + //now reset the model + r = new M3Request(); + r.entity = Entity.model; + r.operation = Operation.resetModel; + r.arguments = new M3Argument(); + r.arguments.modelId = modelId; + M3BatchResponse resp3 = executeBatch(r); + + // check that response indicates not modified + assertFalse(resp3.data.modifiedFlag); + + //compare and show they are the same + OWLOntologyManager man3 = OWLManager.createOWLOntologyManager(); + OWLOntology endModel = man3.copyOntology(models.getModelAbox(IRI.create(modelId)), OntologyCopy.DEEP); + Set end_axioms = endModel.getABoxAxioms(null); + assertTrue(start_axioms.equals(end_axioms)); + + //test diff command for comparison + r = new M3Request(); + r.entity = Entity.model; + 
r.operation = Operation.diffModel; + r.arguments = new M3Argument(); + r.arguments.modelId = modelId; + diffresp = executeBatch(r); + dr = diffresp.data.diffResult; + assertTrue(dr.equals("Ontologies are identical\n")); + } + + @Test + public void testModifiedFlag() throws Exception { + + final String modelId = "http://model.geneontology.org/5437882f00000024"; + final String curie = curieHandler.getCuri(IRI.create(modelId)); + M3Request r; + + models.saveModel(models.getModel(IRI.create(modelId)), Collections.emptySet(), null); + + // get meta, check that the model shows up as not modified + MetaResponse meta1 = BatchTestTools.getMeta(handler); + assertNotNull(meta1.modelsReadOnly); + assertFalse(meta1.modelsReadOnly.isEmpty()); + for (Entry> entity : meta1.modelsReadOnly.entrySet()) { + boolean modifiedFlag = (Boolean) entity.getValue().get("modified-p"); + assertFalse(modifiedFlag); + } + + // get model, check that the model indicated as not modified + M3BatchResponse resp1 = BatchTestTools.getModel(handler, modelId, false); + assertFalse(resp1.data.modifiedFlag); + + // modify model + // create new individual + r = BatchTestTools.addIndividual(modelId, "GO:0003674"); + M3BatchResponse resp2 = executeBatch(r); + + // check that response indicates modified + assertTrue(resp2.data.modifiedFlag); + + // get meta, check that the model shows up as modified + MetaResponse meta2 = BatchTestTools.getMeta(handler); + assertNotNull(meta2.modelsReadOnly); + Map> readOnly = (Map>) meta2.modelsReadOnly; + assertFalse(readOnly.isEmpty()); + for (Entry> entity : readOnly.entrySet()) { + boolean modifiedFlag = (Boolean) entity.getValue().get("modified-p"); + if (entity.getKey().equals(curie)) { + assertTrue(modifiedFlag); + } else { + assertFalse(modifiedFlag); + } + } + + // save + r = new M3Request(); + r.entity = Entity.model; + r.operation = Operation.storeModel; + r.arguments = new M3Argument(); + r.arguments.modelId = modelId; + M3BatchResponse resp3 = executeBatch(r); + + // check that response indicates not modified + assertFalse(resp3.data.modifiedFlag); + + // get meta, check that the model shows up as not modified + MetaResponse meta3 = BatchTestTools.getMeta(handler); + assertNotNull(meta3.modelsReadOnly); + Map> modelsModified3 = (Map>) meta3.modelsReadOnly; + assertFalse(modelsModified3.isEmpty()); + for (Entry> entity : modelsModified3.entrySet()) { + boolean modifiedFlag = (Boolean) entity.getValue().get("modified-p"); + assertFalse(modifiedFlag); + } + } + + private M3BatchResponse executeBatch(M3Request r) { + return executeBatch(Collections.singletonList(r)); + } + + private M3BatchResponse executeBatch(List batch) { + M3BatchResponse response = handler.m3Batch("test-user", Collections.emptySet(), "test-intention", "foo-packet-id", + batch.toArray(new M3Request[batch.size()]), false, true); + assertEquals("test-user", response.uid); + assertEquals("test-intention", response.intention); + //assertEquals(response.message, M3BatchResponse.MESSAGE_TYPE_SUCCESS, response.messageType); + return response; + } } diff --git a/minerva-server/src/test/java/org/geneontology/minerva/server/handler/ModelReasonerTest.java b/minerva-server/src/test/java/org/geneontology/minerva/server/handler/ModelReasonerTest.java index c90e2555..eeb9b096 100644 --- a/minerva-server/src/test/java/org/geneontology/minerva/server/handler/ModelReasonerTest.java +++ b/minerva-server/src/test/java/org/geneontology/minerva/server/handler/ModelReasonerTest.java @@ -28,98 +28,98 @@ import static org.junit.Assert.*; public 
class ModelReasonerTest { - - @ClassRule - public static TemporaryFolder folder = new TemporaryFolder(); - - private static CurieHandler curieHandler = null; - private static JsonOrJsonpBatchHandler handler = null; - private static UndoAwareMolecularModelManager models = null; - static final String go_lego_journal_file = "/tmp/test-go-lego-blazegraph.jnl"; - - @BeforeClass - public static void setUpBeforeClass() throws Exception { - init(); - } - - static void init() throws OWLOntologyCreationException, IOException { - //FIXME need more from go-lego - final OWLOntology tbox = OWLManager.createOWLOntologyManager().loadOntology(IRI.create(new File("src/test/resources/go-lego-minimal.owl"))); - // curie handler - final String modelIdcurie = "gomodel"; - final String modelIdPrefix = "http://model.geneontology.org/"; - final CurieMappings localMappings = new CurieMappings.SimpleCurieMappings(Collections.singletonMap(modelIdcurie, modelIdPrefix)); - curieHandler = new MappedCurieHandler(DefaultCurieHandler.loadDefaultMappings(), localMappings); - - models = new UndoAwareMolecularModelManager(tbox, curieHandler, modelIdPrefix, folder.newFile().getAbsolutePath(), null, go_lego_journal_file, true); - InferenceProviderCreator ipc = CachingInferenceProviderCreatorImpl.createElk(false, null); - handler = new JsonOrJsonpBatchHandler(models, "development", ipc, - Collections.emptySet(), (ExternalLookupService) null); - //models.setPathToOWLFiles("src/test/resources/reasoner-test"); - } - - @AfterClass - public static void tearDownAfterClass() throws Exception { - if (handler != null) { - handler = null; - } - if (models != null) { - models.dispose(); - } - } - - //FIXME @Test - public void testReasoner() throws Exception { - List batch = new ArrayList<>(); - M3Request r; - - final String individualId = "http://model.geneontology.org/5525a0fc00000001/5525a0fc0000023"; - final IRI individualIRI = IRI.create(individualId); - final String individualIdCurie = curieHandler.getCuri(individualIRI); - final String modelId = "http://model.geneontology.org/5525a0fc00000001"; - final ModelContainer model = models.getModel(IRI.create(modelId)); - assertNotNull(model); - boolean found = false; - boolean foundCurie = false; - Set individuals = model.getAboxOntology().getIndividualsInSignature(); - for (OWLNamedIndividual individual : individuals) { - if (individualIRI.equals(individual.getIRI())) { - found = true; - foundCurie = individualIdCurie.equals(curieHandler.getCuri(individual.getIRI())); - } - } - assertTrue(found); - assertTrue(foundCurie); - - - // get model - r = new M3Request(); - r.entity = Entity.model; - r.operation = Operation.get; - r.arguments = new M3Argument(); - r.arguments.modelId = modelId; - batch.add(r); - - M3BatchResponse response = executeBatch(batch); - JsonOwlIndividual[] responseIndividuals = BatchTestTools.responseIndividuals(response); - JsonOwlIndividual targetIndividual = null; - for (JsonOwlIndividual individual : responseIndividuals) { - if (individualIdCurie.equals(individual.id)) { - targetIndividual = individual; - break; - } - } - assertNotNull(targetIndividual); - assertNotNull(targetIndividual.inferredType); - assertEquals("Expected two inferences", 2, targetIndividual.inferredType.length); - } - - private M3BatchResponse executeBatch(List batch) { - M3BatchResponse response = handler.m3Batch("test-user", Collections.emptySet(), "test-intention", "foo-packet-id", - batch.toArray(new M3Request[batch.size()]), true, true); - assertEquals("test-user", response.uid); - 
assertEquals("test-intention", response.intention); - assertEquals(response.message, M3BatchResponse.MESSAGE_TYPE_SUCCESS, response.messageType); - return response; - } + + @ClassRule + public static TemporaryFolder folder = new TemporaryFolder(); + + private static CurieHandler curieHandler = null; + private static JsonOrJsonpBatchHandler handler = null; + private static UndoAwareMolecularModelManager models = null; + static final String go_lego_journal_file = "/tmp/test-go-lego-blazegraph.jnl"; + + @BeforeClass + public static void setUpBeforeClass() throws Exception { + init(); + } + + static void init() throws OWLOntologyCreationException, IOException { + //FIXME need more from go-lego + final OWLOntology tbox = OWLManager.createOWLOntologyManager().loadOntology(IRI.create(new File("src/test/resources/go-lego-minimal.owl"))); + // curie handler + final String modelIdcurie = "gomodel"; + final String modelIdPrefix = "http://model.geneontology.org/"; + final CurieMappings localMappings = new CurieMappings.SimpleCurieMappings(Collections.singletonMap(modelIdcurie, modelIdPrefix)); + curieHandler = new MappedCurieHandler(DefaultCurieHandler.loadDefaultMappings(), localMappings); + + models = new UndoAwareMolecularModelManager(tbox, curieHandler, modelIdPrefix, folder.newFile().getAbsolutePath(), null, go_lego_journal_file, true); + InferenceProviderCreator ipc = CachingInferenceProviderCreatorImpl.createElk(false, null); + handler = new JsonOrJsonpBatchHandler(models, "development", ipc, + Collections.emptySet(), (ExternalLookupService) null); + //models.setPathToOWLFiles("src/test/resources/reasoner-test"); + } + + @AfterClass + public static void tearDownAfterClass() throws Exception { + if (handler != null) { + handler = null; + } + if (models != null) { + models.dispose(); + } + } + + //FIXME @Test + public void testReasoner() throws Exception { + List batch = new ArrayList<>(); + M3Request r; + + final String individualId = "http://model.geneontology.org/5525a0fc00000001/5525a0fc0000023"; + final IRI individualIRI = IRI.create(individualId); + final String individualIdCurie = curieHandler.getCuri(individualIRI); + final String modelId = "http://model.geneontology.org/5525a0fc00000001"; + final ModelContainer model = models.getModel(IRI.create(modelId)); + assertNotNull(model); + boolean found = false; + boolean foundCurie = false; + Set individuals = model.getAboxOntology().getIndividualsInSignature(); + for (OWLNamedIndividual individual : individuals) { + if (individualIRI.equals(individual.getIRI())) { + found = true; + foundCurie = individualIdCurie.equals(curieHandler.getCuri(individual.getIRI())); + } + } + assertTrue(found); + assertTrue(foundCurie); + + + // get model + r = new M3Request(); + r.entity = Entity.model; + r.operation = Operation.get; + r.arguments = new M3Argument(); + r.arguments.modelId = modelId; + batch.add(r); + + M3BatchResponse response = executeBatch(batch); + JsonOwlIndividual[] responseIndividuals = BatchTestTools.responseIndividuals(response); + JsonOwlIndividual targetIndividual = null; + for (JsonOwlIndividual individual : responseIndividuals) { + if (individualIdCurie.equals(individual.id)) { + targetIndividual = individual; + break; + } + } + assertNotNull(targetIndividual); + assertNotNull(targetIndividual.inferredType); + assertEquals("Expected two inferences", 2, targetIndividual.inferredType.length); + } + + private M3BatchResponse executeBatch(List batch) { + M3BatchResponse response = handler.m3Batch("test-user", Collections.emptySet(), 
"test-intention", "foo-packet-id", + batch.toArray(new M3Request[batch.size()]), true, true); + assertEquals("test-user", response.uid); + assertEquals("test-intention", response.intention); + assertEquals(response.message, M3BatchResponse.MESSAGE_TYPE_SUCCESS, response.messageType); + return response; + } } diff --git a/minerva-server/src/test/java/org/geneontology/minerva/server/handler/ModelSearchHandlerTest.java b/minerva-server/src/test/java/org/geneontology/minerva/server/handler/ModelSearchHandlerTest.java index 3d9d8588..b639320d 100644 --- a/minerva-server/src/test/java/org/geneontology/minerva/server/handler/ModelSearchHandlerTest.java +++ b/minerva-server/src/test/java/org/geneontology/minerva/server/handler/ModelSearchHandlerTest.java @@ -1,21 +1,9 @@ /** - * + * */ package org.geneontology.minerva.server.handler; -import static org.junit.Assert.*; - -import java.io.File; -import java.io.IOException; -import java.io.InputStream; -import java.net.HttpURLConnection; -import java.net.URI; -import java.net.URISyntaxException; -import java.net.URL; -import java.util.ArrayList; -import java.util.Collections; -import java.util.List; - +import com.google.gson.Gson; import org.apache.commons.io.FileUtils; import org.apache.commons.io.IOUtils; import org.apache.http.client.entity.UrlEncodedFormEntity; @@ -43,656 +31,654 @@ import org.geneontology.minerva.lookup.ExternalLookupService; import org.geneontology.minerva.server.GsonMessageBodyHandler; import org.geneontology.minerva.server.RequireJsonpFilter; -import org.geneontology.minerva.server.handler.M3BatchHandler.Entity; -import org.geneontology.minerva.server.handler.M3BatchHandler.M3Argument; -import org.geneontology.minerva.server.handler.M3BatchHandler.M3BatchResponse; -import org.geneontology.minerva.server.handler.M3BatchHandler.M3Request; -import org.geneontology.minerva.server.handler.M3BatchHandler.Operation; +import org.geneontology.minerva.server.handler.M3BatchHandler.*; import org.geneontology.minerva.server.handler.ModelSearchHandler.ModelSearchResult; import org.glassfish.jersey.server.ResourceConfig; import org.glassfish.jersey.servlet.ServletContainer; -import org.junit.After; -import org.junit.AfterClass; -import org.junit.Before; -import org.junit.BeforeClass; -import org.junit.ClassRule; -import org.junit.Test; +import org.junit.*; import org.junit.rules.TemporaryFolder; import org.openrdf.repository.RepositoryException; import org.openrdf.rio.RDFHandlerException; import org.openrdf.rio.RDFParseException; import org.semanticweb.owlapi.apibinding.OWLManager; -import org.semanticweb.owlapi.model.IRI; -import org.semanticweb.owlapi.model.OWLObjectProperty; -import org.semanticweb.owlapi.model.OWLOntology; -import org.semanticweb.owlapi.model.OWLOntologyCreationException; -import org.semanticweb.owlapi.model.OWLOntologyManager; -import org.semanticweb.owlapi.model.OWLOntologyStorageException; +import org.semanticweb.owlapi.model.*; -import com.google.gson.Gson; +import java.io.File; +import java.io.IOException; +import java.io.InputStream; +import java.net.HttpURLConnection; +import java.net.URI; +import java.net.URISyntaxException; +import java.net.URL; +import java.util.ArrayList; +import java.util.Collections; +import java.util.List; + +import static org.junit.Assert.assertFalse; +import static org.junit.Assert.assertTrue; /** * @author benjamingood * */ public class ModelSearchHandlerTest { - private static final Logger LOGGER = Logger.getLogger(ModelSearchHandlerTest.class); - static Server server; - static final 
String ontologyIRI = "http://purl.obolibrary.org/obo/go/extensions/go-lego.owl"; - static final String modelIdcurie = "http://model.geneontology.org/"; - static final String modelIdPrefix = "gomodel"; - static final String go_lego_journal_file = "/tmp/test-go-lego-blazegraph.jnl"; - static OWLOntology tbox_ontology; - static CurieHandler curieHandler; - static UndoAwareMolecularModelManager models; - private static JsonOrJsonpBatchHandler handler; - - @ClassRule - public static TemporaryFolder tmp = new TemporaryFolder(); - - /** - * @throws java.lang.Exception - */ - @BeforeClass - public static void setUpBeforeClass() throws Exception { - LOGGER.info("Set up molecular model manager - loading files into a journal"); - // set curie handler - String modelIdPrefix = "http://model.geneontology.org/"; - String modelIdcurie = "gomodel"; - final CurieMappings localMappings = new CurieMappings.SimpleCurieMappings(Collections.singletonMap(modelIdcurie, modelIdPrefix)); - curieHandler = new MappedCurieHandler(DefaultCurieHandler.loadDefaultMappings(), localMappings); - String valid_model_folder = "src/test/resources/models/should_pass/"; - String model_save = "src/test/resources/models/tmp/"; - String inputDB = makeBlazegraphJournal(valid_model_folder); - //leave tbox empty for now - OWLOntologyManager ontman = OWLManager.createOWLOntologyManager(); - tbox_ontology = ontman.createOntology(IRI.create("http://example.org/dummy")); - models = new UndoAwareMolecularModelManager(tbox_ontology, curieHandler, modelIdPrefix, inputDB, model_save, go_lego_journal_file, true); - models.addTaxonMetadata(); - - LOGGER.info("Setup Jetty config."); - // Configuration: Use an already existing handler instance - // Configuration: Use custom JSON renderer (GSON) - ResourceConfig resourceConfig = new ResourceConfig(); - resourceConfig.register(GsonMessageBodyHandler.class); - resourceConfig.register(RequireJsonpFilter.class); - - ModelSearchHandler searchHandler = new ModelSearchHandler(models); - resourceConfig = resourceConfig.registerInstances(searchHandler); - - // setup jetty server port, buffers and context path - server = new Server(); - // create connector with port and custom buffer sizes - - HttpConfiguration http_config = new HttpConfiguration(); - int requestHeaderSize = 64*1024; - int requestBufferSize = 128*1024; - int port = 6800; - String contextString = "/"; - http_config.setRequestHeaderSize(requestHeaderSize); - ServerConnector connector = new ServerConnector(server, new HttpConnectionFactory(http_config)); - connector.setPort(port); - server.addConnector(connector); - - ServletContextHandler context = new ServletContextHandler(ServletContextHandler.SESSIONS); - context.setContextPath(contextString); - server.setHandler(context); - ServletHolder h = new ServletHolder(new ServletContainer(resourceConfig)); - context.addServlet(h, "/*"); - - // start jetty server - LOGGER.info("Start server on port: "+port+" context: "+contextString); - server.start(); - - //set up a handler for testing with M3BatchRequest service - handler = new JsonOrJsonpBatchHandler(models, "development", null, - Collections.emptySet(), (ExternalLookupService) null); - } - - /** - * @throws java.lang.Exception - */ - @AfterClass - public static void tearDownAfterClass() throws Exception { - models.dispose(); - server.stop(); - if (handler != null) { - handler = null; - } - } - - /** - * @throws java.lang.Exception - */ - @Before - public void setUp() throws Exception { - } - - /** - * @throws java.lang.Exception - */ - @After - 
public void tearDown() throws Exception { - } - - @Test - public final void testReturnModifiedP() throws URISyntaxException, IOException, OWLOntologyStorageException, OWLOntologyCreationException, RepositoryException, UnknownIdentifierException { - //get a hold of a test model - String mid = "5d29221b00001265"; - final String modelId = "http://model.geneontology.org/"+mid; - models.saveModel(models.getModel(IRI.create(modelId)), Collections.emptySet(), null); - // get model via standard Noctua request (non-search), check that the model indicated as not modified - M3BatchResponse resp1 = BatchTestTools.getModel(handler, modelId, false); - assertFalse(resp1.data.modifiedFlag); - //run a search query, show that the model found has not been modified - URIBuilder builder = new URIBuilder("http://127.0.0.1:6800/search/models/"); - builder.addParameter("id", "gomodel:"+mid); - URI searchuri = builder.build(); - String json_result = getJsonStringFromUri(searchuri); - Gson g = new Gson(); - ModelSearchResult result = g.fromJson(json_result, ModelSearchResult.class); - assertTrue(result.getN()==1); - for(ModelSearchHandler.ModelMeta mm : result.getModels()) { - assertFalse(mm.isModified()); - } - //modify the model, but don't save it to the database - // create new individual - M3Request r = BatchTestTools.addIndividual(modelId, "GO:0003674"); - List batch = Collections.singletonList(r); - M3BatchResponse response = handler.m3Batch("test-user", Collections.emptySet(), "test-intention", "foo-packet-id", - batch.toArray(new M3Request[batch.size()]), false, true); - // check that response indicates modified - assertTrue(response.data.modifiedFlag); - - //run the query again and show that the modified-p flag has been set to true - json_result = getJsonStringFromUri(searchuri); - g = new Gson(); - result = g.fromJson(json_result, ModelSearchResult.class); - assertTrue(result.getN()==1); - //show that the search result knows it was modified - for(ModelSearchHandler.ModelMeta mm : result.getModels()) { - assertTrue(mm.isModified()); - } - //now save it to the database using the m3 api - r = new M3Request(); - r.entity = Entity.model; - r.operation = Operation.storeModel; - r.arguments = new M3Argument(); - r.arguments.modelId = modelId; - batch = Collections.singletonList(r); - response = handler.m3Batch("test-user", Collections.emptySet(), "test-intention", "foo-packet-id", - batch.toArray(new M3Request[batch.size()]), false, true); - // check that response now indicates not modified - assertFalse(response.data.modifiedFlag); - //now look it up by search API again and show that modified state is false once again - json_result = getJsonStringFromUri(searchuri); - g = new Gson(); - result = g.fromJson(json_result, ModelSearchResult.class); - assertTrue(result.getN()==1); - //show that it now knows it in a non-modified state - for(ModelSearchHandler.ModelMeta mm : result.getModels()) { - assertFalse(mm.isModified()); - } - //don't need to undo changes as the database is rebuilt each time from files and never flushed to file here. 
- } - - @Test - public final void testSearchGetByModelIdAsCurie() throws URISyntaxException, IOException { - //make the request - URIBuilder builder = new URIBuilder("http://127.0.0.1:6800/search/models/"); - // - builder.addParameter("id", "gomodel:5d29221b00001265"); - builder.addParameter("id", "gomodel:5d29218800000021"); - /// 5d29218800000021 - URI searchuri = builder.build(); - String json_result = getJsonStringFromUri(searchuri); - Gson g = new Gson(); - ModelSearchResult result = g.fromJson(json_result, ModelSearchResult.class); - LOGGER.info("Search by id URI "+searchuri); - LOGGER.info("Search by id result "+json_result); - LOGGER.info("N models found: "+result.getN()); - assertTrue(result.getN()==2); - } - - @Test - public final void testSearchGetByModelIdAsURI() throws URISyntaxException, IOException { - //make the request - URIBuilder builder = new URIBuilder("http://127.0.0.1:6800/search/models/"); - // - //builder.addParameter("id", "gomodel:5d29221b00001265"); - builder.addParameter("id", "http://model.geneontology.org/5d29221b00001265"); - URI searchuri = builder.build(); - String json_result = getJsonStringFromUri(searchuri); - Gson g = new Gson(); - ModelSearchResult result = g.fromJson(json_result, ModelSearchResult.class); - LOGGER.info("Search by id URI "+searchuri); - LOGGER.info("Search by id result "+json_result); - LOGGER.info("N models found: "+result.getN()); - assertTrue(result.getN()==1); - } - - /** - * Test method for {@link org.geneontology.minerva.server.handler.ModelSearchHandler#searchGet(java.util.Set, java.util.Set, java.util.Set, java.lang.String, java.util.Set, java.util.Set, java.util.Set, java.lang.String, int, int, java.lang.String)}. - * @throws URISyntaxException - * @throws IOException - */ - @Test - public final void testSearchGetByGene() throws URISyntaxException, IOException { - //make the request - URIBuilder builder = new URIBuilder("http://127.0.0.1:6800/search/models/"); - builder.addParameter("gp", "http://identifiers.org/uniprot/P15822-3"); - URI searchuri = builder.build(); - String json_result = getJsonStringFromUri(searchuri); - Gson g = new Gson(); - ModelSearchResult result = g.fromJson(json_result, ModelSearchResult.class); - LOGGER.info("Search by gene URI "+searchuri); - LOGGER.info("Search by gene result "+json_result); - LOGGER.info("N models found: "+result.getN()); - assertTrue(result.getN()>0); - } - - @Test - public final void testSearchPostByGene() throws URISyntaxException, IOException { - HttpPost post = new HttpPost(server.getURI()+"search/models"); - List urlParameters = new ArrayList<>(); - urlParameters.add(new BasicNameValuePair("gp", "http://identifiers.org/wormbase/WBGene00001865")); - urlParameters.add(new BasicNameValuePair("gp", "http://identifiers.org/wormbase/WBGene00017304")); - urlParameters.add(new BasicNameValuePair("gp", "http://identifiers.org/wormbase/WBGene00003418")); - post.setEntity(new UrlEncodedFormEntity(urlParameters)); - LOGGER.info("post "+post.toString()); - String json_result = getJsonStringFromPost(post); - Gson g = new Gson(); - ModelSearchResult result = g.fromJson(json_result, ModelSearchResult.class); - LOGGER.info("Search by gene POST result "+json_result); - LOGGER.info("POST N models found: "+result.getN()); - assertTrue(result.getN()>0); - } - - @Test - public final void testSearchGetByGO() throws URISyntaxException, IOException { - //make the request - URIBuilder builder = new URIBuilder("http://127.0.0.1:6800/search/models/"); - builder.addParameter("term", 
"http://purl.obolibrary.org/obo/GO_0003677"); - URI searchuri = builder.build(); - String json_result = getJsonStringFromUri(searchuri); - Gson g = new Gson(); - ModelSearchResult result = g.fromJson(json_result, ModelSearchResult.class); - LOGGER.info("Search by GO term URI "+searchuri); - LOGGER.info("Search by GO term result "+json_result); - LOGGER.info("N models found: "+result.getN()); - assertTrue(result.getN()>0); - } - - @Test - public final void testSearchGetByGOclosure() throws URISyntaxException, IOException { - //make the request - URIBuilder builder = new URIBuilder("http://127.0.0.1:6800/search/models/"); - builder.addParameter("term", "http://purl.obolibrary.org/obo/GO_0140110");//transcription factor regulator activity GO_0140110 - builder.addParameter("expand", ""); - URI searchuri = builder.build(); - String json_result = getJsonStringFromUri(searchuri); - Gson g = new Gson(); - ModelSearchResult result = g.fromJson(json_result, ModelSearchResult.class); - LOGGER.info("Search by GO term URI "+searchuri); - LOGGER.info("Search by GO term result "+json_result); - LOGGER.info("N models found: "+result.getN()); - assertTrue(result.getN()+" models found should find some from children of GO_0140110", result.getN()>0); - - builder = new URIBuilder("http://127.0.0.1:6800/search/models/"); - builder.addParameter("term", "http://purl.obolibrary.org/obo/GO_0140110"); - searchuri = builder.build(); - json_result = getJsonStringFromUri(searchuri); - g = new Gson(); - result = g.fromJson(json_result, ModelSearchResult.class); - LOGGER.info("Search by GO term URI "+searchuri); - LOGGER.info("Search by GO term result "+json_result); - LOGGER.info("N models found: "+result.getN()); - assertTrue(result.getN()+" without expand on, should find now models for GO_0140110", result.getN()==0); - } - - @Test - public final void testSearchGetByGOGiantclosure() throws URISyntaxException, IOException { - //make the request - URIBuilder builder = new URIBuilder("http://127.0.0.1:6800/search/models/"); - builder.addParameter("term", "http://purl.obolibrary.org/obo/GO_0003824"); - builder.addParameter("expand", ""); - URI searchuri = builder.build(); - String json_result = getJsonStringFromUri(searchuri); - Gson g = new Gson(); - ModelSearchResult result = g.fromJson(json_result, ModelSearchResult.class); - LOGGER.info("Search by GO term URI "+searchuri); - LOGGER.info("Search by GO term result "+json_result); - LOGGER.info("N models found: "+result.getN()); - assertTrue(result.getN()+" models found should find many children of GO_0003824", result.getN()>0); - } - - @Test - public final void testSearchGetByWormAnatomy() throws URISyntaxException, IOException { - //make the request - URIBuilder builder = new URIBuilder("http://127.0.0.1:6800/search/models/"); - builder.addParameter("term", "http://purl.obolibrary.org/obo/WBbt_0006748"); //vulva - URI searchuri = builder.build(); - String json_result = getJsonStringFromUri(searchuri); - Gson g = new Gson(); - ModelSearchResult result = g.fromJson(json_result, ModelSearchResult.class); - LOGGER.info("Search by GO term URI "+searchuri); - LOGGER.info("Search by GO term result "+json_result); - LOGGER.info("N models found: "+result.getN()); - assertTrue("", result.getN()>0); - } - - @Test - public final void testSearchGetByWormAnatomyClosure() throws URISyntaxException, IOException { - //make the request - URIBuilder builder = new URIBuilder("http://127.0.0.1:6800/search/models/"); - builder.addParameter("term", 
"http://purl.obolibrary.org/obo/WBbt_0008422"); //sex organ parent of vulva - builder.addParameter("expand", ""); - URI searchuri = builder.build(); - String json_result = getJsonStringFromUri(searchuri); - Gson g = new Gson(); - ModelSearchResult result = g.fromJson(json_result, ModelSearchResult.class); - LOGGER.info("Search by GO term URI "+searchuri); - LOGGER.info("Search by GO term result "+json_result); - LOGGER.info("N models found: "+result.getN()); - assertTrue("", result.getN()>0); - } - - // - - @Test - public final void testSearchGetByTaxon() throws URISyntaxException, IOException { - //make the request - URIBuilder builder = new URIBuilder("http://127.0.0.1:6800/search/models/"); - builder.addParameter("taxon", "6239");//worm 6239 14 models //9606 2 zebrafish 7955 2 - URI searchuri = builder.build(); - String json_result = getJsonStringFromUri(searchuri); - Gson g = new Gson(); - ModelSearchResult result = g.fromJson(json_result, ModelSearchResult.class); - LOGGER.info("Search by taxon "+searchuri); - LOGGER.info("Search by taxon result "+json_result); - LOGGER.info("N models found: "+result.getN()); - assertTrue("No models found for taxon ", result.getN()>0); - } - - @Test - public final void testSearchGetByTaxonCurie() throws URISyntaxException, IOException { - //make the request - URIBuilder builder = new URIBuilder("http://127.0.0.1:6800/search/models/"); - builder.addParameter("taxon", "NCBITaxon:559292");//worm 6239 14 models //9606 2 zebrafish 7955 2 - URI searchuri = builder.build(); - String json_result = getJsonStringFromUri(searchuri); - Gson g = new Gson(); - ModelSearchResult result = g.fromJson(json_result, ModelSearchResult.class); - LOGGER.info("Search by taxon "+searchuri); - LOGGER.info("Search by taxon result "+json_result); - LOGGER.info("N models found: "+result.getN()); - assertTrue("No models found for taxon ", result.getN()>0); - } - - @Test - public final void testSearchGetByTaxonURI() throws URISyntaxException, IOException { - //make the request - URIBuilder builder = new URIBuilder("http://127.0.0.1:6800/search/models/"); - builder.addParameter("taxon", "http://purl.obolibrary.org/obo/NCBITaxon_7955");//worm 6239 14 models //9606 2 zebrafish 7955 2 - URI searchuri = builder.build(); - String json_result = getJsonStringFromUri(searchuri); - Gson g = new Gson(); - ModelSearchResult result = g.fromJson(json_result, ModelSearchResult.class); - LOGGER.info("Search by taxon "+searchuri); - LOGGER.info("Search by taxon result "+json_result); - LOGGER.info("N models found: "+result.getN()); - assertTrue("No models found for taxon ", result.getN()>0); - } - - @Test - public final void testSearchGetByTitle() throws URISyntaxException, IOException { - //make the request - URIBuilder builder = new URIBuilder("http://127.0.0.1:6800/search/models/"); - //builder.addParameter("title", "*test*"); - builder.addParameter("title", "GO_shapes Activity unit test "); //gcy-8 . 
GO_shapes Activity unit test 37 (results in specification of) - builder.addParameter("debug", ""); - URI searchuri = builder.build(); - String json_result = getJsonStringFromUri(searchuri); - Gson g = new Gson(); - ModelSearchResult result = g.fromJson(json_result, ModelSearchResult.class); - LOGGER.info("Search by title text URI "+searchuri); - LOGGER.info("Search by title text result "+json_result); - LOGGER.info("N models found: "+result.getN()); - assertTrue(result.getN()>0); - } - - @Test - public final void testSearchGetByPMID() throws URISyntaxException, IOException { - //make the request - URIBuilder builder = new URIBuilder("http://127.0.0.1:6800/search/models/"); - builder.addParameter("pmid", "PMID:1457892"); - URI searchuri = builder.build(); - String json_result = getJsonStringFromUri(searchuri); - Gson g = new Gson(); - ModelSearchResult result = g.fromJson(json_result, ModelSearchResult.class); - LOGGER.info("Search by PMID URI "+searchuri); - LOGGER.info("Search by PMID result "+json_result); - LOGGER.info("N models found: "+result.getN()); - assertTrue(result.getN()>0); - } - - //&state=development&state=review {development, production, closed, review, delete} or operator - @Test - public final void testSearchGetByState() throws URISyntaxException, IOException { - //make the request - URIBuilder builder = new URIBuilder("http://127.0.0.1:6800/search/models/"); - builder.addParameter("state", "development"); - URI searchuri = builder.build(); - String json_result = getJsonStringFromUri(searchuri); - Gson g = new Gson(); - ModelSearchResult result = g.fromJson(json_result, ModelSearchResult.class); - LOGGER.info("Search by state URI "+searchuri); - LOGGER.info("Search by state result "+json_result); - LOGGER.info("N models found: "+result.getN()); - assertTrue(result.getN()>0); - } - - @Test - public final void testSearchGetByContributors() throws URISyntaxException, IOException { - //make the request - URIBuilder builder = new URIBuilder("http://127.0.0.1:6800/search/models/"); - builder.addParameter("contributor", "http://orcid.org/0000-0002-1706-4196"); - - URI searchuri = builder.build(); - String json_result = getJsonStringFromUri(searchuri); - Gson g = new Gson(); - ModelSearchResult result = g.fromJson(json_result, ModelSearchResult.class); - LOGGER.info("Search by contributor URI "+searchuri); - LOGGER.info("Search by contributor "+json_result); - LOGGER.info("N models found: "+result.getN()); - - builder = new URIBuilder("http://127.0.0.1:6800/search/models/"); - builder.addParameter("contributor", "http://orcid.org/0000-0003-1813-6857"); - searchuri = builder.build(); - json_result = getJsonStringFromUri(searchuri); - g = new Gson(); - result = g.fromJson(json_result, ModelSearchResult.class); - LOGGER.info("Search by contributor URI "+searchuri); - LOGGER.info("Search by contributor "+json_result); - LOGGER.info("N models found: "+result.getN()); - - builder = new URIBuilder("http://127.0.0.1:6800/search/models/"); - builder.addParameter("contributor", "http://orcid.org/0000-0002-8688-6599"); - searchuri = builder.build(); - json_result = getJsonStringFromUri(searchuri); - g = new Gson(); - result = g.fromJson(json_result, ModelSearchResult.class); - LOGGER.info("Search by contributor URI "+searchuri); - LOGGER.info("Search by contributor "+json_result); - LOGGER.info("N models found: "+result.getN()); - - builder = new URIBuilder("http://127.0.0.1:6800/search/models/"); - builder.addParameter("contributor", "http://orcid.org/0000-0002-1706-4196"); - 
builder.addParameter("contributor", "http://orcid.org/0000-0003-1813-6857"); - builder.addParameter("contributor", "http://orcid.org/0000-0002-8688-6599"); - searchuri = builder.build(); - json_result = getJsonStringFromUri(searchuri); - g = new Gson(); - result = g.fromJson(json_result, ModelSearchResult.class); - LOGGER.info("Search by multi contributor URI "+searchuri); - LOGGER.info("Search by multi contributor "+json_result); - LOGGER.info("N models found: "+result.getN()); - - assertTrue(result.getN()>0); - } - - @Test - public final void testSearchGetByGroups() throws URISyntaxException, IOException { - //make the request - URIBuilder builder = new URIBuilder("http://127.0.0.1:6800/search/models/"); - builder.addParameter("group", "http://geneontology.org"); //http://www.igs.umaryland.edu "http://www.wormbase.org" - builder.addParameter("group", "http://www.igs.umaryland.edu"); - URI searchuri = builder.build(); - String json_result = getJsonStringFromUri(searchuri); - Gson g = new Gson(); - ModelSearchResult result = g.fromJson(json_result, ModelSearchResult.class); - LOGGER.info("Search by group URI "+searchuri); - LOGGER.info("Search by group "+json_result); - LOGGER.info("N models found: "+result.getN()); - assertTrue(result.getN()>0); - } - - @Test - public final void testSearchGetByDate() throws URISyntaxException, IOException { - //make the request - URIBuilder builder = new URIBuilder("http://127.0.0.1:6800/search/models/"); - builder.addParameter("date", "2018-08-20"); - URI searchuri = builder.build(); - String json_result = getJsonStringFromUri(searchuri); - Gson g = new Gson(); - ModelSearchResult result = g.fromJson(json_result, ModelSearchResult.class); - LOGGER.info("Search by start date URI "+searchuri); - LOGGER.info("Search by start date "+json_result); - LOGGER.info("N models found: "+result.getN()); - assertTrue(result.getN()>0); - } - - @Test - public final void testSearchGetByDateRange() throws URISyntaxException, IOException { - //make the request - URIBuilder builder = new URIBuilder("http://127.0.0.1:6800/search/models/"); - builder.addParameter("date", "2018-08-20"); - builder.addParameter("dateend", "2019-12-02"); - URI searchuri = builder.build(); - String json_result = getJsonStringFromUri(searchuri); - Gson g = new Gson(); - ModelSearchResult result = g.fromJson(json_result, ModelSearchResult.class); - LOGGER.info("Search by date range URI "+searchuri); - LOGGER.info("Search by date range result "+json_result); - LOGGER.info("N models found: "+result.getN()); - assertTrue(result.getN()>0); - } - - @Test - public final void testSearchGetByExactDate() throws URISyntaxException, IOException { - //make the request - URIBuilder builder = new URIBuilder("http://127.0.0.1:6800/search/models/"); - builder.addParameter("exactdate", "2020-02-07"); - URI searchuri = builder.build(); - String json_result = getJsonStringFromUri(searchuri); - Gson g = new Gson(); - ModelSearchResult result = g.fromJson(json_result, ModelSearchResult.class); - LOGGER.info("Search by EXACT date URI "+searchuri); - LOGGER.info("Search by EXACT date "+json_result); - LOGGER.info("N models found: "+result.getN()); - assertTrue(result.getN()>0); - } - - @Test - public final void testSearchGetByDateAndOffset() throws URISyntaxException, IOException { - //make the request - URIBuilder builder = new URIBuilder("http://127.0.0.1:6800/search/models/"); - builder.addParameter("date", "2018-08-20"); - URI searchuri = builder.build(); - String json_result = getJsonStringFromUri(searchuri); - Gson 
g = new Gson(); - ModelSearchResult result1 = g.fromJson(json_result, ModelSearchResult.class); - int n1 = result1.getN(); - builder.addParameter("offset", "1"); - searchuri = builder.build(); - json_result = getJsonStringFromUri(searchuri); - ModelSearchResult result2 = g.fromJson(json_result, ModelSearchResult.class); - int n2 = result2.getN(); - assertTrue(n1>n2); - } - - @Test - public final void testSearchGetByDateAndCount() throws URISyntaxException, IOException { - //make the request - URIBuilder builder = new URIBuilder("http://127.0.0.1:6800/search/models/"); - builder.addParameter("date", "2018-08-20"); - builder.addParameter("count", ""); - URI searchuri = builder.build(); - String json_result = getJsonStringFromUri(searchuri); - Gson g = new Gson(); - ModelSearchResult result = g.fromJson(json_result, ModelSearchResult.class); - assertTrue(result.getN()>0); - LOGGER.info("N models found by count query: "+result.getN()); - assertTrue(result.getModels()==null); - } - - private static String makeBlazegraphJournal(String input_folder) throws IOException, OWLOntologyCreationException, RepositoryException, RDFParseException, RDFHandlerException { - String inputDB = tmp.newFile().getAbsolutePath(); - File i = new File(input_folder); - if(i.exists()) { - //remove anything that existed earlier - File bgdb = new File(inputDB); - if(bgdb.exists()) { - bgdb.delete(); - } - //load everything into a bg journal - OWLOntology dummy = OWLManager.createOWLOntologyManager().createOntology(IRI.create("http://example.org/dummy")); - BlazegraphMolecularModelManager m3 = new BlazegraphMolecularModelManager<>(dummy, curieHandler, modelIdPrefix, inputDB, null, go_lego_journal_file, true); - if(i.isDirectory()) { - FileUtils.listFiles(i, null, true).parallelStream().parallel().forEach(file-> { - if(file.getName().endsWith(".ttl")||file.getName().endsWith("owl")) { - LOGGER.info("Loading " + file); - try { - String modeluri = m3.importModelToDatabase(file, true); - } catch (OWLOntologyCreationException | RepositoryException | RDFParseException - | RDFHandlerException | IOException e) { - // TODO Auto-generated catch block - e.printStackTrace(); - } - } - }); - }else { - LOGGER.info("Loading " + i); - m3.importModelToDatabase(i, true); - } - LOGGER.info("loaded files into blazegraph journal: "+input_folder); - m3.dispose(); - } - return inputDB; - } - - private static String getJsonStringFromUri(URI uri) throws IOException { - final URL url = uri.toURL(); - final HttpURLConnection connection; - InputStream response = null; - // setup and open (actual connection) - connection = (HttpURLConnection) url.openConnection(); - connection.setInstanceFollowRedirects(true); // warning does not follow redirects from http to https - response = connection.getInputStream(); // opens the connection to the server - // get string response from stream - String json = IOUtils.toString(response); - - return json; - } - - - private static String getJsonStringFromPost(HttpPost post) throws IOException { - - CloseableHttpClient httpClient = HttpClients.createDefault(); - CloseableHttpResponse response = httpClient.execute(post); - String json = EntityUtils.toString(response.getEntity()); - - return json; - } + private static final Logger LOGGER = Logger.getLogger(ModelSearchHandlerTest.class); + static Server server; + static final String ontologyIRI = "http://purl.obolibrary.org/obo/go/extensions/go-lego.owl"; + static final String modelIdcurie = "http://model.geneontology.org/"; + static final String modelIdPrefix = 
"gomodel"; + static final String go_lego_journal_file = "/tmp/test-go-lego-blazegraph.jnl"; + static OWLOntology tbox_ontology; + static CurieHandler curieHandler; + static UndoAwareMolecularModelManager models; + private static JsonOrJsonpBatchHandler handler; + + @ClassRule + public static TemporaryFolder tmp = new TemporaryFolder(); + + /** + * @throws java.lang.Exception + */ + @BeforeClass + public static void setUpBeforeClass() throws Exception { + LOGGER.info("Set up molecular model manager - loading files into a journal"); + // set curie handler + String modelIdPrefix = "http://model.geneontology.org/"; + String modelIdcurie = "gomodel"; + final CurieMappings localMappings = new CurieMappings.SimpleCurieMappings(Collections.singletonMap(modelIdcurie, modelIdPrefix)); + curieHandler = new MappedCurieHandler(DefaultCurieHandler.loadDefaultMappings(), localMappings); + String valid_model_folder = "src/test/resources/models/should_pass/"; + String model_save = "src/test/resources/models/tmp/"; + String inputDB = makeBlazegraphJournal(valid_model_folder); + //leave tbox empty for now + OWLOntologyManager ontman = OWLManager.createOWLOntologyManager(); + tbox_ontology = ontman.createOntology(IRI.create("http://example.org/dummy")); + models = new UndoAwareMolecularModelManager(tbox_ontology, curieHandler, modelIdPrefix, inputDB, model_save, go_lego_journal_file, true); + models.addTaxonMetadata(); + + LOGGER.info("Setup Jetty config."); + // Configuration: Use an already existing handler instance + // Configuration: Use custom JSON renderer (GSON) + ResourceConfig resourceConfig = new ResourceConfig(); + resourceConfig.register(GsonMessageBodyHandler.class); + resourceConfig.register(RequireJsonpFilter.class); + + ModelSearchHandler searchHandler = new ModelSearchHandler(models); + resourceConfig = resourceConfig.registerInstances(searchHandler); + + // setup jetty server port, buffers and context path + server = new Server(); + // create connector with port and custom buffer sizes + + HttpConfiguration http_config = new HttpConfiguration(); + int requestHeaderSize = 64 * 1024; + int requestBufferSize = 128 * 1024; + int port = 6800; + String contextString = "/"; + http_config.setRequestHeaderSize(requestHeaderSize); + ServerConnector connector = new ServerConnector(server, new HttpConnectionFactory(http_config)); + connector.setPort(port); + server.addConnector(connector); + + ServletContextHandler context = new ServletContextHandler(ServletContextHandler.SESSIONS); + context.setContextPath(contextString); + server.setHandler(context); + ServletHolder h = new ServletHolder(new ServletContainer(resourceConfig)); + context.addServlet(h, "/*"); + + // start jetty server + LOGGER.info("Start server on port: " + port + " context: " + contextString); + server.start(); + + //set up a handler for testing with M3BatchRequest service + handler = new JsonOrJsonpBatchHandler(models, "development", null, + Collections.emptySet(), (ExternalLookupService) null); + } + + /** + * @throws java.lang.Exception + */ + @AfterClass + public static void tearDownAfterClass() throws Exception { + models.dispose(); + server.stop(); + if (handler != null) { + handler = null; + } + } + + /** + * @throws java.lang.Exception + */ + @Before + public void setUp() throws Exception { + } + + /** + * @throws java.lang.Exception + */ + @After + public void tearDown() throws Exception { + } + + @Test + public final void testReturnModifiedP() throws URISyntaxException, IOException, OWLOntologyStorageException, 
OWLOntologyCreationException, RepositoryException, UnknownIdentifierException { + //get a hold of a test model + String mid = "5d29221b00001265"; + final String modelId = "http://model.geneontology.org/" + mid; + models.saveModel(models.getModel(IRI.create(modelId)), Collections.emptySet(), null); + // get model via standard Noctua request (non-search), check that the model indicated as not modified + M3BatchResponse resp1 = BatchTestTools.getModel(handler, modelId, false); + assertFalse(resp1.data.modifiedFlag); + //run a search query, show that the model found has not been modified + URIBuilder builder = new URIBuilder("http://127.0.0.1:6800/search/models/"); + builder.addParameter("id", "gomodel:" + mid); + URI searchuri = builder.build(); + String json_result = getJsonStringFromUri(searchuri); + Gson g = new Gson(); + ModelSearchResult result = g.fromJson(json_result, ModelSearchResult.class); + assertTrue(result.getN() == 1); + for (ModelSearchHandler.ModelMeta mm : result.getModels()) { + assertFalse(mm.isModified()); + } + //modify the model, but don't save it to the database + // create new individual + M3Request r = BatchTestTools.addIndividual(modelId, "GO:0003674"); + List batch = Collections.singletonList(r); + M3BatchResponse response = handler.m3Batch("test-user", Collections.emptySet(), "test-intention", "foo-packet-id", + batch.toArray(new M3Request[batch.size()]), false, true); + // check that response indicates modified + assertTrue(response.data.modifiedFlag); + + //run the query again and show that the modified-p flag has been set to true + json_result = getJsonStringFromUri(searchuri); + g = new Gson(); + result = g.fromJson(json_result, ModelSearchResult.class); + assertTrue(result.getN() == 1); + //show that the search result knows it was modified + for (ModelSearchHandler.ModelMeta mm : result.getModels()) { + assertTrue(mm.isModified()); + } + //now save it to the database using the m3 api + r = new M3Request(); + r.entity = Entity.model; + r.operation = Operation.storeModel; + r.arguments = new M3Argument(); + r.arguments.modelId = modelId; + batch = Collections.singletonList(r); + response = handler.m3Batch("test-user", Collections.emptySet(), "test-intention", "foo-packet-id", + batch.toArray(new M3Request[batch.size()]), false, true); + // check that response now indicates not modified + assertFalse(response.data.modifiedFlag); + //now look it up by search API again and show that modified state is false once again + json_result = getJsonStringFromUri(searchuri); + g = new Gson(); + result = g.fromJson(json_result, ModelSearchResult.class); + assertTrue(result.getN() == 1); + //show that it now knows it in a non-modified state + for (ModelSearchHandler.ModelMeta mm : result.getModels()) { + assertFalse(mm.isModified()); + } + //don't need to undo changes as the database is rebuilt each time from files and never flushed to file here. 
+ } + + @Test + public final void testSearchGetByModelIdAsCurie() throws URISyntaxException, IOException { + //make the request + URIBuilder builder = new URIBuilder("http://127.0.0.1:6800/search/models/"); + // + builder.addParameter("id", "gomodel:5d29221b00001265"); + builder.addParameter("id", "gomodel:5d29218800000021"); + /// 5d29218800000021 + URI searchuri = builder.build(); + String json_result = getJsonStringFromUri(searchuri); + Gson g = new Gson(); + ModelSearchResult result = g.fromJson(json_result, ModelSearchResult.class); + LOGGER.info("Search by id URI " + searchuri); + LOGGER.info("Search by id result " + json_result); + LOGGER.info("N models found: " + result.getN()); + assertTrue(result.getN() == 2); + } + + @Test + public final void testSearchGetByModelIdAsURI() throws URISyntaxException, IOException { + //make the request + URIBuilder builder = new URIBuilder("http://127.0.0.1:6800/search/models/"); + // + //builder.addParameter("id", "gomodel:5d29221b00001265"); + builder.addParameter("id", "http://model.geneontology.org/5d29221b00001265"); + URI searchuri = builder.build(); + String json_result = getJsonStringFromUri(searchuri); + Gson g = new Gson(); + ModelSearchResult result = g.fromJson(json_result, ModelSearchResult.class); + LOGGER.info("Search by id URI " + searchuri); + LOGGER.info("Search by id result " + json_result); + LOGGER.info("N models found: " + result.getN()); + assertTrue(result.getN() == 1); + } + + /** + * Test method for {@link org.geneontology.minerva.server.handler.ModelSearchHandler#searchGet(java.util.Set, java.util.Set, java.util.Set, java.lang.String, java.util.Set, java.util.Set, java.util.Set, java.lang.String, int, int, java.lang.String)}. + * @throws URISyntaxException + * @throws IOException + */ + @Test + public final void testSearchGetByGene() throws URISyntaxException, IOException { + //make the request + URIBuilder builder = new URIBuilder("http://127.0.0.1:6800/search/models/"); + builder.addParameter("gp", "http://identifiers.org/uniprot/P15822-3"); + URI searchuri = builder.build(); + String json_result = getJsonStringFromUri(searchuri); + Gson g = new Gson(); + ModelSearchResult result = g.fromJson(json_result, ModelSearchResult.class); + LOGGER.info("Search by gene URI " + searchuri); + LOGGER.info("Search by gene result " + json_result); + LOGGER.info("N models found: " + result.getN()); + assertTrue(result.getN() > 0); + } + + @Test + public final void testSearchPostByGene() throws URISyntaxException, IOException { + HttpPost post = new HttpPost(server.getURI() + "search/models"); + List urlParameters = new ArrayList<>(); + urlParameters.add(new BasicNameValuePair("gp", "http://identifiers.org/wormbase/WBGene00001865")); + urlParameters.add(new BasicNameValuePair("gp", "http://identifiers.org/wormbase/WBGene00017304")); + urlParameters.add(new BasicNameValuePair("gp", "http://identifiers.org/wormbase/WBGene00003418")); + post.setEntity(new UrlEncodedFormEntity(urlParameters)); + LOGGER.info("post " + post.toString()); + String json_result = getJsonStringFromPost(post); + Gson g = new Gson(); + ModelSearchResult result = g.fromJson(json_result, ModelSearchResult.class); + LOGGER.info("Search by gene POST result " + json_result); + LOGGER.info("POST N models found: " + result.getN()); + assertTrue(result.getN() > 0); + } + + @Test + public final void testSearchGetByGO() throws URISyntaxException, IOException { + //make the request + URIBuilder builder = new URIBuilder("http://127.0.0.1:6800/search/models/"); + 
builder.addParameter("term", "http://purl.obolibrary.org/obo/GO_0003677"); + URI searchuri = builder.build(); + String json_result = getJsonStringFromUri(searchuri); + Gson g = new Gson(); + ModelSearchResult result = g.fromJson(json_result, ModelSearchResult.class); + LOGGER.info("Search by GO term URI " + searchuri); + LOGGER.info("Search by GO term result " + json_result); + LOGGER.info("N models found: " + result.getN()); + assertTrue(result.getN() > 0); + } + + @Test + public final void testSearchGetByGOclosure() throws URISyntaxException, IOException { + //make the request + URIBuilder builder = new URIBuilder("http://127.0.0.1:6800/search/models/"); + builder.addParameter("term", "http://purl.obolibrary.org/obo/GO_0140110");//transcription factor regulator activity GO_0140110 + builder.addParameter("expand", ""); + URI searchuri = builder.build(); + String json_result = getJsonStringFromUri(searchuri); + Gson g = new Gson(); + ModelSearchResult result = g.fromJson(json_result, ModelSearchResult.class); + LOGGER.info("Search by GO term URI " + searchuri); + LOGGER.info("Search by GO term result " + json_result); + LOGGER.info("N models found: " + result.getN()); + assertTrue(result.getN() + " models found should find some from children of GO_0140110", result.getN() > 0); + + builder = new URIBuilder("http://127.0.0.1:6800/search/models/"); + builder.addParameter("term", "http://purl.obolibrary.org/obo/GO_0140110"); + searchuri = builder.build(); + json_result = getJsonStringFromUri(searchuri); + g = new Gson(); + result = g.fromJson(json_result, ModelSearchResult.class); + LOGGER.info("Search by GO term URI " + searchuri); + LOGGER.info("Search by GO term result " + json_result); + LOGGER.info("N models found: " + result.getN()); + assertTrue(result.getN() + " without expand on, should find now models for GO_0140110", result.getN() == 0); + } + + @Test + public final void testSearchGetByGOGiantclosure() throws URISyntaxException, IOException { + //make the request + URIBuilder builder = new URIBuilder("http://127.0.0.1:6800/search/models/"); + builder.addParameter("term", "http://purl.obolibrary.org/obo/GO_0003824"); + builder.addParameter("expand", ""); + URI searchuri = builder.build(); + String json_result = getJsonStringFromUri(searchuri); + Gson g = new Gson(); + ModelSearchResult result = g.fromJson(json_result, ModelSearchResult.class); + LOGGER.info("Search by GO term URI " + searchuri); + LOGGER.info("Search by GO term result " + json_result); + LOGGER.info("N models found: " + result.getN()); + assertTrue(result.getN() + " models found should find many children of GO_0003824", result.getN() > 0); + } + + @Test + public final void testSearchGetByWormAnatomy() throws URISyntaxException, IOException { + //make the request + URIBuilder builder = new URIBuilder("http://127.0.0.1:6800/search/models/"); + builder.addParameter("term", "http://purl.obolibrary.org/obo/WBbt_0006748"); //vulva + URI searchuri = builder.build(); + String json_result = getJsonStringFromUri(searchuri); + Gson g = new Gson(); + ModelSearchResult result = g.fromJson(json_result, ModelSearchResult.class); + LOGGER.info("Search by GO term URI " + searchuri); + LOGGER.info("Search by GO term result " + json_result); + LOGGER.info("N models found: " + result.getN()); + assertTrue("", result.getN() > 0); + } + + @Test + public final void testSearchGetByWormAnatomyClosure() throws URISyntaxException, IOException { + //make the request + URIBuilder builder = new URIBuilder("http://127.0.0.1:6800/search/models/"); 
+ builder.addParameter("term", "http://purl.obolibrary.org/obo/WBbt_0008422"); //sex organ parent of vulva + builder.addParameter("expand", ""); + URI searchuri = builder.build(); + String json_result = getJsonStringFromUri(searchuri); + Gson g = new Gson(); + ModelSearchResult result = g.fromJson(json_result, ModelSearchResult.class); + LOGGER.info("Search by GO term URI " + searchuri); + LOGGER.info("Search by GO term result " + json_result); + LOGGER.info("N models found: " + result.getN()); + assertTrue("", result.getN() > 0); + } + + // + + @Test + public final void testSearchGetByTaxon() throws URISyntaxException, IOException { + //make the request + URIBuilder builder = new URIBuilder("http://127.0.0.1:6800/search/models/"); + builder.addParameter("taxon", "6239");//worm 6239 14 models //9606 2 zebrafish 7955 2 + URI searchuri = builder.build(); + String json_result = getJsonStringFromUri(searchuri); + Gson g = new Gson(); + ModelSearchResult result = g.fromJson(json_result, ModelSearchResult.class); + LOGGER.info("Search by taxon " + searchuri); + LOGGER.info("Search by taxon result " + json_result); + LOGGER.info("N models found: " + result.getN()); + assertTrue("No models found for taxon ", result.getN() > 0); + } + + @Test + public final void testSearchGetByTaxonCurie() throws URISyntaxException, IOException { + //make the request + URIBuilder builder = new URIBuilder("http://127.0.0.1:6800/search/models/"); + builder.addParameter("taxon", "NCBITaxon:559292");//worm 6239 14 models //9606 2 zebrafish 7955 2 + URI searchuri = builder.build(); + String json_result = getJsonStringFromUri(searchuri); + Gson g = new Gson(); + ModelSearchResult result = g.fromJson(json_result, ModelSearchResult.class); + LOGGER.info("Search by taxon " + searchuri); + LOGGER.info("Search by taxon result " + json_result); + LOGGER.info("N models found: " + result.getN()); + assertTrue("No models found for taxon ", result.getN() > 0); + } + + @Test + public final void testSearchGetByTaxonURI() throws URISyntaxException, IOException { + //make the request + URIBuilder builder = new URIBuilder("http://127.0.0.1:6800/search/models/"); + builder.addParameter("taxon", "http://purl.obolibrary.org/obo/NCBITaxon_7955");//worm 6239 14 models //9606 2 zebrafish 7955 2 + URI searchuri = builder.build(); + String json_result = getJsonStringFromUri(searchuri); + Gson g = new Gson(); + ModelSearchResult result = g.fromJson(json_result, ModelSearchResult.class); + LOGGER.info("Search by taxon " + searchuri); + LOGGER.info("Search by taxon result " + json_result); + LOGGER.info("N models found: " + result.getN()); + assertTrue("No models found for taxon ", result.getN() > 0); + } + + @Test + public final void testSearchGetByTitle() throws URISyntaxException, IOException { + //make the request + URIBuilder builder = new URIBuilder("http://127.0.0.1:6800/search/models/"); + //builder.addParameter("title", "*test*"); + builder.addParameter("title", "GO_shapes Activity unit test "); //gcy-8 . 
GO_shapes Activity unit test 37 (results in specification of) + builder.addParameter("debug", ""); + URI searchuri = builder.build(); + String json_result = getJsonStringFromUri(searchuri); + Gson g = new Gson(); + ModelSearchResult result = g.fromJson(json_result, ModelSearchResult.class); + LOGGER.info("Search by title text URI " + searchuri); + LOGGER.info("Search by title text result " + json_result); + LOGGER.info("N models found: " + result.getN()); + assertTrue(result.getN() > 0); + } + + @Test + public final void testSearchGetByPMID() throws URISyntaxException, IOException { + //make the request + URIBuilder builder = new URIBuilder("http://127.0.0.1:6800/search/models/"); + builder.addParameter("pmid", "PMID:1457892"); + URI searchuri = builder.build(); + String json_result = getJsonStringFromUri(searchuri); + Gson g = new Gson(); + ModelSearchResult result = g.fromJson(json_result, ModelSearchResult.class); + LOGGER.info("Search by PMID URI " + searchuri); + LOGGER.info("Search by PMID result " + json_result); + LOGGER.info("N models found: " + result.getN()); + assertTrue(result.getN() > 0); + } + + //&state=development&state=review {development, production, closed, review, delete} or operator + @Test + public final void testSearchGetByState() throws URISyntaxException, IOException { + //make the request + URIBuilder builder = new URIBuilder("http://127.0.0.1:6800/search/models/"); + builder.addParameter("state", "development"); + URI searchuri = builder.build(); + String json_result = getJsonStringFromUri(searchuri); + Gson g = new Gson(); + ModelSearchResult result = g.fromJson(json_result, ModelSearchResult.class); + LOGGER.info("Search by state URI " + searchuri); + LOGGER.info("Search by state result " + json_result); + LOGGER.info("N models found: " + result.getN()); + assertTrue(result.getN() > 0); + } + + @Test + public final void testSearchGetByContributors() throws URISyntaxException, IOException { + //make the request + URIBuilder builder = new URIBuilder("http://127.0.0.1:6800/search/models/"); + builder.addParameter("contributor", "http://orcid.org/0000-0002-1706-4196"); + + URI searchuri = builder.build(); + String json_result = getJsonStringFromUri(searchuri); + Gson g = new Gson(); + ModelSearchResult result = g.fromJson(json_result, ModelSearchResult.class); + LOGGER.info("Search by contributor URI " + searchuri); + LOGGER.info("Search by contributor " + json_result); + LOGGER.info("N models found: " + result.getN()); + + builder = new URIBuilder("http://127.0.0.1:6800/search/models/"); + builder.addParameter("contributor", "http://orcid.org/0000-0003-1813-6857"); + searchuri = builder.build(); + json_result = getJsonStringFromUri(searchuri); + g = new Gson(); + result = g.fromJson(json_result, ModelSearchResult.class); + LOGGER.info("Search by contributor URI " + searchuri); + LOGGER.info("Search by contributor " + json_result); + LOGGER.info("N models found: " + result.getN()); + + builder = new URIBuilder("http://127.0.0.1:6800/search/models/"); + builder.addParameter("contributor", "http://orcid.org/0000-0002-8688-6599"); + searchuri = builder.build(); + json_result = getJsonStringFromUri(searchuri); + g = new Gson(); + result = g.fromJson(json_result, ModelSearchResult.class); + LOGGER.info("Search by contributor URI " + searchuri); + LOGGER.info("Search by contributor " + json_result); + LOGGER.info("N models found: " + result.getN()); + + builder = new URIBuilder("http://127.0.0.1:6800/search/models/"); + builder.addParameter("contributor", 
"http://orcid.org/0000-0002-1706-4196"); + builder.addParameter("contributor", "http://orcid.org/0000-0003-1813-6857"); + builder.addParameter("contributor", "http://orcid.org/0000-0002-8688-6599"); + searchuri = builder.build(); + json_result = getJsonStringFromUri(searchuri); + g = new Gson(); + result = g.fromJson(json_result, ModelSearchResult.class); + LOGGER.info("Search by multi contributor URI " + searchuri); + LOGGER.info("Search by multi contributor " + json_result); + LOGGER.info("N models found: " + result.getN()); + + assertTrue(result.getN() > 0); + } + + @Test + public final void testSearchGetByGroups() throws URISyntaxException, IOException { + //make the request + URIBuilder builder = new URIBuilder("http://127.0.0.1:6800/search/models/"); + builder.addParameter("group", "http://geneontology.org"); //http://www.igs.umaryland.edu "http://www.wormbase.org" + builder.addParameter("group", "http://www.igs.umaryland.edu"); + URI searchuri = builder.build(); + String json_result = getJsonStringFromUri(searchuri); + Gson g = new Gson(); + ModelSearchResult result = g.fromJson(json_result, ModelSearchResult.class); + LOGGER.info("Search by group URI " + searchuri); + LOGGER.info("Search by group " + json_result); + LOGGER.info("N models found: " + result.getN()); + assertTrue(result.getN() > 0); + } + + @Test + public final void testSearchGetByDate() throws URISyntaxException, IOException { + //make the request + URIBuilder builder = new URIBuilder("http://127.0.0.1:6800/search/models/"); + builder.addParameter("date", "2018-08-20"); + URI searchuri = builder.build(); + String json_result = getJsonStringFromUri(searchuri); + Gson g = new Gson(); + ModelSearchResult result = g.fromJson(json_result, ModelSearchResult.class); + LOGGER.info("Search by start date URI " + searchuri); + LOGGER.info("Search by start date " + json_result); + LOGGER.info("N models found: " + result.getN()); + assertTrue(result.getN() > 0); + } + + @Test + public final void testSearchGetByDateRange() throws URISyntaxException, IOException { + //make the request + URIBuilder builder = new URIBuilder("http://127.0.0.1:6800/search/models/"); + builder.addParameter("date", "2018-08-20"); + builder.addParameter("dateend", "2019-12-02"); + URI searchuri = builder.build(); + String json_result = getJsonStringFromUri(searchuri); + Gson g = new Gson(); + ModelSearchResult result = g.fromJson(json_result, ModelSearchResult.class); + LOGGER.info("Search by date range URI " + searchuri); + LOGGER.info("Search by date range result " + json_result); + LOGGER.info("N models found: " + result.getN()); + assertTrue(result.getN() > 0); + } + + @Test + public final void testSearchGetByExactDate() throws URISyntaxException, IOException { + //make the request + URIBuilder builder = new URIBuilder("http://127.0.0.1:6800/search/models/"); + builder.addParameter("exactdate", "2020-02-07"); + URI searchuri = builder.build(); + String json_result = getJsonStringFromUri(searchuri); + Gson g = new Gson(); + ModelSearchResult result = g.fromJson(json_result, ModelSearchResult.class); + LOGGER.info("Search by EXACT date URI " + searchuri); + LOGGER.info("Search by EXACT date " + json_result); + LOGGER.info("N models found: " + result.getN()); + assertTrue(result.getN() > 0); + } + + @Test + public final void testSearchGetByDateAndOffset() throws URISyntaxException, IOException { + //make the request + URIBuilder builder = new URIBuilder("http://127.0.0.1:6800/search/models/"); + builder.addParameter("date", "2018-08-20"); + URI searchuri 
= builder.build(); + String json_result = getJsonStringFromUri(searchuri); + Gson g = new Gson(); + ModelSearchResult result1 = g.fromJson(json_result, ModelSearchResult.class); + int n1 = result1.getN(); + builder.addParameter("offset", "1"); + searchuri = builder.build(); + json_result = getJsonStringFromUri(searchuri); + ModelSearchResult result2 = g.fromJson(json_result, ModelSearchResult.class); + int n2 = result2.getN(); + assertTrue(n1 > n2); + } + + @Test + public final void testSearchGetByDateAndCount() throws URISyntaxException, IOException { + //make the request + URIBuilder builder = new URIBuilder("http://127.0.0.1:6800/search/models/"); + builder.addParameter("date", "2018-08-20"); + builder.addParameter("count", ""); + URI searchuri = builder.build(); + String json_result = getJsonStringFromUri(searchuri); + Gson g = new Gson(); + ModelSearchResult result = g.fromJson(json_result, ModelSearchResult.class); + assertTrue(result.getN() > 0); + LOGGER.info("N models found by count query: " + result.getN()); + assertTrue(result.getModels() == null); + } + + private static String makeBlazegraphJournal(String input_folder) throws IOException, OWLOntologyCreationException, RepositoryException, RDFParseException, RDFHandlerException { + String inputDB = tmp.newFile().getAbsolutePath(); + File i = new File(input_folder); + if (i.exists()) { + //remove anything that existed earlier + File bgdb = new File(inputDB); + if (bgdb.exists()) { + bgdb.delete(); + } + //load everything into a bg journal + OWLOntology dummy = OWLManager.createOWLOntologyManager().createOntology(IRI.create("http://example.org/dummy")); + BlazegraphMolecularModelManager m3 = new BlazegraphMolecularModelManager<>(dummy, curieHandler, modelIdPrefix, inputDB, null, go_lego_journal_file, true); + if (i.isDirectory()) { + FileUtils.listFiles(i, null, true).parallelStream().parallel().forEach(file -> { + if (file.getName().endsWith(".ttl") || file.getName().endsWith("owl")) { + LOGGER.info("Loading " + file); + try { + String modeluri = m3.importModelToDatabase(file, true); + } catch (OWLOntologyCreationException | RepositoryException | RDFParseException + | RDFHandlerException | IOException e) { + // TODO Auto-generated catch block + e.printStackTrace(); + } + } + }); + } else { + LOGGER.info("Loading " + i); + m3.importModelToDatabase(i, true); + } + LOGGER.info("loaded files into blazegraph journal: " + input_folder); + m3.dispose(); + } + return inputDB; + } + + private static String getJsonStringFromUri(URI uri) throws IOException { + final URL url = uri.toURL(); + final HttpURLConnection connection; + InputStream response = null; + // setup and open (actual connection) + connection = (HttpURLConnection) url.openConnection(); + connection.setInstanceFollowRedirects(true); // warning does not follow redirects from http to https + response = connection.getInputStream(); // opens the connection to the server + // get string response from stream + String json = IOUtils.toString(response); + + return json; + } + + + private static String getJsonStringFromPost(HttpPost post) throws IOException { + + CloseableHttpClient httpClient = HttpClients.createDefault(); + CloseableHttpResponse response = httpClient.execute(post); + String json = EntityUtils.toString(response.getEntity()); + + return json; + } } diff --git a/minerva-server/src/test/java/org/geneontology/minerva/server/handler/ParallelModelReasonerTest.java b/minerva-server/src/test/java/org/geneontology/minerva/server/handler/ParallelModelReasonerTest.java index 
cf4af1b3..631e1caf 100644 --- a/minerva-server/src/test/java/org/geneontology/minerva/server/handler/ParallelModelReasonerTest.java +++ b/minerva-server/src/test/java/org/geneontology/minerva/server/handler/ParallelModelReasonerTest.java @@ -29,203 +29,203 @@ import static org.junit.Assert.assertTrue; public class ParallelModelReasonerTest { - - @ClassRule - public static TemporaryFolder folder = new TemporaryFolder(); - - private static CurieHandler curieHandler = null; - private static JsonOrJsonpBatchHandler handler = null; - private static UndoAwareMolecularModelManager models = null; - private static CountingCachingInferenceProvider ipc; - static final String go_lego_journal_file = "/tmp/test-go-lego-blazegraph.jnl"; - - @BeforeClass - public static void setUpBeforeClass() throws Exception { - init(new ParserWrapper()); - } - - static void init(ParserWrapper pw) throws OWLOntologyCreationException, IOException { - //FIXME need more from go-lego - final OWLOntology tbox = OWLManager.createOWLOntologyManager().loadOntology(IRI.create(new File("src/test/resources/go-lego-minimal.owl"))); - // curie handler - final String modelIdcurie = "gomodel"; - final String modelIdPrefix = "http://model.geneontology.org/"; - final CurieMappings localMappings = new CurieMappings.SimpleCurieMappings(Collections.singletonMap(modelIdcurie, modelIdPrefix)); - curieHandler = new MappedCurieHandler(DefaultCurieHandler.loadDefaultMappings(), localMappings); - - models = new UndoAwareMolecularModelManager(tbox, curieHandler, modelIdPrefix, folder.newFile().getAbsolutePath(), null, go_lego_journal_file, true); - ipc = new CountingCachingInferenceProvider(false); - handler = new JsonOrJsonpBatchHandler(models, "development", ipc , - Collections.emptySet(), (ExternalLookupService) null); - //models.setPathToOWLFiles("src/test/resources/reasoner-test"); - } - - @AfterClass - public static void tearDownAfterClass() throws Exception { - if (handler != null) { - handler = null; - } - if (models != null) { - models.dispose(); - } - } - - @Before - public void before() { - ipc.clear(); - models.dispose(); - } - - //FIXME @Test - public void testMostlyReadReasoner() throws Exception { - List threads = new ArrayList<>(); - threads.add(new ModifyingDelayedRequestThread(0)); - for (int i = 0; i < 10; i++) { - threads.add(new DelayedRequestThread(i*100)); - } - for (DelayedRequestThread thread : threads) { - thread.start(); - } - for (DelayedRequestThread thread : threads) { - thread.join(); - validateResponse(thread.response); - } - System.out.println("Hit: "+ipc.hit+" Miss: "+ipc.miss); - assertTrue(ipc.hit.get() >= 7); // most should be hits - assertTrue(ipc.miss.get() >= 1); // at least one miss - } - - @Ignore("Fails currently on the build server") - @Test - public void testMostlyModifyReasoner() throws Exception { - List threads = new ArrayList<>(); - for (int i = 0; i < 10; i++) { - threads.add(new ModifyingDelayedRequestThread(i*100)); - } - threads.add(new DelayedRequestThread(250)); - threads.add(new DelayedRequestThread(1050)); - - for (DelayedRequestThread thread : threads) { - thread.start(); - } - for (DelayedRequestThread thread : threads) { - thread.join(); - validateResponse(thread.response); - } - System.out.println("Hit: "+ipc.hit+" Miss: "+ipc.miss); - assertTrue(ipc.miss.get() >= 10); // ten changes, at least ten cache miss - } - - private static final class CountingCachingInferenceProvider extends CachingInferenceProviderCreatorImpl { - - final AtomicLong hit = new AtomicLong(0L); - final AtomicLong 
miss = new AtomicLong(0L); - - private CountingCachingInferenceProvider(boolean useSLME) { - super(new ElkReasonerFactory(), 1, useSLME, "Counting Caching ELK", null); - } - - @Override - protected void addHit() { - hit.incrementAndGet(); - } - - @Override - protected void addMiss() { - miss.incrementAndGet(); - } - - protected void clear() { - super.clear(); - hit.set(0); - miss.set(0); - } - } - - private class ModifyingDelayedRequestThread extends DelayedRequestThread { - private ModifyingDelayedRequestThread(int delay) { - super(delay); - } - - @Override - protected List createBatch() { - return createAddBatch(); - } - } - - private class DelayedRequestThread extends Thread { - - private final long millis; - private M3BatchResponse response; - - public DelayedRequestThread(int delay) { - millis = delay; - } - - @Override - public void run() { - // delay - try { - Thread.sleep(millis); - } catch (InterruptedException e) { - // ignore - } - // work - List batch = createBatch(); - - response = executeBatch(batch); - - } - - protected List createBatch() { - return createReadBatch(); - } - } - - private List createReadBatch() { - List batch = new ArrayList<>(); - M3Request r; - - final String modelId = "http://model.geneontology.org/5525a0fc00000001"; - - // get model - r = new M3Request(); - r.entity = Entity.model; - r.operation = Operation.get; - r.arguments = new M3Argument(); - r.arguments.modelId = modelId; - batch.add(r); - - return batch; - } - - private List createAddBatch() { - List batch = new ArrayList<>(); - M3Request r; - - final String modelId = "http://model.geneontology.org/5525a0fc00000001"; - - // get model - r = new M3Request(); - r.entity = Entity.individual; - r.operation = Operation.add; - r.arguments = new M3Argument(); - r.arguments.modelId = modelId; - BatchTestTools.setExpressionClass(r.arguments, "GO:0003674"); - batch.add(r); - - return batch; - } - - private void validateResponse(M3BatchResponse response) { - assertEquals("test-user", response.uid); - assertEquals("test-intention", response.intention); - assertEquals(response.message, M3BatchResponse.MESSAGE_TYPE_SUCCESS, response.messageType); - } - - private M3BatchResponse executeBatch(List batch) { - M3BatchResponse response = handler.m3Batch("test-user", Collections.emptySet(), "test-intention", "foo-packet-id", - batch.toArray(new M3Request[batch.size()]), true, true); - return response; - } + + @ClassRule + public static TemporaryFolder folder = new TemporaryFolder(); + + private static CurieHandler curieHandler = null; + private static JsonOrJsonpBatchHandler handler = null; + private static UndoAwareMolecularModelManager models = null; + private static CountingCachingInferenceProvider ipc; + static final String go_lego_journal_file = "/tmp/test-go-lego-blazegraph.jnl"; + + @BeforeClass + public static void setUpBeforeClass() throws Exception { + init(new ParserWrapper()); + } + + static void init(ParserWrapper pw) throws OWLOntologyCreationException, IOException { + //FIXME need more from go-lego + final OWLOntology tbox = OWLManager.createOWLOntologyManager().loadOntology(IRI.create(new File("src/test/resources/go-lego-minimal.owl"))); + // curie handler + final String modelIdcurie = "gomodel"; + final String modelIdPrefix = "http://model.geneontology.org/"; + final CurieMappings localMappings = new CurieMappings.SimpleCurieMappings(Collections.singletonMap(modelIdcurie, modelIdPrefix)); + curieHandler = new MappedCurieHandler(DefaultCurieHandler.loadDefaultMappings(), localMappings); + + models = new 
UndoAwareMolecularModelManager(tbox, curieHandler, modelIdPrefix, folder.newFile().getAbsolutePath(), null, go_lego_journal_file, true); + ipc = new CountingCachingInferenceProvider(false); + handler = new JsonOrJsonpBatchHandler(models, "development", ipc, + Collections.emptySet(), (ExternalLookupService) null); + //models.setPathToOWLFiles("src/test/resources/reasoner-test"); + } + + @AfterClass + public static void tearDownAfterClass() throws Exception { + if (handler != null) { + handler = null; + } + if (models != null) { + models.dispose(); + } + } + + @Before + public void before() { + ipc.clear(); + models.dispose(); + } + + //FIXME @Test + public void testMostlyReadReasoner() throws Exception { + List threads = new ArrayList<>(); + threads.add(new ModifyingDelayedRequestThread(0)); + for (int i = 0; i < 10; i++) { + threads.add(new DelayedRequestThread(i * 100)); + } + for (DelayedRequestThread thread : threads) { + thread.start(); + } + for (DelayedRequestThread thread : threads) { + thread.join(); + validateResponse(thread.response); + } + System.out.println("Hit: " + ipc.hit + " Miss: " + ipc.miss); + assertTrue(ipc.hit.get() >= 7); // most should be hits + assertTrue(ipc.miss.get() >= 1); // at least one miss + } + + @Ignore("Fails currently on the build server") + @Test + public void testMostlyModifyReasoner() throws Exception { + List threads = new ArrayList<>(); + for (int i = 0; i < 10; i++) { + threads.add(new ModifyingDelayedRequestThread(i * 100)); + } + threads.add(new DelayedRequestThread(250)); + threads.add(new DelayedRequestThread(1050)); + + for (DelayedRequestThread thread : threads) { + thread.start(); + } + for (DelayedRequestThread thread : threads) { + thread.join(); + validateResponse(thread.response); + } + System.out.println("Hit: " + ipc.hit + " Miss: " + ipc.miss); + assertTrue(ipc.miss.get() >= 10); // ten changes, at least ten cache miss + } + + private static final class CountingCachingInferenceProvider extends CachingInferenceProviderCreatorImpl { + + final AtomicLong hit = new AtomicLong(0L); + final AtomicLong miss = new AtomicLong(0L); + + private CountingCachingInferenceProvider(boolean useSLME) { + super(new ElkReasonerFactory(), 1, useSLME, "Counting Caching ELK", null); + } + + @Override + protected void addHit() { + hit.incrementAndGet(); + } + + @Override + protected void addMiss() { + miss.incrementAndGet(); + } + + protected void clear() { + super.clear(); + hit.set(0); + miss.set(0); + } + } + + private class ModifyingDelayedRequestThread extends DelayedRequestThread { + private ModifyingDelayedRequestThread(int delay) { + super(delay); + } + + @Override + protected List createBatch() { + return createAddBatch(); + } + } + + private class DelayedRequestThread extends Thread { + + private final long millis; + private M3BatchResponse response; + + public DelayedRequestThread(int delay) { + millis = delay; + } + + @Override + public void run() { + // delay + try { + Thread.sleep(millis); + } catch (InterruptedException e) { + // ignore + } + // work + List batch = createBatch(); + + response = executeBatch(batch); + + } + + protected List createBatch() { + return createReadBatch(); + } + } + + private List createReadBatch() { + List batch = new ArrayList<>(); + M3Request r; + + final String modelId = "http://model.geneontology.org/5525a0fc00000001"; + + // get model + r = new M3Request(); + r.entity = Entity.model; + r.operation = Operation.get; + r.arguments = new M3Argument(); + r.arguments.modelId = modelId; + batch.add(r); + + return 
batch; + } + + private List createAddBatch() { + List batch = new ArrayList<>(); + M3Request r; + + final String modelId = "http://model.geneontology.org/5525a0fc00000001"; + + // get model + r = new M3Request(); + r.entity = Entity.individual; + r.operation = Operation.add; + r.arguments = new M3Argument(); + r.arguments.modelId = modelId; + BatchTestTools.setExpressionClass(r.arguments, "GO:0003674"); + batch.add(r); + + return batch; + } + + private void validateResponse(M3BatchResponse response) { + assertEquals("test-user", response.uid); + assertEquals("test-intention", response.intention); + assertEquals(response.message, M3BatchResponse.MESSAGE_TYPE_SUCCESS, response.messageType); + } + + private M3BatchResponse executeBatch(List batch) { + M3BatchResponse response = handler.m3Batch("test-user", Collections.emptySet(), "test-intention", "foo-packet-id", + batch.toArray(new M3Request[batch.size()]), true, true); + return response; + } } diff --git a/minerva-server/src/test/java/org/geneontology/minerva/server/handler/TaxonHandlerTest.java b/minerva-server/src/test/java/org/geneontology/minerva/server/handler/TaxonHandlerTest.java index e55f7e88..eb5e85fc 100644 --- a/minerva-server/src/test/java/org/geneontology/minerva/server/handler/TaxonHandlerTest.java +++ b/minerva-server/src/test/java/org/geneontology/minerva/server/handler/TaxonHandlerTest.java @@ -1,35 +1,16 @@ /** - * + * */ package org.geneontology.minerva.server.handler; -import static org.junit.Assert.*; - -import java.io.File; -import java.io.IOException; -import java.io.InputStream; -import java.io.UnsupportedEncodingException; -import java.lang.reflect.Type; -import java.net.HttpURLConnection; -import java.net.URI; -import java.net.URISyntaxException; -import java.net.URL; -import java.util.ArrayList; -import java.util.Collection; -import java.util.Collections; -import java.util.List; -import java.util.Set; - +import com.google.gson.Gson; import org.apache.commons.io.FileUtils; import org.apache.commons.io.IOUtils; -import org.apache.commons.lang3.StringUtils; -import org.apache.http.client.entity.UrlEncodedFormEntity; import org.apache.http.client.methods.CloseableHttpResponse; import org.apache.http.client.methods.HttpPost; import org.apache.http.client.utils.URIBuilder; import org.apache.http.impl.client.CloseableHttpClient; import org.apache.http.impl.client.HttpClients; -import org.apache.http.message.BasicNameValuePair; import org.apache.http.util.EntityUtils; import org.apache.log4j.Logger; import org.eclipse.jetty.server.HttpConfiguration; @@ -41,28 +22,12 @@ import org.geneontology.minerva.BlazegraphMolecularModelManager; import org.geneontology.minerva.UndoAwareMolecularModelManager; import org.geneontology.minerva.curie.CurieHandler; -import org.geneontology.minerva.curie.CurieMappings; -import org.geneontology.minerva.curie.DefaultCurieHandler; import org.geneontology.minerva.curie.MappedCurieHandler; -import org.geneontology.minerva.lookup.CachingExternalLookupService; -import org.geneontology.minerva.lookup.ExternalLookupService; -import org.geneontology.minerva.lookup.GolrExternalLookupService; -import org.geneontology.minerva.lookup.MonarchExternalLookupService; import org.geneontology.minerva.server.GsonMessageBodyHandler; -import org.geneontology.minerva.server.LoggingApplicationEventListener; import org.geneontology.minerva.server.RequireJsonpFilter; -import org.geneontology.minerva.server.handler.ModelSearchHandler.ModelSearchResult; -import 
org.geneontology.minerva.server.inferences.InferenceProviderCreator; -import org.geneontology.minerva.server.validation.MinervaShexValidator; -import org.geneontology.minerva.server.validation.ValidationTest; import org.glassfish.jersey.server.ResourceConfig; import org.glassfish.jersey.servlet.ServletContainer; -import org.junit.After; -import org.junit.AfterClass; -import org.junit.Before; -import org.junit.BeforeClass; -import org.junit.ClassRule; -import org.junit.Test; +import org.junit.*; import org.junit.rules.TemporaryFolder; import org.openrdf.repository.RepositoryException; import org.openrdf.rio.RDFHandlerException; @@ -71,181 +36,186 @@ import org.semanticweb.owlapi.model.IRI; import org.semanticweb.owlapi.model.OWLOntology; import org.semanticweb.owlapi.model.OWLOntologyCreationException; -import org.semanticweb.owlapi.model.OWLOntologyID; import org.semanticweb.owlapi.model.OWLOntologyManager; -import com.google.gson.Gson; -import com.google.gson.reflect.TypeToken; +import java.io.File; +import java.io.IOException; +import java.io.InputStream; +import java.net.HttpURLConnection; +import java.net.URI; +import java.net.URISyntaxException; +import java.net.URL; + +import static org.junit.Assert.assertTrue; /** * @author benjamingood * */ public class TaxonHandlerTest { - private static final Logger LOGGER = Logger.getLogger(TaxonHandlerTest.class); - static Server server; - static final String ontologyIRI = "http://purl.obolibrary.org/obo/go/extensions/go-lego.owl"; - static final String modelIdcurie = "http://model.geneontology.org/"; - static final String modelIdPrefix = "gomodel"; - static final String go_lego_journal_file = "/tmp/test-go-lego-blazegraph.jnl"; - static OWLOntology tbox_ontology; - static CurieHandler curieHandler; - static TaxonHandler taxonHandler; - - @ClassRule - public static TemporaryFolder tmp = new TemporaryFolder(); - - /** - * @throws java.lang.Exception - */ - @BeforeClass - public static void setUpBeforeClass() throws Exception { - LOGGER.info("Set up molecular model manager - loading files into a journal"); - // set curie handler - String modelIdPrefix = "http://model.geneontology.org/"; - String modelIdcurie = "gomodel"; - curieHandler = new MappedCurieHandler(); - String valid_model_folder = "src/test/resources/models/should_pass/"; - String inputDB = makeBlazegraphJournal(valid_model_folder); - //leave tbox empty for now - OWLOntologyManager ontman = OWLManager.createOWLOntologyManager(); - tbox_ontology = ontman.createOntology(IRI.create("http://example.org/dummy")); - UndoAwareMolecularModelManager models = new UndoAwareMolecularModelManager(tbox_ontology, curieHandler, modelIdPrefix, inputDB, null, go_lego_journal_file, true); - models.addTaxonMetadata(); - - LOGGER.info("Setup Jetty config."); - // Configuration: Use an already existing handler instance - // Configuration: Use custom JSON renderer (GSON) - ResourceConfig resourceConfig = new ResourceConfig(); - resourceConfig.register(GsonMessageBodyHandler.class); - resourceConfig.register(RequireJsonpFilter.class); - - taxonHandler = new TaxonHandler(models); - resourceConfig = resourceConfig.registerInstances(taxonHandler); - - // setup jetty server port, buffers and context path - server = new Server(); - // create connector with port and custom buffer sizes - - HttpConfiguration http_config = new HttpConfiguration(); - int requestHeaderSize = 64*1024; - int requestBufferSize = 128*1024; - int port = 6800; - String contextString = "/"; - 
http_config.setRequestHeaderSize(requestHeaderSize); - ServerConnector connector = new ServerConnector(server, new HttpConnectionFactory(http_config)); - connector.setPort(port); - server.addConnector(connector); - - ServletContextHandler context = new ServletContextHandler(ServletContextHandler.SESSIONS); - context.setContextPath(contextString); - server.setHandler(context); - ServletHolder h = new ServletHolder(new ServletContainer(resourceConfig)); - context.addServlet(h, "/*"); - - // start jetty server - LOGGER.info("Start server on port: "+port+" context: "+contextString); - server.start(); - } - - /** - * @throws java.lang.Exception - */ - @AfterClass - public static void tearDownAfterClass() throws Exception { - taxonHandler.getM3().dispose(); - server.stop(); - } - - /** - * @throws java.lang.Exception - */ - @Before - public void setUp() throws Exception { - } - - /** - * @throws java.lang.Exception - */ - @After - public void tearDown() throws Exception { - } - - - - @Test - public final void testTaxa() throws URISyntaxException, IOException { - //make the request - URIBuilder builder = new URIBuilder("http://127.0.0.1:6800/search/taxa/"); - URI searchuri = builder.build(); - String json_result = getJsonStringFromUri(searchuri); - LOGGER.info("JSON result from test taxa\n"+json_result); - Gson g = new Gson(); - TaxonHandler.Taxa result = g.fromJson(json_result, TaxonHandler.Taxa.class); - assertTrue(result.taxa.size()>1); - LOGGER.info("N taxa: "+result.taxa.size()); - for(TaxonHandler.Taxa.Taxon t : result.taxa) { - LOGGER.info(t.id+" "+t.label); - } - } - - private static String makeBlazegraphJournal(String input_folder) throws IOException, OWLOntologyCreationException, RepositoryException, RDFParseException, RDFHandlerException { - String inputDB = tmp.newFile().getAbsolutePath(); - File i = new File(input_folder); - if(i.exists()) { - //remove anything that existed earlier - File bgdb = new File(inputDB); - if(bgdb.exists()) { - bgdb.delete(); - } - //load everything into a bg journal - OWLOntology dummy = OWLManager.createOWLOntologyManager().createOntology(IRI.create("http://example.org/dummy")); - BlazegraphMolecularModelManager m3 = new BlazegraphMolecularModelManager<>(dummy, curieHandler, modelIdPrefix, inputDB, null, go_lego_journal_file, true); - if(i.isDirectory()) { - FileUtils.listFiles(i, null, true).parallelStream().parallel().forEach(file-> { - if(file.getName().endsWith(".ttl")||file.getName().endsWith("owl")) { - LOGGER.info("Loading " + file); - try { - m3.importModelToDatabase(file, true); - } catch (OWLOntologyCreationException | RepositoryException | RDFParseException - | RDFHandlerException | IOException e) { - // TODO Auto-generated catch block - e.printStackTrace(); - } - } - }); - }else { - LOGGER.info("Loading " + i); - m3.importModelToDatabase(i, true); - } - LOGGER.info("loaded files into blazegraph journal: "+input_folder); - m3.dispose(); - } - return inputDB; - } - - private static String getJsonStringFromUri(URI uri) throws IOException { - final URL url = uri.toURL(); - final HttpURLConnection connection; - InputStream response = null; - // setup and open (actual connection) - connection = (HttpURLConnection) url.openConnection(); - connection.setInstanceFollowRedirects(true); // warning does not follow redirects from http to https - response = connection.getInputStream(); // opens the connection to the server - // get string response from stream - String json = IOUtils.toString(response); - - return json; - } - - -private static String 
getJsonStringFromPost(HttpPost post) throws IOException { - - CloseableHttpClient httpClient = HttpClients.createDefault(); - CloseableHttpResponse response = httpClient.execute(post); - String json = EntityUtils.toString(response.getEntity()); - - return json; - } - + private static final Logger LOGGER = Logger.getLogger(TaxonHandlerTest.class); + static Server server; + static final String ontologyIRI = "http://purl.obolibrary.org/obo/go/extensions/go-lego.owl"; + static final String modelIdcurie = "http://model.geneontology.org/"; + static final String modelIdPrefix = "gomodel"; + static final String go_lego_journal_file = "/tmp/test-go-lego-blazegraph.jnl"; + static OWLOntology tbox_ontology; + static CurieHandler curieHandler; + static TaxonHandler taxonHandler; + + @ClassRule + public static TemporaryFolder tmp = new TemporaryFolder(); + + /** + * @throws java.lang.Exception + */ + @BeforeClass + public static void setUpBeforeClass() throws Exception { + LOGGER.info("Set up molecular model manager - loading files into a journal"); + // set curie handler + String modelIdPrefix = "http://model.geneontology.org/"; + String modelIdcurie = "gomodel"; + curieHandler = new MappedCurieHandler(); + String valid_model_folder = "src/test/resources/models/should_pass/"; + String inputDB = makeBlazegraphJournal(valid_model_folder); + //leave tbox empty for now + OWLOntologyManager ontman = OWLManager.createOWLOntologyManager(); + tbox_ontology = ontman.createOntology(IRI.create("http://example.org/dummy")); + UndoAwareMolecularModelManager models = new UndoAwareMolecularModelManager(tbox_ontology, curieHandler, modelIdPrefix, inputDB, null, go_lego_journal_file, true); + models.addTaxonMetadata(); + + LOGGER.info("Setup Jetty config."); + // Configuration: Use an already existing handler instance + // Configuration: Use custom JSON renderer (GSON) + ResourceConfig resourceConfig = new ResourceConfig(); + resourceConfig.register(GsonMessageBodyHandler.class); + resourceConfig.register(RequireJsonpFilter.class); + + taxonHandler = new TaxonHandler(models); + resourceConfig = resourceConfig.registerInstances(taxonHandler); + + // setup jetty server port, buffers and context path + server = new Server(); + // create connector with port and custom buffer sizes + + HttpConfiguration http_config = new HttpConfiguration(); + int requestHeaderSize = 64 * 1024; + int requestBufferSize = 128 * 1024; + int port = 6800; + String contextString = "/"; + http_config.setRequestHeaderSize(requestHeaderSize); + ServerConnector connector = new ServerConnector(server, new HttpConnectionFactory(http_config)); + connector.setPort(port); + server.addConnector(connector); + + ServletContextHandler context = new ServletContextHandler(ServletContextHandler.SESSIONS); + context.setContextPath(contextString); + server.setHandler(context); + ServletHolder h = new ServletHolder(new ServletContainer(resourceConfig)); + context.addServlet(h, "/*"); + + // start jetty server + LOGGER.info("Start server on port: " + port + " context: " + contextString); + server.start(); + } + + /** + * @throws java.lang.Exception + */ + @AfterClass + public static void tearDownAfterClass() throws Exception { + taxonHandler.getM3().dispose(); + server.stop(); + } + + /** + * @throws java.lang.Exception + */ + @Before + public void setUp() throws Exception { + } + + /** + * @throws java.lang.Exception + */ + @After + public void tearDown() throws Exception { + } + + + @Test + public final void testTaxa() throws URISyntaxException, IOException { + 
//make the request + URIBuilder builder = new URIBuilder("http://127.0.0.1:6800/search/taxa/"); + URI searchuri = builder.build(); + String json_result = getJsonStringFromUri(searchuri); + LOGGER.info("JSON result from test taxa\n" + json_result); + Gson g = new Gson(); + TaxonHandler.Taxa result = g.fromJson(json_result, TaxonHandler.Taxa.class); + assertTrue(result.taxa.size() > 1); + LOGGER.info("N taxa: " + result.taxa.size()); + for (TaxonHandler.Taxa.Taxon t : result.taxa) { + LOGGER.info(t.id + " " + t.label); + } + } + + private static String makeBlazegraphJournal(String input_folder) throws IOException, OWLOntologyCreationException, RepositoryException, RDFParseException, RDFHandlerException { + String inputDB = tmp.newFile().getAbsolutePath(); + File i = new File(input_folder); + if (i.exists()) { + //remove anything that existed earlier + File bgdb = new File(inputDB); + if (bgdb.exists()) { + bgdb.delete(); + } + //load everything into a bg journal + OWLOntology dummy = OWLManager.createOWLOntologyManager().createOntology(IRI.create("http://example.org/dummy")); + BlazegraphMolecularModelManager m3 = new BlazegraphMolecularModelManager<>(dummy, curieHandler, modelIdPrefix, inputDB, null, go_lego_journal_file, true); + if (i.isDirectory()) { + FileUtils.listFiles(i, null, true).parallelStream().parallel().forEach(file -> { + if (file.getName().endsWith(".ttl") || file.getName().endsWith("owl")) { + LOGGER.info("Loading " + file); + try { + m3.importModelToDatabase(file, true); + } catch (OWLOntologyCreationException | RepositoryException | RDFParseException + | RDFHandlerException | IOException e) { + // TODO Auto-generated catch block + e.printStackTrace(); + } + } + }); + } else { + LOGGER.info("Loading " + i); + m3.importModelToDatabase(i, true); + } + LOGGER.info("loaded files into blazegraph journal: " + input_folder); + m3.dispose(); + } + return inputDB; + } + + private static String getJsonStringFromUri(URI uri) throws IOException { + final URL url = uri.toURL(); + final HttpURLConnection connection; + InputStream response = null; + // setup and open (actual connection) + connection = (HttpURLConnection) url.openConnection(); + connection.setInstanceFollowRedirects(true); // warning does not follow redirects from http to https + response = connection.getInputStream(); // opens the connection to the server + // get string response from stream + String json = IOUtils.toString(response); + + return json; + } + + + private static String getJsonStringFromPost(HttpPost post) throws IOException { + + CloseableHttpClient httpClient = HttpClients.createDefault(); + CloseableHttpResponse response = httpClient.execute(post); + String json = EntityUtils.toString(response.getEntity()); + + return json; + } + } diff --git a/minerva-server/src/test/java/org/geneontology/minerva/server/validation/ValidationTest.java b/minerva-server/src/test/java/org/geneontology/minerva/server/validation/ValidationTest.java index 7a5f5eba..7d7e8e9e 100644 --- a/minerva-server/src/test/java/org/geneontology/minerva/server/validation/ValidationTest.java +++ b/minerva-server/src/test/java/org/geneontology/minerva/server/validation/ValidationTest.java @@ -1,21 +1,7 @@ package org.geneontology.minerva.server.validation; -import static org.junit.Assert.*; - -import java.io.File; -import java.io.FileOutputStream; -import java.io.FileWriter; -import java.io.IOException; -import java.net.MalformedURLException; -import java.net.URL; -import java.util.Collections; -import java.util.HashMap; -import 
java.util.HashSet; -import java.util.Map; -import java.util.Set; - +import com.google.common.collect.Sets; import org.apache.commons.io.FileUtils; -import org.apache.log4j.Level; import org.apache.log4j.Logger; import org.geneontology.minerva.BlazegraphMolecularModelManager; import org.geneontology.minerva.ModelContainer; @@ -25,8 +11,6 @@ import org.geneontology.minerva.curie.DefaultCurieHandler; import org.geneontology.minerva.curie.MappedCurieHandler; import org.geneontology.minerva.json.InferenceProvider; -import org.geneontology.minerva.lookup.ExternalLookupService; -import org.geneontology.minerva.lookup.GolrExternalLookupService; import org.geneontology.minerva.server.StartUpTool; import org.geneontology.minerva.server.inferences.InferenceProviderCreator; import org.geneontology.minerva.validation.ValidationResultSet; @@ -39,273 +23,271 @@ import org.openrdf.rio.RDFHandlerException; import org.openrdf.rio.RDFParseException; import org.semanticweb.owlapi.apibinding.OWLManager; -import org.semanticweb.owlapi.model.IRI; -import org.semanticweb.owlapi.model.OWLAnnotation; -import org.semanticweb.owlapi.model.OWLAnnotationAssertionAxiom; -import org.semanticweb.owlapi.model.OWLAxiom; -import org.semanticweb.owlapi.model.OWLOntology; -import org.semanticweb.owlapi.model.OWLOntologyCreationException; -import org.semanticweb.owlapi.model.OWLOntologyIRIMapper; -import org.semanticweb.owlapi.model.OWLOntologyManager; -import org.semanticweb.owlapi.model.OWLOntologyStorageException; +import org.semanticweb.owlapi.model.*; -import com.google.common.collect.Sets; +import java.io.File; +import java.io.IOException; +import java.net.URL; +import java.util.*; + +import static org.junit.Assert.assertFalse; +import static org.junit.Assert.assertTrue; public class ValidationTest { - private static final Logger LOGGER = Logger.getLogger(ValidationTest.class); - static final String ontologyIRI = "http://purl.obolibrary.org/obo/go/extensions/go-lego.owl"; - static final String go_lego_journal_file = "/tmp/test-go-lego-blazegraph.jnl"; - static final String catalog = "src/test/resources/ontology/catalog-for-validation.xml"; - //add something like this to the catalog to replace the download step for local testing - // - static final String modelIdcurie = "http://model.geneontology.org/"; - static final String modelIdPrefix = "gomodel"; - static final String shexFileUrl = "https://raw.githubusercontent.com/geneontology/go-shapes/master/shapes/go-cam-shapes.shex"; - static final String goshapemapFileUrl = "https://raw.githubusercontent.com/geneontology/go-shapes/master/shapes/go-cam-shapes.shapeMap"; - static OWLOntology tbox_ontology; - static CurieHandler curieHandler; + private static final Logger LOGGER = Logger.getLogger(ValidationTest.class); + static final String ontologyIRI = "http://purl.obolibrary.org/obo/go/extensions/go-lego.owl"; + static final String go_lego_journal_file = "/tmp/test-go-lego-blazegraph.jnl"; + static final String catalog = "src/test/resources/ontology/catalog-for-validation.xml"; + //add something like this to the catalog to replace the download step for local testing + // + static final String modelIdcurie = "http://model.geneontology.org/"; + static final String modelIdPrefix = "gomodel"; + static final String shexFileUrl = "https://raw.githubusercontent.com/geneontology/go-shapes/master/shapes/go-cam-shapes.shex"; + static final String goshapemapFileUrl = "https://raw.githubusercontent.com/geneontology/go-shapes/master/shapes/go-cam-shapes.shapeMap"; + static OWLOntology 
tbox_ontology; + static CurieHandler curieHandler; + + @ClassRule + public static TemporaryFolder tmp = new TemporaryFolder(); - @ClassRule - public static TemporaryFolder tmp = new TemporaryFolder(); + @BeforeClass + public static void setUpBeforeClass() { + CurieMappings localMappings = new CurieMappings.SimpleCurieMappings(Collections.singletonMap(modelIdcurie, modelIdPrefix)); + curieHandler = new MappedCurieHandler(DefaultCurieHandler.loadDefaultMappings(), localMappings); - @BeforeClass - public static void setUpBeforeClass() { - CurieMappings localMappings = new CurieMappings.SimpleCurieMappings(Collections.singletonMap(modelIdcurie, modelIdPrefix)); - curieHandler = new MappedCurieHandler(DefaultCurieHandler.loadDefaultMappings(), localMappings); + LOGGER.info("loading tbox ontology: " + ontologyIRI); + OWLOntologyManager ontman = OWLManager.createOWLOntologyManager(); + LOGGER.info("using catalog: " + catalog); + try { + ontman.setIRIMappers(Sets.newHashSet(new owltools.io.CatalogXmlIRIMapper(catalog))); + } catch (IOException e) { + // TODO Auto-generated catch block + e.printStackTrace(); + } + try { + tbox_ontology = ontman.loadOntology(IRI.create(ontologyIRI)); + } catch (OWLOntologyCreationException e) { + // TODO Auto-generated catch block + e.printStackTrace(); + } + LOGGER.info("tbox ontologies loaded: " + tbox_ontology.getAxiomCount()); + } - LOGGER.info("loading tbox ontology: "+ontologyIRI); - OWLOntologyManager ontman = OWLManager.createOWLOntologyManager(); - LOGGER.info("using catalog: "+catalog); - try { - ontman.setIRIMappers(Sets.newHashSet(new owltools.io.CatalogXmlIRIMapper(catalog))); - } catch (IOException e) { - // TODO Auto-generated catch block - e.printStackTrace(); - } - try { - tbox_ontology = ontman.loadOntology(IRI.create(ontologyIRI)); - } catch (OWLOntologyCreationException e) { - // TODO Auto-generated catch block - e.printStackTrace(); - } - LOGGER.info("tbox ontologies loaded: "+tbox_ontology.getAxiomCount()); - } + @AfterClass + public static void tearDownAfterClass() throws Exception { + } - @AfterClass - public static void tearDownAfterClass() throws Exception { - } + // @Test + public void testTmpValid() { + String valid_model_folder = "src/test/resources/models/tmp/"; + testJournalLoad(valid_model_folder); + boolean should_fail = false; + boolean check_shex = true; + try { + validateGoCams( + valid_model_folder, + should_fail, //models should fail check + check_shex //check shex (false just OWL) + ); + } catch (Exception e) { + // TODO Auto-generated catch block + e.printStackTrace(); + } + } -// @Test - public void testTmpValid() { - String valid_model_folder = "src/test/resources/models/tmp/"; - testJournalLoad(valid_model_folder); - boolean should_fail = false; - boolean check_shex = true; - try { - validateGoCams( - valid_model_folder, - should_fail, //models should fail check - check_shex //check shex (false just OWL) - ); - } catch (Exception e) { - // TODO Auto-generated catch block - e.printStackTrace(); - } - } + @Test + public void testValid() { + String valid_model_folder = "src/test/resources/models/should_pass/"; + boolean should_fail = false; + boolean check_shex = true; + try { + validateGoCams( + valid_model_folder, + should_fail, //models should fail check + check_shex //check shex (false just OWL) + ); + } catch (Exception e) { + // TODO Auto-generated catch block + e.printStackTrace(); + } + } - @Test - public void testValid() { - String valid_model_folder = "src/test/resources/models/should_pass/"; - boolean should_fail = 
false; - boolean check_shex = true; - try { - validateGoCams( - valid_model_folder, - should_fail, //models should fail check - check_shex //check shex (false just OWL) - ); - } catch (Exception e) { - // TODO Auto-generated catch block - e.printStackTrace(); - } - } + @Test + public void testValidLoad() { + String valid_model_folder = "src/test/resources/models/should_pass/"; + testJournalLoad(valid_model_folder); + } - @Test - public void testValidLoad() { - String valid_model_folder = "src/test/resources/models/should_pass/"; - testJournalLoad(valid_model_folder); - } - - @Test - public void testInValid() { - String valid_model_folder = "src/test/resources/models/should_fail/"; - testJournalLoad(valid_model_folder); - boolean should_fail = true; - boolean check_shex = true; - try { - validateGoCams( - valid_model_folder, - should_fail, //models should fail check - check_shex //check shex (false just OWL) - ); - } catch (Exception e) { - // TODO Auto-generated catch block - e.printStackTrace(); - } - } + @Test + public void testInValid() { + String valid_model_folder = "src/test/resources/models/should_fail/"; + testJournalLoad(valid_model_folder); + boolean should_fail = true; + boolean check_shex = true; + try { + validateGoCams( + valid_model_folder, + should_fail, //models should fail check + check_shex //check shex (false just OWL) + ); + } catch (Exception e) { + // TODO Auto-generated catch block + e.printStackTrace(); + } + } - @Test - public void testInValidLoad() { - String valid_model_folder = "src/test/resources/models/should_fail/"; - testJournalLoad(valid_model_folder); - } + @Test + public void testInValidLoad() { + String valid_model_folder = "src/test/resources/models/should_fail/"; + testJournalLoad(valid_model_folder); + } - public static void validateGoCams(String input, boolean should_fail, boolean check_shex) throws Exception { + public static void validateGoCams(String input, boolean should_fail, boolean check_shex) throws Exception { - String blazegraph_journal = makeBlazegraphJournal(input); - UndoAwareMolecularModelManager m3 = new UndoAwareMolecularModelManager(tbox_ontology, curieHandler, modelIdPrefix, blazegraph_journal, null, go_lego_journal_file, true); - try { - URL shex_schema_url = new URL(shexFileUrl); - File shex_schema_file = new File("src/test/resources/validate.shex"); //for some reason the temporary_model file won't parse.. - org.apache.commons.io.FileUtils.copyURLToFile(shex_schema_url, shex_schema_file); + String blazegraph_journal = makeBlazegraphJournal(input); + UndoAwareMolecularModelManager m3 = new UndoAwareMolecularModelManager(tbox_ontology, curieHandler, modelIdPrefix, blazegraph_journal, null, go_lego_journal_file, true); + try { + URL shex_schema_url = new URL(shexFileUrl); + File shex_schema_file = new File("src/test/resources/validate.shex"); //for some reason the temporary_model file won't parse.. 
+ org.apache.commons.io.FileUtils.copyURLToFile(shex_schema_url, shex_schema_file); - URL shex_map_url = new URL(goshapemapFileUrl); - File shex_map_file = new File("src/test/resources/validate.shapemap"); - org.apache.commons.io.FileUtils.copyURLToFile(shex_map_url, shex_map_file); + URL shex_map_url = new URL(goshapemapFileUrl); + File shex_map_file = new File("src/test/resources/validate.shapemap"); + org.apache.commons.io.FileUtils.copyURLToFile(shex_map_url, shex_map_file); - MinervaShexValidator shex = new MinervaShexValidator(shex_schema_file, shex_map_file, curieHandler, m3.getGolego_repo()); - if(check_shex) { - if(check_shex) { - shex.setActive(true); - }else { - shex.setActive(false); - } - } - InferenceProviderCreator ipc = StartUpTool.createInferenceProviderCreator("arachne", m3, shex); - LOGGER.info("Validating models:"); - m3.getAvailableModelIds().stream().forEach(modelIRI -> { - boolean isConsistent = true; - boolean isConformant = true; - LOGGER.info("processing \t"+modelIRI); + MinervaShexValidator shex = new MinervaShexValidator(shex_schema_file, shex_map_file, curieHandler, m3.getGolego_repo()); + if (check_shex) { + if (check_shex) { + shex.setActive(true); + } else { + shex.setActive(false); + } + } + InferenceProviderCreator ipc = StartUpTool.createInferenceProviderCreator("arachne", m3, shex); + LOGGER.info("Validating models:"); + m3.getAvailableModelIds().stream().forEach(modelIRI -> { + boolean isConsistent = true; + boolean isConformant = true; + LOGGER.info("processing \t" + modelIRI); - ModelContainer mc = m3.getModel(modelIRI); - Set annos = mc.getAboxOntology().getAnnotations(); - //this is where everything actually happens - InferenceProvider ip; - try { - //this ipc.create method results in the execution of the OWL reasoner and, if shex is set to active, the shex validation - ip = ipc.create(mc); - isConsistent = ip.isConsistent(); - if(!should_fail) { - assertTrue(modelIRI+" is assessed to be (OWL) inconsistent but should not be.", isConsistent); - }else if(!check_shex) { - assertFalse(modelIRI+" is assessed to be (OWL) consistent but should not be.", isConsistent); - } - if(check_shex) { - ValidationResultSet validations = ip.getValidation_results(); - isConformant = validations.allConformant(); - if(!should_fail) { - assertTrue(modelIRI+" does not conform to the shex schema and it should: \n"+annos, isConformant); - }else { - assertFalse(modelIRI+" conforms to the shex schema and it should not: \n"+annos, isConformant); - } - } - } catch (Exception e) { - // TODO Auto-generated catch block - e.printStackTrace(); - } - }); - LOGGER.info("done with validation"); - }finally { - m3.dispose(); - } - } + ModelContainer mc = m3.getModel(modelIRI); + Set annos = mc.getAboxOntology().getAnnotations(); + //this is where everything actually happens + InferenceProvider ip; + try { + //this ipc.create method results in the execution of the OWL reasoner and, if shex is set to active, the shex validation + ip = ipc.create(mc); + isConsistent = ip.isConsistent(); + if (!should_fail) { + assertTrue(modelIRI + " is assessed to be (OWL) inconsistent but should not be.", isConsistent); + } else if (!check_shex) { + assertFalse(modelIRI + " is assessed to be (OWL) consistent but should not be.", isConsistent); + } + if (check_shex) { + ValidationResultSet validations = ip.getValidation_results(); + isConformant = validations.allConformant(); + if (!should_fail) { + assertTrue(modelIRI + " does not conform to the shex schema and it should: \n" + annos, isConformant); + } 
else { + assertFalse(modelIRI + " conforms to the shex schema and it should not: \n" + annos, isConformant); + } + } + } catch (Exception e) { + // TODO Auto-generated catch block + e.printStackTrace(); + } + }); + LOGGER.info("done with validation"); + } finally { + m3.dispose(); + } + } - public void testJournalLoad(String input_folder) { - try { - String inputDB = tmp.newFile().getAbsolutePath(); - File i = new File(input_folder); - if(i.exists()) { - //remove anything that existed earlier - File bgdb = new File(inputDB); - if(bgdb.exists()) { - bgdb.delete(); - } - OWLOntology dummy = OWLManager.createOWLOntologyManager().createOntology(IRI.create("http://example.org/dummy")); - BlazegraphMolecularModelManager m3 = new BlazegraphMolecularModelManager<>(dummy, curieHandler, modelIdPrefix, inputDB, null, go_lego_journal_file, true); - Map file_iri = new HashMap(); - Map iri_file = new HashMap(); - Set model_iris = new HashSet(); - if(i.isDirectory()) { - FileUtils.listFiles(i, null, true).forEach(file-> { - if(file.getName().endsWith(".ttl")||file.getName().endsWith("owl")) { - try { - String modeluri = m3.importModelToDatabase(file, true); - if(!model_iris.add(modeluri)) { - String error = "\n"+file+"\n redundant iri "+modeluri+"\n with file "+iri_file.get(modeluri); - assertFalse(error, true); - }else { - file_iri.put(file.getName(), modeluri); - iri_file.put(modeluri, file.getName()); - } - } catch (OWLOntologyCreationException | RepositoryException | RDFParseException - | RDFHandlerException | IOException e) { - // TODO Auto-generated catch block - e.printStackTrace(); - } - } - }); - if(model_iris.size()!=file_iri.size()) { + public void testJournalLoad(String input_folder) { + try { + String inputDB = tmp.newFile().getAbsolutePath(); + File i = new File(input_folder); + if (i.exists()) { + //remove anything that existed earlier + File bgdb = new File(inputDB); + if (bgdb.exists()) { + bgdb.delete(); + } + OWLOntology dummy = OWLManager.createOWLOntologyManager().createOntology(IRI.create("http://example.org/dummy")); + BlazegraphMolecularModelManager m3 = new BlazegraphMolecularModelManager<>(dummy, curieHandler, modelIdPrefix, inputDB, null, go_lego_journal_file, true); + Map file_iri = new HashMap(); + Map iri_file = new HashMap(); + Set model_iris = new HashSet(); + if (i.isDirectory()) { + FileUtils.listFiles(i, null, true).forEach(file -> { + if (file.getName().endsWith(".ttl") || file.getName().endsWith("owl")) { + try { + String modeluri = m3.importModelToDatabase(file, true); + if (!model_iris.add(modeluri)) { + String error = "\n" + file + "\n redundant iri " + modeluri + "\n with file " + iri_file.get(modeluri); + assertFalse(error, true); + } else { + file_iri.put(file.getName(), modeluri); + iri_file.put(modeluri, file.getName()); + } + } catch (OWLOntologyCreationException | RepositoryException | RDFParseException + | RDFHandlerException | IOException e) { + // TODO Auto-generated catch block + e.printStackTrace(); + } + } + }); + if (model_iris.size() != file_iri.size()) { - } - assertTrue("same model iri used more than once ", model_iris.size()==file_iri.size()); - }else { - LOGGER.info("Loading " + i); - m3.importModelToDatabase(i, true); - } - LOGGER.info("loaded files into blazegraph journal: "+input_folder); - m3.dispose(); - } - } catch (Exception e) { - // TODO Auto-generated catch block - e.printStackTrace(); - } - } + } + assertTrue("same model iri used more than once ", model_iris.size() == file_iri.size()); + } else { + LOGGER.info("Loading " + i); + 
m3.importModelToDatabase(i, true); + } + LOGGER.info("loaded files into blazegraph journal: " + input_folder); + m3.dispose(); + } + } catch (Exception e) { + // TODO Auto-generated catch block + e.printStackTrace(); + } + } - private static String makeBlazegraphJournal(String input_folder) throws IOException, OWLOntologyCreationException, RepositoryException, RDFParseException, RDFHandlerException { - String inputDB = tmp.newFile().getAbsolutePath(); - File i = new File(input_folder); - if(i.exists()) { - //remove anything that existed earlier - File bgdb = new File(inputDB); - if(bgdb.exists()) { - bgdb.delete(); - } - //load everything into a bg journal - OWLOntology dummy = OWLManager.createOWLOntologyManager().createOntology(IRI.create("http://example.org/dummy")); - BlazegraphMolecularModelManager m3 = new BlazegraphMolecularModelManager<>(dummy, curieHandler, modelIdPrefix, inputDB, null, go_lego_journal_file, true); - if(i.isDirectory()) { - FileUtils.listFiles(i, null, true).parallelStream().parallel().forEach(file-> { - if(file.getName().endsWith(".ttl")||file.getName().endsWith("owl")) { - try { - String modeluri = m3.importModelToDatabase(file, true); - LOGGER.info("Loaded\t" + file+"\t"+modeluri); - } catch (OWLOntologyCreationException | RepositoryException | RDFParseException - | RDFHandlerException | IOException e) { - // TODO Auto-generated catch block - e.printStackTrace(); - } - } - }); - }else { - LOGGER.info("Loading " + i); - m3.importModelToDatabase(i, true); - } - LOGGER.info("loaded files into blazegraph journal: "+input_folder); - m3.dispose(); - } - return inputDB; - } + private static String makeBlazegraphJournal(String input_folder) throws IOException, OWLOntologyCreationException, RepositoryException, RDFParseException, RDFHandlerException { + String inputDB = tmp.newFile().getAbsolutePath(); + File i = new File(input_folder); + if (i.exists()) { + //remove anything that existed earlier + File bgdb = new File(inputDB); + if (bgdb.exists()) { + bgdb.delete(); + } + //load everything into a bg journal + OWLOntology dummy = OWLManager.createOWLOntologyManager().createOntology(IRI.create("http://example.org/dummy")); + BlazegraphMolecularModelManager m3 = new BlazegraphMolecularModelManager<>(dummy, curieHandler, modelIdPrefix, inputDB, null, go_lego_journal_file, true); + if (i.isDirectory()) { + FileUtils.listFiles(i, null, true).parallelStream().parallel().forEach(file -> { + if (file.getName().endsWith(".ttl") || file.getName().endsWith("owl")) { + try { + String modeluri = m3.importModelToDatabase(file, true); + LOGGER.info("Loaded\t" + file + "\t" + modeluri); + } catch (OWLOntologyCreationException | RepositoryException | RDFParseException + | RDFHandlerException | IOException e) { + // TODO Auto-generated catch block + e.printStackTrace(); + } + } + }); + } else { + LOGGER.info("Loading " + i); + m3.importModelToDatabase(i, true); + } + LOGGER.info("loaded files into blazegraph journal: " + input_folder); + m3.dispose(); + } + return inputDB; + } } \ No newline at end of file diff --git a/minerva-server/src/test/resources/ontology/catalog-for-validation.xml b/minerva-server/src/test/resources/ontology/catalog-for-validation.xml index 0f4837cf..47bed7b3 100644 --- a/minerva-server/src/test/resources/ontology/catalog-for-validation.xml +++ b/minerva-server/src/test/resources/ontology/catalog-for-validation.xml @@ -1,6 +1,7 @@ - - + + diff --git a/pom.xml b/pom.xml index 82a199c9..f6e2fca4 100644 --- a/pom.xml +++ b/pom.xml @@ -1,386 +1,387 @@ - 4.0.0 - 
org.geneontology - 0.6.1 - minerva - pom - Minerva - - https://github.com/geneontology/minerva - scm:git:git://github.com/geneontology/minerva.git - - - https://github.com/geneontology/minerva/issues - - - UTF-8 - 4.5.15 - 2.7.12 - 9.2.3.v20140905 - 2.29 - 2.17.1 - + xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd"> + 4.0.0 + org.geneontology + 0.6.1 + minerva + pom + Minerva + + https://github.com/geneontology/minerva + scm:git:git://github.com/geneontology/minerva.git + + + https://github.com/geneontology/minerva/issues + + + UTF-8 + 4.5.15 + 2.7.12 + 9.2.3.v20140905 + + 2.29 + 2.17.1 + - - - - org.apache.maven.plugins - maven-compiler-plugin - 3.2 - - 1.8 - 1.8 - - - - org.apache.maven.plugins - maven-source-plugin - 2.4 - - - attach-sources - - jar - - - - - - org.apache.maven.plugins - maven-javadoc-plugin - 2.10.1 - - 8 - -Xdoclint:none - - - - attach-javadocs - - jar - - - - - - org.apache.maven.plugins - maven-deploy-plugin - 2.8.2 - - - org.apache.maven.wagon - wagon-ssh - 2.8 - - - - - org.codehaus.mojo - versions-maven-plugin - 2.1 - - - org.apache.maven.plugins - maven-surefire-plugin - 2.18.1 - - @{argLine} -Xmx5G - - - - org.jacoco - jacoco-maven-plugin - 0.8.4 - - - jacoco-initialize - - prepare-agent - - - - jacoco-site - package - - report - - - - - - - - - org.apache.maven.plugins - maven-assembly-plugin - 2.5.3 - - - - + + + + org.apache.maven.plugins + maven-compiler-plugin + 3.2 + + 1.8 + 1.8 + + + + org.apache.maven.plugins + maven-source-plugin + 2.4 + + + attach-sources + + jar + + + + + + org.apache.maven.plugins + maven-javadoc-plugin + 2.10.1 + + 8 + -Xdoclint:none + + + + attach-javadocs + + jar + + + + + + org.apache.maven.plugins + maven-deploy-plugin + 2.8.2 + + + org.apache.maven.wagon + wagon-ssh + 2.8 + + + + + org.codehaus.mojo + versions-maven-plugin + 2.1 + + + org.apache.maven.plugins + maven-surefire-plugin + 2.18.1 + + @{argLine} -Xmx5G + + + + org.jacoco + jacoco-maven-plugin + 0.8.4 + + + jacoco-initialize + + prepare-agent + + + + jacoco-site + package + + report + + + + + + + + + org.apache.maven.plugins + maven-assembly-plugin + 2.5.3 + + + + - - - junit - junit - 4.13.2 - test - - - - - - - com.google.code.gson - gson - 2.8.5 - - - org.eclipse.jetty - jetty-server - ${jetty.version} - - - org.eclipse.jetty - jetty-servlet - ${jetty.version} - - - org.eclipse.jetty - jetty-util - ${jetty.version} - - - org.eclipse.jetty - jetty-io - ${jetty.version} - - - org.eclipse.jetty - jetty-jmx - ${jetty.version} - - - org.eclipse.jetty - jetty-jndi - ${jetty.version} - - - org.eclipse.jetty - jetty-rewrite - ${jetty.version} - - - org.eclipse.jetty - jetty-webapp - ${jetty.version} - - - org.eclipse.jetty - jetty-xml - ${jetty.version} - - - org.glassfish.jersey.containers - jersey-container-servlet-core - ${jersey.version} - - - org.glassfish.jersey.core - jersey-common - ${jersey.version} - - - org.glassfish.jersey.inject - jersey-hk2 - ${jersey.version} - - - org.apache.httpcomponents - httpclient - 4.5.9 - - - net.sourceforge.owlapi - owlapi-distribution - ${owlapi.version} - - - commons-collections - commons-collections - 3.2.2 - - - com.blazegraph - bigdata-core - 2.1.4 - - - log4j - log4j - - - - - - - org.openrdf.sesame - sesame-model - ${sesame.version} - - - org.openrdf.sesame - sesame-runtime - ${sesame.version} - - - org.openrdf.sesame - sesame-rio-api - ${sesame.version} - - - org.openrdf.sesame - sesame-rio-languages - ${sesame.version} - - - org.openrdf.sesame - sesame-rio-datatypes - 
${sesame.version} - - - org.openrdf.sesame - sesame-rio-binary - ${sesame.version} - - - org.openrdf.sesame - sesame-rio-n3 - ${sesame.version} - - - org.openrdf.sesame - sesame-rio-nquads - ${sesame.version} - - - org.openrdf.sesame - sesame-rio-ntriples - ${sesame.version} - - - org.openrdf.sesame - sesame-rio-rdfjson - ${sesame.version} - - - org.openrdf.sesame - sesame-rio-rdfxml - ${sesame.version} - - - org.openrdf.sesame - sesame-rio-trix - ${sesame.version} - - - org.openrdf.sesame - sesame-rio-turtle - ${sesame.version} - - - org.openrdf.sesame - sesame-rio-trig - ${sesame.version} - - - commons-io - commons-io - 2.7 - - - org.geneontology - owl-to-rules_2.12 - 0.3.6 - - - ch.qos.logback - logback-classic - - - - - org.geneontology - arachne_2.12 - 1.2 - - - ch.qos.logback - logback-classic - - - - - org.apache.jena - apache-jena-libs - 3.12.0 - pom - - - net.sf.trove4j - trove4j - 3.0.3 - - - org.apache.logging.log4j - log4j - ${log4j.version} - pom - - - org.apache.logging.log4j - log4j-core - ${log4j.version} - - - org.apache.logging.log4j - log4j-1.2-api - ${log4j.version} - - - org.obolibrary.robot - robot-core - 1.7.1 - - - log4j - log4j - - - - - - - - - BBOPDeployRepository - BBOPDeployRepository - ${bbopdeployrepository} - - - BBOPSnapshotRepository - BBOPSnapshotRepository - ${bbopsnapshotrepository} - - - - - minerva-core - minerva-json - minerva-server - minerva-converter - minerva-cli - minerva-lookup - + + + junit + junit + 4.13.2 + test + + + + + + + com.google.code.gson + gson + 2.8.5 + + + org.eclipse.jetty + jetty-server + ${jetty.version} + + + org.eclipse.jetty + jetty-servlet + ${jetty.version} + + + org.eclipse.jetty + jetty-util + ${jetty.version} + + + org.eclipse.jetty + jetty-io + ${jetty.version} + + + org.eclipse.jetty + jetty-jmx + ${jetty.version} + + + org.eclipse.jetty + jetty-jndi + ${jetty.version} + + + org.eclipse.jetty + jetty-rewrite + ${jetty.version} + + + org.eclipse.jetty + jetty-webapp + ${jetty.version} + + + org.eclipse.jetty + jetty-xml + ${jetty.version} + + + org.glassfish.jersey.containers + jersey-container-servlet-core + ${jersey.version} + + + org.glassfish.jersey.core + jersey-common + ${jersey.version} + + + org.glassfish.jersey.inject + jersey-hk2 + ${jersey.version} + + + org.apache.httpcomponents + httpclient + 4.5.9 + + + net.sourceforge.owlapi + owlapi-distribution + ${owlapi.version} + + + commons-collections + commons-collections + 3.2.2 + + + com.blazegraph + bigdata-core + 2.1.4 + + + log4j + log4j + + + + + + + org.openrdf.sesame + sesame-model + ${sesame.version} + + + org.openrdf.sesame + sesame-runtime + ${sesame.version} + + + org.openrdf.sesame + sesame-rio-api + ${sesame.version} + + + org.openrdf.sesame + sesame-rio-languages + ${sesame.version} + + + org.openrdf.sesame + sesame-rio-datatypes + ${sesame.version} + + + org.openrdf.sesame + sesame-rio-binary + ${sesame.version} + + + org.openrdf.sesame + sesame-rio-n3 + ${sesame.version} + + + org.openrdf.sesame + sesame-rio-nquads + ${sesame.version} + + + org.openrdf.sesame + sesame-rio-ntriples + ${sesame.version} + + + org.openrdf.sesame + sesame-rio-rdfjson + ${sesame.version} + + + org.openrdf.sesame + sesame-rio-rdfxml + ${sesame.version} + + + org.openrdf.sesame + sesame-rio-trix + ${sesame.version} + + + org.openrdf.sesame + sesame-rio-turtle + ${sesame.version} + + + org.openrdf.sesame + sesame-rio-trig + ${sesame.version} + + + commons-io + commons-io + 2.7 + + + org.geneontology + owl-to-rules_2.12 + 0.3.6 + + + ch.qos.logback + logback-classic + + + 
+
+ org.geneontology
+ arachne_2.12
+ 1.2
+
+
+ ch.qos.logback
+ logback-classic
+
+
+
+
+
+
+ org.apache.jena
+ apache-jena-libs
+ 3.12.0
+ pom
+
+
+ net.sf.trove4j
+ trove4j
+ 3.0.3
+
+
+ org.apache.logging.log4j
+ log4j
+ ${log4j.version}
+ pom
+
+
+ org.apache.logging.log4j
+ log4j-core
+ ${log4j.version}
+
+
+ org.apache.logging.log4j
+ log4j-1.2-api
+ ${log4j.version}
+
+
+ org.obolibrary.robot
+ robot-core
+ 1.7.1
+
+
+ log4j
+ log4j
+
+
+
+
+
+
+
+
+ BBOPDeployRepository
+ BBOPDeployRepository
+ ${bbopdeployrepository}
+
+
+ BBOPSnapshotRepository
+ BBOPSnapshotRepository
+ ${bbopsnapshotrepository}
+
+
+
+
+ minerva-core
+ minerva-json
+ minerva-server
+ minerva-converter
+ minerva-cli
+ minerva-lookup
+
diff --git a/specs/README.md b/specs/README.md
index 6a3e0f4e..b59e3e3e 100644
--- a/specs/README.md
+++ b/specs/README.md
@@ -1,6 +1,6 @@
 This directory contains specifications for the Noctua-Minerva modeling system
 
- * [owl-model](owl-model.md) High level specification for core OWL model
- * [gaf-to-lego](gaf-to-lego.md) mapping between LEGO and GAF/GPAD
+* [owl-model](owl-model.md) High level specification for core OWL model
+* [gaf-to-lego](gaf-to-lego.md) mapping between LEGO and GAF/GPAD
diff --git a/specs/gaf-to-lego.md b/specs/gaf-to-lego.md
index d0050528..b30697fe 100644
--- a/specs/gaf-to-lego.md
+++ b/specs/gaf-to-lego.md
@@ -1,9 +1,10 @@
-Given a set of gene associations, this procedure will generate LEGO models. The procedure can be reversed for a lossy LEGO->GAF conversion.
+Given a set of gene associations, this procedure will generate LEGO models. The procedure can be reversed for a lossy
+LEGO->GAF conversion.
 
 The set of associations can be specified by a user query. Includes:
 
- * grepping a GAF and feeding results
- * selecting all associations for all genes that are involved with some process
+* grepping a GAF and feeding results
+* selecting all associations for all genes that are involved with some process
 
 ## STEP 0 - map GeneAssociation in GAF model
@@ -27,8 +28,7 @@ IF
 ELSE
 let =
 ```
 
-(note this may require further transformation, if EXT contains
-references to gene products)
+(note this may require further transformation, if EXT contains references to gene products)
 
 TODO: specify behavior for all-individual model
@@ -89,15 +89,15 @@ TODO: specify behavior for all-individual model
 
 (optional)
 
-keep a map of Refs -> generated Ids
+keep a map of Refs -> generated Ids
 
-when performing ``, first check map. If an individual Id has already been generated for this , then re-use the existing id from the map.
+when performing ``, first check map. If an individual Id has already been generated for this , then
+re-use the existing id from the map.
 
 Note this may result in multiple classification of individuals (MCI). The user can rectify these in Protege.
 
-One variant of this strategy may be to retain the original Id,
-generate new Ids for the collapsed aggregate MF individual, and
-include evidence links back to the atomic MF individuals.
+One variant of this strategy may be to retain the original Id, generate new Ids for the collapsed aggregate MF
+individual, and include evidence links back to the atomic MF individuals.
 
 ## Evidence
diff --git a/specs/owl-model.md b/specs/owl-model.md
index 9b08878a..e7d07e8e 100644
--- a/specs/owl-model.md
+++ b/specs/owl-model.md
@@ -1,99 +1,80 @@
-
 ## Noctua Models in OWL
 
-A Noctua Model (NM) is a collection of OWL ABox axioms (i.e. axioms
-about OWL individuals). Noctua models follow a set of conventions for
-modeling biological knowledge and associated evidence.
+A Noctua Model (NM) is a collection of OWL ABox axioms (i.e. axioms about OWL individuals). Noctua models follow a set
+of conventions for modeling biological knowledge and associated evidence.
 
-Please read the [OWL2 Primer](http://www.w3.org/TR/owl2-primer/) and
-other documentation prior to this.
+Please read the [OWL2 Primer](http://www.w3.org/TR/owl2-primer/) and other documentation prior to this.
 
-By convention we use OWL2 syntax - this is normative. In some cases we
-provide the corresponding RDF syntax - this is informative.
+By convention we use OWL2 syntax - this is normative. In some cases we provide the corresponding RDF syntax - this is
+informative.
 
 Note that GO/LEGO conventions are documented here: https://github.com/geneontology/noctua-models/blob/master/README.md
 
 ### Notes on OWL2 terminology
 
-Some OWL2 terminology can be confusing as terms may mean different
-things outside the description logic community. When we use terms that
-appear ambiguous in this document, we always intend the OWL2 sense of
-the term. Please refer to the official OWL2 documentation for
-clarification. Some potentially ambiguous terms include:
-
- * `Annotation` - a tuple consisting of a property and a value
-   (literal or IRI) that can be used to annotate either an object
-   (Entity Annotation) or an axiom (Axiom Annotation). Annotations are
-   non-logical (i.e. they are ignored by reasoners, and are intended
-   primarily for humans). When we say Annotation, we always mean it in
-   the OWL2 sense and never in the Gene Ontology sense.
-
- * `Ontology` - any collection of OWL axioms intended to be
-   interpreted together; in OWL, ontologies can include any mix of
-   axioms about classes and axioms about individuals. Outside the DL
-   community, the term 'ontology' is typically restricted to
-   collections of *class* axioms. A Noctua model is formally an OWL
-   Ontology that is a collection of *instance* axioms. To avoid
-   confusion we use the term 'model', but it should be understood that
-   every noctua model is an OWL Ontology. The model is identified via
-   an ontology IRI, and versioned using an ontology versionIRI.
+Some OWL2 terminology can be confusing as terms may mean different things outside the description logic community. When
+we use terms that appear ambiguous in this document, we always intend the OWL2 sense of the term. Please refer to the
+official OWL2 documentation for clarification. Some potentially ambiguous terms include:
+
+* `Annotation` - a tuple consisting of a property and a value
+  (literal or IRI) that can be used to annotate either an object
+  (Entity Annotation) or an axiom (Axiom Annotation). Annotations are non-logical (i.e. they are ignored by reasoners,
+  and are intended primarily for humans). When we say Annotation, we always mean it in the OWL2 sense and never in the
+  Gene Ontology sense.
+
+* `Ontology` - any collection of OWL axioms intended to be interpreted together; in OWL, ontologies can include any mix
+  of axioms about classes and axioms about individuals. Outside the DL community, the term 'ontology' is typically
+  restricted to collections of *class* axioms. A Noctua model is formally an OWL Ontology that is a collection of
+  *instance* axioms. To avoid confusion we use the term 'model', but it should be understood that every noctua model is
+  an OWL Ontology. The model is identified via an ontology IRI, and versioned using an ontology versionIRI.
 
 ### Models
 
 A Noctua Model is an OWL ontology that can be manipulated by Minerva
 
-Each model can be annotated with multiple Annotations. Any annotation
-properties can be used, but by convention we recommend the following:
+Each model can be annotated with multiple Annotations. Any annotation properties can be used, but by convention we
+recommend the following:
 
- * `dc:title` - used as the default display label for a model
- * `dc:creator` - automatically added by system for the person who initiated the model
- * `dc:contributor` - automatically added by system for anyone that edits
+* `dc:title` - used as the default display label for a model
+* `dc:creator` - automatically added by system for the person who initiated the model
+* `dc:contributor` - automatically added by system for anyone that edits
 
-A model will typically have an Imports declaration in order to bring a
-relevant set of classes, object properties and related axioms into
-scope. There is no constraint on what is imported, and conventions may
-vary by Noctua store.
+A model will typically have an Imports declaration in order to bring a relevant set of classes, object properties and
+related axioms into scope. There is no constraint on what is imported, and conventions may vary by Noctua store.
 
 ### Core Axiom Types
 
- * ClassAssertion (rdf:type) -- determines the type of a Node in the model.
- * ObjectPropertyAssertion (triples, aka Facts) -- connects two nodes
+* ClassAssertion (rdf:type) -- determines the type of a Node in the model.
+* ObjectPropertyAssertion (triples, aka Facts) -- connects two nodes
 
-These are the basic building blocks that are required for any
-non-degenerate model.
+These are the basic building blocks that are required for any non-degenerate model.
 
-By convention, individuals in the model are assumed by default to have
-non-meaningful IRIs (e.g. UUIDs) and to lack rdfs:label
-annotations. They are typically displayed to the user using the
-rdfs:label of the Class they instantiate. There may be exceptions in
-some cases; for example, publications are modeled as individuals.
+By convention, individuals in the model are assumed by default to have non-meaningful IRIs (e.g. UUIDs) and to lack
+rdfs:label annotations. They are typically displayed to the user using the rdfs:label of the Class they instantiate.
+There may be exceptions in some cases; for example, publications are modeled as individuals.
 
-ObjectPropertyAssertions can use any OWL ObjectProperty, taken from
-ontologies such as RO. Particular Noctua deployments may be configured
-with different lists, see below.
+ObjectPropertyAssertions can use any OWL ObjectProperty, taken from ontologies such as RO. Particular Noctua deployments
+may be configured with different lists, see below.
 
 ### Class Constructs
 
- * SomeValuesFrom
- * IntersectionOf
- * UnionOf
+* SomeValuesFrom
+* IntersectionOf
+* UnionOf
 
-These are typically used to construct a class expression for use in a
-ClassAssertion. Additional constructs from the OWL2 spec can be added,
-but this requires extension of the [bbop-class-expression
-library](https://github.com/berkeleybop/class-expression/).
+These are typically used to construct a class expression for use in a ClassAssertion. Additional constructs from the
+OWL2 spec can be added, but this requires extension of
+the [bbop-class-expression library](https://github.com/berkeleybop/class-expression/).
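
As an illustrative aside, the two core axiom types and the SomeValuesFrom construct described above map directly onto the OWL API that Minerva builds on. The sketch below is not taken from this spec: the model IRI, the individual IRIs and the particular GO/RO identifiers are assumed placeholder choices.

```java
import org.semanticweb.owlapi.apibinding.OWLManager;
import org.semanticweb.owlapi.model.*;

public class CoreAxiomTypesSketch {
    public static void main(String[] args) throws OWLOntologyCreationException {
        OWLOntologyManager manager = OWLManager.createOWLOntologyManager();
        OWLDataFactory df = manager.getOWLDataFactory();
        // Hypothetical model IRI; real Noctua models use their own IRI scheme.
        OWLOntology model = manager.createOntology(IRI.create("http://example.org/demo-model"));

        // Two individuals with non-meaningful, UUID-like IRIs, as described above.
        OWLNamedIndividual mf = df.getOWLNamedIndividual(IRI.create("http://example.org/demo-model/i1"));
        OWLNamedIndividual gp = df.getOWLNamedIndividual(IRI.create("http://example.org/demo-model/i2"));

        // ClassAssertion (rdf:type): type i1 with a GO molecular function class (kinase activity).
        OWLClass kinaseActivity = df.getOWLClass(IRI.create("http://purl.obolibrary.org/obo/GO_0016301"));
        manager.addAxiom(model, df.getOWLClassAssertionAxiom(kinaseActivity, mf));

        // ObjectPropertyAssertion (a fact): connect the two individuals with enabled_by (RO:0002333).
        OWLObjectProperty enabledBy = df.getOWLObjectProperty(IRI.create("http://purl.obolibrary.org/obo/RO_0002333"));
        manager.addAxiom(model, df.getOWLObjectPropertyAssertionAxiom(enabledBy, mf, gp));

        // SomeValuesFrom class expression used inside a ClassAssertion:
        // "occurs_in some nucleus" (BFO:0000066, GO:0005634).
        OWLObjectProperty occursIn = df.getOWLObjectProperty(IRI.create("http://purl.obolibrary.org/obo/BFO_0000066"));
        OWLClass nucleus = df.getOWLClass(IRI.create("http://purl.obolibrary.org/obo/GO_0005634"));
        OWLClassExpression occursInNucleus = df.getOWLObjectSomeValuesFrom(occursIn, nucleus);
        manager.addAxiom(model, df.getOWLClassAssertionAxiom(occursInNucleus, mf));
    }
}
```

Every statement in the sketch is one of the two core axiom types; the class expression only ever appears as the type inside a ClassAssertion, which mirrors how the class constructs listed above are meant to be used.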
 
 ## Modeling Conventions
 
 ### Axiom Annotations and Evidence
 
-Evidence and provenance is handled by convention using
-AxiomAnnotations, typically AxiomAnnotations on
+Evidence and provenance is handled by convention using AxiomAnnotations, typically AxiomAnnotations on
 ObjectPropertyAssertions.
 
-For example, consider a triple `i r j` which has some evidence of type
-E, supported by publication p. This would be written (in Manchester
-syntax) as:
+For example, consider a triple `i r j` which has some evidence of type E, supported by publication p. This would be
+written (in Manchester syntax) as:
 
 ```
 Prefix: axiom-has-evidence: RO:0002612
@@ -121,13 +102,11 @@ Individual:
 
 ![diagram](./evidence-model.png)
 
-Note that at the level of OWL-DL, the axiom-has-evidence axiom
-annotation does not point to the `` individual, it points to its
-IRI.
+Note that at the level of OWL-DL, the axiom-has-evidence axiom annotation does not point to the `` individual, it
+points to its IRI.
 
-Additionally, axioms within the evidence part of the model can be
-annotated. For example, the has-supporting-reference edge between a evidence
-individual and the publication can be refined by providing
+Additionally, axioms within the evidence part of the model can be annotated. For example, the has-supporting-reference
+edge between an evidence individual and the publication can be refined by providing
 *span* information:
 
 ```
@@ -138,26 +117,30 @@ Individual:
 has-span: "spanNNN"
 has-supporting-reference:
 ```
 
-Here span is a TextPressoCentral construct representing a portion of
-text. Here we use a literal (TBD: fill in span ID, or actual text?)
+Here span is a TextPressoCentral construct representing a portion of text. Here we use a literal (TBD: fill in span ID,
+or actual text?)
 
 ## Operations and Structural Constraints on Models
 
 Minerva assures all the following are true:
 
- 1. Every individual has at least one type assertion
- 2. No Object Property Assertion is left 'dangling'
+1. Every individual has at least one type assertion
+2. No Object Property Assertion is left 'dangling'
 
 Note that 2 follows from 1
 
 To ensure this state, Minerva implements the following cascading delete rules:
 
- * If an individual `i` is deleted, its declaration is deleted
- * If an individual `i` is deleted, all of its ClassAssertions are deleted
- * If an individual `i` is deleted, any AnnotationAssertion for which `i` is either the subject IRI or target IRI will be deleted.
- * If an individual `i` is deleted, any OPA that has `i` as either subject or target will be deleted
- * If an OPA is deleted, all its annotations are deleted (this is enforced by the OWLAPI, as axiom annotations cannot exist without an exiom)
- * If an axiom annotation is deleted, and that annotation uses the property [RO_0002612](http://purl.obolibrary.org/obo/RO_0002612) references an IRI, then the individual for that IRI is deleted
+* If an individual `i` is deleted, its declaration is deleted
+* If an individual `i` is deleted, all of its ClassAssertions are deleted
+* If an individual `i` is deleted, any AnnotationAssertion for which `i` is either the subject IRI or target IRI will be
+  deleted.
+* If an individual `i` is deleted, any OPA that has `i` as either subject or target will be deleted
+* If an OPA is deleted, all its annotations are deleted (this is enforced by the OWLAPI, as axiom annotations cannot
+  exist without an axiom)
+* If an axiom annotation is deleted, and that annotation uses the
+  property [RO_0002612](http://purl.obolibrary.org/obo/RO_0002612) references an IRI, then the individual for that IRI
+  is deleted
 
 No other cascades are performed.
 
@@ -165,31 +148,26 @@ Note the underlying assumption is that RO:0002612 is
 inverse-functional (i.e. no
 
 ## LEGO Models
 
-A LEGO Model (LM) is a NM that consists of at least one individual
-instantiating a *GO activity class* (ie SubClassOf GO:0003674). A LEGO
-model makes use of the LEGO subset of RO, which includes relations
-such as:
+A LEGO Model (LM) is a NM that consists of at least one individual instantiating a *GO activity class* (ie SubClassOf
+GO:0003674). A LEGO model makes use of the LEGO subset of RO, which includes relations such as:
 
- * [RO:0002333](http://purl.obolibrary.org/obo/RO_0002333) ! enabled by
- * [BFO:0000066](http://purl.obolibrary.org/obo/BFO_0000066) ! occurs in
- * [RO:0002406](http://purl.obolibrary.org/obo/RO_0002406) ! directly activates
+* [RO:0002333](http://purl.obolibrary.org/obo/RO_0002333) ! enabled by
+* [BFO:0000066](http://purl.obolibrary.org/obo/BFO_0000066) ! occurs in
+* [RO:0002406](http://purl.obolibrary.org/obo/RO_0002406) ! directly activates
 
 Standard uses include but are not limited to:
 
- * ` enabled_by `
- * ` {directly_activates,directly_inhibits} `
- * `<{MF,BP}> part_of `
- * `<{MF,BP}> occurs_in `
+* ` enabled_by `
+* ` {directly_activates,directly_inhibits} `
+* `<{MF,BP}> part_of `
+* `<{MF,BP}> occurs_in `
 
-Here `` denotes an OWL individual that instantiates a GO molecular
-function classes. Similarly `` denotes an instance of a
-class or protein or RNA. Here the class would be something like
-UniProtKB:Q15465, and the individual would have an IRI that is
-specific to the model.
+Here `` denotes an OWL individual that instantiates a GO molecular function class. Similarly ``
+denotes an instance of a class of protein or RNA. Here the class would be something like UniProtKB:Q15465, and the
+individual would have an IRI that is specific to the model.
 
-Biological constraints on the structure of models are specified within
-the GO, RO and any related ontologies, and are enforced by standard
-OWL reasoners.
+Biological constraints on the structure of models are specified within the GO, RO and any related ontologies, and are
+enforced by standard OWL reasoners.
 
 Evidence follows the standard evidence model (see above).
 
@@ -206,9 +184,8 @@ A biological process (BP) association is mapped to
 part_of
 type
 
-Note in the above the MFIndividual is untyped, because a GO BP
-association carries no information about the MF type; we can also type
-by default to the root MF node
+Note in the above the MFIndividual is untyped, because a GO BP association carries no information about the MF type; we
+can also type by default to the root MF node
 
 A cellular component (CC) association is mapped to
 
@@ -228,16 +205,12 @@ repository.
 
 ## Phenotype Models
 
-A Phenotype Model (PM) is a NM that consists of at least one
-individual instantiating a *disease* or *phenotype* class (e.g. MP or
-HP). The set of PMs and the set of LMs are not disjoint. A Phenotype
-LEGO model incorporates aspects of both; for example, an allele to
-phenotype link that serves as evidence for a GO IMP.
+A Phenotype Model (PM) is a NM that consists of at least one individual instantiating a *disease* or *phenotype* class
+(e.g. MP or HP). The set of PMs and the set of LMs are not disjoint. A Phenotype LEGO model incorporates aspects of
+both; for example, an allele to phenotype link that serves as evidence for a GO IMP.
 
-A Causal Phenotype Model (CPM) is a PM in which there exists at least
-one OPA that uses a subrelation of
-[RO:0002410](http://purl.obolibrary.org/obo/RO_0002410) *causally
-related to* to connect two phenotype instances.
+A Causal Phenotype Model (CPM) is a PM in which there exists at least one OPA that uses a subrelation of
+[RO:0002410](http://purl.obolibrary.org/obo/RO_0002410) *causally related to* to connect two phenotype instances.
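
Returning to the evidence convention above, the Manchester-syntax example corresponds to an axiom annotation whose value is the IRI of the evidence individual. Below is a minimal OWL API sketch of that pattern, assuming hypothetical individual IRIs and an arbitrarily chosen ECO class; RO:0002612 (axiom-has-evidence) is the property named in the spec.

```java
import org.semanticweb.owlapi.apibinding.OWLManager;
import org.semanticweb.owlapi.model.*;

import java.util.Collections;

public class EvidenceAnnotationSketch {
    public static void main(String[] args) throws OWLOntologyCreationException {
        OWLOntologyManager manager = OWLManager.createOWLOntologyManager();
        OWLDataFactory df = manager.getOWLDataFactory();
        OWLOntology model = manager.createOntology(IRI.create("http://example.org/demo-model"));

        // A LEGO-style fact i r j, here using enabled_by (RO:0002333).
        OWLNamedIndividual i = df.getOWLNamedIndividual(IRI.create("http://example.org/demo-model/i"));
        OWLNamedIndividual j = df.getOWLNamedIndividual(IRI.create("http://example.org/demo-model/j"));
        OWLObjectProperty enabledBy = df.getOWLObjectProperty(IRI.create("http://purl.obolibrary.org/obo/RO_0002333"));

        // The evidence individual, typed with an ECO class (ECO:0000314 is only an example choice).
        OWLNamedIndividual evidence = df.getOWLNamedIndividual(IRI.create("http://example.org/demo-model/e"));
        OWLClass ecoClass = df.getOWLClass(IRI.create("http://purl.obolibrary.org/obo/ECO_0000314"));
        manager.addAxiom(model, df.getOWLClassAssertionAxiom(ecoClass, evidence));

        // axiom-has-evidence (RO:0002612) used as an annotation property; the annotation value is the
        // IRI of the evidence individual, not the individual itself, matching the OWL-DL note above.
        OWLAnnotationProperty axiomHasEvidence =
                df.getOWLAnnotationProperty(IRI.create("http://purl.obolibrary.org/obo/RO_0002612"));
        OWLAnnotation evidenceAnnotation = df.getOWLAnnotation(axiomHasEvidence, evidence.getIRI());

        // The ObjectPropertyAssertion i enabled_by j, carrying the axiom annotation.
        manager.addAxiom(model, df.getOWLObjectPropertyAssertionAxiom(
                enabledBy, i, j, Collections.singleton(evidenceAnnotation)));

        // The supporting publication would hang off the evidence individual in the same way,
        // via its own annotation or object property assertions.
    }
}
```

Under the cascading delete rules above, deleting the annotated fact also removes its axiom annotations, and deleting an RO:0002612 annotation removes the evidence individual whose IRI it points to.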