From b5232f7ed83abd010e6e2d38c542ba5fbb8cbf55 Mon Sep 17 00:00:00 2001 From: Guillaume Poirier-Morency Date: Fri, 15 Sep 2023 12:54:12 -0700 Subject: [PATCH 1/5] Update versions for hotfix --- pom.xml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/pom.xml b/pom.xml index 22189e4e..e83c2a14 100644 --- a/pom.xml +++ b/pom.xml @@ -5,7 +5,7 @@ baseCode baseCode baseCode - 1.1.18 + 1.1.19 2003 From b796fc0fec246527a74af5050faa462636c45ffc Mon Sep 17 00:00:00 2001 From: Guillaume Poirier-Morency Date: Wed, 5 Jul 2023 11:39:07 -0700 Subject: [PATCH 2/5] Remove remaining serialization-related code for ontologies Make OntologyTermSimple serializable. --- src/ubic/basecode/ontology/model/OntologyResource.java | 1 - src/ubic/basecode/ontology/model/OntologyTermSimple.java | 5 +++-- 2 files changed, 3 insertions(+), 3 deletions(-) diff --git a/src/ubic/basecode/ontology/model/OntologyResource.java b/src/ubic/basecode/ontology/model/OntologyResource.java index ca3a1f7f..a8533dca 100644 --- a/src/ubic/basecode/ontology/model/OntologyResource.java +++ b/src/ubic/basecode/ontology/model/OntologyResource.java @@ -19,7 +19,6 @@ package ubic.basecode.ontology.model; import javax.annotation.Nullable; -import java.io.Serializable; /** * @author pavlidis diff --git a/src/ubic/basecode/ontology/model/OntologyTermSimple.java b/src/ubic/basecode/ontology/model/OntologyTermSimple.java index 7d2f4e45..608ac750 100644 --- a/src/ubic/basecode/ontology/model/OntologyTermSimple.java +++ b/src/ubic/basecode/ontology/model/OntologyTermSimple.java @@ -15,6 +15,7 @@ package ubic.basecode.ontology.model; import javax.annotation.Nullable; +import java.io.Serializable; import java.util.Collection; import java.util.Comparator; import java.util.Objects; @@ -25,8 +26,8 @@ * * @author Paul */ - -public class OntologyTermSimple implements OntologyTerm { +@SuppressWarnings("unused") +public class OntologyTermSimple implements OntologyTerm, Serializable { /** * From 
f6b3dd21f45beb91034aa1b351c97e9635578456 Mon Sep 17 00:00:00 2001 From: Guillaume Poirier-Morency Date: Fri, 15 Sep 2023 12:53:38 -0700 Subject: [PATCH 3/5] Add support for OWL micro, mini and full reasoners Add LanguageLevel so that one can configure the level of OWL supported. Remove unused getSpec() in AbstractOntologyService. --- .../AbstractOntologyMemoryBackedService.java | 48 ++++++++++++++++--- .../jena/AbstractOntologyService.java | 38 +++++++++------ .../providers/MedicOntologyService.java | 4 +- .../providers/NIFSTDOntologyService.java | 4 +- .../ontology/providers/OntologyService.java | 43 ++++++++++++++++- 5 files changed, 111 insertions(+), 26 deletions(-) diff --git a/src/ubic/basecode/ontology/jena/AbstractOntologyMemoryBackedService.java b/src/ubic/basecode/ontology/jena/AbstractOntologyMemoryBackedService.java index d9f126fd..32b96f3b 100644 --- a/src/ubic/basecode/ontology/jena/AbstractOntologyMemoryBackedService.java +++ b/src/ubic/basecode/ontology/jena/AbstractOntologyMemoryBackedService.java @@ -15,6 +15,13 @@ package ubic.basecode.ontology.jena; import com.hp.hpl.jena.ontology.OntModelSpec; +import com.hp.hpl.jena.ontology.ProfileRegistry; +import com.hp.hpl.jena.rdf.model.ModelFactory; +import com.hp.hpl.jena.reasoner.ReasonerFactory; +import com.hp.hpl.jena.reasoner.rulesys.OWLFBRuleReasonerFactory; +import com.hp.hpl.jena.reasoner.rulesys.OWLMicroReasonerFactory; +import com.hp.hpl.jena.reasoner.rulesys.OWLMiniReasonerFactory; +import com.hp.hpl.jena.reasoner.transitiveReasoner.TransitiveReasonerFactory; import ubic.basecode.ontology.model.OntologyModel; import ubic.basecode.util.Configuration; @@ -35,23 +42,50 @@ protected String getOntologyUrl() { } @Override - protected OntologyModel loadModel( boolean processImports, InferenceMode inferenceMode ) throws IOException { - return new OntologyModelImpl( OntologyLoader.loadMemoryModel( this.getOntologyUrl(), this.getCacheName(), processImports, this.getSpec( inferenceMode ) ) ); + protected 
OntologyModel loadModel( boolean processImports, LanguageLevel languageLevel, InferenceMode inferenceMode ) throws IOException { + return new OntologyModelImpl( OntologyLoader.loadMemoryModel( this.getOntologyUrl(), this.getCacheName(), processImports, this.getSpec( languageLevel, inferenceMode ) ) ); } @Override - protected OntologyModel loadModelFromStream( InputStream is, boolean processImports, InferenceMode inferenceMode ) throws IOException { - return new OntologyModelImpl( OntologyLoader.loadMemoryModel( is, this.getOntologyUrl(), processImports, this.getSpec( inferenceMode ) ) ); + protected OntologyModel loadModelFromStream( InputStream is, boolean processImports, LanguageLevel languageLevel, InferenceMode inferenceMode ) throws IOException { + return new OntologyModelImpl( OntologyLoader.loadMemoryModel( is, this.getOntologyUrl(), processImports, this.getSpec( languageLevel, inferenceMode ) ) ); } - private OntModelSpec getSpec( InferenceMode inferenceMode ) { + private OntModelSpec getSpec( LanguageLevel languageLevel, InferenceMode inferenceMode ) { + String profile; + switch ( languageLevel ) { + case FULL: + profile = ProfileRegistry.OWL_LANG; + break; + case DL: + profile = ProfileRegistry.OWL_DL_LANG; + break; + case LITE: + profile = ProfileRegistry.OWL_LITE_LANG; + break; + default: + throw new UnsupportedOperationException( String.format( "Unsupported OWL language level %s.", languageLevel ) ); + } + ReasonerFactory reasonerFactory; switch ( inferenceMode ) { + case FULL: + reasonerFactory = OWLFBRuleReasonerFactory.theInstance(); + break; + case MINI: + reasonerFactory = OWLMiniReasonerFactory.theInstance(); + break; + case MICRO: + reasonerFactory = OWLMicroReasonerFactory.theInstance(); + break; case TRANSITIVE: - return OntModelSpec.OWL_MEM_TRANS_INF; + reasonerFactory = TransitiveReasonerFactory.theInstance(); + break; case NONE: - return OntModelSpec.OWL_MEM; + reasonerFactory = null; + break; default: throw new 
UnsupportedOperationException( String.format( "Unsupported inference level %s.", inferenceMode ) ); } + return new OntModelSpec( ModelFactory.createMemModelMaker(), null, reasonerFactory, profile ); } } diff --git a/src/ubic/basecode/ontology/jena/AbstractOntologyService.java b/src/ubic/basecode/ontology/jena/AbstractOntologyService.java index d105efb6..b570445e 100644 --- a/src/ubic/basecode/ontology/jena/AbstractOntologyService.java +++ b/src/ubic/basecode/ontology/jena/AbstractOntologyService.java @@ -74,6 +74,7 @@ public abstract class AbstractOntologyService implements OntologyService { } /* settings (applicable for next initialization) */ + private LanguageLevel nextLanguageLevel = LanguageLevel.FULL; private InferenceMode nextInferenceMode = InferenceMode.TRANSITIVE; private boolean nextProcessImports = true; private boolean nextSearchEnabled = true; @@ -91,12 +92,30 @@ public abstract class AbstractOntologyService implements OntologyService { private Set additionalRestrictions; private boolean isInitialized = false; @Nullable + private LanguageLevel languageLevel = null; + @Nullable private InferenceMode inferenceMode = null; @Nullable private Boolean processImports = null; @Nullable private Boolean searchEnabled = null; + @Override + public LanguageLevel getLanguageLevel() { + Lock lock = rwLock.readLock(); + try { + lock.lock(); + return this.languageLevel != null ? 
this.languageLevel : nextLanguageLevel; + } finally { + lock.unlock(); + } + } + + @Override + public void setLanguageLevel( LanguageLevel languageLevel ) { + this.nextLanguageLevel = languageLevel; + } + @Override public InferenceMode getInferenceMode() { Lock lock = rwLock.readLock(); @@ -162,6 +181,7 @@ private void initialize( @Nullable InputStream stream, boolean forceLoad, boolea String ontologyUrl = getOntologyUrl(); String ontologyName = getOntologyName(); String cacheName = getCacheName(); + LanguageLevel languageLevel = nextLanguageLevel; InferenceMode inferenceMode = nextInferenceMode; boolean processImports = nextProcessImports; boolean searchEnabled = nextSearchEnabled; @@ -196,7 +216,7 @@ private void initialize( @Nullable InputStream stream, boolean forceLoad, boolea return; try { - OntologyModel m = stream != null ? loadModelFromStream( stream, processImports, inferenceMode ) : loadModel( processImports, inferenceMode ); // can take a while. + OntologyModel m = stream != null ? loadModelFromStream( stream, processImports, languageLevel, inferenceMode ) : loadModel( processImports, languageLevel, inferenceMode ); // can take a while. if ( m instanceof OntologyModelImpl ) { model = ( ( OntologyModelImpl ) m ).getOntModel(); } else { @@ -254,6 +274,7 @@ private void initialize( @Nullable InputStream stream, boolean forceLoad, boolea this.additionalRestrictions = additionalRestrictions; this.index = index; this.isInitialized = true; + this.languageLevel = languageLevel; this.inferenceMode = inferenceMode; this.processImports = processImports; this.searchEnabled = searchEnabled; @@ -615,13 +636,13 @@ public void waitForInitializationThread() throws InterruptedException { * Delegates the call as to load the model into memory or leave it on disk. 
Simply delegates to either * OntologyLoader.loadMemoryModel( url ); OR OntologyLoader.loadPersistentModel( url, spec ); */ - protected abstract OntologyModel loadModel( boolean processImports, InferenceMode inferenceMode ) throws IOException; + protected abstract OntologyModel loadModel( boolean processImports, LanguageLevel languageLevel, InferenceMode inferenceMode ) throws IOException; /** * Load a model from a given input stream. */ - protected abstract OntologyModel loadModelFromStream( InputStream stream, boolean processImports, InferenceMode inferenceMode ) throws IOException; + protected abstract OntologyModel loadModelFromStream( InputStream stream, boolean processImports, LanguageLevel languageLevel, InferenceMode inferenceMode ) throws IOException; /** * A name for caching this ontology, or null to disable caching. @@ -633,17 +654,6 @@ protected String getCacheName() { return getOntologyName(); } - private OntModelSpec getSpec( InferenceMode inferenceMode ) { - switch ( inferenceMode ) { - case TRANSITIVE: - return OntModelSpec.OWL_MEM_TRANS_INF; - case NONE: - return OntModelSpec.OWL_MEM; - default: - throw new UnsupportedOperationException( String.format( "Unsupported inference level %s.", inferenceMode ) ); - } - } - @Override public void index( boolean force ) { String cacheName = getCacheName(); diff --git a/src/ubic/basecode/ontology/providers/MedicOntologyService.java b/src/ubic/basecode/ontology/providers/MedicOntologyService.java index f5d10d36..27e0acfb 100644 --- a/src/ubic/basecode/ontology/providers/MedicOntologyService.java +++ b/src/ubic/basecode/ontology/providers/MedicOntologyService.java @@ -52,12 +52,12 @@ protected String getOntologyUrl() { } @Override - protected OntologyModel loadModel( boolean processImports, InferenceMode inferenceMode ) { + protected OntologyModel loadModel( boolean processImports, LanguageLevel languageLevel, InferenceMode inferenceMode ) { try ( InputStream is = this.getClass().getResourceAsStream( 
MEDIC_ONTOLOGY_FILE ) ) { if ( is == null ) { throw new RuntimeException( String.format( "The MEDIC ontology was not found in classpath at %s.", MEDIC_ONTOLOGY_FILE ) ); } - return loadModelFromStream( new GZIPInputStream( is ), processImports, inferenceMode ); + return loadModelFromStream( new GZIPInputStream( is ), processImports, languageLevel, inferenceMode ); } catch ( IOException e ) { throw new RuntimeException( e ); } diff --git a/src/ubic/basecode/ontology/providers/NIFSTDOntologyService.java b/src/ubic/basecode/ontology/providers/NIFSTDOntologyService.java index d3b4c5f0..d4ddc2d0 100644 --- a/src/ubic/basecode/ontology/providers/NIFSTDOntologyService.java +++ b/src/ubic/basecode/ontology/providers/NIFSTDOntologyService.java @@ -43,12 +43,12 @@ protected String getOntologyUrl() { } @Override - protected OntologyModel loadModel( boolean processImports, InferenceMode inferenceMode ) { + protected OntologyModel loadModel( boolean processImports, LanguageLevel languageLevel, InferenceMode inferenceMode ) { try ( InputStream stream = getClass().getResourceAsStream( NIFSTD_ONTOLOGY_FILE ) ) { if ( stream == null ) { throw new RuntimeException( String.format( "The NIF ontology was not found in classpath at %s.", NIFSTD_ONTOLOGY_FILE ) ); } - return loadModelFromStream( new GZIPInputStream( stream ), processImports, inferenceMode ); + return loadModelFromStream( new GZIPInputStream( stream ), processImports, languageLevel, inferenceMode ); } catch ( IOException e ) { throw new RuntimeException( e ); } diff --git a/src/ubic/basecode/ontology/providers/OntologyService.java b/src/ubic/basecode/ontology/providers/OntologyService.java index 7afd9c38..614d7b7c 100644 --- a/src/ubic/basecode/ontology/providers/OntologyService.java +++ b/src/ubic/basecode/ontology/providers/OntologyService.java @@ -16,9 +16,50 @@ public interface OntologyService { void setProcessImports( boolean processImports ); + enum LanguageLevel { + /** + * The full OWL language. 
+ */ + FULL, + /** + * OWL-DL + */ + DL, + /** + * OWL/Lite + */ + LITE + } + + LanguageLevel getLanguageLevel(); + + void setLanguageLevel( LanguageLevel languageLevel ); + enum InferenceMode { + /** + * No inference is supported, only the axioms defined in the ontology are considered. + */ NONE, - TRANSITIVE + /** + * Only basic inference is supported for {@code subClassOf} and {@code subPropertyOf}. + *

+ * This is the fastest inference mode. + */ + TRANSITIVE, + /** + * Very limited inference. + */ + MICRO, + /** + * Limited inference. + */ + MINI, + /** + * Complete inference. + *

+ * This is the slowest inference mode. + */ + FULL } /** From 6f6247797ce76b934a0cc248410227814a3d939a Mon Sep 17 00:00:00 2001 From: Guillaume Poirier-Morency Date: Sun, 17 Sep 2023 15:31:17 -0700 Subject: [PATCH 4/5] Add language level and inference mode to OntologyService.toString() --- src/ubic/basecode/ontology/jena/AbstractOntologyService.java | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/src/ubic/basecode/ontology/jena/AbstractOntologyService.java b/src/ubic/basecode/ontology/jena/AbstractOntologyService.java index b570445e..cec8cd4e 100644 --- a/src/ubic/basecode/ontology/jena/AbstractOntologyService.java +++ b/src/ubic/basecode/ontology/jena/AbstractOntologyService.java @@ -753,7 +753,8 @@ public void loadTermsInNameSpace( InputStream is, boolean forceIndex ) { @Override public String toString() { - return String.format( "%s [%s]", getOntologyName(), getOntologyUrl() ); + return String.format( "%s [url=%s] [language level=%s] [inference mode=%s] [imports=%b] [search=%b]", + getOntologyName(), getOntologyUrl(), getLanguageLevel(), getInferenceMode(), getProcessImports(), isSearchEnabled() ); } private Set getOntClassesFromTerms( Collection terms ) { From 6b8a194a13fad5196aa57c7a46d8c58c00c6ea9d Mon Sep 17 00:00:00 2001 From: Guillaume Poirier-Morency Date: Sun, 17 Sep 2023 11:34:33 -0700 Subject: [PATCH 5/5] Use Jena 2.12.1 implementation of the rulesys reasoner The implementation supplied by Jena 2.7 is very inefficient because it has an O(n^2) complexity for inferring axioms. This is achieved by shading Jena under the ubic.basecode.ontology.jena.impl package namespace. It also implies that baseCode does not depend on Jena anymore. Jena 2.13.0 has breaking implementation changes and cannot be integrated this way. 
--- pom.xml | 94 +++++++++++++++++++++++++++++++++++++++++++++++++++++++++ 1 file changed, 94 insertions(+) diff --git a/pom.xml b/pom.xml index e83c2a14..39cd22ae 100644 --- a/pom.xml +++ b/pom.xml @@ -347,6 +347,100 @@ + + org.apache.maven.plugins + maven-dependency-plugin + 3.3.0 + + + unpack-jena-rete-engine + generate-sources + + unpack + + + + + org.apache.jena + jena-core + 2.12.1 + sources + ${project.build.directory}/generated-sources/jena-core + com/hp/hpl/jena/reasoner/rulesys/impl/*.java,com/hp/hpl/jena/graph/NodeFactory.java + + + + + + + + org.codehaus.mojo + build-helper-maven-plugin + 3.3.0 + + + generate-sources + + add-source + + + + ${project.build.directory}/generated-sources/jena-core + + + + + + org.apache.maven.plugins + maven-shade-plugin + 3.5.0 + + + package + + shade + + + + + org.apache.jena:* + + + + + org.apache.jena:* + + com/hp/hpl/jena/**/* + org/apache/jena/**/* + org/openjena/**/* + etc/**/* + vocabularies/**/* + + + + com/hp/hpl/jena/reasoner/rulesys/impl/**/* + + + + + + + com.hp.hpl.jena + ubic.basecode.ontology.jena.impl + + + org.apache.jena + ubic.basecode.ontology.jena.impl + + + org.openjena + ubic.basecode.ontology.jena.impl + + + + + + org.apache.maven.plugins maven-compiler-plugin