From: <lor...@us...> - 2013-03-20 12:08:44
Revision: 3914 http://sourceforge.net/p/dl-learner/code/3914 Author: lorenz_b Date: 2013-03-20 12:08:41 +0000 (Wed, 20 Mar 2013) Log Message: ----------- Started OWL axiom pattern detection. Modified Paths: -------------- trunk/components-core/src/main/java/org/dllearner/reasoning/SPARQLReasoner.java trunk/interfaces/src/main/java/org/dllearner/cli/Enrichment.java Added Paths: ----------- trunk/components-core/src/main/java/org/dllearner/algorithms/pattern/ trunk/components-core/src/main/java/org/dllearner/algorithms/pattern/AbstractRenderer.java trunk/components-core/src/main/java/org/dllearner/algorithms/pattern/FullIRIEntityShortFromProvider.java trunk/components-core/src/main/java/org/dllearner/algorithms/pattern/ManchesterOWLSyntaxEditorParser.java trunk/components-core/src/main/java/org/dllearner/algorithms/pattern/ManchesterOWLSyntaxOWLObjectRendererImpl.java trunk/components-core/src/main/java/org/dllearner/algorithms/pattern/ManchesterOWLSyntaxObjectRenderer.java trunk/components-core/src/main/java/org/dllearner/algorithms/pattern/OWLAxiomPatternFinder.java trunk/components-core/src/main/java/org/dllearner/algorithms/pattern/OWLAxiomRenamer.java trunk/components-core/src/main/java/org/dllearner/algorithms/pattern/OWLClassExpressionOrderingComparator.java trunk/components-core/src/main/java/org/dllearner/algorithms/pattern/OWLClassExpressionRenamer.java Added: trunk/components-core/src/main/java/org/dllearner/algorithms/pattern/AbstractRenderer.java =================================================================== --- trunk/components-core/src/main/java/org/dllearner/algorithms/pattern/AbstractRenderer.java (rev 0) +++ trunk/components-core/src/main/java/org/dllearner/algorithms/pattern/AbstractRenderer.java 2013-03-20 12:08:41 UTC (rev 3914) @@ -0,0 +1,219 @@ +package org.dllearner.algorithms.pattern; + +/* + * This file is part of the OWL API. + * + * The contents of this file are subject to the LGPL License, Version 3.0. + * + * Copyright (C) 2011, The University of Manchester + * + * This program is free software: you can redistribute it and/or modify + * it under the terms of the GNU General Public License as published by + * the Free Software Foundation, either version 3 of the License, or + * (at your option) any later version. + * + * This program is distributed in the hope that it will be useful, + * but WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the + * GNU General Public License for more details. + * + * You should have received a copy of the GNU General Public License + * along with this program. If not, see http://www.gnu.org/licenses/. + * + * + * Alternatively, the contents of this file may be used under the terms of the Apache License, Version 2.0 + * in which case, the provisions of the Apache License Version 2.0 are applicable instead of those above. + * + * Copyright 2011, University of Manchester + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + + +import java.io.IOException; +import java.io.Writer; +import java.util.ArrayList; +import java.util.List; +import java.util.StringTokenizer; + +import org.coode.owlapi.manchesterowlsyntax.ManchesterOWLSyntax; +import org.semanticweb.owlapi.io.OWLRendererException; +import org.semanticweb.owlapi.io.OWLRendererIOException; +import org.semanticweb.owlapi.util.ShortFormProvider; + +/** + * Author: Matthew Horridge<br> + * The University Of Manchester<br> + * Bio-Health Informatics Group<br> + * Date: 25-Apr-2007<br><br> + */ +@SuppressWarnings("javadoc") +public class AbstractRenderer { + + private ShortFormProvider shortFormProvider; + + private int lastNewLinePos = -1; + + private int currentPos; + + private Writer writer; + + private List<Integer> tabs; + + private boolean useTabbing = true; + + private boolean useWrapping = true; + + public AbstractRenderer(Writer writer, ShortFormProvider shortFormProvider) { + this.writer = writer; + this.shortFormProvider = shortFormProvider; + tabs = new ArrayList<Integer>(); + pushTab(0); + } + + + public void setUseTabbing(boolean useTabbing) { + this.useTabbing = useTabbing; + } + + + public void setUseWrapping(boolean useWrapping) { + this.useWrapping = useWrapping; + } + + + public boolean isUseWrapping() { + return useWrapping; + } + + + public boolean isUseTabbing() { + return useTabbing; + } + + +// public void setShortFormProvider(ShortFormProvider shortFormProvider) { +// this.shortFormProvider = shortFormProvider; +// } + + + public void flush() throws OWLRendererException { + try { + writer.flush(); + } + catch (IOException e) { + throw new OWLRendererIOException(e); + } + } + + protected void pushTab(int size) { + tabs.add(0, size); + } + + protected void incrementTab(int increment) { + int base = 0; + if(!tabs.isEmpty()) { + base = tabs.get(0); + } + tabs.add(0, base + increment); + } + + protected void popTab() { + tabs.remove(0); + } + + protected void writeTab() { + int tab = tabs.get(0); + for(int i = 0; i < tab; i++) { + write(" "); + } + } + + protected int getIndent() { + return currentPos - lastNewLinePos - 2; + } + + + protected void write(String s) { + if(s == null) { + return; + } + int indexOfNewLine = s.indexOf('\n'); + if(indexOfNewLine != -1) { + lastNewLinePos = currentPos + indexOfNewLine; + } + currentPos += s.length(); + try { + writer.write(s); + } + catch (IOException e) { + e.printStackTrace(); + } + } + + protected void write(char ch) { + write(Character.toString(ch)); + } + + protected void write(String s, int lineLen) { + StringTokenizer tokenizer = new StringTokenizer(s, " \n\t-", true); + int currentLineLength = 0; + while(tokenizer.hasMoreTokens()) { + String curToken = tokenizer.nextToken(); + write(curToken); + if(curToken.equals("\n")) { + writeTab(); + } + currentLineLength += curToken.length(); + if(currentLineLength > lineLen && curToken.trim().length() != 0 && tokenizer.hasMoreTokens()) { + writeNewLine(); + currentLineLength = 0; + } + } + } + + protected void writeSpace() { + write(" "); + } + + protected void write(ManchesterOWLSyntax keyword) { + write(" ", keyword, keyword.isSectionKeyword() ? 
": " : " "); + } + + protected void writeFrameKeyword(ManchesterOWLSyntax keyword) { + write("", keyword, ": "); + } + + protected void writeSectionKeyword(ManchesterOWLSyntax keyword) { + write(" ", keyword, ": "); + } + + protected void writeNewLine() { + write("\n"); + if (useTabbing) { + writeTab(); + } + } + + protected void write(String prefix, ManchesterOWLSyntax keyword, String suffix) { + write(prefix); + write(keyword.toString()); + write(suffix); + } + + protected ShortFormProvider getShortFormProvider() { + return shortFormProvider; + } + +} + Added: trunk/components-core/src/main/java/org/dllearner/algorithms/pattern/FullIRIEntityShortFromProvider.java =================================================================== --- trunk/components-core/src/main/java/org/dllearner/algorithms/pattern/FullIRIEntityShortFromProvider.java (rev 0) +++ trunk/components-core/src/main/java/org/dllearner/algorithms/pattern/FullIRIEntityShortFromProvider.java 2013-03-20 12:08:41 UTC (rev 3914) @@ -0,0 +1,17 @@ +package org.dllearner.algorithms.pattern; + +import org.semanticweb.owlapi.model.OWLEntity; +import org.semanticweb.owlapi.util.ShortFormProvider; + +public class FullIRIEntityShortFromProvider implements ShortFormProvider{ + + @Override + public void dispose() { + } + + @Override + public String getShortForm(OWLEntity entity) { + return entity.toStringID(); + } + +} Added: trunk/components-core/src/main/java/org/dllearner/algorithms/pattern/ManchesterOWLSyntaxEditorParser.java =================================================================== --- trunk/components-core/src/main/java/org/dllearner/algorithms/pattern/ManchesterOWLSyntaxEditorParser.java (rev 0) +++ trunk/components-core/src/main/java/org/dllearner/algorithms/pattern/ManchesterOWLSyntaxEditorParser.java 2013-03-20 12:08:41 UTC (rev 3914) @@ -0,0 +1,4129 @@ +package org.dllearner.algorithms.pattern; + +/* + * This file is part of the OWL API. + * + * The contents of this file are subject to the LGPL License, Version 3.0. + * + * Copyright (C) 2011, The University of Manchester + * + * This program is free software: you can redistribute it and/or modify + * it under the terms of the GNU General Public License as published by + * the Free Software Foundation, either version 3 of the License, or + * (at your option) any later version. + * + * This program is distributed in the hope that it will be useful, + * but WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the + * GNU General Public License for more details. + * + * You should have received a copy of the GNU General Public License + * along with this program. If not, see http://www.gnu.org/licenses/. + * + * + * Alternatively, the contents of this file may be used under the terms of the Apache License, Version 2.0 + * in which case, the provisions of the Apache License Version 2.0 are applicable instead of those above. + * + * Copyright 2011, University of Manchester + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+ * See the License for the specific language governing permissions and + * limitations under the License. + */ + + +import java.util.ArrayList; +import java.util.Arrays; +import java.util.Collections; +import java.util.HashMap; +import java.util.HashSet; +import java.util.LinkedHashSet; +import java.util.List; +import java.util.Map; +import java.util.Set; +import java.util.TreeMap; +import java.util.TreeSet; +import java.util.regex.Matcher; +import java.util.regex.Pattern; + +import org.coode.owlapi.manchesterowlsyntax.ManchesterOWLSyntax; +import org.coode.owlapi.manchesterowlsyntax.ManchesterOWLSyntaxOntologyFormat; +import org.coode.owlapi.manchesterowlsyntax.ManchesterOWLSyntaxOntologyHeader; +import org.coode.owlapi.manchesterowlsyntax.ManchesterOWLSyntaxTokenizer; +import org.coode.owlapi.manchesterowlsyntax.OntologyAxiomPair; +import org.semanticweb.owlapi.expression.OWLEntityChecker; +import org.semanticweb.owlapi.expression.OWLOntologyChecker; +import org.semanticweb.owlapi.expression.ParserException; +import org.semanticweb.owlapi.model.AddAxiom; +import org.semanticweb.owlapi.model.AddImport; +import org.semanticweb.owlapi.model.AddOntologyAnnotation; +import org.semanticweb.owlapi.model.IRI; +import org.semanticweb.owlapi.model.OWLAnnotation; +import org.semanticweb.owlapi.model.OWLAnnotationAssertionAxiom; +import org.semanticweb.owlapi.model.OWLAnnotationProperty; +import org.semanticweb.owlapi.model.OWLAnnotationSubject; +import org.semanticweb.owlapi.model.OWLAnnotationValue; +import org.semanticweb.owlapi.model.OWLAxiom; +import org.semanticweb.owlapi.model.OWLClass; +import org.semanticweb.owlapi.model.OWLClassAxiom; +import org.semanticweb.owlapi.model.OWLClassExpression; +import org.semanticweb.owlapi.model.OWLDataFactory; +import org.semanticweb.owlapi.model.OWLDataProperty; +import org.semanticweb.owlapi.model.OWLDataPropertyCharacteristicAxiom; +import org.semanticweb.owlapi.model.OWLDataPropertyExpression; +import org.semanticweb.owlapi.model.OWLDataRange; +import org.semanticweb.owlapi.model.OWLDatatype; +import org.semanticweb.owlapi.model.OWLEntity; +import org.semanticweb.owlapi.model.OWLFacetRestriction; +import org.semanticweb.owlapi.model.OWLImportsDeclaration; +import org.semanticweb.owlapi.model.OWLIndividual; +import org.semanticweb.owlapi.model.OWLLiteral; +import org.semanticweb.owlapi.model.OWLNamedIndividual; +import org.semanticweb.owlapi.model.OWLObjectProperty; +import org.semanticweb.owlapi.model.OWLObjectPropertyAxiom; +import org.semanticweb.owlapi.model.OWLObjectPropertyCharacteristicAxiom; +import org.semanticweb.owlapi.model.OWLObjectPropertyExpression; +import org.semanticweb.owlapi.model.OWLOntology; +import org.semanticweb.owlapi.model.OWLOntologyChange; +import org.semanticweb.owlapi.model.OWLOntologyID; +import org.semanticweb.owlapi.model.OWLOntologyLoaderConfiguration; +import org.semanticweb.owlapi.model.OWLOntologyManager; +import org.semanticweb.owlapi.model.OWLPropertyAssertionAxiom; +import org.semanticweb.owlapi.model.OWLPropertyExpression; +import org.semanticweb.owlapi.model.OWLSubClassOfAxiom; +import org.semanticweb.owlapi.model.OWLSubPropertyChainOfAxiom; +import org.semanticweb.owlapi.model.SWRLAtom; +import org.semanticweb.owlapi.model.SWRLBuiltInAtom; +import org.semanticweb.owlapi.model.SWRLDArgument; +import org.semanticweb.owlapi.model.SWRLDifferentIndividualsAtom; +import org.semanticweb.owlapi.model.SWRLIArgument; +import org.semanticweb.owlapi.model.SWRLIndividualArgument; +import 
org.semanticweb.owlapi.model.SWRLLiteralArgument; +import org.semanticweb.owlapi.model.SWRLRule; +import org.semanticweb.owlapi.model.SWRLSameIndividualAtom; +import org.semanticweb.owlapi.model.SWRLVariable; +import org.semanticweb.owlapi.model.SetOntologyID; +import org.semanticweb.owlapi.model.UnloadableImportException; +import org.semanticweb.owlapi.util.DefaultPrefixManager; +import org.semanticweb.owlapi.util.NamespaceUtil; +import org.semanticweb.owlapi.vocab.DublinCoreVocabulary; +import org.semanticweb.owlapi.vocab.Namespaces; +import org.semanticweb.owlapi.vocab.OWL2Datatype; +import org.semanticweb.owlapi.vocab.OWLFacet; +import org.semanticweb.owlapi.vocab.OWLRDFVocabulary; +import org.semanticweb.owlapi.vocab.SWRLBuiltInsVocabulary; +import org.semanticweb.owlapi.vocab.XSDVocabulary; + + +/** + * Author: Matthew Horridge<br> The University Of Manchester<br> Bio-Health Informatics Group<br> Date: + * 10-Sep-2007<br><br> + * <p/> + * A parser for the Manchester OWL Syntax. All properties must be defined before they are used. For example, consider + * the restriction hasPart some Leg. The parser must know in advance whether or not hasPart is an object property or a + * data property so that Leg gets parsed correctly. In a tool, such as an editor, it is expected that hasPart will + * already exists as either a data property or an object property. If a complete ontology is being parsed, it is + * expected that hasPart will have been defined at the top of the file before it is used in any class expressions or + * property assertions (e.g. ObjectProperty: hasPart) + */ +@SuppressWarnings({"unused","javadoc"}) +public class ManchesterOWLSyntaxEditorParser { + + // This parser was built by hand! After struggling with terrible + // error messages produced by ANTLR (or JavaCC) I decides to construct + // this parser by hand. The error messages that this parser generates + // are specific to the Manchester OWL Syntax and are such that it should + // be easy to use this parser in tools such as editors. 
+ + private OWLOntologyLoaderConfiguration configuration; + + protected OWLDataFactory dataFactory; + + private List<ManchesterOWLSyntaxTokenizer.Token> tokens; + + private int tokenIndex; + + private OWLEntityChecker owlEntityChecker; + + private OWLOntologyChecker owlOntologyChecker = new OWLOntologyChecker() { + public OWLOntology getOntology(String name) { + return null; + } + }; + + private String base; + + private Set<String> classNames; + + private Set<String> objectPropertyNames; + + private Set<String> dataPropertyNames; + + private Set<String> individualNames; + + private Set<String> dataTypeNames; + + private Set<String> annotationPropertyNames; + + private Map<String, SWRLBuiltInsVocabulary> ruleBuiltIns = new TreeMap<String, SWRLBuiltInsVocabulary>(); + + private DefaultPrefixManager pm = new DefaultPrefixManager(); + + public static final String AND = ManchesterOWLSyntax.AND.toString(); + + public static final String OR = ManchesterOWLSyntax.OR.toString(); + + public static final String INVERSE = ManchesterOWLSyntax.INVERSE.toString(); + + public static final String SOME = ManchesterOWLSyntax.SOME.toString(); + + public static final String SELF = ManchesterOWLSyntax.SELF.toString(); + + public static final String ONLY = ManchesterOWLSyntax.ONLY.toString(); + + public static final String VALUE = ManchesterOWLSyntax.VALUE.toString(); + + public static final String MIN = ManchesterOWLSyntax.MIN.toString(); + + public static final String MAX = ManchesterOWLSyntax.MAX.toString(); + + public static final String EXACTLY = ManchesterOWLSyntax.EXACTLY.toString(); + + public static final String ONLYSOME = ManchesterOWLSyntax.ONLYSOME.toString(); + + public static final String NOT = ManchesterOWLSyntax.NOT.toString(); + + public static final String CLASS = ManchesterOWLSyntax.CLASS.toString() + ":"; + + public static final String DATATYPE = ManchesterOWLSyntax.DATATYPE.toString() + ":"; + + public static final String SUB_CLASS_OF = ManchesterOWLSyntax.SUBCLASS_OF.toString() + ":"; + + public static final String SUPER_CLASS_OF = ManchesterOWLSyntax.SUPERCLASS_OF.toString() + ":"; + + public static final String INSTANCES = "Instances:"; + + public static final String EQUIVALENT_TO = ManchesterOWLSyntax.EQUIVALENT_TO.toString() + ":"; + + public static final String EQUIVALENT_CLASSES = ManchesterOWLSyntax.EQUIVALENT_CLASSES.toString() + ":"; + + public static final String EQUIVALENT_PROPERTIES = ManchesterOWLSyntax.EQUIVALENT_PROPERTIES.toString() + ":"; + + public static final String DISJOINT_WITH = ManchesterOWLSyntax.DISJOINT_WITH.toString() + ":"; + + public static final String DISJOINT_UNION_OF = ManchesterOWLSyntax.DISJOINT_UNION_OF.toString() + ":"; + + public static final String HAS_KEY = ManchesterOWLSyntax.HAS_KEY.toString() + ":"; + + public static final String DISJOINT_CLASSES = ManchesterOWLSyntax.DISJOINT_CLASSES.toString() + ":"; + + public static final String DISJOINT_PROPERTIES = ManchesterOWLSyntax.DISJOINT_PROPERTIES.toString() + ":"; + + public static final String OBJECT_PROPERTY = ManchesterOWLSyntax.OBJECT_PROPERTY.toString() + ":"; + + public static final String DATA_PROPERTY = ManchesterOWLSyntax.DATA_PROPERTY.toString() + ":"; + + public static final String ANNOTATION_PROPERTY = ManchesterOWLSyntax.ANNOTATION_PROPERTY.toString() + ":"; + + public static final String SUB_PROPERTY_OF = ManchesterOWLSyntax.SUB_PROPERTY_OF.toString() + ":"; + + public static final String SUPER_PROPERTY_OF = ManchesterOWLSyntax.SUPER_PROPERTY_OF.toString() + ":"; + + public static final 
String DOMAIN = ManchesterOWLSyntax.DOMAIN.toString() + ":"; + + public static final String RANGE = ManchesterOWLSyntax.RANGE.toString() + ":"; + + public static final String INVERSES = ManchesterOWLSyntax.INVERSES.toString() + ":"; + + public static final String CHARACTERISTICS = ManchesterOWLSyntax.CHARACTERISTICS.toString() + ":"; + + public static final String INDIVIDUAL = ManchesterOWLSyntax.INDIVIDUAL.toString() + ":"; + + public static final String INDIVIDUALS = ManchesterOWLSyntax.INDIVIDUALS.toString() + ":"; + + public static final String ANNOTATIONS = ManchesterOWLSyntax.ANNOTATIONS.toString() + ":"; + + public static final String TYPES = ManchesterOWLSyntax.TYPES.toString() + ":"; + + public static final String TYPE = ManchesterOWLSyntax.TYPE.toString() + ":"; + + public static final String FACTS = ManchesterOWLSyntax.FACTS.toString() + ":"; + + public static final String SAME_AS = ManchesterOWLSyntax.SAME_AS.toString() + ":"; + + public static final String SAME_INDIVIDUAL = ManchesterOWLSyntax.SAME_INDIVIDUAL.toString() + ":"; + + public static final String DIFFERENT_FROM = ManchesterOWLSyntax.DIFFERENT_FROM.toString() + ":"; + + public static final String DIFFERENT_INDIVIDUALS = ManchesterOWLSyntax.DIFFERENT_INDIVIDUALS.toString() + ":"; + + public static final String VALUE_PARTITION = "ValuePartition:"; + + public static final String ONTOLOGY = ManchesterOWLSyntax.ONTOLOGY.toString() + ":"; + + public static final String PREFIX = ManchesterOWLSyntax.PREFIX.toString() + ":"; + + public static final String IMPORT = ManchesterOWLSyntax.IMPORT.toString() + ":"; + + public static final String SUB_PROPERTY_CHAIN = ManchesterOWLSyntax.SUB_PROPERTY_CHAIN.toString() + ":"; + + public static final String FUNCTIONAL = ManchesterOWLSyntax.FUNCTIONAL.toString() + ":"; + + public static final String INVERSE_FUNCTIONAL = ManchesterOWLSyntax.INVERSE_FUNCTIONAL.toString() + ":"; + + public static final String SYMMETRIC = ManchesterOWLSyntax.SYMMETRIC.toString() + ":"; + + public static final String ANTI_SYMMETRIC = ManchesterOWLSyntax.ANTI_SYMMETRIC.toString() + ":"; + + public static final String ASYMMETRIC = ManchesterOWLSyntax.ASYMMETRIC.toString() + ":"; + + public static final String TRANSITIVE = ManchesterOWLSyntax.TRANSITIVE.toString() + ":"; + + public static final String REFLEXIVE = ManchesterOWLSyntax.REFLEXIVE.toString() + ":"; + + public static final String IRREFLEXIVE = ManchesterOWLSyntax.IRREFLEXIVE.toString() + ":"; + + public static final String INVERSE_OF = ManchesterOWLSyntax.INVERSE_OF + ":"; + + public static final String RULE = ManchesterOWLSyntax.RULE + ":"; + + private Set<String> potentialKeywords; + + private OWLOntology defaultOntology = null; + + private boolean allowEmptyFrameSections = false; + + + private Map<String, AnnotatedListItemParser<OWLDataProperty, ?>> dataPropertyFrameSections = new HashMap<String, AnnotatedListItemParser<OWLDataProperty, ?>>(); + + + public ManchesterOWLSyntaxEditorParser(OWLDataFactory dataFactory, String s) { + this(new OWLOntologyLoaderConfiguration(), dataFactory, s); + } + + public boolean isAllowEmptyFrameSections() { + return allowEmptyFrameSections; + } + + public void setAllowEmptyFrameSections(boolean allowEmptyFrameSections) { + this.allowEmptyFrameSections = allowEmptyFrameSections; + } + + public ManchesterOWLSyntaxEditorParser(OWLOntologyLoaderConfiguration configuration, OWLDataFactory dataFactory, String s) { + this.configuration = configuration; + this.dataFactory = dataFactory; + potentialKeywords = new 
HashSet<String>(); + + classNames = new HashSet<String>(); + objectPropertyNames = new HashSet<String>(); + dataPropertyNames = new HashSet<String>(); + individualNames = new HashSet<String>(); + dataTypeNames = new HashSet<String>(); + annotationPropertyNames = new HashSet<String>(); + pm.setPrefix("rdf:", Namespaces.RDF.toString()); + pm.setPrefix("rdfs:", Namespaces.RDFS.toString()); + pm.setPrefix("owl:", Namespaces.OWL.toString()); + pm.setPrefix("dc:", DublinCoreVocabulary.NAME_SPACE); + NamespaceUtil u = new NamespaceUtil(); + + initialiseClassFrameSections(); + initialiseObjectPropertyFrameSections(); + initialiseDataPropertyFrameSections(); + initialiseAnnotationPropertyFrameSections(); + initialiseIndividualFrameSections(); + + + for (XSDVocabulary v : XSDVocabulary.values()) { + dataTypeNames.add(v.getIRI().getFragment()); + dataTypeNames.add("xsd:" + v.getIRI().getFragment()); + } + dataTypeNames.add(OWLRDFVocabulary.RDF_XML_LITERAL.getIRI().getFragment()); + dataTypeNames.add("rdf:" + OWLRDFVocabulary.RDF_XML_LITERAL.getIRI().getFragment()); + + dataTypeNames.add(dataFactory.getTopDatatype().getIRI().getFragment()); + + for (IRI iri : OWLRDFVocabulary.BUILT_IN_ANNOTATION_PROPERTY_IRIS) { + String[] res = u.split(iri.toString(), null); + annotationPropertyNames.add(u.getPrefix(res[0]) + ":" + res[1]); + } + + base = "http://www.semanticweb.org#"; + owlEntityChecker = new DefaultEntityChecker(); + tokens = new ArrayList<ManchesterOWLSyntaxTokenizer.Token>(); + tokens.addAll(getTokenizer(s).tokenize()); + tokenIndex = 0; + + for (SWRLBuiltInsVocabulary v : SWRLBuiltInsVocabulary.values()) { + ruleBuiltIns.put(v.getShortName(), v); + ruleBuiltIns.put(v.getIRI().toQuotedString(), v); + } + } + + protected ManchesterOWLSyntaxTokenizer getTokenizer(String s) { + return new ManchesterOWLSyntaxTokenizer(s); + } + + public OWLDataFactory getDataFactory() { + return dataFactory; + } + + private Map<String, AnnotatedListItemParser<OWLClass, ?>> classFrameSections = new HashMap<String, AnnotatedListItemParser<OWLClass, ?>>(); + + public void initialiseClassFrameSections() { + initialiseSection(new EntityAnnotationsListItemParser<OWLClass>(), classFrameSections); + initialiseSection(new ClassSubClassOfListItemParser(), classFrameSections); + initialiseSection(new ClassEquivalentToListItemParser(), classFrameSections); + initialiseSection(new ClassDisjointWithListItemParser(), classFrameSections); + initialiseSection(new ClassHasKeyListItemParser(), classFrameSections); + // Extensions + initialiseSection(new ClassSuperClassOfListItemParser(), classFrameSections); + initialiseSection(new ClassDisjointClassesListItemParser(), classFrameSections); + initialiseSection(new ClassIndividualsListItemParser(), classFrameSections); + } + + private Map<String, AnnotatedListItemParser<OWLObjectProperty, ?>> objectPropertyFrameSections = new HashMap<String, AnnotatedListItemParser<OWLObjectProperty, ?>>(); + + private void initialiseObjectPropertyFrameSections() { + initialiseSection(new EntityAnnotationsListItemParser<OWLObjectProperty>(), objectPropertyFrameSections); + initialiseSection(new ObjectPropertySubPropertyOfListItemParser(), objectPropertyFrameSections); + initialiseSection(new ObjectPropertyEquivalentToListItemParser(), objectPropertyFrameSections); + initialiseSection(new ObjectPropertyDisjointWithListItemParser(), objectPropertyFrameSections); + initialiseSection(new ObjectPropertyDomainListItemParser(), objectPropertyFrameSections); + initialiseSection(new 
ObjectPropertyRangeListItemParser(), objectPropertyFrameSections); + initialiseSection(new ObjectPropertyInverseOfListItemParser(), objectPropertyFrameSections); + initialiseSection(new ObjectPropertyCharacteristicsItemParser(), objectPropertyFrameSections); + initialiseSection(new ObjectPropertySubPropertyChainListItemParser(), objectPropertyFrameSections); + // Extensions + initialiseSection(new ObjectPropertySuperPropertyOfListItemParser(), objectPropertyFrameSections); + } + + private void initialiseDataPropertyFrameSections() { + initialiseSection(new DataPropertySubPropertyOfListItemParser(), dataPropertyFrameSections); + initialiseSection(new DataPropertyEquivalentToListItemParser(), dataPropertyFrameSections); + initialiseSection(new DataPropertyDisjointWithListItemParser(), dataPropertyFrameSections); + initialiseSection(new DataPropertyDomainListItemParser(), dataPropertyFrameSections); + initialiseSection(new DataPropertyRangeListItemParser(), dataPropertyFrameSections); + initialiseSection(new DataPropertyCharacteristicsItemParser(), dataPropertyFrameSections); + initialiseSection(new EntityAnnotationsListItemParser<OWLDataProperty>(), dataPropertyFrameSections); + } + + + private Map<String, AnnotatedListItemParser<OWLAnnotationProperty, ?>> annotationPropertyFrameSections = new HashMap<String, AnnotatedListItemParser<OWLAnnotationProperty, ?>>(); + + private void initialiseAnnotationPropertyFrameSections() { + initialiseSection(new AnnotationPropertySubPropertyOfListItemParser(), annotationPropertyFrameSections); + initialiseSection(new AnnotationPropertyDomainListItemParser(), annotationPropertyFrameSections); + initialiseSection(new AnnotationPropertyRangeListItemParser(), annotationPropertyFrameSections); + initialiseSection(new EntityAnnotationsListItemParser<OWLAnnotationProperty>(), annotationPropertyFrameSections); + } + + private Map<String, AnnotatedListItemParser<OWLIndividual, ?>> individualFrameSections = new HashMap<String, AnnotatedListItemParser<OWLIndividual, ?>>(); + + public void initialiseIndividualFrameSections() { + initialiseSection(new IndividualAnnotationItemParser(), individualFrameSections); + initialiseSection(new IndividualTypesItemParser(), individualFrameSections); + initialiseSection(new IndividualFactsItemParser(), individualFrameSections); + initialiseSection(new IndividualSameAsItemParser(), individualFrameSections); + initialiseSection(new IndividualDifferentFromItemParser(), individualFrameSections); + // Extensions + initialiseSection(new IndividualDifferentIndividualsItemParser(), individualFrameSections); + } + + + + protected List<ManchesterOWLSyntaxTokenizer.Token> getTokens() { + return tokens; + } + + + protected void reset() { + tokenIndex = 0; + } + + + public String getBase() { + return base; + } + + + public void setBase(String base) { + this.base = base; + } + + + public OWLEntityChecker getOWLEntityChecker() { + return owlEntityChecker; + } + + + public void setOWLEntityChecker(OWLEntityChecker owlEntityChecker) { + this.owlEntityChecker = owlEntityChecker; + } + + + public boolean isOntologyName(String name) { + return owlOntologyChecker.getOntology(name) != null; + } + + + public boolean isClassName(String name) { + return classNames.contains(name) || owlEntityChecker != null && owlEntityChecker.getOWLClass(name) != null; + } + + + public OWLOntology getOntology(String name) { + return owlOntologyChecker.getOntology(name); + } + + + public void setOWLOntologyChecker(OWLOntologyChecker owlOntologyChecker) { + 
this.owlOntologyChecker = owlOntologyChecker; + } + + + public boolean isObjectPropertyName(String name) { + return objectPropertyNames.contains(name) || owlEntityChecker != null && owlEntityChecker.getOWLObjectProperty(name) != null; + } + + + public boolean isAnnotationPropertyName(String name) { + return annotationPropertyNames.contains(name) || owlEntityChecker != null && owlEntityChecker.getOWLAnnotationProperty(name) != null; + } + + + public boolean isDataPropertyName(String name) { + return dataPropertyNames.contains(name) || owlEntityChecker != null && owlEntityChecker.getOWLDataProperty(name) != null; + } + + + public boolean isIndividualName(String name) { + return individualNames.contains(name) || owlEntityChecker != null && owlEntityChecker.getOWLIndividual(name) != null; + } + + + public boolean isDatatypeName(String name) { + return dataTypeNames.contains(name) || owlEntityChecker != null && owlEntityChecker.getOWLDatatype(name) != null; + } + + + public boolean isSWRLBuiltin(String name) { + return ruleBuiltIns.containsKey(name); + } + + + public OWLClass getOWLClass(String name) { + OWLClass cls = owlEntityChecker.getOWLClass(name); + if (cls == null && classNames.contains(name)) { + cls = dataFactory.getOWLClass(getIRI(name)); + } + return cls; + } + + + public OWLObjectProperty getOWLObjectProperty(String name) { + OWLObjectProperty prop = owlEntityChecker.getOWLObjectProperty(name); + if (prop == null && objectPropertyNames.contains(name)) { + prop = dataFactory.getOWLObjectProperty(getIRI(name)); + } + return prop; + } + + + public OWLIndividual getOWLIndividual(String name) { + if (name.startsWith("_:")) { + return dataFactory.getOWLAnonymousIndividual(name); + } + return getOWLNamedIndividual(name); + } + + private OWLNamedIndividual getOWLNamedIndividual(String name) { + OWLNamedIndividual ind = owlEntityChecker.getOWLIndividual(name); + if (ind == null && individualNames.contains(name)) { + ind = dataFactory.getOWLNamedIndividual(getIRI(name)); + } + return ind; + } + + + public OWLDataProperty getOWLDataProperty(String name) { + OWLDataProperty prop = owlEntityChecker.getOWLDataProperty(name); + if (prop == null && dataPropertyNames.contains(name)) { + prop = dataFactory.getOWLDataProperty(getIRI(name)); + } + return prop; + } + + + public OWLDatatype getOWLDatatype(String name) { + OWLDatatype dt = owlEntityChecker.getOWLDatatype(name); + if (dt == null && dataTypeNames.contains(name)) { + dt = dataFactory.getOWLDatatype(getIRI(name)); + } + return dt; + } + + + public OWLAnnotationProperty getOWLAnnotationProperty(String name) { + OWLAnnotationProperty prop = owlEntityChecker.getOWLAnnotationProperty(name); + if (prop == null && annotationPropertyNames.contains(name)) { + prop = dataFactory.getOWLAnnotationProperty(getIRI(name)); + } + return prop; + } + + + protected ManchesterOWLSyntaxTokenizer.Token getLastToken() { + if (tokenIndex - 1 > -1) { + return tokens.get(tokenIndex - 1); +// return tokenIndex < tokens.size() ? 
tokens.get(tokenIndex) : tokens.get(tokens.size() - 1); + } + else { + return tokens.get(0); + } + } + + + protected String peekToken() { + return getToken().getToken(); + } + + //////////////////////////////////////////////////////////////////////////////////////////////////// + // + // Tokenizer + // + //////////////////////////////////////////////////////////////////////////////////////////////////// + + + protected String consumeToken() { + String token = getToken().getToken(); + if (tokenIndex < tokens.size() - 1) { + tokenIndex++; + } + + return token; + } + + + protected void consumeToken(String expected) throws ParserException { + String tok = consumeToken(); + if (!tok.equals(expected)) { + throw createException(expected); + } + } + + + public ManchesterOWLSyntaxTokenizer.Token getToken() { + return tokens.get((tokenIndex < tokens.size()) ? tokenIndex : tokenIndex - 1); + } + + + public int getTokenPos() { + return getToken().getPos(); + } + + + public int getTokenCol() { + return getToken().getCol(); + } + + + public int getTokenRow() { + return getToken().getRow(); + } + + //////////////////////////////////////////////////////////////////////////////////////////////////// + // + // Parser + // + //////////////////////////////////////////////////////////////////////////////////////////////////// + + + /** + * Parses an OWL class expression that is represented in Manchester OWL Syntax + * @return The parsed class expression + * @throws ParserException If a class expression could not be parsed. + */ + public OWLClassExpression parseClassExpression() throws ParserException { + OWLClassExpression desc = parseIntersection(); + if (!consumeToken().equals(ManchesterOWLSyntaxTokenizer.EOF)) { + throw createException(ManchesterOWLSyntaxTokenizer.EOF); + } + return desc; + } + + + public OWLClassExpression parseIntersection() throws ParserException { + Set<OWLClassExpression> ops = new HashSet<OWLClassExpression>(); + String kw = AND; + while (kw.equalsIgnoreCase(AND)) { + potentialKeywords.remove(AND); + ops.add(parseUnion()); + potentialKeywords.add(AND); + kw = peekToken(); + if (kw.equalsIgnoreCase(AND)) { + kw = consumeToken(); + } + else if (kw.equalsIgnoreCase("that")) { + consumeToken(); + kw = AND; + } + } + if (ops.size() == 1) { + return ops.iterator().next(); + } + else { + return dataFactory.getOWLObjectIntersectionOf(ops); + } + } + + + public OWLClassExpression parseUnion() throws ParserException { + Set<OWLClassExpression> ops = new HashSet<OWLClassExpression>(); + String kw = OR; + while (kw.equalsIgnoreCase(OR)) { + potentialKeywords.remove(OR); + ops.add(parseNonNaryClassExpression()); + potentialKeywords.add(OR); + kw = peekToken(); + if (kw.equalsIgnoreCase(OR)) { + kw = consumeToken(); + } + } + if (ops.size() == 1) { + return ops.iterator().next(); + } + else { + return dataFactory.getOWLObjectUnionOf(ops); + } + } + + + public OWLObjectPropertyExpression parseObjectPropertyExpression(boolean allowUndeclared) throws ParserException { + String tok = consumeToken(); + if (tok.equalsIgnoreCase(INVERSE)) { + String open = peekToken(); + boolean brackets = false; + if (open.equals("(")) { + consumeToken(); + brackets = true; + } + OWLObjectPropertyExpression prop = parseObjectPropertyExpression(); + if (brackets) { + String close = consumeToken(); + if (!close.equals(")")) { + throw createException(")"); + } + } + return dataFactory.getOWLObjectInverseOf(prop); + } + else { + if (!allowUndeclared && !isObjectPropertyName(tok)) { + throw createException(false, true, false, 
false, false, false, INVERSE); + } + return getOWLObjectProperty(tok); + } + } + + + public OWLObjectPropertyExpression parseObjectPropertyExpression() throws ParserException { + return parseObjectPropertyExpression(false); + } + + + public OWLPropertyExpression<?,?> parsePropertyExpression() throws ParserException { + String tok = peekToken(); + if (isObjectPropertyName(tok)) { + return parseObjectPropertyExpression(); + } + else if (tok.equalsIgnoreCase(INVERSE)) { + return parseObjectPropertyExpression(); + } + else if (isDataPropertyName(tok)) { + return parseDataProperty(); + } + else { + consumeToken(); + throw createException(false, true, true, false, false, false, INVERSE); + } + } + + + public OWLClassExpression parseRestriction() throws ParserException { + String tok = peekToken(); + if (isObjectPropertyName(tok) || tok.equalsIgnoreCase(INVERSE)) { + return parseObjectRestriction(); + } + else if (isDataPropertyName(tok)) { + return parseDataRestriction(); + } + else { + consumeToken(); + throw createException(false, true, true, false); + } + } + + + /** + * Parses all class expressions except ObjectIntersectionOf and ObjectUnionOf + * @return The class expression which was parsed + * @throws ParserException if a non-nary class expression could not be parsed + */ + public OWLClassExpression parseNonNaryClassExpression() throws ParserException { + + String tok = peekToken(); + if (tok.equalsIgnoreCase(NOT)) { + consumeToken(); + OWLClassExpression complemented = parseNestedClassExpression(false); + return dataFactory.getOWLObjectComplementOf(complemented); + } + else if (isObjectPropertyName(tok) || tok.equalsIgnoreCase(INVERSE)) { + return parseObjectRestriction(); + } + else if (isDataPropertyName(tok)) { + // Data restriction + return parseDataRestriction(); + } + else if (tok.equals("{")) { + return parseObjectOneOf(); + } + else if (tok.equals("(")) { + return parseNestedClassExpression(false); + } + else if (isClassName(tok)) { + consumeToken(); + return getOWLClass(tok); + } + // Add option for strict class name checking + else { + consumeToken(); + throw createException(true, true, true, false, false, false, "(", "{", NOT, INVERSE); + } + } + + + public OWLClassExpression parseObjectRestriction() throws ParserException { + OWLObjectPropertyExpression prop = parseObjectPropertyExpression(); + String kw = consumeToken(); + if (kw.equalsIgnoreCase(SOME)) { + String possSelfToken = peekToken(); + if (possSelfToken.equalsIgnoreCase(SELF)) { + consumeToken(); + return dataFactory.getOWLObjectHasSelf(prop); + } + else { + OWLClassExpression filler = null; + try { + filler = parseNestedClassExpression(false); + } + catch (ParserException e) { + Set<String> keywords = new HashSet<String>(); + keywords.addAll(e.getExpectedKeywords()); + keywords.add(SELF); + throw createException(e.isClassNameExpected(), e.isObjectPropertyNameExpected(), e.isDataPropertyNameExpected(), e.isIndividualNameExpected(), e.isDatatypeNameExpected(), e.isAnnotationPropertyNameExpected(), keywords.toArray(new String[keywords.size()])); + } + return dataFactory.getOWLObjectSomeValuesFrom(prop, filler); + } + } + else if (kw.equalsIgnoreCase(ONLY)) { + OWLClassExpression filler = parseNestedClassExpression(false); + return dataFactory.getOWLObjectAllValuesFrom(prop, filler); + } + else if (kw.equalsIgnoreCase(VALUE)) { + String indName = consumeToken(); + if (!isIndividualName(indName)) { + throw createException(false, false, false, true); + } + return dataFactory.getOWLObjectHasValue(prop, 
getOWLIndividual(indName)); + } + else if (kw.equalsIgnoreCase(MIN)) { + int card = parseInteger(); + OWLClassExpression filler = parseNestedClassExpression(true); + if (filler != null) { + return dataFactory.getOWLObjectMinCardinality(card, prop, filler); + } + else { + return dataFactory.getOWLObjectMinCardinality(card, prop); + } + } + else if (kw.equalsIgnoreCase(MAX)) { + int card = parseInteger(); + OWLClassExpression filler = parseNestedClassExpression(true); + if (filler != null) { + return dataFactory.getOWLObjectMaxCardinality(card, prop, filler); + } + else { + return dataFactory.getOWLObjectMaxCardinality(card, prop); + } + } + else if (kw.equalsIgnoreCase(EXACTLY)) { + int card = parseInteger(); + OWLClassExpression filler = parseNestedClassExpression(true); + if (filler != null) { + return dataFactory.getOWLObjectExactCardinality(card, prop, filler); + } + else { + return dataFactory.getOWLObjectExactCardinality(card, prop); + } + } + else if (kw.equalsIgnoreCase(ONLYSOME)) { + String tok = peekToken(); + Set<OWLClassExpression> descs = new HashSet<OWLClassExpression>(); + if (!tok.equals("[")) { + descs.add(parseIntersection()); + } + else { + descs.addAll(parseClassExpressionList("[", "]")); + } + Set<OWLClassExpression> ops = new HashSet<OWLClassExpression>(); + for (OWLClassExpression desc : descs) { + ops.add(dataFactory.getOWLObjectSomeValuesFrom(prop, desc)); + } + OWLClassExpression filler; + if (descs.size() == 1) { + filler = descs.iterator().next(); + } + else { + filler = dataFactory.getOWLObjectUnionOf(descs); + } + ops.add(dataFactory.getOWLObjectAllValuesFrom(prop, filler)); + return dataFactory.getOWLObjectIntersectionOf(ops); + } + else if (kw.equalsIgnoreCase(SELF)) { + return dataFactory.getOWLObjectHasSelf(prop); + } + else { + // Error! 
+ throw createException(SOME, ONLY, VALUE, MIN, MAX, EXACTLY, SELF); + } + } + + + public OWLClassExpression parseDataRestriction() throws ParserException { + OWLDataPropertyExpression prop = parseDataProperty(); + String kw = consumeToken(); + if (kw.equalsIgnoreCase(SOME)) { + OWLDataRange rng = parseDataRange(); + if(rng==null) { + throw new ParserException(getTokenSequence(), getTokenPos(), getTokenRow(), getTokenCol(), true, false, false, false, true, false, Collections.<String>emptySet()); +// return dataFactory.getOWLDataSomeValuesFrom(prop, rng) + } + return dataFactory.getOWLDataSomeValuesFrom(prop, rng); + } + else if (kw.equalsIgnoreCase(ONLY)) { + OWLDataRange rng = parseDataRange(); + return dataFactory.getOWLDataAllValuesFrom(prop, rng); + } + else if (kw.equalsIgnoreCase(VALUE)) { + OWLLiteral con = parseConstant(); + return dataFactory.getOWLDataHasValue(prop, con); + } + else if (kw.equalsIgnoreCase(MIN)) { + int card = parseInteger(); + OWLDataRange rng = parseDataRange(); + if (rng != null) { + return dataFactory.getOWLDataMinCardinality(card, prop, rng); + } + else { + return dataFactory.getOWLDataMinCardinality(card, prop); + } + } + else if (kw.equalsIgnoreCase(EXACTLY)) { + int card = parseInteger(); + OWLDataRange rng = parseDataRange(); + if (rng != null) { + return dataFactory.getOWLDataExactCardinality(card, prop, rng); + } + else { + return dataFactory.getOWLDataExactCardinality(card, prop); + } + } + else if (kw.equalsIgnoreCase(MAX)) { + int card = parseInteger(); + OWLDataRange rng = parseDataRange(); + if (rng != null) { + return dataFactory.getOWLDataMaxCardinality(card, prop, rng); + } + else { + return dataFactory.getOWLDataMaxCardinality(card, prop); + } + } + throw createException(SOME, ONLY, VALUE, MIN, EXACTLY, MAX); + } + + + public OWLFacet parseFacet() throws ParserException { + String facet = consumeToken(); + if (facet.equals(">")) { + if (peekToken().equals("=")) { + consumeToken(); + return OWLFacet.MIN_INCLUSIVE; + } + else { + return OWLFacet.MIN_EXCLUSIVE; + } + } + else if (facet.equals("<")) { + if (peekToken().equals("=")) { + consumeToken(); + return OWLFacet.MAX_INCLUSIVE; + } + else { + return OWLFacet.MAX_EXCLUSIVE; + } + } + return OWLFacet.getFacetBySymbolicName(facet); + } + + + public OWLDatatype parseDatatype() throws ParserException { + String name = consumeToken(); + return getOWLDatatype(name); + } + + + public OWLDataRange parseDataRange() throws ParserException { + return parseDataIntersectionOf(); + } + + public OWLDataRange parseDataIntersectionOf() throws ParserException { + String sep = AND; + Set<OWLDataRange> ranges = new HashSet<OWLDataRange>(); + while (sep.equals(AND)) { + ranges.add(parseDataUnionOf()); + sep = peekToken(); + if (sep.equals(AND)) { + consumeToken(); + } + } + if (ranges.size() == 1) { + return ranges.iterator().next(); + } + else { + return dataFactory.getOWLDataIntersectionOf(ranges); + } + } + + public OWLDataRange parseDataUnionOf() throws ParserException { + String sep = OR; + Set<OWLDataRange> ranges = new HashSet<OWLDataRange>(); + while (sep.equals(OR)) { + ranges.add(parseDataRangePrimary()); + sep = peekToken(); + if (sep.equals(OR)) { + consumeToken(); + } + } + if (ranges.size() == 1) { + return ranges.iterator().next(); + } + else { + return dataFactory.getOWLDataUnionOf(ranges); + } + } + + private OWLDataRange parseDataRangePrimary() throws ParserException { + String tok = peekToken(); + + if (isDatatypeName(tok)) { + consumeToken(); + OWLDatatype datatype = getOWLDatatype(tok); + 
String next = peekToken(); + if (next.equals("[")) { + // Restricted data range + consumeToken(); + String sep = ","; + Set<OWLFacetRestriction> facetRestrictions = new HashSet<OWLFacetRestriction>(); + while (sep.equals(",")) { + OWLFacet fv = parseFacet(); + if (fv == null) { + throw createException(OWLFacet.getFacets().toArray(new String[OWLFacet.getFacetIRIs().size()])); + } + OWLLiteral con = parseConstant(); + // if (!con.isRDFPlainLiteral()) { + // con = dataFactory.getOWLLiteral(con.getLiteral()); + // } + facetRestrictions.add(dataFactory.getOWLFacetRestriction(fv, con)); + sep = consumeToken(); + } + if (!sep.equals("]")) { + throw createException("]"); + } + return dataFactory.getOWLDatatypeRestriction(datatype, facetRestrictions); + } + else { + return datatype; + } + } + else if (tok.equalsIgnoreCase(NOT)) { + return parseDataComplementOf(); + } + else if (tok.equals("{")) { + return parseDataOneOf(); + } + else if (tok.equals("(")) { + consumeToken(); + OWLDataRange rng = parseDataRange(); + consumeToken(")"); + return rng; + } + else if (!tok.equals(ManchesterOWLSyntaxTokenizer.EOF)) { + consumeToken(); + throw createException(false, false, false, false, true, false, NOT, "{"); + } + return null; + } + + + public Set<OWLDataRange> parseDataRangeList() throws ParserException { + String sep = ","; + Set<OWLDataRange> ranges = new HashSet<OWLDataRange>(); + while (sep.equals(",")) { + potentialKeywords.remove(","); + OWLDataRange rng = parseDataRange(); + ranges.add(rng); + potentialKeywords.add(","); + sep = peekToken(); + if (sep.equals(",")) { + consumeToken(); + } + } + return ranges; + } + + + private OWLDataRange parseDataOneOf() throws ParserException { + consumeToken(); + Set<OWLLiteral> cons = new HashSet<OWLLiteral>(); + String sep = ","; + while (sep.equals(",")) { + OWLLiteral con = parseConstant(); + cons.add(con); + sep = consumeToken(); + } + if (!sep.equals("}")) { + throw createException(",", "}"); + } + return dataFactory.getOWLDataOneOf(cons); + } + + + private OWLDataRange parseDataComplementOf() throws ParserException { + String not = consumeToken(); + if (!not.equalsIgnoreCase(NOT)) { + throw createException(NOT); + } + OWLDataRange complementedDataRange = parseDataRangePrimary(); + return dataFactory.getOWLDataComplementOf(complementedDataRange); + } + + public OWLLiteral parseLiteral() throws ParserException { + String tok = consumeToken(); + if (tok.startsWith("\"")) { + String lit = ""; + if (tok.length() > 2) { + lit = tok.substring(1, tok.length() - 1); + } + if (peekToken().equals("^")) { + consumeToken(); + if (!peekToken().equals("^")) { + throw createException("^"); + } + consumeToken(); + return dataFactory.getOWLLiteral(lit, parseDatatype()); + } + else if (peekToken().startsWith("@")) { + // Plain literal with a language tag + String lang = consumeToken().substring(1); + return dataFactory.getOWLLiteral(lit, lang); + } + else { + // Plain literal without a language tag + return dataFactory.getOWLLiteral(lit, ""); + } + } + else { + try { + int i = Integer.parseInt(tok); + return dataFactory.getOWLLiteral(tok, OWL2Datatype.XSD_INTEGER); + } + catch (NumberFormatException e) { + // Ignore - not interested + } + if (tok.endsWith("f")||tok.endsWith("F")) { + try { + // XXX this extra F might qualify as Float a Double INF/-INF + float f = Float.parseFloat(tok.replace("INF", "Infinity").replace("inf", "Infinity")); + return dataFactory.getOWLLiteral(Float.toString(f).replace("Infinity", "INF"), OWL2Datatype.XSD_FLOAT); + } + catch 
(NumberFormatException e) { + // Ignore - not interested + } + } + try { + double d = Double.parseDouble(tok); + return dataFactory.getOWLLiteral(tok, OWL2Datatype.XSD_DOUBLE); + } + catch (NumberFormatException e) { + // Ignore - not interested + } + + if (tok.equals("true")) { + return dataFactory.getOWLLiteral(true); + } + else if (tok.equals("false")) { + return dataFactory.getOWLLiteral(false); + } + } + throw createException(false, false, false, false, false, false, "true", "false", "$integer$", "$float$", "$double$", "\"$Literal$\"", "\"$Literal$\"^^<datatype>", "\"$Literal$\"@<lang>"); + } + + /** + * @deprecated Use {@link #parseLiteral()} instead + */ + @Deprecated + public OWLLiteral parseConstant() throws ParserException { + return parseLiteral(); + } + + + public int parseInteger() throws ParserException { + String i = consumeToken(); + try { + return Integer.parseInt(i); + } + catch (NumberFormatException e) { + throw new ParserException(Arrays.asList(getToken().getToken()), getTokenPos(), getTokenRow(), true, getTokenCol()); + } + } + + + public String getLineCol() { + return "Encountered " + getLastToken() + " at " + getTokenRow() + ":" + getTokenCol() + " "; + } + + + private OWLClassExpression parseNestedClassExpression(boolean lookaheadCheck) throws ParserException { + String tok = peekToken(); + if (tok.equals("(")) { + consumeToken(); + OWLClassExpression desc = parseIntersection(); + String closeBracket = consumeToken(); + if (!closeBracket.equals(")")) { + // Error! + throw createException(")"); + } + return desc; + } + else if (tok.equals("{")) { + return parseObjectOneOf(); + } + else if (isClassName(tok)) { + String name = consumeToken(); + return getOWLClass(name); + } + else if (!tok.equals(ManchesterOWLSyntaxTokenizer.EOF) || !lookaheadCheck) { + consumeToken(); + throw createException(true, false, false, false, false, false, "(", "{"); + } + return null; + } + + + public OWLClassExpression parseObjectOneOf() throws ParserException { + String open = consumeToken(); + if (!open.equals("{")) { + throw createException("{"); + } + String sep = ","; + Set<OWLIndividual> inds = new HashSet<OWLIndividual>(); + while (sep.equals(",")) { + OWLIndividual ind = parseIndividual(); + inds.add(ind); + sep = peekToken(); + if (sep.equals(",")) { + consumeToken(); + } + } + String close = consumeToken(); + if (!close.equals("}")) { + throw createException("}", ","); + } + return dataFactory.getOWLObjectOneOf(inds); + } + + + + private <F> void initialiseSection(AnnotatedListItemParser<F, ?> parser, Map<String, AnnotatedListItemParser<F, ?>> map, String ... 
synonyms) { + map.put(parser.getFrameSectionKeyword(), parser); + for(String syn : synonyms) { + map.put(syn, parser); + } + } + + public Set<OntologyAxiomPair> parseFrames() throws ParserException { + Set<OntologyAxiomPair> axioms = new HashSet<OntologyAxiomPair>(); + Set<String> possible = new HashSet<String>(); + resetPossible(possible); + while (true) { + String tok = peekToken(); + if (tok.equalsIgnoreCase(CLASS)) { + potentialKeywords.clear(); + resetPossible(possible); + axioms.addAll(parseClassFrame()); + possible.addAll(classFrameSections.keySet()); + } + else if (tok.equalsIgnoreCase(OBJECT_PROPERTY)) { + potentialKeywords.clear(); + resetPossible(possible); + axioms.addAll(parseObjectPropertyFrame()); + possible.addAll(objectPropertyFrameSections.keySet()); + } + else if (tok.equalsIgnoreCase(DATA_PROPERTY)) { + potentialKeywords.clear(); + resetPossible(possible); + axioms.addAll(parseDataPropertyFrame()); + possible.ad... [truncated message content] |
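The pattern detection started in this revision abstracts each axiom: every entity in the axiom's signature is renamed to a positional placeholder, so axioms that share the same structure collapse onto the same "pattern" axiom, which can then be counted across ontologies. Below is a minimal sketch of that idea, assuming OWL API 3.x and Guava on the classpath. It uses the stock OWLObjectDuplicator utility rather than the committed OWLAxiomRenamer/OWLClassExpressionRenamer, ignores the placeholder-ordering problem those classes address (the signature is iterated in arbitrary order here), and the ontology IRI and placeholder namespace are made up for illustration.

import java.util.HashMap;
import java.util.Map;

import org.semanticweb.owlapi.apibinding.OWLManager;
import org.semanticweb.owlapi.model.IRI;
import org.semanticweb.owlapi.model.OWLAxiom;
import org.semanticweb.owlapi.model.OWLDataFactory;
import org.semanticweb.owlapi.model.OWLEntity;
import org.semanticweb.owlapi.model.OWLLogicalAxiom;
import org.semanticweb.owlapi.model.OWLOntology;
import org.semanticweb.owlapi.model.OWLOntologyManager;
import org.semanticweb.owlapi.util.OWLObjectDuplicator;

import com.google.common.collect.HashMultiset;
import com.google.common.collect.Multiset;
import com.google.common.collect.Multisets;

public class AxiomPatternSketch {

    public static void main(String[] args) throws Exception {
        OWLOntologyManager man = OWLManager.createOWLOntologyManager();
        OWLDataFactory df = man.getOWLDataFactory();
        // Hypothetical ontology location, for illustration only.
        OWLOntology ont = man.loadOntology(IRI.create("http://example.org/some-ontology.owl"));

        Multiset<OWLAxiom> patterns = HashMultiset.create();
        for (OWLLogicalAxiom axiom : ont.getLogicalAxioms()) {
            patterns.add(abstractAxiom(axiom, df));
        }
        // Most frequent axiom shapes first.
        for (OWLAxiom pattern : Multisets.copyHighestCountFirst(patterns).elementSet()) {
            System.out.println(patterns.count(pattern) + "\t" + pattern);
        }
    }

    // Replace every class, property and individual in the axiom's signature by a
    // positional placeholder; entities of other kinds are left untouched.
    static OWLAxiom abstractAxiom(OWLAxiom axiom, OWLDataFactory df) {
        Map<OWLEntity, IRI> renaming = new HashMap<OWLEntity, IRI>();
        int cls = 0, prop = 0, ind = 0;
        for (OWLEntity e : axiom.getSignature()) {
            if (e.isOWLClass()) {
                renaming.put(e, IRI.create("http://example.org/pattern#A" + cls++));
            } else if (e.isOWLObjectProperty() || e.isOWLDataProperty()) {
                renaming.put(e, IRI.create("http://example.org/pattern#p" + prop++));
            } else if (e.isOWLNamedIndividual()) {
                renaming.put(e, IRI.create("http://example.org/pattern#i" + ind++));
            }
        }
        OWLObjectDuplicator duplicator = new OWLObjectDuplicator(renaming, df);
        return duplicator.duplicateObject(axiom);
    }
}

The committed OWLAxiomPatternFinder applies the same multiset counting across whole ontology repositories and records the per-ontology patterns via addOntologyPatterns.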
From: <lor...@us...> - 2013-05-04 05:50:52
Revision: 3941 http://sourceforge.net/p/dl-learner/code/3941 Author: lorenz_b Date: 2013-05-04 05:50:47 +0000 (Sat, 04 May 2013) Log Message: ----------- Some modifications in pattern detection script. Modified Paths: -------------- trunk/components-core/src/main/java/org/dllearner/algorithms/pattern/OWLAxiomPatternFinder.java trunk/components-core/src/main/java/org/dllearner/algorithms/pattern/OWLAxiomRenamer.java trunk/components-core/src/main/java/org/dllearner/kb/repository/tones/TONESRepository.java trunk/interfaces/src/main/java/org/dllearner/cli/Enrichment.java trunk/pom.xml trunk/scripts/pom.xml trunk/scripts/src/main/java/org/dllearner/scripts/OWLAxiomPatternDetectionEvaluation.java Modified: trunk/components-core/src/main/java/org/dllearner/algorithms/pattern/OWLAxiomPatternFinder.java =================================================================== --- trunk/components-core/src/main/java/org/dllearner/algorithms/pattern/OWLAxiomPatternFinder.java 2013-05-03 08:49:17 UTC (rev 3940) +++ trunk/components-core/src/main/java/org/dllearner/algorithms/pattern/OWLAxiomPatternFinder.java 2013-05-04 05:50:47 UTC (rev 3941) @@ -21,6 +21,7 @@ import java.util.Set; import java.util.prefs.Preferences; +import org.coode.owlapi.functionalparser.OWLFunctionalSyntaxOWLParser; import org.dllearner.kb.dataset.OWLOntologyDataset; import org.dllearner.kb.repository.OntologyRepository; import org.dllearner.kb.repository.OntologyRepositoryEntry; @@ -28,6 +29,7 @@ import org.ini4j.InvalidFileFormatException; import org.semanticweb.owlapi.apibinding.OWLManager; import org.semanticweb.owlapi.io.OWLObjectRenderer; +import org.semanticweb.owlapi.io.StringDocumentSource; import org.semanticweb.owlapi.io.UnparsableOntologyException; import org.semanticweb.owlapi.model.AxiomType; import org.semanticweb.owlapi.model.IRI; @@ -359,7 +361,9 @@ entries = entryList; } Multiset<OWLAxiom> allAxiomPatterns = HashMultiset.create(); + int i = 1; for (OntologyRepositoryEntry entry : entries) { + System.out.print(i + ": "); URI uri = entry.getPhysicalURI(); // if(uri.toString().startsWith("http://rest.bioontology.org/bioportal/ontologies/download/42764")){ if (!ontologyProcessed(uri)) { @@ -368,15 +372,19 @@ manager = OWLManager.createOWLOntologyManager(); OWLOntology ontology = manager.loadOntology(IRI.create(uri)); Multiset<OWLAxiom> axiomPatterns = HashMultiset.create(); - for (OWLLogicalAxiom axiom : ontology.getLogicalAxioms()) { + Set<OWLAxiom> logicalAxioms = new HashSet<OWLAxiom>(); + for (AxiomType<?> type : AxiomType.AXIOM_TYPES) { + logicalAxioms.addAll(ontology.getAxioms(type, true)); + } + for (OWLAxiom axiom : logicalAxioms) { OWLAxiom renamedAxiom = renamer.rename(axiom); axiomPatterns.add(renamedAxiom); } allAxiomPatterns.addAll(axiomPatterns); addOntologyPatterns(uri, ontology, axiomPatterns); - for (OWLAxiom owlAxiom : Multisets.copyHighestCountFirst(allAxiomPatterns).elementSet()) { +// for (OWLAxiom owlAxiom : Multisets.copyHighestCountFirst(allAxiomPatterns).elementSet()) { // System.out.println(owlAxiom + ": " + allAxiomPatterns.count(owlAxiom)); - } +// } manager.removeOntology(ontology); } catch (OWLOntologyAlreadyExistsException e) { e.printStackTrace(); @@ -384,6 +392,8 @@ e.printStackTrace(); addOntologyError(uri, e); } + } else { + System.out.println("Already processed."); } } @@ -401,5 +411,11 @@ org.coode.owlapi.functionalrenderer.OWLObjectRenderer r = new org.coode.owlapi.functionalrenderer.OWLObjectRenderer(man, ontology, sw); axiom.accept(r); System.out.println(sw.toString()); + 
StringDocumentSource s = new StringDocumentSource("Ontology(<http://www.pattern.org>" + sw.toString() + ")"); + OWLFunctionalSyntaxOWLParser p = new OWLFunctionalSyntaxOWLParser(); + OWLOntology newOntology = man.createOntology(); + p.parse(s, newOntology); + System.out.println(newOntology.getLogicalAxioms()); + } } Modified: trunk/components-core/src/main/java/org/dllearner/algorithms/pattern/OWLAxiomRenamer.java =================================================================== --- trunk/components-core/src/main/java/org/dllearner/algorithms/pattern/OWLAxiomRenamer.java 2013-05-03 08:49:17 UTC (rev 3940) +++ trunk/components-core/src/main/java/org/dllearner/algorithms/pattern/OWLAxiomRenamer.java 2013-05-04 05:50:47 UTC (rev 3941) @@ -341,14 +341,13 @@ @Override public void visit(OWLSubPropertyChainOfAxiom axiom) { + List<OWLObjectPropertyExpression> renamedSubPropertyChain = new ArrayList<OWLObjectPropertyExpression>(); + for (OWLObjectPropertyExpression owlObjectPropertyExpression : axiom.getPropertyChain()) { + renamedSubPropertyChain.add(expressionRenamer.rename(owlObjectPropertyExpression)); + } OWLObjectPropertyExpression superProperty = axiom.getSuperProperty(); superProperty = expressionRenamer.rename(superProperty); - List<OWLObjectPropertyExpression> subPropertyChain = axiom.getPropertyChain(); - List<OWLObjectPropertyExpression> renamedSubPropertyChain = axiom.getPropertyChain(); - for (OWLObjectPropertyExpression owlObjectPropertyExpression : subPropertyChain) { - renamedSubPropertyChain.add(expressionRenamer.rename(owlObjectPropertyExpression)); - } - renamedAxiom = df.getOWLSubPropertyChainOfAxiom(subPropertyChain, superProperty); + renamedAxiom = df.getOWLSubPropertyChainOfAxiom(renamedSubPropertyChain, superProperty); } @Override Modified: trunk/components-core/src/main/java/org/dllearner/kb/repository/tones/TONESRepository.java =================================================================== --- trunk/components-core/src/main/java/org/dllearner/kb/repository/tones/TONESRepository.java 2013-05-03 08:49:17 UTC (rev 3940) +++ trunk/components-core/src/main/java/org/dllearner/kb/repository/tones/TONESRepository.java 2013-05-04 05:50:47 UTC (rev 3941) @@ -10,14 +10,18 @@ import java.util.Collections; import java.util.List; +import org.apache.log4j.Logger; import org.dllearner.kb.repository.OntologyRepository; import org.dllearner.kb.repository.OntologyRepositoryEntry; +import org.dllearner.kb.repository.bioportal.BioPortalRepository; import org.semanticweb.owlapi.model.IRI; import org.semanticweb.owlapi.model.OWLOntologyIRIMapper; import org.semanticweb.owlapi.util.OntologyIRIShortFormProvider; public class TONESRepository implements OntologyRepository{ + private static final Logger log = Logger.getLogger(TONESRepository.class); + private final String repositoryName = "TONES"; private final URI repositoryLocation = URI.create("http://owl.cs.manchester.ac.uk/repository"); @@ -91,6 +95,7 @@ catch (IOException e) { e.printStackTrace(); } + log.info("Loaded " + entries.size() + " ontology entries from TONES."); } private class RepositoryEntry implements OntologyRepositoryEntry { Modified: trunk/interfaces/src/main/java/org/dllearner/cli/Enrichment.java =================================================================== --- trunk/interfaces/src/main/java/org/dllearner/cli/Enrichment.java 2013-05-03 08:49:17 UTC (rev 3940) +++ trunk/interfaces/src/main/java/org/dllearner/cli/Enrichment.java 2013-05-04 05:50:47 UTC (rev 3941) @@ -61,11 +61,14 @@ import joptsimple.OptionSpec; 
import org.aksw.commons.jena_owlapi.Conversion; +import org.apache.jena.riot.checker.CheckerLiterals; +import org.apache.jena.riot.system.ErrorHandlerFactory; import org.apache.log4j.ConsoleAppender; import org.apache.log4j.Level; import org.apache.log4j.Logger; import org.apache.log4j.SimpleLayout; import org.coode.owlapi.manchesterowlsyntax.ManchesterOWLSyntaxOntologyFormat; +import org.coode.owlapi.turtle.TurtleOntologyFormat; import org.dllearner.algorithms.celoe.CELOE; import org.dllearner.algorithms.properties.AsymmetricObjectPropertyAxiomLearner; import org.dllearner.algorithms.properties.DataPropertyDomainAxiomLearner; @@ -127,6 +130,7 @@ import org.dllearner.utilities.datastructures.SortedSetTuple; import org.dllearner.utilities.examples.AutomaticNegativeExampleFinderSPARQL2; import org.dllearner.utilities.owl.OWLAPIAxiomConvertVisitor; +import org.dllearner.utilities.owl.OWLEntityTypeAdder; import org.semanticweb.owlapi.apibinding.OWLManager; import org.semanticweb.owlapi.io.RDFXMLOntologyFormat; import org.semanticweb.owlapi.io.SystemOutDocumentTarget; @@ -212,7 +216,7 @@ // restrict tested number of entities per type (only for testing purposes); // should be set to -1 in production mode - int maxEntitiesPerType = -1; + int maxEntitiesPerType = 5; // number of axioms which will be learned/considered (only applies to // some learners) @@ -462,7 +466,7 @@ ksFragment = ksCached; rc = rcCached; } else { - System.out.print("extracting fragment ... "); + System.out.print("extracting fragment ... ");//com.hp.hpl.jena.shared.impl.JenaParameters.enableEagerLiteralValidation = true; startTime = System.currentTimeMillis(); ConciseBoundedDescriptionGenerator cbdGen = new ConciseBoundedDescriptionGeneratorImpl(ks.getEndpoint(), cache, 2); Model model = ModelFactory.createDefaultModel(); @@ -471,6 +475,7 @@ model.add(cbd); } filter(model); + OWLEntityTypeAdder.addEntityTypes(model); runTime = System.currentTimeMillis() - startTime; System.out.println("done (" + model.size()+ " triples found in " + runTime + " ms)"); OWLOntology ontology = asOWLOntology(model); @@ -768,6 +773,13 @@ model.write(fos, "TURTLE", null); OWLOntologyManager man = OWLManager.createOWLOntologyManager(); OWLOntology ontology = man.loadOntologyFromOntologyDocument(new ByteArrayInputStream(baos.toByteArray())); + try { + man.saveOntology(ontology, new TurtleOntologyFormat(), new FileOutputStream("error.owl")); + } catch (OWLOntologyStorageException e) { + e.printStackTrace(); + } catch (FileNotFoundException e) { + e.printStackTrace(); + } return ontology; } catch (OWLOntologyCreationException e) { e.printStackTrace(); @@ -797,6 +809,7 @@ // fix URIs with spaces Resource newSubject = (Resource) subject; RDFNode newObject = object; + boolean validTriple = true; if (subject.isURIResource()) { String uri = subject.asResource().getURI(); if (uri.contains(" ")) { @@ -814,8 +827,11 @@ if (lit.getDatatype() == null || lit.getDatatype().equals(XSD.STRING)) { newObject = model.createLiteral("shortened", "en"); } + validTriple = CheckerLiterals.checkLiteral(object.asNode(), ErrorHandlerFactory.errorHandlerNoLogging, 1l, 1l); } - statementsToAdd.add(model.createStatement(newSubject, st.getPredicate(), newObject)); + if(validTriple){ + statementsToAdd.add(model.createStatement(newSubject, st.getPredicate(), newObject)); + } statementsToRemove.add(st); } Modified: trunk/pom.xml =================================================================== --- trunk/pom.xml 2013-05-03 08:49:17 UTC (rev 3940) +++ trunk/pom.xml 2013-05-04 05:50:47 
UTC (rev 3941) @@ -123,7 +123,8 @@ <dependency> <groupId>net.sourceforge.owlapi</groupId> <artifactId>owlapi-distribution</artifactId> - <version>3.4.4</version> + <version>3.4.3</version> + <type>pom</type> </dependency> <dependency> <groupId>net.sourceforge.owlapi</groupId> @@ -379,7 +380,7 @@ <dependency> <groupId>mysql</groupId> <artifactId>mysql-connector-java</artifactId> - <version>5.1.13</version> + <version>5.1.24</version> </dependency> <!--GWT Dependencies --> Modified: trunk/scripts/pom.xml =================================================================== --- trunk/scripts/pom.xml 2013-05-03 08:49:17 UTC (rev 3940) +++ trunk/scripts/pom.xml 2013-05-04 05:50:47 UTC (rev 3941) @@ -116,6 +116,7 @@ <dependency> <groupId>net.sourceforge.owlapi</groupId> <artifactId>owlapi-distribution</artifactId> + <type>pom</type> </dependency> <dependency> <groupId>net.sourceforge.owlapi</groupId> Modified: trunk/scripts/src/main/java/org/dllearner/scripts/OWLAxiomPatternDetectionEvaluation.java =================================================================== --- trunk/scripts/src/main/java/org/dllearner/scripts/OWLAxiomPatternDetectionEvaluation.java 2013-05-03 08:49:17 UTC (rev 3940) +++ trunk/scripts/src/main/java/org/dllearner/scripts/OWLAxiomPatternDetectionEvaluation.java 2013-05-04 05:50:47 UTC (rev 3941) @@ -11,17 +11,34 @@ import java.sql.SQLException; import java.util.Arrays; import java.util.Collection; +import java.util.LinkedHashMap; +import java.util.Map; +import java.util.Map.Entry; import java.util.prefs.Preferences; +import org.coode.owlapi.functionalparser.OWLFunctionalSyntaxOWLParser; import org.dllearner.algorithms.pattern.OWLAxiomPatternFinder; import org.dllearner.kb.repository.OntologyRepository; import org.dllearner.kb.repository.bioportal.BioPortalRepository; import org.dllearner.kb.repository.tones.TONESRepository; import org.ini4j.IniPreferences; import org.ini4j.InvalidFileFormatException; +import org.semanticweb.owlapi.apibinding.OWLManager; +import org.semanticweb.owlapi.io.OWLObjectRenderer; +import org.semanticweb.owlapi.io.OWLParserException; +import org.semanticweb.owlapi.io.StringDocumentSource; +import org.semanticweb.owlapi.io.ToStringRenderer; +import org.semanticweb.owlapi.model.OWLAxiom; +import org.semanticweb.owlapi.model.OWLOntology; +import org.semanticweb.owlapi.model.OWLOntologyCreationException; +import org.semanticweb.owlapi.model.UnloadableImportException; +import uk.ac.manchester.cs.owl.owlapi.mansyntaxrenderer.ManchesterOWLSyntaxOWLObjectRendererImpl; +import uk.ac.manchester.cs.owlapi.dlsyntax.DLSyntaxObjectRenderer; + public class OWLAxiomPatternDetectionEvaluation { + private OWLObjectRenderer axiomRenderer = new ManchesterOWLSyntaxOWLObjectRendererImpl(); private Connection conn; public OWLAxiomPatternDetectionEvaluation() { @@ -59,6 +76,9 @@ //create statistics for the repositories makeRepositoryStatistics(repositories); + + //get top n TBox, RBox and ABox patterns + makePatternStatistics(repositories); } private void analyze(Collection<OntologyRepository> repositories){ @@ -69,6 +89,35 @@ } } + private void makePatternStatistics(Collection<OntologyRepository> repositories){ + int n = 10; + String latex = ""; + Map<OWLAxiom, Integer> topNTBoxAxiomPatterns = getTopNTBoxAxiomPatterns(n); + latex += asLatex("Total TBox", topNTBoxAxiomPatterns) + "\n\n"; + Map<OWLAxiom, Integer> topNRBoxAxiomPatterns = getTopNRBoxAxiomPatterns(n); + latex += asLatex("Total RBox", topNRBoxAxiomPatterns) + "\n\n"; + Map<OWLAxiom, Integer> 
topNABoxAxiomPatterns = getTopNABoxAxiomPatterns(n); + latex += asLatex("Total ABox", topNABoxAxiomPatterns) + "\n\n"; + + //get top n TBox, RBox and ABox patterns by repository + + Map<OntologyRepository, Map<OWLAxiom, Integer>> topNTBoxAxiomPatternsByRepository = getTopNTBoxAxiomPatterns(repositories, n); + Map<OntologyRepository, Map<OWLAxiom, Integer>> topNRBoxAxiomPatternsByRepository = getTopNRBoxAxiomPatterns(repositories, n); + Map<OntologyRepository, Map<OWLAxiom, Integer>> topNABoxAxiomPatternsByRepository = getTopNABoxAxiomPatterns(repositories, n); + for (OntologyRepository repository : repositories) { + latex += asLatex(repository.getName() + " TBox", topNTBoxAxiomPatternsByRepository.get(repository)) + "\n\n"; + latex += asLatex(repository.getName() + " RBox", topNRBoxAxiomPatternsByRepository.get(repository)) + "\n\n"; + latex += asLatex(repository.getName() + " ABox", topNABoxAxiomPatternsByRepository.get(repository)) + "\n\n"; + } + try { + new FileOutputStream("pattern-statistics.tex").write(latex.getBytes()); + } catch (FileNotFoundException e) { + e.printStackTrace(); + } catch (IOException e) { + e.printStackTrace(); + } + } + private void makeRepositoryStatistics(Collection<OntologyRepository> repositories){ String latexTable = "\\begin{tabular}{lrr|rrr|rrr|rrr|rrr}"; latexTable += "\\toprule\n"; @@ -147,7 +196,7 @@ ps.setString(1, repository.getName()); minNumberOfAboxAxioms = count(ps); //get max number of abox axioms - ps = conn.prepareStatement("SELECT MAX(tbox_axioms) FROM Ontology WHERE repository=? AND iri NOT LIKE 'ERROR%'"); + ps = conn.prepareStatement("SELECT MAX(abox_axioms) FROM Ontology WHERE repository=? AND iri NOT LIKE 'ERROR%'"); ps.setString(1, repository.getName()); maxNumberOfAboxAxioms = count(ps); //get avg number of abox axioms @@ -186,6 +235,193 @@ } } + private String asLatex(String title, Map<OWLAxiom, Integer> topN){ + String latexTable = "\\begin{table}\n"; + latexTable += "\\begin{tabular}{lr}\n"; + latexTable += "\\toprule\n"; + latexTable += "Pattern & Frequency\\\\\\midrule\n"; + + for (Entry<OWLAxiom, Integer> entry : topN.entrySet()) { + OWLAxiom axiom = entry.getKey(); + Integer frequency = entry.getValue(); + + latexTable += axiomRenderer.render(axiom) + " & " + frequency + "\\\\\n"; + + } + latexTable += "\\bottomrule\\end{tabular}\n"; + latexTable += "\\caption{" + title + "}\n"; + latexTable += "\\end{table}\n"; + return latexTable; + } + + private Map<OWLAxiom, Integer> getTopNTBoxAxiomPatterns(int n){ + Map<OWLAxiom, Integer> topN = new LinkedHashMap<OWLAxiom, Integer>(); + PreparedStatement ps; + ResultSet rs; + try { + ps = conn.prepareStatement("SELECT pattern,SUM(occurrences) FROM " + + "Ontology_Pattern OP, Pattern P, Ontology O WHERE " + + "(P.id=OP.pattern_id AND O.id=OP.ontology_id AND P.axiom_type=?) 
" + + "GROUP BY P.id ORDER BY SUM(`OP`.`occurrences`) DESC LIMIT ?"); + ps.setString(1, "TBox"); + ps.setInt(2, n); + rs = ps.executeQuery(); + while(rs.next()){ + topN.put(asOWLAxiom(rs.getString(1)), rs.getInt(2)); + } + } catch(SQLException e){ + e.printStackTrace(); + } + return topN; + } + + private Map<OntologyRepository, Map<OWLAxiom, Integer>> getTopNTBoxAxiomPatterns(Collection<OntologyRepository> repositories, int n){ + Map<OntologyRepository, Map<OWLAxiom, Integer>> topNByRepository = new LinkedHashMap<OntologyRepository, Map<OWLAxiom,Integer>>(); + PreparedStatement ps; + ResultSet rs; + //for each repository + for (OntologyRepository repository : repositories) { + Map<OWLAxiom, Integer> topN = new LinkedHashMap<OWLAxiom, Integer>(); + try { + //get number of ontologies + ps = conn.prepareStatement("SELECT pattern,SUM(occurrences) FROM " + + "Ontology_Pattern OP, Pattern P, Ontology O WHERE " + + "(P.id=OP.pattern_id AND O.id=OP.ontology_id AND O.repository=? AND P.axiom_type=?) " + + "GROUP BY P.id ORDER BY SUM(`OP`.`occurrences`) DESC LIMIT ?"); + ps.setString(1, repository.getName()); + ps.setString(2, "TBox"); + ps.setInt(3, n); + rs = ps.executeQuery(); + while(rs.next()){ + topN.put(asOWLAxiom(rs.getString(1)), rs.getInt(2)); + } + } catch(SQLException e){ + e.printStackTrace(); + } + topNByRepository.put(repository, topN); + } + return topNByRepository; + } + + private Map<OWLAxiom, Integer> getTopNRBoxAxiomPatterns(int n){ + Map<OWLAxiom, Integer> topN = new LinkedHashMap<OWLAxiom, Integer>(); + PreparedStatement ps; + ResultSet rs; + try { + ps = conn.prepareStatement("SELECT pattern,SUM(occurrences) FROM " + + "Ontology_Pattern OP, Pattern P, Ontology O WHERE " + + "(P.id=OP.pattern_id AND O.id=OP.ontology_id AND P.axiom_type=?) " + + "GROUP BY P.id ORDER BY SUM(`OP`.`occurrences`) DESC LIMIT ?"); + ps.setString(1, "RBox"); + ps.setInt(2, n); + rs = ps.executeQuery(); + while(rs.next()){ + topN.put(asOWLAxiom(rs.getString(1)), rs.getInt(2)); + } + } catch(SQLException e){ + e.printStackTrace(); + } + return topN; + } + + private Map<OntologyRepository, Map<OWLAxiom, Integer>> getTopNRBoxAxiomPatterns(Collection<OntologyRepository> repositories, int n){ + Map<OntologyRepository, Map<OWLAxiom, Integer>> topNByRepository = new LinkedHashMap<OntologyRepository, Map<OWLAxiom,Integer>>(); + PreparedStatement ps; + ResultSet rs; + //for each repository + for (OntologyRepository repository : repositories) { + Map<OWLAxiom, Integer> topN = new LinkedHashMap<OWLAxiom, Integer>(); + try { + //get number of ontologies + ps = conn.prepareStatement("SELECT pattern,SUM(occurrences) FROM " + + "Ontology_Pattern OP, Pattern P, Ontology O WHERE " + + "(P.id=OP.pattern_id AND O.id=OP.ontology_id AND O.repository=? AND P.axiom_type=?) 
" + + "GROUP BY P.id ORDER BY SUM(`OP`.`occurrences`) DESC LIMIT ?"); + ps.setString(1, repository.getName()); + ps.setString(2, "RBox"); + ps.setInt(3, n); + rs = ps.executeQuery(); + while(rs.next()){ + topN.put(asOWLAxiom(rs.getString(1)), rs.getInt(2)); + } + } catch(SQLException e){ + e.printStackTrace(); + } + topNByRepository.put(repository, topN); + } + return topNByRepository; + } + + private Map<OWLAxiom, Integer> getTopNABoxAxiomPatterns(int n){ + Map<OWLAxiom, Integer> topN = new LinkedHashMap<OWLAxiom, Integer>(); + PreparedStatement ps; + ResultSet rs; + try { + ps = conn.prepareStatement("SELECT pattern,SUM(occurrences) FROM " + + "Ontology_Pattern OP, Pattern P, Ontology O WHERE " + + "(P.id=OP.pattern_id AND O.id=OP.ontology_id AND P.axiom_type=?) " + + "GROUP BY P.id ORDER BY SUM(`OP`.`occurrences`) DESC LIMIT ?"); + ps.setString(1, "ABox"); + ps.setInt(2, n); + rs = ps.executeQuery(); + while(rs.next()){ + topN.put(asOWLAxiom(rs.getString(1)), rs.getInt(2)); + } + } catch(SQLException e){ + e.printStackTrace(); + } + return topN; + } + + private Map<OntologyRepository, Map<OWLAxiom, Integer>> getTopNABoxAxiomPatterns(Collection<OntologyRepository> repositories, int n){ + Map<OntologyRepository, Map<OWLAxiom, Integer>> topNByRepository = new LinkedHashMap<OntologyRepository, Map<OWLAxiom,Integer>>(); + PreparedStatement ps; + ResultSet rs; + //for each repository + for (OntologyRepository repository : repositories) { + Map<OWLAxiom, Integer> topN = new LinkedHashMap<OWLAxiom, Integer>(); + try { + //get number of ontologies + ps = conn.prepareStatement("SELECT pattern,SUM(occurrences) FROM " + + "Ontology_Pattern OP, Pattern P, Ontology O WHERE " + + "(P.id=OP.pattern_id AND O.id=OP.ontology_id AND O.repository=? AND P.axiom_type=?) " + + "GROUP BY P.id ORDER BY SUM(`OP`.`occurrences`) DESC LIMIT ?"); + ps.setString(1, repository.getName()); + ps.setString(2, "ABox"); + ps.setInt(3, n); + rs = ps.executeQuery(); + while(rs.next()){ + topN.put(asOWLAxiom(rs.getString(1)), rs.getInt(2)); + } + } catch(SQLException e){ + e.printStackTrace(); + } + topNByRepository.put(repository, topN); + } + return topNByRepository; + } + + private OWLAxiom asOWLAxiom(String functionalSyntaxAxiomString){ + try { + StringDocumentSource s = new StringDocumentSource("Ontology(<http://www.pattern.org>" + functionalSyntaxAxiomString + ")"); + OWLFunctionalSyntaxOWLParser p = new OWLFunctionalSyntaxOWLParser(); + OWLOntology newOntology = OWLManager.createOWLOntologyManager().createOntology(); + p.parse(s, newOntology); + if(!newOntology.getLogicalAxioms().isEmpty()){ + return newOntology.getLogicalAxioms().iterator().next(); + } + } catch (UnloadableImportException e) { + e.printStackTrace(); + } catch (OWLOntologyCreationException e) { + e.printStackTrace(); + } catch (OWLParserException e) { + e.printStackTrace(); + } catch (IOException e) { + e.printStackTrace(); + } + return null; + } + private int count(PreparedStatement ps) throws SQLException{ ResultSet rs = ps.executeQuery(); rs.next(); @@ -193,6 +429,7 @@ } public static void main(String[] args) throws Exception { +// ToStringRenderer.getInstance().setRenderer(new DLSyntaxObjectRenderer()); new OWLAxiomPatternDetectionEvaluation().run(Arrays.asList( new TONESRepository(), new BioPortalRepository())); } This was sent by the SourceForge.net collaborative development platform, the world's largest Open Source development site. |
From: <lor...@us...> - 2013-05-04 06:14:56
Revision: 3942 http://sourceforge.net/p/dl-learner/code/3942 Author: lorenz_b Date: 2013-05-04 06:14:53 +0000 (Sat, 04 May 2013) Log Message: ----------- Some modifications in pattern detection script. Modified Paths: -------------- trunk/components-core/pom.xml trunk/components-core/src/main/java/org/dllearner/algorithms/pattern/OWLAxiomPatternFinder.java trunk/scripts/src/main/java/org/dllearner/scripts/OntologyMatching.java Modified: trunk/components-core/pom.xml =================================================================== --- trunk/components-core/pom.xml 2013-05-04 05:50:47 UTC (rev 3941) +++ trunk/components-core/pom.xml 2013-05-04 06:14:53 UTC (rev 3942) @@ -124,7 +124,7 @@ <dependency> <groupId>net.sourceforge.owlapi</groupId> <artifactId>owlapi-distribution</artifactId> - <version>3.4</version> + <type>pom</type> </dependency> <dependency> <groupId>net.sourceforge.owlapi</groupId> Modified: trunk/components-core/src/main/java/org/dllearner/algorithms/pattern/OWLAxiomPatternFinder.java =================================================================== --- trunk/components-core/src/main/java/org/dllearner/algorithms/pattern/OWLAxiomPatternFinder.java 2013-05-04 05:50:47 UTC (rev 3941) +++ trunk/components-core/src/main/java/org/dllearner/algorithms/pattern/OWLAxiomPatternFinder.java 2013-05-04 06:14:53 UTC (rev 3942) @@ -36,7 +36,6 @@ import org.semanticweb.owlapi.model.OWLAxiom; import org.semanticweb.owlapi.model.OWLDataFactory; import org.semanticweb.owlapi.model.OWLFunctionalDataPropertyAxiom; -import org.semanticweb.owlapi.model.OWLLogicalAxiom; import org.semanticweb.owlapi.model.OWLOntology; import org.semanticweb.owlapi.model.OWLOntologyAlreadyExistsException; import org.semanticweb.owlapi.model.OWLOntologyCreationException; @@ -46,7 +45,6 @@ import com.google.common.collect.HashMultiset; import com.google.common.collect.Multiset; -import com.google.common.collect.Multisets; public class OWLAxiomPatternFinder { @@ -363,19 +361,22 @@ Multiset<OWLAxiom> allAxiomPatterns = HashMultiset.create(); int i = 1; for (OntologyRepositoryEntry entry : entries) { - System.out.print(i + ": "); + System.out.print(i++ + ": "); URI uri = entry.getPhysicalURI(); // if(uri.toString().startsWith("http://rest.bioontology.org/bioportal/ontologies/download/42764")){ if (!ontologyProcessed(uri)) { - System.out.println("Loading \"" + entry.getOntologyShortName() + "\" from "+ uri); + System.out.print("Loading \"" + entry.getOntologyShortName() + "\" from "+ uri); try { manager = OWLManager.createOWLOntologyManager(); OWLOntology ontology = manager.loadOntology(IRI.create(uri)); Multiset<OWLAxiom> axiomPatterns = HashMultiset.create(); Set<OWLAxiom> logicalAxioms = new HashSet<OWLAxiom>(); for (AxiomType<?> type : AxiomType.AXIOM_TYPES) { - logicalAxioms.addAll(ontology.getAxioms(type, true)); + if(type.isLogical()){ + logicalAxioms.addAll(ontology.getAxioms(type, true)); + } } + System.out.println(" (" + logicalAxioms.size() + " axioms)"); for (OWLAxiom axiom : logicalAxioms) { OWLAxiom renamedAxiom = renamer.rename(axiom); axiomPatterns.add(renamedAxiom); Modified: trunk/scripts/src/main/java/org/dllearner/scripts/OntologyMatching.java =================================================================== --- trunk/scripts/src/main/java/org/dllearner/scripts/OntologyMatching.java 2013-05-04 05:50:47 UTC (rev 3941) +++ trunk/scripts/src/main/java/org/dllearner/scripts/OntologyMatching.java 2013-05-04 06:14:53 UTC (rev 3942) @@ -315,7 +315,7 @@ logger.info("Done."); if(performCrossValidation){ - 
org.dllearner.cli.CrossValidation cv = new org.dllearner.cli.CrossValidation(la, lp, rc, 5, false); +// org.dllearner.cli.CrossValidation cv = new org.dllearner.cli.CrossValidation(la, lp, rc, 5, false); } else { //apply the learning algorithm logger.info("Running learning algorithm..."); This was sent by the SourceForge.net collaborative development platform, the world's largest Open Source development site. |
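The axiom-collection change in revision 3942 restricts the pattern input to logical axiom types and includes the imports closure. In isolation it amounts to roughly the following helper (a sketch against OWL API 3.x; ontology stands for an already loaded OWLOntology):

import java.util.HashSet;
import java.util.Set;

import org.semanticweb.owlapi.model.AxiomType;
import org.semanticweb.owlapi.model.OWLAxiom;
import org.semanticweb.owlapi.model.OWLOntology;

public class LogicalAxiomCollector {

    // Collect all logical axioms of an ontology, including its imports closure.
    public static Set<OWLAxiom> collectLogicalAxioms(OWLOntology ontology) {
        Set<OWLAxiom> logicalAxioms = new HashSet<OWLAxiom>();
        for (AxiomType<?> type : AxiomType.AXIOM_TYPES) {
            if (type.isLogical()) {
                // true = also take axioms from imported ontologies into account
                logicalAxioms.addAll(ontology.getAxioms(type, true));
            }
        }
        return logicalAxioms;
    }
}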
From: <lor...@us...> - 2013-05-04 09:36:24
Revision: 3943 http://sourceforge.net/p/dl-learner/code/3943 Author: lorenz_b Date: 2013-05-04 09:36:21 +0000 (Sat, 04 May 2013) Log Message: ----------- Ignore illegal axioms? Modified Paths: -------------- trunk/components-core/src/main/java/org/dllearner/algorithms/pattern/OWLAxiomRenamer.java trunk/scripts/src/main/java/org/dllearner/scripts/OWLAxiomPatternDetectionEvaluation.java Modified: trunk/components-core/src/main/java/org/dllearner/algorithms/pattern/OWLAxiomRenamer.java =================================================================== --- trunk/components-core/src/main/java/org/dllearner/algorithms/pattern/OWLAxiomRenamer.java 2013-05-04 06:14:53 UTC (rev 3942) +++ trunk/components-core/src/main/java/org/dllearner/algorithms/pattern/OWLAxiomRenamer.java 2013-05-04 09:36:21 UTC (rev 3943) @@ -8,6 +8,7 @@ import java.util.Set; import java.util.TreeSet; +import org.semanticweb.owlapi.model.IRI; import org.semanticweb.owlapi.model.OWLAnnotationAssertionAxiom; import org.semanticweb.owlapi.model.OWLAnnotationPropertyDomainAxiom; import org.semanticweb.owlapi.model.OWLAnnotationPropertyRangeAxiom; @@ -66,6 +67,8 @@ private OWLClassExpressionRenamer expressionRenamer; private OWLAxiom renamedAxiom; + private boolean normalizeABoxAxioms = true; + public OWLAxiomRenamer(OWLDataFactory df) { this.df = df; } @@ -163,8 +166,13 @@ @Override public void visit(OWLDifferentIndividualsAxiom axiom) { Set<OWLIndividual> renamedIndividuals = new HashSet<OWLIndividual>(); - for(OWLIndividual ind : axiom.getIndividuals()){ - renamedIndividuals.add(expressionRenamer.rename(ind)); + if(normalizeABoxAxioms){ + renamedIndividuals.add(df.getOWLNamedIndividual(IRI.create("http://dl-learner.org/pattern/a"))); + renamedIndividuals.add(df.getOWLNamedIndividual(IRI.create("http://dl-learner.org/pattern/b"))); + } else { + for(OWLIndividual ind : axiom.getIndividuals()){ + renamedIndividuals.add(expressionRenamer.rename(ind)); + } } renamedAxiom = df.getOWLDifferentIndividualsAxiom(renamedIndividuals); } @@ -333,8 +341,13 @@ @Override public void visit(OWLSameIndividualAxiom axiom) { Set<OWLIndividual> renamedIndividuals = new HashSet<OWLIndividual>(); - for(OWLIndividual ind : axiom.getIndividuals()){ - renamedIndividuals.add(expressionRenamer.rename(ind)); + if(normalizeABoxAxioms){ + renamedIndividuals.add(df.getOWLNamedIndividual(IRI.create("http://dl-learner.org/pattern/a"))); + renamedIndividuals.add(df.getOWLNamedIndividual(IRI.create("http://dl-learner.org/pattern/b"))); + } else { + for(OWLIndividual ind : axiom.getIndividuals()){ + renamedIndividuals.add(expressionRenamer.rename(ind)); + } } renamedAxiom = df.getOWLSameIndividualAxiom(renamedIndividuals); } Modified: trunk/scripts/src/main/java/org/dllearner/scripts/OWLAxiomPatternDetectionEvaluation.java =================================================================== --- trunk/scripts/src/main/java/org/dllearner/scripts/OWLAxiomPatternDetectionEvaluation.java 2013-05-04 06:14:53 UTC (rev 3942) +++ trunk/scripts/src/main/java/org/dllearner/scripts/OWLAxiomPatternDetectionEvaluation.java 2013-05-04 09:36:21 UTC (rev 3943) @@ -72,7 +72,7 @@ public void run(Collection<OntologyRepository> repositories){ //analyze repositories - analyze(repositories); +// analyze(repositories); //create statistics for the repositories makeRepositoryStatistics(repositories); @@ -245,8 +245,9 @@ OWLAxiom axiom = entry.getKey(); Integer frequency = entry.getValue(); - latexTable += axiomRenderer.render(axiom) + " & " + frequency + "\\\\\n"; - + if(axiom != null){ + 
latexTable += axiomRenderer.render(axiom) + " & " + frequency + "\\\\\n"; + } } latexTable += "\\bottomrule\\end{tabular}\n"; latexTable += "\\caption{" + title + "}\n"; @@ -403,7 +404,7 @@ private OWLAxiom asOWLAxiom(String functionalSyntaxAxiomString){ try { - StringDocumentSource s = new StringDocumentSource("Ontology(<http://www.pattern.org>" + functionalSyntaxAxiomString + ")"); + StringDocumentSource s = new StringDocumentSource("Ontology(<http://www.pattern.org> " + functionalSyntaxAxiomString + ")"); OWLFunctionalSyntaxOWLParser p = new OWLFunctionalSyntaxOWLParser(); OWLOntology newOntology = OWLManager.createOWLOntologyManager().createOntology(); p.parse(s, newOntology); @@ -415,7 +416,7 @@ } catch (OWLOntologyCreationException e) { e.printStackTrace(); } catch (OWLParserException e) { - e.printStackTrace(); + System.err.println("Parsing failed for axiom " + functionalSyntaxAxiomString); } catch (IOException e) { e.printStackTrace(); } This was sent by the SourceForge.net collaborative development platform, the world's largest Open Source development site. |
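The normalizeABoxAxioms flag introduced in revision 3943 maps the individuals of SameIndividual and DifferentIndividuals axioms onto two fixed placeholder individuals, so that all such assertions collapse onto a single pattern. A minimal sketch of that idea (the placeholder IRIs http://dl-learner.org/pattern/a and .../b are taken from the diff; OWLDataFactoryImpl is used only to keep the example self-contained):

import java.util.HashSet;
import java.util.Set;

import org.semanticweb.owlapi.model.IRI;
import org.semanticweb.owlapi.model.OWLAxiom;
import org.semanticweb.owlapi.model.OWLDataFactory;
import org.semanticweb.owlapi.model.OWLIndividual;

import uk.ac.manchester.cs.owl.owlapi.OWLDataFactoryImpl;

public class ABoxNormalizationSketch {

    private static final String NS = "http://dl-learner.org/pattern/";

    // Build the normalized form of a DifferentIndividuals pattern: the concrete
    // individuals are replaced by the two fixed placeholders a and b.
    public static OWLAxiom normalizedDifferentIndividuals() {
        OWLDataFactory df = new OWLDataFactoryImpl();
        Set<OWLIndividual> placeholders = new HashSet<OWLIndividual>();
        placeholders.add(df.getOWLNamedIndividual(IRI.create(NS + "a")));
        placeholders.add(df.getOWLNamedIndividual(IRI.create(NS + "b")));
        return df.getOWLDifferentIndividualsAxiom(placeholders);
    }
}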
From: <lor...@us...> - 2013-05-04 10:59:46
Revision: 3945 http://sourceforge.net/p/dl-learner/code/3945 Author: lorenz_b Date: 2013-05-04 10:59:43 +0000 (Sat, 04 May 2013) Log Message: ----------- Improved script Latex output. Modified Paths: -------------- trunk/components-core/src/main/java/org/dllearner/algorithms/pattern/OWLAxiomRenamer.java trunk/scripts/src/main/java/org/dllearner/scripts/OWLAxiomPatternDetectionEvaluation.java Modified: trunk/components-core/src/main/java/org/dllearner/algorithms/pattern/OWLAxiomRenamer.java =================================================================== --- trunk/components-core/src/main/java/org/dllearner/algorithms/pattern/OWLAxiomRenamer.java 2013-05-04 10:20:34 UTC (rev 3944) +++ trunk/components-core/src/main/java/org/dllearner/algorithms/pattern/OWLAxiomRenamer.java 2013-05-04 10:59:43 UTC (rev 3945) @@ -68,6 +68,7 @@ private OWLAxiom renamedAxiom; private boolean normalizeABoxAxioms = true; + private boolean ignoreTrivialAxioms = true;//ignore Thing(a),SubClassOf(A,Thing),SubPropertyOf(A,TopProperty) public OWLAxiomRenamer(OWLDataFactory df) { this.df = df; Modified: trunk/scripts/src/main/java/org/dllearner/scripts/OWLAxiomPatternDetectionEvaluation.java =================================================================== --- trunk/scripts/src/main/java/org/dllearner/scripts/OWLAxiomPatternDetectionEvaluation.java 2013-05-04 10:20:34 UTC (rev 3944) +++ trunk/scripts/src/main/java/org/dllearner/scripts/OWLAxiomPatternDetectionEvaluation.java 2013-05-04 10:59:43 UTC (rev 3945) @@ -16,6 +16,7 @@ import java.util.Map.Entry; import java.util.prefs.Preferences; +import org.aksw.commons.util.Pair; import org.coode.owlapi.functionalparser.OWLFunctionalSyntaxOWLParser; import org.dllearner.algorithms.pattern.OWLAxiomPatternFinder; import org.dllearner.kb.repository.OntologyRepository; @@ -42,6 +43,8 @@ private OWLObjectRenderer axiomRenderer = new ManchesterOWLSyntaxOWLObjectRendererImpl(); private Connection conn; + + private boolean fancyLatex = false; public OWLAxiomPatternDetectionEvaluation() { initDBConnection(); @@ -98,14 +101,14 @@ //total pattern statistics for (AxiomTypeCategory axiomTypeCategory : AxiomTypeCategory.values()) { - Map<OWLAxiom, Integer> topNAxiomPatterns = getTopNAxiomPatterns(axiomTypeCategory, n); + Map<OWLAxiom, Pair<Integer, Integer>> topNAxiomPatterns = getTopNAxiomPatterns(axiomTypeCategory, n); latex += asLatex("Top " + n + " " + axiomTypeCategory.name() + " axiom patterns.", topNAxiomPatterns) + "\n\n"; } //get top n TBox, RBox and ABox patterns by repository for (OntologyRepository repository : repositories) { for (AxiomTypeCategory axiomTypeCategory : AxiomTypeCategory.values()) { - Map<OWLAxiom, Integer> topNAxiomPatterns = getTopNAxiomPatterns(repository, axiomTypeCategory, n); + Map<OWLAxiom, Pair<Integer, Integer>> topNAxiomPatterns = getTopNAxiomPatterns(repository, axiomTypeCategory, n); latex += asLatex("Top " + n + " " + axiomTypeCategory.name() + " axiom patterns for " + repository.getName() + " repository.", topNAxiomPatterns) + "\n\n"; } } @@ -235,32 +238,37 @@ } } - private String asLatex(String title, Map<OWLAxiom, Integer> topN){ + private String asLatex(String title, Map<OWLAxiom, Pair<Integer, Integer>> topN){ String latexTable = "\\begin{table}\n"; - latexTable += "\\begin{tabular}{lr}\n"; + latexTable += "\\begin{tabular}{lrr}\n"; latexTable += "\\toprule\n"; - latexTable += "Pattern & Frequency\\\\\\midrule\n"; + latexTable += "Pattern & Frequency & \\#Ontologies\\\\\\midrule\n"; - for (Entry<OWLAxiom, Integer> entry : 
topN.entrySet()) { + for (Entry<OWLAxiom, Pair<Integer, Integer>> entry : topN.entrySet()) { OWLAxiom axiom = entry.getKey(); - Integer frequency = entry.getValue(); + Integer frequency = entry.getValue().getKey(); + Integer idf = entry.getValue().getValue(); if(axiom != null){ - latexTable += axiomRenderer.render(axiom) + " & " + frequency + "\\\\\n"; + String axiomColumn = axiomRenderer.render(axiom); + if(fancyLatex){ + axiomColumn = "\\begin{lstlisting}[language=manchester]" + axiomColumn + "\\end{lstlisting}"; + } + latexTable += axiomColumn + " & " + frequency + " & " + idf + "\\\\\n"; } } - latexTable += "\\bottomrule\\end{tabular}\n"; + latexTable += "\\bottomrule\n\\end{tabular}\n"; latexTable += "\\caption{" + title + "}\n"; latexTable += "\\end{table}\n"; return latexTable; } - private Map<OWLAxiom, Integer> getTopNAxiomPatterns(AxiomTypeCategory axiomType, int n){ - Map<OWLAxiom, Integer> topN = new LinkedHashMap<OWLAxiom, Integer>(); + private Map<OWLAxiom, Pair<Integer, Integer>> getTopNAxiomPatterns(AxiomTypeCategory axiomType, int n){ + Map<OWLAxiom, Pair<Integer, Integer>> topN = new LinkedHashMap<OWLAxiom, Pair<Integer, Integer>>(); PreparedStatement ps; ResultSet rs; try { - ps = conn.prepareStatement("SELECT pattern,SUM(occurrences) FROM " + + ps = conn.prepareStatement("SELECT pattern,SUM(occurrences),COUNT(ontology_id) FROM " + "Ontology_Pattern OP, Pattern P, Ontology O WHERE " + "(P.id=OP.pattern_id AND O.id=OP.ontology_id AND P.axiom_type=?) " + "GROUP BY P.id ORDER BY SUM(`OP`.`occurrences`) DESC LIMIT ?"); @@ -268,7 +276,7 @@ ps.setInt(2, n); rs = ps.executeQuery(); while(rs.next()){ - topN.put(asOWLAxiom(rs.getString(1)), rs.getInt(2)); + topN.put(asOWLAxiom(rs.getString(1)), new Pair<Integer, Integer>(rs.getInt(2), rs.getInt(3))); } } catch(SQLException e){ e.printStackTrace(); @@ -276,13 +284,13 @@ return topN; } - private Map<OWLAxiom, Integer> getTopNAxiomPatterns(OntologyRepository repository, AxiomTypeCategory axiomType, int n){ + private Map<OWLAxiom, Pair<Integer, Integer>> getTopNAxiomPatterns(OntologyRepository repository, AxiomTypeCategory axiomType, int n){ + Map<OWLAxiom, Pair<Integer, Integer>> topN = new LinkedHashMap<OWLAxiom, Pair<Integer, Integer>>(); PreparedStatement ps; ResultSet rs; - Map<OWLAxiom, Integer> topN = new LinkedHashMap<OWLAxiom, Integer>(); try { //get number of ontologies - ps = conn.prepareStatement("SELECT pattern,SUM(occurrences) FROM " + + ps = conn.prepareStatement("SELECT pattern,SUM(occurrences),COUNT(ontology_id) FROM " + "Ontology_Pattern OP, Pattern P, Ontology O WHERE " + "(P.id=OP.pattern_id AND O.id=OP.ontology_id AND O.repository=? AND P.axiom_type=?) " + "GROUP BY P.id ORDER BY SUM(`OP`.`occurrences`) DESC LIMIT ?"); @@ -291,7 +299,7 @@ ps.setInt(3, n); rs = ps.executeQuery(); while(rs.next()){ - topN.put(asOWLAxiom(rs.getString(1)), rs.getInt(2)); + topN.put(asOWLAxiom(rs.getString(1)), new Pair<Integer, Integer>(rs.getInt(2), rs.getInt(3))); } } catch(SQLException e){ e.printStackTrace(); This was sent by the SourceForge.net collaborative development platform, the world's largest Open Source development site. |
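The reworked statistics query in revision 3945 returns, per pattern, both the summed number of occurrences and the number of ontologies the pattern appears in. Stripped of the OWL-specific post-processing, the query usage amounts to roughly the following (table and column names as in the diff; conn is assumed to be an open JDBC connection to the pattern database):

import java.sql.Connection;
import java.sql.PreparedStatement;
import java.sql.ResultSet;
import java.sql.SQLException;
import java.util.LinkedHashMap;
import java.util.Map;

public class TopPatternQuerySketch {

    // For a given axiom type category ("TBox", "RBox" or "ABox"), return the n most
    // frequent patterns as functional-syntax strings, each mapped to
    // {total occurrences, number of ontologies containing the pattern}.
    public static Map<String, int[]> topPatterns(Connection conn, String axiomType, int n) throws SQLException {
        Map<String, int[]> topN = new LinkedHashMap<String, int[]>();
        PreparedStatement ps = conn.prepareStatement(
                "SELECT pattern, SUM(occurrences), COUNT(ontology_id) FROM "
                + "Ontology_Pattern OP, Pattern P, Ontology O WHERE "
                + "P.id = OP.pattern_id AND O.id = OP.ontology_id AND P.axiom_type = ? "
                + "GROUP BY P.id ORDER BY SUM(OP.occurrences) DESC LIMIT ?");
        ps.setString(1, axiomType);
        ps.setInt(2, n);
        ResultSet rs = ps.executeQuery();
        while (rs.next()) {
            topN.put(rs.getString(1), new int[] { rs.getInt(2), rs.getInt(3) });
        }
        return topN;
    }
}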
From: <lor...@us...> - 2013-05-04 11:27:54
Revision: 3949 http://sourceforge.net/p/dl-learner/code/3949 Author: lorenz_b Date: 2013-05-04 11:27:51 +0000 (Sat, 04 May 2013) Log Message: ----------- Added Oxford Ontologies repository Modified Paths: -------------- trunk/scripts/src/main/java/org/dllearner/scripts/OWLAxiomPatternDetectionEvaluation.java Added Paths: ----------- trunk/components-core/src/main/java/org/dllearner/kb/repository/oxford/ trunk/components-core/src/main/java/org/dllearner/kb/repository/oxford/OxfordRepository.java Added: trunk/components-core/src/main/java/org/dllearner/kb/repository/oxford/OxfordRepository.java =================================================================== --- trunk/components-core/src/main/java/org/dllearner/kb/repository/oxford/OxfordRepository.java (rev 0) +++ trunk/components-core/src/main/java/org/dllearner/kb/repository/oxford/OxfordRepository.java 2013-05-04 11:27:51 UTC (rev 3949) @@ -0,0 +1,125 @@ +package org.dllearner.kb.repository.oxford; + +import java.net.URI; +import java.text.DecimalFormat; +import java.util.ArrayList; +import java.util.Collection; +import java.util.Collections; +import java.util.List; + +import org.apache.log4j.Logger; +import org.dllearner.kb.repository.OntologyRepository; +import org.dllearner.kb.repository.OntologyRepositoryEntry; +import org.semanticweb.owlapi.model.IRI; +import org.semanticweb.owlapi.util.OntologyIRIShortFormProvider; + +public class OxfordRepository implements OntologyRepository{ + + private static final Logger log = Logger.getLogger(OxfordRepository.class); + + private final String repositoryName = "Oxford"; + + private final URI repositoryLocation = URI.create("http://www.cs.ox.ac.uk/isg/ontologies/UID/"); + + private List<RepositoryEntry> entries; + + int numberOfEntries = 793; + + DecimalFormat df = new DecimalFormat("00000"); + + + public OxfordRepository() { + entries = new ArrayList<RepositoryEntry>(); + } + + @Override + public void initialize() { + refresh(); + } + + + public String getName() { + return repositoryName; + } + + + public String getLocation() { + return repositoryLocation.toString(); + } + + + public void refresh() { + fillRepository(); + } + + + public Collection<OntologyRepositoryEntry> getEntries() { + List<OntologyRepositoryEntry> ret = new ArrayList<OntologyRepositoryEntry>(); + ret.addAll(entries); + return ret; + } + + + public List<Object> getMetaDataKeys() { + return Collections.emptyList(); + } + + + public void dispose() throws Exception { + } + + ///////////////////////////////////////////////////////////////////////////////////////////////// + // + // Implementation details + + + private void fillRepository() { + entries.clear(); + for(int i = 1; i <= numberOfEntries; i++){ + entries.add(new RepositoryEntry(URI.create(repositoryLocation + df.format(i) + ".owl"))); + } + log.info("Loaded " + entries.size() + " ontology entries from Oxford."); + } + + private class RepositoryEntry implements OntologyRepositoryEntry { + + private String shortName; + + private URI ontologyURI; + + private URI physicalURI; + + public RepositoryEntry(URI ontologyIRI) { + this.ontologyURI = ontologyIRI;System.out.println(ontologyIRI); + OntologyIRIShortFormProvider sfp = new OntologyIRIShortFormProvider(); + shortName = sfp.getShortForm(IRI.create(ontologyIRI)); + physicalURI = ontologyIRI; + } + + + public String getOntologyShortName() { + return shortName; + } + + + public URI getOntologyURI() { + return ontologyURI; + } + + + public URI getPhysicalURI() { + return physicalURI; + } + + + public String 
getMetaData(Object key) { + return null; + } + + } + + public static void main(String[] args) throws Exception { + new OxfordRepository().fillRepository(); + } + +} Modified: trunk/scripts/src/main/java/org/dllearner/scripts/OWLAxiomPatternDetectionEvaluation.java =================================================================== --- trunk/scripts/src/main/java/org/dllearner/scripts/OWLAxiomPatternDetectionEvaluation.java 2013-05-04 11:13:37 UTC (rev 3948) +++ trunk/scripts/src/main/java/org/dllearner/scripts/OWLAxiomPatternDetectionEvaluation.java 2013-05-04 11:27:51 UTC (rev 3949) @@ -21,6 +21,7 @@ import org.dllearner.algorithms.pattern.OWLAxiomPatternFinder; import org.dllearner.kb.repository.OntologyRepository; import org.dllearner.kb.repository.bioportal.BioPortalRepository; +import org.dllearner.kb.repository.oxford.OxfordRepository; import org.dllearner.kb.repository.tones.TONESRepository; import org.ini4j.IniPreferences; import org.ini4j.InvalidFileFormatException; @@ -375,7 +376,7 @@ analyzeRepositories = Boolean.parseBoolean(args[0]); } new OWLAxiomPatternDetectionEvaluation().run(analyzeRepositories, Arrays.asList( - new TONESRepository(), new BioPortalRepository())); + new TONESRepository(), new BioPortalRepository(), new OxfordRepository())); } This was sent by the SourceForge.net collaborative development platform, the world's largest Open Source development site. |
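The new OxfordRepository does not crawl a listing; it simply enumerates 793 numbered ontology documents below a fixed base URI with a zero-padded counter. The naming scheme can be reproduced in a few lines (base URI and entry count taken from the diff):

import java.net.URI;
import java.text.DecimalFormat;
import java.util.ArrayList;
import java.util.List;

public class OxfordUriSketch {

    public static void main(String[] args) {
        String base = "http://www.cs.ox.ac.uk/isg/ontologies/UID/";
        int numberOfEntries = 793;
        DecimalFormat df = new DecimalFormat("00000");

        // yields .../UID/00001.owl up to .../UID/00793.owl
        List<URI> entries = new ArrayList<URI>();
        for (int i = 1; i <= numberOfEntries; i++) {
            entries.add(URI.create(base + df.format(i) + ".owl"));
        }
        System.out.println(entries.size() + " ontology URIs, first: " + entries.get(0));
    }
}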
From: <lor...@us...> - 2013-05-05 14:24:35
Revision: 3950 http://sourceforge.net/p/dl-learner/code/3950 Author: lorenz_b Date: 2013-05-05 14:24:32 +0000 (Sun, 05 May 2013) Log Message: ----------- Added script to evaluate patterns. Modified Paths: -------------- trunk/components-core/src/main/java/org/dllearner/core/EvaluatedAxiom.java trunk/components-core/src/main/java/org/dllearner/reasoning/SPARQLReasoner.java trunk/components-core/src/main/java/org/dllearner/utilities/owl/OWLClassExpressionToSPARQLConverter.java Added Paths: ----------- trunk/scripts/src/main/java/org/dllearner/scripts/pattern/ trunk/scripts/src/main/java/org/dllearner/scripts/pattern/OWLAxiomPatternDetectionEvaluation.java trunk/scripts/src/main/java/org/dllearner/scripts/pattern/OWLAxiomPatternUsageEvaluation.java Removed Paths: ------------- trunk/scripts/src/main/java/org/dllearner/scripts/OWLAxiomPatternDetectionEvaluation.java Modified: trunk/components-core/src/main/java/org/dllearner/core/EvaluatedAxiom.java =================================================================== --- trunk/components-core/src/main/java/org/dllearner/core/EvaluatedAxiom.java 2013-05-04 11:27:51 UTC (rev 3949) +++ trunk/components-core/src/main/java/org/dllearner/core/EvaluatedAxiom.java 2013-05-05 14:24:32 UTC (rev 3950) @@ -26,11 +26,14 @@ import java.util.HashMap; import java.util.List; import java.util.Map; +import java.util.Set; +import java.util.TreeSet; import org.apache.commons.codec.digest.DigestUtils; import org.dllearner.core.owl.Axiom; import org.dllearner.utilities.EnrichmentVocabulary; import org.dllearner.utilities.PrefixCCMap; +import org.dllearner.utilities.owl.AxiomComparator; import org.dllearner.utilities.owl.OWLAPIConverter; import org.semanticweb.owlapi.model.IRI; import org.semanticweb.owlapi.model.OWLAnnotation; @@ -44,9 +47,10 @@ import uk.ac.manchester.cs.owl.owlapi.mansyntaxrenderer.ManchesterOWLSyntaxObjectRenderer; import uk.ac.manchester.cs.owl.owlapi.mansyntaxrenderer.ManchesterOWLSyntaxPrefixNameShortFormProvider; -public class EvaluatedAxiom { +public class EvaluatedAxiom implements Comparable<EvaluatedAxiom>{ private static DecimalFormat df = new DecimalFormat("##0.0"); + private AxiomComparator axiomComparator = new AxiomComparator(); private Axiom axiom; private Score score; @@ -136,5 +140,38 @@ return str; } + public static List<EvaluatedAxiom> getBestEvaluatedAxioms(Set<EvaluatedAxiom> evaluatedAxioms, int nrOfAxioms) { + return getBestEvaluatedAxioms(evaluatedAxioms, nrOfAxioms, 0.0); + } + + public static List<EvaluatedAxiom> getBestEvaluatedAxioms(Set<EvaluatedAxiom> evaluatedAxioms, double accuracyThreshold) { + return getBestEvaluatedAxioms(evaluatedAxioms, Integer.MAX_VALUE, accuracyThreshold); + } + public static List<EvaluatedAxiom> getBestEvaluatedAxioms(Set<EvaluatedAxiom> evaluatedAxioms, int nrOfAxioms, + double accuracyThreshold) { + List<EvaluatedAxiom> returnList = new ArrayList<EvaluatedAxiom>(); + + //get the currently best evaluated axioms + Set<EvaluatedAxiom> orderedEvaluatedAxioms = new TreeSet<EvaluatedAxiom>(evaluatedAxioms); + + for(EvaluatedAxiom evAx : orderedEvaluatedAxioms){ + if(evAx.getScore().getAccuracy() >= accuracyThreshold && returnList.size() < nrOfAxioms){ + returnList.add(evAx); + } + } + + return returnList; + } + + @Override + public int compareTo(EvaluatedAxiom other) { + int ret = Double.compare(score.getAccuracy(), other.getScore().getAccuracy()); + if(ret == 0){ + ret = axiomComparator.compare(axiom, other.getAxiom()); + } + return ret; + } + + } Modified: 
trunk/components-core/src/main/java/org/dllearner/reasoning/SPARQLReasoner.java =================================================================== --- trunk/components-core/src/main/java/org/dllearner/reasoning/SPARQLReasoner.java 2013-05-04 11:27:51 UTC (rev 3949) +++ trunk/components-core/src/main/java/org/dllearner/reasoning/SPARQLReasoner.java 2013-05-05 14:24:32 UTC (rev 3950) @@ -450,8 +450,12 @@ } public Set<NamedClass> getTypes() { + return getTypes((String)null); + } + + public Set<NamedClass> getTypes(String namespace) { Set<NamedClass> types = new TreeSet<NamedClass>(); - String query = String.format("SELECT DISTINCT ?class WHERE {[] a ?class.}"); + String query = String.format("SELECT DISTINCT ?class WHERE {[] a ?class." + (namespace != null ? ("FILTER(REGEX(?class,'^" + namespace + "'))") : "") + "}"); ResultSet rs = executeSelectQuery(query); QuerySolution qs; while(rs.hasNext()){ Modified: trunk/components-core/src/main/java/org/dllearner/utilities/owl/OWLClassExpressionToSPARQLConverter.java =================================================================== --- trunk/components-core/src/main/java/org/dllearner/utilities/owl/OWLClassExpressionToSPARQLConverter.java 2013-05-04 11:27:51 UTC (rev 3949) +++ trunk/components-core/src/main/java/org/dllearner/utilities/owl/OWLClassExpressionToSPARQLConverter.java 2013-05-05 14:24:32 UTC (rev 3950) @@ -2,6 +2,7 @@ import java.util.Collections; import java.util.HashMap; +import java.util.HashSet; import java.util.List; import java.util.Map; import java.util.Set; @@ -74,7 +75,7 @@ private OWLDataFactory df = new OWLDataFactoryImpl(); private Map<Integer, Boolean> intersection; - private Set<? extends OWLEntity> variableEntities; + private Set<? extends OWLEntity> variableEntities = new HashSet<OWLEntity>(); public OWLClassExpressionToSPARQLConverter() { } @@ -97,6 +98,10 @@ return asQuery(rootVariable, expr, Collections.<OWLEntity>emptySet()); } + public Query asQuery(String rootVariable, OWLClassExpression expr, boolean countQuery){ + return asQuery(rootVariable, expr, Collections.<OWLEntity>emptySet()); + } + public Query asQuery(String rootVariable, OWLClassExpression expr, Set<? 
extends OWLEntity> variableEntities){ this.variableEntities = variableEntities; String queryString = "SELECT DISTINCT "; @@ -108,7 +113,7 @@ String var = variablesMapping.get(owlEntity); queryString += var + " "; } - queryString += "COUNT(" + rootVariable + ") WHERE {"; + queryString += "(COUNT(DISTINCT " + rootVariable + ") AS ?cnt) WHERE {"; } queryString += triplePattern; @@ -119,10 +124,15 @@ String var = variablesMapping.get(owlEntity); queryString += var; } + queryString += " ORDER BY DESC(?cnt)"; } return QueryFactory.create(queryString, Syntax.syntaxARQ); } + public Map<OWLEntity, String> getVariablesMapping() { + return variablesMapping; + } + private void reset(){ variablesMapping.clear(); variables.clear(); Deleted: trunk/scripts/src/main/java/org/dllearner/scripts/OWLAxiomPatternDetectionEvaluation.java =================================================================== --- trunk/scripts/src/main/java/org/dllearner/scripts/OWLAxiomPatternDetectionEvaluation.java 2013-05-04 11:27:51 UTC (rev 3949) +++ trunk/scripts/src/main/java/org/dllearner/scripts/OWLAxiomPatternDetectionEvaluation.java 2013-05-05 14:24:32 UTC (rev 3950) @@ -1,383 +0,0 @@ -package org.dllearner.scripts; - -import java.io.FileNotFoundException; -import java.io.FileOutputStream; -import java.io.IOException; -import java.io.InputStream; -import java.sql.Connection; -import java.sql.DriverManager; -import java.sql.PreparedStatement; -import java.sql.ResultSet; -import java.sql.SQLException; -import java.util.Arrays; -import java.util.Collection; -import java.util.LinkedHashMap; -import java.util.Map; -import java.util.Map.Entry; -import java.util.prefs.Preferences; - -import org.aksw.commons.util.Pair; -import org.coode.owlapi.functionalparser.OWLFunctionalSyntaxOWLParser; -import org.dllearner.algorithms.pattern.OWLAxiomPatternFinder; -import org.dllearner.kb.repository.OntologyRepository; -import org.dllearner.kb.repository.bioportal.BioPortalRepository; -import org.dllearner.kb.repository.oxford.OxfordRepository; -import org.dllearner.kb.repository.tones.TONESRepository; -import org.ini4j.IniPreferences; -import org.ini4j.InvalidFileFormatException; -import org.semanticweb.owlapi.apibinding.OWLManager; -import org.semanticweb.owlapi.io.OWLObjectRenderer; -import org.semanticweb.owlapi.io.OWLParserException; -import org.semanticweb.owlapi.io.StringDocumentSource; -import org.semanticweb.owlapi.model.OWLAxiom; -import org.semanticweb.owlapi.model.OWLOntology; -import org.semanticweb.owlapi.model.OWLOntologyCreationException; -import org.semanticweb.owlapi.model.UnloadableImportException; - -import uk.ac.manchester.cs.owl.owlapi.mansyntaxrenderer.ManchesterOWLSyntaxOWLObjectRendererImpl; - -public class OWLAxiomPatternDetectionEvaluation { - - enum AxiomTypeCategory{ - TBox, RBox, ABox - } - - private OWLObjectRenderer axiomRenderer = new ManchesterOWLSyntaxOWLObjectRendererImpl(); - private Connection conn; - - private boolean fancyLatex = false; - - public OWLAxiomPatternDetectionEvaluation() { - initDBConnection(); - } - - private void initDBConnection() { - try { - InputStream is = this.getClass().getClassLoader().getResourceAsStream("db_settings.ini"); - Preferences prefs = new IniPreferences(is); - String dbServer = prefs.node("database").get("server", null); - String dbName = prefs.node("database").get("name", null); - String dbUser = prefs.node("database").get("user", null); - String dbPass = prefs.node("database").get("pass", null); - - Class.forName("com.mysql.jdbc.Driver"); - String url = 
"jdbc:mysql://" + dbServer + "/" + dbName; - conn = DriverManager.getConnection(url, dbUser, dbPass); - } catch (ClassNotFoundException e) { - e.printStackTrace(); - } catch (SQLException e) { - e.printStackTrace(); - } catch (InvalidFileFormatException e) { - e.printStackTrace(); - } catch (FileNotFoundException e) { - e.printStackTrace(); - } catch (IOException e) { - e.printStackTrace(); - } - } - - public void run(boolean analyzeRepositories, Collection<OntologyRepository> repositories){ - //analyze repositories - if(analyzeRepositories){ - analyze(repositories); - } - - //create statistics for the repositories - makeRepositoryStatistics(repositories); - - //get top n TBox, RBox and ABox patterns - makePatternStatistics(repositories); - } - - public void run(Collection<OntologyRepository> repositories){ - run(true, repositories); - } - - private void analyze(Collection<OntologyRepository> repositories){ - for (OntologyRepository repository : repositories) { - repository.initialize(); - OWLAxiomPatternFinder patternFinder = new OWLAxiomPatternFinder(repository, conn); - patternFinder.start(); - } - } - - private void makePatternStatistics(Collection<OntologyRepository> repositories){ - int n = 10; - - String latex = ""; - - //total pattern statistics - for (AxiomTypeCategory axiomTypeCategory : AxiomTypeCategory.values()) { - Map<OWLAxiom, Pair<Integer, Integer>> topNAxiomPatterns = getTopNAxiomPatterns(axiomTypeCategory, n); - latex += asLatex("Top " + n + " " + axiomTypeCategory.name() + " axiom patterns.", topNAxiomPatterns) + "\n\n"; - } - - //get top n TBox, RBox and ABox patterns by repository - for (OntologyRepository repository : repositories) { - for (AxiomTypeCategory axiomTypeCategory : AxiomTypeCategory.values()) { - Map<OWLAxiom, Pair<Integer, Integer>> topNAxiomPatterns = getTopNAxiomPatterns(repository, axiomTypeCategory, n); - latex += asLatex("Top " + n + " " + axiomTypeCategory.name() + " axiom patterns for " + repository.getName() + " repository.", topNAxiomPatterns) + "\n\n"; - } - } - try { - new FileOutputStream("pattern-statistics.tex").write(latex.getBytes()); - } catch (FileNotFoundException e) { - e.printStackTrace(); - } catch (IOException e) { - e.printStackTrace(); - } - } - - private void makeRepositoryStatistics(Collection<OntologyRepository> repositories){ - String latexTable = "\\begin{tabular}{lrr|rrr|rrr|rrr|rrr}"; - latexTable += "\\toprule\n"; - latexTable += "Repository & \\multicolumn{2}{c}{\\#Ontologies} & \\multicolumn{12}{c}{\\#Axioms} \\\\\n"; - latexTable += "& Total & Error & \\multicolumn{3}{c}{Total} & \\multicolumn{3}{c}{Tbox} & \\multicolumn{3}{c}{RBox} & \\multicolumn{3}{c}{Abox} \\\\\\midrule\n"; - latexTable += "& & & Min & Avg & Max & Min & Avg & Max & Min & Avg & Max & Min & Avg & Max \\\\\\midrule\n"; - - - PreparedStatement ps; - ResultSet rs; - - int numberOfOntologies; - int numberOfErrorOntologies; - int minNumberOfLogicalAxioms; - int maxNumberOfLogicalAxioms; - int avgNumberOfLogicalAxioms; - int minNumberOfTboxAxioms; - int maxNumberOfTboxAxioms; - int avgNumberOfTboxAxioms; - int minNumberOfRboxAxioms; - int maxNumberOfRboxAxioms; - int avgNumberOfRboxAxioms; - int minNumberOfAboxAxioms; - int maxNumberOfAboxAxioms; - int avgNumberOfAboxAxioms; - - //for each repository - for (OntologyRepository repository : repositories) { - try { - //get number of ontologies - ps = conn.prepareStatement("SELECT COUNT(*) FROM Ontology WHERE repository=?"); - ps.setString(1, repository.getName()); - numberOfOntologies = count(ps); - //get 
number of error causing ontologies - ps = conn.prepareStatement("SELECT COUNT(*) FROM Ontology WHERE repository=? AND iri LIKE 'ERROR%'"); - ps.setString(1, repository.getName()); - numberOfErrorOntologies = count(ps); - //get min number of logical axioms - ps = conn.prepareStatement("SELECT MIN(logical_axioms) FROM Ontology WHERE repository=? AND iri NOT LIKE 'ERROR%'"); - ps.setString(1, repository.getName()); - minNumberOfLogicalAxioms = count(ps); - //get max number of logical axioms - ps = conn.prepareStatement("SELECT MAX(logical_axioms) FROM Ontology WHERE repository=? AND iri NOT LIKE 'ERROR%'"); - ps.setString(1, repository.getName()); - maxNumberOfLogicalAxioms = count(ps); - //get avg number of logical axioms - ps = conn.prepareStatement("SELECT AVG(logical_axioms) FROM Ontology WHERE repository=? AND iri NOT LIKE 'ERROR%'"); - ps.setString(1, repository.getName()); - avgNumberOfLogicalAxioms = count(ps); - //get min number of tbox axioms - ps = conn.prepareStatement("SELECT MIN(tbox_axioms) FROM Ontology WHERE repository=? AND iri NOT LIKE 'ERROR%'"); - ps.setString(1, repository.getName()); - minNumberOfTboxAxioms = count(ps); - //get max number of tbox axioms - ps = conn.prepareStatement("SELECT MAX(tbox_axioms) FROM Ontology WHERE repository=? AND iri NOT LIKE 'ERROR%'"); - ps.setString(1, repository.getName()); - maxNumberOfTboxAxioms = count(ps); - //get avg number of tbox axioms - ps = conn.prepareStatement("SELECT AVG(tbox_axioms) FROM Ontology WHERE repository=? AND iri NOT LIKE 'ERROR%'"); - ps.setString(1, repository.getName()); - avgNumberOfTboxAxioms = count(ps); - //get min number of rbox axioms - ps = conn.prepareStatement("SELECT MIN(rbox_axioms) FROM Ontology WHERE repository=? AND iri NOT LIKE 'ERROR%'"); - ps.setString(1, repository.getName()); - minNumberOfRboxAxioms = count(ps); - //get max number of rbox axioms - ps = conn.prepareStatement("SELECT MAX(rbox_axioms) FROM Ontology WHERE repository=? AND iri NOT LIKE 'ERROR%'"); - ps.setString(1, repository.getName()); - maxNumberOfRboxAxioms = count(ps); - //get avg number of rbox axioms - ps = conn.prepareStatement("SELECT AVG(rbox_axioms) FROM Ontology WHERE repository=? AND iri NOT LIKE 'ERROR%'"); - ps.setString(1, repository.getName()); - avgNumberOfRboxAxioms = count(ps); - //get min number of abox axioms - ps = conn.prepareStatement("SELECT MIN(abox_axioms) FROM Ontology WHERE repository=? AND iri NOT LIKE 'ERROR%'"); - ps.setString(1, repository.getName()); - minNumberOfAboxAxioms = count(ps); - //get max number of abox axioms - ps = conn.prepareStatement("SELECT MAX(abox_axioms) FROM Ontology WHERE repository=? AND iri NOT LIKE 'ERROR%'"); - ps.setString(1, repository.getName()); - maxNumberOfAboxAxioms = count(ps); - //get avg number of abox axioms - ps = conn.prepareStatement("SELECT AVG(abox_axioms) FROM Ontology WHERE repository=? 
AND iri NOT LIKE 'ERROR%'"); - ps.setString(1, repository.getName()); - avgNumberOfAboxAxioms = count(ps); - - latexTable += - repository.getName() + "&" + - numberOfOntologies + "&" + - numberOfErrorOntologies + "&" + - minNumberOfLogicalAxioms + "&" + - avgNumberOfLogicalAxioms + "&" + - maxNumberOfLogicalAxioms + "&" + - minNumberOfTboxAxioms + "&" + - avgNumberOfTboxAxioms + "&" + - maxNumberOfTboxAxioms + "&" + - minNumberOfRboxAxioms + "&" + - avgNumberOfRboxAxioms + "&" + - maxNumberOfRboxAxioms + "&" + - minNumberOfAboxAxioms + "&" + - avgNumberOfAboxAxioms + "&" + - maxNumberOfAboxAxioms + "\\\\\n"; - - } catch (SQLException e) { - e.printStackTrace(); - } - } - latexTable += "\\bottomrule\\end{tabular}"; - try { - new FileOutputStream("repository-statistics.tex").write(latexTable.getBytes()); - } catch (FileNotFoundException e) { - e.printStackTrace(); - } catch (IOException e) { - e.printStackTrace(); - } - } - - private String asLatex(String title, Map<OWLAxiom, Pair<Integer, Integer>> topN){ - String latexTable = "\\begin{table}\n"; - latexTable += "\\begin{tabular}{lrr}\n"; - latexTable += "\\toprule\n"; - latexTable += "Pattern & Frequency & \\#Ontologies\\\\\\midrule\n"; - - for (Entry<OWLAxiom, Pair<Integer, Integer>> entry : topN.entrySet()) { - OWLAxiom axiom = entry.getKey(); - Integer frequency = entry.getValue().getKey(); - Integer idf = entry.getValue().getValue(); - - if(axiom != null){ - String axiomColumn = axiomRenderer.render(axiom); - if(fancyLatex){ - axiomColumn = "\\begin{lstlisting}[language=manchester]" + axiomColumn + "\\end{lstlisting}"; - } - latexTable += axiomColumn + " & " + frequency + " & " + idf + "\\\\\n"; - } - } - latexTable += "\\bottomrule\n\\end{tabular}\n"; - latexTable += "\\caption{" + title + "}\n"; - latexTable += "\\end{table}\n"; - return latexTable; - } - - private Map<OWLAxiom, Pair<Integer, Integer>> getTopNAxiomPatterns(AxiomTypeCategory axiomType, int n){ - Map<OWLAxiom, Pair<Integer, Integer>> topN = new LinkedHashMap<OWLAxiom, Pair<Integer, Integer>>(); - PreparedStatement ps; - ResultSet rs; - try { - ps = conn.prepareStatement("SELECT pattern,SUM(occurrences),COUNT(ontology_id) FROM " + - "Ontology_Pattern OP, Pattern P, Ontology O WHERE " + - "(P.id=OP.pattern_id AND O.id=OP.ontology_id AND P.axiom_type=?) " + - "GROUP BY P.id ORDER BY SUM(`OP`.`occurrences`) DESC LIMIT ?"); - ps.setString(1, axiomType.name()); - ps.setInt(2, n); - rs = ps.executeQuery(); - while(rs.next()){ - topN.put(asOWLAxiom(rs.getString(1)), new Pair<Integer, Integer>(rs.getInt(2), rs.getInt(3))); - } - } catch(SQLException e){ - e.printStackTrace(); - } - return topN; - } - - private Map<OWLAxiom, Pair<Integer, Integer>> getTopNAxiomPatterns(OntologyRepository repository, AxiomTypeCategory axiomType, int n){ - Map<OWLAxiom, Pair<Integer, Integer>> topN = new LinkedHashMap<OWLAxiom, Pair<Integer, Integer>>(); - PreparedStatement ps; - ResultSet rs; - try { - //get number of ontologies - ps = conn.prepareStatement("SELECT pattern,SUM(occurrences),COUNT(ontology_id) FROM " + - "Ontology_Pattern OP, Pattern P, Ontology O WHERE " + - "(P.id=OP.pattern_id AND O.id=OP.ontology_id AND O.repository=? AND P.axiom_type=?) 
" + - "GROUP BY P.id ORDER BY SUM(`OP`.`occurrences`) DESC LIMIT ?"); - ps.setString(1, repository.getName()); - ps.setString(2, axiomType.name()); - ps.setInt(3, n); - rs = ps.executeQuery(); - while(rs.next()){ - topN.put(asOWLAxiom(rs.getString(1)), new Pair<Integer, Integer>(rs.getInt(2), rs.getInt(3))); - } - } catch(SQLException e){ - e.printStackTrace(); - } - return topN; - } - - private Map<OntologyRepository, Map<OWLAxiom, Integer>> getTopNAxiomPatterns(Collection<OntologyRepository> repositories, AxiomTypeCategory axiomType, int n){ - Map<OntologyRepository, Map<OWLAxiom, Integer>> topNByRepository = new LinkedHashMap<OntologyRepository, Map<OWLAxiom,Integer>>(); - PreparedStatement ps; - ResultSet rs; - //for each repository - for (OntologyRepository repository : repositories) { - Map<OWLAxiom, Integer> topN = new LinkedHashMap<OWLAxiom, Integer>(); - try { - //get number of ontologies - ps = conn.prepareStatement("SELECT pattern,SUM(occurrences) FROM " + - "Ontology_Pattern OP, Pattern P, Ontology O WHERE " + - "(P.id=OP.pattern_id AND O.id=OP.ontology_id AND O.repository=? AND P.axiom_type=?) " + - "GROUP BY P.id ORDER BY SUM(`OP`.`occurrences`) DESC LIMIT ?"); - ps.setString(1, repository.getName()); - ps.setString(2, "RBox"); - ps.setInt(3, n); - rs = ps.executeQuery(); - while(rs.next()){ - topN.put(asOWLAxiom(rs.getString(1)), rs.getInt(2)); - } - } catch(SQLException e){ - e.printStackTrace(); - } - topNByRepository.put(repository, topN); - } - return topNByRepository; - } - - private OWLAxiom asOWLAxiom(String functionalSyntaxAxiomString){ - try { - StringDocumentSource s = new StringDocumentSource("Ontology(<http://www.pattern.org> " + functionalSyntaxAxiomString + ")"); - OWLFunctionalSyntaxOWLParser p = new OWLFunctionalSyntaxOWLParser(); - OWLOntology newOntology = OWLManager.createOWLOntologyManager().createOntology(); - p.parse(s, newOntology); - if(!newOntology.getLogicalAxioms().isEmpty()){ - return newOntology.getLogicalAxioms().iterator().next(); - } - } catch (UnloadableImportException e) { - e.printStackTrace(); - } catch (OWLOntologyCreationException e) { - e.printStackTrace(); - } catch (OWLParserException e) { - System.err.println("Parsing failed for axiom " + functionalSyntaxAxiomString); - } catch (IOException e) { - e.printStackTrace(); - } - return null; - } - - private int count(PreparedStatement ps) throws SQLException{ - ResultSet rs = ps.executeQuery(); - rs.next(); - return rs.getInt(1); - } - - public static void main(String[] args) throws Exception { -// ToStringRenderer.getInstance().setRenderer(new DLSyntaxObjectRenderer()); - boolean analyzeRepositories = true; - if(args.length == 1){ - analyzeRepositories = Boolean.parseBoolean(args[0]); - } - new OWLAxiomPatternDetectionEvaluation().run(analyzeRepositories, Arrays.asList( - new TONESRepository(), new BioPortalRepository(), new OxfordRepository())); - } - - -} Added: trunk/scripts/src/main/java/org/dllearner/scripts/pattern/OWLAxiomPatternDetectionEvaluation.java =================================================================== --- trunk/scripts/src/main/java/org/dllearner/scripts/pattern/OWLAxiomPatternDetectionEvaluation.java (rev 0) +++ trunk/scripts/src/main/java/org/dllearner/scripts/pattern/OWLAxiomPatternDetectionEvaluation.java 2013-05-05 14:24:32 UTC (rev 3950) @@ -0,0 +1,408 @@ +package org.dllearner.scripts.pattern; + +import java.io.FileNotFoundException; +import java.io.FileOutputStream; +import java.io.IOException; +import java.io.InputStream; +import java.sql.Connection; 
+import java.sql.DriverManager; +import java.sql.PreparedStatement; +import java.sql.ResultSet; +import java.sql.SQLException; +import java.util.ArrayList; +import java.util.Arrays; +import java.util.Collection; +import java.util.LinkedHashMap; +import java.util.List; +import java.util.Map; +import java.util.Map.Entry; +import java.util.Set; +import java.util.prefs.Preferences; + +import org.aksw.commons.util.Pair; +import org.coode.owlapi.functionalparser.OWLFunctionalSyntaxOWLParser; +import org.dllearner.algorithms.pattern.OWLAxiomPatternFinder; +import org.dllearner.core.owl.NamedClass; +import org.dllearner.kb.SparqlEndpointKS; +import org.dllearner.kb.repository.OntologyRepository; +import org.dllearner.kb.repository.bioportal.BioPortalRepository; +import org.dllearner.kb.repository.oxford.OxfordRepository; +import org.dllearner.kb.repository.tones.TONESRepository; +import org.dllearner.kb.sparql.SparqlEndpoint; +import org.dllearner.reasoning.SPARQLReasoner; +import org.dllearner.utilities.owl.OWLClassExpressionToSPARQLConverter; +import org.ini4j.IniPreferences; +import org.ini4j.InvalidFileFormatException; +import org.semanticweb.owlapi.apibinding.OWLManager; +import org.semanticweb.owlapi.io.OWLObjectRenderer; +import org.semanticweb.owlapi.io.OWLParserException; +import org.semanticweb.owlapi.io.StringDocumentSource; +import org.semanticweb.owlapi.model.AxiomType; +import org.semanticweb.owlapi.model.OWLAxiom; +import org.semanticweb.owlapi.model.OWLClassExpression; +import org.semanticweb.owlapi.model.OWLDataFactory; +import org.semanticweb.owlapi.model.OWLOntology; +import org.semanticweb.owlapi.model.OWLOntologyCreationException; +import org.semanticweb.owlapi.model.OWLSubClassOfAxiom; +import org.semanticweb.owlapi.model.UnloadableImportException; + +import com.hp.hpl.jena.query.Query; + +import uk.ac.manchester.cs.owl.owlapi.OWLDataFactoryImpl; +import uk.ac.manchester.cs.owl.owlapi.mansyntaxrenderer.ManchesterOWLSyntaxOWLObjectRendererImpl; + +public class OWLAxiomPatternDetectionEvaluation { + + enum AxiomTypeCategory{ + TBox, RBox, ABox + } + + private OWLObjectRenderer axiomRenderer = new ManchesterOWLSyntaxOWLObjectRendererImpl(); + private OWLDataFactory df = new OWLDataFactoryImpl(); + private Connection conn; + + private boolean fancyLatex = false; + + public OWLAxiomPatternDetectionEvaluation() { + initDBConnection(); + } + + private void initDBConnection() { + try { + InputStream is = this.getClass().getClassLoader().getResourceAsStream("db_settings.ini"); + Preferences prefs = new IniPreferences(is); + String dbServer = prefs.node("database").get("server", null); + String dbName = prefs.node("database").get("name", null); + String dbUser = prefs.node("database").get("user", null); + String dbPass = prefs.node("database").get("pass", null); + + Class.forName("com.mysql.jdbc.Driver"); + String url = "jdbc:mysql://" + dbServer + "/" + dbName; + conn = DriverManager.getConnection(url, dbUser, dbPass); + } catch (ClassNotFoundException e) { + e.printStackTrace(); + } catch (SQLException e) { + e.printStackTrace(); + } catch (InvalidFileFormatException e) { + e.printStackTrace(); + } catch (FileNotFoundException e) { + e.printStackTrace(); + } catch (IOException e) { + e.printStackTrace(); + } + } + + public void run(boolean analyzeRepositories, Collection<OntologyRepository> repositories){ + //analyze repositories + if(analyzeRepositories){ + analyze(repositories); + } + + //create statistics for the repositories + makeRepositoryStatistics(repositories); + + //get top 
n TBox, RBox and ABox patterns + makePatternStatistics(repositories); + } + + public void run(Collection<OntologyRepository> repositories){ + run(true, repositories); + } + + public List<OWLAxiom> getPatternsToEvaluate(){ + List<OWLAxiom> axiomPatterns = new ArrayList<OWLAxiom>(); + + Map<OWLAxiom, Pair<Integer, Integer>> topNAxiomPatterns = getTopNAxiomPatterns(AxiomTypeCategory.TBox, 10); + axiomPatterns.addAll(topNAxiomPatterns.keySet()); + + return axiomPatterns; + } + + private void analyze(Collection<OntologyRepository> repositories){ + for (OntologyRepository repository : repositories) { + repository.initialize(); + OWLAxiomPatternFinder patternFinder = new OWLAxiomPatternFinder(repository, conn); + patternFinder.start(); + } + } + + private void makePatternStatistics(Collection<OntologyRepository> repositories){ + int n = 10; + + String latex = ""; + + //total pattern statistics + for (AxiomTypeCategory axiomTypeCategory : AxiomTypeCategory.values()) { + Map<OWLAxiom, Pair<Integer, Integer>> topNAxiomPatterns = getTopNAxiomPatterns(axiomTypeCategory, n); + latex += asLatex("Top " + n + " " + axiomTypeCategory.name() + " axiom patterns.", topNAxiomPatterns) + "\n\n"; + } + + //get top n TBox, RBox and ABox patterns by repository + for (OntologyRepository repository : repositories) { + for (AxiomTypeCategory axiomTypeCategory : AxiomTypeCategory.values()) { + Map<OWLAxiom, Pair<Integer, Integer>> topNAxiomPatterns = getTopNAxiomPatterns(repository, axiomTypeCategory, n); + latex += asLatex("Top " + n + " " + axiomTypeCategory.name() + " axiom patterns for " + repository.getName() + " repository.", topNAxiomPatterns) + "\n\n"; + } + } + try { + new FileOutputStream("pattern-statistics.tex").write(latex.getBytes()); + } catch (FileNotFoundException e) { + e.printStackTrace(); + } catch (IOException e) { + e.printStackTrace(); + } + } + + private void makeRepositoryStatistics(Collection<OntologyRepository> repositories){ + String latexTable = "\\begin{tabular}{lrr|rrr|rrr|rrr|rrr}"; + latexTable += "\\toprule\n"; + latexTable += "Repository & \\multicolumn{2}{c}{\\#Ontologies} & \\multicolumn{12}{c}{\\#Axioms} \\\\\n"; + latexTable += "& Total & Error & \\multicolumn{3}{c}{Total} & \\multicolumn{3}{c}{Tbox} & \\multicolumn{3}{c}{RBox} & \\multicolumn{3}{c}{Abox} \\\\\\midrule\n"; + latexTable += "& & & Min & Avg & Max & Min & Avg & Max & Min & Avg & Max & Min & Avg & Max \\\\\\midrule\n"; + + + PreparedStatement ps; + ResultSet rs; + + int numberOfOntologies; + int numberOfErrorOntologies; + int minNumberOfLogicalAxioms; + int maxNumberOfLogicalAxioms; + int avgNumberOfLogicalAxioms; + int minNumberOfTboxAxioms; + int maxNumberOfTboxAxioms; + int avgNumberOfTboxAxioms; + int minNumberOfRboxAxioms; + int maxNumberOfRboxAxioms; + int avgNumberOfRboxAxioms; + int minNumberOfAboxAxioms; + int maxNumberOfAboxAxioms; + int avgNumberOfAboxAxioms; + + //for each repository + for (OntologyRepository repository : repositories) { + try { + //get number of ontologies + ps = conn.prepareStatement("SELECT COUNT(*) FROM Ontology WHERE repository=?"); + ps.setString(1, repository.getName()); + numberOfOntologies = count(ps); + //get number of error causing ontologies + ps = conn.prepareStatement("SELECT COUNT(*) FROM Ontology WHERE repository=? AND iri LIKE 'ERROR%'"); + ps.setString(1, repository.getName()); + numberOfErrorOntologies = count(ps); + //get min number of logical axioms + ps = conn.prepareStatement("SELECT MIN(logical_axioms) FROM Ontology WHERE repository=? 
AND iri NOT LIKE 'ERROR%'"); + ps.setString(1, repository.getName()); + minNumberOfLogicalAxioms = count(ps); + //get max number of logical axioms + ps = conn.prepareStatement("SELECT MAX(logical_axioms) FROM Ontology WHERE repository=? AND iri NOT LIKE 'ERROR%'"); + ps.setString(1, repository.getName()); + maxNumberOfLogicalAxioms = count(ps); + //get avg number of logical axioms + ps = conn.prepareStatement("SELECT AVG(logical_axioms) FROM Ontology WHERE repository=? AND iri NOT LIKE 'ERROR%'"); + ps.setString(1, repository.getName()); + avgNumberOfLogicalAxioms = count(ps); + //get min number of tbox axioms + ps = conn.prepareStatement("SELECT MIN(tbox_axioms) FROM Ontology WHERE repository=? AND iri NOT LIKE 'ERROR%'"); + ps.setString(1, repository.getName()); + minNumberOfTboxAxioms = count(ps); + //get max number of tbox axioms + ps = conn.prepareStatement("SELECT MAX(tbox_axioms) FROM Ontology WHERE repository=? AND iri NOT LIKE 'ERROR%'"); + ps.setString(1, repository.getName()); + maxNumberOfTboxAxioms = count(ps); + //get avg number of tbox axioms + ps = conn.prepareStatement("SELECT AVG(tbox_axioms) FROM Ontology WHERE repository=? AND iri NOT LIKE 'ERROR%'"); + ps.setString(1, repository.getName()); + avgNumberOfTboxAxioms = count(ps); + //get min number of rbox axioms + ps = conn.prepareStatement("SELECT MIN(rbox_axioms) FROM Ontology WHERE repository=? AND iri NOT LIKE 'ERROR%'"); + ps.setString(1, repository.getName()); + minNumberOfRboxAxioms = count(ps); + //get max number of rbox axioms + ps = conn.prepareStatement("SELECT MAX(rbox_axioms) FROM Ontology WHERE repository=? AND iri NOT LIKE 'ERROR%'"); + ps.setString(1, repository.getName()); + maxNumberOfRboxAxioms = count(ps); + //get avg number of rbox axioms + ps = conn.prepareStatement("SELECT AVG(rbox_axioms) FROM Ontology WHERE repository=? AND iri NOT LIKE 'ERROR%'"); + ps.setString(1, repository.getName()); + avgNumberOfRboxAxioms = count(ps); + //get min number of abox axioms + ps = conn.prepareStatement("SELECT MIN(abox_axioms) FROM Ontology WHERE repository=? AND iri NOT LIKE 'ERROR%'"); + ps.setString(1, repository.getName()); + minNumberOfAboxAxioms = count(ps); + //get max number of abox axioms + ps = conn.prepareStatement("SELECT MAX(abox_axioms) FROM Ontology WHERE repository=? AND iri NOT LIKE 'ERROR%'"); + ps.setString(1, repository.getName()); + maxNumberOfAboxAxioms = count(ps); + //get avg number of abox axioms + ps = conn.prepareStatement("SELECT AVG(abox_axioms) FROM Ontology WHERE repository=? 
AND iri NOT LIKE 'ERROR%'"); + ps.setString(1, repository.getName()); + avgNumberOfAboxAxioms = count(ps); + + latexTable += + repository.getName() + "&" + + numberOfOntologies + "&" + + numberOfErrorOntologies + "&" + + minNumberOfLogicalAxioms + "&" + + avgNumberOfLogicalAxioms + "&" + + maxNumberOfLogicalAxioms + "&" + + minNumberOfTboxAxioms + "&" + + avgNumberOfTboxAxioms + "&" + + maxNumberOfTboxAxioms + "&" + + minNumberOfRboxAxioms + "&" + + avgNumberOfRboxAxioms + "&" + + maxNumberOfRboxAxioms + "&" + + minNumberOfAboxAxioms + "&" + + avgNumberOfAboxAxioms + "&" + + maxNumberOfAboxAxioms + "\\\\\n"; + + } catch (SQLException e) { + e.printStackTrace(); + } + } + latexTable += "\\bottomrule\\end{tabular}"; + try { + new FileOutputStream("repository-statistics.tex").write(latexTable.getBytes()); + } catch (FileNotFoundException e) { + e.printStackTrace(); + } catch (IOException e) { + e.printStackTrace(); + } + } + + private String asLatex(String title, Map<OWLAxiom, Pair<Integer, Integer>> topN){ + String latexTable = "\\begin{table}\n"; + latexTable += "\\begin{tabular}{lrr}\n"; + latexTable += "\\toprule\n"; + latexTable += "Pattern & Frequency & \\#Ontologies\\\\\\midrule\n"; + + for (Entry<OWLAxiom, Pair<Integer, Integer>> entry : topN.entrySet()) { + OWLAxiom axiom = entry.getKey(); + Integer frequency = entry.getValue().getKey(); + Integer idf = entry.getValue().getValue(); + + if(axiom != null){ + String axiomColumn = axiomRenderer.render(axiom); + if(fancyLatex){ + axiomColumn = "\\begin{lstlisting}[language=manchester]" + axiomColumn + "\\end{lstlisting}"; + } + latexTable += axiomColumn + " & " + frequency + " & " + idf + "\\\\\n"; + } + } + latexTable += "\\bottomrule\n\\end{tabular}\n"; + latexTable += "\\caption{" + title + "}\n"; + latexTable += "\\end{table}\n"; + return latexTable; + } + + private Map<OWLAxiom, Pair<Integer, Integer>> getTopNAxiomPatterns(AxiomTypeCategory axiomType, int n){ + Map<OWLAxiom, Pair<Integer, Integer>> topN = new LinkedHashMap<OWLAxiom, Pair<Integer, Integer>>(); + PreparedStatement ps; + ResultSet rs; + try { + ps = conn.prepareStatement("SELECT pattern,SUM(occurrences),COUNT(ontology_id) FROM " + + "Ontology_Pattern OP, Pattern P, Ontology O WHERE " + + "(P.id=OP.pattern_id AND O.id=OP.ontology_id AND P.axiom_type=?) " + + "GROUP BY P.id ORDER BY SUM(`OP`.`occurrences`) DESC LIMIT ?"); + ps.setString(1, axiomType.name()); + ps.setInt(2, n); + rs = ps.executeQuery(); + while(rs.next()){ + topN.put(asOWLAxiom(rs.getString(1)), new Pair<Integer, Integer>(rs.getInt(2), rs.getInt(3))); + } + } catch(SQLException e){ + e.printStackTrace(); + } + return topN; + } + + private Map<OWLAxiom, Pair<Integer, Integer>> getTopNAxiomPatterns(OntologyRepository repository, AxiomTypeCategory axiomType, int n){ + Map<OWLAxiom, Pair<Integer, Integer>> topN = new LinkedHashMap<OWLAxiom, Pair<Integer, Integer>>(); + PreparedStatement ps; + ResultSet rs; + try { + //get number of ontologies + ps = conn.prepareStatement("SELECT pattern,SUM(occurrences),COUNT(ontology_id) FROM " + + "Ontology_Pattern OP, Pattern P, Ontology O WHERE " + + "(P.id=OP.pattern_id AND O.id=OP.ontology_id AND O.repository=? AND P.axiom_type=?) 
" + + "GROUP BY P.id ORDER BY SUM(`OP`.`occurrences`) DESC LIMIT ?"); + ps.setString(1, repository.getName()); + ps.setString(2, axiomType.name()); + ps.setInt(3, n); + rs = ps.executeQuery(); + while(rs.next()){ + topN.put(asOWLAxiom(rs.getString(1)), new Pair<Integer, Integer>(rs.getInt(2), rs.getInt(3))); + } + } catch(SQLException e){ + e.printStackTrace(); + } + return topN; + } + + private Map<OntologyRepository, Map<OWLAxiom, Integer>> getTopNAxiomPatterns(Collection<OntologyRepository> repositories, AxiomTypeCategory axiomType, int n){ + Map<OntologyRepository, Map<OWLAxiom, Integer>> topNByRepository = new LinkedHashMap<OntologyRepository, Map<OWLAxiom,Integer>>(); + PreparedStatement ps; + ResultSet rs; + //for each repository + for (OntologyRepository repository : repositories) { + Map<OWLAxiom, Integer> topN = new LinkedHashMap<OWLAxiom, Integer>(); + try { + //get number of ontologies + ps = conn.prepareStatement("SELECT pattern,SUM(occurrences) FROM " + + "Ontology_Pattern OP, Pattern P, Ontology O WHERE " + + "(P.id=OP.pattern_id AND O.id=OP.ontology_id AND O.repository=? AND P.axiom_type=?) " + + "GROUP BY P.id ORDER BY SUM(`OP`.`occurrences`) DESC LIMIT ?"); + ps.setString(1, repository.getName()); + ps.setString(2, "RBox"); + ps.setInt(3, n); + rs = ps.executeQuery(); + while(rs.next()){ + topN.put(asOWLAxiom(rs.getString(1)), rs.getInt(2)); + } + } catch(SQLException e){ + e.printStackTrace(); + } + topNByRepository.put(repository, topN); + } + return topNByRepository; + } + + private OWLAxiom asOWLAxiom(String functionalSyntaxAxiomString){ + try { + StringDocumentSource s = new StringDocumentSource("Ontology(<http://www.pattern.org> " + functionalSyntaxAxiomString + ")"); + OWLFunctionalSyntaxOWLParser p = new OWLFunctionalSyntaxOWLParser(); + OWLOntology newOntology = OWLManager.createOWLOntologyManager().createOntology(); + p.parse(s, newOntology); + if(!newOntology.getLogicalAxioms().isEmpty()){ + return newOntology.getLogicalAxioms().iterator().next(); + } + } catch (UnloadableImportException e) { + e.printStackTrace(); + } catch (OWLOntologyCreationException e) { + e.printStackTrace(); + } catch (OWLParserException e) { + System.err.println("Parsing failed for axiom " + functionalSyntaxAxiomString); + } catch (IOException e) { + e.printStackTrace(); + } + return null; + } + + private int count(PreparedStatement ps) throws SQLException{ + ResultSet rs = ps.executeQuery(); + rs.next(); + return rs.getInt(1); + } + + public static void main(String[] args) throws Exception { +// ToStringRenderer.getInstance().setRenderer(new DLSyntaxObjectRenderer()); + boolean analyzeRepositories = true; + if(args.length == 1){ + analyzeRepositories = Boolean.parseBoolean(args[0]); + } + new OWLAxiomPatternDetectionEvaluation().run(analyzeRepositories, Arrays.asList( + new TONESRepository(), new BioPortalRepository(), new OxfordRepository())); + } + + +} Added: trunk/scripts/src/main/java/org/dllearner/scripts/pattern/OWLAxiomPatternUsageEvaluation.java =================================================================== --- trunk/scripts/src/main/java/org/dllearner/scripts/pattern/OWLAxiomPatternUsageEvaluation.java (rev 0) +++ trunk/scripts/src/main/java/org/dllearner/scripts/pattern/OWLAxiomPatternUsageEvaluation.java 2013-05-05 14:24:32 UTC (rev 3950) @@ -0,0 +1,418 @@ +package org.dllearner.scripts.pattern; + +import java.io.FileNotFoundException; +import java.io.FileOutputStream; +import java.io.IOException; +import java.io.InputStream; +import java.net.SocketTimeoutException; 
+import java.sql.Connection; +import java.sql.DriverManager; +import java.sql.PreparedStatement; +import java.sql.ResultSet; +import java.sql.SQLException; +import java.text.DecimalFormat; +import java.util.ArrayList; +import java.util.Collections; +import java.util.HashMap; +import java.util.HashSet; +import java.util.LinkedHashMap; +import java.util.List; +import java.util.Map; +import java.util.Map.Entry; +import java.util.Set; +import java.util.concurrent.TimeUnit; +import java.util.prefs.Preferences; + +import org.aksw.commons.util.Pair; +import org.apache.log4j.Logger; +import org.coode.owlapi.functionalparser.OWLFunctionalSyntaxOWLParser; +import org.coode.owlapi.turtle.TurtleOntologyFormat; +import org.dllearner.core.EvaluatedAxiom; +import org.dllearner.core.Score; +import org.dllearner.core.owl.NamedClass; +import org.dllearner.kb.LocalModelBasedSparqlEndpointKS; +import org.dllearner.kb.SparqlEndpointKS; +import org.dllearner.kb.sparql.ExtractionDBCache; +import org.dllearner.kb.sparql.SparqlEndpoint; +import org.dllearner.kb.sparql.SparqlQuery; +import org.dllearner.learningproblems.AxiomScore; +import org.dllearner.learningproblems.Heuristics; +import org.dllearner.reasoning.SPARQLReasoner; +import org.dllearner.utilities.owl.DLLearnerAxiomConvertVisitor; +import org.dllearner.utilities.owl.OWLClassExpressionToSPARQLConverter; +import org.ini4j.IniPreferences; +import org.ini4j.InvalidFileFormatException; +import org.semanticweb.owlapi.apibinding.OWLManager; +import org.semanticweb.owlapi.io.OWLObjectRenderer; +import org.semanticweb.owlapi.io.OWLParserException; +import org.semanticweb.owlapi.io.StringDocumentSource; +import org.semanticweb.owlapi.io.ToStringRenderer; +import org.semanticweb.owlapi.model.AxiomType; +import org.semanticweb.owlapi.model.IRI; +import org.semanticweb.owlapi.model.OWLAnnotationProperty; +import org.semanticweb.owlapi.model.OWLAxiom; +import org.semanticweb.owlapi.model.OWLClass; +import org.semanticweb.owlapi.model.OWLClassExpression; +import org.semanticweb.owlapi.model.OWLDataFactory; +import org.semanticweb.owlapi.model.OWLEntity; +import org.semanticweb.owlapi.model.OWLOntology; +import org.semanticweb.owlapi.model.OWLOntologyCreationException; +import org.semanticweb.owlapi.model.OWLOntologyManager; +import org.semanticweb.owlapi.model.OWLOntologyStorageException; +import org.semanticweb.owlapi.model.OWLSubClassOfAxiom; +import org.semanticweb.owlapi.model.UnloadableImportException; +import org.semanticweb.owlapi.util.OWLObjectDuplicator; + +import uk.ac.manchester.cs.owl.owlapi.OWLDataFactoryImpl; +import uk.ac.manchester.cs.owl.owlapi.mansyntaxrenderer.ManchesterOWLSyntaxOWLObjectRendererImpl; +import uk.ac.manchester.cs.owlapi.dlsyntax.DLSyntaxObjectRenderer; + +import com.hp.hpl.jena.query.Query; +import com.hp.hpl.jena.query.QueryExecution; +import com.hp.hpl.jena.query.QueryExecutionFactory; +import com.hp.hpl.jena.query.QueryFactory; +import com.hp.hpl.jena.query.QuerySolution; +import com.hp.hpl.jena.query.Syntax; +import com.hp.hpl.jena.rdf.model.Resource; +import com.hp.hpl.jena.sparql.engine.http.QueryEngineHTTP; +import com.hp.hpl.jena.sparql.engine.http.QueryExceptionHTTP; + +public class OWLAxiomPatternUsageEvaluation { + + + private static final Logger logger = Logger.getLogger(OWLAxiomPatternUsageEvaluation.AxiomTypeCategory.class + .getName()); + + enum AxiomTypeCategory{ + TBox, RBox, ABox + } + + private OWLObjectRenderer axiomRenderer = new ManchesterOWLSyntaxOWLObjectRendererImpl(); + private OWLDataFactory df = new 
OWLDataFactoryImpl(); + private Connection conn; + + private ExtractionDBCache cache = new ExtractionDBCache("pattern-cache"); + private SparqlEndpointKS ks = new SparqlEndpointKS(SparqlEndpoint.getEndpointDBpedia(), cache);//new LocalModelBasedSparqlEndpointKS(model); + private String ns = "http://dbpedia.org/ontology/"; + + private boolean fancyLatex = false; + private DecimalFormat format = new DecimalFormat("00.0%"); + private long waitingTime = TimeUnit.SECONDS.toMillis(3); + private double threshold = 0.6; + private OWLAnnotationProperty confidenceProperty = df.getOWLAnnotationProperty(IRI.create("http://dl-learner.org/pattern/confidence")); + + public OWLAxiomPatternUsageEvaluation() { + initDBConnection(); + } + + private void initDBConnection() { + try { + InputStream is = this.getClass().getClassLoader().getResourceAsStream("db_settings.ini"); + Preferences prefs = new IniPreferences(is); + String dbServer = prefs.node("database").get("server", null); + String dbName = prefs.node("database").get("name", null); + String dbUser = prefs.node("database").get("user", null); + String dbPass = prefs.node("database").get("pass", null); + + Class.forName("com.mysql.jdbc.Driver"); + String url = "jdbc:mysql://" + dbServer + "/" + dbName; + conn = DriverManager.getConnection(url, dbUser, dbPass); + } catch (ClassNotFoundException e) { + e.printStackTrace(); + } catch (SQLException e) { + e.printStackTrace(); + } catch (InvalidFileFormatException e) { + e.printStackTrace(); + } catch (FileNotFoundException e) { + e.printStackTrace(); + } catch (IOException e) { + e.printStackTrace(); + } + } + + public void run(){ + SPARQLReasoner reasoner = new SPARQLReasoner(ks, cache); + + OWLClassExpressionToSPARQLConverter converter = new OWLClassExpressionToSPARQLConverter(); + + //get the axiom patterns to evaluate + List<OWLAxiom> patterns = getPatternsToEvaluate(); + + //get all classes in KB + Set<NamedClass> classes = reasoner.getTypes(ns); + + //for each pattern + for (OWLAxiom pattern : patterns) { + if(pattern.isOfType(AxiomType.SUBCLASS_OF)){ + logger.info("Processing " + pattern + "..."); + Set<EvaluatedAxiom> evaluatedAxioms = new HashSet<EvaluatedAxiom>(); + Map<OWLAxiom, Score> axioms2Score = new LinkedHashMap<OWLAxiom, Score>(); + OWLClassExpression patternSubClass = ((OWLSubClassOfAxiom)pattern).getSubClass(); + OWLClassExpression superClass = ((OWLSubClassOfAxiom)pattern).getSuperClass(); + //for each class + int i = 1; + for (NamedClass cls : classes) { + logger.info("Processing " + cls + "..."); + //set the subclass as a class from the KB + OWLClass subClass = df.getOWLClass(IRI.create(cls.getName())); + + //1. count number of instances in subclass expression + Query query = QueryFactory.create("SELECT (COUNT(DISTINCT ?x) AS ?cnt) WHERE {" + converter.convert("?x", subClass) + "}",Syntax.syntaxARQ); + int subClassCnt = executeSelectQuery(query).next().getLiteral("cnt").getInt(); + + //2. 
count number of instances in subclass AND superclass expression + //we have 2 options here to evaluate the whole axiom pattern: + //a) we replace all entities in the signature of the super class expression(except the subclass) with variables + //and GROUP BY them + //b) we replace only 1 entity with a variable, thus we have to try it for several combinations +// for (OWLEntity entity : signature) { +// //replace current entity with variable and for the rest use existing entities in KB +// query = converter.asQuery("?x", df.getOWLObjectIntersectionOf(subClass, superClass), signature); +// } + Set<OWLEntity> signature = superClass.getSignature(); + signature.remove(subClass); + query = converter.asQuery("?x", df.getOWLObjectIntersectionOf(subClass, superClass), signature); + query.setLimit(100); + Map<OWLEntity, String> variablesMapping = converter.getVariablesMapping();System.out.println(query); + com.hp.hpl.jena.query.ResultSet rs = executeSelectQuery(query); + QuerySolution qs; + while(rs.hasNext()){ + qs = rs.next(); + //get the IRIs for each variable + Map<OWLEntity, IRI> entity2IRIMap = new HashMap<OWLEntity, IRI>(); + entity2IRIMap.put(patternSubClass.asOWLClass(), subClass.getIRI()); + for (OWLEntity entity : signature) { + String var = variablesMapping.get(entity); + Resource resource = qs.getResource(var); + entity2IRIMap.put(entity, IRI.create(resource.getURI())); + } + //instantiate the pattern + OWLObjectDuplicator duplicator = new OWLObjectDuplicator(entity2IRIMap, df); + OWLAxiom patternInstantiation = duplicator.duplicateObject(pattern); + int patternInstantiationCnt = qs.getLiteral("cnt").getInt(); + //compute score + Score score; + try { + score = computeScore(subClassCnt, patternInstantiationCnt); + axioms2Score.put(patternInstantiation, score); + logger.info(patternInstantiation + "(" + format.format(score.getAccuracy()) + ")"); + } catch (IllegalArgumentException e) { + //sometimes Virtuosos returns 'wrong' cnt values such that the success number as bigger than the total number of instances + e.printStackTrace(); + } + +// //convert into EvaluatedAxiom such we can serialize it as RDF with accuracy value as annotation +// EvaluatedAxiom evaluatedAxiom = new EvaluatedAxiom(DLLearnerAxiomConvertVisitor.getDLLearnerAxiom(patternInstantiation), score); +// evaluatedAxioms.add(evaluatedAxiom); + + } + //wait some time to avoid flooding of endpoint + try { + Thread.sleep(waitingTime); + } catch (InterruptedException e) { + e.printStackTrace(); + } +// if(i++ == 3) break; + } + save(axioms2Score); + } + } + } + + private void save(Map<OWLAxiom, Score> axioms2Score){ + try { + Set<OWLAxiom> annotatedAxioms = new HashSet<OWLAxiom>(); + for (Entry<OWLAxiom, Score> entry : axioms2Score.entrySet()) { + OWLAxiom axiom = entry.getKey(); + Score score = entry.getValue(); + if(score.getAccuracy() >= threshold){ + annotatedAxioms.add(axiom.getAnnotatedAxiom(Collections.singleton(df.getOWLAnnotation(confidenceProperty, df.getOWLLiteral(score.getAccuracy()))))); + + } + } + OWLOntologyManager man = OWLManager.createOWLOntologyManager(); + OWLOntology ontology = man.createOntology(annotatedAxioms); + man.saveOntology(ontology, new TurtleOntologyFormat(), new FileOutputStream("pattern.ttl")); + } catch (OWLOntologyCreationException e) { + e.printStackTrace(); + } catch (OWLOntologyStorageException e) { + e.printStackTrace(); + } catch (FileNotFoundException e) { + e.printStackTrace(); + } + } + + private void save(Set<EvaluatedAxiom> evaluatedAxioms){ + try { + Set<OWLAxiom> axioms = new 
HashSet<OWLAxiom>(); + for (EvaluatedAxiom evaluatedAxiom : EvaluatedAxiom.getBestEvaluatedAxioms(evaluatedAxioms, threshold)) { + axioms.addAll(evaluatedAxiom.toRDF("http://dl-learner.org/pattern/").values().iterator().next()); + } + OWLOntologyManager man = OWLManager.createOWLOntologyManager(); + OWLOntology ontology = man.createOntology(axioms); + man.saveOntology(ontology, new TurtleOntologyFormat(), new FileOutputStream("pattern.ttl")); + } catch (OWLOntologyCreationException e) { + e.printStackTrace(); + } catch (OWLOntologyStorageException e) { + e.printStackTrace(); + } catch (FileNotFoundException e) { + e.printStackTrace(); + } + } + + public List<OWLAxiom> getPatternsToEvaluate(){ + List<OWLAxiom> axiomPatterns = new ArrayList<OWLAxiom>(); + + Map<OWLAxiom, Pair<Integer, Integer>> topNAxiomPatterns = getTopNAxiomPatterns(AxiomTypeCategory.TBox, 10); + axiomPatterns.addAll(topNAxiomPatterns.keySet()); + + return axiomPatterns; + } + + + private String asLatex(String title, Map<OWLAxiom, Pair<Integer, Integer>> topN){ + String latexTable = "\\begin{table}\n"; + latexTable += "\\begin{tabular}{lrr}\n"; + latexTable += "\\toprule\n"; + latexTable += "Pattern & Frequency & \\#Ontologies\\\\\\midrule\n"; + + for (Entry<OWLAxiom, Pair<Integer, Integer>> entry : topN.entrySet()) { + OWLAxiom axiom = entry.getKey(); + Integer frequency = entry.getValue().getKey(); + Integer idf = entry.getValue().getValue(); + + if(axiom != null){ + String axiomColumn = axiomRenderer.render(axiom); + if(fancyLatex){ + axiomColumn = "\\begin{lstlisting}[language=manchester]" + axiomColumn + "\\end{lstlisting}"; + } + latexTable += axiomColumn + " & " + frequency + " & " + idf + "\\\\\n"; + } + } + latexTable += "\\bottomrule\n\\end{tabular}\n"; + latexTable += "\\caption{" + title + "}\n"; + latexTable += "\\end{table}\n"; + return latexTable; + } + + private Map<OWLAxiom, Pair<Integer, Integer>> getTopNAxiomPatterns(AxiomTypeCategory axiomType, int n){ + Map<OWLAxiom, Pair<Integer, Integer>> topN = new LinkedHashMap<OWLAxiom, Pair<Integer, Integer>>(); + PreparedStatement ps; + ResultSet rs; + try { + ps = conn.prepareStatement("SELECT pattern,SUM(occurrences),COUNT(ontology_id) FROM " + + "Ontology_Pattern OP, Pattern P, Ontology O WHERE " + + "(P.id=OP.pattern_id AND O.id=OP.ontology_id AND P.axiom_type=?) 
" + + "GROUP BY P.id ORDER BY SUM(`OP`.`occurrences`) DESC LIMIT ?"); + ps.setString(1, axiomType.name()); + ps.setInt(2, n); + rs = ps.executeQuery(); + while(rs.next()){ + topN.put(asOWLAxiom(rs.getString(1)), new Pair<Integer, Integer>(rs.getInt(2), rs.getInt(3))); + } + } catch(SQLException e){ + e.printStackTrace(); + } + return topN; + } + + protected com.hp.hpl.jena.query.ResultSet executeSelectQuery(Query query) { + com.hp.hpl.jena.query.ResultSet rs = null; + if(ks.isRemote()){ + SparqlEndpoint endpoint = ((SparqlEndpointKS) ks).getEndpoint(); + ExtractionDBCache cache = ks.getCache(); + if(cache != null){ + rs = SparqlQuery.convertJSONtoResultSet(cache.executeSelectQuery(endpoint, query.toString())); + } else { + QueryEngineHTTP queryExecution = new QueryEngineHTTP(endpoint.getURL().toString(), + query); + queryExecution.setDefaultGraphURIs(endpoint.getDefaultGraphURIs()); + queryExecution.setNamedGraphURIs(endpoint.getNamedGraphURIs()); + try { + rs = queryExecution.execSelect(); + return rs; + } catch (QueryExceptionHTTP e) { + if(e.getCause() instanceof SocketTimeoutException){ + logger.warn("Got timeout"); + } else { + logger.error("Exception executing query", e); + } + } + } + + } else { + QueryExecution queryExecution = QueryExecutionFactory.create(query, ((LocalModelBasedSparqlEndpointKS)ks).getModel()); + rs = queryExecution.execSelect(); + } + return rs; + } + + protected com.hp.hpl.jena.query.ResultSet executeSelectQuery(Query query, boolean cached) { + com.hp.hpl.jena.query.ResultSet rs = null; + if(ks.isRemote()){ + SparqlEndpoint endpoint = ((SparqlEndpointKS) ks).getEndpoint(); + ExtractionDBCache cache = ks.getCache(); + if(cache != null && cached){ + rs = SparqlQuery.convertJSONtoResultSet(cache.executeSelectQuery(endpoint, query.toString())); + } else { + QueryEngineHTTP queryExecution = new QueryEngineHTTP(endpoint.getURL().toString(), + query); + queryExecution.setDefaultGraphURIs(endpoint.getDefaultGraphURIs()); + queryExecution.setNamedGraphURIs(endpoint.getNamedGraphURIs()); + try { + rs = queryExecution.execSelect(); + return rs; + } catch (QueryExceptionHTTP e) { + if(e.getCause() instanceof SocketTimeoutException){ + logger.warn("Got timeout"); + } else { + logger.error("Exception executing query", e); + } + } + } + + } else { + QueryExecution queryExecution = QueryExecutionFactory.create(query, ((LocalModelBasedSparqlEndpointKS)ks).getModel()); + rs = queryExecution.execSelect(); + } + return rs; + } + + private Score computeScore(int total, int success){ + double[] confidenceInterval = Heuristics.getConfidenceInterval95Wald(total, success); + + double accuracy = (confidenceInterval[0] + confidenceInterval[1]) / 2; + + double confidence = confidenceInterval[1] - confidenceInterval[0]; + + return new AxiomScore(accuracy, confidence, total, success, total-success); + } + + private OWLAxiom asOWLAxiom(String functionalSyntaxAxiomString){ + try { + StringDocumentSource s = new StringDocumentSource("Ontology(<http://www.pattern.org> " + functionalSyntaxAxiomString + ")"); + OWLFunctionalSyntaxOWLParser p = new OWLFunctionalSyntaxOWLParser(); + OWLOntology newOntology = OWLManager.createOWLOntologyManager().createOntology(); + p.parse(s, newOntology); + if(!newOntology.getLogicalAxioms().isEmpty()){ + return newOntology.getLogicalAxioms().iterator().next(); + } + } catch (UnloadableImportException e) { + e.printStackTrace(); + } catch (OWLOntologyCreationException e) { + e.printStackTrace(); + } catch (OWLParserException e) { + System.err.println("Parsing 
failed for axiom " + functionalSyntaxAxiomString); + } catch (IOException e) { + e.printStackTrace(); + } + return null; + } + + public static void main(String[] args) throws Exception { + ToStringRenderer.getInstance().setRenderer(new DLSyntaxObjectRenderer()); + new OWLAxiomPatternUsageEvaluation().run(); + } + + +} This was sent by the SourceForge.net collaborative development platform, the world's largest Open Source development site. |
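A note on the scoring used by OWLAxiomPatternUsageEvaluation above: computeScore(total, success) asks Heuristics.getConfidenceInterval95Wald for the 95% confidence interval of the success ratio, then reports the interval midpoint as accuracy and the interval width as confidence. Assuming the helper implements the textbook Wald interval, this corresponds to

\[
\hat{p} = \frac{s}{n}, \qquad
[l, u] = \hat{p} \pm 1.96\sqrt{\frac{\hat{p}(1-\hat{p})}{n}}, \qquad
\mathrm{accuracy} = \frac{l+u}{2}, \qquad
\mathrm{confidence} = u - l,
\]

where n (total) is the number of instances of the tested sub-class and s (success) is the number of those instances that also satisfy the instantiated super-class expression. For an unclipped Wald interval the midpoint is simply \hat{p}; midpoint and \hat{p} only differ if the helper truncates the interval bounds to [0, 1].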
From: <lor...@us...> - 2013-05-06 10:34:57
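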
Revision: 3952 http://sourceforge.net/p/dl-learner/code/3952 Author: lorenz_b Date: 2013-05-06 10:34:52 +0000 (Mon, 06 May 2013) Log Message: ----------- Improved Latex output. Modified Paths: -------------- trunk/components-core/src/main/java/org/dllearner/kb/repository/bioportal/BioPortalRepository.java trunk/scripts/src/main/java/org/dllearner/scripts/pattern/OWLAxiomPatternDetectionEvaluation.java Modified: trunk/components-core/src/main/java/org/dllearner/kb/repository/bioportal/BioPortalRepository.java =================================================================== --- trunk/components-core/src/main/java/org/dllearner/kb/repository/bioportal/BioPortalRepository.java 2013-05-05 14:56:53 UTC (rev 3951) +++ trunk/components-core/src/main/java/org/dllearner/kb/repository/bioportal/BioPortalRepository.java 2013-05-06 10:34:52 UTC (rev 3952) @@ -26,6 +26,8 @@ private String apiKey = "8fadfa2c-47de-4487-a1f5-b7af7378d693"; private String serviceURL = "http://rest.bioontology.org/bioportal/ontologies"; + private boolean initialized = false; + private List<OntologyRepositoryEntry> entries = new ArrayList<OntologyRepositoryEntry>(); @Override @@ -41,6 +43,7 @@ @Override public void initialize() { refresh(); + initialized = true; } @Override @@ -90,6 +93,9 @@ @Override public Collection<OntologyRepositoryEntry> getEntries() { + if(!initialized){ + initialize(); + } return entries; } @@ -121,8 +127,6 @@ for(OntologyRepositoryEntry entry : entries){ System.out.println("Loading " + entry.getOntologyShortName()); System.out.println("From " + entry.getPhysicalURI()); - OWLOntology ont = OWLManager.createOWLOntologyManager().loadOntology(IRI.create(entry.getPhysicalURI())); - System.out.println(ont.getAxiomCount()); } } Modified: trunk/scripts/src/main/java/org/dllearner/scripts/pattern/OWLAxiomPatternDetectionEvaluation.java =================================================================== --- trunk/scripts/src/main/java/org/dllearner/scripts/pattern/OWLAxiomPatternDetectionEvaluation.java 2013-05-05 14:56:53 UTC (rev 3951) +++ trunk/scripts/src/main/java/org/dllearner/scripts/pattern/OWLAxiomPatternDetectionEvaluation.java 2013-05-06 10:34:52 UTC (rev 3952) @@ -4,6 +4,7 @@ import java.io.FileOutputStream; import java.io.IOException; import java.io.InputStream; +import java.io.StringWriter; import java.sql.Connection; import java.sql.DriverManager; import java.sql.PreparedStatement; @@ -21,6 +22,8 @@ import org.aksw.commons.util.Pair; import org.coode.owlapi.functionalparser.OWLFunctionalSyntaxOWLParser; +import org.coode.owlapi.latex.LatexObjectVisitor; +import org.coode.owlapi.latex.LatexWriter; import org.dllearner.algorithms.pattern.OWLAxiomPatternFinder; import org.dllearner.core.owl.NamedClass; import org.dllearner.kb.SparqlEndpointKS; @@ -62,6 +65,8 @@ private Connection conn; private boolean fancyLatex = false; + private boolean dlSyntax = false; + private boolean formatNumbers = true; public OWLAxiomPatternDetectionEvaluation() { initDBConnection(); @@ -239,22 +244,41 @@ ps.setString(1, repository.getName()); avgNumberOfAboxAxioms = count(ps); - latexTable += - repository.getName() + "&" + - numberOfOntologies + "&" + - numberOfErrorOntologies + "&" + - minNumberOfLogicalAxioms + "&" + - avgNumberOfLogicalAxioms + "&" + - maxNumberOfLogicalAxioms + "&" + - minNumberOfTboxAxioms + "&" + - avgNumberOfTboxAxioms + "&" + - maxNumberOfTboxAxioms + "&" + - minNumberOfRboxAxioms + "&" + - avgNumberOfRboxAxioms + "&" + - maxNumberOfRboxAxioms + "&" + - minNumberOfAboxAxioms + "&" + - 
avgNumberOfAboxAxioms + "&" + - maxNumberOfAboxAxioms + "\\\\\n"; + if(formatNumbers){ + latexTable += + repository.getName() + "} & " + + "\\num{" + numberOfOntologies + "} & " + + "\\num{" + numberOfErrorOntologies + "} & " + + "\\num{" + minNumberOfLogicalAxioms + "} & " + + "\\num{" + avgNumberOfLogicalAxioms + "} & " + + "\\num{" + maxNumberOfLogicalAxioms + "} & " + + "\\num{" + minNumberOfTboxAxioms + "} & " + + "\\num{" + avgNumberOfTboxAxioms + "} & " + + "\\num{" + maxNumberOfTboxAxioms + "} & " + + "\\num{" + minNumberOfRboxAxioms + "} & " + + "\\num{" + avgNumberOfRboxAxioms + "} & " + + "\\num{" + maxNumberOfRboxAxioms + "} & " + + "\\num{" + minNumberOfAboxAxioms + "} & " + + "\\num{" + avgNumberOfAboxAxioms + "} & " + + "\\num{" + maxNumberOfAboxAxioms + "}\\\\\n"; + } else { + latexTable += + repository.getName() + " & " + + numberOfOntologies + " & " + + numberOfErrorOntologies + " & " + + minNumberOfLogicalAxioms + " & " + + avgNumberOfLogicalAxioms + " & " + + maxNumberOfLogicalAxioms + " & " + + minNumberOfTboxAxioms + " & " + + avgNumberOfTboxAxioms + " & " + + maxNumberOfTboxAxioms + " & " + + minNumberOfRboxAxioms + " & " + + avgNumberOfRboxAxioms + " & " + + maxNumberOfRboxAxioms + " & " + + minNumberOfAboxAxioms + " & " + + avgNumberOfAboxAxioms + " & " + + maxNumberOfAboxAxioms + "\\\\\n"; + } } catch (SQLException e) { e.printStackTrace(); @@ -271,22 +295,34 @@ } private String asLatex(String title, Map<OWLAxiom, Pair<Integer, Integer>> topN){ + StringWriter sw = new StringWriter(); + LatexWriter w = new LatexWriter(sw); + LatexObjectVisitor renderer = new LatexObjectVisitor(w, df); String latexTable = "\\begin{table}\n"; - latexTable += "\\begin{tabular}{lrr}\n"; + latexTable += "\\begin{tabular}{lrrr}\n"; latexTable += "\\toprule\n"; - latexTable += "Pattern & Frequency & \\#Ontologies\\\\\\midrule\n"; + latexTable += "Pattern & Frequency & \\#Ontologies & tf-idf \\\\\\midrule\n"; for (Entry<OWLAxiom, Pair<Integer, Integer>> entry : topN.entrySet()) { OWLAxiom axiom = entry.getKey(); Integer frequency = entry.getValue().getKey(); - Integer idf = entry.getValue().getValue(); + Integer df = entry.getValue().getValue(); if(axiom != null){ String axiomColumn = axiomRenderer.render(axiom); if(fancyLatex){ axiomColumn = "\\begin{lstlisting}[language=manchester]" + axiomColumn + "\\end{lstlisting}"; } - latexTable += axiomColumn + " & " + frequency + " & " + idf + "\\\\\n"; + if(dlSyntax){ + axiom.accept(renderer); + axiomColumn = sw.toString();sw.getBuffer().setLength(0); + + } + if(formatNumbers){ + latexTable += axiomColumn + " & " + "\\num{" + frequency + "} & " + df + "\\\\\n"; + } else { + latexTable += axiomColumn + " & " + frequency + " & " + df + "\\\\\n"; + } } } latexTable += "\\bottomrule\n\\end{tabular}\n"; @@ -394,6 +430,16 @@ return rs.getInt(1); } + private double tf_idf(int nrOfOccurrences, int nrOfOntologies, int nrOfOntologiesWithAxiom){ + double tf = nrOfOccurrences; + double idf = Math.log10(nrOfOntologies / nrOfOntologiesWithAxiom); + return tf * idf; + } + + private double popularity(int totalNrOfOntologies, int patternFrequency, int nrOfOntologiesWithPattern){ + return (double)Math.log10(patternFrequency) * (nrOfOntologiesWithPattern / (double)totalNrOfOntologies); + } + public static void main(String[] args) throws Exception { // ToStringRenderer.getInstance().setRenderer(new DLSyntaxObjectRenderer()); boolean analyzeRepositories = true; This was sent by the SourceForge.net collaborative development platform, the world's largest Open Source 
development site.
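One caveat on the tf_idf helper added in this revision: both operands of the division are ints, so nrOfOntologies / nrOfOntologiesWithAxiom is truncated before Math.log10 is applied, and any pattern that occurs in more than half of the ontologies gets an idf of exactly 0. Below is a minimal sketch of the presumably intended computation with floating-point division; the class name and the numbers in main are invented for illustration.

    public class TfIdfSketch {

        // tf = total number of occurrences of the pattern,
        // idf = log10(#ontologies / #ontologies containing the pattern)
        static double tfIdf(int nrOfOccurrences, int nrOfOntologies, int nrOfOntologiesWithPattern) {
            double tf = nrOfOccurrences;
            // cast before dividing, otherwise the int/int division truncates the ratio
            double idf = Math.log10((double) nrOfOntologies / nrOfOntologiesWithPattern);
            return tf * idf;
        }

        public static void main(String[] args) {
            // a pattern occurring 500 times, spread over 30 of 100 ontologies
            System.out.println(tfIdf(500, 100, 30)); // ~261.4; integer division would give ~238.6
        }
    }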
From: <lor...@us...> - 2013-05-06 21:02:48
Revision: 3954 http://sourceforge.net/p/dl-learner/code/3954 Author: lorenz_b Date: 2013-05-06 21:02:45 +0000 (Mon, 06 May 2013) Log Message: ----------- Improved Latex output. Modified Paths: -------------- trunk/components-core/src/main/java/org/dllearner/kb/sparql/ConciseBoundedDescriptionGeneratorImpl.java trunk/scripts/src/main/java/org/dllearner/scripts/pattern/OWLAxiomPatternDetectionEvaluation.java Modified: trunk/components-core/src/main/java/org/dllearner/kb/sparql/ConciseBoundedDescriptionGeneratorImpl.java =================================================================== --- trunk/components-core/src/main/java/org/dllearner/kb/sparql/ConciseBoundedDescriptionGeneratorImpl.java 2013-05-06 10:40:57 UTC (rev 3953) +++ trunk/components-core/src/main/java/org/dllearner/kb/sparql/ConciseBoundedDescriptionGeneratorImpl.java 2013-05-06 21:02:45 UTC (rev 3954) @@ -24,7 +24,7 @@ private Model baseModel; private List<String> namespaces; - private int maxRecursionDepth = 2; + private int maxRecursionDepth = 3; public ConciseBoundedDescriptionGeneratorImpl(SparqlEndpoint endpoint, ExtractionDBCache cache) { this.endpoint = endpoint; Modified: trunk/scripts/src/main/java/org/dllearner/scripts/pattern/OWLAxiomPatternDetectionEvaluation.java =================================================================== --- trunk/scripts/src/main/java/org/dllearner/scripts/pattern/OWLAxiomPatternDetectionEvaluation.java 2013-05-06 10:40:57 UTC (rev 3953) +++ trunk/scripts/src/main/java/org/dllearner/scripts/pattern/OWLAxiomPatternDetectionEvaluation.java 2013-05-06 21:02:45 UTC (rev 3954) @@ -13,6 +13,7 @@ import java.util.ArrayList; import java.util.Arrays; import java.util.Collection; +import java.util.HashMap; import java.util.LinkedHashMap; import java.util.List; import java.util.Map; @@ -67,6 +68,8 @@ private boolean fancyLatex = false; private boolean dlSyntax = false; private boolean formatNumbers = true; + + private int numberOfRowsPerTable = 25; public OWLAxiomPatternDetectionEvaluation() { initDBConnection(); @@ -107,7 +110,7 @@ makeRepositoryStatistics(repositories); //get top n TBox, RBox and ABox patterns - makePatternStatistics(repositories); + makePatternStatisticsSingleTable(repositories); } public void run(Collection<OntologyRepository> repositories){ @@ -132,7 +135,7 @@ } private void makePatternStatistics(Collection<OntologyRepository> repositories){ - int n = 10; + int n = numberOfRowsPerTable; String latex = ""; @@ -158,6 +161,26 @@ } } + private void makePatternStatisticsSingleTable(Collection<OntologyRepository> repositories){ + int n = numberOfRowsPerTable; + + String latex = ""; + + //total pattern statistics + for (AxiomTypeCategory axiomTypeCategory : AxiomTypeCategory.values()) { + Map<Integer, Map<OWLAxiom, Pair<Integer, Integer>>> topNAxiomPatterns = getTopNAxiomPatternsWithId(axiomTypeCategory, n); + latex += asLatexWithId(axiomTypeCategory, topNAxiomPatterns, repositories, n) + "\n\n"; + } + + try { + new FileOutputStream("pattern-statistics.tex").write(latex.getBytes()); + } catch (FileNotFoundException e) { + e.printStackTrace(); + } catch (IOException e) { + e.printStackTrace(); + } + } + private void makeRepositoryStatistics(Collection<OntologyRepository> repositories){ String latexTable = "\\begin{tabular}{lrr|rrr|rrr|rrr|rrr}"; latexTable += "\\toprule\n"; @@ -331,6 +354,66 @@ return latexTable; } + private String asLatexWithId(AxiomTypeCategory axiomTypeCategory, Map<Integer, Map<OWLAxiom, Pair<Integer, Integer>>> topNAxiomPatterns, Collection<OntologyRepository> 
repositories, int n){ + StringWriter sw = new StringWriter(); + LatexWriter w = new LatexWriter(sw); + LatexObjectVisitor renderer = new LatexObjectVisitor(w, df); + String latexTable = "\\begin{table}\n"; + latexTable += "\\begin{tabular}{rlrr"; + for (int i = 0; i < repositories.size(); i++) { + latexTable += "r"; + } + latexTable += "}\n"; + latexTable += "\\toprule\n"; + latexTable += " & Pattern & Frequency & \\#Ontologies"; + for (OntologyRepository repository : repositories) { + latexTable += " & " + repository.getName(); + } + latexTable += "\\\\\\midrule\n"; + + int i = 0; + for (Entry<Integer, Map<OWLAxiom, Pair<Integer, Integer>>> entry : topNAxiomPatterns.entrySet()) { + i++; + int patternId = entry.getKey(); + OWLAxiom axiom = entry.getValue().keySet().iterator().next(); + Integer frequency = entry.getValue().values().iterator().next().getKey(); + Integer df = entry.getValue().values().iterator().next().getValue(); + + if(axiom != null){ + String axiomColumn = axiomRenderer.render(axiom); + if(fancyLatex){ + axiomColumn = "\\begin{lstlisting}[language=manchester]" + axiomColumn + "\\end{lstlisting}"; + } + if(dlSyntax){ + axiom.accept(renderer); + axiomColumn = sw.toString();sw.getBuffer().setLength(0); + + } + if(formatNumbers){ + latexTable += i + " & " + axiomColumn + " & " + "\\num{" + frequency + "} & " + df; + for (OntologyRepository repository : repositories) { + int rank = 0; + Map<Integer, Map<OWLAxiom, Pair<Integer, Integer>>> topNAxiomPatternsWithId = getTopNAxiomPatternsWithId(repository, axiomTypeCategory, 100); + for (Entry<Integer, Map<OWLAxiom, Pair<Integer, Integer>>> entry2 : topNAxiomPatternsWithId.entrySet()) { + rank++; + if(entry2.getKey() == patternId){ + break; + } + } + latexTable += " & " + rank; + } + latexTable += "\\\\\n"; + } else { + latexTable += axiomColumn + " & " + frequency + " & " + df + "\\\\\n"; + } + } + } + latexTable += "\\bottomrule\n\\end{tabular}\n"; + latexTable += "\\caption{" + "Top " + n + " " + axiomTypeCategory.name() + " axiom patterns." + "}\n"; + latexTable += "\\end{table}\n"; + return latexTable; + } + private Map<OWLAxiom, Pair<Integer, Integer>> getTopNAxiomPatterns(AxiomTypeCategory axiomType, int n){ Map<OWLAxiom, Pair<Integer, Integer>> topN = new LinkedHashMap<OWLAxiom, Pair<Integer, Integer>>(); PreparedStatement ps; @@ -352,6 +435,53 @@ return topN; } + private Map<Integer, Map<OWLAxiom, Pair<Integer, Integer>>> getTopNAxiomPatternsWithId(AxiomTypeCategory axiomType, int n){ + Map<Integer, Map<OWLAxiom, Pair<Integer, Integer>>> topN = new LinkedHashMap<Integer, Map<OWLAxiom, Pair<Integer, Integer>>>(); + PreparedStatement ps; + ResultSet rs; + try { + ps = conn.prepareStatement("SELECT P.id, pattern,SUM(occurrences),COUNT(ontology_id) FROM " + + "Ontology_Pattern OP, Pattern P, Ontology O WHERE " + + "(P.id=OP.pattern_id AND O.id=OP.ontology_id AND P.axiom_type=?) 
" + + "GROUP BY P.id ORDER BY SUM(`OP`.`occurrences`) DESC LIMIT ?"); + ps.setString(1, axiomType.name()); + ps.setInt(2, n); + rs = ps.executeQuery(); + while(rs.next()){ + Map<OWLAxiom, Pair<Integer, Integer>> m = new LinkedHashMap<OWLAxiom, Pair<Integer,Integer>>(); + m.put(asOWLAxiom(rs.getString(2)), new Pair<Integer, Integer>(rs.getInt(3), rs.getInt(4))); + topN.put(rs.getInt(1), m); + } + } catch(SQLException e){ + e.printStackTrace(); + } + return topN; + } + + private Map<Integer, Map<OWLAxiom, Pair<Integer, Integer>>> getTopNAxiomPatternsWithId(OntologyRepository repository, AxiomTypeCategory axiomType, int n){ + Map<Integer, Map<OWLAxiom, Pair<Integer, Integer>>> topN = new LinkedHashMap<Integer, Map<OWLAxiom, Pair<Integer, Integer>>>(); + PreparedStatement ps; + ResultSet rs; + try { + ps = conn.prepareStatement("SELECT P.id, pattern,SUM(occurrences),COUNT(ontology_id) FROM " + + "Ontology_Pattern OP, Pattern P, Ontology O WHERE " + + "(P.id=OP.pattern_id AND O.id=OP.ontology_id AND P.axiom_type=? AND O.repository=?) " + + "GROUP BY P.id ORDER BY SUM(`OP`.`occurrences`) DESC LIMIT ?"); + ps.setString(1, axiomType.name()); + ps.setString(2, repository.getName()); + ps.setInt(3, n); + rs = ps.executeQuery(); + while(rs.next()){ + Map<OWLAxiom, Pair<Integer, Integer>> m = new LinkedHashMap<OWLAxiom, Pair<Integer,Integer>>(); + m.put(asOWLAxiom(rs.getString(2)), new Pair<Integer, Integer>(rs.getInt(3), rs.getInt(4))); + topN.put(rs.getInt(1), m); + } + } catch(SQLException e){ + e.printStackTrace(); + } + return topN; + } + private Map<OWLAxiom, Pair<Integer, Integer>> getTopNAxiomPatterns(OntologyRepository repository, AxiomTypeCategory axiomType, int n){ Map<OWLAxiom, Pair<Integer, Integer>> topN = new LinkedHashMap<OWLAxiom, Pair<Integer, Integer>>(); PreparedStatement ps; This was sent by the SourceForge.net collaborative development platform, the world's largest Open Source development site. |
From: <lor...@us...> - 2013-05-07 20:50:59
Revision: 3957 http://sourceforge.net/p/dl-learner/code/3957 Author: lorenz_b Date: 2013-05-07 20:50:56 +0000 (Tue, 07 May 2013) Log Message: ----------- Added option to set a maximum number of tested classes for each pattern. Modified Paths: -------------- trunk/components-core/src/main/java/org/dllearner/kb/sparql/ConciseBoundedDescriptionGeneratorImpl.java trunk/components-core/src/main/java/org/dllearner/utilities/owl/OWLClassExpressionToSPARQLConverter.java trunk/scripts/src/main/java/org/dllearner/scripts/pattern/LatexObjectVisitor.java trunk/scripts/src/main/java/org/dllearner/scripts/pattern/OWLAxiomPatternDetectionEvaluation.java trunk/scripts/src/main/java/org/dllearner/scripts/pattern/OWLAxiomPatternUsageEvaluation.java Modified: trunk/components-core/src/main/java/org/dllearner/kb/sparql/ConciseBoundedDescriptionGeneratorImpl.java =================================================================== --- trunk/components-core/src/main/java/org/dllearner/kb/sparql/ConciseBoundedDescriptionGeneratorImpl.java 2013-05-07 06:24:03 UTC (rev 3956) +++ trunk/components-core/src/main/java/org/dllearner/kb/sparql/ConciseBoundedDescriptionGeneratorImpl.java 2013-05-07 20:50:56 UTC (rev 3957) @@ -24,7 +24,7 @@ private Model baseModel; private List<String> namespaces; - private int maxRecursionDepth = 3; + private int maxRecursionDepth = 1; public ConciseBoundedDescriptionGeneratorImpl(SparqlEndpoint endpoint, ExtractionDBCache cache) { this.endpoint = endpoint; Modified: trunk/components-core/src/main/java/org/dllearner/utilities/owl/OWLClassExpressionToSPARQLConverter.java =================================================================== --- trunk/components-core/src/main/java/org/dllearner/utilities/owl/OWLClassExpressionToSPARQLConverter.java 2013-05-07 06:24:03 UTC (rev 3956) +++ trunk/components-core/src/main/java/org/dllearner/utilities/owl/OWLClassExpressionToSPARQLConverter.java 2013-05-07 20:50:56 UTC (rev 3957) @@ -103,6 +103,10 @@ } public Query asQuery(String rootVariable, OWLClassExpression expr, Set<? extends OWLEntity> variableEntities){ + return asQuery(rootVariable, expr, variableEntities, false); + } + + public Query asQuery(String rootVariable, OWLClassExpression expr, Set<? 
extends OWLEntity> variableEntities, boolean count){ this.variableEntities = variableEntities; String queryString = "SELECT DISTINCT "; String triplePattern = convert(rootVariable, expr); @@ -113,18 +117,25 @@ String var = variablesMapping.get(owlEntity); queryString += var + " "; } - queryString += "(COUNT(DISTINCT " + rootVariable + ") AS ?cnt) WHERE {"; + if(count){ + queryString += "(COUNT(DISTINCT " + rootVariable + ") AS ?cnt)"; + } else { + queryString += rootVariable; + } + queryString += " WHERE {"; } queryString += triplePattern; queryString += "}"; if(!variableEntities.isEmpty()){ - queryString += "GROUP BY "; - for (OWLEntity owlEntity : variableEntities) { - String var = variablesMapping.get(owlEntity); - queryString += var; + if(count){ + queryString += "GROUP BY "; + for (OWLEntity owlEntity : variableEntities) { + String var = variablesMapping.get(owlEntity); + queryString += var; + } + queryString += " ORDER BY DESC(?cnt)"; } - queryString += " ORDER BY DESC(?cnt)"; } return QueryFactory.create(queryString, Syntax.syntaxARQ); } Modified: trunk/scripts/src/main/java/org/dllearner/scripts/pattern/LatexObjectVisitor.java =================================================================== --- trunk/scripts/src/main/java/org/dllearner/scripts/pattern/LatexObjectVisitor.java 2013-05-07 06:24:03 UTC (rev 3956) +++ trunk/scripts/src/main/java/org/dllearner/scripts/pattern/LatexObjectVisitor.java 2013-05-07 20:50:56 UTC (rev 3957) @@ -248,7 +248,8 @@ write(SOME); writeSpace(); node.getProperty().accept(this); - writeSpace(); +// writeSpace(); + writeDot(); writeOpenBrace(); node.getValue().accept(this); writeCloseBrace(); Modified: trunk/scripts/src/main/java/org/dllearner/scripts/pattern/OWLAxiomPatternDetectionEvaluation.java =================================================================== --- trunk/scripts/src/main/java/org/dllearner/scripts/pattern/OWLAxiomPatternDetectionEvaluation.java 2013-05-07 06:24:03 UTC (rev 3956) +++ trunk/scripts/src/main/java/org/dllearner/scripts/pattern/OWLAxiomPatternDetectionEvaluation.java 2013-05-07 20:50:56 UTC (rev 3957) @@ -378,7 +378,7 @@ } if(formatNumbers){ - latexTable += i + " & " + axiomColumn + " & " + "\\num{" + frequency + "} & " + df; + latexTable += i + ". 
& " + axiomColumn + " & " + "\\num{" + frequency + "} & " + df; for (OntologyRepository repository : repositories) { int rank = 0; boolean contained = false; Modified: trunk/scripts/src/main/java/org/dllearner/scripts/pattern/OWLAxiomPatternUsageEvaluation.java =================================================================== --- trunk/scripts/src/main/java/org/dllearner/scripts/pattern/OWLAxiomPatternUsageEvaluation.java 2013-05-07 06:24:03 UTC (rev 3956) +++ trunk/scripts/src/main/java/org/dllearner/scripts/pattern/OWLAxiomPatternUsageEvaluation.java 2013-05-07 20:50:56 UTC (rev 3957) @@ -12,11 +12,10 @@ import java.net.URL; import java.sql.Connection; import java.sql.DriverManager; -import java.sql.PreparedStatement; -import java.sql.ResultSet; import java.sql.SQLException; import java.text.DecimalFormat; import java.util.ArrayList; +import java.util.Collection; import java.util.Collections; import java.util.HashMap; import java.util.HashSet; @@ -25,6 +24,7 @@ import java.util.List; import java.util.Map; import java.util.Map.Entry; +import java.util.Random; import java.util.Set; import java.util.concurrent.TimeUnit; import java.util.prefs.Preferences; @@ -33,12 +33,13 @@ import joptsimple.OptionParser; import joptsimple.OptionSet; -import org.aksw.commons.util.Pair; +import org.apache.commons.lang.text.StrTokenizer; import org.apache.log4j.Logger; import org.coode.owlapi.functionalparser.OWLFunctionalSyntaxOWLParser; import org.coode.owlapi.turtle.TurtleOntologyFormat; import org.dllearner.core.EvaluatedAxiom; import org.dllearner.core.Score; +import org.dllearner.core.owl.EquivalentClassesAxiom; import org.dllearner.core.owl.NamedClass; import org.dllearner.kb.LocalModelBasedSparqlEndpointKS; import org.dllearner.kb.SparqlEndpointKS; @@ -64,6 +65,7 @@ import org.semanticweb.owlapi.model.OWLClassExpression; import org.semanticweb.owlapi.model.OWLDataFactory; import org.semanticweb.owlapi.model.OWLEntity; +import org.semanticweb.owlapi.model.OWLEquivalentClassesAxiom; import org.semanticweb.owlapi.model.OWLOntology; import org.semanticweb.owlapi.model.OWLOntologyCreationException; import org.semanticweb.owlapi.model.OWLOntologyManager; @@ -77,6 +79,8 @@ import uk.ac.manchester.cs.owlapi.dlsyntax.DLSyntaxObjectRenderer; import com.google.common.base.Charsets; +import com.google.common.collect.HashMultiset; +import com.google.common.collect.Multiset; import com.google.common.hash.HashCode; import com.google.common.hash.HashFunction; import com.google.common.hash.Hashing; @@ -105,7 +109,7 @@ private Connection conn; private ExtractionDBCache cache = new ExtractionDBCache("pattern-cache"); - private SparqlEndpointKS ks = new SparqlEndpointKS(SparqlEndpoint.getEndpointDBpedia(), cache);//new LocalModelBasedSparqlEndpointKS(model); + private SparqlEndpointKS ks = new SparqlEndpointKS(SparqlEndpoint.getEndpointDBpediaLiveAKSW(), cache);//new LocalModelBasedSparqlEndpointKS(model); private String ns = "http://dbpedia.org/ontology/"; private boolean fancyLatex = false; @@ -113,6 +117,10 @@ private long waitingTime = TimeUnit.SECONDS.toMillis(3); private double threshold = 0.6; private OWLAnnotationProperty confidenceProperty = df.getOWLAnnotationProperty(IRI.create("http://dl-learner.org/pattern/confidence")); + + private OWLClassExpressionToSPARQLConverter converter = new OWLClassExpressionToSPARQLConverter(); + private long maxExecutionTime = TimeUnit.SECONDS.toMillis(20); + private int queryLimit = 10000; public OWLAxiomPatternUsageEvaluation() { initDBConnection(); @@ -143,83 +151,50 @@ } } 
- public void run(SparqlEndpoint endpoint, OWLOntology ontology, File outputFile){ - ks = new SparqlEndpointKS(endpoint); + public void run(SparqlEndpoint endpoint, OWLOntology ontology, File outputFile, int maxNrOfTestedClasses){ + ks = new SparqlEndpointKS(endpoint, cache); SPARQLReasoner reasoner = new SPARQLReasoner(ks, cache); - OWLClassExpressionToSPARQLConverter converter = new OWLClassExpressionToSPARQLConverter(); - //get the axiom patterns to evaluate - List<OWLAxiom> patterns = getPatternsToEvaluate(); + List<OWLAxiom> patterns = getPatternsToEvaluate(ontology); //get all classes in KB - Set<NamedClass> classes = reasoner.getTypes(ns); + Collection<NamedClass> classes = reasoner.getTypes(ns); + List<NamedClass> classesList = new ArrayList<NamedClass>(classes); + Collections.shuffle(classesList, new Random(123)); + classesList = classesList.subList(0, maxNrOfTestedClasses); + classes = classesList; //for each pattern for (OWLAxiom pattern : patterns) { + //if pattern is equivalent classes axiom, we need to get the subclass axiom where the named class is the subclass + if(pattern.isOfType(AxiomType.EQUIVALENT_CLASSES)){ + Set<OWLSubClassOfAxiom> subClassOfAxioms = ((OWLEquivalentClassesAxiom)pattern).asOWLSubClassOfAxioms(); + for (OWLSubClassOfAxiom axiom : subClassOfAxioms) { + if(!axiom.getSubClass().isAnonymous()){ + pattern = axiom; + break; + } + } + } if(pattern.isOfType(AxiomType.SUBCLASS_OF)){ logger.info("Processing " + pattern + "..."); - Set<EvaluatedAxiom> evaluatedAxioms = new HashSet<EvaluatedAxiom>(); Map<OWLAxiom, Score> axioms2Score = new LinkedHashMap<OWLAxiom, Score>(); - OWLClassExpression patternSubClass = ((OWLSubClassOfAxiom)pattern).getSubClass(); - OWLClassExpression superClass = ((OWLSubClassOfAxiom)pattern).getSuperClass(); //for each class int i = 1; for (NamedClass cls : classes) { logger.info("Processing " + cls + "..."); - //set the subclass as a class from the KB - OWLClass subClass = df.getOWLClass(IRI.create(cls.getName())); - //1. count number of instances in subclass expression - Query query = QueryFactory.create("SELECT (COUNT(DISTINCT ?x) AS ?cnt) WHERE {" + converter.convert("?x", subClass) + "}",Syntax.syntaxARQ); - int subClassCnt = executeSelectQuery(query).next().getLiteral("cnt").getInt(); + Map<OWLAxiom, Score> result = evaluate2(pattern, cls); - //2. 
count number of instances in subclass AND superclass expression - //we have 2 options here to evaluate the whole axiom pattern: - //a) we replace all entities in the signature of the super class expression(except the subclass) with variables - //and GROUP BY them - //b) we replace only 1 entity with a variable, thus we have to try it for several combinations -// for (OWLEntity entity : signature) { -// //replace current entity with variable and for the rest use existing entities in KB -// query = converter.asQuery("?x", df.getOWLObjectIntersectionOf(subClass, superClass), signature); -// } - Set<OWLEntity> signature = superClass.getSignature(); - signature.remove(subClass); - query = converter.asQuery("?x", df.getOWLObjectIntersectionOf(subClass, superClass), signature); - query.setLimit(100); - Map<OWLEntity, String> variablesMapping = converter.getVariablesMapping();System.out.println(query); - com.hp.hpl.jena.query.ResultSet rs = executeSelectQuery(query); - QuerySolution qs; - while(rs.hasNext()){ - qs = rs.next(); - //get the IRIs for each variable - Map<OWLEntity, IRI> entity2IRIMap = new HashMap<OWLEntity, IRI>(); - entity2IRIMap.put(patternSubClass.asOWLClass(), subClass.getIRI()); - for (OWLEntity entity : signature) { - String var = variablesMapping.get(entity); - Resource resource = qs.getResource(var); - entity2IRIMap.put(entity, IRI.create(resource.getURI())); + for (Entry<OWLAxiom, Score> entry : result.entrySet()) { + OWLAxiom axiom = entry.getKey(); + Score score = entry.getValue(); + if(score.getAccuracy() >= threshold){ + logger.info(axiom + "(" + format.format(score.getAccuracy()) + ")"); } - //instantiate the pattern - OWLObjectDuplicator duplicator = new OWLObjectDuplicator(entity2IRIMap, df); - OWLAxiom patternInstantiation = duplicator.duplicateObject(pattern); - int patternInstantiationCnt = qs.getLiteral("cnt").getInt(); - //compute score - Score score; - try { - score = computeScore(subClassCnt, patternInstantiationCnt); - axioms2Score.put(patternInstantiation, score); - logger.info(patternInstantiation + "(" + format.format(score.getAccuracy()) + ")"); - } catch (IllegalArgumentException e) { - //sometimes Virtuosos returns 'wrong' cnt values such that the success number as bigger than the total number of instances - e.printStackTrace(); - } - -// //convert into EvaluatedAxiom such we can serialize it as RDF with accuracy value as annotation -// EvaluatedAxiom evaluatedAxiom = new EvaluatedAxiom(DLLearnerAxiomConvertVisitor.getDLLearnerAxiom(patternInstantiation), score); -// evaluatedAxioms.add(evaluatedAxiom); - } + //wait some time to avoid flooding of endpoint try { Thread.sleep(waitingTime); @@ -233,6 +208,167 @@ } } + private Map<OWLAxiom, Score> evaluate1(OWLAxiom pattern, NamedClass cls){ + Map<OWLAxiom, Score> axioms2Score = new HashMap<OWLAxiom, Score>(); + + OWLClassExpression patternSubClass = ((OWLSubClassOfAxiom)pattern).getSubClass(); + OWLClassExpression superClass = ((OWLSubClassOfAxiom)pattern).getSuperClass(); + + //set the subclass as a class from the KB + OWLClass subClass = df.getOWLClass(IRI.create(cls.getName())); + + //1. count number of instances in subclass expression + Query query = QueryFactory.create("SELECT (COUNT(DISTINCT ?x) AS ?cnt) WHERE {" + converter.convert("?x", subClass) + "}",Syntax.syntaxARQ); + int subClassCnt = executeSelectQuery(query).next().getLiteral("cnt").getInt(); + + //2. 
replace all entities which are not the subclass, GROUP BY and COUNT + Set<OWLEntity> signature = superClass.getSignature(); + signature.remove(subClass); + query = converter.asQuery("?x", df.getOWLObjectIntersectionOf(subClass, superClass), signature, true); + Map<OWLEntity, String> variablesMapping = converter.getVariablesMapping(); + com.hp.hpl.jena.query.ResultSet rs = executeSelectQuery(query); + QuerySolution qs; + while(rs.hasNext()){ + qs = rs.next(); + //get the IRIs for each variable + Map<OWLEntity, IRI> entity2IRIMap = new HashMap<OWLEntity, IRI>(); + entity2IRIMap.put(patternSubClass.asOWLClass(), subClass.getIRI()); + for (OWLEntity entity : signature) { + String var = variablesMapping.get(entity); + Resource resource = qs.getResource(var); + entity2IRIMap.put(entity, IRI.create(resource.getURI())); + } + //instantiate the pattern + OWLObjectDuplicator duplicator = new OWLObjectDuplicator(entity2IRIMap, df); + OWLAxiom patternInstantiation = duplicator.duplicateObject(pattern); + int patternInstantiationCnt = qs.getLiteral("cnt").getInt(); + //compute score + Score score; + try { + score = computeScore(subClassCnt, patternInstantiationCnt); + axioms2Score.put(patternInstantiation, score); + } catch (IllegalArgumentException e) { + //sometimes Virtuosos returns 'wrong' cnt values such that the success number as bigger than the total number of instances + e.printStackTrace(); + } + } + + return axioms2Score; + } + + private Map<OWLAxiom, Score> evaluate2(OWLAxiom pattern, NamedClass cls){ + Map<OWLAxiom, Score> axioms2Score = new HashMap<OWLAxiom, Score>(); + + OWLClassExpression patternSubClass = ((OWLSubClassOfAxiom)pattern).getSubClass(); + OWLClassExpression superClass = ((OWLSubClassOfAxiom)pattern).getSuperClass(); + + //set the subclass as a class from the KB + OWLClass subClass = df.getOWLClass(IRI.create(cls.getName())); + + //1. 
convert class expression, replace non-subclass entities and get result + Set<OWLEntity> signature = superClass.getSignature(); + signature.remove(subClass); + + Query query; + Multiset<OWLAxiom> instantiations = HashMultiset.create(); + Set<String> resources = new HashSet<String>();//we need the number of distinct resources (?x) to compute the score + long startTime = System.currentTimeMillis(); + int offset = 0; + boolean hasMoreResults = true; + while(hasMoreResults && (System.currentTimeMillis() - startTime)<= maxExecutionTime){ + query = converter.asQuery("?x", df.getOWLObjectIntersectionOf(subClass, superClass), signature); + query.setLimit(queryLimit); + query.setOffset(offset); + System.out.println(query); + Map<OWLEntity, String> variablesMapping = converter.getVariablesMapping(); + com.hp.hpl.jena.query.ResultSet rs = executeSelectQuery(query); + QuerySolution qs; + if(!rs.hasNext()){ + hasMoreResults = false; + } + while(rs.hasNext()){ + qs = rs.next(); + resources.add(qs.getResource("x").getURI()); + //get the IRIs for each variable + Map<OWLEntity, IRI> entity2IRIMap = new HashMap<OWLEntity, IRI>(); + entity2IRIMap.put(patternSubClass.asOWLClass(), subClass.getIRI()); + for (OWLEntity entity : signature) { + String var = variablesMapping.get(entity); + Resource resource = qs.getResource(var); + entity2IRIMap.put(entity, IRI.create(resource.getURI())); + } + //instantiate the pattern + OWLObjectDuplicator duplicator = new OWLObjectDuplicator(entity2IRIMap, df); + OWLAxiom patternInstantiation = duplicator.duplicateObject(pattern); + + instantiations.add(patternInstantiation); + } + offset += queryLimit; + try { + Thread.sleep(300); + } catch (InterruptedException e) { + e.printStackTrace(); + } + } + + //compute the score + int total = resources.size(); + for (OWLAxiom axiom : instantiations.elementSet()) { + int frequency = instantiations.count(axiom); + Score score = computeScore(total, Math.min(total, frequency)); + axioms2Score.put(axiom, score); + } + + return axioms2Score; + } + + private Map<OWLAxiom, Score> evaluate3(OWLAxiom pattern, NamedClass cls){ + Map<OWLAxiom, Score> axioms2Score = new HashMap<OWLAxiom, Score>(); + + OWLClassExpression patternSubClass = ((OWLSubClassOfAxiom)pattern).getSubClass(); + OWLClassExpression superClass = ((OWLSubClassOfAxiom)pattern).getSuperClass(); + + //set the subclass as a class from the KB + OWLClass subClass = df.getOWLClass(IRI.create(cls.getName())); + + //1. count number of instances in subclass expression + Query query = QueryFactory.create("SELECT (COUNT(DISTINCT ?x) AS ?cnt) WHERE {" + converter.convert("?x", subClass) + "}",Syntax.syntaxARQ); + int subClassCnt = executeSelectQuery(query).next().getLiteral("cnt").getInt(); + + //2. 
get result + Set<OWLEntity> signature = superClass.getSignature(); + signature.remove(subClass); + query = converter.asQuery("?x", df.getOWLObjectIntersectionOf(subClass, superClass), signature); + Map<OWLEntity, String> variablesMapping = converter.getVariablesMapping(); + com.hp.hpl.jena.query.ResultSet rs = executeSelectQuery(query); + QuerySolution qs; + Multiset<OWLAxiom> instantiations = HashMultiset.create(); + while(rs.hasNext()){ + qs = rs.next(); + //get the IRIs for each variable + Map<OWLEntity, IRI> entity2IRIMap = new HashMap<OWLEntity, IRI>(); + entity2IRIMap.put(patternSubClass.asOWLClass(), subClass.getIRI()); + for (OWLEntity entity : signature) { + String var = variablesMapping.get(entity); + Resource resource = qs.getResource(var); + entity2IRIMap.put(entity, IRI.create(resource.getURI())); + } + //instantiate the pattern + OWLObjectDuplicator duplicator = new OWLObjectDuplicator(entity2IRIMap, df); + OWLAxiom patternInstantiation = duplicator.duplicateObject(pattern); + + instantiations.add(patternInstantiation); + } + //compute the score + for (OWLAxiom axiom : instantiations.elementSet()) { + int frequency = instantiations.count(axiom); + Score score = new AxiomScore(frequency); + axioms2Score.put(axiom, score); + } + + return axioms2Score; + } + private void save(OWLAxiom pattern, Map<OWLAxiom, Score> axioms2Score){ try { Set<OWLAxiom> annotatedAxioms = new HashSet<OWLAxiom>(); @@ -278,15 +414,6 @@ } } - public List<OWLAxiom> getPatternsToEvaluate(){ - List<OWLAxiom> axiomPatterns = new ArrayList<OWLAxiom>(); - - Map<OWLAxiom, Pair<Integer, Integer>> topNAxiomPatterns = getTopNAxiomPatterns(AxiomTypeCategory.TBox, 10); - axiomPatterns.addAll(topNAxiomPatterns.keySet()); - - return axiomPatterns; - } - public List<OWLAxiom> getPatternsToEvaluate(OWLOntology ontology){ List<OWLAxiom> axiomPatterns = new ArrayList<OWLAxiom>(); @@ -295,53 +422,6 @@ return axiomPatterns; } - - private String asLatex(String title, Map<OWLAxiom, Pair<Integer, Integer>> topN){ - String latexTable = "\\begin{table}\n"; - latexTable += "\\begin{tabular}{lrr}\n"; - latexTable += "\\toprule\n"; - latexTable += "Pattern & Frequency & \\#Ontologies\\\\\\midrule\n"; - - for (Entry<OWLAxiom, Pair<Integer, Integer>> entry : topN.entrySet()) { - OWLAxiom axiom = entry.getKey(); - Integer frequency = entry.getValue().getKey(); - Integer idf = entry.getValue().getValue(); - - if(axiom != null){ - String axiomColumn = axiomRenderer.render(axiom); - if(fancyLatex){ - axiomColumn = "\\begin{lstlisting}[language=manchester]" + axiomColumn + "\\end{lstlisting}"; - } - latexTable += axiomColumn + " & " + frequency + " & " + idf + "\\\\\n"; - } - } - latexTable += "\\bottomrule\n\\end{tabular}\n"; - latexTable += "\\caption{" + title + "}\n"; - latexTable += "\\end{table}\n"; - return latexTable; - } - - private Map<OWLAxiom, Pair<Integer, Integer>> getTopNAxiomPatterns(AxiomTypeCategory axiomType, int n){ - Map<OWLAxiom, Pair<Integer, Integer>> topN = new LinkedHashMap<OWLAxiom, Pair<Integer, Integer>>(); - PreparedStatement ps; - ResultSet rs; - try { - ps = conn.prepareStatement("SELECT pattern,SUM(occurrences),COUNT(ontology_id) FROM " + - "Ontology_Pattern OP, Pattern P, Ontology O WHERE " + - "(P.id=OP.pattern_id AND O.id=OP.ontology_id AND P.axiom_type=?) 
" + - "GROUP BY P.id ORDER BY SUM(`OP`.`occurrences`) DESC LIMIT ?"); - ps.setString(1, axiomType.name()); - ps.setInt(2, n); - rs = ps.executeQuery(); - while(rs.next()){ - topN.put(asOWLAxiom(rs.getString(1)), new Pair<Integer, Integer>(rs.getInt(2), rs.getInt(3))); - } - } catch(SQLException e){ - e.printStackTrace(); - } - return topN; - } - protected com.hp.hpl.jena.query.ResultSet executeSelectQuery(Query query) { com.hp.hpl.jena.query.ResultSet rs = null; if(ks.isRemote()){ @@ -414,27 +494,6 @@ return new AxiomScore(accuracy, confidence, total, success, total-success); } - private OWLAxiom asOWLAxiom(String functionalSyntaxAxiomString){ - try { - StringDocumentSource s = new StringDocumentSource("Ontology(<http://www.pattern.org> " + functionalSyntaxAxiomString + ")"); - OWLFunctionalSyntaxOWLParser p = new OWLFunctionalSyntaxOWLParser(); - OWLOntology newOntology = OWLManager.createOWLOntologyManager().createOntology(); - p.parse(s, newOntology); - if(!newOntology.getLogicalAxioms().isEmpty()){ - return newOntology.getLogicalAxioms().iterator().next(); - } - } catch (UnloadableImportException e) { - e.printStackTrace(); - } catch (OWLOntologyCreationException e) { - e.printStackTrace(); - } catch (OWLParserException e) { - System.err.println("Parsing failed for axiom " + functionalSyntaxAxiomString); - } catch (IOException e) { - e.printStackTrace(); - } - return null; - } - public static void main(String[] args) throws Exception { ToStringRenderer.getInstance().setRenderer(new DLSyntaxObjectRenderer()); @@ -450,6 +509,8 @@ "The ontology file which contains the patterns.").withOptionalArg().ofType(File.class); parser.acceptsAll(asList("o", "output"), "Specify a file where the output can be written.") .withOptionalArg().ofType(File.class); + parser.acceptsAll(asList("l", "limit"), "Specify the maximum number of classes tested for each pattern.") + .withRequiredArg().ofType(Integer.class); // parse options and display a message for the user in case of problems OptionSet options = null; @@ -503,7 +564,7 @@ System.out.println("The specified ontology patterns file can not be found."); System.exit(0); } - OWLOntology ontology = OWLManager.createOWLOntologyManager().loadOntologyFromOntologyDocument(patternsFile); + OWLOntology patternsOntology = OWLManager.createOWLOntologyManager().loadOntologyFromOntologyDocument(patternsFile); File outputFile = null; try { outputFile = (File) options.valueOf("output"); @@ -511,7 +572,8 @@ System.out.println("The specified output file can not be found."); System.exit(0); } - new OWLAxiomPatternUsageEvaluation().run(endpoint, ontology, outputFile); + int maxNrOfTestedClasses = (Integer) options.valueOf("limit"); + new OWLAxiomPatternUsageEvaluation().run(endpoint, patternsOntology, outputFile, maxNrOfTestedClasses); } } This was sent by the SourceForge.net collaborative development platform, the world's largest Open Source development site. |
From: <lor...@us...> - 2013-05-08 21:03:48
|
Revision: 3963 http://sourceforge.net/p/dl-learner/code/3963 Author: lorenz_b Date: 2013-05-08 21:03:43 +0000 (Wed, 08 May 2013) Log Message: ----------- Refactored pattern eval. Modified Paths: -------------- trunk/components-core/src/main/java/org/dllearner/utilities/owl/OWLClassExpressionToSPARQLConverter.java trunk/scripts/src/main/java/org/dllearner/scripts/pattern/OWLAxiomPatternUsageEvaluation.java Modified: trunk/components-core/src/main/java/org/dllearner/utilities/owl/OWLClassExpressionToSPARQLConverter.java =================================================================== --- trunk/components-core/src/main/java/org/dllearner/utilities/owl/OWLClassExpressionToSPARQLConverter.java 2013-05-08 19:07:53 UTC (rev 3962) +++ trunk/components-core/src/main/java/org/dllearner/utilities/owl/OWLClassExpressionToSPARQLConverter.java 2013-05-08 21:03:43 UTC (rev 3963) @@ -1,5 +1,7 @@ package org.dllearner.utilities.owl; +import java.util.ArrayList; +import java.util.Collection; import java.util.Collections; import java.util.HashMap; import java.util.HashSet; @@ -7,6 +9,7 @@ import java.util.Map; import java.util.Set; import java.util.Stack; +import java.util.TreeSet; import org.aksw.commons.collections.diff.ModelDiff; import org.semanticweb.owlapi.apibinding.OWLManager; @@ -58,6 +61,10 @@ import uk.ac.manchester.cs.owl.owlapi.OWLDataFactoryImpl; import uk.ac.manchester.cs.owlapi.dlsyntax.DLSyntaxObjectRenderer; +import com.google.common.collect.HashMultimap; +import com.google.common.collect.Multimap; +import com.google.common.collect.Multimaps; +import com.google.common.collect.Sets; import com.hp.hpl.jena.query.Query; import com.hp.hpl.jena.query.QueryFactory; import com.hp.hpl.jena.query.Syntax; @@ -74,6 +81,8 @@ private OWLDataFactory df = new OWLDataFactoryImpl(); + private Multimap<Integer, OWLEntity> properties = HashMultimap.create(); + private Map<Integer, Boolean> intersection; private Set<? 
extends OWLEntity> variableEntities = new HashSet<OWLEntity>(); @@ -147,6 +156,7 @@ private void reset(){ variablesMapping.clear(); variables.clear(); + properties.clear(); classCnt = 0; propCnt = 0; indCnt = 0; @@ -238,6 +248,9 @@ } else { s = "<" + entity.toStringID() + ">"; } + if(entity.isOWLObjectProperty()){ + properties.put(modalDepth(), entity); + } return s; } @@ -269,6 +282,19 @@ for (OWLClassExpression operand : operands) { operand.accept(this); } + Collection<OWLEntity> props = properties.get(modalDepth()); + if(props.size() > 1){ + Collection<String> vars = new TreeSet<String>(); + for (OWLEntity p : props) { + if(variablesMapping.containsKey(p)){ + vars.add(variablesMapping.get(p)); + } + } + if(vars.size() == 2){ + List<String> varList = new ArrayList<String>(vars); + sparql += "FILTER(" + varList.get(0) + "!=" + varList.get(1) + ")"; + } + } leaveIntersection(); } @@ -728,8 +754,14 @@ //variable entity expr = df.getOWLObjectIntersectionOf( df.getOWLObjectSomeValuesFrom(propR, clsB), + clsB, df.getOWLObjectSomeValuesFrom(propS, clsA)); + query = converter.asQuery(rootVar, expr, Sets.newHashSet(propR, propS)).toString(); + System.out.println(expr + "\n" + query); + + expr = df.getOWLObjectIntersectionOf( + df.getOWLObjectSomeValuesFrom(propR, df.getOWLObjectIntersectionOf(df.getOWLObjectSomeValuesFrom(propS, clsA), clsC)), clsB); - query = converter.asQuery(rootVar, expr, Collections.singleton(propR)).toString(); + query = converter.asQuery(rootVar, expr, Sets.newHashSet(propR, propS)).toString(); System.out.println(expr + "\n" + query); } Modified: trunk/scripts/src/main/java/org/dllearner/scripts/pattern/OWLAxiomPatternUsageEvaluation.java =================================================================== --- trunk/scripts/src/main/java/org/dllearner/scripts/pattern/OWLAxiomPatternUsageEvaluation.java 2013-05-08 19:07:53 UTC (rev 3962) +++ trunk/scripts/src/main/java/org/dllearner/scripts/pattern/OWLAxiomPatternUsageEvaluation.java 2013-05-08 21:03:43 UTC (rev 3963) @@ -6,14 +6,10 @@ import java.io.FileInputStream; import java.io.FileNotFoundException; import java.io.FileOutputStream; -import java.io.IOException; -import java.io.InputStream; import java.io.UnsupportedEncodingException; import java.net.SocketTimeoutException; import java.net.URI; import java.net.URL; -import java.sql.Connection; -import java.sql.DriverManager; import java.sql.SQLException; import java.text.DecimalFormat; import java.util.ArrayList; @@ -30,7 +26,6 @@ import java.util.Set; import java.util.TreeSet; import java.util.concurrent.TimeUnit; -import java.util.prefs.Preferences; import joptsimple.OptionException; import joptsimple.OptionParser; @@ -52,8 +47,6 @@ import org.dllearner.reasoning.SPARQLReasoner; import org.dllearner.utilities.owl.DLLearnerDescriptionConvertVisitor; import org.dllearner.utilities.owl.OWLClassExpressionToSPARQLConverter; -import org.ini4j.IniPreferences; -import org.ini4j.InvalidFileFormatException; import org.semanticweb.owlapi.apibinding.OWLManager; import org.semanticweb.owlapi.io.OWLObjectRenderer; import org.semanticweb.owlapi.io.ToStringRenderer; @@ -92,7 +85,9 @@ import com.hp.hpl.jena.query.Syntax; import com.hp.hpl.jena.rdf.model.Model; import com.hp.hpl.jena.rdf.model.ModelFactory; +import com.hp.hpl.jena.rdf.model.RDFNode; import com.hp.hpl.jena.rdf.model.Resource; +import com.hp.hpl.jena.rdf.model.Statement; import com.hp.hpl.jena.sparql.engine.http.QueryEngineHTTP; import com.hp.hpl.jena.sparql.engine.http.QueryExceptionHTTP; import 
com.hp.hpl.jena.vocabulary.RDF; @@ -298,6 +293,7 @@ } catch (FileNotFoundException e) { e.printStackTrace(); } + filterModel(fragment); class2Fragment.put(cls, fragment); logger.info("...got " + fragment.size() + " triples."); continue; @@ -336,6 +332,19 @@ return class2Fragment; } + private void filterModel(Model model){ + List<Statement> statements2Remove = new ArrayList<Statement>(); + for (Statement st : model.listStatements().toSet()) { + if(st.getObject().isLiteral()){ + statements2Remove.add(st); + } + if(st.getPredicate().equals(RDF.type) && !st.getObject().asResource().getURI().startsWith("http://dbpedia.org/ontology/")){ + statements2Remove.add(st); + } + } + model.remove(statements2Remove); + } + private Map<OWLAxiom, Score> evaluate1(OWLAxiom pattern, NamedClass cls){ Map<OWLAxiom, Score> axioms2Score = new HashMap<OWLAxiom, Score>(); @@ -540,15 +549,11 @@ patternSubClass = cls; - // 2. execute SPARQL query on local model - Query query = QueryFactory.create( - "SELECT (COUNT(DISTINCT ?x) AS ?cnt) WHERE {" + converter.convert("?x", patternSubClass) + "}", - Syntax.syntaxARQ); - int subClassCnt = QueryExecutionFactory.create(query, fragment).execSelect().next().getLiteral("cnt").getInt(); + Set<OWLEntity> signature = patternSuperClass.getSignature(); signature.remove(patternSubClass); - query = converter.asQuery("?x", df.getOWLObjectIntersectionOf(patternSubClass, patternSuperClass), signature); + Query query = converter.asQuery("?x", df.getOWLObjectIntersectionOf(patternSubClass, patternSuperClass), signature); logger.info("Running query\n" + query); Map<OWLEntity, String> variablesMapping = converter.getVariablesMapping(); com.hp.hpl.jena.query.ResultSet rs = QueryExecutionFactory.create(query, fragment).execSelect(); This was sent by the SourceForge.net collaborative development platform, the world's largest Open Source development site. |
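For readers skimming the diff: the new filterModel(Model) helper prunes the locally extracted class fragment before evaluation by dropping all triples with literal objects and all rdf:type triples whose object lies outside the DBpedia ontology namespace. Below is a self-contained Jena sketch of that behaviour with made-up sample data; it mirrors the committed method but is not the method itself.

import java.util.ArrayList;
import java.util.List;

import com.hp.hpl.jena.rdf.model.Model;
import com.hp.hpl.jena.rdf.model.ModelFactory;
import com.hp.hpl.jena.rdf.model.Statement;
import com.hp.hpl.jena.vocabulary.RDF;

public class FragmentFilterSketch {

    // Drop literal triples and rdf:type triples that do not point into dbpedia.org/ontology/.
    public static void filter(Model model) {
        List<Statement> statements2Remove = new ArrayList<Statement>();
        for (Statement st : model.listStatements().toList()) {
            if (st.getObject().isLiteral()) {
                statements2Remove.add(st);
            } else if (st.getPredicate().equals(RDF.type)
                    && !st.getObject().asResource().getURI()
                            .startsWith("http://dbpedia.org/ontology/")) {
                statements2Remove.add(st);
            }
        }
        model.remove(statements2Remove);
    }

    public static void main(String[] args) {
        Model m = ModelFactory.createDefaultModel();
        m.add(m.createResource("http://dbpedia.org/resource/Plato"), RDF.type,
                m.createResource("http://dbpedia.org/ontology/Philosopher"));
        m.add(m.createResource("http://dbpedia.org/resource/Plato"), RDF.type,
                m.createResource("http://dbpedia.org/class/yago/Thinker"));
        m.add(m.createResource("http://dbpedia.org/resource/Plato"),
                m.createProperty("http://www.w3.org/2000/01/rdf-schema#label"), "Plato");
        filter(m);
        m.write(System.out, "N-TRIPLES");    // only the dbo:Philosopher type triple remains
    }
}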
From: <lor...@us...> - 2013-06-13 07:46:21
|
Revision: 3993 http://sourceforge.net/p/dl-learner/code/3993 Author: lorenz_b Date: 2013-06-13 07:46:18 +0000 (Thu, 13 Jun 2013) Log Message: ----------- Updated SPARQL API. Modified Paths: -------------- trunk/components-core/pom.xml trunk/components-core/src/main/java/org/dllearner/algorithms/qtl/operations/NBR.java trunk/components-core/src/main/java/org/dllearner/reasoning/SPARQLReasoner.java trunk/pom.xml trunk/scripts/src/main/java/org/dllearner/scripts/pattern/OWLAxiomPatternUsageEvaluation.java Modified: trunk/components-core/pom.xml =================================================================== --- trunk/components-core/pom.xml 2013-06-12 09:13:43 UTC (rev 3992) +++ trunk/components-core/pom.xml 2013-06-13 07:46:18 UTC (rev 3993) @@ -91,28 +91,6 @@ <dependencies> <dependency> - <groupId>org.aksw.commons</groupId> - <artifactId>sparql</artifactId> - <exclusions> - <exclusion> - <artifactId>slf4j-api</artifactId> - <groupId>org.slf4j</groupId> - </exclusion> - <exclusion> - <artifactId>arq</artifactId> - <groupId>com.hp.hpl.jena</groupId> - </exclusion> - <exclusion> - <artifactId>xercesImpl</artifactId> - <groupId>xerces</groupId> - </exclusion> - <exclusion> - <artifactId>any23-core</artifactId> - <groupId>org.deri.any23</groupId> - </exclusion> - </exclusions> - </dependency> - <dependency> <groupId>org.ini4j</groupId> <artifactId>ini4j</artifactId> </dependency> @@ -323,7 +301,7 @@ <dependency> <groupId>org.aksw.jena-sparql-api</groupId> <artifactId>jena-sparql-api-core</artifactId> - <version>2.10.0-3</version> + <version>2.10.0-4-SNAPSHOT</version> </dependency> </dependencies> <dependencyManagement> Modified: trunk/components-core/src/main/java/org/dllearner/algorithms/qtl/operations/NBR.java =================================================================== --- trunk/components-core/src/main/java/org/dllearner/algorithms/qtl/operations/NBR.java 2013-06-12 09:13:43 UTC (rev 3992) +++ trunk/components-core/src/main/java/org/dllearner/algorithms/qtl/operations/NBR.java 2013-06-13 07:46:18 UTC (rev 3993) @@ -18,7 +18,6 @@ import javax.xml.ws.http.HTTPException; -import org.aksw.commons.jena.ExtendedQueryEngineHTTP; import org.apache.log4j.Logger; import org.dllearner.algorithms.qtl.datastructures.QueryTree; import org.dllearner.algorithms.qtl.datastructures.impl.GeneralisedQueryTree; @@ -41,6 +40,7 @@ import com.hp.hpl.jena.query.ResultSet; import com.hp.hpl.jena.rdf.model.Model; import com.hp.hpl.jena.rdf.model.RDFNode; +import com.hp.hpl.jena.sparql.engine.http.QueryEngineHTTP; import com.hp.hpl.jena.sparql.expr.E_Equals; import com.hp.hpl.jena.sparql.expr.E_LogicalNot; import com.hp.hpl.jena.sparql.expr.ExprVar; @@ -1401,8 +1401,8 @@ private ResultSet executeSelectQuery(String query){ ResultSet rs; if(model == null){ - ExtendedQueryEngineHTTP queryExecution = new ExtendedQueryEngineHTTP(endpoint.getURL().toString(), query); - queryExecution.setTimeOut(maxExecutionTimeInSeconds * 1000); + QueryEngineHTTP queryExecution = new QueryEngineHTTP(endpoint.getURL().toString(), query); + queryExecution.setTimeout(maxExecutionTimeInSeconds * 1000); for (String dgu : endpoint.getDefaultGraphURIs()) { queryExecution.addDefaultGraph(dgu); } Modified: trunk/components-core/src/main/java/org/dllearner/reasoning/SPARQLReasoner.java =================================================================== --- trunk/components-core/src/main/java/org/dllearner/reasoning/SPARQLReasoner.java 2013-06-12 09:13:43 UTC (rev 3992) +++ 
trunk/components-core/src/main/java/org/dllearner/reasoning/SPARQLReasoner.java 2013-06-13 07:46:18 UTC (rev 3993) @@ -21,8 +21,8 @@ import java.net.SocketTimeoutException; import java.net.URL; +import java.sql.SQLException; import java.util.ArrayList; -import java.util.Collection; import java.util.HashMap; import java.util.HashSet; import java.util.List; @@ -31,11 +31,17 @@ import java.util.SortedSet; import java.util.TreeMap; import java.util.TreeSet; +import java.util.concurrent.TimeUnit; -import org.aksw.commons.sparql.api.core.QueryExecutionFactory; -import org.aksw.commons.sparql.api.http.QueryExecutionFactoryHttp; -import org.aksw.commons.sparql.api.pagination.core.QueryExecutionFactoryPaginated; import org.aksw.commons.util.strings.StringUtils; +import org.aksw.jena_sparql_api.cache.core.QueryExecutionFactoryCacheEx; +import org.aksw.jena_sparql_api.cache.extra.CacheCoreEx; +import org.aksw.jena_sparql_api.cache.extra.CacheCoreH2; +import org.aksw.jena_sparql_api.cache.extra.CacheEx; +import org.aksw.jena_sparql_api.cache.extra.CacheExImpl; +import org.aksw.jena_sparql_api.core.QueryExecutionFactory; +import org.aksw.jena_sparql_api.http.QueryExecutionFactoryHttp; +import org.aksw.jena_sparql_api.pagination.core.QueryExecutionFactoryPaginated; import org.dllearner.core.ComponentAnn; import org.dllearner.core.IndividualReasoner; import org.dllearner.core.SchemaReasoner; @@ -61,7 +67,6 @@ import org.dllearner.kb.sparql.ExtractionDBCache; import org.dllearner.kb.sparql.SPARQLTasks; import org.dllearner.kb.sparql.SparqlEndpoint; -import org.dllearner.kb.sparql.SparqlQuery; import org.dllearner.utilities.datastructures.SortedSetTuple; import org.dllearner.utilities.owl.ConceptComparator; import org.slf4j.Logger; @@ -73,7 +78,6 @@ import com.hp.hpl.jena.query.QueryExecution; import com.hp.hpl.jena.query.QuerySolution; import com.hp.hpl.jena.query.ResultSet; -import com.hp.hpl.jena.query.ResultSetFormatter; import com.hp.hpl.jena.rdf.model.Model; import com.hp.hpl.jena.rdf.model.ModelFactory; import com.hp.hpl.jena.rdf.model.Resource; @@ -1360,22 +1364,26 @@ logger.debug("Sending query \n {}", query); ResultSet rs = null; if(ks.isRemote()){ - if(useCache && cache != null){ - rs = SparqlQuery.convertJSONtoResultSet(cache.executeSelectQuery(ks.getEndpoint(), query)); - } else { - QueryEngineHTTP queryExecution = new QueryEngineHTTP(ks.getEndpoint().getURL().toString(), query); - for (String dgu : ks.getEndpoint().getDefaultGraphURIs()) { - queryExecution.addDefaultGraph(dgu); + SparqlEndpoint endpoint = ks.getEndpoint(); + QueryExecutionFactory qef = new QueryExecutionFactoryHttp(endpoint.getURL().toString(), endpoint.getDefaultGraphURIs()); + if(cache != null){ + try { + long timeToLive = TimeUnit.DAYS.toMillis(30); + CacheCoreEx cacheBackend = CacheCoreH2.create(cache.getCacheDirectory(), timeToLive, true); + CacheEx cacheFrontend = new CacheExImpl(cacheBackend); + qef = new QueryExecutionFactoryCacheEx(qef, cacheFrontend); + } catch (ClassNotFoundException e) { + e.printStackTrace(); + } catch (SQLException e) { + e.printStackTrace(); } - for (String ngu : ks.getEndpoint().getNamedGraphURIs()) { - queryExecution.addNamedGraph(ngu); - } - rs = queryExecution.execSelect(); } + qef = new QueryExecutionFactoryPaginated(qef, 10000); + QueryExecution qe = qef.createQueryExecution(query); + rs = qe.execSelect(); } else { QueryExecution qExec = com.hp.hpl.jena.query.QueryExecutionFactory.create(query, ((LocalModelBasedSparqlEndpointKS)ks).getModel()); rs = qExec.execSelect(); - } return rs; } 
Modified: trunk/pom.xml =================================================================== --- trunk/pom.xml 2013-06-12 09:13:43 UTC (rev 3992) +++ trunk/pom.xml 2013-06-13 07:46:18 UTC (rev 3993) @@ -123,7 +123,7 @@ <dependency> <groupId>net.sourceforge.owlapi</groupId> <artifactId>owlapi-distribution</artifactId> - <version>3.4.3</version> + <version>3.4.4</version> <type>pom</type> </dependency> <dependency> @@ -133,6 +133,11 @@ </dependency> <dependency> <groupId>net.sourceforge.owlapi</groupId> + <artifactId>owlapi-debugging</artifactId> + <version>3.3</version> + </dependency> + <dependency> + <groupId>net.sourceforge.owlapi</groupId> <artifactId>owlapi-util</artifactId> <version>3.3</version> </dependency> @@ -524,6 +529,12 @@ <url>http://elk-reasoner.googlecode.com/svn/m2/releases</url> <releases/> </repository> + <repository> + <id>central</id> + <url>http://oss.sonatype.org</url> + <releases><enabled>true</enabled></releases> + <snapshots><enabled>true</enabled></snapshots> + </repository> </repositories> <distributionManagement> Modified: trunk/scripts/src/main/java/org/dllearner/scripts/pattern/OWLAxiomPatternUsageEvaluation.java =================================================================== --- trunk/scripts/src/main/java/org/dllearner/scripts/pattern/OWLAxiomPatternUsageEvaluation.java 2013-06-12 09:13:43 UTC (rev 3992) +++ trunk/scripts/src/main/java/org/dllearner/scripts/pattern/OWLAxiomPatternUsageEvaluation.java 2013-06-13 07:46:18 UTC (rev 3993) @@ -225,7 +225,7 @@ Collections.shuffle(classesList, new Random(123)); classesList = classesList.subList(0, maxNrOfTestedClasses); classes = classesList; - //classes = Collections.singleton(new NamedClass("http://dbpedia.org/ontology/ChristianBishop")); + classes = Collections.singleton(new NamedClass("http://dbpedia.org/ontology/BaseballPlayer")); //get the maximum modal depth in the pattern axioms int maxModalDepth = maxModalDepth(patterns); @@ -1147,7 +1147,7 @@ //compute recall double recall = wald(subClassCnt, overlap); //if recall is too low we can skip the computation of the precision - if(recall < 0.2){ + if(recall < 0.3){ logger.warn("Recall(" + recall + ") too low. Skipping precision computation."); continue; } This was sent by the SourceForge.net collaborative development platform, the world's largest Open Source development site. |
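A condensed view of the jena-sparql-api wiring that SPARQLReasoner switches to in this revision: the plain HTTP execution factory is decorated with an H2-backed cache (30-day time-to-live) and then with a pagination layer that fetches results in blocks of 10000. The sketch below is a simplified stand-in for executeSelectQuery, assuming the endpoint URL and cache directory are supplied by the caller; it is not the committed code.

import java.util.Collections;
import java.util.concurrent.TimeUnit;

import org.aksw.jena_sparql_api.cache.core.QueryExecutionFactoryCacheEx;
import org.aksw.jena_sparql_api.cache.extra.CacheCoreEx;
import org.aksw.jena_sparql_api.cache.extra.CacheCoreH2;
import org.aksw.jena_sparql_api.cache.extra.CacheEx;
import org.aksw.jena_sparql_api.cache.extra.CacheExImpl;
import org.aksw.jena_sparql_api.core.QueryExecutionFactory;
import org.aksw.jena_sparql_api.http.QueryExecutionFactoryHttp;
import org.aksw.jena_sparql_api.pagination.core.QueryExecutionFactoryPaginated;

import com.hp.hpl.jena.query.QueryExecution;
import com.hp.hpl.jena.query.ResultSet;

public class CachedPaginatedQueryFactorySketch {

    public static ResultSet select(String endpointUrl, String cacheDir, String sparqlQuery)
            throws Exception {
        // plain HTTP factory against the endpoint (no default graphs in this sketch)
        QueryExecutionFactory qef =
                new QueryExecutionFactoryHttp(endpointUrl, Collections.<String>emptyList());
        // cache query results for 30 days in an H2 database below cacheDir
        long timeToLive = TimeUnit.DAYS.toMillis(30);
        CacheCoreEx cacheBackend = CacheCoreH2.create(cacheDir, timeToLive, true);
        CacheEx cacheFrontend = new CacheExImpl(cacheBackend);
        qef = new QueryExecutionFactoryCacheEx(qef, cacheFrontend);
        // transparently page through large result sets, 10000 rows per request
        qef = new QueryExecutionFactoryPaginated(qef, 10000);

        QueryExecution qe = qef.createQueryExecution(sparqlQuery);
        return qe.execSelect();
    }
}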
From: <dc...@us...> - 2013-06-18 15:15:01
|
Revision: 4003 http://sourceforge.net/p/dl-learner/code/4003 Author: dcherix Date: 2013-06-18 15:14:54 +0000 (Tue, 18 Jun 2013) Log Message: ----------- Added examples for the sparqr webapp Modified Paths: -------------- trunk/components-core/src/main/java/org/dllearner/kb/sparql/simple/ABoxQueryGenerator.java trunk/examples/sparql/AristotlePosNeg.conf trunk/interfaces/pom.xml trunk/interfaces/src/main/java/org/dllearner/server/Rest.java Added Paths: ----------- trunk/examples/sparqr/ trunk/examples/sparqr/AristotlePosNeg.conf trunk/examples/sparqr/StGeorge.conf Modified: trunk/components-core/src/main/java/org/dllearner/kb/sparql/simple/ABoxQueryGenerator.java =================================================================== --- trunk/components-core/src/main/java/org/dllearner/kb/sparql/simple/ABoxQueryGenerator.java 2013-06-17 10:35:31 UTC (rev 4002) +++ trunk/components-core/src/main/java/org/dllearner/kb/sparql/simple/ABoxQueryGenerator.java 2013-06-18 15:14:54 UTC (rev 4003) @@ -26,7 +26,7 @@ if (aboxfilter != null) { builder.append(aboxfilter); } - builder.append("FILTER (! (?p=rdf:type))"); + builder.append("FILTER ( (?p!=rdf:type))"); builder.append("}"); monABoxQueryGeneration.stop(); return builder.toString(); Modified: trunk/examples/sparql/AristotlePosNeg.conf =================================================================== --- trunk/examples/sparql/AristotlePosNeg.conf 2013-06-17 10:35:31 UTC (rev 4002) +++ trunk/examples/sparql/AristotlePosNeg.conf 2013-06-18 15:14:54 UTC (rev 4003) @@ -11,7 +11,7 @@ // SPARQL options sparql.type = "SPARQL endpoint fragment" -sparql.url = "http://dbpedia.openlinksw.com:8890/sparql" +sparql.url = "http://dbpedia.org/sparql" sparql.defaultGraphURIs = {"http://dbpedia.org"} sparql.recursionDepth = 1 //TODOREFACTOR check if predefinedFilter works at all @@ -29,8 +29,8 @@ reasoner.type = "fast instance checker" reasoner.sources = {sparql} +reasoner.forAllSemantics="SomeOnly" - // we want to learn from positive and negative examples lp.type = "posNegStandard" lp.positiveExamples = { Added: trunk/examples/sparqr/AristotlePosNeg.conf =================================================================== --- trunk/examples/sparqr/AristotlePosNeg.conf (rev 0) +++ trunk/examples/sparqr/AristotlePosNeg.conf 2013-06-18 15:14:54 UTC (rev 4003) @@ -0,0 +1,107 @@ +/** + * Some people from Greece. + * Note: DBpedia is always subject to change, solutions will change over time + + * Possible Solution: + * Theorist OR (Mathematician AND Physicist) + */ + + +/****************** + * SPARQL options * + ******************/ +// sparql component to use +sparql.type = "sparqls" +// endpoint +sparql.endpointURL = "http://dbpedia.org/sparql" +// default graph uri for the SPARQL queries +sparql.defaultGraphURI = "http://dbpedia.org" +// recursion depth +sparql.recursionDepth = 2 +// url(s) of the schema definition of the used ontologies. +sparql.ontologySchemaUrls = {"http://downloads.dbpedia.org/3.6/dbpedia_3.6.owl" } + +//SPARQL Filters for the instances. Those filters excludes all literals, and all Catergory instances. 
Thereto a instance must begin with http://dbpedia.org/Resource +sparql.aboxfilter = "FILTER ( +!isLiteral(?o) && +!regex(str(?p), 'http://dbpedia.org/property/website') && +!regex(str(?p), 'http://dbpedia.org/property/wikipage') && +!regex(str(?p), 'http://dbpedia.org/property/wikiPageUsesTemplate') && +!regex(str(?p), 'http://dbpedia.org/property/reference') && +!regex(str(?p), 'http://www.w3.org/2004/02/skos/core') && +!regex(str(?p), 'http://www.w3.org/2002/07/owl#sameAs') && +!regex(str(?p), 'http://xmlns.com/foaf/0.1/') && +!regex(str(?p), 'http://dbpedia.org/property/wordnet_type') && +!regex(str(?p), 'http://dbpedia.org/property/wikilink') && +regex(str(?o), '^http://dbpedia.org/resource/') && +!regex(str(?o),'^http://dbpedia.org/resource/Category:') +) " +// SPARQL Filters for the classes. With this filter only class in the dbpedia ontology are accepted and at example not from YAGO +sparql.tboxfilter = "FILTER ( !regex(str(?class), '^http://upload.wikimedia.org/wikipedia') && +!regex(str(?class), '^http://dbpedia.org/resource/Template') && +!regex(str(?class), '^http://dbpedia.org/resource/Category:') && +!regex(str(?class), '^http://umbel.org/umbel/') + ) . " + +//A list of the start instances +sparql.instances = { +"http://dbpedia.org/resource/Democritus", +"http://dbpedia.org/resource/Zeno_of_Elea", +"http://dbpedia.org/resource/Plato", +"http://dbpedia.org/resource/Socrates", +"http://dbpedia.org/resource/Archytas", +"http://dbpedia.org/resource/Pythagoras", +"http://dbpedia.org/resource/Archimedes", +"http://dbpedia.org/resource/EuClid", +"http://dbpedia.org/resource/Heraclitus" +} + +/******************** + * Reasoner options * + ********************/ +reasoner.type = "fast instance checker" +reasoner.sources = {sparql} +reasoner.forAllSemantics="Standard" + +/***************************** + * Learning problems options * + *****************************/ +// we want to learn from positive and negative examples +lp.type = "posNegStandard" + +//the positives examples +lp.positiveExamples = { +"http://dbpedia.org/resource/Archytas", +"http://dbpedia.org/resource/Pythagoras", +"http://dbpedia.org/resource/Archimedes", +"http://dbpedia.org/resource/Thales" +} + +//the negatives examples +lp.negativeExamples = { +"http://dbpedia.org/resource/Democritus", +"http://dbpedia.org/resource/Zeno_of_Elea", +"http://dbpedia.org/resource/Plato", +"http://dbpedia.org/resource/Socrates", +"http://dbpedia.org/resource/EuClid", +"http://dbpedia.org/resource/Heraclitus" +} +lp.reasoner = reasoner + +/******************************* + * refinement operator options * + *******************************/ + +// create a refinement operator and configure it +op.type = "rho" +op.useHasValueConstructor = true +op.reasoner = reasoner + +/*************************** + * learn algorithm options * + ***************************/ +// we use the OCEL algorithm +alg.type = "ocel" +alg.reasoner = reasoner + + Added: trunk/examples/sparqr/StGeorge.conf =================================================================== --- trunk/examples/sparqr/StGeorge.conf (rev 0) +++ trunk/examples/sparqr/StGeorge.conf 2013-06-18 15:14:54 UTC (rev 4003) @@ -0,0 +1,151 @@ + + +/****************** + * SPARQL options * + ******************/ +// sparql component to use +sparql.type = "sparqls" +// endpoint +sparql.endpointURL = "http://dbpedia.org/sparql" +// default graph uri for the SPARQL queries +sparql.defaultGraphURI = "http://dbpedia.org" +// recursion depth +sparql.recursionDepth = 2 +// url(s) of the schema definition of the 
used ontologies. +sparql.ontologySchemaUrls = {"http://downloads.dbpedia.org/3.6/dbpedia_3.6.owl" } + +sparql.aboxfilter = "FILTER ( +!isLiteral(?o) && +!regex(str(?p), 'http://dbpedia.org/property/website') && +!regex(str(?p), 'http://dbpedia.org/property/wikipage') && +!regex(str(?p), 'http://dbpedia.org/property/wikiPageUsesTemplate') && +!regex(str(?p), 'http://dbpedia.org/property/reference') && +!regex(str(?p), 'http://www.w3.org/2004/02/skos/core') && +!regex(str(?p), 'http://www.w3.org/2002/07/owl#sameAs') && +!regex(str(?p), 'http://xmlns.com/foaf/0.1/') && +!regex(str(?p), 'http://dbpedia.org/property/wordnet_type') && +!regex(str(?p), 'http://dbpedia.org/property/wikilink') && +regex(str(?o), '^http://dbpedia.org/resource/') +) " + +sparql.tboxfilter = "FILTER ( !regex(str(?class), '^http://upload.wikimedia.org/wikipedia') && +!regex(str(?class), '^http://dbpedia.org/resource/Template') && +!regex(str(?class), '^http://dbpedia.org/resource/Category:') && +!regex(str(?class), '^http://umbel.org/umbel/') && +!regex(str(?class), '^http://dbpedia.org/class/yago') + ) . " + +//A list of the start instances +sparql.instances = {"http://dbpedia.org/resource/Gorden_Tallis", +"http://dbpedia.org/resource/Matthew_Elliott_(rugby_league)", +"http://dbpedia.org/resource/Graeme_Bradley", +"http://dbpedia.org/resource/Elton_Rasmussen", +"http://dbpedia.org/resource/Noel_Goldthorpe", +"http://dbpedia.org/resource/Shane_Kenward", +"http://dbpedia.org/resource/Anthony_Mundine", +"http://dbpedia.org/resource/Steve_Edge_(rugby_league)", +"http://dbpedia.org/resource/Robbie_Simpson_(rugby_league)", +"http://dbpedia.org/resource/Ivan_Henjak", +"http://dbpedia.org/resource/Dick_Huddart", +"http://dbpedia.org/resource/Mark_Coyne_(rugby_league)", +"http://dbpedia.org/resource/Henry_Tatana", +"http://dbpedia.org/resource/Col_Maxwell", +"http://dbpedia.org/resource/Wayne_Bartrim", +"http://dbpedia.org/resource/Mitch_Brennan", +"http://dbpedia.org/resource/Steve_Morris", +"http://dbpedia.org/resource/Brian_Johnston_(rugby_league)", +"http://dbpedia.org/resource/Herb_Narvo", +"http://dbpedia.org/resource/Chris_Johns_(rugby_league)", +"http://dbpedia.org/resource/Martin_Offiah", +"http://dbpedia.org/resource/Gorden_Tallis", +"http://dbpedia.org/resource/Lance_Thompson", +"http://dbpedia.org/resource/Rod_Reddy", +"http://dbpedia.org/resource/Wally_Fullerton_Smith", +"http://dbpedia.org/resource/Neil_Tierney", +"http://dbpedia.org/resource/Daniel_Wagon", +"http://dbpedia.org/resource/Ian_Herron", +"http://dbpedia.org/resource/Eric_Grothe%2C_Jr.", +"http://dbpedia.org/resource/Michael_Ennis", +"http://dbpedia.org/resource/Michael_Buettner", +"http://dbpedia.org/resource/Chris_Mortimer", +"http://dbpedia.org/resource/Feleti_Mateo", +"http://dbpedia.org/resource/John_Barclay_(cricketer)", +"http://dbpedia.org/resource/Lewis_Collins_(aviator)", +"http://dbpedia.org/resource/Johann_Mohr", +"http://dbpedia.org/resource/John_Williams_(VC)", +"http://dbpedia.org/resource/Paula_Pequeno", +"http://dbpedia.org/resource/Ben_Ross", +"http://dbpedia.org/resource/Colin_Forsyth", +"http://dbpedia.org/resource/Brian_Davies_(rugby_league)", +"http://dbpedia.org/resource/Harry_Taylor_(rugby_league)", +"http://dbpedia.org/resource/Wayne_Proctor_(rugby_league)" + } + +reasoner.type = "fast instance checker" +reasoner.sources = {sparql} + +lp.type = "posNegStandard" + +lp.positiveExamples = { +"http://dbpedia.org/resource/Matthew_Elliott_(rugby_league)", +"http://dbpedia.org/resource/Graeme_Bradley", 
+"http://dbpedia.org/resource/Elton_Rasmussen", +"http://dbpedia.org/resource/Noel_Goldthorpe", +"http://dbpedia.org/resource/Shane_Kenward", +"http://dbpedia.org/resource/Anthony_Mundine", +"http://dbpedia.org/resource/Steve_Edge_(rugby_league)", +"http://dbpedia.org/resource/Robbie_Simpson_(rugby_league)", +"http://dbpedia.org/resource/Ivan_Henjak", +"http://dbpedia.org/resource/Dick_Huddart", +"http://dbpedia.org/resource/Mark_Coyne_(rugby_league)", +"http://dbpedia.org/resource/Henry_Tatana", +"http://dbpedia.org/resource/Col_Maxwell", +"http://dbpedia.org/resource/Wayne_Bartrim", +"http://dbpedia.org/resource/Mitch_Brennan", +"http://dbpedia.org/resource/Steve_Morris", +"http://dbpedia.org/resource/Brian_Johnston_(rugby_league)", +"http://dbpedia.org/resource/Herb_Narvo", +"http://dbpedia.org/resource/Chris_Johns_(rugby_league)", +"http://dbpedia.org/resource/Martin_Offiah", +"http://dbpedia.org/resource/Gorden_Tallis", +"http://dbpedia.org/resource/Lance_Thompson", +"http://dbpedia.org/resource/Rod_Reddy", +"http://dbpedia.org/resource/Wally_Fullerton_Smith", +"http://dbpedia.org/resource/Neil_Tierney" +} + +lp.negativeExamples = { +"http://dbpedia.org/resource/Daniel_Wagon", +"http://dbpedia.org/resource/Ian_Herron", +"http://dbpedia.org/resource/Eric_Grothe%2C_Jr.", +"http://dbpedia.org/resource/Michael_Ennis", +"http://dbpedia.org/resource/Michael_Buettner", +"http://dbpedia.org/resource/Chris_Mortimer", +"http://dbpedia.org/resource/Feleti_Mateo", +"http://dbpedia.org/resource/John_Barclay_(cricketer)", +"http://dbpedia.org/resource/Lewis_Collins_(aviator)", +"http://dbpedia.org/resource/Johann_Mohr", +"http://dbpedia.org/resource/John_Williams_(VC)", +"http://dbpedia.org/resource/Paula_Pequeno", +"http://dbpedia.org/resource/Ben_Ross", +"http://dbpedia.org/resource/Colin_Forsyth", +"http://dbpedia.org/resource/Brian_Davies_(rugby_league)", +"http://dbpedia.org/resource/Harry_Taylor_(rugby_league)", +"http://dbpedia.org/resource/Wayne_Proctor_(rugby_league)" + } + +lp.reasoner = reasoner + + +op.type = "rho" +op.useNegation = false +op.useAllConstructor = false +op.useCardinalityRestrictions = false +op.useHasValueConstructor = true +op.reasoner = reasoner + + +alg.type = "ocel" +alg.reasoner = reasoner +alg.maxExecutionTimeInSeconds = 30 +alg.noisePercentage = 10.0 Property changes on: trunk/examples/sparqr/StGeorge.conf ___________________________________________________________________ Added: svn:executable ## -0,0 +1 ## +* \ No newline at end of property Modified: trunk/interfaces/pom.xml =================================================================== --- trunk/interfaces/pom.xml 2013-06-17 10:35:31 UTC (rev 4002) +++ trunk/interfaces/pom.xml 2013-06-18 15:14:54 UTC (rev 4003) @@ -356,7 +356,7 @@ <build> <finalName>dl-learner</finalName> <plugins> - <plugin> + <!-- <plugin> <groupId>org.apache.maven.plugins</groupId> <artifactId>maven-shade-plugin</artifactId> <version>1.6</version> @@ -376,7 +376,7 @@ </configuration> </execution> </executions> - </plugin> + </plugin>--> <plugin> <groupId>org.codehaus.mojo</groupId> <artifactId>xmlbeans-maven-plugin</artifactId> @@ -521,8 +521,8 @@ <!-- Exclude Project-D from Project-B --> <artifactId>slf4j-log4j12</artifactId> </exclusion> - <exclusion> - <groupId>net.sourceforge</groupId> + <exclusion> + <groupId>net.sourceforge.owlapi</groupId> <artifactId>owlapi</artifactId> </exclusion> </exclusions> Modified: trunk/interfaces/src/main/java/org/dllearner/server/Rest.java 
=================================================================== --- trunk/interfaces/src/main/java/org/dllearner/server/Rest.java 2013-06-17 10:35:31 UTC (rev 4002) +++ trunk/interfaces/src/main/java/org/dllearner/server/Rest.java 2013-06-18 15:14:54 UTC (rev 4003) @@ -94,7 +94,7 @@ learningResult.put("manchester", ed.getDescription().toManchesterSyntaxString(null, null)); learningResult.put("kbsyntax", ed.getDescription().toKBSyntaxString()); // learningResult.put("sparql", sqd.getSparqlQuery(ed.getDescription())); - learningResult.put("sparql", sparqlConv.asQuery("?subject", OWLAPIConverter.getOWLAPIDescription(ed.getDescription()))); + learningResult.put("sparql", " "+ sparqlConv.asQuery("?subject", OWLAPIConverter.getOWLAPIDescription(ed.getDescription()))+" "); learningResult.put("accuracy", ed.getAccuracy()); learningResult.put("truePositives", EvaluatedDescriptionPosNeg.getJSONArray(ed.getCoveredPositives())); learningResult.put("falsePositives", EvaluatedDescriptionPosNeg.getJSONArray(ed.getNotCoveredPositives())); This was sent by the SourceForge.net collaborative development platform, the world's largest Open Source development site. |
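The small change in ABoxQueryGenerator swaps the negated equality filter FILTER (!(?p=rdf:type)) for the equivalent inequality form FILTER ((?p!=rdf:type)), so the extracted ABox fragment keeps every property except rdf:type. As a hypothetical illustration of where such a filter ends up, here is a minimal query-building sketch; only the filter line is taken from the diff, while the CONSTRUCT shape, the VALUES clause and all names are invented for the example and do not reflect the actual generator code.

import java.util.Arrays;
import java.util.List;

public class ABoxQuerySketch {

    public static String buildQuery(List<String> instanceUris, String aboxFilter) {
        StringBuilder builder = new StringBuilder();
        builder.append("PREFIX rdf: <http://www.w3.org/1999/02/22-rdf-syntax-ns#> ");
        builder.append("CONSTRUCT { ?s ?p ?o } WHERE { ?s ?p ?o . ");
        builder.append("VALUES ?s { ");
        for (String uri : instanceUris) {
            builder.append("<").append(uri).append("> ");
        }
        builder.append("} ");
        if (aboxFilter != null) {
            builder.append(aboxFilter);                // optional user-supplied filter, e.g. on ?o
        }
        builder.append("FILTER ( (?p!=rdf:type))");    // keep everything except typing triples
        builder.append("}");
        return builder.toString();
    }

    public static void main(String[] args) {
        System.out.println(buildQuery(
                Arrays.asList("http://dbpedia.org/resource/Aristotle"), null));
    }
}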
From: <jen...@us...> - 2013-09-04 08:10:12
|
Revision: 4044 http://sourceforge.net/p/dl-learner/code/4044 Author: jenslehmann Date: 2013-09-04 08:10:08 +0000 (Wed, 04 Sep 2013) Log Message: ----------- ISLE father example Added Paths: ----------- trunk/test/isle/ trunk/test/isle/father/ trunk/test/isle/father/corpus/ trunk/test/isle/father/corpus/child.txt trunk/test/isle/father/corpus/father.txt trunk/test/isle/father/corpus/female.txt trunk/test/isle/father/corpus/male.txt trunk/test/isle/father/corpus/man.txt trunk/test/isle/father/corpus/mother.txt trunk/test/isle/father/corpus/person.txt trunk/test/isle/father/father_labeled.owl Removed Paths: ------------- trunk/examples/isle/catalog-v001.xml Deleted: trunk/examples/isle/catalog-v001.xml =================================================================== --- trunk/examples/isle/catalog-v001.xml 2013-09-04 08:04:33 UTC (rev 4043) +++ trunk/examples/isle/catalog-v001.xml 2013-09-04 08:10:08 UTC (rev 4044) @@ -1,6 +0,0 @@ -<?xml version="1.0" encoding="UTF-8" standalone="no"?> -<catalog prefer="public" xmlns="urn:oasis:names:tc:entity:xmlns:xml:catalog"> - <group id="Folder Repository, directory=, recursive=true, Auto-Update=true, version=2" prefer="public" xml:base=""> - <uri id="Automatically generated entry, Timestamp=1373287083350" name="http://example.com/father" uri="father_labeled.owl"/> - </group> -</catalog> Added: trunk/test/isle/father/corpus/child.txt =================================================================== --- trunk/test/isle/father/corpus/child.txt (rev 0) +++ trunk/test/isle/father/corpus/child.txt 2013-09-04 08:10:08 UTC (rev 4044) @@ -0,0 +1 @@ +Biologically, a child is generally a human between the stages of birth and puberty. Some vernacular definitions of a child include the fetus, as being an unborn child. The legal definition of "child" generally refers to a minor, otherwise known as a person younger than the age of majority. "Child" may also describe a relationship with a parent (such as sons and daughters of any age) or, metaphorically, an authority figure, or signify group membership in a clan, tribe, or religion; it can also signify being strongly affected by a specific time, place, or circumstance, as in "a child of nature" or "a child of the Sixties". Added: trunk/test/isle/father/corpus/father.txt =================================================================== --- trunk/test/isle/father/corpus/father.txt (rev 0) +++ trunk/test/isle/father/corpus/father.txt 2013-09-04 08:10:08 UTC (rev 4044) @@ -0,0 +1 @@ +A father (or dad) is defined as a male parent or Individual progenitor of human offspring. The adjective "paternal" refers to a father and comparatively to "maternal" for a mother. The verb "to father" means to procreate or to sire a child from which also derives the gerund "fathering". Fathers determine the gender of their child through a sperm cell which either contains an X chromosome (female), or Y chromosome (male). Added: trunk/test/isle/father/corpus/female.txt =================================================================== --- trunk/test/isle/father/corpus/female.txt (rev 0) +++ trunk/test/isle/father/corpus/female.txt 2013-09-04 08:10:08 UTC (rev 4044) @@ -0,0 +1 @@ +Female (♀) is the sex of an organism, or a part of an organism, which produces non-mobile ova (egg cells). 
Added: trunk/test/isle/father/corpus/male.txt =================================================================== --- trunk/test/isle/father/corpus/male.txt (rev 0) +++ trunk/test/isle/father/corpus/male.txt 2013-09-04 08:10:08 UTC (rev 4044) @@ -0,0 +1 @@ +Male (♂) refers to organisms with the physiological sex which produces sperm. Each spermatozoon can fuse with a larger female gamete or ovum, in the process of fertilization. A male cannot reproduce sexually without access to at least one ovum from a female, but some organisms can reproduce both sexually and asexually. Not all species share a common sex-determination system. In most animals including humans, sex is determined genetically, but in some species it can be determined due to social, environmental or other factors. The existence of two sexes seems to have been selected independently across different evolutionary lineages. The repeated pattern is sexual reproduction in isogamous species with two or more mating types with gametes of identical form and behavior (but different at the molecular level) to anisogamous species with gametes of male and female types to oogamous species in which the female gamete is very much larger than the male and has no ability to move. There is a good argument that this pattern was driven by the physical constraints on the mechanisms by which two gametes get together as required for sexual reproduction. Accordingly, sex is defined operationally across species by the type of gametes produced and differences between males and females in one lineage are not always predictive of differences in another. Male/female dimorphism between organisms or reproductive organs of different sexes is not limited to animals; male gametes are produced by chytrids, diatoms and land plants, among others. In land plants, female and male designate not only the female and male gamete-producing organisms and structures but also the structures of the sporophytes that give rise to male and female plants. Added: trunk/test/isle/father/corpus/man.txt =================================================================== --- trunk/test/isle/father/corpus/man.txt (rev 0) +++ trunk/test/isle/father/corpus/man.txt 2013-09-04 08:10:08 UTC (rev 4044) @@ -0,0 +1 @@ +In English, lower case man (pl. men) refers to an adult human male (the term boy is the usual term for a human male child or adolescent). Sometimes it is also used as an adjective to identify a set of male humans, regardless of age, as in phrases such as "men's rights". Although men typically have a male reproductive system, some intersex people with ambiguous genitals, and biologically female transgender people, may also be classified or self-identify as a "man". The term manhood is used to refer to masculinity, the various qualities and characteristics attributed to men such as strength and male sexuality. Added: trunk/test/isle/father/corpus/mother.txt =================================================================== --- trunk/test/isle/father/corpus/mother.txt (rev 0) +++ trunk/test/isle/father/corpus/mother.txt 2013-09-04 08:10:08 UTC (rev 4044) @@ -0,0 +1 @@ +A mother (or mum/mom) is a woman who has raised a child, given birth to a child, and/or supplied the ovum that united with a sperm which grew into a child. Because of the complexity and differences of a mother's social, cultural, and religious definitions and roles, it is challenging to specify a universally acceptable definition for the term. The male equivalent is a father. 
Added: trunk/test/isle/father/corpus/person.txt =================================================================== --- trunk/test/isle/father/corpus/person.txt (rev 0) +++ trunk/test/isle/father/corpus/person.txt 2013-09-04 08:10:08 UTC (rev 4044) @@ -0,0 +1 @@ +A person is a being, such as a human, that has certain capacities or attributes constituting personhood, the precise definition of which is the subject of much controversy. The common plural of "person", "people", is often used to refer to an entire nation or ethnic group (as in "a people"), so the plural "persons" is often used in contexts which require precision such as philosophical and legal writing. Added: trunk/test/isle/father/father_labeled.owl =================================================================== --- trunk/test/isle/father/father_labeled.owl (rev 0) +++ trunk/test/isle/father/father_labeled.owl 2013-09-04 08:10:08 UTC (rev 4044) @@ -0,0 +1,169 @@ +<?xml version="1.0"?> + + +<!DOCTYPE rdf:RDF [ + <!ENTITY father "http://example.com/father#" > + <!ENTITY owl "http://www.w3.org/2002/07/owl#" > + <!ENTITY xsd "http://www.w3.org/2001/XMLSchema#" > + <!ENTITY owl2xml "http://www.w3.org/2006/12/owl2-xml#" > + <!ENTITY rdfs "http://www.w3.org/2000/01/rdf-schema#" > + <!ENTITY rdf "http://www.w3.org/1999/02/22-rdf-syntax-ns#" > +]> + + +<rdf:RDF xmlns="http://example.com/father#" + xml:base="http://example.com/father" + xmlns:rdfs="http://www.w3.org/2000/01/rdf-schema#" + xmlns:owl2xml="http://www.w3.org/2006/12/owl2-xml#" + xmlns:owl="http://www.w3.org/2002/07/owl#" + xmlns:xsd="http://www.w3.org/2001/XMLSchema#" + xmlns:rdf="http://www.w3.org/1999/02/22-rdf-syntax-ns#" + xmlns:father="http://example.com/father#"> + <owl:Ontology rdf:about="http://example.com/father"/> + + + + <!-- + /////////////////////////////////////////////////////////////////////////////////////// + // + // Object Properties + // + /////////////////////////////////////////////////////////////////////////////////////// + --> + + + + + <!-- http://example.com/father#hasChild --> + + <owl:ObjectProperty rdf:about="&father;hasChild"> + <rdfs:label xml:lang="en">has child</rdfs:label> + <rdfs:domain rdf:resource="&father;person"/> + <rdfs:range rdf:resource="&father;person"/> + </owl:ObjectProperty> + + + + <!-- + /////////////////////////////////////////////////////////////////////////////////////// + // + // Classes + // + /////////////////////////////////////////////////////////////////////////////////////// + --> + + + + + <!-- http://example.com/father#father --> + + <owl:Class rdf:about="&father;father"> + <rdfs:label xml:lang="en">person which has at least 1 child</rdfs:label> + <rdfs:subClassOf rdf:resource="&father;male"/> + </owl:Class> + + + + <!-- http://example.com/father#female --> + + <owl:Class rdf:about="&father;female"> + <rdfs:label xml:lang="en">female</rdfs:label> + <rdfs:subClassOf rdf:resource="&father;person"/> + <owl:disjointWith rdf:resource="&father;male"/> + </owl:Class> + + + + <!-- http://example.com/father#male --> + + <owl:Class rdf:about="&father;male"> + <rdfs:label xml:lang="en">male</rdfs:label> + <rdfs:subClassOf rdf:resource="&father;person"/> + </owl:Class> + + + + <!-- http://example.com/father#person --> + + <owl:Class rdf:about="&father;person"> + <rdfs:label xml:lang="en">Person</rdfs:label> + <rdfs:subClassOf rdf:resource="&owl;Thing"/> + </owl:Class> + + + + <!-- http://www.w3.org/2002/07/owl#Thing --> + + <owl:Class rdf:about="&owl;Thing"/> + + + + <!-- + 
/////////////////////////////////////////////////////////////////////////////////////// + // + // Individuals + // + /////////////////////////////////////////////////////////////////////////////////////// + --> + + + + + <!-- http://example.com/father#anna --> + + <owl:NamedIndividual rdf:about="&father;anna"> + <rdf:type rdf:resource="&father;female"/> + <hasChild rdf:resource="&father;heinz"/> + </owl:NamedIndividual> + + + + <!-- http://example.com/father#heinz --> + + <owl:NamedIndividual rdf:about="&father;heinz"> + <rdf:type rdf:resource="&father;male"/> + </owl:NamedIndividual> + + + + <!-- http://example.com/father#markus --> + + <owl:NamedIndividual rdf:about="&father;markus"> + <rdf:type rdf:resource="&father;father"/> + <rdf:type rdf:resource="&father;male"/> + <hasChild rdf:resource="&father;anna"/> + </owl:NamedIndividual> + + + + <!-- http://example.com/father#martin --> + + <owl:NamedIndividual rdf:about="&father;martin"> + <rdf:type rdf:resource="&father;father"/> + <rdf:type rdf:resource="&father;male"/> + <hasChild rdf:resource="&father;heinz"/> + </owl:NamedIndividual> + + + + <!-- http://example.com/father#michelle --> + + <owl:NamedIndividual rdf:about="&father;michelle"> + <rdf:type rdf:resource="&father;female"/> + </owl:NamedIndividual> + + + + <!-- http://example.com/father#stefan --> + + <owl:NamedIndividual rdf:about="&father;stefan"> + <rdf:type rdf:resource="&father;father"/> + <rdf:type rdf:resource="&father;male"/> + <hasChild rdf:resource="&father;markus"/> + </owl:NamedIndividual> +</rdf:RDF> + + + +<!-- Generated by the OWL API (version 3.4.2) http://owlapi.sourceforge.net --> + This was sent by the SourceForge.net collaborative development platform, the world's largest Open Source development site. |
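The father_labeled.owl file added above attaches an English rdfs:label to every class and property (e.g. "person which has at least 1 child" for father), which is the textual material a label-based entity text retriever can draw on. A small OWL API 3.x sketch that loads the ontology and prints those labels is shown below; the relative path comes from this commit, the class name and the rest of the scaffolding are illustrative.

import java.io.File;

import org.semanticweb.owlapi.apibinding.OWLManager;
import org.semanticweb.owlapi.model.OWLAnnotation;
import org.semanticweb.owlapi.model.OWLDataFactory;
import org.semanticweb.owlapi.model.OWLEntity;
import org.semanticweb.owlapi.model.OWLLiteral;
import org.semanticweb.owlapi.model.OWLOntology;
import org.semanticweb.owlapi.model.OWLOntologyManager;

public class FatherLabelsSketch {

    public static void main(String[] args) throws Exception {
        OWLOntologyManager manager = OWLManager.createOWLOntologyManager();
        OWLDataFactory df = manager.getOWLDataFactory();
        OWLOntology ontology = manager.loadOntologyFromOntologyDocument(
                new File("test/isle/father/father_labeled.owl"));
        // print the rdfs:label of every entity in the ontology signature
        for (OWLEntity entity : ontology.getSignature()) {
            for (OWLAnnotation annotation : entity.getAnnotations(ontology, df.getRDFSLabel())) {
                if (annotation.getValue() instanceof OWLLiteral) {
                    System.out.println(entity.toStringID() + " -> "
                            + ((OWLLiteral) annotation.getValue()).getLiteral());
                }
            }
        }
    }
}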
From: <jen...@us...> - 2013-09-04 09:18:54
|
Revision: 4047 http://sourceforge.net/p/dl-learner/code/4047 Author: jenslehmann Date: 2013-09-04 09:18:50 +0000 (Wed, 04 Sep 2013) Log Message: ----------- started unit test for comparing ISLE and CELOE Modified Paths: -------------- trunk/components-core/src/test/java/org/dllearner/algorithms/isle/ISLETest.java Property Changed: ---------------- trunk/test/isle/father/ Modified: trunk/components-core/src/test/java/org/dllearner/algorithms/isle/ISLETest.java =================================================================== --- trunk/components-core/src/test/java/org/dllearner/algorithms/isle/ISLETest.java 2013-09-04 08:25:56 UTC (rev 4046) +++ trunk/components-core/src/test/java/org/dllearner/algorithms/isle/ISLETest.java 2013-09-04 09:18:50 UTC (rev 4047) @@ -9,6 +9,7 @@ import java.util.Map; import java.util.Set; +import org.dllearner.algorithms.celoe.CELOE; import org.dllearner.algorithms.isle.index.AnnotatedDocument; import org.dllearner.algorithms.isle.index.TextDocument; import org.dllearner.algorithms.isle.index.semantic.SemanticIndex; @@ -56,12 +57,16 @@ private SemanticIndex semanticIndex; private SyntacticIndex syntacticIndex; + // we assume that the ontology is named "ontology.owl" and that all text files + // are in a subdirectory called "corpus" + private String testFolder = "../test/isle/father/"; + /** * */ public ISLETest() throws Exception{ manager = OWLManager.createOWLOntologyManager(); - ontology = manager.loadOntologyFromOntologyDocument(new File("../examples/isle/father_labeled.owl")); + ontology = manager.loadOntologyFromOntologyDocument(new File(testFolder + "father_labeled.owl")); cls = new NamedClass("http://example.com/father#father"); textRetriever = new RDFSLabelEntityTextRetriever(ontology); syntacticIndex = new OWLOntologyLuceneSyntacticIndexCreator(ontology, df.getRDFSLabel(), searchField).buildIndex(); @@ -71,7 +76,7 @@ private Set<TextDocument> createDocuments(){ Set<TextDocument> documents = new HashSet<TextDocument>(); - File folder = new File("../test/isle/father/corpus"); + File folder = new File(testFolder+"corpus/"); for (File file : folder.listFiles()) { try { String text = Files.toString(file, Charsets.UTF_8); @@ -148,4 +153,42 @@ isle.start(); } + @Test + public void compareISLE() throws Exception { + KnowledgeSource ks = new OWLAPIOntology(ontology); + AbstractReasonerComponent reasoner = new FastInstanceChecker(ks); + reasoner.init(); + + ClassLearningProblem lp = new ClassLearningProblem(reasoner); + lp.setClassToDescribe(cls); + lp.init(); + + semanticIndex = new SimpleSemanticIndex(ontology, syntacticIndex); + semanticIndex.buildIndex(createDocuments()); + + relevance = new PMIRelevanceMetric(semanticIndex); + + Map<Entity, Double> entityRelevance = RelevanceUtils.getRelevantEntities(cls, ontology, relevance); + NLPHeuristic heuristic = new NLPHeuristic(entityRelevance); + + // run ISLE + ISLE isle = new ISLE(lp, reasoner); + isle.setHeuristic(heuristic); + isle.setSearchTreeFile(testFolder + "searchTreeISLE.txt"); + isle.setWriteSearchTree(true); + isle.setReplaceSearchTree(true); + isle.setTerminateOnNoiseReached(true); + isle.init(); + isle.start(); + + // run standard CELOE as reference + CELOE celoe = new CELOE(lp, reasoner); + celoe.setSearchTreeFile(testFolder + "searchTreeCELOE.txt"); + celoe.setWriteSearchTree(true); + celoe.setTerminateOnNoiseReached(true); + celoe.setReplaceSearchTree(true); + celoe.init(); + celoe.start(); + } + } Index: trunk/test/isle/father =================================================================== 
--- trunk/test/isle/father 2013-09-04 08:25:56 UTC (rev 4046) +++ trunk/test/isle/father 2013-09-04 09:18:50 UTC (rev 4047) Property changes on: trunk/test/isle/father ___________________________________________________________________ Added: svn:ignore ## -0,0 +1 ## +searchTree*.txt |
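The new compareISLE test drives both algorithms from the same ClassLearningProblem and biases ISLE's search with an NLPHeuristic built from PMIRelevanceMetric scores. The metric itself is not part of this diff; as a rough, hypothetical illustration of pointwise mutual information computed over sets of document ids (class and method names are made up, and DL-Learner's PMIRelevanceMetric may use different estimation or smoothing):

import java.util.HashSet;
import java.util.Set;

public class PmiSketch {

    // pmi(A,B) = log( p(A,B) / (p(A) * p(B)) ), probabilities estimated from document frequencies
    static double pmi(Set<String> docsA, Set<String> docsB, int totalDocs) {
        Set<String> both = new HashSet<String>(docsA);
        both.retainAll(docsB);
        double pA = docsA.size() / (double) totalDocs;
        double pB = docsB.size() / (double) totalDocs;
        double pAB = both.size() / (double) totalDocs;
        if (pA == 0 || pB == 0 || pAB == 0) {
            return Double.NEGATIVE_INFINITY; // no evidence of co-occurrence
        }
        return Math.log(pAB / (pA * pB));
    }

    public static void main(String[] args) {
        Set<String> father = new HashSet<String>();
        father.add("doc1"); father.add("doc2");
        Set<String> hasChild = new HashSet<String>();
        hasChild.add("doc2"); hasChild.add("doc3");
        // one shared document out of three: log((1/3) / ((2/3)*(2/3))) = log(0.75), slightly negative
        System.out.println(pmi(father, hasChild, 3));
    }
}

A positive score means an entity's documents co-occur with the target class's documents more often than chance, which is what such a heuristic rewards during the search.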
From: <jen...@us...> - 2013-09-04 10:04:18
|
Revision: 4055 http://sourceforge.net/p/dl-learner/code/4055 Author: jenslehmann Date: 2013-09-04 10:04:14 +0000 (Wed, 04 Sep 2013) Log Message: ----------- ISLE / CELOE comparison extended Modified Paths: -------------- trunk/components-core/src/main/java/org/dllearner/algorithms/celoe/CELOE.java trunk/components-core/src/main/java/org/dllearner/algorithms/isle/ISLE.java trunk/components-core/src/test/java/org/dllearner/algorithms/isle/ISLETest.java Added Paths: ----------- trunk/test/isle/father/ontology.owl Removed Paths: ------------- trunk/test/isle/father/father_labeled.owl Modified: trunk/components-core/src/main/java/org/dllearner/algorithms/celoe/CELOE.java =================================================================== --- trunk/components-core/src/main/java/org/dllearner/algorithms/celoe/CELOE.java 2013-09-04 09:56:41 UTC (rev 4054) +++ trunk/components-core/src/main/java/org/dllearner/algorithms/celoe/CELOE.java 2013-09-04 10:04:14 UTC (rev 4055) @@ -149,9 +149,11 @@ private int expressionTests = 0; private int minHorizExp = 0; private int maxHorizExp = 0; + private long totalRuntimeNs; // TODO: turn those into config options + // important: do not initialise those with empty sets // null = no settings for allowance / ignorance // empty set = allow / ignore nothing (it is often not desired to allow no class!) @@ -536,7 +538,8 @@ if (stop) { logger.info("Algorithm stopped ("+expressionTests+" descriptions tested). " + nodes.size() + " nodes in the search tree.\n"); } else { - logger.info("Algorithm terminated successfully (time: " + Helper.prettyPrintNanoSeconds(System.nanoTime()-nanoStartTime) + ", "+expressionTests+" descriptions tested, " + nodes.size() + " nodes in the search tree).\n"); + totalRuntimeNs = System.nanoTime()-nanoStartTime; + logger.info("Algorithm terminated successfully (time: " + Helper.prettyPrintNanoSeconds(totalRuntimeNs) + ", "+expressionTests+" descriptions tested, " + nodes.size() + " nodes in the search tree).\n"); logger.info(reasoner.toString()); } @@ -897,6 +900,10 @@ } } + public TreeSet<OENode> getNodes() { + return nodes; + } + public int getMaximumHorizontalExpansion() { return maxHorizExp; } @@ -1102,6 +1109,10 @@ this.stopOnFirstDefinition = stopOnFirstDefinition; } + public long getTotalRuntimeNs() { + return totalRuntimeNs; + } + public static void main(String[] args) throws Exception{ AbstractKnowledgeSource ks = new OWLFile("../examples/family/father_oe.owl"); ks.init(); Modified: trunk/components-core/src/main/java/org/dllearner/algorithms/isle/ISLE.java =================================================================== --- trunk/components-core/src/main/java/org/dllearner/algorithms/isle/ISLE.java 2013-09-04 09:56:41 UTC (rev 4054) +++ trunk/components-core/src/main/java/org/dllearner/algorithms/isle/ISLE.java 2013-09-04 10:04:14 UTC (rev 4055) @@ -146,6 +146,7 @@ private int expressionTests = 0; private int minHorizExp = 0; private int maxHorizExp = 0; + private long totalRuntimeNs; // TODO: turn those into config options @@ -533,7 +534,8 @@ if (stop) { logger.info("Algorithm stopped ("+expressionTests+" descriptions tested). 
" + nodes.size() + " nodes in the search tree.\n"); } else { - logger.info("Algorithm terminated successfully (time: " + Helper.prettyPrintNanoSeconds(System.nanoTime()-nanoStartTime) + ", "+expressionTests+" descriptions tested, " + nodes.size() + " nodes in the search tree).\n"); + totalRuntimeNs = System.nanoTime()-nanoStartTime; + logger.info("Algorithm terminated successfully (time: " + Helper.prettyPrintNanoSeconds(totalRuntimeNs) + ", "+expressionTests+" descriptions tested, " + nodes.size() + " nodes in the search tree).\n"); logger.info(reasoner.toString()); } @@ -1099,4 +1101,12 @@ this.stopOnFirstDefinition = stopOnFirstDefinition; } + public long getTotalRuntimeNs() { + return totalRuntimeNs; + } + + public TreeSet<OENode> getNodes() { + return nodes; + } + } Modified: trunk/components-core/src/test/java/org/dllearner/algorithms/isle/ISLETest.java =================================================================== --- trunk/components-core/src/test/java/org/dllearner/algorithms/isle/ISLETest.java 2013-09-04 09:56:41 UTC (rev 4054) +++ trunk/components-core/src/test/java/org/dllearner/algorithms/isle/ISLETest.java 2013-09-04 10:04:14 UTC (rev 4055) @@ -5,6 +5,7 @@ import java.io.File; import java.io.IOException; +import java.text.DecimalFormat; import java.util.HashSet; import java.util.Map; import java.util.Set; @@ -28,6 +29,7 @@ import org.dllearner.kb.OWLAPIOntology; import org.dllearner.learningproblems.ClassLearningProblem; import org.dllearner.reasoning.FastInstanceChecker; +import org.dllearner.utilities.Helper; import org.junit.Before; import org.junit.Test; import org.semanticweb.owlapi.apibinding.OWLManager; @@ -66,7 +68,7 @@ */ public ISLETest() throws Exception{ manager = OWLManager.createOWLOntologyManager(); - ontology = manager.loadOntologyFromOntologyDocument(new File(testFolder + "father_labeled.owl")); + ontology = manager.loadOntologyFromOntologyDocument(new File(testFolder + "ontology.owl")); cls = new NamedClass("http://example.com/father#father"); textRetriever = new RDFSLabelEntityTextRetriever(ontology); syntacticIndex = new OWLOntologyLuceneSyntacticIndexCreator(ontology, df.getRDFSLabel(), searchField).buildIndex(); @@ -189,6 +191,16 @@ celoe.setReplaceSearchTree(true); celoe.init(); celoe.start(); + System.out.println(); + + DecimalFormat df = new DecimalFormat("000.00"); + System.out.println("Summary ISLE vs. CELOE"); + System.out.println("======================"); + System.out.println("accuracy: " + df.format(100*isle.getCurrentlyBestAccuracy())+"% vs. " + df.format(100*celoe.getCurrentlyBestAccuracy())+"%"); + System.out.println("expressions tested: " + isle.getClassExpressionTests() + " vs. " + celoe.getClassExpressionTests()); + System.out.println("search tree nodes: " + isle.getNodes().size() + " vs. " + celoe.getNodes().size()); + System.out.println("runtime: " + Helper.prettyPrintMilliSeconds(isle.getTotalRuntimeNs()) + " vs. 
" + Helper.prettyPrintMilliSeconds(celoe.getTotalRuntimeNs())); + } } Deleted: trunk/test/isle/father/father_labeled.owl =================================================================== --- trunk/test/isle/father/father_labeled.owl 2013-09-04 09:56:41 UTC (rev 4054) +++ trunk/test/isle/father/father_labeled.owl 2013-09-04 10:04:14 UTC (rev 4055) @@ -1,169 +0,0 @@ -<?xml version="1.0"?> - - -<!DOCTYPE rdf:RDF [ - <!ENTITY father "http://example.com/father#" > - <!ENTITY owl "http://www.w3.org/2002/07/owl#" > - <!ENTITY xsd "http://www.w3.org/2001/XMLSchema#" > - <!ENTITY owl2xml "http://www.w3.org/2006/12/owl2-xml#" > - <!ENTITY rdfs "http://www.w3.org/2000/01/rdf-schema#" > - <!ENTITY rdf "http://www.w3.org/1999/02/22-rdf-syntax-ns#" > -]> - - -<rdf:RDF xmlns="http://example.com/father#" - xml:base="http://example.com/father" - xmlns:rdfs="http://www.w3.org/2000/01/rdf-schema#" - xmlns:owl2xml="http://www.w3.org/2006/12/owl2-xml#" - xmlns:owl="http://www.w3.org/2002/07/owl#" - xmlns:xsd="http://www.w3.org/2001/XMLSchema#" - xmlns:rdf="http://www.w3.org/1999/02/22-rdf-syntax-ns#" - xmlns:father="http://example.com/father#"> - <owl:Ontology rdf:about="http://example.com/father"/> - - - - <!-- - /////////////////////////////////////////////////////////////////////////////////////// - // - // Object Properties - // - /////////////////////////////////////////////////////////////////////////////////////// - --> - - - - - <!-- http://example.com/father#hasChild --> - - <owl:ObjectProperty rdf:about="&father;hasChild"> - <rdfs:label xml:lang="en">has child</rdfs:label> - <rdfs:domain rdf:resource="&father;person"/> - <rdfs:range rdf:resource="&father;person"/> - </owl:ObjectProperty> - - - - <!-- - /////////////////////////////////////////////////////////////////////////////////////// - // - // Classes - // - /////////////////////////////////////////////////////////////////////////////////////// - --> - - - - - <!-- http://example.com/father#father --> - - <owl:Class rdf:about="&father;father"> - <rdfs:label xml:lang="en">person which has at least 1 child</rdfs:label> - <rdfs:subClassOf rdf:resource="&father;male"/> - </owl:Class> - - - - <!-- http://example.com/father#female --> - - <owl:Class rdf:about="&father;female"> - <rdfs:label xml:lang="en">female</rdfs:label> - <rdfs:subClassOf rdf:resource="&father;person"/> - <owl:disjointWith rdf:resource="&father;male"/> - </owl:Class> - - - - <!-- http://example.com/father#male --> - - <owl:Class rdf:about="&father;male"> - <rdfs:label xml:lang="en">male</rdfs:label> - <rdfs:subClassOf rdf:resource="&father;person"/> - </owl:Class> - - - - <!-- http://example.com/father#person --> - - <owl:Class rdf:about="&father;person"> - <rdfs:label xml:lang="en">Person</rdfs:label> - <rdfs:subClassOf rdf:resource="&owl;Thing"/> - </owl:Class> - - - - <!-- http://www.w3.org/2002/07/owl#Thing --> - - <owl:Class rdf:about="&owl;Thing"/> - - - - <!-- - /////////////////////////////////////////////////////////////////////////////////////// - // - // Individuals - // - /////////////////////////////////////////////////////////////////////////////////////// - --> - - - - - <!-- http://example.com/father#anna --> - - <owl:NamedIndividual rdf:about="&father;anna"> - <rdf:type rdf:resource="&father;female"/> - <hasChild rdf:resource="&father;heinz"/> - </owl:NamedIndividual> - - - - <!-- http://example.com/father#heinz --> - - <owl:NamedIndividual rdf:about="&father;heinz"> - <rdf:type rdf:resource="&father;male"/> - </owl:NamedIndividual> - - - - <!-- 
http://example.com/father#markus --> - - <owl:NamedIndividual rdf:about="&father;markus"> - <rdf:type rdf:resource="&father;father"/> - <rdf:type rdf:resource="&father;male"/> - <hasChild rdf:resource="&father;anna"/> - </owl:NamedIndividual> - - - - <!-- http://example.com/father#martin --> - - <owl:NamedIndividual rdf:about="&father;martin"> - <rdf:type rdf:resource="&father;father"/> - <rdf:type rdf:resource="&father;male"/> - <hasChild rdf:resource="&father;heinz"/> - </owl:NamedIndividual> - - - - <!-- http://example.com/father#michelle --> - - <owl:NamedIndividual rdf:about="&father;michelle"> - <rdf:type rdf:resource="&father;female"/> - </owl:NamedIndividual> - - - - <!-- http://example.com/father#stefan --> - - <owl:NamedIndividual rdf:about="&father;stefan"> - <rdf:type rdf:resource="&father;father"/> - <rdf:type rdf:resource="&father;male"/> - <hasChild rdf:resource="&father;markus"/> - </owl:NamedIndividual> -</rdf:RDF> - - - -<!-- Generated by the OWL API (version 3.4.2) http://owlapi.sourceforge.net --> - Copied: trunk/test/isle/father/ontology.owl (from rev 4046, trunk/test/isle/father/father_labeled.owl) =================================================================== --- trunk/test/isle/father/ontology.owl (rev 0) +++ trunk/test/isle/father/ontology.owl 2013-09-04 10:04:14 UTC (rev 4055) @@ -0,0 +1,169 @@ +<?xml version="1.0"?> + + +<!DOCTYPE rdf:RDF [ + <!ENTITY father "http://example.com/father#" > + <!ENTITY owl "http://www.w3.org/2002/07/owl#" > + <!ENTITY xsd "http://www.w3.org/2001/XMLSchema#" > + <!ENTITY owl2xml "http://www.w3.org/2006/12/owl2-xml#" > + <!ENTITY rdfs "http://www.w3.org/2000/01/rdf-schema#" > + <!ENTITY rdf "http://www.w3.org/1999/02/22-rdf-syntax-ns#" > +]> + + +<rdf:RDF xmlns="http://example.com/father#" + xml:base="http://example.com/father" + xmlns:rdfs="http://www.w3.org/2000/01/rdf-schema#" + xmlns:owl2xml="http://www.w3.org/2006/12/owl2-xml#" + xmlns:owl="http://www.w3.org/2002/07/owl#" + xmlns:xsd="http://www.w3.org/2001/XMLSchema#" + xmlns:rdf="http://www.w3.org/1999/02/22-rdf-syntax-ns#" + xmlns:father="http://example.com/father#"> + <owl:Ontology rdf:about="http://example.com/father"/> + + + + <!-- + /////////////////////////////////////////////////////////////////////////////////////// + // + // Object Properties + // + /////////////////////////////////////////////////////////////////////////////////////// + --> + + + + + <!-- http://example.com/father#hasChild --> + + <owl:ObjectProperty rdf:about="&father;hasChild"> + <rdfs:label xml:lang="en">has child</rdfs:label> + <rdfs:domain rdf:resource="&father;person"/> + <rdfs:range rdf:resource="&father;person"/> + </owl:ObjectProperty> + + + + <!-- + /////////////////////////////////////////////////////////////////////////////////////// + // + // Classes + // + /////////////////////////////////////////////////////////////////////////////////////// + --> + + + + + <!-- http://example.com/father#father --> + + <owl:Class rdf:about="&father;father"> + <rdfs:label xml:lang="en">person which has at least 1 child</rdfs:label> + <rdfs:subClassOf rdf:resource="&father;male"/> + </owl:Class> + + + + <!-- http://example.com/father#female --> + + <owl:Class rdf:about="&father;female"> + <rdfs:label xml:lang="en">female</rdfs:label> + <rdfs:subClassOf rdf:resource="&father;person"/> + <owl:disjointWith rdf:resource="&father;male"/> + </owl:Class> + + + + <!-- http://example.com/father#male --> + + <owl:Class rdf:about="&father;male"> + <rdfs:label xml:lang="en">male</rdfs:label> + <rdfs:subClassOf 
rdf:resource="&father;person"/> + </owl:Class> + + + + <!-- http://example.com/father#person --> + + <owl:Class rdf:about="&father;person"> + <rdfs:label xml:lang="en">Person</rdfs:label> + <rdfs:subClassOf rdf:resource="&owl;Thing"/> + </owl:Class> + + + + <!-- http://www.w3.org/2002/07/owl#Thing --> + + <owl:Class rdf:about="&owl;Thing"/> + + + + <!-- + /////////////////////////////////////////////////////////////////////////////////////// + // + // Individuals + // + /////////////////////////////////////////////////////////////////////////////////////// + --> + + + + + <!-- http://example.com/father#anna --> + + <owl:NamedIndividual rdf:about="&father;anna"> + <rdf:type rdf:resource="&father;female"/> + <hasChild rdf:resource="&father;heinz"/> + </owl:NamedIndividual> + + + + <!-- http://example.com/father#heinz --> + + <owl:NamedIndividual rdf:about="&father;heinz"> + <rdf:type rdf:resource="&father;male"/> + </owl:NamedIndividual> + + + + <!-- http://example.com/father#markus --> + + <owl:NamedIndividual rdf:about="&father;markus"> + <rdf:type rdf:resource="&father;father"/> + <rdf:type rdf:resource="&father;male"/> + <hasChild rdf:resource="&father;anna"/> + </owl:NamedIndividual> + + + + <!-- http://example.com/father#martin --> + + <owl:NamedIndividual rdf:about="&father;martin"> + <rdf:type rdf:resource="&father;father"/> + <rdf:type rdf:resource="&father;male"/> + <hasChild rdf:resource="&father;heinz"/> + </owl:NamedIndividual> + + + + <!-- http://example.com/father#michelle --> + + <owl:NamedIndividual rdf:about="&father;michelle"> + <rdf:type rdf:resource="&father;female"/> + </owl:NamedIndividual> + + + + <!-- http://example.com/father#stefan --> + + <owl:NamedIndividual rdf:about="&father;stefan"> + <rdf:type rdf:resource="&father;father"/> + <rdf:type rdf:resource="&father;male"/> + <hasChild rdf:resource="&father;markus"/> + </owl:NamedIndividual> +</rdf:RDF> + + + +<!-- Generated by the OWL API (version 3.4.2) http://owlapi.sourceforge.net --> + This was sent by the SourceForge.net collaborative development platform, the world's largest Open Source development site. |
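The summary block formats the two accuracy values with java.text.DecimalFormat and the pattern "000.00", i.e. zero-padded to three integer digits and two decimals. A tiny stand-alone example of what that pattern produces (class name and sample values are only illustrative):

import java.text.DecimalFormat;
import java.text.DecimalFormatSymbols;
import java.util.Locale;

public class AccuracyFormatDemo {
    public static void main(String[] args) {
        // pin the symbols to Locale.US so the decimal separator is '.' on every machine
        DecimalFormat df = new DecimalFormat("000.00", DecimalFormatSymbols.getInstance(Locale.US));
        System.out.println(df.format(100 * 0.875)); // 087.50
        System.out.println(df.format(100 * 1.0));   // 100.00
    }
}

The test itself uses the single-argument DecimalFormat constructor, so the separator follows the JVM's default locale.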
From: <jen...@us...> - 2013-09-04 14:15:35
|
Revision: 4059 http://sourceforge.net/p/dl-learner/code/4059 Author: jenslehmann Date: 2013-09-04 14:15:30 +0000 (Wed, 04 Sep 2013) Log Message: ----------- SWORE test Modified Paths: -------------- trunk/components-core/src/main/java/org/dllearner/algorithms/celoe/CELOE.java trunk/components-core/src/main/java/org/dllearner/algorithms/isle/index/semantic/SemanticIndex.java trunk/components-core/src/test/java/org/dllearner/algorithms/isle/ISLETest.java Added Paths: ----------- trunk/test/isle/swore/ trunk/test/isle/swore/corpus/ trunk/test/isle/swore/corpus/requirements_elicitation.txt trunk/test/isle/swore/corpus/requirements_management.txt trunk/test/isle/swore/ontology.owl Modified: trunk/components-core/src/main/java/org/dllearner/algorithms/celoe/CELOE.java =================================================================== --- trunk/components-core/src/main/java/org/dllearner/algorithms/celoe/CELOE.java 2013-09-04 13:58:43 UTC (rev 4058) +++ trunk/components-core/src/main/java/org/dllearner/algorithms/celoe/CELOE.java 2013-09-04 14:15:30 UTC (rev 4059) @@ -149,7 +149,7 @@ private int expressionTests = 0; private int minHorizExp = 0; private int maxHorizExp = 0; - private long totalRuntimeNs; + private long totalRuntimeNs = 0; // TODO: turn those into config options Modified: trunk/components-core/src/main/java/org/dllearner/algorithms/isle/index/semantic/SemanticIndex.java =================================================================== --- trunk/components-core/src/main/java/org/dllearner/algorithms/isle/index/semantic/SemanticIndex.java 2013-09-04 13:58:43 UTC (rev 4058) +++ trunk/components-core/src/main/java/org/dllearner/algorithms/isle/index/semantic/SemanticIndex.java 2013-09-04 14:15:30 UTC (rev 4059) @@ -111,6 +111,9 @@ } Set<AnnotatedDocument> annotatedDocuments = index.get(entity); + if(annotatedDocuments == null) { + annotatedDocuments = new HashSet<AnnotatedDocument>(); + } return annotatedDocuments; } Modified: trunk/components-core/src/test/java/org/dllearner/algorithms/isle/ISLETest.java =================================================================== --- trunk/components-core/src/test/java/org/dllearner/algorithms/isle/ISLETest.java 2013-09-04 13:58:43 UTC (rev 4058) +++ trunk/components-core/src/test/java/org/dllearner/algorithms/isle/ISLETest.java 2013-09-04 14:15:30 UTC (rev 4059) @@ -52,7 +52,6 @@ private OWLOntologyManager manager; private OWLOntology ontology; private OWLDataFactory df = new OWLDataFactoryImpl(); - private NamedClass cls; private EntityTextRetriever textRetriever; private RelevanceMetric relevance; private String searchField = "label"; @@ -61,7 +60,9 @@ // we assume that the ontology is named "ontology.owl" and that all text files // are in a subdirectory called "corpus" - private String testFolder = "../test/isle/father/"; + private String testFolder = "../test/isle/swore/"; +// NamedClass cls = new NamedClass("http://example.com/father#father"); + NamedClass cls = new NamedClass("http://ns.softwiki.de/req/CustomerRequirement"); /** * @@ -69,7 +70,6 @@ public ISLETest() throws Exception{ manager = OWLManager.createOWLOntologyManager(); ontology = manager.loadOntologyFromOntologyDocument(new File(testFolder + "ontology.owl")); - cls = new NamedClass("http://example.com/father#father"); textRetriever = new RDFSLabelEntityTextRetriever(ontology); syntacticIndex = new OWLOntologyLuceneSyntacticIndexCreator(ontology, df.getRDFSLabel(), searchField).buildIndex(); @@ -117,22 +117,17 @@ @Test public void testSemanticIndexAnnotationProperty(){ 
semanticIndex = new SimpleSemanticIndex(ontology, syntacticIndex); - semanticIndex.buildIndex(df.getRDFSLabel(), null); - - NamedClass nc = new NamedClass("http://example.com/father#father"); - Set<AnnotatedDocument> documents = semanticIndex.getDocuments(nc); - System.out.println("Documents for " + nc + ":\n" + documents); - - nc = new NamedClass("http://example.com/father#person"); - documents = semanticIndex.getDocuments(nc); - System.out.println("Documents for " + nc + ":\n" + documents); + semanticIndex.buildIndex(df.getRDFSLabel(), null); +// NamedClass nc = new NamedClass("http://example.com/father#father"); + Set<AnnotatedDocument> documents = semanticIndex.getDocuments(cls); + System.out.println("Documents for " + cls + ":\n" + documents); } @Test public void testSemanticIndexCorpus(){ semanticIndex = new SimpleSemanticIndex(ontology, syntacticIndex); semanticIndex.buildIndex(createDocuments()); - Set<AnnotatedDocument> documents = semanticIndex.getDocuments(new NamedClass("http://example.com/father#father")); + Set<AnnotatedDocument> documents = semanticIndex.getDocuments(cls); System.out.println(documents); } @@ -199,13 +194,13 @@ celoe.start(); System.out.println(); - DecimalFormat df = new DecimalFormat("000.00"); + DecimalFormat df = new DecimalFormat("#00.00"); System.out.println("Summary ISLE vs. CELOE"); System.out.println("======================"); System.out.println("accuracy: " + df.format(100*isle.getCurrentlyBestAccuracy())+"% vs. " + df.format(100*celoe.getCurrentlyBestAccuracy())+"%"); System.out.println("expressions tested: " + isle.getClassExpressionTests() + " vs. " + celoe.getClassExpressionTests()); System.out.println("search tree nodes: " + isle.getNodes().size() + " vs. " + celoe.getNodes().size()); - System.out.println("runtime: " + Helper.prettyPrintMilliSeconds(isle.getTotalRuntimeNs()) + " vs. " + Helper.prettyPrintMilliSeconds(celoe.getTotalRuntimeNs())); + System.out.println("runtime: " + Helper.prettyPrintNanoSeconds(isle.getTotalRuntimeNs()) + " vs. " + Helper.prettyPrintNanoSeconds(celoe.getTotalRuntimeNs())); } Added: trunk/test/isle/swore/corpus/requirements_elicitation.txt =================================================================== --- trunk/test/isle/swore/corpus/requirements_elicitation.txt (rev 0) +++ trunk/test/isle/swore/corpus/requirements_elicitation.txt 2013-09-04 14:15:30 UTC (rev 4059) @@ -0,0 +1 @@ +In requirements engineering, requirements elicitation is the practice of obtaining the requirements of a system from users, customers and other stakeholders. The practice is also sometimes referred to as requirements gathering. The term elicitation is used in books and research to raise the fact that good requirements can not just be collected from the customer, as would be indicated by the name requirements gathering. Requirements elicitation is non-trivial because you can never be sure you get all requirements from the user and customer by just asking them what the system should do. Requirements elicitation practices include interviews, questionnaires, user observation, workshops, brain storming, use cases, role playing and prototyping. Before requirements can be analyzed, modeled, or specified they must be gathered through an elicitation process. Requirements elicitation is a part of the requirements engineering process, usually followed by analysis and specification of the requirements. Commonly used elicitation processes are the stakeholder meetings or interviews. 
For example, an important first meeting could be between software engineers and customers where they discuss their perspective of the requirements. Added: trunk/test/isle/swore/corpus/requirements_management.txt =================================================================== Added: trunk/test/isle/swore/ontology.owl =================================================================== --- trunk/test/isle/swore/ontology.owl (rev 0) +++ trunk/test/isle/swore/ontology.owl 2013-09-04 14:15:30 UTC (rev 4059) @@ -0,0 +1,2273 @@ +<?xml version="1.0"?> + + +<!DOCTYPE rdf:RDF [ + <!ENTITY req "http://ns.softwiki.de/req/" > + <!ENTITY foaf2 "http://xmlns.com/foaf/0.1/" > + <!ENTITY dcmitype "http://purl.org/dc/dcmitype/" > + <!ENTITY owl "http://www.w3.org/2002/07/owl#" > + <!ENTITY dc "http://purl.org/dc/elements/1.1/" > + <!ENTITY xsd "http://www.w3.org/2001/XMLSchema#" > + <!ENTITY owl2xml "http://www.w3.org/2006/12/owl2-xml#" > + <!ENTITY foaf "http://www.holygoat.co.uk/foaf.rdf#" > + <!ENTITY skos "http://www.w3.org/2004/02/skos/core#" > + <!ENTITY rdfs "http://www.w3.org/2000/01/rdf-schema#" > + <!ENTITY rdf "http://www.w3.org/1999/02/22-rdf-syntax-ns#" > + <!ENTITY tags "http://www.holygoat.co.uk/owl/redwood/0.1/tags/" > +]> + + +<rdf:RDF xmlns="http://ns.softwiki.de/req/" + xml:base="http://ns.softwiki.de/req/" + xmlns:tags="http://www.holygoat.co.uk/owl/redwood/0.1/tags/" + xmlns:dc="http://purl.org/dc/elements/1.1/" + xmlns:foaf2="http://xmlns.com/foaf/0.1/" + xmlns:foaf="http://www.holygoat.co.uk/foaf.rdf#" + xmlns:dcmitype="http://purl.org/dc/dcmitype/" + xmlns:rdfs="http://www.w3.org/2000/01/rdf-schema#" + xmlns:owl2xml="http://www.w3.org/2006/12/owl2-xml#" + xmlns:owl="http://www.w3.org/2002/07/owl#" + xmlns:xsd="http://www.w3.org/2001/XMLSchema#" + xmlns:rdf="http://www.w3.org/1999/02/22-rdf-syntax-ns#" + xmlns:req="http://ns.softwiki.de/req/" + xmlns:skos="http://www.w3.org/2004/02/skos/core#"> + <owl:Ontology rdf:about="http://ns.softwiki.de/req/"> + <rdfs:label rdf:datatype="&xsd;string">SoftWiki Ontology for Requirements Engineering</rdfs:label> + <rdfs:comment rdf:datatype="&xsd;string">A requirements engineering ontology for the SoftWiki project.</rdfs:comment> + <dc:contributor rdf:datatype="&xsd;string">Jens Lehmann</dc:contributor> + <dc:contributor rdf:datatype="&xsd;string">Sebastian Dietzold</dc:contributor> + <owl:versionInfo rdf:datatype="&xsd;string">version 1.00 - Thomas Riechert, Steffen Lohmann, Kim Lauenroth, Philipp Heim - starting the next generation of SWORE on 8th of July 2008 in Duisburg +version 0.8 - Sebastian Dietzold - skos, tags and dc alignment (title now functional) +version 0.7 - Sebastian Dietzold - labels completed and namespace correction +version 0.6 - name space changed to ns.softwiki.de/req +version 0.5 - refined by Thomas according to ESWC Poster submission +version 0.4 - refined by Jens on the way home from Essen +version 0.3 - refined by Jens during discussion with Kim and Steffen on 13 March 2007 in Essen +version 0.2 - refined by Thomas and Jens in the evening of 12 March 2007 in Essen +version 0.1 - simple initial version by Thomas and Jens before meeting in Essen</owl:versionInfo> + </owl:Ontology> + + + + <!-- + /////////////////////////////////////////////////////////////////////////////////////// + // + // Annotation properties + // + /////////////////////////////////////////////////////////////////////////////////////// + --> + + <owl:AnnotationProperty rdf:about="&owl;versionInfo"/> + <owl:AnnotationProperty 
rdf:about="&dc;contributor"/> + <owl:AnnotationProperty rdf:about="&rdfs;label"/> + <owl:AnnotationProperty rdf:about="&rdfs;comment"/> + + + + <!-- + /////////////////////////////////////////////////////////////////////////////////////// + // + // Object Properties + // + /////////////////////////////////////////////////////////////////////////////////////// + --> + + + + + <!-- http://ns.softwiki.de/req/broader --> + + <owl:ObjectProperty rdf:about="&req;broader"/> + + + + <!-- http://ns.softwiki.de/req/comments --> + + <owl:ObjectProperty rdf:about="&req;comments"> + <rdf:type rdf:resource="&owl;InverseFunctionalProperty"/> + <rdfs:domain rdf:resource="&req;AbstractComment"/> + <rdfs:range rdf:resource="&req;AbstractRequirement"/> + </owl:ObjectProperty> + + + + <!-- http://ns.softwiki.de/req/conflicts --> + + <owl:ObjectProperty rdf:about="&req;conflicts"> + <rdf:type rdf:resource="&owl;SymmetricProperty"/> + <owl:inverseOf rdf:resource="&req;conflicts"/> + <rdfs:subPropertyOf rdf:resource="&req;undirectedrelation"/> + </owl:ObjectProperty> + + + + <!-- http://ns.softwiki.de/req/cui --> + + <owl:ObjectProperty rdf:about="&req;cui"/> + + + + <!-- http://ns.softwiki.de/req/defines --> + + <owl:ObjectProperty rdf:about="&req;defines"> + <rdfs:label rdf:datatype="&xsd;string">defines</rdfs:label> + <rdfs:domain rdf:resource="&req;Author"/> + <rdfs:range> + <owl:Class> + <owl:unionOf rdf:parseType="Collection"> + <rdf:Description rdf:about="&req;AbstractComment"/> + <rdf:Description rdf:about="&req;AbstractRequirement"/> + <rdf:Description rdf:about="&req;Keyword"/> + </owl:unionOf> + </owl:Class> + </rdfs:range> + </owl:ObjectProperty> + + + + <!-- http://ns.softwiki.de/req/depentsOn --> + + <owl:ObjectProperty rdf:about="&req;depentsOn"> + <rdfs:range rdf:resource="&req;AbstractRequirement"/> + <rdfs:domain rdf:resource="&req;AbstractRequirement"/> + <owl:inverseOf rdf:resource="&req;entails"/> + </owl:ObjectProperty> + + + + <!-- http://ns.softwiki.de/req/details --> + + <owl:ObjectProperty rdf:about="&req;details"> + <rdfs:label rdf:datatype="&xsd;string">details</rdfs:label> + <rdfs:range rdf:resource="&req;AbstractRequirement"/> + <rdfs:domain rdf:resource="&req;AbstractRequirement"/> + </owl:ObjectProperty> + + + + <!-- http://ns.softwiki.de/req/documentation --> + + <owl:ObjectProperty rdf:about="&req;documentation"/> + + + + <!-- http://ns.softwiki.de/req/entails --> + + <owl:ObjectProperty rdf:about="&req;entails"> + <rdfs:domain rdf:resource="&req;AbstractRequirement"/> + <rdfs:range rdf:resource="&req;AbstractRequirement"/> + </owl:ObjectProperty> + + + + <!-- http://ns.softwiki.de/req/invalidates --> + + <owl:ObjectProperty rdf:about="&req;invalidates"> + <rdfs:domain rdf:resource="&req;AbstractRequirement"/> + <rdfs:range rdf:resource="&req;AbstractRequirement"/> + </owl:ObjectProperty> + + + + <!-- http://ns.softwiki.de/req/isCommentedBy --> + + <owl:ObjectProperty rdf:about="&req;isCommentedBy"> + <rdfs:range rdf:resource="&req;AbstractComment"/> + <rdfs:domain rdf:resource="&req;AbstractRequirement"/> + <owl:inverseOf rdf:resource="&req;comments"/> + </owl:ObjectProperty> + + + + <!-- http://ns.softwiki.de/req/isCreatedBy --> + + <owl:ObjectProperty rdf:about="&req;isCreatedBy"> + <rdf:type rdf:resource="&owl;FunctionalProperty"/> + <rdfs:label>is created by</rdfs:label> + <rdfs:comment>specifies the persons who created the requirement</rdfs:comment> + <rdfs:domain rdf:resource="&req;AbstractRequirement"/> + </owl:ObjectProperty> + + + + <!-- 
http://ns.softwiki.de/req/isDefinedBy --> + + <owl:ObjectProperty rdf:about="&req;isDefinedBy"> + <rdfs:label rdf:datatype="&xsd;string">defined by</rdfs:label> + <rdfs:range rdf:resource="&req;Author"/> + <owl:inverseOf rdf:resource="&req;defines"/> + <rdfs:domain> + <owl:Class> + <owl:unionOf rdf:parseType="Collection"> + <rdf:Description rdf:about="&req;AbstractComment"/> + <rdf:Description rdf:about="&req;AbstractRequirement"/> + <rdf:Description rdf:about="&req;Keyword"/> + </owl:unionOf> + </owl:Class> + </rdfs:domain> + </owl:ObjectProperty> + + + + <!-- http://ns.softwiki.de/req/isDetailedBy --> + + <owl:ObjectProperty rdf:about="&req;isDetailedBy"> + <rdfs:label rdf:datatype="&xsd;string">detailed by</rdfs:label> + <rdfs:domain rdf:resource="&req;AbstractRequirement"/> + <rdfs:range rdf:resource="&req;AbstractRequirement"/> + <owl:inverseOf rdf:resource="&req;details"/> + </owl:ObjectProperty> + + + + <!-- http://ns.softwiki.de/req/isInvalidFor --> + + <owl:ObjectProperty rdf:about="&req;isInvalidFor"> + <rdf:type rdf:resource="&owl;InverseFunctionalProperty"/> + <rdfs:range rdf:resource="&req;AbstractRequirement"/> + <rdfs:domain rdf:resource="&req;AbstractRequirement"/> + <owl:inverseOf rdf:resource="&req;invalidates"/> + </owl:ObjectProperty> + + + + <!-- http://ns.softwiki.de/req/isLeadingTo --> + + <owl:ObjectProperty rdf:about="&req;isLeadingTo"> + <rdfs:label rdf:datatype="&xsd;string">lead to</rdfs:label> + <rdfs:domain rdf:resource="&req;AbstractRequirement"/> + <rdfs:range rdf:resource="&req;AbstractSource"/> + </owl:ObjectProperty> + + + + <!-- http://ns.softwiki.de/req/isRedundant --> + + <owl:ObjectProperty rdf:about="&req;isRedundant"> + <rdf:type rdf:resource="&owl;SymmetricProperty"/> + <owl:inverseOf rdf:resource="&req;isRedundant"/> + <rdfs:subPropertyOf rdf:resource="&req;undirectedrelation"/> + </owl:ObjectProperty> + + + + <!-- http://ns.softwiki.de/req/isRelated --> + + <owl:ObjectProperty rdf:about="&req;isRelated"> + <rdf:type rdf:resource="&owl;SymmetricProperty"/> + <owl:inverseOf rdf:resource="&req;isRelated"/> + <rdfs:subPropertyOf rdf:resource="&req;undirectedrelation"/> + </owl:ObjectProperty> + + + + <!-- http://ns.softwiki.de/req/isReleatedTo --> + + <owl:ObjectProperty rdf:about="&req;isReleatedTo"> + <rdfs:range rdf:resource="&req;Customer"/> + <rdfs:domain rdf:resource="&req;CustomerRequirement"/> + </owl:ObjectProperty> + + + + <!-- http://ns.softwiki.de/req/isSimilarTo --> + + <owl:ObjectProperty rdf:about="&req;isSimilarTo"> + <rdf:type rdf:resource="&owl;SymmetricProperty"/> + <rdfs:domain rdf:resource="&req;AbstractRequirement"/> + <rdfs:range rdf:resource="&req;AbstractRequirement"/> + <owl:inverseOf rdf:resource="&req;isSimilarTo"/> + <rdfs:subPropertyOf rdf:resource="&req;undirectedrelation"/> + </owl:ObjectProperty> + + + + <!-- http://ns.softwiki.de/req/leadsTo --> + + <owl:ObjectProperty rdf:about="&req;leadsTo"> + <rdfs:label rdf:datatype="&xsd;string">leads to</rdfs:label> + <rdfs:range rdf:resource="&req;AbstractRequirement"/> + <rdfs:domain rdf:resource="&req;AbstractSource"/> + <owl:inverseOf rdf:resource="&req;isLeadingTo"/> + </owl:ObjectProperty> + + + + <!-- http://ns.softwiki.de/req/rates --> + + <owl:ObjectProperty rdf:about="&req;rates"> + <rdfs:range rdf:resource="&req;AbstractRequirement"/> + <rdfs:domain rdf:resource="&req;Rating"/> + </owl:ObjectProperty> + + + + <!-- http://ns.softwiki.de/req/refersTo --> + + <owl:ObjectProperty rdf:about="&req;refersTo"> + <rdfs:label rdf:datatype="&xsd;string">refers to</rdfs:label> 
+ <rdfs:comment xml:lang="de">Relevanter Aspekt eines geplantes Systems (ähnlich zu Tagging).</rdfs:comment> + <rdfs:range rdf:resource="&req;AbstractReferencePoint"/> + <rdfs:domain rdf:resource="&req;AbstractRequirement"/> + <owl:inverseOf rdf:resource="&req;relevantRequirements"/> + </owl:ObjectProperty> + + + + <!-- http://ns.softwiki.de/req/relevantRequirements --> + + <owl:ObjectProperty rdf:about="&req;relevantRequirements"> + <rdfs:label rdf:datatype="&xsd;string">relevant requirements</rdfs:label> + <rdfs:domain rdf:resource="&req;AbstractReferencePoint"/> + <rdfs:range rdf:resource="&req;AbstractRequirement"/> + </owl:ObjectProperty> + + + + <!-- http://ns.softwiki.de/req/specifies --> + + <owl:ObjectProperty rdf:about="&req;specifies"> + <rdfs:range rdf:resource="&req;AbstractRequirement"/> + <rdfs:domain rdf:resource="&req;Topic"/> + </owl:ObjectProperty> + + + + <!-- http://ns.softwiki.de/req/undirectedrelation --> + + <owl:ObjectProperty rdf:about="&req;undirectedrelation"> + <rdf:type rdf:resource="&owl;FunctionalProperty"/> + <rdf:type rdf:resource="&owl;InverseFunctionalProperty"/> + <rdf:type rdf:resource="&owl;SymmetricProperty"/> + <rdfs:comment rdf:datatype="&xsd;string">Rule: only one ration between the same pair of two requirements allowed.</rdfs:comment> + <owl:inverseOf rdf:resource="&req;undirectedrelation"/> + </owl:ObjectProperty> + + + + <!-- http://ns.softwiki.de/req/votes --> + + <owl:ObjectProperty rdf:about="&req;votes"> + <rdfs:range rdf:resource="&req;AbstractRequirement"/> + <rdfs:domain rdf:resource="&req;Stakeholder"/> + </owl:ObjectProperty> + + + + <!-- http://ns.softwiki.de/req/willLeadTo --> + + <owl:ObjectProperty rdf:about="&req;willLeadTo"> + <rdfs:domain rdf:resource="&req;Requirement"/> + <rdfs:range rdf:resource="&req;SystemRequirement"/> + </owl:ObjectProperty> + + + + <!-- http://www.holygoat.co.uk/owl/redwood/0.1/tags/taggedWithTag --> + + <owl:ObjectProperty rdf:about="&tags;taggedWithTag"> + <rdfs:label xml:lang="de">Tags</rdfs:label> + </owl:ObjectProperty> + + + + <!-- + /////////////////////////////////////////////////////////////////////////////////////// + // + // Data properties + // + /////////////////////////////////////////////////////////////////////////////////////// + --> + + + + + <!-- http://ns.softwiki.de/req/averagePriorityRate --> + + <owl:DatatypeProperty rdf:about="&req;averagePriorityRate"> + <rdfs:subPropertyOf rdf:resource="&req;averageRate"/> + </owl:DatatypeProperty> + + + + <!-- http://ns.softwiki.de/req/averageQualityRate --> + + <owl:DatatypeProperty rdf:about="&req;averageQualityRate"> + <rdfs:subPropertyOf rdf:resource="&req;averageRate"/> + </owl:DatatypeProperty> + + + + <!-- http://ns.softwiki.de/req/averageRate --> + + <owl:DatatypeProperty rdf:about="&req;averageRate"> + <rdfs:comment rdf:datatype="&xsd;string">Is calculated by given rates.</rdfs:comment> + <rdfs:domain rdf:resource="&req;AbstractRequirement"/> + <rdfs:range rdf:resource="&xsd;float"/> + </owl:DatatypeProperty> + + + + <!-- http://ns.softwiki.de/req/changeDate --> + + <owl:DatatypeProperty rdf:about="&req;changeDate"> + <rdfs:domain rdf:resource="&req;AbstractRequirement"/> + <rdfs:range rdf:resource="&xsd;dateTime"/> + </owl:DatatypeProperty> + + + + <!-- http://ns.softwiki.de/req/creationDate --> + + <owl:DatatypeProperty rdf:about="&req;creationDate"> + <rdfs:domain rdf:resource="&req;AbstractRequirement"/> + <rdfs:range rdf:resource="&xsd;dateTime"/> + </owl:DatatypeProperty> + + + + <!-- http://ns.softwiki.de/req/definition --> + + 
<owl:DatatypeProperty rdf:about="&req;definition"> + <rdfs:domain rdf:resource="&req;DefinedKeyword"/> + <rdfs:range rdf:resource="&xsd;string"/> + </owl:DatatypeProperty> + + + + <!-- http://ns.softwiki.de/req/rate --> + + <owl:DatatypeProperty rdf:about="&req;rate"> + <rdfs:domain rdf:resource="&req;Rating"/> + <rdfs:range rdf:resource="&xsd;float"/> + </owl:DatatypeProperty> + + + + <!-- http://ns.softwiki.de/req/rational --> + + <owl:DatatypeProperty rdf:about="&req;rational"> + <rdf:type rdf:resource="&owl;FunctionalProperty"/> + <rdfs:label rdf:datatype="&xsd;string">rational</rdfs:label> + <rdfs:range rdf:resource="&xsd;string"/> + </owl:DatatypeProperty> + + + + <!-- http://ns.softwiki.de/req/result --> + + <owl:DatatypeProperty rdf:about="&req;result"> + <rdf:type rdf:resource="&owl;FunctionalProperty"/> + <rdfs:label rdf:datatype="&xsd;string">result</rdfs:label> + <rdfs:comment xml:lang="de">z.B. Veränderung von priority und agreement</rdfs:comment> + <rdfs:range rdf:resource="&xsd;string"/> + </owl:DatatypeProperty> + + + + <!-- http://ns.softwiki.de/req/role --> + + <owl:DatatypeProperty rdf:about="&req;role"> + <rdfs:domain rdf:resource="&req;Author"/> + </owl:DatatypeProperty> + + + + <!-- http://ns.softwiki.de/req/scenarioStep --> + + <owl:DatatypeProperty rdf:about="&req;scenarioStep"> + <rdfs:label rdf:datatype="&xsd;string">scenario step</rdfs:label> + <rdfs:comment rdf:datatype="&xsd;string"></rdfs:comment> + <owl:versionInfo rdf:datatype="&xsd;string">TODO: es muss eine konkrete Reihenfolge der Steps gegeben sein (Listenstruktur)</owl:versionInfo> + <rdfs:domain rdf:resource="&req;TextualScenario"/> + <rdfs:range rdf:resource="&xsd;string"/> + </owl:DatatypeProperty> + + + + <!-- http://ns.softwiki.de/req/state --> + + <owl:DatatypeProperty rdf:about="&req;state"> + <rdfs:domain rdf:resource="&req;AbstractRequirement"/> + <rdfs:range> + <rdfs:Datatype> + <owl:oneOf> + <rdf:Description> + <rdf:type rdf:resource="&rdf;List"/> + <rdf:first rdf:datatype="&xsd;string">isNegativDecided</rdf:first> + <rdf:rest> + <rdf:Description> + <rdf:type rdf:resource="&rdf;List"/> + <rdf:first rdf:datatype="&xsd;string">isPositvDecided</rdf:first> + <rdf:rest rdf:resource="&rdf;nil"/> + </rdf:Description> + </rdf:rest> + </rdf:Description> + </owl:oneOf> + </rdfs:Datatype> + </rdfs:range> + </owl:DatatypeProperty> + + + + <!-- http://ns.softwiki.de/req/voteTime --> + + <owl:DatatypeProperty rdf:about="&req;voteTime"> + <rdfs:label rdf:datatype="&xsd;string">vote time</rdfs:label> + <rdfs:domain rdf:resource="&req;Vote"/> + <rdfs:range rdf:resource="&xsd;dateTime"/> + </owl:DatatypeProperty> + + + + <!-- http://purl.org/dc/elements/1.1/description --> + + <owl:DatatypeProperty rdf:about="&dc;description"> + <rdf:type rdf:resource="&owl;FunctionalProperty"/> + <rdfs:label rdf:datatype="&xsd;string">description</rdfs:label> + <rdfs:label xml:lang="de">Beschreibung</rdfs:label> + <rdfs:range rdf:resource="&xsd;string"/> + <rdfs:domain> + <owl:Class> + <owl:unionOf rdf:parseType="Collection"> + <rdf:Description rdf:about="&req;Goal"/> + <rdf:Description rdf:about="&req;Requirement"/> + </owl:unionOf> + </owl:Class> + </rdfs:domain> + </owl:DatatypeProperty> + + + + <!-- http://purl.org/dc/elements/1.1/title --> + + <owl:DatatypeProperty rdf:about="&dc;title"/> + + + + <!-- + /////////////////////////////////////////////////////////////////////////////////////// + // + // Classes + // + /////////////////////////////////////////////////////////////////////////////////////// + --> + + + + + <!-- 
http://ns.softwiki.de/req/AbstractComment --> + + <owl:Class rdf:about="&req;AbstractComment"> + <rdfs:label>abstract comment</rdfs:label> + </owl:Class> + + + + <!-- http://ns.softwiki.de/req/AbstractReferencePoint --> + + <owl:Class rdf:about="&req;AbstractReferencePoint"> + <rdfs:label rdf:datatype="&xsd;string">reference point</rdfs:label> + <owl:disjointWith rdf:resource="&req;AbstractRequirement"/> + <owl:disjointWith rdf:resource="&req;AbstractSource"/> + <owl:disjointWith rdf:resource="&req;Author"/> + <owl:disjointWith rdf:resource="&req;Vote"/> + </owl:Class> + + + + <!-- http://ns.softwiki.de/req/AbstractRequirement --> + + <owl:Class rdf:about="&req;AbstractRequirement"> + <rdfs:label rdf:datatype="&xsd;string">abstract requirement</rdfs:label> + <rdfs:label xml:lang="de">abstraktes Requirement</rdfs:label> + <owl:disjointWith rdf:resource="&req;AbstractSource"/> + <owl:disjointWith rdf:resource="&req;Author"/> + <owl:disjointWith rdf:resource="&req;Vote"/> + <rdfs:comment rdf:datatype="&xsd;string">Es ist ungünstig, dass Requirement Subklasse von AbstractRequirement ist.</rdfs:comment> + </owl:Class> + + + + <!-- http://ns.softwiki.de/req/AbstractSource --> + + <owl:Class rdf:about="&req;AbstractSource"> + <rdfs:label rdf:datatype="&xsd;string">abstract source</rdfs:label> + <rdfs:subClassOf rdf:resource="&owl;Thing"/> + <owl:disjointWith rdf:resource="&req;Vote"/> + <rdfs:comment rdf:datatype="&xsd;string"></rdfs:comment> + </owl:Class> + + + + <!-- http://ns.softwiki.de/req/AllocatedRequirement --> + + <owl:Class rdf:about="&req;AllocatedRequirement"> + <rdfs:label>allocated requirement</rdfs:label> + <rdfs:subClassOf rdf:resource="&req;Requirement"/> + </owl:Class> + + + + <!-- http://ns.softwiki.de/req/ApplicationPointer --> + + <owl:Class rdf:about="&req;ApplicationPointer"> + <rdfs:label>application pointer</rdfs:label> + <rdfs:subClassOf rdf:resource="&req;AbstractReferencePoint"/> + </owl:Class> + + + + <!-- http://ns.softwiki.de/req/ApplicationState --> + + <owl:Class rdf:about="&req;ApplicationState"> + <rdfs:label>application state</rdfs:label> + <rdfs:subClassOf rdf:resource="&req;AbstractReferencePoint"/> + </owl:Class> + + + + <!-- http://ns.softwiki.de/req/Author --> + + <owl:Class rdf:about="&req;Author"> + <rdfs:label xml:lang="de">Autor</rdfs:label> + <rdfs:label xml:lang="en">author</rdfs:label> + <rdfs:subClassOf rdf:resource="&req;Stakeholder"/> + <owl:disjointWith rdf:resource="&req;Vote"/> + </owl:Class> + + + + <!-- http://ns.softwiki.de/req/Comment --> + + <owl:Class rdf:about="&req;Comment"> + <rdfs:label>comment</rdfs:label> + <rdfs:subClassOf rdf:resource="&req;AbstractComment"/> + </owl:Class> + + + + <!-- http://ns.softwiki.de/req/Creditor --> + + <owl:Class rdf:about="&req;Creditor"> + <rdfs:label>creditor</rdfs:label> + <rdfs:subClassOf rdf:resource="&req;Stakeholder"/> + </owl:Class> + + + + <!-- http://ns.softwiki.de/req/Customer --> + + <owl:Class rdf:about="&req;Customer"> + <rdfs:label>customer</rdfs:label> + <rdfs:subClassOf rdf:resource="&req;Stakeholder"/> + <owl:disjointWith rdf:resource="&req;Programmer"/> + </owl:Class> + + + + <!-- http://ns.softwiki.de/req/CustomerRequirement --> + + <owl:Class rdf:about="&req;CustomerRequirement"> + <rdfs:label>customer requirement</rdfs:label> + <owl:equivalentClass> + <owl:Class> + <owl:intersectionOf rdf:parseType="Collection"> + <rdf:Description rdf:about="&req;Requirement"/> + <owl:Restriction> + <owl:onProperty rdf:resource="&req;isCreatedBy"/> + <owl:someValuesFrom 
rdf:resource="&req;Customer"/> + </owl:Restriction> + </owl:intersectionOf> + </owl:Class> + </owl:equivalentClass> + <rdfs:subClassOf rdf:resource="&req;Requirement"/> + </owl:Class> + + + + <!-- http://ns.softwiki.de/req/DefinedKeyword --> + + <owl:Class rdf:about="&req;DefinedKeyword"> + <rdfs:label>defined keyword</rdfs:label> + <rdfs:subClassOf rdf:resource="&req;Keyword"/> + </owl:Class> + + + + <!-- http://ns.softwiki.de/req/DerivedRequirement --> + + <owl:Class rdf:about="&req;DerivedRequirement"> + <rdfs:label>derived requirement</rdfs:label> + <rdfs:subClassOf rdf:resource="&req;Requirement"/> + </owl:Class> + + + + <!-- http://ns.softwiki.de/req/DesignRequirement --> + + <owl:Class rdf:about="&req;DesignRequirement"> + <rdfs:label>design requirement</rdfs:label> + <rdfs:subClassOf rdf:resource="&req;Requirement"/> + </owl:Class> + + + + <!-- http://ns.softwiki.de/req/Document --> + + <owl:Class rdf:about="&req;Document"> + <rdfs:label>document</rdfs:label> + <owl:equivalentClass> + <owl:Class> + <owl:intersectionOf rdf:parseType="Collection"> + <rdf:Description rdf:about="&req;AbstractSource"/> + <owl:Restriction> + <owl:onProperty rdf:resource="&req;leadsTo"/> + <owl:someValuesFrom rdf:resource="&req;AbstractRequirement"/> + </owl:Restriction> + </owl:intersectionOf> + </owl:Class> + </owl:equivalentClass> + <rdfs:subClassOf rdf:resource="&req;AbstractSource"/> + </owl:Class> + + + + <!-- http://ns.softwiki.de/req/FunctionalRequirement --> + + <owl:Class rdf:about="&req;FunctionalRequirement"> + <rdfs:label rdf:datatype="&xsd;string">functional requirement</rdfs:label> + <rdfs:subClassOf rdf:resource="&req;Requirement"/> + <owl:disjointWith rdf:resource="&req;QualityRequirement"/> + <rdfs:comment rdf:datatype="&xsd;string">refers to functional reference point, for instance components of the system</rdfs:comment> + </owl:Class> + + + + <!-- http://ns.softwiki.de/req/Goal --> + + <owl:Class rdf:about="&req;Goal"> + <rdfs:label rdf:datatype="&xsd;string">goal</rdfs:label> + <rdfs:label xml:lang="de">Ziel</rdfs:label> + <rdfs:subClassOf rdf:resource="&req;AbstractRequirement"/> + <rdfs:subClassOf> + <owl:Restriction> + <owl:onProperty rdf:resource="&dc;description"/> + <owl:cardinality rdf:datatype="&xsd;nonNegativeInteger">1</owl:cardinality> + </owl:Restriction> + </rdfs:subClassOf> + <owl:disjointWith rdf:resource="&req;Requirement"/> + <owl:disjointWith rdf:resource="&req;Scenario"/> + </owl:Class> + + + + <!-- http://ns.softwiki.de/req/Government --> + + <owl:Class rdf:about="&req;Government"> + <rdfs:label>government</rdfs:label> + <rdfs:subClassOf rdf:resource="&req;Stakeholder"/> + </owl:Class> + + + + <!-- http://ns.softwiki.de/req/Keyword --> + + <owl:Class rdf:about="&req;Keyword"> + <rdfs:label>keyword</rdfs:label> + <rdfs:subClassOf rdf:resource="&req;AbstractReferencePoint"/> + </owl:Class> + + + + <!-- http://ns.softwiki.de/req/PerformanceRequirement --> + + <owl:Class rdf:about="&req;PerformanceRequirement"> + <rdfs:label>performance requirement</rdfs:label> + <owl:equivalentClass> + <owl:Class> + <owl:intersectionOf rdf:parseType="Collection"> + <rdf:Description rdf:about="&req;Requirement"/> + <owl:Restriction> + <owl:onProperty rdf:resource="&req;willLeadTo"/> + <owl:someValuesFrom rdf:resource="&req;SystemRequirement"/> + </owl:Restriction> + </owl:intersectionOf> + </owl:Class> + </owl:equivalentClass> + <rdfs:subClassOf rdf:resource="&req;Requirement"/> + <owl:disjointWith rdf:resource="&req;SystemRequirement"/> + </owl:Class> + + + + <!-- 
http://ns.softwiki.de/req/PriorityRating --> + + <owl:Class rdf:about="&req;PriorityRating"> + <rdfs:label>priority rating</rdfs:label> + <rdfs:subClassOf rdf:resource="&req;Rating"/> + <rdfs:comment rdf:datatype="&xsd;string">Rule: Every Author only defines at most one rating about the priority for each requirement.</rdfs:comment> + </owl:Class> + + + + <!-- http://ns.softwiki.de/req/Programmer --> + + <owl:Class rdf:about="&req;Programmer"> + <rdfs:label>programmer</rdfs:label> + <rdfs:subClassOf rdf:resource="&req;Stakeholder"/> + </owl:Class> + + + + <!-- http://ns.softwiki.de/req/QualityRating --> + + <owl:Class rdf:about="&req;QualityRating"> + <rdfs:label>quality rating</rdfs:label> + <rdfs:subClassOf rdf:resource="&req;Rating"/> + <rdfs:comment rdf:datatype="&xsd;string">Rule: Every Author only defines at most one rating about the quality for each requirement.</rdfs:comment> + </owl:Class> + + + + <!-- http://ns.softwiki.de/req/QualityRequirement --> + + <owl:Class rdf:about="&req;QualityRequirement"> + <rdfs:label rdf:datatype="&xsd;string">quality requirement</rdfs:label> + <rdfs:subClassOf rdf:resource="&req;Requirement"/> + <rdfs:comment rdf:datatype="&xsd;string">refers to quality reference point, e.g. reliability, performance, usability</rdfs:comment> + </owl:Class> + + + + <!-- http://ns.softwiki.de/req/Rating --> + + <owl:Class rdf:about="&req;Rating"> + <rdfs:label>rating</rdfs:label> + <rdfs:subClassOf rdf:resource="&req;AbstractComment"/> + </owl:Class> + + + + <!-- http://ns.softwiki.de/req/Requirement --> + + <owl:Class rdf:about="&req;Requirement"> + <rdfs:label rdf:datatype="&xsd;string">requirement</rdfs:label> + <rdfs:label xml:lang="de">Anforderung(en)</rdfs:label> + <rdfs:subClassOf rdf:resource="&req;AbstractRequirement"/> + <rdfs:subClassOf> + <owl:Restriction> + <owl:onProperty rdf:resource="&dc;description"/> + <owl:cardinality rdf:datatype="&xsd;nonNegativeInteger">1</owl:cardinality> + </owl:Restriction> + </rdfs:subClassOf> + <owl:disjointWith rdf:resource="&req;Scenario"/> + <rdfs:comment rdf:datatype="&xsd;string"></rdfs:comment> + <owl:versionInfo rdf:datatype="&xsd;string">TODO: semantische Verfeinerung geplant, d.h. Anforderungen nicht nur als Textstring, sondern z.B. 
als RDF-Triple formulieren</owl:versionInfo> + </owl:Class> + + + + <!-- http://ns.softwiki.de/req/Scenario --> + + <owl:Class rdf:about="&req;Scenario"> + <rdfs:label rdf:datatype="&xsd;string">scenario</rdfs:label> + <rdfs:subClassOf rdf:resource="&req;AbstractRequirement"/> + </owl:Class> + + + + <!-- http://ns.softwiki.de/req/SeniorManagementStaff --> + + <owl:Class rdf:about="&req;SeniorManagementStaff"> + <rdfs:label>senior management staff</rdfs:label> + <rdfs:subClassOf rdf:resource="&req;Stakeholder"/> + </owl:Class> + + + + <!-- http://ns.softwiki.de/req/Stakeholder --> + + <owl:Class rdf:about="&req;Stakeholder"> + <rdfs:label>stakeholder</rdfs:label> + <rdfs:subClassOf rdf:resource="&req;AbstractSource"/> + </owl:Class> + + + + <!-- http://ns.softwiki.de/req/SystemRequirement --> + + <owl:Class rdf:about="&req;SystemRequirement"> + <rdfs:label>system requirement</rdfs:label> + <rdfs:subClassOf rdf:resource="&owl;Thing"/> + </owl:Class> + + + + <!-- http://ns.softwiki.de/req/TextualScenario --> + + <owl:Class rdf:about="&req;TextualScenario"> + <rdfs:label rdf:datatype="&xsd;string">textual scenario</rdfs:label> + <rdfs:subClassOf rdf:resource="&req;Scenario"/> + <rdfs:subClassOf> + <owl:Restriction> + <owl:onProperty rdf:resource="&req;scenarioStep"/> + <owl:minCardinality rdf:datatype="&xsd;nonNegativeInteger">1</owl:minCardinality> + </owl:Restriction> + </rdfs:subClassOf> + <rdfs:comment xml:lang="de">Szenario, welches aus mehreren textuell beschriebenen Szenarioschritten besteht.</rdfs:comment> + </owl:Class> + + + + <!-- http://ns.softwiki.de/req/Topic --> + + <owl:Class rdf:about="&req;Topic"> + <rdfs:label>topic</rdfs:label> + <rdfs:subClassOf rdf:resource="&req;DefinedKeyword"/> + <rdfs:comment rdf:datatype="&xsd;string">Rule: Every Requirement refers to exact one topic.</rdfs:comment> + </owl:Class> + + + + <!-- http://ns.softwiki.de/req/TradeUnion --> + + <owl:Class rdf:about="&req;TradeUnion"> + <rdfs:label>trade union</rdfs:label> + <rdfs:subClassOf rdf:resource="&req;Stakeholder"/> + </owl:Class> + + + + <!-- http://ns.softwiki.de/req/Vote --> + + <owl:Class rdf:about="&req;Vote"> + <rdfs:label rdf:datatype="&xsd;string">vote</rdfs:label> + <rdfs:subClassOf rdf:resource="&req;AbstractComment"/> + <rdfs:comment rdf:datatype="&xsd;string">Rule: Every Author only votes at most one requirement.</rdfs:comment> + </owl:Class> + + + + <!-- http://purl.org/dc/dcmitype/Image --> + + <owl:Class rdf:about="&dcmitype;Image"> + <rdfs:label>image</rdfs:label> + </owl:Class> + + + + <!-- http://www.w3.org/2000/01/rdf-schema#Resource --> + + <owl:Class rdf:about="&rdfs;Resource"> + <rdfs:label>resource</rdfs:label> + </owl:Class> + + + + <!-- http://www.w3.org/2001/XMLSchema#string --> + + <owl:Class rdf:about="&xsd;string"> + <rdfs:label rdf:datatype="&xsd;string">string</rdfs:label> + </owl:Class> + + + + <!-- http://www.w3.org/2002/07/owl#Datatype --> + + <owl:Class rdf:about="&owl;Datatype"/> + + + + <!-- http://www.w3.org/2002/07/owl#Thing --> + + <owl:Class rdf:about="&owl;Thing"/> + + + + <!-- http://www.w3.org/2004/02/skos/core#Concept --> + + <owl:Class rdf:about="&skos;Concept"> + <rdfs:label>concept</rdfs:label> + <rdfs:label xml:lang="de">Thema</rdfs:label> + </owl:Class> + + + + <!-- http://xmlns.com/foaf/0.1/Document --> + + <owl:Class rdf:about="&foaf2;Document"> + <rdfs:label>document</rdfs:label> + </owl:Class> + + + + <!-- + /////////////////////////////////////////////////////////////////////////////////////// + // + // Individuals + // + 
/////////////////////////////////////////////////////////////////////////////////////// + --> + + + + + <!-- http://ns.softwiki.de/req/1 --> + + <owl:Thing rdf:about="&req;1"> + <rdf:type rdf:resource="&req;QualityRating"/> + <rdf:type rdf:resource="&owl;NamedIndividual"/> + <rdfs:label>1</rdfs:label> + <rates rdf:resource="&req;BuildASecureLoginSystem"/> + </owl:Thing> + + + + <!-- http://ns.softwiki.de/req/2 --> + + <owl:Thing rdf:about="&req;2"> + <rdf:type rdf:resource="&req;QualityRating"/> + <rdf:type rdf:resource="&owl;NamedIndividual"/> + <rdfs:label>2</rdfs:label> + <rates rdf:resource="&req;BuildASoftwareThatRuns24hADay"/> + </owl:Thing> + + + + <!-- http://ns.softwiki.de/req/3 --> + + <owl:Thing rdf:about="&req;3"> + <rdf:type rdf:resource="&req;QualityRating"/> + <rdf:type rdf:resource="&owl;NamedIndividual"/> + <rdfs:label>3</rdfs:label> + <rates rdf:resource="&req;BuildLoginSystem"/> + </owl:Thing> + + + + <!-- http://ns.softwiki.de/req/4 --> + + <owl:Thing rdf:about="&req;4"> + <rdf:type rdf:resource="&req;QualityRating"/> + <rdf:type rdf:resource="&owl;NamedIndividual"/> + <rdfs:label>4</rdfs:label> + <rates rdf:resource="&req;BuildNetworkLoginSystem"/> + </owl:Thing> + + + + <!-- http://ns.softwiki.de/req/5 --> + + <owl:Thing rdf:about="&req;5"> + <rdf:type rdf:resource="&req;QualityRating"/> + <rdf:type rdf:resource="&owl;NamedIndividual"/> + <rdfs:label>5</rdfs:label> + <rates rdf:resource="&req;DataBaseBackupCreatedSyncron"/> + </owl:Thing> + + + + <!-- http://ns.softwiki.de/req/6 --> + + <owl:Thing rdf:about="&req;6"> + <rdf:type rdf:resource="&req;QualityRating"/> + <rdf:type rdf:resource="&owl;NamedIndividual"/> + <rdfs:label>6</rdfs:label> + <rates rdf:resource="&req;CustomerRequirement1"/> + </owl:Thing> + + + + <!-- http://ns.softwiki.de/req/7 --> + + <owl:Thing rdf:about="&req;7"> + <rdf:type rdf:resource="&req;QualityRating"/> + <rdf:type rdf:resource="&owl;NamedIndividual"/> + <rdfs:label>7</rdfs:label> + <rates rdf:resource="&req;DialogSystemShoudRespondInUnder5Sec"/> + </owl:Thing> + + + + <!-- http://ns.softwiki.de/req/8 --> + + <owl:Thing rdf:about="&req;8"> + <rdf:type rdf:resource="&req;QualityRating"/> + <rdf:type rdf:resource="&owl;NamedIndividual"/> + <rdfs:label>8</rdfs:label> + <rates rdf:resource="&req;loadGUIInUnder2Sec"/> + </owl:Thing> + + + + <!-- http://ns.softwiki.de/req/9 --> + + <owl:Thing rdf:about="&req;9"> + <rdf:type rdf:resource="&req;QualityRating"/> + <rdf:type rdf:resource="&owl;NamedIndividual"/> + <rdfs:label>9</rdfs:label> + <rates rdf:resource="&req;LogEveryUserActivity"/> + </owl:Thing> + + + + <!-- http://ns.softwiki.de/req/ActiveHelpDialog --> + + <owl:Thing rdf:about="&req;ActiveHelpDialog"> + <rdf:type rdf:resource="&req;DesignRequirement"/> + <rdf:type rdf:resource="&owl;NamedIndividual"/> + <rdfs:label>Active Help Dialog</rdfs:label> + <isCreatedBy rdf:resource="&req;GermanGovernment"/> + <isLeadingTo rdf:resource="&req;Pflichtenheft"/> + </owl:Thing> + + + + <!-- http://ns.softwiki.de/req/Andrew_Stellman --> + + <owl:Thing rdf:about="&req;Andrew_Stellman"> + <rdf:type rdf:resource="&req;Author"/> + <rdf:type rdf:resource="&owl;NamedIndividual"/> + <rdfs:label>Andrew Stellman</rdfs:label> + <defines rdf:resource="&req;MultiUserSystem"/> + <votes rdf:resource="&req;UseDatabaseToStoreUserData"/> + </owl:Thing> + + + + <!-- http://ns.softwiki.de/req/BuildAFastSoftware --> + + <owl:Thing rdf:about="&req;BuildAFastSoftware"> + <rdf:type rdf:resource="&req;CustomerRequirement"/> + <rdf:type rdf:resource="&owl;NamedIndividual"/> 
+ <rdfs:label>Build a Fast Software</rdfs:label> + <isCreatedBy rdf:resource="&req;Charlotte_Blay"/> + <isLeadingTo rdf:resource="&req;Lastenheft"/> + <refersTo rdf:resource="&req;PerformanceTopic"/> + </owl:Thing> + + + + <!-- http://ns.softwiki.de/req/BuildASecureLoginSystem --> + + <owl:Thing rdf:about="&req;BuildASecureLoginSystem"> + <rdf:type rdf:resource="&req;DerivedRequirement"/> + <rdf:type rdf:resource="&owl;NamedIndividual"/> + <rdfs:label>Build A Secure Login System</rdfs:label> + <details rdf:resource="&req;BuildLoginSystem"/> + <isLeadingTo rdf:resource="&req;Pflichtenheft"/> + </owl:Thing> + + + + <!-- http://ns.softwiki.de/req/BuildASoftwareThatRuns24hADay --> + + <owl:Thing rdf:about="&req;BuildASoftwareThatRuns24hADay"> + <rdf:type rdf:resource="&req;CustomerRequirement"/> + <rdf:type rdf:resource="&owl;NamedIndividual"/> + <rdfs:label>Build A Software That Runs 24 h A Day</rdfs:label> + <isLeadingTo rdf:resource="&req;Lastenheft"/> + <isCreatedBy rdf:resource="&req;Philippe_Soupault"/> + <depentsOn rdf:resource="&req;SystemStabilityRequirement"/> + <refersTo rdf:resource="&req;SystemStabilityTopic"/> + </owl:Thing> + + + + <!-- http://ns.softwiki.de/req/BuildLoginSystem --> + + <owl:Thing rdf:about="&req;BuildLoginSystem"> + <rdf:type rdf:resource="&req;DerivedRequirement"/> + <rdf:type rdf:resource="&owl;NamedIndividual"/> + <rdfs:label>Build Login System</rdfs:label> + <details rdf:resource="&req;MultiUserSystem"/> + <isLeadingTo rdf:resource="&req;Pflichtenheft"/> + </owl:Thing> + + + + <!-- http://ns.softwiki.de/req/BuildNetworkLoginSystem --> + + <owl:Thing rdf:about="&req;BuildNetworkLoginSystem"> + <rdf:type rdf:resource="&req;DerivedRequirement"/> + <rdf:type rdf:resource="&owl;NamedIndividual"/> + <rdfs:label>Build Network Login System</rdfs:label> + <isLeadingTo rdf:resource="&req;Pflichtenheft"/> + <details rdf:resource="&req;UserCanAccessDataFromEveryComputer"/> + </owl:Thing> + + + + <!-- http://ns.softwiki.de/req/CentralOrganisationOfFinnishTrade --> + + <owl:Thing rdf:about="&req;CentralOrganisationOfFinnishTrade"> + <rdf:type rdf:resource="&req;TradeUnion"/> + <rdf:type rdf:resource="&owl;NamedIndividual"/> + <rdfs:label>Central Organisaion Of Finnish Trade</rdfs:label> + </owl:Thing> + + + + <!-- http://ns.softwiki.de/req/Charlotte_Blay --> + + <owl:Thing rdf:about="&req;Charlotte_Blay"> + <rdf:type rdf:resource="&req;Customer"/> + <rdf:type rdf:resource="&owl;NamedIndividual"/> + <rdfs:label>Carlotte Blay</rdfs:label> + </owl:Thing> + + + + <!-- http://ns.softwiki.de/req/Consistent --> + + <owl:Thing rdf:about="&req;Consistent"> + <rdf:type rdf:resource="&owl;NamedIndividual"/> + <rdfs:label>Consistent</rdfs:label> + <isCreatedBy rdf:resource="&req;USGovernment"/> + </owl:Thing> + + + + <!-- http://ns.softwiki.de/req/Correct --> + + <owl:Thing rdf:about="&req;Correct"> + <rdf:type rdf:resource="&owl;NamedIndividual"/> + <rdfs:label>Correct</rdfs:label> + <isCreatedBy rdf:resource="&req;Jennifer_Greene"/> + </owl:Thing> + + + + <!-- http://ns.softwiki.de/req/CreateACheaperSoftware --> + + <owl:Thing rdf:about="&req;CreateACheaperSoftware"> + <rdf:type rdf:resource="&req;Goal"/> + <rdf:type rdf:resource="&owl;NamedIndividual"/> + <rdfs:label>Create A Cheaper Software</rdfs:label> + <details rdf:resource="&req;SearchShouldBeDoneIn3Sec"/> + <isDefinedBy rdf:resource="&req;Tim"/> + </owl:Thing> + + + + <!-- http://ns.softwiki.de/req/CreateDatabaseInterface --> + + <owl:Thing rdf:about="&req;CreateDatabaseInterface"> + <rdf:type 
rdf:resource="&req;DerivedRequirement"/> + <rdf:type rdf:resource="&owl;NamedIndividual"/> + <rdfs:label>Create Database Interface</rdfs:label> + <details rdf:resource="&req;DataBaseBackupCreatedSyncron"/> + <isLeadingTo rdf:resource="&req;Lastenheft"/> + </owl:Thing> + + + + <!-- http://ns.softwiki.de/req/CreateModernGUIDesign --> + + <owl:Thing rdf:about="&req;CreateModernGUIDesign"> + <rdf:type rdf:resource="&req;CustomerRequirement"/> + <rdf:type rdf:resource="&owl;NamedIndividual"/> + <rdfs:label>Create Modern GUI Design</rdfs:label> + <refersTo rdf:resource="&req;DesignTopic"/> + <isCreatedBy rdf:resource="&req;Jane_Smiley"/> + <isLeadingTo rdf:resource="&req;Lastenheft"/> + <isCommentedBy rdf:resource="&req;UsefulRequirement"/> + </owl:Thing> + + + + <!-- http://ns.softwiki.de/req/CreateNetworkInterface --> + + <owl:Thing rdf:about="&req;CreateNetworkInterface"> + <rdf:type rdf:resource="&req;DerivedRequirement"/> + <rdf:type rdf:resource="&owl;NamedIndividual"/> + <rdfs:label>Create Network Interface</rdfs:label> + <details rdf:resource="&req;BuildNetworkLoginSystem"/> + <isLeadingTo rdf:resource="&req;Pflichtenheft"/> + </owl:Thing> + + + + <!-- http://ns.softwiki.de/req/CreateVersion1 --> + + <owl:Thing rdf:about="&req;CreateVersion1"> + <rdf:type rdf:resource="&req;Goal"/> + <rdf:type rdf:resource="&owl;NamedIndividual"/> + <rdfs:label>Create Version 1</rdfs:label> + <isDefinedBy rdf:resource="&req;Jim"/> + <details rdf:resource="&req;SystemStabilityRequirement"/> + </owl:Thing> + + + + <!-- http://ns.softwiki.de/req/CreateVersion2 --> + + <owl:Thing rdf:about="&req;CreateVersion2"> + <rdf:type rdf:resource="&req;Goal"/> + <rdf:type rdf:resource="&owl;NamedIndividual"/> + <rdfs:label>Create Version 2</rdfs:label> + <isDefinedBy rdf:resource="&req;Tom"/> + <details rdf:resource="&req;UseDatabaseToStoreUserData"/> + </owl:Thing> + + + + <!-- http://ns.softwiki.de/req/CustomerRequirement1 --> + + <owl:Thing rdf:about="&req;CustomerRequirement1"> + <rdf:type rdf:resource="&req;Requirement"/> + <rdf:type rdf:resource="&owl;NamedIndividual"/> + <rdfs:label>Customer Requirement 1</rdfs:label> + <isLeadingTo rdf:resource="&req;Lastenheft"/> + <isCreatedBy rdf:resource="&req;Tom"/> + </owl:Thing> + + + + <!-- http://ns.softwiki.de/req/DataBaseBackupCreatedSyncron --> + + <owl:Thing rdf:about="&req;DataBaseBackupCreatedSyncron"> + <rdf:type rdf:resource="&req;PerformanceRequirement"/> + <rdf:type rdf:resource="&owl;NamedIndividual"/> + <rdfs:label>Database Backup Created Syncrom</rdfs:label> + <isCreatedBy rdf:resource="&req;Andrew_Stellman"/> + <willLeadTo rdf:resource="&req;DualCoreSystemWith8GigRam"/> + <isCommentedBy rdf:resource="&req;MustBeDiscussed"/> + <refersTo rdf:resource="&req;PerformanceTopic"/> + <isLeadingTo rdf:resource="&req;Pflichtenheft"/> + </owl:Thing> + + + + <!-- http://ns.softwiki.de/req/DataExecutionShouldBeDoneInUnder3Sec --> + + <owl:Thing rdf:about="&req;DataExecutionShouldBeDoneInUnder3Sec"> + <rdf:type rdf:resource="&req;PerformanceRequirement"/> + <rdf:type rdf:resource="&owl;NamedIndividual"/> + <rdfs:label>Dataexecution Should Be Done In Under 3 Sec</rdfs:label> + <isCreatedBy rdf:resource="&req;Jennifer_Greene"/> + <isLeadingTo rdf:resource="&req;Lastenheft"/> + <isCommentedBy rdf:resource="&req;MustBeDiscussed"/> + <refersTo rdf:resource="&req;PerformanceTopic"/> + <willLeadTo rdf:resource="&req;QuadCoreSystemWith16GigRam"/> + </owl:Thing> + + + + <!-- http://ns.softwiki.de/req/DatabaseServerCanBeUsedBy1000PersonsSimultaneus --> + + <owl:Thing 
rdf:about="&req;DatabaseServerCanBeUsedBy1000PersonsSimultaneus"> + <rdf:type rdf:resource="&req;PerformanceRequirement"/> + <rdf:type rdf:resource="&owl;NamedIndividual"/> + <rdfs:label>Database Server Can Be Used By 1000 Persons Simultaneus</rdfs:label> + <willLeadTo rdf:resource="&req;DatabaseServerWith32GigRam"/> + <isLeadingTo rdf:resource="&req;Lastenheft"/> + <isCommentedBy rdf:resource="&req;MustBeDiscussed"/> + <refersTo rdf:resource="&req;PerformanceTopic"/> + <isCreatedBy rdf:resource="&req;Tom"/> + </owl:Thing> + + + + <!-- http://ns.softwiki.de/req/DatabaseServerWith32GigRam --> + + <owl:Thing rdf:about="&req;DatabaseServerWith32GigRam"> + <rdf:type rdf:resource="&req;SystemRequirement"/> + <rdf:type rdf:resource="&owl;NamedIndividual"/> + <rdfs:label>Database Server With 32 Gig Ram</rdfs:label> + </owl:Thing> + + + + <!-- http://ns.softwiki.de/req/DatabaseTopic --> + + <owl:Thing rdf:about="&req;DatabaseTopic"> + <rdf:type rdf:resource="&req;Topic"/> + <rdf:type rdf:resource="&owl;NamedIndividual"/> + <rdfs:label>Database Topic</rdfs:label> + <specifies rdf:resource="&req;UseDatabaseToStoreUserData"/> + </owl:Thing> + + + + <!-- http://ns.softwiki.de/req/Derick_Garnier --> + + <owl:Thing rdf:about="&req;Derick_Garnier"> + <rdf:type rdf:resource="&req;Customer"/> + <rdf:type rdf:resource="&owl;NamedIndividual"/> + <rdfs:label>Derick Garnier</rdfs:label> + </owl:Thing> + + + + <!-- http://ns.softwiki.de/req/DesignTopic --> + + <owl:Thing rdf:about="&req;DesignTopic"> + <rdf:type rdf:resource="&req;Topic"/> + <rdf:type rdf:resource="&owl;NamedIndividual"/> + <rdfs:label>Design Topic</rdfs:label> + <specifies rdf:resource="&req;CreateModernGUIDesign"/> + <specifies rdf:resource="&req;UseOfIcons"/> + <specifies rdf:resource="&req;WindowDesign"/> + </owl:Thing> + + + + <!-- http://ns.softwiki.de/req/DialogSystemShoudRespondInUnder5Sec --> + + <owl:Thing rdf:about="&req;DialogSystemShoudRespondInUnder5Sec"> + <rdf:type rdf:resource="&req;PerformanceRequirement"/> + <rdf:type rdf:resource="&owl;NamedIndividual"/> + <rdfs:label>Dialog System Should Respond In Under 5 Sec</rdfs:label> + <willLeadTo rdf:resource="&req;DualCoreSystemWith8GigRam"/> + <isCommentedBy rdf:resource="&req;GoodIdea"/> + <isLeadingTo rdf:resource="&req;Lastenheft"/> + <refersTo rdf:resource="&req;PerformanceTopic"/> + <isCreatedBy rdf:resource="&req;Tom"/> + </owl:Thing> + + + + <!-- http://ns.softwiki.de/req/DualCoreSystemWith8GigRam --> + + <owl:Thing rdf:about="&req;DualCoreSystemWith8GigRam"> + <rdf:type rdf:resource="&req;SystemRequirement"/> + <rdf:type rdf:resource="&owl;NamedIndividual"/> + <rdfs:label>Dual Core System With 8 Gig Ram</rdfs:label> + </owl:Thing> + + + + <!-- http://ns.softwiki.de/req/EuropeanTradeUnionConfederation --> + + <owl:Thing rdf:about="&req;EuropeanTradeUnionConfederation"> + <rdf:type rdf:resource="&req;TradeUnion"/> + <rdf:type rdf:resource="&owl;NamedIndividual"/> + <rdfs:label>European Trade Union Confederation</rdfs:label> + </owl:Thing> + + + + <!-- http://ns.softwiki.de/req/FunctionTopic --> + + <owl:Thing rdf:about="&req;FunctionTopic"> + <rdf:type rdf:resource="&req;Topic"/> + <rdf:type rdf:resource="&owl;NamedIndividual"/> + <rdfs:label>Function Topic</rdfs:label> + <specifies rdf:resource="&req;LogEveryUserActivity"/> + </owl:Thing> + + + + <!-- http://ns.softwiki.de/req/GermanGovernment --> + + <owl:Thing rdf:about="&req;GermanGovernment"> + <rdf:type rdf:resource="&req;Government"/> + <rdf:type rdf:resource="&owl;NamedIndividual"/> + <rdfs:label>German 
Government</rdfs:label> + </owl:Thing> + + + + <!-- http://ns.softwiki.de/req/GoodIdea --> + + <owl:Thing rdf:about="&req;GoodIdea"> + <rdf:type rdf:resource="&req;Comment"/> + <rdf:type rdf:resource="&owl;NamedIndividual"/> + <rdfs:label>Good Idea</rdfs:label> + <comments rdf:resource="&req;DialogSystemShoudRespondInUnder5Sec"/> + <isDefinedBy rdf:resource="&req;Jennifer_Greene"/> + <isDefinedBy rdf:resource="&req;Stefan"/> + </owl:Thing> + + + + <!-- http://ns.softwiki.de/req/Jane_Smiley --> + + <owl:Thing rdf:about="&req;Jane_Smiley"> + <rdf:type rdf:resource="&req;Customer"/> + <rdf:type rdf:resource="&owl;NamedIndividual"/> + <rdfs:label>Jane Smiley</rdfs:label> + </owl:Thing> + + + + <!-- http://ns.softwiki.de/req/Jennifer_Greene --> + + <owl:Thing rdf:about="&req;Jennifer_Greene"> + <rdf:type rdf:resource="&req;Author"/> + <rdf:type rdf:resource="&owl;NamedIndividual"/> + <rdfs:label>Jennifer Greene</rdfs:label> + <votes rdf:resource="&req;MultiWindowSystem"/> + <defines rdf:resource="&req;loadGUIInUnder2Sec"/> + </owl:Thing> + + + + <!-- http://ns.softwiki.de/req/Jill --> + + <owl:Thing rdf:about="&req;Jill"> + <rdf:type rdf:resource="&req;Author"/> + <rdf:type rdf:resource="&owl;NamedIndividual"/> + <rdfs:label>Jill</rdfs:label> + <defines rdf:resource="&req;DialogSystemShoudRespondInUnder5Sec"/> + <votes rdf:resource="&req;SystemRequirement1"/> + </owl:Thing> + + + + <!-- http://ns.softwiki.de/req/Jim --> + + <owl:Thing rdf:about="&req;Jim"> + <rdf:type rdf:resource="&req;Author"/> + <rdf:type rdf:resource="&owl;NamedIndividual"/> + <rdfs:label>Jim</rdfs:label> + <defines rdf:resource="&req;MultiWindowSystem"/> + <votes rdf:resource="&req;SystemStabilityRequirement"/> + </owl:Thing> + + + + <!-- http://ns.softwiki.de/req/Lastenheft --> + + <owl:Thing rdf:about="&req;Lastenheft"> + <rdf:type rdf:resource="&req;Document"/> + <rdf:type rdf:resource="&owl;NamedIndividual"/> + <rdfs:label>Lastenheft</rdfs:label> + <leadsTo rdf:resource="&req;text"/> + </owl:Thing> + + + + <!-- http://ns.softwiki.de/req/LogEveryUserActivity --> + + <owl:Thing rdf:about="&req;LogEveryUserActivity"> + <rdf:type rdf:resource="&req;FunctionalRequirement"/> + <rdf:type rdf:resource="&owl;NamedIndividual"/> + ... [truncated message content] |
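The Requirement class in the ontology fragment above carries an exact-cardinality restriction on dc:description (owl:cardinality 1), i.e. every requirement must have exactly one description. A minimal OWL API sketch of how that axiom can be constructed programmatically is given below; the dc namespace IRI is an assumption, and dc:description is treated as a data property, which the untyped owl:cardinality in the RDF/XML leaves open:

import org.semanticweb.owlapi.apibinding.OWLManager;
import org.semanticweb.owlapi.model.IRI;
import org.semanticweb.owlapi.model.OWLAxiom;
import org.semanticweb.owlapi.model.OWLDataFactory;

public class RequirementAxiomSketch {

    private static final String REQ = "http://ns.softwiki.de/req/";
    // assumed expansion of the &dc; entity
    private static final String DC = "http://purl.org/dc/elements/1.1/";

    public static void main(String[] args) {
        OWLDataFactory df = OWLManager.getOWLDataFactory();
        // req:Requirement SubClassOf dc:description exactly 1
        OWLAxiom axiom = df.getOWLSubClassOfAxiom(
                df.getOWLClass(IRI.create(REQ + "Requirement")),
                df.getOWLDataExactCardinality(1,
                        df.getOWLDataProperty(IRI.create(DC + "description"))));
        System.out.println(axiom);
    }
}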
From: <lor...@us...> - 2013-09-04 15:39:12
|
Revision: 4068 http://sourceforge.net/p/dl-learner/code/4068 Author: lorenz_b Date: 2013-09-04 15:39:08 +0000 (Wed, 04 Sep 2013) Log Message: ----------- Added method to get relvant text for all entities. Modified Paths: -------------- trunk/components-core/pom.xml trunk/components-core/src/main/java/org/dllearner/algorithms/isle/ISLE.java trunk/components-core/src/main/java/org/dllearner/algorithms/isle/NLPHeuristic.java trunk/components-core/src/main/java/org/dllearner/algorithms/isle/SimpleWordSenseDisambiguation.java trunk/components-core/src/main/java/org/dllearner/algorithms/isle/metrics/PMIRelevanceMetric.java trunk/components-core/src/main/java/org/dllearner/algorithms/isle/metrics/RelevanceUtils.java trunk/components-core/src/main/java/org/dllearner/algorithms/isle/textretrieval/AnnotationEntityTextRetriever.java trunk/components-core/src/main/java/org/dllearner/core/owl/Description.java trunk/components-core/src/main/java/org/dllearner/core/owl/Nothing.java trunk/examples/isle/father_labeled.owl trunk/pom.xml trunk/scripts/pom.xml trunk/scripts/src/main/java/org/dllearner/scripts/pattern/OWLAxiomPatternDetectionEvaluation.java trunk/scripts/src/main/java/org/dllearner/scripts/pattern/OWLAxiomPatternUsageEvaluation.java Modified: trunk/components-core/pom.xml =================================================================== --- trunk/components-core/pom.xml 2013-09-04 15:23:29 UTC (rev 4067) +++ trunk/components-core/pom.xml 2013-09-04 15:39:08 UTC (rev 4068) @@ -314,11 +314,6 @@ <artifactId>jwnl</artifactId> <version>1.4.1.RC2</version> </dependency> - <dependency> - <groupId>com.google.collections</groupId> - <artifactId>google-collections</artifactId> - <version>1.0</version> - </dependency> </dependencies> <dependencyManagement> <dependencies> Modified: trunk/components-core/src/main/java/org/dllearner/algorithms/isle/ISLE.java =================================================================== --- trunk/components-core/src/main/java/org/dllearner/algorithms/isle/ISLE.java 2013-09-04 15:23:29 UTC (rev 4067) +++ trunk/components-core/src/main/java/org/dllearner/algorithms/isle/ISLE.java 2013-09-04 15:39:08 UTC (rev 4068) @@ -34,6 +34,7 @@ import org.dllearner.algorithms.celoe.CELOE; import org.dllearner.algorithms.celoe.OENode; import org.dllearner.core.AbstractCELA; +import org.dllearner.core.AbstractKnowledgeSource; import org.dllearner.core.AbstractLearningProblem; import org.dllearner.core.AbstractReasonerComponent; import org.dllearner.core.ComponentAnn; @@ -47,10 +48,12 @@ import org.dllearner.core.owl.NamedClass; import org.dllearner.core.owl.Restriction; import org.dllearner.core.owl.Thing; +import org.dllearner.kb.OWLFile; import org.dllearner.learningproblems.ClassLearningProblem; import org.dllearner.learningproblems.PosNegLP; import org.dllearner.learningproblems.PosNegLPStandard; import org.dllearner.learningproblems.PosOnlyLP; +import org.dllearner.reasoning.FastInstanceChecker; import org.dllearner.refinementoperators.CustomHierarchyRefinementOperator; import org.dllearner.refinementoperators.CustomStartRefinementOperator; import org.dllearner.refinementoperators.LengthLimitedRefinementOperator; @@ -94,8 +97,7 @@ // all nodes in the search tree (used for selecting most promising node) private TreeSet<OENode> nodes; -// private OEHeuristicRuntime heuristic; // = new OEHeuristicRuntime(); - private NLPHeuristic heuristic = new NLPHeuristic(); + private NLPHeuristic heuristic; // = new OEHeuristicRuntime(); // root of search tree private OENode startNode; // the 
class with which we start the refinement process @@ -146,10 +148,11 @@ private int expressionTests = 0; private int minHorizExp = 0; private int maxHorizExp = 0; - private long totalRuntimeNs; + private long totalRuntimeNs = 0; // TODO: turn those into config options + // important: do not initialise those with empty sets // null = no settings for allowance / ignorance // empty set = allow / ignore nothing (it is often not desired to allow no class!) @@ -896,6 +899,10 @@ } } + public TreeSet<OENode> getNodes() { + return nodes; + } + public int getMaximumHorizontalExpansion() { return maxHorizExp; } @@ -1099,14 +1106,30 @@ public void setStopOnFirstDefinition(boolean stopOnFirstDefinition) { this.stopOnFirstDefinition = stopOnFirstDefinition; - } - + } + public long getTotalRuntimeNs() { return totalRuntimeNs; } - - public TreeSet<OENode> getNodes() { - return nodes; + + public static void main(String[] args) throws Exception{ + AbstractKnowledgeSource ks = new OWLFile("../examples/family/father_oe.owl"); + ks.init(); + + AbstractReasonerComponent rc = new FastInstanceChecker(ks); + rc.init(); + + ClassLearningProblem lp = new ClassLearningProblem(rc); + lp.setClassToDescribe(new NamedClass("http://example.com/father#father")); + lp.init(); + + CELOE alg = new CELOE(lp, rc); + alg.setMaxExecutionTimeInSeconds(10); + alg.init(); + + alg.start(); + } + } Modified: trunk/components-core/src/main/java/org/dllearner/algorithms/isle/NLPHeuristic.java =================================================================== --- trunk/components-core/src/main/java/org/dllearner/algorithms/isle/NLPHeuristic.java 2013-09-04 15:23:29 UTC (rev 4067) +++ trunk/components-core/src/main/java/org/dllearner/algorithms/isle/NLPHeuristic.java 2013-09-04 15:39:08 UTC (rev 4068) @@ -108,17 +108,18 @@ //the NLP based scoring - Description expression = node.getExpression(); - OWLClassExpression owlapiDescription = OWLAPIConverter.getOWLAPIDescription(expression); - Set<Entity> entities = OWLAPIConverter.getEntities(owlapiDescription.getSignature()); - double sum = 0; - for (Entity entity : entities) { - double relevance = entityRelevance.containsKey(entity) ? entityRelevance.get(entity) : 0; - if(!Double.isInfinite(relevance)){ - sum += relevance; - } - } - score += nlpBonusFactor * sum; +// Description expression = node.getExpression(); +//// OWLClassExpression owlapiDescription = OWLAPIConverter.getOWLAPIDescription(expression); +//// Set<Entity> entities = OWLAPIConverter.getEntities(owlapiDescription.getSignature()); +// Set<Entity> entities = expression.getSignature(); +// double sum = 0; +// for (Entity entity : entities) { +// double relevance = entityRelevance.containsKey(entity) ? 
entityRelevance.get(entity) : 0; +// if(!Double.isInfinite(relevance)){ +// sum += relevance; +// } +// } +// score += nlpBonusFactor * sum; return score; } Modified: trunk/components-core/src/main/java/org/dllearner/algorithms/isle/SimpleWordSenseDisambiguation.java =================================================================== --- trunk/components-core/src/main/java/org/dllearner/algorithms/isle/SimpleWordSenseDisambiguation.java 2013-09-04 15:23:29 UTC (rev 4067) +++ trunk/components-core/src/main/java/org/dllearner/algorithms/isle/SimpleWordSenseDisambiguation.java 2013-09-04 15:39:08 UTC (rev 4068) @@ -75,5 +75,25 @@ } return labels; } + + private Set<String> getRelatedWordPhrases(Entity entity){ + //add the labels if exist + Set<String> relatedWordPhrases = new HashSet<String>(); + OWLEntity owlEntity = OWLAPIConverter.getOWLAPIEntity(entity); + Set<OWLAnnotationAssertionAxiom> axioms = ontology.getAnnotationAssertionAxioms(owlEntity.getIRI()); + for (OWLAnnotationAssertionAxiom annotation : axioms) { + if(annotation.getProperty().equals(annotationProperty)){ + if (annotation.getValue() instanceof OWLLiteral) { + OWLLiteral val = (OWLLiteral) annotation.getValue(); + relatedWordPhrases.add(val.getLiteral()); + } + } + } + //add the short form of the URI if no labels are available + if(relatedWordPhrases.isEmpty()){ + relatedWordPhrases.add(sfp.getShortForm(IRI.create(entity.getURI()))); + } + return relatedWordPhrases; + } } Modified: trunk/components-core/src/main/java/org/dllearner/algorithms/isle/metrics/PMIRelevanceMetric.java =================================================================== --- trunk/components-core/src/main/java/org/dllearner/algorithms/isle/metrics/PMIRelevanceMetric.java 2013-09-04 15:23:29 UTC (rev 4067) +++ trunk/components-core/src/main/java/org/dllearner/algorithms/isle/metrics/PMIRelevanceMetric.java 2013-09-04 15:39:08 UTC (rev 4068) @@ -28,10 +28,12 @@ Set<AnnotatedDocument> documentsAB = Sets.intersection(documentsA, documentsB); int nrOfDocuments = index.getSize(); - double dPClass = nrOfDocuments == 0 ? 0 : ((double) documentsA.size() / (double) nrOfDocuments); - double dPClassEntity = documentsB.size() == 0 ? 0 : (double) documentsAB.size() / (double) documentsB.size(); - double pmi = Math.log(dPClassEntity / dPClass); + double pA = nrOfDocuments == 0 ? 0 : ((double) documentsA.size() / (double) nrOfDocuments); + double pB = nrOfDocuments == 0 ? 0 : ((double) documentsB.size() / (double) nrOfDocuments); + double pAB = nrOfDocuments == 0 ? 0 : ((double) documentsAB.size() / (double) nrOfDocuments); + double pmi = Math.log(pAB / pA * pB); + return pmi; } @@ -42,11 +44,15 @@ Set<AnnotatedDocument> documentsAB = Sets.intersection(documentsA, documentsB); int nrOfDocuments = index.getSize(); - double dPClass = nrOfDocuments == 0 ? 0 : ((double) documentsA.size() / (double) nrOfDocuments); - double dPClassEntity = documentsB.size() == 0 ? 0 : (double) documentsAB.size() / (double) documentsB.size(); - double pmi = Math.log(dPClassEntity / dPClass); + double pA = nrOfDocuments == 0 ? 0 : ((double) documentsA.size() / (double) nrOfDocuments); + double pB = nrOfDocuments == 0 ? 0 : ((double) documentsB.size() / (double) nrOfDocuments); + double pAB = nrOfDocuments == 0 ? 
0 : ((double) documentsAB.size() / (double) nrOfDocuments); - double pAB = (double) documentsAB.size() / (double) nrOfDocuments; + if(pA * pB == 0){ + return 0; + } + double pmi = Math.log(pAB / pA * pB); + double normalizedPMI = (pmi/-Math.log(pAB) + 1)/2; return normalizedPMI; Modified: trunk/components-core/src/main/java/org/dllearner/algorithms/isle/metrics/RelevanceUtils.java =================================================================== --- trunk/components-core/src/main/java/org/dllearner/algorithms/isle/metrics/RelevanceUtils.java 2013-09-04 15:23:29 UTC (rev 4067) +++ trunk/components-core/src/main/java/org/dllearner/algorithms/isle/metrics/RelevanceUtils.java 2013-09-04 15:39:08 UTC (rev 4068) @@ -40,7 +40,7 @@ Set<Entity> otherEntities = OWLAPIConverter.getEntities(owlEntities); for (Entity otherEntity : otherEntities) { - double relevance = metric.getRelevance(entity, otherEntity); + double relevance = metric.getNormalizedRelevance(entity, otherEntity); relevantEntities.put(otherEntity, relevance); } Modified: trunk/components-core/src/main/java/org/dllearner/algorithms/isle/textretrieval/AnnotationEntityTextRetriever.java =================================================================== --- trunk/components-core/src/main/java/org/dllearner/algorithms/isle/textretrieval/AnnotationEntityTextRetriever.java 2013-09-04 15:23:29 UTC (rev 4067) +++ trunk/components-core/src/main/java/org/dllearner/algorithms/isle/textretrieval/AnnotationEntityTextRetriever.java 2013-09-04 15:39:08 UTC (rev 4068) @@ -4,6 +4,7 @@ package org.dllearner.algorithms.isle.textretrieval; import java.util.HashMap; +import java.util.HashSet; import java.util.Map; import java.util.Set; @@ -90,4 +91,26 @@ return textWithWeight; } + + /** + * Returns for each entity in the ontology all relevant text, i.e. eitherthe annotations or the short form of the IRI as fallback. + * @return + */ + public Map<Entity, Set<String>> getRelevantText() { + Map<Entity, Set<String>> entity2RelevantText = new HashMap<Entity, Set<String>>(); + + Set<OWLEntity> schemaEntities = new HashSet<OWLEntity>(); + schemaEntities.addAll(ontology.getClassesInSignature()); + schemaEntities.addAll(ontology.getObjectPropertiesInSignature()); + schemaEntities.addAll(ontology.getDataPropertiesInSignature()); + + Map<String, Double> relevantText; + for (OWLEntity owlEntity : schemaEntities) { + Entity entity = OWLAPIConverter.getEntity(owlEntity); + relevantText = getRelevantText(entity); + entity2RelevantText.put(entity, relevantText.keySet()); + } + + return entity2RelevantText; + } } Modified: trunk/components-core/src/main/java/org/dllearner/core/owl/Description.java =================================================================== --- trunk/components-core/src/main/java/org/dllearner/core/owl/Description.java 2013-09-04 15:23:29 UTC (rev 4067) +++ trunk/components-core/src/main/java/org/dllearner/core/owl/Description.java 2013-09-04 15:39:08 UTC (rev 4068) @@ -19,9 +19,11 @@ package org.dllearner.core.owl; +import java.util.HashSet; import java.util.LinkedList; import java.util.List; import java.util.Map; +import java.util.Set; /** * A class description is sometimes also called "complex class" or "concept". @@ -211,6 +213,36 @@ } /** + * Returns all named entities. 
+ * @return + */ + public Set<Entity> getSignature(){ + Set<Entity> entities = new HashSet<Entity>(); + if(this instanceof NamedClass){ + entities.add((NamedClass)this); + } else if(this instanceof Thing){ + entities.add(new NamedClass(Thing.uri)); + } else if(this instanceof Nothing){ + entities.add(new NamedClass(Nothing.uri)); + } else if(this instanceof Restriction){ + PropertyExpression propertyExpression = ((Restriction)this).getRestrictedPropertyExpression(); + if(propertyExpression instanceof ObjectProperty){ + entities.add((ObjectProperty)propertyExpression); + } else if(propertyExpression instanceof DatatypeProperty){ + entities.add((DatatypeProperty)propertyExpression); + } + entities.addAll(getChild(0).getSignature()); + + } else { + for (Description child : children) { + entities.addAll(child.getSignature()); + } + } + + return entities; + } + + /** * Returns a manchester syntax string of this description. For a * reference, see * <a href="http://www.co-ode.org/resources/reference/manchester_syntax">here</a> Modified: trunk/components-core/src/main/java/org/dllearner/core/owl/Nothing.java =================================================================== --- trunk/components-core/src/main/java/org/dllearner/core/owl/Nothing.java 2013-09-04 15:23:29 UTC (rev 4067) +++ trunk/components-core/src/main/java/org/dllearner/core/owl/Nothing.java 2013-09-04 15:39:08 UTC (rev 4068) @@ -19,6 +19,7 @@ package org.dllearner.core.owl; +import java.net.URI; import java.util.Map; /** @@ -35,6 +36,8 @@ private static final long serialVersionUID = -3053885252153066318L; public static final Nothing instance = new Nothing(); + public static final URI uri = URI.create("http://www.w3.org/2002/07/owl#Thing"); + public String toString(String baseURI, Map<String,String> prefixes) { return "BOTTOM"; } @@ -52,7 +55,14 @@ // in Protege 4.0 only Nothing //return "owl:Nothing"; return "Nothing"; - } + } + + /** + * @return the uri + */ + public static URI getURI() { + return uri; + } public int getLength() { return 1; Modified: trunk/examples/isle/father_labeled.owl =================================================================== --- trunk/examples/isle/father_labeled.owl 2013-09-04 15:23:29 UTC (rev 4067) +++ trunk/examples/isle/father_labeled.owl 2013-09-04 15:39:08 UTC (rev 4068) @@ -58,7 +58,7 @@ <!-- http://example.com/father#father --> <owl:Class rdf:about="&father;father"> - <rdfs:label xml:lang="en">person which has at least 1 child</rdfs:label> + <rdfs:label xml:lang="en">male person which has at least 1 child</rdfs:label> <rdfs:subClassOf rdf:resource="&father;male"/> </owl:Class> Modified: trunk/pom.xml =================================================================== --- trunk/pom.xml 2013-09-04 15:23:29 UTC (rev 4067) +++ trunk/pom.xml 2013-09-04 15:39:08 UTC (rev 4068) @@ -164,7 +164,7 @@ <dependency> <groupId>org.semanticweb.hermit</groupId> <artifactId>hermit</artifactId> - <version>1.3.3</version> + <version>1.3.8</version> </dependency> <!-- SOLR Dependency --> Modified: trunk/scripts/pom.xml =================================================================== --- trunk/scripts/pom.xml 2013-09-04 15:23:29 UTC (rev 4067) +++ trunk/scripts/pom.xml 2013-09-04 15:39:08 UTC (rev 4068) @@ -139,6 +139,13 @@ </exclusions> </dependency> + + <dependency> + <groupId>org.apache.commons</groupId> + <artifactId>commons-math3</artifactId> + <version>3.0</version> +</dependency> + </dependencies> <build> Modified: 
trunk/scripts/src/main/java/org/dllearner/scripts/pattern/OWLAxiomPatternDetectionEvaluation.java =================================================================== --- trunk/scripts/src/main/java/org/dllearner/scripts/pattern/OWLAxiomPatternDetectionEvaluation.java 2013-09-04 15:23:29 UTC (rev 4067) +++ trunk/scripts/src/main/java/org/dllearner/scripts/pattern/OWLAxiomPatternDetectionEvaluation.java 2013-09-04 15:39:08 UTC (rev 4068) @@ -13,6 +13,8 @@ import java.util.ArrayList; import java.util.Arrays; import java.util.Collection; +import java.util.Collections; +import java.util.HashMap; import java.util.LinkedHashMap; import java.util.List; import java.util.Map; @@ -39,6 +41,8 @@ import org.semanticweb.owlapi.model.OWLOntologyCreationException; import org.semanticweb.owlapi.model.UnloadableImportException; +import com.google.common.math.IntMath; + import uk.ac.manchester.cs.owl.owlapi.OWLDataFactoryImpl; import uk.ac.manchester.cs.owl.owlapi.mansyntaxrenderer.ManchesterOWLSyntaxOWLObjectRendererImpl; @@ -57,6 +61,11 @@ private boolean formatNumbers = true; private int numberOfRowsPerTable = 25; + private int minOntologies = 5; + + private Map<OWLAxiom, Integer> winsorizedFrequencies = new HashMap<OWLAxiom, Integer>(); + private int percentileInPercent = 95; + public OWLAxiomPatternDetectionEvaluation() { initDBConnection(); @@ -311,12 +320,13 @@ String latexTable = "\\begin{table}\n"; latexTable += "\\begin{tabular}{lrrr}\n"; latexTable += "\\toprule\n"; - latexTable += "Pattern & Frequency & \\#Ontologies\\\\\\midrule\n"; + latexTable += "Pattern & Frequency & Winsorised Frequency & \\#Ontologies\\\\\\midrule\n"; for (Entry<OWLAxiom, Pair<Integer, Integer>> entry : topN.entrySet()) { OWLAxiom axiom = entry.getKey(); Integer frequency = entry.getValue().getKey(); Integer df = entry.getValue().getValue(); + Integer winsorizedFrequency = winsorizedFrequencies.get(axiom); if(axiom != null){ String axiomColumn = axiomRenderer.render(axiom); @@ -330,7 +340,7 @@ } if(formatNumbers){ - latexTable += axiomColumn + " & " + "\\num{" + frequency + "} & " + df + "\\\\\n"; + latexTable += axiomColumn + " & \\num{" + frequency + "} & \\num{" + winsorizedFrequency + "} & "+ df + "\\\\\n"; } else { latexTable += axiomColumn + " & " + frequency + " & " + df + "\\\\\n"; } @@ -347,13 +357,13 @@ LatexWriter w = new LatexWriter(sw); LatexObjectVisitor renderer = new LatexObjectVisitor(w, df); String latexTable = "\\begin{table}\n"; - latexTable += "\\begin{tabular}{rlrr"; + latexTable += "\\begin{tabular}{rlrrr"; for (int i = 0; i < repositories.size(); i++) { latexTable += "r"; } latexTable += "}\n"; latexTable += "\\toprule\n"; - latexTable += " & Pattern & Frequency & \\#Ontologies"; + latexTable += " & Pattern & Frequency & Winsorized Frequency & \\#Ontologies"; for (OntologyRepository repository : repositories) { latexTable += " & " + repository.getName(); } @@ -366,6 +376,7 @@ OWLAxiom axiom = entry.getValue().keySet().iterator().next(); Integer frequency = entry.getValue().values().iterator().next().getKey(); Integer df = entry.getValue().values().iterator().next().getValue(); + int winsorizedFrequency = winsorizedFrequencies.get(axiom); if(axiom != null){ String axiomColumn = axiomRenderer.render(axiom); @@ -378,7 +389,7 @@ } if(formatNumbers){ - latexTable += i + ". & " + axiomColumn + " & " + "\\num{" + frequency + "} & " + df; + latexTable += i + ". 
& " + axiomColumn + " & \\num{" + frequency + "} & \\num{" + winsorizedFrequency + "} & "+ df; for (OntologyRepository repository : repositories) { int rank = 0; boolean contained = false; @@ -433,21 +444,62 @@ ps = conn.prepareStatement("SELECT P.id, pattern,SUM(occurrences),COUNT(ontology_id) FROM " + "Ontology_Pattern OP, Pattern P, Ontology O WHERE " + "(P.id=OP.pattern_id AND O.id=OP.ontology_id AND P.axiom_type=?) " + - "GROUP BY P.id ORDER BY SUM(`OP`.`occurrences`) DESC LIMIT ?"); + "GROUP BY P.id HAVING COUNT(ontology_id)>=? ORDER BY SUM(`OP`.`occurrences`) DESC LIMIT ?"); ps.setString(1, axiomType.name()); - ps.setInt(2, n); + ps.setInt(2, minOntologies); + ps.setInt(3, n); rs = ps.executeQuery(); while(rs.next()){ + int patternID = rs.getInt(1); + OWLAxiom axiom = asOWLAxiom(rs.getString(2)); Map<OWLAxiom, Pair<Integer, Integer>> m = new LinkedHashMap<OWLAxiom, Pair<Integer,Integer>>(); - m.put(asOWLAxiom(rs.getString(2)), new Pair<Integer, Integer>(rs.getInt(3), rs.getInt(4))); - topN.put(rs.getInt(1), m); + m.put(axiom, new Pair<Integer, Integer>(rs.getInt(3), rs.getInt(4))); + topN.put(patternID, m); + + //get winsorized frequency + ps = conn.prepareStatement("SELECT occurrences FROM " + + "Ontology_Pattern WHERE " + + "(pattern_id=?) "); + ps.setInt(1, patternID); + ResultSet rs2 = ps.executeQuery(); + System.out.println("Pattern ID:" + patternID); + System.out.println(axiom); + + List<Integer> values = new ArrayList<Integer>(); + while(rs2.next()){ + values.add(rs2.getInt(1)); + } + winsorize(values); + int sum = 0; + for (Integer val : values) { + sum += val; + } + winsorizedFrequencies.put(axiom, sum); } + + + } catch(SQLException e){ e.printStackTrace(); } return topN; } + private void winsorize(List<Integer> values){ + //compute 95th percentile + int percentile = (int) Math.round(percentileInPercent/100d * values.size() + 1/2d); + //sort values + Collections.sort(values);System.out.println(values); + //get the value at percentile rank + int max = values.get(percentile-1); + //set all values after to max + for (int i = percentile; i < values.size(); i++) { + values.set(i, max); + } + System.out.println(percentile); + System.out.println(values); + } + private Map<Integer, Map<OWLAxiom, Pair<Integer, Integer>>> getTopNAxiomPatternsWithId(OntologyRepository repository, AxiomTypeCategory axiomType, int n){ Map<Integer, Map<OWLAxiom, Pair<Integer, Integer>>> topN = new LinkedHashMap<Integer, Map<OWLAxiom, Pair<Integer, Integer>>>(); PreparedStatement ps; @@ -569,6 +621,5 @@ new OWLAxiomPatternDetectionEvaluation().run(analyzeRepositories, Arrays.asList( new TONESRepository(), new BioPortalRepository(), new OxfordRepository())); } - } Modified: trunk/scripts/src/main/java/org/dllearner/scripts/pattern/OWLAxiomPatternUsageEvaluation.java =================================================================== --- trunk/scripts/src/main/java/org/dllearner/scripts/pattern/OWLAxiomPatternUsageEvaluation.java 2013-09-04 15:23:29 UTC (rev 4067) +++ trunk/scripts/src/main/java/org/dllearner/scripts/pattern/OWLAxiomPatternUsageEvaluation.java 2013-09-04 15:39:08 UTC (rev 4068) @@ -42,6 +42,7 @@ import joptsimple.OptionSet; import org.apache.commons.compress.compressors.bzip2.BZip2CompressorInputStream; +import org.apache.commons.math3.stat.descriptive.DescriptiveStatistics; import org.apache.log4j.Logger; import org.coode.owlapi.turtle.TurtleOntologyFormat; import org.dllearner.core.EvaluatedAxiom; @@ -51,6 +52,7 @@ import org.dllearner.kb.LocalModelBasedSparqlEndpointKS; import 
org.dllearner.kb.SparqlEndpointKS; import org.dllearner.kb.sparql.ExtractionDBCache; +import org.dllearner.kb.sparql.QueryEngineHTTP; import org.dllearner.kb.sparql.SparqlEndpoint; import org.dllearner.kb.sparql.SparqlQuery; import org.dllearner.learningproblems.AxiomScore; @@ -92,7 +94,6 @@ import com.clarkparsia.pellet.owlapiv3.PelletReasonerFactory; import com.google.common.base.Charsets; import com.google.common.base.Joiner; -import com.google.common.cache.Cache; import com.google.common.cache.CacheBuilder; import com.google.common.cache.CacheLoader; import com.google.common.cache.LoadingCache; @@ -117,7 +118,6 @@ import com.hp.hpl.jena.rdf.model.ModelFactory; import com.hp.hpl.jena.rdf.model.Resource; import com.hp.hpl.jena.rdf.model.Statement; -import com.hp.hpl.jena.sparql.engine.http.QueryEngineHTTP; import com.hp.hpl.jena.sparql.engine.http.QueryExceptionHTTP; import com.hp.hpl.jena.vocabulary.RDF; import com.jamonapi.Monitor; @@ -165,7 +165,12 @@ private File samplesDir; private File instantiationsDir; + private DescriptiveStatistics fragmentStatistics = new DescriptiveStatistics(100); + + + private int nrOfEarlyTerminations = 0; + public OWLAxiomPatternUsageEvaluation() { try { BZip2CompressorInputStream is = new BZip2CompressorInputStream(new URL(ontologyURL).openStream()); @@ -374,7 +379,9 @@ e.printStackTrace(); } } -// System.exit(0); + logger.info("Early terminations: " + nrOfEarlyTerminations ); + logger.info(fragmentStatistics.getMin() + "--" + fragmentStatistics.getMax() + "--" + fragmentStatistics.getMean()); + System.exit(0); Monitor patternTimeMon = MonitorFactory.getTimeMonitor("pattern-runtime"); //for each pattern @@ -694,28 +701,39 @@ long startTime = System.currentTimeMillis(); int offset = 0; boolean hasMoreResults = true; - while(hasMoreResults && (System.currentTimeMillis() - startTime)<= maxFragmentExtractionTime){ + long remainingTime = maxFragmentExtractionTime - (System.currentTimeMillis() - startTime); + while(hasMoreResults && remainingTime > 0){ query.setOffset(offset); logger.info(query); - Model m = executeConstructQuery(query); + Model m = executeConstructQuery(query, remainingTime); fragment.add(m); + remainingTime = maxFragmentExtractionTime - (System.currentTimeMillis() - startTime); if(m.size() == 0){ hasMoreResults = false; + if(remainingTime > 0){ + logger.info("No more triples left. Early termination..."); + nrOfEarlyTerminations++; + } + } offset += queryLimit; - try { - Thread.sleep(500); - } catch (InterruptedException e) { - e.printStackTrace(); - } +// try { +// Thread.sleep(500); +// } catch (InterruptedException e) { +// e.printStackTrace(); +// } } - logger.info("...got " + fragment.size() + " triples."); try { fragment.write(new FileOutputStream(file), "TURTLE"); } catch (FileNotFoundException e) { e.printStackTrace(); } filterModel(fragment); + logger.info("...got " + fragment.size() + " triples "); + ResultSet rs = QueryExecutionFactory.create("SELECT (COUNT(DISTINCT ?s) AS ?cnt) WHERE {?s a <" + cls.getName() + ">. 
}", fragment).execSelect(); + int nrOfInstances = rs.next().getLiteral("cnt").getInt(); + logger.info("with " + nrOfInstances + " instances of class " + cls.getName()); + fragmentStatistics.addValue(nrOfInstances); return fragment; } @@ -1281,6 +1299,45 @@ return rs; } + protected Model executeConstructQuery(Query query, long timeout) { + if(ks.isRemote()){ + SparqlEndpoint endpoint = ((SparqlEndpointKS) ks).getEndpoint(); + ExtractionDBCache cache = ks.getCache(); + Model model = null; + try { +// if(cache != null){ +// try { +// model = cache.executeConstructQuery(endpoint, query.toString()); +// } catch (UnsupportedEncodingException e) { +// e.printStackTrace(); +// } catch (SQLException e) { +// e.printStackTrace(); +// } +// } else { + QueryEngineHTTP queryExecution = new QueryEngineHTTP(endpoint.getURL().toString(), + query); + queryExecution.setDefaultGraphURIs(endpoint.getDefaultGraphURIs()); + queryExecution.setNamedGraphURIs(endpoint.getNamedGraphURIs()); + queryExecution.setTimeout(timeout, timeout); + model = queryExecution.execConstruct(); +// } + logger.debug("Got " + model.size() + " triples."); + return model; + } catch (QueryExceptionHTTP e) { + if(e.getCause() instanceof SocketTimeoutException){ + logger.warn("Got timeout"); + } else { + logger.error("Exception executing query", e); + } + return ModelFactory.createDefaultModel(); + } + } else { + QueryExecution queryExecution = QueryExecutionFactory.create(query, ((LocalModelBasedSparqlEndpointKS)ks).getModel()); + Model model = queryExecution.execConstruct(); + return model; + } + } + protected Model executeConstructQuery(Query query) { if(ks.isRemote()){ SparqlEndpoint endpoint = ((SparqlEndpointKS) ks).getEndpoint(); This was sent by the SourceForge.net collaborative development platform, the world's largest Open Source development site. |
From: <and...@us...> - 2013-09-04 15:59:49
|
Revision: 4070 http://sourceforge.net/p/dl-learner/code/4070 Author: andremelo Date: 2013-09-04 15:59:46 +0000 (Wed, 04 Sep 2013) Log Message: ----------- Adding OWLOntology parameter to getRelevantText at AnnotationEntityTextRetriever and updating interface to include it Modified Paths: -------------- trunk/components-core/src/main/java/org/dllearner/algorithms/isle/textretrieval/AnnotationEntityTextRetriever.java trunk/components-core/src/main/java/org/dllearner/algorithms/isle/textretrieval/EntityTextRetriever.java trunk/protege/pom.xml Modified: trunk/components-core/src/main/java/org/dllearner/algorithms/isle/textretrieval/AnnotationEntityTextRetriever.java =================================================================== --- trunk/components-core/src/main/java/org/dllearner/algorithms/isle/textretrieval/AnnotationEntityTextRetriever.java 2013-09-04 15:58:20 UTC (rev 4069) +++ trunk/components-core/src/main/java/org/dllearner/algorithms/isle/textretrieval/AnnotationEntityTextRetriever.java 2013-09-04 15:59:46 UTC (rev 4070) @@ -96,7 +96,8 @@ * Returns for each entity in the ontology all relevant text, i.e. eitherthe annotations or the short form of the IRI as fallback. * @return */ - public Map<Entity, Set<String>> getRelevantText() { + @Override + public Map<Entity, Set<String>> getRelevantText(OWLOntology ontology) { Map<Entity, Set<String>> entity2RelevantText = new HashMap<Entity, Set<String>>(); Set<OWLEntity> schemaEntities = new HashSet<OWLEntity>(); Modified: trunk/components-core/src/main/java/org/dllearner/algorithms/isle/textretrieval/EntityTextRetriever.java =================================================================== --- trunk/components-core/src/main/java/org/dllearner/algorithms/isle/textretrieval/EntityTextRetriever.java 2013-09-04 15:58:20 UTC (rev 4069) +++ trunk/components-core/src/main/java/org/dllearner/algorithms/isle/textretrieval/EntityTextRetriever.java 2013-09-04 15:59:46 UTC (rev 4070) @@ -20,8 +20,10 @@ package org.dllearner.algorithms.isle.textretrieval; import java.util.Map; +import java.util.Set; import org.dllearner.core.owl.Entity; +import org.semanticweb.owlapi.model.OWLOntology; /** * Interface for methods, which retrieve relevant texts given an entity @@ -45,4 +47,6 @@ */ public Map<String, Double> getRelevantText(Entity entity); + public Map<Entity, Set<String>> getRelevantText(OWLOntology ontology); + } Modified: trunk/protege/pom.xml =================================================================== --- trunk/protege/pom.xml 2013-09-04 15:58:20 UTC (rev 4069) +++ trunk/protege/pom.xml 2013-09-04 15:59:46 UTC (rev 4070) @@ -164,5 +164,42 @@ </executions> </plugin> </plugins> + <pluginManagement> + <plugins> + <!--This plugin's configuration is used to store Eclipse m2e settings only. 
It has no influence on the Maven build itself.--> + <plugin> + <groupId>org.eclipse.m2e</groupId> + <artifactId>lifecycle-mapping</artifactId> + <version>1.0.0</version> + <configuration> + <lifecycleMappingMetadata> + <pluginExecutions> + <pluginExecution> + <pluginExecutionFilter> + <groupId> + org.apache.maven.plugins + </groupId> + <artifactId> + maven-dependency-plugin + </artifactId> + <versionRange> + [2.4,) + </versionRange> + <goals> + <goal> + copy-dependencies + </goal> + </goals> + </pluginExecutionFilter> + <action> + <ignore></ignore> + </action> + </pluginExecution> + </pluginExecutions> + </lifecycleMappingMetadata> + </configuration> + </plugin> + </plugins> + </pluginManagement> </build> </project> This was sent by the SourceForge.net collaborative development platform, the world's largest Open Source development site. |
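A hypothetical caller of the extended EntityTextRetriever interface could look as follows. This is a usage sketch, not code from the repository: the package locations of the imports are inferred from the other text-retrieval classes, RDFSLabelEntityTextRetriever is used as the concrete retriever because it appears elsewhere in the ISLE code, and the ontology path is an assumption.

import java.io.File;
import java.util.Map;
import java.util.Set;

import org.dllearner.algorithms.isle.textretrieval.EntityTextRetriever;
import org.dllearner.algorithms.isle.textretrieval.RDFSLabelEntityTextRetriever;
import org.dllearner.core.owl.Entity;
import org.semanticweb.owlapi.apibinding.OWLManager;
import org.semanticweb.owlapi.model.OWLOntology;

public class RelevantTextSketch {

    public static void main(String[] args) throws Exception {
        OWLOntology ontology = OWLManager.createOWLOntologyManager()
                .loadOntologyFromOntologyDocument(new File("examples/isle/father_labeled.owl"));
        EntityTextRetriever retriever = new RDFSLabelEntityTextRetriever(ontology);
        // relevant text for every class and property in the ontology,
        // falling back to the IRI short form where no label exists
        Map<Entity, Set<String>> relevantText = retriever.getRelevantText(ontology);
        for (Map.Entry<Entity, Set<String>> entry : relevantText.entrySet()) {
            System.out.println(entry.getKey() + " -> " + entry.getValue());
        }
    }
}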
From: <lor...@us...> - 2013-09-05 10:17:37
|
Revision: 4090 http://sourceforge.net/p/dl-learner/code/4090 Author: lorenz_b Date: 2013-09-05 10:17:34 +0000 (Thu, 05 Sep 2013) Log Message: ----------- Bugfixes. Modified Paths: -------------- trunk/components-core/src/main/java/org/dllearner/algorithms/isle/NLPHeuristic.java trunk/components-core/src/main/java/org/dllearner/algorithms/isle/SimpleWordSenseDisambiguation.java trunk/components-core/src/main/java/org/dllearner/algorithms/isle/index/semantic/SemanticIndex.java trunk/components-core/src/main/java/org/dllearner/algorithms/isle/index/semantic/simple/SimpleSemanticIndex.java trunk/components-core/src/main/java/org/dllearner/algorithms/isle/metrics/PMIRelevanceMetric.java trunk/components-core/src/main/java/org/dllearner/algorithms/isle/metrics/RelevanceUtils.java trunk/test/isle/swore/corpus/customer_requirement.txt Modified: trunk/components-core/src/main/java/org/dllearner/algorithms/isle/NLPHeuristic.java =================================================================== --- trunk/components-core/src/main/java/org/dllearner/algorithms/isle/NLPHeuristic.java 2013-09-05 09:26:44 UTC (rev 4089) +++ trunk/components-core/src/main/java/org/dllearner/algorithms/isle/NLPHeuristic.java 2013-09-05 10:17:34 UTC (rev 4090) @@ -108,13 +108,13 @@ // the NLP based scoring - Description expression = node.getExpression();System.out.println(expression); + Description expression = node.getExpression();//System.out.println(expression); // OWLClassExpression owlapiDescription = OWLAPIConverter.getOWLAPIDescription(expression); // Set<Entity> entities = OWLAPIConverter.getEntities(owlapiDescription.getSignature()); Set<Entity> entities = expression.getSignature(); double sum = 0; for (Entity entity : entities) { - double relevance = entityRelevance.containsKey(entity) ? entityRelevance.get(entity) : 0;System.out.println(entity + ":" + relevance); + double relevance = entityRelevance.containsKey(entity) ? 
entityRelevance.get(entity) : 0;//System.out.println(entity + ":" + relevance); if(!Double.isInfinite(relevance)){ sum += relevance; } Modified: trunk/components-core/src/main/java/org/dllearner/algorithms/isle/SimpleWordSenseDisambiguation.java =================================================================== --- trunk/components-core/src/main/java/org/dllearner/algorithms/isle/SimpleWordSenseDisambiguation.java 2013-09-05 09:26:44 UTC (rev 4089) +++ trunk/components-core/src/main/java/org/dllearner/algorithms/isle/SimpleWordSenseDisambiguation.java 2013-09-05 10:17:34 UTC (rev 4090) @@ -6,6 +6,7 @@ import java.util.HashSet; import java.util.Set; +import org.apache.log4j.Logger; import org.dllearner.algorithms.isle.index.Annotation; import org.dllearner.algorithms.isle.index.SemanticAnnotation; import org.dllearner.core.owl.Entity; @@ -28,6 +29,9 @@ */ public class SimpleWordSenseDisambiguation extends WordSenseDisambiguation{ + + private static final Logger logger = Logger.getLogger(SimpleWordSenseDisambiguation.class.getName()); + private IRIShortFormProvider sfp = new SimpleIRIShortFormProvider(); private OWLDataFactory df = new OWLDataFactoryImpl(); private OWLAnnotationProperty annotationProperty = df.getRDFSLabel(); @@ -44,17 +48,21 @@ */ @Override public SemanticAnnotation disambiguate(Annotation annotation, Set<Entity> candidateEntities) { - String token = annotation.getToken(); + logger.debug("Linguistic annotations:\n" + annotation); + logger.debug("Candidate entities:" + candidateEntities); + String token = annotation.getToken().trim(); //check if annotation token matches label of entity or the part behind #(resp. /) for (Entity entity : candidateEntities) { Set<String> labels = getLabels(entity); for (String label : labels) { if(label.equals(token)){ + logger.debug("Disambiguated entity: " + entity); return new SemanticAnnotation(annotation, entity); } } String shortForm = sfp.getShortForm(IRI.create(entity.getURI())); if(annotation.equals(shortForm)){ + logger.debug("Disambiguated entity: " + entity); return new SemanticAnnotation(annotation, entity); } } Modified: trunk/components-core/src/main/java/org/dllearner/algorithms/isle/index/semantic/SemanticIndex.java =================================================================== --- trunk/components-core/src/main/java/org/dllearner/algorithms/isle/index/semantic/SemanticIndex.java 2013-09-05 09:26:44 UTC (rev 4089) +++ trunk/components-core/src/main/java/org/dllearner/algorithms/isle/index/semantic/SemanticIndex.java 2013-09-05 10:17:34 UTC (rev 4090) @@ -36,6 +36,8 @@ private SyntacticIndex syntacticIndex; private Map<Entity, Set<AnnotatedDocument>> index; private OWLOntology ontology; + + private int size = 0; public SemanticIndex(OWLOntology ontology, SyntacticIndex syntacticIndex, WordSenseDisambiguation wordSenseDisambiguation, EntityCandidateGenerator entityCandidateGenerator, LinguisticAnnotator linguisticAnnotator) { @@ -77,6 +79,7 @@ } logger.info("Annotated document:" + annotatedDocument); } + size = documents.size(); logger.info("...done."); } @@ -145,6 +148,6 @@ * @return the total number of documents contained in the index */ public int getSize() { - return index.size(); + return size; } } Modified: trunk/components-core/src/main/java/org/dllearner/algorithms/isle/index/semantic/simple/SimpleSemanticIndex.java =================================================================== --- trunk/components-core/src/main/java/org/dllearner/algorithms/isle/index/semantic/simple/SimpleSemanticIndex.java 2013-09-05 
09:26:44 UTC (rev 4089) +++ trunk/components-core/src/main/java/org/dllearner/algorithms/isle/index/semantic/simple/SimpleSemanticIndex.java 2013-09-05 10:17:34 UTC (rev 4090) @@ -31,7 +31,7 @@ public SimpleSemanticIndex(OWLOntology ontology, SyntacticIndex syntacticIndex) { super(ontology); SimpleEntityCandidatesTrie trie = new SimpleEntityCandidatesTrie(new RDFSLabelEntityTextRetriever(ontology), ontology); - trie.printTrie(); +// trie.printTrie(); setSemanticAnnotator(new SemanticAnnotator( new SimpleWordSenseDisambiguation(ontology), new TrieEntityCandidateGenerator(ontology, trie), Modified: trunk/components-core/src/main/java/org/dllearner/algorithms/isle/metrics/PMIRelevanceMetric.java =================================================================== --- trunk/components-core/src/main/java/org/dllearner/algorithms/isle/metrics/PMIRelevanceMetric.java 2013-09-05 09:26:44 UTC (rev 4089) +++ trunk/components-core/src/main/java/org/dllearner/algorithms/isle/metrics/PMIRelevanceMetric.java 2013-09-05 10:17:34 UTC (rev 4090) @@ -43,18 +43,27 @@ Set<AnnotatedDocument> documentsB = index.getDocuments(entityB); Set<AnnotatedDocument> documentsAB = Sets.intersection(documentsA, documentsB); int nrOfDocuments = index.getSize(); +// System.out.println("A:" + documentsA.size()); +// System.out.println("B:" + documentsB.size()); +// System.out.println("AB:" + documentsAB.size()); +// System.out.println(nrOfDocuments); double pA = nrOfDocuments == 0 ? 0 : ((double) documentsA.size() / (double) nrOfDocuments); double pB = nrOfDocuments == 0 ? 0 : ((double) documentsB.size() / (double) nrOfDocuments); double pAB = nrOfDocuments == 0 ? 0 : ((double) documentsAB.size() / (double) nrOfDocuments); - if(pA * pB == 0){ + if(pAB == 0 || pA * pB == 0){ return 0; } - double pmi = Math.log(pAB / pA * pB); + double pmi = Math.log(pAB / (pA * pB)); - double normalizedPMI = (pmi/-Math.log(pAB) + 1)/2; + double denominator = -Math.log(pAB); + if(denominator == 0){ + return 0; + } + double normalizedPMI = (pmi/denominator + 1)/2; + return normalizedPMI; } Modified: trunk/components-core/src/main/java/org/dllearner/algorithms/isle/metrics/RelevanceUtils.java =================================================================== --- trunk/components-core/src/main/java/org/dllearner/algorithms/isle/metrics/RelevanceUtils.java 2013-09-05 09:26:44 UTC (rev 4089) +++ trunk/components-core/src/main/java/org/dllearner/algorithms/isle/metrics/RelevanceUtils.java 2013-09-05 10:17:34 UTC (rev 4090) @@ -31,6 +31,7 @@ } public static Map<Entity, Double> getRelevantEntities(Entity entity, OWLOntology ontology, RelevanceMetric metric){ + System.out.println(entity); Map<Entity, Double> relevantEntities = new HashMap<Entity, Double>(); Set<OWLEntity> owlEntities = new HashSet<OWLEntity>(); @@ -39,8 +40,10 @@ owlEntities.addAll(ontology.getObjectPropertiesInSignature()); Set<Entity> otherEntities = OWLAPIConverter.getEntities(owlEntities); + otherEntities.remove(entity); for (Entity otherEntity : otherEntities) { double relevance = metric.getNormalizedRelevance(entity, otherEntity); + System.out.println(otherEntity + ":" + relevance); relevantEntities.put(otherEntity, relevance); } Modified: trunk/test/isle/swore/corpus/customer_requirement.txt =================================================================== --- trunk/test/isle/swore/corpus/customer_requirement.txt 2013-09-05 09:26:44 UTC (rev 4089) +++ trunk/test/isle/swore/corpus/customer_requirement.txt 2013-09-05 10:17:34 UTC (rev 4090) @@ -1 +1 @@ -A customer requirement is 
usually desired by at least one customer and usually specificed or captured via requirements engineering systems for later inspection by software developers and maintainers. The goal of involving customer in the requirements elicitation process is to improve the quality of the software (see also: requirements elicitation, open source software, E-Government). +A customer requirement is usually desired by at least one customer and usually specified or captured via requirements engineering systems for later inspection by software developers and maintainers. The goal of involving customer in the requirements elicitation process is to improve the quality of the software (see also: requirements elicitation, open source software, E-Government). |
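The PMIRelevanceMetric change in revision 4090 above fixes an operator-precedence bug (the pointwise mutual information must be computed as log(pAB / (pA * pB)), not log(pAB / pA * pB)) and guards the normalization against division by zero. The following is a minimal self-contained sketch of the corrected computation; the class name and the plain document counts are illustrative stand-ins for the SemanticIndex lookups used by the real class.

public class NormalizedPmiSketch {

    // Computes the normalized PMI of two entities from document counts.
    // docsA/docsB: documents annotated with entity A/B, docsAB: documents with both,
    // total: number of indexed documents. Returns a value in [0,1], or 0 if undefined.
    public static double normalizedPmi(int docsA, int docsB, int docsAB, int total) {
        if (total == 0) {
            return 0;
        }
        double pA = (double) docsA / total;
        double pB = (double) docsB / total;
        double pAB = (double) docsAB / total;
        if (pAB == 0 || pA * pB == 0) {
            return 0; // PMI is undefined for empty intersections or marginals
        }
        double pmi = Math.log(pAB / (pA * pB)); // the parentheses are the actual fix
        double denominator = -Math.log(pAB);
        if (denominator == 0) {
            return 0; // pAB == 1 would otherwise cause a division by zero
        }
        return (pmi / denominator + 1) / 2; // map NPMI from [-1,1] to [0,1]
    }

    public static void main(String[] args) {
        // e.g. 10 documents mention A, 8 mention B, 5 mention both, corpus size 100
        System.out.println(normalizedPmi(10, 8, 5, 100));
    }
}

Normalizing by -log(pAB) bounds the score, which is what makes relevance values of different entity pairs comparable in the NLP heuristic.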
From: <lor...@us...> - 2013-09-16 00:14:36
|
Revision: 4110 http://sourceforge.net/p/dl-learner/code/4110 Author: lorenz_b Date: 2013-09-16 00:14:34 +0000 (Mon, 16 Sep 2013) Log Message: ----------- Updated Pellet deps. Modified Paths: -------------- trunk/components-core/pom.xml trunk/interfaces/pom.xml trunk/pom.xml Modified: trunk/components-core/pom.xml =================================================================== --- trunk/components-core/pom.xml 2013-09-14 06:51:26 UTC (rev 4109) +++ trunk/components-core/pom.xml 2013-09-16 00:14:34 UTC (rev 4110) @@ -105,20 +105,9 @@ <version>3.4.4</version> </dependency> - <!-- THIS IS FROM THE UNIBAS REPO --> <dependency> - <groupId>com.owldl</groupId> + <groupId>com.clarkparsia</groupId> <artifactId>pellet</artifactId> - <exclusions> - <exclusion> <!-- declare the exclusion here --> - <groupId>org.mortbay.jetty</groupId> - <artifactId>org.mortbay.jetty</artifactId> - </exclusion> - <exclusion> - <artifactId>owlapi</artifactId> - <groupId>net.sourceforge.owlapi</groupId> - </exclusion> - </exclusions> </dependency> <dependency> Modified: trunk/interfaces/pom.xml =================================================================== --- trunk/interfaces/pom.xml 2013-09-14 06:51:26 UTC (rev 4109) +++ trunk/interfaces/pom.xml 2013-09-16 00:14:34 UTC (rev 4110) @@ -525,6 +525,10 @@ <groupId>net.sourceforge.owlapi</groupId> <artifactId>owlapi</artifactId> </exclusion> + <exclusion> + <groupId>com.owldl</groupId> + <artifactId>pellet</artifactId> + </exclusion> </exclusions> </dependency> <dependency> Modified: trunk/pom.xml =================================================================== --- trunk/pom.xml 2013-09-14 06:51:26 UTC (rev 4109) +++ trunk/pom.xml 2013-09-16 00:14:34 UTC (rev 4110) @@ -20,7 +20,7 @@ <slf4j.version>1.6.4</slf4j.version> <log4j.version>1.2.16</log4j.version> - <solr.version>4.1.0</solr.version> + <solr.version>4.4.0</solr.version> </properties> <modules> @@ -143,16 +143,9 @@ </dependency> <dependency> - <groupId>com.owldl</groupId> + <groupId>com.clarkparsia</groupId> <artifactId>pellet</artifactId> - <version>2.3.0</version> - <exclusions> - <!--Excluding this because it has the same classpath as the new Apache Jena and can cause problems--> - <exclusion> - <groupId>com.hp.hpl.jena</groupId> - <artifactId>jena</artifactId> - </exclusion> - </exclusions> + <version>2.3.1</version> </dependency> <dependency> This was sent by the SourceForge.net collaborative development platform, the world's largest Open Source development site. |
From: <lor...@us...> - 2013-10-07 07:38:22
|
Revision: 4119 http://sourceforge.net/p/dl-learner/code/4119 Author: lorenz_b Date: 2013-10-07 07:38:17 +0000 (Mon, 07 Oct 2013) Log Message: ----------- Added methods to SPARQL reasoner for testing if a property has some defined characterstics, e.g. functionality. Modified Paths: -------------- trunk/components-core/src/main/java/org/dllearner/core/AbstractAxiomLearningAlgorithm.java trunk/components-core/src/main/java/org/dllearner/kb/SparqlEndpointKS.java trunk/components-core/src/main/java/org/dllearner/reasoning/SPARQLReasoner.java trunk/scripts/src/main/java/org/dllearner/scripts/pattern/OWLAxiomPatternUsageEvaluation.java Modified: trunk/components-core/src/main/java/org/dllearner/core/AbstractAxiomLearningAlgorithm.java =================================================================== --- trunk/components-core/src/main/java/org/dllearner/core/AbstractAxiomLearningAlgorithm.java 2013-10-05 09:10:00 UTC (rev 4118) +++ trunk/components-core/src/main/java/org/dllearner/core/AbstractAxiomLearningAlgorithm.java 2013-10-07 07:38:17 UTC (rev 4119) @@ -32,6 +32,10 @@ import java.util.SortedSet; import java.util.TreeSet; +import org.aksw.jena_sparql_api.cache.core.QueryExecutionFactoryCacheEx; +import org.aksw.jena_sparql_api.core.QueryExecutionFactory; +import org.aksw.jena_sparql_api.http.QueryExecutionFactoryHttp; +import org.aksw.jena_sparql_api.model.QueryExecutionFactoryModel; import org.dllearner.core.config.BooleanEditor; import org.dllearner.core.config.ConfigOption; import org.dllearner.core.config.IntegerEditor; @@ -60,14 +64,10 @@ import com.hp.hpl.jena.query.ParameterizedSparqlString; import com.hp.hpl.jena.query.Query; import com.hp.hpl.jena.query.QueryExecution; -import com.hp.hpl.jena.query.QueryExecutionFactory; -import com.hp.hpl.jena.query.QueryFactory; import com.hp.hpl.jena.query.ResultSet; -import com.hp.hpl.jena.query.Syntax; import com.hp.hpl.jena.rdf.model.Model; import com.hp.hpl.jena.rdf.model.ModelFactory; import com.hp.hpl.jena.rdf.model.RDFNode; -import com.hp.hpl.jena.sparql.engine.http.QueryEngineHTTP; import com.hp.hpl.jena.sparql.engine.http.QueryExceptionHTTP; import com.hp.hpl.jena.sparql.expr.E_Regex; import com.hp.hpl.jena.sparql.expr.E_Str; @@ -98,6 +98,7 @@ protected SparqlEndpointKS ks; protected SPARQLReasoner reasoner; + private QueryExecutionFactory qef; protected List<EvaluatedAxiom> currentlyBestAxioms; protected SortedSet<Axiom> existingAxioms; @@ -191,6 +192,17 @@ @Override public void init() throws ComponentInitException { + if(ks.isRemote()){ + SparqlEndpoint endpoint = ks.getEndpoint(); + qef = new QueryExecutionFactoryHttp(endpoint.getURL().toString(), endpoint.getDefaultGraphURIs()); + if(ks.getCache() != null){ + qef = new QueryExecutionFactoryCacheEx(qef, ks.getCache()); + } +// qef = new QueryExecutionFactoryPaginated(qef, 10000); + + } else { + qef = new QueryExecutionFactoryModel(((LocalModelBasedSparqlEndpointKS)ks).getModel()); + } ks.init(); if(reasoner == null){ reasoner = new SPARQLReasoner((SparqlEndpointKS) ks); @@ -279,93 +291,61 @@ protected Model executeConstructQuery(String query) { logger.trace("Sending query\n{} ...", query); - if(ks.isRemote()){ - SparqlEndpoint endpoint = ((SparqlEndpointKS) ks).getEndpoint(); - QueryEngineHTTP queryExecution = new QueryEngineHTTP(endpoint.getURL().toString(), - query); - queryExecution.setTimeout(getRemainingRuntimeInMilliSeconds()); - queryExecution.setDefaultGraphURIs(endpoint.getDefaultGraphURIs()); - queryExecution.setNamedGraphURIs(endpoint.getNamedGraphURIs()); - try { - Model 
model = queryExecution.execConstruct(); - fetchedRows += model.size(); - timeout = false; - if(model.size() == 0){ - fullDataLoaded = true; - } - logger.debug("Got " + model.size() + " triples."); - return model; - } catch (QueryExceptionHTTP e) { - if(e.getCause() instanceof SocketTimeoutException){ - logger.warn("Got timeout"); - } else { - logger.error("Exception executing query", e); - } - return ModelFactory.createDefaultModel(); - } - } else { - QueryExecution queryExecution = QueryExecutionFactory.create(query, ((LocalModelBasedSparqlEndpointKS)ks).getModel()); - Model model = queryExecution.execConstruct(); + QueryExecution qe = qef.createQueryExecution(query); + try { + Model model = qe.execConstruct(); fetchedRows += model.size(); + timeout = false; if(model.size() == 0){ fullDataLoaded = true; } + logger.debug("Got " + model.size() + " triples."); return model; + } catch (QueryExceptionHTTP e) { + if(e.getCause() instanceof SocketTimeoutException){ + logger.warn("Got timeout"); + } else { + logger.error("Exception executing query", e); + } + return ModelFactory.createDefaultModel(); } } protected ResultSet executeSelectQuery(String query) { logger.trace("Sending query\n{} ...", query); - if(ks.isRemote()){ - SparqlEndpoint endpoint = ((SparqlEndpointKS) ks).getEndpoint(); - QueryEngineHTTP queryExecution = new QueryEngineHTTP(endpoint.getURL().toString(), - query); - queryExecution.setTimeout(getRemainingRuntimeInMilliSeconds()); - queryExecution.setDefaultGraphURIs(endpoint.getDefaultGraphURIs()); - queryExecution.setNamedGraphURIs(endpoint.getNamedGraphURIs()); - try { - ResultSet rs = queryExecution.execSelect(); - timeout = false; - return rs; - } catch (QueryExceptionHTTP e) { - if(e.getCause() instanceof SocketTimeoutException){ - if(timeout){ - logger.warn("Got timeout"); - throw e; - } else { - logger.trace("Got local timeout"); - } - + + QueryExecution qe = qef.createQueryExecution(query); + try { + ResultSet rs = qe.execSelect(); + timeout = false; + return rs; + } catch (QueryExceptionHTTP e) { + if(e.getCause() instanceof SocketTimeoutException){ + if(timeout){ + logger.warn("Got timeout"); + throw e; } else { - logger.error("Exception executing query", e); + logger.trace("Got local timeout"); } - return new ResultSetMem(); + + } else { + logger.error("Exception executing query", e); } - } else { - return executeSelectQuery(query, ((LocalModelBasedSparqlEndpointKS)ks).getModel()); + return new ResultSetMem(); } } protected ResultSet executeSelectQuery(String query, Model model) { logger.trace("Sending query on local model\n{} ...", query); - QueryExecution qexec = QueryExecutionFactory.create(QueryFactory.create(query, Syntax.syntaxARQ), model); - ResultSet rs = qexec.execSelect();; - + QueryExecutionFactory qef = new QueryExecutionFactoryModel(model); + QueryExecution qexec = qef.createQueryExecution(query); + ResultSet rs = qexec.execSelect(); return rs; } protected boolean executeAskQuery(String query){ logger.trace("Sending query\n{} ...", query); - if(ks.isRemote()){ - SparqlEndpoint endpoint = ((SparqlEndpointKS) ks).getEndpoint(); - QueryEngineHTTP queryExecution = new QueryEngineHTTP(endpoint.getURL().toString(), query); - queryExecution.setDefaultGraphURIs(endpoint.getDefaultGraphURIs()); - queryExecution.setNamedGraphURIs(endpoint.getNamedGraphURIs()); - return queryExecution.execAsk(); - } else { - QueryExecution queryExecution = QueryExecutionFactory.create(query, ((LocalModelBasedSparqlEndpointKS)ks).getModel()); - return queryExecution.execAsk(); - } + 
return qef.createQueryExecution(query).execAsk(); } protected <K, V extends Comparable<V>> List<Entry<K, V>> sortByValues(Map<K, V> map){ Modified: trunk/components-core/src/main/java/org/dllearner/kb/SparqlEndpointKS.java =================================================================== --- trunk/components-core/src/main/java/org/dllearner/kb/SparqlEndpointKS.java 2013-10-05 09:10:00 UTC (rev 4118) +++ trunk/components-core/src/main/java/org/dllearner/kb/SparqlEndpointKS.java 2013-10-07 07:38:17 UTC (rev 4119) @@ -23,12 +23,12 @@ import java.util.LinkedList; import java.util.List; +import org.aksw.jena_sparql_api.cache.extra.CacheEx; import org.dllearner.core.ComponentAnn; import org.dllearner.core.ComponentInitException; import org.dllearner.core.KnowledgeSource; import org.dllearner.core.config.ConfigOption; import org.dllearner.core.config.ListStringEditor; -import org.dllearner.kb.sparql.ExtractionDBCache; import org.dllearner.kb.sparql.SPARQLTasks; import org.dllearner.kb.sparql.SparqlEndpoint; import org.springframework.beans.propertyeditors.URLEditor; @@ -45,7 +45,7 @@ public class SparqlEndpointKS implements KnowledgeSource { private SparqlEndpoint endpoint; - private ExtractionDBCache cache; + private CacheEx cache; private boolean supportsSPARQL_1_1 = false; private boolean isRemote = true; private boolean initialized = false; @@ -69,15 +69,22 @@ this(endpoint, null); } - public SparqlEndpointKS(SparqlEndpoint endpoint, ExtractionDBCache cache) { + public SparqlEndpointKS(SparqlEndpoint endpoint, CacheEx cache) { this.endpoint = endpoint; this.cache = cache; } - public ExtractionDBCache getCache() { + public CacheEx getCache() { return cache; } + /** + * @param cache the cache to set + */ + public void setCache(CacheEx cache) { + this.cache = cache; + } + @Override public void init() throws ComponentInitException { if(!initialized){ Modified: trunk/components-core/src/main/java/org/dllearner/reasoning/SPARQLReasoner.java =================================================================== --- trunk/components-core/src/main/java/org/dllearner/reasoning/SPARQLReasoner.java 2013-10-05 09:10:00 UTC (rev 4118) +++ trunk/components-core/src/main/java/org/dllearner/reasoning/SPARQLReasoner.java 2013-10-07 07:38:17 UTC (rev 4119) @@ -387,6 +387,26 @@ } return hierarchy; } + + public boolean isFunctional(ObjectProperty property){ + String query = "ASK {<" + property + "> a " + OWL.FunctionalProperty.getURI() + "}"; + return qef.createQueryExecution(query).execAsk(); + } + + public boolean isInverseFunctional(ObjectProperty property){ + String query = "ASK {<" + property + "> a " + OWL.InverseFunctionalProperty.getURI() + "}"; + return qef.createQueryExecution(query).execAsk(); + } + + public boolean isAsymmetric(ObjectProperty property){ + String query = "ASK {<" + property + "> a " + OWL2.AsymmetricProperty.getURI() + "}"; + return qef.createQueryExecution(query).execAsk(); + } + + public boolean isIrreflexive(ObjectProperty property){ + String query = "ASK {<" + property + "> a " + OWL2.IrreflexiveProperty.getURI() + "}"; + return qef.createQueryExecution(query).execAsk(); + } public final ClassHierarchy prepareSubsumptionHierarchyFast() { logger.info("Preparing subsumption hierarchy ..."); Modified: trunk/scripts/src/main/java/org/dllearner/scripts/pattern/OWLAxiomPatternUsageEvaluation.java =================================================================== --- trunk/scripts/src/main/java/org/dllearner/scripts/pattern/OWLAxiomPatternUsageEvaluation.java 2013-10-05 09:10:00 UTC (rev 
4118) +++ trunk/scripts/src/main/java/org/dllearner/scripts/pattern/OWLAxiomPatternUsageEvaluation.java 2013-10-07 07:38:17 UTC (rev 4119) @@ -9,7 +9,6 @@ import java.io.IOException; import java.io.InputStream; import java.io.StringWriter; -import java.io.UnsupportedEncodingException; import java.net.MalformedURLException; import java.net.SocketTimeoutException; import java.net.URI; @@ -41,6 +40,14 @@ import joptsimple.OptionParser; import joptsimple.OptionSet; +import org.aksw.jena_sparql_api.cache.core.QueryExecutionFactoryCacheEx; +import org.aksw.jena_sparql_api.cache.extra.CacheCoreEx; +import org.aksw.jena_sparql_api.cache.extra.CacheCoreH2; +import org.aksw.jena_sparql_api.cache.extra.CacheEx; +import org.aksw.jena_sparql_api.cache.extra.CacheExImpl; +import org.aksw.jena_sparql_api.core.QueryExecutionFactory; +import org.aksw.jena_sparql_api.http.QueryExecutionFactoryHttp; +import org.aksw.jena_sparql_api.model.QueryExecutionFactoryModel; import org.apache.commons.compress.compressors.bzip2.BZip2CompressorInputStream; import org.apache.commons.math3.stat.descriptive.DescriptiveStatistics; import org.apache.log4j.Logger; @@ -51,10 +58,7 @@ import org.dllearner.core.owl.NamedClass; import org.dllearner.kb.LocalModelBasedSparqlEndpointKS; import org.dllearner.kb.SparqlEndpointKS; -import org.dllearner.kb.sparql.ExtractionDBCache; -import org.dllearner.kb.sparql.QueryEngineHTTP; import org.dllearner.kb.sparql.SparqlEndpoint; -import org.dllearner.kb.sparql.SparqlQuery; import org.dllearner.learningproblems.AxiomScore; import org.dllearner.learningproblems.Heuristics; import org.dllearner.reasoning.SPARQLReasoner; @@ -109,7 +113,6 @@ import com.hp.hpl.jena.query.ParameterizedSparqlString; import com.hp.hpl.jena.query.Query; import com.hp.hpl.jena.query.QueryExecution; -import com.hp.hpl.jena.query.QueryExecutionFactory; import com.hp.hpl.jena.query.QueryFactory; import com.hp.hpl.jena.query.QuerySolution; import com.hp.hpl.jena.query.ResultSet; @@ -132,10 +135,12 @@ private OWLObjectRenderer axiomRenderer = new ManchesterOWLSyntaxOWLObjectRendererImpl(); private OWLDataFactory df = new OWLDataFactoryImpl(); - private ExtractionDBCache cache = new ExtractionDBCache("pattern-cache/db"); + private String cacheDirectory = "pattern-cache/db"; + private CacheEx cache; private SparqlEndpoint endpoint = SparqlEndpoint.getEndpointDBpedia(); + private QueryExecutionFactory qef; - private SparqlEndpointKS ks = new SparqlEndpointKS(endpoint, cache);//new LocalModelBasedSparqlEndpointKS(model); + private SparqlEndpointKS ks = new SparqlEndpointKS(endpoint);//new LocalModelBasedSparqlEndpointKS(model); private String ns = "http://dbpedia.org/ontology/"; private DecimalFormat format = new DecimalFormat("00.0%"); @@ -185,6 +190,25 @@ e.printStackTrace(); } + if(ks.isRemote()){ + qef = new QueryExecutionFactoryHttp(endpoint.getURL().toString(), endpoint.getDefaultGraphURIs()); + if(cacheDirectory != null){ + try { + long timeToLive = TimeUnit.DAYS.toMillis(30); + CacheCoreEx cacheBackend = CacheCoreH2.create(cacheDirectory, timeToLive, true); + cache = new CacheExImpl(cacheBackend); + qef = new QueryExecutionFactoryCacheEx(qef, cache); + ks.setCache(cache); + } catch (ClassNotFoundException e) { + e.printStackTrace(); + } catch (SQLException e) { + e.printStackTrace(); + } + } + } else { + qef = new QueryExecutionFactoryModel(((LocalModelBasedSparqlEndpointKS)ks).getModel()); + } + initDBConnection(); samplesDir = new File("pattern-instantiations-samples"); @@ -730,7 +754,8 @@ } filterModel(fragment); 
logger.info("...got " + fragment.size() + " triples "); - ResultSet rs = QueryExecutionFactory.create("SELECT (COUNT(DISTINCT ?s) AS ?cnt) WHERE {?s a <" + cls.getName() + ">. }", fragment).execSelect(); + QueryExecutionFactory qef = new QueryExecutionFactoryModel(fragment); + ResultSet rs = qef.createQueryExecution("SELECT (COUNT(DISTINCT ?s) AS ?cnt) WHERE {?s a <" + cls.getName() + ">. }").execSelect(); int nrOfInstances = rs.next().getLiteral("cnt").getInt(); logger.info("with " + nrOfInstances + " instances of class " + cls.getName()); fragmentStatistics.addValue(nrOfInstances); @@ -928,14 +953,15 @@ //2. execute SPARQL query on local model query = QueryFactory.create("SELECT (COUNT(DISTINCT ?x) AS ?cnt) WHERE {" + converter.convert("?x", patternSubClass) + "}",Syntax.syntaxARQ); - int subClassCnt = QueryExecutionFactory.create(query, fragment).execSelect().next().getLiteral("cnt").getInt(); + QueryExecutionFactory qef = new QueryExecutionFactoryModel(fragment); + int subClassCnt = qef.createQueryExecution(query).execSelect().next().getLiteral("cnt").getInt(); System.out.println(subClassCnt); Set<OWLEntity> signature = patternSuperClass.getSignature(); signature.remove(patternSubClass); query = converter.asQuery("?x", df.getOWLObjectIntersectionOf(patternSubClass, patternSuperClass), signature, true); Map<OWLEntity, String> variablesMapping = converter.getVariablesMapping(); - com.hp.hpl.jena.query.ResultSet rs = QueryExecutionFactory.create(query, fragment).execSelect(); + com.hp.hpl.jena.query.ResultSet rs = qef.createQueryExecution(query).execSelect(); QuerySolution qs; while(rs.hasNext()){ qs = rs.next(); @@ -1032,7 +1058,8 @@ Query query = converter.asQuery("?x", df.getOWLObjectIntersectionOf(cls, patternSuperClass), signature); logger.info("Running query\n" + query); Map<OWLEntity, String> variablesMapping = converter.getVariablesMapping(); - com.hp.hpl.jena.query.ResultSet rs = QueryExecutionFactory.create(query, fragment).execSelect(); + QueryExecutionFactory qef = new QueryExecutionFactoryModel(fragment); + com.hp.hpl.jena.query.ResultSet rs = qef.createQueryExecution(query).execSelect(); QuerySolution qs; Set<String> resources = new HashSet<String>(); Multiset<OWLAxiom> instantiations = HashMultiset.create(); @@ -1106,14 +1133,15 @@ Query query = QueryFactory.create( "SELECT (COUNT(DISTINCT ?x) AS ?cnt) WHERE {" + converter.convert("?x", patternSubClass) + "}", Syntax.syntaxARQ); - int subClassCnt = QueryExecutionFactory.create(query, fragment).execSelect().next().getLiteral("cnt").getInt(); + QueryExecutionFactory qef = new QueryExecutionFactoryModel(fragment); + int subClassCnt = qef.createQueryExecution(query).execSelect().next().getLiteral("cnt").getInt(); Set<OWLEntity> signature = patternSuperClass.getSignature(); signature.remove(patternSubClass); query = converter.asQuery("?x", df.getOWLObjectIntersectionOf(patternSubClass, patternSuperClass), signature, true); logger.info("Running query\n" + query); Map<OWLEntity, String> variablesMapping = converter.getVariablesMapping(); - com.hp.hpl.jena.query.ResultSet rs = QueryExecutionFactory.create(query, fragment).execSelect(); + com.hp.hpl.jena.query.ResultSet rs = qef.createQueryExecution(query).execSelect(); QuerySolution qs; while (rs.hasNext()) { qs = rs.next(); @@ -1233,146 +1261,41 @@ } protected com.hp.hpl.jena.query.ResultSet executeSelectQuery(Query query) { - com.hp.hpl.jena.query.ResultSet rs = null; - if(ks.isRemote()){ - SparqlEndpoint endpoint = ((SparqlEndpointKS) ks).getEndpoint(); - ExtractionDBCache cache 
= ks.getCache(); - if(cache != null){ - rs = SparqlQuery.convertJSONtoResultSet(cache.executeSelectQuery(endpoint, query.toString())); - } else { - QueryEngineHTTP queryExecution = new QueryEngineHTTP(endpoint.getURL().toString(), - query); - queryExecution.setDefaultGraphURIs(endpoint.getDefaultGraphURIs()); - queryExecution.setNamedGraphURIs(endpoint.getNamedGraphURIs()); - try { - rs = queryExecution.execSelect(); - return rs; - } catch (QueryExceptionHTTP e) { - if(e.getCause() instanceof SocketTimeoutException){ - logger.warn("Got timeout"); - } else { - logger.error("Exception executing query", e); - } - } - } - try { - Thread.sleep(100); - } catch (InterruptedException e) { - e.printStackTrace(); - } - - } else { - QueryExecution queryExecution = QueryExecutionFactory.create(query, ((LocalModelBasedSparqlEndpointKS)ks).getModel()); - rs = queryExecution.execSelect(); - } + com.hp.hpl.jena.query.ResultSet rs = qef.createQueryExecution(query).execSelect(); return rs; } protected com.hp.hpl.jena.query.ResultSet executeSelectQuery(Query query, boolean cached) { - com.hp.hpl.jena.query.ResultSet rs = null; - if(ks.isRemote()){ - SparqlEndpoint endpoint = ((SparqlEndpointKS) ks).getEndpoint(); - ExtractionDBCache cache = ks.getCache(); - if(cache != null && cached){ - rs = SparqlQuery.convertJSONtoResultSet(cache.executeSelectQuery(endpoint, query.toString())); - } else { - QueryEngineHTTP queryExecution = new QueryEngineHTTP(endpoint.getURL().toString(), - query); - queryExecution.setDefaultGraphURIs(endpoint.getDefaultGraphURIs()); - queryExecution.setNamedGraphURIs(endpoint.getNamedGraphURIs()); - try { - rs = queryExecution.execSelect(); - return rs; - } catch (QueryExceptionHTTP e) { - if(e.getCause() instanceof SocketTimeoutException){ - logger.warn("Got timeout"); - } else { - logger.error("Exception executing query", e); - } - } - } - - } else { - QueryExecution queryExecution = QueryExecutionFactory.create(query, ((LocalModelBasedSparqlEndpointKS)ks).getModel()); - rs = queryExecution.execSelect(); - } + com.hp.hpl.jena.query.ResultSet rs = qef.createQueryExecution(query).execSelect(); return rs; } protected Model executeConstructQuery(Query query, long timeout) { - if(ks.isRemote()){ - SparqlEndpoint endpoint = ((SparqlEndpointKS) ks).getEndpoint(); - ExtractionDBCache cache = ks.getCache(); - Model model = null; - try { -// if(cache != null){ -// try { -// model = cache.executeConstructQuery(endpoint, query.toString()); -// } catch (UnsupportedEncodingException e) { -// e.printStackTrace(); -// } catch (SQLException e) { -// e.printStackTrace(); -// } -// } else { - QueryEngineHTTP queryExecution = new QueryEngineHTTP(endpoint.getURL().toString(), - query); - queryExecution.setDefaultGraphURIs(endpoint.getDefaultGraphURIs()); - queryExecution.setNamedGraphURIs(endpoint.getNamedGraphURIs()); - queryExecution.setTimeout(timeout, timeout); - model = queryExecution.execConstruct(); -// } - logger.debug("Got " + model.size() + " triples."); - return model; - } catch (QueryExceptionHTTP e) { - if(e.getCause() instanceof SocketTimeoutException){ - logger.warn("Got timeout"); - } else { - logger.error("Exception executing query", e); - } - return ModelFactory.createDefaultModel(); + QueryExecution qe = qef.createQueryExecution(query); + qe.setTimeout(timeout); + try { + return qe.execConstruct(); + } catch (QueryExceptionHTTP e) { + if(e.getCause() instanceof SocketTimeoutException){ + logger.warn("Got timeout"); + } else { + logger.error("Exception executing query", e); } - } else { - 
QueryExecution queryExecution = QueryExecutionFactory.create(query, ((LocalModelBasedSparqlEndpointKS)ks).getModel()); - Model model = queryExecution.execConstruct(); - return model; + return ModelFactory.createDefaultModel(); } } protected Model executeConstructQuery(Query query) { - if(ks.isRemote()){ - SparqlEndpoint endpoint = ((SparqlEndpointKS) ks).getEndpoint(); - ExtractionDBCache cache = ks.getCache(); - Model model = null; - try { - if(cache != null){ - try { - model = cache.executeConstructQuery(endpoint, query.toString()); - } catch (UnsupportedEncodingException e) { - e.printStackTrace(); - } catch (SQLException e) { - e.printStackTrace(); - } - } else { - QueryEngineHTTP queryExecution = new QueryEngineHTTP(endpoint.getURL().toString(), - query); - queryExecution.setDefaultGraphURIs(endpoint.getDefaultGraphURIs()); - queryExecution.setNamedGraphURIs(endpoint.getNamedGraphURIs()); - model = queryExecution.execConstruct(); - } - logger.debug("Got " + model.size() + " triples."); - return model; - } catch (QueryExceptionHTTP e) { - if(e.getCause() instanceof SocketTimeoutException){ - logger.warn("Got timeout"); - } else { - logger.error("Exception executing query", e); - } - return ModelFactory.createDefaultModel(); + QueryExecution qe = qef.createQueryExecution(query); + try { + return qe.execConstruct(); + } catch (QueryExceptionHTTP e) { + if(e.getCause() instanceof SocketTimeoutException){ + logger.warn("Got timeout"); + } else { + logger.error("Exception executing query", e); } - } else { - QueryExecution queryExecution = QueryExecutionFactory.create(query, ((LocalModelBasedSparqlEndpointKS)ks).getModel()); - Model model = queryExecution.execConstruct(); - return model; + return ModelFactory.createDefaultModel(); } } This was sent by the SourceForge.net collaborative development platform, the world's largest Open Source development site. |
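Revision 4119 above replaces the per-method branching between remote endpoints (QueryEngineHTTP) and local models with a single jena-sparql-api QueryExecutionFactory that is created once in init() and optionally wrapped in an H2-backed cache. A condensed sketch of that wiring, assuming a plain endpoint URL without default graphs and an illustrative cache directory; the class and method names are placeholders, while the library calls mirror the ones introduced in the commit.

import java.sql.SQLException;
import java.util.Collections;
import java.util.concurrent.TimeUnit;

import org.aksw.jena_sparql_api.cache.core.QueryExecutionFactoryCacheEx;
import org.aksw.jena_sparql_api.cache.extra.CacheCoreEx;
import org.aksw.jena_sparql_api.cache.extra.CacheCoreH2;
import org.aksw.jena_sparql_api.cache.extra.CacheEx;
import org.aksw.jena_sparql_api.cache.extra.CacheExImpl;
import org.aksw.jena_sparql_api.core.QueryExecutionFactory;
import org.aksw.jena_sparql_api.http.QueryExecutionFactoryHttp;

public class CachedQueryExecutionFactorySketch {

    // Builds a QueryExecutionFactory for a remote endpoint and, if a cache
    // directory is given, wraps it in an H2-backed cache with a 30-day TTL.
    public static QueryExecutionFactory create(String endpointUrl, String cacheDirectory)
            throws ClassNotFoundException, SQLException {
        QueryExecutionFactory qef =
                new QueryExecutionFactoryHttp(endpointUrl, Collections.<String>emptyList());
        if (cacheDirectory != null) {
            long timeToLive = TimeUnit.DAYS.toMillis(30);
            CacheCoreEx cacheBackend = CacheCoreH2.create(cacheDirectory, timeToLive, true);
            CacheEx cache = new CacheExImpl(cacheBackend);
            qef = new QueryExecutionFactoryCacheEx(qef, cache);
        }
        return qef;
    }

    public static void main(String[] args) throws Exception {
        QueryExecutionFactory qef = create("http://dbpedia.org/sparql", "pattern-cache/db");
        // SELECT, CONSTRUCT and ASK all go through the same factory afterwards
        System.out.println(qef.createQueryExecution("ASK { ?s ?p ?o }").execAsk());
    }
}

Centralizing query execution this way is what lets executeSelectQuery, executeConstructQuery and executeAskQuery in AbstractAxiomLearningAlgorithm collapse to a few lines each.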
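The same revision adds isFunctional, isInverseFunctional, isAsymmetric and isIrreflexive to SPARQLReasoner, each of which issues a SPARQL ASK query for the corresponding OWL property type. The standalone sketch below illustrates the idea; the class name and any property IRIs passed to it are hypothetical, and the type IRI is wrapped in angle brackets here so that the generated ASK query is syntactically valid SPARQL.

import org.aksw.jena_sparql_api.core.QueryExecutionFactory;

import com.hp.hpl.jena.vocabulary.OWL;
import com.hp.hpl.jena.vocabulary.OWL2;

public class PropertyCharacteristicsSketch {

    private final QueryExecutionFactory qef;

    public PropertyCharacteristicsSketch(QueryExecutionFactory qef) {
        this.qef = qef;
    }

    // ASK whether the property is declared with the given characteristic type;
    // both IRIs are enclosed in angle brackets to keep the query valid SPARQL.
    private boolean hasCharacteristic(String propertyUri, String typeUri) {
        String query = "ASK {<" + propertyUri + "> a <" + typeUri + ">}";
        return qef.createQueryExecution(query).execAsk();
    }

    public boolean isFunctional(String propertyUri) {
        return hasCharacteristic(propertyUri, OWL.FunctionalProperty.getURI());
    }

    public boolean isInverseFunctional(String propertyUri) {
        return hasCharacteristic(propertyUri, OWL.InverseFunctionalProperty.getURI());
    }

    public boolean isAsymmetric(String propertyUri) {
        return hasCharacteristic(propertyUri, OWL2.AsymmetricProperty.getURI());
    }

    public boolean isIrreflexive(String propertyUri) {
        return hasCharacteristic(propertyUri, OWL2.IrreflexiveProperty.getURI());
    }
}

Combined with the cached factory sketched above, a check such as new PropertyCharacteristicsSketch(qef).isFunctional("http://dbpedia.org/ontology/birthPlace") costs a single ASK round trip.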
From: <lor...@us...> - 2013-10-11 21:29:38
|
Revision: 4121 http://sourceforge.net/p/dl-learner/code/4121 Author: lorenz_b Date: 2013-10-11 21:29:34 +0000 (Fri, 11 Oct 2013) Log Message: ----------- Updated OWLAPI deps. Modified Paths: -------------- trunk/components-core/pom.xml trunk/components-core/src/main/java/org/dllearner/algorithms/isle/index/TrieEntityCandidateGenerator.java trunk/components-core/src/main/java/org/dllearner/algorithms/isle/wsd/StructureBasedWordSenseDisambiguation.java trunk/components-core/src/main/java/org/dllearner/algorithms/qtl/operations/lgg/NoiseSensitiveLGG.java trunk/components-core/src/main/java/org/dllearner/kb/SparqlEndpointKS.java trunk/pom.xml trunk/scripts/pom.xml Added Paths: ----------- trunk/components-core/src/main/java/org/dllearner/algorithms/isle/wsd/ContextExtractor.java Modified: trunk/components-core/pom.xml =================================================================== --- trunk/components-core/pom.xml 2013-10-07 09:15:20 UTC (rev 4120) +++ trunk/components-core/pom.xml 2013-10-11 21:29:34 UTC (rev 4121) @@ -102,7 +102,6 @@ <dependency> <groupId>net.sourceforge.owlapi</groupId> <artifactId>owlapi-distribution</artifactId> - <version>3.4.4</version> </dependency> <dependency> @@ -291,11 +290,6 @@ <groupId>edu.stanford.nlp</groupId> <artifactId>stanford-corenlp</artifactId> <version>1.3.4</version> - </dependency> - <dependency> - <groupId>edu.stanford.nlp</groupId> - <artifactId>stanford-corenlp</artifactId> - <version>1.3.4</version> <classifier>models</classifier> </dependency> <dependency> Modified: trunk/components-core/src/main/java/org/dllearner/algorithms/isle/index/TrieEntityCandidateGenerator.java =================================================================== --- trunk/components-core/src/main/java/org/dllearner/algorithms/isle/index/TrieEntityCandidateGenerator.java 2013-10-07 09:15:20 UTC (rev 4120) +++ trunk/components-core/src/main/java/org/dllearner/algorithms/isle/index/TrieEntityCandidateGenerator.java 2013-10-11 21:29:34 UTC (rev 4121) @@ -14,7 +14,7 @@ import org.dllearner.core.owl.Entity; import org.semanticweb.owlapi.model.OWLOntology; -import edu.stanford.nlp.util.Sets; +import com.google.common.collect.Sets; /** * Generates candidates using a entity candidates prefix trie Added: trunk/components-core/src/main/java/org/dllearner/algorithms/isle/wsd/ContextExtractor.java =================================================================== --- trunk/components-core/src/main/java/org/dllearner/algorithms/isle/wsd/ContextExtractor.java (rev 0) +++ trunk/components-core/src/main/java/org/dllearner/algorithms/isle/wsd/ContextExtractor.java 2013-10-11 21:29:34 UTC (rev 4121) @@ -0,0 +1,15 @@ +/** + * + */ +package org.dllearner.algorithms.isle.wsd; + +import java.util.Set; + +/** + * @author Lorenz Buehmann + * + */ +public interface ContextExtractor { + + Set<String> extractContext(String token, String document); +} Modified: trunk/components-core/src/main/java/org/dllearner/algorithms/isle/wsd/StructureBasedWordSenseDisambiguation.java =================================================================== --- trunk/components-core/src/main/java/org/dllearner/algorithms/isle/wsd/StructureBasedWordSenseDisambiguation.java 2013-10-07 09:15:20 UTC (rev 4120) +++ trunk/components-core/src/main/java/org/dllearner/algorithms/isle/wsd/StructureBasedWordSenseDisambiguation.java 2013-10-11 21:29:34 UTC (rev 4121) @@ -18,11 +18,14 @@ */ public class StructureBasedWordSenseDisambiguation extends WordSenseDisambiguation{ + private ContextExtractor contextExtractor; + /** * 
@param ontology */ - public StructureBasedWordSenseDisambiguation(OWLOntology ontology) { + public StructureBasedWordSenseDisambiguation(ContextExtractor contextExtractor, OWLOntology ontology) { super(ontology); + this.contextExtractor = contextExtractor; } /* (non-Javadoc) @@ -30,12 +33,13 @@ */ @Override public SemanticAnnotation disambiguate(Annotation annotation, Set<Entity> candidateEntities) { - //TODO we should find the sentence in which the annotated token is contained in - String content = annotation.getReferencedDocument().getContent(); + //get the context of the annotated token + Set<String> tokenContext = contextExtractor.extractContext(annotation.getToken(), annotation.getReferencedDocument().getContent()); + //compare this context with the context of each entity candidate for (Entity entity : candidateEntities) { Set<String> entityContext = StructuralEntityContext.getContextInNaturalLanguage(ontology, entity); + } return null; } - } Modified: trunk/components-core/src/main/java/org/dllearner/algorithms/qtl/operations/lgg/NoiseSensitiveLGG.java =================================================================== --- trunk/components-core/src/main/java/org/dllearner/algorithms/qtl/operations/lgg/NoiseSensitiveLGG.java 2013-10-07 09:15:20 UTC (rev 4120) +++ trunk/components-core/src/main/java/org/dllearner/algorithms/qtl/operations/lgg/NoiseSensitiveLGG.java 2013-10-11 21:29:34 UTC (rev 4121) @@ -16,8 +16,6 @@ import com.jamonapi.Monitor; import com.jamonapi.MonitorFactory; -import edu.stanford.nlp.util.Sets; - public class NoiseSensitiveLGG<N> { Modified: trunk/components-core/src/main/java/org/dllearner/kb/SparqlEndpointKS.java =================================================================== --- trunk/components-core/src/main/java/org/dllearner/kb/SparqlEndpointKS.java 2013-10-07 09:15:20 UTC (rev 4120) +++ trunk/components-core/src/main/java/org/dllearner/kb/SparqlEndpointKS.java 2013-10-11 21:29:34 UTC (rev 4121) @@ -20,10 +20,16 @@ package org.dllearner.kb; import java.net.URL; +import java.sql.SQLException; import java.util.LinkedList; import java.util.List; +import java.util.concurrent.TimeUnit; +import org.aksw.jena_sparql_api.cache.core.QueryExecutionFactoryCacheEx; +import org.aksw.jena_sparql_api.cache.extra.CacheCoreEx; +import org.aksw.jena_sparql_api.cache.extra.CacheCoreH2; import org.aksw.jena_sparql_api.cache.extra.CacheEx; +import org.aksw.jena_sparql_api.cache.extra.CacheExImpl; import org.dllearner.core.ComponentAnn; import org.dllearner.core.ComponentInitException; import org.dllearner.core.KnowledgeSource; @@ -66,7 +72,7 @@ } public SparqlEndpointKS(SparqlEndpoint endpoint) { - this(endpoint, null); + this(endpoint, (String)null); } public SparqlEndpointKS(SparqlEndpoint endpoint, CacheEx cache) { @@ -74,6 +80,21 @@ this.cache = cache; } + public SparqlEndpointKS(SparqlEndpoint endpoint, String cacheDirectory) { + this.endpoint = endpoint; + if(cacheDirectory != null){ + try { + long timeToLive = TimeUnit.DAYS.toMillis(30); + CacheCoreEx cacheBackend = CacheCoreH2.create(cacheDirectory, timeToLive, true); + cache = new CacheExImpl(cacheBackend); + } catch (ClassNotFoundException e) { + e.printStackTrace(); + } catch (SQLException e) { + e.printStackTrace(); + } + } + } + public CacheEx getCache() { return cache; } Modified: trunk/pom.xml =================================================================== --- trunk/pom.xml 2013-10-07 09:15:20 UTC (rev 4120) +++ trunk/pom.xml 2013-10-11 21:29:34 UTC (rev 4121) @@ -123,8 +123,7 @@ <dependency> 
<groupId>net.sourceforge.owlapi</groupId> <artifactId>owlapi-distribution</artifactId> - <version>3.4.4</version> - <type>pom</type> + <version>3.4.5</version> </dependency> <dependency> <groupId>net.sourceforge.owlapi</groupId> Modified: trunk/scripts/pom.xml =================================================================== --- trunk/scripts/pom.xml 2013-10-07 09:15:20 UTC (rev 4120) +++ trunk/scripts/pom.xml 2013-10-11 21:29:34 UTC (rev 4121) @@ -116,7 +116,6 @@ <dependency> <groupId>net.sourceforge.owlapi</groupId> <artifactId>owlapi-distribution</artifactId> - <type>pom</type> </dependency> <dependency> <groupId>net.sourceforge.owlapi</groupId> This was sent by the SourceForge.net collaborative development platform, the world's largest Open Source development site. |
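Revision 4121 above introduces the ContextExtractor interface (extractContext(token, document)) and passes an implementation into StructureBasedWordSenseDisambiguation, which compares the textual context of an annotated token with the structural context of each candidate entity. The commit ships only the interface, so the following window-based implementation is purely hypothetical, a minimal sketch of what such an extractor could look like.

import java.util.HashSet;
import java.util.Set;

import org.dllearner.algorithms.isle.wsd.ContextExtractor;

// Hypothetical window-based ContextExtractor: for every occurrence of the token
// in the document, collect the surrounding words as its context.
public class WindowContextExtractor implements ContextExtractor {

    private final int windowSize;

    public WindowContextExtractor(int windowSize) {
        this.windowSize = windowSize;
    }

    @Override
    public Set<String> extractContext(String token, String document) {
        Set<String> context = new HashSet<String>();
        // naive whitespace tokenization; a real implementation would reuse the
        // linguistic annotation machinery of the ISLE index
        String[] words = document.split("\\s+");
        for (int i = 0; i < words.length; i++) {
            if (words[i].equalsIgnoreCase(token)) {
                int from = Math.max(0, i - windowSize);
                int to = Math.min(words.length - 1, i + windowSize);
                for (int j = from; j <= to; j++) {
                    if (j != i) {
                        context.add(words[j].toLowerCase());
                    }
                }
            }
        }
        return context;
    }
}

Such an extractor would be wired in via the new constructor from the diff, e.g. new StructureBasedWordSenseDisambiguation(new WindowContextExtractor(5), ontology).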
From: <jen...@us...> - 2013-10-15 14:46:40
|
Revision: 4123 http://sourceforge.net/p/dl-learner/code/4123 Author: jenslehmann Date: 2013-10-15 14:46:35 +0000 (Tue, 15 Oct 2013) Log Message: ----------- added new unit test for the ISLE case without external corpus Modified Paths: -------------- trunk/components-core/src/main/java/org/dllearner/algorithms/celoe/OENode.java Added Paths: ----------- trunk/components-core/src/test/java/org/dllearner/algorithms/isle/ISLETestCorpus.java trunk/test/isle/swore/ontology_with_comments.owl Removed Paths: ------------- trunk/components-core/src/test/java/org/dllearner/algorithms/isle/ISLETest.java Modified: trunk/components-core/src/main/java/org/dllearner/algorithms/celoe/OENode.java =================================================================== --- trunk/components-core/src/main/java/org/dllearner/algorithms/celoe/OENode.java 2013-10-15 12:06:45 UTC (rev 4122) +++ trunk/components-core/src/main/java/org/dllearner/algorithms/celoe/OENode.java 2013-10-15 14:46:35 UTC (rev 4123) @@ -24,6 +24,7 @@ import java.util.List; import org.dllearner.algorithms.SearchTreeNode; +import org.dllearner.algorithms.isle.NLPHeuristic; import org.dllearner.core.owl.Description; /** @@ -119,6 +120,7 @@ public String getShortDescription(String baseURI) { String ret = description.toString(baseURI,null) + " ["; +// ret += "score" + NLPHeuristic.getNodeScore(this) + ","; ret += "acc:" + dfPercent.format(accuracy) + ", "; ret += "he:" + horizontalExpansion + ", "; ret += "c:" + children.size() + ", "; Deleted: trunk/components-core/src/test/java/org/dllearner/algorithms/isle/ISLETest.java =================================================================== --- trunk/components-core/src/test/java/org/dllearner/algorithms/isle/ISLETest.java 2013-10-15 12:06:45 UTC (rev 4122) +++ trunk/components-core/src/test/java/org/dllearner/algorithms/isle/ISLETest.java 2013-10-15 14:46:35 UTC (rev 4123) @@ -1,264 +0,0 @@ -/** - * - */ -package org.dllearner.algorithms.isle; - -import com.google.common.base.Charsets; -import com.google.common.base.Joiner; -import com.google.common.io.Files; - -import org.dllearner.algorithms.celoe.CELOE; -import org.dllearner.algorithms.isle.index.*; -import org.dllearner.algorithms.isle.index.semantic.SemanticIndex; -import org.dllearner.algorithms.isle.index.semantic.simple.SimpleSemanticIndex; -import org.dllearner.algorithms.isle.index.syntactic.OWLOntologyLuceneSyntacticIndexCreator; -import org.dllearner.algorithms.isle.index.syntactic.SyntacticIndex; -import org.dllearner.algorithms.isle.metrics.PMIRelevanceMetric; -import org.dllearner.algorithms.isle.metrics.RelevanceMetric; -import org.dllearner.algorithms.isle.metrics.RelevanceUtils; -import org.dllearner.algorithms.isle.textretrieval.EntityTextRetriever; -import org.dllearner.algorithms.isle.textretrieval.RDFSLabelEntityTextRetriever; -import org.dllearner.algorithms.isle.wsd.SimpleWordSenseDisambiguation; -import org.dllearner.algorithms.isle.wsd.WordSenseDisambiguation; -import org.dllearner.core.AbstractReasonerComponent; -import org.dllearner.core.KnowledgeSource; -import org.dllearner.core.owl.Entity; -import org.dllearner.core.owl.NamedClass; -import org.dllearner.kb.OWLAPIOntology; -import org.dllearner.learningproblems.ClassLearningProblem; -import org.dllearner.reasoning.FastInstanceChecker; -import org.dllearner.utilities.Helper; -import org.junit.Before; -import org.junit.Test; -import org.semanticweb.owlapi.apibinding.OWLManager; -import org.semanticweb.owlapi.model.IRI; -import org.semanticweb.owlapi.model.OWLDataFactory; 
-import org.semanticweb.owlapi.model.OWLEntity; -import org.semanticweb.owlapi.model.OWLOntology; -import org.semanticweb.owlapi.model.OWLOntologyManager; - -import uk.ac.manchester.cs.owl.owlapi.OWLDataFactoryImpl; - -import java.io.File; -import java.io.IOException; -import java.text.DecimalFormat; -import java.util.HashSet; -import java.util.Map; -import java.util.Set; - -/** - * Some tests for the ISLE algorithm. - * - * @author Lorenz Buehmann - * @author Jens Lehmann - */ -public class ISLETest { - - private OWLOntologyManager manager; - private OWLOntology ontology; - private OWLDataFactory df = new OWLDataFactoryImpl(); - private EntityTextRetriever textRetriever; - private RelevanceMetric relevance; - private String searchField = "label"; - private SemanticIndex semanticIndex; - private SyntacticIndex syntacticIndex; - - // we assume that the ontology is named "ontology.owl" and that all text files - // are in a subdirectory called "corpus" - private String testFolder = "../test/isle/swore/"; -// NamedClass cls = new NamedClass("http://example.com/father#father"); - NamedClass cls = new NamedClass("http://ns.softwiki.de/req/CustomerRequirement"); - - /** - * - */ - public ISLETest() throws Exception{ - manager = OWLManager.createOWLOntologyManager(); - ontology = manager.loadOntologyFromOntologyDocument(new File(testFolder + "ontology.owl")); - textRetriever = new RDFSLabelEntityTextRetriever(ontology); - syntacticIndex = new OWLOntologyLuceneSyntacticIndexCreator(ontology, df.getRDFSLabel(), searchField).buildIndex(); - - - } - - private Set<TextDocument> createDocuments(){ - Set<TextDocument> documents = new HashSet<TextDocument>(); - File folder = new File(testFolder+"corpus/"); - for (File file : folder.listFiles()) { - if(!file.isDirectory() && !file.isHidden()){ - try { - String text = Files.toString(file, Charsets.UTF_8); - documents.add(new TextDocument(text)); - } catch (IOException e) { - e.printStackTrace(); - } - } - } - return documents; - } - - - - /** - * @throws java.lang.Exception - */ - @Before - public void setUp() throws Exception{ - - } - -// @Test - public void testTextRetrieval() { - System.out.println("Text for entity " + cls + ":"); - Map<String, Double> relevantText = textRetriever.getRelevantText(cls); - System.out.println(Joiner.on("\n").join(relevantText.entrySet())); - } - -// @Test - public void testEntityRelevance() throws Exception { - System.out.println("Relevant entities for entity " + cls + ":"); - Map<Entity, Double> entityRelevance = RelevanceUtils.getRelevantEntities(cls, ontology, relevance); - System.out.println(Joiner.on("\n").join(entityRelevance.entrySet())); - } - - @Test - public void testSemanticIndexAnnotationProperty(){ - semanticIndex = new SimpleSemanticIndex(ontology, syntacticIndex); - semanticIndex.buildIndex(df.getRDFSLabel(), null); -// NamedClass nc = new NamedClass("http://example.com/father#father"); - Set<AnnotatedDocument> documents = semanticIndex.getDocuments(cls); - System.out.println("Documents for " + cls + ":\n" + documents); - } - - @Test - public void testSemanticIndexCorpus(){ - semanticIndex = new SimpleSemanticIndex(ontology, syntacticIndex); - semanticIndex.buildIndex(createDocuments()); - Set<AnnotatedDocument> documents = semanticIndex.getDocuments(cls); - System.out.println(documents); - } - - @Test - public void testISLE() throws Exception { - KnowledgeSource ks = new OWLAPIOntology(ontology); - AbstractReasonerComponent reasoner = new FastInstanceChecker(ks); - reasoner.init(); - - ClassLearningProblem 
lp = new ClassLearningProblem(reasoner); - lp.setClassToDescribe(cls); - lp.init(); - - semanticIndex = new SimpleSemanticIndex(ontology, syntacticIndex); - semanticIndex.buildIndex(createDocuments()); - - relevance = new PMIRelevanceMetric(semanticIndex); - - Map<Entity, Double> entityRelevance = RelevanceUtils.getRelevantEntities(cls, ontology, relevance); - NLPHeuristic heuristic = new NLPHeuristic(entityRelevance); - - ISLE isle = new ISLE(lp, reasoner); - isle.setHeuristic(heuristic); - isle.init(); - - isle.start(); - } - - @Test - public void testEntityLinkingWithLemmatizing() throws Exception { - EntityCandidatesTrie ect = new SimpleEntityCandidatesTrie(new RDFSLabelEntityTextRetriever(ontology), ontology, - new SimpleEntityCandidatesTrie.LemmatizingWordNetNameGenerator(5)); - LinguisticAnnotator linguisticAnnotator = new TrieLinguisticAnnotator(ect); - WordSenseDisambiguation wsd = new SimpleWordSenseDisambiguation(ontology); - EntityCandidateGenerator ecg = new TrieEntityCandidateGenerator(ontology, ect); - SemanticAnnotator semanticAnnotator = new SemanticAnnotator(wsd, ecg, linguisticAnnotator); - - Set<TextDocument> docs = createDocuments(); - for (TextDocument doc : docs) { - AnnotatedDocument annotated = semanticAnnotator.processDocument(doc); - System.out.println(annotated); - } - } - - @Test - public void testEntityLinkingWithSimpleStringMatching() throws Exception { - EntityCandidatesTrie ect = new SimpleEntityCandidatesTrie(new RDFSLabelEntityTextRetriever(ontology), ontology, - new SimpleEntityCandidatesTrie.DummyNameGenerator()); - TrieLinguisticAnnotator linguisticAnnotator = new TrieLinguisticAnnotator(ect); - linguisticAnnotator.setNormalizeWords(false); - WordSenseDisambiguation wsd = new SimpleWordSenseDisambiguation(ontology); - EntityCandidateGenerator ecg = new TrieEntityCandidateGenerator(ontology, ect); - SemanticAnnotator semanticAnnotator = new SemanticAnnotator(wsd, ecg, linguisticAnnotator); - - Set<TextDocument> docs = createDocuments(); - for (TextDocument doc : docs) { - AnnotatedDocument annotated = semanticAnnotator.processDocument(doc); - System.out.println(annotated); - } - } - - @Test - public void compareISLE() throws Exception { - KnowledgeSource ks = new OWLAPIOntology(ontology); - AbstractReasonerComponent reasoner = new FastInstanceChecker(ks); - reasoner.init(); - - ClassLearningProblem lp = new ClassLearningProblem(reasoner); - lp.setClassToDescribe(cls); - lp.init(); - - semanticIndex = new SimpleSemanticIndex(ontology, syntacticIndex, false); - semanticIndex.buildIndex(createDocuments()); - - relevance = new PMIRelevanceMetric(semanticIndex); - - Map<Entity, Double> entityRelevance = RelevanceUtils.getRelevantEntities(cls, ontology, relevance); - NLPHeuristic heuristic = new NLPHeuristic(entityRelevance); - - // run ISLE - ISLE isle = new ISLE(lp, reasoner); - isle.setHeuristic(heuristic); - isle.setSearchTreeFile(testFolder + "searchTreeISLE.txt"); - isle.setWriteSearchTree(true); - isle.setReplaceSearchTree(true); - isle.setTerminateOnNoiseReached(true); - isle.init(); - isle.start(); - - // run standard CELOE as reference - CELOE celoe = new CELOE(lp, reasoner); -// celoe.setHeuristic(heuristic); - celoe.setSearchTreeFile(testFolder + "searchTreeCELOE.txt"); - celoe.setWriteSearchTree(true); - celoe.setTerminateOnNoiseReached(true); - celoe.setReplaceSearchTree(true); - celoe.init(); - celoe.start(); - System.out.println(); - - DecimalFormat df = new DecimalFormat("#00.00"); - System.out.println("Summary ISLE vs. 
CELOE"); - System.out.println("======================"); - System.out.println("accuracy: " + df.format(100*isle.getCurrentlyBestAccuracy())+"% vs. " + df.format(100*celoe.getCurrentlyBestAccuracy())+"%"); - System.out.println("expressions tested: " + isle.getClassExpressionTests() + " vs. " + celoe.getClassExpressionTests()); - System.out.println("search tree nodes: " + isle.getNodes().size() + " vs. " + celoe.getNodes().size()); - System.out.println("runtime: " + Helper.prettyPrintNanoSeconds(isle.getTotalRuntimeNs()) + " vs. " + Helper.prettyPrintNanoSeconds(celoe.getTotalRuntimeNs())); - - // only ISLE -// System.out.println("accuracy: " + df.format(100*isle.getCurrentlyBestAccuracy())+"%"); -// System.out.println("expressions tested: " + isle.getClassExpressionTests()); -// System.out.println("search tree nodes: " + isle.getNodes().size()); -// System.out.println("runtime: " + Helper.prettyPrintNanoSeconds(isle.getTotalRuntimeNs())); - - } - - @Test - public void testWordSenseDisambiguation() throws Exception { - Set<OWLEntity> context = StructuralEntityContext.getContext(ontology, df.getOWLClass(IRI.create(cls.getName()))); - System.out.println(context); - - Set<String> contextNL = StructuralEntityContext.getContextInNaturalLanguage(ontology, df.getOWLClass(IRI.create(cls.getName()))); - System.out.println(contextNL); - } - - -} Copied: trunk/components-core/src/test/java/org/dllearner/algorithms/isle/ISLETestCorpus.java (from rev 4122, trunk/components-core/src/test/java/org/dllearner/algorithms/isle/ISLETest.java) =================================================================== --- trunk/components-core/src/test/java/org/dllearner/algorithms/isle/ISLETestCorpus.java (rev 0) +++ trunk/components-core/src/test/java/org/dllearner/algorithms/isle/ISLETestCorpus.java 2013-10-15 14:46:35 UTC (rev 4123) @@ -0,0 +1,266 @@ +/** + * + */ +package org.dllearner.algorithms.isle; + +import com.google.common.base.Charsets; +import com.google.common.base.Joiner; +import com.google.common.io.Files; +import com.hp.hpl.jena.vocabulary.RDFS; + +import org.dllearner.algorithms.celoe.CELOE; +import org.dllearner.algorithms.isle.index.*; +import org.dllearner.algorithms.isle.index.semantic.SemanticIndex; +import org.dllearner.algorithms.isle.index.semantic.simple.SimpleSemanticIndex; +import org.dllearner.algorithms.isle.index.syntactic.OWLOntologyLuceneSyntacticIndexCreator; +import org.dllearner.algorithms.isle.index.syntactic.SyntacticIndex; +import org.dllearner.algorithms.isle.metrics.PMIRelevanceMetric; +import org.dllearner.algorithms.isle.metrics.RelevanceMetric; +import org.dllearner.algorithms.isle.metrics.RelevanceUtils; +import org.dllearner.algorithms.isle.textretrieval.EntityTextRetriever; +import org.dllearner.algorithms.isle.textretrieval.RDFSLabelEntityTextRetriever; +import org.dllearner.algorithms.isle.wsd.SimpleWordSenseDisambiguation; +import org.dllearner.algorithms.isle.wsd.WordSenseDisambiguation; +import org.dllearner.core.AbstractReasonerComponent; +import org.dllearner.core.KnowledgeSource; +import org.dllearner.core.owl.Entity; +import org.dllearner.core.owl.NamedClass; +import org.dllearner.kb.OWLAPIOntology; +import org.dllearner.learningproblems.ClassLearningProblem; +import org.dllearner.reasoning.FastInstanceChecker; +import org.dllearner.utilities.Helper; +import org.junit.Before; +import org.junit.Test; +import org.semanticweb.owlapi.apibinding.OWLManager; +import org.semanticweb.owlapi.model.IRI; +import org.semanticweb.owlapi.model.OWLDataFactory; +import 
org.semanticweb.owlapi.model.OWLEntity; +import org.semanticweb.owlapi.model.OWLOntology; +import org.semanticweb.owlapi.model.OWLOntologyManager; +import org.semanticweb.owlapi.vocab.OWLRDFVocabulary; + +import uk.ac.manchester.cs.owl.owlapi.OWLDataFactoryImpl; + +import java.io.File; +import java.io.IOException; +import java.text.DecimalFormat; +import java.util.HashSet; +import java.util.Map; +import java.util.Set; + +/** + * Some tests for the ISLE algorithm. + * + * @author Lorenz Buehmann + * @author Jens Lehmann + */ +public class ISLETestCorpus { + + private OWLOntologyManager manager; + private OWLOntology ontology; + private OWLDataFactory df = new OWLDataFactoryImpl(); + private EntityTextRetriever textRetriever; + private RelevanceMetric relevance; + private String searchField = "label"; + private SemanticIndex semanticIndex; + private SyntacticIndex syntacticIndex; + + // we assume that the ontology is named "ontology.owl" and that all text files + // are in a subdirectory called "corpus" + private String testFolder = "../test/isle/swore/"; +// NamedClass cls = new NamedClass("http://example.com/father#father"); + NamedClass cls = new NamedClass("http://ns.softwiki.de/req/CustomerRequirement"); + + /** + * + */ + public ISLETestCorpus() throws Exception{ + manager = OWLManager.createOWLOntologyManager(); + ontology = manager.loadOntologyFromOntologyDocument(new File(testFolder + "ontology.owl")); + textRetriever = new RDFSLabelEntityTextRetriever(ontology); + syntacticIndex = new OWLOntologyLuceneSyntacticIndexCreator(ontology, df.getRDFSLabel(), searchField).buildIndex(); + + + } + + private Set<TextDocument> createDocuments(){ + Set<TextDocument> documents = new HashSet<TextDocument>(); + File folder = new File(testFolder+"corpus/"); + for (File file : folder.listFiles()) { + if(!file.isDirectory() && !file.isHidden()){ + try { + String text = Files.toString(file, Charsets.UTF_8); + documents.add(new TextDocument(text)); + } catch (IOException e) { + e.printStackTrace(); + } + } + } + return documents; + } + + + + /** + * @throws java.lang.Exception + */ + @Before + public void setUp() throws Exception{ + + } + +// @Test + public void testTextRetrieval() { + System.out.println("Text for entity " + cls + ":"); + Map<String, Double> relevantText = textRetriever.getRelevantText(cls); + System.out.println(Joiner.on("\n").join(relevantText.entrySet())); + } + +// @Test + public void testEntityRelevance() throws Exception { + System.out.println("Relevant entities for entity " + cls + ":"); + Map<Entity, Double> entityRelevance = RelevanceUtils.getRelevantEntities(cls, ontology, relevance); + System.out.println(Joiner.on("\n").join(entityRelevance.entrySet())); + } + + @Test + public void testSemanticIndexAnnotationProperty(){ + semanticIndex = new SimpleSemanticIndex(ontology, syntacticIndex); + semanticIndex.buildIndex(df.getRDFSLabel(), null); +// NamedClass nc = new NamedClass("http://example.com/father#father"); + Set<AnnotatedDocument> documents = semanticIndex.getDocuments(cls); + System.out.println("Documents for " + cls + ":\n" + documents); + } + + @Test + public void testSemanticIndexCorpus(){ + semanticIndex = new SimpleSemanticIndex(ontology, syntacticIndex); + semanticIndex.buildIndex(createDocuments()); + Set<AnnotatedDocument> documents = semanticIndex.getDocuments(cls); + System.out.println(documents); + } + + @Test + public void testISLE() throws Exception { + KnowledgeSource ks = new OWLAPIOntology(ontology); + AbstractReasonerComponent reasoner = new 
FastInstanceChecker(ks); + reasoner.init(); + + ClassLearningProblem lp = new ClassLearningProblem(reasoner); + lp.setClassToDescribe(cls); + lp.init(); + + semanticIndex = new SimpleSemanticIndex(ontology, syntacticIndex); + semanticIndex.buildIndex(createDocuments()); + + relevance = new PMIRelevanceMetric(semanticIndex); + + Map<Entity, Double> entityRelevance = RelevanceUtils.getRelevantEntities(cls, ontology, relevance); + NLPHeuristic heuristic = new NLPHeuristic(entityRelevance); + + ISLE isle = new ISLE(lp, reasoner); + isle.setHeuristic(heuristic); + isle.init(); + + isle.start(); + } + + @Test + public void testEntityLinkingWithLemmatizing() throws Exception { + EntityCandidatesTrie ect = new SimpleEntityCandidatesTrie(new RDFSLabelEntityTextRetriever(ontology), ontology, + new SimpleEntityCandidatesTrie.LemmatizingWordNetNameGenerator(5)); + LinguisticAnnotator linguisticAnnotator = new TrieLinguisticAnnotator(ect); + WordSenseDisambiguation wsd = new SimpleWordSenseDisambiguation(ontology); + EntityCandidateGenerator ecg = new TrieEntityCandidateGenerator(ontology, ect); + SemanticAnnotator semanticAnnotator = new SemanticAnnotator(wsd, ecg, linguisticAnnotator); + + Set<TextDocument> docs = createDocuments(); + for (TextDocument doc : docs) { + AnnotatedDocument annotated = semanticAnnotator.processDocument(doc); + System.out.println(annotated); + } + } + + @Test + public void testEntityLinkingWithSimpleStringMatching() throws Exception { + EntityCandidatesTrie ect = new SimpleEntityCandidatesTrie(new RDFSLabelEntityTextRetriever(ontology), ontology, + new SimpleEntityCandidatesTrie.DummyNameGenerator()); + TrieLinguisticAnnotator linguisticAnnotator = new TrieLinguisticAnnotator(ect); + linguisticAnnotator.setNormalizeWords(false); + WordSenseDisambiguation wsd = new SimpleWordSenseDisambiguation(ontology); + EntityCandidateGenerator ecg = new TrieEntityCandidateGenerator(ontology, ect); + SemanticAnnotator semanticAnnotator = new SemanticAnnotator(wsd, ecg, linguisticAnnotator); + + Set<TextDocument> docs = createDocuments(); + for (TextDocument doc : docs) { + AnnotatedDocument annotated = semanticAnnotator.processDocument(doc); + System.out.println(annotated); + } + } + + @Test + public void compareISLE() throws Exception { + KnowledgeSource ks = new OWLAPIOntology(ontology); + AbstractReasonerComponent reasoner = new FastInstanceChecker(ks); + reasoner.init(); + + ClassLearningProblem lp = new ClassLearningProblem(reasoner); + lp.setClassToDescribe(cls); + lp.init(); + + semanticIndex = new SimpleSemanticIndex(ontology, syntacticIndex, false); + semanticIndex.buildIndex(createDocuments()); + + relevance = new PMIRelevanceMetric(semanticIndex); + + Map<Entity, Double> entityRelevance = RelevanceUtils.getRelevantEntities(cls, ontology, relevance); + NLPHeuristic heuristic = new NLPHeuristic(entityRelevance); + + // run ISLE + ISLE isle = new ISLE(lp, reasoner); + isle.setHeuristic(heuristic); + isle.setSearchTreeFile(testFolder + "searchTreeISLE.txt"); + isle.setWriteSearchTree(true); +// isle.setReplaceSearchTree(true); + isle.setTerminateOnNoiseReached(true); + isle.init(); + isle.start(); + + // run standard CELOE as reference + CELOE celoe = new CELOE(lp, reasoner); +// celoe.setHeuristic(heuristic); + celoe.setSearchTreeFile(testFolder + "searchTreeCELOE.txt"); + celoe.setWriteSearchTree(true); + celoe.setTerminateOnNoiseReached(true); + celoe.setReplaceSearchTree(true); + celoe.init(); + celoe.start(); + System.out.println(); + + DecimalFormat df = new 
DecimalFormat("#00.00"); + System.out.println("Summary ISLE vs. CELOE"); + System.out.println("======================"); + System.out.println("accuracy: " + df.format(100*isle.getCurrentlyBestAccuracy())+"% vs. " + df.format(100*celoe.getCurrentlyBestAccuracy())+"%"); + System.out.println("expressions tested: " + isle.getClassExpressionTests() + " vs. " + celoe.getClassExpressionTests()); + System.out.println("search tree nodes: " + isle.getNodes().size() + " vs. " + celoe.getNodes().size()); + System.out.println("runtime: " + Helper.prettyPrintNanoSeconds(isle.getTotalRuntimeNs()) + " vs. " + Helper.prettyPrintNanoSeconds(celoe.getTotalRuntimeNs())); + + // only ISLE +// System.out.println("accuracy: " + df.format(100*isle.getCurrentlyBestAccuracy())+"%"); +// System.out.println("expressions tested: " + isle.getClassExpressionTests()); +// System.out.println("search tree nodes: " + isle.getNodes().size()); +// System.out.println("runtime: " + Helper.prettyPrintNanoSeconds(isle.getTotalRuntimeNs())); + + } + + @Test + public void testWordSenseDisambiguation() throws Exception { + Set<OWLEntity> context = StructuralEntityContext.getContext(ontology, df.getOWLClass(IRI.create(cls.getName()))); + System.out.println(context); + + Set<String> contextNL = StructuralEntityContext.getContextInNaturalLanguage(ontology, df.getOWLClass(IRI.create(cls.getName()))); + System.out.println(contextNL); + } + + +} Added: trunk/test/isle/swore/ontology_with_comments.owl =================================================================== --- trunk/test/isle/swore/ontology_with_comments.owl (rev 0) +++ trunk/test/isle/swore/ontology_with_comments.owl 2013-10-15 14:46:35 UTC (rev 4123) @@ -0,0 +1,2344 @@ +<?xml version="1.0"?> + + +<!DOCTYPE rdf:RDF [ + <!ENTITY req "http://ns.softwiki.de/req/" > + <!ENTITY foaf2 "http://xmlns.com/foaf/0.1/" > + <!ENTITY dcmitype "http://purl.org/dc/dcmitype/" > + <!ENTITY owl "http://www.w3.org/2002/07/owl#" > + <!ENTITY dc "http://purl.org/dc/elements/1.1/" > + <!ENTITY xsd "http://www.w3.org/2001/XMLSchema#" > + <!ENTITY owl2xml "http://www.w3.org/2006/12/owl2-xml#" > + <!ENTITY foaf "http://www.holygoat.co.uk/foaf.rdf#" > + <!ENTITY skos "http://www.w3.org/2004/02/skos/core#" > + <!ENTITY rdfs "http://www.w3.org/2000/01/rdf-schema#" > + <!ENTITY rdf "http://www.w3.org/1999/02/22-rdf-syntax-ns#" > + <!ENTITY tags "http://www.holygoat.co.uk/owl/redwood/0.1/tags/" > +]> + + +<rdf:RDF xmlns="http://ns.softwiki.de/req/" + xml:base="http://ns.softwiki.de/req/" + xmlns:tags="http://www.holygoat.co.uk/owl/redwood/0.1/tags/" + xmlns:dc="http://purl.org/dc/elements/1.1/" + xmlns:foaf2="http://xmlns.com/foaf/0.1/" + xmlns:foaf="http://www.holygoat.co.uk/foaf.rdf#" + xmlns:dcmitype="http://purl.org/dc/dcmitype/" + xmlns:rdfs="http://www.w3.org/2000/01/rdf-schema#" + xmlns:owl2xml="http://www.w3.org/2006/12/owl2-xml#" + xmlns:owl="http://www.w3.org/2002/07/owl#" + xmlns:xsd="http://www.w3.org/2001/XMLSchema#" + xmlns:rdf="http://www.w3.org/1999/02/22-rdf-syntax-ns#" + xmlns:req="http://ns.softwiki.de/req/" + xmlns:skos="http://www.w3.org/2004/02/skos/core#"> + <owl:Ontology rdf:about="http://ns.softwiki.de/req/"> + <rdfs:label rdf:datatype="&xsd;string">SoftWiki Ontology for Requirements Engineering</rdfs:label> + <rdfs:comment rdf:datatype="&xsd;string">A requirements engineering ontology for the SoftWiki project.</rdfs:comment> + <dc:contributor rdf:datatype="&xsd;string">Jens Lehmann</dc:contributor> + <dc:contributor rdf:datatype="&xsd;string">Sebastian Dietzold</dc:contributor> + 
<owl:versionInfo rdf:datatype="&xsd;string">version 1.00 - Thomas Riechert, Steffen Lohmann, Kim Lauenroth, Philipp Heim - starting the next generation of SWORE on 8th of July 2008 in Duisburg +version 0.8 - Sebastian Dietzold - skos, tags and dc alignment (title now functional) +version 0.7 - Sebastian Dietzold - labels completed and namespace correction +version 0.6 - name space changed to ns.softwiki.de/req +version 0.5 - refined by Thomas according to ESWC Poster submission +version 0.4 - refined by Jens on the way home from Essen +version 0.3 - refined by Jens during discussion with Kim and Steffen on 13 March 2007 in Essen +version 0.2 - refined by Thomas and Jens in the evening of 12 March 2007 in Essen +version 0.1 - simple initial version by Thomas and Jens before meeting in Essen</owl:versionInfo> + </owl:Ontology> + + + + <!-- + /////////////////////////////////////////////////////////////////////////////////////// + // + // Annotation properties + // + /////////////////////////////////////////////////////////////////////////////////////// + --> + + <owl:AnnotationProperty rdf:about="&owl;versionInfo"/> + <owl:AnnotationProperty rdf:about="&dc;contributor"/> + <owl:AnnotationProperty rdf:about="&rdfs;label"/> + <owl:AnnotationProperty rdf:about="&rdfs;comment"/> + + + + <!-- + /////////////////////////////////////////////////////////////////////////////////////// + // + // Datatypes + // + /////////////////////////////////////////////////////////////////////////////////////// + --> + + + + + <!-- http://www.w3.org/2001/XMLSchema#dateTime --> + + <rdf:Description rdf:about="&xsd;dateTime"> + <rdfs:label rdf:datatype="&xsd;string">dateTime</rdfs:label> + </rdf:Description> + + + + <!-- http://www.w3.org/2001/XMLSchema#string --> + + <rdf:Description rdf:about="&xsd;string"> + <rdfs:label rdf:datatype="&xsd;string">string</rdfs:label> + </rdf:Description> + + + + <!-- + /////////////////////////////////////////////////////////////////////////////////////// + // + // Object Properties + // + /////////////////////////////////////////////////////////////////////////////////////// + --> + + + + + <!-- http://ns.softwiki.de/req/broader --> + + <owl:ObjectProperty rdf:about="&req;broader"/> + + + + <!-- http://ns.softwiki.de/req/comments --> + + <owl:ObjectProperty rdf:about="&req;comments"> + <rdf:type rdf:resource="&owl;InverseFunctionalProperty"/> + <rdfs:domain rdf:resource="&req;AbstractComment"/> + <rdfs:range rdf:resource="&req;AbstractRequirement"/> + </owl:ObjectProperty> + + + + <!-- http://ns.softwiki.de/req/conflicts --> + + <owl:ObjectProperty rdf:about="&req;conflicts"> + <rdf:type rdf:resource="&owl;SymmetricProperty"/> + <owl:inverseOf rdf:resource="&req;conflicts"/> + <rdfs:subPropertyOf rdf:resource="&req;undirectedrelation"/> + </owl:ObjectProperty> + + + + <!-- http://ns.softwiki.de/req/cui --> + + <owl:ObjectProperty rdf:about="&req;cui"/> + + + + <!-- http://ns.softwiki.de/req/defines --> + + <owl:ObjectProperty rdf:about="&req;defines"> + <rdfs:label rdf:datatype="&xsd;string">defines</rdfs:label> + <rdfs:domain rdf:resource="&req;Author"/> + <rdfs:range> + <owl:Class> + <owl:unionOf rdf:parseType="Collection"> + <rdf:Description rdf:about="&req;AbstractComment"/> + <rdf:Description rdf:about="&req;AbstractRequirement"/> + <rdf:Description rdf:about="&req;Keyword"/> + </owl:unionOf> + </owl:Class> + </rdfs:range> + </owl:ObjectProperty> + + + + <!-- http://ns.softwiki.de/req/definition --> + + <owl:ObjectProperty rdf:about="&req;definition"> + <rdfs:range 
rdf:resource="&xsd;string"/> + </owl:ObjectProperty> + + + + <!-- http://ns.softwiki.de/req/depentsOn --> + + <owl:ObjectProperty rdf:about="&req;depentsOn"> + <rdfs:range rdf:resource="&req;AbstractRequirement"/> + <rdfs:domain rdf:resource="&req;AbstractRequirement"/> + <owl:inverseOf rdf:resource="&req;entails"/> + </owl:ObjectProperty> + + + + <!-- http://ns.softwiki.de/req/details --> + + <owl:ObjectProperty rdf:about="&req;details"> + <rdfs:label rdf:datatype="&xsd;string">details</rdfs:label> + <rdfs:range rdf:resource="&req;AbstractRequirement"/> + <rdfs:domain rdf:resource="&req;AbstractRequirement"/> + </owl:ObjectProperty> + + + + <!-- http://ns.softwiki.de/req/documentation --> + + <owl:ObjectProperty rdf:about="&req;documentation"/> + + + + <!-- http://ns.softwiki.de/req/entails --> + + <owl:ObjectProperty rdf:about="&req;entails"> + <rdfs:domain rdf:resource="&req;AbstractRequirement"/> + <rdfs:range rdf:resource="&req;AbstractRequirement"/> + </owl:ObjectProperty> + + + + <!-- http://ns.softwiki.de/req/invalidates --> + + <owl:ObjectProperty rdf:about="&req;invalidates"> + <rdfs:domain rdf:resource="&req;AbstractRequirement"/> + <rdfs:range rdf:resource="&req;AbstractRequirement"/> + </owl:ObjectProperty> + + + + <!-- http://ns.softwiki.de/req/isCommentedBy --> + + <owl:ObjectProperty rdf:about="&req;isCommentedBy"> + <rdfs:range rdf:resource="&req;AbstractComment"/> + <rdfs:domain rdf:resource="&req;AbstractRequirement"/> + <owl:inverseOf rdf:resource="&req;comments"/> + </owl:ObjectProperty> + + + + <!-- http://ns.softwiki.de/req/isCreatedBy --> + + <owl:ObjectProperty rdf:about="&req;isCreatedBy"> + <rdf:type rdf:resource="&owl;FunctionalProperty"/> + <rdfs:label>is created by</rdfs:label> + <rdfs:comment>specifies the persons who created the requirement</rdfs:comment> + <rdfs:domain rdf:resource="&req;AbstractRequirement"/> + </owl:ObjectProperty> + + + + <!-- http://ns.softwiki.de/req/isDefinedBy --> + + <owl:ObjectProperty rdf:about="&req;isDefinedBy"> + <rdfs:label rdf:datatype="&xsd;string">defined by</rdfs:label> + <rdfs:range rdf:resource="&req;Author"/> + <owl:inverseOf rdf:resource="&req;defines"/> + <rdfs:domain> + <owl:Class> + <owl:unionOf rdf:parseType="Collection"> + <rdf:Description rdf:about="&req;AbstractComment"/> + <rdf:Description rdf:about="&req;AbstractRequirement"/> + <rdf:Description rdf:about="&req;Keyword"/> + </owl:unionOf> + </owl:Class> + </rdfs:domain> + </owl:ObjectProperty> + + + + <!-- http://ns.softwiki.de/req/isDetailedBy --> + + <owl:ObjectProperty rdf:about="&req;isDetailedBy"> + <rdfs:label rdf:datatype="&xsd;string">detailed by</rdfs:label> + <rdfs:domain rdf:resource="&req;AbstractRequirement"/> + <rdfs:range rdf:resource="&req;AbstractRequirement"/> + <owl:inverseOf rdf:resource="&req;details"/> + </owl:ObjectProperty> + + + + <!-- http://ns.softwiki.de/req/isInvalidFor --> + + <owl:ObjectProperty rdf:about="&req;isInvalidFor"> + <rdf:type rdf:resource="&owl;InverseFunctionalProperty"/> + <rdfs:range rdf:resource="&req;AbstractRequirement"/> + <rdfs:domain rdf:resource="&req;AbstractRequirement"/> + <owl:inverseOf rdf:resource="&req;invalidates"/> + </owl:ObjectProperty> + + + + <!-- http://ns.softwiki.de/req/isLeadingTo --> + + <owl:ObjectProperty rdf:about="&req;isLeadingTo"> + <rdfs:label rdf:datatype="&xsd;string">lead to</rdfs:label> + <rdfs:domain rdf:resource="&req;AbstractRequirement"/> + <rdfs:range rdf:resource="&req;AbstractSource"/> + </owl:ObjectProperty> + + + + <!-- http://ns.softwiki.de/req/isRedundant --> + + 
<owl:ObjectProperty rdf:about="&req;isRedundant"> + <rdf:type rdf:resource="&owl;SymmetricProperty"/> + <owl:inverseOf rdf:resource="&req;isRedundant"/> + <rdfs:subPropertyOf rdf:resource="&req;undirectedrelation"/> + </owl:ObjectProperty> + + + + <!-- http://ns.softwiki.de/req/isRelated --> + + <owl:ObjectProperty rdf:about="&req;isRelated"> + <rdf:type rdf:resource="&owl;SymmetricProperty"/> + <owl:inverseOf rdf:resource="&req;isRelated"/> + <rdfs:subPropertyOf rdf:resource="&req;undirectedrelation"/> + </owl:ObjectProperty> + + + + <!-- http://ns.softwiki.de/req/isReleatedTo --> + + <owl:ObjectProperty rdf:about="&req;isReleatedTo"> + <rdfs:range rdf:resource="&req;Customer"/> + <rdfs:domain rdf:resource="&req;CustomerRequirement"/> + </owl:ObjectProperty> + + + + <!-- http://ns.softwiki.de/req/isSimilarTo --> + + <owl:ObjectProperty rdf:about="&req;isSimilarTo"> + <rdf:type rdf:resource="&owl;SymmetricProperty"/> + <rdfs:domain rdf:resource="&req;AbstractRequirement"/> + <rdfs:range rdf:resource="&req;AbstractRequirement"/> + <owl:inverseOf rdf:resource="&req;isSimilarTo"/> + <rdfs:subPropertyOf rdf:resource="&req;undirectedrelation"/> + </owl:ObjectProperty> + + + + <!-- http://ns.softwiki.de/req/leadsTo --> + + <owl:ObjectProperty rdf:about="&req;leadsTo"> + <rdfs:label rdf:datatype="&xsd;string">leads to</rdfs:label> + <rdfs:range rdf:resource="&req;AbstractRequirement"/> + <rdfs:domain rdf:resource="&req;AbstractSource"/> + <owl:inverseOf rdf:resource="&req;isLeadingTo"/> + </owl:ObjectProperty> + + + + <!-- http://ns.softwiki.de/req/rates --> + + <owl:ObjectProperty rdf:about="&req;rates"> + <rdfs:range rdf:resource="&req;AbstractRequirement"/> + <rdfs:domain rdf:resource="&req;Rating"/> + </owl:ObjectProperty> + + + + <!-- http://ns.softwiki.de/req/rational --> + + <owl:ObjectProperty rdf:about="&req;rational"> + <rdfs:label rdf:datatype="&xsd;string">rational</rdfs:label> + <rdfs:range rdf:resource="&xsd;string"/> + </owl:ObjectProperty> + + + + <!-- http://ns.softwiki.de/req/refersTo --> + + <owl:ObjectProperty rdf:about="&req;refersTo"> + <rdfs:label rdf:datatype="&xsd;string">refers to</rdfs:label> + <rdfs:comment xml:lang="de">Relevanter Aspekt eines geplantes Systems (ähnlich zu Tagging).</rdfs:comment> + <rdfs:range rdf:resource="&req;AbstractReferencePoint"/> + <rdfs:domain rdf:resource="&req;AbstractRequirement"/> + <owl:inverseOf rdf:resource="&req;relevantRequirements"/> + </owl:ObjectProperty> + + + + <!-- http://ns.softwiki.de/req/relevantRequirements --> + + <owl:ObjectProperty rdf:about="&req;relevantRequirements"> + <rdfs:label rdf:datatype="&xsd;string">relevant requirements</rdfs:label> + <rdfs:domain rdf:resource="&req;AbstractReferencePoint"/> + <rdfs:range rdf:resource="&req;AbstractRequirement"/> + </owl:ObjectProperty> + + + + <!-- http://ns.softwiki.de/req/result --> + + <owl:ObjectProperty rdf:about="&req;result"> + <rdfs:label rdf:datatype="&xsd;string">result</rdfs:label> + <rdfs:comment xml:lang="de">z.B. 
Veränderung von priority und agreement</rdfs:comment> + <rdfs:range rdf:resource="&xsd;string"/> + </owl:ObjectProperty> + + + + <!-- http://ns.softwiki.de/req/scenarioStep --> + + <owl:ObjectProperty rdf:about="&req;scenarioStep"> + <rdfs:label rdf:datatype="&xsd;string">scenario step</rdfs:label> + <rdfs:comment rdf:datatype="&xsd;string"></rdfs:comment> + <owl:versionInfo rdf:datatype="&xsd;string">TODO: es muss eine konkrete Reihenfolge der Steps gegeben sein (Listenstruktur)</owl:versionInfo> + <rdfs:range rdf:resource="&xsd;string"/> + </owl:ObjectProperty> + + + + <!-- http://ns.softwiki.de/req/specifies --> + + <owl:ObjectProperty rdf:about="&req;specifies"> + <rdfs:range rdf:resource="&req;AbstractRequirement"/> + <rdfs:domain rdf:resource="&req;Topic"/> + </owl:ObjectProperty> + + + + <!-- http://ns.softwiki.de/req/undirectedrelation --> + + <owl:ObjectProperty rdf:about="&req;undirectedrelation"> + <rdf:type rdf:resource="&owl;FunctionalProperty"/> + <rdf:type rdf:resource="&owl;InverseFunctionalProperty"/> + <rdf:type rdf:resource="&owl;SymmetricProperty"/> + <rdfs:comment rdf:datatype="&xsd;string">Rule: only one ration between the same pair of two requirements allowed.</rdfs:comment> + <owl:inverseOf rdf:resource="&req;undirectedrelation"/> + </owl:ObjectProperty> + + + + <!-- http://ns.softwiki.de/req/votes --> + + <owl:ObjectProperty rdf:about="&req;votes"> + <rdfs:range rdf:resource="&req;AbstractRequirement"/> + <rdfs:domain rdf:resource="&req;Stakeholder"/> + </owl:ObjectProperty> + + + + <!-- http://ns.softwiki.de/req/willLeadTo --> + + <owl:ObjectProperty rdf:about="&req;willLeadTo"> + <rdfs:domain rdf:resource="&req;Requirement"/> + <rdfs:range rdf:resource="&req;SystemRequirement"/> + </owl:ObjectProperty> + + + + <!-- http://purl.org/dc/elements/1.1/description --> + + <owl:ObjectProperty rdf:about="&dc;description"> + <rdfs:label rdf:datatype="&xsd;string">description</rdfs:label> + <rdfs:label xml:lang="de">Beschreibung</rdfs:label> + <rdfs:range rdf:resource="&xsd;string"/> + </owl:ObjectProperty> + + + + <!-- http://www.holygoat.co.uk/owl/redwood/0.1/tags/taggedWithTag --> + + <owl:ObjectProperty rdf:about="&tags;taggedWithTag"> + <rdfs:label xml:lang="de">Tags</rdfs:label> + </owl:ObjectProperty> + + + + <!-- + /////////////////////////////////////////////////////////////////////////////////////// + // + // Data properties + // + /////////////////////////////////////////////////////////////////////////////////////// + --> + + + + + <!-- http://ns.softwiki.de/req/averagePriorityRate --> + + <owl:DatatypeProperty rdf:about="&req;averagePriorityRate"> + <rdfs:subPropertyOf rdf:resource="&req;averageRate"/> + </owl:DatatypeProperty> + + + + <!-- http://ns.softwiki.de/req/averageQualityRate --> + + <owl:DatatypeProperty rdf:about="&req;averageQualityRate"> + <rdfs:subPropertyOf rdf:resource="&req;averageRate"/> + </owl:DatatypeProperty> + + + + <!-- http://ns.softwiki.de/req/averageRate --> + + <owl:DatatypeProperty rdf:about="&req;averageRate"> + <rdfs:comment rdf:datatype="&xsd;string">Is calculated by given rates.</rdfs:comment> + <rdfs:domain rdf:resource="&req;AbstractRequirement"/> + <rdfs:range rdf:resource="&xsd;float"/> + </owl:DatatypeProperty> + + + + <!-- http://ns.softwiki.de/req/changeDate --> + + <owl:DatatypeProperty rdf:about="&req;changeDate"> + <rdfs:domain rdf:resource="&req;AbstractRequirement"/> + <rdfs:range rdf:resource="&xsd;dateTime"/> + </owl:DatatypeProperty> + + + + <!-- http://ns.softwiki.de/req/creationDate --> + + 
<owl:DatatypeProperty rdf:about="&req;creationDate"> + <rdfs:domain rdf:resource="&req;AbstractRequirement"/> + <rdfs:range rdf:resource="&xsd;dateTime"/> + </owl:DatatypeProperty> + + + + <!-- http://ns.softwiki.de/req/definition --> + + <owl:DatatypeProperty rdf:about="&req;definition"> + <rdfs:domain rdf:resource="&req;DefinedKeyword"/> + </owl:DatatypeProperty> + + + + <!-- http://ns.softwiki.de/req/rate --> + + <owl:DatatypeProperty rdf:about="&req;rate"> + <rdfs:domain rdf:resource="&req;Rating"/> + <rdfs:range rdf:resource="&xsd;float"/> + </owl:DatatypeProperty> + + + + <!-- http://ns.softwiki.de/req/rational --> + + <owl:DatatypeProperty rdf:about="&req;rational"> + <rdf:type rdf:resource="&owl;FunctionalProperty"/> + <rdfs:label rdf:datatype="&xsd;string">rational</rdfs:label> + </owl:DatatypeProperty> + + + + <!-- http://ns.softwiki.de/req/result --> + + <owl:DatatypeProperty rdf:about="&req;result"> + <rdf:type rdf:resource="&owl;FunctionalProperty"/> + <rdfs:label rdf:datatype="&xsd;string">result</rdfs:label> + <rdfs:comment xml:lang="de">z.B. Veränderung von priority und agreement</rdfs:comment> + </owl:DatatypeProperty> + + + + <!-- http://ns.softwiki.de/req/role --> + + <owl:DatatypeProperty rdf:about="&req;role"> + <rdfs:domain rdf:resource="&req;Author"/> + </owl:DatatypeProperty> + + + + <!-- http://ns.softwiki.de/req/scenarioStep --> + + <owl:DatatypeProperty rdf:about="&req;scenarioStep"> + <rdfs:label rdf:datatype="&xsd;string">scenario step</rdfs:label> + <rdfs:comment rdf:datatype="&xsd;string"></rdfs:comment> + <owl:versionInfo rdf:datatype="&xsd;string">TODO: es muss eine konkrete Reihenfolge der Steps gegeben sein (Listenstruktur)</owl:versionInfo> + <rdfs:domain rdf:resource="&req;TextualScenario"/> + </owl:DatatypeProperty> + + + + <!-- http://ns.softwiki.de/req/state --> + + <owl:DatatypeProperty rdf:about="&req;state"> + <rdfs:domain rdf:resource="&req;AbstractRequirement"/> + <rdfs:range> + <rdfs:Datatype> + <owl:oneOf> + <rdf:Description> + <rdf:type rdf:resource="&rdf;List"/> + <rdf:first rdf:datatype="&xsd;string">isNegativDecided</rdf:first> + <rdf:rest> + <rdf:Description> + <rdf:type rdf:resource="&rdf;List"/> + <rdf:first rdf:datatype="&xsd;string">isPositvDecided</rdf:first> + <rdf:rest rdf:resource="&rdf;nil"/> + </rdf:Description> + </rdf:rest> + </rdf:Description> + </owl:oneOf> + </rdfs:Datatype> + </rdfs:range> + </owl:DatatypeProperty> + + + + <!-- http://ns.softwiki.de/req/voteTime --> + + <owl:DatatypeProperty rdf:about="&req;voteTime"> + <rdfs:label rdf:datatype="&xsd;string">vote time</rdfs:label> + <rdfs:domain rdf:resource="&req;Vote"/> + <rdfs:range rdf:resource="&xsd;dateTime"/> + </owl:DatatypeProperty> + + + + <!-- http://purl.org/dc/elements/1.1/description --> + + <owl:DatatypeProperty rdf:about="&dc;description"> + <rdf:type rdf:resource="&owl;FunctionalProperty"/> + <rdfs:label rdf:datatype="&xsd;string">description</rdfs:label> + <rdfs:label xml:lang="de">Beschreibung</rdfs:label> + <rdfs:domain> + <owl:Class> + <owl:unionOf rdf:parseType="Collection"> + <rdf:Description rdf:about="&req;Goal"/> + <rdf:Description rdf:about="&req;Requirement"/> + </owl:unionOf> + </owl:Class> + </rdfs:domain> + </owl:DatatypeProperty> + + + + <!-- http://purl.org/dc/elements/1.1/title --> + + <owl:DatatypeProperty rdf:about="&dc;title"/> + + + + <!-- + /////////////////////////////////////////////////////////////////////////////////////// + // + // Classes + // + 
/////////////////////////////////////////////////////////////////////////////////////// + --> + + + + + <!-- http://ns.softwiki.de/req/AbstractComment --> + + <owl:Class rdf:about="&req;AbstractComment"> + <rdfs:label>abstract comment</rdfs:label> + </owl:Class> + + + + <!-- http://ns.softwiki.de/req/AbstractReferencePoint --> + + <owl:Class rdf:about="&req;AbstractReferencePoint"> + <rdfs:label rdf:datatype="&xsd;string">reference point</rdfs:label> + <owl:disjointWith rdf:resource="&req;AbstractRequirement"/> + <owl:disjointWith rdf:resource="&req;AbstractSource"/> + <owl:disjointWith rdf:resource="&req;Author"/> + <owl:disjointWith rdf:resource="&req;Vote"/> + </owl:Class> + + + + <!-- http://ns.softwiki.de/req/AbstractRequirement --> + + <owl:Class rdf:about="&req;AbstractRequirement"> + <rdfs:label rdf:datatype="&xsd;string">abstract requirement</rdfs:label> + <rdfs:label xml:lang="de">abstraktes Requirement</rdfs:label> + <owl:disjointWith rdf:resource="&req;AbstractSource"/> + <owl:disjointWith rdf:resource="&req;Author"/> + <owl:disjointWith rdf:resource="&req;Vote"/> + <rdfs:comment rdf:datatype="&xsd;string">Es ist ungünstig, dass Requirement Subklasse von AbstractRequirement ist.</rdfs:comment> + </owl:Class> + + + + <!-- http://ns.softwiki.de/req/AbstractSource --> + + <owl:Class rdf:about="&req;AbstractSource"> + <rdfs:label rdf:datatype="&xsd;string">abstract source</rdfs:label> + <rdfs:subClassOf rdf:resource="&owl;Thing"/> + <owl:disjointWith rdf:resource="&req;Vote"/> + <rdfs:comment rdf:datatype="&xsd;string"></rdfs:comment> + </owl:Class> + + + + <!-- http://ns.softwiki.de/req/AllocatedRequirement --> + + <owl:Class rdf:about="&req;AllocatedRequirement"> + <rdfs:label>allocated requirement</rdfs:label> + <rdfs:subClassOf rdf:resource="&req;Requirement"/> + </owl:Class> + + + + <!-- http://ns.softwiki.de/req/ApplicationPointer --> + + <owl:Class rdf:about="&req;ApplicationPointer"> + <rdfs:label>application pointer</rdfs:label> + <rdfs:subClassOf rdf:resource="&req;AbstractReferencePoint"/> + </owl:Class> + + + + <!-- http://ns.softwiki.de/req/ApplicationState --> + + <owl:Class rdf:about="&req;ApplicationState"> + <rdfs:label>application state</rdfs:label> + <rdfs:subClassOf rdf:resource="&req;AbstractReferencePoint"/> + </owl:Class> + + + + <!-- http://ns.softwiki.de/req/Author --> + + <owl:Class rdf:about="&req;Author"> + <rdfs:label xml:lang="de">Autor</rdfs:label> + <rdfs:label xml:lang="en">author</rdfs:label> + <rdfs:subClassOf rdf:resource="&req;Stakeholder"/> + <owl:disjointWith rdf:resource="&req;Vote"/> + </owl:Class> + + + + <!-- http://ns.softwiki.de/req/Comment --> + + <owl:Class rdf:about="&req;Comment"> + <rdfs:label>comment</rdfs:label> + <rdfs:subClassOf rdf:resource="&req;AbstractComment"/> + </owl:Class> + + + + <!-- http://ns.softwiki.de/req/Creditor --> + + <owl:Class rdf:about="&req;Creditor"> + <rdfs:label>creditor</rdfs:label> + <rdfs:subClassOf rdf:resource="&req;Stakeholder"/> + </owl:Class> + + + + <!-- http://ns.softwiki.de/req/Customer --> + + <owl:Class rdf:about="&req;Customer"> + <rdfs:label>customer</rdfs:label> + <rdfs:subClassOf rdf:resource="&req;Stakeholder"/> + <owl:disjointWith rdf:resource="&req;Programmer"/> + </owl:Class> + + + + <!-- http://ns.softwiki.de/req/CustomerRequirement --> + + <owl:Class rdf:about="&req;CustomerRequirement"> + <rdfs:label>customer requirement</rdfs:label> + <owl:equivalentClass> + <owl:Class> + <owl:intersectionOf rdf:parseType="Collection"> + <rdf:Description rdf:about="&req;Requirement"/> + 
<owl:Restriction> + <owl:onProperty rdf:resource="&req;isCreatedBy"/> + <owl:someValuesFrom rdf:resource="&req;Customer"/> + </owl:Restriction> + </owl:intersectionOf> + </owl:Class> + </owl:equivalentClass> + <rdfs:subClassOf rdf:resource="&req;Requirement"/> + <rdfs:comment>A customer requirement is a requirement, which was created by a customer.</rdfs:comment> + </owl:Class> + + + + <!-- http://ns.softwiki.de/req/DefinedKeyword --> + + <owl:Class rdf:about="&req;DefinedKeyword"> + <rdfs:label>defined keyword</rdfs:label> + <rdfs:subClassOf rdf:resource="&req;Keyword"/> + </owl:Class> + + + + <!-- http://ns.softwiki.de/req/DerivedRequirement --> + + <owl:Class rdf:about="&req;DerivedRequirement"> + <rdfs:label>derived requirement</rdfs:label> + <rdfs:subClassOf rdf:resource="&req;Requirement"/> + </owl:Class> + + + + <!-- http://ns.softwiki.de/req/DesignRequirement --> + + <owl:Class rdf:about="&req;DesignRequirement"> + <rdfs:label>design requirement</rdfs:label> + <rdfs:subClassOf rdf:resource="&req;Requirement"/> + </owl:Class> + + + + <!-- http://ns.softwiki.de/req/Document --> + + <owl:Class rdf:about="&req;Document"> + <rdfs:label>document</rdfs:label> + <owl:equivalentClass> + <owl:Class> + <owl:intersectionOf rdf:parseType="Collection"> + <rdf:Description rdf:about="&req;AbstractSource"/> + <owl:Restriction> + <owl:onProperty rdf:resource="&req;leadsTo"/> + <owl:someValuesFrom rdf:resource="&req;AbstractRequirement"/> + </owl:Restriction> + </owl:intersectionOf> + </owl:Class> + </owl:equivalentClass> + <rdfs:subClassOf rdf:resource="&req;AbstractSource"/> + </owl:Class> + + + + <!-- http://ns.softwiki.de/req/FunctionalRequirement --> + + <owl:Class rdf:about="&req;FunctionalRequirement"> + <rdfs:label rdf:datatype="&xsd;string">functional requirement</rdfs:label> + <rdfs:subClassOf rdf:resource="&req;Requirement"/> + <owl:disjointWith rdf:resource="&req;QualityRequirement"/> + <rdfs:comment rdf:datatype="&xsd;string">refers to functional reference point, for instance components of the system</rdfs:comment> + </owl:Class> + + + + <!-- http://ns.softwiki.de/req/Goal --> + + <owl:Class rdf:about="&req;Goal"> + <rdfs:label rdf:datatype="&xsd;string">goal</rdfs:label> + <rdfs:label xml:lang="de">Ziel</rdfs:label> + <rdfs:subClassOf rdf:resource="&req;AbstractRequirement"/> + <rdfs:subClassOf> + <owl:Restriction> + <owl:onProperty rdf:resource="&dc;description"/> + <owl:cardinality rdf:datatype="&xsd;nonNegativeInteger">1</owl:cardinality> + </owl:Restriction> + </rdfs:subClassOf> + <owl:disjointWith rdf:resource="&req;Requirement"/> + <owl:disjointWith rdf:resource="&req;Scenario"/> + </owl:Class> + + + + <!-- http://ns.softwiki.de/req/Government --> + + <owl:Class rdf:about="&req;Government"> + <rdfs:label>government</rdfs:label> + <rdfs:subClassOf rdf:resource="&req;Stakeholder"/> + </owl:Class> + + + + <!-- http://ns.softwiki.de/req/Keyword --> + + <owl:Class rdf:about="&req;Keyword"> + <rdfs:label>keyword</rdfs:label> + <rdfs:subClassOf rdf:resource="&req;AbstractReferencePoint"/> + </owl:Class> + + + + <!-- http://ns.softwiki.de/req/PerformanceRequirement --> + + <owl:Class rdf:about="&req;PerformanceRequirement"> + <rdfs:label>performance requirement</rdfs:label> + <owl:equivalentClass> + <owl:Class> + <owl:intersectionOf rdf:parseType="Collection"> + <rdf:Description rdf:about="&req;Requirement"/> + <owl:Restriction> + <owl:onProperty rdf:resource="&req;willLeadTo"/> + <owl:someValuesFrom rdf:resource="&req;SystemRequirement"/> + </owl:Restriction> + </owl:intersectionOf> + 
</owl:Class> + </owl:equivalentClass> + <rdfs:subClassOf rdf:resource="&req;Requirement"/> + <owl:disjointWith rdf:resource="&req;SystemRequirement"/> + </owl:Class> + + + + <!-- http://ns.softwiki.de/req/PriorityRating --> + + <owl:Class rdf:about="&req;PriorityRating"> + <rdfs:label>priority rating</rdfs:label> + <rdfs:subClassOf rdf:resource="&req;Rating"/> + <rdfs:comment rdf:datatype="&xsd;string">Rule: Every Author only defines at most one rating about the priority for each requirement.</rdfs:comment> + </owl:Class> + + + + <!-- http://ns.softwiki.de/req/Programmer --> + + <owl:Class rdf:about="&req;Programmer"> + <rdfs:label>programmer</rdfs:label> + <rdfs:subClassOf rdf:resource="&req;Stakeholder"/> + </owl:Class> + + + + <!-- http://ns.softwiki.de/req/QualityRating --> + + <owl:Class rdf:about="&req;QualityRating"> + <rdfs:label>quality rating</rdfs:label> + <rdfs:subClassOf rdf:resource="&req;Rating"/> + <rdfs:comment rdf:datatype="&xsd;string">Rule: Every Author only defines at most one rating about the quality for each requirement.</rdfs:comment> + </owl:Class> + + + + <!-- http://ns.softwiki.de/req/QualityRequirement --> + + <owl:Class rdf:about="&req;QualityRequirement"> + <rdfs:label rdf:datatype="&xsd;string">quality requirement</rdfs:label> + <rdfs:subClassOf rdf:resource="&req;Requirement"/> + <rdfs:comment rdf:datatype="&xsd;string">refers to quality reference point, e.g. reliability, performance, usability</rdfs:comment> + </owl:Class> + + + + <!-- http://ns.softwiki.de/req/Rating --> + + <owl:Class rdf:about="&req;Rating"> + <rdfs:label>rating</rdfs:label> + <rdfs:subClassOf rdf:resource="&req;AbstractComment"/> + </owl:Class> + + + + <!-- http://ns.softwiki.de/req/Requirement --> + + <owl:Class rdf:about="&req;Requirement"> + <rdfs:label rdf:datatype="&xsd;string">requirement</rdfs:label> + <rdfs:label xml:lang="de">Anforderung(en)</rdfs:label> + <rdfs:subClassOf rdf:resource="&req;AbstractRequirement"/> + <rdfs:subClassOf> + <owl:Restriction> + <owl:onProperty rdf:resource="&dc;description"/> + <owl:cardinality rdf:datatype="&xsd;nonNegativeInteger">1</owl:cardinality> + </owl:Restriction> + </rdfs:subClassOf> + <owl:disjointWith rdf:resource="&req;Scenario"/> + <rdfs:comment rdf:datatype="&xsd;string"></rdfs:comment> + <owl:versionInfo rdf:datatype="&xsd;string">TODO: semantische Verfeinerung geplant, d.h. Anforderungen nicht nur als Textstring, sondern z.B. 
als RDF-Triple formulieren</owl:versionInfo> + </owl:Class> + + + + <!-- http://ns.softwiki.de/req/Scenario --> + + <owl:Class rdf:about="&req;Scenario"> + <rdfs:label rdf:datatype="&xsd;string">scenario</rdfs:label> + <rdfs:subClassOf rdf:resource="&req;AbstractRequirement"/> + </owl:Class> + + + + <!-- http://ns.softwiki.de/req/SeniorManagementStaff --> + + <owl:Class rdf:about="&req;SeniorManagementStaff"> + <rdfs:label>senior management staff</rdfs:label> + <rdfs:subClassOf rdf:resource="&req;Stakeholder"/> + </owl:Class> + + + + <!-- http://ns.softwiki.de/req/Stakeholder --> + + <owl:Class rdf:about="&req;Stakeholder"> + <rdfs:label>stakeholder</rdfs:label> + <rdfs:subClassOf rdf:resource="&req;AbstractSource"/> + </owl:Class> + + + + <!-- http://ns.softwiki.de/req/SystemRequirement --> + + <owl:Class rdf:about="&req;SystemRequirement"> + <rdfs:label>system requirement</rdfs:label> + <rdfs:subClassOf rdf:resource="&owl;Thing"/> + </owl:Class> + + + + <!-- http://ns.softwiki.de/req/TextualScenario --> + + <owl:Class rdf:about="&req;TextualScenario"> + <rdfs:label rdf:datatype="&xsd;string">textual scenario</rdfs:label> + <rdfs:subClassOf rdf:resource="&req;Scenario"/> + <rdfs:subClassOf> + <owl:Restriction> + <owl:onProperty rdf:resource="&req;scenarioStep"/> + <owl:minCardinality rdf:datatype="&xsd;nonNegativeInteger">1</owl:minCardinality> + </owl:Restriction> + </rdfs:subClassOf> + <rdfs:comment xml:lang="de">Szenario, welches aus mehreren textuell beschriebenen Szenarioschritten besteht.</rdfs:comment> + </owl:Class> + + + + <!-- http://ns.softwiki.de/req/Topic --> + + <owl:Class rdf:about="&req;Topic"> + <rdfs:label>topic</rdfs:label> + <rdfs:subClassOf rdf:resource="&req;DefinedKeyword"/> + <rdfs:comment rdf:datatype="&xsd;string">Rule: Every Requirement refers to exact one topic.</rdfs:comment> + </owl:Class> + + + + <!-- http://ns.softwiki.de/req/TradeUnion --> + + <owl:Class rdf:about="&req;TradeUnion"> + <rdfs:label>trade union</rdfs:label> + <rdfs:subClassOf rdf:resource="&req;Stakeholder"/> + </owl:Class> + + + + <!-- http://ns.softwiki.de/req/Vote --> + + <owl:Class rdf:about="&req;Vote"> + <rdfs:label rdf:datatype="&xsd;string">vote</rdfs:label> + <rdfs:subClassOf rdf:resource="&req;AbstractComment"/> + <rdfs:comment rdf:datatype="&xsd;string">Rule: Every Author only votes at most one requirement.</rdfs:comment> + </owl:Class> + + + + <!-- http://purl.org/dc/dcmitype/Image --> + + <owl:Class rdf:about="&dcmitype;Image"> + <rdfs:label>image</rdfs:label> + </owl:Class> + + + + <!-- http://www.w3.org/2000/01/rdf-schema#Resource --> + + <owl:Class rdf:about="&rdfs;Resource"> + <rdfs:label>resource</rdfs:label> + </owl:Class> + + + + <!-- http://www.w3.org/2001/XMLSchema#string --> + + <owl:Class rdf:about="&xsd;string"> + <rdfs:label rdf:datatype="&xsd;string">string</rdfs:label> + </owl:Class> + + + + <!-- http://www.w3.org/2002/07/owl#Datatype --> + + <owl:Class rdf:about="&owl;Datatype"/> + + + + <!-- http://www.w3.org/2002/07/owl#Thing --> + + <owl:Class rdf:about="&owl;Thing"/> + + + + <!-- http://www.w3.org/2004/02/skos/core#Concept --> + + <owl:Class rdf:about="&skos;Concept"> + <rdfs:label>concept</rdfs:label> + <rdfs:label xml:lang="de">Thema</rdfs:label> + </owl:Class> + + + + <!-- http://xmlns.com/foaf/0.1/Document --> + + <owl:Class rdf:about="&foaf2;Document"> + <rdfs:label>document</rdfs:label> + </owl:Class> + + + + <!-- + /////////////////////////////////////////////////////////////////////////////////////// + // + // Individuals + // + 
/////////////////////////////////////////////////////////////////////////////////////// + --> + + + + + <!-- http://ns.softwiki.de/req/1 --> + + <owl:Thing rdf:about="&req;1"> + <rdf:type rdf:resource="&req;QualityRating"/> + <rdf:type rdf:resource="&owl;NamedIndividual"/> ... [truncated message content] |
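As an aside on the ontology above: CustomerRequirement is a defined class, equivalent to the intersection of Requirement and (isCreatedBy some Customer), and it is exactly this class that the ISLE tests in the next commit try to (re-)describe. The following is only a minimal sketch, assuming the OWL API version already used in this repository; the class and property IRIs are taken from the ontology, everything else is illustrative. It shows how the same equivalent-classes axiom can be built programmatically:

    import org.semanticweb.owlapi.apibinding.OWLManager;
    import org.semanticweb.owlapi.model.*;

    public class CustomerRequirementAxiomSketch {
        public static void main(String[] args) {
            String ns = "http://ns.softwiki.de/req/";
            OWLOntologyManager man = OWLManager.createOWLOntologyManager();
            OWLDataFactory df = man.getOWLDataFactory();

            // entities taken from the SWORE ontology above
            OWLClass customerRequirement = df.getOWLClass(IRI.create(ns + "CustomerRequirement"));
            OWLClass requirement = df.getOWLClass(IRI.create(ns + "Requirement"));
            OWLClass customer = df.getOWLClass(IRI.create(ns + "Customer"));
            OWLObjectProperty isCreatedBy = df.getOWLObjectProperty(IRI.create(ns + "isCreatedBy"));

            // CustomerRequirement EquivalentTo: Requirement and (isCreatedBy some Customer)
            OWLClassExpression definition = df.getOWLObjectIntersectionOf(
                    requirement,
                    df.getOWLObjectSomeValuesFrom(isCreatedBy, customer));
            OWLAxiom axiom = df.getOWLEquivalentClassesAxiom(customerRequirement, definition);
            System.out.println(axiom);
        }
    }

A class learning algorithm such as ISLE or CELOE, given CustomerRequirement as the class to describe, should ideally recover a class expression close to this definition.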
From: <jen...@us...> - 2013-10-22 12:07:38
|
Revision: 4124 http://sourceforge.net/p/dl-learner/code/4124 Author: jenslehmann Date: 2013-10-22 12:07:35 +0000 (Tue, 22 Oct 2013) Log Message: ----------- unit test for ISLE without external corpus Modified Paths: -------------- trunk/test/isle/swore/ontology.owl Added Paths: ----------- trunk/components-core/src/test/java/org/dllearner/algorithms/isle/ISLETestNoCorpus.java Added: trunk/components-core/src/test/java/org/dllearner/algorithms/isle/ISLETestNoCorpus.java =================================================================== --- trunk/components-core/src/test/java/org/dllearner/algorithms/isle/ISLETestNoCorpus.java (rev 0) +++ trunk/components-core/src/test/java/org/dllearner/algorithms/isle/ISLETestNoCorpus.java 2013-10-22 12:07:35 UTC (rev 4124) @@ -0,0 +1,87 @@ +package org.dllearner.algorithms.isle; + +import java.io.File; +import java.util.Map; + +import org.dllearner.algorithms.isle.index.semantic.SemanticIndex; +import org.dllearner.algorithms.isle.index.semantic.simple.SimpleSemanticIndex; +import org.dllearner.algorithms.isle.index.syntactic.OWLOntologyLuceneSyntacticIndexCreator; +import org.dllearner.algorithms.isle.index.syntactic.SyntacticIndex; +import org.dllearner.algorithms.isle.metrics.PMIRelevanceMetric; +import org.dllearner.algorithms.isle.metrics.RelevanceMetric; +import org.dllearner.algorithms.isle.metrics.RelevanceUtils; +import org.dllearner.algorithms.isle.textretrieval.EntityTextRetriever; +import org.dllearner.algorithms.isle.textretrieval.RDFSLabelEntityTextRetriever; +import org.dllearner.core.AbstractReasonerComponent; +import org.dllearner.core.KnowledgeSource; +import org.dllearner.core.owl.Entity; +import org.dllearner.core.owl.NamedClass; +import org.dllearner.kb.OWLAPIOntology; +import org.dllearner.learningproblems.ClassLearningProblem; +import org.dllearner.reasoning.FastInstanceChecker; +import org.junit.Test; +import org.semanticweb.owlapi.apibinding.OWLManager; +import org.semanticweb.owlapi.model.OWLDataFactory; +import org.semanticweb.owlapi.model.OWLOntology; +import org.semanticweb.owlapi.model.OWLOntologyManager; +import org.semanticweb.owlapi.vocab.OWLRDFVocabulary; + +import uk.ac.manchester.cs.owl.owlapi.OWLDataFactoryImpl; + +public class ISLETestNoCorpus { + + private OWLOntologyManager manager; + private OWLOntology ontology; + private OWLDataFactory df = new OWLDataFactoryImpl(); + private EntityTextRetriever textRetriever; + private RelevanceMetric relevance; + private String searchField = "label"; + private SemanticIndex semanticIndex; + private SyntacticIndex syntacticIndex; + + // we assume that the ontology is named "ontology.owl" and that all text files + // are in a subdirectory called "corpus" + private String testFolder = "../test/isle/swore/"; +// NamedClass cls = new NamedClass("http://example.com/father#father"); + NamedClass cls = new NamedClass("http://ns.softwiki.de/req/CustomerRequirement"); + + public ISLETestNoCorpus() throws Exception{ + manager = OWLManager.createOWLOntologyManager(); + ontology = manager.loadOntologyFromOntologyDocument(new File(testFolder + "ontology.owl")); + textRetriever = new RDFSLabelEntityTextRetriever(ontology); + syntacticIndex = new OWLOntologyLuceneSyntacticIndexCreator(ontology, df.getRDFSLabel(), searchField).buildIndex(); + + + } + + // uses the rdfs:label, rdfs:comment (or other properties) of the class directly instead of an external corpus + @Test + public void testISLENoCorpus() throws Exception { + KnowledgeSource ks = new OWLAPIOntology(ontology); + 
AbstractReasonerComponent reasoner = new FastInstanceChecker(ks); + reasoner.init(); + + ClassLearningProblem lp = new ClassLearningProblem(reasoner); + lp.setClassToDescribe(cls); + lp.init(); + + semanticIndex = new SimpleSemanticIndex(ontology, syntacticIndex); + semanticIndex.buildIndex(df.getOWLAnnotationProperty(OWLRDFVocabulary.RDFS_COMMENT.getIRI()), "en"); + +// semanticIndex = new SimpleSemanticIndex(ontology, syntacticIndex); +// semanticIndex.buildIndex(createDocuments()); + + relevance = new PMIRelevanceMetric(semanticIndex); + + Map<Entity, Double> entityRelevance = RelevanceUtils.getRelevantEntities(cls, ontology, relevance); + NLPHeuristic heuristic = new NLPHeuristic(entityRelevance); + + ISLE isle = new ISLE(lp, reasoner); + isle.setHeuristic(heuristic); + isle.init(); + + isle.start(); + } + + +} Modified: trunk/test/isle/swore/ontology.owl =================================================================== --- trunk/test/isle/swore/ontology.owl 2013-10-15 14:46:35 UTC (rev 4123) +++ trunk/test/isle/swore/ontology.owl 2013-10-22 12:07:35 UTC (rev 4124) @@ -669,7 +669,7 @@ <!-- http://ns.softwiki.de/req/CustomerRequirement --> <owl:Class rdf:about="&req;CustomerRequirement"> - <rdfs:label>customer requirement</rdfs:label> + <rdfs:label>customer requirement (a requirement created by a customer)</rdfs:label> <owl:equivalentClass> <owl:Class> <owl:intersectionOf rdf:parseType="Collection"> This was sent by the SourceForge.net collaborative development platform, the world's largest Open Source development site. |
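In the test above, relevance between the target class and the other ontology entities is computed by PMIRelevanceMetric over a semantic index built from the ontology's own rdfs:comment annotations (language "en") instead of an external text corpus, and the resulting entityRelevance map is handed to NLPHeuristic to bias ISLE's search. The snippet below is only an illustration of the pointwise-mutual-information idea behind such a score; the CountIndex interface is hypothetical and not DL-Learner's actual SemanticIndex API:

    public class PmiSketch {

        /** Hypothetical minimal index view: how many indexed documents mention an entity or a pair of entities. */
        public interface CountIndex {
            long totalDocuments();
            long documentsWith(String entityIri);
            long documentsWithBoth(String entityIriA, String entityIriB);
        }

        /**
         * Pointwise mutual information of two entities, estimated from document counts:
         * PMI(A, B) = log( p(A, B) / (p(A) * p(B)) ). Higher values mean stronger association.
         */
        public static double pmi(CountIndex index, String a, String b) {
            double n = index.totalDocuments();
            double pA = index.documentsWith(a) / n;
            double pB = index.documentsWith(b) / n;
            double pAB = index.documentsWithBoth(a, b) / n;
            if (pA == 0 || pB == 0 || pAB == 0) {
                return 0.0; // no evidence of co-occurrence
            }
            return Math.log(pAB / (pA * pB));
        }
    }

Entities whose annotation text frequently co-occurs with that of the target class receive higher scores, which is presumably why the accompanying ontology.owl change extends the rdfs:label of CustomerRequirement with a short descriptive phrase.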