remove rdf - not needed for runtime work

This commit is contained in:
parent dc8708b8a8
commit 4657c4a810
@@ -1,87 +0,0 @@
<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
  xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
  <modelVersion>4.0.0</modelVersion>

  <parent>
    <groupId>org.hl7.fhir</groupId>
    <artifactId>org.hl7.fhir.core</artifactId>
    <version>3.7.0-SNAPSHOT</version>
    <relativePath>../pom.xml</relativePath>
  </parent>

  <artifactId>org.hl7.fhir.rdf</artifactId>
  <packaging>jar</packaging>

  <repositories>
    <repository>
      <id>bintray-labra</id>
      <url>https://dl.bintray.com/labra/maven/</url>
    </repository>
  </repositories>

  <dependencies>

    <!-- HAPI Dependencies -->
    <dependency>
      <groupId>ca.uhn.hapi.fhir</groupId>
      <artifactId>hapi-fhir-base</artifactId>
      <version>${project.version}</version>
    </dependency>
    <dependency>
      <groupId>org.hl7.fhir</groupId>
      <artifactId>org.hl7.fhir.utilities</artifactId>
      <version>${project.version}</version>
    </dependency>

    <!-- JENA -->
    <dependency>
      <groupId>org.apache.jena</groupId>
      <artifactId>jena-core</artifactId>
      <version>3.10.0</version>
      <optional>true</optional>
    </dependency>
    <dependency>
      <groupId>org.apache.jena</groupId>
      <artifactId>jena-arq</artifactId>
      <version>3.10.0</version>
      <optional>true</optional>
      <exclusions>
        <exclusion>
          <groupId>com.github.jsonld-java</groupId>
          <artifactId>jsonld-java</artifactId>
        </exclusion>
      </exclusions>
    </dependency>
    <!--
    <dependency>
      <groupId>com.github.jsonld-java</groupId>
      <artifactId>jsonld-java</artifactId>
      <version>0.12.3</version>
      <type>bundle</type>
      <optional>true</optional>
    </dependency>
    -->

    <!-- Shaclex -->
    <dependency><groupId>org.scala-lang</groupId><artifactId>scala-compiler</artifactId><version>2.12.1</version></dependency>
    <dependency><groupId>org.scala-lang</groupId><artifactId>scala-library</artifactId><version>2.12.1</version></dependency>
    <dependency><groupId>es.weso</groupId><artifactId>schema_2.12</artifactId><version>0.0.60</version></dependency>
    <dependency><groupId>es.weso</groupId><artifactId>shacl_2.12</artifactId><version>0.0.60</version></dependency>
    <dependency><groupId>es.weso</groupId><artifactId>shex_2.12</artifactId><version>0.0.60</version></dependency>
    <dependency><groupId>es.weso</groupId><artifactId>manifest_2.12</artifactId><version>0.0.60</version></dependency>
    <dependency><groupId>es.weso</groupId><artifactId>srdfjena_2.12</artifactId><version>0.0.60</version></dependency>
    <dependency><groupId>es.weso</groupId><artifactId>srdf_2.12</artifactId><version>0.0.60</version></dependency>
    <dependency><groupId>es.weso</groupId><artifactId>utils_2.12</artifactId><version>0.0.60</version></dependency>
    <dependency><groupId>es.weso</groupId><artifactId>converter_2.12</artifactId><version>0.0.60</version></dependency>
    <dependency><groupId>es.weso</groupId><artifactId>rbe_2.12</artifactId><version>0.0.60</version></dependency>
    <dependency><groupId>es.weso</groupId><artifactId>typing_2.12</artifactId><version>0.0.60</version></dependency>
    <dependency><groupId>es.weso</groupId><artifactId>validating_2.12</artifactId><version>0.0.60</version></dependency>
    <dependency><groupId>es.weso</groupId><artifactId>server_2.12</artifactId><version>0.0.60</version></dependency>

  </dependencies>

</project>
@@ -1,102 +0,0 @@
package org.hl7.fhir.rdf;

import java.util.List;

import org.apache.jena.datatypes.xsd.XSDDatatype;
import org.apache.jena.rdf.model.Model;
import org.apache.jena.rdf.model.Property;
import org.apache.jena.rdf.model.Resource;
import org.apache.jena.rdf.model.ResourceFactory;
import org.apache.jena.vocabulary.DC;
import org.apache.jena.vocabulary.RDF;
import org.apache.jena.vocabulary.RDFS;
import org.hl7.fhir.utilities.Utilities;


/**
 * FHIR wrapper class for RDF Resource
 */
public class FHIRResource {
  public Resource resource;

  /**
   * Construct a named resource in the FHIR namespace
   * @param model RDF model that contains the resource
   * @param name Resource name
   * @param label Resource label
   */
  FHIRResource(Model model, String name, String label) {
    resource = model.createResource(RDFNamespace.FHIR.uriFor(name));
    addLabel(label);
  }

  /**
   * Create an anonymous resource (aka. BNode)
   */
  FHIRResource(Model model) {
    resource = model.createResource();
  }

  /**
   * Create a list resource
   * @param members Members of the list
   */
  FHIRResource(Model model, List<Resource> members) {
    resource = model.createList(members.iterator());
  }

  public FHIRResource addObjectProperty(Property p, Resource o) {
    resource.addProperty(p, o);
    return this;
  }

  public FHIRResource addObjectProperty(Property p, FHIRResource o) {
    return addObjectProperty(p, o.resource);
  }

  public FHIRResource addDataProperty(Property p, String o) {
    resource.addLiteral(p, o);
    return this;
  }

  public FHIRResource addDataProperty(Property p, String o, XSDDatatype dt) {
    resource.addLiteral(p, ResourceFactory.createTypedLiteral(o, dt));
    return this;
  }

  public FHIRResource addType(Resource type) {
    return addObjectProperty(RDF.type, type);
  }

  private FHIRResource addLabel(String name) {
    return addDataProperty(RDFS.label, name);
  }

  public FHIRResource addTitle(String title) {
    if (!Utilities.noString(title))
      addDataProperty(DC.title, title);
    return this;
  }

  public FHIRResource addDefinition(String definition) {
    if (!Utilities.noString(definition) && (!resource.hasProperty(DC.title) ||
        !resource.getProperty(DC.title).toString().equals(definition))) {
      addDataProperty(RDFS.comment, definition);
    }
    return this;
  }

  public FHIRResource domain(FHIRResource d) {
    return addObjectProperty(RDFS.domain, d);
  }

  public FHIRResource range(Resource r) {
    return addObjectProperty(RDFS.range, r);
  }

  public FHIRResource restriction(Resource restriction) {
    return addObjectProperty(RDFS.subClassOf, restriction);
  }

}
@@ -1,261 +0,0 @@
package org.hl7.fhir.rdf;
|
||||
|
||||
import java.io.OutputStream;
|
||||
import java.util.List;
|
||||
|
||||
import org.apache.jena.datatypes.xsd.XSDDatatype;
|
||||
import org.apache.jena.rdf.model.Model;
|
||||
import org.apache.jena.rdf.model.ModelFactory;
|
||||
import org.apache.jena.rdf.model.Resource;
|
||||
import org.apache.jena.riot.RDFDataMgr;
|
||||
import org.apache.jena.riot.RDFFormat;
|
||||
import org.apache.jena.vocabulary.OWL2;
|
||||
import org.apache.jena.vocabulary.RDF;
|
||||
import org.apache.jena.vocabulary.RDFS;
|
||||
|
||||
|
||||
public class FHIRResourceFactory {
|
||||
private Model model;
|
||||
|
||||
public FHIRResourceFactory() {
|
||||
model = ModelFactory.createDefaultModel();
|
||||
RDFNamespace.addFHIRNamespaces(model);
|
||||
}
|
||||
|
||||
/**
|
||||
* Serialize the namespace instance in RDF Turtle
|
||||
* @param writer
|
||||
*/
|
||||
public void serialize(OutputStream writer) {
|
||||
RDFDataMgr.write(writer, model, RDFFormat.TURTLE_PRETTY);
|
||||
}
|
||||
|
||||
|
||||
/**
|
||||
* Add a new datatype to the model
|
||||
* @param d resource to declare as a datatype
|
||||
* @return Resource in context of model
|
||||
*/
|
||||
public Resource add_datatype(Resource d) {
|
||||
return model.createResource(d.getURI()).addProperty(RDF.type, RDFS.Datatype);
|
||||
}
|
||||
|
||||
/**
|
||||
* Merge RDF from a different model
|
||||
*/
|
||||
public void merge_rdf(Model m) {
|
||||
model.add(m);
|
||||
}
|
||||
|
||||
/* =========================================================
|
||||
* FHIRResource factory methods
|
||||
* ========================================================= */
|
||||
|
||||
/**
|
||||
* Create a Resource in the FHIR namespace
|
||||
* @param name resource name
|
||||
* @param type resource type
|
||||
* @param label resource label
|
||||
* @return
|
||||
*/
|
||||
public FHIRResource fhir_resource(String name, Resource type, String label) {
|
||||
return new FHIRResource(model, name, label).addType(type);
|
||||
}
|
||||
|
||||
/**
|
||||
* Create an OWL Ontology in the FHIR namespace
|
||||
*
|
||||
* @param name Ontology name
|
||||
* @param label Ontology label
|
||||
* @return Ontology resource
|
||||
*/
|
||||
public FHIRResource fhir_ontology(String name, String label) {
|
||||
return fhir_resource(name, OWL2.Ontology, label);
|
||||
}
|
||||
|
||||
/**
|
||||
* Create an anonymous resource in the FHIR namespace
|
||||
*/
|
||||
public FHIRResource fhir_bnode() {
|
||||
return new FHIRResource(model);
|
||||
}
|
||||
|
||||
/**
|
||||
* Create a new Class in the FHIR namespace
|
||||
* @param name class name and label
|
||||
* @return
|
||||
*/
|
||||
public FHIRResource fhir_class(String name) {
|
||||
return fhir_resource(name, OWL2.Class, name);
|
||||
}
|
||||
|
||||
/**
|
||||
* Create a new Class in the FHIR namespace
|
||||
* @param name class name and label
|
||||
* @param superClass superclass name
|
||||
* @return
|
||||
*/
|
||||
public FHIRResource fhir_class(String name, Resource superClass) {
|
||||
return fhir_class(name).addObjectProperty(RDFS.subClassOf, superClass);
|
||||
}
|
||||
|
||||
/**
|
||||
* Create a new Class in the FHIR namespace
|
||||
* @param name class name and label
|
||||
* @param superClass superclass
|
||||
* @return
|
||||
*/
|
||||
public FHIRResource fhir_class(String name, String superClass) {
|
||||
return fhir_class(name, RDFNamespace.FHIR.resourceRef(superClass));
|
||||
}
|
||||
|
||||
|
||||
/**
|
||||
* Create a new ObjectProperty in the FHIR namespace
|
||||
*
|
||||
* @param name property name and label
|
||||
* @return
|
||||
*/
|
||||
public FHIRResource fhir_objectProperty(String name) {
|
||||
return fhir_resource(name, OWL2.ObjectProperty, name);
|
||||
}
|
||||
|
||||
/**
|
||||
* Create a new ObjectProperty in the FHIR namespace
|
||||
*
|
||||
* @param name property name and label
|
||||
* @param superProperty parent property
|
||||
* @return
|
||||
*/
|
||||
public FHIRResource fhir_objectProperty(String name, Resource superProperty) {
|
||||
return fhir_objectProperty(name).addObjectProperty(RDFS.subPropertyOf, superProperty);
|
||||
}
|
||||
|
||||
/**
|
||||
* Create a new ObjectProperty in the FHIR namespace
|
||||
*
|
||||
* @param name property name and label
|
||||
* @param superProperty parent property name
|
||||
* @return
|
||||
*/
|
||||
public FHIRResource fhir_objectProperty(String name, String superProperty) {
|
||||
return fhir_objectProperty(name, RDFNamespace.FHIR.resourceRef(superProperty));
|
||||
}
|
||||
|
||||
/**
|
||||
* Create a new DataProperty in the FHIR namespace
|
||||
* @param name property name and label
|
||||
* @return
|
||||
*/
|
||||
public FHIRResource fhir_dataProperty(String name) {
|
||||
return fhir_resource(name, OWL2.DatatypeProperty, name);
|
||||
}
|
||||
|
||||
/**
|
||||
* Create a new DataProperty in the FHIR namespace
|
||||
* @param name property name and label
|
||||
* @param superProperty parent property
|
||||
* @return
|
||||
*/
|
||||
public FHIRResource fhir_dataProperty(String name, Resource superProperty) {
|
||||
return fhir_dataProperty(name).addObjectProperty(RDFS.subPropertyOf, superProperty);
|
||||
}
|
||||
|
||||
/**
|
||||
* Create a new DataProperty in the FHir namespace
|
||||
* @param name property name and label
|
||||
* @param superProperty parent property name
|
||||
* @return
|
||||
*/
|
||||
public FHIRResource fhir_dataProperty(String name, String superProperty) {
|
||||
return fhir_dataProperty(name, RDFNamespace.FHIR.resourceRef(superProperty));
|
||||
}
|
||||
|
||||
/**
|
||||
* Create a new OWL Restriction
|
||||
* @param onProperty
|
||||
* @return
|
||||
*/
|
||||
public FHIRResource fhir_restriction(Resource onProperty) {
|
||||
return fhir_bnode()
|
||||
.addType(OWL2.Restriction)
|
||||
.addObjectProperty(OWL2.onProperty, onProperty);
|
||||
}
|
||||
|
||||
/**
|
||||
* Create a new OWL restriction with the appropriate cardinality
|
||||
* @param onProperty property to apply the restriction to
|
||||
* @param from only/some target
|
||||
* @param min min cardinality
|
||||
* @param max max cardinality
|
||||
* @return restriction resource
|
||||
*/
|
||||
public Resource fhir_cardinality_restriction(Resource onProperty, Resource from, int min, int max) {
|
||||
FHIRResource rval = fhir_restriction(onProperty)
|
||||
.addObjectProperty(min > 0? OWL2.someValuesFrom : OWL2.allValuesFrom, from);
|
||||
if(min == max)
|
||||
rval.addDataProperty(OWL2.cardinality, Integer.toBinaryString(min), XSDDatatype.XSDinteger);
|
||||
else {
|
||||
if (min > 1)
|
||||
rval.addDataProperty(OWL2.minCardinality, Integer.toBinaryString(min), XSDDatatype.XSDinteger);
|
||||
if (max < Integer.MAX_VALUE)
|
||||
rval.addDataProperty(OWL2.maxCardinality, Integer.toBinaryString(max), XSDDatatype.XSDinteger);
|
||||
}
|
||||
return rval.resource;
|
||||
}
|
||||
|
||||
/**
|
||||
* Return a generic restriction
|
||||
* @param onProperty
|
||||
* @param from
|
||||
* @return
|
||||
*/
|
||||
public Resource fhir_restriction(Resource onProperty, Resource from) {
|
||||
return fhir_cardinality_restriction(onProperty, from, 0, Integer.MAX_VALUE);
|
||||
}
|
||||
|
||||
|
||||
/**
|
||||
* Return a union of the supplied members
|
||||
* @param members
|
||||
* @return Resource representing union
|
||||
*/
|
||||
public Resource fhir_union(List<Resource> members) {
|
||||
return fhir_bnode()
|
||||
.addObjectProperty(OWL2.unionOf, new FHIRResource(model, members))
|
||||
.resource;
|
||||
}
|
||||
|
||||
/**
|
||||
* Return a simple datatype restriction
|
||||
* @param dataType data type to be restricted
|
||||
* @return
|
||||
*/
|
||||
public FHIRResource fhir_datatype(Resource dataType) {
|
||||
return fhir_bnode()
|
||||
.addType(RDFS.Datatype)
|
||||
.addObjectProperty(OWL2.onDatatype, dataType);
|
||||
}
|
||||
|
||||
/**
|
||||
* Return a datatype restriction
|
||||
* @param dataType data type to be restricted
|
||||
* @param facets List of facets
|
||||
* @return
|
||||
*/
|
||||
public Resource fhir_datatype_restriction(Resource dataType, List<Resource> facets) {
|
||||
return fhir_datatype(dataType)
|
||||
.addObjectProperty(OWL2.withRestrictions, new FHIRResource(model, facets))
|
||||
.resource;
|
||||
}
|
||||
|
||||
/**
|
||||
* Return a pattern BNode
|
||||
* @param pattern string pattern
|
||||
* @return
|
||||
*/
|
||||
public Resource fhir_pattern(String pattern) {
|
||||
return fhir_bnode()
|
||||
.addDataProperty(RDFNamespace.XSDpattern, pattern).resource;
|
||||
}
|
||||
}
|
|
@@ -1,284 +0,0 @@
package org.hl7.fhir.rdf;
|
||||
|
||||
import java.io.IOException;
|
||||
import java.math.BigDecimal;
|
||||
import java.util.ArrayList;
|
||||
import java.util.HashSet;
|
||||
import java.util.List;
|
||||
import java.util.Set;
|
||||
|
||||
import org.apache.jena.graph.Node_Blank;
|
||||
import org.apache.jena.graph.Node_Literal;
|
||||
import org.apache.jena.graph.Node_URI;
|
||||
import org.apache.jena.graph.Triple;
|
||||
import org.apache.jena.rdf.model.Model;
|
||||
import org.apache.jena.rdf.model.Statement;
|
||||
import org.apache.jena.rdf.model.StmtIterator;
|
||||
|
||||
public class ModelComparer {
|
||||
|
||||
public enum TripleType {
|
||||
BNODE,
|
||||
URI, // o1.getClass() == Node_URI.class
|
||||
STRING,
|
||||
BOOLEAN,
|
||||
INTEGER,
|
||||
BASE64BINARY,
|
||||
DATE,
|
||||
DATETIME,
|
||||
GYEAR,
|
||||
GYEARMONTH,
|
||||
DECIMAL;
|
||||
}
|
||||
|
||||
|
||||
public class TypedTriple {
|
||||
public String object;
|
||||
private String subject;
|
||||
private String predicate;
|
||||
private TripleType type;
|
||||
|
||||
public TypedTriple(Triple trip) {
|
||||
subject = trip.getSubject().toString();
|
||||
predicate = trip.getPredicate().toString();
|
||||
if (trip.getObject().getClass() == Node_URI.class) {
|
||||
type = TripleType.URI;
|
||||
object = trip.getObject().toString();
|
||||
} else if (trip.getObject().getClass() == Node_Blank.class) {
|
||||
type = TripleType.BNODE;
|
||||
object = trip.getObject().toString();
|
||||
} else if (trip.getObject().getClass() == Node_Literal.class) {
|
||||
if (trip.getObject().getLiteralDatatypeURI().equals("http://www.w3.org/2001/XMLSchema#decimal")) {
|
||||
type = TripleType.DECIMAL;
|
||||
object = trip.getObject().getLiteralLexicalForm();
|
||||
} else if (trip.getObject().getLiteralDatatypeURI().equals("http://www.w3.org/2001/XMLSchema#string")) {
|
||||
type = TripleType.STRING;
|
||||
object = trip.getObject().getLiteralLexicalForm();
|
||||
} else if (trip.getObject().getLiteralDatatypeURI().equals("http://www.w3.org/2001/XMLSchema#date")) {
|
||||
type = TripleType.DATE;
|
||||
object = trip.getObject().getLiteralLexicalForm();
|
||||
} else if (trip.getObject().getLiteralDatatypeURI().equals("http://www.w3.org/2001/XMLSchema#dateTime")) {
|
||||
type = TripleType.DATETIME;
|
||||
object = trip.getObject().getLiteralLexicalForm();
|
||||
} else if (trip.getObject().getLiteralDatatypeURI().equals("http://www.w3.org/2001/XMLSchema#gYear")) {
|
||||
type = TripleType.GYEAR;
|
||||
object = trip.getObject().getLiteralLexicalForm();
|
||||
} else if (trip.getObject().getLiteralDatatypeURI().equals("http://www.w3.org/2001/XMLSchema#gYearMonth")) {
|
||||
type = TripleType.GYEARMONTH;
|
||||
object = trip.getObject().getLiteralLexicalForm();
|
||||
} else if (trip.getObject().getLiteralDatatypeURI().equals("http://www.w3.org/2001/XMLSchema#integer")) {
|
||||
type = TripleType.INTEGER;
|
||||
object = trip.getObject().getLiteralLexicalForm();
|
||||
} else if (trip.getObject().getLiteralDatatypeURI().equals("http://www.w3.org/2001/XMLSchema#boolean")) {
|
||||
type = TripleType.BOOLEAN;
|
||||
object = trip.getObject().getLiteralLexicalForm();
|
||||
} else if (trip.getObject().getLiteralDatatypeURI().equals("http://www.w3.org/2001/XMLSchema#base64Binary")) {
|
||||
type = TripleType.BASE64BINARY;
|
||||
object = trip.getObject().getLiteralLexicalForm();
|
||||
} else {
|
||||
throw new Error("not done yet ("+trip.getObject().getLiteralDatatypeURI()+")");
|
||||
}
|
||||
} else {
|
||||
throw new Error("not done yet ("+trip.getObject().getClass().getName()+")");
|
||||
}
|
||||
}
|
||||
|
||||
public String getSubject() {
|
||||
return subject;
|
||||
}
|
||||
|
||||
public String getObject() {
|
||||
return object;
|
||||
}
|
||||
|
||||
public String getPredicate() {
|
||||
return predicate;
|
||||
}
|
||||
|
||||
public TripleType getType() {
|
||||
return type;
|
||||
}
|
||||
}
|
||||
|
||||
private Model model1;
|
||||
private Model model2;
|
||||
private String name1;
|
||||
private String name2;
|
||||
List<TypedTriple> tl1;
|
||||
List<TypedTriple> tl2;
|
||||
|
||||
public ModelComparer setModel1(Model model, String name) throws IOException {
|
||||
model1 = model;
|
||||
name1 = name;
|
||||
tl1 = listAllTriples(model1);
|
||||
log(model, "c:\\temp\\triples-"+name+".txt");
|
||||
return this;
|
||||
}
|
||||
|
||||
private void log(Model model, String filename) throws IOException {
|
||||
StringBuilder b = new StringBuilder();
|
||||
b.append(filename);
|
||||
b.append("\r\n");
|
||||
for ( final StmtIterator res = model.listStatements(); res.hasNext(); ) {
|
||||
final Statement r = res.next();
|
||||
Triple t = r.asTriple();
|
||||
b.append(t.getSubject().toString());
|
||||
b.append("\t");
|
||||
b.append(t.getPredicate().toString());
|
||||
b.append("\t");
|
||||
b.append(t.getObject().toString());
|
||||
b.append("\r\n");
|
||||
}
|
||||
// TextFile.stringToFile(b.toString(), filename);
|
||||
// System.out.println(b.toString());
|
||||
}
|
||||
|
||||
public ModelComparer setModel2(Model model, String name) throws IOException {
|
||||
model2 = model;
|
||||
name2 = name;
|
||||
tl2 = listAllTriples(model2);
|
||||
log(model, "c:\\temp\\triples-"+name+".txt");
|
||||
return this;
|
||||
}
|
||||
|
||||
public List<String> compare() {
|
||||
Set<String> ep1 = listEntryPoints(tl1);
|
||||
Set<String> ep2 = listEntryPoints(tl2);
|
||||
List<String> diffs = new ArrayList<String>();
|
||||
if (ep1.size() != ep2.size())
|
||||
diffs.add("Entry point counts differ");
|
||||
if (ep1.size() != 1)
|
||||
diffs.add("Entry point count != 1");
|
||||
String ep = ep1.iterator().next();
|
||||
compare(diffs, ep, ep, ep);
|
||||
return diffs;
|
||||
}
|
||||
|
||||
|
||||
private void compare(List<String> diffs, String url1, String url2, String statedPath) {
|
||||
List<TypedTriple> pl1 = listAllProperties(tl1, url1);
|
||||
List<TypedTriple> pl2 = listAllProperties(tl2, url2);
|
||||
Set<String> handled = new HashSet<String>();
|
||||
for (TypedTriple t : pl1) {
|
||||
String pred = t.getPredicate();
|
||||
if (!handled.contains(pred)) {
|
||||
comparePredicate(diffs, statedPath, pred, pl1, pl2);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
private void comparePredicate(List<String> diffs, String statedPath, String pred, List<TypedTriple> pl1, List<TypedTriple> pl2) {
|
||||
List<TypedTriple> ml1 = listMatchingProperties(pl1, pred);
|
||||
List<TypedTriple> ml2 = listMatchingProperties(pl2, pred);
|
||||
if (ml1.size() != ml2.size()) {
|
||||
if (!isExpectedDifference(statedPath, pred, ml1.size(), ml2.size()))
|
||||
diffs.add("Difference at "+statedPath+" for "+pred+": "+name1+" has "+Integer.toString(ml1.size())+" values, but "+name2+" has "+Integer.toString(ml2.size())+" values");
|
||||
} else if (ml1.size() == 1) {
|
||||
compareObjects(diffs, statedPath, pred, ml1.get(0), ml2.get(0));
|
||||
} else for (int i = 0; i < ml1.size(); i++) {
|
||||
String id = pred+"["+Integer.toString(i)+"]";
|
||||
TypedTriple o1 = getByIndex(ml1, tl1, i, statedPath, id);
|
||||
if (o1 == null)
|
||||
diffs.add("Unable to find "+statedPath+" / "+id+" in "+name1);
|
||||
else {
|
||||
TypedTriple o2 = getByIndex(ml2, tl2, i, statedPath, id);
|
||||
if (o2 == null)
|
||||
diffs.add("Unable to find "+statedPath+" / "+id+" in "+name2);
|
||||
else
|
||||
compareObjects(diffs, statedPath, id, o1, o2);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
private void compareObjects(List<String> diffs, String statedPath, String pred, TypedTriple o1, TypedTriple o2) {
|
||||
if (o1.getType() == TripleType.BNODE || o2.getType() == TripleType.BNODE ) {
|
||||
// bnodes: follow the nodes
|
||||
compare(diffs, o1.toString(), o2.toString(), statedPath+" / "+pred);
|
||||
} else if (o1.getType() == TripleType.URI && o2.getType() == TripleType.URI) {
|
||||
// if either is a url, just compare literal values
|
||||
String u1 = o1.getObject();
|
||||
String u2 = o2.getObject();
|
||||
if (u1.startsWith("\"") && u1.endsWith("\""))
|
||||
u1 = u1.substring(1, u1.length()-1);
|
||||
if (u2.startsWith("\"") && u2.endsWith("\""))
|
||||
u2 = u2.substring(1, u2.length()-1);
|
||||
if (!u1.equals(u2))
|
||||
diffs.add("Difference at "+statedPath+" for "+pred+": URL objects have different values: "+name1+" = "+u1+", "+name2+" = "+u2+"");
|
||||
} else if (o1.getType() == o2.getType()) {
|
||||
if (o1.getType().equals(TripleType.DECIMAL)) {
|
||||
BigDecimal d1 = new BigDecimal(o1.getObject());
|
||||
BigDecimal d2 = new BigDecimal(o2.getObject());
|
||||
if (d1.compareTo(d2) != 0)
|
||||
diffs.add("Difference at "+statedPath+" for "+pred+": Literal decimal objects have different values: "+name1+" = "+o1.getObject()+", "+name2+" = "+o2.getObject()+"");
|
||||
} else if (!o1.getObject().equals(o2.getObject())) {
|
||||
diffs.add("Difference at "+statedPath+" for "+pred+": Literal objects have different values: "+name1+" = "+o1.getObject()+", "+name2+" = "+o2.getObject()+"");
|
||||
}
|
||||
} else if (o1.getClass() != o2.getClass())
|
||||
diffs.add("Difference at "+statedPath+" for "+pred+": Literal objects have different types: "+name1+" = "+o1.getType().toString()+", "+name2+" = "+o2.getType().toString()+"");
|
||||
}
|
||||
|
||||
private TypedTriple getByIndex(List<TypedTriple> matches, List<TypedTriple> all, int index, String statedPath, String id) {
|
||||
for (TypedTriple t : matches) {
|
||||
for (TypedTriple s : all) {
|
||||
if (s.getSubject().equals(t.getObject()) &&
|
||||
s.getPredicate().equals("http://hl7.org/fhir/index") &&
|
||||
s.getObject().equals(Integer.toString(index))) {
|
||||
return t;
|
||||
}
|
||||
}
|
||||
}
|
||||
return null;
|
||||
}
|
||||
|
||||
private boolean isExpectedDifference(String statedPath, String pred, int c1, int c2) {
|
||||
// if (pred.equals("http://hl7.org/fhir/nodeRole") && c1 == 1 && c2 == 0)
|
||||
// return true;
|
||||
// if (pred.equals("http://hl7.org/fhir/index") && c1 == 1 && c2 == 0)
|
||||
// return true;
|
||||
return false;
|
||||
}
|
||||
|
||||
private List<TypedTriple> listMatchingProperties(List<TypedTriple> list, String pred) {
|
||||
List<TypedTriple> props = new ArrayList<TypedTriple>();
|
||||
for (TypedTriple t : list) {
|
||||
if (t.getPredicate().equals(pred))
|
||||
props.add(t);
|
||||
}
|
||||
return props;
|
||||
}
|
||||
|
||||
private List<TypedTriple> listAllProperties(List<TypedTriple> list, String subject) {
|
||||
List<TypedTriple> props = new ArrayList<TypedTriple>();
|
||||
for (TypedTriple t : list) {
|
||||
if (t.getSubject().toString().equals(subject))
|
||||
props.add(t);
|
||||
}
|
||||
return props;
|
||||
}
|
||||
|
||||
private Set<String> listEntryPoints(List<TypedTriple> list) {
|
||||
Set<String> ep1 = new HashSet<String>();
|
||||
for (TypedTriple t : list) {
|
||||
boolean found = false;
|
||||
for (TypedTriple s : list) {
|
||||
if (t.getSubject().equals(s.getObject())) {
|
||||
found = true;
|
||||
break;
|
||||
}
|
||||
}
|
||||
if (!found)
|
||||
ep1.add(t.getSubject());
|
||||
};
|
||||
return ep1;
|
||||
}
|
||||
|
||||
private List<TypedTriple> listAllTriples(Model m1) {
|
||||
List<TypedTriple> tl1 = new ArrayList<TypedTriple>();
|
||||
for ( final StmtIterator res = m1.listStatements(); res.hasNext(); ) {
|
||||
final Statement r = res.next();
|
||||
tl1.add(new TypedTriple(r.asTriple()));
|
||||
}
|
||||
return tl1;
|
||||
}
|
||||
|
||||
}
|
|
@@ -1,96 +0,0 @@
package org.hl7.fhir.rdf;


import org.apache.jena.rdf.model.Model;
import org.apache.jena.rdf.model.Property;
import org.apache.jena.rdf.model.Resource;
import org.apache.jena.rdf.model.ResourceFactory;
import org.apache.jena.vocabulary.DCTerms;
import org.apache.jena.vocabulary.DC_11;
import org.apache.jena.vocabulary.OWL2;
import org.apache.jena.vocabulary.RDF;
import org.apache.jena.vocabulary.RDFS;
import org.apache.jena.vocabulary.XSD;


/**
 * An RDF Namespace
 */
public class RDFNamespace {

  private String prefix;
  private String URI;

  public RDFNamespace(String prefix, String uri) {
    this.prefix = prefix;
    this.URI = uri;
  }

  public String getPrefix() {
    return prefix;
  }

  public String getURI() {
    return URI;
  }

  public void addNsPrefix(Model model) {
    model.setNsPrefix(prefix, URI);
  }

  public String uriFor(String name) {
    return URI + name;
  }

  public Resource resourceRef(String name) {
    return ResourceFactory.createResource(uriFor(name));
  }

  public Property property(String name) {
    return ResourceFactory.createProperty(uriFor(name));
  }


  /*
   * FHIR specific namespaces
   */
  public static final RDFNamespace FHIR = new RDFNamespace("fhir", "http://hl7.org/fhir/");
  public static final RDFNamespace FHIR_VS = new RDFNamespace("fhir-vs", "http://hl7.org/fhir/ValueSet/");
  public static final RDFNamespace EX = new RDFNamespace("ex", "http://hl7.org/fhir/StructureDefinition/");
  public static final RDFNamespace RIM = new RDFNamespace("rim", "http://hl7.org/owl/rim/");
  public static final RDFNamespace CS = new RDFNamespace("cs", "http://hl7.org/orim/codesystem/");
  public static final RDFNamespace VS = new RDFNamespace("vs", "http://hl7.org/orim/valueset/");
  public static final RDFNamespace DT = new RDFNamespace("dt", "http://hl7.org/orim/datatype/");
  public static final RDFNamespace LOINC = new RDFNamespace("loinc", "http://loinc.org/rdf#");
  public static final RDFNamespace W5 = new RDFNamespace("w5", "http://hl7.org/fhir/w5#");

  // For some reason these aren't included in the XSD and RDF namespaces -- do we need to update Jena library?
  public static final Property XSDpattern;
  public static final Resource RDFXMLLiteral;
  static {
    XSDpattern = ResourceFactory.createProperty(XSD.getURI() + "pattern");
    RDFXMLLiteral = ResourceFactory.createResource(RDF.getURI() + "XMLLiteral");
  }

  /**
   * Add the FHIR namespaces to the RDF model
   * @param model model to add namespaces to
   */
  public static void addFHIRNamespaces(Model model) {
    model.setNsPrefix("rdf", RDF.getURI());
    model.setNsPrefix("rdfs", RDFS.getURI());
    FHIR.addNsPrefix(model);
    W5.addNsPrefix(model);
    FHIR_VS.addNsPrefix(model);
    EX.addNsPrefix(model);
    model.setNsPrefix("xsd", XSD.getURI());
    model.setNsPrefix("owl", OWL2.getURI());
    model.setNsPrefix("dc", DC_11.getURI());
    model.setNsPrefix("dcterms", DCTerms.getURI());
    RIM.addNsPrefix(model);
    CS.addNsPrefix(model);
    VS.addNsPrefix(model);
    DT.addNsPrefix(model);
    LOINC.addNsPrefix(model);
  }
}
@@ -1,70 +0,0 @@
package org.hl7.fhir.rdf;

import java.io.IOException;

import org.apache.jena.rdf.model.Model;
import org.apache.jena.riot.RDFDataMgr;
import org.hl7.fhir.utilities.Utilities;

public class RDFTests {
  private static final String prefixes =
    "PREFIX dc: <http://purl.org/dc/elements/1.1/> \r\n"+
    "PREFIX dcterms: <http://purl.org/dc/terms/> \r\n"+
    "PREFIX owl: <http://www.w3.org/2002/07/owl#> \r\n"+
    "PREFIX rdf: <http://www.w3.org/1999/02/22-rdf-syntax-ns#> \r\n"+
    "PREFIX rdfs: <http://www.w3.org/1999/02/22-rdf-syntax-ns#> \r\n"+
    "PREFIX rim: <http://hl7.org/orim/class/> \r\n"+
    "PREFIX dt: <http://hl7.org/orim/datatype/> \r\n"+
    "PREFIX vs: <http://hl7.org/orim/valueset/> \r\n"+
    "PREFIX cs: <http://hl7.org/orim/codesystem/> \r\n"+
    "PREFIX xs: <http://www.w3.org/2001/XMLSchema/> \r\n"+
    "PREFIX fhir: <http://hl7.org/fhir/> \r\n"+
    "PREFIX os: <http://open-services.net/ns/core#> \r\n";

  Model model;

  public static void main(String[] args) {
    try {

      RDFTests tests = new RDFTests();
      tests.load("C:\\work\\org.hl7.fhir\\build\\publish");
      tests.execute();
      System.out.println("Completed OK");
    } catch (Exception e) {
      e.printStackTrace();
    }

  }

  private void load(String path) throws IOException {
    Model rim = RDFDataMgr.loadModel(Utilities.path(path, "rim.ttl")) ;
    Model fhir = RDFDataMgr.loadModel(Utilities.path(path, "fhir.ttl")) ;
    model = rim.union(fhir);
  }

  private void execute() {
    // assertion("All domain resources have a w5 mapping", "SELECT ?x WHERE { ?x rdfs:subClassOf fhir:DomainResource. FILTER NOT EXISTS { ?x fhir:w5 ?anything }}", false);
  }

  // private Asser assertion(String desc, String sparql, boolean wantTrue) {
  //   Query query = QueryFactory.create(prefixes+sparql);
  //
  //   // Execute the query and obtain results
  //   QueryExecution qe = QueryExecutionFactory.create(query, model);
  //   ResultSet results = qe.execSelect();
  //   boolean res = results.hasNext() == wantTrue;
  //
  //   if (!res) {
  //     System.out.println("Sparql Assertion "+desc+" failed: ");
  //     // Output query results
  //     ResultSetFormatter.out(System.out, results, query);
  //     System.out.println("Sparql: "+sparql);
  //   }
  //
  //   // Important - free up resources used running the query
  //   qe.close();
  //   return res;
  // }


}
@@ -1,60 +0,0 @@
package org.hl7.fhir.rdf;

import java.util.HashMap;
import java.util.Map;

import org.apache.jena.rdf.model.Resource;
import org.apache.jena.vocabulary.XSD;


public class RDFTypeMap {
  /**
   * FHIR to XSD data type map.
   */
  static public final Map<String, Resource> ptMap = new HashMap<String, Resource>();
  static public final Map<Resource, Resource> owlTypeMap = new HashMap<Resource, Resource>();

  static {
    ptMap.put("base64Binary", XSD.base64Binary);
    ptMap.put("boolean", XSD.xboolean);
    ptMap.put("code", XSD.xstring);
    ptMap.put("date", XSD.date);
    ptMap.put("dateTime", XSD.dateTime);
    ptMap.put("gYear", XSD.gYear);
    ptMap.put("gYearMonth", XSD.gYearMonth);
    ptMap.put("decimal", XSD.decimal);
    ptMap.put("instant", XSD.dateTime);
    ptMap.put("id", XSD.xstring);
    ptMap.put("int", XSD.integer);
    ptMap.put("integer", XSD.integer);
    ptMap.put("markdown", XSD.xstring);
    ptMap.put("oid", XSD.anyURI);
    ptMap.put("positiveInt", XSD.positiveInteger);
    ptMap.put("string", XSD.xstring);
    ptMap.put("time", XSD.time);
    ptMap.put("unsignedInt", XSD.nonNegativeInteger);
    ptMap.put("uri", XSD.anyURI);
    ptMap.put("url", XSD.anyURI);
    ptMap.put("canonical", XSD.anyURI);
    ptMap.put("uuid", XSD.anyURI);
    ptMap.put("anyURI", XSD.anyURI);
    ptMap.put("token", RDFNamespace.FHIR.resourceRef("token"));
    ptMap.put("nonNegativeInteger", XSD.nonNegativeInteger);
    ptMap.put("positiveInteger", XSD.positiveInteger);

    owlTypeMap.put(XSD.gYear, XSD.dateTime);
    owlTypeMap.put(XSD.gYearMonth, XSD.dateTime);
    owlTypeMap.put(XSD.date, XSD.dateTime);
    owlTypeMap.put(XSD.time, XSD.xstring);
    owlTypeMap.put(RDFNamespace.FHIR.resourceRef("xhtml"), XSD.xstring);
  }

  public static Resource xsd_type_for(String type, boolean owl_types_required) {
    // TODO: find why namespaces are part of some of these types...
    String key = type.startsWith("xs:")? type.substring(3) : type;
    Resource rval = ptMap.containsKey(key) ? ptMap.get(key) : RDFNamespace.FHIR.resourceRef(key);
    if(owl_types_required && owlTypeMap.containsKey(rval))
      rval = owlTypeMap.get(rval);
    return rval;
  }
}
@@ -1,83 +0,0 @@
package org.hl7.fhir.rdf;

import java.io.ByteArrayOutputStream;
import java.util.ArrayList;
import java.util.List;

import org.apache.jena.query.Query;
import org.apache.jena.query.QueryExecution;
import org.apache.jena.query.QueryExecutionFactory;
import org.apache.jena.query.QueryFactory;
import org.apache.jena.query.QuerySolution;
import org.apache.jena.query.ResultSet;
import org.apache.jena.rdf.model.Model;
import org.apache.jena.riot.RDFDataMgr;
import org.hl7.fhir.utilities.validation.ValidationMessage;
import org.hl7.fhir.utilities.validation.ValidationMessage.IssueSeverity;
import org.hl7.fhir.utilities.validation.ValidationMessage.IssueType;
import org.hl7.fhir.utilities.validation.ValidationMessage.Source;

/**
 * The purpose of this class is twofold:
 * - validate that the turtle syntax is correct
 * - check the semantics
 *
 * @author Grahame
 */
public class RDFValidator {
  private static final String prefixes =
    "PREFIX dc: <http://purl.org/dc/elements/1.1/> \r\n"+
    "PREFIX dcterms: <http://purl.org/dc/terms/> \r\n"+
    "PREFIX owl: <http://www.w3.org/2002/07/owl#> \r\n"+
    "PREFIX rdf: <http://www.w3.org/1999/02/22-rdf-syntax-ns#> \r\n"+
    "PREFIX rdfs: <http://www.w3.org/1999/02/22-rdf-syntax-ns#> \r\n"+
    "PREFIX rim: <http://hl7.org/orim/class/> \r\n"+
    "PREFIX dt: <http://hl7.org/orim/datatype/> \r\n"+
    "PREFIX vs: <http://hl7.org/orim/valueset/> \r\n"+
    "PREFIX cs: <http://hl7.org/orim/codesystem/> \r\n"+
    "PREFIX xs: <http://www.w3.org/2001/XMLSchema/> \r\n"+
    "PREFIX fhir: <http://hl7.org/fhir/> \r\n"+
    "PREFIX os: <http://open-services.net/ns/core#> \r\n";

  Model model;

  public void validate(String filename) throws Exception {
    Model m = RDFDataMgr.loadModel(filename);
    // System.out.println(Integer.toString(m.getGraph().size())+" triples in RDF file "+filename);
    model = model == null ? m : model.union(m);
    // FileOutputStream strm = new FileOutputStream(Utilities.changeFileExt(filename, ".rdf.xml"));
    // try {
    //   RDFDataMgr.write(strm, m, RDFFormat.RDFXML_PLAIN);
    // } finally {
    //   strm.close();
    // }
  }

  public List<ValidationMessage> assertion(String sparql, String id, String rowType, String message, String description, IssueSeverity level) {
    List<ValidationMessage> msgs = new ArrayList<ValidationMessage>();

    Query query = QueryFactory.create(prefixes+sparql);

    // Execute the query and obtain results
    QueryExecution qe = QueryExecutionFactory.create(query, model);
    ResultSet results = qe.execSelect();

    if (results.hasNext()) {
      // Output query results
      ByteArrayOutputStream ba = new ByteArrayOutputStream();
      msgs.add(new ValidationMessage(Source.Publisher, IssueType.INVALID, -1, -1, "rdf:"+id, description, level));
      while (results.hasNext()) {
        QuerySolution row = results.next();
        String cell = row.getResource(results.getResultVars().get(0)).getURI();
        if (cell.startsWith("http://hl7.org/fhir/"))
          cell = cell.substring(20);
        msgs.add(new ValidationMessage(Source.Publisher, IssueType.INVALID, -1, -1, "rdf:"+id, cell+": "+message, level));
      }
    }

    // Important - free up resources used running the query
    qe.close();
    return msgs;
  }
}
@@ -1,663 +0,0 @@
package org.hl7.fhir.rdf;
|
||||
|
||||
import java.io.OutputStream;
|
||||
import java.io.OutputStreamWriter;
|
||||
import java.io.UnsupportedEncodingException;
|
||||
import java.util.ArrayList;
|
||||
import java.util.Collections;
|
||||
import java.util.HashMap;
|
||||
import java.util.HashSet;
|
||||
import java.util.List;
|
||||
import java.util.Map;
|
||||
import java.util.Set;
|
||||
|
||||
import org.hl7.fhir.dstu3.formats.TurtleLexer;
|
||||
import org.hl7.fhir.dstu3.formats.TurtleLexer.TurtleTokenType;
|
||||
import org.hl7.fhir.utilities.Utilities;
|
||||
|
||||
public class RdfGenerator {
|
||||
|
||||
public abstract class Triple {
|
||||
private String uri;
|
||||
}
|
||||
|
||||
public class StringType extends Triple {
|
||||
private String value;
|
||||
|
||||
public StringType(String value) {
|
||||
super();
|
||||
this.value = value;
|
||||
}
|
||||
}
|
||||
|
||||
public class Complex extends Triple {
|
||||
protected List<Predicate> predicates = new ArrayList<Predicate>();
|
||||
|
||||
public boolean write(LineOutputStreamWriter writer, int indent) throws Exception {
|
||||
if (predicates.isEmpty())
|
||||
return false;
|
||||
if (predicates.size() == 1 && predicates.get(0).object instanceof StringType && Utilities.noString(predicates.get(0).comment)) {
|
||||
writer.write(" "+predicates.get(0).predicate+" "+((StringType) predicates.get(0).object).value);
|
||||
return false;
|
||||
}
|
||||
String left = Utilities.padLeft("", ' ', indent);
|
||||
int i = 0;
|
||||
for (Predicate po : predicates) {
|
||||
writer.write("\r\n");
|
||||
if (po.getObject() instanceof StringType)
|
||||
writer.write(left+" "+po.getPredicate()+" "+((StringType) po.getObject()).value);
|
||||
else {
|
||||
writer.write(left+" "+po.getPredicate()+" [");
|
||||
if (((Complex) po.getObject()).write(writer, indent+2))
|
||||
writer.write(left+" ]");
|
||||
else
|
||||
writer.write(" ]");
|
||||
}
|
||||
i++;
|
||||
if (i < predicates.size())
|
||||
writer.write(";");
|
||||
if (!Utilities.noString(po.comment))
|
||||
writer.write(" # "+escape(po.comment, false));
|
||||
}
|
||||
return true;
|
||||
}
|
||||
|
||||
public boolean write(StringBuilder b, int indent) throws Exception {
|
||||
if (predicates.isEmpty())
|
||||
return false;
|
||||
if (predicates.size() == 1 && predicates.get(0).object instanceof StringType && Utilities.noString(predicates.get(0).comment)) {
|
||||
b.append(" "+predicates.get(0).makelink()+" "+Utilities.escapeXml(((StringType) predicates.get(0).object).value));
|
||||
return false;
|
||||
}
|
||||
String left = Utilities.padLeft("", ' ', indent);
|
||||
int i = 0;
|
||||
for (Predicate po : predicates) {
|
||||
b.append("\r\n");
|
||||
if (po.getObject() instanceof StringType)
|
||||
b.append(left+" "+po.makelink()+" "+Utilities.escapeXml(((StringType) po.getObject()).value));
|
||||
else {
|
||||
b.append(left+" "+po.makelink()+" [");
|
||||
if (((Complex) po.getObject()).write(b, indent+2))
|
||||
b.append(left+" ]");
|
||||
else
|
||||
b.append(" ]");
|
||||
}
|
||||
i++;
|
||||
if (i < predicates.size())
|
||||
b.append(";");
|
||||
if (!Utilities.noString(po.comment))
|
||||
b.append(" # "+Utilities.escapeXml(escape(po.comment, false)));
|
||||
}
|
||||
return true;
|
||||
}
|
||||
|
||||
public Complex predicate(String predicate, String object) {
|
||||
predicateSet.add(predicate);
|
||||
objectSet.add(object);
|
||||
return predicate(predicate, new StringType(object));
|
||||
}
|
||||
|
||||
public Complex linkedPredicate(String predicate, String object, String link) {
|
||||
predicateSet.add(predicate);
|
||||
objectSet.add(object);
|
||||
return linkedPredicate(predicate, new StringType(object), link);
|
||||
}
|
||||
|
||||
public Complex predicate(String predicate, Triple object) {
|
||||
Predicate p = new Predicate();
|
||||
p.predicate = predicate;
|
||||
predicateSet.add(predicate);
|
||||
if (object instanceof StringType)
|
||||
objectSet.add(((StringType) object).value);
|
||||
p.object = object;
|
||||
predicates.add(p);
|
||||
return this;
|
||||
}
|
||||
|
||||
public Complex linkedPredicate(String predicate, Triple object, String link) {
|
||||
Predicate p = new Predicate();
|
||||
p.predicate = predicate;
|
||||
p.link = link;
|
||||
predicateSet.add(predicate);
|
||||
if (object instanceof StringType)
|
||||
objectSet.add(((StringType) object).value);
|
||||
p.object = object;
|
||||
predicates.add(p);
|
||||
return this;
|
||||
}
|
||||
|
||||
public Complex predicate(String predicate) {
|
||||
predicateSet.add(predicate);
|
||||
Complex c = complex();
|
||||
predicate(predicate, c);
|
||||
return c;
|
||||
}
|
||||
|
||||
public Complex linkedPredicate(String predicate, String link) {
|
||||
predicateSet.add(predicate);
|
||||
Complex c = complex();
|
||||
linkedPredicate(predicate, c, link);
|
||||
return c;
|
||||
}
|
||||
|
||||
public void prefix(String code, String url) {
|
||||
RdfGenerator.this.prefix(code, url);
|
||||
}
|
||||
}
|
||||
|
||||
private class Predicate {
|
||||
protected String predicate;
|
||||
protected String link;
|
||||
protected Triple object;
|
||||
protected String comment;
|
||||
|
||||
public String getPredicate() {
|
||||
return predicate;
|
||||
}
|
||||
public String makelink() {
|
||||
if (link == null)
|
||||
return predicate;
|
||||
else
|
||||
return "<a href=\""+link+"\">"+predicate+"</a>";
|
||||
}
|
||||
|
||||
public Triple getObject() {
|
||||
return object;
|
||||
}
|
||||
public String getComment() {
|
||||
return comment;
|
||||
}
|
||||
}
|
||||
|
||||
public class Subject extends Complex {
|
||||
private String id;
|
||||
|
||||
public Predicate predicate(String predicate, Triple object, String comment) {
|
||||
Predicate p = new Predicate();
|
||||
p.predicate = predicate;
|
||||
predicateSet.add(predicate);
|
||||
if (object instanceof StringType)
|
||||
objectSet.add(((StringType) object).value);
|
||||
p.object = object;
|
||||
predicates.add(p);
|
||||
p.comment = comment;
|
||||
return p;
|
||||
}
|
||||
|
||||
public void comment(String comment) {
|
||||
if (!Utilities.noString(comment)) {
|
||||
predicate("rdfs:comment", literal(comment));
|
||||
predicate("dcterms:description", literal(comment));
|
||||
}
|
||||
}
|
||||
|
||||
public void label(String label) {
|
||||
if (!Utilities.noString(label)) {
|
||||
predicate("rdfs:label", literal(label));
|
||||
predicate("dc:title", literal(label));
|
||||
}
|
||||
}
|
||||
|
||||
}
|
||||
|
||||
public class Section {
|
||||
private String name;
|
||||
private List<Subject> subjects = new ArrayList<Subject>();
|
||||
|
||||
public Subject triple(String subject, String predicate, String object, String comment) {
|
||||
return triple(subject, predicate, new StringType(object), comment);
|
||||
}
|
||||
|
||||
public Subject triple(String subject, String predicate, String object) {
|
||||
return triple(subject, predicate, new StringType(object));
|
||||
}
|
||||
|
||||
public Subject triple(String subject, String predicate, Triple object) {
|
||||
return triple(subject, predicate, object, null);
|
||||
}
|
||||
|
||||
public Subject triple(String subject, String predicate, Triple object, String comment) {
|
||||
Subject s = subject(subject);
|
||||
s.predicate(predicate, object, comment);
|
||||
return s;
|
||||
}
|
||||
|
||||
public void comment(String subject, String comment) {
|
||||
triple(subject, "rdfs:comment", literal(comment));
|
||||
triple(subject, "dcterms:description", literal(comment));
|
||||
}
|
||||
|
||||
public void label(String subject, String comment) {
|
||||
triple(subject, "rdfs:label", literal(comment));
|
||||
triple(subject, "dc:title", literal(comment));
|
||||
}
|
||||
|
||||
public void importTtl(String ttl) throws Exception {
|
||||
if (!Utilities.noString(ttl)) {
|
||||
// System.out.println("import ttl: "+ttl);
|
||||
TurtleLexer lexer = new TurtleLexer(ttl);
|
||||
String subject = null;
|
||||
String predicate = null;
|
||||
while (!lexer.done()) {
|
||||
if (subject == null)
|
||||
subject = lexer.next();
|
||||
if (predicate == null)
|
||||
predicate = lexer.next();
|
||||
if (lexer.peekType() == null) {
|
||||
throw new Error("Unexpected end of input parsing turtle");
|
||||
} if (lexer.peekType() == TurtleTokenType.TOKEN) {
|
||||
triple(subject, predicate, lexer.next());
|
||||
} else if (lexer.peek() == null) {
|
||||
throw new Error("Unexected - turtle lexer found no token");
|
||||
} else if (lexer.peek().equals("[")) {
|
||||
triple(subject, predicate, importComplex(lexer));
|
||||
} else
|
||||
throw new Exception("Not done yet");
|
||||
String n = lexer.next();
|
||||
if (Utilities.noString(n))
|
||||
break;
|
||||
if (n.equals(".")) {
|
||||
subject = null;
|
||||
predicate = null;
|
||||
} else if (n.equals(";")) {
|
||||
predicate = null;
|
||||
} else if (!n.equals(","))
|
||||
throw new Exception("Unexpected token "+n);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
private Complex importComplex(TurtleLexer lexer) throws Exception {
|
||||
lexer.next(); // read [
|
||||
Complex obj = new Complex();
|
||||
while (!lexer.peek().equals("]")) {
|
||||
String predicate = lexer.next();
|
||||
if (lexer.peekType() == TurtleTokenType.TOKEN || lexer.peekType() == TurtleTokenType.LITERAL) {
|
||||
obj.predicate(predicate, lexer.next());
|
||||
} else if (lexer.peek().equals("[")) {
|
||||
obj.predicate(predicate, importComplex(lexer));
|
||||
} else
|
||||
throw new Exception("Not done yet");
|
||||
if (lexer.peek().equals(";"))
|
||||
lexer.next();
|
||||
}
|
||||
lexer.next(); // read ]
|
||||
return obj;
|
||||
}
|
||||
|
||||
public Subject subject(String subject) {
|
||||
for (Subject ss : subjects)
|
||||
if (ss.id.equals(subject))
|
||||
return ss;
|
||||
Subject s = new Subject();
|
||||
s.id = subject;
|
||||
subjects.add(s);
|
||||
return s;
|
||||
}
|
||||
}
|
||||
|
||||
private List<Section> sections = new ArrayList<Section>();
|
||||
protected Set<String> subjectSet = new HashSet<String>();
|
||||
protected Set<String> predicateSet = new HashSet<String>();
|
||||
protected Set<String> objectSet = new HashSet<String>();
|
||||
private OutputStream destination;
|
||||
protected Map<String, String> prefixes = new HashMap<String, String>();
|
||||
|
||||
|
||||
public RdfGenerator(OutputStream destination) {
|
||||
super();
|
||||
this.destination = destination;
|
||||
}
|
||||
|
||||
protected String pctEncode(String s) {
|
||||
if (s == null)
|
||||
return "";
|
||||
|
||||
StringBuilder b = new StringBuilder();
|
||||
for (char c : s.toCharArray()) {
|
||||
if (c >= 'A' && c <= 'Z')
|
||||
b.append(c);
|
||||
else if (c >= 'a' && c <= 'z')
|
||||
b.append(c);
|
||||
else if (c >= '0' && c <= '9')
|
||||
b.append(c);
|
||||
else if (c == '.')
|
||||
b.append(c);
|
||||
else
|
||||
b.append("%"+Integer.toHexString(c));
|
||||
}
|
||||
return b.toString();
|
||||
}
|
||||
|
||||
protected List<String> sorted(Set<String> keys) {
|
||||
List<String> names = new ArrayList<String>();
|
||||
names.addAll(keys);
|
||||
Collections.sort(names);
|
||||
return names;
|
||||
}
|
||||
|
||||
|
||||
public void prefix(String code, String url) {
|
||||
if (!prefixes.containsKey(code))
|
||||
prefixes.put(code, url);
|
||||
else if (!prefixes.get(code).equals(url))
|
||||
throw new Error("The prefix "+code+" is already assigned to "+prefixes.get(code)+" so cannot be set to "+url);
|
||||
}
|
||||
|
||||
protected boolean hasSection(String sn) {
|
||||
for (Section s : sections)
|
||||
if (s.name.equals(sn))
|
||||
return true;
|
||||
return false;
|
||||
|
||||
}
|
||||
|
||||
public Section section(String sn) {
|
||||
if (hasSection(sn))
|
||||
throw new Error("Duplicate section name "+sn);
|
||||
Section s = new Section();
|
||||
s.name = sn;
|
||||
sections.add(s);
|
||||
return s;
|
||||
}
|
||||
|
||||
protected String matches(String url, String prefixUri, String prefix) {
|
||||
if (url.startsWith(prefixUri)) {
|
||||
prefixes.put(prefix, prefixUri);
|
||||
return prefix+":"+escape(url.substring(prefixUri.length()), false);
|
||||
}
|
||||
return null;
|
||||
}
|
||||
|
||||
// protected PredicateObject predicateObj(String predicate, TripleObject object) {
|
||||
// PredicateObject obj = new PredicateObject();
|
||||
// obj.predicate = predicate;
|
||||
// predicates.add(predicate);
|
||||
// obj.object = object;
|
||||
// return obj;
|
||||
// }
|
||||
//
|
||||
// protected PredicateObject predicate(String predicate, String object) {
|
||||
// PredicateObject obj = new PredicateObject();
|
||||
// obj.predicate = predicate;
|
||||
// predicates.add(predicate);
|
||||
// obj.object = new StringObject(object);
|
||||
// return obj;
|
||||
// }
|
||||
//
|
||||
// protected PredicateObject predicate(String predicate, String object, String comment) {
|
||||
// PredicateObject obj = new PredicateObject();
|
||||
// obj.predicate = predicate;
|
||||
// predicates.add(predicate);
|
||||
// obj.object = new StringObject(object);
|
||||
// obj.comment = comment;
|
||||
// return obj;
|
||||
// }
|
||||
//
|
||||
protected Complex complex() {
|
||||
return new Complex();
|
||||
}
|
||||
//
|
||||
// protected TripleObject complex(PredicateObject predicate1, PredicateObject predicate2) {
|
||||
// ComplexObject obj = new ComplexObject();
|
||||
// obj.predicates.add(predicate1);
|
||||
// obj.predicates.add(predicate2);
|
||||
// return obj;
|
||||
// }
|
||||
//
|
||||
// protected TripleObject complex(PredicateObject predicate1, PredicateObject predicate2, PredicateObject predicate3) {
|
||||
// ComplexObject obj = new ComplexObject();
|
||||
// obj.predicates.add(predicate1);
|
||||
// obj.predicates.add(predicate2);
|
||||
// obj.predicates.add(predicate3);
|
||||
// return obj;
|
||||
// }
|
||||
//
|
||||
// protected void triple(String section, String subject, String predicate, String object) {
|
||||
// triple(section, subject, predicate, new StringObject(object), null);
|
||||
// }
|
||||
//
|
||||
// protected void triple(String section, String subject, String predicate, TripleObject object) {
|
||||
// triple(section, subject, predicate, object, null);
|
||||
// }
|
||||
//
|
||||
// protected void triple(String section, String subject, String predicate, String object, String comment) {
|
||||
// triple(section, subject, predicate, new StringObject(object), comment);
|
||||
// }
|
||||
//
|
||||
// protected void primaryTriple(String section, String subject, String predicate, String object) {
|
||||
// Section s = sections.get(sections.size()-1);
|
||||
// if (s.primary != null)
|
||||
// throw new Error("multiple primary objects");
|
||||
// s.primary = triple(section, null, subject, predicate, new StringObject(object), null);
|
||||
// }
|
||||
//
|
||||
// protected Triple triple(String section, Integer order, String subject, String predicate, TripleObject object, String comment) {
|
||||
// if (!hasSection(section))
|
||||
// throw new Error("use undefined section "+section);
|
||||
// checkPrefix(subject);
|
||||
// checkPrefix(predicate);
|
||||
// checkPrefix(object);
|
||||
// predicates.add(predicate);
|
||||
// Triple t = new Triple(section, order, subject, predicate, object, comment == null ? "" : " # "+comment.replace("\r\n", " ").replace("\r", " ").replace("\n", " "));
|
||||
// triples.add(t);
|
||||
// return t;
|
||||
// }
|
||||
|
||||
private void checkPrefix(Triple object) {
|
||||
if (object instanceof StringType)
|
||||
checkPrefix(((StringType) object).value);
|
||||
else {
|
||||
Complex obj = (Complex) object;
|
||||
for (Predicate po : obj.predicates) {
|
||||
checkPrefix(po.getPredicate());
|
||||
checkPrefix(po.getObject());
|
||||
}
|
||||
}
|
||||
|
||||
}
|
||||
|
||||
protected void checkPrefix(String pname) {
|
||||
if (pname.startsWith("("))
|
||||
return;
|
||||
if (pname.startsWith("\""))
|
||||
return;
|
||||
if (pname.startsWith("<"))
|
||||
return;
|
||||
|
||||
if (pname.contains(":")) {
|
||||
String prefix = pname.substring(0, pname.indexOf(":"));
|
||||
if (!prefixes.containsKey(prefix) && !prefix.equals("http")&& !prefix.equals("urn"))
|
||||
throw new Error("undefined prefix "+prefix);
|
||||
}
|
||||
}
|
||||
|
||||
protected StringType literal(String s) {
|
||||
return new StringType("\""+escape(s, true)+"\"");
|
||||
}
|
||||
|
||||
protected StringType literalTyped(String s, String t) {
|
||||
return new StringType("\""+escape(s, true)+"\"^^xs:"+t);
|
||||
}
|
||||
|
||||
public static String escape(String s, boolean string) {
|
||||
if (s == null)
|
||||
return "";
|
||||
|
||||
StringBuilder b = new StringBuilder();
|
||||
for (char c : s.toCharArray()) {
|
||||
if (c == '\r')
|
||||
b.append("\\r");
|
||||
else if (c == '\n')
|
||||
b.append("\\n");
|
||||
else if (c == '"')
|
||||
b.append("\\\"");
|
||||
else if (c == '\\')
|
||||
b.append("\\\\");
|
||||
else if (c == '/' && !string)
|
||||
b.append("\\/");
|
||||
else
|
||||
b.append(c);
|
||||
}
|
||||
return b.toString();
|
||||
}
|
||||
|
||||
  protected class LineOutputStreamWriter extends OutputStreamWriter {
    private LineOutputStreamWriter(OutputStream out) throws UnsupportedEncodingException {
      super(out, "UTF-8");
    }

    private void ln() throws Exception {
      write("\r\n");
    }

    private void ln(String s) throws Exception {
      write(s);
      write("\r\n");
    }
  }


  public void commit(boolean header) throws Exception {
    LineOutputStreamWriter writer = new LineOutputStreamWriter(destination);
    commitPrefixes(writer, header);
    for (Section s : sections) {
      commitSection(writer, s);
    }
    writer.ln("# -------------------------------------------------------------------------------------");
    writer.ln();
    writer.flush();
    writer.close();
  }

  public String asHtml() throws Exception {
    StringBuilder b = new StringBuilder();
    b.append("<pre class=\"rdf\">\r\n");
    commitPrefixes(b);
    for (Section s : sections) {
      commitSection(b, s);
    }
    b.append("</pre>\r\n");
    b.append("\r\n");
    return b.toString();
  }

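  // A rough usage sketch (hypothetical names; assumes a concrete subclass of this generator,
  // already populated with prefixes and sections and wired to an OutputStream):
  //   MyTurtleSubclass gen = ...;   // hypothetical subclass
  //   gen.commit(true);             // writes prefixes, the policy header and every section as Turtle
  //   String html = gen.asHtml();   // the same content wrapped in a <pre class="rdf"> block
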
  private void commitPrefixes(LineOutputStreamWriter writer, boolean header) throws Exception {
    if (header) {
      writer.ln("# FHIR Sub-definitions");
      writer.write("# This is work in progress, and may change rapidly \r\n");
      writer.ln();
      writer.write("# A note about policy: the focus here is providing the knowledge from \r\n");
      writer.write("# the FHIR specification as a set of triples for knowledge processing. \r\n");
      writer.write("# Where appropriate, predicates defined external to FHIR are used. \"Where \r\n");
      writer.write("# appropriate\" means that the predicates are a faithful representation \r\n");
      writer.write("# of the FHIR semantics, and do not involve insane (or owful) syntax. \r\n");
      writer.ln();
      writer.write("# Where the community agrees on additional predicate statements (such \r\n");
      writer.write("# as OWL constraints) these are added in addition to the direct FHIR \r\n");
      writer.write("# predicates \r\n");
      writer.ln();
      writer.write("# This is not a formal ontology, though it is possible it may start to become one eventually\r\n");
      writer.ln();
      writer.write("# this file refers to concepts defined in rim.ttl and to others defined elsewhere outside HL7 \r\n");
      writer.ln();
    }
    for (String p : sorted(prefixes.keySet()))
      writer.ln("@prefix "+p+": <"+prefixes.get(p)+"> .");
    writer.ln();
    if (header) {
      writer.ln("# Predicates used in this file:");
      for (String s : sorted(predicateSet))
        writer.ln(" # "+s);
      writer.ln();
    }
  }

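  // The prefix block this produces looks like the following (IRIs shown are illustrative only):
  //   @prefix fhir: <http://hl7.org/fhir/> .
  //   @prefix rdfs: <http://www.w3.org/2000/01/rdf-schema#> .
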
  private void commitPrefixes(StringBuilder b) throws Exception {
    for (String p : sorted(prefixes.keySet()))
      b.append("@prefix "+p+": <"+prefixes.get(p)+"> .\r\n");
    b.append("\r\n");
  }

//  private String lastSubject = null;
//  private String lastComment = "";

  private void commitSection(LineOutputStreamWriter writer, Section section) throws Exception {
    writer.ln("# - "+section.name+" "+Utilities.padLeft("", '-', 75-section.name.length()));
    writer.ln();
    for (Subject sbj : section.subjects) {
      writer.write(sbj.id);
      writer.write(" ");
      int i = 0;

      for (Predicate p : sbj.predicates) {
        writer.write(p.getPredicate());
        writer.write(" ");
        if (p.getObject() instanceof StringType)
          writer.write(((StringType) p.getObject()).value);
        else {
          writer.write("[");
          if (((Complex) p.getObject()).write(writer, 4))
            writer.write("\r\n  ]");
          else
            writer.write("]");
        }
        String comment = p.comment == null ? "" : " # "+p.comment;
        i++;
        if (i < sbj.predicates.size())
          writer.write(";"+comment+"\r\n  ");
        else
          writer.write("."+comment+"\r\n\r\n");
      }
    }
  }

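  // The Turtle this emits has the following shape (the names below are made up):
  //   fhir:SomeSubject fhir:firstPredicate "a literal"; # optional comment
  //     fhir:secondPredicate [ ... ].
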
  private void commitSection(StringBuilder b, Section section) throws Exception {
    b.append("# - "+section.name+" "+Utilities.padLeft("", '-', 75-section.name.length())+"\r\n");
    b.append("\r\n");
    for (Subject sbj : section.subjects) {
      b.append(sbj.id);
      b.append(" ");
      int i = 0;

      for (Predicate p : sbj.predicates) {
        b.append(p.makelink());
        b.append(" ");
        if (p.getObject() instanceof StringType)
          b.append(Utilities.escapeXml(((StringType) p.getObject()).value));
        else {
          b.append("[");
          if (((Complex) p.getObject()).write(b, 4))
            b.append("\r\n  ]");
          else
            b.append("]");
        }
        String comment = p.comment == null ? "" : " # "+p.comment;
        i++;
        if (i < sbj.predicates.size())
          b.append(";"+Utilities.escapeXml(comment)+"\r\n  ");
        else
          b.append("."+Utilities.escapeXml(comment)+"\r\n\r\n");
      }
    }
  }

//  private void commitTriple(LineOutputStreamWriter writer, Triple t) throws Exception, IOException {
//    boolean follow = false;
//    if (lastSubject != null) {
//      follow = lastSubject.equals(t.getSubject());
//      String c = follow ? ";" : ".";
//      writer.ln(c+lastComment);
//      if (!follow)
//        writer.ln();
//    }
//    String left = follow ? Utilities.padLeft("", ' ', 2) : t.getSubject();
//    lastComment = t.getComment();
//    lastSubject = t.getSubject();
//  }


}

@ -1,86 +0,0 @@
package org.hl7.fhir.rdf;

//
//import org.apache.jena.riot.RDFDataMgr;
//
//import java.nio.file.Files;
//import java.nio.file.Paths;
//import java.util.logging.Logger;

//import com.hp.hpl.jena.rdf.model.Model;

import java.nio.file.Files;
import java.nio.file.Paths;

import org.apache.jena.rdf.model.Model;

import es.weso.rdf.RDFReader;
import es.weso.rdf.jena.RDFAsJenaModel;
import es.weso.schema.Result;
import es.weso.schema.Schema;
import es.weso.schema.ShExSchema$;
import scala.Option;

public class ShExValidator {

//  private Logger log = Logger.getLogger(ShExValidator.class.getName());
  private Schema schema;

  public ShExValidator(String schemaFile) throws Exception {
    // load shex from the path
    // log.info("Reading ShEx file " + schemaFile);
    schema = readSchema(schemaFile);
  }

  public Schema readSchema(String schemaFile) throws Exception {
    // Create a none, see: http://stackoverflow.com/questions/1997433/how-to-use-scala-none-from-java-code
    Option<String> none = Option.apply((String) null); // Create a none
    String contents = new String(Files.readAllBytes(Paths.get(schemaFile)));
    return ShExSchema$.MODULE$.fromString(contents, "SHEXC", none).get();
  }

  public void validate(Model dataModel) {
    Option<String> none = Option.apply(null); // Create a none
    RDFReader rdf = new RDFAsJenaModel(dataModel);
    Result result = schema.validate(rdf, "TARGETDECLS", none, none, rdf.getPrefixMap(), schema.pm());
    if (result.isValid()) {
      // log.info("Result is valid");
      // System.out.println("Valid. Result: " + result.show());
    } else {
      System.out.println("Not valid");
    }
  }

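  // A minimal usage sketch (file names are hypothetical; the data model is loaded with
  // Jena's RDFDataMgr, as hinted at by the commented-out import above):
  //   Model data = RDFDataMgr.loadModel("patient-example.ttl");
  //   ShExValidator validator = new ShExValidator("fhir.shex");   // parses the ShEx schema once
  //   validator.validate(data);   // currently just prints "Not valid" when validation fails
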
//
//
//
//  public void validate(Model dataModel, Schema schema, PrefixMap pm) throws Exception {
//    RDFReader rdf = new RDFAsJenaModel(dataModel);
//    ShExMatcher matcher = new ShExMatcher(schema,rdf);
////    ShExResult result = matcher.validate();
////    if (result.isValid()) {
////      log.info("Result is valid");
////      System.out.println("Valid. Result: " + result.show(1,pm));
////    } else {
////      System.out.println("Not valid");
////    }
//  }
//
//  public void validate(String dataFile, String schemaFile) throws Exception {
////    log.info("Reading data file " + dataFile);
////    Model dataModel = RDFDataMgr.loadModel(dataFile);
////    log.info("Model read. Size = " + dataModel.size());
//
//
////
////    Schema schema = pair._1();
////    PrefixMap pm = pair._2();
//////
//////    log.info("Schema read" + schema.show());
////
////    validate(dataModel,schema,pm);
//  }
//
//
}