/**
 * Runs the consistency check on the TBox/ABox pair loaded from {@code BASE_DIR},
 * returning the reasoner's ValidityReport.
 *
 * <p>The reasoner factory {@code rf} is a field of the enclosing class, not a
 * parameter (a stale {@code @param rf} tag was removed from this javadoc).
 */
public ValidityReport testResults() {
  Model t = FileManager.get().loadModel(BASE_DIR + tbox);
  Model a = FileManager.get().loadModel(BASE_DIR + abox);
  // Work around non-deterministic bug in bindSchema
  // Reasoner r = rf.create(null).bindSchema(t);
  Reasoner r = rf.create(null);
  // Merge the schema statements into the data model instead of binding the
  // schema to the reasoner.
  a.add(t);
  InfModel im = ModelFactory.createInfModel(r, a);
  return im.validate();
}
/** Initializes the servlet. */ @Override public void init(ServletConfig config) throws ServletException { super.init(config); // It seems that if the servlet fails to initialize the first time, // init can be called again (it has been observed in Tomcat log files // but not explained). if (initAttempted) { // This happens - the query and update servlets share this class // log.info("Re-initialization of servlet attempted") ; return; } initAttempted = true; servletConfig = config; // Modify the (Jena) global filemanager to include loading by servlet context FileManager fileManager = FileManager.get(); if (config != null) { servletContext = config.getServletContext(); fileManager.addLocator(new LocatorServletContext(servletContext)); } printName = config.getServletName(); String configURI = config.getInitParameter(Joseki.configurationFileProperty); servletEnv(); try { Dispatcher.initServiceRegistry(fileManager, configURI); } catch (ConfigurationErrorException confEx) { throw new ServletException("Joseki configuration error", confEx); } }
/**
 * Reads an RDF model from a file path or URI, picking the parser from the
 * name: ".ttl"/".n3" -> Turtle, ".rdf" -> RDF/XML, ".nt" -> N-Triples;
 * anything else is fetched by URI via content negotiation.
 *
 * @param fileNameOrUri file name or URI to load
 * @return the parsed model
 * @throws IllegalArgumentException if the input cannot be located
 */
public static Model readModel(String fileNameOrUri) {
  long startTime = System.currentTimeMillis();
  Model model = ModelFactory.createDefaultModel();
  java.io.InputStream in = FileManager.get().open(fileNameOrUri);
  if (in == null) {
    throw new IllegalArgumentException(fileNameOrUri + " not found");
  }
  try {
    if (fileNameOrUri.contains(".ttl") || fileNameOrUri.contains(".n3")) {
      logger.info("Opening Turtle file");
      model.read(in, null, "TTL");
    } else if (fileNameOrUri.contains(".rdf")) {
      logger.info("Opening RDFXML file");
      model.read(in, null);
    } else if (fileNameOrUri.contains(".nt")) {
      logger.info("Opening N-Triples file");
      model.read(in, null, "N-TRIPLE");
    } else {
      logger.info("Content negotiation to get RDFXML from " + fileNameOrUri);
      model.read(fileNameOrUri);
    }
  } finally {
    // The stream was previously leaked on every branch (including the
    // content-negotiation branch, where it is opened but never read).
    try {
      in.close();
    } catch (java.io.IOException ignored) {
      // best-effort close; parsing has already completed
    }
  }
  logger.info(
      "Loading "
          + fileNameOrUri
          + " is done in "
          + (System.currentTimeMillis() - startTime)
          + "ms.");
  return model;
}
public void hospitalQueryProcess() { Model model = ModelFactory.createDefaultModel(); InputStream in = FileManager.get().open("cancer_rdf.owl"); if (in == null) { throw new IllegalArgumentException("File: not found"); } // read the RDF/XML file model.read(in, null); for (int i = 0; i < treatment_list.size(); i++) { String treatment_name = treatment_list.get(i); issueSPARQLTreatment_Hospital(model, treatment_name, hos_treatment); } // System.out.println("Hospital list for treatments: " + Hospital_list); System.out.println("Hospital map for treatments: " + hos_treatment); Set set = hos_treatment.entrySet(); // Get an iterator Iterator i = set.iterator(); // Display elements while (i.hasNext()) { Map.Entry me = (Map.Entry) i.next(); System.out.print(me.getKey() + ": "); System.out.println(me.getValue()); if (!Hospital_list.contains(me.getValue().toString())) { Hospital_list.add(me.getValue().toString()); } } System.out.println("Hospital list for treatments: " + Hospital_list); model.close(); }
/**
 * Returns the UTF-8 contents of the first existing file among
 * {@code fileNames}, or {@code ifAbsent} when none of the candidates exist.
 */
static String fetchPage(String[] fileNames, String ifAbsent) {
  for (String candidate : fileNames) {
    File candidateFile = new File(candidate);
    if (!candidateFile.exists()) {
      continue;
    }
    return FileManager.get().readWholeFileAsUTF8(candidateFile.getAbsolutePath());
  }
  return ifAbsent;
}
/**
 * Imports an M-MOBI-generated OWL file and rebuilds the Mobi domain from it:
 * the domain name is derived from the namespace registered under the
 * M-MOBI marker prefix, then inheritance, equivalence, and composition
 * relations are imported.
 *
 * @param inputFileOWL path or URI of the OWL file to import
 * @throws IllegalArgumentException if the file cannot be located
 * @throws ExceptionMobiFile if the file cannot be parsed or was not
 *     generated by M-MOBI
 */
public void importForMobiOfOWL(String inputFileOWL) throws Exception {
  this.jena = ModelFactory.createOntologyModel();
  InputStream in = FileManager.get().open(inputFileOWL);
  if (in == null) throw new IllegalArgumentException("OWL file not found.");
  try {
    this.jena.read(in, null);
  } catch (Exception ex) {
    // NOTE(review): the original cause is dropped here; consider chaining it
    // if ExceptionMobiFile has a (String, Throwable) constructor — confirm.
    throw new ExceptionMobiFile("Problem reading the OWL file.");
  }
  // A file is recognised as M-MOBI output by the presence of this prefix.
  String domainname = null;
  if (this.jena.getNsPrefixMap().containsKey(this.validatorOWLGeneratedByMMobi)) {
    domainname = this.jena.getNsPrefixURI(this.validatorOWLGeneratedByMMobi);
  } else {
    throw new ExceptionMobiFile("OWL file not generated by M-MOBI.");
  }
  // Domain name = namespace fragment between the last '/' and the '#'.
  this.mobi =
      new Mobi(
          domainname.substring(domainname.lastIndexOf("/") + 1, domainname.lastIndexOf("#")));
  // this.ImportClassesAndInstances();
  this.ImportRelationsInheritanceOrEquivalence(Relation.INHERITANCE);
  this.ImportRelationsInheritanceOrEquivalence(Relation.EQUIVALENCE);
  this.ImportRelationsComposition();
}
/** Load local dataset into TDB */ private void TDBloading(String fileDump) { logger.info("TDB creation"); // create tdb from .nt local file FileManager fm = FileManager.get(); fm.addLocatorClassLoader(RDFTripleExtractor.class.getClassLoader()); InputStream in = fm.open(fileDump); Location location = new Location(tdbDirectory); // load some initial data try { TDBLoader.load( TDBInternal.getBaseDatasetGraphTDB(TDBFactory.createDatasetGraph(location)), in, true); } catch (Exception e) { logger.error("TDB loading error: " + e.getMessage()); } logger.info("TDB loading"); // create model from tdb Dataset dataset = TDBFactory.createDataset(tdbDirectory); // assume we want the default model, or we could get a named model here dataset.begin(ReadWrite.READ); model = dataset.getDefaultModel(); dataset.end(); }
public Model readFile(String filename) { // TDB.sync(dataset); // dataset.begin(ReadWrite.WRITE); Model model = FileManager.get().readModel(ontModel, filename, null, "RDF/XML"); // base.add(model); // om.read(is, ns); // dataset.commit(); // dataset.end(); return model; }
/**
 * Builds the server configuration from the given RDF model: locates the single
 * conf:Configuration resource, then collects its datasets and the label /
 * comment / image properties (falling back to common vocabularies when a list
 * is empty) and the prefix mappings to expose.
 *
 * @param configurationModel model containing a conf:Configuration resource
 * @throws IllegalArgumentException if no conf:Configuration resource is found
 */
public Configuration(Model configurationModel) {
  model = configurationModel;
  StmtIterator it = model.listStatements(null, RDF.type, CONF.Configuration);
  if (!it.hasNext()) {
    throw new IllegalArgumentException("No conf:Configuration found in configuration model");
  }
  // Only the first conf:Configuration subject is used.
  config = it.nextStatement().getSubject();
  datasets = new ArrayList();
  it = model.listStatements(config, CONF.dataset, (RDFNode) null);
  while (it.hasNext()) {
    datasets.add(new Dataset(it.nextStatement().getResource()));
  }
  labelProperties = new ArrayList();
  it = model.listStatements(config, CONF.labelProperty, (RDFNode) null);
  while (it.hasNext()) {
    labelProperties.add(it.nextStatement().getObject().as(Property.class));
  }
  // Fallback label properties when the configuration declares none.
  if (labelProperties.isEmpty()) {
    labelProperties.add(RDFS.label);
    labelProperties.add(DC.title);
    labelProperties.add(model.createProperty("http://xmlns.com/foaf/0.1/name"));
  }
  commentProperties = new ArrayList();
  it = model.listStatements(config, CONF.commentProperty, (RDFNode) null);
  while (it.hasNext()) {
    commentProperties.add(it.nextStatement().getObject().as(Property.class));
  }
  // Fallback comment properties when the configuration declares none.
  if (commentProperties.isEmpty()) {
    commentProperties.add(RDFS.comment);
    commentProperties.add(DC.description);
  }
  imageProperties = new ArrayList();
  it = model.listStatements(config, CONF.imageProperty, (RDFNode) null);
  while (it.hasNext()) {
    imageProperties.add(it.nextStatement().getObject().as(Property.class));
  }
  // Fallback image property when the configuration declares none.
  if (imageProperties.isEmpty()) {
    imageProperties.add(model.createProperty("http://xmlns.com/foaf/0.1/depiction"));
  }
  prefixes = new PrefixMappingImpl();
  if (config.hasProperty(CONF.usePrefixesFrom)) {
    // Prefixes come from each referenced document rather than this model.
    it = config.listProperties(CONF.usePrefixesFrom);
    while (it.hasNext()) {
      Statement stmt = it.nextStatement();
      prefixes.setNsPrefixes(FileManager.get().loadModel(stmt.getResource().getURI()));
    }
  } else {
    prefixes.setNsPrefixes(model);
  }
  // Never advertise the configuration vocabulary's own prefix.
  if (prefixes.getNsURIPrefix(CONF.NS) != null) {
    prefixes.removeNsPrefix(prefixes.getNsURIPrefix(CONF.NS));
  }
}
public void importOntology(String filename) { System.out.println("Load OWL File: " + filename); // use the FileManager to find the input file InputStream in = FileManager.get().open(filename); if (in == null) { throw new IllegalArgumentException("File: " + filename + " not found"); } // read the RDF/XML file model.read(in, null); }
/**
 * Reads the RDF file named by {@code args[0]} and prints its JSON rendering.
 * Any failure (missing file, parse error) is reported via the stack trace,
 * matching the original best-effort behavior.
 */
public static void main(String args[]) {
  Model model = ModelFactory.createDefaultModel();
  // try-with-resources replaces the manual close that only ran on success;
  // the explicit null check replaces a NullPointerException on missing files.
  try (InputStream in = FileManager.get().open(args[0])) {
    if (in == null) {
      throw new IllegalArgumentException("File: " + args[0] + " not found");
    }
    model.read(in, null);
    System.out.println(getJSON(model));
  } catch (Exception e) {
    e.printStackTrace();
  }
}
/**
 * Test fixture setup: loads the folder resource map (Turtle) into the dataset
 * as a named model, then builds the FolderResourceMap under test from the
 * first folder of the research object.
 */
@Override
@Before
public void setUp() throws Exception {
  super.setUp();
  folderResourceMapUri = URI.create(FOLDER_RESOURCE_MAP);
  String mapUri = folderResourceMapUri.toString();
  Model resourceMapModel = FileManager.get().loadModel(mapUri, mapUri, "TURTLE");
  dataset.addNamedModel(mapUri, resourceMapModel);
  folderBuilder = new FolderBuilder();
  folder = researchObject.getFolders().values().iterator().next();
  folderResourceMap = builder.buildFolderResourceMap(folderResourceMapUri, folder);
}
/**
 * Loads a Jena rule file and replaces {@code reasoner} with a
 * GenericRuleReasoner built from it. Failures are logged, not thrown
 * (preserving the original best-effort contract).
 *
 * @param rulesFile file name or URI of the rule file
 */
public void setRules(String rulesFile) {
  try {
    log.info("Trying to load rule file: " + rulesFile);
    InputStream rulesStream = FileManager.get().open(rulesFile);
    if (rulesStream == null) {
      // Previously a missing file surfaced as a NullPointerException that was
      // swallowed by the catch below; fail with a meaningful message instead.
      throw new IllegalArgumentException("Rule file not found: " + rulesFile);
    }
    Parser rules =
        Rule.rulesParserFromReader(
            new BufferedReader(new InputStreamReader(rulesStream, "UTF-8")));
    reasoner = new GenericRuleReasoner(Rule.parseRules(rules));
    log.info("Loaded!");
  } catch (Throwable ex) {
    // Deliberately broad: rule loading must never take the service down.
    log.error("Problem loading rules <" + rulesFile + ">", ex);
  }
}
/**
 * Parses {@code htmlURL} as XHTML+RDFa 1.1 and asserts that the extracted
 * triples are isomorphic with the reference model at {@code compareURL}.
 * On mismatch the extracted model is dumped to stderr as Turtle before the
 * assertion fails.
 */
private void compareRDF(URL htmlURL, URL compareURL) throws SAXException, IOException {
  // Normalise single-slash file URLs ("file:/x...") to "file:///x..." form.
  String cf = compareURL.toExternalForm();
  if (cf.matches("file:/[^/][^/].*")) cf = cf.replaceFirst("file:/", "file:///");
  String hf = htmlURL.toExternalForm();
  if (hf.matches("file:/[^/][^/].*")) hf = hf.replaceFirst("file:/", "file:///");
  // NOTE(review): the normalised cf is never used — loadModel is given the raw
  // URL again. Verify whether cf should be passed here instead.
  Model c = FileManager.get().loadModel(compareURL.toExternalForm());
  Model m = ModelFactory.createDefaultModel();
  StatementSink sink = new JenaStatementSink(m);
  XMLReader parser = ParserFactory.createReaderForFormat(sink, Format.XHTML, Setting.OnePointOne);
  parser.parse(hf);
  boolean result = c.isIsomorphicWith(m);
  if (!result) m.write(System.err, "TTL");
  assertTrue("Files match (" + htmlURL + ")", result);
}
public void reloadOWLFile(String file) { // create an empty model // this.model = ModelFactory.createDefaultModel(); this.model = ModelFactory.createOntologyModel(PelletReasonerFactory.THE_SPEC); System.out.println("Load OWL File: " + file); // use the FileManager to find the input file InputStream in = FileManager.get().open(file); if (in == null) { throw new IllegalArgumentException("File: " + file + " not found"); } // read the RDF/XML file model.read(in, null); }
private void executeQuery(String queryString) { System.out.println("###########################################"); String prolog = "PREFIX vcard: <" + VCARD.getURI() + ">"; String queryStr = prolog + queryString; Model model = ModelFactory.createDefaultModel(); InputStream in = FileManager.get().open("/home/sush/code/experiments/vc-db-3.rdf"); model.read(in, null); Query query = QueryFactory.create(queryStr); query.serialize(new IndentedWriter(System.out, true)); // System.out.println() ; QueryExecution qexec = QueryExecutionFactory.create(query, model); ResultSet rs = qexec.execSelect(); for (; rs.hasNext(); ) { QuerySolution rb = rs.nextSolution(); System.out.println(rb); } }
/**
 * Loads an assembler description and instantiates the unique resource of the
 * given type from it.
 *
 * @param assemblerFile file name or URI of the assembler description
 * @param type the RDF type of the root resource to assemble
 * @return the assembled object, or null if no resource of {@code type} exists
 * @throws ARQException if the file is missing/unreadable or the type is not
 *     unique in the description
 */
public static Object build(String assemblerFile, Resource type) {
  if (assemblerFile == null) throw new ARQException("No assembler file");
  Model spec = null;
  try {
    spec = FileManager.get().loadModel(assemblerFile);
  } catch (Exception ex) {
    throw new ARQException("Failed reading assembler description: " + ex.getMessage());
  }
  Resource root = null;
  try {
    root = GraphUtils.findRootByType(spec, type);
    if (root == null) return null;
  } catch (TypeNotUniqueException ex) {
    // Report the type actually searched for; the old message named the
    // unrelated constant DatasetAssemblerVocab.tDataset (copy-paste error).
    throw new ARQException("Multiple types for: " + type);
  }
  return Assembler.general.open(root);
}
public static void main(String args[]) { String filename = "example6.rdf"; // Create an empty model OntModel model = ModelFactory.createOntologyModel(OntModelSpec.RDFS_MEM); // Use the FileManager to find the input file InputStream in = FileManager.get().open(filename); if (in == null) throw new IllegalArgumentException("File: " + filename + " not found"); // Read the RDF/XML file model.read(in, null); // model.write(System.out,"Turtle"); ExtendedIterator<? extends OntResource> iterator; // ** TASK 7.1: List all individuals of "Person" ** OntClass person = model.getOntClass(ns + "Person"); iterator = person.listInstances(); System.out.println("Individuals"); System.out.println("------------------------------------"); while (iterator.hasNext()) { OntResource r = iterator.next(); System.out.println(r.getURI()); } System.out.println("------------------------------------"); // ** TASK 7.2: List all subclasses of "Person" ** iterator = person.listSubClasses(); System.out.println("Sublcasses"); System.out.println("------------------------------------"); while (iterator.hasNext()) { OntResource r = iterator.next(); System.out.println(r.getURI()); } System.out.println("------------------------------------"); // ** TASK 7.3: Make the necessary changes to get as well indirect instances and subclasses. // TIP: you need some inference... ** }
public static void main(String args[]) { String filename = "example6.rdf"; // Create an empty model OntModel model = ModelFactory.createOntologyModel(OntModelSpec.RDFS_MEM); // Use the FileManager to find the input file InputStream in = FileManager.get().open(filename); if (in == null) throw new IllegalArgumentException("File: " + filename + " not found"); // Read the RDF/XML file model.read(in, null); // ** TASK 7.1: List all individuals of "Person" ** // ** TASK 7.2: List all subclasses of "Person" ** // ** TASK 7.3: Make the necessary changes to get as well indirect instances and subclasses. // TIP: you need some inference... ** }
static IndexLARQ buildTitleIndex(Model model, String datafile) { // ---- Read and index just the title strings. IndexBuilderString larqBuilder = new IndexBuilderString(DC.title); // Index statements as they are added to the model. model.register(larqBuilder); // To just build the index, create a model that does not store statements // Model model2 = ModelFactory.createModelForGraph(new GraphSink()) ; FileManager.get().readModel(model, datafile); // ---- Alternatively build the index after the model has been created. // larqBuilder.indexStatements(model.listStatements()) ; // ---- Finish indexing larqBuilder.closeWriter(); model.unregister(larqBuilder); // ---- Create the access index IndexLARQ index = larqBuilder.getIndex(); return index; }
public void createJenaModel(RegisterContextRequest rcr) { OntModel ontModel = ModelFactory.createOntologyModel(OntModelSpec.OWL_MEM); Model entityOnt = FileManager.get().loadModel(ONT_FILE); ontModel.addSubModel(entityOnt); ontModel.setNsPrefixes(entityOnt.getNsPrefixMap()); // ontModel.loadImports(); ExtendedIterator<OntProperty> iter = ontModel.listAllOntProperties(); while (iter.hasNext()) { OntProperty ontProp = iter.next(); System.out.println(ontProp.getLocalName()); // if (formParams.containsKey(ontProp.getLocalName())) { // regIndividual.addProperty(ontProp, // ontModel.getcreateTypedLiteral(formParams.get(ontProp.getLocalName())[0])); // } } // OntClass regClass = ontModel.getOntClass(ONT_URL + "iotReg"); // OntClass entClass = ontModel.createOntClass(ONT_URL + "entity"); // OntClass regClass = (OntClass) ontModel.createOntResource(OntClass.class, // null,ONT_URL+"Registration" ); // OntClass regClass = (OntClass) ontModel.createClass(ONT_URL + "Registration"); // OntClass entityClass = (OntClass) ontModel.createClass(ONT_URL + "Entity"); OntClass entityGroup = (OntClass) ontModel.getOntClass(ONT_URL + "EntityGroup"); OntClass entity = (OntClass) ontModel.getOntClass(ONT_URL + "Entity"); OntClass attribute = (OntClass) ontModel.getOntClass(ONT_URL + "Attribute"); OntClass metadata = (OntClass) ontModel.getOntClass(ONT_URL + "Metadata"); OntClass location = (OntClass) ontModel.getOntClass(entityOnt.getNsPrefixURI("geo") + "Point"); // System.out.println("Class type is: " + regClass.getLocalName()); String ngsiValue = ""; ngsiValue = rcr.getRegistrationId(); Individual entityGroupIndiv = ontModel.createIndividual(ONT_URL + ngsiValue, entityGroup); entityGroupIndiv.setPropertyValue( ontModel.getProperty(ONT_URL + "registrationId"), ontModel.createLiteral(ngsiValue)); ngsiValue = rcr.getTimestamp().toString(); entityGroupIndiv.setPropertyValue( ontModel.getProperty(ONT_URL + "regTimeStamp"), ontModel.createLiteral(ngsiValue)); ngsiValue = rcr.getDuration(); 
entityGroupIndiv.setPropertyValue( ontModel.getProperty(ONT_URL + "duration"), ontModel.createLiteral(ngsiValue)); ngsiValue = rcr.getContextRegistration().get(0).getEntityId().get(0).getId(); Individual entity1 = ontModel.createIndividual(ONT_URL + ngsiValue, entity); entityGroupIndiv.setPropertyValue(ontModel.getProperty(ONT_URL + "hasEntity"), entity1); ngsiValue = rcr.getContextRegistration().get(0).getEntityId().get(0).getId(); entity1.setPropertyValue( ontModel.getProperty(ONT_URL + "id"), ontModel.createLiteral(ngsiValue)); ngsiValue = rcr.getContextRegistration().get(0).getEntityId().get(0).getType(); entity1.setPropertyValue( ontModel.getProperty(ONT_URL + "type"), ontModel.createLiteral(ngsiValue)); ngsiValue = rcr.getContextRegistration().get(0).getContextRegistrationAttribute().get(0).getName(); Individual attribute1 = ontModel.createIndividual(ONT_URL + ngsiValue, attribute); entity1.setPropertyValue(ontModel.getProperty(ONT_URL + "hasAttribute"), attribute1); ngsiValue = rcr.getContextRegistration().get(0).getContextRegistrationAttribute().get(0).getType(); attribute1.setPropertyValue( ontModel.getProperty(ONT_URL + "type"), ontModel.createLiteral(ngsiValue)); ngsiValue = rcr.getContextRegistration() .get(0) .getContextRegistrationAttribute() .get(0) .getContextMetadata() .get(0) .getName(); Individual metadata1 = ontModel.createIndividual(ONT_URL + ngsiValue, metadata); attribute1.setPropertyValue(ontModel.getProperty(ONT_URL + "hasMetadata"), metadata1); ngsiValue = rcr.getContextRegistration() .get(0) .getContextRegistrationAttribute() .get(0) .getContextMetadata() .get(0) .getType(); metadata1.setPropertyValue( ontModel.getProperty(ONT_URL + "type"), ontModel.createLiteral(ngsiValue)); ngsiValue = rcr.getContextRegistration() .get(0) .getContextRegistrationAttribute() .get(0) .getContextMetadata() .get(0) .getValue() .toString(); metadata1.setPropertyValue( ontModel.getProperty(ONT_URL + "value"), ontModel.createLiteral(ngsiValue)); ngsiValue = 
rcr.getContextRegistration() .get(0) .getContextRegistrationAttribute() .get(0) .getContextMetadata() .get(1) .getName(); Individual metadata2 = ontModel.createIndividual(ONT_URL + ngsiValue, metadata); attribute1.addProperty(ontModel.getProperty(ONT_URL + "hasMetadata"), metadata2); ngsiValue = rcr.getContextRegistration() .get(0) .getContextRegistrationAttribute() .get(0) .getContextMetadata() .get(1) .getType(); metadata2.setPropertyValue( ontModel.getProperty(ONT_URL + "type"), ontModel.createLiteral(ngsiValue)); ngsiValue = rcr.getContextRegistration() .get(0) .getContextRegistrationAttribute() .get(0) .getContextMetadata() .get(1) .getValue() .toString(); metadata2.setPropertyValue( ontModel.getProperty(ONT_URL + "value"), ontModel.createLiteral(ngsiValue)); // ngsiValue = // rcr.getContextRegistration().get(0).getContextRegistrationAttribute().get(0).getContextMetadata().get(2).getName(); // Individual metadata3 = ontModel.createIndividual(ONT_URL + ngsiValue, metadata); // attribute1.addProperty(ontModel.getProperty(ONT_URL + "hasMetadata"), metadata3); // // ngsiValue = // rcr.getContextRegistration().get(0).getContextRegistrationAttribute().get(0).getContextMetadata().get(2).getType(); // metadata3.setPropertyValue(ontModel.getProperty(ONT_URL + "type"), // ontModel.createLiteral(ngsiValue)); // ngsiValue = // rcr.getContextRegistration().get(0).getContextRegistrationAttribute().get(0).getContextMetadata().get(2).getValue().toString(); // metadata3.setPropertyValue(ontModel.getProperty(ONT_URL + "value"), // ontModel.createLiteral(ngsiValue)); // System.out.println("has propertry // \"expiry\":"+entityIndiv.hasProperty(ontModel.getProperty(ONT_URL, "expiry"))); ontModel.write(System.out, "TURTLE"); // ontModel.write(System.out, "RDF/XML"); // ontModel.write(System.out, "JSON-LD"); }
/**
 * Lazily builds and caches the D2RQ mapping model. Sources, in priority
 * order: an already-cached model; the in-memory (GUI-supplied) Turtle
 * mapping; a mapping generated from the database; or the mapping file on
 * disk, parsed with a syntax guessed from its extension.
 *
 * @return the mapping model (cached after the first call)
 * @throws D2RQException if no source is configured or the file cannot be parsed
 */
public Model getMappingModel() {
  // No cached model and no possible source: fail fast.
  if (mappingModel == null && jdbcURL == null && mappingFile == null && input_mapping == null) {
    throw new D2RQException("no mapping file or JDBC URL specified");
  }
  // No mapping file: build from the in-memory mapping or generate one.
  if (mappingModel == null && mappingFile == null) {
    if (input_mapping != null) { // gui staff
      mappingModel = ModelFactory.createDefaultModel();
      mappingModel.read(new StringReader(input_mapping), getResourceBaseURI(), "TURTLE");
    } else {
      if (d2rqMapping == null && r2rmlMapping == null) {
        generateMapping();
      }
      // Serialize the generated mapping and re-parse it into a Jena model.
      StringWriter out = new StringWriter();
      getWriter().write(out);
      mappingModel = ModelFactory.createDefaultModel();
      mappingModel.read(new StringReader(out.toString()), getResourceBaseURI(), "TURTLE");
    }
  }
  if (mappingModel == null) {
    log.info("Reading mapping file from " + mappingFile);
    // Guess the language/type of mapping file based on file extension. If it is not among the
    // known types then assume that the file has TURTLE syntax and force to use TURTLE parser
    String lang = FileUtils.guessLang(mappingFile, "unknown");
    try {
      if (lang.equals("unknown")) {
        mappingModel = FileManager.get().loadModel(mappingFile, getResourceBaseURI(), "TURTLE");
      } else {
        // if the type is known then let Jena auto-detect it and load the appropriate parser
        mappingModel = FileManager.get().loadModel(mappingFile, getResourceBaseURI(), null);
      }
    } catch (TurtleParseException ex) {
      // We have wired RIOT into Jena in the static initializer above,
      // so this should never happen (it's for the old Jena Turtle/N3 parser)
      throw new D2RQException("Error parsing " + mappingFile + ": " + ex.getMessage(), ex, 77);
    } catch (JenaException ex) {
      // RIOT reports syntax errors as a RiotException wrapped in JenaException;
      // unwrap it for a clearer message, otherwise rethrow untouched.
      if (ex.getCause() != null && ex.getCause() instanceof RiotException) {
        throw new D2RQException(
            "Error parsing " + mappingFile + ": " + ex.getCause().getMessage(), ex, 77);
      }
      throw ex;
    } catch (AtlasException ex) {
      // Detect the specific case of non-UTF-8 encoded input files
      // and do a custom error message
      if (FileUtils.langTurtle.equals(lang)
          && ex.getCause() != null
          && (ex.getCause() instanceof MalformedInputException)) {
        throw new D2RQException(
            "Error parsing "
                + mappingFile
                + ": Turtle files must be in UTF-8 encoding; "
                + "bad encoding found at byte "
                + ((MalformedInputException) ex.getCause()).getInputLength(),
            ex,
            77);
      }
      // Generic error message for other parse errors
      throw new D2RQException("Error parsing " + mappingFile + ": " + ex.getMessage(), ex, 77);
    }
  }
  return mappingModel;
}
public static void main(String[] args) { try { Model schema = null; Store store = null; SDBConnection conn = null; if (args.length == 1) { store = ExtendedSDBFactory.connectStore(args[0]); store.getTableFormatter().create(); schema = ExtendedSDBFactory.connectPagedDefaultModel(store); } else { StoreDesc storeDesc = new StoreDesc(args[0], args[1]); Class.forName(args[2]); String jdbcURL = args[3] + args[4]; conn = new SDBConnection(jdbcURL, args[5], args[6]); store = ExtendedSDBFactory.connectStore(conn, storeDesc); store.getTableFormatter().create(); schema = ExtendedSDBFactory.connectPagedNamedModel(store, args[7]); } long startTime = System.currentTimeMillis(), endTime = 0L, count = 0L; schema.read("http://www.lehigh.edu/~zhp2/2004/0401/univ-bench.owl"); OntModel m = ExtendedModelFactory.createOntologyModel(PelletReasonerFactory.THE_SPEC, schema); String sInputDirectory = args[8]; File inputDirectory = new File(sInputDirectory); String[] sFilenames = inputDirectory.list(new OWLFilenameFilter()); for (int i = 0; i < sFilenames.length; i++) { InputStream in = FileManager.get().open(sInputDirectory + sFilenames[i]); if (in == null) { throw new IllegalArgumentException("File: " + sFilenames[i] + " not found"); } m.read(in, "http://www.utdallas.edu/benchmark-test#", "RDF/XML-ABBREV"); in.close(); } endTime = System.currentTimeMillis(); System.out.println("time to read the model = " + (endTime - startTime) / 1000 + " seconds."); startTime = System.currentTimeMillis(); String queryString = " PREFIX rdf: <http://www.w3.org/1999/02/22-rdf-syntax-ns#> " + " PREFIX ub: <http://www.lehigh.edu/~zhp2/2004/0401/univ-bench.owl#> " + " SELECT * WHERE " + " { " + " ?x rdf:type ub:GraduateStudent . " + " ?x ub:takesCourse <http://www.Department0.University0.edu/GraduateCourse0> . 
" + " }"; Query query = QueryFactory.create(queryString); QueryExecution qexec = QueryExecutionFactory.create(query, m); ResultSet rs = qexec.execSelect(); while (rs.hasNext()) { count++; rs.nextSolution(); } qexec.close(); endTime = System.currentTimeMillis(); System.out.println("count = " + count); System.out.println("time to query = " + (endTime - startTime) + " milliseconds."); store.close(); conn.close(); } catch (Exception e) { e.printStackTrace(); } }