// TEMPORARY private boolean checkResultsByModel( Query query, Model expectedModel, ResultSetRewindable results) { // Fudge - can't cope with ordered results properly. The output writer for ResultSets does nto // add rs:index. results.reset(); Model actualModel = ResultSetFormatter.toModel(results); // Tidy the models. // Very regretable. expectedModel.removeAll(null, RDF.type, ResultSetGraphVocab.ResultSet); expectedModel.removeAll(null, RDF.type, ResultSetGraphVocab.ResultSolution); expectedModel.removeAll(null, RDF.type, ResultSetGraphVocab.ResultBinding); expectedModel.removeAll(null, ResultSetGraphVocab.size, (RDFNode) null); expectedModel.removeAll(null, ResultSetGraphVocab.index, (RDFNode) null); actualModel.removeAll(null, RDF.type, ResultSetGraphVocab.ResultSet); actualModel.removeAll(null, RDF.type, ResultSetGraphVocab.ResultSolution); actualModel.removeAll(null, RDF.type, ResultSetGraphVocab.ResultBinding); actualModel.removeAll(null, ResultSetGraphVocab.size, (RDFNode) null); actualModel.removeAll(null, ResultSetGraphVocab.index, (RDFNode) null); boolean b = expectedModel.isIsomorphicWith(actualModel); if (!b) { System.out.println("---- Expected"); expectedModel.write(System.out, "TTL"); System.out.println("---- Actual"); actualModel.write(System.out, "TTL"); System.out.println("----"); } return b; }
public String display() { model.write(System.out, "TURTLE"); System.out.println("create certitifact file "); String certificatePath = pref.getCertificatePath(); FileOutputStream output; try { output = new FileOutputStream(certificatePath); model.write(output, "RDF/XML"); signature.signXMLData(certificatePath); output = new FileOutputStream(certificatePath.replace(".xml", ".ttl")); model.write(output, "TURTLE"); return certificatePath; } catch (FileNotFoundException e1) { // TODO Auto-generated catch block e1.printStackTrace(); } catch (TransformerFactoryConfigurationError e) { // TODO Auto-generated catch block e.printStackTrace(); } catch (Exception e) { // TODO Auto-generated catch block e.printStackTrace(); } return null; }
private void updateAddRemoveDataGetterStatements( OntModel displayModel, Model removeStatements, Model addStatements, Resource oldType, Resource newType) { log.debug("Old type: " + oldType.getURI() + " - newType: " + newType.getURI()); removeStatements.add(displayModel.listStatements(null, RDF.type, oldType)); StmtIterator oldStatements = displayModel.listStatements(null, RDF.type, oldType); while (oldStatements.hasNext()) { Statement stmt = oldStatements.nextStatement(); addStatements.add(stmt.getSubject(), RDF.type, newType); } // Print out remove and add statements here StringWriter sw = new StringWriter(); try { log.debug( "Adding statements with old data getter types to remove statements, remove statements is now"); removeStatements.write(sw, "N3"); log.debug(sw.toString()); sw.close(); sw = new StringWriter(); log.debug( "Adding statements with new data getter types to add statements, add statements is now"); addStatements.write(sw, "N3"); log.debug(sw.toString()); sw.close(); } catch (Exception ex) { log.error("Error occurred in writing out remove and statements for data getter types", ex); } }
/**
 * Prints a model-comparison test failure: the test description, the actual
 * results, and the expected model, both serialized as Turtle.
 *
 * <p>Fix: the UTF-8 PrintWriter wrapping System.out was never flushed, so
 * buffered failure output could be lost; an explicit flush is now done at the
 * end (System.out itself must not be closed).
 *
 * @param query the failing query (unused here; kept for signature parity)
 * @param expected the expected model
 * @param results the actual model produced by the test
 */
void printFailedModelTest(Query query, Model expected, Model results) {
  PrintWriter out = FileUtils.asPrintWriterUTF8(System.out);
  out.println("=======================================");
  out.println("Failure: " + description());
  results.write(out, "TTL");
  out.println("---------------------------------------");
  expected.write(out, "TTL");
  out.println();
  out.flush(); // push buffered diagnostics through to System.out
}
/**
 * Pushes every statement of {@code model} to the remote graph via SPARQL
 * Update, batching {@code testLimit} statements per request.
 *
 * <p>Statements are copied into a scratch model; each time the batch fills
 * (or the source is exhausted) the batch is serialized as N-Triples, wrapped
 * in a "{verb} DATA { [GRAPH <uri>] { ... } }" update string, executed against
 * the graph, and the scratch model is cleared for the next batch.
 *
 * @param model source of statements to transfer
 * @param verb the SPARQL Update verb, e.g. "INSERT" or "DELETE" — presumably;
 *     only its textual splice into the update string is visible here
 */
private void verbModel(Model model, String verb) {
    Model m = ModelFactory.createDefaultModel();
    int testLimit = 1000; // batch size: statements per SPARQL Update request
    StmtIterator stmtIt = model.listStatements();
    int count = 0;
    try {
        while (stmtIt.hasNext()) {
            count++;
            m.add(stmtIt.nextStatement());
            // Flush the batch when full, or on the final statement.
            if (count % testLimit == 0 || !stmtIt.hasNext()) {
                StringWriter sw = new StringWriter();
                m.write(sw, "N-TRIPLE");
                StringBuffer updateStringBuff = new StringBuffer();
                String graphURI = graph.getGraphURI();
                // Wrap in GRAPH <uri> { ... } only when a named graph is configured.
                updateStringBuff.append(
                    verb + " DATA { " + ((graphURI != null) ? "GRAPH <" + graphURI + "> { " : ""));
                updateStringBuff.append(sw);
                updateStringBuff.append(((graphURI != null) ? " } " : "") + " }");
                String updateString = updateStringBuff.toString();
                // log.info(updateString);
                graph.executeUpdate(updateString);
                m.removeAll(); // reset the scratch model for the next batch
            }
        }
    } finally {
        stmtIt.close();
    }
}
public static void main(String[] args) throws IOException { // Model m = ModelFactory.createDefaultModel(); // m.read(new FileReader("nuts-rdf-0.91.ttl"), "http://nuts.geovocab.org/id/", "TTL"); GeoQualityMetric metric = new PropertiesPerClass(); Model r = metric.generateResultsDataCube("http://linkedgeodata.org/sparql"); r.write(new FileWriter("datacubes/LinkedGeoData/metric2.ttl"), "TTL"); }
/**
 * Streams the named model to the HTTP response as Turtle.
 *
 * <p>Looks up the model from the "modelName" request parameter, applies the
 * webapp's namespace prefixes, and serializes it under a READ critical
 * section. A CannotEncodeCharacterException is translated into a
 * RuntimeException carrying the offending character's byte values.
 */
private void processOutputModelRequest(VitroRequest vreq, HttpServletResponse response) {
    String modelNameStr = vreq.getParameter("modelName");
    Model model = getModel(modelNameStr, vreq);
    JenaOutputUtils.setNameSpacePrefixes(model, vreq.getWebappDaoFactory());
    model.enterCriticalSection(Lock.READ);
    try {
        OutputStream out = response.getOutputStream();
        // NOTE(review): content type is set after getOutputStream(); servlet
        // containers generally allow this until the response is committed,
        // but setting it first would be safer — confirm against the container.
        response.setContentType("application/x-turtle");
        // out.write("<?xml version=\"1.0\" encoding=\"UTF-8\"?>\n".getBytes());
        model.write(out, "TTL");
        out.flush();
        out.close();
    } catch (com.hp.hpl.jena.shared.CannotEncodeCharacterException cece) {
        // there's got to be a better way to do this: surface the unencodable
        // character's raw bytes in the error message.
        byte[] badCharBytes = String.valueOf(cece.getBadChar()).getBytes();
        String errorMsg = "Cannot encode character with byte values: (decimal) ";
        for (int i = 0; i < badCharBytes.length; i++) {
            errorMsg += badCharBytes[i];
        }
        throw new RuntimeException(errorMsg, cece);
    } catch (Exception e) {
        log.error(e, e);
    } finally {
        model.leaveCriticalSection();
    }
}
/**
 * Serializes {@code model} to {@code outputFile} in the given Jena format,
 * logging start and elapsed time.
 *
 * <p>Fix: the FileWriter was never closed, so buffered output could be lost
 * and the handle leaked; try-with-resources now guarantees flush + close.
 *
 * @param model the dataset to save
 * @param format a Jena writer language name, e.g. "TTL" or "RDF/XML"
 * @param outputFile destination path
 * @throws IOException if the file cannot be created or closed
 */
public static void writeModel(Model model, String format, String outputFile) throws IOException {
  logger.info("Saving dataset to " + outputFile + "...");
  long starTime = System.currentTimeMillis();
  try (FileWriter fileWriter = new FileWriter(outputFile)) {
    model.write(fileWriter, format);
  }
  logger.info("Saving file done in " + (System.currentTimeMillis() - starTime) + "ms.");
}
/**
 * Loads the NUTS geography dataset, computes the AverageSurfaceMetric data
 * cube over it, and writes the result as Turtle.
 *
 * <p>Fix: neither the FileReader nor the FileWriter was closed; both are now
 * managed by try-with-resources so the output is flushed and handles released.
 */
public static void main(String[] args) throws IOException {
  Model m = ModelFactory.createDefaultModel();
  try (FileReader reader = new FileReader("nuts-rdf-0.91.ttl")) {
    m.read(reader, "http://nuts.geovocab.org/id/", "TTL");
  }
  GeoQualityMetric metric = new AverageSurfaceMetric();
  Model r = metric.generateResultsDataCube(m);
  try (FileWriter writer = new FileWriter("datacubes/NUTS/metric3.ttl")) {
    r.write(writer, "TTL");
  }
}
public static void main(String args[]) { // some definitions String personURI = "http://somewhere/JohnSmith"; String givenName = "John"; String familyName = "Smith"; String fullName = givenName + " " + familyName; // create an empty model Model model = ModelFactory.createDefaultModel(); // create the resource // and add the properties cascading style Resource johnSmith = model .createResource(personURI) .addProperty(VCARD.FN, fullName) .addProperty( VCARD.N, model .createResource() .addProperty(VCARD.Given, givenName) .addProperty(VCARD.Family, familyName)); // now write the model in XML form to a file model.write(System.out); }
// replace private void replaceTboxAndDisplayMetadata( OntModel displayModel, Model addStatements, Model removeStatements, UpdateSettings settings) { OntModel oldDisplayModelTboxModel = settings.getOldDisplayModelTboxModel(); OntModel oldDisplayModelDisplayMetadataModel = settings.getOldDisplayModelDisplayMetadataModel(); OntModel newDisplayModelTboxModel = settings.getNewDisplayModelTboxModel(); OntModel newDisplayModelDisplayMetadataModel = settings.getNewDisplayModelDisplayMetadataModel(); OntModel loadedAtStartup = settings.getLoadedAtStartupDisplayModel(); OntModel oldVivoListView = settings.getVivoListViewConfigDisplayModel(); // Remove old display model tbox and display metadata statements from display model removeStatements.add(oldDisplayModelTboxModel); removeStatements.add(oldDisplayModelDisplayMetadataModel); // the old startup folder only contained by oldVivoListView removeStatements.add(oldVivoListView); StringWriter sw = new StringWriter(); try { log.debug( "Adding old display tbox model, display metadata model, and oldVivoListView to remove statements. Remove statements now include:"); removeStatements.write(sw, "N3"); log.debug(sw.toString()); sw.close(); } catch (Exception ex) { log.error("Exception occurred", ex); } // Add statements from new tbox and display metadata addStatements.add(newDisplayModelTboxModel); addStatements.add(newDisplayModelDisplayMetadataModel); // this should include the list view in addition to other files addStatements.add(loadedAtStartup); try { sw = new StringWriter(); log.debug( "Adding new display tbox model, display metadata model, and loaded at startup to add statements. Add statements now include:"); addStatements.write(sw, "N3"); log.debug(sw.toString()); sw.close(); } catch (Exception ex) { log.error( "Exception occurred in adding new display model tbox/metadata info to add statements ", ex); } log.debug( "Adding new display tbox model, display metadata model, and all models loaded at startup"); }
/** * Write the given model to a file with the given name. * * @param model the model to write to a file. * @param sFileName the name of the file to write to. * @throws FileNodeFountException if a new FileOutputStream cannot be created. * @throws IOException if the FileOutputStream cannot be closed. */ public void writeFile(com.hp.hpl.jena.rdf.model.Model model, String sFileName) throws FileNotFoundException, IOException { FileOutputStream out = new FileOutputStream(sFileName); model.write(out, "N3"); // $NON-NLS-1$ model.close(); out.close(); }
/**
 * Writes {@code jenaModel} to {@code <folder>output.rdf} using Jena's default
 * serialization (RDF/XML).
 *
 * <p>Fix: the OutputStream was never closed, leaking the handle and risking
 * unflushed output; try-with-resources now guarantees close.
 *
 * <p>NOTE(review): the {@code format} parameter is ignored — the original
 * called the no-language overload of {@code Model.write}. Preserved for
 * backward compatibility; confirm whether callers expect it to be honored.
 *
 * @param format serialization name (currently unused, see note)
 * @param folder destination directory; trimmed before use
 * @param jenaModel the model to serialize
 */
public void print(String format, String folder, Model jenaModel) {
  try (OutputStream out = new FileOutputStream(folder.trim() + "output.rdf")) {
    jenaModel.write(out);
  } catch (Exception e) {
    e.printStackTrace(); // best-effort contract preserved: log and continue
  }
}
/**
 * Saves the aligned model into a file ("AlignedOntology.owl", RDF/XML).
 *
 * <p>Fix: the stream was only closed on the success path, leaking the handle
 * if {@code schema.write} threw; try-with-resources now guarantees close.
 */
private void saveSchema() {
  try (OutputStream output = new FileOutputStream("AlignedOntology.owl")) {
    schema.write(output, "RDF/XML");
  } catch (IOException e) {
    // Best-effort: report and continue, as before.
    System.out.println(e.getLocalizedMessage());
  }
} // saveSchema
public static String execSparQLQuery(String query) { System.out.println("execSPINQuery"); Model model = getUqModel(); // Register system functions (such as sp:gt (>)) SPINModuleRegistry.get().init(); Query arqQuery = ARQFactory.get().createQuery(model, query); ARQ2SPIN arq2SPIN = new ARQ2SPIN(model); Select spinQuery = (Select) arq2SPIN.createQuery(arqQuery, null); System.out.println("SPIN query in Turtle:"); model.write(System.out, FileUtils.langTurtle); System.out.println("-----"); String str = spinQuery.toString(); System.out.println("SPIN query:\n" + str); // Now turn it back into a Jena Query Query parsedBack = ARQFactory.get().createQuery(spinQuery); System.out.println("Jena query:\n" + parsedBack); com.hp.hpl.jena.query.Query arq = ARQFactory.get().createQuery(spinQuery); QueryExecution qexec = ARQFactory.get().createQueryExecution(arq, model); QuerySolutionMap arqBindings = new QuerySolutionMap(); arqBindings.add("predicate", RDFS.label); qexec.setInitialBinding(arqBindings); // Pre-assign the arguments ResultSet rs = qexec.execSelect(); // System.out.println("#####################################################################"); // // if (rs.hasNext()) { // QuerySolution row = rs.next(); // System.out.println("Row: " +row.toString()); // RDFNode user = row.get("User"); // Literal label = row.getLiteral("label"); // System.out.println(user.toString()); // } // RDFNode object = rs.next().get("object"); // System.out.println("Label is " + object); Collection<User> users = Sparql.exec(getUqModel(), User.class, query); String usersString = ""; for (User user : users) { System.out.println("User: "******"<br/>"; } System.out.println("execSPINQuery() done."); return usersString; }
// Multiple changes from 1.4 to 1.5 will occur // update migration model public void migrateDisplayModel(UpdateSettings settings) throws Exception { log.debug("Beginning migration of display model"); OntModel displayModel = settings.getDisplayModel(); Model addStatements = ModelFactory.createDefaultModel(); Model removeStatements = ModelFactory.createDefaultModel(); // remove old tbox and display metadata statements and add statements from new versions replaceTboxAndDisplayMetadata(displayModel, addStatements, removeStatements, settings); // Update statements for data getter class types that have changed in 1.5 updateDataGetterClassNames(displayModel, addStatements, removeStatements); // add cannot delete flags to pages that shouldn't allow deletion on page list addCannotDeleteFlagDisplayModel(displayModel, addStatements, removeStatements); // removes requiresTemplate statement for people page updatePeoplePageDisplayModel(displayModel, addStatements, removeStatements); // add page list addPageListDisplayModel(displayModel, addStatements, removeStatements, settings); // update data getter labels updateDataGetterLabels(displayModel, addStatements, removeStatements, settings); displayModel.enterCriticalSection(Lock.WRITE); try { if (log.isDebugEnabled()) { StringWriter sw = new StringWriter(); addStatements.write(sw, "N3"); log.debug("Statements to be added are: "); log.debug(sw.toString()); sw.close(); sw = new StringWriter(); removeStatements.write(sw, "N3"); log.debug("Statements to be removed are: "); log.debug(sw.toString()); sw.close(); } displayModel.remove(removeStatements); displayModel.add(addStatements); } finally { displayModel.leaveCriticalSection(); } }
/**
 * Writes the session's "leftoverModel" back to the client as RDF/XML.
 *
 * <p>Fixes: the content type was set to "RDF/XML-ABBREV" — that is a Jena
 * writer-language name, not a MIME type — now "application/rdf+xml"; the
 * XML prolog bytes are encoded explicitly as UTF-8 (matching the declared
 * encoding) instead of the platform default; and the output stream is closed
 * via try-with-resources even when writing fails.
 */
private void processMergeResultRequest(VitroRequest vreq, HttpServletResponse response) {
  Model lmodel = (Model) vreq.getSession().getAttribute("leftoverModel");
  response.setContentType("application/rdf+xml");
  try (OutputStream outStream = response.getOutputStream()) {
    // getBytes("UTF-8") may throw UnsupportedEncodingException (an IOException).
    outStream.write("<?xml version=\"1.0\" encoding=\"UTF-8\"?>".getBytes("UTF-8"));
    lmodel.write(outStream, "RDF/XML-ABBREV");
    outStream.flush();
  } catch (IOException ioe) {
    throw new RuntimeException(ioe);
  }
}
/**
 * Asserts that the RDF extracted from an XHTML document (via the RDFa parser)
 * is isomorphic with a reference RDF document. On mismatch the extracted
 * triples are dumped to stderr as Turtle before the assertion fires.
 */
private void compareRDF(URL htmlURL, URL compareURL) throws SAXException, IOException {
  // Normalize file:/x URLs to file:///x so the parsers resolve them consistently.
  // NOTE(review): the normalized compare-URL string is computed but the model
  // is loaded from the unnormalized form, exactly as in the original.
  String compareForm = compareURL.toExternalForm();
  if (compareForm.matches("file:/[^/][^/].*")) {
    compareForm = compareForm.replaceFirst("file:/", "file:///");
  }
  String htmlForm = htmlURL.toExternalForm();
  if (htmlForm.matches("file:/[^/][^/].*")) {
    htmlForm = htmlForm.replaceFirst("file:/", "file:///");
  }

  // Expected triples come straight from the comparison document.
  Model expected = FileManager.get().loadModel(compareURL.toExternalForm());

  // Actual triples are produced by running the XHTML through the RDFa reader.
  Model actual = ModelFactory.createDefaultModel();
  StatementSink sink = new JenaStatementSink(actual);
  XMLReader parser =
      ParserFactory.createReaderForFormat(sink, Format.XHTML, Setting.OnePointOne);
  parser.parse(htmlForm);

  boolean result = expected.isIsomorphicWith(actual);
  if (!result) {
    actual.write(System.err, "TTL"); // dump for diagnosing the mismatch
  }
  assertTrue("Files match (" + htmlURL + ")", result);
}
/**
 * Serializes {@code model} to {@code out} in the given Jena language.
 *
 * <p>All namespace-prefix registrations are stripped from the model first
 * (a deliberate side effect — see inline comment); the output therefore
 * contains no custom prefixes.
 *
 * @param model Input Model; MUTATED: its prefix map is cleared
 * @param lang See {@link com.hp.hpl.jena.rdf.model.Model#read(java.io.InputStream, String,
 *     String)}
 * @param out Serialized model
 * @throws JenaException on serialization failure
 * @throws IllegalArgumentException if {@code model} or {@code out} is null
 */
public static void serializeModel(Model model, String lang, OutputStream out)
    throws JenaException {
  if (model == null)
    throw new IllegalArgumentException(
        Messages.getServerString("model.util.model.null")); // $NON-NLS-1$
  if (out == null)
    throw new IllegalArgumentException(
        Messages.getServerString("model.util.outputstream.null")); // $NON-NLS-1$
  // Avoid 'WARN com.hp.hpl.jena.xmloutput.impl.BaseXMLWriter - Namespace prefix 'j.1' is
  // reserved by Jena.' — iterate over a snapshot of the key set because
  // removeNsPrefix mutates the map we are iterating.
  for (String prefix : new HashSet<String>(model.getNsPrefixMap().keySet())) {
    model.removeNsPrefix(prefix);
  }
  model.write(out, lang);
}
/**
 * For every row in the Vaadin table, fetches all (p, o) pairs of the row's
 * "sourceURI" from the SPARQL endpoint into a shared model (each row as a
 * fresh blank-node subject) and serializes the accumulated model to a fixed
 * local Turtle file after each row.
 *
 * <p>NOTE(review): if the FileWriter constructor fails, {@code fstream} stays
 * null and {@code new BufferedWriter(fstream)} throws NPE; the hard-coded
 * path "/home/mofeed/TrialStart/zicozico.nt" and the .nt extension for
 * TURTLE output both look like leftovers to revisit. The writer is only
 * closed on the success path.
 *
 * @param table Vaadin table whose items carry a "sourceURI" property
 * @param endpoint SPARQL endpoint URL to query
 */
private void cachingForTriples(Table table, String endpoint) {
    Model model = ModelFactory.createDefaultModel();
    List<String> resources = null;
    FileWriter fstream = null;
    try {
        fstream = new FileWriter("/home/mofeed/TrialStart/zicozico.nt");
    } catch (IOException e1) {
        // TODO Auto-generated catch block
        e1.printStackTrace();
    }
    BufferedWriter out = new BufferedWriter(fstream);
    try {
        for (Object id : table.getItemIds()) {
            Item item = table.getItem(id);
            Property sourceURI = item.getItemProperty("sourceURI"); // vaadin property not jena
            // Each table row gets its own blank-node subject in the cache model.
            Resource resource = model.createResource();
            try {
                // Pull every predicate/object pair for this row's source URI.
                String sparqlQuery =
                    "select distinct * where { <" + sourceURI.getValue() + "> ?p ?o .}";
                Query query = QueryFactory.create(sparqlQuery);
                QueryExecution qexec = QueryExecutionFactory.sparqlService(endpoint, query);
                com.hp.hpl.jena.query.ResultSet results = qexec.execSelect();
                com.hp.hpl.jena.query.QuerySolution binding = null;
                while (results.hasNext()) {
                    binding = results.next();
                    String property = binding.getResource("?p").toString();
                    String value;
                    // Objects may be resources or literals; keep the string form either way.
                    if (binding.get("?o").isResource())
                        value = binding.getResource("?o").toString();
                    else value = binding.getLiteral("?o").toString();
                    com.hp.hpl.jena.rdf.model.Property pt = ResourceFactory.createProperty(property);
                    // resource.addProperty(pt, value);
                    model.add(resource, pt, value);
                }
                qexec.close();
                // Re-serialize the whole accumulated model after every row.
                model.write(out, null, "TURTLE");
            } catch (Exception e) {
                Notification.show(e.toString());
            }
            // model.add(s, p, o);
            // Property destinationURI = item.getItemProperty("destinationURI");
            // out.write(sourceURI.getValue()+"\n");
        }
        /*out.flush(); fstream.flush();*/
        out.close();
    } catch (Exception e) { // Catch exception if any
        System.err.println("Error: " + e.getMessage());
    }
}
private void runWithModel(File file) throws UnsupportedEncodingException, FileNotFoundException { Model model = ModelLoader.getModel(file.getAbsolutePath()); File out = new File(file.getAbsolutePath() + ".nt"); try { OutputStream fos = new FileOutputStream(out, false); // BZip2CompressorOutputStream outputStream = new BZip2CompressorOutputStream (fos); model.write(fos, "N-TRIPLE"); // outputStream.close(); fos.close(); run(out); } catch (Exception e) { e.printStackTrace(); } }
public static void main(String[] args) { try { String request = FileUtils.readFileToString(new File("src/test/requete.rq")); Model trace = ModelFactory.createDefaultModel(); trace.read(new FileInputStream("src/test/@obsels.rdf"), "", KtbsConstants.JENA_RDF_XML); Query query = QueryFactory.create(request, Syntax.syntaxARQ); // Execute the query and obtain results QueryExecution qe = QueryExecutionFactory.create(query, trace); Model resultModel = qe.execConstruct(); resultModel.write(System.out, KtbsConstants.JENA_TURTLE, null); } catch (IOException e) { e.printStackTrace(); } }
/**
 * Builds an RDF description of a document (DCTerms/FOAF/ACM vocabulary) and
 * writes it to {@code file} as abbreviated RDF/XML.
 *
 * <p>Fix: the FileOutputStream was never closed; try-with-resources now
 * guarantees flush + close even if serialization fails.
 *
 * @param date publication date (xsd:date), may be null
 * @param title document title, may be null
 * @param id document identifier, may be null
 * @param cls ACM classification codes, may be null
 * @param DURI document resource URI
 * @param auth authors; each becomes a foaf:Person creator node
 * @param abst abstract text, may be null
 * @param doi unused here (kept for signature parity)
 * @param file output path for the RDF/XML
 */
void writeRDF(
    String date,
    String title,
    String id,
    List<String> cls,
    String DURI,
    List<Person> auth,
    String abst,
    String doi,
    String file) {
  Model model = ModelFactory.createDefaultModel();
  model.setNsPrefix("acm", "http://acm.rkbexplorer.com/ontologies/acm#");
  model.setNsPrefix("foaf", FOAF.NS);
  model.setNsPrefix("dcterms", DCTerms.NS);
  Property Class = model.createProperty(acm + "class");
  Resource doc1 = model.createResource(DURI);
  // Optional literals: only attach what the caller supplied.
  if (id != null) doc1.addProperty(DCTerms.identifier, id);
  if (title != null) doc1.addProperty(DCTerms.title, title);
  if (abst != null) doc1.addProperty(DCTerms.abstract_, abst);
  if (date != null) doc1.addProperty(DCTerms.date, date, XSDDateType.XSDdate);
  // One foaf:Person creator node per author.
  for (int i = 0; i < auth.size(); i++) {
    doc1.addProperty(
        DCTerms.creator,
        model
            .createResource(FOAF.Person)
            .addProperty(FOAF.firstName, auth.get(i).getForename())
            .addProperty(FOAF.family_name, auth.get(i).getSurname()));
  }
  if (cls != null) {
    for (int i = 0; i < cls.size(); i++) {
      doc1.addProperty(Class, cls.get(i));
    }
  }
  try (FileOutputStream out = new FileOutputStream(file)) {
    model.write(out, "RDF/XML-ABBREV");
  } catch (IOException e) {
    System.out.println("Exception caught" + e.getMessage());
  }
}
/**
 * Define how the message is processed.
 *
 * <p>Reads the RDF body of the incoming Camel message into a model, serializes
 * it as N-Triples, and rewrites the message into a SPARQL Update POST that
 * first deletes the resource's existing triples (including the
 * "/fcr:export?format=jcr/xml" variant) and then inserts the fresh ones.
 *
 * @param exchange the current camel message exchange
 * @throws IOException if reading the body or encoding the update fails
 */
public void process(final Exchange exchange) throws IOException {
  final Message in = exchange.getIn();
  final ByteArrayOutputStream serializedGraph = new ByteArrayOutputStream();
  final String subject = ProcessorUtils.getSubjectUri(in);
  final String namedGraph = in.getHeader(FcrepoHeaders.FCREPO_NAMED_GRAPH, String.class);
  // Parse the message body using the language implied by its Content-Type.
  final Model model =
      createDefaultModel()
          .read(
              in.getBody(InputStream.class),
              subject,
              langFromMimeType(in.getHeader(Exchange.CONTENT_TYPE, String.class)));
  model.write(serializedGraph, "N-TRIPLE");
  /*
   * Before inserting updated triples, the Sparql update command
   * below deletes all triples with the defined subject uri
   * (coming from the FCREPO_IDENTIFIER and FCREPO_BASE_URL headers).
   * It also deletes triples that have a subject corresponding to
   * that Fcrepo URI plus the "/fcr:export?format=jcr/xml" string
   * appended to it. This makes it possible to more completely
   * remove any triples for a given resource that were added
   * earlier. If fcrepo ever stops producing triples that are
   * appended with /fcr:export?format..., then that extra line
   * can be removed. It would also be possible to recursively delete
   * triples (by removing any triple whose subject is also an object
   * of the starting (or context) URI, but that approach tends to
   * delete too many triples from the triplestore. This command does
   * not delete blank nodes.
   */
  final StringBuilder query = new StringBuilder();
  query.append(ProcessorUtils.deleteWhere(subject, namedGraph));
  query.append(";\n");
  query.append(ProcessorUtils.deleteWhere(subject + "/fcr:export?format=jcr/xml", namedGraph));
  query.append(";\n");
  query.append(ProcessorUtils.insertData(serializedGraph.toString("UTF-8"), namedGraph));
  // Rewrite the exchange as a form-encoded SPARQL Update POST.
  in.setBody("update=" + encode(query.toString(), "UTF-8"));
  in.setHeader(Exchange.HTTP_METHOD, "POST");
  in.setHeader(Exchange.CONTENT_TYPE, "application/x-www-form-urlencoded");
}
// add page list sparql query private void addPageListDisplayModel( OntModel displayModel, Model addStatements, Model removeStatements, UpdateSettings settings) { OntModel newDisplayModel = settings.getNewDisplayModelFromFile(); // Get all statements about pageListPage and pageListData Resource pageList = newDisplayModel.getResource(DisplayVocabulary.DISPLAY_NS + "pageListPage"); Resource pageListData = newDisplayModel.getResource(DisplayVocabulary.DISPLAY_NS + "pageListData"); addStatements.add(newDisplayModel.listStatements(pageList, null, (RDFNode) null)); addStatements.add(newDisplayModel.listStatements(pageListData, null, (RDFNode) null)); StringWriter sw = new StringWriter(); try { if (pageList != null) { log.debug("Page list uri is " + pageList.getURI()); } else { log.debug("Page list uri is null for some reason"); } log.debug("New Display model from file is "); newDisplayModel.write(sw, "N3"); log.debug(sw.toString()); sw.close(); sw = new StringWriter(); log.debug("Added statements now include "); addStatements.write(sw, "N3"); log.debug(sw.toString()); sw.close(); } catch (Exception ex) { log.error("Exception occurred in writing out new display model", ex); } log.debug("Checking: AFTER adding pageList resource, what do we have for pageList page"); Resource testResource = ResourceFactory.createResource(DisplayVocabulary.DISPLAY_NS + "pageListPage"); StmtIterator testIt = addStatements.listStatements(testResource, null, (RDFNode) null); if (!testIt.hasNext()) { log.debug( "Add statements does not have the page list page resource " + testResource.getURI()); } while (testIt.hasNext()) { log.debug("Statement for page list resource: " + testIt.nextStatement().toString()); } }
/**
 * End-to-end test of the linking job: builds two single-item models with the
 * same title, configures a Levenshtein-based linking rule (threshold 1.0,
 * i.e. exact title match) producing rdfs:seeAlso links, runs the mocked job,
 * and prints the resulting target model.
 *
 * <p>NOTE(review): the result is only written to stdout — there is no
 * assertion on the produced links; consider asserting the seeAlso triple.
 */
public void testLinking() throws Exception {
  // test source model: one OAI item titled "TestTitle"
  String sourceNS = "http://bla.com/";
  Model targetModel = ModelFactory.createDefaultModel();
  Resource item1 = targetModel.createResource(sourceNS + "1");
  item1.addProperty(RDF.type, OAI.Item);
  item1.addProperty(DC.title, "TestTitle");
  // test target model: a second item with an identical title
  String targetNS = "http://blub.com/";
  Model remoteModel = ModelFactory.createDefaultModel();
  Resource item2 = remoteModel.createResource(targetNS + "1");
  item2.addProperty(RDF.type, OAI.Item);
  item2.addProperty(DC.title, "TestTitle");
  // create dummy config: link OAI items by DC title via Levenshtein similarity
  OAI2LODConfig config = new OAI2LODConfig();
  OAI2LODConfig.LinkedSparqlEndpoint endpoint_config =
      config
          .createServerConfig("http://bla.com")
          .createLinkedSparqlEndpoint("http://blub.com", 100);
  endpoint_config.createLinkingRule(
      OAI.Item.toString(),
      DC.title.toString(),
      OAI.Item.toString(),
      DC.title.toString(),
      RDFS.seeAlso.toString(),
      "uk.ac.shef.wit.simmetrics.similaritymetrics.Levenshtein",
      1.0f);
  LinkingJob linkingJob = new LinkingJobMock(config, targetModel, remoteModel);
  linkingJob.linkData();
  Model result = linkingJob.getTargetModel();
  // Dump the linked model (default RDF/XML) for manual inspection.
  result.write(System.out);
}
/**
 * Serves the MATe model, content-negotiated by path extension:
 * ".rdf"/".xml" → RDF/XML, ".n3" → N3, anything else → an HTML listing of
 * the model's statements. Serialization happens while synchronized on
 * {@code board} so the model is not mutated mid-write.
 *
 * <p>NOTE(review): the charset parameter is emitted with single quotes
 * ("charset='utf-8'") — RFC media-type syntax expects no quotes or double
 * quotes; most clients tolerate it, but confirm before relying on it.
 */
public void doGet(HttpServletRequest request, HttpServletResponse response)
    throws ServletException, IOException {
  String path = request.getPathInfo();
  boolean htmlOutput = true;
  String language = null;
  String contentType = "text/html; charset='utf-8'";
  if (path != null) {
    // Pick the RDF serialization (and MIME type) from the file extension.
    if (path.endsWith(".rdf")) {
      contentType = "application/rdf+xml; charset='utf-8'";
      htmlOutput = false;
      language = "RDF/XML";
    } else if (path.endsWith(".xml")) {
      contentType = "application/xml; charset='utf-8'";
      htmlOutput = false;
      language = "RDF/XML";
    } else if (path.endsWith(".n3")) {
      contentType = "text/n3; charset='utf-8'";
      htmlOutput = false;
      language = "N3";
    }
  }
  response.setContentType(contentType);
  response.setStatus(HttpServletResponse.SC_OK);
  Writer writer = response.getWriter();
  // Hold the board lock so the model is stable while we serialize it.
  synchronized (board) {
    if (htmlOutput) {
      writer.write(
          "<!DOCTYPE html>\n"
              + "<html lang='en'>"
              + "<head><meta charset='utf-8'/><title>MATe model</title></head>"
              + "<body><ul>");
      StmtIterator it = model.listStatements();
      /* TODO: well, this could be prettier */
      while (it.hasNext()) writer.write("<li>" + it.nextStatement() + "</li>");
      writer.write("<ul></body></html>");
    } else model.write(writer, language);
  }
}
@Override public boolean write(OutputStream out, String lang, String base) { Model graph = null; GraphConnection graphConnection = null; try { graphConnection = openGraph(); graph = graphConnection.getGraph(); graph.enterCriticalSection(Lock.WRITE); graph.write(out, lang, base); // default to true return true; } finally { if (graph != null) { graph.leaveCriticalSection(); } if (graphConnection != null) { graphConnection.close(); } } }
public static void main(String[] args) throws IOException, InvalidLinkedUSDLModelException { LinkedUSDLModel jmodel; jmodel = LinkedUSDLModelFactory.createEmptyModel(); CloudSigmaRecurrentSubsOffering(jmodel); jmodel.setBaseURI("http://PricingAPICloudSigma6MRecurrentSubscription.com"); Model instance = jmodel.WriteToModel(); // transform the java models to a semantic representation File outputFile = new File("./DebuggingFiles/CloudsigmaRS.ttl"); if (!outputFile.exists()) { outputFile.createNewFile(); } FileOutputStream out = new FileOutputStream(outputFile); instance.write(out, "Turtle"); out.close(); }
/**
 * Writes {@code model} as bzip2-compressed N-Triples next to {@code file}
 * (".nt.bz2" suffix) and returns the compressed size; falls back to the
 * original file's length if the output was not produced.
 *
 * <p>Fix: the streams were only closed on the success path; try-with-resources
 * now guarantees both are closed (closing the BZip2 stream also finishes the
 * compression block, which is required for a valid archive).
 *
 * <p>Note: the method name's "Orginal" typo is preserved — it is part of the
 * public interface.
 *
 * @param model the model to serialize
 * @param file path whose ".nt.bz2" sibling receives the output
 * @return size in bytes of the compressed file, or {@code file.length()} on failure
 */
public long computeOrginalNTriple(Model model, File file) {
  File out = new File(file.getAbsolutePath() + ".nt.bz2");
  try (OutputStream fos = new BufferedOutputStream(new FileOutputStream(out, false));
      BZip2CompressorOutputStream outputStream = new BZip2CompressorOutputStream(fos)) {
    model.write(outputStream, "N-TRIPLE");
  } catch (Exception e) {
    e.printStackTrace(); // best-effort: fall through to the size check below
  }
  if (out.exists()) return out.length();
  else {
    System.err.println("Wasn't able to compute original ntriple size +bzip2!");
    return file.length();
  }
}