/** * It setups the rewrite rules in Virtuoso for dereferencing the dataset URI-s. * * @param jobConf the {@link eu.aliada.linkeddataserversetup.model.JobConfiguration} that contains * information to setup the rewrite rules in Virtuoso * @param db The DDBB connection. * @return the {@link eu.aliada.linkedDataServerSetup.model.job} created. * @since 1.0 */ public Job setup(final JobConfiguration jobConf, final DBConnectionManager dbConn) { LOGGER.debug(MessageCatalog._00030_STARTING); // Update job start-date in DDBB dbConn.updateJobStartDate(jobConf.getId()); // URLEncode and prepare some command parameters for Virtuoso Rewrite Rules LOGGER.debug(MessageCatalog._00037_ENCODE_PARAMS); final boolean encoded = encodeParams(jobConf); if (encoded) { // Execute global ISQL commands file for rewriting rules in Virtuoso final boolean success = executeGlobalIsqlCommands(jobConf); if (success) { // Get subset ISQL commands file for rewriting rules in Virtuoso and execute them for (final Iterator<Subset> iterSubsets = jobConf.getSubsets().iterator(); iterSubsets.hasNext(); ) { final Subset subset = iterSubsets.next(); executeSubsetIsqlCommands(jobConf, subset); } } // Create Dataset default HTML page createDatasetDefaultPage(jobConf, dbConn); } else { LOGGER.error(MessageCatalog._00039_INPUT_PARAMS_ERROR, jobConf.getId()); } // Update job end_date of DDBB LOGGER.debug(MessageCatalog._00057_UPDATING_JOB_DDBB, jobConf.getId()); dbConn.updateJobEndDate(jobConf.getId()); final Job job = dbConn.getJob(jobConf.getId()); LOGGER.debug(MessageCatalog._00041_STOPPED); return job; }
/**
 * Encodes the parameters to pass to the ISQL commands. URLEncode and replace % by %%, for
 * Virtuoso Rewrite Rules.
 *
 * <p>Side effects: populates the instance fields {@code uriDocPart}, {@code uriIdPart},
 * {@code uriDefPart}, {@code graphsSelectEncoded}, {@code graphsEncoded},
 * {@code domainNameEncoded}, {@code uriIdPartEncoded}, {@code ontologyEncoded},
 * {@code uriDocConcept} and {@code rulesNamesSuffix}.
 *
 * @param jobConf the {@link eu.aliada.linkeddataserversetup.model.JobConfiguration} that contains
 *     the ISQL commands parameters.
 * @return true if the encoding has been carried out correctly.
 * @since 1.0
 */
public boolean encodeParams(final JobConfiguration jobConf) {
  boolean encoded = false;
  try {
    // Remove leading/trailing slashes of URI Document section
    if (jobConf.getUriDocPart() != null) {
      uriDocPart = removeLeadingTralingSlashes(jobConf.getUriDocPart());
    } else {
      uriDocPart = "";
    }
    // Remove leading/trailing slashes of URI identifier section.
    // NOTE(review): unlike uriDocPart, these two fields keep their prior value
    // when the config value is null — confirm the field defaults are non-null.
    if (jobConf.getUriIdPart() != null) {
      uriIdPart = removeLeadingTralingSlashes(jobConf.getUriIdPart());
    }
    // Remove leading/trailing slashes of URI Ontology section
    if (jobConf.getUriDefPart() != null) {
      uriDefPart = removeLeadingTralingSlashes(jobConf.getUriDefPart());
    }
    // Encode dataset graphs: build both the SPARQL " FROM <graph>" clause string
    // and the "&graph=<graph>" query-string fragment, per subset.
    graphsSelectEncoded = "";
    graphsEncoded = "";
    int subsetIndex = 0;
    for (final Iterator<Subset> iterSubsets = jobConf.getSubsets().iterator();
        iterSubsets.hasNext();
        ) {
      final Subset subset = iterSubsets.next();
      final String graphSelectEncoded =
          URLEncoder.encode(" FROM <" + subset.getGraph() + ">", "UTF-8");
      String linksGraphSelectEncoded =
          URLEncoder.encode(" FROM <" + subset.getLinksGraph() + ">", "UTF-8");
      graphsSelectEncoded = graphsSelectEncoded + graphSelectEncoded + linksGraphSelectEncoded;
      String graphEncoded = "";
      if (subsetIndex == 0) {
        // First graph parameter: the "&graph" prefix is deliberately left
        // unencoded; only "=<graph>" is URL-encoded.
        graphEncoded = "&graph" + URLEncoder.encode("=" + subset.getGraph(), "UTF-8");
      } else {
        // Subsequent parameters: the whole "&graph=<graph>" is URL-encoded.
        graphEncoded = URLEncoder.encode("&graph=" + subset.getGraph(), "UTF-8");
      }
      final String linksGraphEncoded =
          URLEncoder.encode("&graph=" + subset.getLinksGraph(), "UTF-8");
      graphsEncoded = graphsEncoded + graphEncoded + linksGraphEncoded;
      subsetIndex++;
    }
    // Double every % so the strings survive later printf-style formatting
    graphsSelectEncoded = graphsSelectEncoded.replace("%", "%%");
    graphsEncoded = graphsEncoded.replace("%", "%%");
    // Encode domain name
    domainNameEncoded = URLEncoder.encode(jobConf.getDomainName(), "UTF-8");
    domainNameEncoded = domainNameEncoded.replace("%", "%%");
    // Encode URI Identifier part
    uriIdPartEncoded = URLEncoder.encode(uriIdPart, "UTF-8");
    uriIdPartEncoded = uriIdPartEncoded.replace("%", "%%");
    // Encode Ontology URI
    ontologyEncoded = URLEncoder.encode(jobConf.getOntologyUri(), "UTF-8");
    ontologyEncoded = ontologyEncoded.replace("%", "%%");
    // Compose URI document part + URI Concept part
    if (jobConf.getUriDocPart() != null) {
      uriDocConcept = removeLeadingTralingSlashes(jobConf.getUriDocPart());
    }
    if (jobConf.getUriConceptPart() != null) {
      final String datasetConceptPart = removeLeadingTralingSlashes(jobConf.getUriConceptPart());
      if (datasetConceptPart.length() > 0) {
        // NOTE(review): possible NPE here if getUriDocPart() is null and the
        // uriDocConcept field was never initialized elsewhere — TODO confirm
        // the field's default value.
        if (uriDocConcept.length() > 0) {
          uriDocConcept = uriDocConcept + "/" + datasetConceptPart;
        } else {
          uriDocConcept = datasetConceptPart;
        }
      }
    }
    // Compose rules name suffix: domain name stripped of "http", ":", "/" and "."
    rulesNamesSuffix = jobConf.getDomainName().replace("http", "");
    rulesNamesSuffix = rulesNamesSuffix.replace(":", "");
    rulesNamesSuffix = rulesNamesSuffix.replace("/", "");
    rulesNamesSuffix = rulesNamesSuffix.replace(".", "");
    // Check that we have the parameter values
    if ((uriIdPart != null)
        && (uriDefPart != null)
        && (graphsEncoded.length() > 0)
        && (domainNameEncoded != null)
        && (ontologyEncoded != null)
        && (uriDocConcept != null)
        && (rulesNamesSuffix.length() > 0)) {
      if ((uriIdPart.length() > 0)
          && (uriDefPart.length() > 0)
          && (ontologyEncoded.length() > 0)
          && (uriDocConcept.length() > 0)
          && (domainNameEncoded.length() > 0)) {
        // Check that Identifier, Ontology and Document parts do not contain "/"
        if (!(uriIdPart.contains("/"))
            && !(uriDefPart.contains("/"))
            && !(uriDocPart.contains("/"))) {
          encoded = true;
        }
      }
    }
  } catch (UnsupportedEncodingException exception) {
    LOGGER.error(MessageCatalog._00038_ENCODING_ERROR, exception);
  }
  return encoded;
}
/** * Create the dataset defult HTML page. * * @param jobConf the {@link eu.aliada.linkeddataserversetup.model.JobConfiguration} that contains * information to create the dataset HTML page. * @param db The DDBB connection. * @return * @since 2.0 */ public void createDatasetDefaultPage( final JobConfiguration jobConf, final DBConnectionManager dbConn) { // Create the folder where the page resides, if it does not exist final String pageFolder = jobConf.getVirtHttpServRoot() + File.separator + rulesNamesSuffix; final String pageURL = "http://" + jobConf.getDomainName(); final File fFolder = new File(pageFolder); if (!fFolder.exists()) { fFolder.mkdir(); } // Update the dataset web page root in the DB dbConn.updateDatasetWebPageRoot(jobConf.getDatasetId(), pageFolder); final String pagePath = pageFolder + File.separator + DATASET_INDEX_PAGE; // Remove the page if it already exists final File fPage = new File(pagePath); if (fPage.exists()) { fPage.delete(); } // Copy image and CSS files to web server folder copyFilesToWebServerPath(jobConf, pageFolder, pageURL); final String orgLogoPath = jobConf.getOrgImageURL(); final String styleSheetPath = jobConf.getCssFileURL(); // Get the number of triples of the dataset final int numTriples = calculateDatasetNumTriples( jobConf.getSparqlEndpointUri(), jobConf.getSparqlLogin(), jobConf.getSparqlPassword(), jobConf.getSubsets()); // Now, create a new one try { final FileWriter fstream = new FileWriter(pagePath); final BufferedWriter out = new BufferedWriter(fstream); String line = "<!DOCTYPE html PUBLIC \"-//W3C//DTD HTML 4.01 Transitional//EN\" \"http://www.w3.org/TR/html4/loose.dtd\">"; out.write(line); out.newLine(); line = "<html>"; out.write(line); out.newLine(); line = "<head>"; out.write(line); out.newLine(); line = "<meta http-equiv=\"Content-Type\" content=\"text/html; charset=ISO-8859-1\">"; out.write(line); out.newLine(); line = "<title>" + jobConf.getDatasetDesc().toUpperCase() + "</title>"; out.write(line); 
out.newLine(); line = "<link rel=\"stylesheet\" href=\"" + styleSheetPath + "\" type=\"text/css\">"; out.write(line); out.newLine(); line = "</head>"; out.write(line); out.newLine(); line = "<body>"; out.write(line); out.newLine(); line = "<img src=\"" + orgLogoPath + "\">"; out.write(line); out.newLine(); line = "<h1>" + jobConf.getDatasetDesc() + "</h1>"; out.write(line); out.newLine(); line = "<table><colgroup><col width=\"25%\"><col width=\"75%\"></colgroup>"; out.write(line); out.newLine(); // Description line = String.format( "<tr><td class=\"label\">%s</td><td class=\"input\">%s</td></tr>", "description", jobConf.getDatasetLongDesc()); out.write(line); out.newLine(); // Publisher line = String.format( "<tr><td class=\"label\">%s</td><td class=\"input\">%s</td></tr>", "publisher", jobConf.getOrgName().toUpperCase()); out.write(line); out.newLine(); // Source URL line = String.format( "<tr><td class=\"label\">%s</td><td class=\"input\"><a href=\"%s\" target=\"_blank\">%s</a></td></tr>", "source", jobConf.getDatasetSourceURL(), jobConf.getDatasetSourceURL()); out.write(line); out.newLine(); // Created line = String.format( "<tr><td class=\"label\">%s</td><td class=\"input\">%s</td></tr>", "created", getStringNow()); out.write(line); out.newLine(); // Contributor line = String.format( "<tr><td class=\"label\">%s</td><td class=\"input\">%s</td></tr>", "contributor", jobConf.getDatasetAuthor()); out.write(line); out.newLine(); // License URL line = String.format( "<tr><td class=\"label\">%s</td><td class=\"input\"><a href=\"%s\" target=\"_blank\">%s</a></td></tr>", "license", jobConf.getLicenseURL(), jobConf.getLicenseURL()); out.write(line); out.newLine(); // SPARQL endpoint URL line = String.format( "<tr><td class=\"label\">%s</td><td class=\"input\"><a href=\"%s\" target=\"_blank\">%s</a></td></tr>", "SPARQL endpoint", jobConf.getPublicSparqlEndpointUri(), jobConf.getPublicSparqlEndpointUri()); out.write(line); out.newLine(); // Vocabulary URL line = 
String.format( "<tr><td class=\"label\">%s</td><td class=\"input\"><a href=\"%s\" target=\"_blank\">%s</a></td></tr>", "vocabulary", jobConf.getOntologyUri(), jobConf.getOntologyUri()); out.write(line); out.newLine(); // Number of triples line = String.format( "<tr><td class=\"label\">%s</td><td class=\"input\">%s</td></tr>", "number of triples", numTriples); out.write(line); out.newLine(); // List resources of dataset final String datasetUri = "http://" + jobConf.getDomainName() + "/" + uriDocConcept; line = String.format( "<tr><td class=\"label\">%s</td><td class=\"input\"><a href=\"%s\" target=\"_blank\">%s</a></td></tr>", "list of resources", datasetUri, datasetUri); out.write(line); out.newLine(); // List subsets line = String.format("<tr><td class=\"label\">%s</td><td class=\"input\"><ul>", "subsets"); out.write(line); out.newLine(); for (final Iterator<Subset> iterSubsets = jobConf.getSubsets().iterator(); iterSubsets.hasNext(); ) { final Subset subset = iterSubsets.next(); String uriDocConceptSubset = ""; if (subset.getUriConceptPart() != null) { uriDocConceptSubset = removeLeadingTralingSlashes(subset.getUriConceptPart()); } if (uriDocConceptSubset.length() > 0) { // List resources of subset final String subsetUri = datasetUri + "/" + uriDocConceptSubset; line = String.format( "<li>%s: <a href=\"%s\" target=\"_blank\">%s</a></li>", subset.getDescription(), subsetUri, subsetUri); out.write(line); out.newLine(); } } line = "</ul></td></tr>"; out.write(line); out.newLine(); line = "</table>"; out.write(line); out.newLine(); line = "</body>"; out.write(line); out.newLine(); line = "</html>"; out.write(line); out.newLine(); out.close(); } catch (IOException exception) { LOGGER.error(MessageCatalog._00034_FILE_CREATION_FAILURE, exception, pagePath); } }