Example #1
1
 /**
  * Returns job information in the DDBB.
  *
  * @param jobId the job identification.
  * @return the {@link eu.aliada.ckancreation.model.Job} which contains the job information.
  * @since 2.0
  */
 public Job getJob(final int jobId) {
   // Get the job information from the DDBB
   final Job job = new Job();
   job.setId(jobId);
   try {
     final Statement sta = getConnection().createStatement();
     final String sql = "SELECT * FROM ckancreation_job_instances WHERE job_id=" + jobId;
     final ResultSet resultSet = sta.executeQuery(sql);
     while (resultSet.next()) {
       job.setStartDate(resultSet.getTimestamp("start_date"));
       job.setEndDate(resultSet.getTimestamp("end_date"));
       job.setCkanOrgURL(resultSet.getString("ckan_org_url"));
       job.setCkanDatasetURL(resultSet.getString("ckan_dataset_url"));
       // Determine job status
       String status = JOB_STATUS_IDLE;
       if (job.getStartDate() != null) {
         status = JOB_STATUS_RUNNING;
         if (job.getEndDate() != null) {
           status = JOB_STATUS_FINISHED;
         }
       }
       job.setStatus(status);
     }
     resultSet.close();
     sta.close();
   } catch (SQLException exception) {
     LOGGER.error(MessageCatalog._00024_DATA_ACCESS_FAILURE, exception);
     return null;
   }
   return job;
 }
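
 For comparison, a minimal sketch of the same lookup written with a PreparedStatement and try-with-resources; it assumes the same ckancreation_job_instances table and Job model as the method above and is an illustrative variant, not the project's code.

  // Hypothetical variant of getJob(): parameterized query and automatic resource cleanup.
  public Job getJobSketch(final int jobId) {
    final Job job = new Job();
    job.setId(jobId);
    final String sql = "SELECT * FROM ckancreation_job_instances WHERE job_id = ?";
    try (PreparedStatement statement = getConnection().prepareStatement(sql)) {
      statement.setInt(1, jobId);
      try (ResultSet resultSet = statement.executeQuery()) {
        while (resultSet.next()) {
          job.setStartDate(resultSet.getTimestamp("start_date"));
          job.setEndDate(resultSet.getTimestamp("end_date"));
          // ... remaining columns and status handling as in the original method
        }
      }
    } catch (SQLException exception) {
      LOGGER.error(MessageCatalog._00024_DATA_ACCESS_FAILURE, exception);
      return null;
    }
    return job;
  }
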
  /**
   * Creates a new job on the Links Discovery Component.
   *
   * @param request the HTTP request.
   * @param jobid the job identifier associated with this instance.
   * @return a response which includes the info of the new job.
   * @since 1.0
   */
  @POST
  @Path("/jobs")
  @Consumes(MediaType.APPLICATION_FORM_URLENCODED)
  @Produces({MediaType.TEXT_XML, MediaType.APPLICATION_XML, MediaType.APPLICATION_JSON})
  public Response newJob(
      @Context final HttpServletRequest request, @FormParam("jobid") final Integer jobid) {

    LOGGER.debug(MessageCatalog._00021_NEW_JOB_REQUEST);

    if (jobid == null) {
      LOGGER.error(MessageCatalog._00022_MISSING_INPUT_PARAM, "jobid");
      return Response.status(Status.BAD_REQUEST).build();
    }

    // Get configuration parameters
    final DBConnectionManager dbConn = (DBConnectionManager) context.getAttribute("db");
    final JobConfiguration jobConf = dbConn.getJobConfiguration(jobid);
    if (jobConf == null) {
      LOGGER.error(MessageCatalog._00023_JOB_CONFIGURATION_NOT_FOUND, jobid);
      return Response.status(Status.BAD_REQUEST).build();
    }
    final DDBBParams ddbbParams = new DDBBParams();
    ddbbParams.setUsername(context.getInitParameter("ddbb.username"));
    ddbbParams.setPassword(context.getInitParameter("ddbb.password"));
    ddbbParams.setDriverClassName(context.getInitParameter("ddbb.driverClassName"));
    ddbbParams.setUrl(context.getInitParameter("ddbb.url"));
    // Program linking processes
    final LinksDiscovery linksDisc = new LinksDiscovery();
    final Job job = linksDisc.programLinkingProcesses(jobConf, dbConn, ddbbParams);

    return Response.created(uriInfo.getAbsolutePathBuilder().build()).entity(job).build();
  }
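
 For illustration, a minimal client-side sketch of calling this endpoint with java.net.http.HttpClient; the host, port and context path used here are assumptions, not values taken from the project.

import java.net.URI;
import java.net.http.HttpClient;
import java.net.http.HttpRequest;
import java.net.http.HttpResponse;

// Hypothetical client for the POST /jobs endpoint; base URL is an assumption.
public final class NewJobClientSketch {
  public static void main(final String[] args) throws Exception {
    final HttpClient client = HttpClient.newHttpClient();
    final HttpRequest request = HttpRequest.newBuilder()
        .uri(URI.create("http://localhost:8080/links-discovery/jobs"))
        .header("Content-Type", "application/x-www-form-urlencoded")
        .header("Accept", "application/json")
        .POST(HttpRequest.BodyPublishers.ofString("jobid=1"))
        .build();
    final HttpResponse<String> response =
        client.send(request, HttpResponse.BodyHandlers.ofString());
    // Expect 201 Created with the new job entity, or 400 Bad Request if jobid is missing/unknown.
    System.out.println(response.statusCode() + " " + response.body());
  }
}
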
Example #3
0
 /**
  * Gets a new DDBB connection and stores it in the {@code conn} field.
  *
  * @since 2.0
  */
 public void getNewConnection() {
   InitialContext ic;
   DataSource ds;
   try {
     ic = new InitialContext();
     ds = (DataSource) ic.lookup("java:comp/env/jdbc/aliada");
     conn = ds.getConnection();
    } catch (NamingException | SQLException exception) {
      LOGGER.error(MessageCatalog._00024_DATA_ACCESS_FAILURE, exception);
    }
 }
Example #4
0
 /**
  * Gets the number of links from the specified ALIADA dataset to the specified target dataset.
  *
  * @param datasetId The dataset Identifier in the internal DB.
  * @param targetDataset The target dataset name.
  * @return the number of links.
  * @since 2.0
  */
 public int getNumLinksToExtDataset(final int datasetId, final String targetDataset) {
   int numLinks = 0;
   try {
     final Statement sta = getConnection().createStatement();
     String sql =
         "SELECT job.input_graph, subjob.num_links FROM subset, linksdiscovery_job_instances job,"
             + " linksdiscovery_subjob_instances subjob WHERE subset.datasetId="
             + datasetId
             + " AND job.input_graph=subset.graph_uri AND subjob.job_id=job.job_id"
             + " AND subjob.name='"
             + targetDataset
             +
             // Order the results to group together the links for the same graph
             // so that we only use the first row of them, as it is the latest linking
             // performed against that graph
             "' ORDER BY job.input_graph DESC,  subjob.end_date DESC";
     ResultSet resultSet = sta.executeQuery(sql);
     String prevGraph = "";
     while (resultSet.next()) {
       if (!prevGraph.equals(resultSet.getString("input_graph"))) {
         // Get only the latest number of links found.
         numLinks = numLinks + resultSet.getInt("num_links");
         prevGraph = resultSet.getString("input_graph");
       }
     }
     resultSet.close();
     sta.close();
   } catch (SQLException exception) {
     LOGGER.error(MessageCatalog._00024_DATA_ACCESS_FAILURE, exception);
   }
   return numLinks;
 }
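
 Because targetDataset is a free string concatenated directly into the SQL above, a parameterized variant avoids quoting and injection problems. A minimal sketch, assuming the same tables and columns (illustrative only, not the project's code):

  // Hypothetical parameterized variant of getNumLinksToExtDataset(); same schema assumed.
  public int getNumLinksToExtDatasetSketch(final int datasetId, final String targetDataset) {
    int numLinks = 0;
    final String sql =
        "SELECT job.input_graph, subjob.num_links"
            + " FROM subset, linksdiscovery_job_instances job,"
            + " linksdiscovery_subjob_instances subjob"
            + " WHERE subset.datasetId = ? AND job.input_graph = subset.graph_uri"
            + " AND subjob.job_id = job.job_id AND subjob.name = ?"
            + " ORDER BY job.input_graph DESC, subjob.end_date DESC";
    try (PreparedStatement statement = getConnection().prepareStatement(sql)) {
      statement.setInt(1, datasetId);
      statement.setString(2, targetDataset);
      try (ResultSet resultSet = statement.executeQuery()) {
        String prevGraph = "";
        while (resultSet.next()) {
          // Count only the first (latest) row per graph, as in the original method.
          if (!prevGraph.equals(resultSet.getString("input_graph"))) {
            numLinks += resultSet.getInt("num_links");
            prevGraph = resultSet.getString("input_graph");
          }
        }
      }
    } catch (SQLException exception) {
      LOGGER.error(MessageCatalog._00024_DATA_ACCESS_FAILURE, exception);
    }
    return numLinks;
  }
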
 /**
  * Gets the appropriate ISQL commands file. The ISQL commands create the rewrite rules in
  * Virtuoso for dereferencing the dataset URIs.
  *
  * @param isqlCommandsFilename the specific ISQL commands file name.
  * @param isqlCommandsFilenameDefault the default ISQL commands file name.
  * @return the name of the commands file to use.
  * @since 2.0
  */
 public String getIsqlCommandsFile(
     final String isqlCommandsFilename, final String isqlCommandsFilenameDefault) {
   String fileNameToUse = null;
   // Check if isqlCommandsFilename exists
   if (isqlCommandsFilename != null) {
     final File isqlFile = new File(isqlCommandsFilename);
     if (isqlFile.exists()) {
       fileNameToUse = isqlCommandsFilename;
     }
   }
   if (fileNameToUse == null) {
      // If there is no ISQL commands file specific to this subset, use the default one.
      // Check if the default isqlCommandsFilename exists
     if (isqlCommandsFilenameDefault != null) {
       final File isqlFile = new File(isqlCommandsFilenameDefault);
       if (isqlFile.exists()) {
         fileNameToUse = isqlCommandsFilenameDefault;
       }
     }
     if (fileNameToUse == null) {
       LOGGER.error(MessageCatalog._00031_FILE_NOT_FOUND, isqlCommandsFilenameDefault);
     }
   }
   return fileNameToUse;
 }
Example #6
0
 /**
  * Closes the DDBB connection.
  *
  * @since 2.0
  */
 public void closeConnection() {
   try {
     conn.close();
   } catch (SQLException exception) {
     LOGGER.error(MessageCatalog._00024_DATA_ACCESS_FAILURE, exception);
   }
 }
  /**
   * Sets up the rewrite rules in Virtuoso for dereferencing the dataset URIs.
   *
   * @param jobConf the {@link eu.aliada.linkeddataserversetup.model.JobConfiguration} that contains
   *     information to set up the rewrite rules in Virtuoso.
   * @param dbConn the DDBB connection.
   * @return the {@link eu.aliada.linkeddataserversetup.model.Job} created.
   * @since 1.0
   */
  public Job setup(final JobConfiguration jobConf, final DBConnectionManager dbConn) {
    LOGGER.debug(MessageCatalog._00030_STARTING);
    // Update job start-date in DDBB
    dbConn.updateJobStartDate(jobConf.getId());
    // URLEncode and prepare some command parameters for Virtuoso Rewrite Rules
    LOGGER.debug(MessageCatalog._00037_ENCODE_PARAMS);
    final boolean encoded = encodeParams(jobConf);
    if (encoded) {
      // Execute global ISQL commands file for rewriting rules in Virtuoso
      final boolean success = executeGlobalIsqlCommands(jobConf);
      if (success) {
        // Get subset ISQL commands file for rewriting rules in Virtuoso and execute them
        for (final Iterator<Subset> iterSubsets = jobConf.getSubsets().iterator();
            iterSubsets.hasNext(); ) {
          final Subset subset = iterSubsets.next();
          executeSubsetIsqlCommands(jobConf, subset);
        }
      }
      // Create Dataset default HTML page
      createDatasetDefaultPage(jobConf, dbConn);
    } else {
      LOGGER.error(MessageCatalog._00039_INPUT_PARAMS_ERROR, jobConf.getId());
    }

    // Update job end_date of DDBB
    LOGGER.debug(MessageCatalog._00057_UPDATING_JOB_DDBB, jobConf.getId());
    dbConn.updateJobEndDate(jobConf.getId());
    final Job job = dbConn.getJob(jobConf.getId());
    LOGGER.debug(MessageCatalog._00041_STOPPED);
    return job;
  }
Example #8
0
 /**
  * Returns the DDBB connection.
  *
  * @return the DDBB connection.
  * @since 2.0
  */
 public Connection getConnection() {
   try {
     // Check first if the DB connection is still valid
     if (!this.conn.isValid(1)) {
       LOGGER.debug(MessageCatalog._00026_GET_NEW_DB_CONNECTION);
       getNewConnection();
     }
   } catch (SQLException exception) {
     LOGGER.error(MessageCatalog._00024_DATA_ACCESS_FAILURE, exception);
   }
   return this.conn;
 }
Example #9
0
        @Override
        public void run() {
          while (cleanerIsActive) {
            try {
              if (!buffer.isEmpty()) {
                insertChunk();
              }

              Thread.sleep(cleanUpPeriod);
            } catch (final Exception exception) {
              failures.incrementAndGet();
              log.error(MessageCatalog._00034_NWS_SYSTEM_INTERNAL_FAILURE, exception);
            }
          }
        }
  /**
   * Gets job info.
   *
   * @param request the HTTP request.
   * @param jobid the job identifier associated with this instance.
   * @return a response which includes the info of the job.
   * @since 1.0
   */
  @GET
  @Path("/jobs/{jobid}")
  @Produces({MediaType.TEXT_XML, MediaType.APPLICATION_XML, MediaType.APPLICATION_JSON})
  public Response getJob(
      @Context final HttpServletRequest request, @PathParam("jobid") final Integer jobid) {
    LOGGER.debug(MessageCatalog._00025_GET_JOB_REQUEST);

    if (jobid == null) {
      LOGGER.error(MessageCatalog._00022_MISSING_INPUT_PARAM, "jobid");
      return Response.status(Status.BAD_REQUEST).build();
    }

    final DBConnectionManager dbConn = (DBConnectionManager) context.getAttribute("db");
    final Job job = dbConn.getJob(jobid);
    return Response.status(Response.Status.ACCEPTED).entity(job).build();
  }
Example #11
0
 /**
  * Updates the ckan_dataset_url of the job.
  *
  * @param jobId the job identification.
  * @param ckanDatasetUrl the URL of the dataset in CKAN.
  * @return true if the URL has been updated correctly in the DDBB, false otherwise.
  * @since 2.0
  */
 public boolean updateCkanDatasetUrl(final int jobId, final String ckanDatasetUrl) {
   try {
     PreparedStatement preparedStatement = null;
     preparedStatement =
         getConnection()
             .prepareStatement(
                 "UPDATE ckancreation_job_instances SET ckan_dataset_url = ? WHERE job_id = ?");
      // Parameters start with 1: (1) ckan_dataset_url, (2) job_id
     preparedStatement.setString(1, ckanDatasetUrl);
     preparedStatement.setInt(2, jobId);
     preparedStatement.executeUpdate();
     preparedStatement.close();
   } catch (SQLException exception) {
     LOGGER.error(MessageCatalog._00024_DATA_ACCESS_FAILURE, exception);
     return false;
   }
   return true;
 }
Example #12
0
 @Override
 public void run() {
   while (dequeuerIsActive) {
     try {
       final Future<InsertStatResult> future = futures.poll();
       if (future != null) {
         final InsertStatResult result = future.get();
         triplesPerSecond.getAndSet(
             ((double) triplesIndexed.addAndGet(result.affectedCount)
                     / totalElapsed.addAndGet(result.elapsed))
                 * 1000);
       } else {
         Thread.sleep(1000);
       }
     } catch (final Exception exception) {
       failures.incrementAndGet();
       log.error(MessageCatalog._00034_NWS_SYSTEM_INTERNAL_FAILURE, exception);
     }
   }
 }
Example #13
0
 /**
  * Gets the external datasets to which the ALIADA dataset is linked.
  *
  * @return the list of external datasets.
  * @since 2.0
  */
 public ArrayList<ExternalDataset> getTargetDatasets() {
   ArrayList<ExternalDataset> targetDatasetList = new ArrayList<ExternalDataset>();
   try {
     final Statement sta = getConnection().createStatement();
     String sql =
         "SELECT DISTINCT external_dataset_name, external_dataset_name_ckan FROM t_external_dataset";
     ResultSet resultSet = sta.executeQuery(sql);
     while (resultSet.next()) {
       final ExternalDataset targetDataset = new ExternalDataset();
       targetDataset.setName(resultSet.getString("external_dataset_name"));
       targetDataset.setCkanName(resultSet.getString("external_dataset_name_ckan"));
       targetDatasetList.add(targetDataset);
     }
     resultSet.close();
     sta.close();
   } catch (SQLException exception) {
     LOGGER.error(MessageCatalog._00024_DATA_ACCESS_FAILURE, exception);
   }
   return targetDatasetList;
 }
Example #14
0
 /**
  * Updates the end_date of the job.
  *
  * @param jobId the job identification.
  * @return true if the date has been updated correctly in the DDBB, false otherwise.
  * @since 2.0
  */
 public boolean updateJobEndDate(final int jobId) {
   try {
     PreparedStatement preparedStatement = null;
     preparedStatement =
         getConnection()
             .prepareStatement(
                 "UPDATE ckancreation_job_instances SET end_date = ? WHERE job_id = ?");
      // Parameters start with 1: (1) end_date, (2) job_id
     final java.util.Date today = new java.util.Date();
     final java.sql.Timestamp todaySQL = new java.sql.Timestamp(today.getTime());
     preparedStatement.setTimestamp(1, todaySQL);
     preparedStatement.setInt(2, jobId);
     preparedStatement.executeUpdate();
     preparedStatement.close();
   } catch (SQLException exception) {
     LOGGER.error(MessageCatalog._00024_DATA_ACCESS_FAILURE, exception);
     return false;
   }
   return true;
 }
  /**
    * Copies the organisation image file and the CSS file to the dataset web root.
    *
    * @param jobConf the {@link eu.aliada.linkeddataserversetup.model.JobConfiguration} that contains
    *     information to set up the rewrite rules in Virtuoso.
    * @param pageFolder the dataset web page folder.
    * @param pageURL the dataset web page URL.
    * @return true if the files have been copied correctly, false otherwise.
   * @since 2.0
   */
  public boolean copyFilesToWebServerPath(
      final JobConfiguration jobConf, final String pageFolder, final String pageURL) {
    boolean success = false;
    try {
      // Move the organization image file from TMP folder to the definitive folder
      final File orgImageInitFile = new File(jobConf.getOrgImagePath());
      final String definitiveImgFileName = pageFolder + File.separator + "orgLogo.jpeg";
      final File definitiveImgFile = new File(definitiveImgFileName);
      Files.move(
          orgImageInitFile.toPath(),
          definitiveImgFile.toPath(),
          java.nio.file.StandardCopyOption.REPLACE_EXISTING);
      jobConf.setOrgImagePath(definitiveImgFileName);
      final String orgImageURL = pageURL + "/" + definitiveImgFile.getName();
      jobConf.setOrgImageURL(orgImageURL);

      // Copy the CSS file to the definitive folder
      final InputStream cssInpuStream = getClass().getResourceAsStream("/" + DATASET_CSS_FILE);
      final String definitiveCssFileName = pageFolder + File.separator + "aliada_dataset.css";
      final File definitiveCssFile = new File(definitiveCssFileName);
      final FileOutputStream cssOutputStream = new FileOutputStream(definitiveCssFile);
      int read = 0;
      final byte[] bytes = new byte[1024];
      while ((read = cssInpuStream.read(bytes)) != -1) {
        cssOutputStream.write(bytes, 0, read);
      }
      cssOutputStream.close();
      jobConf.setCssFilePath(definitiveCssFileName);
      final String cssFileURL = pageURL + "/" + DATASET_CSS_FILE;
      jobConf.setCssFileURL(cssFileURL);

      success = true;
    } catch (Exception exception) {
      LOGGER.error(MessageCatalog._00035_FILE_ACCESS_FAILURE, exception);
    }
    return success;
  }
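
  The byte-copy loop for the CSS resource can also be written with java.nio.file.Files.copy. A minimal sketch, assuming the same DATASET_CSS_FILE classpath resource; Path, Paths, Files and StandardCopyOption come from java.nio.file, InputStream and IOException from java.io.

   // Hypothetical helper replacing the manual stream loop above with Files.copy.
   private void copyCssResourceSketch(final String pageFolder) throws IOException {
     final Path cssTarget = Paths.get(pageFolder, "aliada_dataset.css");
     try (InputStream cssStream = getClass().getResourceAsStream("/" + DATASET_CSS_FILE)) {
       Files.copy(cssStream, cssTarget, StandardCopyOption.REPLACE_EXISTING);
     }
   }
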
 /**
  * Encodes the parameters to pass to the ISQL commands: the values are URL-encoded and % is
  * replaced by %%, as required by the Virtuoso rewrite rules.
  *
  * @param jobConf the {@link eu.aliada.linkeddataserversetup.model.JobConfiguration} that contains
  *     the ISQL commands parameters.
  * @return true if the encoding has been carried out correctly.
  * @since 1.0
  */
 public boolean encodeParams(final JobConfiguration jobConf) {
   boolean encoded = false;
   try {
     // Remove leading/trailing slashes of URI Document section
     if (jobConf.getUriDocPart() != null) {
       uriDocPart = removeLeadingTralingSlashes(jobConf.getUriDocPart());
     } else {
       uriDocPart = "";
     }
     // Remove leading/trailing slashes of URI identifier section
     if (jobConf.getUriIdPart() != null) {
       uriIdPart = removeLeadingTralingSlashes(jobConf.getUriIdPart());
     }
     // Remove leading/trailing slashes of URI Ontology section
     if (jobConf.getUriDefPart() != null) {
       uriDefPart = removeLeadingTralingSlashes(jobConf.getUriDefPart());
     }
     // Encode dataset graphs
     graphsSelectEncoded = "";
     graphsEncoded = "";
     int subsetIndex = 0;
     for (final Iterator<Subset> iterSubsets = jobConf.getSubsets().iterator();
         iterSubsets.hasNext(); ) {
       final Subset subset = iterSubsets.next();
       final String graphSelectEncoded =
           URLEncoder.encode(" FROM <" + subset.getGraph() + ">", "UTF-8");
       String linksGraphSelectEncoded =
           URLEncoder.encode(" FROM <" + subset.getLinksGraph() + ">", "UTF-8");
       graphsSelectEncoded = graphsSelectEncoded + graphSelectEncoded + linksGraphSelectEncoded;
       String graphEncoded = "";
       if (subsetIndex == 0) {
         graphEncoded = "&graph" + URLEncoder.encode("=" + subset.getGraph(), "UTF-8");
       } else {
         graphEncoded = URLEncoder.encode("&graph=" + subset.getGraph(), "UTF-8");
       }
       final String linksGraphEncoded =
           URLEncoder.encode("&graph=" + subset.getLinksGraph(), "UTF-8");
       graphsEncoded = graphsEncoded + graphEncoded + linksGraphEncoded;
       subsetIndex++;
     }
     graphsSelectEncoded = graphsSelectEncoded.replace("%", "%%");
     graphsEncoded = graphsEncoded.replace("%", "%%");
     // Encode domain name
     domainNameEncoded = URLEncoder.encode(jobConf.getDomainName(), "UTF-8");
     domainNameEncoded = domainNameEncoded.replace("%", "%%");
     // Encode URI Identifier part
     uriIdPartEncoded = URLEncoder.encode(uriIdPart, "UTF-8");
     uriIdPartEncoded = uriIdPartEncoded.replace("%", "%%");
     // Encode Ontology URI
     ontologyEncoded = URLEncoder.encode(jobConf.getOntologyUri(), "UTF-8");
     ontologyEncoded = ontologyEncoded.replace("%", "%%");
     // Compose URI document part + URI Concept part
     if (jobConf.getUriDocPart() != null) {
       uriDocConcept = removeLeadingTralingSlashes(jobConf.getUriDocPart());
     }
     if (jobConf.getUriConceptPart() != null) {
       final String datasetConceptPart = removeLeadingTralingSlashes(jobConf.getUriConceptPart());
       if (datasetConceptPart.length() > 0) {
         if (uriDocConcept.length() > 0) {
           uriDocConcept = uriDocConcept + "/" + datasetConceptPart;
         } else {
           uriDocConcept = datasetConceptPart;
         }
       }
     }
     // Compose rules name suffix
     rulesNamesSuffix = jobConf.getDomainName().replace("http", "");
     rulesNamesSuffix = rulesNamesSuffix.replace(":", "");
     rulesNamesSuffix = rulesNamesSuffix.replace("/", "");
     rulesNamesSuffix = rulesNamesSuffix.replace(".", "");
     // Check that we have the parameter values
     if ((uriIdPart != null)
         && (uriDefPart != null)
         && (graphsEncoded.length() > 0)
         && (domainNameEncoded != null)
         && (ontologyEncoded != null)
         && (uriDocConcept != null)
         && (rulesNamesSuffix.length() > 0)) {
       if ((uriIdPart.length() > 0)
           && (uriDefPart.length() > 0)
           && (ontologyEncoded.length() > 0)
           && (uriDocConcept.length() > 0)
           && (domainNameEncoded.length() > 0)) {
         // Check that Identifier, Ontology and Document parts do not contain "/"
         if (!(uriIdPart.contains("/"))
             && !(uriDefPart.contains("/"))
             && !(uriDocPart.contains("/"))) {
           encoded = true;
         }
       }
     }
   } catch (UnsupportedEncodingException exception) {
     LOGGER.error(MessageCatalog._00038_ENCODING_ERROR, exception);
   }
   return encoded;
 }
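
 To make the double escaping concrete, a small standalone sketch of the URL-encode-then-escape step applied to a made-up graph URI (the URI and class name are illustrative assumptions):

import java.io.UnsupportedEncodingException;
import java.net.URLEncoder;

// Illustration of the URL-encode-then-escape-% step; the sample graph URI is hypothetical.
public final class EncodeParamsSketch {
  public static void main(final String[] args) throws UnsupportedEncodingException {
    final String graphClause = " FROM <http://data.example.org/graph/dataset>";
    final String urlEncoded = URLEncoder.encode(graphClause, "UTF-8");
    // "+FROM+%3Chttp%3A%2F%2Fdata.example.org%2Fgraph%2Fdataset%3E"
    final String forVirtuoso = urlEncoded.replace("%", "%%");
    // "+FROM+%%3Chttp%%3A%%2F%%2Fdata.example.org%%2Fgraph%%2Fdataset%%3E"
    System.out.println(forVirtuoso);
  }
}
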
  /**
    * Executes the subset ISQL commands that set up the rewrite rules in Virtuoso for dereferencing
    * the subset URIs.
    *
    * @param jobConf the {@link eu.aliada.linkeddataserversetup.model.JobConfiguration} that contains
    *     information to set up the rewrite rules in Virtuoso.
    * @param subset the {@link eu.aliada.linkeddataserversetup.model.Subset} that contains
    *     information to set up the rewrite rules in Virtuoso.
    * @return true if the ISQL commands have been executed successfully.
   * @since 2.0
   */
  public boolean executeSubsetIsqlCommands(final JobConfiguration jobConf, final Subset subset) {
    boolean success = false;

    // Get global ISQL commands file for rewriting rules in Virtuoso
    LOGGER.debug(MessageCatalog._00036_GET_ISQL_COMMANDS_FILE);
    final String isqlCommandsFilename =
        getIsqlCommandsFile(
            subset.getIsqlCommandsSubsetFilename(), jobConf.getIsqlCommandsSubsetFilenameDefault());

    // Compose URI document part + URI Concept part + Subset URI Concept part
    String uriDocConceptSubset = "";
    if (subset.getUriConceptPart() != null) {
      uriDocConceptSubset = removeLeadingTralingSlashes(subset.getUriConceptPart());
      if (uriDocConceptSubset.length() > 0) {
        uriDocConceptSubset = uriDocConcept + "/" + uriDocConceptSubset;
      }
    }

    // Variables for creating rules for Document listing with extension
    int createVirtualPath = 0;
    final int urrlListSubset = 1; // It is a subset
    final String rulesNamessuffixDataset = rulesNamesSuffix;
    String uriDocConceptParent = "";
    if (subset.getUriConceptPart().contains("/")) {
      // If the Concept part of the subset contains an /,
      // a parent virtual path must also be created
      // for the Document listing with extension
      createVirtualPath = 1;
      uriDocConceptParent = uriDocConceptSubset.substring(0, uriDocConceptSubset.lastIndexOf('/'));
    }
    // Add a "/" at the beginning and end of the Document concept part
    String uriDocSlash = "/" + uriDocPart;
    if (!uriDocSlash.endsWith("/")) {
      uriDocSlash = uriDocSlash + "/";
    }
    // If the URI Concept part of the subset is empty, its
    // corresponding URL Rewrite rules will not be created
    if ((isqlCommandsFilename != null) && (uriDocConceptSubset.length() > 0)) {
      // Compose Rules Names suffix for the subset,
      // adding the subset concept part of the URI
      String rulesNamesSuffixSubset = rulesNamesSuffix + uriDocConceptSubset;
      rulesNamesSuffixSubset = rulesNamesSuffixSubset.replace("/", "");
      // Compose ISQL command execution statement
      final String isqlCommand =
          String.format(
              ISQL_COMMAND_FORMAT,
              jobConf.getIsqlCommandPath(),
              jobConf.getStoreIp(),
              jobConf.getStoreSqlPort(),
              jobConf.getSqlLogin(),
              jobConf.getSqlPassword(),
              isqlCommandsFilename,
              jobConf.getListeningHost(),
              jobConf.getVirtualHost(),
              uriIdPart,
              uriDocSlash,
              uriDefPart,
              graphsSelectEncoded,
              graphsEncoded,
              domainNameEncoded,
              rulesNamesSuffixSubset,
              uriDocConceptSubset,
              DATASET_INDEX_PAGE,
              ontologyEncoded,
              uriIdPartEncoded,
              createVirtualPath,
              urrlListSubset,
              rulesNamessuffixDataset,
              uriDocConceptParent);
      LOGGER.debug(isqlCommand);
      // Execute ISQL command
      try {
        LOGGER.debug(MessageCatalog._00040_EXECUTING_ISQL);
        final Process commandProcess = Runtime.getRuntime().exec(isqlCommand);
        final BufferedReader stdInput =
            new BufferedReader(new InputStreamReader(commandProcess.getInputStream()));
        String comOutput = "";
        while ((comOutput = stdInput.readLine()) != null) {
          LOGGER.debug(comOutput);
        }
        success = true;
      } catch (IOException exception) {
        LOGGER.error(MessageCatalog._00033_EXTERNAL_PROCESS_START_FAILURE, exception, isqlCommand);
      }
    }
    return success;
  }
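
  A side note on the process handling: the code above reads only standard output and does not wait for the exit status. A minimal sketch of an alternative using ProcessBuilder, offered as a hypothetical helper rather than part of the project (the whitespace split of the command line is a simplification):

   // Hypothetical helper around the ISQL call: merges stderr into stdout and checks the exit code.
   private boolean runIsqlCommandSketch(final String isqlCommand)
       throws IOException, InterruptedException {
     final Process process =
         new ProcessBuilder(isqlCommand.split("\\s+")).redirectErrorStream(true).start();
     try (BufferedReader reader =
         new BufferedReader(new InputStreamReader(process.getInputStream()))) {
       String outputLine;
       while ((outputLine = reader.readLine()) != null) {
         LOGGER.debug(outputLine);
       }
     }
     return process.waitFor() == 0;
   }
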
  /**
    * Creates the dataset default HTML page.
    *
    * @param jobConf the {@link eu.aliada.linkeddataserversetup.model.JobConfiguration} that contains
    *     information to create the dataset HTML page.
    * @param dbConn the DDBB connection.
   * @since 2.0
   */
  public void createDatasetDefaultPage(
      final JobConfiguration jobConf, final DBConnectionManager dbConn) {
    // Create the folder where the page resides, if it does not exist
    final String pageFolder = jobConf.getVirtHttpServRoot() + File.separator + rulesNamesSuffix;
    final String pageURL = "http://" + jobConf.getDomainName();
    final File fFolder = new File(pageFolder);
    if (!fFolder.exists()) {
      fFolder.mkdir();
    }
    // Update the dataset web page root in the DB
    dbConn.updateDatasetWebPageRoot(jobConf.getDatasetId(), pageFolder);

    final String pagePath = pageFolder + File.separator + DATASET_INDEX_PAGE;
    // Remove the page if it already exists
    final File fPage = new File(pagePath);
    if (fPage.exists()) {
      fPage.delete();
    }

    // Copy image and CSS files to web server folder
    copyFilesToWebServerPath(jobConf, pageFolder, pageURL);
    final String orgLogoPath = jobConf.getOrgImageURL();
    final String styleSheetPath = jobConf.getCssFileURL();
    // Get the number of triples of the dataset
    final int numTriples =
        calculateDatasetNumTriples(
            jobConf.getSparqlEndpointUri(),
            jobConf.getSparqlLogin(),
            jobConf.getSparqlPassword(),
            jobConf.getSubsets());
    // Now, create a new one
    try {
      final FileWriter fstream = new FileWriter(pagePath);
      final BufferedWriter out = new BufferedWriter(fstream);
      String line =
          "<!DOCTYPE html PUBLIC \"-//W3C//DTD HTML 4.01 Transitional//EN\" \"http://www.w3.org/TR/html4/loose.dtd\">";
      out.write(line);
      out.newLine();
      line = "<html>";
      out.write(line);
      out.newLine();
      line = "<head>";
      out.write(line);
      out.newLine();
      line = "<meta http-equiv=\"Content-Type\" content=\"text/html; charset=ISO-8859-1\">";
      out.write(line);
      out.newLine();
      line = "<title>" + jobConf.getDatasetDesc().toUpperCase() + "</title>";
      out.write(line);
      out.newLine();
      line = "<link rel=\"stylesheet\" href=\"" + styleSheetPath + "\" type=\"text/css\">";
      out.write(line);
      out.newLine();
      line = "</head>";
      out.write(line);
      out.newLine();
      line = "<body>";
      out.write(line);
      out.newLine();
      line = "<img src=\"" + orgLogoPath + "\">";
      out.write(line);
      out.newLine();
      line = "<h1>" + jobConf.getDatasetDesc() + "</h1>";
      out.write(line);
      out.newLine();
      line = "<table><colgroup><col width=\"25%\"><col width=\"75%\"></colgroup>";
      out.write(line);
      out.newLine();
      // Description
      line =
          String.format(
              "<tr><td class=\"label\">%s</td><td class=\"input\">%s</td></tr>",
              "description", jobConf.getDatasetLongDesc());
      out.write(line);
      out.newLine();
      // Publisher
      line =
          String.format(
              "<tr><td class=\"label\">%s</td><td class=\"input\">%s</td></tr>",
              "publisher", jobConf.getOrgName().toUpperCase());
      out.write(line);
      out.newLine();
      // Source URL
      line =
          String.format(
              "<tr><td class=\"label\">%s</td><td class=\"input\"><a href=\"%s\" target=\"_blank\">%s</a></td></tr>",
              "source", jobConf.getDatasetSourceURL(), jobConf.getDatasetSourceURL());
      out.write(line);
      out.newLine();
      // Created
      line =
          String.format(
              "<tr><td class=\"label\">%s</td><td class=\"input\">%s</td></tr>",
              "created", getStringNow());
      out.write(line);
      out.newLine();
      // Contributor
      line =
          String.format(
              "<tr><td class=\"label\">%s</td><td class=\"input\">%s</td></tr>",
              "contributor", jobConf.getDatasetAuthor());
      out.write(line);
      out.newLine();
      // License URL
      line =
          String.format(
              "<tr><td class=\"label\">%s</td><td class=\"input\"><a href=\"%s\" target=\"_blank\">%s</a></td></tr>",
              "license", jobConf.getLicenseURL(), jobConf.getLicenseURL());
      out.write(line);
      out.newLine();
      // SPARQL endpoint URL
      line =
          String.format(
              "<tr><td class=\"label\">%s</td><td class=\"input\"><a href=\"%s\" target=\"_blank\">%s</a></td></tr>",
              "SPARQL endpoint",
              jobConf.getPublicSparqlEndpointUri(),
              jobConf.getPublicSparqlEndpointUri());
      out.write(line);
      out.newLine();
      // Vocabulary URL
      line =
          String.format(
              "<tr><td class=\"label\">%s</td><td class=\"input\"><a href=\"%s\" target=\"_blank\">%s</a></td></tr>",
              "vocabulary", jobConf.getOntologyUri(), jobConf.getOntologyUri());
      out.write(line);
      out.newLine();
      // Number of triples
      line =
          String.format(
              "<tr><td class=\"label\">%s</td><td class=\"input\">%s</td></tr>",
              "number of triples", numTriples);
      out.write(line);
      out.newLine();
      // List resources of dataset
      final String datasetUri = "http://" + jobConf.getDomainName() + "/" + uriDocConcept;
      line =
          String.format(
              "<tr><td class=\"label\">%s</td><td class=\"input\"><a href=\"%s\" target=\"_blank\">%s</a></td></tr>",
              "list of resources", datasetUri, datasetUri);
      out.write(line);
      out.newLine();
      // List subsets
      line = String.format("<tr><td class=\"label\">%s</td><td class=\"input\"><ul>", "subsets");
      out.write(line);
      out.newLine();
      for (final Iterator<Subset> iterSubsets = jobConf.getSubsets().iterator();
          iterSubsets.hasNext(); ) {
        final Subset subset = iterSubsets.next();
        String uriDocConceptSubset = "";
        if (subset.getUriConceptPart() != null) {
          uriDocConceptSubset = removeLeadingTralingSlashes(subset.getUriConceptPart());
        }
        if (uriDocConceptSubset.length() > 0) {
          // List resources of subset
          final String subsetUri = datasetUri + "/" + uriDocConceptSubset;
          line =
              String.format(
                  "<li>%s: <a href=\"%s\" target=\"_blank\">%s</a></li>",
                  subset.getDescription(), subsetUri, subsetUri);
          out.write(line);
          out.newLine();
        }
      }
      line = "</ul></td></tr>";
      out.write(line);
      out.newLine();
      line = "</table>";
      out.write(line);
      out.newLine();

      line = "</body>";
      out.write(line);
      out.newLine();
      line = "</html>";
      out.write(line);
      out.newLine();
      out.close();
    } catch (IOException exception) {
      LOGGER.error(MessageCatalog._00034_FILE_CREATION_FAILURE, exception, pagePath);
    }
  }
Example #19
0
  /**
   * Gets the job configuration from the DDBB.
   *
   * @param jobId the job identification.
   * @return the {@link eu.aliada.ckancreation.model.JobConfiguration} which contains the
   *     configuration of the job.
   * @since 2.0
   */
  public JobConfiguration getJobConfiguration(final Integer jobId) {
    JobConfiguration jobConf = null;
    int datasetId = -1;
    int organisationId = -1;
    try {
      final Statement sta = getConnection().createStatement();
      String sql = "SELECT * FROM ckancreation_job_instances WHERE job_id=" + jobId;
      ResultSet resultSet = sta.executeQuery(sql);
      jobConf = new JobConfiguration();
      while (resultSet.next()) {
        jobConf = new JobConfiguration();
        jobConf.setId(jobId);
        jobConf.setCkanApiURL(resultSet.getString("ckan_api_url"));
        jobConf.setCkanApiKey(resultSet.getString("ckan_api_key"));
        jobConf.setTmpDir(resultSet.getString("tmp_dir"));
        jobConf.setStoreIp(resultSet.getString("store_ip"));
        jobConf.setStoreSqlPort(resultSet.getInt("store_sql_port"));
        jobConf.setSqlLogin(resultSet.getString("sql_login"));
        jobConf.setSqlPassword(resultSet.getString("sql_password"));
        jobConf.setIsqlCommandPath(resultSet.getString("isql_command_path"));
        jobConf.setIsqlCommandsGraphDumpFilename(
            resultSet.getString("isql_commands_file_graph_dump"));
        jobConf.setVirtHttpServRoot(resultSet.getString("virtuoso_http_server_root"));
        jobConf.setOntologyUri(resultSet.getString("aliada_ontology"));
        jobConf.setOrgName(resultSet.getString("org_name"));
        jobConf.setOrgTitle(resultSet.getString("org_name").toUpperCase());
        jobConf.setOrgDescription(resultSet.getString("org_description"));
        jobConf.setOrgHomePage(resultSet.getString("org_home_page"));
        datasetId = resultSet.getInt("datasetId");
        jobConf.setDatasetId(datasetId);
        organisationId = resultSet.getInt("organisationId");
      }
      resultSet.close();
      // Get dataset related information
      sql = "SELECT * FROM dataset WHERE datasetId=" + datasetId;
      resultSet = sta.executeQuery(sql);
      while (resultSet.next()) {
        jobConf.setDatasetAuthor(resultSet.getString("dataset_author"));
        jobConf.setDatasetAuthorEmail(resultSet.getString("dataset_author_email"));
        jobConf.setCkanDatasetName(resultSet.getString("ckan_dataset_name"));
        jobConf.setDatasetDesc(resultSet.getString("dataset_desc"));
        jobConf.setDatasetLongDesc(resultSet.getString("dataset_long_desc"));
        jobConf.setDatasetSourceURL(resultSet.getString("dataset_source_url"));
        jobConf.setSparqlEndpointUri(resultSet.getString("sparql_endpoint_uri"));
        jobConf.setSparqlLogin(resultSet.getString("sparql_endpoint_login"));
        jobConf.setSparqlPassword(resultSet.getString("sparql_endpoint_password"));
        jobConf.setPublicSparqlEndpointUri(resultSet.getString("public_sparql_endpoint_uri"));
        jobConf.setLicenseCKANId(resultSet.getString("license_ckan_id"));
        jobConf.setLicenseURL(resultSet.getString("license_url"));
        jobConf.setDomainName(resultSet.getString("domain_name"));
        jobConf.setVirtualHost(resultSet.getString("virtual_host"));
        jobConf.setUriIdPart(resultSet.getString("uri_id_part"));
        jobConf.setUriDocPart(resultSet.getString("uri_doc_part"));
        jobConf.setUriConceptPart(resultSet.getString("uri_concept_part"));
        jobConf.setUriSetPart(resultSet.getString("uri_set_part"));
      }
      resultSet.close();
      // Get subsets related information
      sql = "SELECT * FROM subset WHERE datasetId=" + datasetId;
      resultSet = sta.executeQuery(sql);
      while (resultSet.next()) {
        final Subset subset = new Subset();
        subset.setUriConceptPart(resultSet.getString("uri_concept_part"));
        subset.setDescription(resultSet.getString("subset_desc"));
        subset.setGraph(resultSet.getString("graph_uri"));
        subset.setLinksGraph(resultSet.getString("links_graph_uri"));
        jobConf.setSubset(subset);
      }
      resultSet.close();
      // Get organisation LOGO from BLOB object in organisation table
      sql = "SELECT org_logo FROM organisation WHERE organisationId=" + organisationId;
      resultSet = sta.executeQuery(sql);
      if (resultSet.next() && resultSet.getBlob("org_logo") != null) {
        final Blob logo = resultSet.getBlob("org_logo");
        final int blobLength = (int) logo.length();
        byte[] blobAsBytes = null;
        blobAsBytes = logo.getBytes(1, blobLength);
        // Compose initial logo file name
        final String orgImagePathInit =
            jobConf.getTmpDir()
                + File.separator
                + "orgLogo"
                + "_"
                + System.currentTimeMillis()
                + ".jpeg";
        try {
          final FileOutputStream fos = new FileOutputStream(orgImagePathInit);
          fos.write(blobAsBytes);
          fos.close();
          jobConf.setOrgImagePath(orgImagePathInit);
        } catch (IOException exception) {
          LOGGER.error(MessageCatalog._00034_FILE_CREATION_FAILURE, exception, orgImagePathInit);
        }
        // release the blob and free up memory. (since JDBC 4.0)
        logo.free();
      }
      resultSet.close();
      sta.close();
    } catch (SQLException exception) {
      LOGGER.error(MessageCatalog._00024_DATA_ACCESS_FAILURE, exception);
      return null;
    }

    return jobConf;
  }