/** Creates the list of {@link AwaitedJob} from the statusFile. */
protected void loadAwaitedJobs() {
    statusFile = new File(statusFilename);
    if (!statusFile.isFile()) {
        return;
    }
    awaitedJobs = Collections.synchronizedMap(new HashMap<String, AwaitedJob>());

    XMLDecoder decoder = null;
    try {
        decoder = new java.beans.XMLDecoder(new FileInputStream(statusFile));
    } catch (FileNotFoundException e1) {
        logger.error(
            "Could not load the status file. No data will be retrieved for previously submitted jobs.",
            e1);
        return;
    }

    // XMLDecoder signals the end of the stream by throwing ArrayIndexOutOfBoundsException
    // (a subclass of IndexOutOfBoundsException), so we read until that happens.
    boolean finishRead = false;
    while (!finishRead) {
        try {
            AwaitedJob aj = (AwaitedJob) decoder.readObject();
            awaitedJobs.put(aj.getJobId(), aj);
        } catch (IndexOutOfBoundsException e) {
            finishRead = true;
        }
    }
    // Close the decoder to release the underlying FileInputStream.
    decoder.close();
}
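// Editorial note: addAwaitedJob() below calls saveAwaitedJobsToFile(), which is not part of
// this excerpt. A minimal sketch of what that method could look like, assuming it mirrors
// loadAwaitedJobs() with java.beans.XMLEncoder; only the method name and the throws clause
// are implied by the caller, the body is illustrative:
protected void saveAwaitedJobsToFile() throws FileNotFoundException, IOException {
    java.beans.XMLEncoder encoder = new java.beans.XMLEncoder(new FileOutputStream(statusFile));
    try {
        // Collections.synchronizedMap requires manual synchronization while iterating
        synchronized (awaitedJobs) {
            for (AwaitedJob aj : awaitedJobs.values()) {
                encoder.writeObject(aj);
            }
        }
    } finally {
        // close() writes the XML footer; skipping it would leave a truncated file
        encoder.close();
    }
}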
protected void addAwaitedJob(AwaitedJob aj) {
    this.awaitedJobs.put(aj.getJobId(), aj);
    try {
        this.saveAwaitedJobsToFile();
    } catch (FileNotFoundException e) {
        logger.error(
            "Could not save status file after adding job " + aj.getJobId()
                + " to the awaited jobs list.", e);
    } catch (IOException e) {
        logger.error(
            "Could not save status file after adding job " + aj.getJobId()
                + " to the awaited jobs list.", e);
    }
}
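// Editorial note: removeAwaitedJob(String) is used by the two methods below but is not part
// of this excerpt. Assuming it is the counterpart of addAwaitedJob(), a sketch could be:
protected void removeAwaitedJob(String id) {
    this.awaitedJobs.remove(id);
    try {
        this.saveAwaitedJobsToFile();
    } catch (IOException e) {
        // FileNotFoundException is an IOException, so one catch covers both cases here
        logger.error("Could not save status file after removing job " + id
            + " from the awaited jobs list.", e);
    }
}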
/**
 * Checks, for each awaited job, whether its result is available on the Scheduler. If it is,
 * {@link #pullData(AwaitedJob)} is called to perform the post treatment and retrieve the
 * output data.
 */
protected void checkResultsForAwaitedJobs() {
    // Iterate over a copy of the awaited jobs so that removeAwaitedJob() can safely
    // modify the underlying map while we loop.
    Set<AwaitedJob> awaitedJobsCopy = new HashSet<AwaitedJob>(awaitedJobs.values());
    for (AwaitedJob awaitedJob : awaitedJobsCopy) {
        String id = awaitedJob.getJobId();
        try {
            JobState js = uischeduler.getJobState(id);
            if (js.isFinished()) {
                pullData(awaitedJob);
            }
        } catch (NotConnectedException e) {
            logger.error(
                "A connection error occurred while trying to download output data of job " + id
                    + ". This job will remain in the list of awaited jobs. Another attempt to download the output data will be made next time the application is initialized.",
                e);
        } catch (UnknownJobException e) {
            logger.error(
                "Could not retrieve output data for job " + id
                    + " because this job is not known by the Scheduler.", e);
            logger.warn(
                "Job " + id
                    + " will be removed from the known job list. The system will not attempt to retrieve data for this job again. You could try to manually copy the data from the location "
                    + awaitedJob.getPullURL());
            removeAwaitedJob(id);
        } catch (PermissionException e) {
            logger.error(
                "Could not retrieve output data for job " + id
                    + " because you don't have permission to access this job. You need to use the same connection credentials you used for submitting the job. Another attempt to download the output data for this job will be made next time the application is initialized.",
                e);
        }
    }
}
/**
 * Retrieves the output files produced by the job whose id is given as argument. If the
 * transfer finishes successfully, it deletes the temporary folders (at the push_url and
 * pull_url locations) and sends a notification to the listeners. Otherwise it notifies the
 * listeners of the failure.
 *
 * <p>The data transfer operation is executed by a fixed thread pool executor (see {@link
 * DataTransferProcessor}).
 *
 * @param awaitedjob the awaited job whose output data is to be pulled
 */
protected void pullData(AwaitedJob awaitedjob) {
    String localOutFolderPath = awaitedjob.getLocalOutputFolder();
    if (localOutFolderPath == null) {
        logger.warn(
            "The job " + awaitedjob.getJobId()
                + " does not define an output folder on the local machine. No output data will be retrieved.");
        return;
    }

    String jobId = awaitedjob.getJobId();
    String pullUrl = awaitedjob.getPullURL();
    String pushUrl = awaitedjob.getPushURL();

    FileObject remotePullFolder = null;
    FileObject remotePushFolder = null;
    FileObject localFolder = null;
    Set<FileObject> foldersToDelete = new HashSet<FileObject>();

    try {
        remotePullFolder = fsManager.resolveFile(pullUrl);
        remotePushFolder = fsManager.resolveFile(pushUrl);
        localFolder = fsManager.resolveFile(localOutFolderPath);
    } catch (Exception e) {
        logger.error("Could not retrieve data for job " + jobId, e);
        logger.info(
            "Job " + jobId
                + " will be removed from the known job list. The system will not attempt to retrieve data for this job again. You could try to manually copy the data from the location "
                + pullUrl);
        removeAwaitedJob(jobId);
        return;
    }

    try {
        foldersToDelete.add(remotePullFolder.getParent());
        if (!remotePullFolder.getParent().equals(remotePushFolder.getParent()))
            foldersToDelete.add(remotePushFolder.getParent());
    } catch (FileSystemException e) {
        logger.warn(
            "Data in folders " + pullUrl + " and " + pushUrl
                + " cannot be deleted due to an unexpected error.", e);
    }

    FileSelector fileSelector = Selectors.SELECT_ALL;
    // The code below has been commented out:
    // We do not need to build a file selector because the files in the temporary folder
    // have been copied by the data space layer, which already used a FastFileSelector
    // configured with includes and excludes patterns.
    //
    // DSFileSelector fileSelector = new DSFileSelector();
    // try {
    //     JobState jobstate = uischeduler.getJobState(jobId);
    //     for (TaskState ts : jobstate.getTasks()) {
    //         List<OutputSelector> of = ts.getOutputFilesList();
    //         for (OutputSelector outputSelector : of) {
    //             org.ow2.proactive.scheduler.common.task.dataspaces.FileSelector fs =
    //                 outputSelector.getOutputFiles();
    //             fileSelector.addIncludes(fs.getIncludes());
    //             fileSelector.addExcludes(fs.getExcludes());
    //         }
    //     }
    // } catch (Exception e) {
    //     logger_util.error("An exception occurred while computing which output files to download for job "
    //         + jobId + ". All available files will be downloaded for this job.");
    //     e.printStackTrace();
    // }

    DataTransferProcessor dtp =
        new DataTransferProcessor(remotePullFolder, localFolder, jobId, foldersToDelete, fileSelector);
    tpe.submit(dtp);
}
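// Editorial note: the DataTransferProcessor class referenced above is not part of this
// excerpt. Judging from the constructor call and the javadoc of pullData(), it could look
// roughly like the sketch below: a Runnable that copies the remote output with Commons VFS
// and then deletes the temporary folders. The listener notification step is summarized in
// comments because the actual listener API is not shown.
private class DataTransferProcessor implements Runnable {
    private final FileObject source;
    private final FileObject dest;
    private final String jobId;
    private final Set<FileObject> foldersToDelete;
    private final FileSelector selector;

    DataTransferProcessor(FileObject source, FileObject dest, String jobId,
            Set<FileObject> foldersToDelete, FileSelector selector) {
        this.source = source;
        this.dest = dest;
        this.jobId = jobId;
        this.foldersToDelete = foldersToDelete;
        this.selector = selector;
    }

    public void run() {
        try {
            // Commons VFS copies the files matched by the selector from the remote pull folder
            dest.copyFrom(source, selector);
            // Clean up the temporary folders once the transfer succeeded
            for (FileObject folder : foldersToDelete) {
                folder.delete(Selectors.SELECT_ALL);
            }
        } catch (FileSystemException e) {
            logger.error("Transfer of output data failed for job " + jobId, e);
            // ... notify listeners of the failure (API not shown in this excerpt)
            return;
        }
        // ... notify listeners of the successful transfer (API not shown in this excerpt)
    }
}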