/** * doImport called when "eventSubmit_doBatch_Import" is in the request parameters to run an * import. */ public void doBatch_Import(RunData data, Context context) { SessionState state = ((JetspeedRunData) data).getPortletSessionState(((JetspeedRunData) data).getJs_peid()); Hashtable fTable = new Hashtable(); if (!securityService.isSuperUser()) { addAlert(state, rb.getString("archive.batch.auth")); return; } // String fileName = data.getParameters().getString("import-file"); FileItem fi = data.getParameters().getFileItem("importFile"); if (fi == null) { addAlert(state, rb.getString("archive.batch.missingname")); } else { // get content String content = fi.getString(); String[] lines = content.split("\n"); for (int i = 0; i < lines.length; i++) { String lineContent = (String) lines[i]; String[] lineContents = lineContent.split("\t"); if (lineContents.length == 2) { fTable.put(lineContents[0], lineContents[1]); } else { addAlert(state, rb.getString("archive.batch.wrongformat")); } } } if (!fTable.isEmpty()) { Enumeration importFileName = fTable.keys(); int count = 1; while (importFileName.hasMoreElements()) { String path = StringUtils.trimToNull((String) importFileName.nextElement()); String siteCreatorName = StringUtils.trimToNull((String) fTable.get(path)); if (path != null && siteCreatorName != null) { String nSiteId = idManager.createUuid(); try { Object[] params = new Object[] {count, path, nSiteId, siteCreatorName}; addAlert(state, rb.getFormattedMessage("archive.import1", params)); addAlert(state, archiveService.merge(path, nSiteId, siteCreatorName)); } catch (Exception ignore) { } } count++; } } }
/**
 * doArchive called when "eventSubmit_doArchive" is in the request parameters to run the archive.
 *
 * <p>Requires super-user permission; archives the site whose id arrives in "archive-id" and
 * surfaces the service's result message as an alert.
 */
public void doArchive(RunData data, Context context) {
    SessionState state =
        ((JetspeedRunData) data).getPortletSessionState(((JetspeedRunData) data).getJs_peid());

    // Only a super user may run an archive.
    if (!securityService.isSuperUser()) {
        addAlert(state, rb.getString("archive.limited"));
        return;
    }

    String siteId = data.getParameters().getString("archive-id");
    if (StringUtils.isBlank(siteId)) {
        // No site id supplied — prompt the user.
        addAlert(state, rb.getString("archive.please"));
        return;
    }

    String result = archiveService.archive(siteId.trim());
    addAlert(state, rb.getFormattedMessage("archive", new Object[] {siteId}) + " \n " + result);
} // doArchive
/**
 * doImport called when "eventSubmit_doImport" is in the request parameters to run an import.
 *
 * <p>Merges the archive named by "import-file" into the site named by "import-id". Requires
 * super-user permission.
 *
 * @param data the portlet run data carrying "import-id" and "import-file" parameters
 * @param context the velocity context (unused)
 */
public void doImport(RunData data, Context context) {
    SessionState state =
        ((JetspeedRunData) data).getPortletSessionState(((JetspeedRunData) data).getJs_peid());

    if (!securityService.isSuperUser()) {
        addAlert(state, rb.getString("archive.import"));
        return;
    }

    String id = data.getParameters().getString("import-id");
    String file = data.getParameters().getString("import-file");

    // Use StringUtils.isNotBlank for consistency with doArchive; semantics are identical
    // to the previous manual null + trim().length() > 0 checks.
    if (StringUtils.isNotBlank(id) && StringUtils.isNotBlank(file)) {
        String msg = archiveService.merge(file.trim(), id.trim(), null);
        addAlert(state, rb.getFormattedMessage("archive.import2", new Object[] {file, id}) + msg);
    } else {
        addAlert(state, rb.getString("archive.file"));
    }
} // doImport
/** * Process that archives the sites * * @param sites list of SparseSite * @throws InterruptedException */ private void archiveSites( List<SparseSite> sites, String selectedTerm, Session currentSession, User currentUser) throws InterruptedException { if (isLocked()) { throw new IllegalStateException( "Cannot run batch archive, an archive job is already in progress"); } batchArchiveStarted = System.currentTimeMillis(); batchArchiveMessage = rb.getFormattedMessage( "archive.batch.term.text.statusmessage.start", new Object[] {sites.size(), selectedTerm, 0}); batchArchiveStatus = "RUNNING"; log.info( "Batch archive started for term: " + selectedTerm + ". Archiving " + sites.size() + " sites."); Session threadSession = sessionManager.getCurrentSession(); if (threadSession == null) { threadSession = sessionManager.startSession(); } threadSession.setUserId(currentUser.getId()); threadSession.setActive(); sessionManager.setCurrentSession(threadSession); authzGroupService.refreshUser(currentUser.getId()); // counters so we can run this in batches if we have a number of sites to process int archiveCount = 0; try { for (SparseSite s : sites) { log.info("Processing site: " + s.getTitle()); // archive the site archiveService.archive(s.getId()); // compress it // TODO check return value? do we care? 
try { archiveService.archiveAndZip(s.getId()); } catch (IOException e) { e.printStackTrace(); } archiveCount++; // update message if (archiveCount % 1 == 0) { int percentComplete = (int) (archiveCount * 100) / sites.size(); batchArchiveMessage = rb.getFormattedMessage( "archive.batch.term.text.statusmessage.update", new Object[] {sites.size(), selectedTerm, archiveCount, percentComplete}); } // sleep if we need to and keep sessions alive if (archiveCount > 0 && archiveCount % NUM_SITES_PER_BATCH == 0) { log.info("Sleeping for " + PAUSE_TIME_MS + "ms"); Thread.sleep(PAUSE_TIME_MS); threadSession.setActive(); currentSession.setActive(); } // check timeout if (!isLocked()) { throw new RuntimeException("Timeout occurred while running batch archive"); } } // complete batchArchiveMessage = rb.getFormattedMessage( "archive.batch.term.text.statusmessage.complete", new Object[] {sites.size(), selectedTerm}); log.info("Batch archive complete."); } finally { // reset batchArchiveStatus = STATUS_COMPLETE; batchArchiveStarted = 0; threadSession.clear(); threadSession.invalidate(); } }