/**
 * Generate the cache validity object.
 *
 * <p>This validity object should never "over cache" because it will perform the search, and
 * serialize the results using the DSpaceValidity object.
 */
public SourceValidity getValidity() {
    // Lazily build the validity object once; subsequent calls reuse it.
    if (this.validity == null) {
        try {
            DSpaceValidity validity = new DSpaceValidity();

            // The search scope (community/collection, or null for site-wide)
            // is part of the cache key.
            DSpaceObject scope = getScope();
            validity.add(scope);

            // Run the search now so the result set itself can be folded into
            // the validity: total hits, page offset, and every result object.
            performSearch();

            validity.add("total:" + queryResults.getHitCount());
            validity.add("start:" + queryResults.getStart());

            @SuppressWarnings("unchecked") // This cast is correct
            java.util.List<String> handles = queryResults.getHitHandles();
            for (String handle : handles) {
                // Each resolved result object contributes its own state, so a
                // modified item invalidates the cached page.
                DSpaceObject resultDSO = HandleManager.resolveToObject(context, handle);
                validity.add(resultDSO);
            }

            this.validity = validity.complete();
        } catch (RuntimeException re) {
            throw re;
        } catch (Exception e) {
            // Checked failures leave validity null, i.e. "never valid" —
            // the page will simply be regenerated rather than served stale.
            this.validity = null;
        }

        // add log message that we are viewing the item
        // done here, as the serialization may not occur if the cache is valid
        logSearch();
    }
    return this.validity;
}
/**
 * Delete the specified handle. It is assumed that the user has already confirmed this selection.
 *
 * @param context The current DSpace context.
 * @param handleID ID of handle to be removed.
 * @return A results object; on failure it reports continue=false, outcome=false and the
 *     deletion-failed message, on success the corresponding success state.
 */
public static FlowResult processDeleteHandle(Context context, int handleID)
        throws SQLException, AuthorizeException, IOException {
    FlowResult result = new FlowResult();
    // Start in the failed state; flip to success only after the delete commits.
    // (Previously this was initialized to continue=true/outcome=true, so the
    // exception path returned a success-shaped result with a failure message.)
    result.setContinue(false);
    result.setOutcome(false);
    result.setMessage(T_handle_deletion_failed);

    try {
        Handle handleDeleted = Handle.find(context, handleID);
        // Detach the handle from its object before removing the row itself.
        HandleManager.changeHandle(context, handleDeleted.getHandle(), null, false);
        handleDeleted.delete();
        context.commit();

        result.setContinue(true);
        result.setOutcome(true);
        result.setMessage(T_handle_successfully_deleted);
    } catch (Exception e) {
        // Keep the failure state set above; log with the stack trace so the
        // root cause is diagnosable, then roll back the transaction.
        log.error(e.getMessage(), e);
        context.abort();
    }

    return result;
}
/**
 * Match the URIs this subclass understands and return the corresponding resource. Since the
 * "dso_" format can lead to several different resource types, handle it here.
 *
 * @param context the context
 * @param request the request
 * @param response the response
 * @param pathElt the path elt
 * @return the DAV resource, or null if the first path element is not a "dso_" URI
 * @throws DAVStatusException the DAV status exception
 * @throws SQLException the SQL exception
 * @throws AuthorizeException the authorize exception
 */
protected static DAVResource matchResourceURI(
        Context context, HttpServletRequest request, HttpServletResponse response, String pathElt[])
        throws DAVStatusException, SQLException, AuthorizeException {
    // Match /dso_<handle>{...} .. look for last "dso_" element
    if (pathElt[0].startsWith("dso_")) {
        // Skip forward over consecutive "dso_" elements; i ends on the last one.
        int i = 1;
        for (; i < pathElt.length && pathElt[i].startsWith("dso_"); ++i) {
            // empty
        }
        --i;
        // Strip the "dso_" prefix (4 chars) and decode the remainder as a handle.
        String handle = decodeHandle(pathElt[i].substring(4));

        // Replace substituted handle separator char with '/' to
        // get back a normal handle: (inverse of getPathElt() above)
        int sepIndex = handle.indexOf(handleSeparator);
        if (sepIndex >= 0) {
            char hc[] = handle.toCharArray();
            hc[sepIndex] = '/';
            handle = String.copyValueOf(hc);
        }

        DSpaceObject dso = HandleManager.resolveToObject(context, handle);
        if (dso == null) {
            throw new DAVStatusException(
                HttpServletResponse.SC_NOT_FOUND, "Cannot resolve handle \"" + handle + "\"");
        } else if (dso.getType() == Constants.ITEM) {
            // An item URI may be followed by exactly one "bitstream_" element;
            // anything else after the item is an error.
            if (i + 1 < pathElt.length) {
                if (pathElt[i + 1].startsWith("bitstream_")) {
                    Bitstream bs = DAVBitstream.findBitstream(context, (Item) dso, pathElt[i + 1]);
                    if (bs == null) {
                        throw new DAVStatusException(
                            HttpServletResponse.SC_NOT_FOUND,
                            "Bitstream \"" + pathElt[i + 1] + "\" not found in item: " + pathElt[i]);
                    }
                    return new DAVBitstream(context, request, response, pathElt, (Item) dso, bs);
                } else {
                    throw new DAVStatusException(
                        HttpServletResponse.SC_NOT_FOUND,
                        "Illegal resource path, \"" + pathElt[i + 1]
                            + "\" is not a Bitstream identifier for item: " + pathElt[i]);
                }
            } else {
                return new DAVItem(context, request, response, pathElt, (Item) dso);
            }
        } else if (dso.getType() == Constants.COLLECTION) {
            return new DAVCollection(context, request, response, pathElt, (Collection) dso);
        } else if (dso.getType() == Constants.COMMUNITY) {
            return new DAVCommunity(context, request, response, pathElt, (Community) dso);
        } else {
            // Handle resolved to something that is not an item/collection/community.
            throw new DAVStatusException(
                HttpServletResponse.SC_BAD_REQUEST,
                "Unrecognized DSpace object type for handle=" + handle);
        }
    }
    // Not a "dso_" URI; let another matcher handle it.
    return null;
}
private void replaceItems( Context c, Collection[] mycollections, String sourceDir, String mapFile, boolean template) throws Exception { // verify the source directory File d = new java.io.File(sourceDir); if (d == null || !d.isDirectory()) { System.out.println("Error, cannot open source directory " + sourceDir); System.exit(1); } // read in HashMap first, to get list of handles & source dirs Map<String, String> myHash = readMapFile(mapFile); // for each handle, re-import the item, discard the new handle // and re-assign the old handle for (Map.Entry<String, String> mapEntry : myHash.entrySet()) { // get the old handle String newItemName = mapEntry.getKey(); String oldHandle = mapEntry.getValue(); Item oldItem = null; if (oldHandle.indexOf('/') != -1) { System.out.println("\tReplacing: " + oldHandle); // add new item, locate old one oldItem = (Item) HandleManager.resolveToObject(c, oldHandle); } else { oldItem = Item.find(c, Integer.parseInt(oldHandle)); } /* Rather than exposing public item methods to change handles -- * two handles can't exist at the same time due to key constraints * so would require temp handle being stored, old being copied to new and * new being copied to old, all a bit messy -- a handle file is written to * the import directory containing the old handle, the existing item is * deleted and then the import runs as though it were loading an item which * had already been assigned a handle (so a new handle is not even assigned). 
* As a commit does not occur until after a successful add, it is safe to * do a delete as any error results in an aborted transaction without harming * the original item */ File handleFile = new File(sourceDir + File.separatorChar + newItemName + File.separatorChar + "handle"); PrintWriter handleOut = new PrintWriter(new FileWriter(handleFile, true)); if (handleOut == null) { throw new Exception("can't open handle file: " + handleFile.getCanonicalPath()); } handleOut.println(oldHandle); handleOut.close(); deleteItem(c, oldItem); addItem(c, mycollections, sourceDir, newItemName, null, template); c.clearCache(); } }
/**
 * For the DSpace implementation we just return a hash of one entry which contains a reference to
 * this repository's metadata.
 */
public Map<String, String> getAllManagedRepositories() throws WingException {
    String prefix = HandleManager.getPrefix();

    // Single-entry map: this repository's prefix -> its METS metadata path.
    Map<String, String> repositories = new HashMap<String, String>();
    String metadataPath = "/metadata/internal/repository/" + prefix + "/mets.xml";
    repositories.put(prefix, metadataPath);
    return repositories;
}
// remove, given a handle private void deleteItem(Context c, String myhandle) throws Exception { // bit of a hack - to remove an item, you must remove it // from all collections it's a part of, then it will be removed Item myitem = (Item) HandleManager.resolveToObject(c, myhandle); if (myitem == null) { System.out.println("Error - cannot locate item - already deleted?"); } else { deleteItem(c, myitem); } }
/** notify the submitter that the item is archived */ private static void notifyOfArchive(Context c, Item i, Collection coll) throws SQLException, IOException { try { // Get submitter EPerson ep = i.getSubmitter(); // Get the Locale Locale supportedLocale = I18nUtil.getEPersonLocale(ep); Email email = ConfigurationManager.getEmail( I18nUtil.getEmailFilename(supportedLocale, "submit_archive")); // Get the item handle to email to user String handle = HandleManager.findHandle(c, i); // Get title DCValue[] titles = i.getDC("title", null, Item.ANY); String title = ""; try { title = I18nUtil.getMessage("org.dspace.workflow.WorkflowManager.untitled"); } catch (MissingResourceException e) { title = "Untitled"; } if (titles.length > 0) { title = titles[0].value; } email.addRecipient(ep.getEmail()); email.addArgument(title); email.addArgument(coll.getMetadata("name")); email.addArgument(HandleManager.getCanonicalForm(handle)); email.send(); } catch (MessagingException e) { log.warn( LogManager.getHeader( c, "notifyOfArchive", "cannot email user" + " item_id=" + i.getID())); } }
/** * Determine the current scope. This may be derived from the current url handle if present or the * scope parameter is given. If no scope is specified then null is returned. * * @return The current scope. */ protected DSpaceObject getScope() throws SQLException { Request request = ObjectModelHelper.getRequest(objectModel); String scopeString = request.getParameter("scope"); // Are we in a community or collection? DSpaceObject dso; if (scopeString == null || "".equals(scopeString)) { // get the search scope from the url handle dso = HandleUtil.obtainHandle(objectModel); } else { // Get the search scope from the location parameter dso = HandleManager.resolveToObject(context, scopeString); } return dso; }
/** * Get the scope of the search using the parameter found in the request. * * @param context * @param request * @throws IllegalStateException * @throws SQLException */ public static DSpaceObject getSearchScope(Context context, HttpServletRequest request) throws IllegalStateException, SQLException { // Get the location parameter, if any String location = request.getParameter("location"); if (location == null) { if (UIUtil.getCollectionLocation(request) != null) { return UIUtil.getCollectionLocation(request); } if (UIUtil.getCommunityLocation(request) != null) { return UIUtil.getCommunityLocation(request); } return null; } DSpaceObject scope = HandleManager.resolveToObject(context, location); return scope; }
private Community resolveCommunity(Context c, String communityID) throws SQLException { Community community = null; if (communityID.indexOf('/') != -1) { // has a / must be a handle community = (Community) HandleManager.resolveToObject(c, communityID); // ensure it's a community if ((community == null) || (community.getType() != Constants.COMMUNITY)) { community = null; } } else { community = Community.find(c, Integer.parseInt(communityID)); } return community; }
/** * Change handle prefix. It is assumed that the user has already confirmed this selection. * * @param context The current DSpace context. * @param oldPrefix The prefix to be replace. * @param newPrefix The prefix to be used. * @param archiveOldHandles Should the former handles be archived? * @return A results object. */ public static FlowResult changeHandlePrefix( Context context, String oldPrefix, String newPrefix, boolean archiveOldHandles) throws SQLException, AuthorizeException, IOException { FlowResult result = new FlowResult(); result.setContinue(false); result.setOutcome(false); // If we have errors, the form needs to be resubmitted to fix those problems if (StringUtils.isEmpty(oldPrefix)) { result.addError("old_prefix_empty"); } if (StringUtils.isEmpty(newPrefix)) { result.addError("new_prefix_empty"); } if (result.getErrors() == null && oldPrefix.equals(newPrefix)) { result.addError("old_prefix_equals_new_prefix"); } if (result.getErrors() == null) { try { // change prefixes HandleManager.changePrefix(context, oldPrefix, newPrefix, archiveOldHandles); context.commit(); // reindex IndexBrowse.main(new String[] {"-i"}); result.setContinue(true); result.setOutcome(true); result.setMessage(T_prefix_successfully_changed); } catch (Exception e) { result.setMessage(T_prefix_change_failed); log.error(e.getMessage()); context.abort(); } } return result; }
/** * Tries to lookup all Identifiers of this DSpaceObject. * * @return An array containing all found identifiers or an array with a length of 0. */ public String[] getIdentifiers(Context context) { if (identifiers == null) { log.debug("This DSO's identifiers cache is empty, looking for identifiers..."); identifiers = new String[0]; IdentifierService identifierService = new DSpace().getSingletonService(IdentifierService.class); if (identifierService != null) { identifiers = identifierService.lookup(context, this); } else { log.warn( "No IdentifierService found, will return an array containing " + "the Handle only."); if (getHandle() != null) { identifiers = new String[] {HandleManager.getCanonicalForm(getHandle())}; } } } // it the DSO has no identifiers at all including handle, we should return an empty array. // G.e. items during submission (workspace items) have no handle and no other identifier. if (identifiers == null) { identifiers = new String[] {}; } if (log.isDebugEnabled()) { StringBuilder dbgMsg = new StringBuilder(); for (String id : identifiers) { if (dbgMsg.capacity() == 0) { dbgMsg.append("This DSO's Identifiers are: "); } else { dbgMsg.append(", "); } dbgMsg.append(id); } dbgMsg.append("."); log.debug(dbgMsg.toString()); } return identifiers; }
/**
 * Command-line entry point for the Packager tool.
 *
 * <p>In submit mode (the default) it ingests a package file into one or more collections,
 * archives a copy of the package as a bitstream on the new item, and either starts the
 * collection workflow or installs the item immediately. With -d it instead disseminates an
 * existing item (by handle) as a package to a file or stdout. Exits the JVM directly on
 * completion or error.
 */
public static void main(String[] argv) throws Exception {
    // Declare the command-line options.
    Options options = new Options();
    options.addOption("c", "collection", true, "destination collection(s) Handle (repeatable)");
    options.addOption("e", "eperson", true, "email address of eperson doing importing");
    options.addOption(
        "w",
        "install",
        false,
        "disable workflow; install immediately without going through collection's workflow");
    options.addOption("t", "type", true, "package type or MIMEtype");
    options.addOption(
        "o", "option", true, "Packager option to pass to plugin, \"name=value\" (repeatable)");
    options.addOption(
        "d", "disseminate", false, "Disseminate package (output); default is to submit.");
    options.addOption("i", "item", true, "Handle of item to disseminate.");
    options.addOption("h", "help", false, "help");
    CommandLineParser parser = new PosixParser();
    CommandLine line = parser.parse(options, argv);

    // Parsed argument state; defaults are submit mode with workflow enabled.
    String sourceFile = null;
    String eperson = null;
    String[] collections = null;
    boolean useWorkflow = true;
    String packageType = null;
    boolean submit = true;
    String itemHandle = null;
    PackageParameters pkgParams = new PackageParameters();

    if (line.hasOption('h')) {
        // Print usage plus the available ingester/disseminator plugin names, then quit.
        HelpFormatter myhelp = new HelpFormatter();
        myhelp.printHelp("Packager [options] package-file|-\n", options);
        System.out.println("\nAvailable Submission Package (SIP) types:");
        String pn[] = PluginManager.getAllPluginNames(PackageIngester.class);
        for (int i = 0; i < pn.length; ++i) System.out.println(" " + pn[i]);
        System.out.println("\nAvailable Dissemination Package (DIP) types:");
        pn = PluginManager.getAllPluginNames(PackageDisseminator.class);
        for (int i = 0; i < pn.length; ++i) System.out.println(" " + pn[i]);
        System.exit(0);
    }
    if (line.hasOption('w')) useWorkflow = false;
    if (line.hasOption('e')) eperson = line.getOptionValue('e');
    if (line.hasOption('c')) collections = line.getOptionValues('c');
    if (line.hasOption('t')) packageType = line.getOptionValue('t');
    if (line.hasOption('i')) itemHandle = line.getOptionValue('i');
    // The sole positional argument is the package file (or "-" for a stream).
    String files[] = line.getArgs();
    if (files.length > 0) sourceFile = files[0];
    if (line.hasOption('d')) submit = false;
    if (line.hasOption('o')) {
        // Each -o is a "name=value" pair passed through to the packager plugin;
        // a bare name gets an empty value.
        String popt[] = line.getOptionValues('o');
        for (int i = 0; i < popt.length; ++i) {
            String pair[] = popt[i].split("\\=", 2);
            if (pair.length == 2) pkgParams.addProperty(pair[0].trim(), pair[1].trim());
            else if (pair.length == 1) pkgParams.addProperty(pair[0].trim(), "");
            else System.err.println("Warning: Illegal package option format: \"" + popt[i] + "\"");
        }
    }

    // Sanity checks on arg list: required args
    if (sourceFile == null
        || eperson == null
        || packageType == null
        || (submit && collections == null)) {
        System.err.println("Error - missing a REQUIRED argument or option.\n");
        HelpFormatter myhelp = new HelpFormatter();
        myhelp.printHelp("PackageManager [options] package-file|-\n", options);
        System.exit(0);
    }

    // find the EPerson, assign to context
    Context context = new Context();
    EPerson myEPerson = null;
    myEPerson = EPerson.findByEmail(context, eperson);
    if (myEPerson == null) usageError("Error, eperson cannot be found: " + eperson);
    context.setCurrentUser(myEPerson);

    if (submit) {
        // make sure we have an input file
        // GWaller 11/1/10 Disable piping of input in - we need to archive the package so it is
        // simpler to assume a file stream
        // rather than save the System.in bytes and re-read.
        if (sourceFile.equals("-")) {
            usageError(
                "Error, input piping not allowed. Specify a file name of a physical file to read");
        }

        InputStream source = new FileInputStream(sourceFile);

        // Look up the ingester plugin for the requested package type.
        PackageIngester sip =
            (PackageIngester) PluginManager.getNamedPlugin(PackageIngester.class, packageType);
        if (sip == null) usageError("Error, Unknown package type: " + packageType);

        // find collections
        Collection[] mycollections = null;
        System.out.println("Destination collections:");

        // validate each collection arg to see if it's a real collection
        mycollections = new Collection[collections.length];
        for (int i = 0; i < collections.length; i++) {
            // sanity check: did handle resolve, and to a collection?
            DSpaceObject dso = HandleManager.resolveToObject(context, collections[i]);
            if (dso == null)
                throw new IllegalArgumentException(
                    "Bad collection list -- "
                        + "Cannot resolve collection handle \""
                        + collections[i]
                        + "\"");
            else if (dso.getType() != Constants.COLLECTION)
                throw new IllegalArgumentException(
                    "Bad collection list -- "
                        + "Object at handle \""
                        + collections[i]
                        + "\" is not a collection!");
            mycollections[i] = (Collection) dso;
            System.out.println(
                (i == 0 ? " Owning " : " ")
                    + " Collection: "
                    + mycollections[i].getMetadata("name"));
        }

        try {
            // GWaller 26/08/09 Support array of collections
            WorkspaceItem wi = sip.ingest(context, mycollections, source, pkgParams, null);

            // GWaller 11/1/10 IssueID #157 Archive the package:
            // re-read the source file and store it as a bitstream in the
            // archived-content bundle of the new item.
            InputStream sourceCopy = new FileInputStream(sourceFile);
            Bundle archivedBundle =
                BundleUtils.getBundleByName(wi.getItem(), Constants.ARCHIVED_CONTENT_PACKAGE_BUNDLE);
            Bitstream bs = archivedBundle.createBitstream(sourceCopy);
            bs.setName(new File(sourceFile).getName());
            bs.update();
            archivedBundle.update();

            if (useWorkflow) {
                String handle = null;

                // Check if workflow completes immediately, and
                // return Handle if so.
                WorkflowItem wfi = WorkflowManager.startWithoutNotify(context, wi);
                if (wfi.getState() == WorkflowManager.WFSTATE_ARCHIVE) {
                    Item ni = wfi.getItem();
                    handle = HandleManager.findHandle(context, ni);
                }
                if (handle == null)
                    System.out.println("Created Workflow item, ID=" + String.valueOf(wfi.getID()));
                else System.out.println("Created and installed item, handle=" + handle);
            } else {
                // Workflow disabled (-w): install straight into the archive.
                InstallItem.installItem(context, wi);
                System.out.println(
                    "Created and installed item, handle="
                        + HandleManager.findHandle(context, wi.getItem()));
            }
            context.complete();
            System.exit(0);
        } catch (Exception e) {
            // abort all operations
            context.abort();
            e.printStackTrace();
            System.out.println(e);
            System.exit(1);
        }
    } else {
        // Dissemination mode: write the package to the named file, or to
        // stdout when the filename is "-".
        OutputStream dest =
            (sourceFile.equals("-"))
                ? (OutputStream) System.out
                : (OutputStream) (new FileOutputStream(sourceFile));
        PackageDisseminator dip =
            (PackageDisseminator)
                PluginManager.getNamedPlugin(PackageDisseminator.class, packageType);
        if (dip == null) usageError("Error, Unknown package type: " + packageType);
        DSpaceObject dso = HandleManager.resolveToObject(context, itemHandle);
        if (dso == null)
            throw new IllegalArgumentException(
                "Bad Item handle -- " + "Cannot resolve handle \"" + itemHandle);
        dip.disseminate(context, dso, pkgParams, dest);
    }
}
/**
 * Attach a division to the given search division named "search-results" which contains results
 * for this search query.
 *
 * @param search The search division to contain the search-results division.
 */
protected void buildSearchResultsDivision(Division search)
        throws IOException, SQLException, WingException {
    // Only build results when the user actually entered a query.
    if (getQuery().length() > 0) {

        // Perform the actual search
        performSearch();
        DSpaceObject searchScope = getScope();

        // "Your query X returned N hits" summary line.
        Para para = search.addPara("result-query", "result-query");
        String query = getQuery();
        int hitCount = queryResults.getHitCount();
        para.addContent(T_result_query.parameterize(query, hitCount));

        Division results = search.addDivision("search-results", "primary");

        // Head the results with the scope's name (community/collection),
        // or a generic heading for a site-wide search.
        if (searchScope instanceof Community) {
            Community community = (Community) searchScope;
            String communityName = community.getMetadata("name");
            results.setHead(T_head1_community.parameterize(communityName));
        } else if (searchScope instanceof Collection) {
            Collection collection = (Collection) searchScope;
            String collectionName = collection.getMetadata("name");
            results.setHead(T_head1_collection.parameterize(collectionName));
        } else {
            results.setHead(T_head1_none);
        }

        if (queryResults.getHitCount() > 0) {
            // Pagination variables: 1-based first/last indices on this page,
            // current page number, and total pages (ceiling division).
            int itemsTotal = queryResults.getHitCount();
            int firstItemIndex = queryResults.getStart() + 1;
            int lastItemIndex = queryResults.getStart() + queryResults.getPageSize();
            if (itemsTotal < lastItemIndex) {
                lastItemIndex = itemsTotal;
            }
            int currentPage = (queryResults.getStart() / queryResults.getPageSize()) + 1;
            int pagesTotal = ((queryResults.getHitCount() - 1) / queryResults.getPageSize()) + 1;
            Map<String, String> parameters = new HashMap<String, String>();
            // "{pageNum}" is a placeholder the pagination control substitutes.
            parameters.put("page", "{pageNum}");
            String pageURLMask = generateURL(parameters);

            results.setMaskedPagination(
                itemsTotal, firstItemIndex, lastItemIndex, currentPage, pagesTotal, pageURLMask);

            // Look for any communities or collections in the mix
            ReferenceSet referenceSet = null;
            boolean resultsContainsBothContainersAndItems = false;

            @SuppressWarnings("unchecked") // This cast is correct
            java.util.List<String> containerHandles = queryResults.getHitHandles();
            for (String handle : containerHandles) {
                DSpaceObject resultDSO = HandleManager.resolveToObject(context, handle);

                if (resultDSO instanceof Community || resultDSO instanceof Collection) {
                    // Lazily create the container reference set on first match.
                    if (referenceSet == null) {
                        referenceSet =
                            results.addReferenceSet(
                                "search-results-repository",
                                ReferenceSet.TYPE_SUMMARY_LIST,
                                null,
                                "repository-search-results");
                        // Set a heading showing that we will be listing containers that matched:
                        referenceSet.setHead(T_head2);
                        resultsContainsBothContainersAndItems = true;
                    }
                    referenceSet.addReference(resultDSO);
                }
            }

            // Look for any items in the result set (second pass over the hits).
            referenceSet = null;

            @SuppressWarnings("unchecked") // This cast is correct
            java.util.List<String> itemHandles = queryResults.getHitHandles();
            for (String handle : itemHandles) {
                DSpaceObject resultDSO = HandleManager.resolveToObject(context, handle);

                if (resultDSO instanceof Item) {
                    if (referenceSet == null) {
                        referenceSet =
                            results.addReferenceSet(
                                "search-results-repository",
                                ReferenceSet.TYPE_SUMMARY_LIST,
                                null,
                                "repository-search-results");
                        // Only set a heading if there are both containers and items.
                        if (resultsContainsBothContainersAndItems) {
                            referenceSet.setHead(T_head3);
                        }
                    }
                    referenceSet.addReference(resultDSO);
                }
            }
        } else {
            results.addPara(T_no_results);
        }
    } // Empty query
}
/** * item? try and add it to the archive. * * @param mycollections - add item to these Collections. * @param path - directory containing the item directories. * @param itemname handle - non-null means we have a pre-defined handle already * @param mapOut - mapfile we're writing */ private Item addItem( Context c, Collection[] mycollections, String path, String itemname, PrintWriter mapOut, boolean template) throws Exception { String mapOutput = null; System.out.println("Adding item from directory " + itemname); // create workspace item Item myitem = null; WorkspaceItem wi = null; if (!isTest) { wi = WorkspaceItem.create(c, mycollections[0], template); myitem = wi.getItem(); } // now fill out dublin core for item loadMetadata(c, myitem, path + File.separatorChar + itemname + File.separatorChar); // and the bitstreams from the contents file // process contents file, add bitstreams and bundles, return any // non-standard permissions List<String> options = processContentsFile(c, myitem, path + File.separatorChar + itemname, "contents"); if (useWorkflow) { // don't process handle file // start up a workflow if (!isTest) { // Should we send a workflow alert email or not? 
if (ConfigurationManager.getProperty("workflow", "workflow.framework") .equals("xmlworkflow")) { if (useWorkflowSendEmail) { XmlWorkflowManager.start(c, wi); } else { XmlWorkflowManager.startWithoutNotify(c, wi); } } else { if (useWorkflowSendEmail) { WorkflowManager.start(c, wi); } else { WorkflowManager.startWithoutNotify(c, wi); } } // send ID to the mapfile mapOutput = itemname + " " + myitem.getID(); } } else { // only process handle file if not using workflow system String myhandle = processHandleFile(c, myitem, path + File.separatorChar + itemname, "handle"); // put item in system if (!isTest) { InstallItem.installItem(c, wi, myhandle); // find the handle, and output to map file myhandle = HandleManager.findHandle(c, myitem); mapOutput = itemname + " " + myhandle; } // set permissions if specified in contents file if (options.size() > 0) { System.out.println("Processing options"); processOptions(c, myitem, options); } } // now add to multiple collections if requested if (mycollections.length > 1) { for (int i = 1; i < mycollections.length; i++) { if (!isTest) { mycollections[i].addItem(myitem); } } } // made it this far, everything is fine, commit transaction if (mapOut != null) { mapOut.println(mapOutput); } c.commit(); return myitem; }
protected void doDSGet(Context context, HttpServletRequest request, HttpServletResponse response) throws ServletException, IOException, SQLException, AuthorizeException { // We will resolve the HTTP request parameters into a scope BrowseScope scope = new BrowseScope(context); // Will need to know whether to highlight the "focus" point boolean highlight = false; // Build up log information String logInfo = ""; // For browse by date, we'll need to work out the URL query string to // use when the user swaps the ordering, so that they stay at the same // point in the index String flipOrderingQuery = ""; // Grab HTTP request parameters String focus = request.getParameter("focus"); String startsWith = request.getParameter("starts_with"); String top = request.getParameter("top"); String bottom = request.getParameter("bottom"); // The following three are specific to browsing items by date String month = request.getParameter("month"); String year = request.getParameter("year"); String order = request.getParameter("order"); // For browse by date: oldest item first? boolean oldestFirst = false; if ((order != null) && order.equalsIgnoreCase("oldestfirst")) { oldestFirst = true; } if (browseDates && (year != null) && !year.equals("") && ((startsWith == null) || startsWith.equals(""))) { // We're browsing items by date, the user hasn't typed anything // into the "year" text box, and they've selected a year from // the drop-down list. From this we work out where to start // the browse. 
startsWith = year; if ((month != null) & !month.equals("-1")) { // They've selected a month as well if (month.length() == 1) { // Ensure double-digit month number month = "0" + month; } startsWith = year + "-" + month; } } // Set the scope according to the parameters passed in if (focus != null) { // ---------------------------------------------- // Browse should start at a specified focus point // ---------------------------------------------- if (browseAuthors || browseSubjects) { // For browsing authors, focus is just a text value scope.setFocus(focus); } else { // For browsing items by title or date, focus is a Handle Item item = (Item) HandleManager.resolveToObject(context, focus); if (item == null) { // Handle is invalid one. Show an error. JSPManager.showInvalidIDError(request, response, focus, Constants.ITEM); return; } scope.setFocus(item); } // Will need to highlight the focus highlight = true; logInfo = "focus=" + focus + ","; if (browseDates) { // if the date order is flipped, we'll keep the same focus flipOrderingQuery = "focus=" + URLEncoder.encode(focus, Constants.DEFAULT_ENCODING) + "&"; } } else if (startsWith != null) { // ---------------------------------------------- // Start the browse using user-specified text // ---------------------------------------------- if (browseDates) { // if the date order is flipped, we'll keep the same focus flipOrderingQuery = "starts_with=" + URLEncoder.encode(startsWith, Constants.DEFAULT_ENCODING) + "&"; /* * When the user is browsing with the most recent items first, * the browse code algorithm doesn't quite do what some people * might expect. For example, if in the index there are entries: * * Mar-2000 15-Feb-2000 6-Feb-2000 15-Jan-2000 * * and the user has selected "Feb 2000" as the start point for * the browse, the browse algorithm will start at the first * point in that index *after* "Feb 2000". "Feb 2000" would * appear in the index above between 6-Feb-2000 and 15-Jan-2000. 
* So, the browse code in this case will start the browse at * "15-Jan-2000". This isn't really what users are likely to * want: They're more likely to want the browse to start at the * first Feb 2000 date, i.e. 15-Feb-2000. A similar scenario * occurs when the user enters just a year. Our quick hack to * produce this behaviour is to add "-32" to the startsWith * variable, when sorting with most recent items first. This * means the browse code starts at the topmost item in the index * that matches the user's input, rather than the point in the * index where the user's input would appear. */ if (!oldestFirst) { startsWith = startsWith + "-32"; } } scope.setFocus(startsWith); highlight = true; logInfo = "starts_with=" + startsWith + ","; } else if ((top != null) || (bottom != null)) { // ---------------------------------------------- // Paginating: put specified entry at top or bottom // ---------------------------------------------- // Use a single value and a boolean to simplify the code below String val = bottom; boolean isTop = false; if (top != null) { val = top; isTop = true; } if (browseAuthors || browseSubjects) { // Value will be a text value for author browse scope.setFocus(val); } else { // Value is Handle if we're browsing items by title or date Item item = (Item) HandleManager.resolveToObject(context, val); if (item == null) { // Handle is invalid one. Show an error. JSPManager.showInvalidIDError(request, response, focus, Constants.ITEM); return; } scope.setFocus(item); } // This entry appears at the top or bottom, and so needs to have // 0 or 20 entries shown before it scope.setNumberBefore(isTop ? 0 : 20); logInfo = (isTop ? "top" : "bottom") + "=" + val + ","; if (browseDates) { // If the date order is flipped, we'll flip the table upside // down - i.e. the top will become the bottom and the bottom // the top. 
if (top != null) { flipOrderingQuery = "bottom=" + URLEncoder.encode(top, Constants.DEFAULT_ENCODING) + "&"; } else { flipOrderingQuery = "top=" + URLEncoder.encode(bottom, Constants.DEFAULT_ENCODING) + "&"; } } } // ---------------------------------------------- // If none of the above apply, no positioning parameters // set - use start of index // ---------------------------------------------- // Are we in a community or collection? Community community = UIUtil.getCommunityLocation(request); Collection collection = UIUtil.getCollectionLocation(request); if (collection != null) { logInfo = logInfo + ",collection_id=" + collection.getID() + ","; scope.setScope(collection); } else if (community != null) { logInfo = logInfo + ",community_id=" + community.getID() + ","; scope.setScope(community); } BrowseInfo browseInfo; try { // Query the browse index if (browseAuthors) { browseInfo = Browse.getAuthors(scope); } else if (browseDates) { browseInfo = Browse.getItemsByDate(scope, oldestFirst); } else if (browseSubjects) { browseInfo = Browse.getSubjects(scope); } else { browseInfo = Browse.getItemsByTitle(scope); } } catch (SQLException sqle) { // An invalid scope was given JSPManager.showIntegrityError(request, response); return; } // Write log entry String what = "title"; if (browseAuthors) { what = "author"; } else if (browseSubjects) { what = "subject"; } else if (browseDates) { what = "date"; } log.info( LogManager.getHeader( context, "browse_" + what, logInfo + "results=" + browseInfo.getResultCount())); if (browseInfo.getResultCount() == 0) { // No results! 
request.setAttribute("community", community); request.setAttribute("collection", collection); JSPManager.showJSP(request, response, "/browse/no-results.jsp"); } else { // Work out what the query strings will be for the previous // and next pages if (!browseInfo.isFirst()) { // Not the first page, so we'll need a "previous page" button // The top entry of the current page becomes the bottom // entry of the "previous page" String s; if (browseAuthors || browseSubjects) // aneesh { s = (browseInfo.getStringResults())[0]; } else { Item firstItem = (browseInfo.getItemResults())[0]; s = firstItem.getHandle(); } if (browseDates && oldestFirst) { // For browsing by date, oldest first, we need // to add the ordering parameter request.setAttribute( "previous.query", "order=oldestfirst&bottom=" + URLEncoder.encode(s, Constants.DEFAULT_ENCODING)); } else { request.setAttribute( "previous.query", "bottom=" + URLEncoder.encode(s, Constants.DEFAULT_ENCODING)); } } if (!browseInfo.isLast()) { // Not the last page, so we'll need a "next page" button // The bottom entry of the current page will be the top // entry in the next page String s; if (browseAuthors) { String[] authors = browseInfo.getStringResults(); s = authors[authors.length - 1]; } else if (browseSubjects) { String[] subjects = browseInfo.getStringResults(); s = subjects[subjects.length - 1]; } else { Item[] items = browseInfo.getItemResults(); Item lastItem = items[items.length - 1]; s = lastItem.getHandle(); } if (browseDates && oldestFirst) { // For browsing by date, oldest first, we need // to add the ordering parameter request.setAttribute( "next.query", "order=oldestfirst&top=" + URLEncoder.encode(s, Constants.DEFAULT_ENCODING)); } else { request.setAttribute( "next.query", "top=" + URLEncoder.encode(s, Constants.DEFAULT_ENCODING)); } } // Set appropriate attributes and forward to results page request.setAttribute("community", community); request.setAttribute("collection", collection); 
request.setAttribute("browse.info", browseInfo); request.setAttribute("highlight", new Boolean(highlight)); if (browseAuthors) { JSPManager.showJSP(request, response, "/browse/authors.jsp"); } else if (browseSubjects) { JSPManager.showJSP(request, response, "/browse/subjects.jsp"); } else if (browseDates) { request.setAttribute("oldest.first", new Boolean(oldestFirst)); request.setAttribute("flip.ordering.query", flipOrderingQuery); JSPManager.showJSP(request, response, "/browse/items-by-date.jsp"); } else { JSPManager.showJSP(request, response, "/browse/items-by-title.jsp"); } } }
public Map act( Redirector redirector, SourceResolver resolver, Map objectModel, String source, Parameters parameters) throws Exception { Request request = ObjectModelHelper.getRequest(objectModel); String requesterName = request.getParameter("requesterName"); String requesterEmail = request.getParameter("requesterEmail"); String allFiles = request.getParameter("allFiles"); String message = request.getParameter("message"); String bitstreamId = request.getParameter("bitstreamId"); // User email from context Context context = ContextUtil.obtainContext(objectModel); EPerson loggedin = context.getCurrentUser(); String eperson = null; if (loggedin != null) { eperson = loggedin.getEmail(); } // Check all data is there if (StringUtils.isEmpty(requesterName) || StringUtils.isEmpty(requesterEmail) || StringUtils.isEmpty(allFiles) || StringUtils.isEmpty(message)) { // Either the user did not fill out the form or this is the // first time they are visiting the page. Map<String, String> map = new HashMap<String, String>(); map.put("bitstreamId", bitstreamId); if (StringUtils.isEmpty(requesterEmail)) { map.put("requesterEmail", eperson); } else { map.put("requesterEmail", requesterEmail); } map.put("requesterName", requesterName); map.put("allFiles", allFiles); map.put("message", message); return map; } DSpaceObject dso = HandleUtil.obtainHandle(objectModel); if (!(dso instanceof Item)) { throw new Exception("Invalid DspaceObject at ItemRequest."); } Item item = (Item) dso; String title = ""; Metadatum[] titleDC = item.getDC("title", null, Item.ANY); if (titleDC == null || titleDC.length == 0) { titleDC = item.getDC("title", Item.ANY, Item.ANY); // dc.title with qualifier term } if (titleDC != null && titleDC.length > 0) { title = titleDC[0].value; } RequestItemAuthor requestItemAuthor = new DSpace() .getServiceManager() .getServiceByName( RequestItemAuthorExtractor.class.getName(), RequestItemAuthorExtractor.class) .getRequestItemAuthor(context, item); RequestItem requestItem 
= new RequestItem( item.getID(), Integer.parseInt(bitstreamId), requesterEmail, requesterName, message, Boolean.getBoolean(allFiles)); // All data is there, send the email Email email = Email.getEmail( I18nUtil.getEmailFilename(context.getCurrentLocale(), "request_item.author")); email.addRecipient(requestItemAuthor.getEmail()); email.addArgument(requesterName); email.addArgument(requesterEmail); email.addArgument( allFiles.equals("true") ? I18nUtil.getMessage("itemRequest.all") : Bitstream.find(context, Integer.parseInt(bitstreamId)).getName()); email.addArgument(HandleManager.getCanonicalForm(item.getHandle())); email.addArgument(title); // request item title email.addArgument(message); // message email.addArgument(getLinkTokenEmail(context, requestItem)); email.addArgument(requestItemAuthor.getFullName()); // corresponding author name email.addArgument(requestItemAuthor.getEmail()); // corresponding author email email.addArgument(ConfigurationManager.getProperty("dspace.name")); email.addArgument(ConfigurationManager.getProperty("mail.helpdesk")); email.setReplyTo(requesterEmail); email.send(); // Finished, allow to pass. return null; }
public static void main(String[] argv) throws Exception { DSIndexer.setBatchProcessingMode(true); Date startTime = new Date(); int status = 0; try { // create an options object and populate it CommandLineParser parser = new PosixParser(); Options options = new Options(); options.addOption("a", "add", false, "add items to DSpace"); options.addOption("r", "replace", false, "replace items in mapfile"); options.addOption("d", "delete", false, "delete items listed in mapfile"); options.addOption("s", "source", true, "source of items (directory)"); options.addOption("z", "zip", true, "name of zip file"); options.addOption("c", "collection", true, "destination collection(s) Handle or database ID"); options.addOption("m", "mapfile", true, "mapfile items in mapfile"); options.addOption("e", "eperson", true, "email of eperson doing importing"); options.addOption("w", "workflow", false, "send submission through collection's workflow"); options.addOption( "n", "notify", false, "if sending submissions through the workflow, send notification emails"); options.addOption("t", "test", false, "test run - do not actually import items"); options.addOption("p", "template", false, "apply template"); options.addOption("R", "resume", false, "resume a failed import (add only)"); options.addOption("q", "quiet", false, "don't display metadata"); options.addOption("h", "help", false, "help"); CommandLine line = parser.parse(options, argv); String command = null; // add replace remove, etc String sourcedir = null; String mapfile = null; String eperson = null; // db ID or email String[] collections = null; // db ID or handles if (line.hasOption('h')) { HelpFormatter myhelp = new HelpFormatter(); myhelp.printHelp("ItemImport\n", options); System.out.println( "\nadding items: ItemImport -a -e eperson -c collection -s sourcedir -m mapfile"); System.out.println( "\nadding items from zip file: ItemImport -a -e eperson -c collection -s sourcedir -z filename.zip -m mapfile"); System.out.println( 
"replacing items: ItemImport -r -e eperson -c collection -s sourcedir -m mapfile"); System.out.println("deleting items: ItemImport -d -e eperson -m mapfile"); System.out.println( "If multiple collections are specified, the first collection will be the one that owns the item."); System.exit(0); } if (line.hasOption('a')) { command = "add"; } if (line.hasOption('r')) { command = "replace"; } if (line.hasOption('d')) { command = "delete"; } if (line.hasOption('w')) { useWorkflow = true; if (line.hasOption('n')) { useWorkflowSendEmail = true; } } if (line.hasOption('t')) { isTest = true; System.out.println("**Test Run** - not actually importing items."); } if (line.hasOption('p')) { template = true; } if (line.hasOption('s')) // source { sourcedir = line.getOptionValue('s'); } if (line.hasOption('m')) // mapfile { mapfile = line.getOptionValue('m'); } if (line.hasOption('e')) // eperson { eperson = line.getOptionValue('e'); } if (line.hasOption('c')) // collections { collections = line.getOptionValues('c'); } if (line.hasOption('R')) { isResume = true; System.out.println("**Resume import** - attempting to import items not already imported"); } if (line.hasOption('q')) { isQuiet = true; } boolean zip = false; String zipfilename = ""; String ziptempdir = ConfigurationManager.getProperty("org.dspace.app.itemexport.work.dir"); if (line.hasOption('z')) { zip = true; zipfilename = sourcedir + System.getProperty("file.separator") + line.getOptionValue('z'); } // now validate // must have a command set if (command == null) { System.out.println( "Error - must run with either add, replace, or remove (run with -h flag for details)"); System.exit(1); } else if ("add".equals(command) || "replace".equals(command)) { if (sourcedir == null) { System.out.println("Error - a source directory containing items must be set"); System.out.println(" (run with -h flag for details)"); System.exit(1); } if (mapfile == null) { System.out.println("Error - a map file to hold importing results must 
be specified"); System.out.println(" (run with -h flag for details)"); System.exit(1); } if (eperson == null) { System.out.println("Error - an eperson to do the importing must be specified"); System.out.println(" (run with -h flag for details)"); System.exit(1); } if (collections == null) { System.out.println("Error - at least one destination collection must be specified"); System.out.println(" (run with -h flag for details)"); System.exit(1); } } else if ("delete".equals(command)) { if (eperson == null) { System.out.println("Error - an eperson to do the importing must be specified"); System.exit(1); } if (mapfile == null) { System.out.println("Error - a map file must be specified"); System.exit(1); } } // can only resume for adds if (isResume && !"add".equals(command)) { System.out.println("Error - resume option only works with --add command"); System.exit(1); } // do checks around mapfile - if mapfile exists and 'add' is selected, // resume must be chosen File myFile = new File(mapfile); if (!isResume && "add".equals(command) && myFile.exists()) { System.out.println("Error - the mapfile " + mapfile + " already exists."); System.out.println( "Either delete it or use --resume if attempting to resume an aborted import."); System.exit(1); } // does the zip file exist and can we write to the temp directory if (zip) { File zipfile = new File(sourcedir); if (!zipfile.canRead()) { System.out.println("Zip file '" + sourcedir + "' does not exist, or is not readable."); System.exit(1); } if (ziptempdir == null) { System.out.println( "Unable to unzip import file as the key 'org.dspace.app.itemexport.work.dir' is not set in dspace.cfg"); System.exit(1); } zipfile = new File(ziptempdir); if (!zipfile.isDirectory()) { System.out.println( "'" + ConfigurationManager.getProperty("org.dspace.app.itemexport.work.dir") + "' as defined by the key 'org.dspace.app.itemexport.work.dir' in dspace.cfg " + "is not a valid directory"); System.exit(1); } File tempdir = new File(ziptempdir); 
if (!tempdir.exists() && !tempdir.mkdirs()) { log.error("Unable to create temporary directory"); } sourcedir = ziptempdir + System.getProperty("file.separator") + line.getOptionValue("z"); ziptempdir = ziptempdir + System.getProperty("file.separator") + line.getOptionValue("z") + System.getProperty("file.separator"); } ItemImport myloader = new ItemImport(); // create a context Context c = new Context(); // find the EPerson, assign to context EPerson myEPerson = null; if (eperson.indexOf('@') != -1) { // @ sign, must be an email myEPerson = EPerson.findByEmail(c, eperson); } else { myEPerson = EPerson.find(c, Integer.parseInt(eperson)); } if (myEPerson == null) { System.out.println("Error, eperson cannot be found: " + eperson); System.exit(1); } c.setCurrentUser(myEPerson); // find collections Collection[] mycollections = null; // don't need to validate collections set if command is "delete" if (!"delete".equals(command)) { System.out.println("Destination collections:"); mycollections = new Collection[collections.length]; // validate each collection arg to see if it's a real collection for (int i = 0; i < collections.length; i++) { // is the ID a handle? if (collections[i].indexOf('/') != -1) { // string has a / so it must be a handle - try and resolve // it mycollections[i] = (Collection) HandleManager.resolveToObject(c, collections[i]); // resolved, now make sure it's a collection if ((mycollections[i] == null) || (mycollections[i].getType() != Constants.COLLECTION)) { mycollections[i] = null; } } // not a handle, try and treat it as an integer collection // database ID else if (collections[i] != null) { mycollections[i] = Collection.find(c, Integer.parseInt(collections[i])); } // was the collection valid? 
if (mycollections[i] == null) { throw new IllegalArgumentException( "Cannot resolve " + collections[i] + " to collection"); } // print progress info String owningPrefix = ""; if (i == 0) { owningPrefix = "Owning "; } System.out.println(owningPrefix + " Collection: " + mycollections[i].getMetadata("name")); } } // end of validating collections try { // If this is a zip archive, unzip it first if (zip) { ZipFile zf = new ZipFile(zipfilename); ZipEntry entry; Enumeration<? extends ZipEntry> entries = zf.entries(); while (entries.hasMoreElements()) { entry = entries.nextElement(); if (entry.isDirectory()) { if (!new File(ziptempdir + entry.getName()).mkdir()) { log.error("Unable to create contents directory"); } } else { System.out.println("Extracting file: " + entry.getName()); int index = entry.getName().lastIndexOf('/'); if (index == -1) { // Was it created on Windows instead? index = entry.getName().lastIndexOf('\\'); } if (index > 0) { File dir = new File(ziptempdir + entry.getName().substring(0, index)); if (!dir.mkdirs()) { log.error("Unable to create directory"); } } byte[] buffer = new byte[1024]; int len; InputStream in = zf.getInputStream(entry); BufferedOutputStream out = new BufferedOutputStream(new FileOutputStream(ziptempdir + entry.getName())); while ((len = in.read(buffer)) >= 0) { out.write(buffer, 0, len); } in.close(); out.close(); } } } c.turnOffAuthorisationSystem(); if ("add".equals(command)) { myloader.addItems(c, mycollections, sourcedir, mapfile, template); } else if ("replace".equals(command)) { myloader.replaceItems(c, mycollections, sourcedir, mapfile, template); } else if ("delete".equals(command)) { myloader.deleteItems(c, mapfile); } // complete all transactions c.complete(); } catch (Exception e) { // abort all operations if (mapOut != null) { mapOut.close(); } mapOut = null; c.abort(); e.printStackTrace(); System.out.println(e); status = 1; } // Delete the unzipped file try { if (zip) { System.gc(); System.out.println("Deleting 
temporary zip directory: " + ziptempdir); ItemImport.deleteDirectory(new File(ziptempdir)); } } catch (Exception ex) { System.out.println("Unable to delete temporary zip archive location: " + ziptempdir); } if (mapOut != null) { mapOut.close(); } if (isTest) { System.out.println("***End of Test Run***"); } } finally { DSIndexer.setBatchProcessingMode(false); Date endTime = new Date(); System.out.println("Started: " + startTime.getTime()); System.out.println("Ended: " + endTime.getTime()); System.out.println( "Elapsed time: " + ((endTime.getTime() - startTime.getTime()) / 1000) + " secs (" + (endTime.getTime() - startTime.getTime()) + " msecs)"); } System.exit(status); }
/**
 * Command-line entry point for the media filter: parses options, loads the
 * configured filter plugins and their accepted input formats from dspace.cfg,
 * applies the filters site-wide or to the community/collection/item named by
 * -i, and (unless suppressed with -n) updates the search index afterwards.
 *
 * @param argv command line arguments; run with -h for usage
 * @throws Exception if option parsing, filtering, or index update fails
 */
public static void main(String[] argv) throws Exception {
  // set headless for non-gui workstations
  System.setProperty("java.awt.headless", "true");

  // create an options object and populate it
  CommandLineParser parser = new PosixParser();

  Options options = new Options();

  options.addOption(
      "v", "verbose", false, "print all extracted text and other details to STDOUT");
  options.addOption("f", "force", false, "force all bitstreams to be processed");
  options.addOption(
      "n", "noindex", false, "do NOT update the search index after filtering bitstreams");
  options.addOption("i", "identifier", true, "ONLY process bitstreams belonging to identifier");
  options.addOption("m", "maximum", true, "process no more than maximum items");
  options.addOption("h", "help", false, "help");

  CommandLine line = parser.parse(options, argv);

  if (line.hasOption('h')) {
    HelpFormatter myhelp = new HelpFormatter();
    myhelp.printHelp("MediaFilter\n", options);

    System.exit(0);
  }

  // the following flags are static fields read by the filtering methods
  if (line.hasOption('v')) {
    isVerbose = true;
  }

  if (line.hasOption('n')) {
    updateIndex = false;
  }

  if (line.hasOption('f')) {
    isForce = true;
  }

  if (line.hasOption('i')) {
    identifier = line.getOptionValue('i');
  }

  if (line.hasOption('m')) {
    max2Process = Integer.parseInt(line.getOptionValue('m'));
    // NOTE(review): a maximum of exactly 1 is rejected here (<= 1) and replaced
    // with "unlimited" — confirm that excluding 1 is intentional
    if (max2Process <= 1) {
      System.out.println("Invalid maximum value '" + line.getOptionValue('m') + "' - ignoring");
      max2Process = Integer.MAX_VALUE;
    }
  }

  // set up filters: each plugin maps its class name to the comma-separated
  // list of input formats configured as "filter.<classname>.inputFormats"
  filterClasses = (MediaFilter[]) PluginManager.getPluginSequence(MediaFilter.class);
  for (int i = 0; i < filterClasses.length; i++) {
    String filterName = filterClasses[i].getClass().getName();
    String formats = ConfigurationManager.getProperty("filter." + filterName + ".inputFormats");
    if (formats != null) {
      filterFormats.put(filterName, Arrays.asList(formats.split(",[\\s]*")));
    }
  }

  Context c = null;

  try {
    c = new Context();

    // have to be super-user to do the filtering
    c.setIgnoreAuthorization(true);

    // now apply the filters
    if (identifier == null) {
      applyFiltersAllItems(c);
    } else // restrict application scope to identifier
    {
      DSpaceObject dso = HandleManager.resolveToObject(c, identifier);
      if (dso == null) {
        throw new IllegalArgumentException(
            "Cannot resolve " + identifier + " to a DSpace object");
      }

      // NOTE(review): handles resolving to types other than these three fall
      // through silently with no filtering and no message — confirm intended
      switch (dso.getType()) {
        case Constants.COMMUNITY:
          applyFiltersCommunity(c, (Community) dso);
          break;
        case Constants.COLLECTION:
          applyFiltersCollection(c, (Collection) dso);
          break;
        case Constants.ITEM:
          applyFiltersItem(c, (Item) dso);
          break;
      }
    }

    // update search index?
    if (updateIndex) {
      System.out.println("Updating search index:");
      DSIndexer.updateIndex(c);
    }

    c.complete();
    // null out so the finally block does not abort the committed context
    c = null;
  } finally {
    if (c != null) {
      c.abort();
    }
  }
}
/** * Return a globally unique identifier for the repository. For dspace, we use the handle prefix. */ public String getRepositoryIdentifier(Object object) throws WingException { return HandleManager.getPrefix(); }
/** * Save the handle. * * <p>If the handleID is -1 then a new handle is created. * * @param context The current dspace context * @param handleID The handle ID, or -1 for a new handle. * @param url The handle URL * @param resourceTypeID The type of referenced resource * @param resourceID ID of referenced resource * @return A result */ public static FlowResult processSaveHandle( Context context, int handleID, String handle, String url, int resourceTypeID, int resourceID, boolean archiveOldHandle) throws SQLException, AuthorizeException, UIException { FlowResult result = new FlowResult(); result.setParameter("handle_id", handleID); result.setContinue(false); result.setOutcome(false); // If we have errors, the form needs to be resubmitted to fix those problems if (StringUtils.isEmpty(handle)) { result.addError("handle_empty"); } if (resourceTypeID == -1 && resourceID == -1 && StringUtils.isEmpty(url)) { result.addError("url_empty"); } else if (StringUtils.isEmpty(url)) { if (resourceTypeID == -1) { result.addError("resource_type_id_empty"); } if (resourceID == -1) { result.addError("resource_id_empty"); } } if (result.getErrors() == null) { try { Handle h = null; if (handleID == -1) { h = Handle.create(context, null, handle); } else { h = Handle.find(context, handleID); if (h.getHandle() != handle) { HandleManager.changeHandle(context, h.getHandle(), handle, archiveOldHandle); } } h.setHandle(handle); h.setURL(url); h.setResourceTypeID(resourceTypeID); h.setResourceID(resourceID); h.update(); context.commit(); result.setContinue(true); result.setOutcome(true); result.setMessage(T_handle_successfully_saved); } catch (Exception e) { result.setMessage(T_handle_saving_failed); log.error(e.getMessage()); context.abort(); } } return result; }
/**
 * Builds and sends a subscription-digest email to the given e-person covering the
 * researcher-page keys in {@code rpkeys}: for each key, items are found via a Solr
 * query filtered on the given relation fields, and items archived/modified
 * "yesterday" (or only newly archived, depending on configuration) are listed.
 * No e-mail is sent if no key produced any new items.
 *
 * <p>NOTE(review): the old javadoc spoke of communities/collections, but the code
 * clearly queries by researcher-page key — doc updated to match the code.
 *
 * @param researcher supplies the CRIS Solr search service
 * @param context DSpace context object
 * @param eperson eperson to send to (also selects the message locale)
 * @param rpkeys researcher-page keys to report on
 * @param test if true, only log what would have been sent instead of emailing
 * @param relationFields Solr fields relating items to a researcher page
 * @throws SearchServiceException if the Solr query fails
 */
public static void sendEmail(
    Researcher researcher,
    Context context,
    EPerson eperson,
    List<String> rpkeys,
    boolean test,
    List<String> relationFields)
    throws IOException, MessagingException, SQLException, SearchServiceException {
  CrisSearchService searchService = researcher.getCrisSearchService();

  // Get a resource bundle according to the eperson language preferences
  Locale supportedLocale = I18nUtil.getEPersonLocale(eperson);

  StringBuffer emailText = new StringBuffer();
  boolean isFirst = true;

  for (String rpkey : rpkeys) {
    // restrict to archived items related to this researcher page
    SolrQuery query = new SolrQuery();
    query.setFields("search.resourceid");
    query.addFilterQuery(
        "{!field f=search.resourcetype}" + Constants.ITEM, "{!field f=inarchive}true");
    for (String tmpRelations : relationFields) {
      String fq = "{!field f=" + tmpRelations + "}" + rpkey;
      query.addFilterQuery(fq);
    }
    query.setRows(Integer.MAX_VALUE);
    if (ConfigurationManager.getBooleanProperty("eperson.subscription.onlynew", false)) {
      // get only the items archived yesterday
      query.setQuery("dateaccessioned:(NOW/DAY-1DAY)");
    } else {
      // get all item modified yesterday but not published the day
      // before
      // and all the item modified today and archived yesterday
      query.setQuery(
          "(item.lastmodified:(NOW/DAY-1DAY) AND dateaccessioned:(NOW/DAY-1DAY)) OR ((item.lastmodified:(NOW/DAY) AND dateaccessioned:(NOW/DAY-1DAY)))");
    }

    QueryResponse qResponse = searchService.search(query);
    SolrDocumentList results = qResponse.getResults();

    // Only add to buffer if there are new items
    if (results.getNumFound() > 0) {
      // separator between sections for successive keys
      if (!isFirst) {
        emailText.append("\n---------------------------------------\n");
      } else {
        isFirst = false;
      }

      emailText
          .append(I18nUtil.getMessage("org.dspace.eperson.Subscribe.new-items", supportedLocale))
          .append(" ")
          .append(rpkey)
          .append(": ")
          .append(results.getNumFound())
          .append("\n\n");

      for (SolrDocument solrDoc : results) {
        Item item = Item.find(context, (Integer) solrDoc.getFieldValue("search.resourceid"));

        DCValue[] titles = item.getDC("title", null, Item.ANY);
        emailText
            .append(" ")
            .append(I18nUtil.getMessage("org.dspace.eperson.Subscribe.title", supportedLocale))
            .append(" ");

        if (titles.length > 0) {
          emailText.append(titles[0].value);
        } else {
          // fall back to a localized "untitled" placeholder
          emailText.append(
              I18nUtil.getMessage("org.dspace.eperson.Subscribe.untitled", supportedLocale));
        }

        DCValue[] authors = item.getDC("contributor", Item.ANY, Item.ANY);
        if (authors.length > 0) {
          emailText
              .append("\n ")
              .append(
                  I18nUtil.getMessage("org.dspace.eperson.Subscribe.authors", supportedLocale))
              .append(" ")
              .append(authors[0].value);

          for (int k = 1; k < authors.length; k++) {
            emailText.append("\n ").append(authors[k].value);
          }
        }

        emailText
            .append("\n ")
            .append(I18nUtil.getMessage("org.dspace.eperson.Subscribe.id", supportedLocale))
            .append(" ")
            .append(HandleManager.getCanonicalForm(item.getHandle()))
            .append("\n\n");

        // drop the item from the context cache to bound memory use over large result sets
        context.removeCached(item, item.getID());
      }
    }
  }

  // Send an e-mail if there were any new items
  if (emailText.length() > 0) {
    if (test) {
      // test mode: log the would-be message instead of sending it
      log.info(LogManager.getHeader(context, "subscription:", "eperson=" + eperson.getEmail()));
      log.info(LogManager.getHeader(context, "subscription:", "text=" + emailText.toString()));
    } else {
      Email email =
          ConfigurationManager.getEmail(
              I18nUtil.getEmailFilename(supportedLocale, "subscription"));
      email.addRecipient(eperson.getEmail());
      email.addArgument(emailText.toString());
      email.send();

      log.info(
          LogManager.getHeader(context, "sent_subscription", "eperson_id=" + eperson.getID()));
    }
  }
}
protected void doDSGet(Context context, HttpServletRequest request, HttpServletResponse response) throws ServletException, IOException, SQLException, AuthorizeException { // dispense with simple service document requests String scope = request.getParameter("scope"); if (scope != null && "".equals(scope)) { scope = null; } String path = request.getPathInfo(); if (path != null && path.endsWith("description.xml")) { String svcDescrip = OpenSearch.getDescription(scope); response.setContentType(OpenSearch.getContentType("opensearchdescription")); response.setContentLength(svcDescrip.length()); response.getWriter().write(svcDescrip); return; } // get enough request parameters to decide on action to take String format = request.getParameter("format"); if (format == null || "".equals(format)) { // default to atom format = "atom"; } // do some sanity checking if (!OpenSearch.getFormats().contains(format)) { response.sendError(HttpServletResponse.SC_BAD_REQUEST); return; } // then the rest - we are processing the query String query = request.getParameter("query"); int start = Util.getIntParameter(request, "start"); int rpp = Util.getIntParameter(request, "rpp"); int sort = Util.getIntParameter(request, "sort_by"); String order = request.getParameter("order"); String sortOrder = (order == null || order.length() == 0 || order.toLowerCase().startsWith("asc")) ? SortOption.ASCENDING : SortOption.DESCENDING; QueryArgs qArgs = new QueryArgs(); // can't start earlier than 0 in the results! 
if (start < 0) { start = 0; } qArgs.setStart(start); if (rpp > 0) { qArgs.setPageSize(rpp); } qArgs.setSortOrder(sortOrder); if (sort > 0) { try { qArgs.setSortOption(SortOption.getSortOption(sort)); } catch (Exception e) { // invalid sort id - do nothing } } qArgs.setSortOrder(sortOrder); // Ensure the query is non-null if (query == null) { query = ""; } // If there is a scope parameter, attempt to dereference it // failure will only result in its being ignored DSpaceObject container = (scope != null) ? HandleManager.resolveToObject(context, scope) : null; // Build log information String logInfo = ""; // get the start of the query results page qArgs.setQuery(query); // Perform the search QueryResults qResults = null; if (container == null) { qResults = DSQuery.doQuery(context, qArgs); } else if (container instanceof Collection) { logInfo = "collection_id=" + container.getID() + ","; qResults = DSQuery.doQuery(context, qArgs, (Collection) container); } else if (container instanceof Community) { logInfo = "community_id=" + container.getID() + ","; qResults = DSQuery.doQuery(context, qArgs, (Community) container); } // now instantiate the results DSpaceObject[] results = new DSpaceObject[qResults.getHitHandles().size()]; for (int i = 0; i < qResults.getHitHandles().size(); i++) { String myHandle = (String) qResults.getHitHandles().get(i); DSpaceObject dso = HandleManager.resolveToObject(context, myHandle); if (dso == null) { throw new SQLException("Query \"" + query + "\" returned unresolvable handle: " + myHandle); } results[i] = dso; } // Log log.info( LogManager.getHeader( context, "search", logInfo + "query=\"" + query + "\",results=(" + results.length + ")")); // format and return results Map<String, String> labelMap = getLabels(request); Document resultsDoc = OpenSearch.getResultsDoc(format, query, qResults, container, results, labelMap); try { Transformer xf = TransformerFactory.newInstance().newTransformer(); 
response.setContentType(OpenSearch.getContentType(format)); xf.transform(new DOMSource(resultsDoc), new StreamResult(response.getWriter())); } catch (TransformerException e) { log.error(e); throw new ServletException(e.toString()); } }
protected void doDSGet(Context context, HttpServletRequest request, HttpServletResponse response) throws ServletException, IOException, SQLException, AuthorizeException { includeAll = ConfigurationManager.getBooleanProperty("harvest.includerestricted.rss", true); String path = request.getPathInfo(); String feedType = null; String handle = null; // build label map from localized Messages resource bundle Locale locale = request.getLocale(); ResourceBundle msgs = ResourceBundle.getBundle("Messages", locale); Map<String, String> labelMap = new HashMap<String, String>(); labelMap.put(SyndicationFeed.MSG_UNTITLED, msgs.getString(clazz + ".notitle")); labelMap.put(SyndicationFeed.MSG_LOGO_TITLE, msgs.getString(clazz + ".logo.title")); labelMap.put( SyndicationFeed.MSG_FEED_DESCRIPTION, msgs.getString(clazz + ".general-feed.description")); labelMap.put(SyndicationFeed.MSG_UITYPE, SyndicationFeed.UITYPE_JSPUI); for (String selector : SyndicationFeed.getDescriptionSelectors()) { labelMap.put("metadata." 
+ selector, msgs.getString(SyndicationFeed.MSG_METADATA + selector)); } if (path != null) { // substring(1) is to remove initial '/' path = path.substring(1); int split = path.indexOf('/'); if (split != -1) { feedType = path.substring(0, split); handle = path.substring(split + 1); } } DSpaceObject dso = null; // as long as this is not a site wide feed, // attempt to retrieve the Collection or Community object if (handle != null && !handle.equals(SITE_FEED_KEY)) { // Determine if handle is a valid reference dso = HandleManager.resolveToObject(context, handle); if (dso == null) { log.info(LogManager.getHeader(context, "invalid_handle", "path=" + path)); JSPManager.showInvalidIDError(request, response, handle, -1); return; } } if (!enabled || (dso != null && (dso.getType() != Constants.COLLECTION && dso.getType() != Constants.COMMUNITY))) { log.info(LogManager.getHeader(context, "invalid_id", "path=" + path)); JSPManager.showInvalidIDError(request, response, path, -1); return; } // Determine if requested format is supported if (feedType == null || !formats.contains(feedType)) { log.info(LogManager.getHeader(context, "invalid_syndformat", "path=" + path)); JSPManager.showInvalidIDError(request, response, path, -1); return; } if (dso != null && dso.getType() == Constants.COLLECTION) { labelMap.put( SyndicationFeed.MSG_FEED_TITLE, MessageFormat.format( msgs.getString(clazz + ".feed.title"), msgs.getString(clazz + ".feed-type.collection"), dso.getMetadata("short_description"))); } else if (dso != null && dso.getType() == Constants.COMMUNITY) { labelMap.put( SyndicationFeed.MSG_FEED_TITLE, MessageFormat.format( msgs.getString(clazz + ".feed.title"), msgs.getString(clazz + ".feed-type.community"), dso.getMetadata("short_description"))); } // Lookup or generate the feed // Cache key is handle + locale String cacheKey = (handle == null ? "site" : handle) + "." 
+ locale.toString(); SyndicationFeed feed = null; if (feedCache != null) { CacheFeed cFeed = feedCache.get(cacheKey); if (cFeed != null) // cache hit, but... { // Is the feed current? boolean cacheFeedCurrent = false; if (cFeed.timeStamp + (cacheAge * HOUR_MSECS) < System.currentTimeMillis()) { cacheFeedCurrent = true; } // Not current, but have any items changed since feed was created/last checked? else if (!itemsChanged(context, dso, cFeed.timeStamp)) { // no items have changed, re-stamp feed and use it cFeed.timeStamp = System.currentTimeMillis(); cacheFeedCurrent = true; } if (cacheFeedCurrent) { feed = cFeed.access(); } } } // either not caching, not found in cache, or feed in cache not current if (feed == null) { feed = new SyndicationFeed(SyndicationFeed.UITYPE_JSPUI); feed.populate(request, dso, getItems(context, dso), labelMap); if (feedCache != null) { cache(cacheKey, new CacheFeed(feed)); } } // set the feed to the requested type & return it try { feed.setType(feedType); response.setContentType("text/xml; charset=UTF-8"); feed.output(response.getWriter()); } catch (FeedException fex) { throw new IOException(fex.getMessage(), fex); } }
@Override protected void doDSGet(Context context, HttpServletRequest request, HttpServletResponse response) throws ServletException, IOException, SQLException, AuthorizeException { Item item = null; Bitstream bitstream = null; // Get the ID from the URL String idString = request.getPathInfo(); String handle = ""; String sequenceText = ""; String filename = null; int sequenceID; // Parse 'handle' and 'sequence' (bitstream seq. number) out // of remaining URL path, which is typically of the format: // {handle}/{sequence}/{bitstream-name} // But since the bitstream name MAY have any number of "/"s in // it, and the handle is guaranteed to have one slash, we // scan from the start to pick out handle and sequence: // Remove leading slash if any: if (idString.startsWith("/")) { idString = idString.substring(1); } // skip first slash within handle int slashIndex = idString.indexOf('/'); if (slashIndex != -1) { slashIndex = idString.indexOf('/', slashIndex + 1); if (slashIndex != -1) { handle = idString.substring(0, slashIndex); int slash2 = idString.indexOf('/', slashIndex + 1); if (slash2 != -1) { sequenceText = idString.substring(slashIndex + 1, slash2); filename = idString.substring(slash2 + 1); } } } try { sequenceID = Integer.parseInt(sequenceText); } catch (NumberFormatException nfe) { sequenceID = -1; } // Now try and retrieve the item DSpaceObject dso = HandleManager.resolveToObject(context, handle); // Make sure we have valid item and sequence number if (dso != null && dso.getType() == Constants.ITEM && sequenceID >= 0) { item = (Item) dso; if (item.isWithdrawn()) { log.info( LogManager.getHeader( context, "view_bitstream", "handle=" + handle + ",withdrawn=true")); JSPManager.showJSP(request, response, "/tombstone.jsp"); return; } boolean found = false; Bundle[] bundles = item.getBundles(); for (int i = 0; (i < bundles.length) && !found; i++) { Bitstream[] bitstreams = bundles[i].getBitstreams(); for (int k = 0; (k < bitstreams.length) && !found; k++) { if 
(sequenceID == bitstreams[k].getSequenceID()) { bitstream = bitstreams[k]; found = true; } } } } if (bitstream == null || filename == null || !filename.equals(bitstream.getName())) { // No bitstream found or filename was wrong -- ID invalid log.info(LogManager.getHeader(context, "invalid_id", "path=" + idString)); JSPManager.showInvalidIDError(request, response, idString, Constants.BITSTREAM); return; } log.info(LogManager.getHeader(context, "view_bitstream", "bitstream_id=" + bitstream.getID())); // Modification date // Only use last-modified if this is an anonymous access // - caching content that may be generated under authorisation // is a security problem if (context.getCurrentUser() == null) { // TODO: Currently the date of the item, since we don't have dates // for files response.setDateHeader("Last-Modified", item.getLastModified().getTime()); // Check for if-modified-since header long modSince = request.getDateHeader("If-Modified-Since"); if (modSince != -1 && item.getLastModified().getTime() < modSince) { // Item has not been modified since requested date, // hence bitstream has not; return 304 response.setStatus(HttpServletResponse.SC_NOT_MODIFIED); return; } } // Pipe the bits InputStream is = bitstream.retrieve(); // Set the response MIME type response.setContentType(bitstream.getFormat().getMIMEType()); // Response length response.setHeader("Content-Length", String.valueOf(bitstream.getSize())); if (threshold != -1 && bitstream.getSize() >= threshold) { setBitstreamDisposition(bitstream.getName(), request, response); } Utils.bufferedCopy(is, response.getOutputStream()); is.close(); response.getOutputStream().flush(); }