@Override public void update(Collection collection) throws AuthorizeException { try { TableRow row = DatabaseManager.find(context, "collection", collection.getID()); if (row != null) { populateTableRowFromCollection(collection, row); DatabaseManager.update(context, row); } else { throw new RuntimeException("Didn't find collection " + collection.getID()); } } catch (SQLException sqle) { throw new RuntimeException(sqle); } }
/** * Commit the contained item to the main archive. The item is associated with the relevant * collection, added to the search index, and any other tasks such as assigning dates are * performed. * * @return the fully archived item. */ @Override public Item archive(Context context, BasicWorkflowItem workflowItem) throws SQLException, IOException, AuthorizeException { // FIXME: Check auth Item item = workflowItem.getItem(); Collection collection = workflowItem.getCollection(); log.info( LogManager.getHeader( context, "archive_item", "workflow_item_id=" + workflowItem.getID() + "item_id=" + item.getID() + "collection_id=" + collection.getID())); installItemService.installItem(context, workflowItem); // Log the event log.info( LogManager.getHeader( context, "install_item", "workflow_id=" + workflowItem.getID() + ", item_id=" + item.getID() + "handle=FIXME")); return item; }
/** * Change default privileges from the anonymous group to a new group that will be created and * appropriate privileges assigned. The id of this new group will be returned. * * @param context The current DSpace context. * @param collectionID The collection id. * @return The group ID of the new group. */ public static int createCollectionDefaultReadGroup(Context context, int collectionID) throws SQLException, AuthorizeException, UIException { int roleID = getCollectionDefaultRead(context, collectionID); if (roleID != 0) { throw new UIException( "Unable to create a new default read group because either the group already exists or multiple groups are assigned the default privileges."); } Collection collection = Collection.find(context, collectionID); Group role = Group.create(context); role.setName("COLLECTION_" + collection.getID() + "_DEFAULT_READ"); // Remove existing privileges from the anonymous group. AuthorizeManager.removePoliciesActionFilter(context, collection, Constants.DEFAULT_ITEM_READ); AuthorizeManager.removePoliciesActionFilter( context, collection, Constants.DEFAULT_BITSTREAM_READ); // Grant our new role the default privileges. AuthorizeManager.addPolicy(context, collection, Constants.DEFAULT_ITEM_READ, role); AuthorizeManager.addPolicy(context, collection, Constants.DEFAULT_BITSTREAM_READ, role); // Commit the changes role.update(); context.commit(); return role.getID(); }
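/*
 * Illustrative usage sketch, not part of the original source: how a caller might create the new
 * default read group for a collection and then load it so that members can be added. The
 * enclosing method name and the FlowGroupUtils class reference are assumptions for the example.
 */
public static Group exampleCreateDefaultReadGroup(Context context, int collectionID)
    throws SQLException, AuthorizeException, UIException {
  // Creates the COLLECTION_<id>_DEFAULT_READ group and moves the default privileges onto it
  int groupID = FlowGroupUtils.createCollectionDefaultReadGroup(context, collectionID);

  // Load the group so that EPersons (or other groups) can be added as members
  return Group.find(context, groupID);
}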
/** * Commit the contained item to the main archive. The item is associated with the relevant * collection, added to the search index, and any other tasks such as assigning dates are * performed. * * @return the fully archived item. */ private static Item archive(Context c, WorkflowItem wfi) throws SQLException, IOException, AuthorizeException { // FIXME: Check auth Item item = wfi.getItem(); Collection collection = wfi.getCollection(); log.info( LogManager.getHeader( c, "archive_item", "workflow_item_id=" + wfi.getID() + "item_id=" + item.getID() + "collection_id=" + collection.getID())); InstallItem.installItem(c, wfi); // Log the event log.info( LogManager.getHeader( c, "install_item", "workflow_id=" + wfi.getID() + ", item_id=" + item.getID() + "handle=FIXME")); return item; }
/** * Get all workflow items for a particular collection. * * @param context the context object * @param c the collection * @return array of the corresponding workflow items */ public static WorkflowItem[] findByCollection(Context context, Collection c) throws SQLException { List wsItems = new ArrayList(); TableRowIterator tri = DatabaseManager.queryTable( context, "workflowitem", "SELECT workflowitem.* FROM workflowitem WHERE " + "workflowitem.collection_id= ? ", c.getID()); try { while (tri.hasNext()) { TableRow row = tri.next(); // Check the cache WorkflowItem wi = (WorkflowItem) context.fromCache(WorkflowItem.class, row.getIntColumn("workflow_id")); // not in cache? turn row into workflowitem if (wi == null) { wi = new WorkflowItem(context, row); } wsItems.add(wi); } } finally { if (tri != null) tri.close(); } WorkflowItem[] wsArray = new WorkflowItem[wsItems.size()]; wsArray = (WorkflowItem[]) wsItems.toArray(wsArray); return wsArray; }
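/*
 * Illustrative usage sketch, not part of the original source: listing the items currently under
 * workflow review for a collection by way of findByCollection() above.
 */
public static void examplePrintWorkflowQueue(Context context, Collection collection)
    throws SQLException {
  WorkflowItem[] inProgress = WorkflowItem.findByCollection(context, collection);
  for (WorkflowItem wfi : inProgress) {
    // Each workflow item wraps the underlying Item while it moves through the review steps
    System.out.println("workflow_id=" + wfi.getID() + ", item_id=" + wfi.getItem().getID());
  }
}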
/** * Return the workflow item to the workspace of the submitter. The workflow item is removed, and a * workspace item created. * * @param c Context * @param wfi WorkflowItem to be 'dismantled' * @return the workspace item */ private static WorkspaceItem returnToWorkspace(Context c, WorkflowItem wfi) throws SQLException, IOException, AuthorizeException { Item myitem = wfi.getItem(); Collection mycollection = wfi.getCollection(); // FIXME: How should this interact with the workflow system? // FIXME: Remove license // FIXME: Provenance statement? // Create the new workspace item row TableRow row = DatabaseManager.create(c, "workspaceitem"); row.setColumn("item_id", myitem.getID()); row.setColumn("collection_id", mycollection.getID()); DatabaseManager.update(c, row); int wsi_id = row.getIntColumn("workspace_item_id"); WorkspaceItem wi = WorkspaceItem.find(c, wsi_id); wi.setMultipleFiles(wfi.hasMultipleFiles()); wi.setMultipleTitles(wfi.hasMultipleTitles()); wi.setPublishedBefore(wfi.isPublishedBefore()); wi.update(); // myitem.update(); log.info( LogManager.getHeader( c, "return_to_workspace", "workflow_item_id=" + wfi.getID() + "workspace_item_id=" + wi.getID())); // Now remove the workflow object manually from the database DatabaseManager.updateQuery(c, "DELETE FROM WorkflowItem WHERE workflow_id=" + wfi.getID()); return wi; }
@Override public Group getWorkflowRoleGroup( Context context, Collection collection, String roleName, Group roleGroup) throws SQLException, IOException, WorkflowException, AuthorizeException { try { Role role = WorkflowUtils.getCollectionAndRepositoryRoles(collection).get(roleName); if (role.getScope() == Role.Scope.COLLECTION || role.getScope() == Role.Scope.REPOSITORY) { roleGroup = WorkflowUtils.getRoleGroup(context, collection, role); if (roleGroup == null) { authorizeService.authorizeAction(context, collection, Constants.WRITE); roleGroup = groupService.create(context); if (role.getScope() == Role.Scope.COLLECTION) { groupService.setName( roleGroup, "COLLECTION_" + collection.getID().toString() + "_WORKFLOW_ROLE_" + roleName); } else { groupService.setName(roleGroup, role.getName()); } groupService.update(context, roleGroup); authorizeService.addPolicy(context, collection, Constants.ADD, roleGroup); if (role.getScope() == Role.Scope.COLLECTION) { WorkflowUtils.createCollectionWorkflowRole(context, collection, roleName, roleGroup); } } } return roleGroup; } catch (WorkflowConfigurationException e) { throw new WorkflowException(e); } }
public CollectionEntity( final Collection collection, final List<Entity> items, final List<Entity> communities, final int itemsCount) throws SQLException { super( collection.getID(), collection.getName(), collection.getType(), items, communities, itemsCount); this.canEdit = collection.canEditBoolean(); this.handle = collection.getHandle(); this.licence = collection.getLicense(); this.short_description = collection.getMetadata("short_description"); this.intro_text = collection.getMetadata("introductory_text"); this.copyright_text = collection.getMetadata("copyright_text"); this.sidebar_text = collection.getMetadata("side_bar_text"); this.provenance = collection.getMetadata("provenance_description"); if (collection.getLogo() == null) { this.logo = null; } else { this.logo = new BitstreamEntityId(collection.getLogo()); } }
@Override public void unlink(Collection collection, Item item) throws AuthorizeException { if (linked(collection, item)) { try { DatabaseManager.updateQuery( context, "DELETE FROM collection2item WHERE collection_id= ? " + "AND item_id= ? ", collection.getID(), item.getID()); } catch (SQLException sqle) { throw new RuntimeException(sqle); } } }
@Override public void link(Collection collection, Item item) throws AuthorizeException { if (!linked(collection, item)) { try { TableRow row = DatabaseManager.create(context, "collection2item"); row.setColumn("collection_id", collection.getID()); row.setColumn("item_id", item.getID()); DatabaseManager.update(context, row); } catch (SQLException sqle) { throw new RuntimeException(sqle); } } }
private void populateTableRowFromCollection(Collection collection, TableRow row) { int id = collection.getID(); Bitstream logo = collection.getLogo(); Item templateItem = collection.getTemplateItem(); Group admins = collection.getAdministrators(); Group[] workflowGroups = collection.getWorkflowGroups(); if (logo == null) { row.setColumnNull("logo_bitstream_id"); } else { row.setColumn("logo_bitstream_id", logo.getID()); } if (templateItem == null) { row.setColumnNull("template_item_id"); } else { row.setColumn("template_item_id", templateItem.getID()); } if (admins == null) { row.setColumnNull("admin"); } else { row.setColumn("admin", admins.getID()); } for (int i = 1; i <= workflowGroups.length; i++) { Group g = workflowGroups[i - 1]; if (g == null) { row.setColumnNull("workflow_step_" + i); } else { row.setColumn("workflow_step_" + i, g.getID()); } } // Now loop over all allowed metadata fields and set the value into the // TableRow. for (CollectionMetadataField field : CollectionMetadataField.values()) { String value = collection.getMetadata(field.toString()); if (value == null) { row.setColumnNull(field.toString()); } else { row.setColumn(field.toString(), value); } } row.setColumn("uuid", collection.getIdentifier().getUUID().toString()); }
private void populateTableRowFromItem(Item item, TableRow row) { EPerson submitter = item.getSubmitter(); Collection owningCollection = item.getOwningCollection(); row.setColumn("item_id", item.getID()); row.setColumn("in_archive", item.isArchived()); row.setColumn("withdrawn", item.isWithdrawn()); row.setColumn("last_modified", item.getLastModified()); if (submitter != null) { row.setColumn("submitter_id", submitter.getID()); } if (owningCollection != null) { row.setColumn("owning_collection", owningCollection.getID()); } }
@Override public boolean linked(Collection collection, Item item) { try { TableRowIterator tri = DatabaseManager.query( context, "SELECT id FROM collection2item " + "WHERE collection_id = ? AND item_id = ? ", collection.getID(), item.getID()); boolean result = tri.hasNext(); tri.close(); return result; } catch (SQLException sqle) { throw new RuntimeException(sqle); } }
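/*
 * Illustrative usage sketch, not part of the original source: the link()/linked()/unlink() trio
 * above maintains mapping rows in collection2item. The DAO instance name (collectionDAO) is an
 * assumption for the example.
 */
public void exampleMoveItem(Collection from, Collection to, Item item) throws AuthorizeException {
  // link() is a no-op when the mapping row already exists, so it is safe to call repeatedly
  collectionDAO.link(to, item);
  if (collectionDAO.linked(from, item)) {
    // unlink() only removes the mapping row; the item itself is left untouched
    collectionDAO.unlink(from, item);
  }
}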
@Override public List<Item> getItemsByCollection(Collection collection) { try { TableRowIterator tri = DatabaseManager.queryTable( context, "item", "SELECT i.item_id " + "FROM item i, collection2item c2i " + "WHERE i.item_id = c2i.item_id " + "AND c2i.collection_id = ? " + "AND i.in_archive = '1'", collection.getID()); return returnAsList(tri); } catch (SQLException sqle) { throw new RuntimeException(sqle); } }
/** * startWorkflow() begins a workflow - in a single transaction, do away with the PersonalWorkspace * entry and turn it into a WorkflowItem. * * @param c Context * @param wsi The WorkspaceItem to convert to a workflow item * @return The resulting workflow item */ public static WorkflowItem start(Context c, WorkspaceItem wsi) throws SQLException, AuthorizeException, IOException { // FIXME Check auth Item myitem = wsi.getItem(); Collection collection = wsi.getCollection(); log.info( LogManager.getHeader( c, "start_workflow", "workspace_item_id=" + wsi.getID() + ", item_id=" + myitem.getID() + ", collection_id=" + collection.getID())); // record the start of the workflow w/provenance message recordStart(c, myitem); // create the WorkflowItem TableRow row = DatabaseManager.create(c, "workflowitem"); row.setColumn("item_id", myitem.getID()); row.setColumn("collection_id", wsi.getCollection().getID()); WorkflowItem wfi = new WorkflowItem(c, row); wfi.setMultipleFiles(wsi.hasMultipleFiles()); wfi.setMultipleTitles(wsi.hasMultipleTitles()); wfi.setPublishedBefore(wsi.isPublishedBefore()); // remove the WorkspaceItem wsi.deleteWrapper(); // now get the workflow started doState(c, wfi, WFSTATE_STEP1POOL, null); // Return the workflow item return wfi; }
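/*
 * Illustrative sketch, not part of the original source: pushing a submitter's workspace item into
 * the workflow. The WorkflowManager class name is an assumption based on the static start()
 * method above.
 */
public static WorkflowItem exampleSubmitToWorkflow(Context context, int workspaceID)
    throws SQLException, AuthorizeException, IOException {
  WorkspaceItem wsi = WorkspaceItem.find(context, workspaceID);
  // start() deletes the workspace wrapper and places the new workflow item in the step 1 pool
  return WorkflowManager.start(context, wsi);
}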
/** * Commit the contained item to the main archive. The item is associated with the relevant * collection, added to the search index, and any other tasks such as assigning dates are * performed. * * @param context The relevant DSpace Context. * @param wfi workflow item * @return the fully archived item. * @throws IOException A general class of exceptions produced by failed or interrupted I/O * operations. * @throws SQLException An exception that provides information on a database access error or other * errors. * @throws AuthorizeException Exception indicating the current user of the context does not have * permission to perform a particular action. */ @Override public Item archive(Context context, XmlWorkflowItem wfi) throws SQLException, IOException, AuthorizeException { // FIXME: Check auth Item item = wfi.getItem(); Collection collection = wfi.getCollection(); // Remove (if any) the workflowItemroles for this item workflowItemRoleService.deleteForWorkflowItem(context, wfi); log.info( LogManager.getHeader( context, "archive_item", "workflow_item_id=" + wfi.getID() + "item_id=" + item.getID() + "collection_id=" + collection.getID())); installItemService.installItem(context, wfi); // Notify notifyOfArchive(context, item, collection); // Clear any remaining workflow metadata itemService.clearMetadata( context, item, WorkflowRequirementsService.WORKFLOW_SCHEMA, Item.ANY, Item.ANY, Item.ANY); itemService.update(context, item); // Log the event log.info( LogManager.getHeader( context, "install_item", "workflow_item_id=" + wfi.getID() + ", item_id=" + item.getID() + "handle=FIXME")); return item; }
private static Group getXMLWorkflowRole( Context context, int collectionID, String roleName, Collection collection, Group roleGroup) throws IOException, WorkflowConfigurationException, SQLException, AuthorizeException { Role role = WorkflowUtils.getCollectionAndRepositoryRoles(collection).get(roleName); if (role.getScope() == Role.Scope.COLLECTION || role.getScope() == Role.Scope.REPOSITORY) { roleGroup = WorkflowUtils.getRoleGroup(context, collectionID, role); if (roleGroup == null) { AuthorizeManager.authorizeAction(context, collection, Constants.WRITE); roleGroup = Group.create(context); if (role.getScope() == Role.Scope.COLLECTION) { roleGroup.setName("COLLECTION_" + collection.getID() + "_WORKFLOW_ROLE_" + roleName); } else { roleGroup.setName(role.getName()); } roleGroup.update(); AuthorizeManager.addPolicy(context, collection, Constants.ADD, roleGroup); if (role.getScope() == Role.Scope.COLLECTION) { WorkflowUtils.createCollectionWorkflowRole(context, collectionID, roleName, roleGroup); } } } return roleGroup; }
@Override public BasicWorkflowItem start(Context context, WorkspaceItem wsi) throws SQLException, AuthorizeException, IOException { // FIXME Check auth Item myitem = wsi.getItem(); Collection collection = wsi.getCollection(); log.info( LogManager.getHeader( context, "start_workflow", "workspace_item_id=" + wsi.getID() + "item_id=" + myitem.getID() + "collection_id=" + collection.getID())); // record the start of the workflow w/provenance message recordStart(context, myitem); // create the WorkflowItem BasicWorkflowItem wfi = workflowItemService.create(context, myitem, collection); wfi.setMultipleFiles(wsi.hasMultipleFiles()); wfi.setMultipleTitles(wsi.hasMultipleTitles()); wfi.setPublishedBefore(wsi.isPublishedBefore()); // remove the WorkspaceItem workspaceItemService.deleteWrapper(context, wsi); // now get the workflow started wfi.setState(WFSTATE_SUBMIT); advance(context, wfi, null); // Return the workflow item return wfi; }
@Override public int itemCount(Collection collection) { try { String query = "SELECT count(*) FROM collection2item, item WHERE " + "collection2item.collection_id = ? " + "AND collection2item.item_id = item.item_id " + "AND in_archive ='1' AND item.withdrawn='0' "; PreparedStatement statement = context.getDBConnection().prepareStatement(query); statement.setInt(1, collection.getID()); ResultSet rs = statement.executeQuery(); rs.next(); int itemcount = rs.getInt(1); statement.close(); return itemcount; } catch (SQLException sqle) { throw new RuntimeException(sqle); } }
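/*
 * Illustrative usage sketch, not part of the original source: itemCount() above counts only
 * archived, non-withdrawn items, so it suits the "(n items)" label shown next to a collection
 * name. The DAO instance name (collectionDAO) is an assumption for the example.
 */
public String exampleCollectionLabel(Collection collection) {
  int archived = collectionDAO.itemCount(collection);
  return collection.getMetadata("name") + " (" + archived + ")";
}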
/** * Create a new collection * * @param context The current DSpace context. * @param communityID The id of the parent community. * @return A process result's object. */ public static FlowResult processCreateCollection( Context context, int communityID, Request request) throws SQLException, AuthorizeException, IOException { FlowResult result = new FlowResult(); Community parent = Community.find(context, communityID); Collection newCollection = parent.createCollection(); // Get the metadata String name = request.getParameter("name"); String shortDescription = request.getParameter("short_description"); String introductoryText = request.getParameter("introductory_text"); String copyrightText = request.getParameter("copyright_text"); String sideBarText = request.getParameter("side_bar_text"); String license = request.getParameter("license"); String provenanceDescription = request.getParameter("provenance_description"); // If they don't have a name then make it untitled. if (name == null || name.length() == 0) { name = "Untitled"; } // If empty, make it null. if (shortDescription != null && shortDescription.length() == 0) { shortDescription = null; } if (introductoryText != null && introductoryText.length() == 0) { introductoryText = null; } if (copyrightText != null && copyrightText.length() == 0) { copyrightText = null; } if (sideBarText != null && sideBarText.length() == 0) { sideBarText = null; } if (license != null && license.length() == 0) { license = null; } if (provenanceDescription != null && provenanceDescription.length() == 0) { provenanceDescription = null; } // Save the metadata newCollection.setMetadata("name", name); newCollection.setMetadata("short_description", shortDescription); newCollection.setMetadata("introductory_text", introductoryText); newCollection.setMetadata("copyright_text", copyrightText); newCollection.setMetadata("side_bar_text", sideBarText); newCollection.setMetadata("license", license); newCollection.setMetadata("provenance_description", provenanceDescription); // Set the logo Object object = request.get("logo"); Part filePart = null; if (object instanceof Part) { filePart = (Part) object; } if (filePart != null && filePart.getSize() > 0) { InputStream is = filePart.getInputStream(); newCollection.setLogo(is); } // Save everything newCollection.update(); context.commit(); // success result.setContinue(true); result.setOutcome(true); result.setMessage(new Message("default", "The collection was successfully created.")); result.setParameter("collectionID", newCollection.getID()); return result; }
/** * Perform a deposit, using the supplied SWORD Deposit object. * * @param deposit the SWORD Deposit to process * @throws SWORDErrorException if the content or packaging type of the deposit is not acceptable * @throws DSpaceSWORDException if an internal error occurs while handling the deposit */ public DepositResult doDeposit(Deposit deposit) throws SWORDErrorException, DSpaceSWORDException { // get the things out of the service that we need Context context = swordService.getContext(); SWORDConfiguration swordConfig = swordService.getSwordConfig(); SWORDUrlManager urlManager = swordService.getUrlManager(); // FIXME: the spec is unclear what to do in this situation. I'm going // to throw a 415 (ERROR_CONTENT) until further notice // // determine if this is an acceptable file format if (!swordConfig.isAcceptableContentType(context, deposit.getContentType(), collection)) { log.error( "Unacceptable content type detected: " + deposit.getContentType() + " for collection " + collection.getID()); throw new SWORDErrorException( ErrorCodes.ERROR_CONTENT, "Unacceptable content type in deposit request: " + deposit.getContentType()); } // determine if this is an acceptable packaging type for the deposit // if not, we throw a 415 HTTP error (Unsupported Media Type, ERROR_CONTENT) if (!swordConfig.isSupportedMediaType(deposit.getPackaging(), this.collection)) { log.error( "Unacceptable packaging type detected: " + deposit.getPackaging() + " for collection " + collection.getID()); throw new SWORDErrorException( ErrorCodes.ERROR_CONTENT, "Unacceptable packaging type in deposit request: " + deposit.getPackaging()); } // Obtain the relevant ingester from the factory SWORDIngester si = SWORDIngesterFactory.getInstance(context, deposit, collection); swordService.message("Loaded ingester: " + si.getClass().getName()); // do the deposit DepositResult result = si.ingest(swordService, deposit, collection); swordService.message("Archive ingest completed successfully"); // if there's an item available, and we want to keep the original // then do that try { if (swordConfig.isKeepOriginal()) { swordService.message( "DSpace will store an original copy of the deposit, " + "as well as ingesting the item into the archive"); // in order to be allowed to add the file back to the item, we need to ignore authorisations // for a moment boolean ignoreAuth = context.ignoreAuthorization(); context.setIgnoreAuthorization(true); String bundleName = ConfigurationManager.getProperty("sword-server", "bundle.name"); if (bundleName == null || "".equals(bundleName)) { bundleName = "SWORD"; } Item item = result.getItem(); Bundle[] bundles = item.getBundles(bundleName); Bundle swordBundle = null; if (bundles.length > 0) { swordBundle = bundles[0]; } if (swordBundle == null) { swordBundle = item.createBundle(bundleName); } String fn = swordService.getFilename(context, deposit, true); Bitstream bitstream; FileInputStream fis = null; try { fis = new FileInputStream(deposit.getFile()); bitstream = swordBundle.createBitstream(fis); } finally { if (fis != null) { fis.close(); } } bitstream.setName(fn); bitstream.setDescription("SWORD deposit package"); BitstreamFormat bf = BitstreamFormat.findByMIMEType(context, deposit.getContentType()); if (bf != null) { bitstream.setFormat(bf); } bitstream.update(); swordBundle.update(); item.update(); swordService.message( "Original package stored as " + fn + ", in item bundle " + swordBundle); // now reset the context ignore authorisation context.setIgnoreAuthorization(ignoreAuth); // set the media link for the created item result.setMediaLink(urlManager.getMediaLink(bitstream)); } else { // set the vanilla media link, which doesn't resolve to anything
result.setMediaLink(urlManager.getBaseMediaLinkUrl()); } } catch (SQLException e) { log.error("caught exception: ", e); throw new DSpaceSWORDException(e); } catch (AuthorizeException e) { log.error("caught exception: ", e); throw new DSpaceSWORDException(e); } catch (FileNotFoundException e) { log.error("caught exception: ", e); throw new DSpaceSWORDException(e); } catch (IOException e) { log.error("caught exception: ", e); throw new DSpaceSWORDException(e); } return result; }
/** * Perform a limited browse, which only returns the results requested, without any extraneous * information. To perform a full browse, use BrowseEngine.browse() above. This supports Item * browse only, and does not currently support focus or values. This method is used, for example, * to generate the Recently Submitted Items results. * * @param bs the scope of the browse * @return the results of the browse */ public BrowseInfo browseMini(BrowserScope bs) throws BrowseException { log.info(LogManager.getHeader(context, "browse_mini", "")); // load the scope into the object this.scope = bs; // since we use it so much, get the browse index out of the // scope and store as a member browseIndex = scope.getBrowseIndex(); // get the table name that we are going to be getting our data from dao.setTable(browseIndex.getTableName()); // tell the browse query whether we are ascending or descending on the value dao.setAscending(scope.isAscending()); // define a clause for the WHERE clause which will allow us to constrain // our browse to a specified community or collection if (scope.inCollection() || scope.inCommunity()) { if (scope.inCollection()) { Collection col = (Collection) scope.getBrowseContainer(); dao.setContainerTable("collection2item"); dao.setContainerIDField("collection_id"); dao.setContainerID(col.getID()); } else if (scope.inCommunity()) { Community com = (Community) scope.getBrowseContainer(); dao.setContainerTable("communities2item"); dao.setContainerIDField("community_id"); dao.setContainerID(com.getID()); } } dao.setOffset(scope.getOffset()); dao.setLimit(scope.getResultsPerPage()); // assemble the ORDER BY clause String orderBy = browseIndex.getSortField(scope.isSecondLevel()); if (scope.getSortBy() > 0) { orderBy = "sort_" + Integer.toString(scope.getSortBy()); } dao.setOrderField(orderBy); // now run the query List<BrowseItem> results = dao.doQuery(); // construct the mostly empty BrowseInfo object to pass back BrowseInfo browseInfo = new BrowseInfo(results, 0, scope.getResultsPerPage(), 0); // add the browse index to the Browse Info browseInfo.setBrowseIndex(browseIndex); // set the sort option for the Browse Info browseInfo.setSortOption(scope.getSortOption()); // tell the Browse Info which way we are sorting browseInfo.setAscending(scope.isAscending()); // tell the browse info what the container for the browse was if (scope.inCollection() || scope.inCommunity()) { browseInfo.setBrowseContainer(scope.getBrowseContainer()); } browseInfo.setResultsPerPage(scope.getResultsPerPage()); browseInfo.setEtAl(scope.getEtAl()); return browseInfo; }
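/*
 * Illustrative sketch, not part of the original source: using browseMini() to build a short
 * "recently submitted" list scoped to a single collection. The BrowseEngine class name, the index
 * name "dateaccessioned", and the BrowserScope setter names are assumptions for the example; they
 * mirror the getters used in browseMini() above.
 */
public BrowseInfo exampleRecentSubmissions(Context context, Collection collection)
    throws BrowseException {
  BrowserScope scope = new BrowserScope(context);
  scope.setBrowseIndex(BrowseIndex.getBrowseIndex("dateaccessioned"));
  scope.setBrowseContainer(collection); // constrain the browse to this collection
  scope.setResultsPerPage(5); // only the handful of rows we want to display

  // browseMini() skips focus/value handling, which keeps it cheap for sidebar-style lists
  return new BrowseEngine(context).browseMini(scope);
}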
/** * Browse the archive by the full item browse mechanism. This produces a BrowseInfo object which * contains full BrowseItem objects as its result set. * * @param bs the scope of the browse * @return the results of the browse * @throws BrowseException */ private BrowseInfo browseByItem(BrowserScope bs) throws BrowseException { log.info(LogManager.getHeader(context, "browse_by_item", "")); try { // get the table name that we are going to be getting our data from dao.setTable(browseIndex.getTableName()); // tell the browse query whether we are ascending or descending on the value dao.setAscending(scope.isAscending()); // assemble the value clause String rawValue = null; if (scope.hasFilterValue() && scope.isSecondLevel()) { String value = scope.getFilterValue(); rawValue = value; // make sure the incoming value is normalised value = OrderFormat.makeSortString( value, scope.getFilterValueLang(), scope.getBrowseIndex().getDataType()); dao.setAuthorityValue(scope.getAuthorityValue()); // set the values in the Browse Query if (scope.isSecondLevel()) { dao.setFilterValueField("value"); dao.setFilterValue(rawValue); } else { dao.setFilterValueField("sort_value"); dao.setFilterValue(value); } dao.setFilterValuePartial(scope.getFilterValuePartial()); // to apply the filtering, we need the distinct and map tables for the index dao.setFilterMappingTables( browseIndex.getDistinctTableName(), browseIndex.getMapTableName()); } // define a clause for the WHERE clause which will allow us to constrain // our browse to a specified community or collection if (scope.inCollection() || scope.inCommunity()) { if (scope.inCollection()) { Collection col = (Collection) scope.getBrowseContainer(); dao.setContainerTable("collection2item"); dao.setContainerIDField("collection_id"); dao.setContainerID(col.getID()); } else if (scope.inCommunity()) { Community com = (Community) scope.getBrowseContainer(); dao.setContainerTable("communities2item"); dao.setContainerIDField("community_id"); dao.setContainerID(com.getID()); } } // this is the total number of results in answer to the query int total = getTotalResults(); // assemble the ORDER BY clause String orderBy = browseIndex.getSortField(scope.isSecondLevel()); if (scope.getSortBy() > 0) { orderBy = "sort_" + Integer.toString(scope.getSortBy()); } dao.setOrderField(orderBy); int offset = scope.getOffset(); String rawFocusValue = null; if (offset < 1 && (scope.hasJumpToItem() || scope.hasJumpToValue() || scope.hasStartsWith())) { // We need to convert these to an offset for the actual browse query. // First, get a value that we can look up in the ordering field rawFocusValue = getJumpToValue(); // make sure the incoming value is normalised String focusValue = normalizeJumpToValue(rawFocusValue); log.debug("browsing using focus: " + focusValue); // Convert the focus value into an offset offset = getOffsetForValue(focusValue); } dao.setOffset(offset); // assemble the LIMIT clause dao.setLimit(scope.getResultsPerPage()); // Holder for the results List<BrowseItem> results = null; // Does this browse have any contents? if (total > 0) { // now run the query results = dao.doQuery(); // now, if we don't have any results, we are at the end of the browse. This will // be because a starts_with value has been supplied for which we don't have // any items. 
if (results.size() == 0) { // In this case, we will calculate a new offset for the last page of results offset = total - scope.getResultsPerPage(); if (offset < 0) { offset = 0; } // And rerun the query dao.setOffset(offset); results = dao.doQuery(); } } else { // No records, so make an empty list results = new ArrayList<BrowseItem>(); } // construct the BrowseInfo object to pass back // BrowseInfo browseInfo = new BrowseInfo(results, position, total, offset); BrowseInfo browseInfo = new BrowseInfo(results, offset, total, offset); if (offset + scope.getResultsPerPage() < total) { browseInfo.setNextOffset(offset + scope.getResultsPerPage()); } if (offset - scope.getResultsPerPage() > -1) { browseInfo.setPrevOffset(offset - scope.getResultsPerPage()); } // add the browse index to the Browse Info browseInfo.setBrowseIndex(browseIndex); // set the sort option for the Browse Info browseInfo.setSortOption(scope.getSortOption()); // tell the Browse Info which way we are sorting browseInfo.setAscending(scope.isAscending()); // tell the Browse Info which level of browse we are at browseInfo.setBrowseLevel(scope.getBrowseLevel()); // set the browse value if there is one browseInfo.setValue(rawValue); // set the browse authority key if there is one browseInfo.setAuthority(scope.getAuthorityValue()); // set the focus value if there is one browseInfo.setFocus(rawFocusValue); if (scope.hasJumpToItem()) { browseInfo.setFocusItem(scope.getJumpToItem()); } // tell the browse info if it is working from a starts with parameter browseInfo.setStartsWith(scope.hasStartsWith()); // tell the browse info what the container for the browse was if (scope.inCollection() || scope.inCommunity()) { browseInfo.setBrowseContainer(scope.getBrowseContainer()); } browseInfo.setResultsPerPage(scope.getResultsPerPage()); browseInfo.setEtAl(scope.getEtAl()); return browseInfo; } catch (SQLException e) { log.error("caught exception: ", e); throw new BrowseException(e); } }
/** * Respond to a post request for metadata bulk importing via csv * * @param context a DSpace Context object * @param request the HTTP request * @param response the HTTP response * @throws ServletException * @throws IOException * @throws SQLException * @throws AuthorizeException */ @Override protected void doDSPost(Context context, HttpServletRequest request, HttpServletResponse response) throws ServletException, IOException, SQLException, AuthorizeException { // First, see if we have a multipart request (uploading a metadata file) String contentType = request.getContentType(); if ((contentType != null) && (contentType.indexOf("multipart/form-data") != -1)) { String message = null; // Process the file uploaded try { // Wrap multipart request to get the submission info FileUploadRequest wrapper = new FileUploadRequest(request); String inputType = wrapper.getParameter("inputType"); List<String> reqCollectionsTmp = getRepeatedParameter(wrapper, "collections", "collections"); String[] reqCollections = new String[reqCollectionsTmp.size()]; reqCollectionsTmp.toArray(reqCollections); // Get all collections List<Collection> collections = null; String colIdS = wrapper.getParameter("colId"); if (colIdS != null) { collections = new ArrayList<>(); collections.add(collectionService.findByIdOrLegacyId(context, colIdS)); } else { collections = collectionService.findAll(context); } request.setAttribute("collections", collections); Collection owningCollection = null; if (wrapper.getParameter("collection") != null) { owningCollection = collectionService.findByIdOrLegacyId(context, wrapper.getParameter("collection")); } // Get all the possible data loaders from the Spring configuration BTEBatchImportService dls = new DSpace().getSingletonService(BTEBatchImportService.class); List<String> inputTypes = dls.getFileDataLoaders(); request.setAttribute("input-types", inputTypes); if (reqCollectionsTmp != null) request.setAttribute("otherCollections", reqCollectionsTmp); if (owningCollection != null) request.setAttribute("owningCollection", owningCollection.getID()); request.setAttribute("inputType", inputType); File f = null; String zipurl = null; if (inputType.equals("saf")) { zipurl = wrapper.getParameter("zipurl"); if (StringUtils.isEmpty(zipurl)) { request.setAttribute("has-error", "true"); Locale locale = request.getLocale(); ResourceBundle msgs = ResourceBundle.getBundle("Messages", locale); try { message = msgs.getString("jsp.layout.navbar-admin.batchimport.fileurlempty"); } catch (Exception e) { message = "???jsp.layout.navbar-admin.batchimport.fileurlempty???"; } request.setAttribute("message", message); JSPManager.showJSP(request, response, "/dspace-admin/batchimport.jsp"); return; } } else { f = wrapper.getFile("file"); if (f == null) { request.setAttribute("has-error", "true"); Locale locale = request.getLocale(); ResourceBundle msgs = ResourceBundle.getBundle("Messages", locale); try { message = msgs.getString("jsp.layout.navbar-admin.batchimport.fileempty"); } catch (Exception e) { message = "???jsp.layout.navbar-admin.batchimport.fileempty???"; } request.setAttribute("message", message); JSPManager.showJSP(request, response, "/dspace-admin/batchimport.jsp"); return; } else if (owningCollection == null) { request.setAttribute("has-error", "true"); Locale locale = request.getLocale(); ResourceBundle msgs = ResourceBundle.getBundle("Messages", locale); try { message = msgs.getString("jsp.layout.navbar-admin.batchimport.owningcollectionempty"); } catch (Exception e) { message = 
"???jsp.layout.navbar-admin.batchimport.owningcollectionempty???"; } request.setAttribute("message", message); JSPManager.showJSP(request, response, "/dspace-admin/batchimport.jsp"); return; } } String uploadId = wrapper.getParameter("uploadId"); if (uploadId != null) { request.setAttribute("uploadId", uploadId); } if (owningCollection == null && reqCollections != null && reqCollections.length > 0) { request.setAttribute("has-error", "true"); Locale locale = request.getLocale(); ResourceBundle msgs = ResourceBundle.getBundle("Messages", locale); String ms = msgs.getString("jsp.layout.navbar-admin.batchimport.owningcollection"); if (ms == null) { ms = "???jsp.layout.navbar-admin.batchimport.owningcollection???"; } request.setAttribute("message", ms); JSPManager.showJSP(request, response, "/dspace-admin/batchimport.jsp"); return; } try { String finalInputType = "saf"; String filePath = zipurl; if (f != null) { finalInputType = inputType; filePath = f.getAbsolutePath(); } itemImportService.processUIImport( filePath, owningCollection, reqCollections, uploadId, finalInputType, context, true); request.setAttribute("has-error", "false"); request.setAttribute("uploadId", null); } catch (Exception e) { request.setAttribute("has-error", "true"); message = e.getMessage(); e.printStackTrace(); } } catch (FileSizeLimitExceededException e) { request.setAttribute("has-error", "true"); message = e.getMessage(); e.printStackTrace(); } catch (Exception e) { request.setAttribute("has-error", "true"); message = e.getMessage(); e.printStackTrace(); } request.setAttribute("message", message); // Show the upload screen JSPManager.showJSP(request, response, "/dspace-admin/batchimport.jsp"); } else { request.setAttribute("has-error", "true"); // Show the upload screen JSPManager.showJSP(request, response, "/dspace-admin/batchimport.jsp"); } }
/** * Show a collection home page, or deal with button press on home page * * @param context Context object * @param request the HTTP request * @param response the HTTP response * @param community the community * @param collection the collection */ private void collectionHome( Context context, HttpServletRequest request, HttpServletResponse response, Community community, Collection collection) throws ServletException, IOException, SQLException, AuthorizeException { // Handle click on a browse or search button if (!handleButton(request, response, IdentifierService.getURL(community))) { // Will need to know whether to commit to DB boolean updated = false; // No search or browse button pressed, check for if (request.getParameter("submit_subscribe") != null) { // Subscribe button pressed. // Only registered can subscribe, so redirect unless logged in. if (context.getCurrentUser() == null && !Authenticate.startAuthentication(context, request, response)) return; else { SubscriptionManager.subscribe(context, context.getCurrentUser(), collection); updated = true; } } else if (request.getParameter("submit_unsubscribe") != null) { SubscriptionManager.unsubscribe(context, context.getCurrentUser(), collection); updated = true; } // display collection home page log.info( LogManager.getHeader(context, "view_collection", "collection_id=" + collection.getID())); // perform any necessary pre-processing preProcessCollectionHome(context, request, response, collection); // Is the user logged in/subscribed? EPerson e = context.getCurrentUser(); boolean subscribed = false; if (e != null) { subscribed = SubscriptionManager.isSubscribed(context, e, collection); // is the user a COLLECTION_EDITOR? // if (collection.canEditBoolean()) if (AuthorizeManager.canEdit(collection, context)) { // set a variable to create an edit button request.setAttribute("editor_button", new Boolean(true)); } // can they admin this collection? if (AuthorizeManager.authorizeActionBoolean( context, collection, Constants.COLLECTION_ADMIN)) { request.setAttribute("admin_button", new Boolean(true)); // give them a button to manage submitter list // what group is the submitter? Group group = collection.getSubmitters(); if (group != null) { request.setAttribute("submitters", group); } } // can they submit to this collection? if (AuthorizeManager.authorizeActionBoolean(context, collection, Constants.ADD)) { request.setAttribute("can_submit_button", new Boolean(true)); } else { request.setAttribute("can_submit_button", new Boolean(false)); } } // Forward to collection home page request.setAttribute("collection", collection); request.setAttribute("community", community); request.setAttribute("logged.in", new Boolean(e != null)); request.setAttribute("subscribed", new Boolean(subscribed)); JSPManager.showJSP(request, response, "/collection-home.jsp"); if (updated) { context.complete(); } } }
protected void doDSGet(Context context, HttpServletRequest request, HttpServletResponse response) throws ServletException, IOException, SQLException, AuthorizeException { // Get the query String query = request.getParameter("query"); int start = UIUtil.getIntParameter(request, "start"); String advanced = request.getParameter("advanced"); String fromAdvanced = request.getParameter("from_advanced"); int sortBy = UIUtil.getIntParameter(request, "sort_by"); String order = request.getParameter("order"); int rpp = UIUtil.getIntParameter(request, "rpp"); String advancedQuery = ""; HashMap queryHash = new HashMap(); // can't start earlier than 0 in the results! if (start < 0) { start = 0; } int collCount = 0; int commCount = 0; int itemCount = 0; Item[] resultsItems; Collection[] resultsCollections; Community[] resultsCommunities; QueryResults qResults = null; QueryArgs qArgs = new QueryArgs(); SortOption sortOption = null; if (request.getParameter("etal") != null) qArgs.setEtAl(UIUtil.getIntParameter(request, "etal")); try { if (sortBy > 0) { sortOption = SortOption.getSortOption(sortBy); qArgs.setSortOption(sortOption); } if (SortOption.ASCENDING.equalsIgnoreCase(order)) { qArgs.setSortOrder(SortOption.ASCENDING); } else { qArgs.setSortOrder(SortOption.DESCENDING); } } catch (Exception e) { } if (rpp > 0) { qArgs.setPageSize(rpp); } // if the "advanced" flag is set, build the query string from the // multiple query fields if (advanced != null) { query = qArgs.buildQuery(request); advancedQuery = qArgs.buildHTTPQuery(request); } // Ensure the query is non-null if (query == null) { query = ""; } // Get the location parameter, if any String location = request.getParameter("location"); String newURL; // If there is a location parameter, we should redirect to // do the search with the correct location. 
if ((location != null) && !location.equals("")) { String url = ""; if (!location.equals("/")) { // Location points to a resource url = "/resource/" + location; } // Encode the query query = URLEncoder.encode(query, Constants.DEFAULT_ENCODING); if (advancedQuery.length() > 0) { query = query + "&from_advanced=true&" + advancedQuery; } // Do the redirect response.sendRedirect( response.encodeRedirectURL( request.getContextPath() + url + "/simple-search?query=" + query)); return; } // Build log information String logInfo = ""; // Get our location Community community = UIUtil.getCommunityLocation(request); Collection collection = UIUtil.getCollectionLocation(request); // get the start of the query results page // List resultObjects = null; qArgs.setQuery(query); qArgs.setStart(start); // Perform the search if (collection != null) { logInfo = "collection_id=" + collection.getID() + ","; // Values for drop-down box request.setAttribute("community", community); request.setAttribute("collection", collection); qResults = DSQuery.doQuery(context, qArgs, collection); } else if (community != null) { logInfo = "community_id=" + community.getID() + ","; request.setAttribute("community", community); // Get the collections within the community for the dropdown box request.setAttribute("collection.array", community.getCollections()); qResults = DSQuery.doQuery(context, qArgs, community); } else { // Get all communities for dropdown box // Community[] communities = Community.findAll(context); Community[] communities = (Community[]) ApplicationService.findAllCommunities(context).toArray(); request.setAttribute("community.array", communities); qResults = DSQuery.doQuery(context, qArgs); } // now instantiate the results and put them in their buckets for (int i = 0; i < qResults.getHitTypes().size(); i++) { String myURI = (String) qResults.getHitURIs().get(i); Integer myType = (Integer) qResults.getHitTypes().get(i); // add the URI to the appropriate lists switch (myType.intValue()) { case Constants.ITEM: itemCount++; break; case Constants.COLLECTION: collCount++; break; case Constants.COMMUNITY: commCount++; break; } } // Make objects from the URIs - make arrays, fill them out resultsCommunities = new Community[commCount]; resultsCollections = new Collection[collCount]; resultsItems = new Item[itemCount]; for (int i = 0; i < qResults.getHitTypes().size(); i++) { Integer myId = (Integer) qResults.getHitIds().get(i); String myURI = (String) qResults.getHitURIs().get(i); Integer myType = (Integer) qResults.getHitTypes().get(i); switch (myType.intValue()) { case Constants.ITEM: if (myId != null) { // resultsItems[itemCount] = Item.find(context, myId); resultsItems[itemCount] = ApplicationService.get(context, Item.class, myId); } else { ObjectIdentifier oi = ObjectIdentifier.parseCanonicalForm(myURI); resultsItems[itemCount] = (Item) oi.getObject(context); } if (resultsItems[itemCount] == null) { throw new SQLException("Query \"" + query + "\" returned unresolvable item"); } itemCount++; break; case Constants.COLLECTION: if (myId != null) { // resultsCollections[collCount] = Collection.find(context, myId); resultsCollections[collCount] = ApplicationService.get(context, Collection.class, myId); } else { ObjectIdentifier oi = ObjectIdentifier.parseCanonicalForm(myURI); resultsCollections[collCount] = (Collection) oi.getObject(context); } if (resultsCollections[collCount] == null) { throw new SQLException("Query \"" + query + "\" returned unresolvable collection"); } collCount++; break; case Constants.COMMUNITY: if 
(myId != null) { // resultsCommunities[commCount] = Community.find(context, myId); resultsCommunities[commCount] = ApplicationService.get(context, Community.class, myId); } else { ObjectIdentifier oi = ObjectIdentifier.parseCanonicalForm(myURI); resultsCommunities[commCount] = (Community) oi.getObject(context); } if (resultsCommunities[commCount] == null) { throw new SQLException("Query \"" + query + "\" returned unresolvable community"); } commCount++; break; } } // Log log.info( LogManager.getHeader( context, "search", logInfo + "query=\"" + query + "\",results=(" + resultsCommunities.length + "," + resultsCollections.length + "," + resultsItems.length + ")")); // Pass in some page qualities // total number of pages int pageTotal = 1 + ((qResults.getHitCount() - 1) / qResults.getPageSize()); // current page being displayed int pageCurrent = 1 + (qResults.getStart() / qResults.getPageSize()); // pageLast = min(pageCurrent+9,pageTotal) int pageLast = ((pageCurrent + 9) > pageTotal) ? pageTotal : (pageCurrent + 9); // pageFirst = max(1,pageCurrent-9) int pageFirst = ((pageCurrent - 9) > 1) ? (pageCurrent - 9) : 1; // Pass the results to the display JSP request.setAttribute("items", resultsItems); request.setAttribute("communities", resultsCommunities); request.setAttribute("collections", resultsCollections); request.setAttribute("pagetotal", new Integer(pageTotal)); request.setAttribute("pagecurrent", new Integer(pageCurrent)); request.setAttribute("pagelast", new Integer(pageLast)); request.setAttribute("pagefirst", new Integer(pageFirst)); request.setAttribute("queryresults", qResults); // And the original query string request.setAttribute("query", query); request.setAttribute("order", qArgs.getSortOrder()); request.setAttribute("sortedBy", sortOption); if ((fromAdvanced != null) && (qResults.getHitCount() == 0)) { // send back to advanced form if no results Community[] communities = (Community[]) ApplicationService.findAllCommunities(context).toArray(); // Community[] communities = Community.findAll(context); request.setAttribute("communities", communities); request.setAttribute("no_results", "yes"); queryHash = qArgs.buildQueryHash(request); Iterator i = queryHash.keySet().iterator(); while (i.hasNext()) { String key = (String) i.next(); String value = (String) queryHash.get(key); request.setAttribute(key, value); } JSPManager.showJSP(request, response, "/search/advanced.jsp"); } else { JSPManager.showJSP(request, response, "/search/results.jsp"); } }
protected void doDSGet(Context context, HttpServletRequest request, HttpServletResponse response) throws ServletException, IOException, SQLException, AuthorizeException { // We will resolve the HTTP request parameters into a scope BrowseScope scope = new BrowseScope(context); // Will need to know whether to highlight the "focus" point boolean highlight = false; // Build up log information String logInfo = ""; // For browse by date, we'll need to work out the URL query string to // use when the user swaps the ordering, so that they stay at the same // point in the index String flipOrderingQuery = ""; // Grab HTTP request parameters String focus = request.getParameter("focus"); String startsWith = request.getParameter("starts_with"); String top = request.getParameter("top"); String bottom = request.getParameter("bottom"); // The following three are specific to browsing items by date String month = request.getParameter("month"); String year = request.getParameter("year"); String order = request.getParameter("order"); // For browse by date: oldest item first? boolean oldestFirst = false; if ((order != null) && order.equalsIgnoreCase("oldestfirst")) { oldestFirst = true; } if (browseDates && (year != null) && !year.equals("") && ((startsWith == null) || startsWith.equals(""))) { // We're browsing items by date, the user hasn't typed anything // into the "year" text box, and they've selected a year from // the drop-down list. From this we work out where to start // the browse. startsWith = year; if ((month != null) && !month.equals("-1")) { // They've selected a month as well if (month.length() == 1) { // Ensure double-digit month number month = "0" + month; } startsWith = year + "-" + month; } } // Set the scope according to the parameters passed in if (focus != null) { // ---------------------------------------------- // Browse should start at a specified focus point // ---------------------------------------------- if (browseAuthors || browseSubjects) { // For browsing authors, focus is just a text value scope.setFocus(focus); } else { // For browsing items by title or date, focus is a Handle Item item = (Item) HandleManager.resolveToObject(context, focus); if (item == null) { // Handle is invalid one. Show an error. JSPManager.showInvalidIDError(request, response, focus, Constants.ITEM); return; } scope.setFocus(item); } // Will need to highlight the focus highlight = true; logInfo = "focus=" + focus + ","; if (browseDates) { // if the date order is flipped, we'll keep the same focus flipOrderingQuery = "focus=" + URLEncoder.encode(focus, Constants.DEFAULT_ENCODING) + "&"; } } else if (startsWith != null) { // ---------------------------------------------- // Start the browse using user-specified text // ---------------------------------------------- if (browseDates) { // if the date order is flipped, we'll keep the same focus flipOrderingQuery = "starts_with=" + URLEncoder.encode(startsWith, Constants.DEFAULT_ENCODING) + "&"; /* * When the user is browsing with the most recent items first, * the browse code algorithm doesn't quite do what some people * might expect. For example, if in the index there are entries: * * Mar-2000 15-Feb-2000 6-Feb-2000 15-Jan-2000 * * and the user has selected "Feb 2000" as the start point for * the browse, the browse algorithm will start at the first * point in that index *after* "Feb 2000". "Feb 2000" would * appear in the index above between 6-Feb-2000 and 15-Jan-2000. * So, the browse code in this case will start the browse at * "15-Jan-2000".
This isn't really what users are likely to * want: They're more likely to want the browse to start at the * first Feb 2000 date, i.e. 15-Feb-2000. A similar scenario * occurs when the user enters just a year. Our quick hack to * produce this behaviour is to add "-32" to the startsWith * variable, when sorting with most recent items first. This * means the browse code starts at the topmost item in the index * that matches the user's input, rather than the point in the * index where the user's input would appear. */ if (!oldestFirst) { startsWith = startsWith + "-32"; } } scope.setFocus(startsWith); highlight = true; logInfo = "starts_with=" + startsWith + ","; } else if ((top != null) || (bottom != null)) { // ---------------------------------------------- // Paginating: put specified entry at top or bottom // ---------------------------------------------- // Use a single value and a boolean to simplify the code below String val = bottom; boolean isTop = false; if (top != null) { val = top; isTop = true; } if (browseAuthors || browseSubjects) { // Value will be a text value for author browse scope.setFocus(val); } else { // Value is Handle if we're browsing items by title or date Item item = (Item) HandleManager.resolveToObject(context, val); if (item == null) { // Handle is invalid one. Show an error. JSPManager.showInvalidIDError(request, response, focus, Constants.ITEM); return; } scope.setFocus(item); } // This entry appears at the top or bottom, and so needs to have // 0 or 20 entries shown before it scope.setNumberBefore(isTop ? 0 : 20); logInfo = (isTop ? "top" : "bottom") + "=" + val + ","; if (browseDates) { // If the date order is flipped, we'll flip the table upside // down - i.e. the top will become the bottom and the bottom // the top. if (top != null) { flipOrderingQuery = "bottom=" + URLEncoder.encode(top, Constants.DEFAULT_ENCODING) + "&"; } else { flipOrderingQuery = "top=" + URLEncoder.encode(bottom, Constants.DEFAULT_ENCODING) + "&"; } } } // ---------------------------------------------- // If none of the above apply, no positioning parameters // set - use start of index // ---------------------------------------------- // Are we in a community or collection? Community community = UIUtil.getCommunityLocation(request); Collection collection = UIUtil.getCollectionLocation(request); if (collection != null) { logInfo = logInfo + ",collection_id=" + collection.getID() + ","; scope.setScope(collection); } else if (community != null) { logInfo = logInfo + ",community_id=" + community.getID() + ","; scope.setScope(community); } BrowseInfo browseInfo; try { // Query the browse index if (browseAuthors) { browseInfo = Browse.getAuthors(scope); } else if (browseDates) { browseInfo = Browse.getItemsByDate(scope, oldestFirst); } else if (browseSubjects) { browseInfo = Browse.getSubjects(scope); } else { browseInfo = Browse.getItemsByTitle(scope); } } catch (SQLException sqle) { // An invalid scope was given JSPManager.showIntegrityError(request, response); return; } // Write log entry String what = "title"; if (browseAuthors) { what = "author"; } else if (browseSubjects) { what = "subject"; } else if (browseDates) { what = "date"; } log.info( LogManager.getHeader( context, "browse_" + what, logInfo + "results=" + browseInfo.getResultCount())); if (browseInfo.getResultCount() == 0) { // No results! 
request.setAttribute("community", community); request.setAttribute("collection", collection); JSPManager.showJSP(request, response, "/browse/no-results.jsp"); } else { // Work out what the query strings will be for the previous // and next pages if (!browseInfo.isFirst()) { // Not the first page, so we'll need a "previous page" button // The top entry of the current page becomes the bottom // entry of the "previous page" String s; if (browseAuthors || browseSubjects) // aneesh { s = (browseInfo.getStringResults())[0]; } else { Item firstItem = (browseInfo.getItemResults())[0]; s = firstItem.getHandle(); } if (browseDates && oldestFirst) { // For browsing by date, oldest first, we need // to add the ordering parameter request.setAttribute( "previous.query", "order=oldestfirst&bottom=" + URLEncoder.encode(s, Constants.DEFAULT_ENCODING)); } else { request.setAttribute( "previous.query", "bottom=" + URLEncoder.encode(s, Constants.DEFAULT_ENCODING)); } } if (!browseInfo.isLast()) { // Not the last page, so we'll need a "next page" button // The bottom entry of the current page will be the top // entry in the next page String s; if (browseAuthors) { String[] authors = browseInfo.getStringResults(); s = authors[authors.length - 1]; } else if (browseSubjects) { String[] subjects = browseInfo.getStringResults(); s = subjects[subjects.length - 1]; } else { Item[] items = browseInfo.getItemResults(); Item lastItem = items[items.length - 1]; s = lastItem.getHandle(); } if (browseDates && oldestFirst) { // For browsing by date, oldest first, we need // to add the ordering parameter request.setAttribute( "next.query", "order=oldestfirst&top=" + URLEncoder.encode(s, Constants.DEFAULT_ENCODING)); } else { request.setAttribute( "next.query", "top=" + URLEncoder.encode(s, Constants.DEFAULT_ENCODING)); } } // Set appropriate attributes and forward to results page request.setAttribute("community", community); request.setAttribute("collection", collection); request.setAttribute("browse.info", browseInfo); request.setAttribute("highlight", new Boolean(highlight)); if (browseAuthors) { JSPManager.showJSP(request, response, "/browse/authors.jsp"); } else if (browseSubjects) { JSPManager.showJSP(request, response, "/browse/subjects.jsp"); } else if (browseDates) { request.setAttribute("oldest.first", new Boolean(oldestFirst)); request.setAttribute("flip.ordering.query", flipOrderingQuery); JSPManager.showJSP(request, response, "/browse/items-by-date.jsp"); } else { JSPManager.showJSP(request, response, "/browse/items-by-title.jsp"); } } }
/** * Browse the archive by single values (such as the name of an author). This produces a BrowseInfo * object that contains Strings as the results of the browse * * @param bs the scope of the browse * @return the results of the browse * @throws BrowseException */ private BrowseInfo browseByValue(BrowserScope bs) throws BrowseException { log.info(LogManager.getHeader(context, "browse_by_value", "focus=" + bs.getJumpToValue())); try { // get the table name that we are going to be getting our data from // this is the distinct table constrained to either community or collection dao.setTable(browseIndex.getDistinctTableName()); // remind the DAO that this is a distinct value browse, so it knows what sort // of query to build dao.setDistinct(true); // tell the browse query whether we are ascending or descending on the value dao.setAscending(scope.isAscending()); // set our constraints on community or collection if (scope.inCollection() || scope.inCommunity()) { // Scoped browsing of distinct metadata requires the mapping // table to be specified. dao.setFilterMappingTables(null, browseIndex.getMapTableName()); if (scope.inCollection()) { Collection col = (Collection) scope.getBrowseContainer(); dao.setContainerTable("collection2item"); dao.setContainerIDField("collection_id"); dao.setContainerID(col.getID()); } else if (scope.inCommunity()) { Community com = (Community) scope.getBrowseContainer(); dao.setContainerTable("communities2item"); dao.setContainerIDField("community_id"); dao.setContainerID(com.getID()); } } // this is the total number of results in answer to the query int total = getTotalResults(true); // set the ordering field (there is only one option) dao.setOrderField("sort_value"); // assemble the focus clause if we are to have one // it will look like one of the following // - sort_value < myvalue // = sort_1 > myvalue dao.setJumpToField("sort_value"); int offset = scope.getOffset(); String rawFocusValue = null; if (offset < 1 && scope.hasJumpToValue() || scope.hasStartsWith()) { String focusValue = getJumpToValue(); // store the value to tell the Browse Info object which value we are browsing on rawFocusValue = focusValue; // make sure the incoming value is normalised focusValue = normalizeJumpToValue(focusValue); offset = getOffsetForDistinctValue(focusValue); } // assemble the offset and limit dao.setOffset(offset); dao.setLimit(scope.getResultsPerPage()); // Holder for the results List<String[]> results = null; // Does this browse have any contents? if (total > 0) { // now run the query results = dao.doValueQuery(); // now, if we don't have any results, we are at the end of the browse. This will // be because a starts_with value has been supplied for which we don't have // any items. 
if (results.size() == 0) { // In this case, we will calculate a new offset for the last page of results offset = total - scope.getResultsPerPage(); if (offset < 0) { offset = 0; } // And rerun the query dao.setOffset(offset); results = dao.doValueQuery(); } } else { // No records, so make an empty list results = new ArrayList<String[]>(); } // construct the BrowseInfo object to pass back BrowseInfo browseInfo = new BrowseInfo(results, offset, total, offset); if (offset + scope.getResultsPerPage() < total) { browseInfo.setNextOffset(offset + scope.getResultsPerPage()); } if (offset - scope.getResultsPerPage() > -1) { browseInfo.setPrevOffset(offset - scope.getResultsPerPage()); } // add the browse index to the Browse Info browseInfo.setBrowseIndex(browseIndex); // set the sort option for the Browse Info browseInfo.setSortOption(scope.getSortOption()); // tell the Browse Info which way we are sorting browseInfo.setAscending(scope.isAscending()); // tell the Browse Info which level of browse we are at browseInfo.setBrowseLevel(scope.getBrowseLevel()); // set the browse value if there is one browseInfo.setFocus(rawFocusValue); // tell the browse info if it is working from a starts with parameter browseInfo.setStartsWith(scope.hasStartsWith()); // tell the browse info what the container for the browse was if (scope.inCollection() || scope.inCommunity()) { browseInfo.setBrowseContainer(scope.getBrowseContainer()); } browseInfo.setResultsPerPage(scope.getResultsPerPage()); return browseInfo; } catch (SQLException e) { log.error("caught exception: ", e); throw new BrowseException(e); } }