/**
 * Builds the template response shown when the user's query text could not be parsed.
 *
 * @param portal current portal, used to build the page title
 * @param query the raw query text that failed to parse; echoed back to the user
 * @param f requested output format, used to pick the BAD_QUERY template
 * @return template values for the bad-query result page
 */
private TemplateResponseValues doBadQuery(Portal portal, String query, Format f) {
  Map<String, Object> values = new HashMap<String, Object>();
  values.put("title", "Search " + portal.getAppName());
  values.put("query", query);
  String template = getTemplate(f, Result.BAD_QUERY);
  return new TemplateResponseValues(template, values);
}
/**
 * Sets up the portal mappings. May be called at context init or at any other time.
 *
 * <p>Notice that this isn't thread safe. The main problem is reading the prefix2portalid and
 * isPortalPickingActive by the doFilter() method while setupPortalMappings() is being run. This
 * could be solved by making doFilter synchronized but then only one thread could be in doFilter
 * at a time and that might be a performance issue.
 *
 * <p>Since setupPortalMappings is synchronized we cannot get an inconsistent state after it
 * returns. There will be a short time when it is running when the prefix2portalid and
 * isPortalPickingActive might be inconsistent. If a different thread calls doFilter at that time
 * and gets a bad result we should be able to live with it.
 *
 * @param portals portals to build prefix mappings from; null or empty disables portal picking
 */
public static synchronized void setupPortalMappings(Collection<Portal> portals) {
  if (portals == null || portals.size() < 1) {
    log.debug("No portal mappings found in db. Filtering for portals will be inactive");
    prefix2portalid = new HashMap<String, Integer>();
    isPortalPickingActive = false;
    return;
  }

  int mappingCount = 0;
  boolean prefixCollision = false;
  HashMap<String, Integer> newPrefixMap = new HashMap<String, Integer>();
  HashMap<Integer, String> newPortalIdMap = new HashMap<Integer, String>();

  for (Portal portal : portals) {
    if (portal == null) continue;

    String urlprefix = portal.getUrlprefix();
    // Skip portals with no usable prefix (null, empty, or whitespace-only).
    if (urlprefix == null || urlprefix.trim().length() == 0 || " ".equalsIgnoreCase(urlprefix)) {
      log.debug(
          "no url prefix mapping for portal "
              + portal.getAppName()
              + " id "
              + portal.getPortalId());
      continue;
    }
    urlprefix = urlprefix.trim();

    // Refuse prefixes that would shadow a real directory in the webapp.
    if (protectedPrefixes.contains(urlprefix)) {
      log.error(
          "the prefix "
              + urlprefix
              + " is a directory that is in the webapp"
              + " and may not be used as a prefix for portal "
              + portal.getPortalId());
      continue;
    }

    // On a prefix collision, drop the earlier mapping; the current portal's
    // mapping is added below in its place.
    if (newPrefixMap.containsKey(urlprefix)) {
      Integer i = newPrefixMap.get(urlprefix);
      log.debug(
          "multiple portals have the url prefix "
              + urlprefix
              + ", both portals id:"
              + i.toString()
              + " and id:"
              + portal.getPortalId());
      newPrefixMap.remove(urlprefix);
      newPortalIdMap.remove(i);
      prefixCollision = true;
    }

    log.debug("urlprefix:'" + urlprefix + "' -> \t\tportalid: " + portal.getPortalId());
    // Integer.valueOf replaces the deprecated new Integer(int) constructor; it
    // uses the small-value cache and avoids a needless allocation.
    newPrefixMap.put(urlprefix, Integer.valueOf(portal.getPortalId()));
    newPortalIdMap.put(Integer.valueOf(portal.getPortalId()), urlprefix);
    mappingCount++;
  }

  // Publish the new maps only if at least one mapping was built; otherwise
  // deactivate portal picking entirely.
  if (mappingCount > 0) {
    prefix2portalid = newPrefixMap;
    portalid2prefix = newPortalIdMap;
    isPortalPickingActive = true;
  } else {
    prefix2portalid = new HashMap<String, Integer>();
    portalid2prefix = new HashMap<Integer, String>();
    isPortalPickingActive = false;
  }

  log.info("final mappings:");
  for (String key : prefix2portalid.keySet()) {
    Integer id = prefix2portalid.get(key);
    log.info("portalid: " + id + "\turlprefix:'" + key + "'");
  }
  if (prefixCollision) log.info("there were at least two portals that had the same prefix.");
}
@Override protected ResponseValues processRequest(VitroRequest vreq) { // There may be other non-html formats in the future Format format = getFormat(vreq); boolean wasXmlRequested = Format.XML == format; log.debug("xml was the requested format"); boolean wasHtmlRequested = !wasXmlRequested; try { Portal portal = vreq.getPortal(); PortalFlag portalFlag = vreq.getPortalFlag(); // make sure an IndividualDao is available if (vreq.getWebappDaoFactory() == null || vreq.getWebappDaoFactory().getIndividualDao() == null) { log.error("Could not get webappDaoFactory or IndividualDao"); throw new Exception("Could not access model."); } IndividualDao iDao = vreq.getWebappDaoFactory().getIndividualDao(); VClassGroupDao grpDao = vreq.getWebappDaoFactory().getVClassGroupDao(); VClassDao vclassDao = vreq.getWebappDaoFactory().getVClassDao(); String alphaFilter = vreq.getParameter("alpha"); int startIndex = 0; try { startIndex = Integer.parseInt(vreq.getParameter("startIndex")); } catch (Throwable e) { startIndex = 0; } log.debug("startIndex is " + startIndex); int hitsPerPage = defaultHitsPerPage; try { hitsPerPage = Integer.parseInt(vreq.getParameter("hitsPerPage")); } catch (Throwable e) { hitsPerPage = defaultHitsPerPage; } log.debug("hitsPerPage is " + hitsPerPage); int maxHitSize = defaultMaxSearchSize; if (startIndex >= defaultMaxSearchSize - hitsPerPage) maxHitSize = startIndex + defaultMaxSearchSize; if (alphaFilter != null) { maxHitSize = maxHitSize * 2; hitsPerPage = maxHitSize; } log.debug("maxHitSize is " + maxHitSize); String qtxt = vreq.getParameter(VitroQuery.QUERY_PARAMETER_NAME); Analyzer analyzer = getAnalyzer(getServletContext()); Query query = null; try { query = getQuery(vreq, portalFlag, analyzer, qtxt); log.debug("query for '" + qtxt + "' is " + query.toString()); } catch (ParseException e) { return doBadQuery(portal, qtxt, format); } IndexSearcher searcherForRequest = LuceneIndexFactory.getIndexSearcher(getServletContext()); TopDocs topDocs = null; try { 
topDocs = searcherForRequest.search(query, null, maxHitSize); } catch (Throwable t) { log.error("in first pass at search: " + t); // this is a hack to deal with odd cases where search and index threads interact try { wait(150); topDocs = searcherForRequest.search(query, null, maxHitSize); } catch (Exception ex) { log.error(ex); String msg = makeBadSearchMessage(qtxt, ex.getMessage()); if (msg == null) { msg = "The search request contained errors."; } return doFailedSearch(msg, qtxt, format); } } if (topDocs == null || topDocs.scoreDocs == null) { log.error("topDocs for a search was null"); String msg = "The search request contained errors."; return doFailedSearch(msg, qtxt, format); } int hitsLength = topDocs.scoreDocs.length; if (hitsLength < 1) { return doNoHits(qtxt, format); } log.debug("found " + hitsLength + " hits"); int lastHitToShow = 0; if ((startIndex + hitsPerPage) > hitsLength) { lastHitToShow = hitsLength; } else { lastHitToShow = startIndex + hitsPerPage - 1; } List<Individual> beans = new LinkedList<Individual>(); for (int i = startIndex; i < topDocs.scoreDocs.length; i++) { try { if ((i >= startIndex) && (i <= lastHitToShow)) { Document doc = searcherForRequest.doc(topDocs.scoreDocs[i].doc); String uri = doc.get(Entity2LuceneDoc.term.URI); Individual ent = new IndividualImpl(); ent.setURI(uri); ent = iDao.getIndividualByURI(uri); if (ent != null) beans.add(ent); } } catch (Exception e) { log.error("problem getting usable Individuals from search " + "hits" + e.getMessage()); } } ParamMap pagingLinkParams = new ParamMap(); pagingLinkParams.put("querytext", qtxt); pagingLinkParams.put("hitsPerPage", String.valueOf(hitsPerPage)); if (wasXmlRequested) { pagingLinkParams.put(XML_REQUEST_PARAM, "1"); } /* Start putting together the data for the templates */ Map<String, Object> body = new HashMap<String, Object>(); String classGroupParam = vreq.getParameter("classgroup"); boolean classGroupFilterRequested = false; if (!StringUtils.isEmpty(classGroupParam)) 
{ VClassGroup grp = grpDao.getGroupByURI(classGroupParam); classGroupFilterRequested = true; if (grp != null && grp.getPublicName() != null) body.put("classGroupName", grp.getPublicName()); } String typeParam = vreq.getParameter("type"); boolean typeFiltereRequested = false; if (!StringUtils.isEmpty(typeParam)) { VClass type = vclassDao.getVClassByURI(typeParam); typeFiltereRequested = true; if (type != null && type.getName() != null) body.put("typeName", type.getName()); } /* Add classgroup and type refinement links to body */ if (wasHtmlRequested) { // Search request includes no classgroup and no type, so add classgroup search refinement // links. if (!classGroupFilterRequested && !typeFiltereRequested) { List<VClassGroup> classgroups = getClassGroups(grpDao, topDocs, searcherForRequest); List<VClassGroupSearchLink> classGroupLinks = new ArrayList<VClassGroupSearchLink>(classgroups.size()); for (VClassGroup vcg : classgroups) { if (vcg.getPublicName() != null) { classGroupLinks.add(new VClassGroupSearchLink(qtxt, vcg)); } } body.put("classGroupLinks", classGroupLinks); // Search request is for a classgroup, so add rdf:type search refinement links // but try to filter out classes that are subclasses } else if (classGroupFilterRequested && !typeFiltereRequested) { List<VClass> vClasses = getVClasses(vclassDao, topDocs, searcherForRequest); List<VClassSearchLink> vClassLinks = new ArrayList<VClassSearchLink>(vClasses.size()); for (VClass vc : vClasses) { vClassLinks.add(new VClassSearchLink(qtxt, vc)); } body.put("classLinks", vClassLinks); pagingLinkParams.put("classgroup", classGroupParam); // This case is never displayed } else if (!StringUtils.isEmpty(alphaFilter)) { body.put("alphas", getAlphas(topDocs, searcherForRequest)); alphaSortIndividuals(beans); } else { pagingLinkParams.put("type", typeParam); } } // Convert search result individuals to template model objects body.put( "individuals", ListedIndividualTemplateModel.getIndividualTemplateModelList(beans, 
vreq)); body.put("querytext", qtxt); body.put("title", qtxt + " - " + portal.getAppName() + " Search Results"); body.put("hitsLength", hitsLength); body.put("startIndex", startIndex); body.put( "pagingLinks", getPagingLinks( startIndex, hitsPerPage, hitsLength, maxHitSize, vreq.getServletPath(), pagingLinkParams)); if (startIndex != 0) { body.put( "prevPage", getPreviousPageLink(startIndex, hitsPerPage, vreq.getServletPath(), pagingLinkParams)); } if (startIndex < (hitsLength - hitsPerPage)) { body.put( "nextPage", getNextPageLink(startIndex, hitsPerPage, vreq.getServletPath(), pagingLinkParams)); } String template = templateTable.get(format).get(Result.PAGED); return new TemplateResponseValues(template, body); } catch (Throwable e) { return doSearchError(e, format); } }