/**
 * Serves one document of a batch as JSON.
 *
 * <p>Request parameters: {@code currentDocID} (falls back to the user's stored
 * current doc), {@code folder} (the DocumentBatch id), and {@code docdirection}
 * ({@code current} | {@code next} | {@code prev}; anything else behaves like
 * {@code current}).
 *
 * <p>NOTE(review): the first statement below is corrupted by a credential/secret
 * redaction ("******") that swallowed the opening of the sign-in guard
 * (presumably a check on {@code req.getUserPrincipal()}). The original condition
 * must be restored from version control before this method can compile.
 */
@Override protected void doGet(HttpServletRequest req, HttpServletResponse resp) throws ServletException, IOException {
    // --- corrupted/redacted guard: unauthenticated callers get "PLEASE SIGN IN" ---
    System.out.println("GETNEXT DOC " + "USER:"******"daveleray")) {
        System.out.println("logging in from:" + req.getUserPrincipal());
        resp.setContentType("text/plain");
        resp.getWriter().println("PLEASE SIGN IN");
        return;
    }
    System.out.println("PROCESSING");
    CloudReviewerUser user = UserHandler.getCurrentUser();
    String currentID = req.getParameter("currentDocID");
    String selectedFolder = req.getParameter("folder");
    String docdirection = req.getParameter("docdirection");
    // NOTE(review): selectedFolder is dereferenced without a null check — a request
    // missing the "folder" parameter would NPE here. TODO confirm callers always send it.
    if (selectedFolder.contentEquals("undefined")) {
        resp.getWriter().println("UNDEFINED BATCH");
        return;
    }
    // Fall back to the user's persisted current document when no id was supplied.
    if (currentID == null || currentID.contentEquals("undefined")) {
        currentID = user.getCurrentDoc();
    }
    // Load the batch by key; the manager is closed before the (detached) batch is used.
    PersistenceManager pm = PMF.get().getPersistenceManager();
    DocumentBatch batch = pm.getObjectById(DocumentBatch.class, selectedFolder);
    pm.close();
    System.out.println("Identified Batch Size of:" + batch.getDocIDCollection().size());
    // Resolve the requested document relative to currentID within the batch.
    Document d;
    if (docdirection == null || docdirection.contentEquals("current")) {
        d = DocumentHandler.getDocument(currentID);
    } else if (docdirection.equals("next")) {
        d = DocumentHandler.getNextDocument(batch, null, currentID);
    } else if (docdirection.equals("prev")) {
        d = DocumentHandler.getPreviousDocument(batch, null, currentID);
    } else {
        d = DocumentHandler.getDocument(currentID);
    }
    // Serialize the response wrapper as JSON.
    DocJSONResponse response = DocHelper.getJSONResponse(d, batch);
    resp.setContentType("application/json");
    Gson gson = new Gson();
    String sending = gson.toJson(response);
    resp.getWriter().println(sending);
    return;
}
/** * Begins the indexing * * @exception BuildException If an error occurs indexing the fileset */ @Override public void execute() throws BuildException { // construct handler and analyzer dynamically try { handler = Class.forName(handlerClassName).asSubclass(DocumentHandler.class).newInstance(); analyzer = IndexTask.createAnalyzer(analyzerClassName); } catch (Exception e) { throw new BuildException(e); } log("Document handler = " + handler.getClass(), Project.MSG_VERBOSE); log("Analyzer = " + analyzer.getClass(), Project.MSG_VERBOSE); if (handler instanceof ConfigurableDocumentHandler) { ((ConfigurableDocumentHandler) handler).configure(handlerConfig.getProperties()); } try { indexDocs(); } catch (IOException e) { throw new BuildException(e); } }
/**
 * Handles the start of an XML element. When the most recently parsed parent
 * node has its content collapsed, the element is folded into the collapsed
 * region (and the parent's tag name is remembered so {@code endElement} can
 * detect where the region closes); otherwise normal SAX processing continues.
 *
 * @throws SAXException propagated from the superclass handler
 */
public void startElement(String uri, String localName, String name, Attributes attributes)
        throws SAXException {
    IDocumentElementNode parent = getLastParsedDocumentNode();
    // Test the boolean directly instead of the redundant "== true" comparison.
    if (parent != null && parent.isContentCollapsed()) {
        setCollapsibleParentName(parent.getXMLTagName());
        processCollapsedStartElement(name, attributes, parent);
    } else {
        super.startElement(uri, localName, name, attributes);
    }
}
/**
 * Handles the end of an XML element. If this element is the one that opened
 * the collapsible region, the region is closed and the event is handed to the
 * superclass; elements ending inside a still-open region are folded into it.
 *
 * @throws SAXException propagated from the superclass handler
 */
public void endElement(String uri, String localName, String name) throws SAXException {
    String collapsibleParent = getCollapsibleParentName();

    // This element closes the collapsed region when its name matches the
    // remembered collapsible parent tag.
    if (collapsibleParent != null && collapsibleParent.equals(name)) {
        setCollapsibleParentName(null);
        collapsibleParent = null;
    }

    if (collapsibleParent == null) {
        // No open collapsed region — normal SAX processing.
        super.endElement(uri, localName, name);
    } else {
        // Still inside a collapsed region — fold this end tag into the parent.
        IDocumentElementNode parent = getLastParsedDocumentNode();
        processCollapsedEndElement(name, parent);
    }
}
/** * Index the fileset. * * @exception IOException if Lucene I/O exception TODO: refactor!!!!! */ private void indexDocs() throws IOException { Date start = new Date(); boolean create = overwrite; // If the index directory doesn't exist, // create it and force create mode if (indexDir.mkdirs() && !overwrite) { create = true; } FSDirectory dir = FSDirectory.open(indexDir); try { Searcher searcher = null; boolean checkLastModified = false; if (!create) { try { searcher = new IndexSearcher(dir, true); checkLastModified = true; } catch (IOException ioe) { log("IOException: " + ioe.getMessage()); // Empty - ignore, which indicates to index all // documents } } log("checkLastModified = " + checkLastModified, Project.MSG_VERBOSE); IndexWriterConfig conf = new IndexWriterConfig(Version.LUCENE_CURRENT, analyzer) .setOpenMode(create ? OpenMode.CREATE : OpenMode.APPEND); LogMergePolicy lmp = (LogMergePolicy) conf.getMergePolicy(); lmp.setUseCompoundFile(useCompoundIndex); lmp.setMergeFactor(mergeFactor); IndexWriter writer = new IndexWriter(dir, conf); int totalFiles = 0; int totalIndexed = 0; int totalIgnored = 0; try { for (int i = 0; i < rcs.size(); i++) { ResourceCollection rc = rcs.elementAt(i); if (rc.isFilesystemOnly()) { Iterator resources = rc.iterator(); while (resources.hasNext()) { Resource r = (Resource) resources.next(); if (!r.isExists() || !(r instanceof FileResource)) { continue; } totalFiles++; File file = ((FileResource) r).getFile(); if (!file.exists() || !file.canRead()) { throw new BuildException( "File \"" + file.getAbsolutePath() + "\" does not exist or is not readable."); } boolean indexIt = true; if (checkLastModified) { Term pathTerm = new Term("path", file.getPath()); TermQuery query = new TermQuery(pathTerm); ScoreDoc[] hits = searcher.search(query, null, 1).scoreDocs; // if document is found, compare the // indexed last modified time with the // current file // - don't index if up to date if (hits.length > 0) { Document doc = 
searcher.doc(hits[0].doc); String indexModified = doc.get("modified").trim(); if (indexModified != null) { long lastModified = 0; try { lastModified = DateTools.stringToTime(indexModified); } catch (ParseException e) { // if modified time is not parsable, skip } if (lastModified == file.lastModified()) { // TODO: remove existing document indexIt = false; } } } } if (indexIt) { try { log("Indexing " + file.getPath(), Project.MSG_VERBOSE); Document doc = handler.getDocument(file); if (doc == null) { totalIgnored++; } else { // Add the path of the file as a field named "path". Use a Keyword field, so // that the index stores the path, and so that the path is searchable doc.add( new Field( "path", file.getPath(), Field.Store.YES, Field.Index.NOT_ANALYZED)); // Add the last modified date of the file a field named "modified". Use a // Keyword field, so that it's searchable, but so that no attempt is made // to tokenize the field into words. doc.add( new Field( "modified", DateTools.timeToString( file.lastModified(), DateTools.Resolution.MILLISECOND), Field.Store.YES, Field.Index.NOT_ANALYZED)); writer.addDocument(doc); totalIndexed++; } } catch (DocumentHandlerException e) { throw new BuildException(e); } } } // for j } // if (fs != null) } // for i writer.optimize(); } // try finally { // always make sure everything gets closed, // no matter how we exit. writer.close(); if (searcher != null) { searcher.close(); } } Date end = new Date(); log( totalIndexed + " out of " + totalFiles + " indexed (" + totalIgnored + " ignored) in " + (end.getTime() - start.getTime()) + " milliseconds"); } finally { dir.close(); } }
/**
 * Main method: builds a document via {@code DocumentHandler}.
 *
 * @param args command-line arguments (unused)
 */
public static void main(String[] args) {
    DocumentHandler dh = new DocumentHandler(); // document-processing helper
    dh.createDoc(); // create the document
}