/**
 * Loads the documents matching the given query, compacts them, and replaces
 * the original documents in the index with the compacted ones.
 */
private void doCompact(
        LazyDocumentLoader documentLoader,
        LazyCompactor compactor,
        SolrQueryResponse rsp,
        String tsId,
        String q) throws IOException, SyntaxError {
    Query query = depProvider.parser(q).getQuery();
    Iterable<Document> docs = documentLoader.load(query, SORT);
    Iterable<CompactionResult> compactionResults = compactor.compact(docs);

    List<Document> docsToDelete = new LinkedList<>();
    List<SolrInputDocument> docsToAdd = new LinkedList<>();
    compactionResults.forEach(
            it -> {
                docsToDelete.addAll(it.getInputDocuments());
                docsToAdd.addAll(it.getOutputDocuments());
            });

    // add the compacted documents before deleting their uncompacted sources
    depProvider.solrUpdateService().add(docsToAdd);
    depProvider.solrUpdateService().delete(docsToDelete);

    rsp.add("timeseries " + tsId + " oldNumDocs:", docsToDelete.size());
    rsp.add("timeseries " + tsId + " newNumDocs:", docsToAdd.size());
}
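// Note: handleRequestBody below calls compact(...), which is not part of this
// excerpt. Because the forEach lambda cannot throw the checked IOException and
// SyntaxError that doCompact declares, compact presumably wraps doCompact and
// reports failures on the response instead. A minimal sketch of that wrapper,
// using only members already referenced in this class:
private void compact(
        LazyDocumentLoader documentLoader,
        LazyCompactor compactor,
        SolrQueryResponse rsp,
        String tsId,
        String q) {
    try {
        doCompact(documentLoader, compactor, rsp, tsId, q);
    } catch (IOException | SyntaxError e) {
        LOGGER.error("Exception occurred while compacting time series {}.", tsId, e);
        rsp.add("error", "Exception occurred while compacting time series " + tsId + ": " + e.getMessage());
    }
}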
@Override
@SuppressWarnings("PMD.SignatureDeclareThrowsException")
public void handleRequestBody(SolrQueryRequest req, SolrQueryResponse rsp) throws Exception {
    String joinKey = req.getParams().get(JOIN_KEY);
    String fq = req.getParams().get(FQ);
    int ppc = req.getParams().getInt(POINTS_PER_CHUNK, 100000);
    int pageSize = req.getParams().getInt(PAGE_SIZE, 100);

    depProvider.init(req, rsp);
    LazyCompactor compactor = depProvider.compactor(ppc, req.getSearcher().getSchema());
    LazyDocumentLoader documentLoader = depProvider.documentLoader(pageSize, req.getSearcher());

    if (isBlank(joinKey) && isBlank(fq)) {
        LOGGER.error("Neither join key nor filter query given.");
        rsp.add(
                "error",
                join(
                        " ",
                        "Neither join key nor filter query given.",
                        "Get help at https://chronix.gitbooks.io/chronix/content/document_compaction.html."));
        return;
    }

    // no join key => compact the documents matching fq as a single time series
    if (isBlank(joinKey)) {
        compact(documentLoader, compactor, rsp, fq, fq);
        depProvider.solrUpdateService().commit();
        return;
    }

    // determine the time series identified by joinKey
    SolrFacetService facetService = depProvider.solrFacetService();
    Query filterQuery = isBlank(fq) ? new MatchAllDocsQuery() : depProvider.parser(fq).getQuery();
    List<NamedList<Object>> pivotResult = facetService.pivot(joinKey, filterQuery);

    // compact each time series' constituting documents
    facetService
            .toTimeSeriesIds(pivotResult)
            .parallelStream()
            .forEach(
                    tsId ->
                            compact(documentLoader, compactor, rsp, tsId.toString(), and(tsId.toQuery(), fq)));

    depProvider.solrUpdateService().commit();
}
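// Usage sketch (assumptions, not confirmed by this excerpt: the handler is
// registered in solrconfig.xml, e.g. under "/compact", and JOIN_KEY, FQ,
// POINTS_PER_CHUNK, PAGE_SIZE name the request parameters "joinKey", "fq",
// "pointsPerChunk", and "pageSize"):
//
//   curl 'http://localhost:8983/solr/chronix/compact?joinKey=metric&fq=metric:cpu.load'
//
// With a join key, each time series identified by a distinct "metric" value is
// compacted in its own doCompact call; without one, all documents matching fq
// are compacted as a single series.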