// Callback from one of the proxies to load the entire cache based on a hit of
// getXXXXX (except for getId, which doesn't need to go to the database).
public void loadCacheIfNeeded() {
    if (cacheLoaded)
        return;

    DboTableMeta metaDbo = metaClass.getMetaDbo();
    IndiceToVirtual virtKeys = new IndiceToVirtual(metaDbo, new ListWrappingCursor<byte[]>(keys));
    AbstractCursor<KeyValue<Row>> rows = session.find(metaDbo, virtKeys, false, true, null);
    String name = getClass().getSimpleName();
    log.info(name + ": just loaded rows for keylist (next convert to proxies)=" + keys.size()
            + " for field=" + field);

    int counter = 0;
    while (true) {
        com.alvazan.orm.api.z8spi.iter.AbstractCursor.Holder<KeyValue<Row>> holder = rows.nextImpl();
        if (holder == null)
            break;
        KeyValue<Row> kv = holder.getValue();
        Row row = kv.getValue();
        if (row != null || metaDbo.isEmbeddable()) {
            Holder<T> h = (Holder<T>) originalHolders.get(counter);
            T value = h.getValue();
            if (value instanceof NoSqlProxy) {
                // inject the row into the proxy object here to load its fields
                metaClass.fillInInstance(row, session, value);
                // ((NoSqlProxy)value).__injectData(row);
            }
        }
        counter++;
    }
    cacheLoaded = true;
}
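// For context, loadCacheIfNeeded() is invoked from generated proxy getters, as
// the comment above describes. A minimal sketch of what such a getter might
// look like follows; the 'delegate' field and the getName() property are
// assumptions for illustration only, not part of this class:
public String getName() {
    loadCacheIfNeeded(); // a single batched database read fills every proxy in the key list
    return delegate.getName();
}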
private TypedRow getLastVal(Cursor<KeyValue<TypedRow>> cursor) {
    cursor.afterLast();
    if (!cursor.previous())
        return null;
    KeyValue<TypedRow> kv = cursor.getCurrent();
    return kv.getValue();
}
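// A hedged usage sketch for getLastVal: assuming playorm's QueryResult exposes
// getPrimaryViewCursor() returning a Cursor<KeyValue<TypedRow>> (the table
// name and batch size below are made up for illustration), the most recent row
// of a query could be fetched like so:
QueryResult result = NoSql.em().getTypedSession()
        .createQueryCursor("select c from someTable as c", 500);
TypedRow last = getLastVal(result.getPrimaryViewCursor());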
@Util
public static void reindex() throws IOException, SAXException, ParserConfigurationException, SolrServerException {
    deleteExistingCores();
    indexSchemas();

    Cursor<KeyValue<SecureTable>> tablesCursor = SecureTable.findAllCursor(NoSql.em());
    int i = 0;
    long docsindexed = 0;
    // subtract 1 so the elapsed time used in the rate calculations below is never zero
    long startTime = System.currentTimeMillis() - 1;
    Collection<SolrInputDocument> solrDocs = new ArrayList<SolrInputDocument>();
    while (tablesCursor.next()) {
        // clear the first-level cache periodically so memory stays bounded
        if (++i % 200 == 0)
            NoSql.em().clear();

        KeyValue<SecureTable> kv = tablesCursor.getCurrent();
        // It is very important to ignore errors and continue indexing. If an index has
        // gotten corrupted (which happens...), kv.getValue() throws
        // com.alvazan.orm.api.exc.RowNotFoundException, and an unhandled exception
        // would kill the entire reindex.
        try {
            if (kv.getValue() == null)
                continue;
        } catch (RowNotFoundException rnfe) {
            log.warn("got a corrupt index while reindexing; ignoring the error and"
                    + " continuing with indexing of other data.", rnfe);
            continue;
        }
        SecureTable table = kv.getValue();
        DboTableMeta meta = table.getTableMeta();
        SearchUtils.indexTable(table, meta, solrDocs);

        if (table.isSearchable()) {
            log.info("found a searchable table " + table.getName() + ", indexing it.");
            String sql = "select c from " + table.getTableName() + " as c";
            Collection<SolrInputDocument> tablesolrDocs = new ArrayList<SolrInputDocument>();
            try {
                QueryResult result = NoSql.em().getTypedSession().createQueryCursor(sql, SqlPullProcessor.BATCH_SIZE);
                Iterator<List<TypedRow>> cursor = result.getAllViewsIter().iterator();
                while (true) {
                    // I hate this, but cursor.hasNext() can throw an exception, which means
                    // we need to skip over that item but continue on with the cursor until
                    // it runs out; getNext wraps that dance:
                    List<TypedRow> typedRows = getNext(cursor);
                    if (typedRows == null)
                        break;
                    for (TypedRow prow : typedRows) {
                        SearchPosting.addSolrDataDoc(prow, table, tablesolrDocs);
                    }
                    if (tablesolrDocs.size() > REINDEX_BATCH_SIZE) {
                        docsindexed += tablesolrDocs.size();
                        long elapsed = System.currentTimeMillis() - startTime;
                        System.out.println("hit solr doc batch size in a searchable table, "
                                + docsindexed + " docs so far, " + elapsed + " millis elapsed, "
                                + (docsindexed * 1000 / elapsed) + " docs per sec.");
                        SearchPosting.saveSolr("reindex", tablesolrDocs, null);
                        tablesolrDocs = new ArrayList<SolrInputDocument>();
                    }
                }
                SearchPosting.saveSolr("reindex", tablesolrDocs, null);
                docsindexed += tablesolrDocs.size();
            } catch (Exception e) {
                log.warn("got an exception while indexing a searchable table with the query"
                        + " (probably a corrupt index in playorm): " + sql, e);
            }
        }

        if (solrDocs.size() > REINDEX_BATCH_SIZE) {
            docsindexed += solrDocs.size();
            long elapsed = System.currentTimeMillis() - startTime;
            System.out.println("hit solr doc batch size in metadata, " + docsindexed
                    + " docs so far, " + elapsed + " millis elapsed, "
                    + (docsindexed * 1000 / elapsed) + " docs per sec.");
            SearchPosting.saveSolr("reindex", solrDocs, "databusmeta");
            solrDocs = new ArrayList<SolrInputDocument>();
        }
    }

    if (solrDocs.size() > 0) {
        docsindexed += solrDocs.size();
        long elapsed = System.currentTimeMillis() - startTime;
        System.out.println("flushing remaining docs at end of reindex, " + docsindexed
                + " docs so far, " + elapsed + " millis elapsed, "
                + (docsindexed * 1000 / elapsed) + " docs per sec.");
        SearchPosting.saveSolr("reindex", solrDocs, "databusmeta");
    }
}
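// The getNext helper called in reindex() is defined elsewhere in this class.
// A minimal sketch of what it likely does, assuming the underlying playorm
// iterator advances past a corrupt entry after next() has thrown (if hasNext()
// itself threw forever, this loop would need a retry bound):
private static List<TypedRow> getNext(Iterator<List<TypedRow>> cursor) {
    while (true) {
        try {
            if (!cursor.hasNext())
                return null; // cursor exhausted; reindex() treats null as "done"
            return cursor.next();
        } catch (Exception e) {
            // a corrupt row threw (e.g. RowNotFoundException); skip it and keep pulling
            log.warn("skipping corrupt row during reindex", e);
        }
    }
}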