/**
 * Boots the Lily/Solr proxy stack once for the whole test class, creates the field and
 * record types used by the tests, and registers the test index. The index is created with
 * {@code DO_NOT_SUBSCRIBE} so it is only ever populated by explicit batch builds.
 *
 * <p>Fix: the created field types and record type are now stored in the static
 * {@code ft1}, {@code ft2} and {@code rt1} fields instead of being captured in locals
 * (or discarded, for the record type). The test methods reference these fields
 * (e.g. {@code rt1.getName()}), so leaving them unassigned caused a NullPointerException.
 */
@BeforeClass
public static void setUpBeforeClass() throws Exception {
    lilyProxy = new LilyProxy();

    InputStream is = BatchBuildTest.class.getResourceAsStream("solrschema.xml");
    byte[] solrSchema = IOUtils.toByteArray(is);
    IOUtils.closeQuietly(is);

    lilyProxy.start(solrSchema);
    solrProxy = lilyProxy.getSolrProxy();
    solrServer = solrProxy.getSolrServer();
    lilyServerProxy = lilyProxy.getLilyServerProxy();
    lilyClient = lilyServerProxy.getClient();
    repository = lilyClient.getRepository();
    typeManager = repository.getTypeManager();

    // Assign to the static fields (not locals) so the test methods can use them.
    ft1 = typeManager.createFieldType(
            "STRING", new QName("batchindex-test", "field1"), Scope.NON_VERSIONED);
    ft2 = typeManager.createFieldType(
            "LINK", new QName("batchindex-test", "linkField"), Scope.NON_VERSIONED);
    rt1 = typeManager
            .recordTypeBuilder()
            .defaultNamespace("batchindex-test")
            .name("rt1")
            .fieldEntry().use(ft1).add()
            .fieldEntry().use(ft2).add()
            .create();

    model = lilyServerProxy.getIndexerModel();

    is = BatchBuildTest.class.getResourceAsStream("indexerconf.xml");
    byte[] indexerConfiguration = IOUtils.toByteArray(is);
    IOUtils.closeQuietly(is);

    IndexDefinition index = model.newIndex(INDEX_NAME);
    Map<String, String> solrShards = new HashMap<String, String>();
    solrShards.put("shard1", "http://localhost:8983/solr");
    index.setSolrShards(solrShards);
    index.setConfiguration(indexerConfiguration);
    // No event subscription: indexing happens exclusively via explicit batch builds.
    index.setUpdateState(IndexUpdateState.DO_NOT_SUBSCRIBE);
    model.addIndex(index);
}
/**
 * Updates the batch-index configuration of the test index while holding the model lock.
 *
 * @param defaultConf new default batch-index configuration, or {@code null} to leave it untouched
 * @param customConf one-shot custom batch-index configuration, or {@code null} to leave it untouched
 * @param buildNow when {@code true}, also requests an immediate batch build
 * @throws Exception if locking or updating the index fails
 */
private static void setBatchIndexConf(byte[] defaultConf, byte[] customConf, boolean buildNow)
        throws Exception {
    String lockToken = model.lockIndex(INDEX_NAME);
    try {
        IndexDefinition indexDef = model.getMutableIndex(INDEX_NAME);
        if (defaultConf != null) {
            indexDef.setDefaultBatchIndexConfiguration(defaultConf);
        }
        if (customConf != null) {
            indexDef.setBatchIndexConfiguration(customConf);
        }
        if (buildNow) {
            indexDef.setBatchBuildState(IndexBatchBuildState.BUILD_REQUESTED);
        }
        model.updateIndex(indexDef, lockToken);
    } finally {
        // Always release the model lock, even when the update fails.
        model.unlockIndex(lockToken);
    }
}
private void waitForIndexAndCommit(long timeout) throws Exception { boolean indexSuccess = false; try { // Now wait until its finished long tryUntil = System.currentTimeMillis() + timeout; while (System.currentTimeMillis() < tryUntil) { Thread.sleep(100); IndexDefinition definition = model.getIndex(INDEX_NAME); if (definition.getBatchBuildState() == IndexBatchBuildState.INACTIVE) { Long amountFailed = definition.getLastBatchBuildInfo().getCounters().get(COUNTER_NUM_FAILED_RECORDS); boolean successFlag = definition.getLastBatchBuildInfo().getSuccess(); indexSuccess = successFlag && (amountFailed == null || amountFailed == 0L); if (!indexSuccess) { fail( "Batch index build did not finish successfully: success flag = " + successFlag + ", amount failed records = " + amountFailed + ", job state = " + definition.getLastBatchBuildInfo().getJobState() + ", job id = " + definition.getLastBatchBuildInfo().getJobId() + ", job url = " + definition.getLastBatchBuildInfo().getTrackingUrl()); } else { break; } } } } catch (Exception e) { throw new Exception("Error checking if batch index job ended.", e); } if (!indexSuccess) { fail("Batch build did not end after " + BUILD_TIMEOUT + " millis"); } else { solrServer.commit(); } }
/** * Test if the default batch index conf setting works * * @throws Exception */ @Test public void testDefaultBatchIndexConf() throws Exception { byte[] defaultConf = getResourceAsByteArray("defaultBatchIndexConf-test2.json"); setBatchIndexConf(defaultConf, null, false); String assertId = "batch-index-test2"; // // First create some content // repository .recordBuilder() .id(assertId) .recordType(rt1.getName()) .field(ft1.getName(), "test2 index") .create(); repository .recordBuilder() .id("batch-noindex-test2") .recordType(rt1.getName()) .field(ft1.getName(), "test2 noindex") .create(); // Now start the batch index this.buildAndCommit(); // Check if 1 record and not 2 are in the index QueryResponse response = solrServer.query(new SolrQuery("field1:test2*")); assertEquals(1, response.getResults().size()); assertEquals( "USER." + "batch-index-test2", response.getResults().get(0).getFieldValue("lily.id")); // check that the last used batch index conf = default IndexDefinition index = model.getMutableIndex(INDEX_NAME); assertEquals( JsonFormat.deserialize(defaultConf), JsonFormat.deserialize(index.getLastBatchBuildInfo().getBatchIndexConfiguration())); }
/**
 * Test setting a custom batch index conf.
 *
 * <p>Three build runs are exercised: run 1 uses the default conf (indexes two records),
 * run 2 uses a one-shot custom conf (indexes only one of the two changed records), and
 * run 3 falls back to the default conf again (indexes both changed records). After each
 * run, the last-build info is checked to confirm which conf was actually used.
 *
 * @throws Exception
 */
@Test
public void testCustomBatchIndexConf() throws Exception {
    byte[] defaultConf = getResourceAsByteArray("defaultBatchIndexConf-test2.json");
    setBatchIndexConf(defaultConf, null, false);

    String assertId1 = "batch-index-custom-test3";
    String assertId2 = "batch-index-test3";

    //
    // First create some content
    //
    Record recordToChange1 = repository
            .recordBuilder()
            .id(assertId2)
            .recordType(rt1.getName())
            .field(ft1.getName(), "test3 index run1")
            .create();
    Record recordToChange2 = repository
            .recordBuilder()
            .id(assertId1)
            .recordType(rt1.getName())
            .field(ft1.getName(), "test3 index run1")
            .create();
    repository
            .recordBuilder()
            .id("batch-noindex-test3")
            .recordType(rt1.getName())
            .field(ft1.getName(), "test3 noindex run1")
            .create();

    // Index everything with the default conf
    this.buildAndCommit();

    SolrDocumentList results = solrServer
            .query(new SolrQuery("field1:test3*").addSortField("lily.id", ORDER.asc))
            .getResults();
    assertEquals(2, results.size());
    assertEquals("USER." + assertId1, results.get(0).getFieldValue("lily.id"));
    assertEquals("USER." + assertId2, results.get(1).getFieldValue("lily.id"));

    // change some fields and reindex using a specific configuration. Only one of the 2 changes
    // should be picked up
    recordToChange1.setField(ft1.getName(), "test3 index run2");
    recordToChange2.setField(ft1.getName(), "test3 index run2");
    repository.update(recordToChange1);
    repository.update(recordToChange2);

    byte[] batchConf = getResourceAsByteArray("batchIndexConf-test3.json");
    // buildNow = true: the custom conf is consumed by this one build.
    setBatchIndexConf(defaultConf, batchConf, true);
    waitForIndexAndCommit(BUILD_TIMEOUT);

    // Check if 1 record and not 2 are in the index
    QueryResponse response = solrServer.query(new SolrQuery("field1:test3\\ index\\ run2"));
    assertEquals(1, response.getResults().size());
    assertEquals("USER."
            + assertId1, response.getResults().get(0).getFieldValue("lily.id"));

    // check that the last used batch index conf = the custom one
    assertEquals(
            JsonFormat.deserialize(batchConf),
            JsonFormat.deserialize(
                    model
                            .getMutableIndex(INDEX_NAME)
                            .getLastBatchBuildInfo()
                            .getBatchIndexConfiguration()));

    // Set things up for run 3 where the default configuration should be used again
    recordToChange1.setField(ft1.getName(), "test3 index run3");
    recordToChange2.setField(ft1.getName(), "test3 index run3");
    repository.update(recordToChange1);
    repository.update(recordToChange2);

    // Now rebuild the index and see if the default indexer has kicked in
    this.buildAndCommit();

    response = solrServer.query(
            new SolrQuery("field1:test3\\ index\\ run3").addSortField("lily.id", ORDER.asc));
    assertEquals(2, response.getResults().size());
    assertEquals("USER." + assertId1, response.getResults().get(0).getFieldValue("lily.id"));
    assertEquals("USER." + assertId2, response.getResults().get(1).getFieldValue("lily.id"));

    // check that the last used batch index conf = default
    assertEquals(
            JsonFormat.deserialize(defaultConf),
            JsonFormat.deserialize(
                    model
                            .getMutableIndex(INDEX_NAME)
                            .getLastBatchBuildInfo()
                            .getBatchIndexConfiguration()));
}
/**
 * Exercises the basic lifecycle of the indexer model over ZooKeeper: add, update under
 * lock, delete, and bulk-add of indexes, verifying after each step that the registered
 * listener received the matching events and that a second, independent model instance
 * (backed by its own ZK connection) observes the same state.
 */
@Test
public void testSomeBasics() throws Exception {
    // Two independent ZK connections so model1 and model2 behave like separate clients.
    ZooKeeperItf zk1 = ZkUtil.connect("localhost:" + ZK_CLIENT_PORT, 3000);
    ZooKeeperItf zk2 = ZkUtil.connect("localhost:" + ZK_CLIENT_PORT, 3000);
    WriteableIndexerModel model1 = null;
    WriteableIndexerModel model2 = null;
    try {
        TestListener listener = new TestListener();

        model1 = new IndexerModelImpl(zk1);
        model1.registerListener(listener);

        // Create an index
        IndexDefinition index1 = model1.newIndex("index1");
        index1.setConfiguration("<indexer/>".getBytes("UTF-8"));
        index1.setSolrShards(Collections.singletonMap("shard1", "http://localhost:8983/solr"));
        model1.addIndex(index1);

        listener.waitForEvents(1);
        listener.verifyEvents(new IndexerModelEvent(IndexerModelEventType.INDEX_ADDED, "index1"));

        // Verify that a fresh indexer model has the index
        model2 = new IndexerModelImpl(zk2);
        assertEquals(1, model2.getIndexes().size());
        assertTrue(model2.hasIndex("index1"));

        // Update the index (requires holding the model lock)
        index1.setGeneralState(IndexGeneralState.DISABLED);
        String lock = model1.lockIndex("index1");
        model1.updateIndex(index1, lock);

        listener.waitForEvents(1);
        listener.verifyEvents(new IndexerModelEvent(IndexerModelEventType.INDEX_UPDATED, "index1"));

        // Do not release the lock, updating through model2 should fail
        index1.setConfiguration("<indexer></indexer>".getBytes("UTF-8"));
        try {
            // Deliberately wrong lock token: must be rejected.
            model2.updateIndex(index1, lock + "foo");
            fail("expected exception");
        } catch (IndexUpdateException e) {
            // verify the exception says something about locks
            assertTrue(e.getMessage().indexOf("lock") != -1);
        }

        model1.unlockIndex(lock);

        model1.deleteIndex("index1");

        listener.waitForEvents(1);
        listener.verifyEvents(new IndexerModelEvent(IndexerModelEventType.INDEX_REMOVED, "index1"));

        // Create some more indexes (index2..index10) and expect one ADDED event each
        IndexerModelEvent[] expectedEvents = new IndexerModelEvent[9];
        for (int i = 2; i <= 10; i++) {
            String name = "index" + i;
            IndexDefinition index = model1.newIndex(name);
            index.setConfiguration("<indexer/>".getBytes("UTF-8"));
            index.setSolrShards(Collections.singletonMap("shard1", "http://localhost:8983/solr"));
            model1.addIndex(index);
            expectedEvents[i - 2] = new IndexerModelEvent(IndexerModelEventType.INDEX_ADDED, name);
        }
        listener.waitForEvents(9);
        listener.verifyEvents(expectedEvents);
    } finally {
        // Close models before their ZK connections; Closer.close is null-safe for the
        // models that may not have been constructed.
        Closer.close(model1);
        Closer.close(model2);
        Closer.close(zk1);
        Closer.close(zk2);
    }
}