/**
 * Registers with the search factory any of the given classes that are not yet present in
 * {@code knownClasses}. When {@code locked} is false the {@code mutating} lock is acquired
 * first and the method re-invokes itself with the lock held; the scan is redone under the
 * lock because another thread may have registered classes in the meantime.
 */
private void enableClassesIncrementally(Class<?>[] classes, boolean locked) {
  // Lazily allocated so the common "nothing new" path stays allocation-free.
  ArrayList<Class<?>> newTypes = null;
  for (Class<?> candidate : classes) {
    if (!knownClasses.containsKey(candidate)) {
      if (newTypes == null) {
        newTypes = new ArrayList<Class<?>>(classes.length);
      }
      newTypes.add(candidate);
    }
  }
  if (newTypes == null) {
    return; // every requested class is already known
  }
  if (!locked) {
    mutating.lock();
    try {
      enableClassesIncrementally(classes, true);
    } finally {
      mutating.unlock();
    }
    return;
  }
  // Build the merged class list (existing + new) and hand it to the search factory.
  Set<Class<?>> existingClasses = knownClasses.keySet();
  int cursor = existingClasses.size();
  Class[] merged = existingClasses.toArray(new Class[existingClasses.size() + newTypes.size()]);
  for (Class<?> added : newTypes) {
    merged[cursor++] = added;
  }
  searchFactory.addClasses(merged);
  // Record which of the new classes are actually indexed (not all bindings exist).
  for (Class<?> added : newTypes) {
    if (searchFactory.getIndexBindingForEntity(added) != null) {
      knownClasses.put(added, Boolean.TRUE);
    } else {
      knownClasses.put(added, Boolean.FALSE);
    }
  }
}
  /** Verifies that stopping the cache container also stops the embedded search factory. */
  public void testCorrectShutdown() {
    CacheContainer container = null;
    try {
      ConfigurationBuilder builder = new ConfigurationBuilder();
      builder
          .transaction()
          .transactionMode(TransactionMode.TRANSACTIONAL)
          .indexing()
          .index(Index.ALL)
          .addProperty("default.directory_provider", "ram")
          .addProperty("lucene_version", "LUCENE_CURRENT");
      container = TestCacheManagerFactory.createCacheManager(builder);
      Cache<?, ?> cache = container.getCache();
      SearchFactoryIntegrator searchFactory =
          TestingUtil.extractComponent(cache, SearchFactoryIntegrator.class);

      assert !searchFactory.isStopped();
      container.stop();
      assert searchFactory.isStopped();
    } finally {
      // Clean up even when an assertion failure or exception interrupts the test.
      TestingUtil.killCacheManagers(container);
    }
  }
  /**
   * Cache lifecycle callback: closes the search factory registered for the stopped cache, if
   * any, and removes the query interceptor from the cache's configuration.
   */
  @Override
  public void cacheStopped(ComponentRegistry cr, String cacheName) {
    // Remove-then-close so a repeated callback for the same cache name is a no-op.
    SearchFactoryIntegrator factory = searchFactoriesToShutdown.remove(cacheName);
    if (factory != null) {
      factory.close();
    }
    removeQueryInterceptorFromConfiguration(cr.getComponent(Configuration.class));
  }
 /**
  * Checks that the "ram" directory provider configured via properties was honoured for the
  * Person index. Depends on simpleIndexTest because otherwise the Person index is not
  * initialized yet.
  */
 @Test(dependsOnMethods = "simpleIndexTest")
 public void testPropertiesWhereRead() {
   SearchFactoryIntegrator searchFactory = TestQueryHelperFactory.extractSearchFactory(cache);
   DirectoryProvider[] providers = searchFactory.getDirectoryProviders(Person.class);
   assertEquals(1, providers.length);
   DirectoryProvider provider = providers[0];
   assertNotNull(provider);
   assertTrue(provider instanceof RAMDirectoryProvider);
 }
  /**
   * Invoked when somebody calls {@code cache.clear()}: after the clear proceeds down the
   * interceptor chain, purges the index of every known indexed class so the indexes stay in
   * sync with the emptied cache.
   */
  @Override
  public Object visitClearCommand(InvocationContext ctx, ClearCommand command) throws Throwable {
    // Let the clear happen first, then wipe the indexes.
    Object result = invokeNextInterceptor(ctx, command);

    if (shouldModifyIndexes(ctx)) {
      if (getLog().isTraceEnabled()) {
        getLog().trace("shouldModifyIndexes() is true and we can clear the indexes");
      }
      for (Class c : this.knownClasses.keySet()) {
        EntityIndexBinder binder = this.searchFactory.getIndexBindingForEntity(c);
        if (binder == null) {
          continue; // not all known classes are indexed
        }
        searchFactory
            .getWorker()
            .performWork(
                new Work<Object>(c, (Serializable) null, WorkType.PURGE_ALL),
                new TransactionalEventTransactionContext(
                    transactionManager, transactionSynchronizationRegistry));
      }
    }
    return result;
  }
 /**
  * Queues an UPDATE work unit so the Lucene document for this cache entry is refreshed.
  *
  * <p>The string form of the cache key (via {@code keyToString}) becomes the Lucene document
  * id; {@code value} is the entity whose index entry is (re)written.
  *
  * @param value the cache value to (re)index; must not be null
  * @param key the cache key; converted to the Lucene document id
  * @throws NullPointerException if {@code value} is null
  */
 protected void updateIndexes(Object value, Object key) {
   // The key here is the String representation of the key that is stored in the cache.
   // The key is going to be the documentID for Lucene.
   // The value parameter is the entity whose Lucene document gets updated.
   if (value == null) throw new NullPointerException("Cannot handle a null value!");
   TransactionContext transactionContext =
       new TransactionalEventTransactionContext(
           transactionManager, transactionSynchronizationRegistry);
   searchFactory
       .getWorker()
       .performWork(
           new Work<Object>(value, keyToString(key), WorkType.UPDATE), transactionContext);
 }
  /**
   * Registers the query module's JMX MBeans for the given cache: a statistics MBean (enabled
   * only when Infinispan JMX statistics are on) and a mass indexer MBean whose metadata comes
   * from the component metadata repository.
   *
   * @param cache the cache whose search factory and configuration are used
   * @param cr the cache's component registry, used to resolve the global configuration
   * @param cacheName the cache name, used to build the JMX group name
   * @throws CacheException if either MBean cannot be created or registered
   */
  private void registerQueryMBeans(AdvancedCache cache, ComponentRegistry cr, String cacheName) {
    Configuration cfg = cache.getCacheConfiguration();
    SearchFactoryIntegrator sf = getSearchFactory(cfg.indexing().properties(), cr);

    // Resolve MBean server instance
    GlobalConfiguration globalCfg = cr.getGlobalComponentRegistry().getGlobalConfiguration();
    mbeanServer = JmxUtil.lookupMBeanServer(globalCfg);

    // Resolve jmx domain to use for query mbeans
    String queryGroupName = getQueryGroupName(cacheName);
    jmxDomain = JmxUtil.buildJmxDomain(globalCfg, mbeanServer, queryGroupName);

    // Register statistics MBean, but only enable if Infinispan config says so
    Statistics stats = sf.getStatistics();
    stats.setStatisticsEnabled(cfg.jmxStatistics().enabled());
    try {
      ObjectName statsObjName =
          new ObjectName(jmxDomain + ":" + queryGroupName + ",component=Statistics");
      JmxUtil.registerMBean(new StatisticsInfo(stats), statsObjName, mbeanServer);
    } catch (Exception e) {
      throw new CacheException("Unable to register query module statistics mbean", e);
    }

    // Register mass indexer MBean, picking metadata from repo
    ManageableComponentMetadata metadata =
        metadataRepo.findComponentMetadata(MassIndexer.class).toManageableComponentMetadata();
    try {
      // TODO: MassIndexer should be some kind of query cache component?
      MapReduceMassIndexer massIndexer = new MapReduceMassIndexer(cache, sf);
      ResourceDMBean mbean = new ResourceDMBean(massIndexer, metadata);
      ObjectName massIndexerObjName =
          new ObjectName(
              jmxDomain + ":" + queryGroupName + ",component=" + metadata.getJmxObjectName());
      JmxUtil.registerMBean(mbean, massIndexerObjName, mbeanServer);
    } catch (Exception e) {
      // The original message was truncated ("Unable to create "); name the failing component.
      throw new CacheException("Unable to register mass indexer mbean", e);
    }
  }
 /**
  * Collects the distinct {@code IndexManager}s backing the given entity types, de-duplicated
  * by index name. Types without an index binding are silently skipped.
  */
 private Collection<IndexManager> uniqueIndexManagerForTypes(Collection<Class<?>> entityTypes) {
   // Keyed by index name so a backend shared by several types is returned only once.
   HashMap<String, IndexManager> byName = new HashMap<String, IndexManager>(entityTypes.size());
   for (Class<?> entityType : entityTypes) {
     EntityIndexBinding binding = searchFactoryImplementor.getIndexBinding(entityType);
     if (binding == null) {
       continue; // type is not indexed
     }
     for (IndexManager manager : binding.getIndexManagers()) {
       byName.put(manager.getIndexName(), manager);
     }
   }
   return byName.values();
 }
 /**
  * Routes a Lucene work unit to the appropriate index shards for its entity type.
  *
  * <p>When {@code forceAsync} is set the work is streamed via the streaming visitor;
  * otherwise it is queued per index and committed through the transactional visitor.
  */
 private void sendWorkToShards(LuceneWork work, boolean forceAsync) {
   final Class<?> entityType = work.getEntityClass();
   EntityIndexBinding binding = searchFactoryImplementor.getIndexBinding(entityType);
   IndexShardingStrategy shardingStrategy = binding.getSelectionStrategy();
   if (forceAsync) {
     work.getWorkDelegate(StreamingSelectionVisitor.INSTANCE)
         .performStreamOperation(work, shardingStrategy, progressMonitor, forceAsync);
     return;
   }
   WorkQueuePerIndexSplitter workContext = new WorkQueuePerIndexSplitter();
   work.getWorkDelegate(TransactionalSelectionVisitor.INSTANCE)
       .performOperation(work, shardingStrategy, workContext);
   // FIXME I need a "Force sync" actually for when using PurgeAll before the indexing starts
   workContext.commitOperations(progressMonitor);
 }
 /**
  * Creates a clustered query over the given cache.
  *
  * @param luceneQuery the Lucene query to execute cluster-wide
  * @param searchFactory search factory used to build the HSearch query
  * @param asyncExecutor executor used for asynchronous distributed work
  * @param cache the cache being queried
  * @param keyTransformationHandler translates between cache keys and document ids
  * @param classes entity types targeted by the query
  */
 public ClusteredCacheQueryImpl(
     Query luceneQuery,
     SearchFactoryIntegrator searchFactory,
     ExecutorService asyncExecutor,
     AdvancedCache<?, ?> cache,
     KeyTransformationHandler keyTransformationHandler,
     Class<?>... classes) {
   super(luceneQuery, searchFactory, cache, keyTransformationHandler, classes);
   this.asyncExecutor = asyncExecutor;
   this.marshaller =
       ComponentRegistryUtils.getComponent(
           cache, StreamingMarshaller.class, KnownComponentNames.CACHE_MARSHALLER);
   this.hSearchQuery =
       searchFactory
           .createHSQuery()
           .luceneQuery(luceneQuery)
           .targetedEntities(Arrays.asList(classes));
 }