/**
 * Provides access to IndexReaders. IndexReaders opened through this service need to be closed using
 * this service.
 *
 * @author Sanne Grinovero (C) 2011 Red Hat Inc.
 */
public class DefaultIndexReaderAccessor implements IndexReaderAccessor {

  private static final Log log = LoggerFactory.make();

  private final ImmutableSearchFactory searchFactory;

  public DefaultIndexReaderAccessor(ImmutableSearchFactory immutableSearchFactory) {
    this.searchFactory = immutableSearchFactory;
  }

  @Override
  public void close(IndexReader indexReader) {
    MultiReaderFactory.closeReader(indexReader);
  }

  @Override
  public IndexReader open(Class<?>... entities) {
    if (entities.length == 0) {
      throw log.needAtLeastOneIndexedEntityType();
    }

    HashMap<String, IndexManager> indexManagers = new HashMap<String, IndexManager>();
    for (Class<?> type : entities) {
      EntityIndexBinding entityIndexBinding = searchFactory.getSafeIndexBindingForEntity(type);
      IndexManager[] indexManagersForAllShards =
          entityIndexBinding.getSelectionStrategy().getIndexManagersForAllShards();
      for (IndexManager im : indexManagersForAllShards) {
        indexManagers.put(im.getIndexName(), im);
      }
    }
    IndexManager[] uniqueIndexManagers =
        indexManagers.values().toArray(new IndexManager[indexManagers.size()]);
    return MultiReaderFactory.openReader(uniqueIndexManagers);
  }

  @Override
  public IndexReader open(String... indexNames) {
    TreeSet<String> names = new TreeSet<String>();
    for (String name : indexNames) {
      if (name != null) {
        names.add(name);
      }
    }
    final int size = names.size();
    if (size == 0) {
      throw log.needAtLeastOneIndexName();
    }
    String[] indexManagerNames = names.toArray(new String[size]);
    IndexManagerHolder managerSource = searchFactory.getIndexManagerHolder();
    IndexManager[] managers = new IndexManager[size];
    for (int i = 0; i < size; i++) {
      String indexName = indexManagerNames[i];
      managers[i] = managerSource.getIndexManager(indexName);
      if (managers[i] == null) {
        throw log.requestedIndexNotDefined(indexName);
      }
    }
    return MultiReaderFactory.openReader(managers);
  }
}
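/**
 * Usage sketch (not part of the original sources): opening a reader through the accessor and
 * releasing it through the same accessor. {@code searchIntegrator} and the {@code Book} entity
 * are assumptions for illustration; {@code SearchIntegrator#getIndexReaderAccessor()} is the
 * usual entry point.
 */
class IndexReaderAccessorUsageSketch {

  void countDocuments(org.hibernate.search.spi.SearchIntegrator searchIntegrator) {
    IndexReaderAccessor readerAccessor = searchIntegrator.getIndexReaderAccessor();
    IndexReader reader = readerAccessor.open(Book.class); // hypothetical @Indexed entity
    try {
      System.out.println("Indexed documents: " + reader.numDocs());
    } finally {
      // readers opened through the accessor must be closed through it, never directly
      readerAccessor.close(reader);
    }
  }
}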
/**
 * Applies the index update operation using the Lucene operation {@link
 * org.apache.lucene.index.IndexWriter#updateDocument}. This is the most efficient way to update
 * the index, but the underlying store must guarantee that the term is unique across documents and
 * entity types.
 *
 * @author gustavonalle
 */
public final class ByTermUpdateWorkExecutor extends UpdateWorkExecutor {

  private static final Log log = LoggerFactory.make();

  private final AddWorkExecutor addDelegate;
  private final Workspace workspace;

  ByTermUpdateWorkExecutor(Workspace workspace, AddWorkExecutor addDelegate) {
    super(null, null);
    this.workspace = workspace;
    this.addDelegate = addDelegate;
  }

  @Override
  public void performWork(LuceneWork work, IndexWriterDelegate delegate, IndexingMonitor monitor) {
    final Serializable id = work.getId();
    final String tenantId = work.getTenantId();
    final Class<?> managedType = work.getEntityClass();
    DocumentBuilderIndexedEntity builder = workspace.getDocumentBuilder(managedType);
    try {
      if (DeleteWorkExecutor.isIdNumeric(builder)) {
        log.tracef(
            "Deleting %s#%s by query using an IndexWriter#updateDocument as id is Numeric",
            managedType, id);
        Query exactMatchQuery =
            NumericFieldUtils.createExactMatchQuery(builder.getIdKeywordName(), id);
        BooleanQuery.Builder deleteDocumentsQueryBuilder = new BooleanQuery.Builder();
        deleteDocumentsQueryBuilder.add(exactMatchQuery, Occur.FILTER);
        if (tenantId != null) {
          TermQuery tenantTermQuery =
              new TermQuery(new Term(DocumentBuilderIndexedEntity.TENANT_ID_FIELDNAME, tenantId));
          deleteDocumentsQueryBuilder.add(tenantTermQuery, Occur.FILTER);
        }
        delegate.deleteDocuments(deleteDocumentsQueryBuilder.build());
        // no need to log the Add operation as we'll log in the delegate
        this.addDelegate.performWork(work, delegate, monitor);
      } else {
        log.tracef("Updating %s#%s by id using an IndexWriter#updateDocument.", managedType, id);
        Term idTerm = new Term(builder.getIdKeywordName(), work.getIdInString());
        Map<String, String> fieldToAnalyzerMap = work.getFieldToAnalyzerMap();
        ScopedAnalyzerReference analyzerReference = builder.getAnalyzerReference();
        analyzerReference =
            AddWorkExecutor.updateAnalyzerMappings(
                workspace, analyzerReference, fieldToAnalyzerMap);
        delegate.updateDocument(idTerm, work.getDocument(), analyzerReference);
      }
      workspace.notifyWorkApplied(work);
    } catch (Exception e) {
      String message = "Unable to update " + managedType + "#" + id + " in index.";
      throw new SearchException(message, e);
    }
    if (monitor != null) {
      monitor.documentsAdded(1L);
    }
  }
}
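/**
 * Minimal sketch of the Lucene primitive the executor above delegates to: {@code
 * IndexWriter#updateDocument} atomically deletes all documents containing the given term and adds
 * the new document. It is only safe when the term is unique per entity; the "id" field name and
 * value below are illustrative.
 */
class UpdateByTermSketch {

  void update(org.apache.lucene.index.IndexWriter writer, org.apache.lucene.document.Document doc)
      throws java.io.IOException {
    // delete-then-add in a single atomic operation; "42" stands in for the entity identifier
    writer.updateDocument(new Term("id", "42"), doc);
  }
}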
  private static class Foo {

    private static final Log log = LoggerFactory.make();

    Foo(String bar) {
      if (bar == null) {
        throw log.parametersShouldNotBeNull("bar");
      }
    }
  }
/**
 * Stateless implementation that performs a <code>DeleteLuceneWork</code>.
 *
 * @author Emmanuel Bernard
 * @author Hardy Ferentschik
 * @author John Griffin
 * @author Sanne Grinovero
 * @see LuceneWorkVisitor
 * @see LuceneWorkDelegate
 */
class DeleteWorkDelegate implements LuceneWorkDelegate {

  private static final Log log = LoggerFactory.make();
  private final Workspace workspace;

  DeleteWorkDelegate(Workspace workspace) {
    this.workspace = workspace;
  }

  public void performWork(LuceneWork work, IndexWriter writer) {
    final Class<?> entityType = work.getEntityClass();
    final Serializable id = work.getId();
    log.tracef("Removing %s#%s by query.", entityType, id);
    DocumentBuilderIndexedEntity<?> builder = workspace.getDocumentBuilder(entityType);

    BooleanQuery entityDeletionQuery = new BooleanQuery();

    Query idQueryTerm;
    if (isIdNumeric(builder)) {
      idQueryTerm = NumericFieldUtils.createExactMatchQuery(builder.getIdKeywordName(), id);
    } else {
      idQueryTerm = new TermQuery(builder.getTerm(id));
    }
    entityDeletionQuery.add(idQueryTerm, BooleanClause.Occur.MUST);

    Term classNameQueryTerm = new Term(ProjectionConstants.OBJECT_CLASS, entityType.getName());
    TermQuery classNameQuery = new TermQuery(classNameQueryTerm);
    entityDeletionQuery.add(classNameQuery, BooleanClause.Occur.MUST);

    try {
      writer.deleteDocuments(entityDeletionQuery);
    } catch (Exception e) {
      String message = "Unable to remove " + entityType + "#" + id + " from index.";
      throw new SearchException(message, e);
    }
  }

  protected static boolean isIdNumeric(DocumentBuilderIndexedEntity<?> documentBuilder) {
    TwoWayFieldBridge idBridge = documentBuilder.getIdBridge();
    return idBridge instanceof NumericFieldBridge;
  }

  public void logWorkDone(LuceneWork work, MassIndexerProgressMonitor monitor) {
    // TODO Auto-generated method stub
  }
}
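/**
 * Sketch of the deletion pattern used above, in plain Lucene terms: the id term and the entity
 * class name are combined with MUST so that identical ids of different entity types sharing one
 * index don't collide. Field values are illustrative only.
 */
class DeleteByIdAndClassSketch {

  void delete(IndexWriter writer) throws java.io.IOException {
    BooleanQuery deletionQuery = new BooleanQuery();
    deletionQuery.add(new TermQuery(new Term("id", "42")), BooleanClause.Occur.MUST);
    deletionQuery.add(
        new TermQuery(new Term(ProjectionConstants.OBJECT_CLASS, "com.acme.Book")),
        BooleanClause.Occur.MUST);
    writer.deleteDocuments(deletionQuery);
  }
}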
/**
 * Opens a new IndexReader each time one is requested, rather than sharing instances.
 *
 * @author Emmanuel Bernard
 * @author Sanne Grinovero
 */
public class NotSharedReaderProvider implements DirectoryBasedReaderProvider {

  private static final Log log = LoggerFactory.make();

  private DirectoryProvider directoryProvider;
  private String indexName;

  @Override
  public IndexReader openIndexReader() {
    // #getDirectory must be invoked each time as the underlying directory might "dance" as in
    // org.hibernate.search.store.impl.FSSlaveDirectoryProvider
    Directory directory = directoryProvider.getDirectory();
    try {
      return IndexReader.open(directory, true);
    } catch (IOException e) {
      throw new SearchException("Could not open index \"" + indexName + "\"", e);
    }
  }

  @Override
  public void closeIndexReader(IndexReader reader) {
    try {
      reader.close();
    } catch (IOException e) {
      log.unableToCloseLuceneIndexReader(e);
    }
  }

  @Override
  public void initialize(DirectoryBasedIndexManager indexManager, Properties props) {
    directoryProvider = indexManager.getDirectoryProvider();
    indexName = indexManager.getIndexName();
  }

  @Override
  public void stop() {
    // nothing to do for this implementation
  }
}
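/**
 * Usage sketch: with this strategy every request pays the full cost of opening a fresh reader, in
 * exchange for always seeing the latest committed index state. The provider is assumed to have
 * been initialized by its IndexManager.
 */
class NotSharedReaderProviderUsageSketch {

  void runQuery(NotSharedReaderProvider readerProvider) {
    IndexReader reader = readerProvider.openIndexReader(); // fresh reader per request
    try {
      // ... execute searches against the reader ...
    } finally {
      readerProvider.closeIndexReader(reader); // must be closed exactly once
    }
  }
}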
/**
 * Provides access to Infinispan's CacheManager; a single CacheManager is shared by all caches. It
 * can be retrieved via JNDI or started by this ServiceProvider; in the latter case it will also be
 * stopped when no longer needed.
 *
 * @author Sanne Grinovero
 */
public class CacheManagerServiceProvider implements ServiceProvider<EmbeddedCacheManager> {

  private static final Log log = LoggerFactory.make(Log.class);

  /**
   * If no configuration is defined and no JNDI lookup name is provided, then a new Infinispan
   * CacheManager will be started using this configuration. Such a configuration file is provided
   * in Hibernate Search's jar.
   */
  public static final String DEFAULT_INFINISPAN_CONFIGURATION_RESOURCENAME =
      "default-hibernatesearch-infinispan.xml";

  /**
   * Reuses the same JNDI name as the Infinispan-based second level cache implementation.
   *
   * @see org.hibernate.cache.infinispan.JndiInfinispanRegionFactory#CACHE_MANAGER_RESOURCE_PROP
   */
  public static final String CACHE_MANAGER_RESOURCE_PROP =
      "hibernate.search.infinispan.cachemanager_jndiname";

  /**
   * The configuration property to use as key to define a custom configuration for Infinispan.
   * Ignored if hibernate.search.infinispan.cachemanager_jndiname is defined.
   */
  public static final String INFINISPAN_CONFIGURATION_RESOURCENAME =
      "hibernate.search.infinispan.configuration_resourcename";

  private EmbeddedCacheManager cacheManager;

  /** JNDI-retrieved CacheManagers are not started by us, so avoid attempting to close them. */
  private volatile boolean manageCacheManager = false;

  @Override
  public void start(Properties properties, BuildContext context) {
    String name = ConfigurationParseHelper.getString(properties, CACHE_MANAGER_RESOURCE_PROP, null);
    if (name == null) {
      // No JNDI lookup configured: start the CacheManager
      String cfgName =
          properties.getProperty(
              INFINISPAN_CONFIGURATION_RESOURCENAME, DEFAULT_INFINISPAN_CONFIGURATION_RESOURCENAME);
      try {
        InfinispanConfigurationParser ispnConfiguration =
            new InfinispanConfigurationParser(CacheManagerServiceProvider.class.getClassLoader());
        ConfigurationBuilderHolder configurationBuilderHolder =
            ispnConfiguration.parseFile(cfgName);
        cacheManager = new DefaultCacheManager(configurationBuilderHolder, true);
        manageCacheManager = true;
      } catch (IOException e) {
        throw new SearchException(
            "Could not start Infinispan CacheManager using configuration file: " + cfgName, e);
      }
    } else {
      // use the CacheManager via JNDI
      cacheManager =
          locateCacheManager(
              name, JNDIHelper.getJndiProperties(properties, JNDIHelper.HIBERNATE_JNDI_PREFIX));
      manageCacheManager = false;
    }
  }

  private EmbeddedCacheManager locateCacheManager(String jndiNamespace, Properties jndiProperties) {
    Context ctx = null;
    try {
      ctx = new InitialContext(jndiProperties);
      return (EmbeddedCacheManager) ctx.lookup(jndiNamespace);
    } catch (NamingException ne) {
      String msg = "Unable to retrieve CacheManager from JNDI [" + jndiNamespace + "]";
      log.unableToRetrieveCacheManagerFromJndi(jndiNamespace, ne);
      throw new SearchException(msg);
    } finally {
      if (ctx != null) {
        try {
          ctx.close();
        } catch (NamingException ne) {
          log.unableToReleaseInitialContext(ne);
        }
      }
    }
  }

  @Override
  public EmbeddedCacheManager getService() {
    return cacheManager;
  }

  @Override
  public void stop() {
    if (cacheManager != null && manageCacheManager) {
      cacheManager.stop();
    }
  }
}
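/**
 * Configuration sketch for the service above: the two mutually exclusive ways to obtain the
 * CacheManager. Keys are the constants defined above; the values shown are illustrative.
 */
class CacheManagerConfigurationSketch {

  java.util.Properties viaJndi() {
    java.util.Properties props = new java.util.Properties();
    // when the JNDI name is set, the configuration resource name is ignored
    props.setProperty(
        CacheManagerServiceProvider.CACHE_MANAGER_RESOURCE_PROP,
        "java:jboss/infinispan/container/hibernate-search"); // hypothetical JNDI name
    return props;
  }

  java.util.Properties viaClasspathResource() {
    java.util.Properties props = new java.util.Properties();
    // starts a new CacheManager from the named resource; it will be stopped by stop()
    props.setProperty(
        CacheManagerServiceProvider.INFINISPAN_CONFIGURATION_RESOURCENAME,
        "my-infinispan.xml"); // hypothetical resource on the classpath
    return props;
  }
}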
/**
 * Provides access to some default configuration settings (e.g. the default {@code Analyzer} or
 * the default {@code Similarity}) and checks whether certain optional libraries are available.
 *
 * @author Emmanuel Bernard
 * @author Hardy Ferentschik
 */
public final class ConfigContext {

  private static final Log log = LoggerFactory.make();

  /**
   * The default token for indexing null values. See {@link
   * org.hibernate.search.annotations.Field#indexNullAs()}
   */
  private static final String DEFAULT_NULL_INDEX_TOKEN = "_null_";

  /**
   * Constant used as definition point for a global (programmatic) analyzer definition. In this case
   * no annotated element is available to be used as definition point.
   */
  private static final String PROGRAMMATIC_ANALYZER_DEFINITION = "PROGRAMMATIC_ANALYZER_DEFINITION";

  /**
   * Constant used as definition point for a global (programmatic) filter definition. In this case
   * no annotated element is available to be used as definition point.
   */
  private static final String PROGRAMMATIC_FILTER_DEFINITION = "PROGRAMMATIC_FILTER_DEFINITION";

  /**
   * Used to keep track of duplicated analyzer definitions. The key of the map is the analyzer
   * definition name and the value is a string defining the location of the definition. In most
   * cases the fully specified class name together with the annotated element name is used. See also
   * {@link #PROGRAMMATIC_ANALYZER_DEFINITION}.
   */
  private final Map<String, String> analyzerDefinitionPoints = new HashMap<String, String>();

  /**
   * Used to keep track of duplicated filter definitions. The key of the map is the filter
   * definition name and the value is a string defining the location of the definition. In most
   * cases the fully specified class name together with the annotated element name is used.
   */
  private final Map<String, String> filterDefinitionPoints = new HashMap<String, String>();

  /**
   * Map of discovered analyzer definitions. The key of the map is the analyzer def name and the
   * value is the {@code AnalyzerDef} annotation.
   */
  private final Map<String, AnalyzerDef> analyzerDefs = new HashMap<String, AnalyzerDef>();

  /**
   * Map of discovered filter definitions. The key of the map is the filter def name and the value
   * is the {@code FilterDef} instance.
   */
  private final Map<String, FilterDef> filterDefs = new HashMap<String, FilterDef>();

  private final List<LuceneAnalyzerReference> lazyLuceneAnalyzerReferences =
      new ArrayList<LuceneAnalyzerReference>();
  private final List<RemoteAnalyzerReference> lazyRemoteAnalyzerReferences =
      new ArrayList<RemoteAnalyzerReference>();
  private final AnalyzerReference defaultLuceneAnalyzerReference;
  private final boolean jpaPresent;
  private final Version luceneMatchVersion;
  private final String nullToken;
  private final boolean implicitProvidedId;
  private final SearchMapping searchMapping;
  private final ServiceManager serviceManager;

  public ConfigContext(SearchConfiguration searchConfiguration, BuildContext buildContext) {
    this(searchConfiguration, buildContext, null);
  }

  public ConfigContext(
      SearchConfiguration searchConfiguration,
      BuildContext buildContext,
      SearchMapping searchMapping) {
    this.serviceManager = buildContext.getServiceManager();
    this.luceneMatchVersion = getLuceneMatchVersion(searchConfiguration);
    this.defaultLuceneAnalyzerReference = initDefaultLuceneAnalyzerReference(searchConfiguration);
    this.jpaPresent = isPresent("javax.persistence.Id");
    this.nullToken = initNullToken(searchConfiguration);
    this.implicitProvidedId = searchConfiguration.isIdProvidedImplicit();
    this.searchMapping = searchMapping;
  }

  public ServiceManager getServiceManager() {
    return serviceManager;
  }

  /**
   * Add an analyzer definition which was defined as an annotation.
   *
   * @param analyzerDef the analyzer definition annotation
   * @param annotatedElement the annotated element it was defined on
   */
  public void addAnalyzerDef(AnalyzerDef analyzerDef, XAnnotatedElement annotatedElement) {
    if (analyzerDef == null) {
      return;
    }
    addAnalyzerDef(analyzerDef, buildAnnotationDefinitionPoint(annotatedElement));
  }

  /**
   * Add a full-text filter definition which was defined as an annotation.
   *
   * @param filterDef the filter definition annotation
   * @param annotatedElement the annotated element it was defined on
   */
  public void addFullTextFilterDef(
      FullTextFilterDef filterDef, XAnnotatedElement annotatedElement) {
    if (filterDef == null) {
      return;
    }
    addFullTextFilterDef(filterDef, buildAnnotationDefinitionPoint(annotatedElement));
  }

  public void addGlobalAnalyzerDef(AnalyzerDef analyzerDef) {
    addAnalyzerDef(analyzerDef, PROGRAMMATIC_ANALYZER_DEFINITION);
  }

  public void addGlobalFullTextFilterDef(FullTextFilterDef filterDef) {
    addFullTextFilterDef(filterDef, PROGRAMMATIC_FILTER_DEFINITION);
  }

  private void addAnalyzerDef(AnalyzerDef analyzerDef, String annotationDefinitionPoint) {
    String analyzerDefinitionName = analyzerDef.name();

    if (analyzerDefinitionPoints.containsKey(analyzerDefinitionName)) {
      if (!analyzerDefinitionPoints.get(analyzerDefinitionName).equals(annotationDefinitionPoint)) {
        throw new SearchException(
            "Multiple analyzer definitions with the same name: " + analyzerDef.name());
      }
    } else {
      analyzerDefs.put(analyzerDefinitionName, analyzerDef);
      analyzerDefinitionPoints.put(analyzerDefinitionName, annotationDefinitionPoint);
    }
  }

  public AnalyzerReference buildLazyLuceneAnalyzerReference(String name) {
    final LuceneAnalyzerReference reference =
        new LuceneAnalyzerReference(new LazyLuceneAnalyzer(name));
    lazyLuceneAnalyzerReferences.add(reference);
    return reference;
  }

  public AnalyzerReference buildRemoteAnalyzerReference(String name) {
    final RemoteAnalyzerReference reference =
        new RemoteAnalyzerReference(new LazyRemoteAnalyzer(name));
    lazyRemoteAnalyzerReferences.add(reference);
    return reference;
  }

  /**
   * Initializes the default Lucene analyzer reference to use by reading the analyzer class from the
   * configuration and instantiating it.
   *
   * @param cfg The current configuration.
   * @return The default Lucene analyzer reference to use for tokenization.
   */
  @SuppressWarnings("unchecked")
  private AnalyzerReference initDefaultLuceneAnalyzerReference(SearchConfiguration cfg) {
    Class<? extends Analyzer> analyzerClass;
    String analyzerClassName = cfg.getProperty(Environment.ANALYZER_CLASS);
    if (analyzerClassName != null) {
      try {
        analyzerClass = ClassLoaderHelper.classForName(analyzerClassName, serviceManager);
      } catch (Exception e) {
        return buildLazyLuceneAnalyzerReference(analyzerClassName);
      }
    } else {
      analyzerClass = StandardAnalyzer.class;
    }
    Analyzer analyzer =
        ClassLoaderHelper.analyzerInstanceFromClass(analyzerClass, luceneMatchVersion);
    AnalyzerReference reference = new LuceneAnalyzerReference(analyzer);
    return reference;
  }

  private String initNullToken(SearchConfiguration cfg) {
    String defaultNullIndexToken = cfg.getProperty(Environment.DEFAULT_NULL_TOKEN);
    if (StringHelper.isEmpty(defaultNullIndexToken)) {
      defaultNullIndexToken = DEFAULT_NULL_INDEX_TOKEN;
    }
    return defaultNullIndexToken;
  }

  public String getDefaultNullToken() {
    return nullToken;
  }

  public AnalyzerReference getDefaultLuceneAnalyzerReference() {
    return defaultLuceneAnalyzerReference;
  }

  public Version getLuceneMatchVersion() {
    return luceneMatchVersion;
  }

  private void addFullTextFilterDef(FullTextFilterDef filterDef, String filterDefinitionPoint) {
    String filterDefinitionName = filterDef.name();

    if (filterDefinitionPoints.containsKey(filterDefinitionName)) {
      if (!filterDefinitionPoints.get(filterDefinitionName).equals(filterDefinitionPoint)) {
        throw new SearchException(
            "Multiple filter definitions with the same name: " + filterDef.name());
      }
    } else {
      filterDefinitionPoints.put(filterDefinitionName, filterDefinitionPoint);
      addFilterDef(filterDef);
    }
  }

  private void addFilterDef(FullTextFilterDef defAnn) {
    FilterDef filterDef = new FilterDef(defAnn);
    if (filterDef.getImpl().equals(ShardSensitiveOnlyFilter.class)) {
      // this is a placeholder; don't process it as a regular filter
      filterDefs.put(defAnn.name(), filterDef);
      return;
    }
    try {
      filterDef.getImpl().newInstance();
    } catch (IllegalAccessException e) {
      throw new SearchException(
          "Unable to create Filter class: " + filterDef.getImpl().getName(), e);
    } catch (InstantiationException e) {
      throw new SearchException(
          "Unable to create Filter class: " + filterDef.getImpl().getName(), e);
    }
    for (Method method : filterDef.getImpl().getMethods()) {
      if (method.isAnnotationPresent(Factory.class)) {
        if (filterDef.getFactoryMethod() != null) {
          throw new SearchException(
              "Multiple @Factory methods found for "
                  + defAnn.name()
                  + ": "
                  + filterDef.getImpl().getName()
                  + "."
                  + method.getName());
        }
        ReflectionHelper.setAccessible(method);
        filterDef.setFactoryMethod(method);
      }
      if (method.isAnnotationPresent(Key.class)) {
        if (filterDef.getKeyMethod() != null) {
          throw new SearchException(
              "Multiple @Key methods found for "
                  + defAnn.name()
                  + ": "
                  + filterDef.getImpl().getName()
                  + "."
                  + method.getName());
        }
        ReflectionHelper.setAccessible(method);
        filterDef.setKeyMethod(method);
      }

      String name = method.getName();
      if (name.startsWith("set") && method.getParameterTypes().length == 1) {
        filterDef.addSetter(Introspector.decapitalize(name.substring(3)), method);
      }
    }
    filterDefs.put(defAnn.name(), filterDef);
  }

  public Map<String, AnalyzerReference> initLazyAnalyzerReferences(
      IndexManagerHolder indexesFactory) {
    final Map<String, AnalyzerReference> initializedAnalyzers = new HashMap<>(analyzerDefs.size());

    for (LuceneAnalyzerReference lazyAnalyzerReference : lazyLuceneAnalyzerReferences) {
      initLazyLuceneAnalyzer(initializedAnalyzers, lazyAnalyzerReference);
    }

    for (RemoteAnalyzerReference remoteAnalyzerReference : lazyRemoteAnalyzerReferences) {
      initLazyRemoteAnalyzer(initializedAnalyzers, remoteAnalyzerReference, indexesFactory);
    }

    // init default remote analyzers
    initLazyRemoteAnalyzer(initializedAnalyzers, RemoteAnalyzerReference.DEFAULT, indexesFactory);
    initLazyRemoteAnalyzer(
        initializedAnalyzers, RemoteAnalyzerReference.PASS_THROUGH, indexesFactory);

    // initialize the remaining definitions
    for (Map.Entry<String, AnalyzerDef> entry : analyzerDefs.entrySet()) {
      if (!initializedAnalyzers.containsKey(entry.getKey())) {
        final Analyzer analyzer = buildAnalyzer(entry.getValue());
        final AnalyzerReference reference = new LuceneAnalyzerReference(analyzer);
        initializedAnalyzers.put(entry.getKey(), reference);
      }
    }
    return Collections.unmodifiableMap(initializedAnalyzers);
  }

  private void initLazyRemoteAnalyzer(
      Map<String, AnalyzerReference> initializedAnalyzers,
      RemoteAnalyzerReference lazyRemoteAnalyzerReference,
      IndexManagerHolder indexesFactory) {
    LazyRemoteAnalyzer lazyAnalyzer =
        (LazyRemoteAnalyzer) lazyRemoteAnalyzerReference.getAnalyzer();

    if (initializedAnalyzers.containsKey(lazyAnalyzer.getName())) {
      AnalyzerReference analyzerReference = initializedAnalyzers.get(lazyAnalyzer.getName());
      if (!(analyzerReference instanceof RemoteAnalyzerReference)) {
        throw log.remoteAnalyzerAlreadyDefinedAsLuceneAnalyzer(lazyAnalyzer.getName());
      }
      lazyAnalyzer.setDelegate(((RemoteAnalyzerReference) analyzerReference).getAnalyzer());
      return;
    }

    Collection<IndexManagerType> indexManagerTypes = indexesFactory.getIndexManagerTypes();
    for (IndexManagerType indexManagerType : indexManagerTypes) {
      if (indexManagerType instanceof RemoteAnalyzerProvider) {
        final RemoteAnalyzer remoteAnalyzer =
            ((RemoteAnalyzerProvider) indexManagerType).getRemoteAnalyzer(lazyAnalyzer.getName());
        lazyAnalyzer.setDelegate(remoteAnalyzer);
        initializedAnalyzers.put(
            lazyAnalyzer.getName(), new RemoteAnalyzerReference(remoteAnalyzer));
        break;
      }
    }
  }

  private void initLazyLuceneAnalyzer(
      Map<String, AnalyzerReference> initializedAnalyzers,
      LuceneAnalyzerReference lazyLuceneAnalyzerReference) {
    LazyLuceneAnalyzer lazyAnalyzer =
        (LazyLuceneAnalyzer) lazyLuceneAnalyzerReference.getAnalyzer();

    if (initializedAnalyzers.containsKey(lazyAnalyzer.getName())) {
      lazyAnalyzer.setDelegate(
          ((LuceneAnalyzerReference) initializedAnalyzers.get(lazyAnalyzer.getName()))
              .getAnalyzer());
      return;
    }

    if (!analyzerDefs.containsKey(lazyAnalyzer.getName())) {
      // Does not have a definition and it's not a remote analyzer
      throw new SearchException(
          "Analyzer found with an unknown definition: " + lazyAnalyzer.getName());
    }
    Analyzer analyzer = buildAnalyzer(analyzerDefs.get(lazyAnalyzer.getName()));
    lazyAnalyzer.setDelegate(analyzer);
    initializedAnalyzers.put(lazyAnalyzer.getName(), new LuceneAnalyzerReference(analyzer));
  }

  public Map<String, FilterDef> initFilters() {
    return Collections.unmodifiableMap(filterDefs);
  }

  private Analyzer buildAnalyzer(AnalyzerDef analyzerDef) {
    try {
      return AnalyzerBuilder.buildAnalyzer(analyzerDef, luceneMatchVersion, serviceManager);
    } catch (IOException e) {
      throw new SearchException("Could not initialize Analyzer definition " + analyzerDef, e);
    }
  }

  public boolean isJpaPresent() {
    return jpaPresent;
  }

  private boolean isPresent(String className) {
    try {
      ClassLoaderHelper.classForName(className, serviceManager);
      return true;
    } catch (Exception e) {
      return false;
    }
  }

  private Version getLuceneMatchVersion(SearchConfiguration cfg) {
    final Version version;
    String tmp = cfg.getProperty(Environment.LUCENE_MATCH_VERSION);
    if (StringHelper.isEmpty(tmp)) {
      log.recommendConfiguringLuceneVersion();
      version = Environment.DEFAULT_LUCENE_MATCH_VERSION;
    } else {
      try {
        version = Version.parseLeniently(tmp);
        if (log.isDebugEnabled()) {
          log.debug("Setting Lucene compatibility to Version " + version);
        }
      } catch (IllegalArgumentException e) {
        throw log.illegalLuceneVersionFormat(tmp, e.getMessage());
      } catch (ParseException e) {
        throw log.illegalLuceneVersionFormat(tmp, e.getMessage());
      }
    }
    return version;
  }

  /**
   * @param annotatedElement an annotated element
   * @return a string which identifies the location/point the annotation was placed on. Something of
   *     the form package.[[className].[field|member]]
   */
  private String buildAnnotationDefinitionPoint(XAnnotatedElement annotatedElement) {
    if (annotatedElement instanceof XClass) {
      return ((XClass) annotatedElement).getName();
    } else if (annotatedElement instanceof XMember) {
      XMember member = (XMember) annotatedElement;
      return member.getType().getName() + '.' + member.getName();
    } else if (annotatedElement instanceof XPackage) {
      return ((XPackage) annotatedElement).getName();
    } else {
      throw new SearchException("Unknown XAnnotatedElement: " + annotatedElement);
    }
  }

  /** @return true if we have to assume entities are annotated with @ProvidedId implicitly */
  public boolean isProvidedIdImplicit() {
    return implicitProvidedId;
  }

  /**
   * Returns class bridge instances configured via the programmatic API, if any. The returned map's
   * values are {@code @ClassBridge} annotations representing the corresponding analyzer etc.
   * configuration.
   *
   * @param type the type for which to return the configured class bridge instances
   * @return a map with class bridge instances and their configuration; May be empty but never
   *     {@code null}
   */
  public Map<FieldBridge, ClassBridge> getClassBridgeInstances(Class<?> type) {
    Map<FieldBridge, ClassBridge> classBridgeInstances = null;

    if (searchMapping != null) {
      EntityDescriptor entityDescriptor = searchMapping.getEntityDescriptor(type);
      if (entityDescriptor != null) {
        classBridgeInstances = entityDescriptor.getClassBridgeConfigurations();
      }
    }

    return classBridgeInstances != null
        ? classBridgeInstances
        : Collections.<FieldBridge, ClassBridge>emptyMap();
  }
}
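/**
 * Sketch of an analyzer definition as consumed by {@code ConfigContext#addAnalyzerDef}: declared
 * once on an annotated element. Registering another definition with the same name at a different
 * definition point triggers the "Multiple analyzer definitions" SearchException above. The
 * factories shown are standard Lucene factories; the definition name is illustrative.
 */
@AnalyzerDef(
    name = "lowercaseAnalyzer", // keys both analyzerDefs and analyzerDefinitionPoints
    tokenizer = @TokenizerDef(factory = StandardTokenizerFactory.class),
    filters = {@TokenFilterDef(factory = LowerCaseFilterFactory.class)})
class AnalyzerDefinitionSketch {}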
/**
 * A helper class dealing with the processing of annotations. It exists to share some annotation
 * processing between the document builder and other metadata classes, e.g. {@code FieldMetadata}.
 * In the long run this class might become obsolete.
 *
 * @author Hardy Ferentschik
 */
public final class AnnotationProcessingHelper {

  private static final Log log = LoggerFactory.make();

  private AnnotationProcessingHelper() {
    // not allowed
  }

  /**
   * Determines the Lucene {@link org.apache.lucene.document.Field.Index} from the given field (or
   * class bridge) settings.
   *
   * @param index whether the field is indexed or not
   * @param analyze whether the field should be analyzed
   * @param norms whether norms are to be added to the index
   * @return the Lucene {@link org.apache.lucene.document.Field.Index} value for the given field
   */
  public static Field.Index getIndex(Index index, Analyze analyze, Norms norms) {
    if (Index.YES.equals(index)) {
      if (Analyze.YES.equals(analyze)) {
        if (Norms.YES.equals(norms)) {
          return Field.Index.ANALYZED;
        } else {
          return Field.Index.ANALYZED_NO_NORMS;
        }
      } else {
        if (Norms.YES.equals(norms)) {
          return Field.Index.NOT_ANALYZED;
        } else {
          return Field.Index.NOT_ANALYZED_NO_NORMS;
        }
      }
    } else {
      return Field.Index.NO;
    }
  }

  public static Float getBoost(XProperty member, Annotation fieldAnn) {
    float computedBoost = 1.0f;
    Boost boostAnn = member.getAnnotation(Boost.class);
    if (boostAnn != null) {
      computedBoost = boostAnn.value();
    }
    if (fieldAnn != null) {
      float boost;
      if (fieldAnn instanceof org.hibernate.search.annotations.Field) {
        boost = ((org.hibernate.search.annotations.Field) fieldAnn).boost().value();
      } else if (fieldAnn instanceof Spatial) {
        boost = ((Spatial) fieldAnn).boost().value();
      } else {
        raiseAssertionOnIncorrectAnnotation(fieldAnn);
        boost = 0; // never reached
      }
      computedBoost *= boost;
    }
    return computedBoost;
  }

  public static BoostStrategy getDynamicBoost(final XAnnotatedElement element) {
    if (element == null) {
      return DefaultBoostStrategy.INSTANCE;
    }
    DynamicBoost boostAnnotation = element.getAnnotation(DynamicBoost.class);
    if (boostAnnotation == null) {
      return DefaultBoostStrategy.INSTANCE;
    }
    Class<? extends BoostStrategy> boostStrategyClass = boostAnnotation.impl();
    return ClassLoaderHelper.instanceFromClass(
        BoostStrategy.class, boostStrategyClass, "boost strategy");
  }

  public static Field.TermVector getTermVector(TermVector vector) {
    switch (vector) {
      case NO:
        return Field.TermVector.NO;
      case YES:
        return Field.TermVector.YES;
      case WITH_OFFSETS:
        return Field.TermVector.WITH_OFFSETS;
      case WITH_POSITIONS:
        return Field.TermVector.WITH_POSITIONS;
      case WITH_POSITION_OFFSETS:
        return Field.TermVector.WITH_POSITIONS_OFFSETS;
      default:
        throw new AssertionFailure("Unexpected TermVector: " + vector);
    }
  }

  public static AnalyzerReference getAnalyzerReference(
      org.hibernate.search.annotations.Analyzer analyzerAnn,
      ConfigContext configContext,
      boolean isRemote) {
    Class<?> analyzerClass = analyzerAnn == null ? void.class : analyzerAnn.impl();
    if (isRemote) {
      return remoteAnalyzerReferenceFromDefinition(analyzerAnn, configContext);
    } else {
      if (analyzerClass == void.class) {
        return luceneAnalyzerReferenceFromDefinition(analyzerAnn, configContext);
      } else {
        return luceneAnalyzerReferenceFromClass(configContext, analyzerClass);
      }
    }
  }

  private static AnalyzerReference remoteAnalyzerReferenceFromDefinition(
      org.hibernate.search.annotations.Analyzer analyzerAnn, ConfigContext configContext) {
    String definition = analyzerAnn == null ? "" : analyzerAnn.definition();
    if (StringHelper.isEmpty(definition)) {
      return null;
    }
    return configContext.buildRemoteAnalyzerReference(definition);
  }

  private static AnalyzerReference luceneAnalyzerReferenceFromDefinition(
      org.hibernate.search.annotations.Analyzer analyzerAnn, ConfigContext configContext) {
    String definition = analyzerAnn == null ? "" : analyzerAnn.definition();
    if (StringHelper.isEmpty(definition)) {
      return null;
    }
    return configContext.buildLazyLuceneAnalyzerReference(definition);
  }

  private static AnalyzerReference luceneAnalyzerReferenceFromClass(
      ConfigContext configContext, Class<?> analyzerClass) {
    try {
      // For now only local analyzer can be created from a class
      // this should be easy to extend to remote analyzer using a common interface/super-class
      Analyzer analyzer =
          ClassLoaderHelper.analyzerInstanceFromClass(
              analyzerClass, configContext.getLuceneMatchVersion());
      AnalyzerReference reference = new LuceneAnalyzerReference(analyzer);
      return reference;
    } catch (ClassCastException e) {
      throw new SearchException(
          "Lucene analyzer does not extend "
              + Analyzer.class.getName()
              + ": "
              + analyzerClass.getName(),
          e);
    } catch (Exception e) {
      throw new SearchException(
          "Failed to instantiate lucene analyzer with type " + analyzerClass.getName(), e);
    }
  }

  public static Integer getPrecisionStep(NumericField numericFieldAnn) {
    return numericFieldAnn == null
        ? NumericField.PRECISION_STEP_DEFAULT
        : numericFieldAnn.precisionStep();
  }

  public static String getFieldName(Annotation fieldAnn) {
    final String fieldName;
    if (fieldAnn instanceof org.hibernate.search.annotations.Field) {
      fieldName = ((org.hibernate.search.annotations.Field) fieldAnn).name();
    } else if (fieldAnn instanceof Spatial) {
      fieldName = ((Spatial) fieldAnn).name();
    } else if (fieldAnn instanceof SortableField) {
      fieldName = ((SortableField) fieldAnn).forField();
    } else if (fieldAnn instanceof NumericField) {
      fieldName = ((NumericField) fieldAnn).forField();
    } else {
      return raiseAssertionOnIncorrectAnnotation(fieldAnn);
    }
    return fieldName;
  }

  private static String raiseAssertionOnIncorrectAnnotation(Annotation fieldAnn) {
    throw new AssertionFailure(
        "Cannot process instances other than @Field, @Spatial, @NumericField and @SortableField."
            + " Found: "
            + fieldAnn.getClass());
  }
}
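/**
 * Usage sketch for {@code AnnotationProcessingHelper#getIndex}: the three annotation switches
 * collapse into a single Lucene {@code Field.Index} constant.
 */
class GetIndexSketch {

  void demo() {
    // indexed, not analyzed, with norms -> NOT_ANALYZED
    assert AnnotationProcessingHelper.getIndex(Index.YES, Analyze.NO, Norms.YES)
        == Field.Index.NOT_ANALYZED;
    // not indexed at all -> NO, regardless of the other two switches
    assert AnnotationProcessingHelper.getIndex(Index.NO, Analyze.YES, Norms.YES) == Field.Index.NO;
  }
}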
/**
 * A very simple implementation of {@code MassIndexerProgressMonitor} which uses the logger at INFO
 * level to output indexing speed statistics.
 *
 * @author Sanne Grinovero
 */
public class SimpleIndexingProgressMonitor implements MassIndexerProgressMonitor {

  private static final Log log = LoggerFactory.make();
  private final AtomicLong documentsDoneCounter = new AtomicLong();
  private final AtomicLong totalCounter = new AtomicLong();
  private volatile long startTime;
  private final int logAfterNumberOfDocuments;

  /** Logs progress of indexing job every 50 documents written. */
  public SimpleIndexingProgressMonitor() {
    this(50);
  }

  /**
   * Logs progress of indexing job every {@code logAfterNumberOfDocuments} documents written.
   *
   * @param logAfterNumberOfDocuments log each time the specified number of documents has been added
   */
  public SimpleIndexingProgressMonitor(int logAfterNumberOfDocuments) {
    this.logAfterNumberOfDocuments = logAfterNumberOfDocuments;
  }

  @Override
  public void entitiesLoaded(int size) {
    // not used
  }

  @Override
  public void documentsAdded(long increment) {
    long current = documentsDoneCounter.addAndGet(increment);
    if (current == increment) {
      startTime = System.nanoTime();
    }
    if (current % getStatusMessagePeriod() == 0) {
      printStatusMessage(startTime, totalCounter.get(), current);
    }
  }

  @Override
  public void documentsBuilt(int number) {
    // not used
  }

  @Override
  public void addToTotalCount(long count) {
    totalCounter.addAndGet(count);
    log.indexingEntities(count);
  }

  @Override
  public void indexingCompleted() {
    log.indexingEntitiesCompleted(totalCounter.get());
  }

  protected int getStatusMessagePeriod() {
    return logAfterNumberOfDocuments;
  }

  protected void printStatusMessage(long startTime, long totalTodoCount, long doneCount) {
    long elapsedMs = TimeUnit.NANOSECONDS.toMillis(System.nanoTime() - startTime);
    log.indexingDocumentsCompleted(doneCount, elapsedMs);
    float estimateSpeed = doneCount * 1000f / elapsedMs;
    float estimatePercentileComplete = doneCount * 100f / totalTodoCount;
    log.indexingSpeed(estimateSpeed, estimatePercentileComplete);
  }
}
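/**
 * Usage sketch (assuming a Hibernate Search {@code FullTextSession}): plugging the monitor into
 * the MassIndexer so that progress is logged every 1000 documents instead of the default 50.
 */
class ProgressMonitorUsageSketch {

  void reindex(org.hibernate.search.FullTextSession fullTextSession) throws InterruptedException {
    fullTextSession
        .createIndexer() // MassIndexer over all indexed entity types
        .progressMonitor(new SimpleIndexingProgressMonitor(1000))
        .startAndWait();
  }
}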
/**
 * Commit policy that commits at regular intervals defined by configuration, or immediately on
 * explicit flush requests.
 *
 * @author gustavonalle
 */
public final class ScheduledCommitPolicy extends AbstractCommitPolicy {

  public static final int DEFAULT_DELAY_MS = 1000;
  private static final Log log = LoggerFactory.make();

  private volatile ScheduledExecutorService scheduledExecutorService;
  private final ErrorHandler errorHandler;
  private final int delay;
  private final String indexName;
  private final AtomicBoolean running = new AtomicBoolean(false);

  public ScheduledCommitPolicy(
      IndexWriterHolder indexWriterHolder, String indexName, int delay, ErrorHandler errorHandler) {
    super(indexWriterHolder);
    this.indexName = indexName;
    this.delay = delay;
    this.errorHandler = errorHandler;
  }

  public int getDelay() {
    return delay;
  }

  @Override
  public void onChangeSetApplied(boolean someFailureHappened, boolean streaming) {
    if (!running.get()) {
      startScheduledExecutor();
    }
    if (someFailureHappened) {
      indexWriterHolder.forceLockRelease();
    }
  }

  /** Exposed as public method for tests only */
  public synchronized ScheduledExecutorService getScheduledExecutorService() {
    if (scheduledExecutorService == null) {
      // note: presumably Hibernate Search's own Executors helper (a named thread pool);
      // java.util.concurrent.Executors has no String-argument overload
      scheduledExecutorService =
          Executors.newScheduledThreadPool("Commit Scheduler for index " + indexName);
    }
    return scheduledExecutorService;
  }

  @Override
  public void onFlush() {
    indexWriterHolder.commitIndexWriter();
  }

  @Override
  public void onClose() {
    if (scheduledExecutorService != null) {
      stopScheduledExecutor();
    }
  }

  private synchronized void stopScheduledExecutor() {
    if (scheduledExecutorService == null) {
      return;
    }
    try {
      scheduledExecutorService.shutdown();
      scheduledExecutorService.awaitTermination(Long.MAX_VALUE, TimeUnit.SECONDS);
      running.set(false);
      scheduledExecutorService = null;
    } catch (InterruptedException e) {
      log.timedOutWaitingShutdown(indexName);
    }
  }

  private synchronized void startScheduledExecutor() {
    if (running.get()) {
      return;
    }
    getScheduledExecutorService()
        .scheduleWithFixedDelay(new CommitTask(), delay, delay, TimeUnit.MILLISECONDS);
    running.set(true);
  }

  private final class CommitTask implements Runnable {

    @Override
    public void run() {
      // This is technically running in a race condition with a possible shutdown
      // (the IndexWriter getting closed), which would cause an AlreadyClosedException,
      // but it gets swallowed as it's running in the service thread (which is also shutting down).
      try {
        indexWriterHolder.commitIndexWriter();
      } catch (Exception e) {
        errorHandler.handleException(
            "Error caught in background thread of ScheduledCommitPolicy", e);
      }
    }
  }
}
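/**
 * Self-contained sketch of the scheduling pattern above, using only java.util.concurrent: one
 * background thread runs a commit task at a fixed delay, and shutdown waits for the in-flight
 * task. The real policy adds lazy start, error handling and index-writer management.
 */
class ScheduledCommitSketch {

  private final java.util.concurrent.ScheduledExecutorService scheduler =
      java.util.concurrent.Executors.newSingleThreadScheduledExecutor();

  void start(Runnable commitTask, long delayMs) {
    // first run after delayMs, then delayMs after each completion (a delay, not a fixed rate)
    scheduler.scheduleWithFixedDelay(
        commitTask, delayMs, delayMs, java.util.concurrent.TimeUnit.MILLISECONDS);
  }

  void stop() throws InterruptedException {
    scheduler.shutdown();
    scheduler.awaitTermination(Long.MAX_VALUE, java.util.concurrent.TimeUnit.SECONDS);
  }
}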
/**
 * Queues work per transaction. If outside of a transaction, the work is executed right away.
 *
 * <p>When <code>hibernate.search.worker.type</code> is set to <code>async</code>, the work is
 * done in a separate thread (threads are pooled).
 *
 * @author Emmanuel Bernard
 */
public class PerTransactionWorker implements Worker {

  // note: there is only one Worker instance, reused concurrently for all sessions.

  private static final Log log = LoggerFactory.make();

  // this is used from different threads, but doesn't need a synchronized map
  // since there is no concurrent access for a given transaction
  protected final ConcurrentMap<Object, WorkQueueSynchronization> synchronizationPerTransaction =
      Maps.createIdentityWeakKeyConcurrentMap(64, 32);
  private QueueingProcessor queueingProcessor;
  private ExtendedSearchIntegrator factory;
  private InstanceInitializer instanceInitializer;

  private boolean transactionExpected;
  private boolean enlistInTransaction;

  @Override
  public void performWork(Work work, TransactionContext transactionContext) {
    final Class<?> entityType = instanceInitializer.getClassFromWork(work);
    EntityIndexBinding indexBindingForEntity = factory.getIndexBinding(entityType);
    if (indexBindingForEntity == null
        && factory.getDocumentBuilderContainedEntity(entityType) == null) {
      throw new SearchException(
          "Unable to perform work. Entity Class is not @Indexed nor hosts @ContainedIn: "
              + entityType);
    }
    work = interceptWork(indexBindingForEntity, work);
    if (work == null) {
      // nothing to do
      return;
    }
    if (transactionContext.isTransactionInProgress()) {
      final Object transactionIdentifier = transactionContext.getTransactionIdentifier();
      WorkQueueSynchronization txSync = synchronizationPerTransaction.get(transactionIdentifier);
      if (txSync == null || txSync.isConsumed()) {
        txSync = createTransactionWorkQueueSynchronization(transactionIdentifier);
        transactionContext.registerSynchronization(txSync);
        synchronizationPerTransaction.put(transactionIdentifier, txSync);
      }
      txSync.add(work);
    } else {
      if (transactionExpected) {
        // this is a workaround: isTransactionInProgress should return "true"
        // for correct configurations.
        log.pushedChangesOutOfTransaction();
      }
      WorkQueue queue = new WorkQueue(factory);
      queueingProcessor.add(work, queue);
      queueingProcessor.prepareWorks(queue);
      queueingProcessor.performWorks(queue);
    }
  }

  private WorkQueueSynchronization createTransactionWorkQueueSynchronization(
      Object transactionIdentifier) {
    if (enlistInTransaction) {
      return new InTransactionWorkQueueSynchronization(
          transactionIdentifier, queueingProcessor, synchronizationPerTransaction, factory);
    } else {
      return new PostTransactionWorkQueueSynchronization(
          transactionIdentifier, queueingProcessor, synchronizationPerTransaction, factory);
    }
  }

  private Work interceptWork(EntityIndexBinding indexBindingForEntity, Work work) {
    if (indexBindingForEntity == null) {
      return work;
    }
    EntityIndexingInterceptor interceptor = indexBindingForEntity.getEntityIndexingInterceptor();
    if (interceptor == null) {
      return work;
    }
    IndexingOverride operation;
    switch (work.getType()) {
      case ADD:
        operation = interceptor.onAdd(work.getEntity());
        break;
      case UPDATE:
        operation = interceptor.onUpdate(work.getEntity());
        break;
      case DELETE:
        operation = interceptor.onDelete(work.getEntity());
        break;
      case COLLECTION:
        operation = interceptor.onCollectionUpdate(work.getEntity());
        break;
      case PURGE:
      case PURGE_ALL:
      case INDEX:
      case DELETE_BY_QUERY:
        operation = IndexingOverride.APPLY_DEFAULT;
        break;
      default:
        throw new AssertionFailure("Unknown work type: " + work.getType());
    }
    Work result = work;
    Class<?> entityClass = work.getEntityClass();
    switch (operation) {
      case APPLY_DEFAULT:
        break;
      case SKIP:
        result = null;
        log.forceSkipIndexOperationViaInterception(entityClass, work.getType());
        break;
      case UPDATE:
        result =
            new Work(work.getTenantIdentifier(), work.getEntity(), work.getId(), WorkType.UPDATE);
        log.forceUpdateOnIndexOperationViaInterception(entityClass, work.getType());
        break;
      case REMOVE:
        // This works because other Work constructors are never used from WorkType ADD, UPDATE,
        // REMOVE, COLLECTION
        // TODO should we force isIdentifierRollback to false if the operation is not a delete?
        result =
            new Work(
                work.getTenantIdentifier(),
                work.getEntity(),
                work.getId(),
                WorkType.DELETE,
                work.isIdentifierWasRolledBack());
        log.forceRemoveOnIndexOperationViaInterception(entityClass, work.getType());
        break;
      default:
        throw new AssertionFailure("Unknown action type: " + operation);
    }
    return result;
  }

  @Override
  public void initialize(
      Properties props, WorkerBuildContext context, QueueingProcessor queueingProcessor) {
    this.queueingProcessor = queueingProcessor;
    this.factory = context.getUninitializedSearchIntegrator();
    this.transactionExpected = context.isTransactionManagerExpected();
    this.instanceInitializer = context.getInstanceInitializer();
    this.enlistInTransaction =
        ConfigurationParseHelper.getBooleanValue(
            props, Environment.WORKER_ENLIST_IN_TRANSACTION, false);
  }

  @Override
  public void close() {}

  @Override
  public void flushWorks(TransactionContext transactionContext) {
    if (transactionContext.isTransactionInProgress()) {
      Object transaction = transactionContext.getTransactionIdentifier();
      WorkQueueSynchronization txSync = synchronizationPerTransaction.get(transaction);
      if (txSync != null && !txSync.isConsumed()) {
        txSync.flushWorks();
      }
    }
  }
}
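/**
 * Sketch of the interceptor hook consulted by {@code interceptWork} above: a hypothetical
 * interceptor that skips indexing of soft-deleted entities and otherwise applies the defaults.
 * {@code Book} and its {@code isDeleted()} method are assumptions for illustration.
 */
class SoftDeleteInterceptorSketch implements EntityIndexingInterceptor<Book> {

  @Override
  public IndexingOverride onAdd(Book entity) {
    return entity.isDeleted() ? IndexingOverride.SKIP : IndexingOverride.APPLY_DEFAULT;
  }

  @Override
  public IndexingOverride onUpdate(Book entity) {
    // turn updates of soft-deleted entities into index removals
    return entity.isDeleted() ? IndexingOverride.REMOVE : IndexingOverride.APPLY_DEFAULT;
  }

  @Override
  public IndexingOverride onDelete(Book entity) {
    return IndexingOverride.APPLY_DEFAULT;
  }

  @Override
  public IndexingOverride onCollectionUpdate(Book entity) {
    return onUpdate(entity);
  }
}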
/**
 * Various utilities to transform Hibernate Search API into Elasticsearch JSON.
 *
 * @author Guillaume Smet
 * @author Gunnar Morling
 */
public class ToElasticsearch {

  private static final Log LOG = LoggerFactory.make(Log.class);

  private ToElasticsearch() {}

  public static void addFacetingRequest(
      JsonBuilder.Object jsonQuery, FacetingRequest facetingRequest) {
    String fieldName = facetingRequest.getFieldName();
    if (facetingRequest instanceof DiscreteFacetRequest) {
      JsonObject termsJsonQuery =
          JsonBuilder.object()
              .add(
                  "terms",
                  JsonBuilder.object()
                      .addProperty("field", fieldName)
                      .addProperty(
                          "size",
                          facetingRequest.getMaxNumberOfFacets() == -1
                              ? 0
                              : facetingRequest.getMaxNumberOfFacets())
                      .add("order", fromFacetSortOrder(facetingRequest.getSort()))
                      .addProperty(
                          "min_doc_count", facetingRequest.hasZeroCountsIncluded() ? 0 : 1))
              .build();

      if (isNested(fieldName)) {
        JsonBuilder.Object facetJsonQuery = JsonBuilder.object();
        facetJsonQuery.add(
            "nested",
            JsonBuilder.object().addProperty("path", FieldHelper.getEmbeddedFieldPath(fieldName)));
        facetJsonQuery.add(
            "aggregations",
            JsonBuilder.object().add(facetingRequest.getFacetingName(), termsJsonQuery));
        jsonQuery.add(facetingRequest.getFacetingName(), facetJsonQuery);
      } else {
        jsonQuery.add(facetingRequest.getFacetingName(), termsJsonQuery);
      }
    } else if (facetingRequest instanceof RangeFacetRequest<?>) {
      RangeFacetRequest<?> rangeFacetingRequest = (RangeFacetRequest<?>) facetingRequest;
      for (FacetRange<?> facetRange : rangeFacetingRequest.getFacetRangeList()) {
        JsonBuilder.Object comparisonFragment = JsonBuilder.object();
        if (facetRange.getMin() != null) {
          comparisonFragment.addProperty(
              facetRange.isMinIncluded() ? "gte" : "gt", facetRange.getMin());
        }
        if (facetRange.getMax() != null) {
          comparisonFragment.addProperty(
              facetRange.isMaxIncluded() ? "lte" : "lt", facetRange.getMax());
        }

        JsonObject rangeQuery =
            wrapQueryForNestedIfRequired(
                fieldName,
                JsonBuilder.object()
                    .add("range", JsonBuilder.object().add(fieldName, comparisonFragment))
                    .build());

        jsonQuery.add(
            facetingRequest.getFacetingName() + "-" + facetRange.getIdentifier(),
            JsonBuilder.object().add("filter", rangeQuery));
      }
    } else {
      throw new IllegalArgumentException(
          "Faceting request of type " + facetingRequest.getClass().getName() + " not supported");
    }
  }

  private static JsonObject fromFacetSortOrder(FacetSortOrder sortOrder) {
    JsonObject sort = new JsonObject();
    switch (sortOrder) {
      case COUNT_ASC:
        sort.addProperty("_count", "asc");
        break;
      case COUNT_DESC:
        sort.addProperty("_count", "desc");
        break;
      case FIELD_VALUE:
        sort.addProperty("_term", "asc");
        break;
      case RANGE_DEFINITION_ORDER:
        throw LOG.cannotSendRangeDefinitionOrderToElasticsearchBackend();
    }
    return sort;
  }

  public static JsonObject condition(String operator, JsonArray conditions) {
    JsonObject jsonCondition;
    if (conditions.size() == 1) {
      jsonCondition = conditions.get(0).getAsJsonObject();
    } else {
      jsonCondition =
          JsonBuilder.object().add("bool", JsonBuilder.object().add(operator, conditions)).build();
    }
    return jsonCondition;
  }

  public static JsonObject fromLuceneQuery(Query query) {
    if (query instanceof MatchAllDocsQuery) {
      return convertMatchAllDocsQuery((MatchAllDocsQuery) query);
    } else if (query instanceof TermQuery) {
      return convertTermQuery((TermQuery) query);
    } else if (query instanceof BooleanQuery) {
      return convertBooleanQuery((BooleanQuery) query);
    } else if (query instanceof TermRangeQuery) {
      return convertTermRangeQuery((TermRangeQuery) query);
    } else if (query instanceof NumericRangeQuery) {
      return convertNumericRangeQuery((NumericRangeQuery<?>) query);
    } else if (query instanceof WildcardQuery) {
      return convertWildcardQuery((WildcardQuery) query);
    } else if (query instanceof FuzzyQuery) {
      return convertFuzzyQuery((FuzzyQuery) query);
    } else if (query instanceof RemotePhraseQuery) {
      return convertRemotePhraseQuery((RemotePhraseQuery) query);
    } else if (query instanceof RemoteMatchQuery) {
      return convertRemoteMatchQuery((RemoteMatchQuery) query);
    } else if (query instanceof ConstantScoreQuery) {
      return convertConstantScoreQuery((ConstantScoreQuery) query);
    } else if (query instanceof FilteredQuery) {
      return convertFilteredQuery((FilteredQuery) query);
    } else if (query instanceof Filter) {
      return fromLuceneFilter((Filter) query);
    } else if (query instanceof PhraseQuery) {
      return convertPhraseQuery((PhraseQuery) query);
    }

    throw LOG.cannotTransformLuceneQueryIntoEsQuery(query);
  }

  public static JsonObject fromDeletionQuery(
      DocumentBuilderIndexedEntity documentBuilder, DeletionQuery deletionQuery) {
    return fromLuceneQuery(deletionQuery.toLuceneQuery(documentBuilder));
  }

  private static JsonObject convertMatchAllDocsQuery(MatchAllDocsQuery matchAllDocsQuery) {
    return JsonBuilder.object().add("match_all", new JsonObject()).build();
  }

  private static JsonObject convertBooleanQuery(BooleanQuery booleanQuery) {
    JsonArray musts = new JsonArray();
    JsonArray shoulds = new JsonArray();
    JsonArray mustNots = new JsonArray();
    JsonArray filters = new JsonArray();

    for (BooleanClause clause : booleanQuery.clauses()) {
      switch (clause.getOccur()) {
        case MUST:
          musts.add(fromLuceneQuery(clause.getQuery()));
          break;
        case FILTER:
          filters.add(fromLuceneQuery(clause.getQuery()));
          break;
        case MUST_NOT:
          mustNots.add(fromLuceneQuery(clause.getQuery()));
          break;
        case SHOULD:
          shoulds.add(fromLuceneQuery(clause.getQuery()));
          break;
      }
    }

    JsonObject clauses = new JsonObject();

    if (musts.size() > 1) {
      clauses.add("must", musts);
    } else if (musts.size() == 1) {
      clauses.add("must", musts.iterator().next());
    }

    if (shoulds.size() > 1) {
      clauses.add("should", shoulds);
    } else if (shoulds.size() == 1) {
      clauses.add("should", shoulds.iterator().next());
    }

    if (mustNots.size() > 1) {
      clauses.add("must_not", mustNots);
    } else if (mustNots.size() == 1) {
      clauses.add("must_not", mustNots.iterator().next());
    }

    if (filters.size() > 1) {
      clauses.add("filter", filters);
    } else if (filters.size() == 1) {
      clauses.add("filter", filters.iterator().next());
    }

    JsonObject bool = new JsonObject();
    bool.add("bool", clauses);
    return bool;
  }
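
  // Shape sketch: a BooleanQuery with one MUST and one MUST_NOT clause becomes
  //   {"bool":{"must":{...},"must_not":{...}}}
  // while two clauses of the same occur produce a JSON array for that entry, as handled above.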

  private static JsonObject convertTermQuery(TermQuery termQuery) {
    String field = termQuery.getTerm().field();

    JsonObject matchQuery =
        JsonBuilder.object()
            .add(
                "term",
                JsonBuilder.object()
                    .add(
                        field,
                        JsonBuilder.object()
                            .addProperty("value", termQuery.getTerm().text())
                            .addProperty("boost", termQuery.getBoost())))
            .build();

    return wrapQueryForNestedIfRequired(field, matchQuery);
  }

  private static JsonObject convertWildcardQuery(WildcardQuery query) {
    String field = query.getTerm().field();

    JsonObject wildcardQuery =
        JsonBuilder.object()
            .add(
                "wildcard",
                JsonBuilder.object()
                    .add(
                        field,
                        JsonBuilder.object()
                            .addProperty("value", query.getTerm().text())
                            .addProperty("boost", query.getBoost())))
            .build();

    return wrapQueryForNestedIfRequired(field, wildcardQuery);
  }

  private static JsonObject convertFuzzyQuery(FuzzyQuery query) {
    String field = query.getTerm().field();

    JsonObject fuzzyQuery =
        JsonBuilder.object()
            .add(
                "fuzzy",
                JsonBuilder.object()
                    .add(
                        field,
                        JsonBuilder.object()
                            .addProperty("value", query.getTerm().text())
                            .addProperty("fuzziness", query.getMaxEdits())
                            .addProperty("prefix_length", query.getPrefixLength())
                            .addProperty("boost", query.getBoost())))
            .build();

    return wrapQueryForNestedIfRequired(field, fuzzyQuery);
  }

  /**
   * This is best effort only: the PhraseQuery may contain multiple terms at the same position
   * (think synonyms) or gaps (think stopwords), and in that case it is impossible to translate it
   * into a correct Elasticsearch query.
   */
  private static JsonObject convertPhraseQuery(PhraseQuery query) {
    Term[] terms = query.getTerms();

    if (terms.length == 0) {
      throw LOG.cannotQueryOnEmptyPhraseQuery();
    }

    String field = terms[0].field(); // phrase queries only support one field
    StringBuilder phrase = new StringBuilder();
    for (Term term : terms) {
      phrase.append(" ").append(term.text());
    }

    JsonObject phraseQuery =
        JsonBuilder.object()
            .add(
                "match_phrase",
                JsonBuilder.object()
                    .add(
                        field,
                        JsonBuilder.object()
                            .addProperty("query", phrase.toString().trim())
                            .addProperty("slop", query.getSlop())
                            .addProperty("boost", query.getBoost())))
            .build();

    return wrapQueryForNestedIfRequired(field, phraseQuery);
  }
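
  /*
   * Illustrative sketch (not part of the original source): a PhraseQuery over the
   * terms "quick", "brown", "fox" is flattened back into a single match_phrase
   * string, which is why the conversion above is best effort only. Assumes
   * PhraseQuery.Builder is available (Lucene 5.3+).
   */
  private static JsonObject examplePhraseConversion() {
    PhraseQuery.Builder builder = new PhraseQuery.Builder();
    builder.add(new Term("body", "quick"));
    builder.add(new Term("body", "brown"));
    builder.add(new Term("body", "fox"));
    // Expected shape: { "match_phrase": { "body": { "query": "quick brown fox", "slop": 0, ... } } }
    return convertPhraseQuery(builder.build());
  }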

  private static JsonObject convertRemotePhraseQuery(RemotePhraseQuery query) {
    if (StringHelper.isEmpty(query.getPhrase())) {
      throw LOG.cannotQueryOnEmptyPhraseQuery();
    }

    JsonObject phraseQuery =
        JsonBuilder.object()
            .add(
                "match_phrase",
                JsonBuilder.object()
                    .add(
                        query.getField(),
                        JsonBuilder.object()
                            .addProperty("query", query.getPhrase().trim())
                            .addProperty(
                                "analyzer",
                                query
                                    .getAnalyzerReference()
                                    .getAnalyzer()
                                    .getName(query.getField()))
                            .addProperty("slop", query.getSlop())
                            .addProperty("boost", query.getBoost())))
            .build();

    return wrapQueryForNestedIfRequired(query.getField(), phraseQuery);
  }

  private static JsonObject convertRemoteMatchQuery(RemoteMatchQuery query) {
    JsonObject matchQuery =
        JsonBuilder.object()
            .add(
                "match",
                JsonBuilder.object()
                    .add(
                        query.getField(),
                        JsonBuilder.object()
                            .addProperty("query", query.getSearchTerms())
                            .addProperty(
                                "analyzer",
                                query
                                    .getAnalyzerReference()
                                    .getAnalyzer()
                                    .getName(query.getField()))
                            .addProperty("fuzziness", query.getMaxEditDistance())
                            .addProperty("boost", query.getBoost())))
            .build();

    return wrapQueryForNestedIfRequired(query.getField(), matchQuery);
  }

  private static JsonObject convertTermRangeQuery(TermRangeQuery query) {
    JsonObject interval = new JsonObject();

    if (query.getLowerTerm() != null) {
      interval.addProperty(
          query.includesLower() ? "gte" : "gt", query.getLowerTerm().utf8ToString());
    }
    if (query.getUpperTerm() != null) {
      interval.addProperty(
          query.includesUpper() ? "lte" : "lt", query.getUpperTerm().utf8ToString());
    }
    interval.addProperty("boost", query.getBoost());

    JsonObject range =
        JsonBuilder.object()
            .add("range", JsonBuilder.object().add(query.getField(), interval))
            .build();

    return wrapQueryForNestedIfRequired(query.getField(), range);
  }

  private static JsonObject convertNumericRangeQuery(NumericRangeQuery<?> query) {
    JsonObject interval = new JsonObject();
    if (query.getMin() != null) {
      interval.addProperty(query.includesMin() ? "gte" : "gt", query.getMin());
    }
    if (query.getMax() != null) {
      interval.addProperty(query.includesMax() ? "lte" : "lt", query.getMax());
    }
    interval.addProperty("boost", query.getBoost());

    JsonObject range =
        JsonBuilder.object()
            .add("range", JsonBuilder.object().add(query.getField(), interval))
            .build();

    return wrapQueryForNestedIfRequired(query.getField(), range);
  }
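
  /*
   * Illustrative sketch (not part of the original source): a half-open numeric
   * range [10, 20) maps inclusive bounds to "gte"/"lte" and exclusive bounds to
   * "gt"/"lt", as implemented above.
   */
  private static JsonObject exampleNumericRangeConversion() {
    NumericRangeQuery<Integer> query = NumericRangeQuery.newIntRange("price", 10, 20, true, false);
    // Expected shape: { "range": { "price": { "gte": 10, "lt": 20, "boost": 1.0 } } }
    return convertNumericRangeQuery(query);
  }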

  private static JsonObject convertConstantScoreQuery(ConstantScoreQuery query) {
    JsonObject constantScoreQuery =
        JsonBuilder.object()
            .add(
                "constant_score",
                JsonBuilder.object()
                    .add("filter", fromLuceneQuery(query.getQuery()))
                    .addProperty("boost", query.getBoost()))
            .build();

    return constantScoreQuery;
  }

  private static JsonObject convertFilteredQuery(FilteredQuery query) {
    JsonObject filteredQuery =
        JsonBuilder.object()
            .add(
                "filtered",
                JsonBuilder.object()
                    .add("query", fromLuceneQuery(query.getQuery()))
                    .add("filter", fromLuceneQuery(query.getFilter()))
                    .addProperty("boost", query.getBoost()))
            .build();

    return filteredQuery;
  }

  private static JsonObject convertDistanceFilter(DistanceFilter filter) {
    JsonObject distanceQuery =
        JsonBuilder.object()
            .add(
                "geo_distance",
                JsonBuilder.object()
                    .addProperty("distance", filter.getRadius() + "km")
                    .add(
                        filter.getCoordinatesField(),
                        JsonBuilder.object()
                            .addProperty("lat", filter.getCenter().getLatitude())
                            .addProperty("lon", filter.getCenter().getLongitude())))
            .build();

    distanceQuery = wrapQueryForNestedIfRequired(filter.getCoordinatesField(), distanceQuery);

    // We only implement the previous-filter optimization when using the hash method:
    // Elasticsearch automatically optimizes the geo_distance query with a bounding box
    // filter, so we don't need to do it ourselves when using the range method.
    Filter previousFilter = filter.getPreviousFilter();
    if (previousFilter instanceof SpatialHashFilter) {
      distanceQuery =
          JsonBuilder.object()
              .add(
                  "filtered",
                  JsonBuilder.object()
                      .add("query", distanceQuery)
                      .add("filter", convertSpatialHashFilter((SpatialHashFilter) previousFilter)))
              .build();
    }

    return distanceQuery;
  }
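
  /*
   * Illustrative note (not part of the original source): for a 50km radius around
   * (24.0, 31.5) on field "location", convertDistanceFilter produces
   * { "geo_distance": { "distance": "50.0km", "location": { "lat": 24.0, "lon": 31.5 } } },
   * optionally wrapped in a "filtered" query when the chained filter is a SpatialHashFilter.
   */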

  private static JsonObject convertSpatialHashFilter(SpatialHashFilter filter) {
    JsonArray cellsIdsJsonArray = new JsonArray();
    for (String cellId : filter.getSpatialHashCellsIds()) {
      cellsIdsJsonArray.add(cellId);
    }

    JsonObject spatialHashFilter =
        JsonBuilder.object()
            .add("terms", JsonBuilder.object().add(filter.getFieldName(), cellsIdsJsonArray))
            .build();

    return wrapQueryForNestedIfRequired(filter.getFieldName(), spatialHashFilter);
  }

  private static JsonObject wrapQueryForNestedIfRequired(String field, JsonObject query) {
    if (!isNested(field)) {
      return query;
    }
    String path = FieldHelper.getEmbeddedFieldPath(field);

    return JsonBuilder.object()
        .add("nested", JsonBuilder.object().addProperty("path", path).add("query", query))
        .build();
  }

  private static boolean isNested(String field) {
    // TODO Drive through meta-data
    //		return FieldHelper.isEmbeddedField( field );
    return false;
  }
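
  /*
   * Illustrative note (not part of the original source): with isNested() currently
   * hard-coded to false, wrapQueryForNestedIfRequired is a pass-through. Once driven
   * by metadata, a query on an embedded field such as "author.name" would be wrapped as
   * { "nested": { "path": "author", "query": { ... } } }.
   */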

  public static JsonObject fromLuceneFilter(Filter luceneFilter) {
    if (luceneFilter instanceof QueryWrapperFilter) {
      Query query = ((QueryWrapperFilter) luceneFilter).getQuery();
      query.setBoost(luceneFilter.getBoost() * query.getBoost());
      return fromLuceneQuery(query);
    } else if (luceneFilter instanceof DistanceFilter) {
      return convertDistanceFilter((DistanceFilter) luceneFilter);
    } else if (luceneFilter instanceof SpatialHashFilter) {
      return convertSpatialHashFilter((SpatialHashFilter) luceneFilter);
    } else if (luceneFilter instanceof CachingWrapperFilter) {
      return fromLuceneFilter(((CachingWrapperFilter) luceneFilter).getCachedFilter());
    }
    throw LOG.cannotTransformLuceneFilterIntoEsQuery(luceneFilter);
  }
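
  /*
   * Illustrative note (not part of the original source): for a QueryWrapperFilter
   * with boost 2.0 around a query with boost 1.5, the branch above translates the
   * query with an effective boost of 3.0, since the two boosts are multiplied.
   */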
}
// TODO rename copy?
public class FSMasterDirectoryProvider implements DirectoryProvider<FSDirectory> {

  private static final String CURRENT1 = "current1";
  private static final String CURRENT2 = "current2";
  // defined to have CURRENT_DIR_NAME[1] == "current"+"1":
  private static final String[] CURRENT_DIR_NAME = {null, CURRENT1, CURRENT2};

  private static final Log log = LoggerFactory.make();
  private final Timer timer = new Timer(true); // daemon thread, the copy algorithm is robust

  private volatile int current;

  // variables whose visibility is guaranteed by a read of "current"
  private FSDirectory directory;
  private String indexName;
  // get rid of it after start()
  private BuildContext context;
  private long copyChunkSize;

  // variables needed between initialize and start (used by same thread: no special care needed)
  private File sourceDir;
  private File indexDir;
  private String directoryProviderName;
  private Properties properties;
  private TriggerTask task;
  private Lock directoryProviderLock;

  public void initialize(
      String directoryProviderName, Properties properties, BuildContext context) {
    this.properties = properties;
    this.directoryProviderName = directoryProviderName;
    // source guessing
    sourceDir = DirectoryProviderHelper.getSourceDirectory(directoryProviderName, properties, true);
    log.debugf("Source directory: %s", sourceDir.getPath());
    indexDir = DirectoryProviderHelper.getVerifiedIndexDir(directoryProviderName, properties, true);
    log.debugf("Index directory: %s", indexDir.getPath());
    try {
      indexName = indexDir.getCanonicalPath();
      directory = DirectoryProviderHelper.createFSIndex(indexDir, properties);
    } catch (IOException e) {
      throw new SearchException("Unable to initialize index: " + directoryProviderName, e);
    }
    copyChunkSize = DirectoryProviderHelper.getCopyBufferSize(directoryProviderName, properties);
    this.context = context;
    current = 0; // write to volatile to publish all state
  }

  public void start() {
    int currentLocal = 0;
    this.directoryProviderLock = this.context.getDirectoryProviderLock(this);
    this.context = null;
    try {
      // copy to source
      if (new File(sourceDir, CURRENT1).exists()) {
        currentLocal = 2;
      } else if (new File(sourceDir, CURRENT2).exists()) {
        currentLocal = 1;
      } else {
        log.debugf("Source directory for '%s' will be initialized", indexName);
        currentLocal = 1;
      }
      String currentString = Integer.valueOf(currentLocal).toString();
      File subDir = new File(sourceDir, currentString);
      FileHelper.synchronize(indexDir, subDir, true, copyChunkSize);
      new File(sourceDir, CURRENT1).delete();
      new File(sourceDir, CURRENT2).delete();
      // TODO small window here during which no current marker file exists
      new File(sourceDir, CURRENT_DIR_NAME[currentLocal]).createNewFile();
      log.debugf("Current directory: %d", currentLocal);
    } catch (IOException e) {
      throw new SearchException("Unable to initialize index: " + directoryProviderName, e);
    }
    task = new FSMasterDirectoryProvider.TriggerTask(indexDir, sourceDir);
    long period = DirectoryProviderHelper.getRefreshPeriod(properties, directoryProviderName);
    timer.scheduleAtFixedRate(task, period, period);
    this.current = currentLocal; // write to volatile to publish all state
  }
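
  /*
   * Illustrative note (not part of the original source): after start(), the source
   * directory alternates between two full index copies, e.g.
   *
   *   sourceDir/1/        index copy #1
   *   sourceDir/2/        index copy #2
   *   sourceDir/current1  empty marker file designating copy #1 as current
   *
   * The copy task refreshes the non-current copy and then swaps the marker file.
   */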

  public FSDirectory getDirectory() {
    @SuppressWarnings("unused")
    int readCurrentState =
        current; // Unneeded value, needed to ensure visibility of state protected by memory barrier
    return directory;
  }

  @Override
  public boolean equals(Object obj) {
    // this code is actually broken since the value changes after the initialize call,
    // but from a practical POV this is fine since we only call this method
    // after the initialize call
    if (obj == this) return true;
    if (obj == null || !(obj instanceof FSMasterDirectoryProvider)) return false;
    FSMasterDirectoryProvider other = (FSMasterDirectoryProvider) obj;
    // break both memory barriers by reading volatile variables:
    @SuppressWarnings("unused")
    int readCurrentState = other.current;
    readCurrentState = this.current;
    return indexName.equals(other.indexName);
  }

  @Override
  public int hashCode() {
    // this code is actually broken since the value changes after the initialize call,
    // but from a practical POV this is fine since we only call this method
    // after the initialize call
    @SuppressWarnings("unused")
    int readCurrentState =
        current; // Unneeded value, to ensure visibility of state protected by memory barrier
    int hash = 11;
    return 37 * hash + indexName.hashCode();
  }

  public void stop() {
    @SuppressWarnings("unused")
    int readCurrentState =
        current; // Another unneeded value, to ensure visibility of state protected by memory
                 // barrier
    timer.cancel();
    task.stop();
    try {
      directory.close();
    } catch (Exception e) {
      log.unableToCloseLuceneDirectory(directory.getDirectory(), e);
    }
  }

  private class TriggerTask extends TimerTask {

    private final ExecutorService executor;
    private final FSMasterDirectoryProvider.CopyDirectory copyTask;

    public TriggerTask(File source, File destination) {
      executor = Executors.newSingleThreadExecutor();
      copyTask = new FSMasterDirectoryProvider.CopyDirectory(source, destination);
    }

    public void run() {
      if (copyTask.inProgress.compareAndSet(false, true)) {
        executor.execute(copyTask);
      } else {
        log.skippingDirectorySynchronization(indexName);
      }
    }

    public void stop() {
      executor.shutdownNow();
    }
  }

  private class CopyDirectory implements Runnable {
    private final File source;
    private final File destination;
    private final AtomicBoolean inProgress = new AtomicBoolean(false);

    public CopyDirectory(File source, File destination) {
      this.source = source;
      this.destination = destination;
    }

    public void run() {
      // TODO get rid of current and use the marker file instead?
      directoryProviderLock.lock();
      try {
        long start = System.nanoTime(); // take the time after the lock is acquired for a correct measurement
        int oldIndex = current;
        int index = oldIndex == 1 ? 2 : 1;
        File destinationFile = new File(destination, Integer.valueOf(index).toString());
        try {
          log.tracef("Copying %s into %s", source, destinationFile);
          FileHelper.synchronize(source, destinationFile, true, copyChunkSize);
          current = index;
        } catch (IOException e) {
          // don't change current
          log.unableToSynchronizeSource(indexName, e);
          return;
        }
        if (!new File(destination, CURRENT_DIR_NAME[oldIndex]).delete()) {
          log.unableToRemovePreviousMarket(indexName);
        }
        try {
          new File(destination, CURRENT_DIR_NAME[index]).createNewFile();
        } catch (IOException e) {
          log.unableToCreateCurrentMarker(indexName, e);
        }
        log.tracef(
            "Copy for %s took %d ms",
            indexName, TimeUnit.NANOSECONDS.toMillis(System.nanoTime() - start));
      } finally {
        directoryProviderLock.unlock();
        inProgress.set(false);
      }
    }
  }
}
/**
 * Abstract base class for the document builders.
 *
 * @author Hardy Ferentschik
 * @author Davide D'Alto
 * @author Sanne Grinovero
 */
public abstract class AbstractDocumentBuilder {
  private static final Log log = LoggerFactory.make();

  private final XClass beanXClass;
  private final Class<?> beanClass;
  private final TypeMetadata typeMetadata;
  private final InstanceInitializer instanceInitializer;

  private boolean isRoot;
  private Set<Class<?>> mappedSubclasses = new HashSet<Class<?>>();

  protected EntityState entityState;

  /**
   * Constructor.
   *
   * @param xClass The class for which to build a document builder
   * @param typeMetadata metadata for the specified class
   * @param reflectionManager Reflection manager to use for processing the annotations
   * @param optimizationBlackList keeps track of types on which we need to disable collection events
   *     optimizations
   * @param instanceInitializer a {@link org.hibernate.search.spi.InstanceInitializer} object.
   */
  public AbstractDocumentBuilder(
      XClass xClass,
      TypeMetadata typeMetadata,
      ReflectionManager reflectionManager,
      Set<XClass> optimizationBlackList,
      InstanceInitializer instanceInitializer) {
    if (xClass == null) {
      throw new AssertionFailure(
          "Unable to build a DocumentBuilderContainedEntity with a null class");
    }

    this.instanceInitializer = instanceInitializer;
    this.entityState = EntityState.CONTAINED_IN_ONLY;
    this.beanXClass = xClass;
    this.beanClass = reflectionManager.toClass(xClass);
    this.typeMetadata = typeMetadata;

    optimizationBlackList.addAll(typeMetadata.getOptimizationBlackList());
  }

  public abstract void addWorkToQueue(
      String tenantIdentifier,
      Class<?> entityClass,
      Object entity,
      Serializable id,
      boolean delete,
      boolean add,
      List<LuceneWork> queue,
      ConversionContext contextualBridge);

  /**
   * In case of an indexed entity, return the value of its identifier: what is marked as @Id
   * or @DocumentId; in case the entity uses @ProvidedId, it is illegal to call this method.
   *
   * @param entity the instance for which to retrieve the id
   * @return the value, or null if it's not an indexed entity
   * @throws IllegalStateException when used with a @ProvidedId annotated entity
   */
  public abstract Serializable getId(Object entity);

  public TypeMetadata getTypeMetadata() {
    return typeMetadata;
  }

  public boolean isRoot() {
    return isRoot;
  }

  public Class<?> getBeanClass() {
    return beanClass;
  }

  public XClass getBeanXClass() {
    return beanXClass;
  }

  public TypeMetadata getMetadata() {
    return typeMetadata;
  }

  public ScopedAnalyzerReference getAnalyzerReference() {
    return typeMetadata.getDefaultAnalyzerReference();
  }

  public EntityState getEntityState() {
    return entityState;
  }

  public Set<Class<?>> getMappedSubclasses() {
    return mappedSubclasses;
  }

  public void postInitialize(Set<Class<?>> indexedClasses) {
    // in theory we initialize only once, because we no longer keep a reference to the
    // reflectionManager afterwards
    Class<?> plainClass = beanClass;
    if (entityState == EntityState.NON_INDEXABLE) {
      throw new AssertionFailure("A non-indexed entity is post-processed");
    }
    Set<Class<?>> tempMappedSubclasses = new HashSet<Class<?>>();
    // together with the caller this is O(n^2), but it's still likely faster than building
    // the superclass hierarchy for each class
    for (Class<?> currentClass : indexedClasses) {
      if (plainClass != currentClass && plainClass.isAssignableFrom(currentClass)) {
        tempMappedSubclasses.add(currentClass);
      }
    }
    this.mappedSubclasses = Collections.unmodifiableSet(tempMappedSubclasses);
    Class<?> superClass = plainClass.getSuperclass();
    this.isRoot = true;
    while (superClass != null) {
      if (indexedClasses.contains(superClass)) {
        this.isRoot = false;
        break;
      }
      superClass = superClass.getSuperclass();
    }
  }
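
  /*
   * Illustrative note (not part of the original source): given indexed classes
   * { Animal, Dog, Cat } and this builder bound to Animal, postInitialize yields
   * mappedSubclasses = { Dog, Cat } and isRoot = true, since no superclass of
   * Animal is itself indexed.
   */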

  /**
   * If we have a work instance we have to check whether the instance to be indexed is contained in
   * any other indexed entities.
   *
   * @see #appendContainedInWorkForInstance(Object, WorkPlan, ContainedInRecursionContext, String)
   * @param instance the instance to be indexed
   * @param workPlan the current work plan
   * @param currentRecursionContext the current {@link
   *     org.hibernate.search.engine.spi.ContainedInRecursionContext} object used to check the graph
   *     traversal
   */
  public void appendContainedInWorkForInstance(
      Object instance, WorkPlan workPlan, ContainedInRecursionContext currentRecursionContext) {
    appendContainedInWorkForInstance(instance, workPlan, currentRecursionContext, null);
  }

  /**
   * If we have a work instance we have to check whether the instance to be indexed is contained in
   * any other indexed entities for a tenant.
   *
   * @param instance the instance to be indexed
   * @param workPlan the current work plan
   * @param currentRecursionContext the current {@link
   *     org.hibernate.search.engine.spi.ContainedInRecursionContext} object used to check the graph
   *     traversal
   * @param tenantIdentifier the identifier of the tenant or null, if there isn't one
   * @see #appendContainedInWorkForInstance(Object, WorkPlan, ContainedInRecursionContext)
   */
  public void appendContainedInWorkForInstance(
      Object instance,
      WorkPlan workPlan,
      ContainedInRecursionContext currentRecursionContext,
      String tenantIdentifier) {
    for (ContainedInMetadata containedInMetadata : typeMetadata.getContainedInMetadata()) {
      XMember member = containedInMetadata.getContainedInMember();
      Object unproxiedInstance = instanceInitializer.unproxy(instance);

      ContainedInRecursionContext recursionContext =
          updateContainedInRecursionContext(
              unproxiedInstance, containedInMetadata, currentRecursionContext);

      if (recursionContext.isTerminal()) {
        continue;
      }

      Object value = ReflectionHelper.getMemberValue(unproxiedInstance, member);

      if (value == null) {
        continue;
      }

      if (member.isArray()) {
        Object[] array = (Object[]) value;
        for (Object arrayValue : array) {
          processSingleContainedInInstance(
              workPlan, arrayValue, recursionContext, tenantIdentifier);
        }
      } else if (member.isCollection()) {
        Collection<?> collection = null;
        try {
          collection = getActualCollection(member, value);
          collection.size(); // load it
        } catch (Exception e) {
          if (e.getClass().getName().contains("org.hibernate.LazyInitializationException")) {
            /* A deleted entity not having its collection initialized
             * leads to a LIE because the collection is no longer attached to the session
             *
             * But that's ok as the collection update event has been processed before
             * or the fk would have been cleared and thus triggering the cleaning
             */
            collection = null;
          }
        }
        if (collection != null) {
          for (Object collectionValue : collection) {
            processSingleContainedInInstance(
                workPlan, collectionValue, recursionContext, tenantIdentifier);
          }
        }
      } else {
        processSingleContainedInInstance(workPlan, value, recursionContext, tenantIdentifier);
      }
    }
  }

  protected InstanceInitializer getInstanceInitializer() {
    return instanceInitializer;
  }

  private ContainedInRecursionContext updateContainedInRecursionContext(
      Object containedInstance,
      ContainedInMetadata containedInMetadata,
      ContainedInRecursionContext containedContext) {
    int maxDepth;
    int depth;

    // Handle @IndexedEmbedded.depth-induced limits

    Integer metadataMaxDepth = containedInMetadata.getMaxDepth();
    if (containedInstance != null && metadataMaxDepth != null) {
      maxDepth = metadataMaxDepth;
    } else {
      maxDepth = containedContext != null ? containedContext.getMaxDepth() : Integer.MAX_VALUE;
    }

    depth = containedContext != null ? containedContext.getDepth() : 0;
    if (depth < Integer.MAX_VALUE) { // Avoid integer overflow
      ++depth;
    }

    /*
     * Handle @IndexedEmbedded.includePaths-induced limits. If the context for the contained element has a
     * comprehensive set of included paths, and if the @IndexedEmbedded matching the @ContainedIn we're currently
     * processing also has a comprehensive set of embedded paths, *then* we can compute the resulting set of
     * embedded fields (which is the intersection of those two sets). If this resulting set is empty, we can safely
     * stop the @ContainedIn processing: any changed field wouldn't be included in the Lucene document for
     * "containerInstance" anyway.
     */

    Set<String> comprehensivePaths;
    Set<String> metadataIncludePaths = containedInMetadata.getIncludePaths();

    /*
     * See @IndexedEmbedded.depth: it should be considered as zero if it has its default value and if includePaths
     * contains elements
     */
    if (metadataIncludePaths != null
        && !metadataIncludePaths.isEmpty()
        && metadataMaxDepth != null
        && metadataMaxDepth.equals(Integer.MAX_VALUE)) {
      String metadataPrefix = containedInMetadata.getPrefix();

      /*
       * Filter by the contained context's included paths if they are comprehensive. This
       * allows detecting when a @ContainedIn is irrelevant because the matching
       * @IndexedEmbedded would not capture any property.
       */
      Set<String> containedComprehensivePaths =
          containedContext != null ? containedContext.getComprehensivePaths() : null;

      comprehensivePaths = new HashSet<>();
      for (String includedPath : metadataIncludePaths) {
        /*
         * If the contained context has a comprehensive list of included paths, use it to filter out our own
         * list
         */
        if (containedComprehensivePaths == null
            || containedComprehensivePaths.contains(includedPath)) {
          comprehensivePaths.add(metadataPrefix + includedPath);
        }
      }
    } else {
      comprehensivePaths = null;
    }

    return new ContainedInRecursionContext(maxDepth, depth, comprehensivePaths);
  }
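
  /*
   * Illustrative note (not part of the original source): with
   * @IndexedEmbedded(depth = 2) on the matching association, the first
   * @ContainedIn hop produces maxDepth = 2, depth = 1; by the third hop depth
   * reaches 3 and the resulting context becomes terminal, stopping the recursion.
   */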

  @Override
  public String toString() {
    return "DocumentBuilder for {" + beanClass.getName() + "}";
  }

  /**
   * An {@code XMember} instance treats a map as a collection as well, in which case the map
   * values are returned as a collection.
   *
   * @param member The member instance
   * @param value The value
   * @return The {@code value} cast to a collection or, in case of {@code value} being a map, the
   *     map values as a collection.
   */
  @SuppressWarnings("unchecked")
  private <T> Collection<T> getActualCollection(XMember member, Object value) {
    Collection<T> collection;
    if (Map.class.equals(member.getCollectionClass())) {
      collection = ((Map<?, T>) value).values();
    } else {
      collection = (Collection<T>) value;
    }
    return collection;
  }

  private <T> void processSingleContainedInInstance(
      WorkPlan workplan, T value, ContainedInRecursionContext depth, String tenantId) {
    workplan.recurseContainedIn(value, depth, tenantId);
  }

  /**
   * Hibernate entities might be dirty (their state has changed), but none of these changes would
   * affect the index state. This method returns {@code true} if any of the changed entity
   * properties, identified by their names ({@code dirtyPropertyNames}), will affect the index
   * state.
   *
   * @param dirtyPropertyNames array of property names for the changed entity properties, {@code
   *     null} in case the changed properties cannot be specified.
   * @return {@code true} if the entity changes will affect the index state, {@code false}
   *     otherwise
   * @since 3.4
   */
  public boolean isDirty(String[] dirtyPropertyNames) {
    if (dirtyPropertyNames == null || dirtyPropertyNames.length == 0) {
      return true; // it appears some collection work has no oldState -> reindex
    }
    if (!stateInspectionOptimizationsEnabled()) {
      return true;
    }

    for (String dirtyPropertyName : dirtyPropertyNames) {
      PropertyMetadata propertyMetadata =
          typeMetadata.getPropertyMetadataForProperty(dirtyPropertyName);
      if (propertyMetadata != null) {
        // if there is a property metadata it means that there is at least one @Field.
        // Fields are either indexed or stored, so we need to re-index
        return true;
      }

      // consider IndexedEmbedded:
      for (EmbeddedTypeMetadata embeddedTypeMetadata : typeMetadata.getEmbeddedTypeMetadata()) {
        String name = embeddedTypeMetadata.getEmbeddedFieldName();
        if (name.equals(dirtyPropertyName)) {
          return true;
        }
      }
    }
    return false;
  }
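
  /*
   * Illustrative note (not part of the original source): isDirty(new String[] { "title" })
   * returns true only if "title" is mapped as a field or is the root of an
   * @IndexedEmbedded; isDirty(null) conservatively requests reindexing.
   */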

  /**
   * To be removed, see org.hibernate.search.engine.DocumentBuilderIndexedEntity.isIdMatchingJpaId()
   *
   * @return true if a providedId needs to be provided for indexing
   */
  public boolean requiresProvidedId() {
    return false;
  }

  /**
   * To be removed, see org.hibernate.search.engine.DocumentBuilderIndexedEntity.isIdMatchingJpaId()
   *
   * @return true if @DocumentId and @Id are found on the same property
   */
  public boolean isIdMatchingJpaId() {
    return true;
  }

  /**
   * Determines whether a collection change event requires an index update; if it returns {@code
   * false}, the indexing event can safely be ignored.
   *
   * @param collectionRoleName the role name of the changed collection
   * @return {@code true} if an update to the collection identified by the given role name affects
   *     the index state, {@code false} otherwise.
   */
  public boolean collectionChangeRequiresIndexUpdate(String collectionRoleName) {
    if (collectionRoleName == null) {
      // collection name will only be non null for PostCollectionUpdateEvents
      return true;
    }

    // don't check stateInspectionOptimizationsEnabled() here, as it ignores the depth limit:
    // it would disable the optimization whenever class bridges are used, even when we're too
    // deep in the graph to be reachable. typeMetadata.areStateInspectionOptimizationsEnabled()
    // already reflects that evaluation, limited to the recursion depth.
    if (!typeMetadata.areStateInspectionOptimizationsEnabled()) {
      // if optimizations are not enabled we need to re-index
      return true;
    }

    return this.typeMetadata.containsCollectionRole(collectionRoleName);
  }

  /**
   * Verifies entity level preconditions to know if it's safe to skip index updates based on
   * specific field or collection updates.
   *
   * @return true if it seems safe to apply such optimizations
   */
  boolean stateInspectionOptimizationsEnabled() {
    if (!typeMetadata.areStateInspectionOptimizationsEnabled()) {
      return false;
    }
    if (typeMetadata.areClassBridgesUsed()) {
      log.tracef(
          "State inspection optimization disabled as entity %s uses class bridges",
          this.beanClass.getName());
      return false; // can't know what a class bridge is going to look at -> reindex
      // TODO nice new feature to have?
    }
    BoostStrategy boostStrategy = typeMetadata.getDynamicBoost();
    if (boostStrategy != null && !(boostStrategy instanceof DefaultBoostStrategy)) {
      log.tracef(
          "State inspection optimization disabled as DynamicBoost is enabled on entity %s",
          this.beanClass.getName());
      return false; // as with class bridge: might be affected by any field
      // TODO nice new feature to have?
    }
    return true;
  }

  /**
   * Disables state inspection optimizations so that {@link
   * #collectionChangeRequiresIndexUpdate(String)} always returns {@code true} and collection
   * update events are always processed.
   *
   * @see #collectionChangeRequiresIndexUpdate(String)
   */
  public void forceStateInspectionOptimizationsDisabled() {
    typeMetadata.disableStateInspectionOptimizations();
  }

  /** Closes any resources. */
  public void close() {
    typeMetadata.getDefaultAnalyzerReference().close();
  }
}
/** @author Emmanuel Bernard */
public class ProgrammaticMappingTest extends SearchTestBase {

  private static final Log log = LoggerFactory.make();

  @Test
  public void testMapping() throws Exception {
    Address address = new Address();
    address.setStreet1("3340 Peachtree Rd NE");
    address.setStreet2("JBoss");

    FullTextSession s = Search.getFullTextSession(openSession());
    Transaction tx = s.beginTransaction();
    s.persist(address);
    tx.commit();

    s.clear();

    tx = s.beginTransaction();

    QueryParser parser = new QueryParser("id", TestConstants.standardAnalyzer);
    org.apache.lucene.search.Query luceneQuery = parser.parse("" + address.getAddressId());
    FullTextQuery query = s.createFullTextQuery(luceneQuery);
    assertEquals("documentId does not work properly", 1, query.getResultSize());

    luceneQuery = parser.parse("street1:peachtree");
    query = s.createFullTextQuery(luceneQuery).setProjection("idx_street2", FullTextQuery.THIS);
    assertEquals("Not properly indexed", 1, query.getResultSize());
    Object[] firstResult = (Object[]) query.list().get(0);
    assertEquals("@Field.store not respected", "JBoss", firstResult[0]);

    // Verify that AddressClassBridge was applied as well:
    luceneQuery = parser.parse("AddressClassBridge:Applied\\!");
    assertEquals(1, s.createFullTextQuery(luceneQuery).getResultSize());

    s.delete(firstResult[1]);
    tx.commit();
    s.close();
  }

  @Test
  public void testNumeric() throws Exception {
    assertEquals(
        NumericFieldBridge.SHORT_FIELD_BRIDGE,
        getUnwrappedBridge(Item.class, "price", NumericFieldBridge.class));

    assertNotNull(getUnwrappedBridge(Item.class, "price_string", ShortBridge.class));
  }

  private Object getUnwrappedBridge(Class<?> clazz, String string, Class<?> expectedBridgeClass) {
    FieldBridge bridge =
        getExtendedSearchIntegrator()
            .getIndexBinding(clazz)
            .getDocumentBuilder()
            .getMetadata()
            .getDocumentFieldMetadataFor(string)
            .getFieldBridge();
    return unwrapBridge(bridge, expectedBridgeClass);
  }

  private Object unwrapBridge(Object bridge, Class<?> expectedBridgeClass) {
    if (bridge instanceof BridgeAdaptor) {
      return ((BridgeAdaptor) bridge).unwrap(expectedBridgeClass);
    } else {
      return bridge;
    }
  }

  @Test
  public void testSortableField() throws Exception {
    FullTextSession s = Search.getFullTextSession(openSession());
    Transaction tx = s.beginTransaction();

    Item item1 = new Item();
    item1.setId(3);
    item1.setPrice((short) 3454);
    s.persist(item1);

    Item item2 = new Item();
    item2.setId(2);
    item2.setPrice((short) 3354);
    s.persist(item2);

    Item item3 = new Item();
    item3.setId(1);
    item3.setPrice((short) 3554);
    s.persist(item3);

    tx.commit();
    s.clear();

    tx = s.beginTransaction();

    Query q =
        s.getSearchFactory().buildQueryBuilder().forEntity(Item.class).get().all().createQuery();
    FullTextQuery query = s.createFullTextQuery(q, Item.class);
    query.setSort(new Sort(new SortField("price", SortField.Type.INT)));

    List<?> results = query.list();
    assertThat(results)
        .onProperty("price")
        .describedAs("Sortable field via programmatic config")
        .containsExactly((short) 3354, (short) 3454, (short) 3554);

    query.setSort(new Sort(new SortField("id", SortField.Type.STRING)));

    results = query.list();
    assertThat(results)
        .onProperty("id")
        .describedAs("Sortable field via programmatic config")
        .containsExactly(1, 2, 3);

    s.delete(results.get(0));
    s.delete(results.get(1));
    s.delete(results.get(2));
    tx.commit();
    s.close();
  }

  @Test
  public void testAnalyzerDef() throws Exception {
    Address address = new Address();
    address.setStreet1("3340 Peachtree Rd NE");
    address.setStreet2("JBoss");

    FullTextSession s = Search.getFullTextSession(openSession());
    Transaction tx = s.beginTransaction();
    s.persist(address);
    tx.commit();

    s.clear();

    tx = s.beginTransaction();

    QueryParser parser = new QueryParser("id", TestConstants.standardAnalyzer);
    org.apache.lucene.search.Query luceneQuery = parser.parse("street1_ngram:pea");

    final FullTextQuery query = s.createFullTextQuery(luceneQuery);
    assertEquals("Analyzer inoperant", 1, query.getResultSize());

    s.delete(query.list().get(0));
    tx.commit();
    s.close();
  }

  @Test
  public void testBridgeMapping() throws Exception {
    Address address = new Address();
    address.setStreet1("Peachtree Rd NE");
    address.setStreet2("JBoss");

    FullTextSession s = Search.getFullTextSession(openSession());
    Transaction tx = s.beginTransaction();
    s.persist(address);
    tx.commit();

    s.clear();

    tx = s.beginTransaction();

    QueryParser parser = new QueryParser("id", TestConstants.standardAnalyzer);
    org.apache.lucene.search.Query luceneQuery = parser.parse("street1:peac");
    FullTextQuery query = s.createFullTextQuery(luceneQuery);
    assertEquals("PrefixQuery should not be on", 0, query.getResultSize());

    luceneQuery = parser.parse("street1_abridged:peac");
    query = s.createFullTextQuery(luceneQuery);
    assertEquals("Bridge not used", 1, query.getResultSize());

    s.delete(query.list().get(0));
    tx.commit();
    s.close();
  }

  @Test
  public void testBoost() throws Exception {
    FullTextSession s = Search.getFullTextSession(openSession());
    Transaction tx = s.beginTransaction();

    Address address = new Address();
    address.setStreet1("Peachtree Rd NE");
    address.setStreet2("Peachtnot Rd NE");
    s.persist(address);

    address = new Address();
    address.setStreet1("Peachtnot Rd NE");
    address.setStreet2("Peachtree Rd NE");
    s.persist(address);

    tx.commit();

    s.clear();

    tx = s.beginTransaction();

    QueryParser parser = new QueryParser("id", TestConstants.standardAnalyzer);
    org.apache.lucene.search.Query luceneQuery =
        parser.parse("street1:peachtree OR idx_street2:peachtree");
    FullTextQuery query =
        s.createFullTextQuery(luceneQuery).setProjection(FullTextQuery.THIS, FullTextQuery.SCORE);
    assertEquals("expecting two results", 2, query.getResultSize());

    @SuppressWarnings("unchecked")
    List<Object[]> results = query.list();

    assertTrue(
        "first result should score nearly twice as high",
        (Float) results.get(0)[1] > (Float) results.get(1)[1] * 1.9f);
    assertEquals(
        "Wrong result ordering", address.getStreet1(), ((Address) results.get(0)[0]).getStreet1());
    for (Object[] result : results) {
      s.delete(result[0]);
    }
    tx.commit();
    s.close();
  }

  @Test
  @Category(
      ElasticsearchSupportInProgress
          .class) // HSEARCH-2428 Provide an alternative to
                  // org.hibernate.search.analyzer.Discriminator for Elasticsearch?
  public void testAnalyzerDiscriminator() throws Exception {
    FullTextSession s = Search.getFullTextSession(openSession());
    Transaction tx = s.beginTransaction();

    BlogEntry deEntry = new BlogEntry();
    deEntry.setTitle("aufeinanderschl\u00FCgen");
    deEntry.setDescription("aufeinanderschl\u00FCgen");
    deEntry.setLanguage("de");
    s.persist(deEntry);

    BlogEntry enEntry = new BlogEntry();
    enEntry.setTitle("acknowledgment");
    enEntry.setDescription("acknowledgment");
    enEntry.setLanguage("en");
    s.persist(enEntry);

    tx.commit();

    s.clear();

    tx = s.beginTransaction();

    // At query time we use a standard analyzer. We explicitly search for tokens which can only
    // be found if the right language-specific stemmer was used at index time.
    assertEquals(1, nbrOfMatchingResults("description", "aufeinanderschlug", s));
    assertEquals(1, nbrOfMatchingResults("description", "acknowledg", s));
    assertEquals(0, nbrOfMatchingResults("title", "aufeinanderschlug", s));
    assertEquals(1, nbrOfMatchingResults("title", "acknowledgment", s));

    for (Object result : s.createQuery("from " + BlogEntry.class.getName()).list()) {
      s.delete(result);
    }
    tx.commit();
    s.close();
  }

  @Test
  public void testDateBridgeMapping() throws Exception {
    FullTextSession s = Search.getFullTextSession(openSession());
    Transaction tx = s.beginTransaction();

    Address address = new Address();
    address.setStreet1("Peachtree Rd NE");
    address.setStreet2("Peachtnot Rd NE");
    Calendar c =
        GregorianCalendar.getInstance(
            TimeZone.getTimeZone("GMT"), Locale.ROOT); // for the sake of tests
    c.set(2009, Calendar.NOVEMBER, 15);

    Date date = new Date(c.getTimeInMillis());
    address.setDateCreated(date);
    s.persist(address);

    address = new Address();
    address.setStreet1("Peachtnot Rd NE");
    address.setStreet2("Peachtree Rd NE");
    address.setDateCreated(date);
    s.persist(address);

    BlogEntry enEntry = new BlogEntry();
    enEntry.setTitle("acknowledgment");
    enEntry.setDescription("acknowledgment");
    enEntry.setLanguage("en");
    enEntry.setDateCreated(date);
    s.persist(enEntry);

    tx.commit();

    s.clear();

    tx = s.beginTransaction();

    long searchTimeStamp = DateTools.round(date.getTime(), DateTools.Resolution.DAY);
    BooleanQuery.Builder booleanQueryBuilder = new BooleanQuery.Builder();
    booleanQueryBuilder.add(
        NumericRangeQuery.newLongRange(
            "date-created", searchTimeStamp, searchTimeStamp, true, true),
        BooleanClause.Occur.SHOULD);
    booleanQueryBuilder.add(
        NumericRangeQuery.newLongRange(
            "blog-entry-created", searchTimeStamp, searchTimeStamp, true, true),
        BooleanClause.Occur.SHOULD);

    FullTextQuery query =
        s.createFullTextQuery(booleanQueryBuilder.build())
            .setProjection(FullTextQuery.THIS, FullTextQuery.SCORE);
    assertEquals("expecting 3 results", 3, query.getResultSize());

    @SuppressWarnings("unchecked")
    List<Object[]> results = query.list();

    for (Object[] result : results) {
      s.delete(result[0]);
    }
    tx.commit();
    s.close();
  }

  @Test
  public void testCalendarBridgeMapping() throws Exception {
    FullTextSession s = Search.getFullTextSession(openSession());
    Transaction tx = s.beginTransaction();

    Address address = new Address();
    address.setStreet1("Peachtree Rd NE");
    address.setStreet2("Peachtnot Rd NE");
    Calendar calendar =
        GregorianCalendar.getInstance(
            TimeZone.getTimeZone("GMT"), Locale.ROOT); // for the sake of tests
    calendar.set(2009, Calendar.NOVEMBER, 15);

    address.setLastUpdated(calendar);
    s.persist(address);

    address = new Address();
    address.setStreet1("Peachtnot Rd NE");
    address.setStreet2("Peachtree Rd NE");
    address.setLastUpdated(calendar);
    s.persist(address);

    tx.commit();

    s.clear();

    tx = s.beginTransaction();

    long searchTimeStamp = DateTools.round(calendar.getTime().getTime(), DateTools.Resolution.DAY);
    org.apache.lucene.search.Query luceneQuery =
        NumericRangeQuery.newLongRange(
            "last-updated", searchTimeStamp, searchTimeStamp, true, true);

    FullTextQuery query =
        s.createFullTextQuery(luceneQuery).setProjection(FullTextQuery.THIS, FullTextQuery.SCORE);
    assertEquals("expecting 2 results", 2, query.getResultSize());

    @SuppressWarnings("unchecked")
    List<Object[]> results = query.list();

    for (Object[] result : results) {
      s.delete(result[0]);
    }
    tx.commit();
    s.close();
  }

  @Test
  public void testProvidedIdMapping() throws Exception {
    FullTextSession fullTextSession = Search.getFullTextSession(openSession());
    SearchIntegrator sf = fullTextSession.getSearchFactory().unwrap(SearchIntegrator.class);

    ProvidedIdEntry person1 = new ProvidedIdEntry();
    person1.setName("Big Goat");
    person1.setBlurb("Eats grass");

    ProvidedIdEntry person2 = new ProvidedIdEntry();
    person2.setName("Mini Goat");
    person2.setBlurb("Eats cheese");

    ProvidedIdEntry person3 = new ProvidedIdEntry();
    person3.setName("Regular goat");
    person3.setBlurb("Is anorexic");

    TransactionContextForTest tc = new TransactionContextForTest();

    Work work = new Work(person1, 1, WorkType.INDEX);
    sf.getWorker().performWork(work, tc);
    work = new Work(person2, 2, WorkType.INDEX);
    sf.getWorker().performWork(work, tc);
    Work work2 = new Work(person3, 3, WorkType.INDEX);
    sf.getWorker().performWork(work2, tc);

    tc.end();

    Transaction transaction = fullTextSession.beginTransaction();

    QueryParser parser = new QueryParser("providedidentry.name", TestConstants.standardAnalyzer);
    Query luceneQuery = parser.parse("Goat");

    // we cannot use a FullTextQuery because @ProvidedId does not expose the id getter that the
    // Hibernate Search Query extension needs, so we use a plain HSQuery

    HSQuery query = getExtendedSearchIntegrator().createHSQuery(luceneQuery, ProvidedIdEntry.class);

    assertEquals(3, query.queryResultSize());

    transaction.commit();
    getSession().close();
  }

  @Test
  public void testFullTextFilterDefAtMappingLevel() throws Exception {
    FullTextSession s = Search.getFullTextSession(openSession());
    Transaction tx = s.beginTransaction();

    Address address = new Address();
    address.setStreet1("Peachtree Rd NE");
    address.setStreet2("Peachtnot Rd NE");
    address.setOwner("test");
    Calendar c =
        GregorianCalendar.getInstance(
            TimeZone.getTimeZone("GMT"), Locale.ROOT); // for the sake of tests
    c.set(2009, Calendar.NOVEMBER, 15);

    address.setLastUpdated(c);
    s.persist(address);

    address = new Address();

    address.setStreet1("Peachtnot Rd NE");
    address.setStreet2("Peachtree Rd NE");
    address.setLastUpdated(c);
    address.setOwner("testowner");
    s.persist(address);

    tx.commit();

    s.clear();

    tx = s.beginTransaction();

    QueryParser parser = new QueryParser("id", TestConstants.standardAnalyzer);
    org.apache.lucene.search.Query luceneQuery = parser.parse("street1:Peachtnot");
    FullTextQuery query =
        s.createFullTextQuery(luceneQuery).setProjection(FullTextQuery.THIS, FullTextQuery.SCORE);
    query.enableFullTextFilter("security").setParameter("ownerName", "testowner");
    assertEquals("expecting 1 results", 1, query.getResultSize());

    @SuppressWarnings("unchecked")
    List<Object[]> results = query.list();

    for (Object[] result : results) {
      s.delete(result[0]);
    }
    tx.commit();
    s.close();
  }

  @Test
  public void testIndexEmbedded() throws Exception {
    FullTextSession s = Search.getFullTextSession(openSession());
    Transaction tx = s.beginTransaction();

    ProductCatalog productCatalog = new ProductCatalog();
    productCatalog.setName("Cars");
    Item item = new Item();
    item.setId(1);
    item.setDescription("Ferrari");
    item.setProductCatalog(productCatalog);
    productCatalog.addItem(item);

    s.persist(item);
    s.persist(productCatalog);
    tx.commit();

    s.clear();

    tx = s.beginTransaction();

    QueryParser parser = new QueryParser("id", TestConstants.standardAnalyzer);
    org.apache.lucene.search.Query luceneQuery = parser.parse("items.description:Ferrari");
    FullTextQuery query =
        s.createFullTextQuery(luceneQuery).setProjection(FullTextQuery.THIS, FullTextQuery.SCORE);
    assertEquals("expecting 1 results", 1, query.getResultSize());

    @SuppressWarnings("unchecked")
    List<Object[]> results = query.list();

    for (Object[] result : results) {
      s.delete(result[0]);
    }
    tx.commit();
    s.close();
  }

  @Test
  public void testContainedIn() throws Exception {
    FullTextSession s = Search.getFullTextSession(openSession());
    Transaction tx = s.beginTransaction();

    ProductCatalog productCatalog = new ProductCatalog();
    productCatalog.setName("Cars");
    Item item = new Item();
    item.setId(1);
    item.setDescription("test");
    item.setProductCatalog(productCatalog);
    productCatalog.addItem(item);

    s.persist(item);
    s.persist(productCatalog);
    tx.commit();

    s.clear();

    tx = s.beginTransaction();

    QueryParser parser = new QueryParser("id", TestConstants.standardAnalyzer);
    org.apache.lucene.search.Query luceneQuery = parser.parse("items.description:test");
    FullTextQuery query =
        s.createFullTextQuery(luceneQuery).setProjection(FullTextQuery.THIS, FullTextQuery.SCORE);
    assertEquals("expecting 1 results", 1, query.getResultSize());
    tx.commit();

    tx = s.beginTransaction();

    Item loaded = s.get(Item.class, item.getId());
    loaded.setDescription("Ferrari");

    s.update(loaded);
    tx.commit();

    tx = s.beginTransaction();

    parser = new QueryParser("id", TestConstants.standardAnalyzer);
    luceneQuery = parser.parse("items.description:test");
    query =
        s.createFullTextQuery(luceneQuery).setProjection(FullTextQuery.THIS, FullTextQuery.SCORE);
    assertEquals("expecting 0 results", 0, query.getResultSize());

    parser = new QueryParser("id", TestConstants.standardAnalyzer);
    luceneQuery = parser.parse("items.description:Ferrari");
    query =
        s.createFullTextQuery(luceneQuery).setProjection(FullTextQuery.THIS, FullTextQuery.SCORE);
    assertEquals("expecting 1 results", 1, query.getResultSize());
    tx.commit();

    tx = s.beginTransaction();
    @SuppressWarnings("unchecked")
    List<Object[]> results = query.list();

    for (Object[] result : results) {
      s.delete(result[0]);
    }
    tx.commit();
    s.close();
  }

  @SuppressWarnings("unchecked")
  @Test
  public void testClassBridgeMapping() throws Exception {
    org.hibernate.Session s = openSession();
    Transaction tx = s.beginTransaction();
    s.persist(getDepts1());
    s.persist(getDepts2());
    s.persist(getDepts3());
    s.persist(getDepts4());
    s.flush();
    tx.commit();

    tx = s.beginTransaction();
    FullTextSession session = Search.getFullTextSession(s);

    // The equipment field is the manufacturer field in the
    // Departments entity after being massaged by passing it
    // through the EquipmentType class. This field is in
    // the Lucene document but not in the Departments entity itself.
    QueryParser parser = new QueryParser("equipment", new SimpleAnalyzer());

    // Check the second ClassBridge annotation
    Query query = parser.parse("equiptype:Cisco");
    org.hibernate.search.FullTextQuery hibQuery =
        session.createFullTextQuery(query, Departments.class);
    List<Departments> result = hibQuery.list();
    assertNotNull(result);
    assertEquals("incorrect number of results returned", 2, result.size());
    for (Departments d : result) {
      assertEquals("incorrect manufacturer", "C", d.getManufacturer());
    }

    // No data cross-ups.
    query = parser.parse("branchnetwork:Kent Lewin");
    hibQuery = session.createFullTextQuery(query, Departments.class);
    result = hibQuery.list();
    assertNotNull(result);
    assertTrue("problem with field cross-ups", result.size() == 0);

    // Non-ClassBridge field.
    parser = new QueryParser("branchHead", new SimpleAnalyzer());
    query = parser.parse("branchHead:Kent Lewin");
    hibQuery = session.createFullTextQuery(query, Departments.class);
    result = hibQuery.list();
    assertNotNull(result);
    assertTrue("incorrect entity returned, wrong branch head", result.size() == 1);
    assertEquals("incorrect entity returned", "Kent Lewin", (result.get(0)).getBranchHead());

    // Check other ClassBridge annotation.
    parser = new QueryParser("branchnetwork", new SimpleAnalyzer());
    query = parser.parse("branchnetwork:st. george 1D");
    hibQuery = session.createFullTextQuery(query, Departments.class);
    result = hibQuery.list();
    assertNotNull(result);
    assertEquals("incorrect entity returned, wrong network", "1D", (result.get(0)).getNetwork());
    assertEquals(
        "incorrect entity returned, wrong branch", "St. George", (result.get(0)).getBranch());
    assertEquals("incorrect number of results returned", 1, result.size());

    // cleanup
    for (Object element : s.createQuery("from " + Departments.class.getName()).list()) {
      s.delete(element);
    }
    tx.commit();
    s.close();
  }

  @Test
  @Category(SkipOnElasticsearch.class) // Dynamic boosting is not supported on Elasticsearch
  public void testDynamicBoosts() throws Exception {

    Session session = openSession();
    session.beginTransaction();

    DynamicBoostedDescLibrary lib1 = new DynamicBoostedDescLibrary();
    lib1.setName("one");
    session.persist(lib1);

    DynamicBoostedDescLibrary lib2 = new DynamicBoostedDescLibrary();
    lib2.setName("two");
    session.persist(lib2);

    session.getTransaction().commit();
    session.close();

    float lib1Score = getScore(new TermQuery(new Term("name", "one")));
    float lib2Score = getScore(new TermQuery(new Term("name", "two")));
    assertEquals("The scores should be equal", lib1Score, lib2Score, 0f);

    // set dynamic score and reindex!
    session = openSession();
    session.beginTransaction();

    session.refresh(lib2);
    lib2.setDynScore(2.0f);

    session.getTransaction().commit();
    session.close();

    lib1Score = getScore(new TermQuery(new Term("name", "one")));
    lib2Score = getScore(new TermQuery(new Term("name", "two")));
    assertTrue("lib2score should be greater than lib1score", lib1Score < lib2Score);

    lib1Score = getScore(new TermQuery(new Term("name", "foobar")));
    assertEquals("lib1score should be 0 since term is not yet indexed.", 0.0f, lib1Score, 0f);

    // index foobar
    session = openSession();
    session.beginTransaction();

    session.refresh(lib1);
    lib1.setName("foobar");

    session.getTransaction().commit();
    session.close();

    lib1Score = getScore(new TermQuery(new Term("name", "foobar")));
    lib2Score = getScore(new TermQuery(new Term("name", "two")));
    assertTrue("lib1score should be greater than lib2score", lib1Score > lib2Score);
  }

  @Test
  public void testSpatial() {
    org.hibernate.Session s = openSession();
    Transaction tx = s.beginTransaction();
    MemberLevelTestPoI memberLevelTestPoI = new MemberLevelTestPoI("test", 24.0, 32.0d);
    s.persist(memberLevelTestPoI);
    s.flush();
    tx.commit();

    tx = s.beginTransaction();
    FullTextSession session = Search.getFullTextSession(s);

    QueryBuilder builder =
        session.getSearchFactory().buildQueryBuilder().forEntity(MemberLevelTestPoI.class).get();

    double centerLatitude = 24;
    double centerLongitude = 31.5;

    org.apache.lucene.search.Query luceneQuery =
        builder
            .spatial()
            .onField("location")
            .within(50, Unit.KM)
            .ofLatitude(centerLatitude)
            .andLongitude(centerLongitude)
            .createQuery();

    org.hibernate.Query hibQuery =
        session.createFullTextQuery(luceneQuery, MemberLevelTestPoI.class);
    List<?> results = hibQuery.list();
    assertEquals(0, results.size());

    org.apache.lucene.search.Query luceneQuery2 =
        builder
            .spatial()
            .onField("location")
            .within(51, Unit.KM)
            .ofLatitude(centerLatitude)
            .andLongitude(centerLongitude)
            .createQuery();

    org.hibernate.Query hibQuery2 =
        session.createFullTextQuery(luceneQuery2, MemberLevelTestPoI.class);
    List<?> results2 = hibQuery2.list();
    assertEquals(1, results2.size());

    List<?> testPoIs = session.createQuery("from " + MemberLevelTestPoI.class.getName()).list();
    for (Object entity : testPoIs) {
      session.delete(entity);
    }
    tx.commit();
    session.close();

    s = openSession();
    tx = s.beginTransaction();
    ClassLevelTestPoI classLevelTestPoI = new ClassLevelTestPoI("test", 24.0, 32.0d);
    s.persist(classLevelTestPoI);
    s.flush();
    tx.commit();

    tx = s.beginTransaction();
    session = Search.getFullTextSession(s);

    builder =
        session.getSearchFactory().buildQueryBuilder().forEntity(ClassLevelTestPoI.class).get();

    centerLatitude = 24;
    centerLongitude = 31.5;

    luceneQuery =
        SpatialQueryBuilder.buildSpatialQueryByHash(
            centerLatitude, centerLongitude, 50, "location");
    hibQuery = session.createFullTextQuery(luceneQuery, ClassLevelTestPoI.class);
    results = hibQuery.list();
    assertEquals(0, results.size());

    luceneQuery2 =
        SpatialQueryBuilder.buildSpatialQueryByHash(
            centerLatitude, centerLongitude, 51, "location");
    hibQuery2 = session.createFullTextQuery(luceneQuery2, ClassLevelTestPoI.class);
    results2 = hibQuery2.list();
    assertEquals(1, results2.size());

    testPoIs = session.createQuery("from " + ClassLevelTestPoI.class.getName()).list();
    for (Object entity : testPoIs) {
      session.delete(entity);
    }
    tx.commit();
    session.close();

    s = openSession();
    tx = s.beginTransaction();
    LatLongAnnTestPoi latLongAnnTestPoi = new LatLongAnnTestPoi("test", 24.0, 32.0d);
    s.persist(latLongAnnTestPoi);
    s.flush();
    tx.commit();
    tx = s.beginTransaction();
    session = Search.getFullTextSession(s);

    builder =
        session.getSearchFactory().buildQueryBuilder().forEntity(LatLongAnnTestPoi.class).get();

    centerLatitude = 24;
    centerLongitude = 31.5;

    luceneQuery =
        builder
            .spatial()
            .onField("location")
            .within(50, Unit.KM)
            .ofLatitude(centerLatitude)
            .andLongitude(centerLongitude)
            .createQuery();

    hibQuery = session.createFullTextQuery(luceneQuery, LatLongAnnTestPoi.class);
    results = hibQuery.list();
    assertEquals(0, results.size());

    luceneQuery2 =
        builder
            .spatial()
            .onField("location")
            .within(51, Unit.KM)
            .ofLatitude(centerLatitude)
            .andLongitude(centerLongitude)
            .createQuery();

    hibQuery2 = session.createFullTextQuery(luceneQuery2, LatLongAnnTestPoi.class);
    results2 = hibQuery2.list();
    assertEquals(1, results2.size());

    testPoIs = session.createQuery("from " + LatLongAnnTestPoi.class.getName()).list();
    for (Object entity : testPoIs) {
      session.delete(entity);
    }
    tx.commit();

    session.close();
  }

  @Test
  public void testClassBridgeInstanceMapping() throws Exception {
    OrderLine orderLine = new OrderLine();
    orderLine.setName("Sequoia");

    FullTextSession s = Search.getFullTextSession(openSession());
    Transaction tx = s.beginTransaction();
    s.persist(orderLine);
    tx.commit();

    s.clear();

    tx = s.beginTransaction();

    QueryParser parser = new QueryParser("id", TestConstants.standardAnalyzer);
    org.apache.lucene.search.Query luceneQuery = parser.parse("orderLineName:Sequoia");
    FullTextQuery query = s.createFullTextQuery(luceneQuery);
    assertEquals("Bridge not used", 1, query.getResultSize());

    luceneQuery = parser.parse("orderLineName_ngram:quo");
    query = s.createFullTextQuery(luceneQuery);
    assertEquals("Analyzer configuration not applied", 1, query.getResultSize());

    luceneQuery = parser.parse("orderLineNameViaParam:Sequoia");
    query = s.createFullTextQuery(luceneQuery);
    assertEquals("Parameter configuration not applied", 1, query.getResultSize());

    s.delete(query.list().get(0));
    tx.commit();
    s.close();
  }

  private float getScore(Query query) {
    Session session = openSession();
    Object[] queryResult;
    float score;
    try {
      FullTextSession fullTextSession = Search.getFullTextSession(session);
      List<?> resultList =
          fullTextSession
              .createFullTextQuery(query, DynamicBoostedDescLibrary.class)
              .setProjection(ProjectionConstants.SCORE, ProjectionConstants.EXPLANATION)
              .setMaxResults(1)
              .list();

      if (resultList.size() == 0) {
        score = 0.0f;
      } else {
        queryResult = (Object[]) resultList.get(0);
        score = (Float) queryResult[0];
        String explanation = queryResult[1].toString();
        log.debugf("score: %f explanation: %s", score, explanation);
      }
    } finally {
      session.close();
    }
    return score;
  }

  private int nbrOfMatchingResults(String field, String token, FullTextSession s)
      throws ParseException {
    QueryParser parser = new QueryParser(field, TestConstants.standardAnalyzer);
    org.apache.lucene.search.Query luceneQuery = parser.parse(token);
    FullTextQuery query = s.createFullTextQuery(luceneQuery);
    return query.getResultSize();
  }

  private Departments getDepts1() {
    Departments depts = new Departments();

    depts.setBranch("Salt Lake City");
    depts.setBranchHead("Kent Lewin");
    depts.setMaxEmployees(100);
    depts.setNetwork("1A");
    depts.setManufacturer("C");

    return depts;
  }

  private Departments getDepts2() {
    Departments depts = new Departments();

    depts.setBranch("Layton");
    depts.setBranchHead("Terry Poperszky");
    depts.setMaxEmployees(20);
    depts.setNetwork("2B");
    depts.setManufacturer("3");

    return depts;
  }

  private Departments getDepts3() {
    Departments depts = new Departments();

    depts.setBranch("West Valley");
    depts.setBranchHead("Pat Kelley");
    depts.setMaxEmployees(15);
    depts.setNetwork("3C");
    depts.setManufacturer("D");

    return depts;
  }

  private Departments getDepts4() {
    Departments depts = new Departments();

    depts.setBranch("St. George");
    depts.setBranchHead("Spencer Stajskal");
    depts.setMaxEmployees(10);
    depts.setNetwork("1D");
    depts.setManufacturer("C");
    return depts;
  }

  @Override
  public void configure(Map<String, Object> cfg) {
    cfg.put(Environment.MODEL_MAPPING, ProgrammaticSearchMappingFactory.class.getName());
  }

  @Override
  public Class<?>[] getAnnotatedClasses() {
    return new Class<?>[] {
      Address.class,
      Country.class,
      BlogEntry.class,
      ProvidedIdEntry.class,
      ProductCatalog.class,
      Item.class,
      Departments.class,
      DynamicBoostedDescLibrary.class,
      MemberLevelTestPoI.class,
      ClassLevelTestPoI.class,
      LatLongAnnTestPoi.class,
      OrderLine.class
    };
  }
}
/**
 * Prepares and configures a BatchIndexingWorkspace to start rebuilding the indexes for all entity
 * instances in the database. The entity types to index are either all indexed entities or a chosen
 * subset; all their subtypes are always included.
 *
 * @author Sanne Grinovero
 */
public class MassIndexerImpl implements MassIndexerWithTenant {

  private static final Log log = LoggerFactory.make();

  private final ExtendedSearchIntegrator extendedIntegrator;
  private final SessionFactoryImplementor sessionFactory;

  protected Set<Class<?>> rootEntities = new HashSet<Class<?>>();

  // default settings defined here:
  private int typesToIndexInParallel = 1;
  // threads loading the main entity, also responsible for loading lazy @IndexedEmbedded
  // collections
  private int documentBuilderThreads = 6;
  private int objectLoadingBatchSize = 10;
  private long objectsLimit = 0; // means no limit at all
  private CacheMode cacheMode = CacheMode.IGNORE;
  private boolean optimizeAtEnd = true;
  private boolean purgeAtStart = true;
  private boolean optimizeAfterPurge = true;
  private MassIndexerProgressMonitor monitor;
  private int idFetchSize = 100; // reasonable default as we only load IDs
  private String tenantIdentifier;
  private Integer idLoadingTransactionTimeout;

  protected MassIndexerImpl(
      SearchIntegrator searchIntegrator,
      SessionFactoryImplementor sessionFactory,
      Class<?>... entities) {
    this.extendedIntegrator = searchIntegrator.unwrap(ExtendedSearchIntegrator.class);
    this.sessionFactory = sessionFactory;
    rootEntities = toRootEntities(extendedIntegrator, entities);
    if (extendedIntegrator.isJMXEnabled()) {
      monitor = new JMXRegistrar.IndexingProgressMonitor();
    } else {
      monitor = new SimpleIndexingProgressMonitor();
    }
  }

  /**
   * From the given set of classes a new set is built containing all indexed subclasses, from which
   * all subtypes of other indexed entities are then removed, as they would be loaded by the
   * polymorphic query on their supertype anyway. For example, if Dog extends Animal and both are
   * indexed, selecting both yields just Animal.
   *
   * @param selection the user-selected entity types
   * @return a new set of entities
   */
  private static Set<Class<?>> toRootEntities(
      ExtendedSearchIntegrator extendedIntegrator, Class<?>... selection) {
    Set<Class<?>> entities = new HashSet<Class<?>>();
    // first build the "entities" set containing all indexed subtypes of "selection".
    for (Class<?> entityType : selection) {
      Set<Class<?>> targetedClasses =
          extendedIntegrator.getIndexedTypesPolymorphic(new Class[] {entityType});
      if (targetedClasses.isEmpty()) {
        String msg =
            entityType.getName() + " is not an indexed entity or a subclass of an indexed entity";
        throw new IllegalArgumentException(msg);
      }
      entities.addAll(targetedClasses);
    }
    Set<Class<?>> cleaned = new HashSet<Class<?>>();
    Set<Class<?>> toRemove = new HashSet<Class<?>>();
    // now remove all repeated types to avoid duplicate loading by polymorphic query loading
    for (Class<?> type : entities) {
      boolean typeIsOk = true;
      for (Class<?> existing : cleaned) {
        if (existing.isAssignableFrom(type)) {
          typeIsOk = false;
          break;
        }
        if (type.isAssignableFrom(existing)) {
          toRemove.add(existing);
        }
      }
      if (typeIsOk) {
        cleaned.add(type);
      }
    }
    cleaned.removeAll(toRemove);
    log.debugf("Targets for indexing job: %s", cleaned);
    return cleaned;
  }

  @Override
  public MassIndexer typesToIndexInParallel(int numberOfThreads) {
    if (numberOfThreads < 1) {
      throw new IllegalArgumentException("numberOfThreads must be at least 1");
    }
    this.typesToIndexInParallel = Math.min(numberOfThreads, rootEntities.size());
    return this;
  }

  @Override
  public MassIndexer cacheMode(CacheMode cacheMode) {
    if (cacheMode == null) {
      throw new IllegalArgumentException("cacheMode must not be null");
    }
    this.cacheMode = cacheMode;
    return this;
  }

  @Override
  public MassIndexer threadsToLoadObjects(int numberOfThreads) {
    if (numberOfThreads < 1) {
      throw new IllegalArgumentException("numberOfThreads must be at least 1");
    }
    this.documentBuilderThreads = numberOfThreads;
    return this;
  }

  @Override
  public MassIndexer batchSizeToLoadObjects(int batchSize) {
    if (batchSize < 1) {
      throw new IllegalArgumentException("batchSize must be at least 1");
    }
    this.objectLoadingBatchSize = batchSize;
    return this;
  }

  @Override
  @Deprecated
  public MassIndexer threadsForSubsequentFetching(int numberOfThreads) {
    if (numberOfThreads < 1) {
      throw new IllegalArgumentException("numberOfThreads must be at least 1");
    }
    // currently a no-op
    return this;
  }

  @Override
  public MassIndexer progressMonitor(MassIndexerProgressMonitor monitor) {
    this.monitor = monitor;
    return this;
  }

  @Override
  public MassIndexer optimizeOnFinish(boolean optimize) {
    this.optimizeAtEnd = optimize;
    return this;
  }

  @Override
  public MassIndexer optimizeAfterPurge(boolean optimize) {
    this.optimizeAfterPurge = optimize;
    return this;
  }

  @Override
  public MassIndexer purgeAllOnStart(boolean purgeAll) {
    this.purgeAtStart = purgeAll;
    return this;
  }

  @Override
  public MassIndexer transactionTimeout(int timeoutInSeconds) {
    this.idLoadingTransactionTimeout = timeoutInSeconds;
    return this;
  }

  @Override
  public MassIndexerWithTenant tenantIdentifier(String tenantIdentifier) {
    this.tenantIdentifier = tenantIdentifier;
    return this;
  }

  @Override
  public Future<?> start() {
    BatchCoordinator coordinator = createCoordinator();
    ExecutorService executor = Executors.newFixedThreadPool(1, "batch coordinator");
    try {
      return executor.submit(coordinator);
    } finally {
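      // shutdown() only prevents further submissions; the already-submitted coordinator still
      // runs to completion and the executor terminates once it finishes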
      executor.shutdown();
    }
  }

  @Override
  public void startAndWait() throws InterruptedException {
    BatchCoordinator coordinator = createCoordinator();
    coordinator.run();
    if (Thread.currentThread().isInterrupted()) {
      throw new InterruptedException();
    }
  }

  protected BatchCoordinator createCoordinator() {
    return new BatchCoordinator(
        rootEntities,
        extendedIntegrator,
        sessionFactory,
        typesToIndexInParallel,
        documentBuilderThreads,
        cacheMode,
        objectLoadingBatchSize,
        objectsLimit,
        optimizeAtEnd,
        purgeAtStart,
        optimizeAfterPurge,
        monitor,
        idFetchSize,
        idLoadingTransactionTimeout,
        tenantIdentifier);
  }

  @Override
  public MassIndexer limitIndexedObjectsTo(long maximum) {
    this.objectsLimit = maximum;
    return this;
  }

  @Override
  public MassIndexer idFetchSize(int idFetchSize) {
    // don't check for positive/zero values as it's actually used by some databases
    // as special values which might be useful.
    this.idFetchSize = idFetchSize;
    return this;
  }
}
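/**
 * A minimal usage sketch for the fluent API defined above; this class is not part of the original
 * sources. The {@code Book} entity and the way the Session is obtained are assumptions for
 * illustration only, and the values shown simply restate the defaults declared in MassIndexerImpl.
 */
class MassIndexerUsageExample {

  void rebuildIndex(org.hibernate.Session session) throws InterruptedException {
    FullTextSession fullTextSession = Search.getFullTextSession(session);
    fullTextSession
        .createIndexer(Book.class) // Book is a hypothetical indexed entity
        .purgeAllOnStart(true) // drop stale documents before re-adding them
        .typesToIndexInParallel(1)
        .threadsToLoadObjects(6)
        .batchSizeToLoadObjects(10)
        .cacheMode(CacheMode.IGNORE) // avoid polluting caches during the mass load
        .idFetchSize(100)
        .optimizeOnFinish(true)
        .startAndWait(); // runs the BatchCoordinator and blocks until indexing completes
  }
}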
/**
 * @author Emmanuel Bernard
 * @author Hardy Ferentschik
 */
public class AvroDeserializer implements Deserializer {

  private static final Log log = LoggerFactory.make();
  private final KnownProtocols protocols;
  private List<Utf8> classReferences;

  public AvroDeserializer(KnownProtocols protocols) {
    this.protocols = protocols;
  }

  @Override
  public void deserialize(byte[] data, LuceneWorksBuilder hydrator) {
    final ByteArrayInputStream inputStream = new ByteArrayInputStream(data);
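    // the first two bytes of the serialized form carry the protocol version (major, minor),
    // used to look up the matching Avro protocol among the known ones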
    final int majorVersion = inputStream.read();
    final int minorVersion = inputStream.read();
    final Protocol protocol = protocols.getProtocol(majorVersion, minorVersion);

    Decoder decoder = DecoderFactory.get().binaryDecoder(inputStream, null);
    GenericDatumReader<GenericRecord> reader =
        new GenericDatumReader<>(protocol.getType("Message"));
    GenericRecord result;
    try {
      result = reader.read(null, decoder);
    } catch (IOException e) {
      throw log.unableToDeserializeAvroStream(e);
    }

    classReferences = asListOfString(result, "classReferences");
    final List<GenericRecord> operations = asListOfGenericRecords(result, "operations");
    final ConversionContext conversionContext = new ContextualExceptionBridgeHelper();
    for (GenericRecord operation : operations) {
      String schema = operation.getSchema().getName();
      if ("OptimizeAll".equals(schema)) {
        hydrator.addOptimizeAll();
      } else if ("PurgeAll".equals(schema)) {
        hydrator.addPurgeAllLuceneWork(asClass(operation, "class"));
      } else if ("Flush".equals(schema)) {
        hydrator.addFlush();
      } else if ("Delete".equals(schema)) {
        processId(operation, hydrator);
        hydrator.addDeleteLuceneWork(asClass(operation, "class"), conversionContext);
      } else if ("DeleteByQuery".equals(schema)) {
        String entityClassName = asClass(operation, "class");
        int queryKey = asInt(operation, "key");
        DeleteByQuerySupport.StringToQueryMapper mapper =
            DeleteByQuerySupport.getStringToQueryMapper(queryKey);
        List<Utf8> stringList = asListOfString(operation, "query");
        String[] query = new String[stringList.size()];
        for (int i = 0; i < stringList.size(); ++i) {
          query[i] = stringList.get(i).toString();
        }
        hydrator.addDeleteByQueryLuceneWork(entityClassName, mapper.fromString(query));
      } else if ("Add".equals(schema)) {
        buildLuceneDocument(asGenericRecord(operation, "document"), hydrator);
        Map<String, String> analyzers = getAnalyzers(operation);
        processId(operation, hydrator);
        hydrator.addAddLuceneWork(asClass(operation, "class"), analyzers, conversionContext);
      } else if ("Update".equals(schema)) {
        buildLuceneDocument(asGenericRecord(operation, "document"), hydrator);
        Map<String, String> analyzers = getAnalyzers(operation);
        processId(operation, hydrator);
        hydrator.addUpdateLuceneWork(asClass(operation, "class"), analyzers, conversionContext);
      } else {
        throw log.cannotDeserializeOperation(schema);
      }
    }
  }

  private String asClass(GenericRecord operation, String attribute) {
    Integer index = (Integer) operation.get(attribute);
    return classReferences.get(index).toString();
  }

  @SuppressWarnings("unchecked")
  private List<Utf8> asListOfString(GenericRecord result, String attribute) {
    return (List<Utf8>) result.get(attribute);
  }

  private void processId(GenericRecord operation, LuceneWorksBuilder hydrator) {
    GenericRecord id = (GenericRecord) operation.get("id");
    Object value = id.get("value");
    if (value instanceof ByteBuffer) {
      hydrator.addIdAsJavaSerialized(asByteArray((ByteBuffer) value));
    } else if (value instanceof Utf8) {
      hydrator.addId(value.toString());
    } else {
      // the rest are serialized objects
      hydrator.addId((Serializable) value);
    }
  }

  private Map<String, String> getAnalyzers(GenericRecord operation) {
    Map<?, ?> analyzersWithUtf8 = (Map<?, ?>) operation.get("fieldToAnalyzerMap");
    if (analyzersWithUtf8 == null) {
      return null;
    }
    Map<String, String> analyzers = new HashMap<>(analyzersWithUtf8.size());
    for (Map.Entry<?, ?> entry : analyzersWithUtf8.entrySet()) {
      analyzers.put(entry.getKey().toString(), entry.getValue().toString());
    }
    return analyzers;
  }

  private void buildLuceneDocument(GenericRecord document, LuceneWorksBuilder hydrator) {
    hydrator.defineDocument();
    List<GenericRecord> fieldables = asListOfGenericRecords(document, "fieldables");
    for (GenericRecord field : fieldables) {
      String schema = field.getSchema().getName();
      if ("CustomFieldable".equals(schema)) {
        hydrator.addFieldable(asByteArray(field, "instance"));
      } else if ("NumericIntField".equals(schema)) {
        hydrator.addIntNumericField(
            asInt(field, "value"),
            asString(field, "name"),
            asInt(field, "precisionStep"),
            asStore(field),
            asBoolean(field, "indexed"),
            asFloat(field, "boost"),
            asBoolean(field, "omitNorms"),
            asBoolean(field, "omitTermFreqAndPositions"));
      } else if ("NumericFloatField".equals(schema)) {
        hydrator.addFloatNumericField(
            asFloat(field, "value"),
            asString(field, "name"),
            asInt(field, "precisionStep"),
            asStore(field),
            asBoolean(field, "indexed"),
            asFloat(field, "boost"),
            asBoolean(field, "omitNorms"),
            asBoolean(field, "omitTermFreqAndPositions"));
      } else if ("NumericLongField".equals(schema)) {
        hydrator.addLongNumericField(
            asLong(field, "value"),
            asString(field, "name"),
            asInt(field, "precisionStep"),
            asStore(field),
            asBoolean(field, "indexed"),
            asFloat(field, "boost"),
            asBoolean(field, "omitNorms"),
            asBoolean(field, "omitTermFreqAndPositions"));
      } else if ("NumericDoubleField".equals(schema)) {
        hydrator.addDoubleNumericField(
            asDouble(field, "value"),
            asString(field, "name"),
            asInt(field, "precisionStep"),
            asStore(field),
            asBoolean(field, "indexed"),
            asFloat(field, "boost"),
            asBoolean(field, "omitNorms"),
            asBoolean(field, "omitTermFreqAndPositions"));
      } else if ("BinaryField".equals(schema)) {
        hydrator.addFieldWithBinaryData(
            asString(field, "name"),
            asByteArray(field, "value"),
            asInt(field, "offset"),
            asInt(field, "length"));
      } else if ("StringField".equals(schema)) {
        hydrator.addFieldWithStringData(
            asString(field, "name"),
            asString(field, "value"),
            asStore(field),
            asIndex(field),
            asTermVector(field),
            asFloat(field, "boost"),
            asBoolean(field, "omitNorms"),
            asBoolean(field, "omitTermFreqAndPositions"));
      } else if ("TokenStreamField".equals(schema)) {
        buildAttributes(field, "value", hydrator);
        hydrator.addFieldWithTokenStreamData(
            asString(field, "name"),
            asTermVector(field),
            asFloat(field, "boost"),
            asBoolean(field, "omitNorms"),
            asBoolean(field, "omitTermFreqAndPositions"));
      } else if ("ReaderField".equals(schema)) {
        hydrator.addFieldWithSerializableReaderData(
            asString(field, "name"),
            asByteArray(field, "value"),
            asTermVector(field),
            asFloat(field, "boost"),
            asBoolean(field, "omitNorms"),
            asBoolean(field, "omitTermFreqAndPositions"));
      } else if ("BinaryDocValuesField".equals(schema)) {
        hydrator.addDocValuesFieldWithBinaryData(
            asString(field, "name"),
            asString(field, "type"),
            asByteArray(field, "value"),
            asInt(field, "offset"),
            asInt(field, "length"));
      } else if ("NumericDocValuesField".equals(schema)) {
        hydrator.addDocValuesFieldWithNumericData(
            asString(field, "name"), asString(field, "type"), asLong(field, "value"));
      } else {
        throw log.cannotDeserializeField(schema);
      }
    }
  }

  private void buildAttributes(GenericRecord record, String field, LuceneWorksBuilder hydrator) {
    @SuppressWarnings("unchecked")
    List<List<?>> tokens = (List<List<?>>) record.get(field);
    for (List<?> token : tokens) {
      for (Object attribute : token) {
        buildAttribute(attribute, hydrator);
      }
      hydrator.addToken();
    }
  }

  private void buildAttribute(Object element, LuceneWorksBuilder hydrator) {
    if (element instanceof GenericRecord) {
      GenericRecord record = (GenericRecord) element;
      String name = record.getSchema().getName();
      if ("TokenTrackingAttribute".equals(name)) {
        @SuppressWarnings("unchecked")
        List<Integer> positionList = (List<Integer>) record.get("positions");
        hydrator.addTokenTrackingAttribute(positionList);
      } else if ("CharTermAttribute".equals(name)) {
        hydrator.addCharTermAttribute((CharSequence) record.get("sequence"));
      } else if ("PayloadAttribute".equals(name)) {
        hydrator.addPayloadAttribute(asByteArray(record, "payload"));
      } else if ("KeywordAttribute".equals(name)) {
        hydrator.addKeywordAttribute(asBoolean(record, "isKeyword"));
      } else if ("PositionIncrementAttribute".equals(name)) {
        hydrator.addPositionIncrementAttribute(asInt(record, "positionIncrement"));
      } else if ("FlagsAttribute".equals(name)) {
        hydrator.addFlagsAttribute(asInt(record, "flags"));
      } else if ("TypeAttribute".equals(name)) {
        hydrator.addTypeAttribute(asString(record, "type"));
      } else if ("OffsetAttribute".equals(name)) {
        hydrator.addOffsetAttribute(asInt(record, "startOffset"), asInt(record, "endOffset"));
      } else {
        throw log.unknownAttributeSerializedRepresentation(name);
      }
    } else if (element instanceof ByteBuffer) {
      hydrator.addSerializedAttribute(asByteArray((ByteBuffer) element));
    } else {
      throw log.unknownAttributeSerializedRepresentation(element.getClass().getName());
    }
  }

  private GenericRecord asGenericRecord(GenericRecord operation, String field) {
    return (GenericRecord) operation.get(field);
  }

  @SuppressWarnings("unchecked")
  private List<GenericRecord> asListOfGenericRecords(GenericRecord result, String field) {
    return (List<GenericRecord>) result.get(field);
  }

  private float asFloat(GenericRecord record, String field) {
    return ((Float) record.get(field)).floatValue();
  }

  private int asInt(GenericRecord record, String field) {
    return ((Integer) record.get(field)).intValue();
  }

  private long asLong(GenericRecord record, String field) {
    return ((Long) record.get(field)).longValue();
  }

  private double asDouble(GenericRecord record, String field) {
    return ((Double) record.get(field)).doubleValue();
  }

  private String asString(GenericRecord record, String field) {
    return record.get(field).toString();
  }

  private boolean asBoolean(GenericRecord record, String field) {
    return ((Boolean) record.get(field)).booleanValue();
  }

  private SerializableStore asStore(GenericRecord field) {
    String string = field.get("store").toString();
    return SerializableStore.valueOf(string);
  }

  private SerializableIndex asIndex(GenericRecord field) {
    String string = field.get("index").toString();
    return SerializableIndex.valueOf(string);
  }

  private SerializableTermVector asTermVector(GenericRecord field) {
    String string = field.get("termVector").toString();
    return SerializableTermVector.valueOf(string);
  }

  private byte[] asByteArray(GenericRecord operation, String field) {
    ByteBuffer buffer = (ByteBuffer) operation.get(field);
    return asByteArray(buffer);
  }

  private byte[] asByteArray(ByteBuffer buffer) {
    byte[] copy = new byte[buffer.remaining()];
    buffer.get(copy);
    return copy;
  }
}
/**
 * This {@code Runnable} consumes entity identifiers and produces corresponding {@code
 * AddLuceneWork} instances, which are forwarded to the index writing backend. It finishes when the
 * queue it consumes from signals that there are no more identifiers.
 *
 * @author Sanne Grinovero
 */
public class IdentifierConsumerDocumentProducer implements Runnable {

  private static final Log log = LoggerFactory.make();

  private final ProducerConsumerQueue<List<Serializable>> source;
  private final SessionFactory sessionFactory;
  private final CacheMode cacheMode;
  private final Class<?> type;
  private final MassIndexerProgressMonitor monitor;
  private final Map<Class<?>, EntityIndexBinding> entityIndexBindings;
  private final String idName;
  private final ErrorHandler errorHandler;
  private final BatchBackend backend;
  private final CountDownLatch producerEndSignal;
  private final Integer transactionTimeout;
  private final String tenantId;

  /** The JTA transaction manager or {@code null} if not in a JTA environment */
  private final TransactionManager transactionManager;

  public IdentifierConsumerDocumentProducer(
      ProducerConsumerQueue<List<Serializable>> fromIdentifierListToEntities,
      MassIndexerProgressMonitor monitor,
      SessionFactory sessionFactory,
      CountDownLatch producerEndSignal,
      CacheMode cacheMode,
      Class<?> type,
      ExtendedSearchIntegrator searchFactory,
      String idName,
      BatchBackend backend,
      ErrorHandler errorHandler,
      Integer transactionTimeout,
      String tenantId) {
    this.source = fromIdentifierListToEntities;
    this.monitor = monitor;
    this.sessionFactory = sessionFactory;
    this.cacheMode = cacheMode;
    this.type = type;
    this.idName = idName;
    this.backend = backend;
    this.errorHandler = errorHandler;
    this.producerEndSignal = producerEndSignal;
    this.entityIndexBindings = searchFactory.getIndexBindings();
    this.transactionTimeout = transactionTimeout;
    this.tenantId = tenantId;
    this.transactionManager =
        ((SessionFactoryImplementor) sessionFactory)
            .getServiceRegistry()
            .getService(JtaPlatform.class)
            .retrieveTransactionManager();

    log.trace("created");
  }

  @Override
  public void run() {
    log.trace("started");
    Session session = sessionFactory.withOptions().tenantIdentifier(tenantId).openSession();
    session.setFlushMode(FlushMode.MANUAL);
    session.setCacheMode(cacheMode);
    session.setDefaultReadOnly(true);
    try {
      loadAllFromQueue(session);
    } catch (Exception exception) {
      errorHandler.handleException(log.massIndexerExceptionWhileTransformingIds(), exception);
    } finally {
      producerEndSignal.countDown();
      session.close();
    }
    log.trace("finished");
  }

  private void loadAllFromQueue(Session session) throws Exception {
    final InstanceInitializer sessionInitializer =
        new HibernateSessionLoadingInitializer((SessionImplementor) session);

    try {
      List<Serializable> idList;
      do {
        idList = source.take();
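        // a null element is the end-of-stream signal from the identifier producer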
        if (idList != null) {
          log.tracef("received list of ids %s", idList);
          loadList(idList, session, sessionInitializer);
        }
      } while (idList != null);
    } catch (InterruptedException e) {
      // just quit
      Thread.currentThread().interrupt();
    }
  }

  /**
   * Loads a list of entities of the defined type using their identifiers. The loaded objects are
   * then transformed into Lucene Documents and forwarded to the indexing backend.
   *
   * @param listIds the list of entity identifiers
   * @param session the session to be used
   * @param sessionInitializer the initialization strategy for entities and collections
   * @throws Exception if loading or transaction handling fails
   */
  private void loadList(
      List<Serializable> listIds, Session session, InstanceInitializer sessionInitializer)
      throws Exception {
    try {
      beginTransaction(session);

      Criteria criteria =
          session
              .createCriteria(type)
              .setCacheMode(cacheMode)
              .setLockMode(LockMode.NONE)
              .setCacheable(false)
              .setFlushMode(FlushMode.MANUAL)
              .setFetchSize(listIds.size())
              .setResultTransformer(CriteriaSpecification.DISTINCT_ROOT_ENTITY)
              .add(Restrictions.in(idName, listIds));
      List<?> list = criteria.list();
      monitor.entitiesLoaded(list.size());
      indexAllQueue(session, list, sessionInitializer);
      session.clear();
    } finally {
      // it's read-only, so no need to commit
      rollbackTransaction(session);
    }
  }

  private void beginTransaction(Session session) throws Exception {
    if (transactionManager != null) {
      if (transactionTimeout != null) {
        transactionManager.setTransactionTimeout(transactionTimeout);
      }

      transactionManager.begin();
    } else {
      session.beginTransaction();
    }
  }

  private void rollbackTransaction(Session session) throws Exception {
    try {
      if (transactionManager != null) {
        transactionManager.rollback();
      } else {
        session.getTransaction().rollback();
      }
    } catch (Exception e) {
      log.errorRollingBackTransaction(e.getMessage(), e);
    }
  }

  private void indexAllQueue(
      Session session, List<?> entities, InstanceInitializer sessionInitializer)
      throws InterruptedException {
    ConversionContext contextualBridge = new ContextualExceptionBridgeHelper();

    if (entities == null || entities.isEmpty()) {
      return;
    } else {
      log.tracef("received a list of objects to index: %s", entities);
      for (Object object : entities) {
        try {
          index(object, session, sessionInitializer, contextualBridge);
          monitor.documentsBuilt(1);
        } catch (RuntimeException e) {
          String errorMsg =
              log.massIndexerUnableToIndexInstance(object.getClass().getName(), object.toString());
          errorHandler.handleException(errorMsg, e);
        }
      }
    }
  }

  @SuppressWarnings("unchecked")
  private void index(
      Object entity,
      Session session,
      InstanceInitializer sessionInitializer,
      ConversionContext conversionContext)
      throws InterruptedException {

    // abort if the thread has been interrupted while not in wait(), I/O or similar, which would
    // themselves have raised the InterruptedException
    if (Thread.currentThread().isInterrupted()) {
      throw new InterruptedException();
    }

    Serializable id = session.getIdentifier(entity);
    Class<?> clazz = HibernateHelper.getClass(entity);
    EntityIndexBinding entityIndexBinding = entityIndexBindings.get(clazz);
    if (entityIndexBinding == null) {
      // we might receive non-indexed subclasses of the currently indexed type;
      // being non-indexed, we skip them.
      // FIXME for improved performance: avoid loading them in an early phase.
      return;
    }

    EntityIndexingInterceptor interceptor = entityIndexBinding.getEntityIndexingInterceptor();
    if (interceptor != null) {
      IndexingOverride onAdd = interceptor.onAdd(entity);
      switch (onAdd) {
        case REMOVE:
        case SKIP:
          return;
      }
      // default: continue indexing this instance
    }

    DocumentBuilderIndexedEntity docBuilder = entityIndexBinding.getDocumentBuilder();
    TwoWayFieldBridge idBridge = docBuilder.getIdBridge();
    conversionContext.pushProperty(docBuilder.getIdKeywordName());
    String idInString = null;
    try {
      idInString =
          conversionContext.setClass(clazz).twoWayConversionContext(idBridge).objectToString(id);
    } finally {
      conversionContext.popProperty();
    }
    // depending on the complexity of the object graph going to be indexed it's possible
    // that we hit the database several times during work construction.
    AddLuceneWork addWork =
        docBuilder.createAddWork(
            tenantId, clazz, entity, id, idInString, sessionInitializer, conversionContext);
    backend.enqueueAsyncWork(addWork);
  }
}
/**
 * Collects all resources needed to apply changes to one index; these resources are reused across
 * several WorkQueues.
 *
 * @author Sanne Grinovero
 */
public final class LuceneBackendResources {

  private static final Log log = LoggerFactory.make();

  private volatile LuceneWorkVisitor visitor;
  private final AbstractWorkspaceImpl workspace;
  private final ErrorHandler errorHandler;
  private final int maxQueueLength;
  private final String indexName;

  private final ReadLock readLock;
  private final WriteLock writeLock;

  private volatile ExecutorService asyncIndexingExecutor;

  LuceneBackendResources(
      WorkerBuildContext context,
      DirectoryBasedIndexManager indexManager,
      Properties props,
      AbstractWorkspaceImpl workspace) {
    this.indexName = indexManager.getIndexName();
    this.errorHandler = context.getErrorHandler();
    this.workspace = workspace;
    this.maxQueueLength = PropertiesParseHelper.extractMaxQueueSize(indexName, props);
    ReentrantReadWriteLock readWriteLock = new ReentrantReadWriteLock();
    readLock = readWriteLock.readLock();
    writeLock = readWriteLock.writeLock();
  }

  private LuceneBackendResources(LuceneBackendResources previous) {
    this.indexName = previous.indexName;
    this.errorHandler = previous.errorHandler;
    this.workspace = previous.workspace;
    this.maxQueueLength = previous.maxQueueLength;
    this.asyncIndexingExecutor = previous.asyncIndexingExecutor;
    this.readLock = previous.readLock;
    this.writeLock = previous.writeLock;
  }

  public ExecutorService getAsynchIndexingExecutor() {
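    // double-checked lazy initialization: a cheap volatile read on the fast path; creation
    // happens in the synchronized method only the first time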
    ExecutorService executor = asyncIndexingExecutor;
    if (executor != null) {
      return executor;
    } else {
      return getAsynchIndexingExecutorSynchronized();
    }
  }

  private synchronized ExecutorService getAsynchIndexingExecutorSynchronized() {
    ExecutorService executor = asyncIndexingExecutor;
    if (executor != null) {
      return executor;
    } else {
      this.asyncIndexingExecutor =
          Executors.newFixedThreadPool(
              1, "Index updates queue processor for index " + indexName, maxQueueLength);
      return this.asyncIndexingExecutor;
    }
  }

  public int getMaxQueueLength() {
    return maxQueueLength;
  }

  public String getIndexName() {
    return indexName;
  }

  public LuceneWorkVisitor getVisitor() {
    if (visitor == null) {
      visitor = new LuceneWorkVisitor(workspace);
    }
    return visitor;
  }

  public AbstractWorkspaceImpl getWorkspace() {
    return workspace;
  }

  public void shutdown() {
    // need to close them in this specific order:
    try {
      flushCloseExecutor();
    } finally {
      workspace.shutDownNow();
    }
  }

  private void flushCloseExecutor() {
    if (asyncIndexingExecutor == null) {
      return;
    }
    asyncIndexingExecutor.shutdown();
    try {
      asyncIndexingExecutor.awaitTermination(Long.MAX_VALUE, TimeUnit.SECONDS);
    } catch (InterruptedException e) {
      log.interruptedWhileWaitingForIndexActivity(e);
      Thread.currentThread().interrupt();
    }
    if (!asyncIndexingExecutor.isTerminated()) {
      log.unableToShutdownAsynchronousIndexingByTimeout(indexName);
    }
  }

  public ErrorHandler getErrorHandler() {
    return errorHandler;
  }

  public Lock getParallelModificationLock() {
    return readLock;
  }

  public Lock getExclusiveModificationLock() {
    return writeLock;
  }

  /**
   * Creates a replacement for this same LuceneBackendResources: reuses the existing locks and
   * executors (which can't be reconfigured on the fly), reuses the same Workspace and ErrorHandler,
   * but will use a new LuceneWorkVisitor. The LuceneWorkVisitor contains the strategies we use to
   * apply update operations on the index, and we might need to change them after the backend is
   * started.
   *
   * @return the new LuceneBackendResources to replace this one.
   */
  public LuceneBackendResources onTheFlyRebuild() {
    return new LuceneBackendResources(this);
  }
}
/**
 * This factory is responsible for creating and initializing built-in and custom
 * <i>FieldBridges</i>.
 *
 * @author Emmanuel Bernard
 * @author John Griffin
 */
public final class BridgeFactory {
  private static final Log LOG = LoggerFactory.make();
  private Set<BridgeProvider> annotationBasedProviders;
  private Set<BridgeProvider> regularProviders;

  public BridgeFactory(ServiceManager serviceManager) {
    annotationBasedProviders = new HashSet<BridgeProvider>(5);
    annotationBasedProviders.add(new CalendarBridgeProvider());
    annotationBasedProviders.add(new DateBridgeProvider());
    annotationBasedProviders.add(new NumericBridgeProvider());
    annotationBasedProviders.add(new SpatialBridgeProvider());
    annotationBasedProviders.add(new TikaBridgeProvider());

    ClassLoaderService classLoaderService = serviceManager.requestService(ClassLoaderService.class);
    try {
      regularProviders = classLoaderService.loadJavaServices(BridgeProvider.class);
      regularProviders.add(new EnumBridgeProvider());
      regularProviders.add(new BasicJDKTypesBridgeProvider(serviceManager));
    } finally {
      serviceManager.releaseService(ClassLoaderService.class);
    }
  }

  /**
   * This extracts and instantiates the implementation class from a {@code ClassBridge} annotation.
   *
   * @param cb the class bridge annotation
   * @param clazz the {@code Class} on which the annotation is defined
   * @return Returns the specified {@code FieldBridge} instance
   */
  public FieldBridge extractType(ClassBridge cb, Class<?> clazz) {
    FieldBridge bridge = null;
    Class<?> bridgeType = null;

    if (cb != null) {
      bridgeType = cb.impl();
      if (bridgeType != null) {
        try {
          Object instance = bridgeType.newInstance();
          if (FieldBridge.class.isAssignableFrom(bridgeType)) {
            bridge = (FieldBridge) instance;
          } else if (org.hibernate.search.bridge.TwoWayStringBridge.class.isAssignableFrom(
              bridgeType)) {
            bridge =
                new TwoWayString2FieldBridgeAdaptor(
                    (org.hibernate.search.bridge.TwoWayStringBridge) instance);
          } else if (org.hibernate.search.bridge.StringBridge.class.isAssignableFrom(bridgeType)) {
            bridge =
                new String2FieldBridgeAdaptor((org.hibernate.search.bridge.StringBridge) instance);
          } else {
            throw LOG.noFieldBridgeInterfaceImplementedByClassBridge(bridgeType.getName());
          }
        } catch (Exception e) {
          throw LOG.cannotInstantiateClassBridgeOfType(bridgeType.getName(), clazz.getName(), e);
        }
      }
    }
    if (bridge == null) {
      throw LOG.unableToDetermineClassBridge(ClassBridge.class.getName());
    }

    populateReturnType(clazz, bridgeType, bridge);

    return bridge;
  }

  /**
   * Injects any parameters configured via the given {@code ClassBridge} annotation into the given
   * object, in case this is a {@link ParameterizedBridge}.
   *
   * @param classBridgeConfiguration the parameter source
   * @param classBridge the object to inject the parameters into
   */
  public void injectParameters(ClassBridge classBridgeConfiguration, Object classBridge) {
    if (classBridgeConfiguration.params().length > 0
        && ParameterizedBridge.class.isAssignableFrom(classBridge.getClass())) {
      Map<String, String> params =
          new HashMap<String, String>(classBridgeConfiguration.params().length);
      for (Parameter param : classBridgeConfiguration.params()) {
        params.put(param.name(), param.value());
      }
      ((ParameterizedBridge) classBridge).setParameterValues(params);
    }
  }

  /**
   * This instantiates the SpatialFieldBridge from a {@code Spatial} annotation.
   *
   * @param spatial the {@code Spatial} annotation
   * @param clazz the {@code XClass} on which the annotation is defined
   * @param latitudeField the name of the property holding the latitude
   * @param longitudeField the name of the property holding the longitude
   * @return Returns the {@code SpatialFieldBridge} instance
   */
  public FieldBridge buildSpatialBridge(
      Spatial spatial, XClass clazz, String latitudeField, String longitudeField) {
    FieldBridge bridge;
    try {
      bridge = SpatialBridgeProvider.buildSpatialBridge(spatial, latitudeField, longitudeField);
    } catch (Exception e) {
      throw LOG.unableToInstantiateSpatial(clazz.getName(), e);
    }
    if (bridge == null) {
      throw LOG.unableToInstantiateSpatial(clazz.getName(), null);
    }

    return bridge;
  }

  public FieldBridge guessType(
      Field field,
      NumericField numericField,
      XMember member,
      ReflectionManager reflectionManager,
      ServiceManager serviceManager) {
    FieldBridge bridge = findExplicitFieldBridge(field, member, reflectionManager);
    if (bridge != null) {
      return bridge;
    }

    ExtendedBridgeProvider.ExtendedBridgeProviderContext context =
        new XMemberBridgeProviderContext(member, numericField, reflectionManager, serviceManager);
    ContainerType containerType = getContainerType(member, reflectionManager);

    // We evaluate annotation-based providers first, as Tika at least needs priority over the
    // default providers because it might override the type for String
    // TODO: introduce the notion of bridge contributor annotations to cope with this in the future
    for (BridgeProvider provider : annotationBasedProviders) {
      bridge = getFieldBridgeFromBridgeProvider(provider, context, containerType);
      if (bridge != null) {
        return bridge;
      }
    }

    // walk through all regular bridges and if multiple match
    // raise an exception containing the conflicting bridges
    StringBuilder multipleMatchError = null;
    BridgeProvider initialMatchingBridgeProvider = null;
    for (BridgeProvider provider : regularProviders) {
      FieldBridge createdBridge =
          getFieldBridgeFromBridgeProvider(provider, context, containerType);
      if (createdBridge != null) {
        // oops we found a duplicate
        if (bridge != null) {
          // first duplicate, add the initial bridge
          if (multipleMatchError == null) {
            multipleMatchError =
                new StringBuilder("\n")
                    .append("FieldBridge: ")
                    .append(bridge)
                    .append(" - BridgeProvider: ")
                    .append(initialMatchingBridgeProvider.getClass());
          }
          multipleMatchError
              .append("\n")
              .append("FieldBridge: ")
              .append(createdBridge)
              .append(" - BridgeProvider: ")
              .append(provider.getClass());
        } else {
          bridge = createdBridge;
          initialMatchingBridgeProvider = provider;
        }
      }
    }
    if (multipleMatchError != null) {
      throw LOG.multipleMatchingFieldBridges(
          member, member.getType(), multipleMatchError.toString());
    }
    if (bridge != null) {
      return bridge;
    }

    throw LOG.unableToGuessFieldBridge(member.getType().getName(), member.getName());
  }

  private ContainerType getContainerType(XMember member, ReflectionManager reflectionManager) {
    if (!member.isAnnotationPresent(IndexedEmbedded.class)) {
      return ContainerType.SINGLE;
    }
    if (member.isArray()) {
      return ContainerType.ARRAY;
    }
    Class<?> typeClass = reflectionManager.toClass(member.getType());
    if (Iterable.class.isAssignableFrom(typeClass)) {
      return ContainerType.ITERABLE;
    }
    if (member.isCollection() && Map.class.equals(member.getCollectionClass())) {
      return ContainerType.MAP;
    }
    // marked @IndexedEmbedded but not a container
    // => probably a @Field @IndexedEmbedded Foo foo;
    return ContainerType.SINGLE;
  }

  private FieldBridge getFieldBridgeFromBridgeProvider(
      BridgeProvider bridgeProvider,
      ExtendedBridgeProvider.ExtendedBridgeProviderContext context,
      ContainerType containerType) {
    FieldBridge bridge = bridgeProvider.provideFieldBridge(context);
    if (bridge == null) {
      return null;
    }
    populateReturnType(context.getReturnType(), bridge.getClass(), bridge);
    switch (containerType) {
      case SINGLE:
        return bridge;
      case ITERABLE:
        // Should we cache these per bridge instance?
        // would make sense at least for the known built-in bridges
        // but is that worth it?
        return new BuiltinIterableBridge(bridge);
      case ARRAY:
        return new BuiltinArrayBridge(bridge);
      case MAP:
        return new BuiltinMapBridge(bridge);
      default:
        throw new AssertionFailure("Unknown ContainerType " + containerType);
    }
  }

  /**
   * Extract the field bridge from @Field.bridge or @FieldBridge. Return null if none is present.
   */
  private FieldBridge findExplicitFieldBridge(
      Field field, XMember member, ReflectionManager reflectionManager) {
    // TODO Should explicit FieldBridge also support the notion of container like numeric fields and
    // provider based fields?
    //     the main problem is that support for a bridge accepting a Map would break
    FieldBridge bridge = null;
    org.hibernate.search.annotations.FieldBridge bridgeAnn;
    // @Field bridge has priority over @FieldBridge
    if (field != null && void.class != field.bridge().impl()) {
      bridgeAnn = field.bridge();
    } else {
      bridgeAnn = member.getAnnotation(org.hibernate.search.annotations.FieldBridge.class);
    }
    if (bridgeAnn != null) {
      bridge = doExtractType(bridgeAnn, member, reflectionManager);
    }
    return bridge;
  }

  private FieldBridge doExtractType(
      org.hibernate.search.annotations.FieldBridge bridgeAnn,
      XMember member,
      ReflectionManager reflectionManager) {
    return doExtractType(bridgeAnn, member.getName(), reflectionManager.toClass(member.getType()));
  }

  private FieldBridge doExtractType(
      org.hibernate.search.annotations.FieldBridge bridgeAnn,
      String appliedOnName,
      Class<?> appliedOnType) {
    assert bridgeAnn != null : "@FieldBridge instance cannot be null";
    FieldBridge bridge;
    Class<?> impl = bridgeAnn.impl();
    if (impl == void.class) {
      throw LOG.noImplementationClassInFieldBridge(appliedOnName);
    }
    try {
      Object instance = impl.newInstance();
      if (FieldBridge.class.isAssignableFrom(impl)) {
        bridge = (FieldBridge) instance;
      } else if (TwoWayStringBridge.class.isAssignableFrom(impl)) {
        bridge = new TwoWayString2FieldBridgeAdaptor((TwoWayStringBridge) instance);
      } else if (org.hibernate.search.bridge.StringBridge.class.isAssignableFrom(impl)) {
        bridge = new String2FieldBridgeAdaptor((org.hibernate.search.bridge.StringBridge) instance);
      } else {
        throw LOG.noFieldBridgeInterfaceImplementedByFieldBridge(impl.getName(), appliedOnName);
      }
      if (bridgeAnn.params().length > 0 && ParameterizedBridge.class.isAssignableFrom(impl)) {
        Map<String, String> params = new HashMap<String, String>(bridgeAnn.params().length);
        for (Parameter param : bridgeAnn.params()) {
          params.put(param.name(), param.value());
        }
        ((ParameterizedBridge) instance).setParameterValues(params);
      }
      populateReturnType(appliedOnType, impl, instance);
    } catch (Exception e) {
      throw LOG.unableToInstantiateFieldBridge(appliedOnName, appliedOnType.getName(), e);
    }
    return bridge;
  }

  private void populateReturnType(
      Class<?> appliedOnType, Class<?> bridgeType, Object bridgeInstance) {
    if (AppliedOnTypeAwareBridge.class.isAssignableFrom(bridgeType)) {
      ((AppliedOnTypeAwareBridge) bridgeInstance).setAppliedOnType(appliedOnType);
    }
  }

  /**
   * Takes a field bridge annotation and returns a {@code TwoWayFieldBridge} instance.
   *
   * @param fieldBridge the field bridge annotation
   * @param appliedOnType the type the bridge is applied on
   * @param reflectionManager the reflection manager instance
   * @return a TwoWayFieldBridge instance if the resolved FieldBridge is an instance of
   *     TwoWayFieldBridge.
   * @throws org.hibernate.search.exception.SearchException if the resolved FieldBridge is not an
   *     instance of a TwoWayFieldBridge.
   */
  public TwoWayFieldBridge extractTwoWayType(
      org.hibernate.search.annotations.FieldBridge fieldBridge,
      XClass appliedOnType,
      ReflectionManager reflectionManager) {
    FieldBridge fb = extractType(fieldBridge, appliedOnType, reflectionManager);
    if (fb instanceof TwoWayFieldBridge) {
      return (TwoWayFieldBridge) fb;
    } else {
      throw LOG.fieldBridgeNotAnInstanceof(TwoWayFieldBridge.class.getSimpleName());
    }
  }

  /**
   * This extracts and instantiates the implementation class from a {@code FieldBridge} annotation.
   *
   * @param fieldBridgeAnnotation the FieldBridge annotation
   * @param appliedOnType the type the bridge is applied on
   * @param reflectionManager The reflection manager instance
   * @return FieldBridge
   */
  private FieldBridge extractType(
      org.hibernate.search.annotations.FieldBridge fieldBridgeAnnotation,
      XClass appliedOnType,
      ReflectionManager reflectionManager) {
    FieldBridge bridge = null;

    if (fieldBridgeAnnotation != null) {
      bridge =
          doExtractType(
              fieldBridgeAnnotation,
              appliedOnType.getName(),
              reflectionManager.toClass(appliedOnType));
    }

    if (bridge == null) {
      throw LOG.unableToDetermineClassBridge(appliedOnType.getName());
    }

    return bridge;
  }

  private enum ContainerType {
    SINGLE,
    ARRAY,
    ITERABLE,
    MAP,
  }
}
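/**
 * A minimal sketch of a custom BridgeProvider as consumed by {@code guessType(...)} above; this
 * class is not part of the original sources. It assumes BridgeProvider declares a single {@code
 * provideFieldBridge(BridgeProviderContext)} method whose context exposes {@code getReturnType()},
 * as the dispatch in {@code getFieldBridgeFromBridgeProvider(...)} suggests. Such a provider would
 * be picked up via the Java service loader, matching the {@code loadJavaServices(...)} call in the
 * constructor above.
 */
class UrlBridgeProvider implements BridgeProvider {

  @Override
  public FieldBridge provideFieldBridge(BridgeProviderContext context) {
    if (java.net.URL.class.equals(context.getReturnType())) {
      // URLs round-trip cleanly through their external String form
      return new String2FieldBridgeAdaptor(
          new org.hibernate.search.bridge.StringBridge() {
            @Override
            public String objectToString(Object object) {
              return object == null ? null : object.toString();
            }
          });
    }
    return null; // null lets the remaining providers try to match the type
  }
}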
/** @author Emmanuel Bernard */
public class PersistenceContextObjectInitializer implements ObjectInitializer {
  private static final Log log = LoggerFactory.make();
  private final ObjectInitializer delegate;

  public PersistenceContextObjectInitializer(ObjectInitializer delegate) {
    this.delegate = delegate;
  }

  @Override
  public void initializeObjects(
      List<EntityInfo> entityInfos,
      LinkedHashMap<EntityInfoLoadKey, Object> idToObjectMap,
      ObjectInitializationContext objectInitializationContext) {
    // Do not check the timeout here, as the caller might be processing the last (and biggest)
    // batch in the list.
    final int numberOfObjectsToInitialize = entityInfos.size();

    if (numberOfObjectsToInitialize == 0) {
      if (log.isTraceEnabled()) {
        log.tracef("No object to initialize");
      }
      return;
    }

    SessionImplementor sessionImplementor =
        (SessionImplementor) objectInitializationContext.getSession();
    String entityName =
        objectInitializationContext
            .getSession()
            .getSessionFactory()
            .getClassMetadata(objectInitializationContext.getEntityType())
            .getEntityName();
    EntityPersister persister = sessionImplementor.getFactory().getEntityPersister(entityName);
    PersistenceContext persistenceContext = sessionImplementor.getPersistenceContext();

    // check the persistence context
    List<EntityInfo> remainingEntityInfos = new ArrayList<>(numberOfObjectsToInitialize);
    for (EntityInfo entityInfo : entityInfos) {
      if (ObjectLoaderHelper.areDocIdAndEntityIdIdentical(
          entityInfo, objectInitializationContext.getSession())) {
        EntityKey entityKey = sessionImplementor.generateEntityKey(entityInfo.getId(), persister);
        Object o = persistenceContext.getEntity(entityKey);
        if (o == null) {
          remainingEntityInfos.add(entityInfo);
        } else {
          EntityInfoLoadKey key = new EntityInfoLoadKey(entityInfo.getClazz(), entityInfo.getId());
          idToObjectMap.put(key, o);
        }
      } else {
        // if document id != entity id we can't use the persistence context lookup
        remainingEntityInfos.add(entityInfo);
      }
    }

    // only the entities not found in the persistence context remain to be initialized
    final int remainingSize = remainingEntityInfos.size();
    if (log.isTraceEnabled()) {
      log.tracef(
          "Initialized %d objects out of %d in the persistence context",
          (Integer) (numberOfObjectsToInitialize - remainingSize),
          (Integer) numberOfObjectsToInitialize);
    }

    if (remainingSize > 0) {
      delegate.initializeObjects(remainingEntityInfos, idToObjectMap, objectInitializationContext);
    }
  }
}
/**
 * This implementation is never directly exposed to the user, it is always wrapped into a {@link
 * org.hibernate.search.engine.impl.MutableSearchFactory}
 *
 * @author Emmanuel Bernard
 */
public class ImmutableSearchFactory
    implements ExtendedSearchIntegratorWithShareableState, WorkerBuildContext {

  static {
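    // touching Version triggers its static initializer, which logs the library version at startup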
    Version.touch();
  }

  private static final Log log = LoggerFactory.make();

  private final Map<Class<?>, EntityIndexBinding> indexBindingForEntities;
  private final Map<Class<?>, DocumentBuilderContainedEntity> documentBuildersContainedEntities;
  /** Lazily populated map of type descriptors */
  private final ConcurrentHashMap<Class<?>, IndexedTypeDescriptor> indexedTypeDescriptors;

  private final Worker worker;
  private final Map<String, FilterDef> filterDefinitions;
  private final FilterCachingStrategy filterCachingStrategy;
  private final Map<String, Analyzer> analyzers;
  private final AtomicBoolean stopped = new AtomicBoolean(false);
  private final int cacheBitResultsSize;
  private final Properties configurationProperties;
  private final PolymorphicIndexHierarchy indexHierarchy;
  private final StatisticsImpl statistics;
  private final boolean transactionManagerExpected;
  private final IndexManagerHolder allIndexesManager;
  private final ErrorHandler errorHandler;
  private final IndexingMode indexingMode;
  private final ServiceManager serviceManager;
  private final boolean enableDirtyChecks;
  private final DefaultIndexReaderAccessor indexReaderAccessor;
  private final InstanceInitializer instanceInitializer;
  private final TimeoutExceptionFactory timeoutExceptionFactory;
  private final TimingSource timingSource;
  private final SearchMapping mapping;
  private final boolean indexMetadataIsComplete;
  private final boolean isDeleteByTermEnforced;
  private final boolean isIdProvidedImplicit;
  private final String statisticsMBeanName;
  private final IndexManagerFactory indexManagerFactory;
  private final ObjectLookupMethod defaultObjectLookupMethod;
  private final DatabaseRetrievalMethod defaultDatabaseRetrievalMethod;
  private final boolean enlistWorkerInTransaction;
  private final boolean indexUninvertingAllowed;

  public ImmutableSearchFactory(SearchFactoryState state) {
    this.analyzers = state.getAnalyzers();
    this.cacheBitResultsSize = state.getCacheBitResultsSize();
    this.configurationProperties = state.getConfigurationProperties();
    this.indexBindingForEntities = state.getIndexBindings();
    this.documentBuildersContainedEntities = state.getDocumentBuildersContainedEntities();
    this.filterCachingStrategy = state.getFilterCachingStrategy();
    this.filterDefinitions = state.getFilterDefinitions();
    this.indexHierarchy = state.getIndexHierarchy();
    this.indexingMode = state.getIndexingMode();
    this.worker = state.getWorker();
    this.serviceManager = state.getServiceManager();
    this.transactionManagerExpected = state.isTransactionManagerExpected();
    this.allIndexesManager = state.getAllIndexesManager();
    this.errorHandler = state.getErrorHandler();
    this.instanceInitializer = state.getInstanceInitializer();
    this.timeoutExceptionFactory = state.getDefaultTimeoutExceptionFactory();
    this.timingSource = state.getTimingSource();
    this.mapping = state.getProgrammaticMapping();
    if (state.getStatistics() == null) {
      this.statistics = new StatisticsImpl(this);
      boolean statsEnabled =
          ConfigurationParseHelper.getBooleanValue(
              configurationProperties, Environment.GENERATE_STATS, false);
      this.statistics.setStatisticsEnabled(statsEnabled);
    } else {
      this.statistics = (StatisticsImpl) state.getStatistics();
    }
    this.indexMetadataIsComplete = state.isIndexMetadataComplete();
    this.isDeleteByTermEnforced = state.isDeleteByTermEnforced();
    this.isIdProvidedImplicit = state.isIdProvidedImplicit();
    this.indexManagerFactory = state.getIndexManagerFactory();

    this.enableDirtyChecks =
        ConfigurationParseHelper.getBooleanValue(
            configurationProperties, Environment.ENABLE_DIRTY_CHECK, true);

    if (isJMXEnabled()) {
      this.statisticsMBeanName = registerMBeans();
    } else {
      this.statisticsMBeanName = null;
    }

    this.indexReaderAccessor = new DefaultIndexReaderAccessor(this);
    this.indexedTypeDescriptors = new ConcurrentHashMap<>();

    this.defaultObjectLookupMethod = determineDefaultObjectLookupMethod();
    this.defaultDatabaseRetrievalMethod = determineDefaultDatabaseRetrievalMethod();
    this.enlistWorkerInTransaction =
        ConfigurationParseHelper.getBooleanValue(
            configurationProperties, Environment.WORKER_ENLIST_IN_TRANSACTION, false);

    this.indexUninvertingAllowed =
        ConfigurationParseHelper.getBooleanValue(
            configurationProperties, Environment.INDEX_UNINVERTING_ALLOWED, true);
  }

  private ObjectLookupMethod determineDefaultObjectLookupMethod() {
    String objectLookupMethod =
        configurationProperties.getProperty(Environment.OBJECT_LOOKUP_METHOD);
    if (objectLookupMethod == null) {
      return ObjectLookupMethod.SKIP; // default
    } else {
      try {
        return Enum.valueOf(ObjectLookupMethod.class, objectLookupMethod.toUpperCase(Locale.ROOT));
      } catch (IllegalArgumentException e) {
        throw log.invalidPropertyValue(objectLookupMethod, Environment.OBJECT_LOOKUP_METHOD);
      }
    }
  }

  private DatabaseRetrievalMethod determineDefaultDatabaseRetrievalMethod() {
    String databaseRetrievalMethod =
        configurationProperties.getProperty(Environment.DATABASE_RETRIEVAL_METHOD);
    if (databaseRetrievalMethod == null) {
      return DatabaseRetrievalMethod.QUERY; // default
    } else {
      try {
        return Enum.valueOf(
            DatabaseRetrievalMethod.class, databaseRetrievalMethod.toUpperCase(Locale.ROOT));
      } catch (IllegalArgumentException e) {
        throw log.invalidPropertyValue(
            databaseRetrievalMethod, Environment.DATABASE_RETRIEVAL_METHOD);
      }
    }
  }
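
  // Illustrative sketch, not part of the original class: the two determine* methods above
  // share the same null-check / parse / fail-fast shape. A generic helper like this
  // (hypothetical name, relying only on members already in scope) could express it once.
  private <E extends Enum<E>> E getEnumProperty(
      String propertyKey, Class<E> enumType, E defaultValue) {
    String value = configurationProperties.getProperty(propertyKey);
    if (value == null) {
      return defaultValue; // property absent: fall back to the documented default
    }
    try {
      return Enum.valueOf(enumType, value.toUpperCase(Locale.ROOT));
    } catch (IllegalArgumentException e) {
      throw log.invalidPropertyValue(value, propertyKey);
    }
  }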

  @Override
  public Map<String, FilterDef> getFilterDefinitions() {
    return filterDefinitions;
  }

  @Override
  @Deprecated
  public String getIndexingStrategy() {
    return indexingMode.toExternalRepresentation();
  }

  @Override
  public IndexingMode getIndexingMode() {
    return indexingMode;
  }

  @Override
  public void close() {
    if (stopped.compareAndSet(false, true)) { // make sure we only stop once
      try {
        worker.close();
      } catch (Exception e) {
        log.workerException(e);
      }

      this.allIndexesManager.stop();
      this.timingSource.stop();

      serviceManager.releaseAllServices();

      for (Analyzer an : this.analyzers.values()) {
        an.close();
      }
      for (AbstractDocumentBuilder documentBuilder :
          this.documentBuildersContainedEntities.values()) {
        documentBuilder.close();
      }
      for (EntityIndexBinding entityIndexBinding : this.indexBindingForEntities.values()) {
        entityIndexBinding.getDocumentBuilder().close();
      }

      // unregister statistic mbean
      if (statisticsMBeanName != null) {
        JMXRegistrar.unRegisterMBean(statisticsMBeanName);
      }
    }
  }

  @Override
  public HSQuery createHSQuery() {
    return new LuceneHSQuery(this);
  }

  @Override
  public Map<Class<?>, DocumentBuilderContainedEntity> getDocumentBuildersContainedEntities() {
    return documentBuildersContainedEntities;
  }

  @Override
  public Map<Class<?>, EntityIndexBinding> getIndexBindings() {
    return indexBindingForEntities;
  }

  @Override
  public EntityIndexBinding getIndexBinding(Class<?> entityType) {
    return indexBindingForEntities.get(entityType);
  }

  @SuppressWarnings("unchecked")
  @Override
  public DocumentBuilderContainedEntity getDocumentBuilderContainedEntity(Class entityType) {
    return documentBuildersContainedEntities.get(entityType);
  }

  @Override
  public void addClasses(Class<?>... classes) {
    throw new AssertionFailure(
        "Cannot add classes to an " + ImmutableSearchFactory.class.getName());
  }

  @Override
  public Worker getWorker() {
    return worker;
  }

  @Override
  public void optimize() {
    for (IndexManager im : this.allIndexesManager.getIndexManagers()) {
      im.optimize();
    }
  }

  @Override
  public void optimize(Class entityType) {
    EntityIndexBinding entityIndexBinding = getSafeIndexBindingForEntity(entityType);
    for (IndexManager im : entityIndexBinding.getIndexManagers()) {
      im.optimize();
    }
  }

  @Override
  public Analyzer getAnalyzer(String name) {
    final Analyzer analyzer = analyzers.get(name);
    if (analyzer == null) {
      throw new SearchException("Unknown Analyzer definition: " + name);
    }
    return analyzer;
  }

  @Override
  public Analyzer getAnalyzer(Class<?> clazz) {
    EntityIndexBinding entityIndexBinding = getSafeIndexBindingForEntity(clazz);
    DocumentBuilderIndexedEntity builder = entityIndexBinding.getDocumentBuilder();
    return builder.getAnalyzer();
  }

  @Override
  public QueryContextBuilder buildQueryBuilder() {
    return new ConnectedQueryContextBuilder(this);
  }

  @Override
  public Statistics getStatistics() {
    return statistics;
  }

  @Override
  public StatisticsImplementor getStatisticsImplementor() {
    return statistics;
  }

  @Override
  public FilterCachingStrategy getFilterCachingStrategy() {
    return filterCachingStrategy;
  }

  @Override
  public Map<String, Analyzer> getAnalyzers() {
    return analyzers;
  }

  @Override
  public int getCacheBitResultsSize() {
    return cacheBitResultsSize;
  }

  @Override
  public Properties getConfigurationProperties() {
    return configurationProperties;
  }

  @Override
  public FilterDef getFilterDefinition(String name) {
    return filterDefinitions.get(name);
  }

  @Override
  public int getFilterCacheBitResultsSize() {
    return cacheBitResultsSize;
  }

  @Override
  public Set<Class<?>> getIndexedTypesPolymorphic(Class<?>[] classes) {
    return indexHierarchy.getIndexedClasses(classes);
  }

  @Override
  public BatchBackend makeBatchBackend(MassIndexerProgressMonitor progressMonitor) {
    return new DefaultBatchBackend(this, progressMonitor);
  }

  @Override
  public PolymorphicIndexHierarchy getIndexHierarchy() {
    return indexHierarchy;
  }

  @Override
  public ServiceManager getServiceManager() {
    return serviceManager;
  }

  @Override
  public DatabaseRetrievalMethod getDefaultDatabaseRetrievalMethod() {
    return defaultDatabaseRetrievalMethod;
  }

  @Override
  public ObjectLookupMethod getDefaultObjectLookupMethod() {
    return defaultObjectLookupMethod;
  }

  @Override
  public ExtendedSearchIntegrator getUninitializedSearchIntegrator() {
    return this;
  }

  @Override
  public boolean isJMXEnabled() {
    String enableJMX = getConfigurationProperties().getProperty(Environment.JMX_ENABLED);
    return "true".equalsIgnoreCase(enableJMX);
  }

  private String registerMBeans() {
    String mbeanNameSuffix = getConfigurationProperties().getProperty(Environment.JMX_BEAN_SUFFIX);
    String objectName =
        JMXRegistrar.buildMBeanName(
            StatisticsInfoMBean.STATISTICS_MBEAN_OBJECT_NAME, mbeanNameSuffix);

    // since the SearchIntegrator is mutable we might have an already existing MBean which we
    // have to unregister first
    if (JMXRegistrar.isNameRegistered(objectName)) {
      JMXRegistrar.unRegisterMBean(objectName);
    }
    JMXRegistrar.StatisticsInfo statisticsInfo = new JMXRegistrar.StatisticsInfo(statistics);
    JMXRegistrar.registerMBean(statisticsInfo, StatisticsInfoMBean.class, objectName);
    return objectName;
  }

  @Override
  public boolean isDirtyChecksEnabled() {
    return enableDirtyChecks;
  }

  @Override
  public boolean isStopped() {
    return stopped.get();
  }

  @Override
  public boolean isTransactionManagerExpected() {
    return this.transactionManagerExpected;
  }

  @Override
  public IndexManagerHolder getAllIndexesManager() {
    return getIndexManagerHolder();
  }

  @Override
  public IndexManagerHolder getIndexManagerHolder() {
    return this.allIndexesManager;
  }

  public EntityIndexBinding getSafeIndexBindingForEntity(Class<?> entityType) {
    if (entityType == null) {
      throw log.nullIsInvalidIndexedType();
    }
    EntityIndexBinding entityIndexBinding = getIndexBinding(entityType);
    if (entityIndexBinding == null) {
      throw log.notAnIndexedType(entityType.getName());
    }
    return entityIndexBinding;
  }

  @Override
  public ErrorHandler getErrorHandler() {
    return this.errorHandler;
  }

  @Override
  public IndexReaderAccessor getIndexReaderAccessor() {
    return indexReaderAccessor;
  }

  @Override
  public IndexedTypeDescriptor getIndexedTypeDescriptor(Class<?> entityType) {
    IndexedTypeDescriptor typeDescriptor;
    if (indexedTypeDescriptors.containsKey(entityType)) {
      typeDescriptor = indexedTypeDescriptors.get(entityType);
    } else {
      EntityIndexBinding indexBinder = indexBindingForEntities.get(entityType);
      IndexedTypeDescriptor indexedTypeDescriptor;
      if (indexBinder == null) {
        indexedTypeDescriptor = new IndexedTypeDescriptorForUnindexedType(entityType);
      } else {
        indexedTypeDescriptor =
            new IndexedTypeDescriptorImpl(
                indexBinder.getDocumentBuilder().getMetadata(), indexBinder.getIndexManagers());
      }
      indexedTypeDescriptors.put(entityType, indexedTypeDescriptor);
      typeDescriptor = indexedTypeDescriptor;
    }
    return typeDescriptor;
  }
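
  // Illustrative alternative, not part of the original class (hypothetical method name):
  // on Java 8+ the containsKey/get/put cache above collapses into a single atomic call.
  // Under contention the original form may occasionally build a descriptor twice; both
  // forms always return a consistent, fully-built descriptor.
  private IndexedTypeDescriptor getIndexedTypeDescriptorAtomically(Class<?> entityType) {
    return indexedTypeDescriptors.computeIfAbsent(
        entityType,
        type -> {
          EntityIndexBinding binding = indexBindingForEntities.get(type);
          return binding == null
              ? new IndexedTypeDescriptorForUnindexedType(type)
              : new IndexedTypeDescriptorImpl(
                  binding.getDocumentBuilder().getMetadata(), binding.getIndexManagers());
        });
  }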

  @Override
  public Set<Class<?>> getIndexedTypes() {
    return indexBindingForEntities.keySet();
  }

  @Override
  public InstanceInitializer getInstanceInitializer() {
    return instanceInitializer;
  }

  @Override
  public TimeoutExceptionFactory getDefaultTimeoutExceptionFactory() {
    return timeoutExceptionFactory;
  }

  @Override
  public TimingSource getTimingSource() {
    return this.timingSource;
  }

  @Override
  public SearchMapping getProgrammaticMapping() {
    return mapping;
  }

  @Override
  public boolean isIndexMetadataComplete() {
    return this.indexMetadataIsComplete;
  }

  @Override
  public boolean isDeleteByTermEnforced() {
    return this.isDeleteByTermEnforced;
  }

  @Override
  public boolean isIdProvidedImplicit() {
    return isIdProvidedImplicit;
  }

  @Override
  public IndexManagerFactory getIndexManagerFactory() {
    return indexManagerFactory;
  }

  @Override
  public boolean enlistWorkerInTransaction() {
    return enlistWorkerInTransaction;
  }

  @Override
  public IndexManager getIndexManager(String indexName) {
    return getIndexManagerHolder().getIndexManager(indexName);
  }

  @Override
  public boolean isIndexUninvertingAllowed() {
    return indexUninvertingAllowed;
  }

  @SuppressWarnings("unchecked")
  @Override
  public <T> T unwrap(Class<T> cls) {
    if (SearchIntegrator.class.isAssignableFrom(cls)
        || ExtendedSearchIntegrator.class.isAssignableFrom(cls)
        || SearchFactoryState.class.isAssignableFrom(cls)) {
      return (T) this;
    } else {
      throw new SearchException("Can not unwrap an ImmutableSearchFactory into a '" + cls + "'");
    }
  }
}
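/**
 * Illustrative usage sketch, not part of the original sources: it exercises the unwrap()
 * contract implemented above. Callers obtain the factory through the regular bootstrap
 * (never by constructing it directly) and may unwrap it into the supported integrator
 * interfaces; any other target fails fast.
 */
final class UnwrapContractSketch {

  static ExtendedSearchIntegrator toExtended(SearchIntegrator integrator) {
    // supported target: returns the same underlying instance
    return integrator.unwrap(ExtendedSearchIntegrator.class);
  }

  static void rejectsArbitraryTargets(SearchIntegrator integrator) {
    try {
      integrator.unwrap(String.class); // String is not an integrator contract
    } catch (SearchException expected) {
      // an ImmutableSearchFactory refuses to unwrap into unrelated types
    }
  }
}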
/**
 * Apply the operations to Lucene directories.
 *
 * @author Emmanuel Bernard
 * @author Hardy Ferentschik
 * @author John Griffin
 * @author Sanne Grinovero
 */
final class LuceneBackendQueueTask implements Runnable {

  private static final Log log = LoggerFactory.make();

  private final Lock modificationLock;
  private final LuceneBackendResources resources;
  private final List<LuceneWork> workList;
  private final IndexingMonitor monitor;

  LuceneBackendQueueTask(
      List<LuceneWork> workList, LuceneBackendResources resources, IndexingMonitor monitor) {
    this.workList = workList;
    this.resources = resources;
    this.monitor = monitor;
    this.modificationLock = resources.getParallelModificationLock();
  }

  @Override
  public void run() {
    modificationLock.lock();
    try {
      applyUpdates();
    } catch (InterruptedException e) {
      log.interruptedWhileWaitingForIndexActivity(e);
      Thread.currentThread().interrupt();
      handleException(e);
    } catch (Exception e) {
      log.backendError(e);
      handleException(e);
    } finally {
      modificationLock.unlock();
    }
  }

  private void handleException(Exception e) {
    ErrorContextBuilder builder = new ErrorContextBuilder();
    builder.allWorkToBeDone(workList);
    builder.errorThatOccurred(e);
    resources.getErrorHandler().handle(builder.createErrorContext());
  }

  /**
   * Applies all modifications to the index in parallel using the workers executor
   *
   * @throws ExecutionException
   * @throws InterruptedException
   */
  private void applyUpdates() throws InterruptedException, ExecutionException {
    AbstractWorkspaceImpl workspace = resources.getWorkspace();

    ErrorContextBuilder errorContextBuilder = new ErrorContextBuilder();
    errorContextBuilder.allWorkToBeDone(workList);

    IndexWriter indexWriter = workspace.getIndexWriter(errorContextBuilder);
    if (indexWriter == null) {
      log.cannotOpenIndexWriterCausePreviousError();
      return;
    }

    boolean taskExecutionSuccessful = true;

    try {
      if (workList.size() == 1) {
        taskExecutionSuccessful = runSingleTask(workList.get(0), indexWriter, errorContextBuilder);
      } else {
        taskExecutionSuccessful = runMultipleTasks(indexWriter, errorContextBuilder);
      }
      if (!taskExecutionSuccessful) {
        resources.getErrorHandler().handle(errorContextBuilder.createErrorContext());
      } else {
        workspace.optimizerPhase();
      }
    } finally {
      workspace.afterTransactionApplied(!taskExecutionSuccessful, false);
    }
  }

  /**
   * Applies each modification in parallel using the backend workers pool
   *
   * @throws InterruptedException
   */
  private boolean runMultipleTasks(
      final IndexWriter indexWriter, final ErrorContextBuilder errorContextBuilder)
      throws InterruptedException {
    final int queueSize = workList.size();
    final ExecutorService executor = resources.getWorkersExecutor();
    final Future<LuceneWork>[] submittedTasks = new Future[queueSize];

    for (int i = 0; i < queueSize; i++) {
      LuceneWork luceneWork = workList.get(i);
      SingleTaskRunnable task = new SingleTaskRunnable(luceneWork, resources, indexWriter, monitor);
      submittedTasks[i] = executor.submit(task, luceneWork);
    }

    boolean allTasksSuccessful = true;

    // now wait for all tasks to complete before releasing our lock
    // (this thread waits even in async backend mode)
    for (int i = 0; i < queueSize; i++) {
      Future<LuceneWork> task = submittedTasks[i];
      try {
        LuceneWork work = task.get();
        errorContextBuilder.workCompleted(work);
      } catch (ExecutionException e) {
        errorContextBuilder.addWorkThatFailed(workList.get(i));
        errorContextBuilder.errorThatOccurred(e.getCause());
        allTasksSuccessful = false;
      }
    }

    return allTasksSuccessful;
  }

  /**
   * Applies a single modification using the caller's thread to avoid pointless context switching.
   */
  private boolean runSingleTask(
      final LuceneWork luceneWork,
      final IndexWriter indexWriter,
      final ErrorContextBuilder errorContextBuilder) {
    try {
      SingleTaskRunnable.performWork(luceneWork, resources, indexWriter, monitor);
      return true;
    } catch (RuntimeException re) {
      errorContextBuilder.errorThatOccurred(re);
      errorContextBuilder.addWorkThatFailed(luceneWork);
      return false;
    }
  }
}
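/**
 * Illustrative usage sketch, not part of the original sources: how a backend might hand a
 * changeset to the task above. It assumes same-package access to the package-private
 * constructor and a dedicated queueing ExecutorService, distinct from the worker pool the
 * task fans out to (submitting to the same bounded pool could starve the fan-out).
 */
final class QueueTaskSubmissionSketch {

  Future<?> submit(
      ExecutorService queueingExecutor, // assumption: a dedicated, single-purpose executor
      List<LuceneWork> changeset,
      LuceneBackendResources resources,
      IndexingMonitor monitor) {
    // one task per changeset: the task acquires the parallel-modification lock, applies
    // each LuceneWork (in parallel for multi-work changesets) and routes any failure to
    // the backend's ErrorHandler
    return queueingExecutor.submit(new LuceneBackendQueueTask(changeset, resources, monitor));
  }
}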
// DO NOT AUTO INDENT THIS FILE.
// MY DSL IS BEAUTIFUL, DUMB INDENTATION IS SCREWING IT UP
public class DSLTest extends SearchTestBase {
  private static final Log log = LoggerFactory.make();

  private final Calendar calendar = Calendar.getInstance();

  private FullTextSession fullTextSession;
  private Date february;

  @Before
  public void setUp() throws Exception {
    super.setUp();
    Session session = openSession();
    fullTextSession = Search.getFullTextSession(session);
    indexTestData();
  }

  @After
  public void tearDown() throws Exception {
    super.tearDown();
  }

  @Test
  public void testUseOfFieldBridge() throws Exception {
    Transaction transaction = fullTextSession.beginTransaction();
    final QueryBuilder monthQb =
        fullTextSession.getSearchFactory().buildQueryBuilder().forEntity(Month.class).get();

    Query query = monthQb.keyword().onField("monthValue").matching(2).createQuery();
    assertEquals(1, fullTextSession.createFullTextQuery(query, Month.class).getResultSize());

    transaction.commit();
  }

  @Test
  public void testUseOfCustomFieldBridgeInstance() throws Exception {
    Transaction transaction = fullTextSession.beginTransaction();
    final QueryBuilder monthQb =
        fullTextSession.getSearchFactory().buildQueryBuilder().forEntity(Month.class).get();

    ConnectedTermMatchingContext termMatchingContext =
        (ConnectedTermMatchingContext) monthQb.keyword().onField(MonthClassBridge.FIELD_NAME_1);

    Query query =
        termMatchingContext
            .withFieldBridge(new String2FieldBridgeAdaptor(new RomanNumberFieldBridge()))
            .matching(2)
            .createQuery();

    assertEquals(1, fullTextSession.createFullTextQuery(query, Month.class).getResultSize());
    transaction.commit();
  }

  @Test
  public void testUseOfMultipleCustomFieldBridgeInstances() throws Exception {
    Transaction transaction = fullTextSession.beginTransaction();
    final QueryBuilder monthQb =
        fullTextSession.getSearchFactory().buildQueryBuilder().forEntity(Month.class).get();

    // Rather complex code here as we're not exposing the #withFieldBridge methods on the public
    // interface
    final ConnectedTermMatchingContext field1Context =
        (ConnectedTermMatchingContext) monthQb.keyword().onField(MonthClassBridge.FIELD_NAME_1);

    final ConnectedTermMatchingContext field2Context =
        (ConnectedTermMatchingContext)
            field1Context
                .withFieldBridge(new String2FieldBridgeAdaptor(new RomanNumberFieldBridge()))
                .andField(MonthClassBridge.FIELD_NAME_2);

    Query query =
        field2Context
            .withFieldBridge(new String2FieldBridgeAdaptor(new RomanNumberFieldBridge()))
            .matching(2)
            .createQuery();

    assertEquals(1, fullTextSession.createFullTextQuery(query, Month.class).getResultSize());
    transaction.commit();
  }

  @Test
  public void testTermQueryOnAnalyzer() throws Exception {
    Transaction transaction = fullTextSession.beginTransaction();
    final QueryBuilder monthQb =
        fullTextSession.getSearchFactory().buildQueryBuilder().forEntity(Month.class).get();

    // regular term query
    Query query = monthQb.keyword().onField("mythology").matching("cold").createQuery();

    assertEquals(0, fullTextSession.createFullTextQuery(query, Month.class).getResultSize());

    // term query based on several words
    query = monthQb.keyword().onField("mythology").matching("colder darker").createQuery();

    assertEquals(1, fullTextSession.createFullTextQuery(query, Month.class).getResultSize());

    // term query applying the analyzer and generating one term per word
    query = monthQb.keyword().onField("mythology_stem").matching("snowboard").createQuery();

    assertEquals(1, fullTextSession.createFullTextQuery(query, Month.class).getResultSize());

    // term query applying the analyzer and generating several terms per word
    query = monthQb.keyword().onField("mythology_ngram").matching("snobored").createQuery();

    assertEquals(1, fullTextSession.createFullTextQuery(query, Month.class).getResultSize());

    // term query not using analyzers
    query = monthQb.keyword().onField("mythology").ignoreAnalyzer().matching("Month").createQuery();

    assertEquals(0, fullTextSession.createFullTextQuery(query, Month.class).getResultSize());

    transaction.commit();
  }

  @Test
  public void testFuzzyAndWildcardQuery() throws Exception {
    Transaction transaction = fullTextSession.beginTransaction();
    final QueryBuilder monthQb =
        fullTextSession.getSearchFactory().buildQueryBuilder().forEntity(Month.class).get();

    // fuzzy search with custom threshold and prefix
    Query query =
        monthQb
            .keyword()
            .fuzzy()
            .withThreshold(.8f)
            .withPrefixLength(1)
            .onField("mythology")
            .matching("calder")
            .createQuery();

    assertEquals(1, fullTextSession.createFullTextQuery(query, Month.class).getResultSize());

    // fuzzy search on multiple fields
    query =
        monthQb
            .keyword()
            .fuzzy()
            .withThreshold(.8f)
            .withPrefixLength(1)
            .onFields("mythology", "history")
            .matching("showboarding")
            .createQuery();

    assertEquals(2, fullTextSession.createFullTextQuery(query, Month.class).getResultSize());

    // wildcard query
    query = monthQb.keyword().wildcard().onField("mythology").matching("mon*").createQuery();

    assertEquals(3, fullTextSession.createFullTextQuery(query, Month.class).getResultSize());

    transaction.commit();
  }

  @Test
  @SuppressWarnings("unchecked")
  public void testQueryCustomization() throws Exception {
    Transaction transaction = fullTextSession.beginTransaction();
    final QueryBuilder monthQb =
        fullTextSession.getSearchFactory().buildQueryBuilder().forEntity(Month.class).get();

    // combined query: January and February both contain "whitening", but February's
    // occurrence is in a longer text
    Query query =
        monthQb
            .bool()
            .should(monthQb.keyword().onField("mythology").matching("whitening").createQuery())
            .should(monthQb.keyword().onField("history").matching("whitening").createQuery())
            .createQuery();

    List<Month> results = fullTextSession.createFullTextQuery(query, Month.class).list();
    assertEquals(2, results.size());
    assertEquals("January", results.get(0).getName());

    // boosted query: January and February both contain "whitening", but February's
    // occurrence is in a longer text; since history is boosted, February should come first
    query =
        monthQb
            .bool()
            .should(monthQb.keyword().onField("mythology").matching("whitening").createQuery())
            .should(
                monthQb
                    .keyword()
                    .onField("history")
                    .boostedTo(30)
                    .matching("whitening")
                    .createQuery())
            .createQuery();

    results = fullTextSession.createFullTextQuery(query, Month.class).list();
    assertEquals(2, results.size());
    assertEquals("February", results.get(0).getName());

    // FIXME add other method tests besides boostedTo

    transaction.commit();
  }

  @Test
  @SuppressWarnings("unchecked")
  public void testMultipleFields() throws Exception {
    Transaction transaction = fullTextSession.beginTransaction();
    final QueryBuilder monthQb =
        fullTextSession.getSearchFactory().buildQueryBuilder().forEntity(Month.class).get();

    // combined query: January and February both contain "whitening", but February's
    // occurrence is in a longer text
    Query query =
        monthQb
            .keyword()
            .onField("mythology")
            .andField("history")
            .matching("whitening")
            .createQuery();

    List<Month> results = fullTextSession.createFullTextQuery(query, Month.class).list();
    assertEquals(2, results.size());
    assertEquals("January", results.get(0).getName());

    // combined query: January and February both contain "whitening", but February's
    // occurrence is in a longer text
    query =
        monthQb
            .keyword()
            .onFields("mythology", "history")
            .boostedTo(30)
            .matching("whitening")
            .createQuery();

    results = fullTextSession.createFullTextQuery(query, Month.class).list();
    assertEquals(2, results.size());
    assertEquals("January", results.get(0).getName());

    // boosted query: January and February both contain "whitening", but February's
    // occurrence is in a longer text; since history is boosted, February should come first
    query =
        monthQb
            .keyword()
            .onField("mythology")
            .andField("history")
            .boostedTo(30)
            .matching("whitening")
            .createQuery();

    results = fullTextSession.createFullTextQuery(query, Month.class).list();
    assertEquals(2, results.size());
    assertEquals("February", results.get(0).getName());

    transaction.commit();
  }

  @Test
  @SuppressWarnings("unchecked")
  public void testBoolean() throws Exception {
    Transaction transaction = fullTextSession.beginTransaction();
    final QueryBuilder monthQb =
        fullTextSession.getSearchFactory().buildQueryBuilder().forEntity(Month.class).get();

    // must
    Query query =
        monthQb
            .bool()
            .must(monthQb.keyword().onField("mythology").matching("colder").createQuery())
            .createQuery();

    List<Month> results = fullTextSession.createFullTextQuery(query, Month.class).list();
    assertEquals(1, results.size());
    assertEquals("January", results.get(0).getName());

    // must not + all
    query =
        monthQb
            .bool()
            .should(monthQb.all().createQuery())
            .must(monthQb.keyword().onField("mythology").matching("colder").createQuery())
            .not()
            .createQuery();

    results = fullTextSession.createFullTextQuery(query, Month.class).list();
    assertEquals(2, results.size());
    assertEquals("February", results.get(0).getName());
    assertEquals("March", results.get(1).getName());

    // implicit must not + all (not recommended)
    query =
        monthQb
            .bool()
            .must(monthQb.keyword().onField("mythology").matching("colder").createQuery())
            .not()
            .createQuery();
    results = fullTextSession.createFullTextQuery(query, Month.class).list();
    assertEquals(2, results.size());
    assertEquals("February", results.get(0).getName());
    assertEquals("March", results.get(1).getName());

    // all except (recommended)
    query =
        monthQb
            .all()
            .except(monthQb.keyword().onField("mythology").matching("colder").createQuery())
            .createQuery();

    results = fullTextSession.createFullTextQuery(query, Month.class).list();
    assertEquals(2, results.size());
    assertEquals("February", results.get(0).getName());
    assertEquals("March", results.get(1).getName());

    transaction.commit();
  }

  @Test(expected = SearchException.class)
  public void testIllegalBooleanJunction() {
    final QueryBuilder monthQb =
        fullTextSession.getSearchFactory().buildQueryBuilder().forEntity(Month.class).get();
    // forgetting to set any condition on the boolean, an exception shall be thrown:
    BooleanJunction<BooleanJunction> booleanJunction = monthQb.bool();
    assertTrue(booleanJunction.isEmpty());
    Query query = booleanJunction.createQuery();
    Assert.fail("should not reach this point");
  }

  @Test
  public void testRangeQueryFromTo() throws Exception {
    Transaction transaction = fullTextSession.beginTransaction();
    final QueryBuilder monthQb =
        fullTextSession.getSearchFactory().buildQueryBuilder().forEntity(Month.class).get();

    calendar.setTimeZone(TimeZone.getTimeZone("UTC"));
    calendar.set(1900, 2, 12, 0, 0, 0);
    calendar.set(Calendar.MILLISECOND, 0);
    Date from = calendar.getTime();
    calendar.set(1910, 2, 12, 0, 0, 0);
    Date to = calendar.getTime();

    Query query =
        monthQb
            .range()
            .onField("estimatedCreation")
            .andField("justfortest")
            .ignoreFieldBridge()
            .ignoreAnalyzer()
            .from(from)
            .to(to)
            .excludeLimit()
            .createQuery();

    assertEquals(1, fullTextSession.createFullTextQuery(query, Month.class).getResultSize());

    query =
        monthQb
            .range()
            .onField("estimatedCreation")
            .ignoreFieldBridge()
            .andField("justfortest")
            .ignoreFieldBridge()
            .ignoreAnalyzer()
            .from(DateTools.round(from, DateTools.Resolution.MINUTE))
            .to(DateTools.round(to, DateTools.Resolution.MINUTE))
            .excludeLimit()
            .createQuery();
    assertEquals(1, fullTextSession.createFullTextQuery(query, Month.class).getResultSize());
    transaction.commit();
  }

  @Test
  public void testRangeQueryBelow() throws Exception {
    Transaction transaction = fullTextSession.beginTransaction();
    final QueryBuilder monthQb =
        fullTextSession.getSearchFactory().buildQueryBuilder().forEntity(Month.class).get();

    calendar.setTimeZone(TimeZone.getTimeZone("UTC"));
    calendar.set(10 + 1800, 2, 12, 0, 0, 0);
    Date to = calendar.getTime();

    Query query =
        monthQb
            .range()
            .onField("estimatedCreation")
            .andField("justfortest")
            .ignoreFieldBridge()
            .ignoreAnalyzer()
            .below(to)
            .createQuery();

    FullTextQuery hibQuery = fullTextSession.createFullTextQuery(query, Month.class);
    assertEquals(1, hibQuery.getResultSize());
    assertEquals("March", ((Month) hibQuery.list().get(0)).getName());

    query =
        monthQb
            .range()
            .onField("estimatedCreation")
            .ignoreFieldBridge()
            .andField("justfortest")
            .ignoreFieldBridge()
            .ignoreAnalyzer()
            .below(DateTools.round(to, DateTools.Resolution.MINUTE))
            .createQuery();

    hibQuery = fullTextSession.createFullTextQuery(query, Month.class);
    assertEquals(1, hibQuery.getResultSize());
    assertEquals("March", ((Month) hibQuery.list().get(0)).getName());

    query = monthQb.range().onField("raindropInMm").below(0.24d).createQuery();

    assertTrue(query instanceof NumericRangeQuery);

    List<?> results = fullTextSession.createFullTextQuery(query, Month.class).list();

    assertEquals("test range numeric ", 1, results.size());
    assertEquals("test range numeric ", "January", ((Month) results.get(0)).getName());

    transaction.commit();
  }

  @Test
  public void testRangeQueryAbove() throws Exception {
    Transaction transaction = fullTextSession.beginTransaction();
    final QueryBuilder monthQb =
        fullTextSession.getSearchFactory().buildQueryBuilder().forEntity(Month.class).get();

    calendar.setTimeZone(TimeZone.getTimeZone("UTC"));
    calendar.set(10 + 1900, 2, 12, 0, 0, 0);
    Date to = calendar.getTime();

    Query query =
        monthQb
            .range()
            .onField("estimatedCreation")
            .andField("justfortest")
            .ignoreFieldBridge()
            .ignoreAnalyzer()
            .above(to)
            .createQuery();
    FullTextQuery hibQuery = fullTextSession.createFullTextQuery(query, Month.class);
    assertEquals(1, hibQuery.getResultSize());
    assertEquals("February", ((Month) hibQuery.list().get(0)).getName());

    query =
        monthQb
            .range()
            .onField("estimatedCreation")
            .ignoreFieldBridge()
            .andField("justfortest")
            .ignoreFieldBridge()
            .ignoreAnalyzer()
            .above(DateTools.round(to, DateTools.Resolution.MINUTE))
            .createQuery();
    hibQuery = fullTextSession.createFullTextQuery(query, Month.class);
    assertEquals(1, hibQuery.getResultSize());
    assertEquals("February", ((Month) hibQuery.list().get(0)).getName());

    transaction.commit();
  }

  @Test
  public void testRangeQueryAboveInclusive() throws Exception {
    Transaction transaction = fullTextSession.beginTransaction();
    final QueryBuilder monthQb =
        fullTextSession.getSearchFactory().buildQueryBuilder().forEntity(Month.class).get();

    // test the limits, inclusive
    Query query =
        monthQb
            .range()
            .onField("estimatedCreation")
            .andField("justfortest")
            .ignoreFieldBridge()
            .ignoreAnalyzer()
            .above(february)
            .createQuery();
    FullTextQuery hibQuery = fullTextSession.createFullTextQuery(query, Month.class);
    assertEquals("Wrong number of query results", 1, hibQuery.getResultSize());
    assertEquals("February", ((Month) hibQuery.list().get(0)).getName());

    transaction.commit();
  }

  @Test
  public void testRangeQueryAboveExclusive() throws Exception {
    Transaction transaction = fullTextSession.beginTransaction();
    final QueryBuilder monthQb =
        fullTextSession.getSearchFactory().buildQueryBuilder().forEntity(Month.class).get();

    // test the limits, exclusive
    Query query =
        monthQb
            .range()
            .onField("estimatedCreation")
            .andField("justfortest")
            .ignoreFieldBridge()
            .ignoreAnalyzer()
            .above(february)
            .excludeLimit()
            .createQuery();
    FullTextQuery hibQuery = fullTextSession.createFullTextQuery(query, Month.class);
    assertEquals(0, hibQuery.getResultSize());

    transaction.commit();
  }

  @Test
  public void testPhraseQuery() throws Exception {
    Transaction transaction = fullTextSession.beginTransaction();
    final QueryBuilder monthQb =
        fullTextSession.getSearchFactory().buildQueryBuilder().forEntity(Month.class).get();

    Query query =
        monthQb.phrase().onField("mythology").sentence("colder and whitening").createQuery();

    assertEquals(
        "test exact phrase",
        1,
        fullTextSession.createFullTextQuery(query, Month.class).getResultSize());

    query = monthQb.phrase().onField("mythology").sentence("Month whitening").createQuery();

    assertEquals(
        "test slop", 0, fullTextSession.createFullTextQuery(query, Month.class).getResultSize());

    query =
        monthQb.phrase().withSlop(3).onField("mythology").sentence("Month whitening").createQuery();

    assertEquals(
        "test slop", 1, fullTextSession.createFullTextQuery(query, Month.class).getResultSize());

    query = monthQb.phrase().onField("mythology").sentence("whitening").createQuery();

    assertEquals(
        "test one term optimization",
        1,
        fullTextSession.createFullTextQuery(query, Month.class).getResultSize());

    // Does not work as the NGram filter does not seem to be skipping positional increments
    // between ngrams.
    //		query = monthQb
    //				.phrase()
    //					.onField( "mythology_ngram" )
    //					.sentence( "snobored" )
    //					.createQuery();
    //
    //		assertEquals( 1, fullTextSession.createFullTextQuery( query, Month.class ).getResultSize()
    // );

    transaction.commit();
  }

  @Test
  @TestForIssue(jiraKey = "HSEARCH-1074")
  public void testPhraseQueryWithNoTermsAfterAnalyzerApplication() throws Exception {
    Transaction transaction = fullTextSession.beginTransaction();
    final QueryBuilder monthQb =
        fullTextSession.getSearchFactory().buildQueryBuilder().forEntity(Month.class).get();

    Query query = monthQb.phrase().onField("mythology").sentence("and").createQuery();

    assertEquals(
        "there should be no results, since all terms are stop words",
        0,
        fullTextSession.createFullTextQuery(query, Month.class).getResultSize());
    transaction.commit();
  }

  @Test
  public void testNumericRangeQueries() {
    Transaction transaction = fullTextSession.beginTransaction();
    final QueryBuilder monthQb =
        fullTextSession.getSearchFactory().buildQueryBuilder().forEntity(Month.class).get();

    Query query = monthQb.range().onField("raindropInMm").from(0.23d).to(0.24d).createQuery();

    assertTrue(query instanceof NumericRangeQuery);

    List<?> results = fullTextSession.createFullTextQuery(query, Month.class).list();

    assertEquals("test range numeric ", 1, results.size());
    assertEquals("test range numeric ", "January", ((Month) results.get(0)).getName());

    transaction.commit();
  }

  @Test
  @TestForIssue(jiraKey = "HSEARCH-1378")
  public void testNumericRangeQueryAbove() {
    Transaction transaction = fullTextSession.beginTransaction();
    final QueryBuilder monthQb =
        fullTextSession.getSearchFactory().buildQueryBuilder().forEntity(Month.class).get();

    // inclusive
    Query query = monthQb.range().onField("raindropInMm").above(0.231d).createQuery();

    assertTrue(query instanceof NumericRangeQuery);

    List<?> results = fullTextSession.createFullTextQuery(query, Month.class).list();
    assertThat(results).onProperty("name").containsOnly("January", "February", "March");

    // exclusive
    query = monthQb.range().onField("raindropInMm").above(0.231d).excludeLimit().createQuery();

    results = fullTextSession.createFullTextQuery(query, Month.class).list();
    assertThat(results).onProperty("name").containsOnly("February", "March");

    transaction.commit();
  }

  @Test
  @TestForIssue(jiraKey = "HSEARCH-1378")
  public void testNumericRangeQueryBelow() {
    Transaction transaction = fullTextSession.beginTransaction();
    final QueryBuilder monthQb =
        fullTextSession.getSearchFactory().buildQueryBuilder().forEntity(Month.class).get();

    // inclusive
    Query query = monthQb.range().onField("raindropInMm").below(0.435d).createQuery();

    assertTrue(query instanceof NumericRangeQuery);

    List<?> results = fullTextSession.createFullTextQuery(query, Month.class).list();
    assertThat(results).onProperty("name").containsOnly("January", "February", "March");

    // exclusive
    query = monthQb.range().onField("raindropInMm").below(0.435d).excludeLimit().createQuery();

    results = fullTextSession.createFullTextQuery(query, Month.class).list();
    assertThat(results).onProperty("name").containsOnly("January");

    transaction.commit();
  }

  @Test
  public void testNumericFieldsTermQuery() {
    Transaction transaction = fullTextSession.beginTransaction();
    final QueryBuilder monthQb =
        fullTextSession.getSearchFactory().buildQueryBuilder().forEntity(Month.class).get();

    Query query = monthQb.keyword().onField("raindropInMm").matching(0.231d).createQuery();

    assertTrue(query instanceof NumericRangeQuery);

    assertEquals(
        "test term numeric ",
        1,
        fullTextSession.createFullTextQuery(query, Month.class).getResultSize());

    transaction.commit();
  }

  @Test
  public void testFieldBridge() {
    Transaction transaction = fullTextSession.beginTransaction();
    final QueryBuilder monthQb =
        fullTextSession.getSearchFactory().buildQueryBuilder().forEntity(Month.class).get();
    Query query = monthQb.keyword().onField("monthRomanNumber").matching(2).createQuery();
    FullTextQuery fullTextQuery = fullTextSession.createFullTextQuery(query, Month.class);
    List<?> results = fullTextQuery.list();
    assertEquals(1, results.size());
    Month february = (Month) results.get(0);
    assertEquals(2, february.getMonthValue());
    transaction.commit();
  }

  @Test
  public void testSpatialQueries() {
    Transaction transaction = fullTextSession.beginTransaction();
    final QueryBuilder builder =
        fullTextSession.getSearchFactory().buildQueryBuilder().forEntity(POI.class).get();

    Coordinates coordinates = Point.fromDegrees(24d, 31.5d);
    Query query =
        builder
            .spatial()
            .onField("location")
            .within(51, Unit.KM)
            .ofCoordinates(coordinates)
            .createQuery();

    List<?> results = fullTextSession.createFullTextQuery(query, POI.class).list();

    assertEquals("test spatial hash based spatial query", 1, results.size());
    assertEquals("test spatial hash based spatial query", "Bozo", ((POI) results.get(0)).getName());

    query =
        builder
            .spatial()
            .onField("location")
            .within(500, Unit.KM)
            .ofLatitude(48.858333d)
            .andLongitude(2.294444d)
            .createQuery();
    results = fullTextSession.createFullTextQuery(query, POI.class).list();

    assertEquals("test spatial hash based spatial query", 1, results.size());
    assertEquals(
        "test spatial hash based spatial query", "Tour Eiffel", ((POI) results.get(0)).getName());

    transaction.commit();
  }

  @Test
  @TestForIssue(jiraKey = "HSEARCH-703")
  public void testPolymorphicQueryForUnindexedSuperTypeReturnsIndexedSubType() {
    Transaction transaction = fullTextSession.beginTransaction();

    final QueryBuilder builder =
        fullTextSession.getSearchFactory().buildQueryBuilder().forEntity(Object.class).get();

    Query query = builder.all().createQuery();
    List<?> results = fullTextSession.createFullTextQuery(query, Object.class).list();

    assertEquals("expected all instances of all indexed types", 7, results.size());

    transaction.commit();
  }

  @Test
  @TestForIssue(jiraKey = "HSEARCH-703")
  public void testPolymorphicQueryWithKeywordTermForUnindexedSuperTypeReturnsIndexedSubType() {
    Transaction transaction = fullTextSession.beginTransaction();

    final QueryBuilder builder =
        fullTextSession.getSearchFactory().buildQueryBuilder().forEntity(Car.class).get();

    Query query = builder.keyword().onField("name").matching("Morris").createQuery();
    List<?> results = fullTextSession.createFullTextQuery(query).list();

    assertEquals("expected one instance of indexed sub-type", 1, results.size());
    assertEquals(180, ((SportsCar) results.get(0)).getEnginePower());

    transaction.commit();
  }

  @Test
  @TestForIssue(jiraKey = "HSEARCH-703")
  public void testObtainingBuilderForUnindexedTypeWithoutIndexedSubTypesCausesException() {
    Transaction transaction = fullTextSession.beginTransaction();

    try {
      fullTextSession.getSearchFactory().buildQueryBuilder().forEntity(Animal.class).get();

      fail("Obtaining a builder not allowed for unindexed type without any indexed sub-types.");
    } catch (SearchException e) {
      // success
    } finally {
      transaction.commit();
    }
  }

  private void outputQueryAndResults(
      boolean outputLogs, Coffee originalInstance, Query mltQuery, List<Object[]> results) {
    // set to true to display results
    if (outputLogs) {
      StringBuilder builder =
          new StringBuilder("Initial coffee: ")
              .append(originalInstance)
              .append("\n\n")
              .append("Query: ")
              .append(mltQuery.toString())
              .append("\n\n")
              .append("Matching coffees")
              .append("\n");
      for (Object[] entry : results) {
        builder.append("    Score: ").append(entry[1]);
        builder.append(" | Coffee: ").append(entry[0]).append("\n");
      }
      log.debug(builder.toString());
    }
  }

  private void indexTestData() {
    Transaction tx = fullTextSession.beginTransaction();
    final Calendar calendar = Calendar.getInstance();
    calendar.setTimeZone(TimeZone.getTimeZone("UTC"));
    calendar.set(1900, 2, 12, 0, 0, 0);
    calendar.set(Calendar.MILLISECOND, 0);
    Date january = calendar.getTime();
    fullTextSession.persist(
        new Month(
            "January",
            1,
            "Month of colder and whitening",
            "Historically colder than any other month in the northern hemisphere",
            january,
            0.231d));
    calendar.set(100 + 1900, 2, 12, 0, 0, 0);
    february = calendar.getTime();
    fullTextSession.persist(
        new Month(
            "February",
            2,
            "Month of snowboarding",
            "Historically, the month where we make babies while watching the whitening landscape",
            february,
            0.435d));
    calendar.set(1800, 2, 12, 0, 0, 0);
    Date march = calendar.getTime();
    fullTextSession.persist(
        new Month(
            "March",
            3,
            "Month of fake spring",
            "Historically, the month in which we actually find time to go snowboarding.",
            march,
            0.435d));

    POI poi = new POI(1, "Tour Eiffel", 48.858333d, 2.294444d, "Monument");
    fullTextSession.persist(poi);
    poi = new POI(2, "Bozo", 24d, 32d, "Monument");
    fullTextSession.persist(poi);

    Car car = new SportsCar(1, "Leyland", 100);
    fullTextSession.persist(car);

    car = new SportsCar(2, "Morris", 180);
    fullTextSession.persist(car);

    tx.commit();
    fullTextSession.clear();
  }

  @Override
  protected Class<?>[] getAnnotatedClasses() {
    return new Class<?>[] {Month.class, POI.class, Car.class, SportsCar.class, Animal.class};
  }

  @Override
  protected void configure(Configuration cfg) {
    super.configure(cfg);
    cfg.getProperties().put(Environment.MODEL_MAPPING, MappingFactory.class.getName());
  }

  public static class MappingFactory {
    @Factory
    public SearchMapping build() {
      SearchMapping mapping = new SearchMapping();
      mapping
          .analyzerDef("stemmer", StandardTokenizerFactory.class)
          .filter(StandardFilterFactory.class)
          .filter(LowerCaseFilterFactory.class)
          .filter(StopFilterFactory.class)
          .filter(SnowballPorterFilterFactory.class)
          .param("language", "English")
          .analyzerDef("ngram", StandardTokenizerFactory.class)
          .filter(StandardFilterFactory.class)
          .filter(LowerCaseFilterFactory.class)
          .filter(StopFilterFactory.class)
          .filter(NGramFilterFactory.class)
          .param("minGramSize", "3")
          .param("maxGramSize", "3");
      return mapping;
    }
  }
}
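/**
 * Illustrative sketch, not part of the original sources: a plausible shape for the Month
 * entity queried throughout DSLTest, reconstructed from the field names used by the tests
 * and the constructor calls in indexTestData(). The annotations are standard Hibernate
 * Search 5 ones; the real entity lives elsewhere and its exact mapping is a guess.
 */
@Entity
@Indexed
class MonthSketch {

  @Id @GeneratedValue private Integer id;

  @Field private String name;

  @Field private int monthValue; // targeted by testUseOfFieldBridge

  // one property projected into three index fields with different analyzers, matching the
  // "mythology", "mythology_stem" and "mythology_ngram" queries above; "stemmer" and
  // "ngram" are the analyzer definitions contributed by MappingFactory
  @Fields({
    @Field(name = "mythology"),
    @Field(name = "mythology_stem", analyzer = @Analyzer(definition = "stemmer")),
    @Field(name = "mythology_ngram", analyzer = @Analyzer(definition = "ngram"))
  })
  private String mythology;

  @Field private String history;

  @Field private Date estimatedCreation; // date range tests target this field

  @Field private double raindropInMm; // numeric range and keyword tests target this field
}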
/** @author John Griffin */
public class FieldBoostTest extends SearchTestCase {

  private static final Log log = LoggerFactory.make();

  public void testBoostedGetDesc() throws Exception {
    FullTextSession fullTextSession = Search.getFullTextSession(openSession());
    buildBoostedGetIndex(fullTextSession);

    fullTextSession.clear();
    Transaction tx = fullTextSession.beginTransaction();

    QueryParser authorParser =
        new QueryParser(
            TestConstants.getTargetLuceneVersion(), "author", TestConstants.standardAnalyzer);
    QueryParser descParser =
        new QueryParser(
            TestConstants.getTargetLuceneVersion(), "description", TestConstants.standardAnalyzer);
    Query author = authorParser.parse("Wells");
    Query desc = descParser.parse("martians");

    BooleanQuery query = new BooleanQuery();
    query.add(author, BooleanClause.Occur.SHOULD);
    query.add(desc, BooleanClause.Occur.SHOULD);
    log.debug(query.toString());

    org.hibernate.search.FullTextQuery hibQuery =
        fullTextSession.createFullTextQuery(query, BoostedGetDescriptionLibrary.class);
    List results = hibQuery.list();

    log.debug(hibQuery.explain(0).toString());
    log.debug(hibQuery.explain(1).toString());

    assertTrue(
        "incorrect document returned",
        ((BoostedGetDescriptionLibrary) results.get(0)).getDescription().startsWith("Martians"));

    // cleanup
    for (Object element :
        fullTextSession
            .createQuery("from " + BoostedGetDescriptionLibrary.class.getName())
            .list()) {
      fullTextSession.delete(element);
    }
    tx.commit();
    fullTextSession.close();
  }

  public void testBoostedFieldDesc() throws Exception {
    FullTextSession fullTextSession = Search.getFullTextSession(openSession());
    buildBoostedFieldIndex(fullTextSession);

    fullTextSession.clear();
    Transaction tx = fullTextSession.beginTransaction();

    QueryParser authorParser =
        new QueryParser(
            TestConstants.getTargetLuceneVersion(), "author", TestConstants.standardAnalyzer);
    QueryParser descParser =
        new QueryParser(
            TestConstants.getTargetLuceneVersion(), "description", TestConstants.standardAnalyzer);
    Query author = authorParser.parse("Wells");
    Query desc = descParser.parse("martians");

    BooleanQuery query = new BooleanQuery();
    query.add(author, BooleanClause.Occur.SHOULD);
    query.add(desc, BooleanClause.Occur.SHOULD);
    log.debug(query.toString());

    org.hibernate.search.FullTextQuery hibQuery =
        fullTextSession.createFullTextQuery(query, BoostedFieldDescriptionLibrary.class);
    List results = hibQuery.list();

    assertTrue(
        "incorrect document boost",
        ((BoostedFieldDescriptionLibrary) results.get(0)).getDescription().startsWith("Martians"));

    log.debug(hibQuery.explain(0).toString());
    log.debug(hibQuery.explain(1).toString());

    // cleanup
    for (Object element :
        fullTextSession
            .createQuery("from " + BoostedFieldDescriptionLibrary.class.getName())
            .list()) {
      fullTextSession.delete(element);
    }
    tx.commit();
    fullTextSession.close();
  }

  public void testBoostedDesc() throws Exception {
    FullTextSession fullTextSession = Search.getFullTextSession(openSession());
    buildBoostedDescIndex(fullTextSession);

    fullTextSession.clear();
    Transaction tx = fullTextSession.beginTransaction();

    QueryParser authorParser =
        new QueryParser(
            TestConstants.getTargetLuceneVersion(), "author", TestConstants.standardAnalyzer);
    QueryParser descParser =
        new QueryParser(
            TestConstants.getTargetLuceneVersion(), "description", TestConstants.standardAnalyzer);
    Query author = authorParser.parse("Wells");
    Query desc = descParser.parse("martians");

    BooleanQuery query = new BooleanQuery();
    query.add(author, BooleanClause.Occur.SHOULD);
    query.add(desc, BooleanClause.Occur.SHOULD);
    log.debug(query.toString());

    org.hibernate.search.FullTextQuery hibQuery =
        fullTextSession.createFullTextQuery(query, BoostedDescriptionLibrary.class);
    List results = hibQuery.list();

    log.debug(hibQuery.explain(0).toString());
    log.debug(hibQuery.explain(1).toString());

    assertTrue(
        "incorrect document returned",
        ((BoostedDescriptionLibrary) results.get(0)).getDescription().startsWith("Martians"));

    // cleanup
    for (Object element :
        fullTextSession.createQuery("from " + BoostedDescriptionLibrary.class.getName()).list()) {
      fullTextSession.delete(element);
    }
    tx.commit();
    fullTextSession.close();
  }

  private void buildBoostedDescIndex(FullTextSession session) {
    Transaction tx = session.beginTransaction();
    BoostedDescriptionLibrary l = new BoostedDescriptionLibrary();
    l.setAuthor("H.G. Wells");
    l.setTitle("The Invisible Man");
    l.setDescription("Scientist discovers invisibility and becomes insane.");
    session.save(l);

    l = new BoostedDescriptionLibrary();
    l.setAuthor("H.G. Wells");
    l.setTitle("War of the Worlds");
    l.setDescription("Martians invade earth to eliminate mankind.");
    session.save(l);

    tx.commit();
  }

  private void buildBoostedFieldIndex(FullTextSession session) {
    Transaction tx = session.beginTransaction();
    BoostedFieldDescriptionLibrary l = new BoostedFieldDescriptionLibrary();
    l.setAuthor("H.G. Wells");
    l.setTitle("The Invisible Man");
    l.setDescription("Scientist discovers invisibility and becomes insane.");
    session.save(l);

    l = new BoostedFieldDescriptionLibrary();
    l.setAuthor("H.G. Wells");
    l.setTitle("War of the Worlds");
    l.setDescription("Martians invade earth to eliminate mankind.");
    session.save(l);

    tx.commit();
  }

  private void buildBoostedGetIndex(FullTextSession session) {
    Transaction tx = session.beginTransaction();
    BoostedGetDescriptionLibrary l = new BoostedGetDescriptionLibrary();
    l.setAuthor("H.G. Wells");
    l.setTitle("The Invisible Man");
    l.setDescription("Scientist discovers invisibility and becomes insane.");
    session.save(l);

    l = new BoostedGetDescriptionLibrary();
    l.setAuthor("H.G. Wells");
    l.setTitle("War of the Worlds");
    l.setDescription("Martians invade earth to eliminate mankind.");
    session.save(l);

    tx.commit();
  }

  @Override
  protected Class<?>[] getAnnotatedClasses() {
    return new Class[] {
      BoostedDescriptionLibrary.class,
      BoostedFieldDescriptionLibrary.class,
      BoostedGetDescriptionLibrary.class,
    };
  }
}