protected void printStatusMessage(long startTime, long totalTodoCount, long doneCount) {
   long elapsedMs = TimeUnit.NANOSECONDS.toMillis(System.nanoTime() - startTime);
   log.indexingDocumentsCompleted(doneCount, elapsedMs);
   float estimateSpeed = doneCount * 1000f / elapsedMs;
   float estimatePercentileComplete = doneCount * 100f / totalTodoCount;
   log.indexingSpeed(estimateSpeed, estimatePercentileComplete);
 }
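  // Worked example of the arithmetic above, with illustrative numbers only: 5_000 documents done
  // after 2_000 ms of a 50_000-document job give
  //   estimateSpeed              = 5_000 * 1000f / 2_000  = 2500.0 documents per second
  //   estimatePercentileComplete = 5_000 * 100f  / 50_000 = 10.0 percent
  // Note that an elapsedMs of 0 yields Infinity/NaN (float division) rather than an exception.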
 private FieldBridge doExtractType(
     org.hibernate.search.annotations.FieldBridge bridgeAnn,
     String appliedOnName,
     Class<?> appliedOnType) {
   assert bridgeAnn != null : "@FieldBridge instance cannot be null";
   FieldBridge bridge;
   Class<?> impl = bridgeAnn.impl();
   if (impl == void.class) {
     throw LOG.noImplementationClassInFieldBridge(appliedOnName);
   }
   try {
     Object instance = impl.newInstance();
     if (FieldBridge.class.isAssignableFrom(impl)) {
       bridge = (FieldBridge) instance;
     } else if (TwoWayStringBridge.class.isAssignableFrom(impl)) {
       bridge = new TwoWayString2FieldBridgeAdaptor((TwoWayStringBridge) instance);
     } else if (org.hibernate.search.bridge.StringBridge.class.isAssignableFrom(impl)) {
       bridge = new String2FieldBridgeAdaptor((org.hibernate.search.bridge.StringBridge) instance);
     } else {
       throw LOG.noFieldBridgeInterfaceImplementedByFieldBridge(impl.getName(), appliedOnName);
     }
     if (bridgeAnn.params().length > 0 && ParameterizedBridge.class.isAssignableFrom(impl)) {
       Map<String, String> params = new HashMap<String, String>(bridgeAnn.params().length);
       for (Parameter param : bridgeAnn.params()) {
         params.put(param.name(), param.value());
       }
       ((ParameterizedBridge) instance).setParameterValues(params);
     }
     populateReturnType(appliedOnType, impl, instance);
   } catch (Exception e) {
     throw LOG.unableToInstantiateFieldBridge(appliedOnName, appliedOnType.getName(), e);
   }
   return bridge;
 }
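  // Minimal usage sketch, not part of the original source: a bridge that only implements
  // StringBridge, as doExtractType above would resolve it. The class name PaddedIntegerBridge is
  // purely illustrative.
  public static class PaddedIntegerBridge implements org.hibernate.search.bridge.StringBridge {
    @Override
    public String objectToString(Object object) {
      // zero-pad so the lexicographic order of the indexed string matches the numeric order
      return object == null ? null : String.format("%05d", (Integer) object);
    }
  }
  // Declared as @FieldBridge(impl = PaddedIntegerBridge.class), doExtractType creates the instance
  // reflectively and, since only StringBridge is implemented, returns
  // new String2FieldBridgeAdaptor(new PaddedIntegerBridge()).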
   /**
    * Extracts and instantiates the implementation class from a {@code ClassBridge} annotation.
    *
    * @param cb the class bridge annotation
    * @param clazz the {@code Class} on which the annotation is defined
    * @return the specified {@code FieldBridge} instance
    */
  public FieldBridge extractType(ClassBridge cb, Class<?> clazz) {
    FieldBridge bridge = null;
    Class<?> bridgeType = null;

    if (cb != null) {
      bridgeType = cb.impl();
      if (bridgeType != null) {
        try {
          Object instance = bridgeType.newInstance();
          if (FieldBridge.class.isAssignableFrom(bridgeType)) {
            bridge = (FieldBridge) instance;
          } else if (org.hibernate.search.bridge.TwoWayStringBridge.class.isAssignableFrom(
              bridgeType)) {
            bridge =
                new TwoWayString2FieldBridgeAdaptor(
                    (org.hibernate.search.bridge.TwoWayStringBridge) instance);
          } else if (org.hibernate.search.bridge.StringBridge.class.isAssignableFrom(bridgeType)) {
            bridge =
                new String2FieldBridgeAdaptor((org.hibernate.search.bridge.StringBridge) instance);
          } else {
            throw LOG.noFieldBridgeInterfaceImplementedByClassBridge(bridgeType.getName());
          }
        } catch (Exception e) {
          throw LOG.cannotInstantiateClassBridgeOfType(bridgeType.getName(), clazz.getName(), e);
        }
      }
    }
    if (bridge == null) {
      throw LOG.unableToDetermineClassBridge(ClassBridge.class.getName());
    }

    populateReturnType(clazz, bridgeType, bridge);

    return bridge;
  }
 private void buildAttribute(Object element, LuceneWorksBuilder hydrator) {
   if (element instanceof GenericRecord) {
     GenericRecord record = (GenericRecord) element;
     String name = record.getSchema().getName();
     if ("TokenTrackingAttribute".equals(name)) {
       @SuppressWarnings("unchecked")
       List<Integer> positionList = (List<Integer>) record.get("positions");
       hydrator.addTokenTrackingAttribute(positionList);
     } else if ("CharTermAttribute".equals(name)) {
       hydrator.addCharTermAttribute((CharSequence) record.get("sequence"));
     } else if ("PayloadAttribute".equals(name)) {
       hydrator.addPayloadAttribute(asByteArray(record, "payload"));
     } else if ("KeywordAttribute".equals(name)) {
       hydrator.addKeywordAttribute(asBoolean(record, "isKeyword"));
     } else if ("PositionIncrementAttribute".equals(name)) {
       hydrator.addPositionIncrementAttribute(asInt(record, "positionIncrement"));
     } else if ("FlagsAttribute".equals(name)) {
       hydrator.addFlagsAttribute(asInt(record, "flags"));
     } else if ("TypeAttribute".equals(name)) {
       hydrator.addTypeAttribute(asString(record, "type"));
     } else if ("OffsetAttribute".equals(name)) {
       hydrator.addOffsetAttribute(asInt(record, "startOffset"), asInt(record, "endOffset"));
     } else {
       throw log.unknownAttributeSerializedRepresentation(name);
     }
   } else if (element instanceof ByteBuffer) {
     hydrator.addSerializedAttribute(asByteArray((ByteBuffer) element));
   } else {
     throw log.unknownAttributeSerializedRepresentation(element.getClass().getName());
   }
 }
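  // Hedged sketch (not part of the original source) of the kind of record buildAttribute above
  // expects; the real schemas come from the Avro protocol bundled with the serializer, so this
  // standalone SchemaBuilder definition is for illustration only.
  private static org.apache.avro.generic.GenericRecord exampleFlagsAttributeRecord() {
    org.apache.avro.Schema flagsSchema =
        org.apache.avro.SchemaBuilder.record("FlagsAttribute").fields().requiredInt("flags").endRecord();
    org.apache.avro.generic.GenericRecord record =
        new org.apache.avro.generic.GenericData.Record(flagsSchema);
    record.put("flags", 2);
    // buildAttribute dispatches on record.getSchema().getName() ("FlagsAttribute") and ends up
    // calling hydrator.addFlagsAttribute(asInt(record, "flags")).
    return record;
  }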
 public void start() {
   int currentLocal = 0;
   this.directoryProviderLock = this.context.getDirectoryProviderLock(this);
   this.context = null;
   try {
     // copy to source
     if (new File(sourceDir, CURRENT1).exists()) {
       currentLocal = 2;
     } else if (new File(sourceDir, CURRENT2).exists()) {
       currentLocal = 1;
     } else {
       log.debugf("Source directory for '%s' will be initialized", indexName);
       currentLocal = 1;
     }
     String currentString = Integer.valueOf(currentLocal).toString();
     File subDir = new File(sourceDir, currentString);
     FileHelper.synchronize(indexDir, subDir, true, copyChunkSize);
     new File(sourceDir, CURRENT1).delete();
     new File(sourceDir, CURRENT2).delete();
      // TODO small hole: no current marker file exists between the deletes above and this create
     new File(sourceDir, CURRENT_DIR_NAME[currentLocal]).createNewFile();
     log.debugf("Current directory: %d", currentLocal);
   } catch (IOException e) {
     throw new SearchException("Unable to initialize index: " + directoryProviderName, e);
   }
   task = new FSMasterDirectoryProvider.TriggerTask(indexDir, sourceDir);
   long period = DirectoryProviderHelper.getRefreshPeriod(properties, directoryProviderName);
   timer.scheduleAtFixedRate(task, period, period);
   this.current = currentLocal; // write to volatile to publish all state
 }
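  // Hedged restatement of the marker-file handshake in start() above; the helper below is an
  // illustration only, not part of the provider's API. The master alternates between the "1" and
  // "2" sub-directories of sourceDir and publishes the active generation via an empty marker file.
  private static int generationToWriteTo(File sourceDir, String current1MarkerName) {
    // if generation 1 is currently published, the next copy goes into generation 2; otherwise
    // (generation 2 published, or a fresh source directory) it goes into generation 1
    return new File(sourceDir, current1MarkerName).exists() ? 2 : 1;
  }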
  @Override
  public void initializeObjects(
      List<EntityInfo> entityInfos,
      LinkedHashMap<EntityInfoLoadKey, Object> idToObjectMap,
      ObjectInitializationContext objectInitializationContext) {
    // Do not call isTimeOut here as the caller might be the last biggie on the list.
    final int numberOfObjectsToInitialize = entityInfos.size();

    if (numberOfObjectsToInitialize == 0) {
      if (log.isTraceEnabled()) {
        log.tracef("No object to initialize");
      }
      return;
    }

    SessionImplementor sessionImplementor =
        (SessionImplementor) objectInitializationContext.getSession();
    String entityName =
        objectInitializationContext
            .getSession()
            .getSessionFactory()
            .getClassMetadata(objectInitializationContext.getEntityType())
            .getEntityName();
    EntityPersister persister = sessionImplementor.getFactory().getEntityPersister(entityName);
    PersistenceContext persistenceContext = sessionImplementor.getPersistenceContext();

    // check the persistence context
    List<EntityInfo> remainingEntityInfos = new ArrayList<>(numberOfObjectsToInitialize);
    for (EntityInfo entityInfo : entityInfos) {
      if (ObjectLoaderHelper.areDocIdAndEntityIdIdentical(
          entityInfo, objectInitializationContext.getSession())) {
        EntityKey entityKey = sessionImplementor.generateEntityKey(entityInfo.getId(), persister);
        Object o = persistenceContext.getEntity(entityKey);
        if (o == null) {
          remainingEntityInfos.add(entityInfo);
        } else {
          EntityInfoLoadKey key = new EntityInfoLoadKey(entityInfo.getClazz(), entityInfo.getId());
          idToObjectMap.put(key, o);
        }
      } else {
        // if document id !=  entity id we can't use PC lookup
        remainingEntityInfos.add(entityInfo);
      }
    }

     // update entityInfos to only contain the remaining ones
    final int remainingSize = remainingEntityInfos.size();
    if (log.isTraceEnabled()) {
      log.tracef(
          "Initialized %d objects out of %d in the persistence context",
          (Integer) (numberOfObjectsToInitialize - remainingSize),
          (Integer) numberOfObjectsToInitialize);
    }

    if (remainingSize > 0) {
      delegate.initializeObjects(remainingEntityInfos, idToObjectMap, objectInitializationContext);
    }
  }
  @Override
  public void deserialize(byte[] data, LuceneWorksBuilder hydrator) {
    final ByteArrayInputStream inputStream = new ByteArrayInputStream(data);
    final int majorVersion = inputStream.read();
    final int minorVersion = inputStream.read();
    final Protocol protocol = protocols.getProtocol(majorVersion, minorVersion);

    Decoder decoder = DecoderFactory.get().binaryDecoder(inputStream, null);
    GenericDatumReader<GenericRecord> reader =
        new GenericDatumReader<>(protocol.getType("Message"));
    GenericRecord result;
    try {
      result = reader.read(null, decoder);
    } catch (IOException e) {
      throw log.unableToDeserializeAvroStream(e);
    }

    classReferences = asListOfString(result, "classReferences");
    final List<GenericRecord> operations = asListOfGenericRecords(result, "operations");
    final ConversionContext conversionContext = new ContextualExceptionBridgeHelper();
    for (GenericRecord operation : operations) {
      String schema = operation.getSchema().getName();
      if ("OptimizeAll".equals(schema)) {
        hydrator.addOptimizeAll();
      } else if ("PurgeAll".equals(schema)) {
        hydrator.addPurgeAllLuceneWork(asClass(operation, "class"));
      } else if ("Flush".equals(schema)) {
        hydrator.addFlush();
      } else if ("Delete".equals(schema)) {
        processId(operation, hydrator);
        hydrator.addDeleteLuceneWork(asClass(operation, "class"), conversionContext);
      } else if ("DeleteByQuery".equals(schema)) {
        String entityClassName = asClass(operation, "class");
        int queryKey = asInt(operation, "key");
        DeleteByQuerySupport.StringToQueryMapper mapper =
            DeleteByQuerySupport.getStringToQueryMapper(queryKey);
        List<Utf8> stringList = asListOfString(operation, "query");
        String[] query = new String[stringList.size()];
        for (int i = 0; i < stringList.size(); ++i) {
          query[i] = stringList.get(i).toString();
        }
        hydrator.addDeleteByQueryLuceneWork(entityClassName, mapper.fromString(query));
      } else if ("Add".equals(schema)) {
        buildLuceneDocument(asGenericRecord(operation, "document"), hydrator);
        Map<String, String> analyzers = getAnalyzers(operation);
        processId(operation, hydrator);
        hydrator.addAddLuceneWork(asClass(operation, "class"), analyzers, conversionContext);
      } else if ("Update".equals(schema)) {
        buildLuceneDocument(asGenericRecord(operation, "document"), hydrator);
        Map<String, String> analyzers = getAnalyzers(operation);
        processId(operation, hydrator);
        hydrator.addUpdateLuceneWork(asClass(operation, "class"), analyzers, conversionContext);
      } else {
        throw log.cannotDeserializeOperation(schema);
      }
    }
  }
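  // Hedged sketch (not the original serializer) of the producing side implied by deserialize()
  // above: two raw version bytes followed by a binary-encoded Avro "Message" record. Parameter
  // names are placeholders for whatever the real serializer uses.
  private static byte[] exampleSerialize(
      int majorVersion, int minorVersion, Protocol protocol, GenericRecord messageRecord)
      throws IOException {
    java.io.ByteArrayOutputStream out = new java.io.ByteArrayOutputStream();
    out.write(majorVersion); // single byte, read back by inputStream.read() in deserialize()
    out.write(minorVersion); // single byte
    org.apache.avro.io.BinaryEncoder encoder =
        org.apache.avro.io.EncoderFactory.get().directBinaryEncoder(out, null);
    new org.apache.avro.generic.GenericDatumWriter<GenericRecord>(protocol.getType("Message"))
        .write(messageRecord, encoder);
    encoder.flush();
    return out.toByteArray();
  }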
 public EntityIndexBinding getSafeIndexBindingForEntity(Class<?> entityType) {
   if (entityType == null) {
     throw log.nullIsInvalidIndexedType();
   }
   EntityIndexBinding entityIndexBinding = getIndexBinding(entityType);
   if (entityIndexBinding == null) {
     throw log.notAnIndexedType(entityType.getName());
   }
   return entityIndexBinding;
 }
   /**
    * Instantiates the SpatialFieldBridge from a {@code Spatial} annotation.
    *
    * @param spatial the {@code Spatial} annotation
    * @param clazz the {@code XClass} on which the annotation is defined
    * @param latitudeField the name of the field holding the latitude
    * @param longitudeField the name of the field holding the longitude
    * @return the {@code SpatialFieldBridge} instance
    */
  public FieldBridge buildSpatialBridge(
      Spatial spatial, XClass clazz, String latitudeField, String longitudeField) {
    FieldBridge bridge;
    try {
      bridge = SpatialBridgeProvider.buildSpatialBridge(spatial, latitudeField, longitudeField);
    } catch (Exception e) {
      throw LOG.unableToInstantiateSpatial(clazz.getName(), e);
    }
    if (bridge == null) {
      throw LOG.unableToInstantiateSpatial(clazz.getName(), null);
    }

    return bridge;
  }
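  // Hedged example of a mapping that leads to buildSpatialBridge being invoked with explicit
  // latitude/longitude field names (annotations from org.hibernate.search.annotations; the entity
  // and field names are illustrative, and the id/JPA mapping is omitted).
  @Indexed
  @Spatial(name = "location")
  public static class Hotel {
    @Latitude(of = "location")
    private Double latitude;

    @Longitude(of = "location")
    private Double longitude;
  }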
 private void flushCloseExecutor() {
   if (asyncIndexingExecutor == null) {
     return;
   }
   asyncIndexingExecutor.shutdown();
   try {
     asyncIndexingExecutor.awaitTermination(Long.MAX_VALUE, TimeUnit.SECONDS);
   } catch (InterruptedException e) {
     log.interruptedWhileWaitingForIndexActivity(e);
     Thread.currentThread().interrupt();
   }
   if (!asyncIndexingExecutor.isTerminated()) {
     log.unableToShutdownAsynchronousIndexingByTimeout(indexName);
   }
 }
  public void performWork(LuceneWork work, IndexWriter writer) {
    final Class<?> entityType = work.getEntityClass();
    final Serializable id = work.getId();
    log.tracef("Removing %s#%s by query.", entityType, id);
    DocumentBuilderIndexedEntity<?> builder = workspace.getDocumentBuilder(entityType);

    BooleanQuery entityDeletionQuery = new BooleanQuery();

    Query idQueryTerm;
    if (isIdNumeric(builder)) {
      idQueryTerm = NumericFieldUtils.createExactMatchQuery(builder.getIdKeywordName(), id);
    } else {
      idQueryTerm = new TermQuery(builder.getTerm(id));
    }
    entityDeletionQuery.add(idQueryTerm, BooleanClause.Occur.MUST);

    Term classNameQueryTerm = new Term(ProjectionConstants.OBJECT_CLASS, entityType.getName());
    TermQuery classNameQuery = new TermQuery(classNameQueryTerm);
    entityDeletionQuery.add(classNameQuery, BooleanClause.Occur.MUST);

    try {
      writer.deleteDocuments(entityDeletionQuery);
    } catch (Exception e) {
      String message = "Unable to remove " + entityType + "#" + id + " from index.";
      throw new SearchException(message, e);
    }
  }
   /**
    * Applies all modifications to the index in parallel using the workers executor.
    *
    * @throws ExecutionException if one of the submitted index update tasks fails
    * @throws InterruptedException if interrupted while waiting for the tasks to complete
    */
  private void applyUpdates() throws InterruptedException, ExecutionException {
    AbstractWorkspaceImpl workspace = resources.getWorkspace();

    ErrorContextBuilder errorContextBuilder = new ErrorContextBuilder();
    errorContextBuilder.allWorkToBeDone(workList);

    IndexWriter indexWriter = workspace.getIndexWriter(errorContextBuilder);
    if (indexWriter == null) {
      log.cannotOpenIndexWriterCausePreviousError();
      return;
    }

    boolean taskExecutionSuccessful = true;

    try {
      if (workList.size() == 1) {
        taskExecutionSuccessful = runSingleTask(workList.get(0), indexWriter, errorContextBuilder);
      } else {
        taskExecutionSuccessful = runMultipleTasks(indexWriter, errorContextBuilder);
      }
      if (!taskExecutionSuccessful) {
        resources.getErrorHandler().handle(errorContextBuilder.createErrorContext());
      } else {
        workspace.optimizerPhase();
      }
    } finally {
      workspace.afterTransactionApplied(!taskExecutionSuccessful, false);
    }
  }
  private float getScore(Query query) {
    Session session = openSession();
    Object[] queryResult;
    float score;
    try {
      FullTextSession fullTextSession = Search.getFullTextSession(session);
      List<?> resultList =
          fullTextSession
              .createFullTextQuery(query, DynamicBoostedDescLibrary.class)
              .setProjection(ProjectionConstants.SCORE, ProjectionConstants.EXPLANATION)
              .setMaxResults(1)
              .list();

      if (resultList.size() == 0) {
        score = 0.0f;
      } else {
        queryResult = (Object[]) resultList.get(0);
        score = (Float) queryResult[0];
        String explanation = queryResult[1].toString();
        log.debugf("score: %f explanation: %s", score, explanation);
      }
    } finally {
      session.close();
    }
    return score;
  }
  public IdentifierConsumerDocumentProducer(
      ProducerConsumerQueue<List<Serializable>> fromIdentifierListToEntities,
      MassIndexerProgressMonitor monitor,
      SessionFactory sessionFactory,
      CountDownLatch producerEndSignal,
      CacheMode cacheMode,
      Class<?> type,
      ExtendedSearchIntegrator searchFactory,
      String idName,
      BatchBackend backend,
      ErrorHandler errorHandler,
      Integer transactionTimeout,
      String tenantId) {
    this.source = fromIdentifierListToEntities;
    this.monitor = monitor;
    this.sessionFactory = sessionFactory;
    this.cacheMode = cacheMode;
    this.type = type;
    this.idName = idName;
    this.backend = backend;
    this.errorHandler = errorHandler;
    this.producerEndSignal = producerEndSignal;
    this.entityIndexBindings = searchFactory.getIndexBindings();
    this.transactionTimeout = transactionTimeout;
    this.tenantId = tenantId;
    this.transactionManager =
        ((SessionFactoryImplementor) sessionFactory)
            .getServiceRegistry()
            .getService(JtaPlatform.class)
            .retrieveTransactionManager();

    log.trace("created");
  }
 @Override
 public void run() {
   modificationLock.lock();
   try {
     applyUpdates();
   } catch (InterruptedException e) {
     log.interruptedWhileWaitingForIndexActivity(e);
     Thread.currentThread().interrupt();
     handleException(e);
   } catch (Exception e) {
     log.backendError(e);
     handleException(e);
   } finally {
     modificationLock.unlock();
   }
 }
  @Override
  public void close() {
    if (stopped.compareAndSet(false, true)) { // make sure we only stop once
      try {
        worker.close();
      } catch (Exception e) {
        log.workerException(e);
      }

      this.allIndexesManager.stop();
      this.timingSource.stop();

      serviceManager.releaseAllServices();

      for (Analyzer an : this.analyzers.values()) {
        an.close();
      }
      for (AbstractDocumentBuilder documentBuilder :
          this.documentBuildersContainedEntities.values()) {
        documentBuilder.close();
      }
      for (EntityIndexBinding entityIndexBinding : this.indexBindingForEntities.values()) {
        entityIndexBinding.getDocumentBuilder().close();
      }

      // unregister statistic mbean
      if (statisticsMBeanName != null) {
        JMXRegistrar.unRegisterMBean(statisticsMBeanName);
      }
    }
  }
  private void initLazyRemoteAnalyzer(
      Map<String, AnalyzerReference> initializedAnalyzers,
      RemoteAnalyzerReference lazyRemoteAnalyzerReference,
      IndexManagerHolder indexesFactory) {
    LazyRemoteAnalyzer lazyAnalyzer =
        (LazyRemoteAnalyzer) lazyRemoteAnalyzerReference.getAnalyzer();

    if (initializedAnalyzers.containsKey(lazyAnalyzer.getName())) {
      AnalyzerReference analyzerReference = initializedAnalyzers.get(lazyAnalyzer.getName());
      if (!(analyzerReference instanceof RemoteAnalyzerReference)) {
        throw log.remoteAnalyzerAlreadyDefinedAsLuceneAnalyzer(lazyAnalyzer.getName());
      }
      lazyAnalyzer.setDelegate(((RemoteAnalyzerReference) analyzerReference).getAnalyzer());
      return;
    }

    Collection<IndexManagerType> indexManagerTypes = indexesFactory.getIndexManagerTypes();
    for (IndexManagerType indexManagerType : indexManagerTypes) {
      if (indexManagerType instanceof RemoteAnalyzerProvider) {
        final RemoteAnalyzer remoteAnalyzer =
            ((RemoteAnalyzerProvider) indexManagerType).getRemoteAnalyzer(lazyAnalyzer.getName());
        lazyAnalyzer.setDelegate(remoteAnalyzer);
        initializedAnalyzers.put(
            lazyAnalyzer.getName(), new RemoteAnalyzerReference(remoteAnalyzer));
        break;
      }
    }
  }
 @Override
 public void run() {
   log.trace("started");
   Session session = sessionFactory.withOptions().tenantIdentifier(tenantId).openSession();
   session.setFlushMode(FlushMode.MANUAL);
   session.setCacheMode(cacheMode);
   session.setDefaultReadOnly(true);
   try {
     loadAllFromQueue(session);
   } catch (Exception exception) {
     errorHandler.handleException(log.massIndexerExceptionWhileTransformingIds(), exception);
   } finally {
     producerEndSignal.countDown();
     session.close();
   }
   log.trace("finished");
 }
 @Override
 public void closeIndexReader(IndexReader reader) {
   try {
     reader.close();
   } catch (IOException e) {
     log.unableToCLoseLuceneIndexReader(e);
   }
 }
 public void initialize(
     String directoryProviderName, Properties properties, BuildContext context) {
   this.properties = properties;
   this.directoryProviderName = directoryProviderName;
   // source guessing
   sourceDir = DirectoryProviderHelper.getSourceDirectory(directoryProviderName, properties, true);
   log.debugf("Source directory: %s", sourceDir.getPath());
   indexDir = DirectoryProviderHelper.getVerifiedIndexDir(directoryProviderName, properties, true);
   log.debugf("Index directory: %s", indexDir.getPath());
   try {
     indexName = indexDir.getCanonicalPath();
     directory = DirectoryProviderHelper.createFSIndex(indexDir, properties);
   } catch (IOException e) {
     throw new SearchException("Unable to initialize index: " + directoryProviderName, e);
   }
   copyChunkSize = DirectoryProviderHelper.getCopyBufferSize(directoryProviderName, properties);
   this.context = context;
   current = 0; // write to volatile to publish all state
 }
 @Override
 public void performWork(LuceneWork work, IndexWriterDelegate delegate, IndexingMonitor monitor) {
   final Serializable id = work.getId();
   final String tenantId = work.getTenantId();
   final Class<?> managedType = work.getEntityClass();
   DocumentBuilderIndexedEntity builder = workspace.getDocumentBuilder(managedType);
   try {
     if (DeleteWorkExecutor.isIdNumeric(builder)) {
       log.tracef(
           "Deleting %s#%s by query using an IndexWriter#updateDocument as id is Numeric",
           managedType, id);
       Query exactMatchQuery =
           NumericFieldUtils.createExactMatchQuery(builder.getIdKeywordName(), id);
       BooleanQuery.Builder deleteDocumentsQueryBuilder = new BooleanQuery.Builder();
       deleteDocumentsQueryBuilder.add(exactMatchQuery, Occur.FILTER);
       if (tenantId != null) {
         TermQuery tenantTermQuery =
             new TermQuery(new Term(DocumentBuilderIndexedEntity.TENANT_ID_FIELDNAME, tenantId));
         deleteDocumentsQueryBuilder.add(tenantTermQuery, Occur.FILTER);
       }
       delegate.deleteDocuments(deleteDocumentsQueryBuilder.build());
       // no need to log the Add operation as we'll log in the delegate
       this.addDelegate.performWork(work, delegate, monitor);
     } else {
       log.tracef("Updating %s#%s by id using an IndexWriter#updateDocument.", managedType, id);
       Term idTerm = new Term(builder.getIdKeywordName(), work.getIdInString());
       Map<String, String> fieldToAnalyzerMap = work.getFieldToAnalyzerMap();
       ScopedAnalyzerReference analyzerReference = builder.getAnalyzerReference();
       analyzerReference =
           AddWorkExecutor.updateAnalyzerMappings(
               workspace, analyzerReference, fieldToAnalyzerMap);
       delegate.updateDocument(idTerm, work.getDocument(), analyzerReference);
     }
     workspace.notifyWorkApplied(work);
   } catch (Exception e) {
     String message = "Unable to update " + managedType + "#" + id + " in index.";
     throw new SearchException(message, e);
   }
   if (monitor != null) {
      monitor.documentsAdded(1L);
   }
 }
  public void testBoostedFieldDesc() throws Exception {
    FullTextSession fullTextSession = Search.getFullTextSession(openSession());
    buildBoostedFieldIndex(fullTextSession);

    fullTextSession.clear();
    Transaction tx = fullTextSession.beginTransaction();

    QueryParser authorParser =
        new QueryParser(
            TestConstants.getTargetLuceneVersion(), "author", TestConstants.standardAnalyzer);
    QueryParser descParser =
        new QueryParser(
            TestConstants.getTargetLuceneVersion(), "description", TestConstants.standardAnalyzer);
    Query author = authorParser.parse("Wells");
    Query desc = descParser.parse("martians");

    BooleanQuery query = new BooleanQuery();
    query.add(author, BooleanClause.Occur.SHOULD);
    query.add(desc, BooleanClause.Occur.SHOULD);
    log.debug(query.toString());

    org.hibernate.search.FullTextQuery hibQuery =
        fullTextSession.createFullTextQuery(query, BoostedFieldDescriptionLibrary.class);
    List results = hibQuery.list();

    assertTrue(
        "incorrect document boost",
        ((BoostedFieldDescriptionLibrary) results.get(0)).getDescription().startsWith("Martians"));

    log.debug(hibQuery.explain(0).toString());
    log.debug(hibQuery.explain(1).toString());

    // cleanup
    for (Object element :
        fullTextSession
            .createQuery("from " + BoostedFieldDescriptionLibrary.class.getName())
            .list()) {
      fullTextSession.delete(element);
    }
    tx.commit();
    fullTextSession.close();
  }
  /**
   * Extracts the field bridge from the given {@code FieldBridge} annotation and returns it as a
   * {@code TwoWayFieldBridge}.
   *
   * @param fieldBridge the field bridge annotation
   * @param appliedOnType the type the bridge is applied on
   * @param reflectionManager the reflection manager instance
   * @return the resolved bridge, provided it is an instance of {@code TwoWayFieldBridge}
   * @throws org.hibernate.search.exception.SearchException if the resolved field bridge is not an
   *     instance of {@code TwoWayFieldBridge}
   */
 public TwoWayFieldBridge extractTwoWayType(
     org.hibernate.search.annotations.FieldBridge fieldBridge,
     XClass appliedOnType,
     ReflectionManager reflectionManager) {
   FieldBridge fb = extractType(fieldBridge, appliedOnType, reflectionManager);
   if (fb instanceof TwoWayFieldBridge) {
     return (TwoWayFieldBridge) fb;
   } else {
     throw LOG.fieldBridgeNotAnInstanceof(TwoWayFieldBridge.class.getSimpleName());
   }
 }
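  // Hedged sketch: a TwoWayStringBridge implementation (class name illustrative) satisfies
  // extractTwoWayType above because extractType wraps it in TwoWayString2FieldBridgeAdaptor,
  // which implements TwoWayFieldBridge; a one-way StringBridge would hit
  // fieldBridgeNotAnInstanceof instead.
  public static class IsoDateBridge implements org.hibernate.search.bridge.TwoWayStringBridge {
    @Override
    public String objectToString(Object object) {
      return object == null ? null : object.toString(); // e.g. a java.time.LocalDate in ISO-8601 form
    }

    @Override
    public Object stringToObject(String stringValue) {
      return stringValue == null ? null : java.time.LocalDate.parse(stringValue);
    }
  }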
 private Version getLuceneMatchVersion(SearchConfiguration cfg) {
   final Version version;
   String tmp = cfg.getProperty(Environment.LUCENE_MATCH_VERSION);
   if (StringHelper.isEmpty(tmp)) {
     log.recommendConfiguringLuceneVersion();
     version = Environment.DEFAULT_LUCENE_MATCH_VERSION;
   } else {
     try {
       version = Version.parseLeniently(tmp);
       if (log.isDebugEnabled()) {
         log.debug("Setting Lucene compatibility to Version " + version);
       }
     } catch (IllegalArgumentException e) {
       throw log.illegalLuceneVersionFormat(tmp, e.getMessage());
     } catch (ParseException e) {
       throw log.illegalLuceneVersionFormat(tmp, e.getMessage());
     }
   }
   return version;
 }
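  // Hedged configuration sketch for the property read above; the key is assumed to be the
  // standard "hibernate.search.lucene_version", and the value must be parseable by
  // org.apache.lucene.util.Version.parseLeniently (e.g. "5.5.0").
  private static java.util.Properties exampleLuceneVersionProperties() {
    java.util.Properties cfg = new java.util.Properties();
    cfg.setProperty("hibernate.search.lucene_version", "5.5.0");
    // If the property is missing, getLuceneMatchVersion logs a recommendation and falls back to
    // Environment.DEFAULT_LUCENE_MATCH_VERSION.
    return cfg;
  }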
 private void rollbackTransaction(Session session) throws Exception {
   try {
     if (transactionManager != null) {
       transactionManager.rollback();
     } else {
       session.getTransaction().rollback();
     }
   } catch (Exception e) {
     log.errorRollingBackTransaction(e.getMessage(), e);
   }
 }
  private void indexAllQueue(
      Session session, List<?> entities, InstanceInitializer sessionInitializer)
      throws InterruptedException {
    ConversionContext contextualBridge = new ContextualExceptionBridgeHelper();

    if (entities == null || entities.isEmpty()) {
      return;
    } else {
      log.tracef("received a list of objects to index: %s", entities);
      for (Object object : entities) {
        try {
          index(object, session, sessionInitializer, contextualBridge);
          monitor.documentsBuilt(1);
        } catch (RuntimeException e) {
          String errorMsg =
              log.massIndexerUnableToIndexInstance(object.getClass().getName(), object.toString());
          errorHandler.handleException(errorMsg, e);
        }
      }
    }
  }
  /**
   * Verifies entity-level preconditions to determine whether it is safe to skip index updates
   * when only specific fields or collections have changed.
   *
   * @return {@code true} if it seems safe to apply such optimizations
   */
 boolean stateInspectionOptimizationsEnabled() {
   if (!typeMetadata.areStateInspectionOptimizationsEnabled()) {
     return false;
   }
   if (typeMetadata.areClassBridgesUsed()) {
     log.tracef(
         "State inspection optimization disabled as entity %s uses class bridges",
         this.beanClass.getName());
      // can't know what a class bridge is going to look at -> reindex
      // TODO nice new feature to have?
      return false;
   }
   BoostStrategy boostStrategy = typeMetadata.getDynamicBoost();
   if (boostStrategy != null && !(boostStrategy instanceof DefaultBoostStrategy)) {
     log.tracef(
         "State inspection optimization disabled as DynamicBoost is enabled on entity %s",
         this.beanClass.getName());
      // as with class bridge: might be affected by any field
      // TODO nice new feature to have?
      return false;
   }
   return true;
 }
 private ObjectLookupMethod determineDefaultObjectLookupMethod() {
   String objectLookupMethod =
       configurationProperties.getProperty(Environment.OBJECT_LOOKUP_METHOD);
   if (objectLookupMethod == null) {
     return ObjectLookupMethod.SKIP; // default
   } else {
     try {
       return Enum.valueOf(ObjectLookupMethod.class, objectLookupMethod.toUpperCase(Locale.ROOT));
     } catch (IllegalArgumentException e) {
       throw log.invalidPropertyValue(objectLookupMethod, Environment.OBJECT_LOOKUP_METHOD);
     }
   }
 }
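  // Hedged configuration sketch for the lookup-method property read above; the key is assumed to
  // be "hibernate.search.query.object_lookup_method". Matching is effectively case-insensitive
  // because of the toUpperCase(Locale.ROOT) call, and omitting the property defaults to SKIP.
  private static java.util.Properties exampleObjectLookupProperties() {
    java.util.Properties cfg = new java.util.Properties();
    cfg.setProperty("hibernate.search.query.object_lookup_method", "persistence_context");
    // Unknown values fail fast through log.invalidPropertyValue(...) in the method above.
    return cfg;
  }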
 private synchronized void stopScheduledExecutor() {
   if (scheduledExecutorService == null) {
     return;
   }
   try {
     scheduledExecutorService.shutdown();
     scheduledExecutorService.awaitTermination(Long.MAX_VALUE, TimeUnit.SECONDS);
     running.set(false);
     scheduledExecutorService = null;
   } catch (InterruptedException e) {
     log.timedOutWaitingShutdown(indexName);
   }
 }
 public void stop() {
    // Read the volatile field to ensure visibility of state protected by the memory barrier;
    // the value itself is intentionally unused.
    @SuppressWarnings("unused")
    int readCurrentState = current;
   timer.cancel();
   task.stop();
   try {
     directory.close();
   } catch (Exception e) {
     log.unableToCloseLuceneDirectory(directory.getDirectory(), e);
   }
 }