  /** Test range-query filtering performance (prefixed with X so it is not picked up as a test). */
  public void XtestFilteringPerformance() throws Exception {
    int indexSize = 19999;
    float fractionCovered = .1f;

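    // range bounds for the filter, chosen so it covers roughly fractionCovered of the indexed terms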
    String l = t(0);
    String u = t((int) (indexSize * 10 * fractionCovered));

    SolrQueryRequest req = lrf.makeRequest();

    QParser parser = QParser.getParser("foomany_s:[" + l + " TO " + u + "]", null, req);
    Query rangeQ = parser.parse();
    List<Query> filters = new ArrayList<Query>();
    filters.add(rangeQ);

    parser =
        QParser.getParser(
            "{!dismax qf=t10_100_ws pf=t10_100_ws ps=20}" + t(0) + ' ' + t(1) + ' ' + t(2),
            null,
            req);
    Query q = parser.parse();

    // SolrIndexSearcher searcher = req.getSearcher();
    // DocSet range = searcher.getDocSet(rangeQ, null);

    createIndex2(indexSize, "foomany_s", "t10_100_ws");

    // doListGen(100, q, filters, false, true);
    doListGen(500, q, filters, false, true);

    req.close();
  }
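 /**
  * Converts order specifiers into a Lucene {@link Sort}. Each target must be a {@link Path};
  * numeric fields use their numeric sort type, all other fields sort using the configured locale.
  */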
 public Sort toSort(List<? extends OrderSpecifier<?>> orderBys) {
   List<SortField> sorts = new ArrayList<SortField>(orderBys.size());
   for (OrderSpecifier<?> order : orderBys) {
     if (!(order.getTarget() instanceof Path<?>)) {
       throw new IllegalArgumentException("argument was not of type Path.");
     }
     Class<?> type = order.getTarget().getType();
     boolean reverse = !order.isAscending();
     Path<?> path = getPath(order.getTarget());
     if (Number.class.isAssignableFrom(type)) {
       sorts.add(new SortField(toField(path), sortFields.get(type), reverse));
     } else {
       sorts.add(new SortField(toField(path), sortLocale, reverse));
     }
   }
   Sort sort = new Sort();
   sort.setSort(sorts.toArray(new SortField[sorts.size()]));
   return sort;
 }
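  /**
   * Returns the key of the record referenced by {@code field}: the single key value for a simple
   * key, a list of key values for a composite key, or an empty string if the reference is not set.
   */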
 private static Object getReferencedId(DataRecord next, ReferenceFieldMetadata field) {
   DataRecord record = (DataRecord) next.get(field);
   if (record != null) {
     Collection<FieldMetadata> keyFields = record.getType().getKeyFields();
     if (keyFields.size() == 1) {
       return record.get(keyFields.iterator().next());
     } else {
       List<Object> compositeKeyValues = new ArrayList<Object>(keyFields.size());
       for (FieldMetadata keyField : keyFields) {
         compositeKeyValues.add(record.get(keyField));
       }
       return compositeKeyValues;
     }
   } else {
     return StringUtils.EMPTY;
   }
 }
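  /**
   * Parses the main query string, filter queries and shard parameters from the request and stores
   * them on the {@link ResponseBuilder} for the other components to use.
   */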
  @Override
  public void prepare(ResponseBuilder rb) throws IOException {

    SolrQueryRequest req = rb.req;
    SolrParams params = req.getParams();
    if (!params.getBool(COMPONENT_NAME, true)) {
      return;
    }
    SolrQueryResponse rsp = rb.rsp;

    // Set field flags
    String fl = params.get(CommonParams.FL);
    int fieldFlags = 0;
    if (fl != null) {
      fieldFlags |= SolrPluginUtils.setReturnFields(fl, rsp);
    }
    rb.setFieldFlags(fieldFlags);

    String defType = params.get(QueryParsing.DEFTYPE, QParserPlugin.DEFAULT_QTYPE);

    // get it from the response builder to give a different component a chance
    // to set it.
    String queryString = rb.getQueryString();
    if (queryString == null) {
      // this is the normal way it's set.
      queryString = params.get(CommonParams.Q);
      rb.setQueryString(queryString);
    }

    try {
      QParser parser = QParser.getParser(rb.getQueryString(), defType, req);
      Query q = parser.getQuery();
      if (q == null) {
        // normalize a null query to a query that matches nothing
        q = new BooleanQuery();
      }
      rb.setQuery(q);
      rb.setSortSpec(parser.getSort(true));
      rb.setQparser(parser);

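      // parse any filter queries (fq) and add them to the ResponseBuilder's filter list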
      String[] fqs = params.getParams(CommonParams.FQ);
      if (fqs != null && fqs.length != 0) {
        List<Query> filters = rb.getFilters();
        if (filters == null) {
          filters = new ArrayList<Query>();
          rb.setFilters(filters);
        }
        for (String fq : fqs) {
          if (fq != null && fq.trim().length() != 0) {
            QParser fqp = QParser.getParser(fq, null, req);
            filters.add(fqp.getQuery());
          }
        }
      }
    } catch (ParseException e) {
      throw new SolrException(SolrException.ErrorCode.BAD_REQUEST, e);
    }

    // TODO: temporary... this should go in a different component.
    String shards = params.get(ShardParams.SHARDS);
    if (shards != null) {
      List<String> lst = StrUtils.splitSmart(shards, ",", true);
      rb.shards = lst.toArray(new String[lst.size()]);
    }
    String shards_rows = params.get(ShardParams.SHARDS_ROWS);
    if (shards_rows != null) {
      rb.shards_rows = Integer.parseInt(shards_rows);
    }
    String shards_start = params.get(ShardParams.SHARDS_START);
    if (shards_start != null) {
      rb.shards_start = Integer.parseInt(shards_start);
    }
  }
  /** Actually run the query */
  @Override
  public void process(ResponseBuilder rb) throws IOException {
    SolrQueryRequest req = rb.req;
    SolrQueryResponse rsp = rb.rsp;
    SolrParams params = req.getParams();
    if (!params.getBool(COMPONENT_NAME, true)) {
      return;
    }
    SolrIndexSearcher searcher = req.getSearcher();

    if (rb.getQueryCommand().getOffset() < 0) {
      throw new SolrException(
          SolrException.ErrorCode.BAD_REQUEST, "'start' parameter cannot be negative");
    }

    // -1 as flag if not set.
    long timeAllowed = (long) params.getInt(CommonParams.TIME_ALLOWED, -1);

    // Optional: This could also be implemented by the top-level searcher sending
    // a filter that lists the ids... that would be transparent to
    // the request handler, but would be more expensive (and would preserve score
    // too if desired).
    String ids = params.get(ShardParams.IDS);
    if (ids != null) {
      SchemaField idField = req.getSchema().getUniqueKeyField();
      List<String> idArr = StrUtils.splitSmart(ids, ",", true);
      int[] luceneIds = new int[idArr.size()];
      int docs = 0;
      for (int i = 0; i < idArr.size(); i++) {
        int id =
            req.getSearcher()
                .getFirstMatch(
                    new Term(idField.getName(), idField.getType().toInternal(idArr.get(i))));
        if (id >= 0) luceneIds[docs++] = id;
      }

      DocListAndSet res = new DocListAndSet();
      res.docList = new DocSlice(0, docs, luceneIds, null, docs, 0);
      if (rb.isNeedDocSet()) {
        List<Query> queries = new ArrayList<Query>();
        queries.add(rb.getQuery());
        List<Query> filters = rb.getFilters();
        if (filters != null) queries.addAll(filters);
        res.docSet = searcher.getDocSet(queries);
      }
      rb.setResults(res);
      rsp.add("response", rb.getResults().docList);
      return;
    }

    SolrIndexSearcher.QueryCommand cmd = rb.getQueryCommand();
    cmd.setTimeAllowed(timeAllowed);
    SolrIndexSearcher.QueryResult result = new SolrIndexSearcher.QueryResult();

    //
    // grouping / field collapsing
    //
    boolean doGroup = params.getBool(GroupParams.GROUP, false);
    if (doGroup) {
      try {
        cmd.groupCommands = new ArrayList<Grouping.Command>();

        String[] fields = params.getParams(GroupParams.GROUP_FIELD);
        String[] funcs = params.getParams(GroupParams.GROUP_FUNC);
        String[] queries = params.getParams(GroupParams.GROUP_QUERY);
        String groupSortStr = params.get(GroupParams.GROUP_SORT);
        Sort groupSort =
            groupSortStr != null ? QueryParsing.parseSort(groupSortStr, req.getSchema()) : null;

        int limitDefault = cmd.getLen(); // this is normally from "rows"
        int docsPerGroupDefault = params.getInt(GroupParams.GROUP_LIMIT, 1);

        // temporary: implement all group-by-field as group-by-func
        if (funcs == null) {
          funcs = fields;
        } else if (fields != null) {
          // catenate functions and fields
          String[] both = new String[fields.length + funcs.length];
          System.arraycopy(fields, 0, both, 0, fields.length);
          System.arraycopy(funcs, 0, both, fields.length, funcs.length);
          funcs = both;
        }

        if (funcs != null) {
          for (String groupByStr : funcs) {
            QParser parser = QParser.getParser(groupByStr, "func", rb.req);
            Query q = parser.getQuery();
            Grouping.CommandFunc gc = new Grouping.CommandFunc();
            gc.groupSort = groupSort;

            if (q instanceof FunctionQuery) {
              gc.groupBy = ((FunctionQuery) q).getValueSource();
            } else {
              gc.groupBy = new QueryValueSource(q, 0.0f);
            }
            gc.key = groupByStr;
            gc.groupLimit = limitDefault;
            gc.docsPerGroup = docsPerGroupDefault;

            cmd.groupCommands.add(gc);
          }
        }

        if (cmd.groupCommands.isEmpty()) cmd.groupCommands = null;

        if (cmd.groupCommands != null) {
          if (rb.doHighlights || rb.isDebug()) {
            // we need a single list of the returned docs
            cmd.setFlags(SolrIndexSearcher.GET_DOCLIST);
          }

          searcher.search(result, cmd);
          rb.setResult(result);
          rsp.add("grouped", result.groupedResults);
          // TODO: get "hits" a different way to log
          return;
        }
      } catch (ParseException e) {
        throw new SolrException(SolrException.ErrorCode.BAD_REQUEST, e);
      }
    }

    // normal search result
    searcher.search(result, cmd);
    rb.setResult(result);

    rsp.add("response", rb.getResults().docList);
    rsp.getToLog().add("hits", rb.getResults().docList.matches());

    doFieldSortValues(rb, searcher);
    doPrefetch(rb);
  }
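 /**
  * Builds a Hibernate Search full-text query for the given select: rejects types with composite
  * keys, strips joined types and fields, generates the Lucene query from the condition, then
  * applies ordering and paging before returning the results.
  */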
 @Override
 public StorageResults visit(Select select) {
   // TMDM-4654: Checks if entity has a composite PK.
   Set<ComplexTypeMetadata> compositeKeyTypes = new HashSet<ComplexTypeMetadata>();
   // TMDM-7496: Search should include references to reused types
   Collection<ComplexTypeMetadata> types =
       new HashSet<ComplexTypeMetadata>(select.accept(new SearchTransitiveClosure()));
   for (ComplexTypeMetadata type : types) {
     if (type.getKeyFields().size() > 1) {
       compositeKeyTypes.add(type);
     }
   }
   if (!compositeKeyTypes.isEmpty()) {
     StringBuilder message = new StringBuilder();
     Iterator<ComplexTypeMetadata> it = compositeKeyTypes.iterator();
     while (it.hasNext()) {
       ComplexTypeMetadata compositeKeyType = it.next();
       message.append(compositeKeyType.getName());
       if (it.hasNext()) {
         message.append(',');
       }
     }
     throw new FullTextQueryCompositeKeyException(message.toString());
   }
   // Removes Joins and joined fields.
   List<Join> joins = select.getJoins();
   if (!joins.isEmpty()) {
     Set<ComplexTypeMetadata> joinedTypes = new HashSet<ComplexTypeMetadata>();
     for (Join join : joins) {
       joinedTypes.add(join.getRightField().getFieldMetadata().getContainingType());
     }
     for (ComplexTypeMetadata joinedType : joinedTypes) {
       types.remove(joinedType);
     }
     List<TypedExpression> filteredFields = new LinkedList<TypedExpression>();
     for (TypedExpression expression : select.getSelectedFields()) {
       if (expression instanceof Field) {
         FieldMetadata fieldMetadata = ((Field) expression).getFieldMetadata();
         if (joinedTypes.contains(fieldMetadata.getContainingType())) {
           TypeMapping mapping =
               mappings.getMappingFromDatabase(fieldMetadata.getContainingType());
           filteredFields.add(
               new Alias(
                   new StringConstant(StringUtils.EMPTY),
                   mapping.getUser(fieldMetadata).getName()));
         } else {
           filteredFields.add(expression);
         }
       } else {
         filteredFields.add(expression);
       }
     }
     selectedFields.clear();
     selectedFields.addAll(filteredFields);
   }
   // Handle condition
   Condition condition = select.getCondition();
   if (condition == null) {
     throw new IllegalArgumentException("Expected a condition in the select clause but found none.");
   }
   // Create Lucene query (concatenates all sub queries together).
   FullTextSession fullTextSession = Search.getFullTextSession(session);
   Query parsedQuery = select.getCondition().accept(new LuceneQueryGenerator(types));
   // Create Hibernate Search query
   Set<Class<?>> classes = new HashSet<Class<?>>();
   for (ComplexTypeMetadata type : types) {
     String className = ClassCreator.getClassName(type.getName());
     try {
       ClassLoader contextClassLoader = Thread.currentThread().getContextClassLoader();
       classes.add(contextClassLoader.loadClass(className));
     } catch (ClassNotFoundException e) {
       throw new RuntimeException("Could not find class '" + className + "'.", e);
     }
   }
   FullTextQuery fullTextQuery =
       fullTextSession.createFullTextQuery(
           parsedQuery, classes.toArray(new Class<?>[classes.size()]));
   // Very important to leave this null (would disable ability to search across different types)
   fullTextQuery.setCriteriaQuery(null);
   fullTextQuery.setSort(Sort.RELEVANCE); // Default sort (if no order by specified).
   query =
       EntityFinder.wrap(
           fullTextQuery,
           (HibernateStorage) storage,
           session); // ensures only MDM entity objects are returned.
   // Order by
   for (OrderBy current : select.getOrderBy()) {
     current.accept(this);
   }
   // Paging
   Paging paging = select.getPaging();
   paging.accept(this);
   pageSize = paging.getLimit();
   boolean hasPaging = pageSize < Integer.MAX_VALUE;
   if (!hasPaging) {
     return createResults(query.scroll(ScrollMode.FORWARD_ONLY));
   } else {
     return createResults(query.list());
   }
 }
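  /**
   * Merges per-shard query and fetch results into a single {@link InternalSearchResponse},
   * combining facets, hits, suggestions and aggregations across shards.
   */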
  public InternalSearchResponse merge(
      ScoreDoc[] sortedDocs,
      AtomicArray<? extends QuerySearchResultProvider> queryResultsArr,
      AtomicArray<? extends FetchSearchResultProvider> fetchResultsArr) {

    List<? extends AtomicArray.Entry<? extends QuerySearchResultProvider>> queryResults =
        queryResultsArr.asList();
    List<? extends AtomicArray.Entry<? extends FetchSearchResultProvider>> fetchResults =
        fetchResultsArr.asList();

    if (queryResults.isEmpty()) {
      return InternalSearchResponse.empty();
    }

    QuerySearchResult firstResult = queryResults.get(0).value.queryResult();

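    // detect field sorting and remember which sort field, if any, carries the score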
    boolean sorted = false;
    int sortScoreIndex = -1;
    if (firstResult.topDocs() instanceof TopFieldDocs) {
      sorted = true;
      TopFieldDocs fieldDocs = (TopFieldDocs) firstResult.topDocs();
      for (int i = 0; i < fieldDocs.fields.length; i++) {
        if (fieldDocs.fields[i].getType() == SortField.Type.SCORE) {
          sortScoreIndex = i;
        }
      }
    }

    // merge facets
    InternalFacets facets = null;
    if (!queryResults.isEmpty()) {
      // we rely on the fact that the order of facets is the same on all query results
      if (firstResult.facets() != null
          && firstResult.facets().facets() != null
          && !firstResult.facets().facets().isEmpty()) {
        List<Facet> aggregatedFacets = Lists.newArrayList();
        List<Facet> namedFacets = Lists.newArrayList();
        for (Facet facet : firstResult.facets()) {
          // aggregate each facet name into a single list, and aggregate it
          namedFacets.clear();
          for (AtomicArray.Entry<? extends QuerySearchResultProvider> entry : queryResults) {
            for (Facet facet1 : entry.value.queryResult().facets()) {
              if (facet.getName().equals(facet1.getName())) {
                namedFacets.add(facet1);
              }
            }
          }
          if (!namedFacets.isEmpty()) {
            Facet aggregatedFacet =
                ((InternalFacet) namedFacets.get(0))
                    .reduce(new InternalFacet.ReduceContext(cacheRecycler, namedFacets));
            aggregatedFacets.add(aggregatedFacet);
          }
        }
        facets = new InternalFacets(aggregatedFacets);
      }
    }

    // count the total hits from the query results (the fetch phase might not return any hits,
    // e.g. when we scrolled past them)
    long totalHits = 0;
    float maxScore = Float.NEGATIVE_INFINITY;
    boolean timedOut = false;
    Boolean terminatedEarly = null;
    for (AtomicArray.Entry<? extends QuerySearchResultProvider> entry : queryResults) {
      QuerySearchResult result = entry.value.queryResult();
      if (result.searchTimedOut()) {
        timedOut = true;
      }
      if (result.terminatedEarly() != null) {
        if (terminatedEarly == null) {
          terminatedEarly = result.terminatedEarly();
        } else if (result.terminatedEarly()) {
          terminatedEarly = true;
        }
      }
      totalHits += result.topDocs().totalHits;
      if (!Float.isNaN(result.topDocs().getMaxScore())) {
        maxScore = Math.max(maxScore, result.topDocs().getMaxScore());
      }
    }
    if (Float.isInfinite(maxScore)) {
      maxScore = Float.NaN;
    }

    // clean the fetch counter
    for (AtomicArray.Entry<? extends FetchSearchResultProvider> entry : fetchResults) {
      entry.value.fetchResult().initCounter();
    }

    // merge hits
    List<InternalSearchHit> hits = new ArrayList<>();
    if (!fetchResults.isEmpty()) {
      for (ScoreDoc shardDoc : sortedDocs) {
        FetchSearchResultProvider fetchResultProvider = fetchResultsArr.get(shardDoc.shardIndex);
        if (fetchResultProvider == null) {
          continue;
        }
        FetchSearchResult fetchResult = fetchResultProvider.fetchResult();
        int index = fetchResult.counterGetAndIncrement();
        if (index < fetchResult.hits().internalHits().length) {
          InternalSearchHit searchHit = fetchResult.hits().internalHits()[index];
          searchHit.score(shardDoc.score);
          searchHit.shard(fetchResult.shardTarget());

          if (sorted) {
            FieldDoc fieldDoc = (FieldDoc) shardDoc;
            searchHit.sortValues(fieldDoc.fields);
            if (sortScoreIndex != -1) {
              searchHit.score(((Number) fieldDoc.fields[sortScoreIndex]).floatValue());
            }
          }

          hits.add(searchHit);
        }
      }
    }

    // merge suggest results
    Suggest suggest = null;
    if (!queryResults.isEmpty()) {
      final Map<String, List<Suggest.Suggestion>> groupedSuggestions = new HashMap<>();
      boolean hasSuggestions = false;
      for (AtomicArray.Entry<? extends QuerySearchResultProvider> entry : queryResults) {
        Suggest shardResult = entry.value.queryResult().queryResult().suggest();

        if (shardResult == null) {
          continue;
        }
        hasSuggestions = true;
        Suggest.group(groupedSuggestions, shardResult);
      }

      suggest =
          hasSuggestions
              ? new Suggest(Suggest.Fields.SUGGEST, Suggest.reduce(groupedSuggestions))
              : null;
    }

    // merge addAggregation
    InternalAggregations aggregations = null;
    if (!queryResults.isEmpty()) {
      if (firstResult.aggregations() != null && firstResult.aggregations().asList() != null) {
        List<InternalAggregations> aggregationsList = new ArrayList<>(queryResults.size());
        for (AtomicArray.Entry<? extends QuerySearchResultProvider> entry : queryResults) {
          aggregationsList.add((InternalAggregations) entry.value.queryResult().aggregations());
        }
        aggregations =
            InternalAggregations.reduce(
                aggregationsList, new ReduceContext(null, bigArrays, scriptService));
      }
    }

    InternalSearchHits searchHits =
        new InternalSearchHits(
            hits.toArray(new InternalSearchHit[hits.size()]), totalHits, maxScore);

    return new InternalSearchResponse(
        searchHits, facets, aggregations, suggest, timedOut, terminatedEarly);
  }