private void initKerberos() {
   String keytabFile = params.get(KERBEROS_KEYTAB, "").trim();
   if (keytabFile.length() == 0) {
     throw new IllegalArgumentException(
         KERBEROS_KEYTAB + " required because " + KERBEROS_ENABLED + " set to true");
   }
   String principal = params.get(KERBEROS_PRINCIPAL, "");
   if (principal.length() == 0) {
     throw new IllegalArgumentException(
         KERBEROS_PRINCIPAL + " required because " + KERBEROS_ENABLED + " set to true");
   }
   synchronized (HdfsDirectoryFactory.class) {
      if (kerberosInit == null) {
        kerberosInit = Boolean.TRUE;
        Configuration conf = new Configuration();
        conf.set("hadoop.security.authentication", "kerberos");
        UserGroupInformation.setConfiguration(conf);
        LOG.info(
            "Attempting to acquire kerberos ticket with keytab: {}, principal: {} ",
            keytabFile,
            principal);
        try {
          UserGroupInformation.loginUserFromKeytab(principal, keytabFile);
        } catch (IOException ioe) {
          throw new RuntimeException(ioe);
        }
        LOG.info("Got Kerberos ticket");
     }
   }
 }
 @Override
 public void finishStage(ResponseBuilder rb) {
   SolrParams params = rb.req.getParams();
   if (!params.getBool(COMPONENT_NAME, false)
       || !params.getBool(ClusteringParams.USE_SEARCH_RESULTS, false)) {
     return;
   }
   if (rb.stage == ResponseBuilder.STAGE_GET_FIELDS) {
     SearchClusteringEngine engine = getSearchClusteringEngine(rb);
     if (engine != null) {
       SolrDocumentList solrDocList = (SolrDocumentList) rb.rsp.getValues().get("response");
        // TODO: Currently, docIds is set to null in distributed mode, which means
        // CarrotParams.PRODUCE_SUMMARY does not work there.
        // To make CarrotParams.PRODUCE_SUMMARY work in distributed mode, we could either:
        // (a) have ClusteringComponent produce a summary in each shard and merge the
        //     summaries in finishStage(), or
        // (b) add a doHighlighting(SolrDocumentList, ...) method to SolrHighlighter and
        //     have SolrHighlighter use "external text" rather than stored values to
        //     produce snippets.
       Map<SolrDocument, Integer> docIds = null;
       Object clusters = engine.cluster(rb.getQuery(), solrDocList, docIds, rb.req);
       rb.rsp.add("clusters", clusters);
     } else {
       String name = getClusteringEngineName(rb);
       log.warn("No engine for: " + name);
     }
   }
 }
    private boolean needsScores(SolrParams params) {

      String sortSpec = params.get("sort");
      if (sortSpec != null && sortSpec.length() != 0) {
        String[] sorts = sortSpec.split(",");
        for (String s : sorts) {
          String[] parts = s.split(" ");
          if (parts[0].equals("score")) {
            return true;
          }
        }
      } else {
        // No sort specified so it defaults to score.
        return true;
      }

      String fl = params.get("fl");
      if (fl != null) {
        String[] fls = fl.split(",");
        for (String f : fls) {
          if (f.trim().equals("score")) {
            return true;
          }
        }
      }

      if (this.boosted != null) {
        return true;
      }

      return false;
    }
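A quick illustration of how this check evaluates typical requests (the parameter values are hypothetical):

     // sort=price asc, fl=id,name             -> false (neither the sort spec nor fl mentions score)
     // sort=score desc,price asc              -> true  (score appears in the sort spec)
     // no sort parameter at all               -> true  (the default sort is by score)
     // sort=price asc, fl=id,score            -> true  (score is explicitly requested in fl)
     // any request while this.boosted != null -> true  (elevated documents need scores)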
  private PivotFacetField(ResponseBuilder rb, PivotFacetValue parent, String fieldName) {

    field = fieldName;
    parentValue = parent;

    // facet params
    SolrParams parameters = rb.req.getParams();
    facetFieldMinimumCount = parameters.getFieldInt(field, FacetParams.FACET_PIVOT_MINCOUNT, 1);
    facetFieldOffset = parameters.getFieldInt(field, FacetParams.FACET_OFFSET, 0);
    facetFieldLimit = parameters.getFieldInt(field, FacetParams.FACET_LIMIT, 100);
    String defaultSort =
        (facetFieldLimit > 0) ? FacetParams.FACET_SORT_COUNT : FacetParams.FACET_SORT_INDEX;
    facetFieldSort = parameters.getFieldParam(field, FacetParams.FACET_SORT, defaultSort);

    valueCollection =
        new PivotFacetFieldValueCollection(
            facetFieldMinimumCount, facetFieldOffset, facetFieldLimit, facetFieldSort);

    if ((facetFieldLimit < 0)
        ||
        // TODO: possible refinement issue if limit=0 & mincount=0 & missing=true
        // (ie: we only want the missing count for this field)
        (facetFieldLimit <= 0 && facetFieldMinimumCount == 0)
        || (facetFieldSort.equals(FacetParams.FACET_SORT_INDEX) && facetFieldMinimumCount <= 0)) {
      // in any of these cases, there's no need to refine this level of the pivot
      needRefinementAtThisLevel = false;
    }
  }
 public static String toQueryString(SolrParams params, boolean xml) {
   StringBuilder sb = new StringBuilder(128);
   try {
     String amp = xml ? "&amp;" : "&";
     boolean first = true;
     Iterator<String> names = params.getParameterNamesIterator();
     while (names.hasNext()) {
       String key = names.next();
       String[] valarr = params.getParams(key);
       if (valarr == null) {
         sb.append(first ? "?" : amp);
         sb.append(key);
         first = false;
       } else {
         for (String val : valarr) {
           sb.append(first ? "?" : amp);
           sb.append(key);
           if (val != null) {
             sb.append('=');
             sb.append(URLEncoder.encode(val, "UTF-8"));
           }
           first = false;
         }
       }
     }
    } catch (IOException e) {
      throw new RuntimeException(e); // can't happen: UTF-8 is always supported
    }
   return sb.toString();
 }
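A short usage sketch; the parameter names and values below are invented for illustration:

  ModifiableSolrParams p = new ModifiableSolrParams();
  p.set("q", "solr rocks");
  p.add("fq", "inStock:true");
  p.set("rows", 10);
  String plain = toQueryString(p, false);
  // e.g. "?q=solr+rocks&fq=inStock%3Atrue&rows=10" (order follows the params iterator)
  String forXml = toQueryString(p, true);
  // same values, but separated by "&amp;" so the string can be embedded in XML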
  private void alternateField(
      NamedList docSummaries, SolrParams params, Document doc, String fieldName) {
    String alternateField = params.getFieldParam(fieldName, HighlightParams.ALTERNATE_FIELD);
    if (alternateField != null && alternateField.length() > 0) {
      IndexableField[] docFields = doc.getFields(alternateField);
      List<String> listFields = new ArrayList<String>();
      for (IndexableField field : docFields) {
        if (field.binaryValue() == null) listFields.add(field.stringValue());
      }

      String[] altTexts = listFields.toArray(new String[listFields.size()]);

      if (altTexts != null && altTexts.length > 0) {
        Encoder encoder = getEncoder(fieldName, params);
        int alternateFieldLen =
            params.getFieldInt(fieldName, HighlightParams.ALTERNATE_FIELD_LENGTH, 0);
        List<String> altList = new ArrayList<String>();
        int len = 0;
        for (String altText : altTexts) {
          if (alternateFieldLen <= 0) {
            altList.add(encoder.encodeText(altText));
          } else {
            altList.add(
                len + altText.length() > alternateFieldLen
                    ? encoder.encodeText(new String(altText.substring(0, alternateFieldLen - len)))
                    : encoder.encodeText(altText));
            len += altText.length();
            if (len >= alternateFieldLen) break;
          }
        }
        docSummaries.add(fieldName, altList);
      }
    }
  }
  public Fragmenter getFragmenter(String fieldName, SolrParams params) {
    numRequests++;
    params = SolrParams.wrapDefaults(params, defaults);

    int fragsize = params.getFieldInt(fieldName, HighlightParams.FRAGSIZE, 100);
    return (fragsize <= 0) ? new NullFragmenter() : new LuceneGapFragmenter(fragsize);
  }
 private void handleAdmin(SolrQueryRequest req, SolrQueryResponse rsp, SolrParams params) {
   String action = params.get("action");
   if ("stop".equalsIgnoreCase(action)) {
     String id = params.get("id");
     DaemonStream d = daemons.get(id);
     if (d != null) {
       d.close();
       rsp.add("result-set", new DaemonResponseStream("Deamon:" + id + " stopped on " + coreName));
     } else {
        rsp.add(
            "result-set", new DaemonResponseStream("Daemon:" + id + " not found on " + coreName));
     }
   } else if ("start".equalsIgnoreCase(action)) {
     String id = params.get("id");
     DaemonStream d = daemons.get(id);
     d.open();
     rsp.add("result-set", new DaemonResponseStream("Deamon:" + id + " started on " + coreName));
   } else if ("list".equalsIgnoreCase(action)) {
     Collection<DaemonStream> vals = daemons.values();
     rsp.add("result-set", new DaemonCollectionStream(vals));
   } else if ("kill".equalsIgnoreCase(action)) {
     String id = params.get("id");
     DaemonStream d = daemons.remove(id);
     if (d != null) {
       d.close();
     }
     rsp.add("result-set", new DaemonResponseStream("Deamon:" + id + " killed on " + coreName));
   }
 }
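A minimal sketch of driving this handler, assuming the enclosing handler's req and rsp are in scope; the daemon id is hypothetical:

  ModifiableSolrParams admin = new ModifiableSolrParams();
  admin.set("action", "stop");   // also understood: "start", "list", "kill"
  admin.set("id", "daemon-1");   // required for stop/start/kill, ignored for list
  handleAdmin(req, rsp, admin);  // adds a DaemonResponseStream (or DaemonCollectionStream) under "result-set"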
  /**
   * Helper method for determining the list of fields that we should try to find term vectors on.
   *
   * <p>Does simple (non-glob-supporting) parsing on the {@link TermVectorParams#FIELDS} param if
   * specified, otherwise it returns the concrete field values specified in {@link CommonParams#FL}
   * -- ignoring functions, transformers, or literals.
   *
   * <p>If "fl=*" is used, or neither param is specified, then <code>null</code> will be returned.
   * If the empty set is returned, it means the "fl" specified consisted entirely of things that are
   * not real fields (ie: functions, transformers, partial-globs, score, etc...) and not supported
   * by this component.
   */
  private Set<String> getFields(ResponseBuilder rb) {
    SolrParams params = rb.req.getParams();
    String[] fldLst = params.getParams(TermVectorParams.FIELDS);
    if (null == fldLst || 0 == fldLst.length || (1 == fldLst.length && 0 == fldLst[0].length())) {

      // no tv.fl, parse the main fl
      ReturnFields rf = new SolrReturnFields(params.getParams(CommonParams.FL), rb.req);

      if (rf.wantsAllFields()) {
        return null;
      }

      Set<String> fieldNames = rf.getLuceneFieldNames();
      return (null != fieldNames)
          ? fieldNames
          :
          // return empty set indicating no fields should be used
          Collections.<String>emptySet();
    }

    // otherwise use the raw fldLst as-is, with no special parsing or globs
    Set<String> fieldNames = new LinkedHashSet<String>();
    for (String fl : fldLst) {
      fieldNames.addAll(Arrays.asList(SolrPluginUtils.split(fl)));
    }
    return fieldNames;
  }
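Hypothetical parameter combinations and what getFields(rb) resolves them to, following the Javadoc above:

   // tv.fl=title,body              -> {title, body}  (tv.fl wins; no glob parsing)
   // fl=id,title,score             -> {id, title}    (score and other pseudo-fields are dropped)
   // fl=*  (or neither param set)  -> null           (meaning: use all fields)
   // fl=sum(x,y)                   -> empty set      (nothing usable for term vectors)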
  public void processGetVersions(ResponseBuilder rb) throws IOException {
    SolrQueryRequest req = rb.req;
    SolrQueryResponse rsp = rb.rsp;
    SolrParams params = req.getParams();

    if (!params.getBool(COMPONENT_NAME, true)) {
      return;
    }

    int nVersions = params.getInt("getVersions", -1);
    if (nVersions == -1) return;

    String sync = params.get("sync");
    if (sync != null) {
      processSync(rb, nVersions, sync);
      return;
    }

    UpdateLog ulog = req.getCore().getUpdateHandler().getUpdateLog();
    if (ulog == null) return;

    UpdateLog.RecentUpdates recentUpdates = ulog.getRecentUpdates();
    try {
      rb.rsp.add("versions", recentUpdates.getVersions(nVersions));
    } finally {
      recentUpdates.close(); // cache this somehow?
    }
  }
  public Fragmenter getFragmenter(String fieldName, SolrParams params) {
    numRequests++;
    if (defaults != null) {
      params = new DefaultSolrParams(params, defaults);
    }
    int fragsize =
        params.getFieldInt(
            fieldName, HighlightParams.FRAGSIZE, LuceneRegexFragmenter.DEFAULT_FRAGMENT_SIZE);
    int increment =
        params.getFieldInt(
            fieldName, HighlightParams.INCREMENT, LuceneRegexFragmenter.DEFAULT_INCREMENT_GAP);
    float slop =
        params.getFieldFloat(fieldName, HighlightParams.SLOP, LuceneRegexFragmenter.DEFAULT_SLOP);
    int maxchars =
        params.getFieldInt(
            fieldName,
            HighlightParams.MAX_RE_CHARS,
            LuceneRegexFragmenter.DEFAULT_MAX_ANALYZED_CHARS);
    String rawpat =
        params.getFieldParam(
            fieldName, HighlightParams.PATTERN, LuceneRegexFragmenter.DEFAULT_PATTERN_RAW);

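    // Note: the identity comparison below is intentional; when no explicit pattern was supplied,
    // getFieldParam hands back the same String instance used to build defaultPattern, so the
    // precompiled pattern can be reused instead of recompiling.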
    Pattern p = rawpat == defaultPatternRaw ? defaultPattern : Pattern.compile(rawpat);

    if (fragsize <= 0) {
      return new NullFragmenter();
    }

    return new LuceneRegexFragmenter(fragsize, increment, slop, maxchars, p);
  }
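A minimal sketch of the per-field knobs this factory consults; the values and the field name are hypothetical, and the constants are the ones referenced above:

  ModifiableSolrParams hl = new ModifiableSolrParams();
  hl.set(HighlightParams.FRAGSIZE, 70);        // <= 0 would yield a NullFragmenter
  hl.set(HighlightParams.INCREMENT, 50);
  hl.set(HighlightParams.SLOP, "0.6");
  hl.set(HighlightParams.MAX_RE_CHARS, 10000);
  hl.set(HighlightParams.PATTERN, "\\w[-\\w ,]{20,200}");
  Fragmenter f = getFragmenter("content", hl); // per-field overrides (f.content.hl.*) also apply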
  /** Generate external process results (if they have not already been generated). */
  @Override
  public void prepare(ResponseBuilder rb) throws IOException {
    SolrParams params = rb.req.getParams();
    if (!params.getBool(getName(), false)) {
      return;
    }

    XJoinResults<?> results = (XJoinResults<?>) rb.req.getContext().get(getResultsTag());
    if (results != null) {
      return;
    }

    // generate external process results, by passing 'external' prefixed parameters
    // from the query string to our factory
    String prefix = getName() + "." + XJoinParameters.EXTERNAL_PREFIX + ".";
    ModifiableSolrParams externalParams = new ModifiableSolrParams();
    for (Iterator<String> it = params.getParameterNamesIterator(); it.hasNext(); ) {
      String name = it.next();
      if (name.startsWith(prefix)) {
        externalParams.set(name.substring(prefix.length()), params.get(name));
      }
    }
    results = factory.getResults(externalParams);
    rb.req.getContext().put(getResultsTag(), results);
  }
  @Test
  public void testStandardParseParamsAndFillStreams() throws Exception {
    ArrayList<ContentStream> streams = new ArrayList<ContentStream>();
    Map<String, String[]> params = new HashMap<String, String[]>();
    params.put("q", new String[] {"hello"});

    // Set up the expected behavior
    String[] ct =
        new String[] {
          "application/x-www-form-urlencoded",
          "Application/x-www-form-urlencoded",
          "application/x-www-form-urlencoded; charset=utf-8",
          "application/x-www-form-urlencoded;"
        };

    for (String contentType : ct) {
      HttpServletRequest request = createMock(HttpServletRequest.class);
      expect(request.getMethod()).andReturn("POST").anyTimes();
      expect(request.getContentType()).andReturn(contentType).anyTimes();
      expect(request.getParameterMap()).andReturn(params).anyTimes();
      replay(request);

      MultipartRequestParser multipart = new MultipartRequestParser(1000000);
      RawRequestParser raw = new RawRequestParser();
      StandardRequestParser standard = new StandardRequestParser(multipart, raw);

      SolrParams p = standard.parseParamsAndFillStreams(request, streams);

      assertEquals("contentType: " + contentType, "hello", p.get("q"));
    }
  }
 private void alternateField(
     NamedList docSummaries, SolrParams params, Document doc, String fieldName) {
   String alternateField = params.getFieldParam(fieldName, HighlightParams.ALTERNATE_FIELD);
   if (alternateField != null && alternateField.length() > 0) {
     String[] altTexts = doc.getValues(alternateField);
     if (altTexts != null && altTexts.length > 0) {
       int alternateFieldLen =
           params.getFieldInt(fieldName, HighlightParams.ALTERNATE_FIELD_LENGTH, 0);
       if (alternateFieldLen <= 0) {
         docSummaries.add(fieldName, altTexts);
       } else {
         List<String> altList = new ArrayList<String>();
         int len = 0;
         for (String altText : altTexts) {
           altList.add(
               len + altText.length() > alternateFieldLen
                   ? new String(altText.substring(0, alternateFieldLen - len))
                   : altText);
           len += altText.length();
           if (len >= alternateFieldLen) break;
         }
         docSummaries.add(fieldName, altList);
       }
     }
   }
 }
 private void extractRemotePath(String corename, String origCorename, int idx)
     throws UnsupportedEncodingException, KeeperException, InterruptedException {
   if (core == null && idx > 0) {
     coreUrl = getRemotCoreUrl(corename, origCorename);
     // don't proxy for internal update requests
     invalidStates = checkStateIsValid(queryParams.get(CloudSolrClient.STATE_VERSION));
     if (coreUrl != null
         && queryParams.get(DistributingUpdateProcessorFactory.DISTRIB_UPDATE_PARAM) == null) {
       path = path.substring(idx);
       if (invalidStates != null) {
         // it does not make sense to send the request to a remote node
         throw new SolrException(
             SolrException.ErrorCode.INVALID_STATE,
             new String(Utils.toJSON(invalidStates), org.apache.lucene.util.IOUtils.UTF_8));
       }
       action = REMOTEQUERY;
     } else {
       if (!retry) {
         // we couldn't find a core to work with, try reloading aliases
         // TODO: it would be nice if admin ui elements skipped this...
         ZkStateReader reader = cores.getZkController().getZkStateReader();
         reader.updateAliases();
         action = RETRY;
       }
     }
   }
 }
 private void doHighlightingByFastVectorHighlighter(
     FastVectorHighlighter highlighter,
     FieldQuery fieldQuery,
     SolrQueryRequest req,
     NamedList docSummaries,
     int docId,
     Document doc,
     String fieldName)
     throws IOException {
   SolrParams params = req.getParams();
   SolrFragmentsBuilder solrFb = getSolrFragmentsBuilder(fieldName, params);
   String[] snippets =
       highlighter.getBestFragments(
           fieldQuery,
           req.getSearcher().getIndexReader(),
           docId,
           fieldName,
           params.getFieldInt(fieldName, HighlightParams.FRAGSIZE, 100),
           params.getFieldInt(fieldName, HighlightParams.SNIPPETS, 1),
           getFragListBuilder(fieldName, params),
           getFragmentsBuilder(fieldName, params),
           solrFb.getPreTags(params, fieldName),
           solrFb.getPostTags(params, fieldName),
           getEncoder(fieldName, params));
   if (snippets != null && snippets.length > 0) docSummaries.add(fieldName, snippets);
   else alternateField(docSummaries, params, doc, fieldName);
 }
 @Override
 public void modifyRequest(ResponseBuilder rb, SearchComponent who, ShardRequest sreq) {
   SolrParams params = rb.req.getParams();
   if (!params.getBool(COMPONENT_NAME, false)
       || !params.getBool(ClusteringParams.USE_SEARCH_RESULTS, false)) {
     return;
   }
   sreq.params.remove(COMPONENT_NAME);
   if ((sreq.purpose & ShardRequest.PURPOSE_GET_FIELDS) != 0) {
     String fl = sreq.params.get(CommonParams.FL, "*");
      // if fl=* then we don't need to check
     if (fl.indexOf('*') >= 0) return;
     Set<String> fields = getSearchClusteringEngine(rb).getFieldsToLoad(rb.req);
     if (fields == null || fields.size() == 0) return;
     StringBuilder sb = new StringBuilder();
     String[] flparams = fl.split("[,\\s]+");
     Set<String> flParamSet = new HashSet<String>(flparams.length);
     for (String flparam : flparams) {
        // no need to trim() because split() on [,\s]+ already removes surrounding whitespace
       flParamSet.add(flparam);
     }
     for (String aFieldToLoad : fields) {
       if (!flParamSet.contains(aFieldToLoad)) {
         sb.append(',').append(aFieldToLoad);
       }
     }
     if (sb.length() > 0) {
       sreq.params.set(CommonParams.FL, fl + sb.toString());
     }
   }
 }
 @Override
 public void prepare(ResponseBuilder rb) throws IOException {
   SolrParams params = rb.req.getParams();
   if (!params.getBool(COMPONENT_NAME, false)) {
     return;
   }
 }
  public void handleRequestBody(SolrQueryRequest req, SolrQueryResponse rsp) throws Exception {
    SolrParams params = req.getParams();
    params = adjustParams(params);
    req.setParams(params);
    TupleStream tupleStream = null;

    try {
      tupleStream = this.streamFactory.constructStream(params.get("expr"));
    } catch (Exception e) {
      // Catch exceptions that occur while the stream is being created. This will include streaming
      // expression parse rules.
      SolrException.log(logger, e);
      rsp.add("result-set", new DummyErrorStream(e));

      return;
    }

    int worker = params.getInt("workerID", 0);
    int numWorkers = params.getInt("numWorkers", 1);
    StreamContext context = new StreamContext();
    context.workerID = worker;
    context.numWorkers = numWorkers;
    context.setSolrClientCache(clientCache);
    tupleStream.setStreamContext(context);
    rsp.add("result-set", new TimerStream(new ExceptionStream(tupleStream)));
  }
  /**
   * Handle "UNLOAD" Action
   *
   * @param req
   * @param rsp
   * @return true if a modification has resulted that requires persistence of the CoreContainer
   *     configuration.
   */
  protected boolean handleUnloadAction(SolrQueryRequest req, SolrQueryResponse rsp)
      throws SolrException {
    SolrParams params = req.getParams();
    String cname = params.get(CoreAdminParams.CORE);
    SolrCore core = coreContainer.remove(cname);
    if (core == null) {
      throw new SolrException(
          SolrException.ErrorCode.BAD_REQUEST, "No such core exists '" + cname + "'");
    }
    if (params.getBool(CoreAdminParams.DELETE_INDEX, false)) {
      core.addCloseHook(
          new CloseHook() {
            @Override
            public void preClose(SolrCore core) {}

            @Override
            public void postClose(SolrCore core) {
              File dataDir = new File(core.getIndexDir());
              File[] files = dataDir.listFiles();
              if (files != null) {
                for (File file : files) {
                  if (!file.delete()) {
                    log.error(file.getAbsolutePath() + " could not be deleted on core unload");
                  }
                }
              }
              if (!dataDir.delete())
                log.error(dataDir.getAbsolutePath() + " could not be deleted on core unload");
            }
          });
    }
    core.close();
    return coreContainer.isPersistent();
  }
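Illustrative parameters for this action (the core name is made up):

   // CoreAdminParams.CORE         -> "collection1"  (required; an unknown name raises BAD_REQUEST)
   // CoreAdminParams.DELETE_INDEX -> true           (optional; also deletes the index files once the core closes)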
  @Test
  public void testGroupCollapseFilterFieldAllFiltered() throws IOException {
    mockResponse(true);
    when(rb.grouping()).thenReturn(true);
    when(params.getBool(GroupCollapseParams.GROUP_COLLAPSE, false)).thenReturn(true);
    when(params.get(GroupCollapseParams.GROUP_COLLAPSE_FL))
        .thenReturn("price,discount,isCloseout,color,colorFamily");
    when(params.get(GroupCollapseParams.GROUP_COLLAPSE_FF)).thenReturn(FIELD_CLOSEOUT);
    component.process(rb);
    verify(rb).grouping();
    verify(rb).getGroupingSpec();
    verify(params).getBool(GroupCollapseParams.GROUP_COLLAPSE, false);
    verify(params).get(GroupCollapseParams.GROUP_COLLAPSE_FL);
    verify(params).get(GroupCollapseParams.GROUP_COLLAPSE_FF);
    verify(params).getParams(GroupCollapseParams.GROUP_COLLAPSE_FQ);
    verifyNoMoreInteractions(rb);
    verifyNoMoreInteractions(params);

    ArgumentCaptor<NamedList> namedListArgument = ArgumentCaptor.forClass(NamedList.class);
    verify(rsp).add(eq("groups_summary"), namedListArgument.capture());

    NamedList groupsSummary = namedListArgument.getValue();
    NamedList productId = (NamedList) groupsSummary.get("productId");
    assertNotNull(productId);
    Set<String> colorFamilies = new HashSet<String>();
    colorFamilies.add("RedColorFamily");
    colorFamilies.add("BlackColorFamily");
    verifyProductSummary(
        (NamedList) productId.get("product1"), 80.0f, 100.0f, 0.0f, 20.0f, 2, colorFamilies);
    colorFamilies = new HashSet<String>();
    colorFamilies.add("OrangeColorFamily");
    colorFamilies.add("BrownColorFamily");
    verifyProductSummary(
        (NamedList) productId.get("product2"), 60.0f, 80.0f, 20.0f, 40.0f, 2, colorFamilies);
  }
 private void initParameters(SolrParams parameters) {
   if (parameters != null) {
     this.setEnabled(parameters.getBool("enabled", true));
     this.inputFieldname = parameters.get(INPUT_FIELD_PARAM, DEFAULT_INPUT_FIELDNAME);
     this.boostFieldname = parameters.get(BOOST_FIELD_PARAM, DEFAULT_BOOST_FIELDNAME);
     this.boostFilename = parameters.get(BOOST_FILENAME_PARAM);
   }
 }
 static void setWt(SolrQueryRequest req, String wt) {
   SolrParams params = req.getParams();
   if (params.get(CommonParams.WT) != null) return; // wt is set by user
   Map<String, String> map = new HashMap<>(1);
   map.put(CommonParams.WT, wt);
   map.put("indent", "true");
   req.setParams(SolrParams.wrapDefaults(params, new MapSolrParams(map)));
 }
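An illustrative call, assuming a SolrQueryRequest req is in scope:

  setWt(req, "json");
  // If the client already supplied wt, the request is left untouched; otherwise wt=json and
  // indent=true are applied as defaults without overriding anything user-supplied.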
    public NamedList buildResponse() {
      NamedList<Object> response = new SimpleOrderedMap<>();

      // determine whether we are sorting by index order or by count
      boolean sort =
          !TermsParams.TERMS_SORT_INDEX.equals(
              params.get(TermsParams.TERMS_SORT, TermsParams.TERMS_SORT_COUNT));

      // init minimum frequency
      long freqmin = 1;
      String s = params.get(TermsParams.TERMS_MINCOUNT);
      if (s != null) freqmin = Long.parseLong(s);

      // init maximum frequency; defaults to Long.MAX_VALUE (unlimited)
      long freqmax = -1;
      s = params.get(TermsParams.TERMS_MAXCOUNT);
      if (s != null) freqmax = Long.parseLong(s);
      if (freqmax < 0) {
        freqmax = Long.MAX_VALUE;
      }

      // init limit; defaults to 10, a negative value means no limit
      long limit = 10;
      s = params.get(TermsParams.TERMS_LIMIT);
      if (s != null) limit = Long.parseLong(s);
      if (limit < 0) {
        limit = Long.MAX_VALUE;
      }

      // loop through each field we want terms from
      for (String key : fieldmap.keySet()) {
        NamedList<Number> fieldterms = new SimpleOrderedMap<>();
        TermsResponse.Term[] data = null;
        if (sort) {
          data = getCountSorted(fieldmap.get(key));
        } else {
          data = getLexSorted(fieldmap.get(key));
        }

        // loop through each term until we hit the limit
        int cnt = 0;
        for (TermsResponse.Term tc : data) {
          if (tc.getFrequency() >= freqmin && tc.getFrequency() <= freqmax) {
            fieldterms.add(tc.getTerm(), num(tc.getFrequency()));
            cnt++;
          }

          if (cnt >= limit) {
            break;
          }
        }

        response.add(key, fieldterms);
      }

      return response;
    }
 @Override
 public NamedList<Object> request(SolrRequest request) throws SolrServerException, IOException {
   SolrParams reqParams = request.getParams();
   String collection =
       (reqParams != null)
           ? reqParams.get("collection", getDefaultCollection())
           : getDefaultCollection();
   return requestWithRetryOnStaleState(request, 0, collection);
 }
  public void processGetUpdates(ResponseBuilder rb) throws IOException {
    SolrQueryRequest req = rb.req;
    SolrQueryResponse rsp = rb.rsp;
    SolrParams params = req.getParams();

    if (!params.getBool(COMPONENT_NAME, true)) {
      return;
    }

    String versionsStr = params.get("getUpdates");
    if (versionsStr == null) return;

    UpdateLog ulog = req.getCore().getUpdateHandler().getUpdateLog();
    if (ulog == null) return;

    List<String> versions = StrUtils.splitSmart(versionsStr, ",", true);

    List<Object> updates = new ArrayList<Object>(versions.size());

    long minVersion = Long.MAX_VALUE;

    // TODO: get this from cache instead of rebuilding?
    UpdateLog.RecentUpdates recentUpdates = ulog.getRecentUpdates();
    try {
      for (String versionStr : versions) {
        long version = Long.parseLong(versionStr);
        try {
          Object o = recentUpdates.lookup(version);
          if (o == null) continue;

          if (version > 0) {
            minVersion = Math.min(minVersion, version);
          }

          // TODO: do any kind of validation here?
          updates.add(o);

        } catch (SolrException e) {
          log.warn("Exception reading log for updates", e);
        } catch (ClassCastException e) {
          log.warn("Exception reading log for updates", e);
        }
      }

      // Must return all delete-by-query commands that occur after the first add requested
      // since they may apply.
      updates.addAll(recentUpdates.getDeleteByQuery(minVersion));

      rb.rsp.add("updates", updates);

    } finally {
      recentUpdates.close(); // cache this somehow?
    }
  }
 /**
  * Handle "RELOAD" action
  *
  * @param req
  * @param rsp
  * @return true if a modification has resulted that requires persistence of the CoreContainer
  *     configuration.
  */
 protected boolean handleReloadAction(SolrQueryRequest req, SolrQueryResponse rsp) {
   SolrParams params = req.getParams();
   String cname = params.get(CoreAdminParams.CORE);
   try {
     coreContainer.reload(cname);
     return false; // no change on reload
   } catch (Exception ex) {
     throw new SolrException(
         SolrException.ErrorCode.SERVER_ERROR, "Error handling 'reload' action", ex);
   }
 }
  /**
   * For example:
   *
   * <p>String json = solr.request( "/select?qt=dismax&wt=json&q=...", null ); String xml =
   * solr.request( "/update", "&lt;add><doc><field ..." );
   */
  public String request(String pathAndParams, String body) throws Exception {
    String path = null;
    SolrParams params = null;
    int idx = pathAndParams.indexOf('?');
    if (idx > 0) {
      path = pathAndParams.substring(0, idx);
      params = SolrRequestParsers.parseQueryString(pathAndParams.substring(idx + 1));
    } else {
      path = pathAndParams;
      params = new MapSolrParams(new HashMap<String, String>());
    }

    // Extract the handler from the path or params
    SolrRequestHandler handler = core.getRequestHandler(path);
    if (handler == null) {
      if ("/select".equals(path) || "/select/".equalsIgnoreCase(path)) {
        String qt = params.get(CommonParams.QT);
        handler = core.getRequestHandler(qt);
        if (handler == null) {
          throw new SolrException(SolrException.ErrorCode.BAD_REQUEST, "unknown handler: " + qt);
        }
      }
    }
    if (handler == null) {
      throw new SolrException(SolrException.ErrorCode.BAD_REQUEST, "unknown handler: " + path);
    }

    // Make a stream for the 'body' content
    List<ContentStream> streams = new ArrayList<ContentStream>(1);
    if (body != null && body.length() > 0) {
      streams.add(new ContentStreamBase.StringStream(body));
    }

    SolrQueryRequest req = null;
    try {
      req = parser.buildRequestFrom(core, params, streams);
      SolrQueryResponse rsp = new SolrQueryResponse();
      core.execute(handler, req, rsp);
      if (rsp.getException() != null) {
        throw rsp.getException();
      }

      // Now write it out
      QueryResponseWriter responseWriter = core.getQueryResponseWriter(req);
      StringWriter out = new StringWriter();
      responseWriter.write(out, req, rsp);
      return out.toString();
    } finally {
      if (req != null) {
        req.close();
      }
    }
  }
 @Override
 public void init(NamedList args) {
   params = SolrParams.toSolrParams(args);
   this.hdfsDataDir = params.get(HDFS_HOME);
   if (this.hdfsDataDir != null && this.hdfsDataDir.length() == 0) {
     this.hdfsDataDir = null;
   }
   boolean kerberosEnabled = params.getBool(KERBEROS_ENABLED, false);
   LOG.info("Solr Kerberos Authentication " + (kerberosEnabled ? "enabled" : "disabled"));
   if (kerberosEnabled) {
     initKerberos();
   }
 }
  @Test
  public void withFieldAliasesWhenNoSupportedLocales() {
    Map<String, String> parameters = new HashMap<String, String>();
    parameters.put("qf", "comment^0.40");
    parameters.put("xwiki.multilingualFields", "title, comment");

    SolrParams paramsWithAliases =
        plugin.withFieldAliases("title:text", new MapSolrParams(parameters));

    // Aliases for the ROOT locale.
    assertEquals("title__", paramsWithAliases.get("f.title.qf"));
    assertEquals("comment__", paramsWithAliases.get("f.comment.qf"));
  }