Example #1
    @Override
    public <OutType> Yielder<OutType> toYielder(
        final OutType initValue, final YieldingAccumulator<OutType, T> accumulator) {
      notifyLatch.countDown();

      try {
        waitYieldLatch.await(25, TimeUnit.MILLISECONDS);
      } catch (Exception e) {
        throw Throwables.propagate(e);
      }

      final Yielder<OutType> baseYielder = baseSequence.toYielder(initValue, accumulator);
      return new Yielder<OutType>() {
        @Override
        public OutType get() {
          try {
            waitLatch.await(25, TimeUnit.MILLISECONDS);
          } catch (Exception e) {
            throw Throwables.propagate(e);
          }
          return baseYielder.get();
        }

        @Override
        public Yielder<OutType> next(OutType initValue) {
          return baseYielder.next(initValue);
        }

        @Override
        public boolean isDone() {
          return baseYielder.isDone();
        }

        @Override
        public void close() throws IOException {
          baseYielder.close();
        }
      };
    }
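
A minimal consumption sketch for the wrapper above, assuming the same Sequence, Yielder and YieldingAccumulator types imported by the example (their package differs across Druid versions): a Yielder is driven with get()/next()/isDone(), and the final yielder must always be closed so the underlying sequence can release its resources. The drainAndCount helper below is hypothetical and not part of the original code.

  // Hypothetical helper, not part of the example above. Assumes the same
  // Sequence / Yielder / YieldingAccumulator imports as the example; IOException is java.io.IOException.
  public static <T> int drainAndCount(Sequence<T> sequence) throws IOException {
    Yielder<T> yielder =
        sequence.toYielder(
            null,
            new YieldingAccumulator<T, T>() {
              @Override
              public T accumulate(T accumulated, T in) {
                yield(); // hand each element back to the caller as soon as it arrives
                return in;
              }
            });
    int count = 0;
    try {
      while (!yielder.isDone()) {
        T value = yielder.get(); // a real caller would process the value; may block on the wrapper's latch
        count++;
        yielder = yielder.next(null);
      }
    } finally {
      yielder.close(); // closing the final yielder releases the underlying resources
    }
    return count;
  }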
Example #2
  @POST
  @Produces({MediaType.APPLICATION_JSON, SmileMediaTypes.APPLICATION_JACKSON_SMILE})
  @Consumes({
    MediaType.APPLICATION_JSON,
    SmileMediaTypes.APPLICATION_JACKSON_SMILE,
    APPLICATION_SMILE
  })
  public Response doPost(
      InputStream in,
      @QueryParam("pretty") String pretty,
      @Context
          final HttpServletRequest req // used only to get request content-type and remote address
      ) throws IOException {
    final long start = System.currentTimeMillis();
    Query query = null;
    String queryId = null;

    final String reqContentType = req.getContentType();
    final boolean isSmile =
        SmileMediaTypes.APPLICATION_JACKSON_SMILE.equals(reqContentType)
            || APPLICATION_SMILE.equals(reqContentType);
    final String contentType =
        isSmile ? SmileMediaTypes.APPLICATION_JACKSON_SMILE : MediaType.APPLICATION_JSON;

    ObjectMapper objectMapper = isSmile ? smileMapper : jsonMapper;
    final ObjectWriter jsonWriter =
        pretty != null ? objectMapper.writerWithDefaultPrettyPrinter() : objectMapper.writer();

    final String currThreadName = Thread.currentThread().getName();
    try {
      query = objectMapper.readValue(in, Query.class);
      queryId = query.getId();
      if (queryId == null) {
        queryId = UUID.randomUUID().toString();
        query = query.withId(queryId);
      }
      if (query.getContextValue(QueryContextKeys.TIMEOUT) == null) {
        query =
            query.withOverriddenContext(
                ImmutableMap.of(
                    QueryContextKeys.TIMEOUT,
                    config.getMaxIdleTime().toStandardDuration().getMillis()));
      }

      Thread.currentThread()
          .setName(
              String.format(
                  "%s[%s_%s_%s]", currThreadName, query.getType(), query.getDataSource(), queryId));
      if (log.isDebugEnabled()) {
        log.debug("Got query [%s]", query);
      }

      final Map<String, Object> responseContext = new MapMaker().makeMap();
      final Sequence res = query.run(texasRanger, responseContext);
      final Sequence results;
      if (res == null) {
        results = Sequences.empty();
      } else {
        results = res;
      }

      final Yielder yielder =
          results.toYielder(
              null,
              new YieldingAccumulator() {
                @Override
                public Object accumulate(Object accumulated, Object in) {
                  yield();
                  return in;
                }
              });

      try {
        final Query theQuery = query;
        Response.ResponseBuilder builder =
            Response.ok(
                    new StreamingOutput() {
                      @Override
                      public void write(OutputStream outputStream)
                          throws IOException, WebApplicationException {
                        // json serializer will always close the yielder
                        CountingOutputStream os = new CountingOutputStream(outputStream);
                        jsonWriter.writeValue(os, yielder);

                        // Some types of OutputStream suppress flush errors in the
                        // .close() method.
                        os.flush();
                        os.close();

                        final long queryTime = System.currentTimeMillis() - start;
                        emitter.emit(
                            DruidMetrics.makeQueryTimeMetric(
                                    jsonMapper, theQuery, req.getRemoteAddr())
                                .setDimension("success", "true")
                                .build("query/time", queryTime));
                        emitter.emit(
                            DruidMetrics.makeQueryTimeMetric(
                                    jsonMapper, theQuery, req.getRemoteAddr())
                                .build("query/bytes", os.getCount()));

                        requestLogger.log(
                            new RequestLogLine(
                                new DateTime(start),
                                req.getRemoteAddr(),
                                theQuery,
                                new QueryStats(
                                    ImmutableMap.<String, Object>of(
                                        "query/time",
                                        queryTime,
                                        "query/bytes",
                                        os.getCount(),
                                        "success",
                                        true))));
                      }
                    },
                    contentType)
                .header("X-Druid-Query-Id", queryId);

        // Limit the response-context header, see https://github.com/druid-io/druid/issues/2331
        // Note that Response.ResponseBuilder.header(String key, Object value).build() calls
        // value.toString() and encodes the string using ASCII, so 1 char = 1 byte.
        String responseCtxString = jsonMapper.writeValueAsString(responseContext);
        if (responseCtxString.length() > RESPONSE_CTX_HEADER_LEN_LIMIT) {
          log.warn(
              "Response Context truncated for id [%s] . Full context is [%s].",
              queryId, responseCtxString);
          responseCtxString = responseCtxString.substring(0, RESPONSE_CTX_HEADER_LEN_LIMIT);
        }

        return builder.header("X-Druid-Response-Context", responseCtxString).build();
      } catch (Exception e) {
        // make sure to close yielder if anything happened before starting to serialize the
        // response.
        yielder.close();
        throw Throwables.propagate(e);
      } finally {
        // do not close yielder here, since we do not want to close the yielder prior to
        // StreamingOutput having iterated over all the results
      }
    } catch (QueryInterruptedException e) {
      try {
        log.info("%s [%s]", e.getMessage(), queryId);
        final long queryTime = System.currentTimeMillis() - start;
        emitter.emit(
            DruidMetrics.makeQueryTimeMetric(jsonMapper, query, req.getRemoteAddr())
                .setDimension("success", "false")
                .build("query/time", queryTime));
        requestLogger.log(
            new RequestLogLine(
                new DateTime(start),
                req.getRemoteAddr(),
                query,
                new QueryStats(
                    ImmutableMap.<String, Object>of(
                        "query/time",
                        queryTime,
                        "success",
                        false,
                        "interrupted",
                        true,
                        "reason",
                        e.toString()))));
      } catch (Exception e2) {
        log.error(e2, "Unable to log query [%s]!", query);
      }
      return Response.serverError()
          .type(contentType)
          .entity(
              jsonWriter.writeValueAsBytes(
                  ImmutableMap.of(
                      "error", e.getMessage() == null ? "null exception" : e.getMessage())))
          .build();
    } catch (Exception e) {
      // Input stream has already been consumed by the json object mapper if query == null
      final String queryString = query == null ? "unparsable query" : query.toString();

      log.warn(e, "Exception occurred on request [%s]", queryString);

      try {
        final long queryTime = System.currentTimeMillis() - start;
        emitter.emit(
            DruidMetrics.makeQueryTimeMetric(jsonMapper, query, req.getRemoteAddr())
                .setDimension("success", "false")
                .build("query/time", queryTime));
        requestLogger.log(
            new RequestLogLine(
                new DateTime(start),
                req.getRemoteAddr(),
                query,
                new QueryStats(
                    ImmutableMap.<String, Object>of(
                        "query/time", queryTime, "success", false, "exception", e.toString()))));
      } catch (Exception e2) {
        log.error(e2, "Unable to log query [%s]!", queryString);
      }

      log.makeAlert(e, "Exception handling request")
          .addData("exception", e.toString())
          .addData("query", queryString)
          .addData("peer", req.getRemoteAddr())
          .emit();

      return Response.serverError()
          .type(contentType)
          .entity(
              jsonWriter.writeValueAsBytes(
                  ImmutableMap.of(
                      "error", e.getMessage() == null ? "null exception" : e.getMessage())))
          .build();
    } finally {
      Thread.currentThread().setName(currThreadName);
    }
  }
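
For context, the endpoint above can be exercised with any HTTP client. The plain-JDK sketch below is hypothetical: the broker URL, resource path and query body are illustrative assumptions (the resource's @Path is not shown in the snippet); only the Content-Type handling, the pretty query parameter and the X-Druid-Query-Id response header come from the code above.

import java.io.InputStream;
import java.io.OutputStream;
import java.net.HttpURLConnection;
import java.net.URL;
import java.nio.charset.StandardCharsets;

class QueryPostClient {
  public static void main(String[] args) throws Exception {
    // Assumed broker address and path; adjust to the actual deployment.
    URL url = new URL("http://localhost:8082/druid/v2/?pretty");
    HttpURLConnection conn = (HttpURLConnection) url.openConnection();
    conn.setRequestMethod("POST");
    conn.setDoOutput(true);
    conn.setRequestProperty("Content-Type", "application/json"); // JSON, so the non-Smile mapper is used
    try (OutputStream out = conn.getOutputStream()) {
      // Illustrative query body only.
      out.write(
          "{\"queryType\":\"timeBoundary\",\"dataSource\":\"sample\"}"
              .getBytes(StandardCharsets.UTF_8));
    }
    System.out.println("X-Druid-Query-Id: " + conn.getHeaderField("X-Druid-Query-Id"));
    try (InputStream in = conn.getInputStream()) {
      byte[] buf = new byte[8192];
      for (int n; (n = in.read(buf)) != -1; ) {
        System.out.write(buf, 0, n); // stream the response body as it arrives
      }
    }
    System.out.flush();
  }
}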
Example #3
  @Override
  public Sequence<Result<SearchResultValue>> run(
      final Query<Result<SearchResultValue>> input, Map<String, Object> responseContext) {
    if (!(input instanceof SearchQuery)) {
      throw new ISE("Got a [%s] which isn't a %s", input.getClass(), SearchQuery.class);
    }

    final SearchQuery query = (SearchQuery) input;
    final Filter filter = Filters.convertDimensionFilters(query.getDimensionsFilter());
    final List<DimensionSpec> dimensions = query.getDimensions();
    final SearchQuerySpec searchQuerySpec = query.getQuery();
    final int limit = query.getLimit();
    final boolean descending = query.isDescending();

    // Closing this will cause segfaults in unit tests.
    final QueryableIndex index = segment.asQueryableIndex();

    if (index != null) {
      final TreeMap<SearchHit, MutableInt> retVal =
          Maps.newTreeMap(query.getSort().getComparator());

      Iterable<DimensionSpec> dimsToSearch;
      if (dimensions == null || dimensions.isEmpty()) {
        dimsToSearch =
            Iterables.transform(index.getAvailableDimensions(), Druids.DIMENSION_IDENTITY);
      } else {
        dimsToSearch = dimensions;
      }

      final BitmapFactory bitmapFactory = index.getBitmapFactoryForDimensions();

      final ImmutableBitmap baseFilter =
          filter == null
              ? null
              : filter.getBitmapIndex(new ColumnSelectorBitmapIndexSelector(bitmapFactory, index));

      for (DimensionSpec dimension : dimsToSearch) {
        final Column column = index.getColumn(dimension.getDimension());
        if (column == null) {
          continue;
        }

        final BitmapIndex bitmapIndex = column.getBitmapIndex();
        ExtractionFn extractionFn = dimension.getExtractionFn();
        if (extractionFn == null) {
          extractionFn = IdentityExtractionFn.getInstance();
        }
        if (bitmapIndex != null) {
          for (int i = 0; i < bitmapIndex.getCardinality(); ++i) {
            String dimVal = Strings.nullToEmpty(extractionFn.apply(bitmapIndex.getValue(i)));
            if (!searchQuerySpec.accept(dimVal)) {
              continue;
            }
            ImmutableBitmap bitmap = bitmapIndex.getBitmap(i);
            if (baseFilter != null) {
              bitmap = bitmapFactory.intersection(Arrays.asList(baseFilter, bitmap));
            }
            if (bitmap.size() > 0) {
              MutableInt counter = new MutableInt(bitmap.size());
              MutableInt prev =
                  retVal.put(new SearchHit(dimension.getOutputName(), dimVal), counter);
              if (prev != null) {
                counter.add(prev.intValue());
              }
              if (retVal.size() >= limit) {
                return makeReturnResult(limit, retVal);
              }
            }
          }
        }
      }

      return makeReturnResult(limit, retVal);
    }

    final StorageAdapter adapter = segment.asStorageAdapter();

    if (adapter == null) {
      log.makeAlert("WTF!? Unable to process search query on segment.")
          .addData("segment", segment.getIdentifier())
          .addData("query", query)
          .emit();
      throw new ISE(
          "Null storage adapter found. Probably trying to issue a query against a segment being memory unmapped.");
    }

    final Iterable<DimensionSpec> dimsToSearch;
    if (dimensions == null || dimensions.isEmpty()) {
      dimsToSearch =
          Iterables.transform(adapter.getAvailableDimensions(), Druids.DIMENSION_IDENTITY);
    } else {
      dimsToSearch = dimensions;
    }

    final Sequence<Cursor> cursors =
        adapter.makeCursors(filter, segment.getDataInterval(), QueryGranularity.ALL, descending);

    final TreeMap<SearchHit, MutableInt> retVal =
        cursors.accumulate(
            Maps.<SearchHit, SearchHit, MutableInt>newTreeMap(query.getSort().getComparator()),
            new Accumulator<TreeMap<SearchHit, MutableInt>, Cursor>() {
              @Override
              public TreeMap<SearchHit, MutableInt> accumulate(
                  TreeMap<SearchHit, MutableInt> set, Cursor cursor) {
                if (set.size() >= limit) {
                  return set;
                }

                Map<String, DimensionSelector> dimSelectors = Maps.newHashMap();
                for (DimensionSpec dim : dimsToSearch) {
                  dimSelectors.put(dim.getOutputName(), cursor.makeDimensionSelector(dim));
                }

                while (!cursor.isDone()) {
                  for (Map.Entry<String, DimensionSelector> entry : dimSelectors.entrySet()) {
                    final DimensionSelector selector = entry.getValue();

                    if (selector != null) {
                      final IndexedInts vals = selector.getRow();
                      for (int i = 0; i < vals.size(); ++i) {
                        final String dimVal = selector.lookupName(vals.get(i));
                        if (searchQuerySpec.accept(dimVal)) {
                          MutableInt counter = new MutableInt(1);
                          MutableInt prev = set.put(new SearchHit(entry.getKey(), dimVal), counter);
                          if (prev != null) {
                            counter.add(prev.intValue());
                          }
                          if (set.size() >= limit) {
                            return set;
                          }
                        }
                      }
                    }
                  }

                  cursor.advance();
                }

                return set;
              }
            });

    return makeReturnResult(limit, retVal);
  }
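
One detail worth calling out in both branches above is how hit counts are merged: TreeMap.put returns the previous counter for the key, if any, and its value is folded into the freshly inserted counter, so the newest MutableInt always holds the running total while the map stays ordered by the query's sort comparator. Below is a minimal, self-contained sketch of that idiom, with keys simplified to String and MutableInt assumed to be Apache Commons Lang's (which is what the example appears to use).

import java.util.TreeMap;
import org.apache.commons.lang.mutable.MutableInt; // assumption; the lang3 variant has the same API

class HitCountMerging {
  // Fold a new observation into the running count for `hit`, mirroring the example's idiom.
  static void addHit(TreeMap<String, MutableInt> counts, String hit, int delta) {
    MutableInt counter = new MutableInt(delta);
    MutableInt prev = counts.put(hit, counter); // returns the previous counter, or null
    if (prev != null) {
      counter.add(prev.intValue()); // the new counter absorbs the old total
    }
  }

  public static void main(String[] args) {
    TreeMap<String, MutableInt> counts = new TreeMap<String, MutableInt>();
    addHit(counts, "dimVal", 3);
    addHit(counts, "dimVal", 2);
    System.out.println(counts.get("dimVal")); // prints 5
  }
}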