Example #1
  public void addSegments(Iterable<DataSegment> segments) {
    try {
      final List<String> segmentFailures = Lists.newArrayList();
      final List<DataSegment> validSegments = Lists.newArrayList();

      for (DataSegment segment : segments) {
        log.info("Loading segment %s", segment.getIdentifier());

        try {
          serverManager.loadSegment(segment);
        } catch (Exception e) {
          log.error(e, "Exception loading segment[%s]", segment.getIdentifier());
          removeSegment(segment);
          segmentFailures.add(segment.getIdentifier());
          continue;
        }

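        // Persist segment metadata to the on-disk info cache (skipped when the file already exists).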
        File segmentInfoCacheFile = new File(config.getInfoDir(), segment.getIdentifier());
        if (!segmentInfoCacheFile.exists()) {
          try {
            jsonMapper.writeValue(segmentInfoCacheFile, segment);
          } catch (IOException e) {
            log.error(
                e, "Failed to write to disk segment info cache file[%s]", segmentInfoCacheFile);
            removeSegment(segment);
            segmentFailures.add(segment.getIdentifier());
            continue;
          }
        }

        validSegments.add(segment);
      }

      try {
        announcer.announceSegments(validSegments);
      } catch (IOException e) {
        throw new SegmentLoadingException(e, "Failed to announce segments[%s]", segments);
      }

      if (!segmentFailures.isEmpty()) {
        for (String segmentFailure : segmentFailures) {
          log.error("%s failed to load", segmentFailure);
        }
        throw new SegmentLoadingException(
            "%,d errors seen while loading segments", segmentFailures.size());
      }
    } catch (SegmentLoadingException e) {
      log.makeAlert(e, "Failed to load segments for dataSource")
          .addData("segments", segments)
          .emit();
    }
  }
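
Example #1 loads each segment independently, records failures, and raises a single aggregate error only after announcing whatever succeeded. A minimal sketch of that collect-then-aggregate pattern, using illustrative names (BatchLoader, loadOne, and ItemLoadingException are not Druid APIs):

import java.util.ArrayList;
import java.util.List;

public class BatchLoader {
  // Illustrative aggregate exception, raised once after the whole batch has been attempted.
  static class ItemLoadingException extends Exception {
    ItemLoadingException(String message) {
      super(message);
    }
  }

  // Stand-in for the per-item work that may fail (serverManager.loadSegment above).
  void loadOne(String item) throws Exception {
    if (item.isEmpty()) {
      throw new Exception("empty item");
    }
  }

  public void loadAll(Iterable<String> items) throws ItemLoadingException {
    final List<String> failures = new ArrayList<>();
    final List<String> valid = new ArrayList<>();

    for (String item : items) {
      try {
        loadOne(item);
      } catch (Exception e) {
        failures.add(item); // record the failure and keep going
        continue;
      }
      valid.add(item);
    }

    // ... act on the successfully loaded items here (the code above announces them) ...

    if (!failures.isEmpty()) {
      throw new ItemLoadingException(
          String.format("%,d errors seen while loading items", failures.size()));
    }
  }
}
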
Example #2
    @Override
    public TaskStatus call() {
      final long startTime = System.currentTimeMillis();
      final File taskDir = toolbox.getTaskWorkDir();

      TaskStatus status;

      try {
        log.info("Running task: %s", task.getId());
        status = task.run(toolbox);
      } catch (InterruptedException e) {
        log.error(e, "Interrupted while running task[%s]", task);
        throw Throwables.propagate(e);
      } catch (Exception e) {
        log.error(e, "Exception while running task[%s]", task);
        status = TaskStatus.failure(task.getId());
      } catch (Throwable t) {
        log.error(t, "Uncaught Throwable while running task[%s]", task);
        throw Throwables.propagate(t);
      }

      try {
        if (taskDir.exists()) {
          log.info("Removing task directory: %s", taskDir);
          FileUtils.deleteDirectory(taskDir);
        }
      } catch (Exception e) {
        log.makeAlert(e, "Failed to delete task directory")
            .addData("taskDir", taskDir.toString())
            .addData("task", task.getId())
            .emit();
      }

      try {
        return status.withDuration(System.currentTimeMillis() - startTime);
      } catch (Exception e) {
        log.error(e, "Uncaught Exception during callback for task[%s]", task);
        throw Throwables.propagate(e);
      }
    }
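
The call() above triages throwables three ways: InterruptedException and non-Exception Throwables are propagated so the executor sees the abort, while ordinary Exceptions are converted into a failed status. A self-contained sketch of the same triage, with an illustrative Task interface, a plain String standing in for TaskStatus, and Java's native rethrow in place of Guava's Throwables.propagate:

import java.util.concurrent.Callable;

public class TriageCallable implements Callable<String> {
  // Illustrative stand-in for the task being run.
  interface Task {
    String run() throws Exception;
  }

  private final Task task;

  TriageCallable(Task task) {
    this.task = task;
  }

  @Override
  public String call() {
    try {
      return "SUCCESS: " + task.run();
    } catch (InterruptedException e) {
      Thread.currentThread().interrupt(); // restore the interrupt flag
      throw new RuntimeException(e); // let the executor observe the abort
    } catch (Exception e) {
      return "FAILURE"; // ordinary failures become a normal (failed) result
    } catch (Throwable t) {
      throw t; // Errors (e.g. OutOfMemoryError) are rethrown, never swallowed
    }
  }
}
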
Example #3
  @Override
  public Sequence<T> run(final Query<T> query) {
    final QueryToolChest<T, Query<T>> toolChest = warehouse.getToolChest(query);
    final CacheStrategy<T, Object, Query<T>> strategy = toolChest.getCacheStrategy(query);

    final Map<DruidServer, List<SegmentDescriptor>> serverSegments = Maps.newTreeMap();

    final List<Pair<DateTime, byte[]>> cachedResults = Lists.newArrayList();
    final Map<String, CachePopulator> cachePopulatorMap = Maps.newHashMap();

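    // Caching is only attempted when the toolchest supplies a CacheStrategy for this query type.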
    final boolean useCache =
        Boolean.parseBoolean(query.getContextValue("useCache", "true")) && strategy != null;
    final boolean populateCache =
        Boolean.parseBoolean(query.getContextValue("populateCache", "true")) && strategy != null;
    final boolean isBySegment = Boolean.parseBoolean(query.getContextValue("bySegment", "false"));

    ImmutableMap.Builder<String, String> contextBuilder = new ImmutableMap.Builder<>();

    final String priority = query.getContextValue("priority", "0");
    contextBuilder.put("priority", priority);

    if (populateCache) {
      contextBuilder.put("bySegment", "true");
    }
    contextBuilder.put("intermediate", "true");

    final Query<T> rewrittenQuery = query.withOverriddenContext(contextBuilder.build());

    VersionedIntervalTimeline<String, ServerSelector> timeline =
        serverView.getTimeline(query.getDataSource());
    if (timeline == null) {
      return Sequences.empty();
    }

    // build set of segments to query
    Set<Pair<ServerSelector, SegmentDescriptor>> segments = Sets.newLinkedHashSet();

    List<TimelineObjectHolder<String, ServerSelector>> serversLookup = Lists.newLinkedList();

    for (Interval interval : rewrittenQuery.getIntervals()) {
      serversLookup.addAll(timeline.lookup(interval));
    }

    // Let tool chest filter out unneeded segments
    final List<TimelineObjectHolder<String, ServerSelector>> filteredServersLookup =
        toolChest.filterSegments(query, serversLookup);

    for (TimelineObjectHolder<String, ServerSelector> holder : filteredServersLookup) {
      for (PartitionChunk<ServerSelector> chunk : holder.getObject()) {
        ServerSelector selector = chunk.getObject();
        final SegmentDescriptor descriptor =
            new SegmentDescriptor(
                holder.getInterval(), holder.getVersion(), chunk.getChunkNumber());

        segments.add(Pair.of(selector, descriptor));
      }
    }

    final byte[] queryCacheKey;
    if (strategy != null) {
      queryCacheKey = strategy.computeCacheKey(query);
    } else {
      queryCacheKey = null;
    }

    if (queryCacheKey != null) {
      Map<Pair<ServerSelector, SegmentDescriptor>, Cache.NamedKey> cacheKeys = Maps.newHashMap();
      for (Pair<ServerSelector, SegmentDescriptor> segment : segments) {
        final Cache.NamedKey segmentCacheKey =
            computeSegmentCacheKey(
                segment.lhs.getSegment().getIdentifier(), segment.rhs, queryCacheKey);
        cacheKeys.put(segment, segmentCacheKey);
      }

      // Pull cached segments from cache and remove from set of segments to query
      final Map<Cache.NamedKey, byte[]> cachedValues;
      if (useCache) {
        cachedValues = cache.getBulk(cacheKeys.values());
      } else {
        cachedValues = ImmutableMap.of();
      }

      for (Map.Entry<Pair<ServerSelector, SegmentDescriptor>, Cache.NamedKey> entry :
          cacheKeys.entrySet()) {
        Pair<ServerSelector, SegmentDescriptor> segment = entry.getKey();
        Cache.NamedKey segmentCacheKey = entry.getValue();
        final Interval segmentQueryInterval = segment.rhs.getInterval();

        final byte[] cachedValue = cachedValues.get(segmentCacheKey);
        if (cachedValue != null) {
          // remove cached segment from set of segments to query
          segments.remove(segment);
          cachedResults.add(Pair.of(segmentQueryInterval.getStart(), cachedValue));
        } else if (populateCache) {
          final String segmentIdentifier = segment.lhs.getSegment().getIdentifier();
          cachePopulatorMap.put(
              String.format("%s_%s", segmentIdentifier, segmentQueryInterval),
              new CachePopulator(cache, objectMapper, segmentCacheKey));
        }
      }
    }

    // Compile list of all segments not pulled from cache
    for (Pair<ServerSelector, SegmentDescriptor> segment : segments) {
      final QueryableDruidServer queryableDruidServer = segment.lhs.pick();

      if (queryableDruidServer == null) {
        log.error("No servers found for %s?! How can this be?!", segment.rhs);
      } else {
        final DruidServer server = queryableDruidServer.getServer();
        List<SegmentDescriptor> descriptors = serverSegments.get(server);

        if (descriptors == null) {
          descriptors = Lists.newArrayList();
          serverSegments.put(server, descriptors);
        }

        descriptors.add(segment.rhs);
      }
    }

    return new LazySequence<>(
        new Supplier<Sequence<T>>() {
          @Override
          public Sequence<T> get() {
            ArrayList<Pair<DateTime, Sequence<T>>> listOfSequences = Lists.newArrayList();

            addSequencesFromServer(listOfSequences);
            addSequencesFromCache(listOfSequences);

            Collections.sort(
                listOfSequences,
                Ordering.natural().onResultOf(Pair.<DateTime, Sequence<T>>lhsFn()));

            final Sequence<Sequence<T>> seq =
                Sequences.simple(
                    Iterables.transform(listOfSequences, Pair.<DateTime, Sequence<T>>rhsFn()));
            if (strategy == null) {
              return toolChest.mergeSequences(seq);
            } else {
              return strategy.mergeSequences(seq);
            }
          }

          private void addSequencesFromCache(
              ArrayList<Pair<DateTime, Sequence<T>>> listOfSequences) {
            if (strategy == null) {
              return;
            }

            final Function<Object, T> pullFromCacheFunction = strategy.pullFromCache();
            final TypeReference<Object> cacheObjectClazz = strategy.getCacheObjectClazz();
            for (Pair<DateTime, byte[]> cachedResultPair : cachedResults) {
              final byte[] cachedResult = cachedResultPair.rhs;
              Sequence<Object> cachedSequence =
                  new BaseSequence<>(
                      new BaseSequence.IteratorMaker<Object, Iterator<Object>>() {
                        @Override
                        public Iterator<Object> make() {
                          try {
                            if (cachedResult.length == 0) {
                              return Iterators.emptyIterator();
                            }

                            return objectMapper.readValues(
                                objectMapper.getFactory().createParser(cachedResult),
                                cacheObjectClazz);
                          } catch (IOException e) {
                            throw Throwables.propagate(e);
                          }
                        }

                        @Override
                        public void cleanup(Iterator<Object> iterFromMake) {}
                      });
              listOfSequences.add(
                  Pair.of(
                      cachedResultPair.lhs, Sequences.map(cachedSequence, pullFromCacheFunction)));
            }
          }

          @SuppressWarnings("unchecked")
          private void addSequencesFromServer(
              ArrayList<Pair<DateTime, Sequence<T>>> listOfSequences) {
            for (Map.Entry<DruidServer, List<SegmentDescriptor>> entry :
                serverSegments.entrySet()) {
              final DruidServer server = entry.getKey();
              final List<SegmentDescriptor> descriptors = entry.getValue();

              final QueryRunner clientQueryable = serverView.getQueryRunner(server);
              if (clientQueryable == null) {
                log.makeAlert("WTF!? server[%s] doesn't have a client Queryable?", server).emit();
                continue;
              }

              final Sequence<T> resultSeqToAdd;
              final MultipleSpecificSegmentSpec segmentSpec =
                  new MultipleSpecificSegmentSpec(descriptors);
              List<Interval> intervals = segmentSpec.getIntervals();

              if (!server.isAssignable() || !populateCache || isBySegment) {
                resultSeqToAdd = clientQueryable.run(query.withQuerySegmentSpec(segmentSpec));
              } else {
                resultSeqToAdd =
                    toolChest.mergeSequences(
                        Sequences.map(
                            clientQueryable.run(rewrittenQuery.withQuerySegmentSpec(segmentSpec)),
                            new Function<Object, Sequence<T>>() {
                              private final Function<T, Object> prepareForCache =
                                  strategy.prepareForCache();

                              @Override
                              public Sequence<T> apply(Object input) {
                                Result<Object> result = (Result<Object>) input;
                                final BySegmentResultValueClass<T> value =
                                    (BySegmentResultValueClass<T>) result.getValue();
                                String segmentIdentifier = value.getSegmentId();
                                final Iterable<T> segmentResults = value.getResults();

                                CachePopulator cachePopulator =
                                    cachePopulatorMap.get(
                                        String.format(
                                            "%s_%s", segmentIdentifier, value.getInterval()));
                                if (cachePopulator != null) {
                                  cachePopulator.populate(
                                      Iterables.transform(segmentResults, prepareForCache));
                                }

                                return Sequences.simple(
                                    Iterables.transform(
                                        segmentResults,
                                        toolChest.makeMetricManipulatorFn(
                                            rewrittenQuery,
                                            new MetricManipulationFn() {
                                              @Override
                                              public Object manipulate(
                                                  AggregatorFactory factory, Object object) {
                                                return factory.deserialize(object);
                                              }
                                            })));
                              }
                            }));
              }

              listOfSequences.add(Pair.of(intervals.get(0).getStart(), resultSeqToAdd));
            }
          }
        });
  }
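
Much of run() above is cache bookkeeping: a query-wide cache key is combined with each segment's identifier, hits are collected into cachedResults, and those segments are removed from the set sent to servers. A minimal sketch of that partitioning step, assuming a simple concatenated key layout and a plain Map standing in for the real bulk-read cache (none of these names are Druid APIs):

import java.nio.ByteBuffer;
import java.nio.charset.StandardCharsets;
import java.util.HashMap;
import java.util.HashSet;
import java.util.Map;
import java.util.Set;

public class CachePartitioner {
  // Illustrative composite key: segment identifier bytes followed by the query cache key.
  static byte[] segmentCacheKey(String segmentId, byte[] queryCacheKey) {
    byte[] id = segmentId.getBytes(StandardCharsets.UTF_8);
    byte[] key = new byte[id.length + queryCacheKey.length];
    System.arraycopy(id, 0, key, 0, id.length);
    System.arraycopy(queryCacheKey, 0, key, id.length, queryCacheKey.length);
    return key;
  }

  // Returns the cache hits and removes them from segmentsToQuery, mirroring the
  // "pull cached segments and remove from set of segments to query" step above.
  static Map<String, byte[]> partition(
      Set<String> segmentsToQuery, byte[] queryCacheKey, Map<ByteBuffer, byte[]> cache) {
    final Map<String, byte[]> hits = new HashMap<>();
    for (String segmentId : new HashSet<>(segmentsToQuery)) {
      byte[] cached = cache.get(ByteBuffer.wrap(segmentCacheKey(segmentId, queryCacheKey)));
      if (cached != null) {
        hits.put(segmentId, cached); // serve this segment from cache
        segmentsToQuery.remove(segmentId); // and do not send it to a server
      }
    }
    return hits;
  }
}
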
Example #4
  @POST
  @Produces({MediaType.APPLICATION_JSON, SmileMediaTypes.APPLICATION_JACKSON_SMILE})
  @Consumes({
    MediaType.APPLICATION_JSON,
    SmileMediaTypes.APPLICATION_JACKSON_SMILE,
    APPLICATION_SMILE
  })
  public Response doPost(
      InputStream in,
      @QueryParam("pretty") String pretty,
      @Context
          final HttpServletRequest req // used only to get request content-type and remote address
      ) throws IOException {
    final long start = System.currentTimeMillis();
    Query query = null;
    String queryId = null;

    final String reqContentType = req.getContentType();
    final boolean isSmile =
        SmileMediaTypes.APPLICATION_JACKSON_SMILE.equals(reqContentType)
            || APPLICATION_SMILE.equals(reqContentType);
    final String contentType =
        isSmile ? SmileMediaTypes.APPLICATION_JACKSON_SMILE : MediaType.APPLICATION_JSON;

    ObjectMapper objectMapper = isSmile ? smileMapper : jsonMapper;
    final ObjectWriter jsonWriter =
        pretty != null ? objectMapper.writerWithDefaultPrettyPrinter() : objectMapper.writer();

    final String currThreadName = Thread.currentThread().getName();
    try {
      query = objectMapper.readValue(in, Query.class);
      queryId = query.getId();
      if (queryId == null) {
        queryId = UUID.randomUUID().toString();
        query = query.withId(queryId);
      }
      if (query.getContextValue(QueryContextKeys.TIMEOUT) == null) {
        query =
            query.withOverriddenContext(
                ImmutableMap.of(
                    QueryContextKeys.TIMEOUT,
                    config.getMaxIdleTime().toStandardDuration().getMillis()));
      }

      Thread.currentThread()
          .setName(
              String.format(
                  "%s[%s_%s_%s]", currThreadName, query.getType(), query.getDataSource(), queryId));
      if (log.isDebugEnabled()) {
        log.debug("Got query [%s]", query);
      }

      final Map<String, Object> responseContext = new MapMaker().makeMap();
      final Sequence res = query.run(texasRanger, responseContext);
      final Sequence results;
      if (res == null) {
        results = Sequences.empty();
      } else {
        results = res;
      }

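      // Creating the yielder pulls the first result now, so query failures surface
      // before the HTTP response is committed.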
      final Yielder yielder =
          results.toYielder(
              null,
              new YieldingAccumulator() {
                @Override
                public Object accumulate(Object accumulated, Object in) {
                  yield();
                  return in;
                }
              });

      try {
        final Query theQuery = query;
        Response.ResponseBuilder builder =
            Response.ok(
                    new StreamingOutput() {
                      @Override
                      public void write(OutputStream outputStream)
                          throws IOException, WebApplicationException {
                        // json serializer will always close the yielder
                        CountingOutputStream os = new CountingOutputStream(outputStream);
                        jsonWriter.writeValue(os, yielder);

                        // Some types of OutputStream suppress flush errors in the .close() method.
                        os.flush();
                        os.close();

                        final long queryTime = System.currentTimeMillis() - start;
                        emitter.emit(
                            DruidMetrics.makeQueryTimeMetric(
                                    jsonMapper, theQuery, req.getRemoteAddr())
                                .setDimension("success", "true")
                                .build("query/time", queryTime));
                        emitter.emit(
                            DruidMetrics.makeQueryTimeMetric(
                                    jsonMapper, theQuery, req.getRemoteAddr())
                                .build("query/bytes", os.getCount()));

                        requestLogger.log(
                            new RequestLogLine(
                                new DateTime(start),
                                req.getRemoteAddr(),
                                theQuery,
                                new QueryStats(
                                    ImmutableMap.<String, Object>of(
                                        "query/time",
                                        queryTime,
                                        "query/bytes",
                                        os.getCount(),
                                        "success",
                                        true))));
                      }
                    },
                    contentType)
                .header("X-Druid-Query-Id", queryId);

        // Limit the response-context header, see https://github.com/druid-io/druid/issues/2331.
        // Note that Response.ResponseBuilder.header(String key, Object value).build() calls
        // value.toString() and encodes the string using ASCII, so 1 char = 1 byte.
        String responseCtxString = jsonMapper.writeValueAsString(responseContext);
        if (responseCtxString.length() > RESPONSE_CTX_HEADER_LEN_LIMIT) {
          log.warn(
              "Response Context truncated for id [%s]. Full context is [%s].",
              queryId, responseCtxString);
          responseCtxString = responseCtxString.substring(0, RESPONSE_CTX_HEADER_LEN_LIMIT);
        }

        return builder.header("X-Druid-Response-Context", responseCtxString).build();
      } catch (Exception e) {
        // make sure to close yielder if anything happened before starting to serialize the
        // response.
        yielder.close();
        throw Throwables.propagate(e);
      } finally {
        // do not close yielder here, since we do not want to close the yielder prior to
        // StreamingOutput having iterated over all the results
      }
    } catch (QueryInterruptedException e) {
      try {
        log.info("%s [%s]", e.getMessage(), queryId);
        final long queryTime = System.currentTimeMillis() - start;
        emitter.emit(
            DruidMetrics.makeQueryTimeMetric(jsonMapper, query, req.getRemoteAddr())
                .setDimension("success", "false")
                .build("query/time", queryTime));
        requestLogger.log(
            new RequestLogLine(
                new DateTime(start),
                req.getRemoteAddr(),
                query,
                new QueryStats(
                    ImmutableMap.<String, Object>of(
                        "query/time",
                        queryTime,
                        "success",
                        false,
                        "interrupted",
                        true,
                        "reason",
                        e.toString()))));
      } catch (Exception e2) {
        log.error(e2, "Unable to log query [%s]!", query);
      }
      return Response.serverError()
          .type(contentType)
          .entity(
              jsonWriter.writeValueAsBytes(
                  ImmutableMap.of(
                      "error", e.getMessage() == null ? "null exception" : e.getMessage())))
          .build();
    } catch (Exception e) {
      // Input stream has already been consumed by the json object mapper if query == null
      final String queryString = query == null ? "unparsable query" : query.toString();

      log.warn(e, "Exception occurred on request [%s]", queryString);

      try {
        final long queryTime = System.currentTimeMillis() - start;
        emitter.emit(
            DruidMetrics.makeQueryTimeMetric(jsonMapper, query, req.getRemoteAddr())
                .setDimension("success", "false")
                .build("query/time", queryTime));
        requestLogger.log(
            new RequestLogLine(
                new DateTime(start),
                req.getRemoteAddr(),
                query,
                new QueryStats(
                    ImmutableMap.<String, Object>of(
                        "query/time", queryTime, "success", false, "exception", e.toString()))));
      } catch (Exception e2) {
        log.error(e2, "Unable to log query [%s]!", queryString);
      }

      log.makeAlert(e, "Exception handling request")
          .addData("exception", e.toString())
          .addData("query", queryString)
          .addData("peer", req.getRemoteAddr())
          .emit();

      return Response.serverError()
          .type(contentType)
          .entity(
              jsonWriter.writeValueAsBytes(
                  ImmutableMap.of(
                      "error", e.getMessage() == null ? "null exception" : e.getMessage())))
          .build();
    } finally {
      Thread.currentThread().setName(currThreadName);
    }
  }
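
A small but easy-to-miss detail in doPost is the thread-name bookkeeping: the handler renames the current thread so in-flight queries are identifiable in thread dumps, and the outer finally restores the original name. The same pattern in isolation (withThreadName is an illustrative helper, not part of Druid):

import java.util.concurrent.Callable;

public class ThreadNames {
  // Runs work under a descriptive thread name and always restores the original.
  static <T> T withThreadName(String suffix, Callable<T> work) throws Exception {
    final String original = Thread.currentThread().getName();
    Thread.currentThread().setName(String.format("%s[%s]", original, suffix));
    try {
      return work.call();
    } finally {
      Thread.currentThread().setName(original); // mirrors the finally block above
    }
  }

  public static void main(String[] args) throws Exception {
    String result = withThreadName("timeseries_wikipedia_some-query-id", () -> "done");
    System.out.println(result);
  }
}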