@Override
public ServiceMetricEvent.Builder makeMetricBuilder(TopNQuery query)
{
  return DruidMetrics.makePartialQueryTimeMetric(query)
      .setDimension("threshold", String.valueOf(query.getThreshold()))
      .setDimension("dimension", query.getDimensionSpec().getDimension())
      .setDimension("numMetrics", String.valueOf(query.getAggregatorSpecs().size()))
      .setDimension(
          "numComplexMetrics",
          String.valueOf(DruidMetrics.findNumComplexAggs(query.getAggregatorSpecs()))
      );
}
@Override
public ServiceMetricEvent.Builder makeMetricBuilder(SearchQuery query)
{
  return DruidMetrics.makePartialQueryTimeMetric(query);
}
@POST
@Produces({MediaType.APPLICATION_JSON, SmileMediaTypes.APPLICATION_JACKSON_SMILE})
@Consumes({MediaType.APPLICATION_JSON, SmileMediaTypes.APPLICATION_JACKSON_SMILE, APPLICATION_SMILE})
public Response doPost(
    InputStream in,
    @QueryParam("pretty") String pretty,
    @Context final HttpServletRequest req // used only to get request content-type and remote address
) throws IOException
{
  final long start = System.currentTimeMillis();
  Query query = null;
  String queryId = null;

  final String reqContentType = req.getContentType();
  final boolean isSmile = SmileMediaTypes.APPLICATION_JACKSON_SMILE.equals(reqContentType)
                          || APPLICATION_SMILE.equals(reqContentType);
  final String contentType = isSmile ? SmileMediaTypes.APPLICATION_JACKSON_SMILE : MediaType.APPLICATION_JSON;

  ObjectMapper objectMapper = isSmile ? smileMapper : jsonMapper;
  final ObjectWriter jsonWriter = pretty != null
                                  ? objectMapper.writerWithDefaultPrettyPrinter()
                                  : objectMapper.writer();

  final String currThreadName = Thread.currentThread().getName();
  try {
    query = objectMapper.readValue(in, Query.class);
    queryId = query.getId();
    if (queryId == null) {
      queryId = UUID.randomUUID().toString();
      query = query.withId(queryId);
    }
    if (query.getContextValue(QueryContextKeys.TIMEOUT) == null) {
      query = query.withOverriddenContext(
          ImmutableMap.of(
              QueryContextKeys.TIMEOUT,
              config.getMaxIdleTime().toStandardDuration().getMillis()
          )
      );
    }

    Thread.currentThread().setName(
        String.format("%s[%s_%s_%s]", currThreadName, query.getType(), query.getDataSource(), queryId)
    );
    if (log.isDebugEnabled()) {
      log.debug("Got query [%s]", query);
    }

    final Map<String, Object> responseContext = new MapMaker().makeMap();
    final Sequence res = query.run(texasRanger, responseContext);

    final Sequence results;
    if (res == null) {
      results = Sequences.empty();
    } else {
      results = res;
    }

    final Yielder yielder = results.toYielder(
        null,
        new YieldingAccumulator()
        {
          @Override
          public Object accumulate(Object accumulated, Object in)
          {
            yield();
            return in;
          }
        }
    );

    try {
      final Query theQuery = query;
      Response.ResponseBuilder builder = Response
          .ok(
              new StreamingOutput()
              {
                @Override
                public void write(OutputStream outputStream) throws IOException, WebApplicationException
                {
                  // json serializer will always close the yielder
                  CountingOutputStream os = new CountingOutputStream(outputStream);
                  jsonWriter.writeValue(os, yielder);

                  os.flush(); // Some types of OutputStream suppress flush errors in the .close() method.
                  os.close();

                  final long queryTime = System.currentTimeMillis() - start;
                  emitter.emit(
                      DruidMetrics.makeQueryTimeMetric(jsonMapper, theQuery, req.getRemoteAddr())
                                  .setDimension("success", "true")
                                  .build("query/time", queryTime)
                  );
                  emitter.emit(
                      DruidMetrics.makeQueryTimeMetric(jsonMapper, theQuery, req.getRemoteAddr())
                                  .build("query/bytes", os.getCount())
                  );

                  requestLogger.log(
                      new RequestLogLine(
                          new DateTime(start),
                          req.getRemoteAddr(),
                          theQuery,
                          new QueryStats(
                              ImmutableMap.<String, Object>of(
                                  "query/time", queryTime,
                                  "query/bytes", os.getCount(),
                                  "success", true
                              )
                          )
                      )
                  );
                }
              },
              contentType
          )
          .header("X-Druid-Query-Id", queryId);

      // Limit the response-context header, see https://github.com/druid-io/druid/issues/2331
      // Note that Response.ResponseBuilder.header(String key, Object value).build() calls value.toString()
      // and encodes the string using ASCII, so 1 char is = 1 byte
      String responseCtxString = jsonMapper.writeValueAsString(responseContext);
      if (responseCtxString.length() > RESPONSE_CTX_HEADER_LEN_LIMIT) {
        log.warn("Response Context truncated for id [%s]. Full context is [%s].", queryId, responseCtxString);
        responseCtxString = responseCtxString.substring(0, RESPONSE_CTX_HEADER_LEN_LIMIT);
      }

      return builder
          .header("X-Druid-Response-Context", responseCtxString)
          .build();
    }
    catch (Exception e) {
      // make sure to close yielder if anything happened before starting to serialize the response.
      yielder.close();
      throw Throwables.propagate(e);
    }
    finally {
      // do not close yielder here, since we do not want to close the yielder prior to
      // StreamingOutput having iterated over all the results
    }
  }
  catch (QueryInterruptedException e) {
    try {
      log.info("%s [%s]", e.getMessage(), queryId);
      final long queryTime = System.currentTimeMillis() - start;
      emitter.emit(
          DruidMetrics.makeQueryTimeMetric(jsonMapper, query, req.getRemoteAddr())
                      .setDimension("success", "false")
                      .build("query/time", queryTime)
      );
      requestLogger.log(
          new RequestLogLine(
              new DateTime(start),
              req.getRemoteAddr(),
              query,
              new QueryStats(
                  ImmutableMap.<String, Object>of(
                      "query/time", queryTime,
                      "success", false,
                      "interrupted", true,
                      "reason", e.toString()
                  )
              )
          )
      );
    }
    catch (Exception e2) {
      log.error(e2, "Unable to log query [%s]!", query);
    }
    return Response.serverError()
                   .type(contentType)
                   .entity(
                       jsonWriter.writeValueAsBytes(
                           ImmutableMap.of(
                               "error", e.getMessage() == null ? "null exception" : e.getMessage()
                           )
                       )
                   )
                   .build();
  }
  catch (Exception e) {
    // Input stream has already been consumed by the json object mapper if query == null
    final String queryString = query == null ? "unparsable query" : query.toString();

    log.warn(e, "Exception occurred on request [%s]", queryString);

    try {
      final long queryTime = System.currentTimeMillis() - start;
      emitter.emit(
          DruidMetrics.makeQueryTimeMetric(jsonMapper, query, req.getRemoteAddr())
                      .setDimension("success", "false")
                      .build("query/time", queryTime)
      );
      requestLogger.log(
          new RequestLogLine(
              new DateTime(start),
              req.getRemoteAddr(),
              query,
              new QueryStats(
                  ImmutableMap.<String, Object>of(
                      "query/time", queryTime,
                      "success", false,
                      "exception", e.toString()
                  )
              )
          )
      );
    }
    catch (Exception e2) {
      log.error(e2, "Unable to log query [%s]!", queryString);
    }

    log.makeAlert(e, "Exception handling request")
       .addData("exception", e.toString())
       .addData("query", queryString)
       .addData("peer", req.getRemoteAddr())
       .emit();

    return Response.serverError()
                   .type(contentType)
                   .entity(
                       jsonWriter.writeValueAsBytes(
                           ImmutableMap.of(
                               "error", e.getMessage() == null ? "null exception" : e.getMessage()
                           )
                       )
                   )
                   .build();
  }
  finally {
    Thread.currentThread().setName(currThreadName);
  }
}