/**
 * Executes the HTTP request.
 *
 * <p>If HttpClient throws any exception, this method releases the connection. In all other
 * cases it is the caller's duty to release it or to process the input stream.
 *
 * @param repository to execute the HTTP method for
 * @param request resource store request that triggered the HTTP request
 * @param httpRequest HTTP request to be executed
 * @param baseUrl the base URL used to construct the final httpRequest
 * @param contentRequest whether this is a content request
 * @return response of making the request
 * @throws RemoteStorageException if an error occurred during execution of the HTTP request
 */
@VisibleForTesting
HttpResponse executeRequest(
    final ProxyRepository repository,
    final ResourceStoreRequest request,
    final HttpUriRequest httpRequest,
    final String baseUrl,
    final boolean contentRequest)
    throws RemoteStorageException {
  final Timer timer = timer(repository, httpRequest, baseUrl);
  final TimerContext timerContext = timer.time();
  Stopwatch stopwatch = null;
  if (outboundRequestLog.isDebugEnabled()) {
    stopwatch = new Stopwatch().start();
  }
  try {
    return doExecuteRequest(repository, request, httpRequest, contentRequest);
  } finally {
    timerContext.stop();
    if (stopwatch != null) {
      outboundRequestLog.debug(
          "[{}] {} {} - {}",
          repository.getId(),
          httpRequest.getMethod(),
          httpRequest.getURI(),
          stopwatch);
    }
  }
}
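For orientation, a minimal sketch of what the timer(repository, httpRequest, baseUrl) helper used above might look like, assuming Yammer Metrics 2.x; the metric group, type, and naming scheme are illustrative assumptions, not taken from the snippet.

// Hypothetical sketch of the timer(...) helper; the real implementation may
// name and scope its metrics differently. Assumes Yammer Metrics 2.x.
private Timer timer(final ProxyRepository repository,
                    final HttpUriRequest httpRequest,
                    final String baseUrl) {
  // One timer per repository and HTTP method, e.g. "central.GET"; the baseUrl
  // parameter could additionally be folded into the metric name if needed.
  return Metrics.newTimer(
      new MetricName("remote.storage", "outbound-requests",
          repository.getId() + "." + httpRequest.getMethod()),
      TimeUnit.MILLISECONDS, // duration unit
      TimeUnit.SECONDS);     // rate unit
}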
public static void main(String[] args) throws Exception {
  ConsoleReporter.enable(2, TimeUnit.SECONDS);
  Random random = new Random();
  while (true) {
    TimerContext context = timer.time();
    Thread.sleep(random.nextInt(1000));
    context.stop();
  }
}
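The snippet above references a timer field that is declared elsewhere. A self-contained sketch of the whole demo, assuming Yammer Metrics 2.x; the class name and metric name are made up for illustration.

import java.util.Random;
import java.util.concurrent.TimeUnit;

import com.yammer.metrics.Metrics;
import com.yammer.metrics.core.Timer;
import com.yammer.metrics.core.TimerContext;
import com.yammer.metrics.reporting.ConsoleReporter;

public class ConsoleReporterDemo {
  // Assumed declaration of the timer used in main(); the snippet does not show it.
  private static final Timer timer =
      Metrics.newTimer(ConsoleReporterDemo.class, "sleep-times", TimeUnit.MILLISECONDS, TimeUnit.SECONDS);

  public static void main(String[] args) throws Exception {
    ConsoleReporter.enable(2, TimeUnit.SECONDS); // print metrics to stdout every 2 seconds
    Random random = new Random();
    while (true) {
      TimerContext context = timer.time();       // start timing
      Thread.sleep(random.nextInt(1000));        // simulated work
      context.stop();                            // record the elapsed time
    }
  }
}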
@Override
public void dispatch(Object resource, HttpContext context) {
  final TimerContext time = timer.time();
  try {
    wrappedDispatcher.dispatch(resource, context);
  } finally {
    time.stop();
  }
}
@Override
public void map(Result result, SolrUpdateWriter solrUpdateWriter) {
  TimerContext timerContext = mappingTimer.time();
  try {
    SolrInputDocument solrInputDocument = new SolrInputDocument();
    for (SolrDocumentExtractor documentExtractor : resultDocumentExtractors) {
      documentExtractor.extractDocument(result, solrInputDocument);
    }
    solrUpdateWriter.add(solrInputDocument);
  } finally {
    timerContext.stop();
  }
}
/**
 * Main entry point.
 *
 * @param args command line arguments
 * @throws TTransportException thrift errors
 * @throws IOException I/O errors
 * @throws InterruptedException thread errors
 */
public static void main(String[] args) throws Exception {
  HadoopNative.requireHadoopNative();

  Optional<BenchmarkArgs> parsedArgs = handleCommandLine(args);
  if (!parsedArgs.isPresent()) {
    return;
  }

  Timer allTime =
      Metrics.newTimer(
          InputBenchmark.class, "all-time", TimeUnit.MILLISECONDS, TimeUnit.MILLISECONDS);
  TimerContext allTimerContext = allTime.time();

  run(parsedArgs.get());

  allTimerContext.stop();
  new ConsoleReporter(System.err).run();
}
public SnapshotAllResult snapshotAll() throws Exception {
  final SnapshotAllResult result = new SnapshotAllResult();
  result.beginTimestamp = System.currentTimeMillis();
  final TimerContext timerContext = snapshotAllTimer.time();
  try {
    snapshotAllAttemptedCounter.incrementAndGet();
    doSnapshotAll(result);
    // if we get to here then the snapshotAll call worked
    snapshotAllCompletedCounter.incrementAndGet();
  } catch (Exception e) {
    // save exception into result
    result.exception = e;
    throw e;
  } finally {
    result.completeTimestamp = System.currentTimeMillis();
    timerContext.stop();
    // always save reference to result
    this.lastSnapshotAllResult.set(result);
  }
  return result;
}
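A sketch of the fields that snapshotAll() relies on, as they could plausibly be declared; the field names come from the snippet, but the timer construction and counter types are assumptions.

// Assumed declarations (imports: com.yammer.metrics.Metrics, com.yammer.metrics.core.Timer,
// java.util.concurrent.TimeUnit, java.util.concurrent.atomic.AtomicLong,
// java.util.concurrent.atomic.AtomicReference).
private final Timer snapshotAllTimer =
    Metrics.newTimer(getClass(), "snapshot-all", TimeUnit.MILLISECONDS, TimeUnit.SECONDS);
private final AtomicLong snapshotAllAttemptedCounter = new AtomicLong();
private final AtomicLong snapshotAllCompletedCounter = new AtomicLong();
private final AtomicReference<SnapshotAllResult> lastSnapshotAllResult =
    new AtomicReference<SnapshotAllResult>();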
/**
 * Stops request timer.
 *
 * @param requestTimerContext timer context
 */
public void stopTimer(TimerContext requestTimerContext) {
  requestTimerContext.stop();
}
/** Stops sink timer. */
public void stopSinkTimer() {
  sinkTimerContext.stop();
}
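stopTimer(...) and stopSinkTimer() above only cover the stop side. A hedged sketch of how the matching start side might look; the timer fields, metric names, and method names are assumptions made for illustration.

// Hypothetical start-side counterparts of the two stop methods above.
private final Timer requestTimer =
    Metrics.newTimer(getClass(), "requests", TimeUnit.MILLISECONDS, TimeUnit.SECONDS);
private final Timer sinkTimer =
    Metrics.newTimer(getClass(), "sink", TimeUnit.MILLISECONDS, TimeUnit.SECONDS);
private TimerContext sinkTimerContext;

public TimerContext startTimer() {
  return requestTimer.time(); // caller keeps the context and later passes it to stopTimer(...)
}

public void startSinkTimer() {
  sinkTimerContext = sinkTimer.time(); // later stopped by stopSinkTimer()
}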
protected void run(CQLQueryExecutor executor) throws IOException {
  if (session.getColumnFamilyType() == ColumnFamilyType.Super)
    throw new RuntimeException("Super columns are not implemented for CQL");

  if (values == null) values = generateValues();

  // Construct a query string once.
  if (cqlQuery == null) {
    StringBuilder query = new StringBuilder("UPDATE ").append(wrapInQuotesIfRequired("Standard1"));

    if (session.cqlVersion.startsWith("2"))
      query.append(" USING CONSISTENCY ").append(session.getConsistencyLevel().toString());

    query.append(" SET ");

    for (int i = 0; i < session.getColumnsPerKey(); i++) {
      if (i > 0) query.append(',');

      if (session.timeUUIDComparator) {
        if (session.cqlVersion.startsWith("3"))
          throw new UnsupportedOperationException("Cannot use UUIDs in column names with CQL3");

        query.append(wrapInQuotesIfRequired(UUIDGen.getTimeUUID().toString())).append(" = ?");
      } else {
        query.append(wrapInQuotesIfRequired("C" + i)).append(" = ?");
      }
    }

    query.append(" WHERE KEY=?");
    cqlQuery = query.toString();
  }

  List<String> queryParms = new ArrayList<String>();

  for (int i = 0; i < session.getColumnsPerKey(); i++) {
    // Column value
    queryParms.add(
        getUnQuotedCqlBlob(
            values.get(i % values.size()).array(), session.cqlVersion.startsWith("3")));
  }

  String key = String.format("%0" + session.getTotalKeysLength() + "d", index);
  queryParms.add(getUnQuotedCqlBlob(key, session.cqlVersion.startsWith("3")));

  TimerContext context = session.latency.time();

  boolean success = false;
  String exceptionMessage = null;

  for (int t = 0; t < session.getRetryTimes(); t++) {
    if (success) break;

    try {
      success = executor.execute(cqlQuery, queryParms);
    } catch (Exception e) {
      exceptionMessage = getExceptionMessage(e);
      success = false;
    }
  }

  if (!success) {
    error(
        String.format(
            "Operation [%d] retried %d times - error inserting key %s %s%n with query %s",
            index,
            session.getRetryTimes(),
            key,
            (exceptionMessage == null) ? "" : "(" + exceptionMessage + ")",
            cqlQuery));
  }

  session.operations.getAndIncrement();
  session.keys.getAndIncrement();
  context.stop();
}
@Override
public void evictionPassCompleted() {
  evictionInvokeDurationContext.stop();
}
@Override
public void outputInvokeCompleted() {
  outputInvokeDurationContext.stop();
}
@Override
public void preInstantiationCompleted() {
  preInstantiationDurationContext.stop();
}
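The three callbacks above only stop their contexts. A sketch of the start side that presumably pairs with one of them; the timer field, metric name, and method name are assumptions mirroring the snippet's field names, and the other two contexts would be started the same way.

// Hypothetical start-side counterpart; only the stop() calls appear in the
// original snippet, so everything below is assumed for illustration.
private final Timer evictionInvokeDurationTimer =
    Metrics.newTimer(getClass(), "eviction-invoke-duration", TimeUnit.MILLISECONDS, TimeUnit.SECONDS);
private TimerContext evictionInvokeDurationContext;

public void evictionPassStarted() {
  evictionInvokeDurationContext = evictionInvokeDurationTimer.time();
}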