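  // Indexes 500 "tweet" documents, then scrolls through them in pages of 35 while
  // re-indexing every hit with its "message" field changed from "test" to "update",
  // and verifies after a refresh that the term counts have flipped accordingly.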
  @Test
  public void testScrollAndUpdateIndex() throws Exception {
    client()
        .admin()
        .indices()
        .prepareCreate("test")
        .setSettings(Settings.settingsBuilder().put("index.number_of_shards", 5))
        .execute()
        .actionGet();
    client()
        .admin()
        .cluster()
        .prepareHealth()
        .setWaitForEvents(Priority.LANGUID)
        .setWaitForGreenStatus()
        .execute()
        .actionGet();

    for (int i = 0; i < 500; i++) {
      client()
          .prepareIndex("test", "tweet", Integer.toString(i))
          .setSource(
              jsonBuilder()
                  .startObject()
                  .field("user", "kimchy")
                  .field("postDate", System.currentTimeMillis())
                  .field("message", "test")
                  .endObject())
          .execute()
          .actionGet();
    }

    client().admin().indices().prepareRefresh().execute().actionGet();

    assertThat(
        client().prepareCount().setQuery(matchAllQuery()).execute().actionGet().getCount(),
        equalTo(500l));
    assertThat(
        client()
            .prepareCount()
            .setQuery(termQuery("message", "test"))
            .execute()
            .actionGet()
            .getCount(),
        equalTo(500l));
    assertThat(
        client()
            .prepareCount()
            .setQuery(termQuery("message", "test"))
            .execute()
            .actionGet()
            .getCount(),
        equalTo(500l));
    assertThat(
        client()
            .prepareCount()
            .setQuery(termQuery("message", "update"))
            .execute()
            .actionGet()
            .getCount(),
        equalTo(0l));
    assertThat(
        client()
            .prepareCount()
            .setQuery(termQuery("message", "update"))
            .execute()
            .actionGet()
            .getCount(),
        equalTo(0l));

    SearchResponse searchResponse =
        client()
            .prepareSearch()
            .setQuery(queryStringQuery("user:kimchy"))
            .setSize(35)
            .setScroll(TimeValue.timeValueMinutes(2))
            .addSort("postDate", SortOrder.ASC)
            .execute()
            .actionGet();
    try {
      do {
        for (SearchHit searchHit : searchResponse.getHits().hits()) {
          Map<String, Object> map = searchHit.sourceAsMap();
          map.put("message", "update");
          client()
              .prepareIndex("test", "tweet", searchHit.id())
              .setSource(map)
              .execute()
              .actionGet();
        }
        searchResponse =
            client()
                .prepareSearchScroll(searchResponse.getScrollId())
                .setScroll(TimeValue.timeValueMinutes(2))
                .execute()
                .actionGet();
      } while (searchResponse.getHits().hits().length > 0);

      client().admin().indices().prepareRefresh().execute().actionGet();
      assertThat(
          client().prepareCount().setQuery(matchAllQuery()).execute().actionGet().getCount(),
          equalTo(500l));
      assertThat(
          client()
              .prepareCount()
              .setQuery(termQuery("message", "test"))
              .execute()
              .actionGet()
              .getCount(),
          equalTo(0l));
      assertThat(
          client()
              .prepareCount()
              .setQuery(termQuery("message", "test"))
              .execute()
              .actionGet()
              .getCount(),
          equalTo(0l));
      assertThat(
          client()
              .prepareCount()
              .setQuery(termQuery("message", "update"))
              .execute()
              .actionGet()
              .getCount(),
          equalTo(500l));
      assertThat(
          client()
              .prepareCount()
              .setQuery(termQuery("message", "update"))
              .execute()
              .actionGet()
              .getCount(),
          equalTo(500l));
    } finally {
      clearScroll(searchResponse.getScrollId());
    }
  }
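  // Coordinates one scroll round with the query-then-fetch flow: a query request per
  // scroll context entry, then a fetch request per shard that has documents to load.
  // Query and fetch results are kept in maps keyed by shard target, obtained from searchCache.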
  private class AsyncAction {

    private final SearchScrollRequest request;

    private final ActionListener<SearchResponse> listener;

    private final ParsedScrollId scrollId;

    private final DiscoveryNodes nodes;

    protected volatile Queue<ShardSearchFailure> shardFailures;

    private final Map<SearchShardTarget, QuerySearchResultProvider> queryResults =
        searchCache.obtainQueryResults();

    private final Map<SearchShardTarget, FetchSearchResult> fetchResults =
        searchCache.obtainFetchResults();

    private volatile ShardDoc[] sortedShardList;

    private final AtomicInteger successfulOps;

    private final long startTime = System.currentTimeMillis();

    private AsyncAction(
        SearchScrollRequest request,
        ParsedScrollId scrollId,
        ActionListener<SearchResponse> listener) {
      this.request = request;
      this.listener = listener;
      this.scrollId = scrollId;
      this.nodes = clusterService.state().nodes();
      this.successfulOps = new AtomicInteger(scrollId.getContext().length);
    }

    protected final ShardSearchFailure[] buildShardFailures() {
      Queue<ShardSearchFailure> localFailures = shardFailures;
      if (localFailures == null) {
        return ShardSearchFailure.EMPTY_ARRAY;
      }
      return localFailures.toArray(ShardSearchFailure.EMPTY_ARRAY);
    }

    // we do our best to return the shard failures, but it's OK if it's not fully concurrently safe;
    // we simply try to return as much as possible
    protected final void addShardFailure(ShardSearchFailure failure) {
      if (shardFailures == null) {
        shardFailures = ConcurrentCollections.newQueue();
      }
      shardFailures.add(failure);
    }

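    // Dispatches the query phase: shards on remote nodes are queried immediately, while
    // shards on the local node are counted first and then executed according to the
    // request's operation threading (single thread, thread per shard, or inline).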
    public void start() {
      if (scrollId.getContext().length == 0) {
        listener.onFailure(
            new SearchPhaseExecutionException("query", "no nodes to search on", null));
        return;
      }
      final AtomicInteger counter = new AtomicInteger(scrollId.getContext().length);

      int localOperations = 0;
      for (Tuple<String, Long> target : scrollId.getContext()) {
        DiscoveryNode node = nodes.get(target.v1());
        if (node != null) {
          if (nodes.localNodeId().equals(node.id())) {
            localOperations++;
          } else {
            executeQueryPhase(counter, node, target.v2());
          }
        } else {
          if (logger.isDebugEnabled()) {
            logger.debug(
                "Node ["
                    + target.v1()
                    + "] not available for scroll request ["
                    + scrollId.getSource()
                    + "]");
          }
          successfulOps.decrementAndGet();
          if (counter.decrementAndGet() == 0) {
            executeFetchPhase();
          }
        }
      }

      if (localOperations > 0) {
        if (request.getOperationThreading() == SearchOperationThreading.SINGLE_THREAD) {
          threadPool
              .executor(ThreadPool.Names.SEARCH)
              .execute(
                  new Runnable() {
                    @Override
                    public void run() {
                      for (Tuple<String, Long> target : scrollId.getContext()) {
                        DiscoveryNode node = nodes.get(target.v1());
                        if (node != null && nodes.localNodeId().equals(node.id())) {
                          executeQueryPhase(counter, node, target.v2());
                        }
                      }
                    }
                  });
        } else {
          boolean localAsync =
              request.getOperationThreading() == SearchOperationThreading.THREAD_PER_SHARD;
          for (final Tuple<String, Long> target : scrollId.getContext()) {
            final DiscoveryNode node = nodes.get(target.v1());
            if (node != null && nodes.localNodeId().equals(node.id())) {
              if (localAsync) {
                threadPool
                    .executor(ThreadPool.Names.SEARCH)
                    .execute(
                        new Runnable() {
                          @Override
                          public void run() {
                            executeQueryPhase(counter, node, target.v2());
                          }
                        });
              } else {
                executeQueryPhase(counter, node, target.v2());
              }
            }
          }
        }
      }
    }

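    // Sends the scroll query for one shard context; once every shard has answered or
    // failed, the shared counter reaches zero and the fetch phase starts.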
    private void executeQueryPhase(
        final AtomicInteger counter, DiscoveryNode node, final long searchId) {
      searchService.sendExecuteQuery(
          node,
          internalScrollSearchRequest(searchId, request),
          new SearchServiceListener<QuerySearchResult>() {
            @Override
            public void onResult(QuerySearchResult result) {
              queryResults.put(result.shardTarget(), result);
              if (counter.decrementAndGet() == 0) {
                executeFetchPhase();
              }
            }

            @Override
            public void onFailure(Throwable t) {
              if (logger.isDebugEnabled()) {
                logger.debug("[{}] Failed to execute query phase", t, searchId);
              }
              addShardFailure(new ShardSearchFailure(t));
              successfulOps.decrementAndGet();
              if (counter.decrementAndGet() == 0) {
                executeFetchPhase();
              }
            }
          });
    }

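    // Sorts the per-shard query results, works out which doc ids each shard has to load,
    // and issues the fetch requests, finishing once the last shard responds.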
    private void executeFetchPhase() {
      sortedShardList = searchPhaseController.sortDocs(queryResults.values());
      Map<SearchShardTarget, ExtTIntArrayList> docIdsToLoad =
          searchPhaseController.docIdsToLoad(sortedShardList);

      if (docIdsToLoad.isEmpty()) {
        // nothing to fetch for this scroll page; finish now instead of falling through
        finishHim();
        return;
      }

      final AtomicInteger counter = new AtomicInteger(docIdsToLoad.size());

      for (final Map.Entry<SearchShardTarget, ExtTIntArrayList> entry : docIdsToLoad.entrySet()) {
        SearchShardTarget shardTarget = entry.getKey();
        ExtTIntArrayList docIds = entry.getValue();
        FetchSearchRequest fetchSearchRequest =
            new FetchSearchRequest(request, queryResults.get(shardTarget).id(), docIds);
        DiscoveryNode node = nodes.get(shardTarget.nodeId());
        searchService.sendExecuteFetch(
            node,
            fetchSearchRequest,
            new SearchServiceListener<FetchSearchResult>() {
              @Override
              public void onResult(FetchSearchResult result) {
                result.shardTarget(entry.getKey());
                fetchResults.put(result.shardTarget(), result);
                if (counter.decrementAndGet() == 0) {
                  finishHim();
                }
              }

              @Override
              public void onFailure(Throwable t) {
                if (logger.isDebugEnabled()) {
                  logger.debug("Failed to execute fetch phase", t);
                }
                successfulOps.decrementAndGet();
                if (counter.decrementAndGet() == 0) {
                  finishHim();
                }
              }
            });
      }
    }

    private void finishHim() {
      try {
        innerFinishHim();
      } catch (Exception e) {
        listener.onFailure(new ReduceSearchPhaseException("fetch", "", e, buildShardFailures()));
      }
    }

    private void innerFinishHim() {
      InternalSearchResponse internalResponse =
          searchPhaseController.merge(sortedShardList, queryResults, fetchResults);
      String scrollId = null;
      if (request.getScroll() != null) {
        scrollId = request.getScrollId();
      }
      listener.onResponse(
          new SearchResponse(
              internalResponse,
              scrollId,
              this.scrollId.getContext().length,
              successfulOps.get(),
              System.currentTimeMillis() - startTime,
              buildShardFailures()));
      searchCache.releaseQueryResults(queryResults);
      searchCache.releaseFetchResults(fetchResults);
    }
  }
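  // Variant of the same scroll coordination that keeps per-shard state in AtomicArrays
  // indexed by the shard's position in the scroll context, rather than in maps keyed by
  // shard target, and records query failures per shard index.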
  private class AsyncAction {

    private final SearchScrollRequest request;

    private final ActionListener<SearchResponse> listener;

    private final ParsedScrollId scrollId;

    private final DiscoveryNodes nodes;

    private volatile AtomicArray<ShardSearchFailure> shardFailures;
    final AtomicArray<QuerySearchResult> queryResults;
    final AtomicArray<FetchSearchResult> fetchResults;

    private volatile ScoreDoc[] sortedShardList;

    private final AtomicInteger successfulOps;

    private final long startTime = System.currentTimeMillis();

    private AsyncAction(
        SearchScrollRequest request,
        ParsedScrollId scrollId,
        ActionListener<SearchResponse> listener) {
      this.request = request;
      this.listener = listener;
      this.scrollId = scrollId;
      this.nodes = clusterService.state().nodes();
      this.successfulOps = new AtomicInteger(scrollId.getContext().length);

      this.queryResults = new AtomicArray<QuerySearchResult>(scrollId.getContext().length);
      this.fetchResults = new AtomicArray<FetchSearchResult>(scrollId.getContext().length);
    }

    protected final ShardSearchFailure[] buildShardFailures() {
      if (shardFailures == null) {
        return ShardSearchFailure.EMPTY_ARRAY;
      }
      List<AtomicArray.Entry<ShardSearchFailure>> entries = shardFailures.asList();
      ShardSearchFailure[] failures = new ShardSearchFailure[entries.size()];
      for (int i = 0; i < failures.length; i++) {
        failures[i] = entries.get(i).value;
      }
      return failures;
    }

    // we do our best to return the shard failures, but it's OK if it's not fully concurrently safe;
    // we simply try to return as much as possible
    protected final void addShardFailure(final int shardIndex, ShardSearchFailure failure) {
      if (shardFailures == null) {
        shardFailures = new AtomicArray<ShardSearchFailure>(scrollId.getContext().length);
      }
      shardFailures.set(shardIndex, failure);
    }

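    // Same dispatch as the map-based variant, but each target carries its index into the
    // scroll context so results and failures can be slotted into the AtomicArrays; local
    // execution is wrapped in a try/catch that routes errors through onQueryPhaseFailure.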
    public void start() {
      if (scrollId.getContext().length == 0) {
        listener.onFailure(
            new SearchPhaseExecutionException("query", "no nodes to search on", null));
        return;
      }
      final AtomicInteger counter = new AtomicInteger(scrollId.getContext().length);

      int localOperations = 0;
      Tuple<String, Long>[] context = scrollId.getContext();
      for (int i = 0; i < context.length; i++) {
        Tuple<String, Long> target = context[i];
        DiscoveryNode node = nodes.get(target.v1());
        if (node != null) {
          if (nodes.localNodeId().equals(node.id())) {
            localOperations++;
          } else {
            executeQueryPhase(i, counter, node, target.v2());
          }
        } else {
          if (logger.isDebugEnabled()) {
            logger.debug(
                "Node ["
                    + target.v1()
                    + "] not available for scroll request ["
                    + scrollId.getSource()
                    + "]");
          }
          successfulOps.decrementAndGet();
          if (counter.decrementAndGet() == 0) {
            executeFetchPhase();
          }
        }
      }

      if (localOperations > 0) {
        if (request.operationThreading() == SearchOperationThreading.SINGLE_THREAD) {
          threadPool
              .executor(ThreadPool.Names.SEARCH)
              .execute(
                  new Runnable() {
                    @Override
                    public void run() {
                      Tuple<String, Long>[] context1 = scrollId.getContext();
                      for (int i = 0; i < context1.length; i++) {
                        Tuple<String, Long> target = context1[i];
                        DiscoveryNode node = nodes.get(target.v1());
                        if (node != null && nodes.localNodeId().equals(node.id())) {
                          executeQueryPhase(i, counter, node, target.v2());
                        }
                      }
                    }
                  });
        } else {
          boolean localAsync =
              request.operationThreading() == SearchOperationThreading.THREAD_PER_SHARD;
          Tuple<String, Long>[] context1 = scrollId.getContext();
          for (int i = 0; i < context1.length; i++) {
            final Tuple<String, Long> target = context1[i];
            final int shardIndex = i;
            final DiscoveryNode node = nodes.get(target.v1());
            if (node != null && nodes.localNodeId().equals(node.id())) {
              try {
                if (localAsync) {
                  threadPool
                      .executor(ThreadPool.Names.SEARCH)
                      .execute(
                          new Runnable() {
                            @Override
                            public void run() {
                              executeQueryPhase(shardIndex, counter, node, target.v2());
                            }
                          });
                } else {
                  executeQueryPhase(shardIndex, counter, node, target.v2());
                }
              } catch (Throwable t) {
                onQueryPhaseFailure(shardIndex, counter, target.v2(), t);
              }
            }
          }
        }
      }
    }

    private void executeQueryPhase(
        final int shardIndex,
        final AtomicInteger counter,
        DiscoveryNode node,
        final long searchId) {
      searchService.sendExecuteQuery(
          node,
          internalScrollSearchRequest(searchId, request),
          new SearchServiceListener<QuerySearchResult>() {
            @Override
            public void onResult(QuerySearchResult result) {
              queryResults.set(shardIndex, result);
              if (counter.decrementAndGet() == 0) {
                executeFetchPhase();
              }
            }

            @Override
            public void onFailure(Throwable t) {
              onQueryPhaseFailure(shardIndex, counter, searchId, t);
            }
          });
    }

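    // Records the query-phase failure for the given shard index, decrements the count of
    // successful operations, and moves on to the fetch phase once all shards have reported.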
    void onQueryPhaseFailure(
        final int shardIndex, final AtomicInteger counter, final long searchId, Throwable t) {
      if (logger.isDebugEnabled()) {
        logger.debug("[{}] Failed to execute query phase", t, searchId);
      }
      addShardFailure(shardIndex, new ShardSearchFailure(t));
      successfulOps.decrementAndGet();
      if (counter.decrementAndGet() == 0) {
        executeFetchPhase();
      }
    }

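    // As in the map-based variant: sort the query results, fill in the doc ids each shard
    // must load, and send the fetch requests, finishing when the last one comes back.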
    private void executeFetchPhase() {
      sortedShardList = searchPhaseController.sortDocs(queryResults);
      AtomicArray<ExtTIntArrayList> docIdsToLoad =
          new AtomicArray<ExtTIntArrayList>(queryResults.length());
      searchPhaseController.fillDocIdsToLoad(docIdsToLoad, sortedShardList);

      if (docIdsToLoad.asList().isEmpty()) {
        // nothing to fetch for this scroll page; finish now instead of falling through
        finishHim();
        return;
      }

      final AtomicInteger counter = new AtomicInteger(docIdsToLoad.asList().size());

      for (final AtomicArray.Entry<ExtTIntArrayList> entry : docIdsToLoad.asList()) {
        ExtTIntArrayList docIds = entry.value;
        final QuerySearchResult querySearchResult = queryResults.get(entry.index);
        FetchSearchRequest fetchSearchRequest =
            new FetchSearchRequest(request, querySearchResult.id(), docIds);
        DiscoveryNode node = nodes.get(querySearchResult.shardTarget().nodeId());
        searchService.sendExecuteFetch(
            node,
            fetchSearchRequest,
            new SearchServiceListener<FetchSearchResult>() {
              @Override
              public void onResult(FetchSearchResult result) {
                result.shardTarget(querySearchResult.shardTarget());
                fetchResults.set(entry.index, result);
                if (counter.decrementAndGet() == 0) {
                  finishHim();
                }
              }

              @Override
              public void onFailure(Throwable t) {
                if (logger.isDebugEnabled()) {
                  logger.debug("Failed to execute fetch phase", t);
                }
                successfulOps.decrementAndGet();
                if (counter.decrementAndGet() == 0) {
                  finishHim();
                }
              }
            });
      }
    }

    private void finishHim() {
      try {
        innerFinishHim();
      } catch (Throwable e) {
        listener.onFailure(new ReduceSearchPhaseException("fetch", "", e, buildShardFailures()));
      }
    }

    private void innerFinishHim() {
      InternalSearchResponse internalResponse =
          searchPhaseController.merge(sortedShardList, queryResults, fetchResults);
      String scrollId = null;
      if (request.scroll() != null) {
        scrollId = request.scrollId();
      }
      listener.onResponse(
          new SearchResponse(
              internalResponse,
              scrollId,
              this.scrollId.getContext().length,
              successfulOps.get(),
              System.currentTimeMillis() - startTime,
              buildShardFailures()));
    }
  }