  /**
   * Prepares an insert by building an {@link IndexRequest} for the item. For raw source inserts
   * the provided source is used as-is; otherwise the _source is built from the insert columns
   * and, if required, generated column expressions are evaluated.
   */
  private IndexRequest prepareInsert(
      DocTableInfo tableInfo, ShardUpsertRequest request, ShardUpsertRequest.Item item)
      throws IOException {
    List<GeneratedReferenceInfo> generatedReferencesWithValue = new ArrayList<>();
    BytesReference source;
    if (request.isRawSourceInsert()) {
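      // raw insert: the first insert value already contains the complete _source document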
      assert item.insertValues().length > 0 : "empty insert values array";
      source = new BytesArray((BytesRef) item.insertValues()[0]);
    } else {
      XContentBuilder builder = XContentFactory.jsonBuilder().startObject();
      for (int i = 0; i < item.insertValues().length; i++) {
        Reference ref = request.insertColumns()[i];
        if (ref.info().granularity() == RowGranularity.DOC) {
          // only DOC granularity columns are written to the _source; values for partitioned-by
          // columns are skipped (ideally they wouldn't be part of the request at all)
          builder.field(ref.ident().columnIdent().fqn(), item.insertValues()[i]);
          if (ref.info() instanceof GeneratedReferenceInfo) {
            generatedReferencesWithValue.add((GeneratedReferenceInfo) ref.info());
          }
        }
      }
      source = builder.bytes();
    }

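    // Count the generated columns that are expected to appear in the _source; generated columns
    // used only for partitioning are excluded.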
    int generatedColumnSize = 0;
    for (GeneratedReferenceInfo generatedReferenceInfo : tableInfo.generatedColumns()) {
      if (!tableInfo.partitionedByColumns().contains(generatedReferenceInfo)) {
        generatedColumnSize++;
      }
    }

    int numMissingGeneratedColumns = generatedColumnSize - generatedReferencesWithValue.size();
    if (numMissingGeneratedColumns > 0
        || (!generatedReferencesWithValue.isEmpty() && request.validateGeneratedColumns())) {
      // some generated column values are missing or must be validated, so evaluate the
      // expressions and rebuild the source from the resulting map
      Map<String, Object> sourceMap =
          processGeneratedColumnsOnInsert(
              tableInfo,
              request.insertColumns(),
              item.insertValues(),
              request.isRawSourceInsert(),
              request.validateGeneratedColumns());
      source = XContentFactory.jsonBuilder().map(sourceMap).bytes();
    }

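    // Build the index request; create(true) makes the insert fail on duplicate ids unless
    // overwriteDuplicates is set on the request.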
    IndexRequest indexRequest =
        Requests.indexRequest(request.index())
            .type(request.type())
            .id(item.id())
            .routing(request.routing())
            .source(source)
            .create(!request.overwriteDuplicates())
            .operationThreaded(false);
    if (logger.isTraceEnabled()) {
      logger.trace(
          "Inserting document with id {}, source: {}", item.id(), indexRequest.source().toUtf8());
    }
    return indexRequest;
  }
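
  /**
   * Executes all items of the request against the given shard. Each item is passed to {@code
   * indexItem}, trying an insert first when insert values are present. If an item fails with a
   * shard-not-available exception or {@code continueOnError} is set, a failure is recorded in the
   * {@link ShardResponse}; otherwise the error is rethrown. Processing stops with a
   * {@link CancellationException} once {@code killed} is set.
   */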
  @Override
  protected ShardResponse processRequestItems(
      ShardId shardId, ShardUpsertRequest request, AtomicBoolean killed) {
    ShardResponse shardResponse = new ShardResponse();
    DocTableInfo tableInfo = schemas.getWritableTable(TableIdent.fromIndexName(request.index()));
    for (int i = 0; i < request.itemIndices().size(); i++) {
      int location = request.itemIndices().get(i);
      ShardUpsertRequest.Item item = request.items().get(i);
      if (killed.get()) {
        throw new CancellationException();
      }
      try {
        indexItem(
            tableInfo,
            request,
            item,
            shardId,
            item.insertValues() != null, // try insert first
            0);
        shardResponse.add(location);
      } catch (Throwable t) {
        if (!TransportActions.isShardNotAvailableException(t) && !request.continueOnError()) {
          throw t;
        } else {
          logger.debug(
              "{} failed to execute upsert for [{}]/[{}]",
              t,
              request.shardId(),
              request.type(),
              item.id());
          shardResponse.add(
              location,
              new ShardResponse.Failure(
                  item.id(),
                  ExceptionsHelper.detailedMessage(t),
                  (t instanceof VersionConflictEngineException)));
        }
      }
    }

    return shardResponse;
  }

  /**
   * Prepares an update request by converting it into an index request.
   *
   * <p>TODO: detect a NOOP and, if so, return an update response instead of re-indexing.
   */
  @SuppressWarnings("unchecked")
  public IndexRequest prepareUpdate(
      DocTableInfo tableInfo,
      ShardUpsertRequest request,
      ShardUpsertRequest.Item item,
      ShardId shardId)
      throws ElasticsearchException {
    IndexService indexService = indicesService.indexServiceSafe(shardId.getIndex());
    IndexShard indexShard = indexService.shardSafe(shardId.id());
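    // Fetch the current document, including its _source and the routing, parent and TTL fields,
    // at the version requested by the item.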
    final GetResult getResult =
        indexShard
            .getService()
            .get(
                request.type(),
                item.id(),
                new String[] {RoutingFieldMapper.NAME, ParentFieldMapper.NAME, TTLFieldMapper.NAME},
                true,
                item.version(),
                VersionType.INTERNAL,
                FetchSourceContext.FETCH_SOURCE,
                false);

    if (!getResult.isExists()) {
      throw new DocumentMissingException(
          new ShardId(request.index(), request.shardId()), request.type(), item.id());
    }

    if (getResult.internalSourceRef() == null) {
      // no source available, we can't do anything, so throw a failure
      throw new DocumentSourceMissingException(
          new ShardId(request.index(), request.shardId()), request.type(), item.id());
    }

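    // Convert the stored _source into a map so that individual column paths can be overwritten.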
    Tuple<XContentType, Map<String, Object>> sourceAndContent =
        XContentHelper.convertToMap(getResult.internalSourceRef(), true);
    final Map<String, Object> updatedSourceAsMap = sourceAndContent.v2();
    final XContentType updateSourceContentType = sourceAndContent.v1();
    String routing =
        getResult.getFields().containsKey(RoutingFieldMapper.NAME)
            ? getResult.field(RoutingFieldMapper.NAME).getValue().toString()
            : null;
    String parent =
        getResult.getFields().containsKey(ParentFieldMapper.NAME)
            ? getResult.field(ParentFieldMapper.NAME).getValue().toString()
            : null;

    SymbolToFieldExtractorContext ctx =
        new SymbolToFieldExtractorContext(functions, item.insertValues());

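    // Evaluate the update assignments against the fetched document and split the results into
    // plain column updates and values for generated columns.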
    Map<String, Object> pathsToUpdate = new LinkedHashMap<>();
    Map<String, Object> updatedGeneratedColumns = new LinkedHashMap<>();
    for (int i = 0; i < request.updateColumns().length; i++) {
      /*
       * NOTE: the mapping isn't applied, so if an insert was done via the ES REST endpoint the
       * data might be returned in the wrong format (e.g. a date as string instead of a long).
       */
      String columnPath = request.updateColumns()[i];
      Object value =
          SYMBOL_TO_FIELD_EXTRACTOR.convert(item.updateAssignments()[i], ctx).extract(getResult);
      ReferenceInfo referenceInfo = tableInfo.getReferenceInfo(ColumnIdent.fromPath(columnPath));
      if (referenceInfo instanceof GeneratedReferenceInfo) {
        updatedGeneratedColumns.put(columnPath, value);
      } else {
        pathsToUpdate.put(columnPath, value);
      }
    }

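    // Re-evaluate / validate the generated columns based on the updated values, then merge all
    // updated paths back into the source map.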
    processGeneratedColumns(
        tableInfo,
        pathsToUpdate,
        updatedGeneratedColumns,
        request.validateGeneratedColumns(),
        getResult);

    updateSourceByPaths(updatedSourceAsMap, pathsToUpdate);

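    // Re-index the document with the merged source; using the fetched version ensures concurrent
    // modifications show up as version conflicts.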
    final IndexRequest indexRequest =
        Requests.indexRequest(request.index())
            .type(request.type())
            .id(item.id())
            .routing(routing)
            .parent(parent)
            .source(updatedSourceAsMap, updateSourceContentType)
            .version(getResult.getVersion());
    indexRequest.operationThreaded(false);
    return indexRequest;
  }