@Override
 public String toString() {
   String sSource = "_na_";
   try {
     sSource = XContentHelper.convertToJson(source, false);
   } catch (Exception e) {
     // ignore
   }
   return "index {[" + index + "][" + type + "][" + id + "], source[" + sSource + "]}";
 }
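 /**
  * Parses the given bytes as XContent and builds the cluster {@link MetaData} from them.
  * The parser is always closed, even when parsing fails.
  */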
 private MetaData readMetaData(byte[] data) throws IOException {
   XContentParser parser = null;
   try {
     parser = XContentHelper.createParser(data, 0, data.length);
     return MetaData.Builder.fromXContent(parser);
   } finally {
     if (parser != null) {
       parser.close();
     }
   }
 }
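 /**
  * Loads the most recent persisted {@link IndexMetaData} for the given index by scanning
  * the "_state" directory under each index location for "state-&lt;version&gt;" files and
  * keeping the one with the highest version. Returns null if no readable state exists.
  */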
 @Nullable
 private IndexMetaData loadIndex(String index) {
   long highestVersion = -1;
   IndexMetaData indexMetaData = null;
   for (File indexLocation : nodeEnv.indexLocations(new Index(index))) {
     File stateDir = new File(indexLocation, "_state");
     if (!stateDir.exists() || !stateDir.isDirectory()) {
       continue;
     }
      // now, iterate over the current versions and find the latest one
     File[] stateFiles = stateDir.listFiles();
     if (stateFiles == null) {
       continue;
     }
     for (File stateFile : stateFiles) {
       if (!stateFile.getName().startsWith("state-")) {
         continue;
       }
       try {
         long version = Long.parseLong(stateFile.getName().substring("state-".length()));
         if (version > highestVersion) {
           byte[] data = Streams.copyToByteArray(new FileInputStream(stateFile));
           if (data.length == 0) {
            logger.debug("[{}]: no data for [{}], ignoring...", index, stateFile.getAbsolutePath());
             continue;
           }
           XContentParser parser = null;
           try {
             parser = XContentHelper.createParser(data, 0, data.length);
             parser.nextToken(); // move to START_OBJECT
             indexMetaData = IndexMetaData.Builder.fromXContent(parser);
             highestVersion = version;
           } finally {
             if (parser != null) {
               parser.close();
             }
           }
         }
       } catch (Exception e) {
          logger.debug("[{}]: failed to read [{}], ignoring...", e, index, stateFile.getAbsolutePath());
       }
     }
   }
   return indexMetaData;
 }
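  /**
   * Loads the most recent persisted global {@link MetaData} by scanning the "_state"
   * directory under each node data location for "global-&lt;version&gt;" files and keeping
   * the one with the highest version. Returns null if no readable global state exists.
   */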
  private MetaData loadGlobalState() {
    long highestVersion = -1;
    MetaData metaData = null;
    for (File dataLocation : nodeEnv.nodeDataLocations()) {
      File stateLocation = new File(dataLocation, "_state");
      if (!stateLocation.exists()) {
        continue;
      }
      File[] stateFiles = stateLocation.listFiles();
      if (stateFiles == null) {
        continue;
      }
      for (File stateFile : stateFiles) {
        String name = stateFile.getName();
        if (!name.startsWith("global-")) {
          continue;
        }
        try {
           long version = Long.parseLong(name.substring("global-".length()));
          if (version > highestVersion) {
            byte[] data = Streams.copyToByteArray(new FileInputStream(stateFile));
            if (data.length == 0) {
             logger.debug("[_global] no data for [{}], ignoring...", stateFile.getAbsolutePath());
              continue;
            }

            XContentParser parser = null;
            try {
              parser = XContentHelper.createParser(data, 0, data.length);
              metaData = MetaData.Builder.fromXContent(parser);
              highestVersion = version;
            } finally {
              if (parser != null) {
                parser.close();
              }
            }
          }
        } catch (Exception e) {
          logger.debug("failed to load global state from [{}]", e, stateFile.getAbsolutePath());
        }
      }
    }

    return metaData;
  }
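   // A minimal sketch of the "newest state file wins" pattern that loadIndex() and
   // loadGlobalState() both implement inline. The helper name highestStateVersion is an
   // assumption for illustration, not part of the actual gateway API:
   private static long highestStateVersion(File stateDir, String prefix) {
     long highest = -1;
     File[] files = stateDir.listFiles();
     if (files == null) {
       return highest;
     }
     for (File file : files) {
       String name = file.getName();
       if (!name.startsWith(prefix)) {
         continue;
       }
       try {
         // state file names are "<prefix><version>", e.g. "state-42" or "global-42"
         highest = Math.max(highest, Long.parseLong(name.substring(prefix.length())));
       } catch (NumberFormatException e) {
         // not a versioned state file, skip it
       }
     }
     return highest;
   }

   /**
    * Prepares this index request before routing: resolves routing through the index or
    * alias, parses an externally supplied timestamp, extracts id/routing/timestamp from
    * the source when the mapping requires it, validates required routing and parent
    * usage, and finally generates an id (switching the op type to CREATE) and a
    * timestamp when they are missing and generation is allowed.
    */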
  public void process(
      MetaData metaData,
      String aliasOrIndex,
      @Nullable MappingMetaData mappingMd,
      boolean allowIdGeneration)
      throws ElasticsearchException {
    // resolve the routing if needed
    routing(metaData.resolveIndexRouting(routing, aliasOrIndex));
    // resolve timestamp if provided externally
    if (timestamp != null) {
      timestamp =
          MappingMetaData.Timestamp.parseStringTimestamp(
              timestamp,
              mappingMd != null
                  ? mappingMd.timestamp().dateTimeFormatter()
                  : TimestampFieldMapper.Defaults.DATE_TIME_FORMATTER);
    }
    // extract values if needed
    if (mappingMd != null) {
      MappingMetaData.ParseContext parseContext =
          mappingMd.createParseContext(id, routing, timestamp);

      if (parseContext.shouldParse()) {
        XContentParser parser = null;
        try {
          parser = XContentHelper.createParser(source);
          mappingMd.parse(parser, parseContext);
          if (parseContext.shouldParseId()) {
            id = parseContext.id();
          }
          if (parseContext.shouldParseRouting()) {
            routing = parseContext.routing();
          }
          if (parseContext.shouldParseTimestamp()) {
            timestamp = parseContext.timestamp();
            timestamp =
                MappingMetaData.Timestamp.parseStringTimestamp(
                    timestamp, mappingMd.timestamp().dateTimeFormatter());
          }
        } catch (Exception e) {
          throw new ElasticsearchParseException(
              "failed to parse doc to extract routing/timestamp", e);
        } finally {
          if (parser != null) {
            parser.close();
          }
        }
      }

      // might as well check for routing here
      if (mappingMd.routing().required() && routing == null) {
        throw new RoutingMissingException(index, type, id);
      }

      if (parent != null && !mappingMd.hasParentField()) {
        throw new ElasticsearchIllegalArgumentException(
            "Can't specify parent if no parent field has been configured");
      }
    } else {
      if (parent != null) {
        throw new ElasticsearchIllegalArgumentException(
            "Can't specify parent if no parent field has been configured");
      }
    }

    // generate id if not already provided and id generation is allowed
    if (allowIdGeneration) {
      if (id == null) {
        id(Strings.randomBase64UUID());
        // since we generate the id, change it to CREATE
        opType(IndexRequest.OpType.CREATE);
      }
    }

     // generate a timestamp if not provided; we always have one after this stage...
    if (timestamp == null) {
      timestamp = Long.toString(System.currentTimeMillis());
    }
  }
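  // Hedged usage sketch: a transport action would typically call process() once per
  // request before routing it to a shard, along the lines of
  //   request.process(clusterState.metaData(), concreteIndex, mappingMd, true);
  // where clusterState, concreteIndex, and mappingMd are assumed to be in scope (the
  // exact call site varies by version). After it returns, the request always carries a
  // timestamp, and a generated id implies OpType.CREATE.

 /** Converts the request source into a map, without guaranteeing key order. */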
 public Map<String, Object> sourceAsMap() {
   return XContentHelper.convertToMap(source, false).v2();
 }
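   /**
    * Parses the given source into a {@link ParsedDocument}: validates the type, runs the
    * root mappers' pre- and post-parse hooks around the root object mapper, promotes any
    * mappers introduced dynamically during parsing (even on failure), applies the doc
    * boost, and reverses the docs so the parent document comes last for nested support.
    */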
  public ParsedDocument parse(SourceToParse source, @Nullable ParseListener listener)
      throws MapperParsingException {
    ParseContext context = cache.get();

    if (source.type() != null && !source.type().equals(this.type)) {
      throw new MapperParsingException(
          "Type mismatch, provide type ["
              + source.type()
              + "] but mapper is of type ["
              + this.type
              + "]");
    }
    source.type(this.type);

    XContentParser parser = source.parser();
    try {
      if (parser == null) {
        parser = XContentHelper.createParser(source.source());
      }
      context.reset(parser, new Document(), source, listener);
       // on a newly created instance of a document mapper, we always consider its mappers
       // as newly added, so the mappings are flagged as modified
      if (initMappersAdded) {
        context.setMappingsModified();
        initMappersAdded = false;
      }

       // the first token of well-formed content must be START_OBJECT
      int countDownTokens = 0;
      XContentParser.Token token = parser.nextToken();
      if (token != XContentParser.Token.START_OBJECT) {
        throw new MapperParsingException("Malformed content, must start with an object");
      }
      boolean emptyDoc = false;
      token = parser.nextToken();
      if (token == XContentParser.Token.END_OBJECT) {
        // empty doc, we can handle it...
        emptyDoc = true;
      } else if (token != XContentParser.Token.FIELD_NAME) {
        throw new MapperParsingException(
            "Malformed content, after first object, either the type field or the actual properties should exist");
      }
      if (type.equals(parser.currentName())) {
         // the first field has the same name as the type; either the type was provided
         // explicitly and the document object is nested within it, or a valid field just
         // happens to be named after the type.
         // Note: in this case we only handle plain value types; an object-valued field
         // will be parsed as if it were the type wrapper itself, and other same-level
         // fields will be ignored.
         token = parser.nextToken();
         countDownTokens++;
         // commented out to allow a same-named field with START_OBJECT; we do our best to
         // handle it except for the corner case above:
         // if (token != XContentParser.Token.START_OBJECT) {
         //     throw new MapperException("Malformed content, a field with the same name
         //         as the type must be an object with the properties/fields within it");
         // }
      }

      for (RootMapper rootMapper : rootMappersOrdered) {
        rootMapper.preParse(context);
      }

      if (!emptyDoc) {
        rootObjectMapper.parse(context);
      }

      for (int i = 0; i < countDownTokens; i++) {
        parser.nextToken();
      }

       // fire up any new mappers if they exist
      if (!context.newFieldMappers().mappers.isEmpty()) {
        addFieldMappers(context.newFieldMappers().mappers);
        context.newFieldMappers().mappers.clear();
      }
      if (!context.newObjectMappers().mappers.isEmpty()) {
        addObjectMappers(context.newObjectMappers().mappers);
        context.newObjectMappers().mappers.clear();
      }

      for (RootMapper rootMapper : rootMappersOrdered) {
        rootMapper.postParse(context);
      }

      for (RootMapper rootMapper : rootMappersOrdered) {
        rootMapper.validate(context);
      }
    } catch (Throwable e) {
       // we have to fire up any new mappers even on a failure, because they have been
       // added internally to each compound mapper...
       // ...we have no option to "rollback" a change, which is very tricky in our
       // copy-on-change system...
      if (!context.newFieldMappers().mappers.isEmpty()) {
        addFieldMappers(context.newFieldMappers().mappers);
        context.newFieldMappers().mappers.clear();
      }
      if (!context.newObjectMappers().mappers.isEmpty()) {
        addObjectMappers(context.newObjectMappers().mappers);
        context.newObjectMappers().mappers.clear();
      }

       // if it's already a mapper parsing exception, no need to wrap it...
      if (e instanceof MapperParsingException) {
        throw (MapperParsingException) e;
      }

      throw new MapperParsingException("failed to parse", e);
    } finally {
       // only close the parser when it's not provided externally
      if (source.parser() == null && parser != null) {
        parser.close();
      }
    }
     // reverse the order of docs for nested docs support; the parent doc must come last
    if (context.docs().size() > 1) {
      Collections.reverse(context.docs());
    }
    // apply doc boost
    if (context.docBoost() != 1.0f) {
      Set<String> encounteredFields = Sets.newHashSet();
      for (Document doc : context.docs()) {
        encounteredFields.clear();
        for (IndexableField field : doc) {
          if (field.fieldType().indexed() && !field.fieldType().omitNorms()) {
            if (!encounteredFields.contains(field.name())) {
              ((Field) field).setBoost(context.docBoost() * field.boost());
              encounteredFields.add(field.name());
            }
          }
        }
      }
    }

    ParsedDocument doc =
        new ParsedDocument(
                context.uid(),
                context.id(),
                context.type(),
                source.routing(),
                source.timestamp(),
                source.ttl(),
                context.docs(),
                context.analyzer(),
                context.source(),
                context.mappingsModified())
            .parent(source.parent());
    // reset the context to free up memory
    context.reset(null, null, null, null);
    return doc;
  }
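   /**
    * Upgrades pre-0.19 on-disk state: finds the newest "metadata-&lt;N&gt;" file across the
    * node data locations, rewrites its contents as the new global and per-index state
    * files, renames the old file to "backup-&lt;name&gt;", and deletes any remaining old
    * metadata files.
    */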
  private void pre019Upgrade() throws Exception {
    long index = -1;
    File metaDataFile = null;
    MetaData metaData = null;
    long version = -1;
    for (File dataLocation : nodeEnv.nodeDataLocations()) {
      File stateLocation = new File(dataLocation, "_state");
      if (!stateLocation.exists()) {
        continue;
      }
      File[] stateFiles = stateLocation.listFiles();
      if (stateFiles == null) {
        continue;
      }
      for (File stateFile : stateFiles) {
        if (logger.isTraceEnabled()) {
          logger.trace("[upgrade]: processing [" + stateFile.getName() + "]");
        }
        String name = stateFile.getName();
        if (!name.startsWith("metadata-")) {
          continue;
        }
        long fileIndex;
        try {
          fileIndex = Long.parseLong(name.substring(name.indexOf('-') + 1));
        } catch (NumberFormatException e) {
          // not a numbered metadata file, skip it
          continue;
        }
        if (fileIndex >= index) {
          // try to read the meta data
          try {
            byte[] data = Streams.copyToByteArray(new FileInputStream(stateFile));
            if (data.length == 0) {
              continue;
            }
            XContentParser parser = XContentHelper.createParser(data, 0, data.length);
            try {
              String currentFieldName = null;
              XContentParser.Token token = parser.nextToken();
              if (token != null) {
                while ((token = parser.nextToken()) != XContentParser.Token.END_OBJECT) {
                  if (token == XContentParser.Token.FIELD_NAME) {
                    currentFieldName = parser.currentName();
                  } else if (token == XContentParser.Token.START_OBJECT) {
                    if ("meta-data".equals(currentFieldName)) {
                      metaData = MetaData.Builder.fromXContent(parser);
                    }
                  } else if (token.isValue()) {
                    if ("version".equals(currentFieldName)) {
                      version = parser.longValue();
                    }
                  }
                }
              }
            } finally {
              parser.close();
            }
            index = fileIndex;
            metaDataFile = stateFile;
          } catch (IOException e) {
            logger.warn("failed to read pre 0.19 state from [" + name + "], ignoring...", e);
          }
        }
      }
    }
    if (metaData == null) {
      return;
    }

    logger.info(
        "found old metadata state, loading metadata from [{}] and converting to new metadata location and strucutre...",
        metaDataFile.getAbsolutePath());

    writeGlobalState(
        "upgrade", MetaData.builder().metaData(metaData).version(version).build(), null);
    for (IndexMetaData indexMetaData : metaData) {
      IndexMetaData.Builder indexMetaDataBuilder =
          IndexMetaData.newIndexMetaDataBuilder(indexMetaData).version(version);
      // set the created version to 0.18
      indexMetaDataBuilder.settings(
          ImmutableSettings.settingsBuilder()
              .put(indexMetaData.settings())
              .put(IndexMetaData.SETTING_VERSION_CREATED, Version.V_0_18_0));
      writeIndex("upgrade", indexMetaDataBuilder.build(), null);
    }

     // rename the old metadata file to a backup file
    File backupFile = new File(metaDataFile.getParentFile(), "backup-" + metaDataFile.getName());
    if (!metaDataFile.renameTo(backupFile)) {
      throw new IOException(
          "failed to rename old state to backup state [" + metaDataFile.getAbsolutePath() + "]");
    }

     // delete all remaining old metadata files
    for (File dataLocation : nodeEnv.nodeDataLocations()) {
      File stateLocation = new File(dataLocation, "_state");
      if (!stateLocation.exists()) {
        continue;
      }
      File[] stateFiles = stateLocation.listFiles();
      if (stateFiles == null) {
        continue;
      }
      for (File stateFile : stateFiles) {
        String name = stateFile.getName();
        if (!name.startsWith("metadata-")) {
          continue;
        }
        stateFile.delete();
      }
    }

    logger.info(
        "conversion to new metadata location and format done, backup create at [{}]",
        backupFile.getAbsolutePath());
  }
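   // Hedged sketch of the resulting on-disk layout after the upgrade above; the names
   // follow the loaders earlier in this section, and exact paths depend on the node's
   // configured data locations:
   //   <data location>/_state/global-<version>     written via writeGlobalState("upgrade", ...)
   //   <index location>/_state/state-<version>     written via writeIndex("upgrade", ...)
   //   <data location>/_state/backup-metadata-<N>  the renamed pre-0.19 metadata file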