コード例 #1
0
 /**
  * Checks whether the blob listing contains data for the given commit-point file entry, either
  * as a single blob of the exact expected length, or as a series of ".partN" blobs whose
  * lengths sum to the expected file length.
  */
 private boolean commitPointFileExistsInBlobs(
     CommitPoint.FileInfo fileInfo, ImmutableMap<String, BlobMetaData> blobs) {
   BlobMetaData single = blobs.get(fileInfo.name());
   if (single != null) {
     // single-blob case: the stored length must match exactly
     return single.length() == fileInfo.length();
   }
   if (!blobs.containsKey(fileInfo.name() + ".part0")) {
     // neither a whole blob nor the first part of a multi-part blob
     return false;
   }
   // multi-part case: sum up consecutive ".partN" blobs and compare the total
   long totalSize = 0;
   for (int part = 0; ; part++) {
     BlobMetaData partMetaData = blobs.get(fileInfo.name() + ".part" + part);
     if (partMetaData == null) {
       break;
     }
     totalSize += partMetaData.length();
   }
   return totalSize == fileInfo.length();
 }
コード例 #2
0
 /**
  * Lists every blob directly under {@code path}, keyed by file name, with its length.
  * Returns an empty map when the listing is null or empty.
  *
  * @throws IOException if the underlying file system listing fails
  */
 public ImmutableMap<String, BlobMetaData> listBlobs() throws IOException {
   FileStatus[] statuses = blobStore.fileSystem().listStatus(path);
   if (statuses == null || statuses.length == 0) {
     return ImmutableMap.of();
   }
   ImmutableMap.Builder<String, BlobMetaData> blobs = ImmutableMap.builder();
   for (FileStatus status : statuses) {
     String blobName = status.getPath().getName();
     blobs.put(blobName, new PlainBlobMetaData(blobName, status.getLen()));
   }
   return blobs.build();
 }
コード例 #3
0
 /**
  * Handles an explicit {@code null} value for {@code lastFieldName}: only a field that already
  * has a mapper can parse a null; otherwise the value is silently ignored.
  */
 private void serializeNullValue(ParseContext context, String lastFieldName) throws IOException {
   final Mapper existing = mappers.get(lastFieldName);
   if (existing == null) {
     return;
   }
   existing.parse(context);
 }
コード例 #4
0
 /** Notifies the listener of this object mapper itself, then recurses into each child mapper. */
 @Override
 public void traverse(ObjectMapperListener objectMapperListener) {
   objectMapperListener.objectMapper(this);
   for (Mapper child : mappers.values()) {
     child.traverse(objectMapperListener);
   }
 }
コード例 #5
0
  /**
   * Creates and wires a new shard for this index via a child Guice injector.
   *
   * <p>Synchronized so that concurrent callers cannot race on the copy-on-write
   * {@code shardsInjectors}/{@code shards} map swaps below.
   *
   * @param sShardId the numeric shard id within this index
   * @return the newly created {@link IndexShard}
   * @throws ElasticSearchException if a shard with the same id already exists
   */
  @Override
  public synchronized IndexShard createShard(int sShardId) throws ElasticSearchException {
    ShardId shardId = new ShardId(index, sShardId);
    if (shardsInjectors.containsKey(shardId.id())) {
      throw new IndexShardAlreadyExistsException(shardId + " already exists");
    }

    indicesLifecycle.beforeIndexShardCreated(shardId);

    logger.debug("creating shard_id [{}]", shardId.id());

    // each shard gets its own child injector with shard-scoped modules
    ModulesBuilder modules = new ModulesBuilder();
    modules.add(new ShardsPluginsModule(indexSettings, pluginsService));
    modules.add(new IndexShardModule(shardId));
    modules.add(new StoreModule(indexSettings, injector.getInstance(IndexStore.class)));
    modules.add(new DeletionPolicyModule(indexSettings));
    modules.add(new MergePolicyModule(indexSettings));
    modules.add(new MergeSchedulerModule(indexSettings));
    modules.add(new TranslogModule(indexSettings));
    modules.add(new EngineModule(indexSettings));
    modules.add(new IndexShardGatewayModule(injector.getInstance(IndexGateway.class)));

    Injector shardInjector = modules.createChildInjector(injector);

    // copy-on-write update; safe because this method is synchronized
    shardsInjectors =
        newMapBuilder(shardsInjectors).put(shardId.id(), shardInjector).immutableMap();

    IndexShard indexShard = shardInjector.getInstance(IndexShard.class);

    indicesLifecycle.afterIndexShardCreated(indexShard);

    shards = newMapBuilder(shards).put(shardId.id(), indexShard).immutableMap();

    return indexShard;
  }
コード例 #6
0
 /**
  * Updates the {@code index.refresh_interval} setting of the given index and blocks until the
  * settings-update request completes ({@code get()}).
  */
 private void updateIndexRefresh(String name, Object value) {
   client
       .admin()
       .indices()
       .prepareUpdateSettings(name)
       .setSettings(ImmutableMap.of("index.refresh_interval", value))
       .get();
 }
コード例 #7
0
  /**
   * Verifies {@code ElasticsearchIndexService.is_verbose}: truthy override values (1, "1",
   * "true"/true in any case) enable verbose mode, falsy values disable it, and a missing
   * "verbose" key or a missing override schema defaults to non-verbose.
   */
  @Test
  public void test_verbosenessSettings() {
    // values expected to read as "verbose on" / "verbose off" respectively
    final List<Object> l_true = Arrays.asList(1, "1", "true", true, "TRUE", "True");
    final List<Object> l_false = Arrays.asList(0, "0", "false", false, "FALSE", "False");

    for (Object o : l_true) {
      final DataSchemaBean.SearchIndexSchemaBean s =
          BeanTemplateUtils.build(DataSchemaBean.SearchIndexSchemaBean.class)
              .with(
                  DataSchemaBean.SearchIndexSchemaBean::technology_override_schema,
                  ImmutableMap.builder().put("verbose", o).build())
              .done()
              .get();
      assertEquals(true, ElasticsearchIndexService.is_verbose(s));
    }
    for (Object o : l_false) {
      final DataSchemaBean.SearchIndexSchemaBean s =
          BeanTemplateUtils.build(DataSchemaBean.SearchIndexSchemaBean.class)
              .with(
                  DataSchemaBean.SearchIndexSchemaBean::technology_override_schema,
                  ImmutableMap.builder().put("verbose", o).build())
              .done()
              .get();
      assertEquals(false, ElasticsearchIndexService.is_verbose(s));
    }

    // (not present)
    {
      // override schema present but without a "verbose" entry -> defaults to false
      final DataSchemaBean.SearchIndexSchemaBean s =
          BeanTemplateUtils.build(DataSchemaBean.SearchIndexSchemaBean.class)
              .with(
                  DataSchemaBean.SearchIndexSchemaBean::technology_override_schema,
                  ImmutableMap.builder().build())
              .done()
              .get();
      assertEquals(false, ElasticsearchIndexService.is_verbose(s));
    }
    {
      // no override schema at all -> defaults to false
      final DataSchemaBean.SearchIndexSchemaBean s =
          BeanTemplateUtils.build(DataSchemaBean.SearchIndexSchemaBean.class).done().get();
      assertEquals(false, ElasticsearchIndexService.is_verbose(s));
    }
  }
コード例 #8
0
 /**
  * Sets {@code include_in_all} only when it has not been set yet, then propagates the same
  * "if not set" update to every child mapper that supports inclusion in the _all field.
  */
 @Override
 public void includeInAllIfNotSet(Boolean includeInAll) {
   if (this.includeInAll == null) {
     this.includeInAll = includeInAll;
   }
   // propagate to inner mappers that participate in the _all field
   for (Mapper child : mappers.values()) {
     if (child instanceof AllFieldMapper.IncludeInAll) {
       ((AllFieldMapper.IncludeInAll) child).includeInAllIfNotSet(includeInAll);
     }
   }
 }
コード例 #9
0
 /**
  * Reads every blob whose name starts with "commit-" and parses it into a commit point.
  * Blobs that fail to parse are logged and skipped rather than failing the whole listing.
  */
 private CommitPoints buildCommitPoints(ImmutableMap<String, BlobMetaData> blobs) {
   List<CommitPoint> points = Lists.newArrayList();
   for (String blobName : blobs.keySet()) {
     if (!blobName.startsWith("commit-")) {
       continue;
     }
     try {
       points.add(CommitPoints.fromXContent(blobContainer.readBlobFully(blobName)));
     } catch (Exception e) {
       logger.warn("failed to read commit point [{}]", e, blobName);
     }
   }
   return new CommitPoints(points);
 }
コード例 #10
0
 /**
  * Lists blobs directly under {@code path} whose names start with {@code blobNamePrefix},
  * keyed by file name.
  *
  * <p>Fix: the parameter is declared {@code @Nullable}, but the filter previously called
  * {@code startsWith(null)} and threw a NullPointerException. A {@code null} prefix now
  * matches every blob.
  *
  * @throws IOException if the underlying file system listing fails
  */
 @Override
 public ImmutableMap<String, BlobMetaData> listBlobsByPrefix(final @Nullable String blobNamePrefix)
     throws IOException {
   FileStatus[] files =
       blobStore
           .fileSystem()
           .listStatus(
               path,
               new PathFilter() {
                 @Override
                 public boolean accept(Path path) {
                   // null prefix means "match everything" instead of throwing NPE
                   return blobNamePrefix == null || path.getName().startsWith(blobNamePrefix);
                 }
               });
   if (files == null || files.length == 0) {
     return ImmutableMap.of();
   }
   ImmutableMap.Builder<String, BlobMetaData> builder = ImmutableMap.builder();
   for (FileStatus file : files) {
     builder.put(
         file.getPath().getName(), new PlainBlobMetaData(file.getPath().getName(), file.getLen()));
   }
   return builder.build();
 }
コード例 #11
0
  /**
   * Finds the highest file-name generation among blobs following the "__&lt;gen&gt;" naming
   * scheme (the generation is encoded in radix {@link Character#MAX_RADIX}); a ".partN" suffix
   * is stripped before parsing.
   *
   * @return the highest generation found, or -1 when no conforming blob exists
   */
  private long findLatestFileNameGeneration(ImmutableMap<String, BlobMetaData> blobs) {
    long generation = -1;
    for (String name : blobs.keySet()) {
      if (!name.startsWith("__")) {
        continue;
      }
      if (name.contains(".part")) {
        name = name.substring(0, name.indexOf(".part"));
      }

      try {
        long currentGen = Long.parseLong(name.substring(2) /*__*/, Character.MAX_RADIX);
        if (currentGen > generation) {
          generation = currentGen;
        }
      } catch (NumberFormatException e) {
        // FIX: the "{}" placeholder previously had no argument, so the file name was never logged
        logger.warn("file [{}] does not conform to the '__' schema", name);
      }
    }
    return generation;
  }
コード例 #12
0
 /**
  * Parses an array value for {@code lastFieldName}. If the field's mapper explicitly supports
  * whole arrays ({@code ArrayValueMapperParser}), the array is handed to it as-is; otherwise
  * each element is dispatched individually (objects, nested arrays, nulls, scalars), so a
  * plain field mapper sees the elements one by one.
  */
 private void serializeArray(ParseContext context, String lastFieldName) throws IOException {
   Mapper mapper = mappers.get(lastFieldName);
   if (mapper != null && mapper instanceof ArrayValueMapperParser) {
     mapper.parse(context);
   } else {
     XContentParser parser = context.parser();
     XContentParser.Token token;
     while ((token = parser.nextToken()) != XContentParser.Token.END_ARRAY) {
       if (token == XContentParser.Token.START_OBJECT) {
         serializeObject(context, lastFieldName);
       } else if (token == XContentParser.Token.START_ARRAY) {
         serializeArray(context, lastFieldName);
       } else if (token == XContentParser.Token.FIELD_NAME) {
         // field names can appear inside arrays of objects; track the latest one
         lastFieldName = parser.currentName();
       } else if (token == XContentParser.Token.VALUE_NULL) {
         serializeNullValue(context, lastFieldName);
       } else {
         serializeValue(context, lastFieldName, token);
       }
     }
   }
 }
コード例 #13
0
 /** Returns an immutable snapshot of the ids of all shards currently held by this service. */
 @Override
 public ImmutableSet<Integer> shardIds() {
   ImmutableSet.Builder<Integer> ids = ImmutableSet.builder();
   ids.addAll(shards.keySet());
   return ids.build();
 }
コード例 #14
0
 /** Returns {@code true} if a shard with the given id exists in this index service. */
 @Override
 public boolean hasShard(int shardId) {
   return shards.get(shardId) != null;
 }
コード例 #15
0
 /** Returns the shard with the given id, or {@code null} if no such shard exists. */
 @Override
 public IndexShard shard(int shardId) {
   final IndexShard found = shards.get(shardId);
   return found;
 }
コード例 #16
0
 /** Returns how many shards this index service currently holds. */
 @Override
 public int numberOfShards() {
   return shards.keySet().size();
 }
コード例 #17
0
 /**
  * Iterates over all shards of this index. The iterator is unmodifiable because it is backed
  * by the immutable {@code shards} map snapshot.
  */
 @Override
 public UnmodifiableIterator<IndexShard> iterator() {
   return shards.values().iterator();
 }
コード例 #18
0
 /** Recursively visits every child mapper with the given field-mapper listener. */
 @Override
 public void traverse(FieldMapperListener fieldMapperListener) {
   for (Mapper child : mappers.values()) {
     child.traverse(fieldMapperListener);
   }
 }
コード例 #19
0
ファイル: EsQueryExecutor.java プロジェクト: denshade/opal
  /**
   * Minimal {@code HttpRequest} implementation used to hand a search request (method, body,
   * params and a fixed JSON Content-Type header) to the embedded ES REST layer. The URI is
   * always "/&lt;path&gt;/_search" (or "/_search" when no path is given).
   */
  private static class EsRestRequest extends HttpRequest {

    // request body; may be null or empty (see hasContent)
    private final String body;

    private final Map<String, String> params;

    // full request URI, built once in the constructor
    private final String esUri;

    private Method httpMethod = Method.GET;

    private final Map<String, String> headers = ImmutableMap.of("Content-Type", "application/json");

    EsRestRequest(String body, String path) {
      this(body, path, new HashMap<String, String>());
    }

    EsRestRequest(String body, String path, Map<String, String> params) {
      this.body = body;
      this.params = params;

      StringBuilder pathBuilder = new StringBuilder("/");

      if (!Strings.isNullOrEmpty(path)) {
        pathBuilder.append(path).append("/");
      }

      esUri = pathBuilder.append("_search").toString();
    }

    public EsRestRequest setHttpMethod(Method method) {
      httpMethod = method;
      return this;
    }

    @Override
    public Method method() {
      return httpMethod;
    }

    @Override
    public String uri() {
      return esUri;
    }

    @Override
    public String rawPath() {
      // strip any query string from the URI
      int pathEndPos = esUri.indexOf('?');
      return pathEndPos < 0 ? esUri : esUri.substring(0, pathEndPos);
    }

    @Override
    public boolean hasContent() {
      return body != null && body.length() > 0;
    }

    @Override
    public BytesReference content() {
      return new BytesArray(body);
    }

    @Override
    public String header(String name) {
      // note: lookup is case-sensitive; only "Content-Type" is set
      return headers.get(name);
    }

    @Override
    public Iterable<Map.Entry<String, String>> headers() {
      return headers.entrySet();
    }

    @Override
    public boolean hasParam(String key) {
      return params.containsKey(key);
    }

    @Override
    public String param(String key) {
      return params.get(key);
    }

    @Override
    public Map<String, String> params() {
      return params;
    }

    @Override
    public String param(String key, String defaultValue) {
      return hasParam(key) ? param(key) : defaultValue;
    }
  }
コード例 #20
0
  /**
   * Serializes this object mapper's mapping definition. Writes the type/nested flags, dynamic
   * and path settings, optional custom content, then all child mappers sorted by name so the
   * serialization format stays stable: internal mappers and {@code additionalMappers} are
   * written first, and the remaining children go under a "properties" section.
   */
  public void toXContent(
      XContentBuilder builder, Params params, ToXContent custom, Mapper... additionalMappers)
      throws IOException {
    builder.startObject(name);
    if (nested.isNested()) {
      builder.field("type", NESTED_CONTENT_TYPE);
      if (nested.isIncludeInParent()) {
        builder.field("include_in_parent", true);
      }
      if (nested.isIncludeInRoot()) {
        builder.field("include_in_root", true);
      }
    } else if (mappers
        .isEmpty()) { // only write the object content type if there are no properties, otherwise,
                      // it is automatically detected
      builder.field("type", CONTENT_TYPE);
    }
    // grr, ugly! on root, dynamic defaults to TRUE, on children, it defaults to null to
    // inherit the root behavior
    if (this instanceof RootObjectMapper) {
      if (dynamic != Dynamic.TRUE) {
        builder.field("dynamic", dynamic.name().toLowerCase());
      }
    } else {
      if (dynamic != Defaults.DYNAMIC) {
        builder.field("dynamic", dynamic.name().toLowerCase());
      }
    }
    if (enabled != Defaults.ENABLED) {
      builder.field("enabled", enabled);
    }
    if (pathType != Defaults.PATH_TYPE) {
      builder.field("path", pathType.name().toLowerCase());
    }
    if (includeInAll != null) {
      builder.field("include_in_all", includeInAll);
    }

    if (custom != null) {
      custom.toXContent(builder, params);
    }

    doXContent(builder, params);

    // sort the mappers so we get consistent serialization format
    TreeMap<String, Mapper> sortedMappers = new TreeMap<String, Mapper>(mappers);

    // check internal mappers first (this is only relevant for root object)
    for (Mapper mapper : sortedMappers.values()) {
      if (mapper instanceof InternalMapper) {
        mapper.toXContent(builder, params);
      }
    }
    if (additionalMappers != null && additionalMappers.length > 0) {
      TreeMap<String, Mapper> additionalSortedMappers = new TreeMap<String, Mapper>();
      for (Mapper mapper : additionalMappers) {
        additionalSortedMappers.put(mapper.name(), mapper);
      }

      for (Mapper mapper : additionalSortedMappers.values()) {
        mapper.toXContent(builder, params);
      }
    }

    if (!mappers.isEmpty()) {
      builder.startObject("properties");
      for (Mapper mapper : sortedMappers.values()) {
        // internal mappers were already written above, outside "properties"
        if (!(mapper instanceof InternalMapper)) {
          mapper.toXContent(builder, params);
        }
      }
      builder.endObject();
    }
    builder.endObject();
  }
コード例 #21
0
  /**
   * Parses an embedded object value for {@code currentFieldName}. An existing mapper for the
   * field parses it directly; otherwise the effective dynamic setting (this mapper's, falling
   * back to the root's) decides: STRICT throws, TRUE creates a new object mapper (from a
   * matching dynamic template when available) under {@code mutex}, and any other value skips
   * the object's children.
   *
   * @throws MapperParsingException if the object has no associated field name
   */
  private void serializeObject(final ParseContext context, String currentFieldName)
      throws IOException {
    if (currentFieldName == null) {
      throw new MapperParsingException(
          "object mapping ["
              + name
              + "] trying to serialize an object with no field associated with it, current value ["
              + context.parser().textOrNull()
              + "]");
    }
    context.path().add(currentFieldName);

    Mapper objectMapper = mappers.get(currentFieldName);
    if (objectMapper != null) {
      objectMapper.parse(context);
    } else {
      Dynamic dynamic = this.dynamic;
      if (dynamic == null) {
        dynamic = context.root().dynamic();
      }
      if (dynamic == Dynamic.STRICT) {
        throw new StrictDynamicMappingException(currentFieldName);
      } else if (dynamic == Dynamic.TRUE) {
        // we sync here just so we won't add it twice. Its not the end of the world
        // to sync here since next operations will get it before
        boolean newMapper = false;
        synchronized (mutex) {
          objectMapper = mappers.get(currentFieldName);
          if (objectMapper == null) {
            newMapper = true;
            Mapper.Builder builder =
                context.root().findTemplateBuilder(context, currentFieldName, "object");
            if (builder == null) {
              builder =
                  MapperBuilders.object(currentFieldName)
                      .enabled(true)
                      .dynamic(dynamic)
                      .pathType(pathType);
            }
            // remove the current field name from path, since the object builder adds it as well...
            context.path().remove();
            BuilderContext builderContext =
                new BuilderContext(context.indexSettings(), context.path());
            objectMapper = builder.build(builderContext);
            putMapper(objectMapper);
            // now re add it
            context.path().add(currentFieldName);
            context.addedMapper();
          }
        }
        // traverse and parse outside of the mutex
        if (newMapper) {
          // we need to traverse in case we have a dynamic template and need to add field mappers
          // introduced by it
          objectMapper.traverse(
              new FieldMapperListener() {
                @Override
                public void fieldMapper(FieldMapper fieldMapper) {
                  context.docMapper().addFieldMapper(fieldMapper);
                }
              });
          objectMapper.traverse(
              new ObjectMapperListener() {
                @Override
                public void objectMapper(ObjectMapper objectMapper) {
                  context.docMapper().addObjectMapper(objectMapper);
                }
              });
        }
        // now, parse it
        objectMapper.parse(context);
      } else {
        // not dynamic, read everything up to end object
        context.parser().skipChildren();
      }
    }

    context.path().remove();
  }
コード例 #22
0
/** @author kimchy (shay.banon) */
@ThreadSafe
public class ObjectMapper implements Mapper, AllFieldMapper.IncludeInAll {

  public static final String CONTENT_TYPE = "object";
  public static final String NESTED_CONTENT_TYPE = "nested";

  /** Default settings for an object mapper when not explicitly configured. */
  public static class Defaults {
    public static final boolean ENABLED = true;
    public static final Nested NESTED = Nested.NO;
    public static final Dynamic DYNAMIC = null; // not set, inherited from father
    public static final ContentPath.Type PATH_TYPE = ContentPath.Type.FULL;
  }

  /** How unmapped fields are handled: added dynamically (TRUE), ignored (FALSE), or rejected (STRICT). */
  public static enum Dynamic {
    TRUE,
    FALSE,
    STRICT
  }

  /** Nested-object settings: whether this object is nested and where its fields are copied. */
  public static class Nested {

    // the "not nested" singleton
    public static final Nested NO = new Nested(false, false, false);

    public static Nested newNested(boolean includeInParent, boolean includeInRoot) {
      return new Nested(true, includeInParent, includeInRoot);
    }

    private final boolean nested;

    // also copy nested fields into the direct parent document (see parse())
    private final boolean includeInParent;

    // also copy nested fields into the root document (see parse())
    private final boolean includeInRoot;

    private Nested(boolean nested, boolean includeInParent, boolean includeInRoot) {
      this.nested = nested;
      this.includeInParent = includeInParent;
      this.includeInRoot = includeInRoot;
    }

    public boolean isNested() {
      return nested;
    }

    public boolean isIncludeInParent() {
      return includeInParent;
    }

    public boolean isIncludeInRoot() {
      return includeInRoot;
    }
  }

  /** Fluent builder for {@link ObjectMapper}: collects settings and child mapper builders. */
  public static class Builder<T extends Builder, Y extends ObjectMapper>
      extends Mapper.Builder<T, Y> {

    protected boolean enabled = Defaults.ENABLED;

    protected Nested nested = Defaults.NESTED;

    protected Dynamic dynamic = Defaults.DYNAMIC;

    protected ContentPath.Type pathType = Defaults.PATH_TYPE;

    // tri-state: null means "not set", resolved via includeInAllIfNotSet on build
    protected Boolean includeInAll;

    protected final List<Mapper.Builder> mappersBuilders = newArrayList();

    public Builder(String name) {
      super(name);
      this.builder = (T) this;
    }

    public T enabled(boolean enabled) {
      this.enabled = enabled;
      return builder;
    }

    public T dynamic(Dynamic dynamic) {
      this.dynamic = dynamic;
      return builder;
    }

    public T nested(Nested nested) {
      this.nested = nested;
      return builder;
    }

    public T pathType(ContentPath.Type pathType) {
      this.pathType = pathType;
      return builder;
    }

    public T includeInAll(boolean includeInAll) {
      this.includeInAll = includeInAll;
      return builder;
    }

    public T add(Mapper.Builder builder) {
      mappersBuilders.add(builder);
      return this.builder;
    }

    /**
     * Builds the object mapper: children are built with this object's name pushed onto the
     * context path (and this builder's path type), then the path state is restored.
     */
    @Override
    public Y build(BuilderContext context) {
      ContentPath.Type origPathType = context.path().pathType();
      context.path().pathType(pathType);
      context.path().add(name);

      Map<String, Mapper> mappers = new HashMap<String, Mapper>();
      for (Mapper.Builder builder : mappersBuilders) {
        Mapper mapper = builder.build(context);
        mappers.put(mapper.name(), mapper);
      }
      context.path().pathType(origPathType);
      context.path().remove();

      ObjectMapper objectMapper =
          createMapper(
              name,
              context.path().fullPathAsText(name),
              enabled,
              nested,
              dynamic,
              pathType,
              mappers);
      objectMapper.includeInAllIfNotSet(includeInAll);

      return (Y) objectMapper;
    }

    // factory hook so subclasses (e.g. the root object builder) can build their own type
    protected ObjectMapper createMapper(
        String name,
        String fullPath,
        boolean enabled,
        Nested nested,
        Dynamic dynamic,
        ContentPath.Type pathType,
        Map<String, Mapper> mappers) {
      return new ObjectMapper(name, fullPath, enabled, nested, dynamic, pathType, mappers);
    }
  }

  /**
   * Parses an object mapping definition node ("dynamic", "type", nested flags, "enabled",
   * "path", "properties", "include_in_all") into an {@link ObjectMapper.Builder}.
   */
  public static class TypeParser implements Mapper.TypeParser {
    @Override
    public Mapper.Builder parse(String name, Map<String, Object> node, ParserContext parserContext)
        throws MapperParsingException {
      Map<String, Object> objectNode = node;
      ObjectMapper.Builder builder = createBuilder(name);

      boolean nested = false;
      boolean nestedIncludeInParent = false;
      boolean nestedIncludeInRoot = false;
      for (Map.Entry<String, Object> entry : objectNode.entrySet()) {
        String fieldName = Strings.toUnderscoreCase(entry.getKey());
        Object fieldNode = entry.getValue();

        if (fieldName.equals("dynamic")) {
          String value = fieldNode.toString();
          if (value.equalsIgnoreCase("strict")) {
            builder.dynamic(Dynamic.STRICT);
          } else {
            builder.dynamic(nodeBooleanValue(fieldNode) ? Dynamic.TRUE : Dynamic.FALSE);
          }
        } else if (fieldName.equals("type")) {
          String type = fieldNode.toString();
          if (type.equals(CONTENT_TYPE)) {
            builder.nested = Nested.NO;
          } else if (type.equals(NESTED_CONTENT_TYPE)) {
            nested = true;
          } else {
            throw new MapperParsingException(
                "Trying to parse an object but has a different type ["
                    + type
                    + "] for ["
                    + name
                    + "]");
          }
        } else if (fieldName.equals("include_in_parent")) {
          nestedIncludeInParent = nodeBooleanValue(fieldNode);
        } else if (fieldName.equals("include_in_root")) {
          nestedIncludeInRoot = nodeBooleanValue(fieldNode);
        } else if (fieldName.equals("enabled")) {
          builder.enabled(nodeBooleanValue(fieldNode));
        } else if (fieldName.equals("path")) {
          builder.pathType(parsePathType(name, fieldNode.toString()));
        } else if (fieldName.equals("properties")) {
          parseProperties(builder, (Map<String, Object>) fieldNode, parserContext);
        } else if (fieldName.equals("include_in_all")) {
          builder.includeInAll(nodeBooleanValue(fieldNode));
        } else {
          // unknown fields are delegated to subclasses (no-op here)
          processField(builder, fieldName, fieldNode);
        }
      }

      // the nested flags may arrive in any order, so Nested is only built after the loop
      if (nested) {
        builder.nested = Nested.newNested(nestedIncludeInParent, nestedIncludeInRoot);
      }

      return builder;
    }

    /** Parses each entry of a "properties" node via the type parser registered for its type. */
    private void parseProperties(
        ObjectMapper.Builder objBuilder,
        Map<String, Object> propsNode,
        ParserContext parserContext) {
      for (Map.Entry<String, Object> entry : propsNode.entrySet()) {
        String propName = entry.getKey();
        Map<String, Object> propNode = (Map<String, Object>) entry.getValue();

        String type;
        Object typeNode = propNode.get("type");
        if (typeNode != null) {
          type = typeNode.toString();
        } else {
          // lets see if we can derive this...
          if (propNode.get("properties") != null) {
            type = ObjectMapper.CONTENT_TYPE;
          } else if (propNode.get("fields") != null) {
            type = MultiFieldMapper.CONTENT_TYPE;
          } else {
            throw new MapperParsingException("No type specified for property [" + propName + "]");
          }
        }

        Mapper.TypeParser typeParser = parserContext.typeParser(type);
        if (typeParser == null) {
          throw new MapperParsingException(
              "No handler for type [" + type + "] declared on field [" + propName + "]");
        }
        objBuilder.add(typeParser.parse(propName, propNode, parserContext));
      }
    }

    // factory hook so subclasses can supply a different builder type
    protected Builder createBuilder(String name) {
      return object(name);
    }

    // hook for subclasses to handle fields this parser does not recognize
    protected void processField(Builder builder, String fieldName, Object fieldNode) {}
  }

  // relative name of this object within its parent
  private final String name;

  // full dotted path from the root of the mapping (set by the builder)
  private final String fullPath;

  // when false, parse() skips this object's children entirely
  private final boolean enabled;

  private final Nested nested;

  // "__" + fullPath; used as the type of nested documents (see constructor and parse())
  private final String nestedTypePath;

  private final Filter nestedTypeFilter;

  // null means "inherit from root" (see Defaults.DYNAMIC)
  private final Dynamic dynamic;

  private final ContentPath.Type pathType;

  // tri-state: null = not set yet (see includeInAllIfNotSet)
  private Boolean includeInAll;

  // copy-on-write map of child mappers; swapped under mutex, read without locking
  private volatile ImmutableMap<String, Mapper> mappers = ImmutableMap.of();

  // guards updates to the mappers map
  private final Object mutex = new Object();

  /**
   * Package-private constructor; instances are normally created via {@link Builder}.
   *
   * @param mappers initial child mappers, copied into the internal immutable map (may be null)
   */
  ObjectMapper(
      String name,
      String fullPath,
      boolean enabled,
      Nested nested,
      Dynamic dynamic,
      ContentPath.Type pathType,
      Map<String, Mapper> mappers) {
    this.name = name;
    this.fullPath = fullPath;
    this.enabled = enabled;
    this.nested = nested;
    this.dynamic = dynamic;
    this.pathType = pathType;
    if (mappers != null) {
      this.mappers = copyOf(mappers);
    }
    // nested docs are typed "__<fullPath>" so they can be identified by filters
    this.nestedTypePath = "__" + fullPath;
    this.nestedTypeFilter = new TermFilter(TypeFieldMapper.TERM_FACTORY.createTerm(nestedTypePath));
  }

  /** Returns the (relative) name of this object mapper. */
  @Override
  public String name() {
    return name;
  }

  /**
   * Forces {@code include_in_all} to the given value (a {@code null} argument is ignored) and
   * pushes the same value down to every child mapper that supports inclusion in _all.
   */
  @Override
  public void includeInAll(Boolean includeInAll) {
    if (includeInAll == null) {
      return;
    }
    this.includeInAll = includeInAll;
    // push down to inner mappers that participate in the _all field
    for (Mapper child : mappers.values()) {
      if (child instanceof AllFieldMapper.IncludeInAll) {
        ((AllFieldMapper.IncludeInAll) child).includeInAll(includeInAll);
      }
    }
  }

  /**
   * Sets {@code include_in_all} only when it has not been set yet, then propagates the same
   * "if not set" update to every child mapper that supports inclusion in _all.
   */
  @Override
  public void includeInAllIfNotSet(Boolean includeInAll) {
    if (this.includeInAll == null) {
      this.includeInAll = includeInAll;
    }
    // propagate to inner mappers that participate in the _all field
    for (Mapper child : mappers.values()) {
      if (child instanceof AllFieldMapper.IncludeInAll) {
        ((AllFieldMapper.IncludeInAll) child).includeInAllIfNotSet(includeInAll);
      }
    }
  }

  /** Returns the nested settings of this object mapper. */
  public Nested nested() {
    return nested;
  }

  /** Returns the filter that matches this mapper's nested-type documents. */
  public Filter nestedTypeFilter() {
    return nestedTypeFilter;
  }

  /**
   * Adds (or replaces) a child mapper. The current {@code include_in_all} setting is applied
   * to the new mapper when it supports it, and the copy-on-write {@code mappers} map is
   * swapped under {@code mutex} so concurrent readers always see a consistent snapshot.
   *
   * @return this mapper, for chaining
   */
  public ObjectMapper putMapper(Mapper mapper) {
    if (mapper instanceof AllFieldMapper.IncludeInAll) {
      ((AllFieldMapper.IncludeInAll) mapper).includeInAllIfNotSet(includeInAll);
    }
    synchronized (mutex) {
      mappers = newMapBuilder(mappers).put(mapper.name(), mapper).immutableMap();
    }
    return this;
  }

  /** Recursively visits every child mapper with the given field-mapper listener. */
  @Override
  public void traverse(FieldMapperListener fieldMapperListener) {
    for (Mapper child : mappers.values()) {
      child.traverse(fieldMapperListener);
    }
  }

  /** Notifies the listener of this object mapper itself, then recurses into each child mapper. */
  @Override
  public void traverse(ObjectMapperListener objectMapperListener) {
    objectMapperListener.objectMapper(this);
    for (Mapper child : mappers.values()) {
      child.traverse(objectMapperListener);
    }
  }

  /** Returns the full dotted path of this object within the mapping. */
  public String fullPath() {
    return fullPath;
  }

  /** Returns the "__"-prefixed type path used for this mapper's nested documents. */
  public String nestedTypePath() {
    return this.nestedTypePath;
  }

  /** Returns this mapper's dynamic setting; {@code null} means "inherit from the root". */
  public final Dynamic dynamic() {
    return dynamic;
  }

  /**
   * Parses the current object token stream into the document. A disabled object skips its
   * children; an explicit null object is ignored. For a nested object a dedicated document is
   * created (typed with {@code nestedTypePath} so nested docs can be identified by filters),
   * and afterwards its fields are copied back into the parent and/or root document depending
   * on the nested settings.
   */
  public void parse(ParseContext context) throws IOException {
    if (!enabled) {
      context.parser().skipChildren();
      return;
    }
    XContentParser parser = context.parser();

    String currentFieldName = parser.currentName();
    XContentParser.Token token = parser.currentToken();
    if (token == XContentParser.Token.VALUE_NULL) {
      // the object is null ("obj1" : null), simply bail
      return;
    }

    Document restoreDoc = null;
    if (nested.isNested()) {
      Document nestedDoc = new Document();
      // pre add the uid field if possible (id was already provided)
      Fieldable uidField = context.doc().getFieldable(UidFieldMapper.NAME);
      if (uidField != null) {
        // we don't need to add it as a full uid field in nested docs, since we don't need
        // versioning
        // we also rely on this for UidField#loadVersion

        // this is a deeply nested field
        if (uidField.stringValue() != null) {
          nestedDoc.add(
              new Field(
                  UidFieldMapper.NAME,
                  uidField.stringValue(),
                  Field.Store.NO,
                  Field.Index.NOT_ANALYZED));
        } else {
          nestedDoc.add(
              new Field(
                  UidFieldMapper.NAME,
                  ((UidField) uidField).uid(),
                  Field.Store.NO,
                  Field.Index.NOT_ANALYZED));
        }
      }
      // the type of the nested doc starts with __, so we can identify that its a nested one in
      // filters
      // note, we don't prefix it with the type of the doc since it allows us to execute a nested
      // query
      // across types (for example, with similar nested objects)
      nestedDoc.add(
          new Field(
              TypeFieldMapper.NAME, nestedTypePath, Field.Store.NO, Field.Index.NOT_ANALYZED));
      restoreDoc = context.switchDoc(nestedDoc);
      context.addDoc(nestedDoc);
    }

    ContentPath.Type origPathType = context.path().pathType();
    context.path().pathType(pathType);

    // if we are at the end of the previous object, advance
    if (token == XContentParser.Token.END_OBJECT) {
      token = parser.nextToken();
    }
    if (token == XContentParser.Token.START_OBJECT) {
      // if we are just starting an OBJECT, advance, this is the object we are parsing, we need the
      // name first
      token = parser.nextToken();
    }

    // dispatch each token inside the object to the matching serialize* helper
    while (token != XContentParser.Token.END_OBJECT) {
      if (token == XContentParser.Token.START_OBJECT) {
        serializeObject(context, currentFieldName);
      } else if (token == XContentParser.Token.START_ARRAY) {
        serializeArray(context, currentFieldName);
      } else if (token == XContentParser.Token.FIELD_NAME) {
        currentFieldName = parser.currentName();
      } else if (token == XContentParser.Token.VALUE_NULL) {
        serializeNullValue(context, currentFieldName);
      } else if (token == null) {
        throw new MapperParsingException(
            "object mapping for ["
                + name
                + "] tried to parse as object, but got EOF, has a concrete value been provided to it?");
      } else if (token.isValue()) {
        serializeValue(context, currentFieldName, token);
      }
      token = parser.nextToken();
    }
    // restore the enable path flag
    context.path().pathType(origPathType);
    if (nested.isNested()) {
      Document nestedDoc = context.switchDoc(restoreDoc);
      if (nested.isIncludeInParent()) {
        for (Fieldable field : nestedDoc.getFields()) {
          if (field.name().equals(UidFieldMapper.NAME)
              || field.name().equals(TypeFieldMapper.NAME)) {
            continue;
          } else {
            context.doc().add(field);
          }
        }
      }
      if (nested.isIncludeInRoot()) {
        // don't add it twice, if its included in parent, and we are handling the master doc...
        if (!(nested.isIncludeInParent() && context.doc() == context.rootDoc())) {
          for (Fieldable field : nestedDoc.getFields()) {
            if (field.name().equals(UidFieldMapper.NAME)
                || field.name().equals(TypeFieldMapper.NAME)) {
              continue;
            } else {
              context.rootDoc().add(field);
            }
          }
        }
      }
    }
  }

  /**
   * Handles an explicit null value for {@code lastFieldName}. Null values are only meaningful
   * when an explicit mapping already exists for the field; otherwise they are ignored.
   */
  private void serializeNullValue(ParseContext context, String lastFieldName) throws IOException {
    Mapper existingMapper = mappers.get(lastFieldName);
    if (existingMapper == null) {
      return;
    }
    existingMapper.parse(context);
  }

  /**
   * Parses a nested object for {@code currentFieldName}. If a mapper already exists for the field
   * it is reused; otherwise, depending on the effective dynamic setting, a new object mapper is
   * created dynamically (possibly from a dynamic template), registered, and used — or the object
   * is skipped entirely.
   *
   * @throws MapperParsingException if the object has no field name associated with it
   * @throws StrictDynamicMappingException if dynamic mapping is strict and the field is unmapped
   */
  private void serializeObject(final ParseContext context, String currentFieldName)
      throws IOException {
    if (currentFieldName == null) {
      throw new MapperParsingException(
          "object mapping ["
              + name
              + "] trying to serialize an object with no field associated with it, current value ["
              + context.parser().textOrNull()
              + "]");
    }
    context.path().add(currentFieldName);

    Mapper objectMapper = mappers.get(currentFieldName);
    if (objectMapper != null) {
      objectMapper.parse(context);
    } else {
      // no mapper yet: resolve the effective dynamic setting (local, falling back to root)
      Dynamic dynamic = this.dynamic;
      if (dynamic == null) {
        dynamic = context.root().dynamic();
      }
      if (dynamic == Dynamic.STRICT) {
        throw new StrictDynamicMappingException(currentFieldName);
      } else if (dynamic == Dynamic.TRUE) {
        // we sync here just so we won't add it twice. Its not the end of the world
        // to sync here since next operations will get it before
        boolean newMapper = false;
        synchronized (mutex) {
          // re-check under the lock in case another thread created the mapper meanwhile
          objectMapper = mappers.get(currentFieldName);
          if (objectMapper == null) {
            newMapper = true;
            Mapper.Builder builder =
                context.root().findTemplateBuilder(context, currentFieldName, "object");
            if (builder == null) {
              builder =
                  MapperBuilders.object(currentFieldName)
                      .enabled(true)
                      .dynamic(dynamic)
                      .pathType(pathType);
            }
            // remove the current field name from path, since the object builder adds it as well...
            context.path().remove();
            BuilderContext builderContext =
                new BuilderContext(context.indexSettings(), context.path());
            objectMapper = builder.build(builderContext);
            putMapper(objectMapper);
            // now re add it
            context.path().add(currentFieldName);
            context.addedMapper();
          }
        }
        // traverse and parse outside of the mutex
        if (newMapper) {
          // we need to traverse in case we have a dynamic template and need to add field mappers
          // introduced by it
          objectMapper.traverse(
              new FieldMapperListener() {
                @Override
                public void fieldMapper(FieldMapper fieldMapper) {
                  context.docMapper().addFieldMapper(fieldMapper);
                }
              });
          objectMapper.traverse(
              new ObjectMapperListener() {
                @Override
                public void objectMapper(ObjectMapper objectMapper) {
                  context.docMapper().addObjectMapper(objectMapper);
                }
              });
        }
        // now, parse it
        objectMapper.parse(context);
      } else {
        // not dynamic, read everything up to end object
        context.parser().skipChildren();
      }
    }

    context.path().remove();
  }

  /**
   * Parses an array value for {@code lastFieldName}. If the field's mapper can handle arrays
   * directly (it implements {@code ArrayValueMapperParser}) the whole array is handed to it;
   * otherwise each element is dispatched individually as an object, array, null, or plain value.
   */
  private void serializeArray(ParseContext context, String lastFieldName) throws IOException {
    Mapper mapper = mappers.get(lastFieldName);
    // instanceof is null-safe, so the previous explicit null check was redundant
    if (mapper instanceof ArrayValueMapperParser) {
      mapper.parse(context);
    } else {
      XContentParser parser = context.parser();
      XContentParser.Token token;
      while ((token = parser.nextToken()) != XContentParser.Token.END_ARRAY) {
        if (token == XContentParser.Token.START_OBJECT) {
          serializeObject(context, lastFieldName);
        } else if (token == XContentParser.Token.START_ARRAY) {
          serializeArray(context, lastFieldName);
        } else if (token == XContentParser.Token.FIELD_NAME) {
          lastFieldName = parser.currentName();
        } else if (token == XContentParser.Token.VALUE_NULL) {
          serializeNullValue(context, lastFieldName);
        } else {
          serializeValue(context, lastFieldName, token);
        }
      }
    }
  }

  /**
   * Parses a concrete (leaf) value for {@code currentFieldName}. If the field already has a
   * mapper it is used directly; otherwise, depending on the effective dynamic setting, a new
   * field mapper is created dynamically — first by probing dynamic templates, then (for strings)
   * via date and numeric detection — registered under the mutex, and then used to parse.
   *
   * @throws MapperParsingException if the value has no field name associated with it
   * @throws StrictDynamicMappingException if dynamic mapping is strict and the field is unmapped
   */
  private void serializeValue(
      final ParseContext context, String currentFieldName, XContentParser.Token token)
      throws IOException {
    if (currentFieldName == null) {
      throw new MapperParsingException(
          "object mapping ["
              + name
              + "] trying to serialize a value with no field associated with it, current value ["
              + context.parser().textOrNull()
              + "]");
    }
    Mapper mapper = mappers.get(currentFieldName);
    if (mapper != null) {
      mapper.parse(context);
      return;
    }
    // no mapper yet: resolve the effective dynamic setting (local, falling back to root)
    Dynamic dynamic = this.dynamic;
    if (dynamic == null) {
      dynamic = context.root().dynamic();
    }
    if (dynamic == Dynamic.STRICT) {
      throw new StrictDynamicMappingException(currentFieldName);
    }
    if (dynamic == Dynamic.FALSE) {
      // dynamic mapping disabled: silently ignore the unmapped field
      return;
    }
    // we sync here since we don't want to add this field twice to the document mapper
    // its not the end of the world, since we add it to the mappers once we create it
    // so next time we won't even get here for this field
    boolean newMapper = false;
    synchronized (mutex) {
      // re-check under the lock in case another thread created the mapper meanwhile
      mapper = mappers.get(currentFieldName);
      if (mapper == null) {
        newMapper = true;
        BuilderContext builderContext = new BuilderContext(context.indexSettings(), context.path());
        if (token == XContentParser.Token.VALUE_STRING) {
          boolean resolved = false;

          // do a quick test to see if its fits a dynamic template, if so, use it.
          // we need to do it here so we can handle things like attachment templates, where calling
          // text (to see if its a date) causes the binary value to be cleared
          if (!resolved) {
            Mapper.Builder builder =
                context.root().findTemplateBuilder(context, currentFieldName, "string", null);
            if (builder != null) {
              mapper = builder.build(builderContext);
              resolved = true;
            }
          }

          if (!resolved && context.parser().textLength() == 0) {
            // empty string with no mapping, treat it like null value
            return;
          }

          if (!resolved && context.root().dateDetection()) {
            String text = context.parser().text();
            // a safe check since "1" gets parsed as well
            if (text.contains(":") || text.contains("-") || text.contains("/")) {
              for (FormatDateTimeFormatter dateTimeFormatter :
                  context.root().dynamicDateTimeFormatters()) {
                try {
                  dateTimeFormatter.parser().parseMillis(text);
                  Mapper.Builder builder =
                      context.root().findTemplateBuilder(context, currentFieldName, "date");
                  if (builder == null) {
                    builder = dateField(currentFieldName).dateTimeFormatter(dateTimeFormatter);
                  }
                  mapper = builder.build(builderContext);
                  resolved = true;
                  break;
                } catch (Exception e) {
                  // failure to parse this, continue
                }
              }
            }
          }
          if (!resolved && context.root().numericDetection()) {
            String text = context.parser().text();
            try {
              Long.parseLong(text);
              Mapper.Builder builder =
                  context.root().findTemplateBuilder(context, currentFieldName, "long");
              if (builder == null) {
                builder = longField(currentFieldName);
              }
              mapper = builder.build(builderContext);
              resolved = true;
            } catch (Exception e) {
              // not a long number
            }
            if (!resolved) {
              try {
                Double.parseDouble(text);
                Mapper.Builder builder =
                    context.root().findTemplateBuilder(context, currentFieldName, "double");
                if (builder == null) {
                  builder = doubleField(currentFieldName);
                }
                mapper = builder.build(builderContext);
                resolved = true;
              } catch (Exception e) {
                // not a double number
              }
            }
          }
          // DON'T do automatic ip detection logic, since it messes up with docs that have hosts and
          // ips
          // check if its an ip
          //                if (!resolved && text.indexOf('.') != -1) {
          //                    try {
          //                        IpFieldMapper.ipToLong(text);
          //                        XContentMapper.Builder builder =
          // context.root().findTemplateBuilder(context, currentFieldName, "ip");
          //                        if (builder == null) {
          //                            builder = ipField(currentFieldName);
          //                        }
          //                        mapper = builder.build(builderContext);
          //                        resolved = true;
          //                    } catch (Exception e) {
          //                        // failure to parse, not ip...
          //                    }
          //                }
          if (!resolved) {
            // fall back to a plain string field
            Mapper.Builder builder =
                context.root().findTemplateBuilder(context, currentFieldName, "string");
            if (builder == null) {
              builder = stringField(currentFieldName);
            }
            mapper = builder.build(builderContext);
          }
        } else if (token == XContentParser.Token.VALUE_NUMBER) {
          XContentParser.NumberType numberType = context.parser().numberType();
          if (numberType == XContentParser.NumberType.INT) {
            if (context.parser().estimatedNumberType()) {
              Mapper.Builder builder =
                  context.root().findTemplateBuilder(context, currentFieldName, "long");
              if (builder == null) {
                builder = longField(currentFieldName);
              }
              mapper = builder.build(builderContext);
            } else {
              Mapper.Builder builder =
                  context.root().findTemplateBuilder(context, currentFieldName, "integer");
              if (builder == null) {
                builder = integerField(currentFieldName);
              }
              mapper = builder.build(builderContext);
            }
          } else if (numberType == XContentParser.NumberType.LONG) {
            Mapper.Builder builder =
                context.root().findTemplateBuilder(context, currentFieldName, "long");
            if (builder == null) {
              builder = longField(currentFieldName);
            }
            mapper = builder.build(builderContext);
          } else if (numberType == XContentParser.NumberType.FLOAT) {
            if (context.parser().estimatedNumberType()) {
              Mapper.Builder builder =
                  context.root().findTemplateBuilder(context, currentFieldName, "double");
              if (builder == null) {
                builder = doubleField(currentFieldName);
              }
              mapper = builder.build(builderContext);
            } else {
              Mapper.Builder builder =
                  context.root().findTemplateBuilder(context, currentFieldName, "float");
              if (builder == null) {
                builder = floatField(currentFieldName);
              }
              mapper = builder.build(builderContext);
            }
          } else if (numberType == XContentParser.NumberType.DOUBLE) {
            Mapper.Builder builder =
                context.root().findTemplateBuilder(context, currentFieldName, "double");
            if (builder == null) {
              builder = doubleField(currentFieldName);
            }
            mapper = builder.build(builderContext);
          }
        } else if (token == XContentParser.Token.VALUE_BOOLEAN) {
          Mapper.Builder builder =
              context.root().findTemplateBuilder(context, currentFieldName, "boolean");
          if (builder == null) {
            builder = booleanField(currentFieldName);
          }
          mapper = builder.build(builderContext);
        } else {
          // unknown token type: only a dynamic template can supply a mapper here
          Mapper.Builder builder =
              context.root().findTemplateBuilder(context, currentFieldName, null);
          if (builder != null) {
            mapper = builder.build(builderContext);
          } else {
            // TODO how do we identify dynamically that its a binary value?
            throw new ElasticSearchIllegalStateException(
                "Can't handle serializing a dynamic type with content token ["
                    + token
                    + "] and field name ["
                    + currentFieldName
                    + "]");
          }
        }
        // NOTE(review): for VALUE_NUMBER with a NumberType that matches none of the branches
        // above, mapper could still be null here and putMapper(null) would fail — presumably
        // unreachable; confirm
        putMapper(mapper);
        context.addedMapper();
      }
    }
    // traverse outside of the mutex so the doc mapper picks up the newly created field mapper(s)
    if (newMapper) {
      mapper.traverse(
          new FieldMapperListener() {
            @Override
            public void fieldMapper(FieldMapper fieldMapper) {
              context.docMapper().addFieldMapper(fieldMapper);
            }
          });
    }
    mapper.parse(context);
  }

  /**
   * Merges another object mapping into this one. Non-object mappings are rejected via a merge
   * conflict. New child mappers are added (unless simulating), existing ones are merged
   * recursively, and a plain field mapper being upgraded to a multi-field mapper is replaced.
   * Listener notification happens outside the mutex to avoid running alien code under the lock.
   *
   * @param mergeWith the mapping to merge into this one
   * @param mergeContext carries merge flags (e.g. simulate) and collects conflicts
   */
  @Override
  public void merge(final Mapper mergeWith, final MergeContext mergeContext)
      throws MergeMappingException {
    if (!(mergeWith instanceof ObjectMapper)) {
      mergeContext.addConflict(
          "Can't merge a non object mapping ["
              + mergeWith.name()
              + "] with an object mapping ["
              + name()
              + "]");
      return;
    }
    ObjectMapper mergeWithObject = (ObjectMapper) mergeWith;

    // let subclasses merge their own attributes first
    doMerge(mergeWithObject, mergeContext);

    List<Mapper> mappersToTraverse = new ArrayList<Mapper>();
    synchronized (mutex) {
      for (Mapper mergeWithMapper : mergeWithObject.mappers.values()) {
        Mapper mergeIntoMapper = mappers.get(mergeWithMapper.name());
        if (mergeIntoMapper == null) {
          // no mapping, simply add it if not simulating
          if (!mergeContext.mergeFlags().simulate()) {
            putMapper(mergeWithMapper);
            mappersToTraverse.add(mergeWithMapper);
          }
        } else {
          if ((mergeWithMapper instanceof MultiFieldMapper)
              && !(mergeIntoMapper instanceof MultiFieldMapper)) {
            // upgrading a plain mapper to a multi-field mapper: merge the existing mapper into
            // the multi-field one and replace the registration
            MultiFieldMapper mergeWithMultiField = (MultiFieldMapper) mergeWithMapper;
            mergeWithMultiField.merge(mergeIntoMapper, mergeContext);
            if (!mergeContext.mergeFlags().simulate()) {
              putMapper(mergeWithMultiField);
              // record traverse events for all of the multi-field's sub mappers
              mappersToTraverse.addAll(mergeWithMultiField.mappers().values());
            }
          } else {
            mergeIntoMapper.merge(mergeWithMapper, mergeContext);
          }
        }
      }
    }
    // call this outside of the mutex
    for (Mapper mapper : mappersToTraverse) {
      mapper.traverse(
          new FieldMapperListener() {
            @Override
            public void fieldMapper(FieldMapper fieldMapper) {
              mergeContext.docMapper().addFieldMapper(fieldMapper);
            }
          });
      mapper.traverse(
          new ObjectMapperListener() {
            @Override
            public void objectMapper(ObjectMapper objectMapper) {
              mergeContext.docMapper().addObjectMapper(objectMapper);
            }
          });
    }
  }

  /** Hook for subclasses to merge additional per-mapper state; intentionally a no-op here. */
  protected void doMerge(ObjectMapper mergeWith, MergeContext mergeContext) {}

  /** Cascades the close call to every child mapper so each can release its resources. */
  @Override
  public void close() {
    for (Mapper childMapper : mappers.values()) {
      childMapper.close();
    }
  }

  /** Serializes this mapper with no custom content and no additional mappers. */
  @Override
  public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
    toXContent(builder, params, null, Mapper.EMPTY_ARRAY);
    return builder;
  }

  /**
   * Serializes this object mapping to x-content: nested/type flags, dynamic/enabled/path
   * settings, an optional {@code custom} contribution, internal mappers, then
   * {@code additionalMappers}, and finally the non-internal child mappers under "properties".
   * Mappers are sorted by name so the serialized form is stable.
   */
  public void toXContent(
      XContentBuilder builder, Params params, ToXContent custom, Mapper... additionalMappers)
      throws IOException {
    builder.startObject(name);
    if (nested.isNested()) {
      builder.field("type", NESTED_CONTENT_TYPE);
      if (nested.isIncludeInParent()) {
        builder.field("include_in_parent", true);
      }
      if (nested.isIncludeInRoot()) {
        builder.field("include_in_root", true);
      }
    } else if (mappers
        .isEmpty()) { // only write the object content type if there are no properties, otherwise,
                      // it is automatically detected
      builder.field("type", CONTENT_TYPE);
    }
    // grr, ugly! on root, dynamic defaults to TRUE, on children, it defaults to null to
    // inherit the root behavior
    // NOTE(review): toLowerCase() below is locale-sensitive; toLowerCase(Locale.ROOT) would be
    // safer for enum names — confirm before changing
    if (this instanceof RootObjectMapper) {
      if (dynamic != Dynamic.TRUE) {
        builder.field("dynamic", dynamic.name().toLowerCase());
      }
    } else {
      if (dynamic != Defaults.DYNAMIC) {
        builder.field("dynamic", dynamic.name().toLowerCase());
      }
    }
    if (enabled != Defaults.ENABLED) {
      builder.field("enabled", enabled);
    }
    if (pathType != Defaults.PATH_TYPE) {
      builder.field("path", pathType.name().toLowerCase());
    }
    if (includeInAll != null) {
      builder.field("include_in_all", includeInAll);
    }

    if (custom != null) {
      custom.toXContent(builder, params);
    }

    // subclass-provided extra fields
    doXContent(builder, params);

    // sort the mappers so we get consistent serialization format
    TreeMap<String, Mapper> sortedMappers = new TreeMap<String, Mapper>(mappers);

    // check internal mappers first (this is only relevant for root object)
    for (Mapper mapper : sortedMappers.values()) {
      if (mapper instanceof InternalMapper) {
        mapper.toXContent(builder, params);
      }
    }
    if (additionalMappers != null && additionalMappers.length > 0) {
      TreeMap<String, Mapper> additionalSortedMappers = new TreeMap<String, Mapper>();
      for (Mapper mapper : additionalMappers) {
        additionalSortedMappers.put(mapper.name(), mapper);
      }

      for (Mapper mapper : additionalSortedMappers.values()) {
        mapper.toXContent(builder, params);
      }
    }

    // non-internal child mappers go under "properties"
    if (!mappers.isEmpty()) {
      builder.startObject("properties");
      for (Mapper mapper : sortedMappers.values()) {
        if (!(mapper instanceof InternalMapper)) {
          mapper.toXContent(builder, params);
        }
      }
      builder.endObject();
    }
    builder.endObject();
  }

  protected void doXContent(XContentBuilder builder, Params params) throws IOException {}
}
Code example #23
0
  /**
   * Closes (and optionally deletes) the shard with the given id, tearing down its components in
   * dependency order: plugin shard services, translog service, shard management, the shard and
   * its engine, the gateway, the translog, and finally the store.
   *
   * @param delete whether the shard's data should be deleted
   * @param snapshotGateway whether to snapshot to the gateway before closing
   * @param deleteGateway whether to delete the gateway contents on close
   */
  private void deleteShard(
      int shardId, boolean delete, boolean snapshotGateway, boolean deleteGateway)
      throws ElasticSearchException {
    Injector shardInjector;
    IndexShard indexShard;
    synchronized (this) {
      // copy-on-write removal from the immutable maps, performed under the lock
      Map<Integer, Injector> tmpShardInjectors = newHashMap(shardsInjectors);
      shardInjector = tmpShardInjectors.remove(shardId);
      if (shardInjector == null) {
        if (!delete) {
          return;
        }
        // deleting an unknown shard is an error; closing one is a no-op
        throw new IndexShardMissingException(new ShardId(index, shardId));
      }
      shardsInjectors = ImmutableMap.copyOf(tmpShardInjectors);
      if (delete) {
        logger.debug("deleting shard_id [{}]", shardId);
      }

      Map<Integer, IndexShard> tmpShardsMap = newHashMap(shards);
      indexShard = tmpShardsMap.remove(shardId);
      shards = ImmutableMap.copyOf(tmpShardsMap);
    }

    ShardId sId = new ShardId(index, shardId);

    indicesLifecycle.beforeIndexShardClosed(sId, indexShard, delete);

    // close plugin-provided per-shard services first
    for (Class<? extends CloseableIndexComponent> closeable : pluginsService.shardServices()) {
      try {
        shardInjector.getInstance(closeable).close(delete);
      } catch (Exception e) {
        logger.debug("failed to clean plugin shard service [{}]", e, closeable);
      }
    }

    try {
      // now we can close the translog service, we need to close it before we close the shard
      shardInjector.getInstance(TranslogService.class).close();
    } catch (Exception e) {
      // ignore
    }

    // close shard actions
    if (indexShard != null) {
      shardInjector.getInstance(IndexShardManagement.class).close();
    }

    // this logic is tricky, we want to close the engine so we rollback the changes done to it
    // and close the shard so no operations are allowed to it
    if (indexShard != null) {
      indexShard.close();
    }
    try {
      shardInjector.getInstance(Engine.class).close();
    } catch (Exception e) {
      // ignore
    }

    try {
      // now, we can snapshot to the gateway, it will be only the translog
      if (snapshotGateway) {
        shardInjector.getInstance(IndexShardGatewayService.class).snapshotOnClose();
      }
    } catch (Exception e) {
      // ignore
    }
    try {
      shardInjector.getInstance(IndexShardGatewayService.class).close(deleteGateway);
    } catch (Exception e) {
      // ignore
    }
    try {
      // now we can close the translog
      shardInjector.getInstance(Translog.class).close(delete);
    } catch (Exception e) {
      // ignore
    }

    // call this before we close the store, so we can release resources for it
    indicesLifecycle.afterIndexShardClosed(sId, delete);

    // if we delete or have no gateway or the store is not persistent, clean the store...
    Store store = shardInjector.getInstance(Store.class);
    if (delete || indexGateway.type().equals(NoneGateway.TYPE) || !indexStore.persistent()) {
      try {
        store.fullDelete();
      } catch (IOException e) {
        logger.warn("failed to clean store on shard deletion", e);
      }
    }
    // and close it
    try {
      store.close();
    } catch (IOException e) {
      logger.warn("failed to close store on shard deletion", e);
    }

    // NOTE(review): this closes the index-level "injector" field, not the local "shardInjector"
    // that was just removed — looks like it should be Injectors.close(shardInjector); confirm
    Injectors.close(injector);

    // delete the shard location if needed
    if (delete || indexGateway.type().equals(NoneGateway.TYPE)) {
      FileSystemUtils.deleteRecursively(nodeEnv.shardLocation(sId));
    }
  }
Code example #24
0
 /**
  * Returns the injector for the given shard id, or {@code null} when no injector is registered
  * for that id.
  */
 @Override
 public Injector shardInjector(int shardId) throws ElasticSearchException {
   Injector shardLevelInjector = shardsInjectors.get(shardId);
   return shardLevelInjector;
 }
Code example #25
0
  /**
   * Parses a concrete (leaf) value for {@code currentFieldName}. If the field already has a
   * mapper it is used directly; otherwise, depending on the effective dynamic setting, a new
   * field mapper is created dynamically — first by probing dynamic templates, then (for strings)
   * via date and numeric detection — registered under the mutex, and then used to parse.
   *
   * @throws MapperParsingException if the value has no field name associated with it
   * @throws StrictDynamicMappingException if dynamic mapping is strict and the field is unmapped
   */
  private void serializeValue(
      final ParseContext context, String currentFieldName, XContentParser.Token token)
      throws IOException {
    if (currentFieldName == null) {
      throw new MapperParsingException(
          "object mapping ["
              + name
              + "] trying to serialize a value with no field associated with it, current value ["
              + context.parser().textOrNull()
              + "]");
    }
    Mapper mapper = mappers.get(currentFieldName);
    if (mapper != null) {
      mapper.parse(context);
      return;
    }
    // no mapper yet: resolve the effective dynamic setting (local, falling back to root)
    Dynamic dynamic = this.dynamic;
    if (dynamic == null) {
      dynamic = context.root().dynamic();
    }
    if (dynamic == Dynamic.STRICT) {
      throw new StrictDynamicMappingException(currentFieldName);
    }
    if (dynamic == Dynamic.FALSE) {
      // dynamic mapping disabled: silently ignore the unmapped field
      return;
    }
    // we sync here since we don't want to add this field twice to the document mapper
    // its not the end of the world, since we add it to the mappers once we create it
    // so next time we won't even get here for this field
    boolean newMapper = false;
    synchronized (mutex) {
      // re-check under the lock in case another thread created the mapper meanwhile
      mapper = mappers.get(currentFieldName);
      if (mapper == null) {
        newMapper = true;
        BuilderContext builderContext = new BuilderContext(context.indexSettings(), context.path());
        if (token == XContentParser.Token.VALUE_STRING) {
          boolean resolved = false;

          // do a quick test to see if its fits a dynamic template, if so, use it.
          // we need to do it here so we can handle things like attachment templates, where calling
          // text (to see if its a date) causes the binary value to be cleared
          if (!resolved) {
            Mapper.Builder builder =
                context.root().findTemplateBuilder(context, currentFieldName, "string", null);
            if (builder != null) {
              mapper = builder.build(builderContext);
              resolved = true;
            }
          }

          if (!resolved && context.parser().textLength() == 0) {
            // empty string with no mapping, treat it like null value
            return;
          }

          if (!resolved && context.root().dateDetection()) {
            String text = context.parser().text();
            // a safe check since "1" gets parsed as well
            if (text.contains(":") || text.contains("-") || text.contains("/")) {
              for (FormatDateTimeFormatter dateTimeFormatter :
                  context.root().dynamicDateTimeFormatters()) {
                try {
                  dateTimeFormatter.parser().parseMillis(text);
                  Mapper.Builder builder =
                      context.root().findTemplateBuilder(context, currentFieldName, "date");
                  if (builder == null) {
                    builder = dateField(currentFieldName).dateTimeFormatter(dateTimeFormatter);
                  }
                  mapper = builder.build(builderContext);
                  resolved = true;
                  break;
                } catch (Exception e) {
                  // failure to parse this, continue
                }
              }
            }
          }
          if (!resolved && context.root().numericDetection()) {
            String text = context.parser().text();
            try {
              Long.parseLong(text);
              Mapper.Builder builder =
                  context.root().findTemplateBuilder(context, currentFieldName, "long");
              if (builder == null) {
                builder = longField(currentFieldName);
              }
              mapper = builder.build(builderContext);
              resolved = true;
            } catch (Exception e) {
              // not a long number
            }
            if (!resolved) {
              try {
                Double.parseDouble(text);
                Mapper.Builder builder =
                    context.root().findTemplateBuilder(context, currentFieldName, "double");
                if (builder == null) {
                  builder = doubleField(currentFieldName);
                }
                mapper = builder.build(builderContext);
                resolved = true;
              } catch (Exception e) {
                // not a double number
              }
            }
          }
          // DON'T do automatic ip detection logic, since it messes up with docs that have hosts and
          // ips
          // check if its an ip
          //                if (!resolved && text.indexOf('.') != -1) {
          //                    try {
          //                        IpFieldMapper.ipToLong(text);
          //                        XContentMapper.Builder builder =
          // context.root().findTemplateBuilder(context, currentFieldName, "ip");
          //                        if (builder == null) {
          //                            builder = ipField(currentFieldName);
          //                        }
          //                        mapper = builder.build(builderContext);
          //                        resolved = true;
          //                    } catch (Exception e) {
          //                        // failure to parse, not ip...
          //                    }
          //                }
          if (!resolved) {
            // fall back to a plain string field
            Mapper.Builder builder =
                context.root().findTemplateBuilder(context, currentFieldName, "string");
            if (builder == null) {
              builder = stringField(currentFieldName);
            }
            mapper = builder.build(builderContext);
          }
        } else if (token == XContentParser.Token.VALUE_NUMBER) {
          XContentParser.NumberType numberType = context.parser().numberType();
          if (numberType == XContentParser.NumberType.INT) {
            if (context.parser().estimatedNumberType()) {
              Mapper.Builder builder =
                  context.root().findTemplateBuilder(context, currentFieldName, "long");
              if (builder == null) {
                builder = longField(currentFieldName);
              }
              mapper = builder.build(builderContext);
            } else {
              Mapper.Builder builder =
                  context.root().findTemplateBuilder(context, currentFieldName, "integer");
              if (builder == null) {
                builder = integerField(currentFieldName);
              }
              mapper = builder.build(builderContext);
            }
          } else if (numberType == XContentParser.NumberType.LONG) {
            Mapper.Builder builder =
                context.root().findTemplateBuilder(context, currentFieldName, "long");
            if (builder == null) {
              builder = longField(currentFieldName);
            }
            mapper = builder.build(builderContext);
          } else if (numberType == XContentParser.NumberType.FLOAT) {
            if (context.parser().estimatedNumberType()) {
              Mapper.Builder builder =
                  context.root().findTemplateBuilder(context, currentFieldName, "double");
              if (builder == null) {
                builder = doubleField(currentFieldName);
              }
              mapper = builder.build(builderContext);
            } else {
              Mapper.Builder builder =
                  context.root().findTemplateBuilder(context, currentFieldName, "float");
              if (builder == null) {
                builder = floatField(currentFieldName);
              }
              mapper = builder.build(builderContext);
            }
          } else if (numberType == XContentParser.NumberType.DOUBLE) {
            Mapper.Builder builder =
                context.root().findTemplateBuilder(context, currentFieldName, "double");
            if (builder == null) {
              builder = doubleField(currentFieldName);
            }
            mapper = builder.build(builderContext);
          }
        } else if (token == XContentParser.Token.VALUE_BOOLEAN) {
          Mapper.Builder builder =
              context.root().findTemplateBuilder(context, currentFieldName, "boolean");
          if (builder == null) {
            builder = booleanField(currentFieldName);
          }
          mapper = builder.build(builderContext);
        } else {
          // unknown token type: only a dynamic template can supply a mapper here
          Mapper.Builder builder =
              context.root().findTemplateBuilder(context, currentFieldName, null);
          if (builder != null) {
            mapper = builder.build(builderContext);
          } else {
            // TODO how do we identify dynamically that its a binary value?
            throw new ElasticSearchIllegalStateException(
                "Can't handle serializing a dynamic type with content token ["
                    + token
                    + "] and field name ["
                    + currentFieldName
                    + "]");
          }
        }
        // NOTE(review): for VALUE_NUMBER with a NumberType that matches none of the branches
        // above, mapper could still be null here and putMapper(null) would fail — presumably
        // unreachable; confirm
        putMapper(mapper);
        context.addedMapper();
      }
    }
    // traverse outside of the mutex so the doc mapper picks up the newly created field mapper(s)
    if (newMapper) {
      mapper.traverse(
          new FieldMapperListener() {
            @Override
            public void fieldMapper(FieldMapper fieldMapper) {
              context.docMapper().addFieldMapper(fieldMapper);
            }
          });
    }
    mapper.parse(context);
  }
Code example #26
0
  /**
   * Merges another object mapping into this one. Non-object mappings are rejected via a merge
   * conflict. New child mappers are added (unless simulating), existing ones are merged
   * recursively, and a plain field mapper being upgraded to a multi-field mapper is replaced.
   * Listener notification happens outside the mutex to avoid running alien code under the lock.
   *
   * @param mergeWith the mapping to merge into this one
   * @param mergeContext carries merge flags (e.g. simulate) and collects conflicts
   */
  @Override
  public void merge(final Mapper mergeWith, final MergeContext mergeContext)
      throws MergeMappingException {
    if (!(mergeWith instanceof ObjectMapper)) {
      mergeContext.addConflict(
          "Can't merge a non object mapping ["
              + mergeWith.name()
              + "] with an object mapping ["
              + name()
              + "]");
      return;
    }
    ObjectMapper mergeWithObject = (ObjectMapper) mergeWith;

    // let subclasses merge their own attributes first
    doMerge(mergeWithObject, mergeContext);

    List<Mapper> mappersToTraverse = new ArrayList<Mapper>();
    synchronized (mutex) {
      for (Mapper mergeWithMapper : mergeWithObject.mappers.values()) {
        Mapper mergeIntoMapper = mappers.get(mergeWithMapper.name());
        if (mergeIntoMapper == null) {
          // no mapping, simply add it if not simulating
          if (!mergeContext.mergeFlags().simulate()) {
            putMapper(mergeWithMapper);
            mappersToTraverse.add(mergeWithMapper);
          }
        } else {
          if ((mergeWithMapper instanceof MultiFieldMapper)
              && !(mergeIntoMapper instanceof MultiFieldMapper)) {
            // upgrading a plain mapper to a multi-field mapper: merge the existing mapper into
            // the multi-field one and replace the registration
            MultiFieldMapper mergeWithMultiField = (MultiFieldMapper) mergeWithMapper;
            mergeWithMultiField.merge(mergeIntoMapper, mergeContext);
            if (!mergeContext.mergeFlags().simulate()) {
              putMapper(mergeWithMultiField);
              // record traverse events for all of the multi-field's sub mappers
              mappersToTraverse.addAll(mergeWithMultiField.mappers().values());
            }
          } else {
            mergeIntoMapper.merge(mergeWithMapper, mergeContext);
          }
        }
      }
    }
    // call this outside of the mutex
    for (Mapper mapper : mappersToTraverse) {
      mapper.traverse(
          new FieldMapperListener() {
            @Override
            public void fieldMapper(FieldMapper fieldMapper) {
              mergeContext.docMapper().addFieldMapper(fieldMapper);
            }
          });
      mapper.traverse(
          new ObjectMapperListener() {
            @Override
            public void objectMapper(ObjectMapper objectMapper) {
              mergeContext.docMapper().addObjectMapper(objectMapper);
            }
          });
    }
  }
Code example #27
0
/**
 * Default {@link IndexService} implementation. Owns the per-index services (mapper, analysis,
 * query parsing, similarity, cache, engine, gateway, store) and manages the lifecycle of the
 * index's shards, each of which lives in its own child {@link Injector}.
 *
 * @author kimchy (shay.banon)
 */
public class InternalIndexService extends AbstractIndexComponent implements IndexService {

  private final Injector injector;

  private final Settings indexSettings;

  private final NodeEnvironment nodeEnv;

  private final ThreadPool threadPool;

  private final PluginsService pluginsService;

  private final InternalIndicesLifecycle indicesLifecycle;

  private final AnalysisService analysisService;

  private final MapperService mapperService;

  private final IndexQueryParserService queryParserService;

  private final SimilarityService similarityService;

  private final IndexCache indexCache;

  private final IndexEngine indexEngine;

  private final IndexGateway indexGateway;

  private final IndexStore indexStore;

  // Copy-on-write maps keyed by shard id: replaced wholesale while synchronized on "this",
  // read lock-free through the volatile reference.
  private volatile ImmutableMap<Integer, Injector> shardsInjectors = ImmutableMap.of();

  private volatile ImmutableMap<Integer, IndexShard> shards = ImmutableMap.of();

  private final CleanCacheOnIndicesLifecycleListener cleanCacheOnIndicesLifecycleListener =
      new CleanCacheOnIndicesLifecycleListener();

  @Inject
  public InternalIndexService(
      Injector injector,
      Index index,
      @IndexSettings Settings indexSettings,
      NodeEnvironment nodeEnv,
      ThreadPool threadPool,
      AnalysisService analysisService,
      MapperService mapperService,
      IndexQueryParserService queryParserService,
      SimilarityService similarityService,
      IndexCache indexCache,
      IndexEngine indexEngine,
      IndexGateway indexGateway,
      IndexStore indexStore) {
    super(index, indexSettings);
    this.injector = injector;
    this.nodeEnv = nodeEnv;
    this.threadPool = threadPool;
    this.indexSettings = indexSettings;
    this.analysisService = analysisService;
    this.mapperService = mapperService;
    this.queryParserService = queryParserService;
    this.similarityService = similarityService;
    this.indexCache = indexCache;
    this.indexEngine = indexEngine;
    this.indexGateway = indexGateway;
    this.indexStore = indexStore;

    // Resolved lazily from the injector to avoid widening the (already large) constructor.
    this.pluginsService = injector.getInstance(PluginsService.class);
    this.indicesLifecycle = (InternalIndicesLifecycle) injector.getInstance(IndicesLifecycle.class);

    this.indicesLifecycle.addListener(cleanCacheOnIndicesLifecycleListener);
  }

  @Override
  public int numberOfShards() {
    return shards.size();
  }

  @Override
  public UnmodifiableIterator<IndexShard> iterator() {
    return shards.values().iterator();
  }

  @Override
  public boolean hasShard(int shardId) {
    return shards.containsKey(shardId);
  }

  /** Returns the shard with the given id, or {@code null} if it does not exist locally. */
  @Override
  public IndexShard shard(int shardId) {
    return shards.get(shardId);
  }

  @Override
  public IndexShard shardSafe(int shardId) throws IndexShardMissingException {
    IndexShard indexShard = shard(shardId);
    if (indexShard == null) {
      throw new IndexShardMissingException(new ShardId(index, shardId));
    }
    return indexShard;
  }

  @Override
  public ImmutableSet<Integer> shardIds() {
    return ImmutableSet.copyOf(shards.keySet());
  }

  @Override
  public Injector injector() {
    return injector;
  }

  @Override
  public IndexGateway gateway() {
    return indexGateway;
  }

  @Override
  public IndexStore store() {
    return indexStore;
  }

  @Override
  public IndexCache cache() {
    return indexCache;
  }

  @Override
  public AnalysisService analysisService() {
    return this.analysisService;
  }

  @Override
  public MapperService mapperService() {
    return mapperService;
  }

  @Override
  public IndexQueryParserService queryParserService() {
    return queryParserService;
  }

  @Override
  public SimilarityService similarityService() {
    return similarityService;
  }

  @Override
  public IndexEngine engine() {
    return indexEngine;
  }

  /**
   * Closes all shards of this index concurrently, blocking until every shard is closed.
   *
   * @param delete when {@code true}, shard data (and gateway state) is deleted as well;
   *               when {@code false}, the shard's gateway is snapshotted before close
   */
  @Override
  public void close(final boolean delete) {
    try {
      Set<Integer> shardIds = shardIds();
      final CountDownLatch latch = new CountDownLatch(shardIds.size());
      for (final int shardId : shardIds) {
        threadPool
            .cached()
            .execute(
                new Runnable() {
                  @Override
                  public void run() {
                    try {
                      deleteShard(shardId, delete, !delete, delete);
                    } catch (Exception e) {
                      logger.warn("failed to close shard, delete [{}]", e, delete);
                    } finally {
                      latch.countDown();
                    }
                  }
                });
      }
      try {
        latch.await();
      } catch (InterruptedException e) {
        throw new ElasticSearchInterruptedException(
            "interrupted closing index [ " + index().name() + "]", e);
      }
    } finally {
      indicesLifecycle.removeListener(cleanCacheOnIndicesLifecycleListener);
    }
  }

  @Override
  public Injector shardInjector(int shardId) throws ElasticSearchException {
    return shardsInjectors.get(shardId);
  }

  @Override
  public Injector shardInjectorSafe(int shardId) throws IndexShardMissingException {
    Injector shardInjector = shardInjector(shardId);
    if (shardInjector == null) {
      throw new IndexShardMissingException(new ShardId(index, shardId));
    }
    return shardInjector;
  }

  /**
   * Creates a new local shard with the given id, wiring it up in a dedicated child injector.
   *
   * @throws IndexShardAlreadyExistsException if a shard with that id already exists locally
   */
  @Override
  public synchronized IndexShard createShard(int sShardId) throws ElasticSearchException {
    ShardId shardId = new ShardId(index, sShardId);
    if (shardsInjectors.containsKey(shardId.id())) {
      throw new IndexShardAlreadyExistsException(shardId + " already exists");
    }

    indicesLifecycle.beforeIndexShardCreated(shardId);

    logger.debug("creating shard_id [{}]", shardId.id());

    ModulesBuilder modules = new ModulesBuilder();
    modules.add(new ShardsPluginsModule(indexSettings, pluginsService));
    modules.add(new IndexShardModule(shardId));
    modules.add(new StoreModule(indexSettings, injector.getInstance(IndexStore.class)));
    modules.add(new DeletionPolicyModule(indexSettings));
    modules.add(new MergePolicyModule(indexSettings));
    modules.add(new MergeSchedulerModule(indexSettings));
    modules.add(new TranslogModule(indexSettings));
    modules.add(new EngineModule(indexSettings));
    modules.add(new IndexShardGatewayModule(injector.getInstance(IndexGateway.class)));

    Injector shardInjector = modules.createChildInjector(injector);

    shardsInjectors =
        newMapBuilder(shardsInjectors).put(shardId.id(), shardInjector).immutableMap();

    IndexShard indexShard = shardInjector.getInstance(IndexShard.class);

    indicesLifecycle.afterIndexShardCreated(indexShard);

    shards = newMapBuilder(shards).put(shardId.id(), indexShard).immutableMap();

    return indexShard;
  }

  @Override
  public synchronized void cleanShard(int shardId) throws ElasticSearchException {
    deleteShard(shardId, true, false, false);
  }

  @Override
  public synchronized void removeShard(int shardId) throws ElasticSearchException {
    deleteShard(shardId, false, false, false);
  }

  /**
   * Removes a shard from the local maps and tears it down in dependency order: plugin shard
   * services, translog service, shard management, the shard itself, engine, gateway, translog,
   * store, and finally the shard injector and (optionally) the on-disk shard location.
   *
   * @param delete          delete shard data (store contents and, when requested, gateway state)
   * @param snapshotGateway snapshot the (translog-only) gateway state before closing it
   * @param deleteGateway   delete the gateway state when closing it
   */
  private void deleteShard(
      int shardId, boolean delete, boolean snapshotGateway, boolean deleteGateway)
      throws ElasticSearchException {
    Injector shardInjector;
    IndexShard indexShard;
    synchronized (this) {
      Map<Integer, Injector> tmpShardInjectors = newHashMap(shardsInjectors);
      shardInjector = tmpShardInjectors.remove(shardId);
      if (shardInjector == null) {
        if (!delete) {
          // closing a shard that is already gone is a no-op
          return;
        }
        throw new IndexShardMissingException(new ShardId(index, shardId));
      }
      shardsInjectors = ImmutableMap.copyOf(tmpShardInjectors);
      if (delete) {
        logger.debug("deleting shard_id [{}]", shardId);
      }

      Map<Integer, IndexShard> tmpShardsMap = newHashMap(shards);
      indexShard = tmpShardsMap.remove(shardId);
      shards = ImmutableMap.copyOf(tmpShardsMap);
    }

    ShardId sId = new ShardId(index, shardId);

    indicesLifecycle.beforeIndexShardClosed(sId, indexShard, delete);

    for (Class<? extends CloseableIndexComponent> closeable : pluginsService.shardServices()) {
      try {
        shardInjector.getInstance(closeable).close(delete);
      } catch (Exception e) {
        logger.debug("failed to clean plugin shard service [{}]", e, closeable);
      }
    }

    try {
      // now we can close the translog service, we need to close it before the we close the shard
      shardInjector.getInstance(TranslogService.class).close();
    } catch (Exception e) {
      // ignore
    }

    // close shard actions
    if (indexShard != null) {
      shardInjector.getInstance(IndexShardManagement.class).close();
    }

    // this logic is tricky, we want to close the engine so we rollback the changes done to it
    // and close the shard so no operations are allowed to it
    if (indexShard != null) {
      indexShard.close();
    }
    try {
      shardInjector.getInstance(Engine.class).close();
    } catch (Exception e) {
      // ignore
    }

    try {
      // now, we can snapshot to the gateway, it will be only the translog
      if (snapshotGateway) {
        shardInjector.getInstance(IndexShardGatewayService.class).snapshotOnClose();
      }
    } catch (Exception e) {
      // ignore
    }
    try {
      shardInjector.getInstance(IndexShardGatewayService.class).close(deleteGateway);
    } catch (Exception e) {
      // ignore
    }
    try {
      // now we can close the translog
      shardInjector.getInstance(Translog.class).close(delete);
    } catch (Exception e) {
      // ignore
    }

    // call this before we close the store, so we can release resources for it
    indicesLifecycle.afterIndexShardClosed(sId, delete);

    // if we delete or have no gateway or the store is not persistent, clean the store...
    Store store = shardInjector.getInstance(Store.class);
    if (delete || indexGateway.type().equals(NoneGateway.TYPE) || !indexStore.persistent()) {
      try {
        store.fullDelete();
      } catch (IOException e) {
        logger.warn("failed to clean store on shard deletion", e);
      }
    }
    // and close it
    try {
      store.close();
    } catch (IOException e) {
      logger.warn("failed to close store on shard deletion", e);
    }

    // Close the shard-level injector we removed above (NOT the index-level "injector":
    // closing the parent here would tear down shared index services on every shard
    // deletion and leak the shard injector's own resources).
    Injectors.close(shardInjector);

    // delete the shard location if needed
    if (delete || indexGateway.type().equals(NoneGateway.TYPE)) {
      FileSystemUtils.deleteRecursively(nodeEnv.shardLocation(sId));
    }
  }

  /** Drops unreferenced cache entries whenever a shard of this index is closed. */
  class CleanCacheOnIndicesLifecycleListener extends IndicesLifecycle.Listener {

    @Override
    public void beforeIndexShardClosed(
        ShardId shardId, @Nullable IndexShard indexShard, boolean delete) {
      indexCache.clearUnreferenced();
    }

    @Override
    public void afterIndexShardClosed(ShardId shardId, boolean delete) {
      indexCache.clearUnreferenced();
    }
  }
}
コード例 #28
0
 /** Closes this mapper by releasing every child mapper it holds. */
 @Override
 public void close() {
   for (Mapper childMapper : mappers.values()) {
     childMapper.close();
   }
 }
コード例 #29
0
  /**
   * Verifies schema validation against a known-good bucket fixture: (1) non-verbose validation of
   * the columnar, search-index, and temporal schemas produces no messages; (2) with the
   * search-index "verbose" technology override enabled, validation succeeds and emits the expected
   * mapping plus a max-index-size note; (3) temporal grouping periods map to the expected index
   * name suffix.
   */
  @Test
  public void test_validationSuccess() throws IOException {
    // Load the fixture bucket JSON from test resources and materialize it as a bean.
    final String bucket_str =
        Resources.toString(
            Resources.getResource(
                "com/ikanow/aleph2/search_service/elasticsearch/services/test_bucket_validate_success.json"),
            Charsets.UTF_8);
    final DataBucketBean bucket =
        BeanTemplateUtils.build(bucket_str, DataBucketBean.class).done().get();

    // 1) Verbose mode off
    {
      // Each validateSchema call returns a tuple; _2() is the collection of validation messages.
      final Collection<BasicMessageBean> res_col =
          _index_service.validateSchema(bucket.data_schema().columnar_schema(), bucket)._2();
      final Collection<BasicMessageBean> res_search =
          _index_service.validateSchema(bucket.data_schema().search_index_schema(), bucket)._2();
      final Collection<BasicMessageBean> res_time =
          _index_service.validateSchema(bucket.data_schema().temporal_schema(), bucket)._2();

      // No messages expected when verbose mode is off.
      assertEquals(0, res_col.size());
      assertEquals(0, res_search.size());
      assertEquals(0, res_time.size());
    }

    // 2) Verbose mode on
    {
      // Clone the fixture bucket, adding "verbose": true to the search-index schema's
      // technology override map (all other settings preserved).
      final DataBucketBean bucket_verbose =
          BeanTemplateUtils.clone(bucket)
              .with(
                  DataBucketBean::data_schema,
                  BeanTemplateUtils.clone(bucket.data_schema())
                      .with(
                          DataSchemaBean::search_index_schema,
                          BeanTemplateUtils.clone(bucket.data_schema().search_index_schema())
                              .with(
                                  DataSchemaBean.SearchIndexSchemaBean::technology_override_schema,
                                  ImmutableMap.builder()
                                      .putAll(
                                          bucket
                                              .data_schema()
                                              .search_index_schema()
                                              .technology_override_schema())
                                      .put("verbose", true)
                                      .build())
                              .done())
                      .done())
              .done();

      final Collection<BasicMessageBean> res_col =
          _index_service
              .validateSchema(bucket_verbose.data_schema().columnar_schema(), bucket)
              ._2();
      final Collection<BasicMessageBean> res_search =
          _index_service
              .validateSchema(bucket_verbose.data_schema().search_index_schema(), bucket)
              ._2();
      final Collection<BasicMessageBean> res_time =
          _index_service
              .validateSchema(bucket_verbose.data_schema().temporal_schema(), bucket)
              ._2();

      // Verbose mode adds exactly two informational (success) messages on the search schema only.
      assertEquals(0, res_col.size());
      assertEquals(0, res_time.size());
      assertEquals(2, res_search.size());
      assertEquals(true, res_search.stream().allMatch(BasicMessageBean::success));
      Iterator<BasicMessageBean> res_search_message = res_search.iterator();

      // First message: the generated mapping, compared structurally against the expected JSON.
      final String mapping_str =
          Resources.toString(
              Resources.getResource(
                  "com/ikanow/aleph2/search_service/elasticsearch/services/test_verbose_mapping_validate_results.json"),
              Charsets.UTF_8);
      final JsonNode mapping_json = _mapper.readTree(mapping_str.getBytes());
      assertEquals(
          mapping_json.toString(),
          _mapper.readTree(res_search_message.next().message()).toString());
      // Second message: mentions the max index size override.
      assertTrue(
          "Sets the max index override: "
              + res_search.stream().skip(1).map(m -> m.message()).collect(Collectors.joining()),
          res_search_message.next().message().contains("1,000 MB"));
    }

    // 3) Temporal

    {
      // No grouping period configured -> no time-based suffix on the index name.
      final DataBucketBean bucket_temporal_no_grouping =
          BeanTemplateUtils.clone(bucket)
              .with(
                  DataBucketBean::data_schema,
                  BeanTemplateUtils.clone(bucket.data_schema())
                      .with(
                          DataSchemaBean::temporal_schema,
                          BeanTemplateUtils.build(DataSchemaBean.TemporalSchemaBean.class)
                              .done()
                              .get())
                      .done())
              .done();

      assertEquals(
          "",
          _index_service
              .validateSchema(bucket_temporal_no_grouping.data_schema().temporal_schema(), bucket)
              ._1());

      // A daily ("1d") grouping period -> date-pattern suffix on the index name.
      final DataBucketBean bucket_temporal_grouping =
          BeanTemplateUtils.clone(bucket)
              .with(
                  DataBucketBean::data_schema,
                  BeanTemplateUtils.clone(bucket.data_schema())
                      .with(
                          DataSchemaBean::temporal_schema,
                          BeanTemplateUtils.build(DataSchemaBean.TemporalSchemaBean.class)
                              .with(DataSchemaBean.TemporalSchemaBean::grouping_time_period, "1d")
                              .done()
                              .get())
                      .done())
              .done();

      assertEquals(
          "_{yyyy-MM-dd}",
          _index_service
              .validateSchema(bucket_temporal_grouping.data_schema().temporal_schema(), bucket)
              ._1());
    }
  }
コード例 #30
0
  /**
   * Recovers a single file described by {@code fileInfo} from the blob container into the local
   * store, counting {@code latch} down exactly once when the file is done, skipped, or failed.
   *
   * <p>Large files may have been uploaded in chunks named {@code <name>.part0},
   * {@code <name>.part1}, ...; the parts are then streamed sequentially into a single output.
   *
   * @param fileInfo commit-point metadata for the file to recover
   * @param blobs listing of the blobs available in the container
   * @param latch completion latch shared by all files of this recovery
   * @param failures shared collection accumulating recovery failures
   */
  private void recoverFile(
      final CommitPoint.FileInfo fileInfo,
      final ImmutableMap<String, BlobMetaData> blobs,
      final CountDownLatch latch,
      final List<Throwable> failures) {
    final IndexOutput indexOutput;
    try {
      // we create an output with no checksum, this is because the pure binary data of the file is
      // not
      // the checksum (because of seek). We will create the checksum file once copying is done
      indexOutput = store.createOutputWithNoChecksum(fileInfo.physicalName());
    } catch (IOException e) {
      failures.add(e);
      latch.countDown();
      return;
    }

    String firstFileToRecover = fileInfo.name();
    if (!blobs.containsKey(fileInfo.name())) {
      // the plain blob is absent; the file may have been chunked, so start from part0
      firstFileToRecover = fileInfo.name() + ".part0";
    }
    if (!blobs.containsKey(firstFileToRecover)) {
      // neither the plain blob nor the first chunk exists - nothing to recover;
      // close the output we just created so we do not leak the file handle
      try {
        indexOutput.close();
      } catch (IOException e) {
        // best-effort cleanup; the recovery of this file is skipped either way
      }
      logger.warn(
          "no file [{}]/[{}] to recover, ignoring it", fileInfo.name(), fileInfo.physicalName());
      latch.countDown();
      return;
    }
    // index of the chunk currently being read; incremented as each part completes
    final AtomicInteger partIndex = new AtomicInteger();

    blobContainer.readBlob(
        firstFileToRecover,
        new BlobContainer.ReadBlobListener() {
          @Override
          public synchronized void onPartial(byte[] data, int offset, int size) throws IOException {
            recoveryStatus.index().addCurrentFilesSize(size);
            indexOutput.writeBytes(data, offset, size);
          }

          @Override
          public synchronized void onCompleted() {
            int part = partIndex.incrementAndGet();
            String partName = fileInfo.name() + ".part" + part;
            if (blobs.containsKey(partName)) {
              // continue with the new part
              blobContainer.readBlob(partName, this);
              return;
            } else {
              // we are done...
              try {
                indexOutput.close();
                // write the checksum
                if (fileInfo.checksum() != null) {
                  store.writeChecksum(fileInfo.physicalName(), fileInfo.checksum());
                }
                store.directory().sync(Collections.singleton(fileInfo.physicalName()));
              } catch (IOException e) {
                onFailure(e);
                return;
              }
            }
            latch.countDown();
          }

          @Override
          public void onFailure(Throwable t) {
            failures.add(t);
            latch.countDown();
          }
        });
  }