Example #1
 @Override
 protected ShardSuggestResponse shardOperation(ShardSuggestRequest request)
     throws ElasticSearchException {
   IndexService indexService = indicesService.indexServiceSafe(request.index());
   IndexShard indexShard = indexService.shardSafe(request.shardId());
   final Engine.Searcher searcher = indexShard.searcher();
   XContentParser parser = null;
   try {
     BytesReference suggest = request.suggest();
     if (suggest != null && suggest.length() > 0) {
       parser = XContentFactory.xContent(suggest).createParser(suggest);
       if (parser.nextToken() != XContentParser.Token.START_OBJECT) {
         throw new ElasticSearchIllegalArgumentException("suggest content missing");
       }
       final SuggestionSearchContext context =
           suggestPhase
               .parseElement()
               .parseInternal(
                   parser, indexService.mapperService(), request.index(), request.shardId());
       final Suggest result = suggestPhase.execute(context, searcher.reader());
       return new ShardSuggestResponse(request.index(), request.shardId(), result);
     }
     return new ShardSuggestResponse(request.index(), request.shardId(), new Suggest());
   } catch (Throwable ex) {
     throw new ElasticSearchException("failed to execute suggest", ex);
   } finally {
     searcher.release();
     if (parser != null) {
       parser.close();
     }
   }
 }
  public void testStreamInputBulkReadWithOffset() throws IOException {
    final int length = randomIntBetween(10, scaledRandomIntBetween(PAGE_SIZE * 2, PAGE_SIZE * 20));
    BytesReference pbr = newBytesReference(length);
    StreamInput si = pbr.streamInput();
    assertNotNull(si);

    // read a bunch of single bytes one by one
    int offset = randomIntBetween(1, length / 2);
    for (int i = 0; i < offset; i++) {
       assertEquals(length - i, si.available());
      assertEquals(pbr.get(i), si.readByte());
    }

    // now do NOT reset the stream - keep the stream's offset!

    // buffer to compare remaining bytes against bulk read
    byte[] pbrBytesWithOffset = Arrays.copyOfRange(pbr.toBytes(), offset, length);
    // randomized target buffer to ensure no stale slots
    byte[] targetBytes = new byte[pbrBytesWithOffset.length];
    random().nextBytes(targetBytes);

    // bulk-read all
    si.readFully(targetBytes);
    assertArrayEquals(pbrBytesWithOffset, targetBytes);
     assertEquals(0, si.available());
  }
  public Fields getFields() throws IOException {
    if (hasTermVectors() && isExists()) {
      if (!sourceCopied) { // make the bytes safe
        headerRef = headerRef.copyBytesArray();
        termVectors = termVectors.copyBytesArray();
      }
      return new TermVectorsFields(headerRef, termVectors);
    } else {
      return new Fields() {
        @Override
        public Iterator<String> iterator() {
          return Iterators.emptyIterator();
        }

        @Override
        public Terms terms(String field) throws IOException {
          return null;
        }

        @Override
        public int size() {
          return 0;
        }
      };
    }
  }
  public void testSerializeRequest() throws IOException {
    ClusterRerouteRequest req = new ClusterRerouteRequest();
    req.setRetryFailed(randomBoolean());
    req.dryRun(randomBoolean());
    req.explain(randomBoolean());
    req.add(new AllocateEmptyPrimaryAllocationCommand("foo", 1, "bar", randomBoolean()));
    req.timeout(TimeValue.timeValueMillis(randomIntBetween(0, 100)));
    BytesStreamOutput out = new BytesStreamOutput();
    req.writeTo(out);
    BytesReference bytes = out.bytes();
    NetworkModule networkModule = new NetworkModule(null, Settings.EMPTY, true);
    NamedWriteableRegistry namedWriteableRegistry =
        new NamedWriteableRegistry(networkModule.getNamedWriteables());
    StreamInput wrap =
        new NamedWriteableAwareStreamInput(bytes.streamInput(), namedWriteableRegistry);
    ClusterRerouteRequest deserializedReq = new ClusterRerouteRequest();
    deserializedReq.readFrom(wrap);

    assertEquals(req.isRetryFailed(), deserializedReq.isRetryFailed());
    assertEquals(req.dryRun(), deserializedReq.dryRun());
    assertEquals(req.explain(), deserializedReq.explain());
    assertEquals(req.timeout(), deserializedReq.timeout());
    assertEquals(
        1,
        deserializedReq
            .getCommands()
            .commands()
            .size()); // allocation commands have their own tests
    assertEquals(
        req.getCommands().commands().size(), deserializedReq.getCommands().commands().size());
  }
 /**
  * A parser for the contents of this request if it has contents, otherwise a parser for the {@code
  * source} parameter if there is one, otherwise throws an {@link ElasticsearchParseException}. Use
  * {@link #withContentOrSourceParamParserOrNull(CheckedConsumer)} instead if you need to handle
  * the absence of request content gracefully.
  */
 public final XContentParser contentOrSourceParamParser() throws IOException {
   BytesReference content = contentOrSourceParam();
   if (content.length() == 0) {
     throw new ElasticsearchParseException("Body required");
   }
   return XContentFactory.xContent(content).createParser(content);
 }
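
A minimal usage sketch (an assumption, not part of the original listing): a hypothetical helper that relies on contentOrSourceParamParser() to reject requests without a body and otherwise reads the first field name. RestRequest and XContentParser are the real Elasticsearch types used throughout this listing; the helper class itself is invented for illustration.

import java.io.IOException;

import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.rest.RestRequest;

public class FirstFieldNameHelper {
  /**
   * Returns the first field name of the request body (or of the source parameter).
   * contentOrSourceParamParser() throws ElasticsearchParseException when neither is present.
   */
  public static String firstFieldName(RestRequest request) throws IOException {
    try (XContentParser parser = request.contentOrSourceParamParser()) {
      parser.nextToken(); // START_OBJECT
      parser.nextToken(); // FIELD_NAME (or END_OBJECT for an empty body object)
      return parser.currentName();
    }
  }
}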
 public void testCopyBytesRef() throws IOException {
   int length = randomIntBetween(0, PAGE_SIZE * randomIntBetween(2, 5));
   BytesReference pbr = newBytesReference(length);
   BytesRef ref = pbr.copyBytesRef();
   assertNotNull(ref);
   assertEquals(pbr.length(), ref.length);
 }
  private void writeIndex(
      String reason, IndexMetaData indexMetaData, @Nullable IndexMetaData previousIndexMetaData)
      throws Exception {
    logger.trace("[{}] writing state, reason [{}]", indexMetaData.index(), reason);
    XContentBuilder builder = XContentFactory.contentBuilder(format, new BytesStreamOutput());
    builder.startObject();
    IndexMetaData.Builder.toXContent(indexMetaData, builder, formatParams);
    builder.endObject();
    builder.flush();

    String stateFileName = "state-" + indexMetaData.version();
    Exception lastFailure = null;
    boolean wroteAtLeastOnce = false;
    for (File indexLocation : nodeEnv.indexLocations(new Index(indexMetaData.index()))) {
      File stateLocation = new File(indexLocation, "_state");
      FileSystemUtils.mkdirs(stateLocation);
      File stateFile = new File(stateLocation, stateFileName);

      FileOutputStream fos = null;
      try {
        fos = new FileOutputStream(stateFile);
        BytesReference bytes = builder.bytes();
        fos.write(bytes.array(), bytes.arrayOffset(), bytes.length());
        fos.getChannel().force(true);
        fos.close();
        wroteAtLeastOnce = true;
      } catch (Exception e) {
        lastFailure = e;
      } finally {
        IOUtils.closeWhileHandlingException(fos);
      }
    }

    if (!wroteAtLeastOnce) {
      logger.warn("[{}]: failed to state", lastFailure, indexMetaData.index());
      throw new IOException(
          "failed to write state for [" + indexMetaData.index() + "]", lastFailure);
    }

    // delete the old files
    if (previousIndexMetaData != null
        && previousIndexMetaData.version() != indexMetaData.version()) {
      for (File indexLocation : nodeEnv.indexLocations(new Index(indexMetaData.index()))) {
        File[] files = new File(indexLocation, "_state").listFiles();
        if (files == null) {
          continue;
        }
        for (File file : files) {
          if (!file.getName().startsWith("state-")) {
            continue;
          }
          if (file.getName().equals(stateFileName)) {
            continue;
          }
          file.delete();
        }
      }
    }
  }
 public void testToBytesRef() throws IOException {
   int length = randomIntBetween(0, PAGE_SIZE);
   BytesReference pbr = newBytesReference(length);
   BytesRef ref = pbr.toBytesRef();
   assertNotNull(ref);
   assertEquals(pbr.arrayOffset(), ref.offset);
   assertEquals(pbr.length(), ref.length);
 }
  public void testLength() throws IOException {
    int[] sizes = {0, randomInt(PAGE_SIZE), PAGE_SIZE, randomInt(PAGE_SIZE * 3)};

    for (int i = 0; i < sizes.length; i++) {
      BytesReference pbr = newBytesReference(sizes[i]);
      assertEquals(sizes[i], pbr.length());
    }
  }
 /**
  * Writes the binary content of the given BytesReference. Use {@link
  * org.elasticsearch.common.xcontent.XContentParser#binaryValue()} to read the value back.
  */
 public XContentBuilder field(String name, BytesReference value) throws IOException {
   field(name);
   if (!value.hasArray()) {
     value = value.toBytesArray();
   }
   generator.writeBinary(value.array(), value.arrayOffset(), value.length());
   return this;
 }
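
A hedged round-trip sketch for field(String, BytesReference): the bytes come back through XContentParser#binaryValue(), as the Javadoc above says. The standalone class and its main() are illustrative assumptions; the builder and parser calls mirror the ones used elsewhere in this listing.

import org.elasticsearch.common.bytes.BytesArray;
import org.elasticsearch.common.bytes.BytesReference;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.common.xcontent.XContentFactory;
import org.elasticsearch.common.xcontent.XContentParser;

public class BinaryFieldRoundTrip {
  public static void main(String[] args) throws Exception {
    BytesReference payload = new BytesArray(new byte[] {1, 2, 3});

    XContentBuilder builder = XContentFactory.jsonBuilder();
    builder.startObject();
    builder.field("blob", payload); // the JSON generator writes this as a base64 string
    builder.endObject();
    builder.flush();

    BytesReference bytes = builder.bytes();
    XContentParser parser = XContentFactory.xContent(bytes).createParser(bytes);
    parser.nextToken(); // START_OBJECT
    parser.nextToken(); // FIELD_NAME "blob"
    parser.nextToken(); // VALUE_STRING (base64)
    byte[] decoded = parser.binaryValue();
    assert decoded.length == 3;
    parser.close();
  }
}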
 public void testArrayOffset() throws IOException {
   int length = randomInt(PAGE_SIZE * randomIntBetween(2, 5));
   BytesReference pbr = newBytesReference(length);
   if (pbr.hasArray()) {
     assertEquals(0, pbr.arrayOffset());
   } else {
     expectThrows(IllegalStateException.class, () -> pbr.arrayOffset());
   }
 }
  private void writeGlobalState(
      String reason, MetaData metaData, @Nullable MetaData previousMetaData) throws Exception {
    logger.trace("[_global] writing state, reason [{}]", reason);
    // create metadata to write with just the global state
    MetaData globalMetaData = MetaData.builder().metaData(metaData).removeAllIndices().build();

    XContentBuilder builder = XContentFactory.contentBuilder(format);
    builder.startObject();
    MetaData.Builder.toXContent(globalMetaData, builder, formatParams);
    builder.endObject();
    builder.flush();

    String globalFileName = "global-" + globalMetaData.version();
    Exception lastFailure = null;
    boolean wroteAtLeastOnce = false;
    for (File dataLocation : nodeEnv.nodeDataLocations()) {
      File stateLocation = new File(dataLocation, "_state");
      FileSystemUtils.mkdirs(stateLocation);
      File stateFile = new File(stateLocation, globalFileName);

      FileOutputStream fos = null;
      try {
        fos = new FileOutputStream(stateFile);
        BytesReference bytes = builder.bytes();
        fos.write(bytes.array(), bytes.arrayOffset(), bytes.length());
        fos.getChannel().force(true);
        fos.close();
        wroteAtLeastOnce = true;
      } catch (Exception e) {
        lastFailure = e;
      } finally {
        IOUtils.closeWhileHandlingException(fos);
      }
    }

    if (!wroteAtLeastOnce) {
      logger.warn("[_global]: failed to write global state", lastFailure);
      throw new IOException("failed to write global state", lastFailure);
    }

    // delete the old files
    for (File dataLocation : nodeEnv.nodeDataLocations()) {
      File[] files = new File(dataLocation, "_state").listFiles();
      if (files == null) {
        continue;
      }
      for (File file : files) {
        if (!file.getName().startsWith("global-")) {
          continue;
        }
        if (file.getName().equals(globalFileName)) {
          continue;
        }
        file.delete();
      }
    }
  }
 public void testWriteToOutputStream() throws IOException {
   int length = randomIntBetween(10, PAGE_SIZE * 4);
   BytesReference pbr = newBytesReference(length);
   BytesStreamOutput out = new BytesStreamOutput();
   pbr.writeTo(out);
   assertEquals(pbr.length(), out.size());
   assertArrayEquals(pbr.toBytes(), out.bytes().toBytes());
   out.close();
 }
 public void testCopyBytesArray() throws IOException {
   // small PBR which would normally share the first page
   int length = randomIntBetween(10, PAGE_SIZE);
   BytesReference pbr = newBytesReference(length);
   BytesArray ba = pbr.copyBytesArray();
   BytesArray ba2 = pbr.copyBytesArray();
   assertNotNull(ba);
   assertNotSame(ba, ba2);
   assertNotSame(ba.array(), ba2.array());
 }
 public void testToBytes() throws IOException {
   int[] sizes = {
     0, randomInt(PAGE_SIZE), PAGE_SIZE, randomIntBetween(2, PAGE_SIZE * randomIntBetween(2, 5))
   };
   for (int i = 0; i < sizes.length; i++) {
     BytesReference pbr = newBytesReference(sizes[i]);
     byte[] bytes = pbr.toBytes();
     assertEquals(sizes[i], bytes.length);
   }
 }
 /**
  * Writes the binary content of the given BytesReference. Use {@link
  * org.elasticsearch.common.xcontent.XContentParser#binaryValue()} to read the value back.
  */
 public XContentBuilder value(BytesReference value) throws IOException {
   if (value == null) {
     return nullValue();
   }
   if (!value.hasArray()) {
     value = value.toBytesArray();
   }
   generator.writeBinary(value.array(), value.arrayOffset(), value.length());
   return this;
 }
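
The same round trip applies to this unnamed variant. A short sketch (class name and main() are assumptions, as above) writing binary array elements; a null reference falls back to nullValue():

import org.elasticsearch.common.bytes.BytesArray;
import org.elasticsearch.common.bytes.BytesReference;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.common.xcontent.XContentFactory;

public class BinaryValueExample {
  public static void main(String[] args) throws Exception {
    XContentBuilder builder = XContentFactory.jsonBuilder();
    builder.startObject();
    builder.startArray("blobs");
    builder.value(new BytesArray(new byte[] {42})); // base64-encoded in JSON
    builder.value((BytesReference) null);           // delegates to nullValue()
    builder.endArray();
    builder.endObject();
    builder.flush();
    System.out.println(builder.string()); // prints {"blobs":["Kg==",null]}
  }
}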
 public void testSliceToBytesRef() throws IOException {
   int length = randomIntBetween(0, PAGE_SIZE);
   BytesReference pbr = newBytesReference(length);
   // get a BytesRef from a slice
   int sliceOffset = randomIntBetween(0, pbr.length());
    int sliceLength = pbr.length() - sliceOffset;
   BytesRef sliceRef = pbr.slice(sliceOffset, sliceLength).toBytesRef();
    // note that these only hold if the reference fits into a single page; otherwise offset/length are shifted
   assertEquals(sliceOffset, sliceRef.offset);
   assertEquals(sliceLength, sliceRef.length);
 }
 public void testIterator() throws IOException {
   int length = randomIntBetween(10, PAGE_SIZE * randomIntBetween(2, 8));
   BytesReference pbr = newBytesReference(length);
   BytesRefIterator iterator = pbr.iterator();
   BytesRef ref;
   BytesRefBuilder builder = new BytesRefBuilder();
   while ((ref = iterator.next()) != null) {
     builder.append(ref);
   }
   assertArrayEquals(pbr.toBytes(), BytesRef.deepCopyOf(builder.toBytesRef()).bytes);
 }
 public void testToBytesArraySharedPage() throws IOException {
   int length = randomIntBetween(10, PAGE_SIZE);
   BytesReference pbr = newBytesReference(length);
   BytesArray ba = pbr.toBytesArray();
   BytesArray ba2 = pbr.toBytesArray();
   assertNotNull(ba);
   assertNotNull(ba2);
   assertEquals(pbr.length(), ba.length());
   assertEquals(ba.length(), ba2.length());
   // single-page optimization
   assertSame(ba.array(), ba2.array());
 }
  public void testArray() throws IOException {
    int[] sizes = {
      0, randomInt(PAGE_SIZE), PAGE_SIZE, randomIntBetween(2, PAGE_SIZE * randomIntBetween(2, 5))
    };

    for (int i = 0; i < sizes.length; i++) {
      BytesReference pbr = newBytesReference(sizes[i]);
      byte[] array = pbr.array();
      assertNotNull(array);
      assertEquals(sizes[i], array.length);
      assertSame(array, pbr.array());
    }
  }
  /**
   * Call a consumer with the parser for the contents of this request if it has contents, otherwise
   * with a parser for the {@code source} parameter if there is one, otherwise with {@code null}.
   * Use {@link #contentOrSourceParamParser()} instead if an exception should be thrown back to the
   * user when there is no request content.
   */
  public final void withContentOrSourceParamParserOrNull(
      CheckedConsumer<XContentParser, IOException> withParser) throws IOException {
    XContentParser parser = null;
    BytesReference content = contentOrSourceParam();
    if (content.length() > 0) {
      parser = XContentFactory.xContent(content).createParser(content);
    }

    try {
      withParser.accept(parser);
    } finally {
      IOUtils.close(parser);
    }
  }
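
A hypothetical caller sketch for the null-tolerant variant: fall back to a default when there is no body and no source parameter instead of failing the request. RestRequest is assumed to be the enclosing type, as in the snippet above; the helper class and method name are invented for illustration.

import java.io.IOException;
import java.util.Collections;
import java.util.Map;
import java.util.concurrent.atomic.AtomicReference;

import org.elasticsearch.rest.RestRequest;

public class OptionalBodyHelper {
  /** Parses the body into a map, or returns an empty map when no content was sent. */
  public static Map<String, Object> bodyAsMapOrEmpty(RestRequest request) throws IOException {
    AtomicReference<Map<String, Object>> result = new AtomicReference<>(Collections.emptyMap());
    request.withContentOrSourceParamParserOrNull(parser -> {
      if (parser != null) { // null means: no request body and no source parameter
        result.set(parser.map());
      }
    });
    return result.get();
  }
}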
 public void testToBytesArrayMaterializedPages() throws IOException {
   // we need a length != (n * pagesize) to avoid page sharing at boundaries
   int length = 0;
   while ((length % PAGE_SIZE) == 0) {
     length = randomIntBetween(PAGE_SIZE, PAGE_SIZE * randomIntBetween(2, 5));
   }
   BytesReference pbr = newBytesReference(length);
   BytesArray ba = pbr.toBytesArray();
   BytesArray ba2 = pbr.toBytesArray();
   assertNotNull(ba);
   assertNotNull(ba2);
   assertEquals(pbr.length(), ba.length());
   assertEquals(ba.length(), ba2.length());
 }
 public void testSliceArrayOffset() throws IOException {
   int length = randomInt(PAGE_SIZE * randomIntBetween(2, 5));
   BytesReference pbr = newBytesReference(length);
   int sliceOffset = randomIntBetween(0, pbr.length());
    int sliceLength = pbr.length() - sliceOffset;
   BytesReference slice = pbr.slice(sliceOffset, sliceLength);
   if (slice.hasArray()) {
     assertEquals(sliceOffset, slice.arrayOffset());
   } else {
     expectThrows(IllegalStateException.class, () -> slice.arrayOffset());
   }
 }
Example #24
 @Override
 protected void beforeStart() {
   if (sourceUnsafe) {
     source = source.copyBytesArray();
     sourceUnsafe = false;
   }
 }
  @Override
  public Mapper parse(ParseContext context) throws IOException {
    QueryShardContext queryShardContext = new QueryShardContext(this.queryShardContext);
    if (context.doc().getField(queryBuilderField.name()) != null) {
      // If a percolator query has been defined in an array object then multiple percolator queries
      // could be provided. In order to prevent this we fail if we try to parse more than one query
      // for the current document.
      throw new IllegalArgumentException("a document can only contain one percolator query");
    }

    XContentParser parser = context.parser();
    QueryBuilder queryBuilder =
        parseQueryBuilder(queryShardContext.newParseContext(parser), parser.getTokenLocation());
    verifyQuery(queryBuilder);
    // Fetching of terms, shapes and indexed scripts happen during this rewrite:
    queryBuilder = queryBuilder.rewrite(queryShardContext);

    try (XContentBuilder builder = XContentFactory.contentBuilder(QUERY_BUILDER_CONTENT_TYPE)) {
      queryBuilder.toXContent(builder, new MapParams(Collections.emptyMap()));
      builder.flush();
      byte[] queryBuilderAsBytes = BytesReference.toBytes(builder.bytes());
      context
          .doc()
          .add(
              new Field(
                  queryBuilderField.name(), queryBuilderAsBytes, queryBuilderField.fieldType()));
    }

    Query query = toQuery(queryShardContext, mapUnmappedFieldAsString, queryBuilder);
    processQuery(query, context);
    return null;
  }
  // we ignore this test for now since all existing callers of BytesStreamOutput happily
  // call bytes() after close().
  @AwaitsFix(bugUrl = "https://github.com/elastic/elasticsearch/issues/12620")
  public void testAccessAfterClose() throws Exception {
    BytesStreamOutput out = new BytesStreamOutput();

    // immediately close
    out.close();

    assertEquals(-1, out.size());
    assertEquals(-1, out.position());

    // writing a single byte must fail
    try {
      out.writeByte((byte) 0);
      fail("expected IllegalStateException: stream closed");
    } catch (IllegalStateException iex1) {
      // expected
    }

    // writing in bulk must fail
    try {
      out.writeBytes(new byte[0], 0, 0);
      fail("expected IllegalStateException: stream closed");
    } catch (IllegalStateException iex1) {
      // expected
    }

    // toByteArray() must fail
    try {
      BytesReference.toBytes(out.bytes());
      fail("expected IllegalStateException: stream closed");
    } catch (IllegalStateException iex1) {
      // expected
    }
  }
  public void testWriteStreamableList() throws IOException {
    final int size = randomIntBetween(0, 5);
    final List<TestStreamable> expected = new ArrayList<>(size);

    for (int i = 0; i < size; ++i) {
      expected.add(new TestStreamable(randomBoolean()));
    }

    final BytesStreamOutput out = new BytesStreamOutput();
    out.writeStreamableList(expected);

    final StreamInput in = StreamInput.wrap(BytesReference.toBytes(out.bytes()));

    final List<TestStreamable> loaded = in.readStreamableList(TestStreamable::new);

    assertThat(loaded, hasSize(expected.size()));

    for (int i = 0; i < expected.size(); ++i) {
      assertEquals(expected.get(i).value, loaded.get(i).value);
    }

    assertEquals(0, in.available());

    in.close();
    out.close();
  }
 public void testWriteableReaderReturnsWrongName() throws IOException {
   BytesStreamOutput out = new BytesStreamOutput();
   NamedWriteableRegistry namedWriteableRegistry =
       new NamedWriteableRegistry(
           Collections.singletonList(
               new NamedWriteableRegistry.Entry(
                   BaseNamedWriteable.class,
                   TestNamedWriteable.NAME,
                   (StreamInput in) ->
                       new TestNamedWriteable(in) {
                         @Override
                         public String getWriteableName() {
                           return "intentionally-broken";
                         }
                       })));
   TestNamedWriteable namedWriteableIn =
       new TestNamedWriteable(
           randomAsciiOfLengthBetween(1, 10), randomAsciiOfLengthBetween(1, 10));
   out.writeNamedWriteable(namedWriteableIn);
   byte[] bytes = BytesReference.toBytes(out.bytes());
   StreamInput in =
       new NamedWriteableAwareStreamInput(StreamInput.wrap(bytes), namedWriteableRegistry);
    assertEquals(bytes.length, in.available());
   AssertionError e =
       expectThrows(AssertionError.class, () -> in.readNamedWriteable(BaseNamedWriteable.class));
   assertThat(
       e.getMessage(),
       endsWith(
           " claims to have a different name [intentionally-broken] than it was read from [test-named-writeable]."));
 }
 public BytesReference safeSource() {
   if (sourceUnsafe) {
     source = source.copyBytesArray();
     sourceUnsafe = false;
   }
   return source;
 }
 @Override
 public int hashCode() {
   int result = shard.hashCode();
   result = 31 * result + (int) (readerVersion ^ (readerVersion >>> 32));
   result = 31 * result + value.hashCode();
   return result;
 }