public void testGet() throws IOException {
  // Slice a random reference somewhere in its first half and verify that
  // single-byte reads through the slice line up with the backing reference.
  final int totalLength = randomIntBetween(1, PAGE_SIZE * 3);
  final BytesReference original = newBytesReference(totalLength);
  final int offset = randomIntBetween(0, totalLength / 2);
  // guarantee a non-empty slice even for the smallest lengths
  final int sliceLen = Math.max(1, totalLength - offset - 1);
  final BytesReference view = original.slice(offset, sliceLen);
  // both endpoints of the slice must map onto the corresponding original bytes
  assertEquals(original.get(offset), view.get(0));
  assertEquals(original.get(offset + sliceLen - 1), view.get(sliceLen - 1));
}
  /**
   * Slices taken with identical offset/length must expose identical bytes,
   * while a shorter slice at the same offset must compare unequal.
   */
  public void testSliceEquals() {
    int length = randomIntBetween(100, PAGE_SIZE * randomIntBetween(2, 5));
    ByteArray ba1 = bigarrays.newByteArray(length, false);
    BytesReference pbr = new PagedBytesReference(bigarrays, ba1, length);

    // test equality of slices
    int sliceFrom = randomIntBetween(0, pbr.length());
    // FIX: the original called randomIntBetween(x, x) with identical bounds,
    // a no-op "random" call that always yields the remaining length — compute
    // it directly instead.
    int sliceLength = pbr.length() - sliceFrom;
    BytesReference slice1 = pbr.slice(sliceFrom, sliceLength);
    BytesReference slice2 = pbr.slice(sliceFrom, sliceLength);
    assertArrayEquals(slice1.toBytes(), slice2.toBytes());

    // test a slice with same offset but different length,
    // unless randomized testing gave us a 0-length slice.
    if (sliceLength > 0) {
      BytesReference slice3 = pbr.slice(sliceFrom, sliceLength / 2);
      assertFalse(Arrays.equals(slice1.toBytes(), slice3.toBytes()));
    }
  }
 /**
  * A BytesRef obtained from a slice must mirror the slice's offset and length;
  * this only holds because length is capped at PAGE_SIZE (single backing page),
  * otherwise offset/length would be shifted.
  */
 public void testSliceToBytesRef() throws IOException {
   int length = randomIntBetween(0, PAGE_SIZE);
   BytesReference pbr = newBytesReference(length);
   // get a BytesRef from a slice
   int sliceOffset = randomIntBetween(0, pbr.length());
   // FIX: the original called randomIntBetween(x, x) with identical bounds;
   // use the remaining length directly.
   int sliceLength = pbr.length() - sliceOffset;
   BytesRef sliceRef = pbr.slice(sliceOffset, sliceLength).toBytesRef();
   // note that these are only true if we have <= than a page, otherwise offset/length are shifted
   assertEquals(sliceOffset, sliceRef.offset);
   assertEquals(sliceLength, sliceRef.length);
 }
 /**
  * arrayOffset() of an array-backed slice must equal the slice offset;
  * non-array-backed slices must throw IllegalStateException instead.
  */
 public void testSliceArrayOffset() throws IOException {
   int length = randomInt(PAGE_SIZE * randomIntBetween(2, 5));
   BytesReference pbr = newBytesReference(length);
   int sliceOffset = randomIntBetween(0, pbr.length());
   // FIX: the original called randomIntBetween(x, x) with identical bounds;
   // use the remaining length directly.
   int sliceLength = pbr.length() - sliceOffset;
   BytesReference slice = pbr.slice(sliceOffset, sliceLength);
   if (slice.hasArray()) {
     assertEquals(sliceOffset, slice.arrayOffset());
   } else {
     // references without a backing array must refuse arrayOffset()
     expectThrows(IllegalStateException.class, () -> slice.arrayOffset());
   }
 }
 /** Writing a slice to an output stream must reproduce exactly the slice bytes. */
 public void testSliceWriteToOutputStream() throws IOException {
   int length = randomIntBetween(10, PAGE_SIZE * randomIntBetween(2, 5));
   BytesReference pbr = newBytesReference(length);
   // slice covers the upper half of the original
   int sliceOffset = randomIntBetween(1, length / 2);
   int sliceLength = length - sliceOffset;
   BytesReference slice = pbr.slice(sliceOffset, sliceLength);
   // FIX: use try-with-resources so the stream is closed even when one of the
   // assertions fails (the original only closed it on the success path).
   try (BytesStreamOutput sliceOut = new BytesStreamOutput(sliceLength)) {
     slice.writeTo(sliceOut);
     assertEquals(slice.length(), sliceOut.size());
     assertArrayEquals(slice.toBytes(), sliceOut.bytes().toBytes());
   }
 }
 /** Iterating a slice chunk-by-chunk must reassemble exactly the slice bytes. */
 public void testSliceIterator() throws IOException {
   int length = randomIntBetween(10, PAGE_SIZE * randomIntBetween(2, 8));
   BytesReference pbr = newBytesReference(length);
   int sliceOffset = randomIntBetween(0, pbr.length());
   // FIX: the original called randomIntBetween(x, x) with identical bounds;
   // use the remaining length directly.
   int sliceLength = pbr.length() - sliceOffset;
   BytesReference slice = pbr.slice(sliceOffset, sliceLength);
   BytesRefIterator iterator = slice.iterator();
   BytesRef ref;
   BytesRefBuilder builder = new BytesRefBuilder();
   // concatenate every chunk the iterator yields
   while ((ref = iterator.next()) != null) {
     builder.append(ref);
   }
   assertArrayEquals(slice.toBytes(), BytesRef.deepCopyOf(builder.toBytesRef()).bytes);
 }
  /** copyBytesArray() on a slice must return equal but independent copies. */
  public void testSliceCopyBytesArray() throws IOException {
    int length = randomIntBetween(10, PAGE_SIZE * randomIntBetween(2, 8));
    BytesReference pbr = newBytesReference(length);
    int sliceOffset = randomIntBetween(0, pbr.length());
    // FIX: the original called randomIntBetween(x, x) with identical bounds;
    // use the remaining length directly.
    int sliceLength = pbr.length() - sliceOffset;
    BytesReference slice = pbr.slice(sliceOffset, sliceLength);

    BytesArray ba1 = slice.copyBytesArray();
    BytesArray ba2 = slice.copyBytesArray();
    assertNotNull(ba1);
    assertNotNull(ba2);
    // the two copies must not share a backing array ...
    assertNotSame(ba1.array(), ba2.array());
    // ... yet both must hold exactly the slice's content
    assertArrayEquals(slice.toBytes(), ba1.array());
    assertArrayEquals(slice.toBytes(), ba2.array());
    assertArrayEquals(ba1.array(), ba2.array());
  }
  /**
   * Exercises StreamInput over a slice: single-byte reads, reset, bulk
   * readFully, and partial read(byte[], off, len) calls must all agree with
   * the materialized slice content and keep available() consistent.
   */
  public void testSliceStreamInput() throws IOException {
    int length = randomIntBetween(10, scaledRandomIntBetween(PAGE_SIZE * 2, PAGE_SIZE * 20));
    BytesReference pbr = newBytesReference(length);

    // test stream input over slice (upper half of original)
    int sliceOffset = randomIntBetween(1, length / 2);
    int sliceLength = length - sliceOffset;
    BytesReference slice = pbr.slice(sliceOffset, sliceLength);
    StreamInput sliceInput = slice.streamInput();
    assertEquals(sliceInput.available(), sliceLength);

    // single reads
    assertEquals(slice.get(0), sliceInput.readByte());
    assertEquals(slice.get(1), sliceInput.readByte());
    assertEquals(slice.get(2), sliceInput.readByte());
    // exactly three bytes consumed so far
    assertEquals(sliceInput.available(), sliceLength - 3);

    // reset the slice stream for bulk reading
    sliceInput.reset();
    assertEquals(sliceInput.available(), sliceLength);

    // bulk read
    byte[] sliceBytes = new byte[sliceLength];
    sliceInput.readFully(sliceBytes);
    assertEquals(sliceInput.available(), 0);

    // compare slice content with upper half of original
    byte[] pbrSliceBytes = Arrays.copyOfRange(pbr.toBytes(), sliceOffset, length);
    assertArrayEquals(pbrSliceBytes, sliceBytes);

    // compare slice bytes with bytes read from slice via streamInput :D
    byte[] sliceToBytes = slice.toBytes();
    assertEquals(sliceBytes.length, sliceToBytes.length);
    assertArrayEquals(sliceBytes, sliceToBytes);

    // partial reads into a larger buffer at a random offset; the two reads
    // together must consume the whole slice
    sliceInput.reset();
    assertEquals(sliceInput.available(), sliceLength);
    byte[] buffer = new byte[sliceLength + scaledRandomIntBetween(1, 100)];
    int offset = scaledRandomIntBetween(0, Math.max(1, buffer.length - sliceLength - 1));
    int read = sliceInput.read(buffer, offset, sliceLength / 2);
    assertEquals(sliceInput.available(), sliceLength - read);
    sliceInput.read(buffer, offset + read, sliceLength - read);
    assertArrayEquals(sliceBytes, Arrays.copyOfRange(buffer, offset, offset + sliceLength));
    assertEquals(sliceInput.available(), 0);
  }
  /**
   * hashCode() of a reference (and of its slices) must equal the JDK
   * Arrays.hashCode of the materialized bytes.
   */
  public void testHashCode() throws IOException {
    // empty content must have hash 1 (JDK compat)
    BytesReference pbr = newBytesReference(0);
    assertEquals(Arrays.hashCode(BytesRef.EMPTY_BYTES), pbr.hashCode());

    // test with content
    pbr = newBytesReference(randomIntBetween(0, PAGE_SIZE * randomIntBetween(2, 5)));
    int jdkHash = Arrays.hashCode(pbr.toBytes());
    int pbrHash = pbr.hashCode();
    assertEquals(jdkHash, pbrHash);

    // test hashes of slices
    int sliceFrom = randomIntBetween(0, pbr.length());
    // FIX: the original called randomIntBetween(x, x) with identical bounds;
    // use the remaining length directly.
    int sliceLength = pbr.length() - sliceFrom;
    BytesReference slice = pbr.slice(sliceFrom, sliceLength);
    int sliceJdkHash = Arrays.hashCode(slice.toBytes());
    int sliceHash = slice.hashCode();
    assertEquals(sliceJdkHash, sliceHash);
  }
      /**
       * Parses a single action object from {@code data} (the content up to the
       * first stream separator) and, if a payload follows the separator, stores
       * it via {@code getString} into the {@code content} field. Recognized
       * metadata fields such as {@code _index}, {@code _type} and
       * {@code _queryString} are written to the enclosing class's fields.
       *
       * @param data raw request body; its content type is auto-detected
       * @throws Exception if the body does not start with an object
       */
      private void parse(BytesReference data) throws Exception {
        XContent xContent = XContentFactory.xContent(data);
        // NOTE(review): 'source' is computed but never used below — looks like
        // leftover debugging; confirm before removing.
        String source = XContentBuilder.builder(xContent).string();
        int from = 0;
        int length = data.length();
        // byte separating the action line from the following payload
        byte marker = xContent.streamSeparator();
        int nextMarker = findNextMarker(marker, from, data, length);
        if (nextMarker == -1) {
          // no separator found: the whole buffer is the action object
          nextMarker = length;
        }
        // now parse the action
        XContentParser parser = xContent.createParser(data.slice(from, nextMarker - from));

        try {
          // move pointers
          from = nextMarker + 1;

          // Move to START_OBJECT
          XContentParser.Token token = parser.nextToken();
          if (token == null) {
            throw new Exception("Wrong object structure");
          }
          assert token == XContentParser.Token.START_OBJECT;
          // Move to FIELD_NAME, that's the action
          // token = parser.nextToken();
          // assert token == XContentParser.Token.FIELD_NAME;
          // String action = parser.currentName();

          // NOTE(review): the locals below are populated from the request but
          // never read again in this method — presumably kept for parity with
          // the bulk-request parser; verify before pruning.
          String id = null;
          String routing = null;
          String parent = null;
          String timestamp = null;
          Long ttl = null;
          String opType = null;
          long version = 0;
          VersionType versionType = VersionType.INTERNAL;
          String percolate = null;

          // at this stage, next token can either be END_OBJECT
          // (and use default index and type, with auto generated
          // id)
          // or START_OBJECT which will have another set of
          // parameters

          String currentFieldName = null;
          while ((token = parser.nextToken()) != XContentParser.Token.END_OBJECT) {
            if (token == XContentParser.Token.FIELD_NAME) {
              currentFieldName = parser.currentName();
            } else if (token.isValue()) {
              // field values are dispatched by the most recently seen name
              if ("_index".equals(currentFieldName)) {
                index = parser.text();
              } else if ("_type".equals(currentFieldName)) {
                type = parser.text();
              } else if ("_queryString".equals(currentFieldName)) {
                queryString = parser.text();
              } else if ("_id".equals(currentFieldName)) {
                id = parser.text();
              } else if ("_routing".equals(currentFieldName)
                  || "routing".equals(currentFieldName)) {
                routing = parser.text();
              } else if ("_parent".equals(currentFieldName) || "parent".equals(currentFieldName)) {
                parent = parser.text();
              } else if ("_timestamp".equals(currentFieldName)
                  || "timestamp".equals(currentFieldName)) {
                timestamp = parser.text();
              } else if ("_ttl".equals(currentFieldName) || "ttl".equals(currentFieldName)) {
                // ttl may be given as a duration string ("1h") or raw millis
                if (parser.currentToken() == XContentParser.Token.VALUE_STRING) {
                  ttl = TimeValue.parseTimeValue(parser.text(), null).millis();
                } else {
                  ttl = parser.longValue();
                }
              } else if ("op_type".equals(currentFieldName) || "opType".equals(currentFieldName)) {
                opType = parser.text();
              } else if ("_version".equals(currentFieldName)
                  || "version".equals(currentFieldName)) {
                version = parser.longValue();
              } else if ("_version_type".equals(currentFieldName)
                  || "_versionType".equals(currentFieldName)
                  || "version_type".equals(currentFieldName)
                  || "versionType".equals(currentFieldName)) {
                versionType = VersionType.fromString(parser.text());
              } else if ("percolate".equals(currentFieldName)
                  || "_percolate".equals(currentFieldName)) {
                percolate = parser.textOrNull();
              }
            }
          }
          // if a payload follows the separator, capture it as 'content'
          if (nextMarker < length) {
            nextMarker = findNextMarker(marker, from, data, length);
            if (nextMarker == -1) {
              nextMarker = length;
            }
            content = getString(data.slice(from, nextMarker - from));
          }

        } finally {
          parser.close();
        }
      }
// Example #11
  /**
   * Parses a multi-search (msearch) body: alternating header and body lines
   * separated by the content's stream separator. Each header/body pair becomes
   * one SearchRequest appended to this request.
   *
   * @param data raw msearch body; content type auto-detected
   * @param contentUnsafe whether the body bytes may be reused by the caller
   * @param indices default indices applied to every request, may be null
   * @param types default types applied to every request, may be null
   * @param searchType default search type applied to every request
   * @param routing default routing applied to every request, may be null
   * @param indicesOptions default indices options applied to every request
   * @param allowExplicitIndex if false, an index given in a header is rejected
   * @return this request, for chaining
   * @throws Exception on malformed headers or disallowed explicit indices
   */
  public MultiSearchRequest add(
      BytesReference data,
      boolean contentUnsafe,
      @Nullable String[] indices,
      @Nullable String[] types,
      @Nullable String searchType,
      @Nullable String routing,
      IndicesOptions indicesOptions,
      boolean allowExplicitIndex)
      throws Exception {
    XContent xContent = XContentFactory.xContent(data);
    int from = 0;
    int length = data.length();
    byte marker = xContent.streamSeparator();
    // walk header/body pairs until no separator remains
    while (true) {
      int nextMarker = findNextMarker(marker, from, data, length);
      if (nextMarker == -1) {
        break;
      }
      // support first line with \n
      if (nextMarker == 0) {
        from = nextMarker + 1;
        continue;
      }

      // seed the request with the caller-supplied defaults
      SearchRequest searchRequest = new SearchRequest();
      if (indices != null) {
        searchRequest.indices(indices);
      }
      if (indicesOptions != null) {
        searchRequest.indicesOptions(indicesOptions);
      }
      if (types != null && types.length > 0) {
        searchRequest.types(types);
      }
      if (routing != null) {
        searchRequest.routing(routing);
      }
      searchRequest.searchType(searchType);

      // per-request option flags start from the strict defaults and may be
      // overridden by header fields below
      IndicesOptions defaultOptions = IndicesOptions.strictExpandOpenAndForbidClosed();
      boolean ignoreUnavailable = defaultOptions.ignoreUnavailable();
      boolean allowNoIndices = defaultOptions.allowNoIndices();
      boolean expandWildcardsOpen = defaultOptions.expandWildcardsOpen();
      boolean expandWildcardsClosed = defaultOptions.expandWildcardsClosed();

      // now parse the action
      if (nextMarker - from > 0) {
        try (XContentParser parser = xContent.createParser(data.slice(from, nextMarker - from))) {
          // Move to START_OBJECT, if token is null, its an empty data
          XContentParser.Token token = parser.nextToken();
          if (token != null) {
            assert token == XContentParser.Token.START_OBJECT;
            String currentFieldName = null;
            while ((token = parser.nextToken()) != XContentParser.Token.END_OBJECT) {
              if (token == XContentParser.Token.FIELD_NAME) {
                currentFieldName = parser.currentName();
              } else if (token.isValue()) {
                // scalar header fields
                if ("index".equals(currentFieldName) || "indices".equals(currentFieldName)) {
                  if (!allowExplicitIndex) {
                    throw new ElasticsearchIllegalArgumentException(
                        "explicit index in multi search is not allowed");
                  }
                  searchRequest.indices(Strings.splitStringByCommaToArray(parser.text()));
                } else if ("type".equals(currentFieldName) || "types".equals(currentFieldName)) {
                  searchRequest.types(Strings.splitStringByCommaToArray(parser.text()));
                } else if ("search_type".equals(currentFieldName)
                    || "searchType".equals(currentFieldName)) {
                  searchRequest.searchType(parser.text());
                } else if ("query_cache".equals(currentFieldName)
                    || "queryCache".equals(currentFieldName)) {
                  searchRequest.queryCache(parser.booleanValue());
                } else if ("preference".equals(currentFieldName)) {
                  searchRequest.preference(parser.text());
                } else if ("routing".equals(currentFieldName)) {
                  searchRequest.routing(parser.text());
                } else if ("ignore_unavailable".equals(currentFieldName)
                    || "ignoreUnavailable".equals(currentFieldName)) {
                  ignoreUnavailable = parser.booleanValue();
                } else if ("allow_no_indices".equals(currentFieldName)
                    || "allowNoIndices".equals(currentFieldName)) {
                  allowNoIndices = parser.booleanValue();
                } else if ("expand_wildcards".equals(currentFieldName)
                    || "expandWildcards".equals(currentFieldName)) {
                  // comma-separated form: "open,closed"
                  String[] wildcards = Strings.splitStringByCommaToArray(parser.text());
                  for (String wildcard : wildcards) {
                    if ("open".equals(wildcard)) {
                      expandWildcardsOpen = true;
                    } else if ("closed".equals(wildcard)) {
                      expandWildcardsClosed = true;
                    } else {
                      throw new ElasticsearchIllegalArgumentException(
                          "No valid expand wildcard value [" + wildcard + "]");
                    }
                  }
                }
              } else if (token == XContentParser.Token.START_ARRAY) {
                // array-valued header fields
                if ("index".equals(currentFieldName) || "indices".equals(currentFieldName)) {
                  if (!allowExplicitIndex) {
                    throw new ElasticsearchIllegalArgumentException(
                        "explicit index in multi search is not allowed");
                  }
                  searchRequest.indices(parseArray(parser));
                } else if ("type".equals(currentFieldName) || "types".equals(currentFieldName)) {
                  searchRequest.types(parseArray(parser));
                } else if ("expand_wildcards".equals(currentFieldName)
                    || "expandWildcards".equals(currentFieldName)) {
                  String[] wildcards = parseArray(parser);
                  for (String wildcard : wildcards) {
                    if ("open".equals(wildcard)) {
                      expandWildcardsOpen = true;
                    } else if ("closed".equals(wildcard)) {
                      expandWildcardsClosed = true;
                    } else {
                      throw new ElasticsearchIllegalArgumentException(
                          "No valid expand wildcard value [" + wildcard + "]");
                    }
                  }
                } else {
                  throw new ElasticsearchParseException(
                      currentFieldName + " doesn't support arrays");
                }
              }
            }
          }
        }
      }
      // combine the (possibly overridden) flags back into IndicesOptions
      searchRequest.indicesOptions(
          IndicesOptions.fromOptions(
              ignoreUnavailable,
              allowNoIndices,
              expandWildcardsOpen,
              expandWildcardsClosed,
              defaultOptions));

      // move pointers
      from = nextMarker + 1;
      // now for the body
      nextMarker = findNextMarker(marker, from, data, length);
      if (nextMarker == -1) {
        // header without a following body: stop parsing
        break;
      }

      searchRequest.source(data.slice(from, nextMarker - from), contentUnsafe);
      // move pointers
      from = nextMarker + 1;

      add(searchRequest);
    }

    return this;
  }