public static TopDocs readTopDocs(StreamInput in) throws IOException { if (!in.readBoolean()) { // no docs return null; } if (in.readBoolean()) { int totalHits = in.readVInt(); float maxScore = in.readFloat(); SortField[] fields = new SortField[in.readVInt()]; for (int i = 0; i < fields.length; i++) { String field = null; if (in.readBoolean()) { field = in.readUTF(); } fields[i] = new SortField(field, in.readVInt(), in.readBoolean()); } FieldDoc[] fieldDocs = new FieldDoc[in.readVInt()]; for (int i = 0; i < fieldDocs.length; i++) { Comparable[] cFields = new Comparable[in.readVInt()]; for (int j = 0; j < cFields.length; j++) { byte type = in.readByte(); if (type == 0) { cFields[j] = null; } else if (type == 1) { cFields[j] = in.readUTF(); } else if (type == 2) { cFields[j] = in.readInt(); } else if (type == 3) { cFields[j] = in.readLong(); } else if (type == 4) { cFields[j] = in.readFloat(); } else if (type == 5) { cFields[j] = in.readDouble(); } else if (type == 6) { cFields[j] = in.readByte(); } else if (type == 7) { cFields[j] = in.readShort(); } else if (type == 8) { cFields[j] = in.readBoolean(); } else { throw new IOException("Can't match type [" + type + "]"); } } fieldDocs[i] = new FieldDoc(in.readVInt(), in.readFloat(), cFields); } return new TopFieldDocs(totalHits, fieldDocs, fields, maxScore); } else { int totalHits = in.readVInt(); float maxScore = in.readFloat(); ScoreDoc[] scoreDocs = new ScoreDoc[in.readVInt()]; for (int i = 0; i < scoreDocs.length; i++) { scoreDocs[i] = new ScoreDoc(in.readVInt(), in.readFloat()); } return new TopDocs(totalHits, scoreDocs, maxScore); } }
/**
 * Deserializes this request from the stream: superclass fields first, then replication
 * type, write consistency level, timeout and index name. Read order must mirror writeTo.
 */
@Override
public void readFrom(StreamInput in) throws IOException {
    super.readFrom(in);
    replicationType = ReplicationType.fromId(in.readByte());
    consistencyLevel = WriteConsistencyLevel.fromId(in.readByte());
    timeout = TimeValue.readTimeValue(in);
    index = in.readString();
}
/**
 * Exercises {@code BytesReference#streamInput()}: single-byte reads, reset, zero-length
 * bulk reads, int-valued reads, a full bulk read, and out-of-bounds behavior.
 */
public void testStreamInput() throws IOException {
    int length = randomIntBetween(10, scaledRandomIntBetween(PAGE_SIZE * 2, PAGE_SIZE * 20));
    BytesReference pbr = newBytesReference(length);
    StreamInput si = pbr.streamInput();
    assertNotNull(si);
    // read single bytes one by one
    assertEquals(pbr.get(0), si.readByte());
    assertEquals(pbr.get(1), si.readByte());
    assertEquals(pbr.get(2), si.readByte());
    // reset the stream for bulk reading
    si.reset();
    // buffer for bulk reads; pre-filled with random bytes so unmodified slots are detectable
    byte[] origBuf = new byte[length];
    random().nextBytes(origBuf);
    byte[] targetBuf = Arrays.copyOf(origBuf, origBuf.length);
    // bulk-read 0 bytes: must not modify buffer
    si.readBytes(targetBuf, 0, 0);
    assertEquals(origBuf[0], targetBuf[0]);
    si.reset();
    // read a few bytes as ints (read() returns an unsigned value, hence the 0xff mask)
    int bytesToRead = randomIntBetween(1, length / 2);
    for (int i = 0; i < bytesToRead; i++) {
        int b = si.read();
        assertEquals(pbr.get(i) & 0xff, b);
    }
    si.reset();
    // bulk-read all
    si.readFully(targetBuf);
    assertArrayEquals(pbr.toBytes(), targetBuf);
    // continuing to read should now fail with EOFException
    try {
        si.readByte();
        fail("expected EOF");
    } catch (EOFException | IndexOutOfBoundsException eof) {
        // expected: stream is exhausted
    }
    // try to read more than the stream contains
    si.reset();
    expectThrows(IndexOutOfBoundsException.class, () -> si.readBytes(targetBuf, 0, length * 2));
}
/**
 * Deserializes index-level cluster health: shard counters, status byte, per-shard health
 * entries, then validation failures. Read order must mirror writeTo.
 */
@Override
public void readFrom(StreamInput in) throws IOException {
    index = in.readString();
    numberOfShards = in.readVInt();
    numberOfReplicas = in.readVInt();
    activePrimaryShards = in.readVInt();
    activeShards = in.readVInt();
    relocatingShards = in.readVInt();
    initializingShards = in.readVInt();
    unassignedShards = in.readVInt();
    status = ClusterHealthStatus.fromValue(in.readByte());
    int size = in.readVInt();
    for (int i = 0; i < size; i++) {
        ClusterShardHealth shardHealth = readClusterShardHealth(in);
        shards.put(shardHealth.getId(), shardHealth);
    }
    // size is reused for the validation-failure count
    size = in.readVInt();
    if (size == 0) {
        validationFailures = ImmutableList.of();
    } else {
        // NOTE(review): appends to the pre-existing validationFailures list — presumably
        // initialized to a mutable list elsewhere; confirm against the field declaration.
        for (int i = 0; i < size; i++) {
            validationFailures.add(in.readString());
        }
    }
}
/**
 * Deserializes an {@link IndexMetaData}: name, version, state, settings, then counted
 * sections of mappings, aliases, custom metadata (dispatched by type name via prototype
 * lookup) and active allocation ids keyed by shard number. Read order must mirror writeTo.
 */
@Override
public IndexMetaData readFrom(StreamInput in) throws IOException {
    Builder builder = new Builder(in.readString());
    builder.version(in.readLong());
    builder.state(State.fromId(in.readByte()));
    builder.settings(readSettingsFromStream(in));
    int mappingsSize = in.readVInt();
    for (int i = 0; i < mappingsSize; i++) {
        MappingMetaData mappingMd = MappingMetaData.PROTO.readFrom(in);
        builder.putMapping(mappingMd);
    }
    int aliasesSize = in.readVInt();
    for (int i = 0; i < aliasesSize; i++) {
        AliasMetaData aliasMd = AliasMetaData.Builder.readFrom(in);
        builder.putAlias(aliasMd);
    }
    int customSize = in.readVInt();
    for (int i = 0; i < customSize; i++) {
        // custom metadata is polymorphic: the type string selects the reader prototype
        String type = in.readString();
        Custom customIndexMetaData = lookupPrototypeSafe(type).readFrom(in);
        builder.putCustom(type, customIndexMetaData);
    }
    int activeAllocationIdsSize = in.readVInt();
    for (int i = 0; i < activeAllocationIdsSize; i++) {
        int key = in.readVInt();
        Set<String> allocationIds = DiffableUtils.StringSetValueSerializer.getInstance().read(in, key);
        builder.putActiveAllocationIds(key, allocationIds);
    }
    return builder.build();
}
/**
 * Deserializes a diff between two {@link IndexMetaData} instances: scalar fields first,
 * then map diffs for mappings, aliases, customs (polymorphic, resolved by prototype
 * lookup on the key) and active allocation ids. Read order must mirror the diff writer.
 */
public IndexMetaDataDiff(StreamInput in) throws IOException {
    index = in.readString();
    version = in.readLong();
    state = State.fromId(in.readByte());
    settings = Settings.readSettingsFromStream(in);
    mappings = DiffableUtils.readImmutableOpenMapDiff(
        in, DiffableUtils.getStringKeySerializer(), MappingMetaData.PROTO);
    aliases = DiffableUtils.readImmutableOpenMapDiff(
        in, DiffableUtils.getStringKeySerializer(), AliasMetaData.PROTO);
    customs = DiffableUtils.readImmutableOpenMapDiff(
        in,
        DiffableUtils.getStringKeySerializer(),
        // customs are polymorphic: the map key (type name) selects the reader prototype
        new DiffableUtils.DiffableValueSerializer<String, Custom>() {
            @Override
            public Custom read(StreamInput in, String key) throws IOException {
                return lookupPrototypeSafe(key).readFrom(in);
            }

            @Override
            public Diff<Custom> readDiff(StreamInput in, String key) throws IOException {
                return lookupPrototypeSafe(key).readDiffFrom(in);
            }
        });
    activeAllocationIds = DiffableUtils.readImmutableOpenIntMapDiff(
        in,
        DiffableUtils.getVIntKeySerializer(),
        DiffableUtils.StringSetValueSerializer.getInstance());
}
/**
 * Deserializes a shard-level search request (legacy readUTF-based wire format): target
 * shard coordinates, search type, optional scroll, source/extra-source bytes, then the
 * optional types and filtering-alias arrays. Read order must mirror writeTo.
 */
@Override
public void readFrom(StreamInput in) throws IOException {
    index = in.readUTF();
    shardId = in.readVInt();
    searchType = SearchType.fromId(in.readByte());
    numberOfShards = in.readVInt();
    if (in.readBoolean()) {
        scroll = readScroll(in);
    }
    source = in.readBytesReference();
    extraSource = in.readBytesReference();
    int typesSize = in.readVInt();
    if (typesSize > 0) {
        types = new String[typesSize];
        for (int i = 0; i < typesSize; i++) {
            types[i] = in.readUTF();
        }
    }
    // NOTE(review): asymmetric with types above — filteringAliases is explicitly nulled
    // when empty, while types keeps its previous value; presumably types defaults to an
    // empty array at the field declaration — confirm.
    int indicesSize = in.readVInt();
    if (indicesSize > 0) {
        filteringAliases = new String[indicesSize];
        for (int i = 0; i < indicesSize; i++) {
            filteringAliases[i] = in.readUTF();
        }
    } else {
        filteringAliases = null;
    }
    nowInMillis = in.readVLong();
}
/**
 * Reads the whole reference through its stream using a random mix of read styles
 * (single byte, BytesRef read, bulk array read with offset) and verifies the
 * reassembled bytes equal the original.
 */
public void testRandomReads() throws IOException {
    int length = randomIntBetween(10, scaledRandomIntBetween(PAGE_SIZE * 2, PAGE_SIZE * 20));
    BytesReference pbr = newBytesReference(length);
    StreamInput streamInput = pbr.streamInput();
    BytesRefBuilder target = new BytesRefBuilder();
    while (target.length() < pbr.length()) {
        switch (randomIntBetween(0, 10)) {
            case 6:
            case 5:
                // single-byte read
                target.append(new BytesRef(new byte[] {streamInput.readByte()}));
                break;
            case 4:
            case 3:
                // length-prefixed BytesRef read, capped at the remaining byte count
                BytesRef bytesRef =
                    streamInput.readBytesRef(scaledRandomIntBetween(1, pbr.length() - target.length()));
                target.append(bytesRef);
                break;
            default:
                // bulk read into a random offset; append only the bytes actually read
                byte[] buffer = new byte[scaledRandomIntBetween(1, pbr.length() - target.length())];
                int offset = scaledRandomIntBetween(0, buffer.length - 1);
                int read = streamInput.read(buffer, offset, buffer.length - offset);
                target.append(new BytesRef(buffer, offset, read));
                break;
        }
    }
    assertEquals(pbr.length(), target.length());
    BytesRef targetBytes = target.get();
    assertArrayEquals(
        pbr.toBytes(),
        Arrays.copyOfRange(targetBytes.bytes, targetBytes.offset, targetBytes.length));
}
/**
 * Verifies that a bulk read picks up exactly where prior single-byte reads left the
 * stream position — i.e. the remaining bytes match the tail of the reference.
 * NOTE(review): assertEquals calls here pass (actual, expected) — reversed from the
 * JUnit convention; harmless for passing tests but failure messages will be misleading.
 */
public void testStreamInputBulkReadWithOffset() throws IOException {
    final int length = randomIntBetween(10, scaledRandomIntBetween(PAGE_SIZE * 2, PAGE_SIZE * 20));
    BytesReference pbr = newBytesReference(length);
    StreamInput si = pbr.streamInput();
    assertNotNull(si);
    // read a bunch of single bytes one by one to advance the stream position
    int offset = randomIntBetween(1, length / 2);
    for (int i = 0; i < offset; i++) {
        assertEquals(si.available(), length - i);
        assertEquals(pbr.get(i), si.readByte());
    }
    // now do NOT reset the stream - keep the stream's offset!
    // buffer to compare remaining bytes against bulk read
    byte[] pbrBytesWithOffset = Arrays.copyOfRange(pbr.toBytes(), offset, length);
    // randomized target buffer to ensure no stale slots
    byte[] targetBytes = new byte[pbrBytesWithOffset.length];
    random().nextBytes(targetBytes);
    // bulk-read all remaining bytes
    si.readFully(targetBytes);
    assertArrayEquals(pbrBytesWithOffset, targetBytes);
    assertEquals(si.available(), 0);
}
/**
 * Deserializes cluster-level health (legacy readUTF wire format): cluster name, shard and
 * node counters, status byte, per-index health entries, timed-out flag, then validation
 * failures. Read order must mirror writeTo.
 */
@Override
public void readFrom(StreamInput in) throws IOException {
    clusterName = in.readUTF();
    activePrimaryShards = in.readVInt();
    activeShards = in.readVInt();
    relocatingShards = in.readVInt();
    initializingShards = in.readVInt();
    unassignedShards = in.readVInt();
    numberOfNodes = in.readVInt();
    numberOfDataNodes = in.readVInt();
    status = ClusterHealthStatus.fromValue(in.readByte());
    int size = in.readVInt();
    for (int i = 0; i < size; i++) {
        ClusterIndexHealth indexHealth = readClusterIndexHealth(in);
        indices.put(indexHealth.index(), indexHealth);
    }
    timedOut = in.readBoolean();
    // size is reused for the validation-failure count
    size = in.readVInt();
    if (size == 0) {
        validationFailures = ImmutableList.of();
    } else {
        // NOTE(review): appends to the pre-existing validationFailures list — presumably
        // initialized to a mutable list elsewhere; confirm against the field declaration.
        for (int i = 0; i < size; i++) {
            validationFailures.add(in.readUTF());
        }
    }
}
/**
 * Deserializes snapshot shard status: superclass fields, stage byte, stats, then the
 * optional node id and failure message. Read order must mirror writeTo.
 */
@Override
public void readFrom(StreamInput in) throws IOException {
    super.readFrom(in);
    stage = SnapshotIndexShardStage.fromValue(in.readByte());
    stats = SnapshotStats.readSnapshotStats(in);
    nodeId = in.readOptionalString();
    failure = in.readOptionalString();
}
/**
 * Deserializes an index request (legacy wire format with shared type strings): routing
 * metadata, ttl, source bytes, op type, refresh flag and versioning. The deserialized
 * source is always marked safe (sourceUnsafe = false) since it owns its bytes.
 * Read order must mirror writeTo.
 */
@Override
public void readFrom(StreamInput in) throws IOException {
    super.readFrom(in);
    type = in.readSharedString();
    id = in.readOptionalString();
    routing = in.readOptionalString();
    parent = in.readOptionalString();
    timestamp = in.readOptionalString();
    ttl = in.readLong();
    source = in.readBytesReference();
    sourceUnsafe = false;
    opType = OpType.fromId(in.readByte());
    refresh = in.readBoolean();
    version = in.readLong();
    versionType = VersionType.fromValue(in.readByte());
}
/**
 * Verifies stream reads over a slice of a BytesReference: available() bookkeeping,
 * single-byte and bulk reads, reset, and partial reads into an offset buffer all see
 * only the slice's window of the original bytes.
 */
public void testSliceStreamInput() throws IOException {
    int length = randomIntBetween(10, scaledRandomIntBetween(PAGE_SIZE * 2, PAGE_SIZE * 20));
    BytesReference pbr = newBytesReference(length);
    // test stream input over slice (upper half of original)
    int sliceOffset = randomIntBetween(1, length / 2);
    int sliceLength = length - sliceOffset;
    BytesReference slice = pbr.slice(sliceOffset, sliceLength);
    StreamInput sliceInput = slice.streamInput();
    assertEquals(sliceInput.available(), sliceLength);
    // single reads
    assertEquals(slice.get(0), sliceInput.readByte());
    assertEquals(slice.get(1), sliceInput.readByte());
    assertEquals(slice.get(2), sliceInput.readByte());
    assertEquals(sliceInput.available(), sliceLength - 3);
    // reset the slice stream for bulk reading
    sliceInput.reset();
    assertEquals(sliceInput.available(), sliceLength);
    // bulk read
    byte[] sliceBytes = new byte[sliceLength];
    sliceInput.readFully(sliceBytes);
    assertEquals(sliceInput.available(), 0);
    // compare slice content with upper half of original
    byte[] pbrSliceBytes = Arrays.copyOfRange(pbr.toBytes(), sliceOffset, length);
    assertArrayEquals(pbrSliceBytes, sliceBytes);
    // compare slice bytes with bytes read from slice via streamInput
    byte[] sliceToBytes = slice.toBytes();
    assertEquals(sliceBytes.length, sliceToBytes.length);
    assertArrayEquals(sliceBytes, sliceToBytes);
    sliceInput.reset();
    assertEquals(sliceInput.available(), sliceLength);
    // partial read into a larger buffer at a random offset, then read the remainder
    byte[] buffer = new byte[sliceLength + scaledRandomIntBetween(1, 100)];
    int offset = scaledRandomIntBetween(0, Math.max(1, buffer.length - sliceLength - 1));
    int read = sliceInput.read(buffer, offset, sliceLength / 2);
    assertEquals(sliceInput.available(), sliceLength - read);
    sliceInput.read(buffer, offset + read, sliceLength - read);
    assertArrayEquals(sliceBytes, Arrays.copyOfRange(buffer, offset, offset + sliceLength));
    assertEquals(sliceInput.available(), 0);
}
/**
 * Deserializes a {@link GapPolicy} from the stream by matching the serialized id byte
 * against the known enum constants.
 *
 * @param in the stream to read the one-byte policy id from
 * @return the matching GapPolicy constant
 * @throws IllegalStateException if the id matches no known policy
 */
public static GapPolicy readFrom(StreamInput in) throws IOException {
    final byte id = in.readByte();
    for (GapPolicy candidate : values()) {
        if (candidate.id == id) {
            return candidate;
        }
    }
    throw new IllegalStateException("Unknown GapPolicy with id [" + id + "]");
}
/**
 * Deserializes an index request (modern wire format): routing metadata, source bytes,
 * op type, versioning, pipeline and retry/timestamp fields. Streams from nodes older
 * than 6.0.0-alpha1 still carry timestamp/ttl fields, which are read and discarded
 * for compatibility. Read order must mirror writeTo.
 */
@Override
public void readFrom(StreamInput in) throws IOException {
    super.readFrom(in);
    type = in.readOptionalString();
    id = in.readOptionalString();
    routing = in.readOptionalString();
    parent = in.readOptionalString();
    if (in.getVersion().before(Version.V_6_0_0_alpha1_UNRELEASED)) {
        in.readOptionalString(); // timestamp
        in.readOptionalWriteable(TimeValue::new); // ttl
    }
    source = in.readBytesReference();
    opType = OpType.fromId(in.readByte());
    version = in.readLong();
    versionType = VersionType.fromValue(in.readByte());
    pipeline = in.readOptionalString();
    isRetry = in.readBoolean();
    autoGeneratedTimestamp = in.readLong();
}
/**
 * Deserializes benchmark metadata: a counted array of entries, each holding a benchmark
 * id, state byte and node name array. Read order must mirror writeTo.
 */
@Override
public BenchmarkMetaData readFrom(StreamInput in) throws IOException {
    Entry[] entries = new Entry[in.readVInt()];
    for (int i = 0; i < entries.length; i++) {
        String benchmarkId = in.readString();
        State state = State.fromId(in.readByte());
        String[] nodes = in.readStringArray();
        entries[i] = new Entry(benchmarkId, state, nodes);
    }
    return new BenchmarkMetaData(entries);
}
/**
 * Deserializes this object: name, the size field, sort id, then a counted list of
 * entries replacing any previous contents. Read order must mirror writeTo.
 *
 * <p>Fix: the entry-count local was previously declared as {@code int size}, shadowing
 * the {@code size} field assigned two statements earlier — legal but error-prone; the
 * local is renamed to make the two values unmistakable.
 */
@Override
public void readFrom(StreamInput in) throws IOException {
    name = in.readString();
    size = in.readVInt();
    sort = Sort.fromId(in.readByte());
    // number of serialized entries; distinct from the size field read above
    int entriesSize = in.readVInt();
    entries.clear();
    for (int i = 0; i < entriesSize; i++) {
        entries.add(Entry.read(in));
    }
}
/**
 * Deserializes this request (legacy readUTF wire format): type, id, optional routing
 * (boolean-prefixed), refresh flag and versioning. Read order must mirror writeTo.
 */
@Override
public void readFrom(StreamInput in) throws IOException {
    super.readFrom(in);
    type = in.readUTF();
    id = in.readUTF();
    if (in.readBoolean()) {
        routing = in.readUTF();
    }
    refresh = in.readBoolean();
    version = in.readLong();
    versionType = VersionType.fromValue(in.readByte());
}
/**
 * Handles a raw message arriving on the local transport. Decodes the shared header
 * (request id + status byte), then dispatches to request or response handling. On any
 * failure while processing, the error is routed back to the original sender's response
 * handler (on the sender's worker pool, with a stashed thread context) when a request id
 * is available; otherwise it is only logged.
 *
 * @param sendRequestId the id the sender used for this message, or null if unknown —
 *     determines whether failures can be propagated back as a RemoteTransportException
 */
protected void messageReceived(
    byte[] data,
    String action,
    LocalTransport sourceTransport,
    Version version,
    @Nullable final Long sendRequestId) {
    Transports.assertTransportThread();
    try {
        transportServiceAdapter.received(data.length);
        StreamInput stream = StreamInput.wrap(data);
        stream.setVersion(version);
        // shared header: request id then status flags
        long requestId = stream.readLong();
        byte status = stream.readByte();
        boolean isRequest = TransportStatus.isRequest(status);
        if (isRequest) {
            // requests carry serialized thread-context headers before the payload
            ThreadContext threadContext = threadPool.getThreadContext();
            threadContext.readHeaders(stream);
            handleRequest(stream, requestId, data.length, sourceTransport, version);
        } else {
            final TransportResponseHandler handler =
                transportServiceAdapter.onResponseReceived(requestId);
            // ignore if its null, the adapter logs it
            if (handler != null) {
                if (TransportStatus.isError(status)) {
                    handleResponseError(stream, handler);
                } else {
                    handleResponse(stream, sourceTransport, handler);
                }
            }
        }
    } catch (Throwable e) {
        if (sendRequestId != null) {
            // propagate the failure back to the sender's response handler
            TransportResponseHandler handler =
                sourceTransport.transportServiceAdapter.onResponseReceived(sendRequestId);
            if (handler != null) {
                RemoteTransportException error =
                    new RemoteTransportException(nodeName(), localAddress, action, e);
                sourceTransport
                    .workers()
                    .execute(
                        () -> {
                            // run the handler with a clean, restorable thread context
                            ThreadContext threadContext =
                                sourceTransport.threadPool.getThreadContext();
                            try (ThreadContext.StoredContext ignore = threadContext.stashContext()) {
                                sourceTransport.handleException(handler, error);
                            }
                        });
            }
        } else {
            // no way to notify the sender — log and drop
            logger.warn("Failed to receive message for action [{}]", e, action);
        }
    }
}
/**
 * Deserializes this request: superclass fields, an optional (boolean-prefixed) shard id,
 * write consistency level, timeout and index name. Read order must mirror writeTo.
 */
@Override
public void readFrom(StreamInput in) throws IOException {
    super.readFrom(in);
    if (in.readBoolean()) {
        shardId = ShardId.readShardId(in);
    } else {
        shardId = null;
    }
    consistencyLevel = WriteConsistencyLevel.fromId(in.readByte());
    timeout = TimeValue.readTimeValue(in);
    index = in.readString();
}
/**
 * Deserializes this object's wire fields: the execution phase, downstream phase id and
 * input id, then a counted list of downstream node names. Read order must mirror writeTo.
 */
@Override
public void readFrom(StreamInput in) throws IOException {
    executionPhase = ExecutionPhases.fromStream(in);
    downstreamExecutionPhaseId = in.readVInt();
    downstreamExecutionPhaseInputId = in.readByte();
    final int nodeCount = in.readVInt();
    final List<String> nodes = new ArrayList<>(nodeCount);
    for (int i = 0; i < nodeCount; i++) {
        nodes.add(in.readString());
    }
    this.downstreamNodes = nodes;
}
/**
 * Deserializes a get request (modern wire format): document coordinates, optional stored
 * fields, versioning and an optional fetch-source context. Read order must mirror writeTo.
 */
@Override
public void readFrom(StreamInput in) throws IOException {
    index = in.readString();
    type = in.readOptionalString();
    id = in.readString();
    routing = in.readOptionalString();
    parent = in.readOptionalString();
    storedFields = in.readOptionalStringArray();
    version = in.readLong();
    versionType = VersionType.fromValue(in.readByte());
    fetchSourceContext = in.readOptionalWriteable(FetchSourceContext::new);
}
/**
 * Deserializes this facet (legacy readUTF wire format): name, comparator byte, required
 * size, missing count, then a counted list of (value, count) int entries. Read order
 * must mirror writeTo.
 */
@Override
public void readFrom(StreamInput in) throws IOException {
    name = in.readUTF();
    comparatorType = ComparatorType.fromId(in.readByte());
    requiredSize = in.readVInt();
    missing = in.readVLong();
    int size = in.readVInt();
    entries = new ArrayList<IntEntry>(size);
    for (int i = 0; i < size; i++) {
        entries.add(new IntEntry(in.readInt(), in.readVInt()));
    }
}
/**
 * Deserializes this broadcast request (legacy readUTF wire format): a counted index-name
 * array (empty count maps to the shared empty array), then the operation-threading byte.
 * Read order must mirror writeTo.
 */
@Override
public void readFrom(StreamInput in) throws IOException {
    final int count = in.readVInt();
    if (count == 0) {
        indices = Strings.EMPTY_ARRAY;
    } else {
        final String[] read = new String[count];
        for (int i = 0; i < read.length; i++) {
            read[i] = in.readUTF();
        }
        indices = read;
    }
    operationThreading = BroadcastOperationThreading.fromId(in.readByte());
}
/**
 * Deserializes a cluster stats response: timestamp, optional health status, cluster UUID
 * and node/index stats. Read order must mirror writeTo.
 */
@Override
public void readFrom(StreamInput in) throws IOException {
    super.readFrom(in);
    timestamp = in.readVLong();
    status = null;
    if (in.readBoolean()) {
        // it may be that the master switched on us while doing the operation. In this case the status
        // may be null.
        status = ClusterHealthStatus.fromValue(in.readByte());
    }
    clusterUUID = in.readString();
    nodesStats = ClusterStatsNodes.readNodeStats(in);
    indicesStats = ClusterStatsIndices.readIndicesStats(in);
}
/**
 * Deserializes a shard-level search request (oldest wire format: raw byte-array source
 * with explicit lengths): shard coordinates, search type, optional scroll and timeout,
 * source and extra-source byte arrays, then optional types and filtering aliases.
 * Read order must mirror writeTo.
 */
@Override
public void readFrom(StreamInput in) throws IOException {
    index = in.readUTF();
    shardId = in.readVInt();
    searchType = SearchType.fromId(in.readByte());
    numberOfShards = in.readVInt();
    if (in.readBoolean()) {
        scroll = readScroll(in);
    }
    if (in.readBoolean()) {
        timeout = readTimeValue(in);
    }
    // source is stored as offset/length over a freshly read array
    sourceOffset = 0;
    sourceLength = in.readVInt();
    if (sourceLength == 0) {
        source = Bytes.EMPTY_ARRAY;
    } else {
        source = new byte[sourceLength];
        in.readFully(source);
    }
    extraSourceOffset = 0;
    extraSourceLength = in.readVInt();
    if (extraSourceLength == 0) {
        extraSource = Bytes.EMPTY_ARRAY;
    } else {
        extraSource = new byte[extraSourceLength];
        in.readFully(extraSource);
    }
    int typesSize = in.readVInt();
    if (typesSize > 0) {
        types = new String[typesSize];
        for (int i = 0; i < typesSize; i++) {
            types[i] = in.readUTF();
        }
    }
    // NOTE(review): asymmetric with types above — filteringAliases is explicitly nulled
    // when empty, while types keeps its previous value; confirm types has a non-null default.
    int indicesSize = in.readVInt();
    if (indicesSize > 0) {
        filteringAliases = new String[indicesSize];
        for (int i = 0; i < indicesSize; i++) {
            filteringAliases[i] = in.readUTF();
        }
    } else {
        filteringAliases = null;
    }
    nowInMillis = in.readVLong();
}
/**
 * Deserializes a get request (legacy fields-array wire format): document coordinates,
 * a counted field-name array (left untouched when the count is zero), versioning and an
 * optional fetch-source context. Read order must mirror writeTo.
 */
@Override
public void readFrom(StreamInput in) throws IOException {
    index = in.readString();
    type = in.readOptionalString();
    id = in.readString();
    routing = in.readOptionalString();
    int size = in.readVInt();
    if (size > 0) {
        fields = new String[size];
        for (int i = 0; i < size; i++) {
            fields[i] = in.readString();
        }
    }
    version = in.readLong();
    versionType = VersionType.fromValue(in.readByte());
    fetchSourceContext = FetchSourceContext.optionalReadFromStream(in);
}
/**
 * Deserializes a multi-get request: preference, refresh flag, a realtime byte, then the
 * counted item list. Read order must mirror writeTo.
 */
@Override
public void readFrom(StreamInput in) throws IOException {
    super.readFrom(in);
    preference = in.readOptionalString();
    refresh = in.readBoolean();
    // NOTE(review): only 0 and 1 are handled; any other byte silently leaves the
    // realtime field at its previous value — confirm whether the writer can emit
    // a third value (e.g. "unset") that this is meant to tolerate.
    byte realtime = in.readByte();
    if (realtime == 0) {
        this.realtime = false;
    } else if (realtime == 1) {
        this.realtime = true;
    }
    int size = in.readVInt();
    items = new ArrayList<>(size);
    for (int i = 0; i < size; i++) {
        items.add(Item.readItem(in));
    }
}
/**
 * Deserializes a {@link ValueFormatter} selected by its one-byte id. Stateless
 * formatters (raw, IPv4) are returned as shared singletons; stateful ones (date-time,
 * number pattern) are instantiated and then read their own state from the stream.
 *
 * @throws ElasticsearchIllegalArgumentException if the id matches no known formatter
 */
public static ValueFormatter read(StreamInput in) throws IOException {
    byte id = in.readByte();
    ValueFormatter formatter = null;
    switch (id) {
        case ValueFormatter.Raw.ID:
            return ValueFormatter.RAW;
        case ValueFormatter.IPv4Formatter.ID:
            return ValueFormatter.IPv4;
        case ValueFormatter.DateTime.ID:
            formatter = new ValueFormatter.DateTime();
            break;
        case ValueFormatter.Number.Pattern.ID:
            formatter = new ValueFormatter.Number.Pattern();
            break;
        default:
            throw new ElasticsearchIllegalArgumentException(
                "Unknown value formatter with id [" + id + "]");
    }
    // stateful formatters deserialize their own configuration
    formatter.readFrom(in);
    return formatter;
}
/**
 * Deserializes an {@link IndexMetaData} (legacy readUTF wire format): name, version,
 * state, settings, then counted sections of mappings, aliases and custom metadata
 * (dispatched by type name via factory lookup). Read order must mirror writeTo.
 */
public static IndexMetaData readFrom(StreamInput in) throws IOException {
    Builder builder = new Builder(in.readUTF());
    builder.version(in.readLong());
    builder.state(State.fromId(in.readByte()));
    builder.settings(readSettingsFromStream(in));
    int mappingsSize = in.readVInt();
    for (int i = 0; i < mappingsSize; i++) {
        MappingMetaData mappingMd = MappingMetaData.readFrom(in);
        builder.putMapping(mappingMd);
    }
    int aliasesSize = in.readVInt();
    for (int i = 0; i < aliasesSize; i++) {
        AliasMetaData aliasMd = AliasMetaData.Builder.readFrom(in);
        builder.putAlias(aliasMd);
    }
    int customSize = in.readVInt();
    for (int i = 0; i < customSize; i++) {
        // custom metadata is polymorphic: the type string selects the reader factory
        String type = in.readUTF();
        Custom customIndexMetaData = lookupFactorySafe(type).readFrom(in);
        builder.putCustom(type, customIndexMetaData);
    }
    return builder.build();
}