MetaInfoObjExtractor(String codecStr, int bufferSize, int metadataSize,
    ByteBuffer footerBuffer) throws IOException {
  this.compressionKind = CompressionKind.valueOf(codecStr);
  this.bufferSize = bufferSize;
  this.codec = WriterImpl.createCodec(compressionKind);
  this.metadataSize = metadataSize;

  // the buffer holds the metadata section followed by the footer;
  // parse the metadata first, then advance to the footer
  int position = footerBuffer.position();
  int footerBufferSize = footerBuffer.limit() - footerBuffer.position() - metadataSize;
  footerBuffer.limit(position + metadataSize);

  InputStream instream = InStream.create("metadata",
      Lists.<DiskRange>newArrayList(new BufferChunk(footerBuffer, 0)),
      metadataSize, codec, bufferSize);
  this.metadata = OrcProto.Metadata.parseFrom(instream);

  footerBuffer.position(position + metadataSize);
  footerBuffer.limit(position + metadataSize + footerBufferSize);
  instream = InStream.create("footer",
      Lists.<DiskRange>newArrayList(new BufferChunk(footerBuffer, 0)),
      footerBufferSize, codec, bufferSize);
  this.footer = OrcProto.Footer.parseFrom(instream);

  footerBuffer.position(position);
  this.inspector = OrcStruct.createObjectInspector(0, footer.getTypesList());
}
private void readValues(boolean ignoreEof) throws IOException {
  int control = input.read();
  used = 0;
  if (control == -1) {
    if (!ignoreEof) {
      throw new EOFException("Read past end of buffer RLE byte from " + input);
    }
    used = numLiterals = 0;
    return;
  } else if (control < 0x80) {
    // control bytes below 0x80 encode a run: the next byte is repeated
    // (control + MIN_REPEAT_SIZE) times
    repeat = true;
    numLiterals = control + RunLengthByteWriter.MIN_REPEAT_SIZE;
    int val = input.read();
    if (val == -1) {
      throw new EOFException("Reading RLE byte got EOF");
    }
    literals[0] = (byte) val;
  } else {
    // control bytes of 0x80 and above encode (0x100 - control) literal bytes
    repeat = false;
    numLiterals = 0x100 - control;
    int bytes = 0;
    while (bytes < numLiterals) {
      int result = input.read(literals, bytes, numLiterals - bytes);
      if (result == -1) {
        throw new EOFException("Reading RLE byte literal got EOF in " + this);
      }
      bytes += result;
    }
  }
}
/** Test <code>long skip(long len)</code>. */
@Test
public void skipTest() throws IOException {
  for (int k = MIN_LEN + DELTA; k <= MAX_LEN; k += DELTA) {
    WriteType op = WriteType.THROUGH;
    int fileId = TestUtils.createByteFile(mTfs, "/root/testFile_" + k + "_" + op, op, k);

    TachyonFile file = mTfs.getFile(fileId);
    InStream is = file.getInStream(ReadType.CACHE);
    Assert.assertTrue(is instanceof RemoteBlockInStream);
    Assert.assertEquals(k / 2, is.skip(k / 2));
    Assert.assertEquals(k / 2, is.read());
    is.close();
    Assert.assertFalse(file.isInMemory());

    if (k >= 3) {
      is = file.getInStream(ReadType.CACHE);
      Assert.assertTrue(is instanceof RemoteBlockInStream);
      int t = k / 3;
      Assert.assertEquals(t, is.skip(t));
      Assert.assertEquals(t, is.read());
      Assert.assertEquals(t, is.skip(t));
      Assert.assertEquals(2 * t + 1, is.read());
      is.close();
      Assert.assertFalse(file.isInMemory());
    }
  }
}
/** Test <code>long skip(long len)</code>. */
@Test
public void skipTest() throws IOException {
  for (int k = 10; k <= 200; k += 33) {
    for (WriteType op : WriteType.values()) {
      int fileId = TestUtils.createByteFile(mTfs, "/root/testFile_" + k + "_" + op, op, k);

      TachyonFile file = mTfs.getFile(fileId);
      InStream is =
          (k < 100 ? file.getInStream(ReadType.CACHE) : file.getInStream(ReadType.NO_CACHE));
      Assert.assertTrue(is instanceof BlockInStream);
      Assert.assertEquals(k / 2, is.skip(k / 2));
      Assert.assertEquals(k / 2, is.read());
      is.close();

      is = (k < 100 ? file.getInStream(ReadType.CACHE) : file.getInStream(ReadType.NO_CACHE));
      Assert.assertTrue(is instanceof BlockInStream);
      int t = k / 3;
      Assert.assertEquals(t, is.skip(t));
      Assert.assertEquals(t, is.read());
      Assert.assertEquals(t, is.skip(t));
      Assert.assertEquals((byte) (2 * t + 1), is.read());
      is.close();
    }
  }
}
public ReaderImpl(FileSystem fs, Path path, Configuration conf) throws IOException {
  this.fileSystem = fs;
  this.path = path;
  this.conf = conf;

  FSDataInputStream file = fs.open(path);
  long size = fs.getFileStatus(path).getLen();
  int readSize = (int) Math.min(size, DIRECTORY_SIZE_GUESS);
  ByteBuffer buffer = ByteBuffer.allocate(readSize);
  InStream.read(file, size - readSize, buffer.array(),
      buffer.arrayOffset() + buffer.position(), buffer.remaining());

  // the last byte of the file is the PostScript length, stored as an unsigned byte
  int psLen = buffer.get(readSize - 1) & 0xff;
  int psOffset = readSize - 1 - psLen;
  CodedInputStream in =
      CodedInputStream.newInstance(buffer.array(), buffer.arrayOffset() + psOffset, psLen);
  OrcProto.PostScript ps = OrcProto.PostScript.parseFrom(in);
  int footerSize = (int) ps.getFooterLength();
  bufferSize = (int) ps.getCompressionBlockSize();
  switch (ps.getCompression()) {
    case NONE:
      compressionKind = CompressionKind.NONE;
      break;
    case ZLIB:
      compressionKind = CompressionKind.ZLIB;
      break;
    case SNAPPY:
      compressionKind = CompressionKind.SNAPPY;
      break;
    case LZO:
      compressionKind = CompressionKind.LZO;
      break;
    default:
      throw new IllegalArgumentException("Unknown compression");
  }
  codec = WriterImpl.createCodec(compressionKind);

  InputStream instream = InStream.create("footer", file,
      size - 1 - psLen - footerSize, footerSize, codec, bufferSize);
  footer = OrcProto.Footer.parseFrom(instream);
  inspector = new OrcLazyRowObjectInspector(0, footer.getTypesList());
  file.close();
}
/** Test <code>void read()</code>. */
@Test
public void readTest1() throws IOException {
  for (int k = 100; k <= 200; k += 33) {
    for (WriteType op : WriteType.values()) {
      int fileId = TestUtils.createByteFile(mTfs, "/root/testFile_" + k + "_" + op, op, k);

      TachyonFile file = mTfs.getFile(fileId);
      InStream is =
          (k < 150 ? file.getInStream(ReadType.CACHE) : file.getInStream(ReadType.NO_CACHE));
      Assert.assertTrue(is instanceof BlockInStream);
      byte[] ret = new byte[k];
      int value = is.read();
      int cnt = 0;
      while (value != -1) {
        ret[cnt++] = (byte) value;
        value = is.read();
      }
      Assert.assertTrue(TestUtils.equalIncreasingByteArray(k, ret));
      is.close();

      is = (k < 150 ? file.getInStream(ReadType.CACHE) : file.getInStream(ReadType.NO_CACHE));
      Assert.assertTrue(is instanceof BlockInStream);
      ret = new byte[k];
      value = is.read();
      cnt = 0;
      while (value != -1) {
        ret[cnt++] = (byte) value;
        value = is.read();
      }
      Assert.assertTrue(TestUtils.equalIncreasingByteArray(k, ret));
      is.close();
    }
  }
}
@Test
public void testCompressed() throws Exception {
  OutputCollector collect = new OutputCollector();
  CompressionCodec codec = new ZlibCodec();
  OutStream out = new OutStream("test", 300, codec, collect);
  PositionCollector[] positions = new PositionCollector[1024];
  for (int i = 0; i < 1024; ++i) {
    positions[i] = new PositionCollector();
    out.getPosition(positions[i]);
    out.write(i);
  }
  out.flush();
  assertEquals("test", out.toString());
  assertEquals(961, collect.buffer.size());
  ByteBuffer inBuf = ByteBuffer.allocate(collect.buffer.size());
  collect.buffer.setByteBuffer(inBuf, 0, collect.buffer.size());
  inBuf.flip();
  InStream in = InStream.create("test", new ByteBuffer[] {inBuf}, new long[] {0},
      inBuf.remaining(), codec, 300);
  assertEquals("compressed stream test position: 0 length: 961 range: 0"
      + " offset: 0 limit: 0 range 0 = 0 to 961", in.toString());
  for (int i = 0; i < 1024; ++i) {
    int x = in.read();
    assertEquals(i & 0xff, x);
  }
  assertEquals(0, in.available());
  for (int i = 1023; i >= 0; --i) {
    in.seek(positions[i]);
    assertEquals(i & 0xff, in.read());
  }
}
@Test
public void testCorruptStream() throws Exception {
  OutputCollector collect = new OutputCollector();
  CompressionCodec codec = new ZlibCodec();
  OutStream out = new OutStream("test", 500, codec, collect);
  PositionCollector[] positions = new PositionCollector[1024];
  for (int i = 0; i < 1024; ++i) {
    positions[i] = new PositionCollector();
    out.getPosition(positions[i]);
    out.write(i);
  }
  out.flush();

  // now try to read the stream with a buffer that is too small
  ByteBuffer inBuf = ByteBuffer.allocate(collect.buffer.size());
  collect.buffer.setByteBuffer(inBuf, 0, collect.buffer.size());
  inBuf.flip();
  InStream in = InStream.create("test", new ByteBuffer[] {inBuf}, new long[] {0},
      inBuf.remaining(), codec, 100);
  byte[] contents = new byte[1024];
  try {
    in.read(contents);
    fail();
  } catch (IllegalArgumentException iae) {
    // EXPECTED
  }

  // make a corrupted header
  inBuf.clear();
  inBuf.put((byte) 32);
  inBuf.put((byte) 0);
  inBuf.flip();
  in = InStream.create("test2", new ByteBuffer[] {inBuf}, new long[] {0},
      inBuf.remaining(), codec, 300);
  try {
    in.read();
    fail();
  } catch (IllegalStateException ise) {
    // EXPECTED
  }
}
/** Test <code>void read(byte[] b, int off, int len)</code>. Read from remote data server. */
@Test
public void readTest6() throws IOException {
  for (int k = MIN_LEN + DELTA; k <= MAX_LEN; k += DELTA) {
    WriteType op = WriteType.MUST_CACHE;
    int fileId = TestUtils.createByteFile(mTfs, "/root/testFile_" + k + "_" + op, op, k);

    TachyonFile file = mTfs.getFile(fileId);
    InStream is = new RemoteBlockInStream(file, ReadType.NO_CACHE, 0);
    Assert.assertTrue(is instanceof RemoteBlockInStream);
    byte[] ret = new byte[k / 2];
    int start = 0;
    while (start < k / 2) {
      int read = is.read(ret, 0, (k / 2) - start);
      Assert.assertTrue(TestUtils.equalIncreasingByteArray(start, read, ret));
      start += read;
    }
    is.close();
    Assert.assertTrue(file.isInMemory());
  }
}
/** Test <code>void read(byte b[])</code>. Read from underfs. */
@Test
public void readTest7() throws IOException {
  for (int k = MIN_LEN + DELTA; k <= MAX_LEN; k += DELTA) {
    WriteType op = WriteType.THROUGH;
    int fileId = TestUtils.createByteFile(mTfs, "/root/testFile_" + k + "_" + op, op, k);

    TachyonFile file = mTfs.getFile(fileId);
    InStream is = file.getInStream(ReadType.NO_CACHE);
    if (k == 0) {
      Assert.assertTrue(is instanceof EmptyBlockInStream);
    } else {
      Assert.assertTrue(is instanceof RemoteBlockInStream);
    }
    byte[] ret = new byte[k];
    Assert.assertEquals(k, is.read(ret));
    Assert.assertTrue(TestUtils.equalIncreasingByteArray(k, ret));
    Assert.assertEquals(-1, is.read(ret));
    is.close();
    Assert.assertFalse(file.isInMemory());
  }
}
@Test
public void testUncompressed() throws Exception {
  OutputCollector collect = new OutputCollector();
  OutStream out = new OutStream("test", 100, null, collect);
  PositionCollector[] positions = new PositionCollector[1024];
  for (int i = 0; i < 1024; ++i) {
    positions[i] = new PositionCollector();
    out.getPosition(positions[i]);
    out.write(i);
  }
  out.flush();
  assertEquals(1024, collect.buffer.size());
  for (int i = 0; i < 1024; ++i) {
    assertEquals((byte) i, collect.buffer.get(i));
  }
  ByteBuffer inBuf = ByteBuffer.allocate(collect.buffer.size());
  collect.buffer.setByteBuffer(inBuf, 0, collect.buffer.size());
  inBuf.flip();
  InStream in = InStream.create("test", new ByteBuffer[] {inBuf}, new long[] {0},
      inBuf.remaining(), null, 100);
  assertEquals("uncompressed stream test position: 0 length: 1024"
      + " range: 0 offset: 0 limit: 0", in.toString());
  for (int i = 0; i < 1024; ++i) {
    int x = in.read();
    assertEquals(i & 0xff, x);
  }
  for (int i = 1023; i >= 0; --i) {
    in.seek(positions[i]);
    assertEquals(i & 0xff, in.read());
  }
}
/**
 * Test <code>void seek(long pos)</code>.
 *
 * @throws IOException
 */
@Test
public void seekExceptionTest() throws IOException {
  for (int k = MIN_LEN; k <= MAX_LEN; k += DELTA) {
    WriteType op = WriteType.THROUGH;
    int fileId = TestUtils.createByteFile(mTfs, "/root/testFile_" + k + "_" + op, op, k);

    TachyonFile file = mTfs.getFile(fileId);
    InStream is = file.getInStream(ReadType.NO_CACHE);
    if (k == 0) {
      Assert.assertTrue(is instanceof EmptyBlockInStream);
    } else {
      Assert.assertTrue(is instanceof RemoteBlockInStream);
    }

    try {
      is.seek(-1);
    } catch (IOException e) {
      // This is expected
      continue;
    }
    is.close();
    throw new IOException("Expected seek IOException");
  }
}
/** Test <code>void read(byte[] b, int off, int len)</code>. */
@Test
public void readTest3() throws IOException {
  for (int k = 100; k <= 300; k += 33) {
    for (WriteType op : WriteType.values()) {
      int fileId = TestUtils.createByteFile(mTfs, "/root/testFile_" + k + "_" + op, op, k);

      TachyonFile file = mTfs.getFile(fileId);
      InStream is =
          (k < 200 ? file.getInStream(ReadType.CACHE) : file.getInStream(ReadType.NO_CACHE));
      Assert.assertTrue(is instanceof BlockInStream);
      byte[] ret = new byte[k / 2];
      Assert.assertEquals(k / 2, is.read(ret, 0, k / 2));
      Assert.assertTrue(TestUtils.equalIncreasingByteArray(k / 2, ret));
      is.close();

      is = (k < 200 ? file.getInStream(ReadType.CACHE) : file.getInStream(ReadType.NO_CACHE));
      Assert.assertTrue(is instanceof BlockInStream);
      ret = new byte[k];
      Assert.assertEquals(k, is.read(ret, 0, k));
      Assert.assertTrue(TestUtils.equalIncreasingByteArray(k, ret));
      is.close();
    }
  }
}
public void seek(PositionProvider index) throws IOException {
  input.seek(index);
  int consumed = (int) index.getNext();
  if (consumed != 0) {
    // a loop is required for cases where we break the run into two parts
    while (consumed > 0) {
      readValues(false);
      used = consumed;
      consumed -= numLiterals;
    }
  } else {
    used = 0;
    numLiterals = 0;
  }
}
private void readValues(boolean ignoreEof) throws IOException {
  int control = input.read();
  if (control == -1) {
    if (!ignoreEof) {
      throw new EOFException("Read past end of RLE integer from " + input);
    }
    used = numLiterals = 0;
    return;
  } else if (control < 0x80) {
    numLiterals = control + RunLengthIntegerWriter.MIN_REPEAT_SIZE;
    used = 0;
    repeat = true;
    delta = input.read();
    if (delta == -1) {
      throw new EOFException("End of stream in RLE Integer from " + input);
    }
    // convert from 0 to 255 to -128 to 127 by converting to a signed byte
    delta = (byte) (0 + delta);
    if (signed) {
      literals[0] = utils.readVslong(input);
    } else {
      literals[0] = utils.readVulong(input);
    }
  } else {
    repeat = false;
    numLiterals = 0x100 - control;
    used = 0;
    for (int i = 0; i < numLiterals; ++i) {
      if (signed) {
        literals[i] = utils.readVslong(input);
      } else {
        literals[i] = utils.readVulong(input);
      }
    }
  }
}
/**
 * Test <code>void seek(long pos)</code>.
 *
 * @throws IOException
 */
@Test
public void seekTest() throws IOException {
  for (int k = MIN_LEN + DELTA; k <= MAX_LEN; k += DELTA) {
    WriteType op = WriteType.THROUGH;
    int fileId = TestUtils.createByteFile(mTfs, "/root/testFile_" + k + "_" + op, op, k);

    TachyonFile file = mTfs.getFile(fileId);
    InStream is = file.getInStream(ReadType.NO_CACHE);
    if (k == 0) {
      Assert.assertTrue(is instanceof EmptyBlockInStream);
    } else {
      Assert.assertTrue(is instanceof RemoteBlockInStream);
    }

    is.seek(k / 3);
    Assert.assertEquals(k / 3, is.read());
    is.seek(k / 2);
    Assert.assertEquals(k / 2, is.read());
    is.seek(k / 4);
    Assert.assertEquals(k / 4, is.read());
    is.close();
  }
}
public void readRect(Rect r, CMsgHandler handler) {
  InStream is = reader.getInStream();
  int[] buf = reader.getImageBuf(64 * 64 * 4);
  int bpp = handler.cp.pf().bpp;
  int bytesPerPixel = (bpp > 24 ? 3 : bpp / 8);
  boolean bigEndian = handler.cp.pf().bigEndian;

  int length = is.readU32();
  zis.setUnderlying(is, length);
  Rect t = new Rect();

  for (t.tl.y = r.tl.y; t.tl.y < r.br.y; t.tl.y += 64) {
    t.br.y = Math.min(r.br.y, t.tl.y + 64);

    for (t.tl.x = r.tl.x; t.tl.x < r.br.x; t.tl.x += 64) {
      t.br.x = Math.min(r.br.x, t.tl.x + 64);

      int mode = zis.readU8();
      boolean rle = (mode & 128) != 0;
      int palSize = mode & 127;
      int[] palette = new int[128];

      zis.readPixels(palette, palSize, bytesPerPixel, bigEndian);

      if (palSize == 1) {
        int pix = palette[0];
        handler.fillRect(t, pix);
        continue;
      }

      if (!rle) {
        if (palSize == 0) {
          // raw
          zis.readPixels(buf, t.area(), bytesPerPixel, bigEndian);
        } else {
          // packed pixels
          int bppp = ((palSize > 16) ? 8 : ((palSize > 4) ? 4 : ((palSize > 2) ? 2 : 1)));
          int ptr = 0;
          for (int i = 0; i < t.height(); i++) {
            int eol = ptr + t.width();
            int b = 0;
            int nbits = 0;
            while (ptr < eol) {
              if (nbits == 0) {
                b = zis.readU8();
                nbits = 8;
              }
              nbits -= bppp;
              int index = (b >> nbits) & ((1 << bppp) - 1) & 127;
              buf[ptr++] = palette[index];
            }
          }
        }
      } else {
        if (palSize == 0) {
          // plain RLE
          int ptr = 0;
          int end = ptr + t.area();
          while (ptr < end) {
            int pix = zis.readPixel(bytesPerPixel, bigEndian);
            int len = 1;
            int b;
            do {
              b = zis.readU8();
              len += b;
            } while (b == 255);

            if (!(len <= end - ptr))
              throw new Exception("ZRLEDecoder: assertion (len <= end - ptr) failed");

            while (len-- > 0) buf[ptr++] = pix;
          }
        } else {
          // palette RLE
          int ptr = 0;
          int end = ptr + t.area();
          while (ptr < end) {
            int index = zis.readU8();
            int len = 1;
            if ((index & 128) != 0) {
              int b;
              do {
                b = zis.readU8();
                len += b;
              } while (b == 255);

              if (!(len <= end - ptr))
                throw new Exception("ZRLEDecoder: assertion (len <= end - ptr) failed");
            }
            index &= 127;
            int pix = palette[index];
            while (len-- > 0) buf[ptr++] = pix;
          }
        }
      }
      handler.imageRect(t, buf);
    }
  }
  zis.reset();
}
@Test
public void testUncompressedDisjointBuffers() throws Exception {
  OutputCollector collect = new OutputCollector();
  OutStream out = new OutStream("test", 400, null, collect);
  PositionCollector[] positions = new PositionCollector[1024];
  DataOutput stream = new DataOutputStream(out);
  for (int i = 0; i < 1024; ++i) {
    positions[i] = new PositionCollector();
    out.getPosition(positions[i]);
    stream.writeInt(i);
  }
  out.flush();
  assertEquals("test", out.toString());
  assertEquals(4096, collect.buffer.size());
  ByteBuffer[] inBuf = new ByteBuffer[3];
  inBuf[0] = ByteBuffer.allocate(1100);
  inBuf[1] = ByteBuffer.allocate(2200);
  inBuf[2] = ByteBuffer.allocate(1100);
  collect.buffer.setByteBuffer(inBuf[0], 0, 1024);
  collect.buffer.setByteBuffer(inBuf[1], 1024, 2048);
  collect.buffer.setByteBuffer(inBuf[2], 3072, 1024);
  for (int i = 0; i < inBuf.length; ++i) {
    inBuf[i].flip();
  }
  InStream in = InStream.create("test", inBuf, new long[] {0, 1024, 3072}, 4096, null, 400);
  assertEquals("uncompressed stream test position: 0 length: 4096"
      + " range: 0 offset: 0 limit: 0", in.toString());
  DataInputStream inStream = new DataInputStream(in);
  for (int i = 0; i < 1024; ++i) {
    int x = inStream.readInt();
    assertEquals(i, x);
  }
  assertEquals(0, in.available());
  for (int i = 1023; i >= 0; --i) {
    in.seek(positions[i]);
    assertEquals(i, inStream.readInt());
  }

  in = InStream.create("test", new ByteBuffer[] {inBuf[1], inBuf[2]},
      new long[] {1024, 3072}, 4096, null, 400);
  inStream = new DataInputStream(in);
  positions[256].reset();
  in.seek(positions[256]);
  for (int i = 256; i < 1024; ++i) {
    assertEquals(i, inStream.readInt());
  }

  in = InStream.create("test", new ByteBuffer[] {inBuf[0], inBuf[2]},
      new long[] {0, 3072}, 4096, null, 400);
  inStream = new DataInputStream(in);
  positions[768].reset();
  for (int i = 0; i < 256; ++i) {
    assertEquals(i, inStream.readInt());
  }
  in.seek(positions[768]);
  for (int i = 768; i < 1024; ++i) {
    assertEquals(i, inStream.readInt());
  }
}
@Override
public boolean hasNext() throws IOException {
  return used != numLiterals || input.available() > 0;
}
/** Test <code>void read()</code>. Read from underfs. */
@Test
public void readTest1() throws IOException {
  for (int k = MIN_LEN; k <= MAX_LEN; k += DELTA) {
    WriteType op = WriteType.THROUGH;
    int fileId = TestUtils.createByteFile(mTfs, "/root/testFile_" + k + "_" + op, op, k);

    TachyonFile file = mTfs.getFile(fileId);
    InStream is = file.getInStream(ReadType.NO_CACHE);
    if (k == 0) {
      Assert.assertTrue(is instanceof EmptyBlockInStream);
    } else {
      Assert.assertTrue(is instanceof RemoteBlockInStream);
    }
    byte[] ret = new byte[k];
    int value = is.read();
    int cnt = 0;
    while (value != -1) {
      ret[cnt++] = (byte) value;
      value = is.read();
    }
    Assert.assertTrue(TestUtils.equalIncreasingByteArray(k, ret));
    is.close();
    if (k == 0) {
      Assert.assertTrue(file.isInMemory());
    } else {
      Assert.assertFalse(file.isInMemory());
    }

    is = file.getInStream(ReadType.CACHE);
    if (k == 0) {
      Assert.assertTrue(is instanceof EmptyBlockInStream);
    } else {
      Assert.assertTrue(is instanceof RemoteBlockInStream);
    }
    ret = new byte[k];
    value = is.read();
    cnt = 0;
    while (value != -1) {
      ret[cnt++] = (byte) value;
      value = is.read();
    }
    Assert.assertTrue(TestUtils.equalIncreasingByteArray(k, ret));
    is.close();
    Assert.assertTrue(file.isInMemory());

    is = file.getInStream(ReadType.CACHE);
    if (k == 0) {
      Assert.assertTrue(is instanceof EmptyBlockInStream);
    } else {
      Assert.assertTrue(is instanceof LocalBlockInStream);
    }
    ret = new byte[k];
    value = is.read();
    cnt = 0;
    while (value != -1) {
      ret[cnt++] = (byte) value;
      value = is.read();
    }
    Assert.assertTrue(TestUtils.equalIncreasingByteArray(k, ret));
    is.close();
    Assert.assertTrue(file.isInMemory());
  }
}
@Test
public void testDisjointBuffers() throws Exception {
  OutputCollector collect = new OutputCollector();
  CompressionCodec codec = new ZlibCodec();
  OutStream out = new OutStream("test", 400, codec, collect);
  PositionCollector[] positions = new PositionCollector[1024];
  DataOutput stream = new DataOutputStream(out);
  for (int i = 0; i < 1024; ++i) {
    positions[i] = new PositionCollector();
    out.getPosition(positions[i]);
    stream.writeInt(i);
  }
  out.flush();
  assertEquals("test", out.toString());
  assertEquals(1674, collect.buffer.size());
  ByteBuffer[] inBuf = new ByteBuffer[3];
  inBuf[0] = ByteBuffer.allocate(500);
  inBuf[1] = ByteBuffer.allocate(1200);
  inBuf[2] = ByteBuffer.allocate(500);
  collect.buffer.setByteBuffer(inBuf[0], 0, 483);
  collect.buffer.setByteBuffer(inBuf[1], 483, 1625 - 483);
  collect.buffer.setByteBuffer(inBuf[2], 1625, 1674 - 1625);
  for (int i = 0; i < inBuf.length; ++i) {
    inBuf[i].flip();
  }
  InStream in = InStream.create("test", inBuf, new long[] {0, 483, 1625}, 1674, codec, 400);
  assertEquals("compressed stream test position: 0 length: 1674 range: 0"
      + " offset: 0 limit: 0 range 0 = 0 to 483;"
      + " range 1 = 483 to 1142; range 2 = 1625 to 49", in.toString());
  DataInputStream inStream = new DataInputStream(in);
  for (int i = 0; i < 1024; ++i) {
    int x = inStream.readInt();
    assertEquals(i, x);
  }
  assertEquals(0, in.available());
  for (int i = 1023; i >= 0; --i) {
    in.seek(positions[i]);
    assertEquals(i, inStream.readInt());
  }

  in = InStream.create("test", new ByteBuffer[] {inBuf[1], inBuf[2]},
      new long[] {483, 1625}, 1674, codec, 400);
  inStream = new DataInputStream(in);
  positions[303].reset();
  in.seek(positions[303]);
  for (int i = 303; i < 1024; ++i) {
    assertEquals(i, inStream.readInt());
  }

  in = InStream.create("test", new ByteBuffer[] {inBuf[0], inBuf[2]},
      new long[] {0, 1625}, 1674, codec, 400);
  inStream = new DataInputStream(in);
  positions[1001].reset();
  for (int i = 0; i < 300; ++i) {
    assertEquals(i, inStream.readInt());
  }
  in.seek(positions[1001]);
  for (int i = 1001; i < 1024; ++i) {
    assertEquals(i, inStream.readInt());
  }
}