/**
 * Maps the blah file at a random offset and checks that a read from the ByteBuffer
 * returns the right line number.
 */
private static void testRead() throws Exception {
  StringBuilder sb = new StringBuilder();
  sb.setLength(4);
  for (int x = 0; x < 1000; x++) {
    try (FileInputStream fis = new FileInputStream(blah)) {
      FileChannel fc = fis.getChannel();
      long offset = generator.nextInt(10000);
      long expectedResult = offset / CHARS_PER_LINE;
      offset = expectedResult * CHARS_PER_LINE;
      MappedByteBuffer b = fc.map(MapMode.READ_ONLY, offset, 100);
      for (int i = 0; i < 4; i++) {
        byte aByte = b.get(i);
        sb.setCharAt(i, (char) aByte);
      }
      int result = Integer.parseInt(sb.toString());
      if (result != expectedResult) {
        err.println("I expected " + expectedResult);
        err.println("I got " + result);
        throw new Exception("Read test failed");
      }
    }
  }
}
public static void main(String[] args) throws Exception {
  fc = new RandomAccessFile("test.dat", "rw").getChannel();
  MappedByteBuffer out = fc.map(FileChannel.MapMode.READ_WRITE, 0, LENGTH);
  for (int i = 0; i < LENGTH; i++) {
    out.put((byte) 'x');
  }
  new LockAndModify(out, 0, 0 + LENGTH / 3);
  new LockAndModify(out, LENGTH / 2, LENGTH / 2 + LENGTH / 4);
}
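/*
 * LockAndModify is referenced in the main() above but is not defined in this snippet.
 * The following is a minimal sketch of what such a worker could look like, assuming the
 * shared static FileChannel field fc used above: each thread takes an exclusive FileLock
 * on its region and rewrites its slice of the mapped buffer. The constructor signature
 * matches the calls above; the field names and the byte written are illustrative
 * assumptions, not the original code.
 */
private static class LockAndModify extends Thread {
  private final ByteBuffer slice;
  private final int start, end;

  LockAndModify(MappedByteBuffer mbb, int start, int end) {
    this.start = start;
    this.end = end;
    mbb.limit(end);
    mbb.position(start);
    this.slice = mbb.slice();
    start();
  }

  @Override
  public void run() {
    try {
      // Exclusive lock on [start, end) of the underlying file.
      FileLock lock = fc.lock(start, end - start, false);
      while (slice.hasRemaining()) {
        slice.put((byte) 'y'); // overwrite the locked region with a marker byte
      }
      lock.release();
    } catch (IOException e) {
      throw new RuntimeException(e);
    }
  }
}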
@Override
public void scrubFile(FileChannel file) throws IOException, ScrubException {
  if (!Machos.isMacho(file)) {
    return;
  }
  long size = file.size();
  MappedByteBuffer map = file.map(FileChannel.MapMode.READ_WRITE, 0, size);
  try {
    Machos.setUuid(map, ZERO_UUID);
  } catch (Machos.MachoException e) {
    throw new ScrubException(e.getMessage());
  }
  map.rewind();
  Hasher hasher = Hashing.sha1().newHasher();
  while (map.hasRemaining()) {
    hasher.putByte(map.get());
  }
  map.rewind();
  try {
    Machos.setUuid(map, Arrays.copyOf(hasher.hash().asBytes(), 16));
  } catch (Machos.MachoException e) {
    throw new ScrubException(e.getMessage());
  }
}
/* (non-Javadoc)
 * @see com.ongraphdb.store.DiskSore1#start()
 */
public void start() throws IOException {
  File file = new File(dataFileName);
  boolean newStore = !file.exists();
  RandomAccessFile dataFile = new RandomAccessFile(dataFileName, "rw");
  dataFile.setLength(initialFileSize);
  dataChannel = dataFile.getChannel();
  dataLock = dataChannel.lock();
  mappedDataBuffer = dataChannel.map(MapMode.READ_WRITE, 0, dataMappedMemorySize);
  if (newStore) {
    nextPosition = NEXT_BYTES;
    mappedDataBuffer.putInt(nextPosition);
  } else {
    nextPosition = mappedDataBuffer.getInt();
  }
  shutdownHookThread = new Thread() {
    public void run() {
      try {
        mappedDataBuffer.force();
        dataLock.release();
        dataChannel.close();
        unmap(mappedDataBuffer);
      } catch (Exception e) {
        e.printStackTrace();
      }
    }
  };
  Runtime.getRuntime().addShutdownHook(shutdownHookThread);
}
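/*
 * unmap() is called by the shutdown hook above (and by shutdown() further down) but is not
 * defined in these snippets. A minimal sketch follows, assuming a JDK 8-era runtime: there is
 * no public unmap API before the Java 9+ internals changed, so this relies on the internal
 * "cleaner" of the direct buffer via reflection. It is a non-portable, best-effort workaround,
 * not part of the original code.
 */
private static void unmap(MappedByteBuffer buffer) {
  try {
    java.lang.reflect.Method cleanerMethod = buffer.getClass().getMethod("cleaner");
    cleanerMethod.setAccessible(true);
    Object cleaner = cleanerMethod.invoke(buffer);
    if (cleaner != null) {
      java.lang.reflect.Method cleanMethod = cleaner.getClass().getMethod("clean");
      cleanMethod.setAccessible(true);
      cleanMethod.invoke(cleaner);
    }
  } catch (Exception e) {
    // Best effort only; otherwise the mapping is released when the buffer is garbage collected.
  }
}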
public static ArrayList<String> getData() throws IOException {
  File file = new File("new_dataset_10000.txt");
  FileInputStream f = new FileInputStream(file);
  int SIZE = (int) file.length();
  byte[] barray = new byte[SIZE];
  FileChannel ch = f.getChannel();
  MappedByteBuffer mb = ch.map(FileChannel.MapMode.READ_ONLY, 0L, ch.size());
  int i = 0;
  while (mb.hasRemaining()) {
    barray[i] = mb.get();
    i++;
  }
  ArrayList<String> valToSort = new ArrayList<String>();
  String str = new String(barray);
  Scanner s = new Scanner(str);
  while (s.hasNext()) {
    valToSort.add(s.next());
  }
  s.close();
  f.close();
  // Heap heap = new Heap(valToSort);
  // heap.HeapSort();
  // for (int i1 = 1; i1 < valToSort.size(); i1++) {
  //   System.out.println(valToSort.get(i1));
  // }
  return valToSort;
}
/**
 * This is the entry point to the whole file version function set. It takes the filename as a
 * parameter and returns the version. If the version can't be returned for whatever reason, an
 * IOException is thrown.
 *
 * @param filename The filename to get the version of.
 * @return The version of the file, as an integer.
 * @throws IOException If the version can't be retrieved for some reason.
 */
public static int getVersion(String filename, boolean byteorder) throws IOException {
  /* peStart is the offset of the first byte of the PE header; it is stored at offset 0x3c. */
  int peStart;
  /* The signature of the PE file, "PE\0\0". */
  int peSignature;
  /* The number of sections (.data, .text, .rsrc, etc.). */
  short numberOfSections;
  /* A pointer to the "optional" header (which is always present in .exe files). */
  int ptrOptionalHeader;

  /* Map the file we're reading from, then close the stream; the mapping stays valid. */
  FileInputStream fis = new FileInputStream(filename);
  MappedByteBuffer file;
  try {
    file = fis.getChannel().map(FileChannel.MapMode.READ_ONLY, 0, new File(filename).length());
  } finally {
    fis.close();
  }

  /* Set the file ordering to little endian. */
  file.order(ByteOrder.LITTLE_ENDIAN);

  /* The start of the PE header is pointed at by the value at 0x3c. */
  peStart = file.getInt(PE_START);

  /* The first 4 bytes are the signature. */
  peSignature = file.getInt(peStart + 0);

  /* Verify that it's a valid PE file. If not, throw an exception. */
  if (peSignature != 0x00004550) {
    throw new IOException("Invalid PE file!");
  }

  /* The number of sections is the short starting at the 6th byte. */
  numberOfSections = file.getShort(peStart + 6);

  /* Get a pointer to the optional header. */
  ptrOptionalHeader = peStart + 24;

  return processOptionalHeader(file, ptrOptionalHeader, numberOfSections, byteorder);
}
/* Function to read a file. */
public byte[] getFile(String fname) throws Exception {
  /* FileInputStream and FileChannel for performance. */
  FileInputStream in = new FileInputStream(fname);
  FileChannel ch = in.getChannel();
  MappedByteBuffer mb = ch.map(FileChannel.MapMode.READ_ONLY, 0L, ch.size());
  long l = (new File(fname)).length();
  /* Currently, the supported max size is 20MB. */
  if (l > MAX_SIZE) {
    // errorMessage("File size too large. Max file size allowed is " + (Integer.MAX_VALUE / 1000) + "KB");
    in.close();
    return null;
  }
  byte[] barray = new byte[(int) l];
  int offset = 0;
  int nGet;
  /* Read the file into barray, advancing the destination offset on each chunk. */
  while (mb.hasRemaining()) {
    nGet = Math.min(mb.remaining(), Integer.MAX_VALUE);
    mb.get(barray, offset, nGet);
    offset += nGet;
  }
  in.close();
  /* Return barray. */
  return barray;
}
private void writeWayNodes(List<Integer> waynodes, int compressionType, MappedByteBuffer buffer) {
  if (!waynodes.isEmpty() && waynodes.size() % 2 == 0) {
    Iterator<Integer> waynodeIterator = waynodes.iterator();
    buffer.putInt(waynodeIterator.next());
    buffer.putInt(waynodeIterator.next());
    while (waynodeIterator.hasNext()) {
      switch (compressionType) {
        case 0:
          buffer.putInt(waynodeIterator.next().intValue());
          buffer.putInt(waynodeIterator.next().intValue());
          break;
        case 1:
          buffer.put(Serializer.getSignedThreeBytes(waynodeIterator.next().intValue()));
          buffer.put(Serializer.getSignedThreeBytes(waynodeIterator.next().intValue()));
          break;
        case 2:
          buffer.putShort(waynodeIterator.next().shortValue());
          buffer.putShort(waynodeIterator.next().shortValue());
          break;
        case 3:
          buffer.put(waynodeIterator.next().byteValue());
          buffer.put(waynodeIterator.next().byteValue());
          break;
      }
    }
  }
}
// return byte offset of next IFD
private IFDData readIFD(long byteOffset) throws IOException {
  MappedByteBuffer entries = makeReadOnlyBuffer(byteOffset, 2);
  int numEntries = entries.getChar();
  IFDData data = new IFDData();
  for (int i = 0; i < numEntries; i++) {
    IFDEntry entry = readDirectoryEntry(12 * i + 2 + byteOffset);
    if (entry.tag == MM_METADATA) {
      data.mdOffset = entry.value;
    } else if (entry.tag == STRIP_OFFSETS) {
      data.pixelOffset = entry.value;
    } else if (entry.tag == STRIP_BYTE_COUNTS) {
      data.bytesPerImage = entry.value;
    } else if (entry.tag == BITS_PER_SAMPLE) {
      if (entry.value <= 8) {
        data.bytesPerPixel = 1;
      } else if (entry.value <= 16) {
        data.bytesPerPixel = 2;
      } else {
        data.bytesPerPixel = 3;
      }
    } else if (entry.tag == MM_METADATA_LENGTH) {
      data.mdLength = entry.value;
    }
  }
  MappedByteBuffer next = makeReadOnlyBuffer(byteOffset + 2 + 12 * numEntries, 4);
  data.nextIFDAdress = unsignInt(next.getInt());
  return data;
}
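/*
 * makeReadOnlyBuffer() is used by readIFD() above (and by readJSONObject() and
 * readDirectoryEntry() below) but is not defined in these snippets. A minimal sketch,
 * assuming the reader keeps a FileChannel field (here called fileChannel_) and a ByteOrder
 * field (here called byteOrder_) for the TIFF's endianness; both names are assumptions,
 * not the original code.
 */
private MappedByteBuffer makeReadOnlyBuffer(long byteOffset, long numBytes) throws IOException {
  MappedByteBuffer buffer = fileChannel_.map(FileChannel.MapMode.READ_ONLY, byteOffset, numBytes);
  buffer.order(byteOrder_); // TIFF data may be little- or big-endian
  return buffer;
}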
private int getNextPosition(int increment) {
  int result = nextPosition;
  nextPosition = nextPosition + increment;
  mappedDataBuffer.position(0);
  mappedDataBuffer.putInt(nextPosition);
  return result;
}
/**
 * Reads a random line of text from the corpus.
 *
 * @return
 * @throws UnsupportedEncodingException
 */
String readRandomLine(EncodingSet encoding) throws UnsupportedEncodingException {
  String line = null;
  // Pick a random byte position in the corpus file
  int startIndex = NumberUtil.getRandomInt(-1, fileSize);
  byte b = mapBuf.get(startIndex);
  while (true) {
    try {
      if ((char) b == '\n') {
        // Take the string between the first '\n' and the second '\n'
        byte[] lineBytes =
            ByteBufferUtil.getBytesAbsoluteBeforeEOFChar(mapBuf, startIndex + 1, '\n');
        line = new String(lineBytes, encoding.getEncode()).trim();
        if (line.length() <= 1) {
          startIndex += lineBytes.length + 1;
          b = mapBuf.get(startIndex);
          continue;
        }
        break;
      } else {
        b = mapBuf.get(++startIndex);
      }
    } catch (IndexOutOfBoundsException e) {
      // Ran past the end of the buffer; retry with a new random position.
      line = readRandomLine(encoding);
      break;
    }
  }
  return line;
}
private synchronized void appendCurrentBuffer(byte[] buf, int offset, int length)
    throws IOException {
  if (!TFS.requestSpace(length)) {
    mCanWrite = false;
    String msg = "Local tachyon worker does not have enough space (" + length
        + ") or no worker for " + FILE.FID + " " + BLOCK_ID;
    if (PIN) {
      TFS.outOfMemoryForPinFile(FILE.FID);
    }
    throw new IOException(msg);
  }
  MappedByteBuffer out = mLocalFileChannel.map(MapMode.READ_WRITE, mInFileBytes, length);
  out.put(buf, 0, length);
  mInFileBytes += length;
}
public String getFileBinaryBase64(String fileName) throws APIException {
  if ((new File(fileName)).exists()) {
    FileInputStream stream = null;
    try {
      stream = new FileInputStream(new File(fileName));
      FileChannel fc = stream.getChannel();
      MappedByteBuffer bb = fc.map(FileChannel.MapMode.READ_ONLY, 0, fc.size());
      /* Instead of using default, pass in a decoder. */
      byte[] b = new byte[bb.remaining()];
      bb.get(b);
      return Base64.encodeBytes(b);
    } catch (Exception e) {
      throw new APIException(fileName + " could not have its contents extracted!");
    } finally {
      try {
        if (stream != null) {
          stream.close();
        }
      } catch (Exception e) {
        throw new APIException(fileName + " could not be closed!");
      }
    }
  } else {
    throw new APIException(fileName + " doesn't exist!");
  }
}
/**
 * Mapped-file approach: MappedByteBuffer can improve performance when reading large files.
 *
 * @param filePath
 * @return
 * @throws IOException
 */
public static byte[] toByteArray3(String filePath) throws IOException {
  FileChannel fc = null;
  RandomAccessFile rf = null;
  try {
    rf = new RandomAccessFile(filePath, "r");
    fc = rf.getChannel();
    MappedByteBuffer byteBuffer = fc.map(MapMode.READ_ONLY, 0, fc.size()).load();
    // System.out.println(byteBuffer.isLoaded());
    byte[] result = new byte[(int) fc.size()];
    if (byteBuffer.remaining() > 0) {
      // System.out.println("remain");
      byteBuffer.get(result, 0, byteBuffer.remaining());
    }
    return result;
  } catch (IOException e) {
    e.printStackTrace();
    throw e;
  } finally {
    try {
      if (fc != null) {
        fc.close();
      }
      if (rf != null) {
        rf.close();
      }
    } catch (IOException e) {
      e.printStackTrace();
    }
  }
}
public void loadDump(File dumpFile, File lineFile, File dumpInfoFile)
    throws FileNotFoundException, IOException {
  FileInputStream vertexStream = new FileInputStream(dumpFile);
  MappedByteBuffer inVertex =
      vertexStream.getChannel().map(FileChannel.MapMode.READ_ONLY, 0, dumpFile.length());
  vertices = inVertex.asFloatBuffer();
  vertexStream.close(); // the mapping stays valid after the stream is closed

  FileInputStream vertexLineStream = new FileInputStream(lineFile);
  MappedByteBuffer inLine =
      vertexLineStream.getChannel().map(FileChannel.MapMode.READ_ONLY, 0, lineFile.length());
  stripVertexCounts = inLine.asIntBuffer();
  vertexLineStream.close();

  Scanner scanner = new Scanner(dumpInfoFile);
  String line;
  line = scanner.nextLine();
  minX = Float.parseFloat(line);
  line = scanner.nextLine();
  maxX = Float.parseFloat(line);
  line = scanner.nextLine();
  minY = Float.parseFloat(line);
  line = scanner.nextLine();
  maxY = Float.parseFloat(line);
  scanner.close();
}
/**
 * This reads the optional header and returns the version, or throws an IOException if the
 * version could not be found.
 *
 * @param file The file we're reading from.
 * @param ptrOptionalHeader A pointer to the optional header.
 * @param numberOfSections The number of sections that we will need to process.
 * @return The version, always.
 * @throws IOException If the version couldn't be found.
 */
private static int processOptionalHeader(
    MappedByteBuffer file, int ptrOptionalHeader, int numberOfSections, boolean byteorder)
    throws IOException {
  /* Set to true if this is a PE+ file. Some addresses are slightly different. Untested! */
  boolean plus;
  /* The number of RVA entries. We don't care what the entries are, so we just skip right over
   * them. */
  int numberOfRvaAndSizes;
  /* A pointer to the table of sections. This, along with the number of sections, is passed to
   * the next function. */
  int ptrSectionTable;
  /* The version, which will eventually be returned. */
  int version;

  /* PE+ files have the leading "magic" field (a 2-byte value) set to 0x20b. */
  plus = file.getShort(ptrOptionalHeader) == 0x020b;

  /* Get the RVA counts from the optional header. */
  numberOfRvaAndSizes = file.getInt(ptrOptionalHeader + (plus ? 108 : 92));

  /* The optional header is 96 bytes, and each RVA is 8 bytes. Skip over them all. */
  ptrSectionTable = ptrOptionalHeader + 96 + (numberOfRvaAndSizes * 8);

  /* Get the version from the sections. */
  version = processSections(file, ptrSectionTable, numberOfSections, byteorder);

  /* If the version wasn't found, throw an exception. */
  if (version == 0) {
    throw new IOException("Couldn't find .rsrc section!");
  }

  return version;
}
private Pair<Long, byte[]> queryLog(HttpExchange t, Map<String, String> paramMap)
    throws IOException {
  String fileParam = paramMap.get(HttpserverUtils.HTTPSERVER_LOGVIEW_PARAM_LOGFILE);
  if (StringUtils.isBlank(fileParam)) {
    handlFailure(t, "Bad Request, Params Error, no log file name.");
    return null;
  }
  String logFile = Joiner.on(File.separator).join(logDir, fileParam);
  FileChannel fc = null;
  MappedByteBuffer fout = null;
  long fileSize = 0;
  byte[] ret = "Failed to get data".getBytes();
  try {
    fc = new RandomAccessFile(logFile, "r").getChannel();
    fileSize = fc.size();
    long position = fileSize - HttpserverUtils.HTTPSERVER_LOGVIEW_PAGESIZE;
    try {
      String posStr = paramMap.get(HttpserverUtils.HTTPSERVER_LOGVIEW_PARAM_POS);
      if (!StringUtils.isBlank(posStr)) {
        long pos = Long.valueOf(posStr);
        position = pos;
      }
    } catch (Exception e) {
      LOG.warn("Invalid position");
    }
    if (position < 0) {
      position = 0L;
    }
    long size = Math.min(fileSize - position, HttpserverUtils.HTTPSERVER_LOGVIEW_PAGESIZE);
    LOG.info("logview " + logFile + ", position=" + position + ", size=" + size);
    fout = fc.map(FileChannel.MapMode.READ_ONLY, position, size);
    ret = new byte[(int) size];
    fout.get(ret);
    return new Pair<Long, byte[]>(fileSize, ret);
  } catch (FileNotFoundException e) {
    LOG.warn(e);
    handlFailure(t, "Bad Request, Failed to find " + fileParam);
    return null;
  } catch (IOException e) {
    LOG.warn(e);
    handlFailure(t, "Bad Request, Failed to open " + fileParam);
    return null;
  } finally {
    fout = null;
    if (fc != null) {
      IOUtils.closeQuietly(fc);
    }
  }
}
protected void sendMessage(String message) {
  buffer.position(0);
  for (int i = 0; i < message.length(); ++i) {
    buffer.putChar(message.charAt(i));
  }
  buffer.putChar('\0');
}
/* (non-Javadoc)
 * @see com.ongraphdb.store.DiskSore1#shutdown()
 */
public void shutdown() throws IOException {
  mappedDataBuffer.force();
  mappedDataBuffer.clear();
  dataLock.release();
  dataChannel.close();
  unmap(mappedDataBuffer);
  Runtime.getRuntime().removeShutdownHook(shutdownHookThread);
}
private void writeRecordHeaderToMappedBuffer(RecordHeader header) {
  mappedDataBuffer.position(header.getPosition());
  mappedDataBuffer.putInt(header.getDataSize());
  mappedDataBuffer.put(header.getFragmented());
  if (header.isFragmented()) {
    mappedDataBuffer.putInt(header.getNextPos());
  }
}
private JSONObject readJSONObject(long offset, long length) throws IOException, JSONException {
  MappedByteBuffer mdBuffer = makeReadOnlyBuffer(offset, length);
  StringBuffer sBuffer = new StringBuffer();
  for (int i = 0; i < length; i++) {
    sBuffer.append((char) mdBuffer.get(i));
  }
  return new JSONObject(sBuffer.toString());
}
@SuppressWarnings("deprecation") @Override protected void writeToBuffer(MappedByteBuffer buffer, Tuple e) { KeyValue kv = KeyValueUtil.ensureKeyValue(e.getValue(0)); buffer.putInt(kv.getLength() + Bytes.SIZEOF_INT); buffer.putInt(kv.getLength()); buffer.put(kv.getBuffer(), kv.getOffset(), kv.getLength()); }
private void writeWordBlock(WordBlock block) throws IOException {
  MappedByteBuffer buffer = fileChannel.map(MapMode.READ_WRITE, block.position, block_size);
  byte[] data = block.encode();
  if (data.length > block_size) {
    throw new RuntimeException("Encoded block is larger than block_size");
  }
  buffer.put(data);
  // buffer.force();
}
public static void main(String[] args) throws Exception {
  MappedByteBuffer out =
      new RandomAccessFile("test.dat", "rw")
          .getChannel()
          .map(FileChannel.MapMode.READ_WRITE, 0, length);
  for (int i = 0; i < length; i++) {
    out.put((byte) 'x');
  }
  print("Finished writing");
  for (int i = length / 2; i < length / 2 + 6; i++) {
    printnb((char) out.get(i));
  }
}
private static void corrupt(DataSegment segment) throws IOException {
  final Map<String, Object> localLoadSpec = segment.getLoadSpec();
  final Path segmentPath = Paths.get(localLoadSpec.get("path").toString());
  final MappedByteBuffer buffer = Files.map(segmentPath.toFile(), FileChannel.MapMode.READ_WRITE);
  while (buffer.hasRemaining()) {
    buffer.put((byte) 0xFF);
  }
}
private IFDEntry readDirectoryEntry(long byteOffset) throws IOException {
  MappedByteBuffer ifd = makeReadOnlyBuffer(byteOffset, 12);
  char tag = ifd.getChar();
  char type = ifd.getChar();
  long count = unsignInt(ifd.getInt());
  long value = unsignInt(ifd.getInt());
  return (new IFDEntry(tag, type, count, value));
}
/**
 * @param offset the byte offset at which the area record starts
 * @return the area name string
 */
private String readArea(int offset) {
  mbb.position(offset);
  byte b = mbb.get();
  if (b == REDIRECT_MODE_1 || b == REDIRECT_MODE_2) {
    int areaOffset = readInt3();
    if (areaOffset == 0) {
      return Message.unknown_area;
    } else {
      return readString(areaOffset);
    }
  } else {
    return readString(offset);
  }
}
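/*
 * readInt3() and readString() are used by readArea() above but are not defined in these
 * snippets. A minimal sketch follows, assuming a QQWry-style database layout: the mapped
 * buffer mbb is ordered little-endian, 3-byte offsets are stored as the low 24 bits, and
 * strings are NUL-terminated GBK bytes. The 128-byte scratch buffer and the GBK encoding
 * are assumptions for illustration, not the original code.
 */
private int readInt3() {
  // Reads 4 bytes from the current position and keeps the low 3 (a little-endian offset).
  return mbb.getInt() & 0x00FFFFFF;
}

private String readString(int offset) {
  mbb.position(offset);
  byte[] buf = new byte[128];
  int i = 0;
  for (byte c = mbb.get(); c != 0 && i < buf.length; c = mbb.get()) {
    buf[i++] = c;
  }
  try {
    return new String(buf, 0, i, "GBK");
  } catch (UnsupportedEncodingException e) {
    return "";
  }
}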
private WordBlock readWordBlock(int position) throws IOException {
  MappedByteBuffer buffer = fileChannel.map(MapMode.READ_ONLY, position, block_size).load();
  WordBlock block = new WordBlock(position);
  byte[] data = new byte[block_size];
  buffer.get(data);
  block.setData(data);
  block.decode();
  return block;
}
/**
 * Reads the four-byte IP address at {@code offset} into the {@code ip} array. The file stores
 * the address in little-endian order, so the bytes are swapped here to yield big-endian order.
 *
 * @param offset
 * @param ip
 */
private void readIP(int offset, byte[] ip) {
  mbb.position(offset);
  mbb.get(ip);
  byte temp = ip[0];
  ip[0] = ip[3];
  ip[3] = temp;
  temp = ip[1];
  ip[1] = ip[2];
  ip[2] = temp;
}
protected String receiveMessage() {
  String message = "";
  buffer.position(0);
  for (char c = buffer.getChar(); c != '\0'; c = buffer.getChar()) {
    message += c;
  }
  return message;
}
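/*
 * Usage sketch for sendMessage()/receiveMessage(): the shared `buffer` field they rely on is
 * not shown in these snippets. One way to back it with shared memory is to map a small file
 * READ_WRITE from both communicating processes; the file name ("ipc-mailbox.dat") and the
 * 1 KB size below are illustrative assumptions, not part of the original code.
 */
protected MappedByteBuffer openMailbox() throws IOException {
  FileChannel channel = FileChannel.open(
      Paths.get("ipc-mailbox.dat"),
      StandardOpenOption.CREATE, StandardOpenOption.READ, StandardOpenOption.WRITE);
  // Both the writer (sendMessage) and the reader (receiveMessage) map the same region.
  return channel.map(FileChannel.MapMode.READ_WRITE, 0, 1024);
}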