/**
 * Maps blah file with a random offset and checks to see if data written out to the file can be
 * read back in.
 */
private static void testWrite() throws Exception {
  StringBuilder sb = new StringBuilder();
  sb.setLength(4);
  for (int x = 0; x < 1000; x++) {
    try (RandomAccessFile raf = new RandomAccessFile(blah, "rw")) {
      FileChannel fc = raf.getChannel();
      long offset = generator.nextInt(1000);
      MappedByteBuffer b = fc.map(MapMode.READ_WRITE, offset, 100);
      for (int i = 0; i < 4; i++) {
        b.put(i, (byte) ('0' + i));
      }
      for (int i = 0; i < 4; i++) {
        byte aByte = b.get(i);
        sb.setCharAt(i, (char) aByte);
      }
      if (!sb.toString().equals("0123")) throw new Exception("Write test failed");
    }
  }
}
public void write(Tag tag, RandomAccessFile raf, RandomAccessFile tempRaf)
    throws CannotWriteException, IOException {
  FileChannel fc = raf.getChannel();
  int oldTagSize = 0;
  if (tagExists(fc)) {
    // read the length
    if (!canOverwrite(raf))
      throw new CannotWriteException("Overwriting of this kind of ID3v2 tag not supported yet");
    fc.position(6);
    ByteBuffer buf = ByteBuffer.allocate(4);
    fc.read(buf);
    oldTagSize = (buf.get(0) & 0xFF) << 21;
    oldTagSize += (buf.get(1) & 0xFF) << 14;
    oldTagSize += (buf.get(2) & 0xFF) << 7;
    oldTagSize += buf.get(3) & 0xFF;
    oldTagSize += 10;
    // System.err.println("Old tag size: " + oldTagSize);
    int newTagSize = tc.getTagLength(tag);
    if (oldTagSize >= newTagSize) {
      // replace
      // System.err.println("Old ID3v2 Tag found, replacing the old tag");
      fc.position(0);
      fc.write(tc.convert(tag, oldTagSize - newTagSize));
      // ID3v2 Tag Written
      return;
    }
  }
  // create new tag with padding
  // System.err.println("Creating a new ID3v2 Tag");
  fc.position(oldTagSize);
  if (fc.size() > 15 * 1024 * 1024) {
    FileChannel tempFC = tempRaf.getChannel();
    tempFC.position(0);
    tempFC.write(tc.convert(tag, Id3v2TagCreator.DEFAULT_PADDING));
    tempFC.transferFrom(fc, tempFC.position(), fc.size() - oldTagSize);
    fc.close();
  } else {
    ByteBuffer[] content = new ByteBuffer[2];
    content[1] = ByteBuffer.allocate((int) fc.size());
    fc.read(content[1]);
    content[1].flip(); // limit to the bytes actually read so no stale bytes get appended
    content[0] = tc.convert(tag, Id3v2TagCreator.DEFAULT_PADDING);
    fc.position(0);
    fc.write(content);
  }
}
public static void main(String args[]) throws Exception {
  String inputFile = "samplein.txt";
  String outputFile = "sampleout.txt";
  RandomAccessFile inf = new RandomAccessFile(inputFile, "r");
  RandomAccessFile outf = new RandomAccessFile(outputFile, "rw");
  long inputLength = new File(inputFile).length();
  FileChannel inc = inf.getChannel();
  FileChannel outc = outf.getChannel();
  MappedByteBuffer inputData = inc.map(FileChannel.MapMode.READ_ONLY, 0, inputLength);
  Charset latin1 = Charset.forName("ISO-8859-1");
  CharsetDecoder decoder = latin1.newDecoder();
  CharsetEncoder encoder = latin1.newEncoder();
  CharBuffer cb = decoder.decode(inputData);
  // Process char data here
  ByteBuffer outputData = encoder.encode(cb);
  outc.write(outputData);
  inf.close();
  outf.close();
}
private void merge(SingleHit[] hits, String prefix, int chrom) throws IOException {
  String postmp = getPositionsFname(prefix, chrom) + ".tmp";
  String weightstmp = getWeightsFname(prefix, chrom) + ".tmp";
  String lastmp = getLaSFname(prefix, chrom) + ".tmp";
  RandomAccessFile positionsRAF = new RandomAccessFile(postmp, "rw");
  RandomAccessFile weightsRAF = new RandomAccessFile(weightstmp, "rw");
  RandomAccessFile lasRAF = new RandomAccessFile(lastmp, "rw");
  int newsize = getPositionsBuffer().limit() + hits.length;
  IntBP posfile =
      new IntBP(positionsRAF.getChannel().map(FileChannel.MapMode.READ_WRITE, 0, newsize * 4));
  FloatBP weightfile =
      new FloatBP(weightsRAF.getChannel().map(FileChannel.MapMode.READ_WRITE, 0, newsize * 4));
  IntBP lasfile =
      new IntBP(lasRAF.getChannel().map(FileChannel.MapMode.READ_WRITE, 0, newsize * 4));
  int oldp = 0;
  int newp = 0;
  int pos = 0;
  IntBP oldpositions = getPositionsBuffer();
  FloatBP oldweights = getWeightsBuffer();
  IntBP oldlas = getLASBuffer();
  while (oldp < oldpositions.limit() || newp < hits.length) {
    while (newp < hits.length
        && (oldp == oldpositions.limit() || hits[newp].pos <= oldpositions.get(oldp))) {
      posfile.put(pos, hits[newp].pos);
      weightfile.put(pos, hits[newp].weight);
      lasfile.put(pos, Hits.makeLAS(hits[newp].length, hits[newp].strand));
      newp++;
      pos++;
    }
    while (oldp < oldpositions.limit()
        && (newp == hits.length || oldpositions.get(oldp) <= hits[newp].pos)) {
      posfile.put(pos, oldpositions.get(oldp));
      weightfile.put(pos, oldweights.get(oldp));
      lasfile.put(pos, oldlas.get(oldp));
      oldp++;
      pos++;
    }
    // System.err.println(String.format("%d %d %d", pos, newp, oldp));
  }
  posfile = null;
  weightfile = null;
  lasfile = null;
  oldpositions = null;
  oldweights = null;
  oldlas = null;
  positionsRAF.close();
  weightsRAF.close();
  lasRAF.close();
  /* ideally this part with the renames would be atomic... */
  (new File(postmp)).renameTo(new File(getPositionsFname(prefix, chrom)));
  (new File(weightstmp)).renameTo(new File(getWeightsFname(prefix, chrom)));
  (new File(lastmp)).renameTo(new File(getLaSFname(prefix, chrom)));
}
public void pdfRead() {
  try {
    String INPUTFILE = "example_2.pdf";
    File file = new File(INPUTFILE);
    RandomAccessFile raf = new RandomAccessFile(file, "r");
    FileChannel channel = raf.getChannel();
    ByteBuffer buf = channel.map(FileChannel.MapMode.READ_ONLY, 0, channel.size());
    PDFFile pdffile = new PDFFile(buf);
    // draw the first page to an image
    PDFPage page = pdffile.getPage(0);
    // get the width and height for the doc at the default zoom
    Rectangle rect =
        new Rectangle(0, 0, (int) page.getBBox().getWidth(), (int) page.getBBox().getHeight());
    // generate the image
    Image img =
        page.getImage(
            rect.width, rect.height, // width & height
            rect, // clip rect
            null, // null for the ImageObserver
            true, // fill background with white
            true // block until drawing is done
            );
    frame.getContentPane().add(new JLabel(new ImageIcon(img)));
    frame.pack();
    frame.setSize(600, 700);
  } catch (Exception e) {
    System.out.println(e);
  }
}
private ChannelFuture sendFile(ChannelHandlerContext ctx, Channel ch, FileChunk file)
    throws IOException {
  RandomAccessFile raf;
  try {
    raf = new RandomAccessFile(file.getFile(), "r");
  } catch (FileNotFoundException fnfe) {
    return null;
  }
  ChannelFuture writeFuture;
  if (ch.getPipeline().get(SslHandler.class) != null) {
    // Cannot use zero-copy with HTTPS.
    writeFuture = ch.write(new ChunkedFile(raf, file.startOffset(), file.length(), 8192));
  } else {
    // No encryption - use zero-copy.
    final FileRegion region =
        new DefaultFileRegion(raf.getChannel(), file.startOffset(), file.length());
    writeFuture = ch.write(region);
    writeFuture.addListener(
        new ChannelFutureListener() {
          public void operationComplete(ChannelFuture future) {
            region.releaseExternalResources();
          }
        });
  }
  return writeFuture;
}
/** @param workTokDir Token directory (common for multiple nodes). */
private void cleanupResources(File workTokDir) {
  RandomAccessFile lockFile = null;
  FileLock lock = null;
  try {
    lockFile = new RandomAccessFile(new File(workTokDir, LOCK_FILE_NAME), "rw");
    lock = lockFile.getChannel().lock();
    if (lock != null)
      processTokenDirectory(workTokDir);
    else if (log.isDebugEnabled())
      log.debug(
          "Token directory is being processed concurrently: " + workTokDir.getAbsolutePath());
  } catch (OverlappingFileLockException ignored) {
    if (log.isDebugEnabled())
      log.debug(
          "Token directory is being processed concurrently: " + workTokDir.getAbsolutePath());
  } catch (FileLockInterruptionException ignored) {
    Thread.currentThread().interrupt();
  } catch (IOException e) {
    U.error(log, "Failed to process directory: " + workTokDir.getAbsolutePath(), e);
  } finally {
    U.releaseQuiet(lock);
    U.closeQuiet(lockFile);
  }
}
/**
 * Writes the given piece into the temporary file on disk.
 *
 * @param piece piece to write
 * @param num piece number
 */
private synchronized void writePieceTmpFile(byte[] piece, int num) {
  if (num < 0 || num >= this.nbPieces()) {
    throw new IllegalArgumentException();
  }
  if (piece.length > _piecesize) {
    throw new IllegalArgumentException();
  }
  try {
    RandomAccessFile writer_tmp = new RandomAccessFile(this, "rw");
    FileChannel writer = writer_tmp.getChannel();
    int index_piece = ((int) this.length() - this.headerSize()) / _piecesize;
    if (piece.length < _piecesize) {
      piece = Arrays.copyOf(piece, _piecesize);
    }
    Tools.write(writer, 4 + _key.length() + 4 + 4 + 4 * num, index_piece);
    Tools.write(writer, this.headerSize() + _piecesize * index_piece, piece);
    writer.force(true);
    writer_tmp.close();
  } catch (Exception e) {
    System.out.println("Unable to write tmp file piece");
    e.printStackTrace();
  }
}
public void resort(String prefix, int chrom) throws IOException {
  IntBP positions = getPositionsBuffer();
  FloatBP weights = getWeightsBuffer();
  IntBP las = getLASBuffer();
  long indices[] = new long[positions.limit()];
  for (int i = 0; i < indices.length; i++) {
    long v = positions.get(i);
    v <<= 32;
    v |= i;
    indices[i] = v;
  }
  Arrays.sort(indices);
  String postmp = getPositionsFname(prefix, chrom) + ".tmp";
  String weightstmp = getWeightsFname(prefix, chrom) + ".tmp";
  String lastmp = getLaSFname(prefix, chrom) + ".tmp";
  RandomAccessFile positionsRAF = new RandomAccessFile(postmp, "rw");
  RandomAccessFile weightsRAF = new RandomAccessFile(weightstmp, "rw");
  RandomAccessFile lasRAF = new RandomAccessFile(lastmp, "rw");
  int newsize = getPositionsBuffer().limit();
  IntBP posfile =
      new IntBP(positionsRAF.getChannel().map(FileChannel.MapMode.READ_WRITE, 0, newsize * 4));
  FloatBP weightfile =
      new FloatBP(weightsRAF.getChannel().map(FileChannel.MapMode.READ_WRITE, 0, newsize * 4));
  IntBP lasfile =
      new IntBP(lasRAF.getChannel().map(FileChannel.MapMode.READ_WRITE, 0, newsize * 4));
  for (int i = 0; i < indices.length; i++) {
    int index = (int) (indices[i] & 0xffffffffL);
    int pos = (int) (indices[i] >> 32);
    posfile.put(i, pos);
    weightfile.put(i, weights.get(index));
    lasfile.put(i, las.get(index));
  }
  posfile = null;
  weightfile = null;
  lasfile = null;
  positionsRAF.close();
  weightsRAF.close();
  lasRAF.close();
  /* ideally this part with the renames would be atomic... */
  (new File(postmp)).renameTo(new File(getPositionsFname(prefix, chrom)));
  (new File(weightstmp)).renameTo(new File(getWeightsFname(prefix, chrom)));
  (new File(lastmp)).renameTo(new File(getLaSFname(prefix, chrom)));
}
public RandomAccessFile delete(RandomAccessFile raf, RandomAccessFile tempRaf) throws IOException {
  FileChannel fc = raf.getChannel();
  fc.position(0);
  if (!tagExists(fc)) return raf;
  fc.position(6);
  ByteBuffer b = ByteBuffer.allocate(4);
  fc.read(b);
  b.rewind();
  int tagSize = (b.get() & 0xFF) << 21;
  tagSize += (b.get() & 0xFF) << 14;
  tagSize += (b.get() & 0xFF) << 7;
  tagSize += b.get() & 0xFF;
  FileChannel tempFC = tempRaf.getChannel();
  tempFC.position(0);
  fc.position(tagSize + 10);
  // Here we will try to skip possible trash after the tag and before the audio data
  b = ByteBuffer.allocate(4);
  int skip = 0;
  while (fc.read(b) != -1) {
    if ((b.get(0) & 0xFF) == 0xFF
        && (b.get(1) & 0xE0) == 0xE0
        && (b.get(1) & 0x06) != 0
        && (b.get(2) & 0xF0) != 0xF0
        && (b.get(2) & 0x08) != 0x08) {
      fc.position(fc.position() - 4);
      break;
    }
    fc.position(fc.position() - 3);
    b.rewind();
    skip++;
  }
  tempFC.transferFrom(fc, 0, fc.size() - tagSize - 10 - skip);
  return tempRaf;
}
/**
 * Creates and initializes an SPV block store. Will create the given file if it's missing. This
 * operation will block on disk.
 */
public SPVBlockStore(NetworkParameters params, File file) throws BlockStoreException {
  checkNotNull(file);
  this.params = checkNotNull(params);
  try {
    this.numHeaders = DEFAULT_NUM_HEADERS;
    boolean exists = file.exists();
    // Set up the backing file.
    randomAccessFile = new RandomAccessFile(file, "rw");
    long fileSize = getFileSize();
    if (!exists) {
      log.info("Creating new SPV block chain file " + file);
      randomAccessFile.setLength(fileSize);
    } else if (randomAccessFile.length() != fileSize) {
      throw new BlockStoreException(
          "File size on disk does not match expected size: "
              + randomAccessFile.length()
              + " vs "
              + fileSize);
    }
    FileChannel channel = randomAccessFile.getChannel();
    fileLock = channel.tryLock();
    if (fileLock == null)
      throw new BlockStoreException("Store file is already locked by another process");
    // Map it into memory read/write. The kernel will take care of flushing writes to disk at the
    // most efficient times, which may mean that until the map is deallocated the data on disk is
    // randomly inconsistent. However the only process accessing it is us, via this mapping, so
    // our own view will always be correct. Once we establish the mmap the underlying file and
    // channel can go away. Note that the details of mmapping vary between platforms.
    buffer = channel.map(FileChannel.MapMode.READ_WRITE, 0, fileSize);
    // Check or initialize the header bytes to ensure we don't try to open some random file.
    byte[] header;
    if (exists) {
      header = new byte[4];
      buffer.get(header);
      if (!new String(header, "US-ASCII").equals(HEADER_MAGIC))
        throw new BlockStoreException("Header bytes do not equal " + HEADER_MAGIC);
    } else {
      initNewStore(params);
    }
  } catch (Exception e) {
    try {
      if (randomAccessFile != null) randomAccessFile.close();
    } catch (IOException e2) {
      throw new BlockStoreException(e2);
    }
    throw new BlockStoreException(e);
  }
}
public void run() {
  // Create a file channel for the file
  try {
    _parent.addToActive(this);
    file = new RandomAccessFile(Globals.ourHome + outFile, "rwd");
    channel = file.getChannel();
    // Create a segment downloader for each segment and run them simultaneously
    for (int i = 0; i < nSeg; i++) {
      segDownloads.add(new SegDownloader(this, peers.get(i % peers.size()) + "/" + path, i));
      (new Thread(segDownloads.get(segDownloads.size() - 1))).start();
    }
    // Wait for a download thread to either finish or fail, and update our bookkeeping
    try {
      while (!done) {
        // Get a new segment downloader off the stopped list.
        SegDownloader s = bstopped.take();
        // Give a new path to every download thread that failed.
        if (s.status() == Dstatus.FAILED) {
          synchronized (s) {
            s.dlPath = getNewPath();
            removeStopped(s);
            s.notify();
          }
        }
        // Check if that segment finishes off the download.
        else if (s.status() == Dstatus.FINISHED) {
          if (nSeg == doneSegs) {
            done = true;
            percentDone = 100;
            System.out.println("Download " + Globals.ourHome + outFile + " Finished!");
            // "Notify" the waiters that we're done.
            while (waiters > 0 && waitToken.take()) {
              waiters--;
            }
          }
        } else throw new RuntimeException("Impossible");
      }
    } catch (InterruptedException e) {
      status = Dstatus.FAILED;
    }
  } catch (IOException e) {
    Log.info("Could not open file for download!");
    status = Dstatus.FAILED;
  } catch (InterruptedException e) {
    // TODO Auto-generated catch block
    status = Dstatus.FAILED;
  }
}
/**
 * Checks if the table of the specified database is locked.
 *
 * @param db name of database
 * @param ctx database context
 * @return result of check
 */
public static boolean locked(final String db, final Context ctx) {
  final IOFile table = MetaData.file(ctx.globalopts.dbpath(db), DATATBL);
  if (!table.exists()) return false;
  try (final RandomAccessFile file = new RandomAccessFile(table.file(), "rw")) {
    return file.getChannel().tryLock() == null;
  } catch (final ClosedChannelException ex) {
    return false;
  } catch (final OverlappingFileLockException | IOException ex) {
    return true;
  }
}
public static void writeSingleHits(
    IntBP positions, FloatBP weights, IntBP las, String prefix, int chrom) throws IOException {
  String postmp = getPositionsFname(prefix, chrom) + ".tmp";
  String weightstmp = getWeightsFname(prefix, chrom) + ".tmp";
  String lastmp = getLaSFname(prefix, chrom) + ".tmp";
  RandomAccessFile positionsRAF = new RandomAccessFile(postmp, "rw");
  RandomAccessFile weightsRAF = new RandomAccessFile(weightstmp, "rw");
  RandomAccessFile lasRAF = new RandomAccessFile(lastmp, "rw");
  Bits.sendBytes(positions.bb, 0, positions.bb.limit(), positionsRAF.getChannel());
  Bits.sendBytes(weights.bb, 0, weights.bb.limit(), weightsRAF.getChannel());
  Bits.sendBytes(las.bb, 0, las.bb.limit(), lasRAF.getChannel());
  positionsRAF.close();
  weightsRAF.close();
  lasRAF.close();
  /* ideally this part with the renames would be atomic... */
  (new File(postmp)).renameTo(new File(getPositionsFname(prefix, chrom)));
  (new File(weightstmp)).renameTo(new File(getWeightsFname(prefix, chrom)));
  (new File(lastmp)).renameTo(new File(getLaSFname(prefix, chrom)));
}
private static void testHighOffset() throws Exception {
  StringBuilder sb = new StringBuilder();
  sb.setLength(4);
  for (int x = 0; x < 1000; x++) {
    try (RandomAccessFile raf = new RandomAccessFile(blah, "rw")) {
      FileChannel fc = raf.getChannel();
      long offset = 66000;
      MappedByteBuffer b = fc.map(MapMode.READ_WRITE, offset, 100);
    }
  }
}
PageSmoother(String pdf) throws IOException {
  File file = new File(pdf);
  ndone = 0;
  RandomAccessFile raf = new RandomAccessFile(file, "r");
  FileChannel channel = raf.getChannel();
  ByteBuffer buf = channel.map(FileChannel.MapMode.READ_ONLY, 0, channel.size());
  pdffile = new PDFFile(buf);
  page1 = new CountDownLatch(1);
  raf.close();
  tmpdir = File.createTempFile("pagesmoother", "");
  tmpdir.delete();
  tmpdir.mkdir();
}
@Override
public void flush(File file) throws IOException {
  long start = System.currentTimeMillis();
  RandomAccessFile randomAccessFile = randomAccessFile(file);
  try {
    FileChannel channel = randomAccessFile.getChannel();
    channel.force(true);
    if (LOG.isLoggable(Level.FINE)) {
      long stop = System.currentTimeMillis();
      LOG.log(Level.FINE, "Flushed {0}: {1} msec", new Object[] {file, stop - start});
    }
  } finally {
    randomAccessFile.close();
  }
}
public static void main(String args[]) throws IOException {
  if (args.length != 1) {
    System.err.println("Usage: java LockingExample <input file>");
    System.exit(0);
  }
  FileLock sharedLock = null;
  FileLock exclusiveLock = null;
  try {
    RandomAccessFile raf = new RandomAccessFile(args[0], "rw");
    // get the channel for the file
    FileChannel channel = raf.getChannel();
    System.out.println("trying to acquire lock ...");
    // this locks the first half of the file - exclusive
    exclusiveLock = channel.lock(0, raf.length() / 2, false); // false = exclusive lock
    System.out.println("lock acquired ...");
    /** Now modify the data . . . */
    try {
      // sleep for 10 seconds
      Thread.sleep(10000);
    } catch (InterruptedException ie) {
    }
    // release the lock
    exclusiveLock.release();
    System.out.println("lock released ...");
    // this locks the second half of the file - shared
    sharedLock = channel.lock(raf.length() / 2 + 1, raf.length(), SHARED);
    /** Now read the data . . . */
    // release the lock
    sharedLock.release();
  } catch (java.io.IOException ioe) {
    System.err.println(ioe);
  } finally {
    if (exclusiveLock != null) exclusiveLock.release();
    if (sharedLock != null) sharedLock.release();
  }
}
private void readPalette() {
  RandomAccessFile rIn = null;
  ByteBuffer buf = null;
  int i;
  try {
    if (paletteFile != null) {
      // see if the file exists, if not, set it to the default palette.
      File file = new File(paletteFile);
      numPaletteEntries = (int) (file.length() / 4);
      buf = ByteBuffer.allocate(numPaletteEntries * 4);
      rIn = new RandomAccessFile(paletteFile, "r");
      FileChannel inChannel = rIn.getChannel();
      inChannel.position(0);
      inChannel.read(buf);
      // Check the byte order.
      buf.order(ByteOrder.LITTLE_ENDIAN);
      buf.rewind();
      IntBuffer ib = buf.asIntBuffer();
      paletteData = new int[numPaletteEntries];
      ib.get(paletteData);
      ib = null;
    }
  } catch (Exception e) {
    System.err.println("Caught exception: " + e.toString());
    System.err.println(e.getStackTrace());
  } finally {
    if (rIn != null) {
      try {
        rIn.close();
      } catch (Exception e) {
      }
    }
  }
}
/**
 * Decode file charset.
 *
 * @param f File to process.
 * @return File charset.
 * @throws IOException in case of error.
 */
public static Charset decode(File f) throws IOException {
  SortedMap<String, Charset> charsets = Charset.availableCharsets();
  String[] firstCharsets = {
    Charset.defaultCharset().name(), "US-ASCII", "UTF-8", "UTF-16BE", "UTF-16LE"
  };
  Collection<Charset> orderedCharsets = U.newLinkedHashSet(charsets.size());
  for (String c : firstCharsets)
    if (charsets.containsKey(c)) orderedCharsets.add(charsets.get(c));
  orderedCharsets.addAll(charsets.values());
  try (RandomAccessFile raf = new RandomAccessFile(f, "r")) {
    FileChannel ch = raf.getChannel();
    ByteBuffer buf = ByteBuffer.allocate(4096);
    ch.read(buf);
    buf.flip();
    for (Charset charset : orderedCharsets) {
      CharsetDecoder decoder = charset.newDecoder();
      decoder.reset();
      buf.rewind(); // decode the sample from the start for each candidate charset
      try {
        decoder.decode(buf);
        return charset;
      } catch (CharacterCodingException ignored) {
      }
    }
  }
  return Charset.defaultCharset();
}
/** @param args */
public static void main(String[] args) {
  FileChannel fc = null;
  RandomAccessFile raf = null;
  // StringBuilder sb;
  if (args.length != 1) {
    System.out.println("Usage: Ntfs filename");
    System.exit(1);
  }
  /*
  sb = new StringBuilder();
  int[] foo = {129, 4, 229, 33};
  for (int b : foo) {
    sb.insert(0, String.format("%02X", b));
  }
  System.out.println(sb.toString());
  System.exit(0);
  */
  try {
    raf = new RandomAccessFile(args[0], "r");
    fc = raf.getChannel();
    Filesystem fs = new Filesystem(fc);
    fs.demo();
    // fs.displayFs();
  } catch (FileNotFoundException x) {
    System.out.println("FNF exp: " + x.getMessage());
  } catch (IOException x) {
    System.out.println("IO exp: " + x.getMessage());
  } finally {
    if (raf != null)
      try {
        raf.close();
      } catch (IOException e) {
        e.printStackTrace();
      }
  }
}
@Override
public void write(File file, long position, ByteBuffer buffer)
    throws IOException, InterruptedException {
  long start = System.currentTimeMillis();
  RandomAccessFile randomAccessFile = randomAccessFile(file);
  try {
    FileChannel channel = randomAccessFile.getChannel();
    channel.position(position);
    channel.write(buffer);
    if (Thread.interrupted()) {
      throw new InterruptedException();
    }
    long stop = System.currentTimeMillis();
    if (LOG.isLoggable(Level.FINE)) {
      LOG.log(
          Level.FINE,
          "Wrote page at {0} of {1}: {2} msec",
          new Object[] {position, file, stop - start});
    }
  } finally {
    randomAccessFile.close();
  }
}
private static MappedByteBuffer openMemoryMappedFile() throws IOException {
  RandomAccessFile file = new RandomAccessFile("MmfIpcSpike.tmp", "rw");
  return file.getChannel().map(FileChannel.MapMode.READ_WRITE, 0, FILE_SIZE);
}
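/*
 * Hedged usage sketch for the mapping returned by openMemoryMappedFile() above, assuming two
 * processes open the same "MmfIpcSpike.tmp" file and that FILE_SIZE is at least 8 bytes. The
 * writerSide/readerSide method names and the flag-byte protocol are illustrative assumptions,
 * not part of the original spike, and the sketch ignores cross-process memory-ordering subtleties.
 */
static void writerSide() throws IOException {
  MappedByteBuffer buf = openMemoryMappedFile();
  buf.putInt(4, 42); // payload at offset 4
  buf.put(0, (byte) 1); // publish flag last
  buf.force(); // flush the mapped region to the backing file
}

static void readerSide() throws IOException {
  MappedByteBuffer buf = openMemoryMappedFile();
  while (buf.get(0) == 0) { // poll the flag byte until the writer publishes
    Thread.onSpinWait();
  }
  System.out.println("value = " + buf.getInt(4));
}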
public static FileLock lock_file(String path) throws IOException, FileNotFoundException {
  RandomAccessFile file = new RandomAccessFile(path, "rw");
  FileChannel fileChannel = file.getChannel();
  // tryLock() returns null if another process already holds the lock.
  return fileChannel.tryLock();
}
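/*
 * A hedged sketch of how a caller might use lock_file(). The lock-file path, the null check and
 * the release/close handling are assumptions for illustration; closing the lock's channel also
 * releases the underlying RandomAccessFile opened inside lock_file().
 */
static void runExclusively() throws IOException {
  FileLock lock = lock_file("/tmp/myapp.lock"); // example path
  if (lock == null) {
    System.err.println("Another process holds the lock; skipping.");
    return;
  }
  try {
    // ... work that requires exclusive access ...
  } finally {
    FileChannel ch = lock.channel();
    lock.release();
    ch.close(); // closes the channel and the file descriptor behind it
  }
}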
/**
 * Open a new or existing file. The maxFileSize parameter is only used when creating a new file.
 */
@SuppressWarnings("StatementWithEmptyBody")
public MessageFile(File file, long firstMessageId, int maxFileSize) throws IOException {
  this.file = file;
  this.firstMessageId = firstMessageId;
  if (maxFileSize < 0 && !file.isFile()) {
    throw new IllegalArgumentException(
        "File does not exist, is not readable or is not a file [" + file + "]");
  }
  raf = new RandomAccessFile(file, "rw");
  channel = raf.getChannel();
  fileHeader = ByteBuffer.allocateDirect(FILE_HEADER_SIZE);
  header = ByteBuffer.allocateDirect(1024);
  int size = (int) channel.size();
  if (size == 0) {
    if (maxFileSize < FILE_HEADER_SIZE) {
      throw new IllegalArgumentException("Invalid max file size " + maxFileSize);
    }
    fileHeader.putShort(FILE_MAGIC);
    fileHeader.putShort((short) 0);
    fileHeader.putInt(this.maxFileSize = maxFileSize);
    fileHeader.putInt(length = FILE_HEADER_SIZE);
    for (int i = bucketPosition(0); i < FILE_HEADER_SIZE; i += 16) fileHeader.putInt(i, -1);
    fileHeader.position(0);
    channel.write(fileHeader);
    channel.force(false); // make sure file always has a valid header
    bucketIndex = -1;
  } else {
    int sz = channel.read(fileHeader);
    if (sz < FILE_HEADER_SIZE) throw new IOException("File header too short [" + file + "]");
    fileHeader.flip();
    short magic = fileHeader.getShort();
    if (magic != FILE_MAGIC) {
      throw new IOException(
          "Invalid file magic 0x" + Integer.toHexString(magic & 0xFFFF) + " [" + file + "]");
    }
    fileHeader.position(fileHeader.position() + 2);
    this.maxFileSize = fileHeader.getInt();
    if (this.maxFileSize < FILE_HEADER_SIZE) {
      throw new IOException("Invalid max file size " + this.maxFileSize + " [" + file + "]");
    }
    length = fileHeader.getInt();
    if (length > size) {
      throw new IOException(
          "Checkpoint " + length + " exceeds file size " + size + " [" + file + "]");
    } else if (length < size) {
      channel.truncate(length); // discard possibly corrupt portion
    }
    lastCheckpointLength = length;
    for (bucketIndex = 0;
        bucketIndex < MAX_BUCKETS && fileHeader.getInt(bucketPosition(bucketIndex)) != -1;
        bucketIndex++)
      ;
    fileHeader.position(bucketPosition(--bucketIndex));
    bucketMessageId = fileHeader.getInt();
    bucketTimestamp = fileHeader.getLong();
    bucketCount = fileHeader.getInt();
  }
  bytesPerBucket = (this.maxFileSize - FILE_HEADER_SIZE) / MAX_BUCKETS;
}
/**
 * Acquires a lock on the file. Does nothing if the correct lock has already been acquired.
 * Otherwise, releases an existing lock.
 *
 * @param shared shared/exclusive lock
 * @return success flag
 * @throws IOException I/O exception
 */
private boolean lck(final boolean shared) throws IOException {
  if (fl != null && shared == fl.isShared()) return true;
  if (fl != null) fl.release();
  fl = file.getChannel().tryLock(0, Long.MAX_VALUE, shared);
  return fl != null;
}
@Override
public void parseBody(Map<String, String> files) throws IOException, ResponseException {
  RandomAccessFile randomAccessFile = null;
  BufferedReader in = null;
  try {
    randomAccessFile = getTmpBucket();
    long size;
    if (headers.containsKey("content-length")) {
      size = Integer.parseInt(headers.get("content-length"));
    } else if (splitbyte < rlen) {
      size = rlen - splitbyte;
    } else {
      size = 0;
    }
    // Now read all the body and write it to f
    byte[] buf = new byte[512];
    while (rlen >= 0 && size > 0) {
      rlen = inputStream.read(buf, 0, (int) Math.min(size, 512));
      size -= rlen;
      if (rlen > 0) {
        randomAccessFile.write(buf, 0, rlen);
      }
    }
    // Get the raw body as a byte []
    ByteBuffer fbuf =
        randomAccessFile
            .getChannel()
            .map(FileChannel.MapMode.READ_ONLY, 0, randomAccessFile.length());
    randomAccessFile.seek(0);
    // Create a BufferedReader for easily reading it as string.
    InputStream bin = new FileInputStream(randomAccessFile.getFD());
    in = new BufferedReader(new InputStreamReader(bin));
    // If the method is POST, there may be parameters
    // in data section, too, read it:
    if (Method.POST.equals(method)) {
      String contentType = "";
      String contentTypeHeader = headers.get("content-type");
      StringTokenizer st = null;
      if (contentTypeHeader != null) {
        st = new StringTokenizer(contentTypeHeader, ",; ");
        if (st.hasMoreTokens()) {
          contentType = st.nextToken();
        }
      }
      if ("multipart/form-data".equalsIgnoreCase(contentType)) {
        // Handle multipart/form-data
        if (!st.hasMoreTokens()) {
          throw new ResponseException(
              Response.Status.BAD_REQUEST,
              "BAD REQUEST: Content type is multipart/form-data but boundary missing. Usage: GET /example/file.html");
        }
        String boundaryStartString = "boundary=";
        int boundaryContentStart =
            contentTypeHeader.indexOf(boundaryStartString) + boundaryStartString.length();
        String boundary =
            contentTypeHeader.substring(boundaryContentStart, contentTypeHeader.length());
        if (boundary.startsWith("\"") && boundary.endsWith("\"")) {
          boundary = boundary.substring(1, boundary.length() - 1);
        }
        decodeMultipartData(boundary, fbuf, in, parms, files);
      } else {
        String postLine = "";
        StringBuilder postLineBuffer = new StringBuilder();
        char pbuf[] = new char[512];
        int read = in.read(pbuf);
        while (read >= 0 && !postLine.endsWith("\r\n")) {
          postLine = String.valueOf(pbuf, 0, read);
          postLineBuffer.append(postLine);
          read = in.read(pbuf);
        }
        postLine = postLineBuffer.toString().trim();
        // Handle application/x-www-form-urlencoded
        if ("application/x-www-form-urlencoded".equalsIgnoreCase(contentType)) {
          decodeParms(postLine, parms);
        } else if (postLine.length() != 0) {
          // Special case for raw POST data => create a
          // special files entry "postData" with raw content data
          files.put("postData", postLine);
        }
      }
    } else if (Method.PUT.equals(method)) {
      files.put("content", saveTmpFile(fbuf, 0, fbuf.limit()));
    }
  } finally {
    safeClose(randomAccessFile);
    safeClose(in);
  }
}