/** @param workTokDir Token directory (common for multiple nodes). */
private void cleanupResources(File workTokDir) {
    RandomAccessFile lockFile = null;

    FileLock lock = null;

    try {
        lockFile = new RandomAccessFile(new File(workTokDir, LOCK_FILE_NAME), "rw");

        lock = lockFile.getChannel().lock();

        if (lock != null)
            processTokenDirectory(workTokDir);
        else if (log.isDebugEnabled())
            log.debug("Token directory is being processed concurrently: " + workTokDir.getAbsolutePath());
    }
    catch (OverlappingFileLockException ignored) {
        if (log.isDebugEnabled())
            log.debug("Token directory is being processed concurrently: " + workTokDir.getAbsolutePath());
    }
    catch (FileLockInterruptionException ignored) {
        Thread.currentThread().interrupt();
    }
    catch (IOException e) {
        U.error(log, "Failed to process directory: " + workTokDir.getAbsolutePath(), e);
    }
    finally {
        U.releaseQuiet(lock);
        U.closeQuiet(lockFile);
    }
}
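For context, the following is a minimal, standalone sketch of the same file-lock guard. It is not the method above: the lock file name "lock", the println placeholders, and the class name are assumptions, and tryLock() is used here so the example skips work instead of blocking when another process already owns the lock (the real method delegates to processTokenDirectory and the Ignite U/log utilities).

import java.io.File;
import java.io.RandomAccessFile;
import java.nio.channels.FileLock;
import java.nio.channels.OverlappingFileLockException;

public class TokenDirLockSketch {
    public static void main(String[] args) throws Exception {
        File workTokDir = new File(args.length > 0 ? args[0] : ".");

        // Try to acquire an OS-level lock on a well-known file inside the directory.
        try (RandomAccessFile lockFile = new RandomAccessFile(new File(workTokDir, "lock"), "rw");
             FileLock lock = lockFile.getChannel().tryLock()) {
            if (lock != null)
                System.out.println("Acquired lock, cleaning up: " + workTokDir.getAbsolutePath());
            else
                System.out.println("Another process holds the lock, skipping: " + workTokDir.getAbsolutePath());
        }
        catch (OverlappingFileLockException ignored) {
            // Some other thread in this JVM already holds the lock.
            System.out.println("Lock already held in this JVM, skipping.");
        }
    }
}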
/**
 * Decode file charset.
 *
 * @param f File to process.
 * @return File charset.
 * @throws IOException in case of error.
 */
public static Charset decode(File f) throws IOException {
    SortedMap<String, Charset> charsets = Charset.availableCharsets();

    String[] firstCharsets = {Charset.defaultCharset().name(), "US-ASCII", "UTF-8", "UTF-16BE", "UTF-16LE"};

    // Try the most common charsets first, then all remaining available ones.
    Collection<Charset> orderedCharsets = U.newLinkedHashSet(charsets.size());

    for (String c : firstCharsets)
        if (charsets.containsKey(c))
            orderedCharsets.add(charsets.get(c));

    orderedCharsets.addAll(charsets.values());

    try (RandomAccessFile raf = new RandomAccessFile(f, "r")) {
        FileChannel ch = raf.getChannel();

        // Sample up to the first 4096 bytes of the file.
        ByteBuffer buf = ByteBuffer.allocate(4096);

        ch.read(buf);

        buf.flip();

        for (Charset charset : orderedCharsets) {
            CharsetDecoder decoder = charset.newDecoder();

            decoder.reset();

            // Rewind so every candidate charset decodes from the start of the sample
            // (a failed decode attempt leaves the buffer position advanced).
            buf.rewind();

            try {
                decoder.decode(buf);

                return charset;
            }
            catch (CharacterCodingException ignored) {
                // Sample cannot be decoded with this charset, try the next one.
            }
        }
    }

    return Charset.defaultCharset();
}
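A possible usage sketch follows. The FileCharsets class name is hypothetical and stands in for whatever utility class hosts the decode(File) method above: detect the charset first, then read the file with it.

import java.io.File;
import java.io.IOException;
import java.nio.charset.Charset;
import java.nio.file.Files;

public class DecodeUsage {
    public static void main(String[] args) throws IOException {
        File f = new File(args[0]);

        // FileCharsets is a hypothetical host class for the decode(File) method above.
        Charset cs = FileCharsets.decode(f);

        System.out.println("Detected charset: " + cs.name());

        // Read the whole file using the detected charset.
        System.out.println(new String(Files.readAllBytes(f.toPath()), cs));
    }
}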