private static void copyFile(File srcFile, File targetFile)
    throws FileNotFoundException, IOException {
  InputStream in = new BufferedInputStream(new FileInputStream(srcFile));
  OutputStream out = null;
  try {
    out = new FileOutputStream(targetFile);
    IOUtil.transfer(in, out);
  } finally {
    IOUtil.close(out);
    IOUtil.close(in);
  }
}
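// Usage sketch (hypothetical file names; since copyFile is private, a call
// like this would live in the same class):
private static void backupFile(File original) throws IOException {
  copyFile(original, new File(original.getPath() + ".bak"));
}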
/**
 * Compares the contents of two streams by reading them.
 *
 * <p>NOTE: The streams get closed in any case.
 *
 * @param contents1 the first content stream
 * @param contents2 the second content stream
 * @return true iff both streams had the same length and the same data
 * @throws IOException if an I/O exception occurs while reading one of the streams
 */
public static boolean hasSameContents(InputStream contents1, InputStream contents2)
    throws IOException {
  try {
    int bufSize = 10000;
    byte[] buffer1 = new byte[bufSize];
    byte[] buffer2 = new byte[bufSize];
    boolean eof1 = false;
    boolean eof2 = false;
    while (!eof1 && !eof2) {
      // Fill buffer1 completely, or until EOF on the first stream.
      int pos1 = 0;
      while (pos1 != bufSize) {
        int count = contents1.read(buffer1, pos1, bufSize - pos1);
        if (count == -1) {
          eof1 = true;
          break;
        }
        pos1 += count;
      }
      // Fill buffer2 completely, or until EOF on the second stream.
      int pos2 = 0;
      while (pos2 != bufSize) {
        int count = contents2.read(buffer2, pos2, bufSize - pos2);
        if (count == -1) {
          eof2 = true;
          break;
        }
        pos2 += count;
      }
      if (eof1 || eof2) {
        // At least one stream ended: lengths and the bytes read so far must match.
        if (pos1 != pos2 || !firstBytesEquals(buffer1, buffer2, pos1)) {
          return false;
        }
      } else {
        if (!Arrays.equals(buffer1, buffer2)) {
          return false;
        }
      }
    }
    return true;
  } finally {
    IOUtil.closeSilently(contents1);
    IOUtil.closeSilently(contents2);
  }
}
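// Usage sketch (hypothetical helper): hasSameContents closes both streams
// itself, so the caller only needs to open them.
public static boolean filesMatch(File a, File b) throws IOException {
  return hasSameContents(new FileInputStream(a), new FileInputStream(b));
}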
public static String[] runCommand(String cmd, File dir) throws IOException {
  Process p = Runtime.getRuntime().exec(cmd.split(" +"), new String[] {}, dir);
  // Capture stdout and stderr before waiting for the process to exit.
  String[] results =
      new String[] {
        IOUtil.readStringFully(p.getInputStream()), IOUtil.readStringFully(p.getErrorStream())
      };
  try {
    if (p.waitFor() != 0)
      throw new RuntimeException(
          "command failed [" + cmd + "]\n" + results[0] + "\n" + results[1]);
  } catch (InterruptedException ie) {
    throw new RuntimeException("interrupted while waiting for command [" + cmd + "]", ie);
  }
  return results;
}
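// Usage sketch (assumes a POSIX "ls" on the PATH; purely illustrative):
// results[0] holds the command's stdout, results[1] its stderr; a non-zero
// exit code raises a RuntimeException.
public static void demoRunCommand() throws IOException {
  String[] out = runCommand("ls -l", new File("/tmp"));
  System.out.println(out[0]);
}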
long write(FileDescriptor fd, ByteBuffer[] bufs, int offset, int length) throws IOException {
  long totalWritten = 0;
  try {
    for (int i = offset; i < offset + length; i++) {
      int size = bufs[i].remaining();
      if (size > 0) {
        int written = IOUtil.write(fd, bufs[i], -1, this, this);
        totalWritten += written;
        if (written < size) {
          // Short write: the kernel accepted fewer bytes than requested, so stop.
          break;
        }
      }
    }
  } catch (IOException x) {
    // If nothing was written, propagate; otherwise report the partial count.
    if (totalWritten == 0) {
      throw x;
    }
  }
  return totalWritten;
}
long read(FileDescriptor fd, ByteBuffer[] bufs, int offset, int length) throws IOException {
  long totalRead = 0;
  try {
    for (int i = offset; i < offset + length; i++) {
      int size = bufs[i].remaining();
      if (size > 0) {
        int read = IOUtil.read(fd, bufs[i], -1, this, this);
        if (read < 0) {
          break;
        }
        totalRead += read;
        // Stop on a short read, or when no more bytes are available.
        if (read < size || fd.available() == 0) {
          break;
        }
      }
    }
  } catch (IOException x) {
    // If nothing was read, propagate; otherwise report the partial count.
    if (totalRead == 0) {
      throw x;
    }
  }
  return totalRead;
}
public void packageToFile(File packageFile) throws Exception {
  F2<RelativePath, PackageFileSystemObject<F2<UnixFsObject, ZipArchiveOutputStream, IoEffect>>, Boolean>
      pathFilter = pathFilter();

  fileSystem = fileSystem.prettify();

  Stream<PackageFileSystemObject<F2<UnixFsObject, ZipArchiveOutputStream, IoEffect>>> items =
      fileSystem.toList().filter(compose(BooleanF.invert, curry(pathFilter, BASE)));

  ZipArchiveOutputStream zos = null;
  try {
    zos = new ZipArchiveOutputStream(packageFile);
    zos.setLevel(Deflater.BEST_COMPRESSION);

    for (PackageFileSystemObject<F2<UnixFsObject, ZipArchiveOutputStream, IoEffect>>
        fileSystemObject : items) {
      fileSystemObject.getExtension().f(fileSystemObject.getUnixFsObject(), zos).run();
    }
  } finally {
    IOUtil.close(zos);
  }
}
protected void writeCu(CachedUrl cu) throws IOException {
  String url = cu.getUrl();
  long contentSize = cu.getContentSize();
  CIProperties props = cu.getProperties();
  long fetchTime = Long.parseLong(props.getProperty(CachedUrl.PROPERTY_FETCH_TIME));
  InputStream contentIn = cu.getUnfilteredInputStream();
  try {
    if (isResponse) {
      String hdrString = getHttpResponseString(cu);
      long size = contentSize + hdrString.length();
      InputStream headerIn = new ReaderInputStream(new StringReader(hdrString));
      InputStream concat = new SequenceInputStream(headerIn, contentIn);
      try {
        aw.write(xlateFilename(url), cu.getContentType(), getHostIp(), fetchTime, size, concat);
      } finally {
        IOUtil.safeClose(concat);
      }
    } else {
      aw.write(
          xlateFilename(url),
          cu.getContentType(),
          getHostIp(),
          fetchTime,
          cu.getContentSize(),
          contentIn);
    }
  } finally {
    AuUtil.safeRelease(cu);
  }
}
public String getPackageVersion(DebianDependency pkg, boolean onlyInstalled) {
  GetPackageVersionResult packageResult = new GetPackageVersionResult();
  IOUtil.executeProcess(new String[] {"dpkg", "--status", pkg.getPackageName()}, packageResult);
  if (packageResult.getResult() != null) {
    return packageResult.getResult();
  }

  if (!onlyInstalled) {
    GetChangelogVersionResult versionResult =
        new GetChangelogVersionResult(pkg.getPackageName());
    IOUtil.executeProcess(
        new String[] {
          "apt-get", "--no-act", "--verbose-versions", "install", pkg.getPackageName()
        },
        versionResult);
    if (versionResult.getResult() != null) {
      return versionResult.getResult();
    }
  }
  return null;
}
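// Usage sketch (hypothetical package name): consults "dpkg --status" for the
// installed version first, then falls back to a dry-run "apt-get install"
// because onlyInstalled is false.
public void demoGetPackageVersion() {
  String version = getPackageVersion(new DebianDependency("default-jdk"), false);
  System.out.println(version == null ? "no version found" : version);
}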
public DebianDependency searchPkg(File dir, String extension) {
  // Look up the cache first
  File cacheId = new File(dir, "<ANY>" + extension);
  if (filesInPackages.containsKey(cacheId)) {
    return new DebianDependency(filesInPackages.get(cacheId));
  }

  GetPackageContainingPatternResult packageResult =
      new GetPackageContainingPatternResult(extension);
  IOUtil.executeProcess(
      new String[] {"dpkg", "--search", dir.getAbsolutePath() + "/*/*"}, packageResult);
  if (!packageResult.getPackages().isEmpty()) {
    String pkg = packageResult.getPackages().iterator().next();
    filesInPackages.put(cacheId, pkg);
    return new DebianDependency(pkg);
  }

  // Debian policy prevents the use of apt-file during a build
  if (offline) {
    System.err.println("Offline mode. Giving up looking for package containing " + dir);
    return null;
  }
  if (!new File("/usr/bin/apt-file").exists()) {
    System.err.println(
        "/usr/bin/apt-file not found. Giving up looking for package containing " + dir);
    return null;
  }

  IOUtil.executeProcess(
      new String[] {"apt-file", "search", dir.getAbsolutePath()}, packageResult);
  if (!packageResult.getPackages().isEmpty()) {
    String pkg = packageResult.getPackages().iterator().next();
    filesInPackages.put(cacheId, pkg);
    return new DebianDependency(pkg);
  }
  return null;
}
private static synchronized String getJavascript() {
  if (jstext == null) {
    InputStream istr = null;
    try {
      ClassLoader loader = Thread.currentThread().getContextClassLoader();
      istr = loader.getResourceAsStream(JAVASCRIPT_RESOURCE);
      jstext = StringUtil.fromInputStream(istr);
    } catch (Exception e) {
      log.error("Can't load javascript", e);
    } finally {
      IOUtil.safeClose(istr);
    }
  }
  return jstext;
}
public List<String> listSharedJars(String library) {
  if (cacheOfSharedJars.get(library) != null) {
    return cacheOfSharedJars.get(library);
  }

  final List<String> jars = new ArrayList<String>();
  // Strip a trailing constraint such as "libfoo-java (>= 1.0)" down to the package name.
  if (library.indexOf("(") > 0) {
    library = library.substring(0, library.indexOf("(")).trim();
  }
  System.out.println();
  System.out.println("Looking for shared jars in package " + library + "...");
  IOUtil.executeProcess(
      new String[] {"dpkg", "--listfiles", library}, new SharedJarOutputHandler(jars));
  cacheOfSharedJars.put(library, jars);
  return jars;
}
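// Usage sketch (hypothetical package name): lists the jars shipped by a
// Debian package via "dpkg --listfiles"; results are cached per package.
public void demoListSharedJars() {
  for (String jar : listSharedJars("libcommons-io-java")) {
    System.out.println(jar);
  }
}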
public long read(ByteBuffer[] dsts) throws IOException {
  if (dsts == null)
    throw new NullPointerException();
  ensureOpen();
  synchronized (lock) {
    long n = 0;
    try {
      begin();
      if (!isOpen())
        return 0;
      thread = NativeThread.current();
      do {
        n = IOUtil.read(fd, dsts, nd);
      } while ((n == IOStatus.INTERRUPTED) && isOpen());
      return IOStatus.normalize(n);
    } finally {
      thread = 0;
      end((n > 0) || (n == IOStatus.UNAVAILABLE));
      assert IOStatus.check(n);
    }
  }
}
SourceChannelImpl(SelectorProvider sp, FileDescriptor fd) {
  super(sp);
  this.fd = fd;
  this.fdVal = IOUtil.fdVal(fd);
  this.state = ST_INUSE;
}
protected void implConfigureBlocking(boolean block) throws IOException {
  IOUtil.configureBlocking(fd, block);
}
@Override
public void onEnable() {
  try {
    loadConfig();
  } catch (IOException e) {
    throw new RuntimeException("Unable to load/save config", e);
  } catch (JedisConnectionException e) {
    throw new RuntimeException("Unable to connect to your Redis server!", e);
  }
  if (pool != null) {
    try (Jedis tmpRsc = pool.getResource()) {
      // This is more portable than INFO <section>
      String info = tmpRsc.info();
      for (String s : info.split("\r\n")) {
        if (s.startsWith("redis_version:")) {
          String version = s.split(":")[1];
          if (!(usingLua = RedisUtil.canUseLua(version))) {
            getLogger().warning(
                "Your version of Redis (" + version + ") is not at least version 2.6. "
                    + "RedisBungee requires a newer version of Redis.");
            throw new RuntimeException("Unsupported Redis version detected");
          } else {
            LuaManager manager = new LuaManager(this);
            serverToPlayersScript = manager.createScript(
                IOUtil.readInputStreamAsString(getResourceAsStream("lua/server_to_players.lua")));
            getPlayerCountScript = manager.createScript(
                IOUtil.readInputStreamAsString(getResourceAsStream("lua/get_player_count.lua")));
            getServerPlayersScript = manager.createScript(
                IOUtil.readInputStreamAsString(getResourceAsStream("lua/get_server_players.lua")));
          }
          break;
        }
      }
      tmpRsc.hset(
          "heartbeats", configuration.getServerId(), String.valueOf(System.currentTimeMillis()));
      long uuidCacheSize = tmpRsc.hlen("uuid-cache");
      if (uuidCacheSize > 750000) {
        getLogger().info(
            "Looks like you have a really big UUID cache! Run "
                + "https://www.spigotmc.org/resources/redisbungeecleaner.8505/ as soon as possible.");
      }
    }
    serverIds = getCurrentServerIds(true, false);
    uuidTranslator = new UUIDTranslator(this);
    heartbeatTask = getProxy().getScheduler().schedule(this, new Runnable() {
      @Override
      public void run() {
        try (Jedis rsc = pool.getResource()) {
          long redisTime = getRedisTime(rsc.time());
          rsc.hset("heartbeats", configuration.getServerId(), String.valueOf(redisTime));
        } catch (JedisConnectionException e) {
          // Redis server has disappeared!
          getLogger().log(
              Level.SEVERE, "Unable to update heartbeat - did your Redis server go away?", e);
        }
        serverIds = getCurrentServerIds(true, false);
        globalPlayerCount.set(getCurrentCount());
      }
    }, 0, 3, TimeUnit.SECONDS);
    dataManager = new DataManager(this);
    if (configuration.isRegisterBungeeCommands()) {
      getProxy().getPluginManager().registerCommand(this, new RedisBungeeCommands.GlistCommand(this));
      getProxy().getPluginManager().registerCommand(this, new RedisBungeeCommands.FindCommand(this));
      getProxy().getPluginManager().registerCommand(this, new RedisBungeeCommands.LastSeenCommand(this));
      getProxy().getPluginManager().registerCommand(this, new RedisBungeeCommands.IpCommand(this));
    }
    getProxy().getPluginManager().registerCommand(this, new RedisBungeeCommands.SendToAll(this));
    getProxy().getPluginManager().registerCommand(this, new RedisBungeeCommands.ServerId(this));
    getProxy().getPluginManager().registerCommand(this, new RedisBungeeCommands.ServerIds());
    getProxy().getPluginManager().registerCommand(this, new RedisBungeeCommands.PlayerProxyCommand(this));
    getProxy().getPluginManager().registerCommand(this, new RedisBungeeCommands.PlistCommand(this));
    getProxy().getPluginManager().registerCommand(this, new RedisBungeeCommands.DebugCommand(this));
    api = new RedisBungeeAPI(this);
    getProxy().getPluginManager().registerListener(
        this, new RedisBungeeListener(this, configuration.getExemptAddresses()));
    getProxy().getPluginManager().registerListener(this, dataManager);
    psl = new PubSubListener();
    getProxy().getScheduler().runAsync(this, psl);
    integrityCheck = getProxy().getScheduler().schedule(this, new Runnable() {
      @Override
      public void run() {
        try (Jedis tmpRsc = pool.getResource()) {
          Set<String> players = getLocalPlayersAsUuidStrings();
          Set<String> playersInRedis =
              tmpRsc.smembers("proxy:" + configuration.getServerId() + ":usersOnline");
          List<String> lagged = getCurrentServerIds(false, true);
          // Clean up lagged players.
          for (String s : lagged) {
            Set<String> laggedPlayers = tmpRsc.smembers("proxy:" + s + ":usersOnline");
            tmpRsc.del("proxy:" + s + ":usersOnline");
            if (!laggedPlayers.isEmpty()) {
              getLogger().info(
                  "Cleaning up lagged proxy " + s + " (" + laggedPlayers.size() + " players)...");
              for (String laggedPlayer : laggedPlayers) {
                RedisUtil.cleanUpPlayer(laggedPlayer, tmpRsc);
              }
            }
          }
          for (Iterator<String> it = playersInRedis.iterator(); it.hasNext(); ) {
            String member = it.next();
            if (!players.contains(member)) {
              // Are they simply on a different proxy?
              String found = null;
              for (String proxyId : getServerIds()) {
                if (proxyId.equals(configuration.getServerId())) continue;
                if (tmpRsc.sismember("proxy:" + proxyId + ":usersOnline", member)) {
                  // Just clean up the set.
                  found = proxyId;
                  break;
                }
              }
              if (found == null) {
                RedisUtil.cleanUpPlayer(member, tmpRsc);
                getLogger().warning(
                    "Player found in set that was not found locally and globally: " + member);
              } else {
                tmpRsc.srem("proxy:" + configuration.getServerId() + ":usersOnline", member);
                getLogger().warning(
                    "Player found in set that was not found locally, but is on another proxy: "
                        + member);
              }
              it.remove();
            }
          }
          Pipeline pipeline = tmpRsc.pipelined();
          for (String player : players) {
            if (playersInRedis.contains(player)) continue;
            // Player is online according to BungeeCord but missing from Redis.
            getLogger().warning("Player " + player + " is on the proxy but not in Redis.");
            ProxiedPlayer proxiedPlayer =
                ProxyServer.getInstance().getPlayer(UUID.fromString(player));
            if (proxiedPlayer == null) continue; // We'll deal with it later.
            RedisUtil.createPlayer(proxiedPlayer, pipeline, true);
          }
          pipeline.sync();
        }
      }
    }, 0, 1, TimeUnit.MINUTES);
  }
  getProxy().registerChannel("RedisBungee");
}
/** Explode the archive into its constituent elements */
public void explode() throws CacheException {
  CachedUrl cachedUrl = null;
  int goodEntries = 0;
  int badEntries = 0;
  int ignoredEntries = 0;
  int entriesBetweenSleep = 0;
  ArchiveReader arcReader = null;

  logger.info(
      (storeArchive ? "Storing" : "Fetching") + " WARC file: " + origUrl + " will explode");
  try {
    if (storeArchive) {
      UrlCacher uc = au.makeUrlCacher(new UrlData(arcStream, arcProps, fetchUrl));
      BitSet bs = new BitSet();
      bs.set(UrlCacher.DONT_CLOSE_INPUT_STREAM_FLAG);
      uc.setFetchFlags(bs);
      uc.storeContent();
      archiveData.resetInputStream();
      arcStream = archiveData.input;
    }
    // Wrap it in an ArchiveReader
    logger.debug3("About to wrap stream");
    arcReader = wrapStream(fetchUrl, arcStream);
    logger.debug3("wrapStream() returns " + (arcReader == null ? "null" : "non-null"));
    // Explode it
    if (arcReader == null) {
      throw new CacheException.ExploderException("no WarcReader for " + origUrl);
    }
    ArchivalUnit au = crawlFacade.getAu();
    Set stemSet = new HashSet();
    logger.debug("Exploding " + fetchUrl);
    // Iterate through the elements in the WARC file, except the first
    Iterator i = arcReader.iterator();
    // Skip first record
    for (i.next(); i.hasNext(); ) {
      // XXX probably not necessary
      helper.pokeWDog();
      if ((++entriesBetweenSleep % sleepAfter) == 0) {
        long pauseTime =
            CurrentConfig.getTimeIntervalParam(PARAM_RETRY_PAUSE, DEFAULT_RETRY_PAUSE);
        Deadline pause = Deadline.in(pauseTime);
        logger.debug3("Sleeping for " + StringUtil.timeIntervalToString(pauseTime));
        while (!pause.expired()) {
          try {
            pause.sleep();
          } catch (InterruptedException ie) {
            // no action
          }
        }
      }
      ArchiveRecord element = (ArchiveRecord) i.next();
      // Each element is a URL to be cached in a suitable AU
      ArchiveRecordHeader elementHeader = element.getHeader();
      String elementUrl = elementHeader.getUrl();
      String elementMimeType = elementHeader.getMimetype();
      long elementLength = elementHeader.getLength();
      logger.debug2("WARC url " + elementUrl + " mime " + elementMimeType);
      if (elementUrl.startsWith("http:")) {
        ArchiveEntry ae =
            new ArchiveEntry(
                elementUrl,
                elementLength,
                0, // XXX need to convert getDate string to long
                element, // ArchiveRecord extends InputStream
                this,
                fetchUrl);
        ae.setHeaderFields(makeCIProperties(elementHeader));
        long bytesStored = elementLength;
        logger.debug3("ArchiveEntry: " + ae.getName() + " bytes " + bytesStored);
        try {
          helper.process(ae);
        } catch (PluginException ex) {
          throw new CacheException.ExploderException("helper.process() threw", ex);
        }
        if (ae.getBaseUrl() != null) {
          if (ae.getRestOfUrl() != null && ae.getHeaderFields() != null) {
            storeEntry(ae);
            handleAddText(ae);
            goodEntries++;
            crawlFacade.getCrawlerStatus().addContentBytesFetched(bytesStored);
          } else {
            ignoredEntries++;
          }
        } else {
          badEntries++;
          logger.debug2("Can't map " + elementUrl + " from " + archiveUrl);
        }
      }
    }
  } catch (IOException ex) {
    throw new CacheException.ExploderException(ex);
  } finally {
    if (arcReader != null) {
      try {
        arcReader.close();
        arcReader = null;
      } catch (IOException ex) {
        throw new CacheException.ExploderException(ex);
      }
    }
    if (cachedUrl != null) {
      cachedUrl.release();
    }
    IOUtil.safeClose(arcStream);
  }
  if (badEntries == 0 && goodEntries > 0) {
    // Make it look like a new crawl finished on each AU to which URLs were added.
    for (Iterator it = touchedAus.iterator(); it.hasNext(); ) {
      ArchivalUnit au = (ArchivalUnit) it.next();
      logger.debug3(archiveUrl + " touching " + au.toString());
      AuUtil.getDaemon(au).getNodeManager(au).newContentCrawlFinished();
    }
  } else {
    ArchivalUnit au = crawlFacade.getAu();
    String msg = archiveUrl + ": " + badEntries + "/" + goodEntries + " bad entries";
    throw new CacheException.UnretryableException(msg);
  }
}