/**
 * Tears down a receive stream and removes it from the active list.
 *
 * Teardown order matters: the packet buffer is disabled and flushed first
 * (video only), then the sink, processor and data source are stopped and
 * closed/disconnected, and only then is the descriptor dropped from
 * {@code receiveStreams}.
 *
 * @param receiveStream the stream descriptor to remove
 * @param emptyJB NOTE(review): this parameter is not read anywhere in the
 *        visible body -- the packet buffer is always emptied for video
 *        streams. Confirm whether it was meant to gate emptyPacketBuffer().
 */
private void removeReceiveStream(ReceiveStreamDesc receiveStream, boolean emptyJB) {
    if (receiveStream.format instanceof VideoFormat) {
        // Stop accepting new packets for this SSRC, then flush what is queued.
        rtpConnector.packetBuffer.disable(receiveStream.ssrc);
        emptyPacketBuffer(receiveStream.ssrc);
    }
    if (receiveStream.dataSink != null) {
        try {
            receiveStream.dataSink.stop();
        } catch (IOException e) {
            logger.error("Failed to stop DataSink " + e);
        }
        // close() is attempted even if stop() failed.
        receiveStream.dataSink.close();
    }
    if (receiveStream.processor != null) {
        receiveStream.processor.stop();
        receiveStream.processor.close();
    }
    DataSource dataSource = receiveStream.receiveStream.getDataSource();
    if (dataSource != null) {
        try {
            dataSource.stop();
        } catch (IOException ioe) {
            logger.warn("Failed to stop DataSource");
        }
        dataSource.disconnect();
    }
    // Removal is synchronized; other code paths iterate receiveStreams.
    synchronized (receiveStreams) {
        receiveStreams.remove(receiveStream);
    }
}
public static void main(String[] args) throws IOException { br = new BufferedReader(new InputStreamReader(System.in)); out = new PrintWriter(new OutputStreamWriter(System.out)); // br = new BufferedReader(new FileReader("in.txt")); // out = new PrintWriter(new FileWriter("out.txt")); TreeMap<Character, TreeSet<Integer>> tm = new TreeMap<Character, TreeSet<Integer>>(); N = readInt(); HashSet<Character> open = new HashSet<Character>(); for (int i = 0; i < N; i++) { String next = readLine(); if (next.charAt(0) == '^') { open.add(next.charAt(1)); if (tm.get(next.charAt(1)) == null) tm.put(next.charAt(1), new TreeSet<Integer>()); } else if (next.charAt(0) == '/') { open.remove(next.charAt(1)); } else { int val = Integer.parseInt(next); for (Character c : open) { tm.get(c).add(val); } } } for (Map.Entry<Character, TreeSet<Integer>> e : tm.entrySet()) { out.print(e.getKey() + " "); for (Integer i : e.getValue()) out.print(i + " "); out.println(); } out.close(); }
/**
 * Removes a listener previously registered for the given message type.
 *
 * @param id message type id the listener was registered under
 * @param m  the listener to remove (a no-op if it was never registered)
 * @throws IllegalArgumentException if no listener set exists for {@code id}
 */
public void deregisterListener(int id, MessageListener m) {
    // Integer.valueOf uses the small-value cache; new Integer(int) is
    // deprecated (and removed in recent JDKs).
    HashSet listenerSet = (HashSet) idTable.get(Integer.valueOf(id));
    if (listenerSet == null) {
        throw new IllegalArgumentException("No listeners registered for message type " + id);
    }
    listenerSet.remove(m);
}
/**
 * Removes a player from the friend list and persists the change.
 * Does nothing when the player is not currently a friend.
 *
 * @param friend The player
 */
public static void removeFriend(final Player friend) {
    synchronized (monitor) {
        // remove() reports whether the element was present, so the original
        // separate "contains(...) == false" check is unnecessary, and
        // saveFriends() is only invoked when the list actually changed.
        if (friends.remove(friend)) {
            saveFriends();
        }
    }
}
/**
 * Enables or disables an attribute by maintaining the lazily-allocated
 * set of forbidden attribute keys.
 *
 * @param key the attribute key to toggle
 * @param b   true to enable (un-forbid) the attribute, false to forbid it
 */
public void setAttributeEnabled(AttributeKey key, boolean b) {
    if (b) {
        // Enabling: nothing to clear when no attribute was ever forbidden.
        // (The original allocated an empty set just to remove from it.)
        if (forbiddenAttributes != null) {
            forbiddenAttributes.remove(key);
        }
    } else {
        if (forbiddenAttributes == null) {
            forbiddenAttributes = new HashSet<AttributeKey>();
        }
        forbiddenAttributes.add(key);
    }
}
public boolean hashSetContainsSameOperations(HashSet set1, HashSet set2) { boolean result = false; HashSet s1 = (HashSet) set1.clone(); HashSet s2 = (HashSet) set2.clone(); // first part, are all elements of s1 also in s2? boolean one = false; Iterator it = set1.iterator(); while (it.hasNext()) { PDMOperation d = (PDMOperation) it.next(); if (s2.contains(d)) { s1.remove(d); } } if (s1.isEmpty()) { one = true; } // second part, are all elements of s21 also in s1? boolean two = false; HashSet s3 = (HashSet) set1.clone(); HashSet s4 = (HashSet) set2.clone(); Iterator it2 = set2.iterator(); while (it2.hasNext()) { PDMOperation d = (PDMOperation) it2.next(); if (s3.contains(d)) { s4.remove(d); } } if (s4.isEmpty()) { two = true; } // administrative stuff s1.clear(); s2.clear(); s3.clear(); s4.clear(); result = one && two; return result; }
/**
 * Handles a library "return": removes an object from the set of objects
 * currently held by a subject.
 *
 * arguments[0] = object name, arguments[1] = subject name.
 * Silently ignores malformed argument arrays and unknown subjects
 * (matching the guard style used by the other handlers in this class).
 */
public void libraryReturn(String[] arguments) {
    if (arguments.length != 2) {
        return;
    }
    String subject = arguments[1];
    String object = arguments[0];
    HashSet<String> objects = this.subjectObjects.get(subject);
    // Guard against an unknown subject: the original dereferenced the
    // lookup result unconditionally and threw NullPointerException.
    if (objects == null) {
        return;
    }
    objects.remove(object);
    this.subjectObjects.put(subject, objects);
}
/**
 * Removes a Biba category from an object, if the object is known.
 *
 * arguments[0] = object name, arguments[1] = category name.
 * Malformed argument arrays and unknown objects are ignored silently.
 */
public void bibaRemoveCat(String[] arguments) {
    if (arguments.length != 2) {
        return;
    }
    final String obj = arguments[0];
    final String cat = arguments[1];
    if (!this.objectCategories.containsKey(obj)) {
        return;
    }
    HashSet<String> cats = this.objectCategories.get(obj);
    cats.remove(cat);
    this.objectCategories.put(obj, cats);
    System.out.println("Removed category '" + cat + "' from object '" + obj + "'.");
}
/**
 * Creates a patch from the two passed in files, writing the result to
 * <code>os</code>.
 *
 * Algorithm: two passes over the jar entries.
 * Pass 1 classifies every entry of the new jar as an implicit move (same
 * name, same content), an explicit MOVE (same content under a different
 * name), or a new/modified entry. Pass 2 derives the deleted set as
 * old-jar entries not accounted for by pass 1. The patch then consists of
 * an index (MOVE/REMOVE commands) followed by the new/modified entries.
 *
 * @param oldPath path of the old jar
 * @param newPath path of the new jar
 * @param os      stream the jardiff is written to
 * @param minimal when true, produce a minimal diff (may emit multiple MOVE
 *                commands with the same source, which older patchers
 *                cannot handle); when false, fall back to adding the
 *                target as a new file for backward compatibility
 * @throws IOException if reading either jar or writing the patch fails
 */
public static void createPatch(String oldPath, String newPath, OutputStream os, boolean minimal) throws IOException {
    JarFile2 oldJar = new JarFile2(oldPath);
    JarFile2 newJar = new JarFile2(newPath);
    try {
        Iterator entries;
        // newname -> oldname for every explicit MOVE command.
        HashMap moved = new HashMap();
        // NOTE(review): 'visited' is never read or written in this method.
        HashSet visited = new HashSet();
        // Entries with identical name+content in both jars (no command needed).
        HashSet implicit = new HashSet();
        // Old-jar names already used as the source of a MOVE.
        HashSet moveSrc = new HashSet();
        // New or modified entries that must be shipped in full.
        HashSet newEntries = new HashSet();
        // FIRST PASS
        // Go through the entries in new jar and
        // determine which files are candidates for implicit moves
        // ( files that has the same filename and same content in old.jar
        // and new.jar )
        // and for files that cannot be implicitly moved, we will either
        // find out whether it is moved or new (modified)
        entries = newJar.getJarEntries();
        if (entries != null) {
            while (entries.hasNext()) {
                JarEntry newEntry = (JarEntry) entries.next();
                String newname = newEntry.getName();
                // Return best match of contents, will return a name match if possible
                String oldname = oldJar.getBestMatch(newJar, newEntry);
                if (oldname == null) {
                    // New or modified entry
                    if (_debug) {
                        System.out.println("NEW: " + newname);
                    }
                    newEntries.add(newname);
                } else {
                    // Content already exist - need to do a move
                    // Should do implicit move? Yes, if names are the same, and
                    // no move command already exist from oldJar
                    if (oldname.equals(newname) && !moveSrc.contains(oldname)) {
                        if (_debug) {
                            System.out.println(newname + " added to implicit set!");
                        }
                        implicit.add(newname);
                    } else {
                        // The 1.0.1/1.0 JarDiffPatcher cannot handle
                        // multiple MOVE command with same src.
                        // The work around here is if we are going to generate
                        // a MOVE command with duplicate src, we will
                        // instead add the target as a new file. This way
                        // the jardiff can be applied by 1.0.1/1.0
                        // JarDiffPatcher also.
                        if (!minimal && (implicit.contains(oldname) || moveSrc.contains(oldname))) {
                            // generate non-minimal jardiff
                            // for backward compatibility
                            if (_debug) {
                                System.out.println("NEW: " + newname);
                            }
                            newEntries.add(newname);
                        } else {
                            // Use newname as key, since they are unique
                            if (_debug) {
                                System.err.println("moved.put " + newname + " " + oldname);
                            }
                            moved.put(newname, oldname);
                            moveSrc.add(oldname);
                        }
                        // Check if this disables an implicit 'move <oldname> <oldname>'
                        if (implicit.contains(oldname) && minimal) {
                            if (_debug) {
                                System.err.println("implicit.remove " + oldname);
                                System.err.println("moved.put " + oldname + " " + oldname);
                            }
                            // Demote the implicit move to an explicit self-move.
                            implicit.remove(oldname);
                            moved.put(oldname, oldname);
                            moveSrc.add(oldname);
                        }
                    }
                }
            }
        } // if (entries != null)
        // SECOND PASS: <deleted files> = <oldjarnames> - <implicitmoves> -
        // <source of move commands> - <new or modified entries>
        ArrayList deleted = new ArrayList();
        entries = oldJar.getJarEntries();
        if (entries != null) {
            while (entries.hasNext()) {
                JarEntry oldEntry = (JarEntry) entries.next();
                String oldName = oldEntry.getName();
                if (!implicit.contains(oldName) && !moveSrc.contains(oldName)
                        && !newEntries.contains(oldName)) {
                    if (_debug) {
                        System.err.println("deleted.add " + oldName);
                    }
                    deleted.add(oldName);
                }
            }
        }
        // DEBUG
        if (_debug) {
            // DEBUG: print out moved map
            entries = moved.keySet().iterator();
            if (entries != null) {
                System.out.println("MOVED MAP!!!");
                while (entries.hasNext()) {
                    String newName = (String) entries.next();
                    String oldName = (String) moved.get(newName);
                    System.out.println("key is " + newName + " value is " + oldName);
                }
            }
            // DEBUG: print out IMOVE map
            entries = implicit.iterator();
            if (entries != null) {
                System.out.println("IMOVE MAP!!!");
                while (entries.hasNext()) {
                    String newName = (String) entries.next();
                    System.out.println("key is " + newName);
                }
            }
        }
        JarOutputStream jos = new JarOutputStream(os);
        // Write out all the MOVEs and REMOVEs
        createIndex(jos, deleted, moved);
        // Put in New and Modified entries
        entries = newEntries.iterator();
        if (entries != null) {
            while (entries.hasNext()) {
                String newName = (String) entries.next();
                if (_debug) {
                    System.out.println("New File: " + newName);
                }
                writeEntry(jos, newJar.getEntryByName(newName), newJar);
            }
        }
        jos.finish();
        jos.close();
    } catch (IOException ioE) {
        throw ioE;
    } finally {
        // Best-effort close of both jars; close failures are deliberately ignored.
        try {
            oldJar.getJarFile().close();
        } catch (IOException e1) {
            // ignore
        }
        try {
            newJar.getJarFile().close();
        } catch (IOException e1) {
            // ignore
        }
    } // finally
}
/**
 * Feeds one sample (value tuple) to a program point: recursively forwards
 * it to matching splitter conditionals and the combined exit point, then
 * applies it to every invariant in every slice whose variables are all
 * present, recording which invariants were tested, failed, or deactivated.
 *
 * @param ppt the program point receiving the sample
 * @param vt  the sample's variable values
 */
private void add(PptTopLevel ppt, ValueTuple vt) {
    // Add the sample to any splitters
    if (ppt.has_splitters()) {
        for (PptSplitter ppt_split : ppt.splitters) {
            PptConditional ppt_cond = ppt_split.choose_conditional(vt);
            if (ppt_cond != null) add(ppt_cond, vt);
            else debug.fine(": sample doesn't pick conditional");
        }
    }
    // if this is a numbered exit, apply to the combined exit as well
    if (!(ppt instanceof PptConditional) && ppt.ppt_name.isNumberedExitPoint()) {
        PptTopLevel parent = all_ppts.get(ppt.ppt_name.makeExit());
        if (parent != null) {
            parent.get_missingOutOfBounds(ppt, vt);
            add(parent, vt);
        }
    }
    // If the point has no variables, skip it
    if (ppt.var_infos.length == 0) return;
    // We should have received sample here before, or there is nothing
    // to check.
    // Yoav added: It can be that the different dtrace and inv files have different program points
    // NOTE(review): the "false &&" guard permanently disables this assertion.
    if (false && ppt.num_samples() <= 0)
        Assert.assertTrue(
                ppt.num_samples() > 0,
                "ppt " + ppt.name + " has 0 samples and " + ppt.var_infos.length + " variables");
    // Loop through each slice
    slice_loop:
    for (Iterator<PptSlice> i = ppt.views_iterator(); i.hasNext(); ) {
        PptSlice slice = i.next();
        if (debug_detail.isLoggable(Level.FINE))
            debug_detail.fine(
                    ": processing slice " + slice + "vars: " + Debug.toString(slice.var_infos, vt));
        // If any variables are missing, skip this slice
        for (int j = 0; j < slice.var_infos.length; j++) {
            VarInfo v = slice.var_infos[j];
            // NOTE(review): 'mod' is computed but never used below.
            int mod = vt.getModified(v);
            if (v.isMissing(vt)) {
                if (debug_detail.isLoggable(Level.FINE))
                    debug_detail.fine(": : Skipping slice, " + v.name() + " missing");
                continue slice_loop;
            }
            if (v.missingOutOfBounds()) {
                // NOTE(review): logs to 'debug' here, unlike the 'debug_detail'
                // logger used everywhere else in this loop -- confirm intent.
                if (debug_detail.isLoggable(Level.FINE))
                    debug.fine(": : Skipping slice, " + v.name() + " out of bounds");
                continue slice_loop;
            }
        }
        // Loop through each invariant
        for (Invariant inv : slice.invs) {
            if (debug_detail.isLoggable(Level.FINE))
                debug_detail.fine(": : Processing invariant: " + inv);
            if (!inv.isActive()) {
                if (debug_detail.isLoggable(Level.FINE))
                    debug_detail.fine(": : skipped non-active " + inv);
                continue;
            }
            // Yoav added
            if (!activeInvariants.contains(inv)) {
                // System.out.printf ("skipping invariant %s:%s\n", inv.ppt.name(),
                // inv.format());
                continue;
            }
            // String invRep = invariant2str(ppt, inv);
            testedInvariants.add(inv);
            InvariantStatus status = inv.add_sample(vt, 1);
            sample_cnt++;
            // Any status change means the sample falsified (or weakened) the
            // invariant: report it and retire it from the active set.
            if (status != InvariantStatus.NO_CHANGE) {
                LineNumberReader lnr = FileIO.data_trace_state.reader;
                String line = (lnr == null) ? "?" : String.valueOf(lnr.getLineNumber());
                if (!quiet) {
                    output_stream.println(
                            "At ppt "
                                    + ppt.name
                                    + ", Invariant '"
                                    + inv.format()
                                    + "' invalidated by sample "
                                    + Debug.toString(slice.var_infos, vt)
                                    + "at line "
                                    + line
                                    + " in file "
                                    + FileIO.data_trace_state.filename);
                }
                failedInvariants.add(inv);
                activeInvariants.remove(inv);
                error_cnt++;
            }
        }
    }
}
private void unpackSegment(InputStream in, JarOutputStream out) throws IOException { _props.setProperty(java.util.jar.Pack200.Unpacker.PROGRESS, "0"); // Process the output directory or jar output. new PackageReader(pkg, in).read(); if (_props.getBoolean("unpack.strip.debug")) pkg.stripAttributeKind("Debug"); if (_props.getBoolean("unpack.strip.compile")) pkg.stripAttributeKind("Compile"); _props.setProperty(java.util.jar.Pack200.Unpacker.PROGRESS, "50"); pkg.ensureAllClassFiles(); // Now write out the files. HashSet classesToWrite = new HashSet(pkg.getClasses()); for (Iterator i = pkg.getFiles().iterator(); i.hasNext(); ) { Package.File file = (Package.File) i.next(); String name = file.nameString; JarEntry je = new JarEntry(Utils.getJarEntryName(name)); boolean deflate; deflate = (keepDeflateHint) ? (((file.options & Constants.FO_DEFLATE_HINT) != 0) || ((pkg.default_options & Constants.AO_DEFLATE_HINT) != 0)) : deflateHint; boolean needCRC = !deflate; // STORE mode requires CRC if (needCRC) crc.reset(); bufOut.reset(); if (file.isClassStub()) { Package.Class cls = file.getStubClass(); assert (cls != null); new ClassWriter(cls, needCRC ? crcOut : bufOut).write(); classesToWrite.remove(cls); // for an error check } else { // collect data & maybe CRC file.writeTo(needCRC ? crcOut : bufOut); } je.setMethod(deflate ? 
JarEntry.DEFLATED : JarEntry.STORED); if (needCRC) { if (verbose > 0) Utils.log.info("stored size=" + bufOut.size() + " and crc=" + crc.getValue()); je.setMethod(JarEntry.STORED); je.setSize(bufOut.size()); je.setCrc(crc.getValue()); } if (keepModtime) { je.setTime(file.modtime); // Convert back to milliseconds je.setTime((long) file.modtime * 1000); } else { je.setTime((long) modtime * 1000); } out.putNextEntry(je); bufOut.writeTo(out); out.closeEntry(); if (verbose > 0) Utils.log.info("Writing " + Utils.zeString((ZipEntry) je)); } assert (classesToWrite.isEmpty()); _props.setProperty(java.util.jar.Pack200.Unpacker.PROGRESS, "100"); pkg.reset(); // reset for the next segment, if any }
/**
 * Builds a simple state space for this PDM by breadth-style expansion:
 * seeds one start state (empty, or with all leaf inputs available when
 * {@code input} is true), then repeatedly expands every frontier state via
 * calculateNextStates until no unexpanded states remain.
 *
 * NOTE(review): 'i' is not declared here -- it is a field used as a running
 * state counter and reset to 0 at the end; the inner "for (int i ...)" loop
 * shadows it. Local 'j' is written but never read (dead code).
 *
 * @param root      forwarded to calculateNextStates
 * @param failure   forwarded to calculateNextStates
 * @param input     when true, start with all leaf input elements available
 * @param colored   forwarded to the PDMStateSpace constructor
 * @param numStates forwarded to calculateNextStates
 * @param breadth   forwarded to calculateNextStates
 * @return the populated state space
 */
public PDMStateSpace calculateSimpleStateSpace(
        boolean root, boolean failure, boolean input, boolean colored, int numStates, int breadth) {
    PDMStateSpace result = new PDMStateSpace(this, colored);
    HashSet states = new HashSet();
    int j = (operations.size() + 1);
    if (!input) {
        // Start from a completely empty state.
        HashSet empty = new HashSet();
        PDMState st = new PDMState(result, "state" + i, empty, empty, empty);
        result.addState(st);
        states.add(st);
        i++;
    } else {
        // Start with the complete set of input data elements available
        HashSet empty = new HashSet();
        String name = new String("state" + i);
        HashSet ins = new HashSet(); // this hashSet contains the input
        // elements to the process (input
        // elements of PDM)
        HashSet execOps = new HashSet();
        // Fill the hashSet with the leaf elements
        HashMap leafs = getLeafElements();
        Object[] leafElts = leafs.values().toArray();
        for (int i = 0; i < leafElts.length; i++) {
            PDMDataElement d = (PDMDataElement) leafElts[i];
            ins.add(d);
        }
        HashSet leafOps = getLeafOperations();
        Iterator it = leafOps.iterator();
        while (it.hasNext()) {
            PDMOperation op = (PDMOperation) it.next();
            execOps.add(op);
        }
        PDMState start = new PDMState(result, name, ins, execOps, empty); // start
        // state
        // of
        // the
        // statespace
        result.addState(start);
        i++;
        states.add(start);
    }
    // Expand until the frontier is exhausted. Iterate over a clone because
    // 'states' is mutated (successors added, expanded state removed) inside.
    while (!states.isEmpty()) {
        HashSet states2 = (HashSet) states.clone();
        Iterator it = states2.iterator();
        while (it.hasNext()) {
            PDMState state = (PDMState) it.next();
            HashSet nextStates = calculateNextStates(state, result, root, failure, numStates, breadth);
            Iterator it2 = nextStates.iterator();
            // Add the new states to iterator
            while (it2.hasNext()) {
                PDMState st = (PDMState) it2.next();
                states.add(st);
            }
            states.remove(state);
        }
    }
    // Reset the shared counters for the next run.
    i = 0;
    j = 0;
    Message.add("<PDMMDPStateSpace>", Message.TEST);
    Message.add("<NumberOfStates = " + result.getNumberOfStates() + " >", Message.TEST);
    Message.add("</PDMMDPStateSpace>", Message.TEST);
    return result;
}
public HashSet calculateExecutableOperations( HashSet dataElts, HashSet executed, HashSet failed, boolean root) { HashSet result = new HashSet(); HashSet enabledOperations = new HashSet(); if (root) { // Calculate the enabled operations (i.e. those operation of which // all input elements are in the set of available elements) Object[] ops = operations.values().toArray(); for (int i = 0; i < ops.length; i++) { PDMOperation op = (PDMOperation) ops[i]; HashMap inputs = op.getInputElements(); Object[] ins = inputs.values().toArray(); boolean enabled = true; int k = 0; while (enabled && k < ins.length) { PDMDataElement d = (PDMDataElement) ins[k]; if (!(dataElts.contains(d))) { enabled = false; } k++; } if (enabled) { enabledOperations.add(op); // System.out.println("Enabled operation: "+ op.getID()); } } } else if (!(dataElts.contains(this.getRootElement()))) { // Calculate the enabled operations (i.e. those operation of which // all input elements are in the set of available elements) Object[] ops = operations.values().toArray(); for (int i = 0; i < ops.length; i++) { PDMOperation op = (PDMOperation) ops[i]; HashMap inputs = op.getInputElements(); Object[] ins = inputs.values().toArray(); boolean enabled = true; int k = 0; while (enabled && k < ins.length) { PDMDataElement d = (PDMDataElement) ins[k]; if (!(dataElts.contains(d))) { enabled = false; } k++; } if (enabled) { enabledOperations.add(op); } } } // remove already executed operations Iterator exIt = executed.iterator(); while (exIt.hasNext()) { PDMOperation op = (PDMOperation) exIt.next(); enabledOperations.remove(op); } // remove already failed operations Iterator fIt = failed.iterator(); while (fIt.hasNext()) { PDMOperation op = (PDMOperation) fIt.next(); enabledOperations.remove(op); } result = enabledOperations; return result; }
/**
 * Returns a connection to the pool: if it was checked out (present in the
 * used list), append it to the tail of the not-used list.
 */
static synchronized void pushConnectionBackToPool(ConnectionWrapper con) {
    // remove() reports whether the connection was actually checked out.
    if (m_usedUsedConnection.remove(con)) {
        m_notUsedConnection.addLast(con);
    }
}