/** Resets every lookup map so the next run starts from a clean slate. */
@Override
public void prepare() {
    brandUriToTypeMap.clear();
    brandUriToSeriesUrisMap.clear();
    seriesUriToEpisodeUrisMap.clear();
    contentUriToTitleMap.clear();
}
/**
 * Rebuilds the two lookup indexes from the reactions contained in this
 * collection: every reaction is indexed under the identifier of each of its
 * reactant and product molecules, and under its own identifier.
 *
 * NOTE(review): {@code participantMap.get(...).add(rxn)} is only safe if
 * {@code participantMap} is a Guava Multimap (whose {@code get()} returns a
 * live, addable view); if it is a plain {@code Map} this throws NPE for any
 * identifier not already present — confirm the field's declared type.
 */
public void rebuildMaps() {
    participantMap.clear();
    reactionMap.clear();
    for (MetabolicReaction rxn : this) {
        // Index the reaction under every reactant molecule identifier...
        for (MetabolicParticipant m : rxn.getReactants()) {
            participantMap.get(m.getMolecule().getIdentifier()).add(rxn);
        }
        // ...and under every product molecule identifier.
        for (MetabolicParticipant m : rxn.getProducts()) {
            participantMap.get(m.getMolecule().getIdentifier()).add(rxn);
        }
        reactionMap.put(rxn.getIdentifier(), rxn);
    }
}
private void processInputs() { // Index. for (INPUT userOrderedInput : userOrderedInputs) { if (userOrderedInput.getProvides().isEmpty()) { nonExportingInputs.add(userOrderedInput); } for (String providedSymbolName : userOrderedInput.getProvides()) { exportingInputBySymbolName.put(providedSymbolName, userOrderedInput); } } for (INPUT userOrderedInput : userOrderedInputs) { for (String symbolName : userOrderedInput.getRequires()) { INPUT importedInput = exportingInputBySymbolName.get(symbolName); if (importedInput != null) { importedInputByImportingInput.put(userOrderedInput, importedInput); } } } // Order. // For each input, traverse in user-provided order. for (INPUT userOrderedInput : userOrderedInputs) { // Traverse the graph starting from this input and record any // newly-reached inputs. orderInput(userOrderedInput); } // Free temporary indexes. completedInputs.clear(); importedInputByImportingInput.clear(); }
/**
 * Sets the document root and re-indexes its structures.
 *
 * @param root the new root structure; it is visited with {@code INDEXER}
 * @return this builder, for chaining
 * @throws IllegalStateException if {@code build()} has already been called
 */
public Builder setRoot(DocStructure root) {
    Preconditions.checkState(!build, "This builder has already been used");
    structures.clear();
    this.root = root;
    root.accept(INDEXER, this);
    return this;
}
@After // after each test public void cleanup() { // delete the files. for (File f : dirs) delete(f); // empty the variables. dirs.clear(); expectedFiles.clear(); }
/** Discards state cached for the previous input and adopts the new configuration. */
@Override
public void inputChanged(Viewer viewer, Object oldInput, Object newInput) {
    // Drop everything derived from the old input.
    nodes.clear();
    elements = null;
    // Remember the viewer and the newly supplied configuration.
    this.viewer = viewer;
    config = (IModelTransferConfiguration) newInput;
}
/**
 * Closes every registration and clears all internal collections.
 *
 * Fix: the original aborted on the first registration whose {@code close()}
 * threw, leaving the remaining registrations open and the
 * {@code registrations}/{@code listeners} collections uncleared. Now every
 * registration is closed; the first failure is rethrown at the end with any
 * further failures attached as suppressed exceptions.
 *
 * @throws Exception the first exception thrown by a registration's close
 */
@Override
public synchronized void close() throws Exception {
    factories.clear();
    Exception failure = null;
    for (AutoCloseable reg : registrations.values()) {
        try {
            reg.close();
        } catch (Exception e) {
            if (failure == null) {
                failure = e;
            } else {
                failure.addSuppressed(e);
            }
        }
    }
    registrations.clear();
    listeners.clear();
    if (failure != null) {
        throw failure;
    }
}
/** Releases this block's resources after delegating to the superclass close. */
@Override
public void close() {
    super.close();
    radialDataBlock = null;
    if (polyMultimap != null) {
        polyMultimap.clear();
    }
}
/** Resets per-file state, records the file's package and path, then walks its tree. */
@Override
public void scanFile(JavaFileScannerContext context) {
    JavaTree.CompilationUnitTreeImpl tree = (JavaTree.CompilationUnitTreeImpl) context.getTree();
    // Package name in internal (slash-separated) form, e.g. "com/example/foo".
    String packageName = PackageDeclarationTreeImpl.packageNameAsString(tree.packageDeclaration());
    currentPackage = packageName.replace('.', '/');
    currentFile = context.getFile();
    // Reset all per-file accumulators before scanning.
    suppressWarningLines.clear();
    anonymousInnerClassCounter.clear();
    parent.clear();
    currentClassKey.clear();
    scan(tree);
}
private Multimap<Integer, I_C_Flatrate_Term> getC_Flatrate_Terms_IndexedByBPartnerId() { if (_bpartnerId2contract_fullyLoadedRequired && !_bpartnerId2contract_fullyLoaded) { _bpartnerId2contract.clear(); // clear all first final List<I_C_Flatrate_Term> terms = pmmContractsDAO.retrieveAllRunningContractsOnDate(date); for (final I_C_Flatrate_Term term : terms) { final int bpartnerId = term.getDropShip_BPartner_ID(); _bpartnerId2contract.put(bpartnerId, term); } _bpartnerId2contract_fullyLoaded = true; } return _bpartnerId2contract; }
private Multimap<Integer, I_C_RfQResponseLine> getActiveRfqResponseLines_IndexedByBPartnerId() { if (_bpartnerId2activeRfqResponseLines_fullyLoadedRequired && !_bpartnerId2activeRfqResponseLines_fullyLoaded) { _bpartnerId2activeRfqResponseLines.clear(); // clear all first final List<I_C_RfQResponseLine> rfqResponseLines = pmmRfQDAO.retrieveAllActiveResponseLines(getCtx()); for (final I_C_RfQResponseLine rfqResponseLine : rfqResponseLines) { final int bpartnerId = rfqResponseLine.getC_BPartner_ID(); _bpartnerId2activeRfqResponseLines.put(bpartnerId, rfqResponseLine); } _bpartnerId2activeRfqResponseLines_fullyLoaded = true; } return _bpartnerId2activeRfqResponseLines; }
public static void waitForSchemaVersionsToCoalesce( String encapsulatingOperationDescription, CQLKeyValueService kvs) { PreparedStatement peerInfoQuery = kvs.getPreparedStatement( CassandraConstants.NO_TABLE, "select peer, schema_version from system.peers;", kvs.session); peerInfoQuery.setConsistencyLevel(ConsistencyLevel.ALL); Multimap<UUID, InetAddress> peerInfo = ArrayListMultimap.create(); long start = System.currentTimeMillis(); long sleepTime = 100; do { peerInfo.clear(); for (Row row : kvs.session.execute(peerInfoQuery.bind()).all()) { peerInfo.put(row.getUUID("schema_version"), row.getInet("peer")); } if (peerInfo.keySet().size() <= 1) { // full schema agreement return; } sleepTime = Math.min(sleepTime * 2, 5000); } while (System.currentTimeMillis() < start + CassandraConstants.SECONDS_WAIT_FOR_VERSIONS * 1000); StringBuilder sb = new StringBuilder(); sb.append( String.format( "Cassandra cluster cannot come to agreement on schema versions, during operation: %s.", encapsulatingOperationDescription)); for (Entry<UUID, Collection<InetAddress>> versionToPeer : peerInfo.asMap().entrySet()) { sb.append(String.format("\nAt schema version %s:", versionToPeer.getKey())); for (InetAddress peer : versionToPeer.getValue()) { sb.append(String.format("\n\tNode: %s", peer)); } } sb.append( "\nFind the nodes above that diverge from the majority schema " + "(or have schema 'UNKNOWN', which likely means they are down/unresponsive) " + "and examine their logs to determine the issue. Fixing the underlying issue and restarting Cassandra " + "should resolve the problem. You can quick-check this with 'nodetool describecluster'."); throw new IllegalStateException(sb.toString()); }
/** Empties the endpoint, rack, and location bookkeeping collections. */
protected void clear() {
    currentLocations.clear();
    dcRacks.clear();
    dcEndpoints.clear();
}
/** Removes all entries from the path cache. */
public void clear() {
    pathsMap.clear();
}
/** Empties both directions of the bidirectional mapping. */
@Override
public void clear() {
    _reverse.clear();
    _forward.clear();
}
/**
 * Rebuilds this item's attribute modifiers from the stack and applies them to
 * the entity when the stack is equipped or loaded into the world.
 * NOTE(review): {@code attributes} looks like a reused instance field rather
 * than a local; assumes calls are single-threaded — confirm.
 */
@Override
public void onEquippedOrLoadedIntoWorld(ItemStack stack, EntityLivingBase player) {
    // Order matters: clear stale modifiers, refill from the stack, then apply.
    attributes.clear();
    fillModifiers(attributes, stack);
    player.getAttributeMap().applyAttributeModifiers(attributes);
}
/**
 * Recomputes this item's attribute modifiers from the stack and removes them
 * from the entity when the stack is unequipped (mirror of the equip path).
 * NOTE(review): {@code attributes} looks like a reused instance field rather
 * than a local; assumes calls are single-threaded — confirm.
 */
@Override
public void onUnequipped(ItemStack stack, EntityLivingBase player) {
    // Rebuild the same modifier set that was applied, then remove it.
    attributes.clear();
    fillModifiers(attributes, stack);
    player.getAttributeMap().removeAttributeModifiers(attributes);
}
/** Releases cached content-provider state when the viewer is disposed. */
@Override
public void dispose() {
    nodes.clear();
    elements = null;
}