private final PredicatePrecision computeNewPrecision() {
  // get previous precision
  UnmodifiableReachedSet unmodifiableReached = reached.asReachedSet();

  logger.log(Level.FINEST, "Removing everything below", refinementRoot, "from ARG.");

  // now create new precision
  precisionUpdate.start();
  PredicatePrecision basePrecision =
      findAllPredicatesFromSubgraph(refinementRoot, unmodifiableReached);

  logger.log(Level.ALL, "Old predicate map is", basePrecision);
  logger.log(Level.ALL, "New predicates are", newPredicates);

  PredicatePrecision newPrecision = basePrecision.addLocalPredicates(newPredicates.entries());

  logger.log(Level.ALL, "Predicate map now is", newPrecision);
  assert basePrecision.calculateDifferenceTo(newPrecision) == 0
      : "We forgot predicates during refinement!";

  precisionUpdate.stop();
  return newPrecision;
}
private AlgorithmStatus run0(ReachedSet reached, Algorithm algorithm)
    throws InterruptedException, CPAException, CPAEnabledAnalysisPropertyViolationException {
  logger.log(Level.INFO, "Starting sub-analysis");
  shutdownNotifier.shutdownIfNecessary();
  AlgorithmStatus status = algorithm.run(reached);
  shutdownNotifier.shutdownIfNecessary();
  logger.log(Level.INFO, "Finished sub-analysis");
  return status;
}
/**
 * Verify all the consistency properties related to CLangSMG
 *
 * @param pLogger Logger to log results
 * @param pSmg SMG to check
 * @return True if pSmg is consistent w.r.t. this criteria. False otherwise.
 */
public static boolean verifyCLangSMG(LogManager pLogger, CLangSMG pSmg) {
  boolean toReturn = SMGConsistencyVerifier.verifySMG(pLogger, pSmg);

  pLogger.log(Level.FINEST, "Starting consistency check of a CLangSMG");

  toReturn =
      toReturn
          && verifyCLangSMGProperty(
              verifyDisjunctHeapAndGlobal(pLogger, pSmg),
              pLogger,
              "Checking CLangSMG consistency: heap and global object sets are disjunct");
  toReturn =
      toReturn
          && verifyCLangSMGProperty(
              verifyDisjunctHeapAndStack(pLogger, pSmg),
              pLogger,
              "Checking CLangSMG consistency: heap and stack objects are disjunct");
  toReturn =
      toReturn
          && verifyCLangSMGProperty(
              verifyDisjunctGlobalAndStack(pLogger, pSmg),
              pLogger,
              "Checking CLangSMG consistency: global and stack objects are disjunct");
  toReturn =
      toReturn
          && verifyCLangSMGProperty(
              verifyStackGlobalHeapUnion(pLogger, pSmg),
              pLogger,
              "Checking CLangSMG consistency: global, stack and heap object union contains all objects in SMG");
  toReturn =
      toReturn
          && verifyCLangSMGProperty(
              verifyNullObjectCLangProperties(pLogger, pSmg),
              pLogger,
              "Checking CLangSMG consistency: null object invariants hold");
  toReturn =
      toReturn
          && verifyCLangSMGProperty(
              verifyGlobalNamespace(pLogger, pSmg),
              pLogger,
              "Checking CLangSMG consistency: global namespace problem");
  toReturn =
      toReturn
          && verifyCLangSMGProperty(
              verifyStackNamespaces(pLogger, pSmg),
              pLogger,
              "Checking CLangSMG consistency: stack namespace");

  pLogger.log(Level.FINEST, "Ending consistency check of a CLangSMG");

  return toReturn;
}
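// Hedged illustration (not part of CPAchecker): a minimal, self-contained sketch of the
// check-chaining pattern used in verifyCLangSMG above -- each named invariant is evaluated,
// its result is logged, and the overall verdict is the conjunction. Because of the
// &&-short-circuiting, checks after the first failure are skipped, as in the method above.
// All names below (TinyConsistencyVerifier, checkNonEmpty, ...) are hypothetical.
import java.util.List;
import java.util.logging.Level;
import java.util.logging.Logger;

final class TinyConsistencyVerifier {
  private static final Logger LOGGER = Logger.getLogger(TinyConsistencyVerifier.class.getName());

  // analogous to verifyCLangSMGProperty: log the result of one check and pass it through
  private static boolean verifyProperty(boolean result, String message) {
    LOGGER.log(Level.FINEST, "{0}: {1}", new Object[] {message, result});
    return result;
  }

  static boolean verify(List<String> data) {
    boolean toReturn = true;
    toReturn =
        toReturn && verifyProperty(!data.isEmpty(), "Checking consistency: list is non-empty");
    toReturn =
        toReturn
            && verifyProperty(!data.contains(null), "Checking consistency: list has no null entries");
    return toReturn;
  }
}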
@SuppressWarnings("unused")
private void reorderCallback(Integer pre, BDDFactory.ReorderStats stats) {
  if (logger.wouldBeLogged(LOG_LEVEL)) {
    switch (pre) {
      case 1:
        logger.log(LOG_LEVEL, "Starting BDD Reordering");
        break;
      case 0:
        logger.log(LOG_LEVEL, "Finished BDD Reordering:", stats);
        break;
      default:
        logger.log(LOG_LEVEL, stats);
    }
  }
}
@SuppressWarnings("unused")
private void gcCallback(Integer pre, BDDFactory.GCStats stats) {
  if (logger.wouldBeLogged(LOG_LEVEL)) {
    switch (pre) {
      case 1:
        logger.log(LOG_LEVEL, "Starting BDD Garbage Collection");
        break;
      case 0:
        logger.log(LOG_LEVEL, "Finished BDD", stats);
        break;
      default:
        logger.log(LOG_LEVEL, stats);
    }
  }
}
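// Hedged illustration (not CPAchecker code): both callbacks above first check
// logger.wouldBeLogged(LOG_LEVEL) so that no message is formatted when the level is disabled.
// A minimal analogue with plain java.util.logging is sketched below; the class name and the
// stats parameter are hypothetical stand-ins for the BDDFactory statistics objects.
import java.util.logging.Level;
import java.util.logging.Logger;

final class GuardedLoggingExample {
  private static final Logger LOGGER = Logger.getLogger(GuardedLoggingExample.class.getName());
  private static final Level LOG_LEVEL = Level.FINE;

  static void onGarbageCollection(int pre, Object stats) {
    // skip all message construction if the level is not logged at all
    if (LOGGER.isLoggable(LOG_LEVEL)) {
      if (pre == 1) {
        LOGGER.log(LOG_LEVEL, "Starting garbage collection");
      } else {
        LOGGER.log(LOG_LEVEL, "Finished garbage collection: {0}", stats);
      }
    }
  }
}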
/**
 * Verifies several NULL object-related properties
 *
 * @param pLogger Logger to log the message
 * @param pSmg SMG to check
 * @return True if pSmg is consistent w.r.t. this criteria. False otherwise.
 */
private static boolean verifyNullObjectCLangProperties(LogManager pLogger, CLangSMG pSmg) {
  // Verify that there is no NULL object in global scope
  for (SMGObject obj : pSmg.getGlobalObjects().values()) {
    if (!obj.notNull()) {
      pLogger.log(
          Level.SEVERE, "CLangSMG inconsistent: null object in global object set [" + obj + "]");
      return false;
    }
  }

  // Verify there is no more than one NULL object in the heap object set
  SMGObject firstNull = null;
  for (SMGObject obj : pSmg.getHeapObjects()) {
    if (!obj.notNull()) {
      if (firstNull != null) {
        pLogger.log(
            Level.SEVERE,
            "CLangSMG inconsistent: second null object in heap object set [first="
                + firstNull
                + ", second="
                + obj
                + "]");
        return false;
      } else {
        firstNull = obj;
      }
    }
  }

  // Verify there is no NULL object in the stack object set
  for (CLangStackFrame frame : pSmg.getStackFrames()) {
    for (SMGObject obj : frame.getAllObjects()) {
      if (!obj.notNull()) {
        pLogger.log(
            Level.SEVERE, "CLangSMG inconsistent: null object in stack object set [" + obj + "]");
        return false;
      }
    }
  }

  // Verify there is at least one NULL object
  if (firstNull == null) {
    pLogger.log(Level.SEVERE, "CLangSMG inconsistent: no null object");
    return false;
  }

  return true;
}
/**
 * Starts and executes the ImmediateChecksParser, parsing the variables/functions that should be
 * checked for security violations at every state.
 *
 * @param pLogger Logger for reporting problems while reading the file.
 * @param pFile the file to be parsed.
 */
@SuppressWarnings("resource")
public ImmediateChecksParser(LogManager pLogger, Path pFile) {
  set = new TreeSet<>();

  List<String> contents = null;
  try {
    contents = Files.readAllLines(pFile, Charset.defaultCharset());
  } catch (IOException e) {
    pLogger.logUserException(
        Level.WARNING, e, "Could not read initial security mapping from file named " + pFile);
    return;
  }

  for (String strLine : contents) {
    if (strLine.trim().isEmpty()) {
      continue;
    } else if (strLine.contains(";")) {
      int sem = strLine.indexOf(";");
      Variable var = new Variable(strLine.substring(0, sem));
      if (!set.contains(var)) {
        set.add(var);
      }
    }
  }
}
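// Hedged sketch (hypothetical class, not part of the parser above): reading a mapping file
// line by line and keeping everything before the first ';' of each non-blank line, which is
// the same extraction step ImmediateChecksParser performs for its Variable names.
import java.io.IOException;
import java.nio.charset.StandardCharsets;
import java.nio.file.Files;
import java.nio.file.Path;
import java.util.Set;
import java.util.TreeSet;

final class SemicolonPrefixReader {
  static Set<String> readPrefixes(Path file) throws IOException {
    Set<String> result = new TreeSet<>();
    for (String line : Files.readAllLines(file, StandardCharsets.UTF_8)) {
      if (line.trim().isEmpty()) {
        continue; // skip blank lines, like the parser above
      }
      int sem = line.indexOf(';');
      if (sem >= 0) {
        result.add(line.substring(0, sem)); // Set.add already ignores duplicates
      }
    }
    return result;
  }
}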
/**
 * Get the predicates out of an interpolant.
 *
 * @param pInterpolant The interpolant formula.
 * @return A set of predicates.
 */
private final Collection<AbstractionPredicate> convertInterpolant(
    final BooleanFormula pInterpolant) {
  BooleanFormula interpolant = pInterpolant;

  if (bfmgr.isTrue(interpolant)) {
    return Collections.<AbstractionPredicate>emptySet();
  }

  Collection<AbstractionPredicate> preds;

  if (atomicPredicates) {
    preds = predAbsMgr.getPredicatesForAtomsOf(interpolant);
  } else {
    preds = ImmutableList.of(predAbsMgr.getPredicateFor(interpolant));
  }

  assert !preds.isEmpty()
      : "Interpolant without relevant predicates: "
          + pInterpolant
          + "; simplified to "
          + interpolant;

  logger.log(Level.FINEST, "Got predicates", preds);

  return preds;
}
/**
 * This method exports the current representation in dot format to the given file.
 *
 * @param file the file to write to
 * @param refinementCounter counter of the current refinement, used in the name of the output file
 */
public void exportToDot(PathTemplate file, int refinementCounter) {
  StringBuilder result = new StringBuilder().append("digraph tree {" + "\n");
  for (Map.Entry<ARGState, ARGState> current : successorRelation.entries()) {
    if (interpolants.containsKey(current.getKey())) {
      StringBuilder sb = new StringBuilder();
      sb.append("itp is " + interpolants.get(current.getKey()));

      result.append(
          current.getKey().getStateId()
              + " [label=\""
              + (current.getKey().getStateId()
                  + " / "
                  + AbstractStates.extractLocation(current.getKey()))
              + " has itp "
              + (sb.toString())
              + "\"]"
              + "\n");
      result.append(
          current.getKey().getStateId() + " -> " + current.getValue().getStateId() + "\n");
      // + " [label=\"" +
      // current.getKey().getEdgeToChild(current.getValue()).getRawStatement().replace("\n", "") + "\"]\n");
    } else {
      result.append(
          current.getKey().getStateId()
              + " [label=\""
              + current.getKey().getStateId()
              + " has itp NA\"]"
              + "\n");
      result.append(
          current.getKey().getStateId() + " -> " + current.getValue().getStateId() + "\n");
      // + " [label=\"" +
      // current.getKey().getEdgeToChild(current.getValue()).getRawStatement().replace("\n", "") + "\"]\n");
    }

    if (current.getValue().isTarget()) {
      result.append(
          current.getValue().getStateId() + " [style=filled, fillcolor=\"red\"]" + "\n");
    }

    assert (!current.getKey().isTarget());
  }
  result.append("}");

  try {
    MoreFiles.writeFile(
        file.getPath(refinementCounter, interpolationCounter), Charset.defaultCharset(), result);
  } catch (IOException e) {
    logger.logUserException(Level.WARNING, e, "Could not write interpolation tree to file");
  }
}
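// Hedged sketch (hypothetical helper, not the exporter above): emitting a GraphViz "digraph"
// from a plain successor map of integer state ids, mirroring the node and edge lines that
// exportToDot appends for every entry of successorRelation.
import java.util.Map;

final class DotExportSketch {
  static String toDot(Map<Integer, Integer> successors) {
    StringBuilder result = new StringBuilder("digraph tree {\n");
    for (Map.Entry<Integer, Integer> e : successors.entrySet()) {
      // one node line with a label, one edge line to the successor
      result.append(e.getKey()).append(" [label=\"").append(e.getKey()).append("\"]\n");
      result.append(e.getKey()).append(" -> ").append(e.getValue()).append("\n");
    }
    return result.append("}").toString();
  }
}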
/** * Verifies that heap and global object sets are disjunct * * @param pLogger Logger to log the message * @param pSmg SMG to check * @return True if pSmg is consistent w.r.t. this criteria. False otherwise. */ private static boolean verifyDisjunctHeapAndGlobal(LogManager pLogger, CLangSMG pSmg) { Map<String, SMGRegion> globals = pSmg.getGlobalObjects(); Set<SMGObject> heap = pSmg.getHeapObjects(); boolean toReturn = Collections.disjoint(globals.values(), heap); if (!toReturn) { pLogger.log(Level.SEVERE, "CLangSMG inconsistent, heap and global objects are not disjoint"); } return toReturn; }
@SuppressWarnings("unchecked")
@Override
public void printStatistics(PrintStream out, Result result, ReachedSet reached) {
  StatisticsData statistics;
  if (cpa.isAnalysis()) {
    statistics = cpa.getFactory().getGlobalAnalysis();
  } else {
    StatisticsState lastState = (StatisticsState) reached.getLastState();
    if (lastState == null) {
      for (AbstractState abstractState : reached.asCollection()) {
        if (abstractState != null) {
          lastState = (StatisticsState) abstractState;
        }
      }
    }
    statistics = lastState.getStatistics();
  }

  Map<String, Object> jsonMap = new HashMap<>();
  for (Entry<StatisticsProvider, StatisticsDataProvider> entry : statistics) {
    StatisticsProvider provider = entry.getKey();
    StatisticsDataProvider data = entry.getValue();
    String propName = provider.getPropertyName();
    Object value = data.getPropertyValue();
    String mergeInfo = "";
    if (!cpa.isAnalysis()) {
      String mergeType = provider.getMergeType();
      mergeInfo = "_" + mergeType;
      // Save in json with merge type
      Map<String, Object> innerJsonMap;
      if (jsonMap.containsKey(propName)) {
        innerJsonMap = (Map<String, Object>) jsonMap.get(propName);
      } else {
        innerJsonMap = new HashMap<>();
        jsonMap.put(propName, innerJsonMap);
      }
      innerJsonMap.put(mergeType, value);
    } else {
      // Save in json without merge type
      jsonMap.put(propName, value);
    }
    out.println("\t" + propName + mergeInfo + ": " + value);
  }

  if (statisticsCPAFile != null) {
    try {
      JSON.writeJSONString(jsonMap, statisticsCPAFile);
    } catch (IOException e) {
      logger.logUserException(Level.WARNING, e, "Could not write statistics to file");
    }
  }
}
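// Hedged sketch (hypothetical names, not CPAchecker code): building the same nested structure
// as printStatistics above -- a map from property name to either a plain value or, when a merge
// type is present, an inner map keyed by merge type -- using computeIfAbsent instead of the
// explicit containsKey check.
import java.util.HashMap;
import java.util.Map;

final class StatisticsJsonMapExample {
  @SuppressWarnings("unchecked")
  static void put(Map<String, Object> jsonMap, String propName, String mergeType, Object value) {
    if (mergeType == null) {
      jsonMap.put(propName, value); // no merge type: store the value directly
    } else {
      Map<String, Object> inner =
          (Map<String, Object>) jsonMap.computeIfAbsent(propName, k -> new HashMap<String, Object>());
      inner.put(mergeType, value); // group values of the same property by merge type
    }
  }
}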
@Test
public void parse() throws InvalidConfigurationException {
  List<Automaton> automata =
      AutomatonParser.parseAutomatonFile(
          automatonFile,
          TestDataTools.configurationForTest().build(),
          LogManager.createTestLogManager(),
          MachineModel.LINUX32,
          CProgramScope.empty(),
          Language.C);
  assertThat(automata).named("automata from file " + automatonFile).isNotEmpty();
}
private Algorithm getAlgorithm(
    ShutdownNotifier singleShutdownNotifier,
    Configuration singleConfig,
    LogManager singleLogger,
    ConfigurableProgramAnalysis cpa)
    throws InvalidConfigurationException, CPAException {
  singleLogger.log(Level.FINE, "Creating algorithms");
  Algorithm algorithm =
      CPAAlgorithm.create(cpa, singleLogger, singleConfig, singleShutdownNotifier);
  CEGARAlgorithm cegarAlgorithm = new CEGARAlgorithm(algorithm, cpa, singleConfig, singleLogger);
  cegarAlgorithm.collectStatistics(stats.getSubStatistics());
  return cegarAlgorithm;
}
/**
 * Verifies that global and stack object sets are disjunct
 *
 * @param pLogger Logger to log the message
 * @param pSmg SMG to check
 * @return True if pSmg is consistent w.r.t. this criteria. False otherwise.
 */
private static boolean verifyDisjunctGlobalAndStack(LogManager pLogger, CLangSMG pSmg) {
  Deque<CLangStackFrame> stack_frames = pSmg.getStackFrames();
  Set<SMGObject> stack = new HashSet<>();

  for (CLangStackFrame frame : stack_frames) {
    stack.addAll(frame.getAllObjects());
  }

  Map<String, SMGRegion> globals = pSmg.getGlobalObjects();

  boolean toReturn = Collections.disjoint(stack, globals.values());

  if (!toReturn) {
    pLogger.log(Level.SEVERE, "CLangSMG inconsistent, global and stack objects are not disjoint");
  }

  return toReturn;
}
@Before
public void setUp() throws Exception {
  requireArrays();
  requireQuantifiers();
  requireRationals();

  FormulaManagerView mgrv =
      new FormulaManagerView(mgr, config, LogManager.createTestLogManager());
  this.qfm = mgrv.getQuantifiedFormulaManager();
  imgr = mgrv.getIntegerFormulaManager();

  _x = imgr.makeVariable("x");
  _b = amgr.makeArray("b", FormulaType.IntegerType, FormulaType.IntegerType);
  _b_at_x_eq_1 = imgr.equal(amgr.select(_b, _x), imgr.makeNumber(1));
  _b_at_x_eq_0 = imgr.equal(amgr.select(_b, _x), imgr.makeNumber(0));
}
/**
 * Verify the stack namespace: no object is present in more than one place on the stack
 *
 * @param pLogger Logger to log the message
 * @param pSmg the current smg
 * @return True if pSmg is consistent w.r.t. this criteria. False otherwise.
 */
private static boolean verifyStackNamespaces(LogManager pLogger, CLangSMG pSmg) {
  HashSet<SMGObject> stack_objects = new HashSet<>();

  for (CLangStackFrame frame : pSmg.getStackFrames()) {
    for (SMGObject object : frame.getAllObjects()) {
      if (stack_objects.contains(object)) {
        pLogger.log(
            Level.SEVERE,
            "CLangSMG inconsistent: object [" + object + "] present multiple times in the stack");
        return false;
      }
      stack_objects.add(object);
    }
  }

  return true;
}
private ReachedSet createInitialReachedSetForRestart(
    ConfigurableProgramAnalysis cpa,
    CFANode mainFunction,
    Configuration singleConfig,
    LogManager singleLogger)
    throws InvalidConfigurationException, InterruptedException {
  singleLogger.log(Level.FINE, "Creating initial reached set");

  ReachedSetFactory reachedSetFactory = new ReachedSetFactory(singleConfig);
  AbstractState initialState =
      cpa.getInitialState(mainFunction, StateSpacePartition.getDefaultPartition());
  Precision initialPrecision =
      cpa.getInitialPrecision(mainFunction, StateSpacePartition.getDefaultPartition());

  ReachedSet reached = reachedSetFactory.create();
  reached.add(initialState, initialPrecision);
  return reached;
}
/**
 * Verify the global scope is consistent: each record points to an appropriately labeled object
 *
 * @param pLogger Logger to log the message
 * @param pSmg SMG to check
 * @return True if pSmg is consistent w.r.t. this criteria. False otherwise.
 */
private static boolean verifyGlobalNamespace(LogManager pLogger, CLangSMG pSmg) {
  Map<String, SMGRegion> globals = pSmg.getGlobalObjects();

  for (String label : pSmg.getGlobalObjects().keySet()) {
    String globalLabel = globals.get(label).getLabel();
    if (!globalLabel.equals(label)) {
      pLogger.log(
          Level.SEVERE,
          "CLangSMG inconsistent: label ["
              + label
              + "] points to an object with label ["
              + pSmg.getGlobalObjects().get(label).getLabel()
              + "]");
      return false;
    }
  }

  return true;
}
public AbstractState widening(OctagonState successorOct, OctagonState reachedOct) {
  Pair<OctagonState, OctagonState> shrinkedStates = getShrinkedStates(successorOct, reachedOct);
  successorOct = shrinkedStates.getFirst();
  reachedOct = shrinkedStates.getSecond();

  Octagon newOctagon =
      reachedOct
          .getOctagon()
          .getManager()
          .widening(reachedOct.getOctagon(), successorOct.getOctagon());

  // TODO this should not be necessary however it occurs that a widened state is bottom
  if (reachedOct.getOctagon().getManager().isEmpty(newOctagon)) {
    newOctagon =
        reachedOct
            .getOctagon()
            .getManager()
            .union(reachedOct.getOctagon(), successorOct.getOctagon());
    logger.log(
        Level.WARNING,
        "bottom state occurred where it should not be, using union instead of widening as a fallback");
    if (reachedOct.getOctagon().getManager().isEmpty(newOctagon)) {
      throw new AssertionError("bottom state occurred where it should not be");
    }
  }

  OctagonState newState =
      new OctagonState(
          newOctagon,
          successorOct.getVariableToIndexMap(),
          successorOct.getVariableToTypeMap(),
          logger);
  if (reachedOct.isLoopHead()) {
    newState = newState.asLoopHead();
  }
  if (newState.equals(successorOct)) {
    return successorOct;
  } else if (newState.equals(reachedOct)) {
    return reachedOct;
  } else {
    return newState;
  }
}
/**
 * Verifies that heap and stack object sets are disjunct
 *
 * @param pLogger Logger to log the message
 * @param pSmg SMG to check
 * @return True if pSmg is consistent w.r.t. this criteria. False otherwise.
 */
private static boolean verifyDisjunctHeapAndStack(LogManager pLogger, CLangSMG pSmg) {
  Deque<CLangStackFrame> stack_frames = pSmg.getStackFrames();
  Set<SMGObject> stack = new HashSet<>();

  for (CLangStackFrame frame : stack_frames) {
    stack.addAll(frame.getAllObjects());
  }

  Set<SMGObject> heap = pSmg.getHeapObjects();

  boolean toReturn = Collections.disjoint(stack, heap);

  if (!toReturn) {
    pLogger.log(
        Level.SEVERE,
        "CLangSMG inconsistent, heap and stack objects are not disjoint: "
            + Sets.intersection(stack, heap));
  }

  return toReturn;
}
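// Hedged sketch using only java.util (the method above additionally uses Guava's
// Sets.intersection to report the offending elements): Collections.disjoint answers the
// yes/no question, and retainAll on a copy recovers the overlap for the error message.
// The class name is hypothetical.
import java.util.Collections;
import java.util.HashSet;
import java.util.Set;

final class DisjointnessCheckExample {
  static boolean checkDisjoint(Set<String> stack, Set<String> heap) {
    if (Collections.disjoint(stack, heap)) {
      return true;
    }
    Set<String> overlap = new HashSet<>(stack);
    overlap.retainAll(heap); // the elements present in both sets
    System.err.println("sets are not disjoint: " + overlap);
    return false;
  }
}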
/**
 * Verifies that the union of heap, global and stack objects is equal to the set of all objects
 *
 * @param pLogger Logger to log the message
 * @param pSmg SMG to check
 * @return True if pSmg is consistent w.r.t. this criteria. False otherwise.
 */
private static boolean verifyStackGlobalHeapUnion(LogManager pLogger, CLangSMG pSmg) {
  HashSet<SMGObject> object_union = new HashSet<>();

  object_union.addAll(pSmg.getHeapObjects());
  object_union.addAll(pSmg.getGlobalObjects().values());

  for (CLangStackFrame frame : pSmg.getStackFrames()) {
    object_union.addAll(frame.getAllObjects());
  }

  boolean toReturn =
      object_union.containsAll(pSmg.getObjects()) && pSmg.getObjects().containsAll(object_union);

  if (!toReturn) {
    pLogger.log(
        Level.SEVERE,
        "CLangSMG inconsistent: union of stack, heap and global objects is not the same set as the set of SMG objects");
  }

  return toReturn;
}
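// Hedged sketch (hypothetical helper): the mutual containsAll test above is set equality; with
// java.util.Set the same check can be written directly via equals, which is defined
// element-wise for sets.
import java.util.HashSet;
import java.util.Set;

final class UnionCoversAllExample {
  static boolean unionCoversAll(
      Set<String> heap, Set<String> globals, Set<String> stack, Set<String> all) {
    Set<String> union = new HashSet<>(heap);
    union.addAll(globals);
    union.addAll(stack);
    // same result as union.containsAll(all) && all.containsAll(union)
    return union.equals(all);
  }
}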
/**
 * Return the current size of the cache of the BDD library. Returns -1 if the value cannot be
 * read.
 */
private int readCacheSize() {
  if (factory instanceof JFactory) {
    // Unfortunately JFactory does not update its reported size on cache resizes.
    try {
      Field cacheField = JFactory.class.getDeclaredField("applycache");
      cacheField.setAccessible(true);
      Object cache = cacheField.get(factory);
      if (cache != null) {
        Field tableField = cache.getClass().getDeclaredField("table");
        tableField.setAccessible(true);
        Object table = tableField.get(cache);
        if (table instanceof Object[]) {
          return ((Object[]) table).length;
        }
      }
    } catch (ReflectiveOperationException | SecurityException e) {
      logger.logDebugException(e, "Could not access cache field of JFactory for statistics");
    }
    return -1;
  }
  return factory.getCacheSize();
}
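// Hedged, self-contained sketch of the reflection pattern above: reading a private field of
// another class with getDeclaredField/setAccessible and falling back to a default when the
// access fails. The Holder class here is hypothetical and exists only for the example.
final class PrivateFieldReadExample {
  static final class Holder {
    private final int[] table = new int[16]; // read only via reflection below
  }

  static int readTableLength(Holder holder) {
    try {
      java.lang.reflect.Field tableField = Holder.class.getDeclaredField("table");
      tableField.setAccessible(true);
      Object table = tableField.get(holder);
      if (table instanceof int[]) {
        return ((int[]) table).length;
      }
    } catch (ReflectiveOperationException | SecurityException e) {
      // like readCacheSize() above, treat reflective failures as "value not available"
    }
    return -1;
  }
}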
public ConfigurableProgramAnalysis createCPA(
    final CFA cfa,
    @Nullable final MainCPAStatistics stats,
    SpecAutomatonCompositionType composeWithSpecificationCPAs)
    throws InvalidConfigurationException, CPAException {
  logger.log(Level.FINE, "Creating CPAs");
  if (stats != null) {
    stats.cpaCreationTime.start();
  }
  try {
    if (useRestartingAlgorithm) {
      // hard-coded dummy CPA
      return LocationCPA.factory().set(cfa, CFA.class).setConfiguration(config).createInstance();
    }

    final ConfigurableProgramAnalysis cpa;
    switch (composeWithSpecificationCPAs) {
      case TARGET_SPEC:
        cpa = cpaFactory.buildCPAWithSpecAutomatas(cfa);
        break;
      case BACKWARD_TO_ENTRY_SPEC:
        cpa = cpaFactory.buildCPAWithBackwardSpecAutomatas(cfa);
        break;
      default:
        cpa = cpaFactory.buildCPAs(cfa, null);
    }

    if (stats != null && cpa instanceof StatisticsProvider) {
      ((StatisticsProvider) cpa).collectStatistics(stats.getSubStatistics());
    }
    return cpa;
  } finally {
    if (stats != null) {
      stats.cpaCreationTime.stop();
    }
  }
}
/**
 * Records a result of a single check to a logger along with a message
 *
 * @param pResult Result of the check
 * @param pLogger Logger to log the message
 * @param pMessage Message to be logged
 * @return The result of the check, i.e. equivalent to pResult
 */
private static boolean verifyCLangSMGProperty(
    boolean pResult, LogManager pLogger, String pMessage) {
  pLogger.log(Level.FINEST, pMessage, ":", pResult);
  return pResult;
}
/**
 * Returns the <code>AutomatonStates</code> that follow this state in the ControlAutomatonCPA.
 * If the passed <code>AutomatonExpressionArguments</code> are not sufficient to determine the
 * following state, this method returns an <code>AutomatonUnknownState</code> that contains this
 * as previous state. The strengthen method of the <code>AutomatonUnknownState</code> should be
 * used once enough information is available to determine the correct following state.
 *
 * <p>If the state is a NonDet-State, multiple following states may be returned. If the only
 * following state is BOTTOM, an empty set is returned.
 *
 * @throws CPATransferException if a transition cannot be evaluated and failOnUnknownMatch is set
 */
private Collection<AutomatonState> getFollowStates(
    AutomatonState state,
    List<AbstractState> otherElements,
    CFAEdge edge,
    boolean failOnUnknownMatch)
    throws CPATransferException {
  Preconditions.checkArgument(!(state instanceof AutomatonUnknownState));
  if (state == cpa.getBottomState()) {
    return Collections.emptySet();
  }

  if (collectTokenInformation) {
    SourceLocationMapper.getKnownToEdge(edge);
  }

  if (state.getInternalState().getTransitions().isEmpty()) {
    // shortcut
    return Collections.singleton(state);
  }

  Collection<AutomatonState> lSuccessors = Sets.newHashSetWithExpectedSize(2);
  AutomatonExpressionArguments exprArgs =
      new AutomatonExpressionArguments(state, state.getVars(), otherElements, edge, logger);
  boolean edgeMatched = false;
  int failedMatches = 0;
  boolean nonDetState = state.getInternalState().isNonDetState();

  // These transitions cannot be evaluated until last, because they might have side effects on
  // other CPAs (we don't want to execute them twice).
  // The transitionVariables have to be cached (produced during the match operation).
  // The list holds a Transition and the TransitionVariables generated during its match.
  List<Pair<AutomatonTransition, Map<Integer, String>>> transitionsToBeTaken =
      new ArrayList<>(2);

  for (AutomatonTransition t : state.getInternalState().getTransitions()) {
    exprArgs.clearTransitionVariables();

    matchTime.start();
    ResultValue<Boolean> match = t.match(exprArgs);
    matchTime.stop();

    // System.out.println("----------------------");
    // System.out.println(t.getTrigger());
    // System.out.println(t.getFollowState().getName());
    // System.out.println(edge.getPredecessor().getNodeNumber());
    // System.out.println(edge.getCode());
    // System.out.println(match.getValue());

    if (match.canNotEvaluate()) {
      if (failOnUnknownMatch) {
        throw new CPATransferException(
            "Automaton transition condition could not be evaluated: "
                + match.getFailureMessage());
      }
      // if one transition cannot be evaluated the evaluation must be postponed until enough
      // information is available
      return Collections.<AutomatonState>singleton(new AutomatonUnknownState(state));
    } else {
      if (match.getValue()) {
        edgeMatched = true;

        assertionsTime.start();
        ResultValue<Boolean> assertionsHold = t.assertionsHold(exprArgs);
        assertionsTime.stop();

        if (assertionsHold.canNotEvaluate()) {
          if (failOnUnknownMatch) {
            throw new CPATransferException(
                "Automaton transition assertions could not be evaluated: "
                    + assertionsHold.getFailureMessage());
          }
          // cannot yet be evaluated
          return Collections.<AutomatonState>singleton(new AutomatonUnknownState(state));

        } else if (assertionsHold.getValue()) {
          if (!t.canExecuteActionsOn(exprArgs)) {
            if (failOnUnknownMatch) {
              throw new CPATransferException("Automaton transition action could not be executed");
            }
            // cannot yet execute, goto UnknownState
            return Collections.<AutomatonState>singleton(new AutomatonUnknownState(state));
          }

          // delay execution as described above
          Map<Integer, String> transitionVariables =
              ImmutableMap.copyOf(exprArgs.getTransitionVariables());
          transitionsToBeTaken.add(Pair.of(t, transitionVariables));

        } else {
          // matching transitions, but unfulfilled assertions: goto error state
          AutomatonState errorState =
              AutomatonState.automatonStateFactory(
                  Collections.<String, AutomatonVariable>emptyMap(),
                  AutomatonInternalState.ERROR,
                  cpa,
                  0,
                  0,
                  "");
          logger.log(
              Level.INFO,
              "Automaton going to ErrorState on edge \"" + edge.getDescription() + "\"");
          lSuccessors.add(errorState);
        }

        if (!nonDetState) {
          // not a nondet State, break on the first matching edge
          break;
        }
      } else {
        // do nothing if the edge did not match
        failedMatches++;
      }
    }
  }

  if (edgeMatched) {
    // execute Transitions
    for (Pair<AutomatonTransition, Map<Integer, String>> pair : transitionsToBeTaken) {
      // this transition will be taken. copy the variables
      AutomatonTransition t = pair.getFirst();
      Map<Integer, String> transitionVariables = pair.getSecond();

      actionTime.start();
      Map<String, AutomatonVariable> newVars = deepCloneVars(state.getVars());
      exprArgs.setAutomatonVariables(newVars);
      exprArgs.putTransitionVariables(transitionVariables);
      t.executeActions(exprArgs);
      actionTime.stop();

      String violatedPropertyDescription = null;
      if (t.getFollowState().isTarget()) {
        violatedPropertyDescription = t.getViolatedPropertyDescription(exprArgs);
      }

      AutomatonState lSuccessor =
          AutomatonState.automatonStateFactory(
              newVars,
              t.getFollowState(),
              cpa,
              t.getAssumptions(),
              state.getMatches() + 1,
              state.getFailedMatches(),
              violatedPropertyDescription);

      if (!(lSuccessor instanceof AutomatonState.BOTTOM)) {
        lSuccessors.add(lSuccessor);
      } else {
        // add nothing
      }
    }
    return lSuccessors;
  } else {
    // stay in same state, no transitions to be executed here (no transition matched)
    AutomatonState stateNewCounters =
        AutomatonState.automatonStateFactory(
            state.getVars(),
            state.getInternalState(),
            cpa,
            state.getMatches(),
            state.getFailedMatches() + failedMatches,
            null);

    if (collectTokenInformation) {
      stateNewCounters.addNoMatchTokens(state.getTokensSinceLastMatch());
      if (edge.getEdgeType() != CFAEdgeType.DeclarationEdge) {
        stateNewCounters.addNoMatchTokens(
            SourceLocationMapper.getAbsoluteTokensFromCFAEdge(edge, true));
      }
    }
    return Collections.singleton(stateNewCounters);
  }
}
@Override
public AlgorithmStatus run(ReachedSet pReached) throws CPAException, InterruptedException {
  checkArgument(
      pReached instanceof ForwardingReachedSet, "RestartAlgorithm needs ForwardingReachedSet");

  ForwardingReachedSet reached = (ForwardingReachedSet) pReached;

  CFANode mainFunction = AbstractStates.extractLocation(pReached.getFirstState());
  assert mainFunction != null : "Location information needed";

  AlgorithmStatus status = AlgorithmStatus.UNSOUND_AND_PRECISE;

  try {
    ReachedSetFactory reachedSetFactory = new ReachedSetFactory(globalConfig);

    // predicate analysis
    logger.log(Level.FINE, "Creating CPA for PredicateAnalysis");
    Configuration singleConfig1 = getConfigFromFile(configFiles.get(0));
    ConfigurableProgramAnalysis cpa1 = getCPA(reachedSetFactory, singleConfig1);
    Algorithm algorithm1 = getAlgorithm(shutdownNotifier, singleConfig1, logger, cpa1);
    ReachedSet reached1 =
        createInitialReachedSetForRestart(cpa1, mainFunction, singleConfig1, logger);

    reached.setDelegate(reached1);

    stats.noOfAlgorithmsUsed++;
    stats.totalTime.start();

    status = run0(reached1, algorithm1);

    // stats.printIntermediateStatistics(System.out, Result.UNKNOWN, reached); // disabled,
    // because table-generator can not distinguish 1st and 2nd statistics.
    stats.resetSubStatistics();

    // predicate bit-precise analysis
    logger.log(Level.FINE, "Creating CPA for PredicateAnalysis-Bitprecise");
    Configuration singleConfig2 = getConfigFromFile(configFiles.get(1));
    ConfigurableProgramAnalysis cpa2 = getCPA(reachedSetFactory, singleConfig2);
    {
      // this is the important step: re-use the reached-set
      ARGReplayCPA argReplay = CPAs.retrieveCPA(cpa2, ARGReplayCPA.class);
      checkNotNull(argReplay, "ARGReplay-CPA is needed for second analysis");
      argReplay.setARGAndCPA(reached1, cpa1);
    }
    Algorithm algorithm2 = getAlgorithm(shutdownNotifier, singleConfig2, logger, cpa2);
    ReachedSet reached2 =
        createInitialReachedSetForRestart(cpa2, mainFunction, singleConfig2, logger);

    reached.setDelegate(reached2);

    stats.noOfAlgorithmsUsed++;
    stats.totalTime.start();

    status = run0(reached2, algorithm2);

    stats.printIntermediateStatistics(System.out, Result.UNKNOWN, reached);
    stats.resetSubStatistics();

  } catch (InvalidConfigurationException e) {
    logger.logUserException(
        Level.WARNING, e, "Exiting analysis because the configuration file is invalid");
  } catch (IOException e) {
    logger.logUserException(
        Level.WARNING, e, "Exiting analysis because the configuration file could not be read");
  } finally {
    // TODO close CPAs and algorithms
  }

  return status;
}
@Override
public AbstractState rebuildStateAfterFunctionCall(
    AbstractState pRootState,
    AbstractState pEntryState,
    AbstractState pExpandedState,
    FunctionExitNode exitLocation) {
  final PredicateAbstractState rootState = (PredicateAbstractState) pRootState;
  final PredicateAbstractState entryState = (PredicateAbstractState) pEntryState;
  final PredicateAbstractState expandedState = (PredicateAbstractState) pExpandedState;
  final PersistentMap<CFANode, Integer> abstractionLocations =
      expandedState.getAbstractionLocationsOnPath();

  // TODO why did I copy the next if-statement? when is it used?
  if (!expandedState.isAbstractionState()) {
    return expandedState;
  }

  // we have:
  // - abstraction of rootState with ssa --> use as it is
  // - callEdge-pathFormula with ssa (from rootState) --> use as it is, with updated SSAMap
  // - abstraction of functioncall (expandedSSA) --> instantiate, with updated SSAMap, so that:
  //   - only param and return-var overlap to callEdge
  //   - all other vars are distinct
  final String calledFunction = exitLocation.getFunctionName();
  final PathFormula functionCall = entryState.getAbstractionFormula().getBlockFormula();
  final SSAMap entrySsaWithRet = functionCall.getSsa();
  final SSAMapBuilder entrySsaWithRetBuilder = entrySsaWithRet.builder();
  final SSAMapBuilder summSsa =
      rootState.getAbstractionFormula().getBlockFormula().getSsa().builder();

  final SSAMap expandedSSA = expandedState.getAbstractionFormula().getBlockFormula().getSsa();
  for (String var : expandedSSA.allVariables()) {
    final CType type = expandedSSA.getType(var);
    if (var.startsWith(calledFunction + "::") && var.endsWith(PARAM_VARIABLE_NAME)) {
      int newIndex = entrySsaWithRet.getIndex(var);
      assert entrySsaWithRet.containsVariable(var)
          : "param for function is not used in functioncall";
      entrySsaWithRetBuilder.setIndex(var, type, newIndex);
      setFreshValueBasis(summSsa, var, newIndex);

    } else if (exitLocation.getEntryNode().getReturnVariable().isPresent()
        && exitLocation.getEntryNode().getReturnVariable().get().getQualifiedName().equals(var)) {
      // var.startsWith(calledFunction + "::") && var.endsWith(RETURN_VARIABLE_NAME)
      final int newIndex =
          Math.max(expandedSSA.getIndex(var), entrySsaWithRetBuilder.getFreshIndex(var));
      entrySsaWithRetBuilder.setIndex(var, type, newIndex);
      summSsa.setIndex(var, type, newIndex);

    } else if (!entrySsaWithRet.containsVariable(var)) {
      // non-existent index for variable only used in functioncall, just copy
      final int newIndex = expandedSSA.getIndex(var);
      entrySsaWithRetBuilder.setIndex(var, type, newIndex);
      summSsa.setIndex(var, type, newIndex);

    } else {
      final int newIndex = entrySsaWithRetBuilder.getFreshIndex(var);
      entrySsaWithRetBuilder.setIndex(var, type, newIndex);
      setFreshValueBasis(summSsa, var, newIndex);
    }
  }

  final SSAMap newEntrySsaWithRet = entrySsaWithRetBuilder.build();
  final SSAMap newSummSsa = summSsa.build();

  // function-call needs to have new retvars-indices.
  PathFormula functionCallWithSSA =
      new PathFormula(
          functionCall.getFormula(),
          newEntrySsaWithRet,
          functionCall.getPointerTargetSet(),
          functionCall.getLength());

  // concat function-call with function-summary,
  // function-summary will be instantiated with indices for params and retvars.
  PathFormula executedFunction =
      pmgr.makeAnd(functionCallWithSSA, expandedState.getAbstractionFormula().asFormula());

  // after function-execution we have to re-use the previous indices (from outer scope),
  // thus let's change the SSAMap.
  PathFormula executedFunctionWithSSA =
      new PathFormula(
          executedFunction.getFormula(),
          newSummSsa,
          executedFunction.getPointerTargetSet(),
          executedFunction.getLength());

  // everything is prepared, so build a new AbstractionState.
  // we do this as 'future abstraction', because we do not have enough information
  // (necessary classes and managers) for the abstraction-process at this place.
  PredicateAbstractState rebuildState =
      new PredicateAbstractState.ComputeAbstractionState(
          executedFunctionWithSSA,
          rootState.getAbstractionFormula(),
          exitLocation,
          abstractionLocations);

  logger.log(
      Level.ALL,
      "\noldAbs: ",
      rootState.getAbstractionFormula().asInstantiatedFormula(),
      "\ncall: ",
      functionCallWithSSA,
      "\nsumm: ",
      expandedState.getAbstractionFormula().asFormula(),
      "\nexe: ",
      executedFunction,
      "\nentrySsaRet",
      newEntrySsaWithRet,
      "\nsummSsaRet",
      newSummSsa);

  return rebuildState;
}
@SuppressWarnings("unused")
private void resizeCallback(Integer oldSize, Integer newSize) {
  logger.log(LOG_LEVEL, "BDD node table resized from", oldSize, "to", newSize);
}
public Algorithm createAlgorithm(
    final ConfigurableProgramAnalysis cpa,
    final String programDenotation,
    final CFA cfa,
    @Nullable final MainCPAStatistics stats)
    throws InvalidConfigurationException, CPAException {
  logger.log(Level.FINE, "Creating algorithms");

  Algorithm algorithm;

  if (useProofCheckAlgorithm) {
    logger.log(Level.INFO, "Using Proof Check Algorithm");
    algorithm = new ProofCheckAlgorithm(cpa, config, logger, shutdownNotifier, cfa);
  } else if (useRestartingAlgorithm) {
    logger.log(Level.INFO, "Using Restarting Algorithm");
    algorithm = new RestartAlgorithm(config, logger, shutdownNotifier, programDenotation, cfa);
    if (useARGCombiningAlgorithm) {
      algorithm = new PartialARGsCombiner(algorithm, config, logger, shutdownNotifier, cfa);
    }
  } else if (useImpactAlgorithm) {
    algorithm = new ImpactAlgorithm(config, logger, shutdownNotifier, cpa, cfa);
  } else if (useRestartAlgorithmWithARGReplay) {
    algorithm = new RestartAlgorithmWithARGReplay(config, logger, shutdownNotifier, cfa);
  } else {
    algorithm = CPAAlgorithm.create(cpa, logger, config, shutdownNotifier, stats);

    if (useAnalysisWithEnablerCPAAlgorithm) {
      algorithm =
          new AnalysisWithRefinableEnablerCPAAlgorithm(
              algorithm, cpa, cfa, logger, config, shutdownNotifier);
    }

    if (useCEGAR) {
      algorithm = new CEGARAlgorithm(algorithm, cpa, config, logger);
    }

    if (useBMC) {
      algorithm =
          new BMCAlgorithm(
              algorithm, cpa, config, logger, reachedSetFactory, shutdownNotifier, cfa);
    }

    if (checkCounterexamples) {
      algorithm =
          new CounterexampleCheckAlgorithm(
              algorithm, cpa, config, logger, shutdownNotifier, cfa, programDenotation);
    }

    if (useBDDCPARestriction) {
      algorithm =
          new BDDCPARestrictionAlgorithm(
              algorithm, cpa, config, logger, shutdownNotifier, cfa, programDenotation);
    }

    if (collectAssumptions) {
      algorithm =
          new AssumptionCollectorAlgorithm(algorithm, cpa, cfa, shutdownNotifier, config, logger);
    }

    if (useAdjustableConditions) {
      algorithm = new RestartWithConditionsAlgorithm(algorithm, cpa, config, logger);
    }

    if (useTestGenAlgorithm) {
      algorithm = new TestGenAlgorithm(algorithm, cpa, shutdownNotifier, cfa, config, logger);
    }

    if (usePropertyCheckingAlgorithm) {
      if (!(cpa instanceof PropertyCheckerCPA)) {
        throw new InvalidConfigurationException(
            "Property checking algorithm requires CPAWithPropertyChecker as Top CPA");
      }
      algorithm = new AlgorithmWithPropertyCheck(algorithm, logger, (PropertyCheckerCPA) cpa);
    }

    if (useResultCheckAlgorithm) {
      algorithm = new ResultCheckAlgorithm(algorithm, cpa, cfa, config, logger, shutdownNotifier);
    }

    if (useCustomInstructionRequirementExtraction) {
      algorithm =
          new CustomInstructionRequirementsExtractingAlgorithm(
              algorithm, cpa, config, logger, shutdownNotifier, cfa);
    }

    if (usePreconditionRefinementAlgorithm) {
      algorithm =
          new PreconditionRefinerAlgorithm(algorithm, cpa, cfa, config, logger, shutdownNotifier);
    }
  }

  if (stats != null && algorithm instanceof StatisticsProvider) {
    ((StatisticsProvider) algorithm).collectStatistics(stats.getSubStatistics());
  }

  return algorithm;
}
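// Hedged sketch of the wrapping pattern in createAlgorithm above: each optional feature
// decorates the algorithm built so far, so the order of the if-blocks determines the nesting
// order. The Algorithm interface and both implementations below are hypothetical stand-ins,
// not the CPAchecker classes.
final class AlgorithmWrappingExample {
  interface Algorithm {
    void run();
  }

  static final class BaseAlgorithm implements Algorithm {
    @Override
    public void run() {
      System.out.println("base analysis");
    }
  }

  static final class LoggingAlgorithm implements Algorithm {
    private final Algorithm delegate;

    LoggingAlgorithm(Algorithm delegate) {
      this.delegate = delegate;
    }

    @Override
    public void run() {
      System.out.println("before delegate");
      delegate.run();
      System.out.println("after delegate");
    }
  }

  static Algorithm create(boolean withLogging) {
    Algorithm algorithm = new BaseAlgorithm();
    if (withLogging) {
      algorithm = new LoggingAlgorithm(algorithm); // wrap, as CEGAR/BMC/... wrap above
    }
    return algorithm;
  }
}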
JavaBDDRegionManager(String bddPackage, Configuration config, LogManager pLogger)
    throws InvalidConfigurationException {
  config.inject(this);
  logger = pLogger;

  if (initTableRatio <= 0 || initTableRatio >= 1) {
    throw new InvalidConfigurationException(
        "Invalid value "
            + initTableRatio
            + " for option bdd.javabdd.initTableRatio, needs to be between 0 and 1.");
  }
  if (initTableSize == 0) {
    // JFactory uses 5 ints of 4 byte sizes for each entry in the BDD table
    double size = Runtime.getRuntime().maxMemory() * initTableRatio / 5 / 4;
    initTableSize = (size > Integer.MAX_VALUE) ? Integer.MAX_VALUE : (int) size;
    logger.log(Level.CONFIG, "Setting value of bdd.javabdd.initTableSize to", initTableSize);
  }

  if (cacheRatio < 0) {
    throw new InvalidConfigurationException(
        "Invalid value "
            + cacheRatio
            + " for option bdd.javabdd.cacheRatio, cannot be negative.");
  }
  if (cacheSize == 0) {
    cacheSize = (int) (initTableSize * cacheRatio);
  }

  factory = BDDFactory.init(bddPackage.toLowerCase(), initTableSize, cacheSize);

  // register callbacks for logging
  try {
    Method gcCallback =
        JavaBDDRegionManager.class.getDeclaredMethod(
            "gcCallback", Integer.class, BDDFactory.GCStats.class);
    gcCallback.setAccessible(true);
    factory.registerGCCallback(this, gcCallback);

    Method resizeCallback =
        JavaBDDRegionManager.class.getDeclaredMethod(
            "resizeCallback", Integer.class, Integer.class);
    resizeCallback.setAccessible(true);
    factory.registerResizeCallback(this, resizeCallback);

    Method reorderCallback =
        JavaBDDRegionManager.class.getDeclaredMethod(
            "reorderCallback", Integer.class, BDDFactory.ReorderStats.class);
    reorderCallback.setAccessible(true);
    factory.registerReorderCallback(this, reorderCallback);

    // If we do not log, unregister the handlers to avoid the cost of
    // calling them with reflection.
    // Registering and immediately unregistering prevents the library
    // from printing stuff to stdout.
    if (!logger.wouldBeLogged(LOG_LEVEL)) {
      factory.unregisterGCCallback(this, gcCallback);
      factory.unregisterResizeCallback(this, resizeCallback);
      factory.unregisterReorderCallback(this, reorderCallback);
    }
  } catch (NoSuchMethodException e) {
    throw new AssertionError(e);
  }

  factory.setVarNum(varcount);
  factory.setCacheRatio(cacheRatio);

  trueFormula = new JavaBDDRegion(factory.one());
  falseFormula = new JavaBDDRegion(factory.zero());
}
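// Hedged sketch of the sizing arithmetic used in the constructor above: derive an initial
// table size from a fraction of the maximum heap, assuming (as the comment above states)
// about 5 ints of 4 bytes per table entry, and clamp to Integer.MAX_VALUE before casting.
// The class and method names here are hypothetical.
final class TableSizingExample {
  static int initialTableSize(double initTableRatio) {
    // maxMemory() is in bytes; divide by the assumed 20 bytes per entry
    double size = Runtime.getRuntime().maxMemory() * initTableRatio / 5 / 4;
    return (size > Integer.MAX_VALUE) ? Integer.MAX_VALUE : (int) size;
  }
}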