@Implementation
 public void reset() {
   // Restore this shadow to its pristine state: discard every recorded
   // pre-, post- and set-operation, then return the matrix to identity.
   setOps.clear();
   postOps.clear();
   preOps.clear();
   mMatrix = SimpleMatrix.IDENTITY;
 }
 public void scan(CompilationUnitTree tree) {
   // Reset all per-compilation-unit bookkeeping, then delegate the
   // actual traversal to the superclass scanner.
   documentedPublicApi = 0;
   publicApi = 0;
   currentParents.clear();
   classTrees.clear();
   super.scan(tree);
 }
  @Override
  public void visitCode(Code obj) {
    // Void methods cannot produce a usable result, so there is nothing
    // for this detector to inspect — bail out early.
    Method m = getMethod();
    if (m.getReturnType() == Type.VOID) {
      return;
    }

    // Reset per-method scan state before walking this method's bytecode.
    stack.resetForMethodEntry(this);
    ifBlocks.clear();
    activeUnconditional = null;

    // Record the handler PCs from the exception table so the opcode
    // visitor can recognize catch-block entry points; null means the
    // method has no handlers at all.
    CodeException[] ces = obj.getExceptionTable();
    if (CollectionUtils.isEmpty(ces)) {
      catchPCs = null;
    } else {
      catchPCs = new BitSet();
      for (CodeException ce : ces) {
        catchPCs.set(ce.getHandlerPC());
      }
    }
    gotoBranchPCs.clear();
    casePositions.clear();
    lookingForResetOp = false;

    try {
      super.visitCode(obj);
    } catch (StopOpcodeParsingException e) {
      // reported an issue, so get out
    }
  }
 @Override
 public void scanFile(JavaFileScannerContext context) {
   // Derive the slash-separated package path for this compilation unit.
   JavaTree.CompilationUnitTreeImpl tree = (JavaTree.CompilationUnitTreeImpl) context.getTree();
   String packageName = PackageDeclarationTreeImpl.packageNameAsString(tree.packageDeclaration());
   currentPackage = packageName.replace('.', '/');
   currentFile = context.getFile();
   // Wipe all per-file bookkeeping before walking the new tree.
   suppressWarningLines.clear();
   anonymousInnerClassCounter.clear();
   parent.clear();
   currentClassKey.clear();
   scan(tree);
 }
  @Override
  public void startDocument() throws SAXException {
    // Reset all parse-progress counters and stacks for a fresh document.
    finished = false;
    rejectedLemmaCounter = 0;
    acceptedLemmaCounter = 0;
    lemmasParsed = 0;
    elemStack.clear();
    handlerStack.clear();

    // The root handler delegates dictionary elements to its child handler.
    rootHandler = new RootHandler(new DictionaryElemHandler());
    handlerStack.addFirst(rootHandler);
  }
 /**
  * Loads persisted settings from {@code fileName}.
  *
  * <p>Lines starting with {@code #} introduce special sections: {@code #graphRes} is followed by
  * a count and that many "x y" point lines, {@code #wasFract} by a line of indices to flag. Every
  * other line is a {@code name=value} pair written reflectively into the matching
  * {@link Physics06Settings} field. Afterwards the UI is refreshed and transient state cleared.
  *
  * @throws Exception if the file cannot be read, a line is malformed, or a field is unknown
  */
 public static void loadFromFile() throws Exception {
   // try-with-resources guarantees the reader is closed even when a parse
   // error aborts the loop (the original leaked it on any exception).
   try (BufferedReader in = new BufferedReader(new FileReader(fileName))) {
     String s;
     while ((s = in.readLine()) != null) {
       if (s.startsWith("#")) {
         if (s.startsWith("#graphRes")) {
           graphRes.clear();
           int n = Integer.parseInt(in.readLine());
           for (int i = 0; i < n; i++) {
             StringTokenizer tok = new StringTokenizer(in.readLine(), " ");
             graphRes.add(
                 new Vector2f(Float.parseFloat(tok.nextToken()), Float.parseFloat(tok.nextToken())));
           }
         } else if (s.startsWith("#wasFract")) {
           Arrays.fill(wasFract, false);
           StringTokenizer tok = new StringTokenizer(in.readLine());
           while (tok.hasMoreTokens()) {
             wasFract[Integer.parseInt(tok.nextToken())] = true;
           }
         }
       } else {
         // Plain "field=value" line: set the static field reflectively.
         StringTokenizer tok = new StringTokenizer(s, "=");
         String fieldName = tok.nextToken();
         String value = tok.nextToken();
         Field f = Physics06Settings.class.getField(fieldName);
         f.set(Physics06Settings.class, parseObject(f.getType(), value));
       }
     }
   }
   refresh();
   clear();
 }
  /**
   * Gets the current scope.
   *
   * <p>Lazily materializes any pending scopes: each queued scope root (processed deepest-last via
   * the descending iterator) is turned into a real {@code Scope} whose parent is the previously
   * created one, pushed onto {@code scopes}, and the pending root/cfg queues are cleared. Returns
   * {@code null} only when there is no current scope and no pending roots.
   */
  public Scope getScope() {
    Scope scope = scopes.isEmpty() ? null : scopes.peek();
    if (scopeRoots.isEmpty()) {
      return scope;
    }

    // Create scopes outermost-first so each new scope's parent is the one
    // created just before it.
    Iterator<Node> it = scopeRoots.descendingIterator();
    while (it.hasNext()) {
      scope = scopeCreator.createScope(it.next(), scope);
      scopes.push(scope);
    }
    scopeRoots.clear();
    cfgRoots.clear();
    // No need to call compiler.setScope; the top scopeRoot is now the top scope
    return scope;
  }
Exemple #8
0
 /**
  * Records {@code state} as the new present value, pushing the previous
  * present onto the undo history and discarding any redo history.
  */
 public void insert(T state) {
   past.addFirst(present);
   // Bound the undo history: drop the oldest entry once over the limit.
   boolean overLimit = past.size() > limit;
   if (overLimit) {
     past.removeLast();
   }
   future.clear();
   present = state;
 }
    @Override
    public void leave() {
      // Drop all in-flight Paxos bookkeeping for this member, then
      // forward the leave to the instance store.
      bookedInstances.clear();
      pendingValues.clear();
      nextInstanceId = 0;

      paxosInstances.leave();
    }
  @Override
  public void leave() {
    // Reset pending proposals and booked instances, rewind the shared
    // instance counter, then notify the instance store.
    bookedInstances.clear();
    pendingValues.clear();
    commonState.setNextInstanceId(0);

    paxosInstances.leave();
  }
 /**
  * Adds an entry to the history list. Any pending redo entries are discarded, and if the undo
  * list grows too long its oldest entries are dropped.
  *
  * @param entry the entry to add to this list
  */
 @SuppressWarnings("nls")
 public void addEntry(final HistoryAction entry) {
   redoList.clear();
   undoList.addLast(entry);
   // Trim from the front until the undo history fits the cap again.
   while (undoList.size() > MAX_HISTORY_LENGHT) {
     undoList.removeFirst();
   }
 }
 public static void clear() {
   // Re-parse the current formula; a bad expression is logged rather
   // than propagated.
   try {
     ex = new Expression(F);
   } catch (Exception exc) {
     log("Bad expression: " + F, exc);
   }
   // Reset the tracked window boundaries to the current left/right walls.
   chosenLeftW = leftW;
   graph1X0 = leftW;
   leftW_tern = leftW;
   chosenRightW = rightW;
   rightW_tern = rightW;
   resizeHistory.clear();
 }
Exemple #13
0
  /**
   * Computes and saves every file-level measure for the scanned Java file: class/method/accessor
   * counts, complexity (total, in functions, in classes, plus both distributions), public API and
   * documentation metrics, comment lines, statements, NCLOC and line measures.
   */
  @Override
  public void scanFile(JavaFileScannerContext context) {
    sonarFile = fs.inputFile(fs.predicates().is(context.getFile()));
    // Reset per-file counters before the visitor walk.
    classTrees.clear();
    methods = 0;
    complexityInMethods = 0;
    accessors = 0;
    classes = 0;
    // Accessor handling is configurable: counted as methods, or separately.
    PublicApiChecker publicApiChecker = PublicApiChecker.newInstanceWithAccessorsHandledAsMethods();
    if (separateAccessorsFromMethods) {
      publicApiChecker = PublicApiChecker.newInstanceWithAccessorsSeparatedFromMethods();
    }
    publicApiChecker.scan(context.getTree());
    methodComplexityDistribution =
        new RangeDistributionBuilder(
            CoreMetrics.FUNCTION_COMPLEXITY_DISTRIBUTION, LIMITS_COMPLEXITY_METHODS);
    CommentLinesVisitor commentLinesVisitor = new CommentLinesVisitor();
    commentLinesVisitor.analyzeCommentLines(context.getTree());
    // Register the lines flagged by the comment visitor so issue filtering
    // (NOSONAR suppression) can use them for this file.
    noSonarFilter.addComponent(
        sensorContext.getResource(sonarFile).getEffectiveKey(), commentLinesVisitor.noSonarLines());
    super.scanFile(context);
    // leave file.
    int fileComplexity = context.getComplexityNodes(context.getTree()).size();
    saveMetricOnFile(CoreMetrics.CLASSES, classes);
    saveMetricOnFile(CoreMetrics.FUNCTIONS, methods);
    saveMetricOnFile(CoreMetrics.ACCESSORS, accessors);
    saveMetricOnFile(CoreMetrics.COMPLEXITY_IN_FUNCTIONS, complexityInMethods);
    saveMetricOnFile(CoreMetrics.COMPLEXITY_IN_CLASSES, fileComplexity);
    saveMetricOnFile(CoreMetrics.COMPLEXITY, fileComplexity);
    saveMetricOnFile(CoreMetrics.PUBLIC_API, publicApiChecker.getPublicApi());
    saveMetricOnFile(
        CoreMetrics.PUBLIC_DOCUMENTED_API_DENSITY,
        publicApiChecker.getDocumentedPublicApiDensity());
    saveMetricOnFile(
        CoreMetrics.PUBLIC_UNDOCUMENTED_API, publicApiChecker.getUndocumentedPublicApi());
    saveMetricOnFile(CoreMetrics.COMMENT_LINES, commentLinesVisitor.commentLinesMetric());
    saveMetricOnFile(
        CoreMetrics.STATEMENTS, new StatementVisitor().numberOfStatements(context.getTree()));
    saveMetricOnFile(CoreMetrics.NCLOC, new LinesOfCodeVisitor().linesOfCode(context.getTree()));

    // Persist the per-method complexity distribution accumulated above.
    sensorContext.saveMeasure(
        sonarFile,
        methodComplexityDistribution.build(true).setPersistenceMode(PersistenceMode.MEMORY));

    RangeDistributionBuilder fileComplexityDistribution =
        new RangeDistributionBuilder(
            CoreMetrics.FILE_COMPLEXITY_DISTRIBUTION, LIMITS_COMPLEXITY_FILES);
    sensorContext.saveMeasure(
        sonarFile,
        fileComplexityDistribution
            .add(fileComplexity)
            .build(true)
            .setPersistenceMode(PersistenceMode.MEMORY));
    saveLinesMetric();
  }
Exemple #14
0
  @Test
  public void testDuplicateKey() throws SQLException {
    // Putting a second entry under an existing key must evict the entry
    // previously cached under exactly that key.
    Entry original = use(1);

    expectEvict.clear();
    expectEvict.add(original);
    // This overwrites the cache, evicting previous entry with exactly the same key
    cache.put(1, new Entry(1));
    assertEvict();
  }
  /**
   * Closes this connection after sending {@code closePacket}. It is guaranteed that the close
   * packet is sent before the connection is closed, but all other past and future packets are
   * dropped. The connection is closed [by Dispatcher Thread], and onDisconnect() will be called
   * to clear all other things.
   *
   * @param closePacket packet that will be sent before closing
   * @param forced has no effect in this implementation
   */
  public final void close(LsServerPacket closePacket, boolean forced) {
    synchronized (guard) {
      // Already closing/closed — nothing more to send.
      if (isWriteDisabled()) {
        return;
      }

      log.debug("sending packet: " + closePacket + " and closing connection after that.");

      isForcedClosing = forced;
      pendingClose = true;
      // Drop everything queued and make the close packet the sole payload.
      sendMsgQueue.clear();
      sendMsgQueue.addLast(closePacket);
      enableWriteInterest();
    }
  }
 private static void refresh() {
   // Zero out all simulation state and restore the UI to its defaults.
   place = 0;
   a = 0;
   V = 0;
   time = 0;
   graphSpring.clear();
   frame.jSliderdY2.setValue(2);
   minA = 0;
   maxA = 0;
   periodCount = 0;
   graph2X0 = 0;
   idleProcess = false;
   idleTime = 0;
   frame.setDone(0);
   curExtremumCount = 0;
 }
  public void initialize() {
    // Seed the deque so that it contains the bins for the current 24-hour
    // interval, one bin per hour, oldest first in the deque.
    deque.clear();

    long now = System.currentTimeMillis();
    for (int hoursBack = 0; hoursBack < 24; hoursBack++) {
      long binTime = now - TimeUnit.HOURS.toMillis(hoursBack);
      deque.addFirst(new HourlyAggregateData(binTime));
    }

    // Backfill historical data asynchronously so startup is not blocked.
    new Thread(this::populateFromViews, "View-initializer").start();
  }
 // Atomically drains the queue of files awaiting resolution and hands the
 // claimed set to countAndMarkUnresolved. Only files whose "in queue" bit is
 // still set are claimed; the guard on fileIsInQueue.clear(...) suggests
 // clear() returns the bit's previous value — TODO confirm against its impl.
 @NotNull
 private Set<VirtualFile> pollFilesToResolve() {
   Set<VirtualFile> set;
   synchronized (filesToResolve) {
     int queuedSize = filesToResolve.size();
     set = new LinkedHashSet<VirtualFile>(queuedSize);
     // someone might have cleared this bit to mark file as processed
     for (VirtualFile file : filesToResolve) {
       if (fileIsInQueue.clear(getAbsId(file))) {
         set.add(file);
       }
     }
     // Everything claimed (or already processed) — empty the queue.
     filesToResolve.clear();
   }
   return countAndMarkUnresolved(set, false);
 }
  /**
   * Starts a new JSON document: any state left over from a previous document is discarded and a
   * fresh, as-yet-unnamed root object is pushed onto the queue.
   *
   * @throws XMLStreamException if initializing the document state fails
   */
  @Override
  public void writeStartDocument() throws XMLStreamException {

    try {
      // clear() is a safe no-op on an empty queue, so the isEmpty() guard
      // the original used was redundant.
      objectQueue.clear();

      // At this point we do not know the root name
      objectQueue.push(new JsonObject(null));

    } catch (Exception e) {
      throw new XMLStreamException(e);
    }
  }
  /** Restores all parser state and statistics counters to their initial values. */
  private void resetParser() {
    // Core parse state.
    descriptorsStack.clear();
    ci = 0;
    cu = null;
    cn = DummyNode.getInstance();
    errorGSSNode = null;
    errorSlot = null;
    errorIndex = 0;

    // Statistics counters.
    descriptorsCount = 0;
    countGSSNodes = 0;
    countGSSEdges = 0;
    countNonterminalNodes = 0;
    countIntemediateNodes = 0;
    countTerminalNodes = 0;
    countPackedNodes = 0;
    countAmbiguousNodes = 0;
  }
  @Override
  public void transform(final CommonTree ast) {
    LOGGER.info("DatatypeInfoMapper.transform..." + ast);
    namespaceStack.clear();

    // One DatatypeInfo slot per top-level child of the AST.
    this.datatypeInfos = new ArrayList<DatatypeInfo>(ast.getChildCount());

    try {
      for (int childIndex = 0; childIndex < ast.getChildCount(); childIndex++) {
        walk(ast.getChild(childIndex), null);
      }
    } catch (RuntimeException e) {
      // Log before propagating so the failure is visible in this mapper's log.
      LOGGER.error(e);
      throw e;
    }

    LOGGER.info("DONE: " + references.size() + " loaded.");
  }
  /**
   * Runs negotiate authentication against the stack of profiles, top first.
   *
   * <p>Each profile's Authn extension is invoked with the request/response pair. On SUCCESS the
   * auth record, auth type and profile name are stored as request attributes and the remaining
   * profiles are discarded. On NEGOTIATION_UNAUTHORIZED (or any unexpected result) the current
   * profile is popped and the next one is tried. On NEGOTIATION_INCOMPLETE the loop stops so the
   * negotiation can continue on a later request.
   */
  private void doAuth(
      HttpServletRequest req, HttpServletResponse rsp, Deque<AuthenticationProfile> stack)
      throws IOException, ServletException {

    boolean stop = false;
    while (!stop && !stack.isEmpty()) {
      AuthenticationProfile profile = stack.peek();

      // Ask this profile's authn extension to (continue to) negotiate.
      ExtMap output =
          profile
              .getAuthn()
              .invoke(
                  new ExtMap()
                      .mput(Base.InvokeKeys.COMMAND, Authn.InvokeCommands.AUTHENTICATE_NEGOTIATE)
                      .mput(Authn.InvokeKeys.HTTP_SERVLET_REQUEST, req)
                      .mput(Authn.InvokeKeys.HTTP_SERVLET_RESPONSE, rsp));

      switch (output.<Integer>get(Authn.InvokeKeys.RESULT)) {
        case Authn.AuthResult.SUCCESS:
          // Record the successful authentication on the request and stop
          // trying further profiles.
          req.setAttribute(
              FiltersHelper.Constants.REQUEST_AUTH_RECORD_KEY,
              output.<ExtMap>get(Authn.InvokeKeys.AUTH_RECORD));
          req.setAttribute(FiltersHelper.Constants.REQUEST_AUTH_TYPE_KEY, AuthType.NEGOTIATION);
          req.setAttribute(FiltersHelper.Constants.REQUEST_PROFILE_KEY, profile.getName());
          stack.clear();
          break;

        case Authn.AuthResult.NEGOTIATION_UNAUTHORIZED:
          // This profile rejected the request; try the next one.
          stack.pop();
          break;

        case Authn.AuthResult.NEGOTIATION_INCOMPLETE:
          // More round-trips needed; resume on a subsequent request.
          stop = true;
          break;

        default:
          log.error(
              "Unexpected authentication result. AuthResult code is {}",
              output.<Integer>get(Authn.InvokeKeys.RESULT));
          stack.pop();
          break;
      }
    }
  }
  /**
   * Ends the JSON document: exports the root object held at the tail of the
   * queue and then releases all queued state.
   */
  @Override
  public void writeEndDocument() throws XMLStreamException {

    try {
      // Thrown inside the try on purpose: the catch below wraps it again,
      // exactly as the original control flow did.
      if (objectQueue.isEmpty()) {
        throw new XMLStreamException("Could not find root JsonObject, queue is empty");
      }

      // Get last element in queue and export
      objectQueue.peekLast().toJson(!stripRoot);

      // Free resources
      objectQueue.clear();
    } catch (Exception e) {
      throw new XMLStreamException(e);
    }
  }
Exemple #24
0
 // Searches outward from the player's position (frontier in mOpen, visited
 // set in mClosed) and returns true as soon as a reachable position reports
 // isExposed(). NOTE(review): semantics of Pos.isExposed(), goodSuccessor()
 // and successors() are defined elsewhere — verify against those helpers.
 protected boolean slowCheck() {
   EntityPlayer player = Minecraft.getMinecraft().thePlayer;
   // Fresh search: empty the frontier and the visited set.
   mOpen.clear();
   mClosed.clear();
   boolean newValue = false;
   // Boundry problems because doubles to ints suck, always pick the "good position"
   Pos current = new Pos(Math.ceil(player.posX), Math.ceil(player.posY), Math.ceil(player.posZ));
   if (!goodSuccessor(current, null))
     current = new Pos(Math.floor(player.posX), Math.floor(player.posY), Math.floor(player.posZ));
   while (current != null && !newValue) {
     if (current.isExposed()) {
       newValue = true;
       break;
     }
     // Expand this node and move it to the visited set.
     mOpen.addAll(successors(current));
     mClosed.add(current);
     current = mOpen.poll();
   }
   return newValue;
 }
Exemple #25
0
 /** Empties the underlying equation. */
 public void clear() {
   equation.clear();
 }
 /** Resets all recorded statistics to their initial values. */
 public synchronized void reset() {
   samples.clear();
   count = 0;
   max = 0;
   // Seed min high so the first recorded sample always replaces it.
   min = Double.MAX_VALUE;
 }
 @Override
 protected void setUp() throws Exception {
   super.setUp();
   // Start each test with no queued project deletions and a fresh,
   // not-yet-created target file.
   projectsToDeleteOnTearDown.clear();
   target = nonExistentTempFile("temp", ".pdom");
 }
Exemple #28
0
 /** Drops all existing commit log segments and starts a single fresh one. */
 public void resetUnsafe() {
   segments.clear();
   segments.add(new CommitLogSegment());
 }
 /** Forgets all recent end-time estimates. */
 private void clearEstimates() {
   recentEndTimeEstimates.clear();
 }
 /** Empties both the played-track list and the music list. */
 public void clear() {
   musicList.clear();
   playedList.clear();
 }