@Override
public void execute(SensorContext context) {
  List<TreeVisitor> treeVisitors = Lists.newArrayList();
  // The SQ 6.2 flag is forwarded to the metrics visitor and the coverage
  // sensors, which behave differently on newer platform versions.
  boolean isAtLeastSq62 = context.getSonarQubeVersion().isGreaterThanOrEqual(V6_2);
  MetricsVisitor metricsVisitor =
      new MetricsVisitor(
          context,
          noSonarFilter,
          context.settings().getBoolean(JavaScriptPlugin.IGNORE_HEADER_COMMENTS),
          fileLinesContextFactory,
          isAtLeastSq62);
  treeVisitors.add(metricsVisitor);
  treeVisitors.add(new HighlighterVisitor(context, fileSystem));
  treeVisitors.add(new SeChecksDispatcher(checks.seChecks()));
  treeVisitors.add(new CpdVisitor(fileSystem, context));
  treeVisitors.addAll(checks.visitorChecks());
  // Remember the rule key of the parsing-error check (if it is enabled) so that
  // parse failures can later be reported as issues against that rule.
  for (TreeVisitor check : treeVisitors) {
    if (check instanceof ParsingErrorCheck) {
      parsingErrorRuleKey = checks.ruleKeyFor((JavaScriptCheck) check);
      break;
    }
  }
  // Logs progress every 10 seconds while the files are analysed.
  ProgressReport progressReport =
      new ProgressReport(
          "Report about progress of Javascript analyzer", TimeUnit.SECONDS.toMillis(10));
  progressReport.start(Lists.newArrayList(fileSystem.files(mainFilePredicate)));
  analyseFiles(context, treeVisitors, fileSystem.inputFiles(mainFilePredicate), progressReport);
  executeCoverageSensors(context, metricsVisitor.linesOfCode(), isAtLeastSq62);
}
@Override
public void scanFile(JavaFileScannerContext context) {
  // Resolve the SonarQube InputFile corresponding to the file being scanned.
  sonarFile = fs.inputFile(fs.predicates().is(context.getFile()));
  // Reset the per-file counters: this visitor instance is reused across files.
  classTrees.clear();
  methods = 0;
  complexityInMethods = 0;
  accessors = 0;
  classes = 0;
  PublicApiChecker publicApiChecker = PublicApiChecker.newInstanceWithAccessorsHandledAsMethods();
  if (separateAccessorsFromMethods) {
    publicApiChecker = PublicApiChecker.newInstanceWithAccessorsSeparatedFromMethods();
  }
  publicApiChecker.scan(context.getTree());
  methodComplexityDistribution =
      new RangeDistributionBuilder(
          CoreMetrics.FUNCTION_COMPLEXITY_DISTRIBUTION, LIMITS_COMPLEXITY_METHODS);
  CommentLinesVisitor commentLinesVisitor = new CommentLinesVisitor();
  commentLinesVisitor.analyzeCommentLines(context.getTree());
  // Register the //NOSONAR lines so issues raised on them are filtered out.
  noSonarFilter.addComponent(
      sensorContext.getResource(sonarFile).getEffectiveKey(), commentLinesVisitor.noSonarLines());
  // Walks the tree with the subscribed visitors, filling the counters above.
  super.scanFile(context);
  // leave file.
  int fileComplexity = context.getComplexityNodes(context.getTree()).size();
  saveMetricOnFile(CoreMetrics.CLASSES, classes);
  saveMetricOnFile(CoreMetrics.FUNCTIONS, methods);
  saveMetricOnFile(CoreMetrics.ACCESSORS, accessors);
  saveMetricOnFile(CoreMetrics.COMPLEXITY_IN_FUNCTIONS, complexityInMethods);
  // NOTE(review): the whole-file complexity is saved for COMPLEXITY_IN_CLASSES
  // as well as COMPLEXITY — presumably intentional, but worth confirming.
  saveMetricOnFile(CoreMetrics.COMPLEXITY_IN_CLASSES, fileComplexity);
  saveMetricOnFile(CoreMetrics.COMPLEXITY, fileComplexity);
  saveMetricOnFile(CoreMetrics.PUBLIC_API, publicApiChecker.getPublicApi());
  saveMetricOnFile(
      CoreMetrics.PUBLIC_DOCUMENTED_API_DENSITY, publicApiChecker.getDocumentedPublicApiDensity());
  saveMetricOnFile(
      CoreMetrics.PUBLIC_UNDOCUMENTED_API, publicApiChecker.getUndocumentedPublicApi());
  saveMetricOnFile(CoreMetrics.COMMENT_LINES, commentLinesVisitor.commentLinesMetric());
  saveMetricOnFile(
      CoreMetrics.STATEMENTS, new StatementVisitor().numberOfStatements(context.getTree()));
  saveMetricOnFile(CoreMetrics.NCLOC, new LinesOfCodeVisitor().linesOfCode(context.getTree()));
  // The complexity distributions are kept in memory only, not persisted.
  sensorContext.saveMeasure(
      sonarFile,
      methodComplexityDistribution.build(true).setPersistenceMode(PersistenceMode.MEMORY));
  RangeDistributionBuilder fileComplexityDistribution =
      new RangeDistributionBuilder(
          CoreMetrics.FILE_COMPLEXITY_DISTRIBUTION, LIMITS_COMPLEXITY_FILES);
  sensorContext.saveMeasure(
      sonarFile,
      fileComplexityDistribution
          .add(fileComplexity)
          .build(true)
          .setPersistenceMode(PersistenceMode.MEMORY));
  saveLinesMetric();
}
/**
 * Computes the development cost of the resource in this context.
 *
 * <p>For a file: size metric of its language times the per-language dev cost.
 * For a container resource: the sum of its children's DEVELOPMENT_COST measures.
 */
private long getDevelopmentCost(DecoratorContext context) {
  InputFile inputFile =
      fs.inputFile(fs.predicates().hasRelativePath(context.getResource().getKey()));
  if (inputFile == null) {
    // Not a physical file (e.g. a directory or module): aggregate the children.
    Collection<Measure> childrenMeasures =
        context.getChildrenMeasures(CoreMetrics.DEVELOPMENT_COST);
    Double total = sum(childrenMeasures);
    return total.longValue();
  }
  String language = inputFile.language();
  return getMeasureValue(context, sqaleRatingSettings.getSizeMetric(language, metrics))
      * sqaleRatingSettings.getDevCost(language);
}
/**
 * Decides whether the sensor should run based on its declared file restrictions:
 * true when the descriptor declares no language/type restriction, otherwise true
 * only if the file system contains at least one file matching both restrictions.
 */
private boolean fsCondition(DefaultSensorDescriptor descriptor) {
  boolean restrictsLanguage = !descriptor.languages().isEmpty();
  boolean restrictsType = descriptor.type() != null;
  if (!restrictsLanguage && !restrictsType) {
    // Nothing declared: the sensor always qualifies.
    return true;
  }
  FilePredicate byLanguage =
      restrictsLanguage
          ? fs.predicates().hasLanguages(descriptor.languages())
          : fs.predicates().all();
  FilePredicate byType =
      restrictsType ? fs.predicates().hasType(descriptor.type()) : fs.predicates().all();
  return fs.hasFiles(fs.predicates().and(byLanguage, byType));
}
/**
 * Runs every visitor over the parsed script and saves the issues raised by the
 * checks among them. Charset-aware visitors are given the project encoding first.
 */
private void scanFile(
    SensorContext sensorContext,
    InputFile inputFile,
    List<TreeVisitor> visitors,
    ScriptTree scriptTree) {
  JavaScriptVisitorContext visitorContext =
      new JavaScriptVisitorContext(scriptTree, inputFile.file(), sensorContext.settings());
  highlightSymbols(sensorContext.newSymbolTable().onFile(inputFile), visitorContext);
  List<Issue> collectedIssues = new ArrayList<>();
  for (TreeVisitor visitor : visitors) {
    if (visitor instanceof CharsetAwareVisitor) {
      ((CharsetAwareVisitor) visitor).setCharset(fileSystem.encoding());
    }
    if (visitor instanceof JavaScriptCheck) {
      // Checks produce issues; collect them to be saved in one pass below.
      JavaScriptCheck check = (JavaScriptCheck) visitor;
      collectedIssues.addAll(check.scanFile(visitorContext));
    } else {
      visitor.scanTree(visitorContext);
    }
  }
  saveFileIssues(sensorContext, collectedIssues, inputFile);
}
/**
 * Check if the xml file starts with a prolog "<?xml version="1.0" ?>" if so, check if there
 * is any characters prefixing it.
 */
private void checkForCharactersBeforeProlog(FileSystem fileSystem) {
  try {
    int lineNb = 1;
    // Matches the first opening tag on a line, including the "<?xml" prolog.
    Pattern firstTagPattern = Pattern.compile("<[a-zA-Z?]+");
    boolean hasBOM = false;
    for (String line : Files.readLines(inputFile.file(), fileSystem.encoding())) {
      // A byte-order mark on the first line shifts highlighting offsets by one.
      if (lineNb == 1 && line.startsWith(BOM_CHAR)) {
        hasBOM = true;
        characterDeltaForHighlight = -1;
      }
      Matcher m = firstTagPattern.matcher(line);
      if (m.find()) {
        int column = line.indexOf(m.group());
        // The prolog is only legal as the very first content of the file
        // (allowing for the BOM); anything before it is flagged.
        if (XML_PROLOG_START_TAG.equals(m.group()) && !isFileBeginning(lineNb, column, hasBOM)) {
          hasCharsBeforeProlog = true;
        }
        // Stop at the first tag: only the prolog's position matters.
        break;
      }
      lineNb++;
    }
    if (hasCharsBeforeProlog) {
      // lineNb is the 1-based line on which the prolog was found.
      processCharBeforePrologInFile(fileSystem, lineNb);
    }
  } catch (IOException e) {
    LOG.warn("Unable to analyse file {}", inputFile.absolutePath(), e);
  }
}
/**
 * Locates all Cobertura coverage reports under the module base directory and
 * feeds each one to the report parser.
 */
public void analyse(final Project project, final SensorContext context) {
  final String moduleBaseDir = fileSystem.baseDir().getPath();
  for (final File coverageReport : reportFilesFinder.reportsIn(moduleBaseDir)) {
    LOGGER.info("Processing coverage report {}", coverageReport);
    CoberturaReportParser.parseReport(coverageReport, fileSystem, project, context);
  }
}
/**
 * Wires the sensor's collaborators: the active checks (built-in plus optional
 * custom rules), the file system, and the parser for the project encoding.
 */
public JavaScriptSquidSensor(
    CheckFactory checkFactory,
    FileLinesContextFactory fileLinesContextFactory,
    FileSystem fileSystem,
    NoSonarFilter noSonarFilter,
    @Nullable CustomJavaScriptRulesDefinition[] customRulesDefinition) {
  this.fileLinesContextFactory = fileLinesContextFactory;
  this.fileSystem = fileSystem;
  this.noSonarFilter = noSonarFilter;
  // Built-in checks from the standard repository, extended by custom rules.
  this.checks =
      JavaScriptChecks.createJavaScriptCheck(checkFactory)
          .addChecks(CheckList.REPOSITORY_KEY, CheckList.getChecks())
          .addCustomChecks(customRulesDefinition);
  // Only MAIN sources written in JavaScript are analysed.
  this.mainFilePredicate =
      fileSystem
          .predicates()
          .and(
              fileSystem.predicates().hasType(InputFile.Type.MAIN),
              fileSystem.predicates().hasLanguage(JavaScriptLanguage.KEY));
  // getEncoding() reads this.fileSystem, so that field must be set already.
  this.parser = JavaScriptParserBuilder.createParser(getEncoding());
}
/**
 * Create a temporary file without any character before the prolog and update the following
 * attributes in order to correctly report issues:
 *
 * <ul>
 *   <li>lineDeltaForIssue
 *   <li>file
 * </ul>
 *
 * <p>If the prolog cannot be located in the re-read content (e.g. an encoding
 * mismatch with the earlier line-based scan), the method does nothing instead of
 * failing.
 *
 * @param fileSystem used for the project encoding and the working directory
 * @param lineDelta 1-based line on which the prolog was found
 */
private void processCharBeforePrologInFile(FileSystem fileSystem, int lineDelta) {
  try {
    String content = Files.toString(inputFile.file(), fileSystem.encoding());
    int index = content.indexOf(XML_PROLOG_START_TAG);
    // BUG FIX: the original called content.substring(index) BEFORE checking
    // index != -1, throwing StringIndexOutOfBoundsException (unchecked, not
    // caught below) when the prolog was absent from the re-read content.
    if (index == -1) {
      return;
    }
    File tempFile = new File(fileSystem.workDir(), inputFile.file().getName());
    // Write a copy starting exactly at the prolog so parsing succeeds.
    Files.write(content.substring(index), tempFile, fileSystem.encoding());
    noCharBeforePrologFile = tempFile;
    // Shift highlighting offsets by the number of stripped characters.
    characterDeltaForHighlight += index;
    if (lineDelta > 1) {
      lineDeltaForIssue = lineDelta - 1;
    }
  } catch (IOException e) {
    LOG.warn("Unable to analyse file {}", inputFile.absolutePath(), e);
  }
}
public void analyse(Project project, SensorContext context) { List<QProfileWithId> profiles = Lists.newArrayList(); for (String language : fs.languages()) { QProfileWithId qProfile = (QProfileWithId) moduleQProfiles.findByLanguage(language); if (qProfile != null) { dao.updateUsedColumn(qProfile.id(), true); profiles.add(qProfile); } } UsedQProfiles used = UsedQProfiles.fromProfiles(profiles); Measure detailsMeasure = new Measure(CoreMetrics.QUALITY_PROFILES, used.toJSON()); context.saveMeasure(detailsMeasure); // For backward compatibility if (profiles.size() == 1) { QProfileWithId qProfile = profiles.get(0); Measure measure = new Measure(CoreMetrics.PROFILE, qProfile.name()).setValue((double) qProfile.id()); Measure measureVersion = new Measure(CoreMetrics.PROFILE_VERSION, qProfile.version().doubleValue()); context.saveMeasure(measure); context.saveMeasure(measureVersion); } }
/**
 * Runs only once per analysis — on the root module — and only when the project
 * contains Swift files. Also keeps a reference to the project for later use.
 */
public boolean shouldExecuteOnProject(final Project project) {
  this.project = project;
  boolean containsSwift = fileSystem.languages().contains(Swift.KEY);
  return project.isRoot() && containsSwift;
}
/**
 * Maps a plain {@code java.io.File} back to the corresponding SonarQube
 * {@link InputFile}, or null if the file is not indexed.
 */
private InputFile inputFromIOFile(File file) {
  return fs.inputFile(fs.predicates().is(file));
}
/**
 * Entry point for each visited file: resolves the corresponding InputFile from
 * the sensor context and resets per-file state via {@code init()}.
 */
@Override
public void visitFile(Tree scriptTree) {
  FileSystem fileSystem = sensorContext.fileSystem();
  this.inputFile = fileSystem.inputFile(fileSystem.predicates().is(getContext().getFile()));
  init();
}
/** Returns the project source encoding configured on the file system. */
private Charset getEncoding() {
  return fileSystem.encoding();
}
/**
 * Looks up the indexed {@link InputFile} for a path relative to the module base
 * directory; returns null when the path is not part of the analysed sources.
 */
@CheckForNull
private InputFile getInputFile(String filePath) {
  return fs.inputFile(fs.predicates().hasRelativePath(filePath));
}