Example #1
File: SnpEff.java Project: wenjiany/gatk
  @Override
  public void initialize(
      AnnotatorCompatible walker, GenomeAnalysisEngine toolkit, Set<VCFHeaderLine> headerLines) {
    // Make sure that we actually have a valid SnpEff rod binding (just in case the user
    // specified -A SnpEff without providing a SnpEff rod via --snpEffFile):
    if (!isValidRodBinding(walker.getSnpEffRodBinding())) {
      canAnnotate = false;
      return;
    }

    RodBinding<VariantContext> snpEffRodBinding = walker.getSnpEffRodBinding();

    // Make sure that the SnpEff version number and command-line header lines are present
    // in the VCF header of the SnpEff rod, and that the file was generated by a supported
    // version of SnpEff:
    VCFHeader snpEffVCFHeader =
        GATKVCFUtils.getVCFHeadersFromRods(toolkit, Arrays.asList(snpEffRodBinding.getName()))
            .get(snpEffRodBinding.getName());
    VCFHeaderLine snpEffVersionLine =
        snpEffVCFHeader.getOtherHeaderLine(SNPEFF_VCF_HEADER_VERSION_LINE_KEY);
    VCFHeaderLine snpEffCommandLine =
        snpEffVCFHeader.getOtherHeaderLine(SNPEFF_VCF_HEADER_COMMAND_LINE_KEY);

    if (!isValidSnpEffVersionAndCommandLine(snpEffVersionLine, snpEffCommandLine)) {
      canAnnotate = false;
      return;
    }

    // If everything looks ok, add the SnpEff version number and command-line header lines to the
    // header of the VCF output file, changing the key names so that our output file won't be
    // mistaken in the future for a SnpEff output file:
    headerLines.add(
        new VCFHeaderLine(OUTPUT_VCF_HEADER_VERSION_LINE_KEY, snpEffVersionLine.getValue()));
    headerLines.add(
        new VCFHeaderLine(OUTPUT_VCF_HEADER_COMMAND_LINE_KEY, snpEffCommandLine.getValue()));

    // Can only be called from VariantAnnotator
    if (!(walker instanceof VariantAnnotator)) {
      if (walker != null)
        logger.warn(
            "Annotation will not be calculated, must be called from VariantAnnotator, not "
                + walker.getClass().getName());
      else logger.warn("Annotation will not be calculated, must be called from VariantAnnotator");
      canAnnotate = false;
      return;
    }
  }
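The re-keying step above copies the SnpEff version and command-line header lines into the output VCF header under new key names. Below is a minimal standalone sketch of that idea, using plain Maps instead of the GATK/htsjdk VCF header classes; every key and value string here is a placeholder, not the actual value of the SNPEFF_VCF_HEADER_* or OUTPUT_VCF_HEADER_* constants.

import java.util.LinkedHashMap;
import java.util.Map;

public class HeaderReKeySketch {
  public static void main(String[] args) {
    // Header lines read from the SnpEff rod's VCF header (key -> value); placeholder data:
    Map<String, String> snpEffHeader = new LinkedHashMap<>();
    snpEffHeader.put("SnpEffVersion", "exampleVersionString");
    snpEffHeader.put("SnpEffCmd", "exampleCommandLine");

    // Copy the same values into the output header under renamed keys, so the annotated
    // VCF is not later mistaken for a raw SnpEff output file:
    Map<String, String> outputHeader = new LinkedHashMap<>();
    outputHeader.put("GATK_SnpEff_Version", snpEffHeader.get("SnpEffVersion"));
    outputHeader.put("GATK_SnpEff_Command", snpEffHeader.get("SnpEffCmd"));

    outputHeader.forEach((k, v) -> System.out.println("##" + k + "=" + v));
  }
}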
Example #2
  @Override
  public Map<String, Object> annotate(
      final RefMetaDataTracker tracker,
      final AnnotatorCompatible walker,
      final ReferenceContext ref,
      final Map<String, AlignmentContext> stratifiedContexts,
      final VariantContext vc,
      final Map<String, PerReadAlleleLikelihoodMap> stratifiedPerReadAlleleLikelihoodMap) {
    // Can only be called from UnifiedGenotyper
    if (!(walker instanceof UnifiedGenotyper)) {
      if (!walkerIdentityCheckWarningLogged) {
        if (walker != null)
          logger.warn(
              "Annotation will not be calculated, must be called from UnifiedGenotyper, not "
                  + walker.getClass().getName());
        else logger.warn("Annotation will not be calculated, must be called from UnifiedGenotyper");
        walkerIdentityCheckWarningLogged = true;
      }
      return null;
    }

    if (stratifiedContexts.isEmpty()) return null;

    // Not meaningful when we're at an indel location: deletions that start at location N
    // are by definition called at position N-1, and at position N-1 there are no
    // informative deletions in the pileup.
    if (!vc.isSNP()) return null;

    int deletions = 0;
    int depth = 0;
    for (Map.Entry<String, AlignmentContext> sample : stratifiedContexts.entrySet()) {
      for (final PileupElement p : sample.getValue().getBasePileup()) {
        depth++;
        if (p.isDeletion()) deletions++;
      }
    }
    Map<String, Object> map = new HashMap<>();
    map.put(
        getKeyNames().get(0),
        String.format("%.2f", depth == 0 ? 0.0 : (double) deletions / (double) depth));
    return map;
  }
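The loop above computes the fraction of pileup elements that are spanning deletions and formats it to two decimals. Here is a minimal self-contained sketch of the same arithmetic, with the pileup reduced to a list of hypothetical per-read deletion flags in place of PileupElement.

import java.util.Arrays;
import java.util.List;

public class DeletionFractionSketch {
  public static void main(String[] args) {
    // One flag per pileup element across all samples: true = spanning deletion.
    List<Boolean> pileupIsDeletion = Arrays.asList(false, true, false, false, true);

    int deletions = 0;
    int depth = 0;
    for (boolean isDeletion : pileupIsDeletion) {
      depth++;
      if (isDeletion) deletions++;
    }

    // Same zero-depth guard and two-decimal formatting as the annotation above: 2/5 -> "0.40".
    String value = String.format("%.2f", depth == 0 ? 0.0 : (double) deletions / (double) depth);
    System.out.println(value);
  }
}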
Example #3
File: SnpEff.java Project: wenjiany/gatk
  @Override
  public Map<String, Object> annotate(
      final RefMetaDataTracker tracker,
      final AnnotatorCompatible walker,
      final ReferenceContext ref,
      final Map<String, AlignmentContext> stratifiedContexts,
      final VariantContext vc,
      final Map<String, PerReadAlleleLikelihoodMap> stratifiedPerReadAlleleLikelihoodMap) {

    // Cannot annotate if the initialization checks failed
    if (!canAnnotate) return null;

    RodBinding<VariantContext> snpEffRodBinding = walker.getSnpEffRodBinding();

    // Get only the SnpEff records that start at this locus, not those that merely span it:
    List<VariantContext> snpEffRecords = tracker.getValues(snpEffRodBinding, ref.getLocus());

    // Within this set, look for a SnpEff record whose ref/alt alleles match the record to
    // annotate. If there is more than one such record, we only need to pick the first one,
    // since the biological effects will be the same across all such records:
    VariantContext matchingRecord = getMatchingSnpEffRecord(snpEffRecords, vc);
    if (matchingRecord == null) {
      return null;
    }

    // Parse the SnpEff INFO field annotation from the matching record into individual effect
    // objects:
    List<SnpEffEffect> effects = parseSnpEffRecord(matchingRecord);
    if (effects.isEmpty()) {
      return null;
    }

    // Add annotations for only one of the most biologically significant effects from this set:
    SnpEffEffect mostSignificantEffect = getMostSignificantEffect(effects);
    return mostSignificantEffect.getAnnotations();
  }
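The annotate() above delegates the ranking to getMostSignificantEffect(). Below is a minimal sketch of that selection pattern, with a hypothetical Effect record and an illustrative impact ranking standing in for SnpEffEffect and the real SnpEff impact ordering (Java 16+ for the record syntax).

import java.util.Arrays;
import java.util.Comparator;
import java.util.List;

public class MostSignificantEffectSketch {
  // Hypothetical stand-in for SnpEffEffect: an effect name plus an impact rank,
  // where a lower rank is treated as more biologically significant.
  record Effect(String name, int impactRank) {}

  public static void main(String[] args) {
    List<Effect> effects =
        Arrays.asList(
            new Effect("DOWNSTREAM", 3),
            new Effect("NON_SYNONYMOUS_CODING", 1),
            new Effect("SYNONYMOUS_CODING", 2));

    // Mirrors the getMostSignificantEffect() step: keep only the single
    // highest-impact effect and report it (here, just its name).
    Effect mostSignificant =
        effects.stream().min(Comparator.comparingInt(Effect::impactRank)).orElseThrow();

    System.out.println(mostSignificant.name()); // prints NON_SYNONYMOUS_CODING
  }
}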
  /**
   * Checks if the input data is appropriate
   *
   * @param annotation the input genotype annotation key name(s)
   * @param walker input walker
   * @param map input map for each read, holding the underlying alleles represented by an aligned
   *     read and the corresponding relative likelihoods.
   * @param g input genotype
   * @param warningsLogged array that ensures each warning is logged only once per caller
   * @param logger logger specific to each caller
   * @return true if the walker is a HaplotypeCaller or MuTect2, the likelihood map is non-null,
   *     and the genotype is non-null and called; false otherwise
   * @throws IllegalArgumentException if annotation, walker, g, warningsLogged, or logger are null.
   * @throws ReviewedGATKException if the size of warningsLogged is less than 3.
   */
  public static boolean isAppropriateInput(
      final String annotation,
      final AnnotatorCompatible walker,
      final PerReadAlleleLikelihoodMap map,
      final Genotype g,
      final boolean[] warningsLogged,
      final Logger logger) {

    if (annotation == null) {
      throw new IllegalArgumentException("The input annotation cannot be null");
    }

    if (walker == null) {
      throw new IllegalArgumentException("The input walker cannot be null");
    }

    if (g == null) {
      throw new IllegalArgumentException("The input genotype cannot be null");
    }

    if (warningsLogged == null) {
      throw new IllegalArgumentException("The input warnings logged cannot be null");
    }

    if (logger == null) {
      throw new IllegalArgumentException("The input logger cannot be null");
    }

    if (warningsLogged.length < WARNINGS_LOGGED_SIZE) {
      throw new ReviewedGATKException(
          "Warnings logged array must have at least "
              + WARNINGS_LOGGED_SIZE
              + " elements, but has "
              + warningsLogged.length);
    }

    if (!(walker instanceof HaplotypeCaller) && !(walker instanceof MuTect2)) {
      if (!warningsLogged[0]) {
        logger.warn(
            annotation + ANNOTATION_HC_WARN_MSG + ", not " + walker.getClass().getSimpleName());
        warningsLogged[0] = true;
      }
      return false;
    }

    if (map == null) {
      if (!warningsLogged[1]) {
        logger.warn(
            "Annotation will not be calculated, can only be used with likelihood based annotations in the HaplotypeCaller");
        warningsLogged[1] = true;
      }
      return false;
    }

    if (!g.isCalled()) {
      if (!warningsLogged[2]) {
        logger.warn("Annotation will not be calculated, genotype is not called");
        warningsLogged[2] = true;
      }
      return false;
    }

    return true;
  }
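The warningsLogged array lets each caller emit each of the three warning types at most once across many calls. A minimal self-contained sketch of that log-once pattern follows; the slot assignment and message text are illustrative, and System.err stands in for the GATK Logger.

public class WarnOnceSketch {
  // Mirrors the warningsLogged contract above: one slot per warning type
  // (wrong walker, missing likelihood map, uncalled genotype), sized to the
  // documented minimum of 3 and allocated once per caller so each warning
  // is emitted at most once.
  private static final int WARNINGS_LOGGED_SIZE = 3;
  private final boolean[] warningsLogged = new boolean[WARNINGS_LOGGED_SIZE];

  void warnOnce(int slot, String message) {
    if (!warningsLogged[slot]) {
      System.err.println("WARN: " + message); // stand-in for logger.warn(...)
      warningsLogged[slot] = true;
    }
  }

  public static void main(String[] args) {
    WarnOnceSketch caller = new WarnOnceSketch();
    for (int i = 0; i < 3; i++) {
      // Logged only on the first iteration, just like warningsLogged[1] above.
      caller.warnOnce(1, "Annotation will not be calculated, likelihood map is null");
    }
  }
}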