// Overload function in GenotypeLikelihoodsCalculationModel so that, for an indel case,
// we consider a deletion as part of the pileup, so that per-sample DP will include
// deletions covering the event.
protected int getFilteredDepth(ReadBackedPileup pileup) {
    int count = 0;
    for (PileupElement p : pileup) {
        if (p.isDeletion() || BaseUtils.isRegularBase(p.getBase()))
            count++;
    }
    return count;
}
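// To see the effect of counting deletion-spanning reads, here is a minimal
// standalone sketch. MockElement, isRegularBase, and the sample pileup below
// are hypothetical stand-ins for GATK's PileupElement and BaseUtils, invented
// for illustration only.
import java.util.List;

public class FilteredDepthDemo {
    // Hypothetical stand-in for a GATK PileupElement.
    record MockElement(byte base, boolean isDeletion) {}

    // Simplified stand-in for BaseUtils.isRegularBase: accepts A/C/G/T only.
    static boolean isRegularBase(byte b) {
        return b == 'A' || b == 'C' || b == 'G' || b == 'T';
    }

    public static void main(String[] args) {
        // Five reads over an indel site: three carry regular bases, two span a deletion.
        List<MockElement> pileup = List.of(
                new MockElement((byte) 'A', false),
                new MockElement((byte) 'C', false),
                new MockElement((byte) 0, true),  // deletion-spanning read
                new MockElement((byte) 'T', false),
                new MockElement((byte) 0, true)); // deletion-spanning read

        int withDeletions = 0, withoutDeletions = 0;
        for (MockElement p : pileup) {
            if (p.isDeletion() || isRegularBase(p.base()))
                withDeletions++;                  // the overloaded predicate above
            if (isRegularBase(p.base()))
                withoutDeletions++;               // the base-only predicate
        }
        System.out.println("DP with deletions:    " + withDeletions);    // 5
        System.out.println("DP without deletions: " + withoutDeletions); // 3
    }
}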
private Map<String, Object> annotateSNP(AlignmentContext stratifiedContext, VariantContext vc) {
    if (!stratifiedContext.hasBasePileup())
        return null;

    HashMap<Byte, Integer> alleleCounts = new HashMap<Byte, Integer>();
    for (Allele allele : vc.getAlternateAlleles())
        alleleCounts.put(allele.getBases()[0], 0);

    ReadBackedPileup pileup = stratifiedContext.getBasePileup();
    int totalDepth = pileup.size();

    Map<String, Object> map = new HashMap<String, Object>();
    map.put(getKeyNames().get(0), totalDepth); // put total depth in right away
    if (totalDepth == 0)
        return map; // done, cannot compute FA at 0 coverage!!

    int mq0 = 0; // number of "ref" reads that are actually mq0
    for (PileupElement p : pileup) {
        if (p.getMappingQual() == 0) {
            mq0++;
            continue;
        }
        if (alleleCounts.containsKey(p.getBase())) // non-mq0 read and it's an alt
            alleleCounts.put(p.getBase(), alleleCounts.get(p.getBase()) + 1);
    }
    if (mq0 == totalDepth)
        return map; // if all reads are mq0, there is nothing left to do

    // we need to add the counts in the correct order
    String[] fracs = new String[alleleCounts.size()];
    for (int i = 0; i < vc.getAlternateAlleles().size(); i++) {
        fracs[i] = String.format("%.3f",
                ((float) alleleCounts.get(vc.getAlternateAllele(i).getBases()[0])) / (totalDepth - mq0));
    }

    map.put(getKeyNames().get(1), fracs);
    return map;
}
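// For intuition, the fraction written into FA above is simply
// altCount / (totalDepth - mq0), i.e. alt-supporting reads over MQ-passing
// depth. A minimal sketch of the same arithmetic with invented counts
// (AlleleFractionDemo and all numbers here are hypothetical):
import java.util.LinkedHashMap;
import java.util.Map;

public class AlleleFractionDemo {
    public static void main(String[] args) {
        // Invented example: 10 reads total, 2 of them MQ0,
        // and the alt base 'T' seen in 4 of the non-MQ0 reads.
        int totalDepth = 10;
        int mq0 = 2;
        Map<Byte, Integer> alleleCounts = new LinkedHashMap<>();
        alleleCounts.put((byte) 'T', 4);

        // Same formula as annotateSNP: alt count over the MQ-passing depth.
        for (Map.Entry<Byte, Integer> e : alleleCounts.entrySet()) {
            String frac = String.format("%.3f", (float) e.getValue() / (totalDepth - mq0));
            System.out.println((char) e.getKey().byteValue() + " FA = " + frac); // prints "T FA = 0.500"
        }
    }
}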