  /**
   * Builds the haplotype implied by the given variants: copies reference bases between
   * events and substitutes the first alternate allele at each variant site. Assumes the
   * variants are sorted by start position and fall within the reference window.
   *
   * @param sourceVCs  the variants to apply to the reference
   * @param refContext the reference context whose window spans the variants
   * @return the bases of the resulting haplotype
   */
  private byte[] generateHaplotype(
      final List<VariantContext> sourceVCs, final ReferenceContext refContext) {

    final StringBuilder sb = new StringBuilder();

    final int startPos = refContext.getWindow().getStart();
    int currentPos = startPos;
    final byte[] reference = refContext.getBases();

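    // walk the variants in start order, stitching reference bases and alternate alleles together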
    for (final VariantContext vc : sourceVCs) {
      int vcStart = vc.getStart();
      final int refAlleleLength = vc.getReference().length();

      // this is a deletion (whereas for other events the padding base isn't part of the position)
      if (refAlleleLength == vc.getEnd() - vc.getStart()) {
        vcStart++;
      }

      // add any missing reference context
      while (currentPos < vcStart) {
        sb.append((char) reference[currentPos++ - startPos]);
      }

      // add the alt allele
      sb.append(vc.getAlternateAllele(0).getBaseString());

      // skip the reference allele
      currentPos += refAlleleLength;
    }
    // add any missing reference context after the last variant
    final int stopPos = refContext.getWindow().getStop();
    while (currentPos < stopPos) {
      sb.append((char) reference[currentPos++ - startPos]);
    }

    return sb.toString().getBytes();
  }
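
  /**
   * Resolves differences between the two source call sets by haplotype: builds the
   * consensus haplotype implied by each source's variants, realigns both haplotypes to
   * the reference with Smith-Waterman, re-derives variant contexts from the alignments,
   * writes out the variants that are equal by haplotype, and then writes whatever
   * differences remain.
   *
   * @param refContext the reference context for the current window
   */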
  private void resolveByHaplotype(final ReferenceContext refContext) {

    final byte[] source1Haplotype = generateHaplotype(sourceVCs1, refContext);
    final byte[] source2Haplotype = generateHaplotype(sourceVCs2, refContext);

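    // realign each consensus haplotype to the reference with Smith-Waterman so that
    // variants can be re-derived from the resulting CIGARs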
    final SWPairwiseAlignment swConsensus1 =
        new SWPairwiseAlignment(
            refContext.getBases(), source1Haplotype, SW_MATCH, SW_MISMATCH, SW_GAP, SW_GAP_EXTEND);
    final SWPairwiseAlignment swConsensus2 =
        new SWPairwiseAlignment(
            refContext.getBases(), source2Haplotype, SW_MATCH, SW_MISMATCH, SW_GAP, SW_GAP_EXTEND);

    // protect against SW failures: toss alignments that are soft-clipped ("S" in the
    // CIGAR) or that span fewer than 20 reference bases
    if (swConsensus1.getCigar().toString().contains("S")
        || swConsensus1.getCigar().getReferenceLength() < 20
        || swConsensus2.getCigar().toString().contains("S")
        || swConsensus2.getCigar().getReferenceLength() < 20) {
      // TODO -- handle errors appropriately
      logger.debug("Bad SW alignment; aborting at " + refContext.getLocus());
      return;
    }

    // order results by start position
    final TreeMap<Integer, VariantContext> source1Map =
        new TreeMap<Integer, VariantContext>(
            HaplotypeCallerGenotypingEngine.generateVCsFromAlignment(
                new Haplotype(source1Haplotype, false, 0, swConsensus1.getCigar()),
                refContext.getBases(),
                refContext.getWindow(),
                source1));
    final TreeMap<Integer, VariantContext> source2Map =
        new TreeMap<Integer, VariantContext>(
            HaplotypeCallerGenotypingEngine.generateVCsFromAlignment(
                new Haplotype(source2Haplotype, false, 0, swConsensus2.getCigar()),
                refContext.getBases(),
                refContext.getWindow(),
                source2));
    if (source1Map.isEmpty() || source2Map.isEmpty()) {
      // TODO -- handle errors appropriately
      logger.debug("No source alleles; aborting at " + refContext.getLocus());
      return;
    }

    // create lists and test for equality
    final List<VariantContext> source1Alleles = new ArrayList<VariantContext>(source1Map.values());
    final List<VariantContext> source2Alleles = new ArrayList<VariantContext>(source2Map.values());

    writeAndPurgeAllEqualVariants(source1Alleles, source2Alleles, SAME_BY_HAPLOTYPE_STATUS);
    if (source1Alleles.isEmpty()) {
      writeAll(source2Alleles, source2, null);
    } else if (source2Alleles.isEmpty()) {
      writeAll(source1Alleles, source1, null);
    } else {
      writeDifferences(source1Alleles, source2Alleles);
    }
  }
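
  /**
   * Fetches the records bound to the variants track at the current locus and converts
   * those that can be converted into VariantContexts. Raw HapMap indels are
   * special-cased: their alleles and positions must first be reconciled against the
   * matching dbSNP record, and records that can't be reconciled are skipped.
   *
   * @param tracker the meta-data tracker at the current locus
   * @param ref     the reference context at the current locus
   * @return the VariantContexts that could be converted at this locus
   */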
  private Collection<VariantContext> getVariantContexts(
      final RefMetaDataTracker tracker, final ReferenceContext ref) {

    final List<Feature> features = tracker.getValues(variants, ref.getLocus());
    final List<VariantContext> vcs = new ArrayList<VariantContext>(features.size());

    for (Feature record : features) {
      if (VariantContextAdaptors.canBeConvertedToVariantContext(record)) {
        // we need to special-case the HapMap format because its indels aren't handled correctly
        if (record instanceof RawHapMapFeature) {

          // is it an indel?
          RawHapMapFeature hapmap = (RawHapMapFeature) record;
          if (hapmap.getAlleles()[0].equals(RawHapMapFeature.NULL_ALLELE_STRING)
              || hapmap.getAlleles()[1].equals(RawHapMapFeature.NULL_ALLELE_STRING)) {
            // get the dbsnp object corresponding to this record (needed to help us distinguish
            // between insertions and deletions)
            VariantContext dbsnpVC = getDbsnp(hapmap.getName());
            if (dbsnpVC == null || dbsnpVC.isMixed()) continue;

            Map<String, Allele> alleleMap = new HashMap<String, Allele>(2);
            alleleMap.put(
                RawHapMapFeature.DELETION,
                Allele.create(ref.getBase(), dbsnpVC.isSimpleInsertion()));
            alleleMap.put(
                RawHapMapFeature.INSERTION,
                Allele.create(
                    (char) ref.getBase() + hapmap.getAlleles()[1],
                    !dbsnpVC.isSimpleInsertion()));
            hapmap.setActualAlleles(alleleMap);

            // also, use the correct positioning for insertions
            hapmap.updatePosition(dbsnpVC.getStart());

            if (hapmap.getStart() < ref.getWindow().getStart()) {
              logger.warn(
                  "Hapmap record at "
                      + ref.getLocus()
                      + " represents an indel too large to be converted; skipping...");
              continue;
            }
          }
        }

        // ok, we might actually be able to turn this record into a variant context
        VariantContext vc =
            VariantContextAdaptors.toVariantContext(variants.getName(), record, ref);

        // sometimes the track has odd stuff in it that can't be converted
        if (vc != null) {
          vcs.add(vc);
        }
      }
    }

    return vcs;
  }