private static long[] getTimes(Movie m, Track track) {
    long[] syncSamples = track.getSyncSamples();
    long[] syncSampleTimes = new long[syncSamples.length];
    Queue<TimeToSampleBox.Entry> timeQueue =
        new LinkedList<TimeToSampleBox.Entry>(track.getDecodingTimeEntries());

    int currentSample = 1; // first syncsample is 1
    long currentDuration = 0;
    long currentDelta = 0;
    int currentSyncSampleIndex = 0;
    long left = 0;

    long timeScale = 1;
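    // If other tracks of the same handler type use a different timescale, scale the
    // sync sample times by the least common multiple of those timescales so the
    // values stay comparable across tracks.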
    for (Track track1 : m.getTracks()) {
      if (track1.getHandler().equals(track.getHandler())) {
        if (track1.getTrackMetaData().getTimescale() != track.getTrackMetaData().getTimescale()) {
          timeScale = lcm(timeScale, track1.getTrackMetaData().getTimescale());
        }
      }
    }

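    // Walk every sample up to the last sync sample, accumulating the decoding-time
    // deltas from the stts entries; whenever the sample number matches a sync sample,
    // record the accumulated duration scaled by timeScale.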
    while (currentSample <= syncSamples[syncSamples.length - 1]) {
      if (currentSample++ == syncSamples[currentSyncSampleIndex]) {
        syncSampleTimes[currentSyncSampleIndex++] = currentDuration * timeScale;
      }
      if (left-- == 0) {
        TimeToSampleBox.Entry entry = timeQueue.poll();
        left = entry.getCount() - 1;
        currentDelta = entry.getDelta();
      }
      currentDuration += currentDelta;
    }
    return syncSampleTimes;
  }
 /**
  * Calculates the timestamps of the sync samples of every track in the movie that
  * has the same handler type as the given track.
  *
  * @param movie the movie containing the tracks
  * @param track the track whose handler type selects the tracks to examine
  * @return one array of sync sample timestamps per matching track
  */
 public static List<long[]> getSyncSamplesTimestamps(Movie movie, Track track) {
   List<long[]> times = new LinkedList<long[]>();
   for (Track currentTrack : movie.getTracks()) {
     if (currentTrack.getHandler().equals(track.getHandler())) {
       long[] currentTrackSyncSamples = currentTrack.getSyncSamples();
       if (currentTrackSyncSamples != null && currentTrackSyncSamples.length > 0) {
         final long[] currentTrackTimes = getTimes(movie, currentTrack);
         times.add(currentTrackTimes);
       }
     }
   }
   return times;
 }
 public String getHandler() {
   return source.getHandler();
 }
  protected TrackBox createTrackBox(Track track, Movie movie, Map<Track, int[]> chunks) {

    TrackBox trackBox = new TrackBox();
    TrackHeaderBox tkhd = new TrackHeaderBox();

    tkhd.setEnabled(true);
    tkhd.setInMovie(true);
    tkhd.setInPreview(true);
    tkhd.setInPoster(true);
    tkhd.setMatrix(track.getTrackMetaData().getMatrix());

    tkhd.setAlternateGroup(track.getTrackMetaData().getGroup());
    tkhd.setCreationTime(track.getTrackMetaData().getCreationTime());

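    // tkhd duration: derived from the media duration (converted to the movie timescale)
    // when there is no edit list, otherwise from the sum of the edit segment durations.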
    if (track.getEdits() == null || track.getEdits().isEmpty()) {
      tkhd.setDuration(
          track.getDuration() * getTimescale(movie) / track.getTrackMetaData().getTimescale());
    } else {
      long d = 0;
      for (Edit edit : track.getEdits()) {
        d += (long) edit.getSegmentDuration();
      }
      tkhd.setDuration(d * track.getTrackMetaData().getTimescale());
    }

    tkhd.setHeight(track.getTrackMetaData().getHeight());
    tkhd.setWidth(track.getTrackMetaData().getWidth());
    tkhd.setLayer(track.getTrackMetaData().getLayer());
    tkhd.setModificationTime(new Date());
    tkhd.setTrackId(track.getTrackMetaData().getTrackId());
    tkhd.setVolume(track.getTrackMetaData().getVolume());

    trackBox.addBox(tkhd);

    trackBox.addBox(createEdts(track, movie));

    MediaBox mdia = new MediaBox();
    trackBox.addBox(mdia);
    MediaHeaderBox mdhd = new MediaHeaderBox();
    mdhd.setCreationTime(track.getTrackMetaData().getCreationTime());
    mdhd.setDuration(track.getDuration());
    mdhd.setTimescale(track.getTrackMetaData().getTimescale());
    mdhd.setLanguage(track.getTrackMetaData().getLanguage());
    mdia.addBox(mdhd);
    HandlerBox hdlr = new HandlerBox();
    mdia.addBox(hdlr);

    hdlr.setHandlerType(track.getHandler());

    MediaInformationBox minf = new MediaInformationBox();
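    // Pick the media header box that matches the track's handler type.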
    if (track.getHandler().equals("vide")) {
      minf.addBox(new VideoMediaHeaderBox());
    } else if (track.getHandler().equals("soun")) {
      minf.addBox(new SoundMediaHeaderBox());
    } else if (track.getHandler().equals("text")) {
      minf.addBox(new NullMediaHeaderBox());
    } else if (track.getHandler().equals("subt")) {
      minf.addBox(new SubtitleMediaHeaderBox());
    } else if (track.getHandler().equals("hint")) {
      minf.addBox(new HintMediaHeaderBox());
    } else if (track.getHandler().equals("sbtl")) {
      minf.addBox(new NullMediaHeaderBox());
    }

    // dinf: all that these three boxes tell us is that the actual
    // data is in the current file and not somewhere external
    DataInformationBox dinf = new DataInformationBox();
    DataReferenceBox dref = new DataReferenceBox();
    dinf.addBox(dref);
    DataEntryUrlBox url = new DataEntryUrlBox();
    url.setFlags(1);
    dref.addBox(url);
    minf.addBox(dinf);
    //

    Box stbl = createStbl(track, movie, chunks);
    minf.addBox(stbl);
    mdia.addBox(minf);

    return trackBox;
  }
  private TrackBox createTrackBox(Track track, Movie movie) {

    LOG.info("Creating TrackBox for " + track);
    TrackBox trackBox = new TrackBox();
    TrackHeaderBox tkhd = new TrackHeaderBox();
    tkhd.setVersion(1);
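    // tkhd flag bits: 0x1 = track enabled, 0x2 = in movie, 0x4 = in preview, 0x8 = in poster.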
    int flags = 0;
    if (track.isEnabled()) {
      flags += 1;
    }

    if (track.isInMovie()) {
      flags += 2;
    }

    if (track.isInPreview()) {
      flags += 4;
    }

    if (track.isInPoster()) {
      flags += 8;
    }
    tkhd.setFlags(flags);

    tkhd.setAlternateGroup(track.getTrackMetaData().getGroup());
    tkhd.setCreationTime(DateHelper.convert(track.getTrackMetaData().getCreationTime()));
    // We need to take the edit list box into account for the track header duration,
    // but as long as edit list boxes aren't supported it is sufficient to just
    // translate the media duration to the movie timescale.
    tkhd.setDuration(
        getDuration(track) * getTimescale(movie) / track.getTrackMetaData().getTimescale());
    tkhd.setHeight(track.getTrackMetaData().getHeight());
    tkhd.setWidth(track.getTrackMetaData().getWidth());
    tkhd.setLayer(track.getTrackMetaData().getLayer());
    tkhd.setModificationTime(DateHelper.convert(new Date()));
    tkhd.setTrackId(track.getTrackMetaData().getTrackId());
    tkhd.setVolume(track.getTrackMetaData().getVolume());
    trackBox.addBox(tkhd);

    /*
            EditBox edit = new EditBox();
            EditListBox editListBox = new EditListBox();
            editListBox.setEntries(Collections.singletonList(
                    new EditListBox.Entry(editListBox, (long) (track.getTrackMetaData().getStartTime() * getTimescale(movie)), -1, 1)));
            edit.addBox(editListBox);
            trackBox.addBox(edit);
    */

    MediaBox mdia = new MediaBox();
    trackBox.addBox(mdia);
    MediaHeaderBox mdhd = new MediaHeaderBox();
    mdhd.setCreationTime(DateHelper.convert(track.getTrackMetaData().getCreationTime()));
    mdhd.setDuration(getDuration(track));
    mdhd.setTimescale(track.getTrackMetaData().getTimescale());
    mdhd.setLanguage(track.getTrackMetaData().getLanguage());
    mdia.addBox(mdhd);
    HandlerBox hdlr = new HandlerBox();
    mdia.addBox(hdlr);

    hdlr.setHandlerType(track.getHandler());

    MediaInformationBox minf = new MediaInformationBox();
    minf.addBox(track.getMediaHeaderBox());

    // dinf: all that these three boxes tell us is that the actual
    // data is in the current file and not somewhere external
    DataInformationBox dinf = new DataInformationBox();
    DataReferenceBox dref = new DataReferenceBox();
    dinf.addBox(dref);
    DataEntryUrlBox url = new DataEntryUrlBox();
    url.setFlags(1);
    dref.addBox(url);
    minf.addBox(dinf);
    //

    SampleTableBox stbl = new SampleTableBox();

    stbl.addBox(track.getSampleDescriptionBox());

    List<TimeToSampleBox.Entry> decodingTimeToSampleEntries = track.getDecodingTimeEntries();
    if (decodingTimeToSampleEntries != null && !track.getDecodingTimeEntries().isEmpty()) {
      TimeToSampleBox stts = new TimeToSampleBox();
      stts.setEntries(track.getDecodingTimeEntries());
      stbl.addBox(stts);
    }

    List<CompositionTimeToSample.Entry> compositionTimeToSampleEntries =
        track.getCompositionTimeEntries();
    if (compositionTimeToSampleEntries != null && !compositionTimeToSampleEntries.isEmpty()) {
      CompositionTimeToSample ctts = new CompositionTimeToSample();
      ctts.setEntries(compositionTimeToSampleEntries);
      stbl.addBox(ctts);
    }

    long[] syncSamples = track.getSyncSamples();
    if (syncSamples != null && syncSamples.length > 0) {
      SyncSampleBox stss = new SyncSampleBox();
      stss.setSampleNumber(syncSamples);
      stbl.addBox(stss);
    }

    if (track.getSampleDependencies() != null && !track.getSampleDependencies().isEmpty()) {
      SampleDependencyTypeBox sdtp = new SampleDependencyTypeBox();
      sdtp.setEntries(track.getSampleDependencies());
      stbl.addBox(sdtp);
    }
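    // Pre-compute the samples-per-chunk layout of every track; the interleaved
    // chunk offsets calculated below need all of them.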
    HashMap<Track, int[]> track2ChunkSizes = new HashMap<Track, int[]>();
    for (Track current : movie.getTracks()) {
      track2ChunkSizes.put(current, getChunkSizes(current, movie));
    }
    int[] tracksChunkSizes = track2ChunkSizes.get(track);

    SampleToChunkBox stsc = new SampleToChunkBox();
    stsc.setEntries(new LinkedList<SampleToChunkBox.Entry>());
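    // stsc is run-length encoded: a new entry is written only when the
    // samples-per-chunk value changes.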
    long lastChunkSize = Integer.MIN_VALUE; // guarantees the first chunk never matches the previous size
    for (int i = 0; i < tracksChunkSizes.length; i++) {
      // The sample description index references the sample description box
      // that describes the samples of this chunk. My tracks cannot have more
      // than one sample description box, therefore 1 is always right.
      // The first chunk has the number '1'.
      if (lastChunkSize != tracksChunkSizes[i]) {
        stsc.getEntries().add(new SampleToChunkBox.Entry(i + 1, tracksChunkSizes[i], 1));
        lastChunkSize = tracksChunkSizes[i];
      }
    }
    stbl.addBox(stsc);

    SampleSizeBox stsz = new SampleSizeBox();
    stsz.setSampleSizes(track2SampleSizes.get(track));

    stbl.addBox(stsz);
    // The ChunkOffsetBox we create here is just a stub
    // since we haven't created the whole structure we can't tell where the
    // first chunk starts (mdat box). So I just let the chunk offset
    // start at zero and I will add the mdat offset later.
    StaticChunkOffsetBox stco = new StaticChunkOffsetBox();
    this.chunkOffsetBoxes.add(stco);
    long offset = 0;
    long[] chunkOffset = new long[tracksChunkSizes.length];
    // all tracks have the same number of chunks
    if (LOG.isLoggable(Level.FINE)) {
      LOG.fine("Calculating chunk offsets for track_" + track.getTrackMetaData().getTrackId());
    }

    for (int i = 0; i < tracksChunkSizes.length; i++) {
      // The file layout will be:
      // chunk_1_track_1,... ,chunk_1_track_n, chunk_2_track_1,... ,chunk_2_track_n, ... ,
      // chunk_m_track_1,... ,chunk_m_track_n
      // calculating the offsets
      if (LOG.isLoggable(Level.FINER)) {
        LOG.finer(
            "Calculating chunk offsets for track_"
                + track.getTrackMetaData().getTrackId()
                + " chunk "
                + i);
      }
      for (Track current : movie.getTracks()) {
        if (LOG.isLoggable(Level.FINEST)) {
          LOG.finest("Adding offsets of track_" + current.getTrackMetaData().getTrackId());
        }
        int[] chunkSizes = track2ChunkSizes.get(current);
        long firstSampleOfChunk = 0;
        for (int j = 0; j < i; j++) {
          firstSampleOfChunk += chunkSizes[j];
        }
        if (current == track) {
          chunkOffset[i] = offset;
        }
        for (int j = l2i(firstSampleOfChunk); j < firstSampleOfChunk + chunkSizes[i]; j++) {
          offset += track2SampleSizes.get(current)[j];
        }
      }
    }
    stco.setChunkOffsets(chunkOffset);
    stbl.addBox(stco);
    minf.addBox(stbl);
    mdia.addBox(minf);

    return trackBox;
  }
  /**
   * Gets an array of sample numbers that are meant to be the first sample of each chunk or
   * fragment.
   *
   * @param track concerned track
   * @param movie the context of the track
   * @return an array containing the ordinal of each fragment's first sample
   */
  public long[] sampleNumbers(Track track, Movie movie) {
    if ("vide".equals(track.getHandler())) {
      if (track.getSyncSamples() != null && track.getSyncSamples().length > 0) {
        List<long[]> times = getSyncSamplesTimestamps(movie, track);
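        // Fragment at the sync samples whose timestamps occur in every track with the
        // same handler (the common indices), so all of these tracks are cut at the
        // same points in time.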
        return getCommonIndices(
            track.getSyncSamples(),
            getTimes(movie, track),
            times.toArray(new long[times.size()][]));
      } else {
        throw new RuntimeException(
            "Video Tracks need sync samples. Only tracks other than video may have no sync samples.");
      }
    } else if ("soun".equals(track.getHandler())) {
      Track referenceTrack = null;
      for (Track candidate : movie.getTracks()) {
        if (candidate.getSyncSamples() != null
            && "vide".equals(candidate.getHandler())
            && candidate.getSyncSamples().length > 0) {
          referenceTrack = candidate;
        }
      }
      if (referenceTrack != null) {

        // Gets the reference track's fragment start samples (the first sample of each fragment)
        long[] refSyncSamples = sampleNumbers(referenceTrack, movie);

        int refSampleCount = referenceTrack.getSamples().size();

        long[] syncSamples = new long[refSyncSamples.length];
        long minSampleRate = 192000;
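        // Map the video fragment start samples onto the audio track with the lowest
        // sample rate; minSampleRate starts at 192000 so any audio track with a lower
        // rate replaces it.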
        for (Track testTrack : movie.getTracks()) {
          if ("soun".equals(testTrack.getHandler())) {
            AudioSampleEntry ase =
                (AudioSampleEntry) testTrack.getSampleDescriptionBox().getSampleEntry();
            if (ase.getSampleRate() < minSampleRate) {
              minSampleRate = ase.getSampleRate();
              long sc = testTrack.getSamples().size();
              double stretch = (double) sc / refSampleCount;

              for (int i = 0; i < syncSamples.length; i++) {
                int start = (int) Math.ceil(stretch * (refSyncSamples[i] - 1)) + 1;
                syncSamples[i] = start;
                // The stretch makes sure that there are as many audio chunks as video chunks!
              }
            }
          }
        }
        AudioSampleEntry ase = (AudioSampleEntry) track.getSampleDescriptionBox().getSampleEntry();
        double factor = (double) ase.getSampleRate() / (double) minSampleRate;
        if (factor != Math.rint(factor)) { // Not an integer
          throw new RuntimeException(
              "Sample rates must be a multiple of the lowest sample rate to create a correct file!");
        }
        for (int i = 1; i < syncSamples.length; i++) {
          syncSamples[i] = (int) (1 + (syncSamples[i] - 1) * factor);
        }
        return syncSamples;
      }
      throw new RuntimeException(
          "There was absolutely no Track with sync samples. I can't work with that!");
    } else {
      // Ok, my track has no sync samples - let's find one with sync samples.
      for (Track candidate : movie.getTracks()) {
        if (candidate.getSyncSamples() != null && candidate.getSyncSamples().length > 0) {
          long[] refSyncSamples = sampleNumbers(candidate, movie);
          int refSampleCount = candidate.getSamples().size();

          long[] syncSamples = new long[refSyncSamples.length];
          long sc = track.getSamples().size();
          double stretch = (double) sc / refSampleCount;

          for (int i = 0; i < syncSamples.length; i++) {
            int start = (int) Math.ceil(stretch * (refSyncSamples[i] - 1)) + 1;
            syncSamples[i] = start;
            // The stretch makes sure that there are as many audio chunks as video chunks!
          }
          return syncSamples;
        }
      }
      throw new RuntimeException(
          "There was absolutely no Track with sync samples. I can't work with that!");
    }
  }
  @Override
  protected List<Box> createMoofMdat(Movie movie) {
    List<Box> moofsMdats = new LinkedList<Box>();
    HashMap<Track, long[]> intersectionMap = new HashMap<Track, long[]>();

    int maxNumberOfFragments = 0;
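    // Determine the fragment start samples of every track; the track with the most
    // fragments dictates how many cycles are needed.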
    for (Track track : movie.getTracks()) {
      long[] intersects = intersectionFinder.sampleNumbers(track);
      intersectionMap.put(track, intersects);
      maxNumberOfFragments = Math.max(maxNumberOfFragments, intersects.length);
    }

    int sequence = 1;
    // this loop has two indices: the fragment number (cycle) and the track within that fragment

    for (int cycle = 0; cycle < maxNumberOfFragments; cycle++) {

      final List<Track> sortedTracks =
          sortTracksInSequence(movie.getTracks(), cycle, intersectionMap);

      for (Track track : sortedTracks) {
        if (getAllowedHandlers().isEmpty() || getAllowedHandlers().contains(track.getHandler())) {
          long[] startSamples = intersectionMap.get(track);
          sequence = createFragment(moofsMdats, track, startSamples, cycle, sequence);
        }
      }
    }

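    // Collect the per-fragment sidx boxes, remember the position of the first one and
    // merge them into a single sidx that is re-inserted at that position.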
    List<SegmentIndexBox> sidx_boxes = new LinkedList<SegmentIndexBox>();

    int inserter = 0;
    List<Box> newboxes = new ArrayList<Box>();
    int counter = 0;
    SegmentIndexBox sidx = new SegmentIndexBox();

    for (int i = 0; i < moofsMdats.size(); i++) {

      if (moofsMdats.get(i).getType().equals("sidx")) {
        sidx_boxes.add((SegmentIndexBox) moofsMdats.get(i));
        counter++;
        if (counter == 1) {
          inserter = i;
        }
      } else {
        newboxes.add(moofsMdats.get(i));
      }
    }
    long earliestPresentationTime = sidx_boxes.get(0).getEarliestPresentationTime();
    if (earliestPresentationTime < 0) {
      System.err.println(
          "negative earliest_presentation_time in sidx. Setting to 0. May cause sync issues");
      earliestPresentationTime = 0;
    }
    sidx.setEarliestPresentationTime(earliestPresentationTime);
    sidx.setFirstOffset(sidx_boxes.get(0).getFirstOffset());
    sidx.setReferenceId(sidx_boxes.get(0).getReferenceId());
    sidx.setTimeScale(sidx_boxes.get(0).getTimeScale());
    sidx.setFlags(sidx_boxes.get(0).getFlags());
    List<SegmentIndexBox.Entry> sidxbox_entries = new ArrayList<SegmentIndexBox.Entry>();
    for (SegmentIndexBox sidxbox : sidx_boxes) {
      List<SegmentIndexBox.Entry> entryfrag = sidxbox.getEntries();
      for (SegmentIndexBox.Entry entry : entryfrag) {
        sidxbox_entries.add(entry);
      }
    }

    sidx.setEntries(sidxbox_entries);
    newboxes.add(inserter, sidx);
    return newboxes;
  }