/**
 * Returns the sample description box ({@code stsd}) of this track.
 *
 * <p>Delegates to the underlying {@code source} track; no copy is made, so mutating the
 * returned box affects the source track as well.
 *
 * @return the source track's {@code SampleDescriptionBox}
 */
public SampleDescriptionBox getSampleDescriptionBox() {
   return source.getSampleDescriptionBox();
 }
  /**
   * Creates the complete {@code trak} box hierarchy (track header, media, media information and
   * sample table) for a single track of the given movie.
   *
   * <p>All tracks of {@code movie} are consulted because samples are interleaved chunk-wise in
   * the file ({@code chunk_1_track_1, ..., chunk_1_track_n, chunk_2_track_1, ...}), so this
   * track's chunk offsets depend on every other track's chunk sizes.
   *
   * @param track the track to describe
   * @param movie the movie the track belongs to; used for the movie timescale and for
   *     calculating interleaved chunk offsets
   * @return a fully populated {@code TrackBox}; its {@code stco} chunk offsets are stubs
   *     (relative to offset 0) that must be shifted by the mdat position later
   */
  private TrackBox createTrackBox(Track track, Movie movie) {

    LOG.info("Creating TrackBox for " + track);
    TrackBox trackBox = new TrackBox();
    TrackHeaderBox tkhd = new TrackHeaderBox();
    // Version 1 of tkhd carries 64-bit creation/modification times and duration.
    tkhd.setVersion(1);
    // Assemble the tkhd flag bits defined by ISO/IEC 14496-12.
    int flags = 0;
    if (track.isEnabled()) {
      flags |= 1; // track_enabled
    }

    if (track.isInMovie()) {
      flags |= 2; // track_in_movie
    }

    if (track.isInPreview()) {
      flags |= 4; // track_in_preview
    }

    if (track.isInPoster()) {
      flags |= 8; // track_in_poster
    }
    tkhd.setFlags(flags);

    tkhd.setAlternateGroup(track.getTrackMetaData().getGroup());
    tkhd.setCreationTime(DateHelper.convert(track.getTrackMetaData().getCreationTime()));
    // We need to take the edit list box into account for the track header duration,
    // but as long as edit list boxes are unsupported it is sufficient to
    // just translate the media duration to the movie timescale.
    tkhd.setDuration(
        getDuration(track) * getTimescale(movie) / track.getTrackMetaData().getTimescale());
    tkhd.setHeight(track.getTrackMetaData().getHeight());
    tkhd.setWidth(track.getTrackMetaData().getWidth());
    tkhd.setLayer(track.getTrackMetaData().getLayer());
    tkhd.setModificationTime(DateHelper.convert(new Date()));
    tkhd.setTrackId(track.getTrackMetaData().getTrackId());
    tkhd.setVolume(track.getTrackMetaData().getVolume());
    trackBox.addBox(tkhd);

    MediaBox mdia = new MediaBox();
    trackBox.addBox(mdia);
    MediaHeaderBox mdhd = new MediaHeaderBox();
    mdhd.setCreationTime(DateHelper.convert(track.getTrackMetaData().getCreationTime()));
    mdhd.setDuration(getDuration(track));
    mdhd.setTimescale(track.getTrackMetaData().getTimescale());
    mdhd.setLanguage(track.getTrackMetaData().getLanguage());
    mdia.addBox(mdhd);
    HandlerBox hdlr = new HandlerBox();
    mdia.addBox(hdlr);

    hdlr.setHandlerType(track.getHandler());

    MediaInformationBox minf = new MediaInformationBox();
    minf.addBox(track.getMediaHeaderBox());

    // dinf: all these three boxes tell us is that the actual
    // data is in the current file and not somewhere external
    DataInformationBox dinf = new DataInformationBox();
    DataReferenceBox dref = new DataReferenceBox();
    dinf.addBox(dref);
    DataEntryUrlBox url = new DataEntryUrlBox();
    url.setFlags(1); // flag 1: media data is in the same file as this box
    dref.addBox(url);
    minf.addBox(dinf);
    //

    SampleTableBox stbl = new SampleTableBox();

    // Use the overridable hook so subclasses can customize stsd creation.
    createStsd(track, stbl);

    List<TimeToSampleBox.Entry> decodingTimeToSampleEntries = track.getDecodingTimeEntries();
    if (decodingTimeToSampleEntries != null && !decodingTimeToSampleEntries.isEmpty()) {
      TimeToSampleBox stts = new TimeToSampleBox();
      stts.setEntries(decodingTimeToSampleEntries);
      stbl.addBox(stts);
    }

    List<CompositionTimeToSample.Entry> compositionTimeToSampleEntries =
        track.getCompositionTimeEntries();
    if (compositionTimeToSampleEntries != null && !compositionTimeToSampleEntries.isEmpty()) {
      CompositionTimeToSample ctts = new CompositionTimeToSample();
      ctts.setEntries(compositionTimeToSampleEntries);
      stbl.addBox(ctts);
    }

    long[] syncSamples = track.getSyncSamples();
    if (syncSamples != null && syncSamples.length > 0) {
      SyncSampleBox stss = new SyncSampleBox();
      stss.setSampleNumber(syncSamples);
      stbl.addBox(stss);
    }

    if (track.getSampleDependencies() != null && !track.getSampleDependencies().isEmpty()) {
      SampleDependencyTypeBox sdtp = new SampleDependencyTypeBox();
      sdtp.setEntries(track.getSampleDependencies());
      stbl.addBox(sdtp);
    }
    // Chunk sizes (samples per chunk) for every track; needed for interleaved offsets below.
    HashMap<Track, int[]> track2ChunkSizes = new HashMap<Track, int[]>();
    for (Track current : movie.getTracks()) {
      track2ChunkSizes.put(current, getChunkSizes(current, movie));
    }
    int[] tracksChunkSizes = track2ChunkSizes.get(track);

    SampleToChunkBox stsc = new SampleToChunkBox();
    stsc.setEntries(new LinkedList<SampleToChunkBox.Entry>());
    // Sentinel that cannot equal any real chunk size, so the first chunk always gets an entry.
    long lastChunkSize = Integer.MIN_VALUE;
    for (int i = 0; i < tracksChunkSizes.length; i++) {
      // The sample description index references the sample description box
      // that describes the samples of this chunk. My Tracks cannot have more
      // than one sample description box. Therefore 1 is always right.
      // stsc entries are run-length encoded: only emit an entry when the
      // samples-per-chunk value changes. Chunk numbering is 1-based.
      if (lastChunkSize != tracksChunkSizes[i]) {
        stsc.getEntries().add(new SampleToChunkBox.Entry(i + 1, tracksChunkSizes[i], 1));
        lastChunkSize = tracksChunkSizes[i];
      }
    }
    stbl.addBox(stsc);

    SampleSizeBox stsz = new SampleSizeBox();
    stsz.setSampleSizes(track2SampleSizes.get(track));

    stbl.addBox(stsz);
    // The ChunkOffsetBox we create here is just a stub
    // since we haven't created the whole structure we can't tell where the
    // first chunk starts (mdat box). So I just let the chunk offset
    // start at zero and I will add the mdat offset later.
    StaticChunkOffsetBox stco = new StaticChunkOffsetBox();
    this.chunkOffsetBoxes.add(stco);
    long offset = 0;
    long[] chunkOffset = new long[tracksChunkSizes.length];
    // all tracks have the same number of chunks
    if (LOG.isLoggable(Level.FINE)) {
      LOG.fine("Calculating chunk offsets for track_" + track.getTrackMetaData().getTrackId());
    }

    for (int i = 0; i < tracksChunkSizes.length; i++) {
      // The file layout will be:
      // chunk_1_track_1,... ,chunk_1_track_n, chunk_2_track_1,... ,chunk_2_track_n, ... ,
      // chunk_m_track_1,... ,chunk_m_track_n
      // so accumulate the sizes of all tracks' chunks that precede this track's chunk i.
      if (LOG.isLoggable(Level.FINER)) {
        LOG.finer(
            "Calculating chunk offsets for track_"
                + track.getTrackMetaData().getTrackId()
                + " chunk "
                + i);
      }
      for (Track current : movie.getTracks()) {
        if (LOG.isLoggable(Level.FINEST)) {
          LOG.finest("Adding offsets of track_" + current.getTrackMetaData().getTrackId());
        }
        int[] chunkSizes = track2ChunkSizes.get(current);
        // Index of the first sample of chunk i = sum of samples in all earlier chunks.
        long firstSampleOfChunk = 0;
        for (int j = 0; j < i; j++) {
          firstSampleOfChunk += chunkSizes[j];
        }
        if (current == track) {
          chunkOffset[i] = offset;
        }
        for (int j = l2i(firstSampleOfChunk); j < firstSampleOfChunk + chunkSizes[i]; j++) {
          offset += track2SampleSizes.get(current)[j];
        }
      }
    }
    stco.setChunkOffsets(chunkOffset);
    stbl.addBox(stco);
    minf.addBox(stbl);
    mdia.addBox(minf);

    return trackBox;
  }
 /**
  * Adds the track's sample description box ({@code stsd}) to the given sample table.
  *
  * <p>Extension point: {@code protected} so subclasses can override how the stsd box is
  * created or populated.
  *
  * @param track the track whose sample description box is added
  * @param stbl the sample table box ({@code stbl}) receiving the box
  */
 protected void createStsd(Track track, SampleTableBox stbl) {
   stbl.addBox(track.getSampleDescriptionBox());
 }
  /**
   * Gets an array of sample numbers that are meant to be the first sample of each chunk or
   * fragment.
   *
   * <p>Strategy by handler type: video tracks ("vide") fragment at their own sync samples
   * (intersected across tracks); audio tracks ("soun") derive fragment starts from a video
   * reference track, scaled by sample count and sample rate; all other tracks derive them
   * from any track that has sync samples, scaled by sample count.
   *
   * @param track concerned track
   * @param movie the context of the track
   * @return an array containing the ordinal of each fragment's first sample (1-based)
   * @throws RuntimeException if the track is a video track without sync samples, if no track
   *     with sync samples exists, or if audio sample rates are not integer multiples of the
   *     lowest sample rate
   */
  public long[] sampleNumbers(Track track, Movie movie) {
    if ("vide".equals(track.getHandler())) {
      if (track.getSyncSamples() != null && track.getSyncSamples().length > 0) {
        // Fragment at sync samples that all video tracks share (common timestamps),
        // so fragments of parallel tracks start at the same instant.
        List<long[]> times = getSyncSamplesTimestamps(movie, track);
        return getCommonIndices(
            track.getSyncSamples(),
            getTimes(movie, track),
            times.toArray(new long[times.size()][]));
      } else {
        throw new RuntimeException(
            "Video Tracks need sync samples. Only tracks other than video may have no sync samples.");
      }
    } else if ("soun".equals(track.getHandler())) {
      // Find a video track with sync samples to use as the fragmentation reference.
      // Note: the loop keeps the LAST matching candidate.
      Track referenceTrack = null;
      for (Track candidate : movie.getTracks()) {
        if (candidate.getSyncSamples() != null
            && "vide".equals(candidate.getHandler())
            && candidate.getSyncSamples().length > 0) {
          referenceTrack = candidate;
        }
      }
      if (referenceTrack != null) {

        // Gets the reference track's fragment start sample numbers (recursive call
        // takes the "vide" branch above).
        long[] refSyncSamples = sampleNumbers(referenceTrack, movie);

        int refSampleCount = referenceTrack.getSamples().size();

        long[] syncSamples = new long[refSyncSamples.length];
        // Upper bound on the minimum audio sample rate; lowered to the smallest rate
        // actually found below. NOTE(review): an audio track at >= 192000 Hz would never
        // populate syncSamples — presumably rates that high are not expected here.
        long minSampleRate = 192000;
        for (Track testTrack : movie.getTracks()) {
          if ("soun".equals(testTrack.getHandler())) {
            AudioSampleEntry ase =
                (AudioSampleEntry) testTrack.getSampleDescriptionBox().getSampleEntry();
            if (ase.getSampleRate() < minSampleRate) {
              minSampleRate = ase.getSampleRate();
              long sc = testTrack.getSamples().size();
              // Ratio of this audio track's samples to the reference track's samples;
              // maps a reference sample ordinal onto this track's ordinals.
              double stretch = (double) sc / refSampleCount;

              for (int i = 0; i < syncSamples.length; i++) {
                // ceil keeps the mapped start at or after the reference start;
                // -1/+1 converts between 1-based ordinals and 0-based arithmetic.
                int start = (int) Math.ceil(stretch * (refSyncSamples[i] - 1)) + 1;
                syncSamples[i] = start;
                // The Stretch makes sure that there are as much audio and video chunks!
              }
            }
          }
        }
        AudioSampleEntry ase = (AudioSampleEntry) track.getSampleDescriptionBox().getSampleEntry();
        // Rescale from the lowest-rate track's sample numbering to this track's rate.
        double factor = (double) ase.getSampleRate() / (double) minSampleRate;
        if (factor != Math.rint(factor)) { // Not an integer
          throw new RuntimeException(
              "Sample rates must be a multiple of the lowest sample rate to create a correct file!");
        }
        // Index 0 is skipped: the first fragment always starts at sample 1, which the
        // factor rescaling maps to itself.
        for (int i = 1; i < syncSamples.length; i++) {
          syncSamples[i] = (int) (1 + (syncSamples[i] - 1) * factor);
        }
        return syncSamples;
      }
      throw new RuntimeException(
          "There was absolutely no Track with sync samples. I can't work with that!");
    } else {
      // Ok, my track has no sync samples - let's find one with sync samples.
      for (Track candidate : movie.getTracks()) {
        if (candidate.getSyncSamples() != null && candidate.getSyncSamples().length > 0) {
          long[] refSyncSamples = sampleNumbers(candidate, movie);
          int refSampleCount = candidate.getSamples().size();

          long[] syncSamples = new long[refSyncSamples.length];
          long sc = track.getSamples().size();
          // Scale reference fragment starts proportionally to this track's sample count.
          double stretch = (double) sc / refSampleCount;

          for (int i = 0; i < syncSamples.length; i++) {
            int start = (int) Math.ceil(stretch * (refSyncSamples[i] - 1)) + 1;
            syncSamples[i] = start;
            // The Stretch makes sure that there are as much audio and video chunks!
          }
          return syncSamples;
        }
      }
      throw new RuntimeException(
          "There was absolutely no Track with sync samples. I can't work with that!");
    }
  }