// Example 1
 /**
  * Snaps the desired crop position to the nearest sync (key) sample time, since decoding can
  * only start at a sync sample. Afterwards, we are ready to crop each track of the video.
  *
  * <p>First pass: walk every sample of the track, accumulating presentation time, and record the
  * time of each sync sample. Second pass: pick either the first sync-sample time after the
  * requested position ({@code nextPlace == true}) or the last one at/before it
  * ({@code nextPlace == false}).
  *
  * @param track the track whose sync samples are inspected; must have at least one sync sample
  *     (callers check {@code getSyncSamples().length > 0} before calling)
  * @param cropVidPlace the desired crop position, in seconds
  * @param nextPlace true to snap forward to the next sync sample, false to snap backward
  * @return the corrected crop position in seconds
  */
 private double correctTimeToNextSyncSample(Track track, double cropVidPlace, boolean nextPlace) {
   double[] timeOfSyncSamples = new double[track.getSyncSamples().length];
   long currentSample = 0;
   double currentTime = 0;
   for (int i = 0; i < track.getDecodingTimeEntries().size(); i++) {
     TimeToSampleBox.Entry entry = track.getDecodingTimeEntries().get(i);
     for (int j = 0; j < entry.getCount(); j++) {
       // Sample numbers are 1-based while currentSample is 0-based, therefore +1.
       // Search once and reuse the index (the original ran the identical binary search twice
       // per sample: once for the test, once for the assignment).
       int syncSampleIndex = Arrays.binarySearch(track.getSyncSamples(), currentSample + 1);
       if (syncSampleIndex >= 0) {
         timeOfSyncSamples[syncSampleIndex] = currentTime;
       }
       // getDelta() is in timescale ticks; divide to convert to seconds.
       currentTime += (double) entry.getDelta() / (double) track.getTrackMetaData().getTimescale();
       currentSample++;
     }
   }
   double previous = 0;
   for (double timeOfSyncSample : timeOfSyncSamples) {
     if (timeOfSyncSample > cropVidPlace) {
       return nextPlace ? timeOfSyncSample : previous;
     }
     previous = timeOfSyncSample;
   }
   // The requested position lies at or after the last sync sample; snap to the last one.
   return timeOfSyncSamples[timeOfSyncSamples.length - 1];
 }
// Example 2
 /**
  * Computes the total duration of a track, expressed in the track's own timescale, by summing
  * the contribution of every time-to-sample entry (sample count times per-sample delta).
  *
  * @param track the track whose decoding-time entries are summed
  * @return the accumulated duration in timescale ticks
  */
 protected static long getDuration(Track track) {
   long total = 0;
   List<TimeToSampleBox.Entry> entries = track.getDecodingTimeEntries();
   for (int i = 0; i < entries.size(); i++) {
     TimeToSampleBox.Entry e = entries.get(i);
     total += e.getCount() * e.getDelta();
   }
   return total;
 }
  /**
   * Computes, for every sync sample of {@code track}, its decoding time scaled by a timescale
   * common to all tracks that share the same handler.
   *
   * <p>NOTE(review): the accumulated duration is multiplied by the lcm'd {@code timeScale} but
   * never divided by the track's own timescale here — confirm callers expect this scaling.
   *
   * @param m the movie containing {@code track}; scanned to derive the common timescale
   * @param track the track whose sync-sample times are computed
   * @return one scaled decoding time per entry of {@code track.getSyncSamples()}
   */
  private static long[] getTimes(Movie m, Track track) {
    long[] syncSamples = track.getSyncSamples();
    long[] syncSampleTimes = new long[syncSamples.length];
    // Queue of time-to-sample runs; each entry covers getCount() samples of getDelta() ticks.
    Queue<TimeToSampleBox.Entry> timeQueue =
        new LinkedList<TimeToSampleBox.Entry>(track.getDecodingTimeEntries());

    int currentSample = 1; // first syncsample is 1
    long currentDuration = 0;
    long currentDelta = 0;
    int currentSyncSampleIndex = 0;
    long left = 0;

    // Build a timescale common to same-handler tracks: lcm of every timescale that differs
    // from this track's own. Stays 1 when all same-handler tracks share one timescale.
    long timeScale = 1;
    for (Track track1 : m.getTracks()) {
      if (track1.getHandler().equals(track.getHandler())) {
        if (track1.getTrackMetaData().getTimescale() != track.getTrackMetaData().getTimescale()) {
          timeScale = lcm(timeScale, track1.getTrackMetaData().getTimescale());
        }
      }
    }

    // Walk samples 1..lastSyncSample, accumulating decoding time; when the walker hits the
    // next sync sample number, record its (scaled) time.
    while (currentSample <= syncSamples[syncSamples.length - 1]) {
      if (currentSample++ == syncSamples[currentSyncSampleIndex]) {
        syncSampleTimes[currentSyncSampleIndex++] = currentDuration * timeScale;
      }
      // 'left' counts remaining samples in the current run; when exhausted, pull the next run.
      // NOTE(review): poll() returns null if the entries run out before the last sync sample —
      // presumably the stts table always covers all samples; verify against callers.
      if (left-- == 0) {
        TimeToSampleBox.Entry entry = timeQueue.poll();
        left = entry.getCount() - 1;
        currentDelta = entry.getDelta();
      }
      currentDuration += currentDelta;
    }
    return syncSampleTimes;
  }
// Example 4
  /**
   * Builds the complete {@code trak} box hierarchy for one track: track header (tkhd), media
   * information (mdia/minf) and the whole sample table (stbl) describing sample timing, sync
   * samples, chunking and chunk offsets.
   *
   * <p>The chunk offsets written here start at zero; the real mdat offset is added later (see
   * the stco comment below).
   *
   * @param track the track to describe
   * @param movie the movie the track belongs to; needed because chunks of all tracks are
   *     interleaved in the file, so every track's chunk sizes affect this track's offsets
   * @return the fully populated {@link TrackBox}
   */
  private TrackBox createTrackBox(Track track, Movie movie) {

    LOG.info("Creating Mp4TrackImpl " + track);
    TrackBox trackBox = new TrackBox();
    TrackHeaderBox tkhd = new TrackHeaderBox();
    tkhd.setVersion(1);
    // tkhd flags bit set: 1 = enabled, 2 = in movie, 4 = in preview, 8 = in poster.
    int flags = 0;
    if (track.isEnabled()) {
      flags += 1;
    }

    if (track.isInMovie()) {
      flags += 2;
    }

    if (track.isInPreview()) {
      flags += 4;
    }

    if (track.isInPoster()) {
      flags += 8;
    }
    tkhd.setFlags(flags);

    tkhd.setAlternateGroup(track.getTrackMetaData().getGroup());
    tkhd.setCreationTime(DateHelper.convert(track.getTrackMetaData().getCreationTime()));
    // We need to take edit list box into account in trackheader duration
    // but as long as I don't support edit list boxes it is sufficient to
    // just translate media duration to movie timescale
    tkhd.setDuration(
        getDuration(track) * getTimescale(movie) / track.getTrackMetaData().getTimescale());
    tkhd.setHeight(track.getTrackMetaData().getHeight());
    tkhd.setWidth(track.getTrackMetaData().getWidth());
    tkhd.setLayer(track.getTrackMetaData().getLayer());
    tkhd.setModificationTime(DateHelper.convert(new Date()));
    tkhd.setTrackId(track.getTrackMetaData().getTrackId());
    tkhd.setVolume(track.getTrackMetaData().getVolume());
    trackBox.addBox(tkhd);

    /*
            EditBox edit = new EditBox();
            EditListBox editListBox = new EditListBox();
            editListBox.setEntries(Collections.singletonList(
                    new EditListBox.Entry(editListBox, (long) (track.getTrackMetaData().getStartTime() * getTimescale(movie)), -1, 1)));
            edit.addBox(editListBox);
            trackBox.addBox(edit);
    */

    MediaBox mdia = new MediaBox();
    trackBox.addBox(mdia);
    MediaHeaderBox mdhd = new MediaHeaderBox();
    mdhd.setCreationTime(DateHelper.convert(track.getTrackMetaData().getCreationTime()));
    mdhd.setDuration(getDuration(track));
    mdhd.setTimescale(track.getTrackMetaData().getTimescale());
    mdhd.setLanguage(track.getTrackMetaData().getLanguage());
    mdia.addBox(mdhd);
    HandlerBox hdlr = new HandlerBox();
    mdia.addBox(hdlr);

    hdlr.setHandlerType(track.getHandler());

    MediaInformationBox minf = new MediaInformationBox();
    minf.addBox(track.getMediaHeaderBox());

    // dinf: all these three boxes tell us is that the actual
    // data is in the current file and not somewhere external
    DataInformationBox dinf = new DataInformationBox();
    DataReferenceBox dref = new DataReferenceBox();
    dinf.addBox(dref);
    DataEntryUrlBox url = new DataEntryUrlBox();
    url.setFlags(1);
    dref.addBox(url);
    minf.addBox(dinf);

    SampleTableBox stbl = new SampleTableBox();

    stbl.addBox(track.getSampleDescriptionBox());

    // stts: decoding time-to-sample table. Reuse the local captured above instead of calling
    // the getter again (the original re-fetched track.getDecodingTimeEntries() twice).
    List<TimeToSampleBox.Entry> decodingTimeToSampleEntries = track.getDecodingTimeEntries();
    if (decodingTimeToSampleEntries != null && !decodingTimeToSampleEntries.isEmpty()) {
      TimeToSampleBox stts = new TimeToSampleBox();
      stts.setEntries(decodingTimeToSampleEntries);
      stbl.addBox(stts);
    }

    // ctts: composition time offsets; only present when the track has any.
    List<CompositionTimeToSample.Entry> compositionTimeToSampleEntries =
        track.getCompositionTimeEntries();
    if (compositionTimeToSampleEntries != null && !compositionTimeToSampleEntries.isEmpty()) {
      CompositionTimeToSample ctts = new CompositionTimeToSample();
      ctts.setEntries(compositionTimeToSampleEntries);
      stbl.addBox(ctts);
    }

    // stss: sync (key frame) sample numbers; omitted when the track reports none.
    long[] syncSamples = track.getSyncSamples();
    if (syncSamples != null && syncSamples.length > 0) {
      SyncSampleBox stss = new SyncSampleBox();
      stss.setSampleNumber(syncSamples);
      stbl.addBox(stss);
    }

    if (track.getSampleDependencies() != null && !track.getSampleDependencies().isEmpty()) {
      SampleDependencyTypeBox sdtp = new SampleDependencyTypeBox();
      sdtp.setEntries(track.getSampleDependencies());
      stbl.addBox(sdtp);
    }

    // Chunk sizes (samples per chunk) for every track of the movie.
    HashMap<Track, int[]> track2ChunkSizes = new HashMap<Track, int[]>();
    for (Track current : movie.getTracks()) {
      track2ChunkSizes.put(current, getChunkSizes(current, movie));
    }
    int[] tracksChunkSizes = track2ChunkSizes.get(track);

    // stsc: sample-to-chunk map, run-length encoded — a new entry only when the chunk size
    // changes relative to the previous chunk.
    SampleToChunkBox stsc = new SampleToChunkBox();
    stsc.setEntries(new LinkedList<SampleToChunkBox.Entry>());
    long lastChunkSize = Integer.MIN_VALUE; // to be sure the first chunk hasn't got the same size
    for (int i = 0; i < tracksChunkSizes.length; i++) {
      // The sample description index references the sample description box
      // that describes the samples of this chunk. My Tracks cannot have more
      // than one sample description box. Therefore 1 is always right
      // the first chunk has the number '1'
      if (lastChunkSize != tracksChunkSizes[i]) {
        stsc.getEntries().add(new SampleToChunkBox.Entry(i + 1, tracksChunkSizes[i], 1));
        lastChunkSize = tracksChunkSizes[i];
      }
    }
    stbl.addBox(stsc);

    SampleSizeBox stsz = new SampleSizeBox();
    stsz.setSampleSizes(track2SampleSizes.get(track));

    stbl.addBox(stsz);
    // The ChunkOffsetBox we create here is just a stub
    // since we haven't created the whole structure we can't tell where the
    // first chunk starts (mdat box). So I just let the chunk offset
    // start at zero and I will add the mdat offset later.
    StaticChunkOffsetBox stco = new StaticChunkOffsetBox();
    this.chunkOffsetBoxes.add(stco);
    long offset = 0;
    long[] chunkOffset = new long[tracksChunkSizes.length];
    // all tracks have the same number of chunks
    if (LOG.isLoggable(Level.FINE)) {
      LOG.fine("Calculating chunk offsets for track_" + track.getTrackMetaData().getTrackId());
    }

    // Running index of the first sample of the current chunk, per track. Maintaining this
    // incrementally replaces the original inner loop that re-summed all previous chunk sizes
    // on every iteration (accidental O(chunks^2)); the computed values are identical.
    HashMap<Track, Long> firstSampleOfChunks = new HashMap<Track, Long>();
    for (Track current : movie.getTracks()) {
      firstSampleOfChunks.put(current, 0L);
    }

    for (int i = 0; i < tracksChunkSizes.length; i++) {
      // The filelayout will be:
      // chunk_1_track_1,... ,chunk_1_track_n, chunk_2_track_1,... ,chunk_2_track_n, ... ,
      // chunk_m_track_1,... ,chunk_m_track_n
      // calculating the offsets
      if (LOG.isLoggable(Level.FINER)) {
        LOG.finer(
            "Calculating chunk offsets for track_"
                + track.getTrackMetaData().getTrackId()
                + " chunk "
                + i);
      }
      for (Track current : movie.getTracks()) {
        if (LOG.isLoggable(Level.FINEST)) {
          LOG.finest("Adding offsets of track_" + current.getTrackMetaData().getTrackId());
        }
        int[] chunkSizes = track2ChunkSizes.get(current);
        long firstSampleOfChunk = firstSampleOfChunks.get(current);
        if (current == track) {
          chunkOffset[i] = offset;
        }
        // Advance the running offset past every sample of this track's chunk i.
        for (int j = l2i(firstSampleOfChunk); j < firstSampleOfChunk + chunkSizes[i]; j++) {
          offset += track2SampleSizes.get(current)[j];
        }
        firstSampleOfChunks.put(current, firstSampleOfChunk + chunkSizes[i]);
      }
    }
    stco.setChunkOffsets(chunkOffset);
    stbl.addBox(stco);
    minf.addBox(stbl);
    mdia.addBox(minf);

    return trackBox;
  }
// Example 5
  /**
   * All the trimming happens in this method.
   *
   * <p>STEP A ==>> For each track in the video, we first get the start time for all the tracks.
   *
   * <p>STEP B ==>> For each track in the video, we crop the tracks with a 'start time' and 'end
   * time' and add it to the Movie object.
   *
   * <p>STEP C ==>> Finally write the newly created movie to the disk.
   *
   * @return the path of the trimmed output file, or {@code null} when the movie cannot be read,
   *     has no tracks, or an I/O error occurs
   */
  private String cropSelectedVideo() {
    FileInputStream fileInputStream = null;
    FileChannel fileChannel = null;
    try {
      File videoFile = new File(mFullPathToVideoFile);
      fileInputStream = new FileInputStream(videoFile);
      fileChannel = fileInputStream.getChannel();
      Movie movie = MovieCreator.build(fileChannel);
      if (movie == null) {
        return null;
      }

      List<Track> tracks = movie.getTracks();
      movie.setTracks(new LinkedList<Track>());

      // BUG FIX: the original guard was `(tracks == null) && (tracks.size() <= 0)`, which can
      // never be true and dereferences 'tracks' immediately after finding it null (guaranteed
      // NPE). '||' expresses the intent: bail out when there are no tracks at all.
      if (tracks == null || tracks.isEmpty()) {
        return null;
      }

      /*
       * STEP A: find a track that has sync samples. Since we can only start decoding at such a
       * sample we must make sure the start of the fragment is exactly such a frame.
       */
      boolean timeCorrected = false;
      for (Track track : tracks) {
        if (track.getSyncSamples() != null && track.getSyncSamples().length > 0) {
          if (timeCorrected) {
            // Intentionally ignored: correcting again would be a false positive when multiple
            // tracks carry sync samples at exactly the same positions, e.g. a single movie
            // containing multiple qualities of the same video.
          } else {
            mStartTime = correctTimeToNextSyncSample(track, mStartTime, false);
            mEndTime = correctTimeToNextSyncSample(track, mEndTime, true);
            timeCorrected = true;
          }
        }
      }

      // STEP B: translate the (corrected) start/end times into sample numbers for every track
      // and add the cropped track to the movie.
      for (Track track : tracks) {
        long currentVidSample = 0;
        double currentTime = 0;
        long startVidSample = -1;
        long endVidSample = -1;

        entries:
        for (int i = 0; i < track.getDecodingTimeEntries().size(); i++) {
          TimeToSampleBox.Entry myEntry = track.getDecodingTimeEntries().get(i);

          for (int j = 0; j < myEntry.getCount(); j++) {
            if (currentTime <= mStartTime) {
              // Still before the crop start: the new beginning of the video moves here.
              startVidSample = currentVidSample;
            } else if (currentTime <= mEndTime) {
              // Inside the crop window: the new end of the video moves here.
              endVidSample = currentVidSample;
            } else {
              // Past the end of the crop window — nothing more to scan for this track.
              // (Labeled break: the original plain 'break' only left the inner loop and kept
              // iterating over the remaining entries doing no work.)
              break entries;
            }

            // getDelta(): the amount of time the current video sample covers, in timescale
            // ticks; divide by the timescale to accumulate seconds.
            currentTime +=
                (double) myEntry.getDelta() / (double) track.getTrackMetaData().getTimescale();
            currentVidSample++;
          }
        }
        movie.addTrack(new CroppedTrack(track, startVidSample, endVidSample));
      }

      IsoFile isoFile = new DefaultMp4Builder().build(movie);
      // STEP C: after we created the Movie, place it into nonvolatile storage.
      return Util.placeFileInNonVolatileDrive(isoFile, mVideFolderPath, "trim_output");
    } catch (Exception e) {
      Log.e(TAG, "IO ERROR: " + e);
    } finally {
      if (fileInputStream != null) {
        try {
          fileInputStream.close();
        } catch (IOException e) {
          Log.e(TAG, "IO ERROR: " + e);
        }
      }
      if (fileChannel != null) {
        try {
          fileChannel.close();
        } catch (IOException e) {
          Log.e(TAG, "IO ERROR: " + e);
        }
      }
    }
    return null;
  }