  @Override
  public void adaptiveTrack(SmoothStreamingManifest manifest, int element, int[] trackIndices) {
    if (adaptiveFormatEvaluator == null) {
      // Adaptive tracks are only supported when an adaptive format evaluator is provided.
      return;
    }
    MediaFormat maxHeightMediaFormat = null;
    StreamElement streamElement = manifest.streamElements[element];
    int maxWidth = -1;
    int maxHeight = -1;
    Format[] formats = new Format[trackIndices.length];
    for (int i = 0; i < formats.length; i++) {
      int manifestTrackIndex = trackIndices[i];
      formats[i] = streamElement.tracks[manifestTrackIndex].format;
      MediaFormat mediaFormat = initManifestTrack(manifest, element, manifestTrackIndex);
      // Keep the format with the greatest height, and track the maximum dimensions overall.
      if (maxHeightMediaFormat == null || mediaFormat.height > maxHeight) {
        maxHeightMediaFormat = mediaFormat;
      }
      maxWidth = Math.max(maxWidth, mediaFormat.width);
      maxHeight = Math.max(maxHeight, mediaFormat.height);
    }
    Arrays.sort(formats, new DecreasingBandwidthComparator());
    MediaFormat adaptiveMediaFormat = maxHeightMediaFormat.copyAsAdaptive();
    tracks.add(new ExposedTrack(adaptiveMediaFormat, element, formats, maxWidth, maxHeight));
  }
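
The formats handed to ExposedTrack above are ordered highest bandwidth first. A minimal standalone sketch of that ordering, using a hypothetical simplified Format class in place of ExoPlayer's:

import java.util.Arrays;
import java.util.Comparator;

final class DecreasingBandwidthSketch {

  // Hypothetical stand-in for ExoPlayer's Format; only the bitrate matters here.
  static final class Format {
    final String id;
    final int bitrate;
    Format(String id, int bitrate) {
      this.id = id;
      this.bitrate = bitrate;
    }
  }

  public static void main(String[] args) {
    Format[] formats = {
        new Format("low", 400_000), new Format("high", 2_400_000), new Format("mid", 1_200_000)};
    // Equivalent of the DecreasingBandwidthComparator used above: highest bitrate first.
    Arrays.sort(formats, new Comparator<Format>() {
      @Override
      public int compare(Format a, Format b) {
        return Integer.compare(b.bitrate, a.bitrate);
      }
    });
    for (Format format : formats) {
      System.out.println(format.id); // Prints: high, mid, low.
    }
  }
}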
Example #2
  @Override
  public int readData(
      int track, long positionUs, MediaFormatHolder formatHolder, SampleHolder sampleHolder) {
    Assertions.checkState(state == STATE_ENABLED);
    downstreamPositionUs = positionUs;

    if (pendingDiscontinuity || isPendingReset()) {
      return NOTHING_READ;
    }

    boolean haveSamples = !sampleQueue.isEmpty();
    BaseMediaChunk currentChunk = mediaChunks.getFirst();
    while (haveSamples
        && mediaChunks.size() > 1
        && mediaChunks.get(1).getFirstSampleIndex() <= sampleQueue.getReadIndex()) {
      mediaChunks.removeFirst();
      currentChunk = mediaChunks.getFirst();
    }

    Format format = currentChunk.format;
    if (!format.equals(downstreamFormat)) {
      notifyDownstreamFormatChanged(format, currentChunk.trigger, currentChunk.startTimeUs);
    }
    downstreamFormat = format;

    if (haveSamples || currentChunk.isMediaFormatFinal) {
      MediaFormat mediaFormat = currentChunk.getMediaFormat();
      if (!mediaFormat.equals(downstreamMediaFormat)) {
        formatHolder.format = mediaFormat;
        formatHolder.drmInitData = currentChunk.getDrmInitData();
        downstreamMediaFormat = mediaFormat;
        return FORMAT_READ;
      }
      // If mediaFormat and downstreamMediaFormat are equal but different objects then the equality
      // check above will have been expensive, comparing the fields in each format. We update
      // downstreamMediaFormat here so that referential equality can be cheaply established during
      // subsequent calls.
      downstreamMediaFormat = mediaFormat;
    }

    if (!haveSamples) {
      if (loadingFinished) {
        return END_OF_STREAM;
      }
      return NOTHING_READ;
    }

    if (sampleQueue.getSample(sampleHolder)) {
      boolean decodeOnly = sampleHolder.timeUs < lastSeekPositionUs;
      sampleHolder.flags |= decodeOnly ? C.SAMPLE_FLAG_DECODE_ONLY : 0;
      onSampleRead(currentChunk, sampleHolder);
      return SAMPLE_READ;
    }

    return NOTHING_READ;
  }
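
The while loop above drops leading chunks whose samples have already been consumed, so currentChunk always holds the chunk containing the next sample to read. A self-contained sketch of that discard logic, with a hypothetical Chunk class standing in for BaseMediaChunk:

import java.util.LinkedList;

final class ChunkDiscardSketch {

  static final class Chunk {
    final int firstSampleIndex;
    Chunk(int firstSampleIndex) {
      this.firstSampleIndex = firstSampleIndex;
    }
  }

  public static void main(String[] args) {
    LinkedList<Chunk> mediaChunks = new LinkedList<>();
    mediaChunks.add(new Chunk(0));
    mediaChunks.add(new Chunk(50));
    mediaChunks.add(new Chunk(100));
    int readIndex = 60; // Index of the next sample the consumer will read.

    Chunk currentChunk = mediaChunks.getFirst();
    // Discard a chunk once the read position has reached the chunk after it.
    while (mediaChunks.size() > 1 && mediaChunks.get(1).firstSampleIndex <= readIndex) {
      mediaChunks.removeFirst();
      currentChunk = mediaChunks.getFirst();
    }
    System.out.println(currentChunk.firstSampleIndex); // Prints 50: sample 60 lives there.
  }
}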
Example #3
  @Override
  public int read(ExtractorInput input, PositionHolder seekPosition)
      throws IOException, InterruptedException {

    if (vorbisSetup == null) {
      vorbisSetup = readSetupHeaders(input, scratch);
      ArrayList<byte[]> codecInitialisationData = new ArrayList<>();
      codecInitialisationData.add(vorbisSetup.idHeader.data);
      codecInitialisationData.add(vorbisSetup.setupHeaderData);

      // Duration in microseconds: bytes * 8 bits * 1,000,000, divided by the approximate bitrate.
      long duration =
          input.getLength() == C.LENGTH_UNBOUNDED
              ? C.UNKNOWN_TIME_US
              : input.getLength() * 8000000 / vorbisSetup.idHeader.getApproximateBitrate();
      trackOutput.format(
          MediaFormat.createAudioFormat(
              null,
              MimeTypes.AUDIO_VORBIS,
              this.vorbisSetup.idHeader.bitrateNominal,
              OGG_MAX_SEGMENT_SIZE * 255,
              duration,
              this.vorbisSetup.idHeader.channels,
              (int) this.vorbisSetup.idHeader.sampleRate,
              codecInitialisationData,
              null));
    }
    if (oggReader.readPacket(input, scratch)) {
      // if this is an audio packet...
      if ((scratch.data[0] & 0x01) != 1) {
        // ... we need to decode the block size
        int packetBlockSize = decodeBlockSize(scratch.data[0], vorbisSetup);
        // a packet contains samples produced from overlapping the previous and current frame data
        // (https://www.xiph.org/vorbis/doc/Vorbis_I_spec.html#x1-350001.3.2)
        int samplesInPacket =
            seenFirstAudioPacket ? (packetBlockSize + previousPacketBlockSize) / 4 : 0;
        // codec expects the number of samples appended to audio data
        appendNumberOfSamples(scratch, samplesInPacket);

        // calculate time and send audio data to codec
        long timeUs = elapsedSamples * C.MICROS_PER_SECOND / vorbisSetup.idHeader.sampleRate;
        trackOutput.sampleData(scratch, scratch.limit());
        trackOutput.sampleMetadata(timeUs, C.SAMPLE_FLAG_SYNC, scratch.limit(), 0, null);

        // update state in members for next iteration
        seenFirstAudioPacket = true;
        elapsedSamples += samplesInPacket;
        previousPacketBlockSize = packetBlockSize;
      }
      scratch.reset();
      return RESULT_CONTINUE;
    }
    return RESULT_END_OF_INPUT;
  }
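
Per the Vorbis I spec, an audio packet yields (previousBlockSize + currentBlockSize) / 4 samples, because adjacent blocks overlap by half a window and the first packet produces no output. A runnable sketch of the running timestamp arithmetic used above, with made-up sample rate and block sizes:

final class VorbisTimestampSketch {

  public static void main(String[] args) {
    final long microsPerSecond = 1_000_000L;
    final long sampleRate = 48_000L; // Arbitrary example rate.
    int[] blockSizes = {256, 2048, 2048, 256}; // Hypothetical per-packet block sizes.

    long elapsedSamples = 0;
    int previousBlockSize = 0;
    boolean seenFirstAudioPacket = false;
    for (int blockSize : blockSizes) {
      int samplesInPacket = seenFirstAudioPacket ? (blockSize + previousBlockSize) / 4 : 0;
      // The packet is stamped with the time of the samples accumulated before it.
      long timeUs = elapsedSamples * microsPerSecond / sampleRate;
      System.out.println("packet at " + timeUs + "us carries " + samplesInPacket + " samples");
      seenFirstAudioPacket = true;
      elapsedSamples += samplesInPacket;
      previousBlockSize = blockSize;
    }
  }
}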
Example #4
  @Override
  public void getTrackMediaFormat(int track, MediaFormatHolder mediaFormatHolder) {
    int height = ffGetHeight(pFormatCtx, track);
    int width = ffGetWidth(pFormatCtx, track);
    // int bitrate = ffGetBitrate(pFormatCtx, track);
    // int channels = ffGetChannelCount(pFormatCtx, track);
    // int sampleRate = ffGetSampleRate(pFormatCtx, track);
    byte[] initData = ffGetInitData(pFormatCtx, track);
    List<byte[]> initList = new ArrayList<byte[]>();

    // Split SPS/PPS data by start code and insert into initialization data
    int i, start;
    for (i = 4, start = 0; i < initData.length; i++) {

      // Found a new start code: emit the bytes before it (including their own start code).
      if (i + 4 <= initData.length
          && initData[i] == 0
          && initData[i + 1] == 0
          && initData[i + 2] == 0
          && initData[i + 3] == 1) {
        byte[] csd = new byte[i - start];
        System.arraycopy(initData, start, csd, 0, i - start);
        initList.add(csd);
        Log.d(TAG, "inserted csd " + csd.length);
        start = i;
      }
    }

    // Insert the last csd
    if (i > start) {
      byte[] csd = new byte[i - start];
      System.arraycopy(initData, start, csd, 0, i - start);
      initList.add(csd);
      Log.d(TAG, "inserted final csd " + csd.length);
    }

    MediaFormat format =
        MediaFormat.createVideoFormat(
            mTrackInfos[track].mimeType, 10 * 1024 * 1024, width, height, 1, initList);
    mediaFormatHolder.format = format;

    // Start playing the stream
    // ffPlay(pFormatCtx);
  }
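
The split loop above is plain array logic, so it can be exercised without the FFmpeg JNI bindings. A self-contained version, fed a made-up two-entry Annex-B style payload:

import java.util.ArrayList;
import java.util.List;

final class StartCodeSplitSketch {

  public static void main(String[] args) {
    // Two made-up parameter sets, each prefixed with a 0x00000001 start code.
    byte[] initData = {0, 0, 0, 1, 0x67, 0x42, 0, 0, 0, 1, 0x68, (byte) 0xCE};
    List<byte[]> initList = new ArrayList<byte[]>();

    int i, start;
    for (i = 4, start = 0; i < initData.length; i++) {
      // A new start code closes the previous parameter set (start code included).
      if (i + 4 <= initData.length
          && initData[i] == 0
          && initData[i + 1] == 0
          && initData[i + 2] == 0
          && initData[i + 3] == 1) {
        byte[] csd = new byte[i - start];
        System.arraycopy(initData, start, csd, 0, i - start);
        initList.add(csd);
        start = i;
      }
    }
    // The final parameter set runs to the end of the buffer.
    if (i > start) {
      byte[] csd = new byte[i - start];
      System.arraycopy(initData, start, csd, 0, i - start);
      initList.add(csd);
    }
    for (byte[] csd : initList) {
      System.out.println("csd length " + csd.length); // Prints 6 twice.
    }
  }
}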
Example #5
  private MediaFormat initManifestTrack(
      SmoothStreamingManifest manifest, int elementIndex, int trackIndex) {
    int manifestTrackKey = getManifestTrackKey(elementIndex, trackIndex);
    MediaFormat mediaFormat = mediaFormats.get(manifestTrackKey);
    if (mediaFormat != null) {
      // Already initialized.
      return mediaFormat;
    }

    // Build the media format.
    long durationUs = live ? C.UNKNOWN_TIME_US : manifest.durationUs;
    StreamElement element = manifest.streamElements[elementIndex];
    Format format = element.tracks[trackIndex].format;
    byte[][] csdArray = element.tracks[trackIndex].csd;
    int mp4TrackType;
    switch (element.type) {
      case StreamElement.TYPE_VIDEO:
        mediaFormat =
            MediaFormat.createVideoFormat(
                format.mimeType,
                format.bitrate,
                MediaFormat.NO_VALUE,
                durationUs,
                format.width,
                format.height,
                0,
                Arrays.asList(csdArray));
        mp4TrackType = Track.TYPE_vide;
        break;
      case StreamElement.TYPE_AUDIO:
        List<byte[]> csd;
        if (csdArray != null) {
          csd = Arrays.asList(csdArray);
        } else {
          csd =
              Collections.singletonList(
                  CodecSpecificDataUtil.buildAacAudioSpecificConfig(
                      format.audioSamplingRate, format.audioChannels));
        }
        mediaFormat =
            MediaFormat.createAudioFormat(
                format.mimeType,
                format.bitrate,
                MediaFormat.NO_VALUE,
                durationUs,
                format.audioChannels,
                format.audioSamplingRate,
                csd);
        mp4TrackType = Track.TYPE_soun;
        break;
      case StreamElement.TYPE_TEXT:
        mediaFormat =
            MediaFormat.createTextFormat(
                format.mimeType, format.bitrate, format.language, durationUs);
        mp4TrackType = Track.TYPE_text;
        break;
      default:
        throw new IllegalStateException("Invalid type: " + element.type);
    }

    // Build the extractor.
    FragmentedMp4Extractor mp4Extractor =
        new FragmentedMp4Extractor(
            FragmentedMp4Extractor.WORKAROUND_EVERY_VIDEO_FRAME_IS_SYNC_FRAME);
    Track mp4Track =
        new Track(
            trackIndex,
            mp4TrackType,
            element.timescale,
            durationUs,
            mediaFormat,
            trackEncryptionBoxes,
            mp4TrackType == Track.TYPE_vide ? 4 : -1);
    mp4Extractor.setTrack(mp4Track);

    // Store the format and a wrapper around the extractor.
    mediaFormats.put(manifestTrackKey, mediaFormat);
    extractorWrappers.put(manifestTrackKey, new ChunkExtractorWrapper(mp4Extractor));
    return mediaFormat;
  }
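
getManifestTrackKey is not shown in this example. A plausible sketch of the lazy-initialization pattern it supports, assuming (an assumption, not the source) that the key simply packs both indices into one int:

import java.util.HashMap;
import java.util.Map;

final class TrackKeySketch {

  // Assumed packing: element index in the high 16 bits, track index in the low 16.
  static int getManifestTrackKey(int elementIndex, int trackIndex) {
    return (elementIndex << 16) | (trackIndex & 0xFFFF);
  }

  public static void main(String[] args) {
    Map<Integer, String> mediaFormats = new HashMap<Integer, String>();
    int key = getManifestTrackKey(2, 7);
    String mediaFormat = mediaFormats.get(key);
    if (mediaFormat == null) {
      // Build once; subsequent lookups for the same (element, track) pair hit the cache.
      mediaFormat = "format for element 2, track 7";
      mediaFormats.put(key, mediaFormat);
    }
    System.out.println(mediaFormat);
  }
}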
Example #6
  public Id3Reader(TrackOutput output) {
    super(output);
    // The output format is known up front: a single ID3 metadata track.
    output.format(MediaFormat.createId3Format());
    id3Header = new ParsableByteArray(ID3_HEADER_SIZE);
  }