protected AudioInputStream getAudioInputStream(InputStream inputStream, long lFileLengthInBytes)
     throws UnsupportedAudioFileException, IOException {
   if (TDebug.TraceAudioFileReader) {
     TDebug.out("AbstractChunkyAudioFileReader.getAudioInputStream(InputStream, long): begin");
   }
   inputStream = new BufferedInputStream(inputStream, INIT_READ_LIMIT);
   inputStream.mark(INIT_READ_LIMIT);
   preliminaryCheck(inputStream, INIT_READ_LIMIT);
   inputStream.reset();
    // OK, buffer it all now - we know we are not wasting our time with this stream.
    inputStream = new BufferedInputStream(inputStream, (int) lFileLengthInBytes);
   AudioFileFormat audioFileFormat = getAudioFileFormat(inputStream, lFileLengthInBytes);
   AudioInputStream audioInputStream =
       new AudioInputStream(
           inputStream, audioFileFormat.getFormat(), audioFileFormat.getFrameLength());
   if (TDebug.TraceAudioFileReader) {
     TDebug.out("AbstractChunkyAudioFileReader.getAudioInputStream(InputStream, long): end");
   }
   return audioInputStream;
 }
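As context for the template method above, here is a sketch of the preliminaryCheck() hook it calls before committing to a full-file buffer. The signature is inferred from the call site, and the FORM magic is purely illustrative; a real subclass would test for its own container signature.

import java.io.DataInputStream;
import java.io.IOException;
import java.io.InputStream;
import javax.sound.sampled.UnsupportedAudioFileException;

// Hypothetical sketch of a preliminaryCheck() implementation.
final class PreliminaryCheckSketch {
  static void preliminaryCheck(InputStream stream, int readLimit)
      throws UnsupportedAudioFileException, IOException {
    // Read only a few bytes, well under readLimit, so the caller's
    // mark()/reset() can restore everything consumed here.
    DataInputStream dis = new DataInputStream(stream);
    int magic = dis.readInt(); // first four bytes of the stream
    if (magic != 0x464F524D) { // 'F' 'O' 'R' 'M', the IFF container signature
      throw new UnsupportedAudioFileException("stream does not start with FORM");
    }
  }
}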
Example #2
  public static void main(String[] args) throws Exception {
    System.out.println();
    System.out.println();
    System.out.println(
        "4636355: Check that RIFF headers are written with extra data length field.");
    byte[] fakedata = new byte[1234];
    MyByteArrayInputStream is = new MyByteArrayInputStream(fakedata);
    AudioFormat inFormat = new AudioFormat(AudioFormat.Encoding.ULAW, 8000, 8, 1, 1, 8000, true);

    AudioInputStream ais = new AudioInputStream((InputStream) is, inFormat, fakedata.length);
    ByteArrayOutputStream out = new ByteArrayOutputStream(1500);
    System.out.println("  ulaw data will be written as WAVE to stream...");
    int t = AudioSystem.write(ais, AudioFileFormat.Type.WAVE, out);
    byte[] writtenData = out.toByteArray();
    // now header must have at least 46 bytes
    System.out.println(
        "  Length should be " + (fakedata.length + 46) + " bytes: " + writtenData.length);
    // re-read this file
    is = new MyByteArrayInputStream(writtenData);
    System.out.println("  Get AudioFileFormat of written file");
    AudioFileFormat fileformat = AudioSystem.getAudioFileFormat(is);
    AudioFileFormat.Type type = fileformat.getType();
    System.out.println("  The file format type: " + type);
    if (fileformat.getFrameLength() != fakedata.length
        && fileformat.getFrameLength() != AudioSystem.NOT_SPECIFIED) {
      throw new Exception(
          "The written file's frame length is "
              + fileformat.getFrameLength()
              + " but should be "
              + fakedata.length
              + " !");
    }
    ais = AudioSystem.getAudioInputStream(is);
    System.out.println("  Got Stream with format: " + ais.getFormat());
    if (is.getPos() < 46) {
      throw new Exception(
          "After reading the header, stream position must be at least 46, but is "
              + is.getPos()
              + " !");
    }
    System.out.println("  test passed.");
  }
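For reference, the 46-byte minimum breaks down as follows for a non-PCM WAVE file such as this ULAW one: 12 bytes of RIFF header ("RIFF", chunk size, "WAVE"), 26 bytes of fmt chunk (an 8-byte chunk header, 16 bytes of standard fields, plus the 2-byte extra-data-length field that bug 4636355 concerns), and an 8-byte data chunk header, so 12 + 26 + 8 = 46 bytes precede the audio data.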
Example #3
 /** Return the AudioInputStream from the given InputStream. */
 public AudioInputStream getAudioInputStream(InputStream inputStream, int medialength, int totalms)
     throws UnsupportedAudioFileException, IOException {
    if (TDebug.TraceAudioFileReader)
      TDebug.out("getAudioInputStream(InputStream inputStream, int medialength, int totalms)");
   try {
     if (!inputStream.markSupported()) inputStream = new BufferedInputStream(inputStream);
     inputStream.mark(MARK_LIMIT);
     AudioFileFormat audioFileFormat = getAudioFileFormat(inputStream, medialength, totalms);
     inputStream.reset();
     return new AudioInputStream(
         inputStream, audioFileFormat.getFormat(), audioFileFormat.getFrameLength());
   } catch (UnsupportedAudioFileException e) {
     inputStream.reset();
     throw e;
   } catch (IOException e) {
     inputStream.reset();
     throw e;
   }
 }
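The wrap-then-mark pattern here depends on java.io's mark/reset contract: a plain FileInputStream does not support mark, which is why the method buffers it first. A minimal, self-contained illustration of that contract (the file name is a placeholder):

import java.io.BufferedInputStream;
import java.io.FileInputStream;
import java.io.InputStream;

public class MarkResetDemo {
  public static void main(String[] args) throws Exception {
    InputStream raw = new FileInputStream("clip.mp3"); // placeholder file name
    System.out.println(raw.markSupported());           // false: FileInputStream cannot rewind
    InputStream buffered = new BufferedInputStream(raw);
    System.out.println(buffered.markSupported());      // true: the buffer makes mark() legal
    buffered.mark(1024);          // remember this point; valid while <= 1024 bytes are read
    buffered.read(new byte[512]); // parse ahead, as getAudioFileFormat() does
    buffered.reset();             // rewind, so the returned stream starts at byte 0
    buffered.close();
  }
}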
Example #4
 /**
  * Initializes the AudioInputStream and AudioFileFormat from the data source.
  *
  * @throws BasicPlayerException
  */
 protected void initAudioInputStream() throws BasicPlayerException {
   try {
     reset();
     notifyEvent(BasicPlayerEvent.OPENING, getEncodedStreamPosition(), -1, m_dataSource);
     if (m_dataSource instanceof URL) {
       initAudioInputStream((URL) m_dataSource);
     } else if (m_dataSource instanceof File) {
       initAudioInputStream((File) m_dataSource);
     } else if (m_dataSource instanceof InputStream) {
       initAudioInputStream((InputStream) m_dataSource);
     }
     createLine();
     // Notify listeners with AudioFileFormat properties.
     Map properties = null;
     if (m_audioFileFormat instanceof TAudioFileFormat) {
       // Tritonus SPI compliant audio file format.
       properties = ((TAudioFileFormat) m_audioFileFormat).properties();
        // Copy the Map because the one returned by properties() is unmodifiable.
       properties = deepCopy(properties);
     } else {
       properties = new HashMap();
     }
     // Add JavaSound properties.
     if (m_audioFileFormat.getByteLength() > 0) {
       properties.put("audio.length.bytes", new Integer(m_audioFileFormat.getByteLength()));
     }
     if (m_audioFileFormat.getFrameLength() > 0) {
       properties.put("audio.length.frames", new Integer(m_audioFileFormat.getFrameLength()));
     }
     if (m_audioFileFormat.getType() != null) {
       properties.put("audio.type", (m_audioFileFormat.getType().toString()));
     }
     // Audio format.
     AudioFormat audioFormat = m_audioFileFormat.getFormat();
     if (audioFormat.getFrameRate() > 0) {
       properties.put("audio.framerate.fps", new Float(audioFormat.getFrameRate()));
     }
     if (audioFormat.getFrameSize() > 0) {
       properties.put("audio.framesize.bytes", new Integer(audioFormat.getFrameSize()));
     }
     if (audioFormat.getSampleRate() > 0) {
       properties.put("audio.samplerate.hz", new Float(audioFormat.getSampleRate()));
     }
     if (audioFormat.getSampleSizeInBits() > 0) {
       properties.put("audio.samplesize.bits", new Integer(audioFormat.getSampleSizeInBits()));
     }
     if (audioFormat.getChannels() > 0) {
       properties.put("audio.channels", new Integer(audioFormat.getChannels()));
     }
     if (audioFormat instanceof TAudioFormat) {
       // Tritonus SPI compliant audio format.
       Map addproperties = ((TAudioFormat) audioFormat).properties();
       properties.putAll(addproperties);
     }
     // Add SourceDataLine
     properties.put("basicplayer.sourcedataline", m_line);
     Iterator<BasicPlayerListener> it = laucher.getBasicPlayerListeners().iterator();
     while (it.hasNext()) {
       BasicPlayerListener bpl = it.next();
       bpl.opened(m_dataSource, properties);
     }
     m_status = OPENED;
     notifyEvent(BasicPlayerEvent.OPENED, getEncodedStreamPosition(), -1, null);
   } catch (LineUnavailableException e) {
     throw new BasicPlayerException(e);
   } catch (UnsupportedAudioFileException e) {
     throw new BasicPlayerException(e);
   } catch (IOException e) {
     throw new BasicPlayerException(e);
   }
 }
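Once the properties map is assembled, every registered BasicPlayerListener receives it through opened(). Below is a minimal listener sketch; the package and callback signatures are assumed to match JavaZoom's BasicPlayer distribution, and the raw Map follows the style of the code above.

import java.util.Map;
import javazoom.jlgui.basicplayer.BasicController;
import javazoom.jlgui.basicplayer.BasicPlayerEvent;
import javazoom.jlgui.basicplayer.BasicPlayerListener;

// Assumed to implement JavaZoom's BasicPlayerListener; only opened() is used here.
public class LoggingListener implements BasicPlayerListener {
  public void opened(Object stream, Map properties) {
    // Keys written by initAudioInputStream() above.
    System.out.println("bytes : " + properties.get("audio.length.bytes"));
    System.out.println("rate  : " + properties.get("audio.samplerate.hz"));
    System.out.println("type  : " + properties.get("audio.type"));
  }

  public void progress(int bytesread, long microseconds, byte[] pcmdata, Map properties) {}

  public void stateUpdated(BasicPlayerEvent event) {}

  public void setController(BasicController controller) {}
}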
Example #5
  WaveFileFormat(AudioFileFormat aff) {
    this(aff.getType(), aff.getByteLength(), aff.getFormat(), aff.getFrameLength());
  }