示例#1
1
  /**
   * Opens an AudioInputStream for the given resource.
   *
   * @param filename an http/https URL or a local file path to an audio file
   * @return an AudioInputStream for the resource, or {@code null} if it could not be opened
   */
  AudioInputStream getAudioInputStream(String filename) {
    AudioInputStream ais = null;
    if (filename.startsWith("http")) {
      try {
        ais = getAudioInputStream(new URL(filename));
      } catch (MalformedURLException e) {
        error("Bad URL: " + e.getMessage());
      } catch (UnsupportedAudioFileException e) {
        error("URL is in an unsupported audio file format: " + e.getMessage());
      } catch (IOException e) {
        // consistency: use the instance error() helper like the other
        // handlers in this method, rather than the static Sound.error()
        error("Error reading the URL: " + e.getMessage());
      }
    } else {
      try {
        // BUG FIX: this used to be an anonymous FileInputStream subclass
        // whose read() override always returned 0, which both violates the
        // InputStream.read() contract (0 is not a legal return value) and
        // corrupts any single-byte reads. A plain FileInputStream is the
        // correct stream here.
        InputStream is = new FileInputStream(filename);
        debug("Base input stream is: " + is.toString());
        BufferedInputStream bis = new BufferedInputStream(is);
        ais = getAudioInputStream(bis);
        // don't mark the stream: that would keep the entire file in memory
        // as it plays, causing out-of-memory problems with very large files.
        // ais.mark((int)ais.available());
        debug(
            "Acquired AudioInputStream.\n"
                + "It is "
                + ais.getFrameLength()
                + " frames long.\n"
                + "Marking support: "
                + ais.markSupported());
      } catch (IOException ioe) {
        error("IOException: " + ioe.getMessage());
      } catch (UnsupportedAudioFileException uafe) {
        error("Unsupported Audio File: " + uafe.getMessage());
      }
    }
    return ais;
  }
示例#2
0
 /**
  * Acquires and opens a TargetDataLine (audio input) for the requested format.
  *
  * @param format the audio format the line must support
  * @param bufferSize the desired buffer size, in sample frames
  * @return an opened TargetDataLine, or {@code null} if the format is unsupported
  *     or the line could not be acquired
  */
 TargetDataLine getTargetDataLine(AudioFormat format, int bufferSize) {
   DataLine.Info info = new DataLine.Info(TargetDataLine.class, format);
   if (!AudioSystem.isLineSupported(info)) {
     error("Unable to return a TargetDataLine: unsupported format - " + format.toString());
     return null;
   }
   TargetDataLine line = null;
   try {
     // pull the line from the chosen input mixer when one has been set,
     // otherwise let AudioSystem pick the default
     line =
         (inputMixer == null)
             ? (TargetDataLine) AudioSystem.getLine(info)
             : (TargetDataLine) inputMixer.getLine(info);
     // buffer size argument to open() is in bytes, so convert from frames
     line.open(format, bufferSize * format.getFrameSize());
     debug(
         "TargetDataLine buffer size is "
             + line.getBufferSize()
             + "\n"
             + "TargetDataLine format is "
             + line.getFormat().toString()
             + "\n"
             + "TargetDataLine info is "
             + line.getLineInfo().toString());
   } catch (Exception e) {
     error("Error acquiring TargetDataLine: " + e.getMessage());
   }
   return line;
 }
示例#3
0
 /**
  * Acquires and opens a SourceDataLine (audio output) for the requested format.
  *
  * @param format the audio format the line must support
  * @param bufferSize the desired buffer size, in sample frames
  * @return an opened SourceDataLine, or {@code null} (or an unopened line) if
  *     the format is unsupported or the line could not be opened
  */
 SourceDataLine getSourceDataLine(AudioFormat format, int bufferSize) {
   SourceDataLine line = null;
   DataLine.Info info = new DataLine.Info(SourceDataLine.class, format);
   if (AudioSystem.isLineSupported(info)) {
     try {
       // pull the line from the chosen output mixer when one has been set,
       // otherwise let AudioSystem pick the default
       line =
           (outputMixer == null)
               ? (SourceDataLine) AudioSystem.getLine(info)
               : (SourceDataLine) outputMixer.getLine(info);
       // the 4x multiplier was hand-tuned to get decent latency on Linux;
       // do not fuss with this buffer size again
       line.open(format, bufferSize * format.getFrameSize() * 4);
       if (line.isOpen()) {
         debug(
             "SourceDataLine is "
                 + line.getClass().toString()
                 + "\n"
                 + "Buffer size is "
                 + line.getBufferSize()
                 + " bytes.\n"
                 + "Format is "
                 + line.getFormat().toString()
                 + ".");
         return line;
       }
     } catch (LineUnavailableException e) {
       error("Couldn't open the line: " + e.getMessage());
     }
   }
   // reached when the format is unsupported or the open attempt failed
   error("Unable to return a SourceDataLine: unsupported format - " + format.toString());
   return line;
 }
示例#4
0
 /**
  * Reads the ID3/format properties of an mp3 file.
  *
  * @param filename the file to read tags from
  * @return a map of the file's properties; empty if the file has none or
  *     could not be read
  */
 private Map<String, Object> getID3Tags(String filename) {
   debug("Getting the properties.");
   Map<String, Object> props = new HashMap<String, Object>();
   try {
     MpegAudioFileReader reader = new MpegAudioFileReader(this);
     // BUG FIX: try-with-resources guarantees the stream is closed even when
     // getAudioFileFormat throws; previously it leaked on the exception path.
     try (InputStream stream = new FileInputStream(filename)) {
       AudioFileFormat baseFileFormat = reader.getAudioFileFormat(stream, stream.available());
       if (baseFileFormat instanceof TAudioFileFormat) {
         TAudioFileFormat fileFormat = (TAudioFileFormat) baseFileFormat;
         props = fileFormat.properties();
         if (props.size() == 0) {
           error("No file properties available for " + filename + ".");
         } else {
           debug("File properties: " + props.toString());
         }
       }
     }
   } catch (UnsupportedAudioFileException e) {
     error("Couldn't get the file format for " + filename + ": " + e.getMessage());
   } catch (IOException e) {
     error("Couldn't access " + filename + ": " + e.getMessage());
   }
   return props;
 }
示例#5
0
 /**
  * Loads the given file into a JavaSound Clip and wraps it as an AudioRecording.
  *
  * @param filename the file (or URL) to load
  * @return an AudioRecording backed by a Clip, or {@code null} if the file could
  *     not be opened, its format is unsupported, or the Clip could not be obtained
  */
 public AudioRecording getAudioRecordingClip(String filename) {
   Clip clip = null;
   AudioMetaData meta = null;
   AudioInputStream ais = getAudioInputStream(filename);
   // BUG FIX: a null stream used to fall through to the BasicMetaData branch
   // below and throw a NullPointerException on clip.getMicrosecondLength();
   // bail out cleanly instead.
   if (ais == null) {
     return null;
   }
   AudioFormat format = ais.getFormat();
   if (format instanceof MpegAudioFormat) {
     AudioFormat baseFormat = format;
     format =
         new AudioFormat(
             AudioFormat.Encoding.PCM_SIGNED,
             baseFormat.getSampleRate(),
             16,
             baseFormat.getChannels(),
             baseFormat.getChannels() * 2,
             baseFormat.getSampleRate(),
             false);
     // converts the stream to PCM audio from mp3 audio
     ais = getAudioInputStream(format, ais);
   }
   DataLine.Info info = new DataLine.Info(Clip.class, ais.getFormat());
   if (!AudioSystem.isLineSupported(info)) {
     error("File format not supported.");
     return null;
   }
   // Obtain and open the line.
   try {
     clip = (Clip) AudioSystem.getLine(info);
     clip.open(ais);
   } catch (Exception e) {
     error("Error obtaining Javasound Clip: " + e.getMessage());
     return null;
   }
   Map<String, Object> props = getID3Tags(filename);
   long lengthInMillis = -1;
   if (props.containsKey("duration")) {
     Long dur = (Long) props.get("duration");
     // the duration property is in microseconds
     lengthInMillis = dur.longValue() / 1000;
   }
   // note: the old BasicMetaData fallback was only reachable on the null-stream
   // path that crashed, so meta is always built here now
   meta = new MP3MetaData(filename, lengthInMillis, props);
   return new JSAudioRecordingClip(clip, meta);
 }
示例#6
0
 /**
  * Creates a SampleRecorder that writes the given source to a file, choosing the
  * audio file type from the file name's extension.
  *
  * @param source the Recordable to capture audio from
  * @param fileName the destination file; its extension selects the format
  * @param buffered true for an in-memory (buffered) recorder, false for streaming
  * @return a SampleRecorder for the source, or {@code null} if the extension is
  *     not a recognized audio file type
  */
 public SampleRecorder getSampleRecorder(Recordable source, String fileName, boolean buffered) {
   String extension = fileName.substring(fileName.lastIndexOf('.') + 1).toLowerCase();
   debug("createRecorder: file extension is " + extension + ".");
   // map the extension onto a known audio file type, preserving the
   // original match order
   AudioFileFormat.Type fileType;
   if (extension.equals(Sound.WAV.getExtension())) {
     fileType = Sound.WAV;
   } else if (extension.equals(Sound.AIFF.getExtension()) || extension.equals("aif")) {
     fileType = Sound.AIFF;
   } else if (extension.equals(Sound.AIFC.getExtension())) {
     fileType = Sound.AIFC;
   } else if (extension.equals(Sound.AU.getExtension())) {
     fileType = Sound.AU;
   } else if (extension.equals(Sound.SND.getExtension())) {
     fileType = Sound.SND;
   } else {
     error("The extension " + extension + " is not a recognized audio file type.");
     return null;
   }
   // the two recorder flavors take identical constructor arguments
   return buffered
       ? new JSBufferedSampleRecorder(
           this, sketchPath(fileName), fileType, source.getFormat(), source.bufferSize())
       : new JSStreamingSampleRecorder(
           this, sketchPath(fileName), fileType, source.getFormat(), source.bufferSize());
 }
示例#7
0
 /**
  * Loads a file entirely into memory and returns it as an AudioSample.
  *
  * @param filename the file to load
  * @param bufferSize the buffer size to use for the playback output
  * @return a JSAudioSample for the file, or {@code null} if the file couldn't be
  *     read, its length couldn't be determined, or no output could be acquired
  */
 public AudioSample getAudioSample(String filename, int bufferSize) {
   AudioInputStream ais = getAudioInputStream(filename);
   if (ais != null) {
     AudioMetaData meta = null;
     AudioFormat format = ais.getFormat();
     FloatSampleBuffer samples = null;
     if (format instanceof MpegAudioFormat) {
       AudioFormat baseFormat = format;
       format =
           new AudioFormat(
               AudioFormat.Encoding.PCM_SIGNED,
               baseFormat.getSampleRate(),
               16,
               baseFormat.getChannels(),
               baseFormat.getChannels() * 2,
               baseFormat.getSampleRate(),
               false);
       // converts the stream to PCM audio from mp3 audio
       ais = getAudioInputStream(format, ais);
       // get a map of properties so we can find out how long it is
       Map<String, Object> props = getID3Tags(filename);
       // there is a property called mp3.length.bytes, but that is
       // the length in bytes of the mp3 file, which will of course
       // be much shorter than the decoded version. so we use the
       // duration of the file to figure out how many bytes the
       // decoded file will be.
       Long duration = (Long) props.get("duration");
       // BUG FIX: getID3Tags can return an empty map, in which case the
       // previously-unguarded cast threw a NullPointerException here.
       if (duration == null) {
         error("Couldn't determine the length of " + filename + ".");
         return null;
       }
       long dur = duration.longValue();
       int toRead = (int) AudioUtils.millis2Bytes(dur / 1000, format);
       samples = loadFloatAudio(ais, toRead);
       meta = new MP3MetaData(filename, dur / 1000, props);
     } else {
       samples = loadFloatAudio(ais, (int) ais.getFrameLength() * format.getFrameSize());
       long length = AudioUtils.frames2Millis(samples.getSampleCount(), format);
       meta = new BasicMetaData(filename, length);
     }
     AudioSynthesizer out =
         getAudioSynthesizer(
             format.getChannels(),
             bufferSize,
             format.getSampleRate(),
             format.getSampleSizeInBits());
     if (out != null) {
       SampleSignal ssig = new SampleSignal(samples);
       out.setAudioSignal(ssig);
       return new JSAudioSample(meta, ssig, out);
     } else {
       error("Couldn't acquire an output.");
     }
   }
   return null;
 }
示例#8
0
 /**
  * Reads up to {@code toRead} bytes from the stream into a new array, then
  * closes the stream.
  *
  * @param ais the stream to drain
  * @param toRead the number of bytes to attempt to read
  * @return a byte array of length {@code toRead}; trailing bytes are left at
  *     zero if the stream ended early or a read error occurred
  */
 private byte[] loadByteAudio(AudioInputStream ais, int toRead) {
   byte[] buffer = new byte[toRead];
   int offset = 0;
   try {
     // the decoded stream tends to hand back only ~2000 bytes per call,
     // so keep reading until we have everything or hit end-of-stream
     while (offset < toRead) {
       int chunk = ais.read(buffer, offset, toRead - offset);
       if (chunk <= 0) {
         break;
       }
       offset += chunk;
     }
     ais.close();
   } catch (Exception e) {
     error("Error loading file into memory: " + e.getMessage());
   }
   debug("Needed to read " + toRead + " actually read " + offset);
   return buffer;
 }
示例#9
0
  /**
   * Wraps an already-loaded sample buffer in a playable JSAudioSample.
   *
   * @param samples the decoded audio data
   * @param format the format describing the sample data
   * @param bufferSize the buffer size to use for the playback output
   * @return a JSAudioSample, or {@code null} if an output couldn't be acquired
   */
  private JSAudioSample getAudioSampleImp(
      FloatSampleBuffer samples, AudioFormat format, int bufferSize) {
    AudioSynthesizer synth =
        getAudioSynthesizer(
            samples.getChannelCount(),
            bufferSize,
            format.getSampleRate(),
            format.getSampleSizeInBits());
    if (synth == null) {
      error("Couldn't acquire an output.");
      return null;
    }
    SampleSignal signal = new SampleSignal(samples);
    synth.setAudioSignal(signal);
    long lengthMillis = AudioUtils.frames2Millis(samples.getSampleCount(), format);
    BasicMetaData meta = new BasicMetaData(samples.toString(), lengthMillis);
    return new JSAudioSample(meta, signal, synth);
  }
示例#10
0
 /**
  * Reads up to {@code toRead} bytes from the stream and converts them into a
  * FloatSampleBuffer, closing the stream when done.
  *
  * @param ais the stream to drain
  * @param toRead the number of bytes to attempt to read
  * @return a FloatSampleBuffer initialized from however many bytes were read
  */
 private FloatSampleBuffer loadFloatAudio(AudioInputStream ais, int toRead) {
   FloatSampleBuffer samples = new FloatSampleBuffer();
   int totalRead = 0;
   byte[] rawBytes = new byte[toRead];
   try {
     // we have to read in chunks because the decoded stream won't
     // read more than about 2000 bytes at a time
     while (totalRead < toRead) {
       int actualRead = ais.read(rawBytes, totalRead, toRead - totalRead);
       if (actualRead < 1) {
         break;
       }
       totalRead += actualRead;
     }
   } catch (Exception ioe) {
     error("Error loading file into memory: " + ioe.getMessage());
   } finally {
     // BUG FIX: previously the stream leaked when read() threw, because
     // close() was only reached on the success path; always close it
     try {
       ais.close();
     } catch (IOException e) {
       // nothing useful to do if closing fails; the data is already read
     }
   }
   debug("Needed to read " + toRead + " actually read " + totalRead);
   // only the bytes actually read are used; getFormat() is valid after close
   samples.initFromByteArray(rawBytes, 0, totalRead, ais.getFormat());
   return samples;
 }