Example #1
 public Mixer getMixer(Mixer.Info info) {
   if (TDebug.TraceMixerProvider) {
     TDebug.out("TMixerProvider.getMixer(): begin");
   }
   MixerProviderStruct struct = getMixerProviderStruct();
   Mixer mixerResult = null;
   synchronized (struct) {
     if (info == null) {
       mixerResult = struct.m_defaultMixer;
     } else {
       Iterator mixers = struct.m_mixers.iterator();
       while (mixers.hasNext()) {
         Mixer mixer = (Mixer) mixers.next();
         if (mixer.getMixerInfo().equals(info)) {
           mixerResult = mixer;
           break;
         }
       }
     }
   }
   if (mixerResult == null) {
     throw new IllegalArgumentException("no mixer available for " + info);
   }
   if (TDebug.TraceMixerProvider) {
     TDebug.out("TMixerProvider.getMixer(): end");
   }
   return mixerResult;
 }
Example #2
  public void storePreferences() {
    ISettingsManager props = properties;

    props.setOptionsSoundEnableSound(getEnableSoundCheckBox().isSelected());
    props.setOptionsSoundEnableGunshot(getEnableGunshotCheckBox().isSelected());
    props.setOptionsSoundEnableBulletHit(getEnableBulletHitCheckBox().isSelected());
    props.setOptionsSoundEnableRobotDeath(getEnableRobotDeathCheckBox().isSelected());
    props.setOptionsSoundEnableRobotCollision(getEnableRobotCollisionCheckBox().isSelected());
    props.setOptionsSoundEnableWallCollision(getEnableWallCollisionCheckBox().isSelected());
    props.setOptionsSoundEnableMixerVolume(getEnableMixerVolumeCheckBox().isSelected());
    props.setOptionsSoundEnableMixerPan(getEnableMixerPanCheckBox().isSelected());

    String mixerClassName = null;
    Mixer.Info mixerInfo = (Mixer.Info) getMixerComboBox().getSelectedItem();

    if (mixerInfo != null) {
      Mixer mixer = AudioSystem.getMixer(mixerInfo);

      if (mixer != null) {
        mixerClassName = mixer.getClass().getSimpleName();
      }
    }
    if (mixerClassName != null) {
      props.setOptionsSoundMixer(mixerClassName);
    }

    properties.saveProperties();
  }
Example #3
  public SonarSoundEngine(int maxChannels) throws LineUnavailableException {
    silentSample = new SonarSample(new float[] {0}, 44100);
    Mixer mixer = AudioSystem.getMixer(null);

    sdl = (SourceDataLine) mixer.getLine(new Line.Info(SourceDataLine.class));
    sdl.open(new AudioFormat(rate, 16, 2, true, false), bufferSize * 2 * 2 * 2 * 2 * 2);
    soundBuffer.order(ByteOrder.LITTLE_ENDIAN);
    sdl.start();

    try {
      /*            FloatControl volumeControl = (FloatControl) sdl.getControl(FloatControl.Type.MASTER_GAIN);
      volumeControl.setValue(volumeControl.getMaximum());*/
    } catch (IllegalArgumentException e) {
      // System.out.println("Failed to set the sound volume");
    }

    listenerMixer = new ListenerMixer(maxChannels);

    leftBuf = new float[bufferSize];
    rightBuf = new float[bufferSize];

    Thread thread = new Thread(this);
    thread.setDaemon(true);
    thread.setPriority(10);
    thread.start();
  }
Example #4
  /** Does any clean up before closing. */
  protected void cleanUp() {
    // signal to unpause
    setPaused(false);

    // close the mixer (stops any running sounds)
    Mixer mixer = AudioSystem.getMixer(null);
    if (mixer.isOpen()) {
      mixer.close();
    }
  }
Example #5
 // Constructor takes the name to save the decoded track as, in the form of a string, e.g. "track1.wav"
 public FlacTrackPlayer(String trackName) {
   player = null;
   track = new File(trackName);
   Mixer mixer = AudioSystem.getMixer(AudioSystem.getMixerInfo()[0]);
   try {
     mixer.open();
   } catch (LineUnavailableException e) {
     // TODO Auto-generated catch block
     System.out.println("huh?");
     e.printStackTrace();
   }
 }
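The constructor above indexes AudioSystem.getMixerInfo()[0] directly, which fails on a machine that reports no mixers. A minimal standalone sketch of the same lookup with that case guarded (the class name and printed messages are illustrative, not part of the original project):

 import javax.sound.sampled.*;

 // Hypothetical sketch: open the first reported mixer, as the constructor does,
 // but guard against an empty Mixer.Info array.
 public class FirstMixerSketch {
   public static void main(String[] args) {
     Mixer.Info[] infos = AudioSystem.getMixerInfo();
     if (infos.length == 0) {
       System.out.println("No mixers available");
       return;
     }
     Mixer mixer = AudioSystem.getMixer(infos[0]);
     try {
       mixer.open();
       System.out.println("Opened mixer: " + mixer.getMixerInfo().getName());
     } catch (LineUnavailableException e) {
       e.printStackTrace();
     } finally {
       if (mixer.isOpen()) {
         mixer.close();
       }
     }
   }
 }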
Example #6
  @Ignore
  @Test
  public void testSilenceWriter()
      throws UnsupportedAudioFileException, InterruptedException, LineUnavailableException,
          FileNotFoundException {
    float sampleRate = 44100;
    int bufferSize = 1024;
    int overlap = 0;

    // available mixers
    int index = 0;
    int selectedMixerIndex = 4;
    for (Mixer.Info mixer : AudioSystem.getMixerInfo()) {
      System.out.println(index + ": " + Shared.toLocalString(mixer));
      index++;
    }
    Mixer.Info selectedMixer = AudioSystem.getMixerInfo()[selectedMixerIndex];
    System.out.println("Selected mixer: " + Shared.toLocalString(selectedMixer));

    // open a line
    final Mixer mixer = AudioSystem.getMixer(selectedMixer);
    final AudioFormat format = new AudioFormat(sampleRate, 16, 1, true, true);
    final DataLine.Info dataLineInfo = new DataLine.Info(TargetDataLine.class, format);
    TargetDataLine line;
    line = (TargetDataLine) mixer.getLine(dataLineInfo);
    final int numberOfSamples = bufferSize;
    line.open(format, numberOfSamples);
    line.start();
    final AudioInputStream stream = new AudioInputStream(line);

    // create a new dispatcher
    AudioDispatcher dispatcher = new AudioDispatcher(stream, bufferSize, overlap);

    WaveformWriter writer = new WaveformWriter(format, "01.file.wav");
    // add a processor, handle percussion event.
    dispatcher.addAudioProcessor(new SilenceDetector());
    dispatcher.addAudioProcessor(writer);

    // run the dispatcher on a new thread.
    new Thread(dispatcher).start();

    Thread.sleep(3000);

    dispatcher.removeAudioProcessor(writer);
    writer = new WaveformWriter(format, "02.file.wav");
    dispatcher.addAudioProcessor(writer);

    Thread.sleep(3000);

    dispatcher.stop();
  }
Example #7
 /**
  * Inits a DataLine.<br>
  * We check if the line supports Gain and Pan controls.
  *
  * <p>From the AudioInputStream, i.e. from the sound file, we fetch information about the format
  * of the audio data. This information includes the sampling frequency, the number of channels and
  * the size of the samples. This information is needed to ask JavaSound for a suitable output
  * line for this audio file. Furthermore, we have to give JavaSound a hint about how big the
  * internal buffer for the line should be. Here, we say AudioSystem.NOT_SPECIFIED, signaling that
  * we don't care about the exact size. JavaSound will use some default value for the buffer size.
  */
 protected void createLine() throws LineUnavailableException {
   log.info("Create Line");
   if (m_line == null) {
     AudioFormat sourceFormat = m_audioInputStream.getFormat();
     log.info("Create Line : Source format : " + sourceFormat.toString());
     int nSampleSizeInBits = sourceFormat.getSampleSizeInBits();
     if (nSampleSizeInBits <= 0) {
       nSampleSizeInBits = 16;
     }
     if ((sourceFormat.getEncoding() == AudioFormat.Encoding.ULAW)
         || (sourceFormat.getEncoding() == AudioFormat.Encoding.ALAW)) {
       nSampleSizeInBits = 16;
     }
     if (nSampleSizeInBits != 8) {
       nSampleSizeInBits = 16;
     }
     AudioFormat targetFormat =
         new AudioFormat(
             AudioFormat.Encoding.PCM_SIGNED,
             sourceFormat.getSampleRate(),
             nSampleSizeInBits,
             sourceFormat.getChannels(),
             sourceFormat.getChannels() * (nSampleSizeInBits / 8),
             sourceFormat.getSampleRate(),
             false);
     log.info("Create Line : Target format: " + targetFormat);
     // Keep a reference on encoded stream to progress notification.
     m_encodedaudioInputStream = m_audioInputStream;
     try {
       // Get total length in bytes of the encoded stream.
       encodedLength = m_encodedaudioInputStream.available();
     } catch (IOException e) {
       log.log(Level.SEVERE, "Cannot get m_encodedaudioInputStream.available()", e);
     }
     // Create decoded stream.
     m_audioInputStream = AudioSystem.getAudioInputStream(targetFormat, m_audioInputStream);
     AudioFormat audioFormat = m_audioInputStream.getFormat();
     DataLine.Info info = new DataLine.Info(SourceDataLine.class, audioFormat, -1);
     Mixer mixer = getMixer(m_mixerName);
     if (mixer != null) {
       log.info("Mixer : " + mixer.getMixerInfo().toString());
       m_line = (SourceDataLine) mixer.getLine(info);
     } else {
       m_line = (SourceDataLine) AudioSystem.getLine(info);
       m_mixerName = null;
     }
     log.info("Line : " + m_line.toString());
     log.info("Line Info : " + m_line.getLineInfo().toString());
     log.info("Line AudioFormat: " + m_line.getFormat().toString());
   }
 }
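The -1 passed to DataLine.Info above is the same value as AudioSystem.NOT_SPECIFIED that the comment refers to. A minimal, self-contained sketch of that lookup pattern, with an assumed 44.1 kHz stereo PCM format and an illustrative class name:

 import javax.sound.sampled.*;

 // Hypothetical sketch: ask JavaSound for a SourceDataLine matching a PCM format,
 // leaving the internal buffer size unspecified so a default is chosen.
 public class LineLookupSketch {
   public static void main(String[] args) throws LineUnavailableException {
     AudioFormat target = new AudioFormat(
         AudioFormat.Encoding.PCM_SIGNED, 44100f, 16, 2, 4, 44100f, false);
     DataLine.Info info =
         new DataLine.Info(SourceDataLine.class, target, AudioSystem.NOT_SPECIFIED);
     SourceDataLine line = (SourceDataLine) AudioSystem.getLine(info);
     line.open(target);  // buffer size left to JavaSound (AudioSystem.NOT_SPECIFIED == -1)
     line.start();
     line.close();
   }
 }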
Example #8
 public List getMixers() {
   ArrayList mixers = new ArrayList();
   Mixer.Info[] mInfos = AudioSystem.getMixerInfo();
   if (mInfos != null) {
     for (int i = 0; i < mInfos.length; i++) {
       Line.Info lineInfo = new Line.Info(SourceDataLine.class);
       Mixer mixer = AudioSystem.getMixer(mInfos[i]);
       if (mixer.isLineSupported(lineInfo)) {
         mixers.add(mInfos[i].getName());
       }
     }
   }
   return mixers;
 }
Example #9
 TargetDataLine getTargetDataLine(AudioFormat format, int bufferSize) {
   TargetDataLine line = null;
   DataLine.Info info = new DataLine.Info(TargetDataLine.class, format);
   if (AudioSystem.isLineSupported(info)) {
     try {
       if (inputMixer == null) {
         line = (TargetDataLine) AudioSystem.getLine(info);
       } else {
         line = (TargetDataLine) inputMixer.getLine(info);
       }
       line.open(format, bufferSize * format.getFrameSize());
       debug(
           "TargetDataLine buffer size is "
               + line.getBufferSize()
               + "\n"
               + "TargetDataLine format is "
               + line.getFormat().toString()
               + "\n"
               + "TargetDataLine info is "
               + line.getLineInfo().toString());
     } catch (Exception e) {
       error("Error acquiring TargetDataLine: " + e.getMessage());
     }
   } else {
     error("Unable to return a TargetDataLine: unsupported format - " + format.toString());
   }
   return line;
 }
Example #10
 SourceDataLine getSourceDataLine(AudioFormat format, int bufferSize) {
   SourceDataLine line = null;
   DataLine.Info info = new DataLine.Info(SourceDataLine.class, format);
   if (AudioSystem.isLineSupported(info)) {
     try {
       if (outputMixer == null) {
         line = (SourceDataLine) AudioSystem.getLine(info);
       } else {
         line = (SourceDataLine) outputMixer.getLine(info);
       }
       // remember that time you spent, like, an entire afternoon fussing
       // with this buffer size to try to get the latency decent on Linux?
       // Yah, don't fuss with this anymore, ok?
       line.open(format, bufferSize * format.getFrameSize() * 4);
       if (line.isOpen()) {
         debug(
             "SourceDataLine is "
                 + line.getClass().toString()
                 + "\n"
                 + "Buffer size is "
                 + line.getBufferSize()
                 + " bytes.\n"
                 + "Format is "
                 + line.getFormat().toString()
                 + ".");
         return line;
       }
     } catch (LineUnavailableException e) {
       error("Couldn't open the line: " + e.getMessage());
     }
   }
   error("Unable to return a SourceDataLine: unsupported format - " + format.toString());
   return line;
 }
Example #11
 @Override
 public int hashCode() {
   final int prime = 31;
   int result = 1;
   result = prime * result + ((mMixer == null) ? 0 : mMixer.hashCode());
   result = prime * result + ((mMixerChannel == null) ? 0 : mMixerChannel.hashCode());
   return result;
 }
Example #12
  public TargetDataLine getInputLine(AudioFormat format) throws LineUnavailableException {
    TargetDataLine in;

    DataLine.Info info = new DataLine.Info(TargetDataLine.class, format);
    in = (TargetDataLine) mixer.getLine(info);
    in.open(format, in.getBufferSize());
    return in;
  }
Example #13
 private Line.Info[] getPortInfo(Mixer mixer) {
   Line.Info[] infos;
   List<Line.Info> portInfoList = new ArrayList<>();
   infos = mixer.getSourceLineInfo();
   for (Line.Info info : infos) {
     if (info instanceof Port.Info || info instanceof DataLine.Info) {
       portInfoList.add(info);
     }
   }
   infos = mixer.getTargetLineInfo();
   for (Line.Info info1 : infos) {
     if (info1 instanceof Port.Info || info1 instanceof DataLine.Info) {
       portInfoList.add(info1);
     }
   }
   return portInfoList.toArray(EMPTY_PORT_INFO_ARRAY);
 }
Example #14
  public SourceDataLine getOutputLine(AudioFormat format) throws LineUnavailableException {
    SourceDataLine out;

    DataLine.Info info = new DataLine.Info(SourceDataLine.class, format);
    out = (SourceDataLine) mixer.getLine(info);
    out.open(format, out.getBufferSize());
    return out;
  }
Example #15
  public String toString() {
    StringBuilder sb = new StringBuilder();

    sb.append(mMixer.getMixerInfo().getName());
    sb.append(" - ");
    sb.append(mMixerChannel.name());

    return sb.toString();
  }
Example #16
 public Mixer.Info[] getMixerInfo() {
   if (TDebug.TraceMixerProvider) {
     TDebug.out("TMixerProvider.getMixerInfo(): begin");
   }
   Set<Mixer.Info> mixerInfos = new HashSet<Mixer.Info>();
   MixerProviderStruct struct = getMixerProviderStruct();
   synchronized (struct) {
     Iterator<Mixer> mixers = struct.m_mixers.iterator();
     while (mixers.hasNext()) {
       Mixer mixer = mixers.next();
       mixerInfos.add(mixer.getMixerInfo());
     }
   }
   if (TDebug.TraceMixerProvider) {
     TDebug.out("TMixerProvider.getMixerInfo(): end");
   }
   return mixerInfos.toArray(EMPTY_MIXER_INFO_ARRAY);
 }
Example #17
 private boolean arePortsSupported(Mixer mixer) {
   Line.Info[] infos;
   infos = mixer.getSourceLineInfo();
   for (Line.Info info : infos) {
     if (info instanceof Port.Info) {
       return true;
     } else if (info instanceof DataLine.Info) {
       return true;
     }
   }
   infos = mixer.getTargetLineInfo();
   for (Line.Info info : infos) {
     if (info instanceof Port.Info) {
       return true;
     } else if (info instanceof DataLine.Info) {
       return true;
     }
   }
   return false;
 }
Example #18
File: Audio.java  Project: nikil511/openhab
 private static void runVolumeCommand(Closure closure) {
   Mixer.Info[] infos = AudioSystem.getMixerInfo();
   for (Mixer.Info info : infos) {
     Mixer mixer = AudioSystem.getMixer(info);
     if (mixer.isLineSupported(Port.Info.SPEAKER)) {
       Port port;
       try {
         port = (Port) mixer.getLine(Port.Info.SPEAKER);
         port.open();
         if (port.isControlSupported(FloatControl.Type.VOLUME)) {
           FloatControl volume = (FloatControl) port.getControl(FloatControl.Type.VOLUME);
           closure.execute(volume);
         }
         port.close();
       } catch (LineUnavailableException e) {
         logger.error("Cannot access master volume control", e);
       }
     }
   }
 }
Example #19
  private boolean setupSpeaker(String device) throws LineUnavailableException {

    Mixer.Info[] aInfos = AudioSystem.getMixerInfo();

    for (int i = 0; i < aInfos.length; i++) {
      Mixer.Info mixerInfo = aInfos[i];

      if (!GetDataLines.equals(device, mixerInfo)) {
        if (Logger.logLevel >= Logger.LOG_MOREINFO) {
          Logger.println("Skipping:  " + mixerInfo.getName() + "," + mixerInfo.getDescription());
        }

        continue;
      }

      try {
        Mixer mixer = AudioSystem.getMixer(mixerInfo);

        Line.Info[] infos = mixer.getSourceLineInfo();

        for (int j = 0; j < infos.length; j++) {
          Line line = (Line) mixer.getLine(infos[j]);

          if (line instanceof SourceDataLine) {
            speaker = (SourceDataLine) line;

            if (Logger.logLevel >= Logger.LOG_INFO) {
              Logger.println("Found speaker:  " + j);
            }
            break;
          }
        }
      } catch (Exception e) {
        if (Logger.logLevel >= Logger.LOG_MOREINFO) {
          Logger.println("Exception:  " + e.getMessage());
        }
      }
    }

    return speaker != null;
  }
Example #20
  private void mixerComboBoxActionPerformed() {
    Mixer mixer = AudioSystem.getMixer((Mixer.Info) mixerComboBox.getSelectedItem());

    Line.Info lineInfo = mixer.getSourceLineInfo(new Line.Info(Clip.class))[0];

    boolean volumeSupported;
    boolean panSupported;

    try {
      Line line = mixer.getLine(lineInfo);

      volumeSupported = line.isControlSupported(FloatControl.Type.MASTER_GAIN);
      panSupported = line.isControlSupported(FloatControl.Type.PAN);
    } catch (LineUnavailableException e) {
      volumeSupported = false;
      panSupported = false;
    }

    enableMixerVolumeCheckBox.setEnabled(volumeSupported);
    enableMixerPanCheckBox.setEnabled(panSupported);
  }
Example #21
File: at1.java  Project: BerndDammer/at1
 private static void d1() {
   Mixer.Info[] fmi = AudioSystem.getMixerInfo();
   for (Mixer.Info mi : fmi) {
     Mixer mixer = AudioSystem.getMixer(mi);
     Line.Info[] isl = mixer.getSourceLineInfo();
     Line.Info[] itl = mixer.getTargetLineInfo();
     // if( itl.length > 0)
     {
       error.log("--------------------------------------------------------------");
       error.log("Src Count " + isl.length + " Target size " + itl.length);
       error.log(mi.toString());
       error.log("Name " + mi.getName());
       error.log("Version " + mi.getVersion());
       error.log("Vendor " + mi.getVendor());
       error.log("Description " + mi.getDescription());
       error.log("~~~");
       dumpLines(isl, "  >> SourceLine  : ");
       dumpLines(itl, "  >> TargetLine  : ");
     }
   }
 }
Example #22
  /**
   * Close the audio clip. There is a bunch of code here attempting to close the audio stream. Turns
   * out there is a bug and the only way to close out the background audio threads is to do a
   * System.exit().
   */
  public void close() {
    if (audioClip == null) return;

    synchronized (audioClip) {
      audioClip.removeLineListener(this);
      audioClip.close();
      try {
        audioStream.close();
      } catch (IOException e) {
        System.out.println(e.getMessage());
      }

      // close the mixer (stops any running sounds)
      Mixer mixer = AudioSystem.getMixer(null);
      if (mixer.isOpen()) {
        mixer.close();
      }

      audioState = AudioState.DEAD;
    }
  }
Example #23
 @Override
 public boolean equals(Object obj) {
   if (this == obj) return true;
   if (obj == null) return false;
   if (getClass() != obj.getClass()) return false;
   MixerChannelConfiguration other = (MixerChannelConfiguration) obj;
   if (mMixer == null) {
     if (other.mMixer != null) return false;
   } else if (!mMixer.equals(other.mMixer)) return false;
   if (mMixerChannel != other.mMixerChannel) return false;
   return true;
 }
Example #24
  public Mixer getMixer(Mixer.Info info) {
    // if the default device is asked, we provide the mixer
    // with SourceDataLine's
    if (info == null) {
      for (int i = 0; i < infos.length; i++) {
        Mixer mixer = getDevice(infos[i]);
        if (mixer.getSourceLineInfo().length > 0) {
          return mixer;
        }
      }
    }
    // otherwise get the first mixer that matches
    // the requested info object
    for (int i = 0; i < infos.length; i++) {
      if (infos[i].equals(info)) {
        return getDevice(infos[i]);
      }
    }

    throw new IllegalArgumentException(
        "Mixer " + info + " not supported by this provider.");
  }
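A standalone sketch of the default-device rule the comments above describe: scan the installed mixers and take the first one that exposes source lines. The class name is illustrative, and the sketch queries AudioSystem directly rather than the provider's cached infos array:

 import javax.sound.sampled.*;

 // Hypothetical sketch of the "default device" selection: first mixer offering
 // SourceDataLine-style (source) lines wins.
 public class DefaultPlaybackMixerSketch {
   public static void main(String[] args) {
     for (Mixer.Info info : AudioSystem.getMixerInfo()) {
       Mixer mixer = AudioSystem.getMixer(info);
       if (mixer.getSourceLineInfo().length > 0) {
         System.out.println("First mixer with source lines: " + info.getName());
         return;
       }
     }
     System.out.println("No mixer with source lines found");
   }
 }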
Example #25
  private static Mixer.Info[] getAvailableMixers(boolean isTarget) {
    ArrayList<Mixer.Info> mixers =
        new ArrayList<Mixer.Info>(Arrays.asList((Mixer.Info[]) AudioSystem.getMixerInfo()));
    for (Iterator<Mixer.Info> it = mixers.iterator(); it.hasNext(); ) {
      Mixer.Info minfo = it.next();
      Mixer mixer = AudioSystem.getMixer(minfo);

      Line.Info[] linfo = (isTarget) ? mixer.getTargetLineInfo() : mixer.getSourceLineInfo();
      boolean hasDataLine = false;
      for (int j = 0; j < linfo.length; j++) {
        if (linfo[j] instanceof DataLine.Info) {
          hasDataLine = true;
          break;
        }
      }
      if (!hasDataLine) {
        it.remove();
      }
    }

    return mixers.toArray(new Mixer.Info[mixers.size()]);
  }
Example #26
  private void createMixerChildren(JavaMixer.MixerNode mixerNode) {
    Mixer mixer = mixerNode.getMixer();
    Line.Info[] infosToCheck = getPortInfo(mixer);
    for (Line.Info anInfosToCheck : infosToCheck) {
      if (mixer.isLineSupported(anInfosToCheck)) {
        Port port = null;
        DataLine dLine = null;

        int maxLines = mixer.getMaxLines(anInfosToCheck);
        // Workaround to prevent a JVM crash on Mac OS X (Intel) 1.5.0_07 JVM
        if (maxLines > 0) {
          try {
            if (anInfosToCheck instanceof Port.Info) {
              port = (Port) mixer.getLine(anInfosToCheck);
              port.open();
            } else if (anInfosToCheck instanceof DataLine.Info) {
              dLine = (DataLine) mixer.getLine(anInfosToCheck);
              if (!dLine.isOpen()) {
                dLine.open();
              }
            }
          } catch (LineUnavailableException e) {
            e.printStackTrace();
          } catch (Exception e) {
            // Do Nothing
          }
        }
        if (port != null) {
          JavaMixer.PortNode portNode = new JavaMixer.PortNode(port);
          createPortChildren(portNode);
          mixerNode.add(portNode);
        } else if (dLine != null) {
          JavaMixer.PortNode portNode = new JavaMixer.PortNode(dLine);
          createPortChildren(portNode);
          mixerNode.add(portNode);
        }
      }
    }
  }
Example #27
  private void setNewMixer(Mixer mixer)
      throws LineUnavailableException, UnsupportedAudioFileException {

    if (dispatcher != null) {
      dispatcher.stop();
    }
    if (fileName == null) {
      final AudioFormat format = new AudioFormat(sampleRate, 16, 1, true, false);
      final DataLine.Info dataLineInfo = new DataLine.Info(TargetDataLine.class, format);
      TargetDataLine line;
      line = (TargetDataLine) mixer.getLine(dataLineInfo);
      final int numberOfSamples = bufferSize;
      line.open(format, numberOfSamples);
      line.start();
      final AudioInputStream stream = new AudioInputStream(line);

      // create a new dispatcher
      dispatcher = new AudioDispatcher(stream, bufferSize, overlap);
    } else {
      try {
        File audioFile = new File(fileName);
        dispatcher = AudioDispatcher.fromFile(audioFile, bufferSize, overlap);
        AudioFormat format = AudioSystem.getAudioFileFormat(audioFile).getFormat();
        dispatcher.addAudioProcessor(new AudioPlayer(format));
      } catch (IOException e) {
        // TODO Auto-generated catch block
        e.printStackTrace();
      }
    }
    currentMixer = mixer;

    // add a processor, handle pitch event.
    dispatcher.addAudioProcessor(new PitchProcessor(algo, sampleRate, bufferSize, this));
    dispatcher.addAudioProcessor(fftProcessor);

    // run the dispatcher (on a new thread).
    new Thread(dispatcher, "Audio dispatching").start();
  }
Example #28
 /**
  * Gets the maximum number of simultaneous sounds with the specified AudioFormat that the default
  * mixer can play.
  */
 public static int getMaxSimultaneousSounds(AudioFormat playbackFormat) {
   DataLine.Info lineInfo = new DataLine.Info(SourceDataLine.class, playbackFormat);
   Mixer mixer = AudioSystem.getMixer(null);
   return mixer.getMaxLines(lineInfo);
 }
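A brief usage sketch for the helper above, with an assumed 44.1 kHz stereo playback format and an illustrative class name. Note that Mixer.getMaxLines() may return AudioSystem.NOT_SPECIFIED when the mixer reports no fixed limit:

 import javax.sound.sampled.*;

 // Hypothetical caller: ask the default mixer how many matching SourceDataLines
 // it can supply at once.
 public class MaxSoundsSketch {
   public static void main(String[] args) {
     AudioFormat playbackFormat = new AudioFormat(44100f, 16, 2, true, false);
     DataLine.Info lineInfo = new DataLine.Info(SourceDataLine.class, playbackFormat);
     Mixer mixer = AudioSystem.getMixer(null);
     int maxLines = mixer.getMaxLines(lineInfo);
     System.out.println(maxLines == AudioSystem.NOT_SPECIFIED
         ? "No fixed limit reported"
         : "Max simultaneous sounds: " + maxLines);
   }
 }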
Example #29
  /**
   * Single audio channel playback with automatic starting and stopping of the underlying
   * sourcedataline specified by the mixer and mixer channel arguments.
   *
   * <p>Maintains an internal non-blocking audio packet queue and processes this queue 25 times a
   * second (every 40 ms).
   *
   * @param threadPoolManager for assigning buffer processing schedule task
   * @param mixer to obtain source data line
   * @param mixerChannel either mono or left/right stereo
   * @param audioFormat to use during playback
   * @param lineInfo to use when obtaining the source data line
   * @param requestedBufferSize of approximately 1 second of audio
   */
  public AudioOutput(
      ThreadPoolManager threadPoolManager,
      Mixer mixer,
      MixerChannel mixerChannel,
      AudioFormat audioFormat,
      Line.Info lineInfo,
      int requestedBufferSize) {
    mThreadPoolManager = threadPoolManager;
    mMixer = mixer;
    mMixerChannel = mixerChannel;

    try {
      mOutput = (SourceDataLine) mMixer.getLine(lineInfo);

      if (mOutput != null) {
        mOutput.open(audioFormat, requestedBufferSize);

        // Start threshold: buffer is full with 10% or less of capacity remaining
        mBufferStartThreshold = (int) (mOutput.getBufferSize() * 0.10);

        // Stop threshold: buffer is empty with 90% or more capacity available
        mBufferStopThreshold = (int) (mOutput.getBufferSize() * 0.90);

        mOutput.addLineListener(this);

        if (mOutput != null) {
          try {
            Control gain = mOutput.getControl(FloatControl.Type.MASTER_GAIN);
            mGainControl = (FloatControl) gain;
          } catch (IllegalArgumentException iae) {
            mLog.warn(
                "Couldn't obtain MASTER GAIN control for stereo line ["
                    + mixer.getMixerInfo().getName()
                    + " | "
                    + getChannelName()
                    + "]");
          }

          try {
            Control mute = mOutput.getControl(BooleanControl.Type.MUTE);
            mMuteControl = (BooleanControl) mute;
          } catch (IllegalArgumentException iae) {
            mLog.warn(
                "Couldn't obtain MUTE control for stereo line ["
                    + mixer.getMixerInfo().getName()
                    + " | "
                    + getChannelName()
                    + "]");
          }

          /* Run the queue processor task every 40 milliseconds or 25 times a second */
          mProcessorTask =
              mThreadPoolManager.scheduleFixedRate(
                  ThreadType.AUDIO_PROCESSING, new BufferProcessor(), 40, TimeUnit.MILLISECONDS);
        }

        mAudioStartEvent = new AudioEvent(AudioEvent.Type.AUDIO_STARTED, getChannelName());
        mAudioStopEvent = new AudioEvent(AudioEvent.Type.AUDIO_STOPPED, getChannelName());

        mCanProcessAudio = true;
      }
    } catch (LineUnavailableException e) {
      mLog.error(
          "Couldn't obtain audio source data line for "
              + "audio output - mixer ["
              + mMixer.getMixerInfo().getName()
              + "]");
    }
  }
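A small arithmetic sketch of the buffer sizing the constructor expects: roughly one second of audio for the requested size, with the 10%/90% start/stop thresholds mirroring the comments above. The 8 kHz mono format and class name are assumptions for illustration; the real code derives its thresholds from the line's actual buffer size after open():

 import javax.sound.sampled.AudioFormat;

 // Hypothetical sizing helper, not part of the original class.
 public class BufferSizingSketch {
   public static void main(String[] args) {
     AudioFormat format = new AudioFormat(8000f, 16, 1, true, false);
     int requestedBufferSize = (int) (format.getSampleRate() * format.getFrameSize()); // ~1 second
     int startThreshold = (int) (requestedBufferSize * 0.10); // start once <= 10% of capacity remains
     int stopThreshold = (int) (requestedBufferSize * 0.90);  // stop once >= 90% of capacity is free
     System.out.println(requestedBufferSize + " bytes requested, start at "
         + startThreshold + ", stop at " + stopThreshold);
   }
 }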
Example #30
  @SuppressWarnings("SleepWhileInLoop")
  @Override
  boolean bindAudioBuffer(AudioBuffer audioBuffer) {
    // First check we've been initialised
    if (!initialised) {
      return false;
    }

    // Wait for AudioBuffer to be loaded, or 20 seconds
    long startTime = System.currentTimeMillis();
    while (audioBuffer.getState() != AudioBuffer.STATE_LOADED
        && System.currentTimeMillis() - startTime < 20000) {
      try {
        Thread.sleep(50);
      } catch (InterruptedException ex) {
      }
    }

    if (audioBuffer instanceof JavaSoundAudioBuffer
        && audioBuffer.getState() == AudioBuffer.STATE_LOADED) {
      // Cast to JavaSoundAudioBuffer to enable easier access to specific methods
      JavaSoundAudioBuffer buffer = (JavaSoundAudioBuffer) audioBuffer;

      // Get a JavaSound DataLine and Clip
      DataLine.Info lineInfo;
      lineInfo = new DataLine.Info(Clip.class, buffer.getAudioFormat());
      Clip newClip;
      try {
        newClip = (Clip) mixer.getLine(lineInfo);
      } catch (LineUnavailableException ex) {
        log.warn(
            "Error binding JavaSoundSource ("
                + this.getSystemName()
                + ") to AudioBuffer ("
                + this.getAssignedBufferName()
                + ") "
                + ex);
        return false;
      }

      this.clip = newClip;

      try {
        clip.open(
            buffer.getAudioFormat(),
            buffer.getDataStorageBuffer(),
            0,
            buffer.getDataStorageBuffer().length);
      } catch (LineUnavailableException ex) {
        log.warn(
            "Error binding JavaSoundSource ("
                + this.getSystemName()
                + ") to AudioBuffer ("
                + this.getAssignedBufferName()
                + ")"
                + ex);
      }
      if (log.isDebugEnabled()) {
        log.debug(
            "Bind JavaSoundAudioSource ("
                + this.getSystemName()
                + ") to JavaSoundAudioBuffer ("
                + audioBuffer.getSystemName()
                + ")");
      }
      return true;
    } else {
      log.warn(
          "AudioBuffer not loaded error when binding JavaSoundSource ("
              + this.getSystemName()
              + ") to AudioBuffer ("
              + this.getAssignedBufferName()
              + ")");
      return false;
    }
  }