Example 1
 SourceDataLine getSourceDataLine(AudioFormat format, int bufferSize) {
   SourceDataLine line = null;
   DataLine.Info info = new DataLine.Info(SourceDataLine.class, format);
   if (AudioSystem.isLineSupported(info)) {
     try {
       if (outputMixer == null) {
         line = (SourceDataLine) AudioSystem.getLine(info);
       } else {
         line = (SourceDataLine) outputMixer.getLine(info);
       }
        // This buffer size took an entire afternoon of fiddling to get the
        // latency decent on Linux, so don't fuss with the multiplier anymore.
       line.open(format, bufferSize * format.getFrameSize() * 4);
       if (line.isOpen()) {
         debug(
             "SourceDataLine is "
                 + line.getClass().toString()
                 + "\n"
                 + "Buffer size is "
                 + line.getBufferSize()
                 + " bytes.\n"
                 + "Format is "
                 + line.getFormat().toString()
                 + ".");
         return line;
       }
     } catch (LineUnavailableException e) {
       error("Couldn't open the line: " + e.getMessage());
     }
   }
   error("Unable to return a SourceDataLine: unsupported format - " + format.toString());
   return line;
 }
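For orientation, a line returned by a helper like this is typically driven with the standard javax.sound.sampled write loop. A minimal sketch; the format values, buffer size, and audioData source are placeholders, not from the original project:

 // Hypothetical call site for getSourceDataLine above
 AudioFormat format = new AudioFormat(44100f, 16, 2, true, false); // 44.1 kHz, 16-bit, stereo, signed, little-endian
 SourceDataLine line = getSourceDataLine(format, 1024);
 if (line != null) {
   line.start();                          // begin consuming queued data
   byte[] audioData = new byte[4096];     // placeholder: fill with PCM frames
   line.write(audioData, 0, audioData.length);
   line.drain();                          // block until the buffer has played out
   line.close();
 }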
Example 2
 @ActionDoc(text = "plays a sound from the sounds folder")
 public static void playSound(
     @ParamDoc(name = "filename", text = "the filename with extension") String filename) {
   try {
     InputStream is = new FileInputStream(SOUND_DIR + File.separator + filename);
     if (filename.toLowerCase().endsWith(".mp3")) {
       Player player = new Player(is);
       playInThread(player);
     } else {
       AudioInputStream ais = AudioSystem.getAudioInputStream(is);
       Clip clip = AudioSystem.getClip();
       clip.open(ais);
       playInThread(clip);
     }
    } catch (UnsupportedAudioFileException e) {
      logger.error("Format of sound file '{}' is not supported: {}", filename, e.getMessage());
    } catch (IOException | JavaLayerException | LineUnavailableException e) {
      // FileNotFoundException is an IOException, so the missing-file case is covered here too
      logger.error("Cannot play sound '{}': {}", filename, e.getMessage());
    }
 }
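One caveat with the Clip branch: neither the clip nor the stream is closed when playback ends. A common remedy (a sketch, not part of the original action; requires javax.sound.sampled.LineEvent and a Java 8 lambda) is a LineListener that releases the clip on STOP:

 // Hypothetical variant: close the clip once it has finished playing
 clip.addLineListener(event -> {
   if (event.getType() == LineEvent.Type.STOP) {
     event.getLine().close();
   }
 });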
Example 3
  // Round-robin over a fixed pool of lines; returns null if the next candidate is still active.
  // NOTE: the volume parameter is currently unused.
  public SourceDataLine getFreeLine(AudioFormat format, int volume) {
   try {
     synchronized (this) {
       DataLine.Info info = new DataLine.Info(SourceDataLine.class, format);
       if (currentLine >= lines.length) {
         currentLine = 0;
       }
       if (lines[currentLine] == null) {
         lines[currentLine] = (SourceDataLine) AudioSystem.getLine(info);
       }
       if (!lines[currentLine].isActive()) {
         return lines[currentLine++];
       }
     }
   } catch (LineUnavailableException e) {
      LOGGER.error(e.getMessage(), e);
   }
   return null;
 }
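A sketch of how such a pool might be driven; the pool variable, format, and pcmData are assumptions, since the rest of the class is not shown:

 // Hypothetical call site for the round-robin pool above (pool, format, pcmData assumed)
 try {
   SourceDataLine line = pool.getFreeLine(format, 100);
   if (line != null) {
     if (!line.isOpen()) {
       line.open(format); // lines from the pool may not have been opened yet
     }
     line.start();
     line.write(pcmData, 0, pcmData.length);
   }
 } catch (LineUnavailableException e) {
   LOGGER.error("No free line available", e);
 }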
Example 4
  private void testPlayer() {
    System.out.println("Testing getToneBytes");
    Tone tone = new Tone(48);
    double seconds = 1.4;
    byte[] bytes = Player.getToneBytes(tone, seconds);
    gAssEqInt(bytes.length, (int) (seconds * Player.SAMPLES_PER_SECOND));
    gAssEqByte(bytes[0], (byte) 0);
    // A stricter test would verify the actual sample values in the byte array,
    // but that means reproducing the synthesis math here; skipped for now.

    try {
      // Note a4 = new Note(new Tone(48), Note.Length.QUARTER);
      // Note a0 = new Note(new Tone(0), Note.Length.QUARTER);
      // Player.playNote(a4, 1.0);
      // Player.playNote(a0, 0.05);
      Player.click(0.05);
    } catch (LineUnavailableException e) {
      throw new RuntimeException("Line unavailable: " + e.getMessage());
    }
  }
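The skipped stricter check is straightforward for a sine tone: compute each expected sample and compare. A sketch, assuming Player synthesizes 8-bit signed sine samples at SAMPLES_PER_SECOND and that Tone exposes its frequency (neither is confirmed by the excerpt):

   // Hypothetical stricter assertion (frequency accessor and 8-bit scaling assumed)
   double frequency = tone.getFrequency();
   for (int i = 0; i < bytes.length; i++) {
     double t = (double) i / Player.SAMPLES_PER_SECOND;
     byte expected = (byte) (Math.sin(2 * Math.PI * frequency * t) * 127);
     gAssEqByte(bytes[i], expected);
   }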
Example 5
  // open up an audio stream
  private void init() {
    try {
      // 44,100 samples per second, 16-bit audio, stereo, signed PCM, little-endian
      AudioFormat format =
          new AudioFormat((float) SAMPLE_RATE, BITS_PER_SAMPLE, STEREO, true, false);
      DataLine.Info info = new DataLine.Info(SourceDataLine.class, format);

      line = (SourceDataLine) AudioSystem.getLine(info);
      line.open(format, SAMPLE_BUFFER_SIZE * BYTES_PER_SAMPLE);

      // The internal buffer is a fraction of the line's buffer size; the 1/3 here is arbitrary.
      // It is smaller because the buffered data can't be expected to line up exactly with when
      // the sound card decides to push out its samples.
      buffer = new byte[SAMPLE_BUFFER_SIZE * BYTES_PER_SAMPLE / 3];
    } catch (LineUnavailableException e) {
      System.err.println(e.getMessage());
      System.exit(1);
    }

    // no sound gets made before this call
    line.start();
  }
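Downstream of init(), code in this style typically converts each double sample to 16-bit little-endian PCM before writing to the line. A minimal sketch of that conversion; the play method name, the bufferIndex field, and the clamping policy are assumptions:

   // Hypothetical companion to init(): encode one sample as 16-bit little-endian PCM
   private void play(double sample) {
     // clamp to [-1, 1], then scale to the signed 16-bit range
     if (sample < -1.0) sample = -1.0;
     if (sample > +1.0) sample = +1.0;
     short s = (short) (sample * Short.MAX_VALUE);
     buffer[bufferIndex++] = (byte) s;          // low byte first (little-endian)
     buffer[bufferIndex++] = (byte) (s >> 8);   // high byte
     if (bufferIndex >= buffer.length) {
       line.write(buffer, 0, buffer.length);    // flush a full internal buffer to the line
       bufferIndex = 0;
     }
   }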
Example 6
  public SpeakerJavasoundImpl(int sampleRate, int channels, int bufferSize) throws IOException {

    this.sampleRate = sampleRate;
    this.channels = channels;
    this.bufferSize = bufferSize;

    chunkSize = RtpPacket.getDataSize(RtpPacket.PCM_ENCODING, sampleRate, channels);

    try {
      setupSpeaker();
    } catch (LineUnavailableException e) {
      throw new IOException(e); // preserve the cause instead of just its message
    }

    if (Logger.logLevel >= Logger.LOG_INFO) {
      Logger.println("New speaker " + sampleRate + "/" + channels + " bufferSize " + bufferSize);
    }

    double volumeLevel = Utils.getDoublePreference(VOLUME_LEVEL);

    if (volumeLevel != -1D) {
      this.volumeLevel = volumeLevel;
    }
  }
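The chunk size comes from the project-specific RtpPacket.getDataSize, which is not shown. For orientation, RTP audio commonly carries 20 ms of samples per packet; assuming that interval and 16-bit PCM, the computation works out as in this sketch:

   // Hypothetical equivalent of the PCM chunk-size computation (20 ms packets assumed)
   static int pcmChunkSize(int sampleRate, int channels) {
     int bytesPerSample = 2;                 // 16-bit PCM
     int samplesPer20Ms = sampleRate / 50;   // 20 ms = 1/50 s
     return samplesPer20Ms * channels * bytesPerSample;
   }
   // e.g. pcmChunkSize(8000, 1) == 320 bytes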
Example 7
  /**
   * Apply the given effect to this WaveTab's audio data
   *
   * @param effect The effect to apply
   */
  public void applyEffect(Effect effect) {
    Selection sel = waveDisplay.getSelection();
    if (sel.getLeft() == sel.getRight())
      waveDisplay.setSelection(new Selection(0, getTotalLength()));
    Thread thread = null;
    try {
      AudioInputStream stream = getAudioInputStream();
      int sourceChannels = stream.getFormat().getChannels();
      stream = AudioManager.getStereoInputStream(stream);
      final FXUnit unit = new FXUnit(effect);
      if (effect.needsAnalysis()) {
        Analyzer a = new Analyzer(unit, stream);
        ProgressMonitor monitor =
            new ProgressMonitor(getShell(), a, "Analyzing...", "Analyzing audio data");
        monitor.start();
        stream = AudioManager.getStereoInputStream(getAudioInputStream());
      }

      final SourceDataLine sourceLine = unit.getEffectSourceLine();
      sourceLine.open();
      sourceLine.start();
      final TargetDataLine targetLine = unit.getEffectTargetLine();
      targetLine.open();
      targetLine.start();
      if (!stream.getFormat().equals(sourceLine.getFormat())) {
        if (AudioSystem.isConversionSupported(sourceLine.getFormat(), stream.getFormat()))
          stream = AudioSystem.getAudioInputStream(sourceLine.getFormat(), stream);
        else {
          editor.errorMessage(
              "Unable to apply effect:\nFormat conversion from "
                  + stream.getFormat()
                  + " to "
                  + sourceLine.getFormat()
                  + " not supported.");
          return;
        }
      }

      final AudioInputStream inStream = stream;
      thread =
          new Thread() {
            public void run() {
              int numBytesRead = 0;
              byte[] buffer = new byte[sourceLine.getBufferSize()];
              while (numBytesRead != -1 && !getItem().isDisposed()) {
                try {
                  numBytesRead = inStream.read(buffer, 0, buffer.length);
                } catch (IOException e1) {
                  e1.printStackTrace();
                  numBytesRead = -1;
                }
                if (numBytesRead > 0) {
                  sourceLine.write(buffer, 0, numBytesRead);
                }
                try {
                  Thread.sleep(0, 1);
                } catch (InterruptedException e) {
                  break; // interrupted: stop feeding the effect chain
                }
              }
            }
          };
      thread.start();

      AudioInputStream in = new AudioInputStream(targetLine);
      if (sourceChannels == 1) in = AudioManager.getMonoInputStream(in);
      File tempFile = File.createTempFile("gmtmp_", ".wav");
      AudioFormat tempFormat =
          new AudioFormat(
              fileFormat.getFormat().getSampleRate(),
              16,
              fileFormat.getFormat().getChannels(),
              true,
              false);
      AudioFileOutputStream out =
          AudioManager.getDefault()
              .getAudioFileOutputStream(
                  tempFile, tempFormat, AudioFileFormat.Type.WAVE, null, null, null);
      if (!in.getFormat().equals(out.getFormat()))
        in = AudioSystem.getAudioInputStream(out.getFormat(), in);
      SaveFileThread saver =
          new SaveFileThread(
              in, out, (int) inStream.getFrameLength(), in.getFormat().getFrameSize(), true);
      ProgressMonitor monitor =
          new ProgressMonitor(
              getShell(), saver, "Apply Effect", "Applying " + effect.getName() + " to Selection");
      monitor.start();

      File tempPeak = File.createTempFile("gmtmp_", ".gmpk");
      CreatePeakFileThread peak =
          new CreatePeakFileThread(AudioSystem.getAudioInputStream(tempFile), tempPeak);
      monitor =
          new ProgressMonitor(
              getShell(), peak, "Creating peak file", "Creating peak file for applied effect.");
      monitor.start();

      PeakWaveForm pwf = new PeakWaveForm(tempPeak);
      AudioFileWaveForm awf = new AudioFileWaveForm(tempFile, pwf, 32 * 1024, 25);
      CutListSource newSource = new AudioFileSource(tempFile, awf);

      sel = waveDisplay.getSelection();
      int left = sel.getLeft();
      int right = sel.getRight();

      ReplaceElement el =
          new ReplaceElement(
              effect.getName(), newSource, left, right - left, fileFormat.getFormat());
      cutList.addElement(el);
      undoOperations.add(el);
      redoOperations.clear();
      thread.interrupt(); // cooperative shutdown; Thread.stop() is deprecated and unsafe
    } catch (NotReadyException
        | NotFinishedException
        | LineUnavailableException
        | IOException
        | UnsupportedAudioFileException e) {
      e.printStackTrace();
      editor.errorMessage(e.getMessage());
      if (thread != null) thread.interrupt();
    }
  }
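Note that sourceLine and targetLine are never closed on any path above. A variant could release them once the pump thread winds down; a sketch (the join timeout and cleanup placement are assumptions):

   // Hypothetical cleanup after the feeder thread exits (the lines are never closed above)
   try {
     thread.join(5000);            // wait for the feeder thread to finish
   } catch (InterruptedException e) {
     Thread.currentThread().interrupt();
   } finally {
     sourceLine.drain();
     sourceLine.close();
     targetLine.stop();
     targetLine.close();
   }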
Example 8
  /**
   * Creates a new <code>BZStyledText</code> for parentShell <code>parentShell</code>.
   *
   * @param parentShell parentShell of the new instance (cannot be null)
   */
  public BZStyledText(Shell parentShell) {
    this.parentShell = parentShell;

    //   version from jar manifest
    String version = getClass().getPackage().getImplementationVersion();

    //   version from build file
    if (version == null) version = System.getProperty("braillezephyr.version");

    //   no version
    if (version == null) {
      logWriter.println("WARNING:  unable to determine version, using 0.0");
      version = "0.0";
    }

    versionString = version;
    String[] versionStrings = versionString.split("\\.");
    versionMajor = Integer.parseInt(versionStrings[0]);
    if (versionStrings.length > 1) versionMinor = Integer.parseInt(versionStrings[1]);
    else versionMinor = 0;
    if (versionStrings.length > 2) versionPatch = Integer.parseInt(versionStrings[2]);
    else versionPatch = 0;

    color = parentShell.getDisplay().getSystemColor(SWT.COLOR_BLACK);

    composite = new Composite(parentShell, 0);
    composite.setLayout(new GridLayout(2, true));

    //   load fonts
    loadFont("BrailleZephyr_6.otf");
    loadFont("BrailleZephyr_6b.otf");
    loadFont("BrailleZephyr_6s.otf");
    loadFont("BrailleZephyr_6sb.otf");
    loadFont("BrailleZephyr_8.otf");
    loadFont("BrailleZephyr_8b.otf");
    loadFont("BrailleZephyr_8s.otf");
    loadFont("BrailleZephyr_8sb.otf");
    loadFont("BrailleZephyr_8w.otf");
    loadFont("BrailleZephyr_8wb.otf");
    loadFont("BrailleZephyr_8ws.otf");
    loadFont("BrailleZephyr_8wsb.otf");

    //   load line margin bell
    try {
      InputStream inputStreamBellMargin =
          new BufferedInputStream(getClass().getResourceAsStream("/sounds/line_margin_bell.wav"));
      AudioInputStream audioInputStreamMargin =
          AudioSystem.getAudioInputStream(inputStreamBellMargin);
      DataLine.Info dataLineInfoMargin =
          new DataLine.Info(Clip.class, audioInputStreamMargin.getFormat());
      lineMarginClip = (Clip) AudioSystem.getLine(dataLineInfoMargin);
      lineMarginClip.open(audioInputStreamMargin);
    } catch (IOException exception) {
      logWriter.println(
          "ERROR:  Unable to read default line margin bell file:  " + exception.getMessage());
      lineMarginClip = null;
    } catch (UnsupportedAudioFileException exception) {
      logWriter.println(
          "ERROR:  Sound file unsupported for default line margin bell:  "
              + exception.getMessage());
      lineMarginClip = null;
    } catch (LineUnavailableException exception) {
      logWriter.println(
          "ERROR:  Line unavailable for default line margin bell:  " + exception.getMessage());
      lineMarginClip = null;
    }

    //   load page margin bell
    try {
      InputStream inputStreamBellPage =
          new BufferedInputStream(getClass().getResourceAsStream("/sounds/page_margin_bell.wav"));
      AudioInputStream audioInputStreamPage = AudioSystem.getAudioInputStream(inputStreamBellPage);
      DataLine.Info dataLineInfoPage =
          new DataLine.Info(Clip.class, audioInputStreamPage.getFormat());
      pageMarginClip = (Clip) AudioSystem.getLine(dataLineInfoPage);
      pageMarginClip.open(audioInputStreamPage);
    } catch (IOException exception) {
      logWriter.println(
          "ERROR:  Unable to read default page margin bell file:  " + exception.getMessage());
      pageMarginClip = null;
    } catch (UnsupportedAudioFileException exception) {
      logWriter.println(
          "ERROR:  Sound file unsupported for default page margin bell:  "
              + exception.getMessage());
      pageMarginClip = null;
    } catch (LineUnavailableException exception) {
      logWriter.println(
          "ERROR:  Line unavailable for default page margin bell:  " + exception.getMessage());
      pageMarginClip = null;
    }

    //   load line end bell
    try {
      InputStream inputStreamBellPage =
          new BufferedInputStream(getClass().getResourceAsStream("/sounds/line_end_bell.wav"));
      AudioInputStream audioInputStreamPage = AudioSystem.getAudioInputStream(inputStreamBellPage);
      DataLine.Info dataLineInfoPage =
          new DataLine.Info(Clip.class, audioInputStreamPage.getFormat());
      lineEndClip = (Clip) AudioSystem.getLine(dataLineInfoPage);
      lineEndClip.open(audioInputStreamPage);
    } catch (IOException exception) {
      logWriter.println(
          "ERROR:  Unable to read default line end bell file:  " + exception.getMessage());
      lineEndClip = null;
    } catch (UnsupportedAudioFileException exception) {
      logWriter.println(
          "ERROR:  Sound file unsupported for default line end bell:  " + exception.getMessage());
      lineEndClip = null;
    } catch (LineUnavailableException exception) {
      logWriter.println(
          "ERROR:  Line unavailable for default line end bell:  " + exception.getMessage());
      lineEndClip = null;
    }

    brailleText = new StyledText(composite, SWT.BORDER | SWT.MULTI | SWT.H_SCROLL | SWT.V_SCROLL);
    brailleText.setLayoutData(new GridData(GridData.FILL_BOTH));
    brailleText.setFont(new Font(parentShell.getDisplay(), "BrailleZephyr_6s", 18, SWT.NORMAL));
    brailleText.addFocusListener(new FocusHandler(brailleText));
    brailleText.addPaintListener(new PaintHandler(brailleText));
    BrailleKeyHandler brailleKeyHandler = new BrailleKeyHandler(true);
    brailleText.addKeyListener(brailleKeyHandler);
    brailleText.addVerifyKeyListener(brailleKeyHandler);
    brailleText.addExtendedModifyListener(new ExtendedModifyHandler(brailleText));

    content = brailleText.getContent();

    asciiText = new StyledText(composite, SWT.BORDER | SWT.MULTI | SWT.H_SCROLL | SWT.V_SCROLL);
    asciiText.setContent(content);
    asciiText.setLayoutData(new GridData(GridData.FILL_BOTH));
    asciiText.setFont(new Font(parentShell.getDisplay(), "Monospace", 18, SWT.NORMAL));
    asciiText.addFocusListener(new FocusHandler(asciiText));
    asciiText.addPaintListener(new PaintHandler(asciiText));
    asciiText.addVerifyKeyListener(new BrailleKeyHandler(false));
    asciiText.addExtendedModifyListener(new ExtendedModifyHandler(asciiText));

    brailleText.addCaretListener(new CaretHandler(brailleText, asciiText));
    asciiText.addCaretListener(new CaretHandler(asciiText, brailleText));

    currentText = brailleText;
  }
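The three bell-loading blocks differ only in resource path and log label, so they could collapse into one helper. A sketch; the helper name is hypothetical, and it adds a null check for a missing resource that the original blocks lack:

   // Hypothetical helper consolidating the three bell-loading blocks above
   private Clip loadBellClip(String resourcePath, String label) {
     try {
       InputStream stream = getClass().getResourceAsStream(resourcePath);
       if (stream == null) {
         logWriter.println("ERROR:  Missing sound resource for " + label + ":  " + resourcePath);
         return null;
       }
       AudioInputStream audioStream =
           AudioSystem.getAudioInputStream(new BufferedInputStream(stream));
       Clip clip = (Clip) AudioSystem.getLine(new DataLine.Info(Clip.class, audioStream.getFormat()));
       clip.open(audioStream);
       return clip;
     } catch (IOException exception) {
       logWriter.println("ERROR:  Unable to read " + label + " file:  " + exception.getMessage());
     } catch (UnsupportedAudioFileException exception) {
       logWriter.println("ERROR:  Sound file unsupported for " + label + ":  " + exception.getMessage());
     } catch (LineUnavailableException exception) {
       logWriter.println("ERROR:  Line unavailable for " + label + ":  " + exception.getMessage());
     }
     return null;
   }
   // usage: lineMarginClip = loadBellClip("/sounds/line_margin_bell.wav", "default line margin bell");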
Example 9
  /**
   * Open the plugin. Must be called after the formats have been determined and before "process" is
   * called.
   *
   * <p>Open the DataLine.
   */
  public void open() throws ResourceUnavailableException {
    javax.sound.sampled.AudioFormat audioFormat = convertFormat(inputFormat);
    logger.info("opening with javax.sound format: " + audioFormat);
    try {

      if (!inputFormat.getEncoding().equals(AudioFormat.LINEAR)) {
        logger.info("JavaSoundRenderer: Audio format is not linear, creating conversion");

        if (inputFormat.getEncoding().equals(AudioFormat.ULAW))
          codec = new net.sf.fmj.media.codec.audio.ulaw.Decoder(); // much more efficient than JavaSoundCodec
        else if (inputFormat.getEncoding().equals(AudioFormat.ALAW))
          codec = new net.sf.fmj.media.codec.audio.alaw.Decoder(); // much more efficient than JavaSoundCodec
        else
          throw new ResourceUnavailableException(
              "Unsupported input format encoding: " + inputFormat.getEncoding());
        // codec = new net.sf.fmj.media.codec.JavaSoundCodec();
        codec.setInputFormat(inputFormat);
        final Format[] outputFormats = codec.getSupportedOutputFormats(inputFormat);
        if (outputFormats.length < 1)
          throw new ResourceUnavailableException(
              "Unable to get an output format for input format: " + inputFormat);
        final AudioFormat codecOutputFormat =
            (AudioFormat) outputFormats[0]; // TODO: choose the best quality one.
        codec.setOutputFormat(codecOutputFormat);
        audioFormat = convertFormat(codecOutputFormat);

        codec.open();

        logger.info(
            "JavaSoundRenderer: Audio format is not linear, created conversion from "
                + inputFormat
                + " to "
                + codecOutputFormat);
      }

      sourceLine = getSourceDataLine(audioFormat);
      sourceLine.open(audioFormat);

      {
        FloatControl gainFloatControl = null;
        BooleanControl muteBooleanControl = null;

        try {
          gainFloatControl = (FloatControl) sourceLine.getControl(FloatControl.Type.MASTER_GAIN);
        } catch (Exception e) {
          // MASTER_GAIN is optional; leave the control null if the line doesn't support it
        }

        try {
          muteBooleanControl = (BooleanControl) sourceLine.getControl(BooleanControl.Type.MUTE);
        } catch (Exception e) {
          // MUTE is optional as well
        }

        // TODO add other controls
        JavaSoundGainControl gainControl =
            new JavaSoundGainControl(gainFloatControl, muteBooleanControl);
        controls.addControl(gainControl);
      }

      logControls(sourceLine.getControls());
    } catch (LineUnavailableException e) {
      throw new ResourceUnavailableException(e.getMessage());
    }
  }
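Once MASTER_GAIN and MUTE have been captured, adjusting them uses the standard FloatControl/BooleanControl API. A short sketch; the linear 0..1 volume mapping onto the dB gain range is an assumption, not taken from this renderer:

   // Hypothetical use of the captured controls
   void setVolume(FloatControl gain, double volume) {
     float min = gain.getMinimum();   // e.g. -80.0 dB
     float max = gain.getMaximum();   // e.g. +6.0 dB
     gain.setValue(min + (float) volume * (max - min));
   }

   void setMuted(BooleanControl mute, boolean muted) {
     mute.setValue(muted);
   }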