Example No. 1
  /**
   * Opens an AudioInputStream for the given file name, which may be either a local
   * file path or an http URL.
   *
   * @param filename the path or URL of the audio resource to open
   * @return the AudioInputStream, or null if the stream could not be opened
   */
  AudioInputStream getAudioInputStream(String filename) {
    AudioInputStream ais = null;
    BufferedInputStream bis = null;
    if (filename.startsWith("http")) {
      try {
        ais = getAudioInputStream(new URL(filename));
      } catch (MalformedURLException e) {
        error("Bad URL: " + e.getMessage());
      } catch (UnsupportedAudioFileException e) {
        error("URL is in an unsupported audio file format: " + e.getMessage());
      } catch (IOException e) {
        Sound.error("Error reading the URL: " + e.getMessage());
      }
    } else {
      try {
        // InputStream is = createInput(filename);
        InputStream is = new FileInputStream(filename);

        debug("Base input stream is: " + is.toString());
        bis = new BufferedInputStream(is);
        ais = getAudioInputStream(bis);
        // Don't mark the stream like this: it would cause the entire file to be
        // held in memory as it plays, which leads to out-of-memory problems with
        // very large files.
        // ais.mark((int)ais.available());
        debug(
            "Acquired AudioInputStream.\n"
                + "It is "
                + ais.getFrameLength()
                + " frames long.\n"
                + "Marking support: "
                + ais.markSupported());
      } catch (IOException ioe) {
        error("IOException: " + ioe.getMessage());
      } catch (UnsupportedAudioFileException uafe) {
        error("Unsupported Audio File: " + uafe.getMessage());
      }
    }
    return ais;
  }
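In the URL branch above, the stream handed to the audio system is whatever the URL connection provides, so mark/reset support is not guaranteed. A small sketch of buffering the URL stream as well, assuming the same getAudioInputStream(InputStream) overload and error() helper used in the file branch, might look like this:

      try {
        // Open the URL manually and buffer it so the resulting stream supports mark/reset,
        // mirroring what the file branch does with BufferedInputStream.
        InputStream urlStream = new BufferedInputStream(new URL(filename).openStream());
        ais = getAudioInputStream(urlStream);
      } catch (MalformedURLException e) {
        error("Bad URL: " + e.getMessage());
      } catch (UnsupportedAudioFileException e) {
        error("URL is in an unsupported audio file format: " + e.getMessage());
      } catch (IOException e) {
        error("Error reading the URL: " + e.getMessage());
      }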
Example No. 2
  // return data as a byte array
  private byte[] readByte(String filename) {
    byte[] data = null;
    AudioInputStream ais = null;
    try {

      // try to read from file
      File file = new File(filename);
      if (file.exists()) {
        ais = AudioSystem.getAudioInputStream(file);
        data = new byte[ais.available()];
        ais.read(data);
      }

      // try to read from URL
      else {
        URL url = StdAudio.class.getResource(filename);
        ais = AudioSystem.getAudioInputStream(url);
        data = new byte[ais.available()];
        ais.read(data);
      }
    } catch (IOException e) {
      System.out.println(e.getMessage());
      throw new RuntimeException("Could not read " + filename);
    } catch (UnsupportedAudioFileException e) {
      System.out.println(e.getMessage());
      throw new RuntimeException(filename + " in unsupported audio format");
    }

    return data;
  }
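The example above sizes its buffer from ais.available() and issues a single read(), neither of which is guaranteed to cover the entire stream for every format. A minimal sketch of a more defensive read loop is shown below; readAllAudioBytes is a hypothetical helper (not part of the original class) and assumes the usual java.io and javax.sound.sampled imports.

  // Hypothetical helper: read the decoded stream in chunks instead of trusting
  // available() to report the full length up front.
  private byte[] readAllAudioBytes(File file) throws IOException, UnsupportedAudioFileException {
    try (AudioInputStream ais = AudioSystem.getAudioInputStream(file);
        ByteArrayOutputStream buffer = new ByteArrayOutputStream()) {
      byte[] chunk = new byte[4096];
      int n;
      while ((n = ais.read(chunk)) != -1) {
        buffer.write(chunk, 0, n);
      }
      return buffer.toByteArray();
    }
  }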
Example No. 3
  @Override
  public void run() {
    running = true;
    done = false;
    Log.println("WAV Source START");
    if (audioStream == null)
      try {
        initWav();
      } catch (UnsupportedAudioFileException e1) {
        Log.errorDialog("ERROR", "Unsupported File Format\n" + e1.getMessage());
        e1.printStackTrace(Log.getWriter());
        running = false;
      } catch (IOException e1) {
        Log.errorDialog("ERROR", "There was a problem opening the wav file\n" + e1.getMessage());
        e1.printStackTrace(Log.getWriter());
        running = false;
      }

    while (running) {
      //			Log.println("wav running");
      if (audioStream != null) {
        int nBytesRead = 0;
        if (circularBuffer.getCapacity() > readBuffer.length) {
          try {
            nBytesRead = audioStream.read(readBuffer, 0, readBuffer.length);
            bytesRead = bytesRead + nBytesRead;
            framesProcessed = framesProcessed + nBytesRead / frameSize;
            // Check we have not stopped mid read
            if (audioStream == null) running = false;
            else if (!(audioStream.available() > 0)) running = false;
          } catch (IOException e) {
            Log.errorDialog("ERROR", "Failed to read from file " + fileName);
            e.printStackTrace(Log.getWriter());
          }
        } else {
          try {
            Thread.sleep(1);
          } catch (InterruptedException e) {
            // Interrupted while waiting for buffer space; just log it and continue
            e.printStackTrace();
          }
          // Log.println("No room in Buffer");
        }
        for (int i = 0; i < nBytesRead; i += 2) {
          // circularBuffer.add(readBuffer[i]);
          circularBuffer.add(readBuffer[i], readBuffer[i + 1]);
        }
      }
    }
    framesProcessed = totalFrames;
    cleanup(); // This might cause the decoder to miss the end of a file (testing inconclusive), but
               // it also ensures the file is closed and prevents an error if the source is run again very quickly
    running = false;
    try {
      Thread.sleep(100);
    } catch (InterruptedException e) {
      e.printStackTrace();
    }
    Log.println("WAV Source EXIT");
  }
Example No. 4
 @ActionDoc(text = "plays a sound from the sounds folder")
 public static void playSound(
     @ParamDoc(name = "filename", text = "the filename with extension") String filename) {
   try {
     InputStream is = new FileInputStream(SOUND_DIR + File.separator + filename);
     if (filename.toLowerCase().endsWith(".mp3")) {
       Player player = new Player(is);
       playInThread(player);
     } else {
       AudioInputStream ais = AudioSystem.getAudioInputStream(is);
       Clip clip = AudioSystem.getClip();
       clip.open(ais);
       playInThread(clip);
     }
   } catch (FileNotFoundException e) {
     logger.error("Cannot play sound '{}': {}", new String[] {filename, e.getMessage()});
   } catch (JavaLayerException e) {
     logger.error("Cannot play sound '{}': {}", new String[] {filename, e.getMessage()});
   } catch (UnsupportedAudioFileException e) {
     logger.error(
         "Format of sound file '{}' is not supported: {}",
         new String[] {filename, e.getMessage()});
   } catch (IOException e) {
     logger.error("Cannot play sound '{}': {}", new String[] {filename, e.getMessage()});
   } catch (LineUnavailableException e) {
     logger.error("Cannot play sound '{}': {}", new String[] {filename, e.getMessage()});
   }
 }
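None of the streams or lines opened above are ever closed. A minimal sketch of releasing the Clip once playback has finished (playAndRelease is a hypothetical helper, assuming Java 8+ and the standard javax.sound.sampled API) could look like this:

  // Hypothetical helper: play a clip and free the underlying line when playback stops.
  private static void playAndRelease(File soundFile)
      throws IOException, UnsupportedAudioFileException, LineUnavailableException {
    Clip clip = AudioSystem.getClip();
    clip.addLineListener(event -> {
      // Release the native audio line once the clip has finished playing.
      if (event.getType() == LineEvent.Type.STOP) {
        clip.close();
      }
    });
    try (AudioInputStream ais = AudioSystem.getAudioInputStream(soundFile)) {
      clip.open(ais); // open() buffers the audio data, so the stream can be closed afterwards
    }
    clip.start();
  }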
Example No. 5
 private Map<String, Object> getID3Tags(String filename) {
   debug("Getting the properties.");
   Map<String, Object> props = new HashMap<String, Object>();
   try {
     MpegAudioFileReader reader = new MpegAudioFileReader(this);
     // InputStream stream = createInput(filename);
     InputStream stream = new FileInputStream(filename);
     AudioFileFormat baseFileFormat = reader.getAudioFileFormat(stream, stream.available());
     stream.close();
     if (baseFileFormat instanceof TAudioFileFormat) {
       TAudioFileFormat fileFormat = (TAudioFileFormat) baseFileFormat;
       props = fileFormat.properties();
       if (props.size() == 0) {
         error("No file properties available for " + filename + ".");
       } else {
         debug("File properties: " + props.toString());
       }
     }
   } catch (UnsupportedAudioFileException e) {
     error("Couldn't get the file format for " + filename + ": " + e.getMessage());
   } catch (IOException e) {
     error("Couldn't access " + filename + ": " + e.getMessage());
   }
   return props;
 }
Example No. 6
  public void testPropertiesFile() {
    String[] testPropsAFF = {
      "duration",
      "title",
      "author",
      "album",
      "date",
      "comment",
      "copyright",
      "ogg.bitrate.min",
      "ogg.bitrate.nominal",
      "ogg.bitrate.max"
    };
    String[] testPropsAF = {"vbr", "bitrate"};

    File file = new File(filename);
    AudioFileFormat baseFileFormat = null;
    AudioFormat baseFormat = null;
    try {
      baseFileFormat = AudioSystem.getAudioFileFormat(file);
      baseFormat = baseFileFormat.getFormat();
      if (out != null) out.println("-> Filename : " + filename + " <-");
      if (out != null) out.println(baseFileFormat);
      if (baseFileFormat instanceof TAudioFileFormat) {
        Map properties = ((TAudioFileFormat) baseFileFormat).properties();
        if (out != null) out.println(properties);
        for (int i = 0; i < testPropsAFF.length; i++) {
          String key = testPropsAFF[i];
          if (properties.get(key) != null) {
            String val = (properties.get(key)).toString();
            // if (out != null)  out.println(key+"="+val);
            String valexpected = props.getProperty(key);
            // assertEquals(key,valexpected,val);
          }
        }
      } else {
        assertTrue("testPropertiesFile : TAudioFileFormat expected", false);
      }

      if (baseFormat instanceof TAudioFormat) {
        Map properties = ((TAudioFormat) baseFormat).properties();
        for (int i = 0; i < testPropsAF.length; i++) {
          String key = testPropsAF[i];
          if (properties.get(key) != null) {
            String val = (properties.get(key)).toString();
            if (out != null) out.println(key + "=" + val);
            String valexpected = props.getProperty(key);
            // assertEquals(key,valexpected,val);
          }
        }
      } else {
        assertTrue("testPropertiesFile : TAudioFormat expected", false);
      }
    } catch (UnsupportedAudioFileException e) {
      assertTrue("testPropertiesFile : " + e.getMessage(), false);
    } catch (IOException e) {
      assertTrue("testPropertiesFile : " + e.getMessage(), false);
    }
  }
Example No. 7
 public void testPropertiesShoutcast() {
   AudioFileFormat baseFileFormat = null;
   AudioFormat baseFormat = null;
   String shoutURL = (String) props.getProperty("shoutcast");
   try {
     URL url = new URL(shoutURL);
     baseFileFormat = AudioSystem.getAudioFileFormat(url);
     baseFormat = baseFileFormat.getFormat();
     if (out != null) out.println("-> URL : " + url.toString() + " <-");
     if (out != null) out.println(baseFileFormat);
     if (baseFileFormat instanceof TAudioFileFormat) {
       Map properties = ((TAudioFileFormat) baseFileFormat).properties();
       Iterator it = properties.keySet().iterator();
       while (it.hasNext()) {
         String key = (String) it.next();
         String val = null;
         if (properties.get(key) != null) val = (properties.get(key)).toString();
         if (out != null) out.println(key + "='" + val + "'");
       }
     } else {
       assertTrue("testPropertiesShoutcast : TAudioFileFormat expected", false);
     }
     if (baseFormat instanceof TAudioFormat) {
       Map properties = ((TAudioFormat) baseFormat).properties();
       Iterator it = properties.keySet().iterator();
       while (it.hasNext()) {
         String key = (String) it.next();
         String val = null;
         if (properties.get(key) != null) val = (properties.get(key)).toString();
         if (out != null) out.println(key + "='" + val + "'");
       }
     } else {
       assertTrue("testPropertiesShoutcast : TAudioFormat expected", false);
     }
   } catch (UnsupportedAudioFileException e) {
     assertTrue("testPropertiesShoutcast : " + e.getMessage(), false);
   } catch (IOException e) {
     assertTrue("testPropertiesShoutcast : " + e.getMessage(), false);
   }
 }
Example No. 8
  public void _testDumpPropertiesFile() {
    File file = new File(filename);
    AudioFileFormat baseFileFormat = null;
    AudioFormat baseFormat = null;
    try {
      baseFileFormat = AudioSystem.getAudioFileFormat(file);
      baseFormat = baseFileFormat.getFormat();
      if (out != null) out.println("-> Filename : " + filename + " <-");
      if (baseFileFormat instanceof TAudioFileFormat) {
        Map properties = ((TAudioFileFormat) baseFileFormat).properties();
        Iterator it = properties.keySet().iterator();
        while (it.hasNext()) {
          String key = (String) it.next();
          String val = (properties.get(key)).toString();
          if (out != null) out.println(key + "='" + val + "'");
        }
      } else {
        assertTrue("testDumpPropertiesFile : TAudioFileFormat expected", false);
      }

      if (baseFormat instanceof TAudioFormat) {
        Map properties = ((TAudioFormat) baseFormat).properties();
        Iterator it = properties.keySet().iterator();
        while (it.hasNext()) {
          String key = (String) it.next();
          String val = (properties.get(key)).toString();
          if (out != null) out.println(key + "='" + val + "'");
        }
      } else {
        assertTrue("testDumpPropertiesFile : TAudioFormat expected", false);
      }
    } catch (UnsupportedAudioFileException e) {
      assertTrue("testDumpPropertiesFile : " + e.getMessage(), false);
    } catch (IOException e) {
      assertTrue("testDumpPropertiesFile : " + e.getMessage(), false);
    }
  }
Example No. 9
 /** @see com.groovemanager.sampled.AudioPlayerProvider#stopRec() */
 public void stopRec() {
   modified = true;
   canRec = false;
   recording = false;
   try {
     out.close();
     File tempPeak = File.createTempFile("gmtmp_", ".gmpk");
     ((DynamicPeakWaveForm) afWF.getPeakWaveForm()).close(source.lastModified());
     try {
       AudioFileWaveForm aw = new AudioFileWaveForm(source, afWF.getPeakWaveForm(), 32 * 1024, 25);
       fileFormat = AudioSystem.getAudioFileFormat(source);
       cutList.setSource(new AudioFileSource(source, aw));
       waveDisplay.showAll();
     } catch (UnsupportedAudioFileException e1) {
       e1.printStackTrace();
       editor.errorMessage(e1.getMessage());
     }
     this.editor.player.setProvider(this);
   } catch (IOException e) {
     e.printStackTrace();
     editor.errorMessage(e.getMessage());
   }
   editor.zoomWaveDisplay.setSource(this);
 }
Example No. 10
 /** @see com.groovemanager.sampled.AudioPlayerProvider#startRec() */
 public AudioFormat startRec() throws NotReadyException {
   tabItem
       .getDisplay()
       .asyncExec(
           new Runnable() {
             public void run() {
               waveDisplay.scroll(1);
             }
           });
   try {
     out =
         AudioManager.getDefault()
             .getAudioFileOutputStream(
                 source, format, AudioFileFormat.Type.WAVE, null, null, null);
   } catch (IOException e) {
     e.printStackTrace();
     throw new NotReadyException(e.getMessage());
   } catch (UnsupportedAudioFileException e) {
     e.printStackTrace();
     throw new NotReadyException(e.getMessage());
   }
   recording = true;
   return format;
 }
Example No. 11
  public void testPropertiesFile() {
    String[] testPropsAFF = {
      "duration",
      "title",
      "author",
      "album",
      "date",
      "comment",
      "copyright",
      "mp3.framerate.fps",
      "mp3.copyright",
      "mp3.padding",
      "mp3.original",
      "mp3.length.bytes",
      "mp3.frequency.hz",
      "mp3.length.frames",
      "mp3.mode",
      "mp3.channels",
      "mp3.version.mpeg",
      "mp3.framesize.bytes",
      "mp3.vbr.scale",
      "mp3.version.encoding",
      "mp3.header.pos",
      "mp3.version.layer",
      "mp3.crc"
    };
    String[] testPropsAF = {"vbr", "bitrate"};

    File file = new File(filename);
    AudioFileFormat baseFileFormat = null;
    AudioFormat baseFormat = null;
    try {
      baseFileFormat = AudioSystem.getAudioFileFormat(file);
      baseFormat = baseFileFormat.getFormat();
      if (out != null) out.println("-> Filename : " + filename + " <-");
      if (out != null) out.println(baseFileFormat);
      if (baseFileFormat instanceof TAudioFileFormat) {
        Map properties = ((TAudioFileFormat) baseFileFormat).properties();
        if (out != null) out.println(properties);
        for (int i = 0; i < testPropsAFF.length; i++) {
          String key = testPropsAFF[i];
          String val = null;
          if (properties.get(key) != null) val = (properties.get(key)).toString();
          if (out != null) out.println(key + "='" + val + "'");
          String valexpected = props.getProperty(key);
          assertEquals(key, valexpected, val);
        }
      } else {
        assertTrue("testPropertiesFile : TAudioFileFormat expected", false);
      }

      if (baseFormat instanceof TAudioFormat) {
        Map properties = ((TAudioFormat) baseFormat).properties();
        for (int i = 0; i < testPropsAF.length; i++) {
          String key = testPropsAF[i];
          String val = null;
          if (properties.get(key) != null) val = (properties.get(key)).toString();
          if (out != null) out.println(key + "='" + val + "'");
          String valexpected = props.getProperty(key);
          assertEquals(key, valexpected, val);
        }
      } else {
        assertTrue("testPropertiesFile : TAudioFormat expected", false);
      }
    } catch (UnsupportedAudioFileException e) {
      assertTrue("testPropertiesFile : " + e.getMessage(), false);
    } catch (IOException e) {
      assertTrue("testPropertiesFile : " + e.getMessage(), false);
    }
  }
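Outside of a test, the same properties can be read directly from the AudioFileFormat; the MP3 and Ogg SPIs expose, for example, a "duration" value in microseconds. A short sketch (printDuration and the file path are hypothetical; it assumes the mp3spi/vorbisspi and Tritonus share classes are on the classpath):

  // Hypothetical helper: print the duration reported by the SPI, if present.
  private void printDuration(File file) {
    try {
      AudioFileFormat fileFormat = AudioSystem.getAudioFileFormat(file);
      if (fileFormat instanceof TAudioFileFormat) {
        Map properties = ((TAudioFileFormat) fileFormat).properties();
        Long durationMicros = (Long) properties.get("duration"); // microseconds, per the SPI docs
        if (durationMicros != null) {
          System.out.println("Duration: " + (durationMicros / 1000000.0) + " s");
        }
      }
    } catch (UnsupportedAudioFileException | IOException e) {
      System.out.println("Could not read file format: " + e.getMessage());
    }
  }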
Example No. 12
  /**
   * Apply the given effect to this WaveTab's audio data
   *
   * @param effect The effect to apply
   */
  public void applyEffect(Effect effect) {
    Selection sel = waveDisplay.getSelection();
    if (sel.getLeft() == sel.getRight())
      waveDisplay.setSelection(new Selection(0, getTotalLength()));
    Thread thread = null;
    try {
      AudioInputStream stream = getAudioInputStream();
      int sourceChannels = stream.getFormat().getChannels();
      stream = AudioManager.getStereoInputStream(stream);
      final FXUnit unit = new FXUnit(effect);
      if (effect.needsAnalysis()) {
        Analyzer a = new Analyzer(unit, stream);
        ProgressMonitor monitor =
            new ProgressMonitor(getShell(), a, "Analyzing...", "Analyzing audio data");
        monitor.start();
        stream = AudioManager.getStereoInputStream(getAudioInputStream());
      }

      final SourceDataLine sourceLine = unit.getEffectSourceLine();
      sourceLine.open();
      sourceLine.start();
      final TargetDataLine targetLine = unit.getEffectTargetLine();
      targetLine.open();
      targetLine.start();
      if (!stream.getFormat().equals(sourceLine.getFormat())) {
        if (AudioSystem.isConversionSupported(sourceLine.getFormat(), stream.getFormat()))
          stream = AudioSystem.getAudioInputStream(sourceLine.getFormat(), stream);
        else {
          editor.errorMessage(
              "Unable to apply effect:\nFormat conversion from "
                  + stream.getFormat()
                  + " to "
                  + sourceLine.getFormat()
                  + " not supported.");
          return;
        }
      }

      final AudioInputStream inStream = stream;
      thread =
          new Thread() {
            public void run() {
              int numBytesRead = 0;
              byte[] buffer = new byte[sourceLine.getBufferSize()];
              while (numBytesRead != -1 && !getItem().isDisposed()) {
                try {
                  numBytesRead = inStream.read(buffer, 0, buffer.length);
                } catch (IOException e1) {
                  e1.printStackTrace();
                  numBytesRead = -1;
                }
                if (numBytesRead > 0) {
                  sourceLine.write(buffer, 0, numBytesRead);
                }
                try {
                  Thread.sleep(0, 1);
                } catch (InterruptedException e) {
                }
              }
            }
          };
      thread.start();

      AudioInputStream in = new AudioInputStream(targetLine);
      if (sourceChannels == 1) in = AudioManager.getMonoInputStream(in);
      File tempFile = File.createTempFile("gmtmp_", ".wav");
      AudioFormat tempFormat =
          new AudioFormat(
              fileFormat.getFormat().getSampleRate(),
              16,
              fileFormat.getFormat().getChannels(),
              true,
              false);
      AudioFileOutputStream out =
          AudioManager.getDefault()
              .getAudioFileOutputStream(
                  tempFile, tempFormat, AudioFileFormat.Type.WAVE, null, null, null);
      if (!in.getFormat().equals(out.getFormat()))
        in = AudioSystem.getAudioInputStream(out.getFormat(), in);
      SaveFileThread saver =
          new SaveFileThread(
              in, out, (int) inStream.getFrameLength(), in.getFormat().getFrameSize(), true);
      ProgressMonitor monitor =
          new ProgressMonitor(
              getShell(), saver, "Apply Effect", "Applying " + effect.getName() + " to Selection");
      monitor.start();

      File tempPeak = File.createTempFile("gmtmp_", ".gmpk");
      CreatePeakFileThread peak =
          new CreatePeakFileThread(AudioSystem.getAudioInputStream(tempFile), tempPeak);
      monitor =
          new ProgressMonitor(
              getShell(), peak, "Creating peak file", "Creating peak file for applied effect.");
      monitor.start();

      PeakWaveForm pwf = new PeakWaveForm(tempPeak);
      AudioFileWaveForm awf = new AudioFileWaveForm(tempFile, pwf, 32 * 1024, 25);
      CutListSource newSource = new AudioFileSource(tempFile, awf);

      sel = waveDisplay.getSelection();
      int left = sel.getLeft();
      int right = sel.getRight();

      ReplaceElement el =
          new ReplaceElement(
              effect.getName(), newSource, left, right - left, fileFormat.getFormat());
      cutList.addElement(el);
      undoOperations.add(el);
      redoOperations.clear();
      thread.stop();
    } catch (NotReadyException e) {
      e.printStackTrace();
      editor.errorMessage(e.getMessage());
      if (thread != null) thread.stop();
    } catch (NotFinishedException e) {
      e.printStackTrace();
      editor.errorMessage(e.getMessage());
      if (thread != null) thread.stop();
    } catch (LineUnavailableException e) {
      e.printStackTrace();
      editor.errorMessage(e.getMessage());
      if (thread != null) thread.stop();
    } catch (IOException e) {
      e.printStackTrace();
      editor.errorMessage(e.getMessage());
      if (thread != null) thread.stop();
    } catch (UnsupportedAudioFileException e) {
      e.printStackTrace();
      editor.errorMessage(e.getMessage());
      if (thread != null) thread.stop();
    }
  }
Example No. 13
  /**
   * Creates a new <code>BZStyledText</code> for the parent shell <code>parentShell</code>.
   *
   * @param parentShell parent shell of the new instance (cannot be null)
   */
  public BZStyledText(Shell parentShell) {
    this.parentShell = parentShell;

    //   version from jar manifest
    String version = getClass().getPackage().getImplementationVersion();

    //   version from build file
    if (version == null) version = System.getProperty("braillezephyr.version");

    //   no version
    if (version == null) {
      logWriter.println("WARNING:  unable to determine version, using 0.0");
      version = "0.0";
    }

    versionString = version;
    String[] versionStrings = versionString.split("\\.");
    versionMajor = Integer.parseInt(versionStrings[0]);
    if (versionStrings.length > 1) versionMinor = Integer.parseInt(versionStrings[1]);
    else versionMinor = 0;
    if (versionStrings.length > 2) versionPatch = Integer.parseInt(versionStrings[2]);
    else versionPatch = 0;

    color = parentShell.getDisplay().getSystemColor(SWT.COLOR_BLACK);

    composite = new Composite(parentShell, 0);
    composite.setLayout(new GridLayout(2, true));

    //   load fonts
    loadFont("BrailleZephyr_6.otf");
    loadFont("BrailleZephyr_6b.otf");
    loadFont("BrailleZephyr_6s.otf");
    loadFont("BrailleZephyr_6sb.otf");
    loadFont("BrailleZephyr_8.otf");
    loadFont("BrailleZephyr_8b.otf");
    loadFont("BrailleZephyr_8s.otf");
    loadFont("BrailleZephyr_8sb.otf");
    loadFont("BrailleZephyr_8w.otf");
    loadFont("BrailleZephyr_8wb.otf");
    loadFont("BrailleZephyr_8ws.otf");
    loadFont("BrailleZephyr_8wsb.otf");

    //   load line margin bell
    try {
      InputStream inputStreamBellMargin =
          new BufferedInputStream(getClass().getResourceAsStream("/sounds/line_margin_bell.wav"));
      AudioInputStream audioInputStreamMargin =
          AudioSystem.getAudioInputStream(inputStreamBellMargin);
      DataLine.Info dataLineInfoMargin =
          new DataLine.Info(Clip.class, audioInputStreamMargin.getFormat());
      lineMarginClip = (Clip) AudioSystem.getLine(dataLineInfoMargin);
      lineMarginClip.open(audioInputStreamMargin);
    } catch (IOException exception) {
      logWriter.println(
          "ERROR:  Unable to read default line margin bell file:  " + exception.getMessage());
      lineMarginClip = null;
    } catch (UnsupportedAudioFileException exception) {
      logWriter.println(
          "ERROR:  Sound file unsupported for default line margin bell:  "
              + exception.getMessage());
      lineMarginClip = null;
    } catch (LineUnavailableException exception) {
      logWriter.println(
          "ERROR:  Line unavailable for default line margin bell:  " + exception.getMessage());
      lineMarginClip = null;
    }

    //   load page margin bell
    try {
      InputStream inputStreamBellPage =
          new BufferedInputStream(getClass().getResourceAsStream("/sounds/page_margin_bell.wav"));
      AudioInputStream audioInputStreamPage = AudioSystem.getAudioInputStream(inputStreamBellPage);
      DataLine.Info dataLineInfoPage =
          new DataLine.Info(Clip.class, audioInputStreamPage.getFormat());
      pageMarginClip = (Clip) AudioSystem.getLine(dataLineInfoPage);
      pageMarginClip.open(audioInputStreamPage);
    } catch (IOException exception) {
      logWriter.println(
          "ERROR:  Unable to read default page margin bell file:  " + exception.getMessage());
      pageMarginClip = null;
    } catch (UnsupportedAudioFileException exception) {
      logWriter.println(
          "ERROR:  Sound file unsupported for default page margin bell:  "
              + exception.getMessage());
      pageMarginClip = null;
    } catch (LineUnavailableException exception) {
      logWriter.println(
          "ERROR:  Line unavailable for default page margin bell:  " + exception.getMessage());
      pageMarginClip = null;
    }

    //   load line end bell
    try {
      InputStream inputStreamBellPage =
          new BufferedInputStream(getClass().getResourceAsStream("/sounds/line_end_bell.wav"));
      AudioInputStream audioInputStreamPage = AudioSystem.getAudioInputStream(inputStreamBellPage);
      DataLine.Info dataLineInfoPage =
          new DataLine.Info(Clip.class, audioInputStreamPage.getFormat());
      lineEndClip = (Clip) AudioSystem.getLine(dataLineInfoPage);
      lineEndClip.open(audioInputStreamPage);
    } catch (IOException exception) {
      logWriter.println(
          "ERROR:  Unable to read default line end bell file:  " + exception.getMessage());
      lineEndClip = null;
    } catch (UnsupportedAudioFileException exception) {
      logWriter.println(
          "ERROR:  Sound file unsupported for default line end bell:  " + exception.getMessage());
      lineEndClip = null;
    } catch (LineUnavailableException exception) {
      logWriter.println(
          "ERROR:  Line unavailable for default line end bell:  " + exception.getMessage());
      lineEndClip = null;
    }

    brailleText = new StyledText(composite, SWT.BORDER | SWT.MULTI | SWT.H_SCROLL | SWT.V_SCROLL);
    brailleText.setLayoutData(new GridData(GridData.FILL_BOTH));
    brailleText.setFont(new Font(parentShell.getDisplay(), "BrailleZephyr_6s", 18, SWT.NORMAL));
    brailleText.addFocusListener(new FocusHandler(brailleText));
    brailleText.addPaintListener(new PaintHandler(brailleText));
    BrailleKeyHandler brailleKeyHandler = new BrailleKeyHandler(true);
    brailleText.addKeyListener(brailleKeyHandler);
    brailleText.addVerifyKeyListener(brailleKeyHandler);
    brailleText.addExtendedModifyListener(new ExtendedModifyHandler(brailleText));

    content = brailleText.getContent();

    asciiText = new StyledText(composite, SWT.BORDER | SWT.MULTI | SWT.H_SCROLL | SWT.V_SCROLL);
    asciiText.setContent(content);
    asciiText.setLayoutData(new GridData(GridData.FILL_BOTH));
    asciiText.setFont(new Font(parentShell.getDisplay(), "Monospace", 18, SWT.NORMAL));
    asciiText.addFocusListener(new FocusHandler(asciiText));
    asciiText.addPaintListener(new PaintHandler(asciiText));
    asciiText.addVerifyKeyListener(new BrailleKeyHandler(false));
    asciiText.addExtendedModifyListener(new ExtendedModifyHandler(asciiText));

    brailleText.addCaretListener(new CaretHandler(brailleText, asciiText));
    asciiText.addCaretListener(new CaretHandler(asciiText, brailleText));

    currentText = brailleText;
  }
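The three bells above are loaded with the same sequence of calls. A hedged sketch of factoring that sequence into a helper is shown below; loadClip is hypothetical and reuses the logWriter field from the example, so the constructor could call, for instance, lineMarginClip = loadClip("/sounds/line_margin_bell.wav", "default line margin bell").

  // Hypothetical helper: load a Clip from a classpath resource, returning null on failure.
  private Clip loadClip(String resourcePath, String description) {
    try {
      InputStream inputStream =
          new BufferedInputStream(getClass().getResourceAsStream(resourcePath));
      AudioInputStream audioInputStream = AudioSystem.getAudioInputStream(inputStream);
      DataLine.Info dataLineInfo = new DataLine.Info(Clip.class, audioInputStream.getFormat());
      Clip clip = (Clip) AudioSystem.getLine(dataLineInfo);
      clip.open(audioInputStream);
      return clip;
    } catch (IOException exception) {
      logWriter.println("ERROR:  Unable to read " + description + " file:  " + exception.getMessage());
    } catch (UnsupportedAudioFileException exception) {
      logWriter.println("ERROR:  Sound file unsupported for " + description + ":  " + exception.getMessage());
    } catch (LineUnavailableException exception) {
      logWriter.println("ERROR:  Line unavailable for " + description + ":  " + exception.getMessage());
    }
    return null;
  }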