protected AudioInputStream getAudioInputStream(InputStream inputStream, long lFileLengthInBytes)
    throws UnsupportedAudioFileException, IOException {
  if (TDebug.TraceAudioFileReader) {
    TDebug.out("AbstractChunkyAudioFileReader.getAudioInputStream(InputStream, long): begin");
  }
  // First pass: buffer only enough to run the preliminary header check, then rewind.
  InputStream checked = new BufferedInputStream(inputStream, INIT_READ_LIMIT);
  checked.mark(INIT_READ_LIMIT);
  preliminaryCheck(checked, INIT_READ_LIMIT);
  checked.reset();
  // The stream passed the check, so buffering the whole file is not wasted work.
  InputStream buffered = new BufferedInputStream(checked, (int) lFileLengthInBytes);
  AudioFileFormat audioFileFormat = getAudioFileFormat(buffered, lFileLengthInBytes);
  AudioInputStream audioInputStream =
      new AudioInputStream(
          buffered, audioFileFormat.getFormat(), audioFileFormat.getFrameLength());
  if (TDebug.TraceAudioFileReader) {
    TDebug.out("AbstractChunkyAudioFileReader.getAudioInputStream(InputStream, long): end");
  }
  return audioInputStream;
}
// Beispiel #2 (scraped example-site marker; score: 0)
  /**
   * Load the desired sound data into memory.
   *
   * <p>Reads the entire audio stream into a byte array so later playback never has to
   * touch the classpath resource again.
   *
   * @param filename classpath resource name of the sound file
   * @return the decoded audio format plus the raw sample bytes
   * @throws IOException if the resource is missing or cannot be read
   * @throws UnsupportedAudioFileException if the data is not a recognized audio format
   */
  private GameSoundData loadSound(String filename)
      throws IOException, UnsupportedAudioFileException {
    // should find sounds placed in root of jar file, which build file should do;
    // in an ide, setting should be tweaked to see audio dir as on path.
    // (e.g., in eclipse, run -> debug configs -> classpath -> add folder,
    //  or place audio files in root of runtime dir, likely "bin")
    InputStream raw = this.getClass().getClassLoader().getResourceAsStream(filename);
    if (raw == null) {
      // getResourceAsStream returns null (not an exception) for a missing resource;
      // fail here with a clear message instead of a NullPointerException later.
      throw new IOException("sound resource not found on classpath: " + filename);
    }
    // try-with-resources guarantees the streams are closed even if decoding fails.
    try (InputStream is = new BufferedInputStream(raw)) {
      AudioFileFormat fileFormat = AudioSystem.getAudioFileFormat(is);
      try (AudioInputStream ais = AudioSystem.getAudioInputStream(is)) {
        AudioFormat audioFormat = fileFormat.getFormat();
        ByteArrayOutputStream baos = new ByteArrayOutputStream();
        // Read in whole-frame multiples so no partial frame is ever split.
        byte[] byteBuf = new byte[BUFLEN * audioFormat.getFrameSize()];
        int nRead;
        while ((nRead = ais.read(byteBuf)) != -1) {
          baos.write(byteBuf, 0, nRead);
        }
        GameSoundData gsd = new GameSoundData();
        gsd.format = audioFormat;
        gsd.rawData = baos.toByteArray();
        return gsd;
      }
    }
  }
  /**
   * Checks that an Ogg file exposes the expected Tritonus property keys on both its
   * AudioFileFormat and its AudioFormat.
   */
  public void testPropertiesFile() {
    // Property keys an Ogg AudioFileFormat is expected to carry.
    String[] testPropsAFF = {
      "duration", "title", "author", "album", "date", "comment",
      "copyright", "ogg.bitrate.min", "ogg.bitrate.nominal", "ogg.bitrate.max"
    };
    // Property keys an Ogg AudioFormat is expected to carry.
    String[] testPropsAF = {"vbr", "bitrate"};

    File file = new File(filename);
    AudioFileFormat baseFileFormat = null;
    AudioFormat baseFormat = null;
    try {
      baseFileFormat = AudioSystem.getAudioFileFormat(file);
      baseFormat = baseFileFormat.getFormat();
      if (out != null) out.println("-> Filename : " + filename + " <-");
      if (out != null) out.println(baseFileFormat);
      if (baseFileFormat instanceof TAudioFileFormat) {
        Map properties = ((TAudioFileFormat) baseFileFormat).properties();
        if (out != null) out.println(properties);
        for (String key : testPropsAFF) {
          Object value = properties.get(key);
          if (value != null) {
            String val = value.toString();
            // if (out != null)  out.println(key+"="+val);
            String valexpected = props.getProperty(key);
            // assertEquals(key,valexpected,val);
          }
        }
      } else {
        assertTrue("testPropertiesFile : TAudioFileFormat expected", false);
      }

      if (baseFormat instanceof TAudioFormat) {
        Map properties = ((TAudioFormat) baseFormat).properties();
        for (String key : testPropsAF) {
          Object value = properties.get(key);
          if (value != null) {
            String val = value.toString();
            if (out != null) out.println(key + "=" + val);
            String valexpected = props.getProperty(key);
            // assertEquals(key,valexpected,val);
          }
        }
      } else {
        assertTrue("testPropertiesFile : TAudioFormat expected", false);
      }
    } catch (UnsupportedAudioFileException e) {
      assertTrue("testPropertiesFile : " + e.getMessage(), false);
    } catch (IOException e) {
      assertTrue("testPropertiesFile : " + e.getMessage(), false);
    }
  }
  /**
   * Builds an AudioInputStream over the "data" chunk of a RIFF/WAVE stream.
   *
   * @param stream the raw input stream positioned at the start of the file
   * @return a stream over the audio samples in the "data" chunk
   * @throws UnsupportedAudioFileException if the stream is not RIFF/WAVE or has no data chunk
   * @throws IOException if an I/O error occurs
   */
  public AudioInputStream getAudioInputStream(InputStream stream)
      throws UnsupportedAudioFileException, IOException {

    AudioFileFormat format = getAudioFileFormat(stream);
    RIFFReader reader = new RIFFReader(stream);
    // A WAVE file is a RIFF container whose form type is "WAVE".
    if (!reader.getFormat().equals("RIFF") || !reader.getType().equals("WAVE")) {
      throw new UnsupportedAudioFileException();
    }
    // Walk the top-level chunks until the one holding the audio samples is found.
    while (reader.hasNextChunk()) {
      RIFFReader chunk = reader.nextChunk();
      if (chunk.getFormat().equals("data")) {
        return new AudioInputStream(chunk, format.getFormat(), chunk.getSize());
      }
    }
    // No "data" chunk: not a playable WAVE stream.
    throw new UnsupportedAudioFileException();
  }
 /**
  * Return the AudioInputStream from the given InputStream.
  *
  * <p>The stream is marked before format detection and reset afterwards, so the returned
  * AudioInputStream starts at the beginning of the audio data. On failure the stream is
  * reset so the caller can hand it to another reader.
  *
  * @param inputStream the stream to read from; wrapped in a BufferedInputStream when it
  *     does not support mark/reset
  * @param medialength media length, forwarded to getAudioFileFormat
  * @param totalms total play time in milliseconds, forwarded to getAudioFileFormat
  * @return an AudioInputStream over the decoded audio
  * @throws UnsupportedAudioFileException if the format cannot be determined
  * @throws IOException if an I/O error occurs
  */
 public AudioInputStream getAudioInputStream(InputStream inputStream, int medialength, int totalms)
     throws UnsupportedAudioFileException, IOException {
   if (TDebug.TraceAudioFileReader)
     // Fixed garbled trace message ("inputStreamint medialength").
     TDebug.out("getAudioInputStream(InputStream inputStream, int medialength, int totalms)");
   try {
     if (!inputStream.markSupported()) inputStream = new BufferedInputStream(inputStream);
     inputStream.mark(MARK_LIMIT);
     AudioFileFormat audioFileFormat = getAudioFileFormat(inputStream, medialength, totalms);
     inputStream.reset();
     return new AudioInputStream(
         inputStream, audioFileFormat.getFormat(), audioFileFormat.getFrameLength());
   } catch (UnsupportedAudioFileException | IOException e) {
     // Rewind so the caller can retry this stream with a different reader.
     inputStream.reset();
     throw e;
   }
 }
// Beispiel #6 (scraped example-site marker; score: 0)
  /**
   * Regression test for JDK bug 4636355: RIFF headers must be written with the extra
   * data-length field, giving a WAVE header of at least 46 bytes when ULAW data is written.
   * Runs as a plain main(); failure is signalled by throwing an Exception.
   */
  public static void main(String args[]) throws Exception {
    System.out.println();
    System.out.println();
    System.out.println(
        "4636355: Check that RIFF headers are written with extra data length field.");
    // Arbitrary payload; only its length matters for the header checks below.
    byte[] fakedata = new byte[1234];
    MyByteArrayInputStream is = new MyByteArrayInputStream(fakedata);
    AudioFormat inFormat = new AudioFormat(AudioFormat.Encoding.ULAW, 8000, 8, 1, 1, 8000, true);

    AudioInputStream ais = new AudioInputStream((InputStream) is, inFormat, fakedata.length);
    ByteArrayOutputStream out = new ByteArrayOutputStream(1500);
    System.out.println("  ulaw data will be written as WAVE to stream...");
    int t = AudioSystem.write(ais, AudioFileFormat.Type.WAVE, out);
    byte[] writtenData = out.toByteArray();
    // now header must have at least 46 bytes
    System.out.println(
        "  Length should be " + (fakedata.length + 46) + " bytes: " + writtenData.length);
    // re-read this file
    is = new MyByteArrayInputStream(writtenData);
    System.out.println("  Get AudioFileFormat of written file");
    AudioFileFormat fileformat = AudioSystem.getAudioFileFormat(is);
    AudioFileFormat.Type type = fileformat.getType();
    System.out.println("  The file format type: " + type);
    // The re-read frame length must match the payload or be left unspecified.
    if (fileformat.getFrameLength() != fakedata.length
        && fileformat.getFrameLength() != AudioSystem.NOT_SPECIFIED) {
      throw new Exception(
          "The written file's frame length is "
              + fileformat.getFrameLength()
              + " but should be "
              + fakedata.length
              + " !");
    }
    ais = AudioSystem.getAudioInputStream(is);
    System.out.println("  Got Stream with format: " + ais.getFormat());
    // Opening the stream must have consumed at least the 46-byte extended RIFF header.
    if (is.getPos() < 46) {
      throw new Exception(
          "After reading the header, stream position must be at least 46, but is "
              + is.getPos()
              + " !");
    }
    System.out.println("  test passed.");
  }
 /**
  * Connects to the configured Shoutcast URL and dumps every property exposed by the
  * Tritonus AudioFileFormat and AudioFormat; fails if either is not SPI-compliant.
  */
 public void testPropertiesShoutcast() {
   AudioFileFormat baseFileFormat = null;
   AudioFormat baseFormat = null;
   // Stream URL comes from the test's properties file.
   String shoutURL = (String) props.getProperty("shoutcast");
   try {
     URL url = new URL(shoutURL);
     baseFileFormat = AudioSystem.getAudioFileFormat(url);
     baseFormat = baseFileFormat.getFormat();
     if (out != null) out.println("-> URL : " + url.toString() + " <-");
     if (out != null) out.println(baseFileFormat);
     if (baseFileFormat instanceof TAudioFileFormat) {
       // Dump every file-format property as key='value'.
       Map properties = ((TAudioFileFormat) baseFileFormat).properties();
       for (Object k : properties.keySet()) {
         String key = (String) k;
         Object value = properties.get(key);
         String val = (value != null) ? value.toString() : null;
         if (out != null) out.println(key + "='" + val + "'");
       }
     } else {
       assertTrue("testPropertiesShoutcast : TAudioFileFormat expected", false);
     }
     if (baseFormat instanceof TAudioFormat) {
       // Same dump for the audio-format properties.
       Map properties = ((TAudioFormat) baseFormat).properties();
       for (Object k : properties.keySet()) {
         String key = (String) k;
         Object value = properties.get(key);
         String val = (value != null) ? value.toString() : null;
         if (out != null) out.println(key + "='" + val + "'");
       }
     } else {
       assertTrue("testPropertiesShoutcast : TAudioFormat expected", false);
     }
   } catch (UnsupportedAudioFileException e) {
     assertTrue("testPropertiesShoutcast : " + e.getMessage(), false);
   } catch (IOException e) {
     assertTrue("testPropertiesShoutcast : " + e.getMessage(), false);
   }
 }
  /**
   * Dumps all Tritonus properties of the configured file (disabled test: leading underscore).
   *
   * <p>Fixed: the original called {@code properties.get(key).toString()} without a null
   * check and threw a NullPointerException for null-valued properties; now guarded the same
   * way as testPropertiesShoutcast.
   */
  public void _testDumpPropertiesFile() {
    File file = new File(filename);
    AudioFileFormat baseFileFormat = null;
    AudioFormat baseFormat = null;
    try {
      baseFileFormat = AudioSystem.getAudioFileFormat(file);
      baseFormat = baseFileFormat.getFormat();
      if (out != null) out.println("-> Filename : " + filename + " <-");
      if (baseFileFormat instanceof TAudioFileFormat) {
        // Dump every file-format property as key='value'.
        Map properties = ((TAudioFileFormat) baseFileFormat).properties();
        Iterator it = properties.keySet().iterator();
        while (it.hasNext()) {
          String key = (String) it.next();
          Object value = properties.get(key);
          String val = (value != null) ? value.toString() : null;
          if (out != null) out.println(key + "='" + val + "'");
        }
      } else {
        assertTrue("testDumpPropertiesFile : TAudioFileFormat expected", false);
      }

      if (baseFormat instanceof TAudioFormat) {
        // Same dump for the audio-format properties.
        Map properties = ((TAudioFormat) baseFormat).properties();
        Iterator it = properties.keySet().iterator();
        while (it.hasNext()) {
          String key = (String) it.next();
          Object value = properties.get(key);
          String val = (value != null) ? value.toString() : null;
          if (out != null) out.println(key + "='" + val + "'");
        }
      } else {
        assertTrue("testDumpPropertiesFile : TAudioFormat expected", false);
      }
    } catch (UnsupportedAudioFileException e) {
      assertTrue("testDumpPropertiesFile : " + e.getMessage(), false);
    } catch (IOException e) {
      assertTrue("testDumpPropertiesFile : " + e.getMessage(), false);
    }
  }
// Beispiel #9 (scraped example-site marker; score: 0)
 /**
  * Write our internal representation to output stream <code>os</code>, in the appropriate way as
  * determined by our <code>type</code>.
  *
  * <p>XML types are serialized via the MaryNormalisedWriter, plain text types via a UTF-8
  * writer, and audio types via AudioSystem.write.
  *
  * @param os os
  * @throws TransformerConfigurationException TransformerConfigurationException
  * @throws FileNotFoundException FileNotFoundException
  * @throws TransformerException TransformerException
  * @throws IOException IOException
  * @throws Exception Exception
  */
 public void writeTo(OutputStream os)
     throws TransformerConfigurationException, FileNotFoundException, TransformerException,
         IOException, Exception {
   if (type.isXMLType()) {
     // Lazily create the shared writer; it is reused across calls.
     if (writer == null) writer = new MaryNormalisedWriter();
     if (logger.getEffectiveLevel().equals(Level.DEBUG)) {
       // At DEBUG level, serialize once into memory so the full output can be logged.
       ByteArrayOutputStream debugOut = new ByteArrayOutputStream();
       writer.output(xmlDocument, debugOut);
       logger.debug(debugOut.toString());
     }
     writer.output(xmlDocument, new BufferedOutputStream(os));
   } else if (type.isTextType()) { // caution: XML types are text types!
     writeTo(new OutputStreamWriter(os, "UTF-8"));
   } else { // audio
     logger.debug("Writing audio output, frame length " + audio.getFrameLength());
     AudioSystem.write(audio, audioFileFormat.getType(), os);
     // NOTE(review): os is flushed and closed only in this audio branch; the XML branch's
     // BufferedOutputStream is never explicitly flushed — verify callers flush/close os.
     os.flush();
     os.close();
   }
 }
 /**
  * Inits AudioInputStream and AudioFileFormat from the data source.
  *
  * <p>Opens the data source (URL, File or InputStream), creates the playback line, collects
  * file and format properties into a Map, notifies all registered listeners, and moves the
  * player into the OPENED state.
  *
  * @throws BasicPlayerException wrapping any LineUnavailableException,
  *     UnsupportedAudioFileException or IOException raised while opening
  */
 protected void initAudioInputStream() throws BasicPlayerException {
   try {
     reset();
     notifyEvent(BasicPlayerEvent.OPENING, getEncodedStreamPosition(), -1, m_dataSource);
     // Dispatch on the concrete data-source type; each overload sets up the stream.
     if (m_dataSource instanceof URL) {
       initAudioInputStream((URL) m_dataSource);
     } else if (m_dataSource instanceof File) {
       initAudioInputStream((File) m_dataSource);
     } else if (m_dataSource instanceof InputStream) {
       initAudioInputStream((InputStream) m_dataSource);
     }
     createLine();
     // Notify listeners with AudioFileFormat properties.
     Map properties = null;
     if (m_audioFileFormat instanceof TAudioFileFormat) {
       // Tritonus SPI compliant audio file format.
       properties = ((TAudioFileFormat) m_audioFileFormat).properties();
       // Clone the Map because it is not mutable.
       properties = deepCopy(properties);
     } else {
       properties = new HashMap();
     }
     // Add JavaSound properties. Values <= 0 mean "unknown" and are omitted.
     if (m_audioFileFormat.getByteLength() > 0) {
       properties.put("audio.length.bytes", new Integer(m_audioFileFormat.getByteLength()));
     }
     if (m_audioFileFormat.getFrameLength() > 0) {
       properties.put("audio.length.frames", new Integer(m_audioFileFormat.getFrameLength()));
     }
     if (m_audioFileFormat.getType() != null) {
       properties.put("audio.type", (m_audioFileFormat.getType().toString()));
     }
     // Audio format.
     AudioFormat audioFormat = m_audioFileFormat.getFormat();
     if (audioFormat.getFrameRate() > 0) {
       properties.put("audio.framerate.fps", new Float(audioFormat.getFrameRate()));
     }
     if (audioFormat.getFrameSize() > 0) {
       properties.put("audio.framesize.bytes", new Integer(audioFormat.getFrameSize()));
     }
     if (audioFormat.getSampleRate() > 0) {
       properties.put("audio.samplerate.hz", new Float(audioFormat.getSampleRate()));
     }
     if (audioFormat.getSampleSizeInBits() > 0) {
       properties.put("audio.samplesize.bits", new Integer(audioFormat.getSampleSizeInBits()));
     }
     if (audioFormat.getChannels() > 0) {
       properties.put("audio.channels", new Integer(audioFormat.getChannels()));
     }
     if (audioFormat instanceof TAudioFormat) {
       // Tritonus SPI compliant audio format.
       Map addproperties = ((TAudioFormat) audioFormat).properties();
       properties.putAll(addproperties);
     }
     // Add SourceDataLine
     properties.put("basicplayer.sourcedataline", m_line);
     // Tell every registered listener that the source has been opened.
     Iterator<BasicPlayerListener> it = laucher.getBasicPlayerListeners().iterator();
     while (it.hasNext()) {
       BasicPlayerListener bpl = it.next();
       bpl.opened(m_dataSource, properties);
     }
     m_status = OPENED;
     notifyEvent(BasicPlayerEvent.OPENED, getEncodedStreamPosition(), -1, null);
   } catch (LineUnavailableException e) {
     throw new BasicPlayerException(e);
   } catch (UnsupportedAudioFileException e) {
     throw new BasicPlayerException(e);
   } catch (IOException e) {
     throw new BasicPlayerException(e);
   }
 }
  /**
   * Verifies that every expected MP3 property of both the AudioFileFormat and the
   * AudioFormat matches the value recorded in the test's properties file.
   */
  public void testPropertiesFile() {
    // Property keys an MP3 AudioFileFormat is expected to carry.
    String[] testPropsAFF = {
      "duration", "title", "author", "album", "date", "comment", "copyright",
      "mp3.framerate.fps", "mp3.copyright", "mp3.padding", "mp3.original",
      "mp3.length.bytes", "mp3.frequency.hz", "mp3.length.frames", "mp3.mode",
      "mp3.channels", "mp3.version.mpeg", "mp3.framesize.bytes", "mp3.vbr.scale",
      "mp3.version.encoding", "mp3.header.pos", "mp3.version.layer", "mp3.crc"
    };
    // Property keys an MP3 AudioFormat is expected to carry.
    String[] testPropsAF = {"vbr", "bitrate"};

    File file = new File(filename);
    AudioFileFormat baseFileFormat = null;
    AudioFormat baseFormat = null;
    try {
      baseFileFormat = AudioSystem.getAudioFileFormat(file);
      baseFormat = baseFileFormat.getFormat();
      if (out != null) out.println("-> Filename : " + filename + " <-");
      if (out != null) out.println(baseFileFormat);
      if (baseFileFormat instanceof TAudioFileFormat) {
        Map properties = ((TAudioFileFormat) baseFileFormat).properties();
        if (out != null) out.println(properties);
        for (String key : testPropsAFF) {
          Object value = properties.get(key);
          String val = (value != null) ? value.toString() : null;
          if (out != null) out.println(key + "='" + val + "'");
          // Compare against the expected value from the properties file.
          String valexpected = props.getProperty(key);
          assertEquals(key, valexpected, val);
        }
      } else {
        assertTrue("testPropertiesFile : TAudioFileFormat expected", false);
      }

      if (baseFormat instanceof TAudioFormat) {
        Map properties = ((TAudioFormat) baseFormat).properties();
        for (String key : testPropsAF) {
          Object value = properties.get(key);
          String val = (value != null) ? value.toString() : null;
          if (out != null) out.println(key + "='" + val + "'");
          String valexpected = props.getProperty(key);
          assertEquals(key, valexpected, val);
        }
      } else {
        assertTrue("testPropertiesFile : TAudioFormat expected", false);
      }
    } catch (UnsupportedAudioFileException e) {
      assertTrue("testPropertiesFile : " + e.getMessage(), false);
    } catch (IOException e) {
      assertTrue("testPropertiesFile : " + e.getMessage(), false);
    }
  }
  /**
   * Copy constructor: builds a WaveFileFormat carrying over the type, byte length, format
   * and frame length of the given AudioFileFormat.
   */
  WaveFileFormat(AudioFileFormat aff) {

    this(aff.getType(), aff.getByteLength(), aff.getFormat(), aff.getFrameLength());
  }
  /**
   * Apply the given effect to this WaveTab´s audio data
   *
   * <p>If nothing is selected, the whole wave is selected first. A feeder thread streams the
   * (stereo-converted) audio into the effect unit's source line while the processed audio is
   * recorded from its target line, written to a temporary WAVE file, given a peak file, and
   * finally inserted into the cut list as an undoable ReplaceElement.
   *
   * @param effect The effect to apply
   */
  public void applyEffect(Effect effect) {
    Selection sel = waveDisplay.getSelection();
    // An empty selection means "apply to everything".
    if (sel.getLeft() == sel.getRight())
      waveDisplay.setSelection(new Selection(0, getTotalLength()));
    Thread thread = null;
    try {
      AudioInputStream stream = getAudioInputStream();
      // Remember the source channel count so mono input can be restored after processing.
      int sourceChannels = stream.getFormat().getChannels();
      stream = AudioManager.getStereoInputStream(stream);
      final FXUnit unit = new FXUnit(effect);
      if (effect.needsAnalysis()) {
        // Some effects need a first analysis pass over the data before processing.
        Analyzer a = new Analyzer(unit, stream);
        ProgressMonitor monitor =
            new ProgressMonitor(getShell(), a, "Analyzing...", "Analyzing audio data");
        monitor.start();
        // The analysis consumed the stream; reopen it for the actual processing pass.
        stream = AudioManager.getStereoInputStream(getAudioInputStream());
      }

      final SourceDataLine sourceLine = unit.getEffectSourceLine();
      sourceLine.open();
      sourceLine.start();
      final TargetDataLine targetLine = unit.getEffectTargetLine();
      targetLine.open();
      targetLine.start();
      // Convert the input to the effect's expected format, or bail out if impossible.
      if (!stream.getFormat().equals(sourceLine.getFormat())) {
        if (AudioSystem.isConversionSupported(sourceLine.getFormat(), stream.getFormat()))
          stream = AudioSystem.getAudioInputStream(sourceLine.getFormat(), stream);
        else {
          editor.errorMessage(
              "Unable to apply effect:\nFormat conversion from "
                  + stream.getFormat()
                  + " to "
                  + sourceLine.getFormat()
                  + " not supported.");
          return;
        }
      }

      // Feeder thread: pushes input audio into the effect's source line while the code
      // below records the processed audio from the target line.
      final AudioInputStream inStream = stream;
      thread =
          new Thread() {
            public void run() {
              int numBytesRead = 0;
              byte[] buffer = new byte[sourceLine.getBufferSize()];
              while (numBytesRead != -1 && !getItem().isDisposed()) {
                try {
                  numBytesRead = inStream.read(buffer, 0, buffer.length);
                } catch (IOException e1) {
                  e1.printStackTrace();
                  numBytesRead = -1;
                }
                if (numBytesRead > 0) {
                  sourceLine.write(buffer, 0, numBytesRead);
                }
                try {
                  Thread.sleep(0, 1);
                } catch (InterruptedException e) {
                }
              }
            }
          };
      thread.start();

      AudioInputStream in = new AudioInputStream(targetLine);
      // Restore mono if the source was mono.
      if (sourceChannels == 1) in = AudioManager.getMonoInputStream(in);
      File tempFile = File.createTempFile("gmtmp_", ".wav");
      // 16-bit little-endian PCM at the original sample rate and channel count.
      AudioFormat tempFormat =
          new AudioFormat(
              fileFormat.getFormat().getSampleRate(),
              16,
              fileFormat.getFormat().getChannels(),
              true,
              false);
      AudioFileOutputStream out =
          AudioManager.getDefault()
              .getAudioFileOutputStream(
                  tempFile, tempFormat, AudioFileFormat.Type.WAVE, null, null, null);
      if (!in.getFormat().equals(out.getFormat()))
        in = AudioSystem.getAudioInputStream(out.getFormat(), in);
      SaveFileThread saver =
          new SaveFileThread(
              in, out, (int) inStream.getFrameLength(), in.getFormat().getFrameSize(), true);
      ProgressMonitor monitor =
          new ProgressMonitor(
              getShell(), saver, "Apply Effect", "Applying " + effect.getName() + " to Selection");
      monitor.start();

      // Build a peak file so the processed audio can be displayed quickly.
      File tempPeak = File.createTempFile("gmtmp_", ".gmpk");
      CreatePeakFileThread peak =
          new CreatePeakFileThread(AudioSystem.getAudioInputStream(tempFile), tempPeak);
      monitor =
          new ProgressMonitor(
              getShell(), peak, "Creating peak file", "Creating peak file for applied effect.");
      monitor.start();

      PeakWaveForm pwf = new PeakWaveForm(tempPeak);
      AudioFileWaveForm awf = new AudioFileWaveForm(tempFile, pwf, 32 * 1024, 25);
      CutListSource newSource = new AudioFileSource(tempFile, awf);

      sel = waveDisplay.getSelection();
      int left = sel.getLeft();
      int right = sel.getRight();

      // Replace the selected range with the processed audio; keep the element for undo.
      ReplaceElement el =
          new ReplaceElement(
              effect.getName(), newSource, left, right - left, fileFormat.getFormat());
      cutList.addElement(el);
      undoOperations.add(el);
      redoOperations.clear();
      // NOTE(review): Thread.stop() is deprecated and unsafe; a volatile "done" flag that
      // ends the feeder loop would be the safe replacement — verify before changing.
      thread.stop();
    } catch (NotReadyException e) {
      e.printStackTrace();
      editor.errorMessage(e.getMessage());
      if (thread != null) thread.stop();
    } catch (NotFinishedException e) {
      e.printStackTrace();
      editor.errorMessage(e.getMessage());
      if (thread != null) thread.stop();
    } catch (LineUnavailableException e) {
      e.printStackTrace();
      editor.errorMessage(e.getMessage());
      if (thread != null) thread.stop();
    } catch (IOException e) {
      e.printStackTrace();
      editor.errorMessage(e.getMessage());
      if (thread != null) thread.stop();
    } catch (UnsupportedAudioFileException e) {
      e.printStackTrace();
      editor.errorMessage(e.getMessage());
      if (thread != null) thread.stop();
    }
  }
  /**
   * Create a new WaveTab
   *
   * @param editor The SampleEditor to which the WaveTab belongs
   * @param parent The TabFolder to be used as parent for the TabItem that will be created
   * @param style The style to be used for the TabItem
   * @param waveStyle The style to be used for the WaveFormDisplay
   * @param f The source file
   * @param wf The WaveForm corresponding to the file
   * @param format The AudioFormat to be used for recording or <code>null</code>, if not recording
   * @param rec true, if recording should be possible for this WaveTab, false otherwise
   * @throws UnsupportedAudioFileException If the AudioFileFormat of the given file can not be
   *     detected (only in case of not recording)
   * @throws IOException If an I/O error occured
   */
  WaveTab(
      SimpleSampleEditor editor,
      TabFolder parent,
      int style,
      int waveStyle,
      File f,
      AudioFileWaveForm wf,
      AudioFormat format,
      boolean rec)
      throws UnsupportedAudioFileException, IOException {
    this.editor = editor;
    afWF = wf;
    channels = wf.getChannels();
    // The cut list tracks all non-destructive edit operations on the source file.
    afSource = new AudioFileSource(f, afWF);
    cutList = new CutList(afSource);
    cutList.addModificationListener(this);
    // Set up the waveform display and hook it into the editor's listeners.
    waveDisplay = new WaveFormDisplay(parent, waveStyle, false);
    waveDisplay.setChannelBackgroundColor(parent.getDisplay().getSystemColor(SWT.COLOR_BLACK));
    waveDisplay.addWaveDisplayListener(editor);
    waveDisplay.setSource(this);
    waveDisplay.setEditMarkers(true);
    waveDisplay.getComposite().addKeyListener(createKeyListener());
    waveDisplay.getComposite().addKeyListener(editor.keyListener);
    waveDisplay.addSelectableListener(
        new SelectableListener() {
          public void selectionPermanentChanged(Selectable s, Selection sel) {}

          public void selectionChanged(Selectable s, Selection sel) {
            // Enable "zoom to selection" only while something is selected.
            WaveTab.this.editor.zoomSel.setEnabled(!sel.isEmpty());
          }

          public void positionChanged(Selectable s, int pos) {}

          public void positionWillChange(Selectable s) {}

          public void positionWontChange(Selectable s) {}
        });
    source = f;
    tabItem = new TabItem(parent, style);
    tabItem.setText(f.getName());
    tabItem.setControl(waveDisplay.getComposite());
    tabItem.setData(WaveTab.this);
    tabItem.addDisposeListener(
        new DisposeListener() {
          public void widgetDisposed(DisposeEvent e) {
            // Detach this tab from the player when the tab is closed.
            if (WaveTab.this.editor.player.getProvider() == WaveTab.this)
              WaveTab.this.editor.player.removeProvider();
          }
        });
    recording = rec;
    if (rec) {
      this.format = format;
      isNew = true;
      canRec = true;
    } else {
      fileFormat =
          AudioManager.getDefault().getAudioFileFormat(f, null, new String[] {"slices", "RLND"});
      // BUGFIX: the original wrote "format = fileFormat.getFormat();", assigning the
      // constructor PARAMETER (a dead store) and leaving the field unset when not
      // recording; assign the field as the rec branch does.
      this.format = fileFormat.getFormat();
      isNew = false;
      canRec = false;

      // For compatibility to 1.4...
      Map properties = AudioManager.getProperties(fileFormat);
      if (properties != null) {
        Object sl = properties.get("slices");
        // "slices" markers embedded in the file are shown as wave-display markers.
        if (sl != null && sl instanceof int[][]) {
          int[][] slices = (int[][]) sl;
          for (int i = 0; i < slices.length; i++) {
            waveDisplay.addMarker(slices[i][0], "");
          }
        }
      }
    }
  }