Example No. 1
  /** Signals that a PooledThread has started. Creates the Thread's line and buffer. */
  protected void threadStarted() {
    // wait for the SoundManager constructor to finish
    synchronized (this) {
      try {
        wait();
      } catch (InterruptedException ex) {
        // ignore; the wait is only a startup handshake with the SoundManager constructor
      }
    }

    // use a short, 100ms (1/10th sec) buffer for filters that
    // change in real-time
    int bufferSize =
        playbackFormat.getFrameSize() * Math.round(playbackFormat.getSampleRate() / 10);

    // create, open, and start the line
    SourceDataLine line;
    DataLine.Info lineInfo = new DataLine.Info(SourceDataLine.class, playbackFormat);
    try {
      line = (SourceDataLine) AudioSystem.getLine(lineInfo);
      line.open(playbackFormat, bufferSize);
    } catch (LineUnavailableException ex) {
      // the line is unavailable - signal to end this thread
      Thread.currentThread().interrupt();
      return;
    }

    line.start();

    // create the buffer
    byte[] buffer = new byte[bufferSize];

    // set this thread's locals
    localLine.set(line);
    localBuffer.set(buffer);
  }
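The localLine and localBuffer thread-locals set at the end of threadStarted() are not declared in this snippet; a minimal sketch of how they might look in the enclosing SoundManager class (an assumption for illustration, not the original source):

  // Assumed field declarations; threadStarted() stores one line and one buffer per pooled thread.
  private ThreadLocal<SourceDataLine> localLine = new ThreadLocal<SourceDataLine>();
  private ThreadLocal<byte[]> localBuffer = new ThreadLocal<byte[]>();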
Example No. 2
 public void play(InputStream source) {
   int bufferSize = format.getFrameSize() * Math.round(format.getSampleRate() / 10);
   byte[] buffer = new byte[bufferSize];
   SourceDataLine line;
   try {
     DataLine.Info info = new DataLine.Info(SourceDataLine.class, format);
     line = (SourceDataLine) AudioSystem.getLine(info);
     line.open(format, bufferSize);
   } catch (LineUnavailableException e) {
     e.printStackTrace();
     return;
   }
   line.start();
   try {
     int numBytesRead = 0;
     while (numBytesRead != -1) {
       numBytesRead = source.read(buffer, 0, buffer.length);
       if (numBytesRead != -1) line.write(buffer, 0, numBytesRead);
     }
   } catch (IOException e) {
     e.printStackTrace();
   }
   line.drain();
   line.close();
 }
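A brief caller sketch for play(InputStream), assuming a hypothetical wrapper class (here called SimplePlayer) that is constructed with the stream's AudioFormat and exposes the method above:

 // Hypothetical usage; SimplePlayer and its constructor are illustrative assumptions.
 AudioInputStream in = AudioSystem.getAudioInputStream(new File("sound.wav"));
 SimplePlayer player = new SimplePlayer(in.getFormat());
 player.play(in); // returns once the stream ends and the line has drained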
Example No. 3
  public Format[] getSupportedOutputFormats(Format in) {

    if (in == null) return new Format[] {new AudioFormat(AudioFormat.ULAW)};

    if (matches(in, inputFormats) == null) return new Format[1];

    if (!(in instanceof AudioFormat)) return new Format[] {new AudioFormat(AudioFormat.ULAW)};

    AudioFormat af = (AudioFormat) in;
    return new Format[] {
      new AudioFormat(
          AudioFormat.ULAW, af.getSampleRate(), af.getSampleSizeInBits(), af.getChannels())
    };
  }
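Example No. 4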
  /**
   * Sells a specified number of records. Throws an exception if the record doesn't exist in the
   * store or if trying to sell more records than exist in the inventory.
   *
   * @param record - the record to sell
   * @param quantity - the number of records to sell
   */
  public void sellItem(AudioFormat record, int quantity)
      throws NullPointerException, IllegalArgumentException {
    // System.out.println("Selling: " + record.toString() + ", quantity: " + quantity);
    // Find the record in the inventory; item stays null if the record is not found
    InventoryItem item = null;
    for (int i = 0; i < this.listOfItems.length; i++) {
      // If it finds the record, remember it and break out of the loop
      if (record.equals(this.listOfItems[i].getRecord())) {
        item = this.listOfItems[i];
        break;
      }
    }

    // Handle the record not being in the inventory
    if (item == null) {
      throw new NullPointerException(
          "Error: there is no record of this type in stock "
              + "(record does not exist within the array).");
    }

    // Handle selling more items than the store has
    if (item.getQuantity() - quantity < 0) {
      throw new IllegalArgumentException(
          "Error: you cannot sell more records than you have "
              + "(# of records sold makes quantity smaller than 0).");
    }
    item.setQuantity(item.getQuantity() - quantity);
  }
Example No. 5
  public boolean load(File file) {

    this.file = file;

    if (file != null && file.isFile()) {
      try {
        errStr = null;
        audioInputStream = AudioSystem.getAudioInputStream(file);

        fileName = file.getName();

        format = audioInputStream.getFormat();

      } catch (Exception ex) {
        reportStatus(ex.toString());
        return false;
      }
    } else {
      reportStatus("Audio file required.");
      return false;
    }

    numChannels = format.getChannels();
    sampleRate = (double) format.getSampleRate();
    sampleBitSize = format.getSampleSizeInBits();
    long frameLength = audioInputStream.getFrameLength();
    long milliseconds = (long) ((frameLength * 1000) / audioInputStream.getFormat().getFrameRate());
    double audioFileDuration = milliseconds / 1000.0;

    if (audioFileDuration > MAX_AUDIO_DURATION) duration = MAX_AUDIO_DURATION;
    else duration = audioFileDuration;

    frameLength = (int) Math.floor((duration / audioFileDuration) * (double) frameLength);

    try {
      audioBytes = new byte[(int) frameLength * format.getFrameSize()];
      // read() may return fewer bytes than requested, so loop until the buffer is full
      int offset = 0;
      int bytesRead;
      while (offset < audioBytes.length
          && (bytesRead = audioInputStream.read(audioBytes, offset, audioBytes.length - offset))
              != -1) {
        offset += bytesRead;
      }
    } catch (Exception ex) {
      reportStatus(ex.toString());
      return false;
    }

    getAudioData();

    return true;
  }
Example No. 6
  public static Clip loadClip(URL url) {
    Clip clip = null;
    String fnm = "" + url;
    try {
      AudioInputStream stream = AudioSystem.getAudioInputStream(url);
      AudioFormat format = stream.getFormat();

      if ((format.getEncoding() == AudioFormat.Encoding.ULAW)
          || (format.getEncoding() == AudioFormat.Encoding.ALAW)) {
        AudioFormat newFormat =
            new AudioFormat(
                AudioFormat.Encoding.PCM_SIGNED,
                format.getSampleRate(),
                format.getSampleSizeInBits() * 2,
                format.getChannels(),
                format.getFrameSize() * 2,
                format.getFrameRate(),
                true); // big endian
        stream = AudioSystem.getAudioInputStream(newFormat, stream);
        // System.out.println("Converted Audio format: " + newFormat);
        format = newFormat;
      }

      DataLine.Info info = new DataLine.Info(Clip.class, format);

      // make sure sound system supports data line
      if (!AudioSystem.isLineSupported(info)) {
        System.out.println("Unsupported Clip File: " + fnm);
        return null;
      }
      // get clip line resource
      clip = (Clip) AudioSystem.getLine(info);
      clip.open(stream); // open the sound file as a clip
      stream.close(); // we're done with the input stream
      // duration (in secs) of the clip
      double duration = clip.getMicrosecondLength() / 1000000.0; // new
      if (duration <= 1.0) {
        System.out.println("WARNING. Duration <= 1 sec : " + duration + " secs");
        System.out.println(
            "         The clip in " + fnm + " may not play in J2SE 1.5 -- make it longer");
      }
      // else
      //  System.out.println(fnm + ": Duration: " + duration + " secs");
    } // end of try block
    catch (UnsupportedAudioFileException audioException) {
      System.out.println("Unsupported audio file: " + fnm);
    } catch (LineUnavailableException noLineException) {
      System.out.println("No audio line available for : " + fnm);
    } catch (IOException ioException) {
      System.out.println("Could not read: " + fnm);
    } catch (Exception e) {
      System.out.println("Problem with " + fnm);
    }
    return clip;
  } // end of loadClip()
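A short usage sketch for loadClip(); the resource path and call site are illustrative assumptions:

  // Load once, then rewind and restart each time the sound is needed.
  Clip shot = loadClip(getClass().getResource("/sounds/shot.wav")); // hypothetical path
  if (shot != null) {
    shot.setFramePosition(0); // rewind before (re)playing
    shot.start(); // or shot.loop(Clip.LOOP_CONTINUOUSLY) to repeat
  }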
Example No. 7
 @Override
 public boolean equals(Object obj) {
   if (obj instanceof AudioFormat) {
     AudioFormat format = (AudioFormat) obj;
     // These getters return boxed/reference types, so compare values with Objects.equals()
     // rather than reference identity (requires java.util.Objects).
     if (!Objects.equals(format.getCodec(), getCodec())) {
       return false;
     }
     if (!Objects.equals(format.getContainer(), getContainer())) {
       return false;
     }
     if (!Objects.equals(format.isBigEndian(), isBigEndian())) {
       return false;
     }
     if (!Objects.equals(format.getBitDepth(), getBitDepth())) {
       return false;
     }
     if (!Objects.equals(format.getBitRate(), getBitRate())) {
       return false;
     }
     if (!Objects.equals(format.getFrequency(), getFrequency())) {
       return false;
     }
     return true;
   }
   return super.equals(obj);
 }
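Since equals is overridden, a matching hashCode keeps the class consistent with the equals/hashCode contract; a minimal companion sketch over the same getters (requires java.util.Objects):

 @Override
 public int hashCode() {
   // Hash exactly the fields that equals() compares.
   return Objects.hash(
       getCodec(), getContainer(), isBigEndian(), getBitDepth(), getBitRate(), getFrequency());
 }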
Example No. 8
  protected Format[] getMatchingOutputFormats(Format in) {

    AudioFormat af = (AudioFormat) in;

    supportedOutputFormats =
        new AudioFormat[] {
          new AudioFormat(
              Constants.ALAW_RTP,
              af.getSampleRate(),
              8,
              1,
              Format.NOT_SPECIFIED,
              Format.NOT_SPECIFIED,
              8,
              Format.NOT_SPECIFIED,
              Format.byteArray)
        };
    return supportedOutputFormats;
  }
Example No. 9
  private static WAVData readFromStream(AudioInputStream aIn)
      throws UnsupportedAudioFileException, IOException {
    ReadableByteChannel aChannel = Channels.newChannel(aIn);
    AudioFormat fmt = aIn.getFormat();
    int numChannels = fmt.getChannels();
    int bits = fmt.getSampleSizeInBits();
    int format = AL_FORMAT_MONO8;

    if ((bits == 8) && (numChannels == 1)) {
      format = AL_FORMAT_MONO8;
    } else if ((bits == 16) && (numChannels == 1)) {
      format = AL_FORMAT_MONO16;
    } else if ((bits == 8) && (numChannels == 2)) {
      format = AL_FORMAT_STEREO8;
    } else if ((bits == 16) && (numChannels == 2)) {
      format = AL_FORMAT_STEREO16;
    }

    int freq = Math.round(fmt.getSampleRate());
    int size = aIn.available();
    ByteBuffer buffer = ByteBuffer.allocateDirect(size);
    // fill the buffer, guarding against an early end-of-stream
    while (buffer.remaining() > 0) {
      if (aChannel.read(buffer) == -1) {
        break;
      }
    }
    buffer.rewind();

    // Must byte swap on big endian platforms
    // Thanks to swpalmer on javagaming.org forums for hint at fix
    if ((bits == 16) && (ByteOrder.nativeOrder() == ByteOrder.BIG_ENDIAN)) {
      int len = buffer.remaining();
      for (int i = 0; i < len; i += 2) {
        byte a = buffer.get(i);
        byte b = buffer.get(i + 1);
        buffer.put(i, b);
        buffer.put(i + 1, a);
      }
    }

    WAVData result = new WAVData(buffer, format, size, freq, false);
    aIn.close();

    return result;
  }
Example No. 10
  // Plays au, aiff, and wav audio streams; this method is essentially the code from the original forum post
  private synchronized void play() {
    ByteArrayInputStream aMusicInputStream;
    AudioFormat format;
    AudioInputStream musicInputStream;
    byte[] audioSamples;
    SourceDataLine line;
    try {
      File musicFile = new File(m_filename);

      musicInputStream = AudioSystem.getAudioInputStream(musicFile); // get the audio input stream from the file
      format = musicInputStream.getFormat(); // get the format of the audio input stream
      audioSamples = getAudioSamples(musicInputStream, format); // get the audio samples

      aMusicInputStream = new ByteArrayInputStream(audioSamples);
      int bufferSize = format.getFrameSize() * Math.round(format.getSampleRate() / 10);
      byte[] buffer = new byte[bufferSize];
      try {
        DataLine.Info info = new DataLine.Info(SourceDataLine.class, format);
        line = (SourceDataLine) AudioSystem.getLine(info);
        line.open(format, bufferSize);
      } catch (LineUnavailableException e) {
        e.printStackTrace();
        return;
      }

      if (!line.isRunning()) {
        line.start();
      }

      int numBytesRead = 0;
      while (numBytesRead != -1 && !m_stopped) {
        numBytesRead = aMusicInputStream.read(buffer, 0, buffer.length);
        if (numBytesRead != -1) {
          line.write(buffer, 0, numBytesRead);
        }
      }
      line.drain();
      line.close();
    } catch (Exception e) {
      e.printStackTrace();
    }
  }
Example No. 11
 public byte[] getSamples(AudioInputStream stream) {
   int length = (int) (stream.getFrameLength() * format.getFrameSize());
   byte[] samples = new byte[length];
   DataInputStream in = new DataInputStream(stream);
   try {
     in.readFully(samples);
   } catch (IOException e) {
     e.printStackTrace();
   }
   return samples;
 }
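Example No. 12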
    @Override
    public void read(Buffer buffer) throws IOException {
      pbs.read(buffer);

      // Remap the time stamps so it won't wrap around
      // while changing to a new file.
      if (buffer.getTimeStamp() != Buffer.TIME_UNKNOWN) {
        long diff = buffer.getTimeStamp() - lastTS;
        lastTS = buffer.getTimeStamp();
        if (diff > 0) timeStamp += diff;
        buffer.setTimeStamp(timeStamp);
      }

      // If this track is to be used as the master time base,
      // we'll need to compute the master time based on this track.
      if (useAsMaster) {
        if (buffer.getFormat() instanceof AudioFormat) {
          AudioFormat af = (AudioFormat) buffer.getFormat();
          masterAudioLen += buffer.getLength();
          long t = af.computeDuration(masterAudioLen);
          if (t > 0) {
            masterTime = t;
          } else {
            masterTime = buffer.getTimeStamp();
          }
        } else {
          masterTime = buffer.getTimeStamp();
        }
      }

      if (buffer.isEOM()) {
        tInfo.done = true;
        if (!ds.handleEOM(tInfo)) {
          // This is not the last processor to be done.
          // We'll need to un-set the EOM flag.
          buffer.setEOM(false);
          buffer.setDiscard(true);
        }
      }
    }
Example No. 13
  // Get the audio samples
  private byte[] getAudioSamples(AudioInputStream MusicStream, AudioFormat format) {
    int audioSampleLength = (int) (MusicStream.getFrameLength() * format.getFrameSize());
    byte[] aAudioSamples = new byte[audioSampleLength];
    DataInputStream dataInputStream = new DataInputStream(MusicStream);

    try {
      dataInputStream.readFully(aAudioSamples);
    } catch (Exception e) {
      e.printStackTrace();
    }

    return aAudioSamples;
  }
Example No. 14
  private void getAudioData() {

    if (format.getSampleSizeInBits() == 16) {
      nlengthInSamples = audioBytes.length / 2;
      audioData = new int[nlengthInSamples];
      if (format.isBigEndian()) {
        for (int i = 0; i < nlengthInSamples; i++) {
          // First byte is MSB (high order)
          int MSB = (int) audioBytes[2 * i];
          // Second byte is LSB (low order)
          int LSB = (int) audioBytes[2 * i + 1];
          audioData[i] = MSB << 8 | (255 & LSB);
        }
      } else {
        for (int i = 0; i < nlengthInSamples; i++) {
          // First byte is LSB (low order)
          int LSB = (int) audioBytes[2 * i];
          // Second byte is MSB (high order)
          int MSB = (int) audioBytes[2 * i + 1];
          audioData[i] = MSB << 8 | (255 & LSB);
        }
      }
    } else {
      if (format.getSampleSizeInBits() == 8) {
        nlengthInSamples = audioBytes.length;
        audioData = new int[nlengthInSamples];
        if (format.getEncoding().toString().startsWith("PCM_SIGN")) {
          for (int i = 0; i < audioBytes.length; i++) {
            audioData[i] = audioBytes[i];
          }
        } else {
          for (int i = 0; i < audioBytes.length; i++) {
            audioData[i] = audioBytes[i] - 128;
          }
        }
      }
    }
  }
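The 16-bit branch above can also be written with a ShortBuffer view; a brief equivalent sketch, assuming the same audioBytes and format fields (requires java.nio.ByteBuffer, ByteOrder, and ShortBuffer):

    // Decode signed 16-bit PCM into ints via a ShortBuffer view with the matching byte order.
    ByteBuffer bytes = ByteBuffer.wrap(audioBytes)
        .order(format.isBigEndian() ? ByteOrder.BIG_ENDIAN : ByteOrder.LITTLE_ENDIAN);
    ShortBuffer samples = bytes.asShortBuffer();
    nlengthInSamples = samples.remaining();
    audioData = new int[nlengthInSamples];
    for (int i = 0; i < nlengthInSamples; i++) {
      audioData[i] = samples.get(i);
    }
Example No. 15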
  /**
   * Constructs an audio input stream that reads its data from the target data line indicated. The
   * format of the stream is the same as that of the target data line, and the length is
   * AudioSystem#NOT_SPECIFIED.
   *
   * @param line the target data line from which this stream obtains its data.
   * @see AudioSystem#NOT_SPECIFIED
   */
  public AudioInputStream(TargetDataLine line) {

    TargetDataLineInputStream tstream = new TargetDataLineInputStream(line);
    format = line.getFormat();
    frameLength = AudioSystem.NOT_SPECIFIED;
    frameSize = format.getFrameSize();

    if (frameSize == AudioSystem.NOT_SPECIFIED || frameSize <= 0) {
      frameSize = 1;
    }
    this.stream = tstream;
    framePos = 0;
    markpos = 0;
  }
Example No. 16
 private void soundAbspielen(File sound) {
   if (!läuft) return;
   try {
     audioInputStream = AudioSystem.getAudioInputStream(sound);
     af = audioInputStream.getFormat();
     size = (int) (af.getFrameSize() * audioInputStream.getFrameLength());
     audio = new byte[size];
     info = new DataLine.Info(Clip.class, af, size);
     audioInputStream.read(audio, 0, size);
     clip = (Clip) AudioSystem.getLine(info);
     clip.open(af, audio, 0, size);
     clip.start();
   } catch (Exception e) {
     e.printStackTrace();
   }
 }
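Example No. 17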
  /**
   * Constructs an audio input stream that has the requested format and length in sample frames,
   * using audio data from the specified input stream.
   *
   * @param stream the stream on which this <code>AudioInputStream</code> object is based
   * @param format the format of this stream's audio data
   * @param length the length in sample frames of the data in this stream
   */
  public AudioInputStream(InputStream stream, AudioFormat format, long length) {

    super();

    this.format = format;
    this.frameLength = length;
    this.frameSize = format.getFrameSize();

    // any frame size that is not well-defined
    // causes this stream to be read in bytes
    if (this.frameSize == AudioSystem.NOT_SPECIFIED || frameSize <= 0) {
      this.frameSize = 1;
    }

    this.stream = stream;
    framePos = 0;
    markpos = 0;
  }
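Example No. 18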
  /**
   * Adds a specified number of records to the inventory and updates the price
   *
   * @param record - the record to add
   * @param quantity - the number of records to add
   * @param price - the price to sell the record at
   */
  public void addItem(AudioFormat record, int quantity, double price) {
    // System.out.println("Adding: " + record.toString() + ", quantity: " + quantity + ", price: " +
    // price);
    for (int i = 0; i < this.listOfItems.length; i++) {
      InventoryItem item = this.listOfItems[i];

      // If the record is found, set the price and add the quantity and end the method
      if (record.equals(item.getRecord())) {
        item.setPrice(price);
        item.setQuantity(item.getQuantity() + quantity);
        return;
      }
    }

    // If it's an entirely new record, duplicates the old array and increases its size by 1
    InventoryItem[] temp = new InventoryItem[this.listOfItems.length + 1];
    for (int i = 0; i < this.listOfItems.length; i++) {
      temp[i] = this.listOfItems[i];
    }
    this.listOfItems = temp;

    // Adds the new record to the end of the array
    this.listOfItems[this.listOfItems.length - 1] = new InventoryItem(record, quantity, price);
  }
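The array-growing step above can also be written with java.util.Arrays.copyOf; an equivalent sketch:

    // Same effect as the manual copy loop: grow the array by one and append the new item.
    this.listOfItems = Arrays.copyOf(this.listOfItems, this.listOfItems.length + 1);
    this.listOfItems[this.listOfItems.length - 1] = new InventoryItem(record, quantity, price);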
Example No. 19
 /**
  * Determines if the passed AudioFormat is compatible with this AudioFormat.
  *
  * <p>This AudioFormat is compatible with the passed AudioFormat if both have the same value for
  * all non-null members of this instance.
  */
 boolean isCompatible(AudioFormat audioFormat) {
   if (audioFormat == null) {
     return false;
   }
   // The getters return reference types (hence the null checks), so compare values with
   // equals() rather than reference identity.
   if ((null != getContainer()) && !getContainer().equals(audioFormat.getContainer())) {
     return false;
   }
   if ((null != getCodec()) && !getCodec().equals(audioFormat.getCodec())) {
     return false;
   }
   if ((null != isBigEndian()) && !isBigEndian().equals(audioFormat.isBigEndian())) {
     return false;
   }
   if ((null != getBitDepth()) && !getBitDepth().equals(audioFormat.getBitDepth())) {
     return false;
   }
   if ((null != getBitRate()) && !getBitRate().equals(audioFormat.getBitRate())) {
     return false;
   }
   if ((null != getFrequency()) && !getFrequency().equals(audioFormat.getFrequency())) {
     return false;
   }
   return true;
 }
Example No. 20
  public static void main(String[] args) throws Exception {
    int midiDev = -1;
    int audioDev = -1;
    int latencyInMillis = 70;

    // parse arguments
    int argi = 0;
    while (argi < args.length) {
      String arg = args[argi];
      if (arg.equals("-h")) {
        printUsageAndExit();
      } else if (arg.equals("-m")) {
        argi++;
        if (argi >= args.length) {
          printUsageAndExit();
        }
        midiDev = Integer.parseInt(args[argi]);
      } else if (arg.equals("-a")) {
        argi++;
        if (argi >= args.length) {
          printUsageAndExit();
        }
        audioDev = Integer.parseInt(args[argi]);
      } else if (arg.equals("-l")) {
        argi++;
        if (argi >= args.length) {
          printUsageAndExit();
        }
        latencyInMillis = Integer.parseInt(args[argi]);
      } else {
        printUsageAndExit();
      }
      argi++;
    }

    // load samples
    final AudioFileSource[] src = new AudioFileSource[sounds.length];
    for (int i = 0; i < sounds.length; i++) {
      src[i] = new AudioFileSource(new File(sounds[i]));
    }
    // define the first source's audioformat as the master format
    final AudioFormat format = src[0].getFormat();

    // set up mixer
    final AudioMixer mixer = new AudioMixer(format.getChannels(), format.getSampleRate());

    // set up soundcard (sink)
    SoundcardSink sink = new SoundcardSink();
    // open the sink and connect it with the mixer
    sink.open(audioDev, latencyInMillis, format, mixer);
    try {

      // do we want to open a MIDI port?
      MidiIn midi = null;
      if (midiDev >= 0) {
        // start MIDI IN
        midi = new MidiIn();
        midi.setListener(
            new MidiIn.Listener() {
              public void midiInPlayed(int status, int data1, int data2) {
                // only react to NOTE ON messages with velocity > 0
                if (((status & 0xF0) == 0x90) && (data2 > 0)) {
                  AudioFileSource newSrc = src[data1 % src.length].makeClone();
                  mixer.addAudioStream(newSrc);
                  serviceMixer(mixer);
                }
              }

              public void midiInPlayed(byte[] message) {
                // nothing to do for long MIDI messages
              }
            });
        midi.open(midiDev);
      } else {
        Debug.debug("No MIDI.");
      }
      try {

        // start the sink -- from now on, the mixer is polled for new
        // data
        sink.start();

        System.out.println("Press ENTER for a sound, 'q'+ENTER to quit.");
        int currSrc = 0;
        while (true) {
          char c = (char) System.in.read();
          if (c == 'q') {
            break;
          }
          AudioFileSource newSrc = src[(currSrc++) % src.length].makeClone();
          mixer.addAudioStream(newSrc);
          serviceMixer(mixer);
        }
      } finally {
        // clean-up
        if (midi != null) {
          midi.close();
        }
      }
    } finally {
      sink.close();
    }

    Debug.debug("done");
  }
Example No. 21
  public static void main(String[] args) throws Exception {
    float[] testarray = new float[1024];
    for (int i = 0; i < 1024; i++) {
      double ii = i / 1024.0;
      ii = ii * ii;
      testarray[i] = (float) Math.sin(10 * ii * 2 * Math.PI);
      testarray[i] += (float) Math.sin(1.731 + 2 * ii * 2 * Math.PI);
      testarray[i] += (float) Math.sin(0.231 + 6.3 * ii * 2 * Math.PI);
      testarray[i] *= 0.3;
    }

    // Check conversion using PCM_FLOAT
    for (int big = 0; big < 2; big += 1)
      for (int bits = 32; bits <= 64; bits += 32) {
        AudioFormat frm =
            new AudioFormat(
                AudioFloatConverter.PCM_FLOAT, 44100, bits, 1, bits / 8, 44100, big == 1);
        byte[] buff = new byte[testarray.length * frm.getFrameSize()];
        float[] testarray2 = new float[testarray.length];
        AudioFloatConverter conv = AudioFloatConverter.getConverter(frm);
        conv.toByteArray(testarray, buff);
        conv.toFloatArray(buff, testarray2);
        for (int i = 0; i < testarray2.length; i++) {
          if (Math.abs(testarray[i] - testarray2[i]) > 0.05)
            throw new RuntimeException(
                "Conversion failed for " + frm + " , arrays not equal enough!\n");
        }
      }

    // Check float-to-byte and byte-to-float conversion.
    for (int big = 0; big < 2; big += 1)
      for (int signed = 0; signed < 2; signed += 1)
        for (int bits = 6; bits <= 40; bits += 2) {
          AudioFormat frm = new AudioFormat(44100, bits, 1, signed == 1, big == 1);
          byte[] buff = new byte[testarray.length * frm.getFrameSize()];
          float[] testarray2 = new float[testarray.length];
          AudioFloatConverter conv = AudioFloatConverter.getConverter(frm);
          conv.toByteArray(testarray, buff);
          conv.toFloatArray(buff, testarray2);
          for (int i = 0; i < testarray2.length; i++) {
            if (Math.abs(testarray[i] - testarray2[i]) > 0.05)
              throw new RuntimeException(
                  "Conversion failed for " + frm + " , arrays not equal enough!\n");
          }
        }

    // Check big/little
    for (int big = 0; big < 2; big += 1)
      for (int signed = 0; signed < 2; signed += 1)
        for (int bits = 6; bits <= 40; bits += 2) {
          AudioFormat frm = new AudioFormat(44100, bits, 1, signed == 1, big == 1);
          byte[] buff = new byte[testarray.length * frm.getFrameSize()];
          AudioFloatConverter conv = AudioFloatConverter.getConverter(frm);
          conv.toByteArray(testarray, buff);
          byte[] buff2 = new byte[testarray.length * frm.getFrameSize()];
          int fs = frm.getFrameSize();
          for (int i = 0; i < buff2.length; i += fs) {
            for (int j = 0; j < fs; j++) {
              buff2[i + (fs - j - 1)] = buff[i + j];
            }
          }
          float[] testarray2 = new float[testarray.length];
          AudioFormat frm2 = new AudioFormat(44100, bits, 1, signed == 1, big == 0);
          AudioFloatConverter.getConverter(frm2).toFloatArray(buff2, testarray2);
          for (int i = 0; i < testarray2.length; i++) {
            if (Math.abs(testarray[i] - testarray2[i]) > 0.05) {
              throw new RuntimeException(
                  "Conversion failed for " + frm + " to " + frm2 + " , arrays not equal enough!\n");
            }
          }
        }

    // Check signed/unsigned
    for (int big = 0; big < 2; big += 1)
      for (int signed = 0; signed < 2; signed += 1)
        for (int bits = 6; bits <= 40; bits += 2) {
          AudioFormat frm = new AudioFormat(44100, bits, 1, signed == 1, big == 1);
          byte[] b = new byte[testarray.length * frm.getFrameSize()];
          AudioFloatConverter conv = AudioFloatConverter.getConverter(frm);
          conv.toByteArray(testarray, b);
          int fs = frm.getFrameSize();
          if (big == 1) {
            for (int i = 0; i < b.length; i += fs)
              b[i] = (b[i] >= 0) ? (byte) (0x80 | b[i]) : (byte) (0x7F & b[i]);
          } else {
            for (int i = (0 + fs - 1); i < b.length; i += fs)
              b[i] = (b[i] >= 0) ? (byte) (0x80 | b[i]) : (byte) (0x7F & b[i]);
          }
          float[] testarray2 = new float[testarray.length];
          AudioFormat frm2 = new AudioFormat(44100, bits, 1, signed == 0, big == 1);
          AudioFloatConverter.getConverter(frm2).toFloatArray(b, testarray2);
          for (int i = 0; i < testarray2.length; i++) {
            if (Math.abs(testarray[i] - testarray2[i]) > 0.05) {
              throw new RuntimeException(
                  "Conversion failed for " + frm + " to " + frm2 + " , arrays not equal enough!\n");
            }
          }
        }

    // Check that 32->24, 24->16, and 16->8 conversions result in the same float data
    AudioFormat frm = new AudioFormat(44100, 40, 1, true, true);
    byte[] b = new byte[testarray.length * frm.getFrameSize()];
    AudioFloatConverter.getConverter(frm).toByteArray(testarray, b);
    for (int bits = 6; bits <= 40; bits += 2) {
      AudioFormat frm2 = new AudioFormat(44100, bits, 1, true, true);
      byte[] b2 = new byte[testarray.length * frm2.getFrameSize()];
      int fs1 = frm.getFrameSize();
      int fs2 = frm2.getFrameSize();
      int ii = 0;
      for (int i = 0; i < b.length; i += fs1) for (int j = 0; j < fs2; j++) b2[ii++] = b[i + j];
      float[] testarray2 = new float[testarray.length];
      AudioFloatConverter.getConverter(frm2).toFloatArray(b2, testarray2);
      for (int i = 0; i < testarray2.length; i++) {
        if (Math.abs(testarray[i] - testarray2[i]) > 0.05) {
          throw new RuntimeException(
              "Conversion failed for " + frm + " to " + frm2 + " , arrays not equal enough!\n");
        }
      }
    }
  }
Example No. 22
  @Override
  public Format[] getSupportedOutputFormats(Format input) {
    if (input == null) return outputFormats;
    else {
      if (!(input instanceof AudioFormat)) {
        logger.warning(
            this.getClass().getSimpleName()
                + ".getSupportedOutputFormats: input format does not match, returning format array of {null} for "
                + input); // this can cause an NPE in JMF if it ever
        // happens.
        return new Format[] {null};
      }
      final AudioFormat inputCast = (AudioFormat) input;
      if (!inputCast.getEncoding().equals(AudioFormat.ALAW)
          || (inputCast.getSampleSizeInBits() != 8
              && inputCast.getSampleSizeInBits() != Format.NOT_SPECIFIED)
          || (inputCast.getChannels() != 1 && inputCast.getChannels() != Format.NOT_SPECIFIED)
          || (inputCast.getFrameSizeInBits() != 8
              && inputCast.getFrameSizeInBits() != Format.NOT_SPECIFIED)) {
        logger.warning(
            this.getClass().getSimpleName()
                + ".getSupportedOutputFormats: input format does not match, returning format array of {null} for "
                + input); // this can cause an NPE in JMF if it ever
        // happens.
        return new Format[] {null};
      }
      final AudioFormat result =
          new AudioFormat(
              BonusAudioFormatEncodings.ALAW_RTP,
              inputCast.getSampleRate(),
              8,
              1,
              inputCast.getEndian(),
              inputCast.getSigned(),
              8,
              inputCast.getFrameRate(),
              inputCast.getDataType());

      return new Format[] {result};
    }
  }
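Example No. 23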
 Player() {
   super("Haven audio player");
   setDaemon(true);
   srate = (int) fmt.getSampleRate();
 }