@Override
public <A> long put(A value, Serializer<A> serializer) {
    assert (value != null);
    DataIO.DataOutputByteArray out = serialize(value, serializer);

    final long ioRecid;
    if (!disableLocks) {
        newRecidLock.readLock().lock();
    }
    try {
        if (!disableLocks) {
            structuralLock.lock();
        }
        final long[] indexVals;
        try {
            ioRecid = freeIoRecidTake(true);
            indexVals = physAllocate(out.pos, true, false);
        } finally {
            if (!disableLocks) {
                structuralLock.unlock();
            }
        }

        final Lock lock;
        if (disableLocks) {
            lock = null;
        } else {
            lock = locks[Store.lockPos(ioRecid)].writeLock();
            lock.lock();
        }
        try {
            put2(out, ioRecid, indexVals);
        } finally {
            if (!disableLocks) {
                lock.unlock();
            }
        }
    } finally {
        if (!disableLocks) {
            newRecidLock.readLock().unlock();
        }
    }

    long recid = (ioRecid - IO_USER_START) / 8;
    assert (recid > 0);
    if (CC.LOG_STORE && LOG.isLoggable(Level.FINEST))
        LOG.finest("Put recid=" + recid + ", "
                + " size=" + out.pos + ", "
                + " val=" + value
                + " ser=" + serializer);

    recycledDataOuts.offer(out);
    return recid;
}
@Override
public void preallocate(long[] recids) {
    if (!disableLocks) {
        newRecidLock.readLock().lock();
    }
    try {
        if (!disableLocks) {
            structuralLock.lock();
        }
        try {
            for (int i = 0; i < recids.length; i++)
                recids[i] = freeIoRecidTake(true);
        } finally {
            if (!disableLocks) {
                structuralLock.unlock();
            }
        }

        for (int i = 0; i < recids.length; i++) {
            final long ioRecid = recids[i];
            final Lock lock;
            if (disableLocks) {
                lock = null;
            } else {
                lock = locks[Store.lockPos(ioRecid)].writeLock();
                lock.lock();
            }
            try {
                index.putLong(ioRecid, MASK_DISCARD);
            } finally {
                if (!disableLocks) {
                    lock.unlock();
                }
            }
            recids[i] = (ioRecid - IO_USER_START) / 8;
            assert (recids[i] > 0);
        }

        if (CC.LOG_STORE && LOG.isLoggable(Level.FINEST))
            LOG.finest("Preallocate recids=" + Arrays.toString(recids));
    } finally {
        if (!disableLocks) {
            newRecidLock.readLock().unlock();
        }
    }
}
@Override
public long preallocate() {
    if (!disableLocks) {
        newRecidLock.readLock().lock();
    }
    try {
        if (!disableLocks) {
            structuralLock.lock();
        }
        final long ioRecid;
        try {
            ioRecid = freeIoRecidTake(true);
        } finally {
            if (!disableLocks) {
                structuralLock.unlock();
            }
        }

        final Lock lock;
        if (disableLocks) {
            lock = null;
        } else {
            lock = locks[Store.lockPos(ioRecid)].writeLock();
            lock.lock();
        }
        try {
            index.putLong(ioRecid, MASK_DISCARD);
        } finally {
            if (!disableLocks) {
                lock.unlock();
            }
        }

        long recid = (ioRecid - IO_USER_START) / 8;
        assert (recid > 0);
        if (CC.LOG_STORE && LOG.isLoggable(Level.FINEST))
            LOG.finest("Preallocate recid=" + recid);
        return recid;
    } finally {
        if (!disableLocks) {
            newRecidLock.readLock().unlock();
        }
    }
}
@Override
public <A> void update(long recid, A value, Serializer<A> serializer) {
    assert (value != null);
    assert (recid > 0);
    DataIO.DataOutputByteArray out = serialize(value, serializer);

    final long ioRecid = IO_USER_START + recid * 8;

    final Lock lock;
    if (disableLocks) {
        lock = null;
    } else {
        lock = locks[Store.lockPos(ioRecid)].writeLock();
        lock.lock();
    }
    try {
        update2(out, ioRecid);
    } finally {
        if (!disableLocks) {
            lock.unlock();
        }
    }

    if (CC.LOG_STORE && LOG.isLoggable(Level.FINEST))
        LOG.finest("Update recid=" + recid + ", "
                + " size=" + out.pos + ", "
                + " val=" + value
                + " ser=" + serializer);

    recycledDataOuts.offer(out);
}
@Override
public <A> A get(long recid, Serializer<A> serializer) {
    assert (recid > 0);
    final long ioRecid = IO_USER_START + recid * 8;

    final Lock lock;
    if (disableLocks) {
        lock = null;
    } else {
        lock = locks[Store.lockPos(ioRecid)].readLock();
        lock.lock();
    }
    try {
        final A ret = get2(ioRecid, serializer);
        if (CC.LOG_STORE && LOG.isLoggable(Level.FINEST))
            LOG.finest("GET recid=" + recid + ", "
                    + " ret=" + ret + ", "
                    + " ser=" + serializer);
        return ret;
    } catch (IOException e) {
        throw new IOError(e);
    } finally {
        if (!disableLocks) {
            lock.unlock();
        }
    }
}
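/*
 * The methods above all translate between the user-visible record id and what appears
 * to be its 8-byte slot in the index:
 *
 *     recid   = (ioRecid - IO_USER_START) / 8
 *     ioRecid = IO_USER_START + recid * 8
 *
 * A minimal usage sketch of this store-level API, assuming a `store` instance of this
 * class and a `Serializer<String>` implementation named `stringSerializer` (both names
 * are illustrative, not defined in this file):
 *
 *     long recid = store.put("hello", stringSerializer);   // allocate + write
 *     String v   = store.get(recid, stringSerializer);     // read back "hello"
 *     store.update(recid, "world", stringSerializer);      // overwrite in place
 */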
/**
 * Appends the range [off, off+len) of the provided sample data to the line.
 *
 * @param samples sample data
 * @param off sample data offset
 * @param len sample data length
 */
private void appendFrames(final byte[] samples, int off, int len) {
    assert off % bytesPerFrame == 0;
    assert len % bytesPerFrame == 0;

    /* Make sure that [off, off+len) does not exceed the sample data's bounds */
    off = Math.min(off, (samples != null) ? samples.length : 0);
    len = Math.min(len, (samples != null) ? samples.length - off : 0);
    if (len <= 0) {
        return;
    }

    /* Convert samples if necessary */
    final byte[] samplesConverted = Arrays.copyOfRange(samples, off, off + len);
    if (convertUnsignedToSigned) {
        /* The line expects signed PCM samples, so we must
         * convert the unsigned PCM samples to signed.
         * Note that this only affects the high bytes! */
        for (int i = 0; i < samplesConverted.length; i += 2) {
            samplesConverted[i] = (byte) ((samplesConverted[i] & 0xff) - 0x80);
        }
    }

    /* Write samples to line */
    // final int bytesWritten = m_line.write(samplesConverted, 0, samplesConverted.length);
    final int bytesWritten = audioTrack.write(samplesConverted, 0, samplesConverted.length);
    if (bytesWritten == AudioTrack.ERROR_INVALID_OPERATION) {
        LOG.severe("Audio Track not initialized properly");
        throw new RuntimeException(
                "Audio Track not initialized properly: AudioTrack status: ERROR_INVALID_OPERATION");
    } else if (bytesWritten == AudioTrack.ERROR_BAD_VALUE) {
        LOG.severe("Wrong parameters sent to Audio Track!");
        throw new RuntimeException(
                "Wrong parameters sent to Audio Track! AudioTrack status: ERROR_BAD_VALUE");
    } else if (bytesWritten != len) {
        LOG.warning("Audio output line accepted only " + bytesWritten
                + " bytes of sample data while trying to write " + len + " bytes");
    } else {
        LOG.info(bytesWritten + " bytes written to the audio output line");
    }

    /* Update state */
    synchronized (AudioOutputQueue.this) {
        framesWrittenToLine += (bytesWritten / bytesPerFrame);
        for (int b = 0; b < bytesPerFrame; ++b) {
            lineLastFrame[b] = samples[off + len - (bytesPerFrame - b)];
        }

        if (LOG.isLoggable(Level.FINEST)) {
            LOG.finest("Audio output line end is now at " + getNextLineTime()
                    + " after writing " + len / bytesPerFrame + " frames");
        }
    }
}
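/*
 * A note on the conversion above: subtracting 0x80 from the unsigned high byte is
 * equivalent to flipping its most significant bit, i.e. (byte) (b ^ 0x80). Stepping
 * the loop by 2 and rewriting only index i assumes 16-bit PCM with the high
 * (sign-carrying) byte stored first in each sample.
 */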
/** Enqueuer thread main method */
@Override
public void run() {
    try {
        /* Mute line initially to prevent clicks */
        setVolume(Float.NEGATIVE_INFINITY);

        /* Start the line */
        // m_line.start();
        // start the audio track
        audioTrack.play();
        LOG.info("Audio Track started !!!");

        boolean lineMuted = true;
        boolean didWarnGap = false;
        while (!closing) {
            if (!frameQueue.isEmpty()) {
                /* Queue filled */

                /* If the gap between the next packet and the end of line is
                 * negligible (less than one packet), we write it to the line.
                 * Otherwise, we fill the line buffer with silence and hope for
                 * further packets to appear in the queue */
                final long entryFrameTime = frameQueue.firstKey();
                final long entryLineTime = convertFrameToLineTime(entryFrameTime);
                final long gapFrames = entryLineTime - getNextLineTime();
                // LOG.info("** gapFrames: " + gapFrames + " packetSizeFrames: " + packetSizeFrames);

                if (gapFrames < -packetSizeFrames) {
                    /* Too late for playback */
                    LOG.warning("Audio data was scheduled for playback "
                            + (-gapFrames) + " frames ago, skipping");
                    frameQueue.remove(entryFrameTime);
                    continue;
                } else if (gapFrames < packetSizeFrames) {
                    /* Negligible gap between packet and line end. Prepare packet for playback */
                    didWarnGap = false;

                    /* Unmute line in case it was muted previously */
                    if (lineMuted) {
                        LOG.info("Audio data available, un-muting line");
                        lineMuted = false;
                        applyVolume();
                    } else if (getVolume() != getRequestedVolume()) {
                        applyVolume();
                    }

                    /* Get sample data and do sanity checks */
                    final byte[] nextPlaybackSamples = frameQueue.remove(entryFrameTime);
                    int nextPlaybackSamplesLength = nextPlaybackSamples.length;
                    if (nextPlaybackSamplesLength % bytesPerFrame != 0) {
                        LOG.severe("Audio data contains non-integral number of frames, ignoring last "
                                + (nextPlaybackSamplesLength % bytesPerFrame) + " bytes");
                        nextPlaybackSamplesLength -= nextPlaybackSamplesLength % bytesPerFrame;
                    }

                    /* Append packet to line */
                    LOG.finest("Audio data containing "
                            + nextPlaybackSamplesLength / bytesPerFrame
                            + " frames for playback time " + entryFrameTime
                            + " found in queue, appending to the output line");
                    appendFrames(nextPlaybackSamples, 0, nextPlaybackSamplesLength);
                    continue;
                } else {
                    /* Gap between packet and line end. Warn */
                    if (!didWarnGap) {
                        didWarnGap = true;
                        LOG.warning("Audio data missing for frame time " + getNextLineTime()
                                + " (currently " + gapFrames + " frames), writing "
                                + packetSizeFrames + " frames of silence");
                    }
                }
            } else {
                /* Queue empty */
                if (!lineMuted) {
                    lineMuted = true;
                    setVolume(Float.NEGATIVE_INFINITY);
                    LOG.fine("Audio data ended at frame time " + getNextLineTime()
                            + ", writing " + packetSizeFrames
                            + " frames of silence and muted line");
                }
            }

            appendSilence(packetSizeFrames);
        }

        // TODO: I don't think we need the appendSilence anymore when using the Android API,
        // but will evaluate that later during tests
        /* Before we exit, we fill the line's buffer with silence. This should prevent
         * noise from being output while the line is being stopped */
        // appendSilence(m_line.available() / m_bytesPerFrame);
    } catch (final Throwable e) {
        LOG.log(Level.SEVERE, "Audio output thread died unexpectedly", e);
    } finally {
        setVolume(Float.NEGATIVE_INFINITY);
        audioTrack.stop();
        audioTrack.release();
        // m_line.stop();
        // m_line.close();
    }
}
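/*
 * The construction of `audioTrack` is not shown in this section. A minimal sketch of
 * one possible setup for a streaming 16-bit PCM track of the kind written to above,
 * assuming an illustrative `sampleRate` variable and a stereo channel layout (both
 * assumptions, not taken from this file):
 *
 *     int bufferSize = AudioTrack.getMinBufferSize(sampleRate,
 *             AudioFormat.CHANNEL_OUT_STEREO, AudioFormat.ENCODING_PCM_16BIT);
 *     AudioTrack audioTrack = new AudioTrack(AudioManager.STREAM_MUSIC, sampleRate,
 *             AudioFormat.CHANNEL_OUT_STEREO, AudioFormat.ENCODING_PCM_16BIT,
 *             bufferSize, AudioTrack.MODE_STREAM);
 *
 * MODE_STREAM matches the incremental write(byte[], int, int) calls in appendFrames(),
 * and play()/stop()/release() above follow the standard AudioTrack lifecycle.
 */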