/**
 * Returns whether the audio track has any pending data to play out at its current position.
 *
 * @param writtenFrames The number of frames written to the audio track.
 * @return Whether the audio track has any pending data to play out.
 */
public boolean hasPendingData(long writtenFrames) {
  // Data is pending while the playhead has not yet caught up with what was written.
  boolean playheadBehindWrites = getPlaybackHeadPosition() < writtenFrames;
  return playheadBehindWrites || forceHasPendingData();
}
/** Returns the current playback head position expressed in microseconds. */
private long getPlaybackHeadPositionUs() {
  long playbackHeadPositionFrames = getPlaybackHeadPosition();
  return framesToDurationUs(playbackHeadPositionFrames);
}
/**
 * Periodically samples the playhead position against the system clock to maintain a smoothed
 * playhead/system-clock offset, then polls the audio timestamp and latency where it is safe to
 * do so.
 */
private void maybeSampleSyncParams() {
  long playbackPositionUs = getPlaybackHeadPositionUs();
  if (playbackPositionUs == 0) {
    // The AudioTrack hasn't output anything yet.
    return;
  }
  long systemTimeUs = System.nanoTime() / 1000;
  if (systemTimeUs - lastPlayheadSampleTimeUs >= MIN_PLAYHEAD_OFFSET_SAMPLE_INTERVAL_US) {
    // Take a new sample and update the smoothed offset between the system clock and the playhead.
    playheadOffsets[nextPlayheadOffsetIndex] = playbackPositionUs - systemTimeUs;
    // playheadOffsets is used as a circular buffer of the most recent samples.
    nextPlayheadOffsetIndex = (nextPlayheadOffsetIndex + 1) % MAX_PLAYHEAD_OFFSET_COUNT;
    if (playheadOffsetCount < MAX_PLAYHEAD_OFFSET_COUNT) {
      playheadOffsetCount++;
    }
    lastPlayheadSampleTimeUs = systemTimeUs;
    smoothedPlayheadOffsetUs = 0;
    // Each term is pre-divided by the count, so the accumulated sum is the mean of the stored
    // offsets. Note each term truncates individually; do not "simplify" to sum-then-divide,
    // which rounds differently.
    for (int i = 0; i < playheadOffsetCount; i++) {
      smoothedPlayheadOffsetUs += playheadOffsets[i] / playheadOffsetCount;
    }
  }
  if (needsPassthroughWorkarounds) {
    // Don't sample the timestamp and latency if this is an AC-3 passthrough AudioTrack on
    // platform API versions 21/22, as incorrect values are returned. See [Internal: b/21145353].
    return;
  }
  maybePollAndCheckTimestamp(systemTimeUs, playbackPositionUs);
  maybeUpdateLatency(systemTimeUs);
}
// NOTE(review): this excerpt is garbled/incomplete — the statements after
// `return timestampPositionUs;` are unreachable as written, the braces do not balance, and
// `positionUs` is not declared in the visible span. The full method body lies outside this
// view; comments only, code left byte-identical. Reconstruct against the original file.
public long getCurrentPositionUs(boolean sourceEnded) {
  if (Assertions.checkNotNull(this.audioTrack).getPlayState() == PLAYSTATE_PLAYING) {
    // Refresh the smoothed playhead offset and timestamp/latency state before reading.
    maybeSampleSyncParams();
    long timestampPositionUs = framesToDurationUs(timestampPositionFrames);
    if (!audioTimestampPoller.isTimestampAdvancing()) {
      return timestampPositionUs;
      if (playheadOffsetCount == 0) {
        // No playhead/system-clock samples yet; fall back to the raw playhead position.
        positionUs = getPlaybackHeadPositionUs();
      } else {
// NOTE(review): fragment of a larger (re)configuration method; the enclosing signature is
// outside this excerpt. Resets position-tracking state for a newly configured AudioTrack.
audioTimestampPoller = new AudioTimestampPoller(audioTrack);
outputSampleRate = audioTrack.getSampleRate();
// Whether platform passthrough bugs require special-cased position handling for this encoding.
needsPassthroughWorkarounds = needsPassthroughWorkarounds(outputEncoding);
isOutputPcm = Util.isEncodingLinearPcm(outputEncoding);
// The buffer's duration is only well defined for PCM, where bytes map directly to frames.
bufferSizeUs = isOutputPcm ? framesToDurationUs(bufferSize / outputPcmFrameSize) : C.TIME_UNSET;
// Restart raw playhead tracking (position and wrap count) from zero for the new track.
lastRawPlaybackHeadPosition = 0;
rawPlaybackHeadWrapCount = 0;
// NOTE(review): fragment — braces do not balance and the enclosing method is outside this
// excerpt (the statements after `return false;` presumably follow a closing brace lost in
// extraction). Comments only; code left byte-identical.
// A stopped track whose playhead is still at zero has produced no data to track.
if (playState == PLAYSTATE_STOPPED && getPlaybackHeadPosition() == 0) {
  return false;
  hasData = hasPendingData(writtenFrames);
  // A transition from having pending data to none while not stopped is a buffer underrun;
  // notify the listener with the configured buffer size in bytes and milliseconds.
  if (hadData && !hasData && playState != PLAYSTATE_STOPPED && listener != null) {
    listener.onUnderrun(bufferSize, C.usToMs(bufferSizeUs));
/**
 * Returns an estimate of the number of additional bytes that can be written to the audio track's
 * buffer without running out of space.
 *
 * <p>May only be called if the output encoding is one of the PCM encodings.
 *
 * @param writtenBytes The number of bytes written to the audio track so far.
 * @return An estimate of the number of bytes that can be written.
 */
public int getAvailableBufferSize(long writtenBytes) {
  // Bytes already played out, derived from the playhead position in frames.
  long playedOutBytes = getPlaybackHeadPosition() * outputPcmFrameSize;
  int bytesPending = (int) (writtenBytes - playedOutBytes);
  return bufferSize - bytesPending;
}
@Override
public boolean hasPendingData() {
  // An uninitialized sink cannot have buffered audio awaiting playout.
  if (!isInitialized()) {
    return false;
  }
  return audioTrackPositionTracker.hasPendingData(getWrittenFrames());
}
// NOTE(review): fragment of a write loop; the enclosing method is outside this excerpt.
if (Util.SDK_INT < 21) { // isInputPcm == true
  // Pre-API-21 writes must be bounded by the space remaining in the track buffer,
  // otherwise the (blocking) write could stall.
  int bytesToWrite = audioTrackPositionTracker.getAvailableBufferSize(writtenPcmBytes);
  if (bytesToWrite > 0) {
    bytesToWrite = Math.min(bytesRemaining, bytesToWrite);
@Override
public long getCurrentPositionUs(boolean sourceEnded) {
  // No meaningful position until the track exists and a start media time has been set.
  boolean canReportPosition = isInitialized() && startMediaTimeState != START_NOT_SET;
  if (!canReportPosition) {
    return CURRENT_POSITION_NOT_SET;
  }
  long trackPositionUs = audioTrackPositionTracker.getCurrentPositionUs(sourceEnded);
  // Never report a position beyond the duration of what has actually been written.
  long writtenDurationUs = framesToDurationUs(getWrittenFrames());
  long positionUs = Math.min(trackPositionUs, writtenDurationUs);
  return startMediaTimeUs + applySkipping(applySpeedup(positionUs));
}
@Override public void playToEndOfStream() throws WriteException { if (handledEndOfStream || !isInitialized()) { return; } if (drainAudioProcessorsToEndOfStream()) { // The audio processors have drained, so drain the underlying audio track. audioTrackPositionTracker.handleEndOfStream(getWrittenFrames()); audioTrack.stop(); bytesUntilNextAvSync = 0; handledEndOfStream = true; } }
// NOTE(review): constructor fragment; the signature and remaining initialization are outside
// this excerpt.
this.enableConvertHighResIntPcmToFloat = enableConvertHighResIntPcmToFloat;
// Constructed with `true` — presumably initially open so the first initialization does not
// block waiting on a prior release; confirm against ConditionVariable's constructor contract.
releasingConditionVariable = new ConditionVariable(true);
audioTrackPositionTracker = new AudioTrackPositionTracker(new PositionTrackerListener());
channelMappingAudioProcessor = new ChannelMappingAudioProcessor();
trimmingAudioProcessor = new TrimmingAudioProcessor();
private void maybePollAndCheckTimestamp(long systemTimeUs, long playbackPositionUs) { AudioTimestampPoller audioTimestampPoller = Assertions.checkNotNull(this.audioTimestampPoller); if (!audioTimestampPoller.maybePollTimestamp(systemTimeUs)) { return; } // Perform sanity checks on the timestamp and accept/reject it. long audioTimestampSystemTimeUs = audioTimestampPoller.getTimestampSystemTimeUs(); long audioTimestampPositionFrames = audioTimestampPoller.getTimestampPositionFrames(); if (Math.abs(audioTimestampSystemTimeUs - systemTimeUs) > MAX_AUDIO_TIMESTAMP_OFFSET_US) { listener.onSystemTimeUsMismatch( audioTimestampPositionFrames, audioTimestampSystemTimeUs, systemTimeUs, playbackPositionUs); audioTimestampPoller.rejectTimestamp(); } else if (Math.abs(framesToDurationUs(audioTimestampPositionFrames) - playbackPositionUs) > MAX_AUDIO_TIMESTAMP_OFFSET_US) { listener.onPositionFramesMismatch( audioTimestampPositionFrames, audioTimestampSystemTimeUs, systemTimeUs, playbackPositionUs); audioTimestampPoller.rejectTimestamp(); } else { audioTimestampPoller.acceptTimestamp(); } }
/**
 * Records the writing position at which the stream ended, so that the reported position can
 * continue to increment while remaining data is played out.
 *
 * @param writtenFrames The number of frames that have been written.
 */
public void handleEndOfStream(long writtenFrames) {
  // Snapshot the wall-clock time at which playout of the tail began.
  long nowMs = SystemClock.elapsedRealtime();
  stopTimestampUs = nowMs * 1000;
  // Snapshot where the playhead was when the stream ended, and where it will finish.
  stopPlaybackHeadPosition = getPlaybackHeadPosition();
  endPlaybackHeadPosition = writtenFrames;
}
/**
 * If passthrough workarounds are enabled, pausing is implemented by forcing the AudioTrack to
 * underrun. In this case, still behave as if we have pending data, otherwise writing won't
 * resume.
 */
private boolean forceHasPendingData() {
  if (!needsPassthroughWorkarounds) {
    return false;
  }
  boolean isPaused =
      Assertions.checkNotNull(audioTrack).getPlayState() == AudioTrack.PLAYSTATE_PAUSED;
  return isPaused && getPlaybackHeadPosition() == 0;
}