From f7fff0d5835b42f6d6db7e46c7b1e546f84f545a Mon Sep 17 00:00:00 2001
From: andrewlewis
Date: Fri, 31 Mar 2017 01:26:19 -0700
Subject: [PATCH] Apply parameter adjustments with resetting.

Start draining the audio processors when the playback parameters change,
and when draining completes use the written frame count and next input
buffer presentation timestamp as an offset for applying the new playback
speed.

-------------
Created by MOE: https://github.com/google/moe
MOE_MIGRATED_REVID=151800074
---
 .../android/exoplayer2/audio/AudioTrack.java  | 146 ++++++++++++++----
 .../exoplayer2/audio/SonicAudioProcessor.java |  14 +-
 2 files changed, 127 insertions(+), 33 deletions(-)

diff --git a/library/core/src/main/java/com/google/android/exoplayer2/audio/AudioTrack.java b/library/core/src/main/java/com/google/android/exoplayer2/audio/AudioTrack.java
index d56f6a0d896..d376ffee14d 100644
--- a/library/core/src/main/java/com/google/android/exoplayer2/audio/AudioTrack.java
+++ b/library/core/src/main/java/com/google/android/exoplayer2/audio/AudioTrack.java
@@ -32,6 +32,7 @@
 import java.nio.ByteBuffer;
 import java.nio.ByteOrder;
 import java.util.ArrayList;
+import java.util.LinkedList;
 
 /**
  * Plays audio data. The implementation delegates to an {@link android.media.AudioTrack} and handles
@@ -170,7 +171,7 @@ public InvalidAudioTrackTimestampException(String detailMessage) {
   }
 
   /**
-   * Returned by {@link #getCurrentPositionUs} when the position is not set.
+   * Returned by {@link #getCurrentPositionUs(boolean)} when the position is not set.
    */
   public static final long CURRENT_POSITION_NOT_SET = Long.MIN_VALUE;
 
@@ -251,6 +252,13 @@ public InvalidAudioTrackTimestampException(String detailMessage) {
   private static final int MIN_PLAYHEAD_OFFSET_SAMPLE_INTERVAL_US = 30000;
   private static final int MIN_TIMESTAMP_SAMPLE_INTERVAL_US = 500000;
 
+  /**
+   * The minimum number of output bytes from {@link #sonicAudioProcessor} at which the speedup is
+   * calculated using the input/output byte counts from the processor, rather than using the
+   * current playback parameters speed.
+   */
+  private static final int SONIC_MIN_BYTES_FOR_SPEEDUP = 1024;
+
   /**
    * Whether to enable a workaround for an issue where an audio effect does not keep its session
    * active across releasing/initializing a new audio track, on platform builds where
@@ -277,6 +285,7 @@ public InvalidAudioTrackTimestampException(String detailMessage) {
   private final ConditionVariable releasingConditionVariable;
   private final long[] playheadOffsets;
   private final AudioTrackUtil audioTrackUtil;
+  private final LinkedList<PlaybackParametersCheckpoint> playbackParametersCheckpoints;
 
   /**
    * Used to keep the audio session active on pre-V21 builds (see {@link #initialize()}).
@@ -295,7 +304,11 @@ public InvalidAudioTrackTimestampException(String detailMessage) {
   private boolean passthrough;
   private int bufferSize;
   private long bufferSizeUs;
+
+  private PlaybackParameters drainingPlaybackParameters;
   private PlaybackParameters playbackParameters;
+  private long playbackParametersOffsetUs;
+  private long playbackParametersPositionUs;
   private ByteBuffer avSyncHeader;
   private int bytesUntilNextAvSync;
 
@@ -377,6 +390,7 @@ public AudioTrack(AudioCapabilities audioCapabilities, AudioProcessor[] audioPro
     drainingAudioProcessorIndex = C.INDEX_UNSET;
     this.audioProcessors = new AudioProcessor[0];
     outputBuffers = new ByteBuffer[0];
+    playbackParametersCheckpoints = new LinkedList<>();
   }
 
   /**
@@ -432,7 +446,8 @@ public long getCurrentPositionUs(boolean sourceEnded) {
         positionUs -= latencyUs;
       }
     }
-    return startMediaTimeUs + scaleFrames(positionUs);
+
+    return startMediaTimeUs + applySpeedup(positionUs);
   }
 
   /**
@@ -747,6 +762,21 @@ public boolean handleBuffer(ByteBuffer buffer, long presentationTimeUs)
       framesPerEncodedSample = getFramesPerEncodedSample(outputEncoding, buffer);
     }
 
+    if (drainingPlaybackParameters != null) {
+      if (!drainAudioProcessorsToEndOfStream()) {
+        // Don't process any more input until draining completes.
+        return false;
+      }
+      // Store the position and corresponding media time from which the parameters will apply.
+      playbackParametersCheckpoints.add(new PlaybackParametersCheckpoint(
+          drainingPlaybackParameters, Math.max(0, presentationTimeUs),
+          framesToDurationUs(getWrittenFrames())));
+      drainingPlaybackParameters = null;
+      // The audio processors have drained, so flush them. This will cause any active speed
+      // adjustment audio processor to start producing audio with the new parameters.
+      resetAudioProcessors();
+    }
+
     if (startMediaTimeState == START_NOT_SET) {
       startMediaTimeUs = Math.max(0, presentationTimeUs);
       startMediaTimeState = START_IN_SYNC;
@@ -895,7 +925,15 @@ public void playToEndOfStream() throws WriteException {
       return;
     }
 
-    // Drain the audio processors.
+    if (drainAudioProcessorsToEndOfStream()) {
+      // The audio processors have drained, so drain the underlying audio track.
+      audioTrackUtil.handleEndOfStream(getWrittenFrames());
+      bytesUntilNextAvSync = 0;
+      handledEndOfStream = true;
+    }
+  }
+
+  private boolean drainAudioProcessorsToEndOfStream() throws WriteException {
     boolean audioProcessorNeedsEndOfStream = false;
     if (drainingAudioProcessorIndex == C.INDEX_UNSET) {
       drainingAudioProcessorIndex = passthrough ? audioProcessors.length : 0;
@@ -908,7 +946,7 @@ public void playToEndOfStream() throws WriteException {
       }
       processBuffers(C.TIME_UNSET);
       if (!audioProcessor.isEnded()) {
-        return;
+        return false;
       }
       audioProcessorNeedsEndOfStream = true;
       drainingAudioProcessorIndex++;
@@ -918,14 +956,11 @@ public void playToEndOfStream() throws WriteException {
     if (outputBuffer != null) {
       writeBuffer(outputBuffer, C.TIME_UNSET);
       if (outputBuffer != null) {
-        return;
+        return false;
       }
     }
-
-    // Drain the track.
-    audioTrackUtil.handleEndOfStream(getWrittenFrames());
-    bytesUntilNextAvSync = 0;
-    handledEndOfStream = true;
+    drainingAudioProcessorIndex = C.INDEX_UNSET;
+    return true;
   }
 
@@ -949,21 +984,27 @@ public boolean hasPendingData() {
    * Attempts to set the playback parameters and returns the active playback parameters, which may
    * differ from those passed in.
    *
+   * @param playbackParameters The new playback parameters to attempt to set.
    * @return The active playback parameters.
    */
   public PlaybackParameters setPlaybackParameters(PlaybackParameters playbackParameters) {
     if (passthrough) {
+      // The playback parameters are always the default in passthrough mode.
       this.playbackParameters = PlaybackParameters.DEFAULT;
-    } else {
-      this.playbackParameters = new PlaybackParameters(
-          sonicAudioProcessor.setSpeed(playbackParameters.speed),
-          sonicAudioProcessor.setPitch(playbackParameters.pitch));
-      // TODO: Avoid resetting the track, so that speed/pitch changes are seamless.
-      // See [Internal: b/36542189].
-      reset();
-      // Setting the playback parameters never changes the output format, so it is not necessary to
-      // reconfigure the processors, though they may have become active/inactive.
-      resetAudioProcessors();
+      return this.playbackParameters;
+    }
+    playbackParameters = new PlaybackParameters(
+        sonicAudioProcessor.setSpeed(playbackParameters.speed),
+        sonicAudioProcessor.setPitch(playbackParameters.pitch));
+    PlaybackParameters lastSetPlaybackParameters =
+        drainingPlaybackParameters != null ? drainingPlaybackParameters
+            : !playbackParametersCheckpoints.isEmpty()
+                ? playbackParametersCheckpoints.getLast().playbackParameters
+                : this.playbackParameters;
+    if (!playbackParameters.equals(lastSetPlaybackParameters)) {
+      // We need to change the playback parameters. Drain the audio processors so we can determine
+      // the frame position at which the new parameters apply.
+      drainingPlaybackParameters = playbackParameters;
     }
     return this.playbackParameters;
   }
@@ -975,15 +1016,6 @@ public PlaybackParameters getPlaybackParameters() {
     return playbackParameters;
   }
 
-  /**
-   * Returns the number of input frames corresponding to the specified number of output frames,
-   * taking into account any internal playback speed adjustment.
-   */
-  private long scaleFrames(long outputFrameCount) {
-    return sonicAudioProcessor.isActive() ? sonicAudioProcessor.getInputFrames(outputFrameCount)
-        : outputFrameCount;
-  }
-
   /**
    * Sets the stream type for audio track. If the stream type has changed and if the audio track
    * is not configured for use with tunneling, then the audio track is reset and the audio session
@@ -1098,6 +1130,14 @@ public void reset() {
       writtenPcmBytes = 0;
       writtenEncodedFrames = 0;
       framesPerEncodedSample = 0;
+      if (drainingPlaybackParameters != null) {
+        playbackParameters = drainingPlaybackParameters;
+      } else if (!playbackParametersCheckpoints.isEmpty()) {
+        playbackParameters = playbackParametersCheckpoints.getLast().playbackParameters;
+      }
+      playbackParametersCheckpoints.clear();
+      playbackParametersOffsetUs = 0;
+      playbackParametersPositionUs = 0;
       inputBuffer = null;
       outputBuffer = null;
       for (int i = 0; i < audioProcessors.length; i++) {
@@ -1174,6 +1214,36 @@ private boolean hasCurrentPositionUs() {
     return isInitialized() && startMediaTimeState != START_NOT_SET;
   }
 
+  /**
+   * Returns the underlying audio track {@code positionUs} with any applicable speedup applied.
+   */
+  private long applySpeedup(long positionUs) {
+    while (!playbackParametersCheckpoints.isEmpty()
+        && positionUs >= playbackParametersCheckpoints.getFirst().positionUs) {
+      // We are playing (or about to play) media with the new playback parameters, so update them.
+      PlaybackParametersCheckpoint checkpoint = playbackParametersCheckpoints.remove();
+      playbackParameters = checkpoint.playbackParameters;
+      playbackParametersPositionUs = checkpoint.positionUs;
+      playbackParametersOffsetUs = checkpoint.mediaTimeUs - startMediaTimeUs;
+    }
+
+    if (playbackParameters.speed == 1f) {
+      return positionUs + playbackParametersOffsetUs - playbackParametersPositionUs;
+    }
+
+    if (playbackParametersCheckpoints.isEmpty()
+        && sonicAudioProcessor.getOutputByteCount() >= SONIC_MIN_BYTES_FOR_SPEEDUP) {
+      return playbackParametersOffsetUs
+          + Util.scaleLargeTimestamp(positionUs - playbackParametersPositionUs,
+              sonicAudioProcessor.getInputByteCount(), sonicAudioProcessor.getOutputByteCount());
+    }
+
+    // We are playing drained data at a previous playback speed, or don't have enough bytes to
+    // calculate an accurate speedup, so fall back to multiplying by the speed.
+    return playbackParametersOffsetUs
+        + (long) ((double) playbackParameters.speed * (positionUs - playbackParametersPositionUs));
+  }
+
   /**
    * Updates the audio track latency and playback position parameters.
    */
@@ -1636,4 +1706,22 @@ public long getTimestampFramePosition() {
   }
 
+  /**
+   * Stores playback parameters with the position and media time at which they apply.
+   */
+  private static final class PlaybackParametersCheckpoint {
+
+    private final PlaybackParameters playbackParameters;
+    private final long mediaTimeUs;
+    private final long positionUs;
+
+    private PlaybackParametersCheckpoint(PlaybackParameters playbackParameters, long mediaTimeUs,
+        long positionUs) {
+      this.playbackParameters = playbackParameters;
+      this.mediaTimeUs = mediaTimeUs;
+      this.positionUs = positionUs;
+    }
+
+  }
+
 }
diff --git a/library/core/src/main/java/com/google/android/exoplayer2/audio/SonicAudioProcessor.java b/library/core/src/main/java/com/google/android/exoplayer2/audio/SonicAudioProcessor.java
index 3f45afd53e0..2dc14a094d3 100644
--- a/library/core/src/main/java/com/google/android/exoplayer2/audio/SonicAudioProcessor.java
+++ b/library/core/src/main/java/com/google/android/exoplayer2/audio/SonicAudioProcessor.java
@@ -102,11 +102,17 @@ public float setPitch(float pitch) {
   }
 
   /**
-   * Returns the number of input frames corresponding to the specified number of output frames.
+   * Returns the number of bytes of input queued since the last call to {@link #flush()}.
    */
-  public long getInputFrames(long outputFrames) {
-    // Sonic produces output data as soon as input is queued.
-    return outputBytes == 0 ? 0 : Util.scaleLargeTimestamp(outputFrames, inputBytes, outputBytes);
+  public long getInputByteCount() {
+    return inputBytes;
+  }
+
+  /**
+   * Returns the number of bytes of output dequeued since the last call to {@link #flush()}.
+   */
+  public long getOutputByteCount() {
+    return outputBytes;
   }
 
   @Override
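
Reviewer note, not part of the patch: the change records each speed change as a checkpoint pairing the media time from which the new parameters apply with the corresponding track position (the written frame count expressed as a duration), and applySpeedup() then scales only the portion of the track position that falls after the most recent checkpoint reached. The standalone Java sketch below illustrates that mapping under simplifying assumptions: the class and method names are invented for the example, a single speed value stands in for PlaybackParameters, startMediaTimeUs is folded into the checkpoint's media time, and the byte-count refinement from the patch is omitted.

// Illustration only: simplified checkpoint-based position mapping (all names are hypothetical).
import java.util.ArrayDeque;

final class SpeedCheckpointExample {

  // A recorded speed change: the media time and track position from which it applies.
  private static final class Checkpoint {
    final float speed;
    final long mediaTimeUs;
    final long trackPositionUs;

    Checkpoint(float speed, long mediaTimeUs, long trackPositionUs) {
      this.speed = speed;
      this.mediaTimeUs = mediaTimeUs;
      this.trackPositionUs = trackPositionUs;
    }
  }

  private final ArrayDeque<Checkpoint> checkpoints = new ArrayDeque<>();
  private float speed = 1f;
  private long offsetMediaTimeUs;
  private long offsetTrackPositionUs;

  // Called once a speed change has taken effect (in the patch: after the processors have drained).
  void onSpeedChange(float newSpeed, long mediaTimeUs, long trackPositionUs) {
    checkpoints.add(new Checkpoint(newSpeed, mediaTimeUs, trackPositionUs));
  }

  // Maps a (monotonically increasing) track position to media time by scaling only the part
  // played since the most recent checkpoint that the track position has reached.
  long getMediaTimeUs(long trackPositionUs) {
    while (!checkpoints.isEmpty() && trackPositionUs >= checkpoints.peekFirst().trackPositionUs) {
      Checkpoint checkpoint = checkpoints.remove();
      speed = checkpoint.speed;
      offsetMediaTimeUs = checkpoint.mediaTimeUs;
      offsetTrackPositionUs = checkpoint.trackPositionUs;
    }
    // Media time advances 'speed' times faster than the track (output) position.
    return offsetMediaTimeUs + (long) ((double) speed * (trackPositionUs - offsetTrackPositionUs));
  }

  public static void main(String[] args) {
    SpeedCheckpointExample mapper = new SpeedCheckpointExample();
    // Speed changes to 2x after 5 seconds of output (which is also 5 seconds of media at 1x).
    mapper.onSpeedChange(2f, 5_000_000, 5_000_000);
    System.out.println(mapper.getMediaTimeUs(3_000_000)); // 3000000 (still playing at 1x)
    System.out.println(mapper.getMediaTimeUs(7_000_000)); // 9000000 (5s at 1x + 2s of output at 2x)
  }
}

In the patch itself, the equivalent of getMediaTimeUs is applySpeedup(), the checkpoints are created in handleBuffer() once drainAudioProcessorsToEndOfStream() reports that audio at the old speed has been fully written, and reset() promotes the most recent pending parameters before clearing the checkpoint list.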
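
Also not part of the patch: a hypothetical helper showing why applySpeedup() prefers the processor's measured input/output byte ratio over the nominal speed once enough output exists.

// Illustration only: estimating elapsed media time from the resampler's byte counts.
final class ByteRatioSpeedupExample {

  // Scales an elapsed output duration by inputBytes/outputBytes, mirroring the role of
  // Util.scaleLargeTimestamp in applySpeedup(). This naive version can overflow for very large
  // values; the real utility method guards against that.
  static long outputDurationToMediaDurationUs(long outputDurationUs, long inputBytes,
      long outputBytes) {
    return outputDurationUs * inputBytes / outputBytes;
  }

  public static void main(String[] args) {
    // If Sonic consumed 8 MB of input to produce 4 MB of output, the effective speed is 2x,
    // so 1 second of output audio corresponds to 2 seconds of media time.
    System.out.println(outputDurationToMediaDurationUs(1_000_000, 8_000_000, 4_000_000)); // 2000000
  }
}

Below SONIC_MIN_BYTES_FOR_SPEEDUP output bytes the ratio is too noisy to be useful (and undefined before any output has been produced), which is why applySpeedup() falls back to multiplying by playbackParameters.speed in that case.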