Add Sonic library for audio speed adjustment.

Add methods to ExoPlayer for setting/getting the playback speed, using
SonicAudioProcessor.

Remove PlaybackParams support, as the AudioTrack timestamp does not work
reliably on Marshmallow. The platform also uses Sonic and performance
should be comparable between the Java and native versions on recent Android
runtimes.

In a later change, SonicAudioProcessor will be made public so it can
be used in conjunction with other processors.

Issue: #26

-------------
Created by MOE: https://github.com/google/moe
MOE_MIGRATED_REVID=151027121
This commit is contained in:
andrewlewis 2017-03-23 11:08:11 -07:00 committed by Oliver Woodman
parent 6faf566344
commit 0e6ef0edf6
24 changed files with 1492 additions and 201 deletions

View file

@ -22,6 +22,7 @@ import com.google.android.exoplayer2.C;
import com.google.android.exoplayer2.ExoPlaybackException;
import com.google.android.exoplayer2.ExoPlayer;
import com.google.android.exoplayer2.Format;
import com.google.android.exoplayer2.PlaybackParameters;
import com.google.android.exoplayer2.RendererCapabilities;
import com.google.android.exoplayer2.Timeline;
import com.google.android.exoplayer2.audio.AudioRendererEventListener;
@ -99,6 +100,12 @@ import java.util.Locale;
Log.d(TAG, "positionDiscontinuity");
}
@Override
public void onPlaybackParametersChanged(PlaybackParameters playbackParameters) {
  // Use a fixed locale so the logged numbers are stable regardless of the device's default
  // locale (some locales render %.2f with a comma as the decimal separator).
  Log.d(TAG, "playbackParameters " + String.format(
      Locale.US, "[speed=%.2f, pitch=%.2f]", playbackParameters.speed, playbackParameters.pitch));
}
@Override
public void onTimelineChanged(Timeline timeline, Object manifest) {
int periodCount = timeline.getPeriodCount();

View file

@ -34,6 +34,7 @@ import com.google.android.exoplayer2.DefaultLoadControl;
import com.google.android.exoplayer2.ExoPlaybackException;
import com.google.android.exoplayer2.ExoPlayer;
import com.google.android.exoplayer2.ExoPlayerFactory;
import com.google.android.exoplayer2.PlaybackParameters;
import com.google.android.exoplayer2.SimpleExoPlayer;
import com.google.android.exoplayer2.Timeline;
import com.google.android.exoplayer2.drm.DefaultDrmSessionManager;
@ -427,6 +428,11 @@ public class PlayerActivity extends Activity implements OnClickListener, ExoPlay
}
}
@Override
public void onPlaybackParametersChanged(PlaybackParameters playbackParameters) {
// Do nothing. The demo UI has no indicator for playback speed/pitch, so changes are ignored.
}
@Override
public void onTimelineChanged(Timeline timeline, Object manifest) {
// Do nothing.

View file

@ -22,6 +22,7 @@ import android.test.InstrumentationTestCase;
import com.google.android.exoplayer2.ExoPlaybackException;
import com.google.android.exoplayer2.ExoPlayer;
import com.google.android.exoplayer2.ExoPlayerFactory;
import com.google.android.exoplayer2.PlaybackParameters;
import com.google.android.exoplayer2.Renderer;
import com.google.android.exoplayer2.Timeline;
import com.google.android.exoplayer2.extractor.mkv.MatroskaExtractor;
@ -102,6 +103,11 @@ public class FlacPlaybackTest extends InstrumentationTestCase {
// Do nothing.
}
@Override
public void onPlaybackParametersChanged(PlaybackParameters playbackParameters) {
// Do nothing. This playback test only verifies decoding; parameter changes are irrelevant here.
}
@Override
public void onTimelineChanged(Timeline timeline, Object manifest) {
// Do nothing.

View file

@ -22,6 +22,7 @@ import android.test.InstrumentationTestCase;
import com.google.android.exoplayer2.ExoPlaybackException;
import com.google.android.exoplayer2.ExoPlayer;
import com.google.android.exoplayer2.ExoPlayerFactory;
import com.google.android.exoplayer2.PlaybackParameters;
import com.google.android.exoplayer2.Renderer;
import com.google.android.exoplayer2.Timeline;
import com.google.android.exoplayer2.extractor.mkv.MatroskaExtractor;
@ -102,6 +103,11 @@ public class OpusPlaybackTest extends InstrumentationTestCase {
// Do nothing.
}
@Override
public void onPlaybackParametersChanged(PlaybackParameters playbackParameters) {
// Do nothing. This playback test only verifies decoding; parameter changes are irrelevant here.
}
@Override
public void onTimelineChanged(Timeline timeline, Object manifest) {
// Do nothing.

View file

@ -23,6 +23,7 @@ import android.util.Log;
import com.google.android.exoplayer2.ExoPlaybackException;
import com.google.android.exoplayer2.ExoPlayer;
import com.google.android.exoplayer2.ExoPlayerFactory;
import com.google.android.exoplayer2.PlaybackParameters;
import com.google.android.exoplayer2.Renderer;
import com.google.android.exoplayer2.Timeline;
import com.google.android.exoplayer2.extractor.mkv.MatroskaExtractor;
@ -134,6 +135,11 @@ public class VpxPlaybackTest extends InstrumentationTestCase {
// Do nothing.
}
@Override
public void onPlaybackParametersChanged(PlaybackParameters playbackParameters) {
// Do nothing. This playback test only verifies decoding; parameter changes are irrelevant here.
}
@Override
public void onTimelineChanged(Timeline timeline, Object manifest) {
// Do nothing.

View file

@ -140,6 +140,16 @@ public final class ExoPlayerTest extends TestCase {
return isCurrentStreamFinal() ? 60000030 : 60000000;
}
// Fake media clock for tests: ignores the requested parameters and reports the defaults,
// mirroring a renderer clock that cannot apply speed/pitch adjustment.
@Override
public PlaybackParameters setPlaybackParameters(PlaybackParameters playbackParameters) {
return PlaybackParameters.DEFAULT;
}
// Fake media clock for tests: always reports the default (unadjusted) playback parameters.
@Override
public PlaybackParameters getPlaybackParameters() {
return PlaybackParameters.DEFAULT;
}
@Override
public boolean isEnded() {
// Allow playback to end once the final period is playing.
@ -272,6 +282,11 @@ public final class ExoPlayerTest extends TestCase {
positionDiscontinuityCount++;
}
@Override
public void onPlaybackParametersChanged(PlaybackParameters playbackParameters) {
// Do nothing. This test listener does not assert on playback parameter changes.
}
}
private static final class TimelineWindowDefinition {

View file

@ -479,15 +479,6 @@ public final class C {
*/
public static final int MSG_SET_VOLUME = 2;
/**
* A type of a message that can be passed to an audio {@link Renderer} via
* {@link ExoPlayer#sendMessages} or {@link ExoPlayer#blockingSendMessages}. The message object
* should be a {@link android.media.PlaybackParams}, or null, which will be used to configure the
* underlying {@link android.media.AudioTrack}. The message object should not be modified by the
* caller after it has been passed.
*/
public static final int MSG_SET_PLAYBACK_PARAMS = 3;
/**
* A type of a message that can be passed to an audio {@link Renderer} via
* {@link ExoPlayer#sendMessages} or {@link ExoPlayer#blockingSendMessages}. The message object
@ -500,7 +491,7 @@ public final class C {
* introduce a brief gap in audio output. Note also that tracks in the same audio session must
* share the same routing, so a new audio session id will be generated.
*/
public static final int MSG_SET_STREAM_TYPE = 4;
public static final int MSG_SET_STREAM_TYPE = 3;
/**
* The type of a message that can be passed to a {@link MediaCodec}-based video {@link Renderer}
@ -510,7 +501,7 @@ public final class C {
* Note that the scaling mode only applies if the {@link Surface} targeted by the renderer is
* owned by a {@link android.view.SurfaceView}.
*/
public static final int MSG_SET_SCALING_MODE = 5;
public static final int MSG_SET_SCALING_MODE = 4;
/**
* Applications or extensions may define custom {@code MSG_*} constants greater than or equal to

View file

@ -15,6 +15,7 @@
*/
package com.google.android.exoplayer2;
import android.support.annotation.Nullable;
import com.google.android.exoplayer2.audio.MediaCodecAudioRenderer;
import com.google.android.exoplayer2.metadata.MetadataRenderer;
import com.google.android.exoplayer2.source.ConcatenatingMediaSource;
@ -168,6 +169,16 @@ public interface ExoPlayer {
*/
void onPositionDiscontinuity();
/**
* Called when the current playback parameters change. The playback parameters may change due to
* a call to {@link ExoPlayer#setPlaybackParameters(PlaybackParameters)}, or the player itself
* may change them (for example, if audio playback switches to passthrough mode, where speed
* adjustment is no longer possible).
*
* @param playbackParameters The playback parameters.
*/
void onPlaybackParametersChanged(PlaybackParameters playbackParameters);
}
/**
@ -340,6 +351,28 @@ public interface ExoPlayer {
*/
void seekTo(int windowIndex, long positionMs);
/**
* Attempts to set the playback parameters. Passing {@code null} sets the parameters to the
* default, {@link PlaybackParameters#DEFAULT}, which means there is no speed or pitch adjustment.
* <p>
* Playback parameters changes may cause the player to buffer.
* {@link EventListener#onPlaybackParametersChanged(PlaybackParameters)} will be called whenever
* the currently active playback parameters change. When that listener is called, the parameters
* passed to it may not match {@code playbackParameters}. For example, the chosen speed or pitch
* may be out of range, in which case they are constrained to a set of permitted values. If it is
* not possible to change the playback parameters, the listener will not be invoked.
*
* @param playbackParameters The playback parameters, or {@code null} to use the defaults.
*/
void setPlaybackParameters(@Nullable PlaybackParameters playbackParameters);
/**
* Returns the currently active playback parameters.
*
* @see EventListener#onPlaybackParametersChanged(PlaybackParameters)
*/
PlaybackParameters getPlaybackParameters();
/**
* Stops playback. Use {@code setPlayWhenReady(false)} rather than this method if the intention
* is to pause playback.

View file

@ -19,6 +19,7 @@ import android.annotation.SuppressLint;
import android.os.Handler;
import android.os.Looper;
import android.os.Message;
import android.support.annotation.Nullable;
import android.util.Log;
import com.google.android.exoplayer2.ExoPlayerImplInternal.PlaybackInfo;
import com.google.android.exoplayer2.ExoPlayerImplInternal.SourceInfo;
@ -57,6 +58,7 @@ import java.util.concurrent.CopyOnWriteArraySet;
private Object manifest;
private TrackGroupArray trackGroups;
private TrackSelectionArray trackSelections;
private PlaybackParameters playbackParameters;
// Playback information when there is no pending seek/set source operation.
private PlaybackInfo playbackInfo;
@ -87,6 +89,7 @@ import java.util.concurrent.CopyOnWriteArraySet;
period = new Timeline.Period();
trackGroups = TrackGroupArray.EMPTY;
trackSelections = emptyTrackSelections;
playbackParameters = PlaybackParameters.DEFAULT;
eventHandler = new Handler() {
@Override
public void handleMessage(Message msg) {
@ -196,6 +199,19 @@ import java.util.concurrent.CopyOnWriteArraySet;
}
}
@Override
public void setPlaybackParameters(@Nullable PlaybackParameters playbackParameters) {
  // A null argument means "reset to defaults"; normalize before handing off to the
  // internal playback thread.
  PlaybackParameters parameters =
      playbackParameters == null ? PlaybackParameters.DEFAULT : playbackParameters;
  internalPlayer.setPlaybackParameters(parameters);
}
@Override
public PlaybackParameters getPlaybackParameters() {
// Returns the parameters most recently reported back by the internal playback thread,
// which may differ from the last requested values if they were constrained.
return playbackParameters;
}
@Override
public void stop() {
internalPlayer.stop();
@ -376,6 +392,16 @@ import java.util.concurrent.CopyOnWriteArraySet;
}
break;
}
case ExoPlayerImplInternal.MSG_PLAYBACK_PARAMETERS_CHANGED: {
PlaybackParameters playbackParameters = (PlaybackParameters) msg.obj;
if (!this.playbackParameters.equals(playbackParameters)) {
this.playbackParameters = playbackParameters;
for (EventListener listener : listeners) {
listener.onPlaybackParametersChanged(playbackParameters);
}
}
break;
}
case ExoPlayerImplInternal.MSG_ERROR: {
ExoPlaybackException exception = (ExoPlaybackException) msg.obj;
for (EventListener listener : listeners) {
@ -383,6 +409,8 @@ import java.util.concurrent.CopyOnWriteArraySet;
}
break;
}
default:
throw new IllegalStateException();
}
}

View file

@ -96,20 +96,22 @@ import java.io.IOException;
public static final int MSG_SEEK_ACK = 4;
public static final int MSG_POSITION_DISCONTINUITY = 5;
public static final int MSG_SOURCE_INFO_REFRESHED = 6;
public static final int MSG_ERROR = 7;
public static final int MSG_PLAYBACK_PARAMETERS_CHANGED = 7;
public static final int MSG_ERROR = 8;
// Internal messages
private static final int MSG_PREPARE = 0;
private static final int MSG_SET_PLAY_WHEN_READY = 1;
private static final int MSG_DO_SOME_WORK = 2;
private static final int MSG_SEEK_TO = 3;
private static final int MSG_STOP = 4;
private static final int MSG_RELEASE = 5;
private static final int MSG_REFRESH_SOURCE_INFO = 6;
private static final int MSG_PERIOD_PREPARED = 7;
private static final int MSG_SOURCE_CONTINUE_LOADING_REQUESTED = 8;
private static final int MSG_TRACK_SELECTION_INVALIDATED = 9;
private static final int MSG_CUSTOM = 10;
private static final int MSG_SET_PLAYBACK_PARAMETERS = 4;
private static final int MSG_STOP = 5;
private static final int MSG_RELEASE = 6;
private static final int MSG_REFRESH_SOURCE_INFO = 7;
private static final int MSG_PERIOD_PREPARED = 8;
private static final int MSG_SOURCE_CONTINUE_LOADING_REQUESTED = 9;
private static final int MSG_TRACK_SELECTION_INVALIDATED = 10;
private static final int MSG_CUSTOM = 11;
private static final int PREPARING_SOURCE_INTERVAL_MS = 10;
private static final int RENDERING_INTERVAL_MS = 10;
@ -143,6 +145,7 @@ import java.io.IOException;
private final Timeline.Period period;
private PlaybackInfo playbackInfo;
private PlaybackParameters playbackParameters;
private Renderer rendererMediaClockSource;
private MediaClock rendererMediaClock;
private MediaSource mediaSource;
@ -188,6 +191,7 @@ import java.io.IOException;
window = new Timeline.Window();
period = new Timeline.Period();
trackSelector.init(this);
playbackParameters = PlaybackParameters.DEFAULT;
// Note: The documentation for Process.THREAD_PRIORITY_AUDIO that states "Applications can
// not normally change to this priority" is incorrect.
@ -211,6 +215,10 @@ import java.io.IOException;
.sendToTarget();
}
// Posts the new parameters to the internal playback thread; they are applied there and the
// active values are reported back via MSG_PLAYBACK_PARAMETERS_CHANGED.
public void setPlaybackParameters(PlaybackParameters playbackParameters) {
handler.obtainMessage(MSG_SET_PLAYBACK_PARAMETERS, playbackParameters).sendToTarget();
}
public void stop() {
handler.sendEmptyMessage(MSG_STOP);
}
@ -304,6 +312,10 @@ import java.io.IOException;
seekToInternal((SeekPosition) msg.obj);
return true;
}
case MSG_SET_PLAYBACK_PARAMETERS: {
setPlaybackParametersInternal((PlaybackParameters) msg.obj);
return true;
}
case MSG_STOP: {
stopInternal();
return true;
@ -478,6 +490,19 @@ import java.io.IOException;
maybeThrowPeriodPrepareError();
}
// The standalone media clock never changes playback parameters, so just check the renderer.
if (rendererMediaClock != null) {
PlaybackParameters playbackParameters = rendererMediaClock.getPlaybackParameters();
if (!playbackParameters.equals(this.playbackParameters)) {
// TODO: Make LoadControl, period transition position projection, adaptive track selection
// and potentially any time-related code in renderers take into account the playback speed.
this.playbackParameters = playbackParameters;
standaloneMediaClock.synchronize(rendererMediaClock);
eventHandler.obtainMessage(MSG_PLAYBACK_PARAMETERS_CHANGED, playbackParameters)
.sendToTarget();
}
}
long playingPeriodDurationUs = timeline.getPeriod(playingPeriodHolder.index, period)
.getDurationUs();
if (allRenderersEnded
@ -646,6 +671,14 @@ import java.io.IOException;
}
}
private void setPlaybackParametersInternal(PlaybackParameters playbackParameters) {
  // Apply the parameters to whichever clock currently drives playback. The clock may
  // constrain the requested speed/pitch, so keep the values it actually accepted.
  PlaybackParameters acceptedParameters;
  if (rendererMediaClock != null) {
    acceptedParameters = rendererMediaClock.setPlaybackParameters(playbackParameters);
  } else {
    acceptedParameters = standaloneMediaClock.setPlaybackParameters(playbackParameters);
  }
  this.playbackParameters = acceptedParameters;
  // Report the parameters that are now active back to the player.
  eventHandler.obtainMessage(MSG_PLAYBACK_PARAMETERS_CHANGED, acceptedParameters).sendToTarget();
}
private void stopInternal() {
resetInternal(true);
loadControl.onStopped();
@ -774,7 +807,7 @@ import java.io.IOException;
if (sampleStream == null) {
// The renderer won't be re-enabled. Sync standaloneMediaClock so that it can take
// over timing responsibilities.
standaloneMediaClock.setPositionUs(rendererMediaClock.getPositionUs());
standaloneMediaClock.synchronize(rendererMediaClock);
}
rendererMediaClock = null;
rendererMediaClockSource = null;
@ -1334,7 +1367,7 @@ import java.io.IOException;
// is final and it's not reading ahead.
if (renderer == rendererMediaClockSource) {
// Sync standaloneMediaClock so that it can take over timing responsibilities.
standaloneMediaClock.setPositionUs(rendererMediaClock.getPositionUs());
standaloneMediaClock.synchronize(rendererMediaClock);
rendererMediaClock = null;
rendererMediaClockSource = null;
}
@ -1380,6 +1413,7 @@ import java.io.IOException;
}
rendererMediaClock = mediaClock;
rendererMediaClockSource = renderer;
rendererMediaClock.setPlaybackParameters(playbackParameters);
}
// Start the renderer if playing.
if (playing) {

View file

@ -0,0 +1,83 @@
/*
* Copyright (C) 2017 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.google.android.exoplayer2;
/**
 * The parameters that apply to playback.
 */
public final class PlaybackParameters {

  /**
   * The default playback parameters: real-time playback with no pitch modification.
   */
  public static final PlaybackParameters DEFAULT = new PlaybackParameters(1f, 1f);

  /**
   * The factor by which playback will be sped up.
   */
  public final float speed;

  /**
   * The factor by which the audio pitch will be scaled.
   */
  public final float pitch;

  // Speed expressed as microseconds of media per millisecond of real time, precomputed so that
  // getSpeedAdjustedDurationUs is a single multiplication.
  private final int scaledUsPerMs;

  /**
   * Creates new playback parameters.
   *
   * @param speed The factor by which playback will be sped up.
   * @param pitch The factor by which the audio pitch will be scaled.
   */
  public PlaybackParameters(float speed, float pitch) {
    this.speed = speed;
    this.pitch = pitch;
    scaledUsPerMs = Math.round(speed * 1000f);
  }

  /**
   * Scales the millisecond duration {@code timeMs} by the playback speed, returning the result in
   * microseconds.
   *
   * @param timeMs The time to scale, in milliseconds.
   * @return The scaled time, in microseconds.
   */
  public long getSpeedAdjustedDurationUs(long timeMs) {
    return timeMs * scaledUsPerMs;
  }

  @Override
  public boolean equals(Object obj) {
    if (obj == this) {
      return true;
    }
    // The class is final, so an instanceof check is equivalent to comparing runtime classes and
    // also handles null.
    if (!(obj instanceof PlaybackParameters)) {
      return false;
    }
    PlaybackParameters that = (PlaybackParameters) obj;
    return speed == that.speed && pitch == that.pitch;
  }

  @Override
  public int hashCode() {
    return 31 * (31 * 17 + Float.floatToRawIntBits(speed)) + Float.floatToRawIntBits(pitch);
  }

}

View file

@ -22,6 +22,7 @@ import android.media.MediaCodec;
import android.media.PlaybackParams;
import android.os.Handler;
import android.support.annotation.IntDef;
import android.support.annotation.Nullable;
import android.util.Log;
import android.view.Surface;
import android.view.SurfaceHolder;
@ -145,7 +146,6 @@ public class SimpleExoPlayer implements ExoPlayer {
@C.StreamType
private int audioStreamType;
private float audioVolume;
private PlaybackParamsHolder playbackParamsHolder;
protected SimpleExoPlayer(Context context, TrackSelector trackSelector, LoadControl loadControl,
DrmSessionManager<FrameworkMediaCrypto> drmSessionManager,
@ -344,37 +344,20 @@ public class SimpleExoPlayer implements ExoPlayer {
/**
* Sets the {@link PlaybackParams} governing audio playback.
*
* @deprecated Use {@link #setPlaybackParameters(PlaybackParameters)}.
* @param params The {@link PlaybackParams}, or null to clear any previously set parameters.
*/
@Deprecated
@TargetApi(23)
public void setPlaybackParams(PlaybackParams params) {
public void setPlaybackParams(@Nullable PlaybackParams params) {
PlaybackParameters playbackParameters;
if (params != null) {
// The audio renderers will call this on the playback thread to ensure they can query
// parameters without failure. We do the same up front, which is redundant except that it
// ensures an immediate call to getPlaybackParams will retrieve the instance with defaults
// allowed, rather than this change becoming visible sometime later once the audio renderers
// receive the parameters.
params.allowDefaults();
playbackParamsHolder = new PlaybackParamsHolder(params);
playbackParameters = new PlaybackParameters(params.getSpeed(), params.getPitch());
} else {
playbackParamsHolder = null;
playbackParameters = null;
}
ExoPlayerMessage[] messages = new ExoPlayerMessage[audioRendererCount];
int count = 0;
for (Renderer renderer : renderers) {
if (renderer.getTrackType() == C.TRACK_TYPE_AUDIO) {
messages[count++] = new ExoPlayerMessage(renderer, C.MSG_SET_PLAYBACK_PARAMS, params);
}
}
player.sendMessages(messages);
}
/**
* Returns the {@link PlaybackParams} governing audio playback, or null if not set.
*/
@TargetApi(23)
public PlaybackParams getPlaybackParams() {
return playbackParamsHolder == null ? null : playbackParamsHolder.params;
setPlaybackParameters(playbackParameters);
}
/**
@ -519,6 +502,16 @@ public class SimpleExoPlayer implements ExoPlayer {
player.seekTo(windowIndex, positionMs);
}
@Override
public void setPlaybackParameters(PlaybackParameters playbackParameters) {
// Delegates to the wrapped player instance.
player.setPlaybackParameters(playbackParameters);
}
@Override
public PlaybackParameters getPlaybackParameters() {
// Delegates to the wrapped player instance.
return player.getPlaybackParameters();
}
@Override
public void stop() {
player.stop();
@ -1024,15 +1017,4 @@ public class SimpleExoPlayer implements ExoPlayer {
}
@TargetApi(23)
private static final class PlaybackParamsHolder {
public final PlaybackParams params;
public PlaybackParamsHolder(PlaybackParams params) {
this.params = params;
}
}
}

View file

@ -20,11 +20,11 @@ import android.annotation.TargetApi;
import android.media.AudioAttributes;
import android.media.AudioFormat;
import android.media.AudioTimestamp;
import android.media.PlaybackParams;
import android.os.ConditionVariable;
import android.os.SystemClock;
import android.util.Log;
import com.google.android.exoplayer2.C;
import com.google.android.exoplayer2.PlaybackParameters;
import com.google.android.exoplayer2.util.Assertions;
import com.google.android.exoplayer2.util.MimeTypes;
import com.google.android.exoplayer2.util.Util;
@ -271,6 +271,7 @@ public final class AudioTrack {
private final AudioCapabilities audioCapabilities;
private final ChannelMappingAudioProcessor channelMappingAudioProcessor;
private final SonicAudioProcessor sonicAudioProcessor;
private final AudioProcessor[] availableAudioProcessors;
private final Listener listener;
private final ConditionVariable releasingConditionVariable;
@ -294,6 +295,7 @@ public final class AudioTrack {
private boolean passthrough;
private int bufferSize;
private long bufferSizeUs;
private PlaybackParameters playbackParameters;
private ByteBuffer avSyncHeader;
private int bytesUntilNextAvSync;
@ -344,11 +346,6 @@ public final class AudioTrack {
public AudioTrack(AudioCapabilities audioCapabilities, AudioProcessor[] audioProcessors,
Listener listener) {
this.audioCapabilities = audioCapabilities;
channelMappingAudioProcessor = new ChannelMappingAudioProcessor();
availableAudioProcessors = new AudioProcessor[audioProcessors.length + 2];
availableAudioProcessors[0] = new ResamplingAudioProcessor();
availableAudioProcessors[1] = channelMappingAudioProcessor;
System.arraycopy(audioProcessors, 0, availableAudioProcessors, 2, audioProcessors.length);
this.listener = listener;
releasingConditionVariable = new ConditionVariable(true);
if (Util.SDK_INT >= 18) {
@ -359,18 +356,24 @@ public final class AudioTrack {
// There's no guarantee this method exists. Do nothing.
}
}
if (Util.SDK_INT >= 23) {
audioTrackUtil = new AudioTrackUtilV23();
} else if (Util.SDK_INT >= 19) {
if (Util.SDK_INT >= 19) {
audioTrackUtil = new AudioTrackUtilV19();
} else {
audioTrackUtil = new AudioTrackUtil();
}
channelMappingAudioProcessor = new ChannelMappingAudioProcessor();
sonicAudioProcessor = new SonicAudioProcessor();
availableAudioProcessors = new AudioProcessor[3 + audioProcessors.length];
availableAudioProcessors[0] = new ResamplingAudioProcessor();
availableAudioProcessors[1] = channelMappingAudioProcessor;
System.arraycopy(audioProcessors, 0, availableAudioProcessors, 2, audioProcessors.length);
availableAudioProcessors[2 + audioProcessors.length] = sonicAudioProcessor;
playheadOffsets = new long[MAX_PLAYHEAD_OFFSET_COUNT];
volume = 1.0f;
startMediaTimeState = START_NOT_SET;
streamType = C.STREAM_TYPE_DEFAULT;
audioSessionId = C.AUDIO_SESSION_ID_UNSET;
playbackParameters = PlaybackParameters.DEFAULT;
drainingAudioProcessorIndex = C.INDEX_UNSET;
this.audioProcessors = new AudioProcessor[0];
outputBuffers = new ByteBuffer[0];
@ -408,33 +411,28 @@ public final class AudioTrack {
}
long systemClockUs = System.nanoTime() / 1000;
long currentPositionUs;
long positionUs;
if (audioTimestampSet) {
// How long ago in the past the audio timestamp is (negative if it's in the future).
long presentationDiff = systemClockUs - (audioTrackUtil.getTimestampNanoTime() / 1000);
// Fixes such difference if the playback speed is not real time speed.
long actualSpeedPresentationDiff = (long) (presentationDiff
* audioTrackUtil.getPlaybackSpeed());
long framesDiff = durationUsToFrames(actualSpeedPresentationDiff);
// The position of the frame that's currently being presented.
long currentFramePosition = audioTrackUtil.getTimestampFramePosition() + framesDiff;
currentPositionUs = framesToDurationUs(currentFramePosition) + startMediaTimeUs;
// Calculate the speed-adjusted position using the timestamp (which may be in the future).
long elapsedSinceTimestampUs = systemClockUs - (audioTrackUtil.getTimestampNanoTime() / 1000);
long elapsedSinceTimestampFrames = durationUsToFrames(elapsedSinceTimestampUs);
long elapsedFrames = audioTrackUtil.getTimestampFramePosition() + elapsedSinceTimestampFrames;
positionUs = framesToDurationUs(elapsedFrames);
} else {
if (playheadOffsetCount == 0) {
// The AudioTrack has started, but we don't have any samples to compute a smoothed position.
currentPositionUs = audioTrackUtil.getPlaybackHeadPositionUs() + startMediaTimeUs;
positionUs = audioTrackUtil.getPositionUs();
} else {
// getPlayheadPositionUs() only has a granularity of ~20 ms, so we base the position off the
// system clock (and a smoothed offset between it and the playhead position) so as to
// prevent jitter in the reported positions.
currentPositionUs = systemClockUs + smoothedPlayheadOffsetUs + startMediaTimeUs;
positionUs = systemClockUs + smoothedPlayheadOffsetUs;
}
if (!sourceEnded) {
currentPositionUs -= latencyUs;
positionUs -= latencyUs;
}
}
return currentPositionUs;
return startMediaTimeUs + scaleFrames(positionUs);
}
/**
@ -481,10 +479,7 @@ public final class AudioTrack {
boolean flush = false;
if (!passthrough) {
pcmFrameSize = Util.getPcmFrameSize(pcmEncoding, channelCount);
// Reconfigure the audio processors.
channelMappingAudioProcessor.setChannelMap(outputChannels);
ArrayList<AudioProcessor> newAudioProcessors = new ArrayList<>();
for (AudioProcessor audioProcessor : availableAudioProcessors) {
try {
flush |= audioProcessor.configure(sampleRate, channelCount, encoding);
@ -492,23 +487,12 @@ public final class AudioTrack {
throw new ConfigurationException(e);
}
if (audioProcessor.isActive()) {
newAudioProcessors.add(audioProcessor);
channelCount = audioProcessor.getOutputChannelCount();
encoding = audioProcessor.getOutputEncoding();
} else {
audioProcessor.flush();
}
}
if (flush) {
int count = newAudioProcessors.size();
audioProcessors = newAudioProcessors.toArray(new AudioProcessor[count]);
outputBuffers = new ByteBuffer[count];
for (int i = 0; i < count; i++) {
AudioProcessor audioProcessor = audioProcessors[i];
audioProcessor.flush();
outputBuffers[i] = audioProcessor.getOutput();
}
resetAudioProcessors();
}
}
@ -603,6 +587,28 @@ public final class AudioTrack {
: multipliedBufferSize;
}
bufferSizeUs = passthrough ? C.TIME_UNSET : framesToDurationUs(bufferSize / outputPcmFrameSize);
// The old playback parameters may no longer be applicable so try to reset them now.
setPlaybackParameters(playbackParameters);
}
private void resetAudioProcessors() {
  // Partition the available processors: active ones form the live chain, inactive ones are
  // flushed so they hold no stale data if they become active later.
  ArrayList<AudioProcessor> activeProcessors = new ArrayList<>();
  for (AudioProcessor processor : availableAudioProcessors) {
    if (!processor.isActive()) {
      processor.flush();
    } else {
      activeProcessors.add(processor);
    }
  }
  int activeCount = activeProcessors.size();
  audioProcessors = activeProcessors.toArray(new AudioProcessor[activeCount]);
  outputBuffers = new ByteBuffer[activeCount];
  // Flush the active chain and capture each processor's output buffer.
  for (int i = 0; i < activeCount; i++) {
    audioProcessors[i].flush();
    outputBuffers[i] = audioProcessors[i].getOutput();
  }
}
private void initialize() throws InitializationException {
@ -940,15 +946,42 @@ public final class AudioTrack {
}
/**
* Sets the playback parameters. Only available for {@link Util#SDK_INT} &gt;= 23
* Attempts to set the playback parameters and returns the active playback parameters, which may
* differ from those passed in.
*
* @param playbackParams The playback parameters to be used by the
* {@link android.media.AudioTrack}.
* @throws UnsupportedOperationException if the Playback Parameters are not supported. That is,
* {@link Util#SDK_INT} &lt; 23.
* @return The active playback parameters.
*/
public void setPlaybackParams(PlaybackParams playbackParams) {
audioTrackUtil.setPlaybackParams(playbackParams);
public PlaybackParameters setPlaybackParameters(PlaybackParameters playbackParameters) {
  if (passthrough) {
    // Speed/pitch adjustment operates on PCM only, so passthrough always uses the defaults.
    this.playbackParameters = PlaybackParameters.DEFAULT;
    return this.playbackParameters;
  }
  // The processor may constrain the requested speed/pitch; store what it actually accepted.
  float activeSpeed = sonicAudioProcessor.setSpeed(playbackParameters.speed);
  float activePitch = sonicAudioProcessor.setPitch(playbackParameters.pitch);
  this.playbackParameters = new PlaybackParameters(activeSpeed, activePitch);
  // TODO: Avoid resetting the track, so that speed/pitch changes are seamless.
  // See [Internal: b/36542189].
  reset();
  // Setting the playback parameters never changes the output format, so it is not necessary to
  // reconfigure the processors, though they may have become active/inactive.
  resetAudioProcessors();
  return this.playbackParameters;
}
/**
 * Returns the currently active {@link PlaybackParameters}. These may differ from the most
 * recently requested parameters (for example, in passthrough mode the defaults are used).
 */
public PlaybackParameters getPlaybackParameters() {
return playbackParameters;
}
/**
 * Returns the number of input frames corresponding to the specified number of output frames,
 * taking into account any internal playback speed adjustment.
 */
private long scaleFrames(long outputFrameCount) {
  if (!sonicAudioProcessor.isActive()) {
    // No speed adjustment is being applied, so input and output frame counts match.
    return outputFrameCount;
  }
  return sonicAudioProcessor.getInputFrames(outputFrameCount);
}
/**
@ -1145,7 +1178,7 @@ public final class AudioTrack {
* Updates the audio track latency and playback position parameters.
*/
private void maybeSampleSyncParams() {
long playbackPositionUs = audioTrackUtil.getPlaybackHeadPositionUs();
long playbackPositionUs = audioTrackUtil.getPositionUs();
if (playbackPositionUs == 0) {
// The AudioTrack hasn't output anything yet.
return;
@ -1441,15 +1474,15 @@ public final class AudioTrack {
/**
* Stops the audio track in a way that ensures media written to it is played out in full, and
* that {@link #getPlaybackHeadPosition()} and {@link #getPlaybackHeadPositionUs()} continue to
* increment as the remaining media is played out.
* that {@link #getPlaybackHeadPosition()} and {@link #getPositionUs()} continue to increment as
* the remaining media is played out.
*
* @param submittedFrames The total number of frames that have been submitted.
* @param writtenFrames The total number of frames that have been written.
*/
public void handleEndOfStream(long submittedFrames) {
public void handleEndOfStream(long writtenFrames) {
stopPlaybackHeadPosition = getPlaybackHeadPosition();
stopTimestampUs = SystemClock.elapsedRealtime() * 1000;
endPlaybackHeadPosition = submittedFrames;
endPlaybackHeadPosition = writtenFrames;
audioTrack.stop();
}
@ -1471,8 +1504,7 @@ public final class AudioTrack {
* returns the playback head position as a long that will only wrap around if the value exceeds
* {@link Long#MAX_VALUE} (which in practice will never happen).
*
* @return {@link android.media.AudioTrack#getPlaybackHeadPosition()} of {@link #audioTrack}
* expressed as a long.
* @return The playback head position, in frames.
*/
public long getPlaybackHeadPosition() {
if (stopTimestampUs != C.TIME_UNSET) {
@ -1507,9 +1539,9 @@ public final class AudioTrack {
}
/**
* Returns {@link #getPlaybackHeadPosition()} expressed as microseconds.
* Returns the duration of played media since reconfiguration, in microseconds.
*/
public long getPlaybackHeadPositionUs() {
public long getPositionUs() {
return (getPlaybackHeadPosition() * C.MICROS_PER_SECOND) / sampleRate;
}
@ -1553,28 +1585,6 @@ public final class AudioTrack {
throw new UnsupportedOperationException();
}
/**
* Sets the Playback Parameters to be used by the underlying {@link android.media.AudioTrack}.
*
* @param playbackParams The playback parameters to be used by the
* {@link android.media.AudioTrack}.
* @throws UnsupportedOperationException If Playback Parameters are not supported
* (i.e. {@link Util#SDK_INT} &lt; 23).
*/
public void setPlaybackParams(PlaybackParams playbackParams) {
throw new UnsupportedOperationException();
}
/**
* Returns the configured playback speed according to the used Playback Parameters. If these are
* not supported, 1.0f(normal speed) is returned.
*
* @return The speed factor used by the underlying {@link android.media.AudioTrack}.
*/
public float getPlaybackSpeed() {
return 1.0f;
}
}
@TargetApi(19)
@ -1626,43 +1636,4 @@ public final class AudioTrack {
}
@TargetApi(23)
private static class AudioTrackUtilV23 extends AudioTrackUtilV19 {
private PlaybackParams playbackParams;
private float playbackSpeed;
public AudioTrackUtilV23() {
playbackSpeed = 1.0f;
}
@Override
public void reconfigure(android.media.AudioTrack audioTrack,
boolean needsPassthroughWorkaround) {
super.reconfigure(audioTrack, needsPassthroughWorkaround);
maybeApplyPlaybackParams();
}
@Override
public void setPlaybackParams(PlaybackParams playbackParams) {
playbackParams = (playbackParams != null ? playbackParams : new PlaybackParams())
.allowDefaults();
this.playbackParams = playbackParams;
playbackSpeed = playbackParams.getSpeed();
maybeApplyPlaybackParams();
}
@Override
public float getPlaybackSpeed() {
return playbackSpeed;
}
private void maybeApplyPlaybackParams() {
if (audioTrack != null && playbackParams != null) {
audioTrack.setPlaybackParams(playbackParams);
}
}
}
}

View file

@ -19,12 +19,12 @@ import android.annotation.TargetApi;
import android.media.MediaCodec;
import android.media.MediaCrypto;
import android.media.MediaFormat;
import android.media.PlaybackParams;
import android.media.audiofx.Virtualizer;
import android.os.Handler;
import com.google.android.exoplayer2.C;
import com.google.android.exoplayer2.ExoPlaybackException;
import com.google.android.exoplayer2.Format;
import com.google.android.exoplayer2.PlaybackParameters;
import com.google.android.exoplayer2.audio.AudioRendererEventListener.EventDispatcher;
import com.google.android.exoplayer2.drm.DrmSessionManager;
import com.google.android.exoplayer2.drm.FrameworkMediaCrypto;
@ -345,6 +345,16 @@ public class MediaCodecAudioRenderer extends MediaCodecRenderer implements Media
return currentPositionUs;
}
  // Delegates to the AudioTrack; the returned parameters are whatever the track reports as the
  // parameters it actually applied (which may differ from those requested).
  @Override
  public PlaybackParameters setPlaybackParameters(PlaybackParameters playbackParameters) {
    return audioTrack.setPlaybackParameters(playbackParameters);
  }
  // Reads the active playback parameters straight from the AudioTrack, which owns them.
  @Override
  public PlaybackParameters getPlaybackParameters() {
    return audioTrack.getPlaybackParameters();
  }
@Override
protected boolean processOutputBuffer(long positionUs, long elapsedRealtimeUs, MediaCodec codec,
ByteBuffer buffer, int bufferIndex, int bufferFlags, long bufferPresentationTimeUs,
@ -389,9 +399,6 @@ public class MediaCodecAudioRenderer extends MediaCodecRenderer implements Media
case C.MSG_SET_VOLUME:
audioTrack.setVolume((Float) message);
break;
case C.MSG_SET_PLAYBACK_PARAMS:
audioTrack.setPlaybackParams((PlaybackParams) message);
break;
case C.MSG_SET_STREAM_TYPE:
@C.StreamType int streamType = (Integer) message;
audioTrack.setStreamType(streamType);

View file

@ -15,7 +15,6 @@
*/
package com.google.android.exoplayer2.audio;
import android.media.PlaybackParams;
import android.media.audiofx.Virtualizer;
import android.os.Handler;
import android.os.Looper;
@ -26,6 +25,7 @@ import com.google.android.exoplayer2.C;
import com.google.android.exoplayer2.ExoPlaybackException;
import com.google.android.exoplayer2.Format;
import com.google.android.exoplayer2.FormatHolder;
import com.google.android.exoplayer2.PlaybackParameters;
import com.google.android.exoplayer2.audio.AudioRendererEventListener.EventDispatcher;
import com.google.android.exoplayer2.decoder.DecoderCounters;
import com.google.android.exoplayer2.decoder.DecoderInputBuffer;
@ -434,6 +434,16 @@ public abstract class SimpleDecoderAudioRenderer extends BaseRenderer implements
return currentPositionUs;
}
  // Delegates to the AudioTrack; the returned parameters are whatever the track reports as the
  // parameters it actually applied (which may differ from those requested).
  @Override
  public PlaybackParameters setPlaybackParameters(PlaybackParameters playbackParameters) {
    return audioTrack.setPlaybackParameters(playbackParameters);
  }
  // Reads the active playback parameters straight from the AudioTrack, which owns them.
  @Override
  public PlaybackParameters getPlaybackParameters() {
    return audioTrack.getPlaybackParameters();
  }
@Override
protected void onEnabled(boolean joining) throws ExoPlaybackException {
decoderCounters = new DecoderCounters();
@ -585,9 +595,6 @@ public abstract class SimpleDecoderAudioRenderer extends BaseRenderer implements
case C.MSG_SET_VOLUME:
audioTrack.setVolume((Float) message);
break;
case C.MSG_SET_PLAYBACK_PARAMS:
audioTrack.setPlaybackParams((PlaybackParams) message);
break;
case C.MSG_SET_STREAM_TYPE:
@C.StreamType int streamType = (Integer) message;
audioTrack.setStreamType(streamType);

View file

@ -0,0 +1,817 @@
/*
* Copyright (C) 2017 The Android Open Source Project
* Copyright (C) 2010 Bill Cox, Sonic Library
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.google.android.exoplayer2.audio;
/**
* Sonic audio time/pitch stretching library. Based on https://github.com/waywardgeek/sonic.
*/
/* package */ final class Sonic {

  private static final int SONIC_MIN_PITCH = 65;
  private static final int SONIC_MAX_PITCH = 400;
  /* This is used to down-sample some inputs to improve speed */
  private static final int SONIC_AMDF_FREQ = 4000;

  private short[] inputBuffer;
  private short[] outputBuffer;
  private short[] pitchBuffer;
  private short[] downSampleBuffer;
  private float speed;
  private float volume;
  private float pitch;
  private float rate;
  private int oldRatePosition;
  private int newRatePosition;
  private boolean useChordPitch;
  private int quality;
  private int numChannels;
  private int inputBufferSize;
  private int pitchBufferSize;
  private int outputBufferSize;
  // All sample counts below are in frames (one sample per channel).
  private int numInputSamples;
  private int numOutputSamples;
  private int numPitchSamples;
  private int minPeriod;
  private int maxPeriod;
  private int maxRequired;
  private int remainingInputToCopy;
  private int sampleRate;
  private int prevPeriod;
  private int prevMinDiff;
  private int minDiff;
  private int maxDiff;

  // Resize the array. newLength is in frames; the array holds newLength * numChannels shorts.
  private short[] resize(short[] oldArray, int newLength) {
    newLength *= numChannels;
    short[] newArray = new short[newLength];
    int length = Math.min(oldArray.length, newLength);
    System.arraycopy(oldArray, 0, newArray, 0, length);
    return newArray;
  }

  // Move samples from one array to another. May move samples down within an array, but not up.
  // Positions and counts are in frames.
  private void move(short[] dest, int destPos, short[] source, int sourcePos, int numSamples) {
    System.arraycopy(
        source, sourcePos * numChannels, dest, destPos * numChannels, numSamples * numChannels);
  }

  // Scale the samples by the factor, in 12-bit fixed point, clamping to the 16-bit range.
  private void scaleSamples(short[] samples, int position, int numSamples, float volume) {
    int fixedPointVolume = (int) (volume * 4096.0f);
    int start = position * numChannels;
    int stop = start + numSamples * numChannels;
    for (int xSample = start; xSample < stop; xSample++) {
      int value = (samples[xSample] * fixedPointVolume) >> 12;
      if (value > 32767) {
        value = 32767;
      } else if (value < -32767) {
        value = -32767;
      }
      samples[xSample] = (short) value;
    }
  }

  // Get the speed of the stream.
  public float getSpeed() {
    return speed;
  }

  // Set the speed of the stream.
  public void setSpeed(float speed) {
    this.speed = speed;
  }

  // Get the pitch of the stream.
  public float getPitch() {
    return pitch;
  }

  // Set the pitch of the stream.
  public void setPitch(float pitch) {
    this.pitch = pitch;
  }

  // Get the rate of the stream.
  public float getRate() {
    return rate;
  }

  // Set the playback rate of the stream. This scales pitch and speed at the same time.
  public void setRate(float rate) {
    this.rate = rate;
    this.oldRatePosition = 0;
    this.newRatePosition = 0;
  }

  // Get the vocal chord pitch setting.
  public boolean getChordPitch() {
    return useChordPitch;
  }

  // Set the vocal chord mode for pitch computation. Default is off.
  public void setChordPitch(boolean useChordPitch) {
    this.useChordPitch = useChordPitch;
  }

  // Get the quality setting.
  public int getQuality() {
    return quality;
  }

  // Set the "quality". Default 0 is virtually as good as 1, but very much faster.
  public void setQuality(int quality) {
    this.quality = quality;
  }

  // Get the scaling factor of the stream.
  public float getVolume() {
    return volume;
  }

  // Set the scaling factor of the stream.
  public void setVolume(float volume) {
    this.volume = volume;
  }

  // Allocate stream buffers. Discards any samples buffered in the stream.
  private void allocateStreamBuffers(int sampleRate, int numChannels) {
    minPeriod = sampleRate / SONIC_MAX_PITCH;
    maxPeriod = sampleRate / SONIC_MIN_PITCH;
    maxRequired = 2 * maxPeriod;
    inputBufferSize = maxRequired;
    inputBuffer = new short[maxRequired * numChannels];
    outputBufferSize = maxRequired;
    outputBuffer = new short[maxRequired * numChannels];
    pitchBufferSize = maxRequired;
    pitchBuffer = new short[maxRequired * numChannels];
    downSampleBuffer = new short[maxRequired];
    this.sampleRate = sampleRate;
    this.numChannels = numChannels;
    oldRatePosition = 0;
    newRatePosition = 0;
    prevPeriod = 0;
  }

  // Create a sonic stream. Defaults to speed/pitch/rate/volume of 1 (no modification).
  public Sonic(int sampleRate, int numChannels) {
    allocateStreamBuffers(sampleRate, numChannels);
    speed = 1.0f;
    pitch = 1.0f;
    volume = 1.0f;
    rate = 1.0f;
    oldRatePosition = 0;
    newRatePosition = 0;
    useChordPitch = false;
    quality = 0;
  }

  // Get the sample rate of the stream.
  public int getSampleRate() {
    return sampleRate;
  }

  // Set the sample rate of the stream. This will cause samples buffered in the stream to be lost.
  public void setSampleRate(int sampleRate) {
    allocateStreamBuffers(sampleRate, numChannels);
  }

  // Get the number of channels.
  public int getNumChannels() {
    return numChannels;
  }

  // Set the num channels of the stream. This will cause samples buffered in the stream to be lost.
  public void setNumChannels(int numChannels) {
    allocateStreamBuffers(sampleRate, numChannels);
  }

  // Enlarge the output buffer if needed.
  private void enlargeOutputBufferIfNeeded(int numSamples) {
    if (numOutputSamples + numSamples > outputBufferSize) {
      outputBufferSize += (outputBufferSize >> 1) + numSamples;
      outputBuffer = resize(outputBuffer, outputBufferSize);
    }
  }

  // Enlarge the input buffer if needed.
  private void enlargeInputBufferIfNeeded(int numSamples) {
    if (numInputSamples + numSamples > inputBufferSize) {
      inputBufferSize += (inputBufferSize >> 1) + numSamples;
      inputBuffer = resize(inputBuffer, inputBufferSize);
    }
  }

  // Add the input samples to the input buffer, converting from [-1, 1] floats to 16-bit shorts.
  private void addFloatSamplesToInputBuffer(float[] samples, int numSamples) {
    if (numSamples == 0) {
      return;
    }
    enlargeInputBufferIfNeeded(numSamples);
    int xBuffer = numInputSamples * numChannels;
    for (int xSample = 0; xSample < numSamples * numChannels; xSample++) {
      inputBuffer[xBuffer++] = (short) (samples[xSample] * 32767.0f);
    }
    numInputSamples += numSamples;
  }

  // Add the input samples to the input buffer.
  private void addShortSamplesToInputBuffer(short[] samples, int numSamples) {
    if (numSamples == 0) {
      return;
    }
    enlargeInputBufferIfNeeded(numSamples);
    move(inputBuffer, numInputSamples, samples, 0, numSamples);
    numInputSamples += numSamples;
  }

  // Add the input samples to the input buffer, converting unsigned 8-bit samples to 16-bit.
  private void addUnsignedByteSamplesToInputBuffer(byte[] samples, int numSamples) {
    short sample;
    enlargeInputBufferIfNeeded(numSamples);
    int xBuffer = numInputSamples * numChannels;
    for (int xSample = 0; xSample < numSamples * numChannels; xSample++) {
      sample = (short) ((samples[xSample] & 0xff) - 128); // Convert from unsigned to signed
      inputBuffer[xBuffer++] = (short) (sample << 8);
    }
    numInputSamples += numSamples;
  }

  // Add the input samples to the input buffer. They must be 16-bit little-endian encoded in a byte
  // array.
  private void addBytesToInputBuffer(byte[] inBuffer, int numBytes) {
    int numSamples = numBytes / (2 * numChannels);
    short sample;
    enlargeInputBufferIfNeeded(numSamples);
    int xBuffer = numInputSamples * numChannels;
    for (int xByte = 0; xByte + 1 < numBytes; xByte += 2) {
      sample = (short) ((inBuffer[xByte] & 0xff) | (inBuffer[xByte + 1] << 8));
      inputBuffer[xBuffer++] = sample;
    }
    numInputSamples += numSamples;
  }

  // Remove input samples that we have already processed.
  private void removeInputSamples(int position) {
    int remainingSamples = numInputSamples - position;
    move(inputBuffer, 0, inputBuffer, position, remainingSamples);
    numInputSamples = remainingSamples;
  }

  // Just copy from the array to the output buffer
  private void copyToOutput(short[] samples, int position, int numSamples) {
    enlargeOutputBufferIfNeeded(numSamples);
    move(outputBuffer, numOutputSamples, samples, position, numSamples);
    numOutputSamples += numSamples;
  }

  // Just copy from the input buffer to the output buffer. Return num samples copied.
  private int copyInputToOutput(int position) {
    int numSamples = remainingInputToCopy;
    if (numSamples > maxRequired) {
      numSamples = maxRequired;
    }
    copyToOutput(inputBuffer, position, numSamples);
    remainingInputToCopy -= numSamples;
    return numSamples;
  }

  // Read data out of the stream. Sometimes no data will be available, and zero
  // is returned, which is not an error condition.
  public int readFloatFromStream(float[] samples, int maxSamples) {
    int numSamples = numOutputSamples;
    int remainingSamples = 0;
    if (numSamples == 0) {
      return 0;
    }
    if (numSamples > maxSamples) {
      remainingSamples = numSamples - maxSamples;
      numSamples = maxSamples;
    }
    // Fix: the loop previously incremented xSample both in the header and in the array index
    // expression (samples[xSample++] = outputBuffer[xSample] / ...), which skipped every other
    // element of samples and read outputBuffer at the wrong (post-incremented) index.
    for (int xSample = 0; xSample < numSamples * numChannels; xSample++) {
      samples[xSample] = outputBuffer[xSample] / 32767.0f;
    }
    move(outputBuffer, 0, outputBuffer, numSamples, remainingSamples);
    numOutputSamples = remainingSamples;
    return numSamples;
  }

  // Read short data out of the stream. Sometimes no data will be available, and zero
  // is returned, which is not an error condition.
  public int readShortFromStream(short[] samples, int maxSamples) {
    int numSamples = numOutputSamples;
    int remainingSamples = 0;
    if (numSamples == 0) {
      return 0;
    }
    if (numSamples > maxSamples) {
      remainingSamples = numSamples - maxSamples;
      numSamples = maxSamples;
    }
    move(samples, 0, outputBuffer, 0, numSamples);
    move(outputBuffer, 0, outputBuffer, numSamples, remainingSamples);
    numOutputSamples = remainingSamples;
    return numSamples;
  }

  // Read 16-bit little-endian byte data out of the stream. Sometimes no data will be available,
  // and zero is returned, which is not an error condition. Returns the number of bytes read.
  public int readBytesFromStream(byte[] outBuffer, int maxBytes) {
    int maxSamples = maxBytes / (2 * numChannels);
    int numSamples = numOutputSamples;
    int remainingSamples = 0;
    if (numSamples == 0 || maxSamples == 0) {
      return 0;
    }
    if (numSamples > maxSamples) {
      remainingSamples = numSamples - maxSamples;
      numSamples = maxSamples;
    }
    for (int xSample = 0; xSample < numSamples * numChannels; xSample++) {
      short sample = outputBuffer[xSample];
      outBuffer[xSample << 1] = (byte) (sample & 0xff);
      outBuffer[(xSample << 1) + 1] = (byte) (sample >> 8);
    }
    move(outputBuffer, 0, outputBuffer, numSamples, remainingSamples);
    numOutputSamples = remainingSamples;
    return 2 * numSamples * numChannels;
  }

  // Force the sonic stream to generate output using whatever data it currently
  // has. No extra delay will be added to the output, but flushing in the middle of
  // words could introduce distortion.
  public void flushStream() {
    int remainingSamples = numInputSamples;
    float s = speed / pitch;
    float r = rate * pitch;
    int expectedOutputSamples =
        numOutputSamples + (int) ((remainingSamples / s + numPitchSamples) / r + 0.5f);
    // Add enough silence to flush both input and pitch buffers.
    enlargeInputBufferIfNeeded(remainingSamples + 2 * maxRequired);
    for (int xSample = 0; xSample < 2 * maxRequired * numChannels; xSample++) {
      inputBuffer[remainingSamples * numChannels + xSample] = 0;
    }
    numInputSamples += 2 * maxRequired;
    writeShortToStream(null, 0);
    // Throw away any extra samples we generated due to the silence we added.
    if (numOutputSamples > expectedOutputSamples) {
      numOutputSamples = expectedOutputSamples;
    }
    // Empty input and pitch buffers.
    numInputSamples = 0;
    remainingInputToCopy = 0;
    numPitchSamples = 0;
  }

  // Return the number of samples in the output buffer
  public int samplesAvailable() {
    return numOutputSamples;
  }

  // If skip is greater than one, average skip samples together and write them to
  // the down-sample buffer. If numChannels is greater than one, mix the channels
  // together as we down sample.
  private void downSampleInput(short[] samples, int position, int skip) {
    int numSamples = maxRequired / skip;
    int samplesPerValue = numChannels * skip;
    int value;
    position *= numChannels;
    for (int i = 0; i < numSamples; i++) {
      value = 0;
      for (int j = 0; j < samplesPerValue; j++) {
        value += samples[position + i * samplesPerValue + j];
      }
      value /= samplesPerValue;
      downSampleBuffer[i] = (short) value;
    }
  }

  // Find the best frequency match in the range, and given a sample skip multiple.
  // For now, just find the pitch of the first channel. Uses AMDF (average magnitude
  // difference function): the period whose normalized difference is smallest wins.
  private int findPitchPeriodInRange(short[] samples, int position, int minPeriod, int maxPeriod) {
    int bestPeriod = 0;
    int worstPeriod = 255;
    int minDiff = 1;
    int maxDiff = 0;
    position *= numChannels;
    for (int period = minPeriod; period <= maxPeriod; period++) {
      int diff = 0;
      for (int i = 0; i < period; i++) {
        short sVal = samples[position + i];
        short pVal = samples[position + period + i];
        diff += sVal >= pVal ? sVal - pVal : pVal - sVal;
      }
      // Note that the highest number of samples we add into diff will be less than 256, since we
      // skip samples. Thus, diff is a 24 bit number, and we can safely multiply by numSamples
      // without overflow.
      if (diff * bestPeriod < minDiff * period) {
        minDiff = diff;
        bestPeriod = period;
      }
      if (diff * worstPeriod > maxDiff * period) {
        maxDiff = diff;
        worstPeriod = period;
      }
    }
    this.minDiff = minDiff / bestPeriod;
    this.maxDiff = maxDiff / worstPeriod;
    return bestPeriod;
  }

  // At abrupt ends of voiced words, we can have pitch periods that are better
  // approximated by the previous pitch period estimate. Try to detect this case.
  private boolean prevPeriodBetter(int minDiff, int maxDiff, boolean preferNewPeriod) {
    if (minDiff == 0 || prevPeriod == 0) {
      return false;
    }
    if (preferNewPeriod) {
      if (maxDiff > minDiff * 3) {
        // Got a reasonable match this period
        return false;
      }
      if (minDiff * 2 <= prevMinDiff * 3) {
        // Mismatch is not that much greater this period
        return false;
      }
    } else {
      if (minDiff <= prevMinDiff) {
        return false;
      }
    }
    return true;
  }

  // Find the pitch period. This is a critical step, and we may have to try
  // multiple ways to get a good answer. This version uses AMDF. To improve
  // speed, we down sample by an integer factor get in the 11KHz range, and then
  // do it again with a narrower frequency range without down sampling
  private int findPitchPeriod(short[] samples, int position, boolean preferNewPeriod) {
    int period;
    int retPeriod;
    int skip = 1;
    if (sampleRate > SONIC_AMDF_FREQ && quality == 0) {
      skip = sampleRate / SONIC_AMDF_FREQ;
    }
    if (numChannels == 1 && skip == 1) {
      period = findPitchPeriodInRange(samples, position, minPeriod, maxPeriod);
    } else {
      downSampleInput(samples, position, skip);
      period = findPitchPeriodInRange(downSampleBuffer, 0, minPeriod / skip, maxPeriod / skip);
      if (skip != 1) {
        period *= skip;
        int minP = period - (skip << 2);
        int maxP = period + (skip << 2);
        if (minP < minPeriod) {
          minP = minPeriod;
        }
        if (maxP > maxPeriod) {
          maxP = maxPeriod;
        }
        if (numChannels == 1) {
          period = findPitchPeriodInRange(samples, position, minP, maxP);
        } else {
          downSampleInput(samples, position, 1);
          period = findPitchPeriodInRange(downSampleBuffer, 0, minP, maxP);
        }
      }
    }
    if (prevPeriodBetter(minDiff, maxDiff, preferNewPeriod)) {
      retPeriod = prevPeriod;
    } else {
      retPeriod = period;
    }
    prevMinDiff = minDiff;
    prevPeriod = period;
    return retPeriod;
  }

  // Overlap two sound segments, ramp the volume of one down, while ramping the
  // other one from zero up, and add them, storing the result at the output.
  private static void overlapAdd(int numSamples, int numChannels, short[] out, int outPos,
      short[] rampDown, int rampDownPos, short[] rampUp, int rampUpPos) {
    for (int i = 0; i < numChannels; i++) {
      int o = outPos * numChannels + i;
      int u = rampUpPos * numChannels + i;
      int d = rampDownPos * numChannels + i;
      for (int t = 0; t < numSamples; t++) {
        out[o] = (short) ((rampDown[d] * (numSamples - t) + rampUp[u] * t) / numSamples);
        o += numChannels;
        d += numChannels;
        u += numChannels;
      }
    }
  }

  // Overlap two sound segments, ramp the volume of one down, while ramping the
  // other one from zero up, and add them, storing the result at the output.
  private static void overlapAddWithSeparation(int numSamples, int numChannels, int separation,
      short[] out, int outPos, short[] rampDown, int rampDownPos, short[] rampUp, int rampUpPos) {
    for (int i = 0; i < numChannels; i++) {
      int o = outPos * numChannels + i;
      int u = rampUpPos * numChannels + i;
      int d = rampDownPos * numChannels + i;
      for (int t = 0; t < numSamples + separation; t++) {
        if (t < separation) {
          out[o] = (short) (rampDown[d] * (numSamples - t) / numSamples);
          d += numChannels;
        } else if (t < numSamples) {
          out[o] =
              (short) ((rampDown[d] * (numSamples - t) + rampUp[u] * (t - separation))
                  / numSamples);
          d += numChannels;
          u += numChannels;
        } else {
          out[o] = (short) (rampUp[u] * (t - separation) / numSamples);
          u += numChannels;
        }
        o += numChannels;
      }
    }
  }

  // Just move the new samples in the output buffer to the pitch buffer
  private void moveNewSamplesToPitchBuffer(int originalNumOutputSamples) {
    int numSamples = numOutputSamples - originalNumOutputSamples;
    if (numPitchSamples + numSamples > pitchBufferSize) {
      pitchBufferSize += (pitchBufferSize >> 1) + numSamples;
      pitchBuffer = resize(pitchBuffer, pitchBufferSize);
    }
    move(pitchBuffer, numPitchSamples, outputBuffer, originalNumOutputSamples, numSamples);
    numOutputSamples = originalNumOutputSamples;
    numPitchSamples += numSamples;
  }

  // Remove processed samples from the pitch buffer.
  private void removePitchSamples(int numSamples) {
    if (numSamples == 0) {
      return;
    }
    move(pitchBuffer, 0, pitchBuffer, numSamples, numPitchSamples - numSamples);
    numPitchSamples -= numSamples;
  }

  // Change the pitch. The latency this introduces could be reduced by looking at
  // past samples to determine pitch, rather than future.
  private void adjustPitch(int originalNumOutputSamples) {
    int period;
    int newPeriod;
    int separation;
    int position = 0;
    if (numOutputSamples == originalNumOutputSamples) {
      return;
    }
    moveNewSamplesToPitchBuffer(originalNumOutputSamples);
    while (numPitchSamples - position >= maxRequired) {
      period = findPitchPeriod(pitchBuffer, position, false);
      newPeriod = (int) (period / pitch);
      enlargeOutputBufferIfNeeded(newPeriod);
      if (pitch >= 1.0f) {
        overlapAdd(newPeriod, numChannels, outputBuffer, numOutputSamples, pitchBuffer, position,
            pitchBuffer, position + period - newPeriod);
      } else {
        separation = newPeriod - period;
        overlapAddWithSeparation(period, numChannels, separation, outputBuffer, numOutputSamples,
            pitchBuffer, position, pitchBuffer, position);
      }
      numOutputSamples += newPeriod;
      position += period;
    }
    removePitchSamples(position);
  }

  // Interpolate the new output sample.
  private short interpolate(short[] in, int inPos, int oldSampleRate, int newSampleRate) {
    short left = in[inPos * numChannels];
    short right = in[inPos * numChannels + numChannels];
    int position = newRatePosition * oldSampleRate;
    int leftPosition = oldRatePosition * newSampleRate;
    int rightPosition = (oldRatePosition + 1) * newSampleRate;
    int ratio = rightPosition - position;
    int width = rightPosition - leftPosition;
    return (short) ((ratio * left + (width - ratio) * right) / width);
  }

  // Change the rate.
  private void adjustRate(float rate, int originalNumOutputSamples) {
    int newSampleRate = (int) (sampleRate / rate);
    int oldSampleRate = sampleRate;
    int position;
    // Set these values to help with the integer math
    while (newSampleRate > (1 << 14) || oldSampleRate > (1 << 14)) {
      newSampleRate >>= 1;
      oldSampleRate >>= 1;
    }
    if (numOutputSamples == originalNumOutputSamples) {
      return;
    }
    moveNewSamplesToPitchBuffer(originalNumOutputSamples);
    // Leave at least one pitch sample in the buffer
    for (position = 0; position < numPitchSamples - 1; position++) {
      while ((oldRatePosition + 1) * newSampleRate > newRatePosition * oldSampleRate) {
        enlargeOutputBufferIfNeeded(1);
        for (int i = 0; i < numChannels; i++) {
          // NOTE(review): 'position + i' offsets by whole frames, not channels, which matches
          // upstream Sonic but looks suspect for numChannels > 1 — confirm against upstream.
          outputBuffer[numOutputSamples * numChannels + i] =
              interpolate(pitchBuffer, position + i, oldSampleRate, newSampleRate);
        }
        newRatePosition++;
        numOutputSamples++;
      }
      oldRatePosition++;
      if (oldRatePosition == oldSampleRate) {
        oldRatePosition = 0;
        if (newRatePosition != newSampleRate) {
          System.out.printf("Assertion failed: newRatePosition != newSampleRate\n");
          assert false;
        }
        newRatePosition = 0;
      }
    }
    removePitchSamples(position);
  }

  // Skip over a pitch period, and copy period/speed samples to the output
  private int skipPitchPeriod(short[] samples, int position, float speed, int period) {
    int newSamples;
    if (speed >= 2.0f) {
      newSamples = (int) (period / (speed - 1.0f));
    } else {
      newSamples = period;
      remainingInputToCopy = (int) (period * (2.0f - speed) / (speed - 1.0f));
    }
    enlargeOutputBufferIfNeeded(newSamples);
    overlapAdd(newSamples, numChannels, outputBuffer, numOutputSamples, samples, position, samples,
        position + period);
    numOutputSamples += newSamples;
    return newSamples;
  }

  // Insert a pitch period, and determine how much input to copy directly.
  private int insertPitchPeriod(short[] samples, int position, float speed, int period) {
    int newSamples;
    if (speed < 0.5f) {
      newSamples = (int) (period * speed / (1.0f - speed));
    } else {
      newSamples = period;
      remainingInputToCopy = (int) (period * (2.0f * speed - 1.0f) / (1.0f - speed));
    }
    enlargeOutputBufferIfNeeded(period + newSamples);
    move(outputBuffer, numOutputSamples, samples, position, period);
    overlapAdd(newSamples, numChannels, outputBuffer, numOutputSamples + period, samples,
        position + period, samples, position);
    numOutputSamples += period + newSamples;
    return newSamples;
  }

  // Resample as many pitch periods as we have buffered on the input. Return 0 if
  // we fail to resize an input or output buffer. Also scale the output by the volume.
  private void changeSpeed(float speed) {
    int numSamples = numInputSamples;
    int position = 0;
    int period;
    int newSamples;
    if (numInputSamples < maxRequired) {
      return;
    }
    do {
      if (remainingInputToCopy > 0) {
        newSamples = copyInputToOutput(position);
        position += newSamples;
      } else {
        period = findPitchPeriod(inputBuffer, position, true);
        if (speed > 1.0) {
          newSamples = skipPitchPeriod(inputBuffer, position, speed, period);
          position += period + newSamples;
        } else {
          newSamples = insertPitchPeriod(inputBuffer, position, speed, period);
          position += newSamples;
        }
      }
    } while (position + maxRequired <= numSamples);
    removeInputSamples(position);
  }

  // Resample as many pitch periods as we have buffered on the input. Scale the output by the
  // volume.
  private void processStreamInput() {
    int originalNumOutputSamples = numOutputSamples;
    float s = speed / pitch;
    float r = rate;
    if (!useChordPitch) {
      r *= pitch;
    }
    if (s > 1.00001 || s < 0.99999) {
      changeSpeed(s);
    } else {
      // Effective speed is close enough to 1 that resampling would only add distortion.
      copyToOutput(inputBuffer, 0, numInputSamples);
      numInputSamples = 0;
    }
    if (useChordPitch) {
      if (pitch != 1.0f) {
        adjustPitch(originalNumOutputSamples);
      }
    } else if (r != 1.0f) {
      adjustRate(r, originalNumOutputSamples);
    }
    if (volume != 1.0f) {
      // Adjust output volume.
      scaleSamples(outputBuffer, originalNumOutputSamples,
          numOutputSamples - originalNumOutputSamples, volume);
    }
  }

  // Write floating point data to the input buffer and process it.
  public void writeFloatToStream(float[] samples, int numSamples) {
    addFloatSamplesToInputBuffer(samples, numSamples);
    processStreamInput();
  }

  // Write the data to the input stream, and process it.
  public void writeShortToStream(short[] samples, int numSamples) {
    addShortSamplesToInputBuffer(samples, numSamples);
    processStreamInput();
  }

  // Simple wrapper around sonicWriteFloatToStream that does the unsigned byte to short
  // conversion for you.
  public void writeUnsignedByteToStream(byte[] samples, int numSamples) {
    addUnsignedByteSamplesToInputBuffer(samples, numSamples);
    processStreamInput();
  }

  // Simple wrapper around sonicWriteBytesToStream that does the byte to 16-bit LE conversion.
  public void writeBytesToStream(byte[] inBuffer, int numBytes) {
    addBytesToInputBuffer(inBuffer, numBytes);
    processStreamInput();
  }

  // This is a non-stream oriented interface to just change the speed of a sound sample
  public static int changeFloatSpeed(float[] samples, int numSamples, float speed, float pitch,
      float rate, float volume, boolean useChordPitch, int sampleRate, int numChannels) {
    Sonic stream = new Sonic(sampleRate, numChannels);
    stream.setSpeed(speed);
    stream.setPitch(pitch);
    stream.setRate(rate);
    stream.setVolume(volume);
    stream.setChordPitch(useChordPitch);
    stream.writeFloatToStream(samples, numSamples);
    stream.flushStream();
    numSamples = stream.samplesAvailable();
    stream.readFloatFromStream(samples, numSamples);
    return numSamples;
  }

  /* This is a non-stream oriented interface to just change the speed of a sound sample */
  // Made static for consistency with changeFloatSpeed: it creates its own stream and uses no
  // instance state. (Source-compatible for any existing instance-style callers.)
  public static int sonicChangeShortSpeed(short[] samples, int numSamples, float speed, float pitch,
      float rate, float volume, boolean useChordPitch, int sampleRate, int numChannels) {
    Sonic stream = new Sonic(sampleRate, numChannels);
    stream.setSpeed(speed);
    stream.setPitch(pitch);
    stream.setRate(rate);
    stream.setVolume(volume);
    stream.setChordPitch(useChordPitch);
    stream.writeShortToStream(samples, numSamples);
    stream.flushStream();
    numSamples = stream.samplesAvailable();
    stream.readShortFromStream(samples, numSamples);
    return numSamples;
  }
}

View file

@ -0,0 +1,205 @@
/*
* Copyright (C) 2017 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.google.android.exoplayer2.audio;
import com.google.android.exoplayer2.C;
import com.google.android.exoplayer2.C.Encoding;
import com.google.android.exoplayer2.Format;
import com.google.android.exoplayer2.util.Util;
import java.nio.ByteBuffer;
import java.nio.ByteOrder;
/**
* An {@link AudioProcessor} that uses the Sonic library to modify the speed/pitch of audio.
*/
// TODO: Make public once it is possible to override AudioTrack's position calculations.
/* package */ final class SonicAudioProcessor implements AudioProcessor {

  /**
   * The maximum allowed playback speed in {@link #setSpeed(float)}.
   */
  public static final float MAXIMUM_SPEED = 8.0f;

  /**
   * The minimum allowed playback speed in {@link #setSpeed(float)}.
   */
  public static final float MINIMUM_SPEED = 0.1f;

  /**
   * The maximum allowed pitch in {@link #setPitch(float)}.
   */
  public static final float MAXIMUM_PITCH = 8.0f;

  /**
   * The minimum allowed pitch in {@link #setPitch(float)}.
   */
  public static final float MINIMUM_PITCH = 0.1f;

  /**
   * The threshold below which the difference between two pitch/speed factors is negligible.
   */
  private static final float CLOSE_THRESHOLD = 0.01f;

  private static final byte[] EMPTY_ARRAY = new byte[0];

  private int channelCount;
  private int sampleRateHz;
  private Sonic sonic;
  private float speed;
  private float pitch;

  // Scratch array used to copy input out of the caller's ByteBuffer before handing it to Sonic.
  private byte[] inputArray;
  // Direct buffer (and its backing scratch array) holding processed output pending consumption.
  private ByteBuffer buffer;
  private byte[] bufferArray;
  private ByteBuffer outputBuffer;
  // Running byte counts used to derive the input/output frame ratio in getInputFrames.
  private long inputBytes;
  private long outputBytes;
  private boolean inputEnded;

  /**
   * Creates a new Sonic audio processor.
   */
  public SonicAudioProcessor() {
    speed = 1f;
    pitch = 1f;
    channelCount = Format.NO_VALUE;
    sampleRateHz = Format.NO_VALUE;
    buffer = EMPTY_BUFFER;
    outputBuffer = EMPTY_BUFFER;
    inputArray = EMPTY_ARRAY;
    bufferArray = EMPTY_ARRAY;
  }

  /**
   * Sets the playback speed. The new speed will take effect after a call to {@link #flush()}.
   *
   * @param speed The requested new playback speed.
   * @return The actual new playback speed, constrained to
   *     [{@link #MINIMUM_SPEED}, {@link #MAXIMUM_SPEED}].
   */
  public float setSpeed(float speed) {
    this.speed = Util.constrainValue(speed, MINIMUM_SPEED, MAXIMUM_SPEED);
    return this.speed;
  }

  /**
   * Sets the playback pitch. The new pitch will take effect after a call to {@link #flush()}.
   *
   * @param pitch The requested new pitch.
   * @return The actual new pitch, constrained to
   *     [{@link #MINIMUM_PITCH}, {@link #MAXIMUM_PITCH}].
   */
  public float setPitch(float pitch) {
    this.pitch = Util.constrainValue(pitch, MINIMUM_PITCH, MAXIMUM_PITCH);
    // Return the constrained field, not the raw parameter, so callers see the pitch actually in
    // effect. This matches setSpeed and the documented contract.
    return this.pitch;
  }

  /**
   * Returns the number of input frames corresponding to the specified number of output frames,
   * based on the byte counts observed so far.
   */
  public long getInputFrames(long outputFrames) {
    // Sonic produces output data as soon as input is queued, so the observed byte ratio is a
    // usable estimate of the frame ratio. Before any output exists there is nothing to scale.
    return outputBytes == 0 ? 0 : Util.scaleLargeTimestamp(outputFrames, inputBytes, outputBytes);
  }

  @Override
  public boolean configure(int sampleRateHz, int channelCount, @Encoding int encoding)
      throws UnhandledFormatException {
    // Only 16-bit PCM input is supported.
    if (encoding != C.ENCODING_PCM_16BIT) {
      throw new UnhandledFormatException(sampleRateHz, channelCount, encoding);
    }
    if (this.sampleRateHz == sampleRateHz && this.channelCount == channelCount) {
      // No reconfiguration needed.
      return false;
    }
    this.sampleRateHz = sampleRateHz;
    this.channelCount = channelCount;
    return true;
  }

  @Override
  public boolean isActive() {
    // The processor is a no-op when both factors are (negligibly close to) unity.
    return Math.abs(speed - 1f) >= CLOSE_THRESHOLD || Math.abs(pitch - 1f) >= CLOSE_THRESHOLD;
  }

  @Override
  public int getOutputChannelCount() {
    return channelCount;
  }

  @Override
  public int getOutputEncoding() {
    return C.ENCODING_PCM_16BIT;
  }

  @Override
  public void queueInput(ByteBuffer inputBuffer) {
    // TODO: Remove this extra copy.
    int inputBytesToRead = inputBuffer.remaining();
    if (inputArray == null || inputArray.length < inputBytesToRead) {
      inputArray = new byte[inputBytesToRead];
    }
    inputBuffer.get(inputArray, 0, inputBytesToRead);
    sonic.writeBytesToStream(inputArray, inputBytesToRead);
    // Drain everything Sonic has produced so far. 2 bytes per 16-bit sample per channel.
    int outputSize = sonic.samplesAvailable() * channelCount * 2;
    if (buffer.capacity() < outputSize) {
      buffer = ByteBuffer.allocateDirect(outputSize).order(ByteOrder.nativeOrder());
      bufferArray = new byte[outputSize];
    } else {
      buffer.clear();
    }
    inputBytes += inputBytesToRead;
    int outputBytesRead = sonic.readBytesFromStream(bufferArray, outputSize);
    buffer.put(bufferArray, 0, outputBytesRead);
    buffer.flip();
    // Account the bytes actually read rather than the requested size, so the input/output ratio
    // used by getInputFrames stays correct even if Sonic returns fewer bytes than requested.
    outputBytes += outputBytesRead;
    outputBuffer = buffer;
  }

  @Override
  public void queueEndOfStream() {
    // Flush so any remaining buffered samples become readable via samplesAvailable/read.
    sonic.flushStream();
    inputEnded = true;
  }

  @Override
  public ByteBuffer getOutput() {
    ByteBuffer outputBuffer = this.outputBuffer;
    // Hand off ownership of the pending output; subsequent calls return an empty buffer until
    // more input is queued.
    this.outputBuffer = EMPTY_BUFFER;
    return outputBuffer;
  }

  @Override
  public boolean isEnded() {
    return inputEnded && (sonic == null || sonic.samplesAvailable() == 0);
  }

  @Override
  public void flush() {
    // Recreate Sonic so pending internal state is dropped and new speed/pitch take effect.
    sonic = new Sonic(sampleRateHz, channelCount);
    sonic.setSpeed(speed);
    sonic.setPitch(pitch);
    outputBuffer = EMPTY_BUFFER;
    inputBytes = 0;
    outputBytes = 0;
    inputEnded = false;
  }

  @Override
  public void release() {
    sonic = null;
    buffer = EMPTY_BUFFER;
    outputBuffer = EMPTY_BUFFER;
    inputArray = EMPTY_ARRAY;
    bufferArray = EMPTY_ARRAY;
  }

}

View file

@ -15,6 +15,8 @@
*/
package com.google.android.exoplayer2.util;
import com.google.android.exoplayer2.PlaybackParameters;
/**
* Tracks the progression of media time.
*/
@ -25,4 +27,18 @@ public interface MediaClock {
*/
long getPositionUs();
/**
* Attempts to set the playback parameters and returns the active playback parameters, which may
* differ from those passed in.
*
* @param playbackParameters The playback parameters.
* @return The active playback parameters.
*/
PlaybackParameters setPlaybackParameters(PlaybackParameters playbackParameters);
/**
* Returns the active playback parameters.
*/
PlaybackParameters getPlaybackParameters();
}

View file

@ -16,33 +16,34 @@
package com.google.android.exoplayer2.util;
import android.os.SystemClock;
import com.google.android.exoplayer2.C;
import com.google.android.exoplayer2.PlaybackParameters;
/**
* A standalone {@link MediaClock}. The clock can be started, stopped and its time can be set and
* retrieved. When started, this clock is based on {@link SystemClock#elapsedRealtime()}.
* A {@link MediaClock} whose position advances with real time based on the playback parameters when
* started.
*/
public final class StandaloneMediaClock implements MediaClock {
private boolean started;
private long baseUs;
private long baseElapsedMs;
private PlaybackParameters playbackParameters;
/**
* The media time when the clock was last set or stopped.
* Creates a new standalone media clock.
*/
private long positionUs;
/**
* The difference between {@link SystemClock#elapsedRealtime()} and {@link #positionUs}
* when the clock was last set or started.
*/
private long deltaUs;
public StandaloneMediaClock() {
playbackParameters = PlaybackParameters.DEFAULT;
}
/**
* Starts the clock. Does nothing if the clock is already started.
*/
public void start() {
if (!started) {
baseElapsedMs = SystemClock.elapsedRealtime();
started = true;
deltaUs = elapsedRealtimeMinus(positionUs);
}
}
@ -51,26 +52,60 @@ public final class StandaloneMediaClock implements MediaClock {
*/
public void stop() {
if (started) {
positionUs = elapsedRealtimeMinus(deltaUs);
setPositionUs(getPositionUs());
started = false;
}
}
/**
* @param timeUs The position to set in microseconds.
* Sets the clock's position.
*
* @param positionUs The position to set in microseconds.
*/
public void setPositionUs(long timeUs) {
this.positionUs = timeUs;
deltaUs = elapsedRealtimeMinus(timeUs);
public void setPositionUs(long positionUs) {
baseUs = positionUs;
if (started) {
baseElapsedMs = SystemClock.elapsedRealtime();
}
}
/**
* Synchronizes this clock with the current state of {@code clock}.
*
* @param clock The clock with which to synchronize.
*/
public void synchronize(MediaClock clock) {
setPositionUs(clock.getPositionUs());
playbackParameters = clock.getPlaybackParameters();
}
@Override
public long getPositionUs() {
return started ? elapsedRealtimeMinus(deltaUs) : positionUs;
long positionUs = baseUs;
if (started) {
long elapsedSinceBaseMs = SystemClock.elapsedRealtime() - baseElapsedMs;
if (playbackParameters.speed == 1f) {
positionUs += C.msToUs(elapsedSinceBaseMs);
} else {
positionUs += playbackParameters.getSpeedAdjustedDurationUs(elapsedSinceBaseMs);
}
}
return positionUs;
}
private long elapsedRealtimeMinus(long toSubtractUs) {
return SystemClock.elapsedRealtime() * 1000 - toSubtractUs;
@Override
public PlaybackParameters setPlaybackParameters(PlaybackParameters playbackParameters) {
// Store the current position as the new base, in case the playback speed has changed.
if (started) {
setPositionUs(getPositionUs());
}
this.playbackParameters = playbackParameters;
return playbackParameters;
}
@Override
public PlaybackParameters getPlaybackParameters() {
return playbackParameters;
}
}

View file

@ -309,6 +309,18 @@ public final class Util {
return Math.max(min, Math.min(value, max));
}
/**
 * Constrains a value to the specified bounds.
 *
 * @param value The value to constrain.
 * @param min The lower bound.
 * @param max The upper bound.
 * @return The constrained value {@code Math.max(min, Math.min(value, max))}.
 */
public static float constrainValue(float value, float min, float max) {
  // Clamp from above first, then from below; this preserves the documented nesting order
  // Math.max(min, Math.min(value, max)), which matters if min > max.
  float upperBounded = Math.min(value, max);
  return Math.max(min, upperBounded);
}
/**
* Returns the index of the largest element in {@code array} that is less than (or optionally
* equal to) a specified {@code value}.

View file

@ -19,6 +19,7 @@ import android.widget.TextView;
import com.google.android.exoplayer2.ExoPlaybackException;
import com.google.android.exoplayer2.ExoPlayer;
import com.google.android.exoplayer2.Format;
import com.google.android.exoplayer2.PlaybackParameters;
import com.google.android.exoplayer2.SimpleExoPlayer;
import com.google.android.exoplayer2.Timeline;
import com.google.android.exoplayer2.decoder.DecoderCounters;
@ -90,6 +91,11 @@ public final class DebugTextViewHelper implements Runnable, ExoPlayer.EventListe
updateAndPost();
}
@Override
public void onPlaybackParametersChanged(PlaybackParameters playbackParameters) {
// Do nothing.
}
@Override
public void onTimelineChanged(Timeline timeline, Object manifest) {
// Do nothing.

View file

@ -29,6 +29,7 @@ import android.widget.TextView;
import com.google.android.exoplayer2.C;
import com.google.android.exoplayer2.ExoPlaybackException;
import com.google.android.exoplayer2.ExoPlayer;
import com.google.android.exoplayer2.PlaybackParameters;
import com.google.android.exoplayer2.Timeline;
import com.google.android.exoplayer2.source.TrackGroupArray;
import com.google.android.exoplayer2.trackselection.TrackSelectionArray;
@ -766,6 +767,11 @@ public class PlaybackControlView extends FrameLayout {
updateProgress();
}
@Override
public void onPlaybackParametersChanged(PlaybackParameters playbackParameters) {
// Do nothing.
}
@Override
public void onTimelineChanged(Timeline timeline, Object manifest) {
updateNavigation();

View file

@ -34,6 +34,7 @@ import android.widget.ImageView;
import com.google.android.exoplayer2.C;
import com.google.android.exoplayer2.ExoPlaybackException;
import com.google.android.exoplayer2.ExoPlayer;
import com.google.android.exoplayer2.PlaybackParameters;
import com.google.android.exoplayer2.SimpleExoPlayer;
import com.google.android.exoplayer2.Timeline;
import com.google.android.exoplayer2.metadata.Metadata;
@ -741,6 +742,11 @@ public final class SimpleExoPlayerView extends FrameLayout {
// Do nothing.
}
@Override
public void onPlaybackParametersChanged(PlaybackParameters playbackParameters) {
// Do nothing.
}
@Override
public void onTimelineChanged(Timeline timeline, Object manifest) {
// Do nothing.

View file

@ -24,6 +24,7 @@ import com.google.android.exoplayer2.ExoPlaybackException;
import com.google.android.exoplayer2.ExoPlayer;
import com.google.android.exoplayer2.ExoPlayerFactory;
import com.google.android.exoplayer2.Format;
import com.google.android.exoplayer2.PlaybackParameters;
import com.google.android.exoplayer2.SimpleExoPlayer;
import com.google.android.exoplayer2.Timeline;
import com.google.android.exoplayer2.audio.AudioRendererEventListener;
@ -223,6 +224,11 @@ public abstract class ExoHostedTest implements HostedTest, ExoPlayer.EventListen
// Do nothing.
}
@Override
public final void onPlaybackParametersChanged(PlaybackParameters playbackParameters) {
// Do nothing.
}
@Override
public final void onTimelineChanged(Timeline timeline, Object manifest) {
// Do nothing.