Merge pull request #7244 from tvarga-dss:cancel-hls-chunk-download-and-discard-upstream

PiperOrigin-RevId: 312679454
This commit is contained in:
tonihei 2020-05-21 17:10:51 +01:00
commit 80eb5d4235
5 changed files with 133 additions and 17 deletions

View file

@ -168,6 +168,9 @@
* Enable support for embedded CEA-708.
* Fix assertion failure in `SampleQueue` when playing DASH streams with
EMSG tracks ([#7273](https://github.com/google/ExoPlayer/issues/7273)).
* HLS:
* Add support for upstream discard including cancelation of ongoing load
([#6322](https://github.com/google/ExoPlayer/issues/6322)).
* MP3:
* Add `IndexSeeker` for accurate seeks in VBR streams
([#6787](https://github.com/google/ExoPlayer/issues/6787)). This seeker

View file

@ -628,7 +628,7 @@ public class ChunkSampleStream<T extends ChunkSource> implements SampleStream, S
return;
}
int newQueueSize = currentQueueSize;
int newQueueSize = Integer.MAX_VALUE;
for (int i = preferredQueueSize; i < currentQueueSize; i++) {
if (!haveReadFromMediaChunk(i)) {
newQueueSize = i;

View file

@ -451,6 +451,24 @@ import org.checkerframework.checker.nullness.qual.MonotonicNonNull;
return chunkIterators;
}
/**
 * Returns the preferred size of the queue of buffered {@link MediaChunk MediaChunks}.
 *
 * <p>Trimming the queue back to this size allows chunks at its back to be replaced with chunks
 * of a significantly higher quality (e.g. because the available bandwidth has substantially
 * increased).
 *
 * @param playbackPositionUs The current playback position, in microseconds.
 * @param queue The queue of buffered {@link MediaChunk MediaChunks}.
 * @return The preferred queue size.
 */
public int getPreferredQueueSize(long playbackPositionUs, List<? extends MediaChunk> queue) {
  // Reevaluating the queue size is only meaningful when an adaptive selection is possible
  // (more than one track) and no fatal error is pending; otherwise keep the whole queue.
  boolean canReevaluate = fatalError == null && trackSelection.length() >= 2;
  return canReevaluate
      ? trackSelection.evaluateQueueSize(playbackPositionUs, queue)
      : queue.size();
}
// Private methods.
/**

View file

@ -27,6 +27,7 @@ import com.google.android.exoplayer2.extractor.PositionHolder;
import com.google.android.exoplayer2.metadata.Metadata;
import com.google.android.exoplayer2.metadata.id3.Id3Decoder;
import com.google.android.exoplayer2.metadata.id3.PrivFrame;
import com.google.android.exoplayer2.source.SampleQueue;
import com.google.android.exoplayer2.source.chunk.MediaChunk;
import com.google.android.exoplayer2.source.hls.playlist.HlsMediaPlaylist;
import com.google.android.exoplayer2.upstream.DataSource;
@ -36,6 +37,7 @@ import com.google.android.exoplayer2.util.ParsableByteArray;
import com.google.android.exoplayer2.util.TimestampAdjuster;
import com.google.android.exoplayer2.util.UriUtil;
import com.google.android.exoplayer2.util.Util;
import com.google.common.collect.ImmutableMap;
import java.io.EOFException;
import java.io.IOException;
import java.io.InterruptedIOException;
@ -131,11 +133,15 @@ import org.checkerframework.checker.nullness.qual.RequiresNonNull;
Id3Decoder id3Decoder;
ParsableByteArray scratchId3Data;
boolean shouldSpliceIn;
ImmutableMap<SampleQueue, Integer> sampleQueueDiscardFromIndices = ImmutableMap.of();
if (previousChunk != null) {
id3Decoder = previousChunk.id3Decoder;
scratchId3Data = previousChunk.scratchId3Data;
shouldSpliceIn =
!playlistUrl.equals(previousChunk.playlistUrl) || !previousChunk.loadCompleted;
if (shouldSpliceIn) {
sampleQueueDiscardFromIndices = previousChunk.sampleQueueDiscardFromIndices;
}
previousExtractor =
previousChunk.isExtractorReusable
&& previousChunk.discontinuitySequenceNumber == discontinuitySequenceNumber
@ -172,7 +178,8 @@ import org.checkerframework.checker.nullness.qual.RequiresNonNull;
previousExtractor,
id3Decoder,
scratchId3Data,
shouldSpliceIn);
shouldSpliceIn,
sampleQueueDiscardFromIndices);
}
public static final String PRIV_TIMESTAMP_FRAME_OWNER =
@ -194,9 +201,6 @@ import org.checkerframework.checker.nullness.qual.RequiresNonNull;
/** The url of the playlist from which this chunk was obtained. */
public final Uri playlistUrl;
/** Whether the samples parsed from this chunk should be spliced into already queued samples. */
public final boolean shouldSpliceIn;
@Nullable private final DataSource initDataSource;
@Nullable private final DataSpec initDataSpec;
@Nullable private final Extractor previousExtractor;
@ -211,6 +215,7 @@ import org.checkerframework.checker.nullness.qual.RequiresNonNull;
private final ParsableByteArray scratchId3Data;
private final boolean mediaSegmentEncrypted;
private final boolean initSegmentEncrypted;
private final boolean shouldSpliceIn;
private @MonotonicNonNull Extractor extractor;
private boolean isExtractorReusable;
@ -221,6 +226,7 @@ import org.checkerframework.checker.nullness.qual.RequiresNonNull;
private boolean initDataLoadRequired;
private volatile boolean loadCanceled;
private boolean loadCompleted;
private ImmutableMap<SampleQueue, Integer> sampleQueueDiscardFromIndices;
private HlsMediaChunk(
HlsExtractorFactory extractorFactory,
@ -246,7 +252,8 @@ import org.checkerframework.checker.nullness.qual.RequiresNonNull;
@Nullable Extractor previousExtractor,
Id3Decoder id3Decoder,
ParsableByteArray scratchId3Data,
boolean shouldSpliceIn) {
boolean shouldSpliceIn,
ImmutableMap<SampleQueue, Integer> sampleQueueDiscardFromIndices) {
super(
mediaDataSource,
dataSpec,
@ -273,17 +280,43 @@ import org.checkerframework.checker.nullness.qual.RequiresNonNull;
this.id3Decoder = id3Decoder;
this.scratchId3Data = scratchId3Data;
this.shouldSpliceIn = shouldSpliceIn;
this.sampleQueueDiscardFromIndices = sampleQueueDiscardFromIndices;
uid = uidSource.getAndIncrement();
}
/**
* Initializes the chunk for loading, setting the {@link HlsSampleStreamWrapper} that will receive
* samples as they are loaded.
* Initializes the chunk for loading.
*
* @param output The output that will receive the loaded samples.
* @param output The {@link HlsSampleStreamWrapper} that will receive the loaded samples.
* @param sampleQueues The {@link SampleQueue sampleQueues} with already loaded samples.
*/
public void init(HlsSampleStreamWrapper output) {
public void init(HlsSampleStreamWrapper output, SampleQueue[] sampleQueues) {
  this.output = output;
  if (shouldSpliceIn) {
    // This chunk overlaps samples already in the queues, so splice its samples in rather than
    // appending them.
    for (SampleQueue sampleQueue : sampleQueues) {
      sampleQueue.splice();
    }
    // sampleQueueDiscardFromIndices already set to values of previous chunk in constructor.
  } else {
    // Record the current write index of every queue so that, if this chunk is later discarded
    // from the back of the buffer, each queue can be truncated back to where this chunk started
    // writing.
    ImmutableMap.Builder<SampleQueue, Integer> mapBuilder = ImmutableMap.builder();
    for (SampleQueue sampleQueue : sampleQueues) {
      mapBuilder.put(sampleQueue, sampleQueue.getWriteIndex());
    }
    sampleQueueDiscardFromIndices = mapBuilder.build();
  }
}
/**
 * Returns the absolute index from which samples need to be discarded in the given {@link
 * SampleQueue} when this media chunk is discarded.
 *
 * @param sampleQueue The {@link SampleQueue}.
 * @return The absolute index from which samples need to be discarded.
 */
int getSampleQueueDiscardFromIndex(SampleQueue sampleQueue) {
  Integer discardFromIndex = sampleQueueDiscardFromIndices.get(sampleQueue);
  // A queue without an entry was created by this chunk or a later chunk, so the whole stream is
  // discarded from the beginning.
  return discardFromIndex != null ? discardFromIndex : 0;
}
@Override

View file

@ -146,6 +146,7 @@ import org.checkerframework.checker.nullness.qual.RequiresNonNull;
private @MonotonicNonNull Format upstreamTrackFormat;
@Nullable private Format downstreamTrackFormat;
private boolean released;
private int pendingDiscardUpstreamQueueSize;
// Tracks are complicated in HLS. See documentation of buildTracksFromSampleStreams for details.
// Indexed by track (as exposed by this source).
@ -229,6 +230,7 @@ import org.checkerframework.checker.nullness.qual.RequiresNonNull;
handler = Util.createHandler();
lastSeekPositionUs = positionUs;
pendingResetPositionUs = positionUs;
pendingDiscardUpstreamQueueSize = C.LENGTH_UNSET;
}
public void continuePreparing() {
@ -696,7 +698,21 @@ import org.checkerframework.checker.nullness.qual.RequiresNonNull;
@Override
public void reevaluateBuffer(long positionUs) {
  // Don't attempt to discard whilst a fatal load error is pending or a seek reset is in
  // progress.
  if (loader.hasFatalError() || isPendingReset()) {
    return;
  }
  int currentQueueSize = mediaChunks.size();
  int preferredQueueSize = chunkSource.getPreferredQueueSize(positionUs, readOnlyMediaChunks);
  if (currentQueueSize <= preferredQueueSize) {
    // Nothing to trim.
    return;
  }
  if (loader.isLoading()) {
    // Cancel the in-flight chunk load first; the discard is deferred until onLoadCanceled runs,
    // keyed off pendingDiscardUpstreamQueueSize.
    pendingDiscardUpstreamQueueSize = preferredQueueSize;
    loader.cancelLoading();
  } else {
    discardUpstream(preferredQueueSize);
  }
}
// Loader.Callback implementation.
@ -753,7 +769,12 @@ import org.checkerframework.checker.nullness.qual.RequiresNonNull;
loadable.startTimeUs,
loadable.endTimeUs);
if (!released) {
resetSampleQueues();
if (pendingDiscardUpstreamQueueSize != C.LENGTH_UNSET) {
discardUpstream(pendingDiscardUpstreamQueueSize);
pendingDiscardUpstreamQueueSize = C.LENGTH_UNSET;
} else {
resetSampleQueues();
}
if (enabledTrackGroupCount > 0) {
callback.onContinueLoadingRequested(this);
}
@ -851,16 +872,36 @@ import org.checkerframework.checker.nullness.qual.RequiresNonNull;
upstreamTrackFormat = chunk.trackFormat;
pendingResetPositionUs = C.TIME_UNSET;
mediaChunks.add(chunk);
chunk.init(this);
chunk.init(/* output= */ this, sampleQueues);
for (HlsSampleQueue sampleQueue : sampleQueues) {
sampleQueue.setSourceChunk(chunk);
}
if (chunk.shouldSpliceIn) {
for (SampleQueue sampleQueue : sampleQueues) {
sampleQueue.splice();
}
private void discardUpstream(int preferredQueueSize) {
  // Discarding must not race with an ongoing load; callers cancel the load first.
  Assertions.checkState(!loader.isLoading());
  int currentQueueSize = mediaChunks.size();
  // Find the first chunk at or after the preferred size whose samples have not been read from
  // any sample queue; discarding can only start there without rewinding the read position.
  int newQueueSize = Integer.MAX_VALUE;
  for (int i = preferredQueueSize; i < currentQueueSize; i++) {
    if (!haveReadFromMediaChunkDiscardRange(i)) {
      newQueueSize = i;
      break;
    }
  }
  if (newQueueSize >= currentQueueSize) {
    // Every candidate chunk has already been read from; nothing can be discarded.
    return;
  }
  // Capture the buffered end time before discarding, for the event dispatch below.
  long endTimeUs = getLastMediaChunk().endTimeUs;
  HlsMediaChunk firstRemovedChunk = discardUpstreamMediaChunksFromIndex(newQueueSize);
  if (mediaChunks.isEmpty()) {
    // The whole queue was discarded; the next load must restart from the last seek position.
    pendingResetPositionUs = lastSeekPositionUs;
  }
  // More data now needs loading to refill the discarded region.
  loadingFinished = false;
  eventDispatcher.upstreamDiscarded(
      primarySampleQueueType, firstRemovedChunk.startTimeUs, endTimeUs);
}
// ExtractorOutput implementation. Called by the loading thread.
@ -1061,6 +1102,27 @@ import org.checkerframework.checker.nullness.qual.RequiresNonNull;
return true;
}
/**
 * Returns whether samples that would be discarded along with the media chunk at {@code
 * mediaChunkIndex} have already been read from any sample queue.
 */
private boolean haveReadFromMediaChunkDiscardRange(int mediaChunkIndex) {
  HlsMediaChunk chunk = mediaChunks.get(mediaChunkIndex);
  for (SampleQueue queue : sampleQueues) {
    // Reading past the chunk's discard-from index means discarding would rewind the reader.
    if (queue.getReadIndex() > chunk.getSampleQueueDiscardFromIndex(queue)) {
      return true;
    }
  }
  return false;
}
/**
 * Discards media chunks from {@code chunkIndex} to the end of the queue, truncating each sample
 * queue back to the first removed chunk's recorded write position.
 *
 * @return The first removed chunk.
 */
private HlsMediaChunk discardUpstreamMediaChunksFromIndex(int chunkIndex) {
  HlsMediaChunk firstRemoved = mediaChunks.get(chunkIndex);
  Util.removeRange(mediaChunks, /* fromIndex= */ chunkIndex, /* toIndex= */ mediaChunks.size());
  for (SampleQueue queue : sampleQueues) {
    queue.discardUpstreamSamples(firstRemoved.getSampleQueueDiscardFromIndex(queue));
  }
  return firstRemoved;
}
private void resetSampleQueues() {
for (SampleQueue sampleQueue : sampleQueues) {
sampleQueue.reset(pendingResetUpstreamFormats);