First steps toward implementing bounded live seek.

This commit is contained in:
Oliver Woodman 2015-05-19 15:16:29 +01:00
parent 763d68f26c
commit 709fc7735b
17 changed files with 1042 additions and 247 deletions

View file

@ -1,6 +1,12 @@
# Release notes #
### r1.3.2 ###
### Current dev branch (from r1.3.2) ###
* Add option to TsExtractor to allow non-IDR keyframes.
* Added MulticastDataSource for connecting to multicast streams.
* (WorkInProgress) - First steps toward supporting seeking in the DASH DVR window.
### r1.3.2 (from r1.3.1) ###
* DataSource improvements: `DefaultUriDataSource` now handles http://, https://, file://, asset://
and content:// URIs automatically. It also handles file:///android_asset/* URIs, and file paths

View file

@ -17,6 +17,7 @@ package com.google.android.exoplayer.demo;
import com.google.android.exoplayer.ExoPlayer;
import com.google.android.exoplayer.MediaCodecTrackRenderer.DecoderInitializationException;
import com.google.android.exoplayer.TimeRange;
import com.google.android.exoplayer.audio.AudioTrack;
import com.google.android.exoplayer.chunk.Format;
import com.google.android.exoplayer.demo.player.DemoPlayer;
@ -46,6 +47,7 @@ public class EventLogger implements DemoPlayer.Listener, DemoPlayer.InfoListener
private long sessionStartTimeMs;
private long[] loadStartTimeMs;
private long[] seekRangeValuesUs;
public EventLogger() {
loadStartTimeMs = new long[DemoPlayer.RENDERER_COUNT];
@ -163,7 +165,14 @@ public class EventLogger implements DemoPlayer.Listener, DemoPlayer.InfoListener
@Override
public void onDecoderInitialized(String decoderName, long elapsedRealtimeMs,
long initializationDurationMs) {
Log.d(TAG, "decoderInitialized [" + getSessionTimeString() + "]");
Log.d(TAG, "decoderInitialized [" + getSessionTimeString() + ", " + decoderName + "]");
}
@Override
public void onSeekRangeChanged(TimeRange seekRange) {
seekRangeValuesUs = seekRange.getCurrentBoundsUs(seekRangeValuesUs);
Log.d(TAG, "seekRange [ " + seekRange.type + ", " + seekRangeValuesUs[0] + ", "
+ seekRangeValuesUs[1] + "]");
}
private void printInternalError(String type, Exception e) {

View file

@ -235,14 +235,15 @@ public class DashRendererBuilder implements RendererBuilder,
DataSource videoDataSource = new DefaultUriDataSource(context, bandwidthMeter, userAgent);
ChunkSource videoChunkSource = new DashChunkSource(manifestFetcher,
videoAdaptationSetIndex, videoRepresentationIndices, videoDataSource,
new AdaptiveEvaluator(bandwidthMeter), LIVE_EDGE_LATENCY_MS, elapsedRealtimeOffset);
new AdaptiveEvaluator(bandwidthMeter), LIVE_EDGE_LATENCY_MS, elapsedRealtimeOffset,
mainHandler, player);
ChunkSampleSource videoSampleSource = new ChunkSampleSource(videoChunkSource, loadControl,
VIDEO_BUFFER_SEGMENTS * BUFFER_SEGMENT_SIZE, true, mainHandler, player,
DemoPlayer.TYPE_VIDEO);
videoRenderer = new MediaCodecVideoTrackRenderer(videoSampleSource, drmSessionManager, true,
MediaCodec.VIDEO_SCALING_MODE_SCALE_TO_FIT, 5000, null, mainHandler, player, 50);
debugRenderer = debugTextView != null
? new DebugTrackRenderer(debugTextView, player, videoRenderer) : null;
? new DebugTrackRenderer(debugTextView, player, videoRenderer, bandwidthMeter) : null;
}
// Build the audio chunk sources.
@ -259,7 +260,7 @@ public class DashRendererBuilder implements RendererBuilder,
format.audioSamplingRate + "Hz)");
audioChunkSourceList.add(new DashChunkSource(manifestFetcher, audioAdaptationSetIndex,
new int[] {i}, audioDataSource, audioEvaluator, LIVE_EDGE_LATENCY_MS,
elapsedRealtimeOffset));
elapsedRealtimeOffset, mainHandler, player));
codecs.add(format.codecs);
}
@ -316,7 +317,8 @@ public class DashRendererBuilder implements RendererBuilder,
Representation representation = representations.get(j);
textTrackNameList.add(representation.format.id);
textChunkSourceList.add(new DashChunkSource(manifestFetcher, i, new int[] {j},
textDataSource, textEvaluator, LIVE_EDGE_LATENCY_MS, elapsedRealtimeOffset));
textDataSource, textEvaluator, LIVE_EDGE_LATENCY_MS, elapsedRealtimeOffset,
mainHandler, player));
}
}
}

View file

@ -19,6 +19,7 @@ import com.google.android.exoplayer.ExoPlaybackException;
import com.google.android.exoplayer.MediaCodecTrackRenderer;
import com.google.android.exoplayer.TrackRenderer;
import com.google.android.exoplayer.chunk.Format;
import com.google.android.exoplayer.upstream.BandwidthMeter;
import android.widget.TextView;
@ -31,15 +32,22 @@ import android.widget.TextView;
private final TextView textView;
private final DemoPlayer player;
private final MediaCodecTrackRenderer renderer;
private final BandwidthMeter bandwidthMeter;
private volatile boolean pendingFailure;
private volatile long currentPositionUs;
public DebugTrackRenderer(TextView textView, DemoPlayer player,
MediaCodecTrackRenderer renderer) {
this(textView, player, renderer, null);
}
public DebugTrackRenderer(TextView textView, DemoPlayer player, MediaCodecTrackRenderer renderer,
BandwidthMeter bandwidthMeter) {
this.textView = textView;
this.player = player;
this.renderer = renderer;
this.bandwidthMeter = bandwidthMeter;
}
public void injectFailure() {
@ -77,7 +85,12 @@ import android.widget.TextView;
}
private String getRenderString() {
return getQualityString() + " " + renderer.codecCounters.getDebugString();
return getTimeString() + " " + getQualityString() + " " + getBandwidthString() + " "
+ renderer.codecCounters.getDebugString();
}
private String getTimeString() {
return "ms(" + (currentPositionUs / 1000) + ")";
}
private String getQualityString() {
@ -86,6 +99,15 @@ import android.widget.TextView;
: "id:" + format.id + " br:" + format.bitrate + " h:" + format.height;
}
private String getBandwidthString() {
if (bandwidthMeter == null
|| bandwidthMeter.getBitrateEstimate() == BandwidthMeter.NO_ESTIMATE) {
return "bw:?";
} else {
return "bw:" + (bandwidthMeter.getBitrateEstimate() / 1000);
}
}
@Override
protected long getCurrentPositionUs() {
return currentPositionUs;

View file

@ -21,11 +21,13 @@ import com.google.android.exoplayer.ExoPlayer;
import com.google.android.exoplayer.MediaCodecAudioTrackRenderer;
import com.google.android.exoplayer.MediaCodecTrackRenderer.DecoderInitializationException;
import com.google.android.exoplayer.MediaCodecVideoTrackRenderer;
import com.google.android.exoplayer.TimeRange;
import com.google.android.exoplayer.TrackRenderer;
import com.google.android.exoplayer.audio.AudioTrack;
import com.google.android.exoplayer.chunk.ChunkSampleSource;
import com.google.android.exoplayer.chunk.Format;
import com.google.android.exoplayer.chunk.MultiTrackChunkSource;
import com.google.android.exoplayer.dash.DashChunkSource;
import com.google.android.exoplayer.drm.StreamingDrmSessionManager;
import com.google.android.exoplayer.hls.HlsSampleSource;
import com.google.android.exoplayer.metadata.MetadataTrackRenderer;
@ -50,7 +52,7 @@ import java.util.concurrent.CopyOnWriteArrayList;
public class DemoPlayer implements ExoPlayer.Listener, ChunkSampleSource.EventListener,
HlsSampleSource.EventListener, DefaultBandwidthMeter.EventListener,
MediaCodecVideoTrackRenderer.EventListener, MediaCodecAudioTrackRenderer.EventListener,
StreamingDrmSessionManager.EventListener, TextRenderer {
StreamingDrmSessionManager.EventListener, DashChunkSource.EventListener, TextRenderer {
/**
* Builds renderers for the player.
@ -132,6 +134,7 @@ public class DemoPlayer implements ExoPlayer.Listener, ChunkSampleSource.EventLi
int mediaStartTimeMs, int mediaEndTimeMs, long elapsedRealtimeMs, long loadDurationMs);
void onDecoderInitialized(String decoderName, long elapsedRealtimeMs,
long initializationDurationMs);
void onSeekRangeChanged(TimeRange seekRange);
}
/**
@ -510,6 +513,13 @@ public class DemoPlayer implements ExoPlayer.Listener, ChunkSampleSource.EventLi
processText(text);
}
@Override
public void onSeekRangeChanged(TimeRange seekRange) {
if (infoListener != null) {
infoListener.onSeekRangeChanged(seekRange);
}
}
/* package */ MetadataTrackRenderer.MetadataRenderer<Map<String, Object>>
getId3MetadataRenderer() {
return new MetadataTrackRenderer.MetadataRenderer<Map<String, Object>>() {

View file

@ -23,6 +23,7 @@ import com.google.android.exoplayer.demo.player.DemoPlayer.RendererBuilderCallba
import com.google.android.exoplayer.extractor.Extractor;
import com.google.android.exoplayer.extractor.ExtractorSampleSource;
import com.google.android.exoplayer.upstream.DataSource;
import com.google.android.exoplayer.upstream.DefaultBandwidthMeter;
import com.google.android.exoplayer.upstream.DefaultUriDataSource;
import android.content.Context;
@ -55,7 +56,9 @@ public class ExtractorRendererBuilder implements RendererBuilder {
@Override
public void buildRenderers(DemoPlayer player, RendererBuilderCallback callback) {
// Build the video and audio renderers.
DataSource dataSource = new DefaultUriDataSource(context, userAgent);
DefaultBandwidthMeter bandwidthMeter = new DefaultBandwidthMeter(player.getMainHandler(),
null);
DataSource dataSource = new DefaultUriDataSource(context, bandwidthMeter, userAgent);
ExtractorSampleSource sampleSource = new ExtractorSampleSource(uri, dataSource, extractor, 2,
BUFFER_SIZE);
MediaCodecVideoTrackRenderer videoRenderer = new MediaCodecVideoTrackRenderer(sampleSource,
@ -66,7 +69,7 @@ public class ExtractorRendererBuilder implements RendererBuilder {
// Build the debug renderer.
TrackRenderer debugRenderer = debugTextView != null
? new DebugTrackRenderer(debugTextView, player, videoRenderer) : null;
? new DebugTrackRenderer(debugTextView, player, videoRenderer, bandwidthMeter) : null;
// Invoke the callback.
TrackRenderer[] renderers = new TrackRenderer[DemoPlayer.RENDERER_COUNT];

View file

@ -121,7 +121,7 @@ public class HlsRendererBuilder implements RendererBuilder, ManifestCallback<Hls
// Build the debug renderer.
TrackRenderer debugRenderer = debugTextView != null
? new DebugTrackRenderer(debugTextView, player, videoRenderer) : null;
? new DebugTrackRenderer(debugTextView, player, videoRenderer, bandwidthMeter) : null;
TrackRenderer[] renderers = new TrackRenderer[DemoPlayer.RENDERER_COUNT];
renderers[DemoPlayer.TYPE_VIDEO] = videoRenderer;

View file

@ -174,7 +174,7 @@ public class SmoothStreamingRendererBuilder implements RendererBuilder,
videoRenderer = new MediaCodecVideoTrackRenderer(videoSampleSource, drmSessionManager, true,
MediaCodec.VIDEO_SCALING_MODE_SCALE_TO_FIT, 5000, null, mainHandler, player, 50);
debugRenderer = debugTextView != null
? new DebugTrackRenderer(debugTextView, player, videoRenderer) : null;
? new DebugTrackRenderer(debugTextView, player, videoRenderer, bandwidthMeter) : null;
}
// Build the audio renderer.

View file

@ -0,0 +1,102 @@
/*
* Copyright (C) 2014 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.google.android.exoplayer;
/**
 * An immutable holder for a start and end time, expressed in microseconds.
 */
public final class TimeRange {

  /**
   * Represents a range of time whose bounds change in bulk increments rather than smoothly over
   * time.
   */
  public static final int TYPE_SNAPSHOT = 0;

  /**
   * The type of this time range.
   */
  public final int type;

  // Bounds of the range, in microseconds.
  private final long startTimeUs;
  private final long endTimeUs;

  /**
   * Create a new {@link TimeRange} of the appropriate type.
   *
   * @param type The type of the TimeRange.
   * @param startTimeUs The beginning of the TimeRange, in microseconds.
   * @param endTimeUs The end of the TimeRange, in microseconds.
   */
  public TimeRange(int type, long startTimeUs, long endTimeUs) {
    this.type = type;
    this.startTimeUs = startTimeUs;
    this.endTimeUs = endTimeUs;
  }

  /**
   * Returns the start and end times (in milliseconds) of the TimeRange in the provided array,
   * or creates a new one.
   *
   * @param out An array to store the start and end times; can be null.
   * @return An array containing the start time (index 0) and end time (index 1) in milliseconds.
   */
  public long[] getCurrentBoundsMs(long[] out) {
    long[] result = getCurrentBoundsUs(out);
    // Convert the microsecond bounds to milliseconds in place.
    result[0] /= 1000;
    result[1] /= 1000;
    return result;
  }

  /**
   * Returns the start and end times (in microseconds) of the TimeRange in the provided array,
   * or creates a new one.
   *
   * @param out An array to store the start and end times; can be null.
   * @return An array containing the start time (index 0) and end time (index 1) in microseconds.
   */
  public long[] getCurrentBoundsUs(long[] out) {
    // Reuse the caller's array only if it is large enough to hold both bounds.
    long[] result = (out != null && out.length >= 2) ? out : new long[2];
    result[0] = startTimeUs;
    result[1] = endTimeUs;
    return result;
  }

  @Override
  public int hashCode() {
    // Pack the type into the top bits and a millisecond-scaled sum of the bounds into the rest.
    return (type << 30) | (int) (((startTimeUs + endTimeUs) / 1000) & 0x3FFFFFFFL);
  }

  @Override
  public boolean equals(Object other) {
    if (this == other) {
      return true;
    }
    if (!(other instanceof TimeRange)) {
      return false;
    }
    TimeRange that = (TimeRange) other;
    return type == that.type
        && startTimeUs == that.startTimeUs
        && endTimeUs == that.endTimeUs;
  }

}

View file

@ -17,6 +17,7 @@ package com.google.android.exoplayer.dash;
import com.google.android.exoplayer.BehindLiveWindowException;
import com.google.android.exoplayer.MediaFormat;
import com.google.android.exoplayer.TimeRange;
import com.google.android.exoplayer.TrackInfo;
import com.google.android.exoplayer.TrackRenderer;
import com.google.android.exoplayer.chunk.Chunk;
@ -50,6 +51,8 @@ import com.google.android.exoplayer.util.ManifestFetcher;
import com.google.android.exoplayer.util.MimeTypes;
import com.google.android.exoplayer.util.SystemClock;
import android.os.Handler;
import java.io.IOException;
import java.util.Arrays;
import java.util.Collections;
@ -63,6 +66,20 @@ import java.util.List;
*/
public class DashChunkSource implements ChunkSource {
/**
* Interface definition for a callback to be notified of {@link DashChunkSource} events.
*/
public interface EventListener {
/**
* Invoked when the available seek range of the stream has changed.
*
* @param seekRange The range which specifies available content that can be seeked to.
*/
public void onSeekRangeChanged(TimeRange seekRange);
}
/**
* Thrown when an AdaptationSet is missing from the MPD.
*/
@ -79,6 +96,9 @@ public class DashChunkSource implements ChunkSource {
*/
public static final int USE_ALL_TRACKS = -1;
private final Handler eventHandler;
private final EventListener eventListener;
private final TrackInfo trackInfo;
private final DataSource dataSource;
private final FormatEvaluator evaluator;
@ -99,6 +119,10 @@ public class DashChunkSource implements ChunkSource {
private DrmInitData drmInitData;
private MediaPresentationDescription currentManifest;
private TimeRange seekRange;
private long[] seekRangeValues;
private int firstAvailableSegmentNum;
private int lastAvailableSegmentNum;
private boolean finishedCurrentManifest;
private boolean lastChunkWasInitialization;
@ -142,7 +166,7 @@ public class DashChunkSource implements ChunkSource {
public DashChunkSource(MediaPresentationDescription manifest, int adaptationSetIndex,
int[] representationIndices, DataSource dataSource, FormatEvaluator formatEvaluator) {
this(null, manifest, adaptationSetIndex, representationIndices, dataSource, formatEvaluator,
new SystemClock(), 0, 0);
new SystemClock(), 0, 0, null, null);
}
/**
@ -167,19 +191,24 @@ public class DashChunkSource implements ChunkSource {
* @param elapsedRealtimeOffsetMs If known, an estimate of the instantaneous difference between
* server-side unix time and {@link SystemClock#elapsedRealtime()} in milliseconds, specified
as the server's unix time minus the local elapsed time. If unknown, set to 0.
* @param eventHandler A handler to use when delivering events to {@code EventListener}. May be
* null if delivery of events is not required.
* @param eventListener A listener of events. May be null if delivery of events is not required.
*/
public DashChunkSource(ManifestFetcher<MediaPresentationDescription> manifestFetcher,
int adaptationSetIndex, int[] representationIndices, DataSource dataSource,
FormatEvaluator formatEvaluator, long liveEdgeLatencyMs, long elapsedRealtimeOffsetMs) {
FormatEvaluator formatEvaluator, long liveEdgeLatencyMs, long elapsedRealtimeOffsetMs,
Handler eventHandler, EventListener eventListener) {
this(manifestFetcher, manifestFetcher.getManifest(), adaptationSetIndex, representationIndices,
dataSource, formatEvaluator, new SystemClock(), liveEdgeLatencyMs * 1000,
elapsedRealtimeOffsetMs * 1000);
elapsedRealtimeOffsetMs * 1000, eventHandler, eventListener);
}
/* package */ DashChunkSource(ManifestFetcher<MediaPresentationDescription> manifestFetcher,
MediaPresentationDescription initialManifest, int adaptationSetIndex,
int[] representationIndices, DataSource dataSource, FormatEvaluator formatEvaluator,
Clock systemClock, long liveEdgeLatencyUs, long elapsedRealtimeOffsetUs) {
Clock systemClock, long liveEdgeLatencyUs, long elapsedRealtimeOffsetUs,
Handler eventHandler, EventListener eventListener) {
this.manifestFetcher = manifestFetcher;
this.currentManifest = initialManifest;
this.adaptationSetIndex = adaptationSetIndex;
@ -189,8 +218,11 @@ public class DashChunkSource implements ChunkSource {
this.systemClock = systemClock;
this.liveEdgeLatencyUs = liveEdgeLatencyUs;
this.elapsedRealtimeOffsetUs = elapsedRealtimeOffsetUs;
this.eventHandler = eventHandler;
this.eventListener = eventListener;
this.evaluation = new Evaluation();
this.headerBuilder = new StringBuilder();
this.seekRangeValues = new long[2];
drmInitData = getDrmInitData(currentManifest, adaptationSetIndex);
Representation[] representations = getFilteredRepresentations(currentManifest,
@ -229,12 +261,27 @@ public class DashChunkSource implements ChunkSource {
return trackInfo;
}
// VisibleForTesting
/* package */ TimeRange getSeekRange() {
return seekRange;
}
@Override
public void enable() {
evaluator.enable();
if (manifestFetcher != null) {
manifestFetcher.enable();
}
DashSegmentIndex segmentIndex =
representationHolders.get(formats[0].id).representation.getIndex();
if (segmentIndex == null) {
seekRange = new TimeRange(TimeRange.TYPE_SNAPSHOT, 0, currentManifest.duration * 1000);
notifySeekRangeChanged(seekRange);
} else {
long nowUs = getNowUs();
updateAvailableSegmentBounds(segmentIndex, nowUs);
updateSeekRange(segmentIndex, nowUs);
}
}
@Override
@ -243,6 +290,7 @@ public class DashChunkSource implements ChunkSource {
if (manifestFetcher != null) {
manifestFetcher.disable();
}
seekRange = null;
}
@Override
@ -268,6 +316,10 @@ public class DashChunkSource implements ChunkSource {
}
currentManifest = newManifest;
finishedCurrentManifest = false;
long nowUs = getNowUs();
updateAvailableSegmentBounds(newRepresentations[0].getIndex(), nowUs);
updateSeekRange(newRepresentations[0].getIndex(), nowUs);
}
// TODO: This is a temporary hack to avoid constantly refreshing the MPD in cases where
@ -334,36 +386,21 @@ public class DashChunkSource implements ChunkSource {
return;
}
long nowUs;
if (elapsedRealtimeOffsetUs != 0) {
nowUs = (systemClock.elapsedRealtime() * 1000) + elapsedRealtimeOffsetUs;
} else {
nowUs = System.currentTimeMillis() * 1000;
}
int firstAvailableSegmentNum = segmentIndex.getFirstSegmentNum();
int lastAvailableSegmentNum = segmentIndex.getLastSegmentNum();
boolean indexUnbounded = lastAvailableSegmentNum == DashSegmentIndex.INDEX_UNBOUNDED;
if (indexUnbounded) {
// The index is itself unbounded. We need to use the current time to calculate the range of
// available segments.
long liveEdgeTimestampUs = nowUs - currentManifest.availabilityStartTime * 1000;
if (currentManifest.timeShiftBufferDepth != -1) {
long bufferDepthUs = currentManifest.timeShiftBufferDepth * 1000;
firstAvailableSegmentNum = Math.max(firstAvailableSegmentNum,
segmentIndex.getSegmentNum(liveEdgeTimestampUs - bufferDepthUs));
}
// getSegmentNum(liveEdgeTimestampUs) will not be completed yet, so subtract one to get the
// index of the last completed segment.
lastAvailableSegmentNum = segmentIndex.getSegmentNum(liveEdgeTimestampUs) - 1;
}
int segmentNum;
boolean indexUnbounded = segmentIndex.getLastSegmentNum() == DashSegmentIndex.INDEX_UNBOUNDED;
if (queue.isEmpty()) {
if (currentManifest.dynamic) {
seekPositionUs = getLiveSeekPosition(nowUs, indexUnbounded, segmentIndex.isExplicit());
seekRangeValues = seekRange.getCurrentBoundsUs(seekRangeValues);
seekPositionUs = Math.max(seekPositionUs, seekRangeValues[0]);
seekPositionUs = Math.min(seekPositionUs, seekRangeValues[1]);
}
segmentNum = segmentIndex.getSegmentNum(seekPositionUs);
// if the index is unbounded then the result of getSegmentNum isn't clamped to ensure that
// it doesn't exceed the last available segment. Clamp it here.
if (indexUnbounded) {
segmentNum = Math.min(segmentNum, lastAvailableSegmentNum);
}
} else {
MediaChunk previous = queue.get(out.queueSize - 1);
segmentNum = previous.isLastChunk ? -1
@ -432,6 +469,59 @@ public class DashChunkSource implements ChunkSource {
// Do nothing.
}
/**
 * Updates {@code firstAvailableSegmentNum} and {@code lastAvailableSegmentNum} from the given
 * segment index. For an index that is bounded, the index's own first/last segment numbers are
 * used directly. For an unbounded (live) index, the bounds are derived from the current time,
 * the manifest's availability start time and, if set, its time shift buffer depth.
 *
 * @param segmentIndex The segment index of a representation in the current manifest.
 * @param nowUs An estimate of the current server time, in microseconds.
 */
private void updateAvailableSegmentBounds(DashSegmentIndex segmentIndex, long nowUs) {
int indexFirstAvailableSegmentNum = segmentIndex.getFirstSegmentNum();
int indexLastAvailableSegmentNum = segmentIndex.getLastSegmentNum();
if (indexLastAvailableSegmentNum == DashSegmentIndex.INDEX_UNBOUNDED) {
// The index is itself unbounded. We need to use the current time to calculate the range of
// available segments.
long liveEdgeTimestampUs = nowUs - currentManifest.availabilityStartTime * 1000;
if (currentManifest.timeShiftBufferDepth != -1) {
// A DVR window is declared; segments older than the buffer depth are no longer available.
long bufferDepthUs = currentManifest.timeShiftBufferDepth * 1000;
indexFirstAvailableSegmentNum = Math.max(indexFirstAvailableSegmentNum,
segmentIndex.getSegmentNum(liveEdgeTimestampUs - bufferDepthUs));
}
// getSegmentNum(liveEdgeTimestampUs) will not be completed yet, so subtract one to get the
// index of the last completed segment.
indexLastAvailableSegmentNum = segmentIndex.getSegmentNum(liveEdgeTimestampUs) - 1;
}
firstAvailableSegmentNum = indexFirstAvailableSegmentNum;
lastAvailableSegmentNum = indexLastAvailableSegmentNum;
}
/**
 * Recomputes the seekable range from the current available segment bounds and, for dynamic
 * (live) manifests, the estimated live edge less {@code liveEdgeLatencyUs}. If the resulting
 * range differs from the previous one, {@code seekRange} is replaced and listeners are
 * notified via {@code notifySeekRangeChanged}.
 *
 * @param segmentIndex The segment index of a representation in the current manifest.
 * @param nowUs An estimate of the current server time, in microseconds.
 */
private void updateSeekRange(DashSegmentIndex segmentIndex, long nowUs) {
long earliestSeekPosition = segmentIndex.getTimeUs(firstAvailableSegmentNum);
long latestSeekPosition = segmentIndex.getTimeUs(lastAvailableSegmentNum)
+ segmentIndex.getDurationUs(lastAvailableSegmentNum);
if (currentManifest.dynamic) {
long liveEdgeTimestampUs;
if (segmentIndex.getLastSegmentNum() == DashSegmentIndex.INDEX_UNBOUNDED) {
// Unbounded index: derive the live edge from the wall clock and availability start time.
liveEdgeTimestampUs = nowUs - currentManifest.availabilityStartTime * 1000;
} else {
// Bounded index: the live edge is the end of the last segment the index declares.
liveEdgeTimestampUs = segmentIndex.getTimeUs(segmentIndex.getLastSegmentNum())
+ segmentIndex.getDurationUs(segmentIndex.getLastSegmentNum());
if (!segmentIndex.isExplicit()) {
// Some segments defined by the index may not be available yet. Bound the calculated live
// edge based on the elapsed time since the manifest became available.
liveEdgeTimestampUs = Math.min(liveEdgeTimestampUs,
nowUs - currentManifest.availabilityStartTime * 1000);
}
}
// It's possible that the live edge latency actually puts our latest position before
// the earliest position in the case of a DVR-like stream that's just starting up, so
// in that case just return the earliest position instead.
latestSeekPosition = Math.max(earliestSeekPosition, liveEdgeTimestampUs - liveEdgeLatencyUs);
}
TimeRange newSeekRange = new TimeRange(TimeRange.TYPE_SNAPSHOT, earliestSeekPosition,
latestSeekPosition);
// Only replace and notify when the range actually changed, to avoid redundant callbacks.
if (seekRange == null || !seekRange.equals(newSeekRange)) {
seekRange = newSeekRange;
notifySeekRangeChanged(seekRange);
}
}
/**
 * Returns whether {@code mimeType} is a WebM video or audio MIME type.
 */
private static boolean mimeTypeIsWebm(String mimeType) {
return mimeType.startsWith(MimeTypes.VIDEO_WEBM) || mimeType.startsWith(MimeTypes.AUDIO_WEBM);
}
@ -491,36 +581,12 @@ public class DashChunkSource implements ChunkSource {
}
}
/**
* For live playbacks, determines the seek position that snaps playback to be
* {@link #liveEdgeLatencyUs} behind the live edge of the current manifest
*
* @param nowUs An estimate of the current server time, in microseconds.
* @param indexUnbounded True if the segment index for this source is unbounded. False otherwise.
* @param indexExplicit True if the segment index is explicit. False otherwise.
* @return The seek position in microseconds.
*/
private long getLiveSeekPosition(long nowUs, boolean indexUnbounded, boolean indexExplicit) {
long liveEdgeTimestampUs;
if (indexUnbounded) {
liveEdgeTimestampUs = nowUs - currentManifest.availabilityStartTime * 1000;
private long getNowUs() {
if (elapsedRealtimeOffsetUs != 0) {
return (systemClock.elapsedRealtime() * 1000) + elapsedRealtimeOffsetUs;
} else {
liveEdgeTimestampUs = Long.MIN_VALUE;
for (RepresentationHolder representationHolder : representationHolders.values()) {
DashSegmentIndex segmentIndex = representationHolder.segmentIndex;
int lastSegmentNum = segmentIndex.getLastSegmentNum();
long indexLiveEdgeTimestampUs = segmentIndex.getTimeUs(lastSegmentNum)
+ segmentIndex.getDurationUs(lastSegmentNum);
liveEdgeTimestampUs = Math.max(liveEdgeTimestampUs, indexLiveEdgeTimestampUs);
}
if (!indexExplicit) {
// Some segments defined by the index may not be available yet. Bound the calculated live
// edge based on the elapsed time since the manifest became available.
liveEdgeTimestampUs = Math.min(liveEdgeTimestampUs,
nowUs - currentManifest.availabilityStartTime * 1000);
}
return System.currentTimeMillis() * 1000;
}
return liveEdgeTimestampUs - liveEdgeLatencyUs;
}
private static Representation[] getFilteredRepresentations(MediaPresentationDescription manifest,
@ -571,6 +637,17 @@ public class DashChunkSource implements ChunkSource {
Collections.singletonList(period));
}
/**
 * Posts an {@code onSeekRangeChanged} notification to the event handler, if both a handler
 * and a listener have been supplied.
 *
 * @param seekRange The new seek range to report.
 */
private void notifySeekRangeChanged(final TimeRange seekRange) {
  if (eventHandler == null || eventListener == null) {
    // Event delivery was not requested.
    return;
  }
  eventHandler.post(new Runnable() {
    @Override
    public void run() {
      eventListener.onSeekRangeChanged(seekRange);
    }
  });
}
private static class RepresentationHolder {
public final Representation representation;

View file

@ -37,11 +37,15 @@ import java.util.List;
private static final String TAG = "H264Reader";
private static final int NAL_UNIT_TYPE_IDR = 5;
private static final int NAL_UNIT_TYPE_SEI = 6;
private static final int NAL_UNIT_TYPE_SPS = 7;
private static final int NAL_UNIT_TYPE_PPS = 8;
private static final int NAL_UNIT_TYPE_AUD = 9;
private static final int FRAME_TYPE_I = 2;
private static final int FRAME_TYPE_ALL_I = 7;
private static final int NAL_UNIT_TYPE_IFR = 1; // Coded slice of a non-IDR picture
private static final int NAL_UNIT_TYPE_IDR = 5; // Coded slice of an IDR picture
private static final int NAL_UNIT_TYPE_SEI = 6; // Supplemental enhancement information
private static final int NAL_UNIT_TYPE_SPS = 7; // Sequence parameter set
private static final int NAL_UNIT_TYPE_PPS = 8; // Picture parameter set
private static final int NAL_UNIT_TYPE_AUD = 9; // Access unit delimiter
private static final int EXTENDED_SAR = 0xFF;
private static final float[] ASPECT_RATIO_IDC_VALUES = new float[] {
1f /* Unspecified. Assume square */,
@ -69,6 +73,7 @@ import java.util.List;
// State that should be reset on seek.
private final SeiReader seiReader;
private final boolean[] prefixFlags;
private final IfrParserBuffer ifrParserBuffer;
private final NalUnitTargetBuffer sps;
private final NalUnitTargetBuffer pps;
private final NalUnitTargetBuffer sei;
@ -84,10 +89,11 @@ import java.util.List;
private final ParsableByteArray seiWrapper;
private int[] scratchEscapePositions;
public H264Reader(TrackOutput output, SeiReader seiReader) {
public H264Reader(TrackOutput output, SeiReader seiReader, boolean idrKeyframesOnly) {
super(output);
this.seiReader = seiReader;
prefixFlags = new boolean[3];
ifrParserBuffer = (idrKeyframesOnly) ? null : new IfrParserBuffer();
sps = new NalUnitTargetBuffer(NAL_UNIT_TYPE_SPS, 128);
pps = new NalUnitTargetBuffer(NAL_UNIT_TYPE_PPS, 128);
sei = new NalUnitTargetBuffer(NAL_UNIT_TYPE_SEI, 128);
@ -102,6 +108,9 @@ import java.util.List;
sps.reset();
pps.reset();
sei.reset();
if (ifrParserBuffer != null) {
ifrParserBuffer.reset();
}
writingSample = false;
totalBytesWritten = 0;
}
@ -132,22 +141,30 @@ import java.util.List;
int nalUnitType = H264Util.getNalUnitType(dataArray, nextNalUnitOffset);
int bytesWrittenPastNalUnit = limit - nextNalUnitOffset;
if (nalUnitType == NAL_UNIT_TYPE_AUD) {
if (writingSample) {
if (isKeyframe && !hasOutputFormat && sps.isCompleted() && pps.isCompleted()) {
parseMediaFormat(sps, pps);
switch (nalUnitType) {
case NAL_UNIT_TYPE_IDR:
isKeyframe = true;
break;
case NAL_UNIT_TYPE_AUD:
if (writingSample) {
if (ifrParserBuffer != null && ifrParserBuffer.isCompleted()) {
int sliceType = ifrParserBuffer.getSliceType();
isKeyframe |= (sliceType == FRAME_TYPE_I || sliceType == FRAME_TYPE_ALL_I);
ifrParserBuffer.reset();
}
if (isKeyframe && !hasOutputFormat && sps.isCompleted() && pps.isCompleted()) {
parseMediaFormat(sps, pps);
}
int flags = isKeyframe ? C.SAMPLE_FLAG_SYNC : 0;
int size = (int) (totalBytesWritten - samplePosition) - bytesWrittenPastNalUnit;
output.sampleMetadata(sampleTimeUs, flags, size, bytesWrittenPastNalUnit, null);
writingSample = false;
}
int flags = isKeyframe ? C.SAMPLE_FLAG_SYNC : 0;
int size = (int) (totalBytesWritten - samplePosition) - bytesWrittenPastNalUnit;
output.sampleMetadata(sampleTimeUs, flags, size, bytesWrittenPastNalUnit, null);
writingSample = false;
}
writingSample = true;
isKeyframe = false;
sampleTimeUs = pesTimeUs;
samplePosition = totalBytesWritten - bytesWrittenPastNalUnit;
} else if (nalUnitType == NAL_UNIT_TYPE_IDR) {
isKeyframe = true;
writingSample = true;
samplePosition = totalBytesWritten - bytesWrittenPastNalUnit;
sampleTimeUs = pesTimeUs;
isKeyframe = false;
break;
}
// If the length to the start of the unit is negative then we wrote too many bytes to the
@ -171,6 +188,9 @@ import java.util.List;
}
private void feedNalUnitTargetBuffersStart(int nalUnitType) {
if (ifrParserBuffer != null) {
ifrParserBuffer.startNalUnit(nalUnitType);
}
if (!hasOutputFormat) {
sps.startNalUnit(nalUnitType);
pps.startNalUnit(nalUnitType);
@ -179,6 +199,9 @@ import java.util.List;
}
private void feedNalUnitTargetBuffersData(byte[] dataArray, int offset, int limit) {
if (ifrParserBuffer != null) {
ifrParserBuffer.appendToNalUnit(dataArray, offset, limit);
}
if (!hasOutputFormat) {
sps.appendToNalUnit(dataArray, offset, limit);
pps.appendToNalUnit(dataArray, offset, limit);
@ -461,4 +484,99 @@ import java.util.List;
}
/**
 * A buffer specifically for IFR (non-IDR slice) NAL units that can be used to parse the IFR's
 * slice type from the accumulated slice header.
 */
private static final class IfrParserBuffer {
private static final int DEFAULT_BUFFER_SIZE = 128;
// Sentinel value indicating the slice type has not yet been parsed.
private static final int NOT_SET = -1;
// Bit-level reader over ifrData; re-pointed on every append since ifrData may be reallocated.
private final ParsableBitArray scratchSliceType;
// Accumulated IFR NAL unit payload and the number of valid bytes it holds.
private byte[] ifrData;
private int ifrLength;
// Whether an IFR NAL unit has started and data is currently being accumulated.
private boolean isFilling;
// The parsed slice_type value, or NOT_SET until it has been determined.
private int sliceType;
public IfrParserBuffer() {
ifrData = new byte[DEFAULT_BUFFER_SIZE];
scratchSliceType = new ParsableBitArray(ifrData);
reset();
}
/**
 * Resets the buffer, clearing any data that it holds.
 */
public void reset() {
isFilling = false;
ifrLength = 0;
sliceType = NOT_SET;
}
/**
 * True if enough data was added to the buffer that the slice type was determined.
 */
public boolean isCompleted() {
return sliceType != NOT_SET;
}
/**
 * Invoked to indicate that a NAL unit has started, and if it is an IFR then the buffer will
 * start.
 */
public void startNalUnit(int nalUnitType) {
if (nalUnitType == NAL_UNIT_TYPE_IFR) {
reset();
isFilling = true;
}
}
/**
 * Invoked to pass stream data. The data passed should not include 4 byte NAL unit prefixes.
 *
 * @param data Holds the data being passed.
 * @param offset The offset of the data in {@code data}.
 * @param limit The limit (exclusive) of the data in {@code data}.
 */
public void appendToNalUnit(byte[] data, int offset, int limit) {
if (!isFilling) {
return;
}
int readLength = limit - offset;
if (ifrData.length < ifrLength + readLength) {
// Grow geometrically so repeated appends don't reallocate every time.
ifrData = Arrays.copyOf(ifrData, (ifrLength + readLength) * 2);
}
System.arraycopy(data, offset, ifrData, ifrLength, readLength);
ifrLength += readLength;
// Re-point the bit reader at the (possibly reallocated) buffer before parsing.
scratchSliceType.reset(ifrData, ifrLength);
// first_mb_in_slice
int len = scratchSliceType.peekExpGolombCodedNumLength();
if ((len == -1) || (len > scratchSliceType.bitsLeft())) {
// Not enough yet
return;
}
scratchSliceType.skipBits(len);
// slice_type
len = scratchSliceType.peekExpGolombCodedNumLength();
if ((len == -1) || (len > scratchSliceType.bitsLeft())) {
// Not enough yet
return;
}
// NOTE(review): slice header fields are parsed from raw NAL payload; assumes emulation
// prevention bytes don't affect these leading fields — TODO confirm.
sliceType = scratchSliceType.readUnsignedExpGolombCodedInt();
// Stop accumulating once the slice type has been determined.
isFilling = false;
}
/**
 * @return the slice type of the IFR.
 */
public int getSliceType() {
return sliceType;
}
}
}

View file

@ -53,6 +53,7 @@ public final class TsExtractor implements Extractor, SeekMap {
private final ParsableByteArray tsPacketBuffer;
private final ParsableBitArray tsScratch;
private final boolean idrKeyframesOnly;
private final long firstSampleTimestampUs;
/* package */ final SparseBooleanArray streamTypes;
/* package */ final SparseBooleanArray allowedPassthroughStreamTypes;
@ -65,11 +66,21 @@ public final class TsExtractor implements Extractor, SeekMap {
/* package */ Id3Reader id3Reader;
/**
 * Constructs an instance with a first sample timestamp of zero. Equivalent to
 * {@code TsExtractor(0)}.
 */
public TsExtractor() {
  // A constructor may contain at most one delegating this(...) call, and it must be the first
  // statement; delegate to the single-argument form with a zero timestamp.
  this(0);
}
/**
 * Equivalent to {@code TsExtractor(firstSampleTimestampUs, null)}, i.e. with no
 * {@code AudioCapabilities}.
 */
public TsExtractor(long firstSampleTimestampUs) {
  this(firstSampleTimestampUs, null);
}
/**
 * Equivalent to {@code TsExtractor(firstSampleTimestampUs, audioCapabilities, true)}, i.e. with
 * {@code idrKeyframesOnly} enabled.
 */
public TsExtractor(long firstSampleTimestampUs, AudioCapabilities audioCapabilities) {
  this(firstSampleTimestampUs, audioCapabilities, true);
}
public TsExtractor(long firstSampleTimestampUs, AudioCapabilities audioCapabilities,
boolean idrKeyframesOnly) {
this.firstSampleTimestampUs = firstSampleTimestampUs;
this.idrKeyframesOnly = idrKeyframesOnly;
tsScratch = new ParsableBitArray(new byte[3]);
tsPacketBuffer = new ParsableByteArray(TS_PACKET_SIZE);
streamTypes = new SparseBooleanArray();
@ -103,6 +114,8 @@ public final class TsExtractor implements Extractor, SeekMap {
return RESULT_END_OF_INPUT;
}
// Note: see ISO/IEC 13818-1, section 2.4.3.2 for detailed information on the format of
// the header.
tsPacketBuffer.setPosition(0);
tsPacketBuffer.setLimit(TS_PACKET_SIZE);
int syncByte = tsPacketBuffer.readUnsignedByte();
@ -292,6 +305,8 @@ public final class TsExtractor implements Extractor, SeekMap {
data.skipBytes(pointerField);
}
// Note: see ISO/IEC 13818-1, section 2.4.4.8 for detailed information on the format of
// the header.
data.readBytes(pmtScratch, 3);
pmtScratch.skipBits(12); // table_id (8), section_syntax_indicator (1), '0' (1), reserved (2)
int sectionLength = pmtScratch.readBits(12);
@ -347,7 +362,8 @@ public final class TsExtractor implements Extractor, SeekMap {
break;
case TS_STREAM_TYPE_H264:
SeiReader seiReader = new SeiReader(output.track(TS_STREAM_TYPE_EIA608));
pesPayloadReader = new H264Reader(output.track(TS_STREAM_TYPE_H264), seiReader);
pesPayloadReader = new H264Reader(output.track(TS_STREAM_TYPE_H264), seiReader,
idrKeyframesOnly);
break;
case TS_STREAM_TYPE_ID3:
pesPayloadReader = id3Reader;
@ -502,6 +518,8 @@ public final class TsExtractor implements Extractor, SeekMap {
}
private boolean parseHeader() {
// Note: see ISO/IEC 13818-1, section 2.4.3.6 for detailed information on the format of
// the header.
pesScratch.setPosition(0);
int startCodePrefix = pesScratch.readBits(24);
if (startCodePrefix != 0x000001) {
@ -534,7 +552,7 @@ public final class TsExtractor implements Extractor, SeekMap {
pesScratch.setPosition(0);
timeUs = 0;
if (ptsFlag) {
pesScratch.skipBits(4); // '0010'
pesScratch.skipBits(4); // '0010' or '0011'
long pts = (long) pesScratch.readBits(3) << 30;
pesScratch.skipBits(1); // marker_bit
pts |= pesScratch.readBits(15) << 15;

View file

@ -0,0 +1,140 @@
/*
* Copyright (C) 2014 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.google.android.exoplayer.upstream;
import com.google.android.exoplayer.C;
import java.io.IOException;
import java.net.DatagramPacket;
import java.net.InetAddress;
import java.net.MulticastSocket;
/**
* A multicast {@link DataSource}.
*/
public class MulticastDataSource implements UriDataSource {

  /**
   * Thrown when an error is encountered when trying to read from a {@link MulticastDataSource}.
   */
  public static final class MulticastDataSourceException extends IOException {

    public MulticastDataSourceException(String message) {
      super(message);
    }

    public MulticastDataSourceException(IOException cause) {
      super(cause);
    }

  }

  // Default size in bytes of the datagram receive buffer.
  public static final int DEFAULT_MAX_PACKET_SIZE = 2000;
  // Number of received packets after which the TransferListener is cycled (end + start) in read().
  public static final int TRANSFER_LISTENER_PACKET_INTERVAL = 1000;

  private final TransferListener transferListener;
  // Reusable packet backed by packetBuffer, refilled by each socket.receive() call.
  private final DatagramPacket packet;

  private DataSpec dataSpec;
  private MulticastSocket socket;
  private boolean opened;
  // Packets received since the listener was last cycled.
  private int packetsReceived;
  private byte[] packetBuffer;
  // Bytes of the current packet not yet copied out by read().
  private int packetRemaining;

  public MulticastDataSource(TransferListener transferListener) {
    this(transferListener, DEFAULT_MAX_PACKET_SIZE);
  }

  /**
   * @param transferListener Listener notified of transfer starts, ends and byte counts.
   * @param maxPacketSize The maximum datagram size that can be received, in bytes.
   */
  public MulticastDataSource(TransferListener transferListener, int maxPacketSize) {
    this.transferListener = transferListener;
    packetBuffer = new byte[maxPacketSize];
    packet = new DatagramPacket(packetBuffer, 0, maxPacketSize);
  }

  @Override
  public long open(DataSpec dataSpec) throws MulticastDataSourceException {
    this.dataSpec = dataSpec;
    // NOTE(review): assumes the URI is a bare "host:port" string with no scheme prefix, since the
    // split is on the first ':' in the string — confirm against callers.
    String uri = dataSpec.uri.toString();
    String host = uri.substring(0, uri.indexOf(':'));
    int port = Integer.parseInt(uri.substring(uri.indexOf(':') + 1));
    try {
      socket = new MulticastSocket(port);
      socket.joinGroup(InetAddress.getByName(host));
    } catch (IOException e) {
      throw new MulticastDataSourceException(e);
    }
    opened = true;
    transferListener.onTransferStart();
    // The stream is unbounded; datagrams keep arriving until close().
    return C.LENGTH_UNBOUNDED;
  }

  @Override
  public void close() {
    if (opened) {
      // Closing the socket implicitly leaves any joined multicast group.
      socket.close();
      socket = null;
      transferListener.onTransferEnd();
      packetRemaining = 0;
      packetsReceived = 0;
      opened = false;
    }
  }

  @Override
  public int read(byte[] buffer, int offset, int readLength) throws MulticastDataSourceException {
    // if we've read all the data, get another packet
    if (packetRemaining == 0) {
      // Periodically cycle the listener so transfer accounting doesn't span the whole session.
      if (packetsReceived == TRANSFER_LISTENER_PACKET_INTERVAL) {
        transferListener.onTransferEnd();
        transferListener.onTransferStart();
        packetsReceived = 0;
      }
      try {
        // Blocks until a datagram arrives.
        socket.receive(packet);
      } catch (IOException e) {
        throw new MulticastDataSourceException(e);
      }
      packetRemaining = packet.getLength();
      transferListener.onBytesTransferred(packetRemaining);
      packetsReceived++;
    }
    // don't try to read too much
    if (packetRemaining < readLength) {
      readLength = packetRemaining;
    }
    // Resume copying from where the previous read() left off within the current packet.
    int packetOffset = packet.getLength() - packetRemaining;
    System.arraycopy(packetBuffer, packetOffset, buffer, offset, readLength);
    packetRemaining -= readLength;
    return readLength;
  }

  @Override
  public String getUri() {
    return dataSpec == null ? null : dataSpec.uri.toString();
  }

}

View file

@ -26,6 +26,7 @@ public final class ParsableBitArray {
// byte (from 0 to 7).
private int byteOffset;
private int bitOffset;
private int byteLimit;
/** Creates a new instance that initially has no backing data. */
public ParsableBitArray() {}
@ -36,7 +37,18 @@ public final class ParsableBitArray {
* @param data The data to wrap.
*/
public ParsableBitArray(byte[] data) {
  // Wrap the whole array; the byte limit defaults to the array length.
  this(data, data.length);
}
/**
* Creates a new instance that wraps an existing array.
*
* @param data The data to wrap.
* @param limit The limit in bytes.
*/
public ParsableBitArray(byte[] data, int limit) {
  this.data = data;
  // Reads are permitted up to, but not beyond, this byte limit.
  byteLimit = limit;
}
/**
@ -45,9 +57,27 @@ public final class ParsableBitArray {
* @param data The array to wrap.
*/
public void reset(byte[] data) {
  // The byte limit defaults to the full array length.
  reset(data, data.length);
}
/**
* Updates the instance to wrap {@code data}, and resets the position to zero.
*
* @param data The array to wrap.
* @param limit The limit in bytes.
*/
public void reset(byte[] data, int limit) {
  this.data = data;
  // Rewind to the start of the new data.
  byteOffset = 0;
  bitOffset = 0;
  byteLimit = limit;
}
/**
* Returns the number of bits yet to be read.
*/
public int bitsLeft() {
  // Remaining whole bytes expressed in bits, less the bits already consumed from the current byte.
  return 8 * (byteLimit - byteOffset) - bitOffset;
}
/**
@ -67,6 +97,7 @@ public final class ParsableBitArray {
public void setPosition(int position) {
  // The position is an absolute offset measured in bits.
  byteOffset = position / 8;
  bitOffset = position % 8;
  assertValidOffset();
}
/**
@ -81,6 +112,7 @@ public final class ParsableBitArray {
byteOffset++;
bitOffset -= 8;
}
assertValidOffset();
}
/**
@ -103,12 +135,20 @@ public final class ParsableBitArray {
return 0;
}
int retval = 0;
int returnValue = 0;
// While n >= 8, read whole bytes.
while (n >= 8) {
int byteValue;
if (bitOffset != 0) {
byteValue = ((data[byteOffset] & 0xFF) << bitOffset)
| ((data[byteOffset + 1] & 0xFF) >>> (8 - bitOffset));
} else {
byteValue = data[byteOffset];
}
n -= 8;
retval |= (readUnsignedByte() << n);
returnValue |= (byteValue & 0xFF) << n;
byteOffset++;
}
if (n > 0) {
@ -117,12 +157,12 @@ public final class ParsableBitArray {
if (nextBit > 8) {
// Combine bits from current byte and next byte.
retval |= (((getUnsignedByte(byteOffset) << (nextBit - 8)
| (getUnsignedByte(byteOffset + 1) >> (16 - nextBit))) & writeMask));
returnValue |= ((((data[byteOffset] & 0xFF) << (nextBit - 8)
| ((data[byteOffset + 1] & 0xFF) >> (16 - nextBit))) & writeMask));
byteOffset++;
} else {
// Bits to be read only within current byte.
retval |= ((getUnsignedByte(byteOffset) >> (8 - nextBit)) & writeMask);
returnValue |= (((data[byteOffset] & 0xFF) >> (8 - nextBit)) & writeMask);
if (nextBit == 8) {
byteOffset++;
}
@ -131,7 +171,27 @@ public final class ParsableBitArray {
bitOffset = nextBit % 8;
}
return retval;
assertValidOffset();
return returnValue;
}
/**
 * Peeks the length of an Exp-Golomb-coded integer (signed or unsigned) starting from the current
 * offset, returning the length or -1 if the limit is reached. The position is not moved.
 *
 * @return The length in bits of the Exp-Golomb-coded integer, or -1.
 */
public int peekExpGolombCodedNumLength() {
  int initialByteOffset = byteOffset;
  int initialBitOffset = bitOffset;
  int leadingZeros = 0;
  // Count leading zero bits until the terminating 1 bit, or until the limit is hit. Tracking
  // whether the 1 bit was actually seen (rather than comparing byteOffset against byteLimit
  // afterwards) avoids wrongly returning -1 when the 1 bit is exactly the last bit before the
  // limit, in which case the codeword length is fully determined.
  boolean foundTerminatingBit = false;
  while (byteOffset < byteLimit) {
    if (readBit()) {
      foundTerminatingBit = true;
      break;
    }
    leadingZeros++;
  }
  // Restore the original position; this method only peeks.
  byteOffset = initialByteOffset;
  bitOffset = initialBitOffset;
  // A codeword is leadingZeros zero bits, a 1 bit, then leadingZeros suffix bits; the caller is
  // responsible for checking that the suffix bits fit within bitsLeft().
  return foundTerminatingBit ? leadingZeros * 2 + 1 : -1;
}
/**
@ -153,22 +213,6 @@ public final class ParsableBitArray {
return ((codeNum % 2) == 0 ? -1 : 1) * ((codeNum + 1) / 2);
}
private int readUnsignedByte() {
int value;
if (bitOffset != 0) {
value = ((data[byteOffset] & 0xFF) << bitOffset)
| ((data[byteOffset + 1] & 0xFF) >>> (8 - bitOffset));
} else {
value = data[byteOffset];
}
byteOffset++;
return value & 0xFF;
}
private int getUnsignedByte(int offset) {
return data[offset] & 0xFF;
}
private int readExpGolombCodeNum() {
int leadingZeros = 0;
while (!readBit()) {
@ -177,4 +221,11 @@ public final class ParsableBitArray {
return (1 << leadingZeros) - 1 + (leadingZeros > 0 ? readBits(leadingZeros) : 0);
}
// Validates that the current read position lies within [0, byteLimit], throwing (via
// Assertions.checkState) if it does not.
private void assertValidOffset() {
  // It is fine for position to be at the end of the array, but no further.
  Assertions.checkState(byteOffset >= 0
      && (bitOffset >= 0 && bitOffset < 8)
      && (byteOffset < byteLimit || (byteOffset == byteLimit && bitOffset == 0)));
}
}

View file

@ -541,6 +541,22 @@ public final class Util {
return result;
}
/**
 * Returns a hex string representation of the data provided.
 *
 * @param data The byte array containing the data to be turned into a hex string.
 * @param beginIndex The begin index, inclusive.
 * @param endIndex The end index, exclusive.
 * @return A string containing the upper-case hex representation of the data provided.
 */
public static String getHexStringFromBytes(byte[] data, int beginIndex, int endIndex) {
  final String hexDigits = "0123456789ABCDEF";
  // Each byte expands to two characters, so size the builder accordingly. StringBuilder is
  // preferred to StringBuffer as no synchronization is required.
  StringBuilder hexString = new StringBuilder(2 * (endIndex - beginIndex));
  for (int i = beginIndex; i < endIndex; i++) {
    // Append the high and low nibbles directly, avoiding a String.format call per byte.
    int b = data[i] & 0xFF;
    hexString.append(hexDigits.charAt(b >>> 4)).append(hexDigits.charAt(b & 0x0F));
  }
  return hexString.toString();
}
/**
* Returns a user agent string based on the given application name and the library version.
*

View file

@ -0,0 +1,36 @@
/*
* Copyright (C) 2014 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.google.android.exoplayer;
import junit.framework.TestCase;
/**
* Unit test for {@link TimeRange}.
*/
public class TimeRangeTest extends TestCase {

  /** Tests the {@code equals} contract for {@link TimeRange}: reflexivity, symmetry for equal
   * values (with consistent hash codes), inequality for differing bounds, and inequality against
   * null and unrelated types. */
  public void testEquals() {
    // Reflexive.
    TimeRange timeRange1 = new TimeRange(TimeRange.TYPE_SNAPSHOT, 0, 30000000);
    assertTrue(timeRange1.equals(timeRange1));
    // Symmetric for equal values; equal objects must have equal hash codes.
    TimeRange timeRange2 = new TimeRange(TimeRange.TYPE_SNAPSHOT, 0, 30000000);
    assertTrue(timeRange1.equals(timeRange2));
    assertTrue(timeRange2.equals(timeRange1));
    assertEquals(timeRange1.hashCode(), timeRange2.hashCode());
    // Differing bounds are not equal.
    TimeRange timeRange3 = new TimeRange(TimeRange.TYPE_SNAPSHOT, 0, 60000000);
    assertFalse(timeRange1.equals(timeRange3));
    // Never equal to null or to an instance of an unrelated type.
    assertFalse(timeRange1.equals(null));
    assertFalse(timeRange1.equals("timeRange"));
  }

}

View file

@ -15,9 +15,11 @@
*/
package com.google.android.exoplayer.dash;
import static org.mockito.Mockito.mock;
import static org.mockito.Mockito.when;
import com.google.android.exoplayer.MediaFormat;
import com.google.android.exoplayer.TimeRange;
import com.google.android.exoplayer.TrackRenderer;
import com.google.android.exoplayer.chunk.ChunkOperationHolder;
import com.google.android.exoplayer.chunk.Format;
@ -55,12 +57,19 @@ public class DashChunkSourceTest extends InstrumentationTestCase {
private static final FormatEvaluator EVALUATOR = new FixedEvaluator();
private static final long AVAILABILITY_START_TIME = 0;
private static final long AVAILABILITY_LATENCY = 5000;
private static final long AVAILABILITY_REALTIME_OFFSET = 1000;
private static final long AVAILABILITY_CURRENT_TIME =
AVAILABILITY_START_TIME + AVAILABILITY_LATENCY - AVAILABILITY_REALTIME_OFFSET;
private static final FakeClock AVAILABILITY_CLOCK = new FakeClock(AVAILABILITY_CURRENT_TIME);
private static final long VOD_DURATION = 30000;
private static final long LIVE_SEGMENT_COUNT = 5;
private static final long LIVE_SEGMENT_DURATION_MS = 1000;
private static final long LIVE_TIMESHIFT_BUFFER_DEPTH_MS =
LIVE_SEGMENT_COUNT * LIVE_SEGMENT_DURATION_MS;
private static final long AVAILABILITY_START_TIME_MS = 60000;
private static final long AVAILABILITY_REALTIME_OFFSET_MS = 1000;
private static final long AVAILABILITY_CURRENT_TIME_MS =
AVAILABILITY_START_TIME_MS + LIVE_TIMESHIFT_BUFFER_DEPTH_MS - AVAILABILITY_REALTIME_OFFSET_MS;
private static final long LIVE_SEEK_BEYOND_EDGE_MS = 60000;
private static final int TALL_HEIGHT = 200;
private static final int WIDE_WIDTH = 400;
@ -90,6 +99,21 @@ public class DashChunkSourceTest extends InstrumentationTestCase {
assertEquals(TALL_HEIGHT, out.getMaxVideoHeight());
}
public void testGetSeekRangeOnVod() {
  DashChunkSource chunkSource = new DashChunkSource(generateVodMpd(), AdaptationSet.TYPE_VIDEO,
      null, null, mock(FormatEvaluator.class));
  chunkSource.enable();
  TimeRange seekRange = chunkSource.getSeekRange();
  // For VOD the seek range is expected to span [0, VOD_DURATION], both in microseconds and in
  // milliseconds.
  long[] seekRangeValuesUs = seekRange.getCurrentBoundsUs(null);
  assertEquals(0, seekRangeValuesUs[0]);
  assertEquals(VOD_DURATION * 1000, seekRangeValuesUs[1]);
  long[] seekRangeValuesMs = seekRange.getCurrentBoundsMs(null);
  assertEquals(0, seekRangeValuesMs[0]);
  assertEquals(VOD_DURATION, seekRangeValuesMs[1]);
}
public void testMaxVideoDimensionsLegacy() {
SingleSegmentBase segmentBase1 = new SingleSegmentBase("https://example.com/1.mp4");
Representation representation1 =
@ -107,147 +131,254 @@ public class DashChunkSourceTest extends InstrumentationTestCase {
assertEquals(TALL_HEIGHT, out.getMaxVideoHeight());
}
public void testLiveEdgeNoLatencyWithTimeline() {
DashChunkSource chunkSource = setupLiveEdgeTimelineTest(0L);
List<MediaChunk> queue = new ArrayList<MediaChunk>();
ChunkOperationHolder out = new ChunkOperationHolder();
chunkSource.getChunkOperation(queue, 0, 0, out);
public void testLiveEdgeNoLatency() {
long startTimeMs = 0;
long liveEdgeLatencyMs = 0;
long seekPositionMs = LIVE_SEEK_BEYOND_EDGE_MS;
long seekRangeStartMs = 0;
long seekRangeEndMs = LIVE_SEGMENT_COUNT * LIVE_SEGMENT_DURATION_MS - liveEdgeLatencyMs;
long chunkStartTimeMs = 4000;
long chunkEndTimeMs = 5000;
assertEquals(4000000L, ((MediaChunk) out.chunk).startTimeUs);
assertEquals(5000000L, ((MediaChunk) out.chunk).endTimeUs);
checkLiveEdgeLatencyWithTimeline(startTimeMs, liveEdgeLatencyMs, seekPositionMs,
seekRangeStartMs, seekRangeEndMs, chunkStartTimeMs, chunkEndTimeMs);
checkLiveEdgeLatencyWithTemplateAndUnlimitedTimeshift(startTimeMs, liveEdgeLatencyMs,
seekPositionMs, seekRangeEndMs, chunkStartTimeMs, chunkEndTimeMs);
checkLiveEdgeLatencyWithTemplateAndLimitedTimeshift(startTimeMs, liveEdgeLatencyMs,
seekPositionMs, seekRangeStartMs, seekRangeEndMs, chunkStartTimeMs, chunkEndTimeMs);
}
public void testLiveEdge500msLatencyWithTimeline() {
DashChunkSource chunkSource = setupLiveEdgeTimelineTest(500L);
List<MediaChunk> queue = new ArrayList<MediaChunk>();
ChunkOperationHolder out = new ChunkOperationHolder();
chunkSource.getChunkOperation(queue, 0, 0, out);
public void testLiveEdgeAlmostNoLatency() {
long startTimeMs = 0;
long liveEdgeLatencyMs = 1;
long seekPositionMs = LIVE_SEEK_BEYOND_EDGE_MS;
long seekRangeStartMs = 0;
long seekRangeEndMs = LIVE_SEGMENT_COUNT * LIVE_SEGMENT_DURATION_MS - liveEdgeLatencyMs;
long chunkStartTimeMs = 4000;
long chunkEndTimeMs = 5000;
assertEquals(4000000L, ((MediaChunk) out.chunk).startTimeUs);
assertEquals(5000000L, ((MediaChunk) out.chunk).endTimeUs);
checkLiveEdgeLatencyWithTimeline(startTimeMs, liveEdgeLatencyMs, seekPositionMs,
seekRangeStartMs, seekRangeEndMs, chunkStartTimeMs, chunkEndTimeMs);
checkLiveEdgeLatencyWithTemplateAndUnlimitedTimeshift(startTimeMs, liveEdgeLatencyMs,
seekPositionMs, seekRangeEndMs, chunkStartTimeMs, chunkEndTimeMs);
checkLiveEdgeLatencyWithTemplateAndLimitedTimeshift(startTimeMs, liveEdgeLatencyMs,
seekPositionMs, seekRangeStartMs, seekRangeEndMs, chunkStartTimeMs, chunkEndTimeMs);
}
public void testLiveEdge1000msLatencyWithTimeline() {
DashChunkSource chunkSource = setupLiveEdgeTimelineTest(1000L);
List<MediaChunk> queue = new ArrayList<MediaChunk>();
ChunkOperationHolder out = new ChunkOperationHolder();
chunkSource.getChunkOperation(queue, 0, 0, out);
public void testLiveEdge500msLatency() {
long startTimeMs = 0;
long liveEdgeLatencyMs = 500;
long seekPositionMs = LIVE_SEEK_BEYOND_EDGE_MS;
long seekRangeStartMs = 0;
long seekRangeEndMs = LIVE_SEGMENT_COUNT * LIVE_SEGMENT_DURATION_MS - liveEdgeLatencyMs;
long chunkStartTimeMs = 4000;
long chunkEndTimeMs = 5000;
assertEquals(4000000L, ((MediaChunk) out.chunk).startTimeUs);
assertEquals(5000000L, ((MediaChunk) out.chunk).endTimeUs);
checkLiveEdgeLatencyWithTimeline(startTimeMs, liveEdgeLatencyMs, seekPositionMs,
seekRangeStartMs, seekRangeEndMs, chunkStartTimeMs, chunkEndTimeMs);
checkLiveEdgeLatencyWithTemplateAndUnlimitedTimeshift(startTimeMs, liveEdgeLatencyMs,
seekPositionMs, seekRangeEndMs, chunkStartTimeMs, chunkEndTimeMs);
checkLiveEdgeLatencyWithTemplateAndLimitedTimeshift(startTimeMs, liveEdgeLatencyMs,
seekPositionMs, seekRangeStartMs, seekRangeEndMs, chunkStartTimeMs, chunkEndTimeMs);
}
public void testLiveEdge1001msLatencyWithTimeline() {
DashChunkSource chunkSource = setupLiveEdgeTimelineTest(1001L);
List<MediaChunk> queue = new ArrayList<MediaChunk>();
ChunkOperationHolder out = new ChunkOperationHolder();
chunkSource.getChunkOperation(queue, 0, 0, out);
public void testLiveEdge1000msLatency() {
long startTimeMs = 0;
long liveEdgeLatencyMs = 1000;
long seekPositionMs = LIVE_SEEK_BEYOND_EDGE_MS;
long seekRangeStartMs = 0;
long seekRangeEndMs = LIVE_SEGMENT_COUNT * LIVE_SEGMENT_DURATION_MS - liveEdgeLatencyMs;
long chunkStartTimeMs = 4000;
long chunkEndTimeMs = 5000;
assertEquals(3000000L, ((MediaChunk) out.chunk).startTimeUs);
assertEquals(4000000L, ((MediaChunk) out.chunk).endTimeUs);
checkLiveEdgeLatencyWithTimeline(startTimeMs, liveEdgeLatencyMs, seekPositionMs,
seekRangeStartMs, seekRangeEndMs, chunkStartTimeMs, chunkEndTimeMs);
checkLiveEdgeLatencyWithTemplateAndUnlimitedTimeshift(startTimeMs, liveEdgeLatencyMs,
seekPositionMs, seekRangeEndMs, chunkStartTimeMs, chunkEndTimeMs);
checkLiveEdgeLatencyWithTemplateAndLimitedTimeshift(startTimeMs, liveEdgeLatencyMs,
seekPositionMs, seekRangeStartMs, seekRangeEndMs, chunkStartTimeMs, chunkEndTimeMs);
}
public void testLiveEdge2500msLatencyWithTimeline() {
DashChunkSource chunkSource = setupLiveEdgeTimelineTest(2500L);
List<MediaChunk> queue = new ArrayList<MediaChunk>();
ChunkOperationHolder out = new ChunkOperationHolder();
chunkSource.getChunkOperation(queue, 0, 0, out);
public void testLiveEdge1001msLatency() {
long startTimeMs = 0;
long liveEdgeLatencyMs = 1001;
long seekPositionMs = LIVE_SEEK_BEYOND_EDGE_MS;
long seekRangeStartMs = 0;
long seekRangeEndMs = LIVE_SEGMENT_COUNT * LIVE_SEGMENT_DURATION_MS - liveEdgeLatencyMs;
long chunkStartTimeMs = 3000;
long chunkEndTimeMs = 4000;
assertEquals(2000000L, ((MediaChunk) out.chunk).startTimeUs);
assertEquals(3000000L, ((MediaChunk) out.chunk).endTimeUs);
checkLiveEdgeLatencyWithTimeline(startTimeMs, liveEdgeLatencyMs, seekPositionMs,
seekRangeStartMs, seekRangeEndMs, chunkStartTimeMs, chunkEndTimeMs);
checkLiveEdgeLatencyWithTemplateAndUnlimitedTimeshift(startTimeMs, liveEdgeLatencyMs,
seekPositionMs, seekRangeEndMs, chunkStartTimeMs, chunkEndTimeMs);
checkLiveEdgeLatencyWithTemplateAndLimitedTimeshift(startTimeMs, liveEdgeLatencyMs,
seekPositionMs, seekRangeStartMs, seekRangeEndMs, chunkStartTimeMs, chunkEndTimeMs);
}
public void testLiveEdgeVeryHighLatencyWithTimeline() {
DashChunkSource chunkSource = setupLiveEdgeTimelineTest(10000L);
List<MediaChunk> queue = new ArrayList<MediaChunk>();
ChunkOperationHolder out = new ChunkOperationHolder();
chunkSource.getChunkOperation(queue, 0, 0, out);
public void testLiveEdge2500msLatency() {
long startTimeMs = 0;
long liveEdgeLatencyMs = 2500;
long seekPositionMs = LIVE_SEEK_BEYOND_EDGE_MS;
long seekRangeStartMs = 0;
long seekRangeEndMs = LIVE_SEGMENT_COUNT * LIVE_SEGMENT_DURATION_MS - liveEdgeLatencyMs;
long chunkStartTimeMs = 2000;
long chunkEndTimeMs = 3000;
assertEquals(0L, ((MediaChunk) out.chunk).startTimeUs);
assertEquals(1000000L, ((MediaChunk) out.chunk).endTimeUs);
checkLiveEdgeLatencyWithTimeline(startTimeMs, liveEdgeLatencyMs, seekPositionMs,
seekRangeStartMs, seekRangeEndMs, chunkStartTimeMs, chunkEndTimeMs);
checkLiveEdgeLatencyWithTemplateAndUnlimitedTimeshift(startTimeMs, liveEdgeLatencyMs,
seekPositionMs, seekRangeEndMs, chunkStartTimeMs, chunkEndTimeMs);
checkLiveEdgeLatencyWithTemplateAndLimitedTimeshift(startTimeMs, liveEdgeLatencyMs,
seekPositionMs, seekRangeStartMs, seekRangeEndMs, chunkStartTimeMs, chunkEndTimeMs);
}
public void testLiveEdgeNoLatencyWithTemplate() {
DashChunkSource chunkSource = setupLiveEdgeTemplateTest(0L);
List<MediaChunk> queue = new ArrayList<MediaChunk>();
ChunkOperationHolder out = new ChunkOperationHolder();
chunkSource.getChunkOperation(queue, 0, 0, out);
public void testLiveEdgeVeryHighLatency() {
long startTimeMs = 0;
long liveEdgeLatencyMs = 10000;
long seekPositionMs = LIVE_SEEK_BEYOND_EDGE_MS;
long seekRangeStartMs = 0;
long seekRangeEndMs = 0;
long chunkStartTimeMs = 0;
long chunkEndTimeMs = 1000;
// this should actually return the "5th" segment, but it currently returns the "6th", which
// doesn't actually exist yet; this will be resolved in a subsequent cl (cl/87518875).
//assertEquals(4000000L, ((MediaChunk) out.chunk).startTimeUs);
//assertEquals(5000000L, ((MediaChunk) out.chunk).endTimeUs);
checkLiveEdgeLatencyWithTimeline(startTimeMs, liveEdgeLatencyMs, seekPositionMs,
seekRangeStartMs, seekRangeEndMs, chunkStartTimeMs, chunkEndTimeMs);
checkLiveEdgeLatencyWithTemplateAndUnlimitedTimeshift(startTimeMs, liveEdgeLatencyMs,
seekPositionMs, seekRangeEndMs, chunkStartTimeMs, chunkEndTimeMs);
checkLiveEdgeLatencyWithTemplateAndLimitedTimeshift(startTimeMs, liveEdgeLatencyMs,
seekPositionMs, seekRangeStartMs, seekRangeEndMs, chunkStartTimeMs, chunkEndTimeMs);
}
public void testLiveEdgeAlmostNoLatencyWithTemplate() {
DashChunkSource chunkSource = setupLiveEdgeTemplateTest(1L);
List<MediaChunk> queue = new ArrayList<MediaChunk>();
ChunkOperationHolder out = new ChunkOperationHolder();
chunkSource.getChunkOperation(queue, 0, 0, out);
public void testLiveEdgeNoLatencyInProgress() {
long startTimeMs = 3000;
long liveEdgeLatencyMs = 0;
long seekPositionMs = LIVE_SEEK_BEYOND_EDGE_MS;
long seekRangeStartMs = 3000;
long seekRangeEndMs = 3000 + LIVE_SEGMENT_COUNT * LIVE_SEGMENT_DURATION_MS - liveEdgeLatencyMs;
long chunkStartTimeMs = 7000;
long chunkEndTimeMs = 8000;
assertEquals(4000000L, ((MediaChunk) out.chunk).startTimeUs);
assertEquals(5000000L, ((MediaChunk) out.chunk).endTimeUs);
checkLiveEdgeLatencyWithTimeline(startTimeMs, liveEdgeLatencyMs, seekPositionMs,
seekRangeStartMs, seekRangeEndMs, chunkStartTimeMs, chunkEndTimeMs);
checkLiveEdgeLatencyWithTemplateAndUnlimitedTimeshift(startTimeMs, liveEdgeLatencyMs,
seekPositionMs, seekRangeEndMs, chunkStartTimeMs, chunkEndTimeMs);
checkLiveEdgeLatencyWithTemplateAndLimitedTimeshift(startTimeMs, liveEdgeLatencyMs,
seekPositionMs, seekRangeStartMs, seekRangeEndMs, chunkStartTimeMs, chunkEndTimeMs);
}
public void testLiveEdge500msLatencyWithTemplate() {
DashChunkSource chunkSource = setupLiveEdgeTemplateTest(500L);
List<MediaChunk> queue = new ArrayList<MediaChunk>();
ChunkOperationHolder out = new ChunkOperationHolder();
chunkSource.getChunkOperation(queue, 0, 0, out);
public void testLiveEdgeAlmostNoLatencyInProgress() {
long startTimeMs = 3000;
long liveEdgeLatencyMs = 1;
long seekPositionMs = LIVE_SEEK_BEYOND_EDGE_MS;
long seekRangeStartMs = 3000;
long seekRangeEndMs = 3000 + LIVE_SEGMENT_COUNT * LIVE_SEGMENT_DURATION_MS - liveEdgeLatencyMs;
long chunkStartTimeMs = 7000;
long chunkEndTimeMs = 8000;
assertEquals(4000000L, ((MediaChunk) out.chunk).startTimeUs);
assertEquals(5000000L, ((MediaChunk) out.chunk).endTimeUs);
checkLiveEdgeLatencyWithTimeline(startTimeMs, liveEdgeLatencyMs, seekPositionMs,
seekRangeStartMs, seekRangeEndMs, chunkStartTimeMs, chunkEndTimeMs);
checkLiveEdgeLatencyWithTemplateAndUnlimitedTimeshift(startTimeMs, liveEdgeLatencyMs,
seekPositionMs, seekRangeEndMs, chunkStartTimeMs, chunkEndTimeMs);
checkLiveEdgeLatencyWithTemplateAndLimitedTimeshift(startTimeMs, liveEdgeLatencyMs,
seekPositionMs, seekRangeStartMs, seekRangeEndMs, chunkStartTimeMs, chunkEndTimeMs);
}
public void testLiveEdge1000msLatencyWithTemplate() {
DashChunkSource chunkSource = setupLiveEdgeTemplateTest(1000L);
List<MediaChunk> queue = new ArrayList<MediaChunk>();
ChunkOperationHolder out = new ChunkOperationHolder();
chunkSource.getChunkOperation(queue, 0, 0, out);
public void testLiveEdge500msLatencyInProgress() {
long startTimeMs = 3000;
long liveEdgeLatencyMs = 500;
long seekPositionMs = LIVE_SEEK_BEYOND_EDGE_MS;
long seekRangeStartMs = 3000;
long seekRangeEndMs = 3000 + LIVE_SEGMENT_COUNT * LIVE_SEGMENT_DURATION_MS - liveEdgeLatencyMs;
long chunkStartTimeMs = 7000;
long chunkEndTimeMs = 8000;
assertEquals(4000000L, ((MediaChunk) out.chunk).startTimeUs);
assertEquals(5000000L, ((MediaChunk) out.chunk).endTimeUs);
checkLiveEdgeLatencyWithTimeline(startTimeMs, liveEdgeLatencyMs, seekPositionMs,
seekRangeStartMs, seekRangeEndMs, chunkStartTimeMs, chunkEndTimeMs);
checkLiveEdgeLatencyWithTemplateAndUnlimitedTimeshift(startTimeMs, liveEdgeLatencyMs,
seekPositionMs, seekRangeEndMs, chunkStartTimeMs, chunkEndTimeMs);
checkLiveEdgeLatencyWithTemplateAndLimitedTimeshift(startTimeMs, liveEdgeLatencyMs,
seekPositionMs, seekRangeStartMs, seekRangeEndMs, chunkStartTimeMs, chunkEndTimeMs);
}
public void testLiveEdge1001msLatencyWithTemplate() {
DashChunkSource chunkSource = setupLiveEdgeTemplateTest(1001L);
List<MediaChunk> queue = new ArrayList<MediaChunk>();
ChunkOperationHolder out = new ChunkOperationHolder();
chunkSource.getChunkOperation(queue, 0, 0, out);
public void testLiveEdge1000msLatencyInProgress() {
long startTimeMs = 3000;
long liveEdgeLatencyMs = 1000;
long seekPositionMs = LIVE_SEEK_BEYOND_EDGE_MS;
long seekRangeStartMs = 3000;
long seekRangeEndMs = 3000 + LIVE_SEGMENT_COUNT * LIVE_SEGMENT_DURATION_MS - liveEdgeLatencyMs;
long chunkStartTimeMs = 7000;
long chunkEndTimeMs = 8000;
assertEquals(3000000L, ((MediaChunk) out.chunk).startTimeUs);
assertEquals(4000000L, ((MediaChunk) out.chunk).endTimeUs);
checkLiveEdgeLatencyWithTimeline(startTimeMs, liveEdgeLatencyMs, seekPositionMs,
seekRangeStartMs, seekRangeEndMs, chunkStartTimeMs, chunkEndTimeMs);
checkLiveEdgeLatencyWithTemplateAndUnlimitedTimeshift(startTimeMs, liveEdgeLatencyMs,
seekPositionMs, seekRangeEndMs, chunkStartTimeMs, chunkEndTimeMs);
checkLiveEdgeLatencyWithTemplateAndLimitedTimeshift(startTimeMs, liveEdgeLatencyMs,
seekPositionMs, seekRangeStartMs, seekRangeEndMs, chunkStartTimeMs, chunkEndTimeMs);
}
public void testLiveEdge2500msLatencyWithTemplate() {
DashChunkSource chunkSource = setupLiveEdgeTemplateTest(2500L);
List<MediaChunk> queue = new ArrayList<MediaChunk>();
ChunkOperationHolder out = new ChunkOperationHolder();
chunkSource.getChunkOperation(queue, 0, 0, out);
public void testLiveEdge1001msLatencyInProgress() {
long startTimeMs = 3000;
long liveEdgeLatencyMs = 1001;
long seekPositionMs = LIVE_SEEK_BEYOND_EDGE_MS;
long seekRangeStartMs = 3000;
long seekRangeEndMs = 3000 + LIVE_SEGMENT_COUNT * LIVE_SEGMENT_DURATION_MS - liveEdgeLatencyMs;
long chunkStartTimeMs = 6000;
long chunkEndTimeMs = 7000;
assertEquals(2000000L, ((MediaChunk) out.chunk).startTimeUs);
assertEquals(3000000L, ((MediaChunk) out.chunk).endTimeUs);
checkLiveEdgeLatencyWithTimeline(startTimeMs, liveEdgeLatencyMs, seekPositionMs,
seekRangeStartMs, seekRangeEndMs, chunkStartTimeMs, chunkEndTimeMs);
checkLiveEdgeLatencyWithTemplateAndUnlimitedTimeshift(startTimeMs, liveEdgeLatencyMs,
seekPositionMs, seekRangeEndMs, chunkStartTimeMs, chunkEndTimeMs);
checkLiveEdgeLatencyWithTemplateAndLimitedTimeshift(startTimeMs, liveEdgeLatencyMs,
seekPositionMs, seekRangeStartMs, seekRangeEndMs, chunkStartTimeMs, chunkEndTimeMs);
}
public void testLiveEdgeVeryHighLatencyWithTemplate() {
DashChunkSource chunkSource = setupLiveEdgeTemplateTest(10000L);
List<MediaChunk> queue = new ArrayList<MediaChunk>();
ChunkOperationHolder out = new ChunkOperationHolder();
chunkSource.getChunkOperation(queue, 0, 0, out);
public void testLiveEdge2500msLatencyInProgress() {
long startTimeMs = 3000;
long liveEdgeLatencyMs = 2500;
long seekPositionMs = LIVE_SEEK_BEYOND_EDGE_MS;
long seekRangeStartMs = 3000;
long seekRangeEndMs = 3000 + LIVE_SEGMENT_COUNT * LIVE_SEGMENT_DURATION_MS - liveEdgeLatencyMs;
long chunkStartTimeMs = 5000;
long chunkEndTimeMs = 6000;
assertEquals(0L, ((MediaChunk) out.chunk).startTimeUs);
assertEquals(1000000L, ((MediaChunk) out.chunk).endTimeUs);
checkLiveEdgeLatencyWithTimeline(startTimeMs, liveEdgeLatencyMs, seekPositionMs,
seekRangeStartMs, seekRangeEndMs, chunkStartTimeMs, chunkEndTimeMs);
checkLiveEdgeLatencyWithTemplateAndUnlimitedTimeshift(startTimeMs, liveEdgeLatencyMs,
seekPositionMs, seekRangeEndMs, chunkStartTimeMs, chunkEndTimeMs);
checkLiveEdgeLatencyWithTemplateAndLimitedTimeshift(startTimeMs, liveEdgeLatencyMs,
seekPositionMs, seekRangeStartMs, seekRangeEndMs, chunkStartTimeMs, chunkEndTimeMs);
}
/**
 * Checks behavior at the live edge when the requested latency (10s) exceeds the available
 * window for an in-progress stream starting at t=3000ms: the seek range collapses to a single
 * point (start == end == 3000ms) and playback selects the first available chunk.
 */
public void testLiveEdgeVeryHighLatencyInProgress() {
  final long startTimeMs = 3000;
  final long liveEdgeLatencyMs = 10000;
  final long seekPositionMs = LIVE_SEEK_BEYOND_EDGE_MS;
  // Latency larger than the window: the range degenerates to its start point.
  final long seekRangeStartMs = 3000;
  final long seekRangeEndMs = 3000;
  final long chunkStartTimeMs = 3000;
  final long chunkEndTimeMs = 4000;
  checkLiveEdgeLatencyWithTimeline(startTimeMs, liveEdgeLatencyMs, seekPositionMs,
      seekRangeStartMs, seekRangeEndMs, chunkStartTimeMs, chunkEndTimeMs);
  // With an unlimited timeshift buffer the expectations differ (range pinned at zero).
  checkLiveEdgeLatencyWithTemplateAndUnlimitedTimeshift(startTimeMs, liveEdgeLatencyMs,
      seekPositionMs, 0, 0, 1000);
  checkLiveEdgeLatencyWithTemplateAndLimitedTimeshift(startTimeMs, liveEdgeLatencyMs,
      seekPositionMs, seekRangeStartMs, seekRangeEndMs, chunkStartTimeMs, chunkEndTimeMs);
}
private static MediaPresentationDescription generateMpd(boolean live,
List<Representation> representations) {
List<Representation> representations, boolean limitTimeshiftBuffer) {
Representation firstRepresentation = representations.get(0);
AdaptationSet adaptationSet = new AdaptationSet(0, AdaptationSet.TYPE_UNKNOWN, representations);
Period period = new Period(null, firstRepresentation.periodStartMs,
firstRepresentation.periodDurationMs, Collections.singletonList(adaptationSet));
long duration = (live) ? TrackRenderer.UNKNOWN_TIME_US
: firstRepresentation.periodDurationMs - firstRepresentation.periodStartMs;
return new MediaPresentationDescription(AVAILABILITY_START_TIME, duration, -1, live, -1, -1,
return new MediaPresentationDescription(AVAILABILITY_START_TIME_MS, duration, -1, live, -1,
(limitTimeshiftBuffer) ? LIVE_TIMESHIFT_BUFFER_DEPTH_MS : -1,
null, Collections.singletonList(period));
}
@ -256,72 +387,126 @@ public class DashChunkSourceTest extends InstrumentationTestCase {
SingleSegmentBase segmentBase1 = new SingleSegmentBase("https://example.com/1.mp4");
Representation representation1 =
Representation.newInstance(0, 0, null, 0, TALL_VIDEO, segmentBase1);
Representation.newInstance(0, VOD_DURATION, null, 0, TALL_VIDEO, segmentBase1);
representations.add(representation1);
SingleSegmentBase segmentBase2 = new SingleSegmentBase("https://example.com/2.mp4");
Representation representation2 =
Representation.newInstance(0, 0, null, 0, WIDE_VIDEO, segmentBase2);
Representation.newInstance(0, VOD_DURATION, null, 0, WIDE_VIDEO, segmentBase2);
representations.add(representation2);
return generateMpd(false, representations);
return generateMpd(false, representations, false);
}
private static MediaPresentationDescription generateLiveMpdWithTimeline() {
private static MediaPresentationDescription generateLiveMpdWithTimeline(long startTime) {
List<Representation> representations = new ArrayList<Representation>();
List<SegmentTimelineElement> segmentTimeline = new ArrayList<SegmentTimelineElement>();
segmentTimeline.add(new SegmentTimelineElement(0L, 1000L));
segmentTimeline.add(new SegmentTimelineElement(1000L, 1000L));
segmentTimeline.add(new SegmentTimelineElement(2000L, 1000L));
segmentTimeline.add(new SegmentTimelineElement(3000L, 1000L));
segmentTimeline.add(new SegmentTimelineElement(4000L, 1000L));
List<RangedUri> mediaSegments = new ArrayList<RangedUri>();
mediaSegments.add(new RangedUri("", "", 0L, 500L));
mediaSegments.add(new RangedUri("", "", 500L, 500L));
mediaSegments.add(new RangedUri("", "", 1000L, 500L));
mediaSegments.add(new RangedUri("", "", 1500L, 500L));
mediaSegments.add(new RangedUri("", "", 2000L, 500L));
long byteStart = 0;
for (int i = 0; i < LIVE_SEGMENT_COUNT; i++) {
segmentTimeline.add(new SegmentTimelineElement(startTime, LIVE_SEGMENT_DURATION_MS));
mediaSegments.add(new RangedUri("", "", byteStart, 500L));
startTime += LIVE_SEGMENT_DURATION_MS;
byteStart += 500;
}
MultiSegmentBase segmentBase = new SegmentList(null, 1000, 0,
TrackRenderer.UNKNOWN_TIME_US, 1, TrackRenderer.UNKNOWN_TIME_US, segmentTimeline,
TrackRenderer.UNKNOWN_TIME_US, 0, TrackRenderer.UNKNOWN_TIME_US, segmentTimeline,
mediaSegments);
Representation representation = Representation.newInstance(0, TrackRenderer.UNKNOWN_TIME_US,
null, 0, REGULAR_VIDEO, segmentBase);
Representation representation = Representation.newInstance(startTime,
TrackRenderer.UNKNOWN_TIME_US, null, 0, REGULAR_VIDEO, segmentBase);
representations.add(representation);
return generateMpd(true, representations);
return generateMpd(true, representations, false);
}
private static MediaPresentationDescription generateLiveMpdWithTemplate() {
private static MediaPresentationDescription generateLiveMpdWithTemplate(
boolean limitTimeshiftBuffer) {
List<Representation> representations = new ArrayList<Representation>();
UrlTemplate initializationTemplate = null;
UrlTemplate mediaTemplate = UrlTemplate.compile("$RepresentationID$/$Number$");
MultiSegmentBase segmentBase = new SegmentTemplate(null, 1000, 0,
TrackRenderer.UNKNOWN_TIME_US, 1, 1000, null,
TrackRenderer.UNKNOWN_TIME_US, 0, LIVE_SEGMENT_DURATION_MS, null,
initializationTemplate, mediaTemplate, "http://www.youtube.com");
Representation representation = Representation.newInstance(0, TrackRenderer.UNKNOWN_TIME_US,
null, 0, REGULAR_VIDEO, segmentBase);
representations.add(representation);
return generateMpd(true, representations);
return generateMpd(true, representations, limitTimeshiftBuffer);
}
private DashChunkSource setupLiveEdgeTimelineTest(long liveEdgeLatencyMs) {
MediaPresentationDescription manifest = generateLiveMpdWithTimeline();
private DashChunkSource setupLiveEdgeTimelineTest(long startTime, long liveEdgeLatencyMs) {
MediaPresentationDescription manifest = generateLiveMpdWithTimeline(startTime);
when(mockManifestFetcher.getManifest()).thenReturn(manifest);
return new DashChunkSource(mockManifestFetcher, manifest, AdaptationSet.TYPE_VIDEO, null,
mockDataSource, EVALUATOR, AVAILABILITY_CLOCK, liveEdgeLatencyMs * 1000,
AVAILABILITY_REALTIME_OFFSET * 1000);
DashChunkSource chunkSource = new DashChunkSource(mockManifestFetcher, manifest,
AdaptationSet.TYPE_VIDEO, null, mockDataSource, EVALUATOR,
new FakeClock(AVAILABILITY_CURRENT_TIME_MS + startTime), liveEdgeLatencyMs * 1000,
AVAILABILITY_REALTIME_OFFSET_MS * 1000, null, null);
chunkSource.enable();
return chunkSource;
}
private DashChunkSource setupLiveEdgeTemplateTest(long liveEdgeLatencyMs) {
MediaPresentationDescription manifest = generateLiveMpdWithTemplate();
private DashChunkSource setupLiveEdgeTemplateTest(long startTime, long liveEdgeLatencyMs,
boolean limitTimeshiftBuffer) {
MediaPresentationDescription manifest = generateLiveMpdWithTemplate(limitTimeshiftBuffer);
when(mockManifestFetcher.getManifest()).thenReturn(manifest);
return new DashChunkSource(mockManifestFetcher, manifest, AdaptationSet.TYPE_VIDEO, null,
mockDataSource, EVALUATOR, AVAILABILITY_CLOCK, liveEdgeLatencyMs * 1000,
AVAILABILITY_REALTIME_OFFSET * 1000);
DashChunkSource chunkSource = new DashChunkSource(mockManifestFetcher, manifest,
AdaptationSet.TYPE_VIDEO, null, mockDataSource, EVALUATOR,
new FakeClock(AVAILABILITY_CURRENT_TIME_MS + startTime), liveEdgeLatencyMs * 1000,
AVAILABILITY_REALTIME_OFFSET_MS * 1000, null, null);
chunkSource.enable();
return chunkSource;
}
/**
 * Builds a timeline-based live chunk source, requests the chunk at {@code seekPositionMs}, and
 * asserts both the reported seek range bounds and the selected chunk's start/end times.
 */
private void checkLiveEdgeLatencyWithTimeline(long startTimeMs, long liveEdgeLatencyMs,
    long seekPositionMs, long seekRangeStartMs, long seekRangeEndMs, long chunkStartTimeMs,
    long chunkEndTimeMs) {
  DashChunkSource chunkSource = setupLiveEdgeTimelineTest(startTimeMs, liveEdgeLatencyMs);
  ChunkOperationHolder holder = new ChunkOperationHolder();
  chunkSource.getChunkOperation(new ArrayList<MediaChunk>(), seekPositionMs * 1000, 0, holder);
  TimeRange seekRange = chunkSource.getSeekRange();
  assertNotNull(holder.chunk);
  long[] boundsUs = seekRange.getCurrentBoundsUs(null);
  assertEquals(seekRangeStartMs * 1000, boundsUs[0]);
  assertEquals(seekRangeEndMs * 1000, boundsUs[1]);
  MediaChunk mediaChunk = (MediaChunk) holder.chunk;
  assertEquals(chunkStartTimeMs * 1000, mediaChunk.startTimeUs);
  assertEquals(chunkEndTimeMs * 1000, mediaChunk.endTimeUs);
}
/**
 * Builds a template-based live chunk source (optionally with a limited timeshift buffer),
 * requests the chunk at {@code seekPositionMs}, and asserts both the reported seek range
 * bounds and the selected chunk's start/end times.
 */
private void checkLiveEdgeLatencyWithTemplate(long startTimeMs, long liveEdgeLatencyMs,
    long seekPositionMs, long seekRangeStartMs, long seekRangeEndMs, long chunkStartTimeMs,
    long chunkEndTimeMs, boolean limitTimeshiftBuffer) {
  DashChunkSource chunkSource = setupLiveEdgeTemplateTest(startTimeMs, liveEdgeLatencyMs,
      limitTimeshiftBuffer);
  ChunkOperationHolder holder = new ChunkOperationHolder();
  chunkSource.getChunkOperation(new ArrayList<MediaChunk>(), seekPositionMs * 1000, 0, holder);
  TimeRange seekRange = chunkSource.getSeekRange();
  assertNotNull(holder.chunk);
  long[] boundsUs = seekRange.getCurrentBoundsUs(null);
  assertEquals(seekRangeStartMs * 1000, boundsUs[0]);
  assertEquals(seekRangeEndMs * 1000, boundsUs[1]);
  MediaChunk mediaChunk = (MediaChunk) holder.chunk;
  assertEquals(chunkStartTimeMs * 1000, mediaChunk.startTimeUs);
  assertEquals(chunkEndTimeMs * 1000, mediaChunk.endTimeUs);
}
/**
 * Runs the template check with an unlimited timeshift buffer, in which case the expected seek
 * range always starts at position zero.
 */
private void checkLiveEdgeLatencyWithTemplateAndUnlimitedTimeshift(long startTimeMs,
    long liveEdgeLatencyMs, long seekPositionMs, long seekRangeEndMs,
    long chunkStartTimeMs, long chunkEndTimeMs) {
  // Unlimited buffer: seek range start is fixed at 0.
  long seekRangeStartMs = 0;
  checkLiveEdgeLatencyWithTemplate(startTimeMs, liveEdgeLatencyMs, seekPositionMs,
      seekRangeStartMs, seekRangeEndMs, chunkStartTimeMs, chunkEndTimeMs, false);
}
/**
 * Runs the template check with a limited timeshift buffer, so the caller supplies the expected
 * seek range start explicitly.
 */
private void checkLiveEdgeLatencyWithTemplateAndLimitedTimeshift(long startTimeMs,
    long liveEdgeLatencyMs, long seekPositionMs, long seekRangeStartMs, long seekRangeEndMs,
    long chunkStartTimeMs, long chunkEndTimeMs) {
  checkLiveEdgeLatencyWithTemplate(startTimeMs, liveEdgeLatencyMs, seekPositionMs,
      seekRangeStartMs, seekRangeEndMs, chunkStartTimeMs, chunkEndTimeMs,
      /* limitTimeshiftBuffer= */ true);
}
}