Multi-track - The (nearly) final step.

- Migrate demo app to use new APIs.
- Add multi-track support for ExtractorSampleSource case.
- Add multi-track support for SmoothStreaming use case.

The final step is to add support back for the DASH use case and
delete MultiTrackChunkSource. This is blocked on multi-period support
landing, in order to prevent a horrendous merge conflict. We also
need to update HLS to expose sensible track information.

Issue: #514
This commit is contained in:
Oliver Woodman 2015-08-19 16:29:30 +01:00
parent 57250036bf
commit e770e5c24b
12 changed files with 440 additions and 374 deletions

View file

@ -17,6 +17,7 @@ package com.google.android.exoplayer.demo;
import com.google.android.exoplayer.AspectRatioFrameLayout;
import com.google.android.exoplayer.ExoPlayer;
import com.google.android.exoplayer.MediaFormat;
import com.google.android.exoplayer.audio.AudioCapabilities;
import com.google.android.exoplayer.audio.AudioCapabilitiesReceiver;
import com.google.android.exoplayer.demo.player.DashRendererBuilder;
@ -33,6 +34,7 @@ import com.google.android.exoplayer.text.CaptionStyleCompat;
import com.google.android.exoplayer.text.Cue;
import com.google.android.exoplayer.text.SubtitleLayout;
import com.google.android.exoplayer.util.DebugTextViewHelper;
import com.google.android.exoplayer.util.MimeTypes;
import com.google.android.exoplayer.util.Util;
import com.google.android.exoplayer.util.VerboseLogUtil;
@ -435,23 +437,34 @@ public class PlayerActivity extends Activity implements SurfaceHolder.Callback,
});
Menu menu = popup.getMenu();
// ID_OFFSET ensures we avoid clashing with Menu.NONE (which equals 0)
menu.add(MENU_GROUP_TRACKS, DemoPlayer.DISABLED_TRACK + ID_OFFSET, Menu.NONE, R.string.off);
if (trackCount == 1 && TextUtils.isEmpty(player.getTrackName(trackType, 0))) {
menu.add(MENU_GROUP_TRACKS, DemoPlayer.PRIMARY_TRACK + ID_OFFSET, Menu.NONE, R.string.on);
} else {
for (int i = 0; i < trackCount; i++) {
menu.add(MENU_GROUP_TRACKS, i + ID_OFFSET, Menu.NONE, player.getTrackName(trackType, i));
}
menu.add(MENU_GROUP_TRACKS, DemoPlayer.TRACK_DISABLED + ID_OFFSET, Menu.NONE, R.string.off);
for (int i = 0; i < trackCount; i++) {
menu.add(MENU_GROUP_TRACKS, i + ID_OFFSET, Menu.NONE,
buildTrackName(player.getTrackFormat(trackType, i)));
}
menu.setGroupCheckable(MENU_GROUP_TRACKS, true, true);
menu.findItem(player.getSelectedTrackIndex(trackType) + ID_OFFSET).setChecked(true);
menu.findItem(player.getSelectedTrack(trackType) + ID_OFFSET).setChecked(true);
}
/**
 * Builds a human-readable name for a track, for display in the selection menu.
 *
 * @param format The format of the track. Assumed non-null — TODO confirm callers never pass null.
 * @return "auto" for adaptive tracks, a dimension/channel/language description where the mime
 *     type allows one, or "unknown" otherwise.
 */
private static String buildTrackName(MediaFormat format) {
  // Adaptive tracks have no single fixed format, so they are simply labelled "auto".
  if (format.adaptive) {
    return "auto";
  }
  String trackName;
  if (MimeTypes.isVideo(format.mimeType)) {
    // Video: describe by resolution, e.g. "1280x720".
    trackName = format.width + "x" + format.height;
  } else if (MimeTypes.isAudio(format.mimeType)) {
    // Audio: describe by channel count and sample rate, e.g. "2ch, 44100Hz".
    trackName = format.channelCount + "ch, " + format.sampleRate + "Hz";
  } else if (MimeTypes.isText(format.mimeType) && !TextUtils.isEmpty(format.language)) {
    // Text: the language code is the most useful label, when present.
    trackName = format.language;
  } else {
    trackName = "unknown";
  }
  return trackName;
}
private boolean onTrackItemClick(MenuItem item, int type) {
if (player == null || item.getGroupId() != MENU_GROUP_TRACKS) {
return false;
}
player.selectTrack(type, item.getItemId() - ID_OFFSET);
player.setSelectedTrack(type, item.getItemId() - ID_OFFSET);
return true;
}

View file

@ -352,7 +352,7 @@ public class DashRendererBuilder implements RendererBuilder {
renderers[DemoPlayer.TYPE_VIDEO] = videoRenderer;
renderers[DemoPlayer.TYPE_AUDIO] = audioRenderer;
renderers[DemoPlayer.TYPE_TEXT] = textRenderer;
player.onRenderers(trackNames, multiTrackChunkSources, renderers, bandwidthMeter);
player.onRenderers(renderers, bandwidthMeter);
}
private static int getWidevineSecurityLevel(StreamingDrmSessionManager sessionManager) {

View file

@ -23,12 +23,12 @@ import com.google.android.exoplayer.MediaCodecAudioTrackRenderer;
import com.google.android.exoplayer.MediaCodecTrackRenderer;
import com.google.android.exoplayer.MediaCodecTrackRenderer.DecoderInitializationException;
import com.google.android.exoplayer.MediaCodecVideoTrackRenderer;
import com.google.android.exoplayer.MediaFormat;
import com.google.android.exoplayer.TimeRange;
import com.google.android.exoplayer.TrackRenderer;
import com.google.android.exoplayer.audio.AudioTrack;
import com.google.android.exoplayer.chunk.ChunkSampleSource;
import com.google.android.exoplayer.chunk.Format;
import com.google.android.exoplayer.chunk.MultiTrackChunkSource;
import com.google.android.exoplayer.dash.DashChunkSource;
import com.google.android.exoplayer.drm.StreamingDrmSessionManager;
import com.google.android.exoplayer.hls.HlsSampleSource;
@ -46,7 +46,6 @@ import android.os.Looper;
import android.view.Surface;
import java.io.IOException;
import java.util.Collections;
import java.util.List;
import java.util.Map;
import java.util.concurrent.CopyOnWriteArrayList;
@ -148,9 +147,8 @@ public class DemoPlayer implements ExoPlayer.Listener, ChunkSampleSource.EventLi
public static final int STATE_BUFFERING = ExoPlayer.STATE_BUFFERING;
public static final int STATE_READY = ExoPlayer.STATE_READY;
public static final int STATE_ENDED = ExoPlayer.STATE_ENDED;
public static final int DISABLED_TRACK = -1;
public static final int PRIMARY_TRACK = 0;
public static final int TRACK_DISABLED = ExoPlayer.TRACK_DISABLED;
public static final int TRACK_DEFAULT = ExoPlayer.TRACK_DEFAULT;
public static final int RENDERER_COUNT = 4;
public static final int TYPE_VIDEO = 0;
@ -179,9 +177,6 @@ public class DemoPlayer implements ExoPlayer.Listener, ChunkSampleSource.EventLi
private int videoTrackToRestore;
private BandwidthMeter bandwidthMeter;
private MultiTrackChunkSource[] multiTrackSources;
private String[][] trackNames;
private int[] selectedTracks;
private boolean backgrounded;
private CaptionListener captionListener;
@ -198,9 +193,8 @@ public class DemoPlayer implements ExoPlayer.Listener, ChunkSampleSource.EventLi
listeners = new CopyOnWriteArrayList<>();
lastReportedPlaybackState = STATE_IDLE;
rendererBuildingState = RENDERER_BUILDING_STATE_IDLE;
selectedTracks = new int[RENDERER_COUNT];
// Disable text initially.
selectedTracks[TYPE_TEXT] = DISABLED_TRACK;
player.setSelectedTrack(TYPE_TEXT, TRACK_DISABLED);
}
public PlayerControl getPlayerControl() {
@ -245,28 +239,20 @@ public class DemoPlayer implements ExoPlayer.Listener, ChunkSampleSource.EventLi
pushSurface(true);
}
@SuppressWarnings("deprecation")
public int getTrackCount(int type) {
return !player.getRendererHasMedia(type) ? 0 : trackNames[type].length;
return player.getTrackCount(type);
}
public String getTrackName(int type, int index) {
return trackNames[type][index];
public MediaFormat getTrackFormat(int type, int index) {
return player.getTrackFormat(type, index);
}
public int getSelectedTrackIndex(int type) {
return selectedTracks[type];
public int getSelectedTrack(int type) {
return player.getSelectedTrack(type);
}
public void selectTrack(int type, int index) {
if (selectedTracks[type] == index) {
return;
}
selectedTracks[type] = index;
pushTrackSelection(type, true);
if (type == TYPE_TEXT && index == DISABLED_TRACK && captionListener != null) {
captionListener.onCues(Collections.<Cue>emptyList());
}
public void setSelectedTrack(int type, int index) {
player.setSelectedTrack(type, index);
}
public boolean getBackgrounded() {
@ -279,11 +265,11 @@ public class DemoPlayer implements ExoPlayer.Listener, ChunkSampleSource.EventLi
}
this.backgrounded = backgrounded;
if (backgrounded) {
videoTrackToRestore = getSelectedTrackIndex(TYPE_VIDEO);
selectTrack(TYPE_VIDEO, DISABLED_TRACK);
videoTrackToRestore = getSelectedTrack(TYPE_VIDEO);
setSelectedTrack(TYPE_VIDEO, TRACK_DISABLED);
blockingClearSurface();
} else {
selectTrack(TYPE_VIDEO, videoTrackToRestore);
setSelectedTrack(TYPE_VIDEO, videoTrackToRestore);
}
}
@ -294,7 +280,6 @@ public class DemoPlayer implements ExoPlayer.Listener, ChunkSampleSource.EventLi
rendererBuilder.cancel();
videoFormat = null;
videoRenderer = null;
multiTrackSources = null;
rendererBuildingState = RENDERER_BUILDING_STATE_BUILDING;
maybeReportPlayerState();
rendererBuilder.buildRenderers(this);
@ -303,51 +288,25 @@ public class DemoPlayer implements ExoPlayer.Listener, ChunkSampleSource.EventLi
/**
* Invoked with the results from a {@link RendererBuilder}.
*
* @param trackNames The names of the available tracks, indexed by {@link DemoPlayer} TYPE_*
* constants. May be null if the track names are unknown. An individual element may be null
* if the track names are unknown for the corresponding type.
* @param multiTrackSources Sources capable of switching between multiple available tracks,
* indexed by {@link DemoPlayer} TYPE_* constants. May be null if there are no types with
* multiple tracks. An individual element may be null if it does not have multiple tracks.
* @param renderers Renderers indexed by {@link DemoPlayer} TYPE_* constants. An individual
* element may be null if there do not exist tracks of the corresponding type.
* @param bandwidthMeter Provides an estimate of the currently available bandwidth. May be null.
*/
/* package */ void onRenderers(String[][] trackNames,
MultiTrackChunkSource[] multiTrackSources, TrackRenderer[] renderers,
BandwidthMeter bandwidthMeter) {
// Normalize the results.
if (trackNames == null) {
trackNames = new String[RENDERER_COUNT][];
}
if (multiTrackSources == null) {
multiTrackSources = new MultiTrackChunkSource[RENDERER_COUNT];
}
for (int rendererIndex = 0; rendererIndex < RENDERER_COUNT; rendererIndex++) {
if (renderers[rendererIndex] == null) {
/* package */ void onRenderers(TrackRenderer[] renderers, BandwidthMeter bandwidthMeter) {
for (int i = 0; i < RENDERER_COUNT; i++) {
if (renderers[i] == null) {
// Convert a null renderer to a dummy renderer.
renderers[rendererIndex] = new DummyTrackRenderer();
}
if (trackNames[rendererIndex] == null) {
// Convert a null trackNames to an array of suitable length.
int trackCount = multiTrackSources[rendererIndex] != null
? multiTrackSources[rendererIndex].getMultiTrackCount() : 1;
trackNames[rendererIndex] = new String[trackCount];
renderers[i] = new DummyTrackRenderer();
}
}
// Complete preparation.
this.trackNames = trackNames;
this.videoRenderer = renderers[TYPE_VIDEO];
this.codecCounters = videoRenderer instanceof MediaCodecTrackRenderer
? ((MediaCodecTrackRenderer) videoRenderer).codecCounters
: renderers[TYPE_AUDIO] instanceof MediaCodecTrackRenderer
? ((MediaCodecTrackRenderer) renderers[TYPE_AUDIO]).codecCounters : null;
this.multiTrackSources = multiTrackSources;
this.bandwidthMeter = bandwidthMeter;
pushSurface(false);
pushTrackSelection(TYPE_VIDEO, true);
pushTrackSelection(TYPE_AUDIO, true);
pushTrackSelection(TYPE_TEXT, true);
player.prepare(renderers);
rendererBuildingState = RENDERER_BUILDING_STATE_BUILT;
}
@ -537,14 +496,14 @@ public class DemoPlayer implements ExoPlayer.Listener, ChunkSampleSource.EventLi
@Override
public void onCues(List<Cue> cues) {
if (captionListener != null && selectedTracks[TYPE_TEXT] != DISABLED_TRACK) {
if (captionListener != null && getSelectedTrack(TYPE_TEXT) != TRACK_DISABLED) {
captionListener.onCues(cues);
}
}
@Override
public void onMetadata(Map<String, Object> metadata) {
if (id3MetadataListener != null && selectedTracks[TYPE_METADATA] != DISABLED_TRACK) {
if (id3MetadataListener != null && getSelectedTrack(TYPE_METADATA) != TRACK_DISABLED) {
id3MetadataListener.onId3Metadata(metadata);
}
}
@ -620,26 +579,4 @@ public class DemoPlayer implements ExoPlayer.Listener, ChunkSampleSource.EventLi
}
}
@SuppressWarnings("deprecation")
private void pushTrackSelection(int type, boolean allowRendererEnable) {
if (multiTrackSources == null) {
return;
}
int trackIndex = selectedTracks[type];
if (trackIndex == DISABLED_TRACK) {
player.setRendererEnabled(type, false);
} else if (multiTrackSources[type] == null) {
player.setRendererEnabled(type, allowRendererEnable);
} else {
boolean playWhenReady = player.getPlayWhenReady();
player.setPlayWhenReady(false);
player.setRendererEnabled(type, false);
player.sendMessage(multiTrackSources[type], MultiTrackChunkSource.MSG_SELECT_TRACK,
trackIndex);
player.setRendererEnabled(type, allowRendererEnable);
player.setPlayWhenReady(playWhenReady);
}
}
}

View file

@ -74,7 +74,7 @@ public class ExtractorRendererBuilder implements RendererBuilder {
renderers[DemoPlayer.TYPE_VIDEO] = videoRenderer;
renderers[DemoPlayer.TYPE_AUDIO] = audioRenderer;
renderers[DemoPlayer.TYPE_TEXT] = textRenderer;
player.onRenderers(null, null, renderers, bandwidthMeter);
player.onRenderers(renderers, bandwidthMeter);
}
@Override

View file

@ -162,7 +162,7 @@ public class HlsRendererBuilder implements RendererBuilder {
renderers[DemoPlayer.TYPE_AUDIO] = audioRenderer;
renderers[DemoPlayer.TYPE_METADATA] = id3Renderer;
renderers[DemoPlayer.TYPE_TEXT] = closedCaptionRenderer;
player.onRenderers(null, null, renderers, bandwidthMeter);
player.onRenderers(renderers, bandwidthMeter);
}
}

View file

@ -18,15 +18,12 @@ package com.google.android.exoplayer.demo.player;
import com.google.android.exoplayer.DefaultLoadControl;
import com.google.android.exoplayer.LoadControl;
import com.google.android.exoplayer.MediaCodecAudioTrackRenderer;
import com.google.android.exoplayer.MediaCodecUtil.DecoderQueryException;
import com.google.android.exoplayer.MediaCodecVideoTrackRenderer;
import com.google.android.exoplayer.TrackRenderer;
import com.google.android.exoplayer.audio.AudioCapabilities;
import com.google.android.exoplayer.chunk.ChunkSampleSource;
import com.google.android.exoplayer.chunk.ChunkSource;
import com.google.android.exoplayer.chunk.FormatEvaluator;
import com.google.android.exoplayer.chunk.FormatEvaluator.AdaptiveEvaluator;
import com.google.android.exoplayer.chunk.MultiTrackChunkSource;
import com.google.android.exoplayer.chunk.VideoFormatSelectorUtil;
import com.google.android.exoplayer.demo.player.DemoPlayer.RendererBuilder;
import com.google.android.exoplayer.drm.DrmSessionManager;
@ -37,8 +34,8 @@ import com.google.android.exoplayer.smoothstreaming.SmoothStreamingChunkSource;
import com.google.android.exoplayer.smoothstreaming.SmoothStreamingManifest;
import com.google.android.exoplayer.smoothstreaming.SmoothStreamingManifest.StreamElement;
import com.google.android.exoplayer.smoothstreaming.SmoothStreamingManifestParser;
import com.google.android.exoplayer.smoothstreaming.SmoothStreamingTrackSelector;
import com.google.android.exoplayer.text.TextTrackRenderer;
import com.google.android.exoplayer.text.ttml.TtmlParser;
import com.google.android.exoplayer.upstream.DataSource;
import com.google.android.exoplayer.upstream.DefaultAllocator;
import com.google.android.exoplayer.upstream.DefaultBandwidthMeter;
@ -160,126 +157,78 @@ public class SmoothStreamingRendererBuilder implements RendererBuilder {
}
}
// Obtain stream elements for playback.
int audioStreamElementCount = 0;
int textStreamElementCount = 0;
int videoStreamElementIndex = -1;
for (int i = 0; i < manifest.streamElements.length; i++) {
if (manifest.streamElements[i].type == StreamElement.TYPE_AUDIO) {
audioStreamElementCount++;
} else if (manifest.streamElements[i].type == StreamElement.TYPE_TEXT) {
textStreamElementCount++;
} else if (videoStreamElementIndex == -1
&& manifest.streamElements[i].type == StreamElement.TYPE_VIDEO) {
videoStreamElementIndex = i;
}
}
// Determine which video tracks we should use for playback.
int[] videoTrackIndices = null;
if (videoStreamElementIndex != -1) {
try {
videoTrackIndices = VideoFormatSelectorUtil.selectVideoFormatsForDefaultDisplay(context,
Arrays.asList(manifest.streamElements[videoStreamElementIndex].tracks), null, false);
} catch (DecoderQueryException e) {
player.onRenderersError(e);
return;
}
}
// Build the video renderer.
final MediaCodecVideoTrackRenderer videoRenderer;
if (videoTrackIndices == null || videoTrackIndices.length == 0) {
videoRenderer = null;
} else {
DataSource videoDataSource = new DefaultUriDataSource(context, bandwidthMeter, userAgent);
ChunkSource videoChunkSource = new SmoothStreamingChunkSource(manifestFetcher,
videoStreamElementIndex, videoTrackIndices, videoDataSource,
new AdaptiveEvaluator(bandwidthMeter), LIVE_EDGE_LATENCY_MS);
ChunkSampleSource videoSampleSource = new ChunkSampleSource(videoChunkSource, loadControl,
VIDEO_BUFFER_SEGMENTS * BUFFER_SEGMENT_SIZE, mainHandler, player,
DemoPlayer.TYPE_VIDEO);
videoRenderer = new MediaCodecVideoTrackRenderer(videoSampleSource, drmSessionManager, true,
MediaCodec.VIDEO_SCALING_MODE_SCALE_TO_FIT, 5000, null, mainHandler, player, 50);
}
DataSource videoDataSource = new DefaultUriDataSource(context, bandwidthMeter, userAgent);
ChunkSource videoChunkSource = new SmoothStreamingChunkSource(manifestFetcher,
new TrackSelector(context, StreamElement.TYPE_VIDEO), videoDataSource,
new AdaptiveEvaluator(bandwidthMeter), LIVE_EDGE_LATENCY_MS);
ChunkSampleSource videoSampleSource = new ChunkSampleSource(videoChunkSource, loadControl,
VIDEO_BUFFER_SEGMENTS * BUFFER_SEGMENT_SIZE, mainHandler, player,
DemoPlayer.TYPE_VIDEO);
TrackRenderer videoRenderer = new MediaCodecVideoTrackRenderer(videoSampleSource,
drmSessionManager, true, MediaCodec.VIDEO_SCALING_MODE_SCALE_TO_FIT, 5000, null,
mainHandler, player, 50);
// Build the audio renderer.
final String[] audioTrackNames;
final MultiTrackChunkSource audioChunkSource;
final MediaCodecAudioTrackRenderer audioRenderer;
if (audioStreamElementCount == 0) {
audioTrackNames = null;
audioChunkSource = null;
audioRenderer = null;
} else {
audioTrackNames = new String[audioStreamElementCount];
ChunkSource[] audioChunkSources = new ChunkSource[audioStreamElementCount];
DataSource audioDataSource = new DefaultUriDataSource(context, bandwidthMeter, userAgent);
FormatEvaluator audioFormatEvaluator = new FormatEvaluator.FixedEvaluator();
audioStreamElementCount = 0;
for (int i = 0; i < manifest.streamElements.length; i++) {
if (manifest.streamElements[i].type == StreamElement.TYPE_AUDIO) {
audioTrackNames[audioStreamElementCount] = manifest.streamElements[i].name;
audioChunkSources[audioStreamElementCount] = new SmoothStreamingChunkSource(
manifestFetcher, i, new int[] {0}, audioDataSource, audioFormatEvaluator,
LIVE_EDGE_LATENCY_MS);
audioStreamElementCount++;
}
}
audioChunkSource = new MultiTrackChunkSource(audioChunkSources);
ChunkSampleSource audioSampleSource = new ChunkSampleSource(audioChunkSource, loadControl,
AUDIO_BUFFER_SEGMENTS * BUFFER_SEGMENT_SIZE, mainHandler, player,
DemoPlayer.TYPE_AUDIO);
audioRenderer = new MediaCodecAudioTrackRenderer(audioSampleSource, drmSessionManager, true,
mainHandler, player, AudioCapabilities.getCapabilities(context));
}
DataSource audioDataSource = new DefaultUriDataSource(context, bandwidthMeter, userAgent);
ChunkSource audioChunkSource = new SmoothStreamingChunkSource(manifestFetcher,
new TrackSelector(context, StreamElement.TYPE_AUDIO), audioDataSource, null,
LIVE_EDGE_LATENCY_MS);
ChunkSampleSource audioSampleSource = new ChunkSampleSource(audioChunkSource, loadControl,
AUDIO_BUFFER_SEGMENTS * BUFFER_SEGMENT_SIZE, mainHandler, player,
DemoPlayer.TYPE_AUDIO);
TrackRenderer audioRenderer = new MediaCodecAudioTrackRenderer(audioSampleSource,
drmSessionManager, true, mainHandler, player, AudioCapabilities.getCapabilities(context));
// Build the text renderer.
final String[] textTrackNames;
final MultiTrackChunkSource textChunkSource;
final TrackRenderer textRenderer;
if (textStreamElementCount == 0) {
textTrackNames = null;
textChunkSource = null;
textRenderer = null;
} else {
textTrackNames = new String[textStreamElementCount];
ChunkSource[] textChunkSources = new ChunkSource[textStreamElementCount];
DataSource ttmlDataSource = new DefaultUriDataSource(context, bandwidthMeter, userAgent);
FormatEvaluator ttmlFormatEvaluator = new FormatEvaluator.FixedEvaluator();
textStreamElementCount = 0;
for (int i = 0; i < manifest.streamElements.length; i++) {
if (manifest.streamElements[i].type == StreamElement.TYPE_TEXT) {
textTrackNames[textStreamElementCount] = manifest.streamElements[i].language;
textChunkSources[textStreamElementCount] = new SmoothStreamingChunkSource(
manifestFetcher, i, new int[] {0}, ttmlDataSource, ttmlFormatEvaluator,
LIVE_EDGE_LATENCY_MS);
textStreamElementCount++;
}
}
textChunkSource = new MultiTrackChunkSource(textChunkSources);
ChunkSampleSource ttmlSampleSource = new ChunkSampleSource(textChunkSource, loadControl,
TEXT_BUFFER_SEGMENTS * BUFFER_SEGMENT_SIZE, mainHandler, player,
DemoPlayer.TYPE_TEXT);
textRenderer = new TextTrackRenderer(ttmlSampleSource, player, mainHandler.getLooper(),
new TtmlParser());
}
DataSource textDataSource = new DefaultUriDataSource(context, bandwidthMeter, userAgent);
ChunkSource textChunkSource = new SmoothStreamingChunkSource(manifestFetcher,
new TrackSelector(context, StreamElement.TYPE_TEXT), textDataSource, null,
LIVE_EDGE_LATENCY_MS);
ChunkSampleSource textSampleSource = new ChunkSampleSource(textChunkSource, loadControl,
TEXT_BUFFER_SEGMENTS * BUFFER_SEGMENT_SIZE, mainHandler, player,
DemoPlayer.TYPE_TEXT);
TrackRenderer textRenderer = new TextTrackRenderer(textSampleSource, player,
mainHandler.getLooper());
// Invoke the callback.
String[][] trackNames = new String[DemoPlayer.RENDERER_COUNT][];
trackNames[DemoPlayer.TYPE_AUDIO] = audioTrackNames;
trackNames[DemoPlayer.TYPE_TEXT] = textTrackNames;
MultiTrackChunkSource[] multiTrackChunkSources =
new MultiTrackChunkSource[DemoPlayer.RENDERER_COUNT];
multiTrackChunkSources[DemoPlayer.TYPE_AUDIO] = audioChunkSource;
multiTrackChunkSources[DemoPlayer.TYPE_TEXT] = textChunkSource;
TrackRenderer[] renderers = new TrackRenderer[DemoPlayer.RENDERER_COUNT];
renderers[DemoPlayer.TYPE_VIDEO] = videoRenderer;
renderers[DemoPlayer.TYPE_AUDIO] = audioRenderer;
renderers[DemoPlayer.TYPE_TEXT] = textRenderer;
player.onRenderers(trackNames, multiTrackChunkSources, renderers, bandwidthMeter);
player.onRenderers(renderers, bandwidthMeter);
}
}
/**
 * A {@link SmoothStreamingTrackSelector} that exposes tracks for all stream elements of a single
 * element type (video, audio or text).
 */
private static final class TrackSelector implements SmoothStreamingTrackSelector {

  private final Context context;
  private final int elementType;

  private TrackSelector(Context context, int type) {
    this.context = context;
    this.elementType = type;
  }

  @Override
  public void selectTracks(SmoothStreamingManifest manifest, Output output) throws IOException {
    // Walk every stream element in the manifest, exposing tracks only for elements whose type
    // matches the one this selector was built for.
    for (int elementIndex = 0; elementIndex < manifest.streamElements.length; elementIndex++) {
      if (manifest.streamElements[elementIndex].type != elementType) {
        continue;
      }
      if (elementType == StreamElement.TYPE_VIDEO) {
        // Video: restrict to formats playable on the default display, then expose one adaptive
        // track spanning those formats plus a fixed track for each of them.
        int[] playableIndices = VideoFormatSelectorUtil.selectVideoFormatsForDefaultDisplay(
            context, Arrays.asList(manifest.streamElements[elementIndex].tracks), null, false);
        output.adaptiveTrack(manifest, elementIndex, playableIndices);
        for (int playableIndex : playableIndices) {
          output.fixedTrack(manifest, elementIndex, playableIndex);
        }
      } else {
        // Audio/text: expose every track in the element as a fixed track.
        int trackCount = manifest.streamElements[elementIndex].tracks.length;
        for (int trackIndex = 0; trackIndex < trackCount; trackIndex++) {
          output.fixedTrack(manifest, elementIndex, trackIndex);
        }
      }
    }
  }

}

View file

@ -28,6 +28,7 @@ import android.text.TextUtils;
import android.util.Log;
import android.util.Pair;
import java.io.IOException;
import java.util.HashMap;
/**
@ -42,7 +43,7 @@ public final class MediaCodecUtil {
* Such failures are not expected in normal operation and are normally temporary (e.g. if the
* mediaserver process has crashed and is yet to restart).
*/
public static class DecoderQueryException extends Exception {
public static class DecoderQueryException extends IOException {
private DecoderQueryException(Throwable cause) {
super("Failed to query underlying media codecs", cause);

View file

@ -187,15 +187,6 @@ public final class MediaFormat {
NO_VALUE, NO_VALUE, null, OFFSET_SAMPLE_RELATIVE, null, false, NO_VALUE, NO_VALUE);
}
public static MediaFormat createAdaptiveFormat(String mimeType) {
return createAdaptiveFormat(mimeType, C.UNKNOWN_TIME_US);
}
public static MediaFormat createAdaptiveFormat(String mimeType, long durationUs) {
return new MediaFormat(mimeType, NO_VALUE, durationUs, NO_VALUE, NO_VALUE, NO_VALUE,
NO_VALUE, NO_VALUE, NO_VALUE, null, OFFSET_SAMPLE_RELATIVE, null, true, NO_VALUE, NO_VALUE);
}
/* package */ MediaFormat(String mimeType, int maxInputSize, long durationUs, int width,
int height, int rotationDegrees, float pixelWidthHeightRatio, int channelCount,
int sampleRate, String language, long subsampleOffsetUs, List<byte[]> initializationData,
@ -236,6 +227,12 @@ public final class MediaFormat {
initializationData, adaptive, maxWidth, maxHeight);
}
public MediaFormat copyWithAdaptive(boolean adaptive) {
return new MediaFormat(mimeType, maxInputSize, durationUs, width, height, rotationDegrees,
pixelWidthHeightRatio, channelCount, sampleRate, language, subsampleOffsetUs,
initializationData, adaptive, maxWidth, maxHeight);
}
/**
* @return A {@link MediaFormat} representation of this format.
*/

View file

@ -134,7 +134,9 @@ public class ChunkSampleSource implements SampleSource, SampleSourceReader, Load
} else if (!chunkSource.prepare()) {
return false;
}
loader = new Loader("Loader:" + chunkSource.getFormat(0).mimeType);
if (chunkSource.getTrackCount() > 0) {
loader = new Loader("Loader:" + chunkSource.getFormat(0).mimeType);
}
state = STATE_PREPARED;
return true;
}

View file

@ -66,19 +66,6 @@ public interface ChunkSource {
*/
MediaFormat getFormat(int track);
/**
* Adaptive video {@link ChunkSource} implementations must return a copy of the provided
* {@link MediaFormat} with the maximum video dimensions set. Other implementations can return
* the provided {@link MediaFormat} directly.
* <p>
* This method should only be called after the source has been prepared.
*
* @param format The format to be copied or returned.
* @return A copy of the provided {@link MediaFormat} with the maximum video dimensions set, or
* the provided format.
*/
MediaFormat getWithMaxVideoDimensions(MediaFormat format);
/**
* Enable the source for the specified track.
* <p>
@ -89,6 +76,19 @@ public interface ChunkSource {
*/
void enable(int track);
/**
* Adaptive video {@link ChunkSource} implementations must return a copy of the provided
* {@link MediaFormat} with the maximum video dimensions set. Other implementations can return
* the provided {@link MediaFormat} directly.
* <p>
* This method should only be called when the source is enabled.
*
* @param format The format to be copied or returned.
* @return A copy of the provided {@link MediaFormat} with the maximum video dimensions set, or
* the provided format.
*/
MediaFormat getWithMaxVideoDimensions(MediaFormat format);
/**
* Indicates to the source that it should still be checking for updates to the stream.
* <p>

View file

@ -36,6 +36,7 @@ import com.google.android.exoplayer.smoothstreaming.SmoothStreamingManifest.Stre
import com.google.android.exoplayer.smoothstreaming.SmoothStreamingManifest.TrackElement;
import com.google.android.exoplayer.upstream.DataSource;
import com.google.android.exoplayer.upstream.DataSpec;
import com.google.android.exoplayer.util.Assertions;
import com.google.android.exoplayer.util.CodecSpecificDataUtil;
import com.google.android.exoplayer.util.ManifestFetcher;
import com.google.android.exoplayer.util.MimeTypes;
@ -46,6 +47,7 @@ import android.util.Base64;
import android.util.SparseArray;
import java.io.IOException;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collections;
import java.util.List;
@ -53,31 +55,33 @@ import java.util.List;
/**
* An {@link ChunkSource} for SmoothStreaming.
*/
public class SmoothStreamingChunkSource implements ChunkSource {
public class SmoothStreamingChunkSource implements ChunkSource,
SmoothStreamingTrackSelector.Output {
private static final int MINIMUM_MANIFEST_REFRESH_PERIOD_MS = 5000;
private static final int INITIALIZATION_VECTOR_SIZE = 8;
private final MediaFormat mediaFormat;
private final SmoothStreamingTrackSelector trackSelector;
private final DataSource dataSource;
private final FormatEvaluator formatEvaluator;
private final Evaluation evaluation;
private final long liveEdgeLatencyUs;
private final int maxWidth;
private final int maxHeight;
private final TrackEncryptionBox[] trackEncryptionBoxes;
private final ManifestFetcher<SmoothStreamingManifest> manifestFetcher;
private final DrmInitData.Mapped drmInitData;
private final FormatEvaluator adaptiveFormatEvaluator;
// The tracks exposed by this source.
private final ArrayList<ExposedTrack> tracks;
// Mappings from manifest track key.
private final SparseArray<ChunkExtractorWrapper> extractorWrappers;
private final SparseArray<MediaFormat> mediaFormats;
private final DrmInitData drmInitData;
private final Format[] formats;
private final ManifestFetcher<SmoothStreamingManifest> manifestFetcher;
private final int streamElementIndex;
private boolean prepareCalled;
private SmoothStreamingManifest currentManifest;
private int currentManifestChunkOffset;
private boolean finishedCurrentManifest;
private boolean currentManifestFinished;
private ExposedTrack enabledTrack;
private IOException fatalError;
/**
@ -88,10 +92,7 @@ public class SmoothStreamingChunkSource implements ChunkSource {
*
* @param manifestFetcher A fetcher for the manifest, which must have already successfully
* completed an initial load.
* @param streamElementIndex The index of the stream element in the manifest to be provided by
* the source.
* @param trackIndices The indices of the tracks within the stream element to be considered by
* the source. May be null if all tracks within the element should be considered.
* @param trackSelector Selects tracks from the manifest to be exposed by this source.
* @param dataSource A {@link DataSource} suitable for loading the media data.
* @param formatEvaluator Selects from the available formats.
* @param liveEdgeLatencyMs For live streams, the number of milliseconds that the playback should
@ -101,122 +102,107 @@ public class SmoothStreamingChunkSource implements ChunkSource {
* Hence a small value may increase the probability of rebuffering and playback failures.
*/
public SmoothStreamingChunkSource(ManifestFetcher<SmoothStreamingManifest> manifestFetcher,
int streamElementIndex, int[] trackIndices, DataSource dataSource,
SmoothStreamingTrackSelector trackSelector, DataSource dataSource,
FormatEvaluator formatEvaluator, long liveEdgeLatencyMs) {
this(manifestFetcher, manifestFetcher.getManifest(), streamElementIndex, trackIndices,
dataSource, formatEvaluator, liveEdgeLatencyMs);
this(manifestFetcher, manifestFetcher.getManifest(), trackSelector, dataSource, formatEvaluator,
liveEdgeLatencyMs);
}
/**
* Constructor to use for fixed duration content.
*
* @param manifest The manifest parsed from {@code baseUrl + "/Manifest"}.
* @param streamElementIndex The index of the stream element in the manifest to be provided by
* the source.
* @param trackIndices The indices of the tracks within the stream element to be considered by
* the source. May be null if all tracks within the element should be considered.
* @param trackSelector Selects tracks from the manifest to be exposed by this source.
* @param dataSource A {@link DataSource} suitable for loading the media data.
* @param formatEvaluator Selects from the available formats.
*/
public SmoothStreamingChunkSource(SmoothStreamingManifest manifest, int streamElementIndex,
int[] trackIndices, DataSource dataSource, FormatEvaluator formatEvaluator) {
this(null, manifest, streamElementIndex, trackIndices, dataSource, formatEvaluator, 0);
public SmoothStreamingChunkSource(SmoothStreamingManifest manifest,
SmoothStreamingTrackSelector trackSelector, DataSource dataSource,
FormatEvaluator formatEvaluator) {
this(null, manifest, trackSelector, dataSource, formatEvaluator, 0);
}
private SmoothStreamingChunkSource(ManifestFetcher<SmoothStreamingManifest> manifestFetcher,
SmoothStreamingManifest initialManifest, int streamElementIndex, int[] trackIndices,
DataSource dataSource, FormatEvaluator formatEvaluator, long liveEdgeLatencyMs) {
SmoothStreamingManifest initialManifest, SmoothStreamingTrackSelector trackSelector,
DataSource dataSource, FormatEvaluator adaptiveFormatEvaluator, long liveEdgeLatencyMs) {
this.manifestFetcher = manifestFetcher;
this.streamElementIndex = streamElementIndex;
this.currentManifest = initialManifest;
this.trackSelector = trackSelector;
this.dataSource = dataSource;
this.formatEvaluator = formatEvaluator;
this.adaptiveFormatEvaluator = adaptiveFormatEvaluator;
this.liveEdgeLatencyUs = liveEdgeLatencyMs * 1000;
StreamElement streamElement = getElement(initialManifest);
// TODO: Remove this and pass proper formats instead (b/22996976).
mediaFormat = MediaFormat.createFormatForMimeType(streamElement.tracks[0].format.mimeType,
initialManifest.durationUs);
evaluation = new Evaluation();
TrackEncryptionBox[] trackEncryptionBoxes = null;
ProtectionElement protectionElement = initialManifest.protectionElement;
if (protectionElement != null) {
byte[] keyId = getKeyId(protectionElement.data);
trackEncryptionBoxes = new TrackEncryptionBox[1];
trackEncryptionBoxes[0] = new TrackEncryptionBox(true, INITIALIZATION_VECTOR_SIZE, keyId);
DrmInitData.Mapped drmInitData = new DrmInitData.Mapped(MimeTypes.VIDEO_MP4);
drmInitData.put(protectionElement.uuid, protectionElement.data);
this.drmInitData = drmInitData;
} else {
drmInitData = null;
}
int trackCount = trackIndices != null ? trackIndices.length : streamElement.tracks.length;
formats = new Format[trackCount];
tracks = new ArrayList<>();
extractorWrappers = new SparseArray<>();
mediaFormats = new SparseArray<>();
int maxWidth = 0;
int maxHeight = 0;
for (int i = 0; i < trackCount; i++) {
int trackIndex = trackIndices != null ? trackIndices[i] : i;
formats[i] = streamElement.tracks[trackIndex].format;
maxWidth = Math.max(maxWidth, formats[i].width);
maxHeight = Math.max(maxHeight, formats[i].height);
MediaFormat mediaFormat = getMediaFormat(streamElement, trackIndex);
int trackType = streamElement.type == StreamElement.TYPE_VIDEO ? Track.TYPE_vide
: streamElement.type == StreamElement.TYPE_AUDIO ? Track.TYPE_soun
: Track.TYPE_text;
FragmentedMp4Extractor extractor = new FragmentedMp4Extractor(
FragmentedMp4Extractor.WORKAROUND_EVERY_VIDEO_FRAME_IS_SYNC_FRAME);
extractor.setTrack(new Track(trackIndex, trackType, streamElement.timescale,
initialManifest.durationUs, mediaFormat, trackEncryptionBoxes,
trackType == Track.TYPE_vide ? 4 : -1));
extractorWrappers.put(trackIndex, new ChunkExtractorWrapper(extractor));
mediaFormats.put(trackIndex, mediaFormat);
ProtectionElement protectionElement = initialManifest.protectionElement;
if (protectionElement != null) {
byte[] keyId = getProtectionElementKeyId(protectionElement.data);
trackEncryptionBoxes = new TrackEncryptionBox[1];
trackEncryptionBoxes[0] = new TrackEncryptionBox(true, INITIALIZATION_VECTOR_SIZE, keyId);
drmInitData = new DrmInitData.Mapped(MimeTypes.VIDEO_MP4);
drmInitData.put(protectionElement.uuid, protectionElement.data);
} else {
trackEncryptionBoxes = null;
drmInitData = null;
}
this.maxWidth = maxWidth;
this.maxHeight = maxHeight;
Arrays.sort(formats, new DecreasingBandwidthComparator());
}
// ChunkSource implementation.
@Override
public final MediaFormat getWithMaxVideoDimensions(MediaFormat format) {
return MimeTypes.isVideo(mediaFormat.mimeType)
? format.copyWithMaxVideoDimensions(maxWidth, maxHeight) : format;
/**
 * Throws any error that has prevented this source from becoming usable.
 * <p>
 * A fatal error captured during {@code prepare()} (from track selection) takes priority.
 * Otherwise any error from the manifest fetcher is surfaced. The fetcher is null for fixed
 * duration content, so it must be null-checked before delegating.
 *
 * @throws IOException The stored fatal error, or an error from the manifest fetcher.
 */
public void maybeThrowError() throws IOException {
  if (fatalError != null) {
    throw fatalError;
  } else if (manifestFetcher != null) {
    // Fix: manifestFetcher is null for fixed duration content; guard to avoid an NPE.
    manifestFetcher.maybeThrowError();
  }
}
/**
 * Prepares the source by running track selection exactly once.
 * <p>
 * The selector reports its choices back through the
 * {@code SmoothStreamingTrackSelector.Output} callbacks implemented on this class
 * ({@code adaptiveTrack}/{@code fixedTrack}), which populate {@code tracks}.
 *
 * @return True if preparation succeeded (no fatal error), false otherwise.
 */
@Override
public boolean prepare() {
  // Fix: removed a stale unconditional "return true;" that made the selection logic
  // below unreachable.
  if (!prepareCalled) {
    prepareCalled = true;
    try {
      trackSelector.selectTracks(currentManifest, this);
    } catch (IOException e) {
      fatalError = e;
    }
  }
  return fatalError == null;
}
/**
 * Returns the number of tracks exposed by this source — one entry per selector output
 * (fixed or adaptive).
 */
@Override
public int getTrackCount() {
  // Fix: removed a stale "return 1;" that shadowed the real implementation.
  return tracks.size();
}
/**
 * Returns the {@link MediaFormat} of the exposed track at the given index.
 *
 * @param track The index of the exposed track.
 */
@Override
public final MediaFormat getFormat(int track) {
  // Fix: removed a stale "return mediaFormat;" (referencing a removed field) that
  // shadowed the real implementation.
  return tracks.get(track).format;
}
@Override
public void enable(int track) {
fatalError = null;
formatEvaluator.enable();
evaluation.format = null;
enabledTrack = tracks.get(track);
if (enabledTrack.isAdaptive()) {
adaptiveFormatEvaluator.enable();
}
if (manifestFetcher != null) {
manifestFetcher.enable();
}
}
@Override
public void disable(List<? extends MediaChunk> queue) {
formatEvaluator.disable();
if (manifestFetcher != null) {
manifestFetcher.disable();
public final MediaFormat getWithMaxVideoDimensions(MediaFormat format) {
if (enabledTrack.isAdaptive() && MimeTypes.isVideo(format.mimeType)) {
return format.copyWithMaxVideoDimensions(
enabledTrack.adaptiveMaxWidth, enabledTrack.adaptiveMaxHeight);
}
return format;
}
@Override
@ -227,9 +213,9 @@ public class SmoothStreamingChunkSource implements ChunkSource {
SmoothStreamingManifest newManifest = manifestFetcher.getManifest();
if (currentManifest != newManifest && newManifest != null) {
StreamElement currentElement = getElement(currentManifest);
StreamElement currentElement = currentManifest.streamElements[enabledTrack.elementIndex];
int currentElementChunkCount = currentElement.chunkCount;
StreamElement newElement = getElement(newManifest);
StreamElement newElement = newManifest.streamElements[enabledTrack.elementIndex];
if (currentElementChunkCount == 0 || newElement.chunkCount == 0) {
// There's no overlap between the old and new elements because at least one is empty.
currentManifestChunkOffset += currentElementChunkCount;
@ -246,10 +232,10 @@ public class SmoothStreamingChunkSource implements ChunkSource {
}
}
currentManifest = newManifest;
finishedCurrentManifest = false;
currentManifestFinished = false;
}
if (finishedCurrentManifest && (SystemClock.elapsedRealtime()
if (currentManifestFinished && (SystemClock.elapsedRealtime()
> manifestFetcher.getManifestLoadStartTimestamp() + MINIMUM_MANIFEST_REFRESH_PERIOD_MS)) {
manifestFetcher.requestRefresh();
}
@ -264,7 +250,14 @@ public class SmoothStreamingChunkSource implements ChunkSource {
}
evaluation.queueSize = queue.size();
formatEvaluator.evaluate(queue, playbackPositionUs, formats, evaluation);
if (enabledTrack.isAdaptive()) {
adaptiveFormatEvaluator.evaluate(queue, playbackPositionUs, enabledTrack.adaptiveFormats,
evaluation);
} else {
evaluation.format = enabledTrack.fixedFormat;
evaluation.trigger = Chunk.TRIGGER_MANUAL;
}
Format selectedFormat = evaluation.format;
out.queueSize = evaluation.queueSize;
@ -281,17 +274,17 @@ public class SmoothStreamingChunkSource implements ChunkSource {
// In all cases where we return before instantiating a new chunk, we want out.chunk to be null.
out.chunk = null;
StreamElement streamElement = getElement(currentManifest);
StreamElement streamElement = currentManifest.streamElements[enabledTrack.elementIndex];
if (streamElement.chunkCount == 0) {
// The manifest is currently empty for this stream.
finishedCurrentManifest = true;
currentManifestFinished = true;
return;
}
int chunkIndex;
if (queue.isEmpty()) {
if (currentManifest.isLive) {
seekPositionUs = getLiveSeekPosition();
seekPositionUs = getLiveSeekPosition(currentManifest, liveEdgeLatencyUs);
}
chunkIndex = streamElement.getChunkIndex(seekPositionUs);
} else {
@ -306,12 +299,12 @@ public class SmoothStreamingChunkSource implements ChunkSource {
return;
} else if (chunkIndex >= streamElement.chunkCount) {
// This is beyond the last chunk in the current manifest.
finishedCurrentManifest = true;
currentManifestFinished = true;
return;
} else if (chunkIndex == streamElement.chunkCount - 1) {
// This is the last chunk in the current manifest. Mark the manifest as being finished,
// but continue to return the final chunk.
finishedCurrentManifest = true;
currentManifestFinished = true;
}
}
@ -326,23 +319,17 @@ public class SmoothStreamingChunkSource implements ChunkSource {
: chunkStartTimeUs + streamElement.getChunkDurationUs(chunkIndex);
int currentAbsoluteChunkIndex = chunkIndex + currentManifestChunkOffset;
int trackIndex = getTrackIndex(selectedFormat);
Uri uri = streamElement.buildRequestUri(trackIndex, chunkIndex);
Chunk mediaChunk = newMediaChunk(selectedFormat, uri, null, extractorWrappers.get(trackIndex),
int manifestTrackIndex = getManifestTrackIndex(streamElement, selectedFormat);
int manifestTrackKey = getManifestTrackKey(enabledTrack.elementIndex, manifestTrackIndex);
Uri uri = streamElement.buildRequestUri(manifestTrackIndex, chunkIndex);
Chunk mediaChunk = newMediaChunk(selectedFormat, uri, null,
extractorWrappers.get(manifestTrackKey),
drmInitData, dataSource, currentAbsoluteChunkIndex, isLastChunk, chunkStartTimeUs,
chunkEndTimeUs, evaluation.trigger, mediaFormats.get(trackIndex));
chunkEndTimeUs, evaluation.trigger,
mediaFormats.get(manifestTrackKey));
out.chunk = mediaChunk;
}
@Override
public void maybeThrowError() throws IOException {
if (fatalError != null) {
throw fatalError;
} else {
manifestFetcher.maybeThrowError();
}
}
@Override
public void onChunkLoadCompleted(Chunk chunk) {
// Do nothing.
@ -353,16 +340,120 @@ public class SmoothStreamingChunkSource implements ChunkSource {
// Do nothing.
}
/**
 * Disables the source after playback of the enabled track stops.
 * <p>
 * Releases the adaptive evaluator when the enabled track was adaptive, and stops manifest
 * refreshes when a fetcher is present (it is null for fixed duration content).
 */
@Override
public void disable(List<? extends MediaChunk> queue) {
  if (manifestFetcher != null) {
    manifestFetcher.disable();
  }
  if (enabledTrack.isAdaptive()) {
    adaptiveFormatEvaluator.disable();
  }
}
// SmoothStreamingTrackSelector.Output implementation.
/**
 * Selector callback: exposes one adaptive track spanning {@code trackIndices} within the
 * specified stream element.
 * <p>
 * Does nothing when no adaptive format evaluator was supplied, since adaptive selection is
 * impossible in that case.
 */
@Override
public void adaptiveTrack(SmoothStreamingManifest manifest, int element, int[] trackIndices) {
if (adaptiveFormatEvaluator == null) {
// Adaptive playback is not possible without an evaluator; expose nothing.
return;
}
MediaFormat maxHeightMediaFormat = null;
StreamElement streamElement = manifest.streamElements[element];
int maxWidth = -1;
int maxHeight = -1;
Format[] formats = new Format[trackIndices.length];
for (int i = 0; i < formats.length; i++) {
int manifestTrackIndex = trackIndices[i];
formats[i] = streamElement.tracks[manifestTrackIndex].format;
// Lazily builds and caches the MediaFormat/extractor for this manifest track.
MediaFormat mediaFormat = initManifestTrack(manifest, element, manifestTrackIndex);
// Remember the MediaFormat with the greatest height seen so far; it becomes the basis
// for the format exposed for the whole adaptive track.
if (maxHeightMediaFormat == null || mediaFormat.height > maxHeight) {
maxHeightMediaFormat = mediaFormat;
}
maxWidth = Math.max(maxWidth, mediaFormat.width);
maxHeight = Math.max(maxHeight, mediaFormat.height);
}
// Order by decreasing bandwidth, as implied by DecreasingBandwidthComparator.
Arrays.sort(formats, new DecreasingBandwidthComparator());
MediaFormat adaptiveMediaFormat = maxHeightMediaFormat.copyWithAdaptive(true);
tracks.add(new ExposedTrack(adaptiveMediaFormat, element, formats, maxWidth, maxHeight));
}
/**
 * Selector callback: exposes a single fixed (non-adaptive) track corresponding to one track
 * of one stream element in the manifest.
 */
@Override
public void fixedTrack(SmoothStreamingManifest manifest, int element, int trackIndex) {
  Format manifestFormat = manifest.streamElements[element].tracks[trackIndex].format;
  // Ensure the MediaFormat and extractor wrapper for this manifest track are built and cached.
  MediaFormat trackMediaFormat = initManifestTrack(manifest, element, trackIndex);
  tracks.add(new ExposedTrack(trackMediaFormat, element, manifestFormat));
}
// Private methods.
/**
 * Initializes the {@link MediaFormat} and {@link ChunkExtractorWrapper} for a single manifest
 * track, caching both under the key produced by {@code getManifestTrackKey(elementIndex,
 * trackIndex)}. Subsequent calls for the same (element, track) pair return the cached format
 * without rebuilding.
 *
 * @param manifest The manifest being processed.
 * @param elementIndex The index of the stream element containing the track.
 * @param trackIndex The index of the track within the element.
 * @return The {@link MediaFormat} for the track.
 * @throws IllegalStateException If the element type is not video, audio or text.
 */
private MediaFormat initManifestTrack(SmoothStreamingManifest manifest, int elementIndex,
int trackIndex) {
int manifestTrackKey = getManifestTrackKey(elementIndex, trackIndex);
MediaFormat mediaFormat = mediaFormats.get(manifestTrackKey);
if (mediaFormat != null) {
// Already initialized.
return mediaFormat;
}
// Build the media format.
long durationUs = manifest.durationUs;
StreamElement element = manifest.streamElements[elementIndex];
Format format = element.tracks[trackIndex].format;
byte[][] csdArray = element.tracks[trackIndex].csd;
int mp4TrackType;
switch (element.type) {
case StreamElement.TYPE_VIDEO:
mediaFormat = MediaFormat.createVideoFormat(format.mimeType, MediaFormat.NO_VALUE,
durationUs, format.width, format.height, 0, Arrays.asList(csdArray));
mp4TrackType = Track.TYPE_vide;
break;
case StreamElement.TYPE_AUDIO:
List<byte[]> csd;
if (csdArray != null) {
csd = Arrays.asList(csdArray);
} else {
// No codec specific data in the manifest; synthesize an AAC AudioSpecificConfig from
// the declared sample rate and channel count.
csd = Collections.singletonList(CodecSpecificDataUtil.buildAacAudioSpecificConfig(
format.audioSamplingRate, format.audioChannels));
}
mediaFormat = MediaFormat.createAudioFormat(format.mimeType, MediaFormat.NO_VALUE,
durationUs, format.audioChannels, format.audioSamplingRate, csd);
mp4TrackType = Track.TYPE_soun;
break;
case StreamElement.TYPE_TEXT:
mediaFormat = MediaFormat.createTextFormat(format.mimeType, format.language, durationUs);
mp4TrackType = Track.TYPE_text;
break;
default:
throw new IllegalStateException("Invalid type: " + element.type);
}
// Build the extractor.
FragmentedMp4Extractor mp4Extractor = new FragmentedMp4Extractor(
FragmentedMp4Extractor.WORKAROUND_EVERY_VIDEO_FRAME_IS_SYNC_FRAME);
Track mp4Track = new Track(trackIndex, mp4TrackType, element.timescale, durationUs, mediaFormat,
trackEncryptionBoxes, mp4TrackType == Track.TYPE_vide ? 4 : -1);
mp4Extractor.setTrack(mp4Track);
// Store the format and a wrapper around the extractor.
mediaFormats.put(manifestTrackKey, mediaFormat);
extractorWrappers.put(manifestTrackKey, new ChunkExtractorWrapper(mp4Extractor));
return mediaFormat;
}
/**
* For live playbacks, determines the seek position that snaps playback to be
* {@link #liveEdgeLatencyUs} behind the live edge of the current manifest
* {@code liveEdgeLatencyUs} behind the live edge of the provided manifest.
*
* @param manifest The manifest.
* @param liveEdgeLatencyUs The live edge latency, in microseconds.
* @return The seek position in microseconds.
*/
private long getLiveSeekPosition() {
private static long getLiveSeekPosition(SmoothStreamingManifest manifest,
long liveEdgeLatencyUs) {
long liveEdgeTimestampUs = Long.MIN_VALUE;
for (int i = 0; i < currentManifest.streamElements.length; i++) {
StreamElement streamElement = currentManifest.streamElements[i];
for (int i = 0; i < manifest.streamElements.length; i++) {
StreamElement streamElement = manifest.streamElements[i];
if (streamElement.chunkCount > 0) {
long elementLiveEdgeTimestampUs =
streamElement.getStartTimeUs(streamElement.chunkCount - 1)
@ -373,12 +464,8 @@ public class SmoothStreamingChunkSource implements ChunkSource {
return liveEdgeTimestampUs - liveEdgeLatencyUs;
}
private StreamElement getElement(SmoothStreamingManifest manifest) {
return manifest.streamElements[streamElementIndex];
}
private int getTrackIndex(Format format) {
TrackElement[] tracks = currentManifest.streamElements[streamElementIndex].tracks;
private static int getManifestTrackIndex(StreamElement element, Format format) {
TrackElement[] tracks = element.tracks;
for (int i = 0; i < tracks.length; i++) {
if (tracks[i].format.equals(format)) {
return i;
@ -388,31 +475,6 @@ public class SmoothStreamingChunkSource implements ChunkSource {
throw new IllegalStateException("Invalid format: " + format);
}
/**
 * Builds a {@link MediaFormat} describing the specified track of the specified stream element.
 *
 * @param streamElement The element containing the track.
 * @param trackIndex The index of the track within the element.
 * @return The built format, or null if the element type is not video, audio or text.
 */
private static MediaFormat getMediaFormat(StreamElement streamElement, int trackIndex) {
  TrackElement track = streamElement.tracks[trackIndex];
  Format format = track.format;
  switch (streamElement.type) {
    case StreamElement.TYPE_VIDEO:
      return MediaFormat.createVideoFormat(format.mimeType, MediaFormat.NO_VALUE,
          format.width, format.height, Arrays.asList(track.csd));
    case StreamElement.TYPE_AUDIO:
      // Use codec specific data from the manifest when present, otherwise synthesize an AAC
      // AudioSpecificConfig from the declared sample rate and channel count.
      List<byte[]> csd = track.csd != null
          ? Arrays.asList(track.csd)
          : Collections.singletonList(CodecSpecificDataUtil.buildAacAudioSpecificConfig(
              format.audioSamplingRate, format.audioChannels));
      return MediaFormat.createAudioFormat(format.mimeType, MediaFormat.NO_VALUE,
          format.audioChannels, format.audioSamplingRate, csd);
    case StreamElement.TYPE_TEXT:
      return MediaFormat.createTextFormat(format.mimeType, format.language);
    default:
      return null;
  }
}
private static MediaChunk newMediaChunk(Format formatInfo, Uri uri, String cacheKey,
ChunkExtractorWrapper extractorWrapper, DrmInitData drmInitData, DataSource dataSource,
int chunkIndex, boolean isLast, long chunkStartTimeUs, long chunkEndTimeUs,
@ -426,7 +488,12 @@ public class SmoothStreamingChunkSource implements ChunkSource {
drmInitData, true);
}
private static byte[] getKeyId(byte[] initData) {
/**
 * Combines an element index and a track index into a single integer key uniquely identifying
 * a manifest track: the element index occupies the upper 16 bits, the track index the lower
 * 16 bits. Each index must therefore fit in 16 bits (at most 65535).
 *
 * @throws IllegalStateException If either index does not fit in 16 bits.
 */
private static int getManifestTrackKey(int elementIndex, int trackIndex) {
  // Fix: bounds must be strictly less than 2^16. With the previous "<= 65536" check, an
  // elementIndex of 65536 would be shifted out entirely (65536 << 16 wraps to 0) and a
  // trackIndex of 65536 would collide with (elementIndex + 1, 0).
  Assertions.checkState(elementIndex < 65536 && trackIndex < 65536);
  return (elementIndex << 16) | trackIndex;
}
private static byte[] getProtectionElementKeyId(byte[] initData) {
StringBuilder initDataStringBuilder = new StringBuilder();
for (int i = 0; i < initData.length; i += 2) {
initDataStringBuilder.append((char) initData[i]);
@ -448,4 +515,45 @@ public class SmoothStreamingChunkSource implements ChunkSource {
data[secondPosition] = temp;
}
// Private classes.
/**
 * A track exposed by this source: either a single fixed-format track, or an adaptive group of
 * formats, all belonging to one stream element of the manifest. Exactly one of
 * {@code fixedFormat} and {@code adaptiveFormats} is non-null.
 */
private static final class ExposedTrack {
// The format exposed to the player for this track.
public final MediaFormat format;
// Index of the stream element in the manifest that this track belongs to.
private final int elementIndex;
// Non-adaptive track variables.
private final Format fixedFormat;
// Adaptive track variables.
private final Format[] adaptiveFormats;
private final int adaptiveMaxWidth;
private final int adaptiveMaxHeight;
/**
 * Constructs a fixed (non-adaptive) track exposing a single format. Adaptive fields are
 * left unset (null / -1).
 */
public ExposedTrack(MediaFormat format, int elementIndex, Format fixedFormat) {
this.format = format;
this.elementIndex = elementIndex;
this.fixedFormat = fixedFormat;
this.adaptiveFormats = null;
this.adaptiveMaxWidth = -1;
this.adaptiveMaxHeight = -1;
}
/**
 * Constructs an adaptive track spanning the given formats, recording the maximum video
 * dimensions across them. The fixed format is left unset (null).
 */
public ExposedTrack(MediaFormat format, int elementIndex, Format[] adaptiveFormats,
int maxWidth, int maxHeight) {
this.format = format;
this.elementIndex = elementIndex;
this.adaptiveFormats = adaptiveFormats;
this.adaptiveMaxWidth = maxWidth;
this.adaptiveMaxHeight = maxHeight;
this.fixedFormat = null;
}
/** Whether this is an adaptive track (i.e. was built from multiple formats). */
public boolean isAdaptive() {
return adaptiveFormats != null;
}
}
}

View file

@ -0,0 +1,59 @@
/*
* Copyright (C) 2014 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.google.android.exoplayer.smoothstreaming;
import java.io.IOException;
/**
* Specifies a track selection from a {@link SmoothStreamingManifest}.
*/
public interface SmoothStreamingTrackSelector {
/**
 * Defines a selector output.
 */
interface Output {
/**
 * Outputs an adaptive track, covering the specified tracks in the specified element.
 *
 * @param manifest The manifest being processed.
 * @param element The index of the element within which the adaptive tracks are located.
 * @param tracks The indices of the tracks within the element.
 */
void adaptiveTrack(SmoothStreamingManifest manifest, int element, int[] tracks);
/**
 * Outputs a fixed track corresponding to the specified track in the specified element.
 *
 * @param manifest The manifest being processed.
 * @param element The index of the element within which the track is located.
 * @param track The index of the track within the element.
 */
void fixedTrack(SmoothStreamingManifest manifest, int element, int track);
}
/**
 * Outputs a track selection for a given manifest.
 *
 * @param manifest The manifest to process.
 * @param output The output to receive tracks.
 * @throws IOException If an error occurs processing the manifest.
 */
void selectTracks(SmoothStreamingManifest manifest, Output output) throws IOException;
}