Merge pull request #503 from google/dev

dev -> dev-webm-vp9-opus
This commit is contained in:
ojw28 2015-05-28 17:21:08 +01:00
commit e34ef40188
31 changed files with 787 additions and 1035 deletions

View file

@ -1,102 +0,0 @@
/*
* Copyright (C) 2014 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.google.android.exoplayer.demo;
import java.io.BufferedInputStream;
import java.io.BufferedOutputStream;
import java.io.ByteArrayOutputStream;
import java.io.IOException;
import java.io.InputStream;
import java.io.OutputStream;
import java.net.CookieHandler;
import java.net.CookieManager;
import java.net.CookiePolicy;
import java.net.HttpURLConnection;
import java.net.URL;
import java.util.Map;
/**
* Utility methods for the demo application.
*/
public class DemoUtil {
public static final int TYPE_DASH = 0;
public static final int TYPE_SS = 1;
public static final int TYPE_HLS = 2;
public static final int TYPE_MP4 = 3;
public static final int TYPE_MP3 = 4;
public static final int TYPE_M4A = 5;
public static final int TYPE_WEBM = 6;
public static final int TYPE_TS = 7;
public static final int TYPE_AAC = 8;
private static final CookieManager defaultCookieManager;
static {
defaultCookieManager = new CookieManager();
defaultCookieManager.setCookiePolicy(CookiePolicy.ACCEPT_ORIGINAL_SERVER);
}
public static byte[] executePost(String url, byte[] data, Map<String, String> requestProperties)
throws IOException {
HttpURLConnection urlConnection = null;
try {
urlConnection = (HttpURLConnection) new URL(url).openConnection();
urlConnection.setRequestMethod("POST");
urlConnection.setDoOutput(data != null);
urlConnection.setDoInput(true);
if (requestProperties != null) {
for (Map.Entry<String, String> requestProperty : requestProperties.entrySet()) {
urlConnection.setRequestProperty(requestProperty.getKey(), requestProperty.getValue());
}
}
if (data != null) {
OutputStream out = new BufferedOutputStream(urlConnection.getOutputStream());
out.write(data);
out.close();
}
InputStream in = new BufferedInputStream(urlConnection.getInputStream());
return convertInputStreamToByteArray(in);
} finally {
if (urlConnection != null) {
urlConnection.disconnect();
}
}
}
private static byte[] convertInputStreamToByteArray(InputStream inputStream) throws IOException {
byte[] bytes = null;
ByteArrayOutputStream bos = new ByteArrayOutputStream();
byte data[] = new byte[1024];
int count;
while ((count = inputStream.read(data)) != -1) {
bos.write(data, 0, count);
}
bos.flush();
bos.close();
inputStream.close();
bytes = bos.toByteArray();
return bytes;
}
public static void setDefaultCookieManager() {
CookieHandler currentHandler = CookieHandler.getDefault();
if (currentHandler != defaultCookieManager) {
CookieHandler.setDefault(defaultCookieManager);
}
}
}

View file

@ -25,7 +25,7 @@ import com.google.android.exoplayer.demo.player.DemoPlayer.RendererBuilder;
import com.google.android.exoplayer.demo.player.ExtractorRendererBuilder;
import com.google.android.exoplayer.demo.player.HlsRendererBuilder;
import com.google.android.exoplayer.demo.player.SmoothStreamingRendererBuilder;
import com.google.android.exoplayer.demo.player.UnsupportedDrmException;
import com.google.android.exoplayer.drm.UnsupportedDrmException;
import com.google.android.exoplayer.extractor.mp3.Mp3Extractor;
import com.google.android.exoplayer.extractor.mp4.Mp4Extractor;
import com.google.android.exoplayer.extractor.ts.AdtsExtractor;
@ -37,6 +37,7 @@ import com.google.android.exoplayer.metadata.TxxxMetadata;
import com.google.android.exoplayer.text.CaptionStyleCompat;
import com.google.android.exoplayer.text.Cue;
import com.google.android.exoplayer.text.SubtitleLayout;
import com.google.android.exoplayer.util.DebugTextViewHelper;
import com.google.android.exoplayer.util.Util;
import com.google.android.exoplayer.util.VerboseLogUtil;
@ -65,6 +66,9 @@ import android.widget.PopupMenu.OnMenuItemClickListener;
import android.widget.TextView;
import android.widget.Toast;
import java.net.CookieHandler;
import java.net.CookieManager;
import java.net.CookiePolicy;
import java.util.List;
import java.util.Map;
@ -75,14 +79,30 @@ public class PlayerActivity extends Activity implements SurfaceHolder.Callback,
DemoPlayer.Listener, DemoPlayer.CaptionListener, DemoPlayer.Id3MetadataListener,
AudioCapabilitiesReceiver.Listener {
public static final int TYPE_DASH = 0;
public static final int TYPE_SS = 1;
public static final int TYPE_HLS = 2;
public static final int TYPE_MP4 = 3;
public static final int TYPE_MP3 = 4;
public static final int TYPE_FMP4 = 5;
public static final int TYPE_WEBM = 6;
public static final int TYPE_TS = 7;
public static final int TYPE_AAC = 8;
public static final int TYPE_M4A = 9;
public static final String CONTENT_TYPE_EXTRA = "content_type";
public static final String CONTENT_ID_EXTRA = "content_id";
private static final String TAG = "PlayerActivity";
private static final int MENU_GROUP_TRACKS = 1;
private static final int ID_OFFSET = 2;
private static final CookieManager defaultCookieManager;
static {
defaultCookieManager = new CookieManager();
defaultCookieManager.setCookiePolicy(CookiePolicy.ACCEPT_ORIGINAL_SERVER);
}
private EventLogger eventLogger;
private MediaController mediaController;
private View debugRootView;
@ -97,6 +117,7 @@ public class PlayerActivity extends Activity implements SurfaceHolder.Callback,
private Button retryButton;
private DemoPlayer player;
private DebugTextViewHelper debugViewHelper;
private boolean playerNeedsPrepare;
private long playerPosition;
@ -162,7 +183,10 @@ public class PlayerActivity extends Activity implements SurfaceHolder.Callback,
audioButton = (Button) findViewById(R.id.audio_controls);
textButton = (Button) findViewById(R.id.text_controls);
DemoUtil.setDefaultCookieManager();
CookieHandler currentHandler = CookieHandler.getDefault();
if (currentHandler != defaultCookieManager) {
CookieHandler.setDefault(defaultCookieManager);
}
}
@Override
@ -220,31 +244,26 @@ public class PlayerActivity extends Activity implements SurfaceHolder.Callback,
private RendererBuilder getRendererBuilder() {
String userAgent = Util.getUserAgent(this, "ExoPlayerDemo");
switch (contentType) {
case DemoUtil.TYPE_SS:
case TYPE_SS:
return new SmoothStreamingRendererBuilder(this, userAgent, contentUri.toString(),
new SmoothStreamingTestMediaDrmCallback(), debugTextView);
case DemoUtil.TYPE_DASH:
new SmoothStreamingTestMediaDrmCallback());
case TYPE_DASH:
return new DashRendererBuilder(this, userAgent, contentUri.toString(),
new WidevineTestMediaDrmCallback(contentId), debugTextView, audioCapabilities);
case DemoUtil.TYPE_HLS:
return new HlsRendererBuilder(this, userAgent, contentUri.toString(), debugTextView,
audioCapabilities);
case DemoUtil.TYPE_M4A: // There are no file format differences between M4A and MP4.
case DemoUtil.TYPE_MP4:
return new ExtractorRendererBuilder(this, userAgent, contentUri, debugTextView,
new Mp4Extractor());
case DemoUtil.TYPE_MP3:
return new ExtractorRendererBuilder(this, userAgent, contentUri, debugTextView,
new Mp3Extractor());
case DemoUtil.TYPE_TS:
return new ExtractorRendererBuilder(this, userAgent, contentUri, debugTextView,
new WidevineTestMediaDrmCallback(contentId), audioCapabilities);
case TYPE_HLS:
return new HlsRendererBuilder(this, userAgent, contentUri.toString(), audioCapabilities);
case TYPE_M4A: // There are no file format differences between M4A and MP4.
case TYPE_MP4:
return new ExtractorRendererBuilder(this, userAgent, contentUri, new Mp4Extractor());
case TYPE_MP3:
return new ExtractorRendererBuilder(this, userAgent, contentUri, new Mp3Extractor());
case TYPE_TS:
return new ExtractorRendererBuilder(this, userAgent, contentUri,
new TsExtractor(0, audioCapabilities));
case DemoUtil.TYPE_AAC:
return new ExtractorRendererBuilder(this, userAgent, contentUri, debugTextView,
new AdtsExtractor());
case DemoUtil.TYPE_WEBM:
return new ExtractorRendererBuilder(this, userAgent, contentUri, debugTextView,
new WebmExtractor());
case TYPE_AAC:
return new ExtractorRendererBuilder(this, userAgent, contentUri, new AdtsExtractor());
case TYPE_WEBM:
return new ExtractorRendererBuilder(this, userAgent, contentUri, new WebmExtractor());
default:
throw new IllegalStateException("Unsupported type: " + contentType);
}
@ -265,6 +284,8 @@ public class PlayerActivity extends Activity implements SurfaceHolder.Callback,
player.addListener(eventLogger);
player.setInfoListener(eventLogger);
player.setInternalErrorListener(eventLogger);
debugViewHelper = new DebugTextViewHelper(player, debugTextView);
debugViewHelper.start();
}
if (playerNeedsPrepare) {
player.prepare();
@ -277,6 +298,8 @@ public class PlayerActivity extends Activity implements SurfaceHolder.Callback,
private void releasePlayer() {
if (player != null) {
debugViewHelper.stop();
debugViewHelper = null;
playerPosition = player.getCurrentPosition();
player.release();
player = null;
@ -322,11 +345,9 @@ public class PlayerActivity extends Activity implements SurfaceHolder.Callback,
if (e instanceof UnsupportedDrmException) {
// Special case DRM failures.
UnsupportedDrmException unsupportedDrmException = (UnsupportedDrmException) e;
int stringId = unsupportedDrmException.reason == UnsupportedDrmException.REASON_NO_DRM
? R.string.drm_error_not_supported
int stringId = Util.SDK_INT < 18 ? R.string.drm_error_not_supported
: unsupportedDrmException.reason == UnsupportedDrmException.REASON_UNSUPPORTED_SCHEME
? R.string.drm_error_unsupported_scheme
: R.string.drm_error_unknown;
? R.string.drm_error_unsupported_scheme : R.string.drm_error_unknown;
Toast.makeText(getApplicationContext(), stringId, Toast.LENGTH_LONG).show();
}
playerNeedsPrepare = true;

View file

@ -47,12 +47,12 @@ import java.util.Locale;
"http://www.youtube.com/api/manifest/dash/id/bf5bb2419360daf1/source/youtube?"
+ "as=fmp4_audio_clear,fmp4_sd_hd_clear&sparams=ip,ipbits,expire,source,id,as&ip=0.0.0.0&"
+ "ipbits=0&expire=19000000000&signature=51AF5F39AB0CEC3E5497CD9C900EBFEAECCCB5C7."
+ "8506521BFC350652163895D4C26DEE124209AA9E&key=ik0", DemoUtil.TYPE_DASH),
+ "8506521BFC350652163895D4C26DEE124209AA9E&key=ik0", PlayerActivity.TYPE_DASH),
new Sample("Google Play",
"http://www.youtube.com/api/manifest/dash/id/3aa39fa2cc27967f/source/youtube?"
+ "as=fmp4_audio_clear,fmp4_sd_hd_clear&sparams=ip,ipbits,expire,source,id,as&ip=0.0.0.0&"
+ "ipbits=0&expire=19000000000&signature=A2716F75795F5D2AF0E88962FFCD10DB79384F29."
+ "84308FF04844498CE6FBCE4731507882B8307798&key=ik0", DemoUtil.TYPE_DASH),
+ "84308FF04844498CE6FBCE4731507882B8307798&key=ik0", PlayerActivity.TYPE_DASH),
};
public static final Sample[] YOUTUBE_DASH_WEBM = new Sample[] {
@ -60,21 +60,21 @@ import java.util.Locale;
"http://www.youtube.com/api/manifest/dash/id/bf5bb2419360daf1/source/youtube?"
+ "as=fmp4_audio_clear,webm2_sd_hd_clear&sparams=ip,ipbits,expire,source,id,as&ip=0.0.0.0&"
+ "ipbits=0&expire=19000000000&signature=249B04F79E984D7F86B4D8DB48AE6FAF41C17AB3."
+ "7B9F0EC0505E1566E59B8E488E9419F253DDF413&key=ik0", DemoUtil.TYPE_DASH),
+ "7B9F0EC0505E1566E59B8E488E9419F253DDF413&key=ik0", PlayerActivity.TYPE_DASH),
new Sample("Google Play",
"http://www.youtube.com/api/manifest/dash/id/3aa39fa2cc27967f/source/youtube?"
+ "as=fmp4_audio_clear,webm2_sd_hd_clear&sparams=ip,ipbits,expire,source,id,as&ip=0.0.0.0&"
+ "ipbits=0&expire=19000000000&signature=B1C2A74783AC1CC4865EB312D7DD2D48230CC9FD."
+ "BD153B9882175F1F94BFE5141A5482313EA38E8D&key=ik0", DemoUtil.TYPE_DASH),
+ "BD153B9882175F1F94BFE5141A5482313EA38E8D&key=ik0", PlayerActivity.TYPE_DASH),
};
public static final Sample[] SMOOTHSTREAMING = new Sample[] {
new Sample("Super speed",
"http://playready.directtaps.net/smoothstreaming/SSWSS720H264/SuperSpeedway_720.ism",
DemoUtil.TYPE_SS),
PlayerActivity.TYPE_SS),
new Sample("Super speed (PlayReady)",
"http://playready.directtaps.net/smoothstreaming/SSWSS720H264PR/SuperSpeedway_720.ism",
DemoUtil.TYPE_SS),
PlayerActivity.TYPE_SS),
};
public static final Sample[] WIDEVINE_GTS = new Sample[] {
@ -82,72 +82,72 @@ import java.util.Locale;
"http://www.youtube.com/api/manifest/dash/id/d286538032258a1c/source/youtube?"
+ "as=fmp4_audio_cenc,fmp4_sd_hd_cenc&sparams=ip,ipbits,expire,source,id,as&ip=0.0.0.0"
+ "&ipbits=0&expire=19000000000&signature=477CF7D478BE26C205045D507E9358F85F84C065."
+ "8971631EB657BC33EC2F48A2FF4211956760C3E9&key=ik0", DemoUtil.TYPE_DASH),
+ "8971631EB657BC33EC2F48A2FF4211956760C3E9&key=ik0", PlayerActivity.TYPE_DASH),
new Sample("WV: HDCP not required", "48fcc369939ac96c",
"http://www.youtube.com/api/manifest/dash/id/48fcc369939ac96c/source/youtube?"
+ "as=fmp4_audio_cenc,fmp4_sd_hd_cenc&sparams=ip,ipbits,expire,source,id,as&ip=0.0.0.0"
+ "&ipbits=0&expire=19000000000&signature=171DAE48D00B5BE7434BC1A9F84DAE0463C7EA7A."
+ "0925B4DBB5605BEE9F5D088C48F25F5108E96191&key=ik0", DemoUtil.TYPE_DASH),
+ "0925B4DBB5605BEE9F5D088C48F25F5108E96191&key=ik0", PlayerActivity.TYPE_DASH),
new Sample("WV: HDCP required", "e06c39f1151da3df",
"http://www.youtube.com/api/manifest/dash/id/e06c39f1151da3df/source/youtube?"
+ "as=fmp4_audio_cenc,fmp4_sd_hd_cenc&sparams=ip,ipbits,expire,source,id,as&ip=0.0.0.0"
+ "&ipbits=0&expire=19000000000&signature=8D3B8AF4E3F72B7F127C8D0D39B7AFCF37B30519."
+ "A118BADEBF3582AD2CC257B0EE6E579C6955D8AA&key=ik0", DemoUtil.TYPE_DASH),
+ "A118BADEBF3582AD2CC257B0EE6E579C6955D8AA&key=ik0", PlayerActivity.TYPE_DASH),
new Sample("WV: Secure video path required", "0894c7c8719b28a0",
"http://www.youtube.com/api/manifest/dash/id/0894c7c8719b28a0/source/youtube?"
+ "as=fmp4_audio_cenc,fmp4_sd_hd_cenc&sparams=ip,ipbits,expire,source,id,as&ip=0.0.0.0"
+ "&ipbits=0&expire=19000000000&signature=A41D835C7387885A4A820628F57E481E00095931."
+ "9D50DBEEB5E37344647EE11BDA129A7FCDE8B7B9&key=ik0", DemoUtil.TYPE_DASH),
+ "9D50DBEEB5E37344647EE11BDA129A7FCDE8B7B9&key=ik0", PlayerActivity.TYPE_DASH),
new Sample("WV: HDCP + secure video path required", "efd045b1eb61888a",
"http://www.youtube.com/api/manifest/dash/id/efd045b1eb61888a/source/youtube"
+ "as=fmp4_audio_cenc,fmp4_sd_hd_cenc&sparams=ip,ipbits,expire,source,id,as&ip=0.0.0.0"
+ "&ipbits=0&expire=19000000000&signature=A97C9032C9D0C74F1643DB17C178873887C229E4."
+ "0A657BF6F23C8BC1538F276137383478330B76DE&key=ik0", DemoUtil.TYPE_DASH),
+ "0A657BF6F23C8BC1538F276137383478330B76DE&key=ik0", PlayerActivity.TYPE_DASH),
new Sample("WV: 30s license duration (fails at ~30s)", "f9a34cab7b05881a",
"http://www.youtube.com/api/manifest/dash/id/f9a34cab7b05881a/source/youtube?"
+ "as=fmp4_audio_cenc,fmp4_sd_hd_cenc&sparams=ip,ipbits,expire,source,id,as&ip=0.0.0.0"
+ "&ipbits=0&expire=19000000000&signature=80648A12A7D5FC1FA02B52B4250E4EB74CF0C5FD."
+ "66A261130CA137AA5C541EA9CED2DBF240829EE6&key=ik0", DemoUtil.TYPE_DASH),
+ "66A261130CA137AA5C541EA9CED2DBF240829EE6&key=ik0", PlayerActivity.TYPE_DASH),
};
public static final Sample[] HLS = new Sample[] {
new Sample("Apple master playlist",
"https://devimages.apple.com.edgekey.net/streaming/examples/bipbop_4x3/"
+ "bipbop_4x3_variant.m3u8", DemoUtil.TYPE_HLS),
+ "bipbop_4x3_variant.m3u8", PlayerActivity.TYPE_HLS),
new Sample("Apple master playlist advanced",
"https://devimages.apple.com.edgekey.net/streaming/examples/bipbop_16x9/"
+ "bipbop_16x9_variant.m3u8", DemoUtil.TYPE_HLS),
+ "bipbop_16x9_variant.m3u8", PlayerActivity.TYPE_HLS),
new Sample("Apple TS media playlist",
"https://devimages.apple.com.edgekey.net/streaming/examples/bipbop_4x3/gear1/"
+ "prog_index.m3u8", DemoUtil.TYPE_HLS),
+ "prog_index.m3u8", PlayerActivity.TYPE_HLS),
new Sample("Apple AAC media playlist",
"https://devimages.apple.com.edgekey.net/streaming/examples/bipbop_4x3/gear0/"
+ "prog_index.m3u8", DemoUtil.TYPE_HLS),
+ "prog_index.m3u8", PlayerActivity.TYPE_HLS),
new Sample("Apple ID3 metadata", "http://devimages.apple.com/samplecode/adDemo/ad.m3u8",
DemoUtil.TYPE_HLS),
PlayerActivity.TYPE_HLS),
};
public static final Sample[] MISC = new Sample[] {
new Sample("Dizzy", "http://html5demos.com/assets/dizzy.mp4",
DemoUtil.TYPE_MP4),
PlayerActivity.TYPE_MP4),
new Sample("Apple AAC 10s", "https://devimages.apple.com.edgekey.net/"
+ "streaming/examples/bipbop_4x3/gear0/fileSequence0.aac",
DemoUtil.TYPE_AAC),
PlayerActivity.TYPE_AAC),
new Sample("Apple TS 10s", "https://devimages.apple.com.edgekey.net/streaming/examples/"
+ "bipbop_4x3/gear1/fileSequence0.ts",
DemoUtil.TYPE_TS),
PlayerActivity.TYPE_TS),
new Sample("Big Buck Bunny (MP4 Video)",
"http://redirector.c.youtube.com/videoplayback?id=604ed5ce52eda7ee&itag=22&source=youtube&"
+ "sparams=ip,ipbits,expire,source,id&ip=0.0.0.0&ipbits=0&expire=19000000000&signature="
+ "513F28C7FDCBEC60A66C86C9A393556C99DC47FB.04C88036EEE12565A1ED864A875A58F15D8B5300"
+ "&key=ik0",
DemoUtil.TYPE_MP4),
PlayerActivity.TYPE_MP4),
new Sample("Google Play (MP3 Audio)",
"http://storage.googleapis.com/exoplayer-test-media-0/play.mp3",
DemoUtil.TYPE_MP3),
PlayerActivity.TYPE_MP3),
new Sample("Google Glass (WebM Video with Vorbis Audio)",
"http://demos.webmproject.org/exoplayer/glass_vp9_vorbis.webm",
DemoUtil.TYPE_WEBM),
PlayerActivity.TYPE_WEBM),
};
private Samples() {}

View file

@ -17,6 +17,7 @@ package com.google.android.exoplayer.demo;
import com.google.android.exoplayer.drm.MediaDrmCallback;
import com.google.android.exoplayer.drm.StreamingDrmSessionManager;
import com.google.android.exoplayer.util.Util;
import android.annotation.TargetApi;
import android.media.MediaDrm.KeyRequest;
@ -48,7 +49,7 @@ public class SmoothStreamingTestMediaDrmCallback implements MediaDrmCallback {
@Override
public byte[] executeProvisionRequest(UUID uuid, ProvisionRequest request) throws IOException {
String url = request.getDefaultUrl() + "&signedRequest=" + new String(request.getData());
return DemoUtil.executePost(url, null, null);
return Util.executePost(url, null, null);
}
@Override
@ -57,7 +58,7 @@ public class SmoothStreamingTestMediaDrmCallback implements MediaDrmCallback {
if (TextUtils.isEmpty(url)) {
url = PLAYREADY_TEST_DEFAULT_URI;
}
return DemoUtil.executePost(url, request.getData(), KEY_REQUEST_PROPERTIES);
return Util.executePost(url, request.getData(), KEY_REQUEST_PROPERTIES);
}
}

View file

@ -16,6 +16,7 @@
package com.google.android.exoplayer.demo;
import com.google.android.exoplayer.drm.MediaDrmCallback;
import com.google.android.exoplayer.util.Util;
import android.annotation.TargetApi;
import android.media.MediaDrm.KeyRequest;
@ -43,7 +44,7 @@ public class WidevineTestMediaDrmCallback implements MediaDrmCallback {
@Override
public byte[] executeProvisionRequest(UUID uuid, ProvisionRequest request) throws IOException {
String url = request.getDefaultUrl() + "&signedRequest=" + new String(request.getData());
return DemoUtil.executePost(url, null, null);
return Util.executePost(url, null, null);
}
@Override
@ -52,7 +53,7 @@ public class WidevineTestMediaDrmCallback implements MediaDrmCallback {
if (TextUtils.isEmpty(url)) {
url = defaultUri;
}
return DemoUtil.executePost(url, request.getData(), null);
return Util.executePost(url, request.getData(), null);
}
}

View file

@ -42,9 +42,9 @@ import com.google.android.exoplayer.dash.mpd.UtcTimingElementResolver;
import com.google.android.exoplayer.dash.mpd.UtcTimingElementResolver.UtcTimingCallback;
import com.google.android.exoplayer.demo.player.DemoPlayer.RendererBuilder;
import com.google.android.exoplayer.demo.player.DemoPlayer.RendererBuilderCallback;
import com.google.android.exoplayer.drm.DrmSessionManager;
import com.google.android.exoplayer.drm.MediaDrmCallback;
import com.google.android.exoplayer.drm.StreamingDrmSessionManager;
import com.google.android.exoplayer.drm.UnsupportedDrmException;
import com.google.android.exoplayer.text.TextTrackRenderer;
import com.google.android.exoplayer.text.ttml.TtmlParser;
import com.google.android.exoplayer.text.webvtt.WebvttParser;
@ -57,14 +57,10 @@ import com.google.android.exoplayer.util.ManifestFetcher;
import com.google.android.exoplayer.util.ManifestFetcher.ManifestCallback;
import com.google.android.exoplayer.util.Util;
import android.annotation.TargetApi;
import android.content.Context;
import android.media.MediaCodec;
import android.media.UnsupportedSchemeException;
import android.os.Handler;
import android.util.Log;
import android.util.Pair;
import android.widget.TextView;
import java.io.IOException;
import java.util.ArrayList;
@ -104,7 +100,6 @@ public class DashRendererBuilder implements RendererBuilder,
private final String userAgent;
private final String url;
private final MediaDrmCallback drmCallback;
private final TextView debugTextView;
private final AudioCapabilities audioCapabilities;
private DemoPlayer player;
@ -116,12 +111,11 @@ public class DashRendererBuilder implements RendererBuilder,
private long elapsedRealtimeOffset;
public DashRendererBuilder(Context context, String userAgent, String url,
MediaDrmCallback drmCallback, TextView debugTextView, AudioCapabilities audioCapabilities) {
MediaDrmCallback drmCallback, AudioCapabilities audioCapabilities) {
this.context = context;
this.userAgent = userAgent;
this.url = url;
this.drmCallback = drmCallback;
this.debugTextView = debugTextView;
this.audioCapabilities = audioCapabilities;
}
@ -192,20 +186,18 @@ public class DashRendererBuilder implements RendererBuilder,
// Check drm support if necessary.
boolean filterHdContent = false;
DrmSessionManager drmSessionManager = null;
StreamingDrmSessionManager drmSessionManager = null;
if (hasContentProtection) {
if (Util.SDK_INT < 18) {
callback.onRenderersError(
new UnsupportedDrmException(UnsupportedDrmException.REASON_NO_DRM));
new UnsupportedDrmException(UnsupportedDrmException.REASON_UNSUPPORTED_SCHEME));
return;
}
try {
Pair<DrmSessionManager, Boolean> drmSessionManagerData =
V18Compat.getDrmSessionManagerData(player, drmCallback);
drmSessionManager = drmSessionManagerData.first;
// HD streams require L1 security.
drmSessionManager = StreamingDrmSessionManager.newWidevineInstance(
player.getPlaybackLooper(), drmCallback, null, player.getMainHandler(), player);
filterHdContent = videoAdaptationSet != null && videoAdaptationSet.hasContentProtection()
&& !drmSessionManagerData.second;
&& getWidevineSecurityLevel(drmSessionManager) != SECURITY_LEVEL_1;
} catch (UnsupportedDrmException e) {
callback.onRenderersError(e);
return;
@ -226,23 +218,18 @@ public class DashRendererBuilder implements RendererBuilder,
// Build the video renderer.
final MediaCodecVideoTrackRenderer videoRenderer;
final TrackRenderer debugRenderer;
if (videoRepresentationIndices == null || videoRepresentationIndices.length == 0) {
videoRenderer = null;
debugRenderer = null;
} else {
DataSource videoDataSource = new DefaultUriDataSource(context, bandwidthMeter, userAgent);
ChunkSource videoChunkSource = new DashChunkSource(manifestFetcher,
videoAdaptationSetIndex, videoRepresentationIndices, videoDataSource,
new AdaptiveEvaluator(bandwidthMeter), LIVE_EDGE_LATENCY_MS, elapsedRealtimeOffset,
mainHandler, player);
new AdaptiveEvaluator(bandwidthMeter), LIVE_EDGE_LATENCY_MS, elapsedRealtimeOffset);
ChunkSampleSource videoSampleSource = new ChunkSampleSource(videoChunkSource, loadControl,
VIDEO_BUFFER_SEGMENTS * BUFFER_SEGMENT_SIZE, true, mainHandler, player,
DemoPlayer.TYPE_VIDEO);
videoRenderer = new MediaCodecVideoTrackRenderer(videoSampleSource, drmSessionManager, true,
MediaCodec.VIDEO_SCALING_MODE_SCALE_TO_FIT, 5000, null, mainHandler, player, 50);
debugRenderer = debugTextView != null
? new DebugTrackRenderer(debugTextView, player, videoRenderer, bandwidthMeter) : null;
}
// Build the audio chunk sources.
@ -259,7 +246,7 @@ public class DashRendererBuilder implements RendererBuilder,
format.audioSamplingRate + "Hz)");
audioChunkSourceList.add(new DashChunkSource(manifestFetcher, audioAdaptationSetIndex,
new int[] {i}, audioDataSource, audioEvaluator, LIVE_EDGE_LATENCY_MS,
elapsedRealtimeOffset, mainHandler, player));
elapsedRealtimeOffset));
codecs.add(format.codecs);
}
@ -316,8 +303,7 @@ public class DashRendererBuilder implements RendererBuilder,
Representation representation = representations.get(j);
textTrackNameList.add(representation.format.id);
textChunkSourceList.add(new DashChunkSource(manifestFetcher, i, new int[] {j},
textDataSource, textEvaluator, LIVE_EDGE_LATENCY_MS, elapsedRealtimeOffset,
mainHandler, player));
textDataSource, textEvaluator, LIVE_EDGE_LATENCY_MS, elapsedRealtimeOffset));
}
}
}
@ -355,34 +341,13 @@ public class DashRendererBuilder implements RendererBuilder,
renderers[DemoPlayer.TYPE_VIDEO] = videoRenderer;
renderers[DemoPlayer.TYPE_AUDIO] = audioRenderer;
renderers[DemoPlayer.TYPE_TEXT] = textRenderer;
renderers[DemoPlayer.TYPE_DEBUG] = debugRenderer;
callback.onRenderers(trackNames, multiTrackChunkSources, renderers);
callback.onRenderers(trackNames, multiTrackChunkSources, renderers, bandwidthMeter);
}
@TargetApi(18)
private static class V18Compat {
public static Pair<DrmSessionManager, Boolean> getDrmSessionManagerData(DemoPlayer player,
MediaDrmCallback drmCallback) throws UnsupportedDrmException {
try {
StreamingDrmSessionManager streamingDrmSessionManager =
StreamingDrmSessionManager.newWidevineInstance(player.getPlaybackLooper(), drmCallback,
null, player.getMainHandler(), player);
return Pair.create((DrmSessionManager) streamingDrmSessionManager,
getWidevineSecurityLevel(streamingDrmSessionManager) == SECURITY_LEVEL_1);
} catch (UnsupportedSchemeException e) {
throw new UnsupportedDrmException(UnsupportedDrmException.REASON_UNSUPPORTED_SCHEME);
} catch (Exception e) {
throw new UnsupportedDrmException(UnsupportedDrmException.REASON_UNKNOWN, e);
}
}
private static int getWidevineSecurityLevel(StreamingDrmSessionManager sessionManager) {
String securityLevelProperty = sessionManager.getPropertyString("securityLevel");
return securityLevelProperty.equals("L1") ? SECURITY_LEVEL_1 : securityLevelProperty
.equals("L3") ? SECURITY_LEVEL_3 : SECURITY_LEVEL_UNKNOWN;
}
private static int getWidevineSecurityLevel(StreamingDrmSessionManager sessionManager) {
String securityLevelProperty = sessionManager.getPropertyString("securityLevel");
return securityLevelProperty.equals("L1") ? SECURITY_LEVEL_1 : securityLevelProperty
.equals("L3") ? SECURITY_LEVEL_3 : SECURITY_LEVEL_UNKNOWN;
}
}

View file

@ -1,138 +0,0 @@
/*
 * Copyright (C) 2014 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package com.google.android.exoplayer.demo.player;

import com.google.android.exoplayer.ExoPlaybackException;
import com.google.android.exoplayer.MediaCodecTrackRenderer;
import com.google.android.exoplayer.TrackRenderer;
import com.google.android.exoplayer.chunk.Format;
import com.google.android.exoplayer.upstream.BandwidthMeter;

import android.widget.TextView;

/**
 * A {@link TrackRenderer} that periodically updates debugging information displayed by a
 * {@link TextView}.
 */
/* package */ class DebugTrackRenderer extends TrackRenderer implements Runnable {

  private final TextView textView;
  private final DemoPlayer player;
  private final MediaCodecTrackRenderer renderer;
  private final BandwidthMeter bandwidthMeter;

  // Set by injectFailure() on an arbitrary thread; consumed on the playback thread.
  private volatile boolean failureRequested;
  // Last position pushed to the TextView; written on the playback thread, read on the UI thread.
  private volatile long lastPositionUs;

  public DebugTrackRenderer(TextView textView, DemoPlayer player,
      MediaCodecTrackRenderer renderer) {
    this(textView, player, renderer, null);
  }

  public DebugTrackRenderer(TextView textView, DemoPlayer player, MediaCodecTrackRenderer renderer,
      BandwidthMeter bandwidthMeter) {
    this.textView = textView;
    this.player = player;
    this.renderer = renderer;
    this.bandwidthMeter = bandwidthMeter;
  }

  /** Requests that the renderer throw an {@link ExoPlaybackException} on its next work cycle. */
  public void injectFailure() {
    failureRequested = true;
  }

  @Override
  protected boolean isEnded() {
    return true;
  }

  @Override
  protected boolean isReady() {
    return true;
  }

  @Override
  protected int doPrepare(long positionUs) throws ExoPlaybackException {
    throwIfFailureRequested();
    return STATE_PREPARED;
  }

  @Override
  protected void doSomeWork(long positionUs, long elapsedRealtimeUs) throws ExoPlaybackException {
    throwIfFailureRequested();
    // Refresh the display when the position jumps backwards or advances by more than a second.
    long elapsedSinceUpdateUs = positionUs - lastPositionUs;
    if (elapsedSinceUpdateUs < 0 || elapsedSinceUpdateUs > 1000000) {
      lastPositionUs = positionUs;
      textView.post(this);
    }
  }

  @Override
  public void run() {
    // Executed on the UI thread via textView.post().
    textView.setText(getRenderString());
  }

  private String getRenderString() {
    StringBuilder builder = new StringBuilder();
    builder.append(getTimeString()).append(' ');
    builder.append(getQualityString()).append(' ');
    builder.append(getBandwidthString()).append(' ');
    builder.append(renderer.codecCounters.getDebugString());
    return builder.toString();
  }

  private String getTimeString() {
    return "ms(" + (lastPositionUs / 1000) + ")";
  }

  private String getQualityString() {
    Format format = player.getVideoFormat();
    if (format == null) {
      return "id:? br:? h:?";
    }
    return "id:" + format.id + " br:" + format.bitrate + " h:" + format.height;
  }

  private String getBandwidthString() {
    if (bandwidthMeter == null) {
      return "bw:?";
    }
    long estimate = bandwidthMeter.getBitrateEstimate();
    if (estimate == BandwidthMeter.NO_ESTIMATE) {
      return "bw:?";
    }
    return "bw:" + (estimate / 1000);
  }

  @Override
  protected long getCurrentPositionUs() {
    return lastPositionUs;
  }

  @Override
  protected long getDurationUs() {
    return TrackRenderer.MATCH_LONGEST_US;
  }

  @Override
  protected long getBufferedPositionUs() {
    return TrackRenderer.END_OF_TRACK_US;
  }

  @Override
  protected void seekTo(long timeUs) {
    lastPositionUs = timeUs;
  }

  private void throwIfFailureRequested() throws ExoPlaybackException {
    if (failureRequested) {
      failureRequested = false;
      throw new ExoPlaybackException("fail() was called on DebugTrackRenderer");
    }
  }

}

View file

@ -15,10 +15,12 @@
*/
package com.google.android.exoplayer.demo.player;
import com.google.android.exoplayer.CodecCounters;
import com.google.android.exoplayer.DummyTrackRenderer;
import com.google.android.exoplayer.ExoPlaybackException;
import com.google.android.exoplayer.ExoPlayer;
import com.google.android.exoplayer.MediaCodecAudioTrackRenderer;
import com.google.android.exoplayer.MediaCodecTrackRenderer;
import com.google.android.exoplayer.MediaCodecTrackRenderer.DecoderInitializationException;
import com.google.android.exoplayer.MediaCodecVideoTrackRenderer;
import com.google.android.exoplayer.TimeRange;
@ -27,13 +29,14 @@ import com.google.android.exoplayer.audio.AudioTrack;
import com.google.android.exoplayer.chunk.ChunkSampleSource;
import com.google.android.exoplayer.chunk.Format;
import com.google.android.exoplayer.chunk.MultiTrackChunkSource;
import com.google.android.exoplayer.dash.DashChunkSource;
import com.google.android.exoplayer.drm.StreamingDrmSessionManager;
import com.google.android.exoplayer.hls.HlsSampleSource;
import com.google.android.exoplayer.metadata.MetadataTrackRenderer;
import com.google.android.exoplayer.metadata.MetadataTrackRenderer.MetadataRenderer;
import com.google.android.exoplayer.text.Cue;
import com.google.android.exoplayer.text.TextRenderer;
import com.google.android.exoplayer.upstream.BandwidthMeter;
import com.google.android.exoplayer.upstream.DefaultBandwidthMeter;
import com.google.android.exoplayer.util.DebugTextViewHelper;
import com.google.android.exoplayer.util.PlayerControl;
import android.media.MediaCodec.CryptoException;
@ -55,7 +58,8 @@ import java.util.concurrent.CopyOnWriteArrayList;
public class DemoPlayer implements ExoPlayer.Listener, ChunkSampleSource.EventListener,
HlsSampleSource.EventListener, DefaultBandwidthMeter.EventListener,
MediaCodecVideoTrackRenderer.EventListener, MediaCodecAudioTrackRenderer.EventListener,
StreamingDrmSessionManager.EventListener, DashChunkSource.EventListener, TextRenderer {
StreamingDrmSessionManager.EventListener, TextRenderer,
MetadataRenderer<Map<String, Object>>, DebugTextViewHelper.Provider {
/**
* Builds renderers for the player.
@ -85,9 +89,10 @@ public class DemoPlayer implements ExoPlayer.Listener, ChunkSampleSource.EventLi
* multiple tracks. An individual element may be null if it does not have multiple tracks.
* @param renderers Renderers indexed by {@link DemoPlayer} TYPE_* constants. An individual
* element may be null if there do not exist tracks of the corresponding type.
* @param bandwidthMeter Provides an estimate of the currently available bandwidth. May be null.
*/
void onRenderers(String[][] trackNames, MultiTrackChunkSource[] multiTrackSources,
TrackRenderer[] renderers);
TrackRenderer[] renderers, BandwidthMeter bandwidthMeter);
/**
* Invoked if a {@link RendererBuilder} encounters an error.
*
@ -164,12 +169,11 @@ public class DemoPlayer implements ExoPlayer.Listener, ChunkSampleSource.EventLi
public static final int DISABLED_TRACK = -1;
public static final int PRIMARY_TRACK = 0;
public static final int RENDERER_COUNT = 5;
public static final int RENDERER_COUNT = 4;
public static final int TYPE_VIDEO = 0;
public static final int TYPE_AUDIO = 1;
public static final int TYPE_TEXT = 2;
public static final int TYPE_TIMED_METADATA = 3;
public static final int TYPE_DEBUG = 4;
public static final int TYPE_METADATA = 3;
private static final int RENDERER_BUILDING_STATE_IDLE = 1;
private static final int RENDERER_BUILDING_STATE_BUILDING = 2;
@ -188,9 +192,11 @@ public class DemoPlayer implements ExoPlayer.Listener, ChunkSampleSource.EventLi
private Surface surface;
private InternalRendererBuilderCallback builderCallback;
private TrackRenderer videoRenderer;
private CodecCounters codecCounters;
private Format videoFormat;
private int videoTrackToRestore;
private BandwidthMeter bandwidthMeter;
private MultiTrackChunkSource[] multiTrackSources;
private String[][] trackNames;
private int[] selectedTracks;
@ -276,10 +282,6 @@ public class DemoPlayer implements ExoPlayer.Listener, ChunkSampleSource.EventLi
}
}
public Format getVideoFormat() {
return videoFormat;
}
public void setBackgrounded(boolean backgrounded) {
if (this.backgrounded == backgrounded) {
return;
@ -311,7 +313,8 @@ public class DemoPlayer implements ExoPlayer.Listener, ChunkSampleSource.EventLi
}
/* package */ void onRenderers(String[][] trackNames,
MultiTrackChunkSource[] multiTrackSources, TrackRenderer[] renderers) {
MultiTrackChunkSource[] multiTrackSources, TrackRenderer[] renderers,
BandwidthMeter bandwidthMeter) {
builderCallback = null;
// Normalize the results.
if (trackNames == null) {
@ -334,7 +337,12 @@ public class DemoPlayer implements ExoPlayer.Listener, ChunkSampleSource.EventLi
// Complete preparation.
this.trackNames = trackNames;
this.videoRenderer = renderers[TYPE_VIDEO];
this.codecCounters = videoRenderer instanceof MediaCodecTrackRenderer
? ((MediaCodecTrackRenderer) videoRenderer).codecCounters
: renderers[TYPE_AUDIO] instanceof MediaCodecTrackRenderer
? ((MediaCodecTrackRenderer) renderers[TYPE_AUDIO]).codecCounters : null;
this.multiTrackSources = multiTrackSources;
this.bandwidthMeter = bandwidthMeter;
pushSurface(false);
pushTrackSelection(TYPE_VIDEO, true);
pushTrackSelection(TYPE_AUDIO, true);
@ -388,6 +396,22 @@ public class DemoPlayer implements ExoPlayer.Listener, ChunkSampleSource.EventLi
return playerState;
}
@Override
public Format getFormat() {
return videoFormat;
}
@Override
public BandwidthMeter getBandwidthMeter() {
return bandwidthMeter;
}
@Override
public CodecCounters getCodecCounters() {
return codecCounters;
}
@Override
public long getCurrentPosition() {
return player.getCurrentPosition();
}
@ -495,9 +519,7 @@ public class DemoPlayer implements ExoPlayer.Listener, ChunkSampleSource.EventLi
}
@Override
public void onDecoderInitialized(
String decoderName,
long elapsedRealtimeMs,
public void onDecoderInitialized(String decoderName, long elapsedRealtimeMs,
long initializationDurationMs) {
if (infoListener != null) {
infoListener.onDecoderInitialized(decoderName, elapsedRealtimeMs, initializationDurationMs);
@ -513,26 +535,16 @@ public class DemoPlayer implements ExoPlayer.Listener, ChunkSampleSource.EventLi
@Override
public void onCues(List<Cue> cues) {
processCues(cues);
}
@Override
public void onSeekRangeChanged(TimeRange seekRange) {
if (infoListener != null) {
infoListener.onSeekRangeChanged(seekRange);
if (captionListener != null && selectedTracks[TYPE_TEXT] != DISABLED_TRACK) {
captionListener.onCues(cues);
}
}
/* package */ MetadataTrackRenderer.MetadataRenderer<Map<String, Object>>
getId3MetadataRenderer() {
return new MetadataTrackRenderer.MetadataRenderer<Map<String, Object>>() {
@Override
public void onMetadata(Map<String, Object> metadata) {
if (id3MetadataListener != null) {
id3MetadataListener.onId3Metadata(metadata);
}
}
};
@Override
public void onMetadata(Map<String, Object> metadata) {
if (id3MetadataListener != null && selectedTracks[TYPE_METADATA] != DISABLED_TRACK) {
id3MetadataListener.onId3Metadata(metadata);
}
}
@Override
@ -620,13 +632,6 @@ public class DemoPlayer implements ExoPlayer.Listener, ChunkSampleSource.EventLi
}
}
/* package */ void processCues(List<Cue> cues) {
if (captionListener == null || selectedTracks[TYPE_TEXT] == DISABLED_TRACK) {
return;
}
captionListener.onCues(cues);
}
private class InternalRendererBuilderCallback implements RendererBuilderCallback {
private boolean canceled;
@ -637,9 +642,9 @@ public class DemoPlayer implements ExoPlayer.Listener, ChunkSampleSource.EventLi
@Override
public void onRenderers(String[][] trackNames, MultiTrackChunkSource[] multiTrackSources,
TrackRenderer[] renderers) {
TrackRenderer[] renderers, BandwidthMeter bandwidthMeter) {
if (!canceled) {
DemoPlayer.this.onRenderers(trackNames, multiTrackSources, renderers);
DemoPlayer.this.onRenderers(trackNames, multiTrackSources, renderers, bandwidthMeter);
}
}

View file

@ -29,7 +29,6 @@ import com.google.android.exoplayer.upstream.DefaultUriDataSource;
import android.content.Context;
import android.media.MediaCodec;
import android.net.Uri;
import android.widget.TextView;
/**
* A {@link RendererBuilder} for streams that can be read using an {@link Extractor}.
@ -41,15 +40,12 @@ public class ExtractorRendererBuilder implements RendererBuilder {
private final Context context;
private final String userAgent;
private final Uri uri;
private final TextView debugTextView;
private final Extractor extractor;
public ExtractorRendererBuilder(Context context, String userAgent, Uri uri,
TextView debugTextView, Extractor extractor) {
public ExtractorRendererBuilder(Context context, String userAgent, Uri uri, Extractor extractor) {
this.context = context;
this.userAgent = userAgent;
this.uri = uri;
this.debugTextView = debugTextView;
this.extractor = extractor;
}
@ -67,16 +63,11 @@ public class ExtractorRendererBuilder implements RendererBuilder {
MediaCodecAudioTrackRenderer audioRenderer = new MediaCodecAudioTrackRenderer(sampleSource,
null, true, player.getMainHandler(), player);
// Build the debug renderer.
TrackRenderer debugRenderer = debugTextView != null
? new DebugTrackRenderer(debugTextView, player, videoRenderer, bandwidthMeter) : null;
// Invoke the callback.
TrackRenderer[] renderers = new TrackRenderer[DemoPlayer.RENDERER_COUNT];
renderers[DemoPlayer.TYPE_VIDEO] = videoRenderer;
renderers[DemoPlayer.TYPE_AUDIO] = audioRenderer;
renderers[DemoPlayer.TYPE_DEBUG] = debugRenderer;
callback.onRenderers(null, null, renderers);
callback.onRenderers(null, null, renderers, bandwidthMeter);
}
}

View file

@ -15,6 +15,8 @@
*/
package com.google.android.exoplayer.demo.player;
import com.google.android.exoplayer.DefaultLoadControl;
import com.google.android.exoplayer.LoadControl;
import com.google.android.exoplayer.MediaCodecAudioTrackRenderer;
import com.google.android.exoplayer.MediaCodecUtil.DecoderQueryException;
import com.google.android.exoplayer.MediaCodecVideoTrackRenderer;
@ -32,6 +34,7 @@ import com.google.android.exoplayer.metadata.Id3Parser;
import com.google.android.exoplayer.metadata.MetadataTrackRenderer;
import com.google.android.exoplayer.text.eia608.Eia608TrackRenderer;
import com.google.android.exoplayer.upstream.DataSource;
import com.google.android.exoplayer.upstream.DefaultAllocator;
import com.google.android.exoplayer.upstream.DefaultBandwidthMeter;
import com.google.android.exoplayer.upstream.DefaultUriDataSource;
import com.google.android.exoplayer.util.ManifestFetcher;
@ -40,7 +43,6 @@ import com.google.android.exoplayer.util.ManifestFetcher.ManifestCallback;
import android.content.Context;
import android.media.MediaCodec;
import android.os.Handler;
import android.widget.TextView;
import java.io.IOException;
import java.util.Map;
@ -50,24 +52,22 @@ import java.util.Map;
*/
public class HlsRendererBuilder implements RendererBuilder, ManifestCallback<HlsPlaylist> {
private static final int REQUESTED_BUFFER_SIZE = 18 * 1024 * 1024;
private static final long REQUESTED_BUFFER_DURATION_MS = 40000;
private static final int BUFFER_SEGMENT_SIZE = 256 * 1024;
private static final int BUFFER_SEGMENTS = 64;
private final Context context;
private final String userAgent;
private final String url;
private final TextView debugTextView;
private final AudioCapabilities audioCapabilities;
private DemoPlayer player;
private RendererBuilderCallback callback;
public HlsRendererBuilder(Context context, String userAgent, String url, TextView debugTextView,
public HlsRendererBuilder(Context context, String userAgent, String url,
AudioCapabilities audioCapabilities) {
this.context = context;
this.userAgent = userAgent;
this.url = url;
this.debugTextView = debugTextView;
this.audioCapabilities = audioCapabilities;
}
@ -89,6 +89,7 @@ public class HlsRendererBuilder implements RendererBuilder, ManifestCallback<Hls
@Override
public void onSingleManifest(HlsPlaylist manifest) {
Handler mainHandler = player.getMainHandler();
LoadControl loadControl = new DefaultLoadControl(new DefaultAllocator(BUFFER_SEGMENT_SIZE));
DefaultBandwidthMeter bandwidthMeter = new DefaultBandwidthMeter();
int[] variantIndices = null;
@ -106,30 +107,24 @@ public class HlsRendererBuilder implements RendererBuilder, ManifestCallback<Hls
DataSource dataSource = new DefaultUriDataSource(context, bandwidthMeter, userAgent);
HlsChunkSource chunkSource = new HlsChunkSource(dataSource, url, manifest, bandwidthMeter,
variantIndices, HlsChunkSource.ADAPTIVE_MODE_SPLICE, audioCapabilities);
HlsSampleSource sampleSource = new HlsSampleSource(chunkSource, true, 3, REQUESTED_BUFFER_SIZE,
REQUESTED_BUFFER_DURATION_MS, mainHandler, player, DemoPlayer.TYPE_VIDEO);
HlsSampleSource sampleSource = new HlsSampleSource(chunkSource, loadControl,
BUFFER_SEGMENTS * BUFFER_SEGMENT_SIZE, true, 3, mainHandler, player, DemoPlayer.TYPE_VIDEO);
MediaCodecVideoTrackRenderer videoRenderer = new MediaCodecVideoTrackRenderer(sampleSource,
MediaCodec.VIDEO_SCALING_MODE_SCALE_TO_FIT, 5000, mainHandler, player, 50);
MediaCodecAudioTrackRenderer audioRenderer = new MediaCodecAudioTrackRenderer(sampleSource);
MetadataTrackRenderer<Map<String, Object>> id3Renderer =
new MetadataTrackRenderer<>(sampleSource, new Id3Parser(),
player.getId3MetadataRenderer(), mainHandler.getLooper());
new MetadataTrackRenderer<>(sampleSource, new Id3Parser(), player, mainHandler.getLooper());
Eia608TrackRenderer closedCaptionRenderer = new Eia608TrackRenderer(sampleSource, player,
mainHandler.getLooper());
// Build the debug renderer.
TrackRenderer debugRenderer = debugTextView != null
? new DebugTrackRenderer(debugTextView, player, videoRenderer, bandwidthMeter) : null;
TrackRenderer[] renderers = new TrackRenderer[DemoPlayer.RENDERER_COUNT];
renderers[DemoPlayer.TYPE_VIDEO] = videoRenderer;
renderers[DemoPlayer.TYPE_AUDIO] = audioRenderer;
renderers[DemoPlayer.TYPE_TIMED_METADATA] = id3Renderer;
renderers[DemoPlayer.TYPE_METADATA] = id3Renderer;
renderers[DemoPlayer.TYPE_TEXT] = closedCaptionRenderer;
renderers[DemoPlayer.TYPE_DEBUG] = debugRenderer;
callback.onRenderers(null, null, renderers);
callback.onRenderers(null, null, renderers, bandwidthMeter);
}
}

View file

@ -32,6 +32,7 @@ import com.google.android.exoplayer.demo.player.DemoPlayer.RendererBuilderCallba
import com.google.android.exoplayer.drm.DrmSessionManager;
import com.google.android.exoplayer.drm.MediaDrmCallback;
import com.google.android.exoplayer.drm.StreamingDrmSessionManager;
import com.google.android.exoplayer.drm.UnsupportedDrmException;
import com.google.android.exoplayer.smoothstreaming.SmoothStreamingChunkSource;
import com.google.android.exoplayer.smoothstreaming.SmoothStreamingManifest;
import com.google.android.exoplayer.smoothstreaming.SmoothStreamingManifest.StreamElement;
@ -46,16 +47,12 @@ import com.google.android.exoplayer.upstream.DefaultUriDataSource;
import com.google.android.exoplayer.util.ManifestFetcher;
import com.google.android.exoplayer.util.Util;
import android.annotation.TargetApi;
import android.content.Context;
import android.media.MediaCodec;
import android.media.UnsupportedSchemeException;
import android.os.Handler;
import android.widget.TextView;
import java.io.IOException;
import java.util.Arrays;
import java.util.UUID;
/**
* A {@link RendererBuilder} for SmoothStreaming.
@ -73,19 +70,17 @@ public class SmoothStreamingRendererBuilder implements RendererBuilder,
private final String userAgent;
private final String url;
private final MediaDrmCallback drmCallback;
private final TextView debugTextView;
private DemoPlayer player;
private RendererBuilderCallback callback;
private ManifestFetcher<SmoothStreamingManifest> manifestFetcher;
public SmoothStreamingRendererBuilder(Context context, String userAgent, String url,
MediaDrmCallback drmCallback, TextView debugTextView) {
MediaDrmCallback drmCallback) {
this.context = context;
this.userAgent = userAgent;
this.url = url;
this.drmCallback = drmCallback;
this.debugTextView = debugTextView;
}
@Override
@ -118,12 +113,12 @@ public class SmoothStreamingRendererBuilder implements RendererBuilder,
if (manifest.protectionElement != null) {
if (Util.SDK_INT < 18) {
callback.onRenderersError(
new UnsupportedDrmException(UnsupportedDrmException.REASON_NO_DRM));
new UnsupportedDrmException(UnsupportedDrmException.REASON_UNSUPPORTED_SCHEME));
return;
}
try {
drmSessionManager = V18Compat.getDrmSessionManager(manifest.protectionElement.uuid, player,
drmCallback);
drmSessionManager = new StreamingDrmSessionManager(manifest.protectionElement.uuid,
player.getPlaybackLooper(), drmCallback, null, player.getMainHandler(), player);
} catch (UnsupportedDrmException e) {
callback.onRenderersError(e);
return;
@ -159,10 +154,8 @@ public class SmoothStreamingRendererBuilder implements RendererBuilder,
// Build the video renderer.
final MediaCodecVideoTrackRenderer videoRenderer;
final TrackRenderer debugRenderer;
if (videoTrackIndices == null || videoTrackIndices.length == 0) {
videoRenderer = null;
debugRenderer = null;
} else {
DataSource videoDataSource = new DefaultUriDataSource(context, bandwidthMeter, userAgent);
ChunkSource videoChunkSource = new SmoothStreamingChunkSource(manifestFetcher,
@ -173,8 +166,6 @@ public class SmoothStreamingRendererBuilder implements RendererBuilder,
DemoPlayer.TYPE_VIDEO);
videoRenderer = new MediaCodecVideoTrackRenderer(videoSampleSource, drmSessionManager, true,
MediaCodec.VIDEO_SCALING_MODE_SCALE_TO_FIT, 5000, null, mainHandler, player, 50);
debugRenderer = debugTextView != null
? new DebugTrackRenderer(debugTextView, player, videoRenderer, bandwidthMeter) : null;
}
// Build the audio renderer.
@ -252,25 +243,7 @@ public class SmoothStreamingRendererBuilder implements RendererBuilder,
renderers[DemoPlayer.TYPE_VIDEO] = videoRenderer;
renderers[DemoPlayer.TYPE_AUDIO] = audioRenderer;
renderers[DemoPlayer.TYPE_TEXT] = textRenderer;
renderers[DemoPlayer.TYPE_DEBUG] = debugRenderer;
callback.onRenderers(trackNames, multiTrackChunkSources, renderers);
}
@TargetApi(18)
private static class V18Compat {
public static DrmSessionManager getDrmSessionManager(UUID uuid, DemoPlayer player,
MediaDrmCallback drmCallback) throws UnsupportedDrmException {
try {
return new StreamingDrmSessionManager(uuid, player.getPlaybackLooper(), drmCallback, null,
player.getMainHandler(), player);
} catch (UnsupportedSchemeException e) {
throw new UnsupportedDrmException(UnsupportedDrmException.REASON_UNSUPPORTED_SCHEME);
} catch (Exception e) {
throw new UnsupportedDrmException(UnsupportedDrmException.REASON_UNKNOWN, e);
}
}
callback.onRenderers(trackNames, multiTrackChunkSources, renderers, bandwidthMeter);
}
}

View file

@ -72,7 +72,7 @@ android.libraryVariants.all { variant ->
classpath = files(variant.javaCompile.classpath.files, project.android.getBootClasspath())
options {
links "http://docs.oracle.com/javase/7/docs/api/"
linksOffline "https://d.android.com/reference","${android.sdkDirectory}/docs/reference"
linksOffline "https://developer.android.com/reference","${android.sdkDirectory}/docs/reference"
}
exclude '**/BuildConfig.java'
exclude '**/R.java'

View file

@ -134,8 +134,7 @@ public class MediaCodecUtil {
for (int i = 0; i < numberOfCodecs; i++) {
MediaCodecInfo info = mediaCodecList.getCodecInfoAt(i);
String codecName = info.getName();
if (!info.isEncoder() && codecName.startsWith("OMX.")
&& (secureDecodersExplicit || !codecName.endsWith(".secure"))) {
if (isCodecUsableDecoder(info, codecName, secureDecodersExplicit)) {
String[] supportedTypes = info.getSupportedTypes();
for (int j = 0; j < supportedTypes.length; j++) {
String supportedType = supportedTypes[j];
@ -166,6 +165,28 @@ public class MediaCodecUtil {
return null;
}
/**
* Returns whether the specified codec is usable for decoding on the current device.
*/
private static boolean isCodecUsableDecoder(MediaCodecInfo info, String name,
boolean secureDecodersExplicit) {
if (info.isEncoder() || !name.startsWith("OMX.")
|| (!secureDecodersExplicit && name.endsWith(".secure"))) {
return false;
}
// Workaround an issue where creating a particular MP3 decoder on some HTC devices on platform
// API version 16 crashes mediaserver.
if (Util.SDK_INT == 16
&& ("dlxu".equals(Util.PRODUCT) // HTC Butterfly
|| "protou".equals(Util.PRODUCT)) // HTC Desire X
&& name.equals("OMX.qcom.audio.decoder.mp3")) {
return false;
}
return true;
}
private static boolean isAdaptive(CodecCapabilities capabilities) {
if (Util.SDK_INT >= 19) {
return isAdaptiveV19(capabilities);

View file

@ -18,7 +18,6 @@ package com.google.android.exoplayer.dash;
import com.google.android.exoplayer.BehindLiveWindowException;
import com.google.android.exoplayer.C;
import com.google.android.exoplayer.MediaFormat;
import com.google.android.exoplayer.TimeRange;
import com.google.android.exoplayer.TrackInfo;
import com.google.android.exoplayer.TrackRenderer;
import com.google.android.exoplayer.chunk.Chunk;
@ -51,8 +50,6 @@ import com.google.android.exoplayer.util.ManifestFetcher;
import com.google.android.exoplayer.util.MimeTypes;
import com.google.android.exoplayer.util.SystemClock;
import android.os.Handler;
import java.io.IOException;
import java.util.Arrays;
import java.util.Collections;
@ -66,20 +63,6 @@ import java.util.List;
*/
public class DashChunkSource implements ChunkSource {
/**
* Interface definition for a callback to be notified of {@link DashChunkSource} events.
*/
public interface EventListener {
/**
* Invoked when the available seek range of the stream has changed.
*
* @param seekRange The range which specifies available content that can be seeked to.
*/
public void onSeekRangeChanged(TimeRange seekRange);
}
/**
* Thrown when an AdaptationSet is missing from the MPD.
*/
@ -96,9 +79,6 @@ public class DashChunkSource implements ChunkSource {
*/
public static final int USE_ALL_TRACKS = -1;
private final Handler eventHandler;
private final EventListener eventListener;
private final TrackInfo trackInfo;
private final DataSource dataSource;
private final FormatEvaluator formatEvaluator;
@ -121,11 +101,6 @@ public class DashChunkSource implements ChunkSource {
private boolean finishedCurrentManifest;
private DrmInitData drmInitData;
private TimeRange seekRange;
private long[] seekRangeValues;
private int firstAvailableSegmentNum;
private int lastAvailableSegmentNum;
private boolean lastChunkWasInitialization;
private IOException fatalError;
@ -167,7 +142,7 @@ public class DashChunkSource implements ChunkSource {
public DashChunkSource(MediaPresentationDescription manifest, int adaptationSetIndex,
int[] representationIndices, DataSource dataSource, FormatEvaluator formatEvaluator) {
this(null, manifest, adaptationSetIndex, representationIndices, dataSource, formatEvaluator,
new SystemClock(), 0, 0, null, null);
new SystemClock(), 0, 0);
}
/**
@ -192,24 +167,19 @@ public class DashChunkSource implements ChunkSource {
* @param elapsedRealtimeOffsetMs If known, an estimate of the instantaneous difference between
* server-side unix time and {@link SystemClock#elapsedRealtime()} in milliseconds, specified
* as the server's unix time minus the local elapsed time. It unknown, set to 0.
* @param eventHandler A handler to use when delivering events to {@code EventListener}. May be
* null if delivery of events is not required.
* @param eventListener A listener of events. May be null if delivery of events is not required.
*/
public DashChunkSource(ManifestFetcher<MediaPresentationDescription> manifestFetcher,
int adaptationSetIndex, int[] representationIndices, DataSource dataSource,
FormatEvaluator formatEvaluator, long liveEdgeLatencyMs, long elapsedRealtimeOffsetMs,
Handler eventHandler, EventListener eventListener) {
FormatEvaluator formatEvaluator, long liveEdgeLatencyMs, long elapsedRealtimeOffsetMs) {
this(manifestFetcher, manifestFetcher.getManifest(), adaptationSetIndex, representationIndices,
dataSource, formatEvaluator, new SystemClock(), liveEdgeLatencyMs * 1000,
elapsedRealtimeOffsetMs * 1000, eventHandler, eventListener);
elapsedRealtimeOffsetMs * 1000);
}
/* package */ DashChunkSource(ManifestFetcher<MediaPresentationDescription> manifestFetcher,
MediaPresentationDescription initialManifest, int adaptationSetIndex,
int[] representationIndices, DataSource dataSource, FormatEvaluator formatEvaluator,
Clock systemClock, long liveEdgeLatencyUs, long elapsedRealtimeOffsetUs,
Handler eventHandler, EventListener eventListener) {
Clock systemClock, long liveEdgeLatencyUs, long elapsedRealtimeOffsetUs) {
this.manifestFetcher = manifestFetcher;
this.currentManifest = initialManifest;
this.adaptationSetIndex = adaptationSetIndex;
@ -219,11 +189,8 @@ public class DashChunkSource implements ChunkSource {
this.systemClock = systemClock;
this.liveEdgeLatencyUs = liveEdgeLatencyUs;
this.elapsedRealtimeOffsetUs = elapsedRealtimeOffsetUs;
this.eventHandler = eventHandler;
this.eventListener = eventListener;
this.evaluation = new Evaluation();
this.headerBuilder = new StringBuilder();
this.seekRangeValues = new long[2];
drmInitData = getDrmInitData(currentManifest, adaptationSetIndex);
Representation[] representations = getFilteredRepresentations(currentManifest,
@ -262,11 +229,6 @@ public class DashChunkSource implements ChunkSource {
return trackInfo;
}
// VisibleForTesting
/* package */ TimeRange getSeekRange() {
return seekRange;
}
@Override
public void enable() {
fatalError = null;
@ -274,16 +236,6 @@ public class DashChunkSource implements ChunkSource {
if (manifestFetcher != null) {
manifestFetcher.enable();
}
DashSegmentIndex segmentIndex =
representationHolders.get(formats[0].id).representation.getIndex();
if (segmentIndex == null) {
seekRange = new TimeRange(TimeRange.TYPE_SNAPSHOT, 0, currentManifest.duration * 1000);
notifySeekRangeChanged(seekRange);
} else {
long nowUs = getNowUs();
updateAvailableSegmentBounds(segmentIndex, nowUs);
updateSeekRange(segmentIndex, nowUs);
}
}
@Override
@ -292,7 +244,6 @@ public class DashChunkSource implements ChunkSource {
if (manifestFetcher != null) {
manifestFetcher.disable();
}
seekRange = null;
}
@Override
@ -309,19 +260,31 @@ public class DashChunkSource implements ChunkSource {
RepresentationHolder representationHolder =
representationHolders.get(representation.format.id);
DashSegmentIndex oldIndex = representationHolder.segmentIndex;
int oldIndexLastSegmentNum = oldIndex.getLastSegmentNum();
long oldIndexEndTimeUs = oldIndex.getTimeUs(oldIndexLastSegmentNum)
+ oldIndex.getDurationUs(oldIndexLastSegmentNum);
DashSegmentIndex newIndex = representation.getIndex();
int newFirstSegmentNum = newIndex.getFirstSegmentNum();
int segmentNumShift = oldIndex.getSegmentNum(newIndex.getTimeUs(newFirstSegmentNum))
- newFirstSegmentNum;
int newIndexFirstSegmentNum = newIndex.getFirstSegmentNum();
long newIndexStartTimeUs = newIndex.getTimeUs(newIndexFirstSegmentNum);
if (oldIndexEndTimeUs < newIndexStartTimeUs) {
// There's a gap between the old manifest and the new one which means we've slipped behind
// the live window and can't proceed.
fatalError = new BehindLiveWindowException();
return;
}
int segmentNumShift;
if (oldIndexEndTimeUs == newIndexStartTimeUs) {
// The new manifest continues where the old one ended, with no overlap.
segmentNumShift = oldIndex.getLastSegmentNum() + 1 - newIndexFirstSegmentNum;
} else {
// The new manifest overlaps with the old one.
segmentNumShift = oldIndex.getSegmentNum(newIndexStartTimeUs) - newIndexFirstSegmentNum;
}
representationHolder.segmentNumShift += segmentNumShift;
representationHolder.segmentIndex = newIndex;
}
currentManifest = newManifest;
finishedCurrentManifest = false;
long nowUs = getNowUs();
updateAvailableSegmentBounds(newRepresentations[0].getIndex(), nowUs);
updateSeekRange(newRepresentations[0].getIndex(), nowUs);
}
// TODO: This is a temporary hack to avoid constantly refreshing the MPD in cases where
@ -391,21 +354,36 @@ public class DashChunkSource implements ChunkSource {
return;
}
long nowUs;
if (elapsedRealtimeOffsetUs != 0) {
nowUs = (systemClock.elapsedRealtime() * 1000) + elapsedRealtimeOffsetUs;
} else {
nowUs = System.currentTimeMillis() * 1000;
}
int firstAvailableSegmentNum = segmentIndex.getFirstSegmentNum();
int lastAvailableSegmentNum = segmentIndex.getLastSegmentNum();
boolean indexUnbounded = lastAvailableSegmentNum == DashSegmentIndex.INDEX_UNBOUNDED;
if (indexUnbounded) {
// The index is itself unbounded. We need to use the current time to calculate the range of
// available segments.
long liveEdgeTimestampUs = nowUs - currentManifest.availabilityStartTime * 1000;
if (currentManifest.timeShiftBufferDepth != -1) {
long bufferDepthUs = currentManifest.timeShiftBufferDepth * 1000;
firstAvailableSegmentNum = Math.max(firstAvailableSegmentNum,
segmentIndex.getSegmentNum(liveEdgeTimestampUs - bufferDepthUs));
}
// getSegmentNum(liveEdgeTimestampUs) will not be completed yet, so subtract one to get the
// index of the last completed segment.
lastAvailableSegmentNum = segmentIndex.getSegmentNum(liveEdgeTimestampUs) - 1;
}
int segmentNum;
boolean indexUnbounded = segmentIndex.getLastSegmentNum() == DashSegmentIndex.INDEX_UNBOUNDED;
if (queue.isEmpty()) {
if (currentManifest.dynamic) {
seekRangeValues = seekRange.getCurrentBoundsUs(seekRangeValues);
seekPositionUs = Math.max(seekPositionUs, seekRangeValues[0]);
seekPositionUs = Math.min(seekPositionUs, seekRangeValues[1]);
seekPositionUs = getLiveSeekPosition(nowUs, indexUnbounded, segmentIndex.isExplicit());
}
segmentNum = segmentIndex.getSegmentNum(seekPositionUs);
// if the index is unbounded then the result of getSegmentNum isn't clamped to ensure that
// it doesn't exceed the last available segment. Clamp it here.
if (indexUnbounded) {
segmentNum = Math.min(segmentNum, lastAvailableSegmentNum);
}
} else {
MediaChunk previous = queue.get(out.queueSize - 1);
segmentNum = previous.isLastChunk ? -1
@ -474,59 +452,6 @@ public class DashChunkSource implements ChunkSource {
// Do nothing.
}
private void updateAvailableSegmentBounds(DashSegmentIndex segmentIndex, long nowUs) {
int indexFirstAvailableSegmentNum = segmentIndex.getFirstSegmentNum();
int indexLastAvailableSegmentNum = segmentIndex.getLastSegmentNum();
if (indexLastAvailableSegmentNum == DashSegmentIndex.INDEX_UNBOUNDED) {
// The index is itself unbounded. We need to use the current time to calculate the range of
// available segments.
long liveEdgeTimestampUs = nowUs - currentManifest.availabilityStartTime * 1000;
if (currentManifest.timeShiftBufferDepth != -1) {
long bufferDepthUs = currentManifest.timeShiftBufferDepth * 1000;
indexFirstAvailableSegmentNum = Math.max(indexFirstAvailableSegmentNum,
segmentIndex.getSegmentNum(liveEdgeTimestampUs - bufferDepthUs));
}
// getSegmentNum(liveEdgeTimestampUs) will not be completed yet, so subtract one to get the
// index of the last completed segment.
indexLastAvailableSegmentNum = segmentIndex.getSegmentNum(liveEdgeTimestampUs) - 1;
}
firstAvailableSegmentNum = indexFirstAvailableSegmentNum;
lastAvailableSegmentNum = indexLastAvailableSegmentNum;
}
private void updateSeekRange(DashSegmentIndex segmentIndex, long nowUs) {
long earliestSeekPosition = segmentIndex.getTimeUs(firstAvailableSegmentNum);
long latestSeekPosition = segmentIndex.getTimeUs(lastAvailableSegmentNum)
+ segmentIndex.getDurationUs(lastAvailableSegmentNum);
if (currentManifest.dynamic) {
long liveEdgeTimestampUs;
if (segmentIndex.getLastSegmentNum() == DashSegmentIndex.INDEX_UNBOUNDED) {
liveEdgeTimestampUs = nowUs - currentManifest.availabilityStartTime * 1000;
} else {
liveEdgeTimestampUs = segmentIndex.getTimeUs(segmentIndex.getLastSegmentNum())
+ segmentIndex.getDurationUs(segmentIndex.getLastSegmentNum());
if (!segmentIndex.isExplicit()) {
// Some segments defined by the index may not be available yet. Bound the calculated live
// edge based on the elapsed time since the manifest became available.
liveEdgeTimestampUs = Math.min(liveEdgeTimestampUs,
nowUs - currentManifest.availabilityStartTime * 1000);
}
}
// it's possible that the live edge latency actually puts our latest position before
// the earliest position in the case of a DVR-like stream that's just starting up, so
// in that case just return the earliest position instead
latestSeekPosition = Math.max(earliestSeekPosition, liveEdgeTimestampUs - liveEdgeLatencyUs);
}
TimeRange newSeekRange = new TimeRange(TimeRange.TYPE_SNAPSHOT, earliestSeekPosition,
latestSeekPosition);
if (seekRange == null || !seekRange.equals(newSeekRange)) {
seekRange = newSeekRange;
notifySeekRangeChanged(seekRange);
}
}
private static boolean mimeTypeIsWebm(String mimeType) {
return mimeType.startsWith(MimeTypes.VIDEO_WEBM) || mimeType.startsWith(MimeTypes.AUDIO_WEBM);
}
@ -587,12 +512,36 @@ public class DashChunkSource implements ChunkSource {
}
}
private long getNowUs() {
if (elapsedRealtimeOffsetUs != 0) {
return (systemClock.elapsedRealtime() * 1000) + elapsedRealtimeOffsetUs;
/**
* For live playbacks, determines the seek position that snaps playback to be
* {@link #liveEdgeLatencyUs} behind the live edge of the current manifest
*
* @param nowUs An estimate of the current server time, in microseconds.
* @param indexUnbounded True if the segment index for this source is unbounded. False otherwise.
* @param indexExplicit True if the segment index is explicit. False otherwise.
* @return The seek position in microseconds.
*/
private long getLiveSeekPosition(long nowUs, boolean indexUnbounded, boolean indexExplicit) {
long liveEdgeTimestampUs;
if (indexUnbounded) {
liveEdgeTimestampUs = nowUs - currentManifest.availabilityStartTime * 1000;
} else {
return System.currentTimeMillis() * 1000;
liveEdgeTimestampUs = Long.MIN_VALUE;
for (RepresentationHolder representationHolder : representationHolders.values()) {
DashSegmentIndex segmentIndex = representationHolder.segmentIndex;
int lastSegmentNum = segmentIndex.getLastSegmentNum();
long indexLiveEdgeTimestampUs = segmentIndex.getTimeUs(lastSegmentNum)
+ segmentIndex.getDurationUs(lastSegmentNum);
liveEdgeTimestampUs = Math.max(liveEdgeTimestampUs, indexLiveEdgeTimestampUs);
}
if (!indexExplicit) {
// Some segments defined by the index may not be available yet. Bound the calculated live
// edge based on the elapsed time since the manifest became available.
liveEdgeTimestampUs = Math.min(liveEdgeTimestampUs,
nowUs - currentManifest.availabilityStartTime * 1000);
}
}
return liveEdgeTimestampUs - liveEdgeLatencyUs;
}
private static Representation[] getFilteredRepresentations(MediaPresentationDescription manifest,
@ -643,17 +592,6 @@ public class DashChunkSource implements ChunkSource {
Collections.singletonList(period));
}
private void notifySeekRangeChanged(final TimeRange seekRange) {
if (eventHandler != null && eventListener != null) {
eventHandler.post(new Runnable() {
@Override
public void run() {
eventListener.onSeekRangeChanged(seekRange);
}
});
}
}
private static class RepresentationHolder {
public final Representation representation;

View file

@ -110,11 +110,11 @@ public class StreamingDrmSessionManager implements DrmSessionManager {
* @param eventHandler A handler to use when delivering events to {@code eventListener}. May be
* null if delivery of events is not required.
* @param eventListener A listener of events. May be null if delivery of events is not required.
* @throws UnsupportedSchemeException If the specified DRM scheme is not supported.
* @throws UnsupportedDrmException If the specified DRM scheme is not supported.
*/
public static StreamingDrmSessionManager newWidevineInstance(Looper playbackLooper,
MediaDrmCallback callback, HashMap<String, String> optionalKeyRequestParameters,
Handler eventHandler, EventListener eventListener) throws UnsupportedSchemeException {
Handler eventHandler, EventListener eventListener) throws UnsupportedDrmException {
return new StreamingDrmSessionManager(WIDEVINE_UUID, playbackLooper, callback,
optionalKeyRequestParameters, eventHandler, eventListener);
}
@ -132,11 +132,11 @@ public class StreamingDrmSessionManager implements DrmSessionManager {
* @param eventHandler A handler to use when delivering events to {@code eventListener}. May be
* null if delivery of events is not required.
* @param eventListener A listener of events. May be null if delivery of events is not required.
* @throws UnsupportedSchemeException If the specified DRM scheme is not supported.
* @throws UnsupportedDrmException If the specified DRM scheme is not supported.
*/
public static StreamingDrmSessionManager newPlayReadyInstance(Looper playbackLooper,
MediaDrmCallback callback, String customData, Handler eventHandler,
EventListener eventListener) throws UnsupportedSchemeException {
EventListener eventListener) throws UnsupportedDrmException {
HashMap<String, String> optionalKeyRequestParameters;
if (!TextUtils.isEmpty(customData)) {
optionalKeyRequestParameters = new HashMap<>();
@ -158,17 +158,23 @@ public class StreamingDrmSessionManager implements DrmSessionManager {
* @param eventHandler A handler to use when delivering events to {@code eventListener}. May be
* null if delivery of events is not required.
* @param eventListener A listener of events. May be null if delivery of events is not required.
* @throws UnsupportedSchemeException If the specified DRM scheme is not supported.
* @throws UnsupportedDrmException If the specified DRM scheme is not supported.
*/
public StreamingDrmSessionManager(UUID uuid, Looper playbackLooper, MediaDrmCallback callback,
HashMap<String, String> optionalKeyRequestParameters, Handler eventHandler,
EventListener eventListener) throws UnsupportedSchemeException {
EventListener eventListener) throws UnsupportedDrmException {
this.uuid = uuid;
this.callback = callback;
this.optionalKeyRequestParameters = optionalKeyRequestParameters;
this.eventHandler = eventHandler;
this.eventListener = eventListener;
mediaDrm = new MediaDrm(uuid);
try {
mediaDrm = new MediaDrm(uuid);
} catch (UnsupportedSchemeException e) {
throw new UnsupportedDrmException(UnsupportedDrmException.REASON_UNSUPPORTED_SCHEME, e);
} catch (Exception e) {
throw new UnsupportedDrmException(UnsupportedDrmException.REASON_INSTANTIATION_ERROR, e);
}
mediaDrm.setOnEventListener(new MediaDrmEventListener());
mediaDrmHandler = new MediaDrmHandler(playbackLooper);
postResponseHandler = new PostResponseHandler(playbackLooper);
@ -176,12 +182,12 @@ public class StreamingDrmSessionManager implements DrmSessionManager {
}
@Override
public int getState() {
public final int getState() {
return state;
}
@Override
public MediaCrypto getMediaCrypto() {
public final MediaCrypto getMediaCrypto() {
if (state != STATE_OPENED && state != STATE_OPENED_WITH_KEYS) {
throw new IllegalStateException();
}
@ -197,7 +203,7 @@ public class StreamingDrmSessionManager implements DrmSessionManager {
}
@Override
public Exception getError() {
public final Exception getError() {
return state == STATE_ERROR ? lastException : null;
}
@ -250,7 +256,7 @@ public class StreamingDrmSessionManager implements DrmSessionManager {
}
@Override
public void open(DrmInitData drmInitData) {
public final void open(DrmInitData drmInitData) {
if (++openCount != 1) {
return;
}
@ -272,7 +278,7 @@ public class StreamingDrmSessionManager implements DrmSessionManager {
}
@Override
public void close() {
public final void close() {
if (--openCount != 0) {
return;
}

View file

@ -13,16 +13,22 @@
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.google.android.exoplayer.demo.player;
package com.google.android.exoplayer.drm;
/**
* Exception thrown when the required level of DRM is not supported.
* Thrown when the requested DRM scheme is not supported.
*/
public final class UnsupportedDrmException extends Exception {
public static final int REASON_NO_DRM = 0;
/**
* The requested DRM scheme is unsupported by the device.
*/
public static final int REASON_UNSUPPORTED_SCHEME = 1;
public static final int REASON_UNKNOWN = 2;
/**
* There device advertises support for the requested DRM scheme, but there was an error
* instantiating it. The cause can be retrieved using {@link #getCause()}.
*/
public static final int REASON_INSTANTIATION_ERROR = 2;
public final int reason;

View file

@ -39,7 +39,8 @@ import java.util.List;
public static final int TYPE_ftyp = Util.getIntegerCodeForString("ftyp");
public static final int TYPE_avc1 = Util.getIntegerCodeForString("avc1");
public static final int TYPE_avc3 = Util.getIntegerCodeForString("avc3");
public static final int TYPE_esds = Util.getIntegerCodeForString("esds");
public static final int TYPE_hvc1 = Util.getIntegerCodeForString("hvc1");
public static final int TYPE_hev1 = Util.getIntegerCodeForString("hev1");
public static final int TYPE_mdat = Util.getIntegerCodeForString("mdat");
public static final int TYPE_mp4a = Util.getIntegerCodeForString("mp4a");
public static final int TYPE_ac_3 = Util.getIntegerCodeForString("ac-3");
@ -58,6 +59,8 @@ import java.util.List;
public static final int TYPE_minf = Util.getIntegerCodeForString("minf");
public static final int TYPE_stbl = Util.getIntegerCodeForString("stbl");
public static final int TYPE_avcC = Util.getIntegerCodeForString("avcC");
public static final int TYPE_hvcC = Util.getIntegerCodeForString("hvcC");
public static final int TYPE_esds = Util.getIntegerCodeForString("esds");
public static final int TYPE_moof = Util.getIntegerCodeForString("moof");
public static final int TYPE_traf = Util.getIntegerCodeForString("traf");
public static final int TYPE_mvex = Util.getIntegerCodeForString("mvex");

View file

@ -20,8 +20,8 @@ import com.google.android.exoplayer.MediaFormat;
import com.google.android.exoplayer.util.Ac3Util;
import com.google.android.exoplayer.util.Assertions;
import com.google.android.exoplayer.util.CodecSpecificDataUtil;
import com.google.android.exoplayer.util.H264Util;
import com.google.android.exoplayer.util.MimeTypes;
import com.google.android.exoplayer.util.NalUnitUtil;
import com.google.android.exoplayer.util.ParsableByteArray;
import com.google.android.exoplayer.util.Util;
@ -331,25 +331,22 @@ import java.util.List;
Assertions.checkArgument(childAtomSize > 0, "childAtomSize should be positive");
int childAtomType = stsd.readInt();
if (childAtomType == Atom.TYPE_avc1 || childAtomType == Atom.TYPE_avc3
|| childAtomType == Atom.TYPE_encv) {
parseAvcFromParent(stsd, childStartPosition, childAtomSize, durationUs, holder, i);
|| childAtomType == Atom.TYPE_encv || childAtomType == Atom.TYPE_mp4v
|| childAtomType == Atom.TYPE_hvc1 || childAtomType == Atom.TYPE_hev1) {
parseVideoSampleEntry(stsd, childStartPosition, childAtomSize, durationUs, holder, i);
} else if (childAtomType == Atom.TYPE_mp4a || childAtomType == Atom.TYPE_enca
|| childAtomType == Atom.TYPE_ac_3) {
parseAudioSampleEntry(stsd, childAtomType, childStartPosition, childAtomSize, durationUs,
holder, i);
} else if (childAtomType == Atom.TYPE_TTML) {
holder.mediaFormat = MediaFormat.createTtmlFormat();
} else if (childAtomType == Atom.TYPE_mp4v) {
holder.mediaFormat = parseMp4vFromParent(stsd, childStartPosition, childAtomSize,
durationUs);
}
stsd.setPosition(childStartPosition + childAtomSize);
}
return holder;
}
/** Returns the media format for an avc1 box. */
private static void parseAvcFromParent(ParsableByteArray parent, int position, int size,
private static void parseVideoSampleEntry(ParsableByteArray parent, int position, int size,
long durationUs, StsdDataHolder out, int entryIndex) {
parent.setPosition(position + Atom.HEADER_SIZE);
@ -361,6 +358,7 @@ import java.util.List;
List<byte[]> initializationData = null;
int childPosition = parent.getPosition();
String mimeType = null;
while (childPosition - position < size) {
parent.setPosition(childPosition);
int childStartPosition = parent.getPosition();
@ -372,9 +370,22 @@ import java.util.List;
Assertions.checkArgument(childAtomSize > 0, "childAtomSize should be positive");
int childAtomType = parent.readInt();
if (childAtomType == Atom.TYPE_avcC) {
Assertions.checkState(mimeType == null);
mimeType = MimeTypes.VIDEO_H264;
Pair<List<byte[]>, Integer> avcCData = parseAvcCFromParent(parent, childStartPosition);
initializationData = avcCData.first;
out.nalUnitLengthFieldLength = avcCData.second;
} else if (childAtomType == Atom.TYPE_hvcC) {
Assertions.checkState(mimeType == null);
mimeType = MimeTypes.VIDEO_H265;
Pair<List<byte[]>, Integer> hvcCData = parseHvcCFromParent(parent, childStartPosition);
initializationData = hvcCData.first;
out.nalUnitLengthFieldLength = hvcCData.second;
} else if (childAtomType == Atom.TYPE_esds) {
Assertions.checkState(mimeType == null);
mimeType = MimeTypes.VIDEO_MP4V;
initializationData =
Collections.singletonList(parseEsdsFromParent(parent, childStartPosition));
} else if (childAtomType == Atom.TYPE_sinf) {
out.trackEncryptionBoxes[entryIndex] =
parseSinfFromParent(parent, childStartPosition, childAtomSize);
@ -383,9 +394,8 @@ import java.util.List;
}
childPosition += childAtomSize;
}
out.mediaFormat = MediaFormat.createVideoFormat(MimeTypes.VIDEO_H264, MediaFormat.NO_VALUE,
durationUs, width, height, pixelWidthHeightRatio, initializationData);
out.mediaFormat = MediaFormat.createVideoFormat(mimeType, MediaFormat.NO_VALUE, durationUs,
width, height, pixelWidthHeightRatio, initializationData);
}
private static Pair<List<byte[]>, Integer> parseAvcCFromParent(ParsableByteArray parent,
@ -401,15 +411,58 @@ import java.util.List;
// expose the AVC profile and level somewhere useful; Most likely in MediaFormat.
int numSequenceParameterSets = parent.readUnsignedByte() & 0x1F;
for (int j = 0; j < numSequenceParameterSets; j++) {
initializationData.add(H264Util.parseChildNalUnit(parent));
initializationData.add(NalUnitUtil.parseChildNalUnit(parent));
}
int numPictureParameterSets = parent.readUnsignedByte();
for (int j = 0; j < numPictureParameterSets; j++) {
initializationData.add(H264Util.parseChildNalUnit(parent));
initializationData.add(NalUnitUtil.parseChildNalUnit(parent));
}
return Pair.create(initializationData, nalUnitLengthFieldLength);
}
private static Pair<List<byte[]>, Integer> parseHvcCFromParent(ParsableByteArray parent,
int position) {
// Skip to the NAL unit length size field.
parent.setPosition(position + Atom.HEADER_SIZE + 21);
int lengthSizeMinusOne = parent.readUnsignedByte() & 0x03;
// Calculate the combined size of all VPS/SPS/PPS bitstreams.
int numberOfArrays = parent.readUnsignedByte();
int csdLength = 0;
int csdStartPosition = parent.getPosition();
for (int i = 0; i < numberOfArrays; i++) {
parent.skipBytes(1); // completeness (1), nal_unit_type (7)
int numberOfNalUnits = parent.readUnsignedShort();
for (int j = 0; j < numberOfNalUnits; j++) {
int nalUnitLength = parent.readUnsignedShort();
csdLength += 4 + nalUnitLength; // Start code and NAL unit.
parent.skipBytes(nalUnitLength);
}
}
// Concatenate the codec-specific data into a single buffer.
parent.setPosition(csdStartPosition);
byte[] buffer = new byte[csdLength];
int bufferPosition = 0;
for (int i = 0; i < numberOfArrays; i++) {
parent.skipBytes(1); // completeness (1), nal_unit_type (7)
int numberOfNalUnits = parent.readUnsignedShort();
for (int j = 0; j < numberOfNalUnits; j++) {
int nalUnitLength = parent.readUnsignedShort();
System.arraycopy(NalUnitUtil.NAL_START_CODE, 0, buffer, bufferPosition,
NalUnitUtil.NAL_START_CODE.length);
bufferPosition += NalUnitUtil.NAL_START_CODE.length;
System.arraycopy(parent.data, parent.getPosition(), buffer, bufferPosition, nalUnitLength);
bufferPosition += nalUnitLength;
parent.skipBytes(nalUnitLength);
}
}
List<byte[]> initializationData = csdLength == 0 ? Collections.<byte[]>emptyList()
: Collections.singletonList(buffer);
return Pair.create(initializationData, lengthSizeMinusOne + 1);
}
private static TrackEncryptionBox parseSinfFromParent(ParsableByteArray parent, int position,
int size) {
int childPosition = position + Atom.HEADER_SIZE;
@ -462,34 +515,6 @@ import java.util.List;
return null;
}
/** Returns the media format for an mp4v box. */
private static MediaFormat parseMp4vFromParent(ParsableByteArray parent, int position, int size,
long durationUs) {
parent.setPosition(position + Atom.HEADER_SIZE);
parent.skipBytes(24);
int width = parent.readUnsignedShort();
int height = parent.readUnsignedShort();
parent.skipBytes(50);
List<byte[]> initializationData = new ArrayList<>(1);
int childPosition = parent.getPosition();
while (childPosition - position < size) {
parent.setPosition(childPosition);
int childStartPosition = parent.getPosition();
int childAtomSize = parent.readInt();
Assertions.checkArgument(childAtomSize > 0, "childAtomSize should be positive");
int childAtomType = parent.readInt();
if (childAtomType == Atom.TYPE_esds) {
initializationData.add(parseEsdsFromParent(parent, childStartPosition));
}
childPosition += childAtomSize;
}
return MediaFormat.createVideoFormat(
MimeTypes.VIDEO_MP4V, MediaFormat.NO_VALUE, durationUs, width, height, initializationData);
}
private static void parseAudioSampleEntry(ParsableByteArray parent, int atomType, int position,
int size, long durationUs, StsdDataHolder out, int entryIndex) {
parent.setPosition(position + Atom.HEADER_SIZE);

View file

@ -26,8 +26,8 @@ import com.google.android.exoplayer.extractor.TrackOutput;
import com.google.android.exoplayer.extractor.mp4.Atom.ContainerAtom;
import com.google.android.exoplayer.extractor.mp4.Atom.LeafAtom;
import com.google.android.exoplayer.util.Assertions;
import com.google.android.exoplayer.util.H264Util;
import com.google.android.exoplayer.util.MimeTypes;
import com.google.android.exoplayer.util.NalUnitUtil;
import com.google.android.exoplayer.util.ParsableByteArray;
import com.google.android.exoplayer.util.Util;
@ -106,7 +106,7 @@ public final class FragmentedMp4Extractor implements Extractor {
public FragmentedMp4Extractor(int workaroundFlags) {
this.workaroundFlags = workaroundFlags;
atomHeader = new ParsableByteArray(Atom.HEADER_SIZE);
nalStartCode = new ParsableByteArray(H264Util.NAL_START_CODE);
nalStartCode = new ParsableByteArray(NalUnitUtil.NAL_START_CODE);
nalLength = new ParsableByteArray(4);
encryptionSignalByte = new ParsableByteArray(1);
extendedTypeScratch = new byte[16];

View file

@ -23,7 +23,7 @@ import com.google.android.exoplayer.extractor.SeekMap;
import com.google.android.exoplayer.extractor.TrackOutput;
import com.google.android.exoplayer.extractor.mp4.Atom.ContainerAtom;
import com.google.android.exoplayer.util.Assertions;
import com.google.android.exoplayer.util.H264Util;
import com.google.android.exoplayer.util.NalUnitUtil;
import com.google.android.exoplayer.util.ParsableByteArray;
import java.io.IOException;
@ -72,7 +72,7 @@ public final class Mp4Extractor implements Extractor, SeekMap {
public Mp4Extractor() {
atomHeader = new ParsableByteArray(Atom.LONG_HEADER_SIZE);
containerAtoms = new Stack<>();
nalStartCode = new ParsableByteArray(H264Util.NAL_START_CODE);
nalStartCode = new ParsableByteArray(NalUnitUtil.NAL_START_CODE);
nalLength = new ParsableByteArray(4);
parserState = STATE_READING_ATOM_HEADER;
}

View file

@ -19,8 +19,8 @@ import com.google.android.exoplayer.C;
import com.google.android.exoplayer.MediaFormat;
import com.google.android.exoplayer.extractor.TrackOutput;
import com.google.android.exoplayer.util.Assertions;
import com.google.android.exoplayer.util.H264Util;
import com.google.android.exoplayer.util.MimeTypes;
import com.google.android.exoplayer.util.NalUnitUtil;
import com.google.android.exoplayer.util.ParsableBitArray;
import com.google.android.exoplayer.util.ParsableByteArray;
@ -104,7 +104,7 @@ import java.util.List;
@Override
public void seek() {
seiReader.seek();
H264Util.clearPrefixFlags(prefixFlags);
NalUnitUtil.clearPrefixFlags(prefixFlags);
sps.reset();
pps.reset();
sei.reset();
@ -128,7 +128,7 @@ import java.util.List;
// Scan the appended data, processing NAL units as they are encountered
while (offset < limit) {
int nextNalUnitOffset = H264Util.findNalUnit(dataArray, offset, limit, prefixFlags);
int nextNalUnitOffset = NalUnitUtil.findNalUnit(dataArray, offset, limit, prefixFlags);
if (nextNalUnitOffset < limit) {
// We've seen the start of a NAL unit.
@ -139,7 +139,7 @@ import java.util.List;
feedNalUnitTargetBuffersData(dataArray, offset, nextNalUnitOffset);
}
int nalUnitType = H264Util.getNalUnitType(dataArray, nextNalUnitOffset);
int nalUnitType = NalUnitUtil.getNalUnitType(dataArray, nextNalUnitOffset);
int bytesWrittenPastNalUnit = limit - nextNalUnitOffset;
switch (nalUnitType) {
case NAL_UNIT_TYPE_IDR:

View file

@ -16,6 +16,7 @@
package com.google.android.exoplayer.hls;
import com.google.android.exoplayer.C;
import com.google.android.exoplayer.LoadControl;
import com.google.android.exoplayer.MediaFormat;
import com.google.android.exoplayer.MediaFormatHolder;
import com.google.android.exoplayer.SampleHolder;
@ -25,8 +26,6 @@ import com.google.android.exoplayer.TrackRenderer;
import com.google.android.exoplayer.chunk.BaseChunkSampleSourceEventListener;
import com.google.android.exoplayer.chunk.Chunk;
import com.google.android.exoplayer.chunk.Format;
import com.google.android.exoplayer.upstream.Allocator;
import com.google.android.exoplayer.upstream.DefaultAllocator;
import com.google.android.exoplayer.upstream.Loader;
import com.google.android.exoplayer.upstream.Loader.Loadable;
import com.google.android.exoplayer.util.Assertions;
@ -52,23 +51,22 @@ public class HlsSampleSource implements SampleSource, Loader.Callback {
*/
public static final int DEFAULT_MIN_LOADABLE_RETRY_COUNT = 3;
private static final int BUFFER_FRAGMENT_LENGTH = 256 * 1024;
private static final int NO_RESET_PENDING = -1;
private final HlsChunkSource chunkSource;
private final LinkedList<HlsExtractorWrapper> extractors;
private final Allocator allocator;
private final boolean frameAccurateSeeking;
private final int minLoadableRetryCount;
private final int requestedBufferSize;
private final long requestedBufferDurationUs;
private final int bufferSizeContribution;
private final int eventSourceId;
private final LoadControl loadControl;
private final Handler eventHandler;
private final EventListener eventListener;
private int remainingReleaseCount;
private boolean prepared;
private boolean loadControlRegistered;
private int trackCount;
private int enabledTrackCount;
private boolean[] trackEnabledStates;
@ -92,36 +90,35 @@ public class HlsSampleSource implements SampleSource, Loader.Callback {
private long currentLoadableExceptionTimestamp;
private long currentLoadStartTimeMs;
public HlsSampleSource(HlsChunkSource chunkSource, boolean frameAccurateSeeking,
int downstreamRendererCount, int requestedBufferSize, long requestedBufferDurationMs) {
this(chunkSource, frameAccurateSeeking, downstreamRendererCount, requestedBufferSize,
requestedBufferDurationMs, null, null, 0);
public HlsSampleSource(HlsChunkSource chunkSource, LoadControl loadControl,
int bufferSizeContribution, boolean frameAccurateSeeking, int downstreamRendererCount) {
this(chunkSource, loadControl, bufferSizeContribution, frameAccurateSeeking,
downstreamRendererCount, null, null, 0);
}
public HlsSampleSource(HlsChunkSource chunkSource, boolean frameAccurateSeeking,
int downstreamRendererCount, int requestedBufferSize, long requestedBufferDurationMs,
public HlsSampleSource(HlsChunkSource chunkSource, LoadControl loadControl,
int bufferSizeContribution, boolean frameAccurateSeeking, int downstreamRendererCount,
Handler eventHandler, EventListener eventListener, int eventSourceId) {
this(chunkSource, frameAccurateSeeking, downstreamRendererCount, requestedBufferSize,
requestedBufferDurationMs, eventHandler, eventListener, eventSourceId,
this(chunkSource, loadControl, bufferSizeContribution, frameAccurateSeeking,
downstreamRendererCount, eventHandler, eventListener, eventSourceId,
DEFAULT_MIN_LOADABLE_RETRY_COUNT);
}
public HlsSampleSource(HlsChunkSource chunkSource, boolean frameAccurateSeeking,
int downstreamRendererCount, int requestedBufferSize, long requestedBufferDurationMs,
Handler eventHandler, EventListener eventListener,
int eventSourceId, int minLoadableRetryCount) {
public HlsSampleSource(HlsChunkSource chunkSource, LoadControl loadControl,
int bufferSizeContribution, boolean frameAccurateSeeking, int downstreamRendererCount,
Handler eventHandler, EventListener eventListener, int eventSourceId,
int minLoadableRetryCount) {
this.chunkSource = chunkSource;
this.loadControl = loadControl;
this.bufferSizeContribution = bufferSizeContribution;
this.frameAccurateSeeking = frameAccurateSeeking;
this.remainingReleaseCount = downstreamRendererCount;
this.requestedBufferSize = requestedBufferSize;
this.requestedBufferDurationUs = requestedBufferDurationMs * 1000;
this.minLoadableRetryCount = minLoadableRetryCount;
this.eventHandler = eventHandler;
this.eventListener = eventListener;
this.eventSourceId = eventSourceId;
this.pendingResetPositionUs = NO_RESET_PENDING;
extractors = new LinkedList<>();
allocator = new DefaultAllocator(BUFFER_FRAGMENT_LENGTH);
}
@Override
@ -150,6 +147,10 @@ public class HlsSampleSource implements SampleSource, Loader.Callback {
if (loader == null) {
loader = new Loader("Loader:HLS");
}
if (!loadControlRegistered) {
loadControl.register(this, bufferSizeContribution);
loadControlRegistered = true;
}
if (!loader.isLoading()) {
// We're going to have to start loading a chunk to get what we need for preparation. We should
// attempt to load the chunk at positionUs, so that we'll already be loading the correct chunk
@ -182,6 +183,10 @@ public class HlsSampleSource implements SampleSource, Loader.Callback {
trackEnabledStates[track] = true;
downstreamMediaFormats[track] = null;
downstreamFormat = null;
if (!loadControlRegistered) {
loadControl.register(this, bufferSizeContribution);
loadControlRegistered = true;
}
if (enabledTrackCount == 1) {
seekToUs(positionUs);
}
@ -194,12 +199,16 @@ public class HlsSampleSource implements SampleSource, Loader.Callback {
enabledTrackCount--;
trackEnabledStates[track] = false;
pendingDiscontinuities[track] = false;
if (loadControlRegistered) {
loadControl.unregister(this);
loadControlRegistered = false;
}
if (enabledTrackCount == 0) {
if (loader.isLoading()) {
loader.cancelLoading();
} else {
clearState();
allocator.trim(0);
loadControl.trimAllocator();
}
}
}
@ -357,7 +366,7 @@ public class HlsSampleSource implements SampleSource, Loader.Callback {
maybeStartLoading();
} else {
clearState();
allocator.trim(0);
loadControl.trimAllocator();
}
}
@ -368,7 +377,7 @@ public class HlsSampleSource implements SampleSource, Loader.Callback {
restartFrom(pendingResetPositionUs);
} else {
clearState();
allocator.trim(0);
loadControl.trimAllocator();
}
}
@ -464,13 +473,23 @@ public class HlsSampleSource implements SampleSource, Loader.Callback {
}
private void maybeStartLoading() {
if (currentLoadableExceptionFatal || loadingFinished || loader.isLoading()) {
if (currentLoadableExceptionFatal) {
// We've failed, but we still need to update the control with our current state.
loadControl.update(this, downstreamPositionUs, -1, false, true);
return;
}
long now = SystemClock.elapsedRealtime();
long nextLoadPositionUs = getNextLoadPositionUs();
boolean isBackedOff = currentLoadableException != null;
boolean loadingOrBackedOff = loader.isLoading() || isBackedOff;
// Update the control with our current state, and determine whether we're the next loader.
boolean nextLoader = loadControl.update(this, downstreamPositionUs, nextLoadPositionUs,
loadingOrBackedOff, false);
if (isBackedOff) {
long elapsedMillis = SystemClock.elapsedRealtime() - currentLoadableExceptionTimestamp;
long elapsedMillis = now - currentLoadableExceptionTimestamp;
if (elapsedMillis >= getRetryDelayMillis(currentLoadableExceptionCount)) {
currentLoadableException = null;
loader.startLoading(currentLoadable, this);
@ -478,20 +497,17 @@ public class HlsSampleSource implements SampleSource, Loader.Callback {
return;
}
if (previousTsLoadable != null
&& (previousTsLoadable.endTimeUs - downstreamPositionUs >= requestedBufferDurationUs
|| allocator.getTotalBytesAllocated() >= requestedBufferSize)) {
// We already have the target amount of data or time buffered.
if (loader.isLoading() || !nextLoader) {
return;
}
Chunk nextLoadable = chunkSource.getChunkOperation(previousTsLoadable,
pendingResetPositionUs, downstreamPositionUs);
Chunk nextLoadable = chunkSource.getChunkOperation(previousTsLoadable, pendingResetPositionUs,
downstreamPositionUs);
if (nextLoadable == null) {
return;
}
currentLoadStartTimeMs = SystemClock.elapsedRealtime();
currentLoadStartTimeMs = now;
currentLoadable = nextLoadable;
if (isTsChunk(currentLoadable)) {
TsChunk tsChunk = (TsChunk) currentLoadable;
@ -500,7 +516,7 @@ public class HlsSampleSource implements SampleSource, Loader.Callback {
}
HlsExtractorWrapper extractorWrapper = tsChunk.extractorWrapper;
if (extractors.isEmpty() || extractors.getLast() != extractorWrapper) {
extractorWrapper.init(allocator);
extractorWrapper.init(loadControl.getAllocator());
extractors.addLast(extractorWrapper);
}
notifyLoadStarted(tsChunk.dataSpec.length, tsChunk.type, tsChunk.trigger, tsChunk.format,
@ -513,6 +529,18 @@ public class HlsSampleSource implements SampleSource, Loader.Callback {
loader.startLoading(currentLoadable, this);
}
/**
* Gets the next load time, assuming that the next load starts where the previous chunk ended (or
* from the pending reset time, if there is one).
*/
private long getNextLoadPositionUs() {
if (isPendingReset()) {
return pendingResetPositionUs;
} else {
return previousTsLoadable.isLastChunk ? -1 : previousTsLoadable.endTimeUs;
}
}
private boolean isTsChunk(Chunk chunk) {
return chunk instanceof TsChunk;
}

View file

@ -217,11 +217,22 @@ public class SmoothStreamingChunkSource implements ChunkSource {
SmoothStreamingManifest newManifest = manifestFetcher.getManifest();
if (currentManifest != newManifest && newManifest != null) {
StreamElement currentElement = getElement(currentManifest);
int currentElementChunkCount = currentElement.chunkCount;
StreamElement newElement = getElement(newManifest);
if (newElement.chunkCount == 0) {
currentManifestChunkOffset += currentElement.chunkCount;
} else if (currentElement.chunkCount > 0) {
currentManifestChunkOffset += currentElement.getChunkIndex(newElement.getStartTimeUs(0));
if (currentElementChunkCount == 0 || newElement.chunkCount == 0) {
// There's no overlap between the old and new elements because at least one is empty.
currentManifestChunkOffset += currentElementChunkCount;
} else {
long currentElementEndTimeUs = currentElement.getStartTimeUs(currentElementChunkCount - 1)
+ currentElement.getChunkDurationUs(currentElementChunkCount - 1);
long newElementStartTimeUs = newElement.getStartTimeUs(0);
if (currentElementEndTimeUs <= newElementStartTimeUs) {
// There's no overlap between the old and new elements.
currentManifestChunkOffset += currentElementChunkCount;
} else {
// The new element overlaps with the old one.
currentManifestChunkOffset += currentElement.getChunkIndex(newElementStartTimeUs);
}
}
currentManifest = newManifest;
finishedCurrentManifest = false;

View file

@ -0,0 +1,125 @@
/*
* Copyright (C) 2014 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.google.android.exoplayer.util;
import com.google.android.exoplayer.CodecCounters;
import com.google.android.exoplayer.chunk.Format;
import com.google.android.exoplayer.upstream.BandwidthMeter;
import android.widget.TextView;
/**
 * A helper class for periodically updating debug information displayed by a {@link TextView}.
 */
public final class DebugTextViewHelper implements Runnable {

  /**
   * Provides debug information about an ongoing playback.
   */
  public interface Provider {

    /**
     * Returns the current playback position, in milliseconds.
     */
    long getCurrentPosition();

    /**
     * Returns a format whose information should be displayed, or null.
     */
    Format getFormat();

    /**
     * Returns a {@link BandwidthMeter} whose estimate should be displayed, or null.
     */
    BandwidthMeter getBandwidthMeter();

    /**
     * Returns a {@link CodecCounters} whose information should be displayed, or null.
     */
    CodecCounters getCodecCounters();

  }

  // Interval between successive refreshes of the displayed text.
  private static final int REFRESH_INTERVAL_MS = 1000;

  private final TextView textView;
  private final Provider debuggable;

  /**
   * @param debuggable The {@link Provider} from which debug information should be obtained.
   * @param textView The {@link TextView} that should be updated to display the information.
   */
  public DebugTextViewHelper(Provider debuggable, TextView textView) {
    this.debuggable = debuggable;
    this.textView = textView;
  }

  /**
   * Starts periodic updates of the {@link TextView}.
   * <p>
   * Should be called from the application's main thread.
   */
  public void start() {
    // Ensure any previously scheduled refresh is cancelled before kicking off a new cycle.
    stop();
    run();
  }

  /**
   * Stops periodic updates of the {@link TextView}.
   * <p>
   * Should be called from the application's main thread.
   */
  public void stop() {
    textView.removeCallbacks(this);
  }

  @Override
  public void run() {
    // Refresh the displayed text, then reschedule this runnable on the view's handler.
    textView.setText(getRenderString());
    textView.postDelayed(this, REFRESH_INTERVAL_MS);
  }

  // Assembles the full debug line: time, quality, bandwidth and codec counter segments,
  // separated by single spaces.
  private String getRenderString() {
    StringBuilder builder = new StringBuilder();
    builder.append(getTimeString()).append(" ");
    builder.append(getQualityString()).append(" ");
    builder.append(getBandwidthString()).append(" ");
    builder.append(getVideoCodecCountersString());
    return builder.toString();
  }

  // Current playback position, rendered as "ms(<position>)".
  private String getTimeString() {
    StringBuilder builder = new StringBuilder("ms(");
    builder.append(debuggable.getCurrentPosition());
    builder.append(")");
    return builder.toString();
  }

  // Id, bitrate and height of the current format, or placeholders if no format is available.
  private String getQualityString() {
    Format currentFormat = debuggable.getFormat();
    if (currentFormat == null) {
      return "id:? br:? h:?";
    }
    return "id:" + currentFormat.id + " br:" + currentFormat.bitrate + " h:"
        + currentFormat.height;
  }

  // Bandwidth estimate in kbit/s, or "bw:?" when no meter or no estimate is available.
  private String getBandwidthString() {
    BandwidthMeter meter = debuggable.getBandwidthMeter();
    if (meter == null || meter.getBitrateEstimate() == BandwidthMeter.NO_ESTIMATE) {
      return "bw:?";
    }
    return "bw:" + (meter.getBitrateEstimate() / 1000);
  }

  // Codec counter debug string, or the empty string when counters are unavailable.
  private String getVideoCodecCountersString() {
    CodecCounters counters = debuggable.getCodecCounters();
    if (counters == null) {
      return "";
    }
    return counters.getDebugString();
  }

}

View file

@ -33,6 +33,7 @@ public class MimeTypes {
public static final String VIDEO_MP4 = BASE_TYPE_VIDEO + "/mp4";
public static final String VIDEO_WEBM = BASE_TYPE_VIDEO + "/webm";
public static final String VIDEO_H264 = BASE_TYPE_VIDEO + "/avc";
public static final String VIDEO_H265 = BASE_TYPE_VIDEO + "/hevc";
public static final String VIDEO_VP8 = BASE_TYPE_VIDEO + "/x-vnd.on2.vp8";
public static final String VIDEO_VP9 = BASE_TYPE_VIDEO + "/x-vnd.on2.vp9";
public static final String VIDEO_MP4V = BASE_TYPE_VIDEO + "/mp4v-es";

View file

@ -18,11 +18,11 @@ package com.google.android.exoplayer.util;
import java.nio.ByteBuffer;
/**
* Utility methods for handling H264 data.
* Utility methods for handling H.264/AVC and H.265/HEVC NAL units.
*/
public final class H264Util {
public final class NalUnitUtil {
/** Four initial bytes that must prefix H.264/AVC NAL units for decoding. */
/** Four initial bytes that must prefix NAL units for decoding. */
public static final byte[] NAL_START_CODE = new byte[] {0, 0, 0, 1};
/**
@ -173,7 +173,7 @@ public final class H264Util {
return result;
}
private H264Util() {
private NalUnitUtil() {
// Prevent instantiation.
}

View file

@ -26,6 +26,7 @@ import android.content.pm.PackageManager.NameNotFoundException;
import android.os.Build;
import android.text.TextUtils;
import java.io.ByteArrayOutputStream;
import java.io.IOException;
import java.io.InputStream;
import java.io.OutputStream;
@ -40,6 +41,7 @@ import java.util.Collections;
import java.util.GregorianCalendar;
import java.util.List;
import java.util.Locale;
import java.util.Map;
import java.util.TimeZone;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;
@ -59,6 +61,12 @@ public final class Util {
*/
public static final int SDK_INT = android.os.Build.VERSION.SDK_INT;
/**
* Like {@link android.os.Build#PRODUCT}, but in a place where it can be conveniently overridden
* for local testing.
*/
public static final String PRODUCT = android.os.Build.PRODUCT;
private static final Pattern XS_DATE_TIME_PATTERN = Pattern.compile(
"(\\d\\d\\d\\d)\\-(\\d\\d)\\-(\\d\\d)[Tt]"
+ "(\\d\\d):(\\d\\d):(\\d\\d)(\\.(\\d+))?"
@ -577,4 +585,56 @@ public final class Util {
+ ") " + "ExoPlayerLib/" + ExoPlayerLibraryInfo.VERSION;
}
/**
* Executes a post request using {@link HttpURLConnection}.
*
* @param url The request URL.
* @param data The request body, or null.
* @param requestProperties Request properties, or null.
* @return The response body.
* @throws IOException If an error occurred making the request.
*/
// TODO: Remove this and use HttpDataSource once DataSpec supports inclusion of a POST body.
public static byte[] executePost(String url, byte[] data, Map<String, String> requestProperties)
throws IOException {
HttpURLConnection urlConnection = null;
try {
urlConnection = (HttpURLConnection) new URL(url).openConnection();
urlConnection.setRequestMethod("POST");
urlConnection.setDoOutput(data != null);
urlConnection.setDoInput(true);
if (requestProperties != null) {
for (Map.Entry<String, String> requestProperty : requestProperties.entrySet()) {
urlConnection.setRequestProperty(requestProperty.getKey(), requestProperty.getValue());
}
}
// Write the request body, if there is one.
if (data != null) {
OutputStream out = urlConnection.getOutputStream();
try {
out.write(data);
} finally {
out.close();
}
}
// Read and return the response body.
InputStream inputStream = urlConnection.getInputStream();
try {
ByteArrayOutputStream byteArrayOutputStream = new ByteArrayOutputStream();
byte scratch[] = new byte[1024];
int bytesRead;
while ((bytesRead = inputStream.read(scratch)) != -1) {
byteArrayOutputStream.write(scratch, 0, bytesRead);
}
return byteArrayOutputStream.toByteArray();
} finally {
inputStream.close();
}
} finally {
if (urlConnection != null) {
urlConnection.disconnect();
}
}
}
}

View file

@ -1,6 +1,6 @@
<?xml version="1.0" encoding="UTF-8"?>
<projectDescription>
<name>ExoPlayerTests</name>
<name>ExoPlayerLibTests</name>
<comment></comment>
<projects>
<project>ExoPlayerLib</project>

View file

@ -15,11 +15,9 @@
*/
package com.google.android.exoplayer.dash;
import static org.mockito.Mockito.mock;
import static org.mockito.Mockito.when;
import com.google.android.exoplayer.MediaFormat;
import com.google.android.exoplayer.TimeRange;
import com.google.android.exoplayer.TrackRenderer;
import com.google.android.exoplayer.chunk.ChunkOperationHolder;
import com.google.android.exoplayer.chunk.Format;
@ -57,19 +55,12 @@ public class DashChunkSourceTest extends InstrumentationTestCase {
private static final FormatEvaluator EVALUATOR = new FixedEvaluator();
private static final long VOD_DURATION = 30000;
private static final long LIVE_SEGMENT_COUNT = 5;
private static final long LIVE_SEGMENT_DURATION_MS = 1000;
private static final long LIVE_TIMESHIFT_BUFFER_DEPTH_MS =
LIVE_SEGMENT_COUNT * LIVE_SEGMENT_DURATION_MS;
private static final long AVAILABILITY_START_TIME_MS = 60000;
private static final long AVAILABILITY_REALTIME_OFFSET_MS = 1000;
private static final long AVAILABILITY_CURRENT_TIME_MS =
AVAILABILITY_START_TIME_MS + LIVE_TIMESHIFT_BUFFER_DEPTH_MS - AVAILABILITY_REALTIME_OFFSET_MS;
private static final long LIVE_SEEK_BEYOND_EDGE_MS = 60000;
private static final long AVAILABILITY_START_TIME = 0;
private static final long AVAILABILITY_LATENCY = 5000;
private static final long AVAILABILITY_REALTIME_OFFSET = 1000;
private static final long AVAILABILITY_CURRENT_TIME =
AVAILABILITY_START_TIME + AVAILABILITY_LATENCY - AVAILABILITY_REALTIME_OFFSET;
private static final FakeClock AVAILABILITY_CLOCK = new FakeClock(AVAILABILITY_CURRENT_TIME);
private static final int TALL_HEIGHT = 200;
private static final int WIDE_WIDTH = 400;
@ -99,21 +90,6 @@ public class DashChunkSourceTest extends InstrumentationTestCase {
assertEquals(TALL_HEIGHT, out.getMaxVideoHeight());
}
public void testGetSeekRangeOnVod() {
DashChunkSource chunkSource = new DashChunkSource(generateVodMpd(), AdaptationSet.TYPE_VIDEO,
null, null, mock(FormatEvaluator.class));
chunkSource.enable();
TimeRange seekRange = chunkSource.getSeekRange();
long[] seekRangeValuesUs = seekRange.getCurrentBoundsUs(null);
assertEquals(0, seekRangeValuesUs[0]);
assertEquals(VOD_DURATION * 1000, seekRangeValuesUs[1]);
long[] seekRangeValuesMs = seekRange.getCurrentBoundsMs(null);
assertEquals(0, seekRangeValuesMs[0]);
assertEquals(VOD_DURATION, seekRangeValuesMs[1]);
}
public void testMaxVideoDimensionsLegacy() {
SingleSegmentBase segmentBase1 = new SingleSegmentBase("https://example.com/1.mp4");
Representation representation1 =
@ -131,254 +107,147 @@ public class DashChunkSourceTest extends InstrumentationTestCase {
assertEquals(TALL_HEIGHT, out.getMaxVideoHeight());
}
public void testLiveEdgeNoLatency() {
long startTimeMs = 0;
long liveEdgeLatencyMs = 0;
long seekPositionMs = LIVE_SEEK_BEYOND_EDGE_MS;
long seekRangeStartMs = 0;
long seekRangeEndMs = LIVE_SEGMENT_COUNT * LIVE_SEGMENT_DURATION_MS - liveEdgeLatencyMs;
long chunkStartTimeMs = 4000;
long chunkEndTimeMs = 5000;
public void testLiveEdgeNoLatencyWithTimeline() {
DashChunkSource chunkSource = setupLiveEdgeTimelineTest(0L);
List<MediaChunk> queue = new ArrayList<>();
ChunkOperationHolder out = new ChunkOperationHolder();
chunkSource.getChunkOperation(queue, 0, 0, out);
checkLiveEdgeLatencyWithTimeline(startTimeMs, liveEdgeLatencyMs, seekPositionMs,
seekRangeStartMs, seekRangeEndMs, chunkStartTimeMs, chunkEndTimeMs);
checkLiveEdgeLatencyWithTemplateAndUnlimitedTimeshift(startTimeMs, liveEdgeLatencyMs,
seekPositionMs, seekRangeEndMs, chunkStartTimeMs, chunkEndTimeMs);
checkLiveEdgeLatencyWithTemplateAndLimitedTimeshift(startTimeMs, liveEdgeLatencyMs,
seekPositionMs, seekRangeStartMs, seekRangeEndMs, chunkStartTimeMs, chunkEndTimeMs);
assertEquals(4000000L, ((MediaChunk) out.chunk).startTimeUs);
assertEquals(5000000L, ((MediaChunk) out.chunk).endTimeUs);
}
public void testLiveEdgeAlmostNoLatency() {
long startTimeMs = 0;
long liveEdgeLatencyMs = 1;
long seekPositionMs = LIVE_SEEK_BEYOND_EDGE_MS;
long seekRangeStartMs = 0;
long seekRangeEndMs = LIVE_SEGMENT_COUNT * LIVE_SEGMENT_DURATION_MS - liveEdgeLatencyMs;
long chunkStartTimeMs = 4000;
long chunkEndTimeMs = 5000;
public void testLiveEdge500msLatencyWithTimeline() {
DashChunkSource chunkSource = setupLiveEdgeTimelineTest(500L);
List<MediaChunk> queue = new ArrayList<>();
ChunkOperationHolder out = new ChunkOperationHolder();
chunkSource.getChunkOperation(queue, 0, 0, out);
checkLiveEdgeLatencyWithTimeline(startTimeMs, liveEdgeLatencyMs, seekPositionMs,
seekRangeStartMs, seekRangeEndMs, chunkStartTimeMs, chunkEndTimeMs);
checkLiveEdgeLatencyWithTemplateAndUnlimitedTimeshift(startTimeMs, liveEdgeLatencyMs,
seekPositionMs, seekRangeEndMs, chunkStartTimeMs, chunkEndTimeMs);
checkLiveEdgeLatencyWithTemplateAndLimitedTimeshift(startTimeMs, liveEdgeLatencyMs,
seekPositionMs, seekRangeStartMs, seekRangeEndMs, chunkStartTimeMs, chunkEndTimeMs);
assertEquals(4000000L, ((MediaChunk) out.chunk).startTimeUs);
assertEquals(5000000L, ((MediaChunk) out.chunk).endTimeUs);
}
public void testLiveEdge500msLatency() {
long startTimeMs = 0;
long liveEdgeLatencyMs = 500;
long seekPositionMs = LIVE_SEEK_BEYOND_EDGE_MS;
long seekRangeStartMs = 0;
long seekRangeEndMs = LIVE_SEGMENT_COUNT * LIVE_SEGMENT_DURATION_MS - liveEdgeLatencyMs;
long chunkStartTimeMs = 4000;
long chunkEndTimeMs = 5000;
public void testLiveEdge1000msLatencyWithTimeline() {
DashChunkSource chunkSource = setupLiveEdgeTimelineTest(1000L);
List<MediaChunk> queue = new ArrayList<>();
ChunkOperationHolder out = new ChunkOperationHolder();
chunkSource.getChunkOperation(queue, 0, 0, out);
checkLiveEdgeLatencyWithTimeline(startTimeMs, liveEdgeLatencyMs, seekPositionMs,
seekRangeStartMs, seekRangeEndMs, chunkStartTimeMs, chunkEndTimeMs);
checkLiveEdgeLatencyWithTemplateAndUnlimitedTimeshift(startTimeMs, liveEdgeLatencyMs,
seekPositionMs, seekRangeEndMs, chunkStartTimeMs, chunkEndTimeMs);
checkLiveEdgeLatencyWithTemplateAndLimitedTimeshift(startTimeMs, liveEdgeLatencyMs,
seekPositionMs, seekRangeStartMs, seekRangeEndMs, chunkStartTimeMs, chunkEndTimeMs);
assertEquals(4000000L, ((MediaChunk) out.chunk).startTimeUs);
assertEquals(5000000L, ((MediaChunk) out.chunk).endTimeUs);
}
public void testLiveEdge1000msLatency() {
long startTimeMs = 0;
long liveEdgeLatencyMs = 1000;
long seekPositionMs = LIVE_SEEK_BEYOND_EDGE_MS;
long seekRangeStartMs = 0;
long seekRangeEndMs = LIVE_SEGMENT_COUNT * LIVE_SEGMENT_DURATION_MS - liveEdgeLatencyMs;
long chunkStartTimeMs = 4000;
long chunkEndTimeMs = 5000;
public void testLiveEdge1001msLatencyWithTimeline() {
DashChunkSource chunkSource = setupLiveEdgeTimelineTest(1001L);
List<MediaChunk> queue = new ArrayList<>();
ChunkOperationHolder out = new ChunkOperationHolder();
chunkSource.getChunkOperation(queue, 0, 0, out);
checkLiveEdgeLatencyWithTimeline(startTimeMs, liveEdgeLatencyMs, seekPositionMs,
seekRangeStartMs, seekRangeEndMs, chunkStartTimeMs, chunkEndTimeMs);
checkLiveEdgeLatencyWithTemplateAndUnlimitedTimeshift(startTimeMs, liveEdgeLatencyMs,
seekPositionMs, seekRangeEndMs, chunkStartTimeMs, chunkEndTimeMs);
checkLiveEdgeLatencyWithTemplateAndLimitedTimeshift(startTimeMs, liveEdgeLatencyMs,
seekPositionMs, seekRangeStartMs, seekRangeEndMs, chunkStartTimeMs, chunkEndTimeMs);
assertEquals(3000000L, ((MediaChunk) out.chunk).startTimeUs);
assertEquals(4000000L, ((MediaChunk) out.chunk).endTimeUs);
}
public void testLiveEdge1001msLatency() {
long startTimeMs = 0;
long liveEdgeLatencyMs = 1001;
long seekPositionMs = LIVE_SEEK_BEYOND_EDGE_MS;
long seekRangeStartMs = 0;
long seekRangeEndMs = LIVE_SEGMENT_COUNT * LIVE_SEGMENT_DURATION_MS - liveEdgeLatencyMs;
long chunkStartTimeMs = 3000;
long chunkEndTimeMs = 4000;
public void testLiveEdge2500msLatencyWithTimeline() {
DashChunkSource chunkSource = setupLiveEdgeTimelineTest(2500L);
List<MediaChunk> queue = new ArrayList<>();
ChunkOperationHolder out = new ChunkOperationHolder();
chunkSource.getChunkOperation(queue, 0, 0, out);
checkLiveEdgeLatencyWithTimeline(startTimeMs, liveEdgeLatencyMs, seekPositionMs,
seekRangeStartMs, seekRangeEndMs, chunkStartTimeMs, chunkEndTimeMs);
checkLiveEdgeLatencyWithTemplateAndUnlimitedTimeshift(startTimeMs, liveEdgeLatencyMs,
seekPositionMs, seekRangeEndMs, chunkStartTimeMs, chunkEndTimeMs);
checkLiveEdgeLatencyWithTemplateAndLimitedTimeshift(startTimeMs, liveEdgeLatencyMs,
seekPositionMs, seekRangeStartMs, seekRangeEndMs, chunkStartTimeMs, chunkEndTimeMs);
assertEquals(2000000L, ((MediaChunk) out.chunk).startTimeUs);
assertEquals(3000000L, ((MediaChunk) out.chunk).endTimeUs);
}
public void testLiveEdge2500msLatency() {
long startTimeMs = 0;
long liveEdgeLatencyMs = 2500;
long seekPositionMs = LIVE_SEEK_BEYOND_EDGE_MS;
long seekRangeStartMs = 0;
long seekRangeEndMs = LIVE_SEGMENT_COUNT * LIVE_SEGMENT_DURATION_MS - liveEdgeLatencyMs;
long chunkStartTimeMs = 2000;
long chunkEndTimeMs = 3000;
public void testLiveEdgeVeryHighLatencyWithTimeline() {
DashChunkSource chunkSource = setupLiveEdgeTimelineTest(10000L);
List<MediaChunk> queue = new ArrayList<>();
ChunkOperationHolder out = new ChunkOperationHolder();
chunkSource.getChunkOperation(queue, 0, 0, out);
checkLiveEdgeLatencyWithTimeline(startTimeMs, liveEdgeLatencyMs, seekPositionMs,
seekRangeStartMs, seekRangeEndMs, chunkStartTimeMs, chunkEndTimeMs);
checkLiveEdgeLatencyWithTemplateAndUnlimitedTimeshift(startTimeMs, liveEdgeLatencyMs,
seekPositionMs, seekRangeEndMs, chunkStartTimeMs, chunkEndTimeMs);
checkLiveEdgeLatencyWithTemplateAndLimitedTimeshift(startTimeMs, liveEdgeLatencyMs,
seekPositionMs, seekRangeStartMs, seekRangeEndMs, chunkStartTimeMs, chunkEndTimeMs);
assertEquals(0L, ((MediaChunk) out.chunk).startTimeUs);
assertEquals(1000000L, ((MediaChunk) out.chunk).endTimeUs);
}
public void testLiveEdgeVeryHighLatency() {
long startTimeMs = 0;
long liveEdgeLatencyMs = 10000;
long seekPositionMs = LIVE_SEEK_BEYOND_EDGE_MS;
long seekRangeStartMs = 0;
long seekRangeEndMs = 0;
long chunkStartTimeMs = 0;
long chunkEndTimeMs = 1000;
public void testLiveEdgeNoLatencyWithTemplate() {
DashChunkSource chunkSource = setupLiveEdgeTemplateTest(0L);
List<MediaChunk> queue = new ArrayList<>();
ChunkOperationHolder out = new ChunkOperationHolder();
chunkSource.getChunkOperation(queue, 0, 0, out);
checkLiveEdgeLatencyWithTimeline(startTimeMs, liveEdgeLatencyMs, seekPositionMs,
seekRangeStartMs, seekRangeEndMs, chunkStartTimeMs, chunkEndTimeMs);
checkLiveEdgeLatencyWithTemplateAndUnlimitedTimeshift(startTimeMs, liveEdgeLatencyMs,
seekPositionMs, seekRangeEndMs, chunkStartTimeMs, chunkEndTimeMs);
checkLiveEdgeLatencyWithTemplateAndLimitedTimeshift(startTimeMs, liveEdgeLatencyMs,
seekPositionMs, seekRangeStartMs, seekRangeEndMs, chunkStartTimeMs, chunkEndTimeMs);
// this should actually return the "5th" segment, but it currently returns the "6th", which
// doesn't actually exist yet; this will be resolved in a subsequent cl (cl/87518875).
//assertEquals(4000000L, ((MediaChunk) out.chunk).startTimeUs);
//assertEquals(5000000L, ((MediaChunk) out.chunk).endTimeUs);
}
public void testLiveEdgeNoLatencyInProgress() {
long startTimeMs = 3000;
long liveEdgeLatencyMs = 0;
long seekPositionMs = LIVE_SEEK_BEYOND_EDGE_MS;
long seekRangeStartMs = 3000;
long seekRangeEndMs = 3000 + LIVE_SEGMENT_COUNT * LIVE_SEGMENT_DURATION_MS - liveEdgeLatencyMs;
long chunkStartTimeMs = 7000;
long chunkEndTimeMs = 8000;
public void testLiveEdgeAlmostNoLatencyWithTemplate() {
DashChunkSource chunkSource = setupLiveEdgeTemplateTest(1L);
List<MediaChunk> queue = new ArrayList<>();
ChunkOperationHolder out = new ChunkOperationHolder();
chunkSource.getChunkOperation(queue, 0, 0, out);
checkLiveEdgeLatencyWithTimeline(startTimeMs, liveEdgeLatencyMs, seekPositionMs,
seekRangeStartMs, seekRangeEndMs, chunkStartTimeMs, chunkEndTimeMs);
checkLiveEdgeLatencyWithTemplateAndUnlimitedTimeshift(startTimeMs, liveEdgeLatencyMs,
seekPositionMs, seekRangeEndMs, chunkStartTimeMs, chunkEndTimeMs);
checkLiveEdgeLatencyWithTemplateAndLimitedTimeshift(startTimeMs, liveEdgeLatencyMs,
seekPositionMs, seekRangeStartMs, seekRangeEndMs, chunkStartTimeMs, chunkEndTimeMs);
assertEquals(4000000L, ((MediaChunk) out.chunk).startTimeUs);
assertEquals(5000000L, ((MediaChunk) out.chunk).endTimeUs);
}
public void testLiveEdgeAlmostNoLatencyInProgress() {
long startTimeMs = 3000;
long liveEdgeLatencyMs = 1;
long seekPositionMs = LIVE_SEEK_BEYOND_EDGE_MS;
long seekRangeStartMs = 3000;
long seekRangeEndMs = 3000 + LIVE_SEGMENT_COUNT * LIVE_SEGMENT_DURATION_MS - liveEdgeLatencyMs;
long chunkStartTimeMs = 7000;
long chunkEndTimeMs = 8000;
public void testLiveEdge500msLatencyWithTemplate() {
DashChunkSource chunkSource = setupLiveEdgeTemplateTest(500L);
List<MediaChunk> queue = new ArrayList<>();
ChunkOperationHolder out = new ChunkOperationHolder();
chunkSource.getChunkOperation(queue, 0, 0, out);
checkLiveEdgeLatencyWithTimeline(startTimeMs, liveEdgeLatencyMs, seekPositionMs,
seekRangeStartMs, seekRangeEndMs, chunkStartTimeMs, chunkEndTimeMs);
checkLiveEdgeLatencyWithTemplateAndUnlimitedTimeshift(startTimeMs, liveEdgeLatencyMs,
seekPositionMs, seekRangeEndMs, chunkStartTimeMs, chunkEndTimeMs);
checkLiveEdgeLatencyWithTemplateAndLimitedTimeshift(startTimeMs, liveEdgeLatencyMs,
seekPositionMs, seekRangeStartMs, seekRangeEndMs, chunkStartTimeMs, chunkEndTimeMs);
assertEquals(4000000L, ((MediaChunk) out.chunk).startTimeUs);
assertEquals(5000000L, ((MediaChunk) out.chunk).endTimeUs);
}
public void testLiveEdge500msLatencyInProgress() {
long startTimeMs = 3000;
long liveEdgeLatencyMs = 500;
long seekPositionMs = LIVE_SEEK_BEYOND_EDGE_MS;
long seekRangeStartMs = 3000;
long seekRangeEndMs = 3000 + LIVE_SEGMENT_COUNT * LIVE_SEGMENT_DURATION_MS - liveEdgeLatencyMs;
long chunkStartTimeMs = 7000;
long chunkEndTimeMs = 8000;
public void testLiveEdge1000msLatencyWithTemplate() {
DashChunkSource chunkSource = setupLiveEdgeTemplateTest(1000L);
List<MediaChunk> queue = new ArrayList<>();
ChunkOperationHolder out = new ChunkOperationHolder();
chunkSource.getChunkOperation(queue, 0, 0, out);
checkLiveEdgeLatencyWithTimeline(startTimeMs, liveEdgeLatencyMs, seekPositionMs,
seekRangeStartMs, seekRangeEndMs, chunkStartTimeMs, chunkEndTimeMs);
checkLiveEdgeLatencyWithTemplateAndUnlimitedTimeshift(startTimeMs, liveEdgeLatencyMs,
seekPositionMs, seekRangeEndMs, chunkStartTimeMs, chunkEndTimeMs);
checkLiveEdgeLatencyWithTemplateAndLimitedTimeshift(startTimeMs, liveEdgeLatencyMs,
seekPositionMs, seekRangeStartMs, seekRangeEndMs, chunkStartTimeMs, chunkEndTimeMs);
assertEquals(4000000L, ((MediaChunk) out.chunk).startTimeUs);
assertEquals(5000000L, ((MediaChunk) out.chunk).endTimeUs);
}
public void testLiveEdge1000msLatencyInProgress() {
long startTimeMs = 3000;
long liveEdgeLatencyMs = 1000;
long seekPositionMs = LIVE_SEEK_BEYOND_EDGE_MS;
long seekRangeStartMs = 3000;
long seekRangeEndMs = 3000 + LIVE_SEGMENT_COUNT * LIVE_SEGMENT_DURATION_MS - liveEdgeLatencyMs;
long chunkStartTimeMs = 7000;
long chunkEndTimeMs = 8000;
public void testLiveEdge1001msLatencyWithTemplate() {
DashChunkSource chunkSource = setupLiveEdgeTemplateTest(1001L);
List<MediaChunk> queue = new ArrayList<>();
ChunkOperationHolder out = new ChunkOperationHolder();
chunkSource.getChunkOperation(queue, 0, 0, out);
checkLiveEdgeLatencyWithTimeline(startTimeMs, liveEdgeLatencyMs, seekPositionMs,
seekRangeStartMs, seekRangeEndMs, chunkStartTimeMs, chunkEndTimeMs);
checkLiveEdgeLatencyWithTemplateAndUnlimitedTimeshift(startTimeMs, liveEdgeLatencyMs,
seekPositionMs, seekRangeEndMs, chunkStartTimeMs, chunkEndTimeMs);
checkLiveEdgeLatencyWithTemplateAndLimitedTimeshift(startTimeMs, liveEdgeLatencyMs,
seekPositionMs, seekRangeStartMs, seekRangeEndMs, chunkStartTimeMs, chunkEndTimeMs);
assertEquals(3000000L, ((MediaChunk) out.chunk).startTimeUs);
assertEquals(4000000L, ((MediaChunk) out.chunk).endTimeUs);
}
public void testLiveEdge1001msLatencyInProgress() {
long startTimeMs = 3000;
long liveEdgeLatencyMs = 1001;
long seekPositionMs = LIVE_SEEK_BEYOND_EDGE_MS;
long seekRangeStartMs = 3000;
long seekRangeEndMs = 3000 + LIVE_SEGMENT_COUNT * LIVE_SEGMENT_DURATION_MS - liveEdgeLatencyMs;
long chunkStartTimeMs = 6000;
long chunkEndTimeMs = 7000;
public void testLiveEdge2500msLatencyWithTemplate() {
DashChunkSource chunkSource = setupLiveEdgeTemplateTest(2500L);
List<MediaChunk> queue = new ArrayList<>();
ChunkOperationHolder out = new ChunkOperationHolder();
chunkSource.getChunkOperation(queue, 0, 0, out);
checkLiveEdgeLatencyWithTimeline(startTimeMs, liveEdgeLatencyMs, seekPositionMs,
seekRangeStartMs, seekRangeEndMs, chunkStartTimeMs, chunkEndTimeMs);
checkLiveEdgeLatencyWithTemplateAndUnlimitedTimeshift(startTimeMs, liveEdgeLatencyMs,
seekPositionMs, seekRangeEndMs, chunkStartTimeMs, chunkEndTimeMs);
checkLiveEdgeLatencyWithTemplateAndLimitedTimeshift(startTimeMs, liveEdgeLatencyMs,
seekPositionMs, seekRangeStartMs, seekRangeEndMs, chunkStartTimeMs, chunkEndTimeMs);
assertEquals(2000000L, ((MediaChunk) out.chunk).startTimeUs);
assertEquals(3000000L, ((MediaChunk) out.chunk).endTimeUs);
}
public void testLiveEdge2500msLatencyInProgress() {
long startTimeMs = 3000;
long liveEdgeLatencyMs = 2500;
long seekPositionMs = LIVE_SEEK_BEYOND_EDGE_MS;
long seekRangeStartMs = 3000;
long seekRangeEndMs = 3000 + LIVE_SEGMENT_COUNT * LIVE_SEGMENT_DURATION_MS - liveEdgeLatencyMs;
long chunkStartTimeMs = 5000;
long chunkEndTimeMs = 6000;
public void testLiveEdgeVeryHighLatencyWithTemplate() {
DashChunkSource chunkSource = setupLiveEdgeTemplateTest(10000L);
List<MediaChunk> queue = new ArrayList<>();
ChunkOperationHolder out = new ChunkOperationHolder();
chunkSource.getChunkOperation(queue, 0, 0, out);
checkLiveEdgeLatencyWithTimeline(startTimeMs, liveEdgeLatencyMs, seekPositionMs,
seekRangeStartMs, seekRangeEndMs, chunkStartTimeMs, chunkEndTimeMs);
checkLiveEdgeLatencyWithTemplateAndUnlimitedTimeshift(startTimeMs, liveEdgeLatencyMs,
seekPositionMs, seekRangeEndMs, chunkStartTimeMs, chunkEndTimeMs);
checkLiveEdgeLatencyWithTemplateAndLimitedTimeshift(startTimeMs, liveEdgeLatencyMs,
seekPositionMs, seekRangeStartMs, seekRangeEndMs, chunkStartTimeMs, chunkEndTimeMs);
}
public void testLiveEdgeVeryHighLatencyInProgress() {
long startTimeMs = 3000;
long liveEdgeLatencyMs = 10000;
long seekPositionMs = LIVE_SEEK_BEYOND_EDGE_MS;
long seekRangeStartMs = 3000;
long seekRangeEndMs = 3000;
long chunkStartTimeMs = 3000;
long chunkEndTimeMs = 4000;
checkLiveEdgeLatencyWithTimeline(startTimeMs, liveEdgeLatencyMs, seekPositionMs,
seekRangeStartMs, seekRangeEndMs, chunkStartTimeMs, chunkEndTimeMs);
checkLiveEdgeLatencyWithTemplateAndUnlimitedTimeshift(startTimeMs, liveEdgeLatencyMs,
seekPositionMs, 0, 0, 1000);
checkLiveEdgeLatencyWithTemplateAndLimitedTimeshift(startTimeMs, liveEdgeLatencyMs,
seekPositionMs, seekRangeStartMs, seekRangeEndMs, chunkStartTimeMs, chunkEndTimeMs);
assertEquals(0L, ((MediaChunk) out.chunk).startTimeUs);
assertEquals(1000000L, ((MediaChunk) out.chunk).endTimeUs);
}
private static MediaPresentationDescription generateMpd(boolean live,
List<Representation> representations, boolean limitTimeshiftBuffer) {
List<Representation> representations) {
Representation firstRepresentation = representations.get(0);
AdaptationSet adaptationSet = new AdaptationSet(0, AdaptationSet.TYPE_UNKNOWN, representations);
Period period = new Period(null, firstRepresentation.periodStartMs,
firstRepresentation.periodDurationMs, Collections.singletonList(adaptationSet));
long duration = (live) ? TrackRenderer.UNKNOWN_TIME_US
: firstRepresentation.periodDurationMs - firstRepresentation.periodStartMs;
return new MediaPresentationDescription(AVAILABILITY_START_TIME_MS, duration, -1, live, -1,
(limitTimeshiftBuffer) ? LIVE_TIMESHIFT_BUFFER_DEPTH_MS : -1,
return new MediaPresentationDescription(AVAILABILITY_START_TIME, duration, -1, live, -1, -1,
null, Collections.singletonList(period));
}
@ -387,126 +256,72 @@ public class DashChunkSourceTest extends InstrumentationTestCase {
SingleSegmentBase segmentBase1 = new SingleSegmentBase("https://example.com/1.mp4");
Representation representation1 =
Representation.newInstance(0, VOD_DURATION, null, 0, TALL_VIDEO, segmentBase1);
Representation.newInstance(0, 0, null, 0, TALL_VIDEO, segmentBase1);
representations.add(representation1);
SingleSegmentBase segmentBase2 = new SingleSegmentBase("https://example.com/2.mp4");
Representation representation2 =
Representation.newInstance(0, VOD_DURATION, null, 0, WIDE_VIDEO, segmentBase2);
Representation.newInstance(0, 0, null, 0, WIDE_VIDEO, segmentBase2);
representations.add(representation2);
return generateMpd(false, representations, false);
return generateMpd(false, representations);
}
private static MediaPresentationDescription generateLiveMpdWithTimeline(long startTime) {
private static MediaPresentationDescription generateLiveMpdWithTimeline() {
List<Representation> representations = new ArrayList<>();
List<SegmentTimelineElement> segmentTimeline = new ArrayList<>();
segmentTimeline.add(new SegmentTimelineElement(0L, 1000L));
segmentTimeline.add(new SegmentTimelineElement(1000L, 1000L));
segmentTimeline.add(new SegmentTimelineElement(2000L, 1000L));
segmentTimeline.add(new SegmentTimelineElement(3000L, 1000L));
segmentTimeline.add(new SegmentTimelineElement(4000L, 1000L));
List<RangedUri> mediaSegments = new ArrayList<>();
long byteStart = 0;
for (int i = 0; i < LIVE_SEGMENT_COUNT; i++) {
segmentTimeline.add(new SegmentTimelineElement(startTime, LIVE_SEGMENT_DURATION_MS));
mediaSegments.add(new RangedUri("", "", byteStart, 500L));
startTime += LIVE_SEGMENT_DURATION_MS;
byteStart += 500;
}
mediaSegments.add(new RangedUri("", "", 0L, 500L));
mediaSegments.add(new RangedUri("", "", 500L, 500L));
mediaSegments.add(new RangedUri("", "", 1000L, 500L));
mediaSegments.add(new RangedUri("", "", 1500L, 500L));
mediaSegments.add(new RangedUri("", "", 2000L, 500L));
MultiSegmentBase segmentBase = new SegmentList(null, 1000, 0,
TrackRenderer.UNKNOWN_TIME_US, 0, TrackRenderer.UNKNOWN_TIME_US, segmentTimeline,
TrackRenderer.UNKNOWN_TIME_US, 1, TrackRenderer.UNKNOWN_TIME_US, segmentTimeline,
mediaSegments);
Representation representation = Representation.newInstance(startTime,
TrackRenderer.UNKNOWN_TIME_US, null, 0, REGULAR_VIDEO, segmentBase);
Representation representation = Representation.newInstance(0, TrackRenderer.UNKNOWN_TIME_US,
null, 0, REGULAR_VIDEO, segmentBase);
representations.add(representation);
return generateMpd(true, representations, false);
return generateMpd(true, representations);
}
private static MediaPresentationDescription generateLiveMpdWithTemplate(
boolean limitTimeshiftBuffer) {
private static MediaPresentationDescription generateLiveMpdWithTemplate() {
List<Representation> representations = new ArrayList<>();
UrlTemplate initializationTemplate = null;
UrlTemplate mediaTemplate = UrlTemplate.compile("$RepresentationID$/$Number$");
MultiSegmentBase segmentBase = new SegmentTemplate(null, 1000, 0,
TrackRenderer.UNKNOWN_TIME_US, 0, LIVE_SEGMENT_DURATION_MS, null,
TrackRenderer.UNKNOWN_TIME_US, 1, 1000, null,
initializationTemplate, mediaTemplate, "http://www.youtube.com");
Representation representation = Representation.newInstance(0, TrackRenderer.UNKNOWN_TIME_US,
null, 0, REGULAR_VIDEO, segmentBase);
representations.add(representation);
return generateMpd(true, representations, limitTimeshiftBuffer);
return generateMpd(true, representations);
}
private DashChunkSource setupLiveEdgeTimelineTest(long startTime, long liveEdgeLatencyMs) {
MediaPresentationDescription manifest = generateLiveMpdWithTimeline(startTime);
private DashChunkSource setupLiveEdgeTimelineTest(long liveEdgeLatencyMs) {
MediaPresentationDescription manifest = generateLiveMpdWithTimeline();
when(mockManifestFetcher.getManifest()).thenReturn(manifest);
DashChunkSource chunkSource = new DashChunkSource(mockManifestFetcher, manifest,
AdaptationSet.TYPE_VIDEO, null, mockDataSource, EVALUATOR,
new FakeClock(AVAILABILITY_CURRENT_TIME_MS + startTime), liveEdgeLatencyMs * 1000,
AVAILABILITY_REALTIME_OFFSET_MS * 1000, null, null);
chunkSource.enable();
return chunkSource;
return new DashChunkSource(mockManifestFetcher, manifest, AdaptationSet.TYPE_VIDEO, null,
mockDataSource, EVALUATOR, AVAILABILITY_CLOCK, liveEdgeLatencyMs * 1000,
AVAILABILITY_REALTIME_OFFSET * 1000);
}
private DashChunkSource setupLiveEdgeTemplateTest(long startTime, long liveEdgeLatencyMs,
boolean limitTimeshiftBuffer) {
MediaPresentationDescription manifest = generateLiveMpdWithTemplate(limitTimeshiftBuffer);
private DashChunkSource setupLiveEdgeTemplateTest(long liveEdgeLatencyMs) {
MediaPresentationDescription manifest = generateLiveMpdWithTemplate();
when(mockManifestFetcher.getManifest()).thenReturn(manifest);
DashChunkSource chunkSource = new DashChunkSource(mockManifestFetcher, manifest,
AdaptationSet.TYPE_VIDEO, null, mockDataSource, EVALUATOR,
new FakeClock(AVAILABILITY_CURRENT_TIME_MS + startTime), liveEdgeLatencyMs * 1000,
AVAILABILITY_REALTIME_OFFSET_MS * 1000, null, null);
chunkSource.enable();
return chunkSource;
}
private void checkLiveEdgeLatencyWithTimeline(long startTimeMs, long liveEdgeLatencyMs,
long seekPositionMs, long seekRangeStartMs, long seekRangeEndMs, long chunkStartTimeMs,
long chunkEndTimeMs) {
DashChunkSource chunkSource = setupLiveEdgeTimelineTest(startTimeMs, liveEdgeLatencyMs);
List<MediaChunk> queue = new ArrayList<>();
ChunkOperationHolder out = new ChunkOperationHolder();
chunkSource.getChunkOperation(queue, seekPositionMs * 1000, 0, out);
TimeRange seekRange = chunkSource.getSeekRange();
assertNotNull(out.chunk);
long[] seekRangeValuesUs = seekRange.getCurrentBoundsUs(null);
assertEquals(seekRangeStartMs * 1000, seekRangeValuesUs[0]);
assertEquals(seekRangeEndMs * 1000, seekRangeValuesUs[1]);
assertEquals(chunkStartTimeMs * 1000, ((MediaChunk) out.chunk).startTimeUs);
assertEquals(chunkEndTimeMs * 1000, ((MediaChunk) out.chunk).endTimeUs);
}
private void checkLiveEdgeLatencyWithTemplate(long startTimeMs, long liveEdgeLatencyMs,
long seekPositionMs, long seekRangeStartMs, long seekRangeEndMs, long chunkStartTimeMs,
long chunkEndTimeMs, boolean limitTimeshiftBuffer) {
DashChunkSource chunkSource = setupLiveEdgeTemplateTest(startTimeMs, liveEdgeLatencyMs,
limitTimeshiftBuffer);
List<MediaChunk> queue = new ArrayList<>();
ChunkOperationHolder out = new ChunkOperationHolder();
chunkSource.getChunkOperation(queue, seekPositionMs * 1000, 0, out);
TimeRange seekRange = chunkSource.getSeekRange();
assertNotNull(out.chunk);
long[] seekRangeValuesUs = seekRange.getCurrentBoundsUs(null);
assertEquals(seekRangeStartMs * 1000, seekRangeValuesUs[0]);
assertEquals(seekRangeEndMs * 1000, seekRangeValuesUs[1]);
assertEquals(chunkStartTimeMs * 1000, ((MediaChunk) out.chunk).startTimeUs);
assertEquals(chunkEndTimeMs * 1000, ((MediaChunk) out.chunk).endTimeUs);
}
private void checkLiveEdgeLatencyWithTemplateAndUnlimitedTimeshift(long startTimeMs,
long liveEdgeLatencyMs, long seekPositionMs, long seekRangeEndMs,
long chunkStartTimeMs, long chunkEndTimeMs) {
checkLiveEdgeLatencyWithTemplate(startTimeMs, liveEdgeLatencyMs, seekPositionMs, 0,
seekRangeEndMs, chunkStartTimeMs, chunkEndTimeMs, false);
}
/**
 * Convenience variant of {@code checkLiveEdgeLatencyWithTemplate} for the limited timeshift
 * buffer case (delegates with {@code limitTimeshiftBuffer = true}).
 *
 * <p>Fix: removed a stray {@code return new DashChunkSource(...)} statement that had been fused
 * into this {@code void} method (apparent merge/diff residue from a deleted fixture helper); it
 * made the method uncompilable and was unreachable in intent.
 */
private void checkLiveEdgeLatencyWithTemplateAndLimitedTimeshift(long startTimeMs,
    long liveEdgeLatencyMs, long seekPositionMs, long seekRangeStartMs, long seekRangeEndMs,
    long chunkStartTimeMs, long chunkEndTimeMs) {
  checkLiveEdgeLatencyWithTemplate(startTimeMs, liveEdgeLatencyMs, seekPositionMs,
      seekRangeStartMs, seekRangeEndMs, chunkStartTimeMs, chunkEndTimeMs, true);
}
}

View file

@@ -20,9 +20,9 @@ import junit.framework.TestCase;
import java.util.Arrays;
/**
* Tests for {@link H264Util}.
* Tests for {@link NalUnitUtil}.
*/
public class H264UtilTest extends TestCase {
public class NalUnitUtilTest extends TestCase {
private static final int TEST_PARTIAL_NAL_POSITION = 4;
private static final int TEST_NAL_POSITION = 10;
@@ -31,19 +31,19 @@ public class H264UtilTest extends TestCase {
byte[] data = buildTestData();
// Should find NAL unit.
int result = H264Util.findNalUnit(data, 0, data.length, null);
int result = NalUnitUtil.findNalUnit(data, 0, data.length, null);
assertEquals(TEST_NAL_POSITION, result);
// Should find NAL unit whose prefix ends one byte before the limit.
result = H264Util.findNalUnit(data, 0, TEST_NAL_POSITION + 4, null);
result = NalUnitUtil.findNalUnit(data, 0, TEST_NAL_POSITION + 4, null);
assertEquals(TEST_NAL_POSITION, result);
// Shouldn't find NAL unit whose prefix ends at the limit (since the limit is exclusive).
result = H264Util.findNalUnit(data, 0, TEST_NAL_POSITION + 3, null);
result = NalUnitUtil.findNalUnit(data, 0, TEST_NAL_POSITION + 3, null);
assertEquals(TEST_NAL_POSITION + 3, result);
// Should find NAL unit whose prefix starts at the offset.
result = H264Util.findNalUnit(data, TEST_NAL_POSITION, data.length, null);
result = NalUnitUtil.findNalUnit(data, TEST_NAL_POSITION, data.length, null);
assertEquals(TEST_NAL_POSITION, result);
// Shouldn't find NAL unit whose prefix starts one byte past the offset.
result = H264Util.findNalUnit(data, TEST_NAL_POSITION + 1, data.length, null);
result = NalUnitUtil.findNalUnit(data, TEST_NAL_POSITION + 1, data.length, null);
assertEquals(data.length, result);
}
@@ -54,9 +54,9 @@ public class H264UtilTest extends TestCase {
boolean[] prefixFlags = new boolean[3];
byte[] data1 = Arrays.copyOfRange(data, 0, TEST_NAL_POSITION + 1);
byte[] data2 = Arrays.copyOfRange(data, TEST_NAL_POSITION + 1, data.length);
int result = H264Util.findNalUnit(data1, 0, data1.length, prefixFlags);
int result = NalUnitUtil.findNalUnit(data1, 0, data1.length, prefixFlags);
assertEquals(data1.length, result);
result = H264Util.findNalUnit(data2, 0, data2.length, prefixFlags);
result = NalUnitUtil.findNalUnit(data2, 0, data2.length, prefixFlags);
assertEquals(-1, result);
assertPrefixFlagsCleared(prefixFlags);
@@ -64,9 +64,9 @@ public class H264UtilTest extends TestCase {
prefixFlags = new boolean[3];
data1 = Arrays.copyOfRange(data, 0, TEST_NAL_POSITION + 3);
data2 = Arrays.copyOfRange(data, TEST_NAL_POSITION + 3, data.length);
result = H264Util.findNalUnit(data1, 0, data1.length, prefixFlags);
result = NalUnitUtil.findNalUnit(data1, 0, data1.length, prefixFlags);
assertEquals(data1.length, result);
result = H264Util.findNalUnit(data2, 0, data2.length, prefixFlags);
result = NalUnitUtil.findNalUnit(data2, 0, data2.length, prefixFlags);
assertEquals(-3, result);
assertPrefixFlagsCleared(prefixFlags);
@@ -75,11 +75,11 @@ public class H264UtilTest extends TestCase {
data1 = Arrays.copyOfRange(data, 0, TEST_NAL_POSITION + 1);
data2 = Arrays.copyOfRange(data, TEST_NAL_POSITION + 1, TEST_NAL_POSITION + 2);
byte[] data3 = Arrays.copyOfRange(data, TEST_NAL_POSITION + 2, data.length);
result = H264Util.findNalUnit(data1, 0, data1.length, prefixFlags);
result = NalUnitUtil.findNalUnit(data1, 0, data1.length, prefixFlags);
assertEquals(data1.length, result);
result = H264Util.findNalUnit(data2, 0, data2.length, prefixFlags);
result = NalUnitUtil.findNalUnit(data2, 0, data2.length, prefixFlags);
assertEquals(data2.length, result);
result = H264Util.findNalUnit(data3, 0, data3.length, prefixFlags);
result = NalUnitUtil.findNalUnit(data3, 0, data3.length, prefixFlags);
assertEquals(-2, result);
assertPrefixFlagsCleared(prefixFlags);
@@ -89,13 +89,13 @@ public class H264UtilTest extends TestCase {
data2 = Arrays.copyOfRange(data, TEST_NAL_POSITION + 1, TEST_NAL_POSITION + 2);
data3 = Arrays.copyOfRange(data, TEST_NAL_POSITION + 2, TEST_NAL_POSITION + 3);
byte[] data4 = Arrays.copyOfRange(data, TEST_NAL_POSITION + 2, data.length);
result = H264Util.findNalUnit(data1, 0, data1.length, prefixFlags);
result = NalUnitUtil.findNalUnit(data1, 0, data1.length, prefixFlags);
assertEquals(data1.length, result);
result = H264Util.findNalUnit(data2, 0, data2.length, prefixFlags);
result = NalUnitUtil.findNalUnit(data2, 0, data2.length, prefixFlags);
assertEquals(data2.length, result);
result = H264Util.findNalUnit(data3, 0, data3.length, prefixFlags);
result = NalUnitUtil.findNalUnit(data3, 0, data3.length, prefixFlags);
assertEquals(data3.length, result);
result = H264Util.findNalUnit(data4, 0, data4.length, prefixFlags);
result = NalUnitUtil.findNalUnit(data4, 0, data4.length, prefixFlags);
assertEquals(-3, result);
assertPrefixFlagsCleared(prefixFlags);
@@ -103,9 +103,9 @@ public class H264UtilTest extends TestCase {
prefixFlags = new boolean[3];
data1 = Arrays.copyOfRange(data, 0, TEST_PARTIAL_NAL_POSITION + 2);
data2 = Arrays.copyOfRange(data, TEST_PARTIAL_NAL_POSITION + 2, data.length);
result = H264Util.findNalUnit(data1, 0, data1.length, prefixFlags);
result = NalUnitUtil.findNalUnit(data1, 0, data1.length, prefixFlags);
assertEquals(data1.length, result);
result = H264Util.findNalUnit(data2, 0, data2.length, prefixFlags);
result = NalUnitUtil.findNalUnit(data2, 0, data2.length, prefixFlags);
assertEquals(4, result);
assertPrefixFlagsCleared(prefixFlags);
}

View file

@@ -12,4 +12,5 @@
# Project target.
target=android-22
android.library=false
android.library.reference.1=../main