cachedPositions = new HashMap<>();
for (File file : files) {
SimpleCacheSpan cacheSpan = SimpleCacheSpan.createCacheEntry(file, index);
if (cacheSpan != null) {
- assertEquals(key, cacheSpan.key);
+ assertThat(cacheSpan.key).isEqualTo(key);
cachedPositions.put(cacheSpan.position, cacheSpan.lastAccessTimestamp);
}
}
- assertEquals(1, (long) cachedPositions.get((long) 0));
- assertEquals(2, (long) cachedPositions.get((long) 1));
- assertEquals(6, (long) cachedPositions.get((long) 5));
+ assertThat(cachedPositions.get((long) 0)).isEqualTo(1);
+ assertThat(cachedPositions.get((long) 1)).isEqualTo(2);
+ assertThat(cachedPositions.get((long) 5)).isEqualTo(6);
}
private static void createTestFile(File file, int length) throws IOException {
@@ -141,14 +146,14 @@ public class SimpleCacheSpanTest extends InstrumentationTestCase {
File cacheFile = createCacheSpanFile(cacheDir, id, offset, 1, lastAccessTimestamp);
SimpleCacheSpan cacheSpan = SimpleCacheSpan.createCacheEntry(cacheFile, index);
String message = cacheFile.toString();
- assertNotNull(message, cacheSpan);
- assertEquals(message, cacheDir, cacheFile.getParentFile());
- assertEquals(message, key, cacheSpan.key);
- assertEquals(message, offset, cacheSpan.position);
- assertEquals(message, 1, cacheSpan.length);
- assertTrue(message, cacheSpan.isCached);
- assertEquals(message, cacheFile, cacheSpan.file);
- assertEquals(message, lastAccessTimestamp, cacheSpan.lastAccessTimestamp);
+ assertWithMessage(message).that(cacheSpan).isNotNull();
+ assertWithMessage(message).that(cacheFile.getParentFile()).isEqualTo(cacheDir);
+ assertWithMessage(message).that(cacheSpan.key).isEqualTo(key);
+ assertWithMessage(message).that(cacheSpan.position).isEqualTo(offset);
+ assertWithMessage(message).that(cacheSpan.length).isEqualTo(1);
+ assertWithMessage(message).that(cacheSpan.isCached).isTrue();
+ assertWithMessage(message).that(cacheSpan.file).isEqualTo(cacheFile);
+ assertWithMessage(message).that(cacheSpan.lastAccessTimestamp).isEqualTo(lastAccessTimestamp);
}
private void assertNullCacheSpan(File parent, String key, long offset,
@@ -156,7 +161,7 @@ public class SimpleCacheSpanTest extends InstrumentationTestCase {
File cacheFile = SimpleCacheSpan.getCacheFile(parent, index.assignIdForKey(key), offset,
lastAccessTimestamp);
CacheSpan cacheSpan = SimpleCacheSpan.createCacheEntry(cacheFile, index);
- assertNull(cacheFile.toString(), cacheSpan);
+ assertWithMessage(cacheFile.toString()).that(cacheSpan).isNull();
}
}
diff --git a/library/core/src/androidTest/java/com/google/android/exoplayer2/util/ColorParserTest.java b/library/core/src/androidTest/java/com/google/android/exoplayer2/util/ColorParserTest.java
deleted file mode 100644
index 641b58b0ce..0000000000
--- a/library/core/src/androidTest/java/com/google/android/exoplayer2/util/ColorParserTest.java
+++ /dev/null
@@ -1,98 +0,0 @@
-/*
- * Copyright (C) 2016 The Android Open Source Project
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package com.google.android.exoplayer2.util;
-
-import android.graphics.Color;
-import android.test.InstrumentationTestCase;
-
-/**
- * Unit test for ColorParser.
- */
-public class ColorParserTest extends InstrumentationTestCase {
-
- // Negative tests.
-
- public void testParseUnknownColor() {
- try {
- ColorParser.parseTtmlColor("colorOfAnElectron");
- fail();
- } catch (IllegalArgumentException e) {
- // expected
- }
- }
-
- public void testParseNull() {
- try {
- ColorParser.parseTtmlColor(null);
- fail();
- } catch (IllegalArgumentException e) {
- // expected
- }
- }
-
- public void testParseEmpty() {
- try {
- ColorParser.parseTtmlColor("");
- fail();
- } catch (IllegalArgumentException e) {
- // expected
- }
- }
-
- public void testRgbColorParsingRgbValuesNegative() {
- try {
- ColorParser.parseTtmlColor("rgb(-4, 55, 209)");
- fail();
- } catch (IllegalArgumentException e) {
- // expected
- }
- }
-
- // Positive tests.
-
- public void testHexCodeParsing() {
- assertEquals(Color.WHITE, ColorParser.parseTtmlColor("#FFFFFF"));
- assertEquals(Color.WHITE, ColorParser.parseTtmlColor("#FFFFFFFF"));
- assertEquals(Color.parseColor("#FF123456"), ColorParser.parseTtmlColor("#123456"));
- // Hex colors in ColorParser are RGBA, where-as {@link Color#parseColor} takes ARGB.
- assertEquals(Color.parseColor("#00FFFFFF"), ColorParser.parseTtmlColor("#FFFFFF00"));
- assertEquals(Color.parseColor("#78123456"), ColorParser.parseTtmlColor("#12345678"));
- }
-
- public void testRgbColorParsing() {
- assertEquals(Color.WHITE, ColorParser.parseTtmlColor("rgb(255,255,255)"));
- // Spaces are ignored.
- assertEquals(Color.WHITE, ColorParser.parseTtmlColor(" rgb ( 255, 255, 255)"));
- }
-
- public void testRgbColorParsingRgbValuesOutOfBounds() {
- int outOfBounds = ColorParser.parseTtmlColor("rgb(999, 999, 999)");
- int color = Color.rgb(999, 999, 999);
- // Behave like the framework does.
- assertEquals(color, outOfBounds);
- }
-
- public void testRgbaColorParsing() {
- assertEquals(Color.WHITE, ColorParser.parseTtmlColor("rgba(255,255,255,255)"));
- assertEquals(Color.argb(255, 255, 255, 255),
- ColorParser.parseTtmlColor("rgba(255,255,255,255)"));
- assertEquals(Color.BLACK, ColorParser.parseTtmlColor("rgba(0, 0, 0, 255)"));
- assertEquals(Color.argb(0, 0, 0, 255), ColorParser.parseTtmlColor("rgba(0, 0, 255, 0)"));
- assertEquals(Color.RED, ColorParser.parseTtmlColor("rgba(255, 0, 0, 255)"));
- assertEquals(Color.argb(0, 255, 0, 255), ColorParser.parseTtmlColor("rgba(255, 0, 255, 0)"));
- assertEquals(Color.argb(205, 255, 0, 0), ColorParser.parseTtmlColor("rgba(255, 0, 0, 205)"));
- }
-}
diff --git a/library/core/src/androidTest/java/com/google/android/exoplayer2/util/ParsableBitArrayTest.java b/library/core/src/androidTest/java/com/google/android/exoplayer2/util/ParsableBitArrayTest.java
deleted file mode 100644
index d7b2b36740..0000000000
--- a/library/core/src/androidTest/java/com/google/android/exoplayer2/util/ParsableBitArrayTest.java
+++ /dev/null
@@ -1,177 +0,0 @@
-/*
- * Copyright (C) 2017 The Android Open Source Project
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package com.google.android.exoplayer2.util;
-
-import android.test.MoreAsserts;
-import junit.framework.TestCase;
-
-/**
- * Tests for {@link ParsableBitArray}.
- */
-public final class ParsableBitArrayTest extends TestCase {
-
- private static final byte[] TEST_DATA = new byte[] {0x3C, (byte) 0xD2, (byte) 0x5F, (byte) 0x01,
- (byte) 0xFF, (byte) 0x14, (byte) 0x60, (byte) 0x99};
-
- private ParsableBitArray testArray;
-
- @Override
- public void setUp() {
- testArray = new ParsableBitArray(TEST_DATA);
- }
-
- public void testReadAllBytes() {
- byte[] bytesRead = new byte[TEST_DATA.length];
- testArray.readBytes(bytesRead, 0, TEST_DATA.length);
- MoreAsserts.assertEquals(TEST_DATA, bytesRead);
- assertEquals(TEST_DATA.length * 8, testArray.getPosition());
- assertEquals(TEST_DATA.length, testArray.getBytePosition());
- }
-
- public void testReadBit() {
- assertReadBitsToEnd(0);
- }
-
- public void testReadBits() {
- assertEquals(getTestDataBits(0, 5), testArray.readBits(5));
- assertEquals(getTestDataBits(5, 0), testArray.readBits(0));
- assertEquals(getTestDataBits(5, 3), testArray.readBits(3));
- assertEquals(getTestDataBits(8, 16), testArray.readBits(16));
- assertEquals(getTestDataBits(24, 3), testArray.readBits(3));
- assertEquals(getTestDataBits(27, 18), testArray.readBits(18));
- assertEquals(getTestDataBits(45, 5), testArray.readBits(5));
- assertEquals(getTestDataBits(50, 14), testArray.readBits(14));
- }
-
- public void testReadBitsToByteArray() {
- byte[] result = new byte[TEST_DATA.length];
- // Test read within byte boundaries.
- testArray.readBits(result, 0, 6);
- assertEquals(TEST_DATA[0] & 0xFC, result[0]);
- // Test read across byte boundaries.
- testArray.readBits(result, 0, 8);
- assertEquals(((TEST_DATA[0] & 0x03) << 6) | ((TEST_DATA[1] & 0xFC) >> 2), result[0]);
- // Test reading across multiple bytes.
- testArray.readBits(result, 1, 50);
- for (int i = 1; i < 7; i++) {
- assertEquals((byte) (((TEST_DATA[i] & 0x03) << 6) | ((TEST_DATA[i + 1] & 0xFC) >> 2)),
- result[i]);
- }
- assertEquals((byte) (TEST_DATA[7] & 0x03) << 6, result[7]);
- assertEquals(0, testArray.bitsLeft());
- // Test read last buffer byte across input data bytes.
- testArray.setPosition(31);
- result[3] = 0;
- testArray.readBits(result, 3, 3);
- assertEquals((byte) 0xE0, result[3]);
- // Test read bits in the middle of a input data byte.
- result[0] = 0;
- assertEquals(34, testArray.getPosition());
- testArray.readBits(result, 0, 3);
- assertEquals((byte) 0xE0, result[0]);
- // Test read 0 bits.
- testArray.setPosition(32);
- result[1] = 0;
- testArray.readBits(result, 1, 0);
- assertEquals(0, result[1]);
- // Test reading a number of bits divisible by 8.
- testArray.setPosition(0);
- testArray.readBits(result, 0, 16);
- assertEquals(TEST_DATA[0], result[0]);
- assertEquals(TEST_DATA[1], result[1]);
- // Test least significant bits are unmodified.
- result[1] = (byte) 0xFF;
- testArray.readBits(result, 0, 9);
- assertEquals(0x5F, result[0]);
- assertEquals(0x7F, result[1]);
- }
-
- public void testRead32BitsByteAligned() {
- assertEquals(getTestDataBits(0, 32), testArray.readBits(32));
- assertEquals(getTestDataBits(32, 32), testArray.readBits(32));
- }
-
- public void testRead32BitsNonByteAligned() {
- assertEquals(getTestDataBits(0, 5), testArray.readBits(5));
- assertEquals(getTestDataBits(5, 32), testArray.readBits(32));
- }
-
- public void testSkipBytes() {
- testArray.skipBytes(2);
- assertReadBitsToEnd(16);
- }
-
- public void testSkipBitsByteAligned() {
- testArray.skipBits(16);
- assertReadBitsToEnd(16);
- }
-
- public void testSkipBitsNonByteAligned() {
- testArray.skipBits(5);
- assertReadBitsToEnd(5);
- }
-
- public void testSetPositionByteAligned() {
- testArray.setPosition(16);
- assertReadBitsToEnd(16);
- }
-
- public void testSetPositionNonByteAligned() {
- testArray.setPosition(5);
- assertReadBitsToEnd(5);
- }
-
- public void testByteAlignFromNonByteAligned() {
- testArray.setPosition(11);
- testArray.byteAlign();
- assertEquals(2, testArray.getBytePosition());
- assertEquals(16, testArray.getPosition());
- assertReadBitsToEnd(16);
- }
-
- public void testByteAlignFromByteAligned() {
- testArray.setPosition(16);
- testArray.byteAlign(); // Should be a no-op.
- assertEquals(2, testArray.getBytePosition());
- assertEquals(16, testArray.getPosition());
- assertReadBitsToEnd(16);
- }
-
- private void assertReadBitsToEnd(int expectedStartPosition) {
- int position = testArray.getPosition();
- assertEquals(expectedStartPosition, position);
- for (int i = position; i < TEST_DATA.length * 8; i++) {
- assertEquals(getTestDataBit(i), testArray.readBit());
- assertEquals(i + 1, testArray.getPosition());
- }
- }
-
- private static int getTestDataBits(int bitPosition, int length) {
- int result = 0;
- for (int i = 0; i < length; i++) {
- result = result << 1;
- if (getTestDataBit(bitPosition++)) {
- result |= 0x1;
- }
- }
- return result;
- }
-
- private static boolean getTestDataBit(int bitPosition) {
- return (TEST_DATA[bitPosition / 8] & (0x80 >>> (bitPosition % 8))) != 0;
- }
-
-}
diff --git a/library/core/src/androidTest/java/com/google/android/exoplayer2/util/UriUtilTest.java b/library/core/src/androidTest/java/com/google/android/exoplayer2/util/UriUtilTest.java
deleted file mode 100644
index 1755c6f70d..0000000000
--- a/library/core/src/androidTest/java/com/google/android/exoplayer2/util/UriUtilTest.java
+++ /dev/null
@@ -1,98 +0,0 @@
-/*
- * Copyright (C) 2016 The Android Open Source Project
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package com.google.android.exoplayer2.util;
-
-import junit.framework.TestCase;
-
-/**
- * Unit tests for {@link UriUtil}.
- */
-public class UriUtilTest extends TestCase {
-
- /**
- * Tests normal usage of {@link UriUtil#resolve(String, String)}.
- *
- * The test cases are taken from RFC-3986 5.4.1.
- */
- public void testResolveNormal() {
- String base = "http://a/b/c/d;p?q";
-
- assertEquals("g:h", UriUtil.resolve(base, "g:h"));
- assertEquals("http://a/b/c/g", UriUtil.resolve(base, "g"));
- assertEquals("http://a/b/c/g/", UriUtil.resolve(base, "g/"));
- assertEquals("http://a/g", UriUtil.resolve(base, "/g"));
- assertEquals("http://g", UriUtil.resolve(base, "//g"));
- assertEquals("http://a/b/c/d;p?y", UriUtil.resolve(base, "?y"));
- assertEquals("http://a/b/c/g?y", UriUtil.resolve(base, "g?y"));
- assertEquals("http://a/b/c/d;p?q#s", UriUtil.resolve(base, "#s"));
- assertEquals("http://a/b/c/g#s", UriUtil.resolve(base, "g#s"));
- assertEquals("http://a/b/c/g?y#s", UriUtil.resolve(base, "g?y#s"));
- assertEquals("http://a/b/c/;x", UriUtil.resolve(base, ";x"));
- assertEquals("http://a/b/c/g;x", UriUtil.resolve(base, "g;x"));
- assertEquals("http://a/b/c/g;x?y#s", UriUtil.resolve(base, "g;x?y#s"));
- assertEquals("http://a/b/c/d;p?q", UriUtil.resolve(base, ""));
- assertEquals("http://a/b/c/", UriUtil.resolve(base, "."));
- assertEquals("http://a/b/c/", UriUtil.resolve(base, "./"));
- assertEquals("http://a/b/", UriUtil.resolve(base, ".."));
- assertEquals("http://a/b/", UriUtil.resolve(base, "../"));
- assertEquals("http://a/b/g", UriUtil.resolve(base, "../g"));
- assertEquals("http://a/", UriUtil.resolve(base, "../.."));
- assertEquals("http://a/", UriUtil.resolve(base, "../../"));
- assertEquals("http://a/g", UriUtil.resolve(base, "../../g"));
- }
-
- /**
- * Tests abnormal usage of {@link UriUtil#resolve(String, String)}.
- *
- * The test cases are taken from RFC-3986 5.4.2.
- */
- public void testResolveAbnormal() {
- String base = "http://a/b/c/d;p?q";
-
- assertEquals("http://a/g", UriUtil.resolve(base, "../../../g"));
- assertEquals("http://a/g", UriUtil.resolve(base, "../../../../g"));
-
- assertEquals("http://a/g", UriUtil.resolve(base, "/./g"));
- assertEquals("http://a/g", UriUtil.resolve(base, "/../g"));
- assertEquals("http://a/b/c/g.", UriUtil.resolve(base, "g."));
- assertEquals("http://a/b/c/.g", UriUtil.resolve(base, ".g"));
- assertEquals("http://a/b/c/g..", UriUtil.resolve(base, "g.."));
- assertEquals("http://a/b/c/..g", UriUtil.resolve(base, "..g"));
-
- assertEquals("http://a/b/g", UriUtil.resolve(base, "./../g"));
- assertEquals("http://a/b/c/g/", UriUtil.resolve(base, "./g/."));
- assertEquals("http://a/b/c/g/h", UriUtil.resolve(base, "g/./h"));
- assertEquals("http://a/b/c/h", UriUtil.resolve(base, "g/../h"));
- assertEquals("http://a/b/c/g;x=1/y", UriUtil.resolve(base, "g;x=1/./y"));
- assertEquals("http://a/b/c/y", UriUtil.resolve(base, "g;x=1/../y"));
-
- assertEquals("http://a/b/c/g?y/./x", UriUtil.resolve(base, "g?y/./x"));
- assertEquals("http://a/b/c/g?y/../x", UriUtil.resolve(base, "g?y/../x"));
- assertEquals("http://a/b/c/g#s/./x", UriUtil.resolve(base, "g#s/./x"));
- assertEquals("http://a/b/c/g#s/../x", UriUtil.resolve(base, "g#s/../x"));
-
- assertEquals("http:g", UriUtil.resolve(base, "http:g"));
- }
-
- /**
- * Tests additional abnormal usage of {@link UriUtil#resolve(String, String)}.
- */
- public void testResolveAbnormalAdditional() {
- assertEquals("c:e", UriUtil.resolve("http://a/b", "c:d/../e"));
- assertEquals("a:c", UriUtil.resolve("a:b", "../c"));
- }
-
-}
diff --git a/library/core/src/androidTest/java/com/google/android/exoplayer2/util/UtilTest.java b/library/core/src/androidTest/java/com/google/android/exoplayer2/util/UtilTest.java
deleted file mode 100644
index 1d9aff0723..0000000000
--- a/library/core/src/androidTest/java/com/google/android/exoplayer2/util/UtilTest.java
+++ /dev/null
@@ -1,179 +0,0 @@
-/*
- * Copyright (C) 2016 The Android Open Source Project
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package com.google.android.exoplayer2.util;
-
-import com.google.android.exoplayer2.testutil.TestUtil;
-import java.util.ArrayList;
-import java.util.List;
-import java.util.Random;
-import junit.framework.TestCase;
-
-/**
- * Unit tests for {@link Util}.
- */
-public class UtilTest extends TestCase {
-
- public void testArrayBinarySearchFloor() {
- long[] values = new long[0];
- assertEquals(-1, Util.binarySearchFloor(values, 0, false, false));
- assertEquals(0, Util.binarySearchFloor(values, 0, false, true));
-
- values = new long[] {1, 3, 5};
- assertEquals(-1, Util.binarySearchFloor(values, 0, false, false));
- assertEquals(-1, Util.binarySearchFloor(values, 0, true, false));
- assertEquals(0, Util.binarySearchFloor(values, 0, false, true));
- assertEquals(0, Util.binarySearchFloor(values, 0, true, true));
-
- assertEquals(-1, Util.binarySearchFloor(values, 1, false, false));
- assertEquals(0, Util.binarySearchFloor(values, 1, true, false));
- assertEquals(0, Util.binarySearchFloor(values, 1, false, true));
- assertEquals(0, Util.binarySearchFloor(values, 1, true, true));
-
- assertEquals(1, Util.binarySearchFloor(values, 4, false, false));
- assertEquals(1, Util.binarySearchFloor(values, 4, true, false));
-
- assertEquals(1, Util.binarySearchFloor(values, 5, false, false));
- assertEquals(2, Util.binarySearchFloor(values, 5, true, false));
-
- assertEquals(2, Util.binarySearchFloor(values, 6, false, false));
- assertEquals(2, Util.binarySearchFloor(values, 6, true, false));
- }
-
- public void testListBinarySearchFloor() {
- List values = new ArrayList<>();
- assertEquals(-1, Util.binarySearchFloor(values, 0, false, false));
- assertEquals(0, Util.binarySearchFloor(values, 0, false, true));
-
- values.add(1);
- values.add(3);
- values.add(5);
- assertEquals(-1, Util.binarySearchFloor(values, 0, false, false));
- assertEquals(-1, Util.binarySearchFloor(values, 0, true, false));
- assertEquals(0, Util.binarySearchFloor(values, 0, false, true));
- assertEquals(0, Util.binarySearchFloor(values, 0, true, true));
-
- assertEquals(-1, Util.binarySearchFloor(values, 1, false, false));
- assertEquals(0, Util.binarySearchFloor(values, 1, true, false));
- assertEquals(0, Util.binarySearchFloor(values, 1, false, true));
- assertEquals(0, Util.binarySearchFloor(values, 1, true, true));
-
- assertEquals(1, Util.binarySearchFloor(values, 4, false, false));
- assertEquals(1, Util.binarySearchFloor(values, 4, true, false));
-
- assertEquals(1, Util.binarySearchFloor(values, 5, false, false));
- assertEquals(2, Util.binarySearchFloor(values, 5, true, false));
-
- assertEquals(2, Util.binarySearchFloor(values, 6, false, false));
- assertEquals(2, Util.binarySearchFloor(values, 6, true, false));
- }
-
- public void testArrayBinarySearchCeil() {
- long[] values = new long[0];
- assertEquals(0, Util.binarySearchCeil(values, 0, false, false));
- assertEquals(-1, Util.binarySearchCeil(values, 0, false, true));
-
- values = new long[] {1, 3, 5};
- assertEquals(0, Util.binarySearchCeil(values, 0, false, false));
- assertEquals(0, Util.binarySearchCeil(values, 0, true, false));
-
- assertEquals(1, Util.binarySearchCeil(values, 1, false, false));
- assertEquals(0, Util.binarySearchCeil(values, 1, true, false));
-
- assertEquals(1, Util.binarySearchCeil(values, 2, false, false));
- assertEquals(1, Util.binarySearchCeil(values, 2, true, false));
-
- assertEquals(3, Util.binarySearchCeil(values, 5, false, false));
- assertEquals(2, Util.binarySearchCeil(values, 5, true, false));
- assertEquals(2, Util.binarySearchCeil(values, 5, false, true));
- assertEquals(2, Util.binarySearchCeil(values, 5, true, true));
-
- assertEquals(3, Util.binarySearchCeil(values, 6, false, false));
- assertEquals(3, Util.binarySearchCeil(values, 6, true, false));
- assertEquals(2, Util.binarySearchCeil(values, 6, false, true));
- assertEquals(2, Util.binarySearchCeil(values, 6, true, true));
- }
-
- public void testListBinarySearchCeil() {
- List values = new ArrayList<>();
- assertEquals(0, Util.binarySearchCeil(values, 0, false, false));
- assertEquals(-1, Util.binarySearchCeil(values, 0, false, true));
-
- values.add(1);
- values.add(3);
- values.add(5);
- assertEquals(0, Util.binarySearchCeil(values, 0, false, false));
- assertEquals(0, Util.binarySearchCeil(values, 0, true, false));
-
- assertEquals(1, Util.binarySearchCeil(values, 1, false, false));
- assertEquals(0, Util.binarySearchCeil(values, 1, true, false));
-
- assertEquals(1, Util.binarySearchCeil(values, 2, false, false));
- assertEquals(1, Util.binarySearchCeil(values, 2, true, false));
-
- assertEquals(3, Util.binarySearchCeil(values, 5, false, false));
- assertEquals(2, Util.binarySearchCeil(values, 5, true, false));
- assertEquals(2, Util.binarySearchCeil(values, 5, false, true));
- assertEquals(2, Util.binarySearchCeil(values, 5, true, true));
-
- assertEquals(3, Util.binarySearchCeil(values, 6, false, false));
- assertEquals(3, Util.binarySearchCeil(values, 6, true, false));
- assertEquals(2, Util.binarySearchCeil(values, 6, false, true));
- assertEquals(2, Util.binarySearchCeil(values, 6, true, true));
- }
-
- public void testParseXsDuration() {
- assertEquals(150279L, Util.parseXsDuration("PT150.279S"));
- assertEquals(1500L, Util.parseXsDuration("PT1.500S"));
- }
-
- public void testParseXsDateTime() throws Exception {
- assertEquals(1403219262000L, Util.parseXsDateTime("2014-06-19T23:07:42"));
- assertEquals(1407322800000L, Util.parseXsDateTime("2014-08-06T11:00:00Z"));
- assertEquals(1407322800000L, Util.parseXsDateTime("2014-08-06T11:00:00,000Z"));
- assertEquals(1411161535000L, Util.parseXsDateTime("2014-09-19T13:18:55-08:00"));
- assertEquals(1411161535000L, Util.parseXsDateTime("2014-09-19T13:18:55-0800"));
- assertEquals(1411161535000L, Util.parseXsDateTime("2014-09-19T13:18:55.000-0800"));
- assertEquals(1411161535000L, Util.parseXsDateTime("2014-09-19T13:18:55.000-800"));
- }
-
- public void testUnescapeInvalidFileName() {
- assertNull(Util.unescapeFileName("%a"));
- assertNull(Util.unescapeFileName("%xyz"));
- }
-
- public void testEscapeUnescapeFileName() {
- assertEscapeUnescapeFileName("just+a regular+fileName", "just+a regular+fileName");
- assertEscapeUnescapeFileName("key:value", "key%3avalue");
- assertEscapeUnescapeFileName("<>:\"/\\|?*%", "%3c%3e%3a%22%2f%5c%7c%3f%2a%25");
-
- Random random = new Random(0);
- for (int i = 0; i < 1000; i++) {
- String string = TestUtil.buildTestString(1000, random);
- assertEscapeUnescapeFileName(string);
- }
- }
-
- private static void assertEscapeUnescapeFileName(String fileName, String escapedFileName) {
- assertEquals(escapedFileName, Util.escapeFileName(fileName));
- assertEquals(fileName, Util.unescapeFileName(escapedFileName));
- }
-
- private static void assertEscapeUnescapeFileName(String fileName) {
- String escapedFileName = Util.escapeFileName(fileName);
- assertEquals(fileName, Util.unescapeFileName(escapedFileName));
- }
-
-}
diff --git a/library/core/src/main/AndroidManifest.xml b/library/core/src/main/AndroidManifest.xml
index 430930a3ca..1a6971fdcc 100644
--- a/library/core/src/main/AndroidManifest.xml
+++ b/library/core/src/main/AndroidManifest.xml
@@ -14,4 +14,7 @@
limitations under the License.
-->
-
+
+
+
diff --git a/library/core/src/main/java/com/google/android/exoplayer2/BaseRenderer.java b/library/core/src/main/java/com/google/android/exoplayer2/BaseRenderer.java
index 7f14837965..cb917b9b79 100644
--- a/library/core/src/main/java/com/google/android/exoplayer2/BaseRenderer.java
+++ b/library/core/src/main/java/com/google/android/exoplayer2/BaseRenderer.java
@@ -15,7 +15,10 @@
*/
package com.google.android.exoplayer2;
+import android.support.annotation.Nullable;
import com.google.android.exoplayer2.decoder.DecoderInputBuffer;
+import com.google.android.exoplayer2.drm.DrmInitData;
+import com.google.android.exoplayer2.drm.DrmSessionManager;
import com.google.android.exoplayer2.source.SampleStream;
import com.google.android.exoplayer2.util.Assertions;
import com.google.android.exoplayer2.util.MediaClock;
@@ -32,6 +35,7 @@ public abstract class BaseRenderer implements Renderer, RendererCapabilities {
private int index;
private int state;
private SampleStream stream;
+ private Format[] streamFormats;
private long streamOffsetUs;
private boolean readEndOfStream;
private boolean streamIsFinal;
@@ -95,6 +99,7 @@ public abstract class BaseRenderer implements Renderer, RendererCapabilities {
Assertions.checkState(!streamIsFinal);
this.stream = stream;
readEndOfStream = false;
+ streamFormats = formats;
streamOffsetUs = offsetUs;
onStreamChanged(formats, offsetUs);
}
@@ -143,6 +148,7 @@ public abstract class BaseRenderer implements Renderer, RendererCapabilities {
Assertions.checkState(state == STATE_ENABLED);
state = STATE_DISABLED;
stream = null;
+ streamFormats = null;
streamIsFinal = false;
onDisabled();
}
@@ -154,7 +160,7 @@ public abstract class BaseRenderer implements Renderer, RendererCapabilities {
return ADAPTIVE_NOT_SUPPORTED;
}
- // ExoPlayerComponent implementation.
+ // PlayerMessage.Target implementation.
@Override
public void handleMessage(int what, Object object) throws ExoPlaybackException {
@@ -243,6 +249,11 @@ public abstract class BaseRenderer implements Renderer, RendererCapabilities {
// Methods to be called by subclasses.
+ /** Returns the formats of the currently enabled stream. */
+ protected final Format[] getStreamFormats() {
+ return streamFormats;
+ }
+
/**
* Returns the configuration set when the renderer was most recently enabled.
*/
@@ -309,4 +320,25 @@ public abstract class BaseRenderer implements Renderer, RendererCapabilities {
return readEndOfStream ? streamIsFinal : stream.isReady();
}
+ /**
+ * Returns whether {@code drmSessionManager} supports the specified {@code drmInitData}, or true
+ * if {@code drmInitData} is null.
+ *
+ * @param drmSessionManager The drm session manager.
+ * @param drmInitData {@link DrmInitData} of the format to check for support.
+ * @return Whether {@code drmSessionManager} supports the specified {@code drmInitData}, or
+ * true if {@code drmInitData} is null.
+ */
+ protected static boolean supportsFormatDrm(@Nullable DrmSessionManager> drmSessionManager,
+ @Nullable DrmInitData drmInitData) {
+ if (drmInitData == null) {
+ // Content is unencrypted.
+ return true;
+ } else if (drmSessionManager == null) {
+ // Content is encrypted, but no drm session manager is available.
+ return false;
+ }
+ return drmSessionManager.canAcquireSession(drmInitData);
+ }
+
}
diff --git a/library/core/src/main/java/com/google/android/exoplayer2/C.java b/library/core/src/main/java/com/google/android/exoplayer2/C.java
index e25538a062..de210f5eff 100644
--- a/library/core/src/main/java/com/google/android/exoplayer2/C.java
+++ b/library/core/src/main/java/com/google/android/exoplayer2/C.java
@@ -23,6 +23,7 @@ import android.media.MediaCodec;
import android.media.MediaFormat;
import android.support.annotation.IntDef;
import android.view.Surface;
+import com.google.android.exoplayer2.PlayerMessage.Target;
import com.google.android.exoplayer2.util.Util;
import java.lang.annotation.Retention;
import java.lang.annotation.RetentionPolicy;
@@ -63,6 +64,9 @@ public final class C {
*/
public static final int LENGTH_UNSET = -1;
+ /** Represents an unset or unknown percentage. */
+ public static final int PERCENTAGE_UNSET = -1;
+
/**
* The number of microseconds in one second.
*/
@@ -122,13 +126,22 @@ public final class C {
*/
public static final int AUDIO_SESSION_ID_UNSET = AudioManager.AUDIO_SESSION_ID_GENERATE;
- /**
- * Represents an audio encoding, or an invalid or unset value.
- */
+ /** Represents an audio encoding, or an invalid or unset value. */
@Retention(RetentionPolicy.SOURCE)
- @IntDef({Format.NO_VALUE, ENCODING_INVALID, ENCODING_PCM_8BIT, ENCODING_PCM_16BIT,
- ENCODING_PCM_24BIT, ENCODING_PCM_32BIT, ENCODING_AC3, ENCODING_E_AC3, ENCODING_DTS,
- ENCODING_DTS_HD})
+ @IntDef({
+ Format.NO_VALUE,
+ ENCODING_INVALID,
+ ENCODING_PCM_8BIT,
+ ENCODING_PCM_16BIT,
+ ENCODING_PCM_24BIT,
+ ENCODING_PCM_32BIT,
+ ENCODING_PCM_FLOAT,
+ ENCODING_AC3,
+ ENCODING_E_AC3,
+ ENCODING_DTS,
+ ENCODING_DTS_HD,
+ ENCODING_DOLBY_TRUEHD
+ })
public @interface Encoding {}
/**
@@ -136,44 +149,30 @@ public final class C {
*/
@Retention(RetentionPolicy.SOURCE)
@IntDef({Format.NO_VALUE, ENCODING_INVALID, ENCODING_PCM_8BIT, ENCODING_PCM_16BIT,
- ENCODING_PCM_24BIT, ENCODING_PCM_32BIT})
+ ENCODING_PCM_24BIT, ENCODING_PCM_32BIT, ENCODING_PCM_FLOAT})
public @interface PcmEncoding {}
- /**
- * @see AudioFormat#ENCODING_INVALID
- */
+ /** @see AudioFormat#ENCODING_INVALID */
public static final int ENCODING_INVALID = AudioFormat.ENCODING_INVALID;
- /**
- * @see AudioFormat#ENCODING_PCM_8BIT
- */
+ /** @see AudioFormat#ENCODING_PCM_8BIT */
public static final int ENCODING_PCM_8BIT = AudioFormat.ENCODING_PCM_8BIT;
- /**
- * @see AudioFormat#ENCODING_PCM_16BIT
- */
+ /** @see AudioFormat#ENCODING_PCM_16BIT */
public static final int ENCODING_PCM_16BIT = AudioFormat.ENCODING_PCM_16BIT;
- /**
- * PCM encoding with 24 bits per sample.
- */
+ /** PCM encoding with 24 bits per sample. */
public static final int ENCODING_PCM_24BIT = 0x80000000;
- /**
- * PCM encoding with 32 bits per sample.
- */
+ /** PCM encoding with 32 bits per sample. */
public static final int ENCODING_PCM_32BIT = 0x40000000;
- /**
- * @see AudioFormat#ENCODING_AC3
- */
+ /** @see AudioFormat#ENCODING_PCM_FLOAT */
+ public static final int ENCODING_PCM_FLOAT = AudioFormat.ENCODING_PCM_FLOAT;
+ /** @see AudioFormat#ENCODING_AC3 */
public static final int ENCODING_AC3 = AudioFormat.ENCODING_AC3;
- /**
- * @see AudioFormat#ENCODING_E_AC3
- */
+ /** @see AudioFormat#ENCODING_E_AC3 */
public static final int ENCODING_E_AC3 = AudioFormat.ENCODING_E_AC3;
- /**
- * @see AudioFormat#ENCODING_DTS
- */
+ /** @see AudioFormat#ENCODING_DTS */
public static final int ENCODING_DTS = AudioFormat.ENCODING_DTS;
- /**
- * @see AudioFormat#ENCODING_DTS_HD
- */
+ /** @see AudioFormat#ENCODING_DTS_HD */
public static final int ENCODING_DTS_HD = AudioFormat.ENCODING_DTS_HD;
+ /** @see AudioFormat#ENCODING_DOLBY_TRUEHD */
+ public static final int ENCODING_DOLBY_TRUEHD = AudioFormat.ENCODING_DOLBY_TRUEHD;
/**
* @see AudioFormat#CHANNEL_OUT_7POINT1_SURROUND
@@ -420,6 +419,11 @@ public final class C {
*/
public static final int SELECTION_FLAG_AUTOSELECT = 4;
+ /**
+ * Represents an undetermined language as an ISO 639 alpha-3 language code.
+ */
+ public static final String LANGUAGE_UNDETERMINED = "und";
+
/**
* Represents a streaming or other media type.
*/
@@ -489,6 +493,8 @@ public final class C {
* A data type constant for time synchronization data.
*/
public static final int DATA_TYPE_TIME_SYNCHRONIZATION = 5;
+ /** A data type constant for ads loader data. */
+ public static final int DATA_TYPE_AD = 6;
/**
* Applications or extensions may define custom {@code DATA_TYPE_*} constants greater than or
* equal to this value.
@@ -519,6 +525,10 @@ public final class C {
* A type constant for metadata tracks.
*/
public static final int TRACK_TYPE_METADATA = 4;
+ /**
+ * A type constant for a dummy or empty track.
+ */
+ public static final int TRACK_TYPE_NONE = 5;
/**
* Applications or extensions may define custom {@code TRACK_TYPE_*} constants greater than or
* equal to this value.
@@ -638,37 +648,37 @@ public final class C {
public static final UUID PLAYREADY_UUID = new UUID(0x9A04F07998404286L, 0xAB92E65BE0885F95L);
/**
- * The type of a message that can be passed to a video {@link Renderer} via
- * {@link ExoPlayer#sendMessages} or {@link ExoPlayer#blockingSendMessages}. The message object
- * should be the target {@link Surface}, or null.
+ * The type of a message that can be passed to a video {@link Renderer} via {@link
+ * ExoPlayer#createMessage(Target)}. The message payload should be the target {@link Surface}, or
+ * null.
*/
public static final int MSG_SET_SURFACE = 1;
/**
- * A type of a message that can be passed to an audio {@link Renderer} via
- * {@link ExoPlayer#sendMessages} or {@link ExoPlayer#blockingSendMessages}. The message object
- * should be a {@link Float} with 0 being silence and 1 being unity gain.
+ * A type of a message that can be passed to an audio {@link Renderer} via {@link
+ * ExoPlayer#createMessage(Target)}. The message payload should be a {@link Float} with 0 being
+ * silence and 1 being unity gain.
*/
public static final int MSG_SET_VOLUME = 2;
/**
- * A type of a message that can be passed to an audio {@link Renderer} via
- * {@link ExoPlayer#sendMessages} or {@link ExoPlayer#blockingSendMessages}. The message object
- * should be an {@link com.google.android.exoplayer2.audio.AudioAttributes} instance that will
- * configure the underlying audio track. If not set, the default audio attributes will be used.
- * They are suitable for general media playback.
- *
- * Setting the audio attributes during playback may introduce a short gap in audio output as the
- * audio track is recreated. A new audio session id will also be generated.
- *
- * If tunneling is enabled by the track selector, the specified audio attributes will be ignored,
- * but they will take effect if audio is later played without tunneling.
- *
- * If the device is running a build before platform API version 21, audio attributes cannot be set
- * directly on the underlying audio track. In this case, the usage will be mapped onto an
+ * A type of a message that can be passed to an audio {@link Renderer} via {@link
+ * ExoPlayer#createMessage(Target)}. The message payload should be an {@link
+ * com.google.android.exoplayer2.audio.AudioAttributes} instance that will configure the
+ * underlying audio track. If not set, the default audio attributes will be used. They are
+ * suitable for general media playback.
+ *
 + * <p>Setting the audio attributes during playback may introduce a short gap in audio output as
+ * the audio track is recreated. A new audio session id will also be generated.
+ *
 + * <p>If tunneling is enabled by the track selector, the specified audio attributes will be
+ * ignored, but they will take effect if audio is later played without tunneling.
+ *
 + * <p>If the device is running a build before platform API version 21, audio attributes cannot be
+ * set directly on the underlying audio track. In this case, the usage will be mapped onto an
* equivalent stream type using {@link Util#getStreamTypeForAudioUsage(int)}.
- *
- * To get audio attributes that are equivalent to a legacy stream type, pass the stream type to
+ *
 + * <p>To get audio attributes that are equivalent to a legacy stream type, pass the stream type to
* {@link Util#getAudioUsageForStreamType(int)} and use the returned {@link C.AudioUsage} to build
* an audio attributes instance.
*/
@@ -676,17 +686,17 @@ public final class C {
/**
* The type of a message that can be passed to a {@link MediaCodec}-based video {@link Renderer}
- * via {@link ExoPlayer#sendMessages} or {@link ExoPlayer#blockingSendMessages}. The message
- * object should be one of the integer scaling modes in {@link C.VideoScalingMode}.
- *
- * Note that the scaling mode only applies if the {@link Surface} targeted by the renderer is
+ * via {@link ExoPlayer#createMessage(Target)}. The message payload should be one of the integer
+ * scaling modes in {@link C.VideoScalingMode}.
+ *
 + * <p>Note that the scaling mode only applies if the {@link Surface} targeted by the renderer is
* owned by a {@link android.view.SurfaceView}.
*/
public static final int MSG_SET_SCALING_MODE = 4;
/**
- * Applications or extensions may define custom {@code MSG_*} constants greater than or equal to
- * this value.
+ * Applications or extensions may define custom {@code MSG_*} constants that can be passed to
+ * {@link Renderer}s. These custom constants must be greater than or equal to this value.
*/
public static final int MSG_CUSTOM_BASE = 10000;
diff --git a/library/core/src/main/java/com/google/android/exoplayer2/ControlDispatcher.java b/library/core/src/main/java/com/google/android/exoplayer2/ControlDispatcher.java
new file mode 100644
index 0000000000..f8749fc1a8
--- /dev/null
+++ b/library/core/src/main/java/com/google/android/exoplayer2/ControlDispatcher.java
@@ -0,0 +1,75 @@
+/*
+ * Copyright (C) 2017 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package com.google.android.exoplayer2;
+
+import com.google.android.exoplayer2.Player.RepeatMode;
+
+/**
+ * Dispatches operations to the {@link Player}.
+ *
+ * Implementations may choose to suppress (e.g. prevent playback from resuming if audio focus is
+ * denied) or modify (e.g. change the seek position to prevent a user from seeking past a
+ * non-skippable advert) operations.
+ */
+public interface ControlDispatcher {
+
+ /**
+ * Dispatches a {@link Player#setPlayWhenReady(boolean)} operation.
+ *
+ * @param player The {@link Player} to which the operation should be dispatched.
+ * @param playWhenReady Whether playback should proceed when ready.
+ * @return True if the operation was dispatched. False if suppressed.
+ */
+ boolean dispatchSetPlayWhenReady(Player player, boolean playWhenReady);
+
+ /**
+ * Dispatches a {@link Player#seekTo(int, long)} operation.
+ *
+ * @param player The {@link Player} to which the operation should be dispatched.
+ * @param windowIndex The index of the window.
+ * @param positionMs The seek position in the specified window, or {@link C#TIME_UNSET} to seek to
+ * the window's default position.
+ * @return True if the operation was dispatched. False if suppressed.
+ */
+ boolean dispatchSeekTo(Player player, int windowIndex, long positionMs);
+
+ /**
+ * Dispatches a {@link Player#setRepeatMode(int)} operation.
+ *
+ * @param player The {@link Player} to which the operation should be dispatched.
+ * @param repeatMode The repeat mode.
+ * @return True if the operation was dispatched. False if suppressed.
+ */
+ boolean dispatchSetRepeatMode(Player player, @RepeatMode int repeatMode);
+
+ /**
+ * Dispatches a {@link Player#setShuffleModeEnabled(boolean)} operation.
+ *
+ * @param player The {@link Player} to which the operation should be dispatched.
+ * @param shuffleModeEnabled Whether shuffling is enabled.
+ * @return True if the operation was dispatched. False if suppressed.
+ */
+ boolean dispatchSetShuffleModeEnabled(Player player, boolean shuffleModeEnabled);
+
+ /**
+ * Dispatches a {@link Player#stop()} operation.
+ *
+ * @param player The {@link Player} to which the operation should be dispatched.
+ * @param reset Whether the player should be reset.
+ * @return True if the operation was dispatched. False if suppressed.
+ */
+ boolean dispatchStop(Player player, boolean reset);
+}
diff --git a/library/core/src/main/java/com/google/android/exoplayer2/DefaultControlDispatcher.java b/library/core/src/main/java/com/google/android/exoplayer2/DefaultControlDispatcher.java
new file mode 100644
index 0000000000..df3ef36b88
--- /dev/null
+++ b/library/core/src/main/java/com/google/android/exoplayer2/DefaultControlDispatcher.java
@@ -0,0 +1,55 @@
+/*
+ * Copyright (C) 2017 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package com.google.android.exoplayer2;
+
+import com.google.android.exoplayer2.Player.RepeatMode;
+
+/**
+ * Default {@link ControlDispatcher} that dispatches all operations to the player without
+ * modification.
+ */
+public class DefaultControlDispatcher implements ControlDispatcher {
+
+ @Override
+ public boolean dispatchSetPlayWhenReady(Player player, boolean playWhenReady) {
+ player.setPlayWhenReady(playWhenReady);
+ return true;
+ }
+
+ @Override
+ public boolean dispatchSeekTo(Player player, int windowIndex, long positionMs) {
+ player.seekTo(windowIndex, positionMs);
+ return true;
+ }
+
+ @Override
+ public boolean dispatchSetRepeatMode(Player player, @RepeatMode int repeatMode) {
+ player.setRepeatMode(repeatMode);
+ return true;
+ }
+
+ @Override
+ public boolean dispatchSetShuffleModeEnabled(Player player, boolean shuffleModeEnabled) {
+ player.setShuffleModeEnabled(shuffleModeEnabled);
+ return true;
+ }
+
+ @Override
+ public boolean dispatchStop(Player player, boolean reset) {
+ player.stop(reset);
+ return true;
+ }
+}
diff --git a/library/core/src/main/java/com/google/android/exoplayer2/DefaultLoadControl.java b/library/core/src/main/java/com/google/android/exoplayer2/DefaultLoadControl.java
index d8bc042ad7..b5b364a327 100644
--- a/library/core/src/main/java/com/google/android/exoplayer2/DefaultLoadControl.java
+++ b/library/core/src/main/java/com/google/android/exoplayer2/DefaultLoadControl.java
@@ -19,13 +19,14 @@ import com.google.android.exoplayer2.source.TrackGroupArray;
import com.google.android.exoplayer2.trackselection.TrackSelectionArray;
import com.google.android.exoplayer2.upstream.Allocator;
import com.google.android.exoplayer2.upstream.DefaultAllocator;
+import com.google.android.exoplayer2.util.Assertions;
import com.google.android.exoplayer2.util.PriorityTaskManager;
import com.google.android.exoplayer2.util.Util;
/**
* The default {@link LoadControl} implementation.
*/
-public final class DefaultLoadControl implements LoadControl {
+public class DefaultLoadControl implements LoadControl {
/**
* The default minimum duration of media that the player will attempt to ensure is buffered at all
@@ -36,7 +37,7 @@ public final class DefaultLoadControl implements LoadControl {
/**
* The default maximum duration of media that the player will attempt to buffer, in milliseconds.
*/
- public static final int DEFAULT_MAX_BUFFER_MS = 30000;
+ public static final int DEFAULT_MAX_BUFFER_MS = 50000;
/**
* The default duration of media that must be buffered for playback to start or resume following a
@@ -51,9 +52,124 @@ public final class DefaultLoadControl implements LoadControl {
*/
public static final int DEFAULT_BUFFER_FOR_PLAYBACK_AFTER_REBUFFER_MS = 5000;
- private static final int ABOVE_HIGH_WATERMARK = 0;
- private static final int BETWEEN_WATERMARKS = 1;
- private static final int BELOW_LOW_WATERMARK = 2;
+ /**
+ * The default target buffer size in bytes. When set to {@link C#LENGTH_UNSET}, the load control
+ * automatically determines its target buffer size.
+ */
+ public static final int DEFAULT_TARGET_BUFFER_BYTES = C.LENGTH_UNSET;
+
+ /** The default prioritization of buffer time constraints over size constraints. */
+ public static final boolean DEFAULT_PRIORITIZE_TIME_OVER_SIZE_THRESHOLDS = true;
+
+ /** Builder for {@link DefaultLoadControl}. */
+ public static final class Builder {
+
+ private DefaultAllocator allocator;
+ private int minBufferMs;
+ private int maxBufferMs;
+ private int bufferForPlaybackMs;
+ private int bufferForPlaybackAfterRebufferMs;
+ private int targetBufferBytes;
+ private boolean prioritizeTimeOverSizeThresholds;
+ private PriorityTaskManager priorityTaskManager;
+
+ /** Constructs a new instance. */
+ public Builder() {
+ allocator = null;
+ minBufferMs = DEFAULT_MIN_BUFFER_MS;
+ maxBufferMs = DEFAULT_MAX_BUFFER_MS;
+ bufferForPlaybackMs = DEFAULT_BUFFER_FOR_PLAYBACK_MS;
+ bufferForPlaybackAfterRebufferMs = DEFAULT_BUFFER_FOR_PLAYBACK_AFTER_REBUFFER_MS;
+ targetBufferBytes = DEFAULT_TARGET_BUFFER_BYTES;
+ prioritizeTimeOverSizeThresholds = DEFAULT_PRIORITIZE_TIME_OVER_SIZE_THRESHOLDS;
+ priorityTaskManager = null;
+ }
+
+ /**
+ * Sets the {@link DefaultAllocator} used by the loader.
+ *
+ * @param allocator The {@link DefaultAllocator}.
+ * @return This builder, for convenience.
+ */
+ public Builder setAllocator(DefaultAllocator allocator) {
+ this.allocator = allocator;
+ return this;
+ }
+
+ /**
+ * Sets the buffer duration parameters.
+ *
+ * @param minBufferMs The minimum duration of media that the player will attempt to ensure is
+ * buffered at all times, in milliseconds.
+ * @param maxBufferMs The maximum duration of media that the player will attempt to buffer, in
+ * milliseconds.
+ * @param bufferForPlaybackMs The duration of media that must be buffered for playback to start
+ * or resume following a user action such as a seek, in milliseconds.
+ * @param bufferForPlaybackAfterRebufferMs The default duration of media that must be buffered
+ * for playback to resume after a rebuffer, in milliseconds. A rebuffer is defined to be
+ * caused by buffer depletion rather than a user action.
+ * @return This builder, for convenience.
+ */
+ public Builder setBufferDurationsMs(
+ int minBufferMs,
+ int maxBufferMs,
+ int bufferForPlaybackMs,
+ int bufferForPlaybackAfterRebufferMs) {
+ this.minBufferMs = minBufferMs;
+ this.maxBufferMs = maxBufferMs;
+ this.bufferForPlaybackMs = bufferForPlaybackMs;
+ this.bufferForPlaybackAfterRebufferMs = bufferForPlaybackAfterRebufferMs;
+ return this;
+ }
+
+ /**
+ * Sets the target buffer size in bytes. If set to {@link C#LENGTH_UNSET}, the target buffer
+ * size will be calculated using {@link #calculateTargetBufferSize(Renderer[],
+ * TrackSelectionArray)}.
+ *
+ * @param targetBufferBytes The target buffer size in bytes.
+ * @return This builder, for convenience.
+ */
+ public Builder setTargetBufferBytes(int targetBufferBytes) {
+ this.targetBufferBytes = targetBufferBytes;
+ return this;
+ }
+
+ /**
+ * Sets whether the load control prioritizes buffer time constraints over buffer size
+ * constraints.
+ *
+ * @param prioritizeTimeOverSizeThresholds Whether the load control prioritizes buffer time
+ * constraints over buffer size constraints.
+ * @return This builder, for convenience.
+ */
+ public Builder setPrioritizeTimeOverSizeThresholds(boolean prioritizeTimeOverSizeThresholds) {
+ this.prioritizeTimeOverSizeThresholds = prioritizeTimeOverSizeThresholds;
+ return this;
+ }
+
+ /** Sets the {@link PriorityTaskManager} to use. */
+ public Builder setPriorityTaskManager(PriorityTaskManager priorityTaskManager) {
+ this.priorityTaskManager = priorityTaskManager;
+ return this;
+ }
+
+ /** Creates a {@link DefaultLoadControl}. */
+ public DefaultLoadControl createDefaultLoadControl() {
+ if (allocator == null) {
+ allocator = new DefaultAllocator(true, C.DEFAULT_BUFFER_SEGMENT_SIZE);
+ }
+ return new DefaultLoadControl(
+ allocator,
+ minBufferMs,
+ maxBufferMs,
+ bufferForPlaybackMs,
+ bufferForPlaybackAfterRebufferMs,
+ targetBufferBytes,
+ prioritizeTimeOverSizeThresholds,
+ priorityTaskManager);
+ }
+ }
private final DefaultAllocator allocator;
@@ -61,6 +177,8 @@ public final class DefaultLoadControl implements LoadControl {
private final long maxBufferUs;
private final long bufferForPlaybackUs;
private final long bufferForPlaybackAfterRebufferUs;
+ private final int targetBufferBytesOverwrite;
+ private final boolean prioritizeTimeOverSizeThresholds;
private final PriorityTaskManager priorityTaskManager;
private int targetBufferSize;
@@ -73,61 +191,69 @@ public final class DefaultLoadControl implements LoadControl {
this(new DefaultAllocator(true, C.DEFAULT_BUFFER_SEGMENT_SIZE));
}
- /**
- * Constructs a new instance, using the {@code DEFAULT_*} constants defined in this class.
- *
- * @param allocator The {@link DefaultAllocator} used by the loader.
- */
+ /** @deprecated Use {@link Builder} instead. */
+ @Deprecated
public DefaultLoadControl(DefaultAllocator allocator) {
- this(allocator, DEFAULT_MIN_BUFFER_MS, DEFAULT_MAX_BUFFER_MS, DEFAULT_BUFFER_FOR_PLAYBACK_MS,
- DEFAULT_BUFFER_FOR_PLAYBACK_AFTER_REBUFFER_MS);
+ this(
+ allocator,
+ DEFAULT_MIN_BUFFER_MS,
+ DEFAULT_MAX_BUFFER_MS,
+ DEFAULT_BUFFER_FOR_PLAYBACK_MS,
+ DEFAULT_BUFFER_FOR_PLAYBACK_AFTER_REBUFFER_MS,
+ DEFAULT_TARGET_BUFFER_BYTES,
+ DEFAULT_PRIORITIZE_TIME_OVER_SIZE_THRESHOLDS);
}
- /**
- * Constructs a new instance.
- *
- * @param allocator The {@link DefaultAllocator} used by the loader.
- * @param minBufferMs The minimum duration of media that the player will attempt to ensure is
- * buffered at all times, in milliseconds.
- * @param maxBufferMs The maximum duration of media that the player will attempt buffer, in
- * milliseconds.
- * @param bufferForPlaybackMs The duration of media that must be buffered for playback to start or
- * resume following a user action such as a seek, in milliseconds.
- * @param bufferForPlaybackAfterRebufferMs The default duration of media that must be buffered for
- * playback to resume after a rebuffer, in milliseconds. A rebuffer is defined to be caused by
- * buffer depletion rather than a user action.
- */
- public DefaultLoadControl(DefaultAllocator allocator, int minBufferMs, int maxBufferMs,
- long bufferForPlaybackMs, long bufferForPlaybackAfterRebufferMs) {
- this(allocator, minBufferMs, maxBufferMs, bufferForPlaybackMs, bufferForPlaybackAfterRebufferMs,
+ /** @deprecated Use {@link Builder} instead. */
+ @Deprecated
+ public DefaultLoadControl(
+ DefaultAllocator allocator,
+ int minBufferMs,
+ int maxBufferMs,
+ int bufferForPlaybackMs,
+ int bufferForPlaybackAfterRebufferMs,
+ int targetBufferBytes,
+ boolean prioritizeTimeOverSizeThresholds) {
+ this(
+ allocator,
+ minBufferMs,
+ maxBufferMs,
+ bufferForPlaybackMs,
+ bufferForPlaybackAfterRebufferMs,
+ targetBufferBytes,
+ prioritizeTimeOverSizeThresholds,
null);
}
- /**
- * Constructs a new instance.
- *
- * @param allocator The {@link DefaultAllocator} used by the loader.
- * @param minBufferMs The minimum duration of media that the player will attempt to ensure is
- * buffered at all times, in milliseconds.
- * @param maxBufferMs The maximum duration of media that the player will attempt buffer, in
- * milliseconds.
- * @param bufferForPlaybackMs The duration of media that must be buffered for playback to start or
- * resume following a user action such as a seek, in milliseconds.
- * @param bufferForPlaybackAfterRebufferMs The default duration of media that must be buffered for
- * playback to resume after a rebuffer, in milliseconds. A rebuffer is defined to be caused by
- * buffer depletion rather than a user action.
- * @param priorityTaskManager If not null, registers itself as a task with priority
- * {@link C#PRIORITY_PLAYBACK} during loading periods, and unregisters itself during draining
- * periods.
- */
- public DefaultLoadControl(DefaultAllocator allocator, int minBufferMs, int maxBufferMs,
- long bufferForPlaybackMs, long bufferForPlaybackAfterRebufferMs,
+ /** @deprecated Use {@link Builder} instead. */
+ @Deprecated
+ public DefaultLoadControl(
+ DefaultAllocator allocator,
+ int minBufferMs,
+ int maxBufferMs,
+ int bufferForPlaybackMs,
+ int bufferForPlaybackAfterRebufferMs,
+ int targetBufferBytes,
+ boolean prioritizeTimeOverSizeThresholds,
PriorityTaskManager priorityTaskManager) {
+ assertGreaterOrEqual(bufferForPlaybackMs, 0, "bufferForPlaybackMs", "0");
+ assertGreaterOrEqual(
+ bufferForPlaybackAfterRebufferMs, 0, "bufferForPlaybackAfterRebufferMs", "0");
+ assertGreaterOrEqual(minBufferMs, bufferForPlaybackMs, "minBufferMs", "bufferForPlaybackMs");
+ assertGreaterOrEqual(
+ minBufferMs,
+ bufferForPlaybackAfterRebufferMs,
+ "minBufferMs",
+ "bufferForPlaybackAfterRebufferMs");
+ assertGreaterOrEqual(maxBufferMs, minBufferMs, "maxBufferMs", "minBufferMs");
+
this.allocator = allocator;
minBufferUs = minBufferMs * 1000L;
maxBufferUs = maxBufferMs * 1000L;
bufferForPlaybackUs = bufferForPlaybackMs * 1000L;
bufferForPlaybackAfterRebufferUs = bufferForPlaybackAfterRebufferMs * 1000L;
+ targetBufferBytesOverwrite = targetBufferBytes;
+ this.prioritizeTimeOverSizeThresholds = prioritizeTimeOverSizeThresholds;
this.priorityTaskManager = priorityTaskManager;
}
@@ -139,12 +265,10 @@ public final class DefaultLoadControl implements LoadControl {
@Override
public void onTracksSelected(Renderer[] renderers, TrackGroupArray trackGroups,
TrackSelectionArray trackSelections) {
- targetBufferSize = 0;
- for (int i = 0; i < renderers.length; i++) {
- if (trackSelections.get(i) != null) {
- targetBufferSize += Util.getDefaultBufferSize(renderers[i].getTrackType());
- }
- }
+ targetBufferSize =
+ targetBufferBytesOverwrite == C.LENGTH_UNSET
+ ? calculateTargetBufferSize(renderers, trackSelections)
+ : targetBufferBytesOverwrite;
allocator.setTargetBufferSize(targetBufferSize);
}
@@ -164,18 +288,32 @@ public final class DefaultLoadControl implements LoadControl {
}
@Override
- public boolean shouldStartPlayback(long bufferedDurationUs, boolean rebuffering) {
- long minBufferDurationUs = rebuffering ? bufferForPlaybackAfterRebufferUs : bufferForPlaybackUs;
- return minBufferDurationUs <= 0 || bufferedDurationUs >= minBufferDurationUs;
+ public long getBackBufferDurationUs() {
+ return 0;
}
@Override
- public boolean shouldContinueLoading(long bufferedDurationUs) {
- int bufferTimeState = getBufferTimeState(bufferedDurationUs);
+ public boolean retainBackBufferFromKeyframe() {
+ return false;
+ }
+
+ @Override
+ public boolean shouldContinueLoading(long bufferedDurationUs, float playbackSpeed) {
boolean targetBufferSizeReached = allocator.getTotalBytesAllocated() >= targetBufferSize;
boolean wasBuffering = isBuffering;
- isBuffering = bufferTimeState == BELOW_LOW_WATERMARK
- || (bufferTimeState == BETWEEN_WATERMARKS && isBuffering && !targetBufferSizeReached);
+ long minBufferUs = this.minBufferUs;
+ if (playbackSpeed > 1) {
+ // The playback speed is faster than real time, so scale up the minimum required media
+ // duration to keep enough media buffered for a playout duration of minBufferUs.
+ long mediaDurationMinBufferUs =
+ Util.getMediaDurationForPlayoutDuration(minBufferUs, playbackSpeed);
+ minBufferUs = Math.min(mediaDurationMinBufferUs, maxBufferUs);
+ }
+ if (bufferedDurationUs < minBufferUs) {
+ isBuffering = prioritizeTimeOverSizeThresholds || !targetBufferSizeReached;
+ } else if (bufferedDurationUs > maxBufferUs || targetBufferSizeReached) {
+ isBuffering = false;
+ } // Else don't change the buffering state
if (priorityTaskManager != null && isBuffering != wasBuffering) {
if (isBuffering) {
priorityTaskManager.add(C.PRIORITY_PLAYBACK);
@@ -186,9 +324,34 @@ public final class DefaultLoadControl implements LoadControl {
return isBuffering;
}
- private int getBufferTimeState(long bufferedDurationUs) {
- return bufferedDurationUs > maxBufferUs ? ABOVE_HIGH_WATERMARK
- : (bufferedDurationUs < minBufferUs ? BELOW_LOW_WATERMARK : BETWEEN_WATERMARKS);
+ @Override
+ public boolean shouldStartPlayback(
+ long bufferedDurationUs, float playbackSpeed, boolean rebuffering) {
+ bufferedDurationUs = Util.getPlayoutDurationForMediaDuration(bufferedDurationUs, playbackSpeed);
+ long minBufferDurationUs = rebuffering ? bufferForPlaybackAfterRebufferUs : bufferForPlaybackUs;
+ return minBufferDurationUs <= 0
+ || bufferedDurationUs >= minBufferDurationUs
+ || (!prioritizeTimeOverSizeThresholds
+ && allocator.getTotalBytesAllocated() >= targetBufferSize);
+ }
+
+ /**
+ * Calculate target buffer size in bytes based on the selected tracks. The player will try not to
+ * exceed this target buffer. Only used when {@code targetBufferBytes} is {@link C#LENGTH_UNSET}.
+ *
+ * @param renderers The renderers for which the track were selected.
+ * @param trackSelectionArray The selected tracks.
+ * @return The target buffer size in bytes.
+ */
+ protected int calculateTargetBufferSize(
+ Renderer[] renderers, TrackSelectionArray trackSelectionArray) {
+ int targetBufferSize = 0;
+ for (int i = 0; i < renderers.length; i++) {
+ if (trackSelectionArray.get(i) != null) {
+ targetBufferSize += Util.getDefaultBufferSize(renderers[i].getTrackType());
+ }
+ }
+ return targetBufferSize;
}
private void reset(boolean resetAllocator) {
@@ -202,4 +365,7 @@ public final class DefaultLoadControl implements LoadControl {
}
}
+ private static void assertGreaterOrEqual(int value1, int value2, String name1, String name2) {
+ Assertions.checkArgument(value1 >= value2, name1 + " cannot be less than " + name2);
+ }
}
diff --git a/library/core/src/main/java/com/google/android/exoplayer2/DefaultMediaClock.java b/library/core/src/main/java/com/google/android/exoplayer2/DefaultMediaClock.java
new file mode 100644
index 0000000000..ed57cec70c
--- /dev/null
+++ b/library/core/src/main/java/com/google/android/exoplayer2/DefaultMediaClock.java
@@ -0,0 +1,178 @@
+/*
+ * Copyright (C) 2017 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package com.google.android.exoplayer2;
+
+import android.support.annotation.Nullable;
+import com.google.android.exoplayer2.util.Clock;
+import com.google.android.exoplayer2.util.MediaClock;
+import com.google.android.exoplayer2.util.StandaloneMediaClock;
+
+/**
+ * Default {@link MediaClock} which uses a renderer media clock and falls back to a
+ * {@link StandaloneMediaClock} if necessary.
+ */
+/* package */ final class DefaultMediaClock implements MediaClock {
+
+ /**
+ * Listener interface to be notified of changes to the active playback parameters.
+ */
+ public interface PlaybackParameterListener {
+
+ /**
+ * Called when the active playback parameters changed.
+ *
+ * @param newPlaybackParameters The newly active {@link PlaybackParameters}.
+ */
+ void onPlaybackParametersChanged(PlaybackParameters newPlaybackParameters);
+
+ }
+
+ private final StandaloneMediaClock standaloneMediaClock;
+ private final PlaybackParameterListener listener;
+
+ private @Nullable Renderer rendererClockSource;
+ private @Nullable MediaClock rendererClock;
+
+ /**
+ * Creates a new instance with listener for playback parameter changes and a {@link Clock} to use
+ * for the standalone clock implementation.
+ *
+ * @param listener A {@link PlaybackParameterListener} to listen for playback parameter
+ * changes.
+ * @param clock A {@link Clock}.
+ */
+ public DefaultMediaClock(PlaybackParameterListener listener, Clock clock) {
+ this.listener = listener;
+ this.standaloneMediaClock = new StandaloneMediaClock(clock);
+ }
+
+ /**
+ * Starts the standalone fallback clock.
+ */
+ public void start() {
+ standaloneMediaClock.start();
+ }
+
+ /**
+ * Stops the standalone fallback clock.
+ */
+ public void stop() {
+ standaloneMediaClock.stop();
+ }
+
+ /**
+ * Resets the position of the standalone fallback clock.
+ *
+ * @param positionUs The position to set in microseconds.
+ */
+ public void resetPosition(long positionUs) {
+ standaloneMediaClock.resetPosition(positionUs);
+ }
+
+ /**
+ * Notifies the media clock that a renderer has been enabled. Starts using the media clock of the
+ * provided renderer if available.
+ *
+ * @param renderer The renderer which has been enabled.
+ * @throws ExoPlaybackException If the renderer provides a media clock and another renderer media
+ * clock is already provided.
+ */
+ public void onRendererEnabled(Renderer renderer) throws ExoPlaybackException {
+ MediaClock rendererMediaClock = renderer.getMediaClock();
+ if (rendererMediaClock != null && rendererMediaClock != rendererClock) {
+ if (rendererClock != null) {
+ throw ExoPlaybackException.createForUnexpected(
+ new IllegalStateException("Multiple renderer media clocks enabled."));
+ }
+ this.rendererClock = rendererMediaClock;
+ this.rendererClockSource = renderer;
+ rendererClock.setPlaybackParameters(standaloneMediaClock.getPlaybackParameters());
+ ensureSynced();
+ }
+ }
+
+ /**
+ * Notifies the media clock that a renderer has been disabled. Stops using the media clock of this
+ * renderer if used.
+ *
+ * @param renderer The renderer which has been disabled.
+ */
+ public void onRendererDisabled(Renderer renderer) {
+ if (renderer == rendererClockSource) {
+ this.rendererClock = null;
+ this.rendererClockSource = null;
+ }
+ }
+
+ /**
+ * Syncs internal clock if needed and returns current clock position in microseconds.
+ */
+ public long syncAndGetPositionUs() {
+ if (isUsingRendererClock()) {
+ ensureSynced();
+ return rendererClock.getPositionUs();
+ } else {
+ return standaloneMediaClock.getPositionUs();
+ }
+ }
+
+ // MediaClock implementation.
+
+ @Override
+ public long getPositionUs() {
+ if (isUsingRendererClock()) {
+ return rendererClock.getPositionUs();
+ } else {
+ return standaloneMediaClock.getPositionUs();
+ }
+ }
+
+ @Override
+ public PlaybackParameters setPlaybackParameters(PlaybackParameters playbackParameters) {
+ if (rendererClock != null) {
+ playbackParameters = rendererClock.setPlaybackParameters(playbackParameters);
+ }
+ standaloneMediaClock.setPlaybackParameters(playbackParameters);
+ listener.onPlaybackParametersChanged(playbackParameters);
+ return playbackParameters;
+ }
+
+ @Override
+ public PlaybackParameters getPlaybackParameters() {
+ return rendererClock != null ? rendererClock.getPlaybackParameters()
+ : standaloneMediaClock.getPlaybackParameters();
+ }
+
+ private void ensureSynced() {
+ long rendererClockPositionUs = rendererClock.getPositionUs();
+ standaloneMediaClock.resetPosition(rendererClockPositionUs);
+ PlaybackParameters playbackParameters = rendererClock.getPlaybackParameters();
+ if (!playbackParameters.equals(standaloneMediaClock.getPlaybackParameters())) {
+ standaloneMediaClock.setPlaybackParameters(playbackParameters);
+ listener.onPlaybackParametersChanged(playbackParameters);
+ }
+ }
+
+ private boolean isUsingRendererClock() {
+ // Use the renderer clock if the providing renderer has not ended or needs the next sample
+ // stream to reenter the ready state. The latter case uses the standalone clock to avoid getting
+ // stuck if tracks in the current period have uneven durations.
+ // See: https://github.com/google/ExoPlayer/issues/1874.
+ return rendererClockSource != null && !rendererClockSource.isEnded()
+ && (rendererClockSource.isReady() || !rendererClockSource.hasReadStreamToEnd());
+ }
+
+}
diff --git a/library/core/src/main/java/com/google/android/exoplayer2/DefaultRenderersFactory.java b/library/core/src/main/java/com/google/android/exoplayer2/DefaultRenderersFactory.java
index 2272306117..6cab53b78a 100644
--- a/library/core/src/main/java/com/google/android/exoplayer2/DefaultRenderersFactory.java
+++ b/library/core/src/main/java/com/google/android/exoplayer2/DefaultRenderersFactory.java
@@ -90,28 +90,37 @@ public class DefaultRenderersFactory implements RenderersFactory {
* @param context A {@link Context}.
*/
public DefaultRenderersFactory(Context context) {
- this(context, null);
+ this(context, EXTENSION_RENDERER_MODE_OFF);
}
/**
- * @param context A {@link Context}.
- * @param drmSessionManager An optional {@link DrmSessionManager}. May be null if DRM protected
- * playbacks are not required.
+ * @deprecated Use {@link #DefaultRenderersFactory(Context)} and pass {@link DrmSessionManager}
+ * directly to {@link SimpleExoPlayer} or {@link ExoPlayerFactory}.
*/
- public DefaultRenderersFactory(Context context,
- @Nullable DrmSessionManager<FrameworkMediaCrypto> drmSessionManager) {
+ @Deprecated
+ public DefaultRenderersFactory(
+ Context context, @Nullable DrmSessionManager<FrameworkMediaCrypto> drmSessionManager) {
this(context, drmSessionManager, EXTENSION_RENDERER_MODE_OFF);
}
/**
* @param context A {@link Context}.
- * @param drmSessionManager An optional {@link DrmSessionManager}. May be null if DRM protected
- * playbacks are not required.
- * @param extensionRendererMode The extension renderer mode, which determines if and how
- * available extension renderers are used. Note that extensions must be included in the
- * application build for them to be considered available.
+ * @param extensionRendererMode The extension renderer mode, which determines if and how available
+ * extension renderers are used. Note that extensions must be included in the application
+ * build for them to be considered available.
*/
- public DefaultRenderersFactory(Context context,
+ public DefaultRenderersFactory(
+ Context context, @ExtensionRendererMode int extensionRendererMode) {
+ this(context, null, extensionRendererMode, DEFAULT_ALLOWED_VIDEO_JOINING_TIME_MS);
+ }
+
+ /**
+ * @deprecated Use {@link #DefaultRenderersFactory(Context, int)} and pass {@link
+ * DrmSessionManager} directly to {@link SimpleExoPlayer} or {@link ExoPlayerFactory}.
+ */
+ @Deprecated
+ public DefaultRenderersFactory(
+ Context context,
@Nullable DrmSessionManager<FrameworkMediaCrypto> drmSessionManager,
@ExtensionRendererMode int extensionRendererMode) {
this(context, drmSessionManager, extensionRendererMode, DEFAULT_ALLOWED_VIDEO_JOINING_TIME_MS);
@@ -119,28 +128,46 @@ public class DefaultRenderersFactory implements RenderersFactory {
/**
* @param context A {@link Context}.
- * @param drmSessionManager An optional {@link DrmSessionManager}. May be null if DRM protected
- * playbacks are not required.
- * @param extensionRendererMode The extension renderer mode, which determines if and how
- * available extension renderers are used. Note that extensions must be included in the
- * application build for them to be considered available.
- * @param allowedVideoJoiningTimeMs The maximum duration for which video renderers can attempt
- * to seamlessly join an ongoing playback.
+ * @param extensionRendererMode The extension renderer mode, which determines if and how available
+ * extension renderers are used. Note that extensions must be included in the application
+ * build for them to be considered available.
+ * @param allowedVideoJoiningTimeMs The maximum duration for which video renderers can attempt to
+ * seamlessly join an ongoing playback.
*/
- public DefaultRenderersFactory(Context context,
+ public DefaultRenderersFactory(
+ Context context,
+ @ExtensionRendererMode int extensionRendererMode,
+ long allowedVideoJoiningTimeMs) {
+ this(context, null, extensionRendererMode, allowedVideoJoiningTimeMs);
+ }
+
+ /**
+ * @deprecated Use {@link #DefaultRenderersFactory(Context, int, long)} and pass {@link
+ * DrmSessionManager} directly to {@link SimpleExoPlayer} or {@link ExoPlayerFactory}.
+ */
+ @Deprecated
+ public DefaultRenderersFactory(
+ Context context,
@Nullable DrmSessionManager<FrameworkMediaCrypto> drmSessionManager,
- @ExtensionRendererMode int extensionRendererMode, long allowedVideoJoiningTimeMs) {
+ @ExtensionRendererMode int extensionRendererMode,
+ long allowedVideoJoiningTimeMs) {
this.context = context;
- this.drmSessionManager = drmSessionManager;
this.extensionRendererMode = extensionRendererMode;
this.allowedVideoJoiningTimeMs = allowedVideoJoiningTimeMs;
+ this.drmSessionManager = drmSessionManager;
}
@Override
- public Renderer[] createRenderers(Handler eventHandler,
+ public Renderer[] createRenderers(
+ Handler eventHandler,
VideoRendererEventListener videoRendererEventListener,
AudioRendererEventListener audioRendererEventListener,
- TextOutput textRendererOutput, MetadataOutput metadataRendererOutput) {
+ TextOutput textRendererOutput,
+ MetadataOutput metadataRendererOutput,
+ @Nullable DrmSessionManager<FrameworkMediaCrypto> drmSessionManager) {
+ if (drmSessionManager == null) {
+ drmSessionManager = this.drmSessionManager;
+ }
ArrayList<Renderer> renderersList = new ArrayList<>();
buildVideoRenderers(context, drmSessionManager, allowedVideoJoiningTimeMs,
eventHandler, videoRendererEventListener, extensionRendererMode, renderersList);
@@ -172,9 +199,16 @@ public class DefaultRenderersFactory implements RenderersFactory {
long allowedVideoJoiningTimeMs, Handler eventHandler,
VideoRendererEventListener eventListener, @ExtensionRendererMode int extensionRendererMode,
ArrayList<Renderer> out) {
- out.add(new MediaCodecVideoRenderer(context, MediaCodecSelector.DEFAULT,
- allowedVideoJoiningTimeMs, drmSessionManager, false, eventHandler, eventListener,
- MAX_DROPPED_VIDEO_FRAME_COUNT_TO_NOTIFY));
+ out.add(
+ new MediaCodecVideoRenderer(
+ context,
+ MediaCodecSelector.DEFAULT,
+ allowedVideoJoiningTimeMs,
+ drmSessionManager,
+ /* playClearSamplesWithoutKeys= */ false,
+ eventHandler,
+ eventListener,
+ MAX_DROPPED_VIDEO_FRAME_COUNT_TO_NOTIFY));
if (extensionRendererMode == EXTENSION_RENDERER_MODE_OFF) {
return;
@@ -185,18 +219,32 @@ public class DefaultRenderersFactory implements RenderersFactory {
}
try {
- Class<?> clazz =
- Class.forName("com.google.android.exoplayer2.ext.vp9.LibvpxVideoRenderer");
- Constructor<?> constructor = clazz.getConstructor(boolean.class, long.class, Handler.class,
- VideoRendererEventListener.class, int.class);
- Renderer renderer = (Renderer) constructor.newInstance(true, allowedVideoJoiningTimeMs,
- eventHandler, eventListener, MAX_DROPPED_VIDEO_FRAME_COUNT_TO_NOTIFY);
+ // Full class names used for constructor args so the LINT rule triggers if any of them move.
+ // LINT.IfChange
+ Class<?> clazz = Class.forName("com.google.android.exoplayer2.ext.vp9.LibvpxVideoRenderer");
+ Constructor<?> constructor =
+ clazz.getConstructor(
+ boolean.class,
+ long.class,
+ android.os.Handler.class,
+ com.google.android.exoplayer2.video.VideoRendererEventListener.class,
+ int.class);
+ // LINT.ThenChange(../../../../../../../proguard-rules.txt)
+ Renderer renderer =
+ (Renderer)
+ constructor.newInstance(
+ true,
+ allowedVideoJoiningTimeMs,
+ eventHandler,
+ eventListener,
+ MAX_DROPPED_VIDEO_FRAME_COUNT_TO_NOTIFY);
out.add(extensionRendererIndex++, renderer);
Log.i(TAG, "Loaded LibvpxVideoRenderer.");
} catch (ClassNotFoundException e) {
// Expected if the app was built without the extension.
} catch (Exception e) {
- throw new RuntimeException(e);
+ // The extension is present, but instantiation failed.
+ throw new RuntimeException("Error instantiating VP9 extension", e);
}
}
@@ -218,8 +266,16 @@ public class DefaultRenderersFactory implements RenderersFactory {
AudioProcessor[] audioProcessors, Handler eventHandler,
AudioRendererEventListener eventListener, @ExtensionRendererMode int extensionRendererMode,
ArrayList<Renderer> out) {
- out.add(new MediaCodecAudioRenderer(MediaCodecSelector.DEFAULT, drmSessionManager, true,
- eventHandler, eventListener, AudioCapabilities.getCapabilities(context), audioProcessors));
+ out.add(
+ new MediaCodecAudioRenderer(
+ context,
+ MediaCodecSelector.DEFAULT,
+ drmSessionManager,
+ /* playClearSamplesWithoutKeys= */ false,
+ eventHandler,
+ eventListener,
+ AudioCapabilities.getCapabilities(context),
+ audioProcessors));
if (extensionRendererMode == EXTENSION_RENDERER_MODE_OFF) {
return;
@@ -230,48 +286,67 @@ public class DefaultRenderersFactory implements RenderersFactory {
}
try {
- Class<?> clazz =
- Class.forName("com.google.android.exoplayer2.ext.opus.LibopusAudioRenderer");
- Constructor<?> constructor = clazz.getConstructor(Handler.class,
- AudioRendererEventListener.class, AudioProcessor[].class);
- Renderer renderer = (Renderer) constructor.newInstance(eventHandler, eventListener,
- audioProcessors);
+ // Full class names used for constructor args so the LINT rule triggers if any of them move.
+ // LINT.IfChange
+ Class<?> clazz = Class.forName("com.google.android.exoplayer2.ext.opus.LibopusAudioRenderer");
+ Constructor<?> constructor =
+ clazz.getConstructor(
+ android.os.Handler.class,
+ com.google.android.exoplayer2.audio.AudioRendererEventListener.class,
+ com.google.android.exoplayer2.audio.AudioProcessor[].class);
+ // LINT.ThenChange(../../../../../../../proguard-rules.txt)
+ Renderer renderer =
+ (Renderer) constructor.newInstance(eventHandler, eventListener, audioProcessors);
out.add(extensionRendererIndex++, renderer);
Log.i(TAG, "Loaded LibopusAudioRenderer.");
} catch (ClassNotFoundException e) {
// Expected if the app was built without the extension.
} catch (Exception e) {
- throw new RuntimeException(e);
+ // The extension is present, but instantiation failed.
+ throw new RuntimeException("Error instantiating Opus extension", e);
}
try {
- Class<?> clazz =
- Class.forName("com.google.android.exoplayer2.ext.flac.LibflacAudioRenderer");
- Constructor<?> constructor = clazz.getConstructor(Handler.class,
- AudioRendererEventListener.class, AudioProcessor[].class);
- Renderer renderer = (Renderer) constructor.newInstance(eventHandler, eventListener,
- audioProcessors);
+ // Full class names used for constructor args so the LINT rule triggers if any of them move.
+ // LINT.IfChange
+ Class<?> clazz = Class.forName("com.google.android.exoplayer2.ext.flac.LibflacAudioRenderer");
+ Constructor<?> constructor =
+ clazz.getConstructor(
+ android.os.Handler.class,
+ com.google.android.exoplayer2.audio.AudioRendererEventListener.class,
+ com.google.android.exoplayer2.audio.AudioProcessor[].class);
+ // LINT.ThenChange(../../../../../../../proguard-rules.txt)
+ Renderer renderer =
+ (Renderer) constructor.newInstance(eventHandler, eventListener, audioProcessors);
out.add(extensionRendererIndex++, renderer);
Log.i(TAG, "Loaded LibflacAudioRenderer.");
} catch (ClassNotFoundException e) {
// Expected if the app was built without the extension.
} catch (Exception e) {
- throw new RuntimeException(e);
+ // The extension is present, but instantiation failed.
+ throw new RuntimeException("Error instantiating FLAC extension", e);
}
try {
+ // Full class names used for constructor args so the LINT rule triggers if any of them move.
+ // LINT.IfChange
Class<?> clazz =
Class.forName("com.google.android.exoplayer2.ext.ffmpeg.FfmpegAudioRenderer");
- Constructor<?> constructor = clazz.getConstructor(Handler.class,
- AudioRendererEventListener.class, AudioProcessor[].class);
- Renderer renderer = (Renderer) constructor.newInstance(eventHandler, eventListener,
- audioProcessors);
+ Constructor<?> constructor =
+ clazz.getConstructor(
+ android.os.Handler.class,
+ com.google.android.exoplayer2.audio.AudioRendererEventListener.class,
+ com.google.android.exoplayer2.audio.AudioProcessor[].class);
+ // LINT.ThenChange(../../../../../../../proguard-rules.txt)
+ Renderer renderer =
+ (Renderer) constructor.newInstance(eventHandler, eventListener, audioProcessors);
out.add(extensionRendererIndex++, renderer);
Log.i(TAG, "Loaded FfmpegAudioRenderer.");
} catch (ClassNotFoundException e) {
// Expected if the app was built without the extension.
} catch (Exception e) {
- throw new RuntimeException(e);
+ // The extension is present, but instantiation failed.
+ throw new RuntimeException("Error instantiating FFmpeg extension", e);
}
}
diff --git a/library/core/src/main/java/com/google/android/exoplayer2/ExoPlayer.java b/library/core/src/main/java/com/google/android/exoplayer2/ExoPlayer.java
index b096b5ae12..6d8dd5b7a8 100644
--- a/library/core/src/main/java/com/google/android/exoplayer2/ExoPlayer.java
+++ b/library/core/src/main/java/com/google/android/exoplayer2/ExoPlayer.java
@@ -16,11 +16,11 @@
package com.google.android.exoplayer2;
import android.os.Looper;
+import android.support.annotation.Nullable;
import com.google.android.exoplayer2.audio.MediaCodecAudioRenderer;
import com.google.android.exoplayer2.metadata.MetadataRenderer;
import com.google.android.exoplayer2.source.ClippingMediaSource;
import com.google.android.exoplayer2.source.ConcatenatingMediaSource;
-import com.google.android.exoplayer2.source.DynamicConcatenatingMediaSource;
import com.google.android.exoplayer2.source.ExtractorMediaSource;
import com.google.android.exoplayer2.source.LoopingMediaSource;
import com.google.android.exoplayer2.source.MediaSource;
@@ -33,40 +33,42 @@ import com.google.android.exoplayer2.upstream.DataSource;
import com.google.android.exoplayer2.video.MediaCodecVideoRenderer;
/**
- * An extensible media player that plays {@link MediaSource}s. Instances can be obtained from
- * {@link ExoPlayerFactory}.
+ * An extensible media player that plays {@link MediaSource}s. Instances can be obtained from {@link
+ * ExoPlayerFactory}.
*
* <h3>Player components</h3>
+ *
* ExoPlayer is designed to make few assumptions about (and hence impose few restrictions on) the
* type of the media being played, how and where it is stored, and how it is rendered. Rather than
* implementing the loading and rendering of media directly, ExoPlayer implementations delegate this
* work to components that are injected when a player is created or when it's prepared for playback.
* Components common to all ExoPlayer implementations are:
+ *
*
* - A {@link MediaSource} that defines the media to be played, loads the media, and from
- * which the loaded media can be read. A MediaSource is injected via {@link #prepare(MediaSource)}
- * at the start of playback. The library modules provide default implementations for regular media
- * files ({@link ExtractorMediaSource}), DASH (DashMediaSource), SmoothStreaming (SsMediaSource)
- * and HLS (HlsMediaSource), an implementation for loading single media samples
- * ({@link SingleSampleMediaSource}) that's most often used for side-loaded subtitle files, and
- * implementations for building more complex MediaSources from simpler ones
- * ({@link MergingMediaSource}, {@link ConcatenatingMediaSource},
- * {@link DynamicConcatenatingMediaSource}, {@link LoopingMediaSource} and
- * {@link ClippingMediaSource}).
+ * which the loaded media can be read. A MediaSource is injected via {@link
+ * #prepare(MediaSource)} at the start of playback. The library modules provide default
+ * implementations for regular media files ({@link ExtractorMediaSource}), DASH
+ * (DashMediaSource), SmoothStreaming (SsMediaSource) and HLS (HlsMediaSource), an
+ * implementation for loading single media samples ({@link SingleSampleMediaSource}) that's
+ * most often used for side-loaded subtitle files, and implementations for building more
+ * complex MediaSources from simpler ones ({@link MergingMediaSource}, {@link
+ * ConcatenatingMediaSource}, {@link LoopingMediaSource} and {@link ClippingMediaSource}).
* - {@link Renderer}s that render individual components of the media. The library
- * provides default implementations for common media types ({@link MediaCodecVideoRenderer},
- * {@link MediaCodecAudioRenderer}, {@link TextRenderer} and {@link MetadataRenderer}). A Renderer
- * consumes media from the MediaSource being played. Renderers are injected when the player is
- * created.
+ * provides default implementations for common media types ({@link MediaCodecVideoRenderer},
+ * {@link MediaCodecAudioRenderer}, {@link TextRenderer} and {@link MetadataRenderer}). A
+ * Renderer consumes media from the MediaSource being played. Renderers are injected when the
+ * player is created.
* - A {@link TrackSelector} that selects tracks provided by the MediaSource to be
- * consumed by each of the available Renderers. The library provides a default implementation
- * ({@link DefaultTrackSelector}) suitable for most use cases. A TrackSelector is injected when
- * the player is created.
+ * consumed by each of the available Renderers. The library provides a default implementation
+ * ({@link DefaultTrackSelector}) suitable for most use cases. A TrackSelector is injected
+ * when the player is created.
* - A {@link LoadControl} that controls when the MediaSource buffers more media, and how
- * much media is buffered. The library provides a default implementation
- * ({@link DefaultLoadControl}) suitable for most use cases. A LoadControl is injected when the
- * player is created.
+ * much media is buffered. The library provides a default implementation ({@link
+ * DefaultLoadControl}) suitable for most use cases. A LoadControl is injected when the player
+ * is created.
*
+ *
* An ExoPlayer can be built using the default components provided by the library, but may also
* be built using custom implementations if non-standard behaviors are required. For example a
* custom LoadControl could be injected to change the player's buffering strategy, or a custom
@@ -80,30 +82,31 @@ import com.google.android.exoplayer2.video.MediaCodecVideoRenderer;
* it's possible to load data from a non-standard source, or through a different network stack.
*
*
* <h3>Threading model</h3>
- * The figure below shows ExoPlayer's threading model.
- *
- *
- *
+ *
+ * The figure below shows ExoPlayer's threading model.
+ *
+ *
*
*
- * - It is recommended that ExoPlayer instances are created and accessed from a single application
- * thread. The application's main thread is ideal. Accessing an instance from multiple threads is
- * discouraged, however if an application does wish to do this then it may do so provided that it
- * ensures accesses are synchronized.
- * - Registered listeners are called on the thread that created the ExoPlayer instance, unless
- * the thread that created the ExoPlayer instance does not have a {@link Looper}. In that case,
- * registered listeners will be called on the application's main thread.
- * - An internal playback thread is responsible for playback. Injected player components such as
- * Renderers, MediaSources, TrackSelectors and LoadControls are called by the player on this
- * thread.
- * - When the application performs an operation on the player, for example a seek, a message is
- * delivered to the internal playback thread via a message queue. The internal playback thread
- * consumes messages from the queue and performs the corresponding operations. Similarly, when a
- * playback event occurs on the internal playback thread, a message is delivered to the application
- * thread via a second message queue. The application thread consumes messages from the queue,
- * updating the application visible state and calling corresponding listener methods.
- * - Injected player components may use additional background threads. For example a MediaSource
- * may use background threads to load data. These are implementation specific.
+ * - It is strongly recommended that ExoPlayer instances are created and accessed from a single
+ * application thread. The application's main thread is ideal. Accessing an instance from
+ * multiple threads is discouraged as it may cause synchronization problems.
+ *
- Registered listeners are called on the thread that created the ExoPlayer instance, unless
+ * the thread that created the ExoPlayer instance does not have a {@link Looper}. In that
+ * case, registered listeners will be called on the application's main thread.
+ *
- An internal playback thread is responsible for playback. Injected player components such as
+ * Renderers, MediaSources, TrackSelectors and LoadControls are called by the player on this
+ * thread.
+ *
- When the application performs an operation on the player, for example a seek, a message is
+ * delivered to the internal playback thread via a message queue. The internal playback thread
+ * consumes messages from the queue and performs the corresponding operations. Similarly, when
+ * a playback event occurs on the internal playback thread, a message is delivered to the
+ * application thread via a second message queue. The application thread consumes messages
+ * from the queue, updating the application visible state and calling corresponding listener
+ * methods.
+ *
- Injected player components may use additional background threads. For example a MediaSource
+ * may use background threads to load data. These are implementation specific.
*
*/
public interface ExoPlayer extends Player {
@@ -114,54 +117,28 @@ public interface ExoPlayer extends Player {
@Deprecated
interface EventListener extends Player.EventListener {}
- /**
- * A component of an {@link ExoPlayer} that can receive messages on the playback thread.
- *
- * Messages can be delivered to a component via {@link #sendMessages} and
- * {@link #blockingSendMessages}.
- */
- interface ExoPlayerComponent {
+ /** @deprecated Use {@link PlayerMessage.Target} instead. */
+ @Deprecated
+ interface ExoPlayerComponent extends PlayerMessage.Target {}
- /**
- * Handles a message delivered to the component. Called on the playback thread.
- *
- * @param messageType The message type.
- * @param message The message.
- * @throws ExoPlaybackException If an error occurred whilst handling the message.
- */
- void handleMessage(int messageType, Object message) throws ExoPlaybackException;
-
- }
-
- /**
- * Defines a message and a target {@link ExoPlayerComponent} to receive it.
- */
+ /** @deprecated Use {@link PlayerMessage} instead. */
+ @Deprecated
final class ExoPlayerMessage {
- /**
- * The target to receive the message.
- */
- public final ExoPlayerComponent target;
- /**
- * The type of the message.
- */
+ /** The target to receive the message. */
+ public final PlayerMessage.Target target;
+ /** The type of the message. */
public final int messageType;
- /**
- * The message.
- */
+ /** The message. */
public final Object message;
- /**
- * @param target The target of the message.
- * @param messageType The message type.
- * @param message The message.
- */
- public ExoPlayerMessage(ExoPlayerComponent target, int messageType, Object message) {
+ /** @deprecated Use {@link ExoPlayer#createMessage(PlayerMessage.Target)} instead. */
+ @Deprecated
+ public ExoPlayerMessage(PlayerMessage.Target target, int messageType, Object message) {
this.target = target;
this.messageType = messageType;
this.message = message;
}
-
}
/**
@@ -208,15 +185,25 @@ public interface ExoPlayer extends Player {
*/
Looper getPlaybackLooper();
+ @Override
+ @Nullable
+ ExoPlaybackException getPlaybackError();
+
/**
* Prepares the player to play the provided {@link MediaSource}. Equivalent to
* {@code prepare(mediaSource, true, true)}.
+ *
+ * Note: {@link MediaSource} instances are not designed to be re-used. If you want to prepare a
+ * player more than once with the same piece of media, use a new instance each time.
*/
void prepare(MediaSource mediaSource);
/**
* Prepares the player to play the provided {@link MediaSource}, optionally resetting the playback
* position the default position in the first {@link Timeline.Window}.
+ *
+ * Note: {@link MediaSource} instances are not designed to be re-used. If you want to prepare a
+ * player more than once with the same piece of media, use a new instance each time.
*
* @param mediaSource The {@link MediaSource} to play.
* @param resetPosition Whether the playback position should be reset to the default position in
@@ -229,20 +216,31 @@ public interface ExoPlayer extends Player {
void prepare(MediaSource mediaSource, boolean resetPosition, boolean resetState);
/**
- * Sends messages to their target components. The messages are delivered on the playback thread.
- * If a component throws an {@link ExoPlaybackException} then it is propagated out of the player
- * as an error.
- *
- * @param messages The messages to be sent.
+ * Creates a message that can be sent to a {@link PlayerMessage.Target}. By default, the message
+ * will be delivered immediately without blocking on the playback thread. The default {@link
+ * PlayerMessage#getType()} is 0 and the default {@link PlayerMessage#getPayload()} is null. If a
+ * position is specified with {@link PlayerMessage#setPosition(long)}, the message will be
+ * delivered at this position in the current window defined by {@link #getCurrentWindowIndex()}.
+ * Alternatively, the message can be sent at a specific window using {@link
+ * PlayerMessage#setPosition(int, long)}.
*/
+ PlayerMessage createMessage(PlayerMessage.Target target);
+
+ /** @deprecated Use {@link #createMessage(PlayerMessage.Target)} instead. */
+ @Deprecated
void sendMessages(ExoPlayerMessage... messages);
/**
- * Variant of {@link #sendMessages(ExoPlayerMessage...)} that blocks until after the messages have
- * been delivered.
- *
- * @param messages The messages to be sent.
+ * @deprecated Use {@link #createMessage(PlayerMessage.Target)} with {@link
+ * PlayerMessage#blockUntilDelivered()}.
*/
+ @Deprecated
void blockingSendMessages(ExoPlayerMessage... messages);
+ /**
+ * Sets the parameters that control how seek operations are performed.
+ *
+ * @param seekParameters The seek parameters, or {@code null} to use the defaults.
+ */
+ void setSeekParameters(@Nullable SeekParameters seekParameters);
}
diff --git a/library/core/src/main/java/com/google/android/exoplayer2/ExoPlayerFactory.java b/library/core/src/main/java/com/google/android/exoplayer2/ExoPlayerFactory.java
index b647e541bc..8095ed9c64 100644
--- a/library/core/src/main/java/com/google/android/exoplayer2/ExoPlayerFactory.java
+++ b/library/core/src/main/java/com/google/android/exoplayer2/ExoPlayerFactory.java
@@ -17,9 +17,11 @@ package com.google.android.exoplayer2;
import android.content.Context;
import android.support.annotation.Nullable;
+import com.google.android.exoplayer2.analytics.AnalyticsCollector;
import com.google.android.exoplayer2.drm.DrmSessionManager;
import com.google.android.exoplayer2.drm.FrameworkMediaCrypto;
import com.google.android.exoplayer2.trackselection.TrackSelector;
+import com.google.android.exoplayer2.util.Clock;
/**
* A factory for {@link ExoPlayer} instances.
@@ -57,8 +59,8 @@ public final class ExoPlayerFactory {
public static SimpleExoPlayer newSimpleInstance(Context context, TrackSelector trackSelector,
LoadControl loadControl,
@Nullable DrmSessionManager<FrameworkMediaCrypto> drmSessionManager) {
- RenderersFactory renderersFactory = new DefaultRenderersFactory(context, drmSessionManager);
- return newSimpleInstance(renderersFactory, trackSelector, loadControl);
+ RenderersFactory renderersFactory = new DefaultRenderersFactory(context);
+ return newSimpleInstance(renderersFactory, trackSelector, loadControl, drmSessionManager);
}
/**
@@ -78,9 +80,8 @@ public final class ExoPlayerFactory {
public static SimpleExoPlayer newSimpleInstance(Context context, TrackSelector trackSelector,
LoadControl loadControl, @Nullable DrmSessionManager<FrameworkMediaCrypto> drmSessionManager,
@DefaultRenderersFactory.ExtensionRendererMode int extensionRendererMode) {
- RenderersFactory renderersFactory = new DefaultRenderersFactory(context, drmSessionManager,
- extensionRendererMode);
- return newSimpleInstance(renderersFactory, trackSelector, loadControl);
+ RenderersFactory renderersFactory = new DefaultRenderersFactory(context, extensionRendererMode);
+ return newSimpleInstance(renderersFactory, trackSelector, loadControl, drmSessionManager);
}
/**
@@ -103,9 +104,9 @@ public final class ExoPlayerFactory {
LoadControl loadControl, @Nullable DrmSessionManager<FrameworkMediaCrypto> drmSessionManager,
@DefaultRenderersFactory.ExtensionRendererMode int extensionRendererMode,
long allowedVideoJoiningTimeMs) {
- RenderersFactory renderersFactory = new DefaultRenderersFactory(context, drmSessionManager,
- extensionRendererMode, allowedVideoJoiningTimeMs);
- return newSimpleInstance(renderersFactory, trackSelector, loadControl);
+ RenderersFactory renderersFactory =
+ new DefaultRenderersFactory(context, extensionRendererMode, allowedVideoJoiningTimeMs);
+ return newSimpleInstance(renderersFactory, trackSelector, loadControl, drmSessionManager);
}
/**
@@ -129,6 +130,22 @@ public final class ExoPlayerFactory {
return newSimpleInstance(renderersFactory, trackSelector, new DefaultLoadControl());
}
+ /**
+ * Creates a {@link SimpleExoPlayer} instance.
+ *
+ * @param renderersFactory A factory for creating {@link Renderer}s to be used by the instance.
+ * @param trackSelector The {@link TrackSelector} that will be used by the instance.
+ * @param drmSessionManager An optional {@link DrmSessionManager}. May be null if the instance
+ * will not be used for DRM protected playbacks.
+ */
+ public static SimpleExoPlayer newSimpleInstance(
+ RenderersFactory renderersFactory,
+ TrackSelector trackSelector,
+ @Nullable DrmSessionManager<FrameworkMediaCrypto> drmSessionManager) {
+ return newSimpleInstance(
+ renderersFactory, trackSelector, new DefaultLoadControl(), drmSessionManager);
+ }
+
/**
* Creates a {@link SimpleExoPlayer} instance.
*
@@ -138,7 +155,46 @@ public final class ExoPlayerFactory {
*/
public static SimpleExoPlayer newSimpleInstance(RenderersFactory renderersFactory,
TrackSelector trackSelector, LoadControl loadControl) {
- return new SimpleExoPlayer(renderersFactory, trackSelector, loadControl);
+ return new SimpleExoPlayer(
+ renderersFactory, trackSelector, loadControl, /* drmSessionManager= */ null);
+ }
+
+ /**
+ * Creates a {@link SimpleExoPlayer} instance.
+ *
+ * @param renderersFactory A factory for creating {@link Renderer}s to be used by the instance.
+ * @param trackSelector The {@link TrackSelector} that will be used by the instance.
+ * @param loadControl The {@link LoadControl} that will be used by the instance.
+ * @param drmSessionManager An optional {@link DrmSessionManager}. May be null if the instance
+ * will not be used for DRM protected playbacks.
+ */
+ public static SimpleExoPlayer newSimpleInstance(
+ RenderersFactory renderersFactory,
+ TrackSelector trackSelector,
+ LoadControl loadControl,
+ @Nullable DrmSessionManager<FrameworkMediaCrypto> drmSessionManager) {
+ return new SimpleExoPlayer(renderersFactory, trackSelector, loadControl, drmSessionManager);
+ }
+
+ /**
+ * Creates a {@link SimpleExoPlayer} instance.
+ *
+ * @param renderersFactory A factory for creating {@link Renderer}s to be used by the instance.
+ * @param trackSelector The {@link TrackSelector} that will be used by the instance.
+ * @param loadControl The {@link LoadControl} that will be used by the instance.
+ * @param drmSessionManager An optional {@link DrmSessionManager}. May be null if the instance
+ * will not be used for DRM protected playbacks.
+ * @param analyticsCollectorFactory A factory for creating the {@link AnalyticsCollector} that
+ * will collect and forward all player events.
+ */
+ public static SimpleExoPlayer newSimpleInstance(
+ RenderersFactory renderersFactory,
+ TrackSelector trackSelector,
+ LoadControl loadControl,
+ @Nullable DrmSessionManager<FrameworkMediaCrypto> drmSessionManager,
+ AnalyticsCollector.Factory analyticsCollectorFactory) {
+ return new SimpleExoPlayer(
+ renderersFactory, trackSelector, loadControl, drmSessionManager, analyticsCollectorFactory);
}
/**
@@ -160,7 +216,7 @@ public final class ExoPlayerFactory {
*/
public static ExoPlayer newInstance(Renderer[] renderers, TrackSelector trackSelector,
LoadControl loadControl) {
- return new ExoPlayerImpl(renderers, trackSelector, loadControl);
+ return new ExoPlayerImpl(renderers, trackSelector, loadControl, Clock.DEFAULT);
}
}
diff --git a/library/core/src/main/java/com/google/android/exoplayer2/ExoPlayerImpl.java b/library/core/src/main/java/com/google/android/exoplayer2/ExoPlayerImpl.java
index 0ce920a16f..5ca5994b6e 100644
--- a/library/core/src/main/java/com/google/android/exoplayer2/ExoPlayerImpl.java
+++ b/library/core/src/main/java/com/google/android/exoplayer2/ExoPlayerImpl.java
@@ -21,8 +21,8 @@ import android.os.Looper;
import android.os.Message;
import android.support.annotation.Nullable;
import android.util.Log;
-import com.google.android.exoplayer2.ExoPlayerImplInternal.PlaybackInfo;
-import com.google.android.exoplayer2.ExoPlayerImplInternal.SourceInfo;
+import android.util.Pair;
+import com.google.android.exoplayer2.PlayerMessage.Target;
import com.google.android.exoplayer2.source.MediaSource;
import com.google.android.exoplayer2.source.MediaSource.MediaPeriodId;
import com.google.android.exoplayer2.source.TrackGroupArray;
@@ -31,7 +31,10 @@ import com.google.android.exoplayer2.trackselection.TrackSelectionArray;
import com.google.android.exoplayer2.trackselection.TrackSelector;
import com.google.android.exoplayer2.trackselection.TrackSelectorResult;
import com.google.android.exoplayer2.util.Assertions;
+import com.google.android.exoplayer2.util.Clock;
import com.google.android.exoplayer2.util.Util;
+import java.util.ArrayList;
+import java.util.List;
import java.util.concurrent.CopyOnWriteArraySet;
/**
@@ -43,26 +46,22 @@ import java.util.concurrent.CopyOnWriteArraySet;
private final Renderer[] renderers;
private final TrackSelector trackSelector;
- private final TrackSelectionArray emptyTrackSelections;
+ private final TrackSelectorResult emptyTrackSelectorResult;
private final Handler eventHandler;
private final ExoPlayerImplInternal internalPlayer;
+ private final Handler internalPlayerHandler;
private final CopyOnWriteArraySet<Player.EventListener> listeners;
private final Timeline.Window window;
private final Timeline.Period period;
- private boolean tracksSelected;
private boolean playWhenReady;
private @RepeatMode int repeatMode;
private boolean shuffleModeEnabled;
- private int playbackState;
- private int pendingSeekAcks;
- private int pendingPrepareAcks;
- private boolean isLoading;
- private Timeline timeline;
- private Object manifest;
- private TrackGroupArray trackGroups;
- private TrackSelectionArray trackSelections;
+ private int pendingOperationAcks;
+ private boolean hasPendingPrepare;
+ private boolean hasPendingSeek;
private PlaybackParameters playbackParameters;
+ private @Nullable ExoPlaybackException playbackError;
// Playback information when there is no pending seek/set source operation.
private PlaybackInfo playbackInfo;
@@ -78,9 +77,11 @@ import java.util.concurrent.CopyOnWriteArraySet;
* @param renderers The {@link Renderer}s that will be used by the instance.
* @param trackSelector The {@link TrackSelector} that will be used by the instance.
* @param loadControl The {@link LoadControl} that will be used by the instance.
+ * @param clock The {@link Clock} that will be used by the instance.
*/
@SuppressLint("HandlerLeak")
- public ExoPlayerImpl(Renderer[] renderers, TrackSelector trackSelector, LoadControl loadControl) {
+ public ExoPlayerImpl(
+ Renderer[] renderers, TrackSelector trackSelector, LoadControl loadControl, Clock clock) {
Log.i(TAG, "Init " + Integer.toHexString(System.identityHashCode(this)) + " ["
+ ExoPlayerLibraryInfo.VERSION_SLASHY + "] [" + Util.DEVICE_DEBUG_INFO + "]");
Assertions.checkState(renderers.length > 0);
@@ -89,14 +90,14 @@ import java.util.concurrent.CopyOnWriteArraySet;
this.playWhenReady = false;
this.repeatMode = Player.REPEAT_MODE_OFF;
this.shuffleModeEnabled = false;
- this.playbackState = Player.STATE_IDLE;
this.listeners = new CopyOnWriteArraySet<>();
- emptyTrackSelections = new TrackSelectionArray(new TrackSelection[renderers.length]);
- timeline = Timeline.EMPTY;
+ emptyTrackSelectorResult =
+ new TrackSelectorResult(
+ new RendererConfiguration[renderers.length],
+ new TrackSelection[renderers.length],
+ null);
window = new Timeline.Window();
period = new Timeline.Period();
- trackGroups = TrackGroupArray.EMPTY;
- trackSelections = emptyTrackSelections;
playbackParameters = PlaybackParameters.DEFAULT;
Looper eventLooper = Looper.myLooper() != null ? Looper.myLooper() : Looper.getMainLooper();
eventHandler = new Handler(eventLooper) {
@@ -105,9 +106,35 @@ import java.util.concurrent.CopyOnWriteArraySet;
ExoPlayerImpl.this.handleEvent(msg);
}
};
- playbackInfo = new ExoPlayerImplInternal.PlaybackInfo(0, 0);
- internalPlayer = new ExoPlayerImplInternal(renderers, trackSelector, loadControl, playWhenReady,
- repeatMode, shuffleModeEnabled, eventHandler, playbackInfo, this);
+ playbackInfo =
+ new PlaybackInfo(
+ Timeline.EMPTY,
+ /* startPositionUs= */ 0,
+ TrackGroupArray.EMPTY,
+ emptyTrackSelectorResult);
+ internalPlayer =
+ new ExoPlayerImplInternal(
+ renderers,
+ trackSelector,
+ emptyTrackSelectorResult,
+ loadControl,
+ playWhenReady,
+ repeatMode,
+ shuffleModeEnabled,
+ eventHandler,
+ this,
+ clock);
+ internalPlayerHandler = new Handler(internalPlayer.getPlaybackLooper());
+ }
+
+ @Override
+ public VideoComponent getVideoComponent() {
+ return null;
+ }
+
+ @Override
+ public TextComponent getTextComponent() {
+ return null;
}
@Override
@@ -127,7 +154,12 @@ import java.util.concurrent.CopyOnWriteArraySet;
@Override
public int getPlaybackState() {
- return playbackState;
+ return playbackInfo.playbackState;
+ }
+
+ @Override
+ public @Nullable ExoPlaybackException getPlaybackError() {
+ return playbackError;
}
@Override
@@ -137,26 +169,23 @@ import java.util.concurrent.CopyOnWriteArraySet;
@Override
public void prepare(MediaSource mediaSource, boolean resetPosition, boolean resetState) {
- if (resetState) {
- if (!timeline.isEmpty() || manifest != null) {
- timeline = Timeline.EMPTY;
- manifest = null;
- for (Player.EventListener listener : listeners) {
- listener.onTimelineChanged(timeline, manifest);
- }
- }
- if (tracksSelected) {
- tracksSelected = false;
- trackGroups = TrackGroupArray.EMPTY;
- trackSelections = emptyTrackSelections;
- trackSelector.onSelectionActivated(null);
- for (Player.EventListener listener : listeners) {
- listener.onTracksChanged(trackGroups, trackSelections);
- }
- }
- }
- pendingPrepareAcks++;
- internalPlayer.prepare(mediaSource, resetPosition);
+ playbackError = null;
+ PlaybackInfo playbackInfo =
+ getResetPlaybackInfo(
+ resetPosition, resetState, /* playbackState= */ Player.STATE_BUFFERING);
+ // Trigger internal prepare first before updating the playback info and notifying external
+ // listeners to ensure that new operations issued in the listener notifications reach the
+ // player after this prepare. The internal player can't change the playback info immediately
+ // because it uses a callback.
+ hasPendingPrepare = true;
+ pendingOperationAcks++;
+ internalPlayer.prepare(mediaSource, resetPosition, resetState);
+ updatePlaybackInfo(
+ playbackInfo,
+ /* positionDiscontinuity= */ false,
+ /* ignored */ DISCONTINUITY_REASON_INTERNAL,
+ TIMELINE_CHANGE_REASON_RESET,
+ /* seekProcessed= */ false);
}
@Override
@@ -165,7 +194,7 @@ import java.util.concurrent.CopyOnWriteArraySet;
this.playWhenReady = playWhenReady;
internalPlayer.setPlayWhenReady(playWhenReady);
for (Player.EventListener listener : listeners) {
- listener.onPlayerStateChanged(playWhenReady, playbackState);
+ listener.onPlayerStateChanged(playWhenReady, playbackInfo.playbackState);
}
}
}
@@ -209,7 +238,7 @@ import java.util.concurrent.CopyOnWriteArraySet;
@Override
public boolean isLoading() {
- return isLoading;
+ return playbackInfo.isLoading;
}
@Override
@@ -229,36 +258,41 @@ import java.util.concurrent.CopyOnWriteArraySet;
@Override
public void seekTo(int windowIndex, long positionMs) {
+ Timeline timeline = playbackInfo.timeline;
if (windowIndex < 0 || (!timeline.isEmpty() && windowIndex >= timeline.getWindowCount())) {
throw new IllegalSeekPositionException(timeline, windowIndex, positionMs);
}
- pendingSeekAcks++;
+ hasPendingSeek = true;
+ pendingOperationAcks++;
+ if (isPlayingAd()) {
+ // TODO: Investigate adding support for seeking during ads. This is complicated to do in
+ // general because the midroll ad preceding the seek destination must be played before the
+ // content position can be played, if a different ad is playing at the moment.
+ Log.w(TAG, "seekTo ignored because an ad is playing");
+ eventHandler
+ .obtainMessage(
+ ExoPlayerImplInternal.MSG_PLAYBACK_INFO_CHANGED,
+ /* operationAcks */ 1,
+ /* positionDiscontinuityReason */ C.INDEX_UNSET,
+ playbackInfo)
+ .sendToTarget();
+ return;
+ }
maskingWindowIndex = windowIndex;
if (timeline.isEmpty()) {
+ maskingWindowPositionMs = positionMs == C.TIME_UNSET ? 0 : positionMs;
maskingPeriodIndex = 0;
} else {
- timeline.getWindow(windowIndex, window);
- long resolvedPositionUs =
- positionMs == C.TIME_UNSET ? window.getDefaultPositionUs() : C.msToUs(positionMs);
- int periodIndex = window.firstPeriodIndex;
- long periodPositionUs = window.getPositionInFirstPeriodUs() + resolvedPositionUs;
- long periodDurationUs = timeline.getPeriod(periodIndex, period).getDurationUs();
- while (periodDurationUs != C.TIME_UNSET && periodPositionUs >= periodDurationUs
- && periodIndex < window.lastPeriodIndex) {
- periodPositionUs -= periodDurationUs;
- periodDurationUs = timeline.getPeriod(++periodIndex, period).getDurationUs();
- }
- maskingPeriodIndex = periodIndex;
+ long windowPositionUs = positionMs == C.TIME_UNSET
+ ? timeline.getWindow(windowIndex, window).getDefaultPositionUs() : C.msToUs(positionMs);
+ Pair<Integer, Long> periodIndexAndPosition =
+ timeline.getPeriodPosition(window, period, windowIndex, windowPositionUs);
+ maskingWindowPositionMs = C.usToMs(windowPositionUs);
+ maskingPeriodIndex = periodIndexAndPosition.first;
}
- if (positionMs == C.TIME_UNSET) {
- maskingWindowPositionMs = 0;
- internalPlayer.seekTo(timeline, windowIndex, C.TIME_UNSET);
- } else {
- maskingWindowPositionMs = positionMs;
- internalPlayer.seekTo(timeline, windowIndex, C.msToUs(positionMs));
- for (Player.EventListener listener : listeners) {
- listener.onPositionDiscontinuity();
- }
+ internalPlayer.seekTo(timeline, windowIndex, C.msToUs(positionMs));
+ for (Player.EventListener listener : listeners) {
+ listener.onPositionDiscontinuity(DISCONTINUITY_REASON_SEEK);
}
}
@@ -275,9 +309,49 @@ import java.util.concurrent.CopyOnWriteArraySet;
return playbackParameters;
}
+ @Override
+ public void setSeekParameters(@Nullable SeekParameters seekParameters) {
+ if (seekParameters == null) {
+ seekParameters = SeekParameters.DEFAULT;
+ }
+ internalPlayer.setSeekParameters(seekParameters);
+ }
+
+ @Override
+ public @Nullable Object getCurrentTag() {
+ int windowIndex = getCurrentWindowIndex();
+ return windowIndex > playbackInfo.timeline.getWindowCount()
+ ? null
+ : playbackInfo.timeline.getWindow(windowIndex, window, /* setTag= */ true).tag;
+ }
+
@Override
public void stop() {
- internalPlayer.stop();
+ stop(/* reset= */ false);
+ }
+
+ @Override
+ public void stop(boolean reset) {
+ if (reset) {
+ playbackError = null;
+ }
+ PlaybackInfo playbackInfo =
+ getResetPlaybackInfo(
+ /* resetPosition= */ reset,
+ /* resetState= */ reset,
+ /* playbackState= */ Player.STATE_IDLE);
+ // Trigger internal stop first before updating the playback info and notifying external
+ // listeners to ensure that new operations issued in the listener notifications reach the
+ // player after this stop. The internal player can't change the playback info immediately
+ // because it uses a callback.
+ pendingOperationAcks++;
+ internalPlayer.stop(reset);
+ updatePlaybackInfo(
+ playbackInfo,
+ /* positionDiscontinuity= */ false,
+ /* ignored */ DISCONTINUITY_REASON_INTERNAL,
+ TIMELINE_CHANGE_REASON_RESET,
+ /* seekProcessed= */ false);
}
@Override
@@ -291,17 +365,52 @@ import java.util.concurrent.CopyOnWriteArraySet;
@Override
public void sendMessages(ExoPlayerMessage... messages) {
- internalPlayer.sendMessages(messages);
+ for (ExoPlayerMessage message : messages) {
+ createMessage(message.target).setType(message.messageType).setPayload(message.message).send();
+ }
+ }
+
+ @Override
+ public PlayerMessage createMessage(Target target) {
+ return new PlayerMessage(
+ internalPlayer,
+ target,
+ playbackInfo.timeline,
+ getCurrentWindowIndex(),
+ internalPlayerHandler);
}
@Override
public void blockingSendMessages(ExoPlayerMessage... messages) {
- internalPlayer.blockingSendMessages(messages);
+ List<PlayerMessage> playerMessages = new ArrayList<>();
+ for (ExoPlayerMessage message : messages) {
+ playerMessages.add(
+ createMessage(message.target)
+ .setType(message.messageType)
+ .setPayload(message.message)
+ .send());
+ }
+ boolean wasInterrupted = false;
+ for (PlayerMessage message : playerMessages) {
+ boolean blockMessage = true;
+ while (blockMessage) {
+ try {
+ message.blockUntilDelivered();
+ blockMessage = false;
+ } catch (InterruptedException e) {
+ wasInterrupted = true;
+ }
+ }
+ }
+ if (wasInterrupted) {
+ // Restore the interrupted status.
+ Thread.currentThread().interrupt();
+ }
}
@Override
public int getCurrentPeriodIndex() {
- if (timeline.isEmpty() || pendingSeekAcks > 0) {
+ if (shouldMaskPosition()) {
return maskingPeriodIndex;
} else {
return playbackInfo.periodId.periodIndex;
@@ -310,15 +419,30 @@ import java.util.concurrent.CopyOnWriteArraySet;
@Override
public int getCurrentWindowIndex() {
- if (timeline.isEmpty() || pendingSeekAcks > 0) {
+ if (shouldMaskPosition()) {
return maskingWindowIndex;
} else {
- return timeline.getPeriod(playbackInfo.periodId.periodIndex, period).windowIndex;
+ return playbackInfo.timeline.getPeriod(playbackInfo.periodId.periodIndex, period).windowIndex;
}
}
+ @Override
+ public int getNextWindowIndex() {
+ Timeline timeline = playbackInfo.timeline;
+ return timeline.isEmpty() ? C.INDEX_UNSET
+ : timeline.getNextWindowIndex(getCurrentWindowIndex(), repeatMode, shuffleModeEnabled);
+ }
+
+ @Override
+ public int getPreviousWindowIndex() {
+ Timeline timeline = playbackInfo.timeline;
+ return timeline.isEmpty() ? C.INDEX_UNSET
+ : timeline.getPreviousWindowIndex(getCurrentWindowIndex(), repeatMode, shuffleModeEnabled);
+ }
+
@Override
public long getDuration() {
+ Timeline timeline = playbackInfo.timeline;
if (timeline.isEmpty()) {
return C.TIME_UNSET;
}
@@ -334,30 +458,25 @@ import java.util.concurrent.CopyOnWriteArraySet;
@Override
public long getCurrentPosition() {
- if (timeline.isEmpty() || pendingSeekAcks > 0) {
+ if (shouldMaskPosition()) {
return maskingWindowPositionMs;
} else {
- timeline.getPeriod(playbackInfo.periodId.periodIndex, period);
- return period.getPositionInWindowMs() + C.usToMs(playbackInfo.positionUs);
+ return playbackInfoPositionUsToWindowPositionMs(playbackInfo.positionUs);
}
}
@Override
public long getBufferedPosition() {
// TODO - Implement this properly.
- if (timeline.isEmpty() || pendingSeekAcks > 0) {
+ if (shouldMaskPosition()) {
return maskingWindowPositionMs;
} else {
- timeline.getPeriod(playbackInfo.periodId.periodIndex, period);
- return period.getPositionInWindowMs() + C.usToMs(playbackInfo.bufferedPositionUs);
+ return playbackInfoPositionUsToWindowPositionMs(playbackInfo.bufferedPositionUs);
}
}
@Override
public int getBufferedPercentage() {
- if (timeline.isEmpty()) {
- return 0;
- }
long position = getBufferedPosition();
long duration = getDuration();
return position == C.TIME_UNSET || duration == C.TIME_UNSET ? 0
@@ -366,33 +485,35 @@ import java.util.concurrent.CopyOnWriteArraySet;
@Override
public boolean isCurrentWindowDynamic() {
+ Timeline timeline = playbackInfo.timeline;
return !timeline.isEmpty() && timeline.getWindow(getCurrentWindowIndex(), window).isDynamic;
}
@Override
public boolean isCurrentWindowSeekable() {
+ Timeline timeline = playbackInfo.timeline;
return !timeline.isEmpty() && timeline.getWindow(getCurrentWindowIndex(), window).isSeekable;
}
@Override
public boolean isPlayingAd() {
- return pendingSeekAcks == 0 && playbackInfo.periodId.adGroupIndex != C.INDEX_UNSET;
+ return !shouldMaskPosition() && playbackInfo.periodId.isAd();
}
@Override
public int getCurrentAdGroupIndex() {
- return pendingSeekAcks == 0 ? playbackInfo.periodId.adGroupIndex : C.INDEX_UNSET;
+ return isPlayingAd() ? playbackInfo.periodId.adGroupIndex : C.INDEX_UNSET;
}
@Override
public int getCurrentAdIndexInAdGroup() {
- return pendingSeekAcks == 0 ? playbackInfo.periodId.adIndexInAdGroup : C.INDEX_UNSET;
+ return isPlayingAd() ? playbackInfo.periodId.adIndexInAdGroup : C.INDEX_UNSET;
}
@Override
public long getContentPosition() {
if (isPlayingAd()) {
- timeline.getPeriod(playbackInfo.periodId.periodIndex, period);
+ playbackInfo.timeline.getPeriod(playbackInfo.periodId.periodIndex, period);
return period.getPositionInWindowMs() + C.usToMs(playbackInfo.contentPositionUs);
} else {
return getCurrentPosition();
@@ -411,104 +532,35 @@ import java.util.concurrent.CopyOnWriteArraySet;
@Override
public TrackGroupArray getCurrentTrackGroups() {
- return trackGroups;
+ return playbackInfo.trackGroups;
}
@Override
public TrackSelectionArray getCurrentTrackSelections() {
- return trackSelections;
+ return playbackInfo.trackSelectorResult.selections;
}
@Override
public Timeline getCurrentTimeline() {
- return timeline;
+ return playbackInfo.timeline;
}
@Override
public Object getCurrentManifest() {
- return manifest;
+ return playbackInfo.manifest;
}
// Not private so it can be called from an inner class without going through a thunk method.
/* package */ void handleEvent(Message msg) {
switch (msg.what) {
- case ExoPlayerImplInternal.MSG_PREPARE_ACK: {
- pendingPrepareAcks--;
+ case ExoPlayerImplInternal.MSG_PLAYBACK_INFO_CHANGED:
+ handlePlaybackInfo(
+ (PlaybackInfo) msg.obj,
+ /* operationAcks= */ msg.arg1,
+ /* positionDiscontinuity= */ msg.arg2 != C.INDEX_UNSET,
+ /* positionDiscontinuityReason= */ msg.arg2);
break;
- }
- case ExoPlayerImplInternal.MSG_STATE_CHANGED: {
- playbackState = msg.arg1;
- for (Player.EventListener listener : listeners) {
- listener.onPlayerStateChanged(playWhenReady, playbackState);
- }
- break;
- }
- case ExoPlayerImplInternal.MSG_LOADING_CHANGED: {
- isLoading = msg.arg1 != 0;
- for (Player.EventListener listener : listeners) {
- listener.onLoadingChanged(isLoading);
- }
- break;
- }
- case ExoPlayerImplInternal.MSG_TRACKS_CHANGED: {
- if (pendingPrepareAcks == 0) {
- TrackSelectorResult trackSelectorResult = (TrackSelectorResult) msg.obj;
- tracksSelected = true;
- trackGroups = trackSelectorResult.groups;
- trackSelections = trackSelectorResult.selections;
- trackSelector.onSelectionActivated(trackSelectorResult.info);
- for (Player.EventListener listener : listeners) {
- listener.onTracksChanged(trackGroups, trackSelections);
- }
- }
- break;
- }
- case ExoPlayerImplInternal.MSG_SEEK_ACK: {
- if (--pendingSeekAcks == 0) {
- playbackInfo = (ExoPlayerImplInternal.PlaybackInfo) msg.obj;
- if (timeline.isEmpty()) {
- // Update the masking variables, which are used when the timeline is empty.
- maskingPeriodIndex = 0;
- maskingWindowIndex = 0;
- maskingWindowPositionMs = 0;
- }
- if (msg.arg1 != 0) {
- for (Player.EventListener listener : listeners) {
- listener.onPositionDiscontinuity();
- }
- }
- }
- break;
- }
- case ExoPlayerImplInternal.MSG_POSITION_DISCONTINUITY: {
- if (pendingSeekAcks == 0) {
- playbackInfo = (ExoPlayerImplInternal.PlaybackInfo) msg.obj;
- for (Player.EventListener listener : listeners) {
- listener.onPositionDiscontinuity();
- }
- }
- break;
- }
- case ExoPlayerImplInternal.MSG_SOURCE_INFO_REFRESHED: {
- SourceInfo sourceInfo = (SourceInfo) msg.obj;
- pendingSeekAcks -= sourceInfo.seekAcks;
- if (pendingPrepareAcks == 0) {
- timeline = sourceInfo.timeline;
- manifest = sourceInfo.manifest;
- playbackInfo = sourceInfo.playbackInfo;
- if (pendingSeekAcks == 0 && timeline.isEmpty()) {
- // Update the masking variables, which are used when the timeline is empty.
- maskingPeriodIndex = 0;
- maskingWindowIndex = 0;
- maskingWindowPositionMs = 0;
- }
- for (Player.EventListener listener : listeners) {
- listener.onTimelineChanged(timeline, manifest);
- }
- }
- break;
- }
- case ExoPlayerImplInternal.MSG_PLAYBACK_PARAMETERS_CHANGED: {
+ case ExoPlayerImplInternal.MSG_PLAYBACK_PARAMETERS_CHANGED:
PlaybackParameters playbackParameters = (PlaybackParameters) msg.obj;
if (!this.playbackParameters.equals(playbackParameters)) {
this.playbackParameters = playbackParameters;
@@ -517,17 +569,136 @@ import java.util.concurrent.CopyOnWriteArraySet;
}
}
break;
- }
- case ExoPlayerImplInternal.MSG_ERROR: {
- ExoPlaybackException exception = (ExoPlaybackException) msg.obj;
+ case ExoPlayerImplInternal.MSG_ERROR:
+ playbackError = (ExoPlaybackException) msg.obj;
for (Player.EventListener listener : listeners) {
- listener.onPlayerError(exception);
+ listener.onPlayerError(playbackError);
}
break;
- }
default:
throw new IllegalStateException();
}
}
+ private void handlePlaybackInfo(
+ PlaybackInfo playbackInfo,
+ int operationAcks,
+ boolean positionDiscontinuity,
+ @DiscontinuityReason int positionDiscontinuityReason) {
+ pendingOperationAcks -= operationAcks;
+ if (pendingOperationAcks == 0) {
+ if (playbackInfo.startPositionUs == C.TIME_UNSET) {
+ // Replace internal unset start position with externally visible start position of zero.
+ playbackInfo =
+ playbackInfo.fromNewPosition(
+ playbackInfo.periodId, /* startPositionUs= */ 0, playbackInfo.contentPositionUs);
+ }
+ if ((!this.playbackInfo.timeline.isEmpty() || hasPendingPrepare)
+ && playbackInfo.timeline.isEmpty()) {
+ // Update the masking variables, which are used when the timeline becomes empty.
+ maskingPeriodIndex = 0;
+ maskingWindowIndex = 0;
+ maskingWindowPositionMs = 0;
+ }
+ @Player.TimelineChangeReason
+ int timelineChangeReason =
+ hasPendingPrepare
+ ? Player.TIMELINE_CHANGE_REASON_PREPARED
+ : Player.TIMELINE_CHANGE_REASON_DYNAMIC;
+ boolean seekProcessed = hasPendingSeek;
+ hasPendingPrepare = false;
+ hasPendingSeek = false;
+ updatePlaybackInfo(
+ playbackInfo,
+ positionDiscontinuity,
+ positionDiscontinuityReason,
+ timelineChangeReason,
+ seekProcessed);
+ }
+ }
+
+ private PlaybackInfo getResetPlaybackInfo(
+ boolean resetPosition, boolean resetState, int playbackState) {
+ if (resetPosition) {
+ maskingWindowIndex = 0;
+ maskingPeriodIndex = 0;
+ maskingWindowPositionMs = 0;
+ } else {
+ maskingWindowIndex = getCurrentWindowIndex();
+ maskingPeriodIndex = getCurrentPeriodIndex();
+ maskingWindowPositionMs = getCurrentPosition();
+ }
+ return new PlaybackInfo(
+ resetState ? Timeline.EMPTY : playbackInfo.timeline,
+ resetState ? null : playbackInfo.manifest,
+ playbackInfo.periodId,
+ playbackInfo.startPositionUs,
+ playbackInfo.contentPositionUs,
+ playbackState,
+ /* isLoading= */ false,
+ resetState ? TrackGroupArray.EMPTY : playbackInfo.trackGroups,
+ resetState ? emptyTrackSelectorResult : playbackInfo.trackSelectorResult);
+ }
+
+ private void updatePlaybackInfo(
+ PlaybackInfo newPlaybackInfo,
+ boolean positionDiscontinuity,
+ @Player.DiscontinuityReason int positionDiscontinuityReason,
+ @Player.TimelineChangeReason int timelineChangeReason,
+ boolean seekProcessed) {
+ boolean timelineOrManifestChanged =
+ playbackInfo.timeline != newPlaybackInfo.timeline
+ || playbackInfo.manifest != newPlaybackInfo.manifest;
+ boolean playbackStateChanged = playbackInfo.playbackState != newPlaybackInfo.playbackState;
+ boolean isLoadingChanged = playbackInfo.isLoading != newPlaybackInfo.isLoading;
+ boolean trackSelectorResultChanged =
+ this.playbackInfo.trackSelectorResult != newPlaybackInfo.trackSelectorResult;
+ playbackInfo = newPlaybackInfo;
+ if (timelineOrManifestChanged || timelineChangeReason == TIMELINE_CHANGE_REASON_PREPARED) {
+ for (Player.EventListener listener : listeners) {
+ listener.onTimelineChanged(
+ playbackInfo.timeline, playbackInfo.manifest, timelineChangeReason);
+ }
+ }
+ if (positionDiscontinuity) {
+ for (Player.EventListener listener : listeners) {
+ listener.onPositionDiscontinuity(positionDiscontinuityReason);
+ }
+ }
+ if (trackSelectorResultChanged) {
+ trackSelector.onSelectionActivated(playbackInfo.trackSelectorResult.info);
+ for (Player.EventListener listener : listeners) {
+ listener.onTracksChanged(
+ playbackInfo.trackGroups, playbackInfo.trackSelectorResult.selections);
+ }
+ }
+ if (isLoadingChanged) {
+ for (Player.EventListener listener : listeners) {
+ listener.onLoadingChanged(playbackInfo.isLoading);
+ }
+ }
+ if (playbackStateChanged) {
+ for (Player.EventListener listener : listeners) {
+ listener.onPlayerStateChanged(playWhenReady, playbackInfo.playbackState);
+ }
+ }
+ if (seekProcessed) {
+ for (Player.EventListener listener : listeners) {
+ listener.onSeekProcessed();
+ }
+ }
+ }
+
+ private long playbackInfoPositionUsToWindowPositionMs(long positionUs) {
+ long positionMs = C.usToMs(positionUs);
+ if (!playbackInfo.periodId.isAd()) {
+ playbackInfo.timeline.getPeriod(playbackInfo.periodId.periodIndex, period);
+ positionMs += period.getPositionInWindowMs();
+ }
+ return positionMs;
+ }
+
+ private boolean shouldMaskPosition() {
+ return playbackInfo.timeline.isEmpty() || pendingOperationAcks > 0;
+ }
}
diff --git a/library/core/src/main/java/com/google/android/exoplayer2/ExoPlayerImplInternal.java b/library/core/src/main/java/com/google/android/exoplayer2/ExoPlayerImplInternal.java
index 67586cc07a..ceee25af82 100644
--- a/library/core/src/main/java/com/google/android/exoplayer2/ExoPlayerImplInternal.java
+++ b/library/core/src/main/java/com/google/android/exoplayer2/ExoPlayerImplInternal.java
@@ -21,98 +21,44 @@ import android.os.Looper;
import android.os.Message;
import android.os.Process;
import android.os.SystemClock;
+import android.support.annotation.NonNull;
+import android.support.annotation.Nullable;
import android.util.Log;
import android.util.Pair;
-import com.google.android.exoplayer2.ExoPlayer.ExoPlayerMessage;
-import com.google.android.exoplayer2.MediaPeriodInfoSequence.MediaPeriodInfo;
-import com.google.android.exoplayer2.source.ClippingMediaPeriod;
+import com.google.android.exoplayer2.DefaultMediaClock.PlaybackParameterListener;
+import com.google.android.exoplayer2.Player.DiscontinuityReason;
import com.google.android.exoplayer2.source.MediaPeriod;
import com.google.android.exoplayer2.source.MediaSource;
import com.google.android.exoplayer2.source.MediaSource.MediaPeriodId;
import com.google.android.exoplayer2.source.SampleStream;
+import com.google.android.exoplayer2.source.TrackGroupArray;
import com.google.android.exoplayer2.trackselection.TrackSelection;
-import com.google.android.exoplayer2.trackselection.TrackSelectionArray;
import com.google.android.exoplayer2.trackselection.TrackSelector;
import com.google.android.exoplayer2.trackselection.TrackSelectorResult;
import com.google.android.exoplayer2.util.Assertions;
-import com.google.android.exoplayer2.util.MediaClock;
-import com.google.android.exoplayer2.util.StandaloneMediaClock;
+import com.google.android.exoplayer2.util.Clock;
+import com.google.android.exoplayer2.util.HandlerWrapper;
import com.google.android.exoplayer2.util.TraceUtil;
+import com.google.android.exoplayer2.util.Util;
import java.io.IOException;
+import java.util.ArrayList;
+import java.util.Collections;
-/**
- * Implements the internal behavior of {@link ExoPlayerImpl}.
- */
-/* package */ final class ExoPlayerImplInternal implements Handler.Callback,
- MediaPeriod.Callback, TrackSelector.InvalidationListener, MediaSource.Listener {
-
- /**
- * Playback position information which is read on the application's thread by
- * {@link ExoPlayerImpl} and read/written internally on the player's thread.
- */
- public static final class PlaybackInfo {
-
- public final MediaPeriodId periodId;
- public final long startPositionUs;
- public final long contentPositionUs;
-
- public volatile long positionUs;
- public volatile long bufferedPositionUs;
-
- public PlaybackInfo(int periodIndex, long startPositionUs) {
- this(new MediaPeriodId(periodIndex), startPositionUs);
- }
-
- public PlaybackInfo(MediaPeriodId periodId, long startPositionUs) {
- this(periodId, startPositionUs, C.TIME_UNSET);
- }
-
- public PlaybackInfo(MediaPeriodId periodId, long startPositionUs, long contentPositionUs) {
- this.periodId = periodId;
- this.startPositionUs = startPositionUs;
- this.contentPositionUs = contentPositionUs;
- positionUs = startPositionUs;
- bufferedPositionUs = startPositionUs;
- }
-
- public PlaybackInfo copyWithPeriodIndex(int periodIndex) {
- PlaybackInfo playbackInfo = new PlaybackInfo(periodId.copyWithPeriodIndex(periodIndex),
- startPositionUs, contentPositionUs);
- playbackInfo.positionUs = positionUs;
- playbackInfo.bufferedPositionUs = bufferedPositionUs;
- return playbackInfo;
- }
-
- }
-
- public static final class SourceInfo {
-
- public final Timeline timeline;
- public final Object manifest;
- public final PlaybackInfo playbackInfo;
- public final int seekAcks;
-
- public SourceInfo(Timeline timeline, Object manifest, PlaybackInfo playbackInfo, int seekAcks) {
- this.timeline = timeline;
- this.manifest = manifest;
- this.playbackInfo = playbackInfo;
- this.seekAcks = seekAcks;
- }
-
- }
+/** Implements the internal behavior of {@link ExoPlayerImpl}. */
+/* package */ final class ExoPlayerImplInternal
+ implements Handler.Callback,
+ MediaPeriod.Callback,
+ TrackSelector.InvalidationListener,
+ MediaSource.SourceInfoRefreshListener,
+ PlaybackParameterListener,
+ PlayerMessage.Sender {
private static final String TAG = "ExoPlayerImplInternal";
// External messages
- public static final int MSG_PREPARE_ACK = 0;
- public static final int MSG_STATE_CHANGED = 1;
- public static final int MSG_LOADING_CHANGED = 2;
- public static final int MSG_TRACKS_CHANGED = 3;
- public static final int MSG_SEEK_ACK = 4;
- public static final int MSG_POSITION_DISCONTINUITY = 5;
- public static final int MSG_SOURCE_INFO_REFRESHED = 6;
- public static final int MSG_PLAYBACK_PARAMETERS_CHANGED = 7;
- public static final int MSG_ERROR = 8;
+ public static final int MSG_PLAYBACK_INFO_CHANGED = 0;
+ public static final int MSG_PLAYBACK_PARAMETERS_CHANGED = 1;
+ public static final int MSG_ERROR = 2;
// Internal messages
private static final int MSG_PREPARE = 0;
@@ -120,113 +66,115 @@ import java.io.IOException;
private static final int MSG_DO_SOME_WORK = 2;
private static final int MSG_SEEK_TO = 3;
private static final int MSG_SET_PLAYBACK_PARAMETERS = 4;
- private static final int MSG_STOP = 5;
- private static final int MSG_RELEASE = 6;
- private static final int MSG_REFRESH_SOURCE_INFO = 7;
- private static final int MSG_PERIOD_PREPARED = 8;
- private static final int MSG_SOURCE_CONTINUE_LOADING_REQUESTED = 9;
- private static final int MSG_TRACK_SELECTION_INVALIDATED = 10;
- private static final int MSG_CUSTOM = 11;
+ private static final int MSG_SET_SEEK_PARAMETERS = 5;
+ private static final int MSG_STOP = 6;
+ private static final int MSG_RELEASE = 7;
+ private static final int MSG_REFRESH_SOURCE_INFO = 8;
+ private static final int MSG_PERIOD_PREPARED = 9;
+ private static final int MSG_SOURCE_CONTINUE_LOADING_REQUESTED = 10;
+ private static final int MSG_TRACK_SELECTION_INVALIDATED = 11;
private static final int MSG_SET_REPEAT_MODE = 12;
private static final int MSG_SET_SHUFFLE_ENABLED = 13;
+ private static final int MSG_SEND_MESSAGE = 14;
+ private static final int MSG_SEND_MESSAGE_TO_TARGET_THREAD = 15;
private static final int PREPARING_SOURCE_INTERVAL_MS = 10;
private static final int RENDERING_INTERVAL_MS = 10;
private static final int IDLE_INTERVAL_MS = 1000;
- /**
- * Limits the maximum number of periods to buffer ahead of the current playing period. The
- * buffering policy normally prevents buffering too far ahead, but the policy could allow too many
- * small periods to be buffered if the period count were not limited.
- */
- private static final int MAXIMUM_BUFFER_AHEAD_PERIODS = 100;
-
- /**
- * Offset added to all sample timestamps read by renderers to make them non-negative. This is
- * provided for convenience of sources that may return negative timestamps due to prerolling
- * samples from a keyframe before their first sample with timestamp zero, so it must be set to a
- * value greater than or equal to the maximum key-frame interval in seekable periods.
- */
- private static final int RENDERER_TIMESTAMP_OFFSET_US = 60000000;
-
private final Renderer[] renderers;
private final RendererCapabilities[] rendererCapabilities;
private final TrackSelector trackSelector;
+ private final TrackSelectorResult emptyTrackSelectorResult;
private final LoadControl loadControl;
- private final StandaloneMediaClock standaloneMediaClock;
- private final Handler handler;
+ private final HandlerWrapper handler;
private final HandlerThread internalPlaybackThread;
private final Handler eventHandler;
private final ExoPlayer player;
private final Timeline.Window window;
private final Timeline.Period period;
- private final MediaPeriodInfoSequence mediaPeriodInfoSequence;
+ private final long backBufferDurationUs;
+ private final boolean retainBackBufferFromKeyframe;
+ private final DefaultMediaClock mediaClock;
+ private final PlaybackInfoUpdate playbackInfoUpdate;
+ private final ArrayList pendingMessages;
+ private final Clock clock;
+ private final MediaPeriodQueue queue;
+
+ @SuppressWarnings("unused")
+ private SeekParameters seekParameters;
private PlaybackInfo playbackInfo;
- private PlaybackParameters playbackParameters;
- private Renderer rendererMediaClockSource;
- private MediaClock rendererMediaClock;
private MediaSource mediaSource;
private Renderer[] enabledRenderers;
private boolean released;
private boolean playWhenReady;
private boolean rebuffering;
- private boolean isLoading;
- private int state;
- private @Player.RepeatMode int repeatMode;
+ @Player.RepeatMode private int repeatMode;
private boolean shuffleModeEnabled;
- private int customMessagesSent;
- private int customMessagesProcessed;
- private long elapsedRealtimeUs;
- private int pendingInitialSeekCount;
- private SeekPosition pendingSeekPosition;
+ private int pendingPrepareCount;
+ private SeekPosition pendingInitialSeekPosition;
private long rendererPositionUs;
+ private int nextPendingMessageIndex;
- private MediaPeriodHolder loadingPeriodHolder;
- private MediaPeriodHolder readingPeriodHolder;
- private MediaPeriodHolder playingPeriodHolder;
-
- private Timeline timeline;
-
- public ExoPlayerImplInternal(Renderer[] renderers, TrackSelector trackSelector,
- LoadControl loadControl, boolean playWhenReady, @Player.RepeatMode int repeatMode,
- boolean shuffleModeEnabled, Handler eventHandler, PlaybackInfo playbackInfo,
- ExoPlayer player) {
+ public ExoPlayerImplInternal(
+ Renderer[] renderers,
+ TrackSelector trackSelector,
+ TrackSelectorResult emptyTrackSelectorResult,
+ LoadControl loadControl,
+ boolean playWhenReady,
+ @Player.RepeatMode int repeatMode,
+ boolean shuffleModeEnabled,
+ Handler eventHandler,
+ ExoPlayer player,
+ Clock clock) {
this.renderers = renderers;
this.trackSelector = trackSelector;
+ this.emptyTrackSelectorResult = emptyTrackSelectorResult;
this.loadControl = loadControl;
this.playWhenReady = playWhenReady;
this.repeatMode = repeatMode;
this.shuffleModeEnabled = shuffleModeEnabled;
this.eventHandler = eventHandler;
- this.state = Player.STATE_IDLE;
- this.playbackInfo = playbackInfo;
this.player = player;
+ this.clock = clock;
+ this.queue = new MediaPeriodQueue();
+ backBufferDurationUs = loadControl.getBackBufferDurationUs();
+ retainBackBufferFromKeyframe = loadControl.retainBackBufferFromKeyframe();
+
+ seekParameters = SeekParameters.DEFAULT;
+ playbackInfo =
+ new PlaybackInfo(
+ Timeline.EMPTY,
+ /* startPositionUs= */ C.TIME_UNSET,
+ TrackGroupArray.EMPTY,
+ emptyTrackSelectorResult);
+ playbackInfoUpdate = new PlaybackInfoUpdate();
rendererCapabilities = new RendererCapabilities[renderers.length];
for (int i = 0; i < renderers.length; i++) {
renderers[i].setIndex(i);
rendererCapabilities[i] = renderers[i].getCapabilities();
}
- standaloneMediaClock = new StandaloneMediaClock();
+ mediaClock = new DefaultMediaClock(this, clock);
+ pendingMessages = new ArrayList<>();
enabledRenderers = new Renderer[0];
window = new Timeline.Window();
period = new Timeline.Period();
- mediaPeriodInfoSequence = new MediaPeriodInfoSequence();
trackSelector.init(this);
- playbackParameters = PlaybackParameters.DEFAULT;
// Note: The documentation for Process.THREAD_PRIORITY_AUDIO that states "Applications can
// not normally change to this priority" is incorrect.
internalPlaybackThread = new HandlerThread("ExoPlayerImplInternal:Handler",
Process.THREAD_PRIORITY_AUDIO);
internalPlaybackThread.start();
- handler = new Handler(internalPlaybackThread.getLooper(), this);
+ handler = clock.createHandler(internalPlaybackThread.getLooper(), this);
}
- public void prepare(MediaSource mediaSource, boolean resetPosition) {
- handler.obtainMessage(MSG_PREPARE, resetPosition ? 1 : 0, 0, mediaSource)
+ public void prepare(MediaSource mediaSource, boolean resetPosition, boolean resetState) {
+ handler
+ .obtainMessage(MSG_PREPARE, resetPosition ? 1 : 0, resetState ? 1 : 0, mediaSource)
.sendToTarget();
}
@@ -251,38 +199,22 @@ import java.io.IOException;
handler.obtainMessage(MSG_SET_PLAYBACK_PARAMETERS, playbackParameters).sendToTarget();
}
- public void stop() {
- handler.sendEmptyMessage(MSG_STOP);
+ public void setSeekParameters(SeekParameters seekParameters) {
+ handler.obtainMessage(MSG_SET_SEEK_PARAMETERS, seekParameters).sendToTarget();
}
- public void sendMessages(ExoPlayerMessage... messages) {
- if (released) {
- Log.w(TAG, "Ignoring messages sent after release.");
- return;
- }
- customMessagesSent++;
- handler.obtainMessage(MSG_CUSTOM, messages).sendToTarget();
+ public void stop(boolean reset) {
+ handler.obtainMessage(MSG_STOP, reset ? 1 : 0, 0).sendToTarget();
}
- public synchronized void blockingSendMessages(ExoPlayerMessage... messages) {
+ @Override
+ public synchronized void sendMessage(PlayerMessage message) {
if (released) {
Log.w(TAG, "Ignoring messages sent after release.");
+ message.markAsProcessed(/* isDelivered= */ false);
return;
}
- int messageNumber = customMessagesSent++;
- handler.obtainMessage(MSG_CUSTOM, messages).sendToTarget();
- boolean wasInterrupted = false;
- while (customMessagesProcessed <= messageNumber) {
- try {
- wait();
- } catch (InterruptedException e) {
- wasInterrupted = true;
- }
- }
- if (wasInterrupted) {
- // Restore the interrupted status.
- Thread.currentThread().interrupt();
- }
+ handler.obtainMessage(MSG_SEND_MESSAGE, message).sendToTarget();
}
public synchronized void release() {
@@ -302,18 +234,18 @@ import java.io.IOException;
// Restore the interrupted status.
Thread.currentThread().interrupt();
}
- internalPlaybackThread.quit();
}
public Looper getPlaybackLooper() {
return internalPlaybackThread.getLooper();
}
- // MediaSource.Listener implementation.
+ // MediaSource.SourceInfoRefreshListener implementation.
@Override
- public void onSourceInfoRefreshed(Timeline timeline, Object manifest) {
- handler.obtainMessage(MSG_REFRESH_SOURCE_INFO, Pair.create(timeline, manifest)).sendToTarget();
+ public void onSourceInfoRefreshed(MediaSource source, Timeline timeline, Object manifest) {
+ handler.obtainMessage(MSG_REFRESH_SOURCE_INFO,
+ new MediaSourceRefreshInfo(source, timeline, manifest)).sendToTarget();
}
// MediaPeriod.Callback implementation.
@@ -335,6 +267,14 @@ import java.io.IOException;
handler.sendEmptyMessage(MSG_TRACK_SELECTION_INVALIDATED);
}
+ // DefaultMediaClock.PlaybackParameterListener implementation.
+
+ @Override
+ public void onPlaybackParametersChanged(PlaybackParameters playbackParameters) {
+ eventHandler.obtainMessage(MSG_PLAYBACK_PARAMETERS_CHANGED, playbackParameters).sendToTarget();
+ updateTrackSelectionPlaybackSpeed(playbackParameters.speed);
+ }
+
// Handler.Callback implementation.
@SuppressWarnings("unchecked")
@@ -342,110 +282,118 @@ import java.io.IOException;
public boolean handleMessage(Message msg) {
try {
switch (msg.what) {
- case MSG_PREPARE: {
- prepareInternal((MediaSource) msg.obj, msg.arg1 != 0);
- return true;
- }
- case MSG_SET_PLAY_WHEN_READY: {
+ case MSG_PREPARE:
+ prepareInternal(
+ (MediaSource) msg.obj,
+ /* resetPosition= */ msg.arg1 != 0,
+ /* resetState= */ msg.arg2 != 0);
+ break;
+ case MSG_SET_PLAY_WHEN_READY:
setPlayWhenReadyInternal(msg.arg1 != 0);
- return true;
- }
- case MSG_SET_REPEAT_MODE: {
+ break;
+ case MSG_SET_REPEAT_MODE:
setRepeatModeInternal(msg.arg1);
- return true;
- }
- case MSG_SET_SHUFFLE_ENABLED: {
+ break;
+ case MSG_SET_SHUFFLE_ENABLED:
setShuffleModeEnabledInternal(msg.arg1 != 0);
- return true;
- }
- case MSG_DO_SOME_WORK: {
+ break;
+ case MSG_DO_SOME_WORK:
doSomeWork();
- return true;
- }
- case MSG_SEEK_TO: {
+ break;
+ case MSG_SEEK_TO:
seekToInternal((SeekPosition) msg.obj);
- return true;
- }
- case MSG_SET_PLAYBACK_PARAMETERS: {
+ break;
+ case MSG_SET_PLAYBACK_PARAMETERS:
setPlaybackParametersInternal((PlaybackParameters) msg.obj);
- return true;
- }
- case MSG_STOP: {
- stopInternal();
- return true;
- }
- case MSG_RELEASE: {
- releaseInternal();
- return true;
- }
- case MSG_PERIOD_PREPARED: {
+ break;
+ case MSG_SET_SEEK_PARAMETERS:
+ setSeekParametersInternal((SeekParameters) msg.obj);
+ break;
+ case MSG_STOP:
+ stopInternal(/* reset= */ msg.arg1 != 0, /* acknowledgeStop= */ true);
+ break;
+ case MSG_PERIOD_PREPARED:
handlePeriodPrepared((MediaPeriod) msg.obj);
- return true;
- }
- case MSG_REFRESH_SOURCE_INFO: {
- handleSourceInfoRefreshed((Pair) msg.obj);
- return true;
- }
- case MSG_SOURCE_CONTINUE_LOADING_REQUESTED: {
+ break;
+ case MSG_REFRESH_SOURCE_INFO:
+ handleSourceInfoRefreshed((MediaSourceRefreshInfo) msg.obj);
+ break;
+ case MSG_SOURCE_CONTINUE_LOADING_REQUESTED:
handleContinueLoadingRequested((MediaPeriod) msg.obj);
- return true;
- }
- case MSG_TRACK_SELECTION_INVALIDATED: {
+ break;
+ case MSG_TRACK_SELECTION_INVALIDATED:
reselectTracksInternal();
+ break;
+ case MSG_SEND_MESSAGE:
+ sendMessageInternal((PlayerMessage) msg.obj);
+ break;
+ case MSG_SEND_MESSAGE_TO_TARGET_THREAD:
+ sendMessageToTargetThread((PlayerMessage) msg.obj);
+ break;
+ case MSG_RELEASE:
+ releaseInternal();
+ // Return immediately to not send playback info updates after release.
return true;
- }
- case MSG_CUSTOM: {
- sendMessagesInternal((ExoPlayerMessage[]) msg.obj);
- return true;
- }
default:
return false;
}
+ maybeNotifyPlaybackInfoChanged();
} catch (ExoPlaybackException e) {
- Log.e(TAG, "Renderer error.", e);
+ Log.e(TAG, "Playback error.", e);
+ stopInternal(/* reset= */ false, /* acknowledgeStop= */ false);
eventHandler.obtainMessage(MSG_ERROR, e).sendToTarget();
- stopInternal();
- return true;
+ maybeNotifyPlaybackInfoChanged();
} catch (IOException e) {
Log.e(TAG, "Source error.", e);
+ stopInternal(/* reset= */ false, /* acknowledgeStop= */ false);
eventHandler.obtainMessage(MSG_ERROR, ExoPlaybackException.createForSource(e)).sendToTarget();
- stopInternal();
- return true;
+ maybeNotifyPlaybackInfoChanged();
} catch (RuntimeException e) {
Log.e(TAG, "Internal runtime error.", e);
+ stopInternal(/* reset= */ false, /* acknowledgeStop= */ false);
eventHandler.obtainMessage(MSG_ERROR, ExoPlaybackException.createForUnexpected(e))
.sendToTarget();
- stopInternal();
- return true;
+ maybeNotifyPlaybackInfoChanged();
}
+ return true;
}
// Private methods.
private void setState(int state) {
- if (this.state != state) {
- this.state = state;
- eventHandler.obtainMessage(MSG_STATE_CHANGED, state, 0).sendToTarget();
+ if (playbackInfo.playbackState != state) {
+ playbackInfo = playbackInfo.copyWithPlaybackState(state);
}
}
private void setIsLoading(boolean isLoading) {
- if (this.isLoading != isLoading) {
- this.isLoading = isLoading;
- eventHandler.obtainMessage(MSG_LOADING_CHANGED, isLoading ? 1 : 0, 0).sendToTarget();
+ if (playbackInfo.isLoading != isLoading) {
+ playbackInfo = playbackInfo.copyWithIsLoading(isLoading);
}
}
- private void prepareInternal(MediaSource mediaSource, boolean resetPosition) {
- eventHandler.sendEmptyMessage(MSG_PREPARE_ACK);
- resetInternal(true);
- loadControl.onPrepared();
- if (resetPosition) {
- playbackInfo = new PlaybackInfo(0, C.TIME_UNSET);
+ private void maybeNotifyPlaybackInfoChanged() {
+ if (playbackInfoUpdate.hasPendingUpdate(playbackInfo)) {
+ eventHandler
+ .obtainMessage(
+ MSG_PLAYBACK_INFO_CHANGED,
+ playbackInfoUpdate.operationAcks,
+ playbackInfoUpdate.positionDiscontinuity
+ ? playbackInfoUpdate.discontinuityReason
+ : C.INDEX_UNSET,
+ playbackInfo)
+ .sendToTarget();
+ playbackInfoUpdate.reset(playbackInfo);
}
+ }
+
+ private void prepareInternal(MediaSource mediaSource, boolean resetPosition, boolean resetState) {
+ pendingPrepareCount++;
+ resetInternal(/* releaseMediaSource= */ true, resetPosition, resetState);
+ loadControl.onPrepared();
this.mediaSource = mediaSource;
- mediaSource.prepareSource(player, true, this);
setState(Player.STATE_BUFFERING);
+ mediaSource.prepareSource(player, /* isTopLevelSource= */ true, /* listener= */ this);
handler.sendEmptyMessage(MSG_DO_SOME_WORK);
}
@@ -456,10 +404,10 @@ import java.io.IOException;
stopRenderers();
updatePlaybackPositions();
} else {
- if (state == Player.STATE_READY) {
+ if (playbackInfo.playbackState == Player.STATE_READY) {
startRenderers();
handler.sendEmptyMessage(MSG_DO_SOME_WORK);
- } else if (state == Player.STATE_BUFFERING) {
+ } else if (playbackInfo.playbackState == Player.STATE_BUFFERING) {
handler.sendEmptyMessage(MSG_DO_SOME_WORK);
}
}
@@ -468,196 +416,152 @@ import java.io.IOException;
private void setRepeatModeInternal(@Player.RepeatMode int repeatMode)
throws ExoPlaybackException {
this.repeatMode = repeatMode;
- mediaPeriodInfoSequence.setRepeatMode(repeatMode);
- validateExistingPeriodHolders();
+ if (!queue.updateRepeatMode(repeatMode)) {
+ seekToCurrentPosition(/* sendDiscontinuity= */ true);
+ }
}
private void setShuffleModeEnabledInternal(boolean shuffleModeEnabled)
throws ExoPlaybackException {
this.shuffleModeEnabled = shuffleModeEnabled;
- mediaPeriodInfoSequence.setShuffleModeEnabled(shuffleModeEnabled);
- validateExistingPeriodHolders();
+ if (!queue.updateShuffleModeEnabled(shuffleModeEnabled)) {
+ seekToCurrentPosition(/* sendDiscontinuity= */ true);
+ }
}
- private void validateExistingPeriodHolders() throws ExoPlaybackException {
- // Find the last existing period holder that matches the new period order.
- MediaPeriodHolder lastValidPeriodHolder = playingPeriodHolder != null
- ? playingPeriodHolder : loadingPeriodHolder;
- if (lastValidPeriodHolder == null) {
- return;
- }
- while (true) {
- int nextPeriodIndex = timeline.getNextPeriodIndex(lastValidPeriodHolder.info.id.periodIndex,
- period, window, repeatMode);
- while (lastValidPeriodHolder.next != null
- && !lastValidPeriodHolder.info.isLastInTimelinePeriod) {
- lastValidPeriodHolder = lastValidPeriodHolder.next;
+ private void seekToCurrentPosition(boolean sendDiscontinuity) throws ExoPlaybackException {
+ // Renderers may have read from a period that's been removed. Seek back to the current
+ // position of the playing period to make sure none of the removed period is played.
+ MediaPeriodId periodId = queue.getPlayingPeriod().info.id;
+ long newPositionUs =
+ seekToPeriodPosition(periodId, playbackInfo.positionUs, /* forceDisableRenderers= */ true);
+ if (newPositionUs != playbackInfo.positionUs) {
+ playbackInfo =
+ playbackInfo.fromNewPosition(periodId, newPositionUs, playbackInfo.contentPositionUs);
+ if (sendDiscontinuity) {
+ playbackInfoUpdate.setPositionDiscontinuity(Player.DISCONTINUITY_REASON_INTERNAL);
}
- if (nextPeriodIndex == C.INDEX_UNSET || lastValidPeriodHolder.next == null
- || lastValidPeriodHolder.next.info.id.periodIndex != nextPeriodIndex) {
- break;
- }
- lastValidPeriodHolder = lastValidPeriodHolder.next;
- }
-
- // Release any period holders that don't match the new period order.
- int loadingPeriodHolderIndex = loadingPeriodHolder.index;
- int readingPeriodHolderIndex =
- readingPeriodHolder != null ? readingPeriodHolder.index : C.INDEX_UNSET;
- if (lastValidPeriodHolder.next != null) {
- releasePeriodHoldersFrom(lastValidPeriodHolder.next);
- lastValidPeriodHolder.next = null;
- }
-
- // Update the period info for the last holder, as it may now be the last period in the timeline.
- lastValidPeriodHolder.info =
- mediaPeriodInfoSequence.getUpdatedMediaPeriodInfo(lastValidPeriodHolder.info);
-
- // Handle cases where loadingPeriodHolder or readingPeriodHolder have been removed.
- boolean seenLoadingPeriodHolder = loadingPeriodHolderIndex <= lastValidPeriodHolder.index;
- if (!seenLoadingPeriodHolder) {
- loadingPeriodHolder = lastValidPeriodHolder;
- }
- boolean seenReadingPeriodHolder = readingPeriodHolderIndex != C.INDEX_UNSET
- && readingPeriodHolderIndex <= lastValidPeriodHolder.index;
- if (!seenReadingPeriodHolder && playingPeriodHolder != null) {
- // Renderers may have read from a period that's been removed. Seek back to the current
- // position of the playing period to make sure none of the removed period is played.
- MediaPeriodId periodId = playingPeriodHolder.info.id;
- long newPositionUs = seekToPeriodPosition(periodId, playbackInfo.positionUs);
- playbackInfo = new PlaybackInfo(periodId, newPositionUs, playbackInfo.contentPositionUs);
}
}
private void startRenderers() throws ExoPlaybackException {
rebuffering = false;
- standaloneMediaClock.start();
+ mediaClock.start();
for (Renderer renderer : enabledRenderers) {
renderer.start();
}
}
private void stopRenderers() throws ExoPlaybackException {
- standaloneMediaClock.stop();
+ mediaClock.stop();
for (Renderer renderer : enabledRenderers) {
ensureStopped(renderer);
}
}
private void updatePlaybackPositions() throws ExoPlaybackException {
- if (playingPeriodHolder == null) {
+ if (!queue.hasPlayingPeriod()) {
return;
}
// Update the playback position.
+ MediaPeriodHolder playingPeriodHolder = queue.getPlayingPeriod();
long periodPositionUs = playingPeriodHolder.mediaPeriod.readDiscontinuity();
if (periodPositionUs != C.TIME_UNSET) {
resetRendererPosition(periodPositionUs);
- } else {
- if (rendererMediaClockSource != null && !rendererMediaClockSource.isEnded()) {
- rendererPositionUs = rendererMediaClock.getPositionUs();
- standaloneMediaClock.setPositionUs(rendererPositionUs);
- } else {
- rendererPositionUs = standaloneMediaClock.getPositionUs();
+ // A MediaPeriod may report a discontinuity at the current playback position to ensure the
+ // renderers are flushed. Only report the discontinuity externally if the position changed.
+ if (periodPositionUs != playbackInfo.positionUs) {
+ playbackInfo = playbackInfo.fromNewPosition(playbackInfo.periodId, periodPositionUs,
+ playbackInfo.contentPositionUs);
+ playbackInfoUpdate.setPositionDiscontinuity(Player.DISCONTINUITY_REASON_INTERNAL);
}
+ } else {
+ rendererPositionUs = mediaClock.syncAndGetPositionUs();
periodPositionUs = playingPeriodHolder.toPeriodTime(rendererPositionUs);
+ maybeTriggerPendingMessages(playbackInfo.positionUs, periodPositionUs);
+ playbackInfo.positionUs = periodPositionUs;
}
- playbackInfo.positionUs = periodPositionUs;
- elapsedRealtimeUs = SystemClock.elapsedRealtime() * 1000;
// Update the buffered position.
- long bufferedPositionUs = enabledRenderers.length == 0 ? C.TIME_END_OF_SOURCE
- : playingPeriodHolder.mediaPeriod.getBufferedPositionUs();
- playbackInfo.bufferedPositionUs = bufferedPositionUs == C.TIME_END_OF_SOURCE
- ? playingPeriodHolder.info.durationUs : bufferedPositionUs;
+ playbackInfo.bufferedPositionUs =
+ enabledRenderers.length == 0
+ ? playingPeriodHolder.info.durationUs
+ : playingPeriodHolder.getBufferedPositionUs(/* convertEosToDuration= */ true);
}
private void doSomeWork() throws ExoPlaybackException, IOException {
- long operationStartTimeMs = SystemClock.elapsedRealtime();
+ long operationStartTimeMs = clock.uptimeMillis();
updatePeriods();
- if (playingPeriodHolder == null) {
+ if (!queue.hasPlayingPeriod()) {
// We're still waiting for the first period to be prepared.
maybeThrowPeriodPrepareError();
scheduleNextWork(operationStartTimeMs, PREPARING_SOURCE_INTERVAL_MS);
return;
}
+ MediaPeriodHolder playingPeriodHolder = queue.getPlayingPeriod();
TraceUtil.beginSection("doSomeWork");
updatePlaybackPositions();
- playingPeriodHolder.mediaPeriod.discardBuffer(playbackInfo.positionUs);
+ long rendererPositionElapsedRealtimeUs = SystemClock.elapsedRealtime() * 1000;
- boolean allRenderersEnded = true;
- boolean allRenderersReadyOrEnded = true;
+ playingPeriodHolder.mediaPeriod.discardBuffer(playbackInfo.positionUs - backBufferDurationUs,
+ retainBackBufferFromKeyframe);
+
+ boolean renderersEnded = true;
+ boolean renderersReadyOrEnded = true;
for (Renderer renderer : enabledRenderers) {
// TODO: Each renderer should return the maximum delay before which it wishes to be called
// again. The minimum of these values should then be used as the delay before the next
// invocation of this method.
- renderer.render(rendererPositionUs, elapsedRealtimeUs);
- allRenderersEnded = allRenderersEnded && renderer.isEnded();
- // Determine whether the renderer is ready (or ended). If it's not, throw an error that's
- // preventing the renderer from making progress, if such an error exists.
- boolean rendererReadyOrEnded = renderer.isReady() || renderer.isEnded();
+ renderer.render(rendererPositionUs, rendererPositionElapsedRealtimeUs);
+ renderersEnded = renderersEnded && renderer.isEnded();
+ // Determine whether the renderer is ready (or ended). We override to assume the renderer is
+ // ready if it needs the next sample stream. This is necessary to avoid getting stuck if
+ // tracks in the current period have uneven durations. See:
+ // https://github.com/google/ExoPlayer/issues/1874
+ boolean rendererReadyOrEnded = renderer.isReady() || renderer.isEnded()
+ || rendererWaitingForNextStream(renderer);
if (!rendererReadyOrEnded) {
renderer.maybeThrowStreamError();
}
- allRenderersReadyOrEnded = allRenderersReadyOrEnded && rendererReadyOrEnded;
+ renderersReadyOrEnded = renderersReadyOrEnded && rendererReadyOrEnded;
}
-
- if (!allRenderersReadyOrEnded) {
+ if (!renderersReadyOrEnded) {
maybeThrowPeriodPrepareError();
}
- // The standalone media clock never changes playback parameters, so just check the renderer.
- if (rendererMediaClock != null) {
- PlaybackParameters playbackParameters = rendererMediaClock.getPlaybackParameters();
- if (!playbackParameters.equals(this.playbackParameters)) {
- // TODO: Make LoadControl, period transition position projection, adaptive track selection
- // and potentially any time-related code in renderers take into account the playback speed.
- this.playbackParameters = playbackParameters;
- standaloneMediaClock.synchronize(rendererMediaClock);
- eventHandler.obtainMessage(MSG_PLAYBACK_PARAMETERS_CHANGED, playbackParameters)
- .sendToTarget();
- }
- }
-
long playingPeriodDurationUs = playingPeriodHolder.info.durationUs;
- if (allRenderersEnded
+ if (renderersEnded
&& (playingPeriodDurationUs == C.TIME_UNSET
- || playingPeriodDurationUs <= playbackInfo.positionUs)
+ || playingPeriodDurationUs <= playbackInfo.positionUs)
&& playingPeriodHolder.info.isFinal) {
setState(Player.STATE_ENDED);
stopRenderers();
- } else if (state == Player.STATE_BUFFERING) {
- boolean isNewlyReady = enabledRenderers.length > 0
- ? (allRenderersReadyOrEnded
- && loadingPeriodHolder.haveSufficientBuffer(rebuffering, rendererPositionUs))
- : isTimelineReady(playingPeriodDurationUs);
- if (isNewlyReady) {
- setState(Player.STATE_READY);
- if (playWhenReady) {
- startRenderers();
- }
- }
- } else if (state == Player.STATE_READY) {
- boolean isStillReady = enabledRenderers.length > 0 ? allRenderersReadyOrEnded
- : isTimelineReady(playingPeriodDurationUs);
- if (!isStillReady) {
- rebuffering = playWhenReady;
- setState(Player.STATE_BUFFERING);
- stopRenderers();
+ } else if (playbackInfo.playbackState == Player.STATE_BUFFERING
+ && shouldTransitionToReadyState(renderersReadyOrEnded)) {
+ setState(Player.STATE_READY);
+ if (playWhenReady) {
+ startRenderers();
}
+ } else if (playbackInfo.playbackState == Player.STATE_READY
+ && !(enabledRenderers.length == 0 ? isTimelineReady() : renderersReadyOrEnded)) {
+ rebuffering = playWhenReady;
+ setState(Player.STATE_BUFFERING);
+ stopRenderers();
}
- if (state == Player.STATE_BUFFERING) {
+ if (playbackInfo.playbackState == Player.STATE_BUFFERING) {
for (Renderer renderer : enabledRenderers) {
renderer.maybeThrowStreamError();
}
}
- if ((playWhenReady && state == Player.STATE_READY) || state == Player.STATE_BUFFERING) {
+ if ((playWhenReady && playbackInfo.playbackState == Player.STATE_READY)
+ || playbackInfo.playbackState == Player.STATE_BUFFERING) {
scheduleNextWork(operationStartTimeMs, RENDERING_INTERVAL_MS);
- } else if (enabledRenderers.length != 0 && state != Player.STATE_ENDED) {
+ } else if (enabledRenderers.length != 0 && playbackInfo.playbackState != Player.STATE_ENDED) {
scheduleNextWork(operationStartTimeMs, IDLE_INTERVAL_MS);
} else {
handler.removeMessages(MSG_DO_SOME_WORK);
@@ -668,116 +572,122 @@ import java.io.IOException;
private void scheduleNextWork(long thisOperationStartTimeMs, long intervalMs) {
handler.removeMessages(MSG_DO_SOME_WORK);
- long nextOperationStartTimeMs = thisOperationStartTimeMs + intervalMs;
- long nextOperationDelayMs = nextOperationStartTimeMs - SystemClock.elapsedRealtime();
- if (nextOperationDelayMs <= 0) {
- handler.sendEmptyMessage(MSG_DO_SOME_WORK);
- } else {
- handler.sendEmptyMessageDelayed(MSG_DO_SOME_WORK, nextOperationDelayMs);
- }
+ handler.sendEmptyMessageAtTime(MSG_DO_SOME_WORK, thisOperationStartTimeMs + intervalMs);
}
private void seekToInternal(SeekPosition seekPosition) throws ExoPlaybackException {
- if (timeline == null) {
- pendingInitialSeekCount++;
- pendingSeekPosition = seekPosition;
- return;
- }
+ playbackInfoUpdate.incrementPendingOperationAcks(/* operationAcks= */ 1);
- Pair periodPosition = resolveSeekPosition(seekPosition);
- if (periodPosition == null) {
+ MediaPeriodId periodId;
+ long periodPositionUs;
+ long contentPositionUs;
+ boolean seekPositionAdjusted;
+ Pair resolvedSeekPosition =
+ resolveSeekPosition(seekPosition, /* trySubsequentPeriods= */ true);
+ if (resolvedSeekPosition == null) {
// The seek position was valid for the timeline that it was performed into, but the
- // timeline has changed and a suitable seek position could not be resolved in the new one.
- playbackInfo = new PlaybackInfo(0, 0);
- eventHandler.obtainMessage(MSG_SEEK_ACK, 1, 0, playbackInfo).sendToTarget();
- // Set the internal position to (0,TIME_UNSET) so that a subsequent seek to (0,0) isn't
- // ignored.
- playbackInfo = new PlaybackInfo(0, C.TIME_UNSET);
- setState(Player.STATE_ENDED);
- // Reset, but retain the source so that it can still be used should a seek occur.
- resetInternal(false);
- return;
+ // timeline has changed or is not ready and a suitable seek position could not be resolved.
+ periodId = new MediaPeriodId(getFirstPeriodIndex());
+ periodPositionUs = C.TIME_UNSET;
+ contentPositionUs = C.TIME_UNSET;
+ seekPositionAdjusted = true;
+ } else {
+ // Update the resolved seek position to take ads into account.
+ int periodIndex = resolvedSeekPosition.first;
+ contentPositionUs = resolvedSeekPosition.second;
+ periodId = queue.resolveMediaPeriodIdForAds(periodIndex, contentPositionUs);
+ if (periodId.isAd()) {
+ periodPositionUs = 0;
+ seekPositionAdjusted = true;
+ } else {
+ periodPositionUs = resolvedSeekPosition.second;
+ seekPositionAdjusted = seekPosition.windowPositionUs == C.TIME_UNSET;
+ }
}
- boolean seekPositionAdjusted = seekPosition.windowPositionUs == C.TIME_UNSET;
- int periodIndex = periodPosition.first;
- long periodPositionUs = periodPosition.second;
- long contentPositionUs = periodPositionUs;
- MediaPeriodId periodId =
- mediaPeriodInfoSequence.resolvePeriodPositionForAds(periodIndex, periodPositionUs);
- if (periodId.isAd()) {
- seekPositionAdjusted = true;
- periodPositionUs = 0;
- }
try {
- if (periodId.equals(playbackInfo.periodId)
- && ((periodPositionUs / 1000) == (playbackInfo.positionUs / 1000))) {
- // Seek position equals the current position. Do nothing.
- return;
+ if (mediaSource == null || pendingPrepareCount > 0) {
+ // Save seek position for later, as we are still waiting for a prepared source.
+ pendingInitialSeekPosition = seekPosition;
+ } else if (periodPositionUs == C.TIME_UNSET) {
+ // End playback, as we didn't manage to find a valid seek position.
+ setState(Player.STATE_ENDED);
+ resetInternal(
+ /* releaseMediaSource= */ false, /* resetPosition= */ true, /* resetState= */ false);
+ } else {
+ // Execute the seek in the current media periods.
+ long newPeriodPositionUs = periodPositionUs;
+ if (periodId.equals(playbackInfo.periodId)) {
+ MediaPeriodHolder playingPeriodHolder = queue.getPlayingPeriod();
+ if (playingPeriodHolder != null && newPeriodPositionUs != 0) {
+ newPeriodPositionUs =
+ playingPeriodHolder.mediaPeriod.getAdjustedSeekPositionUs(
+ newPeriodPositionUs, seekParameters);
+ }
+ if (C.usToMs(newPeriodPositionUs) == C.usToMs(playbackInfo.positionUs)) {
+ // Seek will be performed to the current position. Do nothing.
+ periodPositionUs = playbackInfo.positionUs;
+ return;
+ }
+ }
+ newPeriodPositionUs = seekToPeriodPosition(periodId, newPeriodPositionUs);
+ seekPositionAdjusted |= periodPositionUs != newPeriodPositionUs;
+ periodPositionUs = newPeriodPositionUs;
}
- long newPeriodPositionUs = seekToPeriodPosition(periodId, periodPositionUs);
- seekPositionAdjusted |= periodPositionUs != newPeriodPositionUs;
- periodPositionUs = newPeriodPositionUs;
} finally {
- playbackInfo = new PlaybackInfo(periodId, periodPositionUs, contentPositionUs);
- eventHandler.obtainMessage(MSG_SEEK_ACK, seekPositionAdjusted ? 1 : 0, 0, playbackInfo)
- .sendToTarget();
+ playbackInfo = playbackInfo.fromNewPosition(periodId, periodPositionUs, contentPositionUs);
+ if (seekPositionAdjusted) {
+ playbackInfoUpdate.setPositionDiscontinuity(Player.DISCONTINUITY_REASON_SEEK_ADJUSTMENT);
+ }
}
}
private long seekToPeriodPosition(MediaPeriodId periodId, long periodPositionUs)
throws ExoPlaybackException {
+ // Force disable renderers if they are reading from a period other than the one being played.
+ return seekToPeriodPosition(
+ periodId, periodPositionUs, queue.getPlayingPeriod() != queue.getReadingPeriod());
+ }
+
+ private long seekToPeriodPosition(
+ MediaPeriodId periodId, long periodPositionUs, boolean forceDisableRenderers)
+ throws ExoPlaybackException {
stopRenderers();
rebuffering = false;
setState(Player.STATE_BUFFERING);
- MediaPeriodHolder newPlayingPeriodHolder = null;
- if (playingPeriodHolder == null) {
- // We're still waiting for the first period to be prepared.
- if (loadingPeriodHolder != null) {
- loadingPeriodHolder.release();
- }
- } else {
- // Clear the timeline, but keep the requested period if it is already prepared.
- MediaPeriodHolder periodHolder = playingPeriodHolder;
- while (periodHolder != null) {
- if (shouldKeepPeriodHolder(periodId, periodPositionUs, periodHolder)) {
- newPlayingPeriodHolder = periodHolder;
- } else {
- periodHolder.release();
- }
- periodHolder = periodHolder.next;
+ // Clear the timeline, but keep the requested period if it is already prepared.
+ MediaPeriodHolder oldPlayingPeriodHolder = queue.getPlayingPeriod();
+ MediaPeriodHolder newPlayingPeriodHolder = oldPlayingPeriodHolder;
+ while (newPlayingPeriodHolder != null) {
+ if (shouldKeepPeriodHolder(periodId, periodPositionUs, newPlayingPeriodHolder)) {
+ queue.removeAfter(newPlayingPeriodHolder);
+ break;
}
+ newPlayingPeriodHolder = queue.advancePlayingPeriod();
}
- // Disable all the renderers if the period being played is changing, or if the renderers are
- // reading from a period other than the one being played.
- if (playingPeriodHolder != newPlayingPeriodHolder
- || playingPeriodHolder != readingPeriodHolder) {
+ // Disable all the renderers if the period being played is changing, or if forced.
+ if (oldPlayingPeriodHolder != newPlayingPeriodHolder || forceDisableRenderers) {
for (Renderer renderer : enabledRenderers) {
- renderer.disable();
+ disableRenderer(renderer);
}
enabledRenderers = new Renderer[0];
- rendererMediaClock = null;
- rendererMediaClockSource = null;
- playingPeriodHolder = null;
+ oldPlayingPeriodHolder = null;
}
// Update the holders.
if (newPlayingPeriodHolder != null) {
- newPlayingPeriodHolder.next = null;
- loadingPeriodHolder = newPlayingPeriodHolder;
- readingPeriodHolder = newPlayingPeriodHolder;
- setPlayingPeriodHolder(newPlayingPeriodHolder);
- if (playingPeriodHolder.hasEnabledTracks) {
- periodPositionUs = playingPeriodHolder.mediaPeriod.seekToUs(periodPositionUs);
+ updatePlayingPeriodRenderers(oldPlayingPeriodHolder);
+ if (newPlayingPeriodHolder.hasEnabledTracks) {
+ periodPositionUs = newPlayingPeriodHolder.mediaPeriod.seekToUs(periodPositionUs);
+ newPlayingPeriodHolder.mediaPeriod.discardBuffer(
+ periodPositionUs - backBufferDurationUs, retainBackBufferFromKeyframe);
}
resetRendererPosition(periodPositionUs);
maybeContinueLoading();
} else {
- loadingPeriodHolder = null;
- readingPeriodHolder = null;
- playingPeriodHolder = null;
+ queue.clear(/* keepFrontPeriodUid= */ true);
resetRendererPosition(periodPositionUs);
}
@@ -785,10 +695,10 @@ import java.io.IOException;
return periodPositionUs;
}
- private boolean shouldKeepPeriodHolder(MediaPeriodId seekPeriodId, long positionUs,
- MediaPeriodHolder holder) {
+ private boolean shouldKeepPeriodHolder(
+ MediaPeriodId seekPeriodId, long positionUs, MediaPeriodHolder holder) {
if (seekPeriodId.equals(holder.info.id) && holder.prepared) {
- timeline.getPeriod(holder.info.id.periodIndex, period);
+ playbackInfo.timeline.getPeriod(holder.info.id.periodIndex, period);
int nextAdGroupIndex = period.getAdGroupIndexAfterPositionUs(positionUs);
if (nextAdGroupIndex == C.INDEX_UNSET
|| period.getAdGroupTimeUs(nextAdGroupIndex) == holder.info.endPositionUs) {
@@ -799,87 +709,252 @@ import java.io.IOException;
}
private void resetRendererPosition(long periodPositionUs) throws ExoPlaybackException {
- rendererPositionUs = playingPeriodHolder == null
- ? periodPositionUs + RENDERER_TIMESTAMP_OFFSET_US
- : playingPeriodHolder.toRendererTime(periodPositionUs);
- standaloneMediaClock.setPositionUs(rendererPositionUs);
+ rendererPositionUs =
+ !queue.hasPlayingPeriod()
+ ? periodPositionUs
+ : queue.getPlayingPeriod().toRendererTime(periodPositionUs);
+ mediaClock.resetPosition(rendererPositionUs);
for (Renderer renderer : enabledRenderers) {
renderer.resetPosition(rendererPositionUs);
}
}
private void setPlaybackParametersInternal(PlaybackParameters playbackParameters) {
- playbackParameters = rendererMediaClock != null
- ? rendererMediaClock.setPlaybackParameters(playbackParameters)
- : standaloneMediaClock.setPlaybackParameters(playbackParameters);
- this.playbackParameters = playbackParameters;
- eventHandler.obtainMessage(MSG_PLAYBACK_PARAMETERS_CHANGED, playbackParameters).sendToTarget();
+ mediaClock.setPlaybackParameters(playbackParameters);
}
- private void stopInternal() {
- resetInternal(true);
+ // Stores the seek parameters; they are read later when seek requests are resolved.
+ private void setSeekParametersInternal(SeekParameters seekParameters) {
+ this.seekParameters = seekParameters;
+ }
+
+ private void stopInternal(boolean reset, boolean acknowledgeStop) {
+ resetInternal(
+ /* releaseMediaSource= */ true, /* resetPosition= */ reset, /* resetState= */ reset);
+ playbackInfoUpdate.incrementPendingOperationAcks(
+ pendingPrepareCount + (acknowledgeStop ? 1 : 0));
+ pendingPrepareCount = 0;
loadControl.onStopped();
setState(Player.STATE_IDLE);
}
private void releaseInternal() {
- resetInternal(true);
+ resetInternal(
+ /* releaseMediaSource= */ true, /* resetPosition= */ true, /* resetState= */ true);
loadControl.onReleased();
setState(Player.STATE_IDLE);
+ internalPlaybackThread.quit();
synchronized (this) {
released = true;
notifyAll();
}
}
- private void resetInternal(boolean releaseMediaSource) {
+ // Returns the index of the first period of the first window (honoring shuffle mode),
+ // or 0 when the timeline is empty.
+ private int getFirstPeriodIndex() {
+ Timeline timeline = playbackInfo.timeline;
+ return timeline.isEmpty()
+ ? 0
+ : timeline.getWindow(timeline.getFirstWindowIndex(shuffleModeEnabled), window)
+ .firstPeriodIndex;
+ }
+
+ private void resetInternal(
+ boolean releaseMediaSource, boolean resetPosition, boolean resetState) {
handler.removeMessages(MSG_DO_SOME_WORK);
rebuffering = false;
- standaloneMediaClock.stop();
- rendererMediaClock = null;
- rendererMediaClockSource = null;
- rendererPositionUs = RENDERER_TIMESTAMP_OFFSET_US;
+ mediaClock.stop();
+ rendererPositionUs = 0;
for (Renderer renderer : enabledRenderers) {
try {
- ensureStopped(renderer);
- renderer.disable();
+ disableRenderer(renderer);
} catch (ExoPlaybackException | RuntimeException e) {
// There's nothing we can do.
Log.e(TAG, "Stop failed.", e);
}
}
enabledRenderers = new Renderer[0];
- releasePeriodHoldersFrom(playingPeriodHolder != null ? playingPeriodHolder
- : loadingPeriodHolder);
- loadingPeriodHolder = null;
- readingPeriodHolder = null;
- playingPeriodHolder = null;
+ queue.clear(/* keepFrontPeriodUid= */ !resetPosition);
setIsLoading(false);
+ if (resetPosition) {
+ pendingInitialSeekPosition = null;
+ }
+ if (resetState) {
+ queue.setTimeline(Timeline.EMPTY);
+ for (PendingMessageInfo pendingMessageInfo : pendingMessages) {
+ pendingMessageInfo.message.markAsProcessed(/* isDelivered= */ false);
+ }
+ pendingMessages.clear();
+ nextPendingMessageIndex = 0;
+ }
+ playbackInfo =
+ new PlaybackInfo(
+ resetState ? Timeline.EMPTY : playbackInfo.timeline,
+ resetState ? null : playbackInfo.manifest,
+ resetPosition ? new MediaPeriodId(getFirstPeriodIndex()) : playbackInfo.periodId,
+ // Set the start position to TIME_UNSET so that a subsequent seek to 0 isn't ignored.
+ resetPosition ? C.TIME_UNSET : playbackInfo.positionUs,
+ resetPosition ? C.TIME_UNSET : playbackInfo.contentPositionUs,
+ playbackInfo.playbackState,
+ /* isLoading= */ false,
+ resetState ? TrackGroupArray.EMPTY : playbackInfo.trackGroups,
+ resetState ? emptyTrackSelectorResult : playbackInfo.trackSelectorResult);
if (releaseMediaSource) {
if (mediaSource != null) {
- mediaSource.releaseSource();
+ mediaSource.releaseSource(/* listener= */ this);
mediaSource = null;
}
- mediaPeriodInfoSequence.setTimeline(null);
- timeline = null;
}
}
- private void sendMessagesInternal(ExoPlayerMessage[] messages) throws ExoPlaybackException {
- try {
- for (ExoPlayerMessage message : messages) {
- message.target.handleMessage(message.messageType, message.message);
+ private void sendMessageInternal(PlayerMessage message) throws ExoPlaybackException {
+ if (message.getPositionMs() == C.TIME_UNSET) {
+ // If no delivery time is specified, trigger immediate message delivery.
+ sendMessageToTarget(message);
+ } else if (mediaSource == null || pendingPrepareCount > 0) {
+ // Still waiting for initial timeline to resolve position.
+ pendingMessages.add(new PendingMessageInfo(message));
+ } else {
+ PendingMessageInfo pendingMessageInfo = new PendingMessageInfo(message);
+ if (resolvePendingMessagePosition(pendingMessageInfo)) {
+ pendingMessages.add(pendingMessageInfo);
+ // Ensure new message is inserted according to playback order.
+ Collections.sort(pendingMessages);
+ } else {
+ message.markAsProcessed(/* isDelivered= */ false);
}
- if (state == Player.STATE_READY || state == Player.STATE_BUFFERING) {
+ }
+ }
+
+ private void sendMessageToTarget(PlayerMessage message) throws ExoPlaybackException {
+ if (message.getHandler().getLooper() == handler.getLooper()) {
+ deliverMessage(message);
+ if (playbackInfo.playbackState == Player.STATE_READY
+ || playbackInfo.playbackState == Player.STATE_BUFFERING) {
// The message may have caused something to change that now requires us to do work.
handler.sendEmptyMessage(MSG_DO_SOME_WORK);
}
+ } else {
+ handler.obtainMessage(MSG_SEND_MESSAGE_TO_TARGET_THREAD, message).sendToTarget();
+ }
+ }
+
+ // Delivers the message on the thread of the message's own handler rather than the playback
+ // thread. An ExoPlaybackException thrown by the target cannot propagate back to the playback
+ // thread from here, so it is logged and rethrown as a RuntimeException on the external thread.
+ private void sendMessageToTargetThread(final PlayerMessage message) {
+ Handler handler = message.getHandler();
+ handler.post(
+ new Runnable() {
+ @Override
+ public void run() {
+ try {
+ deliverMessage(message);
+ } catch (ExoPlaybackException e) {
+ Log.e(TAG, "Unexpected error delivering message on external thread.", e);
+ throw new RuntimeException(e);
+ }
+ }
+ });
+ }
+
+ private void deliverMessage(PlayerMessage message) throws ExoPlaybackException {
+ try {
+ message.getTarget().handleMessage(message.getType(), message.getPayload());
} finally {
- synchronized (this) {
- customMessagesProcessed++;
- notifyAll();
+ message.markAsProcessed(/* isDelivered= */ true);
+ }
+ }
+
+ private void resolvePendingMessagePositions() {
+ for (int i = pendingMessages.size() - 1; i >= 0; i--) {
+ if (!resolvePendingMessagePosition(pendingMessages.get(i))) {
+ // Unable to resolve a new position for the message. Remove it.
+ pendingMessages.get(i).message.markAsProcessed(/* isDelivered= */ false);
+ pendingMessages.remove(i);
}
}
+ // Re-sort messages by playback order.
+ Collections.sort(pendingMessages);
+ }
+
+ // Resolves the message's target position against the current timeline. Returns false when no
+ // position can be resolved (the caller then discards the message). Note that subsequent-period
+ // fallback is disabled here (trySubsequentPeriods=false), unlike initial seek resolution.
+ private boolean resolvePendingMessagePosition(PendingMessageInfo pendingMessageInfo) {
+ if (pendingMessageInfo.resolvedPeriodUid == null) {
+ // Position is still unresolved. Try to find window in current timeline.
+ Pair periodPosition =
+ resolveSeekPosition(
+ new SeekPosition(
+ pendingMessageInfo.message.getTimeline(),
+ pendingMessageInfo.message.getWindowIndex(),
+ C.msToUs(pendingMessageInfo.message.getPositionMs())),
+ /* trySubsequentPeriods= */ false);
+ if (periodPosition == null) {
+ return false;
+ }
+ // Cache the period uid so the position can be re-mapped after later timeline changes.
+ pendingMessageInfo.setResolvedPosition(
+ periodPosition.first,
+ periodPosition.second,
+ playbackInfo.timeline.getPeriod(periodPosition.first, period, true).uid);
+ } else {
+ // Position has been resolved for a previous timeline. Try to find the updated period index.
+ int index = playbackInfo.timeline.getIndexOfPeriod(pendingMessageInfo.resolvedPeriodUid);
+ if (index == C.INDEX_UNSET) {
+ return false;
+ }
+ pendingMessageInfo.resolvedPeriodIndex = index;
+ }
+ return true;
+ }
+
+ // Delivers any pending messages whose resolved position lies in the half-open interval
+ // (oldPeriodPositionUs, newPeriodPositionUs] of the currently playing period. Messages are
+ // never triggered while playing an ad. nextPendingMessageIndex is a cursor into the sorted
+ // pendingMessages list and is re-synced below before any delivery.
+ private void maybeTriggerPendingMessages(long oldPeriodPositionUs, long newPeriodPositionUs)
+ throws ExoPlaybackException {
+ if (pendingMessages.isEmpty() || playbackInfo.periodId.isAd()) {
+ return;
+ }
+ // If this is the first call from the start position, include oldPeriodPositionUs in potential
+ // trigger positions.
+ if (playbackInfo.startPositionUs == oldPeriodPositionUs) {
+ oldPeriodPositionUs--;
+ }
+ // Correct next index if necessary (e.g. after seeking, timeline changes, or new messages)
+ int currentPeriodIndex = playbackInfo.periodId.periodIndex;
+ PendingMessageInfo previousInfo =
+ nextPendingMessageIndex > 0 ? pendingMessages.get(nextPendingMessageIndex - 1) : null;
+ // Walk the cursor backwards past any messages scheduled after the current position.
+ while (previousInfo != null
+ && (previousInfo.resolvedPeriodIndex > currentPeriodIndex
+ || (previousInfo.resolvedPeriodIndex == currentPeriodIndex
+ && previousInfo.resolvedPeriodTimeUs > oldPeriodPositionUs))) {
+ nextPendingMessageIndex--;
+ previousInfo =
+ nextPendingMessageIndex > 0 ? pendingMessages.get(nextPendingMessageIndex - 1) : null;
+ }
+ PendingMessageInfo nextInfo =
+ nextPendingMessageIndex < pendingMessages.size()
+ ? pendingMessages.get(nextPendingMessageIndex)
+ : null;
+ // Walk the cursor forwards past any messages scheduled at or before the current position.
+ while (nextInfo != null
+ && nextInfo.resolvedPeriodUid != null
+ && (nextInfo.resolvedPeriodIndex < currentPeriodIndex
+ || (nextInfo.resolvedPeriodIndex == currentPeriodIndex
+ && nextInfo.resolvedPeriodTimeUs <= oldPeriodPositionUs))) {
+ nextPendingMessageIndex++;
+ nextInfo =
+ nextPendingMessageIndex < pendingMessages.size()
+ ? pendingMessages.get(nextPendingMessageIndex)
+ : null;
+ }
+ // Check if any message falls within the covered time span.
+ while (nextInfo != null
+ && nextInfo.resolvedPeriodUid != null
+ && nextInfo.resolvedPeriodIndex == currentPeriodIndex
+ && nextInfo.resolvedPeriodTimeUs > oldPeriodPositionUs
+ && nextInfo.resolvedPeriodTimeUs <= newPeriodPositionUs) {
+ sendMessageToTarget(nextInfo.message);
+ if (nextInfo.message.getDeleteAfterDelivery()) {
+ pendingMessages.remove(nextPendingMessageIndex);
+ } else {
+ nextPendingMessageIndex++;
+ }
+ nextInfo =
+ nextPendingMessageIndex < pendingMessages.size()
+ ? pendingMessages.get(nextPendingMessageIndex)
+ : null;
+ }
}
private void ensureStopped(Renderer renderer) throws ExoPlaybackException {
@@ -888,20 +963,28 @@ import java.io.IOException;
}
}
+ // Disables a renderer: detaches it from the media clock, ensures it is stopped, then disables
+ // it. Centralizes the sequence so all call sites keep the media clock consistent.
+ private void disableRenderer(Renderer renderer) throws ExoPlaybackException {
+ mediaClock.onRendererDisabled(renderer);
+ ensureStopped(renderer);
+ renderer.disable();
+ }
+
private void reselectTracksInternal() throws ExoPlaybackException {
- if (playingPeriodHolder == null) {
+ if (!queue.hasPlayingPeriod()) {
// We don't have tracks yet, so we don't care.
return;
}
+ float playbackSpeed = mediaClock.getPlaybackParameters().speed;
// Reselect tracks on each period in turn, until the selection changes.
- MediaPeriodHolder periodHolder = playingPeriodHolder;
+ MediaPeriodHolder periodHolder = queue.getPlayingPeriod();
+ MediaPeriodHolder readingPeriodHolder = queue.getReadingPeriod();
boolean selectionsChangedForReadPeriod = true;
while (true) {
if (periodHolder == null || !periodHolder.prepared) {
// The reselection did not change any prepared periods.
return;
}
- if (periodHolder.selectTracks()) {
+ if (periodHolder.selectTracks(playbackSpeed)) {
// Selected tracks have changed for this period.
break;
}
@@ -914,17 +997,20 @@ import java.io.IOException;
if (selectionsChangedForReadPeriod) {
// Update streams and rebuffer for the new selection, recreating all streams if reading ahead.
- boolean recreateStreams = readingPeriodHolder != playingPeriodHolder;
- releasePeriodHoldersFrom(playingPeriodHolder.next);
- playingPeriodHolder.next = null;
- loadingPeriodHolder = playingPeriodHolder;
- readingPeriodHolder = playingPeriodHolder;
+ MediaPeriodHolder playingPeriodHolder = queue.getPlayingPeriod();
+ boolean recreateStreams = queue.removeAfter(playingPeriodHolder);
boolean[] streamResetFlags = new boolean[renderers.length];
- long periodPositionUs = playingPeriodHolder.updatePeriodTrackSelection(
- playbackInfo.positionUs, recreateStreams, streamResetFlags);
- if (periodPositionUs != playbackInfo.positionUs) {
- playbackInfo.positionUs = periodPositionUs;
+ long periodPositionUs =
+ playingPeriodHolder.applyTrackSelection(
+ playbackInfo.positionUs, recreateStreams, streamResetFlags);
+ updateLoadControlTrackSelection(
+ playingPeriodHolder.trackGroups, playingPeriodHolder.trackSelectorResult);
+ if (playbackInfo.playbackState != Player.STATE_ENDED
+ && periodPositionUs != playbackInfo.positionUs) {
+ playbackInfo = playbackInfo.fromNewPosition(playbackInfo.periodId, periodPositionUs,
+ playbackInfo.contentPositionUs);
+ playbackInfoUpdate.setPositionDiscontinuity(Player.DISCONTINUITY_REASON_INTERNAL);
resetRendererPosition(periodPositionUs);
}
@@ -940,55 +1026,90 @@ import java.io.IOException;
if (rendererWasEnabledFlags[i]) {
if (sampleStream != renderer.getStream()) {
// We need to disable the renderer.
- if (renderer == rendererMediaClockSource) {
- // The renderer is providing the media clock.
- if (sampleStream == null) {
- // The renderer won't be re-enabled. Sync standaloneMediaClock so that it can take
- // over timing responsibilities.
- standaloneMediaClock.synchronize(rendererMediaClock);
- }
- rendererMediaClock = null;
- rendererMediaClockSource = null;
- }
- ensureStopped(renderer);
- renderer.disable();
+ disableRenderer(renderer);
} else if (streamResetFlags[i]) {
// The renderer will continue to consume from its current stream, but needs to be reset.
renderer.resetPosition(rendererPositionUs);
}
}
}
- eventHandler.obtainMessage(MSG_TRACKS_CHANGED, periodHolder.trackSelectorResult)
- .sendToTarget();
+ playbackInfo =
+ playbackInfo.copyWithTrackInfo(
+ playingPeriodHolder.trackGroups, playingPeriodHolder.trackSelectorResult);
enableRenderers(rendererWasEnabledFlags, enabledRendererCount);
} else {
// Release and re-prepare/buffer periods after the one whose selection changed.
- loadingPeriodHolder = periodHolder;
- periodHolder = loadingPeriodHolder.next;
- while (periodHolder != null) {
- periodHolder.release();
- periodHolder = periodHolder.next;
- }
- loadingPeriodHolder.next = null;
- if (loadingPeriodHolder.prepared) {
- long loadingPeriodPositionUs = Math.max(loadingPeriodHolder.info.startPositionUs,
- loadingPeriodHolder.toPeriodTime(rendererPositionUs));
- loadingPeriodHolder.updatePeriodTrackSelection(loadingPeriodPositionUs, false);
+ queue.removeAfter(periodHolder);
+ if (periodHolder.prepared) {
+ long loadingPeriodPositionUs =
+ Math.max(
+ periodHolder.info.startPositionUs, periodHolder.toPeriodTime(rendererPositionUs));
+ periodHolder.applyTrackSelection(loadingPeriodPositionUs, false);
+ updateLoadControlTrackSelection(periodHolder.trackGroups, periodHolder.trackSelectorResult);
}
}
- maybeContinueLoading();
- updatePlaybackPositions();
- handler.sendEmptyMessage(MSG_DO_SOME_WORK);
+ if (playbackInfo.playbackState != Player.STATE_ENDED) {
+ maybeContinueLoading();
+ updatePlaybackPositions();
+ handler.sendEmptyMessage(MSG_DO_SOME_WORK);
+ }
}
- private boolean isTimelineReady(long playingPeriodDurationUs) {
+ // Notifies the LoadControl of the currently selected tracks so it can size its buffers.
+ private void updateLoadControlTrackSelection(
+ TrackGroupArray trackGroups, TrackSelectorResult trackSelectorResult) {
+ loadControl.onTracksSelected(renderers, trackGroups, trackSelectorResult.selections);
+ }
+
+ // Propagates a playback speed change to every non-null track selection of every period holder
+ // in the queue. Holders without a selection result yet are skipped.
+ private void updateTrackSelectionPlaybackSpeed(float playbackSpeed) {
+ MediaPeriodHolder periodHolder = queue.getFrontPeriod();
+ while (periodHolder != null) {
+ if (periodHolder.trackSelectorResult != null) {
+ TrackSelection[] trackSelections = periodHolder.trackSelectorResult.selections.getAll();
+ for (TrackSelection trackSelection : trackSelections) {
+ if (trackSelection != null) {
+ trackSelection.onPlaybackSpeed(playbackSpeed);
+ }
+ }
+ }
+ periodHolder = periodHolder.next;
+ }
+ }
+
+ // Decides whether playback should move from BUFFERING to READY. With no enabled renderers the
+ // timeline alone decides; otherwise all renderers must be ready/ended, and if still loading,
+ // the LoadControl is consulted with the buffered duration ahead of the renderer position.
+ private boolean shouldTransitionToReadyState(boolean renderersReadyOrEnded) {
+ if (enabledRenderers.length == 0) {
+ // If there are no enabled renderers, determine whether we're ready based on the timeline.
+ return isTimelineReady();
+ }
+ if (!renderersReadyOrEnded) {
+ return false;
+ }
+ if (!playbackInfo.isLoading) {
+ // Renderers are ready and we're not loading. Transition to ready, since the alternative is
+ // getting stuck waiting for additional media that's not being loaded.
+ return true;
+ }
+ // Renderers are ready and we're loading. Ask the LoadControl whether to transition.
+ MediaPeriodHolder loadingHolder = queue.getLoadingPeriod();
+ long bufferedPositionUs = loadingHolder.getBufferedPositionUs(!loadingHolder.info.isFinal);
+ return bufferedPositionUs == C.TIME_END_OF_SOURCE
+ || loadControl.shouldStartPlayback(
+ bufferedPositionUs - loadingHolder.toPeriodTime(rendererPositionUs),
+ mediaClock.getPlaybackParameters().speed,
+ rebuffering);
+ }
+
+ private boolean isTimelineReady() {
+ MediaPeriodHolder playingPeriodHolder = queue.getPlayingPeriod();
+ long playingPeriodDurationUs = playingPeriodHolder.info.durationUs;
return playingPeriodDurationUs == C.TIME_UNSET
|| playbackInfo.positionUs < playingPeriodDurationUs
|| (playingPeriodHolder.next != null
- && (playingPeriodHolder.next.prepared || playingPeriodHolder.next.info.id.isAd()));
+ && (playingPeriodHolder.next.prepared || playingPeriodHolder.next.info.id.isAd()));
}
private void maybeThrowPeriodPrepareError() throws IOException {
+ MediaPeriodHolder loadingPeriodHolder = queue.getLoadingPeriod();
+ MediaPeriodHolder readingPeriodHolder = queue.getReadingPeriod();
if (loadingPeriodHolder != null && !loadingPeriodHolder.prepared
&& (readingPeriodHolder == null || readingPeriodHolder.next == loadingPeriodHolder)) {
for (Renderer renderer : enabledRenderers) {
@@ -1000,57 +1121,72 @@ import java.io.IOException;
}
}
- private void handleSourceInfoRefreshed(Pair timelineAndManifest)
+ private void handleSourceInfoRefreshed(MediaSourceRefreshInfo sourceRefreshInfo)
throws ExoPlaybackException {
- Timeline oldTimeline = timeline;
- timeline = timelineAndManifest.first;
- mediaPeriodInfoSequence.setTimeline(timeline);
- Object manifest = timelineAndManifest.second;
+ if (sourceRefreshInfo.source != mediaSource) {
+ // Stale event.
+ return;
+ }
- if (oldTimeline == null) {
- if (pendingInitialSeekCount > 0) {
- Pair periodPosition = resolveSeekPosition(pendingSeekPosition);
- int processedInitialSeekCount = pendingInitialSeekCount;
- pendingInitialSeekCount = 0;
- pendingSeekPosition = null;
+ Timeline oldTimeline = playbackInfo.timeline;
+ Timeline timeline = sourceRefreshInfo.timeline;
+ Object manifest = sourceRefreshInfo.manifest;
+ queue.setTimeline(timeline);
+ playbackInfo = playbackInfo.copyWithTimeline(timeline, manifest);
+ resolvePendingMessagePositions();
+
+ if (pendingPrepareCount > 0) {
+ playbackInfoUpdate.incrementPendingOperationAcks(pendingPrepareCount);
+ pendingPrepareCount = 0;
+ if (pendingInitialSeekPosition != null) {
+ Pair periodPosition =
+ resolveSeekPosition(pendingInitialSeekPosition, /* trySubsequentPeriods= */ true);
+ pendingInitialSeekPosition = null;
if (periodPosition == null) {
// The seek position was valid for the timeline that it was performed into, but the
// timeline has changed and a suitable seek position could not be resolved in the new one.
- handleSourceInfoRefreshEndedPlayback(manifest, processedInitialSeekCount);
+ handleSourceInfoRefreshEndedPlayback();
} else {
int periodIndex = periodPosition.first;
long positionUs = periodPosition.second;
- MediaPeriodId periodId =
- mediaPeriodInfoSequence.resolvePeriodPositionForAds(periodIndex, positionUs);
- playbackInfo = new PlaybackInfo(periodId, periodId.isAd() ? 0 : positionUs, positionUs);
- notifySourceInfoRefresh(manifest, processedInitialSeekCount);
+ MediaPeriodId periodId = queue.resolveMediaPeriodIdForAds(periodIndex, positionUs);
+ playbackInfo =
+ playbackInfo.fromNewPosition(
+ periodId, periodId.isAd() ? 0 : positionUs, /* contentPositionUs= */ positionUs);
}
} else if (playbackInfo.startPositionUs == C.TIME_UNSET) {
if (timeline.isEmpty()) {
- handleSourceInfoRefreshEndedPlayback(manifest);
+ handleSourceInfoRefreshEndedPlayback();
} else {
- Pair defaultPosition = getPeriodPosition(0, C.TIME_UNSET);
+ Pair defaultPosition = getPeriodPosition(timeline,
+ timeline.getFirstWindowIndex(shuffleModeEnabled), C.TIME_UNSET);
int periodIndex = defaultPosition.first;
long startPositionUs = defaultPosition.second;
- MediaPeriodId periodId = mediaPeriodInfoSequence.resolvePeriodPositionForAds(periodIndex,
- startPositionUs);
- playbackInfo = new PlaybackInfo(periodId, periodId.isAd() ? 0 : startPositionUs,
- startPositionUs);
- notifySourceInfoRefresh(manifest);
+ MediaPeriodId periodId = queue.resolveMediaPeriodIdForAds(periodIndex, startPositionUs);
+ playbackInfo =
+ playbackInfo.fromNewPosition(
+ periodId,
+ periodId.isAd() ? 0 : startPositionUs,
+ /* contentPositionUs= */ startPositionUs);
}
- } else {
- notifySourceInfoRefresh(manifest);
}
return;
}
int playingPeriodIndex = playbackInfo.periodId.periodIndex;
- MediaPeriodHolder periodHolder = playingPeriodHolder != null ? playingPeriodHolder
- : loadingPeriodHolder;
- if (periodHolder == null && playingPeriodIndex >= oldTimeline.getPeriodCount()) {
- notifySourceInfoRefresh(manifest);
+ long contentPositionUs = playbackInfo.contentPositionUs;
+ if (oldTimeline.isEmpty()) {
+ // If the old timeline is empty, the period queue is also empty.
+ if (!timeline.isEmpty()) {
+ MediaPeriodId periodId =
+ queue.resolveMediaPeriodIdForAds(playingPeriodIndex, contentPositionUs);
+ playbackInfo =
+ playbackInfo.fromNewPosition(
+ periodId, periodId.isAd() ? 0 : contentPositionUs, contentPositionUs);
+ }
return;
}
+ MediaPeriodHolder periodHolder = queue.getFrontPeriod();
Object playingPeriodUid = periodHolder == null
? oldTimeline.getPeriod(playingPeriodIndex, period, true).uid : periodHolder.uid;
int periodIndex = timeline.getIndexOfPeriod(playingPeriodUid);
@@ -1060,14 +1196,15 @@ import java.io.IOException;
int newPeriodIndex = resolveSubsequentPeriod(playingPeriodIndex, oldTimeline, timeline);
if (newPeriodIndex == C.INDEX_UNSET) {
// We failed to resolve a suitable restart position.
- handleSourceInfoRefreshEndedPlayback(manifest);
+ handleSourceInfoRefreshEndedPlayback();
return;
}
// We resolved a subsequent period. Seek to the default position in the corresponding window.
- Pair defaultPosition = getPeriodPosition(
+ Pair defaultPosition = getPeriodPosition(timeline,
timeline.getPeriod(newPeriodIndex, period).windowIndex, C.TIME_UNSET);
newPeriodIndex = defaultPosition.first;
- long newPositionUs = defaultPosition.second;
+ contentPositionUs = defaultPosition.second;
+ MediaPeriodId periodId = queue.resolveMediaPeriodIdForAds(newPeriodIndex, contentPositionUs);
timeline.getPeriod(newPeriodIndex, period, true);
if (periodHolder != null) {
// Clear the index of each holder that doesn't contain the default position. If a holder
@@ -1077,18 +1214,15 @@ import java.io.IOException;
while (periodHolder.next != null) {
periodHolder = periodHolder.next;
if (periodHolder.uid.equals(newPeriodUid)) {
- periodHolder.info = mediaPeriodInfoSequence.getUpdatedMediaPeriodInfo(periodHolder.info,
- newPeriodIndex);
+ periodHolder.info = queue.getUpdatedMediaPeriodInfo(periodHolder.info, newPeriodIndex);
} else {
periodHolder.info = periodHolder.info.copyWithPeriodIndex(C.INDEX_UNSET);
}
}
}
// Actually do the seek.
- MediaPeriodId periodId = new MediaPeriodId(newPeriodIndex);
- newPositionUs = seekToPeriodPosition(periodId, newPositionUs);
- playbackInfo = new PlaybackInfo(periodId, newPositionUs);
- notifySourceInfoRefresh(manifest);
+ long seekPositionUs = seekToPeriodPosition(periodId, periodId.isAd() ? 0 : contentPositionUs);
+ playbackInfo = playbackInfo.fromNewPosition(periodId, seekPositionUs, contentPositionUs);
return;
}
@@ -1097,96 +1231,28 @@ import java.io.IOException;
playbackInfo = playbackInfo.copyWithPeriodIndex(periodIndex);
}
- if (playbackInfo.periodId.isAd()) {
- // Check that the playing ad hasn't been marked as played. If it has, skip forward.
- MediaPeriodId periodId = mediaPeriodInfoSequence.resolvePeriodPositionForAds(periodIndex,
- playbackInfo.contentPositionUs);
- if (!periodId.isAd() || periodId.adIndexInAdGroup != playbackInfo.periodId.adIndexInAdGroup) {
- long newPositionUs = seekToPeriodPosition(periodId, playbackInfo.contentPositionUs);
- long contentPositionUs = periodId.isAd() ? playbackInfo.contentPositionUs : C.TIME_UNSET;
- playbackInfo = new PlaybackInfo(periodId, newPositionUs, contentPositionUs);
- notifySourceInfoRefresh(manifest);
+ MediaPeriodId playingPeriodId = playbackInfo.periodId;
+ if (playingPeriodId.isAd()) {
+ MediaPeriodId periodId = queue.resolveMediaPeriodIdForAds(periodIndex, contentPositionUs);
+ if (!periodId.equals(playingPeriodId)) {
+ // The previously playing ad should no longer be played, so skip it.
+ long seekPositionUs =
+ seekToPeriodPosition(periodId, periodId.isAd() ? 0 : contentPositionUs);
+ playbackInfo = playbackInfo.fromNewPosition(periodId, seekPositionUs, contentPositionUs);
return;
}
}
- if (periodHolder == null) {
- // We don't have any period holders, so we're done.
- notifySourceInfoRefresh(manifest);
- return;
- }
-
- // Update the holder indices. If we find a subsequent holder that's inconsistent with the new
- // timeline then take appropriate action.
- periodHolder = updatePeriodInfo(periodHolder, periodIndex);
- while (periodHolder.next != null) {
- MediaPeriodHolder previousPeriodHolder = periodHolder;
- periodHolder = periodHolder.next;
- periodIndex = timeline.getNextPeriodIndex(periodIndex, period, window, repeatMode);
- if (periodIndex != C.INDEX_UNSET
- && periodHolder.uid.equals(timeline.getPeriod(periodIndex, period, true).uid)) {
- // The holder is consistent with the new timeline. Update its index and continue.
- periodHolder = updatePeriodInfo(periodHolder, periodIndex);
- } else {
- // The holder is inconsistent with the new timeline.
- boolean seenReadingPeriodHolder =
- readingPeriodHolder != null && readingPeriodHolder.index < periodHolder.index;
- if (!seenReadingPeriodHolder) {
- // Renderers may have read from a period that's been removed. Seek back to the current
- // position of the playing period to make sure none of the removed period is played.
- long newPositionUs =
- seekToPeriodPosition(playingPeriodHolder.info.id, playbackInfo.positionUs);
- playbackInfo = new PlaybackInfo(playingPeriodHolder.info.id, newPositionUs,
- playbackInfo.contentPositionUs);
- } else {
- // Update the loading period to be the last period that's still valid, and release all
- // subsequent periods.
- loadingPeriodHolder = previousPeriodHolder;
- loadingPeriodHolder.next = null;
- // Release the rest of the timeline.
- releasePeriodHoldersFrom(periodHolder);
- }
- break;
- }
- }
-
- notifySourceInfoRefresh(manifest);
- }
-
- private MediaPeriodHolder updatePeriodInfo(MediaPeriodHolder periodHolder, int periodIndex) {
- while (true) {
- periodHolder.info =
- mediaPeriodInfoSequence.getUpdatedMediaPeriodInfo(periodHolder.info, periodIndex);
- if (periodHolder.info.isLastInTimelinePeriod || periodHolder.next == null) {
- return periodHolder;
- }
- periodHolder = periodHolder.next;
+ if (!queue.updateQueuedPeriods(playingPeriodId, rendererPositionUs)) {
+ seekToCurrentPosition(/* sendDiscontinuity= */ false);
}
}
- private void handleSourceInfoRefreshEndedPlayback(Object manifest) {
- handleSourceInfoRefreshEndedPlayback(manifest, 0);
- }
-
- private void handleSourceInfoRefreshEndedPlayback(Object manifest,
- int processedInitialSeekCount) {
- // Set the playback position to (0,0) for notifying the eventHandler.
- playbackInfo = new PlaybackInfo(0, 0);
- notifySourceInfoRefresh(manifest, processedInitialSeekCount);
- // Set the internal position to (0,TIME_UNSET) so that a subsequent seek to (0,0) isn't ignored.
- playbackInfo = new PlaybackInfo(0, C.TIME_UNSET);
+ private void handleSourceInfoRefreshEndedPlayback() {
setState(Player.STATE_ENDED);
// Reset, but retain the source so that it can still be used should a seek occur.
- resetInternal(false);
- }
-
- private void notifySourceInfoRefresh(Object manifest) {
- notifySourceInfoRefresh(manifest, 0);
- }
-
- private void notifySourceInfoRefresh(Object manifest, int processedInitialSeekCount) {
- eventHandler.obtainMessage(MSG_SOURCE_INFO_REFRESHED,
- new SourceInfo(timeline, manifest, playbackInfo, processedInitialSeekCount)).sendToTarget();
+ resetInternal(
+ /* releaseMediaSource= */ false, /* resetPosition= */ true, /* resetState= */ false);
}
/**
@@ -1199,12 +1265,13 @@ import java.io.IOException;
* @return The index in the new timeline of the first subsequent period, or {@link C#INDEX_UNSET}
* if no such period was found.
*/
- private int resolveSubsequentPeriod(int oldPeriodIndex, Timeline oldTimeline,
- Timeline newTimeline) {
+ private int resolveSubsequentPeriod(
+ int oldPeriodIndex, Timeline oldTimeline, Timeline newTimeline) {
int newPeriodIndex = C.INDEX_UNSET;
int maxIterations = oldTimeline.getPeriodCount();
for (int i = 0; i < maxIterations && newPeriodIndex == C.INDEX_UNSET; i++) {
- oldPeriodIndex = oldTimeline.getNextPeriodIndex(oldPeriodIndex, period, window, repeatMode);
+ oldPeriodIndex = oldTimeline.getNextPeriodIndex(oldPeriodIndex, period, window, repeatMode,
+ shuffleModeEnabled);
if (oldPeriodIndex == C.INDEX_UNSET) {
// We've reached the end of the old timeline.
break;
@@ -1220,14 +1287,22 @@ import java.io.IOException;
* internal timeline.
*
* @param seekPosition The position to resolve.
+ * @param trySubsequentPeriods Whether the position can be resolved to a subsequent matching
+ * period if the original period is no longer available.
* @return The resolved position, or null if resolution was not successful.
* @throws IllegalSeekPositionException If the window index of the seek position is outside the
* bounds of the timeline.
*/
- private Pair resolveSeekPosition(SeekPosition seekPosition) {
+ private Pair resolveSeekPosition(
+ SeekPosition seekPosition, boolean trySubsequentPeriods) {
+ Timeline timeline = playbackInfo.timeline;
Timeline seekTimeline = seekPosition.timeline;
+ if (timeline.isEmpty()) {
+ // We don't have a valid timeline yet, so we can't resolve the position.
+ return null;
+ }
if (seekTimeline.isEmpty()) {
- // The application performed a blind seek without a non-empty timeline (most likely based on
+ // The application performed a blind seek with an empty timeline (most likely based on
// knowledge of what the future timeline will be). Use the internal timeline.
seekTimeline = timeline;
}
@@ -1252,11 +1327,14 @@ import java.io.IOException;
// We successfully located the period in the internal timeline.
return Pair.create(periodIndex, periodPosition.second);
}
- // Try and find a subsequent period from the seek timeline in the internal timeline.
- periodIndex = resolveSubsequentPeriod(periodPosition.first, seekTimeline, timeline);
- if (periodIndex != C.INDEX_UNSET) {
- // We found one. Map the SeekPosition onto the corresponding default position.
- return getPeriodPosition(timeline.getPeriod(periodIndex, period).windowIndex, C.TIME_UNSET);
+ if (trySubsequentPeriods) {
+ // Try and find a subsequent period from the seek timeline in the internal timeline.
+ periodIndex = resolveSubsequentPeriod(periodPosition.first, seekTimeline, timeline);
+ if (periodIndex != C.INDEX_UNSET) {
+ // We found one. Map the SeekPosition onto the corresponding default position.
+ return getPeriodPosition(
+ timeline, timeline.getPeriod(periodIndex, period).windowIndex, C.TIME_UNSET);
+ }
}
// We didn't find one. Give up.
return null;
@@ -1266,12 +1344,17 @@ import java.io.IOException;
* Calls {@link Timeline#getPeriodPosition(Timeline.Window, Timeline.Period, int, long)} using the
* current timeline.
*/
- private Pair<Integer, Long> getPeriodPosition(int windowIndex, long windowPositionUs) {
+ private Pair<Integer, Long> getPeriodPosition(
+ Timeline timeline, int windowIndex, long windowPositionUs) {
return timeline.getPeriodPosition(window, period, windowIndex, windowPositionUs);
}
private void updatePeriods() throws ExoPlaybackException, IOException {
- if (timeline == null) {
+ if (mediaSource == null) {
+ // The player has no media source yet.
+ return;
+ }
+ if (pendingPrepareCount > 0) {
// We're waiting to get information about periods.
mediaSource.maybeThrowSourceInfoRefreshError();
return;
@@ -1279,28 +1362,42 @@ import java.io.IOException;
// Update the loading period if required.
maybeUpdateLoadingPeriod();
+ MediaPeriodHolder loadingPeriodHolder = queue.getLoadingPeriod();
if (loadingPeriodHolder == null || loadingPeriodHolder.isFullyBuffered()) {
setIsLoading(false);
- } else if (loadingPeriodHolder != null && !isLoading) {
+ } else if (!playbackInfo.isLoading) {
maybeContinueLoading();
}
- if (playingPeriodHolder == null) {
+ if (!queue.hasPlayingPeriod()) {
// We're waiting for the first period to be prepared.
return;
}
- // Update the playing and reading periods.
- while (playingPeriodHolder != readingPeriodHolder
+ // Advance the playing period if necessary.
+ MediaPeriodHolder playingPeriodHolder = queue.getPlayingPeriod();
+ MediaPeriodHolder readingPeriodHolder = queue.getReadingPeriod();
+ boolean advancedPlayingPeriod = false;
+ while (playWhenReady && playingPeriodHolder != readingPeriodHolder
&& rendererPositionUs >= playingPeriodHolder.next.rendererPositionOffsetUs) {
// All enabled renderers' streams have been read to the end, and the playback position reached
// the end of the playing period, so advance playback to the next period.
- playingPeriodHolder.release();
- setPlayingPeriodHolder(playingPeriodHolder.next);
- playbackInfo = new PlaybackInfo(playingPeriodHolder.info.id,
+ if (advancedPlayingPeriod) {
+ // If we advance more than one period at a time, notify listeners after each update.
+ maybeNotifyPlaybackInfoChanged();
+ }
+ int discontinuityReason =
+ playingPeriodHolder.info.isLastInTimelinePeriod
+ ? Player.DISCONTINUITY_REASON_PERIOD_TRANSITION
+ : Player.DISCONTINUITY_REASON_AD_INSERTION;
+ MediaPeriodHolder oldPlayingPeriodHolder = playingPeriodHolder;
+ playingPeriodHolder = queue.advancePlayingPeriod();
+ updatePlayingPeriodRenderers(oldPlayingPeriodHolder);
+ playbackInfo = playbackInfo.fromNewPosition(playingPeriodHolder.info.id,
playingPeriodHolder.info.startPositionUs, playingPeriodHolder.info.contentPositionUs);
+ playbackInfoUpdate.setPositionDiscontinuity(discontinuityReason);
updatePlaybackPositions();
- eventHandler.obtainMessage(MSG_POSITION_DISCONTINUITY, playbackInfo).sendToTarget();
+ advancedPlayingPeriod = true;
}
if (readingPeriodHolder.info.isFinal) {
@@ -1317,371 +1414,217 @@ import java.io.IOException;
return;
}
+ // Advance the reading period if necessary.
+ if (readingPeriodHolder.next == null || !readingPeriodHolder.next.prepared) {
+ // We don't have a successor to advance the reading period to.
+ return;
+ }
+
for (int i = 0; i < renderers.length; i++) {
Renderer renderer = renderers[i];
SampleStream sampleStream = readingPeriodHolder.sampleStreams[i];
if (renderer.getStream() != sampleStream
|| (sampleStream != null && !renderer.hasReadStreamToEnd())) {
+ // The current reading period is still being read by at least one renderer.
return;
}
}
- if (readingPeriodHolder.next != null && readingPeriodHolder.next.prepared) {
- TrackSelectorResult oldTrackSelectorResult = readingPeriodHolder.trackSelectorResult;
- readingPeriodHolder = readingPeriodHolder.next;
- TrackSelectorResult newTrackSelectorResult = readingPeriodHolder.trackSelectorResult;
+ TrackSelectorResult oldTrackSelectorResult = readingPeriodHolder.trackSelectorResult;
+ readingPeriodHolder = queue.advanceReadingPeriod();
+ TrackSelectorResult newTrackSelectorResult = readingPeriodHolder.trackSelectorResult;
- boolean initialDiscontinuity =
- readingPeriodHolder.mediaPeriod.readDiscontinuity() != C.TIME_UNSET;
- for (int i = 0; i < renderers.length; i++) {
- Renderer renderer = renderers[i];
- TrackSelection oldSelection = oldTrackSelectorResult.selections.get(i);
- if (oldSelection == null) {
- // The renderer has no current stream and will be enabled when we play the next period.
- } else if (initialDiscontinuity) {
- // The new period starts with a discontinuity, so the renderer will play out all data then
- // be disabled and re-enabled when it starts playing the next period.
+ boolean initialDiscontinuity =
+ readingPeriodHolder.mediaPeriod.readDiscontinuity() != C.TIME_UNSET;
+ for (int i = 0; i < renderers.length; i++) {
+ Renderer renderer = renderers[i];
+ boolean rendererWasEnabled = oldTrackSelectorResult.isRendererEnabled(i);
+ if (!rendererWasEnabled) {
+ // The renderer was disabled and will be enabled when we play the next period.
+ } else if (initialDiscontinuity) {
+ // The new period starts with a discontinuity, so the renderer will play out all data then
+ // be disabled and re-enabled when it starts playing the next period.
+ renderer.setCurrentStreamFinal();
+ } else if (!renderer.isCurrentStreamFinal()) {
+ TrackSelection newSelection = newTrackSelectorResult.selections.get(i);
+ boolean newRendererEnabled = newTrackSelectorResult.isRendererEnabled(i);
+ boolean isNoSampleRenderer = rendererCapabilities[i].getTrackType() == C.TRACK_TYPE_NONE;
+ RendererConfiguration oldConfig = oldTrackSelectorResult.rendererConfigurations[i];
+ RendererConfiguration newConfig = newTrackSelectorResult.rendererConfigurations[i];
+ if (newRendererEnabled && newConfig.equals(oldConfig) && !isNoSampleRenderer) {
+ // Replace the renderer's SampleStream so the transition to playing the next period can
+ // be seamless.
+ // This should be avoided for no-sample renderer, because skipping ahead for such
+ // renderer doesn't have any benefit (the renderer does not consume the sample stream),
+ // and it will change the provided rendererOffsetUs while the renderer is still
+ // rendering from the playing media period.
+ Format[] formats = getFormats(newSelection);
+ renderer.replaceStream(formats, readingPeriodHolder.sampleStreams[i],
+ readingPeriodHolder.getRendererOffset());
+ } else {
+ // The renderer will be disabled when transitioning to playing the next period, because
+ // there's no new selection, or because a configuration change is required, or because
+ // it's a no-sample renderer for which rendererOffsetUs should be updated only when
+ // starting to play the next period. Mark the SampleStream as final to play out any
+ // remaining data.
renderer.setCurrentStreamFinal();
- } else if (!renderer.isCurrentStreamFinal()) {
- TrackSelection newSelection = newTrackSelectorResult.selections.get(i);
- RendererConfiguration oldConfig = oldTrackSelectorResult.rendererConfigurations[i];
- RendererConfiguration newConfig = newTrackSelectorResult.rendererConfigurations[i];
- if (newSelection != null && newConfig.equals(oldConfig)) {
- // Replace the renderer's SampleStream so the transition to playing the next period can
- // be seamless.
- Format[] formats = new Format[newSelection.length()];
- for (int j = 0; j < formats.length; j++) {
- formats[j] = newSelection.getFormat(j);
- }
- renderer.replaceStream(formats, readingPeriodHolder.sampleStreams[i],
- readingPeriodHolder.getRendererOffset());
- } else {
- // The renderer will be disabled when transitioning to playing the next period, either
- // because there's no new selection or because a configuration change is required. Mark
- // the SampleStream as final to play out any remaining data.
- renderer.setCurrentStreamFinal();
- }
}
}
}
}
private void maybeUpdateLoadingPeriod() throws IOException {
- MediaPeriodInfo info;
- if (loadingPeriodHolder == null) {
- info = mediaPeriodInfoSequence.getFirstMediaPeriodInfo(playbackInfo);
- } else {
- if (loadingPeriodHolder.info.isFinal || !loadingPeriodHolder.isFullyBuffered()
- || loadingPeriodHolder.info.durationUs == C.TIME_UNSET) {
- return;
+ queue.reevaluateBuffer(rendererPositionUs);
+ if (queue.shouldLoadNextMediaPeriod()) {
+ MediaPeriodInfo info = queue.getNextMediaPeriodInfo(rendererPositionUs, playbackInfo);
+ if (info == null) {
+ mediaSource.maybeThrowSourceInfoRefreshError();
+ } else {
+ Object uid = playbackInfo.timeline.getPeriod(info.id.periodIndex, period, true).uid;
+ MediaPeriod mediaPeriod =
+ queue.enqueueNextMediaPeriod(
+ rendererCapabilities,
+ trackSelector,
+ loadControl.getAllocator(),
+ mediaSource,
+ uid,
+ info);
+ mediaPeriod.prepare(this, info.startPositionUs);
+ setIsLoading(true);
}
- if (playingPeriodHolder != null) {
- int bufferAheadPeriodCount = loadingPeriodHolder.index - playingPeriodHolder.index;
- if (bufferAheadPeriodCount == MAXIMUM_BUFFER_AHEAD_PERIODS) {
- // We are already buffering the maximum number of periods ahead.
- return;
- }
- }
- info = mediaPeriodInfoSequence.getNextMediaPeriodInfo(loadingPeriodHolder.info,
- loadingPeriodHolder.getRendererOffset(), rendererPositionUs);
}
- if (info == null) {
- mediaSource.maybeThrowSourceInfoRefreshError();
- return;
- }
-
- long rendererPositionOffsetUs = loadingPeriodHolder == null
- ? RENDERER_TIMESTAMP_OFFSET_US
- : (loadingPeriodHolder.getRendererOffset() + loadingPeriodHolder.info.durationUs);
- int holderIndex = loadingPeriodHolder == null ? 0 : loadingPeriodHolder.index + 1;
- Object uid = timeline.getPeriod(info.id.periodIndex, period, true).uid;
- MediaPeriodHolder newPeriodHolder = new MediaPeriodHolder(renderers, rendererCapabilities,
- rendererPositionOffsetUs, trackSelector, loadControl, mediaSource, uid, holderIndex, info);
- if (loadingPeriodHolder != null) {
- loadingPeriodHolder.next = newPeriodHolder;
- }
- loadingPeriodHolder = newPeriodHolder;
- loadingPeriodHolder.mediaPeriod.prepare(this, info.startPositionUs);
- setIsLoading(true);
}
- private void handlePeriodPrepared(MediaPeriod period) throws ExoPlaybackException {
- if (loadingPeriodHolder == null || loadingPeriodHolder.mediaPeriod != period) {
+ private void handlePeriodPrepared(MediaPeriod mediaPeriod) throws ExoPlaybackException {
+ if (!queue.isLoading(mediaPeriod)) {
// Stale event.
return;
}
- loadingPeriodHolder.handlePrepared();
- if (playingPeriodHolder == null) {
+ MediaPeriodHolder loadingPeriodHolder = queue.getLoadingPeriod();
+ loadingPeriodHolder.handlePrepared(mediaClock.getPlaybackParameters().speed);
+ updateLoadControlTrackSelection(
+ loadingPeriodHolder.trackGroups, loadingPeriodHolder.trackSelectorResult);
+ if (!queue.hasPlayingPeriod()) {
// This is the first prepared period, so start playing it.
- readingPeriodHolder = loadingPeriodHolder;
- resetRendererPosition(readingPeriodHolder.info.startPositionUs);
- setPlayingPeriodHolder(readingPeriodHolder);
+ MediaPeriodHolder playingPeriodHolder = queue.advancePlayingPeriod();
+ resetRendererPosition(playingPeriodHolder.info.startPositionUs);
+ updatePlayingPeriodRenderers(/* oldPlayingPeriodHolder= */ null);
}
maybeContinueLoading();
}
- private void handleContinueLoadingRequested(MediaPeriod period) {
- if (loadingPeriodHolder == null || loadingPeriodHolder.mediaPeriod != period) {
+ private void handleContinueLoadingRequested(MediaPeriod mediaPeriod) {
+ if (!queue.isLoading(mediaPeriod)) {
// Stale event.
return;
}
+ queue.reevaluateBuffer(rendererPositionUs);
maybeContinueLoading();
}
private void maybeContinueLoading() {
- boolean continueLoading = loadingPeriodHolder.shouldContinueLoading(rendererPositionUs);
+ MediaPeriodHolder loadingPeriodHolder = queue.getLoadingPeriod();
+ long nextLoadPositionUs = loadingPeriodHolder.getNextLoadPositionUs();
+ if (nextLoadPositionUs == C.TIME_END_OF_SOURCE) {
+ setIsLoading(false);
+ return;
+ }
+ long bufferedDurationUs =
+ nextLoadPositionUs - loadingPeriodHolder.toPeriodTime(rendererPositionUs);
+ boolean continueLoading =
+ loadControl.shouldContinueLoading(
+ bufferedDurationUs, mediaClock.getPlaybackParameters().speed);
setIsLoading(continueLoading);
if (continueLoading) {
loadingPeriodHolder.continueLoading(rendererPositionUs);
}
}
- private void releasePeriodHoldersFrom(MediaPeriodHolder periodHolder) {
- while (periodHolder != null) {
- periodHolder.release();
- periodHolder = periodHolder.next;
- }
- }
-
- private void setPlayingPeriodHolder(MediaPeriodHolder periodHolder) throws ExoPlaybackException {
- if (playingPeriodHolder == periodHolder) {
+ private void updatePlayingPeriodRenderers(@Nullable MediaPeriodHolder oldPlayingPeriodHolder)
+ throws ExoPlaybackException {
+ MediaPeriodHolder newPlayingPeriodHolder = queue.getPlayingPeriod();
+ if (newPlayingPeriodHolder == null || oldPlayingPeriodHolder == newPlayingPeriodHolder) {
return;
}
-
int enabledRendererCount = 0;
boolean[] rendererWasEnabledFlags = new boolean[renderers.length];
for (int i = 0; i < renderers.length; i++) {
Renderer renderer = renderers[i];
rendererWasEnabledFlags[i] = renderer.getState() != Renderer.STATE_DISABLED;
- TrackSelection newSelection = periodHolder.trackSelectorResult.selections.get(i);
- if (newSelection != null) {
+ if (newPlayingPeriodHolder.trackSelectorResult.isRendererEnabled(i)) {
enabledRendererCount++;
}
- if (rendererWasEnabledFlags[i] && (newSelection == null
- || (renderer.isCurrentStreamFinal()
- && renderer.getStream() == playingPeriodHolder.sampleStreams[i]))) {
+ if (rendererWasEnabledFlags[i]
+ && (!newPlayingPeriodHolder.trackSelectorResult.isRendererEnabled(i)
+ || (renderer.isCurrentStreamFinal()
+ && renderer.getStream() == oldPlayingPeriodHolder.sampleStreams[i]))) {
// The renderer should be disabled before playing the next period, either because it's not
// needed to play the next period, or because we need to re-enable it as its current stream
// is final and it's not reading ahead.
- if (renderer == rendererMediaClockSource) {
- // Sync standaloneMediaClock so that it can take over timing responsibilities.
- standaloneMediaClock.synchronize(rendererMediaClock);
- rendererMediaClock = null;
- rendererMediaClockSource = null;
- }
- ensureStopped(renderer);
- renderer.disable();
+ disableRenderer(renderer);
}
}
-
- playingPeriodHolder = periodHolder;
- eventHandler.obtainMessage(MSG_TRACKS_CHANGED, periodHolder.trackSelectorResult).sendToTarget();
+ playbackInfo =
+ playbackInfo.copyWithTrackInfo(
+ newPlayingPeriodHolder.trackGroups, newPlayingPeriodHolder.trackSelectorResult);
enableRenderers(rendererWasEnabledFlags, enabledRendererCount);
}
- private void enableRenderers(boolean[] rendererWasEnabledFlags, int enabledRendererCount)
+ private void enableRenderers(boolean[] rendererWasEnabledFlags, int totalEnabledRendererCount)
throws ExoPlaybackException {
- enabledRenderers = new Renderer[enabledRendererCount];
- enabledRendererCount = 0;
+ enabledRenderers = new Renderer[totalEnabledRendererCount];
+ int enabledRendererCount = 0;
+ MediaPeriodHolder playingPeriodHolder = queue.getPlayingPeriod();
for (int i = 0; i < renderers.length; i++) {
- Renderer renderer = renderers[i];
- TrackSelection newSelection = playingPeriodHolder.trackSelectorResult.selections.get(i);
- if (newSelection != null) {
- enabledRenderers[enabledRendererCount++] = renderer;
- if (renderer.getState() == Renderer.STATE_DISABLED) {
- RendererConfiguration rendererConfiguration =
- playingPeriodHolder.trackSelectorResult.rendererConfigurations[i];
- // The renderer needs enabling with its new track selection.
- boolean playing = playWhenReady && state == Player.STATE_READY;
- // Consider as joining only if the renderer was previously disabled.
- boolean joining = !rendererWasEnabledFlags[i] && playing;
- // Build an array of formats contained by the selection.
- Format[] formats = new Format[newSelection.length()];
- for (int j = 0; j < formats.length; j++) {
- formats[j] = newSelection.getFormat(j);
- }
- // Enable the renderer.
- renderer.enable(rendererConfiguration, formats, playingPeriodHolder.sampleStreams[i],
- rendererPositionUs, joining, playingPeriodHolder.getRendererOffset());
- MediaClock mediaClock = renderer.getMediaClock();
- if (mediaClock != null) {
- if (rendererMediaClock != null) {
- throw ExoPlaybackException.createForUnexpected(
- new IllegalStateException("Multiple renderer media clocks enabled."));
- }
- rendererMediaClock = mediaClock;
- rendererMediaClockSource = renderer;
- rendererMediaClock.setPlaybackParameters(playbackParameters);
- }
- // Start the renderer if playing.
- if (playing) {
- renderer.start();
- }
- }
+ if (playingPeriodHolder.trackSelectorResult.isRendererEnabled(i)) {
+ enableRenderer(i, rendererWasEnabledFlags[i], enabledRendererCount++);
}
}
}
- /**
- * Holds a {@link MediaPeriod} with information required to play it as part of a timeline.
- */
- private static final class MediaPeriodHolder {
-
- public final MediaPeriod mediaPeriod;
- public final Object uid;
- public final int index;
- public final SampleStream[] sampleStreams;
- public final boolean[] mayRetainStreamFlags;
- public final long rendererPositionOffsetUs;
-
- public MediaPeriodInfo info;
- public boolean prepared;
- public boolean hasEnabledTracks;
- public MediaPeriodHolder next;
- public TrackSelectorResult trackSelectorResult;
-
- private final Renderer[] renderers;
- private final RendererCapabilities[] rendererCapabilities;
- private final TrackSelector trackSelector;
- private final LoadControl loadControl;
- private final MediaSource mediaSource;
-
- private TrackSelectorResult periodTrackSelectorResult;
-
- public MediaPeriodHolder(Renderer[] renderers, RendererCapabilities[] rendererCapabilities,
- long rendererPositionOffsetUs, TrackSelector trackSelector, LoadControl loadControl,
- MediaSource mediaSource, Object periodUid, int index, MediaPeriodInfo info) {
- this.renderers = renderers;
- this.rendererCapabilities = rendererCapabilities;
- this.rendererPositionOffsetUs = rendererPositionOffsetUs;
- this.trackSelector = trackSelector;
- this.loadControl = loadControl;
- this.mediaSource = mediaSource;
- this.uid = Assertions.checkNotNull(periodUid);
- this.index = index;
- this.info = info;
- sampleStreams = new SampleStream[renderers.length];
- mayRetainStreamFlags = new boolean[renderers.length];
- MediaPeriod mediaPeriod = mediaSource.createPeriod(info.id, loadControl.getAllocator());
- if (info.endPositionUs != C.TIME_END_OF_SOURCE) {
- ClippingMediaPeriod clippingMediaPeriod = new ClippingMediaPeriod(mediaPeriod, true);
- clippingMediaPeriod.setClipping(0, info.endPositionUs);
- mediaPeriod = clippingMediaPeriod;
- }
- this.mediaPeriod = mediaPeriod;
- }
-
- public long toRendererTime(long periodTimeUs) {
- return periodTimeUs + getRendererOffset();
- }
-
- public long toPeriodTime(long rendererTimeUs) {
- return rendererTimeUs - getRendererOffset();
- }
-
- public long getRendererOffset() {
- return index == 0 ? rendererPositionOffsetUs
- : (rendererPositionOffsetUs - info.startPositionUs);
- }
-
- public boolean isFullyBuffered() {
- return prepared
- && (!hasEnabledTracks || mediaPeriod.getBufferedPositionUs() == C.TIME_END_OF_SOURCE);
- }
-
- public boolean haveSufficientBuffer(boolean rebuffering, long rendererPositionUs) {
- long bufferedPositionUs = !prepared ? info.startPositionUs
- : mediaPeriod.getBufferedPositionUs();
- if (bufferedPositionUs == C.TIME_END_OF_SOURCE) {
- if (info.isFinal) {
- return true;
- }
- bufferedPositionUs = info.durationUs;
- }
- return loadControl.shouldStartPlayback(bufferedPositionUs - toPeriodTime(rendererPositionUs),
- rebuffering);
- }
-
- public void handlePrepared() throws ExoPlaybackException {
- prepared = true;
- selectTracks();
- long newStartPositionUs = updatePeriodTrackSelection(info.startPositionUs, false);
- info = info.copyWithStartPositionUs(newStartPositionUs);
- }
-
- public boolean shouldContinueLoading(long rendererPositionUs) {
- long nextLoadPositionUs = !prepared ? 0 : mediaPeriod.getNextLoadPositionUs();
- if (nextLoadPositionUs == C.TIME_END_OF_SOURCE) {
- return false;
- } else {
- long loadingPeriodPositionUs = toPeriodTime(rendererPositionUs);
- long bufferedDurationUs = nextLoadPositionUs - loadingPeriodPositionUs;
- return loadControl.shouldContinueLoading(bufferedDurationUs);
+ private void enableRenderer(
+ int rendererIndex, boolean wasRendererEnabled, int enabledRendererIndex)
+ throws ExoPlaybackException {
+ MediaPeriodHolder playingPeriodHolder = queue.getPlayingPeriod();
+ Renderer renderer = renderers[rendererIndex];
+ enabledRenderers[enabledRendererIndex] = renderer;
+ if (renderer.getState() == Renderer.STATE_DISABLED) {
+ RendererConfiguration rendererConfiguration =
+ playingPeriodHolder.trackSelectorResult.rendererConfigurations[rendererIndex];
+ TrackSelection newSelection = playingPeriodHolder.trackSelectorResult.selections.get(
+ rendererIndex);
+ Format[] formats = getFormats(newSelection);
+ // The renderer needs enabling with its new track selection.
+ boolean playing = playWhenReady && playbackInfo.playbackState == Player.STATE_READY;
+ // Consider as joining only if the renderer was previously disabled.
+ boolean joining = !wasRendererEnabled && playing;
+ // Enable the renderer.
+ renderer.enable(rendererConfiguration, formats,
+ playingPeriodHolder.sampleStreams[rendererIndex], rendererPositionUs,
+ joining, playingPeriodHolder.getRendererOffset());
+ mediaClock.onRendererEnabled(renderer);
+ // Start the renderer if playing.
+ if (playing) {
+ renderer.start();
}
}
+ }
- public void continueLoading(long rendererPositionUs) {
- long loadingPeriodPositionUs = toPeriodTime(rendererPositionUs);
- mediaPeriod.continueLoading(loadingPeriodPositionUs);
+ private boolean rendererWaitingForNextStream(Renderer renderer) {
+ MediaPeriodHolder readingPeriodHolder = queue.getReadingPeriod();
+ return readingPeriodHolder.next != null && readingPeriodHolder.next.prepared
+ && renderer.hasReadStreamToEnd();
+ }
+
+ @NonNull
+ private static Format[] getFormats(TrackSelection newSelection) {
+ // Build an array of formats contained by the selection.
+ int length = newSelection != null ? newSelection.length() : 0;
+ Format[] formats = new Format[length];
+ for (int i = 0; i < length; i++) {
+ formats[i] = newSelection.getFormat(i);
}
-
- public boolean selectTracks() throws ExoPlaybackException {
- TrackSelectorResult selectorResult = trackSelector.selectTracks(rendererCapabilities,
- mediaPeriod.getTrackGroups());
- if (selectorResult.isEquivalent(periodTrackSelectorResult)) {
- return false;
- }
- trackSelectorResult = selectorResult;
- return true;
- }
-
- public long updatePeriodTrackSelection(long positionUs, boolean forceRecreateStreams) {
- return updatePeriodTrackSelection(positionUs, forceRecreateStreams,
- new boolean[renderers.length]);
- }
-
- public long updatePeriodTrackSelection(long positionUs, boolean forceRecreateStreams,
- boolean[] streamResetFlags) {
- TrackSelectionArray trackSelections = trackSelectorResult.selections;
- for (int i = 0; i < trackSelections.length; i++) {
- mayRetainStreamFlags[i] = !forceRecreateStreams
- && trackSelectorResult.isEquivalent(periodTrackSelectorResult, i);
- }
-
- // Disable streams on the period and get new streams for updated/newly-enabled tracks.
- positionUs = mediaPeriod.selectTracks(trackSelections.getAll(), mayRetainStreamFlags,
- sampleStreams, streamResetFlags, positionUs);
- periodTrackSelectorResult = trackSelectorResult;
-
- // Update whether we have enabled tracks and sanity check the expected streams are non-null.
- hasEnabledTracks = false;
- for (int i = 0; i < sampleStreams.length; i++) {
- if (sampleStreams[i] != null) {
- Assertions.checkState(trackSelections.get(i) != null);
- hasEnabledTracks = true;
- } else {
- Assertions.checkState(trackSelections.get(i) == null);
- }
- }
-
- // The track selection has changed.
- loadControl.onTracksSelected(renderers, trackSelectorResult.groups, trackSelections);
- return positionUs;
- }
-
- public void release() {
- try {
- if (info.endPositionUs != C.TIME_END_OF_SOURCE) {
- mediaSource.releasePeriod(((ClippingMediaPeriod) mediaPeriod).mediaPeriod);
- } else {
- mediaSource.releasePeriod(mediaPeriod);
- }
- } catch (RuntimeException e) {
- // There's nothing we can do.
- Log.e(TAG, "Period release failed.", e);
- }
- }
-
+ return formats;
}
private static final class SeekPosition {
@@ -1695,7 +1638,90 @@ import java.io.IOException;
this.windowIndex = windowIndex;
this.windowPositionUs = windowPositionUs;
}
+ }
+ private static final class PendingMessageInfo implements Comparable<PendingMessageInfo> {
+
+ public final PlayerMessage message;
+
+ public int resolvedPeriodIndex;
+ public long resolvedPeriodTimeUs;
+ public @Nullable Object resolvedPeriodUid;
+
+ public PendingMessageInfo(PlayerMessage message) {
+ this.message = message;
+ }
+
+ public void setResolvedPosition(int periodIndex, long periodTimeUs, Object periodUid) {
+ resolvedPeriodIndex = periodIndex;
+ resolvedPeriodTimeUs = periodTimeUs;
+ resolvedPeriodUid = periodUid;
+ }
+
+ @Override
+ public int compareTo(@NonNull PendingMessageInfo other) {
+ if ((resolvedPeriodUid == null) != (other.resolvedPeriodUid == null)) {
+ // PendingMessageInfos with a resolved period position are always smaller.
+ return resolvedPeriodUid != null ? -1 : 1;
+ }
+ if (resolvedPeriodUid == null) {
+ // Don't sort message with unresolved positions.
+ return 0;
+ }
+ // Sort resolved media times by period index and then by period position.
+ int comparePeriodIndex = resolvedPeriodIndex - other.resolvedPeriodIndex;
+ if (comparePeriodIndex != 0) {
+ return comparePeriodIndex;
+ }
+ return Util.compareLong(resolvedPeriodTimeUs, other.resolvedPeriodTimeUs);
+ }
+ }
+
+ private static final class MediaSourceRefreshInfo {
+
+ public final MediaSource source;
+ public final Timeline timeline;
+ public final Object manifest;
+
+ public MediaSourceRefreshInfo(MediaSource source, Timeline timeline, Object manifest) {
+ this.source = source;
+ this.timeline = timeline;
+ this.manifest = manifest;
+ }
+ }
+
+ private static final class PlaybackInfoUpdate {
+
+ private PlaybackInfo lastPlaybackInfo;
+ private int operationAcks;
+ private boolean positionDiscontinuity;
+ private @DiscontinuityReason int discontinuityReason;
+
+ public boolean hasPendingUpdate(PlaybackInfo playbackInfo) {
+ return playbackInfo != lastPlaybackInfo || operationAcks > 0 || positionDiscontinuity;
+ }
+
+ public void reset(PlaybackInfo playbackInfo) {
+ lastPlaybackInfo = playbackInfo;
+ operationAcks = 0;
+ positionDiscontinuity = false;
+ }
+
+ public void incrementPendingOperationAcks(int operationAcks) {
+ this.operationAcks += operationAcks;
+ }
+
+ public void setPositionDiscontinuity(@DiscontinuityReason int discontinuityReason) {
+ if (positionDiscontinuity
+ && this.discontinuityReason != Player.DISCONTINUITY_REASON_INTERNAL) {
+ // We always prefer non-internal discontinuity reasons. We also assume that we won't report
+ // more than one non-internal discontinuity per message iteration.
+ Assertions.checkArgument(discontinuityReason == Player.DISCONTINUITY_REASON_INTERNAL);
+ return;
+ }
+ positionDiscontinuity = true;
+ this.discontinuityReason = discontinuityReason;
+ }
}
}
diff --git a/library/core/src/main/java/com/google/android/exoplayer2/ExoPlayerLibraryInfo.java b/library/core/src/main/java/com/google/android/exoplayer2/ExoPlayerLibraryInfo.java
index 33f992964a..98d5fe91b7 100644
--- a/library/core/src/main/java/com/google/android/exoplayer2/ExoPlayerLibraryInfo.java
+++ b/library/core/src/main/java/com/google/android/exoplayer2/ExoPlayerLibraryInfo.java
@@ -27,27 +27,23 @@ public final class ExoPlayerLibraryInfo {
*/
public static final String TAG = "ExoPlayer";
- /**
- * The version of the library expressed as a string, for example "1.2.3".
- */
+ /** The version of the library expressed as a string, for example "1.2.3". */
// Intentionally hardcoded. Do not derive from other constants (e.g. VERSION_INT) or vice versa.
- public static final String VERSION = "2.5.1";
+ public static final String VERSION = "2.8.0";
- /**
- * The version of the library expressed as {@code "ExoPlayerLib/" + VERSION}.
- */
+ /** The version of the library expressed as {@code "ExoPlayerLib/" + VERSION}. */
// Intentionally hardcoded. Do not derive from other constants (e.g. VERSION) or vice versa.
- public static final String VERSION_SLASHY = "ExoPlayerLib/2.5.1";
+ public static final String VERSION_SLASHY = "ExoPlayerLib/2.8.0";
/**
* The version of the library expressed as an integer, for example 1002003.
- *
- * Three digits are used for each component of {@link #VERSION}. For example "1.2.3" has the
+ *
+ * <p>Three digits are used for each component of {@link #VERSION}. For example "1.2.3" has the
* corresponding integer version 1002003 (001-002-003), and "123.45.6" has the corresponding
* integer version 123045006 (123-045-006).
*/
// Intentionally hardcoded. Do not derive from other constants (e.g. VERSION) or vice versa.
- public static final int VERSION_INT = 2005001;
+ public static final int VERSION_INT = 2008000;
/**
* Whether the library was compiled with {@link com.google.android.exoplayer2.util.Assertions}
diff --git a/library/core/src/main/java/com/google/android/exoplayer2/Format.java b/library/core/src/main/java/com/google/android/exoplayer2/Format.java
index c6be2e2eba..61d416da09 100644
--- a/library/core/src/main/java/com/google/android/exoplayer2/Format.java
+++ b/library/core/src/main/java/com/google/android/exoplayer2/Format.java
@@ -15,17 +15,14 @@
*/
package com.google.android.exoplayer2;
-import android.annotation.SuppressLint;
-import android.annotation.TargetApi;
-import android.media.MediaFormat;
import android.os.Parcel;
import android.os.Parcelable;
+import android.support.annotation.Nullable;
import com.google.android.exoplayer2.drm.DrmInitData;
import com.google.android.exoplayer2.metadata.Metadata;
import com.google.android.exoplayer2.util.MimeTypes;
import com.google.android.exoplayer2.util.Util;
import com.google.android.exoplayer2.video.ColorInfo;
-import java.nio.ByteBuffer;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collections;
@@ -47,29 +44,21 @@ public final class Format implements Parcelable {
*/
public static final long OFFSET_SAMPLE_RELATIVE = Long.MAX_VALUE;
- /**
- * An identifier for the format, or null if unknown or not applicable.
- */
- public final String id;
+ /** An identifier for the format, or null if unknown or not applicable. */
+ public final @Nullable String id;
/**
* The average bandwidth in bits per second, or {@link #NO_VALUE} if unknown or not applicable.
*/
public final int bitrate;
- /**
- * Codecs of the format as described in RFC 6381, or null if unknown or not applicable.
- */
- public final String codecs;
- /**
- * Metadata, or null if unknown or not applicable.
- */
- public final Metadata metadata;
+ /** Codecs of the format as described in RFC 6381, or null if unknown or not applicable. */
+ public final @Nullable String codecs;
+ /** Metadata, or null if unknown or not applicable. */
+ public final @Nullable Metadata metadata;
// Container specific.
- /**
- * The mime type of the container, or null if unknown or not applicable.
- */
- public final String containerMimeType;
+ /** The mime type of the container, or null if unknown or not applicable. */
+ public final @Nullable String containerMimeType;
// Elementary stream specific.
@@ -77,7 +66,7 @@ public final class Format implements Parcelable {
* The mime type of the elementary stream (i.e. the individual samples), or null if unknown or not
* applicable.
*/
- public final String sampleMimeType;
+ public final @Nullable String sampleMimeType;
/**
* The maximum size of a buffer of data (typically one sample), or {@link #NO_VALUE} if unknown or
* not applicable.
@@ -88,10 +77,8 @@ public final class Format implements Parcelable {
* if initialization data is not required.
*/
public final List initializationData;
- /**
- * DRM initialization data if the stream is protected, or null otherwise.
- */
- public final DrmInitData drmInitData;
+ /** DRM initialization data if the stream is protected, or null otherwise. */
+ public final @Nullable DrmInitData drmInitData;
// Video specific.
@@ -109,14 +96,10 @@ public final class Format implements Parcelable {
public final float frameRate;
/**
* The clockwise rotation that should be applied to the video for it to be rendered in the correct
- * orientation, or {@link #NO_VALUE} if unknown or not applicable. Only 0, 90, 180 and 270 are
- * supported.
+ * orientation, or 0 if unknown or not applicable. Only 0, 90, 180 and 270 are supported.
*/
public final int rotationDegrees;
- /**
- * The width to height ratio of pixels in the video, or {@link #NO_VALUE} if unknown or not
- * applicable.
- */
+ /** The width to height ratio of pixels in the video, or 1.0 if unknown or not applicable. */
public final float pixelWidthHeightRatio;
/**
* The stereo layout for 360/3D/VR video, or {@link #NO_VALUE} if not applicable. Valid stereo
@@ -125,14 +108,10 @@ public final class Format implements Parcelable {
*/
@C.StereoMode
public final int stereoMode;
- /**
- * The projection data for 360/VR video, or null if not applicable.
- */
- public final byte[] projectionData;
- /**
- * The color metadata associated with the video, helps with accurate color reproduction.
- */
- public final ColorInfo colorInfo;
+ /** The projection data for 360/VR video, or null if not applicable. */
+ public final @Nullable byte[] projectionData;
+ /** The color metadata associated with the video, helps with accurate color reproduction. */
+ public final @Nullable ColorInfo colorInfo;
// Audio specific.
@@ -153,11 +132,12 @@ public final class Format implements Parcelable {
@C.PcmEncoding
public final int pcmEncoding;
/**
- * The number of samples to trim from the start of the decoded audio stream.
+ * The number of frames to trim from the start of the decoded audio stream, or 0 if not
+ * applicable.
*/
public final int encoderDelay;
/**
- * The number of samples to trim from the end of the decoded audio stream.
+ * The number of frames to trim from the end of the decoded audio stream, or 0 if not applicable.
*/
public final int encoderPadding;
@@ -178,10 +158,8 @@ public final class Format implements Parcelable {
@C.SelectionFlags
public final int selectionFlags;
- /**
- * The language, or null if unknown or not applicable.
- */
- public final String language;
+ /** The language, or null if unknown or not applicable. */
+ public final @Nullable String language;
/**
* The Accessibility channel, or {@link #NO_VALUE} if not known or applicable.
@@ -193,36 +171,72 @@ public final class Format implements Parcelable {
// Video.
- public static Format createVideoContainerFormat(String id, String containerMimeType,
- String sampleMimeType, String codecs, int bitrate, int width, int height,
- float frameRate, List initializationData, @C.SelectionFlags int selectionFlags) {
+ public static Format createVideoContainerFormat(
+ @Nullable String id,
+ @Nullable String containerMimeType,
+ String sampleMimeType,
+ String codecs,
+ int bitrate,
+ int width,
+ int height,
+ float frameRate,
+ List initializationData,
+ @C.SelectionFlags int selectionFlags) {
return new Format(id, containerMimeType, sampleMimeType, codecs, bitrate, NO_VALUE, width,
height, frameRate, NO_VALUE, NO_VALUE, null, NO_VALUE, null, NO_VALUE, NO_VALUE, NO_VALUE,
NO_VALUE, NO_VALUE, selectionFlags, null, NO_VALUE, OFFSET_SAMPLE_RELATIVE,
initializationData, null, null);
}
- public static Format createVideoSampleFormat(String id, String sampleMimeType, String codecs,
- int bitrate, int maxInputSize, int width, int height, float frameRate,
- List initializationData, DrmInitData drmInitData) {
+ public static Format createVideoSampleFormat(
+ @Nullable String id,
+ @Nullable String sampleMimeType,
+ @Nullable String codecs,
+ int bitrate,
+ int maxInputSize,
+ int width,
+ int height,
+ float frameRate,
+ List initializationData,
+ @Nullable DrmInitData drmInitData) {
return createVideoSampleFormat(id, sampleMimeType, codecs, bitrate, maxInputSize, width,
height, frameRate, initializationData, NO_VALUE, NO_VALUE, drmInitData);
}
- public static Format createVideoSampleFormat(String id, String sampleMimeType, String codecs,
- int bitrate, int maxInputSize, int width, int height, float frameRate,
- List initializationData, int rotationDegrees, float pixelWidthHeightRatio,
- DrmInitData drmInitData) {
+ public static Format createVideoSampleFormat(
+ @Nullable String id,
+ @Nullable String sampleMimeType,
+ @Nullable String codecs,
+ int bitrate,
+ int maxInputSize,
+ int width,
+ int height,
+ float frameRate,
+ List initializationData,
+ int rotationDegrees,
+ float pixelWidthHeightRatio,
+ @Nullable DrmInitData drmInitData) {
return createVideoSampleFormat(id, sampleMimeType, codecs, bitrate, maxInputSize, width,
height, frameRate, initializationData, rotationDegrees, pixelWidthHeightRatio, null,
NO_VALUE, null, drmInitData);
}
- public static Format createVideoSampleFormat(String id, String sampleMimeType, String codecs,
- int bitrate, int maxInputSize, int width, int height, float frameRate,
- List initializationData, int rotationDegrees, float pixelWidthHeightRatio,
- byte[] projectionData, @C.StereoMode int stereoMode, ColorInfo colorInfo,
- DrmInitData drmInitData) {
+ public static Format createVideoSampleFormat(
+ @Nullable String id,
+ @Nullable String sampleMimeType,
+ @Nullable String codecs,
+ int bitrate,
+ int maxInputSize,
+ int width,
+ int height,
+ float frameRate,
+ List initializationData,
+ int rotationDegrees,
+ float pixelWidthHeightRatio,
+ byte[] projectionData,
+ @C.StereoMode int stereoMode,
+ @Nullable ColorInfo colorInfo,
+ @Nullable DrmInitData drmInitData) {
return new Format(id, null, sampleMimeType, codecs, bitrate, maxInputSize, width, height,
frameRate, rotationDegrees, pixelWidthHeightRatio, projectionData, stereoMode,
colorInfo, NO_VALUE, NO_VALUE, NO_VALUE, NO_VALUE, NO_VALUE, 0, null, NO_VALUE,
@@ -231,37 +245,73 @@ public final class Format implements Parcelable {
// Audio.
- public static Format createAudioContainerFormat(String id, String containerMimeType,
- String sampleMimeType, String codecs, int bitrate, int channelCount, int sampleRate,
- List initializationData, @C.SelectionFlags int selectionFlags, String language) {
+ public static Format createAudioContainerFormat(
+ @Nullable String id,
+ @Nullable String containerMimeType,
+ @Nullable String sampleMimeType,
+ @Nullable String codecs,
+ int bitrate,
+ int channelCount,
+ int sampleRate,
+ List initializationData,
+ @C.SelectionFlags int selectionFlags,
+ @Nullable String language) {
return new Format(id, containerMimeType, sampleMimeType, codecs, bitrate, NO_VALUE, NO_VALUE,
NO_VALUE, NO_VALUE, NO_VALUE, NO_VALUE, null, NO_VALUE, null, channelCount, sampleRate,
NO_VALUE, NO_VALUE, NO_VALUE, selectionFlags, language, NO_VALUE, OFFSET_SAMPLE_RELATIVE,
initializationData, null, null);
}
- public static Format createAudioSampleFormat(String id, String sampleMimeType, String codecs,
- int bitrate, int maxInputSize, int channelCount, int sampleRate,
- List initializationData, DrmInitData drmInitData,
- @C.SelectionFlags int selectionFlags, String language) {
+ public static Format createAudioSampleFormat(
+ @Nullable String id,
+ @Nullable String sampleMimeType,
+ @Nullable String codecs,
+ int bitrate,
+ int maxInputSize,
+ int channelCount,
+ int sampleRate,
+ List initializationData,
+ @Nullable DrmInitData drmInitData,
+ @C.SelectionFlags int selectionFlags,
+ @Nullable String language) {
return createAudioSampleFormat(id, sampleMimeType, codecs, bitrate, maxInputSize, channelCount,
sampleRate, NO_VALUE, initializationData, drmInitData, selectionFlags, language);
}
- public static Format createAudioSampleFormat(String id, String sampleMimeType, String codecs,
- int bitrate, int maxInputSize, int channelCount, int sampleRate,
- @C.PcmEncoding int pcmEncoding, List initializationData, DrmInitData drmInitData,
- @C.SelectionFlags int selectionFlags, String language) {
+ public static Format createAudioSampleFormat(
+ @Nullable String id,
+ @Nullable String sampleMimeType,
+ @Nullable String codecs,
+ int bitrate,
+ int maxInputSize,
+ int channelCount,
+ int sampleRate,
+ @C.PcmEncoding int pcmEncoding,
+ List initializationData,
+ @Nullable DrmInitData drmInitData,
+ @C.SelectionFlags int selectionFlags,
+ @Nullable String language) {
return createAudioSampleFormat(id, sampleMimeType, codecs, bitrate, maxInputSize, channelCount,
sampleRate, pcmEncoding, NO_VALUE, NO_VALUE, initializationData, drmInitData,
selectionFlags, language, null);
}
- public static Format createAudioSampleFormat(String id, String sampleMimeType, String codecs,
- int bitrate, int maxInputSize, int channelCount, int sampleRate,
- @C.PcmEncoding int pcmEncoding, int encoderDelay, int encoderPadding,
- List initializationData, DrmInitData drmInitData,
- @C.SelectionFlags int selectionFlags, String language, Metadata metadata) {
+ public static Format createAudioSampleFormat(
+ @Nullable String id,
+ @Nullable String sampleMimeType,
+ @Nullable String codecs,
+ int bitrate,
+ int maxInputSize,
+ int channelCount,
+ int sampleRate,
+ @C.PcmEncoding int pcmEncoding,
+ int encoderDelay,
+ int encoderPadding,
+ List initializationData,
+ @Nullable DrmInitData drmInitData,
+ @C.SelectionFlags int selectionFlags,
+ @Nullable String language,
+ @Nullable Metadata metadata) {
return new Format(id, null, sampleMimeType, codecs, bitrate, maxInputSize, NO_VALUE, NO_VALUE,
NO_VALUE, NO_VALUE, NO_VALUE, null, NO_VALUE, null, channelCount, sampleRate, pcmEncoding,
encoderDelay, encoderPadding, selectionFlags, language, NO_VALUE, OFFSET_SAMPLE_RELATIVE,
@@ -270,50 +320,87 @@ public final class Format implements Parcelable {
// Text.
- public static Format createTextContainerFormat(String id, String containerMimeType,
- String sampleMimeType, String codecs, int bitrate, @C.SelectionFlags int selectionFlags,
- String language) {
+ public static Format createTextContainerFormat(
+ @Nullable String id,
+ @Nullable String containerMimeType,
+ @Nullable String sampleMimeType,
+ @Nullable String codecs,
+ int bitrate,
+ @C.SelectionFlags int selectionFlags,
+ @Nullable String language) {
return createTextContainerFormat(id, containerMimeType, sampleMimeType, codecs, bitrate,
selectionFlags, language, NO_VALUE);
}
- public static Format createTextContainerFormat(String id, String containerMimeType,
- String sampleMimeType, String codecs, int bitrate, @C.SelectionFlags int selectionFlags,
- String language, int accessibilityChannel) {
+ public static Format createTextContainerFormat(
+ @Nullable String id,
+ @Nullable String containerMimeType,
+ @Nullable String sampleMimeType,
+ @Nullable String codecs,
+ int bitrate,
+ @C.SelectionFlags int selectionFlags,
+ @Nullable String language,
+ int accessibilityChannel) {
return new Format(id, containerMimeType, sampleMimeType, codecs, bitrate, NO_VALUE, NO_VALUE,
NO_VALUE, NO_VALUE, NO_VALUE, NO_VALUE, null, NO_VALUE, null, NO_VALUE, NO_VALUE,
NO_VALUE, NO_VALUE, NO_VALUE, selectionFlags, language, accessibilityChannel,
OFFSET_SAMPLE_RELATIVE, null, null, null);
}
- public static Format createTextSampleFormat(String id, String sampleMimeType,
- @C.SelectionFlags int selectionFlags, String language) {
+ public static Format createTextSampleFormat(
+ @Nullable String id,
+ String sampleMimeType,
+ @C.SelectionFlags int selectionFlags,
+ @Nullable String language) {
return createTextSampleFormat(id, sampleMimeType, selectionFlags, language, null);
}
- public static Format createTextSampleFormat(String id, String sampleMimeType,
- @C.SelectionFlags int selectionFlags, String language, DrmInitData drmInitData) {
+ public static Format createTextSampleFormat(
+ @Nullable String id,
+ String sampleMimeType,
+ @C.SelectionFlags int selectionFlags,
+ @Nullable String language,
+ @Nullable DrmInitData drmInitData) {
return createTextSampleFormat(id, sampleMimeType, null, NO_VALUE, selectionFlags, language,
NO_VALUE, drmInitData, OFFSET_SAMPLE_RELATIVE, Collections.emptyList());
}
- public static Format createTextSampleFormat(String id, String sampleMimeType, String codecs,
- int bitrate, @C.SelectionFlags int selectionFlags, String language, int accessibilityChannel,
- DrmInitData drmInitData) {
+ public static Format createTextSampleFormat(
+ @Nullable String id,
+ @Nullable String sampleMimeType,
+ @Nullable String codecs,
+ int bitrate,
+ @C.SelectionFlags int selectionFlags,
+ @Nullable String language,
+ int accessibilityChannel,
+ @Nullable DrmInitData drmInitData) {
return createTextSampleFormat(id, sampleMimeType, codecs, bitrate, selectionFlags, language,
accessibilityChannel, drmInitData, OFFSET_SAMPLE_RELATIVE, Collections.emptyList());
}
- public static Format createTextSampleFormat(String id, String sampleMimeType, String codecs,
- int bitrate, @C.SelectionFlags int selectionFlags, String language, DrmInitData drmInitData,
+ public static Format createTextSampleFormat(
+ @Nullable String id,
+ @Nullable String sampleMimeType,
+ @Nullable String codecs,
+ int bitrate,
+ @C.SelectionFlags int selectionFlags,
+ @Nullable String language,
+ @Nullable DrmInitData drmInitData,
long subsampleOffsetUs) {
return createTextSampleFormat(id, sampleMimeType, codecs, bitrate, selectionFlags, language,
NO_VALUE, drmInitData, subsampleOffsetUs, Collections.emptyList());
}
- public static Format createTextSampleFormat(String id, String sampleMimeType, String codecs,
- int bitrate, @C.SelectionFlags int selectionFlags, String language,
- int accessibilityChannel, DrmInitData drmInitData, long subsampleOffsetUs,
+ public static Format createTextSampleFormat(
+ @Nullable String id,
+ @Nullable String sampleMimeType,
+ @Nullable String codecs,
+ int bitrate,
+ @C.SelectionFlags int selectionFlags,
+ @Nullable String language,
+ int accessibilityChannel,
+ @Nullable DrmInitData drmInitData,
+ long subsampleOffsetUs,
List initializationData) {
return new Format(id, null, sampleMimeType, codecs, bitrate, NO_VALUE, NO_VALUE, NO_VALUE,
NO_VALUE, NO_VALUE, NO_VALUE, null, NO_VALUE, null, NO_VALUE, NO_VALUE, NO_VALUE, NO_VALUE,
@@ -323,46 +410,105 @@ public final class Format implements Parcelable {
// Image.
- public static Format createImageSampleFormat(String id, String sampleMimeType, String codecs,
- int bitrate, List initializationData, String language, DrmInitData drmInitData) {
- return new Format(id, null, sampleMimeType, codecs, bitrate, NO_VALUE, NO_VALUE, NO_VALUE,
- NO_VALUE, NO_VALUE, NO_VALUE, null, NO_VALUE, null, NO_VALUE, NO_VALUE, NO_VALUE, NO_VALUE,
- NO_VALUE, 0, language, NO_VALUE, OFFSET_SAMPLE_RELATIVE, initializationData, drmInitData,
+ public static Format createImageSampleFormat(
+ @Nullable String id,
+ @Nullable String sampleMimeType,
+ @Nullable String codecs,
+ int bitrate,
+ @C.SelectionFlags int selectionFlags,
+ List initializationData,
+ @Nullable String language,
+ @Nullable DrmInitData drmInitData) {
+ return new Format(
+ id,
+ null,
+ sampleMimeType,
+ codecs,
+ bitrate,
+ NO_VALUE,
+ NO_VALUE,
+ NO_VALUE,
+ NO_VALUE,
+ NO_VALUE,
+ NO_VALUE,
+ null,
+ NO_VALUE,
+ null,
+ NO_VALUE,
+ NO_VALUE,
+ NO_VALUE,
+ NO_VALUE,
+ NO_VALUE,
+ selectionFlags,
+ language,
+ NO_VALUE,
+ OFFSET_SAMPLE_RELATIVE,
+ initializationData,
+ drmInitData,
null);
}
// Generic.
- public static Format createContainerFormat(String id, String containerMimeType,
- String sampleMimeType, String codecs, int bitrate, @C.SelectionFlags int selectionFlags,
- String language) {
+ public static Format createContainerFormat(
+ @Nullable String id,
+ @Nullable String containerMimeType,
+ @Nullable String sampleMimeType,
+ @Nullable String codecs,
+ int bitrate,
+ @C.SelectionFlags int selectionFlags,
+ @Nullable String language) {
return new Format(id, containerMimeType, sampleMimeType, codecs, bitrate, NO_VALUE, NO_VALUE,
NO_VALUE, NO_VALUE, NO_VALUE, NO_VALUE, null, NO_VALUE, null, NO_VALUE, NO_VALUE, NO_VALUE,
NO_VALUE, NO_VALUE, selectionFlags, language, NO_VALUE, OFFSET_SAMPLE_RELATIVE, null, null,
null);
}
- public static Format createSampleFormat(String id, String sampleMimeType,
- long subsampleOffsetUs) {
+ public static Format createSampleFormat(
+ @Nullable String id, @Nullable String sampleMimeType, long subsampleOffsetUs) {
return new Format(id, null, sampleMimeType, null, NO_VALUE, NO_VALUE, NO_VALUE, NO_VALUE,
NO_VALUE, NO_VALUE, NO_VALUE, null, NO_VALUE, null, NO_VALUE, NO_VALUE, NO_VALUE, NO_VALUE,
NO_VALUE, 0, null, NO_VALUE, subsampleOffsetUs, null, null, null);
}
- public static Format createSampleFormat(String id, String sampleMimeType, String codecs,
- int bitrate, DrmInitData drmInitData) {
+ public static Format createSampleFormat(
+ @Nullable String id,
+ @Nullable String sampleMimeType,
+ @Nullable String codecs,
+ int bitrate,
+ @Nullable DrmInitData drmInitData) {
return new Format(id, null, sampleMimeType, codecs, bitrate, NO_VALUE, NO_VALUE, NO_VALUE,
NO_VALUE, NO_VALUE, NO_VALUE, null, NO_VALUE, null, NO_VALUE, NO_VALUE, NO_VALUE, NO_VALUE,
NO_VALUE, 0, null, NO_VALUE, OFFSET_SAMPLE_RELATIVE, null, drmInitData, null);
}
- /* package */ Format(String id, String containerMimeType, String sampleMimeType, String codecs,
- int bitrate, int maxInputSize, int width, int height, float frameRate, int rotationDegrees,
- float pixelWidthHeightRatio, byte[] projectionData, @C.StereoMode int stereoMode,
- ColorInfo colorInfo, int channelCount, int sampleRate, @C.PcmEncoding int pcmEncoding,
- int encoderDelay, int encoderPadding, @C.SelectionFlags int selectionFlags, String language,
- int accessibilityChannel, long subsampleOffsetUs, List initializationData,
- DrmInitData drmInitData, Metadata metadata) {
+ /* package */ Format(
+ @Nullable String id,
+ @Nullable String containerMimeType,
+ @Nullable String sampleMimeType,
+ @Nullable String codecs,
+ int bitrate,
+ int maxInputSize,
+ int width,
+ int height,
+ float frameRate,
+ int rotationDegrees,
+ float pixelWidthHeightRatio,
+ @Nullable byte[] projectionData,
+ @C.StereoMode int stereoMode,
+ @Nullable ColorInfo colorInfo,
+ int channelCount,
+ int sampleRate,
+ @C.PcmEncoding int pcmEncoding,
+ int encoderDelay,
+ int encoderPadding,
+ @C.SelectionFlags int selectionFlags,
+ @Nullable String language,
+ int accessibilityChannel,
+ long subsampleOffsetUs,
+ @Nullable List initializationData,
+ @Nullable DrmInitData drmInitData,
+ @Nullable Metadata metadata) {
this.id = id;
this.containerMimeType = containerMimeType;
this.sampleMimeType = sampleMimeType;
@@ -372,16 +518,17 @@ public final class Format implements Parcelable {
this.width = width;
this.height = height;
this.frameRate = frameRate;
- this.rotationDegrees = rotationDegrees;
- this.pixelWidthHeightRatio = pixelWidthHeightRatio;
+ this.rotationDegrees = rotationDegrees == Format.NO_VALUE ? 0 : rotationDegrees;
+ this.pixelWidthHeightRatio =
+ pixelWidthHeightRatio == Format.NO_VALUE ? 1 : pixelWidthHeightRatio;
this.projectionData = projectionData;
this.stereoMode = stereoMode;
this.colorInfo = colorInfo;
this.channelCount = channelCount;
this.sampleRate = sampleRate;
this.pcmEncoding = pcmEncoding;
- this.encoderDelay = encoderDelay;
- this.encoderPadding = encoderPadding;
+ this.encoderDelay = encoderDelay == Format.NO_VALUE ? 0 : encoderDelay;
+ this.encoderPadding = encoderPadding == Format.NO_VALUE ? 0 : encoderPadding;
this.selectionFlags = selectionFlags;
this.language = language;
this.accessibilityChannel = accessibilityChannel;
@@ -405,7 +552,7 @@ public final class Format implements Parcelable {
frameRate = in.readFloat();
rotationDegrees = in.readInt();
pixelWidthHeightRatio = in.readFloat();
- boolean hasProjectionData = in.readInt() != 0;
+ boolean hasProjectionData = Util.readBoolean(in);
projectionData = hasProjectionData ? in.createByteArray() : null;
stereoMode = in.readInt();
colorInfo = in.readParcelable(ColorInfo.class.getClassLoader());
@@ -443,8 +590,15 @@ public final class Format implements Parcelable {
drmInitData, metadata);
}
- public Format copyWithContainerInfo(String id, String codecs, int bitrate, int width, int height,
- @C.SelectionFlags int selectionFlags, String language) {
+ public Format copyWithContainerInfo(
+ @Nullable String id,
+ @Nullable String sampleMimeType,
+ @Nullable String codecs,
+ int bitrate,
+ int width,
+ int height,
+ @C.SelectionFlags int selectionFlags,
+ @Nullable String language) {
return new Format(id, containerMimeType, sampleMimeType, codecs, bitrate, maxInputSize, width,
height, frameRate, rotationDegrees, pixelWidthHeightRatio, projectionData, stereoMode,
colorInfo, channelCount, sampleRate, pcmEncoding, encoderDelay, encoderPadding,
@@ -464,8 +618,8 @@ public final class Format implements Parcelable {
float frameRate = this.frameRate == NO_VALUE ? manifestFormat.frameRate : this.frameRate;
@C.SelectionFlags int selectionFlags = this.selectionFlags | manifestFormat.selectionFlags;
String language = this.language == null ? manifestFormat.language : this.language;
- DrmInitData drmInitData = manifestFormat.drmInitData != null ? manifestFormat.drmInitData
- : this.drmInitData;
+ DrmInitData drmInitData =
+ DrmInitData.createSessionCreationData(manifestFormat.drmInitData, this.drmInitData);
return new Format(id, containerMimeType, sampleMimeType, codecs, bitrate, maxInputSize, width,
height, frameRate, rotationDegrees, pixelWidthHeightRatio, projectionData, stereoMode,
colorInfo, channelCount, sampleRate, pcmEncoding, encoderDelay, encoderPadding,
@@ -481,7 +635,7 @@ public final class Format implements Parcelable {
drmInitData, metadata);
}
- public Format copyWithDrmInitData(DrmInitData drmInitData) {
+ public Format copyWithDrmInitData(@Nullable DrmInitData drmInitData) {
return new Format(id, containerMimeType, sampleMimeType, codecs, bitrate, maxInputSize, width,
height, frameRate, rotationDegrees, pixelWidthHeightRatio, projectionData, stereoMode,
colorInfo, channelCount, sampleRate, pcmEncoding, encoderDelay, encoderPadding,
@@ -489,7 +643,7 @@ public final class Format implements Parcelable {
drmInitData, metadata);
}
- public Format copyWithMetadata(Metadata metadata) {
+ public Format copyWithMetadata(@Nullable Metadata metadata) {
return new Format(id, containerMimeType, sampleMimeType, codecs, bitrate, maxInputSize, width,
height, frameRate, rotationDegrees, pixelWidthHeightRatio, projectionData, stereoMode,
colorInfo, channelCount, sampleRate, pcmEncoding, encoderDelay, encoderPadding,
@@ -513,31 +667,6 @@ public final class Format implements Parcelable {
return width == NO_VALUE || height == NO_VALUE ? NO_VALUE : (width * height);
}
- /**
- * Returns a {@link MediaFormat} representation of this format.
- */
- @SuppressLint("InlinedApi")
- @TargetApi(16)
- public final MediaFormat getFrameworkMediaFormatV16() {
- MediaFormat format = new MediaFormat();
- format.setString(MediaFormat.KEY_MIME, sampleMimeType);
- maybeSetStringV16(format, MediaFormat.KEY_LANGUAGE, language);
- maybeSetIntegerV16(format, MediaFormat.KEY_MAX_INPUT_SIZE, maxInputSize);
- maybeSetIntegerV16(format, MediaFormat.KEY_WIDTH, width);
- maybeSetIntegerV16(format, MediaFormat.KEY_HEIGHT, height);
- maybeSetFloatV16(format, MediaFormat.KEY_FRAME_RATE, frameRate);
- maybeSetIntegerV16(format, "rotation-degrees", rotationDegrees);
- maybeSetIntegerV16(format, MediaFormat.KEY_CHANNEL_COUNT, channelCount);
- maybeSetIntegerV16(format, MediaFormat.KEY_SAMPLE_RATE, sampleRate);
- maybeSetIntegerV16(format, "encoder-delay", encoderDelay);
- maybeSetIntegerV16(format, "encoder-padding", encoderPadding);
- for (int i = 0; i < initializationData.size(); i++) {
- format.setByteBuffer("csd-" + i, ByteBuffer.wrap(initializationData.get(i)));
- }
- maybeSetColorInfoV24(format, colorInfo);
- return format;
- }
-
@Override
public String toString() {
return "Format(" + id + ", " + containerMimeType + ", " + sampleMimeType + ", " + bitrate + ", "
@@ -568,7 +697,7 @@ public final class Format implements Parcelable {
}
@Override
- public boolean equals(Object obj) {
+ public boolean equals(@Nullable Object obj) {
if (this == obj) {
return true;
}
@@ -576,24 +705,44 @@ public final class Format implements Parcelable {
return false;
}
Format other = (Format) obj;
- if (bitrate != other.bitrate || maxInputSize != other.maxInputSize
- || width != other.width || height != other.height || frameRate != other.frameRate
- || rotationDegrees != other.rotationDegrees
- || pixelWidthHeightRatio != other.pixelWidthHeightRatio || stereoMode != other.stereoMode
- || channelCount != other.channelCount || sampleRate != other.sampleRate
- || pcmEncoding != other.pcmEncoding || encoderDelay != other.encoderDelay
- || encoderPadding != other.encoderPadding || subsampleOffsetUs != other.subsampleOffsetUs
- || selectionFlags != other.selectionFlags || !Util.areEqual(id, other.id)
- || !Util.areEqual(language, other.language)
- || accessibilityChannel != other.accessibilityChannel
- || !Util.areEqual(containerMimeType, other.containerMimeType)
- || !Util.areEqual(sampleMimeType, other.sampleMimeType)
- || !Util.areEqual(codecs, other.codecs)
- || !Util.areEqual(drmInitData, other.drmInitData)
- || !Util.areEqual(metadata, other.metadata)
- || !Util.areEqual(colorInfo, other.colorInfo)
- || !Arrays.equals(projectionData, other.projectionData)
- || initializationData.size() != other.initializationData.size()) {
+ return bitrate == other.bitrate
+ && maxInputSize == other.maxInputSize
+ && width == other.width
+ && height == other.height
+ && frameRate == other.frameRate
+ && rotationDegrees == other.rotationDegrees
+ && pixelWidthHeightRatio == other.pixelWidthHeightRatio
+ && stereoMode == other.stereoMode
+ && channelCount == other.channelCount
+ && sampleRate == other.sampleRate
+ && pcmEncoding == other.pcmEncoding
+ && encoderDelay == other.encoderDelay
+ && encoderPadding == other.encoderPadding
+ && subsampleOffsetUs == other.subsampleOffsetUs
+ && selectionFlags == other.selectionFlags
+ && Util.areEqual(id, other.id)
+ && Util.areEqual(language, other.language)
+ && accessibilityChannel == other.accessibilityChannel
+ && Util.areEqual(containerMimeType, other.containerMimeType)
+ && Util.areEqual(sampleMimeType, other.sampleMimeType)
+ && Util.areEqual(codecs, other.codecs)
+ && Util.areEqual(drmInitData, other.drmInitData)
+ && Util.areEqual(metadata, other.metadata)
+ && Util.areEqual(colorInfo, other.colorInfo)
+ && Arrays.equals(projectionData, other.projectionData)
+ && initializationDataEquals(other);
+ }
+
+ /**
+ * Returns whether the {@link #initializationData}s belonging to this format and {@code other} are
+ * equal.
+ *
+ * @param other The other format whose {@link #initializationData} is being compared.
+ * @return Whether the {@link #initializationData}s belonging to this format and {@code other} are
+ * equal.
+ */
+ public boolean initializationDataEquals(Format other) {
+ if (initializationData.size() != other.initializationData.size()) {
return false;
}
for (int i = 0; i < initializationData.size(); i++) {
@@ -604,45 +753,6 @@ public final class Format implements Parcelable {
return true;
}
- @TargetApi(24)
- private static void maybeSetColorInfoV24(MediaFormat format, ColorInfo colorInfo) {
- if (colorInfo == null) {
- return;
- }
- maybeSetIntegerV16(format, MediaFormat.KEY_COLOR_TRANSFER, colorInfo.colorTransfer);
- maybeSetIntegerV16(format, MediaFormat.KEY_COLOR_STANDARD, colorInfo.colorSpace);
- maybeSetIntegerV16(format, MediaFormat.KEY_COLOR_RANGE, colorInfo.colorRange);
- maybeSetByteBufferV16(format, MediaFormat.KEY_HDR_STATIC_INFO, colorInfo.hdrStaticInfo);
- }
-
- @TargetApi(16)
- private static void maybeSetStringV16(MediaFormat format, String key, String value) {
- if (value != null) {
- format.setString(key, value);
- }
- }
-
- @TargetApi(16)
- private static void maybeSetIntegerV16(MediaFormat format, String key, int value) {
- if (value != NO_VALUE) {
- format.setInteger(key, value);
- }
- }
-
- @TargetApi(16)
- private static void maybeSetFloatV16(MediaFormat format, String key, float value) {
- if (value != NO_VALUE) {
- format.setFloat(key, value);
- }
- }
-
- @TargetApi(16)
- private static void maybeSetByteBufferV16(MediaFormat format, String key, byte[] value) {
- if (value != null) {
- format.setByteBuffer(key, ByteBuffer.wrap(value));
- }
- }
-
// Utility methods
/**
@@ -695,7 +805,7 @@ public final class Format implements Parcelable {
dest.writeFloat(frameRate);
dest.writeInt(rotationDegrees);
dest.writeFloat(pixelWidthHeightRatio);
- dest.writeInt(projectionData != null ? 1 : 0);
+ Util.writeBoolean(dest, projectionData != null);
if (projectionData != null) {
dest.writeByteArray(projectionData);
}
@@ -732,5 +842,4 @@ public final class Format implements Parcelable {
}
};
-
}
diff --git a/library/core/src/main/java/com/google/android/exoplayer2/LoadControl.java b/library/core/src/main/java/com/google/android/exoplayer2/LoadControl.java
index c092480222..80be0b9e71 100644
--- a/library/core/src/main/java/com/google/android/exoplayer2/LoadControl.java
+++ b/library/core/src/main/java/com/google/android/exoplayer2/LoadControl.java
@@ -56,23 +56,58 @@ public interface LoadControl {
Allocator getAllocator();
/**
- * Called by the player to determine whether sufficient media is buffered for playback to be
- * started or resumed.
+ * Returns the duration of media to retain in the buffer prior to the current playback position,
+ * for fast backward seeking.
+ *
+ * Note: If {@link #retainBackBufferFromKeyframe()} is false then seeking in the back-buffer will
+ * only be fast if the back-buffer contains a keyframe prior to the seek position.
+ *
+ * Note: Implementations should return a single value. Dynamic changes to the back-buffer are not
+ * currently supported.
*
- * @param bufferedDurationUs The duration of media that's currently buffered.
- * @param rebuffering Whether the player is rebuffering. A rebuffer is defined to be caused by
- * buffer depletion rather than a user action. Hence this parameter is false during initial
- * buffering and when buffering as a result of a seek operation.
- * @return Whether playback should be allowed to start or resume.
+ * @return The duration of media to retain in the buffer prior to the current playback position,
+ * in microseconds.
*/
- boolean shouldStartPlayback(long bufferedDurationUs, boolean rebuffering);
+ long getBackBufferDurationUs();
+
+ /**
+ * Returns whether media should be retained from the keyframe before the current playback position
+ * minus {@link #getBackBufferDurationUs()}, rather than any sample before or at that position.
+ *
+ * Warning: Returning true will cause the back-buffer size to depend on the spacing of keyframes
+ * in the media being played. Returning true is not recommended unless you control the media and
+ * are comfortable with the back-buffer size exceeding {@link #getBackBufferDurationUs()} by as
+ * much as the maximum duration between adjacent keyframes in the media.
+ *
+ * Note: Implementations should return a single value. Dynamic changes to the back-buffer are not
+ * currently supported.
+ *
+ * @return Whether media should be retained from the keyframe before the current playback position
+ * minus {@link #getBackBufferDurationUs()}, rather than any sample before or at that position.
+ */
+ boolean retainBackBufferFromKeyframe();
/**
* Called by the player to determine whether it should continue to load the source.
*
* @param bufferedDurationUs The duration of media that's currently buffered.
+ * @param playbackSpeed The current playback speed.
* @return Whether the loading should continue.
*/
- boolean shouldContinueLoading(long bufferedDurationUs);
+ boolean shouldContinueLoading(long bufferedDurationUs, float playbackSpeed);
+ /**
+ * Called repeatedly by the player when it's loading the source, has yet to start playback, and
+ * has the minimum amount of data necessary for playback to be started. The value returned
+ * determines whether playback is actually started. The load control may opt to return {@code
+ * false} until some condition has been met (e.g. a certain amount of media is buffered).
+ *
+ * @param bufferedDurationUs The duration of media that's currently buffered.
+ * @param playbackSpeed The current playback speed.
+ * @param rebuffering Whether the player is rebuffering. A rebuffer is defined to be caused by
+ * buffer depletion rather than a user action. Hence this parameter is false during initial
+ * buffering and when buffering as a result of a seek operation.
+ * @return Whether playback should be allowed to start or resume.
+ */
+ boolean shouldStartPlayback(long bufferedDurationUs, float playbackSpeed, boolean rebuffering);
}
diff --git a/library/core/src/main/java/com/google/android/exoplayer2/MediaPeriodHolder.java b/library/core/src/main/java/com/google/android/exoplayer2/MediaPeriodHolder.java
new file mode 100644
index 0000000000..2f71d0d547
--- /dev/null
+++ b/library/core/src/main/java/com/google/android/exoplayer2/MediaPeriodHolder.java
@@ -0,0 +1,286 @@
+/*
+ * Copyright (C) 2018 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package com.google.android.exoplayer2;
+
+import android.util.Log;
+import com.google.android.exoplayer2.source.ClippingMediaPeriod;
+import com.google.android.exoplayer2.source.EmptySampleStream;
+import com.google.android.exoplayer2.source.MediaPeriod;
+import com.google.android.exoplayer2.source.MediaSource;
+import com.google.android.exoplayer2.source.SampleStream;
+import com.google.android.exoplayer2.source.TrackGroupArray;
+import com.google.android.exoplayer2.trackselection.TrackSelection;
+import com.google.android.exoplayer2.trackselection.TrackSelectionArray;
+import com.google.android.exoplayer2.trackselection.TrackSelector;
+import com.google.android.exoplayer2.trackselection.TrackSelectorResult;
+import com.google.android.exoplayer2.upstream.Allocator;
+import com.google.android.exoplayer2.util.Assertions;
+
+/** Holds a {@link MediaPeriod} with information required to play it as part of a timeline. */
+/* package */ final class MediaPeriodHolder {
+
+ private static final String TAG = "MediaPeriodHolder";
+
+ public final MediaPeriod mediaPeriod;
+ public final Object uid;
+ public final SampleStream[] sampleStreams;
+ public final boolean[] mayRetainStreamFlags;
+
+ public long rendererPositionOffsetUs;
+ public boolean prepared;
+ public boolean hasEnabledTracks;
+ public MediaPeriodInfo info;
+ public MediaPeriodHolder next;
+ public TrackGroupArray trackGroups;
+ public TrackSelectorResult trackSelectorResult;
+
+ private final RendererCapabilities[] rendererCapabilities;
+ private final TrackSelector trackSelector;
+ private final MediaSource mediaSource;
+
+ private TrackSelectorResult periodTrackSelectorResult;
+
+ /**
+ * Creates a new holder with information required to play its media period as part of a timeline.
+ *
+ * @param rendererCapabilities The renderer capabilities.
+ * @param rendererPositionOffsetUs The time offset of the start of the media period to provide to
+ * renderers.
+ * @param trackSelector The track selector.
+ * @param allocator The allocator.
+ * @param mediaSource The media source that produced the media period.
+ * @param uid The unique identifier for the containing timeline period.
+ * @param info Information used to identify this media period in its timeline period.
+ */
+ public MediaPeriodHolder(
+ RendererCapabilities[] rendererCapabilities,
+ long rendererPositionOffsetUs,
+ TrackSelector trackSelector,
+ Allocator allocator,
+ MediaSource mediaSource,
+ Object uid,
+ MediaPeriodInfo info) {
+ this.rendererCapabilities = rendererCapabilities;
+ this.rendererPositionOffsetUs = rendererPositionOffsetUs - info.startPositionUs;
+ this.trackSelector = trackSelector;
+ this.mediaSource = mediaSource;
+ this.uid = Assertions.checkNotNull(uid);
+ this.info = info;
+ sampleStreams = new SampleStream[rendererCapabilities.length];
+ mayRetainStreamFlags = new boolean[rendererCapabilities.length];
+ MediaPeriod mediaPeriod = mediaSource.createPeriod(info.id, allocator);
+ if (info.endPositionUs != C.TIME_END_OF_SOURCE) {
+ mediaPeriod =
+ new ClippingMediaPeriod(
+ mediaPeriod,
+ /* enableInitialDiscontinuity= */ true,
+ /* startUs= */ 0,
+ info.endPositionUs);
+ }
+ this.mediaPeriod = mediaPeriod;
+ }
+
+ public long toRendererTime(long periodTimeUs) {
+ return periodTimeUs + getRendererOffset();
+ }
+
+ public long toPeriodTime(long rendererTimeUs) {
+ return rendererTimeUs - getRendererOffset();
+ }
+
+ public long getRendererOffset() {
+ return rendererPositionOffsetUs;
+ }
+
+ public boolean isFullyBuffered() {
+ return prepared
+ && (!hasEnabledTracks || mediaPeriod.getBufferedPositionUs() == C.TIME_END_OF_SOURCE);
+ }
+
+ public long getDurationUs() {
+ return info.durationUs;
+ }
+
+ /**
+ * Returns the buffered position in microseconds. If the period is buffered to the end then
+ * {@link C#TIME_END_OF_SOURCE} is returned unless {@code convertEosToDuration} is true, in which
+ * case the period duration is returned.
+ *
+ * @param convertEosToDuration Whether to return the period duration rather than
+ * {@link C#TIME_END_OF_SOURCE} if the period is fully buffered.
+ * @return The buffered position in microseconds.
+ */
+ public long getBufferedPositionUs(boolean convertEosToDuration) {
+ if (!prepared) {
+ return info.startPositionUs;
+ }
+ long bufferedPositionUs = mediaPeriod.getBufferedPositionUs();
+ return bufferedPositionUs == C.TIME_END_OF_SOURCE && convertEosToDuration
+ ? info.durationUs
+ : bufferedPositionUs;
+ }
+
+ public long getNextLoadPositionUs() {
+ return !prepared ? 0 : mediaPeriod.getNextLoadPositionUs();
+ }
+
+ public void handlePrepared(float playbackSpeed) throws ExoPlaybackException {
+ prepared = true;
+ trackGroups = mediaPeriod.getTrackGroups();
+ selectTracks(playbackSpeed);
+ long newStartPositionUs = applyTrackSelection(info.startPositionUs, false);
+ rendererPositionOffsetUs += info.startPositionUs - newStartPositionUs;
+ info = info.copyWithStartPositionUs(newStartPositionUs);
+ }
+
+ public void reevaluateBuffer(long rendererPositionUs) {
+ if (prepared) {
+ mediaPeriod.reevaluateBuffer(toPeriodTime(rendererPositionUs));
+ }
+ }
+
+ public void continueLoading(long rendererPositionUs) {
+ long loadingPeriodPositionUs = toPeriodTime(rendererPositionUs);
+ mediaPeriod.continueLoading(loadingPeriodPositionUs);
+ }
+
+ public boolean selectTracks(float playbackSpeed) throws ExoPlaybackException {
+ TrackSelectorResult selectorResult =
+ trackSelector.selectTracks(rendererCapabilities, trackGroups);
+ if (selectorResult.isEquivalent(periodTrackSelectorResult)) {
+ return false;
+ }
+ trackSelectorResult = selectorResult;
+ for (TrackSelection trackSelection : trackSelectorResult.selections.getAll()) {
+ if (trackSelection != null) {
+ trackSelection.onPlaybackSpeed(playbackSpeed);
+ }
+ }
+ return true;
+ }
+
+ public long applyTrackSelection(long positionUs, boolean forceRecreateStreams) {
+ return applyTrackSelection(
+ positionUs, forceRecreateStreams, new boolean[rendererCapabilities.length]);
+ }
+
+ public long applyTrackSelection(
+ long positionUs, boolean forceRecreateStreams, boolean[] streamResetFlags) {
+ for (int i = 0; i < trackSelectorResult.length; i++) {
+ mayRetainStreamFlags[i] =
+ !forceRecreateStreams && trackSelectorResult.isEquivalent(periodTrackSelectorResult, i);
+ }
+
+ // Undo the effect of the previous call to associate no-sample renderers with empty sample
+ // streams, so the mediaPeriod receives back whatever it sent us before.
+ disassociateNoSampleRenderersWithEmptySampleStream(sampleStreams);
+ updatePeriodTrackSelectorResult(trackSelectorResult);
+ // Disable streams on the period and get new streams for updated/newly-enabled tracks.
+ TrackSelectionArray trackSelections = trackSelectorResult.selections;
+ positionUs =
+ mediaPeriod.selectTracks(
+ trackSelections.getAll(),
+ mayRetainStreamFlags,
+ sampleStreams,
+ streamResetFlags,
+ positionUs);
+ associateNoSampleRenderersWithEmptySampleStream(sampleStreams);
+
+ // Update whether we have enabled tracks and sanity check the expected streams are non-null.
+ hasEnabledTracks = false;
+ for (int i = 0; i < sampleStreams.length; i++) {
+ if (sampleStreams[i] != null) {
+ Assertions.checkState(trackSelectorResult.isRendererEnabled(i));
+ // hasEnabledTracks should be true only when non-empty streams exist.
+ if (rendererCapabilities[i].getTrackType() != C.TRACK_TYPE_NONE) {
+ hasEnabledTracks = true;
+ }
+ } else {
+ Assertions.checkState(trackSelections.get(i) == null);
+ }
+ }
+ return positionUs;
+ }
+
+ public void release() {
+ updatePeriodTrackSelectorResult(null);
+ try {
+ if (info.endPositionUs != C.TIME_END_OF_SOURCE) {
+ mediaSource.releasePeriod(((ClippingMediaPeriod) mediaPeriod).mediaPeriod);
+ } else {
+ mediaSource.releasePeriod(mediaPeriod);
+ }
+ } catch (RuntimeException e) {
+ // There's nothing we can do.
+ Log.e(TAG, "Period release failed.", e);
+ }
+ }
+
+ private void updatePeriodTrackSelectorResult(TrackSelectorResult trackSelectorResult) {
+ if (periodTrackSelectorResult != null) {
+ disableTrackSelectionsInResult(periodTrackSelectorResult);
+ }
+ periodTrackSelectorResult = trackSelectorResult;
+ if (periodTrackSelectorResult != null) {
+ enableTrackSelectionsInResult(periodTrackSelectorResult);
+ }
+ }
+
+ private void enableTrackSelectionsInResult(TrackSelectorResult trackSelectorResult) {
+ for (int i = 0; i < trackSelectorResult.length; i++) {
+ boolean rendererEnabled = trackSelectorResult.isRendererEnabled(i);
+ TrackSelection trackSelection = trackSelectorResult.selections.get(i);
+ if (rendererEnabled && trackSelection != null) {
+ trackSelection.enable();
+ }
+ }
+ }
+
+ private void disableTrackSelectionsInResult(TrackSelectorResult trackSelectorResult) {
+ for (int i = 0; i < trackSelectorResult.length; i++) {
+ boolean rendererEnabled = trackSelectorResult.isRendererEnabled(i);
+ TrackSelection trackSelection = trackSelectorResult.selections.get(i);
+ if (rendererEnabled && trackSelection != null) {
+ trackSelection.disable();
+ }
+ }
+ }
+
+ /**
+ * For each renderer of type {@link C#TRACK_TYPE_NONE}, we will remove the dummy {@link
+ * EmptySampleStream} that was associated with it.
+ */
+ private void disassociateNoSampleRenderersWithEmptySampleStream(SampleStream[] sampleStreams) {
+ for (int i = 0; i < rendererCapabilities.length; i++) {
+ if (rendererCapabilities[i].getTrackType() == C.TRACK_TYPE_NONE) {
+ sampleStreams[i] = null;
+ }
+ }
+ }
+
+ /**
+ * For each renderer of type {@link C#TRACK_TYPE_NONE} that was enabled, we will associate it with
+ * a dummy {@link EmptySampleStream}.
+ */
+ private void associateNoSampleRenderersWithEmptySampleStream(SampleStream[] sampleStreams) {
+ for (int i = 0; i < rendererCapabilities.length; i++) {
+ if (rendererCapabilities[i].getTrackType() == C.TRACK_TYPE_NONE
+ && trackSelectorResult.isRendererEnabled(i)) {
+ sampleStreams[i] = new EmptySampleStream();
+ }
+ }
+ }
+}
diff --git a/library/core/src/main/java/com/google/android/exoplayer2/MediaPeriodInfo.java b/library/core/src/main/java/com/google/android/exoplayer2/MediaPeriodInfo.java
new file mode 100644
index 0000000000..fce1780b71
--- /dev/null
+++ b/library/core/src/main/java/com/google/android/exoplayer2/MediaPeriodInfo.java
@@ -0,0 +1,98 @@
+/*
+ * Copyright (C) 2018 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package com.google.android.exoplayer2;
+
+import com.google.android.exoplayer2.source.MediaPeriod;
+import com.google.android.exoplayer2.source.MediaSource.MediaPeriodId;
+
+/** Stores the information required to load and play a {@link MediaPeriod}. */
+/* package */ final class MediaPeriodInfo {
+
+ /** The media period's identifier. */
+ public final MediaPeriodId id;
+ /** The start position of the media to play within the media period, in microseconds. */
+ public final long startPositionUs;
+ /**
+ * The end position of the media to play within the media period, in microseconds, or {@link
+ * C#TIME_END_OF_SOURCE} if the end position is the end of the media period.
+ */
+ public final long endPositionUs;
+ /**
+ * If this is an ad, the position to play in the next content media period. {@link C#TIME_UNSET}
+ * otherwise.
+ */
+ public final long contentPositionUs;
+ /**
+ * The duration of the media period, like {@link #endPositionUs} but with {@link
+ * C#TIME_END_OF_SOURCE} resolved to the timeline period duration. May be {@link C#TIME_UNSET} if
+ * the end position is not known.
+ */
+ public final long durationUs;
+ /**
+ * Whether this is the last media period in its timeline period (e.g., a postroll ad, or a media
+ * period corresponding to a timeline period without ads).
+ */
+ public final boolean isLastInTimelinePeriod;
+ /**
+ * Whether this is the last media period in the entire timeline. If true, {@link
+ * #isLastInTimelinePeriod} will also be true.
+ */
+ public final boolean isFinal;
+
+ MediaPeriodInfo(
+ MediaPeriodId id,
+ long startPositionUs,
+ long endPositionUs,
+ long contentPositionUs,
+ long durationUs,
+ boolean isLastInTimelinePeriod,
+ boolean isFinal) {
+ this.id = id;
+ this.startPositionUs = startPositionUs;
+ this.endPositionUs = endPositionUs;
+ this.contentPositionUs = contentPositionUs;
+ this.durationUs = durationUs;
+ this.isLastInTimelinePeriod = isLastInTimelinePeriod;
+ this.isFinal = isFinal;
+ }
+
+ /**
+ * Returns a copy of this instance with the period identifier's period index set to the specified
+ * value.
+ */
+ public MediaPeriodInfo copyWithPeriodIndex(int periodIndex) {
+ return new MediaPeriodInfo(
+ id.copyWithPeriodIndex(periodIndex),
+ startPositionUs,
+ endPositionUs,
+ contentPositionUs,
+ durationUs,
+ isLastInTimelinePeriod,
+ isFinal);
+ }
+
+ /** Returns a copy of this instance with the start position set to the specified value. */
+ public MediaPeriodInfo copyWithStartPositionUs(long startPositionUs) {
+ return new MediaPeriodInfo(
+ id,
+ startPositionUs,
+ endPositionUs,
+ contentPositionUs,
+ durationUs,
+ isLastInTimelinePeriod,
+ isFinal);
+ }
+}
diff --git a/library/core/src/main/java/com/google/android/exoplayer2/MediaPeriodInfoSequence.java b/library/core/src/main/java/com/google/android/exoplayer2/MediaPeriodInfoSequence.java
deleted file mode 100644
index 9e8c2645c1..0000000000
--- a/library/core/src/main/java/com/google/android/exoplayer2/MediaPeriodInfoSequence.java
+++ /dev/null
@@ -1,360 +0,0 @@
-/*
- * Copyright (C) 2017 The Android Open Source Project
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package com.google.android.exoplayer2;
-
-import android.util.Pair;
-import com.google.android.exoplayer2.ExoPlayerImplInternal.PlaybackInfo;
-import com.google.android.exoplayer2.Player.RepeatMode;
-import com.google.android.exoplayer2.source.MediaPeriod;
-import com.google.android.exoplayer2.source.MediaSource.MediaPeriodId;
-
-/**
- * Provides a sequence of {@link MediaPeriodInfo}s to the player, determining the order and
- * start/end positions for {@link MediaPeriod}s to load and play.
- */
-/* package */ final class MediaPeriodInfoSequence {
-
- // TODO: Consider merging this class with the MediaPeriodHolder queue in ExoPlayerImplInternal.
-
- /**
- * Stores the information required to load and play a {@link MediaPeriod}.
- */
- public static final class MediaPeriodInfo {
-
- /**
- * The media period's identifier.
- */
- public final MediaPeriodId id;
- /**
- * The start position of the media to play within the media period, in microseconds.
- */
- public final long startPositionUs;
- /**
- * The end position of the media to play within the media period, in microseconds, or
- * {@link C#TIME_END_OF_SOURCE} if the end position is the end of the media period.
- */
- public final long endPositionUs;
- /**
- * If this is an ad, the position to play in the next content media period. {@link C#TIME_UNSET}
- * otherwise.
- */
- public final long contentPositionUs;
- /**
- * The duration of the media to play within the media period, in microseconds, or
- * {@link C#TIME_UNSET} if not known.
- */
- public final long durationUs;
- /**
- * Whether this is the last media period in its timeline period (e.g., a postroll ad, or a media
- * period corresponding to a timeline period without ads).
- */
- public final boolean isLastInTimelinePeriod;
- /**
- * Whether this is the last media period in the entire timeline. If true,
- * {@link #isLastInTimelinePeriod} will also be true.
- */
- public final boolean isFinal;
-
- private MediaPeriodInfo(MediaPeriodId id, long startPositionUs, long endPositionUs,
- long contentPositionUs, long durationUs, boolean isLastInTimelinePeriod, boolean isFinal) {
- this.id = id;
- this.startPositionUs = startPositionUs;
- this.endPositionUs = endPositionUs;
- this.contentPositionUs = contentPositionUs;
- this.durationUs = durationUs;
- this.isLastInTimelinePeriod = isLastInTimelinePeriod;
- this.isFinal = isFinal;
- }
-
- /**
- * Returns a copy of this instance with the period identifier's period index set to the
- * specified value.
- */
- public MediaPeriodInfo copyWithPeriodIndex(int periodIndex) {
- return new MediaPeriodInfo(id.copyWithPeriodIndex(periodIndex), startPositionUs,
- endPositionUs, contentPositionUs, durationUs, isLastInTimelinePeriod, isFinal);
- }
-
- /**
- * Returns a copy of this instance with the start position set to the specified value.
- */
- public MediaPeriodInfo copyWithStartPositionUs(long startPositionUs) {
- return new MediaPeriodInfo(id, startPositionUs, endPositionUs, contentPositionUs, durationUs,
- isLastInTimelinePeriod, isFinal);
- }
-
- }
-
- private final Timeline.Period period;
- private final Timeline.Window window;
-
- private Timeline timeline;
- private @RepeatMode int repeatMode;
- private boolean shuffleModeEnabled;
-
- /**
- * Creates a new media period info sequence.
- */
- public MediaPeriodInfoSequence() {
- period = new Timeline.Period();
- window = new Timeline.Window();
- }
-
- /**
- * Sets the {@link Timeline}. Call {@link #getUpdatedMediaPeriodInfo} to update period information
- * taking into account the new timeline.
- */
- public void setTimeline(Timeline timeline) {
- this.timeline = timeline;
- }
-
- /**
- * Sets the {@link RepeatMode}. Call {@link #getUpdatedMediaPeriodInfo} to update period
- * information taking into account the new repeat mode.
- */
- public void setRepeatMode(@RepeatMode int repeatMode) {
- this.repeatMode = repeatMode;
- }
-
- /**
- * Sets whether shuffling is enabled. Call {@link #getUpdatedMediaPeriodInfo} to update period
- * information taking into account the shuffle mode.
- */
- public void setShuffleModeEnabled(boolean shuffleModeEnabled) {
- this.shuffleModeEnabled = shuffleModeEnabled;
- }
-
- /**
- * Returns the first {@link MediaPeriodInfo} to play, based on the specified playback position.
- */
- public MediaPeriodInfo getFirstMediaPeriodInfo(PlaybackInfo playbackInfo) {
- return getMediaPeriodInfo(playbackInfo.periodId, playbackInfo.contentPositionUs,
- playbackInfo.startPositionUs);
- }
-
- /**
- * Returns the {@link MediaPeriodInfo} following {@code currentMediaPeriodInfo}.
- *
- * @param currentMediaPeriodInfo The current media period info.
- * @param rendererOffsetUs The current renderer offset in microseconds.
- * @param rendererPositionUs The current renderer position in microseconds.
- * @return The following media period info, or {@code null} if it is not yet possible to get the
- * next media period info.
- */
- public MediaPeriodInfo getNextMediaPeriodInfo(MediaPeriodInfo currentMediaPeriodInfo,
- long rendererOffsetUs, long rendererPositionUs) {
- // TODO: This method is called repeatedly from ExoPlayerImplInternal.maybeUpdateLoadingPeriod
- // but if the timeline is not ready to provide the next period it can't return a non-null value
- // until the timeline is updated. Store whether the next timeline period is ready when the
- // timeline is updated, to avoid repeatedly checking the same timeline.
- if (currentMediaPeriodInfo.isLastInTimelinePeriod) {
- int nextPeriodIndex = timeline.getNextPeriodIndex(currentMediaPeriodInfo.id.periodIndex,
- period, window, repeatMode);
- if (nextPeriodIndex == C.INDEX_UNSET) {
- // We can't create a next period yet.
- return null;
- }
-
- long startPositionUs;
- int nextWindowIndex = timeline.getPeriod(nextPeriodIndex, period).windowIndex;
- if (timeline.getWindow(nextWindowIndex, window).firstPeriodIndex == nextPeriodIndex) {
- // We're starting to buffer a new window. When playback transitions to this window we'll
- // want it to be from its default start position. The expected delay until playback
- // transitions is equal the duration of media that's currently buffered (assuming no
- // interruptions). Hence we project the default start position forward by the duration of
- // the buffer, and start buffering from this point.
- long defaultPositionProjectionUs =
- rendererOffsetUs + currentMediaPeriodInfo.durationUs - rendererPositionUs;
- Pair defaultPosition = timeline.getPeriodPosition(window, period,
- nextWindowIndex, C.TIME_UNSET, Math.max(0, defaultPositionProjectionUs));
- if (defaultPosition == null) {
- return null;
- }
- nextPeriodIndex = defaultPosition.first;
- startPositionUs = defaultPosition.second;
- } else {
- startPositionUs = 0;
- }
- MediaPeriodId periodId = resolvePeriodPositionForAds(nextPeriodIndex, startPositionUs);
- return getMediaPeriodInfo(periodId, startPositionUs, startPositionUs);
- }
-
- MediaPeriodId currentPeriodId = currentMediaPeriodInfo.id;
- if (currentPeriodId.isAd()) {
- int currentAdGroupIndex = currentPeriodId.adGroupIndex;
- timeline.getPeriod(currentPeriodId.periodIndex, period);
- int adCountInCurrentAdGroup = period.getAdCountInAdGroup(currentAdGroupIndex);
- if (adCountInCurrentAdGroup == C.LENGTH_UNSET) {
- return null;
- }
- int nextAdIndexInAdGroup = currentPeriodId.adIndexInAdGroup + 1;
- if (nextAdIndexInAdGroup < adCountInCurrentAdGroup) {
- // Play the next ad in the ad group if it's available.
- return !period.isAdAvailable(currentAdGroupIndex, nextAdIndexInAdGroup) ? null
- : getMediaPeriodInfoForAd(currentPeriodId.periodIndex, currentAdGroupIndex,
- nextAdIndexInAdGroup, currentMediaPeriodInfo.contentPositionUs);
- } else {
- // Play content from the ad group position.
- int nextAdGroupIndex =
- period.getAdGroupIndexAfterPositionUs(currentMediaPeriodInfo.contentPositionUs);
- long endUs = nextAdGroupIndex == C.INDEX_UNSET ? C.TIME_END_OF_SOURCE
- : period.getAdGroupTimeUs(nextAdGroupIndex);
- return getMediaPeriodInfoForContent(currentPeriodId.periodIndex,
- currentMediaPeriodInfo.contentPositionUs, endUs);
- }
- } else if (currentMediaPeriodInfo.endPositionUs != C.TIME_END_OF_SOURCE) {
- // Play the next ad group if it's available.
- int nextAdGroupIndex =
- period.getAdGroupIndexForPositionUs(currentMediaPeriodInfo.endPositionUs);
- return !period.isAdAvailable(nextAdGroupIndex, 0) ? null
- : getMediaPeriodInfoForAd(currentPeriodId.periodIndex, nextAdGroupIndex, 0,
- currentMediaPeriodInfo.endPositionUs);
- } else {
- // Check if the postroll ad should be played.
- int adGroupCount = period.getAdGroupCount();
- if (adGroupCount == 0
- || period.getAdGroupTimeUs(adGroupCount - 1) != C.TIME_END_OF_SOURCE
- || period.hasPlayedAdGroup(adGroupCount - 1)
- || !period.isAdAvailable(adGroupCount - 1, 0)) {
- return null;
- }
- long contentDurationUs = period.getDurationUs();
- return getMediaPeriodInfoForAd(currentPeriodId.periodIndex, adGroupCount - 1, 0,
- contentDurationUs);
- }
- }
-
- /**
- * Resolves the specified timeline period and position to a {@link MediaPeriodId} that should be
- * played, returning an identifier for an ad group if one needs to be played before the specified
- * position, or an identifier for a content media period if not.
- */
- public MediaPeriodId resolvePeriodPositionForAds(int periodIndex, long positionUs) {
- timeline.getPeriod(periodIndex, period);
- int adGroupIndex = period.getAdGroupIndexForPositionUs(positionUs);
- if (adGroupIndex == C.INDEX_UNSET) {
- return new MediaPeriodId(periodIndex);
- } else {
- int adIndexInAdGroup = period.getPlayedAdCount(adGroupIndex);
- return new MediaPeriodId(periodIndex, adGroupIndex, adIndexInAdGroup);
- }
- }
-
- /**
- * Returns the {@code mediaPeriodInfo} updated to take into account the current timeline.
- */
- public MediaPeriodInfo getUpdatedMediaPeriodInfo(MediaPeriodInfo mediaPeriodInfo) {
- return getUpdatedMediaPeriodInfo(mediaPeriodInfo, mediaPeriodInfo.id);
- }
-
- /**
- * Returns the {@code mediaPeriodInfo} updated to take into account the current timeline,
- * resetting the identifier of the media period to the specified {@code newPeriodIndex}.
- */
- public MediaPeriodInfo getUpdatedMediaPeriodInfo(MediaPeriodInfo mediaPeriodInfo,
- int newPeriodIndex) {
- return getUpdatedMediaPeriodInfo(mediaPeriodInfo,
- mediaPeriodInfo.id.copyWithPeriodIndex(newPeriodIndex));
- }
-
- // Internal methods.
-
- private MediaPeriodInfo getUpdatedMediaPeriodInfo(MediaPeriodInfo info, MediaPeriodId newId) {
- long startPositionUs = info.startPositionUs;
- long endPositionUs = info.endPositionUs;
- boolean isLastInPeriod = isLastInPeriod(newId, endPositionUs);
- boolean isLastInTimeline = isLastInTimeline(newId, isLastInPeriod);
- timeline.getPeriod(newId.periodIndex, period);
- long durationUs = newId.isAd()
- ? period.getAdDurationUs(newId.adGroupIndex, newId.adIndexInAdGroup)
- : (endPositionUs == C.TIME_END_OF_SOURCE ? period.getDurationUs() : endPositionUs);
- return new MediaPeriodInfo(newId, startPositionUs, endPositionUs, info.contentPositionUs,
- durationUs, isLastInPeriod, isLastInTimeline);
- }
-
- private MediaPeriodInfo getMediaPeriodInfo(MediaPeriodId id, long contentPositionUs,
- long startPositionUs) {
- timeline.getPeriod(id.periodIndex, period);
- if (id.isAd()) {
- if (!period.isAdAvailable(id.adGroupIndex, id.adIndexInAdGroup)) {
- return null;
- }
- return getMediaPeriodInfoForAd(id.periodIndex, id.adGroupIndex, id.adIndexInAdGroup,
- contentPositionUs);
- } else {
- int nextAdGroupIndex = period.getAdGroupIndexAfterPositionUs(startPositionUs);
- long endUs = nextAdGroupIndex == C.INDEX_UNSET ? C.TIME_END_OF_SOURCE
- : period.getAdGroupTimeUs(nextAdGroupIndex);
- return getMediaPeriodInfoForContent(id.periodIndex, startPositionUs, endUs);
- }
- }
-
- private MediaPeriodInfo getMediaPeriodInfoForAd(int periodIndex, int adGroupIndex,
- int adIndexInAdGroup, long contentPositionUs) {
- MediaPeriodId id = new MediaPeriodId(periodIndex, adGroupIndex, adIndexInAdGroup);
- boolean isLastInPeriod = isLastInPeriod(id, C.TIME_END_OF_SOURCE);
- boolean isLastInTimeline = isLastInTimeline(id, isLastInPeriod);
- long durationUs = timeline.getPeriod(id.periodIndex, period)
- .getAdDurationUs(id.adGroupIndex, id.adIndexInAdGroup);
- long startPositionUs = adIndexInAdGroup == period.getPlayedAdCount(adGroupIndex)
- ? period.getAdResumePositionUs() : 0;
- return new MediaPeriodInfo(id, startPositionUs, C.TIME_END_OF_SOURCE, contentPositionUs,
- durationUs, isLastInPeriod, isLastInTimeline);
- }
-
- private MediaPeriodInfo getMediaPeriodInfoForContent(int periodIndex, long startPositionUs,
- long endUs) {
- MediaPeriodId id = new MediaPeriodId(periodIndex);
- boolean isLastInPeriod = isLastInPeriod(id, endUs);
- boolean isLastInTimeline = isLastInTimeline(id, isLastInPeriod);
- timeline.getPeriod(id.periodIndex, period);
- long durationUs = endUs == C.TIME_END_OF_SOURCE ? period.getDurationUs() : endUs;
- return new MediaPeriodInfo(id, startPositionUs, endUs, C.TIME_UNSET, durationUs, isLastInPeriod,
- isLastInTimeline);
- }
-
- private boolean isLastInPeriod(MediaPeriodId id, long endPositionUs) {
- int adGroupCount = timeline.getPeriod(id.periodIndex, period).getAdGroupCount();
- if (adGroupCount == 0) {
- return true;
- }
-
- int lastAdGroupIndex = adGroupCount - 1;
- boolean isAd = id.isAd();
- if (period.getAdGroupTimeUs(lastAdGroupIndex) != C.TIME_END_OF_SOURCE) {
- // There's no postroll ad.
- return !isAd && endPositionUs == C.TIME_END_OF_SOURCE;
- }
-
- int postrollAdCount = period.getAdCountInAdGroup(lastAdGroupIndex);
- if (postrollAdCount == C.LENGTH_UNSET) {
- // We won't know if this is the last ad until we know how many postroll ads there are.
- return false;
- }
-
- boolean isLastAd = isAd && id.adGroupIndex == lastAdGroupIndex
- && id.adIndexInAdGroup == postrollAdCount - 1;
- return isLastAd || (!isAd && period.getPlayedAdCount(lastAdGroupIndex) == postrollAdCount);
- }
-
- private boolean isLastInTimeline(MediaPeriodId id, boolean isLastMediaPeriodInPeriod) {
- int windowIndex = timeline.getPeriod(id.periodIndex, period).windowIndex;
- return !timeline.getWindow(windowIndex, window).isDynamic
- && timeline.isLastPeriod(id.periodIndex, period, window, repeatMode)
- && isLastMediaPeriodInPeriod;
- }
-
-}
diff --git a/library/core/src/main/java/com/google/android/exoplayer2/MediaPeriodQueue.java b/library/core/src/main/java/com/google/android/exoplayer2/MediaPeriodQueue.java
new file mode 100644
index 0000000000..717f873622
--- /dev/null
+++ b/library/core/src/main/java/com/google/android/exoplayer2/MediaPeriodQueue.java
@@ -0,0 +1,750 @@
+/*
+ * Copyright (C) 2018 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package com.google.android.exoplayer2;
+
+import android.support.annotation.Nullable;
+import android.util.Pair;
+import com.google.android.exoplayer2.Player.RepeatMode;
+import com.google.android.exoplayer2.source.MediaPeriod;
+import com.google.android.exoplayer2.source.MediaSource;
+import com.google.android.exoplayer2.source.MediaSource.MediaPeriodId;
+import com.google.android.exoplayer2.trackselection.TrackSelector;
+import com.google.android.exoplayer2.upstream.Allocator;
+import com.google.android.exoplayer2.util.Assertions;
+
+/**
+ * Holds a queue of media periods, from the currently playing media period at the front to the
+ * loading media period at the end of the queue, with methods for controlling loading and updating
+ * the queue. Also has a reference to the media period currently being read.
+ */
+@SuppressWarnings("UngroupedOverloads")
+/* package */ final class MediaPeriodQueue {
+
+ /**
+ * Limits the maximum number of periods to buffer ahead of the current playing period. The
+ * buffering policy normally prevents buffering too far ahead, but the policy could allow too many
+ * small periods to be buffered if the period count were not limited.
+ */
+ private static final int MAXIMUM_BUFFER_AHEAD_PERIODS = 100;
+
+ private final Timeline.Period period;
+ private final Timeline.Window window;
+
+ private long nextWindowSequenceNumber;
+ private Timeline timeline;
+ private @RepeatMode int repeatMode;
+ private boolean shuffleModeEnabled;
+ private MediaPeriodHolder playing;
+ private MediaPeriodHolder reading;
+ private MediaPeriodHolder loading;
+ private int length;
+ private Object oldFrontPeriodUid;
+ private long oldFrontPeriodWindowSequenceNumber;
+
+ /** Creates a new media period queue. */
+ public MediaPeriodQueue() {
+ period = new Timeline.Period();
+ window = new Timeline.Window();
+ }
+
+ /**
+ * Sets the {@link Timeline}. Call {@link #updateQueuedPeriods(MediaPeriodId, long)} to update the
+ * queued media periods to take into account the new timeline.
+ */
+ public void setTimeline(Timeline timeline) {
+ this.timeline = timeline;
+ }
+
+ /**
+ * Sets the {@link RepeatMode} and returns whether the repeat mode change has been fully handled.
+ * If not, it is necessary to seek to the current playback position.
+ */
+ public boolean updateRepeatMode(@RepeatMode int repeatMode) {
+ this.repeatMode = repeatMode;
+ return updateForPlaybackModeChange();
+ }
+
+ /**
+ * Sets whether shuffling is enabled and returns whether the shuffle mode change has been fully
+ * handled. If not, it is necessary to seek to the current playback position.
+ */
+ public boolean updateShuffleModeEnabled(boolean shuffleModeEnabled) {
+ this.shuffleModeEnabled = shuffleModeEnabled;
+ return updateForPlaybackModeChange();
+ }
+
+ /** Returns whether {@code mediaPeriod} is the current loading media period. */
+ public boolean isLoading(MediaPeriod mediaPeriod) {
+ return loading != null && loading.mediaPeriod == mediaPeriod;
+ }
+
+ /**
+ * If there is a loading period, reevaluates its buffer.
+ *
+ * @param rendererPositionUs The current renderer position.
+ */
+ public void reevaluateBuffer(long rendererPositionUs) {
+ if (loading != null) {
+ loading.reevaluateBuffer(rendererPositionUs);
+ }
+ }
+
+ /** Returns whether a new loading media period should be enqueued, if available. */
+ public boolean shouldLoadNextMediaPeriod() {
+ return loading == null
+ || (!loading.info.isFinal
+ && loading.isFullyBuffered()
+ && loading.info.durationUs != C.TIME_UNSET
+ && length < MAXIMUM_BUFFER_AHEAD_PERIODS);
+ }
+
+ /**
+ * Returns the {@link MediaPeriodInfo} for the next media period to load.
+ *
+ * @param rendererPositionUs The current renderer position.
+ * @param playbackInfo The current playback information.
+ * @return The {@link MediaPeriodInfo} for the next media period to load, or {@code null} if not
+ * yet known.
+ */
+ public @Nullable MediaPeriodInfo getNextMediaPeriodInfo(
+ long rendererPositionUs, PlaybackInfo playbackInfo) {
+ return loading == null
+ ? getFirstMediaPeriodInfo(playbackInfo)
+ : getFollowingMediaPeriodInfo(loading, rendererPositionUs);
+ }
+
+ /**
+ * Enqueues a new media period based on the specified information as the new loading media period,
+ * and returns it.
+ *
+ * @param rendererCapabilities The renderer capabilities.
+ * @param trackSelector The track selector.
+ * @param allocator The allocator.
+ * @param mediaSource The media source that produced the media period.
+ * @param uid The unique identifier for the containing timeline period.
+ * @param info Information used to identify this media period in its timeline period.
+ */
+ public MediaPeriod enqueueNextMediaPeriod(
+ RendererCapabilities[] rendererCapabilities,
+ TrackSelector trackSelector,
+ Allocator allocator,
+ MediaSource mediaSource,
+ Object uid,
+ MediaPeriodInfo info) {
+ long rendererPositionOffsetUs =
+ loading == null
+ ? info.startPositionUs
+ : (loading.getRendererOffset() + loading.info.durationUs);
+ MediaPeriodHolder newPeriodHolder =
+ new MediaPeriodHolder(
+ rendererCapabilities,
+ rendererPositionOffsetUs,
+ trackSelector,
+ allocator,
+ mediaSource,
+ uid,
+ info);
+ if (loading != null) {
+ Assertions.checkState(hasPlayingPeriod());
+ loading.next = newPeriodHolder;
+ }
+ oldFrontPeriodUid = null;
+ loading = newPeriodHolder;
+ length++;
+ return newPeriodHolder.mediaPeriod;
+ }
+
+ /**
+ * Returns the loading period holder which is at the end of the queue, or null if the queue is
+ * empty.
+ */
+ public MediaPeriodHolder getLoadingPeriod() {
+ return loading;
+ }
+
+ /**
+ * Returns the playing period holder which is at the front of the queue, or null if the queue is
+ * empty or hasn't started playing.
+ */
+ public MediaPeriodHolder getPlayingPeriod() {
+ return playing;
+ }
+
+ /**
+ * Returns the reading period holder, or null if the queue is empty or the player hasn't started
+ * reading.
+ */
+ public MediaPeriodHolder getReadingPeriod() {
+ return reading;
+ }
+
+ /**
+ * Returns the period holder in the front of the queue which is the playing period holder when
+ * playing, or null if the queue is empty.
+ */
+ public MediaPeriodHolder getFrontPeriod() {
+ return hasPlayingPeriod() ? playing : loading;
+ }
+
+ /** Returns whether the reading and playing period holders are set. */
+ public boolean hasPlayingPeriod() {
+ return playing != null;
+ }
+
+ /**
+ * Continues reading from the next period holder in the queue.
+ *
+ * @return The updated reading period holder.
+ */
+ public MediaPeriodHolder advanceReadingPeriod() {
+ Assertions.checkState(reading != null && reading.next != null);
+ reading = reading.next;
+ return reading;
+ }
+
+ /**
+ * Dequeues the playing period holder from the front of the queue and advances the playing period
+ * holder to be the next item in the queue. If the playing period holder is unset, set it to the
+ * item in the front of the queue.
+ *
+ * @return The updated playing period holder, or null if the queue is or becomes empty.
+ */
+ public MediaPeriodHolder advancePlayingPeriod() {
+ if (playing != null) {
+ if (playing == reading) {
+ reading = playing.next;
+ }
+ playing.release();
+ playing = playing.next;
+ length--;
+ if (length == 0) {
+ loading = null;
+ }
+ } else {
+ playing = loading;
+ reading = loading;
+ }
+ return playing;
+ }
+
+ /**
+ * Removes all period holders after the given period holder. This process may also remove the
+ * currently reading period holder. If that is the case, the reading period holder is set to be
+ * the same as the playing period holder at the front of the queue.
+ *
+ * @param mediaPeriodHolder The media period holder that shall be the new end of the queue.
+ * @return Whether the reading period has been removed.
+ */
+ public boolean removeAfter(MediaPeriodHolder mediaPeriodHolder) {
+ Assertions.checkState(mediaPeriodHolder != null);
+ boolean removedReading = false;
+ loading = mediaPeriodHolder;
+ while (mediaPeriodHolder.next != null) {
+ mediaPeriodHolder = mediaPeriodHolder.next;
+ if (mediaPeriodHolder == reading) {
+ reading = playing;
+ removedReading = true;
+ }
+ mediaPeriodHolder.release();
+ length--;
+ }
+ loading.next = null;
+ return removedReading;
+ }
+
+ /**
+ * Clears the queue.
+ *
+ * @param keepFrontPeriodUid Whether the queue should keep the id of the media period in the front
+ * of the queue (typically the playing one) for later reuse.
+ */
+ public void clear(boolean keepFrontPeriodUid) {
+ MediaPeriodHolder front = getFrontPeriod();
+ if (front != null) {
+ oldFrontPeriodUid = keepFrontPeriodUid ? front.uid : null;
+ oldFrontPeriodWindowSequenceNumber = front.info.id.windowSequenceNumber;
+ front.release();
+ removeAfter(front);
+ } else if (!keepFrontPeriodUid) {
+ oldFrontPeriodUid = null;
+ }
+ playing = null;
+ loading = null;
+ reading = null;
+ length = 0;
+ }
+
+ /**
+ * Updates media periods in the queue to take into account the latest timeline, and returns
+ * whether the timeline change has been fully handled. If not, it is necessary to seek to the
+ * current playback position. The method assumes that the first media period in the queue is still
+ * consistent with the new timeline.
+ *
+ * @param playingPeriodId The current playing media period identifier.
+ * @param rendererPositionUs The current renderer position in microseconds.
+ * @return Whether the timeline change has been handled completely.
+ */
+ public boolean updateQueuedPeriods(MediaPeriodId playingPeriodId, long rendererPositionUs) {
+ // TODO: Merge this into setTimeline so that the queue gets updated as soon as the new timeline
+ // is set, once all cases handled by ExoPlayerImplInternal.handleSourceInfoRefreshed can be
+ // handled here.
+ int periodIndex = playingPeriodId.periodIndex;
+ // The front period is either playing now, or is being loaded and will become the playing
+ // period.
+ MediaPeriodHolder previousPeriodHolder = null;
+ MediaPeriodHolder periodHolder = getFrontPeriod();
+ while (periodHolder != null) {
+ if (previousPeriodHolder == null) {
+ periodHolder.info = getUpdatedMediaPeriodInfo(periodHolder.info, periodIndex);
+ } else {
+ // Check this period holder still follows the previous one, based on the new timeline.
+ if (periodIndex == C.INDEX_UNSET
+ || !periodHolder.uid.equals(timeline.getPeriod(periodIndex, period, true).uid)) {
+ // The holder uid is inconsistent with the new timeline.
+ return !removeAfter(previousPeriodHolder);
+ }
+ MediaPeriodInfo periodInfo =
+ getFollowingMediaPeriodInfo(previousPeriodHolder, rendererPositionUs);
+ if (periodInfo == null) {
+ // We've loaded a next media period that is not in the new timeline.
+ return !removeAfter(previousPeriodHolder);
+ }
+ // Update the period index.
+ periodHolder.info = getUpdatedMediaPeriodInfo(periodHolder.info, periodIndex);
+ // Check the media period information matches the new timeline.
+ if (!canKeepMediaPeriodHolder(periodHolder, periodInfo)) {
+ return !removeAfter(previousPeriodHolder);
+ }
+ }
+
+ if (periodHolder.info.isLastInTimelinePeriod) {
+ // Move on to the next timeline period index, if there is one.
+ periodIndex =
+ timeline.getNextPeriodIndex(
+ periodIndex, period, window, repeatMode, shuffleModeEnabled);
+ }
+
+ previousPeriodHolder = periodHolder;
+ periodHolder = periodHolder.next;
+ }
+ return true;
+ }
+
+ /**
+ * Returns new media period info based on specified {@code mediaPeriodInfo} but taking into
+ * account the current timeline, and with the period index updated to {@code newPeriodIndex}.
+ *
+ * @param mediaPeriodInfo Media period info for a media period based on an old timeline.
+ * @param newPeriodIndex The new period index in the new timeline for the existing media period.
+ * @return The updated media period info for the current timeline.
+ */
+ public MediaPeriodInfo getUpdatedMediaPeriodInfo(
+ MediaPeriodInfo mediaPeriodInfo, int newPeriodIndex) {
+ return getUpdatedMediaPeriodInfo(
+ mediaPeriodInfo, mediaPeriodInfo.id.copyWithPeriodIndex(newPeriodIndex));
+ }
+
+ /**
+ * Resolves the specified timeline period and position to a {@link MediaPeriodId} that should be
+ * played, returning an identifier for an ad group if one needs to be played before the specified
+ * position, or an identifier for a content media period if not.
+ *
+ * @param periodIndex The index of the timeline period to play.
+ * @param positionUs The next content position in the period to play.
+ * @return The identifier for the first media period to play, taking into account unplayed ads.
+ */
+ public MediaPeriodId resolveMediaPeriodIdForAds(int periodIndex, long positionUs) {
+ long windowSequenceNumber = resolvePeriodIndexToWindowSequenceNumber(periodIndex);
+ return resolveMediaPeriodIdForAds(periodIndex, positionUs, windowSequenceNumber);
+ }
+
+ // Internal methods.
+
+ /**
+ * Resolves the specified timeline period and position to a {@link MediaPeriodId} that should be
+ * played, returning an identifier for an ad group if one needs to be played before the specified
+ * position, or an identifier for a content media period if not.
+ *
+ * @param periodIndex The index of the timeline period to play.
+ * @param positionUs The next content position in the period to play.
+ * @param windowSequenceNumber The sequence number of the window in the buffered sequence of
+ * windows this period is part of.
+ * @return The identifier for the first media period to play, taking into account unplayed ads.
+ */
+ private MediaPeriodId resolveMediaPeriodIdForAds(
+ int periodIndex, long positionUs, long windowSequenceNumber) {
+ timeline.getPeriod(periodIndex, period);
+ int adGroupIndex = period.getAdGroupIndexForPositionUs(positionUs);
+ if (adGroupIndex == C.INDEX_UNSET) {
+ return new MediaPeriodId(periodIndex, windowSequenceNumber);
+ } else {
+ int adIndexInAdGroup = period.getFirstAdIndexToPlay(adGroupIndex);
+ return new MediaPeriodId(periodIndex, adGroupIndex, adIndexInAdGroup, windowSequenceNumber);
+ }
+ }
+
+ /**
+ * Resolves the specified period index to a corresponding window sequence number. Either by
+ * reusing the window sequence number of an existing matching media period or by creating a new
+ * window sequence number.
+ *
+ * @param periodIndex The index of the timeline period.
+ * @return A window sequence number for a media period created for this timeline period.
+ */
+ private long resolvePeriodIndexToWindowSequenceNumber(int periodIndex) {
+ Object periodUid = timeline.getPeriod(periodIndex, period, /* setIds= */ true).uid;
+ int windowIndex = period.windowIndex;
+ if (oldFrontPeriodUid != null) {
+ int oldFrontPeriodIndex = timeline.getIndexOfPeriod(oldFrontPeriodUid);
+ if (oldFrontPeriodIndex != C.INDEX_UNSET) {
+ int oldFrontWindowIndex = timeline.getPeriod(oldFrontPeriodIndex, period).windowIndex;
+ if (oldFrontWindowIndex == windowIndex) {
+ // Try to match old front uid after the queue has been cleared.
+ return oldFrontPeriodWindowSequenceNumber;
+ }
+ }
+ }
+ MediaPeriodHolder mediaPeriodHolder = getFrontPeriod();
+ while (mediaPeriodHolder != null) {
+ if (mediaPeriodHolder.uid.equals(periodUid)) {
+ // Reuse window sequence number of first exact period match.
+ return mediaPeriodHolder.info.id.windowSequenceNumber;
+ }
+ mediaPeriodHolder = mediaPeriodHolder.next;
+ }
+ mediaPeriodHolder = getFrontPeriod();
+ while (mediaPeriodHolder != null) {
+ int indexOfHolderInTimeline = timeline.getIndexOfPeriod(mediaPeriodHolder.uid);
+ if (indexOfHolderInTimeline != C.INDEX_UNSET) {
+ int holderWindowIndex = timeline.getPeriod(indexOfHolderInTimeline, period).windowIndex;
+ if (holderWindowIndex == windowIndex) {
+ // As an alternative, try to match other periods of the same window.
+ return mediaPeriodHolder.info.id.windowSequenceNumber;
+ }
+ }
+ mediaPeriodHolder = mediaPeriodHolder.next;
+ }
+ // If no match is found, create new sequence number.
+ return nextWindowSequenceNumber++;
+ }
+
+ /**
+ * Returns whether {@code periodHolder} can be kept for playing the media period described by
+ * {@code info}.
+ */
+ private boolean canKeepMediaPeriodHolder(MediaPeriodHolder periodHolder, MediaPeriodInfo info) {
+ MediaPeriodInfo periodHolderInfo = periodHolder.info;
+ return periodHolderInfo.startPositionUs == info.startPositionUs
+ && periodHolderInfo.endPositionUs == info.endPositionUs
+ && periodHolderInfo.id.equals(info.id);
+ }
+
+ /**
+ * Updates the queue for any playback mode change, and returns whether the change was fully
+ * handled. If not, it is necessary to seek to the current playback position.
+ */
+ private boolean updateForPlaybackModeChange() {
+ // Find the last existing period holder that matches the new period order.
+ MediaPeriodHolder lastValidPeriodHolder = getFrontPeriod();
+ if (lastValidPeriodHolder == null) {
+ return true;
+ }
+ while (true) {
+ int nextPeriodIndex =
+ timeline.getNextPeriodIndex(
+ lastValidPeriodHolder.info.id.periodIndex,
+ period,
+ window,
+ repeatMode,
+ shuffleModeEnabled);
+ while (lastValidPeriodHolder.next != null
+ && !lastValidPeriodHolder.info.isLastInTimelinePeriod) {
+ lastValidPeriodHolder = lastValidPeriodHolder.next;
+ }
+ if (nextPeriodIndex == C.INDEX_UNSET
+ || lastValidPeriodHolder.next == null
+ || lastValidPeriodHolder.next.info.id.periodIndex != nextPeriodIndex) {
+ break;
+ }
+ lastValidPeriodHolder = lastValidPeriodHolder.next;
+ }
+
+ // Release any period holders that don't match the new period order.
+ boolean readingPeriodRemoved = removeAfter(lastValidPeriodHolder);
+
+ // Update the period info for the last holder, as it may now be the last period in the timeline.
+ lastValidPeriodHolder.info =
+ getUpdatedMediaPeriodInfo(lastValidPeriodHolder.info, lastValidPeriodHolder.info.id);
+
+ // If renderers may have read from a period that's been removed, it is necessary to restart.
+ return !readingPeriodRemoved || !hasPlayingPeriod();
+ }
+
+ /**
+ * Returns the first {@link MediaPeriodInfo} to play, based on the specified playback position.
+ */
+ private MediaPeriodInfo getFirstMediaPeriodInfo(PlaybackInfo playbackInfo) {
+ return getMediaPeriodInfo(
+ playbackInfo.periodId, playbackInfo.contentPositionUs, playbackInfo.startPositionUs);
+ }
+
+ /**
+ * Returns the {@link MediaPeriodInfo} for the media period following {@code mediaPeriodHolder}'s
+ * media period.
+ *
+ * @param mediaPeriodHolder The media period holder.
+ * @param rendererPositionUs The current renderer position in microseconds.
+ * @return The following media period's info, or {@code null} if it is not yet possible to get the
+ * next media period info.
+ */
+ private @Nullable MediaPeriodInfo getFollowingMediaPeriodInfo(
+ MediaPeriodHolder mediaPeriodHolder, long rendererPositionUs) {
+ // TODO: This method is called repeatedly from ExoPlayerImplInternal.maybeUpdateLoadingPeriod
+ // but if the timeline is not ready to provide the next period it can't return a non-null value
+ // until the timeline is updated. Store whether the next timeline period is ready when the
+ // timeline is updated, to avoid repeatedly checking the same timeline.
+ MediaPeriodInfo mediaPeriodInfo = mediaPeriodHolder.info;
+ if (mediaPeriodInfo.isLastInTimelinePeriod) {
+ int nextPeriodIndex =
+ timeline.getNextPeriodIndex(
+ mediaPeriodInfo.id.periodIndex, period, window, repeatMode, shuffleModeEnabled);
+ if (nextPeriodIndex == C.INDEX_UNSET) {
+ // We can't create a next period yet.
+ return null;
+ }
+
+ long startPositionUs;
+ int nextWindowIndex =
+ timeline.getPeriod(nextPeriodIndex, period, /* setIds= */ true).windowIndex;
+ Object nextPeriodUid = period.uid;
+ long windowSequenceNumber = mediaPeriodInfo.id.windowSequenceNumber;
+ if (timeline.getWindow(nextWindowIndex, window).firstPeriodIndex == nextPeriodIndex) {
+ // We're starting to buffer a new window. When playback transitions to this window we'll
+ // want it to be from its default start position. The expected delay until playback
+ * transitions is equal to the duration of media that's currently buffered (assuming no
+ // interruptions). Hence we project the default start position forward by the duration of
+ // the buffer, and start buffering from this point.
+ long defaultPositionProjectionUs =
+ mediaPeriodHolder.getRendererOffset() + mediaPeriodInfo.durationUs - rendererPositionUs;
+ Pair<Integer, Long> defaultPosition =
+ timeline.getPeriodPosition(
+ window,
+ period,
+ nextWindowIndex,
+ C.TIME_UNSET,
+ Math.max(0, defaultPositionProjectionUs));
+ if (defaultPosition == null) {
+ return null;
+ }
+ nextPeriodIndex = defaultPosition.first;
+ startPositionUs = defaultPosition.second;
+ if (mediaPeriodHolder.next != null && mediaPeriodHolder.next.uid.equals(nextPeriodUid)) {
+ windowSequenceNumber = mediaPeriodHolder.next.info.id.windowSequenceNumber;
+ } else {
+ windowSequenceNumber = nextWindowSequenceNumber++;
+ }
+ } else {
+ startPositionUs = 0;
+ }
+ MediaPeriodId periodId =
+ resolveMediaPeriodIdForAds(nextPeriodIndex, startPositionUs, windowSequenceNumber);
+ return getMediaPeriodInfo(periodId, startPositionUs, startPositionUs);
+ }
+
+ MediaPeriodId currentPeriodId = mediaPeriodInfo.id;
+ timeline.getPeriod(currentPeriodId.periodIndex, period);
+ if (currentPeriodId.isAd()) {
+ int adGroupIndex = currentPeriodId.adGroupIndex;
+ int adCountInCurrentAdGroup = period.getAdCountInAdGroup(adGroupIndex);
+ if (adCountInCurrentAdGroup == C.LENGTH_UNSET) {
+ return null;
+ }
+ int nextAdIndexInAdGroup =
+ period.getNextAdIndexToPlay(adGroupIndex, currentPeriodId.adIndexInAdGroup);
+ if (nextAdIndexInAdGroup < adCountInCurrentAdGroup) {
+ // Play the next ad in the ad group if it's available.
+ return !period.isAdAvailable(adGroupIndex, nextAdIndexInAdGroup)
+ ? null
+ : getMediaPeriodInfoForAd(
+ currentPeriodId.periodIndex,
+ adGroupIndex,
+ nextAdIndexInAdGroup,
+ mediaPeriodInfo.contentPositionUs,
+ currentPeriodId.windowSequenceNumber);
+ } else {
+ // Play content from the ad group position.
+ return getMediaPeriodInfoForContent(
+ currentPeriodId.periodIndex,
+ mediaPeriodInfo.contentPositionUs,
+ currentPeriodId.windowSequenceNumber);
+ }
+ } else if (mediaPeriodInfo.endPositionUs != C.TIME_END_OF_SOURCE) {
+ // Play the next ad group if it's available.
+ int nextAdGroupIndex = period.getAdGroupIndexForPositionUs(mediaPeriodInfo.endPositionUs);
+ if (nextAdGroupIndex == C.INDEX_UNSET) {
+ // The next ad group can't be played. Play content from the ad group position instead.
+ return getMediaPeriodInfoForContent(
+ currentPeriodId.periodIndex,
+ mediaPeriodInfo.endPositionUs,
+ currentPeriodId.windowSequenceNumber);
+ }
+ int adIndexInAdGroup = period.getFirstAdIndexToPlay(nextAdGroupIndex);
+ return !period.isAdAvailable(nextAdGroupIndex, adIndexInAdGroup)
+ ? null
+ : getMediaPeriodInfoForAd(
+ currentPeriodId.periodIndex,
+ nextAdGroupIndex,
+ adIndexInAdGroup,
+ mediaPeriodInfo.endPositionUs,
+ currentPeriodId.windowSequenceNumber);
+ } else {
+ // Check if the postroll ad should be played.
+ int adGroupCount = period.getAdGroupCount();
+ if (adGroupCount == 0) {
+ return null;
+ }
+ int adGroupIndex = adGroupCount - 1;
+ if (period.getAdGroupTimeUs(adGroupIndex) != C.TIME_END_OF_SOURCE
+ || period.hasPlayedAdGroup(adGroupIndex)) {
+ return null;
+ }
+ int adIndexInAdGroup = period.getFirstAdIndexToPlay(adGroupIndex);
+ if (!period.isAdAvailable(adGroupIndex, adIndexInAdGroup)) {
+ return null;
+ }
+ long contentDurationUs = period.getDurationUs();
+ return getMediaPeriodInfoForAd(
+ currentPeriodId.periodIndex,
+ adGroupIndex,
+ adIndexInAdGroup,
+ contentDurationUs,
+ currentPeriodId.windowSequenceNumber);
+ }
+ }
+
+ private MediaPeriodInfo getUpdatedMediaPeriodInfo(MediaPeriodInfo info, MediaPeriodId newId) {
+ long startPositionUs = info.startPositionUs;
+ long endPositionUs = info.endPositionUs;
+ boolean isLastInPeriod = isLastInPeriod(newId, endPositionUs);
+ boolean isLastInTimeline = isLastInTimeline(newId, isLastInPeriod);
+ timeline.getPeriod(newId.periodIndex, period);
+ long durationUs =
+ newId.isAd()
+ ? period.getAdDurationUs(newId.adGroupIndex, newId.adIndexInAdGroup)
+ : (endPositionUs == C.TIME_END_OF_SOURCE ? period.getDurationUs() : endPositionUs);
+ return new MediaPeriodInfo(
+ newId,
+ startPositionUs,
+ endPositionUs,
+ info.contentPositionUs,
+ durationUs,
+ isLastInPeriod,
+ isLastInTimeline);
+ }
+
+ private MediaPeriodInfo getMediaPeriodInfo(
+ MediaPeriodId id, long contentPositionUs, long startPositionUs) {
+ timeline.getPeriod(id.periodIndex, period);
+ if (id.isAd()) {
+ if (!period.isAdAvailable(id.adGroupIndex, id.adIndexInAdGroup)) {
+ return null;
+ }
+ return getMediaPeriodInfoForAd(
+ id.periodIndex,
+ id.adGroupIndex,
+ id.adIndexInAdGroup,
+ contentPositionUs,
+ id.windowSequenceNumber);
+ } else {
+ return getMediaPeriodInfoForContent(id.periodIndex, startPositionUs, id.windowSequenceNumber);
+ }
+ }
+
+ private MediaPeriodInfo getMediaPeriodInfoForAd(
+ int periodIndex,
+ int adGroupIndex,
+ int adIndexInAdGroup,
+ long contentPositionUs,
+ long windowSequenceNumber) {
+ MediaPeriodId id =
+ new MediaPeriodId(periodIndex, adGroupIndex, adIndexInAdGroup, windowSequenceNumber);
+ boolean isLastInPeriod = isLastInPeriod(id, C.TIME_END_OF_SOURCE);
+ boolean isLastInTimeline = isLastInTimeline(id, isLastInPeriod);
+ long durationUs =
+ timeline
+ .getPeriod(id.periodIndex, period)
+ .getAdDurationUs(id.adGroupIndex, id.adIndexInAdGroup);
+ long startPositionUs =
+ adIndexInAdGroup == period.getFirstAdIndexToPlay(adGroupIndex)
+ ? period.getAdResumePositionUs()
+ : 0;
+ return new MediaPeriodInfo(
+ id,
+ startPositionUs,
+ C.TIME_END_OF_SOURCE,
+ contentPositionUs,
+ durationUs,
+ isLastInPeriod,
+ isLastInTimeline);
+ }
+
+ private MediaPeriodInfo getMediaPeriodInfoForContent(
+ int periodIndex, long startPositionUs, long windowSequenceNumber) {
+ MediaPeriodId id = new MediaPeriodId(periodIndex, windowSequenceNumber);
+ timeline.getPeriod(id.periodIndex, period);
+ int nextAdGroupIndex = period.getAdGroupIndexAfterPositionUs(startPositionUs);
+ long endUs =
+ nextAdGroupIndex == C.INDEX_UNSET
+ ? C.TIME_END_OF_SOURCE
+ : period.getAdGroupTimeUs(nextAdGroupIndex);
+ boolean isLastInPeriod = isLastInPeriod(id, endUs);
+ boolean isLastInTimeline = isLastInTimeline(id, isLastInPeriod);
+ long durationUs = endUs == C.TIME_END_OF_SOURCE ? period.getDurationUs() : endUs;
+ return new MediaPeriodInfo(
+ id, startPositionUs, endUs, C.TIME_UNSET, durationUs, isLastInPeriod, isLastInTimeline);
+ }
+
+ private boolean isLastInPeriod(MediaPeriodId id, long endPositionUs) {
+ int adGroupCount = timeline.getPeriod(id.periodIndex, period).getAdGroupCount();
+ if (adGroupCount == 0) {
+ return true;
+ }
+
+ int lastAdGroupIndex = adGroupCount - 1;
+ boolean isAd = id.isAd();
+ if (period.getAdGroupTimeUs(lastAdGroupIndex) != C.TIME_END_OF_SOURCE) {
+ // There's no postroll ad.
+ return !isAd && endPositionUs == C.TIME_END_OF_SOURCE;
+ }
+
+ int postrollAdCount = period.getAdCountInAdGroup(lastAdGroupIndex);
+ if (postrollAdCount == C.LENGTH_UNSET) {
+ // We won't know if this is the last ad until we know how many postroll ads there are.
+ return false;
+ }
+
+ boolean isLastAd =
+ isAd && id.adGroupIndex == lastAdGroupIndex && id.adIndexInAdGroup == postrollAdCount - 1;
+ return isLastAd || (!isAd && period.getFirstAdIndexToPlay(lastAdGroupIndex) == postrollAdCount);
+ }
+
+ private boolean isLastInTimeline(MediaPeriodId id, boolean isLastMediaPeriodInPeriod) {
+ int windowIndex = timeline.getPeriod(id.periodIndex, period).windowIndex;
+ return !timeline.getWindow(windowIndex, window).isDynamic
+ && timeline.isLastPeriod(id.periodIndex, period, window, repeatMode, shuffleModeEnabled)
+ && isLastMediaPeriodInPeriod;
+ }
+}
diff --git a/library/core/src/main/java/com/google/android/exoplayer2/NoSampleRenderer.java b/library/core/src/main/java/com/google/android/exoplayer2/NoSampleRenderer.java
new file mode 100644
index 0000000000..593d3d1fce
--- /dev/null
+++ b/library/core/src/main/java/com/google/android/exoplayer2/NoSampleRenderer.java
@@ -0,0 +1,278 @@
+/*
+ * Copyright (C) 2016 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package com.google.android.exoplayer2;
+
+import com.google.android.exoplayer2.source.SampleStream;
+import com.google.android.exoplayer2.util.Assertions;
+import com.google.android.exoplayer2.util.MediaClock;
+import java.io.IOException;
+
+/**
+ * A {@link Renderer} implementation whose track type is {@link C#TRACK_TYPE_NONE} and does not
+ * consume data from its {@link SampleStream}.
+ */
+public abstract class NoSampleRenderer implements Renderer, RendererCapabilities {
+
+ private RendererConfiguration configuration;
+ private int index;
+ private int state;
+ private SampleStream stream;
+ private boolean streamIsFinal;
+
+ @Override
+ public final int getTrackType() {
+ return C.TRACK_TYPE_NONE;
+ }
+
+ @Override
+ public final RendererCapabilities getCapabilities() {
+ return this;
+ }
+
+ @Override
+ public final void setIndex(int index) {
+ this.index = index;
+ }
+
+ @Override
+ public MediaClock getMediaClock() {
+ return null;
+ }
+
+ @Override
+ public final int getState() {
+ return state;
+ }
+
+ /**
+   * Enables the renderer to consume from the specified {@link SampleStream}.
+ *
+ * This method may be called when the renderer is in the following states:
+ * {@link #STATE_DISABLED}.
+ *
+ * @param configuration The renderer configuration.
+ * @param formats The enabled formats. Should be empty.
+ * @param stream The {@link SampleStream} from which the renderer should consume.
+ * @param positionUs The player's current position.
+ * @param joining Whether this renderer is being enabled to join an ongoing playback.
+ * @param offsetUs The offset that should be subtracted from {@code positionUs}
+ * to get the playback position with respect to the media.
+ * @throws ExoPlaybackException If an error occurs.
+ */
+ @Override
+ public final void enable(RendererConfiguration configuration, Format[] formats,
+ SampleStream stream, long positionUs, boolean joining, long offsetUs)
+ throws ExoPlaybackException {
+ Assertions.checkState(state == STATE_DISABLED);
+ this.configuration = configuration;
+ state = STATE_ENABLED;
+ onEnabled(joining);
+ replaceStream(formats, stream, offsetUs);
+ onPositionReset(positionUs, joining);
+ }
+
+ @Override
+ public final void start() throws ExoPlaybackException {
+ Assertions.checkState(state == STATE_ENABLED);
+ state = STATE_STARTED;
+ onStarted();
+ }
+
+ /**
+ * Replaces the {@link SampleStream} that will be associated with this renderer.
+ *
+ * This method may be called when the renderer is in the following states:
+ * {@link #STATE_ENABLED}, {@link #STATE_STARTED}.
+ *
+ * @param formats The enabled formats. Should be empty.
+ * @param stream The {@link SampleStream} to be associated with this renderer.
+ * @param offsetUs The offset that should be subtracted from {@code positionUs} in
+ * {@link #render(long, long)} to get the playback position with respect to the media.
+ * @throws ExoPlaybackException If an error occurs.
+ */
+ @Override
+ public final void replaceStream(Format[] formats, SampleStream stream, long offsetUs)
+ throws ExoPlaybackException {
+ Assertions.checkState(!streamIsFinal);
+ this.stream = stream;
+ onRendererOffsetChanged(offsetUs);
+ }
+
+ @Override
+ public final SampleStream getStream() {
+ return stream;
+ }
+
+ @Override
+ public final boolean hasReadStreamToEnd() {
+ return true;
+ }
+
+ @Override
+ public final void setCurrentStreamFinal() {
+ streamIsFinal = true;
+ }
+
+ @Override
+ public final boolean isCurrentStreamFinal() {
+ return streamIsFinal;
+ }
+
+ @Override
+ public final void maybeThrowStreamError() throws IOException {
+ }
+
+ @Override
+ public final void resetPosition(long positionUs) throws ExoPlaybackException {
+ streamIsFinal = false;
+ onPositionReset(positionUs, false);
+ }
+
+ @Override
+ public final void stop() throws ExoPlaybackException {
+ Assertions.checkState(state == STATE_STARTED);
+ state = STATE_ENABLED;
+ onStopped();
+ }
+
+ @Override
+ public final void disable() {
+ Assertions.checkState(state == STATE_ENABLED);
+ state = STATE_DISABLED;
+ stream = null;
+ streamIsFinal = false;
+ onDisabled();
+ }
+
+ @Override
+ public boolean isReady() {
+ return true;
+ }
+
+ @Override
+ public boolean isEnded() {
+ return true;
+ }
+
+ // RendererCapabilities implementation.
+
+ @Override
+ public int supportsFormat(Format format) throws ExoPlaybackException {
+ return FORMAT_UNSUPPORTED_TYPE;
+ }
+
+ @Override
+ public int supportsMixedMimeTypeAdaptation() throws ExoPlaybackException {
+ return ADAPTIVE_NOT_SUPPORTED;
+ }
+
+ // PlayerMessage.Target implementation.
+
+ @Override
+ public void handleMessage(int what, Object object) throws ExoPlaybackException {
+ // Do nothing.
+ }
+
+ // Methods to be overridden by subclasses.
+
+ /**
+ * Called when the renderer is enabled.
+ *
+ * The default implementation is a no-op.
+ *
+ * @param joining Whether this renderer is being enabled to join an ongoing playback.
+ * @throws ExoPlaybackException If an error occurs.
+ */
+ protected void onEnabled(boolean joining) throws ExoPlaybackException {
+ // Do nothing.
+ }
+
+ /**
+ * Called when the renderer's offset has been changed.
+ *
+ * The default implementation is a no-op.
+ *
+ * @param offsetUs The offset that should be subtracted from {@code positionUs} in
+ * {@link #render(long, long)} to get the playback position with respect to the media.
+ * @throws ExoPlaybackException If an error occurs.
+ */
+ protected void onRendererOffsetChanged(long offsetUs) throws ExoPlaybackException {
+ // Do nothing.
+ }
+
+ /**
+ * Called when the position is reset. This occurs when the renderer is enabled after
+ * {@link #onRendererOffsetChanged(long)} has been called, and also when a position
+ * discontinuity is encountered.
+ *
+ * The default implementation is a no-op.
+ *
+ * @param positionUs The new playback position in microseconds.
+ * @param joining Whether this renderer is being enabled to join an ongoing playback.
+ * @throws ExoPlaybackException If an error occurs.
+ */
+ protected void onPositionReset(long positionUs, boolean joining) throws ExoPlaybackException {
+ // Do nothing.
+ }
+
+ /**
+ * Called when the renderer is started.
+ *
+ * The default implementation is a no-op.
+ *
+ * @throws ExoPlaybackException If an error occurs.
+ */
+ protected void onStarted() throws ExoPlaybackException {
+ // Do nothing.
+ }
+
+ /**
+ * Called when the renderer is stopped.
+ *
+ * The default implementation is a no-op.
+ *
+ * @throws ExoPlaybackException If an error occurs.
+ */
+ protected void onStopped() throws ExoPlaybackException {
+ // Do nothing.
+ }
+
+ /**
+ * Called when the renderer is disabled.
+ *
+ * The default implementation is a no-op.
+ */
+ protected void onDisabled() {
+ // Do nothing.
+ }
+
+ // Methods to be called by subclasses.
+
+ /**
+ * Returns the configuration set when the renderer was most recently enabled.
+ */
+ protected final RendererConfiguration getConfiguration() {
+ return configuration;
+ }
+
+ /**
+ * Returns the index of the renderer within the player.
+ */
+ protected final int getIndex() {
+ return index;
+ }
+
+}
diff --git a/library/core/src/main/java/com/google/android/exoplayer2/PlaybackInfo.java b/library/core/src/main/java/com/google/android/exoplayer2/PlaybackInfo.java
new file mode 100644
index 0000000000..80de073e2d
--- /dev/null
+++ b/library/core/src/main/java/com/google/android/exoplayer2/PlaybackInfo.java
@@ -0,0 +1,181 @@
+/*
+ * Copyright (C) 2017 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package com.google.android.exoplayer2;
+
+import android.support.annotation.Nullable;
+import com.google.android.exoplayer2.source.MediaSource.MediaPeriodId;
+import com.google.android.exoplayer2.source.TrackGroupArray;
+import com.google.android.exoplayer2.trackselection.TrackSelectorResult;
+
+/**
+ * Information about an ongoing playback.
+ */
+/* package */ final class PlaybackInfo {
+
+ public final Timeline timeline;
+ public final @Nullable Object manifest;
+ public final MediaPeriodId periodId;
+ public final long startPositionUs;
+ public final long contentPositionUs;
+ public final int playbackState;
+ public final boolean isLoading;
+ public final TrackGroupArray trackGroups;
+ public final TrackSelectorResult trackSelectorResult;
+
+ public volatile long positionUs;
+ public volatile long bufferedPositionUs;
+
+ public PlaybackInfo(
+ Timeline timeline,
+ long startPositionUs,
+ TrackGroupArray trackGroups,
+ TrackSelectorResult trackSelectorResult) {
+ this(
+ timeline,
+ /* manifest= */ null,
+ new MediaPeriodId(/* periodIndex= */ 0),
+ startPositionUs,
+        /* contentPositionUs= */ C.TIME_UNSET,
+ Player.STATE_IDLE,
+ /* isLoading= */ false,
+ trackGroups,
+ trackSelectorResult);
+ }
+
+ public PlaybackInfo(
+ Timeline timeline,
+ @Nullable Object manifest,
+ MediaPeriodId periodId,
+ long startPositionUs,
+ long contentPositionUs,
+ int playbackState,
+ boolean isLoading,
+ TrackGroupArray trackGroups,
+ TrackSelectorResult trackSelectorResult) {
+ this.timeline = timeline;
+ this.manifest = manifest;
+ this.periodId = periodId;
+ this.startPositionUs = startPositionUs;
+ this.contentPositionUs = contentPositionUs;
+ this.positionUs = startPositionUs;
+ this.bufferedPositionUs = startPositionUs;
+ this.playbackState = playbackState;
+ this.isLoading = isLoading;
+ this.trackGroups = trackGroups;
+ this.trackSelectorResult = trackSelectorResult;
+ }
+
+ public PlaybackInfo fromNewPosition(
+ MediaPeriodId periodId, long startPositionUs, long contentPositionUs) {
+ return new PlaybackInfo(
+ timeline,
+ manifest,
+ periodId,
+ startPositionUs,
+ periodId.isAd() ? contentPositionUs : C.TIME_UNSET,
+ playbackState,
+ isLoading,
+ trackGroups,
+ trackSelectorResult);
+ }
+
+ public PlaybackInfo copyWithPeriodIndex(int periodIndex) {
+ PlaybackInfo playbackInfo =
+ new PlaybackInfo(
+ timeline,
+ manifest,
+ periodId.copyWithPeriodIndex(periodIndex),
+ startPositionUs,
+ contentPositionUs,
+ playbackState,
+ isLoading,
+ trackGroups,
+ trackSelectorResult);
+ copyMutablePositions(this, playbackInfo);
+ return playbackInfo;
+ }
+
+ public PlaybackInfo copyWithTimeline(Timeline timeline, Object manifest) {
+ PlaybackInfo playbackInfo =
+ new PlaybackInfo(
+ timeline,
+ manifest,
+ periodId,
+ startPositionUs,
+ contentPositionUs,
+ playbackState,
+ isLoading,
+ trackGroups,
+ trackSelectorResult);
+ copyMutablePositions(this, playbackInfo);
+ return playbackInfo;
+ }
+
+ public PlaybackInfo copyWithPlaybackState(int playbackState) {
+ PlaybackInfo playbackInfo =
+ new PlaybackInfo(
+ timeline,
+ manifest,
+ periodId,
+ startPositionUs,
+ contentPositionUs,
+ playbackState,
+ isLoading,
+ trackGroups,
+ trackSelectorResult);
+ copyMutablePositions(this, playbackInfo);
+ return playbackInfo;
+ }
+
+ public PlaybackInfo copyWithIsLoading(boolean isLoading) {
+ PlaybackInfo playbackInfo =
+ new PlaybackInfo(
+ timeline,
+ manifest,
+ periodId,
+ startPositionUs,
+ contentPositionUs,
+ playbackState,
+ isLoading,
+ trackGroups,
+ trackSelectorResult);
+ copyMutablePositions(this, playbackInfo);
+ return playbackInfo;
+ }
+
+ public PlaybackInfo copyWithTrackInfo(
+ TrackGroupArray trackGroups, TrackSelectorResult trackSelectorResult) {
+ PlaybackInfo playbackInfo =
+ new PlaybackInfo(
+ timeline,
+ manifest,
+ periodId,
+ startPositionUs,
+ contentPositionUs,
+ playbackState,
+ isLoading,
+ trackGroups,
+ trackSelectorResult);
+ copyMutablePositions(this, playbackInfo);
+ return playbackInfo;
+ }
+
+ private static void copyMutablePositions(PlaybackInfo from, PlaybackInfo to) {
+ to.positionUs = from.positionUs;
+ to.bufferedPositionUs = from.bufferedPositionUs;
+ }
+
+}
diff --git a/library/core/src/main/java/com/google/android/exoplayer2/PlaybackParameters.java b/library/core/src/main/java/com/google/android/exoplayer2/PlaybackParameters.java
index 90aded7660..6f2db4ff5e 100644
--- a/library/core/src/main/java/com/google/android/exoplayer2/PlaybackParameters.java
+++ b/library/core/src/main/java/com/google/android/exoplayer2/PlaybackParameters.java
@@ -15,53 +15,80 @@
*/
package com.google.android.exoplayer2;
+import android.support.annotation.Nullable;
+import com.google.android.exoplayer2.util.Assertions;
+
/**
* The parameters that apply to playback.
*/
public final class PlaybackParameters {
/**
- * The default playback parameters: real-time playback with no pitch modification.
+ * The default playback parameters: real-time playback with no pitch modification or silence
+ * skipping.
*/
- public static final PlaybackParameters DEFAULT = new PlaybackParameters(1f, 1f);
+ public static final PlaybackParameters DEFAULT = new PlaybackParameters(/* speed= */ 1f);
- /**
- * The factor by which playback will be sped up.
- */
+ /** The factor by which playback will be sped up. */
public final float speed;
- /**
- * The factor by which the audio pitch will be scaled.
- */
+ /** The factor by which the audio pitch will be scaled. */
public final float pitch;
+ /** Whether to skip silence in the input. */
+ public final boolean skipSilence;
+
private final int scaledUsPerMs;
/**
- * Creates new playback parameters.
+ * Creates new playback parameters that set the playback speed.
*
- * @param speed The factor by which playback will be sped up.
- * @param pitch The factor by which the audio pitch will be scaled.
+ * @param speed The factor by which playback will be sped up. Must be greater than zero.
+ */
+ public PlaybackParameters(float speed) {
+ this(speed, /* pitch= */ 1f, /* skipSilence= */ false);
+ }
+
+ /**
+ * Creates new playback parameters that set the playback speed and audio pitch scaling factor.
+ *
+ * @param speed The factor by which playback will be sped up. Must be greater than zero.
+ * @param pitch The factor by which the audio pitch will be scaled. Must be greater than zero.
*/
public PlaybackParameters(float speed, float pitch) {
+ this(speed, pitch, /* skipSilence= */ false);
+ }
+
+ /**
+ * Creates new playback parameters that set the playback speed, audio pitch scaling factor and
+ * whether to skip silence in the audio stream.
+ *
+ * @param speed The factor by which playback will be sped up. Must be greater than zero.
+ * @param pitch The factor by which the audio pitch will be scaled. Must be greater than zero.
+ * @param skipSilence Whether to skip silences in the audio stream.
+ */
+ public PlaybackParameters(float speed, float pitch, boolean skipSilence) {
+ Assertions.checkArgument(speed > 0);
+ Assertions.checkArgument(pitch > 0);
this.speed = speed;
this.pitch = pitch;
+ this.skipSilence = skipSilence;
scaledUsPerMs = Math.round(speed * 1000f);
}
/**
- * Scales the millisecond duration {@code timeMs} by the playback speed, returning the result in
- * microseconds.
+ * Returns the media time in microseconds that will elapse in {@code timeMs} milliseconds of
+ * wallclock time.
*
* @param timeMs The time to scale, in milliseconds.
* @return The scaled time, in microseconds.
*/
- public long getSpeedAdjustedDurationUs(long timeMs) {
+ public long getMediaTimeUsForPlayoutTimeMs(long timeMs) {
return timeMs * scaledUsPerMs;
}
@Override
- public boolean equals(Object obj) {
+ public boolean equals(@Nullable Object obj) {
if (this == obj) {
return true;
}
@@ -69,14 +96,17 @@ public final class PlaybackParameters {
return false;
}
PlaybackParameters other = (PlaybackParameters) obj;
- return this.speed == other.speed && this.pitch == other.pitch;
+ return this.speed == other.speed
+ && this.pitch == other.pitch
+ && this.skipSilence == other.skipSilence;
}
-
+
@Override
public int hashCode() {
int result = 17;
result = 31 * result + Float.floatToRawIntBits(speed);
result = 31 * result + Float.floatToRawIntBits(pitch);
+ result = 31 * result + (skipSilence ? 1 : 0);
return result;
}
diff --git a/library/core/src/main/java/com/google/android/exoplayer2/PlaybackPreparer.java b/library/core/src/main/java/com/google/android/exoplayer2/PlaybackPreparer.java
new file mode 100644
index 0000000000..8ff7f50402
--- /dev/null
+++ b/library/core/src/main/java/com/google/android/exoplayer2/PlaybackPreparer.java
@@ -0,0 +1,23 @@
+/*
+ * Copyright (C) 2018 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package com.google.android.exoplayer2;
+
+/** Called to prepare a playback. */
+public interface PlaybackPreparer {
+
+ /** Called to prepare a playback. */
+ void preparePlayback();
+}
diff --git a/library/core/src/main/java/com/google/android/exoplayer2/Player.java b/library/core/src/main/java/com/google/android/exoplayer2/Player.java
index 6eee930018..328816d709 100644
--- a/library/core/src/main/java/com/google/android/exoplayer2/Player.java
+++ b/library/core/src/main/java/com/google/android/exoplayer2/Player.java
@@ -18,8 +18,14 @@ package com.google.android.exoplayer2;
import android.os.Looper;
import android.support.annotation.IntDef;
import android.support.annotation.Nullable;
+import android.view.Surface;
+import android.view.SurfaceHolder;
+import android.view.SurfaceView;
+import android.view.TextureView;
import com.google.android.exoplayer2.source.TrackGroupArray;
+import com.google.android.exoplayer2.text.TextOutput;
import com.google.android.exoplayer2.trackselection.TrackSelectionArray;
+import com.google.android.exoplayer2.video.VideoListener;
import java.lang.annotation.Retention;
import java.lang.annotation.RetentionPolicy;
@@ -44,6 +50,130 @@ import java.lang.annotation.RetentionPolicy;
*/
public interface Player {
+ /** The video component of a {@link Player}. */
+ interface VideoComponent {
+
+ /**
+ * Sets the video scaling mode.
+ *
+ * @param videoScalingMode The video scaling mode.
+ */
+ void setVideoScalingMode(@C.VideoScalingMode int videoScalingMode);
+
+ /** Returns the video scaling mode. */
+ @C.VideoScalingMode
+ int getVideoScalingMode();
+
+ /**
+ * Adds a listener to receive video events.
+ *
+ * @param listener The listener to register.
+ */
+ void addVideoListener(VideoListener listener);
+
+ /**
+ * Removes a listener of video events.
+ *
+ * @param listener The listener to unregister.
+ */
+ void removeVideoListener(VideoListener listener);
+
+ /**
+ * Clears any {@link Surface}, {@link SurfaceHolder}, {@link SurfaceView} or {@link TextureView}
+ * currently set on the player.
+ */
+ void clearVideoSurface();
+
+ /**
+ * Sets the {@link Surface} onto which video will be rendered. The caller is responsible for
+ * tracking the lifecycle of the surface, and must clear the surface by calling {@code
+ * setVideoSurface(null)} if the surface is destroyed.
+ *
+   * <p>If the surface is held by a {@link SurfaceView}, {@link TextureView} or {@link
+ * SurfaceHolder} then it's recommended to use {@link #setVideoSurfaceView(SurfaceView)}, {@link
+ * #setVideoTextureView(TextureView)} or {@link #setVideoSurfaceHolder(SurfaceHolder)} rather
+ * than this method, since passing the holder allows the player to track the lifecycle of the
+ * surface automatically.
+ *
+ * @param surface The {@link Surface}.
+ */
+ void setVideoSurface(Surface surface);
+
+ /**
+ * Clears the {@link Surface} onto which video is being rendered if it matches the one passed.
+ * Else does nothing.
+ *
+ * @param surface The surface to clear.
+ */
+ void clearVideoSurface(Surface surface);
+
+ /**
+ * Sets the {@link SurfaceHolder} that holds the {@link Surface} onto which video will be
+ * rendered. The player will track the lifecycle of the surface automatically.
+ *
+ * @param surfaceHolder The surface holder.
+ */
+ void setVideoSurfaceHolder(SurfaceHolder surfaceHolder);
+
+ /**
+ * Clears the {@link SurfaceHolder} that holds the {@link Surface} onto which video is being
+ * rendered if it matches the one passed. Else does nothing.
+ *
+ * @param surfaceHolder The surface holder to clear.
+ */
+ void clearVideoSurfaceHolder(SurfaceHolder surfaceHolder);
+
+ /**
+ * Sets the {@link SurfaceView} onto which video will be rendered. The player will track the
+ * lifecycle of the surface automatically.
+ *
+ * @param surfaceView The surface view.
+ */
+ void setVideoSurfaceView(SurfaceView surfaceView);
+
+ /**
+ * Clears the {@link SurfaceView} onto which video is being rendered if it matches the one
+ * passed. Else does nothing.
+ *
+     * @param surfaceView The surface view to clear.
+ */
+ void clearVideoSurfaceView(SurfaceView surfaceView);
+
+ /**
+ * Sets the {@link TextureView} onto which video will be rendered. The player will track the
+ * lifecycle of the surface automatically.
+ *
+ * @param textureView The texture view.
+ */
+ void setVideoTextureView(TextureView textureView);
+
+ /**
+ * Clears the {@link TextureView} onto which video is being rendered if it matches the one
+ * passed. Else does nothing.
+ *
+ * @param textureView The texture view to clear.
+ */
+ void clearVideoTextureView(TextureView textureView);
+ }
+
+ /** The text component of a {@link Player}. */
+ interface TextComponent {
+
+ /**
+ * Registers an output to receive text events.
+ *
+ * @param listener The output to register.
+ */
+ void addTextOutput(TextOutput listener);
+
+ /**
+ * Removes a text output.
+ *
+ * @param listener The output to remove.
+ */
+ void removeTextOutput(TextOutput listener);
+ }
+
/**
* Listener of changes in player state.
*/
@@ -55,12 +185,13 @@ public interface Player {
* Note that if the timeline has changed then a position discontinuity may also have occurred.
* For example, the current period index may have changed as a result of periods being added or
* removed from the timeline. This will not be reported via a separate call to
- * {@link #onPositionDiscontinuity()}.
+ * {@link #onPositionDiscontinuity(int)}.
*
* @param timeline The latest timeline. Never null, but may be empty.
* @param manifest The latest manifest. May be null.
+ * @param reason The {@link TimelineChangeReason} responsible for this timeline change.
*/
- void onTimelineChanged(Timeline timeline, Object manifest);
+ void onTimelineChanged(Timeline timeline, Object manifest, @TimelineChangeReason int reason);
/**
* Called when the available or selected tracks change.
@@ -118,9 +249,12 @@ public interface Player {
* when the source introduces a discontinuity internally).
*
* When a position discontinuity occurs as a result of a change to the timeline this method is
- * not called. {@link #onTimelineChanged(Timeline, Object)} is called in this case.
+ * not called. {@link #onTimelineChanged(Timeline, Object, int)} is called in this
+ * case.
+ *
+ * @param reason The {@link DiscontinuityReason} responsible for the discontinuity.
*/
- void onPositionDiscontinuity();
+ void onPositionDiscontinuity(@DiscontinuityReason int reason);
/**
* Called when the current playback parameters change. The playback parameters may change due to
@@ -132,6 +266,81 @@ public interface Player {
*/
void onPlaybackParametersChanged(PlaybackParameters playbackParameters);
+ /**
+ * Called when all pending seek requests have been processed by the player. This is guaranteed
+ * to happen after any necessary changes to the player state were reported to
+ * {@link #onPlayerStateChanged(boolean, int)}.
+ */
+ void onSeekProcessed();
+
+ }
+
+ /**
+ * {@link EventListener} allowing selective overrides. All methods are implemented as no-ops.
+ */
+ abstract class DefaultEventListener implements EventListener {
+
+ @Override
+ public void onTimelineChanged(Timeline timeline, Object manifest,
+ @TimelineChangeReason int reason) {
+ // Call deprecated version. Otherwise, do nothing.
+ onTimelineChanged(timeline, manifest);
+ }
+
+ @Override
+ public void onTracksChanged(TrackGroupArray trackGroups, TrackSelectionArray trackSelections) {
+ // Do nothing.
+ }
+
+ @Override
+ public void onLoadingChanged(boolean isLoading) {
+ // Do nothing.
+ }
+
+ @Override
+ public void onPlayerStateChanged(boolean playWhenReady, int playbackState) {
+ // Do nothing.
+ }
+
+ @Override
+ public void onRepeatModeChanged(@RepeatMode int repeatMode) {
+ // Do nothing.
+ }
+
+ @Override
+ public void onShuffleModeEnabledChanged(boolean shuffleModeEnabled) {
+ // Do nothing.
+ }
+
+ @Override
+ public void onPlayerError(ExoPlaybackException error) {
+ // Do nothing.
+ }
+
+ @Override
+ public void onPositionDiscontinuity(@DiscontinuityReason int reason) {
+ // Do nothing.
+ }
+
+ @Override
+ public void onPlaybackParametersChanged(PlaybackParameters playbackParameters) {
+ // Do nothing.
+ }
+
+ @Override
+ public void onSeekProcessed() {
+ // Do nothing.
+ }
+
+ /**
+ * @deprecated Use {@link DefaultEventListener#onTimelineChanged(Timeline, Object, int)}
+ * instead.
+ */
+ @Deprecated
+ public void onTimelineChanged(Timeline timeline, Object manifest) {
+ // Do nothing.
+ }
+
}
/**
@@ -172,6 +381,61 @@ public interface Player {
*/
int REPEAT_MODE_ALL = 2;
+ /** Reasons for position discontinuities. */
+ @Retention(RetentionPolicy.SOURCE)
+ @IntDef({
+ DISCONTINUITY_REASON_PERIOD_TRANSITION,
+ DISCONTINUITY_REASON_SEEK,
+ DISCONTINUITY_REASON_SEEK_ADJUSTMENT,
+ DISCONTINUITY_REASON_AD_INSERTION,
+ DISCONTINUITY_REASON_INTERNAL
+ })
+ public @interface DiscontinuityReason {}
+ /**
+ * Automatic playback transition from one period in the timeline to the next. The period index may
+ * be the same as it was before the discontinuity in case the current period is repeated.
+ */
+ int DISCONTINUITY_REASON_PERIOD_TRANSITION = 0;
+ /** Seek within the current period or to another period. */
+ int DISCONTINUITY_REASON_SEEK = 1;
+ /**
+ * Seek adjustment due to being unable to seek to the requested position or because the seek was
+ * permitted to be inexact.
+ */
+ int DISCONTINUITY_REASON_SEEK_ADJUSTMENT = 2;
+ /** Discontinuity to or from an ad within one period in the timeline. */
+ int DISCONTINUITY_REASON_AD_INSERTION = 3;
+ /** Discontinuity introduced internally by the source. */
+ int DISCONTINUITY_REASON_INTERNAL = 4;
+
+ /**
+ * Reasons for timeline and/or manifest changes.
+ */
+ @Retention(RetentionPolicy.SOURCE)
+ @IntDef({TIMELINE_CHANGE_REASON_PREPARED, TIMELINE_CHANGE_REASON_RESET,
+ TIMELINE_CHANGE_REASON_DYNAMIC})
+ public @interface TimelineChangeReason {}
+ /**
+ * Timeline and manifest changed as a result of a player initialization with new media.
+ */
+ int TIMELINE_CHANGE_REASON_PREPARED = 0;
+ /**
+ * Timeline and manifest changed as a result of a player reset.
+ */
+ int TIMELINE_CHANGE_REASON_RESET = 1;
+ /**
+   * Timeline or manifest changed as a result of a dynamic update introduced by the played media.
+ */
+ int TIMELINE_CHANGE_REASON_DYNAMIC = 2;
+
+ /** Returns the component of this player for video output, or null if video is not supported. */
+ @Nullable
+ VideoComponent getVideoComponent();
+
+ /** Returns the component of this player for text output, or null if text is not supported. */
+ @Nullable
+ TextComponent getTextComponent();
+
/**
* Register a listener to receive events from the player. The listener's methods will be called on
* the thread that was used to construct the player. However, if the thread used to construct the
@@ -195,6 +459,17 @@ public interface Player {
*/
int getPlaybackState();
+ /**
+ * Returns the error that caused playback to fail. This is the same error that will have been
+ * reported via {@link Player.EventListener#onPlayerError(ExoPlaybackException)} at the time of
+ * failure. It can be queried using this method until {@code stop(true)} is called or the player
+ * is re-prepared.
+ *
+ * @return The error, or {@code null}.
+ */
+ @Nullable
+ ExoPlaybackException getPlaybackError();
+
/**
* Sets whether playback should proceed when {@link #getPlaybackState()} == {@link #STATE_READY}.
*
@@ -276,6 +551,8 @@ public interface Player {
* @param windowIndex The index of the window.
* @param positionMs The seek position in the specified window, or {@link C#TIME_UNSET} to seek to
* the window's default position.
+ * @throws IllegalSeekPositionException If the player has a non-empty timeline and the provided
+ * {@code windowIndex} is not within the bounds of the current timeline.
*/
void seekTo(int windowIndex, long positionMs);
@@ -302,17 +579,29 @@ public interface Player {
PlaybackParameters getPlaybackParameters();
/**
- * Stops playback. Use {@code setPlayWhenReady(false)} rather than this method if the intention
- * is to pause playback.
- *
- * Calling this method will cause the playback state to transition to {@link #STATE_IDLE}. The
+ * Stops playback without resetting the player. Use {@code setPlayWhenReady(false)} rather than
+ * this method if the intention is to pause playback.
+ *
+   * <p>Calling this method will cause the playback state to transition to {@link #STATE_IDLE}. The
* player instance can still be used, and {@link #release()} must still be called on the player if
* it's no longer required.
- *
- * Calling this method does not reset the playback position.
+ *
+   * <p>Calling this method does not reset the playback position.
*/
void stop();
+ /**
+ * Stops playback and optionally resets the player. Use {@code setPlayWhenReady(false)} rather
+ * than this method if the intention is to pause playback.
+ *
+   * <p>Calling this method will cause the playback state to transition to {@link #STATE_IDLE}. The
+ * player instance can still be used, and {@link #release()} must still be called on the player if
+ * it's no longer required.
+ *
+ * @param reset Whether the player should be reset.
+ */
+ void stop(boolean reset);
+
/**
* Releases the player. This method must be called when the player is no longer required. The
* player must not be used after calling this method.
@@ -363,6 +652,26 @@ public interface Player {
*/
int getCurrentWindowIndex();
+ /**
+ * Returns the index of the next timeline window to be played, which may depend on the current
+ * repeat mode and whether shuffle mode is enabled. Returns {@link C#INDEX_UNSET} if the window
+ * currently being played is the last window.
+ */
+ int getNextWindowIndex();
+
+ /**
+ * Returns the index of the previous timeline window to be played, which may depend on the current
+ * repeat mode and whether shuffle mode is enabled. Returns {@link C#INDEX_UNSET} if the window
+ * currently being played is the first window.
+ */
+ int getPreviousWindowIndex();
+
+ /**
+ * Returns the tag of the currently playing window in the timeline. May be null if no tag is set
+ * or the timeline is not yet available.
+ */
+ @Nullable Object getCurrentTag();
+
/**
* Returns the duration of the current window in milliseconds, or {@link C#TIME_UNSET} if the
* duration is not known.
diff --git a/library/core/src/main/java/com/google/android/exoplayer2/PlayerMessage.java b/library/core/src/main/java/com/google/android/exoplayer2/PlayerMessage.java
new file mode 100644
index 0000000000..408cbecaf1
--- /dev/null
+++ b/library/core/src/main/java/com/google/android/exoplayer2/PlayerMessage.java
@@ -0,0 +1,281 @@
+/*
+ * Copyright (C) 2017 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package com.google.android.exoplayer2;
+
+import android.os.Handler;
+import android.support.annotation.Nullable;
+import com.google.android.exoplayer2.util.Assertions;
+
+/**
+ * Defines a player message which can be sent with a {@link Sender} and received by a {@link
+ * Target}.
+ */
+public final class PlayerMessage {
+
+ /** A target for messages. */
+ public interface Target {
+
+ /**
+ * Handles a message delivered to the target.
+ *
+ * @param messageType The message type.
+ * @param payload The message payload.
+ * @throws ExoPlaybackException If an error occurred whilst handling the message. Should only be
+ * thrown by targets that handle messages on the playback thread.
+ */
+ void handleMessage(int messageType, Object payload) throws ExoPlaybackException;
+ }
+
+ /** A sender for messages. */
+ public interface Sender {
+
+ /**
+ * Sends a message.
+ *
+ * @param message The message to be sent.
+ */
+ void sendMessage(PlayerMessage message);
+ }
+
+ private final Target target;
+ private final Sender sender;
+ private final Timeline timeline;
+
+ private int type;
+ private Object payload;
+ private Handler handler;
+ private int windowIndex;
+ private long positionMs;
+ private boolean deleteAfterDelivery;
+ private boolean isSent;
+ private boolean isDelivered;
+ private boolean isProcessed;
+
+ /**
+ * Creates a new message.
+ *
+ * @param sender The {@link Sender} used to send the message.
+ * @param target The {@link Target} the message is sent to.
+ * @param timeline The timeline used when setting the position with {@link #setPosition(long)}. If
+ * set to {@link Timeline#EMPTY}, any position can be specified.
+ * @param defaultWindowIndex The default window index in the {@code timeline} when no other window
+ * index is specified.
+ * @param defaultHandler The default handler to send the message on when no other handler is
+ * specified.
+ */
+ public PlayerMessage(
+ Sender sender,
+ Target target,
+ Timeline timeline,
+ int defaultWindowIndex,
+ Handler defaultHandler) {
+ this.sender = sender;
+ this.target = target;
+ this.timeline = timeline;
+ this.handler = defaultHandler;
+ this.windowIndex = defaultWindowIndex;
+ this.positionMs = C.TIME_UNSET;
+ this.deleteAfterDelivery = true;
+ }
+
+ /** Returns the timeline used for setting the position with {@link #setPosition(long)}. */
+ public Timeline getTimeline() {
+ return timeline;
+ }
+
+ /** Returns the target the message is sent to. */
+ public Target getTarget() {
+ return target;
+ }
+
+ /**
+ * Sets the message type forwarded to {@link Target#handleMessage(int, Object)}.
+ *
+ * @param messageType The message type.
+ * @return This message.
+ * @throws IllegalStateException If {@link #send()} has already been called.
+ */
+ public PlayerMessage setType(int messageType) {
+ Assertions.checkState(!isSent);
+ this.type = messageType;
+ return this;
+ }
+
+ /** Returns the message type forwarded to {@link Target#handleMessage(int, Object)}. */
+ public int getType() {
+ return type;
+ }
+
+ /**
+ * Sets the message payload forwarded to {@link Target#handleMessage(int, Object)}.
+ *
+ * @param payload The message payload.
+ * @return This message.
+ * @throws IllegalStateException If {@link #send()} has already been called.
+ */
+ public PlayerMessage setPayload(@Nullable Object payload) {
+ Assertions.checkState(!isSent);
+ this.payload = payload;
+ return this;
+ }
+
+ /** Returns the message payload forwarded to {@link Target#handleMessage(int, Object)}. */
+ public Object getPayload() {
+ return payload;
+ }
+
+ /**
+ * Sets the handler the message is delivered on.
+ *
+ * @param handler A {@link Handler}.
+ * @return This message.
+ * @throws IllegalStateException If {@link #send()} has already been called.
+ */
+ public PlayerMessage setHandler(Handler handler) {
+ Assertions.checkState(!isSent);
+ this.handler = handler;
+ return this;
+ }
+
+ /** Returns the handler the message is delivered on. */
+ public Handler getHandler() {
+ return handler;
+ }
+
+ /**
+ * Sets a position in the current window at which the message will be delivered.
+ *
+ * @param positionMs The position in the current window at which the message will be sent, in
+ * milliseconds.
+ * @return This message.
+ * @throws IllegalStateException If {@link #send()} has already been called.
+ */
+ public PlayerMessage setPosition(long positionMs) {
+ Assertions.checkState(!isSent);
+ this.positionMs = positionMs;
+ return this;
+ }
+
+ /**
+ * Returns position in window at {@link #getWindowIndex()} at which the message will be delivered,
+ * in milliseconds. If {@link C#TIME_UNSET}, the message will be delivered immediately.
+ */
+ public long getPositionMs() {
+ return positionMs;
+ }
+
+ /**
+ * Sets a position in a window at which the message will be delivered.
+ *
+ * @param windowIndex The index of the window at which the message will be sent.
+ * @param positionMs The position in the window with index {@code windowIndex} at which the
+ * message will be sent, in milliseconds.
+ * @return This message.
+ * @throws IllegalSeekPositionException If the timeline returned by {@link #getTimeline()} is not
+ * empty and the provided window index is not within the bounds of the timeline.
+ * @throws IllegalStateException If {@link #send()} has already been called.
+ */
+ public PlayerMessage setPosition(int windowIndex, long positionMs) {
+ Assertions.checkState(!isSent);
+ Assertions.checkArgument(positionMs != C.TIME_UNSET);
+ if (windowIndex < 0 || (!timeline.isEmpty() && windowIndex >= timeline.getWindowCount())) {
+ throw new IllegalSeekPositionException(timeline, windowIndex, positionMs);
+ }
+ this.windowIndex = windowIndex;
+ this.positionMs = positionMs;
+ return this;
+ }
+
+ /** Returns window index at which the message will be delivered. */
+ public int getWindowIndex() {
+ return windowIndex;
+ }
+
+ /**
+ * Sets whether the message will be deleted after delivery. If false, the message will be resent
+ * if playback reaches the specified position again. Only allowed to be false if a position is set
+ * with {@link #setPosition(long)}.
+ *
+ * @param deleteAfterDelivery Whether the message is deleted after delivery.
+ * @return This message.
+ * @throws IllegalStateException If {@link #send()} has already been called.
+ */
+ public PlayerMessage setDeleteAfterDelivery(boolean deleteAfterDelivery) {
+ Assertions.checkState(!isSent);
+ this.deleteAfterDelivery = deleteAfterDelivery;
+ return this;
+ }
+
+ /** Returns whether the message will be deleted after delivery. */
+ public boolean getDeleteAfterDelivery() {
+ return deleteAfterDelivery;
+ }
+
+ /**
+ * Sends the message. If the target throws an {@link ExoPlaybackException} then it is propagated
+ * out of the player as an error using {@link
+ * Player.EventListener#onPlayerError(ExoPlaybackException)}.
+ *
+ * @return This message.
+ * @throws IllegalStateException If {@link #send()} has already been called.
+ */
+ public PlayerMessage send() {
+ Assertions.checkState(!isSent);
+ if (positionMs == C.TIME_UNSET) {
+ Assertions.checkArgument(deleteAfterDelivery);
+ }
+ isSent = true;
+ sender.sendMessage(this);
+ return this;
+ }
+
+ /**
+ * Blocks until after the message has been delivered or the player is no longer able to deliver
+ * the message.
+ *
+ *
+ * <p>Note that this method can't be called if the current thread is the same thread used by the
+ * message handler set with {@link #setHandler(Handler)} as it would cause a deadlock.
+ *
+ * @return Whether the message was delivered successfully.
+ * @throws IllegalStateException If this method is called before {@link #send()}.
+ * @throws IllegalStateException If this method is called on the same thread used by the message
+ * handler set with {@link #setHandler(Handler)}.
+ * @throws InterruptedException If the current thread is interrupted while waiting for the message
+ * to be delivered.
+ */
+ public synchronized boolean blockUntilDelivered() throws InterruptedException {
+ Assertions.checkState(isSent);
+ Assertions.checkState(handler.getLooper().getThread() != Thread.currentThread());
+ while (!isProcessed) {
+ wait();
+ }
+ return isDelivered;
+ }
+
+ /**
+ * Marks the message as processed. Should only be called by a {@link Sender} and may be called
+ * multiple times.
+ *
+ * @param isDelivered Whether the message has been delivered to its target. The message is
+ * considered as being delivered when this method has been called with {@code isDelivered} set
+ * to true at least once.
+ */
+ public synchronized void markAsProcessed(boolean isDelivered) {
+ this.isDelivered |= isDelivered;
+ isProcessed = true;
+ notifyAll();
+ }
+}
diff --git a/library/core/src/main/java/com/google/android/exoplayer2/Renderer.java b/library/core/src/main/java/com/google/android/exoplayer2/Renderer.java
index e16caec980..e53db4568d 100644
--- a/library/core/src/main/java/com/google/android/exoplayer2/Renderer.java
+++ b/library/core/src/main/java/com/google/android/exoplayer2/Renderer.java
@@ -15,23 +15,29 @@
*/
package com.google.android.exoplayer2;
-import com.google.android.exoplayer2.ExoPlayer.ExoPlayerComponent;
+import android.support.annotation.IntDef;
import com.google.android.exoplayer2.source.SampleStream;
import com.google.android.exoplayer2.util.MediaClock;
import java.io.IOException;
+import java.lang.annotation.Retention;
+import java.lang.annotation.RetentionPolicy;
/**
* Renders media read from a {@link SampleStream}.
- *
- * Internally, a renderer's lifecycle is managed by the owning {@link ExoPlayer}. The renderer is
- * transitioned through various states as the overall playback state changes. The valid state
- * transitions are shown below, annotated with the methods that are called during each transition.
- *
- *
- *
+ *
+ * Internally, a renderer's lifecycle is managed by the owning {@link ExoPlayer}. The renderer is
+ * transitioned through various states as the overall playback state and enabled tracks change. The
+ * valid state transitions are shown below, annotated with the methods that are called during each
+ * transition.
+ *
+ *
*/
-public interface Renderer extends ExoPlayerComponent {
+public interface Renderer extends PlayerMessage.Target {
+ /** The renderer states. */
+ @Retention(RetentionPolicy.SOURCE)
+ @IntDef({STATE_DISABLED, STATE_ENABLED, STATE_STARTED})
+ @interface State {}
/**
* The renderer is disabled.
*/
@@ -82,8 +88,10 @@ public interface Renderer extends ExoPlayerComponent {
/**
* Returns the current state of the renderer.
*
- * @return The current state (one of the {@code STATE_*} constants).
+ * @return The current state. One of {@link #STATE_DISABLED}, {@link #STATE_ENABLED} and {@link
+ * #STATE_STARTED}.
*/
+ @State
int getState();
/**
@@ -226,7 +234,7 @@ public interface Renderer extends ExoPlayerComponent {
/**
* Whether the renderer is ready for the {@link ExoPlayer} instance to transition to
- * {@link ExoPlayer#STATE_ENDED}. The player will make this transition as soon as {@code true} is
+ * {@link Player#STATE_ENDED}. The player will make this transition as soon as {@code true} is
* returned by all of its {@link Renderer}s.
*
* This method may be called when the renderer is in the following states:
diff --git a/library/core/src/main/java/com/google/android/exoplayer2/RendererCapabilities.java b/library/core/src/main/java/com/google/android/exoplayer2/RendererCapabilities.java
index 3f1be20cfb..de0d481386 100644
--- a/library/core/src/main/java/com/google/android/exoplayer2/RendererCapabilities.java
+++ b/library/core/src/main/java/com/google/android/exoplayer2/RendererCapabilities.java
@@ -34,7 +34,9 @@ public interface RendererCapabilities {
int FORMAT_HANDLED = 0b100;
/**
* The {@link Renderer} is capable of rendering formats with the same mime type, but the
- * properties of the format exceed the renderer's capability.
+ * properties of the format exceed the renderer's capabilities. There is a chance the renderer
+ * will be able to play the format in practice because some renderers report their capabilities
+ * conservatively, but the expected outcome is that playback will fail.
*
* Example: The {@link Renderer} is capable of rendering H264 and the format's mime type is
* {@link MimeTypes#VIDEO_H264}, but the format's resolution exceeds the maximum limit supported
@@ -42,12 +44,12 @@ public interface RendererCapabilities {
*/
int FORMAT_EXCEEDS_CAPABILITIES = 0b011;
/**
- * The {@link Renderer} is capable of rendering formats with the same mime type, but the
- * drm scheme used is not supported.
+ * The {@link Renderer} is capable of rendering formats with the same mime type, but is not
+ * capable of rendering the format because the format's drm protection is not supported.
*
* Example: The {@link Renderer} is capable of rendering H264 and the format's mime type is
- * {@link MimeTypes#VIDEO_H264}, but the format indicates cbcs encryption, which is not supported
- * by the underlying content decryption module.
+ * {@link MimeTypes#VIDEO_H264}, but the format indicates PlayReady drm protection where-as the
+ * renderer only supports Widevine.
*/
int FORMAT_UNSUPPORTED_DRM = 0b010;
/**
@@ -121,9 +123,11 @@ public interface RendererCapabilities {
* {@link #FORMAT_UNSUPPORTED_SUBTYPE} and {@link #FORMAT_UNSUPPORTED_TYPE}.
*
* <li>The level of support for adapting from the format to another format of the same mime type.
* One of {@link #ADAPTIVE_SEAMLESS}, {@link #ADAPTIVE_NOT_SEAMLESS} and
- * {@link #ADAPTIVE_NOT_SUPPORTED}.
+ * {@link #ADAPTIVE_NOT_SUPPORTED}. Only set if the level of support for the format itself is
+ * {@link #FORMAT_HANDLED} or {@link #FORMAT_EXCEEDS_CAPABILITIES}.
* The level of support for tunneling. One of {@link #TUNNELING_SUPPORTED} and
- * {@link #TUNNELING_NOT_SUPPORTED}.
+ * {@link #TUNNELING_NOT_SUPPORTED}. Only set if the level of support for the format itself is
+ * {@link #FORMAT_HANDLED} or {@link #FORMAT_EXCEEDS_CAPABILITIES}.
*
* The individual properties can be retrieved by performing a bitwise AND with
* {@link #FORMAT_SUPPORT_MASK}, {@link #ADAPTIVE_SUPPORT_MASK} and
diff --git a/library/core/src/main/java/com/google/android/exoplayer2/RendererConfiguration.java b/library/core/src/main/java/com/google/android/exoplayer2/RendererConfiguration.java
index 93bbd1e4b6..684072efc6 100644
--- a/library/core/src/main/java/com/google/android/exoplayer2/RendererConfiguration.java
+++ b/library/core/src/main/java/com/google/android/exoplayer2/RendererConfiguration.java
@@ -15,6 +15,8 @@
*/
package com.google.android.exoplayer2;
+import android.support.annotation.Nullable;
+
/**
* The configuration of a {@link Renderer}.
*/
@@ -41,7 +43,7 @@ public final class RendererConfiguration {
}
@Override
- public boolean equals(Object obj) {
+ public boolean equals(@Nullable Object obj) {
if (this == obj) {
return true;
}
diff --git a/library/core/src/main/java/com/google/android/exoplayer2/RenderersFactory.java b/library/core/src/main/java/com/google/android/exoplayer2/RenderersFactory.java
index a08ba448a4..e221898471 100644
--- a/library/core/src/main/java/com/google/android/exoplayer2/RenderersFactory.java
+++ b/library/core/src/main/java/com/google/android/exoplayer2/RenderersFactory.java
@@ -16,7 +16,10 @@
package com.google.android.exoplayer2;
import android.os.Handler;
+import android.support.annotation.Nullable;
import com.google.android.exoplayer2.audio.AudioRendererEventListener;
+import com.google.android.exoplayer2.drm.DrmSessionManager;
+import com.google.android.exoplayer2.drm.FrameworkMediaCrypto;
import com.google.android.exoplayer2.metadata.MetadataOutput;
import com.google.android.exoplayer2.text.TextOutput;
import com.google.android.exoplayer2.video.VideoRendererEventListener;
@@ -31,14 +34,17 @@ public interface RenderersFactory {
*
* @param eventHandler A handler to use when invoking event listeners and outputs.
* @param videoRendererEventListener An event listener for video renderers.
- * @param videoRendererEventListener An event listener for audio renderers.
+ * @param audioRendererEventListener An event listener for audio renderers.
* @param textRendererOutput An output for text renderers.
* @param metadataRendererOutput An output for metadata renderers.
+ * @param drmSessionManager A drm session manager used by renderers.
* @return The {@link Renderer instances}.
*/
- Renderer[] createRenderers(Handler eventHandler,
+ Renderer[] createRenderers(
+ Handler eventHandler,
VideoRendererEventListener videoRendererEventListener,
- AudioRendererEventListener audioRendererEventListener, TextOutput textRendererOutput,
- MetadataOutput metadataRendererOutput);
-
+ AudioRendererEventListener audioRendererEventListener,
+ TextOutput textRendererOutput,
+ MetadataOutput metadataRendererOutput,
+ @Nullable DrmSessionManager<FrameworkMediaCrypto> drmSessionManager);
}
diff --git a/library/core/src/main/java/com/google/android/exoplayer2/SeekParameters.java b/library/core/src/main/java/com/google/android/exoplayer2/SeekParameters.java
new file mode 100644
index 0000000000..ca0433f96d
--- /dev/null
+++ b/library/core/src/main/java/com/google/android/exoplayer2/SeekParameters.java
@@ -0,0 +1,91 @@
+/*
+ * Copyright (C) 2017 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package com.google.android.exoplayer2;
+
+import android.support.annotation.Nullable;
+import com.google.android.exoplayer2.util.Assertions;
+
+/**
+ * Parameters that apply to seeking.
+ *
+ * The predefined {@link #EXACT}, {@link #CLOSEST_SYNC}, {@link #PREVIOUS_SYNC} and {@link
+ * #NEXT_SYNC} parameters are suitable for most use cases. Seeking to sync points is typically
+ * faster but less accurate than exact seeking.
+ *
+ *
+ * <p>In the general case, an instance specifies a maximum tolerance before ({@link
+ * #toleranceBeforeUs}) and after ({@link #toleranceAfterUs}) a requested seek position ({@code x}).
+ * If one or more sync points falls within the window {@code [x - toleranceBeforeUs, x +
+ * toleranceAfterUs]} then the seek will be performed to the sync point within the window that's
+ * closest to {@code x}. If no sync point falls within the window then the seek will be performed to
+ * {@code x - toleranceBeforeUs}. Internally the player may need to seek to an earlier sync point
+ * and discard media until this position is reached.
+ */
+public final class SeekParameters {
+
+ /** Parameters for exact seeking. */
+ public static final SeekParameters EXACT = new SeekParameters(0, 0);
+ /** Parameters for seeking to the closest sync point. */
+ public static final SeekParameters CLOSEST_SYNC =
+ new SeekParameters(Long.MAX_VALUE, Long.MAX_VALUE);
+ /** Parameters for seeking to the sync point immediately before a requested seek position. */
+ public static final SeekParameters PREVIOUS_SYNC = new SeekParameters(Long.MAX_VALUE, 0);
+ /** Parameters for seeking to the sync point immediately after a requested seek position. */
+ public static final SeekParameters NEXT_SYNC = new SeekParameters(0, Long.MAX_VALUE);
+ /** Default parameters. */
+ public static final SeekParameters DEFAULT = EXACT;
+
+ /**
+ * The maximum time that the actual position seeked to may precede the requested seek position, in
+ * microseconds.
+ */
+ public final long toleranceBeforeUs;
+ /**
+ * The maximum time that the actual position seeked to may exceed the requested seek position, in
+ * microseconds.
+ */
+ public final long toleranceAfterUs;
+
+ /**
+ * @param toleranceBeforeUs The maximum time that the actual position seeked to may precede the
+ * requested seek position, in microseconds. Must be non-negative.
+ * @param toleranceAfterUs The maximum time that the actual position seeked to may exceed the
+ * requested seek position, in microseconds. Must be non-negative.
+ */
+ public SeekParameters(long toleranceBeforeUs, long toleranceAfterUs) {
+ Assertions.checkArgument(toleranceBeforeUs >= 0);
+ Assertions.checkArgument(toleranceAfterUs >= 0);
+ this.toleranceBeforeUs = toleranceBeforeUs;
+ this.toleranceAfterUs = toleranceAfterUs;
+ }
+
+ @Override
+ public boolean equals(@Nullable Object obj) {
+ if (this == obj) {
+ return true;
+ }
+ if (obj == null || getClass() != obj.getClass()) {
+ return false;
+ }
+ SeekParameters other = (SeekParameters) obj;
+ return toleranceBeforeUs == other.toleranceBeforeUs
+ && toleranceAfterUs == other.toleranceAfterUs;
+ }
+
+ @Override
+ public int hashCode() {
+ return (31 * (int) toleranceBeforeUs) + (int) toleranceAfterUs;
+ }
+}
diff --git a/library/core/src/main/java/com/google/android/exoplayer2/SimpleExoPlayer.java b/library/core/src/main/java/com/google/android/exoplayer2/SimpleExoPlayer.java
index 9fcc4d2128..482e2c970a 100644
--- a/library/core/src/main/java/com/google/android/exoplayer2/SimpleExoPlayer.java
+++ b/library/core/src/main/java/com/google/android/exoplayer2/SimpleExoPlayer.java
@@ -27,9 +27,14 @@ import android.view.Surface;
import android.view.SurfaceHolder;
import android.view.SurfaceView;
import android.view.TextureView;
+import com.google.android.exoplayer2.analytics.AnalyticsCollector;
+import com.google.android.exoplayer2.analytics.AnalyticsListener;
import com.google.android.exoplayer2.audio.AudioAttributes;
import com.google.android.exoplayer2.audio.AudioRendererEventListener;
import com.google.android.exoplayer2.decoder.DecoderCounters;
+import com.google.android.exoplayer2.drm.DefaultDrmSessionManager;
+import com.google.android.exoplayer2.drm.DrmSessionManager;
+import com.google.android.exoplayer2.drm.FrameworkMediaCrypto;
import com.google.android.exoplayer2.metadata.Metadata;
import com.google.android.exoplayer2.metadata.MetadataOutput;
import com.google.android.exoplayer2.source.MediaSource;
@@ -38,8 +43,11 @@ import com.google.android.exoplayer2.text.Cue;
import com.google.android.exoplayer2.text.TextOutput;
import com.google.android.exoplayer2.trackselection.TrackSelectionArray;
import com.google.android.exoplayer2.trackselection.TrackSelector;
+import com.google.android.exoplayer2.util.Clock;
import com.google.android.exoplayer2.util.Util;
import com.google.android.exoplayer2.video.VideoRendererEventListener;
+import java.util.ArrayList;
+import java.util.Collections;
import java.util.List;
import java.util.concurrent.CopyOnWriteArraySet;
@@ -48,51 +56,26 @@ import java.util.concurrent.CopyOnWriteArraySet;
* be obtained from {@link ExoPlayerFactory}.
*/
@TargetApi(16)
-public class SimpleExoPlayer implements ExoPlayer {
+public class SimpleExoPlayer implements ExoPlayer, Player.VideoComponent, Player.TextComponent {
- /**
- * A listener for video rendering information from a {@link SimpleExoPlayer}.
- */
- public interface VideoListener {
-
- /**
- * Called each time there's a change in the size of the video being rendered.
- *
- * @param width The video width in pixels.
- * @param height The video height in pixels.
- * @param unappliedRotationDegrees For videos that require a rotation, this is the clockwise
- * rotation in degrees that the application should apply for the video for it to be rendered
- * in the correct orientation. This value will always be zero on API levels 21 and above,
- * since the renderer will apply all necessary rotations internally. On earlier API levels
- * this is not possible. Applications that use {@link android.view.TextureView} can apply
- * the rotation by calling {@link android.view.TextureView#setTransform}. Applications that
- * do not expect to encounter rotated videos can safely ignore this parameter.
- * @param pixelWidthHeightRatio The width to height ratio of each pixel. For the normal case
- * of square pixels this will be equal to 1.0. Different values are indicative of anamorphic
- * content.
- */
- void onVideoSizeChanged(int width, int height, int unappliedRotationDegrees,
- float pixelWidthHeightRatio);
-
- /**
- * Called when a frame is rendered for the first time since setting the surface, and when a
- * frame is rendered for the first time since a video track was selected.
- */
- void onRenderedFirstFrame();
-
- }
+ /** @deprecated Use {@link com.google.android.exoplayer2.video.VideoListener}. */
+ @Deprecated
+ public interface VideoListener extends com.google.android.exoplayer2.video.VideoListener {}
private static final String TAG = "SimpleExoPlayer";
protected final Renderer[] renderers;
private final ExoPlayer player;
+ private final Handler eventHandler;
private final ComponentListener componentListener;
- private final CopyOnWriteArraySet videoListeners;
+ private final CopyOnWriteArraySet<com.google.android.exoplayer2.video.VideoListener>
+     videoListeners;
private final CopyOnWriteArraySet textOutputs;
private final CopyOnWriteArraySet metadataOutputs;
- private final int videoRendererCount;
- private final int audioRendererCount;
+ private final CopyOnWriteArraySet videoDebugListeners;
+ private final CopyOnWriteArraySet audioDebugListeners;
+ private final AnalyticsCollector analyticsCollector;
private Format videoFormat;
private Format audioFormat;
@@ -103,40 +86,91 @@ public class SimpleExoPlayer implements ExoPlayer {
private int videoScalingMode;
private SurfaceHolder surfaceHolder;
private TextureView textureView;
- private AudioRendererEventListener audioDebugListener;
- private VideoRendererEventListener videoDebugListener;
private DecoderCounters videoDecoderCounters;
private DecoderCounters audioDecoderCounters;
private int audioSessionId;
private AudioAttributes audioAttributes;
private float audioVolume;
+ private MediaSource mediaSource;
- protected SimpleExoPlayer(RenderersFactory renderersFactory, TrackSelector trackSelector,
- LoadControl loadControl) {
+ /**
+ * @param renderersFactory A factory for creating {@link Renderer}s to be used by the instance.
+ * @param trackSelector The {@link TrackSelector} that will be used by the instance.
+ * @param loadControl The {@link LoadControl} that will be used by the instance.
+ * @param drmSessionManager An optional {@link DrmSessionManager}. May be null if the instance
+ * will not be used for DRM protected playbacks.
+ */
+ protected SimpleExoPlayer(
+ RenderersFactory renderersFactory,
+ TrackSelector trackSelector,
+ LoadControl loadControl,
+ @Nullable DrmSessionManager<FrameworkMediaCrypto> drmSessionManager) {
+ this(
+ renderersFactory,
+ trackSelector,
+ loadControl,
+ drmSessionManager,
+ new AnalyticsCollector.Factory());
+ }
+
+ /**
+ * @param renderersFactory A factory for creating {@link Renderer}s to be used by the instance.
+ * @param trackSelector The {@link TrackSelector} that will be used by the instance.
+ * @param loadControl The {@link LoadControl} that will be used by the instance.
+ * @param drmSessionManager An optional {@link DrmSessionManager}. May be null if the instance
+ * will not be used for DRM protected playbacks.
+ * @param analyticsCollectorFactory A factory for creating the {@link AnalyticsCollector} that
+ * will collect and forward all player events.
+ */
+ protected SimpleExoPlayer(
+ RenderersFactory renderersFactory,
+ TrackSelector trackSelector,
+ LoadControl loadControl,
+ @Nullable DrmSessionManager<FrameworkMediaCrypto> drmSessionManager,
+ AnalyticsCollector.Factory analyticsCollectorFactory) {
+ this(
+ renderersFactory,
+ trackSelector,
+ loadControl,
+ drmSessionManager,
+ analyticsCollectorFactory,
+ Clock.DEFAULT);
+ }
+
+ /**
+ * @param renderersFactory A factory for creating {@link Renderer}s to be used by the instance.
+ * @param trackSelector The {@link TrackSelector} that will be used by the instance.
+ * @param loadControl The {@link LoadControl} that will be used by the instance.
+ * @param drmSessionManager An optional {@link DrmSessionManager}. May be null if the instance
+ * will not be used for DRM protected playbacks.
+ * @param analyticsCollectorFactory A factory for creating the {@link AnalyticsCollector} that
+ * will collect and forward all player events.
+ * @param clock The {@link Clock} that will be used by the instance. Should always be {@link
+ * Clock#DEFAULT}, unless the player is being used from a test.
+ */
+ protected SimpleExoPlayer(
+ RenderersFactory renderersFactory,
+ TrackSelector trackSelector,
+ LoadControl loadControl,
+ @Nullable DrmSessionManager<FrameworkMediaCrypto> drmSessionManager,
+ AnalyticsCollector.Factory analyticsCollectorFactory,
+ Clock clock) {
componentListener = new ComponentListener();
videoListeners = new CopyOnWriteArraySet<>();
textOutputs = new CopyOnWriteArraySet<>();
metadataOutputs = new CopyOnWriteArraySet<>();
+ videoDebugListeners = new CopyOnWriteArraySet<>();
+ audioDebugListeners = new CopyOnWriteArraySet<>();
Looper eventLooper = Looper.myLooper() != null ? Looper.myLooper() : Looper.getMainLooper();
- Handler eventHandler = new Handler(eventLooper);
- renderers = renderersFactory.createRenderers(eventHandler, componentListener, componentListener,
- componentListener, componentListener);
-
- // Obtain counts of video and audio renderers.
- int videoRendererCount = 0;
- int audioRendererCount = 0;
- for (Renderer renderer : renderers) {
- switch (renderer.getTrackType()) {
- case C.TRACK_TYPE_VIDEO:
- videoRendererCount++;
- break;
- case C.TRACK_TYPE_AUDIO:
- audioRendererCount++;
- break;
- }
- }
- this.videoRendererCount = videoRendererCount;
- this.audioRendererCount = audioRendererCount;
+ eventHandler = new Handler(eventLooper);
+ renderers =
+ renderersFactory.createRenderers(
+ eventHandler,
+ componentListener,
+ componentListener,
+ componentListener,
+ componentListener,
+ drmSessionManager);
// Set initial values.
audioVolume = 1;
@@ -145,81 +179,73 @@ public class SimpleExoPlayer implements ExoPlayer {
videoScalingMode = C.VIDEO_SCALING_MODE_DEFAULT;
// Build the player and associated objects.
- player = createExoPlayerImpl(renderers, trackSelector, loadControl);
+ player = createExoPlayerImpl(renderers, trackSelector, loadControl, clock);
+ analyticsCollector = analyticsCollectorFactory.createAnalyticsCollector(player, clock);
+ addListener(analyticsCollector);
+ videoDebugListeners.add(analyticsCollector);
+ audioDebugListeners.add(analyticsCollector);
+ addMetadataOutput(analyticsCollector);
+ if (drmSessionManager instanceof DefaultDrmSessionManager) {
+ ((DefaultDrmSessionManager) drmSessionManager).addListener(eventHandler, analyticsCollector);
+ }
+ }
+
+ @Override
+ public VideoComponent getVideoComponent() {
+ return this;
+ }
+
+ @Override
+ public TextComponent getTextComponent() {
+ return this;
}
/**
* Sets the video scaling mode.
- *
- * Note that the scaling mode only applies if a {@link MediaCodec}-based video {@link Renderer} is
- * enabled and if the output surface is owned by a {@link android.view.SurfaceView}.
+ *
+ * <p>Note that the scaling mode only applies if a {@link MediaCodec}-based video {@link Renderer}
+ * is enabled and if the output surface is owned by a {@link android.view.SurfaceView}.
*
* @param videoScalingMode The video scaling mode.
*/
+ @Override
public void setVideoScalingMode(@C.VideoScalingMode int videoScalingMode) {
this.videoScalingMode = videoScalingMode;
- ExoPlayerMessage[] messages = new ExoPlayerMessage[videoRendererCount];
- int count = 0;
for (Renderer renderer : renderers) {
if (renderer.getTrackType() == C.TRACK_TYPE_VIDEO) {
- messages[count++] = new ExoPlayerMessage(renderer, C.MSG_SET_SCALING_MODE,
- videoScalingMode);
+ player
+ .createMessage(renderer)
+ .setType(C.MSG_SET_SCALING_MODE)
+ .setPayload(videoScalingMode)
+ .send();
}
}
- player.sendMessages(messages);
}
- /**
- * Returns the video scaling mode.
- */
+ @Override
public @C.VideoScalingMode int getVideoScalingMode() {
return videoScalingMode;
}
- /**
- * Clears any {@link Surface}, {@link SurfaceHolder}, {@link SurfaceView} or {@link TextureView}
- * currently set on the player.
- */
+ @Override
public void clearVideoSurface() {
setVideoSurface(null);
}
- /**
- * Sets the {@link Surface} onto which video will be rendered. The caller is responsible for
- * tracking the lifecycle of the surface, and must clear the surface by calling
- * {@code setVideoSurface(null)} if the surface is destroyed.
- *
- * If the surface is held by a {@link SurfaceView}, {@link TextureView} or {@link SurfaceHolder}
- * then it's recommended to use {@link #setVideoSurfaceView(SurfaceView)},
- * {@link #setVideoTextureView(TextureView)} or {@link #setVideoSurfaceHolder(SurfaceHolder)}
- * rather than this method, since passing the holder allows the player to track the lifecycle of
- * the surface automatically.
- *
- * @param surface The {@link Surface}.
- */
+ @Override
public void setVideoSurface(Surface surface) {
removeSurfaceCallbacks();
setVideoSurfaceInternal(surface, false);
}
- /**
- * Clears the {@link Surface} onto which video is being rendered if it matches the one passed.
- * Else does nothing.
- *
- * @param surface The surface to clear.
- */
+ @Override
public void clearVideoSurface(Surface surface) {
if (surface != null && surface == this.surface) {
setVideoSurface(null);
}
}
- /**
- * Sets the {@link SurfaceHolder} that holds the {@link Surface} onto which video will be
- * rendered. The player will track the lifecycle of the surface automatically.
- *
- * @param surfaceHolder The surface holder.
- */
+ @Override
public void setVideoSurfaceHolder(SurfaceHolder surfaceHolder) {
removeSurfaceCallbacks();
this.surfaceHolder = surfaceHolder;
@@ -232,44 +258,24 @@ public class SimpleExoPlayer implements ExoPlayer {
}
}
- /**
- * Clears the {@link SurfaceHolder} that holds the {@link Surface} onto which video is being
- * rendered if it matches the one passed. Else does nothing.
- *
- * @param surfaceHolder The surface holder to clear.
- */
+ @Override
public void clearVideoSurfaceHolder(SurfaceHolder surfaceHolder) {
if (surfaceHolder != null && surfaceHolder == this.surfaceHolder) {
setVideoSurfaceHolder(null);
}
}
- /**
- * Sets the {@link SurfaceView} onto which video will be rendered. The player will track the
- * lifecycle of the surface automatically.
- *
- * @param surfaceView The surface view.
- */
+ @Override
public void setVideoSurfaceView(SurfaceView surfaceView) {
setVideoSurfaceHolder(surfaceView == null ? null : surfaceView.getHolder());
}
- /**
- * Clears the {@link SurfaceView} onto which video is being rendered if it matches the one passed.
- * Else does nothing.
- *
- * @param surfaceView The texture view to clear.
- */
+ @Override
public void clearVideoSurfaceView(SurfaceView surfaceView) {
clearVideoSurfaceHolder(surfaceView == null ? null : surfaceView.getHolder());
}
- /**
- * Sets the {@link TextureView} onto which video will be rendered. The player will track the
- * lifecycle of the surface automatically.
- *
- * @param textureView The texture view.
- */
+ @Override
public void setVideoTextureView(TextureView textureView) {
removeSurfaceCallbacks();
this.textureView = textureView;
@@ -286,12 +292,7 @@ public class SimpleExoPlayer implements ExoPlayer {
}
}
- /**
- * Clears the {@link TextureView} onto which video is being rendered if it matches the one passed.
- * Else does nothing.
- *
- * @param textureView The texture view to clear.
- */
+ @Override
public void clearVideoTextureView(TextureView textureView) {
if (textureView != null && textureView == this.textureView) {
setVideoTextureView(null);
@@ -329,6 +330,29 @@ public class SimpleExoPlayer implements ExoPlayer {
return Util.getStreamTypeForAudioUsage(audioAttributes.usage);
}
+ /** Returns the {@link AnalyticsCollector} used for collecting analytics events. */
+ public AnalyticsCollector getAnalyticsCollector() {
+ return analyticsCollector;
+ }
+
+ /**
+ * Adds an {@link AnalyticsListener} to receive analytics events.
+ *
+ * @param listener The listener to be added.
+ */
+ public void addAnalyticsListener(AnalyticsListener listener) {
+ analyticsCollector.addListener(listener);
+ }
+
+ /**
+ * Removes an {@link AnalyticsListener}.
+ *
+ * @param listener The listener to be removed.
+ */
+ public void removeAnalyticsListener(AnalyticsListener listener) {
+ analyticsCollector.removeListener(listener);
+ }
+
/**
* Sets the attributes for audio playback, used by the underlying audio track. If not set, the
* default audio attributes will be used. They are suitable for general media playback.
@@ -347,15 +371,15 @@ public class SimpleExoPlayer implements ExoPlayer {
*/
public void setAudioAttributes(AudioAttributes audioAttributes) {
this.audioAttributes = audioAttributes;
- ExoPlayerMessage[] messages = new ExoPlayerMessage[audioRendererCount];
- int count = 0;
for (Renderer renderer : renderers) {
if (renderer.getTrackType() == C.TRACK_TYPE_AUDIO) {
- messages[count++] = new ExoPlayerMessage(renderer, C.MSG_SET_AUDIO_ATTRIBUTES,
- audioAttributes);
+ player
+ .createMessage(renderer)
+ .setType(C.MSG_SET_AUDIO_ATTRIBUTES)
+ .setPayload(audioAttributes)
+ .send();
}
}
- player.sendMessages(messages);
}
/**
@@ -372,14 +396,11 @@ public class SimpleExoPlayer implements ExoPlayer {
*/
public void setVolume(float audioVolume) {
this.audioVolume = audioVolume;
- ExoPlayerMessage[] messages = new ExoPlayerMessage[audioRendererCount];
- int count = 0;
for (Renderer renderer : renderers) {
if (renderer.getTrackType() == C.TRACK_TYPE_AUDIO) {
- messages[count++] = new ExoPlayerMessage(renderer, C.MSG_SET_VOLUME, audioVolume);
+ player.createMessage(renderer).setType(C.MSG_SET_VOLUME).setPayload(audioVolume).send();
}
}
- player.sendMessages(messages);
}
/**
@@ -443,21 +464,13 @@ public class SimpleExoPlayer implements ExoPlayer {
return audioDecoderCounters;
}
- /**
- * Adds a listener to receive video events.
- *
- * @param listener The listener to register.
- */
- public void addVideoListener(VideoListener listener) {
+ @Override
+ public void addVideoListener(com.google.android.exoplayer2.video.VideoListener listener) {
videoListeners.add(listener);
}
- /**
- * Removes a listener of video events.
- *
- * @param listener The listener to unregister.
- */
- public void removeVideoListener(VideoListener listener) {
+ @Override
+ public void removeVideoListener(com.google.android.exoplayer2.video.VideoListener listener) {
videoListeners.remove(listener);
}
@@ -465,7 +478,7 @@ public class SimpleExoPlayer implements ExoPlayer {
* Sets a listener to receive video events, removing all existing listeners.
*
* @param listener The listener.
- * @deprecated Use {@link #addVideoListener(VideoListener)}.
+ * @deprecated Use {@link #addVideoListener(com.google.android.exoplayer2.video.VideoListener)}.
*/
@Deprecated
public void setVideoListener(VideoListener listener) {
@@ -476,30 +489,23 @@ public class SimpleExoPlayer implements ExoPlayer {
}
/**
- * Equivalent to {@link #removeVideoListener(VideoListener)}.
+ * Equivalent to {@link #removeVideoListener(com.google.android.exoplayer2.video.VideoListener)}.
*
* @param listener The listener to clear.
- * @deprecated Use {@link #removeVideoListener(VideoListener)}.
+ * @deprecated Use {@link
+ * #removeVideoListener(com.google.android.exoplayer2.video.VideoListener)}.
*/
@Deprecated
public void clearVideoListener(VideoListener listener) {
removeVideoListener(listener);
}
- /**
- * Registers an output to receive text events.
- *
- * @param listener The output to register.
- */
+ @Override
public void addTextOutput(TextOutput listener) {
textOutputs.add(listener);
}
- /**
- * Removes a text output.
- *
- * @param listener The output to remove.
- */
+ @Override
public void removeTextOutput(TextOutput listener) {
textOutputs.remove(listener);
}
@@ -530,7 +536,7 @@ public class SimpleExoPlayer implements ExoPlayer {
}
/**
- * Registers an output to receive metadata events.
+ * Adds a {@link MetadataOutput} to receive metadata.
*
* @param listener The output to register.
*/
@@ -539,7 +545,7 @@ public class SimpleExoPlayer implements ExoPlayer {
}
/**
- * Removes a metadata output.
+ * Removes a {@link MetadataOutput}.
*
* @param listener The output to remove.
*/
@@ -555,7 +561,7 @@ public class SimpleExoPlayer implements ExoPlayer {
*/
@Deprecated
public void setMetadataOutput(MetadataOutput output) {
- metadataOutputs.clear();
+ metadataOutputs.retainAll(Collections.singleton(analyticsCollector));
if (output != null) {
addMetadataOutput(output);
}
@@ -573,21 +579,63 @@ public class SimpleExoPlayer implements ExoPlayer {
}
/**
- * Sets a listener to receive debug events from the video renderer.
- *
- * @param listener The listener.
+ * @deprecated Use {@link #addAnalyticsListener(AnalyticsListener)} to get more detailed debug
+ * information.
*/
+ @Deprecated
public void setVideoDebugListener(VideoRendererEventListener listener) {
- videoDebugListener = listener;
+ videoDebugListeners.retainAll(Collections.singleton(analyticsCollector));
+ if (listener != null) {
+ addVideoDebugListener(listener);
+ }
}
/**
- * Sets a listener to receive debug events from the audio renderer.
- *
- * @param listener The listener.
+ * @deprecated Use {@link #addAnalyticsListener(AnalyticsListener)} to get more detailed debug
+ * information.
*/
+ @Deprecated
+ public void addVideoDebugListener(VideoRendererEventListener listener) {
+ videoDebugListeners.add(listener);
+ }
+
+ /**
+ * @deprecated Use {@link #addAnalyticsListener(AnalyticsListener)} and {@link
+ * #removeAnalyticsListener(AnalyticsListener)} to get more detailed debug information.
+ */
+ @Deprecated
+ public void removeVideoDebugListener(VideoRendererEventListener listener) {
+ videoDebugListeners.remove(listener);
+ }
+
+ /**
+ * @deprecated Use {@link #addAnalyticsListener(AnalyticsListener)} to get more detailed debug
+ * information.
+ */
+ @Deprecated
public void setAudioDebugListener(AudioRendererEventListener listener) {
- audioDebugListener = listener;
+ audioDebugListeners.retainAll(Collections.singleton(analyticsCollector));
+ if (listener != null) {
+ addAudioDebugListener(listener);
+ }
+ }
+
+ /**
+ * @deprecated Use {@link #addAnalyticsListener(AnalyticsListener)} to get more detailed debug
+ * information.
+ */
+ @Deprecated
+ public void addAudioDebugListener(AudioRendererEventListener listener) {
+ audioDebugListeners.add(listener);
+ }
+
+ /**
+ * @deprecated Use {@link #addAnalyticsListener(AnalyticsListener)} and {@link
+ * #removeAnalyticsListener(AnalyticsListener)} to get more detailed debug information.
+ */
+ @Deprecated
+ public void removeAudioDebugListener(AudioRendererEventListener listener) {
+ audioDebugListeners.remove(listener);
}
// ExoPlayer implementation
@@ -612,13 +660,26 @@ public class SimpleExoPlayer implements ExoPlayer {
return player.getPlaybackState();
}
+ @Override
+ public ExoPlaybackException getPlaybackError() {
+ return player.getPlaybackError();
+ }
+
@Override
public void prepare(MediaSource mediaSource) {
- player.prepare(mediaSource);
+ prepare(mediaSource, /* resetPosition= */ true, /* resetState= */ true);
}
@Override
public void prepare(MediaSource mediaSource, boolean resetPosition, boolean resetState) {
+ if (this.mediaSource != mediaSource) {
+ if (this.mediaSource != null) {
+ this.mediaSource.removeEventListener(analyticsCollector);
+ analyticsCollector.resetForNewMediaSource();
+ }
+ mediaSource.addEventListener(eventHandler, analyticsCollector);
+ this.mediaSource = mediaSource;
+ }
player.prepare(mediaSource, resetPosition, resetState);
}
@@ -659,26 +720,30 @@ public class SimpleExoPlayer implements ExoPlayer {
@Override
public void seekToDefaultPosition() {
+ analyticsCollector.notifySeekStarted();
player.seekToDefaultPosition();
}
@Override
public void seekToDefaultPosition(int windowIndex) {
+ analyticsCollector.notifySeekStarted();
player.seekToDefaultPosition(windowIndex);
}
@Override
public void seekTo(long positionMs) {
+ analyticsCollector.notifySeekStarted();
player.seekTo(positionMs);
}
@Override
public void seekTo(int windowIndex, long positionMs) {
+ analyticsCollector.notifySeekStarted();
player.seekTo(windowIndex, positionMs);
}
@Override
- public void setPlaybackParameters(PlaybackParameters playbackParameters) {
+ public void setPlaybackParameters(@Nullable PlaybackParameters playbackParameters) {
player.setPlaybackParameters(playbackParameters);
}
@@ -687,9 +752,29 @@ public class SimpleExoPlayer implements ExoPlayer {
return player.getPlaybackParameters();
}
+ @Override
+ public void setSeekParameters(@Nullable SeekParameters seekParameters) {
+ player.setSeekParameters(seekParameters);
+ }
+
+ @Override
+ public @Nullable Object getCurrentTag() {
+ return player.getCurrentTag();
+ }
+
@Override
public void stop() {
- player.stop();
+ stop(/* reset= */ false);
+ }
+
+ @Override
+ public void stop(boolean reset) {
+ player.stop(reset);
+ if (mediaSource != null) {
+ mediaSource.removeEventListener(analyticsCollector);
+ mediaSource = null;
+ analyticsCollector.resetForNewMediaSource();
+ }
}
@Override
@@ -702,6 +787,9 @@ public class SimpleExoPlayer implements ExoPlayer {
}
surface = null;
}
+ if (mediaSource != null) {
+ mediaSource.removeEventListener(analyticsCollector);
+ }
}
@Override
@@ -709,6 +797,11 @@ public class SimpleExoPlayer implements ExoPlayer {
player.sendMessages(messages);
}
+ @Override
+ public PlayerMessage createMessage(PlayerMessage.Target target) {
+ return player.createMessage(target);
+ }
+
@Override
public void blockingSendMessages(ExoPlayerMessage... messages) {
player.blockingSendMessages(messages);
@@ -754,6 +847,16 @@ public class SimpleExoPlayer implements ExoPlayer {
return player.getCurrentWindowIndex();
}
+ @Override
+ public int getNextWindowIndex() {
+ return player.getNextWindowIndex();
+ }
+
+ @Override
+ public int getPreviousWindowIndex() {
+ return player.getPreviousWindowIndex();
+ }
+
@Override
public long getDuration() {
return player.getDuration();
@@ -807,16 +910,17 @@ public class SimpleExoPlayer implements ExoPlayer {
// Internal methods.
/**
- * Creates the ExoPlayer implementation used by this {@link SimpleExoPlayer}.
+ * Creates the {@link ExoPlayer} implementation used by this instance.
*
* @param renderers The {@link Renderer}s that will be used by the instance.
* @param trackSelector The {@link TrackSelector} that will be used by the instance.
* @param loadControl The {@link LoadControl} that will be used by the instance.
+ * @param clock The {@link Clock} that will be used by this instance.
* @return A new {@link ExoPlayer} instance.
*/
- protected ExoPlayer createExoPlayerImpl(Renderer[] renderers, TrackSelector trackSelector,
- LoadControl loadControl) {
- return new ExoPlayerImpl(renderers, trackSelector, loadControl);
+ protected ExoPlayer createExoPlayerImpl(
+ Renderer[] renderers, TrackSelector trackSelector, LoadControl loadControl, Clock clock) {
+ return new ExoPlayerImpl(renderers, trackSelector, loadControl, clock);
}
private void removeSurfaceCallbacks() {
@@ -837,22 +941,26 @@ public class SimpleExoPlayer implements ExoPlayer {
private void setVideoSurfaceInternal(Surface surface, boolean ownsSurface) {
// Note: We don't turn this method into a no-op if the surface is being replaced with itself
// so as to ensure onRenderedFirstFrame callbacks are still called in this case.
- ExoPlayerMessage[] messages = new ExoPlayerMessage[videoRendererCount];
- int count = 0;
+ List<PlayerMessage> messages = new ArrayList<>();
for (Renderer renderer : renderers) {
if (renderer.getTrackType() == C.TRACK_TYPE_VIDEO) {
- messages[count++] = new ExoPlayerMessage(renderer, C.MSG_SET_SURFACE, surface);
+ messages.add(
+ player.createMessage(renderer).setType(C.MSG_SET_SURFACE).setPayload(surface).send());
}
}
if (this.surface != null && this.surface != surface) {
// We're replacing a surface. Block to ensure that it's not accessed after the method returns.
- player.blockingSendMessages(messages);
+ try {
+ for (PlayerMessage message : messages) {
+ message.blockUntilDelivered();
+ }
+ } catch (InterruptedException e) {
+ Thread.currentThread().interrupt();
+ }
// If we created the previous surface, we are responsible for releasing it.
if (this.ownsSurface) {
this.surface.release();
}
- } else {
- player.sendMessages(messages);
}
this.surface = surface;
this.ownsSurface = ownsSurface;
@@ -867,7 +975,7 @@ public class SimpleExoPlayer implements ExoPlayer {
@Override
public void onVideoEnabled(DecoderCounters counters) {
videoDecoderCounters = counters;
- if (videoDebugListener != null) {
+ for (VideoRendererEventListener videoDebugListener : videoDebugListeners) {
videoDebugListener.onVideoEnabled(counters);
}
}
@@ -875,7 +983,7 @@ public class SimpleExoPlayer implements ExoPlayer {
@Override
public void onVideoDecoderInitialized(String decoderName, long initializedTimestampMs,
long initializationDurationMs) {
- if (videoDebugListener != null) {
+ for (VideoRendererEventListener videoDebugListener : videoDebugListeners) {
videoDebugListener.onVideoDecoderInitialized(decoderName, initializedTimestampMs,
initializationDurationMs);
}
@@ -884,14 +992,14 @@ public class SimpleExoPlayer implements ExoPlayer {
@Override
public void onVideoInputFormatChanged(Format format) {
videoFormat = format;
- if (videoDebugListener != null) {
+ for (VideoRendererEventListener videoDebugListener : videoDebugListeners) {
videoDebugListener.onVideoInputFormatChanged(format);
}
}
@Override
public void onDroppedFrames(int count, long elapsed) {
- if (videoDebugListener != null) {
+ for (VideoRendererEventListener videoDebugListener : videoDebugListeners) {
videoDebugListener.onDroppedFrames(count, elapsed);
}
}
@@ -899,11 +1007,11 @@ public class SimpleExoPlayer implements ExoPlayer {
@Override
public void onVideoSizeChanged(int width, int height, int unappliedRotationDegrees,
float pixelWidthHeightRatio) {
- for (VideoListener videoListener : videoListeners) {
+ for (com.google.android.exoplayer2.video.VideoListener videoListener : videoListeners) {
videoListener.onVideoSizeChanged(width, height, unappliedRotationDegrees,
pixelWidthHeightRatio);
}
- if (videoDebugListener != null) {
+ for (VideoRendererEventListener videoDebugListener : videoDebugListeners) {
videoDebugListener.onVideoSizeChanged(width, height, unappliedRotationDegrees,
pixelWidthHeightRatio);
}
@@ -912,18 +1020,18 @@ public class SimpleExoPlayer implements ExoPlayer {
@Override
public void onRenderedFirstFrame(Surface surface) {
if (SimpleExoPlayer.this.surface == surface) {
- for (VideoListener videoListener : videoListeners) {
+ for (com.google.android.exoplayer2.video.VideoListener videoListener : videoListeners) {
videoListener.onRenderedFirstFrame();
}
}
- if (videoDebugListener != null) {
+ for (VideoRendererEventListener videoDebugListener : videoDebugListeners) {
videoDebugListener.onRenderedFirstFrame(surface);
}
}
@Override
public void onVideoDisabled(DecoderCounters counters) {
- if (videoDebugListener != null) {
+ for (VideoRendererEventListener videoDebugListener : videoDebugListeners) {
videoDebugListener.onVideoDisabled(counters);
}
videoFormat = null;
@@ -935,7 +1043,7 @@ public class SimpleExoPlayer implements ExoPlayer {
@Override
public void onAudioEnabled(DecoderCounters counters) {
audioDecoderCounters = counters;
- if (audioDebugListener != null) {
+ for (AudioRendererEventListener audioDebugListener : audioDebugListeners) {
audioDebugListener.onAudioEnabled(counters);
}
}
@@ -943,7 +1051,7 @@ public class SimpleExoPlayer implements ExoPlayer {
@Override
public void onAudioSessionId(int sessionId) {
audioSessionId = sessionId;
- if (audioDebugListener != null) {
+ for (AudioRendererEventListener audioDebugListener : audioDebugListeners) {
audioDebugListener.onAudioSessionId(sessionId);
}
}
@@ -951,7 +1059,7 @@ public class SimpleExoPlayer implements ExoPlayer {
@Override
public void onAudioDecoderInitialized(String decoderName, long initializedTimestampMs,
long initializationDurationMs) {
- if (audioDebugListener != null) {
+ for (AudioRendererEventListener audioDebugListener : audioDebugListeners) {
audioDebugListener.onAudioDecoderInitialized(decoderName, initializedTimestampMs,
initializationDurationMs);
}
@@ -960,22 +1068,22 @@ public class SimpleExoPlayer implements ExoPlayer {
@Override
public void onAudioInputFormatChanged(Format format) {
audioFormat = format;
- if (audioDebugListener != null) {
+ for (AudioRendererEventListener audioDebugListener : audioDebugListeners) {
audioDebugListener.onAudioInputFormatChanged(format);
}
}
@Override
- public void onAudioTrackUnderrun(int bufferSize, long bufferSizeMs,
+ public void onAudioSinkUnderrun(int bufferSize, long bufferSizeMs,
long elapsedSinceLastFeedMs) {
- if (audioDebugListener != null) {
- audioDebugListener.onAudioTrackUnderrun(bufferSize, bufferSizeMs, elapsedSinceLastFeedMs);
+ for (AudioRendererEventListener audioDebugListener : audioDebugListeners) {
+ audioDebugListener.onAudioSinkUnderrun(bufferSize, bufferSizeMs, elapsedSinceLastFeedMs);
}
}
@Override
public void onAudioDisabled(DecoderCounters counters) {
- if (audioDebugListener != null) {
+ for (AudioRendererEventListener audioDebugListener : audioDebugListeners) {
audioDebugListener.onAudioDisabled(counters);
}
audioFormat = null;
diff --git a/library/core/src/main/java/com/google/android/exoplayer2/Timeline.java b/library/core/src/main/java/com/google/android/exoplayer2/Timeline.java
index 7d4c1995eb..600fbc3014 100644
--- a/library/core/src/main/java/com/google/android/exoplayer2/Timeline.java
+++ b/library/core/src/main/java/com/google/android/exoplayer2/Timeline.java
@@ -15,7 +15,9 @@
*/
package com.google.android.exoplayer2;
+import android.support.annotation.Nullable;
import android.util.Pair;
+import com.google.android.exoplayer2.source.ads.AdPlaybackState;
import com.google.android.exoplayer2.util.Assertions;
/**
@@ -117,10 +119,8 @@ public abstract class Timeline {
*/
public static final class Window {
- /**
- * An identifier for the window. Not necessarily unique.
- */
- public Object id;
+ /** A tag for the window. Not necessarily unique. */
+ public @Nullable Object tag;
/**
* The start time of the presentation to which this window belongs in milliseconds since the
@@ -173,13 +173,19 @@ public abstract class Timeline {
*/
public long positionInFirstPeriodUs;
- /**
- * Sets the data held by this window.
- */
- public Window set(Object id, long presentationStartTimeMs, long windowStartTimeMs,
- boolean isSeekable, boolean isDynamic, long defaultPositionUs, long durationUs,
- int firstPeriodIndex, int lastPeriodIndex, long positionInFirstPeriodUs) {
- this.id = id;
+ /** Sets the data held by this window. */
+ public Window set(
+ @Nullable Object tag,
+ long presentationStartTimeMs,
+ long windowStartTimeMs,
+ boolean isSeekable,
+ boolean isDynamic,
+ long defaultPositionUs,
+ long durationUs,
+ int firstPeriodIndex,
+ int lastPeriodIndex,
+ long positionInFirstPeriodUs) {
+ this.tag = tag;
this.presentationStartTimeMs = presentationStartTimeMs;
this.windowStartTimeMs = windowStartTimeMs;
this.isSeekable = isSeekable;
@@ -278,12 +284,7 @@ public abstract class Timeline {
public long durationUs;
private long positionInWindowUs;
- private long[] adGroupTimesUs;
- private int[] adCounts;
- private int[] adsLoadedCounts;
- private int[] adsPlayedCounts;
- private long[][] adDurationsUs;
- private long adResumePositionUs;
+ private AdPlaybackState adPlaybackState;
/**
* Sets the data held by this period.
@@ -300,8 +301,7 @@ public abstract class Timeline {
*/
public Period set(Object id, Object uid, int windowIndex, long durationUs,
long positionInWindowUs) {
- return set(id, uid, windowIndex, durationUs, positionInWindowUs, null, null, null, null,
- null, C.TIME_UNSET);
+ return set(id, uid, windowIndex, durationUs, positionInWindowUs, AdPlaybackState.NONE);
}
/**
@@ -315,33 +315,23 @@ public abstract class Timeline {
* @param positionInWindowUs The position of the start of this period relative to the start of
* the window to which it belongs, in milliseconds. May be negative if the start of the
* period is not within the window.
- * @param adGroupTimesUs The times of ad groups relative to the start of the period, in
- * microseconds. A final element with the value {@link C#TIME_END_OF_SOURCE} indicates that
- * the period has a postroll ad.
- * @param adCounts The number of ads in each ad group. An element may be {@link C#LENGTH_UNSET}
- * if the number of ads is not yet known.
- * @param adsLoadedCounts The number of ads loaded so far in each ad group.
- * @param adsPlayedCounts The number of ads played so far in each ad group.
- * @param adDurationsUs The duration of each ad in each ad group, in microseconds. An element
- * may be {@link C#TIME_UNSET} if the duration is not yet known.
- * @param adResumePositionUs The position offset in the first unplayed ad at which to begin
- * playback, in microseconds.
+ * @param adPlaybackState The state of the period's ads, or {@link AdPlaybackState#NONE} if
+ * there are no ads.
* @return This period, for convenience.
*/
- public Period set(Object id, Object uid, int windowIndex, long durationUs,
- long positionInWindowUs, long[] adGroupTimesUs, int[] adCounts, int[] adsLoadedCounts,
- int[] adsPlayedCounts, long[][] adDurationsUs, long adResumePositionUs) {
+ public Period set(
+ Object id,
+ Object uid,
+ int windowIndex,
+ long durationUs,
+ long positionInWindowUs,
+ AdPlaybackState adPlaybackState) {
this.id = id;
this.uid = uid;
this.windowIndex = windowIndex;
this.durationUs = durationUs;
this.positionInWindowUs = positionInWindowUs;
- this.adGroupTimesUs = adGroupTimesUs;
- this.adCounts = adCounts;
- this.adsLoadedCounts = adsLoadedCounts;
- this.adsPlayedCounts = adsPlayedCounts;
- this.adDurationsUs = adDurationsUs;
- this.adResumePositionUs = adResumePositionUs;
+ this.adPlaybackState = adPlaybackState;
return this;
}
@@ -381,7 +371,7 @@ public abstract class Timeline {
* Returns the number of ad groups in the period.
*/
public int getAdGroupCount() {
- return adGroupTimesUs == null ? 0 : adGroupTimesUs.length;
+ return adPlaybackState.adGroupCount;
}
/**
@@ -392,17 +382,33 @@ public abstract class Timeline {
* @return The time of the ad group at the index, in microseconds.
*/
public long getAdGroupTimeUs(int adGroupIndex) {
- return adGroupTimesUs[adGroupIndex];
+ return adPlaybackState.adGroupTimesUs[adGroupIndex];
}
/**
- * Returns the number of ads that have been played in the specified ad group in the period.
+ * Returns the index of the first ad in the specified ad group that should be played, or the
+ * number of ads in the ad group if no ads should be played.
*
* @param adGroupIndex The ad group index.
- * @return The number of ads that have been played.
+ * @return The index of the first ad that should be played, or the number of ads in the ad group
+ * if no ads should be played.
*/
- public int getPlayedAdCount(int adGroupIndex) {
- return adsPlayedCounts[adGroupIndex];
+ public int getFirstAdIndexToPlay(int adGroupIndex) {
+ return adPlaybackState.adGroups[adGroupIndex].getFirstAdIndexToPlay();
+ }
+
+ /**
+ * Returns the index of the next ad in the specified ad group that should be played after
+ * playing {@code adIndexInAdGroup}, or the number of ads in the ad group if no later ads should
+ * be played.
+ *
+ * @param adGroupIndex The ad group index.
+ * @param lastPlayedAdIndex The last played ad index in the ad group.
+ * @return The index of the next ad that should be played, or the number of ads in the ad group
+ * if the ad group does not have any ads remaining to play.
+ */
+ public int getNextAdIndexToPlay(int adGroupIndex, int lastPlayedAdIndex) {
+ return adPlaybackState.adGroups[adGroupIndex].getNextAdIndexToPlay(lastPlayedAdIndex);
}
/**
@@ -412,51 +418,30 @@ public abstract class Timeline {
* @return Whether the ad group at index {@code adGroupIndex} has been played.
*/
public boolean hasPlayedAdGroup(int adGroupIndex) {
- return adCounts[adGroupIndex] != C.INDEX_UNSET
- && adsPlayedCounts[adGroupIndex] == adCounts[adGroupIndex];
+ return !adPlaybackState.adGroups[adGroupIndex].hasUnplayedAds();
}
/**
* Returns the index of the ad group at or before {@code positionUs}, if that ad group is
- * unplayed. Returns {@link C#INDEX_UNSET} if the ad group before {@code positionUs} has been
- * played, or if there is no such ad group.
+ * unplayed. Returns {@link C#INDEX_UNSET} if the ad group at or before {@code positionUs} has
+ * no ads remaining to be played, or if there is no such ad group.
*
* @param positionUs The position at or before which to find an ad group, in microseconds.
* @return The index of the ad group, or {@link C#INDEX_UNSET}.
*/
public int getAdGroupIndexForPositionUs(long positionUs) {
- if (adGroupTimesUs == null) {
- return C.INDEX_UNSET;
- }
- // Use a linear search as the array elements may not be increasing due to TIME_END_OF_SOURCE.
- // In practice we expect there to be few ad groups so the search shouldn't be expensive.
- int index = adGroupTimesUs.length - 1;
- while (index >= 0 && (adGroupTimesUs[index] == C.TIME_END_OF_SOURCE
- || adGroupTimesUs[index] > positionUs)) {
- index--;
- }
- return index >= 0 && !hasPlayedAdGroup(index) ? index : C.INDEX_UNSET;
+ return adPlaybackState.getAdGroupIndexForPositionUs(positionUs);
}
/**
- * Returns the index of the next unplayed ad group after {@code positionUs}. Returns
- * {@link C#INDEX_UNSET} if there is no such ad group.
+ * Returns the index of the next ad group after {@code positionUs} that has ads remaining to be
+ * played. Returns {@link C#INDEX_UNSET} if there is no such ad group.
*
* @param positionUs The position after which to find an ad group, in microseconds.
* @return The index of the ad group, or {@link C#INDEX_UNSET}.
*/
public int getAdGroupIndexAfterPositionUs(long positionUs) {
- if (adGroupTimesUs == null) {
- return C.INDEX_UNSET;
- }
- // Use a linear search as the array elements may not be increasing due to TIME_END_OF_SOURCE.
- // In practice we expect there to be few ad groups so the search shouldn't be expensive.
- int index = 0;
- while (index < adGroupTimesUs.length && adGroupTimesUs[index] != C.TIME_END_OF_SOURCE
- && (positionUs >= adGroupTimesUs[index] || hasPlayedAdGroup(index))) {
- index++;
- }
- return index < adGroupTimesUs.length ? index : C.INDEX_UNSET;
+ return adPlaybackState.getAdGroupIndexAfterPositionUs(positionUs);
}
/**
@@ -467,7 +452,7 @@ public abstract class Timeline {
* @return The number of ads in the ad group, or {@link C#LENGTH_UNSET} if not yet known.
*/
public int getAdCountInAdGroup(int adGroupIndex) {
- return adCounts[adGroupIndex];
+ return adPlaybackState.adGroups[adGroupIndex].count;
}
/**
@@ -478,7 +463,9 @@ public abstract class Timeline {
* @return Whether the URL for the specified ad is known.
*/
public boolean isAdAvailable(int adGroupIndex, int adIndexInAdGroup) {
- return adIndexInAdGroup < adsLoadedCounts[adGroupIndex];
+ AdPlaybackState.AdGroup adGroup = adPlaybackState.adGroups[adGroupIndex];
+ return adGroup.count != C.LENGTH_UNSET
+ && adGroup.states[adIndexInAdGroup] != AdPlaybackState.AD_STATE_UNAVAILABLE;
}
/**
@@ -490,10 +477,8 @@ public abstract class Timeline {
* @return The duration of the ad, or {@link C#TIME_UNSET} if not yet known.
*/
public long getAdDurationUs(int adGroupIndex, int adIndexInAdGroup) {
- if (adIndexInAdGroup >= adDurationsUs[adGroupIndex].length) {
- return C.TIME_UNSET;
- }
- return adDurationsUs[adGroupIndex][adIndexInAdGroup];
+ AdPlaybackState.AdGroup adGroup = adPlaybackState.adGroups[adGroupIndex];
+ return adGroup.count != C.LENGTH_UNSET ? adGroup.durationsUs[adIndexInAdGroup] : C.TIME_UNSET;
}
/**
@@ -501,43 +486,41 @@ public abstract class Timeline {
* microseconds.
*/
public long getAdResumePositionUs() {
- return adResumePositionUs;
+ return adPlaybackState.adResumePositionUs;
}
}
- /**
- * An empty timeline.
- */
- public static final Timeline EMPTY = new Timeline() {
+ /** An empty timeline. */
+ public static final Timeline EMPTY =
+ new Timeline() {
- @Override
- public int getWindowCount() {
- return 0;
- }
+ @Override
+ public int getWindowCount() {
+ return 0;
+ }
- @Override
- public Window getWindow(int windowIndex, Window window, boolean setIds,
- long defaultPositionProjectionUs) {
- throw new IndexOutOfBoundsException();
- }
+ @Override
+ public Window getWindow(
+ int windowIndex, Window window, boolean setTag, long defaultPositionProjectionUs) {
+ throw new IndexOutOfBoundsException();
+ }
- @Override
- public int getPeriodCount() {
- return 0;
- }
+ @Override
+ public int getPeriodCount() {
+ return 0;
+ }
- @Override
- public Period getPeriod(int periodIndex, Period period, boolean setIds) {
- throw new IndexOutOfBoundsException();
- }
+ @Override
+ public Period getPeriod(int periodIndex, Period period, boolean setIds) {
+ throw new IndexOutOfBoundsException();
+ }
- @Override
- public int getIndexOfPeriod(Object uid) {
- return C.INDEX_UNSET;
- }
-
- };
+ @Override
+ public int getIndexOfPeriod(Object uid) {
+ return C.INDEX_UNSET;
+ }
+ };
/**
* Returns whether the timeline is empty.
@@ -553,20 +536,24 @@ public abstract class Timeline {
/**
* Returns the index of the window after the window at index {@code windowIndex} depending on the
- * {@code repeatMode}.
+ * {@code repeatMode} and whether shuffling is enabled.
*
* @param windowIndex Index of a window in the timeline.
* @param repeatMode A repeat mode.
+ * @param shuffleModeEnabled Whether shuffling is enabled.
* @return The index of the next window, or {@link C#INDEX_UNSET} if this is the last window.
*/
- public int getNextWindowIndex(int windowIndex, @Player.RepeatMode int repeatMode) {
+ public int getNextWindowIndex(int windowIndex, @Player.RepeatMode int repeatMode,
+ boolean shuffleModeEnabled) {
switch (repeatMode) {
case Player.REPEAT_MODE_OFF:
- return windowIndex == getWindowCount() - 1 ? C.INDEX_UNSET : windowIndex + 1;
+ return windowIndex == getLastWindowIndex(shuffleModeEnabled) ? C.INDEX_UNSET
+ : windowIndex + 1;
case Player.REPEAT_MODE_ONE:
return windowIndex;
case Player.REPEAT_MODE_ALL:
- return windowIndex == getWindowCount() - 1 ? 0 : windowIndex + 1;
+ return windowIndex == getLastWindowIndex(shuffleModeEnabled)
+ ? getFirstWindowIndex(shuffleModeEnabled) : windowIndex + 1;
default:
throw new IllegalStateException();
}
@@ -574,28 +561,56 @@ public abstract class Timeline {
/**
* Returns the index of the window before the window at index {@code windowIndex} depending on the
- * {@code repeatMode}.
+ * {@code repeatMode} and whether shuffling is enabled.
*
* @param windowIndex Index of a window in the timeline.
* @param repeatMode A repeat mode.
+ * @param shuffleModeEnabled Whether shuffling is enabled.
* @return The index of the previous window, or {@link C#INDEX_UNSET} if this is the first window.
*/
- public int getPreviousWindowIndex(int windowIndex, @Player.RepeatMode int repeatMode) {
+ public int getPreviousWindowIndex(int windowIndex, @Player.RepeatMode int repeatMode,
+ boolean shuffleModeEnabled) {
switch (repeatMode) {
case Player.REPEAT_MODE_OFF:
- return windowIndex == 0 ? C.INDEX_UNSET : windowIndex - 1;
+ return windowIndex == getFirstWindowIndex(shuffleModeEnabled) ? C.INDEX_UNSET
+ : windowIndex - 1;
case Player.REPEAT_MODE_ONE:
return windowIndex;
case Player.REPEAT_MODE_ALL:
- return windowIndex == 0 ? getWindowCount() - 1 : windowIndex - 1;
+ return windowIndex == getFirstWindowIndex(shuffleModeEnabled)
+ ? getLastWindowIndex(shuffleModeEnabled) : windowIndex - 1;
default:
throw new IllegalStateException();
}
}
+ /**
+ * Returns the index of the last window in the playback order depending on whether shuffling is
+ * enabled.
+ *
+ * @param shuffleModeEnabled Whether shuffling is enabled.
+ * @return The index of the last window in the playback order, or {@link C#INDEX_UNSET} if the
+ * timeline is empty.
+ */
+ public int getLastWindowIndex(boolean shuffleModeEnabled) {
+ return isEmpty() ? C.INDEX_UNSET : getWindowCount() - 1;
+ }
+
+ /**
+ * Returns the index of the first window in the playback order depending on whether shuffling is
+ * enabled.
+ *
+ * @param shuffleModeEnabled Whether shuffling is enabled.
+ * @return The index of the first window in the playback order, or {@link C#INDEX_UNSET} if the
+ * timeline is empty.
+ */
+ public int getFirstWindowIndex(boolean shuffleModeEnabled) {
+ return isEmpty() ? C.INDEX_UNSET : 0;
+ }
+
/**
* Populates a {@link Window} with data for the window at the specified index. Does not populate
- * {@link Window#id}.
+ * {@link Window#tag}.
*
* @param windowIndex The index of the window.
* @param window The {@link Window} to populate. Must not be null.
@@ -610,12 +625,12 @@ public abstract class Timeline {
*
* @param windowIndex The index of the window.
* @param window The {@link Window} to populate. Must not be null.
- * @param setIds Whether {@link Window#id} should be populated. If false, the field will be set to
- * null. The caller should pass false for efficiency reasons unless the field is required.
+ * @param setTag Whether {@link Window#tag} should be populated. If false, the field will be set
+ * to null. The caller should pass false for efficiency reasons unless the field is required.
* @return The populated {@link Window}, for convenience.
*/
- public Window getWindow(int windowIndex, Window window, boolean setIds) {
- return getWindow(windowIndex, window, setIds, 0);
+ public final Window getWindow(int windowIndex, Window window, boolean setTag) {
+ return getWindow(windowIndex, window, setTag, 0);
}
/**
@@ -623,14 +638,14 @@ public abstract class Timeline {
*
* @param windowIndex The index of the window.
* @param window The {@link Window} to populate. Must not be null.
- * @param setIds Whether {@link Window#id} should be populated. If false, the field will be set to
- * null. The caller should pass false for efficiency reasons unless the field is required.
+ * @param setTag Whether {@link Window#tag} should be populated. If false, the field will be set
+ * to null. The caller should pass false for efficiency reasons unless the field is required.
* @param defaultPositionProjectionUs A duration into the future that the populated window's
* default start position should be projected.
* @return The populated {@link Window}, for convenience.
*/
- public abstract Window getWindow(int windowIndex, Window window, boolean setIds,
- long defaultPositionProjectionUs);
+ public abstract Window getWindow(
+ int windowIndex, Window window, boolean setTag, long defaultPositionProjectionUs);
/**
* Returns the number of periods in the timeline.
@@ -639,19 +654,20 @@ public abstract class Timeline {
/**
* Returns the index of the period after the period at index {@code periodIndex} depending on the
- * {@code repeatMode}.
+ * {@code repeatMode} and whether shuffling is enabled.
*
* @param periodIndex Index of a period in the timeline.
* @param period A {@link Period} to be used internally. Must not be null.
* @param window A {@link Window} to be used internally. Must not be null.
* @param repeatMode A repeat mode.
+ * @param shuffleModeEnabled Whether shuffling is enabled.
* @return The index of the next period, or {@link C#INDEX_UNSET} if this is the last period.
*/
public final int getNextPeriodIndex(int periodIndex, Period period, Window window,
- @Player.RepeatMode int repeatMode) {
+ @Player.RepeatMode int repeatMode, boolean shuffleModeEnabled) {
int windowIndex = getPeriod(periodIndex, period).windowIndex;
if (getWindow(windowIndex, window).lastPeriodIndex == periodIndex) {
- int nextWindowIndex = getNextWindowIndex(windowIndex, repeatMode);
+ int nextWindowIndex = getNextWindowIndex(windowIndex, repeatMode, shuffleModeEnabled);
if (nextWindowIndex == C.INDEX_UNSET) {
return C.INDEX_UNSET;
}
@@ -662,29 +678,19 @@ public abstract class Timeline {
/**
* Returns whether the given period is the last period of the timeline depending on the
- * {@code repeatMode}.
+ * {@code repeatMode} and whether shuffling is enabled.
*
* @param periodIndex A period index.
* @param period A {@link Period} to be used internally. Must not be null.
* @param window A {@link Window} to be used internally. Must not be null.
* @param repeatMode A repeat mode.
+ * @param shuffleModeEnabled Whether shuffling is enabled.
* @return Whether the period of the given index is the last period of the timeline.
*/
public final boolean isLastPeriod(int periodIndex, Period period, Window window,
- @Player.RepeatMode int repeatMode) {
- return getNextPeriodIndex(periodIndex, period, window, repeatMode) == C.INDEX_UNSET;
- }
-
- /**
- * Populates a {@link Period} with data for the period at the specified index. Does not populate
- * {@link Period#id} and {@link Period#uid}.
- *
- * @param periodIndex The index of the period.
- * @param period The {@link Period} to populate. Must not be null.
- * @return The populated {@link Period}, for convenience.
- */
- public final Period getPeriod(int periodIndex, Period period) {
- return getPeriod(periodIndex, period, false);
+ @Player.RepeatMode int repeatMode, boolean shuffleModeEnabled) {
+ return getNextPeriodIndex(periodIndex, period, window, repeatMode, shuffleModeEnabled)
+ == C.INDEX_UNSET;
}
/**
@@ -731,6 +737,18 @@ public abstract class Timeline {
return Pair.create(periodIndex, periodPositionUs);
}
+ /**
+ * Populates a {@link Period} with data for the period at the specified index. Does not populate
+ * {@link Period#id} and {@link Period#uid}.
+ *
+ * @param periodIndex The index of the period.
+ * @param period The {@link Period} to populate. Must not be null.
+ * @return The populated {@link Period}, for convenience.
+ */
+ public final Period getPeriod(int periodIndex, Period period) {
+ return getPeriod(periodIndex, period, false);
+ }
+
/**
* Populates a {@link Period} with data for the period at the specified index.
*
diff --git a/library/core/src/main/java/com/google/android/exoplayer2/analytics/AnalyticsCollector.java b/library/core/src/main/java/com/google/android/exoplayer2/analytics/AnalyticsCollector.java
new file mode 100644
index 0000000000..43ef308f27
--- /dev/null
+++ b/library/core/src/main/java/com/google/android/exoplayer2/analytics/AnalyticsCollector.java
@@ -0,0 +1,798 @@
+/*
+ * Copyright (C) 2018 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package com.google.android.exoplayer2.analytics;
+
+import android.net.NetworkInfo;
+import android.support.annotation.Nullable;
+import android.view.Surface;
+import com.google.android.exoplayer2.C;
+import com.google.android.exoplayer2.ExoPlaybackException;
+import com.google.android.exoplayer2.Format;
+import com.google.android.exoplayer2.PlaybackParameters;
+import com.google.android.exoplayer2.Player;
+import com.google.android.exoplayer2.Timeline;
+import com.google.android.exoplayer2.Timeline.Period;
+import com.google.android.exoplayer2.Timeline.Window;
+import com.google.android.exoplayer2.analytics.AnalyticsListener.EventTime;
+import com.google.android.exoplayer2.audio.AudioRendererEventListener;
+import com.google.android.exoplayer2.decoder.DecoderCounters;
+import com.google.android.exoplayer2.drm.DefaultDrmSessionEventListener;
+import com.google.android.exoplayer2.metadata.Metadata;
+import com.google.android.exoplayer2.metadata.MetadataOutput;
+import com.google.android.exoplayer2.source.MediaSource.MediaPeriodId;
+import com.google.android.exoplayer2.source.MediaSourceEventListener;
+import com.google.android.exoplayer2.source.TrackGroupArray;
+import com.google.android.exoplayer2.trackselection.TrackSelectionArray;
+import com.google.android.exoplayer2.upstream.BandwidthMeter;
+import com.google.android.exoplayer2.util.Assertions;
+import com.google.android.exoplayer2.util.Clock;
+import com.google.android.exoplayer2.video.VideoRendererEventListener;
+import java.io.IOException;
+import java.util.ArrayList;
+import java.util.Collections;
+import java.util.List;
+import java.util.Set;
+import java.util.concurrent.CopyOnWriteArraySet;
+
+/**
+ * Data collector which is able to forward analytics events to {@link AnalyticsListener}s by
+ * listening to all available ExoPlayer listeners.
+ */
+public class AnalyticsCollector
+ implements Player.EventListener,
+ MetadataOutput,
+ AudioRendererEventListener,
+ VideoRendererEventListener,
+ MediaSourceEventListener,
+ BandwidthMeter.EventListener,
+ DefaultDrmSessionEventListener {
+
+ /** Factory for an analytics collector. */
+ public static class Factory {
+
+ /**
+ * Creates an analytics collector for the specified player.
+ *
+ * @param player The {@link Player} for which data will be collected.
+ * @param clock A {@link Clock} used to generate timestamps.
+ * @return An analytics collector.
+ */
+ public AnalyticsCollector createAnalyticsCollector(Player player, Clock clock) {
+ return new AnalyticsCollector(player, clock);
+ }
+ }
+
+  private final CopyOnWriteArraySet<AnalyticsListener> listeners;
+ private final Player player;
+ private final Clock clock;
+ private final Window window;
+ private final MediaPeriodQueueTracker mediaPeriodQueueTracker;
+
+ /**
+ * Creates an analytics collector for the specified player.
+ *
+ * @param player The {@link Player} for which data will be collected.
+ * @param clock A {@link Clock} used to generate timestamps.
+ */
+ protected AnalyticsCollector(Player player, Clock clock) {
+ this.player = Assertions.checkNotNull(player);
+ this.clock = Assertions.checkNotNull(clock);
+ listeners = new CopyOnWriteArraySet<>();
+ mediaPeriodQueueTracker = new MediaPeriodQueueTracker();
+ window = new Window();
+ }
+
+ /**
+ * Adds a listener for analytics events.
+ *
+ * @param listener The listener to add.
+ */
+ public void addListener(AnalyticsListener listener) {
+ listeners.add(listener);
+ }
+
+ /**
+ * Removes a previously added analytics event listener.
+ *
+ * @param listener The listener to remove.
+ */
+ public void removeListener(AnalyticsListener listener) {
+ listeners.remove(listener);
+ }
+
+ // External events.
+
+ /**
+ * Notify analytics collector that a seek operation will start. Should be called before the player
+ * adjusts its state and position to the seek.
+ */
+ public final void notifySeekStarted() {
+ if (!mediaPeriodQueueTracker.isSeeking()) {
+ EventTime eventTime = generatePlayingMediaPeriodEventTime();
+ mediaPeriodQueueTracker.onSeekStarted();
+ for (AnalyticsListener listener : listeners) {
+ listener.onSeekStarted(eventTime);
+ }
+ }
+ }
+
+ /**
+ * Notify analytics collector that the viewport size changed.
+ *
+ * @param width The new width of the viewport in device-independent pixels (dp).
+ * @param height The new height of the viewport in device-independent pixels (dp).
+ */
+ public final void notifyViewportSizeChanged(int width, int height) {
+ EventTime eventTime = generatePlayingMediaPeriodEventTime();
+ for (AnalyticsListener listener : listeners) {
+ listener.onViewportSizeChange(eventTime, width, height);
+ }
+ }
+
+ /**
+ * Notify analytics collector that the network type or connectivity changed.
+ *
+ * @param networkInfo The new network info, or null if no network connection exists.
+ */
+ public final void notifyNetworkTypeChanged(@Nullable NetworkInfo networkInfo) {
+ EventTime eventTime = generatePlayingMediaPeriodEventTime();
+ for (AnalyticsListener listener : listeners) {
+ listener.onNetworkTypeChanged(eventTime, networkInfo);
+ }
+ }
+
+ /**
+ * Resets the analytics collector for a new media source. Should be called before the player is
+ * prepared with a new media source.
+ */
+ public final void resetForNewMediaSource() {
+ // Copying the list is needed because onMediaPeriodReleased will modify the list.
+    List<WindowAndMediaPeriodId> activeMediaPeriods =
+ new ArrayList<>(mediaPeriodQueueTracker.activeMediaPeriods);
+ for (WindowAndMediaPeriodId mediaPeriod : activeMediaPeriods) {
+ onMediaPeriodReleased(mediaPeriod.windowIndex, mediaPeriod.mediaPeriodId);
+ }
+ }
+
+ // MetadataOutput implementation.
+
+ @Override
+ public final void onMetadata(Metadata metadata) {
+ EventTime eventTime = generatePlayingMediaPeriodEventTime();
+ for (AnalyticsListener listener : listeners) {
+ listener.onMetadata(eventTime, metadata);
+ }
+ }
+
+ // AudioRendererEventListener implementation.
+
+ @Override
+ public final void onAudioEnabled(DecoderCounters counters) {
+ // The renderers are only enabled after we changed the playing media period.
+ EventTime eventTime = generatePlayingMediaPeriodEventTime();
+ for (AnalyticsListener listener : listeners) {
+ listener.onDecoderEnabled(eventTime, C.TRACK_TYPE_AUDIO, counters);
+ }
+ }
+
+ @Override
+ public final void onAudioSessionId(int audioSessionId) {
+ EventTime eventTime = generateReadingMediaPeriodEventTime();
+ for (AnalyticsListener listener : listeners) {
+ listener.onAudioSessionId(eventTime, audioSessionId);
+ }
+ }
+
+ @Override
+ public final void onAudioDecoderInitialized(
+ String decoderName, long initializedTimestampMs, long initializationDurationMs) {
+ EventTime eventTime = generateReadingMediaPeriodEventTime();
+ for (AnalyticsListener listener : listeners) {
+ listener.onDecoderInitialized(
+ eventTime, C.TRACK_TYPE_AUDIO, decoderName, initializationDurationMs);
+ }
+ }
+
+ @Override
+ public final void onAudioInputFormatChanged(Format format) {
+ EventTime eventTime = generateReadingMediaPeriodEventTime();
+ for (AnalyticsListener listener : listeners) {
+ listener.onDecoderInputFormatChanged(eventTime, C.TRACK_TYPE_AUDIO, format);
+ }
+ }
+
+ @Override
+ public final void onAudioSinkUnderrun(
+ int bufferSize, long bufferSizeMs, long elapsedSinceLastFeedMs) {
+ EventTime eventTime = generateReadingMediaPeriodEventTime();
+ for (AnalyticsListener listener : listeners) {
+ listener.onAudioUnderrun(eventTime, bufferSize, bufferSizeMs, elapsedSinceLastFeedMs);
+ }
+ }
+
+ @Override
+ public final void onAudioDisabled(DecoderCounters counters) {
+ // The renderers are disabled after we changed the playing media period on the playback thread
+ // but before this change is reported to the app thread.
+ EventTime eventTime = generateLastReportedPlayingMediaPeriodEventTime();
+ for (AnalyticsListener listener : listeners) {
+ listener.onDecoderDisabled(eventTime, C.TRACK_TYPE_AUDIO, counters);
+ }
+ }
+
+ // VideoRendererEventListener implementation.
+
+ @Override
+ public final void onVideoEnabled(DecoderCounters counters) {
+ // The renderers are only enabled after we changed the playing media period.
+ EventTime eventTime = generatePlayingMediaPeriodEventTime();
+ for (AnalyticsListener listener : listeners) {
+ listener.onDecoderEnabled(eventTime, C.TRACK_TYPE_VIDEO, counters);
+ }
+ }
+
+ @Override
+ public final void onVideoDecoderInitialized(
+ String decoderName, long initializedTimestampMs, long initializationDurationMs) {
+ EventTime eventTime = generateReadingMediaPeriodEventTime();
+ for (AnalyticsListener listener : listeners) {
+ listener.onDecoderInitialized(
+ eventTime, C.TRACK_TYPE_VIDEO, decoderName, initializationDurationMs);
+ }
+ }
+
+ @Override
+ public final void onVideoInputFormatChanged(Format format) {
+ EventTime eventTime = generateReadingMediaPeriodEventTime();
+ for (AnalyticsListener listener : listeners) {
+ listener.onDecoderInputFormatChanged(eventTime, C.TRACK_TYPE_VIDEO, format);
+ }
+ }
+
+ @Override
+ public final void onDroppedFrames(int count, long elapsedMs) {
+ EventTime eventTime = generateLastReportedPlayingMediaPeriodEventTime();
+ for (AnalyticsListener listener : listeners) {
+ listener.onDroppedVideoFrames(eventTime, count, elapsedMs);
+ }
+ }
+
+ @Override
+ public final void onVideoSizeChanged(
+ int width, int height, int unappliedRotationDegrees, float pixelWidthHeightRatio) {
+ EventTime eventTime = generateReadingMediaPeriodEventTime();
+ for (AnalyticsListener listener : listeners) {
+ listener.onVideoSizeChanged(
+ eventTime, width, height, unappliedRotationDegrees, pixelWidthHeightRatio);
+ }
+ }
+
+ @Override
+ public final void onRenderedFirstFrame(Surface surface) {
+ EventTime eventTime = generateReadingMediaPeriodEventTime();
+ for (AnalyticsListener listener : listeners) {
+ listener.onRenderedFirstFrame(eventTime, surface);
+ }
+ }
+
+ @Override
+ public final void onVideoDisabled(DecoderCounters counters) {
+ // The renderers are disabled after we changed the playing media period on the playback thread
+ // but before this change is reported to the app thread.
+ EventTime eventTime = generateLastReportedPlayingMediaPeriodEventTime();
+ for (AnalyticsListener listener : listeners) {
+ listener.onDecoderDisabled(eventTime, C.TRACK_TYPE_VIDEO, counters);
+ }
+ }
+
+ // MediaSourceEventListener implementation.
+
+ @Override
+ public final void onMediaPeriodCreated(int windowIndex, MediaPeriodId mediaPeriodId) {
+ mediaPeriodQueueTracker.onMediaPeriodCreated(windowIndex, mediaPeriodId);
+ EventTime eventTime = generateEventTime(windowIndex, mediaPeriodId);
+ for (AnalyticsListener listener : listeners) {
+ listener.onMediaPeriodCreated(eventTime);
+ }
+ }
+
+ @Override
+ public final void onMediaPeriodReleased(int windowIndex, MediaPeriodId mediaPeriodId) {
+ mediaPeriodQueueTracker.onMediaPeriodReleased(windowIndex, mediaPeriodId);
+ EventTime eventTime = generateEventTime(windowIndex, mediaPeriodId);
+ for (AnalyticsListener listener : listeners) {
+ listener.onMediaPeriodReleased(eventTime);
+ }
+ }
+
+ @Override
+ public final void onLoadStarted(
+ int windowIndex,
+ @Nullable MediaPeriodId mediaPeriodId,
+ LoadEventInfo loadEventInfo,
+ MediaLoadData mediaLoadData) {
+ EventTime eventTime = generateEventTime(windowIndex, mediaPeriodId);
+ for (AnalyticsListener listener : listeners) {
+ listener.onLoadStarted(eventTime, loadEventInfo, mediaLoadData);
+ }
+ }
+
+ @Override
+ public final void onLoadCompleted(
+ int windowIndex,
+ @Nullable MediaPeriodId mediaPeriodId,
+ LoadEventInfo loadEventInfo,
+ MediaLoadData mediaLoadData) {
+ EventTime eventTime = generateEventTime(windowIndex, mediaPeriodId);
+ for (AnalyticsListener listener : listeners) {
+ listener.onLoadCompleted(eventTime, loadEventInfo, mediaLoadData);
+ }
+ }
+
+ @Override
+ public final void onLoadCanceled(
+ int windowIndex,
+ @Nullable MediaPeriodId mediaPeriodId,
+ LoadEventInfo loadEventInfo,
+ MediaLoadData mediaLoadData) {
+ EventTime eventTime = generateEventTime(windowIndex, mediaPeriodId);
+ for (AnalyticsListener listener : listeners) {
+ listener.onLoadCanceled(eventTime, loadEventInfo, mediaLoadData);
+ }
+ }
+
+ @Override
+ public final void onLoadError(
+ int windowIndex,
+ @Nullable MediaPeriodId mediaPeriodId,
+ LoadEventInfo loadEventInfo,
+ MediaLoadData mediaLoadData,
+ IOException error,
+ boolean wasCanceled) {
+ EventTime eventTime = generateEventTime(windowIndex, mediaPeriodId);
+ for (AnalyticsListener listener : listeners) {
+ listener.onLoadError(eventTime, loadEventInfo, mediaLoadData, error, wasCanceled);
+ }
+ }
+
+ @Override
+ public final void onReadingStarted(int windowIndex, MediaPeriodId mediaPeriodId) {
+ mediaPeriodQueueTracker.onReadingStarted(windowIndex, mediaPeriodId);
+ EventTime eventTime = generateEventTime(windowIndex, mediaPeriodId);
+ for (AnalyticsListener listener : listeners) {
+ listener.onReadingStarted(eventTime);
+ }
+ }
+
+ @Override
+ public final void onUpstreamDiscarded(
+ int windowIndex, @Nullable MediaPeriodId mediaPeriodId, MediaLoadData mediaLoadData) {
+ EventTime eventTime = generateEventTime(windowIndex, mediaPeriodId);
+ for (AnalyticsListener listener : listeners) {
+ listener.onUpstreamDiscarded(eventTime, mediaLoadData);
+ }
+ }
+
+ @Override
+ public final void onDownstreamFormatChanged(
+ int windowIndex, @Nullable MediaPeriodId mediaPeriodId, MediaLoadData mediaLoadData) {
+ EventTime eventTime = generateEventTime(windowIndex, mediaPeriodId);
+ for (AnalyticsListener listener : listeners) {
+ listener.onDownstreamFormatChanged(eventTime, mediaLoadData);
+ }
+ }
+
+ // Player.EventListener implementation.
+
+ // TODO: Add onFinishedReportingChanges to Player.EventListener to know when a set of simultaneous
+ // callbacks finished. This helps to assign exactly the same EventTime to all of them instead of
+ // having slightly different real times.
+
+ @Override
+ public final void onTimelineChanged(
+ Timeline timeline, Object manifest, @Player.TimelineChangeReason int reason) {
+ mediaPeriodQueueTracker.onTimelineChanged(timeline);
+ EventTime eventTime = generatePlayingMediaPeriodEventTime();
+ for (AnalyticsListener listener : listeners) {
+ listener.onTimelineChanged(eventTime, reason);
+ }
+ }
+
+ @Override
+ public final void onTracksChanged(
+ TrackGroupArray trackGroups, TrackSelectionArray trackSelections) {
+ EventTime eventTime = generatePlayingMediaPeriodEventTime();
+ for (AnalyticsListener listener : listeners) {
+ listener.onTracksChanged(eventTime, trackGroups, trackSelections);
+ }
+ }
+
+ @Override
+ public final void onLoadingChanged(boolean isLoading) {
+ EventTime eventTime = generatePlayingMediaPeriodEventTime();
+ for (AnalyticsListener listener : listeners) {
+ listener.onLoadingChanged(eventTime, isLoading);
+ }
+ }
+
+ @Override
+ public final void onPlayerStateChanged(boolean playWhenReady, int playbackState) {
+ EventTime eventTime = generatePlayingMediaPeriodEventTime();
+ for (AnalyticsListener listener : listeners) {
+ listener.onPlayerStateChanged(eventTime, playWhenReady, playbackState);
+ }
+ }
+
+ @Override
+ public final void onRepeatModeChanged(@Player.RepeatMode int repeatMode) {
+ EventTime eventTime = generatePlayingMediaPeriodEventTime();
+ for (AnalyticsListener listener : listeners) {
+ listener.onRepeatModeChanged(eventTime, repeatMode);
+ }
+ }
+
+ @Override
+ public final void onShuffleModeEnabledChanged(boolean shuffleModeEnabled) {
+ EventTime eventTime = generatePlayingMediaPeriodEventTime();
+ for (AnalyticsListener listener : listeners) {
+ listener.onShuffleModeChanged(eventTime, shuffleModeEnabled);
+ }
+ }
+
+ @Override
+ public final void onPlayerError(ExoPlaybackException error) {
+ EventTime eventTime = generatePlayingMediaPeriodEventTime();
+ for (AnalyticsListener listener : listeners) {
+ listener.onPlayerError(eventTime, error);
+ }
+ }
+
+ @Override
+ public final void onPositionDiscontinuity(@Player.DiscontinuityReason int reason) {
+ mediaPeriodQueueTracker.onPositionDiscontinuity(reason);
+ EventTime eventTime = generatePlayingMediaPeriodEventTime();
+ for (AnalyticsListener listener : listeners) {
+ listener.onPositionDiscontinuity(eventTime, reason);
+ }
+ }
+
+ @Override
+ public final void onPlaybackParametersChanged(PlaybackParameters playbackParameters) {
+ EventTime eventTime = generatePlayingMediaPeriodEventTime();
+ for (AnalyticsListener listener : listeners) {
+ listener.onPlaybackParametersChanged(eventTime, playbackParameters);
+ }
+ }
+
+ @Override
+ public final void onSeekProcessed() {
+ if (mediaPeriodQueueTracker.isSeeking()) {
+ mediaPeriodQueueTracker.onSeekProcessed();
+ EventTime eventTime = generatePlayingMediaPeriodEventTime();
+ for (AnalyticsListener listener : listeners) {
+ listener.onSeekProcessed(eventTime);
+ }
+ }
+ }
+
+ // BandwidthMeter.Listener implementation.
+
+ @Override
+ public final void onBandwidthSample(int elapsedMs, long bytes, long bitrate) {
+ EventTime eventTime = generateLoadingMediaPeriodEventTime();
+ for (AnalyticsListener listener : listeners) {
+ listener.onBandwidthEstimate(eventTime, elapsedMs, bytes, bitrate);
+ }
+ }
+
+ // DefaultDrmSessionManager.EventListener implementation.
+
+ @Override
+ public final void onDrmKeysLoaded() {
+ EventTime eventTime = generateReadingMediaPeriodEventTime();
+ for (AnalyticsListener listener : listeners) {
+ listener.onDrmKeysLoaded(eventTime);
+ }
+ }
+
+ @Override
+ public final void onDrmSessionManagerError(Exception error) {
+ EventTime eventTime = generateReadingMediaPeriodEventTime();
+ for (AnalyticsListener listener : listeners) {
+ listener.onDrmSessionManagerError(eventTime, error);
+ }
+ }
+
+ @Override
+ public final void onDrmKeysRestored() {
+ EventTime eventTime = generateReadingMediaPeriodEventTime();
+ for (AnalyticsListener listener : listeners) {
+ listener.onDrmKeysRestored(eventTime);
+ }
+ }
+
+ @Override
+ public final void onDrmKeysRemoved() {
+ EventTime eventTime = generateReadingMediaPeriodEventTime();
+ for (AnalyticsListener listener : listeners) {
+ listener.onDrmKeysRemoved(eventTime);
+ }
+ }
+
+ // Internal methods.
+
+ /** Returns read-only set of registered listeners. */
+  protected Set<AnalyticsListener> getListeners() {
+ return Collections.unmodifiableSet(listeners);
+ }
+
+ /** Returns a new {@link EventTime} for the specified window index and media period id. */
+ protected EventTime generateEventTime(int windowIndex, @Nullable MediaPeriodId mediaPeriodId) {
+ long realtimeMs = clock.elapsedRealtime();
+ Timeline timeline = player.getCurrentTimeline();
+ long eventPositionMs;
+ if (windowIndex == player.getCurrentWindowIndex()) {
+ if (mediaPeriodId != null && mediaPeriodId.isAd()) {
+ // This event is for an ad in the currently playing window.
+ eventPositionMs =
+ player.getCurrentAdGroupIndex() == mediaPeriodId.adGroupIndex
+ && player.getCurrentAdIndexInAdGroup() == mediaPeriodId.adIndexInAdGroup
+ ? player.getCurrentPosition()
+ : 0 /* Assume start position of 0 for a future ad. */;
+ } else {
+ // This event is for content in the currently playing window.
+ eventPositionMs = player.getContentPosition();
+ }
+ } else if (windowIndex >= timeline.getWindowCount()
+ || (mediaPeriodId != null && mediaPeriodId.isAd())) {
+ // This event is for an unknown future window or for an ad in a future window.
+ // Assume start position of zero.
+ eventPositionMs = 0;
+ } else {
+ // This event is for content in a future window. Assume default start position.
+ eventPositionMs = timeline.getWindow(windowIndex, window).getDefaultPositionMs();
+ }
+ // TODO(b/30792113): implement this properly (player.getTotalBufferedDuration()).
+ long bufferedDurationMs = player.getBufferedPosition() - player.getContentPosition();
+ return new EventTime(
+ realtimeMs,
+ timeline,
+ windowIndex,
+ mediaPeriodId,
+ eventPositionMs,
+ player.getCurrentPosition(),
+ bufferedDurationMs);
+ }
+
+ private EventTime generateEventTime(@Nullable WindowAndMediaPeriodId mediaPeriod) {
+ if (mediaPeriod == null) {
+ int windowIndex = player.getCurrentWindowIndex();
+ MediaPeriodId mediaPeriodId = mediaPeriodQueueTracker.tryResolveWindowIndex(windowIndex);
+ return generateEventTime(windowIndex, mediaPeriodId);
+ }
+ return generateEventTime(mediaPeriod.windowIndex, mediaPeriod.mediaPeriodId);
+ }
+
+ private EventTime generateLastReportedPlayingMediaPeriodEventTime() {
+ return generateEventTime(mediaPeriodQueueTracker.getLastReportedPlayingMediaPeriod());
+ }
+
+ private EventTime generatePlayingMediaPeriodEventTime() {
+ return generateEventTime(mediaPeriodQueueTracker.getPlayingMediaPeriod());
+ }
+
+ private EventTime generateReadingMediaPeriodEventTime() {
+ return generateEventTime(mediaPeriodQueueTracker.getReadingMediaPeriod());
+ }
+
+ private EventTime generateLoadingMediaPeriodEventTime() {
+ return generateEventTime(mediaPeriodQueueTracker.getLoadingMediaPeriod());
+ }
+
+ /** Keeps track of the active media periods and currently playing and reading media period. */
+ private static final class MediaPeriodQueueTracker {
+
+ // TODO: Investigate reporting MediaPeriodId in renderer events and adding a listener of queue
+ // changes, which would hopefully remove the need to track the queue here.
+
+    private final ArrayList<WindowAndMediaPeriodId> activeMediaPeriods;
+ private final Period period;
+
+ private WindowAndMediaPeriodId lastReportedPlayingMediaPeriod;
+ private WindowAndMediaPeriodId readingMediaPeriod;
+ private Timeline timeline;
+ private boolean isSeeking;
+
+ public MediaPeriodQueueTracker() {
+ activeMediaPeriods = new ArrayList<>();
+ period = new Period();
+ timeline = Timeline.EMPTY;
+ }
+
+ /**
+ * Returns the {@link WindowAndMediaPeriodId} of the media period in the front of the queue.
+ * This is the playing media period unless the player hasn't started playing yet (in which case
+ * it is the loading media period or null). While the player is seeking or preparing, this
+ * method will always return null to reflect the uncertainty about the current playing period.
+ * May also be null, if the timeline is empty or no media period is active yet.
+ */
+ public @Nullable WindowAndMediaPeriodId getPlayingMediaPeriod() {
+ return activeMediaPeriods.isEmpty() || timeline.isEmpty() || isSeeking
+ ? null
+ : activeMediaPeriods.get(0);
+ }
+
+ /**
+ * Returns the {@link WindowAndMediaPeriodId} of the currently playing media period. This is the
+ * publicly reported period which should always match {@link Player#getCurrentPeriodIndex()}
+ * unless the player is currently seeking or being prepared in which case the previous period is
+ * reported until the seek or preparation is processed. May be null, if no media period is
+ * active yet.
+ */
+ public @Nullable WindowAndMediaPeriodId getLastReportedPlayingMediaPeriod() {
+ return lastReportedPlayingMediaPeriod;
+ }
+
+ /**
+ * Returns the {@link WindowAndMediaPeriodId} of the media period currently being read by the
+ * player. May be null, if the player is not reading a media period.
+ */
+ public @Nullable WindowAndMediaPeriodId getReadingMediaPeriod() {
+ return readingMediaPeriod;
+ }
+
+    /**
+     * Returns the {@link WindowAndMediaPeriodId} of the media period at the end of the queue which
+     * is currently loading or will be the next one loading. May be null, if no media period is
+     * active yet.
+     */
+ public @Nullable WindowAndMediaPeriodId getLoadingMediaPeriod() {
+ return activeMediaPeriods.isEmpty()
+ ? null
+ : activeMediaPeriods.get(activeMediaPeriods.size() - 1);
+ }
+
+ /** Returns whether the player is currently seeking. */
+ public boolean isSeeking() {
+ return isSeeking;
+ }
+
+ /**
+ * Tries to find an existing media period id from the specified window index. Only returns a
+ * non-null media period id if there is a unique, unambiguous match.
+ */
+ public @Nullable MediaPeriodId tryResolveWindowIndex(int windowIndex) {
+ MediaPeriodId match = null;
+ if (timeline != null) {
+ int timelinePeriodCount = timeline.getPeriodCount();
+ for (int i = 0; i < activeMediaPeriods.size(); i++) {
+ WindowAndMediaPeriodId mediaPeriod = activeMediaPeriods.get(i);
+ int periodIndex = mediaPeriod.mediaPeriodId.periodIndex;
+ if (periodIndex < timelinePeriodCount
+ && timeline.getPeriod(periodIndex, period).windowIndex == windowIndex) {
+ if (match != null) {
+ // Ambiguous match.
+ return null;
+ }
+ match = mediaPeriod.mediaPeriodId;
+ }
+ }
+ }
+ return match;
+ }
+
+    /** Updates the queue with a reported position discontinuity. */
+ public void onPositionDiscontinuity(@Player.DiscontinuityReason int reason) {
+ updateLastReportedPlayingMediaPeriod();
+ }
+
+ /** Updates the queue with a reported timeline change. */
+ public void onTimelineChanged(Timeline timeline) {
+ for (int i = 0; i < activeMediaPeriods.size(); i++) {
+ activeMediaPeriods.set(
+ i, updateMediaPeriodToNewTimeline(activeMediaPeriods.get(i), timeline));
+ }
+ if (readingMediaPeriod != null) {
+ readingMediaPeriod = updateMediaPeriodToNewTimeline(readingMediaPeriod, timeline);
+ }
+ this.timeline = timeline;
+ updateLastReportedPlayingMediaPeriod();
+ }
+
+ /** Updates the queue with a reported start of seek. */
+ public void onSeekStarted() {
+ isSeeking = true;
+ }
+
+ /** Updates the queue with a reported processed seek. */
+ public void onSeekProcessed() {
+ isSeeking = false;
+ updateLastReportedPlayingMediaPeriod();
+ }
+
+ /** Updates the queue with a newly created media period. */
+ public void onMediaPeriodCreated(int windowIndex, MediaPeriodId mediaPeriodId) {
+ activeMediaPeriods.add(new WindowAndMediaPeriodId(windowIndex, mediaPeriodId));
+ if (activeMediaPeriods.size() == 1 && !timeline.isEmpty()) {
+ updateLastReportedPlayingMediaPeriod();
+ }
+ }
+
+ /** Updates the queue with a released media period. */
+ public void onMediaPeriodReleased(int windowIndex, MediaPeriodId mediaPeriodId) {
+ WindowAndMediaPeriodId mediaPeriod = new WindowAndMediaPeriodId(windowIndex, mediaPeriodId);
+ activeMediaPeriods.remove(mediaPeriod);
+ if (mediaPeriod.equals(readingMediaPeriod)) {
+ readingMediaPeriod = activeMediaPeriods.isEmpty() ? null : activeMediaPeriods.get(0);
+ }
+ }
+
+ /** Update the queue with a change in the reading media period. */
+ public void onReadingStarted(int windowIndex, MediaPeriodId mediaPeriodId) {
+ readingMediaPeriod = new WindowAndMediaPeriodId(windowIndex, mediaPeriodId);
+ }
+
+ private void updateLastReportedPlayingMediaPeriod() {
+ if (!activeMediaPeriods.isEmpty()) {
+ lastReportedPlayingMediaPeriod = activeMediaPeriods.get(0);
+ }
+ }
+
+ private WindowAndMediaPeriodId updateMediaPeriodToNewTimeline(
+ WindowAndMediaPeriodId mediaPeriod, Timeline newTimeline) {
+ if (newTimeline.isEmpty() || timeline.isEmpty()) {
+ return mediaPeriod;
+ }
+ Object uid =
+ timeline.getPeriod(mediaPeriod.mediaPeriodId.periodIndex, period, /* setIds= */ true).uid;
+ int newPeriodIndex = newTimeline.getIndexOfPeriod(uid);
+ if (newPeriodIndex == C.INDEX_UNSET) {
+ return mediaPeriod;
+ }
+ int newWindowIndex = newTimeline.getPeriod(newPeriodIndex, period).windowIndex;
+ return new WindowAndMediaPeriodId(
+ newWindowIndex, mediaPeriod.mediaPeriodId.copyWithPeriodIndex(newPeriodIndex));
+ }
+ }
+
+ private static final class WindowAndMediaPeriodId {
+
+ public final int windowIndex;
+ public final MediaPeriodId mediaPeriodId;
+
+ public WindowAndMediaPeriodId(int windowIndex, MediaPeriodId mediaPeriodId) {
+ this.windowIndex = windowIndex;
+ this.mediaPeriodId = mediaPeriodId;
+ }
+
+ @Override
+ public boolean equals(@Nullable Object other) {
+ if (this == other) {
+ return true;
+ }
+ if (other == null || getClass() != other.getClass()) {
+ return false;
+ }
+ WindowAndMediaPeriodId that = (WindowAndMediaPeriodId) other;
+ return windowIndex == that.windowIndex && mediaPeriodId.equals(that.mediaPeriodId);
+ }
+
+ @Override
+ public int hashCode() {
+ return 31 * windowIndex + mediaPeriodId.hashCode();
+ }
+ }
+}
diff --git a/library/core/src/main/java/com/google/android/exoplayer2/analytics/AnalyticsListener.java b/library/core/src/main/java/com/google/android/exoplayer2/analytics/AnalyticsListener.java
new file mode 100644
index 0000000000..48057f2bff
--- /dev/null
+++ b/library/core/src/main/java/com/google/android/exoplayer2/analytics/AnalyticsListener.java
@@ -0,0 +1,465 @@
+/*
+ * Copyright (C) 2018 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package com.google.android.exoplayer2.analytics;
+
+import android.net.NetworkInfo;
+import android.support.annotation.Nullable;
+import android.view.Surface;
+import com.google.android.exoplayer2.C;
+import com.google.android.exoplayer2.ExoPlaybackException;
+import com.google.android.exoplayer2.Format;
+import com.google.android.exoplayer2.PlaybackParameters;
+import com.google.android.exoplayer2.Player;
+import com.google.android.exoplayer2.Player.DiscontinuityReason;
+import com.google.android.exoplayer2.Player.TimelineChangeReason;
+import com.google.android.exoplayer2.Timeline;
+import com.google.android.exoplayer2.audio.AudioSink;
+import com.google.android.exoplayer2.decoder.DecoderCounters;
+import com.google.android.exoplayer2.metadata.Metadata;
+import com.google.android.exoplayer2.source.MediaSource.MediaPeriodId;
+import com.google.android.exoplayer2.source.MediaSourceEventListener.LoadEventInfo;
+import com.google.android.exoplayer2.source.MediaSourceEventListener.MediaLoadData;
+import com.google.android.exoplayer2.source.TrackGroupArray;
+import com.google.android.exoplayer2.trackselection.TrackSelectionArray;
+import java.io.IOException;
+
+/**
+ * A listener for analytics events.
+ *
+ * All events are recorded with an {@link EventTime} specifying the elapsed real time and media
+ * time at the time of the event.
+ */
+public interface AnalyticsListener {
+
+ /** Time information of an event. */
+ final class EventTime {
+
+ /**
+ * Elapsed real-time as returned by {@code SystemClock.elapsedRealtime()} at the time of the
+ * event, in milliseconds.
+ */
+ public final long realtimeMs;
+
+ /** Timeline at the time of the event. */
+ public final Timeline timeline;
+
+ /**
+ * Window index in the {@code timeline} this event belongs to, or the prospective window index
+ * if the timeline is not yet known and empty.
+ */
+ public final int windowIndex;
+
+ /**
+ * Media period identifier for the media period this event belongs to, or {@code null} if the
+ * event is not associated with a specific media period.
+ */
+ public final @Nullable MediaPeriodId mediaPeriodId;
+
+ /**
+ * Position in the window or ad this event belongs to at the time of the event, in milliseconds.
+ */
+ public final long eventPlaybackPositionMs;
+
+    /**
+     * Position in the current timeline window ({@code timeline.getCurrentWindowIndex()}) or the
+     * currently playing ad at the time of the event, in milliseconds.
+     */
+ public final long currentPlaybackPositionMs;
+
+ /**
+ * Total buffered duration from {@link #currentPlaybackPositionMs} at the time of the event, in
+ * milliseconds. This includes pre-buffered data for subsequent ads and windows.
+ */
+ public final long totalBufferedDurationMs;
+
+ /**
+ * @param realtimeMs Elapsed real-time as returned by {@code SystemClock.elapsedRealtime()} at
+ * the time of the event, in milliseconds.
+ * @param timeline Timeline at the time of the event.
+ * @param windowIndex Window index in the {@code timeline} this event belongs to, or the
+ * prospective window index if the timeline is not yet known and empty.
+ * @param mediaPeriodId Media period identifier for the media period this event belongs to, or
+ * {@code null} if the event is not associated with a specific media period.
+ * @param eventPlaybackPositionMs Position in the window or ad this event belongs to at the time
+ * of the event, in milliseconds.
+     * @param currentPlaybackPositionMs Position in the current timeline window ({@code
+     *     timeline.getCurrentWindowIndex()}) or the currently playing ad at the time of the event,
+     *     in milliseconds.
+ * @param totalBufferedDurationMs Total buffered duration from {@link
+ * #currentPlaybackPositionMs} at the time of the event, in milliseconds. This includes
+ * pre-buffered data for subsequent ads and windows.
+ */
+ public EventTime(
+ long realtimeMs,
+ Timeline timeline,
+ int windowIndex,
+ @Nullable MediaPeriodId mediaPeriodId,
+ long eventPlaybackPositionMs,
+ long currentPlaybackPositionMs,
+ long totalBufferedDurationMs) {
+ this.realtimeMs = realtimeMs;
+ this.timeline = timeline;
+ this.windowIndex = windowIndex;
+ this.mediaPeriodId = mediaPeriodId;
+ this.eventPlaybackPositionMs = eventPlaybackPositionMs;
+ this.currentPlaybackPositionMs = currentPlaybackPositionMs;
+ this.totalBufferedDurationMs = totalBufferedDurationMs;
+ }
+ }
+
+ /**
+ * Called when the player state changed.
+ *
+ * @param eventTime The event time.
+ * @param playWhenReady Whether the playback will proceed when ready.
+ * @param playbackState One of the {@link Player}.STATE constants.
+ */
+ void onPlayerStateChanged(EventTime eventTime, boolean playWhenReady, int playbackState);
+
+ /**
+ * Called when the timeline changed.
+ *
+ * @param eventTime The event time.
+ * @param reason The reason for the timeline change.
+ */
+ void onTimelineChanged(EventTime eventTime, @TimelineChangeReason int reason);
+
+ /**
+ * Called when a position discontinuity occurred.
+ *
+ * @param eventTime The event time.
+ * @param reason The reason for the position discontinuity.
+ */
+ void onPositionDiscontinuity(EventTime eventTime, @DiscontinuityReason int reason);
+
+ /**
+ * Called when a seek operation started.
+ *
+ * @param eventTime The event time.
+ */
+ void onSeekStarted(EventTime eventTime);
+
+ /**
+ * Called when a seek operation was processed.
+ *
+ * @param eventTime The event time.
+ */
+ void onSeekProcessed(EventTime eventTime);
+
+ /**
+ * Called when the playback parameters changed.
+ *
+ * @param eventTime The event time.
+ * @param playbackParameters The new playback parameters.
+ */
+ void onPlaybackParametersChanged(EventTime eventTime, PlaybackParameters playbackParameters);
+
+ /**
+ * Called when the repeat mode changed.
+ *
+ * @param eventTime The event time.
+ * @param repeatMode The new repeat mode.
+ */
+ void onRepeatModeChanged(EventTime eventTime, @Player.RepeatMode int repeatMode);
+
+ /**
+ * Called when the shuffle mode changed.
+ *
+ * @param eventTime The event time.
+ * @param shuffleModeEnabled Whether the shuffle mode is enabled.
+ */
+ void onShuffleModeChanged(EventTime eventTime, boolean shuffleModeEnabled);
+
+ /**
+ * Called when the player starts or stops loading data from a source.
+ *
+ * @param eventTime The event time.
+ * @param isLoading Whether the player is loading.
+ */
+ void onLoadingChanged(EventTime eventTime, boolean isLoading);
+
+ /**
+ * Called when a fatal player error occurred.
+ *
+ * @param eventTime The event time.
+ * @param error The error.
+ */
+ void onPlayerError(EventTime eventTime, ExoPlaybackException error);
+
+ /**
+ * Called when the available or selected tracks for the renderers changed.
+ *
+ * @param eventTime The event time.
+ * @param trackGroups The available tracks. May be empty.
+ * @param trackSelections The track selections for each renderer. May contain null elements.
+ */
+ void onTracksChanged(
+ EventTime eventTime, TrackGroupArray trackGroups, TrackSelectionArray trackSelections);
+
+ /**
+ * Called when a media source started loading data.
+ *
+ * @param eventTime The event time.
+ * @param loadEventInfo The {@link LoadEventInfo} defining the load event.
+ * @param mediaLoadData The {@link MediaLoadData} defining the data being loaded.
+ */
+ void onLoadStarted(EventTime eventTime, LoadEventInfo loadEventInfo, MediaLoadData mediaLoadData);
+
+ /**
+ * Called when a media source completed loading data.
+ *
+ * @param eventTime The event time.
+ * @param loadEventInfo The {@link LoadEventInfo} defining the load event.
+ * @param mediaLoadData The {@link MediaLoadData} defining the data being loaded.
+ */
+ void onLoadCompleted(
+ EventTime eventTime, LoadEventInfo loadEventInfo, MediaLoadData mediaLoadData);
+
+ /**
+ * Called when a media source canceled loading data.
+ *
+ * @param eventTime The event time.
+ * @param loadEventInfo The {@link LoadEventInfo} defining the load event.
+ * @param mediaLoadData The {@link MediaLoadData} defining the data being loaded.
+ */
+ void onLoadCanceled(
+ EventTime eventTime, LoadEventInfo loadEventInfo, MediaLoadData mediaLoadData);
+
+ /**
+ * Called when a media source loading error occurred. These errors are just for informational
+ * purposes and the player may recover.
+ *
+ * @param eventTime The event time.
+ * @param loadEventInfo The {@link LoadEventInfo} defining the load event.
+ * @param mediaLoadData The {@link MediaLoadData} defining the data being loaded.
+ * @param error The load error.
+ * @param wasCanceled Whether the load was canceled as a result of the error.
+ */
+ void onLoadError(
+ EventTime eventTime,
+ LoadEventInfo loadEventInfo,
+ MediaLoadData mediaLoadData,
+ IOException error,
+ boolean wasCanceled);
+
+ /**
+ * Called when the downstream format sent to the renderers changed.
+ *
+ * @param eventTime The event time.
+ * @param mediaLoadData The {@link MediaLoadData} defining the newly selected media data.
+ */
+ void onDownstreamFormatChanged(EventTime eventTime, MediaLoadData mediaLoadData);
+
+ /**
+ * Called when data is removed from the back of a media buffer, typically so that it can be
+ * re-buffered in a different format.
+ *
+ * @param eventTime The event time.
+ * @param mediaLoadData The {@link MediaLoadData} defining the media being discarded.
+ */
+ void onUpstreamDiscarded(EventTime eventTime, MediaLoadData mediaLoadData);
+
+ /**
+ * Called when a media source created a media period.
+ *
+ * @param eventTime The event time.
+ */
+ void onMediaPeriodCreated(EventTime eventTime);
+
+ /**
+ * Called when a media source released a media period.
+ *
+ * @param eventTime The event time.
+ */
+ void onMediaPeriodReleased(EventTime eventTime);
+
+ /**
+ * Called when the player started reading a media period.
+ *
+ * @param eventTime The event time.
+ */
+ void onReadingStarted(EventTime eventTime);
+
+ /**
+ * Called when the bandwidth estimate for the current data source has been updated.
+ *
+ * @param eventTime The event time.
+   * @param totalLoadTimeMs The total time spent loading this update is based on, in milliseconds.
+ * @param totalBytesLoaded The total bytes loaded this update is based on.
+ * @param bitrateEstimate The bandwidth estimate, in bits per second.
+ */
+ void onBandwidthEstimate(
+ EventTime eventTime, int totalLoadTimeMs, long totalBytesLoaded, long bitrateEstimate);
+
+ /**
+ * Called when the viewport size of the output surface changed.
+ *
+ * @param eventTime The event time.
+ * @param width The width of the viewport in device-independent pixels (dp).
+ * @param height The height of the viewport in device-independent pixels (dp).
+ */
+ void onViewportSizeChange(EventTime eventTime, int width, int height);
+
+ /**
+ * Called when the type of the network connection changed.
+ *
+ * @param eventTime The event time.
+ * @param networkInfo The network info for the current connection, or null if disconnected.
+ */
+ void onNetworkTypeChanged(EventTime eventTime, @Nullable NetworkInfo networkInfo);
+
+ /**
+ * Called when there is {@link Metadata} associated with the current playback time.
+ *
+ * @param eventTime The event time.
+ * @param metadata The metadata.
+ */
+ void onMetadata(EventTime eventTime, Metadata metadata);
+
+ /**
+ * Called when an audio or video decoder has been enabled.
+ *
+ * @param eventTime The event time.
+ * @param trackType The track type of the enabled decoder. Either {@link C#TRACK_TYPE_AUDIO} or
+ * {@link C#TRACK_TYPE_VIDEO}.
+ * @param decoderCounters The accumulated event counters associated with this decoder.
+ */
+ void onDecoderEnabled(EventTime eventTime, int trackType, DecoderCounters decoderCounters);
+
+ /**
+ * Called when an audio or video decoder has been initialized.
+ *
+ * @param eventTime The event time.
+ * @param trackType The track type of the initialized decoder. Either {@link C#TRACK_TYPE_AUDIO}
+ * or {@link C#TRACK_TYPE_VIDEO}.
+ * @param decoderName The decoder that was created.
+ * @param initializationDurationMs Time taken to initialize the decoder, in milliseconds.
+ */
+ void onDecoderInitialized(
+ EventTime eventTime, int trackType, String decoderName, long initializationDurationMs);
+
+ /**
+ * Called when an audio or video decoder input format changed.
+ *
+ * @param eventTime The event time.
+ * @param trackType The track type of the decoder whose format changed. Either {@link
+ * C#TRACK_TYPE_AUDIO} or {@link C#TRACK_TYPE_VIDEO}.
+ * @param format The new input format for the decoder.
+ */
+ void onDecoderInputFormatChanged(EventTime eventTime, int trackType, Format format);
+
+ /**
+ * Called when an audio or video decoder has been disabled.
+ *
+ * @param eventTime The event time.
+ * @param trackType The track type of the disabled decoder. Either {@link C#TRACK_TYPE_AUDIO} or
+ * {@link C#TRACK_TYPE_VIDEO}.
+ * @param decoderCounters The accumulated event counters associated with this decoder.
+ */
+ void onDecoderDisabled(EventTime eventTime, int trackType, DecoderCounters decoderCounters);
+
+ /**
+ * Called when the audio session id is set.
+ *
+ * @param eventTime The event time.
+ * @param audioSessionId The audio session id.
+ */
+ void onAudioSessionId(EventTime eventTime, int audioSessionId);
+
+ /**
+ * Called when an audio underrun occurred.
+ *
+ * @param eventTime The event time.
+ * @param bufferSize The size of the {@link AudioSink}'s buffer, in bytes.
+ * @param bufferSizeMs The size of the {@link AudioSink}'s buffer, in milliseconds, if it is
+ * configured for PCM output. {@link C#TIME_UNSET} if it is configured for passthrough output,
+ * as the buffered media can have a variable bitrate so the duration may be unknown.
+ * @param elapsedSinceLastFeedMs The time since the {@link AudioSink} was last fed data.
+ */
+ void onAudioUnderrun(
+ EventTime eventTime, int bufferSize, long bufferSizeMs, long elapsedSinceLastFeedMs);
+
+ /**
+ * Called after video frames have been dropped.
+ *
+ * @param eventTime The event time.
+ * @param droppedFrames The number of dropped frames since the last call to this method.
+ * @param elapsedMs The duration in milliseconds over which the frames were dropped. This duration
+ * is timed from when the renderer was started or from when dropped frames were last reported
+ * (whichever was more recent), and not from when the first of the reported drops occurred.
+ */
+ void onDroppedVideoFrames(EventTime eventTime, int droppedFrames, long elapsedMs);
+
+ /**
+ * Called before a frame is rendered for the first time since setting the surface, and each time
+ * there's a change in the size or pixel aspect ratio of the video being rendered.
+ *
+ * @param eventTime The event time.
+ * @param width The width of the video.
+ * @param height The height of the video.
+ * @param unappliedRotationDegrees For videos that require a rotation, this is the clockwise
+ * rotation in degrees that the application should apply for the video for it to be rendered
+ * in the correct orientation. This value will always be zero on API levels 21 and above,
+ * since the renderer will apply all necessary rotations internally.
+ * @param pixelWidthHeightRatio The width to height ratio of each pixel.
+ */
+ void onVideoSizeChanged(
+ EventTime eventTime,
+ int width,
+ int height,
+ int unappliedRotationDegrees,
+ float pixelWidthHeightRatio);
+
+ /**
+ * Called when a frame is rendered for the first time since setting the surface, and when a frame
+ * is rendered for the first time since the renderer was reset.
+ *
+ * @param eventTime The event time.
+ * @param surface The {@link Surface} to which a first frame has been rendered, or {@code null} if
+ * the renderer renders to something that isn't a {@link Surface}.
+ */
+ void onRenderedFirstFrame(EventTime eventTime, Surface surface);
+
+ /**
+ * Called each time drm keys are loaded.
+ *
+ * @param eventTime The event time.
+ */
+ void onDrmKeysLoaded(EventTime eventTime);
+
+ /**
+ * Called when a drm error occurs. These errors are just for informational purposes and the player
+ * may recover.
+ *
+ * @param eventTime The event time.
+ * @param error The error.
+ */
+ void onDrmSessionManagerError(EventTime eventTime, Exception error);
+
+ /**
+ * Called each time offline drm keys are restored.
+ *
+ * @param eventTime The event time.
+ */
+ void onDrmKeysRestored(EventTime eventTime);
+
+ /**
+ * Called each time offline drm keys are removed.
+ *
+ * @param eventTime The event time.
+ */
+ void onDrmKeysRemoved(EventTime eventTime);
+}
diff --git a/library/core/src/main/java/com/google/android/exoplayer2/analytics/DefaultAnalyticsListener.java b/library/core/src/main/java/com/google/android/exoplayer2/analytics/DefaultAnalyticsListener.java
new file mode 100644
index 0000000000..4a49de56b0
--- /dev/null
+++ b/library/core/src/main/java/com/google/android/exoplayer2/analytics/DefaultAnalyticsListener.java
@@ -0,0 +1,166 @@
+/*
+ * Copyright (C) 2018 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package com.google.android.exoplayer2.analytics;
+
+import android.net.NetworkInfo;
+import android.view.Surface;
+import com.google.android.exoplayer2.ExoPlaybackException;
+import com.google.android.exoplayer2.Format;
+import com.google.android.exoplayer2.PlaybackParameters;
+import com.google.android.exoplayer2.decoder.DecoderCounters;
+import com.google.android.exoplayer2.metadata.Metadata;
+import com.google.android.exoplayer2.source.MediaSourceEventListener.LoadEventInfo;
+import com.google.android.exoplayer2.source.MediaSourceEventListener.MediaLoadData;
+import com.google.android.exoplayer2.source.TrackGroupArray;
+import com.google.android.exoplayer2.trackselection.TrackSelectionArray;
+import java.io.IOException;
+
+/**
+ * {@link AnalyticsListener} allowing selective overrides. All methods are implemented as no-ops.
+ */
+public abstract class DefaultAnalyticsListener implements AnalyticsListener {
+
+ @Override
+ public void onPlayerStateChanged(EventTime eventTime, boolean playWhenReady, int playbackState) {}
+
+ @Override
+ public void onTimelineChanged(EventTime eventTime, int reason) {}
+
+ @Override
+ public void onPositionDiscontinuity(EventTime eventTime, int reason) {}
+
+ @Override
+ public void onSeekStarted(EventTime eventTime) {}
+
+ @Override
+ public void onSeekProcessed(EventTime eventTime) {}
+
+ @Override
+ public void onPlaybackParametersChanged(
+ EventTime eventTime, PlaybackParameters playbackParameters) {}
+
+ @Override
+ public void onRepeatModeChanged(EventTime eventTime, int repeatMode) {}
+
+ @Override
+ public void onShuffleModeChanged(EventTime eventTime, boolean shuffleModeEnabled) {}
+
+ @Override
+ public void onLoadingChanged(EventTime eventTime, boolean isLoading) {}
+
+ @Override
+ public void onPlayerError(EventTime eventTime, ExoPlaybackException error) {}
+
+ @Override
+ public void onTracksChanged(
+ EventTime eventTime, TrackGroupArray trackGroups, TrackSelectionArray trackSelections) {}
+
+ @Override
+ public void onLoadStarted(
+ EventTime eventTime, LoadEventInfo loadEventInfo, MediaLoadData mediaLoadData) {}
+
+ @Override
+ public void onLoadCompleted(
+ EventTime eventTime, LoadEventInfo loadEventInfo, MediaLoadData mediaLoadData) {}
+
+ @Override
+ public void onLoadCanceled(
+ EventTime eventTime, LoadEventInfo loadEventInfo, MediaLoadData mediaLoadData) {}
+
+ @Override
+ public void onLoadError(
+ EventTime eventTime,
+ LoadEventInfo loadEventInfo,
+ MediaLoadData mediaLoadData,
+ IOException error,
+ boolean wasCanceled) {}
+
+ @Override
+ public void onDownstreamFormatChanged(EventTime eventTime, MediaLoadData mediaLoadData) {}
+
+ @Override
+ public void onUpstreamDiscarded(EventTime eventTime, MediaLoadData mediaLoadData) {}
+
+ @Override
+ public void onMediaPeriodCreated(EventTime eventTime) {}
+
+ @Override
+ public void onMediaPeriodReleased(EventTime eventTime) {}
+
+ @Override
+ public void onReadingStarted(EventTime eventTime) {}
+
+ @Override
+ public void onBandwidthEstimate(
+ EventTime eventTime, int totalLoadTimeMs, long totalBytesLoaded, long bitrateEstimate) {}
+
+ @Override
+ public void onViewportSizeChange(EventTime eventTime, int width, int height) {}
+
+ @Override
+ public void onNetworkTypeChanged(EventTime eventTime, NetworkInfo networkInfo) {}
+
+ @Override
+ public void onMetadata(EventTime eventTime, Metadata metadata) {}
+
+ @Override
+ public void onDecoderEnabled(
+ EventTime eventTime, int trackType, DecoderCounters decoderCounters) {}
+
+ @Override
+ public void onDecoderInitialized(
+ EventTime eventTime, int trackType, String decoderName, long initializationDurationMs) {}
+
+ @Override
+ public void onDecoderInputFormatChanged(EventTime eventTime, int trackType, Format format) {}
+
+ @Override
+ public void onDecoderDisabled(
+ EventTime eventTime, int trackType, DecoderCounters decoderCounters) {}
+
+ @Override
+ public void onAudioSessionId(EventTime eventTime, int audioSessionId) {}
+
+ @Override
+ public void onAudioUnderrun(
+ EventTime eventTime, int bufferSize, long bufferSizeMs, long elapsedSinceLastFeedMs) {}
+
+ @Override
+ public void onDroppedVideoFrames(EventTime eventTime, int droppedFrames, long elapsedMs) {}
+
+ @Override
+ public void onVideoSizeChanged(
+ EventTime eventTime,
+ int width,
+ int height,
+ int unappliedRotationDegrees,
+ float pixelWidthHeightRatio) {}
+
+ @Override
+ public void onRenderedFirstFrame(EventTime eventTime, Surface surface) {}
+
+ @Override
+ public void onDrmKeysLoaded(EventTime eventTime) {}
+
+ @Override
+ public void onDrmSessionManagerError(EventTime eventTime, Exception error) {}
+
+ @Override
+ public void onDrmKeysRestored(EventTime eventTime) {}
+
+ @Override
+ public void onDrmKeysRemoved(EventTime eventTime) {}
+}
diff --git a/library/core/src/main/java/com/google/android/exoplayer2/audio/Ac3Util.java b/library/core/src/main/java/com/google/android/exoplayer2/audio/Ac3Util.java
index e1a70e2579..c61b8ff24c 100644
--- a/library/core/src/main/java/com/google/android/exoplayer2/audio/Ac3Util.java
+++ b/library/core/src/main/java/com/google/android/exoplayer2/audio/Ac3Util.java
@@ -15,39 +15,35 @@
*/
package com.google.android.exoplayer2.audio;
+import android.support.annotation.IntDef;
import com.google.android.exoplayer2.C;
import com.google.android.exoplayer2.Format;
+import com.google.android.exoplayer2.audio.Ac3Util.SyncFrameInfo.StreamType;
import com.google.android.exoplayer2.drm.DrmInitData;
import com.google.android.exoplayer2.util.MimeTypes;
import com.google.android.exoplayer2.util.ParsableBitArray;
import com.google.android.exoplayer2.util.ParsableByteArray;
+import java.lang.annotation.Retention;
+import java.lang.annotation.RetentionPolicy;
import java.nio.ByteBuffer;
-/**
- * Utility methods for parsing (E-)AC-3 syncframes, which are access units in (E-)AC-3 bitstreams.
- */
+/** Utility methods for parsing Dolby TrueHD and (E-)AC3 syncframes. */
public final class Ac3Util {
- /**
- * Holds sample format information as presented by a syncframe header.
- */
- public static final class Ac3SyncFrameInfo {
+ /** Holds sample format information as presented by a syncframe header. */
+ public static final class SyncFrameInfo {
- /**
- * Undefined AC3 stream type.
- */
+ /** AC3 stream types. See also ETSI TS 102 366 E.1.3.1.1. */
+ @Retention(RetentionPolicy.SOURCE)
+ @IntDef({STREAM_TYPE_UNDEFINED, STREAM_TYPE_TYPE0, STREAM_TYPE_TYPE1, STREAM_TYPE_TYPE2})
+ public @interface StreamType {}
+ /** Undefined AC3 stream type. */
public static final int STREAM_TYPE_UNDEFINED = -1;
- /**
- * Type 0 AC3 stream type. See ETSI TS 102 366 E.1.3.1.1.
- */
+ /** Type 0 AC3 stream type. */
public static final int STREAM_TYPE_TYPE0 = 0;
- /**
- * Type 1 AC3 stream type. See ETSI TS 102 366 E.1.3.1.1.
- */
+ /** Type 1 AC3 stream type. */
public static final int STREAM_TYPE_TYPE1 = 1;
- /**
- * Type 2 AC3 stream type. See ETSI TS 102 366 E.1.3.1.1.
- */
+ /** Type 2 AC3 stream type. */
public static final int STREAM_TYPE_TYPE2 = 2;
/**
@@ -56,10 +52,10 @@ public final class Ac3Util {
*/
public final String mimeType;
/**
- * The type of the stream if {@link #mimeType} is {@link MimeTypes#AUDIO_E_AC3}, or
- * {@link #STREAM_TYPE_UNDEFINED} otherwise.
+ * The type of the stream if {@link #mimeType} is {@link MimeTypes#AUDIO_E_AC3}, or {@link
+ * #STREAM_TYPE_UNDEFINED} otherwise.
*/
- public final int streamType;
+ public final @StreamType int streamType;
/**
* The audio sampling rate in Hz.
*/
@@ -77,8 +73,13 @@ public final class Ac3Util {
*/
public final int sampleCount;
- private Ac3SyncFrameInfo(String mimeType, int streamType, int channelCount, int sampleRate,
- int frameSize, int sampleCount) {
+ private SyncFrameInfo(
+ String mimeType,
+ @StreamType int streamType,
+ int channelCount,
+ int sampleRate,
+ int frameSize,
+ int sampleCount) {
this.mimeType = mimeType;
this.streamType = streamType;
this.channelCount = channelCount;
@@ -89,6 +90,17 @@ public final class Ac3Util {
}
+ /**
+ * The number of samples to store in each output chunk when rechunking TrueHD streams. The number
+ * of samples extracted from the container corresponding to one syncframe must be an integer
+ * multiple of this value.
+ */
+ public static final int TRUEHD_RECHUNK_SAMPLE_COUNT = 16;
+ /**
+ * The number of bytes that must be parsed from a TrueHD syncframe to calculate the sample count.
+ */
+ public static final int TRUEHD_SYNCFRAME_PREFIX_LENGTH = 10;
+
/**
* The number of new samples per (E-)AC-3 audio block.
*/
@@ -181,7 +193,14 @@ public final class Ac3Util {
channelCount += 2;
}
}
- return Format.createAudioSampleFormat(trackId, MimeTypes.AUDIO_E_AC3, null, Format.NO_VALUE,
+ String mimeType = MimeTypes.AUDIO_E_AC3;
+ if (data.bytesLeft() > 0) {
+ nextByte = data.readUnsignedByte();
+ if ((nextByte & 0x01) != 0) { // flag_ec3_extension_type_a
+ mimeType = MimeTypes.AUDIO_E_AC3_JOC;
+ }
+ }
+ return Format.createAudioSampleFormat(trackId, mimeType, null, Format.NO_VALUE,
Format.NO_VALUE, channelCount, sampleRate, null, drmInitData, 0, language);
}
@@ -192,35 +211,196 @@ public final class Ac3Util {
* @param data The data to parse, positioned at the start of the syncframe.
* @return The (E-)AC-3 format data parsed from the header.
*/
- public static Ac3SyncFrameInfo parseAc3SyncframeInfo(ParsableBitArray data) {
+ public static SyncFrameInfo parseAc3SyncframeInfo(ParsableBitArray data) {
int initialPosition = data.getPosition();
data.skipBits(40);
boolean isEac3 = data.readBits(5) == 16;
data.setPosition(initialPosition);
String mimeType;
- int streamType = Ac3SyncFrameInfo.STREAM_TYPE_UNDEFINED;
+ @StreamType int streamType = SyncFrameInfo.STREAM_TYPE_UNDEFINED;
int sampleRate;
int acmod;
int frameSize;
int sampleCount;
+ boolean lfeon;
+ int channelCount;
if (isEac3) {
- mimeType = MimeTypes.AUDIO_E_AC3;
+ // Syntax from ETSI TS 102 366 V1.2.1 subsections E.1.2.1 and E.1.2.2.
data.skipBits(16); // syncword
- streamType = data.readBits(2);
+ switch (data.readBits(2)) { // strmtyp
+ case 0:
+ streamType = SyncFrameInfo.STREAM_TYPE_TYPE0;
+ break;
+ case 1:
+ streamType = SyncFrameInfo.STREAM_TYPE_TYPE1;
+ break;
+ case 2:
+ streamType = SyncFrameInfo.STREAM_TYPE_TYPE2;
+ break;
+ default:
+ streamType = SyncFrameInfo.STREAM_TYPE_UNDEFINED;
+ break;
+ }
data.skipBits(3); // substreamid
frameSize = (data.readBits(11) + 1) * 2;
int fscod = data.readBits(2);
int audioBlocks;
+ int numblkscod;
if (fscod == 3) {
+ numblkscod = 3;
sampleRate = SAMPLE_RATE_BY_FSCOD2[data.readBits(2)];
audioBlocks = 6;
} else {
- int numblkscod = data.readBits(2);
+ numblkscod = data.readBits(2);
audioBlocks = BLOCKS_PER_SYNCFRAME_BY_NUMBLKSCOD[numblkscod];
sampleRate = SAMPLE_RATE_BY_FSCOD[fscod];
}
sampleCount = AUDIO_SAMPLES_PER_AUDIO_BLOCK * audioBlocks;
acmod = data.readBits(3);
+ lfeon = data.readBit();
+ channelCount = CHANNEL_COUNT_BY_ACMOD[acmod] + (lfeon ? 1 : 0);
+ data.skipBits(5 + 5); // bsid, dialnorm
+ if (data.readBit()) { // compre
+ data.skipBits(8); // compr
+ }
+ if (acmod == 0) {
+ data.skipBits(5); // dialnorm2
+ if (data.readBit()) { // compr2e
+ data.skipBits(8); // compr2
+ }
+ }
+ if (streamType == SyncFrameInfo.STREAM_TYPE_TYPE1 && data.readBit()) { // chanmape
+ data.skipBits(16); // chanmap
+ }
+ if (data.readBit()) { // mixmdate
+ if (acmod > 2) {
+ data.skipBits(2); // dmixmod
+ }
+ if ((acmod & 0x01) != 0 && acmod > 2) {
+ data.skipBits(3 + 3); // ltrtcmixlev, lorocmixlev
+ }
+ if ((acmod & 0x04) != 0) {
+ data.skipBits(6); // ltrtsurmixlev, lorosurmixlev
+ }
+ if (lfeon && data.readBit()) { // lfemixlevcode
+ data.skipBits(5); // lfemixlevcod
+ }
+ if (streamType == SyncFrameInfo.STREAM_TYPE_TYPE0) {
+ if (data.readBit()) { // pgmscle
+ data.skipBits(6); //pgmscl
+ }
+ if (acmod == 0 && data.readBit()) { // pgmscl2e
+ data.skipBits(6); // pgmscl2
+ }
+ if (data.readBit()) { // extpgmscle
+ data.skipBits(6); // extpgmscl
+ }
+ int mixdef = data.readBits(2);
+ if (mixdef == 1) {
+ data.skipBits(1 + 1 + 3); // premixcmpsel, drcsrc, premixcmpscl
+ } else if (mixdef == 2) {
+ data.skipBits(12); // mixdata
+ } else if (mixdef == 3) {
+ int mixdeflen = data.readBits(5);
+ if (data.readBit()) { // mixdata2e
+ data.skipBits(1 + 1 + 3); // premixcmpsel, drcsrc, premixcmpscl
+ if (data.readBit()) { // extpgmlscle
+ data.skipBits(4); // extpgmlscl
+ }
+ if (data.readBit()) { // extpgmcscle
+ data.skipBits(4); // extpgmcscl
+ }
+ if (data.readBit()) { // extpgmrscle
+ data.skipBits(4); // extpgmrscl
+ }
+ if (data.readBit()) { // extpgmlsscle
+ data.skipBits(4); // extpgmlsscl
+ }
+ if (data.readBit()) { // extpgmrsscle
+ data.skipBits(4); // extpgmrsscl
+ }
+ if (data.readBit()) { // extpgmlfescle
+ data.skipBits(4); // extpgmlfescl
+ }
+ if (data.readBit()) { // dmixscle
+ data.skipBits(4); // dmixscl
+ }
+ if (data.readBit()) { // addche
+ if (data.readBit()) { // extpgmaux1scle
+ data.skipBits(4); // extpgmaux1scl
+ }
+ if (data.readBit()) { // extpgmaux2scle
+ data.skipBits(4); // extpgmaux2scl
+ }
+ }
+ }
+ if (data.readBit()) { // mixdata3e
+ data.skipBits(5); // spchdat
+ if (data.readBit()) { // addspchdate
+ data.skipBits(5 + 2); // spchdat1, spchan1att
+ if (data.readBit()) { // addspdat1e
+ data.skipBits(5 + 3); // spchdat2, spchan2att
+ }
+ }
+ }
+ data.skipBits(8 * (mixdeflen + 2)); // mixdata
+ data.byteAlign(); // mixdatafill
+ }
+ if (acmod < 2) {
+ if (data.readBit()) { // paninfoe
+ data.skipBits(8 + 6); // panmean, paninfo
+ }
+ if (acmod == 0) {
+ if (data.readBit()) { // paninfo2e
+ data.skipBits(8 + 6); // panmean2, paninfo2
+ }
+ }
+ }
+ if (data.readBit()) { // frmmixcfginfoe
+ if (numblkscod == 0) {
+ data.skipBits(5); // blkmixcfginfo[0]
+ } else {
+ for (int blk = 0; blk < audioBlocks; blk++) {
+ if (data.readBit()) { // blkmixcfginfoe
+ data.skipBits(5); // blkmixcfginfo[blk]
+ }
+ }
+ }
+ }
+ }
+ }
+ if (data.readBit()) { // infomdate
+ data.skipBits(3 + 1 + 1); // bsmod, copyrightb, origbs
+ if (acmod == 2) {
+ data.skipBits(2 + 2); // dsurmod, dheadphonmod
+ }
+ if (acmod >= 6) {
+ data.skipBits(2); // dsurexmod
+ }
+ if (data.readBit()) { // audioprodie
+ data.skipBits(5 + 2 + 1); // mixlevel, roomtyp, adconvtyp
+ }
+ if (acmod == 0 && data.readBit()) { // audioprodi2e
+ data.skipBits(5 + 2 + 1); // mixlevel2, roomtyp2, adconvtyp2
+ }
+ if (fscod < 3) {
+ data.skipBit(); // sourcefscod
+ }
+ }
+ if (streamType == SyncFrameInfo.STREAM_TYPE_TYPE0 && numblkscod != 3) {
+ data.skipBit(); // convsync
+ }
+ if (streamType == SyncFrameInfo.STREAM_TYPE_TYPE2
+ && (numblkscod == 3 || data.readBit())) { // blkid
+ data.skipBits(6); // frmsizecod
+ }
+ mimeType = MimeTypes.AUDIO_E_AC3;
+ if (data.readBit()) { // addbsie
+ int addbsil = data.readBits(6);
+ if (addbsil == 1 && data.readBits(8) == 1) { // addbsi
+ mimeType = MimeTypes.AUDIO_E_AC3_JOC;
+ }
+ }
} else /* is AC-3 */ {
mimeType = MimeTypes.AUDIO_AC3;
data.skipBits(16 + 16); // syncword, crc1
@@ -240,11 +420,11 @@ public final class Ac3Util {
}
sampleRate = SAMPLE_RATE_BY_FSCOD[fscod];
sampleCount = AC3_SYNCFRAME_AUDIO_SAMPLE_COUNT;
+ lfeon = data.readBit();
+ channelCount = CHANNEL_COUNT_BY_ACMOD[acmod] + (lfeon ? 1 : 0);
}
- boolean lfeon = data.readBit();
- int channelCount = CHANNEL_COUNT_BY_ACMOD[acmod] + (lfeon ? 1 : 0);
- return new Ac3SyncFrameInfo(mimeType, streamType, channelCount, sampleRate, frameSize,
- sampleCount);
+ return new SyncFrameInfo(
+ mimeType, streamType, channelCount, sampleRate, frameSize, sampleCount);
}
/**
@@ -283,6 +463,62 @@ public final class Ac3Util {
: BLOCKS_PER_SYNCFRAME_BY_NUMBLKSCOD[(buffer.get(buffer.position() + 4) & 0x30) >> 4]);
}
+ /**
+ * Returns the offset relative to the buffer's position of the start of a TrueHD syncframe, or
+ * {@link C#INDEX_UNSET} if no syncframe was found. The buffer's position is not modified.
+ *
+ * @param buffer The {@link ByteBuffer} within which to find a syncframe.
+ * @return The offset relative to the buffer's position of the start of a TrueHD syncframe, or
+ * {@link C#INDEX_UNSET} if no syncframe was found.
+ */
+ public static int findTrueHdSyncframeOffset(ByteBuffer buffer) {
+ int startIndex = buffer.position();
+ int endIndex = buffer.limit() - TRUEHD_SYNCFRAME_PREFIX_LENGTH;
+ for (int i = startIndex; i <= endIndex; i++) {
+ // The syncword ends 0xBA for TrueHD or 0xBB for MLP.
+ if ((buffer.getInt(i + 4) & 0xFEFFFFFF) == 0xBA6F72F8) {
+ return i - startIndex;
+ }
+ }
+ return C.INDEX_UNSET;
+ }
+
+ /**
+ * Returns the number of audio samples represented by the given TrueHD syncframe, or 0 if the
+ * buffer is not the start of a syncframe.
+ *
+ * @param syncframe The bytes from which to read the syncframe. Must be at least {@link
+ * #TRUEHD_SYNCFRAME_PREFIX_LENGTH} bytes long.
+ * @return The number of audio samples represented by the syncframe, or 0 if the buffer doesn't
+ * contain the start of a syncframe.
+ */
+ public static int parseTrueHdSyncframeAudioSampleCount(byte[] syncframe) {
+ // TODO: Link to specification if available.
+ // The syncword ends 0xBA for TrueHD or 0xBB for MLP.
+ if (syncframe[4] != (byte) 0xF8
+ || syncframe[5] != (byte) 0x72
+ || syncframe[6] != (byte) 0x6F
+ || (syncframe[7] & 0xFE) != 0xBA) {
+ return 0;
+ }
+ boolean isMlp = (syncframe[7] & 0xFF) == 0xBB;
+ return 40 << ((syncframe[isMlp ? 9 : 8] >> 4) & 0x07);
+ }
+
+ /**
+ * Reads the number of audio samples represented by a TrueHD syncframe. The buffer's position is
+ * not modified.
+ *
+ * @param buffer The {@link ByteBuffer} from which to read the syncframe.
+ * @param offset The offset of the start of the syncframe relative to the buffer's position.
+ * @return The number of audio samples represented by the syncframe.
+ */
+ public static int parseTrueHdSyncframeAudioSampleCount(ByteBuffer buffer, int offset) {
+ // TODO: Link to specification if available.
+ boolean isMlp = (buffer.get(buffer.position() + offset + 7) & 0xFF) == 0xBB;
+ return 40 << ((buffer.get(buffer.position() + offset + (isMlp ? 9 : 8)) >> 4) & 0x07);
+ }
+
private static int getAc3SyncframeSize(int fscod, int frmsizecod) {
int halfFrmsizecod = frmsizecod / 2;
if (fscod < 0 || fscod >= SAMPLE_RATE_BY_FSCOD.length || frmsizecod < 0
diff --git a/library/core/src/main/java/com/google/android/exoplayer2/audio/AudioAttributes.java b/library/core/src/main/java/com/google/android/exoplayer2/audio/AudioAttributes.java
index 337200da8f..5e963a2540 100644
--- a/library/core/src/main/java/com/google/android/exoplayer2/audio/AudioAttributes.java
+++ b/library/core/src/main/java/com/google/android/exoplayer2/audio/AudioAttributes.java
@@ -16,6 +16,7 @@
package com.google.android.exoplayer2.audio;
import android.annotation.TargetApi;
+import android.support.annotation.Nullable;
import com.google.android.exoplayer2.C;
/**
@@ -119,7 +120,7 @@ public final class AudioAttributes {
}
@Override
- public boolean equals(Object obj) {
+ public boolean equals(@Nullable Object obj) {
if (this == obj) {
return true;
}
diff --git a/library/core/src/main/java/com/google/android/exoplayer2/audio/AudioCapabilities.java b/library/core/src/main/java/com/google/android/exoplayer2/audio/AudioCapabilities.java
index 499ea488c7..4b03a5047b 100644
--- a/library/core/src/main/java/com/google/android/exoplayer2/audio/AudioCapabilities.java
+++ b/library/core/src/main/java/com/google/android/exoplayer2/audio/AudioCapabilities.java
@@ -22,6 +22,7 @@ import android.content.Intent;
import android.content.IntentFilter;
import android.media.AudioFormat;
import android.media.AudioManager;
+import android.support.annotation.Nullable;
import java.util.Arrays;
/**
@@ -96,7 +97,7 @@ public final class AudioCapabilities {
}
@Override
- public boolean equals(Object other) {
+ public boolean equals(@Nullable Object other) {
if (this == other) {
return true;
}
diff --git a/library/core/src/main/java/com/google/android/exoplayer2/audio/AudioDecoderException.java b/library/core/src/main/java/com/google/android/exoplayer2/audio/AudioDecoderException.java
index b5ee052924..ac4f632d62 100644
--- a/library/core/src/main/java/com/google/android/exoplayer2/audio/AudioDecoderException.java
+++ b/library/core/src/main/java/com/google/android/exoplayer2/audio/AudioDecoderException.java
@@ -15,27 +15,21 @@
*/
package com.google.android.exoplayer2.audio;
-/**
- * Thrown when an audio decoder error occurs.
- */
-public abstract class AudioDecoderException extends Exception {
+/** Thrown when an audio decoder error occurs. */
+public class AudioDecoderException extends Exception {
- /**
- * @param detailMessage The detail message for this exception.
- */
- public AudioDecoderException(String detailMessage) {
- super(detailMessage);
+ /** @param message The detail message for this exception. */
+ public AudioDecoderException(String message) {
+ super(message);
}
/**
- * @param detailMessage The detail message for this exception.
- * @param cause the cause (which is saved for later retrieval by the
- * {@link #getCause()} method). (A null value is
- * permitted, and indicates that the cause is nonexistent or
- * unknown.)
+ * @param message The detail message for this exception.
+ * @param cause the cause (which is saved for later retrieval by the {@link #getCause()} method).
+ * A null value is permitted, and indicates that the cause is nonexistent or unknown.
*/
- public AudioDecoderException(String detailMessage, Throwable cause) {
- super(detailMessage, cause);
+ public AudioDecoderException(String message, Throwable cause) {
+ super(message, cause);
}
}
diff --git a/library/core/src/main/java/com/google/android/exoplayer2/audio/AudioProcessor.java b/library/core/src/main/java/com/google/android/exoplayer2/audio/AudioProcessor.java
index eced040812..f82be31f72 100644
--- a/library/core/src/main/java/com/google/android/exoplayer2/audio/AudioProcessor.java
+++ b/library/core/src/main/java/com/google/android/exoplayer2/audio/AudioProcessor.java
@@ -20,13 +20,22 @@ import java.nio.ByteBuffer;
import java.nio.ByteOrder;
/**
- * Interface for audio processors.
+ * Interface for audio processors, which take audio data as input and transform it, potentially
+ * modifying its channel count, encoding and/or sample rate.
+ *
+ *
+ * <p>Call {@link #configure(int, int, int)} to configure the processor to receive input audio, then
+ * call {@link #isActive()} to determine whether the processor is active. {@link
+ * #queueInput(ByteBuffer)}, {@link #queueEndOfStream()}, {@link #getOutput()}, {@link #isEnded()},
+ * {@link #getOutputChannelCount()}, {@link #getOutputEncoding()} and {@link
+ * #getOutputSampleRateHz()} may only be called if the processor is active. Call {@link #reset()} to
+ * reset the processor to its unconfigured state and release any resources.
+ *
+ *
+ * <p>In addition to being able to modify the format of audio, implementations may allow parameters
+ * to be set that affect the output audio and whether the processor is active/inactive.
*/
public interface AudioProcessor {
- /**
- * Exception thrown when a processor can't be configured for a given input audio format.
- */
+ /** Exception thrown when a processor can't be configured for a given input audio format. */
final class UnhandledFormatException extends Exception {
public UnhandledFormatException(int sampleRateHz, int channelCount, @C.Encoding int encoding) {
@@ -36,45 +45,49 @@ public interface AudioProcessor {
}
- /**
- * An empty, direct {@link ByteBuffer}.
- */
+ /** An empty, direct {@link ByteBuffer}. */
ByteBuffer EMPTY_BUFFER = ByteBuffer.allocateDirect(0).order(ByteOrder.nativeOrder());
/**
- * Configures the processor to process input audio with the specified format. After calling this
- * method, {@link #isActive()} returns whether the processor needs to handle buffers; if not, the
- * processor will not accept any buffers until it is reconfigured. Returns {@code true} if the
- * processor must be flushed, or if the value returned by {@link #isActive()} has changed as a
- * result of the call. If it's active, {@link #getOutputChannelCount()} and
- * {@link #getOutputEncoding()} return the processor's output format.
+ * Configures the processor to process input audio with the specified format and returns whether
+ * to {@link #flush()} it. After calling this method, if the processor is active, {@link
+ * #getOutputSampleRateHz()}, {@link #getOutputChannelCount()} and {@link #getOutputEncoding()}
+ * return its output format.
*
* @param sampleRateHz The sample rate of input audio in Hz.
* @param channelCount The number of interleaved channels in input audio.
* @param encoding The encoding of input audio.
- * @return {@code true} if the processor must be flushed or the value returned by
- * {@link #isActive()} has changed as a result of the call.
+ * @return Whether to {@link #flush()} the processor.
* @throws UnhandledFormatException Thrown if the specified format can't be handled as input.
*/
boolean configure(int sampleRateHz, int channelCount, @C.Encoding int encoding)
throws UnhandledFormatException;
- /**
- * Returns whether the processor is configured and active.
- */
+ /** Returns whether the processor is configured and will process input buffers. */
boolean isActive();
/**
- * Returns the number of audio channels in the data output by the processor.
+ * Returns the number of audio channels in the data output by the processor. The value may change
+ * as a result of calling {@link #configure(int, int, int)} and is undefined if the instance is
+ * not active.
*/
int getOutputChannelCount();
/**
- * Returns the audio encoding used in the data output by the processor.
+ * Returns the audio encoding used in the data output by the processor. The value may change as a
+ * result of calling {@link #configure(int, int, int)} and is undefined if the instance is not
+ * active.
*/
@C.Encoding
int getOutputEncoding();
+ /**
+ * Returns the sample rate of audio output by the processor, in hertz. The value may change as a
+ * result of calling {@link #configure(int, int, int)} and is undefined if the instance is not
+ * active.
+ */
+ int getOutputSampleRateHz();
+
/**
* Queues audio data between the position and limit of the input {@code buffer} for processing.
* {@code buffer} must be a direct byte buffer with native byte order. Its contents are treated as
@@ -110,14 +123,9 @@ public interface AudioProcessor {
*/
boolean isEnded();
- /**
- * Clears any state in preparation for receiving a new stream of input buffers.
- */
+ /** Clears any state in preparation for receiving a new stream of input buffers. */
void flush();
- /**
- * Resets the processor to its initial state.
- */
+ /** Resets the processor to its unconfigured state. */
void reset();
-
}
diff --git a/library/core/src/main/java/com/google/android/exoplayer2/audio/AudioRendererEventListener.java b/library/core/src/main/java/com/google/android/exoplayer2/audio/AudioRendererEventListener.java
index 5f9f599f01..7a4958a61a 100644
--- a/library/core/src/main/java/com/google/android/exoplayer2/audio/AudioRendererEventListener.java
+++ b/library/core/src/main/java/com/google/android/exoplayer2/audio/AudioRendererEventListener.java
@@ -63,15 +63,15 @@ public interface AudioRendererEventListener {
void onAudioInputFormatChanged(Format format);
/**
- * Called when an {@link AudioTrack} underrun occurs.
+ * Called when an {@link AudioSink} underrun occurs.
*
- * @param bufferSize The size of the {@link AudioTrack}'s buffer, in bytes.
- * @param bufferSizeMs The size of the {@link AudioTrack}'s buffer, in milliseconds, if it is
+ * @param bufferSize The size of the {@link AudioSink}'s buffer, in bytes.
+ * @param bufferSizeMs The size of the {@link AudioSink}'s buffer, in milliseconds, if it is
* configured for PCM output. {@link C#TIME_UNSET} if it is configured for passthrough output,
* as the buffered media can have a variable bitrate so the duration may be unknown.
- * @param elapsedSinceLastFeedMs The time since the {@link AudioTrack} was last fed data.
+ * @param elapsedSinceLastFeedMs The time since the {@link AudioSink} was last fed data.
*/
- void onAudioTrackUnderrun(int bufferSize, long bufferSizeMs, long elapsedSinceLastFeedMs);
+ void onAudioSinkUnderrun(int bufferSize, long bufferSizeMs, long elapsedSinceLastFeedMs);
/**
* Called when the renderer is disabled.
@@ -144,7 +144,7 @@ public interface AudioRendererEventListener {
}
/**
- * Invokes {@link AudioRendererEventListener#onAudioTrackUnderrun(int, long, long)}.
+ * Invokes {@link AudioRendererEventListener#onAudioSinkUnderrun(int, long, long)}.
*/
public void audioTrackUnderrun(final int bufferSize, final long bufferSizeMs,
final long elapsedSinceLastFeedMs) {
@@ -152,7 +152,7 @@ public interface AudioRendererEventListener {
handler.post(new Runnable() {
@Override
public void run() {
- listener.onAudioTrackUnderrun(bufferSize, bufferSizeMs, elapsedSinceLastFeedMs);
+ listener.onAudioSinkUnderrun(bufferSize, bufferSizeMs, elapsedSinceLastFeedMs);
}
});
}
diff --git a/library/core/src/main/java/com/google/android/exoplayer2/audio/AudioSink.java b/library/core/src/main/java/com/google/android/exoplayer2/audio/AudioSink.java
new file mode 100644
index 0000000000..07584d575e
--- /dev/null
+++ b/library/core/src/main/java/com/google/android/exoplayer2/audio/AudioSink.java
@@ -0,0 +1,331 @@
+/*
+ * Copyright (C) 2017 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package com.google.android.exoplayer2.audio;
+
+import android.media.AudioTrack;
+import android.support.annotation.Nullable;
+import com.google.android.exoplayer2.C;
+import com.google.android.exoplayer2.PlaybackParameters;
+import java.nio.ByteBuffer;
+
+/**
+ * A sink that consumes audio data.
+ *
+ * Before starting playback, specify the input audio format by calling
+ * {@link #configure(int, int, int, int, int[], int, int)}.
+ *
+ * Call {@link #handleBuffer(ByteBuffer, long)} to write data, and {@link #handleDiscontinuity()}
+ * when the data being fed is discontinuous. Call {@link #play()} to start playing the written data.
+ *
+ * Call {@link #configure(int, int, int, int, int[], int, int)} whenever the input format changes.
+ * The sink will be reinitialized on the next call to {@link #handleBuffer(ByteBuffer, long)}.
+ *
+ * Call {@link #reset()} to prepare the sink to receive audio data from a new playback position.
+ *
+ * Call {@link #playToEndOfStream()} repeatedly to play out all data when no more input buffers will
+ * be provided via {@link #handleBuffer(ByteBuffer, long)} until the next {@link #reset()}. Call
+ * {@link #release()} when the instance is no longer required.
+ *
+ * The implementation may be backed by a platform {@link AudioTrack}. In this case,
+ * {@link #setAudioSessionId(int)}, {@link #setAudioAttributes(AudioAttributes)},
+ * {@link #enableTunnelingV21(int)} and/or {@link #disableTunneling()} may be called before writing
+ * data to the sink. These methods may also be called after writing data to the sink, in which case
+ * it will be reinitialized as required. For implementations that are not based on platform
+ * {@link AudioTrack}s, calling methods relating to audio sessions, audio attributes, and tunneling
+ * may have no effect.
+ */
+public interface AudioSink {
+
+ /**
+ * Listener for audio sink events.
+ */
+ interface Listener {
+
+ /**
+ * Called if the audio sink has started rendering audio to a new platform audio session.
+ *
+ * @param audioSessionId The newly generated audio session's identifier.
+ */
+ void onAudioSessionId(int audioSessionId);
+
+ /**
+ * Called when the audio sink handles a buffer whose timestamp is discontinuous with the last
+ * buffer handled since it was reset.
+ */
+ void onPositionDiscontinuity();
+
+ /**
+ * Called when the audio sink runs out of data.
+ *
+ * An audio sink implementation may never call this method (for example, if audio data is
+ * consumed in batches rather than based on the sink's own clock).
+ *
+ * @param bufferSize The size of the sink's buffer, in bytes.
+ * @param bufferSizeMs The size of the sink's buffer, in milliseconds, if it is configured for
+ * PCM output. {@link C#TIME_UNSET} if it is configured for encoded audio output, as the
+ * buffered media can have a variable bitrate so the duration may be unknown.
+ * @param elapsedSinceLastFeedMs The time since the sink was last fed data, in milliseconds.
+ */
+ void onUnderrun(int bufferSize, long bufferSizeMs, long elapsedSinceLastFeedMs);
+
+ }
+
+ /**
+ * Thrown when a failure occurs configuring the sink.
+ */
+ final class ConfigurationException extends Exception {
+
+ /**
+ * Creates a new configuration exception with the specified {@code cause} and no message.
+ */
+ public ConfigurationException(Throwable cause) {
+ super(cause);
+ }
+
+ /**
+ * Creates a new configuration exception with the specified {@code message} and no cause.
+ */
+ public ConfigurationException(String message) {
+ super(message);
+ }
+
+ }
+
+ /**
+ * Thrown when a failure occurs initializing the sink.
+ */
+ final class InitializationException extends Exception {
+
+ /**
+ * The underlying {@link AudioTrack}'s state, if applicable.
+ */
+ public final int audioTrackState;
+
+ /**
+ * @param audioTrackState The underlying {@link AudioTrack}'s state, if applicable.
+ * @param sampleRate The requested sample rate in Hz.
+ * @param channelConfig The requested channel configuration.
+ * @param bufferSize The requested buffer size in bytes.
+ */
+ public InitializationException(int audioTrackState, int sampleRate, int channelConfig,
+ int bufferSize) {
+ super("AudioTrack init failed: " + audioTrackState + ", Config(" + sampleRate + ", "
+ + channelConfig + ", " + bufferSize + ")");
+ this.audioTrackState = audioTrackState;
+ }
+
+ }
+
+ /**
+ * Thrown when a failure occurs writing to the sink.
+ */
+ final class WriteException extends Exception {
+
+ /**
+ * The error value returned from the sink implementation. If the sink writes to a platform
+ * {@link AudioTrack}, this will be the error value returned from
+ * {@link AudioTrack#write(byte[], int, int)} or {@link AudioTrack#write(ByteBuffer, int, int)}.
+ * Otherwise, the meaning of the error code depends on the sink implementation.
+ */
+ public final int errorCode;
+
+ /**
+ * @param errorCode The error value returned from the sink implementation.
+ */
+ public WriteException(int errorCode) {
+ super("AudioTrack write failed: " + errorCode);
+ this.errorCode = errorCode;
+ }
+
+ }
+
+ /**
+ * Returned by {@link #getCurrentPositionUs(boolean)} when the position is not set.
+ */
+ long CURRENT_POSITION_NOT_SET = Long.MIN_VALUE;
+
+ /**
+ * Sets the listener for sink events, which should be the audio renderer.
+ *
+ * @param listener The listener for sink events, which should be the audio renderer.
+ */
+ void setListener(Listener listener);
+
+ /**
+ * Returns whether it's possible to play audio in the specified encoding.
+ *
+ * @param encoding The audio encoding.
+ * @return Whether it's possible to play audio in the specified encoding.
+ */
+ boolean isEncodingSupported(@C.Encoding int encoding);
+
+ /**
+ * Returns the playback position in the stream starting at zero, in microseconds, or
+ * {@link #CURRENT_POSITION_NOT_SET} if it is not yet available.
+ *
+ * @param sourceEnded Specify {@code true} if no more input buffers will be provided.
+ * @return The playback position relative to the start of playback, in microseconds.
+ */
+ long getCurrentPositionUs(boolean sourceEnded);
+
+ /**
+ * Configures (or reconfigures) the sink.
+ *
+ * @param inputEncoding The encoding of audio data provided in the input buffers.
+ * @param inputChannelCount The number of channels.
+ * @param inputSampleRate The sample rate in Hz.
+ * @param specifiedBufferSize A specific size for the playback buffer in bytes, or 0 to infer a
+ * suitable buffer size.
+ * @param outputChannels A mapping from input to output channels that is applied to this sink's
+ * input as a preprocessing step, if handling PCM input. Specify {@code null} to leave the
+ * input unchanged. Otherwise, the element at index {@code i} specifies index of the input
+ * channel to map to output channel {@code i} when preprocessing input buffers. After the map
+ * is applied the audio data will have {@code outputChannels.length} channels.
+ * @param trimStartFrames The number of audio frames to trim from the start of data written to the
+ * sink after this call.
+ * @param trimEndFrames The number of audio frames to trim from data written to the sink
+ * immediately preceding the next call to {@link #reset()} or this method.
+ * @throws ConfigurationException If an error occurs configuring the sink.
+ */
+ void configure(
+ @C.Encoding int inputEncoding,
+ int inputChannelCount,
+ int inputSampleRate,
+ int specifiedBufferSize,
+ @Nullable int[] outputChannels,
+ int trimStartFrames,
+ int trimEndFrames)
+ throws ConfigurationException;
+
+ /**
+ * Starts or resumes consuming audio if initialized.
+ */
+ void play();
+
+ /**
+ * Signals to the sink that the next buffer is discontinuous with the previous buffer.
+ */
+ void handleDiscontinuity();
+
+ /**
+ * Attempts to process data from a {@link ByteBuffer}, starting from its current position and
+ * ending at its limit (exclusive). The position of the {@link ByteBuffer} is advanced by the
+ * number of bytes that were handled. {@link Listener#onPositionDiscontinuity()} will be called if
+ * {@code presentationTimeUs} is discontinuous with the last buffer handled since the last reset.
+ *
+ * <p>Returns whether the data was handled in full. If the data was not handled in full then the same
+ * {@link ByteBuffer} must be provided to subsequent calls until it has been fully consumed,
+ * except in the case of an intervening call to {@link #reset()} (or to
+ * {@link #configure(int, int, int, int, int[], int, int)} that causes the sink to be reset).
+ *
+ * @param buffer The buffer containing audio data.
+ * @param presentationTimeUs The presentation timestamp of the buffer in microseconds.
+ * @return Whether the buffer was handled fully.
+ * @throws InitializationException If an error occurs initializing the sink.
+ * @throws WriteException If an error occurs writing the audio data.
+ */
+ boolean handleBuffer(ByteBuffer buffer, long presentationTimeUs)
+ throws InitializationException, WriteException;
+
+ /**
+ * Processes any remaining data. {@link #isEnded()} will return {@code true} when no data remains.
+ *
+ * @throws WriteException If an error occurs draining data to the sink.
+ */
+ void playToEndOfStream() throws WriteException;
+
+ /**
+ * Returns whether {@link #playToEndOfStream} has been called and all buffers have been processed.
+ */
+ boolean isEnded();
+
+ /**
+ * Returns whether the sink has data pending that has not been consumed yet.
+ */
+ boolean hasPendingData();
+
+ /**
+ * Attempts to set the playback parameters and returns the active playback parameters, which may
+ * differ from those passed in.
+ *
+ * @param playbackParameters The new playback parameters to attempt to set.
+ * @return The active playback parameters.
+ */
+ PlaybackParameters setPlaybackParameters(PlaybackParameters playbackParameters);
+
+ /**
+ * Gets the active {@link PlaybackParameters}.
+ */
+ PlaybackParameters getPlaybackParameters();
+
+ /**
+ * Sets attributes for audio playback. If the attributes have changed and if the sink is not
+ * configured for use with tunneling, then it is reset and the audio session id is cleared.
+ *
+ * <p>If the sink is configured for use with tunneling then the audio attributes are ignored. The
+ * sink is not reset and the audio session id is not cleared. The passed attributes will be used
+ * if the sink is later re-configured into non-tunneled mode.
+ *
+ * @param audioAttributes The attributes for audio playback.
+ */
+ void setAudioAttributes(AudioAttributes audioAttributes);
+
+ /**
+ * Sets the audio session id.
+ */
+ void setAudioSessionId(int audioSessionId);
+
+ /**
+ * Enables tunneling, if possible. The sink is reset if tunneling was previously disabled or if
+ * the audio session id has changed. Enabling tunneling is only possible if the sink is based on a
+ * platform {@link AudioTrack}, and requires platform API version 21 onwards.
+ *
+ * @param tunnelingAudioSessionId The audio session id to use.
+ * @throws IllegalStateException Thrown if enabling tunneling on platform API version < 21.
+ */
+ void enableTunnelingV21(int tunnelingAudioSessionId);
+
+ /**
+ * Disables tunneling. If tunneling was previously enabled then the sink is reset and any audio
+ * session id is cleared.
+ */
+ void disableTunneling();
+
+ /**
+ * Sets the playback volume.
+ *
+ * @param volume A volume in the range [0.0, 1.0].
+ */
+ void setVolume(float volume);
+
+ /**
+ * Pauses playback.
+ */
+ void pause();
+
+ /**
+ * Resets the sink, after which it is ready to receive buffers from a new playback position.
+ *
+ * <p>The audio session may remain active until {@link #release()} is called.
+ */
+ void reset();
+
+ /**
+ * Releases any resources associated with this instance.
+ */
+ void release();
+
+}
diff --git a/library/core/src/main/java/com/google/android/exoplayer2/audio/AudioTimestampPoller.java b/library/core/src/main/java/com/google/android/exoplayer2/audio/AudioTimestampPoller.java
new file mode 100644
index 0000000000..47120e7375
--- /dev/null
+++ b/library/core/src/main/java/com/google/android/exoplayer2/audio/AudioTimestampPoller.java
@@ -0,0 +1,307 @@
+/*
+ * Copyright (C) 2018 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package com.google.android.exoplayer2.audio;
+
+import android.annotation.TargetApi;
+import android.media.AudioTimestamp;
+import android.media.AudioTrack;
+import android.support.annotation.IntDef;
+import android.support.annotation.Nullable;
+import com.google.android.exoplayer2.C;
+import com.google.android.exoplayer2.util.Util;
+import java.lang.annotation.Retention;
+import java.lang.annotation.RetentionPolicy;
+
+/**
+ * Polls the {@link AudioTrack} timestamp, if the platform supports it, taking care of polling at
+ * the appropriate rate to detect when the timestamp starts to advance.
+ *
+ * <p>When the audio track isn't paused, call {@link #maybePollTimestamp(long)} regularly to check
+ * for timestamp updates. If it returns {@code true}, call {@link #getTimestampPositionFrames()} and
+ * {@link #getTimestampSystemTimeUs()} to access the updated timestamp, then call {@link
+ * #acceptTimestamp()} or {@link #rejectTimestamp()} to accept or reject it.
+ *
+ * <p>If {@link #hasTimestamp()} returns {@code true}, call {@link #getTimestampSystemTimeUs()} to
+ * get the system time at which the latest timestamp was sampled and {@link
+ * #getTimestampPositionFrames()} to get its position in frames. If {@link #isTimestampAdvancing()}
+ * returns {@code true}, the caller should assume that the timestamp has been increasing in real
+ * time since it was sampled. Otherwise, it may be stationary.
+ *
+ * <p>Call {@link #reset()} when pausing or resuming the track.
+ */
+/* package */ final class AudioTimestampPoller {
+
+ /** Timestamp polling states. */
+ @Retention(RetentionPolicy.SOURCE)
+ @IntDef({
+ STATE_INITIALIZING,
+ STATE_TIMESTAMP,
+ STATE_TIMESTAMP_ADVANCING,
+ STATE_NO_TIMESTAMP,
+ STATE_ERROR
+ })
+ private @interface State {}
+ /** State when first initializing. */
+ private static final int STATE_INITIALIZING = 0;
+ /** State when we have a timestamp and we don't know if it's advancing. */
+ private static final int STATE_TIMESTAMP = 1;
+ /** State when we have a timestamp and we know it is advancing. */
+ private static final int STATE_TIMESTAMP_ADVANCING = 2;
+ /** State when no timestamp is available. */
+ private static final int STATE_NO_TIMESTAMP = 3;
+ /** State when the last timestamp was rejected as invalid. */
+ private static final int STATE_ERROR = 4;
+
+ /** The polling interval for {@link #STATE_INITIALIZING} and {@link #STATE_TIMESTAMP}. */
+ private static final int FAST_POLL_INTERVAL_US = 5_000;
+ /**
+ * The polling interval for {@link #STATE_TIMESTAMP_ADVANCING} and {@link #STATE_NO_TIMESTAMP}.
+ */
+ private static final int SLOW_POLL_INTERVAL_US = 10_000_000;
+ /** The polling interval for {@link #STATE_ERROR}. */
+ private static final int ERROR_POLL_INTERVAL_US = 500_000;
+
+ /**
+ * The minimum duration to remain in {@link #STATE_INITIALIZING} if no timestamps are being
+ * returned before transitioning to {@link #STATE_NO_TIMESTAMP}.
+ */
+ private static final int INITIALIZING_DURATION_US = 500_000;
+
+ /** Timestamp wrapper, or {@code null} if the platform does not support timestamps (pre-19). */
+ private final @Nullable AudioTimestampV19 audioTimestamp;
+
+ /** The current polling state. */
+ private @State int state;
+ /** The system time at which {@link #STATE_INITIALIZING} was last entered, in microseconds. */
+ private long initializeSystemTimeUs;
+ /** The current minimum interval between timestamp samples, in microseconds. */
+ private long sampleIntervalUs;
+ /** The system time at which a timestamp poll was last attempted, in microseconds. */
+ private long lastTimestampSampleTimeUs;
+ /** The position of the first timestamp received while initializing, in frames. */
+ private long initialTimestampPositionFrames;
+
+ /**
+ * Creates a new audio timestamp poller.
+ *
+ * @param audioTrack The audio track that will provide timestamps, if the platform supports it.
+ */
+ public AudioTimestampPoller(AudioTrack audioTrack) {
+ if (Util.SDK_INT >= 19) {
+ audioTimestamp = new AudioTimestampV19(audioTrack);
+ reset();
+ } else {
+ // Timestamps are not supported before API 19, so remain in STATE_NO_TIMESTAMP forever.
+ audioTimestamp = null;
+ updateState(STATE_NO_TIMESTAMP);
+ }
+ }
+
+ /**
+ * Polls the timestamp if required and returns whether it was updated. If {@code true}, the latest
+ * timestamp is available via {@link #getTimestampSystemTimeUs()} and {@link
+ * #getTimestampPositionFrames()}, and the caller should call {@link #acceptTimestamp()} if the
+ * timestamp was valid, or {@link #rejectTimestamp()} otherwise. The values returned by {@link
+ * #hasTimestamp()} and {@link #isTimestampAdvancing()} may be updated.
+ *
+ * @param systemTimeUs The current system time, in microseconds.
+ * @return Whether the timestamp was updated.
+ */
+ public boolean maybePollTimestamp(long systemTimeUs) {
+ if (audioTimestamp == null || (systemTimeUs - lastTimestampSampleTimeUs) < sampleIntervalUs) {
+ return false;
+ }
+ lastTimestampSampleTimeUs = systemTimeUs;
+ boolean updatedTimestamp = audioTimestamp.maybeUpdateTimestamp();
+ switch (state) {
+ case STATE_INITIALIZING:
+ if (updatedTimestamp) {
+ if (audioTimestamp.getTimestampSystemTimeUs() >= initializeSystemTimeUs) {
+ // We have an initial timestamp, but don't know if it's advancing yet.
+ initialTimestampPositionFrames = audioTimestamp.getTimestampPositionFrames();
+ updateState(STATE_TIMESTAMP);
+ } else {
+ // Drop the timestamp, as it was sampled before the last reset.
+ updatedTimestamp = false;
+ }
+ } else if (systemTimeUs - initializeSystemTimeUs > INITIALIZING_DURATION_US) {
+ // We haven't received a timestamp for a while, so they probably aren't available for the
+ // current audio route. Poll infrequently in case the route changes later.
+ // TODO: Ideally we should listen for audio route changes in order to detect when a
+ // timestamp becomes available again.
+ updateState(STATE_NO_TIMESTAMP);
+ }
+ break;
+ case STATE_TIMESTAMP:
+ if (updatedTimestamp) {
+ long timestampPositionFrames = audioTimestamp.getTimestampPositionFrames();
+ if (timestampPositionFrames > initialTimestampPositionFrames) {
+ updateState(STATE_TIMESTAMP_ADVANCING);
+ }
+ } else {
+ reset();
+ }
+ break;
+ case STATE_TIMESTAMP_ADVANCING:
+ if (!updatedTimestamp) {
+ // The audio route may have changed, so reset polling.
+ reset();
+ }
+ break;
+ case STATE_NO_TIMESTAMP:
+ if (updatedTimestamp) {
+ // The audio route may have changed, so reset polling.
+ reset();
+ }
+ break;
+ case STATE_ERROR:
+ // Do nothing. If the caller accepts any new timestamp we'll reset polling.
+ break;
+ default:
+ throw new IllegalStateException();
+ }
+ return updatedTimestamp;
+ }
+
+ /**
+ * Rejects the timestamp last polled in {@link #maybePollTimestamp(long)}. The instance will enter
+ * the error state and poll timestamps infrequently until the next call to {@link
+ * #acceptTimestamp()}.
+ */
+ public void rejectTimestamp() {
+ updateState(STATE_ERROR);
+ }
+
+ /**
+ * Accepts the timestamp last polled in {@link #maybePollTimestamp(long)}. If the instance is in
+ * the error state, it will begin to poll timestamps frequently again.
+ */
+ public void acceptTimestamp() {
+ if (state == STATE_ERROR) {
+ reset();
+ }
+ }
+
+ /**
+ * Returns whether this instance has a timestamp that can be used to calculate the audio track
+ * position. If {@code true}, call {@link #getTimestampSystemTimeUs()} and {@link
+ * #getTimestampPositionFrames()} to access the timestamp.
+ */
+ public boolean hasTimestamp() {
+ return state == STATE_TIMESTAMP || state == STATE_TIMESTAMP_ADVANCING;
+ }
+
+ /**
+ * Returns whether the timestamp appears to be advancing. If {@code true}, call {@link
+ * #getTimestampSystemTimeUs()} and {@link #getTimestampPositionFrames()} to access the timestamp. A
+ * current position for the track can be extrapolated based on elapsed real time since the system
+ * time at which the timestamp was sampled.
+ */
+ public boolean isTimestampAdvancing() {
+ return state == STATE_TIMESTAMP_ADVANCING;
+ }
+
+ /** Resets polling. Should be called whenever the audio track is paused or resumed. */
+ public void reset() {
+ if (audioTimestamp != null) {
+ updateState(STATE_INITIALIZING);
+ }
+ }
+
+ /**
+ * If {@link #maybePollTimestamp(long)} or {@link #hasTimestamp()} returned {@code true}, returns
+ * the system time at which the latest timestamp was sampled, in microseconds.
+ */
+ public long getTimestampSystemTimeUs() {
+ return audioTimestamp != null ? audioTimestamp.getTimestampSystemTimeUs() : C.TIME_UNSET;
+ }
+
+ /**
+ * If {@link #maybePollTimestamp(long)} or {@link #hasTimestamp()} returned {@code true}, returns
+ * the latest timestamp's position in frames.
+ */
+ public long getTimestampPositionFrames() {
+ return audioTimestamp != null ? audioTimestamp.getTimestampPositionFrames() : C.POSITION_UNSET;
+ }
+
+ // Enters the given state and configures the corresponding polling interval.
+ private void updateState(@State int state) {
+ this.state = state;
+ switch (state) {
+ case STATE_INITIALIZING:
+ // Force polling a timestamp immediately, and poll quickly.
+ lastTimestampSampleTimeUs = 0;
+ initialTimestampPositionFrames = C.POSITION_UNSET;
+ initializeSystemTimeUs = System.nanoTime() / 1000;
+ sampleIntervalUs = FAST_POLL_INTERVAL_US;
+ break;
+ case STATE_TIMESTAMP:
+ sampleIntervalUs = FAST_POLL_INTERVAL_US;
+ break;
+ case STATE_TIMESTAMP_ADVANCING:
+ case STATE_NO_TIMESTAMP:
+ sampleIntervalUs = SLOW_POLL_INTERVAL_US;
+ break;
+ case STATE_ERROR:
+ sampleIntervalUs = ERROR_POLL_INTERVAL_US;
+ break;
+ default:
+ throw new IllegalStateException();
+ }
+ }
+
+ @TargetApi(19)
+ private static final class AudioTimestampV19 {
+
+ private final AudioTrack audioTrack;
+ private final AudioTimestamp audioTimestamp;
+
+ /** The number of times the raw timestamp position has wrapped around. */
+ private long rawTimestampFramePositionWrapCount;
+ /** The raw position of the most recent timestamp, in frames. */
+ private long lastTimestampRawPositionFrames;
+ /** The unwrapped (monotonic) position of the most recent timestamp, in frames. */
+ private long lastTimestampPositionFrames;
+
+ /**
+ * Creates a new {@link AudioTimestamp} wrapper.
+ *
+ * @param audioTrack The audio track that will provide timestamps.
+ */
+ public AudioTimestampV19(AudioTrack audioTrack) {
+ this.audioTrack = audioTrack;
+ audioTimestamp = new AudioTimestamp();
+ }
+
+ /**
+ * Attempts to update the audio track timestamp. Returns {@code true} if the timestamp was
+ * updated, in which case the updated timestamp system time and position can be accessed with
+ * {@link #getTimestampSystemTimeUs()} and {@link #getTimestampPositionFrames()}. Returns {@code
+ * false} if no timestamp is available, in which case those methods should not be called.
+ */
+ public boolean maybeUpdateTimestamp() {
+ boolean updated = audioTrack.getTimestamp(audioTimestamp);
+ if (updated) {
+ long rawPositionFrames = audioTimestamp.framePosition;
+ if (lastTimestampRawPositionFrames > rawPositionFrames) {
+ // The value must have wrapped around.
+ rawTimestampFramePositionWrapCount++;
+ }
+ lastTimestampRawPositionFrames = rawPositionFrames;
+ // Extend the raw position, which is treated as wrapping at 32 bits, into a monotonic
+ // 64-bit frame count.
+ lastTimestampPositionFrames =
+ rawPositionFrames + (rawTimestampFramePositionWrapCount << 32);
+ }
+ return updated;
+ }
+
+ public long getTimestampSystemTimeUs() {
+ return audioTimestamp.nanoTime / 1000;
+ }
+
+ public long getTimestampPositionFrames() {
+ return lastTimestampPositionFrames;
+ }
+ }
+}
diff --git a/library/core/src/main/java/com/google/android/exoplayer2/audio/AudioTrack.java b/library/core/src/main/java/com/google/android/exoplayer2/audio/AudioTrack.java
deleted file mode 100644
index d7ebd69fbf..0000000000
--- a/library/core/src/main/java/com/google/android/exoplayer2/audio/AudioTrack.java
+++ /dev/null
@@ -1,1731 +0,0 @@
-/*
- * Copyright (C) 2016 The Android Open Source Project
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package com.google.android.exoplayer2.audio;
-
-import android.annotation.SuppressLint;
-import android.annotation.TargetApi;
-import android.media.AudioFormat;
-import android.media.AudioManager;
-import android.media.AudioTimestamp;
-import android.os.ConditionVariable;
-import android.os.SystemClock;
-import android.support.annotation.Nullable;
-import android.util.Log;
-import com.google.android.exoplayer2.C;
-import com.google.android.exoplayer2.PlaybackParameters;
-import com.google.android.exoplayer2.util.Assertions;
-import com.google.android.exoplayer2.util.MimeTypes;
-import com.google.android.exoplayer2.util.Util;
-import java.lang.reflect.Method;
-import java.nio.ByteBuffer;
-import java.nio.ByteOrder;
-import java.util.ArrayList;
-import java.util.LinkedList;
-
-/**
- * Plays audio data. The implementation delegates to an {@link android.media.AudioTrack} and handles
- * playback position smoothing, non-blocking writes and reconfiguration.
- *
- * Before starting playback, specify the input format by calling
- * {@link #configure(String, int, int, int, int)}. Optionally call {@link #setAudioSessionId(int)},
- * {@link #setAudioAttributes(AudioAttributes)}, {@link #enableTunnelingV21(int)} and
- * {@link #disableTunneling()} to configure audio playback. These methods may be called after
- * writing data to the track, in which case it will be reinitialized as required.
- *
- * Call {@link #handleBuffer(ByteBuffer, long)} to write data, and {@link #handleDiscontinuity()}
- * when the data being fed is discontinuous. Call {@link #play()} to start playing the written data.
- *
- * Call {@link #configure(String, int, int, int, int)} whenever the input format changes. The track
- * will be reinitialized on the next call to {@link #handleBuffer(ByteBuffer, long)}.
- *
- * Calling {@link #reset()} releases the underlying {@link android.media.AudioTrack} (and so does
- * calling {@link #configure(String, int, int, int, int)} unless the format is unchanged). It is
- * safe to call {@link #handleBuffer(ByteBuffer, long)} after {@link #reset()} without calling
- * {@link #configure(String, int, int, int, int)}.
- *
- * Call {@link #playToEndOfStream()} repeatedly to play out all data when no more input buffers will
- * be provided via {@link #handleBuffer(ByteBuffer, long)} until the next {@link #reset}. Call
- * {@link #release()} when the instance is no longer required.
- */
-public final class AudioTrack {
-
- /**
- * Listener for audio track events.
- */
- public interface Listener {
-
- /**
- * Called when the audio track has been initialized with a newly generated audio session id.
- *
- * @param audioSessionId The newly generated audio session id.
- */
- void onAudioSessionId(int audioSessionId);
-
- /**
- * Called when the audio track handles a buffer whose timestamp is discontinuous with the last
- * buffer handled since it was reset.
- */
- void onPositionDiscontinuity();
-
- /**
- * Called when the audio track underruns.
- *
- * @param bufferSize The size of the track's buffer, in bytes.
- * @param bufferSizeMs The size of the track's buffer, in milliseconds, if it is configured for
- * PCM output. {@link C#TIME_UNSET} if it is configured for passthrough output, as the
- * buffered media can have a variable bitrate so the duration may be unknown.
- * @param elapsedSinceLastFeedMs The time since the track was last fed data, in milliseconds.
- */
- void onUnderrun(int bufferSize, long bufferSizeMs, long elapsedSinceLastFeedMs);
-
- }
-
- /**
- * Thrown when a failure occurs configuring the track.
- */
- public static final class ConfigurationException extends Exception {
-
- public ConfigurationException(Throwable cause) {
- super(cause);
- }
-
- public ConfigurationException(String message) {
- super(message);
- }
-
- }
-
- /**
- * Thrown when a failure occurs initializing an {@link android.media.AudioTrack}.
- */
- public static final class InitializationException extends Exception {
-
- /**
- * The state as reported by {@link android.media.AudioTrack#getState()}.
- */
- public final int audioTrackState;
-
- /**
- * @param audioTrackState The state as reported by {@link android.media.AudioTrack#getState()}.
- * @param sampleRate The requested sample rate in Hz.
- * @param channelConfig The requested channel configuration.
- * @param bufferSize The requested buffer size in bytes.
- */
- public InitializationException(int audioTrackState, int sampleRate, int channelConfig,
- int bufferSize) {
- super("AudioTrack init failed: " + audioTrackState + ", Config(" + sampleRate + ", "
- + channelConfig + ", " + bufferSize + ")");
- this.audioTrackState = audioTrackState;
- }
-
- }
-
- /**
- * Thrown when a failure occurs writing to an {@link android.media.AudioTrack}.
- */
- public static final class WriteException extends Exception {
-
- /**
- * The error value returned from {@link android.media.AudioTrack#write(byte[], int, int)} or
- * {@link android.media.AudioTrack#write(ByteBuffer, int, int)}.
- */
- public final int errorCode;
-
- /**
- * @param errorCode The error value returned from
- * {@link android.media.AudioTrack#write(byte[], int, int)} or
- * {@link android.media.AudioTrack#write(ByteBuffer, int, int)}.
- */
- public WriteException(int errorCode) {
- super("AudioTrack write failed: " + errorCode);
- this.errorCode = errorCode;
- }
-
- }
-
- /**
- * Thrown when {@link android.media.AudioTrack#getTimestamp} returns a spurious timestamp, if
- * {@code AudioTrack#failOnSpuriousAudioTimestamp} is set.
- */
- public static final class InvalidAudioTrackTimestampException extends RuntimeException {
-
- /**
- * @param detailMessage The detail message for this exception.
- */
- public InvalidAudioTrackTimestampException(String detailMessage) {
- super(detailMessage);
- }
-
- }
-
- /**
- * Returned by {@link #getCurrentPositionUs(boolean)} when the position is not set.
- */
- public static final long CURRENT_POSITION_NOT_SET = Long.MIN_VALUE;
-
- /**
- * A minimum length for the {@link android.media.AudioTrack} buffer, in microseconds.
- */
- private static final long MIN_BUFFER_DURATION_US = 250000;
- /**
- * A maximum length for the {@link android.media.AudioTrack} buffer, in microseconds.
- */
- private static final long MAX_BUFFER_DURATION_US = 750000;
- /**
- * The length for passthrough {@link android.media.AudioTrack} buffers, in microseconds.
- */
- private static final long PASSTHROUGH_BUFFER_DURATION_US = 250000;
- /**
- * A multiplication factor to apply to the minimum buffer size requested by the underlying
- * {@link android.media.AudioTrack}.
- */
- private static final int BUFFER_MULTIPLICATION_FACTOR = 4;
-
- /**
- * @see android.media.AudioTrack#PLAYSTATE_STOPPED
- */
- private static final int PLAYSTATE_STOPPED = android.media.AudioTrack.PLAYSTATE_STOPPED;
- /**
- * @see android.media.AudioTrack#PLAYSTATE_PAUSED
- */
- private static final int PLAYSTATE_PAUSED = android.media.AudioTrack.PLAYSTATE_PAUSED;
- /**
- * @see android.media.AudioTrack#PLAYSTATE_PLAYING
- */
- private static final int PLAYSTATE_PLAYING = android.media.AudioTrack.PLAYSTATE_PLAYING;
- /**
- * @see android.media.AudioTrack#ERROR_BAD_VALUE
- */
- private static final int ERROR_BAD_VALUE = android.media.AudioTrack.ERROR_BAD_VALUE;
- /**
- * @see android.media.AudioTrack#MODE_STATIC
- */
- private static final int MODE_STATIC = android.media.AudioTrack.MODE_STATIC;
- /**
- * @see android.media.AudioTrack#MODE_STREAM
- */
- private static final int MODE_STREAM = android.media.AudioTrack.MODE_STREAM;
- /**
- * @see android.media.AudioTrack#STATE_INITIALIZED
- */
- private static final int STATE_INITIALIZED = android.media.AudioTrack.STATE_INITIALIZED;
- /**
- * @see android.media.AudioTrack#WRITE_NON_BLOCKING
- */
- @SuppressLint("InlinedApi")
- private static final int WRITE_NON_BLOCKING = android.media.AudioTrack.WRITE_NON_BLOCKING;
-
- private static final String TAG = "AudioTrack";
-
- /**
- * AudioTrack timestamps are deemed spurious if they are offset from the system clock by more
- * than this amount.
- *
- * This is a fail safe that should not be required on correctly functioning devices.
- */
- private static final long MAX_AUDIO_TIMESTAMP_OFFSET_US = 5 * C.MICROS_PER_SECOND;
-
- /**
- * AudioTrack latencies are deemed impossibly large if they are greater than this amount.
- *
- * This is a fail safe that should not be required on correctly functioning devices.
- */
- private static final long MAX_LATENCY_US = 5 * C.MICROS_PER_SECOND;
-
- private static final int START_NOT_SET = 0;
- private static final int START_IN_SYNC = 1;
- private static final int START_NEED_SYNC = 2;
-
- private static final int MAX_PLAYHEAD_OFFSET_COUNT = 10;
- private static final int MIN_PLAYHEAD_OFFSET_SAMPLE_INTERVAL_US = 30000;
- private static final int MIN_TIMESTAMP_SAMPLE_INTERVAL_US = 500000;
-
- /**
- * The minimum number of output bytes from {@link #sonicAudioProcessor} at which the speedup is
- * calculated using the input/output byte counts from the processor, rather than using the
- * current playback parameters speed.
- */
- private static final int SONIC_MIN_BYTES_FOR_SPEEDUP = 1024;
-
- /**
- * Whether to enable a workaround for an issue where an audio effect does not keep its session
- * active across releasing/initializing a new audio track, on platform builds where
- * {@link Util#SDK_INT} < 21.
- *
- * The flag must be set before creating a player.
- */
- public static boolean enablePreV21AudioSessionWorkaround = false;
-
- /**
- * Whether to throw an {@link InvalidAudioTrackTimestampException} when a spurious timestamp is
- * reported from {@link android.media.AudioTrack#getTimestamp}.
- *
- * The flag must be set before creating a player. Should be set to {@code true} for testing and
- * debugging purposes only.
- */
- public static boolean failOnSpuriousAudioTimestamp = false;
-
- @Nullable private final AudioCapabilities audioCapabilities;
- private final ChannelMappingAudioProcessor channelMappingAudioProcessor;
- private final SonicAudioProcessor sonicAudioProcessor;
- private final AudioProcessor[] availableAudioProcessors;
- private final Listener listener;
- private final ConditionVariable releasingConditionVariable;
- private final long[] playheadOffsets;
- private final AudioTrackUtil audioTrackUtil;
- private final LinkedList<PlaybackParametersCheckpoint> playbackParametersCheckpoints;
-
- /**
- * Used to keep the audio session active on pre-V21 builds (see {@link #initialize()}).
- */
- private android.media.AudioTrack keepSessionIdAudioTrack;
-
- private android.media.AudioTrack audioTrack;
- private int sampleRate;
- private int channelConfig;
- @C.Encoding
- private int encoding;
- @C.Encoding
- private int outputEncoding;
- private AudioAttributes audioAttributes;
- private boolean passthrough;
- private int bufferSize;
- private long bufferSizeUs;
-
- private PlaybackParameters drainingPlaybackParameters;
- private PlaybackParameters playbackParameters;
- private long playbackParametersOffsetUs;
- private long playbackParametersPositionUs;
-
- private ByteBuffer avSyncHeader;
- private int bytesUntilNextAvSync;
-
- private int nextPlayheadOffsetIndex;
- private int playheadOffsetCount;
- private long smoothedPlayheadOffsetUs;
- private long lastPlayheadSampleTimeUs;
- private boolean audioTimestampSet;
- private long lastTimestampSampleTimeUs;
-
- private Method getLatencyMethod;
- private int pcmFrameSize;
- private long submittedPcmBytes;
- private long submittedEncodedFrames;
- private int outputPcmFrameSize;
- private long writtenPcmBytes;
- private long writtenEncodedFrames;
- private int framesPerEncodedSample;
- private int startMediaTimeState;
- private long startMediaTimeUs;
- private long resumeSystemTimeUs;
- private long latencyUs;
- private float volume;
-
- private AudioProcessor[] audioProcessors;
- private ByteBuffer[] outputBuffers;
- private ByteBuffer inputBuffer;
- private ByteBuffer outputBuffer;
- private byte[] preV21OutputBuffer;
- private int preV21OutputBufferOffset;
- private int drainingAudioProcessorIndex;
- private boolean handledEndOfStream;
-
- private boolean playing;
- private int audioSessionId;
- private boolean tunneling;
- private boolean hasData;
- private long lastFeedElapsedRealtimeMs;
-
- /**
- * @param audioCapabilities The audio capabilities for playback on this device. May be null if the
- * default capabilities (no encoded audio passthrough support) should be assumed.
- * @param audioProcessors An array of {@link AudioProcessor}s that will process PCM audio before
- * output. May be empty.
- * @param listener Listener for audio track events.
- */
- public AudioTrack(@Nullable AudioCapabilities audioCapabilities, AudioProcessor[] audioProcessors,
- Listener listener) {
- this.audioCapabilities = audioCapabilities;
- this.listener = listener;
- releasingConditionVariable = new ConditionVariable(true);
- if (Util.SDK_INT >= 18) {
- try {
- getLatencyMethod =
- android.media.AudioTrack.class.getMethod("getLatency", (Class<?>[]) null);
- } catch (NoSuchMethodException e) {
- // There's no guarantee this method exists. Do nothing.
- }
- }
- if (Util.SDK_INT >= 19) {
- audioTrackUtil = new AudioTrackUtilV19();
- } else {
- audioTrackUtil = new AudioTrackUtil();
- }
- channelMappingAudioProcessor = new ChannelMappingAudioProcessor();
- sonicAudioProcessor = new SonicAudioProcessor();
- availableAudioProcessors = new AudioProcessor[3 + audioProcessors.length];
- availableAudioProcessors[0] = new ResamplingAudioProcessor();
- availableAudioProcessors[1] = channelMappingAudioProcessor;
- System.arraycopy(audioProcessors, 0, availableAudioProcessors, 2, audioProcessors.length);
- availableAudioProcessors[2 + audioProcessors.length] = sonicAudioProcessor;
- playheadOffsets = new long[MAX_PLAYHEAD_OFFSET_COUNT];
- volume = 1.0f;
- startMediaTimeState = START_NOT_SET;
- audioAttributes = AudioAttributes.DEFAULT;
- audioSessionId = C.AUDIO_SESSION_ID_UNSET;
- playbackParameters = PlaybackParameters.DEFAULT;
- drainingAudioProcessorIndex = C.INDEX_UNSET;
- this.audioProcessors = new AudioProcessor[0];
- outputBuffers = new ByteBuffer[0];
- playbackParametersCheckpoints = new LinkedList<>();
- }
-
- /**
- * Returns whether it's possible to play audio in the specified format using encoded passthrough.
- *
- * @param mimeType The format mime type.
- * @return Whether it's possible to play audio in the format using encoded passthrough.
- */
- public boolean isPassthroughSupported(String mimeType) {
- return audioCapabilities != null
- && audioCapabilities.supportsEncoding(getEncodingForMimeType(mimeType));
- }
-
- /**
- * Returns the playback position in the stream starting at zero, in microseconds, or
- * {@link #CURRENT_POSITION_NOT_SET} if it is not yet available.
- *
- * If the device supports it, the method uses the playback timestamp from
- * {@link android.media.AudioTrack#getTimestamp}. Otherwise, it derives a smoothed position by
- * sampling the {@link android.media.AudioTrack}'s frame position.
- *
- * @param sourceEnded Specify {@code true} if no more input buffers will be provided.
- * @return The playback position relative to the start of playback, in microseconds.
- */
- public long getCurrentPositionUs(boolean sourceEnded) {
- if (!hasCurrentPositionUs()) {
- return CURRENT_POSITION_NOT_SET;
- }
-
- if (audioTrack.getPlayState() == PLAYSTATE_PLAYING) {
- maybeSampleSyncParams();
- }
-
- long systemClockUs = System.nanoTime() / 1000;
- long positionUs;
- if (audioTimestampSet) {
- // Calculate the speed-adjusted position using the timestamp (which may be in the future).
- long elapsedSinceTimestampUs = systemClockUs - (audioTrackUtil.getTimestampNanoTime() / 1000);
- long elapsedSinceTimestampFrames = durationUsToFrames(elapsedSinceTimestampUs);
- long elapsedFrames = audioTrackUtil.getTimestampFramePosition() + elapsedSinceTimestampFrames;
- positionUs = framesToDurationUs(elapsedFrames);
- } else {
- if (playheadOffsetCount == 0) {
- // The AudioTrack has started, but we don't have any samples to compute a smoothed position.
- positionUs = audioTrackUtil.getPositionUs();
- } else {
- // getPlayheadPositionUs() only has a granularity of ~20 ms, so we base the position off the
- // system clock (and a smoothed offset between it and the playhead position) so as to
- // prevent jitter in the reported positions.
- positionUs = systemClockUs + smoothedPlayheadOffsetUs;
- }
- if (!sourceEnded) {
- positionUs -= latencyUs;
- }
- }
-
- return startMediaTimeUs + applySpeedup(positionUs);
- }
-
- /**
- * Configures (or reconfigures) the audio track.
- *
- * @param mimeType The mime type.
- * @param channelCount The number of channels.
- * @param sampleRate The sample rate in Hz.
- * @param pcmEncoding For PCM formats, the encoding used. One of {@link C#ENCODING_PCM_16BIT},
- * {@link C#ENCODING_PCM_16BIT}, {@link C#ENCODING_PCM_24BIT} and
- * {@link C#ENCODING_PCM_32BIT}.
- * @param specifiedBufferSize A specific size for the playback buffer in bytes, or 0 to infer a
- * suitable buffer size automatically.
- * @throws ConfigurationException If an error occurs configuring the track.
- */
- public void configure(String mimeType, int channelCount, int sampleRate,
- @C.PcmEncoding int pcmEncoding, int specifiedBufferSize) throws ConfigurationException {
- configure(mimeType, channelCount, sampleRate, pcmEncoding, specifiedBufferSize, null);
- }
-
- /**
- * Configures (or reconfigures) the audio track.
- *
- * @param mimeType The mime type.
- * @param channelCount The number of channels.
- * @param sampleRate The sample rate in Hz.
- * @param pcmEncoding For PCM formats, the encoding used. One of {@link C#ENCODING_PCM_16BIT},
- * {@link C#ENCODING_PCM_16BIT}, {@link C#ENCODING_PCM_24BIT} and
- * {@link C#ENCODING_PCM_32BIT}.
- * @param specifiedBufferSize A specific size for the playback buffer in bytes, or 0 to infer a
- * suitable buffer size automatically.
- * @param outputChannels A mapping from input to output channels that is applied to this track's
- * input as a preprocessing step, if handling PCM input. Specify {@code null} to leave the
- * input unchanged. Otherwise, the element at index {@code i} specifies index of the input
- * channel to map to output channel {@code i} when preprocessing input buffers. After the
- * map is applied the audio data will have {@code outputChannels.length} channels.
- * @throws ConfigurationException If an error occurs configuring the track.
- */
- public void configure(String mimeType, int channelCount, int sampleRate,
- @C.PcmEncoding int pcmEncoding, int specifiedBufferSize, int[] outputChannels)
- throws ConfigurationException {
- boolean passthrough = !MimeTypes.AUDIO_RAW.equals(mimeType);
- @C.Encoding int encoding = passthrough ? getEncodingForMimeType(mimeType) : pcmEncoding;
- boolean flush = false;
- if (!passthrough) {
- pcmFrameSize = Util.getPcmFrameSize(pcmEncoding, channelCount);
- channelMappingAudioProcessor.setChannelMap(outputChannels);
- for (AudioProcessor audioProcessor : availableAudioProcessors) {
- try {
- flush |= audioProcessor.configure(sampleRate, channelCount, encoding);
- } catch (AudioProcessor.UnhandledFormatException e) {
- throw new ConfigurationException(e);
- }
- if (audioProcessor.isActive()) {
- channelCount = audioProcessor.getOutputChannelCount();
- encoding = audioProcessor.getOutputEncoding();
- }
- }
- if (flush) {
- resetAudioProcessors();
- }
- }
-
- int channelConfig;
- switch (channelCount) {
- case 1:
- channelConfig = AudioFormat.CHANNEL_OUT_MONO;
- break;
- case 2:
- channelConfig = AudioFormat.CHANNEL_OUT_STEREO;
- break;
- case 3:
- channelConfig = AudioFormat.CHANNEL_OUT_STEREO | AudioFormat.CHANNEL_OUT_FRONT_CENTER;
- break;
- case 4:
- channelConfig = AudioFormat.CHANNEL_OUT_QUAD;
- break;
- case 5:
- channelConfig = AudioFormat.CHANNEL_OUT_QUAD | AudioFormat.CHANNEL_OUT_FRONT_CENTER;
- break;
- case 6:
- channelConfig = AudioFormat.CHANNEL_OUT_5POINT1;
- break;
- case 7:
- channelConfig = AudioFormat.CHANNEL_OUT_5POINT1 | AudioFormat.CHANNEL_OUT_BACK_CENTER;
- break;
- case 8:
- channelConfig = C.CHANNEL_OUT_7POINT1_SURROUND;
- break;
- default:
- throw new ConfigurationException("Unsupported channel count: " + channelCount);
- }
-
- // Workaround for overly strict channel configuration checks on nVidia Shield.
- if (Util.SDK_INT <= 23 && "foster".equals(Util.DEVICE) && "NVIDIA".equals(Util.MANUFACTURER)) {
- switch (channelCount) {
- case 7:
- channelConfig = C.CHANNEL_OUT_7POINT1_SURROUND;
- break;
- case 3:
- case 5:
- channelConfig = AudioFormat.CHANNEL_OUT_5POINT1;
- break;
- default:
- break;
- }
- }
-
- // Workaround for Nexus Player not reporting support for mono passthrough.
- // (See [Internal: b/34268671].)
- if (Util.SDK_INT <= 25 && "fugu".equals(Util.DEVICE) && passthrough && channelCount == 1) {
- channelConfig = AudioFormat.CHANNEL_OUT_STEREO;
- }
-
- if (!flush && isInitialized() && this.encoding == encoding && this.sampleRate == sampleRate
- && this.channelConfig == channelConfig) {
- // We already have an audio track with the correct sample rate, channel config and encoding.
- return;
- }
-
- reset();
-
- this.encoding = encoding;
- this.passthrough = passthrough;
- this.sampleRate = sampleRate;
- this.channelConfig = channelConfig;
- outputEncoding = passthrough ? encoding : C.ENCODING_PCM_16BIT;
- outputPcmFrameSize = Util.getPcmFrameSize(C.ENCODING_PCM_16BIT, channelCount);
-
- if (specifiedBufferSize != 0) {
- bufferSize = specifiedBufferSize;
- } else if (passthrough) {
- // TODO: Set the minimum buffer size using getMinBufferSize when it takes the encoding into
- // account. [Internal: b/25181305]
- if (outputEncoding == C.ENCODING_AC3 || outputEncoding == C.ENCODING_E_AC3) {
- // AC-3 allows bitrates up to 640 kbit/s.
- bufferSize = (int) (PASSTHROUGH_BUFFER_DURATION_US * 80 * 1024 / C.MICROS_PER_SECOND);
- } else /* (outputEncoding == C.ENCODING_DTS || outputEncoding == C.ENCODING_DTS_HD */ {
- // DTS allows an 'open' bitrate, but we assume the maximum listed value: 1536 kbit/s.
- bufferSize = (int) (PASSTHROUGH_BUFFER_DURATION_US * 192 * 1024 / C.MICROS_PER_SECOND);
- }
- } else {
- int minBufferSize =
- android.media.AudioTrack.getMinBufferSize(sampleRate, channelConfig, outputEncoding);
- Assertions.checkState(minBufferSize != ERROR_BAD_VALUE);
- int multipliedBufferSize = minBufferSize * BUFFER_MULTIPLICATION_FACTOR;
- int minAppBufferSize = (int) durationUsToFrames(MIN_BUFFER_DURATION_US) * outputPcmFrameSize;
- int maxAppBufferSize = (int) Math.max(minBufferSize,
- durationUsToFrames(MAX_BUFFER_DURATION_US) * outputPcmFrameSize);
- bufferSize = multipliedBufferSize < minAppBufferSize ? minAppBufferSize
- : multipliedBufferSize > maxAppBufferSize ? maxAppBufferSize
- : multipliedBufferSize;
- }
- bufferSizeUs = passthrough ? C.TIME_UNSET : framesToDurationUs(bufferSize / outputPcmFrameSize);
-
- // The old playback parameters may no longer be applicable so try to reset them now.
- setPlaybackParameters(playbackParameters);
- }
-
- private void resetAudioProcessors() {
- ArrayList newAudioProcessors = new ArrayList<>();
- for (AudioProcessor audioProcessor : availableAudioProcessors) {
- if (audioProcessor.isActive()) {
- newAudioProcessors.add(audioProcessor);
- } else {
- audioProcessor.flush();
- }
- }
- int count = newAudioProcessors.size();
- audioProcessors = newAudioProcessors.toArray(new AudioProcessor[count]);
- outputBuffers = new ByteBuffer[count];
- for (int i = 0; i < count; i++) {
- AudioProcessor audioProcessor = audioProcessors[i];
- audioProcessor.flush();
- outputBuffers[i] = audioProcessor.getOutput();
- }
- }
-
- private void initialize() throws InitializationException {
- // If we're asynchronously releasing a previous audio track then we block until it has been
- // released. This guarantees that we cannot end up in a state where we have multiple audio
- // track instances. Without this guarantee it would be possible, in extreme cases, to exhaust
- // the shared memory that's available for audio track buffers. This would in turn cause the
- // initialization of the audio track to fail.
- releasingConditionVariable.block();
-
- audioTrack = initializeAudioTrack();
- int audioSessionId = audioTrack.getAudioSessionId();
- if (enablePreV21AudioSessionWorkaround) {
- if (Util.SDK_INT < 21) {
- // The workaround creates an audio track with a two byte buffer on the same session, and
- // does not release it until this object is released, which keeps the session active.
- if (keepSessionIdAudioTrack != null
- && audioSessionId != keepSessionIdAudioTrack.getAudioSessionId()) {
- releaseKeepSessionIdAudioTrack();
- }
- if (keepSessionIdAudioTrack == null) {
- keepSessionIdAudioTrack = initializeKeepSessionIdAudioTrack(audioSessionId);
- }
- }
- }
- if (this.audioSessionId != audioSessionId) {
- this.audioSessionId = audioSessionId;
- listener.onAudioSessionId(audioSessionId);
- }
-
- audioTrackUtil.reconfigure(audioTrack, needsPassthroughWorkarounds());
- setVolumeInternal();
- hasData = false;
- }
-
- /**
- * Starts or resumes playing audio if the audio track has been initialized.
- */
- public void play() {
- playing = true;
- if (isInitialized()) {
- resumeSystemTimeUs = System.nanoTime() / 1000;
- audioTrack.play();
- }
- }
-
- /**
- * Signals to the audio track that the next buffer is discontinuous with the previous buffer.
- */
- public void handleDiscontinuity() {
- // Force resynchronization after a skipped buffer.
- if (startMediaTimeState == START_IN_SYNC) {
- startMediaTimeState = START_NEED_SYNC;
- }
- }
-
- /**
- * Attempts to process data from a {@link ByteBuffer}, starting from its current position and
- * ending at its limit (exclusive). The position of the {@link ByteBuffer} is advanced by the
- * number of bytes that were handled. {@link Listener#onPositionDiscontinuity()} will be called if
- * {@code presentationTimeUs} is discontinuous with the last buffer handled since the last reset.
- *
- * Returns whether the data was handled in full. If the data was not handled in full then the same
- * {@link ByteBuffer} must be provided to subsequent calls until it has been fully consumed,
- * except in the case of an interleaving call to {@link #reset()} (or an interleaving call to
- * {@link #configure(String, int, int, int, int)} that caused the track to be reset).
- *
- * @param buffer The buffer containing audio data.
- * @param presentationTimeUs The presentation timestamp of the buffer in microseconds.
- * @return Whether the buffer was handled fully.
- * @throws InitializationException If an error occurs initializing the track.
- * @throws WriteException If an error occurs writing the audio data.
- */
- @SuppressWarnings("ReferenceEquality")
- public boolean handleBuffer(ByteBuffer buffer, long presentationTimeUs)
- throws InitializationException, WriteException {
- Assertions.checkArgument(inputBuffer == null || buffer == inputBuffer);
- if (!isInitialized()) {
- initialize();
- if (playing) {
- play();
- }
- }
-
- if (needsPassthroughWorkarounds()) {
- // An AC-3 audio track continues to play data written while it is paused. Stop writing so its
- // buffer empties. See [Internal: b/18899620].
- if (audioTrack.getPlayState() == PLAYSTATE_PAUSED) {
- // We force an underrun to pause the track, so don't notify the listener in this case.
- hasData = false;
- return false;
- }
-
- // A new AC-3 audio track's playback position continues to increase from the old track's
- // position for a short time after is has been released. Avoid writing data until the playback
- // head position actually returns to zero.
- if (audioTrack.getPlayState() == PLAYSTATE_STOPPED
- && audioTrackUtil.getPlaybackHeadPosition() != 0) {
- return false;
- }
- }
-
- boolean hadData = hasData;
- hasData = hasPendingData();
- if (hadData && !hasData && audioTrack.getPlayState() != PLAYSTATE_STOPPED) {
- long elapsedSinceLastFeedMs = SystemClock.elapsedRealtime() - lastFeedElapsedRealtimeMs;
- listener.onUnderrun(bufferSize, C.usToMs(bufferSizeUs), elapsedSinceLastFeedMs);
- }
-
- if (inputBuffer == null) {
- // We are seeing this buffer for the first time.
- if (!buffer.hasRemaining()) {
- // The buffer is empty.
- return true;
- }
-
- if (passthrough && framesPerEncodedSample == 0) {
- // If this is the first encoded sample, calculate the sample size in frames.
- framesPerEncodedSample = getFramesPerEncodedSample(outputEncoding, buffer);
- }
-
- if (drainingPlaybackParameters != null) {
- if (!drainAudioProcessorsToEndOfStream()) {
- // Don't process any more input until draining completes.
- return false;
- }
- // Store the position and corresponding media time from which the parameters will apply.
- playbackParametersCheckpoints.add(new PlaybackParametersCheckpoint(
- drainingPlaybackParameters, Math.max(0, presentationTimeUs),
- framesToDurationUs(getWrittenFrames())));
- drainingPlaybackParameters = null;
- // The audio processors have drained, so flush them. This will cause any active speed
- // adjustment audio processor to start producing audio with the new parameters.
- resetAudioProcessors();
- }
-
- if (startMediaTimeState == START_NOT_SET) {
- startMediaTimeUs = Math.max(0, presentationTimeUs);
- startMediaTimeState = START_IN_SYNC;
- } else {
- // Sanity check that presentationTimeUs is consistent with the expected value.
- long expectedPresentationTimeUs = startMediaTimeUs
- + framesToDurationUs(getSubmittedFrames());
- if (startMediaTimeState == START_IN_SYNC
- && Math.abs(expectedPresentationTimeUs - presentationTimeUs) > 200000) {
- Log.e(TAG, "Discontinuity detected [expected " + expectedPresentationTimeUs + ", got "
- + presentationTimeUs + "]");
- startMediaTimeState = START_NEED_SYNC;
- }
- if (startMediaTimeState == START_NEED_SYNC) {
- // Adjust startMediaTimeUs to be consistent with the current buffer's start time and the
- // number of bytes submitted.
- startMediaTimeUs += (presentationTimeUs - expectedPresentationTimeUs);
- startMediaTimeState = START_IN_SYNC;
- listener.onPositionDiscontinuity();
- }
- }
-
- if (passthrough) {
- submittedEncodedFrames += framesPerEncodedSample;
- } else {
- submittedPcmBytes += buffer.remaining();
- }
-
- inputBuffer = buffer;
- }
-
- if (passthrough) {
- // Passthrough buffers are not processed.
- writeBuffer(inputBuffer, presentationTimeUs);
- } else {
- processBuffers(presentationTimeUs);
- }
-
- if (!inputBuffer.hasRemaining()) {
- inputBuffer = null;
- return true;
- }
- return false;
- }
-
- private void processBuffers(long avSyncPresentationTimeUs) throws WriteException {
- int count = audioProcessors.length;
- int index = count;
- while (index >= 0) {
- ByteBuffer input = index > 0 ? outputBuffers[index - 1]
- : (inputBuffer != null ? inputBuffer : AudioProcessor.EMPTY_BUFFER);
- if (index == count) {
- writeBuffer(input, avSyncPresentationTimeUs);
- } else {
- AudioProcessor audioProcessor = audioProcessors[index];
- audioProcessor.queueInput(input);
- ByteBuffer output = audioProcessor.getOutput();
- outputBuffers[index] = output;
- if (output.hasRemaining()) {
- // Handle the output as input to the next audio processor or the AudioTrack.
- index++;
- continue;
- }
- }
-
- if (input.hasRemaining()) {
- // The input wasn't consumed and no output was produced, so give up for now.
- return;
- }
-
- // Get more input from upstream.
- index--;
- }
- }
-
- @SuppressWarnings("ReferenceEquality")
- private boolean writeBuffer(ByteBuffer buffer, long avSyncPresentationTimeUs)
- throws WriteException {
- if (!buffer.hasRemaining()) {
- return true;
- }
- if (outputBuffer != null) {
- Assertions.checkArgument(outputBuffer == buffer);
- } else {
- outputBuffer = buffer;
- if (Util.SDK_INT < 21) {
- int bytesRemaining = buffer.remaining();
- if (preV21OutputBuffer == null || preV21OutputBuffer.length < bytesRemaining) {
- preV21OutputBuffer = new byte[bytesRemaining];
- }
- int originalPosition = buffer.position();
- buffer.get(preV21OutputBuffer, 0, bytesRemaining);
- buffer.position(originalPosition);
- preV21OutputBufferOffset = 0;
- }
- }
- int bytesRemaining = buffer.remaining();
- int bytesWritten = 0;
- if (Util.SDK_INT < 21) { // passthrough == false
- // Work out how many bytes we can write without the risk of blocking.
- int bytesPending =
- (int) (writtenPcmBytes - (audioTrackUtil.getPlaybackHeadPosition() * outputPcmFrameSize));
- int bytesToWrite = bufferSize - bytesPending;
- if (bytesToWrite > 0) {
- bytesToWrite = Math.min(bytesRemaining, bytesToWrite);
- bytesWritten = audioTrack.write(preV21OutputBuffer, preV21OutputBufferOffset, bytesToWrite);
- if (bytesWritten > 0) {
- preV21OutputBufferOffset += bytesWritten;
- buffer.position(buffer.position() + bytesWritten);
- }
- }
- } else if (tunneling) {
- Assertions.checkState(avSyncPresentationTimeUs != C.TIME_UNSET);
- bytesWritten = writeNonBlockingWithAvSyncV21(audioTrack, buffer, bytesRemaining,
- avSyncPresentationTimeUs);
- } else {
- bytesWritten = writeNonBlockingV21(audioTrack, buffer, bytesRemaining);
- }
-
- lastFeedElapsedRealtimeMs = SystemClock.elapsedRealtime();
-
- if (bytesWritten < 0) {
- throw new WriteException(bytesWritten);
- }
-
- if (!passthrough) {
- writtenPcmBytes += bytesWritten;
- }
- if (bytesWritten == bytesRemaining) {
- if (passthrough) {
- writtenEncodedFrames += framesPerEncodedSample;
- }
- outputBuffer = null;
- return true;
- }
- return false;
- }
-
- /**
- * Plays out remaining audio. {@link #isEnded()} will return {@code true} when playback has ended.
- *
- * @throws WriteException If an error occurs draining data to the track.
- */
- public void playToEndOfStream() throws WriteException {
- if (handledEndOfStream || !isInitialized()) {
- return;
- }
-
- if (drainAudioProcessorsToEndOfStream()) {
- // The audio processors have drained, so drain the underlying audio track.
- audioTrackUtil.handleEndOfStream(getWrittenFrames());
- bytesUntilNextAvSync = 0;
- handledEndOfStream = true;
- }
- }
-
- private boolean drainAudioProcessorsToEndOfStream() throws WriteException {
- boolean audioProcessorNeedsEndOfStream = false;
- if (drainingAudioProcessorIndex == C.INDEX_UNSET) {
- drainingAudioProcessorIndex = passthrough ? audioProcessors.length : 0;
- audioProcessorNeedsEndOfStream = true;
- }
- while (drainingAudioProcessorIndex < audioProcessors.length) {
- AudioProcessor audioProcessor = audioProcessors[drainingAudioProcessorIndex];
- if (audioProcessorNeedsEndOfStream) {
- audioProcessor.queueEndOfStream();
- }
- processBuffers(C.TIME_UNSET);
- if (!audioProcessor.isEnded()) {
- return false;
- }
- audioProcessorNeedsEndOfStream = true;
- drainingAudioProcessorIndex++;
- }
-
- // Finish writing any remaining output to the track.
- if (outputBuffer != null) {
- writeBuffer(outputBuffer, C.TIME_UNSET);
- if (outputBuffer != null) {
- return false;
- }
- }
- drainingAudioProcessorIndex = C.INDEX_UNSET;
- return true;
- }
-
- /**
- * Returns whether all buffers passed to {@link #handleBuffer(ByteBuffer, long)} have been
- * completely processed and played.
- */
- public boolean isEnded() {
- return !isInitialized() || (handledEndOfStream && !hasPendingData());
- }
-
- /**
- * Returns whether the audio track has more data pending that will be played back.
- */
- public boolean hasPendingData() {
- return isInitialized()
- && (getWrittenFrames() > audioTrackUtil.getPlaybackHeadPosition()
- || overrideHasPendingData());
- }
-
- /**
- * Attempts to set the playback parameters and returns the active playback parameters, which may
- * differ from those passed in.
- *
- * @param playbackParameters The new playback parameters to attempt to set.
- * @return The active playback parameters.
- */
- public PlaybackParameters setPlaybackParameters(PlaybackParameters playbackParameters) {
- if (passthrough) {
- // The playback parameters are always the default in passthrough mode.
- this.playbackParameters = PlaybackParameters.DEFAULT;
- return this.playbackParameters;
- }
- playbackParameters = new PlaybackParameters(
- sonicAudioProcessor.setSpeed(playbackParameters.speed),
- sonicAudioProcessor.setPitch(playbackParameters.pitch));
- PlaybackParameters lastSetPlaybackParameters =
- drainingPlaybackParameters != null ? drainingPlaybackParameters
- : !playbackParametersCheckpoints.isEmpty()
- ? playbackParametersCheckpoints.getLast().playbackParameters
- : this.playbackParameters;
- if (!playbackParameters.equals(lastSetPlaybackParameters)) {
- if (isInitialized()) {
- // Drain the audio processors so we can determine the frame position at which the new
- // parameters apply.
- drainingPlaybackParameters = playbackParameters;
- } else {
- this.playbackParameters = playbackParameters;
- }
- }
- return this.playbackParameters;
- }
-
- /**
- * Gets the {@link PlaybackParameters}.
- */
- public PlaybackParameters getPlaybackParameters() {
- return playbackParameters;
- }
-
- /**
- * Sets the attributes for audio playback. If the attributes have changed and if the audio track
- * is not configured for use with tunneling, then the audio track is reset and the audio session
- * id is cleared.
- *
- * If the audio track is configured for use with tunneling then the audio attributes are ignored.
- * The audio track is not reset and the audio session id is not cleared. The passed attributes
- * will be used if the audio track is later re-configured into non-tunneled mode.
- *
- * @param audioAttributes The attributes for audio playback.
- */
- public void setAudioAttributes(AudioAttributes audioAttributes) {
- if (this.audioAttributes.equals(audioAttributes)) {
- return;
- }
- this.audioAttributes = audioAttributes;
- if (tunneling) {
- // The audio attributes are ignored in tunneling mode, so no need to reset.
- return;
- }
- reset();
- audioSessionId = C.AUDIO_SESSION_ID_UNSET;
- }
-
- /**
- * Sets the audio session id. The audio track is reset if the audio session id has changed.
- */
- public void setAudioSessionId(int audioSessionId) {
- if (this.audioSessionId != audioSessionId) {
- this.audioSessionId = audioSessionId;
- reset();
- }
- }
-
- /**
- * Enables tunneling. The audio track is reset if tunneling was previously disabled or if the
- * audio session id has changed. Enabling tunneling requires platform API version 21 onwards.
- *
- * If this instance has {@link AudioProcessor}s and tunneling is enabled, care must be taken that
- * audio processors do not output buffers with a different duration than their input, and buffer
- * processors must produce output corresponding to their last input immediately after that input
- * is queued.
- *
- * @param tunnelingAudioSessionId The audio session id to use.
- * @throws IllegalStateException Thrown if enabling tunneling on platform API version < 21.
- */
- public void enableTunnelingV21(int tunnelingAudioSessionId) {
- Assertions.checkState(Util.SDK_INT >= 21);
- if (!tunneling || audioSessionId != tunnelingAudioSessionId) {
- tunneling = true;
- audioSessionId = tunnelingAudioSessionId;
- reset();
- }
- }
-
- /**
- * Disables tunneling. If tunneling was previously enabled then the audio track is reset and the
- * audio session id is cleared.
- */
- public void disableTunneling() {
- if (tunneling) {
- tunneling = false;
- audioSessionId = C.AUDIO_SESSION_ID_UNSET;
- reset();
- }
- }
-
- /**
- * Sets the playback volume.
- *
- * @param volume A volume in the range [0.0, 1.0].
- */
- public void setVolume(float volume) {
- if (this.volume != volume) {
- this.volume = volume;
- setVolumeInternal();
- }
- }
-
- private void setVolumeInternal() {
- if (!isInitialized()) {
- // Do nothing.
- } else if (Util.SDK_INT >= 21) {
- setVolumeInternalV21(audioTrack, volume);
- } else {
- setVolumeInternalV3(audioTrack, volume);
- }
- }
-
- /**
- * Pauses playback.
- */
- public void pause() {
- playing = false;
- if (isInitialized()) {
- resetSyncParams();
- audioTrackUtil.pause();
- }
- }
-
- /**
- * Releases the underlying audio track asynchronously.
- *
- * Calling {@link #handleBuffer(ByteBuffer, long)} will block until the audio track has been
- * released, so it is safe to use the audio track immediately after a reset. The audio session may
- * remain active until {@link #release()} is called.
- */
- public void reset() {
- if (isInitialized()) {
- submittedPcmBytes = 0;
- submittedEncodedFrames = 0;
- writtenPcmBytes = 0;
- writtenEncodedFrames = 0;
- framesPerEncodedSample = 0;
- if (drainingPlaybackParameters != null) {
- playbackParameters = drainingPlaybackParameters;
- drainingPlaybackParameters = null;
- } else if (!playbackParametersCheckpoints.isEmpty()) {
- playbackParameters = playbackParametersCheckpoints.getLast().playbackParameters;
- }
- playbackParametersCheckpoints.clear();
- playbackParametersOffsetUs = 0;
- playbackParametersPositionUs = 0;
- inputBuffer = null;
- outputBuffer = null;
- for (int i = 0; i < audioProcessors.length; i++) {
- AudioProcessor audioProcessor = audioProcessors[i];
- audioProcessor.flush();
- outputBuffers[i] = audioProcessor.getOutput();
- }
- handledEndOfStream = false;
- drainingAudioProcessorIndex = C.INDEX_UNSET;
- avSyncHeader = null;
- bytesUntilNextAvSync = 0;
- startMediaTimeState = START_NOT_SET;
- latencyUs = 0;
- resetSyncParams();
- int playState = audioTrack.getPlayState();
- if (playState == PLAYSTATE_PLAYING) {
- audioTrack.pause();
- }
- // AudioTrack.release can take some time, so we call it on a background thread.
- final android.media.AudioTrack toRelease = audioTrack;
- audioTrack = null;
- audioTrackUtil.reconfigure(null, false);
- releasingConditionVariable.close();
- new Thread() {
- @Override
- public void run() {
- try {
- toRelease.flush();
- toRelease.release();
- } finally {
- releasingConditionVariable.open();
- }
- }
- }.start();
- }
- }
-
- /**
- * Releases all resources associated with this instance.
- */
- public void release() {
- reset();
- releaseKeepSessionIdAudioTrack();
- for (AudioProcessor audioProcessor : availableAudioProcessors) {
- audioProcessor.reset();
- }
- audioSessionId = C.AUDIO_SESSION_ID_UNSET;
- playing = false;
- }
-
- /**
- * Releases {@link #keepSessionIdAudioTrack} asynchronously, if it is non-{@code null}.
- */
- private void releaseKeepSessionIdAudioTrack() {
- if (keepSessionIdAudioTrack == null) {
- return;
- }
-
- // AudioTrack.release can take some time, so we call it on a background thread.
- final android.media.AudioTrack toRelease = keepSessionIdAudioTrack;
- keepSessionIdAudioTrack = null;
- new Thread() {
- @Override
- public void run() {
- toRelease.release();
- }
- }.start();
- }
-
- /**
- * Returns whether {@link #getCurrentPositionUs} can return the current playback position.
- */
- private boolean hasCurrentPositionUs() {
- return isInitialized() && startMediaTimeState != START_NOT_SET;
- }
-
- /**
- * Returns the underlying audio track {@code positionUs} with any applicable speedup applied.
- */
- private long applySpeedup(long positionUs) {
- while (!playbackParametersCheckpoints.isEmpty()
- && positionUs >= playbackParametersCheckpoints.getFirst().positionUs) {
- // We are playing (or about to play) media with the new playback parameters, so update them.
- PlaybackParametersCheckpoint checkpoint = playbackParametersCheckpoints.remove();
- playbackParameters = checkpoint.playbackParameters;
- playbackParametersPositionUs = checkpoint.positionUs;
- playbackParametersOffsetUs = checkpoint.mediaTimeUs - startMediaTimeUs;
- }
-
- if (playbackParameters.speed == 1f) {
- return positionUs + playbackParametersOffsetUs - playbackParametersPositionUs;
- }
-
- if (playbackParametersCheckpoints.isEmpty()
- && sonicAudioProcessor.getOutputByteCount() >= SONIC_MIN_BYTES_FOR_SPEEDUP) {
- return playbackParametersOffsetUs
- + Util.scaleLargeTimestamp(positionUs - playbackParametersPositionUs,
- sonicAudioProcessor.getInputByteCount(), sonicAudioProcessor.getOutputByteCount());
- }
-
- // We are playing drained data at a previous playback speed, or don't have enough bytes to
- // calculate an accurate speedup, so fall back to multiplying by the speed.
- return playbackParametersOffsetUs
- + (long) ((double) playbackParameters.speed * (positionUs - playbackParametersPositionUs));
- }
-
- /**
- * Updates the audio track latency and playback position parameters.
- */
- private void maybeSampleSyncParams() {
- long playbackPositionUs = audioTrackUtil.getPositionUs();
- if (playbackPositionUs == 0) {
- // The AudioTrack hasn't output anything yet.
- return;
- }
- long systemClockUs = System.nanoTime() / 1000;
- if (systemClockUs - lastPlayheadSampleTimeUs >= MIN_PLAYHEAD_OFFSET_SAMPLE_INTERVAL_US) {
- // Take a new sample and update the smoothed offset between the system clock and the playhead.
- playheadOffsets[nextPlayheadOffsetIndex] = playbackPositionUs - systemClockUs;
- nextPlayheadOffsetIndex = (nextPlayheadOffsetIndex + 1) % MAX_PLAYHEAD_OFFSET_COUNT;
- if (playheadOffsetCount < MAX_PLAYHEAD_OFFSET_COUNT) {
- playheadOffsetCount++;
- }
- lastPlayheadSampleTimeUs = systemClockUs;
- smoothedPlayheadOffsetUs = 0;
- for (int i = 0; i < playheadOffsetCount; i++) {
- smoothedPlayheadOffsetUs += playheadOffsets[i] / playheadOffsetCount;
- }
- }
-
- if (needsPassthroughWorkarounds()) {
- // Don't sample the timestamp and latency if this is an AC-3 passthrough AudioTrack on
- // platform API versions 21/22, as incorrect values are returned. See [Internal: b/21145353].
- return;
- }
-
- if (systemClockUs - lastTimestampSampleTimeUs >= MIN_TIMESTAMP_SAMPLE_INTERVAL_US) {
- audioTimestampSet = audioTrackUtil.updateTimestamp();
- if (audioTimestampSet) {
- // Perform sanity checks on the timestamp.
- long audioTimestampUs = audioTrackUtil.getTimestampNanoTime() / 1000;
- long audioTimestampFramePosition = audioTrackUtil.getTimestampFramePosition();
- if (audioTimestampUs < resumeSystemTimeUs) {
- // The timestamp corresponds to a time before the track was most recently resumed.
- audioTimestampSet = false;
- } else if (Math.abs(audioTimestampUs - systemClockUs) > MAX_AUDIO_TIMESTAMP_OFFSET_US) {
- // The timestamp time base is probably wrong.
- String message = "Spurious audio timestamp (system clock mismatch): "
- + audioTimestampFramePosition + ", " + audioTimestampUs + ", " + systemClockUs + ", "
- + playbackPositionUs + ", " + getSubmittedFrames() + ", " + getWrittenFrames();
- if (failOnSpuriousAudioTimestamp) {
- throw new InvalidAudioTrackTimestampException(message);
- }
- Log.w(TAG, message);
- audioTimestampSet = false;
- } else if (Math.abs(framesToDurationUs(audioTimestampFramePosition) - playbackPositionUs)
- > MAX_AUDIO_TIMESTAMP_OFFSET_US) {
- // The timestamp frame position is probably wrong.
- String message = "Spurious audio timestamp (frame position mismatch): "
- + audioTimestampFramePosition + ", " + audioTimestampUs + ", " + systemClockUs + ", "
- + playbackPositionUs + ", " + getSubmittedFrames() + ", " + getWrittenFrames();
- if (failOnSpuriousAudioTimestamp) {
- throw new InvalidAudioTrackTimestampException(message);
- }
- Log.w(TAG, message);
- audioTimestampSet = false;
- }
- }
- if (getLatencyMethod != null && !passthrough) {
- try {
- // Compute the audio track latency, excluding the latency due to the buffer (leaving
- // latency due to the mixer and audio hardware driver).
- latencyUs = (Integer) getLatencyMethod.invoke(audioTrack, (Object[]) null) * 1000L
- - bufferSizeUs;
- // Sanity check that the latency is non-negative.
- latencyUs = Math.max(latencyUs, 0);
- // Sanity check that the latency isn't too large.
- if (latencyUs > MAX_LATENCY_US) {
- Log.w(TAG, "Ignoring impossibly large audio latency: " + latencyUs);
- latencyUs = 0;
- }
- } catch (Exception e) {
- // The method existed, but doesn't work. Don't try again.
- getLatencyMethod = null;
- }
- }
- lastTimestampSampleTimeUs = systemClockUs;
- }
- }
-
- private boolean isInitialized() {
- return audioTrack != null;
- }
-
- private long framesToDurationUs(long frameCount) {
- return (frameCount * C.MICROS_PER_SECOND) / sampleRate;
- }
-
- private long durationUsToFrames(long durationUs) {
- return (durationUs * sampleRate) / C.MICROS_PER_SECOND;
- }
-
- private long getSubmittedFrames() {
- return passthrough ? submittedEncodedFrames : (submittedPcmBytes / pcmFrameSize);
- }
-
- private long getWrittenFrames() {
- return passthrough ? writtenEncodedFrames : (writtenPcmBytes / outputPcmFrameSize);
- }
-
- private void resetSyncParams() {
- smoothedPlayheadOffsetUs = 0;
- playheadOffsetCount = 0;
- nextPlayheadOffsetIndex = 0;
- lastPlayheadSampleTimeUs = 0;
- audioTimestampSet = false;
- lastTimestampSampleTimeUs = 0;
- }
-
- /**
- * Returns whether to work around problems with passthrough audio tracks.
- * See [Internal: b/18899620, b/19187573, b/21145353].
- */
- private boolean needsPassthroughWorkarounds() {
- return Util.SDK_INT < 23
- && (outputEncoding == C.ENCODING_AC3 || outputEncoding == C.ENCODING_E_AC3);
- }
-
- /**
- * Returns whether the audio track should behave as though it has pending data. This is to work
- * around an issue on platform API versions 21/22 where AC-3 audio tracks can't be paused, so we
- * empty their buffers when paused. In this case, they should still behave as if they have
- * pending data, otherwise writing will never resume.
- */
- private boolean overrideHasPendingData() {
- return needsPassthroughWorkarounds()
- && audioTrack.getPlayState() == PLAYSTATE_PAUSED
- && audioTrack.getPlaybackHeadPosition() == 0;
- }
-
- private android.media.AudioTrack initializeAudioTrack() throws InitializationException {
- android.media.AudioTrack audioTrack;
- if (Util.SDK_INT >= 21) {
- audioTrack = createAudioTrackV21();
- } else {
- int streamType = Util.getStreamTypeForAudioUsage(audioAttributes.usage);
- if (audioSessionId == C.AUDIO_SESSION_ID_UNSET) {
- audioTrack = new android.media.AudioTrack(streamType, sampleRate, channelConfig,
- outputEncoding, bufferSize, MODE_STREAM);
- } else {
- // Re-attach to the same audio session.
- audioTrack = new android.media.AudioTrack(streamType, sampleRate, channelConfig,
- outputEncoding, bufferSize, MODE_STREAM, audioSessionId);
- }
- }
-
- int state = audioTrack.getState();
- if (state != STATE_INITIALIZED) {
- try {
- audioTrack.release();
- } catch (Exception e) {
- // The track has already failed to initialize, so it wouldn't be that surprising if release
- // were to fail too. Swallow the exception.
- }
- throw new InitializationException(state, sampleRate, channelConfig, bufferSize);
- }
- return audioTrack;
- }
-
- @TargetApi(21)
- private android.media.AudioTrack createAudioTrackV21() {
- android.media.AudioAttributes attributes;
- if (tunneling) {
- attributes = new android.media.AudioAttributes.Builder()
- .setContentType(android.media.AudioAttributes.CONTENT_TYPE_MOVIE)
- .setFlags(android.media.AudioAttributes.FLAG_HW_AV_SYNC)
- .setUsage(android.media.AudioAttributes.USAGE_MEDIA)
- .build();
- } else {
- attributes = audioAttributes.getAudioAttributesV21();
- }
- AudioFormat format = new AudioFormat.Builder()
- .setChannelMask(channelConfig)
- .setEncoding(outputEncoding)
- .setSampleRate(sampleRate)
- .build();
- int audioSessionId = this.audioSessionId != C.AUDIO_SESSION_ID_UNSET ? this.audioSessionId
- : AudioManager.AUDIO_SESSION_ID_GENERATE;
- return new android.media.AudioTrack(attributes, format, bufferSize, MODE_STREAM,
- audioSessionId);
- }
-
- private android.media.AudioTrack initializeKeepSessionIdAudioTrack(int audioSessionId) {
- int sampleRate = 4000; // Equal to private android.media.AudioTrack.MIN_SAMPLE_RATE.
- int channelConfig = AudioFormat.CHANNEL_OUT_MONO;
- @C.PcmEncoding int encoding = C.ENCODING_PCM_16BIT;
- int bufferSize = 2; // Use a two byte buffer, as it is not actually used for playback.
- return new android.media.AudioTrack(C.STREAM_TYPE_DEFAULT, sampleRate, channelConfig, encoding,
- bufferSize, MODE_STATIC, audioSessionId);
- }
-
- @C.Encoding
- private static int getEncodingForMimeType(String mimeType) {
- switch (mimeType) {
- case MimeTypes.AUDIO_AC3:
- return C.ENCODING_AC3;
- case MimeTypes.AUDIO_E_AC3:
- return C.ENCODING_E_AC3;
- case MimeTypes.AUDIO_DTS:
- return C.ENCODING_DTS;
- case MimeTypes.AUDIO_DTS_HD:
- return C.ENCODING_DTS_HD;
- default:
- return C.ENCODING_INVALID;
- }
- }
-
- private static int getFramesPerEncodedSample(@C.Encoding int encoding, ByteBuffer buffer) {
- if (encoding == C.ENCODING_DTS || encoding == C.ENCODING_DTS_HD) {
- return DtsUtil.parseDtsAudioSampleCount(buffer);
- } else if (encoding == C.ENCODING_AC3) {
- return Ac3Util.getAc3SyncframeAudioSampleCount();
- } else if (encoding == C.ENCODING_E_AC3) {
- return Ac3Util.parseEAc3SyncframeAudioSampleCount(buffer);
- } else {
- throw new IllegalStateException("Unexpected audio encoding: " + encoding);
- }
- }
-
- @TargetApi(21)
- private static int writeNonBlockingV21(android.media.AudioTrack audioTrack, ByteBuffer buffer,
- int size) {
- return audioTrack.write(buffer, size, WRITE_NON_BLOCKING);
- }
-
- @TargetApi(21)
- private int writeNonBlockingWithAvSyncV21(android.media.AudioTrack audioTrack,
- ByteBuffer buffer, int size, long presentationTimeUs) {
- // TODO: Uncomment this when [Internal ref: b/33627517] is clarified or fixed.
- // if (Util.SDK_INT >= 23) {
- // // The underlying platform AudioTrack writes AV sync headers directly.
- // return audioTrack.write(buffer, size, WRITE_NON_BLOCKING, presentationTimeUs * 1000);
- // }
- if (avSyncHeader == null) {
- avSyncHeader = ByteBuffer.allocate(16);
- avSyncHeader.order(ByteOrder.BIG_ENDIAN);
- avSyncHeader.putInt(0x55550001);
- }
- if (bytesUntilNextAvSync == 0) {
- avSyncHeader.putInt(4, size);
- avSyncHeader.putLong(8, presentationTimeUs * 1000);
- avSyncHeader.position(0);
- bytesUntilNextAvSync = size;
- }
- int avSyncHeaderBytesRemaining = avSyncHeader.remaining();
- if (avSyncHeaderBytesRemaining > 0) {
- int result = audioTrack.write(avSyncHeader, avSyncHeaderBytesRemaining, WRITE_NON_BLOCKING);
- if (result < 0) {
- bytesUntilNextAvSync = 0;
- return result;
- }
- if (result < avSyncHeaderBytesRemaining) {
- return 0;
- }
- }
- int result = writeNonBlockingV21(audioTrack, buffer, size);
- if (result < 0) {
- bytesUntilNextAvSync = 0;
- return result;
- }
- bytesUntilNextAvSync -= result;
- return result;
- }
-
- @TargetApi(21)
- private static void setVolumeInternalV21(android.media.AudioTrack audioTrack, float volume) {
- audioTrack.setVolume(volume);
- }
-
- @SuppressWarnings("deprecation")
- private static void setVolumeInternalV3(android.media.AudioTrack audioTrack, float volume) {
- audioTrack.setStereoVolume(volume, volume);
- }
-
- /**
- * Wraps an {@link android.media.AudioTrack} to expose useful utility methods.
- */
- private static class AudioTrackUtil {
-
- protected android.media.AudioTrack audioTrack;
- private boolean needsPassthroughWorkaround;
- private int sampleRate;
- private long lastRawPlaybackHeadPosition;
- private long rawPlaybackHeadWrapCount;
- private long passthroughWorkaroundPauseOffset;
-
- private long stopTimestampUs;
- private long stopPlaybackHeadPosition;
- private long endPlaybackHeadPosition;
-
- /**
- * Reconfigures the audio track utility helper to use the specified {@code audioTrack}.
- *
- * @param audioTrack The audio track to wrap.
- * @param needsPassthroughWorkaround Whether to workaround issues with pausing AC-3 passthrough
- * audio tracks on platform API version 21/22.
- */
- public void reconfigure(android.media.AudioTrack audioTrack,
- boolean needsPassthroughWorkaround) {
- this.audioTrack = audioTrack;
- this.needsPassthroughWorkaround = needsPassthroughWorkaround;
- stopTimestampUs = C.TIME_UNSET;
- lastRawPlaybackHeadPosition = 0;
- rawPlaybackHeadWrapCount = 0;
- passthroughWorkaroundPauseOffset = 0;
- if (audioTrack != null) {
- sampleRate = audioTrack.getSampleRate();
- }
- }
-
- /**
- * Stops the audio track in a way that ensures media written to it is played out in full, and
- * that {@link #getPlaybackHeadPosition()} and {@link #getPositionUs()} continue to increment as
- * the remaining media is played out.
- *
- * @param writtenFrames The total number of frames that have been written.
- */
- public void handleEndOfStream(long writtenFrames) {
- stopPlaybackHeadPosition = getPlaybackHeadPosition();
- stopTimestampUs = SystemClock.elapsedRealtime() * 1000;
- endPlaybackHeadPosition = writtenFrames;
- audioTrack.stop();
- }
-
- /**
- * Pauses the audio track unless the end of the stream has been handled, in which case calling
- * this method does nothing.
- */
- public void pause() {
- if (stopTimestampUs != C.TIME_UNSET) {
- // We don't want to knock the audio track back into the paused state.
- return;
- }
- audioTrack.pause();
- }
-
- /**
- * {@link android.media.AudioTrack#getPlaybackHeadPosition()} returns a value intended to be
- * interpreted as an unsigned 32 bit integer, which also wraps around periodically. This method
- * returns the playback head position as a long that will only wrap around if the value exceeds
- * {@link Long#MAX_VALUE} (which in practice will never happen).
- *
- * @return The playback head position, in frames.
- */
- public long getPlaybackHeadPosition() {
- if (stopTimestampUs != C.TIME_UNSET) {
- // Simulate the playback head position up to the total number of frames submitted.
- long elapsedTimeSinceStopUs = (SystemClock.elapsedRealtime() * 1000) - stopTimestampUs;
- long framesSinceStop = (elapsedTimeSinceStopUs * sampleRate) / C.MICROS_PER_SECOND;
- return Math.min(endPlaybackHeadPosition, stopPlaybackHeadPosition + framesSinceStop);
- }
-
- int state = audioTrack.getPlayState();
- if (state == PLAYSTATE_STOPPED) {
- // The audio track hasn't been started.
- return 0;
- }
-
- long rawPlaybackHeadPosition = 0xFFFFFFFFL & audioTrack.getPlaybackHeadPosition();
- if (needsPassthroughWorkaround) {
- // Work around an issue with passthrough/direct AudioTracks on platform API versions 21/22
- // where the playback head position jumps back to zero on paused passthrough/direct audio
- // tracks. See [Internal: b/19187573].
- if (state == PLAYSTATE_PAUSED && rawPlaybackHeadPosition == 0) {
- passthroughWorkaroundPauseOffset = lastRawPlaybackHeadPosition;
- }
- rawPlaybackHeadPosition += passthroughWorkaroundPauseOffset;
- }
- if (lastRawPlaybackHeadPosition > rawPlaybackHeadPosition) {
- // The value must have wrapped around.
- rawPlaybackHeadWrapCount++;
- }
- lastRawPlaybackHeadPosition = rawPlaybackHeadPosition;
- return rawPlaybackHeadPosition + (rawPlaybackHeadWrapCount << 32);
- }
-
- /**
- * Returns the duration of played media since reconfiguration, in microseconds.
- */
- public long getPositionUs() {
- return (getPlaybackHeadPosition() * C.MICROS_PER_SECOND) / sampleRate;
- }
-
- /**
- * Updates the values returned by {@link #getTimestampNanoTime()} and
- * {@link #getTimestampFramePosition()}.
- *
- * @return Whether the timestamp values were updated.
- */
- public boolean updateTimestamp() {
- return false;
- }
-
- /**
- * Returns the {@link android.media.AudioTimestamp#nanoTime} obtained during the most recent
- * call to {@link #updateTimestamp()} that returned true.
- *
- * @return The nanoTime obtained during the most recent call to {@link #updateTimestamp()} that
- * returned true.
- * @throws UnsupportedOperationException If the implementation does not support audio timestamp
- * queries. {@link #updateTimestamp()} will always return false in this case.
- */
- public long getTimestampNanoTime() {
- // Should never be called if updateTimestamp() returned false.
- throw new UnsupportedOperationException();
- }
-
- /**
- * Returns the {@link android.media.AudioTimestamp#framePosition} obtained during the most
- * recent call to {@link #updateTimestamp()} that returned true. The value is adjusted so that
- * wrap around only occurs if the value exceeds {@link Long#MAX_VALUE} (which in practice will
- * never happen).
- *
- * @return The framePosition obtained during the most recent call to {@link #updateTimestamp()}
- * that returned true.
- * @throws UnsupportedOperationException If the implementation does not support audio timestamp
- * queries. {@link #updateTimestamp()} will always return false in this case.
- */
- public long getTimestampFramePosition() {
- // Should never be called if updateTimestamp() returned false.
- throw new UnsupportedOperationException();
- }
-
- }
-
- @TargetApi(19)
- private static class AudioTrackUtilV19 extends AudioTrackUtil {
-
- private final AudioTimestamp audioTimestamp;
-
- private long rawTimestampFramePositionWrapCount;
- private long lastRawTimestampFramePosition;
- private long lastTimestampFramePosition;
-
- public AudioTrackUtilV19() {
- audioTimestamp = new AudioTimestamp();
- }
-
- @Override
- public void reconfigure(android.media.AudioTrack audioTrack,
- boolean needsPassthroughWorkaround) {
- super.reconfigure(audioTrack, needsPassthroughWorkaround);
- rawTimestampFramePositionWrapCount = 0;
- lastRawTimestampFramePosition = 0;
- lastTimestampFramePosition = 0;
- }
-
- @Override
- public boolean updateTimestamp() {
- boolean updated = audioTrack.getTimestamp(audioTimestamp);
- if (updated) {
- long rawFramePosition = audioTimestamp.framePosition;
- if (lastRawTimestampFramePosition > rawFramePosition) {
- // The value must have wrapped around.
- rawTimestampFramePositionWrapCount++;
- }
- lastRawTimestampFramePosition = rawFramePosition;
- lastTimestampFramePosition = rawFramePosition + (rawTimestampFramePositionWrapCount << 32);
- }
- return updated;
- }
-
- @Override
- public long getTimestampNanoTime() {
- return audioTimestamp.nanoTime;
- }
-
- @Override
- public long getTimestampFramePosition() {
- return lastTimestampFramePosition;
- }
-
- }
-
- /**
- * Stores playback parameters with the position and media time at which they apply.
- */
- private static final class PlaybackParametersCheckpoint {
-
- private final PlaybackParameters playbackParameters;
- private final long mediaTimeUs;
- private final long positionUs;
-
- private PlaybackParametersCheckpoint(PlaybackParameters playbackParameters, long mediaTimeUs,
- long positionUs) {
- this.playbackParameters = playbackParameters;
- this.mediaTimeUs = mediaTimeUs;
- this.positionUs = positionUs;
- }
-
- }
-
-}
diff --git a/library/core/src/main/java/com/google/android/exoplayer2/audio/AudioTrackPositionTracker.java b/library/core/src/main/java/com/google/android/exoplayer2/audio/AudioTrackPositionTracker.java
new file mode 100644
index 0000000000..4714db8902
--- /dev/null
+++ b/library/core/src/main/java/com/google/android/exoplayer2/audio/AudioTrackPositionTracker.java
@@ -0,0 +1,535 @@
+/*
+ * Copyright (C) 2018 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package com.google.android.exoplayer2.audio;
+
+import android.media.AudioTimestamp;
+import android.media.AudioTrack;
+import android.os.SystemClock;
+import android.support.annotation.IntDef;
+import com.google.android.exoplayer2.C;
+import com.google.android.exoplayer2.util.Assertions;
+import com.google.android.exoplayer2.util.Util;
+import java.lang.annotation.Retention;
+import java.lang.annotation.RetentionPolicy;
+import java.lang.reflect.Method;
+
+/**
+ * Wraps an {@link AudioTrack}, exposing a position based on {@link
+ * AudioTrack#getPlaybackHeadPosition()} and {@link AudioTrack#getTimestamp(AudioTimestamp)}.
+ *
+ * <p>Call {@link #setAudioTrack(AudioTrack, int, int, int)} to set the audio track to wrap. Call
+ * {@link #mayHandleBuffer(long)} if there is input data to write to the track. If it returns false,
+ * the audio track position is stabilizing and no data may be written. Call {@link #start()}
+ * immediately before calling {@link AudioTrack#play()}. Call {@link #pause()} when pausing the
+ * track. Call {@link #handleEndOfStream(long)} when no more data will be written to the track. When
+ * the audio track will no longer be used, call {@link #reset()}.
+ */
+/* package */ final class AudioTrackPositionTracker {
+
+ /** Listener for position tracker events. */
+ public interface Listener {
+
+ /**
+ * Called when the frame position is too far from the expected frame position.
+ *
+ * @param audioTimestampPositionFrames The frame position of the last known audio track
+ * timestamp.
+ * @param audioTimestampSystemTimeUs The system time associated with the last known audio track
+ * timestamp, in microseconds.
+ * @param systemTimeUs The current time.
+ * @param playbackPositionUs The current playback head position in microseconds.
+ */
+ void onPositionFramesMismatch(
+ long audioTimestampPositionFrames,
+ long audioTimestampSystemTimeUs,
+ long systemTimeUs,
+ long playbackPositionUs);
+
+ /**
+ * Called when the system time associated with the last known audio track timestamp is
+ * unexpectedly far from the current time.
+ *
+ * @param audioTimestampPositionFrames The frame position of the last known audio track
+ * timestamp.
+ * @param audioTimestampSystemTimeUs The system time associated with the last known audio track
+ * timestamp, in microseconds.
+ * @param systemTimeUs The current time.
+ * @param playbackPositionUs The current playback head position in microseconds.
+ */
+ void onSystemTimeUsMismatch(
+ long audioTimestampPositionFrames,
+ long audioTimestampSystemTimeUs,
+ long systemTimeUs,
+ long playbackPositionUs);
+
+ /**
+ * Called when the audio track has provided an invalid latency.
+ *
+ * @param latencyUs The reported latency in microseconds.
+ */
+ void onInvalidLatency(long latencyUs);
+
+ /**
+ * Called when the audio track runs out of data to play.
+ *
+ * @param bufferSize The size of the sink's buffer, in bytes.
+ * @param bufferSizeMs The size of the sink's buffer, in milliseconds, if it is configured for
+ * PCM output. {@link C#TIME_UNSET} if it is configured for encoded audio output, as the
+ * buffered media can have a variable bitrate so the duration may be unknown.
+ */
+ void onUnderrun(int bufferSize, long bufferSizeMs);
+ }
+
+ /** {@link AudioTrack} playback states. */
+ @Retention(RetentionPolicy.SOURCE)
+ @IntDef({PLAYSTATE_STOPPED, PLAYSTATE_PAUSED, PLAYSTATE_PLAYING})
+ private @interface PlayState {}
+ /** @see AudioTrack#PLAYSTATE_STOPPED */
+ private static final int PLAYSTATE_STOPPED = AudioTrack.PLAYSTATE_STOPPED;
+ /** @see AudioTrack#PLAYSTATE_PAUSED */
+ private static final int PLAYSTATE_PAUSED = AudioTrack.PLAYSTATE_PAUSED;
+ /** @see AudioTrack#PLAYSTATE_PLAYING */
+ private static final int PLAYSTATE_PLAYING = AudioTrack.PLAYSTATE_PLAYING;
+
+ /**
+ * AudioTrack timestamps are deemed spurious if they are offset from the system clock by more than
+ * this amount.
+ *
+ * <p>This is a fail safe that should not be required on correctly functioning devices.
+ */
+ private static final long MAX_AUDIO_TIMESTAMP_OFFSET_US = 5 * C.MICROS_PER_SECOND;
+
+ /**
+ * AudioTrack latencies are deemed impossibly large if they are greater than this amount.
+ *
+ * <p>This is a fail safe that should not be required on correctly functioning devices.
+ */
+ private static final long MAX_LATENCY_US = 5 * C.MICROS_PER_SECOND;
+
+ private static final long FORCE_RESET_WORKAROUND_TIMEOUT_MS = 200;
+
+ private static final int MAX_PLAYHEAD_OFFSET_COUNT = 10;
+ private static final int MIN_PLAYHEAD_OFFSET_SAMPLE_INTERVAL_US = 30000;
+ private static final int MIN_LATENCY_SAMPLE_INTERVAL_US = 500000;
+
+ private final Listener listener;
+ private final long[] playheadOffsets;
+
+ private AudioTrack audioTrack;
+ private int outputPcmFrameSize;
+ private int bufferSize;
+ private AudioTimestampPoller audioTimestampPoller;
+ private int outputSampleRate;
+ private boolean needsPassthroughWorkarounds;
+ private long bufferSizeUs;
+
+ private long smoothedPlayheadOffsetUs;
+ private long lastPlayheadSampleTimeUs;
+
+ private Method getLatencyMethod;
+ private long latencyUs;
+ private boolean hasData;
+
+ private boolean isOutputPcm;
+ private long lastLatencySampleTimeUs;
+ private long lastRawPlaybackHeadPosition;
+ private long rawPlaybackHeadWrapCount;
+ private long passthroughWorkaroundPauseOffset;
+ private int nextPlayheadOffsetIndex;
+ private int playheadOffsetCount;
+ private long stopTimestampUs;
+ private long forceResetWorkaroundTimeMs;
+ private long stopPlaybackHeadPosition;
+ private long endPlaybackHeadPosition;
+
+ /**
+ * Creates a new audio track position tracker.
+ *
+ * @param listener A listener for position tracking events.
+ */
+ public AudioTrackPositionTracker(Listener listener) {
+ this.listener = Assertions.checkNotNull(listener);
+ if (Util.SDK_INT >= 18) {
+ try {
+ getLatencyMethod = AudioTrack.class.getMethod("getLatency", (Class<?>[]) null);
+ } catch (NoSuchMethodException e) {
+ // There's no guarantee this method exists. Do nothing.
+ }
+ }
+ playheadOffsets = new long[MAX_PLAYHEAD_OFFSET_COUNT];
+ }
+
+ /**
+ * Sets the {@link AudioTrack} to wrap. Subsequent method calls on this instance relate to this
+ * track's position, until the next call to {@link #reset()}.
+ *
+ * @param audioTrack The audio track to wrap.
+ * @param outputEncoding The encoding of the audio track.
+ * @param outputPcmFrameSize For PCM output encodings, the frame size. The value is ignored
+ * otherwise.
+ * @param bufferSize The audio track buffer size in bytes.
+ */
+ public void setAudioTrack(
+ AudioTrack audioTrack,
+ @C.Encoding int outputEncoding,
+ int outputPcmFrameSize,
+ int bufferSize) {
+ this.audioTrack = audioTrack;
+ this.outputPcmFrameSize = outputPcmFrameSize;
+ this.bufferSize = bufferSize;
+ audioTimestampPoller = new AudioTimestampPoller(audioTrack);
+ outputSampleRate = audioTrack.getSampleRate();
+ needsPassthroughWorkarounds = needsPassthroughWorkarounds(outputEncoding);
+ isOutputPcm = Util.isEncodingPcm(outputEncoding);
+ bufferSizeUs = isOutputPcm ? framesToDurationUs(bufferSize / outputPcmFrameSize) : C.TIME_UNSET;
+ lastRawPlaybackHeadPosition = 0;
+ rawPlaybackHeadWrapCount = 0;
+ passthroughWorkaroundPauseOffset = 0;
+ hasData = false;
+ stopTimestampUs = C.TIME_UNSET;
+ forceResetWorkaroundTimeMs = C.TIME_UNSET;
+ latencyUs = 0;
+ }
+
+ public long getCurrentPositionUs(boolean sourceEnded) {
+ if (audioTrack.getPlayState() == PLAYSTATE_PLAYING) {
+ maybeSampleSyncParams();
+ }
+
+ // If the device supports it, use the playback timestamp from AudioTrack.getTimestamp.
+ // Otherwise, derive a smoothed position by sampling the track's frame position.
+ long systemTimeUs = System.nanoTime() / 1000;
+ if (audioTimestampPoller.hasTimestamp()) {
+ // Calculate the speed-adjusted position using the timestamp (which may be in the future).
+ long timestampPositionFrames = audioTimestampPoller.getTimestampPositionFrames();
+ long timestampPositionUs = framesToDurationUs(timestampPositionFrames);
+ if (!audioTimestampPoller.isTimestampAdvancing()) {
+ return timestampPositionUs;
+ }
+ long elapsedSinceTimestampUs = systemTimeUs - audioTimestampPoller.getTimestampSystemTimeUs();
+ return timestampPositionUs + elapsedSinceTimestampUs;
+ } else {
+ long positionUs;
+ if (playheadOffsetCount == 0) {
+ // The AudioTrack has started, but we don't have any samples to compute a smoothed position.
+ positionUs = getPlaybackHeadPositionUs();
+ } else {
+ // getPlaybackHeadPositionUs() only has a granularity of ~20 ms, so we base the position off
+ // the system clock (and a smoothed offset between it and the playhead position) so as to
+ // prevent jitter in the reported positions.
+ positionUs = systemTimeUs + smoothedPlayheadOffsetUs;
+ }
+ if (!sourceEnded) {
+ positionUs -= latencyUs;
+ }
+ return positionUs;
+ }
+ }
+
+ /** Starts position tracking. Must be called immediately before {@link AudioTrack#play()}. */
+ public void start() {
+ audioTimestampPoller.reset();
+ }
+
+ /** Returns whether the audio track is in the playing state. */
+ public boolean isPlaying() {
+ return audioTrack.getPlayState() == PLAYSTATE_PLAYING;
+ }
+
+ /**
+ * Checks the state of the audio track and returns whether the caller can write data to the track.
+ * Notifies {@link Listener#onUnderrun(int, long)} if the track has underrun.
+ *
+ * @param writtenFrames The number of frames that have been written.
+ * @return Whether the caller can write data to the track.
+ */
+ public boolean mayHandleBuffer(long writtenFrames) {
+ @PlayState int playState = audioTrack.getPlayState();
+ if (needsPassthroughWorkarounds) {
+ // An AC-3 audio track continues to play data written while it is paused. Stop writing so its
+ // buffer empties. See [Internal: b/18899620].
+ if (playState == PLAYSTATE_PAUSED) {
+ // We force an underrun to pause the track, so don't notify the listener in this case.
+ hasData = false;
+ return false;
+ }
+
+ // A new AC-3 audio track's playback position continues to increase from the old track's
+ // position for a short time after is has been released. Avoid writing data until the playback
+ // head position actually returns to zero.
+ if (playState == PLAYSTATE_STOPPED && getPlaybackHeadPosition() == 0) {
+ return false;
+ }
+ }
+
+ boolean hadData = hasData;
+ hasData = hasPendingData(writtenFrames);
+ if (hadData && !hasData && playState != PLAYSTATE_STOPPED && listener != null) {
+ listener.onUnderrun(bufferSize, C.usToMs(bufferSizeUs));
+ }
+
+ return true;
+ }
+
+ /**
+ * Returns an estimate of the number of additional bytes that can be written to the audio track's
+ * buffer without running out of space.
+ *
+ * <p>May only be called if the output encoding is one of the PCM encodings.
+ *
+ * @param writtenBytes The number of bytes written to the audio track so far.
+ * @return An estimate of the number of bytes that can be written.
+ */
+ public int getAvailableBufferSize(long writtenBytes) {
+ int bytesPending = (int) (writtenBytes - (getPlaybackHeadPosition() * outputPcmFrameSize));
+ return bufferSize - bytesPending;
+ }
+
+ /** Returns whether the track is in an invalid state and must be recreated. */
+ public boolean isStalled(long writtenFrames) {
+ return forceResetWorkaroundTimeMs != C.TIME_UNSET
+ && writtenFrames > 0
+ && SystemClock.elapsedRealtime() - forceResetWorkaroundTimeMs
+ >= FORCE_RESET_WORKAROUND_TIMEOUT_MS;
+ }
+
+ /**
+ * Records the writing position at which the stream ended, so that the reported position can
+ * continue to increment while remaining data is played out.
+ *
+ * @param writtenFrames The number of frames that have been written.
+ */
+ public void handleEndOfStream(long writtenFrames) {
+ stopPlaybackHeadPosition = getPlaybackHeadPosition();
+ stopTimestampUs = SystemClock.elapsedRealtime() * 1000;
+ endPlaybackHeadPosition = writtenFrames;
+ }
+
+ /**
+ * Returns whether the audio track has any pending data to play out at its current position.
+ *
+ * @param writtenFrames The number of frames written to the audio track.
+ * @return Whether the audio track has any pending data to play out.
+ */
+ public boolean hasPendingData(long writtenFrames) {
+ return writtenFrames > getPlaybackHeadPosition()
+ || forceHasPendingData();
+ }
+
+ /**
+ * Pauses the audio track position tracker, returning whether the audio track needs to be paused
+ * to cause playback to pause. If {@code false} is returned the audio track will pause without
+ * further interaction, as the end of stream has been handled.
+ */
+ public boolean pause() {
+ resetSyncParams();
+ if (stopTimestampUs == C.TIME_UNSET) {
+ // The audio track is going to be paused, so reset the timestamp poller to ensure it doesn't
+ // supply an advancing position.
+ audioTimestampPoller.reset();
+ return true;
+ }
+ // We've handled the end of the stream already, so there's no need to pause the track.
+ return false;
+ }
+
+ /**
+ * Resets the position tracker. Should be called when the audio track previous passed to {@link
+ * #setAudioTrack(AudioTrack, int, int, int)} is no longer in use.
+ */
+ public void reset() {
+ resetSyncParams();
+ audioTrack = null;
+ audioTimestampPoller = null;
+ }
+
+ private void maybeSampleSyncParams() {
+ long playbackPositionUs = getPlaybackHeadPositionUs();
+ if (playbackPositionUs == 0) {
+ // The AudioTrack hasn't output anything yet.
+ return;
+ }
+ long systemTimeUs = System.nanoTime() / 1000;
+ if (systemTimeUs - lastPlayheadSampleTimeUs >= MIN_PLAYHEAD_OFFSET_SAMPLE_INTERVAL_US) {
+ // Take a new sample and update the smoothed offset between the system clock and the playhead.
+ playheadOffsets[nextPlayheadOffsetIndex] = playbackPositionUs - systemTimeUs;
+ nextPlayheadOffsetIndex = (nextPlayheadOffsetIndex + 1) % MAX_PLAYHEAD_OFFSET_COUNT;
+ if (playheadOffsetCount < MAX_PLAYHEAD_OFFSET_COUNT) {
+ playheadOffsetCount++;
+ }
+ lastPlayheadSampleTimeUs = systemTimeUs;
+ smoothedPlayheadOffsetUs = 0;
+ for (int i = 0; i < playheadOffsetCount; i++) {
+ smoothedPlayheadOffsetUs += playheadOffsets[i] / playheadOffsetCount;
+ }
+ }
+
+ if (needsPassthroughWorkarounds) {
+ // Don't sample the timestamp and latency if this is an AC-3 passthrough AudioTrack on
+ // platform API versions 21/22, as incorrect values are returned. See [Internal: b/21145353].
+ return;
+ }
+
+ maybePollAndCheckTimestamp(systemTimeUs, playbackPositionUs);
+ maybeUpdateLatency(systemTimeUs);
+ }
+
+ private void maybePollAndCheckTimestamp(long systemTimeUs, long playbackPositionUs) {
+ if (!audioTimestampPoller.maybePollTimestamp(systemTimeUs)) {
+ return;
+ }
+
+ // Perform sanity checks on the timestamp and accept/reject it.
+ long audioTimestampSystemTimeUs = audioTimestampPoller.getTimestampSystemTimeUs();
+ long audioTimestampPositionFrames = audioTimestampPoller.getTimestampPositionFrames();
+ if (Math.abs(audioTimestampSystemTimeUs - systemTimeUs) > MAX_AUDIO_TIMESTAMP_OFFSET_US) {
+ listener.onSystemTimeUsMismatch(
+ audioTimestampPositionFrames,
+ audioTimestampSystemTimeUs,
+ systemTimeUs,
+ playbackPositionUs);
+ audioTimestampPoller.rejectTimestamp();
+ } else if (Math.abs(framesToDurationUs(audioTimestampPositionFrames) - playbackPositionUs)
+ > MAX_AUDIO_TIMESTAMP_OFFSET_US) {
+ listener.onPositionFramesMismatch(
+ audioTimestampPositionFrames,
+ audioTimestampSystemTimeUs,
+ systemTimeUs,
+ playbackPositionUs);
+ audioTimestampPoller.rejectTimestamp();
+ } else {
+ audioTimestampPoller.acceptTimestamp();
+ }
+ }
+
+ private void maybeUpdateLatency(long systemTimeUs) {
+ if (isOutputPcm
+ && getLatencyMethod != null
+ && systemTimeUs - lastLatencySampleTimeUs >= MIN_LATENCY_SAMPLE_INTERVAL_US) {
+ try {
+ // Compute the audio track latency, excluding the latency due to the buffer (leaving
+ // latency due to the mixer and audio hardware driver).
+ latencyUs =
+ (Integer) getLatencyMethod.invoke(audioTrack, (Object[]) null) * 1000L - bufferSizeUs;
+ // Sanity check that the latency is non-negative.
+ latencyUs = Math.max(latencyUs, 0);
+ // Sanity check that the latency isn't too large.
+ if (latencyUs > MAX_LATENCY_US) {
+ listener.onInvalidLatency(latencyUs);
+ latencyUs = 0;
+ }
+ } catch (Exception e) {
+ // The method existed, but doesn't work. Don't try again.
+ getLatencyMethod = null;
+ }
+ lastLatencySampleTimeUs = systemTimeUs;
+ }
+ }
+
+ private long framesToDurationUs(long frameCount) {
+ return (frameCount * C.MICROS_PER_SECOND) / outputSampleRate;
+ }
+
+ private void resetSyncParams() {
+ smoothedPlayheadOffsetUs = 0;
+ playheadOffsetCount = 0;
+ nextPlayheadOffsetIndex = 0;
+ lastPlayheadSampleTimeUs = 0;
+ }
+
+ /**
+ * If passthrough workarounds are enabled, pausing is implemented by forcing the AudioTrack to
+ * underrun. In this case, still behave as if we have pending data, otherwise writing won't
+ * resume.
+ */
+ private boolean forceHasPendingData() {
+ return needsPassthroughWorkarounds
+ && audioTrack.getPlayState() == AudioTrack.PLAYSTATE_PAUSED
+ && getPlaybackHeadPosition() == 0;
+ }
+
+ /**
+ * Returns whether to work around problems with passthrough audio tracks. See [Internal:
+ * b/18899620, b/19187573, b/21145353].
+ */
+ private static boolean needsPassthroughWorkarounds(@C.Encoding int outputEncoding) {
+ return Util.SDK_INT < 23
+ && (outputEncoding == C.ENCODING_AC3 || outputEncoding == C.ENCODING_E_AC3);
+ }
+
+ private long getPlaybackHeadPositionUs() {
+ return framesToDurationUs(getPlaybackHeadPosition());
+ }
+
+ /**
+ * {@link AudioTrack#getPlaybackHeadPosition()} returns a value intended to be interpreted as an
+ * unsigned 32 bit integer, which also wraps around periodically. This method returns the playback
+ * head position as a long that will only wrap around if the value exceeds {@link Long#MAX_VALUE}
+ * (which in practice will never happen).
+ *
+ * @return The playback head position, in frames.
+ */
+ private long getPlaybackHeadPosition() {
+ if (stopTimestampUs != C.TIME_UNSET) {
+ // Simulate the playback head position up to the total number of frames submitted.
+ long elapsedTimeSinceStopUs = (SystemClock.elapsedRealtime() * 1000) - stopTimestampUs;
+ long framesSinceStop = (elapsedTimeSinceStopUs * outputSampleRate) / C.MICROS_PER_SECOND;
+ return Math.min(endPlaybackHeadPosition, stopPlaybackHeadPosition + framesSinceStop);
+ }
+
+ int state = audioTrack.getPlayState();
+ if (state == PLAYSTATE_STOPPED) {
+ // The audio track hasn't been started.
+ return 0;
+ }
+
+ long rawPlaybackHeadPosition = 0xFFFFFFFFL & audioTrack.getPlaybackHeadPosition();
+ if (needsPassthroughWorkarounds) {
+ // Work around an issue with passthrough/direct AudioTracks on platform API versions 21/22
+ // where the playback head position jumps back to zero on paused passthrough/direct audio
+ // tracks. See [Internal: b/19187573].
+ if (state == PLAYSTATE_PAUSED && rawPlaybackHeadPosition == 0) {
+ passthroughWorkaroundPauseOffset = lastRawPlaybackHeadPosition;
+ }
+ rawPlaybackHeadPosition += passthroughWorkaroundPauseOffset;
+ }
+
+ if (Util.SDK_INT <= 28) {
+ if (rawPlaybackHeadPosition == 0
+ && lastRawPlaybackHeadPosition > 0
+ && state == PLAYSTATE_PLAYING) {
+ // If connecting a Bluetooth audio device fails, the AudioTrack may be left in a state
+ // where its Java API is in the playing state, but the native track is stopped. When this
+ // happens the playback head position gets stuck at zero. In this case, return the old
+ // playback head position and force the track to be reset after
+ // {@link #FORCE_RESET_WORKAROUND_TIMEOUT_MS} has elapsed.
+ if (forceResetWorkaroundTimeMs == C.TIME_UNSET) {
+ forceResetWorkaroundTimeMs = SystemClock.elapsedRealtime();
+ }
+ return lastRawPlaybackHeadPosition;
+ } else {
+ forceResetWorkaroundTimeMs = C.TIME_UNSET;
+ }
+ }
+
+ if (lastRawPlaybackHeadPosition > rawPlaybackHeadPosition) {
+ // The value must have wrapped around.
+ rawPlaybackHeadWrapCount++;
+ }
+ lastRawPlaybackHeadPosition = rawPlaybackHeadPosition;
+ return rawPlaybackHeadPosition + (rawPlaybackHeadWrapCount << 32);
+ }
+}
diff --git a/library/core/src/main/java/com/google/android/exoplayer2/audio/ChannelMappingAudioProcessor.java b/library/core/src/main/java/com/google/android/exoplayer2/audio/ChannelMappingAudioProcessor.java
index b755776f1e..e53eb08c83 100644
--- a/library/core/src/main/java/com/google/android/exoplayer2/audio/ChannelMappingAudioProcessor.java
+++ b/library/core/src/main/java/com/google/android/exoplayer2/audio/ChannelMappingAudioProcessor.java
@@ -15,9 +15,11 @@
*/
package com.google.android.exoplayer2.audio;
+import android.support.annotation.Nullable;
import com.google.android.exoplayer2.C;
import com.google.android.exoplayer2.C.Encoding;
import com.google.android.exoplayer2.Format;
+import com.google.android.exoplayer2.util.Assertions;
import java.nio.ByteBuffer;
import java.nio.ByteOrder;
import java.util.Arrays;
@@ -30,17 +32,15 @@ import java.util.Arrays;
private int channelCount;
private int sampleRateHz;
- private int[] pendingOutputChannels;
+ private @Nullable int[] pendingOutputChannels;
private boolean active;
- private int[] outputChannels;
+ private @Nullable int[] outputChannels;
private ByteBuffer buffer;
private ByteBuffer outputBuffer;
private boolean inputEnded;
- /**
- * Creates a new processor that applies a channel mapping.
- */
+ /** Creates a new processor that applies a channel mapping. */
public ChannelMappingAudioProcessor() {
buffer = EMPTY_BUFFER;
outputBuffer = EMPTY_BUFFER;
@@ -52,9 +52,11 @@ import java.util.Arrays;
* Resets the channel mapping. After calling this method, call {@link #configure(int, int, int)}
* to start using the new channel map.
*
- * @see AudioTrack#configure(String, int, int, int, int, int[])
+ * @param outputChannels The mapping from input to output channel indices, or {@code null} to
+ * leave the input unchanged.
+ * @see AudioSink#configure(int, int, int, int, int[], int, int)
*/
- public void setChannelMap(int[] outputChannels) {
+ public void setChannelMap(@Nullable int[] outputChannels) {
pendingOutputChannels = outputChannels;
}
@@ -103,8 +105,14 @@ import java.util.Arrays;
return C.ENCODING_PCM_16BIT;
}
+ @Override
+ public int getOutputSampleRateHz() {
+ return sampleRateHz;
+ }
+
@Override
public void queueInput(ByteBuffer inputBuffer) {
+ Assertions.checkState(outputChannels != null);
int position = inputBuffer.position();
int limit = inputBuffer.limit();
int frameCount = (limit - position) / (2 * channelCount);
@@ -156,6 +164,7 @@ import java.util.Arrays;
channelCount = Format.NO_VALUE;
sampleRateHz = Format.NO_VALUE;
outputChannels = null;
+ pendingOutputChannels = null;
active = false;
}
diff --git a/library/core/src/main/java/com/google/android/exoplayer2/audio/DefaultAudioSink.java b/library/core/src/main/java/com/google/android/exoplayer2/audio/DefaultAudioSink.java
new file mode 100644
index 0000000000..1025cb953b
--- /dev/null
+++ b/library/core/src/main/java/com/google/android/exoplayer2/audio/DefaultAudioSink.java
@@ -0,0 +1,1328 @@
+/*
+ * Copyright (C) 2016 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package com.google.android.exoplayer2.audio;
+
+import android.annotation.SuppressLint;
+import android.annotation.TargetApi;
+import android.media.AudioFormat;
+import android.media.AudioManager;
+import android.media.AudioTrack;
+import android.os.ConditionVariable;
+import android.os.SystemClock;
+import android.support.annotation.IntDef;
+import android.support.annotation.Nullable;
+import android.util.Log;
+import com.google.android.exoplayer2.C;
+import com.google.android.exoplayer2.PlaybackParameters;
+import com.google.android.exoplayer2.util.Assertions;
+import com.google.android.exoplayer2.util.Util;
+import java.lang.annotation.Retention;
+import java.lang.annotation.RetentionPolicy;
+import java.nio.ByteBuffer;
+import java.nio.ByteOrder;
+import java.util.ArrayDeque;
+import java.util.ArrayList;
+import java.util.Arrays;
+import java.util.Collections;
+
+/**
+ * Plays audio data. The implementation delegates to an {@link AudioTrack} and handles playback
+ * position smoothing, non-blocking writes and reconfiguration.
+ *
+ * If tunneling mode is enabled, care must be taken that audio processors do not output buffers with
+ * a different duration than their input, and buffer processors must produce output corresponding to
+ * their last input immediately after that input is queued. This means that, for example, speed
+ * adjustment is not possible while using tunneling.
+ */
+public final class DefaultAudioSink implements AudioSink {
+
+ /**
+ * Thrown when the audio track has provided a spurious timestamp, if {@link
+ * #failOnSpuriousAudioTimestamp} is set.
+ */
+ public static final class InvalidAudioTrackTimestampException extends RuntimeException {
+
+ /**
+ * Creates a new invalid timestamp exception with the specified message.
+ *
+ * @param message The detail message for this exception.
+ */
+ private InvalidAudioTrackTimestampException(String message) {
+ super(message);
+ }
+
+ }
+
+ /**
+ * Provides a chain of audio processors, which are used for any user-defined processing and
+ * applying playback parameters (if supported). Because applying playback parameters can skip and
+ * stretch/compress audio, the sink will query the chain for information on how to transform its
+ * output position to map it onto a media position, via {@link #getMediaDuration(long)} and {@link
+ * #getSkippedOutputFrameCount()}.
+ */
+ public interface AudioProcessorChain {
+
+ /**
+ * Returns the fixed chain of audio processors that will process audio. This method is called
+ * once during initialization, but audio processors may change state to become active/inactive
+ * during playback.
+ */
+ AudioProcessor[] getAudioProcessors();
+
+ /**
+ * Configures audio processors to apply the specified playback parameters immediately, returning
+ * the new parameters, which may differ from those passed in. Only called when processors have
+ * no input pending.
+ *
+ * @param playbackParameters The playback parameters to try to apply.
+ * @return The playback parameters that were actually applied.
+ */
+ PlaybackParameters applyPlaybackParameters(PlaybackParameters playbackParameters);
+
+ /**
+ * Scales the specified playout duration to take into account speedup due to audio processing,
+ * returning an input media duration, in arbitrary units.
+ */
+ long getMediaDuration(long playoutDuration);
+
+ /**
+ * Returns the number of output audio frames skipped since the audio processors were last
+ * flushed.
+ */
+ long getSkippedOutputFrameCount();
+ }
+
+ /**
+ * The default audio processor chain, which applies a (possibly empty) chain of user-defined audio
+ * processors followed by {@link SilenceSkippingAudioProcessor} and {@link SonicAudioProcessor}.
+ */
+ public static class DefaultAudioProcessorChain implements AudioProcessorChain {
+
+ private final AudioProcessor[] audioProcessors;
+ private final SilenceSkippingAudioProcessor silenceSkippingAudioProcessor;
+ private final SonicAudioProcessor sonicAudioProcessor;
+
+ /**
+ * Creates a new default chain of audio processors, with the user-defined {@code
+ * audioProcessors} applied before silence skipping and playback parameters.
+ */
+ public DefaultAudioProcessorChain(AudioProcessor... audioProcessors) {
+ this.audioProcessors = Arrays.copyOf(audioProcessors, audioProcessors.length + 2);
+ silenceSkippingAudioProcessor = new SilenceSkippingAudioProcessor();
+ sonicAudioProcessor = new SonicAudioProcessor();
+ this.audioProcessors[audioProcessors.length] = silenceSkippingAudioProcessor;
+ this.audioProcessors[audioProcessors.length + 1] = sonicAudioProcessor;
+ }
+
+ @Override
+ public AudioProcessor[] getAudioProcessors() {
+ return audioProcessors;
+ }
+
+ @Override
+ public PlaybackParameters applyPlaybackParameters(PlaybackParameters playbackParameters) {
+ silenceSkippingAudioProcessor.setEnabled(playbackParameters.skipSilence);
+ return new PlaybackParameters(
+ sonicAudioProcessor.setSpeed(playbackParameters.speed),
+ sonicAudioProcessor.setPitch(playbackParameters.pitch),
+ playbackParameters.skipSilence);
+ }
+
+ @Override
+ public long getMediaDuration(long playoutDuration) {
+ return sonicAudioProcessor.scaleDurationForSpeedup(playoutDuration);
+ }
+
+ @Override
+ public long getSkippedOutputFrameCount() {
+ return silenceSkippingAudioProcessor.getSkippedFrames();
+ }
+ }
+
+ /**
+ * A minimum length for the {@link AudioTrack} buffer, in microseconds.
+ */
+ private static final long MIN_BUFFER_DURATION_US = 250000;
+ /**
+ * A maximum length for the {@link AudioTrack} buffer, in microseconds.
+ */
+ private static final long MAX_BUFFER_DURATION_US = 750000;
+ /**
+ * The length for passthrough {@link AudioTrack} buffers, in microseconds.
+ */
+ private static final long PASSTHROUGH_BUFFER_DURATION_US = 250000;
+ /**
+ * A multiplication factor to apply to the minimum buffer size requested by the underlying
+ * {@link AudioTrack}.
+ */
+ private static final int BUFFER_MULTIPLICATION_FACTOR = 4;
+
+ /**
+ * @see AudioTrack#ERROR_BAD_VALUE
+ */
+ private static final int ERROR_BAD_VALUE = AudioTrack.ERROR_BAD_VALUE;
+ /**
+ * @see AudioTrack#MODE_STATIC
+ */
+ private static final int MODE_STATIC = AudioTrack.MODE_STATIC;
+ /**
+ * @see AudioTrack#MODE_STREAM
+ */
+ private static final int MODE_STREAM = AudioTrack.MODE_STREAM;
+ /**
+ * @see AudioTrack#STATE_INITIALIZED
+ */
+ private static final int STATE_INITIALIZED = AudioTrack.STATE_INITIALIZED;
+ /**
+ * @see AudioTrack#WRITE_NON_BLOCKING
+ */
+ @SuppressLint("InlinedApi")
+ private static final int WRITE_NON_BLOCKING = AudioTrack.WRITE_NON_BLOCKING;
+
+ private static final String TAG = "AudioTrack";
+
+ /**
+ * Represents states of the {@link #startMediaTimeUs} value.
+ */
+ @Retention(RetentionPolicy.SOURCE)
+ @IntDef({START_NOT_SET, START_IN_SYNC, START_NEED_SYNC})
+ private @interface StartMediaTimeState {}
+ private static final int START_NOT_SET = 0;
+ private static final int START_IN_SYNC = 1;
+ private static final int START_NEED_SYNC = 2;
+
+ /**
+ * Whether to enable a workaround for an issue where an audio effect does not keep its session
+ * active across releasing/initializing a new audio track, on platform builds where
+ * {@link Util#SDK_INT} < 21.
+ *
+ * The flag must be set before creating a player.
+ */
+ public static boolean enablePreV21AudioSessionWorkaround = false;
+
+ /**
+ * Whether to throw an {@link InvalidAudioTrackTimestampException} when a spurious timestamp is
+ * reported from {@link AudioTrack#getTimestamp}.
+ *
+ * The flag must be set before creating a player. Should be set to {@code true} for testing and
+ * debugging purposes only.
+ */
+ public static boolean failOnSpuriousAudioTimestamp = false;
+
+ @Nullable private final AudioCapabilities audioCapabilities;
+ private final AudioProcessorChain audioProcessorChain;
+ private final boolean enableConvertHighResIntPcmToFloat;
+ private final ChannelMappingAudioProcessor channelMappingAudioProcessor;
+ private final TrimmingAudioProcessor trimmingAudioProcessor;
+ private final AudioProcessor[] toIntPcmAvailableAudioProcessors;
+ private final AudioProcessor[] toFloatPcmAvailableAudioProcessors;
+ private final ConditionVariable releasingConditionVariable;
+ private final AudioTrackPositionTracker audioTrackPositionTracker;
+ private final ArrayDeque playbackParametersCheckpoints;
+
+ @Nullable private Listener listener;
+ /** Used to keep the audio session active on pre-V21 builds (see {@link #initialize()}). */
+ @Nullable private AudioTrack keepSessionIdAudioTrack;
+
+ private AudioTrack audioTrack;
+ private boolean isInputPcm;
+ private boolean shouldConvertHighResIntPcmToFloat;
+ private int inputSampleRate;
+ private int outputSampleRate;
+ private int outputChannelConfig;
+ private @C.Encoding int outputEncoding;
+ private AudioAttributes audioAttributes;
+ private boolean processingEnabled;
+ private boolean canApplyPlaybackParameters;
+ private int bufferSize;
+
+ @Nullable private PlaybackParameters afterDrainPlaybackParameters;
+ private PlaybackParameters playbackParameters;
+ private long playbackParametersOffsetUs;
+ private long playbackParametersPositionUs;
+
+ @Nullable private ByteBuffer avSyncHeader;
+ private int bytesUntilNextAvSync;
+
+ private int pcmFrameSize;
+ private long submittedPcmBytes;
+ private long submittedEncodedFrames;
+ private int outputPcmFrameSize;
+ private long writtenPcmBytes;
+ private long writtenEncodedFrames;
+ private int framesPerEncodedSample;
+ private @StartMediaTimeState int startMediaTimeState;
+ private long startMediaTimeUs;
+ private float volume;
+
+ private AudioProcessor[] activeAudioProcessors;
+ private ByteBuffer[] outputBuffers;
+ @Nullable private ByteBuffer inputBuffer;
+ @Nullable private ByteBuffer outputBuffer;
+ private byte[] preV21OutputBuffer;
+ private int preV21OutputBufferOffset;
+ private int drainingAudioProcessorIndex;
+ private boolean handledEndOfStream;
+
+ private boolean playing;
+ private int audioSessionId;
+ private boolean tunneling;
+ private long lastFeedElapsedRealtimeMs;
+
+ /**
+ * Creates a new default audio sink.
+ *
+ * @param audioCapabilities The audio capabilities for playback on this device. May be null if the
+ * default capabilities (no encoded audio passthrough support) should be assumed.
+ * @param audioProcessors An array of {@link AudioProcessor}s that will process PCM audio before
+ * output. May be empty.
+ */
+ public DefaultAudioSink(
+ @Nullable AudioCapabilities audioCapabilities, AudioProcessor[] audioProcessors) {
+ this(audioCapabilities, audioProcessors, /* enableConvertHighResIntPcmToFloat= */ false);
+ }
+
+ /**
+ * Creates a new default audio sink, optionally using float output for high resolution PCM.
+ *
+ * @param audioCapabilities The audio capabilities for playback on this device. May be null if the
+ * default capabilities (no encoded audio passthrough support) should be assumed.
+ * @param audioProcessors An array of {@link AudioProcessor}s that will process PCM audio before
+ * output. May be empty.
+ * @param enableConvertHighResIntPcmToFloat Whether to enable conversion of high resolution
+ * integer PCM to 32-bit float for output, if possible. Functionality that uses 16-bit integer
+ * audio processing (for example, speed and pitch adjustment) will not be available when float
+ * output is in use.
+ */
+ public DefaultAudioSink(
+ @Nullable AudioCapabilities audioCapabilities,
+ AudioProcessor[] audioProcessors,
+ boolean enableConvertHighResIntPcmToFloat) {
+ this(
+ audioCapabilities,
+ new DefaultAudioProcessorChain(audioProcessors),
+ enableConvertHighResIntPcmToFloat);
+ }
+
+ /**
+ * Creates a new default audio sink, optionally using float output for high resolution PCM and
+ * with the specified {@code audioProcessorChain}.
+ *
+ * @param audioCapabilities The audio capabilities for playback on this device. May be null if the
+ * default capabilities (no encoded audio passthrough support) should be assumed.
+ * @param audioProcessorChain An {@link AudioProcessorChain} which is used to apply playback
+ * parameters adjustments. The instance passed in must not be reused in other sinks.
+ * @param enableConvertHighResIntPcmToFloat Whether to enable conversion of high resolution
+ * integer PCM to 32-bit float for output, if possible. Functionality that uses 16-bit integer
+ * audio processing (for example, speed and pitch adjustment) will not be available when float
+ * output is in use.
+ */
+ public DefaultAudioSink(
+ @Nullable AudioCapabilities audioCapabilities,
+ AudioProcessorChain audioProcessorChain,
+ boolean enableConvertHighResIntPcmToFloat) {
+ this.audioCapabilities = audioCapabilities;
+ this.audioProcessorChain = Assertions.checkNotNull(audioProcessorChain);
+ this.enableConvertHighResIntPcmToFloat = enableConvertHighResIntPcmToFloat;
+ releasingConditionVariable = new ConditionVariable(true);
+ audioTrackPositionTracker = new AudioTrackPositionTracker(new PositionTrackerListener());
+ channelMappingAudioProcessor = new ChannelMappingAudioProcessor();
+ trimmingAudioProcessor = new TrimmingAudioProcessor();
+ ArrayList toIntPcmAudioProcessors = new ArrayList<>();
+ Collections.addAll(
+ toIntPcmAudioProcessors,
+ new ResamplingAudioProcessor(),
+ channelMappingAudioProcessor,
+ trimmingAudioProcessor);
+ Collections.addAll(toIntPcmAudioProcessors, audioProcessorChain.getAudioProcessors());
+ toIntPcmAvailableAudioProcessors =
+ toIntPcmAudioProcessors.toArray(new AudioProcessor[toIntPcmAudioProcessors.size()]);
+ toFloatPcmAvailableAudioProcessors = new AudioProcessor[] {new FloatResamplingAudioProcessor()};
+ volume = 1.0f;
+ startMediaTimeState = START_NOT_SET;
+ audioAttributes = AudioAttributes.DEFAULT;
+ audioSessionId = C.AUDIO_SESSION_ID_UNSET;
+ playbackParameters = PlaybackParameters.DEFAULT;
+ drainingAudioProcessorIndex = C.INDEX_UNSET;
+ activeAudioProcessors = new AudioProcessor[0];
+ outputBuffers = new ByteBuffer[0];
+ playbackParametersCheckpoints = new ArrayDeque<>();
+ }
+
+ // AudioSink implementation.
+
+ @Override
+ public void setListener(Listener listener) {
+ this.listener = listener;
+ }
+
+ @Override
+ public boolean isEncodingSupported(@C.Encoding int encoding) {
+ if (Util.isEncodingPcm(encoding)) {
+ // AudioTrack supports 16-bit integer PCM output in all platform API versions, and float
+ // output from platform API version 21 only. Other integer PCM encodings are resampled by this
+ // sink to 16-bit PCM.
+ return encoding != C.ENCODING_PCM_FLOAT || Util.SDK_INT >= 21;
+ } else {
+ return audioCapabilities != null && audioCapabilities.supportsEncoding(encoding);
+ }
+ }
+
+ @Override
+ public long getCurrentPositionUs(boolean sourceEnded) {
+ if (!isInitialized() || startMediaTimeState == START_NOT_SET) {
+ return CURRENT_POSITION_NOT_SET;
+ }
+ long positionUs = audioTrackPositionTracker.getCurrentPositionUs(sourceEnded);
+ positionUs = Math.min(positionUs, framesToDurationUs(getWrittenFrames()));
+ return startMediaTimeUs + applySkipping(applySpeedup(positionUs));
+ }
+
+ @Override
+ public void configure(
+ @C.Encoding int inputEncoding,
+ int inputChannelCount,
+ int inputSampleRate,
+ int specifiedBufferSize,
+ @Nullable int[] outputChannels,
+ int trimStartFrames,
+ int trimEndFrames)
+ throws ConfigurationException {
+ boolean flush = false;
+ this.inputSampleRate = inputSampleRate;
+ int channelCount = inputChannelCount;
+ int sampleRate = inputSampleRate;
+ isInputPcm = Util.isEncodingPcm(inputEncoding);
+ shouldConvertHighResIntPcmToFloat =
+ enableConvertHighResIntPcmToFloat
+ && isEncodingSupported(C.ENCODING_PCM_32BIT)
+ && Util.isEncodingHighResolutionIntegerPcm(inputEncoding);
+ if (isInputPcm) {
+ pcmFrameSize = Util.getPcmFrameSize(inputEncoding, channelCount);
+ }
+ @C.Encoding int encoding = inputEncoding;
+ boolean processingEnabled = isInputPcm && inputEncoding != C.ENCODING_PCM_FLOAT;
+ canApplyPlaybackParameters = processingEnabled && !shouldConvertHighResIntPcmToFloat;
+ if (processingEnabled) {
+ trimmingAudioProcessor.setTrimFrameCount(trimStartFrames, trimEndFrames);
+ channelMappingAudioProcessor.setChannelMap(outputChannels);
+ for (AudioProcessor audioProcessor : getAvailableAudioProcessors()) {
+ try {
+ flush |= audioProcessor.configure(sampleRate, channelCount, encoding);
+ } catch (AudioProcessor.UnhandledFormatException e) {
+ throw new ConfigurationException(e);
+ }
+ if (audioProcessor.isActive()) {
+ channelCount = audioProcessor.getOutputChannelCount();
+ sampleRate = audioProcessor.getOutputSampleRateHz();
+ encoding = audioProcessor.getOutputEncoding();
+ }
+ }
+ }
+
+ int channelConfig;
+ switch (channelCount) {
+ case 1:
+ channelConfig = AudioFormat.CHANNEL_OUT_MONO;
+ break;
+ case 2:
+ channelConfig = AudioFormat.CHANNEL_OUT_STEREO;
+ break;
+ case 3:
+ channelConfig = AudioFormat.CHANNEL_OUT_STEREO | AudioFormat.CHANNEL_OUT_FRONT_CENTER;
+ break;
+ case 4:
+ channelConfig = AudioFormat.CHANNEL_OUT_QUAD;
+ break;
+ case 5:
+ channelConfig = AudioFormat.CHANNEL_OUT_QUAD | AudioFormat.CHANNEL_OUT_FRONT_CENTER;
+ break;
+ case 6:
+ channelConfig = AudioFormat.CHANNEL_OUT_5POINT1;
+ break;
+ case 7:
+ channelConfig = AudioFormat.CHANNEL_OUT_5POINT1 | AudioFormat.CHANNEL_OUT_BACK_CENTER;
+ break;
+ case 8:
+ channelConfig = C.CHANNEL_OUT_7POINT1_SURROUND;
+ break;
+ default:
+ throw new ConfigurationException("Unsupported channel count: " + channelCount);
+ }
+
+ // Workaround for overly strict channel configuration checks on nVidia Shield.
+ if (Util.SDK_INT <= 23 && "foster".equals(Util.DEVICE) && "NVIDIA".equals(Util.MANUFACTURER)) {
+ switch (channelCount) {
+ case 7:
+ channelConfig = C.CHANNEL_OUT_7POINT1_SURROUND;
+ break;
+ case 3:
+ case 5:
+ channelConfig = AudioFormat.CHANNEL_OUT_5POINT1;
+ break;
+ default:
+ break;
+ }
+ }
+
+ // Workaround for Nexus Player not reporting support for mono passthrough.
+ // (See [Internal: b/34268671].)
+ if (Util.SDK_INT <= 25 && "fugu".equals(Util.DEVICE) && !isInputPcm && channelCount == 1) {
+ channelConfig = AudioFormat.CHANNEL_OUT_STEREO;
+ }
+
+ if (!flush
+ && isInitialized()
+ && outputEncoding == encoding
+ && outputSampleRate == sampleRate
+ && outputChannelConfig == channelConfig) {
+ // We already have an audio track with the correct sample rate, channel config and encoding.
+ return;
+ }
+
+ reset();
+
+ this.processingEnabled = processingEnabled;
+ outputSampleRate = sampleRate;
+ outputChannelConfig = channelConfig;
+ outputEncoding = encoding;
+ outputPcmFrameSize =
+ isInputPcm ? Util.getPcmFrameSize(outputEncoding, channelCount) : C.LENGTH_UNSET;
+ if (specifiedBufferSize != 0) {
+ bufferSize = specifiedBufferSize;
+ } else if (isInputPcm) {
+ int minBufferSize = AudioTrack.getMinBufferSize(sampleRate, channelConfig, outputEncoding);
+ Assertions.checkState(minBufferSize != ERROR_BAD_VALUE);
+ int multipliedBufferSize = minBufferSize * BUFFER_MULTIPLICATION_FACTOR;
+ int minAppBufferSize = (int) durationUsToFrames(MIN_BUFFER_DURATION_US) * outputPcmFrameSize;
+ int maxAppBufferSize = (int) Math.max(minBufferSize,
+ durationUsToFrames(MAX_BUFFER_DURATION_US) * outputPcmFrameSize);
+ bufferSize = Util.constrainValue(multipliedBufferSize, minAppBufferSize, maxAppBufferSize);
+ } else {
+ // TODO: Set the minimum buffer size using getMinBufferSize when it takes the encoding into
+ // account. [Internal: b/25181305]
+ if (outputEncoding == C.ENCODING_AC3 || outputEncoding == C.ENCODING_E_AC3) {
+ // AC-3 allows bitrates up to 640 kbit/s.
+ bufferSize = (int) (PASSTHROUGH_BUFFER_DURATION_US * 80 * 1024 / C.MICROS_PER_SECOND);
+ } else if (outputEncoding == C.ENCODING_DTS) {
+ // DTS allows an 'open' bitrate, but we assume the maximum listed value: 1536 kbit/s.
+ bufferSize = (int) (PASSTHROUGH_BUFFER_DURATION_US * 192 * 1024 / C.MICROS_PER_SECOND);
+ } else /* outputEncoding == C.ENCODING_DTS_HD || outputEncoding == C.ENCODING_DOLBY_TRUEHD*/ {
+ // HD passthrough requires a larger buffer to avoid underrun.
+ bufferSize = (int) (PASSTHROUGH_BUFFER_DURATION_US * 192 * 6 * 1024 / C.MICROS_PER_SECOND);
+ }
+ }
+ }
+
+ private void setupAudioProcessors() {
+ ArrayList newAudioProcessors = new ArrayList<>();
+ for (AudioProcessor audioProcessor : getAvailableAudioProcessors()) {
+ if (audioProcessor.isActive()) {
+ newAudioProcessors.add(audioProcessor);
+ } else {
+ audioProcessor.flush();
+ }
+ }
+ int count = newAudioProcessors.size();
+ activeAudioProcessors = newAudioProcessors.toArray(new AudioProcessor[count]);
+ outputBuffers = new ByteBuffer[count];
+ flushAudioProcessors();
+ }
+
+ private void flushAudioProcessors() {
+ for (int i = 0; i < activeAudioProcessors.length; i++) {
+ AudioProcessor audioProcessor = activeAudioProcessors[i];
+ audioProcessor.flush();
+ outputBuffers[i] = audioProcessor.getOutput();
+ }
+ }
+
+ private void initialize() throws InitializationException {
+ // If we're asynchronously releasing a previous audio track then we block until it has been
+ // released. This guarantees that we cannot end up in a state where we have multiple audio
+ // track instances. Without this guarantee it would be possible, in extreme cases, to exhaust
+ // the shared memory that's available for audio track buffers. This would in turn cause the
+ // initialization of the audio track to fail.
+ releasingConditionVariable.block();
+
+ audioTrack = initializeAudioTrack();
+ int audioSessionId = audioTrack.getAudioSessionId();
+ if (enablePreV21AudioSessionWorkaround) {
+ if (Util.SDK_INT < 21) {
+ // The workaround creates an audio track with a two byte buffer on the same session, and
+ // does not release it until this object is released, which keeps the session active.
+ if (keepSessionIdAudioTrack != null
+ && audioSessionId != keepSessionIdAudioTrack.getAudioSessionId()) {
+ releaseKeepSessionIdAudioTrack();
+ }
+ if (keepSessionIdAudioTrack == null) {
+ keepSessionIdAudioTrack = initializeKeepSessionIdAudioTrack(audioSessionId);
+ }
+ }
+ }
+ if (this.audioSessionId != audioSessionId) {
+ this.audioSessionId = audioSessionId;
+ if (listener != null) {
+ listener.onAudioSessionId(audioSessionId);
+ }
+ }
+
+ playbackParameters =
+ canApplyPlaybackParameters
+ ? audioProcessorChain.applyPlaybackParameters(playbackParameters)
+ : PlaybackParameters.DEFAULT;
+ setupAudioProcessors();
+
+ audioTrackPositionTracker.setAudioTrack(
+ audioTrack, outputEncoding, outputPcmFrameSize, bufferSize);
+ setVolumeInternal();
+ }
+
+ @Override
+ public void play() {
+ playing = true;
+ if (isInitialized()) {
+ audioTrackPositionTracker.start();
+ audioTrack.play();
+ }
+ }
+
+ @Override
+ public void handleDiscontinuity() {
+ // Force resynchronization after a skipped buffer.
+ if (startMediaTimeState == START_IN_SYNC) {
+ startMediaTimeState = START_NEED_SYNC;
+ }
+ }
+
+ @Override
+ @SuppressWarnings("ReferenceEquality")
+ public boolean handleBuffer(ByteBuffer buffer, long presentationTimeUs)
+ throws InitializationException, WriteException {
+ Assertions.checkArgument(inputBuffer == null || buffer == inputBuffer);
+ if (!isInitialized()) {
+ initialize();
+ if (playing) {
+ play();
+ }
+ }
+
+ if (!audioTrackPositionTracker.mayHandleBuffer(getWrittenFrames())) {
+ return false;
+ }
+
+ if (inputBuffer == null) {
+ // We are seeing this buffer for the first time.
+ if (!buffer.hasRemaining()) {
+ // The buffer is empty.
+ return true;
+ }
+
+ if (!isInputPcm && framesPerEncodedSample == 0) {
+ // If this is the first encoded sample, calculate the sample size in frames.
+ framesPerEncodedSample = getFramesPerEncodedSample(outputEncoding, buffer);
+ if (framesPerEncodedSample == 0) {
+ // We still don't know the number of frames per sample, so drop the buffer.
+ // For TrueHD this can occur after some seek operations, as not every sample starts with
+ // a syncframe header. If we chunked samples together so the extracted samples always
+ // started with a syncframe header, the chunks would be too large.
+ return true;
+ }
+ }
+
+ if (afterDrainPlaybackParameters != null) {
+ if (!drainAudioProcessorsToEndOfStream()) {
+ // Don't process any more input until draining completes.
+ return false;
+ }
+ PlaybackParameters newPlaybackParameters = afterDrainPlaybackParameters;
+ afterDrainPlaybackParameters = null;
+ newPlaybackParameters = audioProcessorChain.applyPlaybackParameters(newPlaybackParameters);
+ // Store the position and corresponding media time from which the parameters will apply.
+ playbackParametersCheckpoints.add(
+ new PlaybackParametersCheckpoint(
+ newPlaybackParameters,
+ Math.max(0, presentationTimeUs),
+ framesToDurationUs(getWrittenFrames())));
+ // Update the set of active audio processors to take into account the new parameters.
+ setupAudioProcessors();
+ }
+
+ if (startMediaTimeState == START_NOT_SET) {
+ startMediaTimeUs = Math.max(0, presentationTimeUs);
+ startMediaTimeState = START_IN_SYNC;
+ } else {
+ // Sanity check that presentationTimeUs is consistent with the expected value.
+ long expectedPresentationTimeUs =
+ startMediaTimeUs + inputFramesToDurationUs(getSubmittedFrames());
+ if (startMediaTimeState == START_IN_SYNC
+ && Math.abs(expectedPresentationTimeUs - presentationTimeUs) > 200000) {
+ Log.e(TAG, "Discontinuity detected [expected " + expectedPresentationTimeUs + ", got "
+ + presentationTimeUs + "]");
+ startMediaTimeState = START_NEED_SYNC;
+ }
+ if (startMediaTimeState == START_NEED_SYNC) {
+ // Adjust startMediaTimeUs to be consistent with the current buffer's start time and the
+ // number of bytes submitted.
+ startMediaTimeUs += (presentationTimeUs - expectedPresentationTimeUs);
+ startMediaTimeState = START_IN_SYNC;
+ if (listener != null) {
+ listener.onPositionDiscontinuity();
+ }
+ }
+ }
+
+ if (isInputPcm) {
+ submittedPcmBytes += buffer.remaining();
+ } else {
+ submittedEncodedFrames += framesPerEncodedSample;
+ }
+
+ inputBuffer = buffer;
+ }
+
+ if (processingEnabled) {
+ processBuffers(presentationTimeUs);
+ } else {
+ writeBuffer(inputBuffer, presentationTimeUs);
+ }
+
+ if (!inputBuffer.hasRemaining()) {
+ inputBuffer = null;
+ return true;
+ }
+
+ if (audioTrackPositionTracker.isStalled(getWrittenFrames())) {
+ Log.w(TAG, "Resetting stalled audio track");
+ reset();
+ return true;
+ }
+
+ return false;
+ }
+
+ private void processBuffers(long avSyncPresentationTimeUs) throws WriteException {
+ int count = activeAudioProcessors.length;
+ int index = count;
+ while (index >= 0) {
+ ByteBuffer input = index > 0 ? outputBuffers[index - 1]
+ : (inputBuffer != null ? inputBuffer : AudioProcessor.EMPTY_BUFFER);
+ if (index == count) {
+ writeBuffer(input, avSyncPresentationTimeUs);
+ } else {
+ AudioProcessor audioProcessor = activeAudioProcessors[index];
+ audioProcessor.queueInput(input);
+ ByteBuffer output = audioProcessor.getOutput();
+ outputBuffers[index] = output;
+ if (output.hasRemaining()) {
+ // Handle the output as input to the next audio processor or the AudioTrack.
+ index++;
+ continue;
+ }
+ }
+
+ if (input.hasRemaining()) {
+ // The input wasn't consumed and no output was produced, so give up for now.
+ return;
+ }
+
+ // Get more input from upstream.
+ index--;
+ }
+ }
+
+ @SuppressWarnings("ReferenceEquality")
+ private void writeBuffer(ByteBuffer buffer, long avSyncPresentationTimeUs) throws WriteException {
+ if (!buffer.hasRemaining()) {
+ return;
+ }
+ if (outputBuffer != null) {
+ Assertions.checkArgument(outputBuffer == buffer);
+ } else {
+ outputBuffer = buffer;
+ if (Util.SDK_INT < 21) {
+ int bytesRemaining = buffer.remaining();
+ if (preV21OutputBuffer == null || preV21OutputBuffer.length < bytesRemaining) {
+ preV21OutputBuffer = new byte[bytesRemaining];
+ }
+ int originalPosition = buffer.position();
+ buffer.get(preV21OutputBuffer, 0, bytesRemaining);
+ buffer.position(originalPosition);
+ preV21OutputBufferOffset = 0;
+ }
+ }
+ int bytesRemaining = buffer.remaining();
+ int bytesWritten = 0;
+ if (Util.SDK_INT < 21) { // isInputPcm == true
+ // Work out how many bytes we can write without the risk of blocking.
+ int bytesToWrite = audioTrackPositionTracker.getAvailableBufferSize(writtenPcmBytes);
+ if (bytesToWrite > 0) {
+ bytesToWrite = Math.min(bytesRemaining, bytesToWrite);
+ bytesWritten = audioTrack.write(preV21OutputBuffer, preV21OutputBufferOffset, bytesToWrite);
+ if (bytesWritten > 0) {
+ preV21OutputBufferOffset += bytesWritten;
+ buffer.position(buffer.position() + bytesWritten);
+ }
+ }
+ } else if (tunneling) {
+ Assertions.checkState(avSyncPresentationTimeUs != C.TIME_UNSET);
+ bytesWritten = writeNonBlockingWithAvSyncV21(audioTrack, buffer, bytesRemaining,
+ avSyncPresentationTimeUs);
+ } else {
+ bytesWritten = writeNonBlockingV21(audioTrack, buffer, bytesRemaining);
+ }
+
+ lastFeedElapsedRealtimeMs = SystemClock.elapsedRealtime();
+
+ if (bytesWritten < 0) {
+ throw new WriteException(bytesWritten);
+ }
+
+ if (isInputPcm) {
+ writtenPcmBytes += bytesWritten;
+ }
+ if (bytesWritten == bytesRemaining) {
+ if (!isInputPcm) {
+ writtenEncodedFrames += framesPerEncodedSample;
+ }
+ outputBuffer = null;
+ }
+ }
+
+ @Override
+ public void playToEndOfStream() throws WriteException {
+ if (handledEndOfStream || !isInitialized()) {
+ return;
+ }
+
+ if (drainAudioProcessorsToEndOfStream()) {
+ // The audio processors have drained, so drain the underlying audio track.
+ audioTrackPositionTracker.handleEndOfStream(getWrittenFrames());
+ audioTrack.stop();
+ bytesUntilNextAvSync = 0;
+ handledEndOfStream = true;
+ }
+ }
+
+ private boolean drainAudioProcessorsToEndOfStream() throws WriteException {
+ boolean audioProcessorNeedsEndOfStream = false;
+ if (drainingAudioProcessorIndex == C.INDEX_UNSET) {
+ drainingAudioProcessorIndex = processingEnabled ? 0 : activeAudioProcessors.length;
+ audioProcessorNeedsEndOfStream = true;
+ }
+ while (drainingAudioProcessorIndex < activeAudioProcessors.length) {
+ AudioProcessor audioProcessor = activeAudioProcessors[drainingAudioProcessorIndex];
+ if (audioProcessorNeedsEndOfStream) {
+ audioProcessor.queueEndOfStream();
+ }
+ processBuffers(C.TIME_UNSET);
+ if (!audioProcessor.isEnded()) {
+ return false;
+ }
+ audioProcessorNeedsEndOfStream = true;
+ drainingAudioProcessorIndex++;
+ }
+
+ // Finish writing any remaining output to the track.
+ if (outputBuffer != null) {
+ writeBuffer(outputBuffer, C.TIME_UNSET);
+ if (outputBuffer != null) {
+ return false;
+ }
+ }
+ drainingAudioProcessorIndex = C.INDEX_UNSET;
+ return true;
+ }
+
+ @Override
+ public boolean isEnded() {
+ return !isInitialized() || (handledEndOfStream && !hasPendingData());
+ }
+
+ @Override
+ public boolean hasPendingData() {
+ return isInitialized() && audioTrackPositionTracker.hasPendingData(getWrittenFrames());
+ }
+
+ @Override
+ public PlaybackParameters setPlaybackParameters(PlaybackParameters playbackParameters) {
+ if (isInitialized() && !canApplyPlaybackParameters) {
+ this.playbackParameters = PlaybackParameters.DEFAULT;
+ return this.playbackParameters;
+ }
+ PlaybackParameters lastSetPlaybackParameters =
+ afterDrainPlaybackParameters != null
+ ? afterDrainPlaybackParameters
+ : !playbackParametersCheckpoints.isEmpty()
+ ? playbackParametersCheckpoints.getLast().playbackParameters
+ : this.playbackParameters;
+ if (!playbackParameters.equals(lastSetPlaybackParameters)) {
+ if (isInitialized()) {
+ // Drain the audio processors so we can determine the frame position at which the new
+ // parameters apply.
+ afterDrainPlaybackParameters = playbackParameters;
+ } else {
+ // Update the playback parameters now.
+ this.playbackParameters = audioProcessorChain.applyPlaybackParameters(playbackParameters);
+ }
+ }
+ return this.playbackParameters;
+ }
+
+ @Override
+ public PlaybackParameters getPlaybackParameters() {
+ return playbackParameters;
+ }
+
+ @Override
+ public void setAudioAttributes(AudioAttributes audioAttributes) {
+ if (this.audioAttributes.equals(audioAttributes)) {
+ return;
+ }
+ this.audioAttributes = audioAttributes;
+ if (tunneling) {
+ // The audio attributes are ignored in tunneling mode, so no need to reset.
+ return;
+ }
+ reset();
+ audioSessionId = C.AUDIO_SESSION_ID_UNSET;
+ }
+
+ @Override
+ public void setAudioSessionId(int audioSessionId) {
+ if (this.audioSessionId != audioSessionId) {
+ this.audioSessionId = audioSessionId;
+ reset();
+ }
+ }
+
+ @Override
+ public void enableTunnelingV21(int tunnelingAudioSessionId) {
+ Assertions.checkState(Util.SDK_INT >= 21);
+ if (!tunneling || audioSessionId != tunnelingAudioSessionId) {
+ tunneling = true;
+ audioSessionId = tunnelingAudioSessionId;
+ reset();
+ }
+ }
+
+ @Override
+ public void disableTunneling() {
+ if (tunneling) {
+ tunneling = false;
+ audioSessionId = C.AUDIO_SESSION_ID_UNSET;
+ reset();
+ }
+ }
+
+ @Override
+ public void setVolume(float volume) {
+ if (this.volume != volume) {
+ this.volume = volume;
+ setVolumeInternal();
+ }
+ }
+
+ private void setVolumeInternal() {
+ if (!isInitialized()) {
+ // Do nothing.
+ } else if (Util.SDK_INT >= 21) {
+ setVolumeInternalV21(audioTrack, volume);
+ } else {
+ setVolumeInternalV3(audioTrack, volume);
+ }
+ }
+
+ @Override
+ public void pause() {
+ playing = false;
+ if (isInitialized() && audioTrackPositionTracker.pause()) {
+ audioTrack.pause();
+ }
+ }
+
+ @Override
+ public void reset() {
+ if (isInitialized()) {
+ submittedPcmBytes = 0;
+ submittedEncodedFrames = 0;
+ writtenPcmBytes = 0;
+ writtenEncodedFrames = 0;
+ framesPerEncodedSample = 0;
+ if (afterDrainPlaybackParameters != null) {
+ playbackParameters = afterDrainPlaybackParameters;
+ afterDrainPlaybackParameters = null;
+ } else if (!playbackParametersCheckpoints.isEmpty()) {
+ playbackParameters = playbackParametersCheckpoints.getLast().playbackParameters;
+ }
+ playbackParametersCheckpoints.clear();
+ playbackParametersOffsetUs = 0;
+ playbackParametersPositionUs = 0;
+ inputBuffer = null;
+ outputBuffer = null;
+ flushAudioProcessors();
+ handledEndOfStream = false;
+ drainingAudioProcessorIndex = C.INDEX_UNSET;
+ avSyncHeader = null;
+ bytesUntilNextAvSync = 0;
+ startMediaTimeState = START_NOT_SET;
+ if (audioTrackPositionTracker.isPlaying()) {
+ audioTrack.pause();
+ }
+ // AudioTrack.release can take some time, so we call it on a background thread.
+ final AudioTrack toRelease = audioTrack;
+ audioTrack = null;
+ audioTrackPositionTracker.reset();
+ releasingConditionVariable.close();
+ new Thread() {
+ @Override
+ public void run() {
+ try {
+ toRelease.flush();
+ toRelease.release();
+ } finally {
+ releasingConditionVariable.open();
+ }
+ }
+ }.start();
+ }
+ }
+
+ @Override
+ public void release() {
+ reset();
+ releaseKeepSessionIdAudioTrack();
+ for (AudioProcessor audioProcessor : toIntPcmAvailableAudioProcessors) {
+ audioProcessor.reset();
+ }
+ for (AudioProcessor audioProcessor : toFloatPcmAvailableAudioProcessors) {
+ audioProcessor.reset();
+ }
+ audioSessionId = C.AUDIO_SESSION_ID_UNSET;
+ playing = false;
+ }
+
+ /**
+ * Releases {@link #keepSessionIdAudioTrack} asynchronously, if it is non-{@code null}.
+ */
+ private void releaseKeepSessionIdAudioTrack() {
+ if (keepSessionIdAudioTrack == null) {
+ return;
+ }
+
+ // AudioTrack.release can take some time, so we call it on a background thread.
+ final AudioTrack toRelease = keepSessionIdAudioTrack;
+ keepSessionIdAudioTrack = null;
+ new Thread() {
+ @Override
+ public void run() {
+ toRelease.release();
+ }
+ }.start();
+ }
+
+ private long applySpeedup(long positionUs) {
+ @Nullable PlaybackParametersCheckpoint checkpoint = null;
+ while (!playbackParametersCheckpoints.isEmpty()
+ && positionUs >= playbackParametersCheckpoints.getFirst().positionUs) {
+ checkpoint = playbackParametersCheckpoints.remove();
+ }
+ if (checkpoint != null) {
+ // We are playing (or about to play) media with the new playback parameters, so update them.
+ playbackParameters = checkpoint.playbackParameters;
+ playbackParametersPositionUs = checkpoint.positionUs;
+ playbackParametersOffsetUs = checkpoint.mediaTimeUs - startMediaTimeUs;
+ }
+
+ if (playbackParameters.speed == 1f) {
+ return positionUs + playbackParametersOffsetUs - playbackParametersPositionUs;
+ }
+
+ if (playbackParametersCheckpoints.isEmpty()) {
+ return playbackParametersOffsetUs
+ + audioProcessorChain.getMediaDuration(positionUs - playbackParametersPositionUs);
+ }
+
+ // We are playing data at a previous playback speed, so fall back to multiplying by the speed.
+ return playbackParametersOffsetUs
+ + Util.getMediaDurationForPlayoutDuration(
+ positionUs - playbackParametersPositionUs, playbackParameters.speed);
+ }
+
+ private long applySkipping(long positionUs) {
+ return positionUs + framesToDurationUs(audioProcessorChain.getSkippedOutputFrameCount());
+ }
+
+ private boolean isInitialized() {
+ return audioTrack != null;
+ }
+
+ private long inputFramesToDurationUs(long frameCount) {
+ return (frameCount * C.MICROS_PER_SECOND) / inputSampleRate;
+ }
+
+ private long framesToDurationUs(long frameCount) {
+ return (frameCount * C.MICROS_PER_SECOND) / outputSampleRate;
+ }
+
+ private long durationUsToFrames(long durationUs) {
+ return (durationUs * outputSampleRate) / C.MICROS_PER_SECOND;
+ }
+
+ private long getSubmittedFrames() {
+ return isInputPcm ? (submittedPcmBytes / pcmFrameSize) : submittedEncodedFrames;
+ }
+
+ private long getWrittenFrames() {
+ return isInputPcm ? (writtenPcmBytes / outputPcmFrameSize) : writtenEncodedFrames;
+ }
+
+ private AudioTrack initializeAudioTrack() throws InitializationException {
+ AudioTrack audioTrack;
+ if (Util.SDK_INT >= 21) {
+ audioTrack = createAudioTrackV21();
+ } else {
+ int streamType = Util.getStreamTypeForAudioUsage(audioAttributes.usage);
+ if (audioSessionId == C.AUDIO_SESSION_ID_UNSET) {
+ audioTrack =
+ new AudioTrack(
+ streamType,
+ outputSampleRate,
+ outputChannelConfig,
+ outputEncoding,
+ bufferSize,
+ MODE_STREAM);
+ } else {
+ // Re-attach to the same audio session.
+ audioTrack =
+ new AudioTrack(
+ streamType,
+ outputSampleRate,
+ outputChannelConfig,
+ outputEncoding,
+ bufferSize,
+ MODE_STREAM,
+ audioSessionId);
+ }
+ }
+
+ int state = audioTrack.getState();
+ if (state != STATE_INITIALIZED) {
+ try {
+ audioTrack.release();
+ } catch (Exception e) {
+ // The track has already failed to initialize, so it wouldn't be that surprising if release
+ // were to fail too. Swallow the exception.
+ }
+ throw new InitializationException(state, outputSampleRate, outputChannelConfig, bufferSize);
+ }
+ return audioTrack;
+ }
+
+ @TargetApi(21)
+ private AudioTrack createAudioTrackV21() {
+ android.media.AudioAttributes attributes;
+ if (tunneling) {
+ attributes = new android.media.AudioAttributes.Builder()
+ .setContentType(android.media.AudioAttributes.CONTENT_TYPE_MOVIE)
+ .setFlags(android.media.AudioAttributes.FLAG_HW_AV_SYNC)
+ .setUsage(android.media.AudioAttributes.USAGE_MEDIA)
+ .build();
+ } else {
+ attributes = audioAttributes.getAudioAttributesV21();
+ }
+ AudioFormat format =
+ new AudioFormat.Builder()
+ .setChannelMask(outputChannelConfig)
+ .setEncoding(outputEncoding)
+ .setSampleRate(outputSampleRate)
+ .build();
+ int audioSessionId = this.audioSessionId != C.AUDIO_SESSION_ID_UNSET ? this.audioSessionId
+ : AudioManager.AUDIO_SESSION_ID_GENERATE;
+ return new AudioTrack(attributes, format, bufferSize, MODE_STREAM, audioSessionId);
+ }
+
+ private AudioTrack initializeKeepSessionIdAudioTrack(int audioSessionId) {
+ int sampleRate = 4000; // Equal to private AudioTrack.MIN_SAMPLE_RATE.
+ int channelConfig = AudioFormat.CHANNEL_OUT_MONO;
+ @C.PcmEncoding int encoding = C.ENCODING_PCM_16BIT;
+ int bufferSize = 2; // Use a two byte buffer, as it is not actually used for playback.
+ return new AudioTrack(C.STREAM_TYPE_DEFAULT, sampleRate, channelConfig, encoding, bufferSize,
+ MODE_STATIC, audioSessionId);
+ }
+
+ private AudioProcessor[] getAvailableAudioProcessors() {
+ return shouldConvertHighResIntPcmToFloat
+ ? toFloatPcmAvailableAudioProcessors
+ : toIntPcmAvailableAudioProcessors;
+ }
+
+ private static int getFramesPerEncodedSample(@C.Encoding int encoding, ByteBuffer buffer) {
+ if (encoding == C.ENCODING_DTS || encoding == C.ENCODING_DTS_HD) {
+ return DtsUtil.parseDtsAudioSampleCount(buffer);
+ } else if (encoding == C.ENCODING_AC3) {
+ return Ac3Util.getAc3SyncframeAudioSampleCount();
+ } else if (encoding == C.ENCODING_E_AC3) {
+ return Ac3Util.parseEAc3SyncframeAudioSampleCount(buffer);
+ } else if (encoding == C.ENCODING_DOLBY_TRUEHD) {
+ int syncframeOffset = Ac3Util.findTrueHdSyncframeOffset(buffer);
+ return syncframeOffset == C.INDEX_UNSET
+ ? 0
+ : (Ac3Util.parseTrueHdSyncframeAudioSampleCount(buffer, syncframeOffset)
+ * Ac3Util.TRUEHD_RECHUNK_SAMPLE_COUNT);
+ } else {
+ throw new IllegalStateException("Unexpected audio encoding: " + encoding);
+ }
+ }
+
+ @TargetApi(21)
+ private static int writeNonBlockingV21(AudioTrack audioTrack, ByteBuffer buffer, int size) {
+ return audioTrack.write(buffer, size, WRITE_NON_BLOCKING);
+ }
+
+ @TargetApi(21)
+ private int writeNonBlockingWithAvSyncV21(AudioTrack audioTrack, ByteBuffer buffer, int size,
+ long presentationTimeUs) {
+ // TODO: Uncomment this when [Internal ref: b/33627517] is clarified or fixed.
+ // if (Util.SDK_INT >= 23) {
+ // // The underlying platform AudioTrack writes AV sync headers directly.
+ // return audioTrack.write(buffer, size, WRITE_NON_BLOCKING, presentationTimeUs * 1000);
+ // }
+ if (avSyncHeader == null) {
+ avSyncHeader = ByteBuffer.allocate(16);
+ avSyncHeader.order(ByteOrder.BIG_ENDIAN);
+ avSyncHeader.putInt(0x55550001);
+ }
+ if (bytesUntilNextAvSync == 0) {
+ avSyncHeader.putInt(4, size);
+ avSyncHeader.putLong(8, presentationTimeUs * 1000);
+ avSyncHeader.position(0);
+ bytesUntilNextAvSync = size;
+ }
+ int avSyncHeaderBytesRemaining = avSyncHeader.remaining();
+ if (avSyncHeaderBytesRemaining > 0) {
+ int result = audioTrack.write(avSyncHeader, avSyncHeaderBytesRemaining, WRITE_NON_BLOCKING);
+ if (result < 0) {
+ bytesUntilNextAvSync = 0;
+ return result;
+ }
+ if (result < avSyncHeaderBytesRemaining) {
+ return 0;
+ }
+ }
+ int result = writeNonBlockingV21(audioTrack, buffer, size);
+ if (result < 0) {
+ bytesUntilNextAvSync = 0;
+ return result;
+ }
+ bytesUntilNextAvSync -= result;
+ return result;
+ }
+
+ @TargetApi(21)
+ private static void setVolumeInternalV21(AudioTrack audioTrack, float volume) {
+ audioTrack.setVolume(volume);
+ }
+
+ @SuppressWarnings("deprecation")
+ private static void setVolumeInternalV3(AudioTrack audioTrack, float volume) {
+ audioTrack.setStereoVolume(volume, volume);
+ }
+
+ /**
+ * Stores playback parameters with the position and media time at which they apply.
+ */
+ private static final class PlaybackParametersCheckpoint {
+
+ private final PlaybackParameters playbackParameters;
+ private final long mediaTimeUs;
+ private final long positionUs;
+
+ private PlaybackParametersCheckpoint(PlaybackParameters playbackParameters, long mediaTimeUs,
+ long positionUs) {
+ this.playbackParameters = playbackParameters;
+ this.mediaTimeUs = mediaTimeUs;
+ this.positionUs = positionUs;
+ }
+
+ }
+
+ private final class PositionTrackerListener implements AudioTrackPositionTracker.Listener {
+
+ @Override
+ public void onPositionFramesMismatch(
+ long audioTimestampPositionFrames,
+ long audioTimestampSystemTimeUs,
+ long systemTimeUs,
+ long playbackPositionUs) {
+ String message =
+ "Spurious audio timestamp (frame position mismatch): "
+ + audioTimestampPositionFrames
+ + ", "
+ + audioTimestampSystemTimeUs
+ + ", "
+ + systemTimeUs
+ + ", "
+ + playbackPositionUs
+ + ", "
+ + getSubmittedFrames()
+ + ", "
+ + getWrittenFrames();
+ if (failOnSpuriousAudioTimestamp) {
+ throw new InvalidAudioTrackTimestampException(message);
+ }
+ Log.w(TAG, message);
+ }
+
+ @Override
+ public void onSystemTimeUsMismatch(
+ long audioTimestampPositionFrames,
+ long audioTimestampSystemTimeUs,
+ long systemTimeUs,
+ long playbackPositionUs) {
+ String message =
+ "Spurious audio timestamp (system clock mismatch): "
+ + audioTimestampPositionFrames
+ + ", "
+ + audioTimestampSystemTimeUs
+ + ", "
+ + systemTimeUs
+ + ", "
+ + playbackPositionUs
+ + ", "
+ + getSubmittedFrames()
+ + ", "
+ + getWrittenFrames();
+ if (failOnSpuriousAudioTimestamp) {
+ throw new InvalidAudioTrackTimestampException(message);
+ }
+ Log.w(TAG, message);
+ }
+
+ @Override
+ public void onInvalidLatency(long latencyUs) {
+ Log.w(TAG, "Ignoring impossibly large audio latency: " + latencyUs);
+ }
+
+ @Override
+ public void onUnderrun(int bufferSize, long bufferSizeMs) {
+ if (listener != null) {
+ long elapsedSinceLastFeedMs = SystemClock.elapsedRealtime() - lastFeedElapsedRealtimeMs;
+ listener.onUnderrun(bufferSize, bufferSizeMs, elapsedSinceLastFeedMs);
+ }
+ }
+ }
+}
diff --git a/library/core/src/main/java/com/google/android/exoplayer2/audio/DtsUtil.java b/library/core/src/main/java/com/google/android/exoplayer2/audio/DtsUtil.java
index 9e9b927fab..dc07b1a646 100644
--- a/library/core/src/main/java/com/google/android/exoplayer2/audio/DtsUtil.java
+++ b/library/core/src/main/java/com/google/android/exoplayer2/audio/DtsUtil.java
@@ -20,12 +20,22 @@ import com.google.android.exoplayer2.drm.DrmInitData;
import com.google.android.exoplayer2.util.MimeTypes;
import com.google.android.exoplayer2.util.ParsableBitArray;
import java.nio.ByteBuffer;
+import java.util.Arrays;
/**
* Utility methods for parsing DTS frames.
*/
public final class DtsUtil {
+ private static final int SYNC_VALUE_BE = 0x7FFE8001;
+ private static final int SYNC_VALUE_14B_BE = 0x1FFFE800;
+ private static final int SYNC_VALUE_LE = 0xFE7F0180;
+ private static final int SYNC_VALUE_14B_LE = 0xFF1F00E8;
+ private static final byte FIRST_BYTE_BE = (byte) (SYNC_VALUE_BE >>> 24);
+ private static final byte FIRST_BYTE_14B_BE = (byte) (SYNC_VALUE_14B_BE >>> 24);
+ private static final byte FIRST_BYTE_LE = (byte) (SYNC_VALUE_LE >>> 24);
+ private static final byte FIRST_BYTE_14B_LE = (byte) (SYNC_VALUE_14B_LE >>> 24);
+
/**
* Maps AMODE to the number of channels. See ETSI TS 102 114 table 5.4.
*/
@@ -45,6 +55,20 @@ public final class DtsUtil {
384, 448, 512, 640, 768, 896, 1024, 1152, 1280, 1536, 1920, 2048, 2304, 2560, 2688, 2816,
2823, 2944, 3072, 3840, 4096, 6144, 7680};
+ /**
+ * Returns whether a given integer matches a DTS sync word. Synchronization and storage modes are
+ * defined in ETSI TS 102 114 V1.1.1 (2002-08), Section 5.3.
+ *
+ * @param word An integer.
+ * @return Whether a given integer matches a DTS sync word.
+ */
+ public static boolean isSyncWord(int word) {
+ return word == SYNC_VALUE_BE
+ || word == SYNC_VALUE_LE
+ || word == SYNC_VALUE_14B_BE
+ || word == SYNC_VALUE_14B_LE;
+ }
+
/**
* Returns the DTS format given {@code data} containing the DTS frame according to ETSI TS 102 114
* subsections 5.3/5.4.
@@ -57,8 +81,8 @@ public final class DtsUtil {
*/
public static Format parseDtsFormat(byte[] frame, String trackId, String language,
DrmInitData drmInitData) {
- ParsableBitArray frameBits = new ParsableBitArray(frame);
- frameBits.skipBits(4 * 8 + 1 + 5 + 1 + 7 + 14); // SYNC, FTYPE, SHORT, CPF, NBLKS, FSIZE
+ ParsableBitArray frameBits = getNormalizedFrameHeader(frame);
+ frameBits.skipBits(32 + 1 + 5 + 1 + 7 + 14); // SYNC, FTYPE, SHORT, CPF, NBLKS, FSIZE
int amode = frameBits.readBits(6);
int channelCount = CHANNELS_BY_AMODE[amode];
int sfreq = frameBits.readBits(4);
@@ -79,8 +103,21 @@ public final class DtsUtil {
* @return The number of audio samples represented by the frame.
*/
public static int parseDtsAudioSampleCount(byte[] data) {
- // See ETSI TS 102 114 subsection 5.4.1.
- int nblks = ((data[4] & 0x01) << 6) | ((data[5] & 0xFC) >> 2);
+ int nblks;
+ switch (data[0]) {
+ case FIRST_BYTE_LE:
+ nblks = ((data[5] & 0x01) << 6) | ((data[4] & 0xFC) >> 2);
+ break;
+ case FIRST_BYTE_14B_LE:
+ nblks = ((data[4] & 0x07) << 4) | ((data[7] & 0x3C) >> 2);
+ break;
+ case FIRST_BYTE_14B_BE:
+ nblks = ((data[5] & 0x07) << 4) | ((data[6] & 0x3C) >> 2);
+ break;
+ default:
+ // We blindly assume FIRST_BYTE_BE if none of the others match.
+ nblks = ((data[4] & 0x01) << 6) | ((data[5] & 0xFC) >> 2);
+ }
return (nblks + 1) * 32;
}
@@ -94,8 +131,21 @@ public final class DtsUtil {
public static int parseDtsAudioSampleCount(ByteBuffer buffer) {
// See ETSI TS 102 114 subsection 5.4.1.
int position = buffer.position();
- int nblks = ((buffer.get(position + 4) & 0x01) << 6)
- | ((buffer.get(position + 5) & 0xFC) >> 2);
+ int nblks;
+ switch (buffer.get(position)) {
+ case FIRST_BYTE_LE:
+ nblks = ((buffer.get(position + 5) & 0x01) << 6) | ((buffer.get(position + 4) & 0xFC) >> 2);
+ break;
+ case FIRST_BYTE_14B_LE:
+ nblks = ((buffer.get(position + 4) & 0x07) << 4) | ((buffer.get(position + 7) & 0x3C) >> 2);
+ break;
+ case FIRST_BYTE_14B_BE:
+ nblks = ((buffer.get(position + 5) & 0x07) << 4) | ((buffer.get(position + 6) & 0x3C) >> 2);
+ break;
+ default:
+ // We blindly assume FIRST_BYTE_BE if none of the others match.
+ nblks = ((buffer.get(position + 4) & 0x01) << 6) | ((buffer.get(position + 5) & 0xFC) >> 2);
+ }
return (nblks + 1) * 32;
}
@@ -106,9 +156,59 @@ public final class DtsUtil {
* @return The frame's size in bytes.
*/
public static int getDtsFrameSize(byte[] data) {
- return (((data[5] & 0x02) << 12)
- | ((data[6] & 0xFF) << 4)
- | ((data[7] & 0xF0) >> 4)) + 1;
+ int fsize;
+ boolean uses14BitPerWord = false;
+ switch (data[0]) {
+ case FIRST_BYTE_14B_BE:
+ fsize = (((data[6] & 0x03) << 12) | ((data[7] & 0xFF) << 4) | ((data[8] & 0x3C) >> 2)) + 1;
+ uses14BitPerWord = true;
+ break;
+ case FIRST_BYTE_LE:
+ fsize = (((data[4] & 0x03) << 12) | ((data[7] & 0xFF) << 4) | ((data[6] & 0xF0) >> 4)) + 1;
+ break;
+ case FIRST_BYTE_14B_LE:
+ fsize = (((data[7] & 0x03) << 12) | ((data[6] & 0xFF) << 4) | ((data[9] & 0x3C) >> 2)) + 1;
+ uses14BitPerWord = true;
+ break;
+ default:
+ // We blindly assume FIRST_BYTE_BE if none of the others match.
+ fsize = (((data[5] & 0x03) << 12) | ((data[6] & 0xFF) << 4) | ((data[7] & 0xF0) >> 4)) + 1;
+ }
+
+ // If the frame is stored in 14-bit mode, adjust the frame size to reflect the actual byte size.
+ return uses14BitPerWord ? fsize * 16 / 14 : fsize;
+ }
+
+ private static ParsableBitArray getNormalizedFrameHeader(byte[] frameHeader) {
+ if (frameHeader[0] == FIRST_BYTE_BE) {
+ // The frame is already 16-bit mode, big endian.
+ return new ParsableBitArray(frameHeader);
+ }
+ // Data is not normalized, but we don't want to modify frameHeader.
+ frameHeader = Arrays.copyOf(frameHeader, frameHeader.length);
+ if (isLittleEndianFrameHeader(frameHeader)) {
+ // Change endianness.
+ for (int i = 0; i < frameHeader.length - 1; i += 2) {
+ byte temp = frameHeader[i];
+ frameHeader[i] = frameHeader[i + 1];
+ frameHeader[i + 1] = temp;
+ }
+ }
+ ParsableBitArray frameBits = new ParsableBitArray(frameHeader);
+ if (frameHeader[0] == (byte) (SYNC_VALUE_14B_BE >> 24)) {
+ // Discard the 2 most significant bits of each 16 bit word.
+ ParsableBitArray scratchBits = new ParsableBitArray(frameHeader);
+ while (scratchBits.bitsLeft() >= 16) {
+ scratchBits.skipBits(2);
+ frameBits.putInt(scratchBits.readBits(14), 14);
+ }
+ }
+ frameBits.reset(frameHeader);
+ return frameBits;
+ }
+
+ private static boolean isLittleEndianFrameHeader(byte[] frameHeader) {
+ return frameHeader[0] == FIRST_BYTE_LE || frameHeader[0] == FIRST_BYTE_14B_LE;
}
private DtsUtil() {}
diff --git a/library/core/src/main/java/com/google/android/exoplayer2/audio/FloatResamplingAudioProcessor.java b/library/core/src/main/java/com/google/android/exoplayer2/audio/FloatResamplingAudioProcessor.java
new file mode 100644
index 0000000000..e3c91cd344
--- /dev/null
+++ b/library/core/src/main/java/com/google/android/exoplayer2/audio/FloatResamplingAudioProcessor.java
@@ -0,0 +1,170 @@
+/*
+ * Copyright (C) 2018 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package com.google.android.exoplayer2.audio;
+
+import com.google.android.exoplayer2.C;
+import com.google.android.exoplayer2.Format;
+import com.google.android.exoplayer2.util.Util;
+import java.nio.ByteBuffer;
+import java.nio.ByteOrder;
+
+/**
+ * An {@link AudioProcessor} that converts 24-bit and 32-bit integer PCM audio to 32-bit float PCM
+ * audio.
+ */
+/* package */ final class FloatResamplingAudioProcessor implements AudioProcessor {
+
+ private static final int FLOAT_NAN_AS_INT = Float.floatToIntBits(Float.NaN);
+ private static final double PCM_32_BIT_INT_TO_PCM_32_BIT_FLOAT_FACTOR = 1.0 / 0x7FFFFFFF;
+
+ private int sampleRateHz;
+ private int channelCount;
+ private @C.PcmEncoding int sourceEncoding;
+ private ByteBuffer buffer;
+ private ByteBuffer outputBuffer;
+ private boolean inputEnded;
+
+ /** Creates a new audio processor that converts audio data to {@link C#ENCODING_PCM_FLOAT}. */
+ public FloatResamplingAudioProcessor() {
+ sampleRateHz = Format.NO_VALUE;
+ channelCount = Format.NO_VALUE;
+ sourceEncoding = C.ENCODING_INVALID;
+ buffer = EMPTY_BUFFER;
+ outputBuffer = EMPTY_BUFFER;
+ }
+
+ @Override
+ public boolean configure(int sampleRateHz, int channelCount, @C.Encoding int encoding)
+ throws UnhandledFormatException {
+ if (!Util.isEncodingHighResolutionIntegerPcm(encoding)) {
+ throw new UnhandledFormatException(sampleRateHz, channelCount, encoding);
+ }
+ if (this.sampleRateHz == sampleRateHz
+ && this.channelCount == channelCount
+ && sourceEncoding == encoding) {
+ return false;
+ }
+ this.sampleRateHz = sampleRateHz;
+ this.channelCount = channelCount;
+ sourceEncoding = encoding;
+ return true;
+ }
+
+ @Override
+ public boolean isActive() {
+ return Util.isEncodingHighResolutionIntegerPcm(sourceEncoding);
+ }
+
+ @Override
+ public int getOutputChannelCount() {
+ return channelCount;
+ }
+
+ @Override
+ public int getOutputEncoding() {
+ return C.ENCODING_PCM_FLOAT;
+ }
+
+ @Override
+ public int getOutputSampleRateHz() {
+ return sampleRateHz;
+ }
+
+ @Override
+ public void queueInput(ByteBuffer inputBuffer) {
+ boolean isInput32Bit = sourceEncoding == C.ENCODING_PCM_32BIT;
+ int position = inputBuffer.position();
+ int limit = inputBuffer.limit();
+ int size = limit - position;
+
+ int resampledSize = isInput32Bit ? size : (size / 3) * 4;
+ if (buffer.capacity() < resampledSize) {
+ buffer = ByteBuffer.allocateDirect(resampledSize).order(ByteOrder.nativeOrder());
+ } else {
+ buffer.clear();
+ }
+ if (isInput32Bit) {
+ for (int i = position; i < limit; i += 4) {
+ int pcm32BitInteger =
+ (inputBuffer.get(i) & 0xFF)
+ | ((inputBuffer.get(i + 1) & 0xFF) << 8)
+ | ((inputBuffer.get(i + 2) & 0xFF) << 16)
+ | ((inputBuffer.get(i + 3) & 0xFF) << 24);
+ writePcm32BitFloat(pcm32BitInteger, buffer);
+ }
+ } else {
+ for (int i = position; i < limit; i += 3) {
+ int pcm32BitInteger =
+ ((inputBuffer.get(i) & 0xFF) << 8)
+ | ((inputBuffer.get(i + 1) & 0xFF) << 16)
+ | ((inputBuffer.get(i + 2) & 0xFF) << 24);
+ writePcm32BitFloat(pcm32BitInteger, buffer);
+ }
+ }
+
+ inputBuffer.position(inputBuffer.limit());
+ buffer.flip();
+ outputBuffer = buffer;
+ }
+
+ @Override
+ public void queueEndOfStream() {
+ inputEnded = true;
+ }
+
+ @Override
+ public ByteBuffer getOutput() {
+ ByteBuffer outputBuffer = this.outputBuffer;
+ this.outputBuffer = EMPTY_BUFFER;
+ return outputBuffer;
+ }
+
+ @SuppressWarnings("ReferenceEquality")
+ @Override
+ public boolean isEnded() {
+ return inputEnded && outputBuffer == EMPTY_BUFFER;
+ }
+
+ @Override
+ public void flush() {
+ outputBuffer = EMPTY_BUFFER;
+ inputEnded = false;
+ }
+
+ @Override
+ public void reset() {
+ flush();
+ sampleRateHz = Format.NO_VALUE;
+ channelCount = Format.NO_VALUE;
+ sourceEncoding = C.ENCODING_INVALID;
+ buffer = EMPTY_BUFFER;
+ }
+
+ /**
+ * Converts the provided 32-bit integer to a 32-bit float value and writes it to {@code buffer}.
+ *
+ * @param pcm32BitInt The 32-bit integer value to convert to 32-bit float in [-1.0, 1.0].
+ * @param buffer The output buffer.
+ */
+ private static void writePcm32BitFloat(int pcm32BitInt, ByteBuffer buffer) {
+ float pcm32BitFloat = (float) (PCM_32_BIT_INT_TO_PCM_32_BIT_FLOAT_FACTOR * pcm32BitInt);
+ int floatBits = Float.floatToIntBits(pcm32BitFloat);
+ if (floatBits == FLOAT_NAN_AS_INT) {
+ floatBits = Float.floatToIntBits((float) 0.0);
+ }
+ buffer.putInt(floatBits);
+ }
+}
diff --git a/library/core/src/main/java/com/google/android/exoplayer2/audio/MediaCodecAudioRenderer.java b/library/core/src/main/java/com/google/android/exoplayer2/audio/MediaCodecAudioRenderer.java
index e146238dcc..9ab066ee7d 100644
--- a/library/core/src/main/java/com/google/android/exoplayer2/audio/MediaCodecAudioRenderer.java
+++ b/library/core/src/main/java/com/google/android/exoplayer2/audio/MediaCodecAudioRenderer.java
@@ -15,7 +15,10 @@
*/
package com.google.android.exoplayer2.audio;
+import android.annotation.SuppressLint;
import android.annotation.TargetApi;
+import android.content.Context;
+import android.content.pm.PackageManager;
import android.media.MediaCodec;
import android.media.MediaCrypto;
import android.media.MediaFormat;
@@ -24,45 +27,73 @@ import android.os.Handler;
import android.support.annotation.Nullable;
import com.google.android.exoplayer2.C;
import com.google.android.exoplayer2.ExoPlaybackException;
+import com.google.android.exoplayer2.ExoPlayer;
import com.google.android.exoplayer2.Format;
import com.google.android.exoplayer2.PlaybackParameters;
+import com.google.android.exoplayer2.PlayerMessage.Target;
import com.google.android.exoplayer2.audio.AudioRendererEventListener.EventDispatcher;
+import com.google.android.exoplayer2.decoder.DecoderInputBuffer;
+import com.google.android.exoplayer2.drm.DrmInitData;
import com.google.android.exoplayer2.drm.DrmSessionManager;
import com.google.android.exoplayer2.drm.FrameworkMediaCrypto;
import com.google.android.exoplayer2.mediacodec.MediaCodecInfo;
import com.google.android.exoplayer2.mediacodec.MediaCodecRenderer;
import com.google.android.exoplayer2.mediacodec.MediaCodecSelector;
import com.google.android.exoplayer2.mediacodec.MediaCodecUtil.DecoderQueryException;
+import com.google.android.exoplayer2.mediacodec.MediaFormatUtil;
import com.google.android.exoplayer2.util.MediaClock;
import com.google.android.exoplayer2.util.MimeTypes;
import com.google.android.exoplayer2.util.Util;
import java.nio.ByteBuffer;
/**
- * Decodes and renders audio using {@link MediaCodec} and {@link AudioTrack}.
+ * Decodes and renders audio using {@link MediaCodec} and an {@link AudioSink}.
+ *
+ * <p>This renderer accepts the following messages sent via {@link ExoPlayer#createMessage(Target)}
+ * on the playback thread:
+ *
+ * <ul>
+ *   <li>Message with type {@link C#MSG_SET_VOLUME} to set the volume. The message payload should be
+ *       a {@link Float} with 0 being silence and 1 being unity gain.
+ *   <li>Message with type {@link C#MSG_SET_AUDIO_ATTRIBUTES} to set the audio attributes. The
+ *       message payload should be an {@link com.google.android.exoplayer2.audio.AudioAttributes}
+ *       instance that will configure the underlying audio track.
+ * </ul>
+ *
+ */
@TargetApi(16)
public class MediaCodecAudioRenderer extends MediaCodecRenderer implements MediaClock {
+ private final Context context;
private final EventDispatcher eventDispatcher;
- private final AudioTrack audioTrack;
+ private final AudioSink audioSink;
+ private int codecMaxInputSize;
private boolean passthroughEnabled;
private boolean codecNeedsDiscardChannelsWorkaround;
private android.media.MediaFormat passthroughMediaFormat;
+ @C.Encoding
private int pcmEncoding;
private int channelCount;
+ private int encoderDelay;
+ private int encoderPadding;
private long currentPositionUs;
+ private boolean allowFirstBufferPositionDiscontinuity;
private boolean allowPositionDiscontinuity;
/**
+ * @param context A context.
* @param mediaCodecSelector A decoder selector.
*/
- public MediaCodecAudioRenderer(MediaCodecSelector mediaCodecSelector) {
- this(mediaCodecSelector, null, true);
+ public MediaCodecAudioRenderer(Context context, MediaCodecSelector mediaCodecSelector) {
+ this(
+ context,
+ mediaCodecSelector,
+ /* drmSessionManager= */ null,
+ /* playClearSamplesWithoutKeys= */ false);
}
/**
+ * @param context A context.
* @param mediaCodecSelector A decoder selector.
* @param drmSessionManager For use with encrypted content. May be null if support for encrypted
* content is not required.
@@ -72,24 +103,43 @@ public class MediaCodecAudioRenderer extends MediaCodecRenderer implements Media
* permitted to play clear regions of encrypted media files before {@code drmSessionManager}
* has obtained the keys necessary to decrypt encrypted regions of the media.
*/
- public MediaCodecAudioRenderer(MediaCodecSelector mediaCodecSelector,
+ public MediaCodecAudioRenderer(
+ Context context,
+ MediaCodecSelector mediaCodecSelector,
@Nullable DrmSessionManager drmSessionManager,
boolean playClearSamplesWithoutKeys) {
- this(mediaCodecSelector, drmSessionManager, playClearSamplesWithoutKeys, null, null);
+ this(
+ context,
+ mediaCodecSelector,
+ drmSessionManager,
+ playClearSamplesWithoutKeys,
+ /* eventHandler= */ null,
+ /* eventListener= */ null);
}
/**
+ * @param context A context.
* @param mediaCodecSelector A decoder selector.
* @param eventHandler A handler to use when delivering events to {@code eventListener}. May be
* null if delivery of events is not required.
* @param eventListener A listener of events. May be null if delivery of events is not required.
*/
- public MediaCodecAudioRenderer(MediaCodecSelector mediaCodecSelector,
- @Nullable Handler eventHandler, @Nullable AudioRendererEventListener eventListener) {
- this(mediaCodecSelector, null, true, eventHandler, eventListener);
+ public MediaCodecAudioRenderer(
+ Context context,
+ MediaCodecSelector mediaCodecSelector,
+ @Nullable Handler eventHandler,
+ @Nullable AudioRendererEventListener eventListener) {
+ this(
+ context,
+ mediaCodecSelector,
+ /* drmSessionManager= */ null,
+ /* playClearSamplesWithoutKeys= */ false,
+ eventHandler,
+ eventListener);
}
/**
+ * @param context A context.
* @param mediaCodecSelector A decoder selector.
* @param drmSessionManager For use with encrypted content. May be null if support for encrypted
* content is not required.
@@ -102,15 +152,25 @@ public class MediaCodecAudioRenderer extends MediaCodecRenderer implements Media
* null if delivery of events is not required.
* @param eventListener A listener of events. May be null if delivery of events is not required.
*/
- public MediaCodecAudioRenderer(MediaCodecSelector mediaCodecSelector,
+ public MediaCodecAudioRenderer(
+ Context context,
+ MediaCodecSelector mediaCodecSelector,
@Nullable DrmSessionManager drmSessionManager,
- boolean playClearSamplesWithoutKeys, @Nullable Handler eventHandler,
+ boolean playClearSamplesWithoutKeys,
+ @Nullable Handler eventHandler,
@Nullable AudioRendererEventListener eventListener) {
- this(mediaCodecSelector, drmSessionManager, playClearSamplesWithoutKeys, eventHandler,
- eventListener, null);
+ this(
+ context,
+ mediaCodecSelector,
+ drmSessionManager,
+ playClearSamplesWithoutKeys,
+ eventHandler,
+ eventListener,
+ (AudioCapabilities) null);
}
/**
+ * @param context A context.
* @param mediaCodecSelector A decoder selector.
* @param drmSessionManager For use with encrypted content. May be null if support for encrypted
* content is not required.
@@ -127,31 +187,90 @@ public class MediaCodecAudioRenderer extends MediaCodecRenderer implements Media
* @param audioProcessors Optional {@link AudioProcessor}s that will process PCM audio before
* output.
*/
- public MediaCodecAudioRenderer(MediaCodecSelector mediaCodecSelector,
+ public MediaCodecAudioRenderer(
+ Context context,
+ MediaCodecSelector mediaCodecSelector,
@Nullable DrmSessionManager drmSessionManager,
- boolean playClearSamplesWithoutKeys, @Nullable Handler eventHandler,
+ boolean playClearSamplesWithoutKeys,
+ @Nullable Handler eventHandler,
@Nullable AudioRendererEventListener eventListener,
- @Nullable AudioCapabilities audioCapabilities, AudioProcessor... audioProcessors) {
+ @Nullable AudioCapabilities audioCapabilities,
+ AudioProcessor... audioProcessors) {
+ this(
+ context,
+ mediaCodecSelector,
+ drmSessionManager,
+ playClearSamplesWithoutKeys,
+ eventHandler,
+ eventListener,
+ new DefaultAudioSink(audioCapabilities, audioProcessors));
+ }
+
+ /**
+ * @param context A context.
+ * @param mediaCodecSelector A decoder selector.
+ * @param drmSessionManager For use with encrypted content. May be null if support for encrypted
+ * content is not required.
+ * @param playClearSamplesWithoutKeys Encrypted media may contain clear (un-encrypted) regions.
+ * For example a media file may start with a short clear region so as to allow playback to
+ * begin in parallel with key acquisition. This parameter specifies whether the renderer is
+ * permitted to play clear regions of encrypted media files before {@code drmSessionManager}
+ * has obtained the keys necessary to decrypt encrypted regions of the media.
+ * @param eventHandler A handler to use when delivering events to {@code eventListener}. May be
+ * null if delivery of events is not required.
+ * @param eventListener A listener of events. May be null if delivery of events is not required.
+ * @param audioSink The sink to which audio will be output.
+ */
+ public MediaCodecAudioRenderer(
+ Context context,
+ MediaCodecSelector mediaCodecSelector,
+ @Nullable DrmSessionManager drmSessionManager,
+ boolean playClearSamplesWithoutKeys,
+ @Nullable Handler eventHandler,
+ @Nullable AudioRendererEventListener eventListener,
+ AudioSink audioSink) {
super(C.TRACK_TYPE_AUDIO, mediaCodecSelector, drmSessionManager, playClearSamplesWithoutKeys);
- audioTrack = new AudioTrack(audioCapabilities, audioProcessors, new AudioTrackListener());
+ this.context = context.getApplicationContext();
+ this.audioSink = audioSink;
eventDispatcher = new EventDispatcher(eventHandler, eventListener);
+ audioSink.setListener(new AudioSinkListener());
}
@Override
- protected int supportsFormat(MediaCodecSelector mediaCodecSelector, Format format)
+ protected int supportsFormat(MediaCodecSelector mediaCodecSelector,
+ DrmSessionManager drmSessionManager, Format format)
throws DecoderQueryException {
String mimeType = format.sampleMimeType;
if (!MimeTypes.isAudio(mimeType)) {
return FORMAT_UNSUPPORTED_TYPE;
}
int tunnelingSupport = Util.SDK_INT >= 21 ? TUNNELING_SUPPORTED : TUNNELING_NOT_SUPPORTED;
- if (allowPassthrough(mimeType) && mediaCodecSelector.getPassthroughDecoderInfo() != null) {
+ boolean supportsFormatDrm = supportsFormatDrm(drmSessionManager, format.drmInitData);
+ if (supportsFormatDrm && allowPassthrough(mimeType)
+ && mediaCodecSelector.getPassthroughDecoderInfo() != null) {
return ADAPTIVE_NOT_SEAMLESS | tunnelingSupport | FORMAT_HANDLED;
}
- MediaCodecInfo decoderInfo = mediaCodecSelector.getDecoderInfo(mimeType, false);
- if (decoderInfo == null) {
+ if ((MimeTypes.AUDIO_RAW.equals(mimeType) && !audioSink.isEncodingSupported(format.pcmEncoding))
+ || !audioSink.isEncodingSupported(C.ENCODING_PCM_16BIT)) {
+ // Assume the decoder outputs 16-bit PCM, unless the input is raw.
return FORMAT_UNSUPPORTED_SUBTYPE;
}
+ boolean requiresSecureDecryption = false;
+ DrmInitData drmInitData = format.drmInitData;
+ if (drmInitData != null) {
+ for (int i = 0; i < drmInitData.schemeDataCount; i++) {
+ requiresSecureDecryption |= drmInitData.get(i).requiresSecureDecryption;
+ }
+ }
+ MediaCodecInfo decoderInfo = mediaCodecSelector.getDecoderInfo(mimeType,
+ requiresSecureDecryption);
+ if (decoderInfo == null) {
+ return requiresSecureDecryption && mediaCodecSelector.getDecoderInfo(mimeType, false) != null
+ ? FORMAT_UNSUPPORTED_DRM : FORMAT_UNSUPPORTED_SUBTYPE;
+ }
+ if (!supportsFormatDrm) {
+ return FORMAT_UNSUPPORTED_DRM;
+ }
// Note: We assume support for unknown sampleRate and channelCount.
boolean decoderCapable = Util.SDK_INT < 21
|| ((format.sampleRate == Format.NO_VALUE
@@ -168,42 +287,55 @@ public class MediaCodecAudioRenderer extends MediaCodecRenderer implements Media
if (allowPassthrough(format.sampleMimeType)) {
MediaCodecInfo passthroughDecoderInfo = mediaCodecSelector.getPassthroughDecoderInfo();
if (passthroughDecoderInfo != null) {
- passthroughEnabled = true;
return passthroughDecoderInfo;
}
}
- passthroughEnabled = false;
return super.getDecoderInfo(mediaCodecSelector, format, requiresSecureDecoder);
}
/**
* Returns whether encoded audio passthrough should be used for playing back the input format.
- * This implementation returns true if the {@link AudioTrack}'s audio capabilities indicate that
- * passthrough is supported.
+ * This implementation returns true if the {@link AudioSink} indicates that encoded audio output
+ * is supported.
*
* @param mimeType The type of input media.
- * @return Whether passthrough playback should be used.
+ * @return Whether passthrough playback is supported.
*/
protected boolean allowPassthrough(String mimeType) {
- return audioTrack.isPassthroughSupported(mimeType);
+ @C.Encoding int encoding = MimeTypes.getEncoding(mimeType);
+ return encoding != C.ENCODING_INVALID && audioSink.isEncodingSupported(encoding);
}
@Override
protected void configureCodec(MediaCodecInfo codecInfo, MediaCodec codec, Format format,
MediaCrypto crypto) {
+ codecMaxInputSize = getCodecMaxInputSize(codecInfo, format, getStreamFormats());
codecNeedsDiscardChannelsWorkaround = codecNeedsDiscardChannelsWorkaround(codecInfo.name);
+ passthroughEnabled = codecInfo.passthrough;
+ String codecMimeType = codecInfo.mimeType == null ? MimeTypes.AUDIO_RAW : codecInfo.mimeType;
+ MediaFormat mediaFormat = getMediaFormat(format, codecMimeType, codecMaxInputSize);
+ codec.configure(mediaFormat, /* surface= */ null, crypto, /* flags= */ 0);
if (passthroughEnabled) {
- // Override the MIME type used to configure the codec if we are using a passthrough decoder.
- passthroughMediaFormat = format.getFrameworkMediaFormatV16();
- passthroughMediaFormat.setString(MediaFormat.KEY_MIME, MimeTypes.AUDIO_RAW);
- codec.configure(passthroughMediaFormat, null, crypto, 0);
+ // Store the input MIME type if we're using the passthrough codec.
+ passthroughMediaFormat = mediaFormat;
passthroughMediaFormat.setString(MediaFormat.KEY_MIME, format.sampleMimeType);
} else {
- codec.configure(format.getFrameworkMediaFormatV16(), null, crypto, 0);
passthroughMediaFormat = null;
}
}
+ @Override
+ protected @KeepCodecResult int canKeepCodec(
+ MediaCodec codec, MediaCodecInfo codecInfo, Format oldFormat, Format newFormat) {
+ return KEEP_CODEC_RESULT_NO;
+ // TODO: Determine when codecs can be safely kept. When doing so, also uncomment the commented
+ // out code in getCodecMaxInputSize.
+ // return getCodecMaxInputSize(codecInfo, newFormat) <= codecMaxInputSize
+ // && areAdaptationCompatible(oldFormat, newFormat)
+ // ? KEEP_CODEC_RESULT_YES_WITHOUT_RECONFIGURATION
+ // : KEEP_CODEC_RESULT_NO;
+ }
+
@Override
public MediaClock getMediaClock() {
return this;
@@ -224,15 +356,22 @@ public class MediaCodecAudioRenderer extends MediaCodecRenderer implements Media
pcmEncoding = MimeTypes.AUDIO_RAW.equals(newFormat.sampleMimeType) ? newFormat.pcmEncoding
: C.ENCODING_PCM_16BIT;
channelCount = newFormat.channelCount;
+ encoderDelay = newFormat.encoderDelay;
+ encoderPadding = newFormat.encoderPadding;
}
@Override
protected void onOutputFormatChanged(MediaCodec codec, MediaFormat outputFormat)
throws ExoPlaybackException {
- boolean passthrough = passthroughMediaFormat != null;
- String mimeType = passthrough ? passthroughMediaFormat.getString(MediaFormat.KEY_MIME)
- : MimeTypes.AUDIO_RAW;
- MediaFormat format = passthrough ? passthroughMediaFormat : outputFormat;
+ @C.Encoding int encoding;
+ MediaFormat format;
+ if (passthroughMediaFormat != null) {
+ encoding = MimeTypes.getEncoding(passthroughMediaFormat.getString(MediaFormat.KEY_MIME));
+ format = passthroughMediaFormat;
+ } else {
+ encoding = pcmEncoding;
+ format = outputFormat;
+ }
int channelCount = format.getInteger(MediaFormat.KEY_CHANNEL_COUNT);
int sampleRate = format.getInteger(MediaFormat.KEY_SAMPLE_RATE);
int[] channelMap;
@@ -246,8 +385,9 @@ public class MediaCodecAudioRenderer extends MediaCodecRenderer implements Media
}
try {
- audioTrack.configure(mimeType, channelCount, sampleRate, pcmEncoding, 0, channelMap);
- } catch (AudioTrack.ConfigurationException e) {
+ audioSink.configure(encoding, channelCount, sampleRate, 0, channelMap, encoderDelay,
+ encoderPadding);
+ } catch (AudioSink.ConfigurationException e) {
throw ExoPlaybackException.createForRenderer(e, getIndex());
}
}
@@ -258,21 +398,21 @@ public class MediaCodecAudioRenderer extends MediaCodecRenderer implements Media
* order to spatialize the audio channels. For this use case, any {@link Virtualizer} instances
* should be released in {@link #onDisabled()} (if not before).
*
- * @see AudioTrack.Listener#onAudioSessionId(int)
+ * @see AudioSink.Listener#onAudioSessionId(int)
*/
protected void onAudioSessionId(int audioSessionId) {
// Do nothing.
}
/**
- * @see AudioTrack.Listener#onPositionDiscontinuity()
+ * @see AudioSink.Listener#onPositionDiscontinuity()
*/
protected void onAudioTrackPositionDiscontinuity() {
// Do nothing.
}
/**
- * @see AudioTrack.Listener#onUnderrun(int, long, long)
+ * @see AudioSink.Listener#onUnderrun(int, long, long)
*/
protected void onAudioTrackUnderrun(int bufferSize, long bufferSizeMs,
long elapsedSinceLastFeedMs) {
@@ -285,36 +425,38 @@ public class MediaCodecAudioRenderer extends MediaCodecRenderer implements Media
eventDispatcher.enabled(decoderCounters);
int tunnelingAudioSessionId = getConfiguration().tunnelingAudioSessionId;
if (tunnelingAudioSessionId != C.AUDIO_SESSION_ID_UNSET) {
- audioTrack.enableTunnelingV21(tunnelingAudioSessionId);
+ audioSink.enableTunnelingV21(tunnelingAudioSessionId);
} else {
- audioTrack.disableTunneling();
+ audioSink.disableTunneling();
}
}
@Override
protected void onPositionReset(long positionUs, boolean joining) throws ExoPlaybackException {
super.onPositionReset(positionUs, joining);
- audioTrack.reset();
+ audioSink.reset();
currentPositionUs = positionUs;
+ allowFirstBufferPositionDiscontinuity = true;
allowPositionDiscontinuity = true;
}
@Override
protected void onStarted() {
super.onStarted();
- audioTrack.play();
+ audioSink.play();
}
@Override
protected void onStopped() {
- audioTrack.pause();
+ updateCurrentPosition();
+ audioSink.pause();
super.onStopped();
}
@Override
protected void onDisabled() {
try {
- audioTrack.release();
+ audioSink.release();
} finally {
try {
super.onDisabled();
@@ -327,33 +469,43 @@ public class MediaCodecAudioRenderer extends MediaCodecRenderer implements Media
@Override
public boolean isEnded() {
- return super.isEnded() && audioTrack.isEnded();
+ return super.isEnded() && audioSink.isEnded();
}
@Override
public boolean isReady() {
- return audioTrack.hasPendingData() || super.isReady();
+ return audioSink.hasPendingData() || super.isReady();
}
@Override
public long getPositionUs() {
- long newCurrentPositionUs = audioTrack.getCurrentPositionUs(isEnded());
- if (newCurrentPositionUs != AudioTrack.CURRENT_POSITION_NOT_SET) {
- currentPositionUs = allowPositionDiscontinuity ? newCurrentPositionUs
- : Math.max(currentPositionUs, newCurrentPositionUs);
- allowPositionDiscontinuity = false;
+ if (getState() == STATE_STARTED) {
+ updateCurrentPosition();
}
return currentPositionUs;
}
@Override
public PlaybackParameters setPlaybackParameters(PlaybackParameters playbackParameters) {
- return audioTrack.setPlaybackParameters(playbackParameters);
+ return audioSink.setPlaybackParameters(playbackParameters);
}
@Override
public PlaybackParameters getPlaybackParameters() {
- return audioTrack.getPlaybackParameters();
+ return audioSink.getPlaybackParameters();
+ }
+
+ @Override
+ protected void onQueueInputBuffer(DecoderInputBuffer buffer) {
+ if (allowFirstBufferPositionDiscontinuity && !buffer.isDecodeOnly()) {
+ // TODO: Remove this hack once we have a proper fix for [Internal: b/71876314].
+ // Allow the position to jump if the first presentable input buffer has a timestamp that
+ // differs significantly from what was expected.
+ if (Math.abs(buffer.timeUs - currentPositionUs) > 500000) {
+ currentPositionUs = buffer.timeUs;
+ }
+ allowFirstBufferPositionDiscontinuity = false;
+ }
}
@Override
@@ -369,17 +521,17 @@ public class MediaCodecAudioRenderer extends MediaCodecRenderer implements Media
if (shouldSkip) {
codec.releaseOutputBuffer(bufferIndex, false);
decoderCounters.skippedOutputBufferCount++;
- audioTrack.handleDiscontinuity();
+ audioSink.handleDiscontinuity();
return true;
}
try {
- if (audioTrack.handleBuffer(buffer, bufferPresentationTimeUs)) {
+ if (audioSink.handleBuffer(buffer, bufferPresentationTimeUs)) {
codec.releaseOutputBuffer(bufferIndex, false);
decoderCounters.renderedOutputBufferCount++;
return true;
}
- } catch (AudioTrack.InitializationException | AudioTrack.WriteException e) {
+ } catch (AudioSink.InitializationException | AudioSink.WriteException e) {
throw ExoPlaybackException.createForRenderer(e, getIndex());
}
return false;
@@ -388,8 +540,8 @@ public class MediaCodecAudioRenderer extends MediaCodecRenderer implements Media
@Override
protected void renderToEndOfStream() throws ExoPlaybackException {
try {
- audioTrack.playToEndOfStream();
- } catch (AudioTrack.WriteException e) {
+ audioSink.playToEndOfStream();
+ } catch (AudioSink.WriteException e) {
throw ExoPlaybackException.createForRenderer(e, getIndex());
}
}
@@ -398,11 +550,11 @@ public class MediaCodecAudioRenderer extends MediaCodecRenderer implements Media
public void handleMessage(int messageType, Object message) throws ExoPlaybackException {
switch (messageType) {
case C.MSG_SET_VOLUME:
- audioTrack.setVolume((Float) message);
+ audioSink.setVolume((Float) message);
break;
case C.MSG_SET_AUDIO_ATTRIBUTES:
AudioAttributes audioAttributes = (AudioAttributes) message;
- audioTrack.setAudioAttributes(audioAttributes);
+ audioSink.setAudioAttributes(audioAttributes);
break;
default:
super.handleMessage(messageType, message);
@@ -410,6 +562,117 @@ public class MediaCodecAudioRenderer extends MediaCodecRenderer implements Media
}
}
+ /**
+ * Returns a maximum input size suitable for configuring a codec for {@code format} in a way that
+ * will allow possible adaptation to other compatible formats in {@code streamFormats}.
+ *
+ * @param codecInfo A {@link MediaCodecInfo} describing the decoder.
+ * @param format The format for which the codec is being configured.
+ * @param streamFormats The possible stream formats.
+ * @return A suitable maximum input size.
+ */
+ protected int getCodecMaxInputSize(
+ MediaCodecInfo codecInfo, Format format, Format[] streamFormats) {
+ int maxInputSize = getCodecMaxInputSize(codecInfo, format);
+ // if (streamFormats.length == 1) {
+ // // The single entry in streamFormats must correspond to the format for which the codec is
+ // // being configured.
+ // return maxInputSize;
+ // }
+ // for (Format streamFormat : streamFormats) {
+ // if (areAdaptationCompatible(format, streamFormat)) {
+ // maxInputSize = Math.max(maxInputSize, getCodecMaxInputSize(codecInfo, streamFormat));
+ // }
+ // }
+ return maxInputSize;
+ }
+
+ /**
+ * Returns a maximum input buffer size for a given format.
+ *
+ * @param codecInfo A {@link MediaCodecInfo} describing the decoder.
+ * @param format The format.
+ * @return A maximum input buffer size in bytes, or {@link Format#NO_VALUE} if a maximum could not
+ * be determined.
+ */
+ private int getCodecMaxInputSize(MediaCodecInfo codecInfo, Format format) {
+ if (Util.SDK_INT < 24 && "OMX.google.raw.decoder".equals(codecInfo.name)) {
+ // OMX.google.raw.decoder didn't resize its output buffers correctly prior to N, so there's no
+ // point requesting a non-default input size. Doing so may cause a native crash, where-as not
+ // doing so will cause a more controlled failure when attempting to fill an input buffer. See:
+ // https://github.com/google/ExoPlayer/issues/4057.
+ boolean needsRawDecoderWorkaround = true;
+ if (Util.SDK_INT == 23) {
+ PackageManager packageManager = context.getPackageManager();
+ if (packageManager != null
+ && packageManager.hasSystemFeature(PackageManager.FEATURE_LEANBACK)) {
+ // The workaround is not required for AndroidTV devices running M.
+ needsRawDecoderWorkaround = false;
+ }
+ }
+ if (needsRawDecoderWorkaround) {
+ return Format.NO_VALUE;
+ }
+ }
+ return format.maxInputSize;
+ }
+
+ /**
+ * Returns the framework {@link MediaFormat} that can be used to configure a {@link MediaCodec}
+ * for decoding the given {@link Format} for playback.
+ *
+ * @param format The format of the media.
+ * @param codecMimeType The MIME type handled by the codec.
+ * @param codecMaxInputSize The maximum input size supported by the codec.
+ * @return The framework media format.
+ */
+ @SuppressLint("InlinedApi")
+ protected MediaFormat getMediaFormat(Format format, String codecMimeType, int codecMaxInputSize) {
+ MediaFormat mediaFormat = new MediaFormat();
+ // Set format parameters that should always be set.
+ mediaFormat.setString(MediaFormat.KEY_MIME, codecMimeType);
+ mediaFormat.setInteger(MediaFormat.KEY_CHANNEL_COUNT, format.channelCount);
+ mediaFormat.setInteger(MediaFormat.KEY_SAMPLE_RATE, format.sampleRate);
+ MediaFormatUtil.setCsdBuffers(mediaFormat, format.initializationData);
+ // Set codec max values.
+ MediaFormatUtil.maybeSetInteger(mediaFormat, MediaFormat.KEY_MAX_INPUT_SIZE, codecMaxInputSize);
+ // Set codec configuration values.
+ if (Util.SDK_INT >= 23) {
+ mediaFormat.setInteger(MediaFormat.KEY_PRIORITY, 0 /* realtime priority */);
+ }
+ return mediaFormat;
+ }
+
+ private void updateCurrentPosition() {
+ long newCurrentPositionUs = audioSink.getCurrentPositionUs(isEnded());
+ if (newCurrentPositionUs != AudioSink.CURRENT_POSITION_NOT_SET) {
+ currentPositionUs =
+ allowPositionDiscontinuity
+ ? newCurrentPositionUs
+ : Math.max(currentPositionUs, newCurrentPositionUs);
+ allowPositionDiscontinuity = false;
+ }
+ }
+
+ /**
+ * Returns whether a codec with suitable maximum input size will support adaptation between two
+ * {@link Format}s.
+ *
+ * @param first The first format.
+ * @param second The second format.
+ * @return Whether the codec will support adaptation between the two {@link Format}s.
+ */
+ private static boolean areAdaptationCompatible(Format first, Format second) {
+ return first.sampleMimeType.equals(second.sampleMimeType)
+ && first.channelCount == second.channelCount
+ && first.sampleRate == second.sampleRate
+ && first.encoderDelay == 0
+ && first.encoderPadding == 0
+ && second.encoderDelay == 0
+ && second.encoderPadding == 0
+ && first.initializationDataEquals(second);
+ }
+
/**
* Returns whether the decoder is known to output six audio channels when provided with input with
* fewer than six channels.
@@ -424,7 +687,7 @@ public class MediaCodecAudioRenderer extends MediaCodecRenderer implements Media
|| Util.DEVICE.startsWith("heroqlte"));
}
- private final class AudioTrackListener implements AudioTrack.Listener {
+ private final class AudioSinkListener implements AudioSink.Listener {
@Override
public void onAudioSessionId(int audioSessionId) {
diff --git a/library/core/src/main/java/com/google/android/exoplayer2/audio/ResamplingAudioProcessor.java b/library/core/src/main/java/com/google/android/exoplayer2/audio/ResamplingAudioProcessor.java
index 0dd062150d..eac0bffd65 100644
--- a/library/core/src/main/java/com/google/android/exoplayer2/audio/ResamplingAudioProcessor.java
+++ b/library/core/src/main/java/com/google/android/exoplayer2/audio/ResamplingAudioProcessor.java
@@ -21,21 +21,19 @@ import java.nio.ByteBuffer;
import java.nio.ByteOrder;
/**
- * An {@link AudioProcessor} that converts audio data to {@link C#ENCODING_PCM_16BIT}.
+ * An {@link AudioProcessor} that converts 8-bit, 24-bit and 32-bit integer PCM audio to 16-bit
+ * integer PCM audio.
*/
/* package */ final class ResamplingAudioProcessor implements AudioProcessor {
private int sampleRateHz;
private int channelCount;
- @C.PcmEncoding
- private int encoding;
+ private @C.PcmEncoding int encoding;
private ByteBuffer buffer;
private ByteBuffer outputBuffer;
private boolean inputEnded;
- /**
- * Creates a new audio processor that converts audio data to {@link C#ENCODING_PCM_16BIT}.
- */
+ /** Creates a new audio processor that converts audio data to {@link C#ENCODING_PCM_16BIT}. */
public ResamplingAudioProcessor() {
sampleRateHz = Format.NO_VALUE;
channelCount = Format.NO_VALUE;
@@ -58,9 +56,6 @@ import java.nio.ByteOrder;
this.sampleRateHz = sampleRateHz;
this.channelCount = channelCount;
this.encoding = encoding;
- if (encoding == C.ENCODING_PCM_16BIT) {
- buffer = EMPTY_BUFFER;
- }
return true;
}
@@ -79,6 +74,11 @@ import java.nio.ByteOrder;
return C.ENCODING_PCM_16BIT;
}
+ @Override
+ public int getOutputSampleRateHz() {
+ return sampleRateHz;
+ }
+
@Override
public void queueInput(ByteBuffer inputBuffer) {
// Prepare the output buffer.
@@ -97,6 +97,7 @@ import java.nio.ByteOrder;
resampledSize = size / 2;
break;
case C.ENCODING_PCM_16BIT:
+ case C.ENCODING_PCM_FLOAT:
case C.ENCODING_INVALID:
case Format.NO_VALUE:
default:
@@ -132,6 +133,7 @@ import java.nio.ByteOrder;
}
break;
case C.ENCODING_PCM_16BIT:
+ case C.ENCODING_PCM_FLOAT:
case C.ENCODING_INVALID:
case Format.NO_VALUE:
default:
@@ -170,10 +172,10 @@ import java.nio.ByteOrder;
@Override
public void reset() {
flush();
- buffer = EMPTY_BUFFER;
sampleRateHz = Format.NO_VALUE;
channelCount = Format.NO_VALUE;
encoding = C.ENCODING_INVALID;
+ buffer = EMPTY_BUFFER;
}
}
diff --git a/library/core/src/main/java/com/google/android/exoplayer2/audio/SilenceSkippingAudioProcessor.java b/library/core/src/main/java/com/google/android/exoplayer2/audio/SilenceSkippingAudioProcessor.java
new file mode 100644
index 0000000000..a289ced128
--- /dev/null
+++ b/library/core/src/main/java/com/google/android/exoplayer2/audio/SilenceSkippingAudioProcessor.java
@@ -0,0 +1,412 @@
+/*
+ * Copyright (C) 2018 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package com.google.android.exoplayer2.audio;
+
+import android.support.annotation.IntDef;
+import com.google.android.exoplayer2.C;
+import com.google.android.exoplayer2.Format;
+import java.lang.annotation.Retention;
+import java.lang.annotation.RetentionPolicy;
+import java.nio.ByteBuffer;
+import java.nio.ByteOrder;
+
+/**
+ * An {@link AudioProcessor} that skips silence in the input stream. Input and output are 16-bit
+ * PCM.
+ */
+public final class SilenceSkippingAudioProcessor implements AudioProcessor {
+
+ /**
+ * The minimum duration of audio that must be below {@link #SILENCE_THRESHOLD_LEVEL} to classify
+ * that part of audio as silent, in microseconds.
+ */
+ private static final long MINIMUM_SILENCE_DURATION_US = 100_000;
+ /**
+ * The duration of silence by which to extend non-silent sections, in microseconds. The value must
+ * not exceed {@link #MINIMUM_SILENCE_DURATION_US}.
+ */
+ private static final long PADDING_SILENCE_US = 10_000;
+ /**
+ * The absolute level below which an individual PCM sample is classified as silent. Note: the
+ * specified value will be rounded so that the threshold check only depends on the more
+ * significant byte, for efficiency.
+ */
+ private static final short SILENCE_THRESHOLD_LEVEL = 1024;
+
+ /**
+ * Threshold for classifying an individual PCM sample as silent based on its more significant
+ * byte. This is {@link #SILENCE_THRESHOLD_LEVEL} divided by 256 with rounding.
+ */
+ private static final byte SILENCE_THRESHOLD_LEVEL_MSB = (SILENCE_THRESHOLD_LEVEL + 128) >> 8;
+
+ /** Trimming states. */
+ @Retention(RetentionPolicy.SOURCE)
+ @IntDef({
+ STATE_NOISY,
+ STATE_MAYBE_SILENT,
+ STATE_SILENT,
+ })
+ private @interface State {}
+ /** State when the input is not silent. */
+ private static final int STATE_NOISY = 0;
+ /** State when the input may be silent but we haven't read enough yet to know. */
+ private static final int STATE_MAYBE_SILENT = 1;
+ /** State when the input is silent. */
+ private static final int STATE_SILENT = 2;
+
+ private int channelCount;
+ private int sampleRateHz;
+ private int bytesPerFrame;
+
+ private boolean enabled;
+
+ private ByteBuffer buffer;
+ private ByteBuffer outputBuffer;
+ private boolean inputEnded;
+
+ /**
+ * Buffers audio data that may be classified as silence while in {@link #STATE_MAYBE_SILENT}. If
+ * the input becomes noisy before the buffer has filled, it will be output. Otherwise, the buffer
+ * contents will be dropped and the state will transition to {@link #STATE_SILENT}.
+ */
+ private byte[] maybeSilenceBuffer;
+
+ /**
+ * Stores the latest part of the input while silent. It will be output as padding if the next
+ * input is noisy.
+ */
+ private byte[] paddingBuffer;
+
+ private @State int state;
+ private int maybeSilenceBufferSize;
+ private int paddingSize;
+ private boolean hasOutputNoise;
+ private long skippedFrames;
+
+ /** Creates a new silence trimming audio processor. */
+ public SilenceSkippingAudioProcessor() {
+ buffer = EMPTY_BUFFER;
+ outputBuffer = EMPTY_BUFFER;
+ channelCount = Format.NO_VALUE;
+ sampleRateHz = Format.NO_VALUE;
+ maybeSilenceBuffer = new byte[0];
+ paddingBuffer = new byte[0];
+ }
+
+ /**
+ * Sets whether to skip silence in the input. Calling this method will discard any data buffered
+ * within the processor, and may update the value returned by {@link #isActive()}.
+ *
+ * @param enabled Whether to skip silence in the input.
+ */
+ public void setEnabled(boolean enabled) {
+ this.enabled = enabled;
+ flush();
+ }
+
+ /**
+ * Returns the total number of frames of input audio that were skipped due to being classified as
+ * silence since the last call to {@link #flush()}.
+ */
+ public long getSkippedFrames() {
+ return skippedFrames;
+ }
+
+ // AudioProcessor implementation.
+
+ @Override
+ public boolean configure(int sampleRateHz, int channelCount, int encoding)
+ throws UnhandledFormatException {
+ if (encoding != C.ENCODING_PCM_16BIT) {
+ throw new UnhandledFormatException(sampleRateHz, channelCount, encoding);
+ }
+ if (this.sampleRateHz == sampleRateHz && this.channelCount == channelCount) {
+ return false;
+ }
+ this.sampleRateHz = sampleRateHz;
+ this.channelCount = channelCount;
+ bytesPerFrame = channelCount * 2;
+ return true;
+ }
+
+ @Override
+ public boolean isActive() {
+ return sampleRateHz != Format.NO_VALUE && enabled;
+ }
+
+ @Override
+ public int getOutputChannelCount() {
+ return channelCount;
+ }
+
+ @Override
+ public @C.Encoding int getOutputEncoding() {
+ return C.ENCODING_PCM_16BIT;
+ }
+
+ @Override
+ public int getOutputSampleRateHz() {
+ return sampleRateHz;
+ }
+
+ @Override
+ public void queueInput(ByteBuffer inputBuffer) {
+ while (inputBuffer.hasRemaining() && !outputBuffer.hasRemaining()) {
+ switch (state) {
+ case STATE_NOISY:
+ processNoisy(inputBuffer);
+ break;
+ case STATE_MAYBE_SILENT:
+ processMaybeSilence(inputBuffer);
+ break;
+ case STATE_SILENT:
+ processSilence(inputBuffer);
+ break;
+ default:
+ throw new IllegalStateException();
+ }
+ }
+ }
+
+ @Override
+ public void queueEndOfStream() {
+ inputEnded = true;
+ if (maybeSilenceBufferSize > 0) {
+ // We haven't received enough silence to transition to the silent state, so output the buffer.
+ output(maybeSilenceBuffer, maybeSilenceBufferSize);
+ }
+ if (!hasOutputNoise) {
+ skippedFrames += paddingSize / bytesPerFrame;
+ }
+ }
+
+ @Override
+ public ByteBuffer getOutput() {
+ ByteBuffer outputBuffer = this.outputBuffer;
+ this.outputBuffer = EMPTY_BUFFER;
+ return outputBuffer;
+ }
+
+ @SuppressWarnings("ReferenceEquality")
+ @Override
+ public boolean isEnded() {
+ return inputEnded && outputBuffer == EMPTY_BUFFER;
+ }
+
+ @Override
+ public void flush() {
+ if (isActive()) {
+ // Note: this local (a size in bytes) deliberately shadows the field, which is reset below.
+ int maybeSilenceBufferSize = durationUsToFrames(MINIMUM_SILENCE_DURATION_US) * bytesPerFrame;
+ if (maybeSilenceBuffer.length != maybeSilenceBufferSize) {
+ maybeSilenceBuffer = new byte[maybeSilenceBufferSize];
+ }
+ paddingSize = durationUsToFrames(PADDING_SILENCE_US) * bytesPerFrame;
+ if (paddingBuffer.length != paddingSize) {
+ paddingBuffer = new byte[paddingSize];
+ }
+ }
+ // Reset transient stream state, keeping the configured format and (re)sized buffers.
+ state = STATE_NOISY;
+ outputBuffer = EMPTY_BUFFER;
+ inputEnded = false;
+ skippedFrames = 0;
+ maybeSilenceBufferSize = 0;
+ hasOutputNoise = false;
+ }
+
+ @Override
+ public void reset() {
+ enabled = false;
+ // flush() clears transient stream state; the assignments below additionally drop the buffers
+ // and forget the configured format.
+ flush();
+ buffer = EMPTY_BUFFER;
+ channelCount = Format.NO_VALUE;
+ sampleRateHz = Format.NO_VALUE;
+ paddingSize = 0;
+ maybeSilenceBuffer = new byte[0];
+ paddingBuffer = new byte[0];
+ }
+
+ // Internal methods.
+
+ /**
+ * Incrementally processes new input from {@code inputBuffer} while in {@link #STATE_NOISY},
+ * updating the state if needed.
+ *
+ * @param inputBuffer The input buffer. Its position is advanced past any bytes that were output.
+ */
+ private void processNoisy(ByteBuffer inputBuffer) {
+ int limit = inputBuffer.limit();
+
+ // Check if there's any noise within the maybe silence buffer duration.
+ inputBuffer.limit(Math.min(limit, inputBuffer.position() + maybeSilenceBuffer.length));
+ int noiseLimit = findNoiseLimit(inputBuffer);
+ if (noiseLimit == inputBuffer.position()) {
+ // The buffer contains the start of possible silence.
+ state = STATE_MAYBE_SILENT;
+ } else {
+ // Output everything up to the last noisy frame in the examined window.
+ inputBuffer.limit(noiseLimit);
+ output(inputBuffer);
+ }
+
+ // Restore the limit.
+ inputBuffer.limit(limit);
+ }
+
+ /**
+ * Incrementally processes new input from {@code inputBuffer} while in {@link
+ * #STATE_MAYBE_SILENT}, updating the state if needed.
+ *
+ * @param inputBuffer The input buffer to read from.
+ */
+ private void processMaybeSilence(ByteBuffer inputBuffer) {
+ int limit = inputBuffer.limit();
+ int noisePosition = findNoisePosition(inputBuffer);
+ int maybeSilenceInputSize = noisePosition - inputBuffer.position();
+ int maybeSilenceBufferRemaining = maybeSilenceBuffer.length - maybeSilenceBufferSize;
+ if (noisePosition < limit && maybeSilenceInputSize < maybeSilenceBufferRemaining) {
+ // The maybe silence buffer isn't full, so output it and switch back to the noisy state.
+ // (The limit was not modified on this path, so no restore is needed.)
+ output(maybeSilenceBuffer, maybeSilenceBufferSize);
+ maybeSilenceBufferSize = 0;
+ state = STATE_NOISY;
+ } else {
+ // Fill as much of the maybe silence buffer as possible.
+ int bytesToWrite = Math.min(maybeSilenceInputSize, maybeSilenceBufferRemaining);
+ inputBuffer.limit(inputBuffer.position() + bytesToWrite);
+ inputBuffer.get(maybeSilenceBuffer, maybeSilenceBufferSize, bytesToWrite);
+ maybeSilenceBufferSize += bytesToWrite;
+ if (maybeSilenceBufferSize == maybeSilenceBuffer.length) {
+ // We've reached a period of silence, so skip it, taking in to account padding for both
+ // the noisy to silent transition and any future silent to noisy transition.
+ if (hasOutputNoise) {
+ output(maybeSilenceBuffer, paddingSize);
+ skippedFrames += (maybeSilenceBufferSize - paddingSize * 2) / bytesPerFrame;
+ } else {
+ skippedFrames += (maybeSilenceBufferSize - paddingSize) / bytesPerFrame;
+ }
+ updatePaddingBuffer(inputBuffer, maybeSilenceBuffer, maybeSilenceBufferSize);
+ maybeSilenceBufferSize = 0;
+ state = STATE_SILENT;
+ }
+
+ // Restore the limit.
+ inputBuffer.limit(limit);
+ }
+ }
+
+ /**
+ * Incrementally processes new input from {@code inputBuffer} while in {@link #STATE_SILENT},
+ * updating the state if needed.
+ *
+ * @param inputBuffer The input buffer to read from.
+ */
+ private void processSilence(ByteBuffer inputBuffer) {
+ int limit = inputBuffer.limit();
+ int noisyPosition = findNoisePosition(inputBuffer);
+ // Everything up to the first noisy frame is silence to be skipped.
+ inputBuffer.limit(noisyPosition);
+ skippedFrames += inputBuffer.remaining() / bytesPerFrame;
+ updatePaddingBuffer(inputBuffer, paddingBuffer, paddingSize);
+ if (noisyPosition < limit) {
+ // Output the padding, which may include previous input as well as new input, then transition
+ // back to the noisy state.
+ output(paddingBuffer, paddingSize);
+ state = STATE_NOISY;
+
+ // Restore the limit.
+ inputBuffer.limit(limit);
+ }
+ }
+
+ /**
+ * Copies {@code length} elements from {@code data} to populate a new output buffer from the
+ * processor.
+ */
+ private void output(byte[] data, int length) {
+ prepareForOutput(length);
+ buffer.put(data, 0, length);
+ // Flip so the buffer is ready for reading by the consumer via getOutput().
+ buffer.flip();
+ outputBuffer = buffer;
+ }
+
+ /**
+ * Copies remaining bytes from {@code data} to populate a new output buffer from the processor.
+ */
+ private void output(ByteBuffer data) {
+ prepareForOutput(data.remaining());
+ // Advances the position of data past the copied bytes.
+ buffer.put(data);
+ buffer.flip();
+ outputBuffer = buffer;
+ }
+
+ /** Prepares to output {@code size} bytes in {@code buffer}. */
+ private void prepareForOutput(int size) {
+ // Grow the (direct, native-order) buffer only when needed; otherwise reset it for reuse.
+ if (buffer.capacity() < size) {
+ buffer = ByteBuffer.allocateDirect(size).order(ByteOrder.nativeOrder());
+ } else {
+ buffer.clear();
+ }
+ if (size > 0) {
+ // NOTE(review): any non-empty output (including padding) sets this flag.
+ hasOutputNoise = true;
+ }
+ }
+
+ /**
+ * Fills {@link #paddingBuffer} using data from {@code input}, plus any additional buffered data
+ * at the end of {@code buffer} (up to its {@code size}) required to fill it, advancing the input
+ * position.
+ */
+ private void updatePaddingBuffer(ByteBuffer input, byte[] buffer, int size) {
+ // Take as much as possible from the end of the input, topping up from the end of the buffer.
+ int fromInputSize = Math.min(input.remaining(), paddingSize);
+ int fromBufferSize = paddingSize - fromInputSize;
+ System.arraycopy(
+ /* src= */ buffer,
+ /* srcPos= */ size - fromBufferSize,
+ /* dest= */ paddingBuffer,
+ /* destPos= */ 0,
+ /* length= */ fromBufferSize);
+ // Consume the input up to its limit, copying only its trailing fromInputSize bytes.
+ input.position(input.limit() - fromInputSize);
+ input.get(paddingBuffer, fromBufferSize, fromInputSize);
+ }
+
+ /**
+ * Returns the number of input frames corresponding to {@code durationUs} microseconds of audio.
+ */
+ private int durationUsToFrames(long durationUs) {
+ // Multiply in long arithmetic before the narrowing cast to avoid intermediate overflow.
+ return (int) ((durationUs * sampleRateHz) / C.MICROS_PER_SECOND);
+ }
+
+ /**
+ * Returns the earliest byte position in [position, limit) of {@code buffer} that contains a frame
+ * classified as a noisy frame, or the limit of the buffer if no such frame exists.
+ */
+ private int findNoisePosition(ByteBuffer buffer) {
+ // The input is in ByteOrder.nativeOrder(), which is little endian on Android.
+ // Stepping by 2 from position + 1 inspects the most significant byte of each 16-bit sample;
+ // NOTE(review): this assumes the position is frame-aligned — confirm at call sites.
+ for (int i = buffer.position() + 1; i < buffer.limit(); i += 2) {
+ if (Math.abs(buffer.get(i)) > SILENCE_THRESHOLD_LEVEL_MSB) {
+ // Round to the start of the frame.
+ return bytesPerFrame * (i / bytesPerFrame);
+ }
+ }
+ return buffer.limit();
+ }
+
+ /**
+ * Returns the earliest byte position in [position, limit) of {@code buffer} such that all frames
+ * from the byte position to the limit are classified as silent.
+ */
+ private int findNoiseLimit(ByteBuffer buffer) {
+ // The input is in ByteOrder.nativeOrder(), which is little endian on Android.
+ // Scan backwards over most significant bytes only, from the final sample's MSB at limit - 1.
+ for (int i = buffer.limit() - 1; i >= buffer.position(); i -= 2) {
+ if (Math.abs(buffer.get(i)) > SILENCE_THRESHOLD_LEVEL_MSB) {
+ // Return the start of the next frame.
+ return bytesPerFrame * (i / bytesPerFrame) + bytesPerFrame;
+ }
+ }
+ return buffer.position();
+ }
+}
diff --git a/library/core/src/main/java/com/google/android/exoplayer2/audio/SimpleDecoderAudioRenderer.java b/library/core/src/main/java/com/google/android/exoplayer2/audio/SimpleDecoderAudioRenderer.java
index c4a55eeb02..c404912882 100644
--- a/library/core/src/main/java/com/google/android/exoplayer2/audio/SimpleDecoderAudioRenderer.java
+++ b/library/core/src/main/java/com/google/android/exoplayer2/audio/SimpleDecoderAudioRenderer.java
@@ -23,9 +23,11 @@ import android.support.annotation.IntDef;
import com.google.android.exoplayer2.BaseRenderer;
import com.google.android.exoplayer2.C;
import com.google.android.exoplayer2.ExoPlaybackException;
+import com.google.android.exoplayer2.ExoPlayer;
import com.google.android.exoplayer2.Format;
import com.google.android.exoplayer2.FormatHolder;
import com.google.android.exoplayer2.PlaybackParameters;
+import com.google.android.exoplayer2.PlayerMessage.Target;
import com.google.android.exoplayer2.audio.AudioRendererEventListener.EventDispatcher;
import com.google.android.exoplayer2.decoder.DecoderCounters;
import com.google.android.exoplayer2.decoder.DecoderInputBuffer;
@@ -45,6 +47,17 @@ import java.lang.annotation.RetentionPolicy;
/**
* Decodes and renders audio using a {@link SimpleDecoder}.
+ *
+ * <p>This renderer accepts the following messages sent via {@link ExoPlayer#createMessage(Target)}
+ * on the playback thread:
+ *
+ * <ul>
+ * <li>Message with type {@link C#MSG_SET_VOLUME} to set the volume. The message payload should be
+ * a {@link Float} with 0 being silence and 1 being unity gain.
+ * <li>Message with type {@link C#MSG_SET_AUDIO_ATTRIBUTES} to set the audio attributes. The
+ * message payload should be an {@link com.google.android.exoplayer2.audio.AudioAttributes}
+ * instance that will configure the underlying audio track.
+ * </ul>
+ *
*/
public abstract class SimpleDecoderAudioRenderer extends BaseRenderer implements MediaClock {
@@ -72,12 +85,14 @@ public abstract class SimpleDecoderAudioRenderer extends BaseRenderer implements
private final DrmSessionManager drmSessionManager;
private final boolean playClearSamplesWithoutKeys;
private final EventDispatcher eventDispatcher;
- private final AudioTrack audioTrack;
+ private final AudioSink audioSink;
private final FormatHolder formatHolder;
private final DecoderInputBuffer flagsOnlyBuffer;
private DecoderCounters decoderCounters;
private Format inputFormat;
+ private int encoderDelay;
+ private int encoderPadding;
private SimpleDecoder decoder;
private DecoderInputBuffer inputBuffer;
@@ -90,13 +105,14 @@ public abstract class SimpleDecoderAudioRenderer extends BaseRenderer implements
private boolean audioTrackNeedsConfigure;
private long currentPositionUs;
+ private boolean allowFirstBufferPositionDiscontinuity;
private boolean allowPositionDiscontinuity;
private boolean inputStreamEnded;
private boolean outputStreamEnded;
private boolean waitingForKeys;
public SimpleDecoderAudioRenderer() {
- this(null, null);
+ this(/* eventHandler= */ null, /* eventListener= */ null);
}
/**
@@ -105,9 +121,15 @@ public abstract class SimpleDecoderAudioRenderer extends BaseRenderer implements
* @param eventListener A listener of events. May be null if delivery of events is not required.
* @param audioProcessors Optional {@link AudioProcessor}s that will process audio before output.
*/
- public SimpleDecoderAudioRenderer(Handler eventHandler,
- AudioRendererEventListener eventListener, AudioProcessor... audioProcessors) {
- this(eventHandler, eventListener, null, null, false, audioProcessors);
+ public SimpleDecoderAudioRenderer(Handler eventHandler, AudioRendererEventListener eventListener,
+ AudioProcessor... audioProcessors) {
+ this(
+ eventHandler,
+ eventListener,
+ /* audioCapabilities= */ null,
+ /* drmSessionManager= */ null,
+ /* playClearSamplesWithoutKeys= */ false,
+ audioProcessors);
}
/**
@@ -117,9 +139,14 @@ public abstract class SimpleDecoderAudioRenderer extends BaseRenderer implements
* @param audioCapabilities The audio capabilities for playback on this device. May be null if the
* default capabilities (no encoded audio passthrough support) should be assumed.
*/
- public SimpleDecoderAudioRenderer(Handler eventHandler,
- AudioRendererEventListener eventListener, AudioCapabilities audioCapabilities) {
- this(eventHandler, eventListener, audioCapabilities, null, false);
+ public SimpleDecoderAudioRenderer(Handler eventHandler, AudioRendererEventListener eventListener,
+ AudioCapabilities audioCapabilities) {
+ this(
+ eventHandler,
+ eventListener,
+ audioCapabilities,
+ /* drmSessionManager= */ null,
+ /* playClearSamplesWithoutKeys= */ false);
}
/**
@@ -137,15 +164,35 @@ public abstract class SimpleDecoderAudioRenderer extends BaseRenderer implements
* has obtained the keys necessary to decrypt encrypted regions of the media.
* @param audioProcessors Optional {@link AudioProcessor}s that will process audio before output.
*/
- public SimpleDecoderAudioRenderer(Handler eventHandler,
- AudioRendererEventListener eventListener, AudioCapabilities audioCapabilities,
+ public SimpleDecoderAudioRenderer(Handler eventHandler, AudioRendererEventListener eventListener,
+ AudioCapabilities audioCapabilities, DrmSessionManager drmSessionManager,
+ boolean playClearSamplesWithoutKeys, AudioProcessor... audioProcessors) {
+ this(eventHandler, eventListener, drmSessionManager,
+ playClearSamplesWithoutKeys, new DefaultAudioSink(audioCapabilities, audioProcessors));
+ }
+
+ /**
+ * @param eventHandler A handler to use when delivering events to {@code eventListener}. May be
+ * null if delivery of events is not required.
+ * @param eventListener A listener of events. May be null if delivery of events is not required.
+ * @param drmSessionManager For use with encrypted media. May be null if support for encrypted
+ * media is not required.
+ * @param playClearSamplesWithoutKeys Encrypted media may contain clear (un-encrypted) regions.
+ * For example a media file may start with a short clear region so as to allow playback to
+ * begin in parallel with key acquisition. This parameter specifies whether the renderer is
+ * permitted to play clear regions of encrypted media files before {@code drmSessionManager}
+ * has obtained the keys necessary to decrypt encrypted regions of the media.
+ * @param audioSink The sink to which audio will be output.
+ */
+ public SimpleDecoderAudioRenderer(Handler eventHandler, AudioRendererEventListener eventListener,
DrmSessionManager drmSessionManager, boolean playClearSamplesWithoutKeys,
- AudioProcessor... audioProcessors) {
+ AudioSink audioSink) {
super(C.TRACK_TYPE_AUDIO);
this.drmSessionManager = drmSessionManager;
this.playClearSamplesWithoutKeys = playClearSamplesWithoutKeys;
eventDispatcher = new EventDispatcher(eventHandler, eventListener);
- audioTrack = new AudioTrack(audioCapabilities, audioProcessors, new AudioTrackListener());
+ this.audioSink = audioSink;
+ audioSink.setListener(new AudioSinkListener());
formatHolder = new FormatHolder();
flagsOnlyBuffer = DecoderInputBuffer.newFlagsOnlyInstance();
decoderReinitializationState = REINITIALIZATION_STATE_NONE;
@@ -159,8 +206,8 @@ public abstract class SimpleDecoderAudioRenderer extends BaseRenderer implements
@Override
public final int supportsFormat(Format format) {
- int formatSupport = supportsFormatInternal(format);
- if (formatSupport == FORMAT_UNSUPPORTED_TYPE || formatSupport == FORMAT_UNSUPPORTED_SUBTYPE) {
+ int formatSupport = supportsFormatInternal(drmSessionManager, format);
+ if (formatSupport <= FORMAT_UNSUPPORTED_DRM) {
return formatSupport;
}
int tunnelingSupport = Util.SDK_INT >= 21 ? TUNNELING_SUPPORTED : TUNNELING_NOT_SUPPORTED;
@@ -171,17 +218,29 @@ public abstract class SimpleDecoderAudioRenderer extends BaseRenderer implements
* Returns the {@link #FORMAT_SUPPORT_MASK} component of the return value for
* {@link #supportsFormat(Format)}.
*
+ * @param drmSessionManager The renderer's {@link DrmSessionManager}.
* @param format The format.
* @return The extent to which the renderer supports the format itself.
*/
- protected abstract int supportsFormatInternal(Format format);
+ protected abstract int supportsFormatInternal(DrmSessionManager drmSessionManager,
+ Format format);
+
+ /**
+ * Returns whether the audio sink can accept audio in the specified encoding.
+ *
+ * @param encoding The audio encoding.
+ * @return Whether the audio sink can accept audio in the specified encoding.
+ */
+ protected final boolean supportsOutputEncoding(@C.Encoding int encoding) {
+ return audioSink.isEncodingSupported(encoding);
+ }
@Override
public void render(long positionUs, long elapsedRealtimeUs) throws ExoPlaybackException {
if (outputStreamEnded) {
try {
- audioTrack.playToEndOfStream();
- } catch (AudioTrack.WriteException e) {
+ audioSink.playToEndOfStream();
+ } catch (AudioSink.WriteException e) {
throw ExoPlaybackException.createForRenderer(e, getIndex());
}
return;
@@ -216,8 +275,8 @@ public abstract class SimpleDecoderAudioRenderer extends BaseRenderer implements
while (drainOutputBuffer()) {}
while (feedInputBuffer()) {}
TraceUtil.endSection();
- } catch (AudioDecoderException | AudioTrack.ConfigurationException
- | AudioTrack.InitializationException | AudioTrack.WriteException e) {
+ } catch (AudioDecoderException | AudioSink.ConfigurationException
+ | AudioSink.InitializationException | AudioSink.WriteException e) {
throw ExoPlaybackException.createForRenderer(e, getIndex());
}
decoderCounters.ensureUpdated();
@@ -230,21 +289,21 @@ public abstract class SimpleDecoderAudioRenderer extends BaseRenderer implements
* order to spatialize the audio channels. For this use case, any {@link Virtualizer} instances
* should be released in {@link #onDisabled()} (if not before).
*
- * @see AudioTrack.Listener#onAudioSessionId(int)
+ * @see AudioSink.Listener#onAudioSessionId(int)
*/
protected void onAudioSessionId(int audioSessionId) {
// Do nothing.
}
/**
- * @see AudioTrack.Listener#onPositionDiscontinuity()
+ * @see AudioSink.Listener#onPositionDiscontinuity()
*/
protected void onAudioTrackPositionDiscontinuity() {
// Do nothing.
}
/**
- * @see AudioTrack.Listener#onUnderrun(int, long, long)
+ * @see AudioSink.Listener#onUnderrun(int, long, long)
*/
protected void onAudioTrackUnderrun(int bufferSize, long bufferSizeMs,
long elapsedSinceLastFeedMs) {
@@ -278,8 +337,8 @@ public abstract class SimpleDecoderAudioRenderer extends BaseRenderer implements
}
private boolean drainOutputBuffer() throws ExoPlaybackException, AudioDecoderException,
- AudioTrack.ConfigurationException, AudioTrack.InitializationException,
- AudioTrack.WriteException {
+ AudioSink.ConfigurationException, AudioSink.InitializationException,
+ AudioSink.WriteException {
if (outputBuffer == null) {
outputBuffer = decoder.dequeueOutputBuffer();
if (outputBuffer == null) {
@@ -305,12 +364,12 @@ public abstract class SimpleDecoderAudioRenderer extends BaseRenderer implements
if (audioTrackNeedsConfigure) {
Format outputFormat = getOutputFormat();
- audioTrack.configure(outputFormat.sampleMimeType, outputFormat.channelCount,
- outputFormat.sampleRate, outputFormat.pcmEncoding, 0);
+ audioSink.configure(outputFormat.pcmEncoding, outputFormat.channelCount,
+ outputFormat.sampleRate, 0, null, encoderDelay, encoderPadding);
audioTrackNeedsConfigure = false;
}
- if (audioTrack.handleBuffer(outputBuffer.data, outputBuffer.timeUs)) {
+ if (audioSink.handleBuffer(outputBuffer.data, outputBuffer.timeUs)) {
decoderCounters.renderedOutputBufferCount++;
outputBuffer.release();
outputBuffer = null;
@@ -369,6 +428,7 @@ public abstract class SimpleDecoderAudioRenderer extends BaseRenderer implements
return false;
}
inputBuffer.flip();
+ onQueueInputBuffer(inputBuffer);
decoder.queueInputBuffer(inputBuffer);
decoderReceivedBuffers = true;
decoderCounters.inputBufferCount++;
@@ -390,9 +450,9 @@ public abstract class SimpleDecoderAudioRenderer extends BaseRenderer implements
private void processEndOfStream() throws ExoPlaybackException {
outputStreamEnded = true;
try {
- audioTrack.playToEndOfStream();
- } catch (AudioTrack.WriteException e) {
- throw ExoPlaybackException.createForRenderer(drmSession.getError(), getIndex());
+ audioSink.playToEndOfStream();
+ } catch (AudioSink.WriteException e) {
+ throw ExoPlaybackException.createForRenderer(e, getIndex());
}
}
@@ -414,34 +474,31 @@ public abstract class SimpleDecoderAudioRenderer extends BaseRenderer implements
@Override
public boolean isEnded() {
- return outputStreamEnded && audioTrack.isEnded();
+ return outputStreamEnded && audioSink.isEnded();
}
@Override
public boolean isReady() {
- return audioTrack.hasPendingData()
+ return audioSink.hasPendingData()
|| (inputFormat != null && !waitingForKeys && (isSourceReady() || outputBuffer != null));
}
@Override
public long getPositionUs() {
- long newCurrentPositionUs = audioTrack.getCurrentPositionUs(isEnded());
- if (newCurrentPositionUs != AudioTrack.CURRENT_POSITION_NOT_SET) {
- currentPositionUs = allowPositionDiscontinuity ? newCurrentPositionUs
- : Math.max(currentPositionUs, newCurrentPositionUs);
- allowPositionDiscontinuity = false;
+ if (getState() == STATE_STARTED) {
+ updateCurrentPosition();
}
return currentPositionUs;
}
@Override
public PlaybackParameters setPlaybackParameters(PlaybackParameters playbackParameters) {
- return audioTrack.setPlaybackParameters(playbackParameters);
+ return audioSink.setPlaybackParameters(playbackParameters);
}
@Override
public PlaybackParameters getPlaybackParameters() {
- return audioTrack.getPlaybackParameters();
+ return audioSink.getPlaybackParameters();
}
@Override
@@ -450,16 +507,17 @@ public abstract class SimpleDecoderAudioRenderer extends BaseRenderer implements
eventDispatcher.enabled(decoderCounters);
int tunnelingAudioSessionId = getConfiguration().tunnelingAudioSessionId;
if (tunnelingAudioSessionId != C.AUDIO_SESSION_ID_UNSET) {
- audioTrack.enableTunnelingV21(tunnelingAudioSessionId);
+ audioSink.enableTunnelingV21(tunnelingAudioSessionId);
} else {
- audioTrack.disableTunneling();
+ audioSink.disableTunneling();
}
}
@Override
protected void onPositionReset(long positionUs, boolean joining) throws ExoPlaybackException {
- audioTrack.reset();
+ audioSink.reset();
currentPositionUs = positionUs;
+ allowFirstBufferPositionDiscontinuity = true;
allowPositionDiscontinuity = true;
inputStreamEnded = false;
outputStreamEnded = false;
@@ -470,12 +528,13 @@ public abstract class SimpleDecoderAudioRenderer extends BaseRenderer implements
@Override
protected void onStarted() {
- audioTrack.play();
+ audioSink.play();
}
@Override
protected void onStopped() {
- audioTrack.pause();
+ updateCurrentPosition();
+ audioSink.pause();
}
@Override
@@ -485,7 +544,7 @@ public abstract class SimpleDecoderAudioRenderer extends BaseRenderer implements
waitingForKeys = false;
try {
releaseDecoder();
- audioTrack.release();
+ audioSink.release();
} finally {
try {
if (drmSession != null) {
@@ -506,6 +565,22 @@ public abstract class SimpleDecoderAudioRenderer extends BaseRenderer implements
}
}
+ @Override
+ public void handleMessage(int messageType, Object message) throws ExoPlaybackException {
+ switch (messageType) {
+ case C.MSG_SET_VOLUME:
+ audioSink.setVolume((Float) message);
+ break;
+ case C.MSG_SET_AUDIO_ATTRIBUTES:
+ AudioAttributes audioAttributes = (AudioAttributes) message;
+ audioSink.setAudioAttributes(audioAttributes);
+ break;
+ default:
+ super.handleMessage(messageType, message);
+ break;
+ }
+ }
+
private void maybeInitDecoder() throws ExoPlaybackException {
if (decoder != null) {
return;
@@ -518,10 +593,12 @@ public abstract class SimpleDecoderAudioRenderer extends BaseRenderer implements
if (mediaCrypto == null) {
DrmSessionException drmError = drmSession.getError();
if (drmError != null) {
- throw ExoPlaybackException.createForRenderer(drmError, getIndex());
+ // Continue for now. We may be able to avoid failure if the session recovers, or if a new
+ // input format causes the session to be replaced before it's used.
+ } else {
+ // The drm session isn't open yet.
+ return;
}
- // The drm session isn't open yet.
- return;
}
}
@@ -585,26 +662,36 @@ public abstract class SimpleDecoderAudioRenderer extends BaseRenderer implements
audioTrackNeedsConfigure = true;
}
+ encoderDelay = newFormat.encoderDelay;
+ encoderPadding = newFormat.encoderPadding;
+
eventDispatcher.inputFormatChanged(newFormat);
}
- @Override
- public void handleMessage(int messageType, Object message) throws ExoPlaybackException {
- switch (messageType) {
- case C.MSG_SET_VOLUME:
- audioTrack.setVolume((Float) message);
- break;
- case C.MSG_SET_AUDIO_ATTRIBUTES:
- AudioAttributes audioAttributes = (AudioAttributes) message;
- audioTrack.setAudioAttributes(audioAttributes);
- break;
- default:
- super.handleMessage(messageType, message);
- break;
+ private void onQueueInputBuffer(DecoderInputBuffer buffer) {
+ if (allowFirstBufferPositionDiscontinuity && !buffer.isDecodeOnly()) {
+ // TODO: Remove this hack once we have a proper fix for [Internal: b/71876314].
+ // Allow the position to jump if the first presentable input buffer has a timestamp that
+ // differs significantly from what was expected.
+ if (Math.abs(buffer.timeUs - currentPositionUs) > 500000) {
+ currentPositionUs = buffer.timeUs;
+ }
+ allowFirstBufferPositionDiscontinuity = false;
}
}
- private final class AudioTrackListener implements AudioTrack.Listener {
+ private void updateCurrentPosition() {
+ long newCurrentPositionUs = audioSink.getCurrentPositionUs(isEnded());
+ if (newCurrentPositionUs != AudioSink.CURRENT_POSITION_NOT_SET) {
+ currentPositionUs =
+ allowPositionDiscontinuity
+ ? newCurrentPositionUs
+ : Math.max(currentPositionUs, newCurrentPositionUs);
+ allowPositionDiscontinuity = false;
+ }
+ }
+
+ private final class AudioSinkListener implements AudioSink.Listener {
@Override
public void onAudioSessionId(int audioSessionId) {
diff --git a/library/core/src/main/java/com/google/android/exoplayer2/audio/Sonic.java b/library/core/src/main/java/com/google/android/exoplayer2/audio/Sonic.java
index ef7877ae1e..0bf6baa4d0 100644
--- a/library/core/src/main/java/com/google/android/exoplayer2/audio/Sonic.java
+++ b/library/core/src/main/java/com/google/android/exoplayer2/audio/Sonic.java
@@ -27,32 +27,29 @@ import java.util.Arrays;
*/
/* package */ final class Sonic {
- private static final boolean USE_CHORD_PITCH = false;
private static final int MINIMUM_PITCH = 65;
private static final int MAXIMUM_PITCH = 400;
private static final int AMDF_FREQUENCY = 4000;
- private final int sampleRate;
- private final int numChannels;
+ private final int inputSampleRateHz;
+ private final int channelCount;
+ private final float speed;
+ private final float pitch;
+ private final float rate;
private final int minPeriod;
private final int maxPeriod;
- private final int maxRequired;
+ private final int maxRequiredFrameCount;
private final short[] downSampleBuffer;
- private int inputBufferSize;
private short[] inputBuffer;
- private int outputBufferSize;
+ private int inputFrameCount;
private short[] outputBuffer;
- private int pitchBufferSize;
+ private int outputFrameCount;
private short[] pitchBuffer;
+ private int pitchFrameCount;
private int oldRatePosition;
private int newRatePosition;
- private float speed;
- private float pitch;
- private int numInputSamples;
- private int numOutputSamples;
- private int numPitchSamples;
- private int remainingInputToCopy;
+ private int remainingInputToCopyFrameCount;
private int prevPeriod;
private int prevMinDiff;
private int minDiff;
@@ -61,55 +58,26 @@ import java.util.Arrays;
/**
* Creates a new Sonic audio stream processor.
*
- * @param sampleRate The sample rate of input audio.
- * @param numChannels The number of channels in the input audio.
+ * @param inputSampleRateHz The sample rate of input audio, in hertz.
+ * @param channelCount The number of channels in the input audio.
+ * @param speed The speedup factor for output audio.
+ * @param pitch The pitch factor for output audio.
+ * @param outputSampleRateHz The sample rate for output audio, in hertz.
*/
- public Sonic(int sampleRate, int numChannels) {
- this.sampleRate = sampleRate;
- this.numChannels = numChannels;
- minPeriod = sampleRate / MAXIMUM_PITCH;
- maxPeriod = sampleRate / MINIMUM_PITCH;
- maxRequired = 2 * maxPeriod;
- downSampleBuffer = new short[maxRequired];
- inputBufferSize = maxRequired;
- inputBuffer = new short[maxRequired * numChannels];
- outputBufferSize = maxRequired;
- outputBuffer = new short[maxRequired * numChannels];
- pitchBufferSize = maxRequired;
- pitchBuffer = new short[maxRequired * numChannels];
- oldRatePosition = 0;
- newRatePosition = 0;
- prevPeriod = 0;
- speed = 1.0f;
- pitch = 1.0f;
- }
-
- /**
- * Sets the output speed.
- */
- public void setSpeed(float speed) {
+ public Sonic(
+ int inputSampleRateHz, int channelCount, float speed, float pitch, int outputSampleRateHz) {
+ this.inputSampleRateHz = inputSampleRateHz;
+ this.channelCount = channelCount;
this.speed = speed;
- }
-
- /**
- * Gets the output speed.
- */
- public float getSpeed() {
- return speed;
- }
-
- /**
- * Sets the output pitch.
- */
- public void setPitch(float pitch) {
this.pitch = pitch;
- }
-
- /**
- * Gets the output pitch.
- */
- public float getPitch() {
- return pitch;
+ rate = (float) inputSampleRateHz / outputSampleRateHz;
+ minPeriod = inputSampleRateHz / MAXIMUM_PITCH;
+ maxPeriod = inputSampleRateHz / MINIMUM_PITCH;
+ maxRequiredFrameCount = 2 * maxPeriod;
+ downSampleBuffer = new short[maxRequiredFrameCount];
+ inputBuffer = new short[maxRequiredFrameCount * channelCount];
+ outputBuffer = new short[maxRequiredFrameCount * channelCount];
+ pitchBuffer = new short[maxRequiredFrameCount * channelCount];
}
/**
@@ -119,11 +87,11 @@ import java.util.Arrays;
* @param buffer A {@link ShortBuffer} containing input data between its position and limit.
*/
public void queueInput(ShortBuffer buffer) {
- int samplesToWrite = buffer.remaining() / numChannels;
- int bytesToWrite = samplesToWrite * numChannels * 2;
- enlargeInputBufferIfNeeded(samplesToWrite);
- buffer.get(inputBuffer, numInputSamples * numChannels, bytesToWrite / 2);
- numInputSamples += samplesToWrite;
+ int framesToWrite = buffer.remaining() / channelCount;
+ int bytesToWrite = framesToWrite * channelCount * 2;
+ inputBuffer = ensureSpaceForAdditionalFrames(inputBuffer, inputFrameCount, framesToWrite);
+ buffer.get(inputBuffer, inputFrameCount * channelCount, bytesToWrite / 2);
+ inputFrameCount += framesToWrite;
processStreamInput();
}
@@ -134,11 +102,15 @@ import java.util.Arrays;
* @param buffer A {@link ShortBuffer} into which output will be written.
*/
public void getOutput(ShortBuffer buffer) {
- int samplesToRead = Math.min(buffer.remaining() / numChannels, numOutputSamples);
- buffer.put(outputBuffer, 0, samplesToRead * numChannels);
- numOutputSamples -= samplesToRead;
- System.arraycopy(outputBuffer, samplesToRead * numChannels, outputBuffer, 0,
- numOutputSamples * numChannels);
+ int framesToRead = Math.min(buffer.remaining() / channelCount, outputFrameCount);
+ buffer.put(outputBuffer, 0, framesToRead * channelCount);
+ outputFrameCount -= framesToRead;
+ System.arraycopy(
+ outputBuffer,
+ framesToRead * channelCount,
+ outputBuffer,
+ 0,
+ outputFrameCount * channelCount);
}
/**
@@ -146,79 +118,105 @@ import java.util.Arrays;
* added to the output, but flushing in the middle of words could introduce distortion.
*/
public void queueEndOfStream() {
- int remainingSamples = numInputSamples;
+ int remainingFrameCount = inputFrameCount;
float s = speed / pitch;
- int expectedOutputSamples =
- numOutputSamples + (int) ((remainingSamples / s + numPitchSamples) / pitch + 0.5f);
+ float r = rate * pitch;
+ int expectedOutputFrames =
+ outputFrameCount + (int) ((remainingFrameCount / s + pitchFrameCount) / r + 0.5f);
// Add enough silence to flush both input and pitch buffers.
- enlargeInputBufferIfNeeded(remainingSamples + 2 * maxRequired);
- for (int xSample = 0; xSample < 2 * maxRequired * numChannels; xSample++) {
- inputBuffer[remainingSamples * numChannels + xSample] = 0;
+ inputBuffer =
+ ensureSpaceForAdditionalFrames(
+ inputBuffer, inputFrameCount, remainingFrameCount + 2 * maxRequiredFrameCount);
+ for (int xSample = 0; xSample < 2 * maxRequiredFrameCount * channelCount; xSample++) {
+ inputBuffer[remainingFrameCount * channelCount + xSample] = 0;
}
- numInputSamples += 2 * maxRequired;
+ inputFrameCount += 2 * maxRequiredFrameCount;
processStreamInput();
- // Throw away any extra samples we generated due to the silence we added.
- if (numOutputSamples > expectedOutputSamples) {
- numOutputSamples = expectedOutputSamples;
+ // Throw away any extra frames we generated due to the silence we added.
+ if (outputFrameCount > expectedOutputFrames) {
+ outputFrameCount = expectedOutputFrames;
}
// Empty input and pitch buffers.
- numInputSamples = 0;
- remainingInputToCopy = 0;
- numPitchSamples = 0;
+ inputFrameCount = 0;
+ remainingInputToCopyFrameCount = 0;
+ pitchFrameCount = 0;
}
- /**
- * Returns the number of output samples that can be read with {@link #getOutput(ShortBuffer)}.
- */
- public int getSamplesAvailable() {
- return numOutputSamples;
+ /** Clears state in preparation for receiving a new stream of input buffers. */
+ public void flush() {
+ inputFrameCount = 0;
+ outputFrameCount = 0;
+ pitchFrameCount = 0;
+ oldRatePosition = 0;
+ newRatePosition = 0;
+ remainingInputToCopyFrameCount = 0;
+ prevPeriod = 0;
+ prevMinDiff = 0;
+ minDiff = 0;
+ maxDiff = 0;
+ }
+
+ /** Returns the number of output frames that can be read with {@link #getOutput(ShortBuffer)}. */
+ public int getFramesAvailable() {
+ return outputFrameCount;
}
// Internal methods.
- private void enlargeOutputBufferIfNeeded(int numSamples) {
- if (numOutputSamples + numSamples > outputBufferSize) {
- outputBufferSize += (outputBufferSize / 2) + numSamples;
- outputBuffer = Arrays.copyOf(outputBuffer, outputBufferSize * numChannels);
+ /**
+ * Returns {@code buffer} or a copy of it, such that there is enough space in the returned buffer
+ * to store {@code additionalFrameCount} additional frames.
+ *
+ * @param buffer The buffer.
+ * @param frameCount The number of frames already in the buffer.
+ * @param additionalFrameCount The number of additional frames that need to be stored in the
+ * buffer.
+ * @return A buffer with enough space for the additional frames.
+ */
+ private short[] ensureSpaceForAdditionalFrames(
+ short[] buffer, int frameCount, int additionalFrameCount) {
+ int currentCapacityFrames = buffer.length / channelCount;
+ if (frameCount + additionalFrameCount <= currentCapacityFrames) {
+ return buffer;
+ } else {
+ int newCapacityFrames = 3 * currentCapacityFrames / 2 + additionalFrameCount;
+ return Arrays.copyOf(buffer, newCapacityFrames * channelCount);
}
}
- private void enlargeInputBufferIfNeeded(int numSamples) {
- if (numInputSamples + numSamples > inputBufferSize) {
- inputBufferSize += (inputBufferSize / 2) + numSamples;
- inputBuffer = Arrays.copyOf(inputBuffer, inputBufferSize * numChannels);
- }
+ private void removeProcessedInputFrames(int positionFrames) {
+ int remainingFrames = inputFrameCount - positionFrames;
+ System.arraycopy(
+ inputBuffer, positionFrames * channelCount, inputBuffer, 0, remainingFrames * channelCount);
+ inputFrameCount = remainingFrames;
}
- private void removeProcessedInputSamples(int position) {
- int remainingSamples = numInputSamples - position;
- System.arraycopy(inputBuffer, position * numChannels, inputBuffer, 0,
- remainingSamples * numChannels);
- numInputSamples = remainingSamples;
+ private void copyToOutput(short[] samples, int positionFrames, int frameCount) {
+ outputBuffer = ensureSpaceForAdditionalFrames(outputBuffer, outputFrameCount, frameCount);
+ System.arraycopy(
+ samples,
+ positionFrames * channelCount,
+ outputBuffer,
+ outputFrameCount * channelCount,
+ frameCount * channelCount);
+ outputFrameCount += frameCount;
}
- private void copyToOutput(short[] samples, int position, int numSamples) {
- enlargeOutputBufferIfNeeded(numSamples);
- System.arraycopy(samples, position * numChannels, outputBuffer, numOutputSamples * numChannels,
- numSamples * numChannels);
- numOutputSamples += numSamples;
- }
-
- private int copyInputToOutput(int position) {
- int numSamples = Math.min(maxRequired, remainingInputToCopy);
- copyToOutput(inputBuffer, position, numSamples);
- remainingInputToCopy -= numSamples;
- return numSamples;
+ private int copyInputToOutput(int positionFrames) {
+ int frameCount = Math.min(maxRequiredFrameCount, remainingInputToCopyFrameCount);
+ copyToOutput(inputBuffer, positionFrames, frameCount);
+ remainingInputToCopyFrameCount -= frameCount;
+ return frameCount;
}
private void downSampleInput(short[] samples, int position, int skip) {
// If skip is greater than one, average skip samples together and write them to the down-sample
- // buffer. If numChannels is greater than one, mix the channels together as we down sample.
- int numSamples = maxRequired / skip;
- int samplesPerValue = numChannels * skip;
- position *= numChannels;
- for (int i = 0; i < numSamples; i++) {
+ // buffer. If channelCount is greater than one, mix the channels together as we down sample.
+ int frameCount = maxRequiredFrameCount / skip;
+ int samplesPerValue = channelCount * skip;
+ position *= channelCount;
+ for (int i = 0; i < frameCount; i++) {
int value = 0;
for (int j = 0; j < samplesPerValue; j++) {
value += samples[position + i * samplesPerValue + j];
@@ -235,13 +233,13 @@ import java.util.Arrays;
int worstPeriod = 255;
int minDiff = 1;
int maxDiff = 0;
- position *= numChannels;
+ position *= channelCount;
for (int period = minPeriod; period <= maxPeriod; period++) {
int diff = 0;
for (int i = 0; i < period; i++) {
short sVal = samples[position + i];
short pVal = samples[position + period + i];
- diff += sVal >= pVal ? sVal - pVal : pVal - sVal;
+ diff += Math.abs(sVal - pVal);
}
// Note that the highest number of samples we add into diff will be less than 256, since we
// skip samples. Thus, diff is a 24 bit number, and we can safely multiply by numSamples
@@ -264,36 +262,30 @@ import java.util.Arrays;
* Returns whether the previous pitch period estimate is a better approximation, which can occur
* at the abrupt end of voiced words.
*/
- private boolean previousPeriodBetter(int minDiff, int maxDiff, boolean preferNewPeriod) {
+ private boolean previousPeriodBetter(int minDiff, int maxDiff) {
if (minDiff == 0 || prevPeriod == 0) {
return false;
}
- if (preferNewPeriod) {
- if (maxDiff > minDiff * 3) {
- // Got a reasonable match this period
- return false;
- }
- if (minDiff * 2 <= prevMinDiff * 3) {
- // Mismatch is not that much greater this period
- return false;
- }
- } else {
- if (minDiff <= prevMinDiff) {
- return false;
- }
+ if (maxDiff > minDiff * 3) {
+ // Got a reasonable match this period.
+ return false;
+ }
+ if (minDiff * 2 <= prevMinDiff * 3) {
+ // Mismatch is not that much greater this period.
+ return false;
}
return true;
}
- private int findPitchPeriod(short[] samples, int position, boolean preferNewPeriod) {
+ private int findPitchPeriod(short[] samples, int position) {
// Find the pitch period. This is a critical step, and we may have to try multiple ways to get a
// good answer. This version uses AMDF. To improve speed, we down sample by an integer factor
// to get in the 11 kHz range, and then do it again with a narrower frequency range without down
// sampling.
int period;
int retPeriod;
- int skip = sampleRate > AMDF_FREQUENCY ? sampleRate / AMDF_FREQUENCY : 1;
- if (numChannels == 1 && skip == 1) {
+ int skip = inputSampleRateHz > AMDF_FREQUENCY ? inputSampleRateHz / AMDF_FREQUENCY : 1;
+ if (channelCount == 1 && skip == 1) {
period = findPitchPeriodInRange(samples, position, minPeriod, maxPeriod);
} else {
downSampleInput(samples, position, skip);
@@ -308,7 +300,7 @@ import java.util.Arrays;
if (maxP > maxPeriod) {
maxP = maxPeriod;
}
- if (numChannels == 1) {
+ if (channelCount == 1) {
period = findPitchPeriodInRange(samples, position, minP, maxP);
} else {
downSampleInput(samples, position, 1);
@@ -316,7 +308,7 @@ import java.util.Arrays;
}
}
}
- if (previousPeriodBetter(minDiff, maxDiff, preferNewPeriod)) {
+ if (previousPeriodBetter(minDiff, maxDiff)) {
retPeriod = prevPeriod;
} else {
retPeriod = period;
@@ -326,56 +318,35 @@ import java.util.Arrays;
return retPeriod;
}
- private void moveNewSamplesToPitchBuffer(int originalNumOutputSamples) {
- int numSamples = numOutputSamples - originalNumOutputSamples;
- if (numPitchSamples + numSamples > pitchBufferSize) {
- pitchBufferSize += (pitchBufferSize / 2) + numSamples;
- pitchBuffer = Arrays.copyOf(pitchBuffer, pitchBufferSize * numChannels);
- }
- System.arraycopy(outputBuffer, originalNumOutputSamples * numChannels, pitchBuffer,
- numPitchSamples * numChannels, numSamples * numChannels);
- numOutputSamples = originalNumOutputSamples;
- numPitchSamples += numSamples;
+ private void moveNewSamplesToPitchBuffer(int originalOutputFrameCount) {
+ int frameCount = outputFrameCount - originalOutputFrameCount;
+ pitchBuffer = ensureSpaceForAdditionalFrames(pitchBuffer, pitchFrameCount, frameCount);
+ System.arraycopy(
+ outputBuffer,
+ originalOutputFrameCount * channelCount,
+ pitchBuffer,
+ pitchFrameCount * channelCount,
+ frameCount * channelCount);
+ outputFrameCount = originalOutputFrameCount;
+ pitchFrameCount += frameCount;
}
- private void removePitchSamples(int numSamples) {
- if (numSamples == 0) {
+ private void removePitchFrames(int frameCount) {
+ if (frameCount == 0) {
return;
}
- System.arraycopy(pitchBuffer, numSamples * numChannels, pitchBuffer, 0,
- (numPitchSamples - numSamples) * numChannels);
- numPitchSamples -= numSamples;
- }
-
- private void adjustPitch(int originalNumOutputSamples) {
- // Latency due to pitch changes could be reduced by looking at past samples to determine pitch,
- // rather than future.
- if (numOutputSamples == originalNumOutputSamples) {
- return;
- }
- moveNewSamplesToPitchBuffer(originalNumOutputSamples);
- int position = 0;
- while (numPitchSamples - position >= maxRequired) {
- int period = findPitchPeriod(pitchBuffer, position, false);
- int newPeriod = (int) (period / pitch);
- enlargeOutputBufferIfNeeded(newPeriod);
- if (pitch >= 1.0f) {
- overlapAdd(newPeriod, numChannels, outputBuffer, numOutputSamples, pitchBuffer, position,
- pitchBuffer, position + period - newPeriod);
- } else {
- int separation = newPeriod - period;
- overlapAddWithSeparation(period, numChannels, separation, outputBuffer, numOutputSamples,
- pitchBuffer, position, pitchBuffer, position);
- }
- numOutputSamples += newPeriod;
- position += period;
- }
- removePitchSamples(position);
+ System.arraycopy(
+ pitchBuffer,
+ frameCount * channelCount,
+ pitchBuffer,
+ 0,
+ (pitchFrameCount - frameCount) * channelCount);
+ pitchFrameCount -= frameCount;
}
private short interpolate(short[] in, int inPos, int oldSampleRate, int newSampleRate) {
short left = in[inPos];
- short right = in[inPos + numChannels];
+ short right = in[inPos + channelCount];
int position = newRatePosition * oldSampleRate;
int leftPosition = oldRatePosition * newSampleRate;
int rightPosition = (oldRatePosition + 1) * newSampleRate;
@@ -384,28 +355,30 @@ import java.util.Arrays;
return (short) ((ratio * left + (width - ratio) * right) / width);
}
- private void adjustRate(float rate, int originalNumOutputSamples) {
- if (numOutputSamples == originalNumOutputSamples) {
+ private void adjustRate(float rate, int originalOutputFrameCount) {
+ if (outputFrameCount == originalOutputFrameCount) {
return;
}
- int newSampleRate = (int) (sampleRate / rate);
- int oldSampleRate = sampleRate;
+ int newSampleRate = (int) (inputSampleRateHz / rate);
+ int oldSampleRate = inputSampleRateHz;
// Set these values to help with the integer math.
while (newSampleRate > (1 << 14) || oldSampleRate > (1 << 14)) {
newSampleRate /= 2;
oldSampleRate /= 2;
}
- moveNewSamplesToPitchBuffer(originalNumOutputSamples);
+ moveNewSamplesToPitchBuffer(originalOutputFrameCount);
// Leave at least one pitch sample in the buffer.
- for (int position = 0; position < numPitchSamples - 1; position++) {
+ for (int position = 0; position < pitchFrameCount - 1; position++) {
while ((oldRatePosition + 1) * newSampleRate > newRatePosition * oldSampleRate) {
- enlargeOutputBufferIfNeeded(1);
- for (int i = 0; i < numChannels; i++) {
- outputBuffer[numOutputSamples * numChannels + i] =
- interpolate(pitchBuffer, position * numChannels + i, oldSampleRate, newSampleRate);
+ outputBuffer =
+ ensureSpaceForAdditionalFrames(
+ outputBuffer, outputFrameCount, /* additionalFrameCount= */ 1);
+ for (int i = 0; i < channelCount; i++) {
+ outputBuffer[outputFrameCount * channelCount + i] =
+ interpolate(pitchBuffer, position * channelCount + i, oldSampleRate, newSampleRate);
}
newRatePosition++;
- numOutputSamples++;
+ outputFrameCount++;
}
oldRatePosition++;
if (oldRatePosition == oldSampleRate) {
@@ -414,119 +387,117 @@ import java.util.Arrays;
newRatePosition = 0;
}
}
- removePitchSamples(numPitchSamples - 1);
+ removePitchFrames(pitchFrameCount - 1);
}
private int skipPitchPeriod(short[] samples, int position, float speed, int period) {
// Skip over a pitch period, and copy period/speed samples to the output.
- int newSamples;
+ int newFrameCount;
if (speed >= 2.0f) {
- newSamples = (int) (period / (speed - 1.0f));
+ newFrameCount = (int) (period / (speed - 1.0f));
} else {
- newSamples = period;
- remainingInputToCopy = (int) (period * (2.0f - speed) / (speed - 1.0f));
+ newFrameCount = period;
+ remainingInputToCopyFrameCount = (int) (period * (2.0f - speed) / (speed - 1.0f));
}
- enlargeOutputBufferIfNeeded(newSamples);
- overlapAdd(newSamples, numChannels, outputBuffer, numOutputSamples, samples, position, samples,
+ outputBuffer = ensureSpaceForAdditionalFrames(outputBuffer, outputFrameCount, newFrameCount);
+ overlapAdd(
+ newFrameCount,
+ channelCount,
+ outputBuffer,
+ outputFrameCount,
+ samples,
+ position,
+ samples,
position + period);
- numOutputSamples += newSamples;
- return newSamples;
+ outputFrameCount += newFrameCount;
+ return newFrameCount;
}
private int insertPitchPeriod(short[] samples, int position, float speed, int period) {
// Insert a pitch period, and determine how much input to copy directly.
- int newSamples;
+ int newFrameCount;
if (speed < 0.5f) {
- newSamples = (int) (period * speed / (1.0f - speed));
+ newFrameCount = (int) (period * speed / (1.0f - speed));
} else {
- newSamples = period;
- remainingInputToCopy = (int) (period * (2.0f * speed - 1.0f) / (1.0f - speed));
+ newFrameCount = period;
+ remainingInputToCopyFrameCount = (int) (period * (2.0f * speed - 1.0f) / (1.0f - speed));
}
- enlargeOutputBufferIfNeeded(period + newSamples);
- System.arraycopy(samples, position * numChannels, outputBuffer, numOutputSamples * numChannels,
- period * numChannels);
- overlapAdd(newSamples, numChannels, outputBuffer, numOutputSamples + period, samples,
- position + period, samples, position);
- numOutputSamples += period + newSamples;
- return newSamples;
+ outputBuffer =
+ ensureSpaceForAdditionalFrames(outputBuffer, outputFrameCount, period + newFrameCount);
+ System.arraycopy(
+ samples,
+ position * channelCount,
+ outputBuffer,
+ outputFrameCount * channelCount,
+ period * channelCount);
+ overlapAdd(
+ newFrameCount,
+ channelCount,
+ outputBuffer,
+ outputFrameCount + period,
+ samples,
+ position + period,
+ samples,
+ position);
+ outputFrameCount += period + newFrameCount;
+ return newFrameCount;
}
private void changeSpeed(float speed) {
- if (numInputSamples < maxRequired) {
+ if (inputFrameCount < maxRequiredFrameCount) {
return;
}
- int numSamples = numInputSamples;
- int position = 0;
+ int frameCount = inputFrameCount;
+ int positionFrames = 0;
do {
- if (remainingInputToCopy > 0) {
- position += copyInputToOutput(position);
+ if (remainingInputToCopyFrameCount > 0) {
+ positionFrames += copyInputToOutput(positionFrames);
} else {
- int period = findPitchPeriod(inputBuffer, position, true);
+ int period = findPitchPeriod(inputBuffer, positionFrames);
if (speed > 1.0) {
- position += period + skipPitchPeriod(inputBuffer, position, speed, period);
+ positionFrames += period + skipPitchPeriod(inputBuffer, positionFrames, speed, period);
} else {
- position += insertPitchPeriod(inputBuffer, position, speed, period);
+ positionFrames += insertPitchPeriod(inputBuffer, positionFrames, speed, period);
}
}
- } while (position + maxRequired <= numSamples);
- removeProcessedInputSamples(position);
+ } while (positionFrames + maxRequiredFrameCount <= frameCount);
+ removeProcessedInputFrames(positionFrames);
}
private void processStreamInput() {
// Resample as many pitch periods as we have buffered on the input.
- int originalNumOutputSamples = numOutputSamples;
+ int originalOutputFrameCount = outputFrameCount;
float s = speed / pitch;
+ float r = rate * pitch;
if (s > 1.00001 || s < 0.99999) {
changeSpeed(s);
} else {
- copyToOutput(inputBuffer, 0, numInputSamples);
- numInputSamples = 0;
+ copyToOutput(inputBuffer, 0, inputFrameCount);
+ inputFrameCount = 0;
}
- if (USE_CHORD_PITCH) {
- if (pitch != 1.0f) {
- adjustPitch(originalNumOutputSamples);
- }
- } else if (!USE_CHORD_PITCH && pitch != 1.0f) {
- adjustRate(pitch, originalNumOutputSamples);
+ if (r != 1.0f) {
+ adjustRate(r, originalOutputFrameCount);
}
}
- private static void overlapAdd(int numSamples, int numChannels, short[] out, int outPos,
- short[] rampDown, int rampDownPos, short[] rampUp, int rampUpPos) {
- for (int i = 0; i < numChannels; i++) {
- int o = outPos * numChannels + i;
- int u = rampUpPos * numChannels + i;
- int d = rampDownPos * numChannels + i;
- for (int t = 0; t < numSamples; t++) {
- out[o] = (short) ((rampDown[d] * (numSamples - t) + rampUp[u] * t) / numSamples);
- o += numChannels;
- d += numChannels;
- u += numChannels;
- }
- }
- }
-
- private static void overlapAddWithSeparation(int numSamples, int numChannels, int separation,
- short[] out, int outPos, short[] rampDown, int rampDownPos, short[] rampUp, int rampUpPos) {
- for (int i = 0; i < numChannels; i++) {
- int o = outPos * numChannels + i;
- int u = rampUpPos * numChannels + i;
- int d = rampDownPos * numChannels + i;
- for (int t = 0; t < numSamples + separation; t++) {
- if (t < separation) {
- out[o] = (short) (rampDown[d] * (numSamples - t) / numSamples);
- d += numChannels;
- } else if (t < numSamples) {
- out[o] =
- (short) ((rampDown[d] * (numSamples - t) + rampUp[u] * (t - separation))
- / numSamples);
- d += numChannels;
- u += numChannels;
- } else {
- out[o] = (short) (rampUp[u] * (t - separation) / numSamples);
- u += numChannels;
- }
- o += numChannels;
+ private static void overlapAdd(
+ int frameCount,
+ int channelCount,
+ short[] out,
+ int outPosition,
+ short[] rampDown,
+ int rampDownPosition,
+ short[] rampUp,
+ int rampUpPosition) {
+ for (int i = 0; i < channelCount; i++) {
+ int o = outPosition * channelCount + i;
+ int u = rampUpPosition * channelCount + i;
+ int d = rampDownPosition * channelCount + i;
+ for (int t = 0; t < frameCount; t++) {
+ out[o] = (short) ((rampDown[d] * (frameCount - t) + rampUp[u] * t) / frameCount);
+ o += channelCount;
+ d += channelCount;
+ u += channelCount;
}
}
}
diff --git a/library/core/src/main/java/com/google/android/exoplayer2/audio/SonicAudioProcessor.java b/library/core/src/main/java/com/google/android/exoplayer2/audio/SonicAudioProcessor.java
index df20139255..2ca2d47828 100644
--- a/library/core/src/main/java/com/google/android/exoplayer2/audio/SonicAudioProcessor.java
+++ b/library/core/src/main/java/com/google/android/exoplayer2/audio/SonicAudioProcessor.java
@@ -15,16 +15,18 @@
*/
package com.google.android.exoplayer2.audio;
+import android.support.annotation.Nullable;
import com.google.android.exoplayer2.C;
import com.google.android.exoplayer2.C.Encoding;
import com.google.android.exoplayer2.Format;
+import com.google.android.exoplayer2.util.Assertions;
import com.google.android.exoplayer2.util.Util;
import java.nio.ByteBuffer;
import java.nio.ByteOrder;
import java.nio.ShortBuffer;
/**
- * An {@link AudioProcessor} that uses the Sonic library to modify the speed/pitch of audio.
+ * An {@link AudioProcessor} that uses the Sonic library to modify audio speed/pitch/sample rate.
*/
public final class SonicAudioProcessor implements AudioProcessor {
@@ -44,19 +46,30 @@ public final class SonicAudioProcessor implements AudioProcessor {
* The minimum allowed pitch in {@link #setPitch(float)}.
*/
public static final float MINIMUM_PITCH = 0.1f;
+ /**
+ * Indicates that the output sample rate should be the same as the input.
+ */
+ public static final int SAMPLE_RATE_NO_CHANGE = -1;
/**
* The threshold below which the difference between two pitch/speed factors is negligible.
*/
private static final float CLOSE_THRESHOLD = 0.01f;
+ /**
+ * The minimum number of output bytes at which the speedup is calculated using the input/output
+ * byte counts, rather than using the current playback parameters speed.
+ */
+ private static final int MIN_BYTES_FOR_SPEEDUP_CALCULATION = 1024;
+
private int channelCount;
private int sampleRateHz;
-
- private Sonic sonic;
private float speed;
private float pitch;
+ private int outputSampleRateHz;
+ private int pendingOutputSampleRateHz;
+ private @Nullable Sonic sonic;
private ByteBuffer buffer;
private ShortBuffer shortBuffer;
private ByteBuffer outputBuffer;
@@ -72,45 +85,76 @@ public final class SonicAudioProcessor implements AudioProcessor {
pitch = 1f;
channelCount = Format.NO_VALUE;
sampleRateHz = Format.NO_VALUE;
+ outputSampleRateHz = Format.NO_VALUE;
buffer = EMPTY_BUFFER;
shortBuffer = buffer.asShortBuffer();
outputBuffer = EMPTY_BUFFER;
+ pendingOutputSampleRateHz = SAMPLE_RATE_NO_CHANGE;
}
/**
- * Sets the playback speed. The new speed will take effect after a call to {@link #flush()}.
+ * Sets the playback speed. Calling this method will discard any data buffered within the
+ * processor, and may update the value returned by {@link #isActive()}.
*
* @param speed The requested new playback speed.
* @return The actual new playback speed.
*/
public float setSpeed(float speed) {
- this.speed = Util.constrainValue(speed, MINIMUM_SPEED, MAXIMUM_SPEED);
- return this.speed;
+ speed = Util.constrainValue(speed, MINIMUM_SPEED, MAXIMUM_SPEED);
+ if (this.speed != speed) {
+ this.speed = speed;
+ sonic = null;
+ }
+ flush();
+ return speed;
}
/**
- * Sets the playback pitch. The new pitch will take effect after a call to {@link #flush()}.
+ * Sets the playback pitch. Calling this method will discard any data buffered within the
+ * processor, and may update the value returned by {@link #isActive()}.
*
* @param pitch The requested new pitch.
* @return The actual new pitch.
*/
public float setPitch(float pitch) {
- this.pitch = Util.constrainValue(pitch, MINIMUM_PITCH, MAXIMUM_PITCH);
+ pitch = Util.constrainValue(pitch, MINIMUM_PITCH, MAXIMUM_PITCH);
+ if (this.pitch != pitch) {
+ this.pitch = pitch;
+ sonic = null;
+ }
+ flush();
return pitch;
}
/**
- * Returns the number of bytes of input queued since the last call to {@link #flush()}.
+ * Sets the sample rate for output audio, in hertz. Pass {@link #SAMPLE_RATE_NO_CHANGE} to output
+ * audio at the same sample rate as the input. After calling this method, call
+ * {@link #configure(int, int, int)} to start using the new sample rate.
+ *
+ * @param sampleRateHz The sample rate for output audio, in hertz.
+ * @see #configure(int, int, int)
*/
- public long getInputByteCount() {
- return inputBytes;
+ public void setOutputSampleRateHz(int sampleRateHz) {
+ pendingOutputSampleRateHz = sampleRateHz;
}
/**
- * Returns the number of bytes of output dequeued since the last call to {@link #flush()}.
+ * Returns the specified duration scaled to take into account the speedup factor of this instance,
+ * in the same units as {@code duration}.
+ *
+ * @param duration The duration to scale taking into account speedup.
+ * @return The specified duration scaled to take into account speedup, in the same units as
+ * {@code duration}.
*/
- public long getOutputByteCount() {
- return outputBytes;
+ public long scaleDurationForSpeedup(long duration) {
+ if (outputBytes >= MIN_BYTES_FOR_SPEEDUP_CALCULATION) {
+ return outputSampleRateHz == sampleRateHz
+ ? Util.scaleLargeTimestamp(duration, inputBytes, outputBytes)
+ : Util.scaleLargeTimestamp(duration, inputBytes * outputSampleRateHz,
+ outputBytes * sampleRateHz);
+ } else {
+ return (long) ((double) speed * duration);
+ }
}
@Override
@@ -119,17 +163,25 @@ public final class SonicAudioProcessor implements AudioProcessor {
if (encoding != C.ENCODING_PCM_16BIT) {
throw new UnhandledFormatException(sampleRateHz, channelCount, encoding);
}
- if (this.sampleRateHz == sampleRateHz && this.channelCount == channelCount) {
+ int outputSampleRateHz = pendingOutputSampleRateHz == SAMPLE_RATE_NO_CHANGE
+ ? sampleRateHz : pendingOutputSampleRateHz;
+ if (this.sampleRateHz == sampleRateHz && this.channelCount == channelCount
+ && this.outputSampleRateHz == outputSampleRateHz) {
return false;
}
this.sampleRateHz = sampleRateHz;
this.channelCount = channelCount;
+ this.outputSampleRateHz = outputSampleRateHz;
+ sonic = null;
return true;
}
@Override
public boolean isActive() {
- return Math.abs(speed - 1f) >= CLOSE_THRESHOLD || Math.abs(pitch - 1f) >= CLOSE_THRESHOLD;
+ return sampleRateHz != Format.NO_VALUE
+ && (Math.abs(speed - 1f) >= CLOSE_THRESHOLD
+ || Math.abs(pitch - 1f) >= CLOSE_THRESHOLD
+ || outputSampleRateHz != sampleRateHz);
}
@Override
@@ -142,8 +194,14 @@ public final class SonicAudioProcessor implements AudioProcessor {
return C.ENCODING_PCM_16BIT;
}
+ @Override
+ public int getOutputSampleRateHz() {
+ return outputSampleRateHz;
+ }
+
@Override
public void queueInput(ByteBuffer inputBuffer) {
+ Assertions.checkState(sonic != null);
if (inputBuffer.hasRemaining()) {
ShortBuffer shortBuffer = inputBuffer.asShortBuffer();
int inputSize = inputBuffer.remaining();
@@ -151,7 +209,7 @@ public final class SonicAudioProcessor implements AudioProcessor {
sonic.queueInput(shortBuffer);
inputBuffer.position(inputBuffer.position() + inputSize);
}
- int outputSize = sonic.getSamplesAvailable() * channelCount * 2;
+ int outputSize = sonic.getFramesAvailable() * channelCount * 2;
if (outputSize > 0) {
if (buffer.capacity() < outputSize) {
buffer = ByteBuffer.allocateDirect(outputSize).order(ByteOrder.nativeOrder());
@@ -169,6 +227,7 @@ public final class SonicAudioProcessor implements AudioProcessor {
@Override
public void queueEndOfStream() {
+ Assertions.checkState(sonic != null);
sonic.queueEndOfStream();
inputEnded = true;
}
@@ -182,14 +241,18 @@ public final class SonicAudioProcessor implements AudioProcessor {
@Override
public boolean isEnded() {
- return inputEnded && (sonic == null || sonic.getSamplesAvailable() == 0);
+ return inputEnded && (sonic == null || sonic.getFramesAvailable() == 0);
}
@Override
public void flush() {
- sonic = new Sonic(sampleRateHz, channelCount);
- sonic.setSpeed(speed);
- sonic.setPitch(pitch);
+ if (isActive()) {
+ if (sonic == null) {
+ sonic = new Sonic(sampleRateHz, channelCount, speed, pitch, outputSampleRateHz);
+ } else {
+ sonic.flush();
+ }
+ }
outputBuffer = EMPTY_BUFFER;
inputBytes = 0;
outputBytes = 0;
@@ -198,12 +261,16 @@ public final class SonicAudioProcessor implements AudioProcessor {
@Override
public void reset() {
- sonic = null;
+ speed = 1f;
+ pitch = 1f;
+ channelCount = Format.NO_VALUE;
+ sampleRateHz = Format.NO_VALUE;
+ outputSampleRateHz = Format.NO_VALUE;
buffer = EMPTY_BUFFER;
shortBuffer = buffer.asShortBuffer();
outputBuffer = EMPTY_BUFFER;
- channelCount = Format.NO_VALUE;
- sampleRateHz = Format.NO_VALUE;
+ pendingOutputSampleRateHz = SAMPLE_RATE_NO_CHANGE;
+ sonic = null;
inputBytes = 0;
outputBytes = 0;
inputEnded = false;
diff --git a/library/core/src/main/java/com/google/android/exoplayer2/audio/TrimmingAudioProcessor.java b/library/core/src/main/java/com/google/android/exoplayer2/audio/TrimmingAudioProcessor.java
new file mode 100644
index 0000000000..ccaa9c3fed
--- /dev/null
+++ b/library/core/src/main/java/com/google/android/exoplayer2/audio/TrimmingAudioProcessor.java
@@ -0,0 +1,186 @@
+/*
+ * Copyright (C) 2017 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package com.google.android.exoplayer2.audio;
+
+import com.google.android.exoplayer2.C;
+import com.google.android.exoplayer2.C.Encoding;
+import com.google.android.exoplayer2.Format;
+import com.google.android.exoplayer2.util.Util;
+import java.nio.ByteBuffer;
+import java.nio.ByteOrder;
+
+/** Audio processor for trimming samples from the start/end of data. */
+/* package */ final class TrimmingAudioProcessor implements AudioProcessor {
+
+ private boolean isActive;
+ private int trimStartFrames;
+ private int trimEndFrames;
+ private int channelCount;
+ private int sampleRateHz;
+
+ private int pendingTrimStartBytes;
+ private ByteBuffer buffer;
+ private ByteBuffer outputBuffer;
+ private byte[] endBuffer;
+ private int endBufferSize;
+ private boolean inputEnded;
+
+ /** Creates a new audio processor for trimming samples from the start/end of data. */
+ public TrimmingAudioProcessor() {
+ buffer = EMPTY_BUFFER;
+ outputBuffer = EMPTY_BUFFER;
+ channelCount = Format.NO_VALUE;
+ sampleRateHz = Format.NO_VALUE;
+ endBuffer = new byte[0];
+ }
+
+ /**
+ * Sets the number of audio frames to trim from the start and end of audio passed to this
+ * processor. After calling this method, call {@link #configure(int, int, int)} to apply the new
+ * trimming frame counts.
+ *
+ * @param trimStartFrames The number of audio frames to trim from the start of audio.
+ * @param trimEndFrames The number of audio frames to trim from the end of audio.
+ * @see AudioSink#configure(int, int, int, int, int[], int, int)
+ */
+ public void setTrimFrameCount(int trimStartFrames, int trimEndFrames) {
+ this.trimStartFrames = trimStartFrames;
+ this.trimEndFrames = trimEndFrames;
+ }
+
+ @Override
+ public boolean configure(int sampleRateHz, int channelCount, @Encoding int encoding)
+ throws UnhandledFormatException {
+ if (encoding != C.ENCODING_PCM_16BIT) {
+ throw new UnhandledFormatException(sampleRateHz, channelCount, encoding);
+ }
+ this.channelCount = channelCount;
+ this.sampleRateHz = sampleRateHz;
+ endBuffer = new byte[trimEndFrames * channelCount * 2];
+ endBufferSize = 0;
+ pendingTrimStartBytes = trimStartFrames * channelCount * 2;
+ boolean wasActive = isActive;
+ isActive = trimStartFrames != 0 || trimEndFrames != 0;
+ return wasActive != isActive;
+ }
+
+ @Override
+ public boolean isActive() {
+ return isActive;
+ }
+
+ @Override
+ public int getOutputChannelCount() {
+ return channelCount;
+ }
+
+ @Override
+ public int getOutputEncoding() {
+ return C.ENCODING_PCM_16BIT;
+ }
+
+ @Override
+ public int getOutputSampleRateHz() {
+ return sampleRateHz;
+ }
+
+ @Override
+ public void queueInput(ByteBuffer inputBuffer) {
+ int position = inputBuffer.position();
+ int limit = inputBuffer.limit();
+ int remaining = limit - position;
+
+ // Trim any pending start bytes from the input buffer.
+ int trimBytes = Math.min(remaining, pendingTrimStartBytes);
+ pendingTrimStartBytes -= trimBytes;
+ inputBuffer.position(position + trimBytes);
+ if (pendingTrimStartBytes > 0) {
+ // Nothing to output yet.
+ return;
+ }
+ remaining -= trimBytes;
+
+ // endBuffer must be kept as full as possible, so that we trim the right amount of media if we
+ // don't receive any more input. After taking into account the number of bytes needed to keep
+ // endBuffer as full as possible, the output should be any surplus bytes currently in endBuffer
+ // followed by any surplus bytes in the new inputBuffer.
+ int remainingBytesToOutput = endBufferSize + remaining - endBuffer.length;
+ if (buffer.capacity() < remainingBytesToOutput) {
+ buffer = ByteBuffer.allocateDirect(remainingBytesToOutput).order(ByteOrder.nativeOrder());
+ } else {
+ buffer.clear();
+ }
+
+ // Output from endBuffer.
+ int endBufferBytesToOutput = Util.constrainValue(remainingBytesToOutput, 0, endBufferSize);
+ buffer.put(endBuffer, 0, endBufferBytesToOutput);
+ remainingBytesToOutput -= endBufferBytesToOutput;
+
+ // Output from inputBuffer, restoring its limit afterwards.
+ int inputBufferBytesToOutput = Util.constrainValue(remainingBytesToOutput, 0, remaining);
+ inputBuffer.limit(inputBuffer.position() + inputBufferBytesToOutput);
+ buffer.put(inputBuffer);
+ inputBuffer.limit(limit);
+ remaining -= inputBufferBytesToOutput;
+
+ // Compact endBuffer, then repopulate it using the new input.
+ endBufferSize -= endBufferBytesToOutput;
+ System.arraycopy(endBuffer, endBufferBytesToOutput, endBuffer, 0, endBufferSize);
+ inputBuffer.get(endBuffer, endBufferSize, remaining);
+ endBufferSize += remaining;
+
+ buffer.flip();
+ outputBuffer = buffer;
+ }
+
+ @Override
+ public void queueEndOfStream() {
+ inputEnded = true;
+ }
+
+ @Override
+ public ByteBuffer getOutput() {
+ ByteBuffer outputBuffer = this.outputBuffer;
+ this.outputBuffer = EMPTY_BUFFER;
+ return outputBuffer;
+ }
+
+ @SuppressWarnings("ReferenceEquality")
+ @Override
+ public boolean isEnded() {
+ return inputEnded && outputBuffer == EMPTY_BUFFER;
+ }
+
+ @Override
+ public void flush() {
+ outputBuffer = EMPTY_BUFFER;
+ inputEnded = false;
+ // It's no longer necessary to trim any media from the start, but it is necessary to clear the
+ // end buffer and refill it.
+ pendingTrimStartBytes = 0;
+ endBufferSize = 0;
+ }
+
+ @Override
+ public void reset() {
+ flush();
+ buffer = EMPTY_BUFFER;
+ channelCount = Format.NO_VALUE;
+ sampleRateHz = Format.NO_VALUE;
+ endBuffer = new byte[0];
+ }
+
+}
diff --git a/library/core/src/main/java/com/google/android/exoplayer2/decoder/DecoderCounters.java b/library/core/src/main/java/com/google/android/exoplayer2/decoder/DecoderCounters.java
index 7a532110d3..8409bab558 100644
--- a/library/core/src/main/java/com/google/android/exoplayer2/decoder/DecoderCounters.java
+++ b/library/core/src/main/java/com/google/android/exoplayer2/decoder/DecoderCounters.java
@@ -53,18 +53,26 @@ public final class DecoderCounters {
*/
public int skippedOutputBufferCount;
/**
- * The number of dropped output buffers.
+ * The number of dropped buffers.
*
- * A dropped output buffer is an output buffer that was supposed to be rendered, but was instead
+ * A dropped buffer is a buffer that was supposed to be decoded/rendered, but was instead
* dropped because it could not be rendered in time.
*/
- public int droppedOutputBufferCount;
+ public int droppedBufferCount;
/**
- * The maximum number of dropped output buffers without an interleaving rendered output buffer.
+ * The maximum number of dropped buffers without an interleaving rendered output buffer.
*
* Skipped output buffers are ignored for the purposes of calculating this value.
*/
- public int maxConsecutiveDroppedOutputBufferCount;
+ public int maxConsecutiveDroppedBufferCount;
+ /**
+ * The number of times all buffers to a keyframe were dropped.
+ *
+ * Each time buffers to a keyframe are dropped, this counter is increased by one, and the dropped
+ * buffer counters are increased by one (for the current output buffer) plus the number of buffers
+ * dropped from the source to advance to the keyframe.
+ */
+ public int droppedToKeyframeCount;
/**
* Should be called to ensure counter values are made visible across threads. The playback thread
@@ -88,9 +96,10 @@ public final class DecoderCounters {
skippedInputBufferCount += other.skippedInputBufferCount;
renderedOutputBufferCount += other.renderedOutputBufferCount;
skippedOutputBufferCount += other.skippedOutputBufferCount;
- droppedOutputBufferCount += other.droppedOutputBufferCount;
- maxConsecutiveDroppedOutputBufferCount = Math.max(maxConsecutiveDroppedOutputBufferCount,
- other.maxConsecutiveDroppedOutputBufferCount);
+ droppedBufferCount += other.droppedBufferCount;
+ maxConsecutiveDroppedBufferCount = Math.max(maxConsecutiveDroppedBufferCount,
+ other.maxConsecutiveDroppedBufferCount);
+ droppedToKeyframeCount += other.droppedToKeyframeCount;
}
}
diff --git a/library/core/src/main/java/com/google/android/exoplayer2/decoder/SimpleDecoder.java b/library/core/src/main/java/com/google/android/exoplayer2/decoder/SimpleDecoder.java
index 1d380ef858..68089d7b41 100644
--- a/library/core/src/main/java/com/google/android/exoplayer2/decoder/SimpleDecoder.java
+++ b/library/core/src/main/java/com/google/android/exoplayer2/decoder/SimpleDecoder.java
@@ -219,7 +219,18 @@ public abstract class SimpleDecoder= 27) {
+ return request;
+ }
+ // Prior to O-MR1 the ClearKey CDM encoded the values in the "kids" array using Base64 encoding
+ // rather than Base64Url encoding. See [Internal: b/64388098]. We know the exact request format
+ // from the platform's InitDataParser.cpp. Since there aren't any "+" or "/" symbols elsewhere
+ // in the request, it's safe to fix the encoding by replacement through the whole request.
+ String requestString = Util.fromUtf8Bytes(request);
+ return Util.getUtf8Bytes(base64ToBase64Url(requestString));
+ }
+
+ /**
+ * Adjusts ClearKey response data to be suitable for providing to the Android ClearKey CDM.
+ *
+ * @param response The response data.
+ * @return The adjusted response data.
+ */
+ public static byte[] adjustResponseData(byte[] response) {
+ if (Util.SDK_INT >= 27) {
+ return response;
+ }
+ // Prior to O-MR1 the ClearKey CDM expected Base64 encoding rather than Base64Url encoding for
+ // the "k" and "kid" strings. See [Internal: b/64388098]. We know that the ClearKey CDM only
+ // looks at the k, kid and kty parameters in each key, so can ignore the rest of the response.
+ try {
+ JSONObject responseJson = new JSONObject(Util.fromUtf8Bytes(response));
+ StringBuilder adjustedResponseBuilder = new StringBuilder("{\"keys\":[");
+ JSONArray keysArray = responseJson.getJSONArray("keys");
+ for (int i = 0; i < keysArray.length(); i++) {
+ if (i != 0) {
+ adjustedResponseBuilder.append(",");
+ }
+ JSONObject key = keysArray.getJSONObject(i);
+ adjustedResponseBuilder.append("{\"k\":\"");
+ adjustedResponseBuilder.append(base64UrlToBase64(key.getString("k")));
+ adjustedResponseBuilder.append("\",\"kid\":\"");
+ adjustedResponseBuilder.append(base64UrlToBase64(key.getString("kid")));
+ adjustedResponseBuilder.append("\",\"kty\":\"");
+ adjustedResponseBuilder.append(key.getString("kty"));
+ adjustedResponseBuilder.append("\"}");
+ }
+ adjustedResponseBuilder.append("]}");
+ return Util.getUtf8Bytes(adjustedResponseBuilder.toString());
+ } catch (JSONException e) {
+ Log.e(TAG, "Failed to adjust response data: " + Util.fromUtf8Bytes(response), e);
+ return response;
+ }
+ }
+
+ private static String base64ToBase64Url(String base64) {
+ return base64.replace('+', '-').replace('/', '_');
+ }
+
+ private static String base64UrlToBase64(String base64Url) {
+ return base64Url.replace('-', '+').replace('_', '/');
+ }
+
+}
diff --git a/library/core/src/main/java/com/google/android/exoplayer2/drm/DecryptionException.java b/library/core/src/main/java/com/google/android/exoplayer2/drm/DecryptionException.java
index 6916b972b2..81cfc26393 100644
--- a/library/core/src/main/java/com/google/android/exoplayer2/drm/DecryptionException.java
+++ b/library/core/src/main/java/com/google/android/exoplayer2/drm/DecryptionException.java
@@ -1,20 +1,37 @@
+/*
+ * Copyright (C) 2017 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
package com.google.android.exoplayer2.drm;
/**
- * An exception when doing drm decryption using the In-App Drm
+ * Thrown when a non-platform component fails to decrypt data.
*/
public class DecryptionException extends Exception {
- private final int errorCode;
+ /**
+ * A component specific error code.
+ */
+ public final int errorCode;
+
+ /**
+ * @param errorCode A component specific error code.
+ * @param message The detail message.
+ */
public DecryptionException(int errorCode, String message) {
super(message);
this.errorCode = errorCode;
}
- /**
- * Get error code
- */
- public int getErrorCode() {
- return errorCode;
- }
}
diff --git a/library/core/src/main/java/com/google/android/exoplayer2/drm/DefaultDrmSession.java b/library/core/src/main/java/com/google/android/exoplayer2/drm/DefaultDrmSession.java
index cfb2cf9d8a..c57b023139 100644
--- a/library/core/src/main/java/com/google/android/exoplayer2/drm/DefaultDrmSession.java
+++ b/library/core/src/main/java/com/google/android/exoplayer2/drm/DefaultDrmSession.java
@@ -17,8 +17,6 @@ package com.google.android.exoplayer2.drm;
import android.annotation.SuppressLint;
import android.annotation.TargetApi;
-import android.media.DeniedByServerException;
-import android.media.MediaDrm;
import android.media.NotProvisionedException;
import android.os.Handler;
import android.os.HandlerThread;
@@ -27,52 +25,75 @@ import android.os.Message;
import android.util.Log;
import android.util.Pair;
import com.google.android.exoplayer2.C;
-import com.google.android.exoplayer2.drm.DrmInitData.SchemeData;
+import com.google.android.exoplayer2.drm.DefaultDrmSessionEventListener.EventDispatcher;
+import com.google.android.exoplayer2.drm.ExoMediaDrm.DefaultKeyRequest;
import com.google.android.exoplayer2.drm.ExoMediaDrm.KeyRequest;
-import com.google.android.exoplayer2.drm.ExoMediaDrm.OnEventListener;
import com.google.android.exoplayer2.drm.ExoMediaDrm.ProvisionRequest;
-import com.google.android.exoplayer2.extractor.mp4.PsshAtomUtil;
-import com.google.android.exoplayer2.util.MimeTypes;
-import com.google.android.exoplayer2.util.Util;
+import java.util.Arrays;
import java.util.HashMap;
import java.util.Map;
import java.util.UUID;
/**
- * A {@link DrmSession} that supports playbacks using {@link MediaDrm}.
+ * A {@link DrmSession} that supports playbacks using {@link ExoMediaDrm}.
*/
@TargetApi(18)
/* package */ class DefaultDrmSession implements DrmSession {
- private static final String TAG = "DefaultDrmSession";
- private static final String CENC_SCHEME_MIME_TYPE = "cenc";
+ /**
+ * Manages provisioning requests.
+ */
+ public interface ProvisioningManager {
+
+ /**
+ * Called when a session requires provisioning. The manager may call
+ * {@link #provision()} to have this session perform the provisioning operation. The manager
+ * will call {@link DefaultDrmSession#onProvisionCompleted()} when provisioning has
+ * completed, or {@link DefaultDrmSession#onProvisionError} if provisioning fails.
+ *
+ * @param session The session.
+ */
+ void provisionRequired(DefaultDrmSession session);
+
+ /**
+ * Called by a session when it fails to perform a provisioning operation.
+ *
+ * @param error The error that occurred.
+ */
+ void onProvisionError(Exception error);
+
+ /**
+ * Called by a session when it successfully completes a provisioning operation.
+ */
+ void onProvisionCompleted();
+
+ }
+
+ private static final String TAG = "DefaultDrmSession";
private static final int MSG_PROVISION = 0;
private static final int MSG_KEYS = 1;
-
private static final int MAX_LICENSE_DURATION_TO_RENEW = 60;
- private final Handler eventHandler;
- private final DefaultDrmSessionManager.EventListener eventListener;
private final ExoMediaDrm mediaDrm;
+ private final ProvisioningManager provisioningManager;
+ private final byte[] initData;
+ private final String mimeType;
+ private final @DefaultDrmSessionManager.Mode int mode;
private final HashMap