Merge branch 'dev-v2' into rtsp-socket-factory

This commit is contained in:
claincly 2021-11-11 13:34:56 +00:00 committed by GitHub
commit 86447cb852
No known key found for this signature in database
GPG key ID: 4AEE18F83AFDEB23
89 changed files with 3355 additions and 1944 deletions

View file

@ -3,11 +3,17 @@
### dev-v2 (not yet released)
* Core Library:
* Fix track selection issue where a mixture of non-empty and empty track
overrides is not applied correctly
([#9649](https://github.com/google/ExoPlayer/issues/9649)).
* Add protected method `DefaultRenderersFactory.getCodecAdapterFactory()`
so that subclasses of `DefaultRenderersFactory` that override
`buildVideoRenderers()` or `buildAudioRenderers()` can access the codec
adapter factory and pass it to `MediaCodecRenderer` instances they
create.
* Extractors:
* WAV: Add support for RF64 streams
([#9543](https://github.com/google/ExoPlayer/issues/9543)).
* RTSP:
* Provide a client API to override the `SocketFactory` used for any server
connection ([#9606](https://github.com/google/ExoPlayer/pull/9606)).

View file

@ -1,8 +1,9 @@
# Security policy #
To report a security issue, please email exoplayer-support+security@google.com
with a description of the issue, the steps you took to create the issue,
affected versions, and, if known, mitigations for the issue. Our vulnerability
management team will respond within 3 working days of your email. If the issue
is confirmed as a vulnerability, we will open a Security Advisory. This project
follows a 90 day disclosure timeline.
To report a security issue, please email
android-media-support+security@google.com with a description of the issue, the
steps you took to create the issue, affected versions, and, if known,
mitigations for the issue. Our vulnerability management team will respond within
3 working days of your email. If the issue is confirmed as a vulnerability, we
will open a Security Advisory. This project follows a 90 day disclosure
timeline.

View file

@ -179,8 +179,7 @@ public final class MainActivity extends Activity {
player.play();
VideoProcessingGLSurfaceView videoProcessingGLSurfaceView =
Assertions.checkNotNull(this.videoProcessingGLSurfaceView);
videoProcessingGLSurfaceView.setVideoComponent(
Assertions.checkNotNull(player.getVideoComponent()));
videoProcessingGLSurfaceView.setPlayer(player);
Assertions.checkNotNull(playerView).setPlayer(player);
player.addAnalyticsListener(new EventLogger(/* trackSelector= */ null));
this.player = player;
@ -188,9 +187,9 @@ public final class MainActivity extends Activity {
private void releasePlayer() {
Assertions.checkNotNull(playerView).setPlayer(null);
Assertions.checkNotNull(videoProcessingGLSurfaceView).setPlayer(null);
if (player != null) {
player.release();
Assertions.checkNotNull(videoProcessingGLSurfaceView).setVideoComponent(null);
player = null;
}
}

View file

@ -73,7 +73,7 @@ public final class VideoProcessingGLSurfaceView extends GLSurfaceView {
@Nullable private SurfaceTexture surfaceTexture;
@Nullable private Surface surface;
@Nullable private ExoPlayer.VideoComponent videoComponent;
@Nullable private ExoPlayer player;
/**
* Creates a new instance. Pass {@code true} for {@code requireSecureContext} if the {@link
@ -147,25 +147,24 @@ public final class VideoProcessingGLSurfaceView extends GLSurfaceView {
}
/**
* Attaches or detaches (if {@code newVideoComponent} is {@code null}) this view from the video
* component of the player.
* Attaches or detaches (if {@code player} is {@code null}) this view from the player.
*
* @param newVideoComponent The new video component, or {@code null} to detach this view.
* @param player The new player, or {@code null} to detach this view.
*/
public void setVideoComponent(@Nullable ExoPlayer.VideoComponent newVideoComponent) {
if (newVideoComponent == videoComponent) {
public void setPlayer(@Nullable ExoPlayer player) {
if (player == this.player) {
return;
}
if (videoComponent != null) {
if (this.player != null) {
if (surface != null) {
videoComponent.clearVideoSurface(surface);
this.player.clearVideoSurface(surface);
}
videoComponent.clearVideoFrameMetadataListener(renderer);
this.player.clearVideoFrameMetadataListener(renderer);
}
videoComponent = newVideoComponent;
if (videoComponent != null) {
videoComponent.setVideoFrameMetadataListener(renderer);
videoComponent.setVideoSurface(surface);
this.player = player;
if (this.player != null) {
this.player.setVideoFrameMetadataListener(renderer);
this.player.setVideoSurface(surface);
}
}
@ -176,8 +175,8 @@ public final class VideoProcessingGLSurfaceView extends GLSurfaceView {
mainHandler.post(
() -> {
if (surface != null) {
if (videoComponent != null) {
videoComponent.setVideoSurface(null);
if (player != null) {
player.setVideoSurface(null);
}
releaseSurface(surfaceTexture, surface);
surfaceTexture = null;
@ -194,8 +193,8 @@ public final class VideoProcessingGLSurfaceView extends GLSurfaceView {
this.surfaceTexture = surfaceTexture;
this.surface = new Surface(surfaceTexture);
releaseSurface(oldSurfaceTexture, oldSurface);
if (videoComponent != null) {
videoComponent.setVideoSurface(surface);
if (player != null) {
player.setVideoSurface(surface);
}
});
}

View file

@ -23,11 +23,13 @@ import android.net.Uri;
import androidx.annotation.Nullable;
import com.google.android.exoplayer2.C;
import com.google.android.exoplayer2.MediaItem;
import com.google.android.exoplayer2.MediaItem.ClippingConfiguration;
import com.google.android.exoplayer2.MediaItem.SubtitleConfiguration;
import com.google.android.exoplayer2.MediaMetadata;
import com.google.android.exoplayer2.util.Assertions;
import com.google.android.exoplayer2.util.Util;
import com.google.common.collect.ImmutableList;
import java.util.ArrayList;
import java.util.Collections;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
@ -118,36 +120,46 @@ public class IntentUtil {
@Nullable String mimeType = intent.getStringExtra(MIME_TYPE_EXTRA + extrasKeySuffix);
@Nullable String title = intent.getStringExtra(TITLE_EXTRA + extrasKeySuffix);
@Nullable String adTagUri = intent.getStringExtra(AD_TAG_URI_EXTRA + extrasKeySuffix);
@Nullable
SubtitleConfiguration subtitleConfiguration =
createSubtitleConfiguration(intent, extrasKeySuffix);
MediaItem.Builder builder =
new MediaItem.Builder()
.setUri(uri)
.setMimeType(mimeType)
.setMediaMetadata(new MediaMetadata.Builder().setTitle(title).build())
.setSubtitles(createSubtitlesFromIntent(intent, extrasKeySuffix))
.setClipStartPositionMs(
intent.getLongExtra(CLIP_START_POSITION_MS_EXTRA + extrasKeySuffix, 0))
.setClipEndPositionMs(
intent.getLongExtra(
CLIP_END_POSITION_MS_EXTRA + extrasKeySuffix, C.TIME_END_OF_SOURCE));
.setClippingConfiguration(
new ClippingConfiguration.Builder()
.setStartPositionMs(
intent.getLongExtra(CLIP_START_POSITION_MS_EXTRA + extrasKeySuffix, 0))
.setEndPositionMs(
intent.getLongExtra(
CLIP_END_POSITION_MS_EXTRA + extrasKeySuffix, C.TIME_END_OF_SOURCE))
.build());
if (adTagUri != null) {
builder.setAdsConfiguration(
new MediaItem.AdsConfiguration.Builder(Uri.parse(adTagUri)).build());
}
if (subtitleConfiguration != null) {
builder.setSubtitleConfigurations(ImmutableList.of(subtitleConfiguration));
}
return populateDrmPropertiesFromIntent(builder, intent, extrasKeySuffix).build();
}
private static List<MediaItem.Subtitle> createSubtitlesFromIntent(
@Nullable
private static MediaItem.SubtitleConfiguration createSubtitleConfiguration(
Intent intent, String extrasKeySuffix) {
if (!intent.hasExtra(SUBTITLE_URI_EXTRA + extrasKeySuffix)) {
return Collections.emptyList();
return null;
}
return Collections.singletonList(
new MediaItem.Subtitle(
Uri.parse(intent.getStringExtra(SUBTITLE_URI_EXTRA + extrasKeySuffix)),
checkNotNull(intent.getStringExtra(SUBTITLE_MIME_TYPE_EXTRA + extrasKeySuffix)),
intent.getStringExtra(SUBTITLE_LANGUAGE_EXTRA + extrasKeySuffix),
C.SELECTION_FLAG_DEFAULT));
return new MediaItem.SubtitleConfiguration.Builder(
Uri.parse(intent.getStringExtra(SUBTITLE_URI_EXTRA + extrasKeySuffix)))
.setMimeType(
checkNotNull(intent.getStringExtra(SUBTITLE_MIME_TYPE_EXTRA + extrasKeySuffix)))
.setLanguage(intent.getStringExtra(SUBTITLE_LANGUAGE_EXTRA + extrasKeySuffix))
.setSelectionFlags(C.SELECTION_FLAG_DEFAULT)
.build();
}
private static MediaItem.Builder populateDrmPropertiesFromIntent(

View file

@ -269,7 +269,8 @@ public class PlayerActivity extends AppCompatActivity
trackSelector = new DefaultTrackSelector(/* context= */ this);
lastSeenTracksInfo = TracksInfo.EMPTY;
player =
new ExoPlayer.Builder(/* context= */ this, renderersFactory)
new ExoPlayer.Builder(/* context= */ this)
.setRenderersFactory(renderersFactory)
.setMediaSourceFactory(mediaSourceFactory)
.setTrackSelector(trackSelector)
.build();

View file

@ -43,8 +43,8 @@ import android.widget.Toast;
import androidx.annotation.Nullable;
import androidx.appcompat.app.AppCompatActivity;
import com.google.android.exoplayer2.MediaItem;
import com.google.android.exoplayer2.MediaItem.ClippingConfiguration;
import com.google.android.exoplayer2.MediaMetadata;
import com.google.android.exoplayer2.ParserException;
import com.google.android.exoplayer2.RenderersFactory;
import com.google.android.exoplayer2.offline.DownloadService;
import com.google.android.exoplayer2.upstream.DataSource;
@ -53,6 +53,7 @@ import com.google.android.exoplayer2.upstream.DataSourceUtil;
import com.google.android.exoplayer2.upstream.DataSpec;
import com.google.android.exoplayer2.util.Log;
import com.google.android.exoplayer2.util.Util;
import com.google.common.collect.ImmutableList;
import com.google.common.collect.ImmutableMap;
import java.io.IOException;
import java.io.InputStream;
@ -327,8 +328,7 @@ public class SampleChooserActivity extends AppCompatActivity
reader.nextString(); // Ignore.
break;
default:
throw ParserException.createForMalformedManifest(
"Unsupported name: " + name, /* cause= */ null);
throw new IOException("Unsupported name: " + name, /* cause= */ null);
}
}
reader.endObject();
@ -351,6 +351,8 @@ public class SampleChooserActivity extends AppCompatActivity
boolean drmSessionForClearContent = false;
boolean drmMultiSession = false;
boolean drmForceDefaultLicenseUri = false;
MediaItem.ClippingConfiguration.Builder clippingConfiguration =
new ClippingConfiguration.Builder();
MediaItem.Builder mediaItem = new MediaItem.Builder();
reader.beginObject();
@ -367,10 +369,10 @@ public class SampleChooserActivity extends AppCompatActivity
extension = reader.nextString();
break;
case "clip_start_position_ms":
mediaItem.setClipStartPositionMs(reader.nextLong());
clippingConfiguration.setStartPositionMs(reader.nextLong());
break;
case "clip_end_position_ms":
mediaItem.setClipEndPositionMs(reader.nextLong());
clippingConfiguration.setEndPositionMs(reader.nextLong());
break;
case "ad_tag_uri":
mediaItem.setAdsConfiguration(
@ -420,8 +422,7 @@ public class SampleChooserActivity extends AppCompatActivity
reader.endArray();
break;
default:
throw ParserException.createForMalformedManifest(
"Unsupported attribute name: " + name, /* cause= */ null);
throw new IOException("Unsupported attribute name: " + name, /* cause= */ null);
}
}
reader.endObject();
@ -439,7 +440,8 @@ public class SampleChooserActivity extends AppCompatActivity
mediaItem
.setUri(uri)
.setMediaMetadata(new MediaMetadata.Builder().setTitle(title).build())
.setMimeType(adaptiveMimeType);
.setMimeType(adaptiveMimeType)
.setClippingConfiguration(clippingConfiguration.build());
if (drmUuid != null) {
mediaItem.setDrmConfiguration(
new MediaItem.DrmConfiguration.Builder(drmUuid)
@ -463,13 +465,15 @@ public class SampleChooserActivity extends AppCompatActivity
"drm_uuid is required if drm_force_default_license_uri is set.");
}
if (subtitleUri != null) {
MediaItem.Subtitle subtitle =
new MediaItem.Subtitle(
subtitleUri,
checkNotNull(
subtitleMimeType, "subtitle_mime_type is required if subtitle_uri is set."),
subtitleLanguage);
mediaItem.setSubtitles(Collections.singletonList(subtitle));
MediaItem.SubtitleConfiguration subtitleConfiguration =
new MediaItem.SubtitleConfiguration.Builder(subtitleUri)
.setMimeType(
checkNotNull(
subtitleMimeType,
"subtitle_mime_type is required if subtitle_uri is set."))
.setLanguage(subtitleLanguage)
.build();
mediaItem.setSubtitleConfigurations(ImmutableList.of(subtitleConfiguration));
}
return new PlaylistHolder(title, Collections.singletonList(mediaItem.build()));
}

View file

@ -18,7 +18,7 @@ en: &EN
FOLLOW_US : "Follow us on [NAME]."
EMAIL_ME : "Send me Email."
EMAIL_US : "Send us Email."
COPYRIGHT_DATES : "2019"
COPYRIGHT_DATES : "2021"
en-GB:
<<: *EN
@ -49,7 +49,7 @@ zh-Hans: &ZH_HANS
FOLLOW_US : "在 [NAME] 上关注我们。"
EMAIL_ME : "给我发邮件。"
EMAIL_US : "给我们发邮件。"
COPYRIGHT_DATES : "2019"
COPYRIGHT_DATES : "2021"
zh:
<<: *ZH_HANS
@ -78,7 +78,7 @@ zh-Hant: &ZH_HANT
FOLLOW_US : "在 [NAME] 上關注我們。"
EMAIL_ME : "給我發郵件。"
EMAIL_US : "給我們發郵件。"
COPYRIGHT_DATES : "2019"
COPYRIGHT_DATES : "2021"
zh-TW:
<<: *ZH_HANT
@ -105,7 +105,7 @@ ko: &KO
FOLLOW_US : "[NAME]에서 팔로우하기"
EMAIL_ME : "이메일 보내기"
EMAIL_US : "이메일 보내기"
COPYRIGHT_DATES : "2019"
COPYRIGHT_DATES : "2021"
ko-KR:
<<: *KO

View file

@ -1,10 +1,6 @@
---
layout: article
---
The Android media team is interested in your experiences with the Android media
APIs and developer resources. Please provide your feedback by
[completing this short survey](https://goo.gle/media-survey-6).
{:.info}
ExoPlayer is an application level media player for Android. It provides an
alternative to Android's MediaPlayer API for playing audio and video both

View file

@ -27,7 +27,7 @@ H264 and VP9 videos). They may even be of different types (e.g., it's fine for
playlist to contain both videos and audio only streams). It's allowed to use the
same `MediaItem` multiple times within a playlist.
## Modifying the playlist ##
## Modifying the playlist
It's possible to dynamically modify a playlist by adding, moving and removing
media items. This can be done both before and during playback by calling the
@ -63,13 +63,60 @@ currently playing `MediaItem` is removed, the player will automatically move to
playing the first remaining successor, or transition to the ended state if no
such successor exists.
## Querying the playlist ##
## Querying the playlist
The playlist can be queried using `Player.getMediaItemCount` and
`Player.getMediaItemAt`. The currently playing media item can be queried
by calling `Player.getCurrentMediaItem`.
by calling `Player.getCurrentMediaItem`. There are also other convenience
methods like `Player.hasNextMediaItem` or `Player.getNextMediaItemIndex` to
simplify navigation in the playlist.
## Identifying playlist items ##
## Repeat modes
The player supports 3 repeat modes that can be set at any time with
`Player.setRepeatMode`:
* `Player.REPEAT_MODE_OFF`: The playlist isn't repeated and the player will
transition to `Player.STATE_ENDED` once the last item in the playlist has
been played.
* `Player.REPEAT_MODE_ONE`: The current item is repeated in an endless loop.
Methods like `Player.seekToNextMediaItem` will ignore this and seek to the
next item in the list, which will then be repeated in an endless loop.
* `Player.REPEAT_MODE_ALL`: The entire playlist is repeated in an endless loop.
## Shuffle mode
Shuffle mode can be enabled or disabled at any time with
`Player.setShuffleModeEnabled`. When in shuffle mode, the player will play the
playlist in a precomputed, randomized order. All items will be played once and
the shuffle mode can also be combined with `Player.REPEAT_MODE_ALL` to repeat
the same randomized order in an endless loop. When shuffle mode is turned off,
playback continues from the current item at its original position in the
playlist.
Note that the indices as returned by methods like
`Player.getCurrentMediaItemIndex` always refer to the original, unshuffled
order. Similarly, `Player.seekToNextMediaItem` will not play the item at
`player.getCurrentMediaItemIndex() + 1`, but the next item according to the
shuffle order. Inserting new items in the playlist or removing items will keep
the existing shuffled order unchanged as far as possible.
### Setting a custom shuffle order
By default the player supports shuffling by using the `DefaultShuffleOrder`.
This can be customized by providing a custom shuffle order implementation, or by
setting a custom order in the `DefaultShuffleOrder` constructor:
~~~
// Set a custom shuffle order for the 5 items currently in the playlist:
exoPlayer.setShuffleOrder(
new DefaultShuffleOrder(new int[] {3, 1, 0, 4, 2}, randomSeed));
// Enable shuffle mode.
exoPlayer.setShuffleModeEnabled(/* shuffleModeEnabled= */ true);
~~~
{: .language-java}
## Identifying playlist items
To identify playlist items, `MediaItem.mediaId` can be set when building the
item:
@ -84,7 +131,7 @@ MediaItem mediaItem =
If an app does not explicitly define a media ID for a media item, the string
representation of the URI is used.
## Associating app data with playlist items ##
## Associating app data with playlist items
In addition to an ID, each media item can also be configured with a custom tag,
which can be any app provided object. One use of custom tags is to attach
@ -98,7 +145,7 @@ MediaItem mediaItem =
{: .language-java}
## Detecting when playback transitions to another media item ##
## Detecting when playback transitions to another media item
When playback transitions to another media item, or starts repeating the same
media item, `Listener.onMediaItemTransition(MediaItem,
@ -132,7 +179,7 @@ public void onMediaItemTransition(
~~~
{: .language-java}
## Detecting when the playlist changes ##
## Detecting when the playlist changes
When a media item is added, removed or moved,
`Listener.onTimelineChanged(Timeline, @TimelineChangeReason)` is called
@ -158,19 +205,3 @@ timeline update include:
* A manifest becoming available after preparing an adaptive media item.
* A manifest being updated periodically during playback of a live stream.
## Setting a custom shuffle order ##
By default the playlist supports shuffling by using the `DefaultShuffleOrder`.
This can be customized by providing a custom shuffle order implementation:
~~~
// Set the custom shuffle order.
exoPlayer.setShuffleOrder(shuffleOrder);
// Enable shuffle mode.
exoPlayer.setShuffleModeEnabled(/* shuffleModeEnabled= */ true);
~~~
{: .language-java}
If the repeat mode of the player is set to `REPEAT_MODE_ALL`, the custom shuffle
order is played in an endless loop.

View file

@ -3,8 +3,175 @@ title: Track selection
---
Track selection determines which of the available media tracks are played by the
player. Track selection is the responsibility of a `TrackSelector`, an instance
of which can be provided whenever an `ExoPlayer` is built.
player. This process is configured by [`TrackSelectionParameters`][], which
support many different options to specify constraints and overrides.
## Information about existing tracks
The player needs to prepare the media to know which tracks are available for
selection. You can listen to `Player.Listener.onTracksInfoChanged` to get
notified about changes, which may happen
* When preparation completes
* When the available or selected tracks change
* When the playlist item changes
~~~
player.addListener(new Player.Listener() {
@Override
public void onTracksInfoChanged(TracksInfo tracksInfo) {
// Update UI using current TracksInfo.
}
});
~~~
{: .language-java}
You can also retrieve the current `TracksInfo` by calling
`player.getCurrentTracksInfo()`.
`TracksInfo` contains a list of `TrackGroupInfo`s with information about the
track type, format details, player support and selection status of each
available track. Tracks are grouped together into one `TrackGroup` if they
represent the same content that can be used interchangeably by the player (for
example, all audio tracks of a single language, but with different bitrates).
~~~
for (TrackGroupInfo groupInfo : tracksInfo.getTrackGroupInfos()) {
// Group level information.
@C.TrackType int trackType = groupInfo.getTrackType();
boolean trackInGroupIsSelected = groupInfo.isSelected();
boolean trackInGroupIsSupported = groupInfo.isSupported();
TrackGroup group = groupInfo.getTrackGroup();
for (int i = 0; i < group.length; i++) {
// Individual track information.
boolean isSupported = groupInfo.isTrackSupported(i);
boolean isSelected = groupInfo.isTrackSelected(i);
Format trackFormat = group.getFormat(i);
}
}
~~~
{: .language-java}
* A track is 'supported' if the `Player` is able to decode and render its
samples. Note that even if multiple track groups of the same type (for example
multiple audio track groups) are supported, it only means that they are
supported individually and the player is not necessarily able to play them at
the same time.
* A track is 'selected' if the track selector chose this track for playback
using the current `TrackSelectionParameters`. If multiple tracks within one
track group are selected, the player uses these tracks for adaptive playback
(for example, multiple video tracks with different bitrates). Note that only
one of these tracks will be played at any one time. If you want to be notified
of in-playback changes to the adaptive video track you can listen to
`Player.Listener.onVideoSizeChanged`.
## Modifying track selection parameters
The selection process can be configured by setting `TrackSelectionParameters` on
the `Player` with `Player.setTrackSelectionParameters`. These updates can be
done before and during playback. In most cases, it's advisable to obtain the
current parameters and only modify the required aspects with the
`TrackSelectionParameters.Builder`. The builder class also allows chaining to
specify multiple options with one command:
~~~
player.setTrackSelectionParameters(
player.getTrackSelectionParameters()
.buildUpon()
.setMaxVideoSizeSd()
.setPreferredAudioLanguage("hu")
.build());
~~~
{: .language-java}
### Constraint based track selection
Most options in `TrackSelectionParameters` allow you to specify constraints,
which are independent of the tracks that are actually available. Typical
constraints are:
* Maximum or minimum video width, height, frame rate, or bitrate.
* Maximum audio channel count or bitrate.
* Preferred MIME types for video or audio.
* Preferred audio languages or role flags.
* Preferred text languages or role flags.
Note that ExoPlayer already applies sensible defaults for most of these values,
for example restricting video resolution to the display size or preferring the
audio language that matches the user's system Locale setting.
There are several benefits to using constraint based track selection instead of
specifying specific tracks directly:
* You can specify constraints before knowing what tracks the media provides.
This allows to immediately select the appropriate tracks for faster startup
time and also simplifies track selection code as you don't have to listen for
changes in the available tracks.
* Constraints can be applied consistently across all items in a playlist. For
example, selecting an audio language based on user preference will
automatically apply to the next playlist item too, whereas overriding a
specific track will only apply to the current playlist item for which the
track exists.
### Selecting specific tracks
It's possible to specify specific tracks in `TrackSelectionParameters` that
should be selected for the current set of tracks. Note that a change in the
available tracks, for example when changing items in a playlist, will also
invalidate such a track override.
The simplest way to specify track overrides is to specify the `TrackGroup` that
should be selected for its track type. For example, you can specify an audio
track group to select this audio group and prevent any other audio track groups
from being selected:
~~~
TrackSelectionOverrides overrides =
new TrackSelectionOverrides.Builder()
.setOverrideForType(new TrackSelectionOverride(audioTrackGroup))
.build();
player.setTrackSelectionParameters(
player.getTrackSelectionParameters()
.buildUpon().setTrackSelectionOverrides(overrides).build());
~~~
{: .language-java}
### Disabling track types or groups
Track types, like video, audio or text, can be disabled completely by using
`TrackSelectionParameters.Builder.setDisabledTrackTypes`. This will apply
unconditionally and will also affect other playlist items.
~~~
player.setTrackSelectionParameters(
player.getTrackSelectionParameters()
.buildUpon()
.setDisabledTrackTypes(ImmutableSet.of(C.TRACK_TYPE_VIDEO))
.build());
~~~
{: .language-java}
Alternatively, it's possible to prevent the selection of track groups for the
current playlist item only by specifying empty overrides for these groups:
~~~
TrackSelectionOverrides overrides =
new TrackSelectionOverrides.Builder()
.addOverride(
new TrackSelectionOverride(
disabledTrackGroup,
/* select no tracks for this group */ ImmutableList.of()))
.build();
player.setTrackSelectionParameters(
player.getTrackSelectionParameters()
.buildUpon().setTrackSelectionOverrides(overrides).build());
~~~
{: .language-java}
## Customizing the track selector
Track selection is the responsibility of a `TrackSelector`, an instance
of which can be provided whenever an `ExoPlayer` is built and later obtained
with `ExoPlayer.getTrackSelector()`.
~~~
DefaultTrackSelector trackSelector = new DefaultTrackSelector(context);
@ -16,28 +183,22 @@ ExoPlayer player =
{: .language-java}
`DefaultTrackSelector` is a flexible `TrackSelector` suitable for most use
cases. When using a `DefaultTrackSelector`, it's possible to control which
tracks it selects by modifying its `Parameters`. This can be done before or
during playback. For example the following code tells the selector to restrict
video track selections to SD, and to select a German audio track if there is
one:
cases. It uses the `TrackSelectionParameters` set in the `Player`, but also
provides some advanced customization options that can be specified in the
`DefaultTrackSelector.ParametersBuilder`:
~~~
trackSelector.setParameters(
trackSelector
.buildUponParameters()
.setMaxVideoSizeSd()
.setPreferredAudioLanguage("deu"));
.setAllowVideoMixedMimeTypeAdaptiveness(true));
~~~
{: .language-java}
This is an example of constraint based track selection, in which constraints are
specified without knowledge of the tracks that are actually available. Many
different types of constraint can be specified using `Parameters`. `Parameters`
can also be used to select specific tracks from those that are available. See
the [`DefaultTrackSelector`][], [`Parameters`][] and [`ParametersBuilder`][]
documentation for more details.
### Tunneling
[`Parameters`]: {{ site.exo_sdk }}/trackselection/DefaultTrackSelector.Parameters.html
[`ParametersBuilder`]: {{ site.exo_sdk }}/trackselection/DefaultTrackSelector.ParametersBuilder.html
[`DefaultTrackSelector`]: {{ site.exo_sdk }}/trackselection/DefaultTrackSelector.html
Tunneled playback can be enabled in cases where the combination of renderers and
selected tracks supports it. This can be done by using
`DefaultTrackSelector.ParametersBuilder.setTunnelingEnabled(true)`.
[`TrackSelectionParameters`]: {{ site.exo_sdk }}/trackselection/TrackSelectionParameters.html

View file

@ -88,8 +88,11 @@ public class DefaultMediaItemConverter implements MediaItemConverter {
.setMediaId(mediaId != null ? mediaId : MediaItem.DEFAULT_MEDIA_ID)
.setMediaMetadata(new MediaMetadata.Builder().setTitle(title).build())
.setTag(media2MediaItem)
.setClipStartPositionMs(startPositionMs)
.setClipEndPositionMs(endPositionMs)
.setClippingConfiguration(
new MediaItem.ClippingConfiguration.Builder()
.setStartPositionMs(startPositionMs)
.setEndPositionMs(endPositionMs)
.build())
.build();
}

View file

@ -1208,8 +1208,7 @@ public final class MediaSessionConnector {
@Override
public void onStop() {
if (canDispatchPlaybackAction(PlaybackStateCompat.ACTION_STOP)) {
player.stop();
player.clearMediaItems();
player.stop(/* reset= */ true);
}
}

View file

@ -2119,7 +2119,7 @@ public interface Player {
/** Returns the index of the period currently being played. */
int getCurrentPeriodIndex();
/** @deprecated Use {@link #getCurrentMediaItem()} instead. */
/** @deprecated Use {@link #getCurrentMediaItemIndex()} instead. */
@Deprecated
int getCurrentWindowIndex();

View file

@ -113,23 +113,23 @@ public final class TrackSelectionOverrides implements Bundleable {
}
/**
* Forces the selection of {@link #trackIndexes} for a {@link TrackGroup}.
* Forces the selection of {@link #trackIndices} for a {@link TrackGroup}.
*
* <p>If multiple {link #tracks} are overridden, as many as possible will be selected depending on
* the player capabilities.
* <p>If multiple tracks in {@link #trackGroup} are overridden, as many as possible will be
* selected depending on the player capabilities.
*
* <p>If a {@link TrackSelectionOverride} has no tracks ({@code tracks.isEmpty()}), no tracks will
* be played. This is similar to {@link TrackSelectionParameters#disabledTrackTypes}, except it
* will only affect the playback of the associated {@link TrackGroup}. For example, if the only
* {@link C#TRACK_TYPE_VIDEO} {@link TrackGroup} is associated with no tracks, no video will play
* until the next video starts.
* <p>If {@link #trackIndices} is empty, no tracks from {@link #trackGroup} will be played. This
* is similar to {@link TrackSelectionParameters#disabledTrackTypes}, except it will only affect
* the playback of the associated {@link TrackGroup}. For example, if the only {@link
* C#TRACK_TYPE_VIDEO} {@link TrackGroup} is associated with no tracks, no video will play until
* the next video starts.
*/
public static final class TrackSelectionOverride implements Bundleable {
/** The {@link TrackGroup} whose {@link #trackIndexes} are forced to be selected. */
/** The {@link TrackGroup} whose {@link #trackIndices} are forced to be selected. */
public final TrackGroup trackGroup;
/** The index of tracks in a {@link TrackGroup} to be selected. */
public final ImmutableList<Integer> trackIndexes;
/** The indices of tracks in a {@link TrackGroup} to be selected. */
public final ImmutableList<Integer> trackIndices;
/** Constructs an instance to force all tracks in {@code trackGroup} to be selected. */
public TrackSelectionOverride(TrackGroup trackGroup) {
@ -138,23 +138,23 @@ public final class TrackSelectionOverrides implements Bundleable {
for (int i = 0; i < trackGroup.length; i++) {
builder.add(i);
}
this.trackIndexes = builder.build();
this.trackIndices = builder.build();
}
/**
* Constructs an instance to force {@code trackIndexes} in {@code trackGroup} to be selected.
* Constructs an instance to force {@code trackIndices} in {@code trackGroup} to be selected.
*
* @param trackGroup The {@link TrackGroup} for which to override the track selection.
* @param trackIndexes The indexes of the tracks in the {@link TrackGroup} to select.
* @param trackIndices The indices of the tracks in the {@link TrackGroup} to select.
*/
public TrackSelectionOverride(TrackGroup trackGroup, List<Integer> trackIndexes) {
if (!trackIndexes.isEmpty()) {
if (min(trackIndexes) < 0 || max(trackIndexes) >= trackGroup.length) {
public TrackSelectionOverride(TrackGroup trackGroup, List<Integer> trackIndices) {
if (!trackIndices.isEmpty()) {
if (min(trackIndices) < 0 || max(trackIndices) >= trackGroup.length) {
throw new IndexOutOfBoundsException();
}
}
this.trackGroup = trackGroup;
this.trackIndexes = ImmutableList.copyOf(trackIndexes);
this.trackIndices = ImmutableList.copyOf(trackIndices);
}
@Override
@ -166,15 +166,16 @@ public final class TrackSelectionOverrides implements Bundleable {
return false;
}
TrackSelectionOverride that = (TrackSelectionOverride) obj;
return trackGroup.equals(that.trackGroup) && trackIndexes.equals(that.trackIndexes);
return trackGroup.equals(that.trackGroup) && trackIndices.equals(that.trackIndices);
}
@Override
public int hashCode() {
return trackGroup.hashCode() + 31 * trackIndexes.hashCode();
return trackGroup.hashCode() + 31 * trackIndices.hashCode();
}
private @C.TrackType int getTrackType() {
/** Returns the {@link C.TrackType} of the overridden track group. */
public @C.TrackType int getTrackType() {
return MimeTypes.getTrackType(trackGroup.getFormat(0).sampleMimeType);
}
@ -195,7 +196,7 @@ public final class TrackSelectionOverrides implements Bundleable {
public Bundle toBundle() {
Bundle bundle = new Bundle();
bundle.putBundle(keyForField(FIELD_TRACK_GROUP), trackGroup.toBundle());
bundle.putIntArray(keyForField(FIELD_TRACKS), Ints.toArray(trackIndexes));
bundle.putIntArray(keyForField(FIELD_TRACKS), Ints.toArray(trackIndices));
return bundle;
}
@ -232,7 +233,7 @@ public final class TrackSelectionOverrides implements Bundleable {
return new Builder(overrides);
}
/** Returns all {@link TrackSelectionOverride} contained. */
/** Returns a list of the {@link TrackSelectionOverride overrides}. */
public ImmutableList<TrackSelectionOverride> asList() {
return ImmutableList.copyOf(overrides.values());
}

View file

@ -26,7 +26,6 @@ import android.opengl.EGLDisplay;
import android.opengl.EGLSurface;
import android.opengl.GLES11Ext;
import android.opengl.GLES20;
import android.text.TextUtils;
import androidx.annotation.DoNotInline;
import androidx.annotation.Nullable;
import androidx.annotation.RequiresApi;
@ -87,18 +86,6 @@ public final class GlUtil {
this(loadAsset(context, vertexShaderFilePath), loadAsset(context, fragmentShaderFilePath));
}
/**
* Compiles a GL shader program from vertex and fragment shader GLSL GLES20 code.
*
* @param vertexShaderGlsl The vertex shader program as arrays of strings. Strings are joined by
* adding a new line character in between each of them.
* @param fragmentShaderGlsl The fragment shader program as arrays of strings. Strings are
* joined by adding a new line character in between each of them.
*/
public Program(String[] vertexShaderGlsl, String[] fragmentShaderGlsl) {
this(TextUtils.join("\n", vertexShaderGlsl), TextUtils.join("\n", fragmentShaderGlsl));
}
/** Uses the program. */
public void use() {
// Link and check for errors.
@ -119,8 +106,19 @@ public final class GlUtil {
GLES20.glDeleteProgram(programId);
}
/**
* Returns the location of an {@link Attribute}, which has been enabled as a vertex attribute
* array.
*/
public int getAttributeArrayLocationAndEnable(String attributeName) {
int location = getAttributeLocation(attributeName);
GLES20.glEnableVertexAttribArray(location);
checkGlError();
return location;
}
/** Returns the location of an {@link Attribute}. */
public int getAttribLocation(String attributeName) {
private int getAttributeLocation(String attributeName) {
return GLES20.glGetAttribLocation(programId, attributeName);
}
@ -134,7 +132,7 @@ public final class GlUtil {
int[] attributeCount = new int[1];
GLES20.glGetProgramiv(programId, GLES20.GL_ACTIVE_ATTRIBUTES, attributeCount, 0);
if (attributeCount[0] != 2) {
throw new IllegalStateException("Expected two attributes.");
throw new IllegalStateException("Expected two attributes but found " + attributeCount[0]);
}
Attribute[] attributes = new Attribute[attributeCount[0]];
@ -169,7 +167,7 @@ public final class GlUtil {
GLES20.glGetActiveAttrib(
programId, index, length[0], ignore, 0, size, 0, type, 0, nameBytes, 0);
String name = new String(nameBytes, 0, strlen(nameBytes));
int location = getAttribLocation(name);
int location = getAttributeLocation(name);
return new Attribute(name, index, location);
}

View file

@ -268,8 +268,8 @@ public class MediaItemTest {
}
@Test
@SuppressWarnings("deprecation") // Using deprecated Subtitle type
public void builderSetSubtitles_setsSubtitles() {
@SuppressWarnings("deprecation") // Reading deprecated subtitles field
public void builderSetSubtitleConfigurations() {
List<MediaItem.SubtitleConfiguration> subtitleConfigurations =
ImmutableList.of(
new MediaItem.SubtitleConfiguration.Builder(Uri.parse(URI_STRING + "/es"))
@ -278,7 +278,24 @@ public class MediaItemTest {
.setSelectionFlags(C.SELECTION_FLAG_FORCED)
.setRoleFlags(C.ROLE_FLAG_ALTERNATE)
.setLabel("label")
.build(),
.build());
MediaItem mediaItem =
new MediaItem.Builder()
.setUri(URI_STRING)
.setSubtitleConfigurations(subtitleConfigurations)
.build();
assertThat(mediaItem.localConfiguration.subtitleConfigurations)
.isEqualTo(subtitleConfigurations);
assertThat(mediaItem.localConfiguration.subtitles).isEqualTo(subtitleConfigurations);
}
@Test
@SuppressWarnings("deprecation") // Using deprecated Subtitle type
public void builderSetSubtitles() {
List<MediaItem.Subtitle> subtitles =
ImmutableList.of(
new MediaItem.Subtitle(
Uri.parse(URI_STRING + "/en"), MimeTypes.APPLICATION_TTML, /* language= */ "en"),
new MediaItem.Subtitle(
@ -295,14 +312,10 @@ public class MediaItemTest {
"label"));
MediaItem mediaItem =
new MediaItem.Builder()
.setUri(URI_STRING)
.setSubtitleConfigurations(subtitleConfigurations)
.build();
new MediaItem.Builder().setUri(URI_STRING).setSubtitles(subtitles).build();
assertThat(mediaItem.localConfiguration.subtitleConfigurations)
.isEqualTo(subtitleConfigurations);
assertThat(mediaItem.localConfiguration.subtitles).isEqualTo(subtitleConfigurations);
assertThat(mediaItem.localConfiguration.subtitleConfigurations).isEqualTo(subtitles);
assertThat(mediaItem.localConfiguration.subtitles).isEqualTo(subtitles);
}
@Test

View file

@ -301,12 +301,13 @@ public class TimelineTest {
window.isSeekable = true;
window.isDynamic = true;
window.liveConfiguration =
new LiveConfiguration(
/* targetOffsetMs= */ 1,
/* minOffsetMs= */ 2,
/* maxOffsetMs= */ 3,
/* minPlaybackSpeed= */ 0.5f,
/* maxPlaybackSpeed= */ 1.5f);
new LiveConfiguration.Builder()
.setTargetOffsetMs(1)
.setMinOffsetMs(2)
.setMaxOffsetMs(3)
.setMinPlaybackSpeed(0.5f)
.setMaxPlaybackSpeed(1.5f)
.build();
window.isPlaceholder = true;
window.defaultPositionUs = 444;
window.durationUs = 555;

View file

@ -49,7 +49,7 @@ public final class TrackSelectionOverridesTest {
new TrackSelectionOverride(newTrackGroupWithIds(1, 2));
assertThat(trackSelectionOverride.trackGroup).isEqualTo(newTrackGroupWithIds(1, 2));
assertThat(trackSelectionOverride.trackIndexes).containsExactly(0, 1).inOrder();
assertThat(trackSelectionOverride.trackIndices).containsExactly(0, 1).inOrder();
}
@Test
@ -58,7 +58,7 @@ public final class TrackSelectionOverridesTest {
new TrackSelectionOverride(newTrackGroupWithIds(1, 2), ImmutableList.of(1));
assertThat(trackSelectionOverride.trackGroup).isEqualTo(newTrackGroupWithIds(1, 2));
assertThat(trackSelectionOverride.trackIndexes).containsExactly(1);
assertThat(trackSelectionOverride.trackIndices).containsExactly(1);
}
@Test
@ -67,7 +67,7 @@ public final class TrackSelectionOverridesTest {
new TrackSelectionOverride(newTrackGroupWithIds(1, 2), ImmutableList.of());
assertThat(trackSelectionOverride.trackGroup).isEqualTo(newTrackGroupWithIds(1, 2));
assertThat(trackSelectionOverride.trackIndexes).isEmpty();
assertThat(trackSelectionOverride.trackIndices).isEmpty();
}
@Test
@ -118,9 +118,9 @@ public final class TrackSelectionOverridesTest {
public void addOverride_onSameGroup_replacesOverride() {
TrackGroup trackGroup = newTrackGroupWithIds(1, 2, 3);
TrackSelectionOverride override1 =
new TrackSelectionOverride(trackGroup, /* trackIndexes= */ ImmutableList.of(0));
new TrackSelectionOverride(trackGroup, /* trackIndices= */ ImmutableList.of(0));
TrackSelectionOverride override2 =
new TrackSelectionOverride(trackGroup, /* trackIndexes= */ ImmutableList.of(1));
new TrackSelectionOverride(trackGroup, /* trackIndices= */ ImmutableList.of(1));
TrackSelectionOverrides trackSelectionOverrides =
new TrackSelectionOverrides.Builder().addOverride(override1).addOverride(override2).build();

View file

@ -77,7 +77,7 @@ public final class TrackSelectionParametersTest {
new TrackGroup(
new Format.Builder().setId(4).build(),
new Format.Builder().setId(5).build()),
/* trackIndexes= */ ImmutableList.of(1)))
/* trackIndices= */ ImmutableList.of(1)))
.build();
TrackSelectionParameters parameters =
TrackSelectionParameters.DEFAULT_WITHOUT_CONTEXT

View file

@ -20,6 +20,7 @@ import static com.google.common.truth.Truth.assertThat;
import android.net.Uri;
import androidx.test.ext.junit.runners.AndroidJUnit4;
import com.google.android.exoplayer2.MediaItem.SubtitleConfiguration;
import com.google.android.exoplayer2.source.ClippingMediaSource;
import com.google.android.exoplayer2.text.Cue;
import com.google.android.exoplayer2.util.ConditionVariable;
@ -44,15 +45,16 @@ public final class ClippedPlaybackTest {
MediaItem mediaItem =
new MediaItem.Builder()
.setUri("asset:///media/mp4/sample.mp4")
.setSubtitles(
.setSubtitleConfigurations(
ImmutableList.of(
new MediaItem.Subtitle(
Uri.parse("asset:///media/webvtt/typical"),
MimeTypes.TEXT_VTT,
"en",
C.SELECTION_FLAG_DEFAULT)))
new SubtitleConfiguration.Builder(Uri.parse("asset:///media/webvtt/typical"))
.setMimeType(MimeTypes.TEXT_VTT)
.setLanguage("en")
.setSelectionFlags(C.SELECTION_FLAG_DEFAULT)
.build()))
// Expect the clipping to affect both subtitles and video.
.setClipEndPositionMs(1000)
.setClippingConfiguration(
new MediaItem.ClippingConfiguration.Builder().setEndPositionMs(1000).build())
.build();
AtomicReference<ExoPlayer> player = new AtomicReference<>();
TextCapturingPlaybackListener textCapturer = new TextCapturingPlaybackListener();
@ -80,21 +82,24 @@ public final class ClippedPlaybackTest {
ImmutableList.of(
new MediaItem.Builder()
.setUri("asset:///media/mp4/sample.mp4")
.setSubtitles(
.setSubtitleConfigurations(
ImmutableList.of(
new MediaItem.Subtitle(
Uri.parse("asset:///media/webvtt/typical"),
MimeTypes.TEXT_VTT,
"en",
C.SELECTION_FLAG_DEFAULT)))
new SubtitleConfiguration.Builder(
Uri.parse("asset:///media/webvtt/typical"))
.setMimeType(MimeTypes.TEXT_VTT)
.setLanguage("en")
.setSelectionFlags(C.SELECTION_FLAG_DEFAULT)
.build()))
// Expect the clipping to affect both subtitles and video.
.setClipEndPositionMs(1000)
.setClippingConfiguration(
new MediaItem.ClippingConfiguration.Builder().setEndPositionMs(1000).build())
.build(),
new MediaItem.Builder()
.setUri("asset:///media/mp4/sample.mp4")
// Not needed for correctness, just makes test run faster. Must be longer than the
// subtitle content (3.5s).
.setClipEndPositionMs(4_000)
.setClippingConfiguration(
new MediaItem.ClippingConfiguration.Builder().setEndPositionMs(4_000).build())
.build());
AtomicReference<ExoPlayer> player = new AtomicReference<>();
TextCapturingPlaybackListener textCapturer = new TextCapturingPlaybackListener();

View file

@ -15,9 +15,11 @@
*/
package com.google.android.exoplayer2;
import static com.google.android.exoplayer2.util.Assertions.checkNotNull;
import static java.lang.Math.max;
import androidx.annotation.Nullable;
import com.google.android.exoplayer2.analytics.PlayerId;
import com.google.android.exoplayer2.decoder.DecoderInputBuffer;
import com.google.android.exoplayer2.decoder.DecoderInputBuffer.InsufficientCapacityException;
import com.google.android.exoplayer2.source.SampleStream;
@ -26,6 +28,7 @@ import com.google.android.exoplayer2.source.SampleStream.ReadFlags;
import com.google.android.exoplayer2.util.Assertions;
import com.google.android.exoplayer2.util.MediaClock;
import java.io.IOException;
import org.checkerframework.checker.nullness.qual.MonotonicNonNull;
/** An abstract base class suitable for most {@link Renderer} implementations. */
public abstract class BaseRenderer implements Renderer, RendererCapabilities {
@ -35,6 +38,7 @@ public abstract class BaseRenderer implements Renderer, RendererCapabilities {
@Nullable private RendererConfiguration configuration;
private int index;
private @MonotonicNonNull PlayerId playerId;
private int state;
@Nullable private SampleStream stream;
@Nullable private Format[] streamFormats;
@ -65,8 +69,9 @@ public abstract class BaseRenderer implements Renderer, RendererCapabilities {
}
@Override
public final void setIndex(int index) {
public final void init(int index, PlayerId playerId) {
this.index = index;
this.playerId = playerId;
}
@Override
@ -328,11 +333,24 @@ public abstract class BaseRenderer implements Renderer, RendererCapabilities {
return Assertions.checkNotNull(configuration);
}
/** Returns the index of the renderer within the player. */
/**
* Returns the index of the renderer within the player.
*
* <p>Must only be used after the renderer has been initialized by the player.
*/
protected final int getIndex() {
return index;
}
/**
* Returns the {@link PlayerId} of the player using this renderer.
*
* <p>Must only be used after the renderer has been initialized by the player.
*/
protected final PlayerId getPlayerId() {
return checkNotNull(playerId);
}
/**
* Creates an {@link ExoPlaybackException} of type {@link ExoPlaybackException#TYPE_RENDERER} for
* this renderer.

View file

@ -191,6 +191,25 @@ public class DefaultRenderersFactory implements RenderersFactory {
return this;
}
/**
* Enable calling {@link MediaCodec#start} immediately after {@link MediaCodec#flush} on the
* playback thread, when operating the codec in asynchronous mode. If disabled, {@link
* MediaCodec#start} will be called by the callback thread after pending callbacks are handled.
*
* <p>By default, this feature is disabled.
*
* <p>This method is experimental, and will be renamed or removed in a future release.
*
* @param enabled Whether {@link MediaCodec#start} will be called on the playback thread
* immediately after {@link MediaCodec#flush}.
* @return This factory, for convenience.
*/
public DefaultRenderersFactory experimentalSetImmediateCodecStartAfterFlushEnabled(
boolean enabled) {
codecAdapterFactory.experimentalSetImmediateCodecStartAfterFlushEnabled(enabled);
return this;
}
/**
* Sets whether to enable fallback to lower-priority decoders if decoder initialization fails.
* This may result in using a decoder that is less efficient or slower than the primary decoder.

View file

@ -982,6 +982,7 @@ public interface ExoPlayer extends Player {
* {@link ExoPlaybackException}.
*/
@Override
@Nullable
ExoPlaybackException getPlayerError();
/**

View file

@ -22,6 +22,7 @@ import static java.lang.Math.max;
import static java.lang.Math.min;
import android.annotation.SuppressLint;
import android.media.metrics.LogSessionId;
import android.os.Handler;
import android.os.Looper;
import android.util.Pair;
@ -30,9 +31,11 @@ import android.view.SurfaceHolder;
import android.view.SurfaceView;
import android.view.TextureView;
import androidx.annotation.Nullable;
import androidx.annotation.RequiresApi;
import com.google.android.exoplayer2.ExoPlayer.AudioOffloadListener;
import com.google.android.exoplayer2.PlayerMessage.Target;
import com.google.android.exoplayer2.analytics.AnalyticsCollector;
import com.google.android.exoplayer2.analytics.PlayerId;
import com.google.android.exoplayer2.audio.AudioAttributes;
import com.google.android.exoplayer2.metadata.Metadata;
import com.google.android.exoplayer2.source.MediaSource;
@ -246,6 +249,7 @@ import java.util.concurrent.CopyOnWriteArraySet;
addListener(analyticsCollector);
bandwidthMeter.addEventListener(new Handler(applicationLooper), analyticsCollector);
}
PlayerId playerId = Util.SDK_INT < 31 ? new PlayerId() : Api31.createPlayerId();
internalPlayer =
new ExoPlayerImplInternal(
renderers,
@ -262,7 +266,8 @@ import java.util.concurrent.CopyOnWriteArraySet;
pauseAtEndOfMediaItems,
applicationLooper,
clock,
playbackInfoUpdateListener);
playbackInfoUpdateListener,
playerId);
}
/**
@ -1856,4 +1861,14 @@ import java.util.concurrent.CopyOnWriteArraySet;
return timeline;
}
}
@RequiresApi(31)
private static final class Api31 {
private Api31() {}
public static PlayerId createPlayerId() {
// TODO: Create a MediaMetricsListener and obtain LogSessionId from it.
return new PlayerId(LogSessionId.LOG_SESSION_ID_NONE);
}
}
}

View file

@ -35,6 +35,7 @@ import com.google.android.exoplayer2.Player.PlayWhenReadyChangeReason;
import com.google.android.exoplayer2.Player.PlaybackSuppressionReason;
import com.google.android.exoplayer2.Player.RepeatMode;
import com.google.android.exoplayer2.analytics.AnalyticsCollector;
import com.google.android.exoplayer2.analytics.PlayerId;
import com.google.android.exoplayer2.drm.DrmSession;
import com.google.android.exoplayer2.metadata.Metadata;
import com.google.android.exoplayer2.source.BehindLiveWindowException;
@ -229,7 +230,8 @@ import java.util.concurrent.atomic.AtomicBoolean;
boolean pauseAtEndOfWindow,
Looper applicationLooper,
Clock clock,
PlaybackInfoUpdateListener playbackInfoUpdateListener) {
PlaybackInfoUpdateListener playbackInfoUpdateListener,
PlayerId playerId) {
this.playbackInfoUpdateListener = playbackInfoUpdateListener;
this.renderers = renderers;
this.trackSelector = trackSelector;
@ -252,7 +254,7 @@ import java.util.concurrent.atomic.AtomicBoolean;
playbackInfoUpdate = new PlaybackInfoUpdate(playbackInfo);
rendererCapabilities = new RendererCapabilities[renderers.length];
for (int i = 0; i < renderers.length; i++) {
renderers[i].setIndex(i);
renderers[i].init(/* index= */ i, playerId);
rendererCapabilities[i] = renderers[i].getCapabilities();
}
mediaClock = new DefaultMediaClock(this, clock);
@ -266,7 +268,8 @@ import java.util.concurrent.atomic.AtomicBoolean;
Handler eventHandler = new Handler(applicationLooper);
queue = new MediaPeriodQueue(analyticsCollector, eventHandler);
mediaSourceList = new MediaSourceList(/* listener= */ this, analyticsCollector, eventHandler);
mediaSourceList =
new MediaSourceList(/* listener= */ this, analyticsCollector, eventHandler, playerId);
// Note: The documentation for Process.THREAD_PRIORITY_AUDIO that states "Applications can
// not normally change to this priority" is incorrect.

View file

@ -21,6 +21,7 @@ import static java.lang.Math.min;
import android.os.Handler;
import androidx.annotation.Nullable;
import com.google.android.exoplayer2.analytics.AnalyticsCollector;
import com.google.android.exoplayer2.analytics.PlayerId;
import com.google.android.exoplayer2.drm.DrmSession;
import com.google.android.exoplayer2.drm.DrmSessionEventListener;
import com.google.android.exoplayer2.source.LoadEventInfo;
@ -70,6 +71,7 @@ import java.util.Set;
private static final String TAG = "MediaSourceList";
private final PlayerId playerId;
private final List<MediaSourceHolder> mediaSourceHolders;
private final IdentityHashMap<MediaPeriod, MediaSourceHolder> mediaSourceByMediaPeriod;
private final Map<Object, MediaSourceHolder> mediaSourceByUid;
@ -93,11 +95,14 @@ import java.util.Set;
* source events.
* @param analyticsCollectorHandler The {@link Handler} to call {@link AnalyticsCollector} methods
* on.
* @param playerId The {@link PlayerId} of the player using this list.
*/
public MediaSourceList(
MediaSourceListInfoRefreshListener listener,
@Nullable AnalyticsCollector analyticsCollector,
Handler analyticsCollectorHandler) {
Handler analyticsCollectorHandler,
PlayerId playerId) {
this.playerId = playerId;
mediaSourceListInfoListener = listener;
shuffleOrder = new DefaultShuffleOrder(0);
mediaSourceByMediaPeriod = new IdentityHashMap<>();
@ -440,7 +445,7 @@ import java.util.Set;
childSources.put(holder, new MediaSourceAndListener(mediaSource, caller, eventListener));
mediaSource.addEventListener(Util.createHandlerForCurrentOrMainLooper(), eventListener);
mediaSource.addDrmEventListener(Util.createHandlerForCurrentOrMainLooper(), eventListener);
mediaSource.prepareSource(caller, mediaTransferListener);
mediaSource.prepareSource(caller, mediaTransferListener, playerId);
}
private void maybeReleaseChildSource(MediaSourceHolder mediaSourceHolder) {

View file

@ -23,6 +23,7 @@ import android.os.Handler;
import android.os.HandlerThread;
import android.os.Message;
import androidx.annotation.VisibleForTesting;
import com.google.android.exoplayer2.analytics.PlayerId;
import com.google.android.exoplayer2.extractor.DefaultExtractorsFactory;
import com.google.android.exoplayer2.extractor.ExtractorsFactory;
import com.google.android.exoplayer2.extractor.mp4.Mp4Extractor;
@ -140,7 +141,8 @@ public final class MetadataRetriever {
case MESSAGE_PREPARE_SOURCE:
MediaItem mediaItem = (MediaItem) msg.obj;
mediaSource = mediaSourceFactory.createMediaSource(mediaItem);
mediaSource.prepareSource(mediaSourceCaller, /* mediaTransferListener= */ null);
mediaSource.prepareSource(
mediaSourceCaller, /* mediaTransferListener= */ null, PlayerId.UNSET);
mediaSourceHandler.sendEmptyMessage(MESSAGE_CHECK_FOR_FAILURE);
return true;
case MESSAGE_CHECK_FOR_FAILURE:

View file

@ -16,6 +16,7 @@
package com.google.android.exoplayer2;
import androidx.annotation.Nullable;
import com.google.android.exoplayer2.analytics.PlayerId;
import com.google.android.exoplayer2.source.SampleStream;
import com.google.android.exoplayer2.util.Assertions;
import com.google.android.exoplayer2.util.MediaClock;
@ -45,7 +46,7 @@ public abstract class NoSampleRenderer implements Renderer, RendererCapabilities
}
@Override
public final void setIndex(int index) {
public final void init(int index, PlayerId playerId) {
this.index = index;
}

View file

@ -20,6 +20,7 @@ import android.view.Surface;
import androidx.annotation.IntDef;
import androidx.annotation.Nullable;
import com.google.android.exoplayer2.PlayerMessage.Target;
import com.google.android.exoplayer2.analytics.PlayerId;
import com.google.android.exoplayer2.audio.AudioAttributes;
import com.google.android.exoplayer2.audio.AuxEffectInfo;
import com.google.android.exoplayer2.source.SampleStream;
@ -248,11 +249,12 @@ public interface Renderer extends PlayerMessage.Target {
RendererCapabilities getCapabilities();
/**
* Sets the index of this renderer within the player.
* Initializes the renderer for playback with a player.
*
* @param index The renderer index.
* @param index The renderer index within the player.
* @param playerId The {@link PlayerId} of the player.
*/
void setIndex(int index);
void init(int index, PlayerId playerId);
/**
* If the renderer advances its own playback position then this method returns a corresponding

View file

@ -0,0 +1,75 @@
/*
* Copyright 2021 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.google.android.exoplayer2.analytics;
import static com.google.android.exoplayer2.util.Assertions.checkNotNull;
import static com.google.android.exoplayer2.util.Assertions.checkState;
import android.media.metrics.LogSessionId;
import androidx.annotation.Nullable;
import androidx.annotation.RequiresApi;
import com.google.android.exoplayer2.util.Util;
/** Identifier for a player instance. */
public final class PlayerId {
/**
* A player identifier with unset default values that can be used as a placeholder or for testing.
*/
public static final PlayerId UNSET =
Util.SDK_INT < 31 ? new PlayerId() : new PlayerId(LogSessionIdApi31.UNSET);
@Nullable private final LogSessionIdApi31 logSessionIdApi31;
/** Creates an instance for API &lt; 31. */
public PlayerId() {
this(/* logSessionIdApi31= */ (LogSessionIdApi31) null);
checkState(Util.SDK_INT < 31);
}
/**
* Creates an instance for API &ge; 31.
*
* @param logSessionId The {@link LogSessionId} used for this player.
*/
@RequiresApi(31)
public PlayerId(LogSessionId logSessionId) {
this(new LogSessionIdApi31(logSessionId));
}
private PlayerId(@Nullable LogSessionIdApi31 logSessionIdApi31) {
this.logSessionIdApi31 = logSessionIdApi31;
}
/** Returns the {@link LogSessionId} for this player instance. */
@RequiresApi(31)
public LogSessionId getLogSessionId() {
return checkNotNull(logSessionIdApi31).logSessionId;
}
@RequiresApi(31)
private static final class LogSessionIdApi31 {
public static final LogSessionIdApi31 UNSET =
new LogSessionIdApi31(LogSessionId.LOG_SESSION_ID_NONE);
public final LogSessionId logSessionId;
public LogSessionIdApi31(LogSessionId logSessionId) {
this.logSessionId = logSessionId;
}
}
}

View file

@ -50,11 +50,7 @@ import java.nio.ByteBuffer;
private final Supplier<HandlerThread> callbackThreadSupplier;
private final Supplier<HandlerThread> queueingThreadSupplier;
private final boolean synchronizeCodecInteractionsWithQueueing;
/** Creates a factory for codecs handling the specified {@link C.TrackType track type}. */
public Factory(@C.TrackType int trackType) {
this(trackType, /* synchronizeCodecInteractionsWithQueueing= */ false);
}
private final boolean enableImmediateCodecStartAfterFlush;
/**
* Creates an factory for {@link AsynchronousMediaCodecAdapter} instances.
@ -66,23 +62,29 @@ import java.nio.ByteBuffer;
* interactions will wait until all input buffers pending queueing wil be submitted to the
* {@link MediaCodec}.
*/
public Factory(@C.TrackType int trackType, boolean synchronizeCodecInteractionsWithQueueing) {
public Factory(
@C.TrackType int trackType,
boolean synchronizeCodecInteractionsWithQueueing,
boolean enableImmediateCodecStartAfterFlush) {
this(
/* callbackThreadSupplier= */ () ->
new HandlerThread(createCallbackThreadLabel(trackType)),
/* queueingThreadSupplier= */ () ->
new HandlerThread(createQueueingThreadLabel(trackType)),
synchronizeCodecInteractionsWithQueueing);
synchronizeCodecInteractionsWithQueueing,
enableImmediateCodecStartAfterFlush);
}
@VisibleForTesting
/* package */ Factory(
Supplier<HandlerThread> callbackThreadSupplier,
Supplier<HandlerThread> queueingThreadSupplier,
boolean synchronizeCodecInteractionsWithQueueing) {
boolean synchronizeCodecInteractionsWithQueueing,
boolean enableImmediateCodecStartAfterFlush) {
this.callbackThreadSupplier = callbackThreadSupplier;
this.queueingThreadSupplier = queueingThreadSupplier;
this.synchronizeCodecInteractionsWithQueueing = synchronizeCodecInteractionsWithQueueing;
this.enableImmediateCodecStartAfterFlush = enableImmediateCodecStartAfterFlush;
}
@Override
@ -99,7 +101,8 @@ import java.nio.ByteBuffer;
codec,
callbackThreadSupplier.get(),
queueingThreadSupplier.get(),
synchronizeCodecInteractionsWithQueueing);
synchronizeCodecInteractionsWithQueueing,
enableImmediateCodecStartAfterFlush);
TraceUtil.endSection();
codecAdapter.initialize(
configuration.mediaFormat,
@ -132,6 +135,7 @@ import java.nio.ByteBuffer;
private final AsynchronousMediaCodecCallback asynchronousMediaCodecCallback;
private final AsynchronousMediaCodecBufferEnqueuer bufferEnqueuer;
private final boolean synchronizeCodecInteractionsWithQueueing;
private final boolean enableImmediateCodecStartAfterFlush;
private boolean codecReleased;
@State private int state;
@Nullable private Surface inputSurface;
@ -140,11 +144,13 @@ import java.nio.ByteBuffer;
MediaCodec codec,
HandlerThread callbackThread,
HandlerThread enqueueingThread,
boolean synchronizeCodecInteractionsWithQueueing) {
boolean synchronizeCodecInteractionsWithQueueing,
boolean enableImmediateCodecStartAfterFlush) {
this.codec = codec;
this.asynchronousMediaCodecCallback = new AsynchronousMediaCodecCallback(callbackThread);
this.bufferEnqueuer = new AsynchronousMediaCodecBufferEnqueuer(codec, enqueueingThread);
this.synchronizeCodecInteractionsWithQueueing = synchronizeCodecInteractionsWithQueueing;
this.enableImmediateCodecStartAfterFlush = enableImmediateCodecStartAfterFlush;
this.state = STATE_CREATED;
}
@ -231,13 +237,20 @@ import java.nio.ByteBuffer;
@Override
public void flush() {
// The order of calls is important:
// First, flush the bufferEnqueuer to stop queueing input buffers.
// Second, flush the codec to stop producing available input/output buffers.
// Third, flush the callback after flushing the codec so that in-flight callbacks are discarded.
// 1. Flush the bufferEnqueuer to stop queueing input buffers.
// 2. Flush the codec to stop producing available input/output buffers.
// 3. Flush the callback after flushing the codec so that in-flight callbacks are discarded.
bufferEnqueuer.flush();
codec.flush();
// When flushAsync() is completed, start the codec again.
asynchronousMediaCodecCallback.flushAsync(/* onFlushCompleted= */ codec::start);
if (enableImmediateCodecStartAfterFlush) {
// The asynchronous callback will drop pending callbacks but we can start the codec now.
asynchronousMediaCodecCallback.flush(/* codec= */ null);
codec.start();
} else {
// Let the asynchronous callback start the codec in the callback thread after pending
// callbacks are handled.
asynchronousMediaCodecCallback.flush(codec);
}
}
@Override
@ -289,6 +302,7 @@ import java.nio.ByteBuffer;
@Override
public void signalEndOfInputStream() {
maybeBlockOnQueueing();
codec.signalEndOfInputStream();
}

View file

@ -16,6 +16,7 @@
package com.google.android.exoplayer2.mediacodec;
import static androidx.annotation.VisibleForTesting.NONE;
import static com.google.android.exoplayer2.util.Assertions.checkNotNull;
import static com.google.android.exoplayer2.util.Util.castNonNull;
@ -147,7 +148,7 @@ class AsynchronousMediaCodecBufferEnqueuer {
}
}
/** Shut down the instance. Make sure to call this method to release its internal resources. */
/** Shuts down the instance. Make sure to call this method to release its internal resources. */
public void shutdown() {
if (started) {
flush();
@ -173,26 +174,23 @@ class AsynchronousMediaCodecBufferEnqueuer {
* blocks until the {@link #handlerThread} is idle.
*/
private void flushHandlerThread() throws InterruptedException {
Handler handler = castNonNull(this.handler);
handler.removeCallbacksAndMessages(null);
checkNotNull(this.handler).removeCallbacksAndMessages(null);
blockUntilHandlerThreadIsIdle();
// Check if any exceptions happened during the last queueing action.
maybeThrowException();
}
private void blockUntilHandlerThreadIsIdle() throws InterruptedException {
conditionVariable.close();
castNonNull(handler).obtainMessage(MSG_OPEN_CV).sendToTarget();
checkNotNull(handler).obtainMessage(MSG_OPEN_CV).sendToTarget();
conditionVariable.block();
}
// Called from the handler thread
@VisibleForTesting
@VisibleForTesting(otherwise = NONE)
/* package */ void setPendingRuntimeException(RuntimeException exception) {
pendingRuntimeException.set(exception);
}
// Called from the handler thread
private void doHandleMessage(Message msg) {
@Nullable MessageParams params = null;
switch (msg.what) {
@ -214,7 +212,8 @@ class AsynchronousMediaCodecBufferEnqueuer {
conditionVariable.open();
break;
default:
setPendingRuntimeException(new IllegalStateException(String.valueOf(msg.what)));
pendingRuntimeException.compareAndSet(
null, new IllegalStateException(String.valueOf(msg.what)));
}
if (params != null) {
recycleMessageParams(params);
@ -226,7 +225,7 @@ class AsynchronousMediaCodecBufferEnqueuer {
try {
codec.queueInputBuffer(index, offset, size, presentationTimeUs, flag);
} catch (RuntimeException e) {
setPendingRuntimeException(e);
pendingRuntimeException.compareAndSet(null, e);
}
}
@ -240,7 +239,7 @@ class AsynchronousMediaCodecBufferEnqueuer {
codec.queueSecureInputBuffer(index, offset, info, presentationTimeUs, flags);
}
} catch (RuntimeException e) {
setPendingRuntimeException(e);
pendingRuntimeException.compareAndSet(null, e);
}
}

View file

@ -34,8 +34,8 @@ import org.checkerframework.checker.nullness.qual.MonotonicNonNull;
@RequiresApi(23)
/* package */ final class AsynchronousMediaCodecCallback extends MediaCodec.Callback {
private final Object lock;
private final HandlerThread callbackThread;
private @MonotonicNonNull Handler handler;
@GuardedBy("lock")
@ -192,14 +192,13 @@ import org.checkerframework.checker.nullness.qual.MonotonicNonNull;
* Initiates a flush asynchronously, which will be completed on the callback thread. When the
* flush is complete, it will trigger {@code onFlushCompleted} from the callback thread.
*
* @param onFlushCompleted A {@link Runnable} that will be called when flush is completed. {@code
* onFlushCompleted} will be called from the scallback thread, therefore it should execute
* synchronized and thread-safe code.
* @param codec A {@link MediaCodec} to {@link MediaCodec#start start} after all pending callbacks
* are handled, or {@code null} if starting the {@link MediaCodec} is performed elsewhere.
*/
public void flushAsync(Runnable onFlushCompleted) {
public void flush(@Nullable MediaCodec codec) {
synchronized (lock) {
++pendingFlushCount;
Util.castNonNull(handler).post(() -> this.onFlushCompleted(onFlushCompleted));
Util.castNonNull(handler).post(() -> this.onFlushCompleted(codec));
}
}
@ -239,34 +238,31 @@ import org.checkerframework.checker.nullness.qual.MonotonicNonNull;
}
}
private void onFlushCompleted(Runnable onFlushCompleted) {
private void onFlushCompleted(@Nullable MediaCodec codec) {
synchronized (lock) {
onFlushCompletedSynchronized(onFlushCompleted);
}
}
if (shutDown) {
return;
}
@GuardedBy("lock")
private void onFlushCompletedSynchronized(Runnable onFlushCompleted) {
if (shutDown) {
return;
}
--pendingFlushCount;
if (pendingFlushCount > 0) {
// Another flush() has been called.
return;
} else if (pendingFlushCount < 0) {
// This should never happen.
setInternalException(new IllegalStateException());
return;
}
flushInternal();
try {
onFlushCompleted.run();
} catch (IllegalStateException e) {
setInternalException(e);
} catch (Exception e) {
setInternalException(new IllegalStateException(e));
--pendingFlushCount;
if (pendingFlushCount > 0) {
// Another flush() has been called.
return;
} else if (pendingFlushCount < 0) {
// This should never happen.
setInternalException(new IllegalStateException());
return;
}
flushInternal();
if (codec != null) {
try {
codec.start();
} catch (IllegalStateException e) {
setInternalException(e);
} catch (Exception e) {
setInternalException(new IllegalStateException(e));
}
}
}
}
@ -275,10 +271,10 @@ import org.checkerframework.checker.nullness.qual.MonotonicNonNull;
private void flushInternal() {
if (!formats.isEmpty()) {
pendingOutputFormat = formats.getLast();
} else {
// pendingOutputFormat may already be non-null following a previous flush, and remains set in
// this case.
}
// else, pendingOutputFormat may already be non-null following a previous flush, and remains
// set in this case.
availableInputBuffers.clear();
availableOutputBuffers.clear();
bufferInfos.clear();

View file

@ -15,6 +15,7 @@
*/
package com.google.android.exoplayer2.mediacodec;
import android.media.MediaCodec;
import androidx.annotation.IntDef;
import com.google.android.exoplayer2.util.Log;
import com.google.android.exoplayer2.util.MimeTypes;
@ -46,6 +47,7 @@ public final class DefaultMediaCodecAdapterFactory implements MediaCodecAdapter.
@Mode private int asynchronousMode;
private boolean enableSynchronizeCodecInteractionsWithQueueing;
private boolean enableImmediateCodecStartAfterFlush;
public DefaultMediaCodecAdapterFactory() {
asynchronousMode = MODE_DEFAULT;
@ -85,6 +87,22 @@ public final class DefaultMediaCodecAdapterFactory implements MediaCodecAdapter.
enableSynchronizeCodecInteractionsWithQueueing = enabled;
}
/**
* Enable calling {@link MediaCodec#start} immediately after {@link MediaCodec#flush} on the
* playback thread, when operating the codec in asynchronous mode. If disabled, {@link
* MediaCodec#start} will be called by the callback thread after pending callbacks are handled.
*
* <p>By default, this feature is disabled.
*
* <p>This method is experimental, and will be renamed or removed in a future release.
*
* @param enabled Whether {@link MediaCodec#start()} will be called on the playback thread
* immediately after {@link MediaCodec#flush}.
*/
public void experimentalSetImmediateCodecStartAfterFlushEnabled(boolean enabled) {
enableImmediateCodecStartAfterFlush = enabled;
}
@Override
public MediaCodecAdapter createAdapter(MediaCodecAdapter.Configuration configuration)
throws IOException {
@ -97,7 +115,9 @@ public final class DefaultMediaCodecAdapterFactory implements MediaCodecAdapter.
+ Util.getTrackTypeString(trackType));
AsynchronousMediaCodecAdapter.Factory factory =
new AsynchronousMediaCodecAdapter.Factory(
trackType, enableSynchronizeCodecInteractionsWithQueueing);
trackType,
enableSynchronizeCodecInteractionsWithQueueing,
enableImmediateCodecStartAfterFlush);
return factory.createAdapter(configuration);
}
return new SynchronousMediaCodecAdapter.Factory().createAdapter(configuration);

View file

@ -32,6 +32,7 @@ import com.google.android.exoplayer2.Renderer;
import com.google.android.exoplayer2.RendererCapabilities;
import com.google.android.exoplayer2.RenderersFactory;
import com.google.android.exoplayer2.Timeline;
import com.google.android.exoplayer2.analytics.PlayerId;
import com.google.android.exoplayer2.audio.AudioRendererEventListener;
import com.google.android.exoplayer2.drm.DrmSessionManager;
import com.google.android.exoplayer2.extractor.ExtractorsFactory;
@ -956,7 +957,8 @@ public final class DownloadHelper {
public boolean handleMessage(Message msg) {
switch (msg.what) {
case MESSAGE_PREPARE_SOURCE:
mediaSource.prepareSource(/* caller= */ this, /* mediaTransferListener= */ null);
mediaSource.prepareSource(
/* caller= */ this, /* mediaTransferListener= */ null, PlayerId.UNSET);
mediaSourceHandler.sendEmptyMessage(MESSAGE_CHECK_FOR_FAILURE);
return true;
case MESSAGE_CHECK_FOR_FAILURE:

View file

@ -15,10 +15,13 @@
*/
package com.google.android.exoplayer2.source;
import static com.google.android.exoplayer2.util.Assertions.checkStateNotNull;
import android.os.Handler;
import android.os.Looper;
import androidx.annotation.Nullable;
import com.google.android.exoplayer2.Timeline;
import com.google.android.exoplayer2.analytics.PlayerId;
import com.google.android.exoplayer2.drm.DrmSessionEventListener;
import com.google.android.exoplayer2.upstream.TransferListener;
import com.google.android.exoplayer2.util.Assertions;
@ -41,6 +44,7 @@ public abstract class BaseMediaSource implements MediaSource {
@Nullable private Looper looper;
@Nullable private Timeline timeline;
@Nullable private PlayerId playerId;
public BaseMediaSource() {
mediaSourceCallers = new ArrayList<>(/* initialCapacity= */ 1);
@ -51,7 +55,7 @@ public abstract class BaseMediaSource implements MediaSource {
/**
* Starts source preparation and enables the source, see {@link #prepareSource(MediaSourceCaller,
* TransferListener)}. This method is called at most once until the next call to {@link
* TransferListener, PlayerId)}. This method is called at most once until the next call to {@link
* #releaseSourceInternal()}.
*
* @param mediaTransferListener The transfer listener which should be informed of any media data
@ -160,6 +164,16 @@ public abstract class BaseMediaSource implements MediaSource {
return !enabledMediaSourceCallers.isEmpty();
}
/**
* Returns the {@link PlayerId} of the player using this media source.
*
* <p>Must only be used when the media source is {@link #prepareSourceInternal(TransferListener)
* prepared}.
*/
protected final PlayerId getPlayerId() {
return checkStateNotNull(playerId);
}
@Override
public final void addEventListener(Handler handler, MediaSourceEventListener eventListener) {
Assertions.checkNotNull(handler);
@ -186,9 +200,12 @@ public abstract class BaseMediaSource implements MediaSource {
@Override
public final void prepareSource(
MediaSourceCaller caller, @Nullable TransferListener mediaTransferListener) {
MediaSourceCaller caller,
@Nullable TransferListener mediaTransferListener,
PlayerId playerId) {
Looper looper = Looper.myLooper();
Assertions.checkArgument(this.looper == null || this.looper == looper);
this.playerId = playerId;
@Nullable Timeline timeline = this.timeline;
mediaSourceCallers.add(caller);
if (this.looper == null) {
@ -226,6 +243,7 @@ public abstract class BaseMediaSource implements MediaSource {
if (mediaSourceCallers.isEmpty()) {
looper = null;
timeline = null;
playerId = null;
enabledMediaSourceCallers.clear();
releaseSourceInternal();
} else {

View file

@ -117,7 +117,7 @@ public abstract class CompositeMediaSource<T> extends BaseMediaSource {
childSources.put(id, new MediaSourceAndListener<>(mediaSource, caller, eventListener));
mediaSource.addEventListener(Assertions.checkNotNull(eventHandler), eventListener);
mediaSource.addDrmEventListener(Assertions.checkNotNull(eventHandler), eventListener);
mediaSource.prepareSource(caller, mediaTransferListener);
mediaSource.prepareSource(caller, mediaTransferListener, getPlayerId());
if (!isEnabled()) {
mediaSource.disable(caller);
}

View file

@ -294,6 +294,7 @@ public final class DefaultMediaSourceFactory implements MediaSourceFactory {
return this;
}
@Deprecated
@Override
public DefaultMediaSourceFactory setDrmHttpDataSourceFactory(
@Nullable HttpDataSource.Factory drmHttpDataSourceFactory) {
@ -301,12 +302,14 @@ public final class DefaultMediaSourceFactory implements MediaSourceFactory {
return this;
}
@Deprecated
@Override
public DefaultMediaSourceFactory setDrmUserAgent(@Nullable String userAgent) {
delegateFactoryLoader.setDrmUserAgent(userAgent);
return this;
}
@Deprecated
@Override
public DefaultMediaSourceFactory setDrmSessionManager(
@Nullable DrmSessionManager drmSessionManager) {

View file

@ -20,6 +20,7 @@ import androidx.annotation.Nullable;
import com.google.android.exoplayer2.ExoPlayer;
import com.google.android.exoplayer2.MediaItem;
import com.google.android.exoplayer2.Timeline;
import com.google.android.exoplayer2.analytics.PlayerId;
import com.google.android.exoplayer2.drm.DrmSessionEventListener;
import com.google.android.exoplayer2.upstream.Allocator;
import com.google.android.exoplayer2.upstream.TransferListener;
@ -34,7 +35,7 @@ import java.io.IOException;
* provide a new timeline whenever the structure of the media changes. The MediaSource
* provides these timelines by calling {@link MediaSourceCaller#onSourceInfoRefreshed} on the
* {@link MediaSourceCaller}s passed to {@link #prepareSource(MediaSourceCaller,
* TransferListener)}.
* TransferListener, PlayerId)}.
* <li>To provide {@link MediaPeriod} instances for the periods in its timeline. MediaPeriods are
* obtained by calling {@link #createPeriod(MediaPeriodId, Allocator, long)}, and provide a
* way for the player to load and read the media.
@ -183,6 +184,16 @@ public interface MediaSource {
/** Returns the {@link MediaItem} whose media is provided by the source. */
MediaItem getMediaItem();
/**
* @deprecated Implement {@link #prepareSource(MediaSourceCaller, TransferListener, PlayerId)}
* instead.
*/
@Deprecated
default void prepareSource(
MediaSourceCaller caller, @Nullable TransferListener mediaTransferListener) {
prepareSource(caller, mediaTransferListener, PlayerId.UNSET);
}
/**
* Registers a {@link MediaSourceCaller}. Starts source preparation if needed and enables the
* source for the creation of {@link MediaPeriod MediaPerods}.
@ -200,15 +211,20 @@ public interface MediaSource {
* transfers. May be null if no listener is available. Note that this listener should be only
* informed of transfers related to the media loads and not of auxiliary loads for manifests
* and other data.
* @param playerId The {@link PlayerId} of the player using this media source.
*/
void prepareSource(MediaSourceCaller caller, @Nullable TransferListener mediaTransferListener);
void prepareSource(
MediaSourceCaller caller,
@Nullable TransferListener mediaTransferListener,
PlayerId playerId);
/**
* Throws any pending error encountered while loading or refreshing source information.
*
* <p>Should not be called directly from application code.
*
* <p>Must only be called after {@link #prepareSource(MediaSourceCaller, TransferListener)}.
* <p>Must only be called after {@link #prepareSource(MediaSourceCaller, TransferListener,
* PlayerId)}.
*/
void maybeThrowSourceInfoRefreshError() throws IOException;
@ -217,7 +233,8 @@ public interface MediaSource {
*
* <p>Should not be called directly from application code.
*
* <p>Must only be called after {@link #prepareSource(MediaSourceCaller, TransferListener)}.
* <p>Must only be called after {@link #prepareSource(MediaSourceCaller, TransferListener,
* PlayerId)}.
*
* @param caller The {@link MediaSourceCaller} enabling the source.
*/

View file

@ -175,6 +175,8 @@ public final class ProgressiveMediaSource extends BaseMediaSource
return this;
}
@Deprecated
@Override
public Factory setDrmSessionManager(@Nullable DrmSessionManager drmSessionManager) {
if (drmSessionManager == null) {
setDrmSessionManagerProvider(null);
@ -184,6 +186,7 @@ public final class ProgressiveMediaSource extends BaseMediaSource
return this;
}
@Deprecated
@Override
public Factory setDrmHttpDataSourceFactory(
@Nullable HttpDataSource.Factory drmHttpDataSourceFactory) {
@ -194,6 +197,7 @@ public final class ProgressiveMediaSource extends BaseMediaSource
return this;
}
@Deprecated
@Override
public Factory setDrmUserAgent(@Nullable String userAgent) {
if (!usingCustomDrmSessionManagerProvider) {

View file

@ -168,7 +168,7 @@ public final class ServerSideInsertedAdsMediaSource extends BaseMediaSource
}
mediaSource.addEventListener(handler, /* eventListener= */ this);
mediaSource.addDrmEventListener(handler, /* eventListener= */ this);
mediaSource.prepareSource(/* caller= */ this, mediaTransferListener);
mediaSource.prepareSource(/* caller= */ this, mediaTransferListener, getPlayerId());
}
@Override

View file

@ -29,7 +29,6 @@ import com.google.android.exoplayer2.C;
import com.google.android.exoplayer2.C.FormatSupport;
import com.google.android.exoplayer2.ExoPlaybackException;
import com.google.android.exoplayer2.Format;
import com.google.android.exoplayer2.Player;
import com.google.android.exoplayer2.Renderer;
import com.google.android.exoplayer2.RendererCapabilities;
import com.google.android.exoplayer2.RendererCapabilities.AdaptiveSupport;
@ -61,112 +60,36 @@ import java.util.concurrent.atomic.AtomicReference;
import org.checkerframework.checker.nullness.compatqual.NullableType;
/**
* A default {@link TrackSelector} suitable for most use cases. Track selections are made according
* to configurable {@link Parameters}, which can be set by calling {@link
* Player#setTrackSelectionParameters}.
* A default {@link TrackSelector} suitable for most use cases.
*
* <h2>Modifying parameters</h2>
*
* To modify only some aspects of the parameters currently used by a selector, it's possible to
* obtain a {@link ParametersBuilder} initialized with the current {@link Parameters}. The desired
* modifications can be made on the builder, and the resulting {@link Parameters} can then be built
* and set on the selector. For example the following code modifies the parameters to restrict video
* track selections to SD, and to select a German audio track if there is one:
*
* <pre>{@code
* // Build on the current parameters.
* TrackSelectionParameters currentParameters = player.getTrackSelectionParameters();
* // Build the resulting parameters.
* TrackSelectionParameters newParameters = currentParameters
* .buildUpon()
* .setMaxVideoSizeSd()
* .setPreferredAudioLanguage("deu")
* .build();
* // Set the new parameters.
* player.setTrackSelectionParameters(newParameters);
* }</pre>
*
* Convenience methods and chaining allow this to be written more concisely as:
* Track selection parameters should be modified by obtaining a {@link
* TrackSelectionParameters.Builder} initialized with the current {@link TrackSelectionParameters}
* from the player. The desired modifications can be made on the builder, and the resulting {@link
* TrackSelectionParameters} can then be built and set on the player:
*
* <pre>{@code
* player.setTrackSelectionParameters(
* player.getTrackSelectionParameters()
* .buildUpon()
* .setMaxVideoSizeSd()
* .setPreferredAudioLanguage("deu")
* .setPreferredAudioLanguage("de")
* .build());
*
* }</pre>
*
* Selection {@link Parameters} support many different options, some of which are described below.
*
* <h2>Selecting specific tracks</h2>
*
* Track selection overrides can be used to select specific tracks. To specify an override for a
* renderer, it's first necessary to obtain the tracks that have been mapped to it:
* Some specialized parameters are only available in the extended {@link Parameters} class, which
* can be retrieved and modified in a similar way in this track selector:
*
* <pre>{@code
* MappedTrackInfo mappedTrackInfo = trackSelector.getCurrentMappedTrackInfo();
* TrackGroupArray rendererTrackGroups = mappedTrackInfo == null ? null
* : mappedTrackInfo.getTrackGroups(rendererIndex);
* }</pre>
*
* If {@code rendererTrackGroups} is null then there aren't any currently mapped tracks, and so
* setting an override isn't possible. Note that a {@link Player.Listener} registered on the player
* can be used to determine when the current tracks (and therefore the mapping) changes. If {@code
* rendererTrackGroups} is non-null then an override can be set. The next step is to query the
* properties of the available tracks to determine the {@code groupIndex} and the {@code
* trackIndices} within the group it that should be selected. The override can then be specified
* using {@link ParametersBuilder#setSelectionOverride}:
*
* <pre>{@code
* SelectionOverride selectionOverride = new SelectionOverride(groupIndex, trackIndices);
* player.setTrackSelectionParameters(
* ((Parameters)player.getTrackSelectionParameters())
* defaultTrackSelector.setParameters(
* defaultTrackSelector.getParameters()
* .buildUpon()
* .setSelectionOverride(rendererIndex, rendererTrackGroups, selectionOverride)
* .setTunnelingEnabled(true)
* .build());
*
* }</pre>
*
* <h2>Constraint based track selection</h2>
*
* Whilst track selection overrides make it possible to select specific tracks, the recommended way
* of controlling which tracks are selected is by specifying constraints. For example consider the
* case of wanting to restrict video track selections to SD, and preferring German audio tracks.
* Track selection overrides could be used to select specific tracks meeting these criteria, however
* a simpler and more flexible approach is to specify these constraints directly:
*
* <pre>{@code
* player.setTrackSelectionParameters(
* player.getTrackSelectionParameters()
* .buildUpon()
* .setMaxVideoSizeSd()
* .setPreferredAudioLanguage("deu")
* .build());
* }</pre>
*
* There are several benefits to using constraint based track selection instead of specific track
* overrides:
*
* <ul>
* <li>You can specify constraints before knowing what tracks the media provides. This can
* simplify track selection code (e.g. you don't have to listen for changes in the available
* tracks before configuring the selector).
* <li>Constraints can be applied consistently across all periods in a complex piece of media,
* even if those periods contain different tracks. In contrast, a specific track override is
* only applied to periods whose tracks match those for which the override was set.
* </ul>
*
* <h2>Disabling renderers</h2>
*
* Renderers can be disabled using {@link ParametersBuilder#setRendererDisabled}. Disabling a
* renderer differs from setting a {@code null} override because the renderer is disabled
* unconditionally, whereas a {@code null} override is applied only when the track groups available
* to the renderer match the {@link TrackGroupArray} for which it was specified.
*
* <h2>Tunneling</h2>
*
* Tunneled playback can be enabled in cases where the combination of renderers and selected tracks
* supports it. This can be done by using {@link ParametersBuilder#setTunnelingEnabled(boolean)}.
*/
public class DefaultTrackSelector extends MappingTrackSelector {
@ -307,7 +230,7 @@ public class DefaultTrackSelector extends MappingTrackSelector {
rendererDisabledFlags =
makeSparseBooleanArrayFromTrueKeys(
bundle.getIntArray(
Parameters.keyForField(Parameters.FIELD_RENDERER_DISABLED_INDEXES)));
Parameters.keyForField(Parameters.FIELD_RENDERER_DISABLED_INDICES)));
}
@Override
@ -825,9 +748,9 @@ public class DefaultTrackSelector extends MappingTrackSelector {
private void setSelectionOverridesFromBundle(Bundle bundle) {
@Nullable
int[] rendererIndexes =
int[] rendererIndices =
bundle.getIntArray(
Parameters.keyForField(Parameters.FIELD_SELECTION_OVERRIDES_RENDERER_INDEXES));
Parameters.keyForField(Parameters.FIELD_SELECTION_OVERRIDES_RENDERER_INDICES));
List<TrackGroupArray> trackGroupArrays =
BundleableUtil.fromBundleNullableList(
TrackGroupArray.CREATOR,
@ -841,11 +764,11 @@ public class DefaultTrackSelector extends MappingTrackSelector {
Parameters.keyForField(Parameters.FIELD_SELECTION_OVERRIDES)),
/* defaultValue= */ new SparseArray<>());
if (rendererIndexes == null || rendererIndexes.length != trackGroupArrays.size()) {
if (rendererIndices == null || rendererIndices.length != trackGroupArrays.size()) {
return; // Incorrect format, ignore all overrides.
}
for (int i = 0; i < rendererIndexes.length; i++) {
int rendererIndex = rendererIndexes[i];
for (int i = 0; i < rendererIndices.length; i++) {
int rendererIndex = rendererIndices[i];
TrackGroupArray groups = trackGroupArrays.get(i);
@Nullable SelectionOverride selectionOverride = selectionOverrides.get(i);
setSelectionOverride(rendererIndex, groups, selectionOverride);
@ -1009,7 +932,11 @@ public class DefaultTrackSelector extends MappingTrackSelector {
* @param rendererIndex The renderer index.
* @param groups The {@link TrackGroupArray}.
* @return Whether there is an override.
* @deprecated Only works to retrieve the overrides set with the deprecated {@link
* ParametersBuilder#setSelectionOverride(int, TrackGroupArray, SelectionOverride)}. Use
* {@link TrackSelectionParameters#trackSelectionOverrides} instead.
*/
@Deprecated
public final boolean hasSelectionOverride(int rendererIndex, TrackGroupArray groups) {
Map<TrackGroupArray, @NullableType SelectionOverride> overrides =
selectionOverrides.get(rendererIndex);
@ -1022,7 +949,11 @@ public class DefaultTrackSelector extends MappingTrackSelector {
* @param rendererIndex The renderer index.
* @param groups The {@link TrackGroupArray}.
* @return The override, or null if no override exists.
* @deprecated Only works to retrieve the overrides set with the deprecated {@link
* ParametersBuilder#setSelectionOverride(int, TrackGroupArray, SelectionOverride)}. Use
* {@link TrackSelectionParameters#trackSelectionOverrides} instead.
*/
@Deprecated
@Nullable
public final SelectionOverride getSelectionOverride(int rendererIndex, TrackGroupArray groups) {
Map<TrackGroupArray, @NullableType SelectionOverride> overrides =
@ -1107,10 +1038,10 @@ public class DefaultTrackSelector extends MappingTrackSelector {
FIELD_EXCEED_RENDERER_CAPABILITIES_IF_NECESSARY,
FIELD_TUNNELING_ENABLED,
FIELD_ALLOW_MULTIPLE_ADAPTIVE_SELECTIONS,
FIELD_SELECTION_OVERRIDES_RENDERER_INDEXES,
FIELD_SELECTION_OVERRIDES_RENDERER_INDICES,
FIELD_SELECTION_OVERRIDES_TRACK_GROUP_ARRAYS,
FIELD_SELECTION_OVERRIDES,
FIELD_RENDERER_DISABLED_INDEXES,
FIELD_RENDERER_DISABLED_INDICES,
})
private @interface FieldNumber {}
@ -1126,10 +1057,10 @@ public class DefaultTrackSelector extends MappingTrackSelector {
private static final int FIELD_EXCEED_RENDERER_CAPABILITIES_IF_NECESSARY = 1008;
private static final int FIELD_TUNNELING_ENABLED = 1009;
private static final int FIELD_ALLOW_MULTIPLE_ADAPTIVE_SELECTIONS = 1010;
private static final int FIELD_SELECTION_OVERRIDES_RENDERER_INDEXES = 1011;
private static final int FIELD_SELECTION_OVERRIDES_RENDERER_INDICES = 1011;
private static final int FIELD_SELECTION_OVERRIDES_TRACK_GROUP_ARRAYS = 1012;
private static final int FIELD_SELECTION_OVERRIDES = 1013;
private static final int FIELD_RENDERER_DISABLED_INDEXES = 1014;
private static final int FIELD_RENDERER_DISABLED_INDICES = 1014;
@Override
public Bundle toBundle() {
@ -1172,7 +1103,7 @@ public class DefaultTrackSelector extends MappingTrackSelector {
putSelectionOverridesToBundle(bundle, selectionOverrides);
// Only true values are put into rendererDisabledFlags.
bundle.putIntArray(
keyForField(FIELD_RENDERER_DISABLED_INDEXES),
keyForField(FIELD_RENDERER_DISABLED_INDICES),
getKeysFromSparseBooleanArray(rendererDisabledFlags));
return bundle;
@ -1194,7 +1125,7 @@ public class DefaultTrackSelector extends MappingTrackSelector {
private static void putSelectionOverridesToBundle(
Bundle bundle,
SparseArray<Map<TrackGroupArray, @NullableType SelectionOverride>> selectionOverrides) {
ArrayList<Integer> rendererIndexes = new ArrayList<>();
ArrayList<Integer> rendererIndices = new ArrayList<>();
ArrayList<TrackGroupArray> trackGroupArrays = new ArrayList<>();
SparseArray<SelectionOverride> selections = new SparseArray<>();
@ -1207,10 +1138,10 @@ public class DefaultTrackSelector extends MappingTrackSelector {
selections.put(trackGroupArrays.size(), selection);
}
trackGroupArrays.add(override.getKey());
rendererIndexes.add(rendererIndex);
rendererIndices.add(rendererIndex);
}
bundle.putIntArray(
keyForField(FIELD_SELECTION_OVERRIDES_RENDERER_INDEXES), Ints.toArray(rendererIndexes));
keyForField(FIELD_SELECTION_OVERRIDES_RENDERER_INDICES), Ints.toArray(rendererIndices));
bundle.putParcelableArrayList(
keyForField(FIELD_SELECTION_OVERRIDES_TRACK_GROUP_ARRAYS),
BundleableUtil.toBundleArrayList(trackGroupArrays));
@ -1504,9 +1435,32 @@ public class DefaultTrackSelector extends MappingTrackSelector {
rendererMixedMimeTypeAdaptationSupports,
params);
// Apply track disabling and overriding.
// Apply per track type overrides.
SparseArray<Pair<TrackSelectionOverride, Integer>> applicableOverridesByTrackType =
getApplicableOverrides(mappedTrackInfo, params);
for (int i = 0; i < applicableOverridesByTrackType.size(); i++) {
Pair<TrackSelectionOverride, Integer> overrideAndRendererIndex =
applicableOverridesByTrackType.valueAt(i);
applyTrackTypeOverride(
mappedTrackInfo,
definitions,
/* trackType= */ applicableOverridesByTrackType.keyAt(i),
/* override= */ overrideAndRendererIndex.first,
/* overrideRendererIndex= */ overrideAndRendererIndex.second);
}
// Apply legacy per renderer overrides.
for (int i = 0; i < rendererCount; i++) {
definitions[i] = maybeApplyOverride(mappedTrackInfo, params, i, definitions[i]);
if (hasLegacyRendererOverride(mappedTrackInfo, params, /* rendererIndex= */ i)) {
definitions[i] = getLegacyRendererOverride(mappedTrackInfo, params, /* rendererIndex= */ i);
}
}
// Disable renderers if needed.
for (int i = 0; i < rendererCount; i++) {
if (isRendererDisabled(mappedTrackInfo, params, /* rendererIndex= */ i)) {
definitions[i] = null;
}
}
@NullableType
@ -1538,44 +1492,94 @@ public class DefaultTrackSelector extends MappingTrackSelector {
return Pair.create(rendererConfigurations, rendererTrackSelections);
}
/**
* Returns the {@link ExoTrackSelection.Definition} of a renderer after applying selection
* overriding and renderer disabling.
*/
protected ExoTrackSelection.@NullableType Definition maybeApplyOverride(
MappedTrackInfo mappedTrackInfo,
Parameters params,
int rendererIndex,
ExoTrackSelection.@NullableType Definition currentDefinition) {
// Per renderer and per track type disabling
private boolean isRendererDisabled(
MappedTrackInfo mappedTrackInfo, Parameters params, int rendererIndex) {
@C.TrackType int rendererType = mappedTrackInfo.getRendererType(rendererIndex);
if (params.getRendererDisabled(rendererIndex)
|| params.disabledTrackTypes.contains(rendererType)) {
return params.getRendererDisabled(rendererIndex)
|| params.disabledTrackTypes.contains(rendererType);
}
@SuppressWarnings("deprecation") // Calling deprecated hasSelectionOverride.
private boolean hasLegacyRendererOverride(
MappedTrackInfo mappedTrackInfo, Parameters params, int rendererIndex) {
TrackGroupArray rendererTrackGroups = mappedTrackInfo.getTrackGroups(rendererIndex);
return params.hasSelectionOverride(rendererIndex, rendererTrackGroups);
}
@SuppressWarnings("deprecation") // Calling deprecated getSelectionOverride.
private ExoTrackSelection.@NullableType Definition getLegacyRendererOverride(
MappedTrackInfo mappedTrackInfo, Parameters params, int rendererIndex) {
TrackGroupArray rendererTrackGroups = mappedTrackInfo.getTrackGroups(rendererIndex);
@Nullable
SelectionOverride override = params.getSelectionOverride(rendererIndex, rendererTrackGroups);
if (override == null) {
return null;
}
// Per TrackGroupArray override
TrackGroupArray rendererTrackGroups = mappedTrackInfo.getTrackGroups(rendererIndex);
if (params.hasSelectionOverride(rendererIndex, rendererTrackGroups)) {
@Nullable
SelectionOverride override = params.getSelectionOverride(rendererIndex, rendererTrackGroups);
if (override == null) {
return null;
}
return new ExoTrackSelection.Definition(
rendererTrackGroups.get(override.groupIndex), override.tracks, override.type);
}
// Per TrackGroup override
for (int j = 0; j < rendererTrackGroups.length; j++) {
TrackGroup trackGroup = rendererTrackGroups.get(j);
@Nullable
TrackSelectionOverride overrideTracks =
params.trackSelectionOverrides.getOverride(trackGroup);
if (overrideTracks != null) {
return new ExoTrackSelection.Definition(
trackGroup, Ints.toArray(overrideTracks.trackIndexes));
return new ExoTrackSelection.Definition(
rendererTrackGroups.get(override.groupIndex), override.tracks, override.type);
}
/**
* Returns applicable overrides. Mapping from track type to a pair of override and renderer index
* for this override.
*/
private SparseArray<Pair<TrackSelectionOverride, Integer>> getApplicableOverrides(
MappedTrackInfo mappedTrackInfo, Parameters params) {
SparseArray<Pair<TrackSelectionOverride, Integer>> applicableOverrides = new SparseArray<>();
// Iterate through all existing track groups to ensure only overrides for those groups are used.
int rendererCount = mappedTrackInfo.getRendererCount();
for (int rendererIndex = 0; rendererIndex < rendererCount; rendererIndex++) {
TrackGroupArray rendererTrackGroups = mappedTrackInfo.getTrackGroups(rendererIndex);
for (int j = 0; j < rendererTrackGroups.length; j++) {
maybeUpdateApplicableOverrides(
applicableOverrides,
params.trackSelectionOverrides.getOverride(rendererTrackGroups.get(j)),
rendererIndex);
}
}
// Also iterate unmapped groups to see if they have overrides.
TrackGroupArray unmappedGroups = mappedTrackInfo.getUnmappedTrackGroups();
for (int i = 0; i < unmappedGroups.length; i++) {
maybeUpdateApplicableOverrides(
applicableOverrides,
params.trackSelectionOverrides.getOverride(unmappedGroups.get(i)),
/* rendererIndex= */ C.INDEX_UNSET);
}
return applicableOverrides;
}
private void maybeUpdateApplicableOverrides(
SparseArray<Pair<TrackSelectionOverride, Integer>> applicableOverrides,
@Nullable TrackSelectionOverride override,
int rendererIndex) {
if (override == null) {
return;
}
@C.TrackType int trackType = override.getTrackType();
@Nullable
Pair<TrackSelectionOverride, Integer> existingOverride = applicableOverrides.get(trackType);
if (existingOverride == null || existingOverride.first.trackIndices.isEmpty()) {
// We only need to choose one non-empty override per type.
applicableOverrides.put(trackType, Pair.create(override, rendererIndex));
}
}
private void applyTrackTypeOverride(
MappedTrackInfo mappedTrackInfo,
ExoTrackSelection.@NullableType Definition[] definitions,
@C.TrackType int trackType,
TrackSelectionOverride override,
int overrideRendererIndex) {
for (int i = 0; i < definitions.length; i++) {
if (overrideRendererIndex == i) {
definitions[i] =
new ExoTrackSelection.Definition(
override.trackGroup, Ints.toArray(override.trackIndices));
} else if (mappedTrackInfo.getRendererType(i) == trackType) {
// Disable other renderers of the same type.
definitions[i] = null;
}
}
return currentDefinition; // No override
}
// Track selection prior to overrides and disabled flags being applied.

View file

@ -465,7 +465,7 @@ public final class DefaultBandwidthMeter implements BandwidthMeter, TransferList
/**
* Returns initial bitrate group assignments for a {@code country}. The initial bitrate is a list
* of indexes for [Wifi, 2G, 3G, 4G, 5G_NSA, 5G_SA].
* of indices for [Wifi, 2G, 3G, 4G, 5G_NSA, 5G_SA].
*/
private static int[] getInitialBitrateCountryGroupAssignment(String country) {
switch (country) {

View file

@ -167,8 +167,7 @@ public final class VideoDecoderGLSurfaceView extends GLSurfaceView
public void onSurfaceCreated(GL10 unused, EGLConfig config) {
program = new GlUtil.Program(VERTEX_SHADER, FRAGMENT_SHADER);
program.use();
int posLocation = program.getAttribLocation("in_pos");
GLES20.glEnableVertexAttribArray(posLocation);
int posLocation = program.getAttributeArrayLocationAndEnable("in_pos");
GLES20.glVertexAttribPointer(
posLocation,
2,
@ -176,13 +175,9 @@ public final class VideoDecoderGLSurfaceView extends GLSurfaceView
/* normalized= */ false,
/* stride= */ 0,
TEXTURE_VERTICES);
texLocations[0] = program.getAttribLocation("in_tc_y");
GLES20.glEnableVertexAttribArray(texLocations[0]);
texLocations[1] = program.getAttribLocation("in_tc_u");
GLES20.glEnableVertexAttribArray(texLocations[1]);
texLocations[2] = program.getAttribLocation("in_tc_v");
GLES20.glEnableVertexAttribArray(texLocations[2]);
GlUtil.checkGlError();
texLocations[0] = program.getAttributeArrayLocationAndEnable("in_tc_y");
texLocations[1] = program.getAttributeArrayLocationAndEnable("in_tc_u");
texLocations[2] = program.getAttributeArrayLocationAndEnable("in_tc_v");
colorMatrixLocation = program.getUniformLocation("mColorConversion");
GlUtil.checkGlError();
setupTextures();
@ -255,9 +250,9 @@ public final class VideoDecoderGLSurfaceView extends GLSurfaceView
int[] widths = new int[3];
widths[0] = outputBuffer.width;
// TODO: Handle streams where chroma channels are not stored at half width and height
// compared to luma channel. See [Internal: b/142097774].
// U and V planes are being stored at half width compared to Y.
// TODO(b/142097774): Handle streams where chroma channels are not stored at half width and
// height compared to the luma channel. U and V planes are being stored at half width compared
// to Y.
widths[1] = widths[2] = (widths[0] + 1) / 2;
for (int i = 0; i < 3; i++) {
// Set cropping of stride if either width or stride has changed.

View file

@ -46,33 +46,27 @@ import org.checkerframework.checker.nullness.qual.MonotonicNonNull;
}
// Basic vertex & fragment shaders to render a mesh with 3D position & 2D texture data.
private static final String[] VERTEX_SHADER_CODE =
new String[] {
"uniform mat4 uMvpMatrix;",
"uniform mat3 uTexMatrix;",
"attribute vec4 aPosition;",
"attribute vec2 aTexCoords;",
"varying vec2 vTexCoords;",
// Standard transformation.
"void main() {",
" gl_Position = uMvpMatrix * aPosition;",
" vTexCoords = (uTexMatrix * vec3(aTexCoords, 1)).xy;",
"}"
};
private static final String[] FRAGMENT_SHADER_CODE =
new String[] {
// This is required since the texture data is GL_TEXTURE_EXTERNAL_OES.
"#extension GL_OES_EGL_image_external : require",
"precision mediump float;",
// Standard texture rendering shader.
"uniform samplerExternalOES uTexture;",
"varying vec2 vTexCoords;",
"void main() {",
" gl_FragColor = texture2D(uTexture, vTexCoords);",
"}"
};
private static final String VERTEX_SHADER =
"uniform mat4 uMvpMatrix;\n"
+ "uniform mat3 uTexMatrix;\n"
+ "attribute vec4 aPosition;\n"
+ "attribute vec2 aTexCoords;\n"
+ "varying vec2 vTexCoords;\n"
+ "// Standard transformation.\n"
+ "void main() {\n"
+ " gl_Position = uMvpMatrix * aPosition;\n"
+ " vTexCoords = (uTexMatrix * vec3(aTexCoords, 1)).xy;\n"
+ "}\n";
private static final String FRAGMENT_SHADER =
"// This is required since the texture data is GL_TEXTURE_EXTERNAL_OES.\n"
+ "#extension GL_OES_EGL_image_external : require\n"
+ "precision mediump float;\n"
+ "// Standard texture rendering shader.\n"
+ "uniform samplerExternalOES uTexture;\n"
+ "varying vec2 vTexCoords;\n"
+ "void main() {\n"
+ " gl_FragColor = texture2D(uTexture, vTexCoords);\n"
+ "}\n";
// Texture transform matrices.
private static final float[] TEX_MATRIX_WHOLE = {
@ -121,11 +115,11 @@ import org.checkerframework.checker.nullness.qual.MonotonicNonNull;
/** Initializes of the GL components. */
/* package */ void init() {
program = new GlUtil.Program(VERTEX_SHADER_CODE, FRAGMENT_SHADER_CODE);
program = new GlUtil.Program(VERTEX_SHADER, FRAGMENT_SHADER);
mvpMatrixHandle = program.getUniformLocation("uMvpMatrix");
uTexMatrixHandle = program.getUniformLocation("uTexMatrix");
positionHandle = program.getAttribLocation("aPosition");
texCoordsHandle = program.getAttribLocation("aTexCoords");
positionHandle = program.getAttributeArrayLocationAndEnable("aPosition");
texCoordsHandle = program.getAttributeArrayLocationAndEnable("aTexCoords");
textureHandle = program.getUniformLocation("uTexture");
}
@ -148,10 +142,6 @@ import org.checkerframework.checker.nullness.qual.MonotonicNonNull;
checkNotNull(program).use();
checkGlError();
GLES20.glEnableVertexAttribArray(positionHandle);
GLES20.glEnableVertexAttribArray(texCoordsHandle);
checkGlError();
float[] texMatrix;
if (stereoMode == C.STEREO_MODE_TOP_BOTTOM) {
texMatrix = rightEye ? TEX_MATRIX_BOTTOM : TEX_MATRIX_TOP;
@ -162,6 +152,7 @@ import org.checkerframework.checker.nullness.qual.MonotonicNonNull;
}
GLES20.glUniformMatrix3fv(uTexMatrixHandle, 1, false, texMatrix, 0);
// TODO(b/205002913): Update to use GlUtil.Uniform.bind().
GLES20.glUniformMatrix4fv(mvpMatrixHandle, 1, false, mvpMatrix, 0);
GLES20.glActiveTexture(GLES20.GL_TEXTURE0);
GLES20.glBindTexture(GLES11Ext.GL_TEXTURE_EXTERNAL_OES, textureId);

View file

@ -44,12 +44,13 @@ public class DefaultLivePlaybackSpeedControlTest {
DefaultLivePlaybackSpeedControl defaultLivePlaybackSpeedControl =
new DefaultLivePlaybackSpeedControl.Builder().build();
defaultLivePlaybackSpeedControl.setLiveConfiguration(
new LiveConfiguration(
/* targetLiveOffsetMs= */ 42,
/* minLiveOffsetMs= */ 5,
/* maxLiveOffsetMs= */ 400,
/* minPlaybackSpeed= */ 1f,
/* maxPlaybackSpeed= */ 1f));
new LiveConfiguration.Builder()
.setTargetOffsetMs(42)
.setMinOffsetMs(5)
.setMaxOffsetMs(400)
.setMinPlaybackSpeed(1f)
.setMaxPlaybackSpeed(1f)
.build());
assertThat(defaultLivePlaybackSpeedControl.getTargetLiveOffsetUs()).isEqualTo(42_000);
}
@ -60,12 +61,13 @@ public class DefaultLivePlaybackSpeedControlTest {
DefaultLivePlaybackSpeedControl defaultLivePlaybackSpeedControl =
new DefaultLivePlaybackSpeedControl.Builder().build();
defaultLivePlaybackSpeedControl.setLiveConfiguration(
new LiveConfiguration(
/* targetLiveOffsetMs= */ 4321,
/* minLiveOffsetMs= */ 5,
/* maxLiveOffsetMs= */ 400,
/* minPlaybackSpeed= */ 1f,
/* maxPlaybackSpeed= */ 1f));
new LiveConfiguration.Builder()
.setTargetOffsetMs(4321)
.setMinOffsetMs(5)
.setMaxOffsetMs(400)
.setMinPlaybackSpeed(1f)
.setMaxPlaybackSpeed(1f)
.build());
assertThat(defaultLivePlaybackSpeedControl.getTargetLiveOffsetUs()).isEqualTo(400_000);
}
@ -76,12 +78,13 @@ public class DefaultLivePlaybackSpeedControlTest {
DefaultLivePlaybackSpeedControl defaultLivePlaybackSpeedControl =
new DefaultLivePlaybackSpeedControl.Builder().build();
defaultLivePlaybackSpeedControl.setLiveConfiguration(
new LiveConfiguration(
/* targetLiveOffsetMs= */ 3,
/* minLiveOffsetMs= */ 5,
/* maxLiveOffsetMs= */ 400,
/* minPlaybackSpeed= */ 1f,
/* maxPlaybackSpeed= */ 1f));
new LiveConfiguration.Builder()
.setTargetOffsetMs(3)
.setMinOffsetMs(5)
.setMaxOffsetMs(400)
.setMinPlaybackSpeed(1f)
.setMaxPlaybackSpeed(1f)
.build());
assertThat(defaultLivePlaybackSpeedControl.getTargetLiveOffsetUs()).isEqualTo(5_000);
}
@ -93,12 +96,13 @@ public class DefaultLivePlaybackSpeedControlTest {
defaultLivePlaybackSpeedControl.setTargetLiveOffsetOverrideUs(321_000);
defaultLivePlaybackSpeedControl.setLiveConfiguration(
new LiveConfiguration(
/* targetLiveOffsetMs= */ 42,
/* minLiveOffsetMs= */ 5,
/* maxLiveOffsetMs= */ 400,
/* minPlaybackSpeed= */ 1f,
/* maxPlaybackSpeed= */ 1f));
new LiveConfiguration.Builder()
.setTargetOffsetMs(42)
.setMinOffsetMs(5)
.setMaxOffsetMs(400)
.setMinPlaybackSpeed(1f)
.setMaxPlaybackSpeed(1f)
.build());
long targetLiveOffsetUs = defaultLivePlaybackSpeedControl.getTargetLiveOffsetUs();
@ -113,12 +117,13 @@ public class DefaultLivePlaybackSpeedControlTest {
defaultLivePlaybackSpeedControl.setTargetLiveOffsetOverrideUs(123_456_789);
defaultLivePlaybackSpeedControl.setLiveConfiguration(
new LiveConfiguration(
/* targetLiveOffsetMs= */ 42,
/* minLiveOffsetMs= */ 5,
/* maxLiveOffsetMs= */ 400,
/* minPlaybackSpeed= */ 1f,
/* maxPlaybackSpeed= */ 1f));
new LiveConfiguration.Builder()
.setTargetOffsetMs(42)
.setMinOffsetMs(5)
.setMaxOffsetMs(400)
.setMinPlaybackSpeed(1f)
.setMaxPlaybackSpeed(1f)
.build());
long targetLiveOffsetUs = defaultLivePlaybackSpeedControl.getTargetLiveOffsetUs();
@ -133,12 +138,13 @@ public class DefaultLivePlaybackSpeedControlTest {
defaultLivePlaybackSpeedControl.setTargetLiveOffsetOverrideUs(3_141);
defaultLivePlaybackSpeedControl.setLiveConfiguration(
new LiveConfiguration(
/* targetLiveOffsetMs= */ 42,
/* minLiveOffsetMs= */ 5,
/* maxLiveOffsetMs= */ 400,
/* minPlaybackSpeed= */ 1f,
/* maxPlaybackSpeed= */ 1f));
new LiveConfiguration.Builder()
.setTargetOffsetMs(42)
.setMinOffsetMs(5)
.setMaxOffsetMs(400)
.setMinPlaybackSpeed(1f)
.setMaxPlaybackSpeed(1f)
.build());
long targetLiveOffsetUs = defaultLivePlaybackSpeedControl.getTargetLiveOffsetUs();
@ -164,12 +170,13 @@ public class DefaultLivePlaybackSpeedControlTest {
new DefaultLivePlaybackSpeedControl.Builder().build();
defaultLivePlaybackSpeedControl.setTargetLiveOffsetOverrideUs(123_456_789);
defaultLivePlaybackSpeedControl.setLiveConfiguration(
new LiveConfiguration(
/* targetLiveOffsetMs= */ 42,
/* minLiveOffsetMs= */ 5,
/* maxLiveOffsetMs= */ 400,
/* minPlaybackSpeed= */ 1f,
/* maxPlaybackSpeed= */ 1f));
new LiveConfiguration.Builder()
.setTargetOffsetMs(42)
.setMinOffsetMs(5)
.setMaxOffsetMs(400)
.setMinPlaybackSpeed(1f)
.setMaxPlaybackSpeed(1f)
.build());
defaultLivePlaybackSpeedControl.setTargetLiveOffsetOverrideUs(C.TIME_UNSET);
long targetLiveOffsetUs = defaultLivePlaybackSpeedControl.getTargetLiveOffsetUs();
@ -184,12 +191,13 @@ public class DefaultLivePlaybackSpeedControlTest {
.setTargetLiveOffsetIncrementOnRebufferMs(3)
.build();
defaultLivePlaybackSpeedControl.setLiveConfiguration(
new LiveConfiguration(
/* targetLiveOffsetMs= */ 42,
/* minLiveOffsetMs= */ 5,
/* maxLiveOffsetMs= */ 400,
/* minPlaybackSpeed= */ 1f,
/* maxPlaybackSpeed= */ 1f));
new LiveConfiguration.Builder()
.setTargetOffsetMs(42)
.setMinOffsetMs(5)
.setMaxOffsetMs(400)
.setMinPlaybackSpeed(1f)
.setMaxPlaybackSpeed(1f)
.build());
long targetLiveOffsetBeforeUs = defaultLivePlaybackSpeedControl.getTargetLiveOffsetUs();
defaultLivePlaybackSpeedControl.notifyRebuffer();
@ -206,12 +214,13 @@ public class DefaultLivePlaybackSpeedControlTest {
.setTargetLiveOffsetIncrementOnRebufferMs(3)
.build();
defaultLivePlaybackSpeedControl.setLiveConfiguration(
new LiveConfiguration(
/* targetLiveOffsetMs= */ 42,
/* minLiveOffsetMs= */ 5,
/* maxLiveOffsetMs= */ 400,
/* minPlaybackSpeed= */ 1f,
/* maxPlaybackSpeed= */ 1f));
new LiveConfiguration.Builder()
.setTargetOffsetMs(42)
.setMinOffsetMs(5)
.setMaxOffsetMs(400)
.setMinPlaybackSpeed(1f)
.setMaxPlaybackSpeed(1f)
.build());
List<Long> targetOffsetsUs = new ArrayList<>();
for (int i = 0; i < 500; i++) {
@ -231,12 +240,13 @@ public class DefaultLivePlaybackSpeedControlTest {
.setTargetLiveOffsetIncrementOnRebufferMs(0)
.build();
defaultLivePlaybackSpeedControl.setLiveConfiguration(
new LiveConfiguration(
/* targetLiveOffsetMs= */ 42,
/* minLiveOffsetMs= */ 5,
/* maxLiveOffsetMs= */ 400,
/* minPlaybackSpeed= */ 1f,
/* maxPlaybackSpeed= */ 1f));
new LiveConfiguration.Builder()
.setTargetOffsetMs(42)
.setMinOffsetMs(5)
.setMaxOffsetMs(400)
.setMinPlaybackSpeed(1f)
.setMaxPlaybackSpeed(1f)
.build());
defaultLivePlaybackSpeedControl.notifyRebuffer();
long targetLiveOffsetUs = defaultLivePlaybackSpeedControl.getTargetLiveOffsetUs();
@ -252,12 +262,13 @@ public class DefaultLivePlaybackSpeedControlTest {
.setTargetLiveOffsetIncrementOnRebufferMs(3)
.build();
defaultLivePlaybackSpeedControl.setLiveConfiguration(
new LiveConfiguration(
/* targetLiveOffsetMs= */ 42,
/* minLiveOffsetMs= */ 5,
/* maxLiveOffsetMs= */ 400,
/* minPlaybackSpeed= */ 1f,
/* maxPlaybackSpeed= */ 1f));
new LiveConfiguration.Builder()
.setTargetOffsetMs(42)
.setMinOffsetMs(5)
.setMaxOffsetMs(400)
.setMinPlaybackSpeed(1f)
.setMaxPlaybackSpeed(1f)
.build());
defaultLivePlaybackSpeedControl.notifyRebuffer();
defaultLivePlaybackSpeedControl.setTargetLiveOffsetOverrideUs(321_000);
@ -274,22 +285,24 @@ public class DefaultLivePlaybackSpeedControlTest {
.setTargetLiveOffsetIncrementOnRebufferMs(3)
.build();
defaultLivePlaybackSpeedControl.setLiveConfiguration(
new LiveConfiguration(
/* targetLiveOffsetMs= */ 42,
/* minLiveOffsetMs= */ 5,
/* maxLiveOffsetMs= */ 400,
/* minPlaybackSpeed= */ 1f,
/* maxPlaybackSpeed= */ 1f));
new LiveConfiguration.Builder()
.setTargetOffsetMs(42)
.setMinOffsetMs(5)
.setMaxOffsetMs(400)
.setMinPlaybackSpeed(1f)
.setMaxPlaybackSpeed(1f)
.build());
long targetLiveOffsetBeforeUs = defaultLivePlaybackSpeedControl.getTargetLiveOffsetUs();
defaultLivePlaybackSpeedControl.notifyRebuffer();
defaultLivePlaybackSpeedControl.setLiveConfiguration(
new LiveConfiguration(
/* targetLiveOffsetMs= */ 42,
/* minLiveOffsetMs= */ 3,
/* maxLiveOffsetMs= */ 450,
/* minPlaybackSpeed= */ 0.9f,
/* maxPlaybackSpeed= */ 1.1f));
new LiveConfiguration.Builder()
.setTargetOffsetMs(42)
.setMinOffsetMs(3)
.setMaxOffsetMs(450)
.setMinPlaybackSpeed(0.9f)
.setMaxPlaybackSpeed(1.1f)
.build());
long targetLiveOffsetAfterUs = defaultLivePlaybackSpeedControl.getTargetLiveOffsetUs();
assertThat(targetLiveOffsetAfterUs).isGreaterThan(targetLiveOffsetBeforeUs);
@ -304,21 +317,23 @@ public class DefaultLivePlaybackSpeedControlTest {
.setTargetLiveOffsetIncrementOnRebufferMs(3)
.build();
defaultLivePlaybackSpeedControl.setLiveConfiguration(
new LiveConfiguration(
/* targetLiveOffsetMs= */ 42,
/* minLiveOffsetMs= */ 5,
/* maxLiveOffsetMs= */ 400,
/* minPlaybackSpeed= */ 1f,
/* maxPlaybackSpeed= */ 1f));
new LiveConfiguration.Builder()
.setTargetOffsetMs(42)
.setMinOffsetMs(5)
.setMaxOffsetMs(400)
.setMinPlaybackSpeed(1f)
.setMaxPlaybackSpeed(1f)
.build());
defaultLivePlaybackSpeedControl.notifyRebuffer();
defaultLivePlaybackSpeedControl.setLiveConfiguration(
new LiveConfiguration(
/* targetLiveOffsetMs= */ 39,
/* minLiveOffsetMs= */ 3,
/* maxLiveOffsetMs= */ 450,
/* minPlaybackSpeed= */ 0.9f,
/* maxPlaybackSpeed= */ 1.1f));
new LiveConfiguration.Builder()
.setTargetOffsetMs(39)
.setMinOffsetMs(3)
.setMaxOffsetMs(450)
.setMinPlaybackSpeed(0.9f)
.setMaxPlaybackSpeed(1.1f)
.build());
long targetLiveOffsetUs = defaultLivePlaybackSpeedControl.getTargetLiveOffsetUs();
assertThat(targetLiveOffsetUs).isEqualTo(39_000);
@ -333,12 +348,13 @@ public class DefaultLivePlaybackSpeedControlTest {
.setMinUpdateIntervalMs(100)
.build();
defaultLivePlaybackSpeedControl.setLiveConfiguration(
new LiveConfiguration(
/* targetLiveOffsetMs= */ 42_000,
/* minLiveOffsetMs= */ 5_000,
/* maxLiveOffsetMs= */ 400_000,
/* minPlaybackSpeed= */ 0.9f,
/* maxPlaybackSpeed= */ 1.1f));
new LiveConfiguration.Builder()
.setTargetOffsetMs(42_000)
.setMinOffsetMs(5_000)
.setMaxOffsetMs(400_000)
.setMinPlaybackSpeed(0.9f)
.setMaxPlaybackSpeed(1.1f)
.build());
defaultLivePlaybackSpeedControl.notifyRebuffer();
long targetLiveOffsetAfterRebufferUs = defaultLivePlaybackSpeedControl.getTargetLiveOffsetUs();
@ -371,12 +387,13 @@ public class DefaultLivePlaybackSpeedControlTest {
.setMinUpdateIntervalMs(100)
.build();
defaultLivePlaybackSpeedControl.setLiveConfiguration(
new LiveConfiguration(
/* targetLiveOffsetMs= */ 42_000,
/* minLiveOffsetMs= */ 5_000,
/* maxLiveOffsetMs= */ 400_000,
/* minPlaybackSpeed= */ 0.9f,
/* maxPlaybackSpeed= */ 1.1f));
new LiveConfiguration.Builder()
.setTargetOffsetMs(42_000)
.setMinOffsetMs(5_000)
.setMaxOffsetMs(400_000)
.setMinPlaybackSpeed(0.9f)
.setMaxPlaybackSpeed(1.1f)
.build());
defaultLivePlaybackSpeedControl.notifyRebuffer();
long targetLiveOffsetAfterRebufferUs = defaultLivePlaybackSpeedControl.getTargetLiveOffsetUs();
@ -408,12 +425,13 @@ public class DefaultLivePlaybackSpeedControlTest {
DefaultLivePlaybackSpeedControl defaultLivePlaybackSpeedControl =
new DefaultLivePlaybackSpeedControl.Builder().build();
defaultLivePlaybackSpeedControl.setLiveConfiguration(
new LiveConfiguration(
/* targetLiveOffsetMs= */ 42_000,
/* minLiveOffsetMs= */ 5_000,
/* maxLiveOffsetMs= */ 400_000,
/* minPlaybackSpeed= */ 0.9f,
/* maxPlaybackSpeed= */ 1.1f));
new LiveConfiguration.Builder()
.setTargetOffsetMs(42_000)
.setMinOffsetMs(5_000)
.setMaxOffsetMs(400_000)
.setMinPlaybackSpeed(0.9f)
.setMaxPlaybackSpeed(1.1f)
.build());
long targetLiveOffsetBeforeUs = defaultLivePlaybackSpeedControl.getTargetLiveOffsetUs();
// Pretend to have a buffered duration at around the target duration with some artificial noise.
@ -440,12 +458,13 @@ public class DefaultLivePlaybackSpeedControlTest {
.setMinPossibleLiveOffsetSmoothingFactor(0f)
.build();
defaultLivePlaybackSpeedControl.setLiveConfiguration(
new LiveConfiguration(
/* targetLiveOffsetMs= */ 42_000,
/* minLiveOffsetMs= */ 5_000,
/* maxLiveOffsetMs= */ 400_000,
/* minPlaybackSpeed= */ 0.9f,
/* maxPlaybackSpeed= */ 1.1f));
new LiveConfiguration.Builder()
.setTargetOffsetMs(42_000)
.setMinOffsetMs(5_000)
.setMaxOffsetMs(400_000)
.setMinPlaybackSpeed(0.9f)
.setMaxPlaybackSpeed(1.1f)
.build());
long targetLiveOffsetBeforeUs = defaultLivePlaybackSpeedControl.getTargetLiveOffsetUs();
// Pretend to have a buffered duration at around the target duration with some artificial noise.
@ -474,12 +493,13 @@ public class DefaultLivePlaybackSpeedControlTest {
.setMinUpdateIntervalMs(100)
.build();
defaultLivePlaybackSpeedControl.setLiveConfiguration(
new LiveConfiguration(
/* targetLiveOffsetMs= */ 42_000,
/* minLiveOffsetMs= */ 5_000,
/* maxLiveOffsetMs= */ 400_000,
/* minPlaybackSpeed= */ 0.9f,
/* maxPlaybackSpeed= */ 1.1f));
new LiveConfiguration.Builder()
.setTargetOffsetMs(42_000)
.setMinOffsetMs(5_000)
.setMaxOffsetMs(400_000)
.setMinPlaybackSpeed(0.9f)
.setMaxPlaybackSpeed(1.1f)
.build());
long targetLiveOffsetBeforeUs = defaultLivePlaybackSpeedControl.getTargetLiveOffsetUs();
defaultLivePlaybackSpeedControl.getAdjustedPlaybackSpeed(
@ -495,12 +515,13 @@ public class DefaultLivePlaybackSpeedControlTest {
DefaultLivePlaybackSpeedControl defaultLivePlaybackSpeedControl =
new DefaultLivePlaybackSpeedControl.Builder().build();
defaultLivePlaybackSpeedControl.setLiveConfiguration(
new LiveConfiguration(
/* targetLiveOffsetMs= */ 2_000,
/* minLiveOffsetMs= */ C.TIME_UNSET,
/* maxLiveOffsetMs= */ C.TIME_UNSET,
/* minPlaybackSpeed= */ C.RATE_UNSET,
/* maxPlaybackSpeed= */ C.RATE_UNSET));
new LiveConfiguration.Builder()
.setTargetOffsetMs(2_000)
.setMinOffsetMs(C.TIME_UNSET)
.setMaxOffsetMs(C.TIME_UNSET)
.setMinPlaybackSpeed(C.RATE_UNSET)
.setMaxPlaybackSpeed(C.RATE_UNSET)
.build());
float adjustedSpeed =
defaultLivePlaybackSpeedControl.getAdjustedPlaybackSpeed(
@ -516,12 +537,13 @@ public class DefaultLivePlaybackSpeedControlTest {
.setMaxLiveOffsetErrorMsForUnitSpeed(5)
.build();
defaultLivePlaybackSpeedControl.setLiveConfiguration(
new LiveConfiguration(
/* targetLiveOffsetMs= */ 2_000,
/* minLiveOffsetMs= */ C.TIME_UNSET,
/* maxLiveOffsetMs= */ C.TIME_UNSET,
/* minPlaybackSpeed= */ C.RATE_UNSET,
/* maxPlaybackSpeed= */ C.RATE_UNSET));
new LiveConfiguration.Builder()
.setTargetOffsetMs(2_000)
.setMinOffsetMs(C.TIME_UNSET)
.setMaxOffsetMs(C.TIME_UNSET)
.setMinPlaybackSpeed(C.RATE_UNSET)
.setMaxPlaybackSpeed(C.RATE_UNSET)
.build());
float adjustedSpeedJustAboveLowerErrorMargin =
defaultLivePlaybackSpeedControl.getAdjustedPlaybackSpeed(
@ -539,12 +561,13 @@ public class DefaultLivePlaybackSpeedControlTest {
DefaultLivePlaybackSpeedControl defaultLivePlaybackSpeedControl =
new DefaultLivePlaybackSpeedControl.Builder().setProportionalControlFactor(0.01f).build();
defaultLivePlaybackSpeedControl.setLiveConfiguration(
new LiveConfiguration(
/* targetLiveOffsetMs= */ 2_000,
/* minLiveOffsetMs= */ C.TIME_UNSET,
/* maxLiveOffsetMs= */ C.TIME_UNSET,
/* minPlaybackSpeed= */ C.RATE_UNSET,
/* maxPlaybackSpeed= */ C.RATE_UNSET));
new LiveConfiguration.Builder()
.setTargetOffsetMs(2_000)
.setMinOffsetMs(C.TIME_UNSET)
.setMaxOffsetMs(C.TIME_UNSET)
.setMinPlaybackSpeed(C.RATE_UNSET)
.setMaxPlaybackSpeed(C.RATE_UNSET)
.build());
float adjustedSpeed =
defaultLivePlaybackSpeedControl.getAdjustedPlaybackSpeed(
@ -561,12 +584,13 @@ public class DefaultLivePlaybackSpeedControlTest {
new DefaultLivePlaybackSpeedControl.Builder().setProportionalControlFactor(0.01f).build();
defaultLivePlaybackSpeedControl.setTargetLiveOffsetOverrideUs(2_000_000);
defaultLivePlaybackSpeedControl.setLiveConfiguration(
new LiveConfiguration(
/* targetLiveOffsetMs= */ 2_000,
/* minLiveOffsetMs= */ C.TIME_UNSET,
/* maxLiveOffsetMs= */ C.TIME_UNSET,
/* minPlaybackSpeed= */ C.RATE_UNSET,
/* maxPlaybackSpeed= */ C.RATE_UNSET));
new LiveConfiguration.Builder()
.setTargetOffsetMs(2_000)
.setMinOffsetMs(C.TIME_UNSET)
.setMaxOffsetMs(C.TIME_UNSET)
.setMinPlaybackSpeed(C.RATE_UNSET)
.setMaxPlaybackSpeed(C.RATE_UNSET)
.build());
float adjustedSpeed =
defaultLivePlaybackSpeedControl.getAdjustedPlaybackSpeed(
@ -583,12 +607,13 @@ public class DefaultLivePlaybackSpeedControlTest {
DefaultLivePlaybackSpeedControl defaultLivePlaybackSpeedControl =
new DefaultLivePlaybackSpeedControl.Builder().setFallbackMaxPlaybackSpeed(1.5f).build();
defaultLivePlaybackSpeedControl.setLiveConfiguration(
new LiveConfiguration(
/* targetLiveOffsetMs= */ 2_000,
/* minLiveOffsetMs= */ C.TIME_UNSET,
/* maxLiveOffsetMs= */ C.TIME_UNSET,
/* minPlaybackSpeed= */ C.RATE_UNSET,
/* maxPlaybackSpeed= */ C.RATE_UNSET));
new LiveConfiguration.Builder()
.setTargetOffsetMs(2_000)
.setMinOffsetMs(C.TIME_UNSET)
.setMaxOffsetMs(C.TIME_UNSET)
.setMinPlaybackSpeed(C.RATE_UNSET)
.setMaxPlaybackSpeed(C.RATE_UNSET)
.build());
float adjustedSpeed =
defaultLivePlaybackSpeedControl.getAdjustedPlaybackSpeed(
@ -603,12 +628,13 @@ public class DefaultLivePlaybackSpeedControlTest {
DefaultLivePlaybackSpeedControl defaultLivePlaybackSpeedControl =
new DefaultLivePlaybackSpeedControl.Builder().setFallbackMinPlaybackSpeed(0.5f).build();
defaultLivePlaybackSpeedControl.setLiveConfiguration(
new LiveConfiguration(
/* targetLiveOffsetMs= */ 2_000,
/* minLiveOffsetMs= */ C.TIME_UNSET,
/* maxLiveOffsetMs= */ C.TIME_UNSET,
/* minPlaybackSpeed= */ C.RATE_UNSET,
/* maxPlaybackSpeed= */ C.RATE_UNSET));
new LiveConfiguration.Builder()
.setTargetOffsetMs(2_000)
.setMinOffsetMs(C.TIME_UNSET)
.setMaxOffsetMs(C.TIME_UNSET)
.setMinPlaybackSpeed(C.RATE_UNSET)
.setMaxPlaybackSpeed(C.RATE_UNSET)
.build());
float adjustedSpeed =
defaultLivePlaybackSpeedControl.getAdjustedPlaybackSpeed(
@ -623,12 +649,13 @@ public class DefaultLivePlaybackSpeedControlTest {
DefaultLivePlaybackSpeedControl defaultLivePlaybackSpeedControl =
new DefaultLivePlaybackSpeedControl.Builder().setFallbackMaxPlaybackSpeed(1.5f).build();
defaultLivePlaybackSpeedControl.setLiveConfiguration(
new LiveConfiguration(
/* targetLiveOffsetMs= */ 2_000,
/* minLiveOffsetMs= */ C.TIME_UNSET,
/* maxLiveOffsetMs= */ C.TIME_UNSET,
/* minPlaybackSpeed= */ C.RATE_UNSET,
/* maxPlaybackSpeed= */ 2f));
new LiveConfiguration.Builder()
.setTargetOffsetMs(2_000)
.setMinOffsetMs(C.TIME_UNSET)
.setMaxOffsetMs(C.TIME_UNSET)
.setMinPlaybackSpeed(C.RATE_UNSET)
.setMaxPlaybackSpeed(2f)
.build());
float adjustedSpeed =
defaultLivePlaybackSpeedControl.getAdjustedPlaybackSpeed(
@ -643,12 +670,13 @@ public class DefaultLivePlaybackSpeedControlTest {
DefaultLivePlaybackSpeedControl defaultLivePlaybackSpeedControl =
new DefaultLivePlaybackSpeedControl.Builder().setFallbackMinPlaybackSpeed(0.5f).build();
defaultLivePlaybackSpeedControl.setLiveConfiguration(
new LiveConfiguration(
/* targetLiveOffsetMs= */ 2_000,
/* minLiveOffsetMs= */ C.TIME_UNSET,
/* maxLiveOffsetMs= */ C.TIME_UNSET,
/* minPlaybackSpeed= */ 0.2f,
/* maxPlaybackSpeed= */ C.RATE_UNSET));
new LiveConfiguration.Builder()
.setTargetOffsetMs(2_000)
.setMinOffsetMs(C.TIME_UNSET)
.setMaxOffsetMs(C.TIME_UNSET)
.setMinPlaybackSpeed(0.2f)
.setMaxPlaybackSpeed(C.RATE_UNSET)
.build());
float adjustedSpeed =
defaultLivePlaybackSpeedControl.getAdjustedPlaybackSpeed(
@ -662,12 +690,13 @@ public class DefaultLivePlaybackSpeedControlTest {
DefaultLivePlaybackSpeedControl defaultLivePlaybackSpeedControl =
new DefaultLivePlaybackSpeedControl.Builder().setMinUpdateIntervalMs(123).build();
defaultLivePlaybackSpeedControl.setLiveConfiguration(
new LiveConfiguration(
/* targetLiveOffsetMs= */ 2_000,
/* minLiveOffsetMs= */ C.TIME_UNSET,
/* maxLiveOffsetMs= */ C.TIME_UNSET,
/* minPlaybackSpeed= */ C.RATE_UNSET,
/* maxPlaybackSpeed= */ C.RATE_UNSET));
new LiveConfiguration.Builder()
.setTargetOffsetMs(2_000)
.setMinOffsetMs(C.TIME_UNSET)
.setMaxOffsetMs(C.TIME_UNSET)
.setMinPlaybackSpeed(C.RATE_UNSET)
.setMaxPlaybackSpeed(C.RATE_UNSET)
.build());
float adjustedSpeed1 =
defaultLivePlaybackSpeedControl.getAdjustedPlaybackSpeed(
@ -691,23 +720,25 @@ public class DefaultLivePlaybackSpeedControlTest {
DefaultLivePlaybackSpeedControl defaultLivePlaybackSpeedControl =
new DefaultLivePlaybackSpeedControl.Builder().setMinUpdateIntervalMs(123).build();
defaultLivePlaybackSpeedControl.setLiveConfiguration(
new LiveConfiguration(
/* targetLiveOffsetMs= */ 2_000,
/* minLiveOffsetMs= */ C.TIME_UNSET,
/* maxLiveOffsetMs= */ C.TIME_UNSET,
/* minPlaybackSpeed= */ C.RATE_UNSET,
/* maxPlaybackSpeed= */ C.RATE_UNSET));
new LiveConfiguration.Builder()
.setTargetOffsetMs(2_000)
.setMinOffsetMs(C.TIME_UNSET)
.setMaxOffsetMs(C.TIME_UNSET)
.setMinPlaybackSpeed(C.RATE_UNSET)
.setMaxPlaybackSpeed(C.RATE_UNSET)
.build());
float adjustedSpeed1 =
defaultLivePlaybackSpeedControl.getAdjustedPlaybackSpeed(
/* liveOffsetUs= */ 1_500_000, /* bufferedDurationUs= */ 1_000_000);
defaultLivePlaybackSpeedControl.setLiveConfiguration(
new LiveConfiguration(
/* targetLiveOffsetMs= */ 2_000,
/* minLiveOffsetMs= */ C.TIME_UNSET,
/* maxLiveOffsetMs= */ C.TIME_UNSET,
/* minPlaybackSpeed= */ C.RATE_UNSET,
/* maxPlaybackSpeed= */ C.RATE_UNSET));
new LiveConfiguration.Builder()
.setTargetOffsetMs(2_000)
.setMinOffsetMs(C.TIME_UNSET)
.setMaxOffsetMs(C.TIME_UNSET)
.setMinPlaybackSpeed(C.RATE_UNSET)
.setMaxPlaybackSpeed(C.RATE_UNSET)
.build());
float adjustedSpeed2 =
defaultLivePlaybackSpeedControl.getAdjustedPlaybackSpeed(
/* liveOffsetUs= */ 2_500_000, /* bufferedDurationUs= */ 1_000_000);
@ -721,23 +752,25 @@ public class DefaultLivePlaybackSpeedControlTest {
DefaultLivePlaybackSpeedControl defaultLivePlaybackSpeedControl =
new DefaultLivePlaybackSpeedControl.Builder().setMinUpdateIntervalMs(123).build();
defaultLivePlaybackSpeedControl.setLiveConfiguration(
new LiveConfiguration(
/* targetLiveOffsetMs= */ 2_000,
/* minLiveOffsetMs= */ C.TIME_UNSET,
/* maxLiveOffsetMs= */ C.TIME_UNSET,
/* minPlaybackSpeed= */ C.RATE_UNSET,
/* maxPlaybackSpeed= */ C.RATE_UNSET));
new LiveConfiguration.Builder()
.setTargetOffsetMs(2_000)
.setMinOffsetMs(C.TIME_UNSET)
.setMaxOffsetMs(C.TIME_UNSET)
.setMinPlaybackSpeed(C.RATE_UNSET)
.setMaxPlaybackSpeed(C.RATE_UNSET)
.build());
float adjustedSpeed1 =
defaultLivePlaybackSpeedControl.getAdjustedPlaybackSpeed(
/* liveOffsetUs= */ 1_500_000, /* bufferedDurationUs= */ 1_000_000);
defaultLivePlaybackSpeedControl.setLiveConfiguration(
new LiveConfiguration(
/* targetLiveOffsetMs= */ 1_000,
/* minLiveOffsetMs= */ C.TIME_UNSET,
/* maxLiveOffsetMs= */ C.TIME_UNSET,
/* minPlaybackSpeed= */ C.RATE_UNSET,
/* maxPlaybackSpeed= */ C.RATE_UNSET));
new LiveConfiguration.Builder()
.setTargetOffsetMs(1_000)
.setMinOffsetMs(C.TIME_UNSET)
.setMaxOffsetMs(C.TIME_UNSET)
.setMinPlaybackSpeed(C.RATE_UNSET)
.setMaxPlaybackSpeed(C.RATE_UNSET)
.build());
float adjustedSpeed2 =
defaultLivePlaybackSpeedControl.getAdjustedPlaybackSpeed(
/* liveOffsetUs= */ 2_500_000, /* bufferedDurationUs= */ 1_000_000);
@ -751,12 +784,13 @@ public class DefaultLivePlaybackSpeedControlTest {
DefaultLivePlaybackSpeedControl defaultLivePlaybackSpeedControl =
new DefaultLivePlaybackSpeedControl.Builder().setMinUpdateIntervalMs(123).build();
defaultLivePlaybackSpeedControl.setLiveConfiguration(
new LiveConfiguration(
/* targetLiveOffsetMs= */ 2_000,
/* minLiveOffsetMs= */ C.TIME_UNSET,
/* maxLiveOffsetMs= */ C.TIME_UNSET,
/* minPlaybackSpeed= */ C.RATE_UNSET,
/* maxPlaybackSpeed= */ C.RATE_UNSET));
new LiveConfiguration.Builder()
.setTargetOffsetMs(2_000)
.setMinOffsetMs(C.TIME_UNSET)
.setMaxOffsetMs(C.TIME_UNSET)
.setMinPlaybackSpeed(C.RATE_UNSET)
.setMaxPlaybackSpeed(C.RATE_UNSET)
.build());
float adjustedSpeed1 =
defaultLivePlaybackSpeedControl.getAdjustedPlaybackSpeed(
@ -774,12 +808,13 @@ public class DefaultLivePlaybackSpeedControlTest {
DefaultLivePlaybackSpeedControl defaultLivePlaybackSpeedControl =
new DefaultLivePlaybackSpeedControl.Builder().setMinUpdateIntervalMs(123).build();
defaultLivePlaybackSpeedControl.setLiveConfiguration(
new LiveConfiguration(
/* targetLiveOffsetMs= */ 2_000,
/* minLiveOffsetMs= */ C.TIME_UNSET,
/* maxLiveOffsetMs= */ C.TIME_UNSET,
/* minPlaybackSpeed= */ C.RATE_UNSET,
/* maxPlaybackSpeed= */ C.RATE_UNSET));
new LiveConfiguration.Builder()
.setTargetOffsetMs(2_000)
.setMinOffsetMs(C.TIME_UNSET)
.setMaxOffsetMs(C.TIME_UNSET)
.setMinPlaybackSpeed(C.RATE_UNSET)
.setMaxPlaybackSpeed(C.RATE_UNSET)
.build());
float adjustedSpeed1 =
defaultLivePlaybackSpeedControl.getAdjustedPlaybackSpeed(

View file

@ -23,7 +23,9 @@ import android.net.Uri;
import android.os.Handler;
import android.os.Looper;
import androidx.test.ext.junit.runners.AndroidJUnit4;
import com.google.android.exoplayer2.analytics.PlayerId;
import com.google.android.exoplayer2.source.MediaSource.MediaPeriodId;
import com.google.android.exoplayer2.source.MediaSource.MediaSourceCaller;
import com.google.android.exoplayer2.source.SinglePeriodTimeline;
import com.google.android.exoplayer2.source.ads.AdPlaybackState;
import com.google.android.exoplayer2.source.ads.SinglePeriodAdTimeline;
@ -78,7 +80,8 @@ public final class MediaPeriodQueueTest {
new MediaSourceList(
mock(MediaSourceList.MediaSourceListInfoRefreshListener.class),
/* analyticsCollector= */ null,
new Handler(Looper.getMainLooper()));
new Handler(Looper.getMainLooper()),
PlayerId.UNSET);
rendererCapabilities = new RendererCapabilities[0];
trackSelector = mock(TrackSelector.class);
allocator = mock(Allocator.class);
@ -738,7 +741,8 @@ public final class MediaPeriodQueueTest {
new MediaSourceList.MediaSourceHolder(fakeMediaSource, /* useLazyPreparation= */ false);
mediaSourceList.setMediaSources(
ImmutableList.of(mediaSourceHolder), new FakeShuffleOrder(/* length= */ 1));
mediaSourceHolder.mediaSource.prepareSourceInternal(/* mediaTransferListener */ null);
mediaSourceHolder.mediaSource.prepareSource(
mock(MediaSourceCaller.class), /* mediaTransferListener */ null, PlayerId.UNSET);
Timeline playlistTimeline = mediaSourceList.createTimeline();
firstPeriodUid = playlistTimeline.getUidOfPeriod(/* periodIndex= */ 0);

View file

@ -26,6 +26,7 @@ import static org.mockito.Mockito.verify;
import static org.mockito.Mockito.when;
import androidx.test.ext.junit.runners.AndroidJUnit4;
import com.google.android.exoplayer2.analytics.PlayerId;
import com.google.android.exoplayer2.source.MediaSource;
import com.google.android.exoplayer2.source.ShuffleOrder;
import com.google.android.exoplayer2.testutil.FakeMediaSource;
@ -54,7 +55,8 @@ public class MediaSourceListTest {
new MediaSourceList(
mock(MediaSourceList.MediaSourceListInfoRefreshListener.class),
/* analyticsCollector= */ null,
Util.createHandlerForCurrentOrMainLooper());
Util.createHandlerForCurrentOrMainLooper(),
PlayerId.UNSET);
}
@Test
@ -92,30 +94,30 @@ public class MediaSourceListTest {
// Verify prepare is called once on prepare.
verify(mockMediaSource1, times(0))
.prepareSource(
any(MediaSource.MediaSourceCaller.class), /* mediaTransferListener= */ isNull());
any(MediaSource.MediaSourceCaller.class), /* mediaTransferListener= */ isNull(), any());
verify(mockMediaSource2, times(0))
.prepareSource(
any(MediaSource.MediaSourceCaller.class), /* mediaTransferListener= */ isNull());
any(MediaSource.MediaSourceCaller.class), /* mediaTransferListener= */ isNull(), any());
mediaSourceList.prepare(/* mediaTransferListener= */ null);
assertThat(mediaSourceList.isPrepared()).isTrue();
// Verify prepare is called once on prepare.
verify(mockMediaSource1, times(1))
.prepareSource(
any(MediaSource.MediaSourceCaller.class), /* mediaTransferListener= */ isNull());
any(MediaSource.MediaSourceCaller.class), /* mediaTransferListener= */ isNull(), any());
verify(mockMediaSource2, times(1))
.prepareSource(
any(MediaSource.MediaSourceCaller.class), /* mediaTransferListener= */ isNull());
any(MediaSource.MediaSourceCaller.class), /* mediaTransferListener= */ isNull(), any());
mediaSourceList.release();
mediaSourceList.prepare(/* mediaTransferListener= */ null);
// Verify prepare is called a second time on re-prepare.
verify(mockMediaSource1, times(2))
.prepareSource(
any(MediaSource.MediaSourceCaller.class), /* mediaTransferListener= */ isNull());
any(MediaSource.MediaSourceCaller.class), /* mediaTransferListener= */ isNull(), any());
verify(mockMediaSource2, times(2))
.prepareSource(
any(MediaSource.MediaSourceCaller.class), /* mediaTransferListener= */ isNull());
any(MediaSource.MediaSourceCaller.class), /* mediaTransferListener= */ isNull(), any());
}
@Test
@ -182,10 +184,10 @@ public class MediaSourceListTest {
// Verify sources are prepared.
verify(mockMediaSource1, times(1))
.prepareSource(
any(MediaSource.MediaSourceCaller.class), /* mediaTransferListener= */ isNull());
any(MediaSource.MediaSourceCaller.class), /* mediaTransferListener= */ isNull(), any());
verify(mockMediaSource2, times(1))
.prepareSource(
any(MediaSource.MediaSourceCaller.class), /* mediaTransferListener= */ isNull());
any(MediaSource.MediaSourceCaller.class), /* mediaTransferListener= */ isNull(), any());
// Set media items again. The second holder is re-used.
MediaSource mockMediaSource3 = mock(MediaSource.class);
@ -203,7 +205,7 @@ public class MediaSourceListTest {
assertThat(mediaSources.get(1).isRemoved).isFalse();
verify(mockMediaSource2, times(2))
.prepareSource(
any(MediaSource.MediaSourceCaller.class), /* mediaTransferListener= */ isNull());
any(MediaSource.MediaSourceCaller.class), /* mediaTransferListener= */ isNull(), any());
}
@Test
@ -222,10 +224,10 @@ public class MediaSourceListTest {
// Verify lazy initialization does not call prepare on sources.
verify(mockMediaSource1, times(0))
.prepareSource(
any(MediaSource.MediaSourceCaller.class), /* mediaTransferListener= */ isNull());
any(MediaSource.MediaSourceCaller.class), /* mediaTransferListener= */ isNull(), any());
verify(mockMediaSource2, times(0))
.prepareSource(
any(MediaSource.MediaSourceCaller.class), /* mediaTransferListener= */ isNull());
any(MediaSource.MediaSourceCaller.class), /* mediaTransferListener= */ isNull(), any());
for (int i = 0; i < mediaSources.size(); i++) {
assertThat(mediaSources.get(i).firstWindowIndexInChild).isEqualTo(i);
@ -259,10 +261,10 @@ public class MediaSourceListTest {
// Verify prepare is called on sources when added.
verify(mockMediaSource1, times(1))
.prepareSource(
any(MediaSource.MediaSourceCaller.class), /* mediaTransferListener= */ isNull());
any(MediaSource.MediaSourceCaller.class), /* mediaTransferListener= */ isNull(), any());
verify(mockMediaSource2, times(1))
.prepareSource(
any(MediaSource.MediaSourceCaller.class), /* mediaTransferListener= */ isNull());
any(MediaSource.MediaSourceCaller.class), /* mediaTransferListener= */ isNull(), any());
}
@Test
@ -387,7 +389,7 @@ public class MediaSourceListTest {
new ShuffleOrder.DefaultShuffleOrder(/* length= */ 1));
verify(mockMediaSource, times(0))
.prepareSource(
any(MediaSource.MediaSourceCaller.class), /* mediaTransferListener= */ isNull());
any(MediaSource.MediaSourceCaller.class), /* mediaTransferListener= */ isNull(), any());
mediaSourceList.release();
verify(mockMediaSource, times(0)).releaseSource(any(MediaSource.MediaSourceCaller.class));
assertThat(mediaSourceHolder.isRemoved).isFalse();
@ -406,7 +408,7 @@ public class MediaSourceListTest {
new ShuffleOrder.DefaultShuffleOrder(/* length= */ 1));
verify(mockMediaSource, times(1))
.prepareSource(
any(MediaSource.MediaSourceCaller.class), /* mediaTransferListener= */ isNull());
any(MediaSource.MediaSourceCaller.class), /* mediaTransferListener= */ isNull(), any());
mediaSourceList.release();
verify(mockMediaSource, times(1)).releaseSource(any(MediaSource.MediaSourceCaller.class));
assertThat(mediaSourceHolder.isRemoved).isFalse();

View file

@ -54,7 +54,8 @@ public class AsynchronousMediaCodecAdapterTest {
new AsynchronousMediaCodecAdapter.Factory(
/* callbackThreadSupplier= */ () -> callbackThread,
/* queueingThreadSupplier= */ () -> queueingThread,
/* synchronizeCodecInteractionsWithQueueing= */ false)
/* synchronizeCodecInteractionsWithQueueing= */ false,
/* enableImmediateCodecStartAfterFlush= */ false)
.createAdapter(configuration);
bufferInfo = new MediaCodec.BufferInfo();
// After starting the MediaCodec, the ShadowMediaCodec offers input buffer 0. We advance the

View file

@ -190,6 +190,25 @@ public class AsynchronousMediaCodecBufferEnqueuerTest {
enqueuer.flush();
}
@Test
public void flush_withPendingError_doesNotResetError() {
enqueuer.start();
// Plant a pending error on the enqueuer before flushing.
enqueuer.setPendingRuntimeException(
new MediaCodec.CryptoException(/* errorCode= */ 0, /* detailMessage= */ null));
enqueuer.flush();
// flush() must NOT clear the pending error: the next queueInputBuffer() call
// is still expected to surface it to the caller.
assertThrows(
MediaCodec.CryptoException.class,
() ->
enqueuer.queueInputBuffer(
/* index= */ 0,
/* offset= */ 0,
/* size= */ 0,
/* presentationTimeUs= */ 0,
/* flags= */ 0));
}
@Test
public void shutdown_withoutStart_works() {
enqueuer.shutdown();
@ -219,6 +238,16 @@ public class AsynchronousMediaCodecBufferEnqueuerTest {
assertThrows(IllegalStateException.class, () -> enqueuer.shutdown());
}
@Test
public void shutdown_withPendingError_doesNotThrow() {
enqueuer.start();
// Plant a pending error on the enqueuer before shutting down.
enqueuer.setPendingRuntimeException(
new MediaCodec.CryptoException(/* errorCode= */ 0, /* detailMessage= */ null));
// Shutting down with a pending error set should not throw.
enqueuer.shutdown();
}
private static CryptoInfo createCryptoInfo() {
CryptoInfo info = new CryptoInfo();
int numSubSamples = 5;

View file

@ -24,6 +24,7 @@ import static org.robolectric.Shadows.shadowOf;
import android.media.MediaCodec;
import android.media.MediaFormat;
import android.os.Handler;
import android.os.HandlerThread;
import android.os.Looper;
import androidx.test.ext.junit.runners.AndroidJUnit4;
@ -81,16 +82,24 @@ public class AsynchronousMediaCodecCallbackTest {
@Test
public void dequeInputBufferIndex_withPendingFlush_returnsTryAgain() {
Looper callbackThreadLooper = callbackThread.getLooper();
AtomicBoolean beforeFlushCompletes = new AtomicBoolean();
AtomicBoolean flushCompleted = new AtomicBoolean();
Looper callbackThreadLooper = callbackThread.getLooper();
Handler callbackHandler = new Handler(callbackThreadLooper);
ShadowLooper shadowCallbackLooper = shadowOf(callbackThreadLooper);
// Pause the callback thread so that flush() never completes.
shadowOf(callbackThreadLooper).pause();
shadowCallbackLooper.pause();
// Send two input buffers to the callback and then flush().
asynchronousMediaCodecCallback.onInputBufferAvailable(codec, 0);
asynchronousMediaCodecCallback.onInputBufferAvailable(codec, 1);
asynchronousMediaCodecCallback.flushAsync(
/* onFlushCompleted= */ () -> flushCompleted.set(true));
callbackHandler.post(() -> beforeFlushCompletes.set(true));
asynchronousMediaCodecCallback.flush(/* codec= */ null);
callbackHandler.post(() -> flushCompleted.set(true));
while (!beforeFlushCompletes.get()) {
shadowCallbackLooper.runOneTask();
}
assertThat(flushCompleted.get()).isFalse();
assertThat(asynchronousMediaCodecCallback.dequeueInputBufferIndex())
.isEqualTo(MediaCodec.INFO_TRY_AGAIN_LATER);
@ -104,8 +113,8 @@ public class AsynchronousMediaCodecCallbackTest {
// Send two input buffers to the callback and then flush().
asynchronousMediaCodecCallback.onInputBufferAvailable(codec, 0);
asynchronousMediaCodecCallback.onInputBufferAvailable(codec, 1);
asynchronousMediaCodecCallback.flushAsync(
/* onFlushCompleted= */ () -> flushCompleted.set(true));
asynchronousMediaCodecCallback.flush(/* codec= */ null);
new Handler(callbackThreadLooper).post(() -> flushCompleted.set(true));
// Progress the callback thread so that flush() completes.
shadowOf(callbackThreadLooper).idle();
@ -123,10 +132,11 @@ public class AsynchronousMediaCodecCallbackTest {
// another input buffer.
asynchronousMediaCodecCallback.onInputBufferAvailable(codec, 0);
asynchronousMediaCodecCallback.onInputBufferAvailable(codec, 1);
asynchronousMediaCodecCallback.flushAsync(
/* onFlushCompleted= */ () -> flushCompleted.set(true));
// Progress the callback thread so that flush() completes.
shadowOf(callbackThreadLooper).idle();
asynchronousMediaCodecCallback.flush(/* codec= */ null);
new Handler(callbackThreadLooper).post(() -> flushCompleted.set(true));
// Progress the callback thread to complete flush.
shadowOf(callbackThread.getLooper()).idle();
// Send another input buffer to the callback
asynchronousMediaCodecCallback.onInputBufferAvailable(codec, 2);
assertThat(flushCompleted.get()).isTrue();
@ -152,20 +162,6 @@ public class AsynchronousMediaCodecCallbackTest {
() -> asynchronousMediaCodecCallback.dequeueInputBufferIndex());
}
@Test
public void dequeueInputBufferIndex_afterFlushCompletedWithError_throwsError() throws Exception {
MediaCodec.CodecException codecException = createCodecException();
asynchronousMediaCodecCallback.flushAsync(
() -> {
throw codecException;
});
shadowOf(callbackThread.getLooper()).idle();
assertThrows(
MediaCodec.CodecException.class,
() -> asynchronousMediaCodecCallback.dequeueInputBufferIndex());
}
@Test
public void dequeOutputBufferIndex_afterCreation_returnsTryAgain() {
MediaCodec.BufferInfo outBufferInfo = new MediaCodec.BufferInfo();
@ -198,17 +194,24 @@ public class AsynchronousMediaCodecCallbackTest {
@Test
public void dequeOutputBufferIndex_withPendingFlush_returnsTryAgain() {
Looper callbackThreadLooper = callbackThread.getLooper();
AtomicBoolean beforeFlushCompletes = new AtomicBoolean();
AtomicBoolean flushCompleted = new AtomicBoolean();
Looper callbackThreadLooper = callbackThread.getLooper();
Handler callbackHandler = new Handler(callbackThreadLooper);
ShadowLooper shadowCallbackLooper = shadowOf(callbackThreadLooper);
// Pause the callback thread so that flush() never completes.
shadowOf(callbackThreadLooper).pause();
shadowCallbackLooper.pause();
// Send two output buffers to the callback and then flush().
MediaCodec.BufferInfo bufferInfo = new MediaCodec.BufferInfo();
asynchronousMediaCodecCallback.onOutputBufferAvailable(codec, 0, bufferInfo);
asynchronousMediaCodecCallback.onOutputBufferAvailable(codec, 1, bufferInfo);
asynchronousMediaCodecCallback.flushAsync(
/* onFlushCompleted= */ () -> flushCompleted.set(true));
callbackHandler.post(() -> beforeFlushCompletes.set(true));
asynchronousMediaCodecCallback.flush(/* codec= */ null);
callbackHandler.post(() -> flushCompleted.set(true));
while (beforeFlushCompletes.get()) {
shadowCallbackLooper.runOneTask();
}
assertThat(flushCompleted.get()).isFalse();
assertThat(asynchronousMediaCodecCallback.dequeueOutputBufferIndex(new MediaCodec.BufferInfo()))
@ -224,8 +227,8 @@ public class AsynchronousMediaCodecCallbackTest {
MediaCodec.BufferInfo bufferInfo = new MediaCodec.BufferInfo();
asynchronousMediaCodecCallback.onOutputBufferAvailable(codec, 0, bufferInfo);
asynchronousMediaCodecCallback.onOutputBufferAvailable(codec, 1, bufferInfo);
asynchronousMediaCodecCallback.flushAsync(
/* onFlushCompleted= */ () -> flushCompleted.set(true));
asynchronousMediaCodecCallback.flush(/* codec= */ null);
new Handler(callbackThreadLooper).post(() -> flushCompleted.set(true));
// Progress the callback looper so that flush() completes.
shadowOf(callbackThreadLooper).idle();
@ -245,10 +248,11 @@ public class AsynchronousMediaCodecCallbackTest {
asynchronousMediaCodecCallback.onOutputFormatChanged(codec, createMediaFormat("format0"));
asynchronousMediaCodecCallback.onOutputBufferAvailable(codec, 0, bufferInfo);
asynchronousMediaCodecCallback.onOutputBufferAvailable(codec, 1, bufferInfo);
asynchronousMediaCodecCallback.flushAsync(
/* onFlushCompleted= */ () -> flushCompleted.set(true));
asynchronousMediaCodecCallback.flush(/* codec= */ null);
new Handler(callbackThreadLooper).post(() -> flushCompleted.set(true));
// Progress the callback looper so that flush() completes.
shadowOf(callbackThreadLooper).idle();
// Emulate an output buffer is available.
asynchronousMediaCodecCallback.onOutputBufferAvailable(codec, 2, bufferInfo);
MediaCodec.BufferInfo outBufferInfo = new MediaCodec.BufferInfo();
@ -271,8 +275,8 @@ public class AsynchronousMediaCodecCallbackTest {
MediaFormat pendingMediaFormat = new MediaFormat();
asynchronousMediaCodecCallback.onOutputFormatChanged(codec, pendingMediaFormat);
// flush() should not discard the last format.
asynchronousMediaCodecCallback.flushAsync(
/* onFlushCompleted= */ () -> flushCompleted.set(true));
asynchronousMediaCodecCallback.flush(/* codec= */ null);
new Handler(callbackThreadLooper).post(() -> flushCompleted.set(true));
// Progress the callback looper so that flush() completes.
shadowOf(callbackThreadLooper).idle();
// Right after flush(), we send an output buffer: the pending output format should be
@ -298,8 +302,8 @@ public class AsynchronousMediaCodecCallbackTest {
MediaFormat pendingMediaFormat = new MediaFormat();
asynchronousMediaCodecCallback.onOutputFormatChanged(codec, pendingMediaFormat);
// flush() should not discard the last format.
asynchronousMediaCodecCallback.flushAsync(
/* onFlushCompleted= */ () -> flushCompleted.set(true));
asynchronousMediaCodecCallback.flush(/* codec= */ null);
new Handler(callbackThreadLooper).post(() -> flushCompleted.set(true));
// Progress the callback looper so that flush() completes.
shadowOf(callbackThreadLooper).idle();
// The first callback after flush() is a new MediaFormat, it should overwrite the pending
@ -335,20 +339,6 @@ public class AsynchronousMediaCodecCallbackTest {
() -> asynchronousMediaCodecCallback.dequeueOutputBufferIndex(new MediaCodec.BufferInfo()));
}
@Test
public void dequeueOutputBufferIndex_afterFlushCompletedWithError_throwsError() throws Exception {
MediaCodec.CodecException codecException = createCodecException();
asynchronousMediaCodecCallback.flushAsync(
() -> {
throw codecException;
});
shadowOf(callbackThread.getLooper()).idle();
assertThrows(
MediaCodec.CodecException.class,
() -> asynchronousMediaCodecCallback.dequeueOutputBufferIndex(new MediaCodec.BufferInfo()));
}
@Test
public void getOutputFormat_onNewInstance_raisesException() {
try {
@ -377,8 +367,8 @@ public class AsynchronousMediaCodecCallbackTest {
asynchronousMediaCodecCallback.onOutputFormatChanged(codec, format);
asynchronousMediaCodecCallback.dequeueOutputBufferIndex(new MediaCodec.BufferInfo());
asynchronousMediaCodecCallback.flushAsync(
/* onFlushCompleted= */ () -> flushCompleted.set(true));
asynchronousMediaCodecCallback.flush(/* codec= */ null);
new Handler(callbackThreadLooper).post(() -> flushCompleted.set(true));
// Progress the callback looper so that flush() completes.
shadowOf(callbackThreadLooper).idle();
@ -390,7 +380,8 @@ public class AsynchronousMediaCodecCallbackTest {
public void getOutputFormat_afterFlushWithPendingFormat_returnsPendingFormat() {
MediaCodec.BufferInfo outInfo = new MediaCodec.BufferInfo();
AtomicBoolean flushCompleted = new AtomicBoolean();
ShadowLooper shadowCallbackLooper = shadowOf(callbackThread.getLooper());
Looper callbackThreadLooper = callbackThread.getLooper();
ShadowLooper shadowCallbackLooper = shadowOf(callbackThreadLooper);
shadowCallbackLooper.pause();
asynchronousMediaCodecCallback.onOutputFormatChanged(codec, createMediaFormat("format0"));
@ -399,8 +390,8 @@ public class AsynchronousMediaCodecCallbackTest {
asynchronousMediaCodecCallback.onOutputFormatChanged(codec, createMediaFormat("format1"));
asynchronousMediaCodecCallback.onOutputBufferAvailable(
codec, /* index= */ 1, new MediaCodec.BufferInfo());
asynchronousMediaCodecCallback.flushAsync(
/* onFlushCompleted= */ () -> flushCompleted.set(true));
asynchronousMediaCodecCallback.flush(/* codec= */ null);
new Handler(callbackThreadLooper).post(() -> flushCompleted.set(true));
// Progress the looper so that flush is completed
shadowCallbackLooper.idle();
// Enqueue an output buffer to make the pending format available.
@ -419,7 +410,8 @@ public class AsynchronousMediaCodecCallbackTest {
public void
getOutputFormat_withConsecutiveFlushAndPendingFormatFromFirstFlush_returnsPendingFormat() {
MediaCodec.BufferInfo outInfo = new MediaCodec.BufferInfo();
AtomicInteger flushesCompleted = new AtomicInteger();
AtomicInteger flushCompleted = new AtomicInteger();
Handler callbackThreadHandler = new Handler(callbackThread.getLooper());
ShadowLooper shadowCallbackLooper = shadowOf(callbackThread.getLooper());
shadowCallbackLooper.pause();
@ -427,17 +419,17 @@ public class AsynchronousMediaCodecCallbackTest {
asynchronousMediaCodecCallback.onOutputBufferAvailable(
codec, /* index= */ 0, new MediaCodec.BufferInfo());
// Flush and progress the looper so that flush is completed.
asynchronousMediaCodecCallback.flushAsync(
/* onFlushCompleted= */ flushesCompleted::incrementAndGet);
asynchronousMediaCodecCallback.flush(/* codec= */ null);
callbackThreadHandler.post(flushCompleted::incrementAndGet);
shadowCallbackLooper.idle();
// Flush again, the pending format from the first flush should remain as pending.
asynchronousMediaCodecCallback.flushAsync(
/* onFlushCompleted= */ flushesCompleted::incrementAndGet);
asynchronousMediaCodecCallback.flush(/* codec= */ null);
callbackThreadHandler.post(flushCompleted::incrementAndGet);
shadowCallbackLooper.idle();
asynchronousMediaCodecCallback.onOutputBufferAvailable(
codec, /* index= */ 1, new MediaCodec.BufferInfo());
assertThat(flushesCompleted.get()).isEqualTo(2);
assertThat(flushCompleted.get()).isEqualTo(2);
assertThat(asynchronousMediaCodecCallback.dequeueOutputBufferIndex(outInfo))
.isEqualTo(MediaCodec.INFO_OUTPUT_FORMAT_CHANGED);
assertThat(asynchronousMediaCodecCallback.getOutputFormat().getString("name"))
@ -446,16 +438,21 @@ public class AsynchronousMediaCodecCallbackTest {
}
@Test
public void flush_withPendingFlush_onlyLastFlushCompletes() {
ShadowLooper callbackLooperShadow = shadowOf(callbackThread.getLooper());
callbackLooperShadow.pause();
AtomicInteger flushCompleted = new AtomicInteger();
public void flush_withPendingError_resetsError() throws Exception {
asynchronousMediaCodecCallback.onError(codec, createCodecException());
// Calling flush should clear any pending error.
asynchronousMediaCodecCallback.flush(/* codec= */ null);
asynchronousMediaCodecCallback.flushAsync(/* onFlushCompleted= */ () -> flushCompleted.set(1));
asynchronousMediaCodecCallback.flushAsync(/* onFlushCompleted= */ () -> flushCompleted.set(2));
callbackLooperShadow.idle();
assertThat(asynchronousMediaCodecCallback.dequeueInputBufferIndex())
.isEqualTo(MediaCodec.INFO_TRY_AGAIN_LATER);
}
assertThat(flushCompleted.get()).isEqualTo(2);
@Test
public void shutdown_withPendingError_doesNotThrow() throws Exception {
// Deliver an error to the callback first, then verify shutdown() swallows it.
asynchronousMediaCodecCallback.onError(codec, createCodecException());
// Calling shutdown() should not throw.
asynchronousMediaCodecCallback.shutdown();
}
/** Reflectively create a {@link MediaCodec.CodecException}. */

View file

@ -24,6 +24,7 @@ import androidx.test.ext.junit.runners.AndroidJUnit4;
import com.google.android.exoplayer2.C;
import com.google.android.exoplayer2.Player;
import com.google.android.exoplayer2.Timeline;
import com.google.android.exoplayer2.analytics.PlayerId;
import com.google.android.exoplayer2.source.MediaSource.MediaPeriodId;
import com.google.android.exoplayer2.source.MediaSource.MediaSourceCaller;
import com.google.android.exoplayer2.source.ShuffleOrder.DefaultShuffleOrder;
@ -644,7 +645,7 @@ public final class ConcatenatingMediaSourceTest {
() -> {
MediaSourceCaller caller = mock(MediaSourceCaller.class);
mediaSource.addMediaSources(Arrays.asList(createMediaSources(2)));
mediaSource.prepareSource(caller, /* mediaTransferListener= */ null);
mediaSource.prepareSource(caller, /* mediaTransferListener= */ null, PlayerId.UNSET);
mediaSource.moveMediaSource(
/* currentIndex= */ 0,
/* newIndex= */ 1,

View file

@ -93,12 +93,22 @@ public final class DefaultMediaSourceFactoryTest {
public void createMediaSource_withSubtitle_isMergingMediaSource() {
DefaultMediaSourceFactory defaultMediaSourceFactory =
new DefaultMediaSourceFactory((Context) ApplicationProvider.getApplicationContext());
List<MediaItem.Subtitle> subtitles =
List<MediaItem.SubtitleConfiguration> subtitleConfigurations =
Arrays.asList(
new MediaItem.Subtitle(Uri.parse(URI_TEXT), MimeTypes.APPLICATION_TTML, "en"),
new MediaItem.Subtitle(
Uri.parse(URI_TEXT), MimeTypes.APPLICATION_TTML, "de", C.SELECTION_FLAG_DEFAULT));
MediaItem mediaItem = new MediaItem.Builder().setUri(URI_MEDIA).setSubtitles(subtitles).build();
new MediaItem.SubtitleConfiguration.Builder(Uri.parse(URI_TEXT))
.setMimeType(MimeTypes.APPLICATION_TTML)
.setLanguage("en")
.build(),
new MediaItem.SubtitleConfiguration.Builder(Uri.parse(URI_TEXT))
.setMimeType(MimeTypes.APPLICATION_TTML)
.setLanguage("de")
.setSelectionFlags(C.SELECTION_FLAG_DEFAULT)
.build());
MediaItem mediaItem =
new MediaItem.Builder()
.setUri(URI_MEDIA)
.setSubtitleConfigurations(subtitleConfigurations)
.build();
MediaSource mediaSource = defaultMediaSourceFactory.createMediaSource(mediaItem);
@ -110,7 +120,11 @@ public final class DefaultMediaSourceFactoryTest {
DefaultMediaSourceFactory defaultMediaSourceFactory =
new DefaultMediaSourceFactory((Context) ApplicationProvider.getApplicationContext());
MediaItem mediaItem =
new MediaItem.Builder().setUri(URI_MEDIA).setClipStartPositionMs(1000L).build();
new MediaItem.Builder()
.setUri(URI_MEDIA)
.setClippingConfiguration(
new MediaItem.ClippingConfiguration.Builder().setStartPositionMs(1000L).build())
.build();
MediaSource mediaSource = defaultMediaSourceFactory.createMediaSource(mediaItem);
@ -122,7 +136,11 @@ public final class DefaultMediaSourceFactoryTest {
DefaultMediaSourceFactory defaultMediaSourceFactory =
new DefaultMediaSourceFactory((Context) ApplicationProvider.getApplicationContext());
MediaItem mediaItem =
new MediaItem.Builder().setUri(URI_MEDIA).setClipEndPositionMs(1000L).build();
new MediaItem.Builder()
.setUri(URI_MEDIA)
.setClippingConfiguration(
new MediaItem.ClippingConfiguration.Builder().setEndPositionMs(1000L).build())
.build();
MediaSource mediaSource = defaultMediaSourceFactory.createMediaSource(mediaItem);
@ -134,7 +152,13 @@ public final class DefaultMediaSourceFactoryTest {
DefaultMediaSourceFactory defaultMediaSourceFactory =
new DefaultMediaSourceFactory((Context) ApplicationProvider.getApplicationContext());
MediaItem mediaItem =
new MediaItem.Builder().setUri(URI_MEDIA).setClipRelativeToDefaultPosition(true).build();
new MediaItem.Builder()
.setUri(URI_MEDIA)
.setClippingConfiguration(
new MediaItem.ClippingConfiguration.Builder()
.setRelativeToDefaultPosition(true)
.build())
.build();
MediaSource mediaSource = defaultMediaSourceFactory.createMediaSource(mediaItem);
@ -148,7 +172,10 @@ public final class DefaultMediaSourceFactoryTest {
MediaItem mediaItem =
new MediaItem.Builder()
.setUri(URI_MEDIA)
.setClipEndPositionMs(C.TIME_END_OF_SOURCE)
.setClippingConfiguration(
new MediaItem.ClippingConfiguration.Builder()
.setEndPositionMs(C.TIME_END_OF_SOURCE)
.build())
.build();
MediaSource mediaSource = defaultMediaSourceFactory.createMediaSource(mediaItem);

View file

@ -29,6 +29,7 @@ import androidx.test.ext.junit.runners.AndroidJUnit4;
import com.google.android.exoplayer2.C;
import com.google.android.exoplayer2.MediaItem;
import com.google.android.exoplayer2.Timeline;
import com.google.android.exoplayer2.analytics.PlayerId;
import com.google.android.exoplayer2.source.MediaPeriod;
import com.google.android.exoplayer2.source.MediaSource.MediaPeriodId;
import com.google.android.exoplayer2.source.MediaSource.MediaSourceCaller;
@ -117,7 +118,8 @@ public final class AdsMediaSourceTest {
adMediaSourceFactory,
mockAdsLoader,
mockAdViewProvider);
adsMediaSource.prepareSource(mockMediaSourceCaller, /* mediaTransferListener= */ null);
adsMediaSource.prepareSource(
mockMediaSourceCaller, /* mediaTransferListener= */ null, PlayerId.UNSET);
shadowOf(Looper.getMainLooper()).idle();
verify(mockAdsLoader)
.start(

View file

@ -40,6 +40,7 @@ import com.google.android.exoplayer2.MediaItem;
import com.google.android.exoplayer2.Player;
import com.google.android.exoplayer2.Timeline;
import com.google.android.exoplayer2.analytics.AnalyticsListener;
import com.google.android.exoplayer2.analytics.PlayerId;
import com.google.android.exoplayer2.robolectric.PlaybackOutput;
import com.google.android.exoplayer2.robolectric.ShadowMediaCodecConfig;
import com.google.android.exoplayer2.source.DefaultMediaSourceFactory;
@ -104,7 +105,9 @@ public final class ServerSideInsertedAdMediaSourceTest {
mediaSource.setAdPlaybackState(adPlaybackState);
mediaSource.prepareSource(
(source, timeline) -> timelineReference.set(timeline), /* mediaTransferListener= */ null);
(source, timeline) -> timelineReference.set(timeline),
/* mediaTransferListener= */ null,
PlayerId.UNSET);
runMainLooperUntil(() -> timelineReference.get() != null);
Timeline timeline = timelineReference.get();

View file

@ -18,6 +18,7 @@ package com.google.android.exoplayer2.trackselection;
import static com.google.android.exoplayer2.C.FORMAT_EXCEEDS_CAPABILITIES;
import static com.google.android.exoplayer2.C.FORMAT_HANDLED;
import static com.google.android.exoplayer2.C.FORMAT_UNSUPPORTED_SUBTYPE;
import static com.google.android.exoplayer2.C.FORMAT_UNSUPPORTED_TYPE;
import static com.google.android.exoplayer2.RendererCapabilities.ADAPTIVE_NOT_SEAMLESS;
import static com.google.android.exoplayer2.RendererCapabilities.TUNNELING_NOT_SUPPORTED;
import static com.google.android.exoplayer2.RendererConfiguration.DEFAULT;
@ -52,6 +53,7 @@ import com.google.android.exoplayer2.upstream.BandwidthMeter;
import com.google.android.exoplayer2.util.MimeTypes;
import com.google.android.exoplayer2.util.Util;
import com.google.common.collect.ImmutableList;
import com.google.common.collect.ImmutableMap;
import com.google.common.collect.ImmutableSet;
import java.util.HashMap;
import java.util.Map;
@ -204,6 +206,44 @@ public final class DefaultTrackSelectorTest {
.isEqualTo(new RendererConfiguration[] {DEFAULT, DEFAULT});
}
@Test
public void selectTrack_withMixedEmptyAndNonEmptyTrackOverrides_appliesNonEmptyOverride()
throws Exception {
// Three video groups that differ only in average bitrate.
TrackGroup videoGroupHighBitrate =
new TrackGroup(VIDEO_FORMAT.buildUpon().setAverageBitrate(1_000_000).build());
TrackGroup videoGroupMidBitrate =
new TrackGroup(VIDEO_FORMAT.buildUpon().setAverageBitrate(500_000).build());
TrackGroup videoGroupLowBitrate =
new TrackGroup(VIDEO_FORMAT.buildUpon().setAverageBitrate(100_000).build());
// Empty overrides (no track indices) disable the high- and low-bitrate groups;
// the non-empty override selects track 0 of the mid-bitrate group.
trackSelector.setParameters(
trackSelector
.buildUponParameters()
.setTrackSelectionOverrides(
new TrackSelectionOverrides.Builder()
.addOverride(
new TrackSelectionOverride(
videoGroupHighBitrate, /* trackIndices= */ ImmutableList.of()))
.addOverride(
new TrackSelectionOverride(
videoGroupMidBitrate, /* trackIndices= */ ImmutableList.of(0)))
.addOverride(
new TrackSelectionOverride(
videoGroupLowBitrate, /* trackIndices= */ ImmutableList.of()))
.build()));
TrackSelectorResult result =
trackSelector.selectTracks(
RENDERER_CAPABILITIES,
new TrackGroupArray(videoGroupHighBitrate, videoGroupMidBitrate, videoGroupLowBitrate),
periodId,
TIMELINE);
// Only the non-empty (mid-bitrate) override must be applied; the second renderer
// gets no selection (null).
assertThat(result.selections)
.asList()
.containsExactly(new FixedTrackSelection(videoGroupMidBitrate, /* track= */ 0), null)
.inOrder();
}
/** Tests that an empty override is not applied for a different set of available track groups. */
@Test
public void selectTracks_withEmptyTrackOverrideForDifferentTracks_hasNoEffect()
@ -230,6 +270,97 @@ public final class DefaultTrackSelectorTest {
.isEqualTo(new RendererConfiguration[] {DEFAULT, DEFAULT});
}
@Test
public void selectTrack_withOverrideForDifferentRenderer_clearsDefaultSelectionOfSameType()
throws Exception {
// Two video formats, each supported by exactly one of two video renderers.
Format videoFormatH264 =
VIDEO_FORMAT.buildUpon().setId("H264").setSampleMimeType(MimeTypes.VIDEO_H264).build();
Format videoFormatAv1 =
VIDEO_FORMAT.buildUpon().setId("AV1").setSampleMimeType(MimeTypes.VIDEO_AV1).build();
TrackGroup videoGroupH264 = new TrackGroup(videoFormatH264);
TrackGroup videoGroupAv1 = new TrackGroup(videoFormatAv1);
// H264 renderer handles only the H264 format; AV1 renderer handles only AV1.
Map<String, Integer> rendererCapabilitiesMap =
ImmutableMap.of(
videoFormatH264.id, FORMAT_HANDLED, videoFormatAv1.id, FORMAT_UNSUPPORTED_TYPE);
RendererCapabilities rendererCapabilitiesH264 =
new FakeMappedRendererCapabilities(C.TRACK_TYPE_VIDEO, rendererCapabilitiesMap);
rendererCapabilitiesMap =
ImmutableMap.of(
videoFormatH264.id, FORMAT_UNSUPPORTED_TYPE, videoFormatAv1.id, FORMAT_HANDLED);
RendererCapabilities rendererCapabilitiesAv1 =
new FakeMappedRendererCapabilities(C.TRACK_TYPE_VIDEO, rendererCapabilitiesMap);
// Try to force selection of one TrackGroup in both directions to ensure the default gets
// overridden without having to know what the default is.
trackSelector.setParameters(
trackSelector
.buildUponParameters()
.setTrackSelectionOverrides(
new TrackSelectionOverrides.Builder()
.setOverrideForType(new TrackSelectionOverride(videoGroupH264))
.build()));
TrackSelectorResult result =
trackSelector.selectTracks(
new RendererCapabilities[] {rendererCapabilitiesH264, rendererCapabilitiesAv1},
new TrackGroupArray(videoGroupH264, videoGroupAv1),
periodId,
TIMELINE);
// Forcing H264 must leave the AV1 renderer unselected (null).
assertThat(result.selections)
.asList()
.containsExactly(new FixedTrackSelection(videoGroupH264, /* track= */ 0), null)
.inOrder();
// Now force the AV1 group instead and expect the mirror-image result.
trackSelector.setParameters(
trackSelector
.buildUponParameters()
.setTrackSelectionOverrides(
new TrackSelectionOverrides.Builder()
.setOverrideForType(new TrackSelectionOverride(videoGroupAv1))
.build()));
result =
trackSelector.selectTracks(
new RendererCapabilities[] {rendererCapabilitiesH264, rendererCapabilitiesAv1},
new TrackGroupArray(videoGroupH264, videoGroupAv1),
periodId,
TIMELINE);
assertThat(result.selections)
.asList()
.containsExactly(null, new FixedTrackSelection(videoGroupAv1, /* track= */ 0))
.inOrder();
}
@Test
public void selectTracks_withOverrideForUnmappedGroup_disablesAllRenderersOfSameType()
throws Exception {
// One audio group the renderer supports, one it does not (hence unmapped).
Format audioSupported = AUDIO_FORMAT.buildUpon().setId("supported").build();
Format audioUnsupported = AUDIO_FORMAT.buildUpon().setId("unsupported").build();
TrackGroup audioGroupSupported = new TrackGroup(audioSupported);
TrackGroup audioGroupUnsupported = new TrackGroup(audioUnsupported);
Map<String, Integer> audioRendererCapabilitiesMap =
ImmutableMap.of(
audioSupported.id, FORMAT_HANDLED, audioUnsupported.id, FORMAT_UNSUPPORTED_TYPE);
RendererCapabilities audioRendererCapabilties =
new FakeMappedRendererCapabilities(C.TRACK_TYPE_AUDIO, audioRendererCapabilitiesMap);
// Override for the unsupported (unmapped) audio group.
trackSelector.setParameters(
trackSelector
.buildUponParameters()
.setTrackSelectionOverrides(
new TrackSelectionOverrides.Builder()
.setOverrideForType(new TrackSelectionOverride(audioGroupUnsupported))
.build()));
TrackSelectorResult result =
trackSelector.selectTracks(
new RendererCapabilities[] {VIDEO_CAPABILITIES, audioRendererCapabilties},
new TrackGroupArray(VIDEO_TRACK_GROUP, audioGroupSupported, audioGroupUnsupported),
periodId,
TIMELINE);
// Video is selected as usual; the audio renderer is disabled (null) even though a
// supported audio group exists, because the override targets the unmapped group.
assertThat(result.selections).asList().containsExactly(VIDEO_TRACK_SELECTION, null).inOrder();
}
/** Tests that an override is not applied for a different set of available track groups. */
@Test
public void selectTracksWithNullOverrideForDifferentTracks() throws ExoPlaybackException {
@ -1896,7 +2027,7 @@ public final class DefaultTrackSelectorTest {
.setOverrideForType(
new TrackSelectionOverride(
new TrackGroup(AUDIO_FORMAT, AUDIO_FORMAT, AUDIO_FORMAT, AUDIO_FORMAT),
/* trackIndexes= */ ImmutableList.of(0, 2, 3)))
/* trackIndices= */ ImmutableList.of(0, 2, 3)))
.build())
.setDisabledTrackTypes(ImmutableSet.of(C.TRACK_TYPE_AUDIO))
.build();

View file

@ -178,6 +178,7 @@ public final class DashMediaSource extends BaseMediaSource {
return this;
}
@Deprecated
@Override
public Factory setDrmSessionManager(@Nullable DrmSessionManager drmSessionManager) {
if (drmSessionManager == null) {
@ -188,6 +189,7 @@ public final class DashMediaSource extends BaseMediaSource {
return this;
}
@Deprecated
@Override
public Factory setDrmHttpDataSourceFactory(
@Nullable HttpDataSource.Factory drmHttpDataSourceFactory) {
@ -198,6 +200,7 @@ public final class DashMediaSource extends BaseMediaSource {
return this;
}
@Deprecated
@Override
public Factory setDrmUserAgent(@Nullable String userAgent) {
if (!usingCustomDrmSessionManagerProvider) {
@ -1053,8 +1056,13 @@ public final class DashMediaSource extends BaseMediaSource {
maxPlaybackSpeed = manifest.serviceDescription.maxPlaybackSpeed;
}
liveConfiguration =
new MediaItem.LiveConfiguration(
targetOffsetMs, minLiveOffsetMs, maxLiveOffsetMs, minPlaybackSpeed, maxPlaybackSpeed);
new MediaItem.LiveConfiguration.Builder()
.setTargetOffsetMs(targetOffsetMs)
.setMinOffsetMs(minLiveOffsetMs)
.setMaxOffsetMs(maxLiveOffsetMs)
.setMinPlaybackSpeed(minPlaybackSpeed)
.setMaxPlaybackSpeed(maxPlaybackSpeed)
.build();
}
private void scheduleManifestRefresh(long delayUntilNextLoadMs) {

View file

@ -27,6 +27,7 @@ import com.google.android.exoplayer2.MediaItem;
import com.google.android.exoplayer2.ParserException;
import com.google.android.exoplayer2.Timeline;
import com.google.android.exoplayer2.Timeline.Window;
import com.google.android.exoplayer2.analytics.PlayerId;
import com.google.android.exoplayer2.offline.StreamKey;
import com.google.android.exoplayer2.source.MediaSource;
import com.google.android.exoplayer2.source.MediaSource.MediaSourceCaller;
@ -484,7 +485,7 @@ public final class DashMediaSourceTest {
countDownLatch.countDown();
}
};
mediaSource.prepareSource(caller, /* mediaTransferListener= */ null);
mediaSource.prepareSource(caller, /* mediaTransferListener= */ null, PlayerId.UNSET);
while (!countDownLatch.await(/* timeout= */ 10, MILLISECONDS)) {
ShadowLooper.idleMainLooper();
}

View file

@ -18,10 +18,12 @@
xmlns:tools="http://schemas.android.com/tools"
package="com.google.android.exoplayer2.upstream.test">
<uses-permission android:name="android.permission.INTERNET"/>
<uses-sdk/>
<application
android:allowBackup="false"
android:usesCleartextTraffic="true"
tools:ignore="MissingApplicationIcon,HardcodedDebugMode">
<provider
android:authorities="com.google.android.exoplayer2.testutil.AssetContentProvider"

View file

@ -20,6 +20,7 @@ import androidx.test.ext.junit.runners.AndroidJUnit4;
import com.google.android.exoplayer2.testutil.DataSourceContractTest;
import com.google.android.exoplayer2.testutil.HttpDataSourceTestEnv;
import com.google.common.collect.ImmutableList;
import org.junit.Ignore;
import org.junit.Rule;
import org.junit.runner.RunWith;
@ -43,4 +44,8 @@ public class DefaultHttpDataSourceContractTest extends DataSourceContractTest {
protected Uri getNotFoundUri() {
return Uri.parse(httpDataSourceTestEnv.getNonexistentUrl());
}
@Override
@Ignore("internal b/205811776")
public void getResponseHeaders_noNullKeysOrValues() {}
}

View file

@ -30,6 +30,10 @@ public final class WavUtil {
public static final int FMT_FOURCC = 0x666d7420;
/** Four character code for "data". */
public static final int DATA_FOURCC = 0x64617461;
/** Four character code for "RF64". */
public static final int RF64_FOURCC = 0x52463634;
/** Four character code for "ds64". */
public static final int DS64_FOURCC = 0x64733634;
/** WAVE type value for integer PCM audio data. */
public static final int TYPE_PCM = 0x0001;

View file

@ -31,6 +31,7 @@ import com.google.android.exoplayer2.extractor.ExtractorsFactory;
import com.google.android.exoplayer2.extractor.PositionHolder;
import com.google.android.exoplayer2.extractor.TrackOutput;
import com.google.android.exoplayer2.util.Assertions;
import com.google.android.exoplayer2.util.Log;
import com.google.android.exoplayer2.util.MimeTypes;
import com.google.android.exoplayer2.util.ParsableByteArray;
import com.google.android.exoplayer2.util.Util;
@ -47,6 +48,8 @@ import org.checkerframework.checker.nullness.qual.RequiresNonNull;
/** Extracts data from WAV byte streams. */
public final class WavExtractor implements Extractor {
private static final String TAG = "WavExtractor";
/**
* When outputting PCM data to a {@link TrackOutput}, we can choose how many frames are grouped
* into each sample, and hence each sample's duration. This is the target number of samples to
@ -63,6 +66,7 @@ public final class WavExtractor implements Extractor {
@Target({ElementType.TYPE_USE})
@IntDef({
STATE_READING_FILE_TYPE,
STATE_READING_RF64_SAMPLE_DATA_SIZE,
STATE_READING_FORMAT,
STATE_SKIPPING_TO_SAMPLE_DATA,
STATE_READING_SAMPLE_DATA
@ -70,19 +74,22 @@ public final class WavExtractor implements Extractor {
private @interface State {}
private static final int STATE_READING_FILE_TYPE = 0;
private static final int STATE_READING_FORMAT = 1;
private static final int STATE_SKIPPING_TO_SAMPLE_DATA = 2;
private static final int STATE_READING_SAMPLE_DATA = 3;
private static final int STATE_READING_RF64_SAMPLE_DATA_SIZE = 1;
private static final int STATE_READING_FORMAT = 2;
private static final int STATE_SKIPPING_TO_SAMPLE_DATA = 3;
private static final int STATE_READING_SAMPLE_DATA = 4;
private @MonotonicNonNull ExtractorOutput extractorOutput;
private @MonotonicNonNull TrackOutput trackOutput;
private @State int state;
private long rf64SampleDataSize;
private @MonotonicNonNull OutputWriter outputWriter;
private int dataStartPosition;
private long dataEndPosition;
public WavExtractor() {
state = STATE_READING_FILE_TYPE;
rf64SampleDataSize = C.LENGTH_UNSET;
dataStartPosition = C.POSITION_UNSET;
dataEndPosition = C.POSITION_UNSET;
}
@ -120,6 +127,9 @@ public final class WavExtractor implements Extractor {
case STATE_READING_FILE_TYPE:
readFileType(input);
return Extractor.RESULT_CONTINUE;
case STATE_READING_RF64_SAMPLE_DATA_SIZE:
readRf64SampleDataSize(input);
return Extractor.RESULT_CONTINUE;
case STATE_READING_FORMAT:
readFormat(input);
return Extractor.RESULT_CONTINUE;
@ -152,6 +162,11 @@ public final class WavExtractor implements Extractor {
"Unsupported or unrecognized wav file type.", /* cause= */ null);
}
input.skipFully((int) (input.getPeekPosition() - input.getPosition()));
state = STATE_READING_RF64_SAMPLE_DATA_SIZE;
}
/**
 * Reads the sample data size from the ds64 chunk of an RF64 stream, if present, storing it in
 * {@link #rf64SampleDataSize} ({@code C.LENGTH_UNSET} if the chunk is absent), then advances to
 * {@link #STATE_READING_FORMAT}.
 *
 * @param input The {@link ExtractorInput} positioned after the WAVE tag.
 * @throws IOException If reading from the input fails.
 */
private void readRf64SampleDataSize(ExtractorInput input) throws IOException {
  rf64SampleDataSize = WavHeaderReader.readRf64SampleDataSize(input);
  state = STATE_READING_FORMAT;
}
@ -194,7 +209,18 @@ public final class WavExtractor implements Extractor {
/**
 * Skips the input to the start of sample data, computes the sample data bounds, initializes the
 * output writer with them, and advances to {@link #STATE_READING_SAMPLE_DATA}.
 *
 * <p>For RF64 streams the data chunk size field may be the 32-bit sentinel 0xFFFFFFFF, in which
 * case the 64-bit size previously read from the ds64 chunk is used instead.
 *
 * @param input The {@link ExtractorInput} positioned at a chunk header.
 * @throws IOException If reading from the input fails.
 */
private void skipToSampleData(ExtractorInput input) throws IOException {
  Pair<Long, Long> dataBounds = WavHeaderReader.skipToSampleData(input);
  dataStartPosition = dataBounds.first.intValue();
  long dataSize = dataBounds.second;
  if (rf64SampleDataSize != C.LENGTH_UNSET && dataSize == 0xFFFFFFFFL) {
    // Following EBU - Tech 3306-2007, the data size indicated in the ds64 chunk should only be
    // used if the size of the data chunk is unset.
    dataSize = rf64SampleDataSize;
  }
  dataEndPosition = dataStartPosition + dataSize;
  // Clamp to the input length if the declared size overruns the file (truncated/badly muxed WAV).
  long inputLength = input.getLength();
  if (inputLength != C.LENGTH_UNSET && dataEndPosition > inputLength) {
    Log.w(TAG, "Data exceeds input length: " + dataEndPosition + ", " + inputLength);
    dataEndPosition = inputLength;
  }
  Assertions.checkNotNull(outputWriter).init(dataStartPosition, dataEndPosition);
  state = STATE_READING_SAMPLE_DATA;
}

View file

@ -32,20 +32,19 @@ import java.io.IOException;
private static final String TAG = "WavHeaderReader";
/**
* Returns whether the given {@code input} starts with a RIFF chunk header, followed by a WAVE
* tag.
* Returns whether the given {@code input} starts with a RIFF or RF64 chunk header, followed by a
* WAVE tag.
*
* @param input The input stream to peek from. The position should point to the start of the
* stream.
* @return Whether the given {@code input} starts with a RIFF chunk header, followed by a WAVE
* tag.
* @return Whether the given {@code input} starts with a RIFF or RF64 chunk header, followed by a
* WAVE tag.
* @throws IOException If peeking from the input fails.
*/
public static boolean checkFileType(ExtractorInput input) throws IOException {
ParsableByteArray scratch = new ParsableByteArray(ChunkHeader.SIZE_IN_BYTES);
// Attempt to read the RIFF chunk.
ChunkHeader chunkHeader = ChunkHeader.peek(input, scratch);
if (chunkHeader.id != WavUtil.RIFF_FOURCC) {
if (chunkHeader.id != WavUtil.RIFF_FOURCC && chunkHeader.id != WavUtil.RF64_FOURCC) {
return false;
}
@ -60,25 +59,44 @@ import java.io.IOException;
return true;
}
/**
 * Reads the ds64 chunk defined in EBU - TECH 3306-2007, if present. If there is no such chunk,
 * the input's position is left unchanged.
 *
 * @param input Input stream to read from. The position should point to the byte following the
 *     WAVE tag.
 * @throws IOException If reading from the input fails.
 * @return The value of the data size field in the ds64 chunk, or {@link C#LENGTH_UNSET} if there
 *     is no such chunk.
 */
public static long readRf64SampleDataSize(ExtractorInput input) throws IOException {
  ParsableByteArray scratch = new ParsableByteArray(ChunkHeader.SIZE_IN_BYTES);
  // Peek (not read) the header so that, if this is not a ds64 chunk, the read position is left
  // untouched for the subsequent format-chunk parsing.
  ChunkHeader chunkHeader = ChunkHeader.peek(input, scratch);
  if (chunkHeader.id != WavUtil.DS64_FOURCC) {
    input.resetPeekPosition();
    return C.LENGTH_UNSET;
  }
  // ds64 body layout: 8 bytes RIFF size, then 8 bytes data (sample) size. Skip the former.
  input.advancePeekPosition(8); // RIFF size
  scratch.setPosition(0);
  input.peekFully(scratch.getData(), 0, 8);
  long sampleDataSize = scratch.readLittleEndianLong();
  // Consume the whole chunk (header + body) so the read position points at the next chunk.
  input.skipFully(ChunkHeader.SIZE_IN_BYTES + (int) chunkHeader.size);
  return sampleDataSize;
}
/**
* Reads and returns a {@code WavFormat}.
*
* @param input Input stream to read the WAV format from. The position should point to the byte
* following the WAVE tag.
* following the ds64 chunk if present, or to the byte following the WAVE tag otherwise.
* @throws IOException If reading from the input fails.
* @return A new {@code WavFormat} read from {@code input}.
*/
public static WavFormat readFormat(ExtractorInput input) throws IOException {
// Allocate a scratch buffer large enough to store the format chunk.
ParsableByteArray scratch = new ParsableByteArray(16);
// Skip chunks until we find the format chunk.
ChunkHeader chunkHeader = ChunkHeader.peek(input, scratch);
while (chunkHeader.id != WavUtil.FMT_FOURCC) {
input.skipFully(ChunkHeader.SIZE_IN_BYTES + (int) chunkHeader.size);
chunkHeader = ChunkHeader.peek(input, scratch);
}
ChunkHeader chunkHeader = skipToChunk(/* chunkId= */ WavUtil.FMT_FOURCC, input, scratch);
Assertions.checkState(chunkHeader.size >= 16);
input.peekFully(scratch.getData(), 0, 16);
scratch.setPosition(0);
@ -110,12 +128,14 @@ import java.io.IOException;
}
/**
* Skips to the data in the given WAV input stream, and returns its bounds. After calling, the
* input stream's position will point to the start of sample data in the WAV. If an exception is
* thrown, the input position will be left pointing to a chunk header.
* Skips to the data in the given WAV input stream, and returns its start position and size. After
* calling, the input stream's position will point to the start of sample data in the WAV. If an
* exception is thrown, the input position will be left pointing to a chunk header (that may not
* be the data chunk header).
*
* @param input The input stream, whose read position must be pointing to a valid chunk header.
* @return The byte positions at which the data starts (inclusive) and ends (exclusive).
* @return The byte positions at which the data starts (inclusive) and the size of the data, in
* bytes.
* @throws ParserException If an error occurs parsing chunks.
* @throws IOException If reading from the input fails.
*/
@ -125,8 +145,31 @@ import java.io.IOException;
ParsableByteArray scratch = new ParsableByteArray(ChunkHeader.SIZE_IN_BYTES);
// Skip all chunks until we find the data header.
ChunkHeader chunkHeader = skipToChunk(/* chunkId= */ WavUtil.DATA_FOURCC, input, scratch);
// Skip past the "data" header.
input.skipFully(ChunkHeader.SIZE_IN_BYTES);
long dataStartPosition = input.getPosition();
return Pair.create(dataStartPosition, chunkHeader.size);
}
/**
* Skips to the chunk header corresponding to the {@code chunkId} provided. After calling, the
* input stream's position will point to the chunk header with provided {@code chunkId} and the
* peek position to the chunk body. If an exception is thrown, the input position will be left
* pointing to a chunk header (that may not be the one corresponding to the {@code chunkId}).
*
* @param chunkId The ID of the chunk to skip to.
* @param input The input stream, whose read position must be pointing to a valid chunk header.
* @param scratch A scratch buffer to read the chunk headers.
* @return The {@link ChunkHeader} corresponding to the {@code chunkId} provided.
* @throws ParserException If an error occurs parsing chunks.
* @throws IOException If reading from the input fails.
*/
private static ChunkHeader skipToChunk(
int chunkId, ExtractorInput input, ParsableByteArray scratch) throws IOException {
ChunkHeader chunkHeader = ChunkHeader.peek(input, scratch);
while (chunkHeader.id != WavUtil.DATA_FOURCC) {
while (chunkHeader.id != chunkId) {
Log.w(TAG, "Ignoring unknown WAV chunk: " + chunkHeader.id);
long bytesToSkip = ChunkHeader.SIZE_IN_BYTES + chunkHeader.size;
if (bytesToSkip > Integer.MAX_VALUE) {
@ -136,17 +179,7 @@ import java.io.IOException;
input.skipFully((int) bytesToSkip);
chunkHeader = ChunkHeader.peek(input, scratch);
}
// Skip past the "data" header.
input.skipFully(ChunkHeader.SIZE_IN_BYTES);
long dataStartPosition = input.getPosition();
long dataEndPosition = dataStartPosition + chunkHeader.size;
long inputLength = input.getLength();
if (inputLength != C.LENGTH_UNSET && dataEndPosition > inputLength) {
Log.w(TAG, "Data exceeds input length: " + dataEndPosition + ", " + inputLength);
dataEndPosition = inputLength;
}
return Pair.create(dataStartPosition, dataEndPosition);
return chunkHeader;
}
private WavHeaderReader() {

View file

@ -53,4 +53,10 @@ public final class WavExtractorTest {
ExtractorAsserts.assertBehavior(
WavExtractor::new, "media/wav/sample_ima_adpcm.wav", simulationConfig);
}
/** Asserts extraction behavior for an RF64 (64-bit WAV variant, EBU Tech 3306) sample file. */
@Test
public void sample_rf64() throws Exception {
  ExtractorAsserts.assertBehavior(
      WavExtractor::new, "media/wav/sample_rf64.wav", simulationConfig);
}
}

View file

@ -297,6 +297,7 @@ public final class HlsMediaSource extends BaseMediaSource
return this;
}
@Deprecated
@Override
public Factory setDrmSessionManager(@Nullable DrmSessionManager drmSessionManager) {
if (drmSessionManager == null) {
@ -307,6 +308,7 @@ public final class HlsMediaSource extends BaseMediaSource
return this;
}
@Deprecated
@Override
public Factory setDrmHttpDataSourceFactory(
@Nullable HttpDataSource.Factory drmHttpDataSourceFactory) {
@ -317,6 +319,7 @@ public final class HlsMediaSource extends BaseMediaSource
return this;
}
@Deprecated
@Override
public Factory setDrmUserAgent(@Nullable String userAgent) {
if (!usingCustomDrmSessionManagerProvider) {

View file

@ -25,6 +25,7 @@ import androidx.test.ext.junit.runners.AndroidJUnit4;
import com.google.android.exoplayer2.MediaItem;
import com.google.android.exoplayer2.ParserException;
import com.google.android.exoplayer2.Timeline;
import com.google.android.exoplayer2.analytics.PlayerId;
import com.google.android.exoplayer2.offline.StreamKey;
import com.google.android.exoplayer2.source.MediaSource;
import com.google.android.exoplayer2.source.hls.playlist.HlsMediaPlaylist;
@ -752,7 +753,7 @@ public class HlsMediaSourceTest {
List<Timeline> timelines = new ArrayList<>();
MediaSource.MediaSourceCaller mediaSourceCaller = (source, timeline) -> timelines.add(timeline);
mediaSource.prepareSource(mediaSourceCaller, null);
mediaSource.prepareSource(mediaSourceCaller, /* mediaTransferListener= */ null, PlayerId.UNSET);
runMainLooperUntil(() -> timelines.size() == 1);
mediaSource.onPrimaryPlaylistRefreshed(secondPlaylist);
runMainLooperUntil(() -> timelines.size() == 2);
@ -785,7 +786,9 @@ public class HlsMediaSourceTest {
throws TimeoutException {
AtomicReference<Timeline> receivedTimeline = new AtomicReference<>();
mediaSource.prepareSource(
(source, timeline) -> receivedTimeline.set(timeline), /* mediaTransferListener= */ null);
(source, timeline) -> receivedTimeline.set(timeline),
/* mediaTransferListener= */ null,
PlayerId.UNSET);
runMainLooperUntil(() -> receivedTimeline.get() != null);
return receivedTimeline.get();
}

View file

@ -205,6 +205,7 @@ public final class SsMediaSource extends BaseMediaSource
return this;
}
@Deprecated
@Override
public Factory setDrmSessionManager(@Nullable DrmSessionManager drmSessionManager) {
if (drmSessionManager == null) {
@ -215,6 +216,7 @@ public final class SsMediaSource extends BaseMediaSource
return this;
}
@Deprecated
@Override
public Factory setDrmHttpDataSourceFactory(
@Nullable HttpDataSource.Factory drmHttpDataSourceFactory) {
@ -225,6 +227,7 @@ public final class SsMediaSource extends BaseMediaSource
return this;
}
@Deprecated
@Override
public Factory setDrmUserAgent(@Nullable String userAgent) {
if (!usingCustomDrmSessionManagerProvider) {

View file

@ -0,0 +1,384 @@
/*
* Copyright 2021 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.google.android.exoplayer2.transformer;
import static com.google.android.exoplayer2.util.Assertions.checkNotNull;
import static com.google.android.exoplayer2.util.Assertions.checkState;
import static com.google.android.exoplayer2.util.Assertions.checkStateNotNull;
import static java.lang.Math.min;
import android.media.MediaCodec.BufferInfo;
import androidx.annotation.Nullable;
import androidx.annotation.RequiresApi;
import com.google.android.exoplayer2.C;
import com.google.android.exoplayer2.ExoPlaybackException;
import com.google.android.exoplayer2.Format;
import com.google.android.exoplayer2.PlaybackException;
import com.google.android.exoplayer2.audio.AudioProcessor;
import com.google.android.exoplayer2.audio.AudioProcessor.AudioFormat;
import com.google.android.exoplayer2.audio.SonicAudioProcessor;
import com.google.android.exoplayer2.decoder.DecoderInputBuffer;
import java.io.IOException;
import java.nio.ByteBuffer;
import org.checkerframework.checker.nullness.qual.EnsuresNonNullIf;
import org.checkerframework.checker.nullness.qual.MonotonicNonNull;
import org.checkerframework.checker.nullness.qual.RequiresNonNull;
/**
 * Pipeline to decode audio samples, apply transformations on the raw samples, and re-encode them.
 *
 * <p>Data flows decoder -> (optionally Sonic, when slow-motion flattening is requested) ->
 * encoder. The encoder and Sonic are configured lazily, once the decoder has produced its output
 * format.
 */
@RequiresApi(18)
/* package */ final class AudioSamplePipeline implements SamplePipeline {

  private static final String TAG = "AudioSamplePipeline";
  // Fixed output bitrate for the re-encoded audio (128 kbit/s).
  private static final int DEFAULT_ENCODER_BITRATE = 128 * 1024;

  private final MediaCodecAdapterWrapper decoder;
  private final Format decoderInputFormat;
  private final DecoderInputBuffer decoderInputBuffer;
  // Used to change the audio speed when flattening slow-motion input.
  private final SonicAudioProcessor sonicAudioProcessor;
  private final SpeedProvider speedProvider;
  private final DecoderInputBuffer encoderInputBuffer;
  private final DecoderInputBuffer encoderOutputBuffer;
  private final Transformation transformation;
  private final int rendererIndex;

  // Set once ensureEncoderAndAudioProcessingConfigured() succeeds; never reset afterwards.
  private @MonotonicNonNull AudioFormat encoderInputAudioFormat;
  private @MonotonicNonNull MediaCodecAdapterWrapper encoder;
  // Presentation time assigned to the next buffer queued to the encoder, in microseconds.
  private long nextEncoderInputBufferTimeUs;
  // Carry-over remainder from the duration division; keeps timestamps from drifting over time.
  private long encoderBufferDurationRemainder;
  private ByteBuffer sonicOutputBuffer;
  // True while Sonic is being drained before applying a new speed.
  private boolean drainingSonicForSpeedChange;
  private float currentSpeed;

  /**
   * Creates the pipeline and its audio decoder.
   *
   * @param decoderInputFormat The {@link Format} of the input samples.
   * @param transformation The {@link Transformation} to apply.
   * @param rendererIndex Index of the renderer, used for error reporting.
   * @throws ExoPlaybackException If creating the decoder fails.
   */
  public AudioSamplePipeline(
      Format decoderInputFormat, Transformation transformation, int rendererIndex)
      throws ExoPlaybackException {
    this.decoderInputFormat = decoderInputFormat;
    this.transformation = transformation;
    this.rendererIndex = rendererIndex;
    decoderInputBuffer =
        new DecoderInputBuffer(DecoderInputBuffer.BUFFER_REPLACEMENT_MODE_DISABLED);
    encoderInputBuffer =
        new DecoderInputBuffer(DecoderInputBuffer.BUFFER_REPLACEMENT_MODE_DISABLED);
    encoderOutputBuffer =
        new DecoderInputBuffer(DecoderInputBuffer.BUFFER_REPLACEMENT_MODE_DISABLED);
    sonicAudioProcessor = new SonicAudioProcessor();
    sonicOutputBuffer = AudioProcessor.EMPTY_BUFFER;
    speedProvider = new SegmentSpeedProvider(decoderInputFormat);
    currentSpeed = speedProvider.getSpeed(0);
    try {
      this.decoder = MediaCodecAdapterWrapper.createForAudioDecoding(decoderInputFormat);
    } catch (IOException e) {
      // TODO (internal b/184262323): Assign an adequate error code.
      throw ExoPlaybackException.createForRenderer(
          e,
          TAG,
          rendererIndex,
          decoderInputFormat,
          /* rendererFormatSupport= */ C.FORMAT_HANDLED,
          /* isRecoverable= */ false,
          PlaybackException.ERROR_CODE_UNSPECIFIED);
    }
  }

  @Override
  public void release() {
    // The encoder may never have been configured, hence the null check.
    sonicAudioProcessor.reset();
    decoder.release();
    if (encoder != null) {
      encoder.release();
    }
  }

  @Override
  public boolean processData() throws ExoPlaybackException {
    // No processing is possible until the decoder output format is known and the encoder exists.
    if (!ensureEncoderAndAudioProcessingConfigured()) {
      return false;
    }
    if (sonicAudioProcessor.isActive()) {
      return feedEncoderFromSonic() || feedSonicFromDecoder();
    } else {
      return feedEncoderFromDecoder();
    }
  }

  @Override
  @Nullable
  public DecoderInputBuffer dequeueInputBuffer() {
    return decoder.maybeDequeueInputBuffer(decoderInputBuffer) ? decoderInputBuffer : null;
  }

  @Override
  public void queueInputBuffer() {
    decoder.queueInputBuffer(decoderInputBuffer);
  }

  @Override
  @Nullable
  public Format getOutputFormat() {
    return encoder != null ? encoder.getOutputFormat() : null;
  }

  @Override
  public boolean isEnded() {
    return encoder != null && encoder.isEnded();
  }

  @Override
  @Nullable
  public DecoderInputBuffer getOutputBuffer() {
    if (encoder != null) {
      encoderOutputBuffer.data = encoder.getOutputBuffer();
      if (encoderOutputBuffer.data != null) {
        encoderOutputBuffer.timeUs = checkNotNull(encoder.getOutputBufferInfo()).presentationTimeUs;
        return encoderOutputBuffer;
      }
    }
    return null;
  }

  @Override
  public void releaseOutputBuffer() {
    checkStateNotNull(encoder).releaseOutputBuffer();
  }

  /**
   * Attempts to pass decoder output data to the encoder, and returns whether it may be possible to
   * pass more data immediately by calling this method again.
   */
  @RequiresNonNull({"encoderInputAudioFormat", "encoder"})
  private boolean feedEncoderFromDecoder() {
    if (!encoder.maybeDequeueInputBuffer(encoderInputBuffer)) {
      return false;
    }
    if (decoder.isEnded()) {
      queueEndOfStreamToEncoder();
      return false;
    }
    @Nullable ByteBuffer decoderOutputBuffer = decoder.getOutputBuffer();
    if (decoderOutputBuffer == null) {
      return false;
    }
    // A speed change activates Sonic; return without feeding so the Sonic path takes over.
    if (isSpeedChanging(checkNotNull(decoder.getOutputBufferInfo()))) {
      flushSonicAndSetSpeed(currentSpeed);
      return false;
    }
    feedEncoder(decoderOutputBuffer);
    // Only release the decoder buffer once all of its data has been consumed.
    if (!decoderOutputBuffer.hasRemaining()) {
      decoder.releaseOutputBuffer();
    }
    return true;
  }

  /**
   * Attempts to pass audio processor output data to the encoder, and returns whether it may be
   * possible to pass more data immediately by calling this method again.
   */
  @RequiresNonNull({"encoderInputAudioFormat", "encoder"})
  private boolean feedEncoderFromSonic() {
    if (!encoder.maybeDequeueInputBuffer(encoderInputBuffer)) {
      return false;
    }
    if (!sonicOutputBuffer.hasRemaining()) {
      sonicOutputBuffer = sonicAudioProcessor.getOutput();
      if (!sonicOutputBuffer.hasRemaining()) {
        // Only signal EOS downstream once both the decoder and Sonic are fully drained.
        if (decoder.isEnded() && sonicAudioProcessor.isEnded()) {
          queueEndOfStreamToEncoder();
        }
        return false;
      }
    }
    feedEncoder(sonicOutputBuffer);
    return true;
  }

  /**
   * Attempts to process decoder output data, and returns whether it may be possible to process more
   * data immediately by calling this method again.
   */
  private boolean feedSonicFromDecoder() {
    if (drainingSonicForSpeedChange) {
      // Wait until Sonic has flushed out all output produced at the old speed, then re-flush it
      // at the new speed before accepting more input.
      if (sonicAudioProcessor.isEnded() && !sonicOutputBuffer.hasRemaining()) {
        flushSonicAndSetSpeed(currentSpeed);
        drainingSonicForSpeedChange = false;
      }
      return false;
    }
    // Sonic invalidates any previous output buffer when more input is queued, so we don't queue if
    // there is output still to be processed.
    if (sonicOutputBuffer.hasRemaining()) {
      return false;
    }
    if (decoder.isEnded()) {
      sonicAudioProcessor.queueEndOfStream();
      return false;
    }
    checkState(!sonicAudioProcessor.isEnded());
    @Nullable ByteBuffer decoderOutputBuffer = decoder.getOutputBuffer();
    if (decoderOutputBuffer == null) {
      return false;
    }
    if (isSpeedChanging(checkNotNull(decoder.getOutputBufferInfo()))) {
      sonicAudioProcessor.queueEndOfStream();
      drainingSonicForSpeedChange = true;
      return false;
    }
    sonicAudioProcessor.queueInput(decoderOutputBuffer);
    if (!decoderOutputBuffer.hasRemaining()) {
      decoder.releaseOutputBuffer();
    }
    return true;
  }

  /**
   * Feeds as much data as possible between the current position and limit of the specified {@link
   * ByteBuffer} to the encoder, and advances its position by the number of bytes fed.
   */
  @RequiresNonNull({"encoder", "encoderInputAudioFormat"})
  private void feedEncoder(ByteBuffer inputBuffer) {
    ByteBuffer encoderInputBufferData = checkNotNull(encoderInputBuffer.data);
    int bufferLimit = inputBuffer.limit();
    // Temporarily cap the input limit to what fits in the encoder buffer, then restore it below.
    inputBuffer.limit(min(bufferLimit, inputBuffer.position() + encoderInputBufferData.capacity()));
    encoderInputBufferData.put(inputBuffer);
    encoderInputBuffer.timeUs = nextEncoderInputBufferTimeUs;
    computeNextEncoderInputBufferTimeUs(
        /* bytesWritten= */ encoderInputBufferData.position(),
        encoderInputAudioFormat.bytesPerFrame,
        encoderInputAudioFormat.sampleRate);
    encoderInputBuffer.setFlags(0);
    encoderInputBuffer.flip();
    inputBuffer.limit(bufferLimit);
    encoder.queueInputBuffer(encoderInputBuffer);
  }

  /** Queues an empty end-of-stream buffer to the encoder. */
  @RequiresNonNull("encoder")
  private void queueEndOfStreamToEncoder() {
    checkState(checkNotNull(encoderInputBuffer.data).position() == 0);
    encoderInputBuffer.timeUs = nextEncoderInputBufferTimeUs;
    encoderInputBuffer.addFlag(C.BUFFER_FLAG_END_OF_STREAM);
    encoderInputBuffer.flip();
    // Queuing EOS should only occur with an empty buffer.
    encoder.queueInputBuffer(encoderInputBuffer);
  }

  /**
   * Attempts to configure the {@link #encoder} and Sonic (if applicable), if they have not been
   * configured yet, and returns whether they have been configured.
   */
  @EnsuresNonNullIf(
      expression = {"encoder", "encoderInputAudioFormat"},
      result = true)
  private boolean ensureEncoderAndAudioProcessingConfigured() throws ExoPlaybackException {
    if (encoder != null && encoderInputAudioFormat != null) {
      return true;
    }
    // Configuration has to wait until the decoder has reported its output format.
    @Nullable Format decoderOutputFormat = decoder.getOutputFormat();
    if (decoderOutputFormat == null) {
      return false;
    }
    AudioFormat outputAudioFormat =
        new AudioFormat(
            decoderOutputFormat.sampleRate,
            decoderOutputFormat.channelCount,
            decoderOutputFormat.pcmEncoding);
    if (transformation.flattenForSlowMotion) {
      try {
        outputAudioFormat = sonicAudioProcessor.configure(outputAudioFormat);
        flushSonicAndSetSpeed(currentSpeed);
      } catch (AudioProcessor.UnhandledAudioFormatException e) {
        // TODO(internal b/192864511): Assign an adequate error code.
        throw createRendererException(e, PlaybackException.ERROR_CODE_UNSPECIFIED);
      }
    }
    // Fall back to the input MIME type when the transformation does not request a specific one.
    String audioMimeType =
        transformation.audioMimeType == null
            ? decoderInputFormat.sampleMimeType
            : transformation.audioMimeType;
    try {
      encoder =
          MediaCodecAdapterWrapper.createForAudioEncoding(
              new Format.Builder()
                  .setSampleMimeType(audioMimeType)
                  .setSampleRate(outputAudioFormat.sampleRate)
                  .setChannelCount(outputAudioFormat.channelCount)
                  .setAverageBitrate(DEFAULT_ENCODER_BITRATE)
                  .build());
    } catch (IOException e) {
      // TODO(internal b/192864511): Assign an adequate error code.
      throw createRendererException(e, PlaybackException.ERROR_CODE_UNSPECIFIED);
    }
    encoderInputAudioFormat = outputAudioFormat;
    return true;
  }

  /**
   * Returns whether the target speed changes at the given buffer's presentation time, updating
   * {@link #currentSpeed} as a side effect.
   */
  private boolean isSpeedChanging(BufferInfo bufferInfo) {
    if (!transformation.flattenForSlowMotion) {
      return false;
    }
    float newSpeed = speedProvider.getSpeed(bufferInfo.presentationTimeUs);
    boolean speedChanging = newSpeed != currentSpeed;
    currentSpeed = newSpeed;
    return speedChanging;
  }

  /** Applies {@code speed} to both rate and pitch, then flushes Sonic so it takes effect. */
  private void flushSonicAndSetSpeed(float speed) {
    sonicAudioProcessor.setSpeed(speed);
    sonicAudioProcessor.setPitch(speed);
    sonicAudioProcessor.flush();
  }

  /** Wraps {@code cause} in an {@link ExoPlaybackException} carrying this pipeline's context. */
  private ExoPlaybackException createRendererException(Throwable cause, int errorCode) {
    return ExoPlaybackException.createForRenderer(
        cause,
        TAG,
        rendererIndex,
        decoderInputFormat,
        /* rendererFormatSupport= */ C.FORMAT_HANDLED,
        /* isRecoverable= */ false,
        errorCode);
  }

  /**
   * Advances {@link #nextEncoderInputBufferTimeUs} by the duration of {@code bytesWritten} bytes
   * of PCM, tracking the division remainder so timestamps do not drift.
   */
  private void computeNextEncoderInputBufferTimeUs(
      long bytesWritten, int bytesPerFrame, int sampleRate) {
    // The calculation below accounts for remainders and rounding. Without that it corresponds to
    // the following:
    // bufferDurationUs = numberOfFramesInBuffer * sampleDurationUs
    //     where numberOfFramesInBuffer = bytesWritten / bytesPerFrame
    //     and sampleDurationUs = C.MICROS_PER_SECOND / sampleRate
    long numerator = bytesWritten * C.MICROS_PER_SECOND + encoderBufferDurationRemainder;
    long denominator = (long) bytesPerFrame * sampleRate;
    long bufferDurationUs = numerator / denominator;
    encoderBufferDurationRemainder = numerator - bufferDurationUs * denominator;
    if (encoderBufferDurationRemainder > 0) { // Ceil division result.
      bufferDurationUs += 1;
      encoderBufferDurationRemainder -= denominator;
    }
    nextEncoderInputBufferTimeUs += bufferDurationUs;
  }
}

View file

@ -0,0 +1,77 @@
/*
* Copyright 2021 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.google.android.exoplayer2.transformer;
import androidx.annotation.Nullable;
import com.google.android.exoplayer2.Format;
import com.google.android.exoplayer2.decoder.DecoderInputBuffer;
/**
 * Pipeline that passes through the samples without any re-encoding or transformation.
 *
 * <p>A single {@link DecoderInputBuffer} is handed out for input and surfaced unchanged as
 * output; while it holds a queued sample, no further input is accepted.
 */
/* package */ final class PassthroughSamplePipeline implements SamplePipeline {

  private final DecoderInputBuffer sampleBuffer;
  private final Format sampleFormat;
  // Whether sampleBuffer currently holds a queued sample awaiting release.
  private boolean sampleBufferOccupied;

  public PassthroughSamplePipeline(Format format) {
    sampleFormat = format;
    sampleBuffer = new DecoderInputBuffer(DecoderInputBuffer.BUFFER_REPLACEMENT_MODE_DIRECT);
    sampleBufferOccupied = false;
  }

  @Override
  @Nullable
  public DecoderInputBuffer dequeueInputBuffer() {
    // Refuse new input while the previously queued sample has not been released.
    if (sampleBufferOccupied) {
      return null;
    }
    return sampleBuffer;
  }

  @Override
  public void queueInputBuffer() {
    sampleBufferOccupied = true;
  }

  @Override
  public boolean processData() {
    // Nothing to transform: samples flow straight from input to output.
    return false;
  }

  @Override
  public Format getOutputFormat() {
    return sampleFormat;
  }

  @Override
  @Nullable
  public DecoderInputBuffer getOutputBuffer() {
    if (sampleBufferOccupied) {
      return sampleBuffer;
    }
    return null;
  }

  @Override
  public void releaseOutputBuffer() {
    sampleBuffer.clear();
    sampleBufferOccupied = false;
  }

  @Override
  public boolean isEnded() {
    return sampleBuffer.isEndOfStream();
  }

  @Override
  public void release() {}
}

View file

@ -0,0 +1,69 @@
/*
* Copyright 2021 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.google.android.exoplayer2.transformer;
import androidx.annotation.Nullable;
import com.google.android.exoplayer2.ExoPlaybackException;
import com.google.android.exoplayer2.Format;
import com.google.android.exoplayer2.decoder.DecoderInputBuffer;
/**
 * Pipeline for processing {@link DecoderInputBuffer DecoderInputBuffers}.
 *
 * <p>This pipeline can be used to implement transformations of audio or video samples.
 */
/* package */ interface SamplePipeline {

  /** Returns a buffer if the pipeline is ready to accept input, and {@code null} otherwise. */
  @Nullable
  DecoderInputBuffer dequeueInputBuffer();

  /**
   * Informs the pipeline that its input buffer contains new input.
   *
   * <p>Should be called after filling the input buffer from {@link #dequeueInputBuffer()} with new
   * input.
   */
  void queueInputBuffer();

  /**
   * Processes the input data and returns whether more data can be processed by calling this method
   * again.
   *
   * @throws ExoPlaybackException If an error occurs while processing the data.
   */
  boolean processData() throws ExoPlaybackException;

  /** Returns the output format of the pipeline if available, and {@code null} otherwise. */
  @Nullable
  Format getOutputFormat();

  /** Returns an output buffer if the pipeline has produced output, and {@code null} otherwise. */
  @Nullable
  DecoderInputBuffer getOutputBuffer();

  /**
   * Releases the pipeline's output buffer.
   *
   * <p>Should be called when the output buffer from {@link #getOutputBuffer()} is no longer needed.
   */
  void releaseOutputBuffer();

  /** Returns whether the pipeline has ended. */
  boolean isEnded();

  /** Releases all resources held by the pipeline. */
  void release();
}

View file

@ -18,24 +18,15 @@ package com.google.android.exoplayer2.transformer;
import static com.google.android.exoplayer2.source.SampleStream.FLAG_REQUIRE_FORMAT;
import static com.google.android.exoplayer2.util.Assertions.checkNotNull;
import static com.google.android.exoplayer2.util.Assertions.checkState;
import static java.lang.Math.min;
import android.media.MediaCodec.BufferInfo;
import androidx.annotation.Nullable;
import androidx.annotation.RequiresApi;
import com.google.android.exoplayer2.C;
import com.google.android.exoplayer2.ExoPlaybackException;
import com.google.android.exoplayer2.Format;
import com.google.android.exoplayer2.FormatHolder;
import com.google.android.exoplayer2.PlaybackException;
import com.google.android.exoplayer2.audio.AudioProcessor;
import com.google.android.exoplayer2.audio.AudioProcessor.AudioFormat;
import com.google.android.exoplayer2.audio.SonicAudioProcessor;
import com.google.android.exoplayer2.decoder.DecoderInputBuffer;
import com.google.android.exoplayer2.source.SampleStream.ReadDataResult;
import java.io.IOException;
import java.nio.ByteBuffer;
import org.checkerframework.checker.nullness.qual.EnsuresNonNullIf;
import org.checkerframework.checker.nullness.qual.MonotonicNonNull;
import org.checkerframework.checker.nullness.qual.RequiresNonNull;
@ -44,37 +35,18 @@ import org.checkerframework.checker.nullness.qual.RequiresNonNull;
/* package */ final class TransformerAudioRenderer extends TransformerBaseRenderer {
private static final String TAG = "TransformerAudioRenderer";
private static final int DEFAULT_ENCODER_BITRATE = 128 * 1024;
private static final float SPEED_UNSET = -1f;
private final DecoderInputBuffer decoderInputBuffer;
private final DecoderInputBuffer encoderInputBuffer;
private final SonicAudioProcessor sonicAudioProcessor;
@Nullable private MediaCodecAdapterWrapper decoder;
@Nullable private MediaCodecAdapterWrapper encoder;
@Nullable private SpeedProvider speedProvider;
private @MonotonicNonNull Format decoderInputFormat;
private @MonotonicNonNull AudioFormat encoderInputAudioFormat;
private ByteBuffer sonicOutputBuffer;
private long nextEncoderInputBufferTimeUs;
private float currentSpeed;
private @MonotonicNonNull SamplePipeline samplePipeline;
private boolean muxerWrapperTrackAdded;
private boolean muxerWrapperTrackEnded;
private boolean hasEncoderOutputFormat;
private boolean drainingSonicForSpeedChange;
public TransformerAudioRenderer(
MuxerWrapper muxerWrapper, TransformerMediaClock mediaClock, Transformation transformation) {
super(C.TRACK_TYPE_AUDIO, muxerWrapper, mediaClock, transformation);
decoderInputBuffer =
new DecoderInputBuffer(DecoderInputBuffer.BUFFER_REPLACEMENT_MODE_DISABLED);
encoderInputBuffer =
new DecoderInputBuffer(DecoderInputBuffer.BUFFER_REPLACEMENT_MODE_DISABLED);
sonicAudioProcessor = new SonicAudioProcessor();
sonicOutputBuffer = AudioProcessor.EMPTY_BUFFER;
nextEncoderInputBufferTimeUs = 0;
currentSpeed = SPEED_UNSET;
}
@Override
@ -89,201 +61,100 @@ import org.checkerframework.checker.nullness.qual.RequiresNonNull;
@Override
protected void onReset() {
decoderInputBuffer.clear();
decoderInputBuffer.data = null;
encoderInputBuffer.clear();
encoderInputBuffer.data = null;
sonicAudioProcessor.reset();
if (decoder != null) {
decoder.release();
decoder = null;
if (samplePipeline != null) {
samplePipeline.release();
}
if (encoder != null) {
encoder.release();
encoder = null;
}
speedProvider = null;
sonicOutputBuffer = AudioProcessor.EMPTY_BUFFER;
nextEncoderInputBufferTimeUs = 0;
currentSpeed = SPEED_UNSET;
muxerWrapperTrackAdded = false;
muxerWrapperTrackEnded = false;
hasEncoderOutputFormat = false;
drainingSonicForSpeedChange = false;
}
@Override
public void render(long positionUs, long elapsedRealtimeUs) throws ExoPlaybackException {
if (!isRendererStarted || isEnded()) {
if (!isRendererStarted || isEnded() || !ensureRendererConfigured()) {
return;
}
if (ensureDecoderConfigured()) {
MediaCodecAdapterWrapper decoder = this.decoder;
if (ensureEncoderAndAudioProcessingConfigured()) {
MediaCodecAdapterWrapper encoder = this.encoder;
while (feedMuxerFromEncoder(encoder)) {}
if (sonicAudioProcessor.isActive()) {
while (feedEncoderFromSonic(decoder, encoder)) {}
while (feedSonicFromDecoder(decoder)) {}
} else {
while (feedEncoderFromDecoder(decoder, encoder)) {}
}
}
while (feedDecoderFromInput(decoder)) {}
while (feedMuxerFromPipeline() || samplePipeline.processData() || feedPipelineFromInput()) {}
}
/** Attempts to read the input format and to initialize the sample pipeline. */
@EnsuresNonNullIf(expression = "samplePipeline", result = true)
private boolean ensureRendererConfigured() throws ExoPlaybackException {
if (samplePipeline != null) {
return true;
}
FormatHolder formatHolder = getFormatHolder();
@ReadDataResult
int result = readSource(formatHolder, decoderInputBuffer, /* readFlags= */ FLAG_REQUIRE_FORMAT);
if (result != C.RESULT_FORMAT_READ) {
return false;
}
Format decoderInputFormat = checkNotNull(formatHolder.format);
if ((transformation.audioMimeType != null
&& !transformation.audioMimeType.equals(decoderInputFormat.sampleMimeType))
|| transformation.flattenForSlowMotion) {
samplePipeline = new AudioSamplePipeline(decoderInputFormat, transformation, getIndex());
} else {
samplePipeline = new PassthroughSamplePipeline(decoderInputFormat);
}
return true;
}
/**
* Attempts to write encoder output data to the muxer, and returns whether it may be possible to
* write more data immediately by calling this method again.
* Attempts to write sample pipeline output data to the muxer, and returns whether it may be
* possible to write more data immediately by calling this method again.
*/
private boolean feedMuxerFromEncoder(MediaCodecAdapterWrapper encoder) {
if (!hasEncoderOutputFormat) {
@Nullable Format encoderOutputFormat = encoder.getOutputFormat();
if (encoderOutputFormat == null) {
@RequiresNonNull("samplePipeline")
private boolean feedMuxerFromPipeline() {
if (!muxerWrapperTrackAdded) {
@Nullable Format samplePipelineOutputFormat = samplePipeline.getOutputFormat();
if (samplePipelineOutputFormat == null) {
return false;
}
hasEncoderOutputFormat = true;
muxerWrapper.addTrackFormat(encoderOutputFormat);
muxerWrapperTrackAdded = true;
muxerWrapper.addTrackFormat(samplePipelineOutputFormat);
}
if (encoder.isEnded()) {
if (samplePipeline.isEnded()) {
muxerWrapper.endTrack(getTrackType());
muxerWrapperTrackEnded = true;
return false;
}
@Nullable ByteBuffer encoderOutputBuffer = encoder.getOutputBuffer();
if (encoderOutputBuffer == null) {
@Nullable DecoderInputBuffer samplePipelineOutputBuffer = samplePipeline.getOutputBuffer();
if (samplePipelineOutputBuffer == null) {
return false;
}
BufferInfo encoderOutputBufferInfo = checkNotNull(encoder.getOutputBufferInfo());
if (!muxerWrapper.writeSample(
getTrackType(),
encoderOutputBuffer,
samplePipelineOutputBuffer.data,
/* isKeyFrame= */ true,
encoderOutputBufferInfo.presentationTimeUs)) {
samplePipelineOutputBuffer.timeUs)) {
return false;
}
encoder.releaseOutputBuffer();
samplePipeline.releaseOutputBuffer();
return true;
}
/**
* Attempts to pass decoder output data to the encoder, and returns whether it may be possible to
* Attempts to pass input data to the sample pipeline, and returns whether it may be possible to
* pass more data immediately by calling this method again.
*/
@RequiresNonNull({"encoderInputAudioFormat"})
private boolean feedEncoderFromDecoder(
MediaCodecAdapterWrapper decoder, MediaCodecAdapterWrapper encoder) {
if (!encoder.maybeDequeueInputBuffer(encoderInputBuffer)) {
@RequiresNonNull("samplePipeline")
private boolean feedPipelineFromInput() {
@Nullable DecoderInputBuffer samplePipelineInputBuffer = samplePipeline.dequeueInputBuffer();
if (samplePipelineInputBuffer == null) {
return false;
}
if (decoder.isEnded()) {
queueEndOfStreamToEncoder(encoder);
return false;
}
@Nullable ByteBuffer decoderOutputBuffer = decoder.getOutputBuffer();
if (decoderOutputBuffer == null) {
return false;
}
if (isSpeedChanging(checkNotNull(decoder.getOutputBufferInfo()))) {
flushSonicAndSetSpeed(currentSpeed);
return false;
}
feedEncoder(encoder, decoderOutputBuffer);
if (!decoderOutputBuffer.hasRemaining()) {
decoder.releaseOutputBuffer();
}
return true;
}
/**
* Attempts to pass audio processor output data to the encoder, and returns whether it may be
* possible to pass more data immediately by calling this method again.
*/
@RequiresNonNull({"encoderInputAudioFormat"})
private boolean feedEncoderFromSonic(
MediaCodecAdapterWrapper decoder, MediaCodecAdapterWrapper encoder) {
if (!encoder.maybeDequeueInputBuffer(encoderInputBuffer)) {
return false;
}
if (!sonicOutputBuffer.hasRemaining()) {
sonicOutputBuffer = sonicAudioProcessor.getOutput();
if (!sonicOutputBuffer.hasRemaining()) {
if (decoder.isEnded() && sonicAudioProcessor.isEnded()) {
queueEndOfStreamToEncoder(encoder);
}
return false;
}
}
feedEncoder(encoder, sonicOutputBuffer);
return true;
}
/**
* Attempts to process decoder output data, and returns whether it may be possible to process more
* data immediately by calling this method again.
*/
private boolean feedSonicFromDecoder(MediaCodecAdapterWrapper decoder) {
if (drainingSonicForSpeedChange) {
if (sonicAudioProcessor.isEnded() && !sonicOutputBuffer.hasRemaining()) {
flushSonicAndSetSpeed(currentSpeed);
drainingSonicForSpeedChange = false;
}
return false;
}
// Sonic invalidates any previous output buffer when more input is queued, so we don't queue if
// there is output still to be processed.
if (sonicOutputBuffer.hasRemaining()) {
return false;
}
if (decoder.isEnded()) {
sonicAudioProcessor.queueEndOfStream();
return false;
}
checkState(!sonicAudioProcessor.isEnded());
@Nullable ByteBuffer decoderOutputBuffer = decoder.getOutputBuffer();
if (decoderOutputBuffer == null) {
return false;
}
if (isSpeedChanging(checkNotNull(decoder.getOutputBufferInfo()))) {
sonicAudioProcessor.queueEndOfStream();
drainingSonicForSpeedChange = true;
return false;
}
sonicAudioProcessor.queueInput(decoderOutputBuffer);
if (!decoderOutputBuffer.hasRemaining()) {
decoder.releaseOutputBuffer();
}
return true;
}
/**
* Attempts to pass input data to the decoder, and returns whether it may be possible to pass more
* data immediately by calling this method again.
*/
private boolean feedDecoderFromInput(MediaCodecAdapterWrapper decoder) {
if (!decoder.maybeDequeueInputBuffer(decoderInputBuffer)) {
return false;
}
decoderInputBuffer.clear();
@ReadDataResult
int result = readSource(getFormatHolder(), decoderInputBuffer, /* readFlags= */ 0);
int result = readSource(getFormatHolder(), samplePipelineInputBuffer, /* readFlags= */ 0);
switch (result) {
case C.RESULT_BUFFER_READ:
mediaClock.updateTimeForTrackType(getTrackType(), decoderInputBuffer.timeUs);
decoderInputBuffer.timeUs -= streamOffsetUs;
decoderInputBuffer.flip();
decoder.queueInputBuffer(decoderInputBuffer);
return !decoderInputBuffer.isEndOfStream();
mediaClock.updateTimeForTrackType(getTrackType(), samplePipelineInputBuffer.timeUs);
samplePipelineInputBuffer.timeUs -= streamOffsetUs;
samplePipelineInputBuffer.flip();
samplePipeline.queueInputBuffer();
return !samplePipelineInputBuffer.isEndOfStream();
case C.RESULT_FORMAT_READ:
throw new IllegalStateException("Format changes are not supported.");
case C.RESULT_NOTHING_READ:
@ -291,150 +162,4 @@ import org.checkerframework.checker.nullness.qual.RequiresNonNull;
return false;
}
}
/**
* Feeds as much data as possible between the current position and limit of the specified {@link
* ByteBuffer} to the encoder, and advances its position by the number of bytes fed.
*/
@RequiresNonNull({"encoderInputAudioFormat"})
private void feedEncoder(MediaCodecAdapterWrapper encoder, ByteBuffer inputBuffer) {
ByteBuffer encoderInputBufferData = checkNotNull(encoderInputBuffer.data);
int bufferLimit = inputBuffer.limit();
inputBuffer.limit(min(bufferLimit, inputBuffer.position() + encoderInputBufferData.capacity()));
encoderInputBufferData.put(inputBuffer);
encoderInputBuffer.timeUs = nextEncoderInputBufferTimeUs;
nextEncoderInputBufferTimeUs +=
getBufferDurationUs(
/* bytesWritten= */ encoderInputBufferData.position(),
encoderInputAudioFormat.bytesPerFrame,
encoderInputAudioFormat.sampleRate);
encoderInputBuffer.setFlags(0);
encoderInputBuffer.flip();
inputBuffer.limit(bufferLimit);
encoder.queueInputBuffer(encoderInputBuffer);
}
private void queueEndOfStreamToEncoder(MediaCodecAdapterWrapper encoder) {
checkState(checkNotNull(encoderInputBuffer.data).position() == 0);
encoderInputBuffer.timeUs = nextEncoderInputBufferTimeUs;
encoderInputBuffer.addFlag(C.BUFFER_FLAG_END_OF_STREAM);
encoderInputBuffer.flip();
// Queuing EOS should only occur with an empty buffer.
encoder.queueInputBuffer(encoderInputBuffer);
}
/**
* Attempts to configure the {@link #encoder} and Sonic (if applicable), if they have not been
* configured yet, and returns whether they have been configured.
*/
@RequiresNonNull({"decoder", "decoderInputFormat"})
@EnsuresNonNullIf(
expression = {"encoder", "encoderInputAudioFormat"},
result = true)
private boolean ensureEncoderAndAudioProcessingConfigured() throws ExoPlaybackException {
if (encoder != null && encoderInputAudioFormat != null) {
return true;
}
MediaCodecAdapterWrapper decoder = this.decoder;
@Nullable Format decoderOutputFormat = decoder.getOutputFormat();
if (decoderOutputFormat == null) {
return false;
}
AudioFormat outputAudioFormat =
new AudioFormat(
decoderOutputFormat.sampleRate,
decoderOutputFormat.channelCount,
decoderOutputFormat.pcmEncoding);
if (transformation.flattenForSlowMotion) {
try {
outputAudioFormat = sonicAudioProcessor.configure(outputAudioFormat);
flushSonicAndSetSpeed(currentSpeed);
} catch (AudioProcessor.UnhandledAudioFormatException e) {
// TODO(internal b/192864511): Assign an adequate error code.
throw createRendererException(e, PlaybackException.ERROR_CODE_UNSPECIFIED);
}
}
String audioMimeType =
transformation.audioMimeType == null
? decoderInputFormat.sampleMimeType
: transformation.audioMimeType;
try {
encoder =
MediaCodecAdapterWrapper.createForAudioEncoding(
new Format.Builder()
.setSampleMimeType(audioMimeType)
.setSampleRate(outputAudioFormat.sampleRate)
.setChannelCount(outputAudioFormat.channelCount)
.setAverageBitrate(DEFAULT_ENCODER_BITRATE)
.build());
} catch (IOException e) {
// TODO(internal b/192864511): Assign an adequate error code.
throw createRendererException(e, PlaybackException.ERROR_CODE_UNSPECIFIED);
}
encoderInputAudioFormat = outputAudioFormat;
return true;
}
/**
* Attempts to configure the {@link #decoder} if it has not been configured yet, and returns
* whether the decoder has been configured.
*/
@EnsuresNonNullIf(
expression = {"decoderInputFormat", "decoder"},
result = true)
private boolean ensureDecoderConfigured() throws ExoPlaybackException {
if (decoder != null && decoderInputFormat != null) {
return true;
}
FormatHolder formatHolder = getFormatHolder();
@ReadDataResult int result = readSource(formatHolder, decoderInputBuffer, FLAG_REQUIRE_FORMAT);
if (result != C.RESULT_FORMAT_READ) {
return false;
}
decoderInputFormat = checkNotNull(formatHolder.format);
MediaCodecAdapterWrapper decoder;
try {
decoder = MediaCodecAdapterWrapper.createForAudioDecoding(decoderInputFormat);
} catch (IOException e) {
// TODO (internal b/184262323): Assign an adequate error code.
throw createRendererException(e, PlaybackException.ERROR_CODE_UNSPECIFIED);
}
speedProvider = new SegmentSpeedProvider(decoderInputFormat);
currentSpeed = speedProvider.getSpeed(0);
this.decoder = decoder;
return true;
}
private boolean isSpeedChanging(BufferInfo bufferInfo) {
if (!transformation.flattenForSlowMotion) {
return false;
}
float newSpeed = checkNotNull(speedProvider).getSpeed(bufferInfo.presentationTimeUs);
boolean speedChanging = newSpeed != currentSpeed;
currentSpeed = newSpeed;
return speedChanging;
}
private void flushSonicAndSetSpeed(float speed) {
sonicAudioProcessor.setSpeed(speed);
sonicAudioProcessor.setPitch(speed);
sonicAudioProcessor.flush();
}
private ExoPlaybackException createRendererException(Throwable cause, int errorCode) {
return ExoPlaybackException.createForRenderer(
cause,
TAG,
getIndex(),
decoderInputFormat,
/* rendererFormatSupport= */ C.FORMAT_HANDLED,
/* isRecoverable= */ false,
errorCode);
}
private static long getBufferDurationUs(long bytesWritten, int bytesPerFrame, int sampleRate) {
long framesWritten = bytesWritten / bytesPerFrame;
return framesWritten * C.MICROS_PER_SECOND / sampleRate;
}
}

View file

@ -16,33 +16,18 @@
package com.google.android.exoplayer2.transformer;
import static com.google.android.exoplayer2.source.SampleStream.FLAG_REQUIRE_FORMAT;
import static com.google.android.exoplayer2.util.Assertions.checkNotNull;
import static com.google.android.exoplayer2.util.Assertions.checkState;
import android.content.Context;
import android.graphics.SurfaceTexture;
import android.media.MediaCodec;
import android.opengl.EGL14;
import android.opengl.EGLContext;
import android.opengl.EGLDisplay;
import android.opengl.EGLExt;
import android.opengl.EGLSurface;
import android.opengl.GLES20;
import android.view.Surface;
import androidx.annotation.Nullable;
import androidx.annotation.RequiresApi;
import com.google.android.exoplayer2.C;
import com.google.android.exoplayer2.ExoPlaybackException;
import com.google.android.exoplayer2.Format;
import com.google.android.exoplayer2.FormatHolder;
import com.google.android.exoplayer2.PlaybackException;
import com.google.android.exoplayer2.decoder.DecoderInputBuffer;
import com.google.android.exoplayer2.source.SampleStream;
import com.google.android.exoplayer2.util.GlUtil;
import com.google.common.collect.ImmutableMap;
import java.io.IOException;
import java.nio.ByteBuffer;
import org.checkerframework.checker.nullness.qual.EnsuresNonNull;
import com.google.android.exoplayer2.source.SampleStream.ReadDataResult;
import org.checkerframework.checker.nullness.qual.EnsuresNonNullIf;
import org.checkerframework.checker.nullness.qual.MonotonicNonNull;
import org.checkerframework.checker.nullness.qual.RequiresNonNull;
@ -50,35 +35,13 @@ import org.checkerframework.checker.nullness.qual.RequiresNonNull;
@RequiresApi(18)
/* package */ final class TransformerTranscodingVideoRenderer extends TransformerBaseRenderer {
static {
GlUtil.glAssertionsEnabled = true;
}
private static final String TAG = "TransformerTranscodingVideoRenderer";
private final Context context;
private final DecoderInputBuffer decoderInputBuffer;
private final float[] decoderTextureTransformMatrix;
private @MonotonicNonNull Format decoderInputFormat;
@Nullable private EGLDisplay eglDisplay;
@Nullable private EGLContext eglContext;
@Nullable private EGLSurface eglSurface;
private int decoderTextureId;
@Nullable private SurfaceTexture decoderSurfaceTexture;
@Nullable private Surface decoderSurface;
@Nullable private MediaCodecAdapterWrapper decoder;
private volatile boolean isDecoderSurfacePopulated;
private boolean waitingForPopulatedDecoderSurface;
@Nullable private GlUtil.Uniform decoderTextureTransformUniform;
@Nullable private MediaCodecAdapterWrapper encoder;
/** Whether encoder's actual output format is obtained. */
private boolean hasEncoderActualOutputFormat;
private @MonotonicNonNull SamplePipeline samplePipeline;
private boolean muxerWrapperTrackAdded;
private boolean muxerWrapperTrackEnded;
public TransformerTranscodingVideoRenderer(
@ -88,9 +51,8 @@ import org.checkerframework.checker.nullness.qual.RequiresNonNull;
Transformation transformation) {
super(C.TRACK_TYPE_VIDEO, muxerWrapper, mediaClock, transformation);
this.context = context;
decoderInputBuffer = new DecoderInputBuffer(DecoderInputBuffer.BUFFER_REPLACEMENT_MODE_DIRECT);
decoderTextureTransformMatrix = new float[16];
decoderTextureId = GlUtil.TEXTURE_ID_UNSET;
decoderInputBuffer =
new DecoderInputBuffer(DecoderInputBuffer.BUFFER_REPLACEMENT_MODE_DISABLED);
}
@Override
@ -98,34 +60,6 @@ import org.checkerframework.checker.nullness.qual.RequiresNonNull;
return TAG;
}
@Override
public void render(long positionUs, long elapsedRealtimeUs) throws ExoPlaybackException {
if (!isRendererStarted || isEnded() || !ensureInputFormatRead()) {
return;
}
ensureEncoderConfigured();
MediaCodecAdapterWrapper encoder = this.encoder;
ensureOpenGlConfigured();
EGLDisplay eglDisplay = this.eglDisplay;
EGLSurface eglSurface = this.eglSurface;
GlUtil.Uniform decoderTextureTransformUniform = this.decoderTextureTransformUniform;
if (!ensureDecoderConfigured()) {
return;
}
MediaCodecAdapterWrapper decoder = this.decoder;
SurfaceTexture decoderSurfaceTexture = this.decoderSurfaceTexture;
while (feedMuxerFromEncoder(encoder)) {}
while (feedEncoderFromDecoder(
decoder,
encoder,
decoderSurfaceTexture,
eglDisplay,
eglSurface,
decoderTextureTransformUniform)) {}
while (feedDecoderFromInput(decoder)) {}
}
@Override
public boolean isEnded() {
return muxerWrapperTrackEnded;
@ -133,272 +67,107 @@ import org.checkerframework.checker.nullness.qual.RequiresNonNull;
@Override
protected void onReset() {
decoderInputBuffer.clear();
decoderInputBuffer.data = null;
GlUtil.destroyEglContext(eglDisplay, eglContext);
eglDisplay = null;
eglContext = null;
eglSurface = null;
if (decoderTextureId != GlUtil.TEXTURE_ID_UNSET) {
GlUtil.deleteTexture(decoderTextureId);
if (samplePipeline != null) {
samplePipeline.release();
}
if (decoderSurfaceTexture != null) {
decoderSurfaceTexture.release();
decoderSurfaceTexture = null;
}
if (decoderSurface != null) {
decoderSurface.release();
decoderSurface = null;
}
if (decoder != null) {
decoder.release();
decoder = null;
}
isDecoderSurfacePopulated = false;
waitingForPopulatedDecoderSurface = false;
decoderTextureTransformUniform = null;
if (encoder != null) {
encoder.release();
encoder = null;
}
hasEncoderActualOutputFormat = false;
muxerWrapperTrackAdded = false;
muxerWrapperTrackEnded = false;
}
@EnsuresNonNullIf(expression = "decoderInputFormat", result = true)
private boolean ensureInputFormatRead() {
if (decoderInputFormat != null) {
@Override
public void render(long positionUs, long elapsedRealtimeUs) throws ExoPlaybackException {
if (!isRendererStarted || isEnded() || !ensureRendererConfigured()) {
return;
}
while (feedMuxerFromPipeline() || samplePipeline.processData() || feedPipelineFromInput()) {}
}
/** Attempts to read the input format and to initialize the sample pipeline. */
@EnsuresNonNullIf(expression = "samplePipeline", result = true)
private boolean ensureRendererConfigured() throws ExoPlaybackException {
if (samplePipeline != null) {
return true;
}
FormatHolder formatHolder = getFormatHolder();
@SampleStream.ReadDataResult
int result =
readSource(
formatHolder, decoderInputBuffer, /* readFlags= */ SampleStream.FLAG_REQUIRE_FORMAT);
@ReadDataResult
int result = readSource(formatHolder, decoderInputBuffer, /* readFlags= */ FLAG_REQUIRE_FORMAT);
if (result != C.RESULT_FORMAT_READ) {
return false;
}
decoderInputFormat = checkNotNull(formatHolder.format);
Format decoderInputFormat = checkNotNull(formatHolder.format);
if (transformation.videoMimeType != null
&& !transformation.videoMimeType.equals(decoderInputFormat.sampleMimeType)) {
samplePipeline =
new VideoSamplePipeline(context, decoderInputFormat, transformation, getIndex());
} else {
samplePipeline = new PassthroughSamplePipeline(decoderInputFormat);
}
return true;
}
@RequiresNonNull({"decoderInputFormat"})
@EnsuresNonNull({"encoder"})
private void ensureEncoderConfigured() throws ExoPlaybackException {
if (encoder != null) {
return;
}
try {
encoder =
MediaCodecAdapterWrapper.createForVideoEncoding(
new Format.Builder()
.setWidth(decoderInputFormat.width)
.setHeight(decoderInputFormat.height)
.setSampleMimeType(
transformation.videoMimeType != null
? transformation.videoMimeType
: decoderInputFormat.sampleMimeType)
.build(),
ImmutableMap.of());
} catch (IOException e) {
throw createRendererException(
// TODO(claincly): should be "ENCODER_INIT_FAILED"
e, decoderInputFormat, PlaybackException.ERROR_CODE_DECODER_INIT_FAILED);
}
}
@RequiresNonNull({"encoder", "decoderInputFormat"})
@EnsuresNonNull({"eglDisplay", "eglSurface", "decoderTextureTransformUniform"})
private void ensureOpenGlConfigured() {
if (eglDisplay != null && eglSurface != null && decoderTextureTransformUniform != null) {
return;
}
MediaCodecAdapterWrapper encoder = this.encoder;
EGLDisplay eglDisplay = GlUtil.createEglDisplay();
EGLContext eglContext;
try {
eglContext = GlUtil.createEglContext(eglDisplay);
this.eglContext = eglContext;
} catch (GlUtil.UnsupportedEglVersionException e) {
throw new IllegalStateException("EGL version is unsupported", e);
}
EGLSurface eglSurface =
GlUtil.getEglSurface(eglDisplay, checkNotNull(encoder.getInputSurface()));
GlUtil.focusSurface(
eglDisplay, eglContext, eglSurface, decoderInputFormat.width, decoderInputFormat.height);
decoderTextureId = GlUtil.createExternalTexture();
GlUtil.Program copyProgram;
try {
copyProgram =
new GlUtil.Program(
context,
/* vertexShaderFilePath= */ "shaders/blit_vertex_shader.glsl",
/* fragmentShaderFilePath= */ "shaders/copy_external_fragment_shader.glsl");
} catch (IOException e) {
throw new IllegalStateException(e);
}
copyProgram.use();
GlUtil.Attribute[] copyAttributes = copyProgram.getAttributes();
checkState(copyAttributes.length == 2, "Expected program to have two vertex attributes.");
for (GlUtil.Attribute copyAttribute : copyAttributes) {
if (copyAttribute.name.equals("a_position")) {
copyAttribute.setBuffer(
new float[] {
-1.0f, -1.0f, 0.0f, 1.0f,
1.0f, -1.0f, 0.0f, 1.0f,
-1.0f, 1.0f, 0.0f, 1.0f,
1.0f, 1.0f, 0.0f, 1.0f,
},
/* size= */ 4);
} else if (copyAttribute.name.equals("a_texcoord")) {
copyAttribute.setBuffer(
new float[] {
0.0f, 0.0f, 0.0f, 1.0f,
1.0f, 0.0f, 0.0f, 1.0f,
0.0f, 1.0f, 0.0f, 1.0f,
1.0f, 1.0f, 0.0f, 1.0f,
},
/* size= */ 4);
} else {
throw new IllegalStateException("Unexpected attribute name.");
}
copyAttribute.bind();
}
GlUtil.Uniform[] copyUniforms = copyProgram.getUniforms();
checkState(copyUniforms.length == 2, "Expected program to have two uniforms.");
for (GlUtil.Uniform copyUniform : copyUniforms) {
if (copyUniform.name.equals("tex_sampler")) {
copyUniform.setSamplerTexId(decoderTextureId, 0);
copyUniform.bind();
} else if (copyUniform.name.equals("tex_transform")) {
decoderTextureTransformUniform = copyUniform;
} else {
throw new IllegalStateException("Unexpected uniform name.");
}
}
checkNotNull(decoderTextureTransformUniform);
this.eglDisplay = eglDisplay;
this.eglSurface = eglSurface;
}
@RequiresNonNull({"decoderInputFormat"})
@EnsuresNonNullIf(
expression = {"decoder", "decoderSurfaceTexture"},
result = true)
private boolean ensureDecoderConfigured() throws ExoPlaybackException {
if (decoder != null && decoderSurfaceTexture != null) {
return true;
}
checkState(decoderTextureId != GlUtil.TEXTURE_ID_UNSET);
SurfaceTexture decoderSurfaceTexture = new SurfaceTexture(decoderTextureId);
decoderSurfaceTexture.setOnFrameAvailableListener(
surfaceTexture -> isDecoderSurfacePopulated = true);
decoderSurface = new Surface(decoderSurfaceTexture);
try {
decoder = MediaCodecAdapterWrapper.createForVideoDecoding(decoderInputFormat, decoderSurface);
} catch (IOException e) {
throw createRendererException(
e, decoderInputFormat, PlaybackException.ERROR_CODE_DECODER_INIT_FAILED);
}
this.decoderSurfaceTexture = decoderSurfaceTexture;
return true;
}
private boolean feedDecoderFromInput(MediaCodecAdapterWrapper decoder) {
if (!decoder.maybeDequeueInputBuffer(decoderInputBuffer)) {
return false;
}
decoderInputBuffer.clear();
@SampleStream.ReadDataResult
int result = readSource(getFormatHolder(), decoderInputBuffer, /* readFlags= */ 0);
switch (result) {
case C.RESULT_FORMAT_READ:
throw new IllegalStateException("Format changes are not supported.");
case C.RESULT_BUFFER_READ:
mediaClock.updateTimeForTrackType(getTrackType(), decoderInputBuffer.timeUs);
decoderInputBuffer.timeUs -= streamOffsetUs;
ByteBuffer data = checkNotNull(decoderInputBuffer.data);
data.flip();
decoder.queueInputBuffer(decoderInputBuffer);
return !decoderInputBuffer.isEndOfStream();
case C.RESULT_NOTHING_READ:
default:
return false;
}
}
private boolean feedEncoderFromDecoder(
MediaCodecAdapterWrapper decoder,
MediaCodecAdapterWrapper encoder,
SurfaceTexture decoderSurfaceTexture,
EGLDisplay eglDisplay,
EGLSurface eglSurface,
GlUtil.Uniform decoderTextureTransformUniform) {
if (decoder.isEnded()) {
return false;
}
if (!isDecoderSurfacePopulated) {
if (!waitingForPopulatedDecoderSurface) {
if (decoder.getOutputBufferInfo() != null) {
decoder.releaseOutputBuffer(/* render= */ true);
waitingForPopulatedDecoderSurface = true;
}
if (decoder.isEnded()) {
encoder.signalEndOfInputStream();
}
}
return false;
}
waitingForPopulatedDecoderSurface = false;
decoderSurfaceTexture.updateTexImage();
decoderSurfaceTexture.getTransformMatrix(decoderTextureTransformMatrix);
decoderTextureTransformUniform.setFloats(decoderTextureTransformMatrix);
decoderTextureTransformUniform.bind();
GLES20.glDrawArrays(GLES20.GL_TRIANGLE_STRIP, 0, 4);
long decoderSurfaceTextureTimestampNs = decoderSurfaceTexture.getTimestamp();
EGLExt.eglPresentationTimeANDROID(eglDisplay, eglSurface, decoderSurfaceTextureTimestampNs);
EGL14.eglSwapBuffers(eglDisplay, eglSurface);
isDecoderSurfacePopulated = false;
return true;
}
private boolean feedMuxerFromEncoder(MediaCodecAdapterWrapper encoder) {
if (!hasEncoderActualOutputFormat) {
@Nullable Format encoderOutputFormat = encoder.getOutputFormat();
if (encoderOutputFormat == null) {
/**
* Attempts to write sample pipeline output data to the muxer, and returns whether it may be
* possible to write more data immediately by calling this method again.
*/
@RequiresNonNull("samplePipeline")
private boolean feedMuxerFromPipeline() {
if (!muxerWrapperTrackAdded) {
@Nullable Format samplePipelineOutputFormat = samplePipeline.getOutputFormat();
if (samplePipelineOutputFormat == null) {
return false;
}
hasEncoderActualOutputFormat = true;
muxerWrapper.addTrackFormat(encoderOutputFormat);
muxerWrapperTrackAdded = true;
muxerWrapper.addTrackFormat(samplePipelineOutputFormat);
}
if (encoder.isEnded()) {
if (samplePipeline.isEnded()) {
muxerWrapper.endTrack(getTrackType());
muxerWrapperTrackEnded = true;
return false;
}
@Nullable ByteBuffer encoderOutputBuffer = encoder.getOutputBuffer();
if (encoderOutputBuffer == null) {
@Nullable DecoderInputBuffer samplePipelineOutputBuffer = samplePipeline.getOutputBuffer();
if (samplePipelineOutputBuffer == null) {
return false;
}
MediaCodec.BufferInfo encoderOutputBufferInfo = checkNotNull(encoder.getOutputBufferInfo());
if (!muxerWrapper.writeSample(
getTrackType(),
encoderOutputBuffer,
/* isKeyFrame= */ (encoderOutputBufferInfo.flags & MediaCodec.BUFFER_FLAG_KEY_FRAME) > 0,
encoderOutputBufferInfo.presentationTimeUs)) {
samplePipelineOutputBuffer.data,
samplePipelineOutputBuffer.isKeyFrame(),
samplePipelineOutputBuffer.timeUs)) {
return false;
}
encoder.releaseOutputBuffer();
samplePipeline.releaseOutputBuffer();
return true;
}
/**
* Attempts to pass input data to the sample pipeline, and returns whether it may be possible to
* pass more data immediately by calling this method again.
*/
@RequiresNonNull("samplePipeline")
private boolean feedPipelineFromInput() {
@Nullable DecoderInputBuffer samplePipelineInputBuffer = samplePipeline.dequeueInputBuffer();
if (samplePipelineInputBuffer == null) {
return false;
}
@ReadDataResult
int result = readSource(getFormatHolder(), samplePipelineInputBuffer, /* readFlags= */ 0);
switch (result) {
case C.RESULT_BUFFER_READ:
mediaClock.updateTimeForTrackType(getTrackType(), samplePipelineInputBuffer.timeUs);
samplePipelineInputBuffer.timeUs -= streamOffsetUs;
samplePipelineInputBuffer.flip();
samplePipeline.queueInputBuffer();
return !samplePipelineInputBuffer.isEndOfStream();
case C.RESULT_FORMAT_READ:
throw new IllegalStateException("Format changes are not supported.");
case C.RESULT_NOTHING_READ:
default:
return false;
}
}
}

View file

@ -0,0 +1,335 @@
/*
* Copyright 2021 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.google.android.exoplayer2.transformer;
import static com.google.android.exoplayer2.util.Assertions.checkNotNull;
import static com.google.android.exoplayer2.util.Assertions.checkState;
import static com.google.android.exoplayer2.util.Assertions.checkStateNotNull;
import android.content.Context;
import android.graphics.SurfaceTexture;
import android.media.MediaCodec;
import android.opengl.EGL14;
import android.opengl.EGLContext;
import android.opengl.EGLDisplay;
import android.opengl.EGLExt;
import android.opengl.EGLSurface;
import android.opengl.GLES20;
import android.view.Surface;
import androidx.annotation.Nullable;
import androidx.annotation.RequiresApi;
import com.google.android.exoplayer2.C;
import com.google.android.exoplayer2.ExoPlaybackException;
import com.google.android.exoplayer2.Format;
import com.google.android.exoplayer2.PlaybackException;
import com.google.android.exoplayer2.decoder.DecoderInputBuffer;
import com.google.android.exoplayer2.util.GlUtil;
import com.google.common.collect.ImmutableMap;
import java.io.IOException;
import org.checkerframework.checker.nullness.qual.EnsuresNonNull;
import org.checkerframework.checker.nullness.qual.EnsuresNonNullIf;
import org.checkerframework.checker.nullness.qual.MonotonicNonNull;
import org.checkerframework.checker.nullness.qual.RequiresNonNull;
/**
 * Pipeline to decode video samples, apply transformations on the raw samples, and re-encode them.
 *
 * <p>Decoded frames are rendered by the decoder onto a {@link SurfaceTexture}, copied to the
 * encoder's input surface with OpenGL, and then read back from the encoder as output samples.
 */
@RequiresApi(18)
/* package */ final class VideoSamplePipeline implements SamplePipeline {

  static {
    // Fail fast on OpenGL errors while this pipeline is in use.
    GlUtil.glAssertionsEnabled = true;
  }

  private static final String TAG = "VideoSamplePipeline";

  // Predefined shader values.
  private static final String VERTEX_SHADER_FILE_PATH = "shaders/blit_vertex_shader.glsl";
  private static final String FRAGMENT_SHADER_FILE_PATH =
      "shaders/copy_external_fragment_shader.glsl";
  // The copy program is expected to declare exactly these many attributes/uniforms; anything else
  // indicates a shader/code mismatch and fails via checkState in ensureOpenGlConfigured().
  private static final int EXPECTED_NUMBER_OF_ATTRIBUTES = 2;
  private static final int EXPECTED_NUMBER_OF_UNIFORMS = 2;

  private final Context context;
  private final int rendererIndex;
  private final MediaCodecAdapterWrapper encoder;
  // Wrapper used to expose the encoder's current output buffer via getOutputBuffer().
  private final DecoderInputBuffer encoderOutputBuffer;
  private final DecoderInputBuffer decoderInputBuffer;
  // Scratch matrix filled from the SurfaceTexture on every frame copy.
  private final float[] decoderTextureTransformMatrix;
  private final Format decoderInputFormat;

  // EGL/GL state, lazily initialized in ensureOpenGlConfigured().
  private @MonotonicNonNull EGLDisplay eglDisplay;
  private @MonotonicNonNull EGLContext eglContext;
  private @MonotonicNonNull EGLSurface eglSurface;
  private int decoderTextureId;
  // Decoder output surface chain, lazily initialized in ensureDecoderConfigured().
  private @MonotonicNonNull SurfaceTexture decoderSurfaceTexture;
  private @MonotonicNonNull Surface decoderSurface;
  private @MonotonicNonNull MediaCodecAdapterWrapper decoder;
  // Set from the SurfaceTexture's frame-available callback (possibly on another thread), cleared
  // on the pipeline thread after the frame has been copied to the encoder surface.
  private volatile boolean isDecoderSurfacePopulated;
  private boolean waitingForPopulatedDecoderSurface;
  private GlUtil.@MonotonicNonNull Uniform decoderTextureTransformUniform;

  /**
   * Creates a pipeline for the given input format, configuring the video encoder eagerly.
   *
   * @param context A {@link Context}, used to load shader assets.
   * @param decoderInputFormat The {@link Format} of the encoded input samples.
   * @param transformation The transformation options; {@code transformation.videoMimeType}, if
   *     set, selects the encoder's output sample MIME type.
   * @param rendererIndex The index of the renderer using this pipeline, for error reporting.
   * @throws ExoPlaybackException If creating the video encoder fails.
   */
  public VideoSamplePipeline(
      Context context, Format decoderInputFormat, Transformation transformation, int rendererIndex)
      throws ExoPlaybackException {
    this.decoderInputFormat = decoderInputFormat;
    this.rendererIndex = rendererIndex;
    this.context = context;
    decoderInputBuffer =
        new DecoderInputBuffer(DecoderInputBuffer.BUFFER_REPLACEMENT_MODE_DISABLED);
    decoderTextureTransformMatrix = new float[16];
    decoderTextureId = GlUtil.TEXTURE_ID_UNSET;
    encoderOutputBuffer =
        new DecoderInputBuffer(DecoderInputBuffer.BUFFER_REPLACEMENT_MODE_DISABLED);
    try {
      // The encoder keeps the input width/height; only the sample MIME type may change.
      encoder =
          MediaCodecAdapterWrapper.createForVideoEncoding(
              new Format.Builder()
                  .setWidth(decoderInputFormat.width)
                  .setHeight(decoderInputFormat.height)
                  .setSampleMimeType(
                      transformation.videoMimeType != null
                          ? transformation.videoMimeType
                          : decoderInputFormat.sampleMimeType)
                  .build(),
              ImmutableMap.of());
    } catch (IOException e) {
      // TODO (internal b/184262323): Assign an adequate error code.
      throw ExoPlaybackException.createForRenderer(
          e,
          TAG,
          rendererIndex,
          decoderInputFormat,
          /* rendererFormatSupport= */ C.FORMAT_HANDLED,
          /* isRecoverable= */ false,
          PlaybackException.ERROR_CODE_UNSPECIFIED);
    }
  }

  @Override
  public boolean processData() throws ExoPlaybackException {
    // Lazily set up GL and the decoder, then try to copy one decoded frame to the encoder.
    ensureOpenGlConfigured();
    return !ensureDecoderConfigured() || feedEncoderFromDecoder();
  }

  @Override
  @Nullable
  public DecoderInputBuffer dequeueInputBuffer() {
    // No input can be accepted until the decoder exists and has a free input buffer.
    return decoder != null && decoder.maybeDequeueInputBuffer(decoderInputBuffer)
        ? decoderInputBuffer
        : null;
  }

  @Override
  public void queueInputBuffer() {
    // Must only be called after dequeueInputBuffer() returned non-null, so the decoder exists.
    checkStateNotNull(decoder).queueInputBuffer(decoderInputBuffer);
  }

  @Override
  @Nullable
  public Format getOutputFormat() {
    return encoder.getOutputFormat();
  }

  @Override
  public boolean isEnded() {
    // The pipeline has ended once the encoder has produced its last output.
    return encoder.isEnded();
  }

  @Override
  @Nullable
  public DecoderInputBuffer getOutputBuffer() {
    // Wraps the encoder's current output buffer and metadata; returns null if none is available.
    encoderOutputBuffer.data = encoder.getOutputBuffer();
    if (encoderOutputBuffer.data == null) {
      return null;
    }
    MediaCodec.BufferInfo bufferInfo = checkNotNull(encoder.getOutputBufferInfo());
    encoderOutputBuffer.timeUs = bufferInfo.presentationTimeUs;
    encoderOutputBuffer.setFlags(bufferInfo.flags);
    return encoderOutputBuffer;
  }

  @Override
  public void releaseOutputBuffer() {
    encoder.releaseOutputBuffer();
  }

  @Override
  public void release() {
    // Release GL/EGL resources and codecs. Fields may still be null if configuration never ran.
    GlUtil.destroyEglContext(eglDisplay, eglContext);
    if (decoderTextureId != GlUtil.TEXTURE_ID_UNSET) {
      GlUtil.deleteTexture(decoderTextureId);
    }
    if (decoderSurfaceTexture != null) {
      decoderSurfaceTexture.release();
    }
    if (decoderSurface != null) {
      decoderSurface.release();
    }
    if (decoder != null) {
      decoder.release();
    }
    encoder.release();
  }

  /**
   * Configures the EGL display/context/surface and the GL copy program on first call; no-op once
   * configured. The GL program's geometry and texture sampler are bound here so that later frame
   * copies only need to update the texture transform and draw.
   */
  @EnsuresNonNull({"eglDisplay", "eglContext", "eglSurface", "decoderTextureTransformUniform"})
  private void ensureOpenGlConfigured() {
    if (eglDisplay != null
        && eglContext != null
        && eglSurface != null
        && decoderTextureTransformUniform != null) {
      return;
    }

    eglDisplay = GlUtil.createEglDisplay();
    try {
      eglContext = GlUtil.createEglContext(eglDisplay);
    } catch (GlUtil.UnsupportedEglVersionException e) {
      throw new IllegalStateException("EGL version is unsupported", e);
    }
    // Render into the encoder's input surface, sized to the input video dimensions.
    eglSurface = GlUtil.getEglSurface(eglDisplay, checkNotNull(encoder.getInputSurface()));
    GlUtil.focusSurface(
        eglDisplay, eglContext, eglSurface, decoderInputFormat.width, decoderInputFormat.height);
    // External texture that the decoder's SurfaceTexture will be backed by.
    decoderTextureId = GlUtil.createExternalTexture();
    GlUtil.Program copyProgram;
    try {
      copyProgram = new GlUtil.Program(context, VERTEX_SHADER_FILE_PATH, FRAGMENT_SHADER_FILE_PATH);
    } catch (IOException e) {
      throw new IllegalStateException(e);
    }
    copyProgram.use();
    GlUtil.Attribute[] copyAttributes = copyProgram.getAttributes();
    checkState(
        copyAttributes.length == EXPECTED_NUMBER_OF_ATTRIBUTES,
        "Expected program to have " + EXPECTED_NUMBER_OF_ATTRIBUTES + " vertex attributes.");
    for (GlUtil.Attribute copyAttribute : copyAttributes) {
      if (copyAttribute.name.equals("a_position")) {
        // Full-screen quad positions (triangle strip), in clip coordinates.
        copyAttribute.setBuffer(
            new float[] {
              -1.0f, -1.0f, 0.0f, 1.0f,
              1.0f, -1.0f, 0.0f, 1.0f,
              -1.0f, 1.0f, 0.0f, 1.0f,
              1.0f, 1.0f, 0.0f, 1.0f,
            },
            /* size= */ 4);
      } else if (copyAttribute.name.equals("a_texcoord")) {
        // Texture coordinates matching the quad above.
        copyAttribute.setBuffer(
            new float[] {
              0.0f, 0.0f, 0.0f, 1.0f,
              1.0f, 0.0f, 0.0f, 1.0f,
              0.0f, 1.0f, 0.0f, 1.0f,
              1.0f, 1.0f, 0.0f, 1.0f,
            },
            /* size= */ 4);
      } else {
        throw new IllegalStateException("Unexpected attribute name.");
      }
      copyAttribute.bind();
    }
    GlUtil.Uniform[] copyUniforms = copyProgram.getUniforms();
    checkState(
        copyUniforms.length == EXPECTED_NUMBER_OF_UNIFORMS,
        "Expected program to have " + EXPECTED_NUMBER_OF_UNIFORMS + " uniforms.");
    for (GlUtil.Uniform copyUniform : copyUniforms) {
      if (copyUniform.name.equals("tex_sampler")) {
        copyUniform.setSamplerTexId(decoderTextureId, 0);
        copyUniform.bind();
      } else if (copyUniform.name.equals("tex_transform")) {
        // Kept to set the SurfaceTexture's transform matrix per frame in feedEncoderFromDecoder().
        decoderTextureTransformUniform = copyUniform;
      } else {
        throw new IllegalStateException("Unexpected uniform name.");
      }
    }
    checkNotNull(decoderTextureTransformUniform);
  }

  /**
   * Configures the decoder and its output {@link SurfaceTexture}/{@link Surface} on first call;
   * no-op once configured. Requires {@link #ensureOpenGlConfigured()} to have created the external
   * texture first.
   *
   * @return Whether the decoder is configured (always {@code true} unless an exception is thrown).
   * @throws ExoPlaybackException If creating the video decoder fails.
   */
  @EnsuresNonNullIf(
      expression = {"decoder", "decoderSurfaceTexture"},
      result = true)
  private boolean ensureDecoderConfigured() throws ExoPlaybackException {
    if (decoder != null && decoderSurfaceTexture != null) {
      return true;
    }

    checkState(decoderTextureId != GlUtil.TEXTURE_ID_UNSET);
    decoderSurfaceTexture = new SurfaceTexture(decoderTextureId);
    // The callback may run on another thread; isDecoderSurfacePopulated is volatile for this.
    decoderSurfaceTexture.setOnFrameAvailableListener(
        surfaceTexture -> isDecoderSurfacePopulated = true);
    decoderSurface = new Surface(decoderSurfaceTexture);
    try {
      decoder = MediaCodecAdapterWrapper.createForVideoDecoding(decoderInputFormat, decoderSurface);
    } catch (IOException e) {
      throw createRendererException(e, PlaybackException.ERROR_CODE_DECODER_INIT_FAILED);
    }
    return true;
  }

  /**
   * Copies one decoded frame from the decoder's {@link SurfaceTexture} onto the encoder's input
   * surface, and returns whether it may be possible to copy more frames immediately by calling
   * this method again.
   */
  @RequiresNonNull({
    "decoder",
    "decoderSurfaceTexture",
    "decoderTextureTransformUniform",
    "eglDisplay",
    "eglSurface"
  })
  private boolean feedEncoderFromDecoder() {
    if (decoder.isEnded()) {
      return false;
    }

    if (!isDecoderSurfacePopulated) {
      if (!waitingForPopulatedDecoderSurface) {
        if (decoder.getOutputBufferInfo() != null) {
          // Render the decoder output to the surface; the frame-available callback will flip
          // isDecoderSurfacePopulated once the frame is ready to consume.
          decoder.releaseOutputBuffer(/* render= */ true);
          waitingForPopulatedDecoderSurface = true;
        }
        if (decoder.isEnded()) {
          encoder.signalEndOfInputStream();
        }
      }
      return false;
    }

    waitingForPopulatedDecoderSurface = false;
    // Latch the new frame and its transform, then draw it onto the encoder's input surface.
    decoderSurfaceTexture.updateTexImage();
    decoderSurfaceTexture.getTransformMatrix(decoderTextureTransformMatrix);
    decoderTextureTransformUniform.setFloats(decoderTextureTransformMatrix);
    decoderTextureTransformUniform.bind();
    GLES20.glDrawArrays(GLES20.GL_TRIANGLE_STRIP, 0, 4);
    // Propagate the frame's timestamp to the encoder via the EGL surface.
    long decoderSurfaceTextureTimestampNs = decoderSurfaceTexture.getTimestamp();
    EGLExt.eglPresentationTimeANDROID(eglDisplay, eglSurface, decoderSurfaceTextureTimestampNs);
    EGL14.eglSwapBuffers(eglDisplay, eglSurface);
    isDecoderSurfacePopulated = false;
    return true;
  }

  /** Wraps {@code cause} in an {@link ExoPlaybackException} with this pipeline's renderer info. */
  private ExoPlaybackException createRendererException(Throwable cause, int errorCode) {
    return ExoPlaybackException.createForRenderer(
        cause,
        TAG,
        rendererIndex,
        decoderInputFormat,
        /* rendererFormatSupport= */ C.FORMAT_HANDLED,
        /* isRecoverable= */ false,
        errorCode);
  }
}

View file

@ -82,7 +82,6 @@ import java.util.Formatter;
import java.util.List;
import java.util.Locale;
import java.util.concurrent.CopyOnWriteArrayList;
import org.checkerframework.dataflow.qual.Pure;
/**
* A view for controlling {@link Player} instances.
@ -2166,12 +2165,11 @@ public class StyledPlayerControlView extends FrameLayout {
TrackSelectionParameters trackSelectionParameters =
player.getTrackSelectionParameters();
TrackSelectionOverrides overrides =
forceTrackSelection(
trackSelectionParameters.trackSelectionOverrides,
track.tracksInfo,
track.trackGroupIndex,
new TrackSelectionOverride(
track.trackGroup, ImmutableList.of(track.trackIndex)));
new TrackSelectionOverrides.Builder()
.setOverrideForType(
new TrackSelectionOverride(
track.trackGroup, ImmutableList.of(track.trackIndex)))
.build();
checkNotNull(player)
.setTrackSelectionParameters(
trackSelectionParameters
@ -2209,41 +2207,4 @@ public class StyledPlayerControlView extends FrameLayout {
checkView = itemView.findViewById(R.id.exo_check);
}
}
/**
 * Forces tracks in a {@link TrackGroup} to be the only ones selected for a {@link C.TrackType}.
 * No other tracks of that type will be selectable. If the forced tracks are not supported, then
 * no tracks of that type will be selected.
 *
 * @param trackSelectionOverrides The current {@link TrackSelectionOverride overrides}.
 * @param tracksInfo The current {@link TracksInfo}.
 * @param forcedTrackGroupIndex The index of the {@link TrackGroup} in {@code tracksInfo} that
 *     should have its track selected.
 * @param forcedTrackSelectionOverride The tracks to force selection of.
 * @return The updated {@link TrackSelectionOverride overrides}.
 */
@Pure
private static TrackSelectionOverrides forceTrackSelection(
    TrackSelectionOverrides trackSelectionOverrides,
    TracksInfo tracksInfo,
    int forcedTrackGroupIndex,
    TrackSelectionOverride forcedTrackSelectionOverride) {
  ImmutableList<TrackGroupInfo> groupInfos = tracksInfo.getTrackGroupInfos();
  @C.TrackType int forcedTrackType = groupInfos.get(forcedTrackGroupIndex).getTrackType();
  TrackSelectionOverrides.Builder overridesBuilder = trackSelectionOverrides.buildUpon();
  overridesBuilder.setOverrideForType(forcedTrackSelectionOverride);
  // TrackSelectionOverride doesn't currently guarantee that only the overridden track group of a
  // given type is selected, so the other groups of the same type are explicitly disabled with
  // empty overrides. This guarantee is provided in the following patch that removes the need for
  // this method.
  for (int groupIndex = 0; groupIndex < groupInfos.size(); groupIndex++) {
    if (groupIndex == forcedTrackGroupIndex) {
      continue;
    }
    TrackGroupInfo groupInfo = groupInfos.get(groupIndex);
    if (groupInfo.getTrackType() == forcedTrackType) {
      overridesBuilder.addOverride(
          new TrackSelectionOverride(groupInfo.getTrackGroup(), ImmutableList.of()));
    }
  }
  return overridesBuilder.build();
}
}

View file

@ -0,0 +1,36 @@
seekMap:
isSeekable = true
duration = 348625
getPosition(0) = [[timeUs=0, position=80]]
getPosition(1) = [[timeUs=0, position=80], [timeUs=20, position=84]]
getPosition(174312) = [[timeUs=174291, position=33544], [timeUs=174312, position=33548]]
getPosition(348625) = [[timeUs=348604, position=67012]]
numberOfTracks = 1
track 0:
total output bytes = 66936
sample count = 4
format 0:
averageBitrate = 1536000
peakBitrate = 1536000
sampleMimeType = audio/raw
maxInputSize = 19200
channelCount = 2
sampleRate = 48000
pcmEncoding = 2
sample 0:
time = 0
flags = 1
data = length 19200, hash EF6C7C27
sample 1:
time = 100000
flags = 1
data = length 19200, hash 5AB97AFC
sample 2:
time = 200000
flags = 1
data = length 19200, hash 37920F33
sample 3:
time = 300000
flags = 1
data = length 9336, hash 135F1C30
tracksEnded = true

View file

@ -0,0 +1,32 @@
seekMap:
isSeekable = true
duration = 348625
getPosition(0) = [[timeUs=0, position=80]]
getPosition(1) = [[timeUs=0, position=80], [timeUs=20, position=84]]
getPosition(174312) = [[timeUs=174291, position=33544], [timeUs=174312, position=33548]]
getPosition(348625) = [[timeUs=348604, position=67012]]
numberOfTracks = 1
track 0:
total output bytes = 44628
sample count = 3
format 0:
averageBitrate = 1536000
peakBitrate = 1536000
sampleMimeType = audio/raw
maxInputSize = 19200
channelCount = 2
sampleRate = 48000
pcmEncoding = 2
sample 0:
time = 116208
flags = 1
data = length 19200, hash E4B962ED
sample 1:
time = 216208
flags = 1
data = length 19200, hash 4F13D6CF
sample 2:
time = 316208
flags = 1
data = length 6228, hash 3FB5F446
tracksEnded = true

View file

@ -0,0 +1,28 @@
seekMap:
isSeekable = true
duration = 348625
getPosition(0) = [[timeUs=0, position=80]]
getPosition(1) = [[timeUs=0, position=80], [timeUs=20, position=84]]
getPosition(174312) = [[timeUs=174291, position=33544], [timeUs=174312, position=33548]]
getPosition(348625) = [[timeUs=348604, position=67012]]
numberOfTracks = 1
track 0:
total output bytes = 22316
sample count = 2
format 0:
averageBitrate = 1536000
peakBitrate = 1536000
sampleMimeType = audio/raw
maxInputSize = 19200
channelCount = 2
sampleRate = 48000
pcmEncoding = 2
sample 0:
time = 232416
flags = 1
data = length 19200, hash F82E494B
sample 1:
time = 332416
flags = 1
data = length 3116, hash 93C99CFD
tracksEnded = true

View file

@ -0,0 +1,24 @@
seekMap:
isSeekable = true
duration = 348625
getPosition(0) = [[timeUs=0, position=80]]
getPosition(1) = [[timeUs=0, position=80], [timeUs=20, position=84]]
getPosition(174312) = [[timeUs=174291, position=33544], [timeUs=174312, position=33548]]
getPosition(348625) = [[timeUs=348604, position=67012]]
numberOfTracks = 1
track 0:
total output bytes = 4
sample count = 1
format 0:
averageBitrate = 1536000
peakBitrate = 1536000
sampleMimeType = audio/raw
maxInputSize = 19200
channelCount = 2
sampleRate = 48000
pcmEncoding = 2
sample 0:
time = 348625
flags = 1
data = length 4, hash FFD4C53F
tracksEnded = true

View file

@ -0,0 +1,36 @@
seekMap:
isSeekable = true
duration = 348625
getPosition(0) = [[timeUs=0, position=80]]
getPosition(1) = [[timeUs=0, position=80], [timeUs=20, position=84]]
getPosition(174312) = [[timeUs=174291, position=33544], [timeUs=174312, position=33548]]
getPosition(348625) = [[timeUs=348604, position=67012]]
numberOfTracks = 1
track 0:
total output bytes = 66936
sample count = 4
format 0:
averageBitrate = 1536000
peakBitrate = 1536000
sampleMimeType = audio/raw
maxInputSize = 19200
channelCount = 2
sampleRate = 48000
pcmEncoding = 2
sample 0:
time = 0
flags = 1
data = length 19200, hash EF6C7C27
sample 1:
time = 100000
flags = 1
data = length 19200, hash 5AB97AFC
sample 2:
time = 200000
flags = 1
data = length 19200, hash 37920F33
sample 3:
time = 300000
flags = 1
data = length 9336, hash 135F1C30
tracksEnded = true

Binary file not shown.

File diff suppressed because it is too large Load diff

View file

@ -1,9 +1,15 @@
containerMimeType = video/mp4
format 0:
id = 2
sampleMimeType = audio/mp4a-latm
codecs = mp4a.40.2
maxInputSize = 294
channelCount = 1
sampleRate = 44100
pcmEncoding = 2
language = und
metadata = entries=[TSSE: description=null: value=Lavf56.1.0]
initializationData:
data = length 2, hash 5F7
format 1:
id = 1
sampleMimeType = video/avc
@ -110,181 +116,127 @@ sample:
dataHashCode = 1205768497
size = 23
isKeyFrame = true
presentationTimeUs = 0
presentationTimeUs = 44000
sample:
trackIndex = 0
dataHashCode = 837571078
size = 6
isKeyFrame = true
presentationTimeUs = 249
presentationTimeUs = 67219
sample:
trackIndex = 0
dataHashCode = -1991633045
size = 148
isKeyFrame = true
presentationTimeUs = 317
presentationTimeUs = 90439
sample:
trackIndex = 0
dataHashCode = -822987359
size = 189
isKeyFrame = true
presentationTimeUs = 1995
presentationTimeUs = 113659
sample:
trackIndex = 0
dataHashCode = -1141508176
size = 205
isKeyFrame = true
presentationTimeUs = 4126
presentationTimeUs = 136879
sample:
trackIndex = 0
dataHashCode = -226971245
size = 210
isKeyFrame = true
presentationTimeUs = 6438
presentationTimeUs = 160099
sample:
trackIndex = 0
dataHashCode = -2099636855
size = 210
isKeyFrame = true
presentationTimeUs = 8818
presentationTimeUs = 183319
sample:
trackIndex = 0
dataHashCode = 1541550559
size = 207
isKeyFrame = true
presentationTimeUs = 11198
presentationTimeUs = 206539
sample:
trackIndex = 0
dataHashCode = 411148001
size = 225
isKeyFrame = true
presentationTimeUs = 13533
presentationTimeUs = 229759
sample:
trackIndex = 0
dataHashCode = -897603973
size = 215
isKeyFrame = true
presentationTimeUs = 16072
presentationTimeUs = 252979
sample:
trackIndex = 0
dataHashCode = 1478106136
size = 211
isKeyFrame = true
presentationTimeUs = 18498
presentationTimeUs = 276199
sample:
trackIndex = 0
dataHashCode = -1380417145
size = 216
isKeyFrame = true
presentationTimeUs = 20878
presentationTimeUs = 299419
sample:
trackIndex = 0
dataHashCode = 780903644
size = 229
isKeyFrame = true
presentationTimeUs = 23326
presentationTimeUs = 322639
sample:
trackIndex = 0
dataHashCode = 586204432
size = 232
isKeyFrame = true
presentationTimeUs = 25911
presentationTimeUs = 345859
sample:
trackIndex = 0
dataHashCode = -2038771492
size = 235
isKeyFrame = true
presentationTimeUs = 28541
presentationTimeUs = 369079
sample:
trackIndex = 0
dataHashCode = -2065161304
size = 231
isKeyFrame = true
presentationTimeUs = 31194
presentationTimeUs = 392299
sample:
trackIndex = 0
dataHashCode = 468662933
size = 226
isKeyFrame = true
presentationTimeUs = 33801
presentationTimeUs = 415519
sample:
trackIndex = 0
dataHashCode = -358398546
size = 216
isKeyFrame = true
presentationTimeUs = 36363
presentationTimeUs = 438739
sample:
trackIndex = 0
dataHashCode = 1767325983
size = 229
isKeyFrame = true
presentationTimeUs = 38811
presentationTimeUs = 461959
sample:
trackIndex = 0
dataHashCode = 1093095458
size = 219
isKeyFrame = true
presentationTimeUs = 41396
presentationTimeUs = 485179
sample:
trackIndex = 0
dataHashCode = 1687543702
size = 241
isKeyFrame = true
presentationTimeUs = 43867
sample:
trackIndex = 0
dataHashCode = 1675188486
size = 228
isKeyFrame = true
presentationTimeUs = 46588
sample:
trackIndex = 0
dataHashCode = 888567545
size = 238
isKeyFrame = true
presentationTimeUs = 49173
sample:
trackIndex = 0
dataHashCode = -439631803
size = 234
isKeyFrame = true
presentationTimeUs = 51871
sample:
trackIndex = 0
dataHashCode = 1606694497
size = 231
isKeyFrame = true
presentationTimeUs = 54524
sample:
trackIndex = 0
dataHashCode = 1747388653
size = 217
isKeyFrame = true
presentationTimeUs = 57131
sample:
trackIndex = 0
dataHashCode = -734560004
size = 239
isKeyFrame = true
presentationTimeUs = 59579
sample:
trackIndex = 0
dataHashCode = -975079040
size = 243
isKeyFrame = true
presentationTimeUs = 62277
sample:
trackIndex = 0
dataHashCode = -1403504710
size = 231
isKeyFrame = true
presentationTimeUs = 65020
sample:
trackIndex = 0
dataHashCode = 379512981
size = 230
isKeyFrame = true
presentationTimeUs = 67627
presentationTimeUs = 508399
sample:
trackIndex = 1
dataHashCode = -1830836678
@ -309,96 +261,6 @@ sample:
size = 4725
isKeyFrame = false
presentationTimeUs = 700700
sample:
trackIndex = 0
dataHashCode = -997198863
size = 238
isKeyFrame = true
presentationTimeUs = 70234
sample:
trackIndex = 0
dataHashCode = 1394492825
size = 225
isKeyFrame = true
presentationTimeUs = 72932
sample:
trackIndex = 0
dataHashCode = -885232755
size = 232
isKeyFrame = true
presentationTimeUs = 75471
sample:
trackIndex = 0
dataHashCode = 260871367
size = 243
isKeyFrame = true
presentationTimeUs = 78101
sample:
trackIndex = 0
dataHashCode = -1505318960
size = 232
isKeyFrame = true
presentationTimeUs = 80844
sample:
trackIndex = 0
dataHashCode = -390625371
size = 237
isKeyFrame = true
presentationTimeUs = 83474
sample:
trackIndex = 0
dataHashCode = 1067950751
size = 228
isKeyFrame = true
presentationTimeUs = 86149
sample:
trackIndex = 0
dataHashCode = -1179436278
size = 235
isKeyFrame = true
presentationTimeUs = 88734
sample:
trackIndex = 0
dataHashCode = 1906607774
size = 264
isKeyFrame = true
presentationTimeUs = 91387
sample:
trackIndex = 0
dataHashCode = -800475828
size = 257
isKeyFrame = true
presentationTimeUs = 94380
sample:
trackIndex = 0
dataHashCode = 1718972977
size = 227
isKeyFrame = true
presentationTimeUs = 97282
sample:
trackIndex = 0
dataHashCode = -1120448741
size = 227
isKeyFrame = true
presentationTimeUs = 99844
sample:
trackIndex = 0
dataHashCode = -1718323210
size = 235
isKeyFrame = true
presentationTimeUs = 102406
sample:
trackIndex = 0
dataHashCode = -422416
size = 229
isKeyFrame = true
presentationTimeUs = 105059
sample:
trackIndex = 0
dataHashCode = 833757830
size = 6
isKeyFrame = true
presentationTimeUs = 107644
sample:
trackIndex = 1
dataHashCode = 1569455924
@ -465,4 +327,148 @@ sample:
size = 568
isKeyFrame = false
presentationTimeUs = 934266
sample:
trackIndex = 0
dataHashCode = 1675188486
size = 228
isKeyFrame = true
presentationTimeUs = 531619
sample:
trackIndex = 0
dataHashCode = 888567545
size = 238
isKeyFrame = true
presentationTimeUs = 554839
sample:
trackIndex = 0
dataHashCode = -439631803
size = 234
isKeyFrame = true
presentationTimeUs = 578058
sample:
trackIndex = 0
dataHashCode = 1606694497
size = 231
isKeyFrame = true
presentationTimeUs = 601278
sample:
trackIndex = 0
dataHashCode = 1747388653
size = 217
isKeyFrame = true
presentationTimeUs = 624498
sample:
trackIndex = 0
dataHashCode = -734560004
size = 239
isKeyFrame = true
presentationTimeUs = 647718
sample:
trackIndex = 0
dataHashCode = -975079040
size = 243
isKeyFrame = true
presentationTimeUs = 670938
sample:
trackIndex = 0
dataHashCode = -1403504710
size = 231
isKeyFrame = true
presentationTimeUs = 694158
sample:
trackIndex = 0
dataHashCode = 379512981
size = 230
isKeyFrame = true
presentationTimeUs = 717378
sample:
trackIndex = 0
dataHashCode = -997198863
size = 238
isKeyFrame = true
presentationTimeUs = 740598
sample:
trackIndex = 0
dataHashCode = 1394492825
size = 225
isKeyFrame = true
presentationTimeUs = 763818
sample:
trackIndex = 0
dataHashCode = -885232755
size = 232
isKeyFrame = true
presentationTimeUs = 787038
sample:
trackIndex = 0
dataHashCode = 260871367
size = 243
isKeyFrame = true
presentationTimeUs = 810258
sample:
trackIndex = 0
dataHashCode = -1505318960
size = 232
isKeyFrame = true
presentationTimeUs = 833478
sample:
trackIndex = 0
dataHashCode = -390625371
size = 237
isKeyFrame = true
presentationTimeUs = 856698
sample:
trackIndex = 0
dataHashCode = 1067950751
size = 228
isKeyFrame = true
presentationTimeUs = 879918
sample:
trackIndex = 0
dataHashCode = -1179436278
size = 235
isKeyFrame = true
presentationTimeUs = 903138
sample:
trackIndex = 0
dataHashCode = 1906607774
size = 264
isKeyFrame = true
presentationTimeUs = 926358
sample:
trackIndex = 0
dataHashCode = -800475828
size = 257
isKeyFrame = true
presentationTimeUs = 949578
sample:
trackIndex = 0
dataHashCode = 1718972977
size = 227
isKeyFrame = true
presentationTimeUs = 972798
sample:
trackIndex = 0
dataHashCode = -1120448741
size = 227
isKeyFrame = true
presentationTimeUs = 996018
sample:
trackIndex = 0
dataHashCode = -1718323210
size = 235
isKeyFrame = true
presentationTimeUs = 1019238
sample:
trackIndex = 0
dataHashCode = -422416
size = 229
isKeyFrame = true
presentationTimeUs = 1042458
sample:
trackIndex = 0
dataHashCode = 833757830
size = 6
isKeyFrame = true
presentationTimeUs = 1065678
released = true

View file

@ -1,277 +1,283 @@
containerMimeType = video/mp4
format 0:
id = 2
sampleMimeType = audio/mp4a-latm
codecs = mp4a.40.2
maxInputSize = 294
channelCount = 1
sampleRate = 44100
pcmEncoding = 2
language = und
metadata = entries=[TSSE: description=null: value=Lavf56.1.0]
initializationData:
data = length 2, hash 5F7
sample:
trackIndex = 0
dataHashCode = 1205768497
size = 23
isKeyFrame = true
presentationTimeUs = 0
presentationTimeUs = 44000
sample:
trackIndex = 0
dataHashCode = 837571078
size = 6
isKeyFrame = true
presentationTimeUs = 249
presentationTimeUs = 67219
sample:
trackIndex = 0
dataHashCode = -1991633045
size = 148
isKeyFrame = true
presentationTimeUs = 317
presentationTimeUs = 90439
sample:
trackIndex = 0
dataHashCode = -822987359
size = 189
isKeyFrame = true
presentationTimeUs = 1995
presentationTimeUs = 113659
sample:
trackIndex = 0
dataHashCode = -1141508176
size = 205
isKeyFrame = true
presentationTimeUs = 4126
presentationTimeUs = 136879
sample:
trackIndex = 0
dataHashCode = -226971245
size = 210
isKeyFrame = true
presentationTimeUs = 6438
presentationTimeUs = 160099
sample:
trackIndex = 0
dataHashCode = -2099636855
size = 210
isKeyFrame = true
presentationTimeUs = 8818
presentationTimeUs = 183319
sample:
trackIndex = 0
dataHashCode = 1541550559
size = 207
isKeyFrame = true
presentationTimeUs = 11198
presentationTimeUs = 206539
sample:
trackIndex = 0
dataHashCode = 411148001
size = 225
isKeyFrame = true
presentationTimeUs = 13533
presentationTimeUs = 229759
sample:
trackIndex = 0
dataHashCode = -897603973
size = 215
isKeyFrame = true
presentationTimeUs = 16072
presentationTimeUs = 252979
sample:
trackIndex = 0
dataHashCode = 1478106136
size = 211
isKeyFrame = true
presentationTimeUs = 18498
presentationTimeUs = 276199
sample:
trackIndex = 0
dataHashCode = -1380417145
size = 216
isKeyFrame = true
presentationTimeUs = 20878
presentationTimeUs = 299419
sample:
trackIndex = 0
dataHashCode = 780903644
size = 229
isKeyFrame = true
presentationTimeUs = 23326
presentationTimeUs = 322639
sample:
trackIndex = 0
dataHashCode = 586204432
size = 232
isKeyFrame = true
presentationTimeUs = 25911
presentationTimeUs = 345859
sample:
trackIndex = 0
dataHashCode = -2038771492
size = 235
isKeyFrame = true
presentationTimeUs = 28541
presentationTimeUs = 369079
sample:
trackIndex = 0
dataHashCode = -2065161304
size = 231
isKeyFrame = true
presentationTimeUs = 31194
presentationTimeUs = 392299
sample:
trackIndex = 0
dataHashCode = 468662933
size = 226
isKeyFrame = true
presentationTimeUs = 33801
presentationTimeUs = 415519
sample:
trackIndex = 0
dataHashCode = -358398546
size = 216
isKeyFrame = true
presentationTimeUs = 36363
presentationTimeUs = 438739
sample:
trackIndex = 0
dataHashCode = 1767325983
size = 229
isKeyFrame = true
presentationTimeUs = 38811
presentationTimeUs = 461959
sample:
trackIndex = 0
dataHashCode = 1093095458
size = 219
isKeyFrame = true
presentationTimeUs = 41396
presentationTimeUs = 485179
sample:
trackIndex = 0
dataHashCode = 1687543702
size = 241
isKeyFrame = true
presentationTimeUs = 43867
presentationTimeUs = 508399
sample:
trackIndex = 0
dataHashCode = 1675188486
size = 228
isKeyFrame = true
presentationTimeUs = 46588
presentationTimeUs = 531619
sample:
trackIndex = 0
dataHashCode = 888567545
size = 238
isKeyFrame = true
presentationTimeUs = 49173
presentationTimeUs = 554839
sample:
trackIndex = 0
dataHashCode = -439631803
size = 234
isKeyFrame = true
presentationTimeUs = 51871
presentationTimeUs = 578058
sample:
trackIndex = 0
dataHashCode = 1606694497
size = 231
isKeyFrame = true
presentationTimeUs = 54524
presentationTimeUs = 601278
sample:
trackIndex = 0
dataHashCode = 1747388653
size = 217
isKeyFrame = true
presentationTimeUs = 57131
presentationTimeUs = 624498
sample:
trackIndex = 0
dataHashCode = -734560004
size = 239
isKeyFrame = true
presentationTimeUs = 59579
presentationTimeUs = 647718
sample:
trackIndex = 0
dataHashCode = -975079040
size = 243
isKeyFrame = true
presentationTimeUs = 62277
presentationTimeUs = 670938
sample:
trackIndex = 0
dataHashCode = -1403504710
size = 231
isKeyFrame = true
presentationTimeUs = 65020
presentationTimeUs = 694158
sample:
trackIndex = 0
dataHashCode = 379512981
size = 230
isKeyFrame = true
presentationTimeUs = 67627
presentationTimeUs = 717378
sample:
trackIndex = 0
dataHashCode = -997198863
size = 238
isKeyFrame = true
presentationTimeUs = 70234
presentationTimeUs = 740598
sample:
trackIndex = 0
dataHashCode = 1394492825
size = 225
isKeyFrame = true
presentationTimeUs = 72932
presentationTimeUs = 763818
sample:
trackIndex = 0
dataHashCode = -885232755
size = 232
isKeyFrame = true
presentationTimeUs = 75471
presentationTimeUs = 787038
sample:
trackIndex = 0
dataHashCode = 260871367
size = 243
isKeyFrame = true
presentationTimeUs = 78101
presentationTimeUs = 810258
sample:
trackIndex = 0
dataHashCode = -1505318960
size = 232
isKeyFrame = true
presentationTimeUs = 80844
presentationTimeUs = 833478
sample:
trackIndex = 0
dataHashCode = -390625371
size = 237
isKeyFrame = true
presentationTimeUs = 83474
presentationTimeUs = 856698
sample:
trackIndex = 0
dataHashCode = 1067950751
size = 228
isKeyFrame = true
presentationTimeUs = 86149
presentationTimeUs = 879918
sample:
trackIndex = 0
dataHashCode = -1179436278
size = 235
isKeyFrame = true
presentationTimeUs = 88734
presentationTimeUs = 903138
sample:
trackIndex = 0
dataHashCode = 1906607774
size = 264
isKeyFrame = true
presentationTimeUs = 91387
presentationTimeUs = 926358
sample:
trackIndex = 0
dataHashCode = -800475828
size = 257
isKeyFrame = true
presentationTimeUs = 94380
presentationTimeUs = 949578
sample:
trackIndex = 0
dataHashCode = 1718972977
size = 227
isKeyFrame = true
presentationTimeUs = 97282
presentationTimeUs = 972798
sample:
trackIndex = 0
dataHashCode = -1120448741
size = 227
isKeyFrame = true
presentationTimeUs = 99844
presentationTimeUs = 996018
sample:
trackIndex = 0
dataHashCode = -1718323210
size = 235
isKeyFrame = true
presentationTimeUs = 102406
presentationTimeUs = 1019238
sample:
trackIndex = 0
dataHashCode = -422416
size = 229
isKeyFrame = true
presentationTimeUs = 105059
presentationTimeUs = 1042458
sample:
trackIndex = 0
dataHashCode = 833757830
size = 6
isKeyFrame = true
presentationTimeUs = 107644
presentationTimeUs = 1065678
released = true

View file

@@ -132,64 +132,148 @@ sample:
presentationTimeUs = 0
sample:
trackIndex = 0
dataHashCode = -833872563
size = 1732
dataHashCode = 1000136444
size = 140
isKeyFrame = true
presentationTimeUs = 416
presentationTimeUs = 417
sample:
trackIndex = 0
dataHashCode = -135901925
size = 380
dataHashCode = 217961709
size = 172
isKeyFrame = true
presentationTimeUs = 36499
presentationTimeUs = 3334
sample:
trackIndex = 0
dataHashCode = -879376936
size = 176
isKeyFrame = true
presentationTimeUs = 6917
sample:
trackIndex = 0
dataHashCode = 1259979587
size = 192
isKeyFrame = true
presentationTimeUs = 10584
sample:
trackIndex = 0
dataHashCode = 907407225
size = 188
isKeyFrame = true
presentationTimeUs = 14584
sample:
trackIndex = 0
dataHashCode = -904354707
size = 176
isKeyFrame = true
presentationTimeUs = 18500
sample:
trackIndex = 0
dataHashCode = 1001385853
size = 172
isKeyFrame = true
presentationTimeUs = 22167
sample:
trackIndex = 0
dataHashCode = 1545716086
size = 196
isKeyFrame = true
presentationTimeUs = 25750
sample:
trackIndex = 0
dataHashCode = 358710839
size = 180
isKeyFrame = true
presentationTimeUs = 29834
sample:
trackIndex = 0
dataHashCode = -671124798
size = 140
isKeyFrame = true
presentationTimeUs = 33584
sample:
trackIndex = 0
dataHashCode = -945404910
size = 120
isKeyFrame = true
presentationTimeUs = 36500
sample:
trackIndex = 0
dataHashCode = 1881048379
size = 88
isKeyFrame = true
presentationTimeUs = 39000
sample:
trackIndex = 0
dataHashCode = 1059579897
size = 88
isKeyFrame = true
presentationTimeUs = 40834
sample:
trackIndex = 0
dataHashCode = 1496098648
size = 84
isKeyFrame = true
presentationTimeUs = 42667
sample:
trackIndex = 0
dataHashCode = 250093960
size = 751
isKeyFrame = true
presentationTimeUs = 44415
presentationTimeUs = 44417
sample:
trackIndex = 0
dataHashCode = 1895536226
size = 1045
isKeyFrame = true
presentationTimeUs = 59998
presentationTimeUs = 60063
sample:
trackIndex = 0
dataHashCode = 1723596464
size = 947
isKeyFrame = true
presentationTimeUs = 81748
presentationTimeUs = 81834
sample:
trackIndex = 0
dataHashCode = -978803114
size = 946
isKeyFrame = true
presentationTimeUs = 101414
presentationTimeUs = 101563
sample:
trackIndex = 0
dataHashCode = 387377078
size = 946
isKeyFrame = true
presentationTimeUs = 121080
presentationTimeUs = 121271
sample:
trackIndex = 0
dataHashCode = -132658698
size = 901
isKeyFrame = true
presentationTimeUs = 140746
presentationTimeUs = 140980
sample:
trackIndex = 0
dataHashCode = 1495036471
size = 899
isKeyFrame = true
presentationTimeUs = 159496
presentationTimeUs = 159750
sample:
trackIndex = 0
dataHashCode = 304440590
size = 878
isKeyFrame = true
presentationTimeUs = 178162
presentationTimeUs = 178480
sample:
trackIndex = 0
dataHashCode = -1955900344
size = 112
isKeyFrame = true
presentationTimeUs = 196771
sample:
trackIndex = 0
dataHashCode = 88896626
size = 116
isKeyFrame = true
presentationTimeUs = 199105
sample:
trackIndex = 1
dataHashCode = 2139021989
@@ -214,12 +298,6 @@ sample:
size = 1193
isKeyFrame = false
presentationTimeUs = 734083
sample:
trackIndex = 0
dataHashCode = -752661703
size = 228
isKeyFrame = true
presentationTimeUs = 196412
sample:
trackIndex = 1
dataHashCode = -1554795381

View file

@@ -19,6 +19,7 @@ import static com.google.android.exoplayer2.util.Assertions.checkArgument;
import static com.google.android.exoplayer2.util.Assertions.checkNotNull;
import static com.google.android.exoplayer2.util.Util.castNonNull;
import static com.google.common.truth.Truth.assertThat;
import static com.google.common.truth.Truth.assertWithMessage;
import static org.junit.Assert.assertThrows;
import static org.mockito.ArgumentMatchers.any;
import static org.mockito.ArgumentMatchers.anyBoolean;
@@ -39,10 +40,12 @@ import com.google.android.exoplayer2.upstream.DataSpec;
import com.google.android.exoplayer2.upstream.TransferListener;
import com.google.android.exoplayer2.util.Assertions;
import com.google.android.exoplayer2.util.Util;
import com.google.common.base.Ascii;
import com.google.common.collect.ImmutableList;
import java.io.IOException;
import java.util.Arrays;
import java.util.List;
import java.util.Map;
import org.checkerframework.checker.nullness.qual.MonotonicNonNull;
import org.junit.Ignore;
import org.junit.Rule;
@@ -503,6 +506,62 @@ public abstract class DataSourceContractTest {
assertThat(dataSource.getUri()).isNull();
}
/**
 * Asserts that the response headers exposed by an open {@link DataSource} contain no null keys,
 * no null value lists, and no null entries inside any value list.
 */
@Test
public void getResponseHeaders_noNullKeysOrValues() throws Exception {
  ImmutableList<TestResource> resources = getTestResources();
  Assertions.checkArgument(!resources.isEmpty(), "Must provide at least one test resource.");
  for (int resourceIndex = 0; resourceIndex < resources.size(); resourceIndex++) {
    additionalFailureInfo.setInfo(getFailureLabel(resources, resourceIndex));
    TestResource testResource = resources.get(resourceIndex);
    DataSource dataSource = createDataSource();
    try {
      dataSource.open(new DataSpec(testResource.getUri()));
      Map<String, List<String>> headers = dataSource.getResponseHeaders();
      assertThat(headers).doesNotContainKey(null);
      assertThat(headers.values()).doesNotContain(null);
      // Also check each individual value list for null entries.
      for (List<String> headerValues : headers.values()) {
        assertThat(headerValues).doesNotContain(null);
      }
    } finally {
      // Always release the source, even if an assertion above failed.
      dataSource.close();
    }
    additionalFailureInfo.setInfo(null);
  }
}
/**
 * Asserts that response-header lookups are case-insensitive: flipping the ASCII case of
 * alternating characters in a header name must resolve to the same value list as the original
 * name.
 */
@Test
public void getResponseHeaders_caseInsensitive() throws Exception {
  ImmutableList<TestResource> resources = getTestResources();
  Assertions.checkArgument(!resources.isEmpty(), "Must provide at least one test resource.");
  for (int resourceIndex = 0; resourceIndex < resources.size(); resourceIndex++) {
    additionalFailureInfo.setInfo(getFailureLabel(resources, resourceIndex));
    TestResource testResource = resources.get(resourceIndex);
    DataSource dataSource = createDataSource();
    try {
      dataSource.open(new DataSpec(testResource.getUri()));
      Map<String, List<String>> headers = dataSource.getResponseHeaders();
      for (String headerName : headers.keySet()) {
        // TODO(internal b/205811776): Remove this when DefaultHttpDataSource is fixed to not
        // return a null key.
        if (headerName == null) {
          continue;
        }
        String caseFlippedKey = invertAsciiCaseOfEveryOtherCharacter(headerName);
        assertWithMessage("key='%s', caseFlippedKey='%s'", headerName, caseFlippedKey)
            .that(headers.get(caseFlippedKey))
            .isEqualTo(headers.get(headerName));
      }
    } finally {
      dataSource.close();
    }
    additionalFailureInfo.setInfo(null);
  }
}
@Test
public void getResponseHeaders_isEmptyWhileNotOpen() throws Exception {
ImmutableList<TestResource> resources = getTestResources();
@@ -548,6 +607,28 @@ public abstract class DataSourceContractTest {
}
}
/**
 * Returns {@code input} with the ASCII case of every character at an even index inverted;
 * characters at odd indices, and characters that are not ASCII letters, are left unchanged.
 */
private static String invertAsciiCaseOfEveryOtherCharacter(String input) {
  // Presize to the final length: the output has exactly one char per input char.
  StringBuilder result = new StringBuilder(input.length());
  for (int i = 0; i < input.length(); i++) {
    char c = input.charAt(i);
    result.append(i % 2 == 0 ? invertAsciiCase(c) : c);
  }
  return result.toString();
}
/**
 * Returns {@code c} with its case flipped if it's an ASCII letter, otherwise returns {@code c}
 * unchanged.
 */
private static char invertAsciiCase(char c) {
  if (Ascii.isLowerCase(c)) {
    return Ascii.toUpperCase(c);
  }
  // Upper-case ASCII letters are lowered; everything else passes through untouched.
  return Ascii.isUpperCase(c) ? Ascii.toLowerCase(c) : c;
}
/** Information about a resource that can be used to test the {@link DataSource} instance. */
public static final class TestResource {

View file

@@ -26,6 +26,7 @@ import android.os.Looper;
import android.util.Pair;
import androidx.annotation.Nullable;
import com.google.android.exoplayer2.Timeline;
import com.google.android.exoplayer2.analytics.PlayerId;
import com.google.android.exoplayer2.source.LoadEventInfo;
import com.google.android.exoplayer2.source.MediaLoadData;
import com.google.android.exoplayer2.source.MediaPeriod;
@@ -115,7 +116,8 @@ public class MediaSourceTestRunner {
final IOException[] prepareError = new IOException[1];
runOnPlaybackThread(
() -> {
mediaSource.prepareSource(mediaSourceListener, /* mediaTransferListener= */ null);
mediaSource.prepareSource(
mediaSourceListener, /* mediaTransferListener= */ null, PlayerId.UNSET);
try {
// TODO: This only catches errors that are set synchronously in prepareSource. To
// capture async errors we'll need to poll maybeThrowSourceInfoRefreshError until the

View file

@@ -261,7 +261,7 @@ public class WebServerDispatcher extends Dispatcher {
Resource resource = checkNotNull(resourcesByPath.get(requestPath));
byte[] resourceData = resource.getData();
if (resource.supportsRangeRequests()) {
response.setHeader("Accept-ranges", "bytes");
response.setHeader("Accept-Ranges", "bytes");
}
@Nullable ImmutableMap<String, Float> acceptEncodingHeader = getAcceptEncodingHeader(request);
@Nullable String preferredContentCoding;

View file

@@ -21,6 +21,7 @@ import androidx.annotation.Nullable;
import androidx.test.ext.junit.runners.AndroidJUnit4;
import com.google.android.exoplayer2.MediaItem;
import com.google.android.exoplayer2.Timeline.Window;
import com.google.android.exoplayer2.analytics.PlayerId;
import com.google.android.exoplayer2.source.MediaSource;
import java.util.concurrent.atomic.AtomicReference;
import org.junit.Test;
@@ -42,7 +43,8 @@ public class FakeMediaSourceFactoryTest {
int firstWindowIndex = timeline.getFirstWindowIndex(/* shuffleModeEnabled= */ false);
reportedMediaItem.set(timeline.getWindow(firstWindowIndex, new Window()).mediaItem);
},
/* mediaTransferListener= */ null);
/* mediaTransferListener= */ null,
PlayerId.UNSET);
assertThat(reportedMediaItem.get()).isSameInstanceAs(mediaItem);
assertThat(mediaSource.getMediaItem()).isSameInstanceAs(mediaItem);