Merge pull request #367 from google/dev

dev -> dev-webm-vp9-opus
This commit is contained in:
ojw28 2015-03-25 12:12:44 +00:00
commit 7f44d8f9b9
65 changed files with 3808 additions and 780 deletions

View file

@ -48,6 +48,7 @@ public class DemoUtil {
public static final int TYPE_SS = 1;
public static final int TYPE_OTHER = 2;
public static final int TYPE_HLS = 3;
public static final int TYPE_MP4 = 4;
private static final CookieManager defaultCookieManager;

View file

@ -24,6 +24,7 @@ import com.google.android.exoplayer.demo.player.DefaultRendererBuilder;
import com.google.android.exoplayer.demo.player.DemoPlayer;
import com.google.android.exoplayer.demo.player.DemoPlayer.RendererBuilder;
import com.google.android.exoplayer.demo.player.HlsRendererBuilder;
import com.google.android.exoplayer.demo.player.Mp4RendererBuilder;
import com.google.android.exoplayer.demo.player.SmoothStreamingRendererBuilder;
import com.google.android.exoplayer.demo.player.UnsupportedDrmException;
import com.google.android.exoplayer.metadata.GeobMetadata;
@ -215,6 +216,8 @@ public class PlayerActivity extends Activity implements SurfaceHolder.Callback,
new WidevineTestMediaDrmCallback(contentId), debugTextView, audioCapabilities);
case DemoUtil.TYPE_HLS:
return new HlsRendererBuilder(userAgent, contentUri.toString());
case DemoUtil.TYPE_MP4:
return new Mp4RendererBuilder(contentUri, debugTextView);
default:
return new DefaultRendererBuilder(this, contentUri, debugTextView);
}

View file

@ -135,6 +135,12 @@ import java.util.Locale;
new Sample("Apple AAC 10s", "https://devimages.apple.com.edgekey.net/"
+ "streaming/examples/bipbop_4x3/gear0/fileSequence0.aac",
DemoUtil.TYPE_OTHER),
new Sample("Big Buck Bunny (MP4)",
"http://redirector.c.youtube.com/videoplayback?id=604ed5ce52eda7ee&itag=22&source=youtube"
+ "&sparams=ip,ipbits,expire&ip=0.0.0.0&ipbits=0&expire=19000000000&signature="
+ "2E853B992F6CAB9D28CA3BEBD84A6F26709A8A55.94344B0D8BA83A7417AAD24DACC8C71A9A878ECE"
+ "&key=ik0",
DemoUtil.TYPE_MP4),
};
private Samples() {}

View file

@ -0,0 +1,69 @@
/*
* Copyright (C) 2014 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.google.android.exoplayer.demo.player;
import com.google.android.exoplayer.MediaCodecAudioTrackRenderer;
import com.google.android.exoplayer.MediaCodecVideoTrackRenderer;
import com.google.android.exoplayer.TrackRenderer;
import com.google.android.exoplayer.demo.player.DemoPlayer.RendererBuilder;
import com.google.android.exoplayer.demo.player.DemoPlayer.RendererBuilderCallback;
import com.google.android.exoplayer.source.DefaultSampleSource;
import com.google.android.exoplayer.source.Mp4SampleExtractor;
import com.google.android.exoplayer.upstream.DataSpec;
import com.google.android.exoplayer.upstream.UriDataSource;
import android.media.MediaCodec;
import android.net.Uri;
import android.widget.TextView;
/**
* A {@link RendererBuilder} for streams that can be read using {@link Mp4SampleExtractor}.
*/
public class Mp4RendererBuilder implements RendererBuilder {

  private final Uri uri;
  private final TextView debugTextView;

  /**
   * @param uri The {@link Uri} of the MP4 stream to play.
   * @param debugTextView A {@link TextView} to which debug output is written, or null if debug
   *     output is not required.
   */
  public Mp4RendererBuilder(Uri uri, TextView debugTextView) {
    this.uri = uri;
    this.debugTextView = debugTextView;
  }

  @Override
  public void buildRenderers(DemoPlayer player, RendererBuilderCallback callback) {
    // A single sample source backs both the video and audio renderers.
    UriDataSource dataSource = new UriDataSource("exoplayer", null);
    DefaultSampleSource sampleSource =
        new DefaultSampleSource(new Mp4SampleExtractor(dataSource, new DataSpec(uri)), 2);

    MediaCodecVideoTrackRenderer videoRenderer = new MediaCodecVideoTrackRenderer(sampleSource,
        null, true, MediaCodec.VIDEO_SCALING_MODE_SCALE_TO_FIT, 5000, null, player.getMainHandler(),
        player, 50);
    MediaCodecAudioTrackRenderer audioRenderer = new MediaCodecAudioTrackRenderer(sampleSource,
        null, true, player.getMainHandler(), player);

    // The debug renderer is only constructed when a target view was supplied.
    TrackRenderer debugRenderer = debugTextView == null
        ? null
        : new DebugTrackRenderer(debugTextView, videoRenderer);

    // Hand the assembled renderers back to the player.
    TrackRenderer[] renderers = new TrackRenderer[DemoPlayer.RENDERER_COUNT];
    renderers[DemoPlayer.TYPE_VIDEO] = videoRenderer;
    renderers[DemoPlayer.TYPE_AUDIO] = audioRenderer;
    renderers[DemoPlayer.TYPE_DEBUG] = debugRenderer;
    callback.onRenderers(null, null, renderers);
  }

}

View file

@ -281,7 +281,7 @@ public final class Ac3PassthroughAudioTrackRenderer extends TrackRenderer {
protected void onDisabled() {
audioSessionId = AudioTrack.SESSION_ID_NOT_SET;
shouldReadInputBuffer = true;
audioTrack.reset();
audioTrack.release();
}
@Override

View file

@ -15,6 +15,7 @@
*/
package com.google.android.exoplayer;
import android.media.MediaCodec;
import android.media.MediaExtractor;
/**
@ -43,11 +44,23 @@ public final class C {
public static final String UTF8_NAME = "UTF-8";
/**
* Sample flag that indicates the sample is a synchronization sample.
* @see MediaExtractor#SAMPLE_FLAG_SYNC
*/
@SuppressWarnings("InlinedApi")
public static final int SAMPLE_FLAG_SYNC = MediaExtractor.SAMPLE_FLAG_SYNC;
/**
* @see MediaExtractor#SAMPLE_FLAG_ENCRYPTED
*/
@SuppressWarnings("InlinedApi")
public static final int SAMPLE_FLAG_ENCRYPTED = MediaExtractor.SAMPLE_FLAG_ENCRYPTED;
/**
* @see MediaCodec#CRYPTO_MODE_AES_CTR
*/
@SuppressWarnings("InlinedApi")
public static final int CRYPTO_MODE_AES_CTR = MediaCodec.CRYPTO_MODE_AES_CTR;
private C() {}
}

View file

@ -202,7 +202,7 @@ public class MediaCodecAudioTrackRenderer extends MediaCodecTrackRenderer {
protected void onDisabled() {
audioSessionId = AudioTrack.SESSION_ID_NOT_SET;
try {
audioTrack.reset();
audioTrack.release();
} finally {
super.onDisabled();
}

View file

@ -16,6 +16,7 @@
package com.google.android.exoplayer;
import com.google.android.exoplayer.MediaCodecUtil.DecoderQueryException;
import com.google.android.exoplayer.drm.DrmInitData;
import com.google.android.exoplayer.drm.DrmSessionManager;
import com.google.android.exoplayer.util.Assertions;
import com.google.android.exoplayer.util.Util;
@ -25,7 +26,6 @@ import android.media.MediaCodec;
import android.media.MediaCodec.CodecException;
import android.media.MediaCodec.CryptoException;
import android.media.MediaCrypto;
import android.media.MediaExtractor;
import android.os.Handler;
import android.os.SystemClock;
@ -33,8 +33,6 @@ import java.io.IOException;
import java.nio.ByteBuffer;
import java.util.ArrayList;
import java.util.List;
import java.util.Map;
import java.util.UUID;
/**
* An abstract {@link TrackRenderer} that uses {@link MediaCodec} to decode samples for rendering.
@ -164,7 +162,7 @@ public abstract class MediaCodecTrackRenderer extends TrackRenderer {
protected final Handler eventHandler;
private MediaFormat format;
private Map<UUID, byte[]> drmInitData;
private DrmInitData drmInitData;
private MediaCodec codec;
private boolean codecIsAdaptive;
private ByteBuffer[] inputBuffers;
@ -281,7 +279,7 @@ public abstract class MediaCodecTrackRenderer extends TrackRenderer {
throw new ExoPlaybackException("Media requires a DrmSessionManager");
}
if (!openedDrmSession) {
drmSessionManager.open(drmInitData, mimeType);
drmSessionManager.open(drmInitData);
openedDrmSession = true;
}
int drmSessionState = drmSessionManager.getState();
@ -584,7 +582,7 @@ public abstract class MediaCodecTrackRenderer extends TrackRenderer {
}
waitingForFirstSyncFrame = false;
}
boolean sampleEncrypted = (sampleHolder.flags & MediaExtractor.SAMPLE_FLAG_ENCRYPTED) != 0;
boolean sampleEncrypted = (sampleHolder.flags & C.SAMPLE_FLAG_ENCRYPTED) != 0;
waitingForKeys = shouldWaitForKeys(sampleEncrypted);
if (waitingForKeys) {
return false;

View file

@ -40,6 +40,8 @@ public class MediaFormat {
public final String mimeType;
public final int maxInputSize;
public final long durationUs;
public final int width;
public final int height;
public final float pixelWidthHeightRatio;
@ -49,11 +51,11 @@ public class MediaFormat {
public final int bitrate;
public final List<byte[]> initializationData;
private int maxWidth;
private int maxHeight;
public final List<byte[]> initializationData;
// Lazy-initialized hashcode.
private int hashCode;
// Possibly-lazy-initialized framework media format.
@ -66,25 +68,38 @@ public class MediaFormat {
public static MediaFormat createVideoFormat(String mimeType, int maxInputSize, int width,
int height, List<byte[]> initializationData) {
return createVideoFormat(mimeType, maxInputSize, width, height, 1, initializationData);
return createVideoFormat(
mimeType, maxInputSize, C.UNKNOWN_TIME_US, width, height, initializationData);
}
public static MediaFormat createVideoFormat(String mimeType, int maxInputSize, int width,
int height, float pixelWidthHeightRatio, List<byte[]> initializationData) {
return new MediaFormat(mimeType, maxInputSize, width, height, pixelWidthHeightRatio, NO_VALUE,
NO_VALUE, NO_VALUE, initializationData);
public static MediaFormat createVideoFormat(String mimeType, int maxInputSize, long durationUs,
int width, int height, List<byte[]> initializationData) {
return createVideoFormat(
mimeType, maxInputSize, durationUs, width, height, 1, initializationData);
}
public static MediaFormat createVideoFormat(String mimeType, int maxInputSize, long durationUs,
int width, int height, float pixelWidthHeightRatio, List<byte[]> initializationData) {
return new MediaFormat(mimeType, maxInputSize, durationUs, width, height, pixelWidthHeightRatio,
NO_VALUE, NO_VALUE, NO_VALUE, initializationData);
}
public static MediaFormat createAudioFormat(String mimeType, int maxInputSize, int channelCount,
int sampleRate, List<byte[]> initializationData) {
return new MediaFormat(mimeType, maxInputSize, NO_VALUE, NO_VALUE, NO_VALUE, channelCount,
sampleRate, NO_VALUE, initializationData);
return createAudioFormat(
mimeType, maxInputSize, C.UNKNOWN_TIME_US, channelCount, sampleRate, initializationData);
}
public static MediaFormat createAudioFormat(String mimeType, int maxInputSize, int channelCount,
int sampleRate, int bitrate, List<byte[]> initializationData) {
return new MediaFormat(mimeType, maxInputSize, NO_VALUE, NO_VALUE, NO_VALUE, channelCount,
sampleRate, bitrate, initializationData);
public static MediaFormat createAudioFormat(String mimeType, int maxInputSize, long durationUs,
int channelCount, int sampleRate, List<byte[]> initializationData) {
return createAudioFormat(
mimeType, maxInputSize, durationUs, channelCount, sampleRate, NO_VALUE, initializationData);
}
public static MediaFormat createAudioFormat(String mimeType, int maxInputSize, long durationUs,
int channelCount, int sampleRate, int bitrate, List<byte[]> initializationData) {
return new MediaFormat(mimeType, maxInputSize, durationUs, NO_VALUE, NO_VALUE, NO_VALUE,
channelCount, sampleRate, bitrate, initializationData);
}
public static MediaFormat createId3Format() {
@ -100,8 +115,8 @@ public class MediaFormat {
}
public static MediaFormat createFormatForMimeType(String mimeType) {
return new MediaFormat(mimeType, NO_VALUE, NO_VALUE, NO_VALUE, NO_VALUE, NO_VALUE, NO_VALUE,
NO_VALUE, null);
return new MediaFormat(mimeType, NO_VALUE, C.UNKNOWN_TIME_US, NO_VALUE, NO_VALUE, NO_VALUE,
NO_VALUE, NO_VALUE, NO_VALUE, null);
}
@TargetApi(16)
@ -123,15 +138,18 @@ public class MediaFormat {
initializationData.add(data);
buffer.flip();
}
durationUs = format.containsKey(android.media.MediaFormat.KEY_DURATION)
? format.getLong(android.media.MediaFormat.KEY_DURATION) : C.UNKNOWN_TIME_US;
maxWidth = NO_VALUE;
maxHeight = NO_VALUE;
}
private MediaFormat(String mimeType, int maxInputSize, int width, int height,
private MediaFormat(String mimeType, int maxInputSize, long durationUs, int width, int height,
float pixelWidthHeightRatio, int channelCount, int sampleRate, int bitrate,
List<byte[]> initializationData) {
this.mimeType = mimeType;
this.maxInputSize = maxInputSize;
this.durationUs = durationUs;
this.width = width;
this.height = height;
this.pixelWidthHeightRatio = pixelWidthHeightRatio;
@ -169,6 +187,7 @@ public class MediaFormat {
result = 31 * result + width;
result = 31 * result + height;
result = 31 * result + Float.floatToRawIntBits(pixelWidthHeightRatio);
result = 31 * result + (int) durationUs;
result = 31 * result + maxWidth;
result = 31 * result + maxHeight;
result = 31 * result + channelCount;
@ -225,7 +244,7 @@ public class MediaFormat {
public String toString() {
return "MediaFormat(" + mimeType + ", " + maxInputSize + ", " + width + ", " + height + ", "
+ pixelWidthHeightRatio + ", " + channelCount + ", " + sampleRate + ", " + bitrate + ", "
+ maxWidth + ", " + maxHeight + ")";
+ durationUs + ", " + maxWidth + ", " + maxHeight + ")";
}
/**
@ -246,6 +265,9 @@ public class MediaFormat {
for (int i = 0; i < initializationData.size(); i++) {
format.setByteBuffer("csd-" + i, ByteBuffer.wrap(initializationData.get(i)));
}
if (durationUs != C.UNKNOWN_TIME_US) {
format.setLong(android.media.MediaFormat.KEY_DURATION, durationUs);
}
maybeSetMaxDimensionsV16(format);
frameworkMediaFormat = format;
}

View file

@ -15,8 +15,7 @@
*/
package com.google.android.exoplayer;
import java.util.Map;
import java.util.UUID;
import com.google.android.exoplayer.drm.DrmInitData;
/**
* Holds a {@link MediaFormat} and corresponding drm scheme initialization data.
@ -28,9 +27,8 @@ public final class MediaFormatHolder {
*/
public MediaFormat format;
/**
* Initialization data for each of the drm schemes supported by the media, keyed by scheme UUID.
* Null if the media is not encrypted.
* Initialization data for drm schemes supported by the media. Null if the media is not encrypted.
*/
public Map<UUID, byte[]> drmInitData;
public DrmInitData drmInitData;
}

View file

@ -50,9 +50,8 @@ public final class SampleHolder {
public int size;
/**
* Flags that accompany the sample. A combination of
* {@link android.media.MediaExtractor#SAMPLE_FLAG_SYNC} and
* {@link android.media.MediaExtractor#SAMPLE_FLAG_ENCRYPTED}
* Flags that accompany the sample. A combination of {@link C#SAMPLE_FLAG_SYNC} and
* {@link C#SAMPLE_FLAG_ENCRYPTED}.
*/
public int flags;

View file

@ -44,6 +44,8 @@ import java.nio.ByteBuffer;
* <p>Call {@link #reconfigure} when the output format changes.
*
* <p>Call {@link #reset} to free resources. It is safe to re-{@link #initialize} the instance.
*
* <p>Call {@link #release} when the instance will no longer be used.
*/
@TargetApi(16)
public final class AudioTrack {
@ -91,6 +93,12 @@ public final class AudioTrack {
/** Returned by {@link #getCurrentPositionUs} when the position is not set. */
public static final long CURRENT_POSITION_NOT_SET = Long.MIN_VALUE;
/**
* Set to {@code true} to enable a workaround for an issue where an audio effect does not keep its
* session active across releasing/initializing a new audio track, on platform API version < 21.
*/
private static final boolean ENABLE_PRE_V21_AUDIO_SESSION_WORKAROUND = false;
/** A minimum length for the {@link android.media.AudioTrack} buffer, in microseconds. */
private static final long MIN_BUFFER_DURATION_US = 250000;
/** A maximum length for the {@link android.media.AudioTrack} buffer, in microseconds. */
@ -132,6 +140,9 @@ public final class AudioTrack {
private final ConditionVariable releasingConditionVariable;
private final long[] playheadOffsets;
/** Used to keep the audio session active on pre-V21 builds (see {@link #initialize()}). */
private android.media.AudioTrack keepSessionIdAudioTrack;
private android.media.AudioTrack audioTrack;
private AudioTrackUtil audioTrackUtil;
private int sampleRate;
@ -267,15 +278,37 @@ public final class AudioTrack {
audioTrack = new android.media.AudioTrack(AudioManager.STREAM_MUSIC, sampleRate,
channelConfig, encoding, bufferSize, android.media.AudioTrack.MODE_STREAM, sessionId);
}
checkAudioTrackInitialized();
sessionId = audioTrack.getAudioSessionId();
if (ENABLE_PRE_V21_AUDIO_SESSION_WORKAROUND) {
if (Util.SDK_INT < 21) {
// The workaround creates an audio track with a one byte buffer on the same session, and
// does not release it until this object is released, which keeps the session active.
if (keepSessionIdAudioTrack != null
&& sessionId != keepSessionIdAudioTrack.getAudioSessionId()) {
releaseKeepSessionIdAudioTrack();
}
if (keepSessionIdAudioTrack == null) {
int sampleRate = 4000; // Equal to private android.media.AudioTrack.MIN_SAMPLE_RATE.
int channelConfig = AudioFormat.CHANNEL_OUT_MONO;
int encoding = AudioFormat.ENCODING_PCM_8BIT;
int bufferSize = 1; // Use a one byte buffer, as it is not actually used for playback.
keepSessionIdAudioTrack = new android.media.AudioTrack(AudioManager.STREAM_MUSIC,
sampleRate, channelConfig, encoding, bufferSize, android.media.AudioTrack.MODE_STATIC,
sessionId);
}
}
}
if (Util.SDK_INT >= 19) {
audioTrackUtil = new AudioTrackUtilV19(audioTrack);
} else {
audioTrackUtil = new AudioTrackUtil(audioTrack);
}
setVolume(volume);
return audioTrack.getAudioSessionId();
return sessionId;
}
/**
@ -515,9 +548,9 @@ public final class AudioTrack {
}
/**
* Releases resources associated with this instance asynchronously. Calling {@link #initialize}
* will block until the audio track has been released, so it is safe to initialize immediately
* after resetting.
* Releases the underlying audio track asynchronously. Calling {@link #initialize} will block
* until the audio track has been released, so it is safe to initialize immediately after
* resetting. The audio session may remain active until the instance is {@link #release}d.
*/
public void reset() {
if (isInitialized()) {
@ -547,6 +580,29 @@ public final class AudioTrack {
}
}
/** Releases all resources associated with this instance. */
public void release() {
reset();
releaseKeepSessionIdAudioTrack();
}
/** Releases {@link #keepSessionIdAudioTrack} asynchronously, if it is non-{@code null}. */
private void releaseKeepSessionIdAudioTrack() {
if (keepSessionIdAudioTrack == null) {
return;
}
// AudioTrack.release can take some time, so we call it on a background thread.
final android.media.AudioTrack toRelease = keepSessionIdAudioTrack;
keepSessionIdAudioTrack = null;
new Thread() {
@Override
public void run() {
toRelease.release();
}
}.start();
}
/** Returns whether {@link #getCurrentPositionUs} can return the current playback position. */
private boolean hasCurrentPositionUs() {
return isInitialized() && startMediaTimeUs != START_NOT_SET;

View file

@ -352,7 +352,7 @@ public class ChunkSampleSource implements SampleSource, Loader.Callback {
if (mediaFormat != null && !mediaFormat.equals(downstreamMediaFormat, true)) {
chunkSource.getMaxVideoDimensions(mediaFormat);
formatHolder.format = mediaFormat;
formatHolder.drmInitData = mediaChunk.getPsshInfo();
formatHolder.drmInitData = mediaChunk.getDrmInitData();
downstreamMediaFormat = mediaFormat;
return FORMAT_READ;
}

View file

@ -19,14 +19,12 @@ import com.google.android.exoplayer.MediaFormat;
import com.google.android.exoplayer.ParserException;
import com.google.android.exoplayer.SampleHolder;
import com.google.android.exoplayer.chunk.parser.Extractor;
import com.google.android.exoplayer.drm.DrmInitData;
import com.google.android.exoplayer.upstream.DataSource;
import com.google.android.exoplayer.upstream.DataSpec;
import com.google.android.exoplayer.upstream.NonBlockingInputStream;
import com.google.android.exoplayer.util.Assertions;
import java.util.Map;
import java.util.UUID;
/**
* A {@link MediaChunk} extracted from a container.
*/
@ -38,7 +36,7 @@ public final class ContainerMediaChunk extends MediaChunk {
private boolean prepared;
private MediaFormat mediaFormat;
private Map<UUID, byte[]> psshInfo;
private DrmInitData drmInitData;
/**
* @deprecated Use the other constructor, passing null as {@code drmInitData}.
@ -60,8 +58,9 @@ public final class ContainerMediaChunk extends MediaChunk {
* @param endTimeUs The end time of the media contained by the chunk, in microseconds.
* @param nextChunkIndex The index of the next chunk, or -1 if this is the last chunk.
* @param extractor The extractor that will be used to extract the samples.
* @param psshInfo Pssh data. May be null if pssh data is present within the stream, meaning it
* can be obtained directly from {@code extractor}, or if no pssh data is required.
* @param drmInitData DRM initialization data. May be null if DRM initialization data is present
* within the stream, meaning it can be obtained directly from {@code extractor}, or if no
* DRM initialization data is required.
* @param maybeSelfContained Set to true if this chunk might be self contained, meaning it might
* contain a moov atom defining the media format of the chunk. This parameter can always be
* safely set to true. Setting to false where the chunk is known to not be self contained may
@ -70,12 +69,12 @@ public final class ContainerMediaChunk extends MediaChunk {
*/
public ContainerMediaChunk(DataSource dataSource, DataSpec dataSpec, Format format,
int trigger, long startTimeUs, long endTimeUs, int nextChunkIndex, Extractor extractor,
Map<UUID, byte[]> psshInfo, boolean maybeSelfContained, long sampleOffsetUs) {
DrmInitData drmInitData, boolean maybeSelfContained, long sampleOffsetUs) {
super(dataSource, dataSpec, format, trigger, startTimeUs, endTimeUs, nextChunkIndex);
this.extractor = extractor;
this.maybeSelfContained = maybeSelfContained;
this.sampleOffsetUs = sampleOffsetUs;
this.psshInfo = psshInfo;
this.drmInitData = drmInitData;
}
@Override
@ -111,9 +110,9 @@ public final class ContainerMediaChunk extends MediaChunk {
}
if (prepared) {
mediaFormat = extractor.getFormat();
Map<UUID, byte[]> extractorPsshInfo = extractor.getPsshInfo();
if (extractorPsshInfo != null) {
psshInfo = extractorPsshInfo;
DrmInitData extractorDrmInitData = extractor.getDrmInitData();
if (extractorDrmInitData != null) {
drmInitData = extractorDrmInitData;
}
}
}
@ -145,8 +144,8 @@ public final class ContainerMediaChunk extends MediaChunk {
}
@Override
public Map<UUID, byte[]> getPsshInfo() {
return psshInfo;
public DrmInitData getDrmInitData() {
return drmInitData;
}
}

View file

@ -18,12 +18,10 @@ package com.google.android.exoplayer.chunk;
import com.google.android.exoplayer.MediaFormat;
import com.google.android.exoplayer.ParserException;
import com.google.android.exoplayer.SampleHolder;
import com.google.android.exoplayer.drm.DrmInitData;
import com.google.android.exoplayer.upstream.DataSource;
import com.google.android.exoplayer.upstream.DataSpec;
import java.util.Map;
import java.util.UUID;
/**
* An abstract base class for {@link Chunk}s that contain media samples.
*/
@ -129,12 +127,12 @@ public abstract class MediaChunk extends Chunk {
public abstract MediaFormat getMediaFormat();
/**
* Returns the pssh information associated with the chunk.
* Returns the DRM initialization data associated with the chunk.
* <p>
* Should only be called after the chunk has been successfully prepared.
*
* @return The pssh information.
* @return The DRM initialization data.
*/
public abstract Map<UUID, byte[]> getPsshInfo();
public abstract DrmInitData getDrmInitData();
}

View file

@ -17,14 +17,12 @@ package com.google.android.exoplayer.chunk;
import com.google.android.exoplayer.MediaFormat;
import com.google.android.exoplayer.SampleHolder;
import com.google.android.exoplayer.drm.DrmInitData;
import com.google.android.exoplayer.upstream.DataSource;
import com.google.android.exoplayer.upstream.DataSpec;
import com.google.android.exoplayer.upstream.NonBlockingInputStream;
import com.google.android.exoplayer.util.Assertions;
import java.util.Map;
import java.util.UUID;
/**
* A {@link MediaChunk} containing a single sample.
*/
@ -132,7 +130,7 @@ public class SingleSampleMediaChunk extends MediaChunk {
}
@Override
public Map<UUID, byte[]> getPsshInfo() {
public DrmInitData getDrmInitData() {
return null;
}

View file

@ -15,15 +15,12 @@
*/
package com.google.android.exoplayer.chunk.parser;
import com.google.android.exoplayer.C;
import com.google.android.exoplayer.MediaFormat;
import com.google.android.exoplayer.ParserException;
import com.google.android.exoplayer.SampleHolder;
import com.google.android.exoplayer.drm.DrmInitData;
import com.google.android.exoplayer.upstream.NonBlockingInputStream;
import java.util.Map;
import java.util.UUID;
/**
* Facilitates extraction of media samples from a container format.
*/
@ -43,7 +40,7 @@ public interface Extractor {
public static final int RESULT_READ_SAMPLE = 4;
/**
* Initialization data was read. The parsed data can be read using {@link #getFormat()} and
* {@link #getPsshInfo}.
* {@link #getDrmInitData()}.
*/
public static final int RESULT_READ_INIT = 8;
/**
@ -80,17 +77,12 @@ public interface Extractor {
public MediaFormat getFormat();
/**
* Returns the duration of the stream in microseconds, or {@link C#UNKNOWN_TIME_US} if unknown.
*/
public long getDurationUs();
/**
* Returns the pssh information parsed from the stream.
* Returns DRM initialization data parsed from the stream.
*
* @return The pssh information. May be null if pssh data has yet to be parsed, or if the stream
* does not contain any pssh data.
* @return The DRM initialization data. May be null if the initialization data has yet to be
* parsed, or if the stream does not contain any DRM initialization data.
*/
public Map<UUID, byte[]> getPsshInfo();
public DrmInitData getDrmInitData();
/**
* Consumes data from a {@link NonBlockingInputStream}.

View file

@ -21,6 +21,7 @@ import com.google.android.exoplayer.ParserException;
import com.google.android.exoplayer.SampleHolder;
import com.google.android.exoplayer.chunk.parser.Extractor;
import com.google.android.exoplayer.chunk.parser.SegmentIndex;
import com.google.android.exoplayer.drm.DrmInitData;
import com.google.android.exoplayer.mp4.Atom;
import com.google.android.exoplayer.mp4.Atom.ContainerAtom;
import com.google.android.exoplayer.mp4.Atom.LeafAtom;
@ -28,20 +29,15 @@ import com.google.android.exoplayer.mp4.CommonMp4AtomParsers;
import com.google.android.exoplayer.mp4.Mp4Util;
import com.google.android.exoplayer.mp4.Track;
import com.google.android.exoplayer.upstream.NonBlockingInputStream;
import com.google.android.exoplayer.util.MimeTypes;
import com.google.android.exoplayer.util.ParsableByteArray;
import com.google.android.exoplayer.util.Util;
import android.annotation.SuppressLint;
import android.media.MediaCodec;
import android.media.MediaExtractor;
import java.nio.ByteBuffer;
import java.util.Arrays;
import java.util.Collections;
import java.util.HashMap;
import java.util.HashSet;
import java.util.List;
import java.util.Map;
import java.util.Set;
import java.util.Stack;
import java.util.UUID;
@ -145,7 +141,7 @@ public final class FragmentedMp4Extractor implements Extractor {
private int lastSyncSampleIndex;
// Data parsed from moov and sidx atoms
private final HashMap<UUID, byte[]> psshData;
private DrmInitData.Mapped drmInitData;
private SegmentIndex segmentIndex;
private Track track;
private DefaultSampleValues extendsDefaults;
@ -165,7 +161,6 @@ public final class FragmentedMp4Extractor implements Extractor {
extendedTypeScratch = new byte[16];
containerAtoms = new Stack<ContainerAtom>();
fragmentRun = new TrackFragment();
psshData = new HashMap<UUID, byte[]>();
}
/**
@ -179,8 +174,8 @@ public final class FragmentedMp4Extractor implements Extractor {
}
@Override
public Map<UUID, byte[]> getPsshInfo() {
return psshData.isEmpty() ? null : psshData;
public DrmInitData getDrmInitData() {
return drmInitData;
}
@Override
@ -198,11 +193,6 @@ public final class FragmentedMp4Extractor implements Extractor {
return track == null ? null : track.mediaFormat;
}
@Override
public long getDurationUs() {
return track == null ? C.UNKNOWN_TIME_US : track.durationUs;
}
@Override
public int read(NonBlockingInputStream inputStream, SampleHolder out)
throws ParserException {
@ -375,7 +365,10 @@ public final class FragmentedMp4Extractor implements Extractor {
int dataSize = psshAtom.readInt();
byte[] data = new byte[dataSize];
psshAtom.readBytes(data, 0, dataSize);
psshData.put(uuid, data);
if (drmInitData == null) {
drmInitData = new DrmInitData.Mapped(MimeTypes.VIDEO_MP4);
}
drmInitData.put(uuid, data);
}
}
ContainerAtom mvex = moov.getContainerAtomOfType(Atom.TYPE_mvex);
@ -798,12 +791,14 @@ public final class FragmentedMp4Extractor implements Extractor {
return RESULT_READ_SAMPLE;
}
@SuppressLint("InlinedApi")
private void readSampleEncryptionData(ParsableByteArray sampleEncryptionData, SampleHolder out) {
TrackEncryptionBox encryptionBox =
track.sampleDescriptionEncryptionBoxes[fragmentRun.sampleDescriptionIndex];
if (!encryptionBox.isEncrypted) {
return;
}
byte[] keyId = encryptionBox.keyId;
boolean isEncrypted = encryptionBox.isEncrypted;
int vectorSize = encryptionBox.initializationVectorSize;
boolean subsampleEncryption = fragmentRun.sampleHasSubsampleEncryptionTable[sampleIndex];
@ -831,11 +826,10 @@ public final class FragmentedMp4Extractor implements Extractor {
clearDataSizes[0] = 0;
encryptedDataSizes[0] = fragmentRun.sampleSizeTable[sampleIndex];
}
out.cryptoInfo.set(subsampleCount, clearDataSizes, encryptedDataSizes, keyId, vector,
isEncrypted ? MediaCodec.CRYPTO_MODE_AES_CTR : MediaCodec.CRYPTO_MODE_UNENCRYPTED);
if (isEncrypted) {
out.flags |= MediaExtractor.SAMPLE_FLAG_ENCRYPTED;
}
C.CRYPTO_MODE_AES_CTR);
out.flags |= C.SAMPLE_FLAG_ENCRYPTED;
}
}

View file

@ -21,6 +21,7 @@ import com.google.android.exoplayer.ParserException;
import com.google.android.exoplayer.SampleHolder;
import com.google.android.exoplayer.chunk.parser.Extractor;
import com.google.android.exoplayer.chunk.parser.SegmentIndex;
import com.google.android.exoplayer.drm.DrmInitData;
import com.google.android.exoplayer.upstream.NonBlockingInputStream;
import com.google.android.exoplayer.util.LongArray;
import com.google.android.exoplayer.util.MimeTypes;
@ -28,8 +29,6 @@ import com.google.android.exoplayer.util.MimeTypes;
import java.nio.ByteBuffer;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Map;
import java.util.UUID;
import java.util.concurrent.TimeUnit;
/**
@ -38,6 +37,8 @@ import java.util.concurrent.TimeUnit;
* <p>WebM is a subset of the EBML elements defined for Matroska. More information about EBML and
* Matroska is available <a href="http://www.matroska.org/technical/specs/index.html">here</a>.
* More info about WebM is <a href="http://www.webmproject.org/code/specs/container/">here</a>.
* RFC on encrypted WebM can be found
* <a href="http://wiki.webmproject.org/encryption/webm-encryption-rfc">here</a>.
*/
public final class WebmExtractor implements Extractor {
@ -47,6 +48,7 @@ public final class WebmExtractor implements Extractor {
private static final String CODEC_ID_OPUS = "A_OPUS";
private static final int VORBIS_MAX_INPUT_SIZE = 8192;
private static final int OPUS_MAX_INPUT_SIZE = 5760;
private static final int BLOCK_COUNTER_SIZE = 16;
private static final int UNKNOWN = -1;
// Element IDs
@ -80,23 +82,31 @@ public final class WebmExtractor implements Extractor {
private static final int ID_CHANNELS = 0x9F;
private static final int ID_SAMPLING_FREQUENCY = 0xB5;
private static final int ID_CONTENT_ENCODINGS = 0x6D80;
private static final int ID_CONTENT_ENCODING = 0x6240;
private static final int ID_CONTENT_ENCODING_ORDER = 0x5031;
private static final int ID_CONTENT_ENCODING_SCOPE = 0x5032;
private static final int ID_CONTENT_ENCODING_TYPE = 0x5033;
private static final int ID_CONTENT_ENCRYPTION = 0x5035;
private static final int ID_CONTENT_ENCRYPTION_ALGORITHM = 0x47E1;
private static final int ID_CONTENT_ENCRYPTION_KEY_ID = 0x47E2;
private static final int ID_CONTENT_ENCRYPTION_AES_SETTINGS = 0x47E7;
private static final int ID_CONTENT_ENCRYPTION_AES_SETTINGS_CIPHER_MODE = 0x47E8;
private static final int ID_CUES = 0x1C53BB6B;
private static final int ID_CUE_POINT = 0xBB;
private static final int ID_CUE_TIME = 0xB3;
private static final int ID_CUE_TRACK_POSITIONS = 0xB7;
private static final int ID_CUE_CLUSTER_POSITION = 0xF1;
// SimpleBlock Lacing Values
private static final int LACING_NONE = 0;
private static final int LACING_XIPH = 1;
private static final int LACING_FIXED = 2;
private static final int LACING_EBML = 3;
private static final int READ_TERMINATING_RESULTS = RESULT_NEED_MORE_DATA | RESULT_END_OF_STREAM
| RESULT_READ_SAMPLE | RESULT_NEED_SAMPLE_HOLDER;
private final EbmlReader reader;
private final byte[] simpleBlockTimecodeAndFlags = new byte[3];
private DrmInitData.Universal drmInitData;
private SampleHolder sampleHolder;
private int readResults;
@ -104,7 +114,7 @@ public final class WebmExtractor implements Extractor {
private long segmentStartOffsetBytes = UNKNOWN;
private long segmentEndOffsetBytes = UNKNOWN;
private long timecodeScale = 1000000L;
private long durationUs = UNKNOWN;
private long durationUs = C.UNKNOWN_TIME_US;
private int pixelWidth = UNKNOWN;
private int pixelHeight = UNKNOWN;
private int channelCount = UNKNOWN;
@ -113,7 +123,9 @@ public final class WebmExtractor implements Extractor {
private String codecId;
private long codecDelayNs;
private long seekPreRollNs;
private boolean seenAudioTrack;
private boolean isAudioTrack;
private boolean hasContentEncryption;
private byte[] encryptionKeyId;
private long cuesSizeBytes = UNKNOWN;
private long clusterTimecodeUs = UNKNOWN;
private long simpleBlockTimecodeUs = UNKNOWN;
@ -182,14 +194,8 @@ public final class WebmExtractor implements Extractor {
}
@Override
public long getDurationUs() {
return durationUs == UNKNOWN ? C.UNKNOWN_TIME_US : durationUs;
}
@Override
public Map<UUID, byte[]> getPsshInfo() {
// TODO: Parse pssh data from Webm streams.
return null;
public DrmInitData getDrmInitData() {
return drmInitData;
}
/* package */ int getElementType(int id) {
@ -202,6 +208,10 @@ public final class WebmExtractor implements Extractor {
case ID_TRACK_ENTRY:
case ID_AUDIO:
case ID_VIDEO:
case ID_CONTENT_ENCODINGS:
case ID_CONTENT_ENCODING:
case ID_CONTENT_ENCRYPTION:
case ID_CONTENT_ENCRYPTION_AES_SETTINGS:
case ID_CUES:
case ID_CUE_POINT:
case ID_CUE_TRACK_POSITIONS:
@ -216,12 +226,18 @@ public final class WebmExtractor implements Extractor {
case ID_CODEC_DELAY:
case ID_SEEK_PRE_ROLL:
case ID_CHANNELS:
case ID_CONTENT_ENCODING_ORDER:
case ID_CONTENT_ENCODING_SCOPE:
case ID_CONTENT_ENCODING_TYPE:
case ID_CONTENT_ENCRYPTION_ALGORITHM:
case ID_CONTENT_ENCRYPTION_AES_SETTINGS_CIPHER_MODE:
case ID_CUE_TIME:
case ID_CUE_CLUSTER_POSITION:
return EbmlReader.TYPE_UNSIGNED_INT;
case ID_DOC_TYPE:
case ID_CODEC_ID:
return EbmlReader.TYPE_STRING;
case ID_CONTENT_ENCRYPTION_KEY_ID:
case ID_SIMPLE_BLOCK:
case ID_BLOCK:
case ID_CODEC_PRIVATE:
@ -250,6 +266,12 @@ public final class WebmExtractor implements Extractor {
cueTimesUs = new LongArray();
cueClusterPositions = new LongArray();
break;
case ID_CONTENT_ENCODING:
// TODO: check and fail if more than one content encoding is present.
break;
case ID_CONTENT_ENCRYPTION:
hasContentEncryption = true;
break;
default:
// pass
}
@ -261,17 +283,24 @@ public final class WebmExtractor implements Extractor {
case ID_CUES:
buildCues();
return false;
case ID_VIDEO:
buildVideoFormat();
case ID_CONTENT_ENCODING:
if (!hasContentEncryption) {
// We found a ContentEncoding other than Encryption.
throw new ParserException("Found an unsupported ContentEncoding");
}
if (encryptionKeyId == null) {
throw new ParserException("Encrypted Track found but ContentEncKeyID was not found");
}
drmInitData = new DrmInitData.Universal(MimeTypes.VIDEO_WEBM, encryptionKeyId);
return true;
case ID_AUDIO:
seenAudioTrack = true;
isAudioTrack = true;
return true;
case ID_TRACK_ENTRY:
if (seenAudioTrack) {
// Audio format has to be built here since codec private may not be available at the end
// of ID_AUDIO.
if (isAudioTrack) {
buildAudioFormat();
} else {
buildVideoFormat();
}
return true;
default:
@ -311,6 +340,37 @@ public final class WebmExtractor implements Extractor {
case ID_CHANNELS:
channelCount = (int) value;
break;
case ID_CONTENT_ENCODING_ORDER:
// This extractor only supports one ContentEncoding element and hence the order has to be 0.
if (value != 0) {
throw new ParserException("ContentEncodingOrder " + value + " not supported");
}
break;
case ID_CONTENT_ENCODING_SCOPE:
// This extractor only supports the scope of all frames (since that's the only scope used
// for Encryption).
if (value != 1) {
throw new ParserException("ContentEncodingScope " + value + " not supported");
}
break;
case ID_CONTENT_ENCODING_TYPE:
// This extractor only supports Encrypted ContentEncodingType.
if (value != 1) {
throw new ParserException("ContentEncodingType " + value + " not supported");
}
break;
case ID_CONTENT_ENCRYPTION_ALGORITHM:
// Only the value 5 (AES) is allowed according to the WebM specification.
if (value != 5) {
throw new ParserException("ContentEncAlgo " + value + " not supported");
}
break;
case ID_CONTENT_ENCRYPTION_AES_SETTINGS_CIPHER_MODE:
// Only the value 1 is allowed according to the WebM specification.
if (value != 1) {
throw new ParserException("AESSettingsCipherMode " + value + " not supported");
}
break;
case ID_CUE_TIME:
cueTimesUs.add(scaleTimecodeToUs(value));
break;
@ -402,22 +462,49 @@ public final class WebmExtractor implements Extractor {
}
boolean invisible = (simpleBlockTimecodeAndFlags[2] & 0x08) == 0x08;
int lacing = (simpleBlockTimecodeAndFlags[2] & 0x06) >> 1;
if (lacing != LACING_NONE) {
throw new ParserException("Lacing mode " + lacing + " not supported");
}
long elementEndOffsetBytes = elementOffsetBytes + headerSizeBytes + contentsSizeBytes;
simpleBlockTimecodeUs = clusterTimecodeUs + timecodeUs;
sampleHolder.flags = keyframe ? C.SAMPLE_FLAG_SYNC : 0;
sampleHolder.decodeOnly = invisible;
sampleHolder.timeUs = clusterTimecodeUs + timecodeUs;
sampleHolder.size = (int) (elementEndOffsetBytes - reader.getBytesRead());
// Validate lacing and set info into sample holder.
switch (lacing) {
case LACING_NONE:
long elementEndOffsetBytes = elementOffsetBytes + headerSizeBytes + contentsSizeBytes;
simpleBlockTimecodeUs = clusterTimecodeUs + timecodeUs;
sampleHolder.flags = keyframe ? C.SAMPLE_FLAG_SYNC : 0;
sampleHolder.decodeOnly = invisible;
sampleHolder.timeUs = clusterTimecodeUs + timecodeUs;
sampleHolder.size = (int) (elementEndOffsetBytes - reader.getBytesRead());
break;
case LACING_EBML:
case LACING_FIXED:
case LACING_XIPH:
default:
throw new ParserException("Lacing mode " + lacing + " not supported");
if (hasContentEncryption) {
byte[] signalByte = new byte[1];
reader.readBytes(inputStream, signalByte, 1);
sampleHolder.size -= 1;
// First bit of the signalByte (extension bit) must be 0.
if ((signalByte[0] & 0x80) != 0) {
throw new ParserException("Extension bit is set in signal byte");
}
boolean isEncrypted = (signalByte[0] & 0x01) == 0x01;
if (isEncrypted) {
byte[] iv = null;
iv = sampleHolder.cryptoInfo.iv;
if (iv == null || iv.length != BLOCK_COUNTER_SIZE) {
iv = new byte[BLOCK_COUNTER_SIZE];
}
reader.readBytes(inputStream, iv, 8); // The container has only 8 bytes of IV.
sampleHolder.size -= 8;
int[] clearDataSizes = sampleHolder.cryptoInfo.numBytesOfClearData;
if (clearDataSizes == null || clearDataSizes.length < 1) {
clearDataSizes = new int[1];
}
int[] encryptedDataSizes = sampleHolder.cryptoInfo.numBytesOfEncryptedData;
if (encryptedDataSizes == null || encryptedDataSizes.length < 1) {
encryptedDataSizes = new int[1];
}
clearDataSizes[0] = 0;
encryptedDataSizes[0] = sampleHolder.size;
sampleHolder.cryptoInfo.set(1, clearDataSizes, encryptedDataSizes,
encryptionKeyId, iv, C.CRYPTO_MODE_AES_CTR);
sampleHolder.flags |= C.SAMPLE_FLAG_ENCRYPTED;
}
}
if (sampleHolder.data == null || sampleHolder.data.capacity() < sampleHolder.size) {
@ -437,6 +524,10 @@ public final class WebmExtractor implements Extractor {
codecPrivate = new byte[contentsSizeBytes];
reader.readBytes(inputStream, codecPrivate, contentsSizeBytes);
break;
case ID_CONTENT_ENCRYPTION_KEY_ID:
encryptionKeyId = new byte[contentsSizeBytes];
reader.readBytes(inputStream, encryptionKeyId, contentsSizeBytes);
break;
default:
// pass
}
@ -463,8 +554,8 @@ public final class WebmExtractor implements Extractor {
private void buildVideoFormat() throws ParserException {
if (pixelWidth != UNKNOWN && pixelHeight != UNKNOWN
&& (format == null || format.width != pixelWidth || format.height != pixelHeight)) {
format = MediaFormat.createVideoFormat(
MimeTypes.VIDEO_VP9, MediaFormat.NO_VALUE, pixelWidth, pixelHeight, null);
format = MediaFormat.createVideoFormat(MimeTypes.VIDEO_VP9, MediaFormat.NO_VALUE, durationUs,
pixelWidth, pixelHeight, null);
readResults |= RESULT_READ_INIT;
} else if (format == null) {
throw new ParserException("Unable to build format");
@ -485,17 +576,15 @@ public final class WebmExtractor implements Extractor {
&& (format == null || format.channelCount != channelCount
|| format.sampleRate != sampleRate)) {
if (CODEC_ID_VORBIS.equals(codecId)) {
format = MediaFormat.createAudioFormat(
MimeTypes.AUDIO_VORBIS, VORBIS_MAX_INPUT_SIZE,
channelCount, sampleRate, parseVorbisCodecPrivate());
format = MediaFormat.createAudioFormat(MimeTypes.AUDIO_VORBIS, VORBIS_MAX_INPUT_SIZE,
durationUs, channelCount, sampleRate, parseVorbisCodecPrivate());
} else if (CODEC_ID_OPUS.equals(codecId)) {
ArrayList<byte[]> opusInitializationData = new ArrayList<byte[]>(3);
opusInitializationData.add(codecPrivate);
opusInitializationData.add(ByteBuffer.allocate(Long.SIZE).putLong(codecDelayNs).array());
opusInitializationData.add(ByteBuffer.allocate(Long.SIZE).putLong(seekPreRollNs).array());
format = MediaFormat.createAudioFormat(
MimeTypes.AUDIO_OPUS, OPUS_MAX_INPUT_SIZE, channelCount, sampleRate,
opusInitializationData);
format = MediaFormat.createAudioFormat(MimeTypes.AUDIO_OPUS, OPUS_MAX_INPUT_SIZE,
durationUs, channelCount, sampleRate, opusInitializationData);
}
readResults |= RESULT_READ_INIT;
} else if (format == null) {
@ -512,7 +601,7 @@ public final class WebmExtractor implements Extractor {
private void buildCues() throws ParserException {
if (segmentStartOffsetBytes == UNKNOWN) {
throw new ParserException("Segment start/end offsets unknown");
} else if (durationUs == UNKNOWN) {
} else if (durationUs == C.UNKNOWN_TIME_US) {
throw new ParserException("Duration unknown");
} else if (cuesSizeBytes == UNKNOWN) {
throw new ParserException("Cues size unknown");

View file

@ -39,6 +39,7 @@ import com.google.android.exoplayer.dash.mpd.MediaPresentationDescription;
import com.google.android.exoplayer.dash.mpd.Period;
import com.google.android.exoplayer.dash.mpd.RangedUri;
import com.google.android.exoplayer.dash.mpd.Representation;
import com.google.android.exoplayer.drm.DrmInitData;
import com.google.android.exoplayer.text.webvtt.WebvttParser;
import com.google.android.exoplayer.upstream.DataSource;
import com.google.android.exoplayer.upstream.DataSpec;
@ -54,8 +55,6 @@ import java.util.Arrays;
import java.util.Collections;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.UUID;
/**
* An {@link ChunkSource} for DASH streams.
@ -96,7 +95,7 @@ public class DashChunkSource implements ChunkSource {
private final ManifestFetcher<MediaPresentationDescription> manifestFetcher;
private final int adaptationSetIndex;
private final int[] representationIndices;
private final Map<UUID, byte[]> psshInfo;
private final DrmInitData drmInitData;
private MediaPresentationDescription currentManifest;
private boolean finishedCurrentManifest;
@ -190,7 +189,7 @@ public class DashChunkSource implements ChunkSource {
this.evaluation = new Evaluation();
this.headerBuilder = new StringBuilder();
psshInfo = getPsshInfo(currentManifest, adaptationSetIndex);
drmInitData = getDrmInitData(currentManifest, adaptationSetIndex);
Representation[] representations = getFilteredRepresentations(currentManifest,
adaptationSetIndex, representationIndices);
long periodDurationUs = (representations[0].periodDurationMs == TrackRenderer.UNKNOWN_TIME_US)
@ -407,7 +406,7 @@ public class DashChunkSource implements ChunkSource {
// Do nothing.
}
private boolean mimeTypeIsWebm(String mimeType) {
/**
 * Returns whether {@code mimeType} is a WebM container mime type (either video or audio WebM).
 * Uses startsWith so that mime types carrying codec parameters still match.
 */
private static boolean mimeTypeIsWebm(String mimeType) {
return mimeType.startsWith(MimeTypes.VIDEO_WEBM) || mimeType.startsWith(MimeTypes.AUDIO_WEBM);
}
@ -475,8 +474,8 @@ public class DashChunkSource implements ChunkSource {
startTimeUs, endTimeUs, nextAbsoluteSegmentNum, null, representationHolder.vttHeader);
} else {
return new ContainerMediaChunk(dataSource, dataSpec, representation.format, trigger,
startTimeUs, endTimeUs, nextAbsoluteSegmentNum, representationHolder.extractor, psshInfo,
false, presentationTimeOffsetUs);
startTimeUs, endTimeUs, nextAbsoluteSegmentNum, representationHolder.extractor,
drmInitData, false, presentationTimeOffsetUs);
}
}
@ -529,19 +528,24 @@ public class DashChunkSource implements ChunkSource {
}
}
private static Map<UUID, byte[]> getPsshInfo(MediaPresentationDescription manifest,
private static DrmInitData getDrmInitData(MediaPresentationDescription manifest,
int adaptationSetIndex) {
AdaptationSet adaptationSet = manifest.periods.get(0).adaptationSets.get(adaptationSetIndex);
String drmInitMimeType = mimeTypeIsWebm(adaptationSet.representations.get(0).format.mimeType)
? MimeTypes.VIDEO_WEBM : MimeTypes.VIDEO_MP4;
if (adaptationSet.contentProtections.isEmpty()) {
return null;
} else {
Map<UUID, byte[]> psshInfo = new HashMap<UUID, byte[]>();
DrmInitData.Mapped drmInitData = null;
for (ContentProtection contentProtection : adaptationSet.contentProtections) {
if (contentProtection.uuid != null && contentProtection.data != null) {
psshInfo.put(contentProtection.uuid, contentProtection.data);
if (drmInitData == null) {
drmInitData = new DrmInitData.Mapped(drmInitMimeType);
}
drmInitData.put(contentProtection.uuid, contentProtection.data);
}
}
return psshInfo.isEmpty() ? null : psshInfo;
return drmInitData;
}
}

View file

@ -0,0 +1,103 @@
/*
* Copyright (C) 2014 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.google.android.exoplayer.drm;
import android.media.MediaDrm;
import java.util.HashMap;
import java.util.Map;
import java.util.UUID;
/**
 * Encapsulates initialization data required by a {@link MediaDrm} instance.
 * <p>
 * Two concrete flavors are provided: {@link Mapped}, which holds distinct initialization data per
 * DRM scheme UUID, and {@link Universal}, which returns the same data regardless of scheme.
 */
public abstract class DrmInitData {

  /**
   * The container mime type.
   */
  public final String mimeType;

  /**
   * @param mimeType The container mime type.
   */
  public DrmInitData(String mimeType) {
    this.mimeType = mimeType;
  }

  /**
   * Retrieves initialization data for a given DRM scheme, specified by its UUID.
   *
   * @param schemeUuid The DRM scheme's UUID.
   * @return The initialization data for the scheme, or null if the scheme is not supported.
   */
  public abstract byte[] get(UUID schemeUuid);

  /**
   * A {@link DrmInitData} implementation that maps UUID onto scheme specific data.
   */
  public static final class Mapped extends DrmInitData {

    private final Map<UUID, byte[]> schemeData;

    public Mapped(String mimeType) {
      super(mimeType);
      schemeData = new HashMap<UUID, byte[]>();
    }

    @Override
    public byte[] get(UUID schemeUuid) {
      return schemeData.get(schemeUuid);
    }

    /**
     * Inserts scheme specific initialization data.
     *
     * @param schemeUuid The scheme UUID.
     * @param data The corresponding initialization data.
     */
    public void put(UUID schemeUuid, byte[] data) {
      schemeData.put(schemeUuid, data);
    }

    /**
     * Inserts scheme specific initialization data.
     *
     * @param data A mapping from scheme UUID to initialization data.
     */
    public void putAll(Map<UUID, byte[]> data) {
      schemeData.putAll(data);
    }

  }

  /**
   * A {@link DrmInitData} implementation that returns the same initialization data for all schemes.
   */
  public static final class Universal extends DrmInitData {

    // Final: the data is set once at construction and never replaced, so the field should not be
    // mutable. Note the byte[] itself is shared with the caller, not defensively copied.
    private final byte[] data;

    /**
     * @param mimeType The container mime type.
     * @param data The initialization data returned for every scheme.
     */
    public Universal(String mimeType, byte[] data) {
      super(mimeType);
      this.data = data;
    }

    @Override
    public byte[] get(UUID schemeUuid) {
      return data;
    }

  }

}

View file

@ -18,9 +18,6 @@ package com.google.android.exoplayer.drm;
import android.annotation.TargetApi;
import android.media.MediaCrypto;
import java.util.Map;
import java.util.UUID;
/**
* Manages a DRM session.
*/
@ -36,7 +33,7 @@ public interface DrmSessionManager {
*/
public static final int STATE_CLOSED = 1;
/**
* The session is being opened (i.e. {@link #open(Map, String)} has been called, but the session
* The session is being opened (i.e. {@link #open(DrmInitData)} has been called, but the session
* is not yet open).
*/
public static final int STATE_OPENING = 2;
@ -52,11 +49,9 @@ public interface DrmSessionManager {
/**
* Opens the session, possibly asynchronously.
*
* @param drmInitData Initialization data for the drm schemes supported by the media, keyed by
* scheme UUID.
* @param mimeType The mimeType of the media.
* @param drmInitData DRM initialization data.
*/
void open(Map<UUID, byte[]> drmInitData, String mimeType);
void open(DrmInitData drmInitData);
/**
* Closes the session.

View file

@ -31,7 +31,6 @@ import android.os.Looper;
import android.os.Message;
import java.util.HashMap;
import java.util.Map;
import java.util.UUID;
/**
@ -168,7 +167,7 @@ public class StreamingDrmSessionManager implements DrmSessionManager {
}
@Override
public void open(Map<UUID, byte[]> psshData, String mimeType) {
public void open(DrmInitData drmInitData) {
if (++openCount != 1) {
return;
}
@ -178,8 +177,8 @@ public class StreamingDrmSessionManager implements DrmSessionManager {
postRequestHandler = new PostRequestHandler(requestHandlerThread.getLooper());
}
if (this.schemePsshData == null) {
this.mimeType = mimeType;
schemePsshData = psshData.get(uuid);
mimeType = drmInitData.mimeType;
schemePsshData = drmInitData.get(uuid);
if (schemePsshData == null) {
onError(new IllegalStateException("Media does not support uuid: " + uuid));
return;

View file

@ -19,6 +19,7 @@ import com.google.android.exoplayer.C;
import com.google.android.exoplayer.MediaFormat;
import com.google.android.exoplayer.hls.parser.AdtsExtractor;
import com.google.android.exoplayer.hls.parser.HlsExtractor;
import com.google.android.exoplayer.hls.parser.HlsExtractorWrapper;
import com.google.android.exoplayer.hls.parser.TsExtractor;
import com.google.android.exoplayer.upstream.Aes128DataSource;
import com.google.android.exoplayer.upstream.BandwidthMeter;
@ -305,7 +306,7 @@ public class HlsChunkSource {
Uri chunkUri = UriUtil.resolveToUri(mediaPlaylist.baseUri, segment.url);
// Check if encryption is specified.
if (HlsMediaPlaylist.ENCRYPTION_METHOD_AES_128.equals(segment.encryptionMethod)) {
if (segment.isEncrypted) {
Uri keyUri = UriUtil.resolveToUri(mediaPlaylist.baseUri, segment.encryptionKeyUri);
if (!keyUri.equals(encryptionKeyUri)) {
// Encryption is specified and the key has changed.
@ -341,16 +342,17 @@ public class HlsChunkSource {
boolean isLastChunk = !mediaPlaylist.live && chunkIndex == mediaPlaylist.segments.size() - 1;
// Configure the extractor that will read the chunk.
HlsExtractor extractor;
HlsExtractorWrapper extractorWrapper;
if (previousTsChunk == null || segment.discontinuity || switchingVariant || liveDiscontinuity) {
extractor = chunkUri.getLastPathSegment().endsWith(AAC_FILE_EXTENSION)
? new AdtsExtractor(switchingVariantSpliced, startTimeUs, bufferPool)
: new TsExtractor(switchingVariantSpliced, startTimeUs, bufferPool);
HlsExtractor extractor = chunkUri.getLastPathSegment().endsWith(AAC_FILE_EXTENSION)
? new AdtsExtractor(startTimeUs)
: new TsExtractor(startTimeUs);
extractorWrapper = new HlsExtractorWrapper(bufferPool, extractor, switchingVariantSpliced);
} else {
extractor = previousTsChunk.extractor;
extractorWrapper = previousTsChunk.extractor;
}
return new TsChunk(dataSource, dataSpec, extractor, enabledVariants[variantIndex].index,
return new TsChunk(dataSource, dataSpec, extractorWrapper, enabledVariants[variantIndex].index,
startTimeUs, endTimeUs, chunkMediaSequence, isLastChunk);
}
@ -387,16 +389,24 @@ public class HlsChunkSource {
private int getNextVariantIndex(TsChunk previousTsChunk, long playbackPositionUs) {
clearStaleBlacklistedPlaylists();
if (previousTsChunk == null) {
// Don't consider switching if we don't have a previous chunk.
return variantIndex;
}
long bitrateEstimate = bandwidthMeter.getBitrateEstimate();
if (bitrateEstimate == BandwidthMeter.NO_ESTIMATE) {
// Don't consider switching if we don't have a bandwidth estimate.
return variantIndex;
}
int idealVariantIndex = getVariantIndexForBandwdith(
(int) (bandwidthMeter.getBitrateEstimate() * BANDWIDTH_FRACTION));
(int) (bitrateEstimate * BANDWIDTH_FRACTION));
if (idealVariantIndex == variantIndex) {
// We're already using the ideal variant.
return variantIndex;
}
// We're not using the ideal variant for the available bandwidth, but only switch if the
// conditions are appropriate.
long bufferedPositionUs = previousTsChunk == null ? playbackPositionUs
: adaptiveMode == ADAPTIVE_MODE_SPLICE ? previousTsChunk.startTimeUs
long bufferedPositionUs = adaptiveMode == ADAPTIVE_MODE_SPLICE ? previousTsChunk.startTimeUs
: previousTsChunk.endTimeUs;
long bufferedUs = bufferedPositionUs - playbackPositionUs;
if (mediaPlaylistBlacklistTimesMs[variantIndex] != 0

View file

@ -23,10 +23,12 @@ import java.util.List;
public final class HlsMasterPlaylist extends HlsPlaylist {
public final List<Variant> variants;
public final List<Subtitle> subtitles;
public HlsMasterPlaylist(String baseUri, List<Variant> variants) {
public HlsMasterPlaylist(String baseUri, List<Variant> variants, List<Subtitle> subtitles) {
super(baseUri, HlsPlaylist.TYPE_MASTER);
this.variants = variants;
this.subtitles = subtitles;
}
}

View file

@ -28,24 +28,25 @@ public final class HlsMediaPlaylist extends HlsPlaylist {
* Media segment reference.
*/
public static final class Segment implements Comparable<Long> {
public final boolean discontinuity;
public final double durationSecs;
public final String url;
public final long startTimeUs;
public final String encryptionMethod;
public final boolean isEncrypted;
public final String encryptionKeyUri;
public final String encryptionIV;
public final int byterangeOffset;
public final int byterangeLength;
public Segment(String uri, double durationSecs, boolean discontinuity, long startTimeUs,
String encryptionMethod, String encryptionKeyUri, String encryptionIV,
int byterangeOffset, int byterangeLength) {
boolean isEncrypted, String encryptionKeyUri, String encryptionIV, int byterangeOffset,
int byterangeLength) {
this.url = uri;
this.durationSecs = durationSecs;
this.discontinuity = discontinuity;
this.startTimeUs = startTimeUs;
this.encryptionMethod = encryptionMethod;
this.isEncrypted = isEncrypted;
this.encryptionKeyUri = encryptionKeyUri;
this.encryptionIV = encryptionIV;
this.byterangeOffset = byterangeOffset;

View file

@ -25,6 +25,8 @@ import java.util.regex.Pattern;
*/
/* package */ class HlsParserUtil {
private static final String BOOLEAN_YES = "YES";
private HlsParserUtil() {}
public static String parseStringAttr(String line, Pattern pattern, String tag)
@ -36,14 +38,6 @@ import java.util.regex.Pattern;
throw new ParserException(String.format("Couldn't match %s tag in %s", tag, line));
}
public static String parseOptionalStringAttr(String line, Pattern pattern) {
Matcher matcher = pattern.matcher(line);
if (matcher.find() && matcher.groupCount() == 1) {
return matcher.group(1);
}
return null;
}
public static int parseIntAttr(String line, Pattern pattern, String tag)
throws ParserException {
return Integer.parseInt(parseStringAttr(line, pattern, tag));
@ -54,4 +48,20 @@ import java.util.regex.Pattern;
return Double.parseDouble(parseStringAttr(line, pattern, tag));
}
/**
 * Extracts the value of an optional attribute from an HLS playlist line.
 *
 * @param line The playlist line to search.
 * @param pattern A pattern whose single capturing group matches the attribute value.
 * @return The matched group, or null if the attribute is absent (unlike parseStringAttr, which
 *     throws when the attribute is missing).
 */
public static String parseOptionalStringAttr(String line, Pattern pattern) {
Matcher matcher = pattern.matcher(line);
if (matcher.find() && matcher.groupCount() == 1) {
return matcher.group(1);
}
return null;
}
/**
 * Extracts the value of an optional boolean attribute from an HLS playlist line.
 *
 * @param line The playlist line to search.
 * @param pattern A pattern whose single capturing group matches the attribute value.
 * @return True if the attribute is present and equals "YES". False if it is absent or holds any
 *     other value.
 */
public static boolean parseOptionalBoolAttr(String line, Pattern pattern) {
Matcher matcher = pattern.matcher(line);
if (matcher.find() && matcher.groupCount() == 1) {
return BOOLEAN_YES.equals(matcher.group(1));
}
return false;
}
}

View file

@ -37,12 +37,8 @@ import java.util.regex.Pattern;
public final class HlsPlaylistParser implements NetworkLoadable.Parser<HlsPlaylist> {
private static final String VERSION_TAG = "#EXT-X-VERSION";
private static final String STREAM_INF_TAG = "#EXT-X-STREAM-INF";
private static final String BANDWIDTH_ATTR = "BANDWIDTH";
private static final String CODECS_ATTR = "CODECS";
private static final String RESOLUTION_ATTR = "RESOLUTION";
private static final String MEDIA_TAG = "#EXT-X-MEDIA";
private static final String DISCONTINUITY_TAG = "#EXT-X-DISCONTINUITY";
private static final String MEDIA_DURATION_TAG = "#EXTINF";
private static final String MEDIA_SEQUENCE_TAG = "#EXT-X-MEDIA-SEQUENCE";
@ -51,17 +47,32 @@ public final class HlsPlaylistParser implements NetworkLoadable.Parser<HlsPlayli
private static final String KEY_TAG = "#EXT-X-KEY";
private static final String BYTERANGE_TAG = "#EXT-X-BYTERANGE";
private static final String BANDWIDTH_ATTR = "BANDWIDTH";
private static final String CODECS_ATTR = "CODECS";
private static final String RESOLUTION_ATTR = "RESOLUTION";
private static final String LANGUAGE_ATTR = "LANGUAGE";
private static final String NAME_ATTR = "NAME";
private static final String AUTOSELECT_ATTR = "AUTOSELECT";
private static final String DEFAULT_ATTR = "DEFAULT";
private static final String TYPE_ATTR = "TYPE";
private static final String METHOD_ATTR = "METHOD";
private static final String URI_ATTR = "URI";
private static final String IV_ATTR = "IV";
private static final String AUDIO_TYPE = "AUDIO";
private static final String VIDEO_TYPE = "VIDEO";
private static final String SUBTITLES_TYPE = "SUBTITLES";
private static final String CLOSED_CAPTIONS_TYPE = "CLOSED-CAPTIONS";
private static final String METHOD_NONE = "NONE";
private static final String METHOD_AES128 = "AES-128";
private static final Pattern BANDWIDTH_ATTR_REGEX =
Pattern.compile(BANDWIDTH_ATTR + "=(\\d+)\\b");
private static final Pattern CODECS_ATTR_REGEX =
Pattern.compile(CODECS_ATTR + "=\"(.+?)\"");
private static final Pattern RESOLUTION_ATTR_REGEX =
Pattern.compile(RESOLUTION_ATTR + "=(\\d+x\\d+)");
private static final Pattern MEDIA_DURATION_REGEX =
Pattern.compile(MEDIA_DURATION_TAG + ":([\\d.]+),");
private static final Pattern MEDIA_SEQUENCE_REGEX =
@ -74,11 +85,22 @@ public final class HlsPlaylistParser implements NetworkLoadable.Parser<HlsPlayli
Pattern.compile(BYTERANGE_TAG + ":(\\d+(?:@\\d+)?)\\b");
private static final Pattern METHOD_ATTR_REGEX =
Pattern.compile(METHOD_ATTR + "=([^,.*]+)");
Pattern.compile(METHOD_ATTR + "=(" + METHOD_NONE + "|" + METHOD_AES128 + ")");
private static final Pattern URI_ATTR_REGEX =
Pattern.compile(URI_ATTR + "=\"(.+)\"");
private static final Pattern IV_ATTR_REGEX =
Pattern.compile(IV_ATTR + "=([^,.*]+)");
private static final Pattern TYPE_ATTR_REGEX =
Pattern.compile(TYPE_ATTR + "=(" + AUDIO_TYPE + "|" + VIDEO_TYPE + "|" + SUBTITLES_TYPE + "|"
+ CLOSED_CAPTIONS_TYPE + ")");
private static final Pattern LANGUAGE_ATTR_REGEX =
Pattern.compile(LANGUAGE_ATTR + "=\"(.+?)\"");
private static final Pattern NAME_ATTR_REGEX =
Pattern.compile(NAME_ATTR + "=\"(.+?)\"");
private static final Pattern AUTOSELECT_ATTR_REGEX =
Pattern.compile(AUTOSELECT_ATTR + "=\"(.+?)\"");
private static final Pattern DEFAULT_ATTR_REGEX =
Pattern.compile(DEFAULT_ATTR + "=\"(.+?)\"");
@Override
public HlsPlaylist parse(String connectionUrl, InputStream inputStream)
@ -103,10 +125,8 @@ public final class HlsPlaylistParser implements NetworkLoadable.Parser<HlsPlayli
|| line.equals(ENDLIST_TAG)) {
extraLines.add(line);
return parseMediaPlaylist(new LineIterator(extraLines, reader), connectionUrl);
} else if (line.startsWith(VERSION_TAG)) {
} else {
extraLines.add(line);
} else if (!line.startsWith("#")) {
throw new ParserException("Missing a tag before URL.");
}
}
} finally {
@ -117,17 +137,32 @@ public final class HlsPlaylistParser implements NetworkLoadable.Parser<HlsPlayli
private static HlsMasterPlaylist parseMasterPlaylist(LineIterator iterator, String baseUri)
throws IOException {
List<Variant> variants = new ArrayList<Variant>();
ArrayList<Variant> variants = new ArrayList<Variant>();
ArrayList<Subtitle> subtitles = new ArrayList<Subtitle>();
int bandwidth = 0;
String[] codecs = null;
int width = -1;
int height = -1;
int variantIndex = 0;
boolean expectingStreamInfUrl = false;
String line;
while (iterator.hasNext()) {
line = iterator.next();
if (line.startsWith(STREAM_INF_TAG)) {
if (line.startsWith(MEDIA_TAG)) {
String type = HlsParserUtil.parseStringAttr(line, TYPE_ATTR_REGEX, TYPE_ATTR);
if (SUBTITLES_TYPE.equals(type)) {
// We assume all subtitles belong to the same group.
String name = HlsParserUtil.parseStringAttr(line, NAME_ATTR_REGEX, NAME_ATTR);
String uri = HlsParserUtil.parseStringAttr(line, URI_ATTR_REGEX, URI_ATTR);
String language = HlsParserUtil.parseOptionalStringAttr(line, LANGUAGE_ATTR_REGEX);
boolean isDefault = HlsParserUtil.parseOptionalBoolAttr(line, DEFAULT_ATTR_REGEX);
boolean autoSelect = HlsParserUtil.parseOptionalBoolAttr(line, AUTOSELECT_ATTR_REGEX);
subtitles.add(new Subtitle(name, uri, language, isDefault, autoSelect));
} else {
// TODO: Support other types of media tag.
}
} else if (line.startsWith(STREAM_INF_TAG)) {
bandwidth = HlsParserUtil.parseIntAttr(line, BANDWIDTH_ATTR_REGEX, BANDWIDTH_ATTR);
String codecsString = HlsParserUtil.parseOptionalStringAttr(line, CODECS_ATTR_REGEX);
if (codecsString != null) {
@ -145,15 +180,18 @@ public final class HlsPlaylistParser implements NetworkLoadable.Parser<HlsPlayli
width = -1;
height = -1;
}
} else if (!line.startsWith("#")) {
expectingStreamInfUrl = true;
} else if (!line.startsWith("#") && expectingStreamInfUrl) {
variants.add(new Variant(variantIndex++, line, bandwidth, codecs, width, height));
bandwidth = 0;
codecs = null;
width = -1;
height = -1;
expectingStreamInfUrl = false;
}
}
return new HlsMasterPlaylist(baseUri, Collections.unmodifiableList(variants));
return new HlsMasterPlaylist(baseUri, Collections.unmodifiableList(variants),
Collections.unmodifiableList(subtitles));
}
private static HlsMediaPlaylist parseMediaPlaylist(LineIterator iterator, String baseUri)
@ -167,14 +205,14 @@ public final class HlsPlaylistParser implements NetworkLoadable.Parser<HlsPlayli
double segmentDurationSecs = 0.0;
boolean segmentDiscontinuity = false;
long segmentStartTimeUs = 0;
String segmentEncryptionMethod = null;
String segmentEncryptionKeyUri = null;
String segmentEncryptionIV = null;
int segmentByterangeOffset = 0;
int segmentByterangeLength = C.LENGTH_UNBOUNDED;
int segmentMediaSequence = 0;
boolean isEncrypted = false;
String encryptionKeyUri = null;
String encryptionIV = null;
String line;
while (iterator.hasNext()) {
line = iterator.next();
@ -190,18 +228,14 @@ public final class HlsPlaylistParser implements NetworkLoadable.Parser<HlsPlayli
segmentDurationSecs = HlsParserUtil.parseDoubleAttr(line, MEDIA_DURATION_REGEX,
MEDIA_DURATION_TAG);
} else if (line.startsWith(KEY_TAG)) {
segmentEncryptionMethod = HlsParserUtil.parseStringAttr(line, METHOD_ATTR_REGEX,
METHOD_ATTR);
if (segmentEncryptionMethod.equals(HlsMediaPlaylist.ENCRYPTION_METHOD_NONE)) {
segmentEncryptionKeyUri = null;
segmentEncryptionIV = null;
String method = HlsParserUtil.parseStringAttr(line, METHOD_ATTR_REGEX, METHOD_ATTR);
isEncrypted = METHOD_AES128.equals(method);
if (isEncrypted) {
encryptionKeyUri = HlsParserUtil.parseStringAttr(line, URI_ATTR_REGEX, URI_ATTR);
encryptionIV = HlsParserUtil.parseOptionalStringAttr(line, IV_ATTR_REGEX);
} else {
segmentEncryptionKeyUri = HlsParserUtil.parseStringAttr(line, URI_ATTR_REGEX,
URI_ATTR);
segmentEncryptionIV = HlsParserUtil.parseOptionalStringAttr(line, IV_ATTR_REGEX);
if (segmentEncryptionIV == null) {
segmentEncryptionIV = Integer.toHexString(segmentMediaSequence);
}
encryptionKeyUri = null;
encryptionIV = null;
}
} else if (line.startsWith(BYTERANGE_TAG)) {
String byteRange = HlsParserUtil.parseStringAttr(line, BYTERANGE_REGEX, BYTERANGE_TAG);
@ -213,13 +247,21 @@ public final class HlsPlaylistParser implements NetworkLoadable.Parser<HlsPlayli
} else if (line.equals(DISCONTINUITY_TAG)) {
segmentDiscontinuity = true;
} else if (!line.startsWith("#")) {
String segmentEncryptionIV;
if (!isEncrypted) {
segmentEncryptionIV = null;
} else if (encryptionIV != null) {
segmentEncryptionIV = encryptionIV;
} else {
segmentEncryptionIV = Integer.toHexString(segmentMediaSequence);
}
segmentMediaSequence++;
if (segmentByterangeLength == C.LENGTH_UNBOUNDED) {
segmentByterangeOffset = 0;
}
segments.add(new Segment(line, segmentDurationSecs, segmentDiscontinuity,
segmentStartTimeUs, segmentEncryptionMethod, segmentEncryptionKeyUri,
segmentEncryptionIV, segmentByterangeOffset, segmentByterangeLength));
segmentStartTimeUs, isEncrypted, encryptionKeyUri, segmentEncryptionIV,
segmentByterangeOffset, segmentByterangeLength));
segmentStartTimeUs += (long) (segmentDurationSecs * C.MICROS_PER_SECOND);
segmentDiscontinuity = false;
segmentDurationSecs = 0.0;

View file

@ -21,7 +21,7 @@ import com.google.android.exoplayer.SampleHolder;
import com.google.android.exoplayer.SampleSource;
import com.google.android.exoplayer.TrackInfo;
import com.google.android.exoplayer.TrackRenderer;
import com.google.android.exoplayer.hls.parser.HlsExtractor;
import com.google.android.exoplayer.hls.parser.HlsExtractorWrapper;
import com.google.android.exoplayer.upstream.Loader;
import com.google.android.exoplayer.upstream.Loader.Loadable;
import com.google.android.exoplayer.util.Assertions;
@ -44,7 +44,7 @@ public class HlsSampleSource implements SampleSource, Loader.Callback {
private static final int NO_RESET_PENDING = -1;
private final HlsChunkSource chunkSource;
private final LinkedList<HlsExtractor> extractors;
private final LinkedList<HlsExtractorWrapper> extractors;
private final boolean frameAccurateSeeking;
private final int minLoadableRetryCount;
@ -83,7 +83,7 @@ public class HlsSampleSource implements SampleSource, Loader.Callback {
this.frameAccurateSeeking = frameAccurateSeeking;
this.remainingReleaseCount = downstreamRendererCount;
this.minLoadableRetryCount = minLoadableRetryCount;
extractors = new LinkedList<HlsExtractor>();
extractors = new LinkedList<HlsExtractorWrapper>();
}
@Override
@ -96,7 +96,7 @@ public class HlsSampleSource implements SampleSource, Loader.Callback {
}
continueBufferingInternal();
if (!extractors.isEmpty()) {
HlsExtractor extractor = extractors.getFirst();
HlsExtractorWrapper extractor = extractors.getFirst();
if (extractor.isPrepared()) {
trackCount = extractor.getTrackCount();
trackEnabledStates = new boolean[trackCount];
@ -195,7 +195,7 @@ public class HlsSampleSource implements SampleSource, Loader.Callback {
return NOTHING_READ;
}
HlsExtractor extractor = getCurrentExtractor();
HlsExtractorWrapper extractor = getCurrentExtractor();
if (extractors.size() > 1) {
// If there's more than one extractor, attempt to configure a seamless splice from the
// current one to the next one.
@ -328,8 +328,8 @@ public class HlsSampleSource implements SampleSource, Loader.Callback {
*
* @return The current extractor from which samples should be read. Guaranteed to be non-null.
*/
private HlsExtractor getCurrentExtractor() {
HlsExtractor extractor = extractors.getFirst();
private HlsExtractorWrapper getCurrentExtractor() {
HlsExtractorWrapper extractor = extractors.getFirst();
while (extractors.size() > 1 && !haveSamplesForEnabledTracks(extractor)) {
// We're finished reading from the extractor for all tracks, and so can discard it.
extractors.removeFirst().release();
@ -338,7 +338,7 @@ public class HlsSampleSource implements SampleSource, Loader.Callback {
return extractor;
}
private void discardSamplesForDisabledTracks(HlsExtractor extractor, long timeUs) {
private void discardSamplesForDisabledTracks(HlsExtractorWrapper extractor, long timeUs) {
if (!extractor.isPrepared()) {
return;
}
@ -349,7 +349,7 @@ public class HlsSampleSource implements SampleSource, Loader.Callback {
}
}
private boolean haveSamplesForEnabledTracks(HlsExtractor extractor) {
private boolean haveSamplesForEnabledTracks(HlsExtractorWrapper extractor) {
if (!extractor.isPrepared()) {
return false;
}

View file

@ -0,0 +1,37 @@
/*
* Copyright (C) 2014 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.google.android.exoplayer.hls;
/**
 * Represents one subtitle rendition declared by an HLS master playlist media tag.
 */
public final class Subtitle {

  /** Human readable name of the rendition. */
  public final String name;
  /** Location of the subtitle playlist. */
  public final String uri;
  /** Declared language of the rendition (may be null when the attribute is absent). */
  public final String language;
  /** Whether the rendition is flagged as the default selection. */
  public final boolean isDefault;
  /** Whether the player is permitted to select this rendition automatically. */
  public final boolean autoSelect;

  /**
   * @param name Human readable name.
   * @param uri Location of the subtitle playlist.
   * @param language Declared language, or null.
   * @param isDefault Whether this rendition is the default selection.
   * @param autoSelect Whether automatic selection is permitted.
   */
  public Subtitle(String name, String uri, String language, boolean isDefault, boolean autoSelect) {
    this.name = name;
    this.uri = uri;
    this.language = language;
    this.isDefault = isDefault;
    this.autoSelect = autoSelect;
  }

}

View file

@ -15,7 +15,9 @@
*/
package com.google.android.exoplayer.hls;
import com.google.android.exoplayer.hls.parser.HlsExtractor;
import com.google.android.exoplayer.hls.parser.DataSourceExtractorInput;
import com.google.android.exoplayer.hls.parser.HlsExtractor.ExtractorInput;
import com.google.android.exoplayer.hls.parser.HlsExtractorWrapper;
import com.google.android.exoplayer.upstream.DataSource;
import com.google.android.exoplayer.upstream.DataSpec;
@ -26,8 +28,6 @@ import java.io.IOException;
*/
public final class TsChunk extends HlsChunk {
private static final byte[] SCRATCH_SPACE = new byte[4096];
/**
* The index of the variant in the master playlist.
*/
@ -51,7 +51,7 @@ public final class TsChunk extends HlsChunk {
/**
* The extractor into which this chunk is being consumed.
*/
public final HlsExtractor extractor;
public final HlsExtractorWrapper extractor;
private int loadPosition;
private volatile boolean loadFinished;
@ -67,7 +67,7 @@ public final class TsChunk extends HlsChunk {
* @param chunkIndex The index of the chunk.
* @param isLastChunk True if this is the last chunk in the media. False otherwise.
*/
public TsChunk(DataSource dataSource, DataSpec dataSpec, HlsExtractor extractor,
public TsChunk(DataSource dataSource, DataSpec dataSpec, HlsExtractorWrapper extractor,
int variantIndex, long startTimeUs, long endTimeUs, int chunkIndex, boolean isLastChunk) {
super(dataSource, dataSpec);
this.extractor = extractor;
@ -102,30 +102,23 @@ public final class TsChunk extends HlsChunk {
@Override
public void load() throws IOException, InterruptedException {
ExtractorInput input = new DataSourceExtractorInput(dataSource, 0);
try {
dataSource.open(dataSpec);
int bytesRead = 0;
int bytesSkipped = 0;
// If we previously fed part of this chunk to the extractor, skip it this time.
// TODO: Ideally we'd construct a dataSpec that only loads the remainder of the data here,
// rather than loading the whole chunk again and then skipping data we previously loaded. To
// do this is straightforward for non-encrypted content, but more complicated for content
// encrypted with AES, for which we'll need to modify the way that decryption is performed.
while (bytesRead != -1 && !loadCanceled && bytesSkipped < loadPosition) {
int skipLength = Math.min(loadPosition - bytesSkipped, SCRATCH_SPACE.length);
bytesRead = dataSource.read(SCRATCH_SPACE, 0, skipLength);
if (bytesRead != -1) {
bytesSkipped += bytesRead;
input.skipFully(loadPosition);
try {
while (!input.isEnded() && !loadCanceled) {
extractor.read(input);
}
} finally {
loadPosition = (int) input.getPosition();
loadFinished = !loadCanceled;
}
// Feed the remaining data into the extractor.
while (bytesRead != -1 && !loadCanceled) {
bytesRead = extractor.read(dataSource);
if (bytesRead != -1) {
loadPosition += bytesRead;
}
}
loadFinished = !loadCanceled;
} finally {
dataSource.close();
}

View file

@ -15,11 +15,6 @@
*/
package com.google.android.exoplayer.hls.parser;
import com.google.android.exoplayer.MediaFormat;
import com.google.android.exoplayer.SampleHolder;
import com.google.android.exoplayer.upstream.BufferPool;
import com.google.android.exoplayer.upstream.DataSource;
import com.google.android.exoplayer.util.Assertions;
import com.google.android.exoplayer.util.ParsableByteArray;
import java.io.IOException;
@ -28,82 +23,37 @@ import java.io.IOException;
* Facilitates the extraction of AAC samples from elementary audio files formatted as AAC with ADTS
* headers.
*/
public class AdtsExtractor extends HlsExtractor {
public class AdtsExtractor implements HlsExtractor {
private static final int MAX_PACKET_SIZE = 200;
private final long firstSampleTimestamp;
private final ParsableByteArray packetBuffer;
private final AdtsReader adtsReader;
// Accessed only by the loading thread.
private AdtsReader adtsReader;
private boolean firstPacket;
// Accessed by both the loading and consuming threads.
private volatile boolean prepared;
public AdtsExtractor(boolean shouldSpliceIn, long firstSampleTimestamp, BufferPool bufferPool) {
super(shouldSpliceIn);
public AdtsExtractor(long firstSampleTimestamp) {
this.firstSampleTimestamp = firstSampleTimestamp;
packetBuffer = new ParsableByteArray(MAX_PACKET_SIZE);
adtsReader = new AdtsReader(bufferPool);
firstPacket = true;
}
@Override
public int getTrackCount() {
Assertions.checkState(prepared);
return 1;
public void init(TrackOutputBuilder output) {
adtsReader = new AdtsReader(output.buildOutput(0));
output.allOutputsBuilt();
}
@Override
public MediaFormat getFormat(int track) {
Assertions.checkState(prepared);
return adtsReader.getMediaFormat();
}
@Override
public boolean isPrepared() {
return prepared;
}
@Override
public void release() {
adtsReader.release();
}
@Override
public long getLargestSampleTimestamp() {
return adtsReader.getLargestParsedTimestampUs();
}
@Override
public boolean getSample(int track, SampleHolder holder) {
Assertions.checkState(prepared);
Assertions.checkState(track == 0);
return adtsReader.getSample(holder);
}
@Override
public void discardUntil(int track, long timeUs) {
Assertions.checkState(prepared);
Assertions.checkState(track == 0);
adtsReader.discardUntil(timeUs);
}
@Override
public boolean hasSamples(int track) {
Assertions.checkState(prepared);
Assertions.checkState(track == 0);
return !adtsReader.isEmpty();
}
@Override
public int read(DataSource dataSource) throws IOException {
int bytesRead = dataSource.read(packetBuffer.data, 0, MAX_PACKET_SIZE);
public void read(ExtractorInput input) throws IOException, InterruptedException {
int bytesRead = input.read(packetBuffer.data, 0, MAX_PACKET_SIZE);
if (bytesRead == -1) {
return -1;
return;
}
// Feed whatever data we have to the reader, regardless of whether the read finished or not.
packetBuffer.setPosition(0);
packetBuffer.setLimit(bytesRead);
@ -111,16 +61,6 @@ public class AdtsExtractor extends HlsExtractor {
// unnecessary to copy the data through packetBuffer.
adtsReader.consume(packetBuffer, firstSampleTimestamp, firstPacket);
firstPacket = false;
if (!prepared) {
prepared = adtsReader.hasMediaFormat();
}
return bytesRead;
}
@Override
protected SampleQueue getSampleQueue(int track) {
Assertions.checkState(track == 0);
return adtsReader;
}
}

View file

@ -17,7 +17,7 @@ package com.google.android.exoplayer.hls.parser;
import com.google.android.exoplayer.C;
import com.google.android.exoplayer.MediaFormat;
import com.google.android.exoplayer.upstream.BufferPool;
import com.google.android.exoplayer.hls.parser.HlsExtractor.TrackOutput;
import com.google.android.exoplayer.util.CodecSpecificDataUtil;
import com.google.android.exoplayer.util.MimeTypes;
import com.google.android.exoplayer.util.ParsableBitArray;
@ -55,8 +55,8 @@ import java.util.Collections;
// Used when reading the samples.
private long timeUs;
public AdtsReader(BufferPool bufferPool) {
super(bufferPool);
public AdtsReader(TrackOutput output) {
super(output);
adtsScratch = new ParsableBitArray(new byte[HEADER_SIZE + CRC_SIZE]);
state = STATE_FINDING_SYNC;
}
@ -78,17 +78,17 @@ import java.util.Collections;
int targetLength = hasCrc ? HEADER_SIZE + CRC_SIZE : HEADER_SIZE;
if (continueRead(data, adtsScratch.getData(), targetLength)) {
parseHeader();
startSample(timeUs);
output.startSample(timeUs, 0);
bytesRead = 0;
state = STATE_READING_SAMPLE;
}
break;
case STATE_READING_SAMPLE:
int bytesToRead = Math.min(data.bytesLeft(), sampleSize - bytesRead);
appendData(data, bytesToRead);
output.appendData(data, bytesToRead);
bytesRead += bytesToRead;
if (bytesRead == sampleSize) {
commitSample(true);
output.commitSample(C.SAMPLE_FLAG_SYNC, 0, null);
timeUs += frameDurationUs;
bytesRead = 0;
state = STATE_FINDING_SYNC;
@ -152,7 +152,7 @@ import java.util.Collections;
private void parseHeader() {
adtsScratch.setPosition(0);
if (!hasMediaFormat()) {
if (!output.hasFormat()) {
int audioObjectType = adtsScratch.readBits(2) + 1;
int sampleRateIndex = adtsScratch.readBits(4);
adtsScratch.skipBits(1);
@ -167,7 +167,7 @@ import java.util.Collections;
MediaFormat.NO_VALUE, audioParams.second, audioParams.first,
Collections.singletonList(audioSpecificConfig));
frameDurationUs = (C.MICROS_PER_SECOND * 1024L) / mediaFormat.sampleRate;
setMediaFormat(mediaFormat);
output.setFormat(mediaFormat);
} else {
adtsScratch.skipBits(10);
}

View file

@ -0,0 +1,106 @@
/*
* Copyright (C) 2014 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.google.android.exoplayer.hls.parser;
import com.google.android.exoplayer.hls.parser.HlsExtractor.ExtractorInput;
import com.google.android.exoplayer.upstream.DataSource;
import java.io.IOException;
/**
 * An {@link ExtractorInput} that wraps a {@link DataSource}.
 */
public final class DataSourceExtractorInput implements ExtractorInput {

  // Discard sink used by skipFully. The contents are never read back, so sharing one buffer
  // process-wide is intentional; note concurrent loads will write into it simultaneously, which
  // is harmless only because the data is thrown away.
  private static final byte[] SCRATCH_SPACE = new byte[4096];

  private final DataSource dataSource;

  private long position;
  private boolean isEnded;

  /**
   * @param dataSource The wrapped {@link DataSource}.
   * @param position The initial position in the stream.
   */
  public DataSourceExtractorInput(DataSource dataSource, long position) {
    this.dataSource = dataSource;
    this.position = position;
  }

  @Override
  public int read(byte[] target, int offset, int length) throws IOException, InterruptedException {
    if (Thread.interrupted()) {
      throw new InterruptedException();
    }
    int bytesRead = dataSource.read(target, offset, length);
    if (bytesRead == -1) {
      isEnded = true;
      return -1;
    }
    position += bytesRead;
    return bytesRead;
  }

  @Override
  public boolean readFully(byte[] target, int offset, int length)
      throws IOException, InterruptedException {
    int remaining = length;
    while (remaining > 0) {
      if (Thread.interrupted()) {
        throw new InterruptedException();
      }
      int bytesRead = dataSource.read(target, offset, remaining);
      if (bytesRead == -1) {
        isEnded = true;
        return false;
      }
      // Advance position as each chunk of data is consumed, rather than once on completion, so
      // that getPosition() stays accurate for the bytes actually read even if a later iteration
      // hits the end of the input.
      position += bytesRead;
      offset += bytesRead;
      remaining -= bytesRead;
    }
    return true;
  }

  @Override
  public boolean skipFully(int length) throws IOException, InterruptedException {
    int remaining = length;
    while (remaining > 0) {
      if (Thread.interrupted()) {
        throw new InterruptedException();
      }
      int bytesRead = dataSource.read(SCRATCH_SPACE, 0, Math.min(SCRATCH_SPACE.length, remaining));
      if (bytesRead == -1) {
        isEnded = true;
        return false;
      }
      // As in readFully, account for skipped bytes immediately so a partial skip that ends at
      // EOS still leaves getPosition() consistent with what was consumed from the source.
      position += bytesRead;
      remaining -= bytesRead;
    }
    return true;
  }

  @Override
  public long getPosition() {
    return position;
  }

  @Override
  public boolean isEnded() {
    return isEnded;
  }

}

View file

@ -15,16 +15,21 @@
*/
package com.google.android.exoplayer.hls.parser;
import com.google.android.exoplayer.upstream.BufferPool;
import com.google.android.exoplayer.hls.parser.HlsExtractor.TrackOutput;
import com.google.android.exoplayer.util.ParsableByteArray;
/**
* Extracts individual samples from an elementary media stream, preserving original order.
*/
/* package */ abstract class ElementaryStreamReader extends SampleQueue {
/* package */ abstract class ElementaryStreamReader {
protected ElementaryStreamReader(BufferPool bufferPool) {
super(bufferPool);
protected final TrackOutput output;
/**
* @param output A {@link TrackOutput} to which samples should be written.
*/
protected ElementaryStreamReader(TrackOutput output) {
this.output = output;
}
/**

View file

@ -15,9 +15,10 @@
*/
package com.google.android.exoplayer.hls.parser;
import com.google.android.exoplayer.C;
import com.google.android.exoplayer.MediaFormat;
import com.google.android.exoplayer.hls.parser.HlsExtractor.TrackOutput;
import com.google.android.exoplayer.mp4.Mp4Util;
import com.google.android.exoplayer.upstream.BufferPool;
import com.google.android.exoplayer.util.Assertions;
import com.google.android.exoplayer.util.MimeTypes;
import com.google.android.exoplayer.util.ParsableBitArray;
@ -43,18 +44,20 @@ import java.util.List;
private final NalUnitTargetBuffer sps;
private final NalUnitTargetBuffer pps;
private final NalUnitTargetBuffer sei;
private final ParsableByteArray seiWrapper;
private int scratchEscapeCount;
private int[] scratchEscapePositions;
private boolean isKeyframe;
public H264Reader(BufferPool bufferPool, SeiReader seiReader) {
super(bufferPool);
public H264Reader(TrackOutput output, SeiReader seiReader) {
super(output);
this.seiReader = seiReader;
prefixFlags = new boolean[3];
sps = new NalUnitTargetBuffer(NAL_UNIT_TYPE_SPS, 128);
pps = new NalUnitTargetBuffer(NAL_UNIT_TYPE_PPS, 128);
sei = new NalUnitTargetBuffer(NAL_UNIT_TYPE_SEI, 128);
seiWrapper = new ParsableByteArray();
scratchEscapePositions = new int[10];
}
@ -66,7 +69,7 @@ import java.util.List;
byte[] dataArray = data.data;
// Append the data to the buffer.
appendData(data, data.bytesLeft());
output.appendData(data, data.bytesLeft());
// Scan the appended data, processing NAL units as they are encountered
while (offset < limit) {
@ -84,13 +87,13 @@ import java.util.List;
int nalUnitType = Mp4Util.getNalUnitType(dataArray, nextNalUnitOffset);
int nalUnitOffsetInData = nextNalUnitOffset - limit;
if (nalUnitType == NAL_UNIT_TYPE_AUD) {
if (writingSample()) {
if (isKeyframe && !hasMediaFormat() && sps.isCompleted() && pps.isCompleted()) {
if (output.isWritingSample()) {
if (isKeyframe && !output.hasFormat() && sps.isCompleted() && pps.isCompleted()) {
parseMediaFormat(sps, pps);
}
commitSample(isKeyframe, nalUnitOffsetInData);
output.commitSample(isKeyframe ? C.SAMPLE_FLAG_SYNC : 0, nalUnitOffsetInData, null);
}
startSample(pesTimeUs, nalUnitOffsetInData);
output.startSample(pesTimeUs, nalUnitOffsetInData);
isKeyframe = false;
} else if (nalUnitType == NAL_UNIT_TYPE_IDR) {
isKeyframe = true;
@ -117,7 +120,7 @@ import java.util.List;
}
private void feedNalUnitTargetBuffersStart(int nalUnitType) {
if (!hasMediaFormat()) {
if (!output.hasFormat()) {
sps.startNalUnit(nalUnitType);
pps.startNalUnit(nalUnitType);
}
@ -125,7 +128,7 @@ import java.util.List;
}
private void feedNalUnitTargetBuffersData(byte[] dataArray, int offset, int limit) {
if (!hasMediaFormat()) {
if (!output.hasFormat()) {
sps.appendToNalUnit(dataArray, offset, limit);
pps.appendToNalUnit(dataArray, offset, limit);
}
@ -137,7 +140,8 @@ import java.util.List;
pps.endNalUnit(discardPadding);
if (sei.endNalUnit(discardPadding)) {
int unescapedLength = unescapeStream(sei.nalData, sei.nalLength);
seiReader.read(sei.nalData, 0, unescapedLength, pesTimeUs);
seiWrapper.reset(sei.nalData, unescapedLength);
seiReader.consume(seiWrapper, pesTimeUs, true);
}
}
@ -229,7 +233,7 @@ import java.util.List;
}
// Set the format.
setMediaFormat(MediaFormat.createVideoFormat(MimeTypes.VIDEO_H264, MediaFormat.NO_VALUE,
output.setFormat(MediaFormat.createVideoFormat(MimeTypes.VIDEO_H264, MediaFormat.NO_VALUE,
frameWidth, frameHeight, initializationData));
}

View file

@ -16,136 +16,138 @@
package com.google.android.exoplayer.hls.parser;
import com.google.android.exoplayer.MediaFormat;
import com.google.android.exoplayer.SampleHolder;
import com.google.android.exoplayer.upstream.DataSource;
import com.google.android.exoplayer.util.ParsableByteArray;
import java.io.IOException;
/**
* Facilitates extraction of media samples for HLS playbacks.
*/
// TODO: Consider consolidating more common logic in this base class.
public abstract class HlsExtractor {
public interface HlsExtractor {
private final boolean shouldSpliceIn;
/**
* An object from which source data can be read.
*/
public interface ExtractorInput {
// Accessed only by the consuming thread.
private boolean spliceConfigured;
/**
* Reads up to {@code length} bytes from the input.
* <p>
* This method blocks until at least one byte of data can be read, the end of the input is
* detected, or an exception is thrown.
*
* @param target A target array into which data should be written.
* @param offset The offset into the target array at which to write.
* @param length The maximum number of bytes to read from the input.
* @return The number of bytes read, or -1 if the input has ended.
* @throws IOException If an error occurs reading from the input.
* @throws InterruptedException If the thread has been interrupted.
*/
int read(byte[] target, int offset, int length) throws IOException, InterruptedException;
/**
* Like {@link #read(byte[], int, int)}, but guaranteed to read the requested {@code length} in full
* unless the end of the input is detected, or an exception is thrown.
*
* TODO: Firm up behavior of this method if (a) zero bytes are read before EOS, (b) the read
* is partially satisfied before EOS.
*
* @param target A target array into which data should be written.
* @param offset The offset into the target array at which to write.
* @param length The number of bytes to read from the input.
* @return True if the read was successful. False if the end of the input was reached.
* @throws IOException If an error occurs reading from the input.
* @throws InterruptedException If the thread has been interrupted.
*/
boolean readFully(byte[] target, int offset, int length)
throws IOException, InterruptedException;
/**
* Like {@link #readFully(byte[], int, int)}, except the data is skipped instead of read.
*
* TODO: Firm up behavior of this method if (a) zero bytes are skipped before EOS, (b) the skip
* is partially satisfied before EOS.
*
* @param length The number of bytes to skip from the input.
* @return True if the read was successful. False if the end of the input was reached.
* @throws IOException If an error occurs reading from the input.
* @throws InterruptedException If the thread is interrupted.
*/
boolean skipFully(int length) throws IOException, InterruptedException;
/**
* The current position in the stream.
*
* @return The position in the stream.
*/
long getPosition();
/**
* Whether or not the input has ended.
*
* @return True if the input has ended. False otherwise.
*/
boolean isEnded();
public HlsExtractor(boolean shouldSpliceIn) {
this.shouldSpliceIn = shouldSpliceIn;
}
/**
* Attempts to configure a splice from this extractor to the next.
* <p>
* The splice is performed such that for each track the samples read from the next extractor
* start with a keyframe, and continue from where the samples read from this extractor finish.
* A successful splice may discard samples from either or both extractors.
* <p>
* Splice configuration may fail if the next extractor is not yet in a state that allows the
* splice to be performed. Calling this method is a noop if the splice has already been
* configured. Hence this method should be called repeatedly during the window within which a
* splice can be performed.
*
* @param nextExtractor The extractor being spliced to.
* An object to which extracted data should be output.
*/
public final void configureSpliceTo(HlsExtractor nextExtractor) {
if (spliceConfigured || !nextExtractor.shouldSpliceIn || !nextExtractor.isPrepared()) {
// The splice is already configured, or the next extractor doesn't want to be spliced in, or
// the next extractor isn't ready to be spliced in.
return;
}
boolean spliceConfigured = true;
int trackCount = getTrackCount();
for (int i = 0; i < trackCount; i++) {
spliceConfigured &= getSampleQueue(i).configureSpliceTo(nextExtractor.getSampleQueue(i));
}
this.spliceConfigured = spliceConfigured;
return;
public interface TrackOutputBuilder {
/**
* Invoked to build a {@link TrackOutput} to which data should be output for a given track.
*
* @param trackId A stable track id.
* @return The corresponding {@link TrackOutput}.
*/
TrackOutput buildOutput(int trackId);
/**
* Invoked when all {@link TrackOutput}s have been built, meaning {@link #buildOutput(int)}
* will not be invoked again.
*/
void allOutputsBuilt();
}
/**
* Gets the number of available tracks.
* <p>
* This method should only be called after the extractor has been prepared.
*
* @return The number of available tracks.
* An object to which extracted data belonging to a given track should be output.
*/
public abstract int getTrackCount();
public interface TrackOutput {
boolean hasFormat();
void setFormat(MediaFormat format);
boolean isWritingSample();
int appendData(DataSource dataSource, int length) throws IOException;
void appendData(ParsableByteArray data, int length);
void startSample(long timeUs, int offset);
void commitSample(int flags, int offset, byte[] encryptionKey);
}
/**
* Gets the format of the specified track.
* <p>
* This method must only be called after the extractor has been prepared.
* Initializes the extractor.
*
* @param track The track index.
* @return The corresponding format.
* @param output A {@link TrackOutputBuilder} to which extracted data should be output.
*/
public abstract MediaFormat getFormat(int track);
void init(TrackOutputBuilder output);
/**
* Whether the extractor is prepared.
* Reads from the provided {@link ExtractorInput}.
*
* @return True if the extractor is prepared. False otherwise.
*/
public abstract boolean isPrepared();
/**
* Releases the extractor, recycling any pending or incomplete samples to the sample pool.
* <p>
* This method should not be called whilst {@link #read(DataSource)} is also being invoked.
*/
public abstract void release();
/**
* Gets the largest timestamp of any sample parsed by the extractor.
*
* @return The largest timestamp, or {@link Long#MIN_VALUE} if no samples have been parsed.
*/
public abstract long getLargestSampleTimestamp();
/**
* Gets the next sample for the specified track.
*
* @param track The track from which to read.
* @param holder A {@link SampleHolder} into which the sample should be read.
* @return True if a sample was read. False otherwise.
*/
public abstract boolean getSample(int track, SampleHolder holder);
/**
* Discards samples for the specified track up to the specified time.
*
* @param track The track from which samples should be discarded.
* @param timeUs The time up to which samples should be discarded, in microseconds.
*/
public abstract void discardUntil(int track, long timeUs);
/**
* Whether samples are available for reading from {@link #getSample(int, SampleHolder)} for the
* specified track.
*
* @return True if samples are available for reading from {@link #getSample(int, SampleHolder)}
* for the specified track. False otherwise.
*/
public abstract boolean hasSamples(int track);
/**
* Reads up to a single TS packet.
*
* @param dataSource The {@link DataSource} from which to read.
* @param input The {@link ExtractorInput} from which to read.
* @throws IOException If an error occurred reading from the source.
* @return The number of bytes read from the source.
* @throws InterruptedException If the thread was interrupted.
*/
public abstract int read(DataSource dataSource) throws IOException;
/**
* Gets the {@link SampleQueue} for the specified track.
*
* @param track The track index.
* @return The corresponding sample queue.
*/
protected abstract SampleQueue getSampleQueue(int track);
void read(ExtractorInput input) throws IOException, InterruptedException;
}

View file

@ -0,0 +1,210 @@
/*
* Copyright (C) 2014 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.google.android.exoplayer.hls.parser;
import com.google.android.exoplayer.MediaFormat;
import com.google.android.exoplayer.SampleHolder;
import com.google.android.exoplayer.hls.parser.HlsExtractor.ExtractorInput;
import com.google.android.exoplayer.hls.parser.HlsExtractor.TrackOutput;
import com.google.android.exoplayer.upstream.BufferPool;
import com.google.android.exoplayer.util.Assertions;
import android.util.SparseArray;
import java.io.IOException;
/**
* Wraps a {@link HlsExtractor}, adding functionality to enable reading of the extracted samples.
*/
public final class HlsExtractorWrapper implements HlsExtractor.TrackOutputBuilder {
private final BufferPool bufferPool;
private final HlsExtractor extractor;
private final SparseArray<SampleQueue> sampleQueues;
private final boolean shouldSpliceIn;
private volatile boolean outputsBuilt;
// Accessed only by the consuming thread.
private boolean prepared;
private boolean spliceConfigured;
/**
 * @param bufferPool Buffer pool for the wrapper's sample queues. Presumably consumed when track
 *     outputs are built — buildOutput is not visible here; confirm against the full class.
 * @param extractor The extractor whose extracted samples this wrapper exposes for reading.
 * @param shouldSpliceIn Whether samples from this extractor may be spliced onto those of a
 *     preceding extractor (see {@link #configureSpliceTo}).
 */
public HlsExtractorWrapper(BufferPool bufferPool, HlsExtractor extractor,
boolean shouldSpliceIn) {
this.bufferPool = bufferPool;
this.extractor = extractor;
this.shouldSpliceIn = shouldSpliceIn;
sampleQueues = new SparseArray<SampleQueue>();
// Registers this wrapper as the extractor's output builder: the extractor will invoke
// buildOutput(trackId) for each track and then allOutputsBuilt().
extractor.init(this);
}
/**
 * Attempts to configure a splice from this extractor to the next.
 * <p>
 * The splice is performed such that for each track the samples read from the next extractor
 * start with a keyframe, and continue from where the samples read from this extractor finish.
 * A successful splice may discard samples from either or both extractors.
 * <p>
 * Splice configuration may fail if the next extractor is not yet in a state that allows the
 * splice to be performed. Calling this method is a noop if the splice has already been
 * configured. Hence this method should be called repeatedly during the window within which a
 * splice can be performed.
 *
 * @param nextExtractor The extractor being spliced to.
 */
public final void configureSpliceTo(HlsExtractorWrapper nextExtractor) {
  if (spliceConfigured || !nextExtractor.shouldSpliceIn || !nextExtractor.isPrepared()) {
    // The splice is already configured, or the next extractor doesn't want to be spliced in, or
    // the next extractor isn't ready to be spliced in.
    return;
  }
  // Accumulate into a local (distinctly named, to avoid shadowing the field) so the field is
  // only written once every queue pair has been consulted.
  boolean configured = true;
  int trackCount = getTrackCount();
  for (int i = 0; i < trackCount; i++) {
    // NOTE(review): tracks are paired by SparseArray index, which assumes both wrappers expose
    // their sample queues in the same order — confirm track ids match across chunk boundaries.
    SampleQueue currentSampleQueue = sampleQueues.valueAt(i);
    SampleQueue nextSampleQueue = nextExtractor.sampleQueues.valueAt(i);
    configured &= currentSampleQueue.configureSpliceTo(nextSampleQueue);
  }
  spliceConfigured = configured;
}
/**
* Gets the number of available tracks.
* <p>
* This method should only be called after the extractor has been prepared.
*
* @return The number of available tracks.
*/
public int getTrackCount() {
return sampleQueues.size();
}
/**
* Gets the format of the specified track.
* <p>
* This method must only be called after the extractor has been prepared.
*
* @param track The track index.
* @return The corresponding format.
*/
public MediaFormat getFormat(int track) {
return sampleQueues.valueAt(track).getFormat();
}
/**
* Whether the extractor is prepared.
*
* @return True if the extractor is prepared. False otherwise.
*/
public boolean isPrepared() {
if (!prepared && outputsBuilt) {
for (int i = 0; i < sampleQueues.size(); i++) {
if (!sampleQueues.valueAt(i).hasFormat()) {
return false;
}
}
prepared = true;
}
return prepared;
}
/**
* Releases the extractor, recycling any pending or incomplete samples to the sample pool.
* <p>
* This method should not be called whilst {@link #read(ExtractorInput)} is also being invoked.
*/
public void release() {
for (int i = 0; i < sampleQueues.size(); i++) {
sampleQueues.valueAt(i).release();
}
}
/**
* Gets the largest timestamp of any sample parsed by the extractor.
*
* @return The largest timestamp, or {@link Long#MIN_VALUE} if no samples have been parsed.
*/
public long getLargestSampleTimestamp() {
long largestParsedTimestampUs = Long.MIN_VALUE;
for (int i = 0; i < sampleQueues.size(); i++) {
largestParsedTimestampUs = Math.max(largestParsedTimestampUs,
sampleQueues.valueAt(i).getLargestParsedTimestampUs());
}
return largestParsedTimestampUs;
}
/**
* Gets the next sample for the specified track.
*
* @param track The track from which to read.
* @param holder A {@link SampleHolder} into which the sample should be read.
* @return True if a sample was read. False otherwise.
*/
public boolean getSample(int track, SampleHolder holder) {
Assertions.checkState(isPrepared());
return sampleQueues.valueAt(track).getSample(holder);
}
/**
* Discards samples for the specified track up to the specified time.
*
* @param track The track from which samples should be discarded.
* @param timeUs The time up to which samples should be discarded, in microseconds.
*/
public void discardUntil(int track, long timeUs) {
Assertions.checkState(isPrepared());
sampleQueues.valueAt(track).discardUntil(timeUs);
}
/**
* Whether samples are available for reading from {@link #getSample(int, SampleHolder)} for the
* specified track.
*
* @return True if samples are available for reading from {@link #getSample(int, SampleHolder)}
* for the specified track. False otherwise.
*/
public boolean hasSamples(int track) {
Assertions.checkState(isPrepared());
return !sampleQueues.valueAt(track).isEmpty();
}
/**
* Reads from the provided {@link ExtractorInput}.
*
* @param input The {@link ExtractorInput} from which to read.
* @throws IOException If an error occurred reading from the source.
* @throws InterruptedException If the thread was interrupted.
*/
public void read(ExtractorInput input) throws IOException, InterruptedException {
extractor.read(input);
}
// ExtractorOutput implementation.
@Override
public TrackOutput buildOutput(int id) {
SampleQueue sampleQueue = new SampleQueue(bufferPool);
sampleQueues.put(id, sampleQueue);
return sampleQueue;
}
@Override
public void allOutputsBuilt() {
this.outputsBuilt = true;
}
}

View file

@ -15,8 +15,9 @@
*/
package com.google.android.exoplayer.hls.parser;
import com.google.android.exoplayer.C;
import com.google.android.exoplayer.MediaFormat;
import com.google.android.exoplayer.upstream.BufferPool;
import com.google.android.exoplayer.hls.parser.HlsExtractor.TrackOutput;
import com.google.android.exoplayer.util.ParsableByteArray;
/**
@ -24,24 +25,24 @@ import com.google.android.exoplayer.util.ParsableByteArray;
*/
/* package */ class Id3Reader extends ElementaryStreamReader {
public Id3Reader(BufferPool bufferPool) {
super(bufferPool);
setMediaFormat(MediaFormat.createId3Format());
public Id3Reader(TrackOutput output) {
super(output);
output.setFormat(MediaFormat.createId3Format());
}
@Override
public void consume(ParsableByteArray data, long pesTimeUs, boolean startOfPacket) {
if (startOfPacket) {
startSample(pesTimeUs);
output.startSample(pesTimeUs, 0);
}
if (writingSample()) {
appendData(data, data.bytesLeft());
if (output.isWritingSample()) {
output.appendData(data, data.bytesLeft());
}
}
@Override
public void packetFinished() {
commitSample(true);
output.commitSample(C.SAMPLE_FLAG_SYNC, 0, null);
}
}

View file

@ -18,9 +18,11 @@ package com.google.android.exoplayer.hls.parser;
import com.google.android.exoplayer.C;
import com.google.android.exoplayer.SampleHolder;
import com.google.android.exoplayer.upstream.BufferPool;
import com.google.android.exoplayer.upstream.DataSource;
import com.google.android.exoplayer.util.Assertions;
import com.google.android.exoplayer.util.ParsableByteArray;
import java.io.IOException;
import java.nio.ByteBuffer;
import java.util.concurrent.ConcurrentLinkedQueue;
@ -29,12 +31,15 @@ import java.util.concurrent.ConcurrentLinkedQueue;
*/
/* package */ final class RollingSampleBuffer {
private static final int INITIAL_SCRATCH_SIZE = 32;
private final BufferPool fragmentPool;
private final int fragmentLength;
private final InfoQueue infoQueue;
private final ConcurrentLinkedQueue<byte[]> dataQueue;
private final long[] dataOffsetHolder;
private final SampleExtrasHolder extrasHolder;
private final ParsableByteArray scratch;
// Accessed only by the consuming thread.
private long totalBytesDropped;
@ -51,7 +56,8 @@ import java.util.concurrent.ConcurrentLinkedQueue;
fragmentLength = bufferPool.bufferLength;
infoQueue = new InfoQueue();
dataQueue = new ConcurrentLinkedQueue<byte[]>();
dataOffsetHolder = new long[1];
extrasHolder = new SampleExtrasHolder();
scratch = new ParsableByteArray(INITIAL_SCRATCH_SIZE);
}
public void release() {
@ -71,7 +77,7 @@ import java.util.concurrent.ConcurrentLinkedQueue;
* @return True if the holder was filled. False if there is no current sample.
*/
public boolean peekSample(SampleHolder holder) {
return infoQueue.peekSample(holder, dataOffsetHolder);
return infoQueue.peekSample(holder, extrasHolder);
}
/**
@ -85,23 +91,99 @@ import java.util.concurrent.ConcurrentLinkedQueue;
/**
* Reads the current sample, advancing the read index to the next sample.
*
* @param holder The holder into which the current sample should be written.
* @param sampleHolder The holder into which the current sample should be written.
*/
public void readSample(SampleHolder holder) {
// Write the sample information into the holder.
infoQueue.peekSample(holder, dataOffsetHolder);
// Write the sample data into the holder.
if (holder.data == null || holder.data.capacity() < holder.size) {
holder.replaceBuffer(holder.size);
public void readSample(SampleHolder sampleHolder) {
// Write the sample information into the holder and extrasHolder.
infoQueue.peekSample(sampleHolder, extrasHolder);
// Read encryption data if the sample is encrypted.
if ((sampleHolder.flags & C.SAMPLE_FLAG_ENCRYPTED) != 0) {
readEncryptionData(sampleHolder, extrasHolder);
}
if (holder.data != null) {
readData(dataOffsetHolder[0], holder.data, holder.size);
// Write the sample data into the holder.
if (sampleHolder.data == null || sampleHolder.data.capacity() < sampleHolder.size) {
sampleHolder.replaceBuffer(sampleHolder.size);
}
if (sampleHolder.data != null) {
readData(extrasHolder.offset, sampleHolder.data, sampleHolder.size);
}
// Advance the read head.
long nextOffset = infoQueue.moveToNextSample();
dropFragmentsTo(nextOffset);
}
  /**
   * Reads encryption data for the current sample.
   * <p>
   * The encryption data is written into {@code sampleHolder.cryptoInfo}, and
   * {@code sampleHolder.size} is adjusted to subtract the number of bytes that were read. The
   * same value is added to {@code extrasHolder.offset}, so that afterwards the offset points at
   * the first byte of the sample payload itself.
   *
   * @param sampleHolder The holder into which the encryption data should be written.
   * @param extrasHolder The extras holder whose offset should be read and subsequently adjusted.
   */
  private void readEncryptionData(SampleHolder sampleHolder, SampleExtrasHolder extrasHolder) {
    long offset = extrasHolder.offset;
    // Read the signal byte. The top bit indicates whether subsample encryption data follows; the
    // low 7 bits give the initialization vector size.
    readData(offset, scratch.data, 1);
    offset++;
    byte signalByte = scratch.data[0];
    boolean subsampleEncryption = (signalByte & 0x80) != 0;
    int ivSize = signalByte & 0x7F;
    // Read the initialization vector. The buffer is lazily allocated at 16 bytes and reused;
    // only the first ivSize bytes are overwritten on each call.
    if (sampleHolder.cryptoInfo.iv == null) {
      sampleHolder.cryptoInfo.iv = new byte[16];
    }
    readData(offset, sampleHolder.cryptoInfo.iv, ivSize);
    offset += ivSize;
    // Read the subsample count, if present. A fully encrypted sample is treated as a single
    // subsample with no clear bytes.
    int subsampleCount;
    if (subsampleEncryption) {
      readData(offset, scratch.data, 2);
      offset += 2;
      scratch.setPosition(0);
      subsampleCount = scratch.readUnsignedShort();
    } else {
      subsampleCount = 1;
    }
    // Write the clear and encrypted subsample sizes, reusing the existing cryptoInfo arrays when
    // they are large enough.
    int[] clearDataSizes = sampleHolder.cryptoInfo.numBytesOfClearData;
    if (clearDataSizes == null || clearDataSizes.length < subsampleCount) {
      clearDataSizes = new int[subsampleCount];
    }
    int[] encryptedDataSizes = sampleHolder.cryptoInfo.numBytesOfEncryptedData;
    if (encryptedDataSizes == null || encryptedDataSizes.length < subsampleCount) {
      encryptedDataSizes = new int[subsampleCount];
    }
    if (subsampleEncryption) {
      // Each subsample entry is 6 bytes: a 2 byte clear size and a 4 byte encrypted size.
      int subsampleDataLength = 6 * subsampleCount;
      ensureCapacity(scratch, subsampleDataLength);
      readData(offset, scratch.data, subsampleDataLength);
      offset += subsampleDataLength;
      scratch.setPosition(0);
      for (int i = 0; i < subsampleCount; i++) {
        clearDataSizes[i] = scratch.readUnsignedShort();
        encryptedDataSizes[i] = scratch.readUnsignedIntToInt();
      }
    } else {
      // Everything after the encryption header is encrypted.
      clearDataSizes[0] = 0;
      encryptedDataSizes[0] = sampleHolder.size - (int) (offset - extrasHolder.offset);
    }
    // Populate the cryptoInfo.
    sampleHolder.cryptoInfo.set(subsampleCount, clearDataSizes, encryptedDataSizes,
        extrasHolder.encryptionKeyId, sampleHolder.cryptoInfo.iv, C.CRYPTO_MODE_AES_CTR);
    // Adjust the offset and size to take into account the bytes read.
    int bytesRead = (int) (offset - extrasHolder.offset);
    extrasHolder.offset += bytesRead;
    sampleHolder.size -= bytesRead;
  }
/**
* Reads data from the front of the rolling buffer.
*
@ -121,6 +203,26 @@ import java.util.concurrent.ConcurrentLinkedQueue;
}
}
/**
* Reads data from the front of the rolling buffer.
*
* @param absolutePosition The absolute position from which data should be read.
* @param target The array into which data should be written.
* @param length The number of bytes to read.
*/
// TODO: Consider reducing duplication of this method and the one above.
private void readData(long absolutePosition, byte[] target, int length) {
int remaining = length;
while (remaining > 0) {
dropFragmentsTo(absolutePosition);
int positionInFragment = (int) (absolutePosition - totalBytesDropped);
int toCopy = Math.min(remaining, fragmentLength - positionInFragment);
System.arraycopy(dataQueue.peek(), positionInFragment, target, 0, toCopy);
absolutePosition += toCopy;
remaining -= toCopy;
}
}
/**
* Discard any fragments that hold data prior to the specified absolute position, returning
* them to the pool.
@ -136,6 +238,15 @@ import java.util.concurrent.ConcurrentLinkedQueue;
}
}
/**
* Ensure that the passed {@link ParsableByteArray} is of at least the specified limit.
*/
private static void ensureCapacity(ParsableByteArray byteArray, int limit) {
if (byteArray.limit() < limit) {
byteArray.reset(new byte[limit], limit);
}
}
// Called by the loading thread.
/**
@ -151,12 +262,39 @@ import java.util.concurrent.ConcurrentLinkedQueue;
pendingSampleOffset = totalBytesWritten + offset;
}
/**
* Appends data to the rolling buffer.
*
* @param dataSource The source from which to read.
* @param length The maximum length of the read.
* @return The number of bytes read, or -1 if the the end of the source has been reached.
* @throws IOException If an error occurs reading from the source.
*/
public int appendData(DataSource dataSource, int length) throws IOException {
int remainingWriteLength = length;
if (dataQueue.isEmpty() || lastFragmentOffset == fragmentLength) {
lastFragmentOffset = 0;
lastFragment = fragmentPool.allocateDirect();
dataQueue.add(lastFragment);
}
int thisWriteLength = Math.min(remainingWriteLength, fragmentLength - lastFragmentOffset);
int bytesRead = dataSource.read(lastFragment, lastFragmentOffset, thisWriteLength);
if (bytesRead == -1) {
return -1;
}
lastFragmentOffset += bytesRead;
remainingWriteLength -= bytesRead;
totalBytesWritten += bytesRead;
return bytesRead;
}
/**
* Appends data to the rolling buffer.
*
* @param buffer A buffer containing the data to append.
* @param length The length of the data to append.
*/
// TODO: Consider reducing duplication of this method and the one above.
public void appendData(ParsableByteArray buffer, int length) {
int remainingWriteLength = length;
while (remainingWriteLength > 0) {
@ -176,21 +314,22 @@ import java.util.concurrent.ConcurrentLinkedQueue;
/**
* Indicates the end point for the current sample, making it available for consumption.
*
* @param isKeyframe True if the sample being committed is a keyframe. False otherwise.
* @param flags Flags that accompany the sample. See {@link SampleHolder#flags}.
* @param offset The offset of the first byte after the end of the sample's data, relative to
* the total number of bytes written to the buffer. Must be negative or zero.
* @param encryptionKey The encryption key associated with the sample, or null.
*/
public void commitSample(boolean isKeyframe, int offset) {
public void commitSample(int flags, int offset, byte[] encryptionKey) {
Assertions.checkState(offset <= 0);
int sampleSize = (int) (totalBytesWritten + offset - pendingSampleOffset);
infoQueue.commitSample(pendingSampleTimeUs, pendingSampleOffset, sampleSize,
isKeyframe ? C.SAMPLE_FLAG_SYNC : 0);
infoQueue.commitSample(pendingSampleTimeUs, pendingSampleOffset, sampleSize, flags,
encryptionKey);
}
/**
* Holds information about the samples in the rolling buffer.
*/
private static class InfoQueue {
private static final class InfoQueue {
private static final int SAMPLE_CAPACITY_INCREMENT = 1000;
@ -200,6 +339,7 @@ import java.util.concurrent.ConcurrentLinkedQueue;
private int[] sizes;
private int[] flags;
private long[] timesUs;
private byte[][] encryptionKeys;
private int queueSize;
private int readIndex;
@ -211,6 +351,7 @@ import java.util.concurrent.ConcurrentLinkedQueue;
timesUs = new long[capacity];
flags = new int[capacity];
sizes = new int[capacity];
encryptionKeys = new byte[capacity][];
}
// Called by the consuming thread.
@ -224,18 +365,18 @@ import java.util.concurrent.ConcurrentLinkedQueue;
* {@code offsetHolder[0]}.
*
* @param holder The holder into which the current sample information should be written.
* @param offsetHolder The holder into which the absolute position of the sample's data should
* be written.
* @param extrasHolder The holder into which extra sample information should be written.
* @return True if the holders were filled. False if there is no current sample.
*/
public synchronized boolean peekSample(SampleHolder holder, long[] offsetHolder) {
public synchronized boolean peekSample(SampleHolder holder, SampleExtrasHolder extrasHolder) {
if (queueSize == 0) {
return false;
}
holder.timeUs = timesUs[readIndex];
holder.size = sizes[readIndex];
holder.flags = flags[readIndex];
offsetHolder[0] = offsets[readIndex];
extrasHolder.offset = offsets[readIndex];
extrasHolder.encryptionKeyId = encryptionKeys[readIndex];
return true;
}
@ -257,11 +398,13 @@ import java.util.concurrent.ConcurrentLinkedQueue;
// Called by the loading thread.
public synchronized void commitSample(long timeUs, long offset, int size, int sampleFlags) {
public synchronized void commitSample(long timeUs, long offset, int size, int sampleFlags,
byte[] encryptionKey) {
timesUs[writeIndex] = timeUs;
offsets[writeIndex] = offset;
sizes[writeIndex] = size;
flags[writeIndex] = sampleFlags;
encryptionKeys[writeIndex] = encryptionKey;
// Increment the write index.
queueSize++;
if (queueSize == capacity) {
@ -271,20 +414,24 @@ import java.util.concurrent.ConcurrentLinkedQueue;
long[] newTimesUs = new long[newCapacity];
int[] newFlags = new int[newCapacity];
int[] newSizes = new int[newCapacity];
byte[][] newEncryptionKeys = new byte[newCapacity][];
int beforeWrap = capacity - readIndex;
System.arraycopy(offsets, readIndex, newOffsets, 0, beforeWrap);
System.arraycopy(timesUs, readIndex, newTimesUs, 0, beforeWrap);
System.arraycopy(flags, readIndex, newFlags, 0, beforeWrap);
System.arraycopy(sizes, readIndex, newSizes, 0, beforeWrap);
System.arraycopy(encryptionKeys, readIndex, newEncryptionKeys, 0, beforeWrap);
int afterWrap = readIndex;
System.arraycopy(offsets, 0, newOffsets, beforeWrap, afterWrap);
System.arraycopy(timesUs, 0, newTimesUs, beforeWrap, afterWrap);
System.arraycopy(flags, 0, newFlags, beforeWrap, afterWrap);
System.arraycopy(sizes, 0, newSizes, beforeWrap, afterWrap);
System.arraycopy(encryptionKeys, 0, newEncryptionKeys, beforeWrap, afterWrap);
offsets = newOffsets;
timesUs = newTimesUs;
flags = newFlags;
sizes = newSizes;
encryptionKeys = newEncryptionKeys;
readIndex = 0;
writeIndex = capacity;
queueSize = capacity;
@ -300,4 +447,14 @@ import java.util.concurrent.ConcurrentLinkedQueue;
}
  /**
   * Holds additional sample information not held by {@link SampleHolder}.
   */
  private static final class SampleExtrasHolder {
    // Absolute position of the start of the sample's data within the rolling buffer.
    public long offset;
    // The encryption key id associated with the sample, or null (see commitSample).
    public byte[] encryptionKeyId;
  }
}

View file

@ -18,15 +18,19 @@ package com.google.android.exoplayer.hls.parser;
import com.google.android.exoplayer.C;
import com.google.android.exoplayer.MediaFormat;
import com.google.android.exoplayer.SampleHolder;
import com.google.android.exoplayer.hls.parser.HlsExtractor.TrackOutput;
import com.google.android.exoplayer.upstream.BufferPool;
import com.google.android.exoplayer.upstream.DataSource;
import com.google.android.exoplayer.util.ParsableByteArray;
import java.io.IOException;
/**
* Wraps a {@link RollingSampleBuffer}, adding higher level functionality such as enforcing that
* the first sample returned from the queue is a keyframe, allowing splicing to another queue, and
* so on.
*/
/* package */ abstract class SampleQueue {
public final class SampleQueue implements TrackOutput {
private final RollingSampleBuffer rollingBuffer;
private final SampleHolder sampleInfoHolder;
@ -40,10 +44,10 @@ import com.google.android.exoplayer.util.ParsableByteArray;
private boolean writingSample;
// Accessed by both the loading and consuming threads.
private volatile MediaFormat mediaFormat;
private volatile long largestParsedTimestampUs;
private volatile MediaFormat format;
protected SampleQueue(BufferPool bufferPool) {
public SampleQueue(BufferPool bufferPool) {
rollingBuffer = new RollingSampleBuffer(bufferPool);
sampleInfoHolder = new SampleHolder(SampleHolder.BUFFER_REPLACEMENT_MODE_DISABLED);
needKeyframe = true;
@ -58,18 +62,14 @@ import com.google.android.exoplayer.util.ParsableByteArray;
// Called by the consuming thread.
public MediaFormat getFormat() {
return format;
}
public long getLargestParsedTimestampUs() {
return largestParsedTimestampUs;
}
public boolean hasMediaFormat() {
return mediaFormat != null;
}
public MediaFormat getMediaFormat() {
return mediaFormat;
}
public boolean isEmpty() {
return !advanceToEligibleSample();
}
@ -166,37 +166,44 @@ import com.google.android.exoplayer.util.ParsableByteArray;
return true;
}
// Called by the loading thread.
// TrackOutput implementation. Called by the loading thread.
protected boolean writingSample() {
return writingSample;
@Override
public boolean hasFormat() {
return format != null;
}
protected void setMediaFormat(MediaFormat mediaFormat) {
this.mediaFormat = mediaFormat;
@Override
public void setFormat(MediaFormat format) {
this.format = format;
}
protected void startSample(long sampleTimeUs) {
startSample(sampleTimeUs, 0);
@Override
public int appendData(DataSource dataSource, int length) throws IOException {
return rollingBuffer.appendData(dataSource, length);
}
protected void startSample(long sampleTimeUs, int offset) {
@Override
public void appendData(ParsableByteArray buffer, int length) {
rollingBuffer.appendData(buffer, length);
}
@Override
public void startSample(long sampleTimeUs, int offset) {
writingSample = true;
largestParsedTimestampUs = Math.max(largestParsedTimestampUs, sampleTimeUs);
rollingBuffer.startSample(sampleTimeUs, offset);
}
protected void appendData(ParsableByteArray buffer, int length) {
rollingBuffer.appendData(buffer, length);
}
protected void commitSample(boolean isKeyframe) {
commitSample(isKeyframe, 0);
}
protected void commitSample(boolean isKeyframe, int offset) {
rollingBuffer.commitSample(isKeyframe, offset);
@Override
public void commitSample(int flags, int offset, byte[] encryptionKey) {
rollingBuffer.commitSample(flags, offset, encryptionKey);
writingSample = false;
}
@Override
public boolean isWritingSample() {
return writingSample;
}
}

View file

@ -15,9 +15,10 @@
*/
package com.google.android.exoplayer.hls.parser;
import com.google.android.exoplayer.C;
import com.google.android.exoplayer.MediaFormat;
import com.google.android.exoplayer.hls.parser.HlsExtractor.TrackOutput;
import com.google.android.exoplayer.text.eia608.Eia608Parser;
import com.google.android.exoplayer.upstream.BufferPool;
import com.google.android.exoplayer.util.ParsableByteArray;
/**
@ -26,20 +27,17 @@ import com.google.android.exoplayer.util.ParsableByteArray;
* TODO: Technically, we shouldn't allow a sample to be read from the queue until we're sure that
* a sample with an earlier timestamp won't be added to it.
*/
/* package */ class SeiReader extends SampleQueue {
/* package */ class SeiReader extends ElementaryStreamReader {
private final ParsableByteArray seiBuffer;
public SeiReader(BufferPool bufferPool) {
super(bufferPool);
setMediaFormat(MediaFormat.createEia608Format());
seiBuffer = new ParsableByteArray();
public SeiReader(TrackOutput output) {
super(output);
output.setFormat(MediaFormat.createEia608Format());
}
public void read(byte[] data, int position, int limit, long pesTimeUs) {
seiBuffer.reset(data, limit);
@Override
public void consume(ParsableByteArray seiBuffer, long pesTimeUs, boolean startOfPacket) {
// Skip the NAL prefix and type.
seiBuffer.setPosition(position + 4);
seiBuffer.skip(4);
int b;
while (seiBuffer.bytesLeft() > 1 /* last byte will be rbsp_trailing_bits */) {
@ -57,13 +55,18 @@ import com.google.android.exoplayer.util.ParsableByteArray;
} while (b == 0xFF);
// Process the payload. We only support EIA-608 payloads currently.
if (Eia608Parser.isSeiMessageEia608(payloadType, payloadSize, seiBuffer)) {
startSample(pesTimeUs);
appendData(seiBuffer, payloadSize);
commitSample(true);
output.startSample(pesTimeUs, 0);
output.appendData(seiBuffer, payloadSize);
output.commitSample(C.SAMPLE_FLAG_SYNC, 0, null);
} else {
seiBuffer.skip(payloadSize);
}
}
}
@Override
public void packetFinished() {
// Do nothing.
}
}

View file

@ -16,11 +16,6 @@
package com.google.android.exoplayer.hls.parser;
import com.google.android.exoplayer.C;
import com.google.android.exoplayer.MediaFormat;
import com.google.android.exoplayer.SampleHolder;
import com.google.android.exoplayer.upstream.BufferPool;
import com.google.android.exoplayer.upstream.DataSource;
import com.google.android.exoplayer.util.Assertions;
import com.google.android.exoplayer.util.ParsableBitArray;
import com.google.android.exoplayer.util.ParsableByteArray;
@ -32,7 +27,7 @@ import java.io.IOException;
/**
* Facilitates the extraction of data from the MPEG-2 TS container format.
*/
public final class TsExtractor extends HlsExtractor {
public final class TsExtractor implements HlsExtractor {
private static final String TAG = "TsExtractor";
@ -48,119 +43,41 @@ public final class TsExtractor extends HlsExtractor {
private static final long MAX_PTS = 0x1FFFFFFFFL;
private final ParsableByteArray tsPacketBuffer;
private final SparseArray<SampleQueue> sampleQueues; // Indexed by streamType
private final SparseArray<ElementaryStreamReader> streamReaders; // Indexed by streamType
private final SparseArray<TsPayloadReader> tsPayloadReaders; // Indexed by pid
private final BufferPool bufferPool;
private final long firstSampleTimestamp;
private final ParsableBitArray tsScratch;
// Accessed only by the loading thread.
private int tsPacketBytesRead;
private TrackOutputBuilder output;
private long timestampOffsetUs;
private long lastPts;
// Accessed by both the loading and consuming threads.
private volatile boolean prepared;
public TsExtractor(boolean shouldSpliceIn, long firstSampleTimestamp, BufferPool bufferPool) {
super(shouldSpliceIn);
public TsExtractor(long firstSampleTimestamp) {
this.firstSampleTimestamp = firstSampleTimestamp;
this.bufferPool = bufferPool;
tsScratch = new ParsableBitArray(new byte[3]);
tsPacketBuffer = new ParsableByteArray(TS_PACKET_SIZE);
sampleQueues = new SparseArray<SampleQueue>();
streamReaders = new SparseArray<ElementaryStreamReader>();
tsPayloadReaders = new SparseArray<TsPayloadReader>();
tsPayloadReaders.put(TS_PAT_PID, new PatReader());
lastPts = Long.MIN_VALUE;
}
@Override
public int getTrackCount() {
Assertions.checkState(prepared);
return sampleQueues.size();
public void init(TrackOutputBuilder output) {
this.output = output;
}
@Override
public MediaFormat getFormat(int track) {
Assertions.checkState(prepared);
return sampleQueues.valueAt(track).getMediaFormat();
}
@Override
public boolean isPrepared() {
return prepared;
}
@Override
public void release() {
for (int i = 0; i < sampleQueues.size(); i++) {
sampleQueues.valueAt(i).release();
}
}
@Override
public long getLargestSampleTimestamp() {
long largestParsedTimestampUs = Long.MIN_VALUE;
for (int i = 0; i < sampleQueues.size(); i++) {
largestParsedTimestampUs = Math.max(largestParsedTimestampUs,
sampleQueues.valueAt(i).getLargestParsedTimestampUs());
}
return largestParsedTimestampUs;
}
@Override
public boolean getSample(int track, SampleHolder holder) {
Assertions.checkState(prepared);
return sampleQueues.valueAt(track).getSample(holder);
}
@Override
public void discardUntil(int track, long timeUs) {
Assertions.checkState(prepared);
sampleQueues.valueAt(track).discardUntil(timeUs);
}
@Override
public boolean hasSamples(int track) {
Assertions.checkState(prepared);
return !sampleQueues.valueAt(track).isEmpty();
}
private boolean checkPrepared() {
int pesPayloadReaderCount = sampleQueues.size();
if (pesPayloadReaderCount == 0) {
return false;
}
for (int i = 0; i < pesPayloadReaderCount; i++) {
if (!sampleQueues.valueAt(i).hasMediaFormat()) {
return false;
}
}
return true;
}
@Override
public int read(DataSource dataSource) throws IOException {
int bytesRead = dataSource.read(tsPacketBuffer.data, tsPacketBytesRead,
TS_PACKET_SIZE - tsPacketBytesRead);
if (bytesRead == -1) {
return -1;
public void read(ExtractorInput input) throws IOException, InterruptedException {
if (!input.readFully(tsPacketBuffer.data, 0, TS_PACKET_SIZE)) {
return;
}
tsPacketBytesRead += bytesRead;
if (tsPacketBytesRead < TS_PACKET_SIZE) {
// We haven't read the whole packet yet.
return bytesRead;
}
// Reset before reading the packet.
tsPacketBytesRead = 0;
tsPacketBuffer.setPosition(0);
tsPacketBuffer.setLimit(TS_PACKET_SIZE);
int syncByte = tsPacketBuffer.readUnsignedByte();
if (syncByte != TS_SYNC_BYTE) {
return bytesRead;
return;
}
tsPacketBuffer.readBytes(tsScratch, 3);
@ -183,20 +100,9 @@ public final class TsExtractor extends HlsExtractor {
if (payloadExists) {
TsPayloadReader payloadReader = tsPayloadReaders.get(pid);
if (payloadReader != null) {
payloadReader.consume(tsPacketBuffer, payloadUnitStartIndicator);
payloadReader.consume(tsPacketBuffer, payloadUnitStartIndicator, output);
}
}
if (!prepared) {
prepared = checkPrepared();
}
return bytesRead;
}
@Override
protected SampleQueue getSampleQueue(int track) {
return sampleQueues.valueAt(track);
}
/**
@ -231,7 +137,8 @@ public final class TsExtractor extends HlsExtractor {
*/
private abstract static class TsPayloadReader {
public abstract void consume(ParsableByteArray data, boolean payloadUnitStartIndicator);
public abstract void consume(ParsableByteArray data, boolean payloadUnitStartIndicator,
TrackOutputBuilder output);
}
@ -247,7 +154,8 @@ public final class TsExtractor extends HlsExtractor {
}
@Override
public void consume(ParsableByteArray data, boolean payloadUnitStartIndicator) {
public void consume(ParsableByteArray data, boolean payloadUnitStartIndicator,
TrackOutputBuilder output) {
// Skip pointer.
if (payloadUnitStartIndicator) {
int pointerField = data.readUnsignedByte();
@ -286,7 +194,8 @@ public final class TsExtractor extends HlsExtractor {
}
@Override
public void consume(ParsableByteArray data, boolean payloadUnitStartIndicator) {
public void consume(ParsableByteArray data, boolean payloadUnitStartIndicator,
TrackOutputBuilder output) {
// Skip pointer.
if (payloadUnitStartIndicator) {
int pointerField = data.readUnsignedByte();
@ -323,32 +232,33 @@ public final class TsExtractor extends HlsExtractor {
data.skip(esInfoLength);
entriesSize -= esInfoLength + 5;
if (sampleQueues.get(streamType) != null) {
if (streamReaders.get(streamType) != null) {
continue;
}
ElementaryStreamReader pesPayloadReader = null;
switch (streamType) {
case TS_STREAM_TYPE_AAC:
pesPayloadReader = new AdtsReader(bufferPool);
pesPayloadReader = new AdtsReader(output.buildOutput(TS_STREAM_TYPE_AAC));
break;
case TS_STREAM_TYPE_H264:
SeiReader seiReader = new SeiReader(bufferPool);
sampleQueues.put(TS_STREAM_TYPE_EIA608, seiReader);
pesPayloadReader = new H264Reader(bufferPool, seiReader);
SeiReader seiReader = new SeiReader(output.buildOutput(TS_STREAM_TYPE_EIA608));
streamReaders.put(TS_STREAM_TYPE_EIA608, seiReader);
pesPayloadReader = new H264Reader(output.buildOutput(TS_STREAM_TYPE_H264),
seiReader);
break;
case TS_STREAM_TYPE_ID3:
pesPayloadReader = new Id3Reader(bufferPool);
pesPayloadReader = new Id3Reader(output.buildOutput(TS_STREAM_TYPE_ID3));
break;
}
if (pesPayloadReader != null) {
sampleQueues.put(streamType, pesPayloadReader);
streamReaders.put(streamType, pesPayloadReader);
tsPayloadReaders.put(elementaryPid, new PesReader(pesPayloadReader));
}
}
// Skip CRC_32.
output.allOutputsBuilt();
}
}
@ -387,7 +297,8 @@ public final class TsExtractor extends HlsExtractor {
}
@Override
public void consume(ParsableByteArray data, boolean payloadUnitStartIndicator) {
public void consume(ParsableByteArray data, boolean payloadUnitStartIndicator,
TrackOutputBuilder output) {
if (payloadUnitStartIndicator) {
switch (state) {
case STATE_FINDING_HEADER:

View file

@ -67,7 +67,7 @@ public final class CommonMp4AtomParsers {
long mediaTimescale = parseMdhd(mdia.getLeafAtomOfType(Atom.TYPE_mdhd).data);
Pair<MediaFormat, TrackEncryptionBox[]> sampleDescriptions =
parseStsd(stbl.getLeafAtomOfType(Atom.TYPE_stsd).data);
parseStsd(stbl.getLeafAtomOfType(Atom.TYPE_stsd).data, durationUs);
return new Track(id, trackType, mediaTimescale, durationUs, sampleDescriptions.first,
sampleDescriptions.second);
}
@ -321,7 +321,8 @@ public final class CommonMp4AtomParsers {
return mdhd.readUnsignedInt();
}
private static Pair<MediaFormat, TrackEncryptionBox[]> parseStsd(ParsableByteArray stsd) {
private static Pair<MediaFormat, TrackEncryptionBox[]> parseStsd(
ParsableByteArray stsd, long durationUs) {
stsd.setPosition(Mp4Util.FULL_ATOM_HEADER_SIZE);
int numberOfEntries = stsd.readInt();
MediaFormat mediaFormat = null;
@ -334,19 +335,19 @@ public final class CommonMp4AtomParsers {
if (childAtomType == Atom.TYPE_avc1 || childAtomType == Atom.TYPE_avc3
|| childAtomType == Atom.TYPE_encv) {
Pair<MediaFormat, TrackEncryptionBox> avc =
parseAvcFromParent(stsd, childStartPosition, childAtomSize);
parseAvcFromParent(stsd, childStartPosition, childAtomSize, durationUs);
mediaFormat = avc.first;
trackEncryptionBoxes[i] = avc.second;
} else if (childAtomType == Atom.TYPE_mp4a || childAtomType == Atom.TYPE_enca
|| childAtomType == Atom.TYPE_ac_3) {
Pair<MediaFormat, TrackEncryptionBox> audioSampleEntry =
parseAudioSampleEntry(stsd, childAtomType, childStartPosition, childAtomSize);
Pair<MediaFormat, TrackEncryptionBox> audioSampleEntry = parseAudioSampleEntry(stsd,
childAtomType, childStartPosition, childAtomSize, durationUs);
mediaFormat = audioSampleEntry.first;
trackEncryptionBoxes[i] = audioSampleEntry.second;
} else if (childAtomType == Atom.TYPE_TTML) {
mediaFormat = MediaFormat.createTtmlFormat();
} else if (childAtomType == Atom.TYPE_mp4v) {
mediaFormat = parseMp4vFromParent(stsd, childStartPosition, childAtomSize);
mediaFormat = parseMp4vFromParent(stsd, childStartPosition, childAtomSize, durationUs);
}
stsd.setPosition(childStartPosition + childAtomSize);
}
@ -355,7 +356,7 @@ public final class CommonMp4AtomParsers {
/** Returns the media format for an avc1 box. */
private static Pair<MediaFormat, TrackEncryptionBox> parseAvcFromParent(ParsableByteArray parent,
int position, int size) {
int position, int size, long durationUs) {
parent.setPosition(position + Mp4Util.ATOM_HEADER_SIZE);
parent.skip(24);
@ -388,7 +389,7 @@ public final class CommonMp4AtomParsers {
}
MediaFormat format = MediaFormat.createVideoFormat(MimeTypes.VIDEO_H264, MediaFormat.NO_VALUE,
width, height, pixelWidthHeightRatio, initializationData);
durationUs, width, height, pixelWidthHeightRatio, initializationData);
return Pair.create(format, trackEncryptionBox);
}
@ -468,8 +469,8 @@ public final class CommonMp4AtomParsers {
}
/** Returns the media format for an mp4v box. */
private static MediaFormat parseMp4vFromParent(ParsableByteArray parent,
int position, int size) {
private static MediaFormat parseMp4vFromParent(ParsableByteArray parent, int position, int size,
long durationUs) {
parent.setPosition(position + Mp4Util.ATOM_HEADER_SIZE);
parent.skip(24);
@ -492,11 +493,11 @@ public final class CommonMp4AtomParsers {
}
return MediaFormat.createVideoFormat(
MimeTypes.VIDEO_MP4V, MediaFormat.NO_VALUE, width, height, initializationData);
MimeTypes.VIDEO_MP4V, MediaFormat.NO_VALUE, durationUs, width, height, initializationData);
}
private static Pair<MediaFormat, TrackEncryptionBox> parseAudioSampleEntry(
ParsableByteArray parent, int atomType, int position, int size) {
ParsableByteArray parent, int atomType, int position, int size, long durationUs) {
parent.setPosition(position + Mp4Util.ATOM_HEADER_SIZE);
parent.skip(16);
int channelCount = parent.readUnsignedShort();
@ -555,7 +556,7 @@ public final class CommonMp4AtomParsers {
}
MediaFormat format = MediaFormat.createAudioFormat(
mimeType, sampleSize, channelCount, sampleRate, bitrate,
mimeType, sampleSize, durationUs, channelCount, sampleRate, bitrate,
initializationData == null ? null : Collections.singletonList(initializationData));
return Pair.create(format, trackEncryptionBox);
}

View file

@ -30,6 +30,7 @@ import com.google.android.exoplayer.chunk.MediaChunk;
import com.google.android.exoplayer.chunk.parser.Extractor;
import com.google.android.exoplayer.chunk.parser.mp4.FragmentedMp4Extractor;
import com.google.android.exoplayer.chunk.parser.mp4.TrackEncryptionBox;
import com.google.android.exoplayer.drm.DrmInitData;
import com.google.android.exoplayer.mp4.Track;
import com.google.android.exoplayer.smoothstreaming.SmoothStreamingManifest.ProtectionElement;
import com.google.android.exoplayer.smoothstreaming.SmoothStreamingManifest.StreamElement;
@ -38,6 +39,7 @@ import com.google.android.exoplayer.upstream.DataSource;
import com.google.android.exoplayer.upstream.DataSpec;
import com.google.android.exoplayer.util.CodecSpecificDataUtil;
import com.google.android.exoplayer.util.ManifestFetcher;
import com.google.android.exoplayer.util.MimeTypes;
import android.net.Uri;
import android.os.SystemClock;
@ -48,8 +50,6 @@ import java.io.IOException;
import java.util.Arrays;
import java.util.Collections;
import java.util.List;
import java.util.Map;
import java.util.UUID;
/**
* An {@link ChunkSource} for SmoothStreaming.
@ -71,7 +71,7 @@ public class SmoothStreamingChunkSource implements ChunkSource {
private final int maxHeight;
private final SparseArray<FragmentedMp4Extractor> extractors;
private final Map<UUID, byte[]> psshInfo;
private final DrmInitData drmInitData;
private final SmoothStreamingFormat[] formats;
private SmoothStreamingManifest currentManifest;
@ -143,9 +143,11 @@ public class SmoothStreamingChunkSource implements ChunkSource {
byte[] keyId = getKeyId(protectionElement.data);
trackEncryptionBoxes = new TrackEncryptionBox[1];
trackEncryptionBoxes[0] = new TrackEncryptionBox(true, INITIALIZATION_VECTOR_SIZE, keyId);
psshInfo = Collections.singletonMap(protectionElement.uuid, protectionElement.data);
DrmInitData.Mapped drmInitData = new DrmInitData.Mapped(MimeTypes.VIDEO_MP4);
drmInitData.put(protectionElement.uuid, protectionElement.data);
this.drmInitData = drmInitData;
} else {
psshInfo = null;
drmInitData = null;
}
int trackCount = trackIndices != null ? trackIndices.length : streamElement.tracks.length;
@ -299,7 +301,7 @@ public class SmoothStreamingChunkSource implements ChunkSource {
Uri uri = streamElement.buildRequestUri(selectedFormat.trackIndex, chunkIndex);
Chunk mediaChunk = newMediaChunk(selectedFormat, uri, null,
extractors.get(Integer.parseInt(selectedFormat.id)), psshInfo, dataSource,
extractors.get(Integer.parseInt(selectedFormat.id)), drmInitData, dataSource,
currentAbsoluteChunkIndex, isLastChunk, chunkStartTimeUs, nextChunkStartTimeUs, 0);
out.chunk = mediaChunk;
}
@ -365,7 +367,7 @@ public class SmoothStreamingChunkSource implements ChunkSource {
}
private static MediaChunk newMediaChunk(Format formatInfo, Uri uri, String cacheKey,
Extractor extractor, Map<UUID, byte[]> psshInfo, DataSource dataSource, int chunkIndex,
Extractor extractor, DrmInitData drmInitData, DataSource dataSource, int chunkIndex,
boolean isLast, long chunkStartTimeUs, long nextChunkStartTimeUs, int trigger) {
int nextChunkIndex = isLast ? -1 : chunkIndex + 1;
long nextStartTimeUs = isLast ? -1 : nextChunkStartTimeUs;
@ -374,7 +376,7 @@ public class SmoothStreamingChunkSource implements ChunkSource {
// In SmoothStreaming each chunk contains sample timestamps relative to the start of the chunk.
// To convert them the absolute timestamps, we need to set sampleOffsetUs to -chunkStartTimeUs.
return new ContainerMediaChunk(dataSource, dataSpec, formatInfo, trigger, chunkStartTimeUs,
nextStartTimeUs, nextChunkIndex, extractor, psshInfo, false, -chunkStartTimeUs);
nextStartTimeUs, nextChunkIndex, extractor, drmInitData, false, -chunkStartTimeUs);
}
private static byte[] getKeyId(byte[] initData) {

View file

@ -16,6 +16,7 @@
package com.google.android.exoplayer.source;
import com.google.android.exoplayer.C;
import com.google.android.exoplayer.MediaFormat;
import com.google.android.exoplayer.MediaFormatHolder;
import com.google.android.exoplayer.SampleHolder;
import com.google.android.exoplayer.SampleSource;
@ -62,9 +63,14 @@ public final class DefaultSampleSource implements SampleSource {
if (sampleExtractor.prepare()) {
prepared = true;
trackInfos = sampleExtractor.getTrackInfos();
trackStates = new int[trackInfos.length];
pendingDiscontinuities = new boolean[trackInfos.length];
int trackCount = sampleExtractor.getTrackCount();
trackStates = new int[trackCount];
pendingDiscontinuities = new boolean[trackCount];
trackInfos = new TrackInfo[trackCount];
for (int track = 0; track < trackCount; track++) {
MediaFormat mediaFormat = sampleExtractor.getMediaFormat(track);
trackInfos[track] = new TrackInfo(mediaFormat.mimeType, mediaFormat.durationUs);
}
}
return prepared;
@ -119,7 +125,8 @@ public final class DefaultSampleSource implements SampleSource {
return NOTHING_READ;
}
if (trackStates[track] != TRACK_STATE_FORMAT_SENT) {
sampleExtractor.getTrackMediaFormat(track, formatHolder);
formatHolder.format = sampleExtractor.getMediaFormat(track);
formatHolder.drmInitData = sampleExtractor.getDrmInitData(track);
trackStates[track] = TRACK_STATE_FORMAT_SENT;
return FORMAT_READ;
}

View file

@ -17,12 +17,12 @@ package com.google.android.exoplayer.source;
import com.google.android.exoplayer.C;
import com.google.android.exoplayer.MediaFormat;
import com.google.android.exoplayer.MediaFormatHolder;
import com.google.android.exoplayer.SampleHolder;
import com.google.android.exoplayer.SampleSource;
import com.google.android.exoplayer.TrackInfo;
import com.google.android.exoplayer.TrackRenderer;
import com.google.android.exoplayer.drm.DrmInitData;
import com.google.android.exoplayer.util.Assertions;
import com.google.android.exoplayer.util.MimeTypes;
import com.google.android.exoplayer.util.Util;
import android.annotation.TargetApi;
@ -53,8 +53,6 @@ public final class FrameworkSampleExtractor implements SampleExtractor {
private final MediaExtractor mediaExtractor;
private TrackInfo[] trackInfos;
/**
* Instantiates a new sample extractor reading from the specified {@code uri}.
*
@ -106,24 +104,9 @@ public final class FrameworkSampleExtractor implements SampleExtractor {
mediaExtractor.setDataSource(fileDescriptor, fileDescriptorOffset, fileDescriptorLength);
}
int trackCount = mediaExtractor.getTrackCount();
trackInfos = new TrackInfo[trackCount];
for (int i = 0; i < trackCount; i++) {
android.media.MediaFormat format = mediaExtractor.getTrackFormat(i);
long durationUs = format.containsKey(android.media.MediaFormat.KEY_DURATION)
? format.getLong(android.media.MediaFormat.KEY_DURATION) : C.UNKNOWN_TIME_US;
String mime = format.getString(android.media.MediaFormat.KEY_MIME);
trackInfos[i] = new TrackInfo(mime, durationUs);
}
return true;
}
@Override
public TrackInfo[] getTrackInfos() {
return trackInfos;
}
@Override
public void selectTrack(int index) {
mediaExtractor.selectTrack(index);
@ -151,10 +134,18 @@ public final class FrameworkSampleExtractor implements SampleExtractor {
}
@Override
public void getTrackMediaFormat(int track, MediaFormatHolder mediaFormatHolder) {
mediaFormatHolder.format =
MediaFormat.createFromFrameworkMediaFormatV16(mediaExtractor.getTrackFormat(track));
mediaFormatHolder.drmInitData = Util.SDK_INT >= 18 ? getPsshInfoV18() : null;
public int getTrackCount() {
return mediaExtractor.getTrackCount();
}
@Override
public MediaFormat getMediaFormat(int track) {
return MediaFormat.createFromFrameworkMediaFormatV16(mediaExtractor.getTrackFormat(track));
}
@Override
public DrmInitData getDrmInitData(int track) {
return Util.SDK_INT >= 18 ? getDrmInitDataV18() : null;
}
@Override
@ -173,7 +164,7 @@ public final class FrameworkSampleExtractor implements SampleExtractor {
}
sampleHolder.timeUs = mediaExtractor.getSampleTime();
sampleHolder.flags = mediaExtractor.getSampleFlags();
if ((sampleHolder.flags & MediaExtractor.SAMPLE_FLAG_ENCRYPTED) != 0) {
if ((sampleHolder.flags & C.SAMPLE_FLAG_ENCRYPTED) != 0) {
sampleHolder.cryptoInfo.setFromExtractorV16(mediaExtractor);
}
@ -188,9 +179,15 @@ public final class FrameworkSampleExtractor implements SampleExtractor {
}
@TargetApi(18)
private Map<UUID, byte[]> getPsshInfoV18() {
private DrmInitData getDrmInitDataV18() {
// MediaExtractor only supports psshInfo for MP4, so it's ok to hard code the mimeType here.
Map<UUID, byte[]> psshInfo = mediaExtractor.getPsshInfo();
return (psshInfo == null || psshInfo.isEmpty()) ? null : psshInfo;
if (psshInfo == null || psshInfo.isEmpty()) {
return null;
}
DrmInitData.Mapped drmInitData = new DrmInitData.Mapped(MimeTypes.VIDEO_MP4);
drmInitData.putAll(psshInfo);
return drmInitData;
}
}

View file

@ -0,0 +1,740 @@
/*
* Copyright (C) 2014 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.google.android.exoplayer.source;
import com.google.android.exoplayer.C;
import com.google.android.exoplayer.MediaFormat;
import com.google.android.exoplayer.SampleHolder;
import com.google.android.exoplayer.SampleSource;
import com.google.android.exoplayer.TrackRenderer;
import com.google.android.exoplayer.drm.DrmInitData;
import com.google.android.exoplayer.mp4.Atom;
import com.google.android.exoplayer.mp4.Atom.ContainerAtom;
import com.google.android.exoplayer.mp4.CommonMp4AtomParsers;
import com.google.android.exoplayer.mp4.Mp4TrackSampleTable;
import com.google.android.exoplayer.mp4.Mp4Util;
import com.google.android.exoplayer.mp4.Track;
import com.google.android.exoplayer.upstream.BufferPool;
import com.google.android.exoplayer.upstream.BufferedNonBlockingInputStream;
import com.google.android.exoplayer.upstream.DataSource;
import com.google.android.exoplayer.upstream.DataSourceStream;
import com.google.android.exoplayer.upstream.DataSpec;
import com.google.android.exoplayer.upstream.Loader;
import com.google.android.exoplayer.upstream.Loader.Loadable;
import com.google.android.exoplayer.util.Assertions;
import com.google.android.exoplayer.util.MimeTypes;
import com.google.android.exoplayer.util.ParsableByteArray;
import com.google.android.exoplayer.util.Util;
import android.util.Log;
import java.io.IOException;
import java.nio.ByteBuffer;
import java.util.ArrayList;
import java.util.Collections;
import java.util.HashSet;
import java.util.List;
import java.util.Set;
import java.util.Stack;
/**
* Extracts data from a {@link DataSpec} in unfragmented MP4 format (ISO 14496-12).
*/
public final class Mp4SampleExtractor implements SampleExtractor, Loader.Callback {
private static final String TAG = "Mp4SampleExtractor";
private static final String LOADER_THREAD_NAME = "Mp4SampleExtractor";
// Reading results
private static final int RESULT_NEED_MORE_DATA = 1;
private static final int RESULT_END_OF_STREAM = 2;
// Parser states
private static final int STATE_READING_ATOM_HEADER = 0;
private static final int STATE_READING_ATOM_PAYLOAD = 1;
/** Set of atom types that contain data to be parsed. */
private static final Set<Integer> LEAF_ATOM_TYPES = getAtomTypeSet(
Atom.TYPE_mdhd, Atom.TYPE_mvhd, Atom.TYPE_hdlr, Atom.TYPE_vmhd, Atom.TYPE_smhd,
Atom.TYPE_stsd, Atom.TYPE_avc1, Atom.TYPE_avcC, Atom.TYPE_mp4a, Atom.TYPE_esds,
Atom.TYPE_stts, Atom.TYPE_stss, Atom.TYPE_ctts, Atom.TYPE_stsc, Atom.TYPE_stsz,
Atom.TYPE_stco, Atom.TYPE_co64, Atom.TYPE_tkhd);
/** Set of atom types that contain other atoms that need to be parsed. */
private static final Set<Integer> CONTAINER_TYPES = getAtomTypeSet(
Atom.TYPE_moov, Atom.TYPE_trak, Atom.TYPE_mdia, Atom.TYPE_minf, Atom.TYPE_stbl);
/** Default number of times to retry loading data prior to failing. */
private static final int DEFAULT_LOADABLE_RETRY_COUNT = 3;
private final DataSource dataSource;
private final DataSpec dataSpec;
private final int readAheadAllocationSize;
private final int reloadMinimumSeekDistance;
private final int maximumTrackSampleInterval;
private final int loadRetryCount;
private final BufferPool bufferPool;
private final Loader loader;
private final ParsableByteArray atomHeader;
private final Stack<Atom.ContainerAtom> containerAtoms;
private DataSourceStream dataSourceStream;
private BufferedNonBlockingInputStream inputStream;
private long inputStreamOffset;
private long rootAtomBytesRead;
private boolean loadCompleted;
private int parserState;
private int atomBytesRead;
private int atomType;
private long atomSize;
private ParsableByteArray atomData;
private boolean prepared;
private int loadErrorCount;
private Mp4Track[] tracks;
/** An exception from {@link #inputStream}'s callbacks, or {@code null} if there was no error. */
private IOException lastLoadError;
private long loadErrorPosition;
/** If handling a call to {@link #seekTo}, the new required stream offset, or -1 otherwise. */
private long pendingSeekPosition;
/** If the input stream is being reopened at a new position, the new offset, or -1 otherwise. */
private long pendingLoadPosition;
/**
 * Creates a new sample extractor that reads {@code dataSpec} via {@code dataSource} as an
 * unfragmented MP4 file, using default buffering parameters.
 *
 * <p>The defaults read ahead by 5 MiB, allow up to 3 MiB between the earliest and latest samples
 * needed across selected tracks, and restart the connection for forward seeks of >= 256 KiB.
 *
 * @param dataSource Data source used to read from {@code dataSpec}.
 * @param dataSpec Data specification specifying what to read.
 */
public Mp4SampleExtractor(DataSource dataSource, DataSpec dataSpec) {
  this(dataSource, dataSpec,
      5 * 1024 * 1024 /* readAheadAllocationSize */,
      3 * 1024 * 1024 /* maximumTrackSampleInterval */,
      256 * 1024 /* reloadMinimumSeekDistance */,
      DEFAULT_LOADABLE_RETRY_COUNT);
}
/**
 * Creates a new sample extractor for reading {@code dataSource} and {@code dataSpec} as an
 * unfragmented MP4 file.
 *
 * @param dataSource Data source used to read from {@code dataSpec}.
 * @param dataSpec Data specification specifying what to read.
 * @param readAheadAllocationSize Size of the allocation that buffers the stream, in bytes. The
 *     value must exceed the maximum sample size, so that a sample can be read in its entirety.
 * @param maximumTrackSampleInterval Size of the buffer that handles reading from any selected
 *     track. The value should be chosen so that the buffer is as big as the interval in bytes
 *     between the start of the earliest and the end of the latest sample required to render media
 *     from all selected tracks, at any timestamp in the data source.
 * @param reloadMinimumSeekDistance Determines when {@code dataSource} is reopened while seeking:
 *     if the number of bytes between the current position and the new position is greater than or
 *     equal to this value, or the new position is before the current position, loading will
 *     restart. The value should be set to the number of bytes that can be loaded/consumed from an
 *     existing connection in the time it takes to start a new connection.
 * @param loadableRetryCount The number of times to retry loading if an error occurs.
 */
public Mp4SampleExtractor(DataSource dataSource, DataSpec dataSpec, int readAheadAllocationSize,
    int maximumTrackSampleInterval, int reloadMinimumSeekDistance, int loadableRetryCount) {
  // TODO: Allow maximumTrackSampleInterval to be specified in time rather than bytes.
  this.dataSource = Assertions.checkNotNull(dataSource);
  this.dataSpec = Assertions.checkNotNull(dataSpec);
  this.readAheadAllocationSize = readAheadAllocationSize;
  this.maximumTrackSampleInterval = maximumTrackSampleInterval;
  this.reloadMinimumSeekDistance = reloadMinimumSeekDistance;
  this.loadRetryCount = loadableRetryCount;
  // TODO: Implement Allocator here so it is possible to check there is only one buffer at a time.
  bufferPool = new BufferPool(readAheadAllocationSize);
  loader = new Loader(LOADER_THREAD_NAME);
  // Large enough for the extended (16 byte) atom header form.
  atomHeader = new ParsableByteArray(Mp4Util.LONG_ATOM_HEADER_SIZE);
  containerAtoms = new Stack<Atom.ContainerAtom>();
  parserState = STATE_READING_ATOM_HEADER;
  // -1 is the sentinel meaning "no pending position" for the three fields below.
  pendingLoadPosition = -1;
  pendingSeekPosition = -1;
  loadErrorPosition = -1;
}
/**
 * Reads the stream until the moov atom has been parsed and track metadata is available.
 *
 * @return Whether preparation has completed; callers should retry while this returns false.
 * @throws IOException If the stream ends without a moov atom, or a load error occurred.
 */
@Override
public boolean prepare() throws IOException {
  if (inputStream == null) {
    // First call: start loading the stream from the beginning.
    loadFromOffset(0L);
  }
  if (!prepared) {
    // readHeaders() returns true when the end of the stream is reached. Reaching the end
    // without having become prepared means the file has no movie header to play.
    if (readHeaders() && !prepared) {
      throw new IOException("moov atom not found.");
    }
    if (!prepared) {
      // Not enough data buffered yet; surface any pending load error before returning.
      maybeThrowLoadError();
    }
  }
  return prepared;
}
/**
 * Selects the track at {@code trackIndex} for reading. The new track is positioned at the latest
 * sync sample whose timestamp is at or before the earliest pending sample of the tracks that are
 * already selected, keeping all selected tracks approximately aligned.
 *
 * @param trackIndex Index of the track to select.
 */
@Override
public void selectTrack(int trackIndex) {
  Assertions.checkState(prepared);
  if (tracks[trackIndex].selected) {
    return;
  }
  tracks[trackIndex].selected = true;
  // Get the timestamp of the earliest currently-selected sample.
  int earliestSampleTrackIndex = getTrackIndexOfEarliestCurrentSample();
  if (earliestSampleTrackIndex == Mp4Util.NO_TRACK) {
    // No other track was selected, so start this one from its first sample.
    tracks[trackIndex].sampleIndex = 0;
    return;
  }
  if (earliestSampleTrackIndex == Mp4Util.NO_SAMPLE) {
    // All other selected tracks are exhausted, so this one is treated as exhausted too.
    tracks[trackIndex].sampleIndex = Mp4Util.NO_SAMPLE;
    return;
  }
  // Fix: index timestampsUs by that track's current *sample* index. The previous code indexed it
  // with the track index, which reads the wrong (or an out-of-range) timestamp.
  Mp4Track earliestSampleTrack = tracks[earliestSampleTrackIndex];
  long timestampUs =
      earliestSampleTrack.sampleTable.timestampsUs[earliestSampleTrack.sampleIndex];
  // Find the latest sync sample in the new track that has an earlier or equal timestamp.
  tracks[trackIndex].sampleIndex =
      tracks[trackIndex].sampleTable.getIndexOfEarlierOrEqualSynchronizationSample(timestampUs);
}
/** Deselects the track at {@code trackIndex}, so its samples are no longer read. */
@Override
public void deselectTrack(int trackIndex) {
  Assertions.checkState(prepared);
  tracks[trackIndex].selected = false;
}
/**
 * Returns the timestamp of the latest sample, across all selected tracks, that is entirely
 * contained within the data buffered so far.
 *
 * @return The buffered position in microseconds, {@link TrackRenderer#UNKNOWN_TIME_US} while the
 *     stream is being reopened, or {@link TrackRenderer#END_OF_TRACK_US} once fully loaded.
 */
@Override
public long getBufferedPositionUs() {
  Assertions.checkState(prepared);
  if (pendingLoadPosition != -1) {
    return TrackRenderer.UNKNOWN_TIME_US;
  }
  if (loadCompleted) {
    return TrackRenderer.END_OF_TRACK_US;
  }
  // Absolute stream position up to which data has been buffered.
  long bufferedPosition =
      inputStreamOffset + inputStream.getReadPosition() + inputStream.getAvailableByteCount();
  // Track the largest sample timestamp that falls entirely before the buffered position.
  long largestTimestampUs = Long.MIN_VALUE;
  for (Mp4Track track : tracks) {
    if (!track.selected) {
      continue;
    }
    Mp4TrackSampleTable table = track.sampleTable;
    int index = Util.binarySearchFloor(table.offsets, bufferedPosition, false, true);
    // Step back one sample if the located sample extends beyond the buffered position.
    if (index > 0 && table.offsets[index] + table.sizes[index] > bufferedPosition) {
      index--;
    }
    long timestampUs = table.timestampsUs[index];
    if (timestampUs > largestTimestampUs) {
      largestTimestampUs = timestampUs;
    }
  }
  return largestTimestampUs < 0L ? C.UNKNOWN_TIME_US : largestTimestampUs;
}
/**
 * Seeks every selected track to the latest sync sample at or before {@code positionUs} (or, if
 * there is none, the earliest sync sample after it), reloading the stream if the required data
 * lies outside the buffered region.
 *
 * @param positionUs The seek position in microseconds.
 */
@Override
public void seekTo(long positionUs) {
  Assertions.checkState(prepared);
  // Reposition each selected track and find the smallest file offset among their next samples.
  long earliestSamplePosition = Long.MAX_VALUE;
  for (int trackIndex = 0; trackIndex < tracks.length; trackIndex++) {
    if (!tracks[trackIndex].selected) {
      continue;
    }
    Mp4TrackSampleTable sampleTable = tracks[trackIndex].sampleTable;
    int sampleIndex = sampleTable.getIndexOfEarlierOrEqualSynchronizationSample(positionUs);
    if (sampleIndex == Mp4Util.NO_SAMPLE) {
      // No sync sample at or before the position; fall forward to the next one.
      // NOTE(review): if this also returns NO_SAMPLE, the offsets lookup below would fail —
      // presumably every track contains at least one sync sample; confirm.
      sampleIndex = sampleTable.getIndexOfLaterOrEqualSynchronizationSample(positionUs);
    }
    tracks[trackIndex].sampleIndex = sampleIndex;
    long offset = sampleTable.offsets[tracks[trackIndex].sampleIndex];
    if (offset < earliestSamplePosition) {
      earliestSamplePosition = offset;
    }
  }
  // Record where reading should resume; readSample skips forward to this position.
  pendingSeekPosition = earliestSamplePosition;
  if (pendingLoadPosition != -1) {
    // A reload is already pending; retarget it to the new position.
    loadFromOffset(earliestSamplePosition);
    return;
  }
  inputStream.returnToMark();
  long earliestOffset = inputStreamOffset + inputStream.getReadPosition();
  long latestOffset = earliestOffset + inputStream.getAvailableByteCount();
  // Reload if seeking backwards, or so far forwards that skipping buffered data would be slower
  // than opening a new connection.
  if (earliestSamplePosition < earliestOffset
      || earliestSamplePosition >= latestOffset + reloadMinimumSeekDistance) {
    loadFromOffset(earliestSamplePosition);
  }
}
/** Returns the number of tracks in the file. May only be called after preparation completes. */
@Override
public int getTrackCount() {
  Assertions.checkState(prepared);
  return tracks.length;
}
/** Returns the media format of the track at {@code track}. Requires preparation to be done. */
@Override
public MediaFormat getMediaFormat(int track) {
  Assertions.checkState(prepared);
  return tracks[track].track.mediaFormat;
}
@Override
public DrmInitData getDrmInitData(int track) {
  // Always null: this extractor does not read sample encryption data (see the TODO in
  // readSample), so there is no DRM initialization data to expose.
  return null;
}
/**
 * Attempts to read the next sample of the selected track at {@code trackIndex} into
 * {@code sampleHolder}, advancing the track's position on success.
 *
 * @param trackIndex Index of the (selected) track to read from.
 * @param sampleHolder Destination for the sample data, timestamp and flags.
 * @return {@link SampleSource#SAMPLE_READ}, {@link SampleSource#NOTHING_READ} if more data must
 *     be buffered first, or {@link SampleSource#END_OF_STREAM} when no selected track has
 *     samples remaining.
 * @throws IOException If a load error occurred, or the stream ended unexpectedly during a seek.
 */
@Override
public int readSample(int trackIndex, SampleHolder sampleHolder) throws IOException {
  Assertions.checkState(prepared);
  Mp4Track track = tracks[trackIndex];
  Assertions.checkState(track.selected);
  int sampleIndex = track.sampleIndex;
  // Check for the end of the stream.
  if (sampleIndex == Mp4Util.NO_SAMPLE) {
    // TODO: Should END_OF_STREAM be returned as soon as this track has no more samples, or as
    // soon as no tracks have a sample (as implemented here)?
    return hasSampleInAnySelectedTrack() ? SampleSource.NOTHING_READ : SampleSource.END_OF_STREAM;
  }
  // Return if the input stream will be reopened at the requested position.
  if (pendingLoadPosition != -1) {
    return SampleSource.NOTHING_READ;
  }
  // If there was a seek request, try to skip forwards to the requested position.
  if (pendingSeekPosition != -1) {
    int bytesToSeekPosition =
        (int) (pendingSeekPosition - (inputStreamOffset + inputStream.getReadPosition()));
    int skippedByteCount = inputStream.skip(bytesToSeekPosition);
    if (skippedByteCount == -1) {
      throw new IOException("Unexpected end-of-stream while seeking to sample.");
    }
    bytesToSeekPosition -= skippedByteCount;
    // Mark the new position so earlier buffered data can be discarded.
    inputStream.mark();
    if (bytesToSeekPosition == 0) {
      pendingSeekPosition = -1;
    } else {
      // The seek target is not buffered yet.
      maybeThrowLoadError();
      return SampleSource.NOTHING_READ;
    }
  }
  // Return if the sample offset hasn't been loaded yet.
  inputStream.returnToMark();
  long sampleOffset = track.sampleTable.offsets[sampleIndex];
  long seekOffsetLong = (sampleOffset - inputStreamOffset) - inputStream.getReadPosition();
  Assertions.checkState(seekOffsetLong <= Integer.MAX_VALUE);
  int seekOffset = (int) seekOffsetLong;
  if (inputStream.skip(seekOffset) != seekOffset) {
    maybeThrowLoadError();
    return SampleSource.NOTHING_READ;
  }
  // Return if the sample has been loaded.
  int sampleSize = track.sampleTable.sizes[sampleIndex];
  if (inputStream.getAvailableByteCount() < sampleSize) {
    maybeThrowLoadError();
    return SampleSource.NOTHING_READ;
  }
  if (sampleHolder.data == null || sampleHolder.data.capacity() < sampleSize) {
    sampleHolder.replaceBuffer(sampleSize);
  }
  ByteBuffer data = sampleHolder.data;
  if (data == null) {
    // NOTE(review): presumably replaceBuffer can leave data null (e.g. a disabled buffer
    // replacement mode), in which case the sample is consumed but not returned — confirm.
    inputStream.skip(sampleSize);
    sampleHolder.size = 0;
  } else {
    int bytesRead = inputStream.read(data, sampleSize);
    Assertions.checkState(bytesRead == sampleSize);
    if (MimeTypes.VIDEO_H264.equals(tracks[trackIndex].track.mediaFormat.mimeType)) {
      // The mp4 file contains length-prefixed access units, but the decoder wants start code
      // delimited content.
      Mp4Util.replaceLengthPrefixesWithAvcStartCodes(sampleHolder.data, sampleSize);
    }
    sampleHolder.size = sampleSize;
  }
  // Move the input stream mark forwards if the earliest current sample was just read.
  if (getTrackIndexOfEarliestCurrentSample() == trackIndex) {
    inputStream.mark();
  }
  // TODO: Read encryption data.
  sampleHolder.timeUs = track.sampleTable.timestampsUs[sampleIndex];
  sampleHolder.flags = track.sampleTable.flags[sampleIndex];
  // Advance to the next sample, checking if this was the last sample.
  track.sampleIndex =
      sampleIndex + 1 == track.sampleTable.getSampleCount() ? Mp4Util.NO_SAMPLE : sampleIndex + 1;
  // Reset the loading error counter if we read past the offset at which the error was thrown.
  if (dataSourceStream.getReadPosition() > loadErrorPosition) {
    loadErrorCount = 0;
    loadErrorPosition = -1;
  }
  return SampleSource.SAMPLE_READ;
}
/** Releases the loader and closes the input stream. The extractor cannot be used afterwards. */
@Override
public void release() {
  // Clear any pending restart so onLoadCanceled does not reopen the stream during release.
  pendingLoadPosition = -1;
  loader.release();
  if (inputStream != null) {
    inputStream.close();
  }
}
/**
 * Invoked when loading fails. Records the error and schedules a retry after a backoff delay.
 *
 * @param loadable The loadable that failed.
 * @param exception The error that occurred.
 */
@Override
public void onLoadError(Loadable loadable, IOException exception) {
  lastLoadError = exception;
  loadErrorCount++;
  if (loadErrorPosition == -1) {
    // Remember where loading first failed; readSample resets the counter once playback has
    // consumed data beyond this position.
    loadErrorPosition = dataSourceStream.getLoadPosition();
  }
  int retryDelayMs = getRetryDelayMs(loadErrorCount);
  Log.w(TAG, "Retry loading (delay " + retryDelayMs + " ms).");
  loader.startLoading(dataSourceStream, this, retryDelayMs);
}
/** Invoked when the data source stream has been loaded in its entirety. */
@Override
public void onLoadCompleted(Loadable loadable) {
  loadCompleted = true;
}
/**
 * Invoked when a load has been canceled. If the cancelation was requested by
 * {@link #loadFromOffset} in order to restart at a new position, loading resumes from there.
 */
@Override
public void onLoadCanceled(Loadable loadable) {
  if (pendingLoadPosition != -1) {
    long restartPosition = pendingLoadPosition;
    pendingLoadPosition = -1;
    loadFromOffset(restartPosition);
  }
}
/**
 * Starts (or schedules) loading the stream from {@code offsetBytes}, resetting the read-position
 * bookkeeping accordingly.
 *
 * <p>If a load is already in progress it is canceled first, and loading restarts from
 * {@link #onLoadCanceled} once cancelation completes.
 *
 * @param offsetBytes Absolute position in the stream from which to load.
 */
private void loadFromOffset(long offsetBytes) {
  inputStreamOffset = offsetBytes;
  rootAtomBytesRead = offsetBytes;
  if (loader.isLoading()) {
    // Wait for loading to be canceled before proceeding.
    pendingLoadPosition = offsetBytes;
    loader.cancelLoading();
    return;
  }
  if (inputStream != null) {
    inputStream.close();
  }
  // Request the remainder of the stream starting at the new offset.
  DataSpec dataSpec = new DataSpec(
      this.dataSpec.uri, offsetBytes, C.LENGTH_UNBOUNDED, this.dataSpec.key);
  dataSourceStream =
      new DataSourceStream(dataSource, dataSpec, bufferPool, readAheadAllocationSize);
  loader.startLoading(dataSourceStream, this);
  // Wrap the input stream with a buffering stream so that it is possible to read from any track.
  inputStream =
      new BufferedNonBlockingInputStream(dataSourceStream, maximumTrackSampleInterval);
  loadCompleted = false;
  loadErrorCount = 0;
  loadErrorPosition = -1;
}
/**
 * Returns the index of the selected track whose next sample has the smallest file offset, or
 * {@link Mp4Util#NO_TRACK} if no track is selected, or {@link Mp4Util#NO_SAMPLE} if every
 * selected track has been read to its end.
 */
private int getTrackIndexOfEarliestCurrentSample() {
  int result = Mp4Util.NO_TRACK;
  long smallestOffset = Long.MAX_VALUE;
  for (int i = 0; i < tracks.length; i++) {
    Mp4Track track = tracks[i];
    if (!track.selected) {
      continue;
    }
    if (track.sampleIndex == Mp4Util.NO_SAMPLE) {
      if (result == Mp4Util.NO_TRACK) {
        // At least one track is selected, but it has no samples left.
        result = Mp4Util.NO_SAMPLE;
      }
    } else {
      long offset = track.sampleTable.offsets[track.sampleIndex];
      if (offset < smallestOffset) {
        smallestOffset = offset;
        result = i;
      }
    }
  }
  return result;
}
/** Returns whether at least one selected track still has a sample to be read. */
private boolean hasSampleInAnySelectedTrack() {
  for (Mp4Track track : tracks) {
    if (track.selected && track.sampleIndex != Mp4Util.NO_SAMPLE) {
      return true;
    }
  }
  return false;
}
/**
 * Consumes atoms until preparation completes, more data is needed, or the stream ends.
 *
 * @return Whether the end of the stream was reached.
 */
private boolean readHeaders() {
  int results = 0;
  while (!prepared && (results & (RESULT_NEED_MORE_DATA | RESULT_END_OF_STREAM)) == 0) {
    // The parser alternates between exactly two states: header then payload.
    results |= parserState == STATE_READING_ATOM_HEADER ? readAtomHeader() : readAtomPayload();
  }
  return (results & RESULT_END_OF_STREAM) != 0;
}
/**
 * Reads an atom header, pushing container atoms onto the stack and preparing leaf atoms for
 * payload accumulation. May be invoked repeatedly for the same header until enough bytes have
 * been buffered.
 *
 * @return 0 on success, {@link #RESULT_NEED_MORE_DATA} if the header is incomplete, or
 *     {@link #RESULT_END_OF_STREAM} if the stream ended.
 */
private int readAtomHeader() {
  if (pendingLoadPosition != -1) {
    // The stream is being reopened; nothing can be read until that completes.
    return RESULT_NEED_MORE_DATA;
  }
  // The size value is either 4 or 8 bytes long (in which case atomSize = Mp4Util.LONG_ATOM_SIZE).
  int remainingBytes;
  if (atomSize != Mp4Util.LONG_ATOM_SIZE) {
    remainingBytes = Mp4Util.ATOM_HEADER_SIZE - atomBytesRead;
  } else {
    remainingBytes = Mp4Util.LONG_ATOM_HEADER_SIZE - atomBytesRead;
  }
  int bytesRead = inputStream.read(atomHeader.data, atomBytesRead, remainingBytes);
  if (bytesRead == -1) {
    return RESULT_END_OF_STREAM;
  }
  rootAtomBytesRead += bytesRead;
  atomBytesRead += bytesRead;
  if (atomBytesRead < Mp4Util.ATOM_HEADER_SIZE
      || (atomSize == Mp4Util.LONG_ATOM_SIZE && atomBytesRead < Mp4Util.LONG_ATOM_HEADER_SIZE)) {
    return RESULT_NEED_MORE_DATA;
  }
  atomHeader.setPosition(0);
  atomSize = atomHeader.readUnsignedInt();
  atomType = atomHeader.readInt();
  if (atomSize == Mp4Util.LONG_ATOM_SIZE) {
    // The extended atom size is contained in the next 8 bytes, so try to read it now.
    if (atomBytesRead < Mp4Util.LONG_ATOM_HEADER_SIZE) {
      return readAtomHeader();
    }
    atomSize = atomHeader.readLong();
  }
  Integer atomTypeInteger = atomType; // Avoids boxing atomType twice.
  if (CONTAINER_TYPES.contains(atomTypeInteger)) {
    // The atom ends at its start position (rootAtomBytesRead minus the header bytes just
    // consumed) plus its size. Using atomBytesRead covers both 8 and 16 byte headers. The
    // previous comparison of atomSize against LONG_ATOM_SIZE here was dead code — atomSize had
    // already been overwritten with the extended size above — which made the end offset of
    // 64-bit-sized containers 8 bytes too large.
    containerAtoms.add(new ContainerAtom(
        atomType, rootAtomBytesRead + atomSize - atomBytesRead));
    enterState(STATE_READING_ATOM_HEADER);
  } else if (LEAF_ATOM_TYPES.contains(atomTypeInteger)) {
    Assertions.checkState(atomSize <= Integer.MAX_VALUE);
    // Copy the already-read header into the payload buffer so parsers see the complete atom.
    atomData = new ParsableByteArray((int) atomSize);
    System.arraycopy(atomHeader.data, 0, atomData.data, 0, Mp4Util.ATOM_HEADER_SIZE);
    enterState(STATE_READING_ATOM_PAYLOAD);
  } else {
    // Unhandled atom; its payload will be skipped.
    atomData = null;
    enterState(STATE_READING_ATOM_PAYLOAD);
  }
  return 0;
}
/**
 * Reads or skips the payload of the current atom, advancing the parser state when the whole
 * atom has been consumed.
 *
 * @return {@code RESULT_NEED_MORE_DATA} if the payload is incomplete, {@code
 *     RESULT_END_OF_STREAM} if the stream ended, or 0 otherwise.
 */
private int readAtomPayload() {
int bytesRead;
if (atomData != null) {
// A leaf atom whose contents are needed: read the remaining bytes after the header that was
// already copied into atomData by readAtomHeader.
bytesRead = inputStream.read(atomData.data, atomBytesRead, (int) atomSize - atomBytesRead);
} else {
// An atom we don't need. If it is large, seek past it via a reload rather than draining it.
// The atomSize > Integer.MAX_VALUE check also guards the (int) casts below.
if (atomSize >= reloadMinimumSeekDistance || atomSize > Integer.MAX_VALUE) {
// NOTE(review): assumes loadFromOffset updates rootAtomBytesRead to the target offset so
// that the onContainerAtomRead check below sees the post-skip position — confirm.
loadFromOffset(rootAtomBytesRead + atomSize - atomBytesRead);
onContainerAtomRead();
enterState(STATE_READING_ATOM_HEADER);
return 0;
} else {
bytesRead = inputStream.skip((int) atomSize - atomBytesRead);
}
}
if (bytesRead == -1) {
return RESULT_END_OF_STREAM;
}
rootAtomBytesRead += bytesRead;
atomBytesRead += bytesRead;
if (atomBytesRead != atomSize) {
// The atom is not yet fully consumed.
return RESULT_NEED_MORE_DATA;
}
if (atomData != null && !containerAtoms.isEmpty()) {
// Attach the completed leaf atom to its enclosing container.
containerAtoms.peek().add(new Atom.LeafAtom(atomType, atomData));
}
// Pop any containers that this atom completed, then start on the next header.
onContainerAtomRead();
enterState(STATE_READING_ATOM_HEADER);
return 0;
}
/**
 * Pops every container atom whose payload has now been fully consumed. A completed moov atom is
 * processed immediately; any other completed container is attached to its parent, if there is
 * one.
 */
private void onContainerAtomRead() {
  for (;;) {
    if (containerAtoms.isEmpty() || containerAtoms.peek().endByteOffset != rootAtomBytesRead) {
      return;
    }
    Atom.ContainerAtom completed = containerAtoms.pop();
    if (completed.type == Atom.TYPE_moov) {
      processMoovAtom(completed);
    } else if (!containerAtoms.isEmpty()) {
      containerAtoms.peek().add(completed);
    }
  }
}
/**
 * Transitions the parser to {@code state}, resetting the per-atom counters when a new atom
 * header is about to be read, and marking the stream at the current position.
 */
private void enterState(int state) {
  if (state == STATE_READING_ATOM_HEADER) {
    // Starting a fresh atom: clear the header/payload bookkeeping.
    atomBytesRead = 0;
    atomSize = 0;
  }
  parserState = state;
  // Mark so that buffered data before this position can be discarded.
  inputStream.mark();
}
/** Updates the stored track metadata to reflect the contents of the specified moov atom. */
private void processMoovAtom(Atom.ContainerAtom moov) {
  List<Mp4Track> sampleTracks = new ArrayList<Mp4Track>();
  // Byte offset of the earliest sample across all kept tracks.
  long firstSampleOffsetInStream = Long.MAX_VALUE;
  for (Atom.ContainerAtom child : moov.containerChildren) {
    if (child.type != Atom.TYPE_trak) {
      continue;
    }
    Track track = CommonMp4AtomParsers.parseTrak(child, moov.getLeafAtomOfType(Atom.TYPE_mvhd));
    if (track.type != Track.TYPE_AUDIO && track.type != Track.TYPE_VIDEO) {
      // Only audio and video tracks are exposed.
      continue;
    }
    Atom.ContainerAtom stblAtom = child.getContainerAtomOfType(Atom.TYPE_mdia)
        .getContainerAtomOfType(Atom.TYPE_minf).getContainerAtomOfType(Atom.TYPE_stbl);
    Mp4TrackSampleTable sampleTable = CommonMp4AtomParsers.parseStbl(track, stblAtom);
    if (sampleTable.getSampleCount() == 0) {
      // Ignore tracks with no samples.
      continue;
    }
    sampleTracks.add(new Mp4Track(track, sampleTable));
    long trackFirstSampleOffset = sampleTable.offsets[0];
    if (trackFirstSampleOffset < firstSampleOffsetInStream) {
      firstSampleOffsetInStream = trackFirstSampleOffset;
    }
  }
  this.tracks = sampleTracks.toArray(new Mp4Track[0]);
  // If sample data starts before the current read position, reload from that offset.
  if (firstSampleOffsetInStream < inputStream.getReadPosition()) {
    loadFromOffset(firstSampleOffsetInStream);
  }
  prepared = true;
}
/** Returns an unmodifiable set containing the given atom types. */
private static Set<Integer> getAtomTypeSet(int... atomTypes) {
  Set<Integer> types = new HashSet<Integer>();
  for (int i = 0; i < atomTypes.length; i++) {
    types.add(atomTypes[i]);
  }
  return Collections.unmodifiableSet(types);
}
/** Returns the load retry delay in milliseconds: one second per prior failure, capped at 5 s. */
private int getRetryDelayMs(int errorCount) {
  int uncappedDelayMs = (errorCount - 1) * 1000;
  return Math.min(uncappedDelayMs, 5000);
}
/**
 * Throws the most recent load error if the number of failed load attempts has exceeded the
 * permitted retry count; otherwise does nothing.
 *
 * @throws IOException The last load error, if retries are exhausted.
 */
private void maybeThrowLoadError() throws IOException {
if (loadErrorCount > loadRetryCount) {
throw lastLoadError;
}
}
/** Pairs a {@link Track} with its sample table and the extractor's per-track read state. */
private static final class Mp4Track {
// Static track metadata parsed from the trak atom.
public final Track track;
// Per-sample metadata (offsets, sizes, timestamps) parsed from the stbl atom.
public final Mp4TrackSampleTable sampleTable;
// Whether the track is currently selected for reading.
public boolean selected;
// Index of the next sample to read from sampleTable.
public int sampleIndex;
public Mp4Track(Track track, Mp4TrackSampleTable sampleTable) {
this.track = track;
this.sampleTable = sampleTable;
}
}
}

View file

@ -16,11 +16,10 @@
package com.google.android.exoplayer.source;
import com.google.android.exoplayer.MediaFormat;
import com.google.android.exoplayer.MediaFormatHolder;
import com.google.android.exoplayer.SampleHolder;
import com.google.android.exoplayer.SampleSource;
import com.google.android.exoplayer.TrackInfo;
import com.google.android.exoplayer.TrackRenderer;
import com.google.android.exoplayer.drm.DrmInitData;
import java.io.IOException;
@ -28,7 +27,7 @@ import java.io.IOException;
* Extractor for reading track metadata and samples stored in tracks.
*
* <p>Call {@link #prepare} until it returns {@code true}, then access track metadata via
* {@link #getTrackInfos} and {@link #getTrackMediaFormat}.
* {@link #getMediaFormat}.
*
* <p>Pass indices of tracks to read from to {@link #selectTrack}. A track can later be deselected
* by calling {@link #deselectTrack}. It is safe to select/deselect tracks after reading sample
@ -46,9 +45,6 @@ public interface SampleExtractor {
*/
boolean prepare() throws IOException;
/** Returns track information about all tracks that can be selected. */
TrackInfo[] getTrackInfos();
/** Selects the track at {@code index} for reading sample data. */
void selectTrack(int index);
@ -75,8 +71,14 @@ public interface SampleExtractor {
*/
void seekTo(long positionUs);
/** Stores the {@link MediaFormat} of {@code track}. */
void getTrackMediaFormat(int track, MediaFormatHolder mediaFormatHolder);
/** Returns the number of tracks, if {@link #prepare} has returned {@code true}. */
int getTrackCount();
/** Returns the {@link MediaFormat} of {@code track}. */
MediaFormat getMediaFormat(int track);
/** Returns the DRM initialization data for {@code track}. */
DrmInitData getDrmInitData(int track);
/**
* Reads the next sample in the track at index {@code track} into {@code sampleHolder}, returning

View file

@ -57,6 +57,8 @@ package com.google.android.exoplayer.text.eia608;
public static final byte CARRIAGE_RETURN = 0x2D;
public static final byte ERASE_NON_DISPLAYED_MEMORY = 0x2E;
public static final byte BACKSPACE = 0x21;
public static final byte MID_ROW_CHAN_1 = 0x11;
public static final byte MID_ROW_CHAN_2 = 0x19;

View file

@ -82,6 +82,26 @@ public class Eia608Parser {
0xFB // 3F: 251 'û' "Latin small letter U with circumflex"
};
// Extended Spanish/Miscellaneous and French char set.
private static final int[] SPECIAL_ES_FR_CHARACTER_SET = new int[] {
// Spanish and misc.
0xC1, 0xC9, 0xD3, 0xDA, 0xDC, 0xFC, 0x2018, 0xA1,
0x2A, 0x27, 0x2014, 0xA9, 0x2120, 0x2022, 0x201C, 0x201D,
// French.
0xC0, 0xC2, 0xC7, 0xC8, 0xCA, 0xCB, 0xEB, 0xCE,
0xCF, 0xEF, 0xD4, 0xD9, 0xF9, 0xDB, 0xAB, 0xBB
};
// Extended Portuguese and German/Danish char set.
private static final int[] SPECIAL_PT_DE_CHARACTER_SET = new int[] {
// Portuguese.
0xC3, 0xE3, 0xCD, 0xCC, 0xEC, 0xD2, 0xF2, 0xD5,
0xF5, 0x7B, 0x7D, 0x5C, 0x5E, 0x5F, 0x7C, 0x7E,
// German/Danish.
0xC4, 0xE4, 0xD6, 0xF6, 0xDF, 0xA5, 0xA4, 0x2502,
0xC5, 0xE5, 0xD8, 0xF8, 0x250C, 0x2510, 0x2514, 0x2518
};
private final ParsableBitArray seiBuffer;
private final StringBuilder stringBuilder;
private final ArrayList<ClosedCaption> captions;
@ -134,31 +154,45 @@ public class Eia608Parser {
}
// Special North American character set.
if ((ccData1 == 0x11) && ((ccData2 & 0x70) == 0x30)) {
// ccData2 - P|0|1|1|X|X|X|X
if ((ccData1 == 0x11 || ccData1 == 0x19)
&& ((ccData2 & 0x70) == 0x30)) {
stringBuilder.append(getSpecialChar(ccData2));
continue;
}
// Extended Spanish/Miscellaneous and French character set.
// ccData2 - P|0|1|X|X|X|X|X
if ((ccData1 == 0x12 || ccData1 == 0x1A)
&& ((ccData2 & 0x60) == 0x20)) {
backspace(); // Remove standard equivalent of the special extended char.
stringBuilder.append(getExtendedEsFrChar(ccData2));
continue;
}
// Extended Portuguese and German/Danish character set.
// ccData2 - P|0|1|X|X|X|X|X
if ((ccData1 == 0x13 || ccData1 == 0x1B)
&& ((ccData2 & 0x60) == 0x20)) {
backspace(); // Remove standard equivalent of the special extended char.
stringBuilder.append(getExtendedPtDeChar(ccData2));
continue;
}
// Control character.
if (ccData1 < 0x20) {
if (stringBuilder.length() > 0) {
captions.add(new ClosedCaptionText(stringBuilder.toString()));
stringBuilder.setLength(0);
}
captions.add(new ClosedCaptionCtrl(ccData1, ccData2));
addCtrl(ccData1, ccData2);
continue;
}
// Basic North American character set.
stringBuilder.append(getChar(ccData1));
if (ccData2 != 0) {
if (ccData2 >= 0x20) {
stringBuilder.append(getChar(ccData2));
}
}
if (stringBuilder.length() > 0) {
captions.add(new ClosedCaptionText(stringBuilder.toString()));
}
addBufferedText();
if (captions.isEmpty()) {
return null;
@ -179,6 +213,32 @@ public class Eia608Parser {
return (char) SPECIAL_CHARACTER_SET[index];
}
/**
 * Maps a byte from the extended Spanish/Miscellaneous and French character set to its Unicode
 * character, using the low 5 bits of {@code ccData} as the table index.
 */
private static char getExtendedEsFrChar(byte ccData) {
int index = ccData & 0x1F;
return (char) SPECIAL_ES_FR_CHARACTER_SET[index];
}
/**
 * Maps a byte from the extended Portuguese and German/Danish character set to its Unicode
 * character, using the low 5 bits of {@code ccData} as the table index.
 */
private static char getExtendedPtDeChar(byte ccData) {
int index = ccData & 0x1F;
return (char) SPECIAL_PT_DE_CHARACTER_SET[index];
}
/** Flushes any buffered text into the caption list as a text caption and clears the buffer. */
private void addBufferedText() {
if (stringBuilder.length() > 0) {
captions.add(new ClosedCaptionText(stringBuilder.toString()));
stringBuilder.setLength(0);
}
}
/**
 * Appends a control caption, first flushing any buffered text so that text and control captions
 * stay in the order they appeared in the stream.
 */
private void addCtrl(byte ccData1, byte ccData2) {
addBufferedText();
captions.add(new ClosedCaptionCtrl(ccData1, ccData2));
}
/**
 * Emits a backspace control caption, used to remove the standard-charset equivalent that
 * precedes an extended special character.
 */
private void backspace() {
// NOTE(review): 0x14 appears to be the channel-1 control prefix byte — confirm against the
// EIA-608 control code table.
addCtrl((byte) 0x14, ClosedCaptionCtrl.BACKSPACE);
}
/**
* Inspects an sei message to determine whether it contains EIA-608.
* <p>

View file

@ -317,6 +317,11 @@ public class Eia608TrackRenderer extends TrackRenderer implements Callback {
case ClosedCaptionCtrl.CARRIAGE_RETURN:
maybeAppendNewline();
return;
case ClosedCaptionCtrl.BACKSPACE:
if (captionStringBuilder.length() > 0) {
captionStringBuilder.setLength(captionStringBuilder.length() - 1);
}
return;
}
}

View file

@ -0,0 +1,150 @@
/*
* Copyright (C) 2014 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.google.android.exoplayer.upstream;
import com.google.android.exoplayer.util.Assertions;
import java.nio.ByteBuffer;
/**
* Input stream with non-blocking reading/skipping that also stores read/skipped data in a buffer.
* Call {@link #mark} to discard any buffered data before the current reading position. Call
* {@link #returnToMark} to move the current reading position back to the marked position, which is
* initially the start of the input stream.
*/
public final class BufferedNonBlockingInputStream implements NonBlockingInputStream {
// The wrapped stream from which data is pulled.
private final NonBlockingInputStream inputStream;
// Holds all data between the mark position and the furthest position read from inputStream.
private final byte[] bufferedBytes;
// Total number of bytes consumed from inputStream so far.
private long inputStreamPosition;
// Current read offset within bufferedBytes; 0 corresponds to the mark position.
private int readPosition;
// Offset within bufferedBytes one past the last buffered byte.
private int writePosition;
/**
* Wraps the specified {@code nonBlockingInputStream} for buffered reading using a buffer of size
* {@code bufferSize} bytes.
*/
public BufferedNonBlockingInputStream(
NonBlockingInputStream nonBlockingInputStream, int bufferSize) {
inputStream = Assertions.checkNotNull(nonBlockingInputStream);
bufferedBytes = new byte[bufferSize];
}
@Override
public int skip(int length) {
return consumeStream(null, null, 0, length);
}
@Override
public int read(byte[] buffer, int offset, int length) {
return consumeStream(null, buffer, offset, length);
}
@Override
public int read(ByteBuffer buffer, int length) {
return consumeStream(buffer, null, 0, length);
}
@Override
public long getAvailableByteCount() {
// The amount that can be read from the input stream is limited by how much can be buffered.
return (writePosition - readPosition)
+ Math.min(inputStream.getAvailableByteCount(), bufferedBytes.length - writePosition);
}
@Override
public boolean isEndOfStream() {
// End of stream only once both the buffer and the underlying stream are exhausted.
return writePosition == readPosition && inputStream.isEndOfStream();
}
@Override
public void close() {
inputStream.close();
// NOTE(review): this invalidates getReadPosition() after close — callers presumably must not
// query the position once closed; confirm.
inputStreamPosition = -1;
}
/** Returns the current position in the stream. */
public long getReadPosition() {
return inputStreamPosition - (writePosition - readPosition);
}
/**
* Moves the mark to be at the current position. Any data before the current position is
* discarded. After calling this method, calling {@link #returnToMark} will move the reading
* position back to the mark position.
*/
public void mark() {
// Shift the unread tail to the start of the buffer. System.arraycopy is specified to behave
// correctly for overlapping ranges within the same array.
System.arraycopy(bufferedBytes, readPosition, bufferedBytes, 0, writePosition - readPosition);
writePosition -= readPosition;
readPosition = 0;
}
/** Moves the current position back to the mark position. */
public void returnToMark() {
readPosition = 0;
}
/**
* Reads or skips data from the input stream. If {@code byteBuffer} is non-{@code null}, reads
* {@code length} bytes into {@code byteBuffer} (other arguments are ignored). If
* {@code byteArray} is non-{@code null}, reads {@code length} bytes into {@code byteArray} at
* {@code offset} (other arguments are ignored). Otherwise, skips {@code length} bytes.
*
* @param byteBuffer {@link ByteBuffer} to read into, or {@code null} to read into
* {@code byteArray} or skip.
* @param byteArray Byte array to read into, or {@code null} to read into {@code byteBuffer} or
* skip.
* @param offset Offset in {@code byteArray} to write to, if it is non-{@code null}.
* @param length Number of bytes to read or skip.
* @return The number of bytes consumed, or -1 if nothing was consumed and the end of stream was
* reached.
*/
private int consumeStream(ByteBuffer byteBuffer, byte[] byteArray, int offset, int length) {
// If necessary, reduce length so that we do not need to write past the end of the array.
int pendingBytes = writePosition - readPosition;
length = Math.min(length, bufferedBytes.length - writePosition + pendingBytes);
// If reading past the end of buffered data, request more and populate the buffer.
int streamBytesRead = 0;
if (length - pendingBytes > 0) {
streamBytesRead = inputStream.read(bufferedBytes, writePosition, length - pendingBytes);
if (streamBytesRead > 0) {
inputStreamPosition += streamBytesRead;
writePosition += streamBytesRead;
pendingBytes += streamBytesRead;
}
}
// Signal the end of the stream if nothing more will be read.
if (streamBytesRead == -1 && pendingBytes == 0) {
return -1;
}
// Fill the buffer using buffered data if reading, or just skip otherwise.
// A return of 0 here means no data was available yet (non-blocking contract).
length = Math.min(pendingBytes, length);
if (byteBuffer != null) {
byteBuffer.put(bufferedBytes, readPosition, length);
} else if (byteArray != null) {
System.arraycopy(bufferedBytes, readPosition, byteArray, offset, length);
}
readPosition += length;
return length;
}
}

View file

@ -55,13 +55,15 @@ public interface DataSource {
/**
* Reads up to {@code length} bytes of data and stores them into {@code buffer}, starting at
* index {@code offset}. This method blocks until at least one byte of data can be read, the end
* of the opened range is detected, or an exception is thrown.
* index {@code offset}.
* <p>
* This method blocks until at least one byte of data can be read, the end of the opened range is
* detected, or an exception is thrown.
*
* @param buffer The buffer into which the read data should be stored.
* @param offset The start offset into {@code buffer} at which data should be written.
* @param readLength The maximum number of bytes to read.
* @return The actual number of bytes read, or -1 if the end of the opened range is reached.
* @return The number of bytes read, or -1 if the end of the opened range is reached.
* @throws IOException If an error occurs reading from the source.
*/
public int read(byte[] buffer, int offset, int readLength) throws IOException;

View file

@ -47,6 +47,10 @@ public final class DataSourceStream implements Loadable, NonBlockingInputStream
private final Allocator allocator;
private final ReadHead readHead;
/** Whether {@link #allocation}'s capacity is fixed. If true, the allocation is not resized. */
private final boolean isAllocationFixedSize;
private final int allocationSize;
private Allocation allocation;
private volatile boolean loadCanceled;
@ -58,6 +62,9 @@ public final class DataSourceStream implements Loadable, NonBlockingInputStream
private int writeFragmentRemainingLength;
/**
* Constructs an instance whose allocation grows to contain all of the data specified by the
* {@code dataSpec}.
*
* @param dataSource The source from which the data should be loaded.
* @param dataSpec Defines the data to be loaded. {@code dataSpec.length} must not exceed
* {@link Integer#MAX_VALUE}. If {@code dataSpec.length == C.LENGTH_UNBOUNDED} then
@ -72,12 +79,48 @@ public final class DataSourceStream implements Loadable, NonBlockingInputStream
this.allocator = allocator;
resolvedLength = C.LENGTH_UNBOUNDED;
readHead = new ReadHead();
isAllocationFixedSize = false;
allocationSize = 0;
}
/**
* Constructs an instance whose allocation is of a fixed size, which may be smaller than the data
* specified by the {@code dataSpec}.
* <p>
* The allocation size determines how far ahead loading can proceed relative to the current
* reading position.
*
* @param dataSource The source from which the data should be loaded.
* @param dataSpec Defines the data to be loaded.
* @param allocator Used to obtain an {@link Allocation} for holding the data.
* @param allocationSize The minimum size for a fixed-size allocation that will hold the data
* loaded from {@code dataSource}.
*/
public DataSourceStream(
DataSource dataSource, DataSpec dataSpec, Allocator allocator, int allocationSize) {
Assertions.checkState(dataSpec.length <= Integer.MAX_VALUE);
this.dataSource = dataSource;
this.dataSpec = dataSpec;
this.allocator = allocator;
this.allocationSize = allocationSize;
resolvedLength = C.LENGTH_UNBOUNDED;
readHead = new ReadHead();
isAllocationFixedSize = true;
}
/**
* Resets the read position to the start of the data.
*
* @throws UnsupportedOperationException Thrown if the allocation size is fixed.
*/
public void resetReadPosition() {
if (isAllocationFixedSize) {
throw new UnsupportedOperationException(
"The read position cannot be reset when using a fixed allocation");
}
readHead.reset();
}
@ -176,7 +219,12 @@ public final class DataSourceStream implements Loadable, NonBlockingInputStream
byte[][] buffers = allocation.getBuffers();
while (bytesRead < bytesToRead) {
if (readHead.fragmentRemaining == 0) {
readHead.fragmentIndex++;
if (readHead.fragmentIndex == buffers.length - 1) {
Assertions.checkState(isAllocationFixedSize);
readHead.fragmentIndex = 0;
} else {
readHead.fragmentIndex++;
}
readHead.fragmentOffset = allocation.getFragmentOffset(readHead.fragmentIndex);
readHead.fragmentRemaining = allocation.getFragmentLength(readHead.fragmentIndex);
}
@ -194,6 +242,13 @@ public final class DataSourceStream implements Loadable, NonBlockingInputStream
readHead.fragmentRemaining -= bufferReadLength;
}
if (isAllocationFixedSize) {
synchronized (readHead) {
// Notify load() of the updated position so it can resume.
readHead.notify();
}
}
return bytesRead;
}
@ -210,6 +265,7 @@ public final class DataSourceStream implements Loadable, NonBlockingInputStream
}
@Override
@SuppressWarnings("NonAtomicVolatileUpdate")
public void load() throws IOException, InterruptedException {
if (loadCanceled || isLoadFinished()) {
// The load was canceled, or is already complete.
@ -221,7 +277,7 @@ public final class DataSourceStream implements Loadable, NonBlockingInputStream
if (loadPosition == 0 && resolvedLength == C.LENGTH_UNBOUNDED) {
loadDataSpec = dataSpec;
long resolvedLength = dataSource.open(loadDataSpec);
if (resolvedLength > Integer.MAX_VALUE) {
if (!isAllocationFixedSize && resolvedLength > Integer.MAX_VALUE) {
throw new DataSourceStreamLoadException(
new UnexpectedLengthException(dataSpec.length, resolvedLength));
}
@ -235,9 +291,13 @@ public final class DataSourceStream implements Loadable, NonBlockingInputStream
}
if (allocation == null) {
int initialAllocationSize = resolvedLength != C.LENGTH_UNBOUNDED
? (int) resolvedLength : CHUNKED_ALLOCATION_INCREMENT;
allocation = allocator.allocate(initialAllocationSize);
if (isAllocationFixedSize) {
allocation = allocator.allocate(allocationSize);
} else {
int initialAllocationSize = resolvedLength != C.LENGTH_UNBOUNDED
? (int) resolvedLength : CHUNKED_ALLOCATION_INCREMENT;
allocation = allocator.allocate(initialAllocationSize);
}
}
int allocationCapacity = allocation.capacity();
@ -253,18 +313,25 @@ public final class DataSourceStream implements Loadable, NonBlockingInputStream
if (Thread.interrupted()) {
throw new InterruptedException();
}
read = dataSource.read(buffers[writeFragmentIndex], writeFragmentOffset,
writeFragmentRemainingLength);
int bytesToWrite = getBytesToWrite();
read = dataSource.read(buffers[writeFragmentIndex], writeFragmentOffset, bytesToWrite);
if (read > 0) {
loadPosition += read;
writeFragmentOffset += read;
writeFragmentRemainingLength -= read;
if (writeFragmentRemainingLength == 0 && maybeMoreToLoad()) {
writeFragmentIndex++;
if (loadPosition == allocationCapacity) {
allocation.ensureCapacity(allocationCapacity + CHUNKED_ALLOCATION_INCREMENT);
allocationCapacity = allocation.capacity();
buffers = allocation.getBuffers();
if (writeFragmentIndex == buffers.length) {
if (isAllocationFixedSize) {
// Wrap back to the first fragment.
writeFragmentIndex = 0;
} else {
// Grow the allocation.
allocation.ensureCapacity(allocationCapacity + CHUNKED_ALLOCATION_INCREMENT);
allocationCapacity = allocation.capacity();
buffers = allocation.getBuffers();
}
}
writeFragmentOffset = allocation.getFragmentOffset(writeFragmentIndex);
writeFragmentRemainingLength = allocation.getFragmentLength(writeFragmentIndex);
@ -281,6 +348,25 @@ public final class DataSourceStream implements Loadable, NonBlockingInputStream
}
}
/**
* Returns the number of bytes that can be written to the current fragment, blocking until the
* reader has consumed data if the allocation has a fixed size and is full.
*/
private int getBytesToWrite() throws InterruptedException {
if (!isAllocationFixedSize) {
return writeFragmentRemainingLength;
}
synchronized (readHead) {
while (loadPosition == readHead.position + allocation.capacity()) {
readHead.wait();
}
}
return Math.min(writeFragmentRemainingLength,
allocation.capacity() - (int) (loadPosition - readHead.position));
}
private boolean maybeMoreToLoad() {
return resolvedLength == C.LENGTH_UNBOUNDED || loadPosition < resolvedLength;
}

View file

@ -18,6 +18,7 @@ package com.google.android.exoplayer.upstream;
import com.google.android.exoplayer.C;
import com.google.android.exoplayer.util.Assertions;
import com.google.android.exoplayer.util.Predicate;
import com.google.android.exoplayer.util.Util;
import android.text.TextUtils;
import android.util.Log;
@ -226,6 +227,7 @@ public class DefaultHttpDataSource implements HttpDataSource {
public void close() throws HttpDataSourceException {
try {
if (inputStream != null) {
Util.maybeTerminateInputStream(connection, bytesRemaining());
try {
inputStream.close();
} catch (IOException e) {

View file

@ -127,6 +127,21 @@ public final class Loader {
startLoading(myLooper, loadable, callback);
}
/**
* Invokes {@link #startLoading(Looper, Loadable, Callback)}, using the {@link Looper}
* associated with the calling thread. Loading is delayed by {@code delayMs}.
*
* @param loadable The {@link Loadable} to load.
* @param callback A callback to invoke when the load ends.
* @param delayMs Number of milliseconds to wait before calling {@link Loadable#load()}.
* @throws IllegalStateException If the calling thread does not have an associated {@link Looper}.
*/
public void startLoading(Loadable loadable, Callback callback, int delayMs) {
Looper myLooper = Looper.myLooper();
Assertions.checkState(myLooper != null);
startLoading(myLooper, loadable, callback, delayMs);
}
/**
* Start loading a {@link Loadable}.
* <p>
@ -138,9 +153,24 @@ public final class Loader {
* @param callback A callback to invoke when the load ends.
*/
public void startLoading(Looper looper, Loadable loadable, Callback callback) {
startLoading(looper, loadable, callback, 0);
}
/**
* Start loading a {@link Loadable} after {@code delayMs} has elapsed.
* <p>
* A {@link Loader} instance can only load one {@link Loadable} at a time, and so this method
* must not be called when another load is in progress.
*
* @param looper The looper of the thread on which the callback should be invoked.
* @param loadable The {@link Loadable} to load.
* @param callback A callback to invoke when the load ends.
* @param delayMs Number of milliseconds to wait before calling {@link Loadable#load()}.
*/
public void startLoading(Looper looper, Loadable loadable, Callback callback, int delayMs) {
Assertions.checkState(!loading);
loading = true;
currentTask = new LoadTask(looper, loadable, callback);
currentTask = new LoadTask(looper, loadable, callback, delayMs);
downloadExecutorService.submit(currentTask);
}
@ -182,13 +212,15 @@ public final class Loader {
private final Loadable loadable;
private final Loader.Callback callback;
private final int delayMs;
private volatile Thread executorThread;
public LoadTask(Looper looper, Loadable loadable, Loader.Callback callback) {
public LoadTask(Looper looper, Loadable loadable, Loader.Callback callback, int delayMs) {
super(looper);
this.loadable = loadable;
this.callback = callback;
this.delayMs = delayMs;
}
public void quit() {
@ -202,6 +234,9 @@ public final class Loader {
public void run() {
try {
executorThread = Thread.currentThread();
if (delayMs > 0) {
Thread.sleep(delayMs);
}
if (!loadable.isLoadCanceled()) {
loadable.load();
}

View file

@ -15,12 +15,16 @@
*/
package com.google.android.exoplayer.util;
import com.google.android.exoplayer.C;
import com.google.android.exoplayer.upstream.DataSource;
import android.text.TextUtils;
import java.io.IOException;
import java.io.InputStream;
import java.lang.reflect.Method;
import java.math.BigDecimal;
import java.net.HttpURLConnection;
import java.net.URL;
import java.text.ParseException;
import java.util.Arrays;
@ -57,6 +61,8 @@ public final class Util {
Pattern.compile("^(-)?P(([0-9]*)Y)?(([0-9]*)M)?(([0-9]*)D)?"
+ "(T(([0-9]*)H)?(([0-9]*)M)?(([0-9.]*)S)?)?$");
private static final long MAX_BYTES_TO_DRAIN = 2048;
private Util() {}
/**
@ -396,4 +402,48 @@ public final class Util {
return intArray;
}
/**
* On platform API levels 19 and 20, okhttp's implementation of {@link InputStream#close} can
* block for a long time if the stream has a lot of data remaining. Call this method before
* closing the input stream to make a best effort to cause the input stream to encounter an
* unexpected end of input, working around this issue. On other platform API levels, the method
* does nothing.
*
* @param connection The connection whose {@link InputStream} should be terminated.
* @param bytesRemaining The number of bytes remaining to be read from the input stream if its
* length is known. {@link C#LENGTH_UNBOUNDED} otherwise.
*/
public static void maybeTerminateInputStream(HttpURLConnection connection, long bytesRemaining) {
// The okhttp close() slowness only affects API 19/20; do nothing elsewhere.
if (SDK_INT != 19 && SDK_INT != 20) {
return;
}
try {
InputStream inputStream = connection.getInputStream();
if (bytesRemaining == C.LENGTH_UNBOUNDED) {
// If the input stream has already ended, do nothing. The socket may be re-used.
if (inputStream.read() == -1) {
return;
}
} else if (bytesRemaining <= MAX_BYTES_TO_DRAIN) {
// There isn't much data left. Prefer to allow it to drain, which may allow the socket to be
// re-used.
return;
}
// Reflectively invoke okhttp's package-private unexpectedEndOfInput() on the stream's
// superclass (AbstractHttpInputStream), forcing close() to treat the stream as truncated
// instead of draining it. Class names are checked exactly so other HTTP stacks are untouched.
String className = inputStream.getClass().getName();
if (className.equals("com.android.okhttp.internal.http.HttpTransport$ChunkedInputStream")
|| className.equals(
"com.android.okhttp.internal.http.HttpTransport$FixedLengthInputStream")) {
Class<?> superclass = inputStream.getClass().getSuperclass();
Method unexpectedEndOfInput = superclass.getDeclaredMethod("unexpectedEndOfInput");
unexpectedEndOfInput.setAccessible(true);
unexpectedEndOfInput.invoke(inputStream);
}
} catch (IOException e) {
// The connection didn't ever have an input stream, or it was closed already.
} catch (Exception e) {
// Something went wrong. The device probably isn't using okhttp.
}
}
}

View file

@ -0,0 +1,22 @@
package com.google.android.exoplayer;
import android.annotation.SuppressLint;
import android.media.MediaCodec;
import android.media.MediaExtractor;
import junit.framework.TestCase;
/**
 * Unit test for {@link C}.
 */
public class CTest extends TestCase {

  /** Sanity checks that constant values match those defined by the platform. */
  @SuppressLint("InlinedApi")
  public void testConstants() {
    // Fixes: method name typo ("testContants") and the static modifier — JUnit 3 test methods
    // are public instance methods returning void.
    assertEquals(MediaExtractor.SAMPLE_FLAG_SYNC, C.SAMPLE_FLAG_SYNC);
    assertEquals(MediaExtractor.SAMPLE_FLAG_ENCRYPTED, C.SAMPLE_FLAG_ENCRYPTED);
    assertEquals(MediaCodec.CRYPTO_MODE_AES_CTR, C.CRYPTO_MODE_AES_CTR);
  }
}

View file

@ -42,9 +42,9 @@ public class MediaFormatTest extends TestCase {
initData.add(initData2);
testConversionToFrameworkFormatV16(
MediaFormat.createVideoFormat("video/xyz", 102400, 1280, 720, 1.5f, initData));
MediaFormat.createVideoFormat("video/xyz", 102400, 1000L, 1280, 720, 1.5f, initData));
testConversionToFrameworkFormatV16(
MediaFormat.createAudioFormat("audio/xyz", 102400, 5, 44100, initData));
MediaFormat.createAudioFormat("audio/xyz", 102400, 1000L, 5, 44100, initData));
}
@TargetApi(16)

View file

@ -20,6 +20,7 @@ import com.google.android.exoplayer.MediaFormat;
import com.google.android.exoplayer.ParserException;
import com.google.android.exoplayer.SampleHolder;
import com.google.android.exoplayer.chunk.parser.SegmentIndex;
import com.google.android.exoplayer.drm.DrmInitData;
import com.google.android.exoplayer.upstream.ByteArrayNonBlockingInputStream;
import com.google.android.exoplayer.upstream.NonBlockingInputStream;
import com.google.android.exoplayer.util.MimeTypes;
@ -29,6 +30,7 @@ import android.test.InstrumentationTestCase;
import java.io.IOException;
import java.nio.ByteBuffer;
import java.util.Arrays;
import java.util.UUID;
public class WebmExtractorTest extends InstrumentationTestCase {
@ -50,6 +52,13 @@ public class WebmExtractorTest extends InstrumentationTestCase {
private static final String TEST_VORBIS_CODEC_PRIVATE = "webm/vorbis_codec_private";
private static final int TEST_VORBIS_INFO_SIZE = 30;
private static final int TEST_VORBIS_BOOKS_SIZE = 4140;
private static final byte[] TEST_ENCRYPTION_KEY_ID = { 0x00, 0x01, 0x02, 0x03 };
private static final UUID WIDEVINE_UUID = new UUID(0xEDEF8BA979D64ACEL, 0xA3C827DCD51D21EDL);
private static final UUID ZERO_UUID = new UUID(0, 0);
// First 8 bytes of IV come from the container, last 8 bytes are always initialized to 0.
private static final byte[] TEST_INITIALIZATION_VECTOR = {
0x00, 0x01, 0x02, 0x03, 0x04, 0x05, 0x06, 0x07,
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00 };
private static final int ID_VP9 = 0;
private static final int ID_OPUS = 1;
@ -71,7 +80,7 @@ public class WebmExtractorTest extends InstrumentationTestCase {
public void testPrepare() throws ParserException {
NonBlockingInputStream testInputStream = new ByteArrayNonBlockingInputStream(
createInitializationSegment(1, 0, true, DEFAULT_TIMECODE_SCALE, ID_VP9));
createInitializationSegment(1, 0, true, DEFAULT_TIMECODE_SCALE, ID_VP9, null));
assertEquals(EXPECTED_INIT_RESULT, extractor.read(testInputStream, sampleHolder));
assertFormat();
assertIndex(new IndexPoint(0, 0, TEST_DURATION_US));
@ -79,7 +88,7 @@ public class WebmExtractorTest extends InstrumentationTestCase {
public void testPrepareOpus() throws ParserException {
NonBlockingInputStream testInputStream = new ByteArrayNonBlockingInputStream(
createInitializationSegment(1, 0, true, DEFAULT_TIMECODE_SCALE, ID_OPUS));
createInitializationSegment(1, 0, true, DEFAULT_TIMECODE_SCALE, ID_OPUS, null));
assertEquals(EXPECTED_INIT_RESULT, extractor.read(testInputStream, sampleHolder));
assertAudioFormat(ID_OPUS);
assertIndex(new IndexPoint(0, 0, TEST_DURATION_US));
@ -87,15 +96,28 @@ public class WebmExtractorTest extends InstrumentationTestCase {
public void testPrepareVorbis() throws ParserException {
NonBlockingInputStream testInputStream = new ByteArrayNonBlockingInputStream(
createInitializationSegment(1, 0, true, DEFAULT_TIMECODE_SCALE, ID_VORBIS));
createInitializationSegment(1, 0, true, DEFAULT_TIMECODE_SCALE, ID_VORBIS, null));
assertEquals(EXPECTED_INIT_RESULT, extractor.read(testInputStream, sampleHolder));
assertAudioFormat(ID_VORBIS);
assertIndex(new IndexPoint(0, 0, TEST_DURATION_US));
}
public void testPrepareContentEncodingEncryption() throws ParserException {
  // Valid encryption settings: order=0, scope=1, type=1, algorithm=5 (AES), cipher mode 1 (CTR).
  ContentEncodingSettings encodingSettings = new ContentEncodingSettings(0, 1, 1, 5, 1);
  byte[] initSegment =
      createInitializationSegment(1, 0, true, DEFAULT_TIMECODE_SCALE, ID_VP9, encodingSettings);
  NonBlockingInputStream inputStream = new ByteArrayNonBlockingInputStream(initSegment);
  assertEquals(EXPECTED_INIT_RESULT, extractor.read(inputStream, sampleHolder));
  assertFormat();
  assertIndex(new IndexPoint(0, 0, TEST_DURATION_US));
  // The key id from ContentEncKeyID must be exposed under both the Widevine and the zero UUID.
  DrmInitData drmData = extractor.getDrmInitData();
  assertNotNull(drmData);
  android.test.MoreAsserts.assertEquals(TEST_ENCRYPTION_KEY_ID, drmData.get(WIDEVINE_UUID));
  android.test.MoreAsserts.assertEquals(TEST_ENCRYPTION_KEY_ID, drmData.get(ZERO_UUID));
}
public void testPrepareThreeCuePoints() throws ParserException {
NonBlockingInputStream testInputStream = new ByteArrayNonBlockingInputStream(
createInitializationSegment(3, 0, true, DEFAULT_TIMECODE_SCALE, ID_VP9));
createInitializationSegment(3, 0, true, DEFAULT_TIMECODE_SCALE, ID_VP9, null));
assertEquals(EXPECTED_INIT_RESULT, extractor.read(testInputStream, sampleHolder));
assertFormat();
assertIndex(
@ -106,7 +128,7 @@ public class WebmExtractorTest extends InstrumentationTestCase {
public void testPrepareCustomTimecodeScale() throws ParserException {
NonBlockingInputStream testInputStream = new ByteArrayNonBlockingInputStream(
createInitializationSegment(3, 0, true, 1000, ID_VP9));
createInitializationSegment(3, 0, true, 1000, ID_VP9, null));
assertEquals(EXPECTED_INIT_RESULT, extractor.read(testInputStream, sampleHolder));
assertFormat();
assertIndex(
@ -117,7 +139,7 @@ public class WebmExtractorTest extends InstrumentationTestCase {
public void testPrepareNoCuePoints() {
NonBlockingInputStream testInputStream = new ByteArrayNonBlockingInputStream(
createInitializationSegment(0, 0, true, DEFAULT_TIMECODE_SCALE, ID_VP9));
createInitializationSegment(0, 0, true, DEFAULT_TIMECODE_SCALE, ID_VP9, null));
try {
extractor.read(testInputStream, sampleHolder);
fail();
@ -128,7 +150,7 @@ public class WebmExtractorTest extends InstrumentationTestCase {
public void testPrepareInvalidDocType() {
NonBlockingInputStream testInputStream = new ByteArrayNonBlockingInputStream(
createInitializationSegment(1, 0, false, DEFAULT_TIMECODE_SCALE, ID_VP9));
createInitializationSegment(1, 0, false, DEFAULT_TIMECODE_SCALE, ID_VP9, null));
try {
extractor.read(testInputStream, sampleHolder);
fail();
@ -137,68 +159,158 @@ public class WebmExtractorTest extends InstrumentationTestCase {
}
}
public void testPrepareInvalidContentEncodingOrder() {
  // A non-default ContentEncodingOrder (1) must be rejected during preparation.
  ContentEncodingSettings encodingSettings = new ContentEncodingSettings(1, 1, 1, 5, 1);
  byte[] initSegment =
      createInitializationSegment(1, 0, true, DEFAULT_TIMECODE_SCALE, ID_VP9, encodingSettings);
  NonBlockingInputStream inputStream = new ByteArrayNonBlockingInputStream(initSegment);
  try {
    extractor.read(inputStream, sampleHolder);
    fail();
  } catch (ParserException e) {
    assertEquals("ContentEncodingOrder 1 not supported", e.getMessage());
  }
}
public void testPrepareInvalidContentEncodingScope() {
  // ContentEncodingScope 0 is not supported and must fail preparation.
  ContentEncodingSettings encodingSettings = new ContentEncodingSettings(0, 0, 1, 5, 1);
  byte[] initSegment =
      createInitializationSegment(1, 0, true, DEFAULT_TIMECODE_SCALE, ID_VP9, encodingSettings);
  NonBlockingInputStream inputStream = new ByteArrayNonBlockingInputStream(initSegment);
  try {
    extractor.read(inputStream, sampleHolder);
    fail();
  } catch (ParserException e) {
    assertEquals("ContentEncodingScope 0 not supported", e.getMessage());
  }
}
public void testPrepareInvalidContentEncodingType() {
  // ContentEncodingType 0 (compression rather than encryption) must be rejected.
  ContentEncodingSettings encodingSettings = new ContentEncodingSettings(0, 1, 0, 5, 1);
  byte[] initSegment =
      createInitializationSegment(1, 0, true, DEFAULT_TIMECODE_SCALE, ID_VP9, encodingSettings);
  NonBlockingInputStream inputStream = new ByteArrayNonBlockingInputStream(initSegment);
  try {
    extractor.read(inputStream, sampleHolder);
    fail();
  } catch (ParserException e) {
    assertEquals("ContentEncodingType 0 not supported", e.getMessage());
  }
}
public void testPrepareInvalidContentEncAlgo() {
  // Any encryption algorithm other than 5 (AES) must be rejected; 4 is used here.
  ContentEncodingSettings encodingSettings = new ContentEncodingSettings(0, 1, 1, 4, 1);
  byte[] initSegment =
      createInitializationSegment(1, 0, true, DEFAULT_TIMECODE_SCALE, ID_VP9, encodingSettings);
  NonBlockingInputStream inputStream = new ByteArrayNonBlockingInputStream(initSegment);
  try {
    extractor.read(inputStream, sampleHolder);
    fail();
  } catch (ParserException e) {
    assertEquals("ContentEncAlgo 4 not supported", e.getMessage());
  }
}
public void testPrepareInvalidAESSettingsCipherMode() {
  // AESSettingsCipherMode 0 is invalid; only mode 1 (CTR) is accepted by the extractor.
  ContentEncodingSettings encodingSettings = new ContentEncodingSettings(0, 1, 1, 5, 0);
  byte[] initSegment =
      createInitializationSegment(1, 0, true, DEFAULT_TIMECODE_SCALE, ID_VP9, encodingSettings);
  NonBlockingInputStream inputStream = new ByteArrayNonBlockingInputStream(initSegment);
  try {
    extractor.read(inputStream, sampleHolder);
    fail();
  } catch (ParserException e) {
    assertEquals("AESSettingsCipherMode 0 not supported", e.getMessage());
  }
}
public void testReadSampleKeyframe() throws ParserException {
MediaSegment mediaSegment = createMediaSegment(100, 0, 0, true, false, true);
MediaSegment mediaSegment = createMediaSegment(100, 0, 0, true, false, true, false, false);
byte[] testInputData = joinByteArrays(
createInitializationSegment(
1, mediaSegment.clusterBytes.length, true, DEFAULT_TIMECODE_SCALE, ID_VP9),
1, mediaSegment.clusterBytes.length, true, DEFAULT_TIMECODE_SCALE, ID_VP9, null),
mediaSegment.clusterBytes);
NonBlockingInputStream testInputStream = new ByteArrayNonBlockingInputStream(testInputData);
assertEquals(EXPECTED_INIT_AND_SAMPLE_RESULT, extractor.read(testInputStream, sampleHolder));
assertFormat();
assertSample(mediaSegment, 0, true, false);
assertSample(mediaSegment, 0, true, false, false);
assertEquals(WebmExtractor.RESULT_END_OF_STREAM, extractor.read(testInputStream, sampleHolder));
}
public void testReadBlock() throws ParserException {
MediaSegment mediaSegment = createMediaSegment(100, 0, 0, true, false, false);
MediaSegment mediaSegment = createMediaSegment(100, 0, 0, true, false, false, false, false);
byte[] testInputData = joinByteArrays(
createInitializationSegment(
1, mediaSegment.clusterBytes.length, true, DEFAULT_TIMECODE_SCALE, ID_OPUS),
1, mediaSegment.clusterBytes.length, true, DEFAULT_TIMECODE_SCALE, ID_OPUS, null),
mediaSegment.clusterBytes);
NonBlockingInputStream testInputStream = new ByteArrayNonBlockingInputStream(testInputData);
assertEquals(EXPECTED_INIT_AND_SAMPLE_RESULT, extractor.read(testInputStream, sampleHolder));
assertAudioFormat(ID_OPUS);
assertSample(mediaSegment, 0, true, false);
assertSample(mediaSegment, 0, true, false, false);
assertEquals(WebmExtractor.RESULT_END_OF_STREAM, extractor.read(testInputStream, sampleHolder));
}
public void testReadSampleInvisible() throws ParserException {
MediaSegment mediaSegment = createMediaSegment(100, 12, 13, false, true, true);
public void testReadEncryptedFrame() throws ParserException {
MediaSegment mediaSegment = createMediaSegment(100, 0, 0, true, false, true, true, true);
ContentEncodingSettings settings = new ContentEncodingSettings(0, 1, 1, 5, 1);
byte[] testInputData = joinByteArrays(
createInitializationSegment(
1, mediaSegment.clusterBytes.length, true, DEFAULT_TIMECODE_SCALE, ID_VP9),
1, mediaSegment.clusterBytes.length, true, DEFAULT_TIMECODE_SCALE, ID_VP9, settings),
mediaSegment.clusterBytes);
NonBlockingInputStream testInputStream = new ByteArrayNonBlockingInputStream(testInputData);
assertEquals(EXPECTED_INIT_AND_SAMPLE_RESULT, extractor.read(testInputStream, sampleHolder));
assertFormat();
assertSample(mediaSegment, 25000, false, true);
assertSample(mediaSegment, 0, true, false, true);
assertEquals(WebmExtractor.RESULT_END_OF_STREAM, extractor.read(testInputStream, sampleHolder));
}
public void testReadEncryptedFrameWithInvalidSignalByte() {
  // Encrypted sample whose signal byte carries the (unsupported) extension bit (0x80).
  MediaSegment segment = createMediaSegment(100, 0, 0, true, false, true, true, false);
  ContentEncodingSettings encodingSettings = new ContentEncodingSettings(0, 1, 1, 5, 1);
  byte[] input = joinByteArrays(
      createInitializationSegment(
          1, segment.clusterBytes.length, true, DEFAULT_TIMECODE_SCALE, ID_VP9, encodingSettings),
      segment.clusterBytes);
  NonBlockingInputStream inputStream = new ByteArrayNonBlockingInputStream(input);
  try {
    extractor.read(inputStream, sampleHolder);
    fail();
  } catch (ParserException e) {
    assertEquals("Extension bit is set in signal byte", e.getMessage());
  }
}
public void testReadSampleInvisible() throws ParserException {
  // Non-keyframe SimpleBlock flagged invisible; cluster timecode 12 plus block timecode 13
  // at the default timecode scale yields a 25000us decode-only sample.
  MediaSegment segment = createMediaSegment(100, 12, 13, false, true, true, false, false);
  byte[] input = joinByteArrays(
      createInitializationSegment(
          1, segment.clusterBytes.length, true, DEFAULT_TIMECODE_SCALE, ID_VP9, null),
      segment.clusterBytes);
  NonBlockingInputStream inputStream = new ByteArrayNonBlockingInputStream(input);
  assertEquals(EXPECTED_INIT_AND_SAMPLE_RESULT, extractor.read(inputStream, sampleHolder));
  assertFormat();
  assertSample(segment, 25000, false, true, false);
  assertEquals(WebmExtractor.RESULT_END_OF_STREAM, extractor.read(inputStream, sampleHolder));
}
public void testReadSampleCustomTimescale() throws ParserException {
MediaSegment mediaSegment = createMediaSegment(100, 12, 13, false, false, true);
MediaSegment mediaSegment = createMediaSegment(100, 12, 13, false, false, true, false, false);
byte[] testInputData = joinByteArrays(
createInitializationSegment(
1, mediaSegment.clusterBytes.length, true, 1000, ID_VP9),
1, mediaSegment.clusterBytes.length, true, 1000, ID_VP9, null),
mediaSegment.clusterBytes);
NonBlockingInputStream testInputStream = new ByteArrayNonBlockingInputStream(testInputData);
assertEquals(EXPECTED_INIT_AND_SAMPLE_RESULT, extractor.read(testInputStream, sampleHolder));
assertFormat();
assertSample(mediaSegment, 25, false, false);
assertSample(mediaSegment, 25, false, false, false);
assertEquals(WebmExtractor.RESULT_END_OF_STREAM, extractor.read(testInputStream, sampleHolder));
}
public void testReadSampleNegativeSimpleBlockTimecode() throws ParserException {
MediaSegment mediaSegment = createMediaSegment(100, 13, -12, true, true, true);
MediaSegment mediaSegment = createMediaSegment(100, 13, -12, true, true, true, false, false);
byte[] testInputData = joinByteArrays(
createInitializationSegment(
1, mediaSegment.clusterBytes.length, true, DEFAULT_TIMECODE_SCALE, ID_VP9),
1, mediaSegment.clusterBytes.length, true, DEFAULT_TIMECODE_SCALE, ID_VP9, null),
mediaSegment.clusterBytes);
NonBlockingInputStream testInputStream = new ByteArrayNonBlockingInputStream(testInputData);
assertEquals(EXPECTED_INIT_AND_SAMPLE_RESULT, extractor.read(testInputStream, sampleHolder));
assertFormat();
assertSample(mediaSegment, 1000, true, true);
assertSample(mediaSegment, 1000, true, true, false);
assertEquals(WebmExtractor.RESULT_END_OF_STREAM, extractor.read(testInputStream, sampleHolder));
}
@ -241,23 +353,33 @@ public class WebmExtractorTest extends InstrumentationTestCase {
}
private void assertSample(
MediaSegment mediaSegment, int timeUs, boolean keyframe, boolean invisible) {
MediaSegment mediaSegment, int timeUs, boolean keyframe, boolean invisible,
boolean encrypted) {
assertTrue(Arrays.equals(
mediaSegment.videoBytes, Arrays.copyOf(sampleHolder.data.array(), sampleHolder.size)));
assertEquals(timeUs, sampleHolder.timeUs);
assertEquals(keyframe, (sampleHolder.flags & C.SAMPLE_FLAG_SYNC) != 0);
assertEquals(invisible, sampleHolder.decodeOnly);
assertEquals(encrypted, (sampleHolder.flags & C.SAMPLE_FLAG_ENCRYPTED) != 0);
if (encrypted) {
android.test.MoreAsserts.assertEquals(TEST_INITIALIZATION_VECTOR, sampleHolder.cryptoInfo.iv);
assertEquals(C.CRYPTO_MODE_AES_CTR, sampleHolder.cryptoInfo.mode);
assertEquals(1, sampleHolder.cryptoInfo.numSubSamples);
assertEquals(100, sampleHolder.cryptoInfo.numBytesOfEncryptedData[0]);
assertEquals(0, sampleHolder.cryptoInfo.numBytesOfClearData[0]);
}
}
private byte[] createInitializationSegment(
int cuePoints, int mediaSegmentSize, boolean docTypeIsWebm, int timecodeScale,
int codecId) {
int codecId, ContentEncodingSettings contentEncodingSettings) {
int initalizationSegmentSize = INFO_ELEMENT_BYTE_SIZE + TRACKS_ELEMENT_BYTE_SIZE
+ CUES_ELEMENT_BYTE_SIZE + CUE_POINT_ELEMENT_BYTE_SIZE * cuePoints;
byte[] tracksElement = null;
switch (codecId) {
case ID_VP9:
tracksElement = createTracksElementWithVideo(true, TEST_WIDTH, TEST_HEIGHT);
tracksElement = createTracksElementWithVideo(
true, TEST_WIDTH, TEST_HEIGHT, contentEncodingSettings);
break;
case ID_OPUS:
tracksElement = createTracksElementWithOpusAudio(TEST_CHANNEL_COUNT);
@ -278,12 +400,13 @@ public class WebmExtractorTest extends InstrumentationTestCase {
}
private static MediaSegment createMediaSegment(int videoBytesLength, int clusterTimecode,
int blockTimecode, boolean keyframe, boolean invisible, boolean isSimple) {
int blockTimecode, boolean keyframe, boolean invisible, boolean simple,
boolean encrypted, boolean validSignalByte) {
byte[] videoBytes = createVideoBytes(videoBytesLength);
byte[] blockBytes;
if (isSimple) {
if (simple) {
blockBytes = createSimpleBlockElement(videoBytes.length, blockTimecode,
keyframe, invisible, true);
keyframe, invisible, true, encrypted, validSignalByte);
} else {
blockBytes = createBlockElement(videoBytes.length, blockTimecode, invisible, true);
}
@ -338,22 +461,66 @@ public class WebmExtractorTest extends InstrumentationTestCase {
}
private static byte[] createTracksElementWithVideo(
boolean codecIsVp9, int pixelWidth, int pixelHeight) {
boolean codecIsVp9, int pixelWidth, int pixelHeight,
ContentEncodingSettings contentEncodingSettings) {
byte[] widthBytes = getIntegerBytes(pixelWidth);
byte[] heightBytes = getIntegerBytes(pixelHeight);
return createByteArray(
0x16, 0x54, 0xAE, 0x6B, // Tracks
0x01, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x24, // size=36
0xAE, // TrackEntry
0x01, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x1B, // size=27
0x86, // CodecID
0x85, 0x56, 0x5F, 0x56, 0x50, codecIsVp9 ? 0x39 : 0x30, // size=5 value=V_VP9/0
0xE0, // Video
0x01, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x08, // size=8
0xB0, // PixelWidth
0x82, widthBytes[2], widthBytes[3], // size=2
0xBA, // PixelHeight
0x82, heightBytes[2], heightBytes[3]); // size=2
if (contentEncodingSettings != null) {
byte[] orderBytes = getIntegerBytes(contentEncodingSettings.order);
byte[] scopeBytes = getIntegerBytes(contentEncodingSettings.scope);
byte[] typeBytes = getIntegerBytes(contentEncodingSettings.type);
byte[] algorithmBytes = getIntegerBytes(contentEncodingSettings.algorithm);
byte[] cipherModeBytes = getIntegerBytes(contentEncodingSettings.aesCipherMode);
return createByteArray(
0x16, 0x54, 0xAE, 0x6B, // Tracks
0x01, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x48, // size=72
0xAE, // TrackEntry
0x01, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x3F, // size=63
0x86, // CodecID
0x85, 0x56, 0x5F, 0x56, 0x50, codecIsVp9 ? 0x39 : 0x30, // size=5 value=V_VP9/0
0x6D, 0x80, // ContentEncodings
0xA4, // size=36
0x62, 0x40, // ContentEncoding
0xA1, // size=33
0x50, 0x31, // ContentEncodingOrder
0x81, orderBytes[3],
0x50, 0x32, // ContentEncodingScope
0x81, scopeBytes[3],
0x50, 0x33, // ContentEncodingType
0x81, typeBytes[3],
0x50, 0x35, // ContentEncryption
0x92, // size=18
0x47, 0xE1, // ContentEncAlgo
0x81, algorithmBytes[3],
0x47, 0xE2, // ContentEncKeyID
0x84, // size=4
TEST_ENCRYPTION_KEY_ID[0], TEST_ENCRYPTION_KEY_ID[1],
TEST_ENCRYPTION_KEY_ID[2], TEST_ENCRYPTION_KEY_ID[3], // value=binary
0x47, 0xE7, // ContentEncAESSettings
0x84, // size=4
0x47, 0xE8, // AESSettingsCipherMode
0x81, cipherModeBytes[3],
0xE0, // Video
0x01, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x08, // size=8
0xB0, // PixelWidth
0x82, widthBytes[2], widthBytes[3], // size=2
0xBA, // PixelHeight
0x82, heightBytes[2], heightBytes[3]); // size=2
} else {
return createByteArray(
0x16, 0x54, 0xAE, 0x6B, // Tracks
0x01, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x24, // size=36
0xAE, // TrackEntry
0x01, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x1B, // size=27
0x86, // CodecID
0x85, 0x56, 0x5F, 0x56, 0x50, codecIsVp9 ? 0x39 : 0x30, // size=5 value=V_VP9/0
0xE0, // Video
0x01, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x08, // size=8
0xB0, // PixelWidth
0x82, widthBytes[2], widthBytes[3], // size=2
0xBA, // PixelHeight
0x82, heightBytes[2], heightBytes[3]); // size=2
}
}
private static byte[] createTracksElementWithOpusAudio(int channelCount) {
@ -438,16 +605,23 @@ public class WebmExtractorTest extends InstrumentationTestCase {
}
private static byte[] createSimpleBlockElement(
int size, int timecode, boolean keyframe, boolean invisible, boolean noLacing) {
byte[] sizeBytes = getIntegerBytes(size + 4);
int size, int timecode, boolean keyframe, boolean invisible, boolean noLacing,
boolean encrypted, boolean validSignalByte) {
byte[] sizeBytes = getIntegerBytes(size + 4 + (encrypted ? 9 : 0));
byte[] timeBytes = getIntegerBytes(timecode);
byte flags = (byte)
((keyframe ? 0x80 : 0x00) | (invisible ? 0x08 : 0x00) | (noLacing ? 0x00 : 0x06));
return createByteArray(
byte[] simpleBlock = createByteArray(
0xA3, // SimpleBlock
0x01, 0x00, 0x00, 0x00, sizeBytes[0], sizeBytes[1], sizeBytes[2], sizeBytes[3],
0x81, // Track number value=1
timeBytes[2], timeBytes[3], flags); // Timecode and flags
if (encrypted) {
simpleBlock = joinByteArrays(
simpleBlock, createByteArray(validSignalByte ? 0x01 : 0x80),
Arrays.copyOfRange(TEST_INITIALIZATION_VECTOR, 0, 8));
}
return simpleBlock;
}
private static byte[] createBlockElement(
@ -520,4 +694,24 @@ public class WebmExtractorTest extends InstrumentationTestCase {
}
/**
 * Used by {@link #createTracksElementWithVideo} to create a Track header with Encryption.
 * Each field is written verbatim as the value of the correspondingly named ContentEncodings
 * child element in the generated Tracks element.
 */
private static final class ContentEncodingSettings {
// Written as the ContentEncodingOrder element value.
private final int order;
// Written as the ContentEncodingScope element value.
private final int scope;
// Written as the ContentEncodingType element value.
private final int type;
// Written as the ContentEncAlgo element value.
private final int algorithm;
// Written as the AESSettingsCipherMode element value.
private final int aesCipherMode;
private ContentEncodingSettings(int order, int scope, int type, int algorithm,
int aesCipherMode) {
this.order = order;
this.scope = scope;
this.type = type;
this.algorithm = algorithm;
this.aesCipherMode = aesCipherMode;
}
}
}

View file

@ -81,7 +81,7 @@ public class HlsMediaPlaylistParserTest extends TestCase {
assertEquals(false, segments.get(0).discontinuity);
assertEquals(7.975, segments.get(0).durationSecs);
assertEquals(null, segments.get(0).encryptionMethod);
assertEquals(false, segments.get(0).isEncrypted);
assertEquals(null, segments.get(0).encryptionKeyUri);
assertEquals(null, segments.get(0).encryptionIV);
assertEquals(51370, segments.get(0).byterangeLength);
@ -90,7 +90,7 @@ public class HlsMediaPlaylistParserTest extends TestCase {
assertEquals(false, segments.get(1).discontinuity);
assertEquals(7.975, segments.get(1).durationSecs);
assertEquals("AES-128", segments.get(1).encryptionMethod);
assertEquals(true, segments.get(1).isEncrypted);
assertEquals("https://priv.example.com/key.php?r=2680", segments.get(1).encryptionKeyUri);
assertEquals("0x1566B", segments.get(1).encryptionIV);
assertEquals(51501, segments.get(1).byterangeLength);
@ -99,7 +99,7 @@ public class HlsMediaPlaylistParserTest extends TestCase {
assertEquals(false, segments.get(2).discontinuity);
assertEquals(7.941, segments.get(2).durationSecs);
assertEquals(HlsMediaPlaylist.ENCRYPTION_METHOD_NONE, segments.get(2).encryptionMethod);
assertEquals(false, segments.get(2).isEncrypted);
assertEquals(null, segments.get(2).encryptionKeyUri);
assertEquals(null, segments.get(2).encryptionIV);
assertEquals(51501, segments.get(2).byterangeLength);
@ -108,7 +108,7 @@ public class HlsMediaPlaylistParserTest extends TestCase {
assertEquals(true, segments.get(3).discontinuity);
assertEquals(7.975, segments.get(3).durationSecs);
assertEquals("AES-128", segments.get(3).encryptionMethod);
assertEquals(true, segments.get(3).isEncrypted);
assertEquals("https://priv.example.com/key.php?r=2682", segments.get(3).encryptionKeyUri);
// 0xA7A == 2682.
assertNotNull(segments.get(3).encryptionIV);
@ -119,11 +119,11 @@ public class HlsMediaPlaylistParserTest extends TestCase {
assertEquals(false, segments.get(4).discontinuity);
assertEquals(7.975, segments.get(4).durationSecs);
assertEquals("AES-128", segments.get(4).encryptionMethod);
assertEquals(true, segments.get(4).isEncrypted);
assertEquals("https://priv.example.com/key.php?r=2682", segments.get(4).encryptionKeyUri);
// 0xA7A == 2682.
// 0xA7B == 2683.
assertNotNull(segments.get(4).encryptionIV);
assertEquals("A7A", segments.get(4).encryptionIV.toUpperCase(Locale.getDefault()));
assertEquals("A7B", segments.get(4).encryptionIV.toUpperCase(Locale.getDefault()));
assertEquals(C.LENGTH_UNBOUNDED, segments.get(4).byterangeLength);
assertEquals(0, segments.get(4).byterangeOffset);
assertEquals("https://priv.example.com/fileSequence2683.ts", segments.get(4).url);

View file

@ -0,0 +1,123 @@
/*
* Copyright (C) 2014 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.google.android.exoplayer.source;
import static org.mockito.Matchers.anyInt;
import static org.mockito.Mockito.never;
import static org.mockito.Mockito.times;
import static org.mockito.Mockito.verify;
import static org.mockito.Mockito.when;
import com.google.android.exoplayer.MediaFormat;
import com.google.android.exoplayer.MediaFormatHolder;
import com.google.android.exoplayer.SampleSource;
import com.google.android.exoplayer.util.MimeTypes;
import junit.framework.TestCase;
import org.mockito.Mock;
import org.mockito.MockitoAnnotations;
/** Tests for {@link DefaultSampleSource}. */
public final class DefaultSampleSourceTest extends TestCase {
// Number of renderers sharing the source; release() must be called this many times before
// the underlying extractor is released (see testSourceReleasedWhenRenderersReleased).
private static final int RENDERER_COUNT = 2;
// Format returned by the mocked extractor for every track.
private static final MediaFormat FAKE_MEDIA_FORMAT =
MediaFormat.createFormatForMimeType(MimeTypes.AUDIO_AAC);
private DefaultSampleSource defaultSampleSource;
// Mocked extractor backing the source; stubbed in setUp().
@Mock SampleExtractor mockSampleExtractor;
@Override
public void setUp() throws Exception {
MockitoAnnotations.initMocks(this);
// Stub a two-track extractor that prepares immediately and reports the fake format.
when(mockSampleExtractor.prepare()).thenReturn(true);
when(mockSampleExtractor.getTrackCount()).thenReturn(2);
when(mockSampleExtractor.getMediaFormat(anyInt())).thenReturn(FAKE_MEDIA_FORMAT);
defaultSampleSource = new DefaultSampleSource(mockSampleExtractor, RENDERER_COUNT);
}
public void testSourceReleasedWhenRenderersReleased() throws Exception {
// Given a prepared sample source
defaultSampleSource.prepare();
// When releasing it once, it is not released.
defaultSampleSource.release();
verify(mockSampleExtractor, never()).release();
// When releasing RENDERER_COUNT times, it is released.
defaultSampleSource.release();
verify(mockSampleExtractor).release();
}
public void testEnablingTracksAtStartDoesNotSeek() throws Exception {
// Given a prepared sample source
defaultSampleSource.prepare();
// When the first track is enabled at t=0, the sample extractor does not seek.
defaultSampleSource.enable(0, 0L);
verify(mockSampleExtractor, never()).seekTo(0);
// When the second track is enabled at t=0, the sample extractor does not seek.
defaultSampleSource.enable(1, 0L);
verify(mockSampleExtractor, never()).seekTo(0);
}
public void testEnablingTracksInMiddleDoesSeek() throws Exception {
// Given a prepared sample source
defaultSampleSource.prepare();
// When the first track is enabled at t!=0, the sample extractor does seek.
defaultSampleSource.enable(0, 1000L);
verify(mockSampleExtractor, times(1)).seekTo(1000L);
// When the second track is enabled at t!=0, the sample extractor does seek.
defaultSampleSource.enable(1, 1000L);
verify(mockSampleExtractor, times(2)).seekTo(1000L);
}
public void testEnablingTrackSelectsTrack() throws Exception {
// Given a prepared sample source
defaultSampleSource.prepare();
// When the first track is enabled, it selects the first track.
defaultSampleSource.enable(0, 0L);
verify(mockSampleExtractor).selectTrack(0);
}
public void testReadDataInitiallyReadsFormat() throws Exception {
// Given a prepared sample source with the first track selected
defaultSampleSource.prepare();
defaultSampleSource.enable(0, 0L);
// A format is read.
MediaFormatHolder mediaFormatHolder = new MediaFormatHolder();
assertEquals(SampleSource.FORMAT_READ,
defaultSampleSource.readData(0, 0, mediaFormatHolder, null, false));
}
public void testSeekAndReadDataReadsDiscontinuity() throws Exception {
// Given a prepared sample source with the first track enabled at a non-zero position
defaultSampleSource.prepare();
defaultSampleSource.enable(0, 1L);
// A discontinuity is read.
assertEquals(
SampleSource.DISCONTINUITY_READ, defaultSampleSource.readData(0, 0, null, null, false));
}
}

View file

@ -0,0 +1,622 @@
/*
* Copyright (C) 2014 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.google.android.exoplayer.source;
import com.google.android.exoplayer.C;
import com.google.android.exoplayer.MediaFormat;
import com.google.android.exoplayer.SampleHolder;
import com.google.android.exoplayer.SampleSource;
import com.google.android.exoplayer.mp4.Atom;
import com.google.android.exoplayer.upstream.ByteArrayDataSource;
import com.google.android.exoplayer.upstream.DataSource;
import com.google.android.exoplayer.upstream.DataSpec;
import com.google.android.exoplayer.util.Assertions;
import com.google.android.exoplayer.util.MimeTypes;
import com.google.android.exoplayer.util.Util;
import android.annotation.SuppressLint;
import android.annotation.TargetApi;
import android.media.MediaExtractor;
import android.net.Uri;
import android.os.Handler;
import android.os.Looper;
import android.os.Message;
import junit.framework.TestCase;
import java.nio.ByteBuffer;
import java.util.ArrayList;
import java.util.List;
import java.util.concurrent.CountDownLatch;
/**
* Tests for {@link Mp4SampleExtractor}.
*/
@TargetApi(16)
public class Mp4SampleExtractorTest extends TestCase {
/** String of hexadecimal bytes containing the video stsd payload from an AVC video. */
private static final byte[] VIDEO_STSD_PAYLOAD = getByteArray(
"00000000000000010000009961766331000000000000000100000000000000000000000000000000050002"
+ "d00048000000480000000000000001000000000000000000000000000000000000000000000000000000"
+ "00000000000018ffff0000002f617663430164001fffe100186764001facb402802dd808800000030080"
+ "00001e078c195001000468ee3cb000000014627472740000e35c0042a61000216cb8");
private static final byte[] VIDEO_HDLR_PAYLOAD = getByteArray("000000000000000076696465");
private static final byte[] VIDEO_MDHD_PAYLOAD =
getByteArray("0000000000000000cf6c48890000001e00001c8a55c40000");
private static final int TIMESCALE = 30;
private static final int VIDEO_WIDTH = 1280;
private static final int VIDEO_HEIGHT = 720;
/** String of hexadecimal bytes containing the video stsd payload for an mp4v track. */
private static final byte[] VIDEO_STSD_MP4V_PAYLOAD = getByteArray(
"0000000000000001000000A36D703476000000000000000100000000000000000000000000000000014000"
+ "B40048000000480000000000000001000000000000000000000000000000000000000000000000000000"
+ "00000000000018FFFF0000004D6573647300000000033F00000004372011001A400004CF280002F11805"
+ "28000001B001000001B58913000001000000012000C48D8800F50A04169463000001B2476F6F676C6506"
+ "0102");
private static final int VIDEO_MP4V_WIDTH = 320;
private static final int VIDEO_MP4V_HEIGHT = 180;
/** String of hexadecimal bytes containing the audio stsd payload from an AAC track. */
private static final byte[] AUDIO_STSD_PAYLOAD = getByteArray(
"0000000000000001000000596d703461000000000000000100000000000000000001001000000000ac4400"
+ "000000003565736473000000000327000000041f401500023e00024bc000023280051012080000000000"
+ "000000000000000000060102");
private static final byte[] AUDIO_HDLR_PAYLOAD = getByteArray("0000000000000000736f756e");
private static final byte[] AUDIO_MDHD_PAYLOAD =
getByteArray("00000000cf6c4889cf6c488a0000ac4400a3e40055c40000");
/** String of hexadecimal bytes containing an mvhd payload from an AVC/AAC video. */
private static final byte[] MVHD_PAYLOAD = getByteArray(
"00000000cf6c4888cf6c48880000025800023ad40001000001000000000000000000000000010000000000"
+ "000000000000000000000100000000000000000000000000004000000000000000000000000000000000"
+ "000000000000000000000000000003");
/** String of hexadecimal bytes containing a tkhd payload with an unknown duration. */
private static final byte[] TKHD_PAYLOAD =
getByteArray("0000000000000000000000000000000000000000FFFFFFFF");
/** Video frame timestamps in time units. */
private static final int[] SAMPLE_TIMESTAMPS = {0, 2, 3, 5, 6, 7};
/** Video frame sizes in bytes, including a very large sample. */
private static final int[] SAMPLE_SIZES = {100, 20, 20, 44, 100, 1 * 1024 * 1024};
/** Indices of key-frames. */
private static final int[] SYNCHRONIZATION_SAMPLE_INDICES = {0, 4, 5};
/** Indices of video frame chunk offsets. */
private static final int[] CHUNK_OFFSETS = {1000, 2000, 3000, 4000};
/** Numbers of video frames in each chunk. */
private static final int[] SAMPLES_IN_CHUNK = {2, 2, 1, 1};
/** The mdat box must be large enough to avoid reading chunk sample data out of bounds. */
private static final int MDAT_SIZE = 10 * 1024 * 1024;
/** Fake HTTP URI that can't be opened. */
private static final Uri FAKE_URI = Uri.parse("http://");
/** Empty byte array. */
private static final byte[] EMPTY = new byte[0];
public void testParsesValidMp4File() throws Exception {
  // Prepare the extractor with the fake AVC/AAC file, including an stss box.
  Mp4ExtractorWrapper avcExtractor =
      prepareSampleExtractor(getFakeDataSource(true /* includeStss */, false /* mp4vFormat */));
  // Both track MIME types and the selected video track's dimensions are parsed correctly.
  assertEquals(MimeTypes.VIDEO_H264, avcExtractor.mediaFormats[0].mimeType);
  assertEquals(MimeTypes.AUDIO_AAC, avcExtractor.mediaFormats[1].mimeType);
  assertEquals(VIDEO_WIDTH, avcExtractor.selectedTrackMediaFormat.width);
  assertEquals(VIDEO_HEIGHT, avcExtractor.selectedTrackMediaFormat.height);
}
public void testParsesValidMp4vFile() throws Exception {
  // Prepare the extractor with the fake mp4v-format file.
  Mp4ExtractorWrapper mp4vExtractor =
      prepareSampleExtractor(getFakeDataSource(true /* includeStss */, true /* mp4vFormat */));
  // The selected track reports the mp4v MIME type and the dimensions from the stsd payload.
  assertEquals(MimeTypes.VIDEO_MP4V, mp4vExtractor.selectedTrackMediaFormat.mimeType);
  assertEquals(VIDEO_MP4V_WIDTH, mp4vExtractor.selectedTrackMediaFormat.width);
  assertEquals(VIDEO_MP4V_HEIGHT, mp4vExtractor.selectedTrackMediaFormat.height);
}
public void testSampleTimestampsMatch() throws Exception {
  Mp4ExtractorWrapper wrapper =
      prepareSampleExtractor(getFakeDataSource(true /* includeStss */, false /* mp4vFormat */));
  // Every video sample must come back with the timestamp declared in SAMPLE_TIMESTAMPS,
  // converted from track time units to microseconds.
  SampleHolder holder = new SampleHolder(SampleHolder.BUFFER_REPLACEMENT_MODE_NORMAL);
  for (int timestampTimeUnits : SAMPLE_TIMESTAMPS) {
    wrapper.readSample(0, holder);
    assertEquals(getVideoTimestampUs(timestampTimeUnits), holder.timeUs);
  }
  // After the last sample the track reports end of stream.
  assertEquals(SampleSource.END_OF_STREAM, wrapper.readSample(0, holder));
}
public void testSeekToStart() throws Exception {
  // Seeking exactly to the first sample's timestamp must land on the first sample.
  int firstSampleTimeUnits = SAMPLE_TIMESTAMPS[0];
  long resultingTimestampUs =
      getTimestampUsResultingFromSeek(getVideoTimestampUs(firstSampleTimeUnits));
  assertEquals(getVideoTimestampUs(firstSampleTimeUnits), resultingTimestampUs);
}
/** Seeking to the last sample's timestamp yields exactly that timestamp. */
public void testSeekToEnd() throws Exception {
  // When seeking to the end, the next sample read is the final sample.
  long endTimestampUs = getVideoTimestampUs(SAMPLE_TIMESTAMPS[SAMPLE_TIMESTAMPS.length - 1]);
  assertEquals(endTimestampUs, getTimestampUsResultingFromSeek(endTimestampUs));
}
/** Seeking to just after the start still yields the first sample's timestamp. */
public void testSeekToNearStart() throws Exception {
  // When seeking one microsecond past the first sample, the read still returns the first sample.
  long startTimestampUs = getVideoTimestampUs(SAMPLE_TIMESTAMPS[0]);
  assertEquals(startTimestampUs, getTimestampUsResultingFromSeek(startTimestampUs + 1));
}
/**
 * Seeking to just before the timestamp of the sample at index 4 snaps back to an earlier
 * synchronization sample; the expected read is the first sample. (Presumably index 4 is a
 * synchronization sample and the previous one is index 0 — see
 * SYNCHRONIZATION_SAMPLE_INDICES, declared elsewhere in this file.)
 */
public void testSeekToBeforeLastSynchronizationSample() throws Exception {
  // When seeking to just before the sample at index 4
  long sampleTimestampUs =
      getTimestampUsResultingFromSeek(getVideoTimestampUs(SAMPLE_TIMESTAMPS[4]) - 1);
  // The timestamp read is that of the first sample.
  assertEquals(getVideoTimestampUs(SAMPLE_TIMESTAMPS[0]), sampleTimestampUs);
}
/**
 * When the stss box is absent, every sample must be flagged as a synchronization sample, and
 * all samples must still be readable.
 */
public void testAllSamplesAreSynchronizationSamplesWhenStssIsMissing() throws Exception {
  // Given an extractor over a file without an stss box
  Mp4ExtractorWrapper extractor =
      prepareSampleExtractor(getFakeDataSource(false /* includeStss */, false /* mp4vFormat */));
  // All samples are synchronization samples.
  SampleHolder sampleHolder = new SampleHolder(SampleHolder.BUFFER_REPLACEMENT_MODE_NORMAL);
  int sampleIndex = 0;
  while (true) {
    int result = extractor.readSample(0, sampleHolder);
    if (result == SampleSource.SAMPLE_READ) {
      // Every sample read must carry the sync flag.
      assertTrue((sampleHolder.flags & MediaExtractor.SAMPLE_FLAG_SYNC) != 0);
      sampleHolder.clearData();
      sampleIndex++;
    } else if (result == SampleSource.END_OF_STREAM) {
      break;
    }
  }
  // assertEquals (rather than assertTrue on ==) reports both values on failure, consistent with
  // testReadAllSamplesSucceeds.
  assertEquals(SAMPLE_SIZES.length, sampleIndex);
}
/** Reading to end of stream yields every sample with its expected size. */
public void testReadAllSamplesSucceeds() throws Exception {
  Mp4ExtractorWrapper extractor =
      prepareSampleExtractor(getFakeDataSource(true /* includeStss */, false /* mp4vFormat */));
  SampleHolder holder = new SampleHolder(SampleHolder.BUFFER_REPLACEMENT_MODE_NORMAL);
  int samplesRead = 0;
  int result;
  do {
    result = extractor.readSample(0, holder);
    if (result == SampleSource.SAMPLE_READ) {
      // Each sample's size matches the stsz fixture.
      assertEquals(SAMPLE_SIZES[samplesRead], holder.size);
      holder.clearData();
      samplesRead++;
    }
  } while (result != SampleSource.END_OF_STREAM);
  assertEquals(SAMPLE_SIZES.length, samplesRead);
}
/**
 * Seeks a freshly prepared extractor to {@code timestampTimeUnits} and returns the timestamp of
 * the next sample read, or -1 on end of stream.
 */
private static long getTimestampUsResultingFromSeek(long timestampTimeUnits) throws Exception {
  Mp4ExtractorWrapper extractor =
      prepareSampleExtractor(getFakeDataSource(true /* includeStss */, false /* mp4vFormat */));
  extractor.seekTo(timestampTimeUnits);
  SampleHolder holder = new SampleHolder(SampleHolder.BUFFER_REPLACEMENT_MODE_NORMAL);
  int result;
  // Keep polling until a sample is read or the stream ends.
  do {
    result = extractor.readSample(0, holder);
  } while (result != SampleSource.SAMPLE_READ && result != SampleSource.END_OF_STREAM);
  return result == SampleSource.SAMPLE_READ ? holder.timeUs : -1;
}
/** Creates an extractor wrapper around {@code dataSource} and blocks until it is prepared. */
private static Mp4ExtractorWrapper prepareSampleExtractor(DataSource dataSource)
    throws Exception {
  Mp4ExtractorWrapper wrapper = new Mp4ExtractorWrapper(dataSource);
  wrapper.prepare();
  return wrapper;
}
/** Returns a video timestamp in microseconds corresponding to {@code timeUnits}. */
private static long getVideoTimestampUs(int timeUnits) {
  // Scales from the media timescale (TIMESCALE units per second) to microseconds.
  return Util.scaleLargeTimestamp(timeUnits, C.MICROS_PER_SECOND, TIMESCALE);
}
/** Builds the payload of an stco (chunk offset) box listing {@code CHUNK_OFFSETS}. */
private static byte[] getStco() {
  ByteBuffer buffer = ByteBuffer.allocate(4 + 4 + 4 * CHUNK_OFFSETS.length);
  buffer.putInt(0); // Version (skipped)
  buffer.putInt(CHUNK_OFFSETS.length); // Entry count.
  for (int i = 0; i < CHUNK_OFFSETS.length; i++) {
    buffer.putInt(CHUNK_OFFSETS[i]);
  }
  return buffer.array();
}
/**
 * Builds the payload of an stsc (sample-to-chunk) box from {@code SAMPLES_IN_CHUNK}. The box is
 * run-length encoded, so an entry is emitted only where the samples-per-chunk value changes.
 */
private static byte[] getStsc() {
  // Collect the chunk indices at which the samples-per-chunk value changes.
  List<Integer> changeIndices = new ArrayList<Integer>();
  int previousSamplesPerChunk = -1;
  for (int chunk = 0; chunk < SAMPLES_IN_CHUNK.length; chunk++) {
    if (SAMPLES_IN_CHUNK[chunk] != previousSamplesPerChunk) {
      changeIndices.add(chunk);
      previousSamplesPerChunk = SAMPLES_IN_CHUNK[chunk];
    }
  }
  ByteBuffer buffer = ByteBuffer.allocate(4 + 4 + 3 * 4 * changeIndices.size());
  buffer.putInt(0); // Version (skipped)
  buffer.putInt(changeIndices.size()); // Entry count.
  for (int changeIndex : changeIndices) {
    buffer.putInt(changeIndex + 1); // First chunk of the run (1-based).
    buffer.putInt(SAMPLES_IN_CHUNK[changeIndex]); // Samples per chunk for the run.
    buffer.putInt(0); // Sample description index (skipped)
  }
  return buffer.array();
}
/** Builds the payload of an stsz (sample size) box listing {@code SAMPLE_SIZES}. */
private static byte[] getStsz() {
  ByteBuffer buffer = ByteBuffer.allocate(4 + 4 + 4 + 4 * SAMPLE_SIZES.length);
  buffer.putInt(0); // Version (skipped)
  buffer.putInt(0); // No fixed sample size: per-sample sizes follow.
  buffer.putInt(SAMPLE_SIZES.length); // Entry count.
  for (int i = 0; i < SAMPLE_SIZES.length; i++) {
    buffer.putInt(SAMPLE_SIZES[i]);
  }
  return buffer.array();
}
/** Builds the payload of an stss (sync sample) box from {@code SYNCHRONIZATION_SAMPLE_INDICES}. */
private static byte[] getStss() {
  ByteBuffer buffer = ByteBuffer.allocate(4 + 4 + 4 * SYNCHRONIZATION_SAMPLE_INDICES.length);
  buffer.putInt(0); // Version (skipped)
  buffer.putInt(SYNCHRONIZATION_SAMPLE_INDICES.length); // Entry count.
  for (int i = 0; i < SYNCHRONIZATION_SAMPLE_INDICES.length; i++) {
    // Sample numbers in stss are 1-based.
    buffer.putInt(SYNCHRONIZATION_SAMPLE_INDICES[i] + 1);
  }
  return buffer.array();
}
/**
 * Builds the payload of an stts (decoding time-to-sample) box from {@code SAMPLE_TIMESTAMPS}.
 *
 * <p>stts is run-length encoded as (sample count, timestamp delta) pairs, so a new entry is
 * needed only where the delta between consecutive timestamps changes.
 */
private static byte[] getStts() {
  // First pass: count the number of distinct delta runs, to size the payload.
  int sampleTimestampDeltaChanges = 0;
  int currentSampleTimestampDelta = -1;
  for (int i = 1; i < SAMPLE_TIMESTAMPS.length; i++) {
    int timestampDelta = SAMPLE_TIMESTAMPS[i] - SAMPLE_TIMESTAMPS[i - 1];
    if (timestampDelta != currentSampleTimestampDelta) {
      sampleTimestampDeltaChanges++;
      currentSampleTimestampDelta = timestampDelta;
    }
  }
  byte[] result = new byte[4 + 4 + 2 * 4 * sampleTimestampDeltaChanges];
  ByteBuffer buffer = ByteBuffer.wrap(result);
  buffer.putInt(0); // Version (skipped);
  buffer.putInt(sampleTimestampDeltaChanges);
  // Second pass: emit a (count, delta) entry each time the delta changes.
  int lastTimestampDeltaChangeIndex = 1;
  currentSampleTimestampDelta = SAMPLE_TIMESTAMPS[1] - SAMPLE_TIMESTAMPS[0];
  for (int i = 2; i < SAMPLE_TIMESTAMPS.length; i++) {
    int timestampDelta = SAMPLE_TIMESTAMPS[i] - SAMPLE_TIMESTAMPS[i - 1];
    if (timestampDelta != currentSampleTimestampDelta) {
      buffer.putInt(i - lastTimestampDeltaChangeIndex);
      lastTimestampDeltaChangeIndex = i;
      buffer.putInt(currentSampleTimestampDelta);
      currentSampleTimestampDelta = timestampDelta;
    }
  }
  // The last sample also has a duration, so the number of entries is the number of samples.
  buffer.putInt(SAMPLE_TIMESTAMPS.length - lastTimestampDeltaChangeIndex + 1);
  buffer.putInt(currentSampleTimestampDelta);
  return result;
}
/** Builds a zero-filled mdat (media data) payload of {@code MDAT_SIZE} bytes. */
private static byte[] getMdat() {
  // TODO: Put NAL length tags in at each sample position so the sample lengths don't have to
  // be multiples of four.
  return new byte[MDAT_SIZE];
}
/** Returns a data source over a generated MP4 file, optionally including an stss box. */
private static final DataSource getFakeDataSource(boolean includeStss, boolean mp4vFormat) {
  byte[] fileBytes;
  if (includeStss) {
    fileBytes = getTestMp4File(mp4vFormat);
  } else {
    fileBytes = getTestMp4FileWithoutSynchronizationData(mp4vFormat);
  }
  return new ByteArrayDataSource(fileBytes);
}
/**
 * Gets a valid MP4 file with audio/video tracks and synchronization data.
 *
 * <p>Layout: ftyp, then a moov containing two trak boxes (video first, then audio), each with a
 * full sample table (stsd/stts/stss/stsc/stsz/stco), followed by the mdat.
 * NOTE(review): the audio track's minf uses a vmhd header (normally smhd for sound);
 * presumably the parser under test ignores this box — confirm.
 */
private static byte[] getTestMp4File(boolean mp4vFormat) {
  return Mp4Atom.serialize(
      atom(Atom.TYPE_ftyp, EMPTY),
      atom(Atom.TYPE_moov,
          atom(Atom.TYPE_mvhd, MVHD_PAYLOAD),
          atom(Atom.TYPE_trak,
              atom(Atom.TYPE_tkhd, TKHD_PAYLOAD),
              atom(Atom.TYPE_mdia,
                  atom(Atom.TYPE_mdhd, VIDEO_MDHD_PAYLOAD),
                  atom(Atom.TYPE_hdlr, VIDEO_HDLR_PAYLOAD),
                  atom(Atom.TYPE_minf,
                      atom(Atom.TYPE_vmhd, EMPTY),
                      atom(Atom.TYPE_stbl,
                          atom(Atom.TYPE_stsd,
                              mp4vFormat ? VIDEO_STSD_MP4V_PAYLOAD : VIDEO_STSD_PAYLOAD),
                          atom(Atom.TYPE_stts, getStts()),
                          atom(Atom.TYPE_stss, getStss()),
                          atom(Atom.TYPE_stsc, getStsc()),
                          atom(Atom.TYPE_stsz, getStsz()),
                          atom(Atom.TYPE_stco, getStco()))))),
          atom(Atom.TYPE_trak,
              atom(Atom.TYPE_tkhd, TKHD_PAYLOAD),
              atom(Atom.TYPE_mdia,
                  atom(Atom.TYPE_mdhd, AUDIO_MDHD_PAYLOAD),
                  atom(Atom.TYPE_hdlr, AUDIO_HDLR_PAYLOAD),
                  atom(Atom.TYPE_minf,
                      atom(Atom.TYPE_vmhd, EMPTY),
                      atom(Atom.TYPE_stbl,
                          atom(Atom.TYPE_stsd, AUDIO_STSD_PAYLOAD),
                          atom(Atom.TYPE_stts, getStts()),
                          atom(Atom.TYPE_stss, getStss()),
                          atom(Atom.TYPE_stsc, getStsc()),
                          atom(Atom.TYPE_stsz, getStsz()),
                          atom(Atom.TYPE_stco, getStco())))))),
      atom(Atom.TYPE_mdat, getMdat()));
}
/**
 * Gets a valid MP4 file with audio/video tracks and without a synchronization table.
 *
 * <p>Identical in layout to {@code getTestMp4File} except that neither track's stbl contains an
 * stss box, so every sample should be treated as a synchronization sample.
 */
private static byte[] getTestMp4FileWithoutSynchronizationData(boolean mp4vFormat) {
  return Mp4Atom.serialize(
      atom(Atom.TYPE_ftyp, EMPTY),
      atom(Atom.TYPE_moov,
          atom(Atom.TYPE_mvhd, MVHD_PAYLOAD),
          atom(Atom.TYPE_trak,
              atom(Atom.TYPE_tkhd, TKHD_PAYLOAD),
              atom(Atom.TYPE_mdia,
                  atom(Atom.TYPE_mdhd, VIDEO_MDHD_PAYLOAD),
                  atom(Atom.TYPE_hdlr, VIDEO_HDLR_PAYLOAD),
                  atom(Atom.TYPE_minf,
                      atom(Atom.TYPE_vmhd, EMPTY),
                      atom(Atom.TYPE_stbl,
                          atom(Atom.TYPE_stsd,
                              mp4vFormat ? VIDEO_STSD_MP4V_PAYLOAD : VIDEO_STSD_PAYLOAD),
                          atom(Atom.TYPE_stts, getStts()),
                          atom(Atom.TYPE_stsc, getStsc()),
                          atom(Atom.TYPE_stsz, getStsz()),
                          atom(Atom.TYPE_stco, getStco()))))),
          atom(Atom.TYPE_trak,
              atom(Atom.TYPE_tkhd, TKHD_PAYLOAD),
              atom(Atom.TYPE_mdia,
                  atom(Atom.TYPE_mdhd, AUDIO_MDHD_PAYLOAD),
                  atom(Atom.TYPE_hdlr, AUDIO_HDLR_PAYLOAD),
                  atom(Atom.TYPE_minf,
                      atom(Atom.TYPE_vmhd, EMPTY),
                      atom(Atom.TYPE_stbl,
                          atom(Atom.TYPE_stsd, AUDIO_STSD_PAYLOAD),
                          atom(Atom.TYPE_stts, getStts()),
                          atom(Atom.TYPE_stsc, getStsc()),
                          atom(Atom.TYPE_stsz, getStsz()),
                          atom(Atom.TYPE_stco, getStco())))))),
      atom(Atom.TYPE_mdat, getMdat()));
}
/** Creates a container atom of the given type holding {@code containedMp4Atoms}. */
private static Mp4Atom atom(int type, Mp4Atom... containedMp4Atoms) {
  return new Mp4Atom(type, containedMp4Atoms);
}
/** Creates a leaf atom of the given type with the given raw {@code payload}. */
private static Mp4Atom atom(int type, byte[] payload) {
  return new Mp4Atom(type, payload);
}
/** Converts a hexadecimal string (two characters per byte) to the corresponding byte array. */
private static byte[] getByteArray(String hexBytes) {
  int byteCount = hexBytes.length() / 2;
  byte[] bytes = new byte[byteCount];
  for (int i = 0; i < byteCount; i++) {
    int highNibble = Character.digit(hexBytes.charAt(2 * i), 16);
    int lowNibble = Character.digit(hexBytes.charAt(2 * i + 1), 16);
    bytes[i] = (byte) ((highNibble << 4) + lowNibble);
  }
  return bytes;
}
/** MP4 atom that can be serialized as a byte array. */
private static final class Mp4Atom {
  /** Serializes the given top-level atoms (and, recursively, their children) to a byte array. */
  public static byte[] serialize(Mp4Atom... atoms) {
    // First compute the total size so a single exactly-sized buffer can be allocated.
    int size = 0;
    for (Mp4Atom atom : atoms) {
      size += atom.getSize();
    }
    ByteBuffer buffer = ByteBuffer.allocate(size);
    for (Mp4Atom atom : atoms) {
      atom.getData(buffer);
    }
    return buffer.array();
  }
  // Atom header: 32-bit size followed by a 32-bit type.
  private static final int HEADER_SIZE = 8;
  private final int type;
  // Exactly one of containedMp4Atoms and payload is non-null: container atom vs. leaf atom.
  private final Mp4Atom[] containedMp4Atoms;
  private final byte[] payload;
  /** Creates a container atom holding the given child atoms. */
  private Mp4Atom(int type, Mp4Atom... containedMp4Atoms) {
    this.type = type;
    this.containedMp4Atoms = containedMp4Atoms;
    payload = null;
  }
  /** Creates a leaf atom with the given raw payload. */
  private Mp4Atom(int type, byte[] payload) {
    this.type = type;
    this.payload = payload;
    containedMp4Atoms = null;
  }
  /** Returns the serialized size of this atom in bytes, including its header. */
  private int getSize() {
    int size = HEADER_SIZE;
    if (payload != null) {
      size += payload.length;
    } else {
      for (Mp4Atom atom : containedMp4Atoms) {
        size += atom.getSize();
      }
    }
    return size;
  }
  /** Writes this atom's header and contents (payload or children) into {@code byteBuffer}. */
  private void getData(ByteBuffer byteBuffer) {
    byteBuffer.putInt(getSize());
    byteBuffer.putInt(type);
    if (payload != null) {
      byteBuffer.put(payload);
    } else {
      for (Mp4Atom atom : containedMp4Atoms) {
        atom.getData(byteBuffer);
      }
    }
  }
}
/**
 * Creates a {@link Mp4SampleExtractor} on a separate thread with a looper, so that it can use a
 * handler for loading, and provides blocking operations like {@link #seekTo} and
 * {@link #readSample}.
 */
private static final class Mp4ExtractorWrapper extends Thread {
  private static final int MSG_PREPARE = 0;
  private static final int MSG_SEEK_TO = 1;
  private static final int MSG_READ_SAMPLE = 2;
  private final DataSource dataSource;
  // Written by the handler's thread and read by the main thread.
  public volatile MediaFormat[] mediaFormats;
  public volatile MediaFormat selectedTrackMediaFormat;
  private volatile Handler handler;
  private volatile int readSampleResult;
  private volatile Exception exception;
  // Latch the main thread awaits on; the handler thread counts it down when an operation
  // completes (or fails with an exception). Replaced with a fresh latch per operation.
  private volatile CountDownLatch pendingOperationLatch;
  public Mp4ExtractorWrapper(DataSource dataSource) {
    super("Mp4SampleExtractorTest");
    this.dataSource = Assertions.checkNotNull(dataSource);
    // Initial latch is counted down by run() once the handler exists.
    pendingOperationLatch = new CountDownLatch(1);
    start();
  }
  // Blocks until the extractor thread has prepared the extractor; rethrows any exception that
  // occurred on the handler thread.
  public void prepare() throws Exception {
    // Block until the handler has been created.
    pendingOperationLatch.await();
    // Block until the extractor has been prepared.
    pendingOperationLatch = new CountDownLatch(1);
    handler.sendEmptyMessage(MSG_PREPARE);
    pendingOperationLatch.await();
    if (exception != null) {
      throw exception;
    }
  }
  // Posts a seek to the handler thread; does not block.
  public void seekTo(long timestampUs) {
    handler.obtainMessage(MSG_SEEK_TO, timestampUs).sendToTarget();
  }
  // Blocks until one readSample call completes on the handler thread and returns its result;
  // rethrows any exception that occurred there.
  public int readSample(int trackIndex, SampleHolder sampleHolder) throws Exception {
    // Block until the extractor has completed readSample.
    pendingOperationLatch = new CountDownLatch(1);
    handler.obtainMessage(MSG_READ_SAMPLE, trackIndex, 0, sampleHolder).sendToTarget();
    pendingOperationLatch.await();
    if (exception != null) {
      throw exception;
    }
    return readSampleResult;
  }
  @SuppressLint("HandlerLeak")
  @Override
  public void run() {
    final Mp4SampleExtractor mp4SampleExtractor =
        new Mp4SampleExtractor(dataSource, new DataSpec(FAKE_URI));
    Looper.prepare();
    handler = new Handler() {
      @Override
      public void handleMessage(Message message) {
        try {
          switch (message.what) {
            case MSG_PREPARE:
              // prepare() returning false means "not yet ready": re-post and try again.
              if (!mp4SampleExtractor.prepare()) {
                sendEmptyMessage(MSG_PREPARE);
              } else {
                // Select the video track and get its metadata.
                mediaFormats = new MediaFormat[mp4SampleExtractor.getTrackCount()];
                for (int track = 0; track < mp4SampleExtractor.getTrackCount(); track++) {
                  MediaFormat mediaFormat = mp4SampleExtractor.getMediaFormat(track);
                  mediaFormats[track] = mediaFormat;
                  if (MimeTypes.isVideo(mediaFormat.mimeType)) {
                    mp4SampleExtractor.selectTrack(track);
                    selectedTrackMediaFormat = mediaFormat;
                  }
                }
                pendingOperationLatch.countDown();
              }
              break;
            case MSG_SEEK_TO:
              long timestampUs = (long) message.obj;
              mp4SampleExtractor.seekTo(timestampUs);
              break;
            case MSG_READ_SAMPLE:
              int trackIndex = message.arg1;
              SampleHolder sampleHolder = (SampleHolder) message.obj;
              sampleHolder.clearData();
              readSampleResult = mp4SampleExtractor.readSample(trackIndex, sampleHolder);
              // NOTHING_READ: re-post the same message and keep the caller blocked until a
              // sample is read or end of stream is reached.
              if (readSampleResult == SampleSource.NOTHING_READ) {
                Message.obtain(message).sendToTarget();
                return;
              }
              pendingOperationLatch.countDown();
              break;
          }
        } catch (Exception e) {
          // Surface the failure to the blocked caller rather than killing the looper thread.
          exception = e;
          pendingOperationLatch.countDown();
        }
      }
    };
    // Unblock waiting for the handler.
    pendingOperationLatch.countDown();
    Looper.loop();
  }
}
}

/* ==== End of Mp4SampleExtractorTest; following content is a separate new file (149 lines):
   BufferedNonBlockingInputStreamTest.java ==== */
/*
* Copyright (C) 2014 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.google.android.exoplayer.upstream;
import static org.mockito.Matchers.any;
import static org.mockito.Matchers.anyInt;
import static org.mockito.Matchers.eq;
import static org.mockito.Mockito.verify;
import static org.mockito.Mockito.when;
import com.google.android.exoplayer.SampleSource;
import junit.framework.TestCase;
import org.mockito.Mock;
import org.mockito.MockitoAnnotations;
import org.mockito.invocation.InvocationOnMock;
import org.mockito.stubbing.Answer;
import java.util.Arrays;
/**
 * Tests for {@link BufferedNonBlockingInputStream}.
 */
public class BufferedNonBlockingInputStreamTest extends TestCase {
  // Capacity of the mark buffer under test; skips/reads beyond this cannot be rewound.
  private static final int BUFFER_SIZE_BYTES = 16;
  @Mock private NonBlockingInputStream mockInputStream;
  private BufferedNonBlockingInputStream bufferedInputStream;
  @Override
  public void setUp() {
    MockitoAnnotations.initMocks(this);
    bufferedInputStream = new BufferedNonBlockingInputStream(mockInputStream, BUFFER_SIZE_BYTES);
  }
  public void testSkipClipsCountToBufferSizeWhenMarkSet() {
    // When marking and skipping more than the buffer size
    bufferedInputStream.mark();
    bufferedInputStream.skip(BUFFER_SIZE_BYTES + 1);
    // Then only BUFFER_SIZE_BYTES are read from the underlying stream.
    verify(mockInputStream).read((byte[]) any(), eq(0), eq(BUFFER_SIZE_BYTES));
  }
  public void testSkipResetSkipUsesBufferedData() {
    // Given a buffered input stream that has already read BUFFER_SIZE_BYTES
    stubInputStreamForReadingBytes();
    bufferedInputStream.mark();
    bufferedInputStream.skip(BUFFER_SIZE_BYTES);
    verify(mockInputStream).read((byte[]) any(), eq(0), eq(BUFFER_SIZE_BYTES));
    // When resetting and reading the same amount, no extra data are read: the second identical
    // verify passes only if the underlying read was still invoked exactly once in total.
    bufferedInputStream.returnToMark();
    bufferedInputStream.skip(BUFFER_SIZE_BYTES);
    verify(mockInputStream).read((byte[]) any(), eq(0), eq(BUFFER_SIZE_BYTES));
  }
  public void testReturnsEndOfStreamAfterBufferedData() {
    // Given a buffered input stream that has read 1 byte (to end-of-stream) and has been reset
    stubInputStreamForReadingBytes();
    bufferedInputStream.mark();
    bufferedInputStream.skip(1);
    // Re-stub so the underlying stream now reports end-of-stream.
    stubInputStreamForReadingEndOfStream();
    bufferedInputStream.returnToMark();
    // When skipping, first the 1 buffered byte is returned, then end-of-stream.
    assertEquals(1, bufferedInputStream.skip(1));
    assertEquals(SampleSource.END_OF_STREAM, bufferedInputStream.skip(1));
  }
  public void testReadAtOffset() {
    // Given a mock input stream that provides non-zero (0xFF) data
    stubInputStreamForReadingBytes();
    // When reading a byte at offset 1
    byte[] bytes = new byte[2];
    bufferedInputStream.mark();
    bufferedInputStream.read(bytes, 1, 1);
    // Then only the second byte is set.
    assertTrue(Arrays.equals(new byte[] {(byte) 0, (byte) 0xFF}, bytes));
  }
  public void testSkipAfterMark() {
    // Given a mock input stream that provides non-zero data, with three bytes read
    stubInputStreamForReadingBytes();
    bufferedInputStream.skip(1);
    bufferedInputStream.mark();
    bufferedInputStream.skip(2);
    bufferedInputStream.returnToMark();
    // Then it is possible to skip one byte after the mark and read two bytes (one buffered byte
    // plus one freshly read at buffer offset 2).
    assertEquals(1, bufferedInputStream.skip(1));
    assertEquals(2, bufferedInputStream.read(new byte[2], 0, 2));
    verify(mockInputStream).read((byte[]) any(), eq(0), eq(1));
    verify(mockInputStream).read((byte[]) any(), eq(0), eq(2));
    verify(mockInputStream).read((byte[]) any(), eq(2), eq(1));
  }
  /** Stubs the input stream to read 0xFF for all requests. */
  private void stubInputStreamForReadingBytes() {
    when(mockInputStream.read((byte[]) any(), anyInt(), anyInt())).thenAnswer(
        new Answer<Integer>() {
          @Override
          public Integer answer(InvocationOnMock invocation) throws Throwable {
            // Fill the requested region with 0xFF and report a full read.
            byte[] bytes = (byte[]) invocation.getArguments()[0];
            int offset = (int) invocation.getArguments()[1];
            int length = (int) invocation.getArguments()[2];
            for (int i = 0; i < length; i++) {
              bytes[i + offset] = (byte) 0xFF;
            }
            return length;
          }
        });
    // skip always succeeds in full.
    when(mockInputStream.skip(anyInt())).thenAnswer(new Answer<Integer>() {
      @Override
      public Integer answer(InvocationOnMock invocation) throws Throwable {
        return (int) invocation.getArguments()[0];
      }
    });
  }
  /** Stubs the input stream to read end-of-stream for all requests. */
  private void stubInputStreamForReadingEndOfStream() {
    when(mockInputStream.read((byte[]) any(), anyInt(), anyInt()))
        .thenReturn(SampleSource.END_OF_STREAM);
  }
}