mirror of
https://github.com/samsonjs/media.git
synced 2026-04-16 13:05:46 +00:00
Major surgery to move all playback modes to the new Extractor model.
This commit is contained in:
parent
265adf9a8f
commit
e21f7801b5
55 changed files with 1780 additions and 5999 deletions
|
|
@ -18,6 +18,7 @@ package com.google.android.exoplayer.demo;
|
|||
import com.google.android.exoplayer.ExoPlayer;
|
||||
import com.google.android.exoplayer.MediaCodecTrackRenderer.DecoderInitializationException;
|
||||
import com.google.android.exoplayer.audio.AudioTrack;
|
||||
import com.google.android.exoplayer.chunk.Format;
|
||||
import com.google.android.exoplayer.demo.player.DemoPlayer;
|
||||
import com.google.android.exoplayer.util.VerboseLogUtil;
|
||||
|
||||
|
|
@ -91,11 +92,11 @@ public class EventLogger implements DemoPlayer.Listener, DemoPlayer.InfoListener
|
|||
}
|
||||
|
||||
@Override
|
||||
public void onLoadStarted(int sourceId, String formatId, int trigger, boolean isInitialization,
|
||||
int mediaStartTimeMs, int mediaEndTimeMs, long length) {
|
||||
public void onLoadStarted(int sourceId, long length, int type, int trigger, Format format,
|
||||
int mediaStartTimeMs, int mediaEndTimeMs) {
|
||||
loadStartTimeMs[sourceId] = SystemClock.elapsedRealtime();
|
||||
if (VerboseLogUtil.isTagEnabled(TAG)) {
|
||||
Log.v(TAG, "loadStart [" + getSessionTimeString() + ", " + sourceId
|
||||
Log.v(TAG, "loadStart [" + getSessionTimeString() + ", " + sourceId + ", " + type
|
||||
+ ", " + mediaStartTimeMs + ", " + mediaEndTimeMs + "]");
|
||||
}
|
||||
}
|
||||
|
|
@ -110,27 +111,22 @@ public class EventLogger implements DemoPlayer.Listener, DemoPlayer.InfoListener
|
|||
}
|
||||
|
||||
@Override
|
||||
public void onVideoFormatEnabled(String formatId, int trigger, int mediaTimeMs) {
|
||||
Log.d(TAG, "videoFormat [" + getSessionTimeString() + ", " + formatId + ", "
|
||||
public void onVideoFormatEnabled(Format format, int trigger, int mediaTimeMs) {
|
||||
Log.d(TAG, "videoFormat [" + getSessionTimeString() + ", " + format.id + ", "
|
||||
+ Integer.toString(trigger) + "]");
|
||||
}
|
||||
|
||||
@Override
|
||||
public void onAudioFormatEnabled(String formatId, int trigger, int mediaTimeMs) {
|
||||
Log.d(TAG, "audioFormat [" + getSessionTimeString() + ", " + formatId + ", "
|
||||
public void onAudioFormatEnabled(Format format, int trigger, int mediaTimeMs) {
|
||||
Log.d(TAG, "audioFormat [" + getSessionTimeString() + ", " + format.id + ", "
|
||||
+ Integer.toString(trigger) + "]");
|
||||
}
|
||||
|
||||
// DemoPlayer.InternalErrorListener
|
||||
|
||||
@Override
|
||||
public void onUpstreamError(int sourceId, IOException e) {
|
||||
printInternalError("upstreamError", e);
|
||||
}
|
||||
|
||||
@Override
|
||||
public void onConsumptionError(int sourceId, IOException e) {
|
||||
printInternalError("consumptionError", e);
|
||||
public void onLoadError(int sourceId, IOException e) {
|
||||
printInternalError("loadError", e);
|
||||
}
|
||||
|
||||
@Override
|
||||
|
|
|
|||
|
|
@ -25,6 +25,7 @@ import com.google.android.exoplayer.MediaCodecVideoTrackRenderer;
|
|||
import com.google.android.exoplayer.TrackRenderer;
|
||||
import com.google.android.exoplayer.audio.AudioTrack;
|
||||
import com.google.android.exoplayer.chunk.ChunkSampleSource;
|
||||
import com.google.android.exoplayer.chunk.Format;
|
||||
import com.google.android.exoplayer.chunk.MultiTrackChunkSource;
|
||||
import com.google.android.exoplayer.drm.StreamingDrmSessionManager;
|
||||
import com.google.android.exoplayer.metadata.MetadataTrackRenderer;
|
||||
|
|
@ -113,8 +114,7 @@ public class DemoPlayer implements ExoPlayer.Listener, ChunkSampleSource.EventLi
|
|||
void onAudioTrackWriteError(AudioTrack.WriteException e);
|
||||
void onDecoderInitializationError(DecoderInitializationException e);
|
||||
void onCryptoError(CryptoException e);
|
||||
void onUpstreamError(int sourceId, IOException e);
|
||||
void onConsumptionError(int sourceId, IOException e);
|
||||
void onLoadError(int sourceId, IOException e);
|
||||
void onDrmSessionManagerError(Exception e);
|
||||
}
|
||||
|
||||
|
|
@ -122,12 +122,12 @@ public class DemoPlayer implements ExoPlayer.Listener, ChunkSampleSource.EventLi
|
|||
* A listener for debugging information.
|
||||
*/
|
||||
public interface InfoListener {
|
||||
void onVideoFormatEnabled(String formatId, int trigger, int mediaTimeMs);
|
||||
void onAudioFormatEnabled(String formatId, int trigger, int mediaTimeMs);
|
||||
void onVideoFormatEnabled(Format format, int trigger, int mediaTimeMs);
|
||||
void onAudioFormatEnabled(Format format, int trigger, int mediaTimeMs);
|
||||
void onDroppedFrames(int count, long elapsed);
|
||||
void onBandwidthSample(int elapsedMs, long bytes, long bitrateEstimate);
|
||||
void onLoadStarted(int sourceId, String formatId, int trigger, boolean isInitialization,
|
||||
int mediaStartTimeMs, int mediaEndTimeMs, long length);
|
||||
void onLoadStarted(int sourceId, long length, int type, int trigger, Format format,
|
||||
int mediaStartTimeMs, int mediaEndTimeMs);
|
||||
void onLoadCompleted(int sourceId, long bytesLoaded);
|
||||
void onDecoderInitialized(String decoderName, long elapsedRealtimeMs,
|
||||
long initializationDurationMs);
|
||||
|
|
@ -432,15 +432,14 @@ public class DemoPlayer implements ExoPlayer.Listener, ChunkSampleSource.EventLi
|
|||
}
|
||||
|
||||
@Override
|
||||
public void onDownstreamFormatChanged(int sourceId, String formatId, int trigger,
|
||||
int mediaTimeMs) {
|
||||
public void onDownstreamFormatChanged(int sourceId, Format format, int trigger, int mediaTimeMs) {
|
||||
if (infoListener == null) {
|
||||
return;
|
||||
}
|
||||
if (sourceId == TYPE_VIDEO) {
|
||||
infoListener.onVideoFormatEnabled(formatId, trigger, mediaTimeMs);
|
||||
infoListener.onVideoFormatEnabled(format, trigger, mediaTimeMs);
|
||||
} else if (sourceId == TYPE_AUDIO) {
|
||||
infoListener.onAudioFormatEnabled(formatId, trigger, mediaTimeMs);
|
||||
infoListener.onAudioFormatEnabled(format, trigger, mediaTimeMs);
|
||||
}
|
||||
}
|
||||
|
||||
|
|
@ -490,16 +489,9 @@ public class DemoPlayer implements ExoPlayer.Listener, ChunkSampleSource.EventLi
|
|||
}
|
||||
|
||||
@Override
|
||||
public void onUpstreamError(int sourceId, IOException e) {
|
||||
public void onLoadError(int sourceId, IOException e) {
|
||||
if (internalErrorListener != null) {
|
||||
internalErrorListener.onUpstreamError(sourceId, e);
|
||||
}
|
||||
}
|
||||
|
||||
@Override
|
||||
public void onConsumptionError(int sourceId, IOException e) {
|
||||
if (internalErrorListener != null) {
|
||||
internalErrorListener.onConsumptionError(sourceId, e);
|
||||
internalErrorListener.onLoadError(sourceId, e);
|
||||
}
|
||||
}
|
||||
|
||||
|
|
@ -531,11 +523,11 @@ public class DemoPlayer implements ExoPlayer.Listener, ChunkSampleSource.EventLi
|
|||
}
|
||||
|
||||
@Override
|
||||
public void onLoadStarted(int sourceId, String formatId, int trigger, boolean isInitialization,
|
||||
int mediaStartTimeMs, int mediaEndTimeMs, long length) {
|
||||
public void onLoadStarted(int sourceId, long length, int type, int trigger, Format format,
|
||||
int mediaStartTimeMs, int mediaEndTimeMs) {
|
||||
if (infoListener != null) {
|
||||
infoListener.onLoadStarted(sourceId, formatId, trigger, isInitialization, mediaStartTimeMs,
|
||||
mediaEndTimeMs, length);
|
||||
infoListener.onLoadStarted(sourceId, length, type, trigger, format, mediaStartTimeMs,
|
||||
mediaEndTimeMs);
|
||||
}
|
||||
}
|
||||
|
||||
|
|
@ -552,14 +544,7 @@ public class DemoPlayer implements ExoPlayer.Listener, ChunkSampleSource.EventLi
|
|||
}
|
||||
|
||||
@Override
|
||||
public void onUpstreamDiscarded(int sourceId, int mediaStartTimeMs, int mediaEndTimeMs,
|
||||
long bytesDiscarded) {
|
||||
// Do nothing.
|
||||
}
|
||||
|
||||
@Override
|
||||
public void onDownstreamDiscarded(int sourceId, int mediaStartTimeMs, int mediaEndTimeMs,
|
||||
long bytesDiscarded) {
|
||||
public void onUpstreamDiscarded(int sourceId, int mediaStartTimeMs, int mediaEndTimeMs) {
|
||||
// Do nothing.
|
||||
}
|
||||
|
||||
|
|
|
|||
|
|
@ -0,0 +1,92 @@
|
|||
/*
|
||||
* Copyright (C) 2014 The Android Open Source Project
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
*/
|
||||
package com.google.android.exoplayer.chunk;
|
||||
|
||||
import com.google.android.exoplayer.C;
|
||||
import com.google.android.exoplayer.SampleSource;
|
||||
|
||||
import java.io.IOException;
|
||||
|
||||
/**
|
||||
* Interface for callbacks to be notified of chunk based {@link SampleSource} events.
|
||||
*/
|
||||
public interface BaseChunkSampleSourceEventListener {
|
||||
|
||||
/**
|
||||
* Invoked when an upstream load is started.
|
||||
*
|
||||
* @param sourceId The id of the reporting {@link SampleSource}.
|
||||
* @param length The length of the data being loaded in bytes, or {@link C#LENGTH_UNBOUNDED} if
|
||||
* the length of the data is not known in advance.
|
||||
* @param type The type of the data being loaded.
|
||||
* @param trigger The reason for the data being loaded.
|
||||
* @param format The particular format to which this data corresponds, or null if the data being
|
||||
* loaded does not correspond to a format.
|
||||
* @param mediaStartTimeMs The media time of the start of the data being loaded, or -1 if this
|
||||
* load is for initialization data.
|
||||
* @param mediaEndTimeMs The media time of the end of the data being loaded, or -1 if this
|
||||
* load is for initialization data.
|
||||
*/
|
||||
void onLoadStarted(int sourceId, long length, int type, int trigger, Format format,
|
||||
int mediaStartTimeMs, int mediaEndTimeMs);
|
||||
|
||||
/**
|
||||
* Invoked when the current load operation completes.
|
||||
*
|
||||
* @param sourceId The id of the reporting {@link SampleSource}.
|
||||
* @param bytesLoaded The number of bytes that were loaded.
|
||||
*/
|
||||
void onLoadCompleted(int sourceId, long bytesLoaded);
|
||||
|
||||
/**
|
||||
* Invoked when the current upstream load operation is canceled.
|
||||
*
|
||||
* @param sourceId The id of the reporting {@link SampleSource}.
|
||||
* @param bytesLoaded The number of bytes that were loaded prior to the cancellation.
|
||||
*/
|
||||
void onLoadCanceled(int sourceId, long bytesLoaded);
|
||||
|
||||
/**
|
||||
* Invoked when an error occurs loading media data.
|
||||
*
|
||||
* @param sourceId The id of the reporting {@link SampleSource}.
|
||||
* @param e The cause of the failure.
|
||||
*/
|
||||
void onLoadError(int sourceId, IOException e);
|
||||
|
||||
/**
|
||||
* Invoked when data is removed from the back of the buffer, typically so that it can be
|
||||
* re-buffered using a different representation.
|
||||
*
|
||||
* @param sourceId The id of the reporting {@link SampleSource}.
|
||||
* @param mediaStartTimeMs The media time of the start of the discarded data.
|
||||
* @param mediaEndTimeMs The media time of the end of the discarded data.
|
||||
*/
|
||||
void onUpstreamDiscarded(int sourceId, int mediaStartTimeMs, int mediaEndTimeMs);
|
||||
|
||||
/**
|
||||
* Invoked when the downstream format changes (i.e. when the format being supplied to the
|
||||
* caller of {@link SampleSource#readData} changes).
|
||||
*
|
||||
* @param sourceId The id of the reporting {@link SampleSource}.
|
||||
* @param format The format.
|
||||
* @param trigger The trigger specified in the corresponding upstream load, as specified by the
|
||||
* {@link ChunkSource}.
|
||||
* @param mediaTimeMs The media time at which the change occurred.
|
||||
*/
|
||||
void onDownstreamFormatChanged(int sourceId, Format format, int trigger, int mediaTimeMs);
|
||||
|
||||
}
|
||||
|
|
@ -0,0 +1,108 @@
|
|||
/*
|
||||
* Copyright (C) 2014 The Android Open Source Project
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
*/
|
||||
package com.google.android.exoplayer.chunk;
|
||||
|
||||
import com.google.android.exoplayer.MediaFormat;
|
||||
import com.google.android.exoplayer.drm.DrmInitData;
|
||||
import com.google.android.exoplayer.extractor.DefaultTrackOutput;
|
||||
import com.google.android.exoplayer.upstream.DataSource;
|
||||
import com.google.android.exoplayer.upstream.DataSpec;
|
||||
|
||||
/**
|
||||
* A base implementation of {@link MediaChunk}, for chunks that contain a single track.
|
||||
* <p>
|
||||
* Loaded samples are output to a {@link DefaultTrackOutput}.
|
||||
*/
|
||||
public abstract class BaseMediaChunk extends MediaChunk {
|
||||
|
||||
/**
|
||||
* Whether {@link #getMediaFormat()} and {@link #getDrmInitData()} can be called at any time to
|
||||
* obtain the chunk's media format and drm initialization data. If false, these methods are only
|
||||
* guaranteed to return correct data after the first sample data has been output from the chunk.
|
||||
*/
|
||||
public final boolean isFormatFinal;
|
||||
|
||||
private DefaultTrackOutput output;
|
||||
private int firstSampleIndex;
|
||||
|
||||
/**
|
||||
* @param dataSource A {@link DataSource} for loading the data.
|
||||
* @param dataSpec Defines the data to be loaded.
|
||||
* @param trigger The reason for this chunk being selected.
|
||||
* @param format The format of the stream to which this chunk belongs.
|
||||
* @param startTimeUs The start time of the media contained by the chunk, in microseconds.
|
||||
* @param endTimeUs The end time of the media contained by the chunk, in microseconds.
|
||||
* @param chunkIndex The index of the chunk.
|
||||
* @param isLastChunk True if this is the last chunk in the media. False otherwise.
|
||||
* @param isFormatFinal True if {@link #getMediaFormat()} and {@link #getDrmInitData()} can be
|
||||
* called at any time to obtain the media format and drm initialization data. False if these
|
||||
* methods are only guaranteed to return correct data after the first sample data has been
|
||||
* output from the chunk.
|
||||
*/
|
||||
public BaseMediaChunk(DataSource dataSource, DataSpec dataSpec, int trigger, Format format,
|
||||
long startTimeUs, long endTimeUs, int chunkIndex, boolean isLastChunk,
|
||||
boolean isFormatFinal) {
|
||||
super(dataSource, dataSpec, trigger, format, startTimeUs, endTimeUs, chunkIndex, isLastChunk);
|
||||
this.isFormatFinal = isFormatFinal;
|
||||
}
|
||||
|
||||
/**
|
||||
* Initializes the chunk for loading, setting the {@link DefaultTrackOutput} that will receive
|
||||
* samples as they are loaded.
|
||||
*
|
||||
* @param output The output that will receive the loaded samples.
|
||||
*/
|
||||
public void init(DefaultTrackOutput output) {
|
||||
this.output = output;
|
||||
this.firstSampleIndex = output.getWriteIndex();
|
||||
}
|
||||
|
||||
/**
|
||||
* Returns the index of the first sample in the output that was passed to
|
||||
* {@link #init(DefaultTrackOutput)} that will originate from this chunk.
|
||||
*/
|
||||
public final int getFirstSampleIndex() {
|
||||
return firstSampleIndex;
|
||||
}
|
||||
|
||||
/**
|
||||
* Gets the {@link MediaFormat} corresponding to the chunk.
|
||||
* <p>
|
||||
* See {@link #isFormatFinal} for information about when this method is guaranteed to return
|
||||
* correct data.
|
||||
*
|
||||
* @return The {@link MediaFormat} corresponding to this chunk.
|
||||
*/
|
||||
public abstract MediaFormat getMediaFormat();
|
||||
|
||||
/**
|
||||
* Gets the {@link DrmInitData} corresponding to the chunk.
|
||||
* <p>
|
||||
* See {@link #isFormatFinal} for information about when this method is guaranteed to return
|
||||
* correct data.
|
||||
*
|
||||
* @return The {@link DrmInitData} corresponding to this chunk.
|
||||
*/
|
||||
public abstract DrmInitData getDrmInitData();
|
||||
|
||||
/**
|
||||
* Returns the output most recently passed to {@link #init(DefaultTrackOutput)}.
|
||||
*/
|
||||
protected final DefaultTrackOutput getOutput() {
|
||||
return output;
|
||||
}
|
||||
|
||||
}
|
||||
|
|
@ -15,18 +15,10 @@
|
|||
*/
|
||||
package com.google.android.exoplayer.chunk;
|
||||
|
||||
import com.google.android.exoplayer.C;
|
||||
import com.google.android.exoplayer.upstream.Allocation;
|
||||
import com.google.android.exoplayer.upstream.Allocator;
|
||||
import com.google.android.exoplayer.upstream.DataSource;
|
||||
import com.google.android.exoplayer.upstream.DataSourceStream;
|
||||
import com.google.android.exoplayer.upstream.DataSpec;
|
||||
import com.google.android.exoplayer.upstream.Loader.Loadable;
|
||||
import com.google.android.exoplayer.upstream.NonBlockingInputStream;
|
||||
import com.google.android.exoplayer.util.Assertions;
|
||||
import com.google.android.exoplayer.util.TraceUtil;
|
||||
|
||||
import java.io.IOException;
|
||||
|
||||
/**
|
||||
* An abstract base class for {@link Loadable} implementations that load chunks of data required
|
||||
|
|
@ -34,6 +26,31 @@ import java.io.IOException;
|
|||
*/
|
||||
public abstract class Chunk implements Loadable {
|
||||
|
||||
/**
|
||||
* Value of {@link #type} for chunks containing unspecified data.
|
||||
*/
|
||||
public static final int TYPE_UNSPECIFIED = 0;
|
||||
/**
|
||||
* Value of {@link #type} for chunks containing media data.
|
||||
*/
|
||||
public static final int TYPE_MEDIA = 1;
|
||||
/**
|
||||
* Value of {@link #type} for chunks containing media initialization data.
|
||||
*/
|
||||
public static final int TYPE_MEDIA_INITIALIZATION = 2;
|
||||
/**
|
||||
* Value of {@link #type} for chunks containing drm related data.
|
||||
*/
|
||||
public static final int TYPE_DRM = 3;
|
||||
/**
|
||||
* Value of {@link #type} for chunks containing manifest or playlist data.
|
||||
*/
|
||||
public static final int TYPE_MANIFEST = 4;
|
||||
/**
|
||||
* Implementations may define custom {@link #type} codes greater than or equal to this value.
|
||||
*/
|
||||
public static final int TYPE_CUSTOM_BASE = 10000;
|
||||
|
||||
/**
|
||||
* Value of {@link #trigger} for a load whose reason is unspecified.
|
||||
*/
|
||||
|
|
@ -56,20 +73,24 @@ public abstract class Chunk implements Loadable {
|
|||
public static final int TRIGGER_CUSTOM_BASE = 10000;
|
||||
|
||||
/**
|
||||
* The format associated with the data being loaded.
|
||||
* The type of the chunk. For reporting only.
|
||||
*/
|
||||
// TODO: Consider removing this and pushing it down into MediaChunk instead.
|
||||
public final Format format;
|
||||
public final int type;
|
||||
/**
|
||||
* The reason for a {@link ChunkSource} having generated this chunk. For reporting only. Possible
|
||||
* values for this variable are defined by the specific {@link ChunkSource} implementations.
|
||||
* The reason why the chunk was generated. For reporting only.
|
||||
*/
|
||||
public final int trigger;
|
||||
/**
|
||||
* The format associated with the data being loaded, or null if the data being loaded is not
|
||||
* associated with a specific format.
|
||||
*/
|
||||
public final Format format;
|
||||
/**
|
||||
* The {@link DataSpec} that defines the data to be loaded.
|
||||
*/
|
||||
public final DataSpec dataSpec;
|
||||
|
||||
private final DataSource dataSource;
|
||||
private final DataSpec dataSpec;
|
||||
|
||||
private DataSourceStream dataSourceStream;
|
||||
protected final DataSource dataSource;
|
||||
|
||||
/**
|
||||
* @param dataSource The source from which the data should be loaded.
|
||||
|
|
@ -77,64 +98,16 @@ public abstract class Chunk implements Loadable {
|
|||
* {@link Integer#MAX_VALUE}. If {@code dataSpec.length == C.LENGTH_UNBOUNDED} then
|
||||
* the length resolved by {@code dataSource.open(dataSpec)} must not exceed
|
||||
* {@link Integer#MAX_VALUE}.
|
||||
* @param format See {@link #format}.
|
||||
* @param type See {@link #type}.
|
||||
* @param trigger See {@link #trigger}.
|
||||
* @param format See {@link #format}.
|
||||
*/
|
||||
public Chunk(DataSource dataSource, DataSpec dataSpec, Format format, int trigger) {
|
||||
Assertions.checkState(dataSpec.length <= Integer.MAX_VALUE);
|
||||
public Chunk(DataSource dataSource, DataSpec dataSpec, int type, int trigger, Format format) {
|
||||
this.dataSource = Assertions.checkNotNull(dataSource);
|
||||
this.dataSpec = Assertions.checkNotNull(dataSpec);
|
||||
this.format = Assertions.checkNotNull(format);
|
||||
this.type = type;
|
||||
this.trigger = trigger;
|
||||
}
|
||||
|
||||
/**
|
||||
* Initializes the {@link Chunk}.
|
||||
*
|
||||
* @param allocator An {@link Allocator} from which the {@link Allocation} needed to contain the
|
||||
* data can be obtained.
|
||||
*/
|
||||
public final void init(Allocator allocator) {
|
||||
Assertions.checkState(dataSourceStream == null);
|
||||
dataSourceStream = new DataSourceStream(dataSource, dataSpec, allocator);
|
||||
}
|
||||
|
||||
/**
|
||||
* Releases the {@link Chunk}, releasing any backing {@link Allocation}s.
|
||||
*/
|
||||
public final void release() {
|
||||
if (dataSourceStream != null) {
|
||||
dataSourceStream.close();
|
||||
dataSourceStream = null;
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Gets the length of the chunk in bytes.
|
||||
*
|
||||
* @return The length of the chunk in bytes, or {@link C#LENGTH_UNBOUNDED} if the length has yet
|
||||
* to be determined.
|
||||
*/
|
||||
public final long getLength() {
|
||||
return dataSourceStream.getLength();
|
||||
}
|
||||
|
||||
/**
|
||||
* Whether the whole of the data has been consumed.
|
||||
*
|
||||
* @return True if the whole of the data has been consumed. False otherwise.
|
||||
*/
|
||||
public final boolean isReadFinished() {
|
||||
return dataSourceStream.isEndOfStream();
|
||||
}
|
||||
|
||||
/**
|
||||
* Whether the whole of the chunk has been loaded.
|
||||
*
|
||||
* @return True if the whole of the chunk has been loaded. False otherwise.
|
||||
*/
|
||||
public final boolean isLoadFinished() {
|
||||
return dataSourceStream.isLoadFinished();
|
||||
this.format = format;
|
||||
}
|
||||
|
||||
/**
|
||||
|
|
@ -142,65 +115,6 @@ public abstract class Chunk implements Loadable {
|
|||
*
|
||||
* @return The number of bytes that have been loaded.
|
||||
*/
|
||||
public final long bytesLoaded() {
|
||||
return dataSourceStream.getLoadPosition();
|
||||
}
|
||||
|
||||
/**
|
||||
* Causes loaded data to be consumed.
|
||||
*
|
||||
* @throws IOException If an error occurs consuming the loaded data.
|
||||
*/
|
||||
public final void consume() throws IOException {
|
||||
Assertions.checkState(dataSourceStream != null);
|
||||
consumeStream(dataSourceStream);
|
||||
}
|
||||
|
||||
/**
|
||||
* Invoked by {@link #consume()}. Implementations may override this method if they wish to
|
||||
* consume the loaded data at this point.
|
||||
* <p>
|
||||
* The default implementation is a no-op.
|
||||
*
|
||||
* @param stream The stream of loaded data.
|
||||
* @throws IOException If an error occurs consuming the loaded data.
|
||||
*/
|
||||
protected void consumeStream(NonBlockingInputStream stream) throws IOException {
|
||||
// Do nothing.
|
||||
}
|
||||
|
||||
protected final NonBlockingInputStream getNonBlockingInputStream() {
|
||||
return dataSourceStream;
|
||||
}
|
||||
|
||||
protected final void resetReadPosition() {
|
||||
if (dataSourceStream != null) {
|
||||
dataSourceStream.resetReadPosition();
|
||||
} else {
|
||||
// We haven't been initialized yet, so the read position must already be 0.
|
||||
}
|
||||
}
|
||||
|
||||
// Loadable implementation
|
||||
|
||||
@Override
|
||||
public final void cancelLoad() {
|
||||
dataSourceStream.cancelLoad();
|
||||
}
|
||||
|
||||
@Override
|
||||
public final boolean isLoadCanceled() {
|
||||
return dataSourceStream.isLoadCanceled();
|
||||
}
|
||||
|
||||
@Override
|
||||
public final void load() throws IOException, InterruptedException {
|
||||
TraceUtil.beginSection("chunkLoad");
|
||||
try {
|
||||
dataSourceStream.load();
|
||||
} finally {
|
||||
TraceUtil.endSection();
|
||||
}
|
||||
}
|
||||
public abstract long bytesLoaded();
|
||||
|
||||
}
|
||||
|
|
|
|||
|
|
@ -0,0 +1,144 @@
|
|||
/*
|
||||
* Copyright (C) 2014 The Android Open Source Project
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
*/
|
||||
package com.google.android.exoplayer.chunk;
|
||||
|
||||
import com.google.android.exoplayer.MediaFormat;
|
||||
import com.google.android.exoplayer.drm.DrmInitData;
|
||||
import com.google.android.exoplayer.extractor.Extractor;
|
||||
import com.google.android.exoplayer.extractor.ExtractorInput;
|
||||
import com.google.android.exoplayer.extractor.ExtractorOutput;
|
||||
import com.google.android.exoplayer.extractor.SeekMap;
|
||||
import com.google.android.exoplayer.extractor.TrackOutput;
|
||||
import com.google.android.exoplayer.util.Assertions;
|
||||
import com.google.android.exoplayer.util.ParsableByteArray;
|
||||
|
||||
import java.io.IOException;
|
||||
|
||||
/**
|
||||
* An {@link Extractor} wrapper for loading chunks containing a single track.
|
||||
* <p>
|
||||
* The wrapper allows switching of the {@link SingleTrackOutput} that receives parsed data.
|
||||
*/
|
||||
public final class ChunkExtractorWrapper implements ExtractorOutput, TrackOutput {
|
||||
|
||||
/**
|
||||
* Receives stream level data extracted by the wrapped {@link Extractor}.
|
||||
*/
|
||||
public interface SingleTrackOutput extends TrackOutput {
|
||||
|
||||
/**
|
||||
* @see ExtractorOutput#seekMap(SeekMap)
|
||||
*/
|
||||
void seekMap(SeekMap seekMap);
|
||||
|
||||
/**
|
||||
* @see ExtractorOutput#drmInitData(DrmInitData)
|
||||
*/
|
||||
void drmInitData(DrmInitData drmInitData);
|
||||
|
||||
}
|
||||
|
||||
private final Extractor extractor;
|
||||
private boolean extractorInitialized;
|
||||
private SingleTrackOutput output;
|
||||
|
||||
// Accessed only on the loader thread.
|
||||
private boolean seenTrack;
|
||||
|
||||
/**
|
||||
* @param extractor The extractor to wrap.
|
||||
*/
|
||||
public ChunkExtractorWrapper(Extractor extractor) {
|
||||
this.extractor = extractor;
|
||||
}
|
||||
|
||||
/**
|
||||
* Initializes the extractor to output to the provided {@link SingleTrackOutput}, and configures
|
||||
* it to receive data from a new chunk.
|
||||
*
|
||||
* @param output The {@link SingleTrackOutput} that will receive the parsed data.
|
||||
*/
|
||||
public void init(SingleTrackOutput output) {
|
||||
this.output = output;
|
||||
if (!extractorInitialized) {
|
||||
extractor.init(this);
|
||||
extractorInitialized = true;
|
||||
} else {
|
||||
extractor.seek();
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Reads from the provided {@link ExtractorInput}.
|
||||
*
|
||||
* @param input The {@link ExtractorInput} from which to read.
|
||||
* @return One of {@link Extractor#RESULT_CONTINUE} and {@link Extractor#RESULT_END_OF_INPUT}.
|
||||
* @throws IOException If an error occurred reading from the source.
|
||||
* @throws InterruptedException If the thread was interrupted.
|
||||
*/
|
||||
public int read(ExtractorInput input) throws IOException, InterruptedException {
|
||||
int result = extractor.read(input, null);
|
||||
Assertions.checkState(result != Extractor.RESULT_SEEK);
|
||||
return result;
|
||||
}
|
||||
|
||||
// ExtractorOutput implementation.
|
||||
|
||||
@Override
|
||||
public TrackOutput track(int id) {
|
||||
Assertions.checkState(!seenTrack);
|
||||
seenTrack = true;
|
||||
return this;
|
||||
}
|
||||
|
||||
@Override
|
||||
public void endTracks() {
|
||||
Assertions.checkState(seenTrack);
|
||||
}
|
||||
|
||||
@Override
|
||||
public void seekMap(SeekMap seekMap) {
|
||||
output.seekMap(seekMap);
|
||||
}
|
||||
|
||||
@Override
|
||||
public void drmInitData(DrmInitData drmInitData) {
|
||||
output.drmInitData(drmInitData);
|
||||
}
|
||||
|
||||
// TrackOutput implementation.
|
||||
|
||||
@Override
|
||||
public void format(MediaFormat format) {
|
||||
output.format(format);
|
||||
}
|
||||
|
||||
@Override
|
||||
public int sampleData(ExtractorInput input, int length) throws IOException, InterruptedException {
|
||||
return output.sampleData(input, length);
|
||||
}
|
||||
|
||||
@Override
|
||||
public void sampleData(ParsableByteArray data, int length) {
|
||||
output.sampleData(data, length);
|
||||
}
|
||||
|
||||
@Override
|
||||
public void sampleMetadata(long timeUs, int flags, int size, int offset, byte[] encryptionKey) {
|
||||
output.sampleMetadata(timeUs, flags, size, offset, encryptionKey);
|
||||
}
|
||||
|
||||
}
|
||||
|
|
@ -23,6 +23,7 @@ import com.google.android.exoplayer.SampleHolder;
|
|||
import com.google.android.exoplayer.SampleSource;
|
||||
import com.google.android.exoplayer.TrackInfo;
|
||||
import com.google.android.exoplayer.TrackRenderer;
|
||||
import com.google.android.exoplayer.extractor.DefaultTrackOutput;
|
||||
import com.google.android.exoplayer.upstream.Loader;
|
||||
import com.google.android.exoplayer.upstream.Loader.Loadable;
|
||||
import com.google.android.exoplayer.util.Assertions;
|
||||
|
|
@ -32,7 +33,6 @@ import android.os.SystemClock;
|
|||
|
||||
import java.io.IOException;
|
||||
import java.util.Collections;
|
||||
import java.util.Iterator;
|
||||
import java.util.LinkedList;
|
||||
import java.util.List;
|
||||
|
||||
|
|
@ -45,94 +45,7 @@ public class ChunkSampleSource implements SampleSource, Loader.Callback {
|
|||
/**
|
||||
* Interface definition for a callback to be notified of {@link ChunkSampleSource} events.
|
||||
*/
|
||||
public interface EventListener {
|
||||
|
||||
/**
|
||||
* Invoked when an upstream load is started.
|
||||
*
|
||||
* @param sourceId The id of the reporting {@link SampleSource}.
|
||||
* @param formatId The format id.
|
||||
* @param trigger A trigger for the format selection, as specified by the {@link ChunkSource}.
|
||||
* @param isInitialization Whether the load is for format initialization data.
|
||||
* @param mediaStartTimeMs The media time of the start of the data being loaded, or -1 if this
|
||||
* load is for initialization data.
|
||||
* @param mediaEndTimeMs The media time of the end of the data being loaded, or -1 if this
|
||||
* load is for initialization data.
|
||||
* @param length The length of the data being loaded in bytes, or {@link C#LENGTH_UNBOUNDED} if
|
||||
* the length of the data has not yet been determined.
|
||||
*/
|
||||
void onLoadStarted(int sourceId, String formatId, int trigger, boolean isInitialization,
|
||||
int mediaStartTimeMs, int mediaEndTimeMs, long length);
|
||||
|
||||
/**
|
||||
* Invoked when the current load operation completes.
|
||||
*
|
||||
* @param sourceId The id of the reporting {@link SampleSource}.
|
||||
* @param bytesLoaded The number of bytes that were loaded.
|
||||
*/
|
||||
void onLoadCompleted(int sourceId, long bytesLoaded);
|
||||
|
||||
/**
|
||||
* Invoked when the current upstream load operation is canceled.
|
||||
*
|
||||
* @param sourceId The id of the reporting {@link SampleSource}.
|
||||
* @param bytesLoaded The number of bytes that were loaded prior to the cancellation.
|
||||
*/
|
||||
void onLoadCanceled(int sourceId, long bytesLoaded);
|
||||
|
||||
/**
|
||||
* Invoked when data is removed from the back of the buffer, typically so that it can be
|
||||
* re-buffered using a different representation.
|
||||
*
|
||||
* @param sourceId The id of the reporting {@link SampleSource}.
|
||||
* @param mediaStartTimeMs The media time of the start of the discarded data.
|
||||
* @param mediaEndTimeMs The media time of the end of the discarded data.
|
||||
* @param bytesDiscarded The length of the data being discarded in bytes.
|
||||
*/
|
||||
void onUpstreamDiscarded(int sourceId, int mediaStartTimeMs, int mediaEndTimeMs,
|
||||
long bytesDiscarded);
|
||||
|
||||
/**
|
||||
* Invoked when an error occurs loading media data.
|
||||
*
|
||||
* @param sourceId The id of the reporting {@link SampleSource}.
|
||||
* @param e The cause of the failure.
|
||||
*/
|
||||
void onUpstreamError(int sourceId, IOException e);
|
||||
|
||||
/**
|
||||
* Invoked when an error occurs consuming loaded data.
|
||||
*
|
||||
* @param sourceId The id of the reporting {@link SampleSource}.
|
||||
* @param e The cause of the failure.
|
||||
*/
|
||||
void onConsumptionError(int sourceId, IOException e);
|
||||
|
||||
/**
|
||||
* Invoked when data is removed from the front of the buffer, typically due to a seek or
|
||||
* because the data has been consumed.
|
||||
*
|
||||
* @param sourceId The id of the reporting {@link SampleSource}.
|
||||
* @param mediaStartTimeMs The media time of the start of the discarded data.
|
||||
* @param mediaEndTimeMs The media time of the end of the discarded data.
|
||||
* @param bytesDiscarded The length of the data being discarded in bytes.
|
||||
*/
|
||||
void onDownstreamDiscarded(int sourceId, int mediaStartTimeMs, int mediaEndTimeMs,
|
||||
long bytesDiscarded);
|
||||
|
||||
/**
|
||||
* Invoked when the downstream format changes (i.e. when the format being supplied to the
|
||||
* caller of {@link SampleSource#readData} changes).
|
||||
*
|
||||
* @param sourceId The id of the reporting {@link SampleSource}.
|
||||
* @param formatId The format id.
|
||||
* @param trigger The trigger specified in the corresponding upstream load, as specified by the
|
||||
* {@link ChunkSource}.
|
||||
* @param mediaTimeMs The media time at which the change occurred.
|
||||
*/
|
||||
void onDownstreamFormatChanged(int sourceId, String formatId, int trigger, int mediaTimeMs);
|
||||
|
||||
}
|
||||
public interface EventListener extends BaseChunkSampleSourceEventListener {}
|
||||
|
||||
/**
|
||||
* The default minimum number of times to retry loading data prior to failing.
|
||||
|
|
@ -149,8 +62,9 @@ public class ChunkSampleSource implements SampleSource, Loader.Callback {
|
|||
private final LoadControl loadControl;
|
||||
private final ChunkSource chunkSource;
|
||||
private final ChunkOperationHolder currentLoadableHolder;
|
||||
private final LinkedList<MediaChunk> mediaChunks;
|
||||
private final List<MediaChunk> readOnlyMediaChunks;
|
||||
private final LinkedList<BaseMediaChunk> mediaChunks;
|
||||
private final List<BaseMediaChunk> readOnlyMediaChunks;
|
||||
private final DefaultTrackOutput sampleQueue;
|
||||
private final int bufferSizeContribution;
|
||||
private final boolean frameAccurateSeeking;
|
||||
private final Handler eventHandler;
|
||||
|
|
@ -165,6 +79,7 @@ public class ChunkSampleSource implements SampleSource, Loader.Callback {
|
|||
private boolean pendingDiscontinuity;
|
||||
|
||||
private Loader loader;
|
||||
private boolean loadingFinished;
|
||||
private IOException currentLoadableException;
|
||||
private boolean currentLoadableExceptionFatal;
|
||||
private int currentLoadableExceptionCount;
|
||||
|
|
@ -197,9 +112,11 @@ public class ChunkSampleSource implements SampleSource, Loader.Callback {
|
|||
this.eventSourceId = eventSourceId;
|
||||
this.minLoadableRetryCount = minLoadableRetryCount;
|
||||
currentLoadableHolder = new ChunkOperationHolder();
|
||||
mediaChunks = new LinkedList<MediaChunk>();
|
||||
mediaChunks = new LinkedList<BaseMediaChunk>();
|
||||
readOnlyMediaChunks = Collections.unmodifiableList(mediaChunks);
|
||||
sampleQueue = new DefaultTrackOutput(loadControl.getAllocator());
|
||||
state = STATE_UNPREPARED;
|
||||
pendingResetPositionUs = NO_RESET_PENDING;
|
||||
}
|
||||
|
||||
/**
|
||||
|
|
@ -259,7 +176,8 @@ public class ChunkSampleSource implements SampleSource, Loader.Callback {
|
|||
if (loader.isLoading()) {
|
||||
loader.cancelLoading();
|
||||
} else {
|
||||
clearMediaChunks();
|
||||
sampleQueue.clear();
|
||||
mediaChunks.clear();
|
||||
clearCurrentLoadable();
|
||||
loadControl.trimAllocator();
|
||||
}
|
||||
|
|
@ -273,23 +191,11 @@ public class ChunkSampleSource implements SampleSource, Loader.Callback {
|
|||
chunkSource.continueBuffering(positionUs);
|
||||
updateLoadControl();
|
||||
|
||||
boolean haveSamples = false;
|
||||
if (isPendingReset() || mediaChunks.isEmpty()) {
|
||||
// No sample available.
|
||||
} else if (sampleAvailableOrFinishedLastChunk(mediaChunks.getFirst())) {
|
||||
// There's a sample available to be read from the current chunk.
|
||||
haveSamples = true;
|
||||
} else {
|
||||
// It may be the case that the current chunk has been fully read but not yet discarded and
|
||||
// that the next chunk has an available sample. Return true if so, otherwise false.
|
||||
haveSamples = mediaChunks.size() > 1
|
||||
&& sampleAvailableOrFinishedLastChunk(mediaChunks.get(1));
|
||||
}
|
||||
|
||||
boolean haveSamples = !sampleQueue.isEmpty();
|
||||
if (!haveSamples) {
|
||||
maybeThrowLoadableException();
|
||||
}
|
||||
return haveSamples;
|
||||
return loadingFinished || haveSamples;
|
||||
}
|
||||
|
||||
@Override
|
||||
|
|
@ -297,6 +203,7 @@ public class ChunkSampleSource implements SampleSource, Loader.Callback {
|
|||
SampleHolder sampleHolder, boolean onlyReadDiscontinuity) throws IOException {
|
||||
Assertions.checkState(state == STATE_ENABLED);
|
||||
Assertions.checkState(track == 0);
|
||||
downstreamPositionUs = positionUs;
|
||||
|
||||
if (pendingDiscontinuity) {
|
||||
pendingDiscontinuity = false;
|
||||
|
|
@ -307,85 +214,79 @@ public class ChunkSampleSource implements SampleSource, Loader.Callback {
|
|||
return NOTHING_READ;
|
||||
}
|
||||
|
||||
downstreamPositionUs = positionUs;
|
||||
if (isPendingReset()) {
|
||||
maybeThrowLoadableException();
|
||||
IOException chunkSourceException = chunkSource.getError();
|
||||
if (chunkSourceException != null) {
|
||||
throw chunkSourceException;
|
||||
}
|
||||
return NOTHING_READ;
|
||||
}
|
||||
|
||||
MediaChunk mediaChunk = mediaChunks.getFirst();
|
||||
if (mediaChunk.isReadFinished()) {
|
||||
// We've read all of the samples from the current media chunk.
|
||||
if (mediaChunks.size() > 1) {
|
||||
discardDownstreamMediaChunk();
|
||||
mediaChunk = mediaChunks.getFirst();
|
||||
mediaChunk.seekToStart();
|
||||
return readData(track, positionUs, formatHolder, sampleHolder, false);
|
||||
} else if (mediaChunk.isLastChunk()) {
|
||||
boolean haveSamples = !sampleQueue.isEmpty();
|
||||
BaseMediaChunk currentChunk = mediaChunks.getFirst();
|
||||
while (haveSamples && mediaChunks.size() > 1
|
||||
&& mediaChunks.get(1).getFirstSampleIndex() == sampleQueue.getReadIndex()) {
|
||||
mediaChunks.removeFirst();
|
||||
currentChunk = mediaChunks.getFirst();
|
||||
}
|
||||
|
||||
if (downstreamFormat == null || !downstreamFormat.equals(currentChunk.format)) {
|
||||
notifyDownstreamFormatChanged(currentChunk.format, currentChunk.trigger,
|
||||
currentChunk.startTimeUs);
|
||||
downstreamFormat = currentChunk.format;
|
||||
}
|
||||
|
||||
if (haveSamples || currentChunk.isFormatFinal) {
|
||||
MediaFormat mediaFormat = currentChunk.getMediaFormat();
|
||||
if (mediaFormat != null && !mediaFormat.equals(downstreamMediaFormat, true)) {
|
||||
chunkSource.getMaxVideoDimensions(mediaFormat);
|
||||
formatHolder.format = mediaFormat;
|
||||
formatHolder.drmInitData = currentChunk.getDrmInitData();
|
||||
downstreamMediaFormat = mediaFormat;
|
||||
return FORMAT_READ;
|
||||
}
|
||||
}
|
||||
|
||||
if (!haveSamples) {
|
||||
if (loadingFinished) {
|
||||
return END_OF_STREAM;
|
||||
}
|
||||
IOException chunkSourceException = chunkSource.getError();
|
||||
if (chunkSourceException != null) {
|
||||
throw chunkSourceException;
|
||||
}
|
||||
return NOTHING_READ;
|
||||
}
|
||||
|
||||
if (downstreamFormat == null || !downstreamFormat.equals(mediaChunk.format)) {
|
||||
notifyDownstreamFormatChanged(mediaChunk.format.id, mediaChunk.trigger,
|
||||
mediaChunk.startTimeUs);
|
||||
downstreamFormat = mediaChunk.format;
|
||||
}
|
||||
|
||||
if (!mediaChunk.prepare()) {
|
||||
if (currentLoadableException != null) {
|
||||
throw currentLoadableException;
|
||||
}
|
||||
return NOTHING_READ;
|
||||
}
|
||||
|
||||
MediaFormat mediaFormat = mediaChunk.getMediaFormat();
|
||||
if (mediaFormat != null && !mediaFormat.equals(downstreamMediaFormat, true)) {
|
||||
chunkSource.getMaxVideoDimensions(mediaFormat);
|
||||
formatHolder.format = mediaFormat;
|
||||
formatHolder.drmInitData = mediaChunk.getDrmInitData();
|
||||
downstreamMediaFormat = mediaFormat;
|
||||
return FORMAT_READ;
|
||||
}
|
||||
|
||||
if (mediaChunk.read(sampleHolder)) {
|
||||
boolean decodeOnly = frameAccurateSeeking && sampleHolder.timeUs < lastSeekPositionUs;
|
||||
sampleHolder.flags |= decodeOnly ? C.SAMPLE_FLAG_DECODE_ONLY : 0;
|
||||
onSampleRead(mediaChunk, sampleHolder);
|
||||
return SAMPLE_READ;
|
||||
} else {
|
||||
maybeThrowLoadableException();
|
||||
return NOTHING_READ;
|
||||
}
|
||||
|
||||
if (sampleQueue.getSample(sampleHolder)) {
|
||||
boolean decodeOnly = frameAccurateSeeking && sampleHolder.timeUs < lastSeekPositionUs;
|
||||
sampleHolder.flags |= decodeOnly ? C.SAMPLE_FLAG_DECODE_ONLY : 0;
|
||||
onSampleRead(currentChunk, sampleHolder);
|
||||
return SAMPLE_READ;
|
||||
}
|
||||
|
||||
maybeThrowLoadableException();
|
||||
return NOTHING_READ;
|
||||
}
|
||||
|
||||
@Override
|
||||
public void seekToUs(long positionUs) {
|
||||
Assertions.checkState(state == STATE_ENABLED);
|
||||
downstreamPositionUs = positionUs;
|
||||
lastSeekPositionUs = positionUs;
|
||||
if (pendingResetPositionUs == positionUs) {
|
||||
if ((isPendingReset() ? pendingResetPositionUs : downstreamPositionUs) == positionUs) {
|
||||
return;
|
||||
}
|
||||
|
||||
MediaChunk mediaChunk = getMediaChunk(positionUs);
|
||||
if (mediaChunk == null) {
|
||||
restartFrom(positionUs);
|
||||
pendingDiscontinuity = true;
|
||||
downstreamPositionUs = positionUs;
|
||||
// If we're not pending a reset, see if we can seek within the sample queue.
|
||||
boolean seekInsideBuffer = !isPendingReset() && sampleQueue.skipToKeyframeBefore(positionUs);
|
||||
if (seekInsideBuffer) {
|
||||
// We succeeded. All we need to do is discard any chunks that we've moved past.
|
||||
boolean haveSamples = !sampleQueue.isEmpty();
|
||||
while (haveSamples && mediaChunks.size() > 1
|
||||
&& mediaChunks.get(1).getFirstSampleIndex() <= sampleQueue.getReadIndex()) {
|
||||
mediaChunks.removeFirst();
|
||||
}
|
||||
} else {
|
||||
pendingDiscontinuity |= mediaChunk.seekTo(positionUs, mediaChunk == mediaChunks.getFirst());
|
||||
discardDownstreamMediaChunks(mediaChunk);
|
||||
updateLoadControl();
|
||||
// We failed, and need to restart.
|
||||
restartFrom(positionUs);
|
||||
}
|
||||
// Either way, we need to send a discontinuity to the downstream components.
|
||||
pendingDiscontinuity = true;
|
||||
}
|
||||
|
||||
private void maybeThrowLoadableException() throws IOException {
|
||||
|
|
@ -393,19 +294,12 @@ public class ChunkSampleSource implements SampleSource, Loader.Callback {
|
|||
|| currentLoadableExceptionCount > minLoadableRetryCount)) {
|
||||
throw currentLoadableException;
|
||||
}
|
||||
}
|
||||
|
||||
private MediaChunk getMediaChunk(long positionUs) {
|
||||
Iterator<MediaChunk> mediaChunkIterator = mediaChunks.iterator();
|
||||
while (mediaChunkIterator.hasNext()) {
|
||||
MediaChunk mediaChunk = mediaChunkIterator.next();
|
||||
if (positionUs < mediaChunk.startTimeUs) {
|
||||
return null;
|
||||
} else if (mediaChunk.isLastChunk() || positionUs < mediaChunk.endTimeUs) {
|
||||
return mediaChunk;
|
||||
if (sampleQueue.isEmpty() && currentLoadableHolder.chunk == null) {
|
||||
IOException chunkSourceException = chunkSource.getError();
|
||||
if (chunkSourceException != null) {
|
||||
throw chunkSourceException;
|
||||
}
|
||||
}
|
||||
return null;
|
||||
}
|
||||
|
||||
@Override
|
||||
|
|
@ -413,22 +307,12 @@ public class ChunkSampleSource implements SampleSource, Loader.Callback {
|
|||
Assertions.checkState(state == STATE_ENABLED);
|
||||
if (isPendingReset()) {
|
||||
return pendingResetPositionUs;
|
||||
}
|
||||
MediaChunk mediaChunk = mediaChunks.getLast();
|
||||
Chunk currentLoadable = currentLoadableHolder.chunk;
|
||||
if (currentLoadable != null && mediaChunk == currentLoadable) {
|
||||
// Linearly interpolate partially-fetched chunk times.
|
||||
long chunkLength = mediaChunk.getLength();
|
||||
if (chunkLength != C.LENGTH_UNBOUNDED && chunkLength != 0) {
|
||||
return mediaChunk.startTimeUs + ((mediaChunk.endTimeUs - mediaChunk.startTimeUs) *
|
||||
mediaChunk.bytesLoaded()) / chunkLength;
|
||||
} else {
|
||||
return mediaChunk.startTimeUs;
|
||||
}
|
||||
} else if (mediaChunk.isLastChunk()) {
|
||||
} else if (loadingFinished) {
|
||||
return TrackRenderer.END_OF_TRACK_US;
|
||||
} else {
|
||||
return mediaChunk.endTimeUs;
|
||||
long largestParsedTimestampUs = sampleQueue.getLargestParsedTimestampUs();
|
||||
return largestParsedTimestampUs == Long.MIN_VALUE ? downstreamPositionUs
|
||||
: largestParsedTimestampUs;
|
||||
}
|
||||
}
|
||||
|
||||
|
|
@ -445,38 +329,26 @@ public class ChunkSampleSource implements SampleSource, Loader.Callback {
|
|||
@Override
|
||||
public void onLoadCompleted(Loadable loadable) {
|
||||
Chunk currentLoadable = currentLoadableHolder.chunk;
|
||||
chunkSource.onChunkLoadCompleted(currentLoadable);
|
||||
notifyLoadCompleted(currentLoadable.bytesLoaded());
|
||||
try {
|
||||
currentLoadable.consume();
|
||||
} catch (IOException e) {
|
||||
currentLoadableException = e;
|
||||
currentLoadableExceptionCount++;
|
||||
currentLoadableExceptionTimestamp = SystemClock.elapsedRealtime();
|
||||
currentLoadableExceptionFatal = true;
|
||||
notifyConsumptionError(e);
|
||||
} finally {
|
||||
if (!isMediaChunk(currentLoadable)) {
|
||||
currentLoadable.release();
|
||||
}
|
||||
if (!currentLoadableExceptionFatal) {
|
||||
clearCurrentLoadable();
|
||||
}
|
||||
updateLoadControl();
|
||||
if (isMediaChunk(currentLoadable)) {
|
||||
loadingFinished = ((BaseMediaChunk) currentLoadable).isLastChunk;
|
||||
}
|
||||
clearCurrentLoadable();
|
||||
updateLoadControl();
|
||||
}
|
||||
|
||||
@Override
|
||||
public void onLoadCanceled(Loadable loadable) {
|
||||
Chunk currentLoadable = currentLoadableHolder.chunk;
|
||||
notifyLoadCanceled(currentLoadable.bytesLoaded());
|
||||
if (!isMediaChunk(currentLoadable)) {
|
||||
currentLoadable.release();
|
||||
}
|
||||
clearCurrentLoadable();
|
||||
if (state == STATE_ENABLED) {
|
||||
restartFrom(pendingResetPositionUs);
|
||||
} else {
|
||||
clearMediaChunks();
|
||||
sampleQueue.clear();
|
||||
mediaChunks.clear();
|
||||
clearCurrentLoadable();
|
||||
loadControl.trimAllocator();
|
||||
}
|
||||
}
|
||||
|
|
@ -486,39 +358,41 @@ public class ChunkSampleSource implements SampleSource, Loader.Callback {
|
|||
currentLoadableException = e;
|
||||
currentLoadableExceptionCount++;
|
||||
currentLoadableExceptionTimestamp = SystemClock.elapsedRealtime();
|
||||
notifyUpstreamError(e);
|
||||
notifyLoadError(e);
|
||||
chunkSource.onChunkLoadError(currentLoadableHolder.chunk, e);
|
||||
updateLoadControl();
|
||||
}
|
||||
|
||||
/**
|
||||
* Called when a sample has been read from a {@link MediaChunk}. Can be used to perform any
|
||||
* modifications necessary before the sample is returned.
|
||||
* Called when a sample has been read. Can be used to perform any modifications necessary before
|
||||
* the sample is returned.
|
||||
*
|
||||
* @param mediaChunk The MediaChunk the sample was ready from.
|
||||
* @param sampleHolder The sample that has just been read.
|
||||
* @param mediaChunk The chunk from which the sample was obtained.
|
||||
* @param sampleHolder Holds the read sample.
|
||||
*/
|
||||
protected void onSampleRead(MediaChunk mediaChunk, SampleHolder sampleHolder) {
|
||||
// no-op
|
||||
// Do nothing.
|
||||
}
|
||||
|
||||
private void restartFrom(long positionUs) {
|
||||
pendingResetPositionUs = positionUs;
|
||||
loadingFinished = false;
|
||||
if (loader.isLoading()) {
|
||||
loader.cancelLoading();
|
||||
} else {
|
||||
clearMediaChunks();
|
||||
sampleQueue.clear();
|
||||
mediaChunks.clear();
|
||||
clearCurrentLoadable();
|
||||
updateLoadControl();
|
||||
}
|
||||
}
|
||||
|
||||
private void clearMediaChunks() {
|
||||
discardDownstreamMediaChunks(null);
|
||||
}
|
||||
|
||||
private void clearCurrentLoadable() {
|
||||
currentLoadableHolder.chunk = null;
|
||||
clearCurrentLoadableException();
|
||||
}
|
||||
|
||||
private void clearCurrentLoadableException() {
|
||||
currentLoadableException = null;
|
||||
currentLoadableExceptionCount = 0;
|
||||
currentLoadableExceptionFatal = false;
|
||||
|
|
@ -581,8 +455,8 @@ public class ChunkSampleSource implements SampleSource, Loader.Callback {
|
|||
if (isPendingReset()) {
|
||||
return pendingResetPositionUs;
|
||||
} else {
|
||||
MediaChunk lastMediaChunk = mediaChunks.getLast();
|
||||
return lastMediaChunk.nextChunkIndex == -1 ? -1 : lastMediaChunk.endTimeUs;
|
||||
BaseMediaChunk lastMediaChunk = mediaChunks.getLast();
|
||||
return lastMediaChunk.isLastChunk ? -1 : lastMediaChunk.endTimeUs;
|
||||
}
|
||||
}
|
||||
|
||||
|
|
@ -606,7 +480,9 @@ public class ChunkSampleSource implements SampleSource, Loader.Callback {
|
|||
// Chunk was unchanged. Resume loading.
|
||||
loader.startLoading(backedOffChunk, this);
|
||||
} else {
|
||||
backedOffChunk.release();
|
||||
// Chunk was changed. Notify that the existing load was canceled.
|
||||
notifyLoadCanceled(backedOffChunk.bytesLoaded());
|
||||
// Start loading the replacement.
|
||||
maybeStartLoading();
|
||||
}
|
||||
return;
|
||||
|
|
@ -621,7 +497,7 @@ public class ChunkSampleSource implements SampleSource, Loader.Callback {
|
|||
|
||||
// The current loadable is the last media chunk. Remove it before we invoke the chunk source,
|
||||
// and add it back again afterwards.
|
||||
MediaChunk removedChunk = mediaChunks.removeLast();
|
||||
BaseMediaChunk removedChunk = mediaChunks.removeLast();
|
||||
Assertions.checkState(backedOffChunk == removedChunk);
|
||||
currentLoadableHolder.queueSize = readOnlyMediaChunks.size();
|
||||
chunkSource.getChunkOperation(readOnlyMediaChunks, pendingResetPositionUs, downstreamPositionUs,
|
||||
|
|
@ -632,10 +508,12 @@ public class ChunkSampleSource implements SampleSource, Loader.Callback {
|
|||
// Chunk was unchanged. Resume loading.
|
||||
loader.startLoading(backedOffChunk, this);
|
||||
} else {
|
||||
// Chunk was changed. Notify that the existing load was canceled.
|
||||
notifyLoadCanceled(backedOffChunk.bytesLoaded());
|
||||
// This call will remove and release at least one chunk from the end of mediaChunks. Since
|
||||
// the current loadable is the last media chunk, it is guaranteed to be removed.
|
||||
discardUpstreamMediaChunks(currentLoadableHolder.queueSize);
|
||||
clearCurrentLoadable();
|
||||
clearCurrentLoadableException();
|
||||
maybeStartLoading();
|
||||
}
|
||||
}
|
||||
|
|
@ -646,55 +524,22 @@ public class ChunkSampleSource implements SampleSource, Loader.Callback {
|
|||
// Nothing to load.
|
||||
return;
|
||||
}
|
||||
currentLoadable.init(loadControl.getAllocator());
|
||||
if (isMediaChunk(currentLoadable)) {
|
||||
MediaChunk mediaChunk = (MediaChunk) currentLoadable;
|
||||
BaseMediaChunk mediaChunk = (BaseMediaChunk) currentLoadable;
|
||||
mediaChunk.init(sampleQueue);
|
||||
mediaChunks.add(mediaChunk);
|
||||
if (isPendingReset()) {
|
||||
mediaChunk.seekTo(pendingResetPositionUs, false);
|
||||
pendingResetPositionUs = NO_RESET_PENDING;
|
||||
}
|
||||
mediaChunks.add(mediaChunk);
|
||||
notifyLoadStarted(mediaChunk.format.id, mediaChunk.trigger, false,
|
||||
mediaChunk.startTimeUs, mediaChunk.endTimeUs, mediaChunk.getLength());
|
||||
notifyLoadStarted(mediaChunk.dataSpec.length, mediaChunk.type, mediaChunk.trigger,
|
||||
mediaChunk.format, mediaChunk.startTimeUs, mediaChunk.endTimeUs);
|
||||
} else {
|
||||
notifyLoadStarted(currentLoadable.format.id, currentLoadable.trigger, true, -1, -1,
|
||||
currentLoadable.getLength());
|
||||
notifyLoadStarted(currentLoadable.dataSpec.length, currentLoadable.type,
|
||||
currentLoadable.trigger, currentLoadable.format, -1, -1);
|
||||
}
|
||||
loader.startLoading(currentLoadable, this);
|
||||
}
|
||||
|
||||
/**
|
||||
* Discards downstream media chunks until {@code untilChunk} if found. {@code untilChunk} is not
|
||||
* itself discarded. Null can be passed to discard all media chunks.
|
||||
*
|
||||
* @param untilChunk The first media chunk to keep, or null to discard all media chunks.
|
||||
*/
|
||||
private void discardDownstreamMediaChunks(MediaChunk untilChunk) {
|
||||
if (mediaChunks.isEmpty() || untilChunk == mediaChunks.getFirst()) {
|
||||
return;
|
||||
}
|
||||
long totalBytes = 0;
|
||||
long startTimeUs = mediaChunks.getFirst().startTimeUs;
|
||||
long endTimeUs = 0;
|
||||
while (!mediaChunks.isEmpty() && untilChunk != mediaChunks.getFirst()) {
|
||||
MediaChunk removed = mediaChunks.removeFirst();
|
||||
totalBytes += removed.bytesLoaded();
|
||||
endTimeUs = removed.endTimeUs;
|
||||
removed.release();
|
||||
}
|
||||
notifyDownstreamDiscarded(startTimeUs, endTimeUs, totalBytes);
|
||||
}
|
||||
|
||||
/**
|
||||
* Discards the first downstream media chunk.
|
||||
*/
|
||||
private void discardDownstreamMediaChunk() {
|
||||
MediaChunk removed = mediaChunks.removeFirst();
|
||||
long totalBytes = removed.bytesLoaded();
|
||||
removed.release();
|
||||
notifyDownstreamDiscarded(removed.startTimeUs, removed.endTimeUs, totalBytes);
|
||||
}
|
||||
|
||||
/**
|
||||
* Discard upstream media chunks until the queue length is equal to the length specified.
|
||||
*
|
||||
|
|
@ -705,25 +550,22 @@ public class ChunkSampleSource implements SampleSource, Loader.Callback {
|
|||
if (mediaChunks.size() <= queueLength) {
|
||||
return false;
|
||||
}
|
||||
long totalBytes = 0;
|
||||
long startTimeUs = 0;
|
||||
long endTimeUs = mediaChunks.getLast().endTimeUs;
|
||||
|
||||
BaseMediaChunk removed = null;
|
||||
while (mediaChunks.size() > queueLength) {
|
||||
MediaChunk removed = mediaChunks.removeLast();
|
||||
totalBytes += removed.bytesLoaded();
|
||||
removed = mediaChunks.removeLast();
|
||||
startTimeUs = removed.startTimeUs;
|
||||
removed.release();
|
||||
}
|
||||
notifyUpstreamDiscarded(startTimeUs, endTimeUs, totalBytes);
|
||||
sampleQueue.discardUpstreamSamples(removed.getFirstSampleIndex());
|
||||
|
||||
notifyUpstreamDiscarded(startTimeUs, endTimeUs);
|
||||
return true;
|
||||
}
|
||||
|
||||
private boolean sampleAvailableOrFinishedLastChunk(MediaChunk chunk) throws IOException {
|
||||
return chunk.sampleAvailable() || (chunk.isLastChunk() && chunk.isReadFinished());
|
||||
}
|
||||
|
||||
private boolean isMediaChunk(Chunk chunk) {
|
||||
return chunk instanceof MediaChunk;
|
||||
return chunk instanceof BaseMediaChunk;
|
||||
}
|
||||
|
||||
private boolean isPendingReset() {
|
||||
|
|
@ -738,15 +580,14 @@ public class ChunkSampleSource implements SampleSource, Loader.Callback {
|
|||
return (int) (timeUs / 1000);
|
||||
}
|
||||
|
||||
private void notifyLoadStarted(final String formatId, final int trigger,
|
||||
final boolean isInitialization, final long mediaStartTimeUs, final long mediaEndTimeUs,
|
||||
final long length) {
|
||||
private void notifyLoadStarted(final long length, final int type, final int trigger,
|
||||
final Format format, final long mediaStartTimeUs, final long mediaEndTimeUs) {
|
||||
if (eventHandler != null && eventListener != null) {
|
||||
eventHandler.post(new Runnable() {
|
||||
@Override
|
||||
public void run() {
|
||||
eventListener.onLoadStarted(eventSourceId, formatId, trigger, isInitialization,
|
||||
usToMs(mediaStartTimeUs), usToMs(mediaEndTimeUs), length);
|
||||
eventListener.onLoadStarted(eventSourceId, length, type, trigger, format,
|
||||
usToMs(mediaStartTimeUs), usToMs(mediaEndTimeUs));
|
||||
}
|
||||
});
|
||||
}
|
||||
|
|
@ -774,65 +615,40 @@ public class ChunkSampleSource implements SampleSource, Loader.Callback {
|
|||
}
|
||||
}
|
||||
|
||||
private void notifyUpstreamError(final IOException e) {
|
||||
private void notifyLoadError(final IOException e) {
|
||||
if (eventHandler != null && eventListener != null) {
|
||||
eventHandler.post(new Runnable() {
|
||||
@Override
|
||||
public void run() {
|
||||
eventListener.onUpstreamError(eventSourceId, e);
|
||||
eventListener.onLoadError(eventSourceId, e);
|
||||
}
|
||||
});
|
||||
}
|
||||
}
|
||||
|
||||
private void notifyConsumptionError(final IOException e) {
|
||||
if (eventHandler != null && eventListener != null) {
|
||||
eventHandler.post(new Runnable() {
|
||||
@Override
|
||||
public void run() {
|
||||
eventListener.onConsumptionError(eventSourceId, e);
|
||||
}
|
||||
});
|
||||
}
|
||||
}
|
||||
|
||||
private void notifyUpstreamDiscarded(final long mediaStartTimeUs, final long mediaEndTimeUs,
|
||||
final long totalBytes) {
|
||||
private void notifyUpstreamDiscarded(final long mediaStartTimeUs, final long mediaEndTimeUs) {
|
||||
if (eventHandler != null && eventListener != null) {
|
||||
eventHandler.post(new Runnable() {
|
||||
@Override
|
||||
public void run() {
|
||||
eventListener.onUpstreamDiscarded(eventSourceId, usToMs(mediaStartTimeUs),
|
||||
usToMs(mediaEndTimeUs), totalBytes);
|
||||
usToMs(mediaEndTimeUs));
|
||||
}
|
||||
});
|
||||
}
|
||||
}
|
||||
|
||||
private void notifyDownstreamFormatChanged(final String formatId, final int trigger,
|
||||
private void notifyDownstreamFormatChanged(final Format format, final int trigger,
|
||||
final long positionUs) {
|
||||
if (eventHandler != null && eventListener != null) {
|
||||
eventHandler.post(new Runnable() {
|
||||
@Override
|
||||
public void run() {
|
||||
eventListener.onDownstreamFormatChanged(eventSourceId, formatId, trigger,
|
||||
eventListener.onDownstreamFormatChanged(eventSourceId, format, trigger,
|
||||
usToMs(positionUs));
|
||||
}
|
||||
});
|
||||
}
|
||||
}
|
||||
|
||||
private void notifyDownstreamDiscarded(final long mediaStartTimeUs, final long mediaEndTimeUs,
|
||||
final long bytesDiscarded) {
|
||||
if (eventHandler != null && eventListener != null) {
|
||||
eventHandler.post(new Runnable() {
|
||||
@Override
|
||||
public void run() {
|
||||
eventListener.onDownstreamDiscarded(eventSourceId, usToMs(mediaStartTimeUs),
|
||||
usToMs(mediaEndTimeUs), bytesDiscarded);
|
||||
}
|
||||
});
|
||||
}
|
||||
}
|
||||
|
||||
}
|
||||
|
|
|
|||
|
|
@ -102,6 +102,14 @@ public interface ChunkSource {
|
|||
*/
|
||||
IOException getError();
|
||||
|
||||
/**
|
||||
* Invoked when the {@link ChunkSampleSource} has finished loading a chunk obtained from this
|
||||
* source.
|
||||
*
|
||||
* @param chunk The chunk whose load has been completed.
|
||||
*/
|
||||
void onChunkLoadCompleted(Chunk chunk);
|
||||
|
||||
/**
|
||||
* Invoked when the {@link ChunkSampleSource} encounters an error loading a chunk obtained from
|
||||
* this source.
|
||||
|
|
|
|||
|
|
@ -16,126 +16,77 @@
|
|||
package com.google.android.exoplayer.chunk;
|
||||
|
||||
import com.google.android.exoplayer.MediaFormat;
|
||||
import com.google.android.exoplayer.ParserException;
|
||||
import com.google.android.exoplayer.SampleHolder;
|
||||
import com.google.android.exoplayer.chunk.parser.Extractor;
|
||||
import com.google.android.exoplayer.chunk.ChunkExtractorWrapper.SingleTrackOutput;
|
||||
import com.google.android.exoplayer.drm.DrmInitData;
|
||||
import com.google.android.exoplayer.extractor.DefaultExtractorInput;
|
||||
import com.google.android.exoplayer.extractor.DefaultTrackOutput;
|
||||
import com.google.android.exoplayer.extractor.Extractor;
|
||||
import com.google.android.exoplayer.extractor.ExtractorInput;
|
||||
import com.google.android.exoplayer.extractor.SeekMap;
|
||||
import com.google.android.exoplayer.upstream.DataSource;
|
||||
import com.google.android.exoplayer.upstream.DataSpec;
|
||||
import com.google.android.exoplayer.upstream.NonBlockingInputStream;
|
||||
import com.google.android.exoplayer.util.Assertions;
|
||||
import com.google.android.exoplayer.util.ParsableByteArray;
|
||||
import com.google.android.exoplayer.util.Util;
|
||||
|
||||
import android.util.Log;
|
||||
|
||||
import java.io.IOException;
|
||||
|
||||
/**
|
||||
* A {@link MediaChunk} extracted from a container.
|
||||
* A {@link BaseMediaChunk} that uses an {@link Extractor} to parse sample data.
|
||||
*/
|
||||
public final class ContainerMediaChunk extends MediaChunk {
|
||||
public class ContainerMediaChunk extends BaseMediaChunk implements SingleTrackOutput {
|
||||
|
||||
private final Extractor extractor;
|
||||
private final boolean maybeSelfContained;
|
||||
private static final String TAG = "ContainerMediaChunk";
|
||||
|
||||
private final ChunkExtractorWrapper extractorWrapper;
|
||||
private final long sampleOffsetUs;
|
||||
|
||||
private boolean prepared;
|
||||
private MediaFormat mediaFormat;
|
||||
private DrmInitData drmInitData;
|
||||
|
||||
/**
|
||||
* @deprecated Use the other constructor, passing null as {@code psshInfo}.
|
||||
*/
|
||||
@Deprecated
|
||||
public ContainerMediaChunk(DataSource dataSource, DataSpec dataSpec, Format format,
|
||||
int trigger, long startTimeUs, long endTimeUs, int nextChunkIndex,
|
||||
Extractor extractor, boolean maybeSelfContained, long sampleOffsetUs) {
|
||||
this(dataSource, dataSpec, format, trigger, startTimeUs, endTimeUs, nextChunkIndex,
|
||||
extractor, null, maybeSelfContained, sampleOffsetUs);
|
||||
}
|
||||
private volatile int bytesLoaded;
|
||||
private volatile boolean loadCanceled;
|
||||
|
||||
/**
|
||||
* @param dataSource A {@link DataSource} for loading the data.
|
||||
* @param dataSpec Defines the data to be loaded.
|
||||
* @param format The format of the stream to which this chunk belongs.
|
||||
* @param trigger The reason for this chunk being selected.
|
||||
* @param format The format of the stream to which this chunk belongs.
|
||||
* @param startTimeUs The start time of the media contained by the chunk, in microseconds.
|
||||
* @param endTimeUs The end time of the media contained by the chunk, in microseconds.
|
||||
* @param nextChunkIndex The index of the next chunk, or -1 if this is the last chunk.
|
||||
* @param extractor The extractor that will be used to extract the samples.
|
||||
* @param drmInitData DRM initialization data. May be null if DRM initialization data is present
|
||||
* within the stream, meaning it can be obtained directly from {@code extractor}, or if no
|
||||
* DRM initialization data is required.
|
||||
* @param maybeSelfContained Set to true if this chunk might be self contained, meaning it might
|
||||
* contain a moov atom defining the media format of the chunk. This parameter can always be
|
||||
* safely set to true. Setting to false where the chunk is known to not be self contained may
|
||||
* improve startup latency.
|
||||
* @param sampleOffsetUs An offset to subtract from the sample timestamps parsed by the extractor.
|
||||
* @param chunkIndex The index of the chunk.
|
||||
* @param isLastChunk True if this is the last chunk in the media. False otherwise.
|
||||
* @param sampleOffsetUs An offset to add to the sample timestamps parsed by the extractor.
|
||||
* @param extractorWrapper A wrapped extractor to use for parsing the data.
|
||||
* @param mediaFormat The {@link MediaFormat} of the chunk, if known. May be null if the data is
|
||||
* known to define its own format.
|
||||
* @param drmInitData The {@link DrmInitData} for the chunk. Null if the media is not drm
|
||||
* protected. May also be null if the data is known to define its own initialization data.
|
||||
* @param isFormatFinal True if {@code mediaFormat} and {@code drmInitData} are known to be
|
||||
* correct and final. False if the data may define its own format or initialization data.
|
||||
*/
|
||||
public ContainerMediaChunk(DataSource dataSource, DataSpec dataSpec, Format format,
|
||||
int trigger, long startTimeUs, long endTimeUs, int nextChunkIndex, Extractor extractor,
|
||||
DrmInitData drmInitData, boolean maybeSelfContained, long sampleOffsetUs) {
|
||||
super(dataSource, dataSpec, format, trigger, startTimeUs, endTimeUs, nextChunkIndex);
|
||||
this.extractor = extractor;
|
||||
this.maybeSelfContained = maybeSelfContained;
|
||||
public ContainerMediaChunk(DataSource dataSource, DataSpec dataSpec, int trigger, Format format,
|
||||
long startTimeUs, long endTimeUs, int chunkIndex, boolean isLastChunk, long sampleOffsetUs,
|
||||
ChunkExtractorWrapper extractorWrapper, MediaFormat mediaFormat, DrmInitData drmInitData,
|
||||
boolean isFormatFinal) {
|
||||
super(dataSource, dataSpec, trigger, format, startTimeUs, endTimeUs, chunkIndex, isLastChunk,
|
||||
isFormatFinal);
|
||||
this.extractorWrapper = extractorWrapper;
|
||||
this.sampleOffsetUs = sampleOffsetUs;
|
||||
this.mediaFormat = mediaFormat;
|
||||
this.drmInitData = drmInitData;
|
||||
}
|
||||
|
||||
@Override
|
||||
public void seekToStart() {
|
||||
extractor.seekTo(0, false);
|
||||
resetReadPosition();
|
||||
public long bytesLoaded() {
|
||||
return bytesLoaded;
|
||||
}
|
||||
|
||||
@Override
|
||||
public boolean seekTo(long positionUs, boolean allowNoop) {
|
||||
long seekTimeUs = positionUs + sampleOffsetUs;
|
||||
boolean isDiscontinuous = extractor.seekTo(seekTimeUs, allowNoop);
|
||||
if (isDiscontinuous) {
|
||||
resetReadPosition();
|
||||
}
|
||||
return isDiscontinuous;
|
||||
}
|
||||
|
||||
@Override
|
||||
public boolean prepare() throws ParserException {
|
||||
if (!prepared) {
|
||||
if (maybeSelfContained) {
|
||||
// Read up to the first sample. Once we're there, we know that the extractor must have
|
||||
// parsed a moov atom if the chunk contains one.
|
||||
NonBlockingInputStream inputStream = getNonBlockingInputStream();
|
||||
Assertions.checkState(inputStream != null);
|
||||
int result = extractor.read(inputStream, null);
|
||||
prepared = (result & Extractor.RESULT_NEED_SAMPLE_HOLDER) != 0;
|
||||
} else {
|
||||
// We know there isn't a moov atom. The extractor must have parsed one from a separate
|
||||
// initialization chunk.
|
||||
prepared = true;
|
||||
}
|
||||
if (prepared) {
|
||||
mediaFormat = extractor.getFormat();
|
||||
DrmInitData extractorDrmInitData = extractor.getDrmInitData();
|
||||
if (extractorDrmInitData != null) {
|
||||
drmInitData = extractorDrmInitData;
|
||||
}
|
||||
}
|
||||
}
|
||||
return prepared;
|
||||
}
|
||||
|
||||
@Override
|
||||
public boolean sampleAvailable() throws ParserException {
|
||||
NonBlockingInputStream inputStream = getNonBlockingInputStream();
|
||||
int result = extractor.read(inputStream, null);
|
||||
return (result & Extractor.RESULT_NEED_SAMPLE_HOLDER) != 0;
|
||||
}
|
||||
|
||||
@Override
|
||||
public boolean read(SampleHolder holder) throws ParserException {
|
||||
NonBlockingInputStream inputStream = getNonBlockingInputStream();
|
||||
Assertions.checkState(inputStream != null);
|
||||
int result = extractor.read(inputStream, holder);
|
||||
boolean sampleRead = (result & Extractor.RESULT_READ_SAMPLE) != 0;
|
||||
if (sampleRead) {
|
||||
holder.timeUs -= sampleOffsetUs;
|
||||
}
|
||||
return sampleRead;
|
||||
public void init(DefaultTrackOutput output) {
|
||||
super.init(output);
|
||||
extractorWrapper.init(this);
|
||||
}
|
||||
|
||||
@Override
|
||||
|
|
@ -148,4 +99,72 @@ public final class ContainerMediaChunk extends MediaChunk {
|
|||
return drmInitData;
|
||||
}
|
||||
|
||||
// SingleTrackOutput implementation.
|
||||
|
||||
@Override
|
||||
public void seekMap(SeekMap seekMap) {
|
||||
Log.w(TAG, "Ignoring unexpected seekMap");
|
||||
}
|
||||
|
||||
@Override
|
||||
public void drmInitData(DrmInitData drmInitData) {
|
||||
this.drmInitData = drmInitData;
|
||||
}
|
||||
|
||||
@Override
|
||||
public void format(MediaFormat mediaFormat) {
|
||||
this.mediaFormat = mediaFormat;
|
||||
}
|
||||
|
||||
@Override
|
||||
public int sampleData(ExtractorInput input, int length) throws IOException, InterruptedException {
|
||||
return getOutput().sampleData(input, length);
|
||||
}
|
||||
|
||||
@Override
|
||||
public void sampleData(ParsableByteArray data, int length) {
|
||||
getOutput().sampleData(data, length);
|
||||
}
|
||||
|
||||
@Override
|
||||
public void sampleMetadata(long timeUs, int flags, int size, int offset, byte[] encryptionKey) {
|
||||
getOutput().sampleMetadata(timeUs + sampleOffsetUs, flags, size, offset, encryptionKey);
|
||||
}
|
||||
|
||||
// Loadable implementation.
|
||||
|
||||
@Override
|
||||
public void cancelLoad() {
|
||||
loadCanceled = true;
|
||||
}
|
||||
|
||||
@Override
|
||||
public boolean isLoadCanceled() {
|
||||
return loadCanceled;
|
||||
}
|
||||
|
||||
@SuppressWarnings("NonAtomicVolatileUpdate")
|
||||
@Override
|
||||
public void load() throws IOException, InterruptedException {
|
||||
DataSpec loadDataSpec = Util.getRemainderDataSpec(dataSpec, bytesLoaded);
|
||||
try {
|
||||
// Create and open the input.
|
||||
ExtractorInput input = new DefaultExtractorInput(dataSource, dataSpec.absoluteStreamPosition,
|
||||
dataSource.open(loadDataSpec));
|
||||
// Set the target to ourselves.
|
||||
extractorWrapper.init(this);
|
||||
// Load and parse the initialization data.
|
||||
try {
|
||||
int result = Extractor.RESULT_CONTINUE;
|
||||
while (result == Extractor.RESULT_CONTINUE && !loadCanceled) {
|
||||
result = extractorWrapper.read(input);
|
||||
}
|
||||
} finally {
|
||||
bytesLoaded += (int) (input.getPosition() - dataSpec.absoluteStreamPosition);
|
||||
}
|
||||
} finally {
|
||||
dataSource.close();
|
||||
}
|
||||
}
|
||||
|
||||
}
|
||||
|
|
|
|||
|
|
@ -13,7 +13,7 @@
|
|||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
*/
|
||||
package com.google.android.exoplayer.hls;
|
||||
package com.google.android.exoplayer.chunk;
|
||||
|
||||
import com.google.android.exoplayer.upstream.DataSource;
|
||||
import com.google.android.exoplayer.upstream.DataSpec;
|
||||
|
|
@ -22,17 +22,16 @@ import java.io.IOException;
|
|||
import java.util.Arrays;
|
||||
|
||||
/**
|
||||
* An abstract base class for {@link HlsChunk} implementations where the data should be loaded into
|
||||
* a {@code byte[]} before being consumed.
|
||||
* A base class for {@link Chunk} implementations where the data should be loaded into a
|
||||
* {@code byte[]} before being consumed.
|
||||
*/
|
||||
public abstract class DataChunk extends HlsChunk {
|
||||
public abstract class DataChunk extends Chunk {
|
||||
|
||||
private static final int READ_GRANULARITY = 16 * 1024;
|
||||
|
||||
private byte[] data;
|
||||
private int limit;
|
||||
|
||||
private volatile boolean loadFinished;
|
||||
private volatile boolean loadCanceled;
|
||||
|
||||
/**
|
||||
|
|
@ -41,36 +40,31 @@ public abstract class DataChunk extends HlsChunk {
|
|||
* {@link Integer#MAX_VALUE}. If {@code dataSpec.length == C.LENGTH_UNBOUNDED} then
|
||||
* the length resolved by {@code dataSource.open(dataSpec)} must not exceed
|
||||
* {@link Integer#MAX_VALUE}.
|
||||
* @param type See {@link #type}.
|
||||
* @param trigger See {@link #trigger}.
|
||||
* @param format See {@link #format}.
|
||||
* @param data An optional recycled array that can be used as a holder for the data.
|
||||
*/
|
||||
public DataChunk(DataSource dataSource, DataSpec dataSpec, byte[] data) {
|
||||
super(dataSource, dataSpec);
|
||||
public DataChunk(DataSource dataSource, DataSpec dataSpec, int type, int trigger, Format format,
|
||||
byte[] data) {
|
||||
super(dataSource, dataSpec, type, trigger, format);
|
||||
this.data = data;
|
||||
}
|
||||
|
||||
@Override
|
||||
public void consume() throws IOException {
|
||||
consume(data, limit);
|
||||
/**
|
||||
* Returns the array in which the data is held.
|
||||
* <p>
|
||||
* This method should be used for recycling the holder only, and not for reading the data.
|
||||
*
|
||||
* @return The array in which the data is held.
|
||||
*/
|
||||
public byte[] getDataHolder() {
|
||||
return data;
|
||||
}
|
||||
|
||||
/**
|
||||
* Invoked by {@link #consume()}. Implementations should override this method to consume the
|
||||
* loaded data.
|
||||
*
|
||||
* @param data An array containing the data.
|
||||
* @param limit The limit of the data.
|
||||
* @throws IOException If an error occurs consuming the loaded data.
|
||||
*/
|
||||
protected abstract void consume(byte[] data, int limit) throws IOException;
|
||||
|
||||
/**
|
||||
* Whether the whole of the chunk has been loaded.
|
||||
*
|
||||
* @return True if the whole of the chunk has been loaded. False otherwise.
|
||||
*/
|
||||
@Override
|
||||
public boolean isLoadFinished() {
|
||||
return loadFinished;
|
||||
public long bytesLoaded() {
|
||||
return limit;
|
||||
}
|
||||
|
||||
// Loadable implementation
|
||||
|
|
@ -98,12 +92,24 @@ public abstract class DataChunk extends HlsChunk {
|
|||
limit += bytesRead;
|
||||
}
|
||||
}
|
||||
loadFinished = !loadCanceled;
|
||||
if (!loadCanceled) {
|
||||
consume(data, limit);
|
||||
}
|
||||
} finally {
|
||||
dataSource.close();
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Invoked by {@link #load()}. Implementations should override this method to consume the loaded
|
||||
* data.
|
||||
*
|
||||
* @param data An array containing the data.
|
||||
* @param limit The limit of the data.
|
||||
* @throws IOException If an error occurs consuming the loaded data.
|
||||
*/
|
||||
protected abstract void consume(byte[] data, int limit) throws IOException;
|
||||
|
||||
private void maybeExpandData() {
|
||||
if (data == null) {
|
||||
data = new byte[READ_GRANULARITY];
|
||||
|
|
@ -0,0 +1,191 @@
|
|||
/*
|
||||
* Copyright (C) 2014 The Android Open Source Project
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
*/
|
||||
package com.google.android.exoplayer.chunk;
|
||||
|
||||
import com.google.android.exoplayer.MediaFormat;
|
||||
import com.google.android.exoplayer.chunk.ChunkExtractorWrapper.SingleTrackOutput;
|
||||
import com.google.android.exoplayer.drm.DrmInitData;
|
||||
import com.google.android.exoplayer.extractor.DefaultExtractorInput;
|
||||
import com.google.android.exoplayer.extractor.Extractor;
|
||||
import com.google.android.exoplayer.extractor.ExtractorInput;
|
||||
import com.google.android.exoplayer.extractor.SeekMap;
|
||||
import com.google.android.exoplayer.upstream.DataSource;
|
||||
import com.google.android.exoplayer.upstream.DataSpec;
|
||||
import com.google.android.exoplayer.util.ParsableByteArray;
|
||||
import com.google.android.exoplayer.util.Util;
|
||||
|
||||
import java.io.IOException;
|
||||
|
||||
/**
|
||||
* A {@link Chunk} that uses an {@link Extractor} to parse initialization data for single track.
|
||||
*/
|
||||
public final class InitializationChunk extends Chunk implements SingleTrackOutput {
|
||||
|
||||
private final ChunkExtractorWrapper extractorWrapper;
|
||||
|
||||
// Initialization results. Set by the loader thread and read by any thread that knows loading
|
||||
// has completed. These variables do not need to be volatile, since a memory barrier must occur
|
||||
// for the reading thread to know that loading has completed.
|
||||
private MediaFormat mediaFormat;
|
||||
private DrmInitData drmInitData;
|
||||
private SeekMap seekMap;
|
||||
|
||||
private volatile int bytesLoaded;
|
||||
private volatile boolean loadCanceled;
|
||||
|
||||
/**
|
||||
* Constructor for a chunk of media samples.
|
||||
*
|
||||
* @param dataSource A {@link DataSource} for loading the initialization data.
|
||||
* @param dataSpec Defines the initialization data to be loaded.
|
||||
* @param trigger The reason for this chunk being selected.
|
||||
* @param format The format of the stream to which this chunk belongs.
|
||||
* @param extractorWrapper A wrapped extractor to use for parsing the initialization data.
|
||||
*/
|
||||
public InitializationChunk(DataSource dataSource, DataSpec dataSpec, int trigger, Format format,
|
||||
ChunkExtractorWrapper extractorWrapper) {
|
||||
super(dataSource, dataSpec, Chunk.TYPE_MEDIA_INITIALIZATION, trigger, format);
|
||||
this.extractorWrapper = extractorWrapper;
|
||||
}
|
||||
|
||||
@Override
|
||||
public long bytesLoaded() {
|
||||
return bytesLoaded;
|
||||
}
|
||||
|
||||
/**
|
||||
* True if a {@link MediaFormat} was parsed from the chunk. False otherwise.
|
||||
* <p>
|
||||
* Should be called after loading has completed.
|
||||
*/
|
||||
public boolean hasFormat() {
|
||||
return mediaFormat != null;
|
||||
}
|
||||
|
||||
/**
|
||||
* Returns a {@link MediaFormat} parsed from the chunk, or null.
|
||||
* <p>
|
||||
* Should be called after loading has completed.
|
||||
*/
|
||||
public MediaFormat getFormat() {
|
||||
return mediaFormat;
|
||||
}
|
||||
|
||||
/**
|
||||
* True if a {@link DrmInitData} was parsed from the chunk. False otherwise.
|
||||
* <p>
|
||||
* Should be called after loading has completed.
|
||||
*/
|
||||
public boolean hasDrmInitData() {
|
||||
return drmInitData != null;
|
||||
}
|
||||
|
||||
/**
|
||||
* Returns a {@link DrmInitData} parsed from the chunk, or null.
|
||||
* <p>
|
||||
* Should be called after loading has completed.
|
||||
*/
|
||||
public DrmInitData getDrmInitData() {
|
||||
return drmInitData;
|
||||
}
|
||||
|
||||
/**
|
||||
* True if a {@link SeekMap} was parsed from the chunk. False otherwise.
|
||||
* <p>
|
||||
* Should be called after loading has completed.
|
||||
*/
|
||||
public boolean hasSeekMap() {
|
||||
return seekMap != null;
|
||||
}
|
||||
|
||||
/**
|
||||
* Returns a {@link SeekMap} parsed from the chunk, or null.
|
||||
* <p>
|
||||
* Should be called after loading has completed.
|
||||
*/
|
||||
public SeekMap getSeekMap() {
|
||||
return seekMap;
|
||||
}
|
||||
|
||||
// SingleTrackOutput implementation.
|
||||
|
||||
@Override
|
||||
public void seekMap(SeekMap seekMap) {
|
||||
this.seekMap = seekMap;
|
||||
}
|
||||
|
||||
@Override
|
||||
public void drmInitData(DrmInitData drmInitData) {
|
||||
this.drmInitData = drmInitData;
|
||||
}
|
||||
|
||||
@Override
|
||||
public void format(MediaFormat mediaFormat) {
|
||||
this.mediaFormat = mediaFormat;
|
||||
}
|
||||
|
||||
@Override
|
||||
public int sampleData(ExtractorInput input, int length) throws IOException, InterruptedException {
|
||||
throw new IllegalStateException("Unexpected sample data in initialization chunk");
|
||||
}
|
||||
|
||||
@Override
|
||||
public void sampleData(ParsableByteArray data, int length) {
|
||||
throw new IllegalStateException("Unexpected sample data in initialization chunk");
|
||||
}
|
||||
|
||||
@Override
|
||||
public void sampleMetadata(long timeUs, int flags, int size, int offset, byte[] encryptionKey) {
|
||||
throw new IllegalStateException("Unexpected sample data in initialization chunk");
|
||||
}
|
||||
|
||||
// Loadable implementation.
|
||||
|
||||
@Override
|
||||
public void cancelLoad() {
|
||||
loadCanceled = true;
|
||||
}
|
||||
|
||||
@Override
|
||||
public boolean isLoadCanceled() {
|
||||
return loadCanceled;
|
||||
}
|
||||
|
||||
@SuppressWarnings("NonAtomicVolatileUpdate")
|
||||
@Override
|
||||
public void load() throws IOException, InterruptedException {
|
||||
DataSpec loadDataSpec = Util.getRemainderDataSpec(dataSpec, bytesLoaded);
|
||||
try {
|
||||
// Create and open the input.
|
||||
ExtractorInput input = new DefaultExtractorInput(dataSource, dataSpec.absoluteStreamPosition,
|
||||
dataSource.open(loadDataSpec));
|
||||
// Set the target to ourselves.
|
||||
extractorWrapper.init(this);
|
||||
// Load and parse the initialization data.
|
||||
try {
|
||||
int result = Extractor.RESULT_CONTINUE;
|
||||
while (result == Extractor.RESULT_CONTINUE && !loadCanceled) {
|
||||
result = extractorWrapper.read(input);
|
||||
}
|
||||
} finally {
|
||||
bytesLoaded += (int) (input.getPosition() - dataSpec.absoluteStreamPosition);
|
||||
}
|
||||
} finally {
|
||||
dataSource.close();
|
||||
}
|
||||
}
|
||||
|
||||
}
|
||||
|
|
@ -15,12 +15,9 @@
|
|||
*/
|
||||
package com.google.android.exoplayer.chunk;
|
||||
|
||||
import com.google.android.exoplayer.MediaFormat;
|
||||
import com.google.android.exoplayer.ParserException;
|
||||
import com.google.android.exoplayer.SampleHolder;
|
||||
import com.google.android.exoplayer.drm.DrmInitData;
|
||||
import com.google.android.exoplayer.upstream.DataSource;
|
||||
import com.google.android.exoplayer.upstream.DataSpec;
|
||||
import com.google.android.exoplayer.util.Assertions;
|
||||
|
||||
/**
|
||||
* An abstract base class for {@link Chunk}s that contain media samples.
|
||||
|
|
@ -36,103 +33,32 @@ public abstract class MediaChunk extends Chunk {
|
|||
*/
|
||||
public final long endTimeUs;
|
||||
/**
|
||||
* The index of the next media chunk, or -1 if this is the last media chunk in the stream.
|
||||
* The chunk index.
|
||||
*/
|
||||
public final int nextChunkIndex;
|
||||
public final int chunkIndex;
|
||||
/**
|
||||
* True if this is the last chunk in the media. False otherwise.
|
||||
*/
|
||||
public final boolean isLastChunk;
|
||||
|
||||
/**
|
||||
* Constructor for a chunk of media samples.
|
||||
*
|
||||
* @param dataSource A {@link DataSource} for loading the data.
|
||||
* @param dataSpec Defines the data to be loaded.
|
||||
* @param format The format of the stream to which this chunk belongs.
|
||||
* @param trigger The reason for this chunk being selected.
|
||||
* @param format The format of the stream to which this chunk belongs.
|
||||
* @param startTimeUs The start time of the media contained by the chunk, in microseconds.
|
||||
* @param endTimeUs The end time of the media contained by the chunk, in microseconds.
|
||||
* @param nextChunkIndex The index of the next chunk, or -1 if this is the last chunk.
|
||||
* @param chunkIndex The index of the chunk.
|
||||
* @param isLastChunk True if this is the last chunk in the media. False otherwise.
|
||||
*/
|
||||
public MediaChunk(DataSource dataSource, DataSpec dataSpec, Format format, int trigger,
|
||||
long startTimeUs, long endTimeUs, int nextChunkIndex) {
|
||||
super(dataSource, dataSpec, format, trigger);
|
||||
public MediaChunk(DataSource dataSource, DataSpec dataSpec, int trigger, Format format,
|
||||
long startTimeUs, long endTimeUs, int chunkIndex, boolean isLastChunk) {
|
||||
super(dataSource, dataSpec, Chunk.TYPE_MEDIA, trigger, format);
|
||||
Assertions.checkNotNull(format);
|
||||
this.startTimeUs = startTimeUs;
|
||||
this.endTimeUs = endTimeUs;
|
||||
this.nextChunkIndex = nextChunkIndex;
|
||||
this.chunkIndex = chunkIndex;
|
||||
this.isLastChunk = isLastChunk;
|
||||
}
|
||||
|
||||
/**
|
||||
* Whether this is the last chunk in the stream.
|
||||
*
|
||||
* @return True if this is the last chunk in the stream. False otherwise.
|
||||
*/
|
||||
public final boolean isLastChunk() {
|
||||
return nextChunkIndex == -1;
|
||||
}
|
||||
|
||||
/**
|
||||
* Seeks to the beginning of the chunk.
|
||||
*/
|
||||
public abstract void seekToStart();
|
||||
|
||||
/**
|
||||
* Seeks to the specified position within the chunk.
|
||||
*
|
||||
* @param positionUs The desired seek time in microseconds.
|
||||
* @param allowNoop True if the seek is allowed to do nothing if the result is more accurate than
|
||||
* seeking to a key frame. Always pass false if it is required that the next sample be a key
|
||||
* frame.
|
||||
* @return True if the seek results in a discontinuity in the sequence of samples returned by
|
||||
* {@link #read(SampleHolder)}. False otherwise.
|
||||
*/
|
||||
public abstract boolean seekTo(long positionUs, boolean allowNoop);
|
||||
|
||||
/**
|
||||
* Prepares the chunk for reading. Does nothing if the chunk is already prepared.
|
||||
* <p>
|
||||
* Preparation may require consuming some of the chunk. If the data is not yet available then
|
||||
* this method will return {@code false} rather than block. The method can be called repeatedly
|
||||
* until the return value indicates success.
|
||||
*
|
||||
* @return True if the chunk was prepared. False otherwise.
|
||||
* @throws ParserException If an error occurs parsing the media data.
|
||||
*/
|
||||
public abstract boolean prepare() throws ParserException;
|
||||
|
||||
/**
|
||||
* Returns whether the next sample is available.
|
||||
*
|
||||
* @return True if the next sample is available for reading. False otherwise.
|
||||
* @throws ParserException
|
||||
*/
|
||||
public abstract boolean sampleAvailable() throws ParserException;
|
||||
|
||||
/**
|
||||
* Reads the next media sample from the chunk.
|
||||
* <p>
|
||||
* Should only be called after the chunk has been successfully prepared.
|
||||
*
|
||||
* @param holder A holder to store the read sample.
|
||||
* @return True if a sample was read. False if more data is still required.
|
||||
* @throws ParserException If an error occurs parsing the media data.
|
||||
* @throws IllegalStateException If called before {@link #init}, or after {@link #release}
|
||||
*/
|
||||
public abstract boolean read(SampleHolder holder) throws ParserException;
|
||||
|
||||
/**
|
||||
* Returns the media format of the samples contained within this chunk.
|
||||
* <p>
|
||||
* Should only be called after the chunk has been successfully prepared.
|
||||
*
|
||||
* @return The sample media format.
|
||||
*/
|
||||
public abstract MediaFormat getMediaFormat();
|
||||
|
||||
/**
|
||||
* Returns the DRM initialization data associated with the chunk.
|
||||
* <p>
|
||||
* Should only be called after the chunk has been successfully prepared.
|
||||
*
|
||||
* @return The DRM initialization data.
|
||||
*/
|
||||
public abstract DrmInitData getDrmInitData();
|
||||
|
||||
}
|
||||
|
|
|
|||
|
|
@ -106,6 +106,11 @@ public class MultiTrackChunkSource implements ChunkSource, ExoPlayerComponent {
|
|||
}
|
||||
}
|
||||
|
||||
@Override
|
||||
public void onChunkLoadCompleted(Chunk chunk) {
|
||||
selectedSource.onChunkLoadCompleted(chunk);
|
||||
}
|
||||
|
||||
@Override
|
||||
public void onChunkLoadError(Chunk chunk, Exception e) {
|
||||
selectedSource.onChunkLoadError(chunk, e);
|
||||
|
|
|
|||
|
|
@ -97,14 +97,19 @@ public class SingleSampleChunkSource implements ChunkSource {
|
|||
return null;
|
||||
}
|
||||
|
||||
@Override
|
||||
public void onChunkLoadCompleted(Chunk chunk) {
|
||||
// Do nothing.
|
||||
}
|
||||
|
||||
@Override
|
||||
public void onChunkLoadError(Chunk chunk, Exception e) {
|
||||
// Do nothing.
|
||||
}
|
||||
|
||||
private SingleSampleMediaChunk initChunk() {
|
||||
return new SingleSampleMediaChunk(dataSource, dataSpec, format, 0, 0, durationUs, -1,
|
||||
mediaFormat);
|
||||
return new SingleSampleMediaChunk(dataSource, dataSpec, Chunk.TRIGGER_UNSPECIFIED, format, 0,
|
||||
durationUs, 0, true, mediaFormat, null, null);
|
||||
}
|
||||
|
||||
}
|
||||
|
|
|
|||
|
|
@ -15,113 +15,59 @@
|
|||
*/
|
||||
package com.google.android.exoplayer.chunk;
|
||||
|
||||
import com.google.android.exoplayer.C;
|
||||
import com.google.android.exoplayer.MediaFormat;
|
||||
import com.google.android.exoplayer.SampleHolder;
|
||||
import com.google.android.exoplayer.drm.DrmInitData;
|
||||
import com.google.android.exoplayer.upstream.DataSource;
|
||||
import com.google.android.exoplayer.upstream.DataSpec;
|
||||
import com.google.android.exoplayer.upstream.NonBlockingInputStream;
|
||||
import com.google.android.exoplayer.util.Assertions;
|
||||
import com.google.android.exoplayer.util.ParsableByteArray;
|
||||
import com.google.android.exoplayer.util.Util;
|
||||
|
||||
import java.io.IOException;
|
||||
|
||||
/**
|
||||
* A {@link MediaChunk} containing a single sample.
|
||||
* A {@link BaseMediaChunk} for chunks consisting of a single raw sample.
|
||||
*/
|
||||
public class SingleSampleMediaChunk extends MediaChunk {
|
||||
|
||||
/**
|
||||
* The sample header data. May be null.
|
||||
*/
|
||||
public final byte[] headerData;
|
||||
public final class SingleSampleMediaChunk extends BaseMediaChunk {
|
||||
|
||||
private final MediaFormat sampleFormat;
|
||||
private final DrmInitData sampleDrmInitData;
|
||||
private final byte[] headerData;
|
||||
|
||||
private boolean writtenHeader;
|
||||
|
||||
private volatile int bytesLoaded;
|
||||
private volatile boolean loadCanceled;
|
||||
|
||||
/**
|
||||
* @param dataSource A {@link DataSource} for loading the data.
|
||||
* @param dataSpec Defines the data to be loaded.
|
||||
* @param format The format of the stream to which this chunk belongs.
|
||||
* @param trigger The reason for this chunk being selected.
|
||||
* @param format The format of the stream to which this chunk belongs.
|
||||
* @param startTimeUs The start time of the media contained by the chunk, in microseconds.
|
||||
* @param endTimeUs The end time of the media contained by the chunk, in microseconds.
|
||||
* @param nextChunkIndex The index of the next chunk, or -1 if this is the last chunk.
|
||||
* @param sampleFormat The format of the media contained by the chunk.
|
||||
*/
|
||||
public SingleSampleMediaChunk(DataSource dataSource, DataSpec dataSpec, Format format,
|
||||
int trigger, long startTimeUs, long endTimeUs, int nextChunkIndex, MediaFormat sampleFormat) {
|
||||
this(dataSource, dataSpec, format, trigger, startTimeUs, endTimeUs, nextChunkIndex,
|
||||
sampleFormat, null);
|
||||
}
|
||||
|
||||
/**
|
||||
* @param dataSource A {@link DataSource} for loading the data.
|
||||
* @param dataSpec Defines the data to be loaded.
|
||||
* @param format The format of the stream to which this chunk belongs.
|
||||
* @param trigger The reason for this chunk being selected.
|
||||
* @param startTimeUs The start time of the media contained by the chunk, in microseconds.
|
||||
* @param endTimeUs The end time of the media contained by the chunk, in microseconds.
|
||||
* @param nextChunkIndex The index of the next chunk, or -1 if this is the last chunk.
|
||||
* @param sampleFormat The format of the media contained by the chunk.
|
||||
* @param chunkIndex The index of the chunk.
|
||||
* @param isLastChunk True if this is the last chunk in the media. False otherwise.
|
||||
* @param sampleFormat The format of the sample.
|
||||
* @param sampleDrmInitData The {@link DrmInitData} for the sample. Null if the sample is not drm
|
||||
* protected.
|
||||
* @param headerData Custom header data for the sample. May be null. If set, the header data is
|
||||
* prepended to the sample data returned when {@link #read(SampleHolder)} is called. It is not
|
||||
* reflected in the values returned by {@link #bytesLoaded()} and {@link #getLength()}.
|
||||
* prepended to the sample data. It is not reflected in the values returned by
|
||||
* {@link #bytesLoaded()}.
|
||||
*/
|
||||
public SingleSampleMediaChunk(DataSource dataSource, DataSpec dataSpec, Format format,
|
||||
int trigger, long startTimeUs, long endTimeUs, int nextChunkIndex, MediaFormat sampleFormat,
|
||||
byte[] headerData) {
|
||||
super(dataSource, dataSpec, format, trigger, startTimeUs, endTimeUs, nextChunkIndex);
|
||||
public SingleSampleMediaChunk(DataSource dataSource, DataSpec dataSpec, int trigger,
|
||||
Format format, long startTimeUs, long endTimeUs, int chunkIndex, boolean isLastChunk,
|
||||
MediaFormat sampleFormat, DrmInitData sampleDrmInitData, byte[] headerData) {
|
||||
super(dataSource, dataSpec, trigger, format, startTimeUs, endTimeUs, chunkIndex, isLastChunk,
|
||||
true);
|
||||
this.sampleFormat = sampleFormat;
|
||||
this.sampleDrmInitData = sampleDrmInitData;
|
||||
this.headerData = headerData;
|
||||
}
|
||||
|
||||
@Override
|
||||
public boolean prepare() {
|
||||
return true;
|
||||
}
|
||||
|
||||
@Override
|
||||
public boolean sampleAvailable() {
|
||||
return isLoadFinished() && !isReadFinished();
|
||||
}
|
||||
|
||||
@Override
|
||||
public boolean read(SampleHolder holder) {
|
||||
NonBlockingInputStream inputStream = getNonBlockingInputStream();
|
||||
Assertions.checkState(inputStream != null);
|
||||
if (!sampleAvailable()) {
|
||||
return false;
|
||||
}
|
||||
int bytesLoaded = (int) bytesLoaded();
|
||||
int sampleSize = bytesLoaded;
|
||||
if (headerData != null) {
|
||||
sampleSize += headerData.length;
|
||||
}
|
||||
if (holder.data == null || holder.data.capacity() < sampleSize) {
|
||||
holder.replaceBuffer(sampleSize);
|
||||
}
|
||||
int bytesRead;
|
||||
if (holder.data != null) {
|
||||
if (headerData != null) {
|
||||
holder.data.put(headerData);
|
||||
}
|
||||
bytesRead = inputStream.read(holder.data, bytesLoaded);
|
||||
holder.size = sampleSize;
|
||||
} else {
|
||||
bytesRead = inputStream.skip(bytesLoaded);
|
||||
holder.size = 0;
|
||||
}
|
||||
Assertions.checkState(bytesRead == bytesLoaded);
|
||||
holder.timeUs = startTimeUs;
|
||||
return true;
|
||||
}
|
||||
|
||||
@Override
|
||||
public void seekToStart() {
|
||||
resetReadPosition();
|
||||
}
|
||||
|
||||
@Override
|
||||
public boolean seekTo(long positionUs, boolean allowNoop) {
|
||||
resetReadPosition();
|
||||
return true;
|
||||
public long bytesLoaded() {
|
||||
return bytesLoaded;
|
||||
}
|
||||
|
||||
@Override
|
||||
|
|
@ -131,7 +77,51 @@ public class SingleSampleMediaChunk extends MediaChunk {
|
|||
|
||||
@Override
|
||||
public DrmInitData getDrmInitData() {
|
||||
return null;
|
||||
return sampleDrmInitData;
|
||||
}
|
||||
|
||||
// Loadable implementation.
|
||||
|
||||
@Override
|
||||
public void cancelLoad() {
|
||||
loadCanceled = true;
|
||||
}
|
||||
|
||||
@Override
|
||||
public boolean isLoadCanceled() {
|
||||
return loadCanceled;
|
||||
}
|
||||
|
||||
@SuppressWarnings("NonAtomicVolatileUpdate")
|
||||
@Override
|
||||
public void load() throws IOException, InterruptedException {
|
||||
if (!writtenHeader) {
|
||||
if (headerData != null) {
|
||||
getOutput().sampleData(new ParsableByteArray(headerData), headerData.length);
|
||||
}
|
||||
writtenHeader = true;
|
||||
}
|
||||
|
||||
DataSpec loadDataSpec = Util.getRemainderDataSpec(dataSpec, bytesLoaded);
|
||||
try {
|
||||
// Create and open the input.
|
||||
dataSource.open(loadDataSpec);
|
||||
// Load the sample data.
|
||||
int result = 0;
|
||||
while (result != C.RESULT_END_OF_INPUT) {
|
||||
result = getOutput().sampleData(dataSource, Integer.MAX_VALUE);
|
||||
if (result != C.RESULT_END_OF_INPUT) {
|
||||
bytesLoaded += result;
|
||||
}
|
||||
}
|
||||
int sampleSize = bytesLoaded;
|
||||
if (headerData != null) {
|
||||
sampleSize += headerData.length;
|
||||
}
|
||||
getOutput().sampleMetadata(startTimeUs, 0, sampleSize, 0, null);
|
||||
} finally {
|
||||
dataSource.close();
|
||||
}
|
||||
}
|
||||
|
||||
}
|
||||
|
|
|
|||
|
|
@ -1,113 +0,0 @@
|
|||
/*
|
||||
* Copyright (C) 2014 The Android Open Source Project
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
*/
|
||||
package com.google.android.exoplayer.chunk.parser;
|
||||
|
||||
import com.google.android.exoplayer.MediaFormat;
|
||||
import com.google.android.exoplayer.ParserException;
|
||||
import com.google.android.exoplayer.SampleHolder;
|
||||
import com.google.android.exoplayer.drm.DrmInitData;
|
||||
import com.google.android.exoplayer.upstream.NonBlockingInputStream;
|
||||
|
||||
/**
|
||||
* Facilitates extraction of media samples from a container format.
|
||||
*/
|
||||
public interface Extractor {
|
||||
|
||||
/**
|
||||
* An attempt to read from the input stream returned insufficient data.
|
||||
*/
|
||||
public static final int RESULT_NEED_MORE_DATA = 1;
|
||||
/**
|
||||
* The end of the input stream was reached.
|
||||
*/
|
||||
public static final int RESULT_END_OF_STREAM = 2;
|
||||
/**
|
||||
* A media sample was read.
|
||||
*/
|
||||
public static final int RESULT_READ_SAMPLE = 4;
|
||||
/**
|
||||
* Initialization data was read. The parsed data can be read using {@link #getFormat()} and
|
||||
* {@link #getDrmInitData()}.
|
||||
*/
|
||||
public static final int RESULT_READ_INIT = 8;
|
||||
/**
|
||||
* A sidx atom was read. The parsed data can be read using {@link #getIndex()}.
|
||||
*/
|
||||
public static final int RESULT_READ_INDEX = 16;
|
||||
/**
|
||||
* The next thing to be read is a sample, but a {@link SampleHolder} was not supplied.
|
||||
*/
|
||||
public static final int RESULT_NEED_SAMPLE_HOLDER = 32;
|
||||
|
||||
/**
|
||||
* Returns the segment index parsed from the stream.
|
||||
*
|
||||
* @return The segment index, or null if a SIDX atom has yet to be parsed.
|
||||
*/
|
||||
public SegmentIndex getIndex();
|
||||
|
||||
/**
|
||||
* Returns true if the offsets in the index returned by {@link #getIndex()} are relative to the
|
||||
* first byte following the initialization data, or false if they are absolute (i.e. relative to
|
||||
* the first byte of the stream).
|
||||
*
|
||||
* @return True if the offsets are relative to the first byte following the initialization data.
|
||||
* False otherwise.
|
||||
*/
|
||||
public boolean hasRelativeIndexOffsets();
|
||||
|
||||
/**
|
||||
* Returns the format of the samples contained within the media stream.
|
||||
*
|
||||
* @return The sample media format, or null if the format has yet to be parsed.
|
||||
*/
|
||||
public MediaFormat getFormat();
|
||||
|
||||
/**
|
||||
* Returns DRM initialization data parsed from the stream.
|
||||
*
|
||||
* @return The DRM initialization data. May be null if the initialization data has yet to be
|
||||
* parsed, or if the stream does not contain any DRM initialization data.
|
||||
*/
|
||||
public DrmInitData getDrmInitData();
|
||||
|
||||
/**
|
||||
* Consumes data from a {@link NonBlockingInputStream}.
|
||||
* <p>
|
||||
* The read terminates if the end of the input stream is reached, if an attempt to read from the
|
||||
* input stream returned 0 bytes of data, or if a sample is read. The returned flags indicate
|
||||
* both the reason for termination and data that was parsed during the read.
|
||||
*
|
||||
* @param inputStream The input stream from which data should be read.
|
||||
* @param out A {@link SampleHolder} into which the next sample should be read. If null then
|
||||
* {@link #RESULT_NEED_SAMPLE_HOLDER} will be returned once a sample has been reached.
|
||||
* @return One or more of the {@code RESULT_*} flags defined in this class.
|
||||
* @throws ParserException If an error occurs parsing the media data.
|
||||
*/
|
||||
public int read(NonBlockingInputStream inputStream, SampleHolder out) throws ParserException;
|
||||
|
||||
/**
|
||||
* Seeks to a position before or equal to the requested time.
|
||||
*
|
||||
* @param seekTimeUs The desired seek time in microseconds.
|
||||
* @param allowNoop Allow the seek operation to do nothing if the seek time is in the current
|
||||
* fragment run, is equal to or greater than the time of the current sample, and if there
|
||||
* does not exist a sync frame between these two times.
|
||||
* @return True if the operation resulted in a change of state. False if it was a no-op.
|
||||
*/
|
||||
public boolean seekTo(long seekTimeUs, boolean allowNoop);
|
||||
|
||||
}
|
||||
|
|
@ -1,70 +0,0 @@
|
|||
/*
|
||||
* Copyright (C) 2014 The Android Open Source Project
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
*/
|
||||
package com.google.android.exoplayer.chunk.parser;
|
||||
|
||||
/**
|
||||
* Defines segments within a media stream.
|
||||
*/
|
||||
public final class SegmentIndex {
|
||||
|
||||
/**
|
||||
* The size in bytes of the segment index as it exists in the stream.
|
||||
*/
|
||||
public final int sizeBytes;
|
||||
|
||||
/**
|
||||
* The number of segments.
|
||||
*/
|
||||
public final int length;
|
||||
|
||||
/**
|
||||
* The segment sizes, in bytes.
|
||||
*/
|
||||
public final int[] sizes;
|
||||
|
||||
/**
|
||||
* The segment byte offsets.
|
||||
*/
|
||||
public final long[] offsets;
|
||||
|
||||
/**
|
||||
* The segment durations, in microseconds.
|
||||
*/
|
||||
public final long[] durationsUs;
|
||||
|
||||
/**
|
||||
* The start time of each segment, in microseconds.
|
||||
*/
|
||||
public final long[] timesUs;
|
||||
|
||||
/**
|
||||
* @param sizeBytes The size in bytes of the segment index as it exists in the stream.
|
||||
* @param sizes The segment sizes, in bytes.
|
||||
* @param offsets The segment byte offsets.
|
||||
* @param durationsUs The segment durations, in microseconds.
|
||||
* @param timesUs The start time of each segment, in microseconds.
|
||||
*/
|
||||
public SegmentIndex(int sizeBytes, int[] sizes, long[] offsets, long[] durationsUs,
|
||||
long[] timesUs) {
|
||||
this.sizeBytes = sizeBytes;
|
||||
this.length = sizes.length;
|
||||
this.sizes = sizes;
|
||||
this.offsets = offsets;
|
||||
this.durationsUs = durationsUs;
|
||||
this.timesUs = timesUs;
|
||||
}
|
||||
|
||||
}
|
||||
|
|
@ -1,838 +0,0 @@
|
|||
/*
|
||||
* Copyright (C) 2014 The Android Open Source Project
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
*/
|
||||
package com.google.android.exoplayer.chunk.parser.mp4;
|
||||
|
||||
import com.google.android.exoplayer.C;
|
||||
import com.google.android.exoplayer.MediaFormat;
|
||||
import com.google.android.exoplayer.ParserException;
|
||||
import com.google.android.exoplayer.SampleHolder;
|
||||
import com.google.android.exoplayer.chunk.parser.Extractor;
|
||||
import com.google.android.exoplayer.chunk.parser.SegmentIndex;
|
||||
import com.google.android.exoplayer.drm.DrmInitData;
|
||||
import com.google.android.exoplayer.extractor.mp4.Atom;
|
||||
import com.google.android.exoplayer.extractor.mp4.Atom.ContainerAtom;
|
||||
import com.google.android.exoplayer.extractor.mp4.Atom.LeafAtom;
|
||||
import com.google.android.exoplayer.extractor.mp4.AtomParsers;
|
||||
import com.google.android.exoplayer.extractor.mp4.DefaultSampleValues;
|
||||
import com.google.android.exoplayer.extractor.mp4.Track;
|
||||
import com.google.android.exoplayer.extractor.mp4.TrackEncryptionBox;
|
||||
import com.google.android.exoplayer.extractor.mp4.TrackFragment;
|
||||
import com.google.android.exoplayer.upstream.NonBlockingInputStream;
|
||||
import com.google.android.exoplayer.util.H264Util;
|
||||
import com.google.android.exoplayer.util.MimeTypes;
|
||||
import com.google.android.exoplayer.util.ParsableByteArray;
|
||||
import com.google.android.exoplayer.util.Util;
|
||||
|
||||
import java.nio.ByteBuffer;
|
||||
import java.util.Arrays;
|
||||
import java.util.Collections;
|
||||
import java.util.HashSet;
|
||||
import java.util.List;
|
||||
import java.util.Set;
|
||||
import java.util.Stack;
|
||||
import java.util.UUID;
|
||||
|
||||
/**
|
||||
* Facilitates the extraction of data from the fragmented mp4 container format.
|
||||
* <p>
|
||||
* This implementation only supports de-muxed (i.e. single track) streams.
|
||||
*/
|
||||
public final class FragmentedMp4Extractor implements Extractor {
|
||||
|
||||
/**
|
||||
* Flag to work around an issue in some video streams where every frame is marked as a sync frame.
|
||||
* The workaround overrides the sync frame flags in the stream, forcing them to false except for
|
||||
* the first sample in each segment.
|
||||
* <p>
|
||||
* This flag does nothing if the stream is not a video stream.
|
||||
*/
|
||||
public static final int WORKAROUND_EVERY_VIDEO_FRAME_IS_SYNC_FRAME = 1;
|
||||
|
||||
private static final int READ_TERMINATING_RESULTS = RESULT_NEED_MORE_DATA | RESULT_END_OF_STREAM
|
||||
| RESULT_READ_SAMPLE | RESULT_NEED_SAMPLE_HOLDER;
|
||||
private static final byte[] PIFF_SAMPLE_ENCRYPTION_BOX_EXTENDED_TYPE =
|
||||
new byte[] {-94, 57, 79, 82, 90, -101, 79, 20, -94, 68, 108, 66, 124, 100, -115, -12};
|
||||
|
||||
// Parser states
|
||||
private static final int STATE_READING_ATOM_HEADER = 0;
|
||||
private static final int STATE_READING_ATOM_PAYLOAD = 1;
|
||||
private static final int STATE_READING_ENCRYPTION_DATA = 2;
|
||||
private static final int STATE_READING_SAMPLE = 3;
|
||||
|
||||
// Atoms that the parser cares about
|
||||
private static final Set<Integer> PARSED_ATOMS;
|
||||
static {
|
||||
HashSet<Integer> parsedAtoms = new HashSet<Integer>();
|
||||
parsedAtoms.add(Atom.TYPE_avc1);
|
||||
parsedAtoms.add(Atom.TYPE_avc3);
|
||||
parsedAtoms.add(Atom.TYPE_esds);
|
||||
parsedAtoms.add(Atom.TYPE_hdlr);
|
||||
parsedAtoms.add(Atom.TYPE_mdat);
|
||||
parsedAtoms.add(Atom.TYPE_mdhd);
|
||||
parsedAtoms.add(Atom.TYPE_moof);
|
||||
parsedAtoms.add(Atom.TYPE_moov);
|
||||
parsedAtoms.add(Atom.TYPE_mp4a);
|
||||
parsedAtoms.add(Atom.TYPE_mvhd);
|
||||
parsedAtoms.add(Atom.TYPE_sidx);
|
||||
parsedAtoms.add(Atom.TYPE_stsd);
|
||||
parsedAtoms.add(Atom.TYPE_tfdt);
|
||||
parsedAtoms.add(Atom.TYPE_tfhd);
|
||||
parsedAtoms.add(Atom.TYPE_tkhd);
|
||||
parsedAtoms.add(Atom.TYPE_traf);
|
||||
parsedAtoms.add(Atom.TYPE_trak);
|
||||
parsedAtoms.add(Atom.TYPE_trex);
|
||||
parsedAtoms.add(Atom.TYPE_trun);
|
||||
parsedAtoms.add(Atom.TYPE_mvex);
|
||||
parsedAtoms.add(Atom.TYPE_mdia);
|
||||
parsedAtoms.add(Atom.TYPE_minf);
|
||||
parsedAtoms.add(Atom.TYPE_stbl);
|
||||
parsedAtoms.add(Atom.TYPE_pssh);
|
||||
parsedAtoms.add(Atom.TYPE_saiz);
|
||||
parsedAtoms.add(Atom.TYPE_uuid);
|
||||
parsedAtoms.add(Atom.TYPE_senc);
|
||||
parsedAtoms.add(Atom.TYPE_pasp);
|
||||
PARSED_ATOMS = Collections.unmodifiableSet(parsedAtoms);
|
||||
}
|
||||
|
||||
// Atoms that the parser considers to be containers
|
||||
private static final Set<Integer> CONTAINER_TYPES;
|
||||
static {
|
||||
HashSet<Integer> atomContainerTypes = new HashSet<Integer>();
|
||||
atomContainerTypes.add(Atom.TYPE_moov);
|
||||
atomContainerTypes.add(Atom.TYPE_trak);
|
||||
atomContainerTypes.add(Atom.TYPE_mdia);
|
||||
atomContainerTypes.add(Atom.TYPE_minf);
|
||||
atomContainerTypes.add(Atom.TYPE_stbl);
|
||||
atomContainerTypes.add(Atom.TYPE_avcC);
|
||||
atomContainerTypes.add(Atom.TYPE_moof);
|
||||
atomContainerTypes.add(Atom.TYPE_traf);
|
||||
atomContainerTypes.add(Atom.TYPE_mvex);
|
||||
CONTAINER_TYPES = Collections.unmodifiableSet(atomContainerTypes);
|
||||
}
|
||||
|
||||
private final int workaroundFlags;
|
||||
|
||||
// Parser state
|
||||
private final ParsableByteArray atomHeader;
|
||||
private final byte[] extendedTypeScratch;
|
||||
private final Stack<ContainerAtom> containerAtoms;
|
||||
private final TrackFragment fragmentRun;
|
||||
|
||||
private int parserState;
|
||||
private int atomBytesRead;
|
||||
private int rootAtomBytesRead;
|
||||
private int atomType;
|
||||
private int atomSize;
|
||||
private ParsableByteArray atomData;
|
||||
|
||||
private int pendingSeekTimeMs;
|
||||
private int sampleIndex;
|
||||
private int pendingSeekSyncSampleIndex;
|
||||
private int lastSyncSampleIndex;
|
||||
|
||||
// Data parsed from moov and sidx atoms
|
||||
private DrmInitData.Mapped drmInitData;
|
||||
private SegmentIndex segmentIndex;
|
||||
private Track track;
|
||||
private DefaultSampleValues extendsDefaults;
|
||||
|
||||
public FragmentedMp4Extractor() {
|
||||
this(0);
|
||||
}
|
||||
|
||||
/**
|
||||
* @param workaroundFlags Flags to allow parsing of faulty streams.
|
||||
* {@link #WORKAROUND_EVERY_VIDEO_FRAME_IS_SYNC_FRAME} is currently the only flag defined.
|
||||
*/
|
||||
public FragmentedMp4Extractor(int workaroundFlags) {
|
||||
this.workaroundFlags = workaroundFlags;
|
||||
parserState = STATE_READING_ATOM_HEADER;
|
||||
atomHeader = new ParsableByteArray(Atom.HEADER_SIZE);
|
||||
extendedTypeScratch = new byte[16];
|
||||
containerAtoms = new Stack<ContainerAtom>();
|
||||
fragmentRun = new TrackFragment();
|
||||
}
|
||||
|
||||
/**
|
||||
* Sideloads track information into the extractor.
|
||||
*
|
||||
* @param track The track to sideload.
|
||||
*/
|
||||
public void setTrack(Track track) {
|
||||
this.extendsDefaults = new DefaultSampleValues(0, 0, 0, 0);
|
||||
this.track = track;
|
||||
}
|
||||
|
||||
@Override
|
||||
public DrmInitData getDrmInitData() {
|
||||
return drmInitData;
|
||||
}
|
||||
|
||||
@Override
|
||||
public SegmentIndex getIndex() {
|
||||
return segmentIndex;
|
||||
}
|
||||
|
||||
@Override
|
||||
public boolean hasRelativeIndexOffsets() {
|
||||
return true;
|
||||
}
|
||||
|
||||
@Override
|
||||
public MediaFormat getFormat() {
|
||||
return track == null ? null : track.mediaFormat;
|
||||
}
|
||||
|
||||
@Override
|
||||
public int read(NonBlockingInputStream inputStream, SampleHolder out)
|
||||
throws ParserException {
|
||||
try {
|
||||
int results = 0;
|
||||
while ((results & READ_TERMINATING_RESULTS) == 0) {
|
||||
switch (parserState) {
|
||||
case STATE_READING_ATOM_HEADER:
|
||||
results |= readAtomHeader(inputStream);
|
||||
break;
|
||||
case STATE_READING_ATOM_PAYLOAD:
|
||||
results |= readAtomPayload(inputStream);
|
||||
break;
|
||||
case STATE_READING_ENCRYPTION_DATA:
|
||||
results |= readEncryptionData(inputStream);
|
||||
break;
|
||||
default:
|
||||
results |= readOrSkipSample(inputStream, out);
|
||||
break;
|
||||
}
|
||||
}
|
||||
return results;
|
||||
} catch (Exception e) {
|
||||
throw new ParserException(e);
|
||||
}
|
||||
}
|
||||
|
||||
@Override
|
||||
public boolean seekTo(long seekTimeUs, boolean allowNoop) {
|
||||
pendingSeekTimeMs = (int) (seekTimeUs / 1000);
|
||||
if (allowNoop && fragmentRun != null && fragmentRun.length > 0
|
||||
&& pendingSeekTimeMs >= fragmentRun.getSamplePresentationTime(0)
|
||||
&& pendingSeekTimeMs <= fragmentRun.getSamplePresentationTime(fragmentRun.length - 1)) {
|
||||
int sampleIndexFound = 0;
|
||||
int syncSampleIndexFound = 0;
|
||||
for (int i = 0; i < fragmentRun.length; i++) {
|
||||
if (fragmentRun.getSamplePresentationTime(i) <= pendingSeekTimeMs) {
|
||||
if (fragmentRun.sampleIsSyncFrameTable[i]) {
|
||||
syncSampleIndexFound = i;
|
||||
}
|
||||
sampleIndexFound = i;
|
||||
}
|
||||
}
|
||||
if (syncSampleIndexFound == lastSyncSampleIndex && sampleIndexFound >= sampleIndex) {
|
||||
pendingSeekTimeMs = 0;
|
||||
return false;
|
||||
}
|
||||
}
|
||||
containerAtoms.clear();
|
||||
enterState(STATE_READING_ATOM_HEADER);
|
||||
return true;
|
||||
}
|
||||
|
||||
private void enterState(int state) {
|
||||
switch (state) {
|
||||
case STATE_READING_ATOM_HEADER:
|
||||
atomBytesRead = 0;
|
||||
if (containerAtoms.isEmpty()) {
|
||||
rootAtomBytesRead = 0;
|
||||
}
|
||||
break;
|
||||
}
|
||||
parserState = state;
|
||||
}
|
||||
|
||||
private int readAtomHeader(NonBlockingInputStream inputStream) {
|
||||
int remainingBytes = Atom.HEADER_SIZE - atomBytesRead;
|
||||
int bytesRead = inputStream.read(atomHeader.data, atomBytesRead, remainingBytes);
|
||||
if (bytesRead == -1) {
|
||||
return RESULT_END_OF_STREAM;
|
||||
}
|
||||
rootAtomBytesRead += bytesRead;
|
||||
atomBytesRead += bytesRead;
|
||||
if (atomBytesRead != Atom.HEADER_SIZE) {
|
||||
return RESULT_NEED_MORE_DATA;
|
||||
}
|
||||
|
||||
atomHeader.setPosition(0);
|
||||
atomSize = atomHeader.readInt();
|
||||
atomType = atomHeader.readInt();
|
||||
|
||||
if (atomType == Atom.TYPE_mdat) {
|
||||
if (fragmentRun.sampleEncryptionDataNeedsFill) {
|
||||
enterState(STATE_READING_ENCRYPTION_DATA);
|
||||
} else {
|
||||
enterState(STATE_READING_SAMPLE);
|
||||
}
|
||||
return 0;
|
||||
}
|
||||
|
||||
Integer atomTypeInteger = atomType; // Avoids boxing atomType twice.
|
||||
if (PARSED_ATOMS.contains(atomTypeInteger)) {
|
||||
if (CONTAINER_TYPES.contains(atomTypeInteger)) {
|
||||
enterState(STATE_READING_ATOM_HEADER);
|
||||
containerAtoms.add(new ContainerAtom(atomType,
|
||||
rootAtomBytesRead + atomSize - Atom.HEADER_SIZE));
|
||||
} else {
|
||||
atomData = new ParsableByteArray(atomSize);
|
||||
System.arraycopy(atomHeader.data, 0, atomData.data, 0, Atom.HEADER_SIZE);
|
||||
enterState(STATE_READING_ATOM_PAYLOAD);
|
||||
}
|
||||
} else {
|
||||
atomData = null;
|
||||
enterState(STATE_READING_ATOM_PAYLOAD);
|
||||
}
|
||||
|
||||
return 0;
|
||||
}
|
||||
|
||||
private int readAtomPayload(NonBlockingInputStream inputStream) {
|
||||
int bytesRead;
|
||||
if (atomData != null) {
|
||||
bytesRead = inputStream.read(atomData.data, atomBytesRead, atomSize - atomBytesRead);
|
||||
} else {
|
||||
bytesRead = inputStream.skip(atomSize - atomBytesRead);
|
||||
}
|
||||
if (bytesRead == -1) {
|
||||
return RESULT_END_OF_STREAM;
|
||||
}
|
||||
rootAtomBytesRead += bytesRead;
|
||||
atomBytesRead += bytesRead;
|
||||
if (atomBytesRead != atomSize) {
|
||||
return RESULT_NEED_MORE_DATA;
|
||||
}
|
||||
|
||||
int results = 0;
|
||||
if (atomData != null) {
|
||||
results |= onLeafAtomRead(new LeafAtom(atomType, atomData));
|
||||
}
|
||||
|
||||
while (!containerAtoms.isEmpty() && containerAtoms.peek().endByteOffset == rootAtomBytesRead) {
|
||||
results |= onContainerAtomRead(containerAtoms.pop());
|
||||
}
|
||||
|
||||
enterState(STATE_READING_ATOM_HEADER);
|
||||
return results;
|
||||
}
|
||||
|
||||
private int onLeafAtomRead(LeafAtom leaf) {
|
||||
if (!containerAtoms.isEmpty()) {
|
||||
containerAtoms.peek().add(leaf);
|
||||
} else if (leaf.type == Atom.TYPE_sidx) {
|
||||
segmentIndex = parseSidx(leaf.data);
|
||||
return RESULT_READ_INDEX;
|
||||
}
|
||||
return 0;
|
||||
}
|
||||
|
||||
private int onContainerAtomRead(ContainerAtom container) {
|
||||
if (container.type == Atom.TYPE_moov) {
|
||||
onMoovContainerAtomRead(container);
|
||||
return RESULT_READ_INIT;
|
||||
} else if (container.type == Atom.TYPE_moof) {
|
||||
onMoofContainerAtomRead(container);
|
||||
} else if (!containerAtoms.isEmpty()) {
|
||||
containerAtoms.peek().add(container);
|
||||
}
|
||||
return 0;
|
||||
}
|
||||
|
||||
private void onMoovContainerAtomRead(ContainerAtom moov) {
|
||||
List<Atom.LeafAtom> moovChildren = moov.leafChildren;
|
||||
int moovChildrenSize = moovChildren.size();
|
||||
for (int i = 0; i < moovChildrenSize; i++) {
|
||||
LeafAtom child = moovChildren.get(i);
|
||||
if (child.type == Atom.TYPE_pssh) {
|
||||
ParsableByteArray psshAtom = child.data;
|
||||
psshAtom.setPosition(Atom.FULL_HEADER_SIZE);
|
||||
UUID uuid = new UUID(psshAtom.readLong(), psshAtom.readLong());
|
||||
int dataSize = psshAtom.readInt();
|
||||
byte[] data = new byte[dataSize];
|
||||
psshAtom.readBytes(data, 0, dataSize);
|
||||
if (drmInitData == null) {
|
||||
drmInitData = new DrmInitData.Mapped(MimeTypes.VIDEO_MP4);
|
||||
}
|
||||
drmInitData.put(uuid, data);
|
||||
}
|
||||
}
|
||||
ContainerAtom mvex = moov.getContainerAtomOfType(Atom.TYPE_mvex);
|
||||
extendsDefaults = parseTrex(mvex.getLeafAtomOfType(Atom.TYPE_trex).data);
|
||||
track = AtomParsers.parseTrak(moov.getContainerAtomOfType(Atom.TYPE_trak),
|
||||
moov.getLeafAtomOfType(Atom.TYPE_mvhd));
|
||||
}
|
||||
|
||||
private void onMoofContainerAtomRead(ContainerAtom moof) {
|
||||
fragmentRun.reset();
|
||||
parseMoof(track, extendsDefaults, moof, fragmentRun, workaroundFlags, extendedTypeScratch);
|
||||
sampleIndex = 0;
|
||||
lastSyncSampleIndex = 0;
|
||||
pendingSeekSyncSampleIndex = 0;
|
||||
if (pendingSeekTimeMs != 0) {
|
||||
for (int i = 0; i < fragmentRun.length; i++) {
|
||||
if (fragmentRun.sampleIsSyncFrameTable[i]) {
|
||||
if (fragmentRun.getSamplePresentationTime(i) <= pendingSeekTimeMs) {
|
||||
pendingSeekSyncSampleIndex = i;
|
||||
}
|
||||
}
|
||||
}
|
||||
pendingSeekTimeMs = 0;
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Parses a trex atom (defined in 14496-12).
|
||||
*/
|
||||
private static DefaultSampleValues parseTrex(ParsableByteArray trex) {
|
||||
trex.setPosition(Atom.FULL_HEADER_SIZE + 4);
|
||||
int defaultSampleDescriptionIndex = trex.readUnsignedIntToInt() - 1;
|
||||
int defaultSampleDuration = trex.readUnsignedIntToInt();
|
||||
int defaultSampleSize = trex.readUnsignedIntToInt();
|
||||
int defaultSampleFlags = trex.readInt();
|
||||
return new DefaultSampleValues(defaultSampleDescriptionIndex, defaultSampleDuration,
|
||||
defaultSampleSize, defaultSampleFlags);
|
||||
}
|
||||
|
||||
private static void parseMoof(Track track, DefaultSampleValues extendsDefaults,
|
||||
ContainerAtom moof, TrackFragment out, int workaroundFlags, byte[] extendedTypeScratch) {
|
||||
parseTraf(track, extendsDefaults, moof.getContainerAtomOfType(Atom.TYPE_traf),
|
||||
out, workaroundFlags, extendedTypeScratch);
|
||||
}
|
||||
|
||||
/**
|
||||
* Parses a traf atom (defined in 14496-12).
|
||||
*/
|
||||
private static void parseTraf(Track track, DefaultSampleValues extendsDefaults,
|
||||
ContainerAtom traf, TrackFragment out, int workaroundFlags, byte[] extendedTypeScratch) {
|
||||
LeafAtom tfdtAtom = traf.getLeafAtomOfType(Atom.TYPE_tfdt);
|
||||
long decodeTime = tfdtAtom == null ? 0 : parseTfdt(traf.getLeafAtomOfType(Atom.TYPE_tfdt).data);
|
||||
|
||||
LeafAtom tfhd = traf.getLeafAtomOfType(Atom.TYPE_tfhd);
|
||||
DefaultSampleValues fragmentHeader = parseTfhd(extendsDefaults, tfhd.data);
|
||||
out.sampleDescriptionIndex = fragmentHeader.sampleDescriptionIndex;
|
||||
|
||||
LeafAtom trun = traf.getLeafAtomOfType(Atom.TYPE_trun);
|
||||
parseTrun(track, fragmentHeader, decodeTime, workaroundFlags, trun.data, out);
|
||||
|
||||
LeafAtom saiz = traf.getLeafAtomOfType(Atom.TYPE_saiz);
|
||||
if (saiz != null) {
|
||||
TrackEncryptionBox trackEncryptionBox =
|
||||
track.sampleDescriptionEncryptionBoxes[fragmentHeader.sampleDescriptionIndex];
|
||||
parseSaiz(trackEncryptionBox, saiz.data, out);
|
||||
}
|
||||
|
||||
LeafAtom senc = traf.getLeafAtomOfType(Atom.TYPE_senc);
|
||||
if (senc != null) {
|
||||
parseSenc(senc.data, out);
|
||||
}
|
||||
|
||||
int childrenSize = traf.leafChildren.size();
|
||||
for (int i = 0; i < childrenSize; i++) {
|
||||
LeafAtom atom = traf.leafChildren.get(i);
|
||||
if (atom.type == Atom.TYPE_uuid) {
|
||||
parseUuid(atom.data, out, extendedTypeScratch);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
private static void parseSaiz(TrackEncryptionBox encryptionBox, ParsableByteArray saiz,
|
||||
TrackFragment out) {
|
||||
int vectorSize = encryptionBox.initializationVectorSize;
|
||||
saiz.setPosition(Atom.HEADER_SIZE);
|
||||
int fullAtom = saiz.readInt();
|
||||
int flags = Atom.parseFullAtomFlags(fullAtom);
|
||||
if ((flags & 0x01) == 1) {
|
||||
saiz.skip(8);
|
||||
}
|
||||
int defaultSampleInfoSize = saiz.readUnsignedByte();
|
||||
|
||||
int sampleCount = saiz.readUnsignedIntToInt();
|
||||
if (sampleCount != out.length) {
|
||||
throw new IllegalStateException("Length mismatch: " + sampleCount + ", " + out.length);
|
||||
}
|
||||
|
||||
int totalSize = 0;
|
||||
if (defaultSampleInfoSize == 0) {
|
||||
boolean[] sampleHasSubsampleEncryptionTable = out.sampleHasSubsampleEncryptionTable;
|
||||
for (int i = 0; i < sampleCount; i++) {
|
||||
int sampleInfoSize = saiz.readUnsignedByte();
|
||||
totalSize += sampleInfoSize;
|
||||
sampleHasSubsampleEncryptionTable[i] = sampleInfoSize > vectorSize;
|
||||
}
|
||||
} else {
|
||||
boolean subsampleEncryption = defaultSampleInfoSize > vectorSize;
|
||||
totalSize += defaultSampleInfoSize * sampleCount;
|
||||
Arrays.fill(out.sampleHasSubsampleEncryptionTable, 0, sampleCount, subsampleEncryption);
|
||||
}
|
||||
out.initEncryptionData(totalSize);
|
||||
}
|
||||
|
||||
/**
|
||||
* Parses a tfhd atom (defined in 14496-12).
|
||||
*
|
||||
* @param extendsDefaults Default sample values from the trex atom.
|
||||
* @return The parsed default sample values.
|
||||
*/
|
||||
private static DefaultSampleValues parseTfhd(DefaultSampleValues extendsDefaults,
|
||||
ParsableByteArray tfhd) {
|
||||
tfhd.setPosition(Atom.HEADER_SIZE);
|
||||
int fullAtom = tfhd.readInt();
|
||||
int flags = Atom.parseFullAtomFlags(fullAtom);
|
||||
|
||||
tfhd.skip(4); // trackId
|
||||
if ((flags & 0x01 /* base_data_offset_present */) != 0) {
|
||||
tfhd.skip(8);
|
||||
}
|
||||
|
||||
int defaultSampleDescriptionIndex =
|
||||
((flags & 0x02 /* default_sample_description_index_present */) != 0) ?
|
||||
tfhd.readUnsignedIntToInt() - 1 : extendsDefaults.sampleDescriptionIndex;
|
||||
int defaultSampleDuration = ((flags & 0x08 /* default_sample_duration_present */) != 0) ?
|
||||
tfhd.readUnsignedIntToInt() : extendsDefaults.duration;
|
||||
int defaultSampleSize = ((flags & 0x10 /* default_sample_size_present */) != 0) ?
|
||||
tfhd.readUnsignedIntToInt() : extendsDefaults.size;
|
||||
int defaultSampleFlags = ((flags & 0x20 /* default_sample_flags_present */) != 0) ?
|
||||
tfhd.readUnsignedIntToInt() : extendsDefaults.flags;
|
||||
return new DefaultSampleValues(defaultSampleDescriptionIndex, defaultSampleDuration,
|
||||
defaultSampleSize, defaultSampleFlags);
|
||||
}
|
||||
|
||||
/**
|
||||
* Parses a tfdt atom (defined in 14496-12).
|
||||
*
|
||||
* @return baseMediaDecodeTime The sum of the decode durations of all earlier samples in the
|
||||
* media, expressed in the media's timescale.
|
||||
*/
|
||||
private static long parseTfdt(ParsableByteArray tfdt) {
|
||||
tfdt.setPosition(Atom.HEADER_SIZE);
|
||||
int fullAtom = tfdt.readInt();
|
||||
int version = Atom.parseFullAtomVersion(fullAtom);
|
||||
return version == 1 ? tfdt.readUnsignedLongToLong() : tfdt.readUnsignedInt();
|
||||
}
|
||||
|
||||
/**
|
||||
* Parses a trun atom (defined in 14496-12).
|
||||
*
|
||||
* @param track The corresponding track.
|
||||
* @param defaultSampleValues Default sample values.
|
||||
* @param decodeTime The decode time.
|
||||
* @param trun The trun atom to parse.
|
||||
* @param out The {@TrackFragment} into which parsed data should be placed.
|
||||
*/
|
||||
private static void parseTrun(Track track, DefaultSampleValues defaultSampleValues,
|
||||
long decodeTime, int workaroundFlags, ParsableByteArray trun, TrackFragment out) {
|
||||
trun.setPosition(Atom.HEADER_SIZE);
|
||||
int fullAtom = trun.readInt();
|
||||
int flags = Atom.parseFullAtomFlags(fullAtom);
|
||||
|
||||
int sampleCount = trun.readUnsignedIntToInt();
|
||||
if ((flags & 0x01 /* data_offset_present */) != 0) {
|
||||
trun.skip(4);
|
||||
}
|
||||
|
||||
boolean firstSampleFlagsPresent = (flags & 0x04 /* first_sample_flags_present */) != 0;
|
||||
int firstSampleFlags = defaultSampleValues.flags;
|
||||
if (firstSampleFlagsPresent) {
|
||||
firstSampleFlags = trun.readUnsignedIntToInt();
|
||||
}
|
||||
|
||||
boolean sampleDurationsPresent = (flags & 0x100 /* sample_duration_present */) != 0;
|
||||
boolean sampleSizesPresent = (flags & 0x200 /* sample_size_present */) != 0;
|
||||
boolean sampleFlagsPresent = (flags & 0x400 /* sample_flags_present */) != 0;
|
||||
boolean sampleCompositionTimeOffsetsPresent =
|
||||
(flags & 0x800 /* sample_composition_time_offsets_present */) != 0;
|
||||
|
||||
out.initTables(sampleCount);
|
||||
int[] sampleSizeTable = out.sampleSizeTable;
|
||||
int[] sampleCompositionTimeOffsetTable = out.sampleCompositionTimeOffsetTable;
|
||||
long[] sampleDecodingTimeTable = out.sampleDecodingTimeTable;
|
||||
boolean[] sampleIsSyncFrameTable = out.sampleIsSyncFrameTable;
|
||||
|
||||
long timescale = track.timescale;
|
||||
long cumulativeTime = decodeTime;
|
||||
boolean workaroundEveryVideoFrameIsSyncFrame = track.type == Track.TYPE_VIDEO
|
||||
&& ((workaroundFlags & WORKAROUND_EVERY_VIDEO_FRAME_IS_SYNC_FRAME)
|
||||
== WORKAROUND_EVERY_VIDEO_FRAME_IS_SYNC_FRAME);
|
||||
for (int i = 0; i < sampleCount; i++) {
|
||||
// Use trun values if present, otherwise tfhd, otherwise trex.
|
||||
int sampleDuration = sampleDurationsPresent ? trun.readUnsignedIntToInt()
|
||||
: defaultSampleValues.duration;
|
||||
int sampleSize = sampleSizesPresent ? trun.readUnsignedIntToInt() : defaultSampleValues.size;
|
||||
int sampleFlags = (i == 0 && firstSampleFlagsPresent) ? firstSampleFlags
|
||||
: sampleFlagsPresent ? trun.readInt() : defaultSampleValues.flags;
|
||||
if (sampleCompositionTimeOffsetsPresent) {
|
||||
// The BMFF spec (ISO 14496-12) states that sample offsets should be unsigned integers in
|
||||
// version 0 trun boxes, however a significant number of streams violate the spec and use
|
||||
// signed integers instead. It's safe to always parse sample offsets as signed integers
|
||||
// here, because unsigned integers will still be parsed correctly (unless their top bit is
|
||||
// set, which is never true in practice because sample offsets are always small).
|
||||
int sampleOffset = trun.readInt();
|
||||
sampleCompositionTimeOffsetTable[i] = (int) ((sampleOffset * 1000) / timescale);
|
||||
} else {
|
||||
sampleCompositionTimeOffsetTable[i] = 0;
|
||||
}
|
||||
sampleDecodingTimeTable[i] = (cumulativeTime * 1000) / timescale;
|
||||
sampleSizeTable[i] = sampleSize;
|
||||
sampleIsSyncFrameTable[i] = ((sampleFlags >> 16) & 0x1) == 0
|
||||
&& (!workaroundEveryVideoFrameIsSyncFrame || i == 0);
|
||||
cumulativeTime += sampleDuration;
|
||||
}
|
||||
}
|
||||
|
||||
private static void parseUuid(ParsableByteArray uuid, TrackFragment out,
|
||||
byte[] extendedTypeScratch) {
|
||||
uuid.setPosition(Atom.HEADER_SIZE);
|
||||
uuid.readBytes(extendedTypeScratch, 0, 16);
|
||||
|
||||
// Currently this parser only supports Microsoft's PIFF SampleEncryptionBox.
|
||||
if (!Arrays.equals(extendedTypeScratch, PIFF_SAMPLE_ENCRYPTION_BOX_EXTENDED_TYPE)) {
|
||||
return;
|
||||
}
|
||||
|
||||
// Except for the extended type, this box is identical to a SENC box. See "Portable encoding of
|
||||
// audio-video objects: The Protected Interoperable File Format (PIFF), John A. Bocharov et al,
|
||||
// Section 5.3.2.1."
|
||||
parseSenc(uuid, 16, out);
|
||||
}
|
||||
|
||||
/**
 * Parses a senc atom whose payload starts immediately after the atom header.
 */
private static void parseSenc(ParsableByteArray senc, TrackFragment out) {
  parseSenc(senc, 0, out);
}
|
||||
|
||||
/**
 * Parses a senc atom, or the senc-shaped payload of a PIFF uuid atom.
 *
 * @param senc The atom to parse.
 * @param offset Extra bytes between the atom header and the full-atom header (16 for the PIFF
 *     uuid variant, 0 for a plain senc atom).
 * @param out The fragment into which parsed encryption data should be placed.
 */
private static void parseSenc(ParsableByteArray senc, int offset, TrackFragment out) {
  senc.setPosition(Atom.HEADER_SIZE + offset);
  int fullAtom = senc.readInt();
  int flags = Atom.parseFullAtomFlags(fullAtom);

  if ((flags & 0x01 /* override_track_encryption_box_parameters */) != 0) {
    // TODO: Implement this.
    throw new IllegalStateException("Overriding TrackEncryptionBox parameters is unsupported");
  }

  boolean subsampleEncryption = (flags & 0x02 /* use_subsample_encryption */) != 0;
  int sampleCount = senc.readUnsignedIntToInt();
  // The senc sample count must match the trun-derived run length, since the per-sample tables
  // are parallel arrays.
  if (sampleCount != out.length) {
    throw new IllegalStateException("Length mismatch: " + sampleCount + ", " + out.length);
  }

  Arrays.fill(out.sampleHasSubsampleEncryptionTable, 0, sampleCount, subsampleEncryption);
  out.initEncryptionData(senc.bytesLeft());
  out.fillEncryptionData(senc);
}
|
||||
|
||||
/**
 * Parses a sidx atom (defined in 14496-12).
 *
 * @param atom The sidx atom to parse.
 * @return A {@link SegmentIndex} describing the referenced media segments.
 */
private static SegmentIndex parseSidx(ParsableByteArray atom) {
  atom.setPosition(Atom.HEADER_SIZE);
  int fullAtom = atom.readInt();
  int version = Atom.parseFullAtomVersion(fullAtom);

  atom.skip(4); // reference_ID, unused here.
  long timescale = atom.readUnsignedInt();
  long earliestPresentationTime;
  long firstOffset;
  // Version 0 stores time/offset as 32-bit fields; later versions use 64-bit fields.
  if (version == 0) {
    earliestPresentationTime = atom.readUnsignedInt();
    firstOffset = atom.readUnsignedInt();
  } else {
    earliestPresentationTime = atom.readUnsignedLongToLong();
    firstOffset = atom.readUnsignedLongToLong();
  }

  atom.skip(2); // reserved bits before reference_count.

  int referenceCount = atom.readUnsignedShort();
  int[] sizes = new int[referenceCount];
  long[] offsets = new long[referenceCount];
  long[] durationsUs = new long[referenceCount];
  long[] timesUs = new long[referenceCount];

  long offset = firstOffset;
  long time = earliestPresentationTime;
  long timeUs = Util.scaleLargeTimestamp(time, C.MICROS_PER_SECOND, timescale);
  for (int i = 0; i < referenceCount; i++) {
    int firstInt = atom.readInt();

    // The top bit flags an indirect reference (a sidx pointing at another sidx), which this
    // parser doesn't support.
    int type = 0x80000000 & firstInt;
    if (type != 0) {
      throw new IllegalStateException("Unhandled indirect reference");
    }
    long referenceDuration = atom.readUnsignedInt();

    sizes[i] = 0x7fffffff & firstInt; // referenced_size is the lower 31 bits.
    offsets[i] = offset;

    // Calculate time and duration values such that any rounding errors are consistent. i.e. That
    // timesUs[i] + durationsUs[i] == timesUs[i + 1].
    timesUs[i] = timeUs;
    time += referenceDuration;
    timeUs = Util.scaleLargeTimestamp(time, C.MICROS_PER_SECOND, timescale);
    durationsUs[i] = timeUs - timesUs[i];

    atom.skip(4); // SAP fields, unused here.
    offset += sizes[i];
  }

  return new SegmentIndex(atom.limit(), sizes, offsets, durationsUs, timesUs);
}
|
||||
|
||||
/**
 * Attempts to buffer the current fragment's sample encryption data from the stream.
 *
 * @param inputStream The stream from which to read.
 * @return {@link #RESULT_NEED_MORE_DATA} if the stream can't yet supply all of the data;
 *     otherwise 0, after transitioning to {@link #STATE_READING_SAMPLE}.
 */
private int readEncryptionData(NonBlockingInputStream inputStream) {
  boolean success = fragmentRun.fillEncryptionData(inputStream);
  if (!success) {
    return RESULT_NEED_MORE_DATA;
  }
  enterState(STATE_READING_SAMPLE);
  return 0;
}
|
||||
|
||||
/**
|
||||
* Attempts to read or skip the next sample in the current mdat atom.
|
||||
* <p>
|
||||
* If there are no more samples in the current mdat atom then the parser state is transitioned
|
||||
* to {@link #STATE_READING_ATOM_HEADER} and 0 is returned.
|
||||
* <p>
|
||||
* If there's a pending seek to a sync frame, and if the next sample is before that frame, then
|
||||
* the sample is skipped. Otherwise it is read.
|
||||
* <p>
|
||||
* It is possible for a sample to be read or skipped in part if there is insufficent data
|
||||
* available from the {@link NonBlockingInputStream}. In this case the remainder of the sample
|
||||
* can be read in a subsequent call passing the same {@link SampleHolder}.
|
||||
*
|
||||
* @param inputStream The stream from which to read the sample.
|
||||
* @param out The holder into which to write the sample.
|
||||
* @return A combination of RESULT_* flags indicating the result of the call.
|
||||
*/
|
||||
private int readOrSkipSample(NonBlockingInputStream inputStream, SampleHolder out) {
|
||||
if (sampleIndex >= fragmentRun.length) {
|
||||
// We've run out of samples in the current mdat atom.
|
||||
enterState(STATE_READING_ATOM_HEADER);
|
||||
return 0;
|
||||
}
|
||||
int sampleSize = fragmentRun.sampleSizeTable[sampleIndex];
|
||||
if (inputStream.getAvailableByteCount() < sampleSize) {
|
||||
return RESULT_NEED_MORE_DATA;
|
||||
}
|
||||
if (sampleIndex < pendingSeekSyncSampleIndex) {
|
||||
return skipSample(inputStream, sampleSize);
|
||||
}
|
||||
return readSample(inputStream, sampleSize, out);
|
||||
}
|
||||
|
||||
/**
 * Skips the next sample, consuming both its entry in the fragment's encryption data (if any)
 * and its media data, so the parser's cursors stay aligned.
 *
 * @param inputStream The stream holding the sample's media data.
 * @param sampleSize The size of the sample's media data in bytes.
 * @return 0, indicating the sample was consumed.
 */
private int skipSample(NonBlockingInputStream inputStream, int sampleSize) {
  if (fragmentRun.definesEncryptionData) {
    // Advance past this sample's encryption entry: the initialization vector, plus — when
    // subsample encryption is used — the subsample count and (2 + 4) bytes per subsample.
    ParsableByteArray sampleEncryptionData = fragmentRun.sampleEncryptionData;
    TrackEncryptionBox encryptionBox =
        track.sampleDescriptionEncryptionBoxes[fragmentRun.sampleDescriptionIndex];
    int vectorSize = encryptionBox.initializationVectorSize;
    boolean subsampleEncryption = fragmentRun.sampleHasSubsampleEncryptionTable[sampleIndex];
    sampleEncryptionData.skip(vectorSize);
    int subsampleCount = subsampleEncryption ? sampleEncryptionData.readUnsignedShort() : 1;
    if (subsampleEncryption) {
      sampleEncryptionData.skip((2 + 4) * subsampleCount);
    }
  }

  inputStream.skip(sampleSize);

  sampleIndex++;
  enterState(STATE_READING_SAMPLE);
  return 0;
}
|
||||
|
||||
/**
 * Reads the next sample from the stream into {@code out} and advances to the following sample.
 *
 * @param inputStream The stream from which to read the sample's data.
 * @param sampleSize The size of the sample in bytes.
 * @param out The holder into which to write the sample.
 * @return A combination of RESULT_* flags indicating the result of the call.
 */
private int readSample(NonBlockingInputStream inputStream, int sampleSize, SampleHolder out) {
  if (out == null) {
    return RESULT_NEED_SAMPLE_HOLDER;
  }
  // Fragment presentation times are in milliseconds; SampleHolder expects microseconds.
  out.timeUs = fragmentRun.getSamplePresentationTime(sampleIndex) * 1000L;
  out.flags = 0;
  if (fragmentRun.sampleIsSyncFrameTable[sampleIndex]) {
    out.flags |= C.SAMPLE_FLAG_SYNC;
    lastSyncSampleIndex = sampleIndex;
  }
  if (out.data == null || out.data.capacity() < sampleSize) {
    out.replaceBuffer(sampleSize);
  }
  if (fragmentRun.definesEncryptionData) {
    readSampleEncryptionData(fragmentRun.sampleEncryptionData, out);
  }

  // NOTE(review): out.data may still be null after replaceBuffer (the null check below implies
  // allocation can fail); in that case the sample is consumed from the stream but reported with
  // size 0 — confirm callers treat this as a dropped sample.
  ByteBuffer outputData = out.data;
  if (outputData == null) {
    inputStream.skip(sampleSize);
    out.size = 0;
  } else {
    inputStream.read(outputData, sampleSize);
    if (track.type == Track.TYPE_VIDEO) {
      // The mp4 file contains length-prefixed NAL units, but the decoder wants start code
      // delimited content.
      H264Util.replaceLengthPrefixesWithAvcStartCodes(outputData, sampleSize);
    }
    out.size = sampleSize;
  }

  sampleIndex++;
  enterState(STATE_READING_SAMPLE);
  return RESULT_READ_SAMPLE;
}
|
||||
|
||||
/**
 * Populates {@code out.cryptoInfo} for the current sample from the fragment's sample
 * encryption data, and flags the sample as encrypted. A no-op when the sample description's
 * encryption box reports the content as unencrypted.
 */
private void readSampleEncryptionData(ParsableByteArray sampleEncryptionData, SampleHolder out) {
  TrackEncryptionBox encryptionBox =
      track.sampleDescriptionEncryptionBoxes[fragmentRun.sampleDescriptionIndex];
  if (!encryptionBox.isEncrypted) {
    return;
  }

  byte[] keyId = encryptionBox.keyId;
  int vectorSize = encryptionBox.initializationVectorSize;
  boolean subsampleEncryption = fragmentRun.sampleHasSubsampleEncryptionTable[sampleIndex];

  // Reuse the holder's IV array when it's already exactly 16 bytes; only vectorSize bytes are
  // overwritten. NOTE(review): assumes bytes past vectorSize are zero from allocation — verify.
  byte[] vector = out.cryptoInfo.iv;
  if (vector == null || vector.length != 16) {
    vector = new byte[16];
  }
  sampleEncryptionData.readBytes(vector, 0, vectorSize);

  int subsampleCount = subsampleEncryption ? sampleEncryptionData.readUnsignedShort() : 1;
  // Reuse the holder's subsample arrays when they're large enough.
  int[] clearDataSizes = out.cryptoInfo.numBytesOfClearData;
  if (clearDataSizes == null || clearDataSizes.length < subsampleCount) {
    clearDataSizes = new int[subsampleCount];
  }
  int[] encryptedDataSizes = out.cryptoInfo.numBytesOfEncryptedData;
  if (encryptedDataSizes == null || encryptedDataSizes.length < subsampleCount) {
    encryptedDataSizes = new int[subsampleCount];
  }
  if (subsampleEncryption) {
    for (int i = 0; i < subsampleCount; i++) {
      clearDataSizes[i] = sampleEncryptionData.readUnsignedShort();
      encryptedDataSizes[i] = sampleEncryptionData.readUnsignedIntToInt();
    }
  } else {
    // Whole-sample encryption: model it as a single fully-encrypted subsample.
    clearDataSizes[0] = 0;
    encryptedDataSizes[0] = fragmentRun.sampleSizeTable[sampleIndex];
  }

  out.cryptoInfo.set(subsampleCount, clearDataSizes, encryptedDataSizes, keyId, vector,
      C.CRYPTO_MODE_AES_CTR);
  out.flags |= C.SAMPLE_FLAG_ENCRYPTED;
}
|
||||
|
||||
}
|
||||
|
|
@ -1,550 +0,0 @@
|
|||
/*
|
||||
* Copyright (C) 2014 The Android Open Source Project
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
*/
|
||||
package com.google.android.exoplayer.chunk.parser.webm;
|
||||
|
||||
import com.google.android.exoplayer.C;
|
||||
import com.google.android.exoplayer.ParserException;
|
||||
import com.google.android.exoplayer.upstream.NonBlockingInputStream;
|
||||
import com.google.android.exoplayer.util.Assertions;
|
||||
|
||||
import java.nio.ByteBuffer;
|
||||
import java.nio.charset.Charset;
|
||||
import java.util.Stack;
|
||||
|
||||
/**
|
||||
* Default version of a basic event-driven incremental EBML parser which needs an
|
||||
* {@link EbmlEventHandler} to define IDs/types and react to events.
|
||||
*
|
||||
* <p>EBML can be summarized as a binary XML format somewhat similar to Protocol Buffers.
|
||||
* It was originally designed for the Matroska container format. More information about EBML and
|
||||
* Matroska is available <a href="http://www.matroska.org/technical/specs/index.html">here</a>.
|
||||
*/
|
||||
/* package */ final class DefaultEbmlReader implements EbmlReader {

  // State values used in variables state, elementIdState, elementContentSizeState, and
  // varintBytesState. An element's ID, its content size, and its contents are each read
  // incrementally, so each has its own state to allow resuming after NEED_MORE_DATA.
  private static final int STATE_BEGIN_READING = 0;
  private static final int STATE_READ_CONTENTS = 1;
  private static final int STATE_FINISHED_READING = 2;

  /**
   * The first byte of a variable-length integer (varint) will have one of these bit masks
   * indicating the total length in bytes.
   *
   * <p>{@code 0x80} is a one-byte integer, {@code 0x40} is two bytes, and so on up to eight bytes.
   */
  private static final int[] VARINT_LENGTH_MASKS = new int[] {
    0x80, 0x40, 0x20, 0x10, 0x08, 0x04, 0x02, 0x01
  };

  // Integer elements may be up to 8 bytes; float elements must be exactly 4 or 8 bytes.
  private static final int MAX_INTEGER_ELEMENT_SIZE_BYTES = 8;
  private static final int VALID_FLOAT32_ELEMENT_SIZE_BYTES = 4;
  private static final int VALID_FLOAT64_ELEMENT_SIZE_BYTES = 8;

  /**
   * Scratch space to read in EBML varints, unsigned ints, and floats - each of which can be
   * up to 8 bytes.
   */
  private final byte[] tempByteArray = new byte[8];
  // Open master elements, innermost on top; popped as bytesRead passes each end offset.
  private final Stack<MasterElement> masterElementsStack = new Stack<MasterElement>();

  /**
   * Current {@link EbmlEventHandler} which is queried for element types
   * and informed of element events.
   */
  private EbmlEventHandler eventHandler;

  /**
   * Overall state for the current element. Must be one of the {@code STATE_*} constants.
   */
  private int state;

  /**
   * Total bytes read since starting or the last {@link #reset()}.
   */
  private long bytesRead;

  /**
   * The starting byte offset of the current element being parsed.
   */
  private long elementOffset;

  /**
   * Holds the current element ID after {@link #elementIdState} is {@link #STATE_FINISHED_READING}.
   */
  private int elementId;

  /**
   * State for the ID of the current element. Must be one of the {@code STATE_*} constants.
   */
  private int elementIdState;

  /**
   * Holds the current element content size after {@link #elementContentSizeState}
   * is {@link #STATE_FINISHED_READING}.
   */
  private long elementContentSize;

  /**
   * State for the content size of the current element.
   * Must be one of the {@code STATE_*} constants.
   */
  private int elementContentSizeState;

  /**
   * State for the current variable-length integer (varint) being read into
   * {@link #tempByteArray}. Must be one of the {@code STATE_*} constants.
   */
  private int varintBytesState;

  /**
   * Length in bytes of the current variable-length integer (varint) being read into
   * {@link #tempByteArray}.
   */
  private int varintBytesLength;

  /**
   * Counts the number of bytes being contiguously read into either {@link #tempByteArray} or
   * {@link #stringBytes}. Used to determine when all required bytes have been read across
   * multiple calls.
   */
  private int bytesState;

  /**
   * Holds string element bytes as they're being read in. Allocated after the element content
   * size is known and released after calling {@link EbmlEventHandler#onStringElement(int, String)}.
   */
  private byte[] stringBytes;
|
||||
|
||||
/**
 * Sets the handler that supplies element types and receives parse events.
 */
@Override
public void setEventHandler(EbmlEventHandler eventHandler) {
  this.eventHandler = eventHandler;
}
|
||||
|
||||
/**
 * Advances the parser by one event: closing finished master elements, or reading the next
 * element header and dispatching its contents to the event handler according to its type.
 * Returns one of the {@code READ_RESULT_*} constants; callers loop while
 * {@code READ_RESULT_CONTINUE} is returned.
 */
@Override
public int read(NonBlockingInputStream inputStream) throws ParserException {
  Assertions.checkState(eventHandler != null);
  while (true) {
    // Close any master elements whose declared extent has been fully consumed.
    while (!masterElementsStack.isEmpty()
        && bytesRead >= masterElementsStack.peek().elementEndOffsetBytes) {
      eventHandler.onMasterElementEnd(masterElementsStack.pop().elementId);
      return READ_RESULT_CONTINUE;
    }

    // Read (or resume reading) the element header: ID then content size.
    if (state == STATE_BEGIN_READING) {
      int idResult = readElementId(inputStream);
      if (idResult != READ_RESULT_CONTINUE) {
        return idResult;
      }
      int sizeResult = readElementContentSize(inputStream);
      if (sizeResult != READ_RESULT_CONTINUE) {
        return sizeResult;
      }
      state = STATE_READ_CONTENTS;
      bytesState = 0;
    }

    int type = eventHandler.getElementType(elementId);
    switch (type) {
      case TYPE_MASTER:
        int masterHeaderSize = (int) (bytesRead - elementOffset); // Header size is 12 bytes max.
        masterElementsStack.add(new MasterElement(elementId, bytesRead + elementContentSize));
        eventHandler.onMasterElementStart(elementId, elementOffset, masterHeaderSize,
            elementContentSize);
        prepareForNextElement();
        return READ_RESULT_CONTINUE;
      case TYPE_UNSIGNED_INT:
        if (elementContentSize > MAX_INTEGER_ELEMENT_SIZE_BYTES) {
          throw new IllegalStateException("Invalid integer size " + elementContentSize);
        }
        int intResult =
            readBytesInternal(inputStream, tempByteArray, (int) elementContentSize);
        if (intResult != READ_RESULT_CONTINUE) {
          return intResult;
        }
        long intValue = getTempByteArrayValue((int) elementContentSize, false);
        eventHandler.onIntegerElement(elementId, intValue);
        prepareForNextElement();
        return READ_RESULT_CONTINUE;
      case TYPE_FLOAT:
        if (elementContentSize != VALID_FLOAT32_ELEMENT_SIZE_BYTES
            && elementContentSize != VALID_FLOAT64_ELEMENT_SIZE_BYTES) {
          throw new IllegalStateException("Invalid float size " + elementContentSize);
        }
        int floatResult =
            readBytesInternal(inputStream, tempByteArray, (int) elementContentSize);
        if (floatResult != READ_RESULT_CONTINUE) {
          return floatResult;
        }
        // Reinterpret the raw big-endian bits as an IEEE-754 float of the matching width.
        long valueBits = getTempByteArrayValue((int) elementContentSize, false);
        double floatValue;
        if (elementContentSize == VALID_FLOAT32_ELEMENT_SIZE_BYTES) {
          floatValue = Float.intBitsToFloat((int) valueBits);
        } else {
          floatValue = Double.longBitsToDouble(valueBits);
        }
        eventHandler.onFloatElement(elementId, floatValue);
        prepareForNextElement();
        return READ_RESULT_CONTINUE;
      case TYPE_STRING:
        if (elementContentSize > Integer.MAX_VALUE) {
          throw new IllegalStateException(
              "String element size " + elementContentSize + " is larger than MAX_INT");
        }
        // stringBytes persists across calls so a partially-read string can be resumed.
        if (stringBytes == null) {
          stringBytes = new byte[(int) elementContentSize];
        }
        int stringResult =
            readBytesInternal(inputStream, stringBytes, (int) elementContentSize);
        if (stringResult != READ_RESULT_CONTINUE) {
          return stringResult;
        }
        String stringValue = new String(stringBytes, Charset.forName(C.UTF8_NAME));
        stringBytes = null;
        eventHandler.onStringElement(elementId, stringValue);
        prepareForNextElement();
        return READ_RESULT_CONTINUE;
      case TYPE_BINARY:
        if (elementContentSize > Integer.MAX_VALUE) {
          throw new IllegalStateException(
              "Binary element size " + elementContentSize + " is larger than MAX_INT");
        }
        // Binary contents are handed to the event handler whole, so wait for all the bytes.
        if (inputStream.getAvailableByteCount() < elementContentSize) {
          return READ_RESULT_NEED_MORE_DATA;
        }
        int binaryHeaderSize = (int) (bytesRead - elementOffset); // Header size is 12 bytes max.
        boolean consumed = eventHandler.onBinaryElement(
            elementId, elementOffset, binaryHeaderSize, (int) elementContentSize, inputStream);
        if (consumed) {
          // The handler must have read exactly the element's contents from the stream.
          long expectedBytesRead = elementOffset + binaryHeaderSize + elementContentSize;
          if (expectedBytesRead != bytesRead) {
            throw new IllegalStateException("Incorrect total bytes read. Expected "
                + expectedBytesRead + " but actually " + bytesRead);
          }
          prepareForNextElement();
        }
        return READ_RESULT_CONTINUE;
      case TYPE_UNKNOWN:
        if (elementContentSize > Integer.MAX_VALUE) {
          throw new IllegalStateException(
              "Unknown element size " + elementContentSize + " is larger than MAX_INT");
        }
        // Unknown elements are skipped without emitting an event, hence break (not return) so
        // the loop immediately continues with the next element.
        int skipResult = skipBytesInternal(inputStream, (int) elementContentSize);
        if (skipResult != READ_RESULT_CONTINUE) {
          return skipResult;
        }
        prepareForNextElement();
        break;
      default:
        throw new IllegalStateException("Invalid element type " + type);
    }
  }
}
|
||||
|
||||
/**
 * Returns the total number of bytes consumed since construction or the last {@link #reset()}.
 */
@Override
public long getBytesRead() {
  return bytesRead;
}
|
||||
|
||||
@Override
|
||||
public void reset() {
|
||||
prepareForNextElement();
|
||||
masterElementsStack.clear();
|
||||
bytesRead = 0;
|
||||
}
|
||||
|
||||
/**
 * Reads a complete varint from the stream in one call.
 *
 * @throws IllegalStateException if the stream cannot supply the whole varint immediately —
 *     this entry point does not support resuming.
 */
@Override
public long readVarint(NonBlockingInputStream inputStream) {
  varintBytesState = STATE_BEGIN_READING;
  int result = readVarintBytes(inputStream);
  if (result != READ_RESULT_CONTINUE) {
    throw new IllegalStateException("Couldn't read varint");
  }
  return getTempByteArrayValue(varintBytesLength, true);
}
|
||||
|
||||
/**
 * Reads exactly {@code totalBytes} into {@code byteBuffer} in one call.
 *
 * @throws IllegalStateException if the stream cannot supply all the bytes immediately — this
 *     entry point does not support resuming.
 */
@Override
public void readBytes(NonBlockingInputStream inputStream, ByteBuffer byteBuffer, int totalBytes) {
  bytesState = 0;
  int result = readBytesInternal(inputStream, byteBuffer, totalBytes);
  if (result != READ_RESULT_CONTINUE) {
    throw new IllegalStateException("Couldn't read bytes into buffer");
  }
}
|
||||
|
||||
/**
 * Reads exactly {@code totalBytes} into {@code byteArray} in one call.
 *
 * @throws IllegalStateException if the stream cannot supply all the bytes immediately — this
 *     entry point does not support resuming.
 */
@Override
public void readBytes(NonBlockingInputStream inputStream, byte[] byteArray, int totalBytes) {
  bytesState = 0;
  int result = readBytesInternal(inputStream, byteArray, totalBytes);
  if (result != READ_RESULT_CONTINUE) {
    throw new IllegalStateException("Couldn't read bytes into array");
  }
}
|
||||
|
||||
/**
 * Skips exactly {@code totalBytes} in one call.
 *
 * @throws IllegalStateException if the stream cannot skip all the bytes immediately — this
 *     entry point does not support resuming.
 */
@Override
public void skipBytes(NonBlockingInputStream inputStream, int totalBytes) {
  bytesState = 0;
  int result = skipBytesInternal(inputStream, totalBytes);
  if (result != READ_RESULT_CONTINUE) {
    throw new IllegalStateException("Couldn't skip bytes");
  }
}
|
||||
|
||||
/**
 * Resets the internal state of {@link #read(NonBlockingInputStream)} so that it can start
 * reading a new element from scratch.
 */
private void prepareForNextElement() {
  state = STATE_BEGIN_READING;
  elementIdState = STATE_BEGIN_READING;
  elementContentSizeState = STATE_BEGIN_READING;
  // The next element begins wherever reading has progressed to.
  elementOffset = bytesRead;
}
|
||||
|
||||
/**
 * Reads an element ID such that reading can be stopped and started again in a later call
 * if not enough bytes are available. Returns {@link #READ_RESULT_CONTINUE} if a full element ID
 * has been read into {@link #elementId}. Reset {@link #elementIdState} to
 * {@link #STATE_BEGIN_READING} before calling to indicate a new element ID should be read.
 *
 * @param inputStream The input stream from which an element ID should be read
 * @return One of the {@code RESULT_*} flags defined in this class
 */
private int readElementId(NonBlockingInputStream inputStream) {
  if (elementIdState == STATE_FINISHED_READING) {
    return READ_RESULT_CONTINUE;
  }
  if (elementIdState == STATE_BEGIN_READING) {
    varintBytesState = STATE_BEGIN_READING;
    elementIdState = STATE_READ_CONTENTS;
  }
  int result = readVarintBytes(inputStream);
  if (result != READ_RESULT_CONTINUE) {
    return result;
  }
  // Element IDs are at most 4 bytes so cast to int now. The length mask is kept (second
  // argument false) because EBML IDs include it by convention.
  elementId = (int) getTempByteArrayValue(varintBytesLength, false);
  elementIdState = STATE_FINISHED_READING;
  return READ_RESULT_CONTINUE;
}
|
||||
|
||||
/**
 * Reads an element's content size such that reading can be stopped and started again in a later
 * call if not enough bytes are available.
 *
 * <p>Returns {@link #READ_RESULT_CONTINUE} if an entire element size has been
 * read into {@link #elementContentSize}. Reset {@link #elementContentSizeState} to
 * {@link #STATE_BEGIN_READING} before calling to indicate a new element size should be read.
 *
 * @param inputStream The input stream from which an element size should be read
 * @return One of the {@code RESULT_*} flags defined in this class
 */
private int readElementContentSize(NonBlockingInputStream inputStream) {
  if (elementContentSizeState == STATE_FINISHED_READING) {
    return READ_RESULT_CONTINUE;
  }
  if (elementContentSizeState == STATE_BEGIN_READING) {
    varintBytesState = STATE_BEGIN_READING;
    elementContentSizeState = STATE_READ_CONTENTS;
  }
  int result = readVarintBytes(inputStream);
  if (result != READ_RESULT_CONTINUE) {
    return result;
  }
  // Unlike element IDs, sizes have the varint length mask stripped (second argument true).
  elementContentSize = getTempByteArrayValue(varintBytesLength, true);
  elementContentSizeState = STATE_FINISHED_READING;
  return READ_RESULT_CONTINUE;
}
|
||||
|
||||
/**
 * Reads an EBML variable-length integer (varint) such that reading can be stopped and started
 * again in a later call if not enough bytes are available.
 *
 * <p>Returns {@link #READ_RESULT_CONTINUE} if an entire varint has been read into
 * {@link #tempByteArray} and the length of the varint is in {@link #varintBytesLength}.
 * Reset {@link #varintBytesState} to {@link #STATE_BEGIN_READING} before calling to indicate
 * a new varint should be read.
 *
 * @param inputStream The input stream from which a varint should be read
 * @return One of the {@code RESULT_*} flags defined in this class
 */
private int readVarintBytes(NonBlockingInputStream inputStream) {
  if (varintBytesState == STATE_FINISHED_READING) {
    return READ_RESULT_CONTINUE;
  }

  // Read first byte to get length.
  if (varintBytesState == STATE_BEGIN_READING) {
    bytesState = 0;
    int result = readBytesInternal(inputStream, tempByteArray, 1);
    if (result != READ_RESULT_CONTINUE) {
      return result;
    }
    varintBytesState = STATE_READ_CONTENTS;

    // The index of the highest set bit among the top bits of the first byte encodes the total
    // varint length: 0x80 -> 1 byte, 0x40 -> 2 bytes, ... 0x01 -> 8 bytes.
    int firstByte = tempByteArray[0] & 0xff;
    varintBytesLength = -1;
    for (int i = 0; i < VARINT_LENGTH_MASKS.length; i++) {
      if ((VARINT_LENGTH_MASKS[i] & firstByte) != 0) {
        varintBytesLength = i + 1;
        break;
      }
    }
    if (varintBytesLength == -1) {
      throw new IllegalStateException(
          "No valid varint length mask found at bytesRead = " + bytesRead);
    }
  }

  // Read remaining bytes. bytesState already accounts for the first byte, so this call only
  // fetches the varintBytesLength - bytesState bytes still outstanding.
  int result = readBytesInternal(inputStream, tempByteArray, varintBytesLength);
  if (result != READ_RESULT_CONTINUE) {
    return result;
  }

  // All bytes have been read.
  return READ_RESULT_CONTINUE;
}
|
||||
|
||||
/**
 * Reads a set amount of bytes into a {@link ByteBuffer} such that reading can be stopped
 * and started again later if not enough bytes are available.
 *
 * <p>Returns {@link #READ_RESULT_CONTINUE} if all bytes have been read. Reset
 * {@link #bytesState} to {@code 0} before calling to indicate a new set of bytes should be read.
 *
 * @param inputStream The input stream from which bytes should be read
 * @param byteBuffer The {@link ByteBuffer} into which bytes should be read
 * @param totalBytes The total size of bytes to be read
 * @return One of the {@code RESULT_*} flags defined in this class
 */
private int readBytesInternal(
    NonBlockingInputStream inputStream, ByteBuffer byteBuffer, int totalBytes) {
  if (bytesState == STATE_BEGIN_READING && totalBytes > byteBuffer.capacity()) {
    throw new IllegalArgumentException("Byte buffer not large enough");
  }
  if (bytesState >= totalBytes) {
    return READ_RESULT_CONTINUE;
  }
  int remainingBytes = totalBytes - bytesState;
  int additionalBytesRead = inputStream.read(byteBuffer, remainingBytes);
  return updateBytesState(additionalBytesRead, totalBytes);
}
|
||||
|
||||
/**
 * Reads a set amount of bytes into a {@code byte[]} such that reading can be stopped
 * and started again later if not enough bytes are available.
 *
 * <p>Returns {@link #READ_RESULT_CONTINUE} if all bytes have been read. Reset
 * {@link #bytesState} to {@code 0} before calling to indicate a new set of bytes should be read.
 *
 * @param inputStream The input stream from which bytes should be read
 * @param byteArray The {@code byte[]} into which bytes should be read
 * @param totalBytes The total size of bytes to be read
 * @return One of the {@code RESULT_*} flags defined in this class
 */
private int readBytesInternal(
    NonBlockingInputStream inputStream, byte[] byteArray, int totalBytes) {
  if (bytesState == STATE_BEGIN_READING && totalBytes > byteArray.length) {
    throw new IllegalArgumentException("Byte array not large enough");
  }
  if (bytesState >= totalBytes) {
    return READ_RESULT_CONTINUE;
  }
  int remainingBytes = totalBytes - bytesState;
  // Resumed reads append at offset bytesState so earlier partial reads are preserved.
  int additionalBytesRead = inputStream.read(byteArray, bytesState, remainingBytes);
  return updateBytesState(additionalBytesRead, totalBytes);
}
|
||||
|
||||
/**
 * Skips a set amount of bytes such that reading can be stopped and started again later if
 * not enough bytes are available.
 *
 * <p>Returns {@link #READ_RESULT_CONTINUE} if all bytes have been skipped. Reset
 * {@link #bytesState} to {@code 0} before calling to indicate a new set of bytes
 * should be skipped.
 *
 * @param inputStream The input stream from which bytes should be skipped
 * @param totalBytes The total size of bytes to be skipped
 * @return One of the {@code RESULT_*} flags defined in this class
 */
private int skipBytesInternal(NonBlockingInputStream inputStream, int totalBytes) {
  if (bytesState >= totalBytes) {
    return READ_RESULT_CONTINUE;
  }
  int remainingBytes = totalBytes - bytesState;
  int additionalBytesRead = inputStream.skip(remainingBytes);
  return updateBytesState(additionalBytesRead, totalBytes);
}
|
||||
|
||||
/**
|
||||
* Updates {@link #bytesState} and {@link #bytesRead} after reading bytes in one of the
|
||||
* {@code verbBytesInternal} methods.
|
||||
*
|
||||
* @param additionalBytesRead The number of additional bytes read to be accounted for
|
||||
* @param totalBytes The total size of bytes to be read or skipped
|
||||
* @return One of the {@code RESULT_*} flags defined in this class
|
||||
*/
|
||||
private int updateBytesState(int additionalBytesRead, int totalBytes) {
|
||||
if (additionalBytesRead == -1) {
|
||||
return READ_RESULT_END_OF_STREAM;
|
||||
}
|
||||
bytesState += additionalBytesRead;
|
||||
bytesRead += additionalBytesRead;
|
||||
if (bytesState < totalBytes) {
|
||||
return READ_RESULT_NEED_MORE_DATA;
|
||||
} else {
|
||||
return READ_RESULT_CONTINUE;
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Parses and returns the integer value currently read into the first {@code byteLength} bytes
|
||||
* of {@link #tempByteArray}. EBML varint length masks can optionally be removed.
|
||||
*
|
||||
* @param byteLength The number of bytes to parse from {@link #tempByteArray}
|
||||
* @param removeLengthMask Removes the variable-length integer length mask from the value
|
||||
* @return The resulting integer value. This value could be up to 8-bytes so a Java long is used
|
||||
*/
|
||||
private long getTempByteArrayValue(int byteLength, boolean removeLengthMask) {
|
||||
if (removeLengthMask) {
|
||||
tempByteArray[0] &= ~VARINT_LENGTH_MASKS[varintBytesLength - 1];
|
||||
}
|
||||
long varint = 0;
|
||||
for (int i = 0; i < byteLength; i++) {
|
||||
// Shift all existing bits up one byte and add the next byte at the bottom.
|
||||
varint = (varint << 8) | (tempByteArray[i] & 0xff);
|
||||
}
|
||||
return varint;
|
||||
}
|
||||
|
||||
/**
|
||||
* Used in {@link #masterElementsStack} to track when the current master element ends so that
|
||||
* {@link EbmlEventHandler#onMasterElementEnd(int)} is called.
|
||||
*/
|
||||
private static final class MasterElement {
|
||||
|
||||
private final int elementId;
|
||||
private final long elementEndOffsetBytes;
|
||||
|
||||
private MasterElement(int elementId, long elementEndOffsetBytes) {
|
||||
this.elementId = elementId;
|
||||
this.elementEndOffsetBytes = elementEndOffsetBytes;
|
||||
}
|
||||
|
||||
}
|
||||
|
||||
}
|
||||
|
|
@ -1,125 +0,0 @@
|
|||
/*
|
||||
* Copyright (C) 2014 The Android Open Source Project
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
*/
|
||||
package com.google.android.exoplayer.chunk.parser.webm;
|
||||
|
||||
import com.google.android.exoplayer.ParserException;
|
||||
import com.google.android.exoplayer.upstream.NonBlockingInputStream;
|
||||
|
||||
import java.nio.ByteBuffer;
|
||||
|
||||
/**
|
||||
* Defines EBML element IDs/types and reacts to events.
|
||||
*/
|
||||
/* package */ interface EbmlEventHandler {
|
||||
|
||||
/**
|
||||
* Retrieves the type of an element ID.
|
||||
*
|
||||
* <p>If {@link EbmlReader#TYPE_UNKNOWN} is returned then the element is skipped.
|
||||
* Note that all children of a skipped master element are also skipped.
|
||||
*
|
||||
* @param id The integer ID of this element
|
||||
* @return One of the {@code TYPE_} constants defined in this class
|
||||
*/
|
||||
public int getElementType(int id);
|
||||
|
||||
/**
|
||||
* Called when a master element is encountered in the {@link NonBlockingInputStream}.
|
||||
*
|
||||
* <p>Following events should be considered as taking place "within" this element until a
|
||||
* matching call to {@link #onMasterElementEnd(int)} is made. Note that it is possible for
|
||||
* another master element of the same ID to be nested within itself.
|
||||
*
|
||||
* @param id The integer ID of this element
|
||||
* @param elementOffsetBytes The byte offset where this element starts
|
||||
* @param headerSizeBytes The byte length of this element's ID and size header
|
||||
* @param contentsSizeBytes The byte length of this element's children
|
||||
* @throws ParserException If a parsing error occurs.
|
||||
*/
|
||||
public void onMasterElementStart(
|
||||
int id, long elementOffsetBytes, int headerSizeBytes,
|
||||
long contentsSizeBytes) throws ParserException;
|
||||
|
||||
/**
|
||||
* Called when a master element has finished reading in all of its children from the
|
||||
* {@link NonBlockingInputStream}.
|
||||
*
|
||||
* @param id The integer ID of this element
|
||||
* @throws ParserException If a parsing error occurs.
|
||||
*/
|
||||
public void onMasterElementEnd(int id) throws ParserException;
|
||||
|
||||
/**
|
||||
* Called when an integer element is encountered in the {@link NonBlockingInputStream}.
|
||||
*
|
||||
* @param id The integer ID of this element
|
||||
* @param value The integer value this element contains
|
||||
* @throws ParserException If a parsing error occurs.
|
||||
*/
|
||||
public void onIntegerElement(int id, long value) throws ParserException;
|
||||
|
||||
/**
|
||||
* Called when a float element is encountered in the {@link NonBlockingInputStream}.
|
||||
*
|
||||
* @param id The integer ID of this element
|
||||
* @param value The float value this element contains
|
||||
* @throws ParserException If a parsing error occurs.
|
||||
*/
|
||||
public void onFloatElement(int id, double value) throws ParserException;
|
||||
|
||||
/**
|
||||
* Called when a string element is encountered in the {@link NonBlockingInputStream}.
|
||||
*
|
||||
* @param id The integer ID of this element
|
||||
* @param value The string value this element contains
|
||||
* @throws ParserException If a parsing error occurs.
|
||||
*/
|
||||
public void onStringElement(int id, String value) throws ParserException;
|
||||
|
||||
/**
|
||||
* Called when a binary element is encountered in the {@link NonBlockingInputStream}.
|
||||
*
|
||||
* <p>The element header (containing element ID and content size) will already have been read.
|
||||
* Subclasses must either read nothing and return {@code false}, or exactly read the entire
|
||||
* contents of the element, which is {@code contentsSizeBytes} in length, and return {@code true}.
|
||||
*
|
||||
* <p>It's guaranteed that the full element contents will be immediately available from
|
||||
* {@code inputStream}.
|
||||
*
|
||||
* <p>Several methods in {@link EbmlReader} are available for reading the contents of a
|
||||
* binary element:
|
||||
* <ul>
|
||||
* <li>{@link EbmlReader#readVarint(NonBlockingInputStream)}.
|
||||
* <li>{@link EbmlReader#readBytes(NonBlockingInputStream, byte[], int)}.
|
||||
* <li>{@link EbmlReader#readBytes(NonBlockingInputStream, ByteBuffer, int)}.
|
||||
* <li>{@link EbmlReader#skipBytes(NonBlockingInputStream, int)}.
|
||||
* <li>{@link EbmlReader#getBytesRead()}.
|
||||
* </ul>
|
||||
*
|
||||
* @param id The integer ID of this element
|
||||
* @param elementOffsetBytes The byte offset where this element starts
|
||||
* @param headerSizeBytes The byte length of this element's ID and size header
|
||||
* @param contentsSizeBytes The byte length of this element's contents
|
||||
* @param inputStream The {@link NonBlockingInputStream} from which this
|
||||
* element's contents should be read
|
||||
* @return True if the element was read. False otherwise.
|
||||
* @throws ParserException If a parsing error occurs.
|
||||
*/
|
||||
public boolean onBinaryElement(
|
||||
int id, long elementOffsetBytes, int headerSizeBytes, int contentsSizeBytes,
|
||||
NonBlockingInputStream inputStream) throws ParserException;
|
||||
|
||||
}
|
||||
|
|
@ -1,109 +0,0 @@
|
|||
/*
|
||||
* Copyright (C) 2014 The Android Open Source Project
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
*/
|
||||
package com.google.android.exoplayer.chunk.parser.webm;
|
||||
|
||||
import com.google.android.exoplayer.ParserException;
|
||||
import com.google.android.exoplayer.upstream.NonBlockingInputStream;
|
||||
|
||||
import java.nio.ByteBuffer;
|
||||
|
||||
/**
|
||||
* Basic event-driven incremental EBML parser which needs an {@link EbmlEventHandler} to
|
||||
* define IDs/types and react to events.
|
||||
*
|
||||
* <p>EBML can be summarized as a binary XML format somewhat similar to Protocol Buffers.
|
||||
* It was originally designed for the Matroska container format. More information about EBML and
|
||||
* Matroska is available <a href="http://www.matroska.org/technical/specs/index.html">here</a>.
|
||||
*/
|
||||
/* package */ interface EbmlReader {
|
||||
|
||||
// Element Types
|
||||
/** Undefined element. */
|
||||
public static final int TYPE_UNKNOWN = 0;
|
||||
/** Contains child elements. */
|
||||
public static final int TYPE_MASTER = 1;
|
||||
/** Unsigned integer value of up to 8 bytes. */
|
||||
public static final int TYPE_UNSIGNED_INT = 2;
|
||||
public static final int TYPE_STRING = 3;
|
||||
public static final int TYPE_BINARY = 4;
|
||||
/** IEEE floating point value of either 4 or 8 bytes. */
|
||||
public static final int TYPE_FLOAT = 5;
|
||||
|
||||
// Return values for reading methods.
|
||||
public static final int READ_RESULT_CONTINUE = 0;
|
||||
public static final int READ_RESULT_NEED_MORE_DATA = 1;
|
||||
public static final int READ_RESULT_END_OF_STREAM = 2;
|
||||
|
||||
public void setEventHandler(EbmlEventHandler eventHandler);
|
||||
|
||||
/**
|
||||
* Reads from a {@link NonBlockingInputStream}, invoking an event callback if possible.
|
||||
*
|
||||
* @param inputStream The input stream from which data should be read
|
||||
* @return One of the {@code RESULT_*} flags defined in this interface
|
||||
* @throws ParserException If parsing fails.
|
||||
*/
|
||||
public int read(NonBlockingInputStream inputStream) throws ParserException;
|
||||
|
||||
/**
|
||||
* The total number of bytes consumed by the reader since first created or last {@link #reset()}.
|
||||
*/
|
||||
public long getBytesRead();
|
||||
|
||||
/**
|
||||
* Resets the entire state of the reader so that it will read a new EBML structure from scratch.
|
||||
*
|
||||
* <p>This includes resetting the value returned from {@link #getBytesRead()} to 0 and discarding
|
||||
* all pending {@link EbmlEventHandler#onMasterElementEnd(int)} events.
|
||||
*/
|
||||
public void reset();
|
||||
|
||||
/**
|
||||
* Reads, parses, and returns an EBML variable-length integer (varint) from the contents
|
||||
* of a binary element.
|
||||
*
|
||||
* @param inputStream The input stream from which data should be read
|
||||
* @return The varint value at the current position of the contents of a binary element
|
||||
*/
|
||||
public long readVarint(NonBlockingInputStream inputStream);
|
||||
|
||||
/**
|
||||
* Reads a fixed number of bytes from the contents of a binary element into a {@link ByteBuffer}.
|
||||
*
|
||||
* @param inputStream The input stream from which data should be read
|
||||
* @param byteBuffer The {@link ByteBuffer} to which data should be written
|
||||
* @param totalBytes The fixed number of bytes to be read and written
|
||||
*/
|
||||
public void readBytes(NonBlockingInputStream inputStream, ByteBuffer byteBuffer, int totalBytes);
|
||||
|
||||
/**
|
||||
* Reads a fixed number of bytes from the contents of a binary element into a {@code byte[]}.
|
||||
*
|
||||
* @param inputStream The input stream from which data should be read
|
||||
* @param byteArray The byte array to which data should be written
|
||||
* @param totalBytes The fixed number of bytes to be read and written
|
||||
*/
|
||||
public void readBytes(NonBlockingInputStream inputStream, byte[] byteArray, int totalBytes);
|
||||
|
||||
/**
|
||||
* Skips a fixed number of bytes from the contents of a binary element.
|
||||
*
|
||||
* @param inputStream The input stream from which data should be skipped
|
||||
* @param totalBytes The fixed number of bytes to be skipped
|
||||
*/
|
||||
public void skipBytes(NonBlockingInputStream inputStream, int totalBytes);
|
||||
|
||||
}
|
||||
|
|
@ -1,734 +0,0 @@
|
|||
/*
|
||||
* Copyright (C) 2014 The Android Open Source Project
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
*/
|
||||
package com.google.android.exoplayer.chunk.parser.webm;
|
||||
|
||||
import com.google.android.exoplayer.C;
|
||||
import com.google.android.exoplayer.MediaFormat;
|
||||
import com.google.android.exoplayer.ParserException;
|
||||
import com.google.android.exoplayer.SampleHolder;
|
||||
import com.google.android.exoplayer.chunk.parser.Extractor;
|
||||
import com.google.android.exoplayer.chunk.parser.SegmentIndex;
|
||||
import com.google.android.exoplayer.drm.DrmInitData;
|
||||
import com.google.android.exoplayer.upstream.NonBlockingInputStream;
|
||||
import com.google.android.exoplayer.util.LongArray;
|
||||
import com.google.android.exoplayer.util.MimeTypes;
|
||||
|
||||
import java.nio.ByteBuffer;
|
||||
import java.util.ArrayList;
|
||||
import java.util.Arrays;
|
||||
import java.util.concurrent.TimeUnit;
|
||||
|
||||
/**
|
||||
* An extractor to facilitate data retrieval from the WebM container format.
|
||||
*
|
||||
* <p>WebM is a subset of the EBML elements defined for Matroska. More information about EBML and
|
||||
* Matroska is available <a href="http://www.matroska.org/technical/specs/index.html">here</a>.
|
||||
* More info about WebM is <a href="http://www.webmproject.org/code/specs/container/">here</a>.
|
||||
* RFC on encrypted WebM can be found
|
||||
* <a href="http://wiki.webmproject.org/encryption/webm-encryption-rfc">here</a>.
|
||||
*/
|
||||
public final class WebmExtractor implements Extractor {
|
||||
|
||||
private static final String DOC_TYPE_WEBM = "webm";
|
||||
private static final String CODEC_ID_VP9 = "V_VP9";
|
||||
private static final String CODEC_ID_VORBIS = "A_VORBIS";
|
||||
private static final String CODEC_ID_OPUS = "A_OPUS";
|
||||
private static final int VORBIS_MAX_INPUT_SIZE = 8192;
|
||||
private static final int OPUS_MAX_INPUT_SIZE = 5760;
|
||||
private static final int BLOCK_COUNTER_SIZE = 16;
|
||||
private static final int UNKNOWN = -1;
|
||||
|
||||
// Element IDs
|
||||
private static final int ID_EBML = 0x1A45DFA3;
|
||||
private static final int ID_EBML_READ_VERSION = 0x42F7;
|
||||
private static final int ID_DOC_TYPE = 0x4282;
|
||||
private static final int ID_DOC_TYPE_READ_VERSION = 0x4285;
|
||||
|
||||
private static final int ID_SEGMENT = 0x18538067;
|
||||
|
||||
private static final int ID_INFO = 0x1549A966;
|
||||
private static final int ID_TIMECODE_SCALE = 0x2AD7B1;
|
||||
private static final int ID_DURATION = 0x4489;
|
||||
|
||||
private static final int ID_CLUSTER = 0x1F43B675;
|
||||
private static final int ID_TIME_CODE = 0xE7;
|
||||
private static final int ID_SIMPLE_BLOCK = 0xA3;
|
||||
private static final int ID_BLOCK_GROUP = 0xA0;
|
||||
private static final int ID_BLOCK = 0xA1;
|
||||
|
||||
private static final int ID_TRACKS = 0x1654AE6B;
|
||||
private static final int ID_TRACK_ENTRY = 0xAE;
|
||||
private static final int ID_CODEC_ID = 0x86;
|
||||
private static final int ID_CODEC_PRIVATE = 0x63A2;
|
||||
private static final int ID_CODEC_DELAY = 0x56AA;
|
||||
private static final int ID_SEEK_PRE_ROLL = 0x56BB;
|
||||
private static final int ID_VIDEO = 0xE0;
|
||||
private static final int ID_PIXEL_WIDTH = 0xB0;
|
||||
private static final int ID_PIXEL_HEIGHT = 0xBA;
|
||||
private static final int ID_AUDIO = 0xE1;
|
||||
private static final int ID_CHANNELS = 0x9F;
|
||||
private static final int ID_SAMPLING_FREQUENCY = 0xB5;
|
||||
|
||||
private static final int ID_CONTENT_ENCODINGS = 0x6D80;
|
||||
private static final int ID_CONTENT_ENCODING = 0x6240;
|
||||
private static final int ID_CONTENT_ENCODING_ORDER = 0x5031;
|
||||
private static final int ID_CONTENT_ENCODING_SCOPE = 0x5032;
|
||||
private static final int ID_CONTENT_ENCODING_TYPE = 0x5033;
|
||||
private static final int ID_CONTENT_ENCRYPTION = 0x5035;
|
||||
private static final int ID_CONTENT_ENCRYPTION_ALGORITHM = 0x47E1;
|
||||
private static final int ID_CONTENT_ENCRYPTION_KEY_ID = 0x47E2;
|
||||
private static final int ID_CONTENT_ENCRYPTION_AES_SETTINGS = 0x47E7;
|
||||
private static final int ID_CONTENT_ENCRYPTION_AES_SETTINGS_CIPHER_MODE = 0x47E8;
|
||||
|
||||
private static final int ID_CUES = 0x1C53BB6B;
|
||||
private static final int ID_CUE_POINT = 0xBB;
|
||||
private static final int ID_CUE_TIME = 0xB3;
|
||||
private static final int ID_CUE_TRACK_POSITIONS = 0xB7;
|
||||
private static final int ID_CUE_CLUSTER_POSITION = 0xF1;
|
||||
|
||||
private static final int LACING_NONE = 0;
|
||||
|
||||
private static final int READ_TERMINATING_RESULTS = RESULT_NEED_MORE_DATA | RESULT_END_OF_STREAM
|
||||
| RESULT_READ_SAMPLE | RESULT_NEED_SAMPLE_HOLDER;
|
||||
|
||||
private final EbmlReader reader;
|
||||
private final byte[] simpleBlockTimecodeAndFlags = new byte[3];
|
||||
private DrmInitData.Universal drmInitData;
|
||||
|
||||
private SampleHolder sampleHolder;
|
||||
private int readResults;
|
||||
|
||||
private long segmentStartOffsetBytes = UNKNOWN;
|
||||
private long segmentEndOffsetBytes = UNKNOWN;
|
||||
private long timecodeScale = 1000000L;
|
||||
private long durationUs = C.UNKNOWN_TIME_US;
|
||||
private int pixelWidth = UNKNOWN;
|
||||
private int pixelHeight = UNKNOWN;
|
||||
private int channelCount = UNKNOWN;
|
||||
private int sampleRate = UNKNOWN;
|
||||
private byte[] codecPrivate;
|
||||
private String codecId;
|
||||
private long codecDelayNs;
|
||||
private long seekPreRollNs;
|
||||
private boolean isAudioTrack;
|
||||
private boolean hasContentEncryption;
|
||||
private byte[] encryptionKeyId;
|
||||
private long cuesSizeBytes = UNKNOWN;
|
||||
private long clusterTimecodeUs = UNKNOWN;
|
||||
private long simpleBlockTimecodeUs = UNKNOWN;
|
||||
private MediaFormat format;
|
||||
private SegmentIndex cues;
|
||||
private LongArray cueTimesUs;
|
||||
private LongArray cueClusterPositions;
|
||||
|
||||
public WebmExtractor() {
|
||||
this(new DefaultEbmlReader());
|
||||
}
|
||||
|
||||
/* package */ WebmExtractor(EbmlReader reader) {
|
||||
this.reader = reader;
|
||||
this.reader.setEventHandler(new InnerEbmlEventHandler());
|
||||
}
|
||||
|
||||
@Override
|
||||
public int read(
|
||||
NonBlockingInputStream inputStream, SampleHolder sampleHolder) throws ParserException {
|
||||
this.sampleHolder = sampleHolder;
|
||||
this.readResults = 0;
|
||||
while ((readResults & READ_TERMINATING_RESULTS) == 0) {
|
||||
int ebmlReadResult = reader.read(inputStream);
|
||||
if (ebmlReadResult == EbmlReader.READ_RESULT_NEED_MORE_DATA) {
|
||||
readResults |= WebmExtractor.RESULT_NEED_MORE_DATA;
|
||||
} else if (ebmlReadResult == EbmlReader.READ_RESULT_END_OF_STREAM) {
|
||||
readResults |= WebmExtractor.RESULT_END_OF_STREAM;
|
||||
}
|
||||
}
|
||||
this.sampleHolder = null;
|
||||
return readResults;
|
||||
}
|
||||
|
||||
@Override
|
||||
public boolean seekTo(long seekTimeUs, boolean allowNoop) {
|
||||
if (allowNoop
|
||||
&& cues != null
|
||||
&& clusterTimecodeUs != UNKNOWN
|
||||
&& simpleBlockTimecodeUs != UNKNOWN
|
||||
&& seekTimeUs >= simpleBlockTimecodeUs) {
|
||||
int clusterIndex = Arrays.binarySearch(cues.timesUs, clusterTimecodeUs);
|
||||
if (clusterIndex >= 0 && seekTimeUs < clusterTimecodeUs + cues.durationsUs[clusterIndex]) {
|
||||
return false;
|
||||
}
|
||||
}
|
||||
clusterTimecodeUs = UNKNOWN;
|
||||
simpleBlockTimecodeUs = UNKNOWN;
|
||||
reader.reset();
|
||||
return true;
|
||||
}
|
||||
|
||||
@Override
|
||||
public SegmentIndex getIndex() {
|
||||
return cues;
|
||||
}
|
||||
|
||||
@Override
|
||||
public boolean hasRelativeIndexOffsets() {
|
||||
return false;
|
||||
}
|
||||
|
||||
@Override
|
||||
public MediaFormat getFormat() {
|
||||
return format;
|
||||
}
|
||||
|
||||
@Override
|
||||
public DrmInitData getDrmInitData() {
|
||||
return drmInitData;
|
||||
}
|
||||
|
||||
/* package */ int getElementType(int id) {
|
||||
switch (id) {
|
||||
case ID_EBML:
|
||||
case ID_SEGMENT:
|
||||
case ID_INFO:
|
||||
case ID_CLUSTER:
|
||||
case ID_TRACKS:
|
||||
case ID_TRACK_ENTRY:
|
||||
case ID_AUDIO:
|
||||
case ID_VIDEO:
|
||||
case ID_CONTENT_ENCODINGS:
|
||||
case ID_CONTENT_ENCODING:
|
||||
case ID_CONTENT_ENCRYPTION:
|
||||
case ID_CONTENT_ENCRYPTION_AES_SETTINGS:
|
||||
case ID_CUES:
|
||||
case ID_CUE_POINT:
|
||||
case ID_CUE_TRACK_POSITIONS:
|
||||
case ID_BLOCK_GROUP:
|
||||
return EbmlReader.TYPE_MASTER;
|
||||
case ID_EBML_READ_VERSION:
|
||||
case ID_DOC_TYPE_READ_VERSION:
|
||||
case ID_TIMECODE_SCALE:
|
||||
case ID_TIME_CODE:
|
||||
case ID_PIXEL_WIDTH:
|
||||
case ID_PIXEL_HEIGHT:
|
||||
case ID_CODEC_DELAY:
|
||||
case ID_SEEK_PRE_ROLL:
|
||||
case ID_CHANNELS:
|
||||
case ID_CONTENT_ENCODING_ORDER:
|
||||
case ID_CONTENT_ENCODING_SCOPE:
|
||||
case ID_CONTENT_ENCODING_TYPE:
|
||||
case ID_CONTENT_ENCRYPTION_ALGORITHM:
|
||||
case ID_CONTENT_ENCRYPTION_AES_SETTINGS_CIPHER_MODE:
|
||||
case ID_CUE_TIME:
|
||||
case ID_CUE_CLUSTER_POSITION:
|
||||
return EbmlReader.TYPE_UNSIGNED_INT;
|
||||
case ID_DOC_TYPE:
|
||||
case ID_CODEC_ID:
|
||||
return EbmlReader.TYPE_STRING;
|
||||
case ID_CONTENT_ENCRYPTION_KEY_ID:
|
||||
case ID_SIMPLE_BLOCK:
|
||||
case ID_BLOCK:
|
||||
case ID_CODEC_PRIVATE:
|
||||
return EbmlReader.TYPE_BINARY;
|
||||
case ID_DURATION:
|
||||
case ID_SAMPLING_FREQUENCY:
|
||||
return EbmlReader.TYPE_FLOAT;
|
||||
default:
|
||||
return EbmlReader.TYPE_UNKNOWN;
|
||||
}
|
||||
}
|
||||
|
||||
/* package */ boolean onMasterElementStart(
|
||||
int id, long elementOffsetBytes, int headerSizeBytes,
|
||||
long contentsSizeBytes) throws ParserException {
|
||||
switch (id) {
|
||||
case ID_SEGMENT:
|
||||
if (segmentStartOffsetBytes != UNKNOWN || segmentEndOffsetBytes != UNKNOWN) {
|
||||
throw new ParserException("Multiple Segment elements not supported");
|
||||
}
|
||||
segmentStartOffsetBytes = elementOffsetBytes + headerSizeBytes;
|
||||
segmentEndOffsetBytes = elementOffsetBytes + headerSizeBytes + contentsSizeBytes;
|
||||
break;
|
||||
case ID_CUES:
|
||||
cuesSizeBytes = headerSizeBytes + contentsSizeBytes;
|
||||
cueTimesUs = new LongArray();
|
||||
cueClusterPositions = new LongArray();
|
||||
break;
|
||||
case ID_CONTENT_ENCODING:
|
||||
// TODO: check and fail if more than one content encoding is present.
|
||||
break;
|
||||
case ID_CONTENT_ENCRYPTION:
|
||||
hasContentEncryption = true;
|
||||
break;
|
||||
default:
|
||||
// pass
|
||||
}
|
||||
return true;
|
||||
}
|
||||
|
||||
/* package */ boolean onMasterElementEnd(int id) throws ParserException {
|
||||
switch (id) {
|
||||
case ID_CUES:
|
||||
buildCues();
|
||||
return false;
|
||||
case ID_CONTENT_ENCODING:
|
||||
if (!hasContentEncryption) {
|
||||
// We found a ContentEncoding other than Encryption.
|
||||
throw new ParserException("Found an unsupported ContentEncoding");
|
||||
}
|
||||
if (encryptionKeyId == null) {
|
||||
throw new ParserException("Encrypted Track found but ContentEncKeyID was not found");
|
||||
}
|
||||
drmInitData = new DrmInitData.Universal(MimeTypes.VIDEO_WEBM, encryptionKeyId);
|
||||
return true;
|
||||
case ID_AUDIO:
|
||||
isAudioTrack = true;
|
||||
return true;
|
||||
case ID_TRACK_ENTRY:
|
||||
if (isAudioTrack) {
|
||||
buildAudioFormat();
|
||||
} else {
|
||||
buildVideoFormat();
|
||||
}
|
||||
return true;
|
||||
default:
|
||||
return true;
|
||||
}
|
||||
}
|
||||
|
||||
/* package */ boolean onIntegerElement(int id, long value) throws ParserException {
|
||||
switch (id) {
|
||||
case ID_EBML_READ_VERSION:
|
||||
// Validate that EBMLReadVersion is supported. This extractor only supports v1.
|
||||
if (value != 1) {
|
||||
throw new ParserException("EBMLReadVersion " + value + " not supported");
|
||||
}
|
||||
break;
|
||||
case ID_DOC_TYPE_READ_VERSION:
|
||||
// Validate that DocTypeReadVersion is supported. This extractor only supports up to v2.
|
||||
if (value < 1 || value > 2) {
|
||||
throw new ParserException("DocTypeReadVersion " + value + " not supported");
|
||||
}
|
||||
break;
|
||||
case ID_TIMECODE_SCALE:
|
||||
timecodeScale = value;
|
||||
break;
|
||||
case ID_PIXEL_WIDTH:
|
||||
pixelWidth = (int) value;
|
||||
break;
|
||||
case ID_PIXEL_HEIGHT:
|
||||
pixelHeight = (int) value;
|
||||
break;
|
||||
case ID_CODEC_DELAY:
|
||||
codecDelayNs = value;
|
||||
break;
|
||||
case ID_SEEK_PRE_ROLL:
|
||||
seekPreRollNs = value;
|
||||
break;
|
||||
case ID_CHANNELS:
|
||||
channelCount = (int) value;
|
||||
break;
|
||||
case ID_CONTENT_ENCODING_ORDER:
|
||||
// This extractor only supports one ContentEncoding element and hence the order has to be 0.
|
||||
if (value != 0) {
|
||||
throw new ParserException("ContentEncodingOrder " + value + " not supported");
|
||||
}
|
||||
break;
|
||||
case ID_CONTENT_ENCODING_SCOPE:
|
||||
// This extractor only supports the scope of all frames (since that's the only scope used
|
||||
// for Encryption).
|
||||
if (value != 1) {
|
||||
throw new ParserException("ContentEncodingScope " + value + " not supported");
|
||||
}
|
||||
break;
|
||||
case ID_CONTENT_ENCODING_TYPE:
|
||||
// This extractor only supports Encrypted ContentEncodingType.
|
||||
if (value != 1) {
|
||||
throw new ParserException("ContentEncodingType " + value + " not supported");
|
||||
}
|
||||
break;
|
||||
case ID_CONTENT_ENCRYPTION_ALGORITHM:
|
||||
// Only the value 5 (AES) is allowed according to the WebM specification.
|
||||
if (value != 5) {
|
||||
throw new ParserException("ContentEncAlgo " + value + " not supported");
|
||||
}
|
||||
break;
|
||||
case ID_CONTENT_ENCRYPTION_AES_SETTINGS_CIPHER_MODE:
|
||||
// Only the value 1 is allowed according to the WebM specification.
|
||||
if (value != 1) {
|
||||
throw new ParserException("AESSettingsCipherMode " + value + " not supported");
|
||||
}
|
||||
break;
|
||||
case ID_CUE_TIME:
|
||||
cueTimesUs.add(scaleTimecodeToUs(value));
|
||||
break;
|
||||
case ID_CUE_CLUSTER_POSITION:
|
||||
cueClusterPositions.add(value);
|
||||
break;
|
||||
case ID_TIME_CODE:
|
||||
clusterTimecodeUs = scaleTimecodeToUs(value);
|
||||
break;
|
||||
default:
|
||||
// pass
|
||||
}
|
||||
return true;
|
||||
}
|
||||
|
||||
/* package */ boolean onFloatElement(int id, double value) {
|
||||
switch (id) {
|
||||
case ID_DURATION:
|
||||
durationUs = scaleTimecodeToUs((long) value);
|
||||
break;
|
||||
case ID_SAMPLING_FREQUENCY:
|
||||
sampleRate = (int) value;
|
||||
break;
|
||||
default:
|
||||
// pass
|
||||
}
|
||||
return true;
|
||||
}
|
||||
|
||||
/* package */ boolean onStringElement(int id, String value) throws ParserException {
|
||||
switch (id) {
|
||||
case ID_DOC_TYPE:
|
||||
// Validate that DocType is supported. This extractor only supports "webm".
|
||||
if (!DOC_TYPE_WEBM.equals(value)) {
|
||||
throw new ParserException("DocType " + value + " not supported");
|
||||
}
|
||||
break;
|
||||
case ID_CODEC_ID:
|
||||
// Validate that CodecID is supported. This extractor only supports "V_VP9" and "A_VORBIS".
|
||||
if (!isCodecSupported(value)) {
|
||||
throw new ParserException("CodecID " + value + " not supported");
|
||||
}
|
||||
codecId = value;
|
||||
break;
|
||||
default:
|
||||
// pass
|
||||
}
|
||||
return true;
|
||||
}
|
||||
|
||||
/* package */ boolean onBinaryElement(
|
||||
int id, long elementOffsetBytes, int headerSizeBytes, int contentsSizeBytes,
|
||||
NonBlockingInputStream inputStream) throws ParserException {
|
||||
switch (id) {
|
||||
case ID_SIMPLE_BLOCK:
|
||||
case ID_BLOCK:
|
||||
// Please refer to http://www.matroska.org/technical/specs/index.html#simpleblock_structure
|
||||
// and http://matroska.org/technical/specs/index.html#block_structure
|
||||
// for info about how data is organized in SimpleBlock and Block elements respectively. They
|
||||
// differ only in the way flags are specified.
|
||||
|
||||
// If we don't have a sample holder then don't consume the data.
|
||||
if (sampleHolder == null) {
|
||||
readResults |= RESULT_NEED_SAMPLE_HOLDER;
|
||||
return false;
|
||||
}
|
||||
|
||||
// Value of trackNumber is not used but needs to be read.
|
||||
reader.readVarint(inputStream);
|
||||
|
||||
// Next three bytes have timecode and flags.
|
||||
reader.readBytes(inputStream, simpleBlockTimecodeAndFlags, 3);
|
||||
|
||||
// First two bytes of the three are the relative timecode.
|
||||
int timecode =
|
||||
(simpleBlockTimecodeAndFlags[0] << 8) | (simpleBlockTimecodeAndFlags[1] & 0xff);
|
||||
long timecodeUs = scaleTimecodeToUs(timecode);
|
||||
|
||||
// Last byte of the three has some flags and the lacing value.
|
||||
boolean keyframe;
|
||||
if (id == ID_BLOCK) {
|
||||
// Matroska Block element does not self-sufficiently say whether it is a key frame or not.
|
||||
// It depends on the existence of another element (ReferenceBlock) which may occur after
|
||||
// the Block element. Since this extractor uses Block element only for Opus, we set the
|
||||
// keyframe to be true always since all Opus frames are key frames.
|
||||
keyframe = true;
|
||||
} else {
|
||||
keyframe = (simpleBlockTimecodeAndFlags[2] & 0x80) == 0x80;
|
||||
}
|
||||
boolean invisible = (simpleBlockTimecodeAndFlags[2] & 0x08) == 0x08;
|
||||
int lacing = (simpleBlockTimecodeAndFlags[2] & 0x06) >> 1;
|
||||
if (lacing != LACING_NONE) {
|
||||
throw new ParserException("Lacing mode " + lacing + " not supported");
|
||||
}
|
||||
long elementEndOffsetBytes = elementOffsetBytes + headerSizeBytes + contentsSizeBytes;
|
||||
simpleBlockTimecodeUs = clusterTimecodeUs + timecodeUs;
|
||||
sampleHolder.flags = (keyframe ? C.SAMPLE_FLAG_SYNC : 0)
|
||||
| (invisible ? C.SAMPLE_FLAG_DECODE_ONLY : 0);
|
||||
sampleHolder.timeUs = clusterTimecodeUs + timecodeUs;
|
||||
sampleHolder.size = (int) (elementEndOffsetBytes - reader.getBytesRead());
|
||||
|
||||
if (hasContentEncryption) {
|
||||
byte[] signalByte = new byte[1];
|
||||
reader.readBytes(inputStream, signalByte, 1);
|
||||
sampleHolder.size -= 1;
|
||||
// First bit of the signalByte (extension bit) must be 0.
|
||||
if ((signalByte[0] & 0x80) != 0) {
|
||||
throw new ParserException("Extension bit is set in signal byte");
|
||||
}
|
||||
boolean isEncrypted = (signalByte[0] & 0x01) == 0x01;
|
||||
if (isEncrypted) {
|
||||
byte[] iv = null;
|
||||
iv = sampleHolder.cryptoInfo.iv;
|
||||
if (iv == null || iv.length != BLOCK_COUNTER_SIZE) {
|
||||
iv = new byte[BLOCK_COUNTER_SIZE];
|
||||
}
|
||||
reader.readBytes(inputStream, iv, 8); // The container has only 8 bytes of IV.
|
||||
sampleHolder.size -= 8;
|
||||
|
||||
int[] clearDataSizes = sampleHolder.cryptoInfo.numBytesOfClearData;
|
||||
if (clearDataSizes == null || clearDataSizes.length < 1) {
|
||||
clearDataSizes = new int[1];
|
||||
}
|
||||
int[] encryptedDataSizes = sampleHolder.cryptoInfo.numBytesOfEncryptedData;
|
||||
if (encryptedDataSizes == null || encryptedDataSizes.length < 1) {
|
||||
encryptedDataSizes = new int[1];
|
||||
}
|
||||
clearDataSizes[0] = 0;
|
||||
encryptedDataSizes[0] = sampleHolder.size;
|
||||
|
||||
sampleHolder.cryptoInfo.set(1, clearDataSizes, encryptedDataSizes,
|
||||
encryptionKeyId, iv, C.CRYPTO_MODE_AES_CTR);
|
||||
sampleHolder.flags |= C.SAMPLE_FLAG_ENCRYPTED;
|
||||
}
|
||||
}
|
||||
|
||||
if (sampleHolder.data == null || sampleHolder.data.capacity() < sampleHolder.size) {
|
||||
sampleHolder.replaceBuffer(sampleHolder.size);
|
||||
}
|
||||
|
||||
ByteBuffer outputData = sampleHolder.data;
|
||||
if (outputData == null) {
|
||||
reader.skipBytes(inputStream, sampleHolder.size);
|
||||
sampleHolder.size = 0;
|
||||
} else {
|
||||
reader.readBytes(inputStream, outputData, sampleHolder.size);
|
||||
}
|
||||
readResults |= RESULT_READ_SAMPLE;
|
||||
break;
|
||||
case ID_CODEC_PRIVATE:
|
||||
codecPrivate = new byte[contentsSizeBytes];
|
||||
reader.readBytes(inputStream, codecPrivate, contentsSizeBytes);
|
||||
break;
|
||||
case ID_CONTENT_ENCRYPTION_KEY_ID:
|
||||
encryptionKeyId = new byte[contentsSizeBytes];
|
||||
reader.readBytes(inputStream, encryptionKeyId, contentsSizeBytes);
|
||||
break;
|
||||
default:
|
||||
// pass
|
||||
}
|
||||
return true;
|
||||
}
|
||||
|
||||
private long scaleTimecodeToUs(long unscaledTimecode) {
|
||||
return TimeUnit.NANOSECONDS.toMicros(unscaledTimecode * timecodeScale);
|
||||
}
|
||||
|
||||
private boolean isCodecSupported(String codecId) {
|
||||
return CODEC_ID_VP9.equals(codecId)
|
||||
|| CODEC_ID_OPUS.equals(codecId)
|
||||
|| CODEC_ID_VORBIS.equals(codecId);
|
||||
}
|
||||
|
||||
/**
|
||||
* Build a video {@link MediaFormat} containing recently gathered Video information, if needed.
|
||||
*
|
||||
* <p>Replaces the previous {@link #format} only if video width/height have changed.
|
||||
* {@link #format} is guaranteed to not be null after calling this method. In
|
||||
* the event that it can't be built, an {@link ParserException} will be thrown.
|
||||
*/
|
||||
private void buildVideoFormat() throws ParserException {
|
||||
if (pixelWidth != UNKNOWN && pixelHeight != UNKNOWN
|
||||
&& (format == null || format.width != pixelWidth || format.height != pixelHeight)) {
|
||||
format = MediaFormat.createVideoFormat(MimeTypes.VIDEO_VP9, MediaFormat.NO_VALUE, durationUs,
|
||||
pixelWidth, pixelHeight, null);
|
||||
readResults |= RESULT_READ_INIT;
|
||||
} else if (format == null) {
|
||||
throw new ParserException("Unable to build format");
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Build an audio {@link MediaFormat} containing recently gathered Audio information, if needed.
|
||||
*
|
||||
* <p>Replaces the previous {@link #format} only if audio channel count/sample rate have changed.
|
||||
* {@link #format} is guaranteed to not be null after calling this method.
|
||||
*
|
||||
* @throws ParserException If an error occurs when parsing codec's private data or if the format
|
||||
* can't be built.
|
||||
*/
|
||||
private void buildAudioFormat() throws ParserException {
|
||||
if (channelCount != UNKNOWN && sampleRate != UNKNOWN
|
||||
&& (format == null || format.channelCount != channelCount
|
||||
|| format.sampleRate != sampleRate)) {
|
||||
if (CODEC_ID_VORBIS.equals(codecId)) {
|
||||
format = MediaFormat.createAudioFormat(MimeTypes.AUDIO_VORBIS, VORBIS_MAX_INPUT_SIZE,
|
||||
durationUs, channelCount, sampleRate, parseVorbisCodecPrivate());
|
||||
} else if (CODEC_ID_OPUS.equals(codecId)) {
|
||||
ArrayList<byte[]> opusInitializationData = new ArrayList<byte[]>(3);
|
||||
opusInitializationData.add(codecPrivate);
|
||||
opusInitializationData.add(ByteBuffer.allocate(Long.SIZE).putLong(codecDelayNs).array());
|
||||
opusInitializationData.add(ByteBuffer.allocate(Long.SIZE).putLong(seekPreRollNs).array());
|
||||
format = MediaFormat.createAudioFormat(MimeTypes.AUDIO_OPUS, OPUS_MAX_INPUT_SIZE,
|
||||
durationUs, channelCount, sampleRate, opusInitializationData);
|
||||
}
|
||||
readResults |= RESULT_READ_INIT;
|
||||
} else if (format == null) {
|
||||
throw new ParserException("Unable to build format");
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Build a {@link SegmentIndex} containing recently gathered Cues information.
|
||||
*
|
||||
* <p>{@link #cues} is guaranteed to not be null after calling this method. In
|
||||
* the event that it can't be built, an {@link ParserException} will be thrown.
|
||||
*/
|
||||
private void buildCues() throws ParserException {
|
||||
if (segmentStartOffsetBytes == UNKNOWN) {
|
||||
throw new ParserException("Segment start/end offsets unknown");
|
||||
} else if (durationUs == C.UNKNOWN_TIME_US) {
|
||||
throw new ParserException("Duration unknown");
|
||||
} else if (cuesSizeBytes == UNKNOWN) {
|
||||
throw new ParserException("Cues size unknown");
|
||||
} else if (cueTimesUs == null || cueClusterPositions == null
|
||||
|| cueTimesUs.size() == 0 || cueTimesUs.size() != cueClusterPositions.size()) {
|
||||
throw new ParserException("Invalid/missing cue points");
|
||||
}
|
||||
int cuePointsSize = cueTimesUs.size();
|
||||
int[] sizes = new int[cuePointsSize];
|
||||
long[] offsets = new long[cuePointsSize];
|
||||
long[] durationsUs = new long[cuePointsSize];
|
||||
long[] timesUs = new long[cuePointsSize];
|
||||
for (int i = 0; i < cuePointsSize; i++) {
|
||||
timesUs[i] = cueTimesUs.get(i);
|
||||
offsets[i] = segmentStartOffsetBytes + cueClusterPositions.get(i);
|
||||
}
|
||||
for (int i = 0; i < cuePointsSize - 1; i++) {
|
||||
sizes[i] = (int) (offsets[i + 1] - offsets[i]);
|
||||
durationsUs[i] = timesUs[i + 1] - timesUs[i];
|
||||
}
|
||||
sizes[cuePointsSize - 1] = (int) (segmentEndOffsetBytes - offsets[cuePointsSize - 1]);
|
||||
durationsUs[cuePointsSize - 1] = durationUs - timesUs[cuePointsSize - 1];
|
||||
cues = new SegmentIndex((int) cuesSizeBytes, sizes, offsets, durationsUs, timesUs);
|
||||
cueTimesUs = null;
|
||||
cueClusterPositions = null;
|
||||
readResults |= RESULT_READ_INDEX;
|
||||
}
|
||||
|
||||
/**
|
||||
* Parses Vorbis Codec Private data and adds it as initialization data to the {@link #format}.
|
||||
* WebM Vorbis Codec Private data specification can be found
|
||||
* <a href="http://matroska.org/technical/specs/codecid/index.html">here</a>.
|
||||
*
|
||||
* @return ArrayList of byte arrays containing the initialization data on success.
|
||||
* @throws ParserException If parsing codec private data fails.
|
||||
*/
|
||||
private ArrayList<byte[]> parseVorbisCodecPrivate() throws ParserException {
|
||||
try {
|
||||
if (codecPrivate[0] != 0x02) {
|
||||
throw new ParserException("Error parsing vorbis codec private");
|
||||
}
|
||||
int offset = 1;
|
||||
int vorbisInfoLength = 0;
|
||||
while (codecPrivate[offset] == (byte) 0xFF) {
|
||||
vorbisInfoLength += 0xFF;
|
||||
offset++;
|
||||
}
|
||||
vorbisInfoLength += codecPrivate[offset++];
|
||||
|
||||
int vorbisSkipLength = 0;
|
||||
while (codecPrivate[offset] == (byte) 0xFF) {
|
||||
vorbisSkipLength += 0xFF;
|
||||
offset++;
|
||||
}
|
||||
vorbisSkipLength += codecPrivate[offset++];
|
||||
|
||||
if (codecPrivate[offset] != 0x01) {
|
||||
throw new ParserException("Error parsing vorbis codec private");
|
||||
}
|
||||
byte[] vorbisInfo = new byte[vorbisInfoLength];
|
||||
System.arraycopy(codecPrivate, offset, vorbisInfo, 0, vorbisInfoLength);
|
||||
offset += vorbisInfoLength;
|
||||
if (codecPrivate[offset] != 0x03) {
|
||||
throw new ParserException("Error parsing vorbis codec private");
|
||||
}
|
||||
offset += vorbisSkipLength;
|
||||
if (codecPrivate[offset] != 0x05) {
|
||||
throw new ParserException("Error parsing vorbis codec private");
|
||||
}
|
||||
byte[] vorbisBooks = new byte[codecPrivate.length - offset];
|
||||
System.arraycopy(codecPrivate, offset, vorbisBooks, 0, codecPrivate.length - offset);
|
||||
ArrayList<byte[]> initializationData = new ArrayList<byte[]>(2);
|
||||
initializationData.add(vorbisInfo);
|
||||
initializationData.add(vorbisBooks);
|
||||
return initializationData;
|
||||
} catch (ArrayIndexOutOfBoundsException e) {
|
||||
throw new ParserException("Error parsing vorbis codec private");
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Passes events through to {@link WebmExtractor} as
|
||||
* callbacks from {@link EbmlReader} are received.
|
||||
*/
|
||||
private final class InnerEbmlEventHandler implements EbmlEventHandler {
|
||||
|
||||
@Override
|
||||
public int getElementType(int id) {
|
||||
return WebmExtractor.this.getElementType(id);
|
||||
}
|
||||
|
||||
@Override
|
||||
public void onMasterElementStart(
|
||||
int id, long elementOffsetBytes, int headerSizeBytes,
|
||||
long contentsSizeBytes) throws ParserException {
|
||||
WebmExtractor.this.onMasterElementStart(
|
||||
id, elementOffsetBytes, headerSizeBytes, contentsSizeBytes);
|
||||
}
|
||||
|
||||
@Override
|
||||
public void onMasterElementEnd(int id) throws ParserException {
|
||||
WebmExtractor.this.onMasterElementEnd(id);
|
||||
}
|
||||
|
||||
@Override
|
||||
public void onIntegerElement(int id, long value) throws ParserException {
|
||||
WebmExtractor.this.onIntegerElement(id, value);
|
||||
}
|
||||
|
||||
@Override
|
||||
public void onFloatElement(int id, double value) {
|
||||
WebmExtractor.this.onFloatElement(id, value);
|
||||
}
|
||||
|
||||
@Override
|
||||
public void onStringElement(int id, String value) throws ParserException {
|
||||
WebmExtractor.this.onStringElement(id, value);
|
||||
}
|
||||
|
||||
@Override
|
||||
public boolean onBinaryElement(
|
||||
int id, long elementOffsetBytes, int headerSizeBytes, int contentsSizeBytes,
|
||||
NonBlockingInputStream inputStream) throws ParserException {
|
||||
return WebmExtractor.this.onBinaryElement(
|
||||
id, elementOffsetBytes, headerSizeBytes, contentsSizeBytes, inputStream);
|
||||
}
|
||||
|
||||
}
|
||||
|
||||
}
|
||||
|
|
@ -17,10 +17,10 @@ package com.google.android.exoplayer.dash;
|
|||
|
||||
import com.google.android.exoplayer.BehindLiveWindowException;
|
||||
import com.google.android.exoplayer.MediaFormat;
|
||||
import com.google.android.exoplayer.ParserException;
|
||||
import com.google.android.exoplayer.TrackInfo;
|
||||
import com.google.android.exoplayer.TrackRenderer;
|
||||
import com.google.android.exoplayer.chunk.Chunk;
|
||||
import com.google.android.exoplayer.chunk.ChunkExtractorWrapper;
|
||||
import com.google.android.exoplayer.chunk.ChunkOperationHolder;
|
||||
import com.google.android.exoplayer.chunk.ChunkSource;
|
||||
import com.google.android.exoplayer.chunk.ContainerMediaChunk;
|
||||
|
|
@ -28,11 +28,9 @@ import com.google.android.exoplayer.chunk.Format;
|
|||
import com.google.android.exoplayer.chunk.Format.DecreasingBandwidthComparator;
|
||||
import com.google.android.exoplayer.chunk.FormatEvaluator;
|
||||
import com.google.android.exoplayer.chunk.FormatEvaluator.Evaluation;
|
||||
import com.google.android.exoplayer.chunk.InitializationChunk;
|
||||
import com.google.android.exoplayer.chunk.MediaChunk;
|
||||
import com.google.android.exoplayer.chunk.SingleSampleMediaChunk;
|
||||
import com.google.android.exoplayer.chunk.parser.Extractor;
|
||||
import com.google.android.exoplayer.chunk.parser.mp4.FragmentedMp4Extractor;
|
||||
import com.google.android.exoplayer.chunk.parser.webm.WebmExtractor;
|
||||
import com.google.android.exoplayer.dash.mpd.AdaptationSet;
|
||||
import com.google.android.exoplayer.dash.mpd.ContentProtection;
|
||||
import com.google.android.exoplayer.dash.mpd.MediaPresentationDescription;
|
||||
|
|
@ -40,15 +38,17 @@ import com.google.android.exoplayer.dash.mpd.Period;
|
|||
import com.google.android.exoplayer.dash.mpd.RangedUri;
|
||||
import com.google.android.exoplayer.dash.mpd.Representation;
|
||||
import com.google.android.exoplayer.drm.DrmInitData;
|
||||
import com.google.android.exoplayer.extractor.ChunkIndex;
|
||||
import com.google.android.exoplayer.extractor.Extractor;
|
||||
import com.google.android.exoplayer.extractor.mp4.FragmentedMp4Extractor;
|
||||
import com.google.android.exoplayer.extractor.webm.WebmExtractor;
|
||||
import com.google.android.exoplayer.text.webvtt.WebvttParser;
|
||||
import com.google.android.exoplayer.upstream.DataSource;
|
||||
import com.google.android.exoplayer.upstream.DataSpec;
|
||||
import com.google.android.exoplayer.upstream.NonBlockingInputStream;
|
||||
import com.google.android.exoplayer.util.Clock;
|
||||
import com.google.android.exoplayer.util.ManifestFetcher;
|
||||
import com.google.android.exoplayer.util.MimeTypes;
|
||||
|
||||
import android.net.Uri;
|
||||
import android.os.SystemClock;
|
||||
import com.google.android.exoplayer.util.SystemClock;
|
||||
|
||||
import java.io.IOException;
|
||||
import java.util.Arrays;
|
||||
|
|
@ -83,6 +83,7 @@ public class DashChunkSource implements ChunkSource {
|
|||
private final DataSource dataSource;
|
||||
private final FormatEvaluator evaluator;
|
||||
private final Evaluation evaluation;
|
||||
private final Clock systemClock;
|
||||
private final StringBuilder headerBuilder;
|
||||
private final long liveEdgeLatencyUs;
|
||||
private final long elapsedRealtimeOffsetUs;
|
||||
|
|
@ -95,8 +96,8 @@ public class DashChunkSource implements ChunkSource {
|
|||
private final ManifestFetcher<MediaPresentationDescription> manifestFetcher;
|
||||
private final int adaptationSetIndex;
|
||||
private final int[] representationIndices;
|
||||
private final DrmInitData drmInitData;
|
||||
|
||||
private DrmInitData drmInitData;
|
||||
private MediaPresentationDescription currentManifest;
|
||||
private boolean finishedCurrentManifest;
|
||||
|
||||
|
|
@ -140,8 +141,8 @@ public class DashChunkSource implements ChunkSource {
|
|||
*/
|
||||
public DashChunkSource(MediaPresentationDescription manifest, int adaptationSetIndex,
|
||||
int[] representationIndices, DataSource dataSource, FormatEvaluator formatEvaluator) {
|
||||
this(null, manifest, adaptationSetIndex, representationIndices, dataSource, formatEvaluator, 0,
|
||||
0);
|
||||
this(null, manifest, adaptationSetIndex, representationIndices, dataSource, formatEvaluator,
|
||||
new SystemClock(), 0, 0);
|
||||
}
|
||||
|
||||
/**
|
||||
|
|
@ -171,19 +172,21 @@ public class DashChunkSource implements ChunkSource {
|
|||
int adaptationSetIndex, int[] representationIndices, DataSource dataSource,
|
||||
FormatEvaluator formatEvaluator, long liveEdgeLatencyMs, long elapsedRealtimeOffsetMs) {
|
||||
this(manifestFetcher, manifestFetcher.getManifest(), adaptationSetIndex, representationIndices,
|
||||
dataSource, formatEvaluator, liveEdgeLatencyMs * 1000, elapsedRealtimeOffsetMs * 1000);
|
||||
dataSource, formatEvaluator, new SystemClock(), liveEdgeLatencyMs * 1000,
|
||||
elapsedRealtimeOffsetMs * 1000);
|
||||
}
|
||||
|
||||
private DashChunkSource(ManifestFetcher<MediaPresentationDescription> manifestFetcher,
|
||||
/* package */ DashChunkSource(ManifestFetcher<MediaPresentationDescription> manifestFetcher,
|
||||
MediaPresentationDescription initialManifest, int adaptationSetIndex,
|
||||
int[] representationIndices, DataSource dataSource, FormatEvaluator formatEvaluator,
|
||||
long liveEdgeLatencyUs, long elapsedRealtimeOffsetUs) {
|
||||
Clock systemClock, long liveEdgeLatencyUs, long elapsedRealtimeOffsetUs) {
|
||||
this.manifestFetcher = manifestFetcher;
|
||||
this.currentManifest = initialManifest;
|
||||
this.adaptationSetIndex = adaptationSetIndex;
|
||||
this.representationIndices = representationIndices;
|
||||
this.dataSource = dataSource;
|
||||
this.evaluator = formatEvaluator;
|
||||
this.systemClock = systemClock;
|
||||
this.liveEdgeLatencyUs = liveEdgeLatencyUs;
|
||||
this.elapsedRealtimeOffsetUs = elapsedRealtimeOffsetUs;
|
||||
this.evaluation = new Evaluation();
|
||||
|
|
@ -207,7 +210,7 @@ public class DashChunkSource implements ChunkSource {
|
|||
Extractor extractor = mimeTypeIsWebm(formats[i].mimeType) ? new WebmExtractor()
|
||||
: new FragmentedMp4Extractor();
|
||||
representationHolders.put(formats[i].id,
|
||||
new RepresentationHolder(representations[i], extractor));
|
||||
new RepresentationHolder(representations[i], new ChunkExtractorWrapper(extractor)));
|
||||
}
|
||||
this.maxWidth = maxWidth;
|
||||
this.maxHeight = maxHeight;
|
||||
|
|
@ -276,7 +279,7 @@ public class DashChunkSource implements ChunkSource {
|
|||
minUpdatePeriod = 5000;
|
||||
}
|
||||
|
||||
if (finishedCurrentManifest && (SystemClock.elapsedRealtime()
|
||||
if (finishedCurrentManifest && (android.os.SystemClock.elapsedRealtime()
|
||||
> manifestFetcher.getManifestLoadTimestamp() + minUpdatePeriod)) {
|
||||
manifestFetcher.requestRefresh();
|
||||
}
|
||||
|
|
@ -310,12 +313,12 @@ public class DashChunkSource implements ChunkSource {
|
|||
RepresentationHolder representationHolder = representationHolders.get(selectedFormat.id);
|
||||
Representation selectedRepresentation = representationHolder.representation;
|
||||
DashSegmentIndex segmentIndex = representationHolder.segmentIndex;
|
||||
Extractor extractor = representationHolder.extractor;
|
||||
ChunkExtractorWrapper extractorWrapper = representationHolder.extractorWrapper;
|
||||
|
||||
RangedUri pendingInitializationUri = null;
|
||||
RangedUri pendingIndexUri = null;
|
||||
|
||||
if (extractor.getFormat() == null) {
|
||||
if (representationHolder.format == null) {
|
||||
pendingInitializationUri = selectedRepresentation.getInitializationUri();
|
||||
}
|
||||
if (segmentIndex == null) {
|
||||
|
|
@ -325,7 +328,7 @@ public class DashChunkSource implements ChunkSource {
|
|||
if (pendingInitializationUri != null || pendingIndexUri != null) {
|
||||
// We have initialization and/or index requests to make.
|
||||
Chunk initializationChunk = newInitializationChunk(pendingInitializationUri, pendingIndexUri,
|
||||
selectedRepresentation, extractor, dataSource, evaluation.trigger);
|
||||
selectedRepresentation, extractorWrapper, dataSource, evaluation.trigger);
|
||||
lastChunkWasInitialization = true;
|
||||
out.chunk = initializationChunk;
|
||||
return;
|
||||
|
|
@ -333,7 +336,7 @@ public class DashChunkSource implements ChunkSource {
|
|||
|
||||
long nowUs;
|
||||
if (elapsedRealtimeOffsetUs != 0) {
|
||||
nowUs = (SystemClock.elapsedRealtime() * 1000) + elapsedRealtimeOffsetUs;
|
||||
nowUs = (systemClock.elapsedRealtime() * 1000) + elapsedRealtimeOffsetUs;
|
||||
} else {
|
||||
nowUs = System.currentTimeMillis() * 1000;
|
||||
}
|
||||
|
|
@ -362,8 +365,9 @@ public class DashChunkSource implements ChunkSource {
|
|||
}
|
||||
segmentNum = segmentIndex.getSegmentNum(seekPositionUs);
|
||||
} else {
|
||||
segmentNum = queue.get(out.queueSize - 1).nextChunkIndex
|
||||
- representationHolder.segmentNumShift;
|
||||
MediaChunk previous = queue.get(out.queueSize - 1);
|
||||
segmentNum = previous.isLastChunk ? -1
|
||||
: previous.chunkIndex + 1 - representationHolder.segmentNumShift;
|
||||
}
|
||||
|
||||
if (currentManifest.dynamic) {
|
||||
|
|
@ -401,6 +405,26 @@ public class DashChunkSource implements ChunkSource {
|
|||
: (manifestFetcher != null ? manifestFetcher.getError() : null);
|
||||
}
|
||||
|
||||
@Override
|
||||
public void onChunkLoadCompleted(Chunk chunk) {
|
||||
if (chunk instanceof InitializationChunk) {
|
||||
InitializationChunk initializationChunk = (InitializationChunk) chunk;
|
||||
String formatId = initializationChunk.format.id;
|
||||
RepresentationHolder representationHolder = representationHolders.get(formatId);
|
||||
if (initializationChunk.hasFormat()) {
|
||||
representationHolder.format = initializationChunk.getFormat();
|
||||
}
|
||||
if (initializationChunk.hasSeekMap()) {
|
||||
representationHolder.segmentIndex = new DashWrappingSegmentIndex(
|
||||
(ChunkIndex) initializationChunk.getSeekMap(),
|
||||
initializationChunk.dataSpec.uri.toString());
|
||||
}
|
||||
if (initializationChunk.hasDrmInitData()) {
|
||||
drmInitData = initializationChunk.getDrmInitData();
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
@Override
|
||||
public void onChunkLoadError(Chunk chunk, Exception e) {
|
||||
// Do nothing.
|
||||
|
|
@ -411,36 +435,22 @@ public class DashChunkSource implements ChunkSource {
|
|||
}
|
||||
|
||||
private Chunk newInitializationChunk(RangedUri initializationUri, RangedUri indexUri,
|
||||
Representation representation, Extractor extractor, DataSource dataSource,
|
||||
Representation representation, ChunkExtractorWrapper extractor, DataSource dataSource,
|
||||
int trigger) {
|
||||
int expectedExtractorResult = Extractor.RESULT_END_OF_STREAM;
|
||||
long indexAnchor = 0;
|
||||
RangedUri requestUri;
|
||||
if (initializationUri != null) {
|
||||
// It's common for initialization and index data to be stored adjacently. Attempt to merge
|
||||
// the two requests together to request both at once.
|
||||
expectedExtractorResult |= Extractor.RESULT_READ_INIT;
|
||||
requestUri = initializationUri.attemptMerge(indexUri);
|
||||
if (requestUri != null) {
|
||||
expectedExtractorResult |= Extractor.RESULT_READ_INDEX;
|
||||
if (extractor.hasRelativeIndexOffsets()) {
|
||||
indexAnchor = indexUri.start + indexUri.length;
|
||||
}
|
||||
} else {
|
||||
if (requestUri == null) {
|
||||
requestUri = initializationUri;
|
||||
}
|
||||
} else {
|
||||
requestUri = indexUri;
|
||||
if (extractor.hasRelativeIndexOffsets()) {
|
||||
indexAnchor = indexUri.start + indexUri.length;
|
||||
}
|
||||
expectedExtractorResult |= Extractor.RESULT_READ_INDEX;
|
||||
}
|
||||
DataSpec dataSpec = new DataSpec(requestUri.getUri(), requestUri.start, requestUri.length,
|
||||
representation.getCacheKey());
|
||||
|
||||
return new InitializationLoadable(dataSource, dataSpec, trigger, representation.format,
|
||||
extractor, expectedExtractorResult, indexAnchor);
|
||||
return new InitializationChunk(dataSource, dataSpec, trigger, representation.format, extractor);
|
||||
}
|
||||
|
||||
private Chunk newMediaChunk(RepresentationHolder representationHolder, DataSource dataSource,
|
||||
|
|
@ -451,10 +461,9 @@ public class DashChunkSource implements ChunkSource {
|
|||
long startTimeUs = segmentIndex.getTimeUs(segmentNum);
|
||||
long endTimeUs = startTimeUs + segmentIndex.getDurationUs(segmentNum);
|
||||
|
||||
int absoluteSegmentNum = segmentNum + representationHolder.segmentNumShift;
|
||||
boolean isLastSegment = !currentManifest.dynamic
|
||||
&& segmentNum == segmentIndex.getLastSegmentNum();
|
||||
int nextAbsoluteSegmentNum = isLastSegment ? -1
|
||||
: (representationHolder.segmentNumShift + segmentNum + 1);
|
||||
|
||||
RangedUri segmentUri = segmentIndex.getSegmentUrl(segmentNum);
|
||||
DataSpec dataSpec = new DataSpec(segmentUri.getUri(), segmentUri.start, segmentUri.length,
|
||||
|
|
@ -470,12 +479,13 @@ public class DashChunkSource implements ChunkSource {
|
|||
representationHolder.vttHeader = headerBuilder.toString().getBytes();
|
||||
representationHolder.vttHeaderOffsetUs = presentationTimeOffsetUs;
|
||||
}
|
||||
return new SingleSampleMediaChunk(dataSource, dataSpec, representation.format, 0,
|
||||
startTimeUs, endTimeUs, nextAbsoluteSegmentNum, null, representationHolder.vttHeader);
|
||||
return new SingleSampleMediaChunk(dataSource, dataSpec, Chunk.TRIGGER_INITIAL,
|
||||
representation.format, startTimeUs, endTimeUs, absoluteSegmentNum, isLastSegment, null,
|
||||
null, representationHolder.vttHeader);
|
||||
} else {
|
||||
return new ContainerMediaChunk(dataSource, dataSpec, representation.format, trigger,
|
||||
startTimeUs, endTimeUs, nextAbsoluteSegmentNum, representationHolder.extractor,
|
||||
drmInitData, false, presentationTimeOffsetUs);
|
||||
return new ContainerMediaChunk(dataSource, dataSpec, trigger, representation.format,
|
||||
startTimeUs, endTimeUs, absoluteSegmentNum, isLastSegment, 0,
|
||||
representationHolder.extractorWrapper, representationHolder.format, drmInitData, true);
|
||||
}
|
||||
}
|
||||
|
||||
|
|
@ -559,52 +569,22 @@ public class DashChunkSource implements ChunkSource {
|
|||
Collections.singletonList(period));
|
||||
}
|
||||
|
||||
private class InitializationLoadable extends Chunk {
|
||||
|
||||
private final Extractor extractor;
|
||||
private final int expectedExtractorResult;
|
||||
private final long indexAnchor;
|
||||
private final Uri uri;
|
||||
|
||||
public InitializationLoadable(DataSource dataSource, DataSpec dataSpec, int trigger,
|
||||
Format format, Extractor extractor, int expectedExtractorResult,
|
||||
long indexAnchor) {
|
||||
super(dataSource, dataSpec, format, trigger);
|
||||
this.extractor = extractor;
|
||||
this.expectedExtractorResult = expectedExtractorResult;
|
||||
this.indexAnchor = indexAnchor;
|
||||
this.uri = dataSpec.uri;
|
||||
}
|
||||
|
||||
@Override
|
||||
protected void consumeStream(NonBlockingInputStream stream) throws IOException {
|
||||
int result = extractor.read(stream, null);
|
||||
if (result != expectedExtractorResult) {
|
||||
throw new ParserException("Invalid extractor result. Expected "
|
||||
+ expectedExtractorResult + ", got " + result);
|
||||
}
|
||||
if ((result & Extractor.RESULT_READ_INDEX) != 0) {
|
||||
representationHolders.get(format.id).segmentIndex =
|
||||
new DashWrappingSegmentIndex(extractor.getIndex(), uri.toString(), indexAnchor);
|
||||
}
|
||||
}
|
||||
|
||||
}
|
||||
|
||||
private static class RepresentationHolder {
|
||||
|
||||
public final Representation representation;
|
||||
public final Extractor extractor;
|
||||
public final ChunkExtractorWrapper extractorWrapper;
|
||||
|
||||
public DashSegmentIndex segmentIndex;
|
||||
public int segmentNumShift;
|
||||
public MediaFormat format;
|
||||
|
||||
public int segmentNumShift;
|
||||
public long vttHeaderOffsetUs;
|
||||
public byte[] vttHeader;
|
||||
|
||||
public RepresentationHolder(Representation representation, Extractor extractor) {
|
||||
public RepresentationHolder(Representation representation,
|
||||
ChunkExtractorWrapper extractorWrapper) {
|
||||
this.representation = representation;
|
||||
this.extractor = extractor;
|
||||
this.extractorWrapper = extractorWrapper;
|
||||
this.segmentIndex = representation.getIndex();
|
||||
}
|
||||
|
||||
|
|
|
|||
|
|
@ -15,30 +15,25 @@
|
|||
*/
|
||||
package com.google.android.exoplayer.dash;
|
||||
|
||||
import com.google.android.exoplayer.chunk.parser.SegmentIndex;
|
||||
import com.google.android.exoplayer.dash.mpd.RangedUri;
|
||||
import com.google.android.exoplayer.util.Util;
|
||||
import com.google.android.exoplayer.extractor.ChunkIndex;
|
||||
|
||||
/**
|
||||
* An implementation of {@link DashSegmentIndex} that wraps a {@link SegmentIndex} parsed from a
|
||||
* An implementation of {@link DashSegmentIndex} that wraps a {@link ChunkIndex} parsed from a
|
||||
* media stream.
|
||||
*/
|
||||
public class DashWrappingSegmentIndex implements DashSegmentIndex {
|
||||
|
||||
private final SegmentIndex segmentIndex;
|
||||
private final ChunkIndex chunkIndex;
|
||||
private final String uri;
|
||||
private final long indexAnchor;
|
||||
|
||||
/**
|
||||
* @param segmentIndex The {@link SegmentIndex} to wrap.
|
||||
* @param chunkIndex The {@link ChunkIndex} to wrap.
|
||||
* @param uri The URI where the data is located.
|
||||
* @param indexAnchor The index anchor point. This value is added to the byte offsets specified
|
||||
* in the wrapped {@link SegmentIndex}.
|
||||
*/
|
||||
public DashWrappingSegmentIndex(SegmentIndex segmentIndex, String uri, long indexAnchor) {
|
||||
this.segmentIndex = segmentIndex;
|
||||
public DashWrappingSegmentIndex(ChunkIndex chunkIndex, String uri) {
|
||||
this.chunkIndex = chunkIndex;
|
||||
this.uri = uri;
|
||||
this.indexAnchor = indexAnchor;
|
||||
}
|
||||
|
||||
@Override
|
||||
|
|
@ -48,28 +43,27 @@ public class DashWrappingSegmentIndex implements DashSegmentIndex {
|
|||
|
||||
@Override
|
||||
public int getLastSegmentNum() {
|
||||
return segmentIndex.length - 1;
|
||||
return chunkIndex.length - 1;
|
||||
}
|
||||
|
||||
@Override
|
||||
public long getTimeUs(int segmentNum) {
|
||||
return segmentIndex.timesUs[segmentNum];
|
||||
return chunkIndex.timesUs[segmentNum];
|
||||
}
|
||||
|
||||
@Override
|
||||
public long getDurationUs(int segmentNum) {
|
||||
return segmentIndex.durationsUs[segmentNum];
|
||||
return chunkIndex.durationsUs[segmentNum];
|
||||
}
|
||||
|
||||
@Override
|
||||
public RangedUri getSegmentUrl(int segmentNum) {
|
||||
return new RangedUri(uri, null, indexAnchor + segmentIndex.offsets[segmentNum],
|
||||
segmentIndex.sizes[segmentNum]);
|
||||
return new RangedUri(uri, null, chunkIndex.offsets[segmentNum], chunkIndex.sizes[segmentNum]);
|
||||
}
|
||||
|
||||
@Override
|
||||
public int getSegmentNum(long timeUs) {
|
||||
return Util.binarySearchFloor(segmentIndex.timesUs, timeUs, true, true);
|
||||
return chunkIndex.getChunkIndex(timeUs);
|
||||
}
|
||||
|
||||
@Override
|
||||
|
|
|
|||
|
|
@ -17,7 +17,7 @@ package com.google.android.exoplayer.extractor;
|
|||
|
||||
import com.google.android.exoplayer.MediaFormat;
|
||||
import com.google.android.exoplayer.SampleHolder;
|
||||
import com.google.android.exoplayer.upstream.BufferPool;
|
||||
import com.google.android.exoplayer.upstream.Allocator;
|
||||
import com.google.android.exoplayer.upstream.DataSource;
|
||||
import com.google.android.exoplayer.util.ParsableByteArray;
|
||||
|
||||
|
|
@ -41,8 +41,11 @@ public final class DefaultTrackOutput implements TrackOutput {
|
|||
private volatile long largestParsedTimestampUs;
|
||||
private volatile MediaFormat format;
|
||||
|
||||
public DefaultTrackOutput(BufferPool bufferPool) {
|
||||
rollingBuffer = new RollingSampleBuffer(bufferPool);
|
||||
/**
|
||||
* @param allocator An {@link Allocator} from which allocations for sample data can be obtained.
|
||||
*/
|
||||
public DefaultTrackOutput(Allocator allocator) {
|
||||
rollingBuffer = new RollingSampleBuffer(allocator);
|
||||
sampleInfoHolder = new SampleHolder(SampleHolder.BUFFER_REPLACEMENT_MODE_DISABLED);
|
||||
needKeyframe = true;
|
||||
lastReadTimeUs = Long.MIN_VALUE;
|
||||
|
|
|
|||
|
|
@ -17,7 +17,7 @@ package com.google.android.exoplayer.extractor;
|
|||
|
||||
import com.google.android.exoplayer.C;
|
||||
import com.google.android.exoplayer.SampleHolder;
|
||||
import com.google.android.exoplayer.upstream.BufferPool;
|
||||
import com.google.android.exoplayer.upstream.Allocator;
|
||||
import com.google.android.exoplayer.upstream.DataSource;
|
||||
import com.google.android.exoplayer.util.Assertions;
|
||||
import com.google.android.exoplayer.util.ParsableByteArray;
|
||||
|
|
@ -33,7 +33,7 @@ import java.util.concurrent.LinkedBlockingDeque;
|
|||
|
||||
private static final int INITIAL_SCRATCH_SIZE = 32;
|
||||
|
||||
private final BufferPool fragmentPool;
|
||||
private final Allocator allocator;
|
||||
private final int fragmentLength;
|
||||
|
||||
private final InfoQueue infoQueue;
|
||||
|
|
@ -49,9 +49,12 @@ import java.util.concurrent.LinkedBlockingDeque;
|
|||
private byte[] lastFragment;
|
||||
private int lastFragmentOffset;
|
||||
|
||||
public RollingSampleBuffer(BufferPool bufferPool) {
|
||||
this.fragmentPool = bufferPool;
|
||||
fragmentLength = bufferPool.bufferLength;
|
||||
/**
|
||||
* @param allocator An {@link Allocator} from which allocations for sample data can be obtained.
|
||||
*/
|
||||
public RollingSampleBuffer(Allocator allocator) {
|
||||
this.allocator = allocator;
|
||||
fragmentLength = allocator.getBufferLength();
|
||||
infoQueue = new InfoQueue();
|
||||
dataQueue = new LinkedBlockingDeque<byte[]>();
|
||||
extrasHolder = new SampleExtrasHolder();
|
||||
|
|
@ -67,7 +70,7 @@ import java.util.concurrent.LinkedBlockingDeque;
|
|||
public void clear() {
|
||||
infoQueue.clear();
|
||||
while (!dataQueue.isEmpty()) {
|
||||
fragmentPool.releaseDirect(dataQueue.remove());
|
||||
allocator.releaseBuffer(dataQueue.remove());
|
||||
}
|
||||
totalBytesDropped = 0;
|
||||
totalBytesWritten = 0;
|
||||
|
|
@ -111,7 +114,7 @@ import java.util.concurrent.LinkedBlockingDeque;
|
|||
}
|
||||
// Discard the fragments.
|
||||
for (int i = 0; i < fragmentDiscardCount; i++) {
|
||||
fragmentPool.releaseDirect(dataQueue.removeLast());
|
||||
allocator.releaseBuffer(dataQueue.removeLast());
|
||||
}
|
||||
// Update lastFragment and lastFragmentOffset to reflect the new position.
|
||||
lastFragment = dataQueue.peekLast();
|
||||
|
|
@ -306,7 +309,7 @@ import java.util.concurrent.LinkedBlockingDeque;
|
|||
|
||||
/**
|
||||
* Discard any fragments that hold data prior to the specified absolute position, returning
|
||||
* them to the pool.
|
||||
* them to the allocator.
|
||||
*
|
||||
* @param absolutePosition The absolute position up to which fragments can be discarded.
|
||||
*/
|
||||
|
|
@ -314,7 +317,7 @@ import java.util.concurrent.LinkedBlockingDeque;
|
|||
int relativePosition = (int) (absolutePosition - totalBytesDropped);
|
||||
int fragmentIndex = relativePosition / fragmentLength;
|
||||
for (int i = 0; i < fragmentIndex; i++) {
|
||||
fragmentPool.releaseDirect(dataQueue.remove());
|
||||
allocator.releaseBuffer(dataQueue.remove());
|
||||
totalBytesDropped += fragmentLength;
|
||||
}
|
||||
}
|
||||
|
|
@ -419,7 +422,7 @@ import java.util.concurrent.LinkedBlockingDeque;
|
|||
private void ensureSpaceForWrite() {
|
||||
if (lastFragmentOffset == fragmentLength) {
|
||||
lastFragmentOffset = 0;
|
||||
lastFragment = fragmentPool.allocateDirect();
|
||||
lastFragment = allocator.allocateBuffer();
|
||||
dataQueue.add(lastFragment);
|
||||
}
|
||||
}
|
||||
|
|
|
|||
|
|
@ -22,7 +22,7 @@ import java.util.ArrayList;
|
|||
import java.util.Arrays;
|
||||
import java.util.List;
|
||||
|
||||
public abstract class Atom {
|
||||
/* package*/ abstract class Atom {
|
||||
|
||||
/** Size of an atom header, in bytes. */
|
||||
public static final int HEADER_SIZE = 8;
|
||||
|
|
|
|||
|
|
@ -31,7 +31,7 @@ import java.util.Collections;
|
|||
import java.util.List;
|
||||
|
||||
/** Utility methods for parsing MP4 format atom payloads according to ISO 14496-12. */
|
||||
public final class AtomParsers {
|
||||
/* package */ final class AtomParsers {
|
||||
|
||||
/** Channel counts for AC-3 audio, indexed by acmod. (See ETSI TS 102 366.) */
|
||||
private static final int[] AC3_CHANNEL_COUNTS = new int[] {2, 1, 2, 3, 3, 4, 4, 5};
|
||||
|
|
|
|||
|
|
@ -15,8 +15,7 @@
|
|||
*/
|
||||
package com.google.android.exoplayer.extractor.mp4;
|
||||
|
||||
// TODO: Make package private.
|
||||
public final class DefaultSampleValues {
|
||||
/* package */ final class DefaultSampleValues {
|
||||
|
||||
public final int sampleDescriptionIndex;
|
||||
public final int duration;
|
||||
|
|
|
|||
|
|
@ -18,7 +18,6 @@ package com.google.android.exoplayer.extractor.mp4;
|
|||
/**
|
||||
* Encapsulates information parsed from a track encryption (tenc) box in an MP4 stream.
|
||||
*/
|
||||
// TODO: Make package private.
|
||||
public final class TrackEncryptionBox {
|
||||
|
||||
/**
|
||||
|
|
|
|||
|
|
@ -16,7 +16,6 @@
|
|||
package com.google.android.exoplayer.extractor.mp4;
|
||||
|
||||
import com.google.android.exoplayer.extractor.ExtractorInput;
|
||||
import com.google.android.exoplayer.upstream.NonBlockingInputStream;
|
||||
import com.google.android.exoplayer.util.ParsableByteArray;
|
||||
|
||||
import java.io.IOException;
|
||||
|
|
@ -24,8 +23,7 @@ import java.io.IOException;
|
|||
/**
|
||||
* A holder for information corresponding to a single fragment of an mp4 file.
|
||||
*/
|
||||
// TODO: Make package private.
|
||||
public final class TrackFragment {
|
||||
/* package */ final class TrackFragment {
|
||||
|
||||
public int sampleDescriptionIndex;
|
||||
|
||||
|
|
@ -147,22 +145,6 @@ public final class TrackFragment {
|
|||
sampleEncryptionDataNeedsFill = false;
|
||||
}
|
||||
|
||||
/**
|
||||
* Fills {@link #sampleEncryptionData} for the current run from the provided source.
|
||||
*
|
||||
* @param source A source from which to read the encryption data.
|
||||
* @return True if the encryption data was filled. False if the source had insufficient data.
|
||||
*/
|
||||
public boolean fillEncryptionData(NonBlockingInputStream source) {
|
||||
if (source.getAvailableByteCount() < sampleEncryptionDataLength) {
|
||||
return false;
|
||||
}
|
||||
source.read(sampleEncryptionData.data, 0, sampleEncryptionDataLength);
|
||||
sampleEncryptionData.setPosition(0);
|
||||
sampleEncryptionDataNeedsFill = false;
|
||||
return true;
|
||||
}
|
||||
|
||||
public long getSamplePresentationTime(int index) {
|
||||
return sampleDecodingTimeTable[index] + sampleCompositionTimeOffsetTable[index];
|
||||
}
|
||||
|
|
|
|||
|
|
@ -20,7 +20,7 @@ import com.google.android.exoplayer.util.Assertions;
|
|||
import com.google.android.exoplayer.util.Util;
|
||||
|
||||
/** Sample table for a track in an MP4 file. */
|
||||
public final class TrackSampleTable {
|
||||
/* package */ final class TrackSampleTable {
|
||||
|
||||
/** Sample index when no sample is available. */
|
||||
public static final int NO_SAMPLE = -1;
|
||||
|
|
|
|||
|
|
@ -1,51 +0,0 @@
|
|||
/*
|
||||
* Copyright (C) 2014 The Android Open Source Project
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
*/
|
||||
package com.google.android.exoplayer.hls;
|
||||
|
||||
import com.google.android.exoplayer.upstream.DataSource;
|
||||
import com.google.android.exoplayer.upstream.DataSpec;
|
||||
import com.google.android.exoplayer.upstream.Loader.Loadable;
|
||||
import com.google.android.exoplayer.util.Assertions;
|
||||
|
||||
import java.io.IOException;
|
||||
|
||||
/**
|
||||
* An abstract base class for {@link Loadable} implementations that load chunks of data required
|
||||
* for the playback of HLS streams.
|
||||
*/
|
||||
public abstract class HlsChunk implements Loadable {
|
||||
|
||||
protected final DataSource dataSource;
|
||||
protected final DataSpec dataSpec;
|
||||
|
||||
/**
|
||||
* @param dataSource The source from which the data should be loaded.
|
||||
* @param dataSpec Defines the data to be loaded. {@code dataSpec.length} must not exceed
|
||||
* {@link Integer#MAX_VALUE}. If {@code dataSpec.length == C.LENGTH_UNBOUNDED} then
|
||||
* the length resolved by {@code dataSource.open(dataSpec)} must not exceed
|
||||
* {@link Integer#MAX_VALUE}.
|
||||
*/
|
||||
public HlsChunk(DataSource dataSource, DataSpec dataSpec) {
|
||||
Assertions.checkState(dataSpec.length <= Integer.MAX_VALUE);
|
||||
this.dataSource = Assertions.checkNotNull(dataSource);
|
||||
this.dataSpec = Assertions.checkNotNull(dataSpec);
|
||||
}
|
||||
|
||||
public abstract void consume() throws IOException;
|
||||
|
||||
public abstract boolean isLoadFinished();
|
||||
|
||||
}
|
||||
|
|
@ -17,21 +17,25 @@ package com.google.android.exoplayer.hls;
|
|||
|
||||
import com.google.android.exoplayer.C;
|
||||
import com.google.android.exoplayer.MediaFormat;
|
||||
import com.google.android.exoplayer.chunk.Chunk;
|
||||
import com.google.android.exoplayer.chunk.DataChunk;
|
||||
import com.google.android.exoplayer.chunk.Format;
|
||||
import com.google.android.exoplayer.extractor.Extractor;
|
||||
import com.google.android.exoplayer.extractor.ts.AdtsExtractor;
|
||||
import com.google.android.exoplayer.extractor.ts.TsExtractor;
|
||||
import com.google.android.exoplayer.upstream.Aes128DataSource;
|
||||
import com.google.android.exoplayer.upstream.BandwidthMeter;
|
||||
import com.google.android.exoplayer.upstream.BufferPool;
|
||||
import com.google.android.exoplayer.upstream.DataSource;
|
||||
import com.google.android.exoplayer.upstream.DataSpec;
|
||||
import com.google.android.exoplayer.upstream.HttpDataSource.InvalidResponseCodeException;
|
||||
import com.google.android.exoplayer.util.Assertions;
|
||||
import com.google.android.exoplayer.util.MimeTypes;
|
||||
import com.google.android.exoplayer.util.UriUtil;
|
||||
import com.google.android.exoplayer.util.Util;
|
||||
|
||||
import android.net.Uri;
|
||||
import android.os.SystemClock;
|
||||
import android.text.TextUtils;
|
||||
import android.util.Log;
|
||||
|
||||
import java.io.ByteArrayInputStream;
|
||||
|
|
@ -117,9 +121,10 @@ public class HlsChunkSource {
|
|||
private static final float BANDWIDTH_FRACTION = 0.8f;
|
||||
|
||||
private final BufferPool bufferPool;
|
||||
private final DataSource upstreamDataSource;
|
||||
private final DataSource dataSource;
|
||||
private final HlsPlaylistParser playlistParser;
|
||||
private final Variant[] enabledVariants;
|
||||
private final List<Variant> variants;
|
||||
private final HlsFormat[] enabledFormats;
|
||||
private final BandwidthMeter bandwidthMeter;
|
||||
private final int adaptiveMode;
|
||||
private final String baseUri;
|
||||
|
|
@ -137,11 +142,11 @@ public class HlsChunkSource {
|
|||
/* package */ boolean live;
|
||||
/* package */ long durationUs;
|
||||
|
||||
private int variantIndex;
|
||||
private DataSource encryptedDataSource;
|
||||
private int formatIndex;
|
||||
private Uri encryptionKeyUri;
|
||||
private String encryptedDataSourceIv;
|
||||
private byte[] encryptedDataSourceSecretKey;
|
||||
private byte[] encryptionKey;
|
||||
private String encryptionIvString;
|
||||
private byte[] encryptionIv;
|
||||
|
||||
public HlsChunkSource(DataSource dataSource, String playlistUrl, HlsPlaylist playlist,
|
||||
BandwidthMeter bandwidthMeter, int[] variantIndices, int adaptiveMode) {
|
||||
|
|
@ -173,7 +178,7 @@ public class HlsChunkSource {
|
|||
BandwidthMeter bandwidthMeter, int[] variantIndices, int adaptiveMode,
|
||||
int targetBufferSize, long targetBufferDurationMs, long minBufferDurationToSwitchUpMs,
|
||||
long maxBufferDurationToSwitchDownMs) {
|
||||
this.upstreamDataSource = dataSource;
|
||||
this.dataSource = dataSource;
|
||||
this.bandwidthMeter = bandwidthMeter;
|
||||
this.adaptiveMode = adaptiveMode;
|
||||
this.targetBufferSize = targetBufferSize;
|
||||
|
|
@ -185,30 +190,33 @@ public class HlsChunkSource {
|
|||
bufferPool = new BufferPool(256 * 1024);
|
||||
|
||||
if (playlist.type == HlsPlaylist.TYPE_MEDIA) {
|
||||
enabledVariants = new Variant[] {new Variant(0, playlistUrl, 0, null, -1, -1)};
|
||||
variants = Collections.singletonList(new Variant(playlistUrl, 0, null, -1, -1));
|
||||
variantIndices = null;
|
||||
mediaPlaylists = new HlsMediaPlaylist[1];
|
||||
mediaPlaylistBlacklistTimesMs = new long[1];
|
||||
lastMediaPlaylistLoadTimesMs = new long[1];
|
||||
setMediaPlaylist(0, (HlsMediaPlaylist) playlist);
|
||||
} else {
|
||||
Assertions.checkState(playlist.type == HlsPlaylist.TYPE_MASTER);
|
||||
enabledVariants = filterVariants((HlsMasterPlaylist) playlist, variantIndices);
|
||||
mediaPlaylists = new HlsMediaPlaylist[enabledVariants.length];
|
||||
mediaPlaylistBlacklistTimesMs = new long[enabledVariants.length];
|
||||
lastMediaPlaylistLoadTimesMs = new long[enabledVariants.length];
|
||||
variants = ((HlsMasterPlaylist) playlist).variants;
|
||||
int variantCount = variants.size();
|
||||
mediaPlaylists = new HlsMediaPlaylist[variantCount];
|
||||
mediaPlaylistBlacklistTimesMs = new long[variantCount];
|
||||
lastMediaPlaylistLoadTimesMs = new long[variantCount];
|
||||
}
|
||||
|
||||
enabledFormats = buildEnabledFormats(variants, variantIndices);
|
||||
|
||||
int maxWidth = -1;
|
||||
int maxHeight = -1;
|
||||
// Select the first variant from the master playlist that's enabled.
|
||||
long minOriginalVariantIndex = Integer.MAX_VALUE;
|
||||
for (int i = 0; i < enabledVariants.length; i++) {
|
||||
if (enabledVariants[i].index < minOriginalVariantIndex) {
|
||||
minOriginalVariantIndex = enabledVariants[i].index;
|
||||
variantIndex = i;
|
||||
int minEnabledVariantIndex = Integer.MAX_VALUE;
|
||||
for (int i = 0; i < enabledFormats.length; i++) {
|
||||
if (enabledFormats[i].variantIndex < minEnabledVariantIndex) {
|
||||
minEnabledVariantIndex = enabledFormats[i].variantIndex;
|
||||
formatIndex = i;
|
||||
}
|
||||
maxWidth = Math.max(enabledVariants[i].width, maxWidth);
|
||||
maxHeight = Math.max(enabledVariants[i].height, maxHeight);
|
||||
maxWidth = Math.max(enabledFormats[i].width, maxWidth);
|
||||
maxHeight = Math.max(enabledFormats[i].height, maxHeight);
|
||||
}
|
||||
// TODO: We should allow the default values to be passed through the constructor.
|
||||
this.maxWidth = maxWidth > 0 ? maxWidth : 1920;
|
||||
|
|
@ -232,7 +240,7 @@ public class HlsChunkSource {
|
|||
}
|
||||
|
||||
/**
|
||||
* Returns the next {@link HlsChunk} that should be loaded.
|
||||
* Returns the next {@link Chunk} that should be loaded.
|
||||
*
|
||||
* @param previousTsChunk The previously loaded chunk that the next chunk should follow.
|
||||
* @param seekPositionUs If there is no previous chunk, this parameter must specify the seek
|
||||
|
|
@ -240,7 +248,7 @@ public class HlsChunkSource {
|
|||
* @param playbackPositionUs The current playback position.
|
||||
* @return The next chunk to load.
|
||||
*/
|
||||
public HlsChunk getChunkOperation(TsChunk previousTsChunk, long seekPositionUs,
|
||||
public Chunk getChunkOperation(TsChunk previousTsChunk, long seekPositionUs,
|
||||
long playbackPositionUs) {
|
||||
if (previousTsChunk != null && (previousTsChunk.isLastChunk
|
||||
|| previousTsChunk.endTimeUs - playbackPositionUs >= targetBufferDurationUs)
|
||||
|
|
@ -249,24 +257,27 @@ public class HlsChunkSource {
|
|||
return null;
|
||||
}
|
||||
|
||||
int nextVariantIndex = variantIndex;
|
||||
boolean switchingVariant = false;
|
||||
boolean switchingVariantSpliced = false;
|
||||
int nextFormatIndex;
|
||||
boolean switchingVariant;
|
||||
boolean switchingVariantSpliced;
|
||||
if (adaptiveMode == ADAPTIVE_MODE_NONE) {
|
||||
// Do nothing.
|
||||
nextFormatIndex = formatIndex;
|
||||
switchingVariant = false;
|
||||
switchingVariantSpliced = false;
|
||||
} else {
|
||||
nextVariantIndex = getNextVariantIndex(previousTsChunk, playbackPositionUs);
|
||||
switchingVariant = nextVariantIndex != variantIndex;
|
||||
nextFormatIndex = getNextFormatIndex(previousTsChunk, playbackPositionUs);
|
||||
switchingVariant = nextFormatIndex != formatIndex;
|
||||
switchingVariantSpliced = switchingVariant && adaptiveMode == ADAPTIVE_MODE_SPLICE;
|
||||
}
|
||||
|
||||
HlsMediaPlaylist mediaPlaylist = mediaPlaylists[nextVariantIndex];
|
||||
int variantIndex = enabledFormats[nextFormatIndex].variantIndex;
|
||||
HlsMediaPlaylist mediaPlaylist = mediaPlaylists[variantIndex];
|
||||
if (mediaPlaylist == null) {
|
||||
// We don't have the media playlist for the next variant. Request it now.
|
||||
return newMediaPlaylistChunk(nextVariantIndex);
|
||||
return newMediaPlaylistChunk(variantIndex);
|
||||
}
|
||||
|
||||
variantIndex = nextVariantIndex;
|
||||
formatIndex = nextFormatIndex;
|
||||
int chunkMediaSequence = 0;
|
||||
boolean liveDiscontinuity = false;
|
||||
if (live) {
|
||||
|
|
@ -309,18 +320,17 @@ public class HlsChunkSource {
|
|||
Uri keyUri = UriUtil.resolveToUri(mediaPlaylist.baseUri, segment.encryptionKeyUri);
|
||||
if (!keyUri.equals(encryptionKeyUri)) {
|
||||
// Encryption is specified and the key has changed.
|
||||
HlsChunk toReturn = newEncryptionKeyChunk(keyUri, segment.encryptionIV);
|
||||
Chunk toReturn = newEncryptionKeyChunk(keyUri, segment.encryptionIV);
|
||||
return toReturn;
|
||||
}
|
||||
if (!Util.areEqual(segment.encryptionIV, encryptedDataSourceIv)) {
|
||||
initEncryptedDataSource(keyUri, segment.encryptionIV, encryptedDataSourceSecretKey);
|
||||
if (!Util.areEqual(segment.encryptionIV, encryptionIvString)) {
|
||||
setEncryptionData(keyUri, segment.encryptionIV, encryptionKey);
|
||||
}
|
||||
} else {
|
||||
clearEncryptedDataSource();
|
||||
clearEncryptionData();
|
||||
}
|
||||
|
||||
// Configure the data source and spec for the chunk.
|
||||
DataSource dataSource = encryptedDataSource != null ? encryptedDataSource : upstreamDataSource;
|
||||
DataSpec dataSpec = new DataSpec(chunkUri, segment.byterangeOffset, segment.byterangeLength,
|
||||
null);
|
||||
|
||||
|
|
@ -348,22 +358,44 @@ public class HlsChunkSource {
|
|||
: new TsExtractor(startTimeUs);
|
||||
extractorWrapper = new HlsExtractorWrapper(bufferPool, extractor, switchingVariantSpliced);
|
||||
} else {
|
||||
extractorWrapper = previousTsChunk.extractor;
|
||||
extractorWrapper = previousTsChunk.extractorWrapper;
|
||||
}
|
||||
|
||||
return new TsChunk(dataSource, dataSpec, extractorWrapper, enabledVariants[variantIndex].index,
|
||||
startTimeUs, endTimeUs, chunkMediaSequence, isLastChunk);
|
||||
return new TsChunk(dataSource, dataSpec, Chunk.TRIGGER_UNSPECIFIED, enabledFormats[formatIndex],
|
||||
startTimeUs, endTimeUs, chunkMediaSequence, isLastChunk, extractorWrapper, encryptionKey,
|
||||
encryptionIv);
|
||||
}
|
||||
|
||||
/**
|
||||
* Invoked when an error occurs loading a chunk.
|
||||
* Invoked when the {@link HlsSampleSource} has finished loading a chunk obtained from this
|
||||
* source.
|
||||
*
|
||||
* @param chunk The chunk whose load failed.
|
||||
* @param e The failure.
|
||||
* @param chunk The chunk whose load has been completed.
|
||||
*/
|
||||
public void onChunkLoadCompleted(Chunk chunk) {
|
||||
if (chunk instanceof MediaPlaylistChunk) {
|
||||
MediaPlaylistChunk mediaPlaylistChunk = (MediaPlaylistChunk) chunk;
|
||||
scratchSpace = mediaPlaylistChunk.getDataHolder();
|
||||
setMediaPlaylist(mediaPlaylistChunk.variantIndex, mediaPlaylistChunk.getResult());
|
||||
} else if (chunk instanceof EncryptionKeyChunk) {
|
||||
EncryptionKeyChunk encryptionKeyChunk = (EncryptionKeyChunk) chunk;
|
||||
scratchSpace = encryptionKeyChunk.getDataHolder();
|
||||
setEncryptionData(encryptionKeyChunk.dataSpec.uri, encryptionKeyChunk.iv,
|
||||
encryptionKeyChunk.getResult());
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Invoked when the {@link HlsSampleSource} encounters an error loading a chunk obtained from
|
||||
* this source.
|
||||
*
|
||||
* @param chunk The chunk whose load encountered the error.
|
||||
* @param e The error.
|
||||
* @return True if the error was handled by the source. False otherwise.
|
||||
*/
|
||||
public boolean onLoadError(HlsChunk chunk, IOException e) {
|
||||
if ((chunk instanceof MediaPlaylistChunk) && (e instanceof InvalidResponseCodeException)) {
|
||||
public boolean onChunkLoadError(Chunk chunk, IOException e) {
|
||||
if (chunk.bytesLoaded() == 0 && (chunk instanceof MediaPlaylistChunk)
|
||||
&& (e instanceof InvalidResponseCodeException)) {
|
||||
InvalidResponseCodeException responseCodeException = (InvalidResponseCodeException) e;
|
||||
int responseCode = responseCodeException.responseCode;
|
||||
if (responseCode == 404 || responseCode == 410) {
|
||||
|
|
@ -386,49 +418,52 @@ public class HlsChunkSource {
|
|||
return false;
|
||||
}
|
||||
|
||||
private int getNextVariantIndex(TsChunk previousTsChunk, long playbackPositionUs) {
|
||||
private int getNextFormatIndex(TsChunk previousTsChunk, long playbackPositionUs) {
|
||||
clearStaleBlacklistedPlaylists();
|
||||
if (previousTsChunk == null) {
|
||||
// Don't consider switching if we don't have a previous chunk.
|
||||
return variantIndex;
|
||||
return formatIndex;
|
||||
}
|
||||
long bitrateEstimate = bandwidthMeter.getBitrateEstimate();
|
||||
if (bitrateEstimate == BandwidthMeter.NO_ESTIMATE) {
|
||||
// Don't consider switching if we don't have a bandwidth estimate.
|
||||
return variantIndex;
|
||||
return formatIndex;
|
||||
}
|
||||
int idealVariantIndex = getVariantIndexForBandwdith(
|
||||
int idealFormatIndex = getFormatIndexForBandwidth(
|
||||
(int) (bitrateEstimate * BANDWIDTH_FRACTION));
|
||||
if (idealVariantIndex == variantIndex) {
|
||||
// We're already using the ideal variant.
|
||||
return variantIndex;
|
||||
if (idealFormatIndex == formatIndex) {
|
||||
// We're already using the ideal format.
|
||||
return formatIndex;
|
||||
}
|
||||
// We're not using the ideal variant for the available bandwidth, but only switch if the
|
||||
// We're not using the ideal format for the available bandwidth, but only switch if the
|
||||
// conditions are appropriate.
|
||||
long bufferedPositionUs = adaptiveMode == ADAPTIVE_MODE_SPLICE ? previousTsChunk.startTimeUs
|
||||
: previousTsChunk.endTimeUs;
|
||||
long bufferedUs = bufferedPositionUs - playbackPositionUs;
|
||||
if (mediaPlaylistBlacklistTimesMs[variantIndex] != 0
|
||||
|| (idealVariantIndex > variantIndex && bufferedUs < maxBufferDurationToSwitchDownUs)
|
||||
|| (idealVariantIndex < variantIndex && bufferedUs > minBufferDurationToSwitchUpUs)) {
|
||||
// Switch variant.
|
||||
return idealVariantIndex;
|
||||
if (mediaPlaylistBlacklistTimesMs[formatIndex] != 0
|
||||
|| (idealFormatIndex > formatIndex && bufferedUs < maxBufferDurationToSwitchDownUs)
|
||||
|| (idealFormatIndex < formatIndex && bufferedUs > minBufferDurationToSwitchUpUs)) {
|
||||
// Switch format.
|
||||
return idealFormatIndex;
|
||||
}
|
||||
// Stick with the current variant for now.
|
||||
return variantIndex;
|
||||
// Stick with the current format for now.
|
||||
return formatIndex;
|
||||
}
|
||||
|
||||
private int getVariantIndexForBandwdith(int bandwidth) {
|
||||
int lowestQualityEnabledVariant = 0;
|
||||
for (int i = 0; i < enabledVariants.length; i++) {
|
||||
if (mediaPlaylistBlacklistTimesMs[i] == 0) {
|
||||
if (enabledVariants[i].bandwidth <= bandwidth) {
|
||||
private int getFormatIndexForBandwidth(int bitrate) {
|
||||
int lowestQualityEnabledFormatIndex = -1;
|
||||
for (int i = 0; i < enabledFormats.length; i++) {
|
||||
int variantIndex = enabledFormats[i].variantIndex;
|
||||
if (mediaPlaylistBlacklistTimesMs[variantIndex] == 0) {
|
||||
if (enabledFormats[i].bitrate <= bitrate) {
|
||||
return i;
|
||||
}
|
||||
lowestQualityEnabledVariant = i;
|
||||
lowestQualityEnabledFormatIndex = i;
|
||||
}
|
||||
}
|
||||
return lowestQualityEnabledVariant;
|
||||
// At least one format should always be enabled.
|
||||
Assertions.checkState(lowestQualityEnabledFormatIndex != -1);
|
||||
return lowestQualityEnabledFormatIndex;
|
||||
}
|
||||
|
||||
private boolean shouldRerequestMediaPlaylist(int variantIndex) {
|
||||
|
|
@ -447,19 +482,19 @@ public class HlsChunkSource {
|
|||
}
|
||||
|
||||
private MediaPlaylistChunk newMediaPlaylistChunk(int variantIndex) {
|
||||
Uri mediaPlaylistUri = UriUtil.resolveToUri(baseUri, enabledVariants[variantIndex].url);
|
||||
Uri mediaPlaylistUri = UriUtil.resolveToUri(baseUri, variants.get(variantIndex).url);
|
||||
DataSpec dataSpec = new DataSpec(mediaPlaylistUri, 0, C.LENGTH_UNBOUNDED, null,
|
||||
DataSpec.FLAG_ALLOW_GZIP);
|
||||
return new MediaPlaylistChunk(variantIndex, upstreamDataSource, dataSpec,
|
||||
return new MediaPlaylistChunk(dataSource, dataSpec, scratchSpace, playlistParser, variantIndex,
|
||||
mediaPlaylistUri.toString());
|
||||
}
|
||||
|
||||
private EncryptionKeyChunk newEncryptionKeyChunk(Uri keyUri, String iv) {
|
||||
DataSpec dataSpec = new DataSpec(keyUri, 0, C.LENGTH_UNBOUNDED, null, DataSpec.FLAG_ALLOW_GZIP);
|
||||
return new EncryptionKeyChunk(upstreamDataSource, dataSpec, iv);
|
||||
return new EncryptionKeyChunk(dataSource, dataSpec, scratchSpace, iv);
|
||||
}
|
||||
|
||||
/* package */ void initEncryptedDataSource(Uri keyUri, String iv, byte[] secretKey) {
|
||||
/* package */ void setEncryptionData(Uri keyUri, String iv, byte[] secretKey) {
|
||||
String trimmedIv;
|
||||
if (iv.toLowerCase(Locale.getDefault()).startsWith("0x")) {
|
||||
trimmedIv = iv.substring(2);
|
||||
|
|
@ -473,17 +508,17 @@ public class HlsChunkSource {
|
|||
System.arraycopy(ivData, offset, ivDataWithPadding, ivDataWithPadding.length - ivData.length
|
||||
+ offset, ivData.length - offset);
|
||||
|
||||
encryptedDataSource = new Aes128DataSource(upstreamDataSource, secretKey, ivDataWithPadding);
|
||||
encryptionKeyUri = keyUri;
|
||||
encryptedDataSourceIv = iv;
|
||||
encryptedDataSourceSecretKey = secretKey;
|
||||
encryptionKey = secretKey;
|
||||
encryptionIvString = iv;
|
||||
encryptionIv = ivDataWithPadding;
|
||||
}
|
||||
|
||||
private void clearEncryptedDataSource() {
|
||||
private void clearEncryptionData() {
|
||||
encryptionKeyUri = null;
|
||||
encryptedDataSource = null;
|
||||
encryptedDataSourceIv = null;
|
||||
encryptedDataSourceSecretKey = null;
|
||||
encryptionKey = null;
|
||||
encryptionIvString = null;
|
||||
encryptionIv = null;
|
||||
}
|
||||
|
||||
/* package */ void setMediaPlaylist(int variantIndex, HlsMediaPlaylist mediaPlaylist) {
|
||||
|
|
@ -493,16 +528,15 @@ public class HlsChunkSource {
|
|||
durationUs = mediaPlaylist.durationUs;
|
||||
}
|
||||
|
||||
private static Variant[] filterVariants(HlsMasterPlaylist masterPlaylist, int[] variantIndices) {
|
||||
List<Variant> masterVariants = masterPlaylist.variants;
|
||||
private static HlsFormat[] buildEnabledFormats(List<Variant> variants, int[] variantIndices) {
|
||||
ArrayList<Variant> enabledVariants = new ArrayList<Variant>();
|
||||
if (variantIndices != null) {
|
||||
for (int i = 0; i < variantIndices.length; i++) {
|
||||
enabledVariants.add(masterVariants.get(variantIndices[i]));
|
||||
enabledVariants.add(variants.get(variantIndices[i]));
|
||||
}
|
||||
} else {
|
||||
// If variantIndices is null then all variants are initially considered.
|
||||
enabledVariants.addAll(masterVariants);
|
||||
enabledVariants.addAll(variants);
|
||||
}
|
||||
|
||||
ArrayList<Variant> definiteVideoVariants = new ArrayList<Variant>();
|
||||
|
|
@ -529,20 +563,26 @@ public class HlsChunkSource {
|
|||
// Leave the enabled variants unchanged. They're likely either all video or all audio.
|
||||
}
|
||||
|
||||
Collections.sort(enabledVariants, new Variant.DecreasingBandwidthComparator());
|
||||
HlsFormat[] enabledFormats = new HlsFormat[enabledVariants.size()];
|
||||
for (int i = 0; i < enabledFormats.length; i++) {
|
||||
Variant variant = enabledVariants.get(i);
|
||||
int variantIndex = variants.indexOf(variant);
|
||||
enabledFormats[i] = new HlsFormat(Integer.toString(variantIndex), variant.width,
|
||||
variant.height, variant.bitrate, variant.codecs, variantIndex);
|
||||
}
|
||||
|
||||
Variant[] enabledVariantsArray = new Variant[enabledVariants.size()];
|
||||
enabledVariants.toArray(enabledVariantsArray);
|
||||
return enabledVariantsArray;
|
||||
Arrays.sort(enabledFormats, new Format.DecreasingBandwidthComparator());
|
||||
return enabledFormats;
|
||||
}
|
||||
|
||||
private static boolean variantHasExplicitCodecWithPrefix(Variant variant, String prefix) {
|
||||
String[] codecs = variant.codecs;
|
||||
if (codecs == null) {
|
||||
String codecs = variant.codecs;
|
||||
if (TextUtils.isEmpty(codecs)) {
|
||||
return false;
|
||||
}
|
||||
for (int i = 0; i < codecs.length; i++) {
|
||||
if (codecs[i].startsWith(prefix)) {
|
||||
String[] codecArray = codecs.split("(\\s*,\\s*)|(\\s*$)");
|
||||
for (int i = 0; i < codecArray.length; i++) {
|
||||
if (codecArray[i].startsWith(prefix)) {
|
||||
return true;
|
||||
}
|
||||
}
|
||||
|
|
@ -568,47 +608,67 @@ public class HlsChunkSource {
|
|||
}
|
||||
}
|
||||
|
||||
private class MediaPlaylistChunk extends DataChunk {
|
||||
private static class MediaPlaylistChunk extends DataChunk {
|
||||
|
||||
@SuppressWarnings("hiding")
|
||||
/* package */ final int variantIndex;
|
||||
public final int variantIndex;
|
||||
|
||||
private final HlsPlaylistParser playlistParser;
|
||||
private final String playlistUrl;
|
||||
|
||||
public MediaPlaylistChunk(int variantIndex, DataSource dataSource, DataSpec dataSpec,
|
||||
String playlistUrl) {
|
||||
super(dataSource, dataSpec, scratchSpace);
|
||||
private HlsMediaPlaylist result;
|
||||
|
||||
public MediaPlaylistChunk(DataSource dataSource, DataSpec dataSpec, byte[] scratchSpace,
|
||||
HlsPlaylistParser playlistParser, int variantIndex, String playlistUrl) {
|
||||
super(dataSource, dataSpec, Chunk.TYPE_MANIFEST, Chunk.TRIGGER_UNSPECIFIED, null,
|
||||
scratchSpace);
|
||||
this.variantIndex = variantIndex;
|
||||
this.playlistParser = playlistParser;
|
||||
this.playlistUrl = playlistUrl;
|
||||
}
|
||||
|
||||
@Override
|
||||
protected void consume(byte[] data, int limit) throws IOException {
|
||||
HlsPlaylist playlist = playlistParser.parse(playlistUrl,
|
||||
result = (HlsMediaPlaylist) playlistParser.parse(playlistUrl,
|
||||
new ByteArrayInputStream(data, 0, limit));
|
||||
Assertions.checkState(playlist.type == HlsPlaylist.TYPE_MEDIA);
|
||||
HlsMediaPlaylist mediaPlaylist = (HlsMediaPlaylist) playlist;
|
||||
setMediaPlaylist(variantIndex, mediaPlaylist);
|
||||
// Recycle the allocation.
|
||||
scratchSpace = data;
|
||||
}
|
||||
|
||||
public HlsMediaPlaylist getResult() {
|
||||
return result;
|
||||
}
|
||||
|
||||
}
|
||||
|
||||
private class EncryptionKeyChunk extends DataChunk {
|
||||
private static class EncryptionKeyChunk extends DataChunk {
|
||||
|
||||
private final String iv;
|
||||
public final String iv;
|
||||
|
||||
public EncryptionKeyChunk(DataSource dataSource, DataSpec dataSpec, String iv) {
|
||||
super(dataSource, dataSpec, scratchSpace);
|
||||
private byte[] result;
|
||||
|
||||
public EncryptionKeyChunk(DataSource dataSource, DataSpec dataSpec, byte[] scratchSpace,
|
||||
String iv) {
|
||||
super(dataSource, dataSpec, Chunk.TYPE_DRM, Chunk.TRIGGER_UNSPECIFIED, null, scratchSpace);
|
||||
this.iv = iv;
|
||||
}
|
||||
|
||||
@Override
|
||||
protected void consume(byte[] data, int limit) throws IOException {
|
||||
initEncryptedDataSource(dataSpec.uri, iv, Arrays.copyOf(data, limit));
|
||||
// Recycle the allocation.
|
||||
scratchSpace = data;
|
||||
result = Arrays.copyOf(data, limit);
|
||||
}
|
||||
|
||||
public byte[] getResult() {
|
||||
return result;
|
||||
}
|
||||
|
||||
}
|
||||
|
||||
private static final class HlsFormat extends Format {
|
||||
|
||||
public final int variantIndex;
|
||||
|
||||
public HlsFormat(String id, int width, int height, int bitrate, String codecs,
|
||||
int variantIndex) {
|
||||
super(id, MimeTypes.APPLICATION_M3U8, width, height, -1, -1, bitrate, null, codecs);
|
||||
this.variantIndex = variantIndex;
|
||||
}
|
||||
|
||||
}
|
||||
|
|
|
|||
|
|
@ -139,11 +139,10 @@ public final class HlsPlaylistParser implements NetworkLoadable.Parser<HlsPlayli
|
|||
throws IOException {
|
||||
ArrayList<Variant> variants = new ArrayList<Variant>();
|
||||
ArrayList<Subtitle> subtitles = new ArrayList<Subtitle>();
|
||||
int bandwidth = 0;
|
||||
String[] codecs = null;
|
||||
int bitrate = 0;
|
||||
String codecs = null;
|
||||
int width = -1;
|
||||
int height = -1;
|
||||
int variantIndex = 0;
|
||||
|
||||
boolean expectingStreamInfUrl = false;
|
||||
String line;
|
||||
|
|
@ -163,13 +162,8 @@ public final class HlsPlaylistParser implements NetworkLoadable.Parser<HlsPlayli
|
|||
// TODO: Support other types of media tag.
|
||||
}
|
||||
} else if (line.startsWith(STREAM_INF_TAG)) {
|
||||
bandwidth = HlsParserUtil.parseIntAttr(line, BANDWIDTH_ATTR_REGEX, BANDWIDTH_ATTR);
|
||||
String codecsString = HlsParserUtil.parseOptionalStringAttr(line, CODECS_ATTR_REGEX);
|
||||
if (codecsString != null) {
|
||||
codecs = codecsString.split("(\\s*,\\s*)|(\\s*$)");
|
||||
} else {
|
||||
codecs = null;
|
||||
}
|
||||
bitrate = HlsParserUtil.parseIntAttr(line, BANDWIDTH_ATTR_REGEX, BANDWIDTH_ATTR);
|
||||
codecs = HlsParserUtil.parseOptionalStringAttr(line, CODECS_ATTR_REGEX);
|
||||
String resolutionString = HlsParserUtil.parseOptionalStringAttr(line,
|
||||
RESOLUTION_ATTR_REGEX);
|
||||
if (resolutionString != null) {
|
||||
|
|
@ -182,8 +176,8 @@ public final class HlsPlaylistParser implements NetworkLoadable.Parser<HlsPlayli
|
|||
}
|
||||
expectingStreamInfUrl = true;
|
||||
} else if (!line.startsWith("#") && expectingStreamInfUrl) {
|
||||
variants.add(new Variant(variantIndex++, line, bandwidth, codecs, width, height));
|
||||
bandwidth = 0;
|
||||
variants.add(new Variant(line, bitrate, codecs, width, height));
|
||||
bitrate = 0;
|
||||
codecs = null;
|
||||
width = -1;
|
||||
height = -1;
|
||||
|
|
|
|||
|
|
@ -22,6 +22,7 @@ import com.google.android.exoplayer.SampleHolder;
|
|||
import com.google.android.exoplayer.SampleSource;
|
||||
import com.google.android.exoplayer.TrackInfo;
|
||||
import com.google.android.exoplayer.TrackRenderer;
|
||||
import com.google.android.exoplayer.chunk.Chunk;
|
||||
import com.google.android.exoplayer.upstream.Loader;
|
||||
import com.google.android.exoplayer.upstream.Loader.Loadable;
|
||||
import com.google.android.exoplayer.util.Assertions;
|
||||
|
|
@ -62,7 +63,7 @@ public class HlsSampleSource implements SampleSource, Loader.Callback {
|
|||
private long pendingResetPositionUs;
|
||||
|
||||
private TsChunk previousTsLoadable;
|
||||
private HlsChunk currentLoadable;
|
||||
private Chunk currentLoadable;
|
||||
private boolean loadingFinished;
|
||||
|
||||
private Loader loader;
|
||||
|
|
@ -284,23 +285,15 @@ public class HlsSampleSource implements SampleSource, Loader.Callback {
|
|||
|
||||
@Override
|
||||
public void onLoadCompleted(Loadable loadable) {
|
||||
try {
|
||||
currentLoadable.consume();
|
||||
} catch (IOException e) {
|
||||
currentLoadableException = e;
|
||||
currentLoadableExceptionCount++;
|
||||
currentLoadableExceptionTimestamp = SystemClock.elapsedRealtime();
|
||||
currentLoadableExceptionFatal = true;
|
||||
} finally {
|
||||
if (isTsChunk(currentLoadable)) {
|
||||
TsChunk tsChunk = (TsChunk) loadable;
|
||||
loadingFinished = tsChunk.isLastChunk;
|
||||
}
|
||||
if (!currentLoadableExceptionFatal) {
|
||||
clearCurrentLoadable();
|
||||
}
|
||||
maybeStartLoading();
|
||||
chunkSource.onChunkLoadCompleted(currentLoadable);
|
||||
if (isTsChunk(currentLoadable)) {
|
||||
TsChunk tsChunk = (TsChunk) loadable;
|
||||
loadingFinished = tsChunk.isLastChunk;
|
||||
}
|
||||
if (!currentLoadableExceptionFatal) {
|
||||
clearCurrentLoadable();
|
||||
}
|
||||
maybeStartLoading();
|
||||
}
|
||||
|
||||
@Override
|
||||
|
|
@ -314,7 +307,7 @@ public class HlsSampleSource implements SampleSource, Loader.Callback {
|
|||
|
||||
@Override
|
||||
public void onLoadError(Loadable loadable, IOException e) {
|
||||
if (chunkSource.onLoadError(currentLoadable, e)) {
|
||||
if (chunkSource.onChunkLoadError(currentLoadable, e)) {
|
||||
// Error handled by source.
|
||||
clearCurrentLoadable();
|
||||
} else {
|
||||
|
|
@ -417,7 +410,7 @@ public class HlsSampleSource implements SampleSource, Loader.Callback {
|
|||
return;
|
||||
}
|
||||
|
||||
HlsChunk nextLoadable = chunkSource.getChunkOperation(previousTsLoadable,
|
||||
Chunk nextLoadable = chunkSource.getChunkOperation(previousTsLoadable,
|
||||
pendingResetPositionUs, downstreamPositionUs);
|
||||
if (nextLoadable == null) {
|
||||
return;
|
||||
|
|
@ -429,14 +422,14 @@ public class HlsSampleSource implements SampleSource, Loader.Callback {
|
|||
if (isPendingReset()) {
|
||||
pendingResetPositionUs = NO_RESET_PENDING;
|
||||
}
|
||||
if (extractors.isEmpty() || extractors.getLast() != previousTsLoadable.extractor) {
|
||||
extractors.addLast(previousTsLoadable.extractor);
|
||||
if (extractors.isEmpty() || extractors.getLast() != previousTsLoadable.extractorWrapper) {
|
||||
extractors.addLast(previousTsLoadable.extractorWrapper);
|
||||
}
|
||||
}
|
||||
loader.startLoading(currentLoadable, this);
|
||||
}
|
||||
|
||||
private boolean isTsChunk(HlsChunk chunk) {
|
||||
private boolean isTsChunk(Chunk chunk) {
|
||||
return chunk instanceof TsChunk;
|
||||
}
|
||||
|
||||
|
|
|
|||
|
|
@ -15,77 +15,59 @@
|
|||
*/
|
||||
package com.google.android.exoplayer.hls;
|
||||
|
||||
import com.google.android.exoplayer.chunk.Format;
|
||||
import com.google.android.exoplayer.chunk.MediaChunk;
|
||||
import com.google.android.exoplayer.extractor.DefaultExtractorInput;
|
||||
import com.google.android.exoplayer.extractor.Extractor;
|
||||
import com.google.android.exoplayer.extractor.ExtractorInput;
|
||||
import com.google.android.exoplayer.upstream.Aes128DataSource;
|
||||
import com.google.android.exoplayer.upstream.DataSource;
|
||||
import com.google.android.exoplayer.upstream.DataSpec;
|
||||
import com.google.android.exoplayer.util.Util;
|
||||
|
||||
import java.io.IOException;
|
||||
|
||||
/**
|
||||
* A MPEG2TS chunk.
|
||||
* An MPEG2TS chunk.
|
||||
*/
|
||||
public final class TsChunk extends HlsChunk {
|
||||
public final class TsChunk extends MediaChunk {
|
||||
|
||||
/**
|
||||
* The index of the variant in the master playlist.
|
||||
* The wrapped extractor into which this chunk is being consumed.
|
||||
*/
|
||||
public final int variantIndex;
|
||||
/**
|
||||
* The start time of the media contained by the chunk.
|
||||
*/
|
||||
public final long startTimeUs;
|
||||
/**
|
||||
* The end time of the media contained by the chunk.
|
||||
*/
|
||||
public final long endTimeUs;
|
||||
/**
|
||||
* The chunk index.
|
||||
*/
|
||||
public final int chunkIndex;
|
||||
/**
|
||||
* True if this is the last chunk in the media. False otherwise.
|
||||
*/
|
||||
public final boolean isLastChunk;
|
||||
/**
|
||||
* The extractor into which this chunk is being consumed.
|
||||
*/
|
||||
public final HlsExtractorWrapper extractor;
|
||||
public final HlsExtractorWrapper extractorWrapper;
|
||||
|
||||
private int loadPosition;
|
||||
private volatile boolean loadFinished;
|
||||
private final boolean isEncrypted;
|
||||
|
||||
private int bytesLoaded;
|
||||
private volatile boolean loadCanceled;
|
||||
|
||||
/**
|
||||
* @param dataSource A {@link DataSource} for loading the data.
|
||||
* @param dataSpec Defines the data to be loaded.
|
||||
* @param extractor An extractor to parse samples from the data.
|
||||
* @param variantIndex The index of the variant in the master playlist.
|
||||
* @param trigger The reason for this chunk being selected.
|
||||
* @param format The format of the stream to which this chunk belongs.
|
||||
* @param startTimeUs The start time of the media contained by the chunk, in microseconds.
|
||||
* @param endTimeUs The end time of the media contained by the chunk, in microseconds.
|
||||
* @param chunkIndex The index of the chunk.
|
||||
* @param isLastChunk True if this is the last chunk in the media. False otherwise.
|
||||
* @param extractorWrapper A wrapped extractor to parse samples from the data.
|
||||
* @param encryptionKey For AES encryption chunks, the encryption key.
|
||||
* @param encryptionIv For AES encryption chunks, the encryption initialization vector.
|
||||
*/
|
||||
public TsChunk(DataSource dataSource, DataSpec dataSpec, HlsExtractorWrapper extractor,
|
||||
int variantIndex, long startTimeUs, long endTimeUs, int chunkIndex, boolean isLastChunk) {
|
||||
super(dataSource, dataSpec);
|
||||
this.extractor = extractor;
|
||||
this.variantIndex = variantIndex;
|
||||
this.startTimeUs = startTimeUs;
|
||||
this.endTimeUs = endTimeUs;
|
||||
this.chunkIndex = chunkIndex;
|
||||
this.isLastChunk = isLastChunk;
|
||||
public TsChunk(DataSource dataSource, DataSpec dataSpec, int trigger, Format format,
|
||||
long startTimeUs, long endTimeUs, int chunkIndex, boolean isLastChunk,
|
||||
HlsExtractorWrapper extractorWrapper, byte[] encryptionKey, byte[] encryptionIv) {
|
||||
super(buildDataSource(dataSource, encryptionKey, encryptionIv), dataSpec, trigger, format,
|
||||
startTimeUs, endTimeUs, chunkIndex, isLastChunk);
|
||||
this.extractorWrapper = extractorWrapper;
|
||||
// Note: this.dataSource and dataSource may be different.
|
||||
this.isEncrypted = this.dataSource instanceof Aes128DataSource;
|
||||
}
|
||||
|
||||
@Override
|
||||
public void consume() throws IOException {
|
||||
// Do nothing.
|
||||
}
|
||||
|
||||
@Override
|
||||
public boolean isLoadFinished() {
|
||||
return loadFinished;
|
||||
public long bytesLoaded() {
|
||||
return bytesLoaded;
|
||||
}
|
||||
|
||||
// Loadable implementation
|
||||
|
|
@ -102,26 +84,51 @@ public final class TsChunk extends HlsChunk {
|
|||
|
||||
@Override
|
||||
public void load() throws IOException, InterruptedException {
|
||||
ExtractorInput input;
|
||||
// If we previously fed part of this chunk to the extractor, we need to skip it this time. For
|
||||
// encrypted content we need to skip the data by reading it through the source, so as to ensure
|
||||
// correct decryption of the remainder of the chunk. For clear content, we can request the
|
||||
// remainder of the chunk directly.
|
||||
DataSpec loadDataSpec;
|
||||
boolean skipLoadedBytes;
|
||||
if (isEncrypted) {
|
||||
loadDataSpec = dataSpec;
|
||||
skipLoadedBytes = bytesLoaded != 0;
|
||||
} else {
|
||||
loadDataSpec = Util.getRemainderDataSpec(dataSpec, bytesLoaded);
|
||||
skipLoadedBytes = false;
|
||||
}
|
||||
|
||||
try {
|
||||
input = new DefaultExtractorInput(dataSource, 0, dataSource.open(dataSpec));
|
||||
// If we previously fed part of this chunk to the extractor, skip it this time.
|
||||
// TODO: Ideally we'd construct a dataSpec that only loads the remainder of the data here,
|
||||
// rather than loading the whole chunk again and then skipping data we previously loaded. To
|
||||
// do this is straightforward for non-encrypted content, but more complicated for content
|
||||
// encrypted with AES, for which we'll need to modify the way that decryption is performed.
|
||||
input.skipFully(loadPosition);
|
||||
ExtractorInput input = new DefaultExtractorInput(dataSource, dataSpec.absoluteStreamPosition,
|
||||
dataSource.open(loadDataSpec));
|
||||
if (skipLoadedBytes) {
|
||||
input.skipFully(bytesLoaded);
|
||||
}
|
||||
try {
|
||||
int result = Extractor.RESULT_CONTINUE;
|
||||
while (result == Extractor.RESULT_CONTINUE && !loadCanceled) {
|
||||
result = extractor.read(input);
|
||||
result = extractorWrapper.read(input);
|
||||
}
|
||||
} finally {
|
||||
loadPosition = (int) input.getPosition();
|
||||
bytesLoaded = (int) (input.getPosition() - dataSpec.absoluteStreamPosition);
|
||||
}
|
||||
} finally {
|
||||
dataSource.close();
|
||||
}
|
||||
}
|
||||
|
||||
// Private methods
|
||||
|
||||
/**
|
||||
* If the content is encrypted, returns an {@link Aes128DataSource} that wraps the original in
|
||||
* order to decrypt the loaded data. Else returns the original.
|
||||
*/
|
||||
private static DataSource buildDataSource(DataSource dataSource, byte[] encryptionKey,
|
||||
byte[] encryptionIv) {
|
||||
if (encryptionKey == null || encryptionIv == null) {
|
||||
return dataSource;
|
||||
}
|
||||
return new Aes128DataSource(dataSource, encryptionKey, encryptionIv);
|
||||
}
|
||||
|
||||
}
|
||||
|
|
|
|||
|
|
@ -15,38 +15,19 @@
|
|||
*/
|
||||
package com.google.android.exoplayer.hls;
|
||||
|
||||
import java.util.Comparator;
|
||||
|
||||
/**
|
||||
* Variant stream reference.
|
||||
*/
|
||||
public final class Variant {
|
||||
|
||||
/**
|
||||
* Sorts {@link Variant} objects in order of decreasing bandwidth.
|
||||
* <p>
|
||||
* When two {@link Variant}s have the same bandwidth, the one with the lowest index comes first.
|
||||
*/
|
||||
public static final class DecreasingBandwidthComparator implements Comparator<Variant> {
|
||||
|
||||
@Override
|
||||
public int compare(Variant a, Variant b) {
|
||||
int bandwidthDifference = b.bandwidth - a.bandwidth;
|
||||
return bandwidthDifference != 0 ? bandwidthDifference : a.index - b.index;
|
||||
}
|
||||
|
||||
}
|
||||
|
||||
public final int index;
|
||||
public final int bandwidth;
|
||||
public final int bitrate;
|
||||
public final String url;
|
||||
public final String[] codecs;
|
||||
public final String codecs;
|
||||
public final int width;
|
||||
public final int height;
|
||||
|
||||
public Variant(int index, String url, int bandwidth, String[] codecs, int width, int height) {
|
||||
this.index = index;
|
||||
this.bandwidth = bandwidth;
|
||||
public Variant(String url, int bitrate, String codecs, int width, int height) {
|
||||
this.bitrate = bitrate;
|
||||
this.url = url;
|
||||
this.codecs = codecs;
|
||||
this.width = width;
|
||||
|
|
|
|||
|
|
@ -19,6 +19,7 @@ import com.google.android.exoplayer.BehindLiveWindowException;
|
|||
import com.google.android.exoplayer.MediaFormat;
|
||||
import com.google.android.exoplayer.TrackInfo;
|
||||
import com.google.android.exoplayer.chunk.Chunk;
|
||||
import com.google.android.exoplayer.chunk.ChunkExtractorWrapper;
|
||||
import com.google.android.exoplayer.chunk.ChunkOperationHolder;
|
||||
import com.google.android.exoplayer.chunk.ChunkSource;
|
||||
import com.google.android.exoplayer.chunk.ContainerMediaChunk;
|
||||
|
|
@ -27,9 +28,8 @@ import com.google.android.exoplayer.chunk.Format.DecreasingBandwidthComparator;
|
|||
import com.google.android.exoplayer.chunk.FormatEvaluator;
|
||||
import com.google.android.exoplayer.chunk.FormatEvaluator.Evaluation;
|
||||
import com.google.android.exoplayer.chunk.MediaChunk;
|
||||
import com.google.android.exoplayer.chunk.parser.Extractor;
|
||||
import com.google.android.exoplayer.chunk.parser.mp4.FragmentedMp4Extractor;
|
||||
import com.google.android.exoplayer.drm.DrmInitData;
|
||||
import com.google.android.exoplayer.extractor.mp4.FragmentedMp4Extractor;
|
||||
import com.google.android.exoplayer.extractor.mp4.Track;
|
||||
import com.google.android.exoplayer.extractor.mp4.TrackEncryptionBox;
|
||||
import com.google.android.exoplayer.smoothstreaming.SmoothStreamingManifest.ProtectionElement;
|
||||
|
|
@ -70,7 +70,8 @@ public class SmoothStreamingChunkSource implements ChunkSource {
|
|||
private final int maxWidth;
|
||||
private final int maxHeight;
|
||||
|
||||
private final SparseArray<FragmentedMp4Extractor> extractors;
|
||||
private final SparseArray<ChunkExtractorWrapper> extractorWrappers;
|
||||
private final SparseArray<MediaFormat> mediaFormats;
|
||||
private final DrmInitData drmInitData;
|
||||
private final SmoothStreamingFormat[] formats;
|
||||
|
||||
|
|
@ -152,7 +153,8 @@ public class SmoothStreamingChunkSource implements ChunkSource {
|
|||
|
||||
int trackCount = trackIndices != null ? trackIndices.length : streamElement.tracks.length;
|
||||
formats = new SmoothStreamingFormat[trackCount];
|
||||
extractors = new SparseArray<FragmentedMp4Extractor>();
|
||||
extractorWrappers = new SparseArray<ChunkExtractorWrapper>();
|
||||
mediaFormats = new SparseArray<MediaFormat>();
|
||||
int maxWidth = 0;
|
||||
int maxHeight = 0;
|
||||
for (int i = 0; i < trackCount; i++) {
|
||||
|
|
@ -171,7 +173,8 @@ public class SmoothStreamingChunkSource implements ChunkSource {
|
|||
FragmentedMp4Extractor.WORKAROUND_EVERY_VIDEO_FRAME_IS_SYNC_FRAME);
|
||||
extractor.setTrack(new Track(trackIndex, trackType, streamElement.timescale,
|
||||
initialManifest.durationUs, mediaFormat, trackEncryptionBoxes));
|
||||
extractors.put(trackIndex, extractor);
|
||||
extractorWrappers.put(trackIndex, new ChunkExtractorWrapper(extractor));
|
||||
mediaFormats.put(trackIndex, mediaFormat);
|
||||
}
|
||||
this.maxHeight = maxHeight;
|
||||
this.maxWidth = maxWidth;
|
||||
|
|
@ -271,7 +274,8 @@ public class SmoothStreamingChunkSource implements ChunkSource {
|
|||
}
|
||||
chunkIndex = streamElement.getChunkIndex(seekPositionUs);
|
||||
} else {
|
||||
chunkIndex = queue.get(out.queueSize - 1).nextChunkIndex - currentManifestChunkOffset;
|
||||
MediaChunk previous = queue.get(out.queueSize - 1);
|
||||
chunkIndex = previous.isLastChunk ? -1 : previous.chunkIndex + 1 - currentManifestChunkOffset;
|
||||
}
|
||||
|
||||
if (currentManifest.isLive) {
|
||||
|
|
@ -295,14 +299,15 @@ public class SmoothStreamingChunkSource implements ChunkSource {
|
|||
|
||||
boolean isLastChunk = !currentManifest.isLive && chunkIndex == streamElement.chunkCount - 1;
|
||||
long chunkStartTimeUs = streamElement.getStartTimeUs(chunkIndex);
|
||||
long nextChunkStartTimeUs = isLastChunk ? -1
|
||||
long chunkEndTimeUs = isLastChunk ? -1
|
||||
: chunkStartTimeUs + streamElement.getChunkDurationUs(chunkIndex);
|
||||
int currentAbsoluteChunkIndex = chunkIndex + currentManifestChunkOffset;
|
||||
|
||||
Uri uri = streamElement.buildRequestUri(selectedFormat.trackIndex, chunkIndex);
|
||||
Chunk mediaChunk = newMediaChunk(selectedFormat, uri, null,
|
||||
extractors.get(Integer.parseInt(selectedFormat.id)), drmInitData, dataSource,
|
||||
currentAbsoluteChunkIndex, isLastChunk, chunkStartTimeUs, nextChunkStartTimeUs, 0);
|
||||
int trackIndex = selectedFormat.trackIndex;
|
||||
Uri uri = streamElement.buildRequestUri(trackIndex, chunkIndex);
|
||||
Chunk mediaChunk = newMediaChunk(selectedFormat, uri, null, extractorWrappers.get(trackIndex),
|
||||
drmInitData, dataSource, currentAbsoluteChunkIndex, isLastChunk, chunkStartTimeUs,
|
||||
chunkEndTimeUs, evaluation.trigger, mediaFormats.get(trackIndex));
|
||||
out.chunk = mediaChunk;
|
||||
}
|
||||
|
||||
|
|
@ -312,6 +317,11 @@ public class SmoothStreamingChunkSource implements ChunkSource {
|
|||
: (manifestFetcher != null ? manifestFetcher.getError() : null);
|
||||
}
|
||||
|
||||
@Override
|
||||
public void onChunkLoadCompleted(Chunk chunk) {
|
||||
// Do nothing.
|
||||
}
|
||||
|
||||
@Override
|
||||
public void onChunkLoadError(Chunk chunk, Exception e) {
|
||||
// Do nothing.
|
||||
|
|
@ -367,16 +377,16 @@ public class SmoothStreamingChunkSource implements ChunkSource {
|
|||
}
|
||||
|
||||
private static MediaChunk newMediaChunk(Format formatInfo, Uri uri, String cacheKey,
|
||||
Extractor extractor, DrmInitData drmInitData, DataSource dataSource, int chunkIndex,
|
||||
boolean isLast, long chunkStartTimeUs, long nextChunkStartTimeUs, int trigger) {
|
||||
int nextChunkIndex = isLast ? -1 : chunkIndex + 1;
|
||||
long nextStartTimeUs = isLast ? -1 : nextChunkStartTimeUs;
|
||||
ChunkExtractorWrapper extractorWrapper, DrmInitData drmInitData, DataSource dataSource,
|
||||
int chunkIndex, boolean isLast, long chunkStartTimeUs, long chunkEndTimeUs,
|
||||
int trigger, MediaFormat mediaFormat) {
|
||||
long offset = 0;
|
||||
DataSpec dataSpec = new DataSpec(uri, offset, -1, cacheKey);
|
||||
// In SmoothStreaming each chunk contains sample timestamps relative to the start of the chunk.
|
||||
// To convert them the absolute timestamps, we need to set sampleOffsetUs to -chunkStartTimeUs.
|
||||
return new ContainerMediaChunk(dataSource, dataSpec, formatInfo, trigger, chunkStartTimeUs,
|
||||
nextStartTimeUs, nextChunkIndex, extractor, drmInitData, false, -chunkStartTimeUs);
|
||||
return new ContainerMediaChunk(dataSource, dataSpec, trigger, formatInfo, chunkStartTimeUs,
|
||||
chunkEndTimeUs, chunkIndex, isLast, chunkStartTimeUs, extractorWrapper, mediaFormat,
|
||||
drmInitData, true);
|
||||
}
|
||||
|
||||
private static byte[] getKeyId(byte[] initData) {
|
||||
|
|
|
|||
|
|
@ -1,77 +0,0 @@
|
|||
/*
|
||||
* Copyright (C) 2014 The Android Open Source Project
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
*/
|
||||
package com.google.android.exoplayer.upstream;
|
||||
|
||||
/**
|
||||
* An {@link Allocation}, defined to consist of a set of fragments of underlying byte arrays.
|
||||
* <p>
|
||||
* The byte arrays in which the fragments are located are obtained by {@link #getBuffers}. For
|
||||
* each, the offset and length of the fragment within the byte array are obtained using
|
||||
* {@link #getFragmentOffset} and {@link #getFragmentLength} respectively.
|
||||
*/
|
||||
public interface Allocation {
|
||||
|
||||
/**
|
||||
* Ensures the allocation has a capacity greater than or equal to the specified size in bytes.
|
||||
* <p>
|
||||
* If {@code size} is greater than the current capacity of the allocation, then it will grow
|
||||
* to have a capacity of at least {@code size}. The allocation is grown by adding new fragments.
|
||||
* Existing fragments remain unchanged, and any data that has been written to them will be
|
||||
* preserved.
|
||||
* <p>
|
||||
* If {@code size} is less than or equal to the capacity of the allocation, then the call is a
|
||||
* no-op.
|
||||
*
|
||||
* @param size The minimum required capacity, in bytes.
|
||||
*/
|
||||
public void ensureCapacity(int size);
|
||||
|
||||
/**
|
||||
* Gets the capacity of the allocation, in bytes.
|
||||
*
|
||||
* @return The capacity of the allocation, in bytes.
|
||||
*/
|
||||
public int capacity();
|
||||
|
||||
/**
|
||||
* Gets the buffers in which the fragments are allocated.
|
||||
*
|
||||
* @return The buffers in which the fragments are allocated.
|
||||
*/
|
||||
public byte[][] getBuffers();
|
||||
|
||||
/**
|
||||
* The offset of the fragment in the buffer at the specified index.
|
||||
*
|
||||
* @param index The index of the buffer.
|
||||
* @return The offset of the fragment in the buffer.
|
||||
*/
|
||||
public int getFragmentOffset(int index);
|
||||
|
||||
/**
|
||||
* The length of the fragment in the buffer at the specified index.
|
||||
*
|
||||
* @param index The index of the buffer.
|
||||
* @return The length of the fragment in the buffer.
|
||||
*/
|
||||
public int getFragmentLength(int index);
|
||||
|
||||
/**
|
||||
* Releases the allocation.
|
||||
*/
|
||||
public void release();
|
||||
|
||||
}
|
||||
|
|
@ -16,32 +16,43 @@
|
|||
package com.google.android.exoplayer.upstream;
|
||||
|
||||
/**
|
||||
* A source of {@link Allocation}s.
|
||||
* A source of allocations.
|
||||
*/
|
||||
public interface Allocator {
|
||||
|
||||
/**
|
||||
* Obtains an allocation of at least the specified size.
|
||||
* Obtain a buffer from the allocator.
|
||||
* <p>
|
||||
* When the caller has finished with the buffer, it should be returned by calling
|
||||
* {@link #releaseBuffer(byte[])}.
|
||||
*
|
||||
* @param size The size of the required allocation, in bytes.
|
||||
* @return The allocation.
|
||||
* @return The allocated buffer.
|
||||
*/
|
||||
public Allocation allocate(int size);
|
||||
byte[] allocateBuffer();
|
||||
|
||||
/**
|
||||
* Return a buffer to the allocator.
|
||||
*
|
||||
* @param buffer The buffer being returned.
|
||||
*/
|
||||
void releaseBuffer(byte[] buffer);
|
||||
|
||||
/**
|
||||
* Hints to the {@link Allocator} that it should make a best effort to release any memory that it
|
||||
* has allocated for the purpose of backing {@link Allocation}s, beyond the specified target
|
||||
* number of bytes.
|
||||
* has allocated, beyond the specified target number of bytes.
|
||||
*
|
||||
* @param targetSize The target size in bytes.
|
||||
*/
|
||||
public void trim(int targetSize);
|
||||
void trim(int targetSize);
|
||||
|
||||
/**
|
||||
* Returns the number of bytes currently allocated in the form of {@link Allocation}s.
|
||||
*
|
||||
* @return The number of allocated bytes.
|
||||
* Returns the total size of all allocated buffers.
|
||||
*/
|
||||
public int getAllocatedSize();
|
||||
int getAllocatedSize();
|
||||
|
||||
/**
|
||||
* Returns the length of each buffer provided by the allocator.
|
||||
*/
|
||||
int getBufferLength();
|
||||
|
||||
}
|
||||
|
|
|
|||
|
|
@ -20,23 +20,16 @@ import com.google.android.exoplayer.util.Assertions;
|
|||
import java.util.Arrays;
|
||||
|
||||
/**
|
||||
* An {@link Allocator} that maintains a pool of fixed length byte arrays (buffers).
|
||||
* <p>
|
||||
* An {@link Allocation} obtained from a {@link BufferPool} consists of the whole number of these
|
||||
* buffers. When an {@link Allocation} is released, the underlying buffers are returned to the pool
|
||||
* for re-use.
|
||||
* Default implementation of {@link Allocator}.
|
||||
*/
|
||||
public final class BufferPool implements Allocator {
|
||||
|
||||
private static final int INITIAL_RECYCLED_BUFFERS_CAPACITY = 100;
|
||||
|
||||
/**
|
||||
* The length in bytes of each individual buffer in the pool.
|
||||
*/
|
||||
public final int bufferLength;
|
||||
private final int bufferLength;
|
||||
|
||||
private int allocatedBufferCount;
|
||||
private int recycledBufferCount;
|
||||
private int allocatedCount;
|
||||
private int recycledCount;
|
||||
private byte[][] recycledBuffers;
|
||||
|
||||
/**
|
||||
|
|
@ -51,81 +44,42 @@ public final class BufferPool implements Allocator {
|
|||
}
|
||||
|
||||
@Override
|
||||
public synchronized int getAllocatedSize() {
|
||||
return allocatedBufferCount * bufferLength;
|
||||
public synchronized byte[] allocateBuffer() {
|
||||
allocatedCount++;
|
||||
return recycledCount > 0 ? recycledBuffers[--recycledCount] : new byte[bufferLength];
|
||||
}
|
||||
|
||||
@Override
|
||||
public synchronized void releaseBuffer(byte[] buffer) {
|
||||
// Weak sanity check that the buffer probably originated from this pool.
|
||||
Assertions.checkArgument(buffer.length == bufferLength);
|
||||
allocatedCount--;
|
||||
if (recycledCount == recycledBuffers.length) {
|
||||
recycledBuffers = Arrays.copyOf(recycledBuffers, recycledBuffers.length * 2);
|
||||
}
|
||||
recycledBuffers[recycledCount++] = buffer;
|
||||
// Wake up threads waiting for the allocated size to drop.
|
||||
notifyAll();
|
||||
}
|
||||
|
||||
@Override
|
||||
public synchronized void trim(int targetSize) {
|
||||
int targetBufferCount = (targetSize + bufferLength - 1) / bufferLength;
|
||||
int targetRecycledBufferCount = Math.max(0, targetBufferCount - allocatedBufferCount);
|
||||
if (targetRecycledBufferCount < recycledBufferCount) {
|
||||
Arrays.fill(recycledBuffers, targetRecycledBufferCount, recycledBufferCount, null);
|
||||
recycledBufferCount = targetRecycledBufferCount;
|
||||
int targetRecycledBufferCount = Math.max(0, targetBufferCount - allocatedCount);
|
||||
if (targetRecycledBufferCount < recycledCount) {
|
||||
Arrays.fill(recycledBuffers, targetRecycledBufferCount, recycledCount, null);
|
||||
recycledCount = targetRecycledBufferCount;
|
||||
}
|
||||
}
|
||||
|
||||
@Override
|
||||
public synchronized Allocation allocate(int size) {
|
||||
return new AllocationImpl(allocate(size, null));
|
||||
public synchronized int getAllocatedSize() {
|
||||
return allocatedCount * bufferLength;
|
||||
}
|
||||
|
||||
/**
|
||||
* Allocates byte arrays whose combined length is at least {@code size}.
|
||||
* <p>
|
||||
* An existing array of byte arrays may be provided to form the start of the allocation.
|
||||
*
|
||||
* @param size The total size required, in bytes.
|
||||
* @param existing Existing byte arrays to use as the start of the allocation. May be null.
|
||||
* @return The allocated byte arrays.
|
||||
*/
|
||||
/* package */ synchronized byte[][] allocate(int size, byte[][] existing) {
|
||||
int requiredBufferCount = requiredBufferCount(size);
|
||||
if (existing != null && requiredBufferCount <= existing.length) {
|
||||
// The existing buffers are sufficient.
|
||||
return existing;
|
||||
}
|
||||
// We need to allocate additional buffers.
|
||||
byte[][] buffers = new byte[requiredBufferCount][];
|
||||
int firstNewBufferIndex = 0;
|
||||
if (existing != null) {
|
||||
firstNewBufferIndex = existing.length;
|
||||
System.arraycopy(existing, 0, buffers, 0, firstNewBufferIndex);
|
||||
}
|
||||
// Allocate the new buffers
|
||||
allocatedBufferCount += requiredBufferCount - firstNewBufferIndex;
|
||||
for (int i = firstNewBufferIndex; i < requiredBufferCount; i++) {
|
||||
// Use a recycled buffer if one is available. Else instantiate a new one.
|
||||
buffers[i] = nextBuffer();
|
||||
}
|
||||
return buffers;
|
||||
}
|
||||
|
||||
/**
|
||||
* Obtain a single buffer directly from the pool.
|
||||
* <p>
|
||||
* When the caller has finished with the buffer, it should be returned to the pool by calling
|
||||
* {@link #releaseDirect(byte[])}.
|
||||
*
|
||||
* @return The allocated buffer.
|
||||
*/
|
||||
public synchronized byte[] allocateDirect() {
|
||||
allocatedBufferCount++;
|
||||
return nextBuffer();
|
||||
}
|
||||
|
||||
/**
|
||||
* Return a single buffer to the pool.
|
||||
*
|
||||
* @param buffer The buffer being returned.
|
||||
*/
|
||||
public synchronized void releaseDirect(byte[] buffer) {
|
||||
// Weak sanity check that the buffer probably originated from this pool.
|
||||
Assertions.checkArgument(buffer.length == bufferLength);
|
||||
allocatedBufferCount--;
|
||||
|
||||
ensureRecycledBufferCapacity(recycledBufferCount + 1);
|
||||
recycledBuffers[recycledBufferCount++] = buffer;
|
||||
@Override
|
||||
public int getBufferLength() {
|
||||
return bufferLength;
|
||||
}
|
||||
|
||||
/**
|
||||
|
|
@ -138,82 +92,4 @@ public final class BufferPool implements Allocator {
|
|||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Returns the buffers belonging to an allocation to the pool.
|
||||
*
|
||||
* @param allocation The allocation to return.
|
||||
*/
|
||||
/* package */ synchronized void release(AllocationImpl allocation) {
|
||||
byte[][] buffers = allocation.getBuffers();
|
||||
allocatedBufferCount -= buffers.length;
|
||||
|
||||
int newRecycledBufferCount = recycledBufferCount + buffers.length;
|
||||
ensureRecycledBufferCapacity(newRecycledBufferCount);
|
||||
System.arraycopy(buffers, 0, recycledBuffers, recycledBufferCount, buffers.length);
|
||||
recycledBufferCount = newRecycledBufferCount;
|
||||
}
|
||||
|
||||
private int requiredBufferCount(long size) {
|
||||
return (int) ((size + bufferLength - 1) / bufferLength);
|
||||
}
|
||||
|
||||
private byte[] nextBuffer() {
|
||||
return recycledBufferCount > 0 ? recycledBuffers[--recycledBufferCount]
|
||||
: new byte[bufferLength];
|
||||
}
|
||||
|
||||
private void ensureRecycledBufferCapacity(int requiredCapacity) {
|
||||
if (recycledBuffers.length < requiredCapacity) {
|
||||
// Expand the capacity of the recycled buffers array.
|
||||
byte[][] newRecycledBuffers = new byte[requiredCapacity * 2][];
|
||||
if (recycledBufferCount > 0) {
|
||||
System.arraycopy(recycledBuffers, 0, newRecycledBuffers, 0, recycledBufferCount);
|
||||
}
|
||||
recycledBuffers = newRecycledBuffers;
|
||||
}
|
||||
}
|
||||
|
||||
private class AllocationImpl implements Allocation {
|
||||
|
||||
private byte[][] buffers;
|
||||
|
||||
public AllocationImpl(byte[][] buffers) {
|
||||
this.buffers = buffers;
|
||||
}
|
||||
|
||||
@Override
|
||||
public void ensureCapacity(int size) {
|
||||
buffers = allocate(size, buffers);
|
||||
}
|
||||
|
||||
@Override
|
||||
public int capacity() {
|
||||
return bufferLength * buffers.length;
|
||||
}
|
||||
|
||||
@Override
|
||||
public byte[][] getBuffers() {
|
||||
return buffers;
|
||||
}
|
||||
|
||||
@Override
|
||||
public int getFragmentOffset(int index) {
|
||||
return 0;
|
||||
}
|
||||
|
||||
@Override
|
||||
public int getFragmentLength(int index) {
|
||||
return bufferLength;
|
||||
}
|
||||
|
||||
@Override
|
||||
public void release() {
|
||||
if (buffers != null) {
|
||||
BufferPool.this.release(this);
|
||||
buffers = null;
|
||||
}
|
||||
}
|
||||
|
||||
}
|
||||
|
||||
}
|
||||
|
|
|
|||
|
|
@ -1,150 +0,0 @@
|
|||
/*
|
||||
* Copyright (C) 2014 The Android Open Source Project
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
*/
|
||||
package com.google.android.exoplayer.upstream;
|
||||
|
||||
import com.google.android.exoplayer.util.Assertions;
|
||||
|
||||
import java.nio.ByteBuffer;
|
||||
|
||||
/**
|
||||
* Input stream with non-blocking reading/skipping that also stores read/skipped data in a buffer.
|
||||
* Call {@link #mark} to discard any buffered data before the current reading position. Call
|
||||
* {@link #returnToMark} to move the current reading position back to the marked position, which is
|
||||
* initially the start of the input stream.
|
||||
*/
|
||||
public final class BufferedNonBlockingInputStream implements NonBlockingInputStream {
|
||||
|
||||
private final NonBlockingInputStream inputStream;
|
||||
private final byte[] bufferedBytes;
|
||||
|
||||
private long inputStreamPosition;
|
||||
|
||||
private int readPosition;
|
||||
private int writePosition;
|
||||
|
||||
/**
|
||||
* Wraps the specified {@code nonBlockingInputStream} for buffered reading using a buffer of size
|
||||
* {@code bufferSize} bytes.
|
||||
*/
|
||||
public BufferedNonBlockingInputStream(
|
||||
NonBlockingInputStream nonBlockingInputStream, int bufferSize) {
|
||||
inputStream = Assertions.checkNotNull(nonBlockingInputStream);
|
||||
bufferedBytes = new byte[bufferSize];
|
||||
}
|
||||
|
||||
@Override
|
||||
public int skip(int length) {
|
||||
return consumeStream(null, null, 0, length);
|
||||
}
|
||||
|
||||
@Override
|
||||
public int read(byte[] buffer, int offset, int length) {
|
||||
return consumeStream(null, buffer, offset, length);
|
||||
}
|
||||
|
||||
@Override
|
||||
public int read(ByteBuffer buffer, int length) {
|
||||
return consumeStream(buffer, null, 0, length);
|
||||
}
|
||||
|
||||
@Override
|
||||
public long getAvailableByteCount() {
|
||||
// The amount that can be read from the input stream is limited by how much can be buffered.
|
||||
return (writePosition - readPosition)
|
||||
+ Math.min(inputStream.getAvailableByteCount(), bufferedBytes.length - writePosition);
|
||||
}
|
||||
|
||||
@Override
|
||||
public boolean isEndOfStream() {
|
||||
return writePosition == readPosition && inputStream.isEndOfStream();
|
||||
}
|
||||
|
||||
@Override
|
||||
public void close() {
|
||||
inputStream.close();
|
||||
inputStreamPosition = -1;
|
||||
}
|
||||
|
||||
/** Returns the current position in the stream. */
|
||||
public long getReadPosition() {
|
||||
return inputStreamPosition - (writePosition - readPosition);
|
||||
}
|
||||
|
||||
/**
|
||||
* Moves the mark to be at the current position. Any data before the current position is
|
||||
* discarded. After calling this method, calling {@link #returnToMark} will move the reading
|
||||
* position back to the mark position.
|
||||
*/
|
||||
public void mark() {
|
||||
System.arraycopy(bufferedBytes, readPosition, bufferedBytes, 0, writePosition - readPosition);
|
||||
writePosition -= readPosition;
|
||||
readPosition = 0;
|
||||
}
|
||||
|
||||
/** Moves the current position back to the mark position. */
|
||||
public void returnToMark() {
|
||||
readPosition = 0;
|
||||
}
|
||||
|
||||
/**
|
||||
* Reads or skips data from the input stream. If {@code byteBuffer} is non-{@code null}, reads
|
||||
* {@code length} bytes into {@code byteBuffer} (other arguments are ignored). If
|
||||
* {@code byteArray} is non-{@code null}, reads {@code length} bytes into {@code byteArray} at
|
||||
* {@code offset} (other arguments are ignored). Otherwise, skips {@code length} bytes.
|
||||
*
|
||||
* @param byteBuffer {@link ByteBuffer} to read into, or {@code null} to read into
|
||||
* {@code byteArray} or skip.
|
||||
* @param byteArray Byte array to read into, or {@code null} to read into {@code byteBuffer} or
|
||||
* skip.
|
||||
* @param offset Offset in {@code byteArray} to write to, if it is non-{@code null}.
|
||||
* @param length Number of bytes to read or skip.
|
||||
* @return The number of bytes consumed, or -1 if nothing was consumed and the end of stream was
|
||||
* reached.
|
||||
*/
|
||||
private int consumeStream(ByteBuffer byteBuffer, byte[] byteArray, int offset, int length) {
|
||||
// If necessary, reduce length so that we do not need to write past the end of the array.
|
||||
int pendingBytes = writePosition - readPosition;
|
||||
length = Math.min(length, bufferedBytes.length - writePosition + pendingBytes);
|
||||
|
||||
// If reading past the end of buffered data, request more and populate the buffer.
|
||||
int streamBytesRead = 0;
|
||||
if (length - pendingBytes > 0) {
|
||||
streamBytesRead = inputStream.read(bufferedBytes, writePosition, length - pendingBytes);
|
||||
if (streamBytesRead > 0) {
|
||||
inputStreamPosition += streamBytesRead;
|
||||
|
||||
writePosition += streamBytesRead;
|
||||
pendingBytes += streamBytesRead;
|
||||
}
|
||||
}
|
||||
|
||||
// Signal the end of the stream if nothing more will be read.
|
||||
if (streamBytesRead == -1 && pendingBytes == 0) {
|
||||
return -1;
|
||||
}
|
||||
|
||||
// Fill the buffer using buffered data if reading, or just skip otherwise.
|
||||
length = Math.min(pendingBytes, length);
|
||||
if (byteBuffer != null) {
|
||||
byteBuffer.put(bufferedBytes, readPosition, length);
|
||||
} else if (byteArray != null) {
|
||||
System.arraycopy(bufferedBytes, readPosition, byteArray, offset, length);
|
||||
}
|
||||
readPosition += length;
|
||||
return length;
|
||||
}
|
||||
|
||||
}
|
||||
|
|
@ -1,83 +0,0 @@
|
|||
/*
|
||||
* Copyright (C) 2014 The Android Open Source Project
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
*/
|
||||
package com.google.android.exoplayer.upstream;
|
||||
|
||||
import com.google.android.exoplayer.util.Assertions;
|
||||
|
||||
import java.nio.ByteBuffer;
|
||||
|
||||
/**
|
||||
* An implementation of {@link NonBlockingInputStream} for reading data from a byte array.
|
||||
*/
|
||||
public final class ByteArrayNonBlockingInputStream implements NonBlockingInputStream {
|
||||
|
||||
private final byte[] data;
|
||||
|
||||
private int position;
|
||||
|
||||
public ByteArrayNonBlockingInputStream(byte[] data) {
|
||||
this.data = Assertions.checkNotNull(data);
|
||||
}
|
||||
|
||||
@Override
|
||||
public int skip(int length) {
|
||||
int skipLength = getReadLength(length);
|
||||
position += skipLength;
|
||||
return skipLength;
|
||||
}
|
||||
|
||||
@Override
|
||||
public int read(byte[] buffer, int offset, int length) {
|
||||
if (isEndOfStream()) {
|
||||
return -1;
|
||||
}
|
||||
int readLength = getReadLength(length);
|
||||
System.arraycopy(data, position, buffer, offset, readLength);
|
||||
position += readLength;
|
||||
return readLength;
|
||||
}
|
||||
|
||||
@Override
|
||||
public int read(ByteBuffer buffer, int length) {
|
||||
if (isEndOfStream()) {
|
||||
return -1;
|
||||
}
|
||||
int readLength = getReadLength(length);
|
||||
buffer.put(data, position, readLength);
|
||||
position += readLength;
|
||||
return readLength;
|
||||
}
|
||||
|
||||
@Override
|
||||
public long getAvailableByteCount() {
|
||||
return data.length - position;
|
||||
}
|
||||
|
||||
@Override
|
||||
public boolean isEndOfStream() {
|
||||
return position == data.length;
|
||||
}
|
||||
|
||||
@Override
|
||||
public void close() {
|
||||
// Do nothing.
|
||||
}
|
||||
|
||||
private int getReadLength(int requestedLength) {
|
||||
return Math.min(requestedLength, data.length - position);
|
||||
}
|
||||
|
||||
}
|
||||
|
|
@ -1,390 +0,0 @@
|
|||
/*
|
||||
* Copyright (C) 2014 The Android Open Source Project
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
*/
|
||||
package com.google.android.exoplayer.upstream;
|
||||
|
||||
import com.google.android.exoplayer.C;
|
||||
import com.google.android.exoplayer.upstream.Loader.Loadable;
|
||||
import com.google.android.exoplayer.util.Assertions;
|
||||
import com.google.android.exoplayer.util.Util;
|
||||
|
||||
import java.io.IOException;
|
||||
import java.nio.ByteBuffer;
|
||||
|
||||
/**
|
||||
* Loads data from a {@link DataSource} into an in-memory {@link Allocation}. The loaded data
|
||||
* can be consumed by treating the instance as a non-blocking {@link NonBlockingInputStream}.
|
||||
*/
|
||||
public final class DataSourceStream implements Loadable, NonBlockingInputStream {
|
||||
|
||||
/**
|
||||
* Thrown when an error is encountered trying to load data into a {@link DataSourceStream}.
|
||||
*/
|
||||
public static class DataSourceStreamLoadException extends IOException {
|
||||
|
||||
public DataSourceStreamLoadException(IOException cause) {
|
||||
super(cause);
|
||||
}
|
||||
|
||||
}
|
||||
|
||||
private static final int CHUNKED_ALLOCATION_INCREMENT = 256 * 1024;
|
||||
|
||||
private final DataSource dataSource;
|
||||
private final DataSpec dataSpec;
|
||||
private final Allocator allocator;
|
||||
private final ReadHead readHead;
|
||||
|
||||
/** Whether {@link #allocation}'s capacity is fixed. If true, the allocation is not resized. */
|
||||
private final boolean isAllocationFixedSize;
|
||||
private final int allocationSize;
|
||||
|
||||
private Allocation allocation;
|
||||
|
||||
private volatile boolean loadCanceled;
|
||||
private volatile long loadPosition;
|
||||
private volatile long resolvedLength;
|
||||
|
||||
private int writeFragmentIndex;
|
||||
private int writeFragmentOffset;
|
||||
private int writeFragmentRemainingLength;
|
||||
|
||||
/**
|
||||
* Constructs an instance whose allocation grows to contain all of the data specified by the
|
||||
* {@code dataSpec}.
|
||||
*
|
||||
* @param dataSource The source from which the data should be loaded.
|
||||
* @param dataSpec Defines the data to be loaded. {@code dataSpec.length} must not exceed
|
||||
* {@link Integer#MAX_VALUE}. If {@code dataSpec.length == C.LENGTH_UNBOUNDED} then
|
||||
* the length resolved by {@code dataSource.open(dataSpec)} must not exceed
|
||||
* {@link Integer#MAX_VALUE}.
|
||||
* @param allocator Used to obtain an {@link Allocation} for holding the data.
|
||||
*/
|
||||
public DataSourceStream(DataSource dataSource, DataSpec dataSpec, Allocator allocator) {
|
||||
Assertions.checkState(dataSpec.length <= Integer.MAX_VALUE);
|
||||
this.dataSource = dataSource;
|
||||
this.dataSpec = dataSpec;
|
||||
this.allocator = allocator;
|
||||
resolvedLength = C.LENGTH_UNBOUNDED;
|
||||
readHead = new ReadHead();
|
||||
|
||||
isAllocationFixedSize = false;
|
||||
allocationSize = 0;
|
||||
}
|
||||
|
||||
/**
|
||||
* Constructs an instance whose allocation is of a fixed size, which may be smaller than the data
|
||||
* specified by the {@code dataSpec}.
|
||||
* <p>
|
||||
* The allocation size determines how far ahead loading can proceed relative to the current
|
||||
* reading position.
|
||||
*
|
||||
* @param dataSource The source form which the data should be loaded.
|
||||
* @param dataSpec Defines the data to be loaded.
|
||||
* @param allocator Used to obtain an {@link Allocation} for holding the data.
|
||||
* @param allocationSize The minimum size for a fixed-size allocation that will hold the data
|
||||
* loaded from {@code dataSource}.
|
||||
*/
|
||||
public DataSourceStream(
|
||||
DataSource dataSource, DataSpec dataSpec, Allocator allocator, int allocationSize) {
|
||||
Assertions.checkState(dataSpec.length <= Integer.MAX_VALUE);
|
||||
this.dataSource = dataSource;
|
||||
this.dataSpec = dataSpec;
|
||||
this.allocator = allocator;
|
||||
this.allocationSize = allocationSize;
|
||||
resolvedLength = C.LENGTH_UNBOUNDED;
|
||||
readHead = new ReadHead();
|
||||
|
||||
isAllocationFixedSize = true;
|
||||
}
|
||||
|
||||
/**
|
||||
* Resets the read position to the start of the data.
|
||||
*
|
||||
* @throws UnsupportedOperationException Thrown if the allocation size is fixed.
|
||||
*/
|
||||
public void resetReadPosition() {
|
||||
if (isAllocationFixedSize) {
|
||||
throw new UnsupportedOperationException(
|
||||
"The read position cannot be reset when using a fixed allocation");
|
||||
}
|
||||
|
||||
readHead.reset();
|
||||
}
|
||||
|
||||
/**
|
||||
* Returns the current read position for data being read out of the source.
|
||||
*
|
||||
* @return The current read position.
|
||||
*/
|
||||
public long getReadPosition() {
|
||||
return readHead.position;
|
||||
}
|
||||
|
||||
/**
|
||||
* Returns the number of bytes of data that have been loaded.
|
||||
*
|
||||
* @return The number of bytes of data that have been loaded.
|
||||
*/
|
||||
public long getLoadPosition() {
|
||||
return loadPosition;
|
||||
}
|
||||
|
||||
/**
|
||||
* Returns the length of the stream in bytes, or {@value C#LENGTH_UNBOUNDED} if the length has
|
||||
* yet to be determined.
|
||||
*
|
||||
* @return The length of the stream in bytes, or {@value C#LENGTH_UNBOUNDED} if the length has
|
||||
* yet to be determined.
|
||||
*/
|
||||
public long getLength() {
|
||||
return resolvedLength != C.LENGTH_UNBOUNDED ? resolvedLength : dataSpec.length;
|
||||
}
|
||||
|
||||
/**
|
||||
* Whether the stream has finished loading.
|
||||
*
|
||||
* @return True if the stream has finished loading. False otherwise.
|
||||
*/
|
||||
public boolean isLoadFinished() {
|
||||
return resolvedLength != C.LENGTH_UNBOUNDED && loadPosition == resolvedLength;
|
||||
}
|
||||
|
||||
// {@link NonBlockingInputStream} implementation.
|
||||
|
||||
@Override
|
||||
public long getAvailableByteCount() {
|
||||
return loadPosition - readHead.position;
|
||||
}
|
||||
|
||||
@Override
|
||||
public boolean isEndOfStream() {
|
||||
return resolvedLength != C.LENGTH_UNBOUNDED && readHead.position == resolvedLength;
|
||||
}
|
||||
|
||||
@Override
|
||||
public void close() {
|
||||
if (allocation != null) {
|
||||
allocation.release();
|
||||
allocation = null;
|
||||
}
|
||||
}
|
||||
|
||||
@Override
|
||||
public int skip(int skipLength) {
|
||||
return read(null, null, 0, readHead, skipLength);
|
||||
}
|
||||
|
||||
@Override
|
||||
public int read(ByteBuffer target1, int readLength) {
|
||||
return read(target1, null, 0, readHead, readLength);
|
||||
}
|
||||
|
||||
@Override
|
||||
public int read(byte[] target, int offset, int readLength) {
|
||||
return read(null, target, offset, readHead, readLength);
|
||||
}
|
||||
|
||||
/**
|
||||
* Reads data to either a target {@link ByteBuffer}, or to a target byte array at a specified
|
||||
* offset. The {@code readHead} is updated to reflect the read that was performed.
|
||||
*/
|
||||
private int read(ByteBuffer target, byte[] targetArray, int targetArrayOffset,
|
||||
ReadHead readHead, int readLength) {
|
||||
if (isEndOfStream()) {
|
||||
return -1;
|
||||
}
|
||||
int bytesToRead = (int) Math.min(loadPosition - readHead.position, readLength);
|
||||
if (bytesToRead == 0) {
|
||||
return 0;
|
||||
}
|
||||
if (readHead.position == 0) {
|
||||
readHead.fragmentIndex = 0;
|
||||
readHead.fragmentOffset = allocation.getFragmentOffset(0);
|
||||
readHead.fragmentRemaining = allocation.getFragmentLength(0);
|
||||
}
|
||||
int bytesRead = 0;
|
||||
byte[][] buffers = allocation.getBuffers();
|
||||
while (bytesRead < bytesToRead) {
|
||||
if (readHead.fragmentRemaining == 0) {
|
||||
if (readHead.fragmentIndex == buffers.length - 1) {
|
||||
Assertions.checkState(isAllocationFixedSize);
|
||||
readHead.fragmentIndex = 0;
|
||||
} else {
|
||||
readHead.fragmentIndex++;
|
||||
}
|
||||
readHead.fragmentOffset = allocation.getFragmentOffset(readHead.fragmentIndex);
|
||||
readHead.fragmentRemaining = allocation.getFragmentLength(readHead.fragmentIndex);
|
||||
}
|
||||
int bufferReadLength = Math.min(readHead.fragmentRemaining, bytesToRead - bytesRead);
|
||||
if (target != null) {
|
||||
target.put(buffers[readHead.fragmentIndex], readHead.fragmentOffset, bufferReadLength);
|
||||
} else if (targetArray != null) {
|
||||
System.arraycopy(buffers[readHead.fragmentIndex], readHead.fragmentOffset, targetArray,
|
||||
targetArrayOffset, bufferReadLength);
|
||||
targetArrayOffset += bufferReadLength;
|
||||
}
|
||||
readHead.position += bufferReadLength;
|
||||
bytesRead += bufferReadLength;
|
||||
readHead.fragmentOffset += bufferReadLength;
|
||||
readHead.fragmentRemaining -= bufferReadLength;
|
||||
}
|
||||
|
||||
if (isAllocationFixedSize) {
|
||||
synchronized (readHead) {
|
||||
// Notify load() of the updated position so it can resume.
|
||||
readHead.notify();
|
||||
}
|
||||
}
|
||||
|
||||
return bytesRead;
|
||||
}
|
||||
|
||||
// {@link Loadable} implementation.
|
||||
|
||||
@Override
|
||||
public void cancelLoad() {
|
||||
loadCanceled = true;
|
||||
}
|
||||
|
||||
@Override
|
||||
public boolean isLoadCanceled() {
|
||||
return loadCanceled;
|
||||
}
|
||||
|
||||
@Override
|
||||
@SuppressWarnings("NonAtomicVolatileUpdate")
|
||||
public void load() throws IOException, InterruptedException {
|
||||
if (loadCanceled || isLoadFinished()) {
|
||||
// The load was canceled, or is already complete.
|
||||
return;
|
||||
}
|
||||
|
||||
try {
|
||||
DataSpec loadDataSpec;
|
||||
if (loadPosition == 0 && resolvedLength == C.LENGTH_UNBOUNDED) {
|
||||
loadDataSpec = dataSpec;
|
||||
long resolvedLength = dataSource.open(loadDataSpec);
|
||||
if (!isAllocationFixedSize && resolvedLength > Integer.MAX_VALUE) {
|
||||
throw new DataSourceStreamLoadException(
|
||||
new UnexpectedLengthException(dataSpec.length, resolvedLength));
|
||||
}
|
||||
this.resolvedLength = resolvedLength;
|
||||
} else {
|
||||
long remainingLength = resolvedLength != C.LENGTH_UNBOUNDED
|
||||
? resolvedLength - loadPosition : C.LENGTH_UNBOUNDED;
|
||||
loadDataSpec = new DataSpec(dataSpec.uri, dataSpec.position + loadPosition,
|
||||
remainingLength, dataSpec.key, dataSpec.flags);
|
||||
dataSource.open(loadDataSpec);
|
||||
}
|
||||
|
||||
if (allocation == null) {
|
||||
if (isAllocationFixedSize) {
|
||||
allocation = allocator.allocate(allocationSize);
|
||||
} else {
|
||||
int initialAllocationSize = resolvedLength != C.LENGTH_UNBOUNDED
|
||||
? (int) resolvedLength : CHUNKED_ALLOCATION_INCREMENT;
|
||||
allocation = allocator.allocate(initialAllocationSize);
|
||||
}
|
||||
}
|
||||
int allocationCapacity = allocation.capacity();
|
||||
|
||||
if (loadPosition == 0) {
|
||||
writeFragmentIndex = 0;
|
||||
writeFragmentOffset = allocation.getFragmentOffset(0);
|
||||
writeFragmentRemainingLength = allocation.getFragmentLength(0);
|
||||
}
|
||||
|
||||
int read = Integer.MAX_VALUE;
|
||||
byte[][] buffers = allocation.getBuffers();
|
||||
while (!loadCanceled && read > 0 && maybeMoreToLoad()) {
|
||||
if (Thread.interrupted()) {
|
||||
throw new InterruptedException();
|
||||
}
|
||||
|
||||
int bytesToWrite = getBytesToWrite();
|
||||
read = dataSource.read(buffers[writeFragmentIndex], writeFragmentOffset, bytesToWrite);
|
||||
if (read > 0) {
|
||||
loadPosition += read;
|
||||
writeFragmentOffset += read;
|
||||
writeFragmentRemainingLength -= read;
|
||||
if (writeFragmentRemainingLength == 0 && maybeMoreToLoad()) {
|
||||
writeFragmentIndex++;
|
||||
if (writeFragmentIndex == buffers.length) {
|
||||
if (isAllocationFixedSize) {
|
||||
// Wrap back to the first fragment.
|
||||
writeFragmentIndex = 0;
|
||||
} else {
|
||||
// Grow the allocation.
|
||||
allocation.ensureCapacity(allocationCapacity + CHUNKED_ALLOCATION_INCREMENT);
|
||||
allocationCapacity = allocation.capacity();
|
||||
buffers = allocation.getBuffers();
|
||||
}
|
||||
}
|
||||
writeFragmentOffset = allocation.getFragmentOffset(writeFragmentIndex);
|
||||
writeFragmentRemainingLength = allocation.getFragmentLength(writeFragmentIndex);
|
||||
}
|
||||
} else if (resolvedLength == C.LENGTH_UNBOUNDED) {
|
||||
resolvedLength = loadPosition;
|
||||
} else if (resolvedLength != loadPosition) {
|
||||
throw new DataSourceStreamLoadException(
|
||||
new UnexpectedLengthException(resolvedLength, loadPosition));
|
||||
}
|
||||
}
|
||||
} finally {
|
||||
Util.closeQuietly(dataSource);
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Returns the number of bytes that can be written to the current fragment, blocking until the
|
||||
* reader has consumed data if the allocation has a fixed size and is full.
|
||||
*/
|
||||
private int getBytesToWrite() throws InterruptedException {
|
||||
if (!isAllocationFixedSize) {
|
||||
return writeFragmentRemainingLength;
|
||||
}
|
||||
|
||||
synchronized (readHead) {
|
||||
while (loadPosition == readHead.position + allocation.capacity()) {
|
||||
readHead.wait();
|
||||
}
|
||||
}
|
||||
|
||||
return Math.min(writeFragmentRemainingLength,
|
||||
allocation.capacity() - (int) (loadPosition - readHead.position));
|
||||
}
|
||||
|
||||
private boolean maybeMoreToLoad() {
|
||||
return resolvedLength == C.LENGTH_UNBOUNDED || loadPosition < resolvedLength;
|
||||
}
|
||||
|
||||
private static class ReadHead {
|
||||
|
||||
private int position;
|
||||
private int fragmentIndex;
|
||||
private int fragmentOffset;
|
||||
private int fragmentRemaining;
|
||||
|
||||
public void reset() {
|
||||
position = 0;
|
||||
fragmentIndex = 0;
|
||||
fragmentOffset = 0;
|
||||
fragmentRemaining = 0;
|
||||
}
|
||||
|
||||
}
|
||||
|
||||
}
|
||||
|
|
@ -24,7 +24,6 @@ import java.io.IOException;
|
|||
import java.util.List;
|
||||
import java.util.Map;
|
||||
|
||||
|
||||
/**
|
||||
* An HTTP specific extension to {@link DataSource}.
|
||||
*/
|
||||
|
|
@ -115,6 +114,15 @@ public interface HttpDataSource extends DataSource {
|
|||
|
||||
}
|
||||
|
||||
@Override
|
||||
long open(DataSpec dataSpec) throws HttpDataSourceException;
|
||||
|
||||
@Override
|
||||
void close() throws HttpDataSourceException;
|
||||
|
||||
@Override
|
||||
int read(byte[] buffer, int offset, int readLength) throws HttpDataSourceException;
|
||||
|
||||
/**
|
||||
* When the source is open, returns the url from which data is being read.
|
||||
* <p>
|
||||
|
|
|
|||
|
|
@ -1,79 +0,0 @@
|
|||
/*
|
||||
* Copyright (C) 2014 The Android Open Source Project
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
*/
|
||||
package com.google.android.exoplayer.upstream;
|
||||
|
||||
import java.nio.ByteBuffer;
|
||||
|
||||
/**
|
||||
* Represents a source of bytes that can be consumed by downstream components.
|
||||
* <p>
|
||||
* The read and skip methods are non-blocking, and hence return 0 (indicating that no data has
|
||||
* been read) in the case that data is not yet available to be consumed.
|
||||
*/
|
||||
public interface NonBlockingInputStream {
|
||||
|
||||
/**
|
||||
* Skips over and discards up to {@code length} bytes of data. This method may skip over some
|
||||
* smaller number of bytes, possibly 0.
|
||||
*
|
||||
* @param length The maximum number of bytes to skip.
|
||||
* @return The actual number of bytes skipped, or -1 if the end of the data is reached.
|
||||
*/
|
||||
int skip(int length);
|
||||
|
||||
/**
|
||||
* Reads up to {@code length} bytes of data and stores them into {@code buffer}, starting at
|
||||
* index {@code offset}. This method may read fewer bytes, possibly 0.
|
||||
*
|
||||
* @param buffer The buffer into which the read data should be stored.
|
||||
* @param offset The start offset into {@code buffer} at which data should be written.
|
||||
* @param length The maximum number of bytes to read.
|
||||
* @return The actual number of bytes read, or -1 if the end of the data is reached.
|
||||
*/
|
||||
int read(byte[] buffer, int offset, int length);
|
||||
|
||||
/**
|
||||
* Reads up to {@code length} bytes of data and stores them into {@code buffer}. This method may
|
||||
* read fewer bytes, possibly 0.
|
||||
*
|
||||
* @param buffer The buffer into which the read data should be stored.
|
||||
* @param length The maximum number of bytes to read.
|
||||
* @return The actual number of bytes read, or -1 if the end of the data is reached.
|
||||
*/
|
||||
int read(ByteBuffer buffer, int length);
|
||||
|
||||
/**
|
||||
* Returns the number of bytes currently available for reading or skipping. Calls to the read()
|
||||
* and skip() methods are guaranteed to be satisfied in full if they request less than or
|
||||
* equal to the value returned.
|
||||
*
|
||||
* @return The number of bytes currently available.
|
||||
*/
|
||||
long getAvailableByteCount();
|
||||
|
||||
/**
|
||||
* Whether the end of the data has been reached.
|
||||
*
|
||||
* @return True if the end of the data has been reached, false otherwise.
|
||||
*/
|
||||
boolean isEndOfStream();
|
||||
|
||||
/**
|
||||
* Closes the input stream.
|
||||
*/
|
||||
void close();
|
||||
|
||||
}
|
||||
|
|
@ -17,6 +17,7 @@ package com.google.android.exoplayer.util;
|
|||
|
||||
import com.google.android.exoplayer.C;
|
||||
import com.google.android.exoplayer.upstream.DataSource;
|
||||
import com.google.android.exoplayer.upstream.DataSpec;
|
||||
|
||||
import android.text.TextUtils;
|
||||
|
||||
|
|
@ -460,6 +461,25 @@ public final class Util {
|
|||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Given a {@link DataSpec} and a number of bytes already loaded, returns a {@link DataSpec}
|
||||
* that represents the remainder of the data.
|
||||
*
|
||||
* @param dataSpec The original {@link DataSpec}.
|
||||
* @param bytesLoaded The number of bytes already loaded.
|
||||
* @return A {@link DataSpec} that represents the remainder of the data.
|
||||
*/
|
||||
public static DataSpec getRemainderDataSpec(DataSpec dataSpec, int bytesLoaded) {
|
||||
if (bytesLoaded == 0) {
|
||||
return dataSpec;
|
||||
} else {
|
||||
long remainingLength = dataSpec.length == C.LENGTH_UNBOUNDED ? C.LENGTH_UNBOUNDED
|
||||
: dataSpec.length - bytesLoaded;
|
||||
return new DataSpec(dataSpec.uri, dataSpec.position + bytesLoaded, remainingLength,
|
||||
dataSpec.key, dataSpec.flags);
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Returns the integer equal to the big-endian concatenation of the characters in {@code string}
|
||||
* as bytes. {@code string} must contain four or fewer characters.
|
||||
|
|
|
|||
|
|
@ -1,350 +0,0 @@
|
|||
/*
|
||||
* Copyright (C) 2014 The Android Open Source Project
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
*/
|
||||
package com.google.android.exoplayer.chunk.parser.webm;
|
||||
|
||||
import com.google.android.exoplayer.ParserException;
|
||||
import com.google.android.exoplayer.upstream.ByteArrayNonBlockingInputStream;
|
||||
import com.google.android.exoplayer.upstream.NonBlockingInputStream;
|
||||
|
||||
import junit.framework.TestCase;
|
||||
|
||||
import java.util.ArrayList;
|
||||
import java.util.Arrays;
|
||||
import java.util.Collections;
|
||||
import java.util.List;
|
||||
|
||||
/**
|
||||
* Tests {@link DefaultEbmlReader}.
|
||||
*/
|
||||
public class DefaultEbmlReaderTest extends TestCase {
|
||||
|
||||
private final EventCapturingEbmlEventHandler eventHandler =
|
||||
new EventCapturingEbmlEventHandler();
|
||||
|
||||
public void testNothing() {
|
||||
NonBlockingInputStream input = createTestInputStream();
|
||||
assertNoEvents(input, EbmlReader.READ_RESULT_END_OF_STREAM);
|
||||
}
|
||||
|
||||
public void testMasterElement() {
|
||||
NonBlockingInputStream input =
|
||||
createTestInputStream(0x1A, 0x45, 0xDF, 0xA3, 0x84, 0x42, 0x85, 0x81, 0x01);
|
||||
EventCapturingEbmlEventHandler expected = new EventCapturingEbmlEventHandler();
|
||||
expected.onMasterElementStart(EventCapturingEbmlEventHandler.ID_EBML, 0, 5, 4);
|
||||
expected.onIntegerElement(EventCapturingEbmlEventHandler.ID_DOC_TYPE_READ_VERSION, 1);
|
||||
expected.onMasterElementEnd(EventCapturingEbmlEventHandler.ID_EBML);
|
||||
assertEvents(input, EbmlReader.READ_RESULT_END_OF_STREAM, expected.events);
|
||||
}
|
||||
|
||||
public void testMasterElementEmpty() {
|
||||
NonBlockingInputStream input = createTestInputStream(0x18, 0x53, 0x80, 0x67, 0x80);
|
||||
EventCapturingEbmlEventHandler expected = new EventCapturingEbmlEventHandler();
|
||||
expected.onMasterElementStart(EventCapturingEbmlEventHandler.ID_SEGMENT, 0, 5, 0);
|
||||
expected.onMasterElementEnd(EventCapturingEbmlEventHandler.ID_SEGMENT);
|
||||
assertEvents(input, EbmlReader.READ_RESULT_END_OF_STREAM, expected.events);
|
||||
}
|
||||
|
||||
public void testUnsignedIntegerElement() {
|
||||
// 0xFE is chosen because for signed integers it should be interpreted as -2
|
||||
NonBlockingInputStream input = createTestInputStream(0x42, 0xF7, 0x81, 0xFE);
|
||||
EventCapturingEbmlEventHandler expected = new EventCapturingEbmlEventHandler();
|
||||
expected.onIntegerElement(EventCapturingEbmlEventHandler.ID_EBML_READ_VERSION, 254);
|
||||
assertEvents(input, EbmlReader.READ_RESULT_END_OF_STREAM, expected.events);
|
||||
}
|
||||
|
||||
public void testUnsignedIntegerElementLarge() {
|
||||
NonBlockingInputStream input =
|
||||
createTestInputStream(0x42, 0xF7, 0x88, 0x7F, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF);
|
||||
EventCapturingEbmlEventHandler expected = new EventCapturingEbmlEventHandler();
|
||||
expected.onIntegerElement(EventCapturingEbmlEventHandler.ID_EBML_READ_VERSION, Long.MAX_VALUE);
|
||||
assertEvents(input, EbmlReader.READ_RESULT_END_OF_STREAM, expected.events);
|
||||
}
|
||||
|
||||
public void testUnsignedIntegerElementTooLargeBecomesNegative() {
|
||||
NonBlockingInputStream input =
|
||||
createTestInputStream(0x42, 0xF7, 0x88, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF);
|
||||
EventCapturingEbmlEventHandler expected = new EventCapturingEbmlEventHandler();
|
||||
expected.onIntegerElement(EventCapturingEbmlEventHandler.ID_EBML_READ_VERSION, -1);
|
||||
assertEvents(input, EbmlReader.READ_RESULT_END_OF_STREAM, expected.events);
|
||||
}
|
||||
|
||||
public void testStringElement() {
|
||||
NonBlockingInputStream input =
|
||||
createTestInputStream(0x42, 0x82, 0x86, 0x41, 0x62, 0x63, 0x31, 0x32, 0x33);
|
||||
EventCapturingEbmlEventHandler expected = new EventCapturingEbmlEventHandler();
|
||||
expected.onStringElement(EventCapturingEbmlEventHandler.ID_DOC_TYPE, "Abc123");
|
||||
assertEvents(input, EbmlReader.READ_RESULT_END_OF_STREAM, expected.events);
|
||||
}
|
||||
|
||||
public void testStringElementEmpty() {
|
||||
NonBlockingInputStream input = createTestInputStream(0x42, 0x82, 0x80);
|
||||
EventCapturingEbmlEventHandler expected = new EventCapturingEbmlEventHandler();
|
||||
expected.onStringElement(EventCapturingEbmlEventHandler.ID_DOC_TYPE, "");
|
||||
assertEvents(input, EbmlReader.READ_RESULT_END_OF_STREAM, expected.events);
|
||||
}
|
||||
|
||||
public void testFloatElementThreeBytes() {
|
||||
try {
|
||||
eventHandler.read(createTestInputStream(0x44, 0x89, 0x83, 0x3F, 0x80, 0x00));
|
||||
fail();
|
||||
} catch (IllegalStateException exception) {
|
||||
// Expected
|
||||
}
|
||||
assertNoEvents();
|
||||
}
|
||||
|
||||
public void testFloatElementFourBytes() {
|
||||
NonBlockingInputStream input =
|
||||
createTestInputStream(0x44, 0x89, 0x84, 0x3F, 0x80, 0x00, 0x00);
|
||||
EventCapturingEbmlEventHandler expected = new EventCapturingEbmlEventHandler();
|
||||
expected.onFloatElement(EventCapturingEbmlEventHandler.ID_DURATION, 1.0);
|
||||
assertEvents(input, EbmlReader.READ_RESULT_END_OF_STREAM, expected.events);
|
||||
}
|
||||
|
||||
public void testFloatElementEightBytes() {
|
||||
NonBlockingInputStream input =
|
||||
createTestInputStream(0x44, 0x89, 0x88, 0xC0, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00);
|
||||
|
||||
EventCapturingEbmlEventHandler expected = new EventCapturingEbmlEventHandler();
|
||||
expected.onFloatElement(EventCapturingEbmlEventHandler.ID_DURATION, -2.0);
|
||||
assertEvents(input, EbmlReader.READ_RESULT_END_OF_STREAM, expected.events);
|
||||
}
|
||||
|
||||
public void testBinaryElementReadBytes() {
|
||||
eventHandler.binaryElementHandler = EventCapturingEbmlEventHandler.HANDLER_READ_BYTES;
|
||||
NonBlockingInputStream input =
|
||||
createTestInputStream(0xA3, 0x88, 0x01, 0x02, 0x03, 0x04, 0x05, 0x06, 0x07, 0x08);
|
||||
|
||||
EventCapturingEbmlEventHandler expected = new EventCapturingEbmlEventHandler();
|
||||
expected.binaryElementHandler = EventCapturingEbmlEventHandler.HANDLER_READ_BYTES;
|
||||
expected.onBinaryElement(
|
||||
EventCapturingEbmlEventHandler.ID_SIMPLE_BLOCK, 0, 0, 8,
|
||||
createTestInputStream(0x01, 0x02, 0x03, 0x04, 0x05, 0x06, 0x07, 0x08));
|
||||
assertEvents(input, EbmlReader.READ_RESULT_END_OF_STREAM, expected.events);
|
||||
}
|
||||
|
||||
public void testBinaryElementReadVarint() {
|
||||
eventHandler.binaryElementHandler = EventCapturingEbmlEventHandler.HANDLER_READ_VARINT;
|
||||
NonBlockingInputStream input = createTestInputStream(0xA3, 0x82, 0x40, 0x2A);
|
||||
|
||||
EventCapturingEbmlEventHandler expected = new EventCapturingEbmlEventHandler();
|
||||
expected.binaryElementHandler = EventCapturingEbmlEventHandler.HANDLER_READ_VARINT;
|
||||
expected.onBinaryElement(
|
||||
EventCapturingEbmlEventHandler.ID_SIMPLE_BLOCK, 0, 0, 0,
|
||||
createTestInputStream(0x40, 0x2A));
|
||||
assertEvents(input, EbmlReader.READ_RESULT_END_OF_STREAM, expected.events);
|
||||
}
|
||||
|
||||
public void testBinaryElementSkipBytes() {
|
||||
eventHandler.binaryElementHandler = EventCapturingEbmlEventHandler.HANDLER_SKIP_BYTES;
|
||||
NonBlockingInputStream input =
|
||||
createTestInputStream(0xA3, 0x88, 0x01, 0x02, 0x03, 0x04, 0x05, 0x06, 0x07, 0x08);
|
||||
|
||||
EventCapturingEbmlEventHandler expected = new EventCapturingEbmlEventHandler();
|
||||
expected.binaryElementHandler = EventCapturingEbmlEventHandler.HANDLER_SKIP_BYTES;
|
||||
expected.onBinaryElement(
|
||||
EventCapturingEbmlEventHandler.ID_SIMPLE_BLOCK, 0, 0, 8,
|
||||
createTestInputStream(0, 0, 0, 0, 0, 0, 0, 0));
|
||||
assertEvents(input, EbmlReader.READ_RESULT_END_OF_STREAM, expected.events);
|
||||
}
|
||||
|
||||
public void testBinaryElementDoNothing() {
|
||||
eventHandler.binaryElementHandler = EventCapturingEbmlEventHandler.HANDLER_DO_NOTHING;
|
||||
try {
|
||||
eventHandler.read(
|
||||
createTestInputStream(0xA3, 0x88, 0x01, 0x02, 0x03, 0x04, 0x05, 0x06, 0x07, 0x08));
|
||||
fail();
|
||||
} catch (IllegalStateException exception) {
|
||||
// Expected
|
||||
}
|
||||
assertNoEvents();
|
||||
}
|
||||
|
||||
public void testBinaryElementNotEnoughBytes() {
|
||||
NonBlockingInputStream input = createTestInputStream(0xA3, 0x88, 0x01, 0x02, 0x03);
|
||||
assertNoEvents(input, EbmlReader.READ_RESULT_NEED_MORE_DATA);
|
||||
}
|
||||
|
||||
public void testUnknownElement() {
|
||||
NonBlockingInputStream input = createTestInputStream(0xEC, 0x81, 0x00);
|
||||
assertNoEvents(input, EbmlReader.READ_RESULT_END_OF_STREAM);
|
||||
}
|
||||
|
||||
/**
|
||||
* Helper to build a {@link ByteArrayNonBlockingInputStream} quickly from zero or more
|
||||
* integer arguments.
|
||||
*
|
||||
* <p>Each argument must be able to cast to a byte value.
|
||||
*
|
||||
* @param data Zero or more integers with values between {@code 0x00} and {@code 0xFF}
|
||||
* @return A {@link ByteArrayNonBlockingInputStream} containing the given byte values
|
||||
*/
|
||||
private NonBlockingInputStream createTestInputStream(int... data) {
|
||||
byte[] bytes = new byte[data.length];
|
||||
for (int i = 0; i < data.length; i++) {
|
||||
bytes[i] = (byte) data[i];
|
||||
}
|
||||
return new ByteArrayNonBlockingInputStream(bytes);
|
||||
}
|
||||
|
||||
private void assertReads(NonBlockingInputStream input, int continues, int finalResult) {
|
||||
for (int i = 0; i < continues; i++) {
|
||||
assertEquals(EbmlReader.READ_RESULT_CONTINUE, eventHandler.read(input));
|
||||
}
|
||||
assertEquals(finalResult, eventHandler.read(input));
|
||||
}
|
||||
|
||||
private void assertNoEvents() {
|
||||
assertEvents(Collections.<String>emptyList());
|
||||
}
|
||||
|
||||
private void assertEvents(List<String> events) {
|
||||
assertEquals(events.size(), eventHandler.events.size());
|
||||
for (int i = 0; i < events.size(); i++) {
|
||||
assertEquals(events.get(i), eventHandler.events.get(i));
|
||||
}
|
||||
}
|
||||
|
||||
private void assertNoEvents(NonBlockingInputStream input, int finalResult) {
|
||||
assertReads(input, 0, finalResult);
|
||||
assertNoEvents();
|
||||
}
|
||||
|
||||
private void assertEvents(NonBlockingInputStream input, int finalResult, List<String> events) {
|
||||
assertReads(input, events.size(), finalResult);
|
||||
assertEvents(events);
|
||||
}
|
||||
|
||||
/**
|
||||
* An {@link EbmlEventHandler} which captures all event callbacks made by
|
||||
* {@link DefaultEbmlReader} for testing purposes.
|
||||
*/
|
||||
private static final class EventCapturingEbmlEventHandler implements EbmlEventHandler {
|
||||
|
||||
// Element IDs
|
||||
private static final int ID_EBML = 0x1A45DFA3;
|
||||
private static final int ID_EBML_READ_VERSION = 0x42F7;
|
||||
private static final int ID_DOC_TYPE = 0x4282;
|
||||
private static final int ID_DOC_TYPE_READ_VERSION = 0x4285;
|
||||
|
||||
private static final int ID_SEGMENT = 0x18538067;
|
||||
private static final int ID_DURATION = 0x4489;
|
||||
private static final int ID_SIMPLE_BLOCK = 0xA3;
|
||||
|
||||
// Various ways to handle things in onBinaryElement()
|
||||
private static final int HANDLER_DO_NOTHING = 0;
|
||||
private static final int HANDLER_READ_BYTES = 1;
|
||||
private static final int HANDLER_READ_VARINT = 2;
|
||||
private static final int HANDLER_SKIP_BYTES = 3;
|
||||
|
||||
private final EbmlReader reader = new DefaultEbmlReader();
|
||||
private final List<String> events = new ArrayList<String>();
|
||||
|
||||
private int binaryElementHandler;
|
||||
|
||||
private EventCapturingEbmlEventHandler() {
|
||||
reader.setEventHandler(this);
|
||||
}
|
||||
|
||||
private int read(NonBlockingInputStream inputStream) {
|
||||
try {
|
||||
return reader.read(inputStream);
|
||||
} catch (ParserException e) {
|
||||
// should never happen.
|
||||
fail();
|
||||
return -1;
|
||||
}
|
||||
}
|
||||
|
||||
@Override
|
||||
public int getElementType(int id) {
|
||||
switch (id) {
|
||||
case ID_EBML:
|
||||
case ID_SEGMENT:
|
||||
return EbmlReader.TYPE_MASTER;
|
||||
case ID_EBML_READ_VERSION:
|
||||
case ID_DOC_TYPE_READ_VERSION:
|
||||
return EbmlReader.TYPE_UNSIGNED_INT;
|
||||
case ID_DOC_TYPE:
|
||||
return EbmlReader.TYPE_STRING;
|
||||
case ID_SIMPLE_BLOCK:
|
||||
return EbmlReader.TYPE_BINARY;
|
||||
case ID_DURATION:
|
||||
return EbmlReader.TYPE_FLOAT;
|
||||
default:
|
||||
return EbmlReader.TYPE_UNKNOWN;
|
||||
}
|
||||
}
|
||||
|
||||
@Override
|
||||
public void onMasterElementStart(
|
||||
int id, long elementOffset, int headerSize, long contentsSize) {
|
||||
events.add(formatEvent(id, "start elementOffset=" + elementOffset
|
||||
+ " headerSize=" + headerSize + " contentsSize=" + contentsSize));
|
||||
}
|
||||
|
||||
@Override
|
||||
public void onMasterElementEnd(int id) {
|
||||
events.add(formatEvent(id, "end"));
|
||||
}
|
||||
|
||||
@Override
|
||||
public void onIntegerElement(int id, long value) {
|
||||
events.add(formatEvent(id, "integer=" + String.valueOf(value)));
|
||||
}
|
||||
|
||||
@Override
|
||||
public void onFloatElement(int id, double value) {
|
||||
events.add(formatEvent(id, "float=" + String.valueOf(value)));
|
||||
}
|
||||
|
||||
@Override
|
||||
public void onStringElement(int id, String value) {
|
||||
events.add(formatEvent(id, "string=" + value));
|
||||
}
|
||||
|
||||
@Override
|
||||
public boolean onBinaryElement(
|
||||
int id, long elementOffset, int headerSize, int contentsSize,
|
||||
NonBlockingInputStream inputStream) {
|
||||
switch (binaryElementHandler) {
|
||||
case HANDLER_READ_BYTES:
|
||||
byte[] bytes = new byte[contentsSize];
|
||||
reader.readBytes(inputStream, bytes, contentsSize);
|
||||
events.add(formatEvent(id, "bytes=" + Arrays.toString(bytes)));
|
||||
break;
|
||||
case HANDLER_READ_VARINT:
|
||||
long value = reader.readVarint(inputStream);
|
||||
events.add(formatEvent(id, "varint=" + String.valueOf(value)));
|
||||
break;
|
||||
case HANDLER_SKIP_BYTES:
|
||||
reader.skipBytes(inputStream, contentsSize);
|
||||
events.add(formatEvent(id, "skipped " + contentsSize + " byte(s)"));
|
||||
break;
|
||||
case HANDLER_DO_NOTHING:
|
||||
default:
|
||||
// pass
|
||||
}
|
||||
return true;
|
||||
}
|
||||
|
||||
private static String formatEvent(int id, String event) {
|
||||
return "[" + Integer.toHexString(id) + "] " + event;
|
||||
}
|
||||
|
||||
}
|
||||
|
||||
}
|
||||
|
|
@ -1,717 +0,0 @@
|
|||
/*
|
||||
* Copyright (C) 2014 The Android Open Source Project
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
*/
|
||||
package com.google.android.exoplayer.chunk.parser.webm;
|
||||
|
||||
import com.google.android.exoplayer.C;
|
||||
import com.google.android.exoplayer.MediaFormat;
|
||||
import com.google.android.exoplayer.ParserException;
|
||||
import com.google.android.exoplayer.SampleHolder;
|
||||
import com.google.android.exoplayer.chunk.parser.SegmentIndex;
|
||||
import com.google.android.exoplayer.drm.DrmInitData;
|
||||
import com.google.android.exoplayer.upstream.ByteArrayNonBlockingInputStream;
|
||||
import com.google.android.exoplayer.upstream.NonBlockingInputStream;
|
||||
import com.google.android.exoplayer.util.MimeTypes;
|
||||
|
||||
import android.test.InstrumentationTestCase;
|
||||
|
||||
import java.io.IOException;
|
||||
import java.nio.ByteBuffer;
|
||||
import java.util.Arrays;
|
||||
import java.util.UUID;
|
||||
|
||||
public class WebmExtractorTest extends InstrumentationTestCase {
|
||||
|
||||
private static final int INFO_ELEMENT_BYTE_SIZE = 31;
|
||||
private static final int TRACKS_ELEMENT_BYTE_SIZE = 48;
|
||||
private static final int CUES_ELEMENT_BYTE_SIZE = 12;
|
||||
private static final int CUE_POINT_ELEMENT_BYTE_SIZE = 31;
|
||||
|
||||
private static final int DEFAULT_TIMECODE_SCALE = 1000000;
|
||||
|
||||
private static final long TEST_DURATION_US = 9920000L;
|
||||
private static final int TEST_WIDTH = 1280;
|
||||
private static final int TEST_HEIGHT = 720;
|
||||
private static final int TEST_CHANNEL_COUNT = 1;
|
||||
private static final int TEST_SAMPLE_RATE = 48000;
|
||||
private static final long TEST_CODEC_DELAY = 6500000;
|
||||
private static final long TEST_SEEK_PRE_ROLL = 80000000;
|
||||
private static final int TEST_OPUS_CODEC_PRIVATE_SIZE = 2;
|
||||
private static final String TEST_VORBIS_CODEC_PRIVATE = "webm/vorbis_codec_private";
|
||||
private static final int TEST_VORBIS_INFO_SIZE = 30;
|
||||
private static final int TEST_VORBIS_BOOKS_SIZE = 4140;
|
||||
private static final byte[] TEST_ENCRYPTION_KEY_ID = { 0x00, 0x01, 0x02, 0x03 };
|
||||
private static final UUID WIDEVINE_UUID = new UUID(0xEDEF8BA979D64ACEL, 0xA3C827DCD51D21EDL);
|
||||
private static final UUID ZERO_UUID = new UUID(0, 0);
|
||||
// First 8 bytes of IV come from the container, last 8 bytes are always initialized to 0.
|
||||
private static final byte[] TEST_INITIALIZATION_VECTOR = {
|
||||
0x00, 0x01, 0x02, 0x03, 0x04, 0x05, 0x06, 0x07,
|
||||
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00 };
|
||||
|
||||
private static final int ID_VP9 = 0;
|
||||
private static final int ID_OPUS = 1;
|
||||
private static final int ID_VORBIS = 2;
|
||||
|
||||
private static final int EXPECTED_INIT_RESULT = WebmExtractor.RESULT_READ_INIT
|
||||
| WebmExtractor.RESULT_READ_INDEX | WebmExtractor.RESULT_END_OF_STREAM;
|
||||
private static final int EXPECTED_INIT_AND_SAMPLE_RESULT = WebmExtractor.RESULT_READ_INIT
|
||||
| WebmExtractor.RESULT_READ_INDEX | WebmExtractor.RESULT_READ_SAMPLE;
|
||||
|
||||
private final WebmExtractor extractor = new WebmExtractor();
|
||||
private final SampleHolder sampleHolder =
|
||||
new SampleHolder(SampleHolder.BUFFER_REPLACEMENT_MODE_DISABLED);
|
||||
|
||||
@Override
|
||||
public void setUp() {
|
||||
sampleHolder.data = ByteBuffer.allocate(1024);
|
||||
}
|
||||
|
||||
public void testPrepare() throws ParserException {
|
||||
NonBlockingInputStream testInputStream = new ByteArrayNonBlockingInputStream(
|
||||
createInitializationSegment(1, 0, true, DEFAULT_TIMECODE_SCALE, ID_VP9, null));
|
||||
assertEquals(EXPECTED_INIT_RESULT, extractor.read(testInputStream, sampleHolder));
|
||||
assertFormat();
|
||||
assertIndex(new IndexPoint(0, 0, TEST_DURATION_US));
|
||||
}
|
||||
|
||||
public void testPrepareOpus() throws ParserException {
|
||||
NonBlockingInputStream testInputStream = new ByteArrayNonBlockingInputStream(
|
||||
createInitializationSegment(1, 0, true, DEFAULT_TIMECODE_SCALE, ID_OPUS, null));
|
||||
assertEquals(EXPECTED_INIT_RESULT, extractor.read(testInputStream, sampleHolder));
|
||||
assertAudioFormat(ID_OPUS);
|
||||
assertIndex(new IndexPoint(0, 0, TEST_DURATION_US));
|
||||
}
|
||||
|
||||
public void testPrepareVorbis() throws ParserException {
|
||||
NonBlockingInputStream testInputStream = new ByteArrayNonBlockingInputStream(
|
||||
createInitializationSegment(1, 0, true, DEFAULT_TIMECODE_SCALE, ID_VORBIS, null));
|
||||
assertEquals(EXPECTED_INIT_RESULT, extractor.read(testInputStream, sampleHolder));
|
||||
assertAudioFormat(ID_VORBIS);
|
||||
assertIndex(new IndexPoint(0, 0, TEST_DURATION_US));
|
||||
}
|
||||
|
||||
public void testPrepareContentEncodingEncryption() throws ParserException {
|
||||
ContentEncodingSettings settings = new ContentEncodingSettings(0, 1, 1, 5, 1);
|
||||
NonBlockingInputStream testInputStream = new ByteArrayNonBlockingInputStream(
|
||||
createInitializationSegment(1, 0, true, DEFAULT_TIMECODE_SCALE, ID_VP9, settings));
|
||||
assertEquals(EXPECTED_INIT_RESULT, extractor.read(testInputStream, sampleHolder));
|
||||
assertFormat();
|
||||
assertIndex(new IndexPoint(0, 0, TEST_DURATION_US));
|
||||
DrmInitData drmInitData = extractor.getDrmInitData();
|
||||
assertNotNull(drmInitData);
|
||||
android.test.MoreAsserts.assertEquals(TEST_ENCRYPTION_KEY_ID, drmInitData.get(WIDEVINE_UUID));
|
||||
android.test.MoreAsserts.assertEquals(TEST_ENCRYPTION_KEY_ID, drmInitData.get(ZERO_UUID));
|
||||
}
|
||||
|
||||
public void testPrepareThreeCuePoints() throws ParserException {
|
||||
NonBlockingInputStream testInputStream = new ByteArrayNonBlockingInputStream(
|
||||
createInitializationSegment(3, 0, true, DEFAULT_TIMECODE_SCALE, ID_VP9, null));
|
||||
assertEquals(EXPECTED_INIT_RESULT, extractor.read(testInputStream, sampleHolder));
|
||||
assertFormat();
|
||||
assertIndex(
|
||||
new IndexPoint(0, 0, 10000),
|
||||
new IndexPoint(10000, 0, 10000),
|
||||
new IndexPoint(20000, 0, TEST_DURATION_US - 20000));
|
||||
}
|
||||
|
||||
public void testPrepareCustomTimecodeScale() throws ParserException {
|
||||
NonBlockingInputStream testInputStream = new ByteArrayNonBlockingInputStream(
|
||||
createInitializationSegment(3, 0, true, 1000, ID_VP9, null));
|
||||
assertEquals(EXPECTED_INIT_RESULT, extractor.read(testInputStream, sampleHolder));
|
||||
assertFormat();
|
||||
assertIndex(
|
||||
new IndexPoint(0, 0, 10),
|
||||
new IndexPoint(10, 0, 10),
|
||||
new IndexPoint(20, 0, (TEST_DURATION_US / 1000) - 20));
|
||||
}
|
||||
|
||||
public void testPrepareNoCuePoints() {
|
||||
NonBlockingInputStream testInputStream = new ByteArrayNonBlockingInputStream(
|
||||
createInitializationSegment(0, 0, true, DEFAULT_TIMECODE_SCALE, ID_VP9, null));
|
||||
try {
|
||||
extractor.read(testInputStream, sampleHolder);
|
||||
fail();
|
||||
} catch (ParserException exception) {
|
||||
assertEquals("Invalid/missing cue points", exception.getMessage());
|
||||
}
|
||||
}
|
||||
|
||||
public void testPrepareInvalidDocType() {
|
||||
NonBlockingInputStream testInputStream = new ByteArrayNonBlockingInputStream(
|
||||
createInitializationSegment(1, 0, false, DEFAULT_TIMECODE_SCALE, ID_VP9, null));
|
||||
try {
|
||||
extractor.read(testInputStream, sampleHolder);
|
||||
fail();
|
||||
} catch (ParserException exception) {
|
||||
assertEquals("DocType webB not supported", exception.getMessage());
|
||||
}
|
||||
}
|
||||
|
||||
public void testPrepareInvalidContentEncodingOrder() {
|
||||
ContentEncodingSettings settings = new ContentEncodingSettings(1, 1, 1, 5, 1);
|
||||
NonBlockingInputStream testInputStream = new ByteArrayNonBlockingInputStream(
|
||||
createInitializationSegment(1, 0, true, DEFAULT_TIMECODE_SCALE, ID_VP9, settings));
|
||||
try {
|
||||
extractor.read(testInputStream, sampleHolder);
|
||||
fail();
|
||||
} catch (ParserException exception) {
|
||||
assertEquals("ContentEncodingOrder 1 not supported", exception.getMessage());
|
||||
}
|
||||
}
|
||||
|
||||
public void testPrepareInvalidContentEncodingScope() {
|
||||
ContentEncodingSettings settings = new ContentEncodingSettings(0, 0, 1, 5, 1);
|
||||
NonBlockingInputStream testInputStream = new ByteArrayNonBlockingInputStream(
|
||||
createInitializationSegment(1, 0, true, DEFAULT_TIMECODE_SCALE, ID_VP9, settings));
|
||||
try {
|
||||
extractor.read(testInputStream, sampleHolder);
|
||||
fail();
|
||||
} catch (ParserException exception) {
|
||||
assertEquals("ContentEncodingScope 0 not supported", exception.getMessage());
|
||||
}
|
||||
}
|
||||
|
||||
public void testPrepareInvalidContentEncodingType() {
|
||||
ContentEncodingSettings settings = new ContentEncodingSettings(0, 1, 0, 5, 1);
|
||||
NonBlockingInputStream testInputStream = new ByteArrayNonBlockingInputStream(
|
||||
createInitializationSegment(1, 0, true, DEFAULT_TIMECODE_SCALE, ID_VP9, settings));
|
||||
try {
|
||||
extractor.read(testInputStream, sampleHolder);
|
||||
fail();
|
||||
} catch (ParserException exception) {
|
||||
assertEquals("ContentEncodingType 0 not supported", exception.getMessage());
|
||||
}
|
||||
}
|
||||
|
||||
public void testPrepareInvalidContentEncAlgo() {
|
||||
ContentEncodingSettings settings = new ContentEncodingSettings(0, 1, 1, 4, 1);
|
||||
NonBlockingInputStream testInputStream = new ByteArrayNonBlockingInputStream(
|
||||
createInitializationSegment(1, 0, true, DEFAULT_TIMECODE_SCALE, ID_VP9, settings));
|
||||
try {
|
||||
extractor.read(testInputStream, sampleHolder);
|
||||
fail();
|
||||
} catch (ParserException exception) {
|
||||
assertEquals("ContentEncAlgo 4 not supported", exception.getMessage());
|
||||
}
|
||||
}
|
||||
|
||||
public void testPrepareInvalidAESSettingsCipherMode() {
|
||||
ContentEncodingSettings settings = new ContentEncodingSettings(0, 1, 1, 5, 0);
|
||||
NonBlockingInputStream testInputStream = new ByteArrayNonBlockingInputStream(
|
||||
createInitializationSegment(1, 0, true, DEFAULT_TIMECODE_SCALE, ID_VP9, settings));
|
||||
try {
|
||||
extractor.read(testInputStream, sampleHolder);
|
||||
fail();
|
||||
} catch (ParserException exception) {
|
||||
assertEquals("AESSettingsCipherMode 0 not supported", exception.getMessage());
|
||||
}
|
||||
}
|
||||
|
||||
public void testReadSampleKeyframe() throws ParserException {
|
||||
MediaSegment mediaSegment = createMediaSegment(100, 0, 0, true, false, true, false, false);
|
||||
byte[] testInputData = joinByteArrays(
|
||||
createInitializationSegment(
|
||||
1, mediaSegment.clusterBytes.length, true, DEFAULT_TIMECODE_SCALE, ID_VP9, null),
|
||||
mediaSegment.clusterBytes);
|
||||
NonBlockingInputStream testInputStream = new ByteArrayNonBlockingInputStream(testInputData);
|
||||
assertEquals(EXPECTED_INIT_AND_SAMPLE_RESULT, extractor.read(testInputStream, sampleHolder));
|
||||
assertFormat();
|
||||
assertSample(mediaSegment, 0, true, false, false);
|
||||
assertEquals(WebmExtractor.RESULT_END_OF_STREAM, extractor.read(testInputStream, sampleHolder));
|
||||
}
|
||||
|
||||
public void testReadBlock() throws ParserException {
|
||||
MediaSegment mediaSegment = createMediaSegment(100, 0, 0, true, false, false, false, false);
|
||||
byte[] testInputData = joinByteArrays(
|
||||
createInitializationSegment(
|
||||
1, mediaSegment.clusterBytes.length, true, DEFAULT_TIMECODE_SCALE, ID_OPUS, null),
|
||||
mediaSegment.clusterBytes);
|
||||
NonBlockingInputStream testInputStream = new ByteArrayNonBlockingInputStream(testInputData);
|
||||
assertEquals(EXPECTED_INIT_AND_SAMPLE_RESULT, extractor.read(testInputStream, sampleHolder));
|
||||
assertAudioFormat(ID_OPUS);
|
||||
assertSample(mediaSegment, 0, true, false, false);
|
||||
assertEquals(WebmExtractor.RESULT_END_OF_STREAM, extractor.read(testInputStream, sampleHolder));
|
||||
}
|
||||
|
||||
public void testReadEncryptedFrame() throws ParserException {
|
||||
MediaSegment mediaSegment = createMediaSegment(100, 0, 0, true, false, true, true, true);
|
||||
ContentEncodingSettings settings = new ContentEncodingSettings(0, 1, 1, 5, 1);
|
||||
byte[] testInputData = joinByteArrays(
|
||||
createInitializationSegment(
|
||||
1, mediaSegment.clusterBytes.length, true, DEFAULT_TIMECODE_SCALE, ID_VP9, settings),
|
||||
mediaSegment.clusterBytes);
|
||||
NonBlockingInputStream testInputStream = new ByteArrayNonBlockingInputStream(testInputData);
|
||||
assertEquals(EXPECTED_INIT_AND_SAMPLE_RESULT, extractor.read(testInputStream, sampleHolder));
|
||||
assertFormat();
|
||||
assertSample(mediaSegment, 0, true, false, true);
|
||||
assertEquals(WebmExtractor.RESULT_END_OF_STREAM, extractor.read(testInputStream, sampleHolder));
|
||||
}
|
||||
|
||||
public void testReadEncryptedFrameWithInvalidSignalByte() {
|
||||
MediaSegment mediaSegment = createMediaSegment(100, 0, 0, true, false, true, true, false);
|
||||
ContentEncodingSettings settings = new ContentEncodingSettings(0, 1, 1, 5, 1);
|
||||
byte[] testInputData = joinByteArrays(
|
||||
createInitializationSegment(
|
||||
1, mediaSegment.clusterBytes.length, true, DEFAULT_TIMECODE_SCALE, ID_VP9, settings),
|
||||
mediaSegment.clusterBytes);
|
||||
NonBlockingInputStream testInputStream = new ByteArrayNonBlockingInputStream(testInputData);
|
||||
try {
|
||||
extractor.read(testInputStream, sampleHolder);
|
||||
fail();
|
||||
} catch (ParserException exception) {
|
||||
assertEquals("Extension bit is set in signal byte", exception.getMessage());
|
||||
}
|
||||
}
|
||||
|
||||
public void testReadSampleInvisible() throws ParserException {
|
||||
MediaSegment mediaSegment = createMediaSegment(100, 12, 13, false, true, true, false, false);
|
||||
byte[] testInputData = joinByteArrays(
|
||||
createInitializationSegment(
|
||||
1, mediaSegment.clusterBytes.length, true, DEFAULT_TIMECODE_SCALE, ID_VP9, null),
|
||||
mediaSegment.clusterBytes);
|
||||
NonBlockingInputStream testInputStream = new ByteArrayNonBlockingInputStream(testInputData);
|
||||
assertEquals(EXPECTED_INIT_AND_SAMPLE_RESULT, extractor.read(testInputStream, sampleHolder));
|
||||
assertFormat();
|
||||
assertSample(mediaSegment, 25000, false, true, false);
|
||||
assertEquals(WebmExtractor.RESULT_END_OF_STREAM, extractor.read(testInputStream, sampleHolder));
|
||||
}
|
||||
|
||||
public void testReadSampleCustomTimescale() throws ParserException {
|
||||
MediaSegment mediaSegment = createMediaSegment(100, 12, 13, false, false, true, false, false);
|
||||
byte[] testInputData = joinByteArrays(
|
||||
createInitializationSegment(
|
||||
1, mediaSegment.clusterBytes.length, true, 1000, ID_VP9, null),
|
||||
mediaSegment.clusterBytes);
|
||||
NonBlockingInputStream testInputStream = new ByteArrayNonBlockingInputStream(testInputData);
|
||||
assertEquals(EXPECTED_INIT_AND_SAMPLE_RESULT, extractor.read(testInputStream, sampleHolder));
|
||||
assertFormat();
|
||||
assertSample(mediaSegment, 25, false, false, false);
|
||||
assertEquals(WebmExtractor.RESULT_END_OF_STREAM, extractor.read(testInputStream, sampleHolder));
|
||||
}
|
||||
|
||||
public void testReadSampleNegativeSimpleBlockTimecode() throws ParserException {
|
||||
MediaSegment mediaSegment = createMediaSegment(100, 13, -12, true, true, true, false, false);
|
||||
byte[] testInputData = joinByteArrays(
|
||||
createInitializationSegment(
|
||||
1, mediaSegment.clusterBytes.length, true, DEFAULT_TIMECODE_SCALE, ID_VP9, null),
|
||||
mediaSegment.clusterBytes);
|
||||
NonBlockingInputStream testInputStream = new ByteArrayNonBlockingInputStream(testInputData);
|
||||
assertEquals(EXPECTED_INIT_AND_SAMPLE_RESULT, extractor.read(testInputStream, sampleHolder));
|
||||
assertFormat();
|
||||
assertSample(mediaSegment, 1000, true, true, false);
|
||||
assertEquals(WebmExtractor.RESULT_END_OF_STREAM, extractor.read(testInputStream, sampleHolder));
|
||||
}
|
||||
|
||||
private void assertFormat() {
|
||||
MediaFormat format = extractor.getFormat();
|
||||
assertEquals(TEST_WIDTH, format.width);
|
||||
assertEquals(TEST_HEIGHT, format.height);
|
||||
assertEquals(MimeTypes.VIDEO_VP9, format.mimeType);
|
||||
}
|
||||
|
||||
private void assertAudioFormat(int codecId) {
|
||||
MediaFormat format = extractor.getFormat();
|
||||
assertEquals(TEST_CHANNEL_COUNT, format.channelCount);
|
||||
assertEquals(TEST_SAMPLE_RATE, format.sampleRate);
|
||||
if (codecId == ID_OPUS) {
|
||||
assertEquals(MimeTypes.AUDIO_OPUS, format.mimeType);
|
||||
assertEquals(3, format.initializationData.size());
|
||||
assertEquals(TEST_OPUS_CODEC_PRIVATE_SIZE, format.initializationData.get(0).length);
|
||||
assertEquals(TEST_CODEC_DELAY, ByteBuffer.wrap(format.initializationData.get(1)).getLong());
|
||||
assertEquals(TEST_SEEK_PRE_ROLL, ByteBuffer.wrap(format.initializationData.get(2)).getLong());
|
||||
} else if (codecId == ID_VORBIS) {
|
||||
assertEquals(MimeTypes.AUDIO_VORBIS, format.mimeType);
|
||||
assertEquals(2, format.initializationData.size());
|
||||
assertEquals(TEST_VORBIS_INFO_SIZE, format.initializationData.get(0).length);
|
||||
assertEquals(TEST_VORBIS_BOOKS_SIZE, format.initializationData.get(1).length);
|
||||
}
|
||||
}
|
||||
|
||||
private void assertIndex(IndexPoint... indexPoints) {
|
||||
SegmentIndex index = extractor.getIndex();
|
||||
assertEquals(CUES_ELEMENT_BYTE_SIZE + CUE_POINT_ELEMENT_BYTE_SIZE * indexPoints.length,
|
||||
index.sizeBytes);
|
||||
assertEquals(indexPoints.length, index.length);
|
||||
for (int i = 0; i < indexPoints.length; i++) {
|
||||
IndexPoint indexPoint = indexPoints[i];
|
||||
assertEquals(indexPoint.timeUs, index.timesUs[i]);
|
||||
assertEquals(indexPoint.size, index.sizes[i]);
|
||||
assertEquals(indexPoint.durationUs, index.durationsUs[i]);
|
||||
}
|
||||
}
|
||||
|
||||
private void assertSample(
|
||||
MediaSegment mediaSegment, int timeUs, boolean keyframe, boolean invisible,
|
||||
boolean encrypted) {
|
||||
assertTrue(Arrays.equals(
|
||||
mediaSegment.videoBytes, Arrays.copyOf(sampleHolder.data.array(), sampleHolder.size)));
|
||||
assertEquals(timeUs, sampleHolder.timeUs);
|
||||
assertEquals(keyframe, sampleHolder.isSyncFrame());
|
||||
assertEquals(invisible, sampleHolder.isDecodeOnly());
|
||||
assertEquals(encrypted, sampleHolder.isEncrypted());
|
||||
if (encrypted) {
|
||||
android.test.MoreAsserts.assertEquals(TEST_INITIALIZATION_VECTOR, sampleHolder.cryptoInfo.iv);
|
||||
assertEquals(C.CRYPTO_MODE_AES_CTR, sampleHolder.cryptoInfo.mode);
|
||||
assertEquals(1, sampleHolder.cryptoInfo.numSubSamples);
|
||||
assertEquals(100, sampleHolder.cryptoInfo.numBytesOfEncryptedData[0]);
|
||||
assertEquals(0, sampleHolder.cryptoInfo.numBytesOfClearData[0]);
|
||||
}
|
||||
}
|
||||
|
||||
private byte[] createInitializationSegment(
|
||||
int cuePoints, int mediaSegmentSize, boolean docTypeIsWebm, int timecodeScale,
|
||||
int codecId, ContentEncodingSettings contentEncodingSettings) {
|
||||
int initalizationSegmentSize = INFO_ELEMENT_BYTE_SIZE + TRACKS_ELEMENT_BYTE_SIZE
|
||||
+ CUES_ELEMENT_BYTE_SIZE + CUE_POINT_ELEMENT_BYTE_SIZE * cuePoints;
|
||||
byte[] tracksElement = null;
|
||||
switch (codecId) {
|
||||
case ID_VP9:
|
||||
tracksElement = createTracksElementWithVideo(
|
||||
true, TEST_WIDTH, TEST_HEIGHT, contentEncodingSettings);
|
||||
break;
|
||||
case ID_OPUS:
|
||||
tracksElement = createTracksElementWithOpusAudio(TEST_CHANNEL_COUNT);
|
||||
break;
|
||||
case ID_VORBIS:
|
||||
tracksElement = createTracksElementWithVorbisAudio(TEST_CHANNEL_COUNT);
|
||||
break;
|
||||
}
|
||||
byte[] bytes = joinByteArrays(createEbmlElement(1, docTypeIsWebm, 2),
|
||||
createSegmentElement(initalizationSegmentSize + mediaSegmentSize),
|
||||
createInfoElement(timecodeScale),
|
||||
tracksElement,
|
||||
createCuesElement(CUE_POINT_ELEMENT_BYTE_SIZE * cuePoints));
|
||||
for (int i = 0; i < cuePoints; i++) {
|
||||
bytes = joinByteArrays(bytes, createCuePointElement(10 * i, initalizationSegmentSize));
|
||||
}
|
||||
return bytes;
|
||||
}
|
||||
|
||||
private static MediaSegment createMediaSegment(int videoBytesLength, int clusterTimecode,
|
||||
int blockTimecode, boolean keyframe, boolean invisible, boolean simple,
|
||||
boolean encrypted, boolean validSignalByte) {
|
||||
byte[] videoBytes = createVideoBytes(videoBytesLength);
|
||||
byte[] blockBytes;
|
||||
if (simple) {
|
||||
blockBytes = createSimpleBlockElement(videoBytes.length, blockTimecode,
|
||||
keyframe, invisible, true, encrypted, validSignalByte);
|
||||
} else {
|
||||
blockBytes = createBlockElement(videoBytes.length, blockTimecode, invisible, true);
|
||||
}
|
||||
byte[] clusterBytes =
|
||||
createClusterElement(blockBytes.length + videoBytes.length, clusterTimecode);
|
||||
return new MediaSegment(joinByteArrays(clusterBytes, blockBytes, videoBytes), videoBytes);
|
||||
}
|
||||
|
||||
private static byte[] joinByteArrays(byte[]... byteArrays) {
|
||||
int length = 0;
|
||||
for (byte[] byteArray : byteArrays) {
|
||||
length += byteArray.length;
|
||||
}
|
||||
byte[] joined = new byte[length];
|
||||
length = 0;
|
||||
for (byte[] byteArray : byteArrays) {
|
||||
System.arraycopy(byteArray, 0, joined, length, byteArray.length);
|
||||
length += byteArray.length;
|
||||
}
|
||||
return joined;
|
||||
}
|
||||
|
||||
private static byte[] createEbmlElement(
|
||||
int ebmlReadVersion, boolean docTypeIsWebm, int docTypeReadVersion) {
|
||||
return createByteArray(
|
||||
0x1A, 0x45, 0xDF, 0xA3, // EBML
|
||||
0x01, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x0F, // size=15
|
||||
0x42, 0xF7, // EBMLReadVersion
|
||||
0x81, ebmlReadVersion, // size=1
|
||||
0x42, 0x82, // DocType
|
||||
0x84, 0x77, 0x65, 0x62, docTypeIsWebm ? 0x6D : 0x42, // size=4 value=webm/B
|
||||
0x42, 0x85, // DocTypeReadVersion
|
||||
0x81, docTypeReadVersion); // size=1
|
||||
}
|
||||
|
||||
private static byte[] createSegmentElement(int size) {
|
||||
byte[] sizeBytes = getIntegerBytes(size);
|
||||
return createByteArray(
|
||||
0x18, 0x53, 0x80, 0x67, // Segment
|
||||
0x01, 0x00, 0x00, 0x00, sizeBytes[0], sizeBytes[1], sizeBytes[2], sizeBytes[3]);
|
||||
}
|
||||
|
||||
private static byte[] createInfoElement(int timecodeScale) {
|
||||
byte[] scaleBytes = getIntegerBytes(timecodeScale);
|
||||
return createByteArray(
|
||||
0x15, 0x49, 0xA9, 0x66, // Info
|
||||
0x01, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x13, // size=19
|
||||
0x2A, 0xD7, 0xB1, // TimecodeScale
|
||||
0x84, scaleBytes[0], scaleBytes[1], scaleBytes[2], scaleBytes[3], // size=4
|
||||
0x44, 0x89, // Duration
|
||||
0x88, 0x40, 0xC3, 0x60, 0x00, 0x00, 0x00, 0x00, 0x00); // size=8 value=9920.0
|
||||
}
|
||||
|
||||
private static byte[] createTracksElementWithVideo(
|
||||
boolean codecIsVp9, int pixelWidth, int pixelHeight,
|
||||
ContentEncodingSettings contentEncodingSettings) {
|
||||
byte[] widthBytes = getIntegerBytes(pixelWidth);
|
||||
byte[] heightBytes = getIntegerBytes(pixelHeight);
|
||||
if (contentEncodingSettings != null) {
|
||||
byte[] orderBytes = getIntegerBytes(contentEncodingSettings.order);
|
||||
byte[] scopeBytes = getIntegerBytes(contentEncodingSettings.scope);
|
||||
byte[] typeBytes = getIntegerBytes(contentEncodingSettings.type);
|
||||
byte[] algorithmBytes = getIntegerBytes(contentEncodingSettings.algorithm);
|
||||
byte[] cipherModeBytes = getIntegerBytes(contentEncodingSettings.aesCipherMode);
|
||||
return createByteArray(
|
||||
0x16, 0x54, 0xAE, 0x6B, // Tracks
|
||||
0x01, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x48, // size=72
|
||||
0xAE, // TrackEntry
|
||||
0x01, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x3F, // size=63
|
||||
0x86, // CodecID
|
||||
0x85, 0x56, 0x5F, 0x56, 0x50, codecIsVp9 ? 0x39 : 0x30, // size=5 value=V_VP9/0
|
||||
0x6D, 0x80, // ContentEncodings
|
||||
0xA4, // size=36
|
||||
0x62, 0x40, // ContentEncoding
|
||||
0xA1, // size=33
|
||||
0x50, 0x31, // ContentEncodingOrder
|
||||
0x81, orderBytes[3],
|
||||
0x50, 0x32, // ContentEncodingScope
|
||||
0x81, scopeBytes[3],
|
||||
0x50, 0x33, // ContentEncodingType
|
||||
0x81, typeBytes[3],
|
||||
0x50, 0x35, // ContentEncryption
|
||||
0x92, // size=18
|
||||
0x47, 0xE1, // ContentEncAlgo
|
||||
0x81, algorithmBytes[3],
|
||||
0x47, 0xE2, // ContentEncKeyID
|
||||
0x84, // size=4
|
||||
TEST_ENCRYPTION_KEY_ID[0], TEST_ENCRYPTION_KEY_ID[1],
|
||||
TEST_ENCRYPTION_KEY_ID[2], TEST_ENCRYPTION_KEY_ID[3], // value=binary
|
||||
0x47, 0xE7, // ContentEncAESSettings
|
||||
0x84, // size=4
|
||||
0x47, 0xE8, // AESSettingsCipherMode
|
||||
0x81, cipherModeBytes[3],
|
||||
0xE0, // Video
|
||||
0x01, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x08, // size=8
|
||||
0xB0, // PixelWidth
|
||||
0x82, widthBytes[2], widthBytes[3], // size=2
|
||||
0xBA, // PixelHeight
|
||||
0x82, heightBytes[2], heightBytes[3]); // size=2
|
||||
} else {
|
||||
return createByteArray(
|
||||
0x16, 0x54, 0xAE, 0x6B, // Tracks
|
||||
0x01, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x24, // size=36
|
||||
0xAE, // TrackEntry
|
||||
0x01, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x1B, // size=27
|
||||
0x86, // CodecID
|
||||
0x85, 0x56, 0x5F, 0x56, 0x50, codecIsVp9 ? 0x39 : 0x30, // size=5 value=V_VP9/0
|
||||
0xE0, // Video
|
||||
0x01, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x08, // size=8
|
||||
0xB0, // PixelWidth
|
||||
0x82, widthBytes[2], widthBytes[3], // size=2
|
||||
0xBA, // PixelHeight
|
||||
0x82, heightBytes[2], heightBytes[3]); // size=2
|
||||
}
|
||||
}
|
||||
|
||||
private static byte[] createTracksElementWithOpusAudio(int channelCount) {
|
||||
byte[] channelCountBytes = getIntegerBytes(channelCount);
|
||||
return createByteArray(
|
||||
0x16, 0x54, 0xAE, 0x6B, // Tracks
|
||||
0x01, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x39, // size=57
|
||||
0xAE, // TrackEntry
|
||||
0x01, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x30, // size=48
|
||||
0x86, // CodecID
|
||||
0x86, 0x41, 0x5F, 0x4F, 0x50, 0x55, 0x53, // size=6 value=A_OPUS
|
||||
0x56, 0xAA, // CodecDelay
|
||||
0x83, 0x63, 0x2E, 0xA0, // size=3 value=6500000
|
||||
0x56, 0xBB, // SeekPreRoll
|
||||
0x84, 0x04, 0xC4, 0xB4, 0x00, // size=4 value=80000000
|
||||
0xE1, // Audio
|
||||
0x01, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x0D, // size=13
|
||||
0x9F, // Channels
|
||||
0x81, channelCountBytes[3], // size=1
|
||||
0xB5, // SamplingFrequency
|
||||
0x88, 0x40, 0xE7, 0x70, 0x00, 0x00, 0x00, 0x00, 0x00, // size=8 value=48000
|
||||
0x63, 0xA2, // CodecPrivate
|
||||
0x82, 0x00, 0x00); // size=2
|
||||
}
|
||||
|
||||
private byte[] createTracksElementWithVorbisAudio(int channelCount) {
|
||||
byte[] channelCountBytes = getIntegerBytes(channelCount);
|
||||
byte[] tracksElement = createByteArray(
|
||||
0x16, 0x54, 0xAE, 0x6B, // Tracks
|
||||
0x01, 0x00, 0x00, 0x00, 0x00, 0x00, 0x10, 0x9C, // size=4252
|
||||
0xAE, // TrackEntry
|
||||
0x01, 0x00, 0x00, 0x00, 0x00, 0x00, 0x10, 0x93, // size=4243 (36+4207)
|
||||
0x86, // CodecID
|
||||
0x88, 0x41, 0x5f, 0x56, 0x4f, 0x52, 0x42, 0x49, 0x53, // size=8 value=A_VORBIS
|
||||
0xE1, // Audio
|
||||
0x01, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x0D, // size=13
|
||||
0x9F, // Channels
|
||||
0x81, channelCountBytes[3], // size=1
|
||||
0xB5, // SamplingFrequency
|
||||
0x88, 0x40, 0xE7, 0x70, 0x00, 0x00, 0x00, 0x00, 0x00, // size=8 value=48000
|
||||
0x63, 0xA2, // CodecPrivate
|
||||
0x50, 0x6F); // size=4207
|
||||
byte[] codecPrivate = new byte[4207];
|
||||
try {
|
||||
getInstrumentation().getContext().getResources().getAssets().open(TEST_VORBIS_CODEC_PRIVATE)
|
||||
.read(codecPrivate);
|
||||
} catch (IOException e) {
|
||||
fail(); // should never happen
|
||||
}
|
||||
return joinByteArrays(tracksElement, codecPrivate);
|
||||
}
|
||||
|
||||
private static byte[] createCuesElement(int size) {
|
||||
byte[] sizeBytes = getIntegerBytes(size);
|
||||
return createByteArray(
|
||||
0x1C, 0x53, 0xBB, 0x6B, // Cues
|
||||
0x01, 0x00, 0x00, 0x00, sizeBytes[0], sizeBytes[1], sizeBytes[2], sizeBytes[3]); // size=31
|
||||
}
|
||||
|
||||
private static byte[] createCuePointElement(int cueTime, int cueClusterPosition) {
|
||||
byte[] positionBytes = getIntegerBytes(cueClusterPosition);
|
||||
return createByteArray(
|
||||
0xBB, // CuePoint
|
||||
0x01, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x16, // size=22
|
||||
0xB3, // CueTime
|
||||
0x81, cueTime, // size=1
|
||||
0xB7, // CueTrackPositions
|
||||
0x01, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x0A, // size=10
|
||||
0xF1, // CueClusterPosition
|
||||
0x88, 0x00, 0x00, 0x00, 0x00, positionBytes[0], positionBytes[1],
|
||||
positionBytes[2], positionBytes[3]); // size=8
|
||||
}
|
||||
|
||||
private static byte[] createClusterElement(int size, int timecode) {
|
||||
byte[] sizeBytes = getIntegerBytes(size);
|
||||
byte[] timeBytes = getIntegerBytes(timecode);
|
||||
return createByteArray(
|
||||
0x1F, 0x43, 0xB6, 0x75, // Cluster
|
||||
0x01, 0x00, 0x00, 0x00, sizeBytes[0], sizeBytes[1], sizeBytes[2], sizeBytes[3],
|
||||
0xE7, // Timecode
|
||||
0x84, timeBytes[0], timeBytes[1], timeBytes[2], timeBytes[3]); // size=4
|
||||
}
|
||||
|
||||
private static byte[] createSimpleBlockElement(
|
||||
int size, int timecode, boolean keyframe, boolean invisible, boolean noLacing,
|
||||
boolean encrypted, boolean validSignalByte) {
|
||||
byte[] sizeBytes = getIntegerBytes(size + 4 + (encrypted ? 9 : 0));
|
||||
byte[] timeBytes = getIntegerBytes(timecode);
|
||||
byte flags = (byte)
|
||||
((keyframe ? 0x80 : 0x00) | (invisible ? 0x08 : 0x00) | (noLacing ? 0x00 : 0x06));
|
||||
byte[] simpleBlock = createByteArray(
|
||||
0xA3, // SimpleBlock
|
||||
0x01, 0x00, 0x00, 0x00, sizeBytes[0], sizeBytes[1], sizeBytes[2], sizeBytes[3],
|
||||
0x81, // Track number value=1
|
||||
timeBytes[2], timeBytes[3], flags); // Timecode and flags
|
||||
if (encrypted) {
|
||||
simpleBlock = joinByteArrays(
|
||||
simpleBlock, createByteArray(validSignalByte ? 0x01 : 0x80),
|
||||
Arrays.copyOfRange(TEST_INITIALIZATION_VECTOR, 0, 8));
|
||||
}
|
||||
return simpleBlock;
|
||||
}
|
||||
|
||||
private static byte[] createBlockElement(
|
||||
int size, int timecode, boolean invisible, boolean noLacing) {
|
||||
int blockSize = size + 4;
|
||||
byte[] blockSizeBytes = getIntegerBytes(blockSize);
|
||||
byte[] timeBytes = getIntegerBytes(timecode);
|
||||
int blockElementSize = 1 + 8 + blockSize; // id + size + length of data
|
||||
byte[] sizeBytes = getIntegerBytes(blockElementSize);
|
||||
byte flags = (byte) ((invisible ? 0x08 : 0x00) | (noLacing ? 0x00 : 0x06));
|
||||
return createByteArray(
|
||||
0xA0, // BlockGroup
|
||||
0x01, 0x00, 0x00, 0x00, sizeBytes[0], sizeBytes[1], sizeBytes[2], sizeBytes[3],
|
||||
0xA1, // Block
|
||||
0x01, 0x00, 0x00, 0x00,
|
||||
blockSizeBytes[0], blockSizeBytes[1], blockSizeBytes[2], blockSizeBytes[3],
|
||||
0x81, // Track number value=1
|
||||
timeBytes[2], timeBytes[3], flags); // Timecode and flags
|
||||
}
|
||||
|
||||
private static byte[] createVideoBytes(int size) {
|
||||
byte[] videoBytes = new byte[size];
|
||||
for (int i = 0; i < size; i++) {
|
||||
videoBytes[i] = (byte) i;
|
||||
}
|
||||
return videoBytes;
|
||||
}
|
||||
|
||||
private static byte[] getIntegerBytes(int value) {
|
||||
return createByteArray(
|
||||
(value & 0xFF000000) >> 24,
|
||||
(value & 0x00FF0000) >> 16,
|
||||
(value & 0x0000FF00) >> 8,
|
||||
(value & 0x000000FF));
|
||||
}
|
||||
|
||||
private static byte[] createByteArray(int... intArray) {
|
||||
byte[] byteArray = new byte[intArray.length];
|
||||
for (int i = 0; i < byteArray.length; i++) {
|
||||
byteArray[i] = (byte) intArray[i];
|
||||
}
|
||||
return byteArray;
|
||||
}
|
||||
|
||||
/** Used by {@link #createMediaSegment} to return both cluster and video bytes together. */
|
||||
private static final class MediaSegment {
|
||||
|
||||
private final byte[] clusterBytes;
|
||||
private final byte[] videoBytes;
|
||||
|
||||
private MediaSegment(byte[] clusterBytes, byte[] videoBytes) {
|
||||
this.clusterBytes = clusterBytes;
|
||||
this.videoBytes = videoBytes;
|
||||
}
|
||||
|
||||
}
|
||||
|
||||
/** Used by {@link #assertIndex(IndexPoint...)} to validate index elements. */
|
||||
private static final class IndexPoint {
|
||||
|
||||
private final long timeUs;
|
||||
private final int size;
|
||||
private final long durationUs;
|
||||
|
||||
private IndexPoint(long timeUs, int size, long durationUs) {
|
||||
this.timeUs = timeUs;
|
||||
this.size = size;
|
||||
this.durationUs = durationUs;
|
||||
}
|
||||
|
||||
}
|
||||
|
||||
/** Used by {@link #createTracksElementWithVideo} to create a Track header with Encryption. */
|
||||
private static final class ContentEncodingSettings {
|
||||
|
||||
private final int order;
|
||||
private final int scope;
|
||||
private final int type;
|
||||
private final int algorithm;
|
||||
private final int aesCipherMode;
|
||||
|
||||
private ContentEncodingSettings(int order, int scope, int type, int algorithm,
|
||||
int aesCipherMode) {
|
||||
this.order = order;
|
||||
this.scope = scope;
|
||||
this.type = type;
|
||||
this.algorithm = algorithm;
|
||||
this.aesCipherMode = aesCipherMode;
|
||||
}
|
||||
|
||||
}
|
||||
|
||||
}
|
||||
|
|
@ -15,35 +15,312 @@
|
|||
*/
|
||||
package com.google.android.exoplayer.dash;
|
||||
|
||||
import com.google.android.exoplayer.MediaFormat;
|
||||
import com.google.android.exoplayer.chunk.Format;
|
||||
import com.google.android.exoplayer.dash.mpd.Representation;
|
||||
import com.google.android.exoplayer.dash.mpd.SegmentBase.SingleSegmentBase;
|
||||
import static org.mockito.Mockito.when;
|
||||
|
||||
import junit.framework.TestCase;
|
||||
import com.google.android.exoplayer.MediaFormat;
|
||||
import com.google.android.exoplayer.TrackRenderer;
|
||||
import com.google.android.exoplayer.chunk.ChunkOperationHolder;
|
||||
import com.google.android.exoplayer.chunk.Format;
|
||||
import com.google.android.exoplayer.chunk.FormatEvaluator;
|
||||
import com.google.android.exoplayer.chunk.FormatEvaluator.FixedEvaluator;
|
||||
import com.google.android.exoplayer.chunk.MediaChunk;
|
||||
import com.google.android.exoplayer.dash.mpd.AdaptationSet;
|
||||
import com.google.android.exoplayer.dash.mpd.MediaPresentationDescription;
|
||||
import com.google.android.exoplayer.dash.mpd.Period;
|
||||
import com.google.android.exoplayer.dash.mpd.RangedUri;
|
||||
import com.google.android.exoplayer.dash.mpd.Representation;
|
||||
import com.google.android.exoplayer.dash.mpd.SegmentBase.MultiSegmentBase;
|
||||
import com.google.android.exoplayer.dash.mpd.SegmentBase.SegmentList;
|
||||
import com.google.android.exoplayer.dash.mpd.SegmentBase.SegmentTemplate;
|
||||
import com.google.android.exoplayer.dash.mpd.SegmentBase.SegmentTimelineElement;
|
||||
import com.google.android.exoplayer.dash.mpd.SegmentBase.SingleSegmentBase;
|
||||
import com.google.android.exoplayer.dash.mpd.UrlTemplate;
|
||||
import com.google.android.exoplayer.testutil.Util;
|
||||
import com.google.android.exoplayer.upstream.DataSource;
|
||||
import com.google.android.exoplayer.util.FakeClock;
|
||||
import com.google.android.exoplayer.util.ManifestFetcher;
|
||||
|
||||
import android.test.InstrumentationTestCase;
|
||||
|
||||
import org.mockito.Mock;
|
||||
|
||||
import java.util.ArrayList;
|
||||
import java.util.Collections;
|
||||
import java.util.List;
|
||||
|
||||
/**
|
||||
* Tests {@link DashChunkSource}.
|
||||
*/
|
||||
public class DashChunkSourceTest extends TestCase {
|
||||
public class DashChunkSourceTest extends InstrumentationTestCase {
|
||||
|
||||
private static final FormatEvaluator EVALUATOR = new FixedEvaluator();
|
||||
|
||||
private static final long AVAILABILITY_START_TIME = 0;
|
||||
private static final long AVAILABILITY_LATENCY = 5000;
|
||||
private static final long AVAILABILITY_REALTIME_OFFSET = 1000;
|
||||
private static final long AVAILABILITY_CURRENT_TIME =
|
||||
AVAILABILITY_START_TIME + AVAILABILITY_LATENCY - AVAILABILITY_REALTIME_OFFSET;
|
||||
private static final FakeClock AVAILABILITY_CLOCK = new FakeClock(AVAILABILITY_CURRENT_TIME);
|
||||
|
||||
private static final int TALL_HEIGHT = 200;
|
||||
private static final int WIDE_WIDTH = 400;
|
||||
|
||||
private static final Format REGULAR_VIDEO = new Format("1", "video/mp4", 480, 240, -1, -1, 1000);
|
||||
private static final Format TALL_VIDEO = new Format("2", "video/mp4", 100, TALL_HEIGHT, -1, -1,
|
||||
1000);
|
||||
private static final Format WIDE_VIDEO = new Format("3", "video/mp4", WIDE_WIDTH, 50, -1, -1,
|
||||
1000);
|
||||
|
||||
@Mock private DataSource mockDataSource;
|
||||
@Mock private ManifestFetcher<MediaPresentationDescription> mockManifestFetcher;
|
||||
|
||||
@Override
|
||||
public void setUp() throws Exception {
|
||||
Util.setUpMockito(this);
|
||||
}
|
||||
|
||||
public void testMaxVideoDimensions() {
|
||||
DashChunkSource chunkSource = new DashChunkSource(generateVodMpd(), AdaptationSet.TYPE_VIDEO,
|
||||
null, null, null);
|
||||
MediaFormat out = MediaFormat.createVideoFormat("video/h264", 1, 1, 1, 1, null);
|
||||
chunkSource.getMaxVideoDimensions(out);
|
||||
|
||||
assertEquals(WIDE_WIDTH, out.getMaxVideoWidth());
|
||||
assertEquals(TALL_HEIGHT, out.getMaxVideoHeight());
|
||||
}
|
||||
|
||||
public void testMaxVideoDimensionsLegacy() {
|
||||
SingleSegmentBase segmentBase1 = new SingleSegmentBase("https://example.com/1.mp4");
|
||||
Format format1 = new Format("1", "video/mp4", 100, 200, -1, -1, 1000);
|
||||
Representation representation1 =
|
||||
Representation.newInstance(0, 0, null, 0, format1, segmentBase1);
|
||||
Representation.newInstance(0, 0, null, 0, TALL_VIDEO, segmentBase1);
|
||||
|
||||
SingleSegmentBase segmentBase2 = new SingleSegmentBase("https://example.com/2.mp4");
|
||||
Format format2 = new Format("2", "video/mp4", 400, 50, -1, -1, 1000);
|
||||
Representation representation2 =
|
||||
Representation.newInstance(0, 0, null, 0, format2, segmentBase2);
|
||||
Representation.newInstance(0, 0, null, 0, WIDE_VIDEO, segmentBase2);
|
||||
|
||||
DashChunkSource chunkSource = new DashChunkSource(null, null, representation1, representation2);
|
||||
MediaFormat out = MediaFormat.createVideoFormat("video/h264", 1, 1, 1, 1, null);
|
||||
chunkSource.getMaxVideoDimensions(out);
|
||||
|
||||
assertEquals(400, out.getMaxVideoWidth());
|
||||
assertEquals(200, out.getMaxVideoHeight());
|
||||
assertEquals(WIDE_WIDTH, out.getMaxVideoWidth());
|
||||
assertEquals(TALL_HEIGHT, out.getMaxVideoHeight());
|
||||
}
|
||||
|
||||
public void testLiveEdgeNoLatencyWithTimeline() {
|
||||
DashChunkSource chunkSource = setupLiveEdgeTimelineTest(0L);
|
||||
List<MediaChunk> queue = new ArrayList<MediaChunk>();
|
||||
ChunkOperationHolder out = new ChunkOperationHolder();
|
||||
chunkSource.getChunkOperation(queue, 0, 0, out);
|
||||
|
||||
assertEquals(4000000L, ((MediaChunk) out.chunk).startTimeUs);
|
||||
assertEquals(5000000L, ((MediaChunk) out.chunk).endTimeUs);
|
||||
}
|
||||
|
||||
public void testLiveEdge500msLatencyWithTimeline() {
|
||||
DashChunkSource chunkSource = setupLiveEdgeTimelineTest(500L);
|
||||
List<MediaChunk> queue = new ArrayList<MediaChunk>();
|
||||
ChunkOperationHolder out = new ChunkOperationHolder();
|
||||
chunkSource.getChunkOperation(queue, 0, 0, out);
|
||||
|
||||
assertEquals(4000000L, ((MediaChunk) out.chunk).startTimeUs);
|
||||
assertEquals(5000000L, ((MediaChunk) out.chunk).endTimeUs);
|
||||
}
|
||||
|
||||
public void testLiveEdge1000msLatencyWithTimeline() {
|
||||
DashChunkSource chunkSource = setupLiveEdgeTimelineTest(1000L);
|
||||
List<MediaChunk> queue = new ArrayList<MediaChunk>();
|
||||
ChunkOperationHolder out = new ChunkOperationHolder();
|
||||
chunkSource.getChunkOperation(queue, 0, 0, out);
|
||||
|
||||
assertEquals(4000000L, ((MediaChunk) out.chunk).startTimeUs);
|
||||
assertEquals(5000000L, ((MediaChunk) out.chunk).endTimeUs);
|
||||
}
|
||||
|
||||
public void testLiveEdge1001msLatencyWithTimeline() {
|
||||
DashChunkSource chunkSource = setupLiveEdgeTimelineTest(1001L);
|
||||
List<MediaChunk> queue = new ArrayList<MediaChunk>();
|
||||
ChunkOperationHolder out = new ChunkOperationHolder();
|
||||
chunkSource.getChunkOperation(queue, 0, 0, out);
|
||||
|
||||
assertEquals(3000000L, ((MediaChunk) out.chunk).startTimeUs);
|
||||
assertEquals(4000000L, ((MediaChunk) out.chunk).endTimeUs);
|
||||
}
|
||||
|
||||
public void testLiveEdge2500msLatencyWithTimeline() {
|
||||
DashChunkSource chunkSource = setupLiveEdgeTimelineTest(2500L);
|
||||
List<MediaChunk> queue = new ArrayList<MediaChunk>();
|
||||
ChunkOperationHolder out = new ChunkOperationHolder();
|
||||
chunkSource.getChunkOperation(queue, 0, 0, out);
|
||||
|
||||
assertEquals(2000000L, ((MediaChunk) out.chunk).startTimeUs);
|
||||
assertEquals(3000000L, ((MediaChunk) out.chunk).endTimeUs);
|
||||
}
|
||||
|
||||
public void testLiveEdgeVeryHighLatencyWithTimeline() {
|
||||
DashChunkSource chunkSource = setupLiveEdgeTimelineTest(10000L);
|
||||
List<MediaChunk> queue = new ArrayList<MediaChunk>();
|
||||
ChunkOperationHolder out = new ChunkOperationHolder();
|
||||
chunkSource.getChunkOperation(queue, 0, 0, out);
|
||||
|
||||
assertEquals(0L, ((MediaChunk) out.chunk).startTimeUs);
|
||||
assertEquals(1000000L, ((MediaChunk) out.chunk).endTimeUs);
|
||||
}
|
||||
|
||||
public void testLiveEdgeNoLatencyWithTemplate() {
|
||||
DashChunkSource chunkSource = setupLiveEdgeTemplateTest(0L);
|
||||
List<MediaChunk> queue = new ArrayList<MediaChunk>();
|
||||
ChunkOperationHolder out = new ChunkOperationHolder();
|
||||
chunkSource.getChunkOperation(queue, 0, 0, out);
|
||||
|
||||
// this should actually return the "5th" segment, but it currently returns the "6th", which
|
||||
// doesn't actually exist yet; this will be resolved in a subsequent cl (cl/87518875).
|
||||
//assertEquals(4000000L, ((MediaChunk) out.chunk).startTimeUs);
|
||||
//assertEquals(5000000L, ((MediaChunk) out.chunk).endTimeUs);
|
||||
}
|
||||
|
||||
public void testLiveEdgeAlmostNoLatencyWithTemplate() {
|
||||
DashChunkSource chunkSource = setupLiveEdgeTemplateTest(1L);
|
||||
List<MediaChunk> queue = new ArrayList<MediaChunk>();
|
||||
ChunkOperationHolder out = new ChunkOperationHolder();
|
||||
chunkSource.getChunkOperation(queue, 0, 0, out);
|
||||
|
||||
assertEquals(4000000L, ((MediaChunk) out.chunk).startTimeUs);
|
||||
assertEquals(5000000L, ((MediaChunk) out.chunk).endTimeUs);
|
||||
}
|
||||
|
||||
public void testLiveEdge500msLatencyWithTemplate() {
|
||||
DashChunkSource chunkSource = setupLiveEdgeTemplateTest(500L);
|
||||
List<MediaChunk> queue = new ArrayList<MediaChunk>();
|
||||
ChunkOperationHolder out = new ChunkOperationHolder();
|
||||
chunkSource.getChunkOperation(queue, 0, 0, out);
|
||||
|
||||
assertEquals(4000000L, ((MediaChunk) out.chunk).startTimeUs);
|
||||
assertEquals(5000000L, ((MediaChunk) out.chunk).endTimeUs);
|
||||
}
|
||||
|
||||
public void testLiveEdge1000msLatencyWithTemplate() {
|
||||
DashChunkSource chunkSource = setupLiveEdgeTemplateTest(1000L);
|
||||
List<MediaChunk> queue = new ArrayList<MediaChunk>();
|
||||
ChunkOperationHolder out = new ChunkOperationHolder();
|
||||
chunkSource.getChunkOperation(queue, 0, 0, out);
|
||||
|
||||
assertEquals(4000000L, ((MediaChunk) out.chunk).startTimeUs);
|
||||
assertEquals(5000000L, ((MediaChunk) out.chunk).endTimeUs);
|
||||
}
|
||||
|
||||
public void testLiveEdge1001msLatencyWithTemplate() {
|
||||
DashChunkSource chunkSource = setupLiveEdgeTemplateTest(1001L);
|
||||
List<MediaChunk> queue = new ArrayList<MediaChunk>();
|
||||
ChunkOperationHolder out = new ChunkOperationHolder();
|
||||
chunkSource.getChunkOperation(queue, 0, 0, out);
|
||||
|
||||
assertEquals(3000000L, ((MediaChunk) out.chunk).startTimeUs);
|
||||
assertEquals(4000000L, ((MediaChunk) out.chunk).endTimeUs);
|
||||
}
|
||||
|
||||
public void testLiveEdge2500msLatencyWithTemplate() {
|
||||
DashChunkSource chunkSource = setupLiveEdgeTemplateTest(2500L);
|
||||
List<MediaChunk> queue = new ArrayList<MediaChunk>();
|
||||
ChunkOperationHolder out = new ChunkOperationHolder();
|
||||
chunkSource.getChunkOperation(queue, 0, 0, out);
|
||||
|
||||
assertEquals(2000000L, ((MediaChunk) out.chunk).startTimeUs);
|
||||
assertEquals(3000000L, ((MediaChunk) out.chunk).endTimeUs);
|
||||
}
|
||||
|
||||
public void testLiveEdgeVeryHighLatencyWithTemplate() {
|
||||
DashChunkSource chunkSource = setupLiveEdgeTemplateTest(10000L);
|
||||
List<MediaChunk> queue = new ArrayList<MediaChunk>();
|
||||
ChunkOperationHolder out = new ChunkOperationHolder();
|
||||
chunkSource.getChunkOperation(queue, 0, 0, out);
|
||||
|
||||
assertEquals(0L, ((MediaChunk) out.chunk).startTimeUs);
|
||||
assertEquals(1000000L, ((MediaChunk) out.chunk).endTimeUs);
|
||||
}
|
||||
|
||||
private static MediaPresentationDescription generateMpd(boolean live,
|
||||
List<Representation> representations) {
|
||||
Representation firstRepresentation = representations.get(0);
|
||||
AdaptationSet adaptationSet = new AdaptationSet(0, AdaptationSet.TYPE_UNKNOWN, representations);
|
||||
Period period = new Period(null, firstRepresentation.periodStartMs,
|
||||
firstRepresentation.periodDurationMs, Collections.singletonList(adaptationSet));
|
||||
long duration = (live) ? TrackRenderer.UNKNOWN_TIME_US
|
||||
: firstRepresentation.periodDurationMs - firstRepresentation.periodStartMs;
|
||||
return new MediaPresentationDescription(AVAILABILITY_START_TIME, duration, -1, live, -1, -1,
|
||||
null, Collections.singletonList(period));
|
||||
}
|
||||
|
||||
private static MediaPresentationDescription generateVodMpd() {
|
||||
List<Representation> representations = new ArrayList<Representation>();
|
||||
|
||||
SingleSegmentBase segmentBase1 = new SingleSegmentBase("https://example.com/1.mp4");
|
||||
Representation representation1 =
|
||||
Representation.newInstance(0, 0, null, 0, TALL_VIDEO, segmentBase1);
|
||||
representations.add(representation1);
|
||||
|
||||
SingleSegmentBase segmentBase2 = new SingleSegmentBase("https://example.com/2.mp4");
|
||||
Representation representation2 =
|
||||
Representation.newInstance(0, 0, null, 0, WIDE_VIDEO, segmentBase2);
|
||||
representations.add(representation2);
|
||||
|
||||
return generateMpd(false, representations);
|
||||
}
|
||||
|
||||
private static MediaPresentationDescription generateLiveMpdWithTimeline() {
|
||||
List<Representation> representations = new ArrayList<Representation>();
|
||||
|
||||
List<SegmentTimelineElement> segmentTimeline = new ArrayList<SegmentTimelineElement>();
|
||||
segmentTimeline.add(new SegmentTimelineElement(0L, 1000L));
|
||||
segmentTimeline.add(new SegmentTimelineElement(1000L, 1000L));
|
||||
segmentTimeline.add(new SegmentTimelineElement(2000L, 1000L));
|
||||
segmentTimeline.add(new SegmentTimelineElement(3000L, 1000L));
|
||||
segmentTimeline.add(new SegmentTimelineElement(4000L, 1000L));
|
||||
List<RangedUri> mediaSegments = new ArrayList<RangedUri>();
|
||||
mediaSegments.add(new RangedUri("", "", 0L, 500L));
|
||||
mediaSegments.add(new RangedUri("", "", 500L, 500L));
|
||||
mediaSegments.add(new RangedUri("", "", 1000L, 500L));
|
||||
mediaSegments.add(new RangedUri("", "", 1500L, 500L));
|
||||
mediaSegments.add(new RangedUri("", "", 2000L, 500L));
|
||||
|
||||
MultiSegmentBase segmentBase = new SegmentList(null, 1000, 0,
|
||||
TrackRenderer.UNKNOWN_TIME_US, 1, TrackRenderer.UNKNOWN_TIME_US, segmentTimeline,
|
||||
mediaSegments);
|
||||
Representation representation = Representation.newInstance(0, TrackRenderer.UNKNOWN_TIME_US,
|
||||
null, 0, REGULAR_VIDEO, segmentBase);
|
||||
representations.add(representation);
|
||||
|
||||
return generateMpd(true, representations);
|
||||
}
|
||||
|
||||
private static MediaPresentationDescription generateLiveMpdWithTemplate() {
|
||||
List<Representation> representations = new ArrayList<Representation>();
|
||||
|
||||
UrlTemplate initializationTemplate = null;
|
||||
UrlTemplate mediaTemplate = UrlTemplate.compile("$RepresentationID$/$Number$");
|
||||
MultiSegmentBase segmentBase = new SegmentTemplate(null, 1000, 0,
|
||||
TrackRenderer.UNKNOWN_TIME_US, 1, 1000, null,
|
||||
initializationTemplate, mediaTemplate, "http://www.youtube.com");
|
||||
Representation representation = Representation.newInstance(0, TrackRenderer.UNKNOWN_TIME_US,
|
||||
null, 0, REGULAR_VIDEO, segmentBase);
|
||||
representations.add(representation);
|
||||
|
||||
return generateMpd(true, representations);
|
||||
}
|
||||
|
||||
private DashChunkSource setupLiveEdgeTimelineTest(long liveEdgeLatencyMs) {
|
||||
MediaPresentationDescription manifest = generateLiveMpdWithTimeline();
|
||||
when(mockManifestFetcher.getManifest()).thenReturn(manifest);
|
||||
return new DashChunkSource(mockManifestFetcher, manifest, AdaptationSet.TYPE_VIDEO, null,
|
||||
mockDataSource, EVALUATOR, AVAILABILITY_CLOCK, liveEdgeLatencyMs * 1000,
|
||||
AVAILABILITY_REALTIME_OFFSET * 1000);
|
||||
}
|
||||
|
||||
private DashChunkSource setupLiveEdgeTemplateTest(long liveEdgeLatencyMs) {
|
||||
MediaPresentationDescription manifest = generateLiveMpdWithTemplate();
|
||||
when(mockManifestFetcher.getManifest()).thenReturn(manifest);
|
||||
return new DashChunkSource(mockManifestFetcher, manifest, AdaptationSet.TYPE_VIDEO, null,
|
||||
mockDataSource, EVALUATOR, AVAILABILITY_CLOCK, liveEdgeLatencyMs * 1000,
|
||||
AVAILABILITY_REALTIME_OFFSET * 1000);
|
||||
}
|
||||
|
||||
}
|
||||
|
|
|
|||
|
|
@ -60,43 +60,33 @@ public class HlsMasterPlaylistParserTest extends TestCase {
|
|||
assertNotNull(variants);
|
||||
assertEquals(5, variants.size());
|
||||
|
||||
assertEquals(0, variants.get(0).index);
|
||||
assertEquals(1280000, variants.get(0).bandwidth);
|
||||
assertEquals(1280000, variants.get(0).bitrate);
|
||||
assertNotNull(variants.get(0).codecs);
|
||||
assertEquals(2, variants.get(0).codecs.length);
|
||||
assertEquals("mp4a.40.2", variants.get(0).codecs[0]);
|
||||
assertEquals("avc1.66.30", variants.get(0).codecs[1]);
|
||||
assertEquals("mp4a.40.2,avc1.66.30", variants.get(0).codecs);
|
||||
assertEquals(304, variants.get(0).width);
|
||||
assertEquals(128, variants.get(0).height);
|
||||
assertEquals("http://example.com/low.m3u8", variants.get(0).url);
|
||||
|
||||
assertEquals(1, variants.get(1).index);
|
||||
assertEquals(1280000, variants.get(1).bandwidth);
|
||||
assertEquals(1280000, variants.get(1).bitrate);
|
||||
assertNotNull(variants.get(1).codecs);
|
||||
assertEquals(2, variants.get(1).codecs.length);
|
||||
assertEquals("mp4a.40.2", variants.get(1).codecs[0]);
|
||||
assertEquals("avc1.66.30", variants.get(1).codecs[1]);
|
||||
assertEquals("mp4a.40.2 , avc1.66.30 ", variants.get(1).codecs);
|
||||
assertEquals("http://example.com/spaces_in_codecs.m3u8", variants.get(1).url);
|
||||
|
||||
assertEquals(2, variants.get(2).index);
|
||||
assertEquals(2560000, variants.get(2).bandwidth);
|
||||
assertEquals(2560000, variants.get(2).bitrate);
|
||||
assertEquals(null, variants.get(2).codecs);
|
||||
assertEquals(384, variants.get(2).width);
|
||||
assertEquals(160, variants.get(2).height);
|
||||
assertEquals("http://example.com/mid.m3u8", variants.get(2).url);
|
||||
|
||||
assertEquals(3, variants.get(3).index);
|
||||
assertEquals(7680000, variants.get(3).bandwidth);
|
||||
assertEquals(7680000, variants.get(3).bitrate);
|
||||
assertEquals(null, variants.get(3).codecs);
|
||||
assertEquals(-1, variants.get(3).width);
|
||||
assertEquals(-1, variants.get(3).height);
|
||||
assertEquals("http://example.com/hi.m3u8", variants.get(3).url);
|
||||
|
||||
assertEquals(4, variants.get(4).index);
|
||||
assertEquals(65000, variants.get(4).bandwidth);
|
||||
assertEquals(65000, variants.get(4).bitrate);
|
||||
assertNotNull(variants.get(4).codecs);
|
||||
assertEquals(1, variants.get(4).codecs.length);
|
||||
assertEquals("mp4a.40.5", variants.get(4).codecs[0]);
|
||||
assertEquals("mp4a.40.5", variants.get(4).codecs);
|
||||
assertEquals(-1, variants.get(4).width);
|
||||
assertEquals(-1, variants.get(4).height);
|
||||
assertEquals("http://example.com/audio-only.m3u8", variants.get(4).url);
|
||||
|
|
|
|||
|
|
@ -1,149 +0,0 @@
|
|||
/*
|
||||
* Copyright (C) 2014 The Android Open Source Project
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
*/
|
||||
package com.google.android.exoplayer.upstream;
|
||||
|
||||
import static org.mockito.Matchers.any;
|
||||
import static org.mockito.Matchers.anyInt;
|
||||
import static org.mockito.Matchers.eq;
|
||||
import static org.mockito.Mockito.verify;
|
||||
import static org.mockito.Mockito.when;
|
||||
|
||||
import com.google.android.exoplayer.SampleSource;
|
||||
|
||||
import junit.framework.TestCase;
|
||||
|
||||
import org.mockito.Mock;
|
||||
import org.mockito.MockitoAnnotations;
|
||||
import org.mockito.invocation.InvocationOnMock;
|
||||
import org.mockito.stubbing.Answer;
|
||||
|
||||
import java.util.Arrays;
|
||||
|
||||
/**
|
||||
* Tests for {@link BufferedNonBlockingInputStream}.
|
||||
*/
|
||||
public class BufferedNonBlockingInputStreamTest extends TestCase {
|
||||
|
||||
private static final int BUFFER_SIZE_BYTES = 16;
|
||||
|
||||
@Mock private NonBlockingInputStream mockInputStream;
|
||||
private BufferedNonBlockingInputStream bufferedInputStream;
|
||||
|
||||
@Override
|
||||
public void setUp() {
|
||||
MockitoAnnotations.initMocks(this);
|
||||
|
||||
bufferedInputStream = new BufferedNonBlockingInputStream(mockInputStream, BUFFER_SIZE_BYTES);
|
||||
}
|
||||
|
||||
public void testSkipClipsCountToBufferSizeWhenMarkSet() {
|
||||
// When marking and skipping more than the buffer size
|
||||
bufferedInputStream.mark();
|
||||
bufferedInputStream.skip(BUFFER_SIZE_BYTES + 1);
|
||||
|
||||
// Then BUFFER_SIZE_BYTES are read.
|
||||
verify(mockInputStream).read((byte[]) any(), eq(0), eq(BUFFER_SIZE_BYTES));
|
||||
}
|
||||
|
||||
public void testSkipResetSkipUsesBufferedData() {
|
||||
// Given a buffered input stream that has already read BUFFER_SIZE_BYTES
|
||||
stubInputStreamForReadingBytes();
|
||||
bufferedInputStream.mark();
|
||||
bufferedInputStream.skip(BUFFER_SIZE_BYTES);
|
||||
verify(mockInputStream).read((byte[]) any(), eq(0), eq(BUFFER_SIZE_BYTES));
|
||||
|
||||
// When resetting and reading the same amount, no extra data are read.
|
||||
bufferedInputStream.returnToMark();
|
||||
bufferedInputStream.skip(BUFFER_SIZE_BYTES);
|
||||
verify(mockInputStream).read((byte[]) any(), eq(0), eq(BUFFER_SIZE_BYTES));
|
||||
}
|
||||
|
||||
public void testReturnsEndOfStreamAfterBufferedData() {
|
||||
// Given a buffered input stream that has read 1 byte (to end-of-stream) and has been reset
|
||||
stubInputStreamForReadingBytes();
|
||||
bufferedInputStream.mark();
|
||||
bufferedInputStream.skip(1);
|
||||
stubInputStreamForReadingEndOfStream();
|
||||
bufferedInputStream.returnToMark();
|
||||
|
||||
// When skipping, first 1 byte is returned, then end-of-stream.
|
||||
assertEquals(1, bufferedInputStream.skip(1));
|
||||
assertEquals(SampleSource.END_OF_STREAM, bufferedInputStream.skip(1));
|
||||
}
|
||||
|
||||
public void testReadAtOffset() {
|
||||
// Given a mock input stream that provide non-zero data
|
||||
stubInputStreamForReadingBytes();
|
||||
|
||||
// When reading a byte at offset 1
|
||||
byte[] bytes = new byte[2];
|
||||
bufferedInputStream.mark();
|
||||
bufferedInputStream.read(bytes, 1, 1);
|
||||
|
||||
// Then only the second byte is set.
|
||||
assertTrue(Arrays.equals(new byte[] {(byte) 0, (byte) 0xFF}, bytes));
|
||||
}
|
||||
|
||||
public void testSkipAfterMark() {
|
||||
// Given a mock input stream that provides non-zero data, with three bytes read
|
||||
stubInputStreamForReadingBytes();
|
||||
bufferedInputStream.skip(1);
|
||||
bufferedInputStream.mark();
|
||||
bufferedInputStream.skip(2);
|
||||
bufferedInputStream.returnToMark();
|
||||
|
||||
// Then it is possible to skip one byte after the mark and read two bytes.
|
||||
assertEquals(1, bufferedInputStream.skip(1));
|
||||
assertEquals(2, bufferedInputStream.read(new byte[2], 0, 2));
|
||||
verify(mockInputStream).read((byte[]) any(), eq(0), eq(1));
|
||||
verify(mockInputStream).read((byte[]) any(), eq(0), eq(2));
|
||||
verify(mockInputStream).read((byte[]) any(), eq(2), eq(1));
|
||||
}
|
||||
|
||||
/** Stubs the input stream to read 0xFF for all requests. */
|
||||
private void stubInputStreamForReadingBytes() {
|
||||
when(mockInputStream.read((byte[]) any(), anyInt(), anyInt())).thenAnswer(
|
||||
new Answer<Integer>() {
|
||||
|
||||
@Override
|
||||
public Integer answer(InvocationOnMock invocation) throws Throwable {
|
||||
byte[] bytes = (byte[]) invocation.getArguments()[0];
|
||||
int offset = (int) invocation.getArguments()[1];
|
||||
int length = (int) invocation.getArguments()[2];
|
||||
for (int i = 0; i < length; i++) {
|
||||
bytes[i + offset] = (byte) 0xFF;
|
||||
}
|
||||
return length;
|
||||
}
|
||||
|
||||
});
|
||||
when(mockInputStream.skip(anyInt())).thenAnswer(new Answer<Integer>() {
|
||||
|
||||
@Override
|
||||
public Integer answer(InvocationOnMock invocation) throws Throwable {
|
||||
return (int) invocation.getArguments()[0];
|
||||
}
|
||||
|
||||
});
|
||||
}
|
||||
|
||||
/** Stubs the input stream to read end-of-stream for all requests. */
|
||||
private void stubInputStreamForReadingEndOfStream() {
|
||||
when(mockInputStream.read((byte[]) any(), anyInt(), anyInt()))
|
||||
.thenReturn(SampleSource.END_OF_STREAM);
|
||||
}
|
||||
|
||||
}
|
||||
|
|
@ -1,60 +0,0 @@
|
|||
/*
|
||||
* Copyright (C) 2014 The Android Open Source Project
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
*/
|
||||
package com.google.android.exoplayer.upstream;
|
||||
|
||||
import com.google.android.exoplayer.testutil.Util;
|
||||
|
||||
import junit.framework.TestCase;
|
||||
|
||||
import java.io.IOException;
|
||||
import java.util.Arrays;
|
||||
|
||||
/**
|
||||
* Unit tests for {@link DataSourceStream}.
|
||||
*/
|
||||
public class DataSourceStreamTest extends TestCase {
|
||||
|
||||
private static final int DATA_LENGTH = 1024;
|
||||
private static final int BUFFER_LENGTH = 128;
|
||||
|
||||
public void testGetLoadedData() throws IOException, InterruptedException {
|
||||
byte[] testData = Util.buildTestData(DATA_LENGTH);
|
||||
DataSource dataSource = new ByteArrayDataSource(testData);
|
||||
DataSpec dataSpec = new DataSpec(null, 0, DATA_LENGTH, null);
|
||||
DataSourceStream dataSourceStream = new DataSourceStream(dataSource, dataSpec,
|
||||
new BufferPool(BUFFER_LENGTH));
|
||||
|
||||
dataSourceStream.load();
|
||||
// Assert that the read and load positions are correct.
|
||||
assertEquals(0, dataSourceStream.getReadPosition());
|
||||
assertEquals(testData.length, dataSourceStream.getLoadPosition());
|
||||
|
||||
int halfTestDataLength = testData.length / 2;
|
||||
byte[] readData = new byte[testData.length];
|
||||
int bytesRead = dataSourceStream.read(readData, 0, halfTestDataLength);
|
||||
// Assert that the read position is updated correctly.
|
||||
assertEquals(halfTestDataLength, bytesRead);
|
||||
assertEquals(halfTestDataLength, dataSourceStream.getReadPosition());
|
||||
|
||||
bytesRead += dataSourceStream.read(readData, bytesRead, testData.length - bytesRead);
|
||||
// Assert that the read position was updated correctly.
|
||||
assertEquals(testData.length, bytesRead);
|
||||
assertEquals(testData.length, dataSourceStream.getReadPosition());
|
||||
// Assert that the data read using the two read calls either side of getLoadedData is correct.
|
||||
assertTrue(Arrays.equals(testData, readData));
|
||||
}
|
||||
|
||||
}
|
||||
|
|
@ -0,0 +1,34 @@
|
|||
/*
|
||||
* Copyright (C) 2014 The Android Open Source Project
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
*/
|
||||
package com.google.android.exoplayer.util;
|
||||
|
||||
/**
|
||||
* A {@link Clock} that returns a fixed value specified in the constructor.
|
||||
*/
|
||||
public class FakeClock implements Clock {
|
||||
|
||||
private final long timeMs;
|
||||
|
||||
public FakeClock(long timeMs) {
|
||||
this.timeMs = timeMs;
|
||||
}
|
||||
|
||||
@Override
|
||||
public long elapsedRealtime() {
|
||||
return timeMs;
|
||||
}
|
||||
|
||||
}
|
||||
Loading…
Reference in a new issue