mirror of
https://github.com/samsonjs/media.git
synced 2026-04-27 15:07:40 +00:00
Add MediaFormat on video frame metadata listener
This is useful for apps that want to access HDR metadata that MediaCodec puts in its output format.

PiperOrigin-RevId: 274169985
This commit is contained in:
parent
4ae79105de
commit
1c66010b4a
5 changed files with 27 additions and 8 deletions
|
|
@@ -113,6 +113,7 @@
|
||||||
* Add support for ID3-in-EMSG in HLS streams
|
* Add support for ID3-in-EMSG in HLS streams
|
||||||
([spec](https://aomediacodec.github.io/av1-id3/)).
|
([spec](https://aomediacodec.github.io/av1-id3/)).
|
||||||
* Make show and hide player controls accessible for TalkBack in `PlayerView`.
|
* Make show and hide player controls accessible for TalkBack in `PlayerView`.
|
||||||
|
* Pass the codec output `MediaFormat` to `VideoFrameMetadataListener`.
|
||||||
|
|
||||||
### 2.10.5 (2019-09-20) ###
|
### 2.10.5 (2019-09-20) ###
|
||||||
|
|
||||||
|
|
|
||||||
|
|
@@ -238,7 +238,7 @@ public class LibvpxVideoRenderer extends SimpleDecoderVideoRenderer {
|
||||||
throws VideoDecoderException {
|
throws VideoDecoderException {
|
||||||
if (frameMetadataListener != null) {
|
if (frameMetadataListener != null) {
|
||||||
frameMetadataListener.onVideoFrameAboutToBeRendered(
|
frameMetadataListener.onVideoFrameAboutToBeRendered(
|
||||||
presentationTimeUs, System.nanoTime(), outputFormat);
|
presentationTimeUs, System.nanoTime(), outputFormat, /* mediaFormat= */ null);
|
||||||
}
|
}
|
||||||
super.renderOutputBuffer(outputBuffer, presentationTimeUs, outputFormat);
|
super.renderOutputBuffer(outputBuffer, presentationTimeUs, outputFormat);
|
||||||
}
|
}
|
||||||
|
|
|
||||||
|
|
@@ -142,6 +142,7 @@ public class MediaCodecVideoRenderer extends MediaCodecRenderer {
|
||||||
|
|
||||||
private int pendingRotationDegrees;
|
private int pendingRotationDegrees;
|
||||||
private float pendingPixelWidthHeightRatio;
|
private float pendingPixelWidthHeightRatio;
|
||||||
|
@Nullable private MediaFormat currentMediaFormat;
|
||||||
private int currentWidth;
|
private int currentWidth;
|
||||||
private int currentHeight;
|
private int currentHeight;
|
||||||
private int currentUnappliedRotationDegrees;
|
private int currentUnappliedRotationDegrees;
|
||||||
|
|
@@ -502,6 +503,7 @@ public class MediaCodecVideoRenderer extends MediaCodecRenderer {
|
||||||
lastInputTimeUs = C.TIME_UNSET;
|
lastInputTimeUs = C.TIME_UNSET;
|
||||||
outputStreamOffsetUs = C.TIME_UNSET;
|
outputStreamOffsetUs = C.TIME_UNSET;
|
||||||
pendingOutputStreamOffsetCount = 0;
|
pendingOutputStreamOffsetCount = 0;
|
||||||
|
currentMediaFormat = null;
|
||||||
clearReportedVideoSize();
|
clearReportedVideoSize();
|
||||||
clearRenderedFirstFrame();
|
clearRenderedFirstFrame();
|
||||||
frameReleaseTimeHelper.disable();
|
frameReleaseTimeHelper.disable();
|
||||||
|
|
@@ -720,6 +722,7 @@ public class MediaCodecVideoRenderer extends MediaCodecRenderer {
|
||||||
|
|
||||||
@Override
|
@Override
|
||||||
protected void onOutputFormatChanged(MediaCodec codec, MediaFormat outputFormat) {
|
protected void onOutputFormatChanged(MediaCodec codec, MediaFormat outputFormat) {
|
||||||
|
currentMediaFormat = outputFormat;
|
||||||
boolean hasCrop = outputFormat.containsKey(KEY_CROP_RIGHT)
|
boolean hasCrop = outputFormat.containsKey(KEY_CROP_RIGHT)
|
||||||
&& outputFormat.containsKey(KEY_CROP_LEFT) && outputFormat.containsKey(KEY_CROP_BOTTOM)
|
&& outputFormat.containsKey(KEY_CROP_LEFT) && outputFormat.containsKey(KEY_CROP_BOTTOM)
|
||||||
&& outputFormat.containsKey(KEY_CROP_TOP);
|
&& outputFormat.containsKey(KEY_CROP_TOP);
|
||||||
|
|
@@ -810,7 +813,7 @@ public class MediaCodecVideoRenderer extends MediaCodecRenderer {
|
||||||
|| (isStarted && shouldForceRenderOutputBuffer(earlyUs, elapsedSinceLastRenderUs)));
|
|| (isStarted && shouldForceRenderOutputBuffer(earlyUs, elapsedSinceLastRenderUs)));
|
||||||
if (forceRenderOutputBuffer) {
|
if (forceRenderOutputBuffer) {
|
||||||
long releaseTimeNs = System.nanoTime();
|
long releaseTimeNs = System.nanoTime();
|
||||||
notifyFrameMetadataListener(presentationTimeUs, releaseTimeNs, format);
|
notifyFrameMetadataListener(presentationTimeUs, releaseTimeNs, format, currentMediaFormat);
|
||||||
if (Util.SDK_INT >= 21) {
|
if (Util.SDK_INT >= 21) {
|
||||||
renderOutputBufferV21(codec, bufferIndex, presentationTimeUs, releaseTimeNs);
|
renderOutputBufferV21(codec, bufferIndex, presentationTimeUs, releaseTimeNs);
|
||||||
} else {
|
} else {
|
||||||
|
|
@@ -854,7 +857,8 @@ public class MediaCodecVideoRenderer extends MediaCodecRenderer {
|
||||||
if (Util.SDK_INT >= 21) {
|
if (Util.SDK_INT >= 21) {
|
||||||
// Let the underlying framework time the release.
|
// Let the underlying framework time the release.
|
||||||
if (earlyUs < 50000) {
|
if (earlyUs < 50000) {
|
||||||
notifyFrameMetadataListener(presentationTimeUs, adjustedReleaseTimeNs, format);
|
notifyFrameMetadataListener(
|
||||||
|
presentationTimeUs, adjustedReleaseTimeNs, format, currentMediaFormat);
|
||||||
renderOutputBufferV21(codec, bufferIndex, presentationTimeUs, adjustedReleaseTimeNs);
|
renderOutputBufferV21(codec, bufferIndex, presentationTimeUs, adjustedReleaseTimeNs);
|
||||||
return true;
|
return true;
|
||||||
}
|
}
|
||||||
|
|
@@ -872,7 +876,8 @@ public class MediaCodecVideoRenderer extends MediaCodecRenderer {
|
||||||
return false;
|
return false;
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
notifyFrameMetadataListener(presentationTimeUs, adjustedReleaseTimeNs, format);
|
notifyFrameMetadataListener(
|
||||||
|
presentationTimeUs, adjustedReleaseTimeNs, format, currentMediaFormat);
|
||||||
renderOutputBuffer(codec, bufferIndex, presentationTimeUs);
|
renderOutputBuffer(codec, bufferIndex, presentationTimeUs);
|
||||||
return true;
|
return true;
|
||||||
}
|
}
|
||||||
|
|
@@ -905,10 +910,10 @@ public class MediaCodecVideoRenderer extends MediaCodecRenderer {
|
||||||
}
|
}
|
||||||
|
|
||||||
private void notifyFrameMetadataListener(
|
private void notifyFrameMetadataListener(
|
||||||
long presentationTimeUs, long releaseTimeNs, Format format) {
|
long presentationTimeUs, long releaseTimeNs, Format format, MediaFormat mediaFormat) {
|
||||||
if (frameMetadataListener != null) {
|
if (frameMetadataListener != null) {
|
||||||
frameMetadataListener.onVideoFrameAboutToBeRendered(
|
frameMetadataListener.onVideoFrameAboutToBeRendered(
|
||||||
presentationTimeUs, releaseTimeNs, format);
|
presentationTimeUs, releaseTimeNs, format, mediaFormat);
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
|
|
|
||||||
|
|
@@ -15,6 +15,8 @@
|
||||||
*/
|
*/
|
||||||
package com.google.android.exoplayer2.video;
|
package com.google.android.exoplayer2.video;
|
||||||
|
|
||||||
|
import android.media.MediaFormat;
|
||||||
|
import androidx.annotation.Nullable;
|
||||||
import com.google.android.exoplayer2.Format;
|
import com.google.android.exoplayer2.Format;
|
||||||
|
|
||||||
/** A listener for metadata corresponding to video frame being rendered. */
|
/** A listener for metadata corresponding to video frame being rendered. */
|
||||||
|
|
@@ -26,6 +28,13 @@ public interface VideoFrameMetadataListener {
|
||||||
* @param releaseTimeNs The wallclock time at which the frame should be displayed, in nanoseconds.
|
* @param releaseTimeNs The wallclock time at which the frame should be displayed, in nanoseconds.
|
||||||
* If the platform API version of the device is less than 21, then this is the best effort.
|
* If the platform API version of the device is less than 21, then this is the best effort.
|
||||||
* @param format The format associated with the frame.
|
* @param format The format associated with the frame.
|
||||||
|
* @param mediaFormat The framework media format associated with the frame, or {@code null} if not
|
||||||
|
* known or not applicable (e.g., because the frame was not output by a {@link
|
||||||
|
* android.media.MediaCodec MediaCodec}).
|
||||||
*/
|
*/
|
||||||
void onVideoFrameAboutToBeRendered(long presentationTimeUs, long releaseTimeNs, Format format);
|
void onVideoFrameAboutToBeRendered(
|
||||||
|
long presentationTimeUs,
|
||||||
|
long releaseTimeNs,
|
||||||
|
Format format,
|
||||||
|
@Nullable MediaFormat mediaFormat);
|
||||||
}
|
}
|
||||||
|
|
|
||||||
|
|
@@ -18,6 +18,7 @@ package com.google.android.exoplayer2.ui.spherical;
|
||||||
import static com.google.android.exoplayer2.util.GlUtil.checkGlError;
|
import static com.google.android.exoplayer2.util.GlUtil.checkGlError;
|
||||||
|
|
||||||
import android.graphics.SurfaceTexture;
|
import android.graphics.SurfaceTexture;
|
||||||
|
import android.media.MediaFormat;
|
||||||
import android.opengl.GLES20;
|
import android.opengl.GLES20;
|
||||||
import android.opengl.Matrix;
|
import android.opengl.Matrix;
|
||||||
import androidx.annotation.Nullable;
|
import androidx.annotation.Nullable;
|
||||||
|
|
@@ -142,7 +143,10 @@ public final class SceneRenderer implements VideoFrameMetadataListener, CameraMo
|
||||||
|
|
||||||
@Override
|
@Override
|
||||||
public void onVideoFrameAboutToBeRendered(
|
public void onVideoFrameAboutToBeRendered(
|
||||||
long presentationTimeUs, long releaseTimeNs, Format format) {
|
long presentationTimeUs,
|
||||||
|
long releaseTimeNs,
|
||||||
|
Format format,
|
||||||
|
@Nullable MediaFormat mediaFormat) {
|
||||||
sampleTimestampQueue.add(releaseTimeNs, presentationTimeUs);
|
sampleTimestampQueue.add(releaseTimeNs, presentationTimeUs);
|
||||||
setProjection(format.projectionData, format.stereoMode, releaseTimeNs);
|
setProjection(format.projectionData, format.stereoMode, releaseTimeNs);
|
||||||
}
|
}
|
||||||
|
|
|
||||||
Loading…
Reference in a new issue