diff --git a/.gitignore b/.gitignore index cb4cfaada1c..53de1451b32 100644 --- a/.gitignore +++ b/.gitignore @@ -69,6 +69,8 @@ extensions/flac/src/main/jni/flac # FFmpeg extension extensions/ffmpeg/src/main/jni/ffmpeg +extensions/ffmpeg/.cxx +extensions/ffmpeg/src/main/jni/include # Cronet extension extensions/cronet/jniLibs/* diff --git a/extensions/ffmpeg/src/main/java/com/google/android/exoplayer2/ext/ffmpeg/FfmpegVideoDecoder.java b/extensions/ffmpeg/src/main/java/com/google/android/exoplayer2/ext/ffmpeg/FfmpegVideoDecoder.java new file mode 100644 index 00000000000..9eea7c62511 --- /dev/null +++ b/extensions/ffmpeg/src/main/java/com/google/android/exoplayer2/ext/ffmpeg/FfmpegVideoDecoder.java @@ -0,0 +1,246 @@ +/* + * Copyright (C) 2019 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package com.google.android.exoplayer2.ext.ffmpeg; + +import android.util.Log; +import android.view.Surface; +import androidx.annotation.Nullable; +import com.google.android.exoplayer2.C; +import com.google.android.exoplayer2.Format; +import com.google.android.exoplayer2.decoder.SimpleDecoder; +import com.google.android.exoplayer2.util.Assertions; +import com.google.android.exoplayer2.util.MimeTypes; +import com.google.android.exoplayer2.util.Util; +import com.google.android.exoplayer2.video.VideoDecoderInputBuffer; +import com.google.android.exoplayer2.video.VideoDecoderOutputBuffer; +import java.nio.ByteBuffer; +import java.util.List; + +/** + * FFmpeg video decoder. + */ +/* package */ final class FfmpegVideoDecoder + extends + SimpleDecoder { + + private static final String TAG = "FfmpegVideoDecoder"; + + // LINT.IfChange + private static final int VIDEO_DECODER_SUCCESS = 0; + private static final int VIDEO_DECODER_ERROR_INVALID_DATA = -1; + private static final int VIDEO_DECODER_ERROR_OTHER = -2; + private static final int VIDEO_DECODER_ERROR_READ_FRAME = -3; + // LINT.ThenChange(../../../../../../../jni/ffmpeg_jni.cc) + + private final String codecName; + private long nativeContext; + @Nullable private final byte[] extraData; + private Format format; + + @C.VideoOutputMode private volatile int outputMode; + + /** + * Creates an FFmpeg video decoder. + * + * @param numInputBuffers Number of input buffers. + * @param numOutputBuffers Number of output buffers. + * @param initialInputBufferSize The initial size of each input buffer, in bytes. + * @param threads Number of threads FFmpeg will use to decode. + * @throws FfmpegDecoderException Thrown if an exception occurs when initializing the + * decoder. 
+ */ + public FfmpegVideoDecoder( + int numInputBuffers, int numOutputBuffers, int initialInputBufferSize, int threads, Format format) + throws FfmpegDecoderException { + super( + new VideoDecoderInputBuffer[numInputBuffers], + new VideoDecoderOutputBuffer[numOutputBuffers]); + if (!FfmpegLibrary.isAvailable()) { + throw new FfmpegDecoderException("Failed to load decoder native library."); + } + codecName = Assertions.checkNotNull(FfmpegLibrary.getCodecName(format.sampleMimeType)); + extraData = getExtraData(format.sampleMimeType, format.initializationData); + this.format = format; + nativeContext = ffmpegInitialize(codecName, extraData, threads); + if (nativeContext == 0) { + throw new FfmpegDecoderException("Failed to initialize decoder."); + } + setInitialInputBufferSize(initialInputBufferSize); + } + + /** + * Returns FFmpeg-compatible codec-specific initialization data ("extra data"), or {@code null} if + * not required. + */ + @Nullable + private static byte[] getExtraData(String mimeType, List initializationData) { + switch (mimeType) { + case MimeTypes.VIDEO_H264: + byte[] sps = initializationData.get(0); + byte[] pps = initializationData.get(1); + byte[] extraData = new byte[sps.length + pps.length]; + System.arraycopy(sps, 0, extraData, 0, sps.length); + System.arraycopy(pps, 0, extraData, sps.length, pps.length); + return extraData; + case MimeTypes.VIDEO_H265: + return initializationData.get(0); + default: + // Other codecs do not require extra data. + return null; + } + } + + @Override + public String getName() { + return "ffmpeg" + FfmpegLibrary.getVersion() + "-" + codecName; + } + + /** + * Sets the output mode for frames rendered by the decoder. + * + * @param outputMode The output mode. 
+ */ + public void setOutputMode(@C.VideoOutputMode int outputMode) { + this.outputMode = outputMode; + } + + @Override + protected VideoDecoderInputBuffer createInputBuffer() { + return new VideoDecoderInputBuffer(); + } + + @Override + protected VideoDecoderOutputBuffer createOutputBuffer() { + return new VideoDecoderOutputBuffer(this::releaseOutputBuffer); + } + + @Override + @Nullable + protected FfmpegDecoderException decode( + VideoDecoderInputBuffer inputBuffer, VideoDecoderOutputBuffer outputBuffer, boolean reset) { + if (reset) { + nativeContext = ffmpegReset(nativeContext); + if (nativeContext == 0) { + return new FfmpegDecoderException("Error resetting (see logcat)."); + } + } + + // send packet + ByteBuffer inputData = Util.castNonNull(inputBuffer.data); + int inputSize = inputData.limit(); + // enqueue origin data + int sendPacketResult = ffmpegSendPacket(nativeContext, inputData, inputSize, + inputBuffer.timeUs); + if (sendPacketResult == VIDEO_DECODER_ERROR_INVALID_DATA) { + outputBuffer.setFlags(C.BUFFER_FLAG_DECODE_ONLY); + return null; + } else if (sendPacketResult == VIDEO_DECODER_ERROR_READ_FRAME) { + // need read frame + Log.d(TAG, "VIDEO_DECODER_ERROR_READ_FRAME: " + "timeUs=" + inputBuffer.timeUs); + } else if (sendPacketResult == VIDEO_DECODER_ERROR_OTHER) { + return new FfmpegDecoderException("ffmpegDecode error: (see logcat)"); + } + + // receive frame + boolean decodeOnly = inputBuffer.isDecodeOnly(); + // We need to dequeue the decoded frame from the decoder even when the input data is + // decode-only. 
+ int getFrameResult = ffmpegReceiveFrame(nativeContext, outputMode, outputBuffer, decodeOnly); + if (getFrameResult == VIDEO_DECODER_ERROR_OTHER) { + return new FfmpegDecoderException("ffmpegDecode error: (see logcat)"); + } + + if (getFrameResult == VIDEO_DECODER_ERROR_INVALID_DATA) { + outputBuffer.addFlag(C.BUFFER_FLAG_DECODE_ONLY); + } + + if (!decodeOnly) { + outputBuffer.colorInfo = inputBuffer.colorInfo; + } + + return null; + } + + @Override + protected FfmpegDecoderException createUnexpectedDecodeException(Throwable error) { + return new FfmpegDecoderException("Unexpected decode error", error); + } + + @Override + public void release() { + super.release(); + ffmpegRelease(nativeContext); + nativeContext = 0; + } + + /** + * Renders output buffer to the given surface. Must only be called when in {@link + * C#VIDEO_OUTPUT_MODE_SURFACE_YUV} mode. + * + * @param outputBuffer Output buffer. + * @param surface Output surface. + * @throws FfmpegDecoderException Thrown if called with invalid output mode or frame + * rendering fails. + */ + public void renderToSurface(VideoDecoderOutputBuffer outputBuffer, Surface surface) + throws FfmpegDecoderException { + if (outputBuffer.mode != C.VIDEO_OUTPUT_MODE_SURFACE_YUV) { + throw new FfmpegDecoderException("Invalid output mode."); + } + if (ffmpegRenderFrame( + nativeContext, surface, + outputBuffer, outputBuffer.width, outputBuffer.height) == VIDEO_DECODER_ERROR_OTHER) { + throw new FfmpegDecoderException( + "Buffer render error: "); + } + } + + private native long ffmpegInitialize(String codecName, @Nullable byte[] extraData, int threads); + + private native long ffmpegReset(long context); + + private native void ffmpegRelease(long context); + + private native int ffmpegRenderFrame( + long context, Surface surface, VideoDecoderOutputBuffer outputBuffer, + int displayedWidth, + int displayedHeight); + + /** + * Decodes the encoded data passed. + * + * @param context Decoder context. 
+ * @param encodedData Encoded data. + * @param length Length of the data buffer. + * @return {@link #VIDEO_DECODER_SUCCESS} if successful, {@link #VIDEO_DECODER_ERROR_OTHER} if an + * error occurred. + */ + private native int ffmpegSendPacket(long context, ByteBuffer encodedData, int length, + long inputTime); + + /** + * Gets the decoded frame. + * + * @param context Decoder context. + * @param outputBuffer Output buffer for the decoded frame. + * @return {@link #VIDEO_DECODER_SUCCESS} if successful, {@link #VIDEO_DECODER_ERROR_INVALID_DATA} + * if successful but the frame is decode-only, {@link #VIDEO_DECODER_ERROR_OTHER} if an error + * occurred. + */ + private native int ffmpegReceiveFrame( + long context, int outputMode, VideoDecoderOutputBuffer outputBuffer, boolean decodeOnly); + +} diff --git a/extensions/ffmpeg/src/main/java/com/google/android/exoplayer2/ext/ffmpeg/FfmpegVideoRenderer.java b/extensions/ffmpeg/src/main/java/com/google/android/exoplayer2/ext/ffmpeg/FfmpegVideoRenderer.java index d2f2fce639c..a72f458936a 100644 --- a/extensions/ffmpeg/src/main/java/com/google/android/exoplayer2/ext/ffmpeg/FfmpegVideoRenderer.java +++ b/extensions/ffmpeg/src/main/java/com/google/android/exoplayer2/ext/ffmpeg/FfmpegVideoRenderer.java @@ -15,6 +15,8 @@ */ package com.google.android.exoplayer2.ext.ffmpeg; +import static java.lang.Runtime.getRuntime; + import android.os.Handler; import android.view.Surface; import androidx.annotation.Nullable; @@ -23,6 +25,8 @@ import com.google.android.exoplayer2.RendererCapabilities; import com.google.android.exoplayer2.decoder.Decoder; import com.google.android.exoplayer2.drm.ExoMediaCrypto; +import com.google.android.exoplayer2.util.Assertions; +import com.google.android.exoplayer2.util.MimeTypes; import com.google.android.exoplayer2.util.TraceUtil; import com.google.android.exoplayer2.util.Util; import com.google.android.exoplayer2.video.DecoderVideoRenderer; @@ -40,6 +44,24 @@ public final class FfmpegVideoRenderer extends 
DecoderVideoRenderer { private static final String TAG = "FfmpegVideoRenderer"; + private static final int DEFAULT_NUM_OF_INPUT_BUFFERS = 4; + private static final int DEFAULT_NUM_OF_OUTPUT_BUFFERS = 4; + /* Default size based on 720p resolution video compressed by a factor of two. */ + private static final int DEFAULT_INPUT_BUFFER_SIZE = + Util.ceilDivide(1280, 64) * Util.ceilDivide(720, 64) * (64 * 64 * 3 / 2) / 2; + + /** The number of input buffers. */ + private final int numInputBuffers; + /** + * The number of output buffers. The renderer may limit the minimum possible value due to + * requiring multiple output buffers to be dequeued at a time for it to make progress. + */ + private final int numOutputBuffers; + + private final int threads; + + @Nullable private FfmpegVideoDecoder decoder; + /** * Creates a new instance. * @@ -56,8 +78,42 @@ public FfmpegVideoRenderer( @Nullable Handler eventHandler, @Nullable VideoRendererEventListener eventListener, int maxDroppedFramesToNotify) { + this( + allowedJoiningTimeMs, + eventHandler, + eventListener, + maxDroppedFramesToNotify, + /* threads= */ getRuntime().availableProcessors(), + DEFAULT_NUM_OF_INPUT_BUFFERS, + DEFAULT_NUM_OF_OUTPUT_BUFFERS); + } + + /** + * Creates a new instance. + * + * @param allowedJoiningTimeMs The maximum duration in milliseconds for which this video renderer + * can attempt to seamlessly join an ongoing playback. + * @param eventHandler A handler to use when delivering events to {@code eventListener}. May be + * null if delivery of events is not required. + * @param eventListener A listener of events. May be null if delivery of events is not required. + * @param maxDroppedFramesToNotify The maximum number of frames that can be dropped between + * invocations of {@link VideoRendererEventListener#onDroppedFrames(int, long)}. + * @param threads Number of threads FFmpeg will use to decode. + * @param numInputBuffers Number of input buffers. 
+ * @param numOutputBuffers Number of output buffers. + */ + public FfmpegVideoRenderer( + long allowedJoiningTimeMs, + @Nullable Handler eventHandler, + @Nullable VideoRendererEventListener eventListener, + int maxDroppedFramesToNotify, + int threads, + int numInputBuffers, + int numOutputBuffers) { super(allowedJoiningTimeMs, eventHandler, eventListener, maxDroppedFramesToNotify); - // TODO: Implement. + this.threads = threads; + this.numInputBuffers = numInputBuffers; + this.numOutputBuffers = numOutputBuffers; } @Override @@ -68,9 +124,6 @@ public String getName() { @Override @RendererCapabilities.Capabilities public final int supportsFormat(Format format) { - // TODO: Remove this line and uncomment the implementation below. - return FORMAT_UNSUPPORTED_TYPE; - /* String mimeType = Assertions.checkNotNull(format.sampleMimeType); if (!FfmpegLibrary.isAvailable() || !MimeTypes.isVideo(mimeType)) { return FORMAT_UNSUPPORTED_TYPE; @@ -84,35 +137,38 @@ public final int supportsFormat(Format format) { ADAPTIVE_SEAMLESS, TUNNELING_NOT_SUPPORTED); } - */ } - @SuppressWarnings("return.type.incompatible") @Override protected Decoder createDecoder(Format format, @Nullable ExoMediaCrypto mediaCrypto) throws FfmpegDecoderException { TraceUtil.beginSection("createFfmpegVideoDecoder"); - // TODO: Implement, remove the SuppressWarnings annotation, and update the return type to use - // the concrete type of the decoder (probably FfmepgVideoDecoder). + int initialInputBufferSize = + format.maxInputSize != Format.NO_VALUE ? format.maxInputSize : DEFAULT_INPUT_BUFFER_SIZE; + FfmpegVideoDecoder decoder = + new FfmpegVideoDecoder(numInputBuffers, numOutputBuffers, initialInputBufferSize, threads, format); + this.decoder = decoder; TraceUtil.endSection(); - return null; + return decoder; } @Override protected void renderOutputBufferToSurface(VideoDecoderOutputBuffer outputBuffer, Surface surface) throws FfmpegDecoderException { - // TODO: Implement. 
+ if (decoder == null) { + throw new FfmpegDecoderException( + "Failed to render output buffer to surface: decoder is not initialized."); + } + decoder.renderToSurface(outputBuffer, surface); + outputBuffer.release(); } @Override protected void setDecoderOutputMode(@C.VideoOutputMode int outputMode) { - // TODO: Uncomment the implementation below. - /* if (decoder != null) { decoder.setOutputMode(outputMode); } - */ } @Override diff --git a/extensions/ffmpeg/src/main/jni/ffmpeg_jni.cc b/extensions/ffmpeg/src/main/jni/ffmpeg_jni.cc index 7738e5c2d52..fe5e4126cf2 100644 --- a/extensions/ffmpeg/src/main/jni/ffmpeg_jni.cc +++ b/extensions/ffmpeg/src/main/jni/ffmpeg_jni.cc @@ -16,6 +16,10 @@ #include #include #include +#include +#include +#include +#include extern "C" { #ifdef __cplusplus @@ -56,6 +60,16 @@ extern "C" { Java_com_google_android_exoplayer2_ext_ffmpeg_FfmpegAudioDecoder_##NAME( \ JNIEnv *env, jobject thiz, ##__VA_ARGS__) +#define VIDEO_DECODER_FUNC(RETURN_TYPE, NAME, ...) \ + extern "C" { \ + JNIEXPORT RETURN_TYPE \ + Java_com_google_android_exoplayer2_ext_ffmpeg_FfmpegVideoDecoder_##NAME( \ + JNIEnv *env, jobject thiz, ##__VA_ARGS__); \ + } \ + JNIEXPORT RETURN_TYPE \ + Java_com_google_android_exoplayer2_ext_ffmpeg_FfmpegVideoDecoder_##NAME( \ + JNIEnv *env, jobject thiz, ##__VA_ARGS__) + #define ERROR_STRING_BUFFER_LENGTH 256 // Output format corresponding to AudioFormat.ENCODING_PCM_16BIT. 
@@ -68,6 +82,13 @@ static const int AUDIO_DECODER_ERROR_INVALID_DATA = -1; static const int AUDIO_DECODER_ERROR_OTHER = -2; // LINT.ThenChange(../java/com/google/android/exoplayer2/ext/ffmpeg/FfmpegAudioDecoder.java) +// LINT.IfChange +static const int VIDEO_DECODER_SUCCESS = 0; +static const int VIDEO_DECODER_ERROR_INVALID_DATA = -1; +static const int VIDEO_DECODER_ERROR_OTHER = -2; +static const int VIDEO_DECODER_ERROR_READ_FRAME = -3; +// LINT.ThenChange(../java/com/google/android/exoplayer2/ext/ffmpeg/FfmpegVideoDecoder.java) + /** * Returns the AVCodec with the specified name, or NULL if it is not available. */ @@ -365,3 +386,342 @@ void releaseContext(AVCodecContext *context) { avcodec_free_context(&context); } + +// video + +// YUV plane indices. +const int kPlaneY = 0; +const int kPlaneU = 1; +const int kPlaneV = 2; +const int kMaxPlanes = 3; + +// Android YUV format. See: +// https://developer.android.com/reference/android/graphics/ImageFormat.html#YV12. +const int kImageFormatYV12 = 0x32315659; + +struct JniContext { + ~JniContext() { + if (native_window) { + ANativeWindow_release(native_window); + } + } + + bool MaybeAcquireNativeWindow(JNIEnv *env, jobject new_surface) { + if (surface == new_surface) { + return true; + } + if (native_window) { + ANativeWindow_release(native_window); + } + native_window_width = 0; + native_window_height = 0; + native_window = ANativeWindow_fromSurface(env, new_surface); + if (native_window == nullptr) { + LOGE("kJniStatusANativeWindowError"); + surface = nullptr; + return false; + } + surface = new_surface; + return true; + } + + jfieldID data_field; + jfieldID yuvPlanes_field; + jfieldID yuvStrides_field; + jmethodID init_for_private_frame_method; + jmethodID init_for_yuv_frame_method; + jmethodID init_method; + + AVCodecContext *codecContext; + + ANativeWindow *native_window = nullptr; + jobject surface = nullptr; + int native_window_width = 0; + int native_window_height = 0; +}; + +void CopyPlane(const uint8_t 
*source, int source_stride, uint8_t *destination, + int destination_stride, int width, int height) { + while (height--) { + std::memcpy(destination, source, width); + source += source_stride; + destination += destination_stride; + } +} + +constexpr int AlignTo16(int value) { return (value + 15) & (~15); } + +JniContext *createVideoContext(JNIEnv *env, + AVCodec *codec, + jbyteArray extraData, + jint threads) { + JniContext *jniContext = new(std::nothrow)JniContext(); + + AVCodecContext *codecContext = avcodec_alloc_context3(codec); + if (!codecContext) { + LOGE("Failed to allocate context."); + return NULL; + } + + if (extraData) { + jsize size = env->GetArrayLength(extraData); + codecContext->extradata_size = size; + codecContext->extradata = + (uint8_t *) av_malloc(size + AV_INPUT_BUFFER_PADDING_SIZE); + if (!codecContext->extradata) { + LOGE("Failed to allocate extradata."); + releaseContext(codecContext); + return NULL; + } + env->GetByteArrayRegion(extraData, 0, size, (jbyte *) codecContext->extradata); + } + + codecContext->thread_count = threads; + codecContext->err_recognition = AV_EF_IGNORE_ERR; + int result = avcodec_open2(codecContext, codec, NULL); + if (result < 0) { + logError("avcodec_open2", result); + releaseContext(codecContext); + return NULL; + } + + jniContext->codecContext = codecContext; + + // Populate JNI References. 
+ const jclass outputBufferClass = env->FindClass( + "com/google/android/exoplayer2/video/VideoDecoderOutputBuffer"); + jniContext->data_field = env->GetFieldID(outputBufferClass, "data", "Ljava/nio/ByteBuffer;"); + jniContext->yuvPlanes_field = + env->GetFieldID(outputBufferClass, "yuvPlanes", "[Ljava/nio/ByteBuffer;"); + jniContext->yuvStrides_field = env->GetFieldID(outputBufferClass, "yuvStrides", "[I"); + jniContext->init_for_private_frame_method = + env->GetMethodID(outputBufferClass, "initForPrivateFrame", "(II)V"); + jniContext->init_for_yuv_frame_method = + env->GetMethodID(outputBufferClass, "initForYuvFrame", "(IIIII)Z"); + jniContext->init_method = + env->GetMethodID(outputBufferClass, "init", "(JILjava/nio/ByteBuffer;)V"); + + return jniContext; +} + +VIDEO_DECODER_FUNC(jlong, ffmpegInitialize, jstring codecName, jbyteArray extraData, jint threads) { + AVCodec *codec = getCodecByName(env, codecName); + if (!codec) { + LOGE("Codec not found."); + return 0L; + } + + return (jlong) createVideoContext(env, codec, extraData, threads); +} + +VIDEO_DECODER_FUNC(jlong, ffmpegReset, jlong jContext) { + JniContext *const jniContext = reinterpret_cast(jContext); + AVCodecContext *context = jniContext->codecContext; + if (!context) { + LOGE("Tried to reset without a context."); + return 0L; + } + + avcodec_flush_buffers(context); + return (jlong) jniContext; +} + +VIDEO_DECODER_FUNC(void, ffmpegRelease, jlong jContext) { + JniContext *const jniContext = reinterpret_cast(jContext); + AVCodecContext *context = jniContext->codecContext; + if (context) { + releaseContext(context); + } +} + + +VIDEO_DECODER_FUNC(jint, ffmpegSendPacket, jlong jContext, jobject encodedData, + jint length, jlong inputTimeUs) { + JniContext *const jniContext = reinterpret_cast(jContext); + AVCodecContext *avContext = jniContext->codecContext; + + uint8_t *inputBuffer = (uint8_t *) env->GetDirectBufferAddress(encodedData); + AVPacket packet; + av_init_packet(&packet); + packet.data = 
inputBuffer; + packet.size = length; + packet.pts = inputTimeUs; + + int result = 0; + // Queue input data. + result = avcodec_send_packet(avContext, &packet); + if (result) { + logError("avcodec_send_packet", result); + if (result == AVERROR_INVALIDDATA) { + // need more data + return VIDEO_DECODER_ERROR_INVALID_DATA; + } else if (result == AVERROR(EAGAIN)) { + // need read frame + return VIDEO_DECODER_ERROR_READ_FRAME; + } else { + return VIDEO_DECODER_ERROR_OTHER; + } + } + return result; +} + +VIDEO_DECODER_FUNC(jint, ffmpegReceiveFrame, jlong jContext, jint outputMode, jobject jOutputBuffer, + jboolean decodeOnly) { + JniContext *const jniContext = reinterpret_cast(jContext); + AVCodecContext *avContext = jniContext->codecContext; + int result = 0; + + AVFrame *frame = av_frame_alloc(); + if (!frame) { + LOGE("Failed to allocate output frame."); + return VIDEO_DECODER_ERROR_OTHER; + } + result = avcodec_receive_frame(avContext, frame); + + // fail + if (decodeOnly || result == AVERROR(EAGAIN)) { + // This is not an error. The input data was decode-only or no displayable + // frames are available. + av_frame_free(&frame); + return VIDEO_DECODER_ERROR_INVALID_DATA; + } + if (result) { + av_frame_free(&frame); + logError("avcodec_receive_frame", result); + return VIDEO_DECODER_ERROR_OTHER; + } + + // success + // init time and mode + env->CallVoidMethod(jOutputBuffer, jniContext->init_method, frame->pts, outputMode, nullptr); + + // init data + const jboolean init_result = env->CallBooleanMethod( + jOutputBuffer, jniContext->init_for_yuv_frame_method, + frame->width, + frame->height, + frame->linesize[0], frame->linesize[1], + 0); + if (env->ExceptionCheck()) { + // Exception is thrown in Java when returning from the native call. 
+ return VIDEO_DECODER_ERROR_OTHER; + } + if (!init_result) { + return VIDEO_DECODER_ERROR_OTHER; + } + + const jobject data_object = env->GetObjectField(jOutputBuffer, jniContext->data_field); + jbyte *data = reinterpret_cast(env->GetDirectBufferAddress(data_object)); + const int32_t uvHeight = (frame->height + 1) / 2; + const uint64_t yLength = frame->linesize[0] * frame->height; + const uint64_t uvLength = frame->linesize[1] * uvHeight; + + // TODO: Support rotate YUV data + + memcpy(data, frame->data[0], yLength); + memcpy(data + yLength, frame->data[1], uvLength); + memcpy(data + yLength + uvLength, frame->data[2], uvLength); + + av_frame_free(&frame); + + return result; +} + +VIDEO_DECODER_FUNC(jint, ffmpegRenderFrame, jlong jContext, jobject jSurface, + jobject jOutputBuffer, jint displayedWidth, jint displayedHeight) { + JniContext *const jniContext = reinterpret_cast(jContext); + if (!jniContext->MaybeAcquireNativeWindow(env, jSurface)) { + return VIDEO_DECODER_ERROR_OTHER; + } + + if (jniContext->native_window_width != displayedWidth || + jniContext->native_window_height != displayedHeight) { + if (ANativeWindow_setBuffersGeometry( + jniContext->native_window, + displayedWidth, + displayedHeight, + kImageFormatYV12)) { + LOGE("kJniStatusANativeWindowError"); + return VIDEO_DECODER_ERROR_OTHER; + } + jniContext->native_window_width = displayedWidth; + jniContext->native_window_height = displayedHeight; + } + + ANativeWindow_Buffer native_window_buffer; + int result = ANativeWindow_lock(jniContext->native_window, &native_window_buffer, nullptr); + if (result == -19) { + // Surface: dequeueBuffer failed (No such device) + jniContext->surface = nullptr; + return VIDEO_DECODER_SUCCESS; + } else if (result || native_window_buffer.bits == nullptr) { + LOGE("kJniStatusANativeWindowError"); + return VIDEO_DECODER_ERROR_OTHER; + } + + jobject yuvPlanes_object = env->GetObjectField(jOutputBuffer, jniContext->yuvPlanes_field); + jobjectArray yuvPlanes_array = 
static_cast(yuvPlanes_object); + jobject yuvPlanesY = env->GetObjectArrayElement(yuvPlanes_array, kPlaneY); + jobject yuvPlanesU = env->GetObjectArrayElement(yuvPlanes_array, kPlaneU); + jobject yuvPlanesV = env->GetObjectArrayElement(yuvPlanes_array, kPlaneV); + jbyte *planeY = reinterpret_cast(env->GetDirectBufferAddress(yuvPlanesY)); + jbyte *planeU = reinterpret_cast(env->GetDirectBufferAddress(yuvPlanesU)); + jbyte *planeV = reinterpret_cast(env->GetDirectBufferAddress(yuvPlanesV)); + + jobject yuvStrides_object = env->GetObjectField(jOutputBuffer, jniContext->yuvStrides_field); + jintArray *yuvStrides_array = reinterpret_cast(&yuvStrides_object); + + int *yuvStrides = env->GetIntArrayElements(*yuvStrides_array, NULL); + int strideY = yuvStrides[kPlaneY]; + int strideU = yuvStrides[kPlaneU]; + int strideV = yuvStrides[kPlaneV]; + + // Y plane + CopyPlane(reinterpret_cast(planeY), + strideY, + reinterpret_cast(native_window_buffer.bits), + native_window_buffer.stride, + displayedWidth, + displayedHeight); + + const int y_plane_size = native_window_buffer.stride * native_window_buffer.height; + const int32_t native_window_buffer_uv_height = (native_window_buffer.height + 1) / 2; + const int native_window_buffer_uv_stride = AlignTo16(native_window_buffer.stride / 2); + + // TODO(b/140606738): Handle monochrome videos. + + // V plane + // Since the format for ANativeWindow is YV12, V plane is being processed + // before U plane. 
+ const int v_plane_height = std::min(native_window_buffer_uv_height, + displayedHeight); + CopyPlane( + reinterpret_cast(planeV), + strideV, + reinterpret_cast(native_window_buffer.bits) + y_plane_size, + native_window_buffer_uv_stride, displayedWidth, + v_plane_height); + + const int v_plane_size = v_plane_height * native_window_buffer_uv_stride; + + // U plane + CopyPlane( + reinterpret_cast(planeU), + strideU, + reinterpret_cast(native_window_buffer.bits) + + y_plane_size + v_plane_size, + native_window_buffer_uv_stride, displayedWidth, + std::min(native_window_buffer_uv_height, + displayedHeight)); + + + env->ReleaseIntArrayElements(*yuvStrides_array, yuvStrides, 0); + + if (ANativeWindow_unlockAndPost(jniContext->native_window)) { + LOGE("kJniStatusANativeWindowError"); + return VIDEO_DECODER_ERROR_OTHER; + } + + return VIDEO_DECODER_SUCCESS; +} +