java - java.nio.BufferOverflowException with MediaCodec on Android
Problem Description
I am using code I found in this article to encode bitmaps into a video.
import android.graphics.Bitmap;
import android.media.MediaCodec;
import android.media.MediaCodecInfo;
import android.media.MediaCodecList;
import android.media.MediaFormat;
import android.media.MediaMuxer;
import android.util.Log;
import java.io.File;
import java.io.IOException;
import java.nio.ByteBuffer;
import java.util.Queue;
import java.util.concurrent.ConcurrentLinkedQueue;
import java.util.concurrent.CountDownLatch;
import io.reactivex.Completable;
import io.reactivex.android.schedulers.AndroidSchedulers;
import io.reactivex.schedulers.Schedulers;
public class BitmapToVideoEncoder {
    private static final String TAG = BitmapToVideoEncoder.class.getSimpleName();

    private IBitmapToVideoEncoderCallback mCallback;
    private File mOutputFile;
    private Queue<Bitmap> mEncodeQueue = new ConcurrentLinkedQueue();
    private MediaCodec mediaCodec;
    private MediaMuxer mediaMuxer;

    private Object mFrameSync = new Object();
    private CountDownLatch mNewFrameLatch;

    private static final String MIME_TYPE = "video/avc"; // H.264 Advanced Video Coding
    private static int mWidth;
    private static int mHeight;
    private static final int BIT_RATE = 16000000;
    private static final int FRAME_RATE = 30; // Frames per second
    private static final int I_FRAME_INTERVAL = 1;

    private int mGenerateIndex = 0;
    private int mTrackIndex;
    private boolean mNoMoreFrames = false;
    private boolean mAbort = false;

    public interface IBitmapToVideoEncoderCallback {
        void onEncodingComplete(File outputFile);
    }

    public BitmapToVideoEncoder(IBitmapToVideoEncoderCallback callback) {
        mCallback = callback;
    }

    public boolean isEncodingStarted() {
        return (mediaCodec != null) && (mediaMuxer != null) && !mNoMoreFrames && !mAbort;
    }

    public int getActiveBitmaps() {
        return mEncodeQueue.size();
    }

    public void startEncoding(int width, int height, File outputFile) {
        mWidth = width;
        mHeight = height;
        mOutputFile = outputFile;

        String outputFileString;
        try {
            outputFileString = outputFile.getCanonicalPath();
        } catch (IOException e) {
            Log.e(TAG, "Unable to get path for " + outputFile);
            return;
        }

        MediaCodecInfo codecInfo = selectCodec(MIME_TYPE);
        if (codecInfo == null) {
            Log.e(TAG, "Unable to find an appropriate codec for " + MIME_TYPE);
            return;
        }
        Log.d(TAG, "found codec: " + codecInfo.getName());

        int colorFormat;
        try {
            colorFormat = selectColorFormat(codecInfo, MIME_TYPE);
        } catch (Exception e) {
            colorFormat = MediaCodecInfo.CodecCapabilities.COLOR_FormatYUV420SemiPlanar;
        }

        try {
            mediaCodec = MediaCodec.createByCodecName(codecInfo.getName());
        } catch (IOException e) {
            Log.e(TAG, "Unable to create MediaCodec " + e.getMessage());
            return;
        }

        MediaFormat mediaFormat = MediaFormat.createVideoFormat(MIME_TYPE, mWidth, mHeight);
        mediaFormat.setInteger(MediaFormat.KEY_BIT_RATE, BIT_RATE);
        mediaFormat.setInteger(MediaFormat.KEY_FRAME_RATE, FRAME_RATE);
        mediaFormat.setInteger(MediaFormat.KEY_COLOR_FORMAT, colorFormat);
        mediaFormat.setInteger(MediaFormat.KEY_I_FRAME_INTERVAL, I_FRAME_INTERVAL);
        mediaFormat.setInteger(MediaFormat.KEY_MAX_INPUT_SIZE, 6566350);
        mediaCodec.configure(mediaFormat, null, null, MediaCodec.CONFIGURE_FLAG_ENCODE);
        mediaCodec.start();

        try {
            mediaMuxer = new MediaMuxer(outputFileString, MediaMuxer.OutputFormat.MUXER_OUTPUT_MPEG_4);
        } catch (IOException e) {
            Log.e(TAG, "MediaMuxer creation failed. " + e.getMessage());
            return;
        }

        Log.d(TAG, "Initialization complete. Starting encoder...");
        Completable.fromAction(() -> encode())
                .subscribeOn(Schedulers.io())
                .observeOn(AndroidSchedulers.mainThread())
                .subscribe();
    }

    public void stopEncoding() {
        if (mediaCodec == null || mediaMuxer == null) {
            Log.d(TAG, "Failed to stop encoding since it never started");
            return;
        }
        Log.d(TAG, "Stopping encoding");

        mNoMoreFrames = true;
        synchronized (mFrameSync) {
            if ((mNewFrameLatch != null) && (mNewFrameLatch.getCount() > 0)) {
                mNewFrameLatch.countDown();
            }
        }
    }

    public void abortEncoding() {
        if (mediaCodec == null || mediaMuxer == null) {
            Log.d(TAG, "Failed to abort encoding since it never started");
            return;
        }
        Log.d(TAG, "Aborting encoding");

        mNoMoreFrames = true;
        mAbort = true;
        mEncodeQueue = new ConcurrentLinkedQueue(); // Drop all frames
        synchronized (mFrameSync) {
            if ((mNewFrameLatch != null) && (mNewFrameLatch.getCount() > 0)) {
                mNewFrameLatch.countDown();
            }
        }
    }

    public void queueFrame(Bitmap bitmap) {
        if (mediaCodec == null || mediaMuxer == null) {
            Log.d(TAG, "Failed to queue frame. Encoding not started");
            return;
        }
        Log.d(TAG, "Queueing frame");

        mEncodeQueue.add(bitmap);
        synchronized (mFrameSync) {
            if ((mNewFrameLatch != null) && (mNewFrameLatch.getCount() > 0)) {
                mNewFrameLatch.countDown();
            }
        }
    }

    private void encode() {
        Log.d(TAG, "Encoder started");

        while (true) {
            if (mNoMoreFrames && (mEncodeQueue.size() == 0)) break;

            Bitmap bitmap = mEncodeQueue.poll();
            if (bitmap == null) {
                synchronized (mFrameSync) {
                    mNewFrameLatch = new CountDownLatch(1);
                }
                try {
                    mNewFrameLatch.await();
                } catch (InterruptedException e) {}
                bitmap = mEncodeQueue.poll();
            }
            if (bitmap == null) continue;

            byte[] byteConvertFrame = getNV21(bitmap.getWidth(), bitmap.getHeight(), bitmap);

            long TIMEOUT_USEC = 500000;
            int inputBufIndex = mediaCodec.dequeueInputBuffer(TIMEOUT_USEC);
            long ptsUsec = computePresentationTime(mGenerateIndex, FRAME_RATE);
            if (inputBufIndex >= 0) {
                final ByteBuffer inputBuffer = mediaCodec.getInputBuffer(inputBufIndex);
                inputBuffer.clear();
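                // NOTE: this put() is the call that throws java.nio.BufferOverflowException
                // (see the stack trace below) when byteConvertFrame is larger than the
                // dequeued input buffer's remaining capacity.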
                inputBuffer.put(byteConvertFrame);
                mediaCodec.queueInputBuffer(inputBufIndex, 0, byteConvertFrame.length, ptsUsec, 0);
                mGenerateIndex++;
            }

            MediaCodec.BufferInfo mBufferInfo = new MediaCodec.BufferInfo();
            int encoderStatus = mediaCodec.dequeueOutputBuffer(mBufferInfo, TIMEOUT_USEC);
            if (encoderStatus == MediaCodec.INFO_TRY_AGAIN_LATER) {
                // no output available yet
                Log.e(TAG, "No output from encoder available");
            } else if (encoderStatus == MediaCodec.INFO_OUTPUT_FORMAT_CHANGED) {
                // not expected for an encoder
                MediaFormat newFormat = mediaCodec.getOutputFormat();
                mTrackIndex = mediaMuxer.addTrack(newFormat);
                mediaMuxer.start();
            } else if (encoderStatus < 0) {
                Log.e(TAG, "unexpected result from encoder.dequeueOutputBuffer: " + encoderStatus);
            } else if (mBufferInfo.size != 0) {
                ByteBuffer encodedData = mediaCodec.getOutputBuffer(encoderStatus);
                if (encodedData == null) {
                    Log.e(TAG, "encoderOutputBuffer " + encoderStatus + " was null");
                } else {
                    encodedData.position(mBufferInfo.offset);
                    encodedData.limit(mBufferInfo.offset + mBufferInfo.size);
                    mediaMuxer.writeSampleData(mTrackIndex, encodedData, mBufferInfo);
                    mediaCodec.releaseOutputBuffer(encoderStatus, false);
                }
            }
        }

        release();

        if (mAbort) {
            mOutputFile.delete();
        } else {
            mCallback.onEncodingComplete(mOutputFile);
        }
    }

    private void release() {
        if (mediaCodec != null) {
            mediaCodec.stop();
            mediaCodec.release();
            mediaCodec = null;
            Log.d(TAG, "RELEASE CODEC");
        }
        if (mediaMuxer != null) {
            mediaMuxer.stop();
            mediaMuxer.release();
            mediaMuxer = null;
            Log.d(TAG, "RELEASE MUXER");
        }
    }

    private static MediaCodecInfo selectCodec(String mimeType) {
        int numCodecs = MediaCodecList.getCodecCount();
        for (int i = 0; i < numCodecs; i++) {
            MediaCodecInfo codecInfo = MediaCodecList.getCodecInfoAt(i);
            if (!codecInfo.isEncoder()) {
                continue;
            }
            String[] types = codecInfo.getSupportedTypes();
            for (int j = 0; j < types.length; j++) {
                if (types[j].equalsIgnoreCase(mimeType)) {
                    return codecInfo;
                }
            }
        }
        return null;
    }

    private static int selectColorFormat(MediaCodecInfo codecInfo, String mimeType) {
        MediaCodecInfo.CodecCapabilities capabilities = codecInfo.getCapabilitiesForType(mimeType);
        for (int i = 0; i < capabilities.colorFormats.length; i++) {
            int colorFormat = capabilities.colorFormats[i];
            if (isRecognizedFormat(colorFormat)) {
                return colorFormat;
            }
        }
        return 0; // not reached
    }

    private static boolean isRecognizedFormat(int colorFormat) {
        switch (colorFormat) {
            // these are the formats we know how to handle for this encoder
            case MediaCodecInfo.CodecCapabilities.COLOR_FormatYUV420Planar:
            case MediaCodecInfo.CodecCapabilities.COLOR_FormatYUV420PackedPlanar:
            case MediaCodecInfo.CodecCapabilities.COLOR_FormatYUV420SemiPlanar:
            case MediaCodecInfo.CodecCapabilities.COLOR_FormatYUV420PackedSemiPlanar:
            case MediaCodecInfo.CodecCapabilities.COLOR_TI_FormatYUV420PackedSemiPlanar:
                return true;
            default:
                return false;
        }
    }

    private byte[] getNV21(int inputWidth, int inputHeight, Bitmap scaled) {
        int[] argb = new int[inputWidth * inputHeight];
        scaled.getPixels(argb, 0, inputWidth, 0, 0, inputWidth, inputHeight);

        byte[] yuv = new byte[inputWidth * inputHeight * 3 / 2];
        encodeYUV420SP(yuv, argb, inputWidth, inputHeight);

        scaled.recycle();
        return yuv;
    }

    private void encodeYUV420SP(byte[] yuv420sp, int[] argb, int width, int height) {
        final int frameSize = width * height;

        int yIndex = 0;
        int uvIndex = frameSize;

        int a, R, G, B, Y, U, V;
        int index = 0;
        for (int j = 0; j < height; j++) {
            for (int i = 0; i < width; i++) {
                a = (argb[index] & 0xff000000) >> 24; // a is not used obviously
                R = (argb[index] & 0xff0000) >> 16;
                G = (argb[index] & 0xff00) >> 8;
                B = (argb[index] & 0xff) >> 0;

                Y = ((66 * R + 129 * G + 25 * B + 128) >> 8) + 16;
                U = ((-38 * R - 74 * G + 112 * B + 128) >> 8) + 128;
                V = ((112 * R - 94 * G - 18 * B + 128) >> 8) + 128;

                yuv420sp[yIndex++] = (byte) ((Y < 0) ? 0 : ((Y > 255) ? 255 : Y));
                if (j % 2 == 0 && index % 2 == 0) {
                    yuv420sp[uvIndex++] = (byte) ((U < 0) ? 0 : ((U > 255) ? 255 : U));
                    yuv420sp[uvIndex++] = (byte) ((V < 0) ? 0 : ((V > 255) ? 255 : V));
                }
                index++;
            }
        }
    }

    private long computePresentationTime(long frameIndex, int framerate) {
        return 132 + frameIndex * 1000000 / framerate;
    }
}
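For context, this is roughly how I drive the encoder from my Activity (a minimal usage sketch; the output path, log tag, and blank test bitmap are stand-ins for my real setup):

File outputFile = new File(getFilesDir(), "out.mp4");
BitmapToVideoEncoder encoder = new BitmapToVideoEncoder(file ->
        Log.d("Encoder", "Encoding complete: " + file.getAbsolutePath()));

// Configure the codec for 256x256 frames and start the background encode loop.
encoder.startEncoding(256, 256, outputFile);

// Queue a single ARGB_8888 bitmap (stand-in for the real frame source).
Bitmap frame = Bitmap.createBitmap(256, 256, Bitmap.Config.ARGB_8888);
encoder.queueFrame(frame);

// Signal that no more frames are coming so the encoder finalizes the file.
encoder.stopEncoding();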
I get this error even when I pass only a single bitmap into the encoder's buffer.
E/AndroidRuntime: FATAL EXCEPTION: main io.reactivex.exceptions.OnErrorNotImplementedException
at io.reactivex.internal.observers.EmptyCompletableObserver.onError(EmptyCompletableObserver.java:51)
at io.reactivex.internal.operators.completable.CompletableObserveOn$ObserveOnCompletableObserver.run(CompletableObserveOn.java:88)
at io.reactivex.android.schedulers.HandlerScheduler$ScheduledRunnable.run(HandlerScheduler.java:109)
at android.os.Handler.handleCallback(Handler.java:938)
at android.os.Handler.dispatchMessage(Handler.java:99)
at android.os.Looper.loop(Looper.java:236)
at android.app.ActivityThread.main(ActivityThread.java:7861)
at java.lang.reflect.Method.invoke(Native Method)
at com.android.internal.os.RuntimeInit$MethodAndArgsCaller.run(RuntimeInit.java:600)
at com.android.internal.os.ZygoteInit.main(ZygoteInit.java:967)
Caused by: java.nio.BufferOverflowException
at java.nio.DirectByteBuffer.put(DirectByteBuffer.java:298)
at java.nio.ByteBuffer.put(ByteBuffer.java:732)
at org.tensorflow.lite.examples.styletransfer.BitmapToVideoEncoder.encode(BitmapToVideoEncoder.java:199)
at org.tensorflow.lite.examples.styletransfer.BitmapToVideoEncoder.lambda$startEncoding$0$BitmapToVideoEncoder(BitmapToVideoEncoder.java:113)
at org.tensorflow.lite.examples.styletransfer.-$$Lambda$BitmapToVideoEncoder$yjvQOltdyGV-abeM_v-M6JuQMw0.run(Unknown Source:2)
at io.reactivex.internal.operators.completable.CompletableFromAction.subscribeActual(CompletableFromAction.java:34)
at io.reactivex.Completable.subscribe(Completable.java:1794)
at io.reactivex.internal.operators.completable.CompletableSubscribeOn$SubscribeOnObserver.run(CompletableSubscribeOn.java:64)
at io.reactivex.Scheduler$DisposeTask.run(Scheduler.java:463)
at io.reactivex.internal.schedulers.ScheduledRunnable.run(ScheduledRunnable.java:66)
at io.reactivex.internal.schedulers.ScheduledRunnable.call(ScheduledRunnable.java:57)
at java.util.concurrent.FutureTask.run(FutureTask.java:266)
at java.util.concurrent.ScheduledThreadPoolExecutor$ScheduledFutureTask.run(ScheduledThreadPoolExecutor.java:301)
at java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1167)
at java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:641)
at java.lang.Thread.run(Thread.java:923)
The frames are 256x256 bitmaps. I understand the buffer cannot take the data because it is too large. Is there a way to increase the buffer size for the data?
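For reference, a 256x256 NV21 frame from getNV21() is 256 * 256 * 3 / 2 = 98,304 bytes. One way I could instrument the encode() loop to confirm what is overflowing (a diagnostic sketch, assuming the exception means the frame is larger than the dequeued input buffer):

int inputBufIndex = mediaCodec.dequeueInputBuffer(TIMEOUT_USEC);
if (inputBufIndex >= 0) {
    final ByteBuffer inputBuffer = mediaCodec.getInputBuffer(inputBufIndex);
    inputBuffer.clear();

    // put() throws BufferOverflowException when remaining() < byteConvertFrame.length,
    // so log both sizes before writing.
    Log.d(TAG, "frame bytes: " + byteConvertFrame.length
            + ", input buffer capacity: " + inputBuffer.remaining());
    if (inputBuffer.remaining() < byteConvertFrame.length) {
        Log.e(TAG, "Frame does not fit; do the bitmap's dimensions match the "
                + "width/height passed to startEncoding()?");
    } else {
        inputBuffer.put(byteConvertFrame);
        mediaCodec.queueInputBuffer(inputBufIndex, 0, byteConvertFrame.length, ptsUsec, 0);
        mGenerateIndex++;
    }
}

Since KEY_MAX_INPUT_SIZE is already set to about 6.5 MB in startEncoding(), either the encoder ignores that hint and sizes its input buffers from the configured width and height, or byteConvertFrame is larger than expected because bitmap.getWidth()/getHeight() differ from the values passed to startEncoding().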
Solution