
android - MediaCodec get all frames from a video


I am trying to use MediaCodec to retrieve all the frames of a video for image processing. I am trying to render the video and capture the frames from the outBuffers, but I cannot construct a Bitmap from the bytes I receive.

I have tried rendering to a Surface and to nothing (null), because I noticed that when you render to null, the outBuffers do receive the bytes of the rendered frames.

Here is the code:

private static final String SAMPLE = Environment.getExternalStorageDirectory() + "/test_videos/sample2.mp4";
private PlayerThread mPlayer = null;

@Override
protected void onCreate(Bundle savedInstanceState) {
    super.onCreate(savedInstanceState);

    SurfaceView sv = new SurfaceView(this);
    sv.getHolder().addCallback(this);
    setContentView(sv);
}

protected void onDestroy() {
    super.onDestroy();
}

@Override
public void surfaceCreated(SurfaceHolder holder) {
}

@Override
public void surfaceChanged(SurfaceHolder holder, int format, int width, int height) {
    if (mPlayer == null) {
        mPlayer = new PlayerThread(holder.getSurface());
        mPlayer.start();
    }
}

@Override
public void surfaceDestroyed(SurfaceHolder holder) {
    if (mPlayer != null) {
        mPlayer.interrupt();
    }
}

private void writeFrameToSDCard(byte[] bytes, int i, int sampleSize) {
    try {
        Bitmap bmp = BitmapFactory.decodeByteArray(bytes, 0, sampleSize);

        File file = new File(Environment.getExternalStorageDirectory() + "/test_videos/sample" + i + ".png");
        if (file.exists())
            file.delete();

        file.createNewFile();

        FileOutputStream out = new FileOutputStream(file.getAbsoluteFile());

        bmp.compress(Bitmap.CompressFormat.PNG, 90, out);
        out.close();

    } catch (Exception e) {
        e.printStackTrace();
    }
}

private class PlayerThread extends Thread {
    private MediaExtractor extractor;
    private MediaCodec decoder;
    private Surface surface;

    public PlayerThread(Surface surface) {
        this.surface = surface;
    }

    @Override
    public void run() {
        extractor = new MediaExtractor();
        extractor.setDataSource(SAMPLE);

        int index = extractor.getTrackCount();
        Log.d("MediaCodecTag", "Track count: " + index);

        for (int i = 0; i < extractor.getTrackCount(); i++) {
            MediaFormat format = extractor.getTrackFormat(i);
            String mime = format.getString(MediaFormat.KEY_MIME);
            if (mime.startsWith("video/")) {
                extractor.selectTrack(i);
                decoder = MediaCodec.createDecoderByType(mime);
                decoder.configure(format, surface, null, 0);
                break;
            }
        }

        if (decoder == null) {
            Log.e("DecodeActivity", "Can't find video info!");
            return;
        }

        decoder.start();

        ByteBuffer[] inputBuffers = decoder.getInputBuffers();
        ByteBuffer[] outputBuffers = decoder.getOutputBuffers();
        BufferInfo info = new BufferInfo();
        boolean isEOS = false;
        long startMs = System.currentTimeMillis();

        int i = 0;
        while (!Thread.interrupted()) {
            if (!isEOS) {
                int inIndex = decoder.dequeueInputBuffer(10000);
                if (inIndex >= 0) {
                    ByteBuffer buffer = inputBuffers[inIndex];

                    int sampleSize = extractor.readSampleData(buffer, 0);

                    if (sampleSize < 0) {
                        decoder.queueInputBuffer(inIndex, 0, 0, 0, MediaCodec.BUFFER_FLAG_END_OF_STREAM);
                        isEOS = true;
                    } else {
                        decoder.queueInputBuffer(inIndex, 0, sampleSize, extractor.getSampleTime(), 0);
                        extractor.advance();
                    }
                }
            }

            /* saves frame to sdcard */
            int outIndex = decoder.dequeueOutputBuffer(info, 10000); // outIndex most of the times null

            switch (outIndex) {
            case MediaCodec.INFO_OUTPUT_BUFFERS_CHANGED:
                Log.d("DecodeActivity", "INFO_OUTPUT_BUFFERS_CHANGED");
                outputBuffers = decoder.getOutputBuffers();
                break;
            case MediaCodec.INFO_OUTPUT_FORMAT_CHANGED:
                Log.d("DecodeActivity", "New format " + decoder.getOutputFormat());
                break;
            case MediaCodec.INFO_TRY_AGAIN_LATER:
                Log.d("DecodeActivity", "dequeueOutputBuffer timed out!");
                break;
            default:
                ByteBuffer buffer = outputBuffers[outIndex];
                Log.v("DecodeActivity", "We can't use this buffer but render it due to the API limit, " + buffer);

                // We use a very simple clock to keep the video FPS, or the video
                // playback will be too fast
                while (info.presentationTimeUs / 1000 > System.currentTimeMillis() - startMs) {
                    try {
                        sleep(10);
                    } catch (InterruptedException e) {
                        e.printStackTrace();
                        break;
                    }
                }
                decoder.releaseOutputBuffer(outIndex, true);
                try {
                    byte[] dst = new byte[outputBuffers[outIndex].capacity()];
                    outputBuffers[outIndex].get(dst);
                    writeFrameToSDCard(dst, i, dst.length);
                    i++;
                } catch (Exception e) {
                    Log.d("iDecodeActivity", "Error while creating bitmap with: " + e.getMessage());
                }

                break;
            }

            // All decoded frames have been rendered, we can stop playing now
            if ((info.flags & MediaCodec.BUFFER_FLAG_END_OF_STREAM) != 0) {
                Log.d("DecodeActivity", "OutputBuffer BUFFER_FLAG_END_OF_STREAM");
                break;
            }
        }

        decoder.stop();
        decoder.release();
        extractor.release();
    }
}

Any help would be much appreciated.

Best Answer

You can decode to a Surface or to a ByteBuffer, but not both at the same time. Because you are configuring the codec with a Surface, there will always be zero bytes of data in the output buffers.
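To illustrate the difference, here is a minimal sketch of my own (not code from the answer): pass null instead of the Surface to configure(), release output buffers with render = false, and read only the info.offset..info.offset+info.size region rather than capacity() bytes. Even then, the bytes you get are raw YUV, not something BitmapFactory.decodeByteArray() can decode.

// Sketch: decode to ByteBuffers only, so the output buffers actually contain data.
decoder.configure(format, /* surface = */ null, null, 0);
decoder.start();

ByteBuffer[] outputBuffers = decoder.getOutputBuffers();
MediaCodec.BufferInfo info = new MediaCodec.BufferInfo();

int outIndex = decoder.dequeueOutputBuffer(info, 10000);
if (outIndex >= 0) {
    ByteBuffer outBuf = outputBuffers[outIndex];
    // Copy only the valid region; capacity() is usually larger than one frame.
    byte[] frame = new byte[info.size];
    outBuf.position(info.offset);
    outBuf.limit(info.offset + info.size);
    outBuf.get(frame);
    // 'frame' now holds codec-specific YUV data, not PNG/JPEG/ARGB bytes,
    // so BitmapFactory.decodeByteArray() would still return null for it.
    decoder.releaseOutputBuffer(outIndex, false /* don't render */);
}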

If you configure for ByteBuffer decoding, the data format varies from device to device, but as far as I know it is never an ARGB format that Bitmap understands. You can see two YUV formats being examined in the buffer-to-buffer tests in the CTS EncodeDecodeTest, in the method checkFrame(). Note, however, that the first thing it does is check the format and return immediately if it is not recognized.
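As a rough illustration of that "check the format, bail out if unknown" pattern (my own sketch, not code from the CTS test), you can read KEY_COLOR_FORMAT from the decoder's output format and handle only the layouts you recognize. On devices that report COLOR_FormatYUV420SemiPlanar you could, ignoring stride and crop, repack the chroma order and write a JPEG through YuvImage:

import android.graphics.ImageFormat;
import android.graphics.Rect;
import android.graphics.YuvImage;
import android.media.MediaCodecInfo;
import android.media.MediaFormat;

import java.io.FileOutputStream;
import java.io.IOException;

// Hypothetical helper: only handles the tightly packed semi-planar case,
// returns false for anything else (as checkFrame() gives up on unknown formats).
static boolean saveYuvFrameAsJpeg(MediaFormat outputFormat, byte[] frame, String path)
        throws IOException {
    int colorFormat = outputFormat.getInteger(MediaFormat.KEY_COLOR_FORMAT);
    int width = outputFormat.getInteger(MediaFormat.KEY_WIDTH);
    int height = outputFormat.getInteger(MediaFormat.KEY_HEIGHT);

    if (colorFormat != MediaCodecInfo.CodecCapabilities.COLOR_FormatYUV420SemiPlanar) {
        return false; // unrecognized layout -- give up
    }

    // Semi-planar decoder output is essentially NV12 (UVUV...); YuvImage expects
    // NV21 (VUVU...), so swap each chroma byte pair before compressing.
    byte[] nv21 = frame.clone();
    for (int i = width * height; i + 1 < nv21.length; i += 2) {
        byte u = nv21[i];
        nv21[i] = nv21[i + 1];
        nv21[i + 1] = u;
    }

    YuvImage yuv = new YuvImage(nv21, ImageFormat.NV21, width, height, null);
    try (FileOutputStream out = new FileOutputStream(path)) {
        yuv.compressToJpeg(new Rect(0, 0, width, height), 90, out);
    }
    return true;
}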

At present (Android 4.4), the only reliable way is to decode to a SurfaceTexture, render that with GLES, and then extract the RGB data with glReadPixels(). Sample code is available on bigflake -- see ExtractMpegFramesTest (requires API 16+).
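For reference, the final read-back step of that approach looks roughly like the sketch below (my own illustration, assuming the EGL pbuffer surface, SurfaceTexture, and GLES render pass from the bigflake sample are already set up and the frame has been drawn into the current surface):

import android.graphics.Bitmap;
import android.opengl.GLES20;

import java.io.FileOutputStream;
import java.io.IOException;
import java.nio.ByteBuffer;
import java.nio.ByteOrder;

// Read the current GL surface back into a Bitmap and save it as PNG.
static void saveCurrentGlFrame(int width, int height, String path) throws IOException {
    ByteBuffer pixels = ByteBuffer.allocateDirect(width * height * 4);
    pixels.order(ByteOrder.LITTLE_ENDIAN);

    GLES20.glReadPixels(0, 0, width, height,
            GLES20.GL_RGBA, GLES20.GL_UNSIGNED_BYTE, pixels);
    pixels.rewind();

    Bitmap bmp = Bitmap.createBitmap(width, height, Bitmap.Config.ARGB_8888);
    bmp.copyPixelsFromBuffer(pixels);
    // Note: GL's origin is bottom-left, so the image comes out vertically
    // flipped unless the render pass already flips it (the bigflake sample
    // accounts for this).
    try (FileOutputStream out = new FileOutputStream(path)) {
        bmp.compress(Bitmap.CompressFormat.PNG, 90, out);
    } finally {
        bmp.recycle();
    }
}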

Regarding "android - MediaCodec get all frames from a video", we found a similar question on Stack Overflow: https://stackoverflow.com/questions/19754547/
