
java - Is there any fast alternative to the TextureView.getBitmap() function?


I am using Android VLC (LibVLC) with a TextureView to play a live RTSP stream in my Android app. Everything works fine, but I need to grab the currently playing frame as often as possible for an object-detection task, and I am using the getBitmap() function to do that. The problem is that this function is far too slow, and it takes longer and longer as the size of the image rendered in the TextureView grows.

So, is there any other, faster way to do this?

Note that I have already tried the getDrawingCache() function on both TextureView and SurfaceView, but it always returns a transparent bitmap; after a little research I realized this is because VLC uses hardware acceleration to render the frames onto the surface texture.

I also found many solutions similar to this answer by fadden, which talks about using the glReadPixels() function and points to grafika as a code reference. However, I unfortunately have almost no OpenGL experience. So, if you can vouch for the linked answer, could you point me to a simple, direct code example that fits my case?
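From what I understand, the glReadPixels() route amounts to reading the rendered frame back out of an OpenGL framebuffer. The following is only a minimal sketch of that idea, not code from the question: the helper class name is made up, and it assumes a GL context already exists and the frame has just been drawn into the currently bound framebuffer (setting that context up is the part grafika demonstrates):

import android.graphics.Bitmap;
import android.opengl.GLES20;

import java.nio.ByteBuffer;
import java.nio.ByteOrder;

public class GlFrameReader {
    // Read back the pixels of the currently bound GL framebuffer into a Bitmap.
    // Must be called on the thread that owns the GL context, right after the
    // frame has been drawn; width/height are assumed to match the framebuffer.
    static Bitmap readFramePixels(int width, int height) {
        ByteBuffer buffer = ByteBuffer.allocateDirect(width * height * 4)
                .order(ByteOrder.nativeOrder());
        GLES20.glReadPixels(0, 0, width, height,
                GLES20.GL_RGBA, GLES20.GL_UNSIGNED_BYTE, buffer);
        Bitmap bitmap = Bitmap.createBitmap(width, height, Bitmap.Config.ARGB_8888);
        buffer.rewind();
        bitmap.copyPixelsFromBuffer(buffer);
        // Note: the result is vertically flipped relative to screen coordinates.
        return bitmap;
    }
}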

public class MainActivity extends AppCompatActivity implements TextureView.SurfaceTextureListener,
        org.videolan.libvlc.media.MediaPlayer.OnBufferingUpdateListener,
        org.videolan.libvlc.media.MediaPlayer.OnCompletionListener,
        org.videolan.libvlc.media.MediaPlayer.OnPreparedListener,
        org.videolan.libvlc.media.MediaPlayer.OnVideoSizeChangedListener {

    private AppCompatActivity me = this;
    private MediaPlayer mMediaPlayer;
    private TextureView mTextureView;
    private String mUrl = "/storage/emulated/0/videos/test.mp4";
    private static final String TAG = "MainActivity";

    @Override
    protected void onCreate(Bundle savedInstanceState) {
        super.onCreate(savedInstanceState);
        setContentView(R.layout.content_main);

        mMediaPlayer = new MediaPlayer(VLCInstance.get());
        mTextureView = (TextureView) findViewById(R.id.player);
        mTextureView.setSurfaceTextureListener(this);
    }

    // Attach the VLC video output to the TextureView.
    private void attachViewSurface() {
        final IVLCVout vlcVout = mMediaPlayer.getVLCVout();
        mMediaPlayer.setScale(0);
        vlcVout.detachViews();
        vlcVout.setVideoView(mTextureView);
        vlcVout.setWindowSize(mTextureView.getWidth(), mTextureView.getHeight());
        vlcVout.attachViews();
        mTextureView.setKeepScreenOn(true);
    }

    private void play(String path) {
        try {
            Media media;
            if (new File(path).exists()) {
                media = new Media(VLCInstance.get(), path);
            } else {
                media = new Media(VLCInstance.get(), Uri.parse(path));
            }

            mMediaPlayer.setMedia(media);
            mMediaPlayer.play();
        } catch (Exception e) {
            Log.e(TAG, e.getMessage());
        }
    }

    @Override
    public void onSurfaceTextureAvailable(SurfaceTexture surface, int width, int height) {
        attachViewSurface();

        if (mMediaPlayer.hasMedia())
            mMediaPlayer.play();
        else
            play(mUrl);
    }

    // The slow part: getBitmap() blocks and gets slower as the rendered frame grows.
    public Bitmap getImage() {
        return mTextureView.getBitmap();
    }

}

Best Answer

After all this time, I decided to give this answer, which describes what I am now using as my alternative. I found that JavaCPP's FFmpegFrameGrabber can be used to play an RTSP stream or a video file, but there are two problems with it:

  1. FFmpegFrameGrabber.grab() reads the next available frame directly, and it is so slow that I could not grab more than about 6 frames per second on my device (CPU: 1.5 GHz 64-bit octa-core ARM Cortex-A53).
  2. FFmpegFrameGrabber has no rendering capability; it only grabs the current video frame into an OpenCV Mat object or a JavaCV Frame object (which you can convert to a Bitmap with the same library's AndroidFrameConverter class, as in the short sketch below).
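To make the second point concrete, the conversion calls involved are roughly the following sketch (the helper class is hypothetical; the full grabber code further down uses the same converters inline):

import android.graphics.Bitmap;

import org.bytedeco.javacpp.opencv_core;
import org.bytedeco.javacv.AndroidFrameConverter;
import org.bytedeco.javacv.Frame;
import org.bytedeco.javacv.OpenCVFrameConverter;

public class FrameConversions {
    // Convert a grabbed JavaCV Frame into an Android Bitmap for display.
    static Bitmap toBitmap(Frame frame) {
        return new AndroidFrameConverter().convert(frame);
    }

    // Convert the same Frame into an OpenCV Mat for detection work.
    static opencv_core.Mat toMat(Frame frame) {
        return new OpenCVFrameConverter.ToMat().convert(frame);
    }
}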

Regarding the first problem, in my case I did not need more than 5 fps, so it was not an issue for me.
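The answer does not show how that rate cap was done; purely as an illustration, throttling the grab loop to roughly 5 fps could look like the following sketch (the class, the method, and the 200 ms threshold are assumptions, not part of the original code):

import org.bytedeco.javacv.FFmpegFrameGrabber;
import org.bytedeco.javacv.Frame;
import org.bytedeco.javacv.FrameGrabber;

public class ThrottledGrab {
    // Grab frames but only hand them on at roughly 5 fps (about 200 ms apart).
    static void grabAtMostFiveFps(String path) throws FrameGrabber.Exception {
        FFmpegFrameGrabber grabber = new FFmpegFrameGrabber(path);
        grabber.start();
        long lastUsedMs = 0;
        Frame frame;
        while ((frame = grabber.grabImage()) != null) {
            long now = System.currentTimeMillis();
            if (now - lastUsedMs < 200) {
                continue; // drop frames arriving faster than ~5 fps
            }
            lastUsedMs = now;
            // ... hand the frame to detection / rendering here ...
        }
        grabber.stop();
        grabber.release();
    }
}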

For the second problem, I developed an OpenGL bitmap renderer that renders the bitmap images produced by the grabber almost instantly (very fast). Here is my code:

app.gradle:

implementation group: 'org.bytedeco', name: 'javacv-platform', version: '1.4.3'
implementation group: 'org.bytedeco', name: 'javacv', version: '1.4.3'

The grabber:

import android.graphics.Bitmap;
import android.os.AsyncTask;
import android.util.Log;

import org.bytedeco.javacpp.opencv_core;
import org.bytedeco.javacv.AndroidFrameConverter;
import org.bytedeco.javacv.FFmpegFrameGrabber;
import org.bytedeco.javacv.Frame;
import org.bytedeco.javacv.FrameGrabber;
import org.bytedeco.javacv.OpenCVFrameConverter;

import static org.bytedeco.javacpp.opencv_imgcodecs.imwrite;

class Player extends AsyncTask<BitmapRenderer, Bitmap, Object> {
    BitmapRenderer glRenderer;
    FFmpegFrameGrabber grabber = null;

    @Override
    protected Bitmap doInBackground(BitmapRenderer... objects) {
        glRenderer = objects[0];

        try {
            grabber = new FFmpegFrameGrabber("/storage/emulated/0/Download/test.mp4");
            grabber.start();
            OpenCVFrameConverter.ToMat converter = new OpenCVFrameConverter.ToMat();
            Frame grabbedImage;
            while ((grabbedImage = grabber.grabImage()) != null) {
                Log.e("Android", "Frame Grabbed " + grabbedImage.imageWidth + "x" + grabbedImage.imageHeight);
                AndroidFrameConverter frameConverter = new AndroidFrameConverter();
                Bitmap bitmap = frameConverter.convert(grabbedImage);
                // Hand the bitmap to the UI thread, where it is passed to the renderer.
                publishProgress(bitmap);

                // Also keep a Mat copy for OpenCV processing (here simply written to disk).
                opencv_core.Mat grabbedMat = converter.convert(grabbedImage);
                if (grabbedMat != null)
                    imwrite("/storage/emulated/0/Download/videoplayback.jpg", grabbedMat);
            }

        } catch (FrameGrabber.Exception e) {
            e.printStackTrace();
            Log.e("Android", e.getMessage(), e);
        }
        return null;
    }

    @Override
    protected void onProgressUpdate(Bitmap... values) {
        super.onProgressUpdate(values);
        glRenderer.draw(values[0]);
    }

    @Override
    protected void onPostExecute(Object objects) {
        super.onPostExecute(objects);
        try {
            grabber.stop();
            grabber.release();
        } catch (FrameGrabber.Exception e1) {
        }
    }
}

The renderer:

package com.example.gphspc.javacvtest;

import android.graphics.Bitmap;
import android.opengl.GLSurfaceView;
import android.opengl.GLUtils;
import android.view.ViewGroup;

import java.nio.Buffer;
import java.nio.ByteBuffer;
import java.nio.ByteOrder;

import javax.microedition.khronos.egl.EGLConfig;
import javax.microedition.khronos.opengles.GL10;

public class BitmapRenderer implements GLSurfaceView.Renderer {

    private int[] textures;
    private Bitmap bitmap;
    private GLSurfaceView glSurfaceView;
    private int parentWidth, parentHeight;
    private boolean sizeModified = false;

    public BitmapRenderer(GLSurfaceView glSurfaceView) {
        this.glSurfaceView = glSurfaceView;
        this.glSurfaceView.setEGLContextClientVersion(1);
        this.glSurfaceView.setRenderer(this);
        this.glSurfaceView.setRenderMode(GLSurfaceView.RENDERMODE_WHEN_DIRTY);
    }

    private static final float[] VERTEX_COORDINATES = new float[]{
            -1.0f, +1.0f, 0.0f,
            +1.0f, +1.0f, 0.0f,
            -1.0f, -1.0f, 0.0f,
            +1.0f, -1.0f, 0.0f
    };

    private static final float[] TEXTURE_COORDINATES = new float[]{
            0.0f, 0.0f,
            1.0f, 0.0f,
            0.0f, 1.0f,
            1.0f, 1.0f
    };

    private static final Buffer TEXCOORD_BUFFER = ByteBuffer.allocateDirect(TEXTURE_COORDINATES.length * 4)
            .order(ByteOrder.nativeOrder()).asFloatBuffer().put(TEXTURE_COORDINATES).rewind();
    private static final Buffer VERTEX_BUFFER = ByteBuffer.allocateDirect(VERTEX_COORDINATES.length * 4)
            .order(ByteOrder.nativeOrder()).asFloatBuffer().put(VERTEX_COORDINATES).rewind();

    // Called from the UI thread with each new bitmap; resizes the view once to
    // match the frame's aspect ratio, then schedules a render pass.
    public void draw(Bitmap bitmap) {
        if (bitmap == null)
            return;

        this.bitmap = bitmap;

        if (!sizeModified) {
            ViewGroup.LayoutParams layoutParams = glSurfaceView.getLayoutParams();
            Dimension newDims = getRelativeSize(new Dimension(bitmap.getWidth(), bitmap.getHeight()), glSurfaceView.getWidth(), glSurfaceView.getHeight());
            layoutParams.width = newDims.getWidth();
            layoutParams.height = newDims.getHeight();
            glSurfaceView.setLayoutParams(layoutParams);
            sizeModified = true;
        }

        glSurfaceView.requestRender();
    }

    // Scale the image dimensions to fit inside width x height while keeping the aspect ratio.
    public static Dimension getRelativeSize(Dimension dimension, int width, int height) {
        int toWidth = width, toHeight = height;

        int imgWidth = (int) dimension.getWidth();
        int imgHeight = (int) dimension.getHeight();

        if (imgWidth > imgHeight) {
            toWidth = (int) ((double) height / ((double) imgHeight / imgWidth));
            if (toWidth > width)
                toWidth = width;
            toHeight = (int) (toWidth * ((double) imgHeight / imgWidth));
        } else if (imgWidth < imgHeight) {
            toHeight = (int) ((double) width / ((double) imgWidth / imgHeight));
            if (toHeight > height)
                toHeight = height;
            toWidth = (int) (toHeight * ((double) imgWidth / imgHeight));
        }

        return new Dimension(toWidth, toHeight);
    }

    @Override
    public void onSurfaceCreated(GL10 gl, EGLConfig config) {
        textures = new int[1];
        gl.glEnable(GL10.GL_TEXTURE_2D);
        gl.glEnableClientState(GL10.GL_VERTEX_ARRAY);
        gl.glEnableClientState(GL10.GL_TEXTURE_COORD_ARRAY);

        ViewGroup.LayoutParams layoutParams = glSurfaceView.getLayoutParams();
        parentWidth = layoutParams.width;
        parentHeight = layoutParams.height;
    }

    @Override
    public void onSurfaceChanged(GL10 gl, int width, int height) {
        gl.glViewport(0, 0, width, height);
        // gl.glOrthof(0f, width, 0f, height, -1f, 1f);
    }

    @Override
    public void onDrawFrame(GL10 gl) {
        if (bitmap != null) {

            // A new texture is generated and uploaded for every frame, then drawn
            // as a full-view quad via a triangle strip.
            gl.glGenTextures(1, textures, 0);
            gl.glBindTexture(GL10.GL_TEXTURE_2D, textures[0]);

            gl.glTexParameterf(GL10.GL_TEXTURE_2D, GL10.GL_TEXTURE_MAG_FILTER, GL10.GL_LINEAR);
            gl.glTexParameterf(GL10.GL_TEXTURE_2D, GL10.GL_TEXTURE_MIN_FILTER, GL10.GL_LINEAR);
            gl.glTexParameterf(GL10.GL_TEXTURE_2D, GL10.GL_TEXTURE_WRAP_S, GL10.GL_CLAMP_TO_EDGE);
            gl.glTexParameterf(GL10.GL_TEXTURE_2D, GL10.GL_TEXTURE_WRAP_T, GL10.GL_CLAMP_TO_EDGE);

            GLUtils.texImage2D(GL10.GL_TEXTURE_2D, 0, bitmap, 0);

            gl.glActiveTexture(GL10.GL_TEXTURE0);
            gl.glBindTexture(GL10.GL_TEXTURE_2D, textures[0]);

            gl.glVertexPointer(3, GL10.GL_FLOAT, 0, VERTEX_BUFFER);
            gl.glTexCoordPointer(2, GL10.GL_FLOAT, 0, TEXCOORD_BUFFER);
            gl.glDrawArrays(GL10.GL_TRIANGLE_STRIP, 0, 4);
        }
    }
}

class Dimension {
    int width = 0, height = 0;

    public Dimension(int width, int height) {
        this.width = width;
        this.height = height;
    }

    public int getWidth() {
        return width;
    }

    public void setWidth(int width) {
        this.width = width;
    }

    public int getHeight() {
        return height;
    }

    public void setHeight(int height) {
        this.height = height;
    }
}
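The answer does not show how the grabber and the renderer are wired together; a hypothetical hookup from an Activity might look like the sketch below (the activity name, layout name, and view id are assumptions for illustration):

import android.opengl.GLSurfaceView;
import android.os.Bundle;
import android.support.v7.app.AppCompatActivity;

public class PlayerActivity extends AppCompatActivity {
    @Override
    protected void onCreate(Bundle savedInstanceState) {
        super.onCreate(savedInstanceState);
        setContentView(R.layout.activity_player);                           // assumed layout
        GLSurfaceView glView = (GLSurfaceView) findViewById(R.id.gl_view);  // assumed view id
        // BitmapRenderer configures the GLSurfaceView in its constructor, so it only
        // needs to be created and handed to the grabber task, which starts grabbing.
        new Player().execute(new BitmapRenderer(glView));
    }
}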

Regarding "java - Is there any fast alternative to the TextureView.getBitmap() function?", a similar question can be found on Stack Overflow: https://stackoverflow.com/questions/53988652/
