gpt4 book ai didi

Android:带有预览回调的相机异步任务

转载 作者:塔克拉玛干 更新时间:2023-11-02 23:32:49 25 4
gpt4 key购买 nike

我已经设法使用自定义滤镜(灰度、色调等)进行相机预览。通过操纵 RGB 数组,然后将其绘制回 Canvas ,然后在表面 View 中显示,此自定义滤镜与预览回调一起应用。

这样做的缺点是 FPS 非常低。对于如此低的 FPS,如果我不使用 Asynctask 在后台线程中执行此操作,它就会在 UI 线程中执行过多的工作。所以我尝试使用 Asynctask 进行相机操作(我的主要目的是让 UI 即使在相机预览回调的繁重工作下仍然可以完美运行)。

但即使在我使用了 Asynctask 之后,它也没有太大帮助。所以我想知道是我的实现有误还是因为即使使用 asynctask UI 线程仍会受到影响?

我的代码 fragment 如下:

CameraActivity.java

/**
 * Standard activity entry point: inflates the camera layout. The preview
 * task itself is started in onResume() so it is recreated on every
 * foreground transition.
 */
@Override // FIX: annotation was missing, unlike every other override in this file
public void onCreate(Bundle savedInstanceState) {
    super.onCreate(savedInstanceState);
    Log.d("ACTIVITY_LIFECYCLE","CameraActivity: onCreate");
    setContentView(R.layout.camera_layout);
}

@TargetApi(Build.VERSION_CODES.HONEYCOMB)
@Override
protected void onResume() {
    // FIX: call through to the framework first (Android convention: super
    // first in onResume/onStart, last in onPause/onStop) so the activity is
    // fully resumed before the camera task starts.
    super.onResume();
    Log.d("ACTIVITY_LIFECYCLE","CameraActivity: onResume");
    // An AsyncTask instance can only be executed once, so a fresh task is
    // created each time; onPause() nulls the field out.
    if (preview == null) {
        preview = new CameraPreviewAsync(this, camera);
        preview.execute();
    }
}

/**
 * Tears the preview down: cancels the task, detaches the preview callback,
 * stops and releases the camera so other apps can use it while we are
 * backgrounded.
 */
@Override
protected void onPause() {
    Log.d("ACTIVITY_LIFECYCLE","CameraActivity: onPause");
    if (preview != null) {
        preview.cancel(true);
        camera = preview.getCamera();
        if (camera != null) {
            // FIX: clear the callback BEFORE stopping the preview so no
            // frame can be delivered into a camera that is being torn down.
            camera.setPreviewCallback(null);
            camera.stopPreview();
            camera.release();
            camera = null;
            // Keep the task's reference in sync so it can never touch the
            // released camera (the variable is null at this point).
            preview.setCamera(null);
        }
        preview = null;
    }
    super.onPause();
}

@Override
public void onDestroy(){
// Nothing camera-related to release here: all teardown already happened in
// onPause(), which the framework guarantees runs before onDestroy().
Log.d("ACTIVITY_LIFECYCLE","CameraActivity: onDestroy");
super.onDestroy();
}

CameraPreviewAsync.java:

// Log tag for this class.
private final String TAG = "CameraPreviewAsync";

// Host activity; used for findViewById and for reading the selected filter.
private CameraActivity camAct;
// Camera under preview (may be handed in null and opened in onPreExecute).
private Camera mCamera;
// Id of the opened camera (back-facing preferred, see onPreExecute).
private int cameraId;
// Surface the (filtered) preview is drawn onto.
private SurfaceView mSurfaceView;
// Holder of mSurfaceView; frames are drawn via lockCanvas/unlockCanvasAndPost.
private SurfaceHolder mHolder;

// True while the manual (filtered) preview loop is active.
private boolean isPreviewRunning = false;
// Scratch ARGB buffer, one int per preview pixel (width * height).
private int[] rgbints;
// Preview frame dimensions, read from Camera.Parameters in surfaceCreated.
private int width;
private int height;
// Reusable bitmap the decoded frame is drawn into before blitting.
private Bitmap mBitmap;

/**
 * Wires the task to its host activity, to the (possibly null) camera the
 * activity hands over, and to the preview surface from the inflated layout.
 */
public CameraPreviewAsync(CameraActivity activity, Camera camera) {
    this.camAct = activity;
    this.mCamera = camera;
    this.mSurfaceView = (SurfaceView) activity.findViewById(R.id.surfaceView);
}

/**
 * Releases the camera and swaps in a brand-new SurfaceView (reusing the
 * original id) so a subsequent preview starts against a fresh surface.
 */
public void resetSurface() {
    if (mCamera != null) {
        // FIX: detach the callback before stopping so no frame arrives
        // mid-teardown.
        mCamera.setPreviewCallback(null);
        mCamera.stopPreview();
        mCamera.release();
        mCamera = null;
    }

    // FIX: look the container up once instead of three times.
    RelativeLayout container = (RelativeLayout) camAct.findViewById(R.id.preview);
    // Grab the button bar BEFORE removeAllViews() detaches it.
    RelativeLayout buttonBar = (RelativeLayout) camAct.findViewById(R.id.buttonBar);
    container.removeAllViews();

    SurfaceView newSurface = new SurfaceView(camAct);
    newSurface.setId(R.id.surfaceView); // reuse the id so findViewById keeps working
    RelativeLayout.LayoutParams layParams = new RelativeLayout.LayoutParams(
            LayoutParams.MATCH_PARENT, LayoutParams.MATCH_PARENT);
    layParams.alignWithParent = true;
    newSurface.setLayoutParams(layParams);
    container.addView(newSurface);
    container.addView(buttonBar); // re-attach the bar removed by removeAllViews()
}

@Override
protected void onPreExecute() {
    // Runs on the UI thread before doInBackground(). NOTE(review): all view
    // setup AND the blocking Camera.open() happen here, i.e. still on the UI
    // thread - the AsyncTask does not offload the expensive part.
    Log.d(TAG, "onPreExecute");

    RelativeLayout.LayoutParams layParams = new RelativeLayout.LayoutParams(
            LayoutParams.MATCH_PARENT, LayoutParams.MATCH_PARENT);
    layParams.alignWithParent = true;
    mSurfaceView.setLayoutParams(layParams);

    // Hide the camera-swap button on single-camera devices.
    // BUG FIX: removeViewAt() expects a child INDEX; the original passed the
    // resource id R.id.cameraSwap, which removes the wrong child or throws
    // IndexOutOfBoundsException. Remove the view by reference instead
    // (ViewGroup.removeView ignores a null argument).
    if (Camera.getNumberOfCameras() < 2) {
        RelativeLayout buttonBar = (RelativeLayout) camAct.findViewById(R.id.buttonBar);
        buttonBar.removeView(buttonBar.findViewById(R.id.cameraSwap));
    }

    // Prefer the back-facing camera; fall back to the front-facing one.
    cameraId = findBackFacingCamera();
    if (cameraId < 0) {
        cameraId = findFrontFacingCamera();
    }
    if (cameraId < 0) {
        Toast.makeText(camAct, "No camera found.", Toast.LENGTH_LONG).show();
    } else {
        mCamera = Camera.open(cameraId);
    }

    // Rebuild the options menu now that camera availability is known.
    camAct.invalidateOptionsMenu();

    // Enable auto-focus when the hardware supports it.
    if (mCamera != null) {
        Camera.Parameters params = mCamera.getParameters();
        List<String> focusModes = params.getSupportedFocusModes();
        if (focusModes.contains(Camera.Parameters.FOCUS_MODE_AUTO)) {
            params.setFocusMode(Camera.Parameters.FOCUS_MODE_AUTO);
            mCamera.setParameters(params);
        }
    }

    super.onPreExecute();
}

@Override
protected Void doInBackground(Void... params) {
//Things to do in the background thread
// NOTE(review): this only registers the surface callback - no per-frame work
// happens here. As explained in the accompanying answer, this worker thread
// has no Looper, so Camera dispatches onPreviewFrame on the MAIN thread
// anyway; the AsyncTask therefore does not move the heavy preview
// processing off the UI thread.
Log.d(TAG,"doInBackground");

mHolder = mSurfaceView.getHolder();
mHolder.addCallback(surfaceCallback);

return null;
}

@Override
protected void onPostExecute(Void values) {
//Things to do after doInBackground
// Nothing to do here: all drawing happens in the preview/surface callbacks,
// and doInBackground() produces no result.
Log.d(TAG,"onPostExecute");

}

@Override
protected void onCancelled(){
// Invoked after cancel(true); the activity's onPause() performs the actual
// camera teardown, so only the default behavior is needed here.
super.onCancelled();
}

/*
* ************************************************************************************
* SURFACEHOLDER CALLBACK
* ************************************************************************************
*/
/**
 * Surface lifecycle handling. In NORMAL_FILTER mode the camera draws
 * straight onto the surface; in any other mode frames are requested via
 * previewCallback and rendered manually.
 */
SurfaceHolder.Callback surfaceCallback = new SurfaceHolder.Callback() {

    @Override
    public void surfaceCreated(SurfaceHolder holder) {
        Log.d(TAG,"surfaceCreated!!");
        if (CameraActivity.filterMode == CameraActivity.NORMAL_FILTER) {
            try {
                if (mCamera != null) {
                    // BUG FIX: setPreviewDisplay() must be called BEFORE
                    // startPreview() (Camera API contract); the original
                    // called them in the opposite order.
                    mCamera.setPreviewDisplay(holder);
                    mCamera.startPreview();
                } else {
                    Log.d(TAG,"CAMERA IS NULL in surfaceCreated!!");
                }
            } catch (IOException exception) {
                Log.e(TAG, "IOException caused by setPreviewDisplay()", exception);
            }
        } else {
            // Filtered mode: no preview display is attached; frames arrive
            // via previewCallback and are drawn by hand.
            synchronized (mSurfaceView) {
                if (isPreviewRunning) {
                    return;
                }
                mSurfaceView.setWillNotDraw(false);
                if (mCamera != null) {
                    isPreviewRunning = true;
                    Camera.Parameters p = mCamera.getParameters();
                    // FIX: removed unused local (getSupportedPreviewSizes()
                    // result was never read).

                    Size size = p.getPreviewSize();
                    width = size.width;
                    height = size.height;

                    p.setPreviewFormat(ImageFormat.NV21);
                    showSupportedCameraFormats(p);
                    mCamera.setParameters(p);

                    // One ARGB int per preview pixel.
                    rgbints = new int[width * height];

                    mCamera.startPreview();
                    mCamera.setPreviewCallback(previewCallback);
                }
            }
        }
    }

    @Override
    public void surfaceDestroyed(SurfaceHolder holder) {
        Log.d(TAG,"surfaceDestroyed!");

        if (CameraActivity.filterMode == CameraActivity.NORMAL_FILTER) {
            if (mCamera != null) {
                mCamera.stopPreview();
                isPreviewRunning = false;
            }
        } else {
            synchronized (mSurfaceView) {
                if (mCamera != null) {
                    // Detach the callback first so no late frame is handed
                    // to a stopped preview.
                    mCamera.setPreviewCallback(null);
                    mCamera.stopPreview();
                    isPreviewRunning = false;
                }
            }
        }
    }

    @Override
    public void surfaceChanged(SurfaceHolder holder, int format, int width,
            int height) {
        // Size/format changes are ignored; the preview geometry is fixed at
        // creation time.
        Log.d(TAG,"surfaceChanged!");
    }
};


/*
* ************************************************************************************
* CAMERA PREVIEW CALLBACK
* ************************************************************************************
*/

/**
 * Per-frame handler for filtered mode: decodes the NV21 frame to RGB,
 * applies the currently selected filter, and blits the result centered
 * onto the surface.
 */
Camera.PreviewCallback previewCallback = new Camera.PreviewCallback() {

    @Override
    public void onPreviewFrame(byte[] data, Camera camera) {
        if (!isPreviewRunning || mHolder == null) {
            return;
        }

        Canvas resCanvas = null;
        try {
            synchronized (mHolder) {
                resCanvas = mHolder.lockCanvas(null);
                // BUG FIX: lockCanvas() may return null (surface not ready
                // or being destroyed); the original dereferenced it blindly
                // and relied on the catch-all below to swallow the NPE.
                if (resCanvas == null) {
                    return;
                }
                int resCanvasW = resCanvas.getWidth();
                int resCanvasH = resCanvas.getHeight();

                // Lazily allocate the scratch bitmap once per preview size.
                if (mBitmap == null) {
                    mBitmap = Bitmap.createBitmap(width, height, Bitmap.Config.ARGB_8888);
                }

                decodeYUV(rgbints, data, width, height);

                Canvas canvas = new Canvas(mBitmap);

                // Apply the selected filter. FIX: hoist the getter (was
                // called once per comparison) and chain with else-if - the
                // mode is a single value, so at most one branch matches.
                String filter = camAct.getCustomFilter();
                if (filter.equalsIgnoreCase("NORMAL")) { /* leave rgbints untouched */ }
                else if (filter.equalsIgnoreCase("GRAYSCALE")) rgbints = grayscale(rgbints);
                else if (filter.equalsIgnoreCase("INVERT")) rgbints = invert(rgbints);
                else if (filter.equalsIgnoreCase("BOOSTRED")) rgbints = boostColor(rgbints,1);
                else if (filter.equalsIgnoreCase("BOOSTGREEN")) rgbints = boostColor(rgbints,2);
                else if (filter.equalsIgnoreCase("BOOSTBLUE")) rgbints = boostColor(rgbints,3);
                else if (filter.equalsIgnoreCase("NOISE")) rgbints = noise(rgbints);
                else if (filter.equalsIgnoreCase("HUE")) rgbints = hue(rgbints);
                else if (filter.equalsIgnoreCase("SATURATION")) rgbints = saturation(rgbints);
                else if (filter.equalsIgnoreCase("ENGRAVE")) rgbints = engrave(rgbints);
                else if (filter.equalsIgnoreCase("EMBOSS")) rgbints = emboss(rgbints);

                // Draw the decoded image into the scratch bitmap...
                canvas.drawBitmap(rgbints, 0, width, 0, 0, width, height, false, null);

                // ...then blit it centered on the surface canvas.
                resCanvas.drawBitmap(mBitmap,
                        resCanvasW - ((width + resCanvasW) >> 1),
                        resCanvasH - ((height + resCanvasH) >> 1),
                        null);
            }
        } catch (Exception e){
            e.printStackTrace();
        } finally {
            // Always unlock in a finally so an exception above never leaves
            // the Surface in an inconsistent (locked) state.
            if (resCanvas != null) {
                mHolder.unlockCanvasAndPost(resCanvas);
            }
        }
    }
};

非常感谢任何帮助! :) 提前谢谢大家!

最佳答案

也许我的回答对你来说太晚了,但我正在研究同一个主题,所以我想无论如何我都会分享我的发现......

首先,如果 Camera 的 open() 是在 AsyncTask 上调用的,而该线程随后退出、不复存在——我们当然不能再指望回调来自它,对吧。所以,如果我们想要回调,就需要有一个线程,它的生命周期至少与我们期望收到回调的时间一样长。

但等等,还有更多……Camera.PreviewCallback 的文档写得不算最清楚,但其中一个隐晦的提示是:"此回调在调用 open(int) 的事件线程(event thread)上被调用。""事件"线程是什么意思?嗯,文档没有说清楚——但查看 Android 源码并加以试验后可以发现:他们需要的是一个拥有 Looper 的线程。可能细节太多了,但在 Camera 的构造函数(由 open 方法调用)中,有一段代码会先尝试获取当前线程的 Looper;如果不存在,它会退而获取主线程的 Looper——而主线程的 Looper 存活于 UI 线程上。Camera 随后使用基于该 Looper 初始化的 Handler 来分派(dispatch)回调及其他一些方法。现在您大概明白了:即使您从另一个线程打开相机,回调仍会落在主线程上——因为您的工作线程没有 Looper,所以相机默认使用了主线程的 Looper。

我从我的工作线程获得了回调,我在这些线程的方法中使用了 HandlerThread:

private void startCamera() {
if (mCameraThread == null) {
mCameraThread = new HandlerThread(CAMERA_THREAD_NAME);
mCameraThread.start();
mCameraHandler = new Handler(mCameraThread.getLooper());
}
mCameraHandler.post(new Runnable() {
@Override
public void run() {
try {
mCamera = Camera.open();

...

我使用调试器确认我的 onPreviewFrame 确实在工作线程上运行。我还在 UI 线程上运行动画,在我将帧处理从主线程移出之前它是不稳定的,但现在像黄油一样流畅。

请注意,如果您终止工作线程,那么您的回调当然也会停止,Camera(更确切地说是 Handler)会提示您尝试使用死线程。

顺便说一句,作为一种替代解决方案,当然可以在主线程上调用回调,但可以将帧数据的处理委托(delegate)给单独的线程。

关于Android:带有预览回调的相机异步任务,我们在Stack Overflow上找到一个类似的问题: https://stackoverflow.com/questions/19216893/

25 4 0
Copyright 2021 - 2024 cfsdn All Rights Reserved 蜀ICP备2022000587号
广告合作:1813099741@qq.com 6ren.com