
Android + OpenCV + Face Detection + Custom Layout


I am using:

  • Android 4.0.3
  • OpenCV 2.4.2
  • Samsung Galaxy S2

The face detection sample (from OpenCV 2.4.2) runs fine. But now I want to create a custom layout that just uses the data extracted by the face detection and build a game on top of it. The FdView surface doesn't need to take up the whole screen.

I made the changes below, but all I get is a black screen. Nothing is displayed at all.

Added an fd.xml layout:

<LinearLayout xmlns:android="http://schemas.android.com/apk/res/android"
    xmlns:tools="http://schemas.android.com/tools"
    android:layout_width="match_parent"
    android:layout_height="match_parent"
    android:orientation="horizontal">

    <org.opencv.samples.fd.FdView
        android:id="@+id/FdView"
        android:layout_width="640dp"
        android:layout_height="480dp"
        android:visibility="visible" />

    <TextView
        android:layout_width="wrap_content"
        android:layout_height="wrap_content"
        android:textColor="#FF0000"
        android:text="hi" />

</LinearLayout>

Modified the BaseLoaderCallback in FdActivity.java:

private BaseLoaderCallback mOpenCVCallBack = new BaseLoaderCallback(this) {
    @Override
    public void onManagerConnected(int status) {
        switch (status) {
            case LoaderCallbackInterface.SUCCESS:
            {
                Log.i(TAG, "OpenCV loaded successfully");

                // Load native libs after OpenCV initialization
                System.loadLibrary("detection_based_tracker");

                //EXPERIMENT
                setContentView(R.layout.fd);
                FdView surface = (FdView) (findViewById(R.id.FdView));

                surface = mView;
                // Create and set View
                mView = new FdView(mAppContext);
                mView.setDetectorType(mDetectorType);
                mView.setMinFaceSize(0.2f);
                //setContentView(mView);

                // Check native OpenCV camera
                if( !mView.openCamera() ) {
                    AlertDialog ad = new AlertDialog.Builder(mAppContext).create();
                    ad.setCancelable(false); // This blocks the 'BACK' button
                    ad.setMessage("Fatal error: can't open camera!");
                    ad.setButton("OK", new DialogInterface.OnClickListener() {
                        public void onClick(DialogInterface dialog, int which) {
                            dialog.dismiss();
                            finish();
                        }
                    });
                    ad.show();
                }
            } break;
            default:
            {
                super.onManagerConnected(status);
            } break;
        }
    }
};
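(Side note on the snippet above: `surface = mView;` throws away the FdView that was just inflated from fd.xml, and the camera is then opened on a brand-new `FdView(mAppContext)` that is never attached to the layout, which would leave the on-screen view black. Below is a minimal sketch of the wiring that was presumably intended, assuming the inflated view should drive the camera; it only reuses identifiers that already appear above and is an assumption, not a verified fix.)

// Hypothetical rewrite of the SUCCESS branch, assuming the FdView declared
// in fd.xml is the view that should open the camera.
setContentView(R.layout.fd);
mView = (FdView) findViewById(R.id.FdView);  // use the inflated view instead of creating a second one
mView.setDetectorType(mDetectorType);
mView.setMinFaceSize(0.2f);

if (!mView.openCamera()) {
    // show the same "Fatal error: can't open camera!" dialog as above
}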

Added constructors in FdView.java:

public FdView(Context context, AttributeSet attrs, int defStyle) {
    super(context, attrs, defStyle);
}

public FdView(Context context, AttributeSet attrs) {
    super(context, attrs);
}

Added constructors in SampleCvViewBase.java:

public SampleCvViewBase(Context context, AttributeSet attrs, int defStyle) {
    super(context, attrs, defStyle);
}

public SampleCvViewBase(Context context, AttributeSet attrs) {
    super(context, attrs);
}

Best Answer

I had the same problem and was also trying to figure it out. I was trying to display the image on a SurfaceView that doesn't take up the whole screen. Then I read that you can't have your camera handler class and the linked SurfaceView in separate classes, so I crushed everything into one class.

So at the moment I display the camera on the SurfaceView and copy the frame data into the mFrame variable. Basically I'm just working on getting mFrame processed (in a separate thread, in run()) and displaying the result on the SurfaceView.

Here is my code, in case it helps (please excuse the formatting, my code is also a work in progress):

package org.opencv.samples.tutorial3;

import java.io.IOException;
import java.util.List;

import org.opencv.android.BaseLoaderCallback;
import org.opencv.android.LoaderCallbackInterface;
import org.opencv.android.OpenCVLoader;

import android.app.Activity;
import android.app.AlertDialog;
import android.content.Context;
import android.content.DialogInterface;
import android.graphics.Bitmap;
import android.graphics.Canvas;
import android.graphics.Color;
import android.graphics.ImageFormat;
import android.graphics.Paint;
import android.graphics.Rect;
import android.graphics.RectF;
import android.hardware.Camera;
import android.hardware.Camera.PreviewCallback;
import android.os.Bundle;
import android.util.Log;
import android.view.SurfaceHolder;
import android.view.SurfaceView;
import android.view.Window;
import android.widget.TextView;

public class Sample3Native extends Activity implements SurfaceHolder.Callback, Runnable {

    //Camera variables
    private Camera cam;
    private boolean previewing = false;
    private SurfaceHolder mHolder;
    private SurfaceView mViewer;
    private int mFrameWidth;
    private int mFrameHeight;
    private byte[] mFrame;
    private boolean mThreadRun;
    private byte[] mBuffer;
    Sample3View viewclass;
    TextView text;
    int value = 0;
    //==========

    int framecount = 0;

    private BaseLoaderCallback mOpenCVCallBack = new BaseLoaderCallback(this) {
        @Override
        public void onManagerConnected(int status) {
            switch (status) {
                case LoaderCallbackInterface.SUCCESS:
                {
                    // Load native library after(!) OpenCV initialization
                    System.loadLibrary("native_sample");

                    //constructor for viewclass that works on frames
                    viewclass = new Sample3View();

                    //setContentView(mView);
                    //OpenCam();
                    //setContentView(R.layout.main);

                    // Create and set View
                    CameraConstruct();
                    Camopen();

                } break;
                default:
                {
                    super.onManagerConnected(status);
                } break;
            }
        }
    };

    public Sample3Native()
    {}

    @Override
    public void onCreate(Bundle savedInstanceState)
    {
        super.onCreate(savedInstanceState);
        requestWindowFeature(Window.FEATURE_NO_TITLE);

        setContentView(R.layout.main);

        OpenCVLoader.initAsync(OpenCVLoader.OPENCV_VERSION_2_4_2, this, mOpenCVCallBack);
    }

    //Camera construction
    public void CameraConstruct()
    {
        mViewer = (SurfaceView) findViewById(R.id.camera_view);
        text = (TextView) findViewById(R.id.text);
        mHolder = mViewer.getHolder();
        mHolder.addCallback(this);
        mHolder.setType(SurfaceHolder.SURFACE_TYPE_PUSH_BUFFERS);
    }

    //calls camera screen setup when screen surface changes
    public void surfaceChanged(SurfaceHolder holder, int format, int width, int height)
    {
        CamStartDisplay();
    }

    public void Camclose()
    {
        if(cam != null && previewing)
        {
            cam.setPreviewCallback(null);
            cam.stopPreview();
            cam.release();
            cam = null;

            previewing = false;
        }

        mThreadRun = false;
        viewclass.PreviewStopped();
    }

    //only open camera, and get frame data
    public void Camopen()
    {
        if(!previewing){
            cam = Camera.open();
            //rotate display
            cam.setDisplayOrientation(90);
            if (cam != null)
            {
                //copy viewed frame
                cam.setPreviewCallbackWithBuffer(new PreviewCallback()
                {
                    public void onPreviewFrame(byte[] data, Camera camera)
                    {
                        // synchronize and notify on the Activity's monitor,
                        // which is the object run() waits on
                        synchronized (Sample3Native.this)
                        {
                            System.arraycopy(data, 0, mFrame, 0, data.length);

                            Sample3Native.this.notify();
                        }
                        //text.setText(Integer.toString(value++));
                        camera.addCallbackBuffer(mBuffer);
                    }
                });

            }

        }//if not previewing
    }

    //start preview
    public void CamStartDisplay()
    {
        synchronized (this)
        {
            if(cam != null)
            {
                //stop previewing till after settings is changed
                if(previewing == true)
                {
                    cam.stopPreview();
                    previewing = false;
                }

                Camera.Parameters p = cam.getParameters();
                for(Camera.Size s : p.getSupportedPreviewSizes())
                {
                    p.setPreviewSize(s.width, s.height);
                    mFrameWidth = s.width;
                    mFrameHeight = s.height;
                    break;
                }

                p.setPreviewSize(mFrameWidth, mFrameHeight);

                List<String> FocusModes = p.getSupportedFocusModes();
                if (FocusModes.contains(Camera.Parameters.FOCUS_MODE_CONTINUOUS_VIDEO))
                {
                    p.setFocusMode(Camera.Parameters.FOCUS_MODE_CONTINUOUS_VIDEO);
                }
                cam.setParameters(p);

                //set the width and height for processing
                viewclass.setFrame(mFrameWidth, mFrameHeight);

                int size = mFrameWidth * mFrameHeight;
                size = size * ImageFormat.getBitsPerPixel(p.getPreviewFormat()) / 8;
                mBuffer = new byte[size];
                mFrame = new byte[size];
                cam.addCallbackBuffer(mBuffer);

                viewclass.PreviewStarted(mFrameWidth, mFrameHeight);

                //start display streaming
                try
                {
                    //cam.setPreviewDisplay(null);
                    cam.setPreviewDisplay(mHolder);
                    cam.startPreview();
                    previewing = true;
                }
                catch (IOException e)
                {
                    e.printStackTrace();
                }

            }//end of if cam != null
        }//synchronising
    }

    //thread gets started when the screen surface is created
    public void surfaceCreated(SurfaceHolder holder) {
        //Camopen();
        //CamStartDisplay();
        (new Thread(this)).start();
    }

    //called when the screen surface is stopped
    public void surfaceDestroyed(SurfaceHolder holder)
    {
        Camclose();
    }

    //this is the function that is run by the thread
    public void run()
    {
        mThreadRun = true;
        while (mThreadRun)
        {
            //text.setText(Integer.toString(value++));
            Bitmap bmp = null;

            synchronized (this)
            {
                try
                {
                    this.wait();

                    bmp = viewclass.processFrame(mFrame);
                }
                catch (InterruptedException e) {}
            }

            if (bmp != null)
            {
                Canvas canvas = mHolder.lockCanvas();

                if (canvas != null)
                {
                    canvas.drawBitmap(bmp, (canvas.getWidth() - mFrameWidth) / 2, (canvas.getHeight() - mFrameHeight) / 2, null);
                    mHolder.unlockCanvasAndPost(canvas);
                }
            }//if bmp != null
        }//while thread in run
    }

}//end class
The Sample3View class used in this Activity contains only the processFrame function:

package org.opencv.samples.tutorial3;

import android.content.Context;
import android.graphics.Bitmap;
import android.widget.TextView;

class Sample3View {

    private int mFrameSize;
    private Bitmap mBitmap;
    private int[] mRGBA;

    private int frameWidth;
    private int frameHeight;
    private int count = 0;

    Sample3Native samp;

    //constructor
    public Sample3View()
    {
    }

    public void setFrame(int width, int height)
    {
        frameWidth = width;
        frameHeight = height;
    }

    public void PreviewStarted(int previewWidth, int previewHeight) {
        mFrameSize = previewWidth * previewHeight;
        mRGBA = new int[mFrameSize];
        mBitmap = Bitmap.createBitmap(previewWidth, previewHeight, Bitmap.Config.ARGB_8888);
    }

    public void PreviewStopped() {
        if(mBitmap != null) {
            mBitmap.recycle();
            mBitmap = null;
        }
        mRGBA = null;
    }

    public Bitmap processFrame(byte[] data) {
        int[] rgba = mRGBA;

        // native call into the native_sample library; fills rgba from the YUV preview frame
        FindFeatures(frameWidth, frameHeight, data, rgba);

        Bitmap bmp = mBitmap;
        bmp.setPixels(rgba, 0, frameWidth, 0, 0, frameWidth, frameHeight);

        //samp.setValue(count++);
        return bmp;
    }

    public native void FindFeatures(int width, int height, byte yuv[], int[] rgba);
}
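For completeness: the Activity above calls setContentView(R.layout.main) and looks up R.id.camera_view and R.id.text, but the main.xml layout itself is not shown in the answer. Here is a minimal sketch of what it might look like, reconstructed only from those IDs (the view types come from the casts in CameraConstruct(); all sizes and ordering are assumptions):

<!-- Hypothetical main.xml; only the IDs are taken from the code above. -->
<LinearLayout xmlns:android="http://schemas.android.com/apk/res/android"
    android:layout_width="match_parent"
    android:layout_height="match_parent"
    android:orientation="vertical">

    <!-- SurfaceView the camera preview and processed frames are drawn on -->
    <SurfaceView
        android:id="@+id/camera_view"
        android:layout_width="match_parent"
        android:layout_height="0dp"
        android:layout_weight="1" />

    <!-- TextView used for debug output in the code above -->
    <TextView
        android:id="@+id/text"
        android:layout_width="wrap_content"
        android:layout_height="wrap_content" />

</LinearLayout>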

Hope this helps. If I get to a complete solution, I'll post it too. If you get there first, please post your solution as well! Enjoy.

Regarding Android + OpenCV + face detection + custom layouts, we found a similar question on Stack Overflow: https://stackoverflow.com/questions/11487452/
