gpt4 book ai didi

Android:在服务中使用 OpenCV VideoCapture

转载 作者:太空狗 更新时间:2023-10-29 13:28:08 26 4
gpt4 key购买 nike

我使用的是在 Android 设备启动时启动的服务。这是因为我不需要可见的 Activity 。到目前为止工作正常。但现在我正在尝试打开相机(在 MyService.onStart 中)并进行一些基本的图像处理。我知道默认的 Android 相机类需要一个用于视频预览的表面。这就是为什么我想使用 OpenCV 的 VideoCapture。

但是我得到这个错误:

No implementation found for native Lorg/opencv/highgui/VideoCapture;.n_VideoCapture:(I)J

我想知道这是否是因为我没有使用主 Activity 的 OpenCV 示例中使用的以下行。问题是,如何将其集成到我的服务中以及何时初始化 VideoCapture 成员。

OpenCVLoader.initAsync(OpenCVLoader.OPENCV_VERSION_2_4_5, this, mLoaderCallback);

到目前为止,这是我的代码。大部分 OpenCV 代码取自 OpenCV 的 NativeCameraView 和 CameraBridgeViewBase

package com.example.boot;

import org.opencv.android.OpenCVLoader;
import org.opencv.android.Utils;
import org.opencv.core.Mat;
import org.opencv.core.Size;
import org.opencv.highgui.Highgui;
import org.opencv.highgui.VideoCapture;

import android.app.Service;
import android.content.Intent;
import android.graphics.Bitmap;
import android.os.IBinder;
import android.util.Log;
import android.widget.Toast;

/**
 * Headless (no-Activity) service that opens the device camera through
 * OpenCV's {@code VideoCapture} and pumps frames on a background thread.
 *
 * <p>Camera work starts in {@link #onStart} and is torn down in
 * {@link #onDestroy}. Frames are converted to a cached {@link Bitmap} in
 * {@link #deliverAndDrawFrame}.</p>
 */
public final class MyService extends Service
{
    private static final String TAG = "MyService";

    /**
     * Bug fix for the reported error: the OpenCV native library must be
     * loaded BEFORE any {@code VideoCapture} is constructed, otherwise the
     * JNI binding is missing and Android reports
     * "No implementation found for native ... n_VideoCapture".
     * {@code initDebug()} statically loads the libs packaged with the APK;
     * if you rely on the OpenCV Manager app instead, call
     * {@code OpenCVLoader.initAsync(...)} at the start of onStart() and defer
     * connectCamera() to the loader callback's onManagerConnected().
     */
    private static final boolean OPENCV_LOADED = OpenCVLoader.initDebug();

    // volatile: written by the main thread, read by the worker thread.
    private volatile boolean mStopThread;
    private Thread mThread;          // frame-grabbing worker; null when not running
    private VideoCapture mCamera;    // guarded by synchronized(this); null when closed
    private int mFrameWidth;
    private int mFrameHeight;
    private int mCameraIndex = -1;   // -1 selects the default (first) camera
    private Bitmap mCacheBitmap;     // reused target for Mat -> Bitmap conversion

    @Override
    public IBinder onBind(Intent intent) {
        // Started service only; binding is not supported.
        return null;
    }

    @Override // bug fix: annotation was missing
    public void onDestroy() {
        this.disconnectCamera();

        Toast.makeText(this, "service stopped", Toast.LENGTH_LONG).show();
        Log.d(TAG, "onDestroy");

        super.onDestroy(); // bug fix: the original never chained to super
    }

    @Override
    public void onStart(Intent intent, int startid)
    {
        // NOTE(review): onStart() is deprecated; consider overriding
        // onStartCommand(Intent, int, int) instead on modern Android.
        Log.d(TAG, "service.onStart: begin");

        if (!OPENCV_LOADED) {
            // Without the native library, constructing VideoCapture would
            // throw the UnsatisfiedLinkError described in the question.
            Log.e(TAG, "OpenCV native library failed to load; camera disabled");
        } else {
            try
            {
                if (!connectCamera(640, 480))
                    Log.e(TAG, "Could not connect camera");
                else
                    Log.d(TAG, "Camera successfully connected");
            }
            catch(Exception e)
            {
                Log.e(TAG, "MyServer.connectCamera throws an exception: " + e.getMessage());
            }
        }

        Toast.makeText(this, "service started", Toast.LENGTH_LONG).show();
        Log.d(TAG, "service.onStart: end");
    }

    /**
     * Opens the camera at the requested size and starts the worker thread.
     *
     * @param width  requested frame width in pixels
     * @param height requested frame height in pixels
     * @return true if the camera opened and the worker thread was started
     */
    private boolean connectCamera(int width, int height) {
        /* First step - initialize camera connection */
        if (!initializeCamera(width, height))
            return false;

        /* now we can start update thread */
        mThread = new Thread(new CameraWorker());
        mThread.start();

        return true;
    }

    /**
     * Creates the native VideoCapture, verifies it opened, allocates the
     * bitmap cache and applies the requested frame size.
     *
     * @return false if the camera could not be opened
     */
    private boolean initializeCamera(int width, int height) {
        synchronized (this) {

            // new never returns null, so the original "mCamera == null"
            // check was dead code and has been removed.
            if (mCameraIndex == -1)
                mCamera = new VideoCapture(Highgui.CV_CAP_ANDROID);
            else
                mCamera = new VideoCapture(Highgui.CV_CAP_ANDROID + mCameraIndex);

            if (!mCamera.isOpened()) {
                // bug fix: release the native handle instead of leaking it
                mCamera.release();
                mCamera = null;
                return false;
            }

            /* Select the size that fits surface considering maximum size allowed */
            Size frameSize = new Size(width, height);

            mFrameWidth = (int)frameSize.width;
            mFrameHeight = (int)frameSize.height;

            AllocateCache();

            mCamera.set(Highgui.CV_CAP_PROP_FRAME_WIDTH, frameSize.width);
            mCamera.set(Highgui.CV_CAP_PROP_FRAME_HEIGHT, frameSize.height);
        }

        Log.i(TAG, "Selected camera frame size = (" + mFrameWidth + ", " + mFrameHeight + ")");

        return true;
    }

    /** Allocates the reusable ARGB_8888 bitmap sized to the current frame. */
    protected void AllocateCache()
    {
        mCacheBitmap = Bitmap.createBitmap(mFrameWidth, mFrameHeight, Bitmap.Config.ARGB_8888);
    }

    /** Releases the native camera, if open. Safe to call more than once. */
    private void releaseCamera() {
        synchronized (this) {
            if (mCamera != null) {
                mCamera.release();
                mCamera = null; // bug fix: prevent double-release on repeat calls
            }
        }
    }

    /**
     * Stops the frame-grabbing thread (if running), then releases the camera.
     */
    private void disconnectCamera() {
        /* 1. We need to stop thread which updating the frames
         * 2. Stop camera and release it
         */
        if (mThread != null) { // bug fix: NPE when the camera never connected
            try {
                mStopThread = true;
                mThread.join();
            } catch (InterruptedException e) {
                // Preserve the caller's interrupt status instead of swallowing it.
                Thread.currentThread().interrupt();
            } finally {
                mThread = null;
                mStopThread = false;
            }
        }

        /* Now release camera */
        releaseCamera();
    }

    /**
     * Converts the frame's RGBA Mat into the cached bitmap.
     * Called from the worker thread for every grabbed frame.
     */
    protected void deliverAndDrawFrame(NativeCameraFrame frame)
    {
        Mat modified = frame.rgba();

        if (modified != null) {
            try {
                Utils.matToBitmap(modified, mCacheBitmap);
            } catch(Exception e) {
                // Dimension/type mismatch between Mat and cached Bitmap.
                Log.e(TAG, "Mat type: " + modified);
                Log.e(TAG, "Bitmap type: " + mCacheBitmap.getWidth() + "*" + mCacheBitmap.getHeight());
                Log.e(TAG, "Utils.matToBitmap() throws an exception: " + e.getMessage());
                // removed the unused local "bmpValid" flag from the original
            }
        }
    }

    /**
     * Thin wrapper exposing the current grabbed frame as RGBA or grayscale
     * Mats (retrieve() must follow a successful grab() on the capture).
     */
    private class NativeCameraFrame
    {
        public Mat rgba() {
            mCapture.retrieve(mRgba, Highgui.CV_CAP_ANDROID_COLOR_FRAME_RGBA);
            return mRgba;
        }

        public Mat gray() {
            mCapture.retrieve(mGray, Highgui.CV_CAP_ANDROID_GREY_FRAME);
            return mGray;
        }

        public NativeCameraFrame(VideoCapture capture) {
            mCapture = capture;
            mGray = new Mat();
            mRgba = new Mat();
        }

        private VideoCapture mCapture;
        private Mat mRgba;
        private Mat mGray;
    };

    /**
     * Worker loop: grab frames until a grab fails or mStopThread is set
     * by disconnectCamera().
     */
    private class CameraWorker implements Runnable
    {
        public void run()
        {
            do
            {
                if (!mCamera.grab()) {
                    Log.e(TAG, "Camera frame grab failed");
                    break;
                }

                deliverAndDrawFrame(new NativeCameraFrame(mCamera));

            } while (!mStopThread);
        }
    }
}

最佳答案

您提到的那行代码 (initAsync) 的作用是连接 OpenCV Manager 并加载 OpenCV 的本地 (native) 库。在加载完成之前，任何 VideoCapture 的构造都会因为找不到 JNI 实现而失败（即您看到的 "No implementation found for native ..." 错误）。因此它必须是您做的第一件事：把它放在 onStart() 的开头，并把 connectCamera() 推迟到加载回调 (onManagerConnected) 成功之后再执行。

关于Android:在服务中使用 OpenCV VideoCapture,我们在Stack Overflow上找到一个类似的问题: https://stackoverflow.com/questions/19445304/

26 4 0
Copyright 2021 - 2024 cfsdn All Rights Reserved 蜀ICP备2022000587号
广告合作:1813099741@qq.com 6ren.com