
java - Android Studio + OpenCV + FFmpeg


I have a problem with my code: it works only on a Genymotion device with Android 4.1.1, but not on a Genymotion device with Android 5.0.1 or on a real device (Huawei Honor 4C, Android 4.4.2).

I imported OpenCV 3.1 into Android Studio following this answer: https://stackoverflow.com/a/27421494/4244605
I added JavaCV with FFmpeg following this: https://github.com/bytedeco/javacv

Android Studio 1.5.1
minSdkVersion 15
compileSdkVersion 23

The code is only for testing.
OpenCVCameraActivity.java:

import android.app.Activity;
import android.hardware.Camera;
import android.media.AudioFormat;
import android.media.AudioRecord;
import android.media.MediaRecorder;
import android.os.Bundle;
import android.os.Environment;
import android.util.Log;
import android.view.Menu;
import android.view.MenuItem;
import android.view.MotionEvent;
import android.view.SubMenu;
import android.view.SurfaceView;
import android.view.View;
import android.view.WindowManager;
import android.widget.Toast;

import org.bytedeco.javacv.FFmpegFrameRecorder;
import org.bytedeco.javacv.Frame;
import org.opencv.android.BaseLoaderCallback;
import org.opencv.android.CameraBridgeViewBase;
import org.opencv.android.LoaderCallbackInterface;
import org.opencv.android.OpenCVLoader;
import org.opencv.core.Mat;

import java.io.File;
import java.nio.ShortBuffer;
import java.text.SimpleDateFormat;
import java.util.Date;
import java.util.List;
import java.util.ListIterator;

@SuppressWarnings("ALL")
public class OpenCVCameraActivity extends Activity implements
        CameraBridgeViewBase.CvCameraViewListener2,
        View.OnTouchListener {

    //name of activity, for DEBUGGING
    private static final String TAG = OpenCVCameraActivity.class.getSimpleName();

    private OpenCVCameraPreview mOpenCvCameraView;
    private List<Camera.Size> mResolutionList;
    private MenuItem[] mEffectMenuItems;
    private SubMenu mColorEffectsMenu;
    private MenuItem[] mResolutionMenuItems;
    private SubMenu mResolutionMenu;

    private static long frameCounter = 0;

    long startTime = 0;
    private Mat edgesMat;
    boolean recording = false;
    private int sampleAudioRateInHz = 44100;
    private int imageWidth = 1920;
    private int imageHeight = 1080;
    private int frameRate = 30;
    private Frame yuvImage = null;
    private File ffmpeg_link;
    private FFmpegFrameRecorder recorder;

    /* audio data getting thread */
    private AudioRecord audioRecord;
    private AudioRecordRunnable audioRecordRunnable;
    private Thread audioThread;
    volatile boolean runAudioThread = true;
    ShortBuffer[] samples;


    private BaseLoaderCallback mLoaderCallback = new BaseLoaderCallback(this) {
        @Override
        public void onManagerConnected(int status) {
            switch (status) {
                case LoaderCallbackInterface.SUCCESS:
                    Log.i(TAG, "OpenCV loaded successfully");
                    mOpenCvCameraView.enableView();
                    mOpenCvCameraView.setOnTouchListener(OpenCVCameraActivity.this);
                    break;
                default:
                    super.onManagerConnected(status);
                    break;
            }
        }
    };

    @Override
    protected void onCreate(Bundle savedInstanceState) {
        super.onCreate(savedInstanceState);
        if(Static.DEBUG) Log.i(TAG, "onCreate()");

        getWindow().addFlags(WindowManager.LayoutParams.FLAG_KEEP_SCREEN_ON);

        try {
            setContentView(R.layout.activity_opencv);

            mOpenCvCameraView = (OpenCVCameraPreview) findViewById(R.id.openCVCameraPreview);
            mOpenCvCameraView.setVisibility(SurfaceView.VISIBLE);
            mOpenCvCameraView.setCvCameraViewListener(this);

            //mOpenCvCameraView.enableFpsMeter();

            ffmpeg_link = new File(Environment.getExternalStorageDirectory(), "stream.mp4");
        } catch (Exception e){
            e.printStackTrace();
        }
    }

    @Override
    protected void onRestart() {
        if (Static.DEBUG) Log.i(TAG, "onRestart()");
        super.onRestart();
    }

    @Override
    protected void onStart() {
        if (Static.DEBUG) Log.i(TAG, "onStart()");
        super.onStart();
    }

    @Override
    protected void onResume() {
        if (Static.DEBUG) Log.i(TAG, "onResume()");
        super.onResume();

        if (!OpenCVLoader.initDebug()) {
            Log.i(TAG, "Internal OpenCV library not found. Using OpenCV Manager for initialization");
            OpenCVLoader.initAsync(OpenCVLoader.OPENCV_VERSION_2_4_11, this, mLoaderCallback);
        } else {
            Log.i(TAG, "OpenCV library found inside package. Using it!");
            mLoaderCallback.onManagerConnected(LoaderCallbackInterface.SUCCESS);
        }

    }

    @Override
    public boolean onCreateOptionsMenu(Menu menu) {
        if (Static.DEBUG) Log.i(TAG, "onCreateOptionsMenu()");
        super.onCreateOptionsMenu(menu);

        List<String> effects = mOpenCvCameraView.getEffectList();

        if (effects == null) {
            Log.e(TAG, "Color effects are not supported by device!");
            return true;
        }

        mColorEffectsMenu = menu.addSubMenu("Color Effect");
        mEffectMenuItems = new MenuItem[effects.size()];

        int idx = 0;
        ListIterator<String> effectItr = effects.listIterator();
        while(effectItr.hasNext()) {
            String element = effectItr.next();
            mEffectMenuItems[idx] = mColorEffectsMenu.add(1, idx, Menu.NONE, element);
            idx++;
        }

        mResolutionMenu = menu.addSubMenu("Resolution");
        mResolutionList = mOpenCvCameraView.getResolutionList();
        mResolutionMenuItems = new MenuItem[mResolutionList.size()];

        ListIterator<Camera.Size> resolutionItr = mResolutionList.listIterator();
        idx = 0;
        while(resolutionItr.hasNext()) {
            Camera.Size element = resolutionItr.next();
            mResolutionMenuItems[idx] = mResolutionMenu.add(2, idx, Menu.NONE,
                    Integer.valueOf(element.width).toString() + "x" + Integer.valueOf(element.height).toString());
            idx++;
        }

        return true;
    }

    @Override
    protected void onPause() {
        if (Static.DEBUG) Log.i(TAG, "onPause()");
        super.onPause();

        if (mOpenCvCameraView != null)
            mOpenCvCameraView.disableView();

    }

    @Override
    protected void onStop() {
        if (Static.DEBUG) Log.i(TAG, "onStop()");
        super.onStop();
    }

    @Override
    protected void onDestroy() {
        if (Static.DEBUG) Log.i(TAG, "onDestroy()");
        super.onDestroy();

        if (mOpenCvCameraView != null)
            mOpenCvCameraView.disableView();
    }

    public Mat onCameraFrame(CameraBridgeViewBase.CvCameraViewFrame inputFrame) {

        ++frameCounter;
        //Log.i(TAG, "Frame number: "+frameCounter);

        return inputFrame.rgba();
    }


    @Override
    public void onCameraViewStarted(int width, int height) {
        edgesMat = new Mat();
    }

    @Override
    public void onCameraViewStopped() {
        if (edgesMat != null)
            edgesMat.release();

        edgesMat = null;
    }

    public boolean onOptionsItemSelected(MenuItem item) {
        Log.i(TAG, "called onOptionsItemSelected; selected item: " + item);
        if (item.getGroupId() == 1)
        {
            mOpenCvCameraView.setEffect((String) item.getTitle());
            Toast.makeText(this, mOpenCvCameraView.getEffect(), Toast.LENGTH_SHORT).show();
        } else if (item.getGroupId() == 2) {
            int id = item.getItemId();
            Camera.Size resolution = mResolutionList.get(id);
            mOpenCvCameraView.setResolution(resolution);
            resolution = mOpenCvCameraView.getResolution();
            String caption = Integer.valueOf(resolution.width).toString() + "x" + Integer.valueOf(resolution.height).toString();
            Toast.makeText(this, caption, Toast.LENGTH_SHORT).show();
        }

        return true;
    }

    @Override
    public boolean onTouch(View v, MotionEvent event) {
        Log.i(TAG,"onTouch event");
        SimpleDateFormat sdf = new SimpleDateFormat("yyyy-MM-dd_HH-mm-ss");
        String currentDateandTime = sdf.format(new Date());
        String fileName = Environment.getExternalStorageDirectory().getPath() +
                "/sample_picture_" + currentDateandTime + ".jpg";
        mOpenCvCameraView.takePicture(fileName);
        Toast.makeText(this, fileName + " saved", Toast.LENGTH_SHORT).show();
        return false;
    }

    /**
     * Click to ImageButton to start recording.
     */
    public void onClickBtnStartRecord2(View v) {
        if (Static.DEBUG) Log.i(TAG, "onClickBtnStartRecord()");

        if(!recording)
            startRecording();
        else
            stopRecording();
    }

    private void startRecording() {
        if (Static.DEBUG) Log.i(TAG, "startRecording()");
        initRecorder();

        try {
            recorder.start();
            startTime = System.currentTimeMillis();
            recording = true;
            audioThread.start();
        } catch(FFmpegFrameRecorder.Exception e) {
            e.printStackTrace();
        }
    }

    private void stopRecording() {
        if (Static.DEBUG) Log.i(TAG, "stopRecording()");

        runAudioThread = false;
        try {
            audioThread.join();
        } catch(InterruptedException e) {
            e.printStackTrace();
        }
        audioRecordRunnable = null;
        audioThread = null;

        if(recorder != null && recording) {

            recording = false;
            Log.v(TAG, "Finishing recording, calling stop and release on recorder");
            try {
                recorder.stop();
                recorder.release();
            } catch(FFmpegFrameRecorder.Exception e) {
                e.printStackTrace();
            }
            recorder = null;
        }
    }


    //---------------------------------------
    // initialize ffmpeg_recorder
    //---------------------------------------
    private void initRecorder() {

        Log.w(TAG, "init recorder");
        try {

            if (yuvImage == null) {
                yuvImage = new Frame(imageWidth, imageHeight, Frame.DEPTH_UBYTE, 2);
                Log.i(TAG, "create yuvImage");
            }

            Log.i(TAG, "ffmpeg_url: " + ffmpeg_link.getAbsolutePath());
            Log.i(TAG, "ffmpeg_url: " + ffmpeg_link.exists());
            recorder = new FFmpegFrameRecorder(ffmpeg_link, imageWidth, imageHeight, 1);
            recorder.setFormat("mp4");
            recorder.setSampleRate(sampleAudioRateInHz);
            // Set in the surface changed method
            recorder.setFrameRate(frameRate);

            Log.i(TAG, "recorder initialize success");

            audioRecordRunnable = new AudioRecordRunnable();
            audioThread = new Thread(audioRecordRunnable);
            runAudioThread = true;
        } catch (Exception e){
            e.printStackTrace();
        }
    }

    //---------------------------------------------
    // audio thread, gets and encodes audio data
    //---------------------------------------------
    class AudioRecordRunnable implements Runnable {

        @Override
        public void run() {
            android.os.Process.setThreadPriority(android.os.Process.THREAD_PRIORITY_URGENT_AUDIO);

            // Audio
            int bufferSize;
            ShortBuffer audioData;
            int bufferReadResult;

            bufferSize = AudioRecord.getMinBufferSize(sampleAudioRateInHz,
                    AudioFormat.CHANNEL_IN_MONO, AudioFormat.ENCODING_PCM_16BIT);
            audioRecord = new AudioRecord(MediaRecorder.AudioSource.MIC, sampleAudioRateInHz,
                    AudioFormat.CHANNEL_IN_MONO, AudioFormat.ENCODING_PCM_16BIT, bufferSize);

            audioData = ShortBuffer.allocate(bufferSize);

            Log.d(TAG, "audioRecord.startRecording()");
            audioRecord.startRecording();

            /* ffmpeg_audio encoding loop */
            while(runAudioThread) {
                //Log.v(TAG,"recording? " + recording);
                bufferReadResult = audioRecord.read(audioData.array(), 0, audioData.capacity());
                audioData.limit(bufferReadResult);
                if(bufferReadResult > 0) {
                    Log.v(TAG, "bufferReadResult: " + bufferReadResult);
                    // If "recording" isn't true when start this thread, it never get's set according to this if statement...!!!
                    // Why? Good question...
                    if(recording) {
                        try {
                            recorder.recordSamples(audioData);
                            //Log.v(TAG,"recording " + 1024*i + " to " + 1024*i+1024);
                        } catch(FFmpegFrameRecorder.Exception e) {
                            Log.v(TAG, e.getMessage());
                            e.printStackTrace();
                        }
                    }
                }
            }
            Log.v(TAG, "AudioThread Finished, release audioRecord");

            /* encoding finish, release recorder */
            if(audioRecord != null) {
                audioRecord.stop();
                audioRecord.release();
                audioRecord = null;
                Log.v(TAG, "audioRecord released");
            }
        }
    }
}

OpenCVCameraPreview.java:

import android.content.Context;
import android.hardware.Camera;
import android.util.AttributeSet;
import android.util.Log;

import org.opencv.android.JavaCameraView;

import java.io.FileOutputStream;
import java.util.List;

public class OpenCVCameraPreview extends JavaCameraView implements Camera.PictureCallback {

    private static final String TAG = OpenCVCameraPreview.class.getSimpleName();
    private String mPictureFileName;

    public OpenCVCameraPreview(Context context, AttributeSet attrs) {
        super(context, attrs);
    }

    public List<String> getEffectList() {
        return mCamera.getParameters().getSupportedColorEffects();
    }

    public boolean isEffectSupported() {
        return (mCamera.getParameters().getColorEffect() != null);
    }

    public String getEffect() {
        return mCamera.getParameters().getColorEffect();
    }

    public void setEffect(String effect) {
        Camera.Parameters params = mCamera.getParameters();
        params.setColorEffect(effect);
        mCamera.setParameters(params);
    }

    public List<Camera.Size> getResolutionList() {
        return mCamera.getParameters().getSupportedPreviewSizes();
    }

    public void setResolution(Camera.Size resolution) {
        disconnectCamera();
        mMaxHeight = resolution.height;
        mMaxWidth = resolution.width;
        connectCamera(getWidth(), getHeight());
    }

    public Camera.Size getResolution() {
        return mCamera.getParameters().getPreviewSize();
    }

    public void takePicture(final String fileName) {
        Log.i(TAG, "Taking picture");
        this.mPictureFileName = fileName;
        // Postview and jpeg are sent in the same buffers if the queue is not empty when performing a capture.
        // Clear up buffers to avoid mCamera.takePicture to be stuck because of a memory issue
        mCamera.setPreviewCallback(null);

        // PictureCallback is implemented by the current class
        mCamera.takePicture(null, null, this);
    }

    @Override
    public void onPictureTaken(byte[] data, Camera camera) {
        Log.i(TAG, "Saving a bitmap to file");
        // The camera preview was automatically stopped. Start it again.
        mCamera.startPreview();
        mCamera.setPreviewCallback(this);

        // Write the image in a file (in jpeg format)
        try {
            FileOutputStream fos = new FileOutputStream(mPictureFileName);

            fos.write(data);
            fos.close();

        } catch (java.io.IOException e) {
            Log.e("PictureDemo", "Exception in photoCallback", e);
        }

    }
}

Gradle:

apply plugin: 'com.android.application'

android {
    compileSdkVersion 23
    buildToolsVersion "23.0.2"

    defaultConfig {
        applicationId "co.example.example"
        minSdkVersion 15
        targetSdkVersion 23
        versionCode 1
        versionName "1.0"
    }
    buildTypes {
        release {
            minifyEnabled false
            proguardFiles getDefaultProguardFile('proguard-android.txt'), 'proguard-rules.pro'
        }
    }

    packagingOptions {
        exclude 'META-INF/maven/org.bytedeco.javacpp-presets/opencv/pom.properties'
        exclude 'META-INF/maven/org.bytedeco.javacpp-presets/opencv/pom.xml'
        exclude 'META-INF/maven/org.bytedeco.javacpp-presets/ffmpeg/pom.properties'
        exclude 'META-INF/maven/org.bytedeco.javacpp-presets/ffmpeg/pom.xml'
    }
}

repositories {
    mavenCentral()
}

dependencies {
    compile fileTree(include: ['*.jar'], dir: 'libs')
    testCompile 'junit:junit:4.12'
    compile 'com.android.support:appcompat-v7:23.1.1'
    compile 'com.google.android.gms:play-services-appindexing:8.1.0'

    compile group: 'org.bytedeco', name: 'javacv', version: '1.1'
    compile group: 'org.bytedeco.javacpp-presets', name: 'opencv', version: '3.0.0-1.1', classifier: 'android-arm'
    compile group: 'org.bytedeco.javacpp-presets', name: 'opencv', version: '3.0.0-1.1', classifier: 'android-x86'
    compile group: 'org.bytedeco.javacpp-presets', name: 'ffmpeg', version: '2.8.1-1.1', classifier: 'android-arm'
    compile group: 'org.bytedeco.javacpp-presets', name: 'ffmpeg', version: '2.8.1-1.1', classifier: 'android-x86'

    compile project(':openCVLibrary310')
}

proguard-rules.pro (edited): link

jniLibs: app/src/main/jniLibs (see the note after the folder list):

armeabi armeabi-v7a arm64-v8a mips mips64 x86 x86_64
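
For reference: src/main/jniLibs is the Android Gradle plugin's default folder for prebuilt native libraries, so these ABI subfolders should be picked up without any extra configuration. The explicit, equivalent sourceSets mapping would look roughly like this:

android {
    sourceSets {
        main {
            // Default location; only needed if the .so files live somewhere else.
            jniLibs.srcDirs = ['src/main/jniLibs']
        }
    }
}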

Problem:

02-19 11:57:37.684 1759-1759/ I/OpenCVCameraActivity: onClickBtnStartRecord()
02-19 11:57:37.684 1759-1759/ I/OpenCVCameraActivity: startRecording()
02-19 11:57:37.684 1759-1759/ W/OpenCVCameraActivity: init recorder
02-19 11:57:37.691 1759-1759/ I/OpenCVCameraActivity: create yuvImage
02-19 11:57:37.691 1759-1759/ I/OpenCVCameraActivity: ffmpeg_url: /storage/emulated/0/stream.mp4
02-19 11:57:37.696 1759-1759/ I/OpenCVCameraActivity: ffmpeg_url: false
02-19 11:57:37.837 1759-1759/ W/linker: libjniavutil.so: unused DT entry: type 0x1d arg 0x18cc3
02-19 11:57:37.837 1759-1759/ W/linker: libjniavutil.so: unused DT entry: type 0x6ffffffe arg 0x21c30
02-19 11:57:37.837 1759-1759/ W/linker: libjniavutil.so: unused DT entry: type 0x6fffffff arg 0x1
02-19 11:57:37.838 1759-1759/co.example.example E/art: dlopen("/data/app/co.example.example-2/lib/x86/libjniavutil.so", RTLD_LAZY) failed: dlopen failed: cannot locate symbol "av_version_info" referenced by "libjniavutil.so"...
02-19 11:57:37.843 1759-1759/co.example.example I/art: Rejecting re-init on previously-failed class java.lang.Class<org.bytedeco.javacpp.avutil>
02-19 11:57:37.844 1759-1759/co.example.example E/AndroidRuntime: FATAL EXCEPTION: main
Process: co.example.example, PID: 1759
java.lang.IllegalStateException: Could not execute method of the activity
at android.view.View$1.onClick(View.java:4020)
at android.view.View.performClick(View.java:4780)
at android.view.View$PerformClick.run(View.java:19866)
at android.os.Handler.handleCallback(Handler.java:739)
at android.os.Handler.dispatchMessage(Handler.java:95)
at android.os.Looper.loop(Looper.java:135)
at android.app.ActivityThread.main(ActivityThread.java:5254)
at java.lang.reflect.Method.invoke(Native Method)
at java.lang.reflect.Method.invoke(Method.java:372)
at com.android.internal.os.ZygoteInit$MethodAndArgsCaller.run(ZygoteInit.java:903)
at com.android.internal.os.ZygoteInit.main(ZygoteInit.java:698)
Caused by: java.lang.reflect.InvocationTargetException
at java.lang.reflect.Method.invoke(Native Method)
at java.lang.reflect.Method.invoke(Method.java:372)
at android.view.View$1.onClick(View.java:4015)
at android.view.View.performClick(View.java:4780) 
at android.view.View$PerformClick.run(View.java:19866) 
at android.os.Handler.handleCallback(Handler.java:739) 
at android.os.Handler.dispatchMessage(Handler.java:95) 
at android.os.Looper.loop(Looper.java:135) 
at android.app.ActivityThread.main(ActivityThread.java:5254) 
at java.lang.reflect.Method.invoke(Native Method) 
at java.lang.reflect.Method.invoke(Method.java:372) 
at com.android.internal.os.ZygoteInit$MethodAndArgsCaller.run(ZygoteInit.java:903) 
at com.android.internal.os.ZygoteInit.main(ZygoteInit.java:698) 
Caused by: java.lang.UnsatisfiedLinkError: org.bytedeco.javacpp.avutil
at java.lang.Class.classForName(Native Method)
at java.lang.Class.forName(Class.java:309)
at org.bytedeco.javacpp.Loader.load(Loader.java:413)
at org.bytedeco.javacpp.Loader.load(Loader.java:381)
at org.bytedeco.javacpp.avcodec$AVPacket.<clinit>(avcodec.java:1650)
at org.bytedeco.javacv.FFmpegFrameRecorder.<init>(FFmpegFrameRecorder.java:149)
at org.bytedeco.javacv.FFmpegFrameRecorder.<init>(FFmpegFrameRecorder.java:129)
at co.example.example.OpenCVCameraActivity.initRecorder(OpenCVCameraActivity.java:320)
at co.example.example.OpenCVCameraActivity.startRecording(OpenCVCameraActivity.java:266)
at co.example.example.OpenCVCameraActivity.onClickBtnStartRecord2(OpenCVCameraActivity.java:259)
at java.lang.reflect.Method.invoke(Native Method) 
at java.lang.reflect.Method.invoke(Method.java:372) 
at android.view.View$1.onClick(View.java:4015) 
at android.view.View.performClick(View.java:4780) 
at android.view.View$PerformClick.run(View.java:19866) 
at android.os.Handler.handleCallback(Handler.java:739) 
at android.os.Handler.dispatchMessage(Handler.java:95) 
at android.os.Looper.loop(Looper.java:135) 
at android.app.ActivityThread.main(ActivityThread.java:5254) 
at java.lang.reflect.Method.invoke(Native Method) 
at java.lang.reflect.Method.invoke(Method.java:372) 
at com.android.internal.os.ZygoteInit$MethodAndArgsCaller.run(ZygoteInit.java:903) 
at com.android.internal.os.ZygoteInit.main(ZygoteInit.java:698) 

Best Answer

Edit:
When I edited the dependencies in Gradle like this:

dependencies {
    ...
    compile project(':openCVLibrary310')
    compile files('libs/ffmpeg.jar')
    compile files('libs/javacpp.jar')
    compile files('libs/javacv.jar')
    compile files('libs/ffmpeg-android-arm.jar')
    compile files('libs/ffmpeg-android-x86.jar')
}

the app threw these exceptions:

Exception Ljava/lang/UnsatisfiedLinkError; thrown while initializing Lorg/bytedeco/javacpp/avutil;
Exception Ljava/lang/NoClassDefFoundError; thrown while initializing Lorg/bytedeco/javacpp/avcodec$AVPacket;

Solution:
Remove ffmpeg-android-arm.jar and ffmpeg-android-x86.jar from Gradle.

In my case, for the ARM architecture, I extracted all of the *.so files from the lib folder of ffmpeg-android-arm.jar into the app/src/main/jniLibs/armeabi and app/src/main/jniLibs/armeabi-v7a folders.

For the x86 architecture, I extracted all of the *.so files from the lib folder of ffmpeg-android-x86.jar into the app/src/main/jniLibs/x86 folder.
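
If you would rather not unpack the jars by hand, a Gradle task along these lines should do the same extraction at build time. This is only a sketch: the libs/ paths and the in-jar lib/ folder are assumptions taken from the description above, so adjust the include pattern to whatever layout your jars actually use.

// Hypothetical helper task: unpack the native .so files from the local jars
// into app/src/main/jniLibs/<abi>. Jar locations and the in-jar 'lib/' folder
// are assumptions -- adjust them to match your setup.
task extractNativeLibs {
    doLast {
        // The ARM libraries are copied into both armeabi and armeabi-v7a.
        ['armeabi', 'armeabi-v7a'].each { abi ->
            copy {
                from zipTree('libs/ffmpeg-android-arm.jar')
                include 'lib/**/*.so'
                eachFile { it.path = "${abi}/${it.name}" }
                includeEmptyDirs = false
                into 'src/main/jniLibs'
            }
        }
        // The x86 libraries go into the x86 folder.
        copy {
            from zipTree('libs/ffmpeg-android-x86.jar')
            include 'lib/**/*.so'
            eachFile { it.path = "x86/${it.name}" }
            includeEmptyDirs = false
            into 'src/main/jniLibs'
        }
    }
}

// Make sure the libraries are in place before the app is built.
preBuild.dependsOn extractNativeLibs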

That way I could remove the exclude 'META-INF... lines from packagingOptions in Gradle, and no edits to proguard-rules.pro were needed.

A note on virtual devices: the solution does not work on Genymotion, but it does work on Google virtual devices.

Final Gradle file:

apply plugin: 'com.android.application'

android {
    compileSdkVersion 23
    buildToolsVersion "23.0.2"

    defaultConfig {
        applicationId "co.example.example"
        minSdkVersion 15
        targetSdkVersion 23
        versionCode 1
        versionName "1.0"
    }
    buildTypes {
        release {
            minifyEnabled false
            proguardFiles getDefaultProguardFile('proguard-android.txt'), 'proguard-rules.pro'
        }
    }

}

repositories {
    mavenCentral()
}

dependencies {
    compile fileTree(include: ['*.jar'], dir: 'libs')
    testCompile 'junit:junit:4.12'
    compile 'com.android.support:appcompat-v7:23.1.1'
    compile 'com.google.android.gms:play-services-appindexing:8.1.0'
    compile files('libs/ffmpeg.jar')
    compile files('libs/javacpp.jar')
    compile files('libs/javacv.jar')
    compile project(':openCVLibrary310')
}

About java - Android Studio + OpenCV + FFmpeg: the corresponding question can be found on Stack Overflow: https://stackoverflow.com/questions/35461019/
