gpt4 book ai didi

Fragment 中的 Android AR

转载 作者:行者123 更新时间:2023-11-29 01:44:00 26 4
gpt4 key购买 nike

我一直在尝试让一些 AR(增强现实)SDK 在 Fragment 中运行。但是,我似乎无法让它工作。

我找到了一些让 Metaio(AR 框架)在 fragment 中工作的人的代码。

所以我将该代码应用到我自己的项目中,它可以正常工作,但代码没有被编程为扫描图片。我想用它扫描图片标记。

我从Metaio的示例项目中复制了一些代码来扫描图片标记,但是它不起作用。

现在它在下面这一行失败了(这一行之后的调试日志完全没有被输出):

trackingConfigFile = AssetsManager.getAssetPath(getActivity().getApplicationContext(), "AEDApp/Assets/TrackingData_PictureMarker.xml");

这是我的完整代码:

package com.example.bt6_aedapp;

import android.app.Application;
import android.content.res.Configuration;
import android.hardware.Camera.CameraInfo;
import android.os.Bundle;
import android.support.v4.app.Fragment;
import android.util.Log;
import android.view.LayoutInflater;
import android.view.View;
import android.view.ViewGroup;

import com.metaio.cloud.plugin.MetaioCloudPlugin;
import com.metaio.sdk.MetaioDebug;
import com.metaio.sdk.MetaioSurfaceView;
import com.metaio.sdk.SensorsComponentAndroid;
import com.metaio.sdk.jni.ERENDER_SYSTEM;
import com.metaio.sdk.jni.ESCREEN_ROTATION;
import com.metaio.sdk.jni.IGeometry;
import com.metaio.sdk.jni.IMetaioSDKAndroid;
import com.metaio.sdk.jni.IMetaioSDKCallback;
import com.metaio.sdk.jni.MetaioSDK;
import com.metaio.sdk.jni.TrackingValuesVector;
import com.metaio.sdk.jni.Vector3d;
import com.metaio.tools.Screen;
import com.metaio.tools.SystemInfo;
import com.metaio.tools.io.AssetsManager;

public class fragmentA extends Fragment implements MetaioSurfaceView.Callback {

private Application mAppContext;

private ViewGroup mRootLayout;

String trackingConfigFile;

private MetaioSDKCallbackHandler mCallback;

private IGeometry mModel;

private IMetaioSDKAndroid mMetaioSDK;

private MetaioSurfaceView mSurfaceView;

private static boolean mNativeLibsLoaded = false;

private boolean mRendererInitialized;

private SensorsComponentAndroid mSensors;

static {
mNativeLibsLoaded = IMetaioSDKAndroid.loadNativeLibs();
}

@Override
public void onCreate(Bundle savedInstanceState) {

MetaioCloudPlugin.startJunaio(null, getActivity().getApplicationContext());

super.onCreate(savedInstanceState);
Log.d("LifeCycle", "onCreate");

mAppContext = getActivity().getApplication();
mMetaioSDK = null;
mSurfaceView = null;
mRendererInitialized = false;
try {

mCallback = new MetaioSDKCallbackHandler();

if (!mNativeLibsLoaded){
throw new Exception("Unsupported platform, failed to load the native libs");
}

// Create sensors component
mSensors = new SensorsComponentAndroid(mAppContext);

// Create Unifeye Mobile by passing Activity instance and
// application signature
mMetaioSDK = MetaioSDK.CreateMetaioSDKAndroid(getActivity(), getResources().getString(R.string.metaioSDKSignature));
mMetaioSDK.registerSensorsComponent(mSensors);

} catch (Throwable e) {
MetaioDebug.log(Log.ERROR, "ArCameraFragment.onCreate: failed to create or intialize metaio SDK: " + e.getMessage());
return;
}
}

@Override
public View onCreateView(LayoutInflater inflater, ViewGroup container, Bundle savedInstanceState) {
Log.d("LifeCycle", "onCreateView");
View view = inflater.inflate(R.layout.fragment_a, container, false);
mRootLayout = (ViewGroup)getActivity().findViewById(R.id.pager);
return view;
}

@Override
public void onStart() {
super.onStart();
Log.d("LifeCycle", "onStart");

if(mMetaioSDK == null){
return;
}
MetaioDebug.log("ArCameraFragment.onStart()");

try {
mSurfaceView = null;

// Start camera
startCamera();

// Add Unifeye GL Surface view
mSurfaceView = new MetaioSurfaceView(mAppContext);
mSurfaceView.registerCallback(this);
mSurfaceView.setKeepScreenOn(true);

MetaioDebug.log("ArCameraFragment.onStart: addContentView(mMetaioSurfaceView)");
mRootLayout.addView(mSurfaceView, new ViewGroup.LayoutParams(ViewGroup.LayoutParams.WRAP_CONTENT,
ViewGroup.LayoutParams.WRAP_CONTENT));
mSurfaceView.setZOrderMediaOverlay(true);

} catch (Exception e) {
MetaioDebug.log(Log.ERROR, "Error creating views: " + e.getMessage());
MetaioDebug.printStackTrace(Log.ERROR, e);
}
}

@Override
public void onResume() {
super.onResume();
Log.d("LifeCycle", "onResume");

// make sure to resume the OpenGL surface
if (mSurfaceView != null) {
mSurfaceView.onResume();
}

if(mMetaioSDK != null){
mMetaioSDK.resume();
}
}

@Override
public void onPause() {
super.onPause();

Log.d("LifeCycle", "onPause");

// pause the OpenGL surface
if (mSurfaceView != null) {
mSurfaceView.onPause();
}

if (mMetaioSDK != null) {
// Disable the camera
mMetaioSDK.pause();
}

}

@Override
public void onStop() {
super.onStop();

Log.d("LifeCycle", "onStop");

if (mMetaioSDK != null) {
// Disable the camera
mMetaioSDK.stopCamera();
}

if (mSurfaceView != null) {
mRootLayout.removeView(mSurfaceView);
}

System.runFinalization();
System.gc();
}

@Override
public void onDestroy() {
super.onDestroy();

mCallback.delete();
mCallback = null;

/*Log.d("LifeCycle", "onDestroy");

try {
mRendererInitialized = false;
} catch (Exception e) {
MetaioDebug.printStackTrace(Log.ERROR, e);
}

MetaioDebug.log("ArCameraFragment.onDestroy");

if (mMetaioSDK != null) {
mMetaioSDK.delete();
mMetaioSDK = null;
}

MetaioDebug.log("ArCameraFragment.onDestroy releasing sensors");
if (mSensors != null) {
mSensors.registerCallback(null);
mSensors.release();
mSensors.delete();
mSensors = null;
}

// Memory.unbindViews(activity.findViewById(android.R.id.content));

System.runFinalization();
System.gc();*/
}

@Override
public void onConfigurationChanged(Configuration newConfig) {
super.onConfigurationChanged(newConfig);
final ESCREEN_ROTATION rotation = Screen.getRotation(getActivity());
mMetaioSDK.setScreenRotation(rotation);
MetaioDebug.log("onConfigurationChanged: " + rotation);
}

@Override
public void onDrawFrame() {

if(mMetaioSDK != null) {
TrackingValuesVector poses = mMetaioSDK.getTrackingValues();

if(poses.size() != 0) {
mModel.setCoordinateSystemID(poses.get(0).getCoordinateSystemID());
}
}

// Log.d("LifeCycle", "onDrawFrame");
/* if (mRendererInitialized) {
mMetaioSDK.render();
} */
}

@Override
public void onSurfaceCreated() {
Log.d("LifeCycle", "onSurfaceCreated");

try {
if (!mRendererInitialized) {
mMetaioSDK.initializeRenderer(mSurfaceView.getWidth(), mSurfaceView.getHeight(), Screen.getRotation(getActivity()),
ERENDER_SYSTEM.ERENDER_SYSTEM_OPENGL_ES_2_0);
mRendererInitialized = true;
} else {
MetaioDebug.log("ArCameraFragment.onSurfaceCreated: Reloading textures...");
mMetaioSDK.reloadTextures();
}

MetaioDebug.log("ArCameraFragment.onSurfaceCreated: Registering audio renderer...");
// mMetaioSDK.registerAudioCallback(mSurfaceView.getAudioRenderer());
mMetaioSDK.registerCallback(mCallback);

MetaioDebug.log("ARViewActivity.onSurfaceCreated");
} catch (Exception e) {
MetaioDebug.log(Log.ERROR, "ArCameraFragment.onSurfaceCreated: " + e.getMessage());
}

mSurfaceView.queueEvent(new Runnable() {
@Override
public void run() {
loadContents();
}
});

}

private void loadContents() {
try {
trackingConfigFile = AssetsManager.getAssetPath(getActivity().getApplicationContext(), "AEDApp/Assets/TrackingData_PictureMarker.xml");

boolean result = mMetaioSDK.setTrackingConfiguration(trackingConfigFile);

Log.d("result", Boolean.toString(result));

MetaioDebug.log("Tracking data loaded: " + result);

String aedLogo = AssetsManager.getAssetPath(getActivity().getApplicationContext(), "AEDApp/Assets/metaioman.md2");
Log.d("aedLogo", "aaa: " + aedLogo);
if(aedLogo != null) {
mModel = mMetaioSDK.createGeometry(aedLogo);

if(mModel != null) {
mModel.setScale(new Vector3d(4.0f, 4.0f, 4.0f));
}
else {
MetaioDebug.log(Log.ERROR, "Error loading geometry: " + aedLogo);
}
}
} catch (Exception e) {
e.printStackTrace();
}
}

@Override
public void onSurfaceChanged(int width, int height) {
Log.d("LifeCycle", "onSurfaceChanged");

mMetaioSDK.resizeRenderer(width, height);
}

@Override
public void onSurfaceDestroyed() {
Log.d("LifeCycle", "onSurfaceDestroyed");

MetaioDebug.log("ArCameraFragment.onSurfaceDestroyed(){");
mSurfaceView = null;
// mMetaioSDK.registerAudioCallback(null);
}

protected void startCamera() {
final int cameraIndex = SystemInfo.getCameraIndex(CameraInfo.CAMERA_FACING_BACK);
if (mMetaioSDK != null) {
mMetaioSDK.startCamera(cameraIndex, 640, 480);
}
}

final class MetaioSDKCallbackHandler extends IMetaioSDKCallback {
@Override
public void onTrackingEvent(final TrackingValuesVector trackingValues) {

super.onTrackingEvent(trackingValues);
if(!trackingValues.isEmpty() && trackingValues.get(0).isTrackingState()){
Log.d("Track", "NOT EMPTY");
}
}
}
}

我真的希望有人能帮我解决这个问题,因为我想不通..:(


编辑

抛出的错误 (e.printStackTrace()) 是:

03-24 20:25:19.068: W/System.err(28062): java.lang.NullPointerException: null string

03-24 20:25:19.068: W/System.err(28062): at com.metaio.sdk.jni.MetaioSDKJNI.IMetaioSDK_setTrackingConfiguration__SWIG_1(Native Method)

03-24 20:25:19.068: W/System.err(28062): at com.metaio.sdk.jni.IMetaioSDK.setTrackingConfiguration(IMetaioSDK.java:106)

03-24 20:25:19.068: W/System.err(28062): at com.example.bt6_aedapp.fragmentA.loadContents(fragmentA.java:278)

03-24 20:25:19.068: W/System.err(28062): at com.example.bt6_aedapp.fragmentA.access$0(fragmentA.java:274)

03-24 20:25:19.068: W/System.err(28062): at com.example.bt6_aedapp.fragmentA$1.run(fragmentA.java:268)

03-24 20:25:19.068: W/System.err(28062): at android.opengl.GLSurfaceView$GLThread.guardedRun(GLSurfaceView.java:1463)

03-24 20:25:19.068: W/System.err(28062): at android.opengl.GLSurfaceView$GLThread.run(GLSurfaceView.java:1240)

我想用它做什么:

能够“扫描”图片 ( https://encrypted-tbn2.gstatic.com/images?q=tbn:ANd9GcQFqKIurD3QMU0zVeiwEhtm1twLmTCDlnFulfCwDkxTA1_XQjIQ ) 并在应用程序中检测图像。该图像在项目的 Assets 文件夹中的应用程序中被引用,我制作了一个 xml 文件,其中它的标记被定义为 Metaio 网站上的说明。检测之后,我将做一些数据库工作,但现在我需要让检测部分正常工作。

编辑如果有人知道我如何在 Fragments 中制作另一个 AR 框架,我很想知道。

最佳答案

我对 fragment 了解不多,但就那个空字符串(null string)错误而言,我认为原因是您还没有把 Assets 提取(extract)到文件系统。

在此视频中 http://youtu.be/KVtCi-WwmFU?t=30m29s 有相关解释。基本上,你要做的就是添加这段代码:

/**
 * Background task that unpacks the bundled assets to the file system so that
 * AssetsManager.getAssetPath(..) can resolve them afterwards.
 * Returns true on success, false when extraction fails with an I/O error.
 */
private class AssetsExtracter extends AsyncTask<Integer, Integer, Boolean> {
    @Override
    protected Boolean doInBackground(Integer... unused) {
        boolean extracted = true;
        try {
            AssetsManager.extractAllAssets(getApplicationContext(), BuildConfig.DEBUG);
        } catch (IOException e) {
            MetaioDebug.printStackTrace(Log.ERROR, e);
            extracted = false;
        }
        return extracted;
    }
}

到您的 Activity (或者在这种情况下,我猜是您的 fragment )。然后你必须添加这个类的一个字段,比如

private AssetsExtracter mTask;

在你放置的 onCreate() 方法中

mTask = new AssetsExtracter();
mTask.execute(0);

之后您的 Assets 应该可以从 AssetsManager.getAssetPath(..) 获得,并且它不应再返回空字符串。

关于Fragment 中的 Android AR,我们在Stack Overflow上找到一个类似的问题: https://stackoverflow.com/questions/22613654/

26 4 0
Copyright 2021 - 2024 cfsdn All Rights Reserved 蜀ICP备2022000587号
广告合作:1813099741@qq.com 6ren.com