gpt4 book ai didi

java - Microsoft Face API(Android)中缺少属性类型。如何使用 Emotion 属性?

转载 作者:太空狗 更新时间:2023-10-29 13:49:31 24 4
gpt4 key购买 nike

我正在尝试将 Microsoft 的 Face API 与 Android Studio 结合使用来制作应用程序。现在,我只是在玩弄 API,但我遇到了问题。在官方文档页面列出的面部属性类型中,我可以从中挑选的只有 Age、FacialHair、Gender、HeadPose 和 Smile 这几种。我很想使用 Emotion 属性类型,但编译器无法识别该符号。

我得到的错误:Cannot resolve symbol 'Emotion'

这里是相关的代码部分:

Face[] result = faceServiceClient.detect(inputStreams[0], true, true, new FaceServiceClient.FaceAttributeType[]{FaceServiceClient.FaceAttributeType.Emotion});

这是我的 MainActivity 中的完整代码:

package me.ianterry.face;

import android.app.ProgressDialog;
import android.graphics.Bitmap;
import android.graphics.BitmapFactory;
import android.graphics.Canvas;
import android.graphics.Color;
import android.graphics.Paint;
import android.os.AsyncTask;
import android.support.v7.app.AppCompatActivity;
import android.os.Bundle;
import android.util.Log;
import android.view.View;
import android.widget.Button;
import android.widget.ImageView;

import com.microsoft.projectoxford.face.*;
import com.microsoft.projectoxford.face.contract.*;

import java.io.ByteArrayInputStream;
import java.io.ByteArrayOutputStream;
import java.io.InputStream;

public class MainActivity extends AppCompatActivity {
    // NOTE(review): never ship a literal API key in source; load it from
    // BuildConfig / local.properties or a resource excluded from VCS.
    private FaceServiceClient faceServiceClient =
            new FaceServiceRestClient("https://westcentralus.api.cognitive.microsoft.com/face/v1.0", "MY KEY");

    private ImageView mImageView;
    private Button mProcessButton;
    private ProgressDialog progress;
    public final String TAG = "attributeMethod";

    /**
     * Loads a test image into the ImageView and wires the process button to
     * {@link #detectAndFrame(Bitmap)}.
     */
    @Override
    protected void onCreate(Bundle savedInstanceState) {
        super.onCreate(savedInstanceState);
        setContentView(R.layout.activity_main);

        final Bitmap myBitmap = BitmapFactory.decodeResource(getResources(), R.drawable.test_image);
        mImageView = findViewById(R.id.image);
        mImageView.setImageBitmap(myBitmap);

        mProcessButton = findViewById(R.id.btn_process);
        mProcessButton.setOnClickListener(new View.OnClickListener() {
            @Override
            public void onClick(View view) {
                detectAndFrame(myBitmap);
            }
        });
        progress = new ProgressDialog(this);
    }

    /**
     * Sends the bitmap to the Face API on a background thread, then draws a
     * rectangle around each detected face and logs its attributes.
     *
     * @param myBitmap the image to analyze; it is JPEG-encoded before upload
     */
    private void detectAndFrame(final Bitmap myBitmap) {
        ByteArrayOutputStream outputStream = new ByteArrayOutputStream();
        myBitmap.compress(Bitmap.CompressFormat.JPEG, 100, outputStream);
        ByteArrayInputStream inputStream = new ByteArrayInputStream(outputStream.toByteArray());

        AsyncTask<InputStream, String, Face[]> detectTask = new AsyncTask<InputStream, String, Face[]>() {

            @Override
            protected void onPostExecute(Face[] faces) {
                progress.dismiss();
                if (faces == null) {
                    return;
                }
                mImageView.setImageBitmap(drawFaceRectangleOnBitmap(myBitmap, faces));
                attributeMethod(faces);
            }

            @Override
            protected void onPreExecute() {
                super.onPreExecute();
                progress.show();
            }

            @Override
            protected void onProgressUpdate(String... values) {
                super.onProgressUpdate(values);
                progress.setMessage(values[0]);
            }

            @Override
            protected Face[] doInBackground(InputStream... inputStreams) {
                try {
                    publishProgress("Detecting...");
                    // BUG FIX: the original requested ONLY Emotion, yet
                    // attributeMethod() logs age and gender — attributes the
                    // service does not populate unless they are requested.
                    // Request all three so every logged field is meaningful.
                    // NOTE(review): the Emotion constant only exists in Face
                    // SDK >= 1.2.5 (e.g. com.microsoft.projectoxford:face:1.4.1);
                    // older versions fail with "Cannot resolve symbol 'Emotion'".
                    Face[] result = faceServiceClient.detect(
                            inputStreams[0],
                            true,  // returnFaceId
                            true,  // returnFaceLandmarks
                            new FaceServiceClient.FaceAttributeType[]{
                                    FaceServiceClient.FaceAttributeType.Age,
                                    FaceServiceClient.FaceAttributeType.Gender,
                                    FaceServiceClient.FaceAttributeType.Emotion
                            });
                    if (result == null) {
                        publishProgress("Detection finished. Nothing detected.");
                        return null;
                    }
                    publishProgress(String.format("Detection Finished. %d face(s) detected", result.length));
                    return result;
                } catch (Exception e) {
                    // Best-effort UI feedback; the cause is intentionally not
                    // re-thrown so a network/API error cannot crash the app.
                    publishProgress("Detection failed.");
                    return null;
                }
            }
        };
        detectTask.execute(inputStream);
    }

    /**
     * Returns a mutable copy of {@code myBitmap} with a white rectangle drawn
     * around each detected face. The input bitmap is not modified.
     */
    private static Bitmap drawFaceRectangleOnBitmap(Bitmap myBitmap, Face[] faces) {
        Bitmap bitmap = myBitmap.copy(Bitmap.Config.ARGB_8888, true);
        Canvas canvas = new Canvas(bitmap);
        Paint paint = new Paint();
        paint.setAntiAlias(true);
        paint.setStyle(Paint.Style.STROKE);
        paint.setColor(Color.WHITE);
        int strokeWidth = 8;
        paint.setStrokeWidth(strokeWidth);
        if (faces != null) {
            for (Face face : faces) {
                FaceRectangle faceRectangle = face.faceRectangle;
                canvas.drawRect(faceRectangle.left,
                        faceRectangle.top,
                        faceRectangle.left + faceRectangle.width,
                        faceRectangle.top + faceRectangle.height,
                        paint);
            }
        }
        return bitmap;
    }

    /**
     * Logs the attributes returned for each face. Emotion scores are doubles
     * in [0, 1]; the field is only populated when the Emotion attribute was
     * requested in the detect call (and the SDK supports it — see above).
     */
    private void attributeMethod(Face[] faces) {
        for (Face face : faces) {
            FaceAttribute attribute = face.faceAttributes;
            Log.d(TAG, "age: " + attribute.age);
            Log.d(TAG, "gender: " + attribute.gender);
            // BUG FIX: the original requested Emotion but never read it.
            if (attribute.emotion != null) {
                Log.d(TAG, "happiness: " + attribute.emotion.happiness);
                Log.d(TAG, "sadness: " + attribute.emotion.sadness);
                Log.d(TAG, "anger: " + attribute.emotion.anger);
                Log.d(TAG, "surprise: " + attribute.emotion.surprise);
                Log.d(TAG, "neutral: " + attribute.emotion.neutral);
            }
        }
    }
}

此代码或多或少直接取自微软官方的 Face API Android 入门教程。

最佳答案

在 SDK 1.2.5 版中添加了对情感的支持。 Source

在 1.4.3 版发布之前,您应该使用 1.4.1 版。

关于java - Microsoft Face API(Android)中缺少属性类型。如何使用 Emotion 属性?,我们在Stack Overflow上找到一个类似的问题: https://stackoverflow.com/questions/49869760/

24 4 0
Copyright 2021 - 2024 cfsdn All Rights Reserved 蜀ICP备2022000587号
广告合作:1813099741@qq.com 6ren.com