作者热门文章
- iOS/Objective-C 元类和类别
- objective-c - 当 NSURLSession 通过 HTTP 代理和 /etc/hosts 访问时出现 -1001 错误
- java - 使用网络类获取 url 地址
- ios - 推送通知中不播放声音
我正在为 Olympus Air A01 开发实时更新实时取景图像上的面部检测结果的捕获程序。人脸检测结果是从 Camera Kit 获取的,但我的应用程序在检测到人脸时退出。我的程序在 didUpdateLiveView
中更新实时取景图像并将数据传递给 drawFaceFrame:cameraFrame:
Olympus Camera Kit 版本为1.1.0。
@interface ViewController() <OLYCameraLiveViewDelegate>
//OLYCamera Class
// NOTE(review): declared `weak` — assumes another object (e.g. the app delegate)
// keeps a strong reference to the camera, otherwise this is nil by the time the
// live view delegate fires — TODO confirm.
@property (weak, nonatomic) OLYCamera *camera;
//For live view
// Image view that displays the camera's live view stream.
@property (weak, nonatomic) IBOutlet UIImageView *liveView;
//8 UIViews for face detection frame
// Overlay views reused for detected faces (the camera detects up to 8 faces).
@property (strong, nonatomic) IBOutletCollection(UIView) NSArray *faceViews;
@end
/// OLYCameraLiveViewDelegate: called each time the camera delivers a new live
/// view frame. Computes the aspect-fit rect of the live image inside the image
/// view and redraws the face detection frames over it.
/// FIX(review): removed the stray extra `}` that followed this method — it
/// unbalanced the braces and broke compilation.
- (void)camera:(OLYCamera *)camera didUpdateLiveView:(NSData *)data metadata:(NSDictionary *)metadata
{
    //UPDATE LIVE VIEW IMAGE HERE
    // NOTE(review): assumes self.liveView.image has been set from `data` above;
    // if it is still nil, image.size is CGSizeZero and the aspect-fit rect
    // collapses to zero — TODO confirm the update order.
    CGRect frame = AVMakeRectWithAspectRatioInsideRect(self.liveView.image.size, self.liveView.frame);
    [self drawFaceFrame:camera.detectedHumanFaces cameraFrame:frame];
}
//Draw face detection frame
/// Draws a red frame over each detected face, and hides the unused overlay views.
/// FIX(review): the body referenced undefined identifiers (`detectedHumanFaces`,
/// `cameraFrame`, `self.imageView`) and could not compile; it now uses the actual
/// parameter names (`faces`, `frame`) and the declared outlet (`self.liveView`).
///
/// faces: OLYCamera.detectedHumanFaces — keys are decimal index strings
///        (@"0" … @"7"), values are NSValue-wrapped CGRects in viewfinder
///        coordinates.
/// frame: aspect-fit rect of the live image inside the image view, as computed
///        by AVMakeRectWithAspectRatioInsideRect.
- (void)drawFaceFrame:(NSDictionary *)faces cameraFrame:(CGRect)frame
{
    // Scale factors from live-image pixel space to on-screen points.
    const CGFloat ratioW = frame.size.width / self.liveView.image.size.width;
    const CGFloat ratioH = frame.size.height / self.liveView.image.size.height;
    unsigned int i = 0;
    for ( ; faces && i < faces.count ; ++i)
    {
        // FIX(review): %u matches the unsigned loop counter (was %d).
        NSString *key = [NSString stringWithFormat:@"%u", i];
        NSValue *value = faces[key];
        if (!value)
        {
            // No entry for this index — stop; remaining views are hidden below.
            break;
        }
        CGRect rect = [value CGRectValue];
        // Viewfinder coordinates -> live image coordinates (Camera Kit helper).
        CGRect rectInImage = OLYCameraConvertRectOnViewfinderIntoLiveImage(rect, self.liveView.image);
        // Live image coordinates -> screen points inside the aspect-fit rect.
        CGRect rectInView = rectInImage;
        rectInView.origin.x *= ratioW;
        rectInView.origin.y *= ratioH;
        rectInView.origin.x += frame.origin.x;
        rectInView.origin.y += frame.origin.y;
        rectInView.size.width *= ratioW;
        rectInView.size.height *= ratioH;
        if (i < self.faceViews.count)
        {
            UIView *faceView = self.faceViews[i];
            faceView.layer.borderColor = [UIColor redColor].CGColor;
            faceView.frame = rectInView;
            faceView.hidden = NO;
        }
    }
    // Hide unused frames
    for ( ; i < self.faceViews.count ; ++i)
    {
        UIView *faceView = self.faceViews[i];
        faceView.hidden = YES;
    }
}
最佳答案
似乎有两个问题:(1)应用在每次实时取景更新时都重绘人脸框,即使没有检测到新的人脸;(2)在绘制过程中
detectedHumanFaces
可能被 Camera Kit 更新,导致人脸数量与坐标不一致。第一点不是很关键,但也不是很好。我推荐 Key-value Observation (KVO) 技术,该技术仅在相机检测到人脸时调用。
KVO 有效解决了第二个问题。您可以在观察者调用的方法中复制detectedHumanFaces
。 detectedHumanFacesValueDidChange
方法在下面的代码中由观察者调用。
@interface ViewController () <OLYCameraLiveViewDelegate>
//OLYCamera Class
// NOTE(review): declared `weak` — assumes another object keeps a strong
// reference to the camera for the lifetime of this controller — TODO confirm.
@property (weak, nonatomic) OLYCamera *camera;
//For face detection frames
// Overlay views reused for detected faces (the camera detects up to 8 faces).
@property (strong, nonatomic) IBOutletCollection(UIView) NSArray *detectedHumanFaceViews;
@end
/// Called by observer when Camera Kit update detectedHumanFaces property.
/// Snapshots the live image and face dictionary once, then positions one
/// overlay view per detected face and hides the rest.
- (void)detectedHumanFacesValueDidChange {
    // Save live view image and detected face information
    // to make consistent with the number of faces and their coordinates when app updates face detection frame.
    // NOTE(review): self.imageView is not declared in the interface fragment
    // shown here — presumably an outlet declared elsewhere; verify.
    UIImage *image = self.imageView.image;
    // FIX(review): the original read the bare identifier `camera`, which does
    // not exist (the property's auto-synthesized ivar is `_camera`); read the
    // camera through the property instead.
    NSDictionary *detectedHumanFaces = self.camera.detectedHumanFaces;
    if (image == nil || detectedHumanFaces == nil) {
        // Show face detection frame only if a live view image and detected face information are confirmed.
        for (UIView *detectedHumanFaceView in self.detectedHumanFaceViews) {
            detectedHumanFaceView.hidden = YES;
        }
        return;
    }
    for (NSInteger index = 0; index < self.detectedHumanFaceViews.count; index++) {
        // Confirm detected face information corresponding to the view for face detection frame.
        // The camera detects eight faces at the maximum.
        UIView *detectedHumanFaceView = self.detectedHumanFaceViews[index];
        // FIX(review): cast NSInteger to long so %ld is correct on 32-bit too.
        NSString *faceKey = [NSString stringWithFormat:@"%ld", (long)index];
        NSValue *faceValue = detectedHumanFaces[faceKey];
        if (!faceValue) {
            detectedHumanFaceView.hidden = YES;
            continue;
        }
        // Decide coordinates of the face detection frame on the screen
        CGRect imageRect = AVMakeRectWithAspectRatioInsideRect(image.size, self.imageView.bounds);
        CGFloat xRatio = imageRect.size.width / image.size.width;
        CGFloat yRatio = imageRect.size.height / image.size.height;
        // Viewfinder coordinates -> live image coordinates (Camera Kit helper).
        CGRect faceRect = OLYCameraConvertRectOnViewfinderIntoLiveImage([faceValue CGRectValue], image);
        CGFloat x = faceRect.origin.x * xRatio + imageRect.origin.x;
        CGFloat y = faceRect.origin.y * yRatio + imageRect.origin.y;
        CGFloat w = faceRect.size.width * xRatio;
        CGFloat h = faceRect.size.height * yRatio;
        CGRect viewRect = CGRectMake(x, y, w, h);
        // Draw face detection frame.
        detectedHumanFaceView.frame = viewRect;
        detectedHumanFaceView.hidden = NO;
    }
}
关于ios - 奥林巴斯相机套件 : App exits when it draws face detection frame on the live view,我们在Stack Overflow上找到一个类似的问题: https://stackoverflow.com/questions/31931317/
我是一名优秀的程序员,十分优秀!