I'm building an iOS app that performs some basic detection. I get the raw frames from AVCaptureVideoDataOutput, convert the CMSampleBufferRef to a UIImage, resize the UIImage, and then convert it to a CVPixelBufferRef. As far as I can tell with Instruments, the leak is in the last part, where I convert the CGImage to a CVPixelBufferRef.
Here is the code I'm using:
- (void)captureOutput:(AVCaptureOutput *)captureOutput didOutputSampleBuffer:(CMSampleBufferRef)sampleBuffer fromConnection:(AVCaptureConnection *)connection
{
    videof = [[ASMotionDetect alloc] initWithSampleImage:[self resizeSampleBuffer:sampleBuffer]];
    // ASMotionDetect is my class for detection and I use videof to calculate the movement
}
-(UIImage*)resizeSampleBuffer:(CMSampleBufferRef)sampleBuffer {
    UIImage *img;
    CVImageBufferRef imageBuffer = CMSampleBufferGetImageBuffer(sampleBuffer);
    CVPixelBufferLockBaseAddress(imageBuffer, 0); // Lock the image buffer

    // Get information about the image
    uint8_t *baseAddress = (uint8_t *)CVPixelBufferGetBaseAddressOfPlane(imageBuffer, 0);
    size_t bytesPerRow = CVPixelBufferGetBytesPerRow(imageBuffer);
    size_t width = CVPixelBufferGetWidth(imageBuffer);
    size_t height = CVPixelBufferGetHeight(imageBuffer);

    CGColorSpaceRef colorSpace = CGColorSpaceCreateDeviceRGB();
    CGContextRef newContext = CGBitmapContextCreate(baseAddress, width, height, 8, bytesPerRow, colorSpace, kCGBitmapByteOrder32Little | kCGImageAlphaPremultipliedFirst);
    CGImageRef newImage = CGBitmapContextCreateImage(newContext);
    CGContextRelease(newContext);
    CGColorSpaceRelease(colorSpace);
    CVPixelBufferUnlockBaseAddress(imageBuffer, 0);
    /* CVBufferRelease(imageBuffer); */ // do not call this!

    img = [UIImage imageWithCGImage:newImage];
    CGImageRelease(newImage);
    newContext = nil;

    img = [self resizeImageToSquare:img];
    return img;
}
-(UIImage*)resizeImageToSquare:(UIImage*)_temp {
    UIImage *img;
    int w = _temp.size.width;
    int h = _temp.size.height;
    CGRect rect;
    if (w > h) {
        rect = CGRectMake((w-h)/2, 0, h, h);
    } else {
        rect = CGRectMake(0, (h-w)/2, w, w);
    }
    img = [self crop:_temp inRect:rect];
    return img;
}
-(UIImage*)crop:(UIImage*)image inRect:(CGRect)rect {
    UIImage *sourceImage = image;
    CGRect selectionRect = rect;
    CGRect transformedRect = TransformCGRectForUIImageOrientation(selectionRect, sourceImage.imageOrientation, sourceImage.size);
    CGImageRef resultImageRef = CGImageCreateWithImageInRect(sourceImage.CGImage, transformedRect);
    UIImage *resultImage = [[UIImage alloc] initWithCGImage:resultImageRef scale:1.0 orientation:image.imageOrientation];
    CGImageRelease(resultImageRef);
    return resultImage;
}
In my detection class I have:
- (id)initWithSampleImage:(UIImage*)sampleImage {
    if ((self = [super init])) {
        _frame = new CVMatOpaque();
        _histograms = new CVMatNDOpaque[kGridSize * kGridSize];
        [self extractFrameFromImage:sampleImage];
    }
    return self;
}
- (void)extractFrameFromImage:(UIImage*)sampleImage {
    CGImageRef imageRef = [sampleImage CGImage];
    CVImageBufferRef imageBuffer = [self pixelBufferFromCGImage:imageRef];
    CVPixelBufferLockBaseAddress(imageBuffer, 0);

    // Collect some information required to extract the frame.
    void *baseAddress = CVPixelBufferGetBaseAddress(imageBuffer);
    size_t bytesPerRow = CVPixelBufferGetBytesPerRow(imageBuffer);
    size_t height = CVPixelBufferGetHeight(imageBuffer);
    size_t width = CVPixelBufferGetWidth(imageBuffer);

    // Extract the frame, convert it to grayscale, and shove it in _frame.
    cv::Mat frame(height, width, CV_8UC4, baseAddress, bytesPerRow);
    cv::cvtColor(frame, frame, CV_BGR2GRAY);
    _frame->matrix = frame;

    CVPixelBufferUnlockBaseAddress(imageBuffer, 0);
    CGImageRelease(imageRef);
}
- (CVPixelBufferRef)pixelBufferFromCGImage:(CGImageRef)image
{
    CVPixelBufferRef pxbuffer = NULL;
    int width = CGImageGetWidth(image) * 2;
    int height = CGImageGetHeight(image) * 2;

    NSMutableDictionary *attributes = [NSMutableDictionary dictionaryWithObjectsAndKeys:
                                       [NSNumber numberWithInt:kCVPixelFormatType_32ARGB], kCVPixelBufferPixelFormatTypeKey,
                                       [NSNumber numberWithInt:width], kCVPixelBufferWidthKey,
                                       [NSNumber numberWithInt:height], kCVPixelBufferHeightKey, nil];

    CVPixelBufferPoolRef pixelBufferPool;
    CVReturn theError = CVPixelBufferPoolCreate(kCFAllocatorDefault, NULL, (__bridge CFDictionaryRef)attributes, &pixelBufferPool);
    NSParameterAssert(theError == kCVReturnSuccess);
    CVReturn status = CVPixelBufferPoolCreatePixelBuffer(NULL, pixelBufferPool, &pxbuffer);
    NSParameterAssert(status == kCVReturnSuccess && pxbuffer != NULL);

    CVPixelBufferLockBaseAddress(pxbuffer, 0);
    void *pxdata = CVPixelBufferGetBaseAddress(pxbuffer);
    NSParameterAssert(pxdata != NULL);

    CGColorSpaceRef rgbColorSpace = CGColorSpaceCreateDeviceRGB();
    CGContextRef context = CGBitmapContextCreate(pxdata, width, height, 8, width*4, rgbColorSpace, kCGImageAlphaNoneSkipFirst);
    NSParameterAssert(context);

    /* here is the problem: */
    CGContextDrawImage(context, CGRectMake(0, 0, width, height), image);

    CGColorSpaceRelease(rgbColorSpace);
    CGContextRelease(context);
    CVPixelBufferUnlockBaseAddress(pxbuffer, 0);
    return pxbuffer;
}
With Instruments I found that the problem is in the CVPixelBufferRef allocations, but I don't understand why. Can anyone spot the problem?
Thanks
Best Answer
In -pixelBufferFromCGImage:, neither pxbuffer nor pixelBufferPool is ever released. That makes sense for pxbuffer, since it is the return value, but not for pixelBufferPool: a new pool is created, and leaked, on every call to the method.
A quick fix would be to release:
- pixelBufferPool in -pixelBufferFromCGImage:
- pxbuffer (the return value of -pixelBufferFromCGImage:) in -extractFrameFromImage:
You should also rename -pixelBufferFromCGImage: to -createPixelBufferFromCGImage: to indicate that it returns a retained object.
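A minimal, untested sketch of what those fixes could look like, keeping the asker's pool-per-call structure (assuming ARC, so only the Core Foundation/Core Video objects need manual releases):

// Renamed per the Core Foundation "Create Rule": the caller owns the returned buffer.
- (CVPixelBufferRef)createPixelBufferFromCGImage:(CGImageRef)image
{
    CVPixelBufferRef pxbuffer = NULL;
    int width = (int)CGImageGetWidth(image) * 2;
    int height = (int)CGImageGetHeight(image) * 2;

    NSDictionary *attributes = @{ (__bridge id)kCVPixelBufferPixelFormatTypeKey : @(kCVPixelFormatType_32ARGB),
                                  (__bridge id)kCVPixelBufferWidthKey : @(width),
                                  (__bridge id)kCVPixelBufferHeightKey : @(height) };

    CVPixelBufferPoolRef pixelBufferPool = NULL;
    CVReturn theError = CVPixelBufferPoolCreate(kCFAllocatorDefault, NULL, (__bridge CFDictionaryRef)attributes, &pixelBufferPool);
    NSParameterAssert(theError == kCVReturnSuccess);
    CVReturn status = CVPixelBufferPoolCreatePixelBuffer(NULL, pixelBufferPool, &pxbuffer);
    NSParameterAssert(status == kCVReturnSuccess && pxbuffer != NULL);

    CVPixelBufferLockBaseAddress(pxbuffer, 0);
    void *pxdata = CVPixelBufferGetBaseAddress(pxbuffer);
    CGColorSpaceRef rgbColorSpace = CGColorSpaceCreateDeviceRGB();
    // Use the buffer's actual row stride here; it may be padded beyond width*4.
    CGContextRef context = CGBitmapContextCreate(pxdata, width, height, 8,
                                                 CVPixelBufferGetBytesPerRow(pxbuffer),
                                                 rgbColorSpace, kCGImageAlphaNoneSkipFirst);
    CGContextDrawImage(context, CGRectMake(0, 0, width, height), image);
    CGColorSpaceRelease(rgbColorSpace);
    CGContextRelease(context);
    CVPixelBufferUnlockBaseAddress(pxbuffer, 0);

    // The fix: balance the pool created above. The buffer created from it
    // holds its own reference to the pool, so releasing it here is safe.
    CVPixelBufferPoolRelease(pixelBufferPool);
    return pxbuffer; // retained (+1); the caller must call CVPixelBufferRelease()
}

And on the caller's side, -extractFrameFromImage: would balance the retained return value once it is done with the buffer:

    CVImageBufferRef imageBuffer = [self createPixelBufferFromCGImage:imageRef];
    // ... lock, wrap in cv::Mat, convert to grayscale, unlock (as above) ...
    CVPixelBufferUnlockBaseAddress(imageBuffer, 0);
    CVPixelBufferRelease(imageBuffer); // balances the +1 from -createPixelBufferFromCGImage: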
Regarding "ios - memory leak in CoreImage/CoreVideo", we found a similar question on Stack Overflow: https://stackoverflow.com/questions/11204748/