iOS: converting QCAR YUV to RGB with vImage (Accelerate)


I am trying to test the performance of converting the YUV images produced by Vuforia into a UIImage using the vImage calls in the iOS Accelerate framework. In its current state the code is simply meant to work at all; right now the conversion produces a dark, streaky image. Are there any published details on how Vuforia lays out its YUV format? My initial assumption was that it uses the bi-planar 420 format that iOS devices use. The relevant test code follows.

UIImage *imageWithQCARCameraImage(const QCAR::Image *cameraImage)
{
    UIImage *image = nil;

    if (cameraImage) {
        QCAR::PIXEL_FORMAT pixelFormat = cameraImage->getFormat();

        CGColorSpaceRef colorSpace = NULL;
        switch (pixelFormat) {
            case QCAR::YUV:
            case QCAR::RGB888:
                colorSpace = CGColorSpaceCreateDeviceRGB();
                break;
            case QCAR::GRAYSCALE:
                colorSpace = CGColorSpaceCreateDeviceGray();
                break;
            case QCAR::RGB565:
            case QCAR::RGBA8888:
            case QCAR::INDEXED:
                std::cerr << "Image format conversion not implemented." << std::endl;
                break;
            case QCAR::UNKNOWN_FORMAT:
                std::cerr << "Image format unknown." << std::endl;
                break;
        }

        int bitsPerComponent = 8;
        int width = cameraImage->getWidth();
        int height = cameraImage->getHeight();
        const void *baseAddress = cameraImage->getPixels();
        size_t totalBytes = QCAR::getBufferSize(width, height, pixelFormat);

        CGBitmapInfo bitmapInfo = kCGBitmapByteOrderDefault | kCGImageAlphaNone;
        CGColorRenderingIntent renderingIntent = kCGRenderingIntentDefault;
        CGImageRef imageRef = NULL;

        if (pixelFormat == QCAR::YUV) {
            int bytesPerPixel = 4;
            uint8_t *sourceDataAddress = (uint8_t *)baseAddress;
            // Destination pixels for the ARGB output.
            void *imageData = malloc(width * height * bytesPerPixel);

            static vImage_Buffer srcYp = {
                .width = static_cast<vImagePixelCount>(width),
                .height = static_cast<vImagePixelCount>(height),
                .data = const_cast<void *>(baseAddress)
            };

            size_t lumaBytes = width * height;
            size_t chromianceBytes = totalBytes - lumaBytes;
            static vImage_Buffer srcCb = {
                .data = static_cast<void *>(sourceDataAddress + lumaBytes)
            };

            static vImage_Buffer srcCr = {
                .data = static_cast<void *>(sourceDataAddress + lumaBytes + (chromianceBytes / 2))
            };

            static vImage_Buffer dest = {
                .width = static_cast<vImagePixelCount>(width),
                .height = static_cast<vImagePixelCount>(height),
                .data = imageData
            };

            //uint8_t permuteMap[] = { 1, 2, 3, 0 };
            vImage_YpCbCrPixelRange pixelRange = (vImage_YpCbCrPixelRange){ 0, 128, 255, 255, 255, 1, 255, 0 };
            vImage_YpCbCrToARGB info;

            vImage_Error error;

            error = vImageConvert_YpCbCrToARGB_GenerateConversion(kvImage_YpCbCrToARGBMatrix_ITU_R_601_4,
                                                                  &pixelRange,
                                                                  &info,
                                                                  kvImage420Yp8_Cb8_Cr8,
                                                                  kvImageARGB8888,
                                                                  kvImagePrintDiagnosticsToConsole);

            error = vImageConvert_420Yp8_Cb8_Cr8ToARGB8888(&srcYp,
                                                           &srcCb,
                                                           &srcCr,
                                                           &dest,
                                                           &info,
                                                           NULL,
                                                           1,
                                                           kvImageNoFlags);

            vImage_CGImageFormat format = {
                .bitsPerComponent = static_cast<uint32_t>(bitsPerComponent),
                .bitsPerPixel = static_cast<uint32_t>(3 * bitsPerComponent),
                .colorSpace = colorSpace,
                .bitmapInfo = bitmapInfo,
                .version = 0,
                .decode = NULL,
                .renderingIntent = renderingIntent
            };

            imageRef = vImageCreateCGImageFromBuffer(&dest,
                                                     &format,
                                                     NULL,
                                                     NULL,
                                                     kvImageNoFlags,
                                                     &error);
            if (error) {
                std::cerr << "Err." << std::endl;
            }
        } else {
            int bitsPerPixel = QCAR::getBitsPerPixel(pixelFormat);
            int bytesPerRow = cameraImage->getStride();
            CGDataProviderRef provider = CGDataProviderCreateWithData(NULL,
                                                                      baseAddress,
                                                                      totalBytes,
                                                                      NULL);

            imageRef = CGImageCreate(width,
                                     height,
                                     bitsPerComponent,
                                     bitsPerPixel,
                                     bytesPerRow,
                                     colorSpace,
                                     bitmapInfo,
                                     provider,
                                     NULL,
                                     false,
                                     renderingIntent);
            CGDataProviderRelease(provider);
        }

        if (imageRef != NULL) {
            image = [UIImage imageWithCGImage:imageRef];
            CGImageRelease(imageRef);
        }

        if (colorSpace != NULL) {
            CGColorSpaceRelease(colorSpace);
        }
    }

    return image;
}
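
For reference, the QCAR::Image passed into this function comes out of the camera frame roughly as in the sketch below. This is only an illustration: the setFrameFormat, registerCallback and Frame accessors are recalled from the legacy QCAR C++ headers and are assumptions here, not part of the code being measured.

class FrameGrabber : public QCAR::UpdateCallback
{
    // Called by the SDK once per camera frame.
    void QCAR_onUpdate(QCAR::State &state)
    {
        QCAR::Frame frame = state.getFrame();
        for (int i = 0; i < frame.getNumImages(); ++i) {
            const QCAR::Image *cameraImage = frame.getImage(i);
            if (cameraImage->getFormat() == QCAR::YUV) {
                UIImage *image = imageWithQCARCameraImage(cameraImage);
                // hand `image` off to the main thread for display / timing here
            }
        }
    }
};

// During initialisation, YUV frames are requested and the callback registered
// (names to be checked against the SDK version in use):
//     QCAR::setFrameFormat(QCAR::YUV, true);
//     QCAR::registerCallback(&frameGrabber);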

Best answer

void *baseAddress = buffer;   // the raw YUV bytes, i.e. cameraImage->getPixels()
size_t totalBytes = width * height * 3 / 2;

uint8_t *sourceDataAddress = (uint8_t *)baseAddress;

// Layout implied by the offsets below: planar I420, a full-resolution Y plane
// (width * height bytes) followed by half-resolution Cb and Cr planes of
// (width / 2) * (height / 2) bytes each.
vImage_Buffer srcYp = {
    .width = static_cast<vImagePixelCount>(width),
    .height = static_cast<vImagePixelCount>(height),
    .rowBytes = static_cast<size_t>(width),
    .data = const_cast<void *>(baseAddress),
};

size_t lumaBytes = width * height;
size_t chromianceBytes = totalBytes - lumaBytes;
vImage_Buffer srcCb = {
    .width = static_cast<vImagePixelCount>(width) / 2,
    .height = static_cast<vImagePixelCount>(height) / 2,
    .rowBytes = static_cast<size_t>(width) / 2,
    .data = static_cast<void *>(sourceDataAddress + lumaBytes),
};

vImage_Buffer srcCr = {
    .width = static_cast<vImagePixelCount>(width) / 2,
    .height = static_cast<vImagePixelCount>(height) / 2,
    .rowBytes = static_cast<size_t>(width) / 2,
    .data = static_cast<void *>(sourceDataAddress + lumaBytes + (chromianceBytes / 2)),
};

vImage_Buffer dest;
dest.data = NULL;
vImage_Error error = kvImageNoError;
error = vImageBuffer_Init(&dest, height, width, 32, kvImageNoFlags);

// vImage_YpCbCrPixelRange pixelRange = (vImage_YpCbCrPixelRange){ 0, 128, 255, 255, 255, 1, 255, 0 };
vImage_YpCbCrPixelRange pixelRange = { 16, 128, 235, 240, 255, 0, 255, 0 };  // video-range ITU-R 601
vImage_YpCbCrToARGB info;
error = kvImageNoError;
error = vImageConvert_YpCbCrToARGB_GenerateConversion(kvImage_YpCbCrToARGBMatrix_ITU_R_601_4,
                                                      &pixelRange,
                                                      &info,
                                                      kvImage420Yp8_Cb8_Cr8,
                                                      kvImageARGB8888,
                                                      kvImagePrintDiagnosticsToConsole);
error = kvImageNoError;
uint8_t permuteMap[4] = {3, 2, 1, 0};  // reorder ARGB -> BGRA; iOS only supports BGRA
error = vImageConvert_420Yp8_Cb8_Cr8ToARGB8888(&srcYp,
                                               &srcCb,
                                               &srcCr,
                                               &dest,
                                               &info,
                                               permuteMap,  // must be non-NULL on iOS (may be NULL on the Mac); iOS only supports BGRA
                                               255,         // constant alpha written to the A channel
                                               kvImageNoFlags);
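
The answer stops once dest holds the converted BGRA pixels. A minimal sketch of wrapping that buffer in a UIImage, along the lines of the vImageCreateCGImageFromBuffer call in the question (assuming the BGRA byte order produced by the permute map above, declared to Core Graphics as 32-bit little-endian):

vImage_CGImageFormat format = {
    .bitsPerComponent = 8,
    .bitsPerPixel = 32,
    .colorSpace = CGColorSpaceCreateDeviceRGB(),
    .bitmapInfo = static_cast<CGBitmapInfo>(kCGBitmapByteOrder32Little | kCGImageAlphaNoneSkipFirst),  // BGRA in memory
    .version = 0,
    .decode = NULL,
    .renderingIntent = kCGRenderingIntentDefault,
};

vImage_Error cgError = kvImageNoError;
CGImageRef imageRef = vImageCreateCGImageFromBuffer(&dest,
                                                    &format,
                                                    NULL,   // no cleanup callback
                                                    NULL,
                                                    kvImageNoFlags,
                                                    &cgError);

UIImage *image = nil;
if (imageRef != NULL && cgError == kvImageNoError) {
    image = [UIImage imageWithCGImage:imageRef];
    CGImageRelease(imageRef);
}

// With kvImageNoFlags the pixel data is copied into the CGImage, so the buffer
// allocated by vImageBuffer_Init can be released now.
free(dest.data);
CGColorSpaceRelease(format.colorSpace);

Since the conversion writes a constant 255 into the alpha channel, the alpha info simply declares that byte ignored (kCGImageAlphaNoneSkipFirst).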

On converting QCAR YUV to RGB with vImage (Accelerate) on iOS, a similar question can be found on Stack Overflow: https://stackoverflow.com/questions/29490145/
