
ios - merging text with an image does not work for non-Western characters


I am trying to build localized help screens. It works for Western languages, but non-Western characters are not displayed (only the image file is drawn). The same thing happens whether the text is encoded as UTF-8 or UTF-16. I don't like posting all of this code, but I can't figure out where the problem is. Any help is greatly appreciated!

- (IBAction)segmentedControlChanged:(UISegmentedControl *)sender {
    UIImage *helpImage;
    NSArray *helpText;
    if (sender.selectedSegmentIndex == 1) {
        helpImage = [UIImage imageNamed:@"HelpImage.png"];
        helpText = [NSArray arrayWithObjects:
                    [NSDictionary dictionaryWithObjectsAndKeys:
                     NSLocalizedString(@"Tab1 - sample text", nil), kHelpTextKeyString,
                     @"Arial", kHelpTextKeyFontName,
                     //@"Helvetica-Bold", kHelpTextKeyFontName,
                     [NSNumber numberWithInt:20], kHelpTextKeyFontSize,
                     [[UIColor blackColor] CGColor], kHelpTextKeyColor,
                     CGRectCreateDictionaryRepresentation(CGRectMake(30.0, 55.0, 200.0, 28.0)), kHelpTextKeyRect,
                     nil],
                    // CGRectCreateDictionaryRepresentation(CGRectMake(38.0, 55.0, 200.0, 28.0)), kHelpTextKeyRect,
                    //CGRectCreateDictionaryRepresentation(CGRectMake(30.0, 55.0, 200.0, 28.0)), kHelpTextKeyRect,

                    [NSDictionary dictionaryWithObjectsAndKeys:
                     [NSArray arrayWithObjects:
                      NSLocalizedString(@"sample text ", nil),
                      NSLocalizedString(@" ", nil),
                      NSLocalizedString(@"more sample text", nil),
                      nil], kHelpTextKeyString,
                     @"Helvetica-Light", kHelpTextKeyFontName,
                     [NSNumber numberWithInt:10], kHelpTextKeyFontSize,
                     [[UIColor blackColor] CGColor], kHelpTextKeyColor,
                     CGRectCreateDictionaryRepresentation(CGRectMake(10.0, 80.0, 200.0, 28.0)), kHelpTextKeyRect,
                     nil],
                    nil];
    }

    // display actual image
    [self displaySelectedHelpImage:helpImage withTextArray:helpText];
}


// merge selected help image to text
- (void)displaySelectedHelpImage:(UIImage *)orgImage withTextArray:(NSArray *)textArr {
    CGImageRef cgImage = [orgImage CGImage];
    int pixelsWide = CGImageGetWidth(cgImage);
    int pixelsHigh = CGImageGetHeight(cgImage);
    int bitsPerComponent = CGImageGetBitsPerComponent(cgImage); //8; // fixed
    int bitsPerPixel = CGImageGetBitsPerPixel(cgImage); //bitsPerComponent * numberOfCompnent;
    int bytesPerRow = CGImageGetBytesPerRow(cgImage); //(pixelsWide * bitsPerPixel) // 8; // bytes
    int byteCount = (bytesPerRow * pixelsHigh);
    CGColorSpaceRef colorSpace = CGImageGetColorSpace(cgImage); //CGColorSpaceCreateDeviceRGB();

    // Allocate data
    NSMutableData *data = [NSMutableData dataWithLength:byteCount];
    // Create a bitmap context
    CGContextRef context = CGBitmapContextCreate([data mutableBytes], pixelsWide, pixelsHigh, bitsPerComponent, bytesPerRow, colorSpace, kCGImageAlphaPremultipliedLast); //kCGImageAlphaNoneSkipLast); //kCGImageAlphaOnly);
    // Set the blend mode to copy to avoid any alteration of the source data or to invert to invert image
    CGContextSetBlendMode(context, kCGBlendModeCopy);
    // Set alpha
    CGContextSetAlpha(context, 1.0);
    // Color image
    //CGContextSetRGBFillColor(context, 1, 1, 1, 1.0);
    //CGContextFillRect(context, CGRectMake(0.0, 0.0, pixelsWide, pixelsHigh));
    // Draw the image to extract the alpha channel
    CGContextDrawImage(context, CGRectMake(0.0, 0.0, pixelsWide, pixelsHigh), cgImage);

    // add text to image
    // Changes the origin of the user coordinate system in a context
    //CGContextTranslateCTM(context, pixelsWide, pixelsHigh);
    // Rotate context upright
    //CGContextRotateCTM(context, -180. * M_PI/180);
    for (NSDictionary *dic in textArr) {

        CGContextSelectFont(context,
                            //todo
                            [[dic objectForKey:kHelpTextKeyFontName] UTF8String],
                            [[dic objectForKey:kHelpTextKeyFontSize] intValue],
                            kCGEncodingMacRoman);
        CGContextSetCharacterSpacing(context, 2);
        CGContextSetTextDrawingMode(context, kCGTextFillStroke);

        CGColorRef color = (CGColorRef)[dic objectForKey:kHelpTextKeyColor];
        CGRect rect;
        CGRectMakeWithDictionaryRepresentation((CFDictionaryRef)[dic objectForKey:kHelpTextKeyRect], &rect);

        CGContextSetFillColorWithColor(context, color);
        CGContextSetStrokeColorWithColor(context, color);

        if ([[dic objectForKey:kHelpTextKeyString] isKindOfClass:[NSArray class]]) {
            for (NSString *str in [dic objectForKey:kHelpTextKeyString]) {
                CGContextShowTextAtPoint(context,
                                         rect.origin.x,
                                         pixelsHigh - rect.origin.y,
                                         [str cStringUsingEncoding:[NSString defaultCStringEncoding]],
                                         [str length]);
                rect.origin.y += [[dic objectForKey:kHelpTextKeyFontSize] intValue];
            }
        } else {
            CGContextShowTextAtPoint(context,
                                     rect.origin.x,
                                     pixelsHigh - rect.origin.y,
                                     [[dic objectForKey:kHelpTextKeyString] cStringUsingEncoding:[NSString defaultCStringEncoding]],
                                     [[dic objectForKey:kHelpTextKeyString] length]);
        }
    }

    // Now the alpha channel has been copied into our NSData object above, so discard the context and let's make an image mask.
    CGContextRelease(context);
    // Create a data provider for our data object (NSMutableData is toll-free bridged to CFMutableDataRef, which is compatible with CFDataRef)
    CGDataProviderRef dataProvider = CGDataProviderCreateWithCFData((CFMutableDataRef)data);
    // Create our new mask image with the same size as the original image
    //CGImageRef maskingImage = CGImageMaskCreate(pixelsWide, pixelsHigh, bitsPerComponent, bitsPerPixel, bytesPerRow, dataProvider, NULL, YES);

    CGImageRef finalImage = CGImageCreate(pixelsWide,
                                          pixelsHigh,
                                          bitsPerComponent,
                                          bitsPerPixel,
                                          bytesPerRow,
                                          colorSpace,
                                          kCGBitmapByteOrderDefault,
                                          dataProvider,
                                          NULL,
                                          YES,
                                          kCGRenderingIntentDefault);

    // And release the provider.
    CGDataProviderRelease(dataProvider);

    UIImage *theImage = [UIImage imageWithCGImage:finalImage];

    // remove old scroll view
    if (scrollView) {
        [scrollView removeFromSuperview];
    }

    // construct new scroll view and size according to image
    UIScrollView *tempScrollView = [[UIScrollView alloc] initWithFrame:containerView.bounds];
    tempScrollView.contentSize = theImage.size;
    scrollView = tempScrollView;

    // construct an image view (sized at zero) and assign the help image to it
    UIImageView *tempImageView = [[UIImageView alloc] initWithFrame:CGRectMake(0.0, 0.0, theImage.size.width, 0.0)];
    [tempImageView setImage:theImage];

    // push image view to scroll view and scroll view to container view
    [tempScrollView addSubview:tempImageView];
    [containerView addSubview:tempScrollView];

    // animate
    [UIView beginAnimations:@"ResizeImageView" context:NULL];
    [UIView setAnimationDuration:1.0];

    // recover actual image size through animation
    [tempImageView setFrame:CGRectMake(0.0, 0.0, theImage.size.width, theImage.size.height)];

    [UIView setAnimationDelegate:self];
    [UIView setAnimationCurve:UIViewAnimationCurveEaseOut];

    [UIView commitAnimations];
}

Best Answer

You are drawing the text with CGContextShowTextAtPoint, which has very poor support for non-ASCII text. The font is selected with kCGEncodingMacRoman and the string is handed over as a C string, so characters that MacRoman cannot represent are never converted (cStringUsingEncoding: returns NULL when the conversion would be lossy) and nothing is drawn, which is why only the image shows up.

Similar questions: one, two. Apple's documentation explains the issue.

Use a higher-level API to draw the text instead, such as the methods in UIKit/UIStringDrawing.h, e.g. -[NSString drawAtPoint:withFont:].
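To make that concrete, here is a minimal sketch (not the poster's actual code) of the same merge done with UIKit string drawing instead of the hand-built Core Graphics text calls. It reuses the question's kHelpTextKey… dictionary keys, assumes a UIGraphicsBeginImageContextWithOptions image context rather than the manual CGBitmapContext, and the helper name imageByDrawingText:onImage: is invented for illustration; it returns the composited image and leaves the scroll-view setup untouched.

- (UIImage *)imageByDrawingText:(NSArray *)textArr onImage:(UIImage *)orgImage {
    // Draw into a UIKit image context; its coordinate system is already upright.
    UIGraphicsBeginImageContextWithOptions(orgImage.size, NO, orgImage.scale);
    CGContextRef context = UIGraphicsGetCurrentContext();

    // Base help image first, then the localized strings on top.
    [orgImage drawInRect:CGRectMake(0.0, 0.0, orgImage.size.width, orgImage.size.height)];

    for (NSDictionary *dic in textArr) {
        UIFont *font = [UIFont fontWithName:[dic objectForKey:kHelpTextKeyFontName]
                                       size:[[dic objectForKey:kHelpTextKeyFontSize] floatValue]];
        CGRect rect;
        CGRectMakeWithDictionaryRepresentation((CFDictionaryRef)[dic objectForKey:kHelpTextKeyRect], &rect);

        // UIStringDrawing uses the current context's fill color.
        CGContextSetFillColorWithColor(context, (CGColorRef)[dic objectForKey:kHelpTextKeyColor]);

        id value = [dic objectForKey:kHelpTextKeyString];
        NSArray *lines = [value isKindOfClass:[NSArray class]] ? value : [NSArray arrayWithObject:value];
        for (NSString *str in lines) {
            // drawAtPoint:withFont: takes the NSString itself, so there is no
            // C-string conversion and non-Western characters render normally.
            [str drawAtPoint:rect.origin withFont:font];
            rect.origin.y += font.lineHeight;
        }
    }

    UIImage *result = UIGraphicsGetImageFromCurrentImageContext();
    UIGraphicsEndImageContext();
    return result;
}

displaySelectedHelpImage:withTextArray: could then obtain theImage from such a helper and keep the rest of its scroll-view and animation code unchanged. On iOS 7 and later, drawAtPoint:withFont: would be replaced by drawAtPoint:withAttributes:, but the idea is the same: let UIKit lay out the NSString rather than pushing MacRoman bytes through CGContextShowTextAtPoint.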

Regarding "ios - merging text with an image does not work for non-Western characters", a similar question can be found on Stack Overflow: https://stackoverflow.com/questions/9706029/
