
ios - Aztec code not scanning


I am trying to scan an Aztec code using Apple's native API, but I cannot get it to scan. I have read in Apple's guidelines that Aztec codes can be scanned, yet it is not working for me.

Please review the code I am using:

#import <UIKit/UIKit.h>

@interface igViewController : UIViewController

@end

#import <AVFoundation/AVFoundation.h>
#import "igViewController.h"

@interface igViewController () <AVCaptureMetadataOutputObjectsDelegate>
{
    AVCaptureSession *_session;
    AVCaptureDevice *_device;
    AVCaptureDeviceInput *_input;
    AVCaptureMetadataOutput *_output;
    AVCaptureVideoPreviewLayer *_prevLayer;

    UIView *_highlightView;
    UILabel *_label;
}
@end

@implementation igViewController

- (void)viewDidLoad
{
    [super viewDidLoad];

    _highlightView = [[UIView alloc] init];
    _highlightView.autoresizingMask = UIViewAutoresizingFlexibleTopMargin | UIViewAutoresizingFlexibleLeftMargin | UIViewAutoresizingFlexibleRightMargin | UIViewAutoresizingFlexibleBottomMargin;
    _highlightView.layer.borderColor = [UIColor greenColor].CGColor;
    _highlightView.layer.borderWidth = 3;
    [self.view addSubview:_highlightView];

    _label = [[UILabel alloc] init];
    _label.frame = CGRectMake(0, self.view.bounds.size.height - 40, self.view.bounds.size.width, 40);
    _label.autoresizingMask = UIViewAutoresizingFlexibleTopMargin;
    _label.backgroundColor = [UIColor colorWithWhite:0.15 alpha:0.65];
    _label.textColor = [UIColor whiteColor];
    _label.textAlignment = NSTextAlignmentCenter;
    _label.text = @"(none)";
    [self.view addSubview:_label];

    _session = [[AVCaptureSession alloc] init];
    _device = [AVCaptureDevice defaultDeviceWithMediaType:AVMediaTypeVideo];
    NSError *error = nil;

    _input = [AVCaptureDeviceInput deviceInputWithDevice:_device error:&error];
    if (_input) {
        [_session addInput:_input];
    } else {
        NSLog(@"Error: %@", error);
    }

    _output = [[AVCaptureMetadataOutput alloc] init];
    [_output setMetadataObjectsDelegate:self queue:dispatch_get_main_queue()];
    [_session addOutput:_output];

    _output.metadataObjectTypes = [_output availableMetadataObjectTypes];
    for (NSString *avail in _output.metadataObjectTypes) {
        NSLog(@"Avail...%@", avail);
    }

    _prevLayer = [AVCaptureVideoPreviewLayer layerWithSession:_session];
    _prevLayer.frame = self.view.bounds;
    _prevLayer.videoGravity = AVLayerVideoGravityResizeAspectFill;
    [self.view.layer addSublayer:_prevLayer];

    [_session startRunning];

    [self.view bringSubviewToFront:_highlightView];
    [self.view bringSubviewToFront:_label];
}

- (void)captureOutput:(AVCaptureOutput *)captureOutput didOutputMetadataObjects:(NSArray *)metadataObjects fromConnection:(AVCaptureConnection *)connection
{
    NSLog(@"Failed...");
    CGRect highlightViewRect = CGRectZero;
    AVMetadataMachineReadableCodeObject *barCodeObject;
    NSString *detectionString = nil;
    NSArray *barCodeTypes = @[AVMetadataObjectTypeAztecCode];

    for (AVMetadataObject *metadata in metadataObjects) {
        NSLog(@".....%@", metadata.type);
        for (NSString *type in barCodeTypes) {
            if ([metadata.type isEqualToString:type]) {
                barCodeObject = (AVMetadataMachineReadableCodeObject *)[_prevLayer transformedMetadataObjectForMetadataObject:(AVMetadataMachineReadableCodeObject *)metadata];
                highlightViewRect = barCodeObject.bounds;
                detectionString = [(AVMetadataMachineReadableCodeObject *)metadata stringValue];
                break;
            }
        }

        if (detectionString != nil) {
            _label.text = detectionString;
            break;
        } else {
            _label.text = @"(none)";
        }
    }

    //_label.text = @"(nonessss)";

    _highlightView.frame = highlightViewRect;
}

@end

Best Answer

This is my first answer on SO, and I am a beginner at Objective-C and iOS development, so please go easy on me.

I cannot really help you fix the error in your code, because as a beginner I still have a hard time seeing what is going on there. But I want to tell you that just a few days ago I successfully followed this tutorial on how to do exactly what you need. I adapted the tutorial code and added comments where needed, so it should be easy to follow if you want to try it. Since posting only a link seems to be frowned upon here, I am posting my code instead.

This is a ViewController that opens the scanning view directly and reacts when a barcode (an Aztec code in my case) is found. It should be easy to adapt to your needs. In the tutorial they used AVMetadataObjectTypeQRCode, but to scan Aztec codes you simply replace it with AVMetadataObjectTypeAztecCode, which I have already done in my code, as shown in the sketch below.
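As a minimal sketch of that one-line swap (captureMetadataOutput is the output object that is set up in startReading further down), the only difference from the tutorial is the metadata object type passed to the output:

// Tutorial version: QR codes only
// [captureMetadataOutput setMetadataObjectTypes:@[AVMetadataObjectTypeQRCode]];

// Version used below: Aztec codes only
[captureMetadataOutput setMetadataObjectTypes:@[AVMetadataObjectTypeAztecCode]];

// If you needed both symbologies at once (not shown in the code below), the
// array could simply list both types:
// [captureMetadataOutput setMetadataObjectTypes:@[AVMetadataObjectTypeQRCode, AVMetadataObjectTypeAztecCode]];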

ScanVC.h (igViewController in your case)

#import <UIKit/UIKit.h>
#import <AVFoundation/AVFoundation.h>

@interface ScanVC : UIViewController <AVCaptureMetadataOutputObjectsDelegate>

@property (retain, nonatomic) UILabel *scannerWindow;
@property (retain, nonatomic) UILabel *statusLabel;
@property (retain, nonatomic) UIButton *cancelButton;

@end

ScanVC.m

#import "ScanVC.h"

@interface ScanVC ()

@property (nonatomic) BOOL isReading;
@property (nonatomic, strong) AVCaptureSession *captureSession;
@property (nonatomic, strong) AVCaptureVideoPreviewLayer *videoPreviewLayer;

@end


@implementation ScanVC

@synthesize cancelButton;
@synthesize statusLabel;
@synthesize scannerWindow;


- (void)viewDidLoad {
    [super viewDidLoad];

    _isReading = NO;
    _captureSession = nil;

    //place a close button
    cancelButton = [UIButton buttonWithType:UIButtonTypeSystem];
    [cancelButton addTarget:self action:@selector(closeScan) forControlEvents:UIControlEventTouchUpInside];
    [cancelButton setTitle:@"Close" forState:UIControlStateNormal];
    cancelButton.frame = CGRectMake(0, 410, 250, 40);
    [self.view addSubview:cancelButton];

    //place a status label
    statusLabel = [[UILabel alloc] initWithFrame:CGRectMake(0, 340, 250, 40)];
    statusLabel.text = @"Currently not scanning";
    [self.view addSubview:statusLabel];

    //place the scanner window (adjust the size)
    scannerWindow = [[UILabel alloc] initWithFrame:CGRectMake(0, 0, 250, 250)];
    scannerWindow.text = @"Camera Capture Window";
    [self.view addSubview:scannerWindow];

    //start the scan immediately when the view loads
    [self startStopScan];
}

- (void)closeScan {
    if (_isReading) {
        [self stopReading];
    }

    _isReading = !_isReading;

    //dismiss the view controller here?
}

- (void)startStopScan {

    if (!_isReading) {
        if ([self startReading]) {
            [statusLabel setText:@"Scanning for Barcode"];
        }
    } else {
        [self stopReading];
    }

    _isReading = !_isReading;
}

- (BOOL)startReading {
    NSError *error;

    AVCaptureDevice *captureDevice = [AVCaptureDevice defaultDeviceWithMediaType:AVMediaTypeVideo];
    AVCaptureDeviceInput *input = [AVCaptureDeviceInput deviceInputWithDevice:captureDevice error:&error];

    if (!input) {
        NSLog(@"%@", [error localizedDescription]);
        return NO;
    }

    _captureSession = [[AVCaptureSession alloc] init];
    [_captureSession addInput:input];

    AVCaptureMetadataOutput *captureMetadataOutput = [[AVCaptureMetadataOutput alloc] init];
    [_captureSession addOutput:captureMetadataOutput];

    dispatch_queue_t dispatchQueue;
    dispatchQueue = dispatch_queue_create("myQueue", NULL);
    [captureMetadataOutput setMetadataObjectsDelegate:self queue:dispatchQueue];
    [captureMetadataOutput setMetadataObjectTypes:[NSArray arrayWithObject:AVMetadataObjectTypeAztecCode]];

    //show the preview to the user
    _videoPreviewLayer = [[AVCaptureVideoPreviewLayer alloc] initWithSession:_captureSession];
    [_videoPreviewLayer setVideoGravity:AVLayerVideoGravityResizeAspectFill];
    [_videoPreviewLayer setFrame:scannerWindow.layer.bounds];
    [scannerWindow.layer addSublayer:_videoPreviewLayer];

    [_captureSession startRunning];

    return YES;
}

- (void)captureOutput:(AVCaptureOutput *)captureOutput didOutputMetadataObjects:(NSArray *)metadataObjects fromConnection:(AVCaptureConnection *)connection {
    if (metadataObjects != nil && [metadataObjects count] > 0) {
        AVMetadataMachineReadableCodeObject *metadataObj = [metadataObjects objectAtIndex:0];
        if ([[metadataObj type] isEqualToString:AVMetadataObjectTypeAztecCode]) {
            [statusLabel performSelectorOnMainThread:@selector(setText:) withObject:[metadataObj stringValue] waitUntilDone:NO];

            [self performSelectorOnMainThread:@selector(stopReading) withObject:nil waitUntilDone:NO];
            _isReading = NO;

            //do things after a successful scan here
            NSLog(@"scanner output %@", [metadataObj stringValue]);
        }
    }
}

- (void)stopReading {
    [_captureSession stopRunning];
    _captureSession = nil;

    [_videoPreviewLayer removeFromSuperlayer];
}


@end

Regarding ios - Aztec code not scanning, we found a similar question on Stack Overflow: https://stackoverflow.com/questions/23219885/
