1. QR Code Generation
#import <CoreImage/CoreImage.h>
@property (weak, nonatomic) IBOutlet UIImageView *QRCImage;
- (void)touchesBegan:(NSSet *)touches withEvent:(UIEvent *)event
{
// 1. Create the QR code generator filter
CIFilter *filter = [CIFilter filterWithName:@"CIQRCodeGenerator"];
// 2. Reset the filter to its defaults (it may still hold settings from a previous run)
[filter setDefaults];
// 3. Prepare the data the QR code should encode
NSData *data = [@"iOS development is awesome" dataUsingEncoding:NSUTF8StringEncoding];
// 4. Hand the data to the filter (via KVC)
[filter setValue:data forKey:@"inputMessage"];
// 5. Generate the QR code and read the resulting image
CIImage *image = [filter outputImage];
// 6. The raw output is tiny, so scale it up without interpolation and show it in the image view
self.QRCImage.image = [self createNonInterpolatedUIImageFromCIImage:image withSize:400];
}
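If the code needs to survive more damage (for example a logo overlaid in the center), CIQRCodeGenerator also accepts an inputCorrectionLevel key via KVC. A minimal sketch, to be placed before step 5 above; the valid values are "L", "M", "Q" and "H" (roughly 7%, 15%, 25% and 30% recoverable damage):
// Optional: raise the error-correction level before reading outputImage
[filter setValue:@"H" forKey:@"inputCorrectionLevel"];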
// Generate a UIImage of the given size from a CIImage, without interpolation so the modules stay crisp
// @param image the CIImage produced by the filter
// @param size  desired width/height in pixels
- (UIImage *)createNonInterpolatedUIImageFromCIImage:(CIImage *)image withSize:(CGFloat)size
{
CGRect extent = CGRectIntegral(image.extent);
CGFloat scale = MIN(size/CGRectGetWidth(extent), size/CGRectGetHeight(extent));
// 1. Create a grayscale bitmap context at the target size
size_t width = CGRectGetWidth(extent) * scale;
size_t height = CGRectGetHeight(extent) * scale;
CGColorSpaceRef cs = CGColorSpaceCreateDeviceGray();
CGContextRef bitmapRef = CGBitmapContextCreate(nil, width, height, 8, 0, cs, (CGBitmapInfo)kCGImageAlphaNone);
CIContext *context = [CIContext contextWithOptions:nil];
CGImageRef bitmapImage = [context createCGImage:image fromRect:extent];
CGContextSetInterpolationQuality(bitmapRef, kCGInterpolationNone);
CGContextScaleCTM(bitmapRef, scale, scale);
CGContextDrawImage(bitmapRef, extent, bitmapImage);
// 2. Capture the bitmap into a CGImage and release the Core Graphics objects we created
CGImageRef scaledImage = CGBitmapContextCreateImage(bitmapRef);
CGContextRelease(bitmapRef);
CGImageRelease(bitmapImage);
CGColorSpaceRelease(cs);
UIImage *result = [UIImage imageWithCGImage:scaledImage];
CGImageRelease(scaledImage);
return result;
}
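The 400 passed to the helper above is an arbitrary pixel count; in practice it is worth sizing the bitmap to the image view so the result stays sharp on Retina screens. A minimal usage sketch, where image is the CIImage from step 5 and assuming QRCImage already has its final square layout when this runs:
// Match the generated bitmap to the image view's size in pixels
CGFloat side = CGRectGetWidth(self.QRCImage.bounds) * [UIScreen mainScreen].scale;
self.QRCImage.image = [self createNonInterpolatedUIImageFromCIImage:image withSize:side];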
2. QR Code Scanning
#import <AVFoundation/AVFoundation.h>
@interface ViewController () <AVCaptureMetadataOutputObjectsDelegate>
@property(nonatomic,strong)AVCaptureSession *session;
@property (nonatomic, weak) AVCaptureVideoPreviewLayer *previewLayer;
@end
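The session is held strongly because capture stops as soon as the session is deallocated; the preview layer can stay weak because the view's layer retains it once it is added as a sublayer.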
@implementation ViewController
- (void)touchesBegan:(NSSet *)touches withEvent:(UIEvent *)event
{
// 1. Get the capture device (input devices on iOS: camera, microphone)
AVCaptureDevice *device = [AVCaptureDevice defaultDeviceWithMediaType:AVMediaTypeVideo];
// 2. Create an input object from the device
AVCaptureDeviceInput *input = [AVCaptureDeviceInput deviceInputWithDevice:device error:NULL];
// 3. Create the metadata output object
AVCaptureMetadataOutput *output = [[AVCaptureMetadataOutput alloc] init];
// 4. Set the metadata delegate (callbacks delivered on the main queue)
[output setMetadataObjectsDelegate:self queue:dispatch_get_main_queue()];
// 5. Create the capture session
AVCaptureSession *session = [[AVCaptureSession alloc] init];
self.session = session;
// 6. Add the input to the session
if ([session canAddInput:input]) {
[session addInput:input];
}
// 7. Add the output to the session
if ([session canAddOutput:output]) {
[session addOutput:output];
}
// 8. Choose which metadata types to report (must be set after the output has been added to the session)
[output setMetadataObjectTypes:@[AVMetadataObjectTypeQRCode]];
// 9. Add a preview layer so the user can see the camera feed
AVCaptureVideoPreviewLayer *previewLayer = [AVCaptureVideoPreviewLayer layerWithSession:session];
previewLayer.frame = self.view.bounds;
[self.view.layer addSublayer:previewLayer];
self.previewLayer = previewLayer;
// 10. Start scanning
[session startRunning];
}
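Camera capture also needs user consent: add an NSCameraUsageDescription entry to Info.plist (required since iOS 10), and ideally request access before starting the session. A minimal sketch, assumed to run before step 10 above:
// Ask for camera permission up front instead of letting startRunning trigger the prompt implicitly
[AVCaptureDevice requestAccessForMediaType:AVMediaTypeVideo completionHandler:^(BOOL granted) {
    if (!granted) {
        NSLog(@"Camera access denied; QR scanning will not work");
    }
}];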
#pragma mark - AVCaptureMetadataOutputObjectsDelegate
// Called whenever scan metadata (such as a QR code) is recognized
- (void)captureOutput:(AVCaptureOutput *)captureOutput didOutputMetadataObjects:(NSArray *)metadataObjects fromConnection:(AVCaptureConnection *)connection
{
// 1. Stop scanning
[self.session stopRunning];
// 2. Remove the preview layer
[self.previewLayer removeFromSuperlayer];
// 3. Show the scanned result on screen
UILabel *label = [[UILabel alloc] initWithFrame:CGRectMake(0, 0, 300, 300)];
label.backgroundColor = [UIColor redColor];
[self.view addSubview:label];
// 3.1 Take the machine-readable code object out of the metadata
AVMetadataMachineReadableCodeObject *object = [metadataObjects lastObject];
// 3.2 Extract the decoded string
label.text = object.stringValue;
}
@end
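By default the whole camera frame is scanned. If the UI draws a smaller scan window, AVCaptureMetadataOutput's rectOfInterest can restrict detection to that area. A minimal sketch, where output is the metadata output created in step 3 and scanFrame is a hypothetical window in view coordinates; rectOfInterest uses the metadata coordinate space, so convert through the preview layer once the session is running:
// Restrict scanning to a smaller on-screen window (scanFrame is an example value)
CGRect scanFrame = CGRectMake(60, 200, 200, 200);
output.rectOfInterest = [self.previewLayer metadataOutputRectOfInterestForRect:scanFrame];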