上一篇寫了 基于AVFoundation框架進行人臉識別
本篇編寫基于第三方 GPUImage框架 進行識別,帶濾鏡美顏,掃描區域設置、獲取識別到的人臉等功能!
下面給出基于GPUImage框架搭建人臉檢測功能代碼:
一、導入相關頭文件、設置相關代理和屬性
#import "GPUFaceViewController.h"
#import "GPUImage.h"
#import "GPUImageBeautifyFilter.h"
#define WS(weakSelf) __weak __typeof(&*self) weakSelf = self
#define kWidth [UIScreen mainScreen].bounds.size.width
#define kHeight [UIScreen mainScreen].bounds.size.height
@interface GPUFaceViewController () <GPUImageVideoCameraDelegate, AVCaptureMetadataOutputObjectsDelegate, AVCaptureVideoDataOutputSampleBufferDelegate>

/// GPUImage camera that supplies the filtered video frames.
@property (nonatomic, strong) GPUImageVideoCamera *videoCamera;

/// Still camera. NOTE(review): unused in the visible code; the name should be
/// lowerCamelCase, but it is kept unchanged in case unseen code references it.
@property (nonatomic, strong) GPUImageStillCamera *Camera;

/// Preview view the filtered camera output is rendered into.
@property (nonatomic, strong) GPUImageView *filterView;

/// NOTE(review): unused in the visible code — confirm before removing.
@property (nonatomic, strong) UIButton *beautifyButton;

/// Metadata output configured for face detection.
/// NOTE(review): name contains a typo ("meda"); kept for compatibility.
@property (nonatomic, strong) AVCaptureMetadataOutput *medaDataOutput;

/// Serial queue on which face-metadata delegate callbacks are delivered.
@property (nonatomic, strong) dispatch_queue_t captureQueue;

/// NOTE(review): unused in the visible code — confirm before removing.
@property (nonatomic, strong) AVCapturePhotoOutput *iOutput;

/// Latest face metadata objects reported by the metadata delegate; read on
/// every frame in -willOutputSampleBuffer:. `copy` takes an immutable
/// snapshot (best practice for NSArray-typed properties).
@property (nonatomic, copy) NSArray *faceObjects;

/// NOTE(review): unused in the visible code — confirm before removing.
@property (nonatomic, strong) UIImageView *imageView;

/// YES while the controller is armed to capture the next detected face;
/// re-armed after a cooldown in -uploadFaceImg:.
@property (nonatomic, assign) BOOL isFirst;

/// NOTE(review): unused in the visible code — confirm before removing.
@property (nonatomic, strong) UIView *roundView;

/// Thumbnail that displays the cropped face image.
@property (nonatomic, strong) UIImageView *faceImgView;

/// JPEG data of the last captured face (`copy` per NSData best practice).
@property (nonatomic, copy) NSData *imageData;

/// Dimming overlay with the circular scan-window cut-out.
@property (nonatomic, strong) UIView *maskView;

@end
二、界面初始化
- (void)viewDidLoad {
    [super viewDidLoad];
    self.title = @"人臉識別";
    // Arm the first face capture before the camera starts delivering frames.
    self.isFirst = YES;
    [self faceDeviceInit];
    [self initUI];
}
// Builds the static UI: a thumbnail for the captured face plus a hint label.
- (void)initUI {
    self.faceImgView = [[UIImageView alloc] initWithFrame:CGRectMake(kWidth - 120, 64, 120, 120)];
    self.faceImgView.backgroundColor = [UIColor blueColor];
    [self.view addSubview:self.faceImgView];

    UILabel *hintLabel = [[UILabel alloc] initWithFrame:CGRectMake(52, 100, kWidth - 108, 18)];
    hintLabel.text = @"請對準臉部拍攝 提高認證成功率";
    hintLabel.font = [UIFont systemFontOfSize:17];
    hintLabel.textColor = [UIColor redColor];
    hintLabel.textAlignment = NSTextAlignmentCenter;
    [self.view addSubview:hintLabel];
}
三、相機設備初始化
//攝像頭相關設置
/// Sets up the GPUImage camera pipeline, the preview view, the beauty filter,
/// and the AVFoundation metadata output used for face detection.
- (void)faceDeviceInit {
    // Serial queue on which face-metadata callbacks are delivered.
    self.captureQueue = dispatch_queue_create("com.kimsungwhee.mosaiccamera.videoqueue", NULL);

    self.videoCamera = [[GPUImageVideoCamera alloc] initWithSessionPreset:AVCaptureSessionPreset1920x1080
                                                           cameraPosition:AVCaptureDevicePositionBack];
    self.videoCamera.delegate = self;
    self.videoCamera.videoCaptureConnection.videoOrientation = AVCaptureVideoOrientationPortrait;

    self.filterView = [[GPUImageView alloc] initWithFrame:CGRectMake(0, 64, kWidth, kHeight - 64)];
    self.filterView.backgroundColor = [UIColor clearColor];
    self.filterView.fillMode = kGPUImageFillModePreserveAspectRatioAndFill;
    [self.view addSubview:self.filterView];

    [self.videoCamera startCameraCapture];

    // Install the beauty-filter render chain.
    [self beautify];

    // Attach the metadata output after the session has started.
    dispatch_async(dispatch_get_main_queue(), ^{
        self.medaDataOutput = [[AVCaptureMetadataOutput alloc] init];
        if ([self.videoCamera.captureSession canAddOutput:self.medaDataOutput]) {
            [self.videoCamera.captureSession addOutput:self.medaDataOutput];
            [self.medaDataOutput setMetadataObjectsDelegate:self queue:self.captureQueue];
            // Setting an unsupported type raises NSInvalidArgumentException,
            // so only request face metadata when the device offers it.
            if ([self.medaDataOutput.availableMetadataObjectTypes containsObject:AVMetadataObjectTypeFace]) {
                self.medaDataOutput.metadataObjectTypes = @[AVMetadataObjectTypeFace];
            }
        }
    });

    // Restrict the effective scan area with a circular cut-out overlay.
    [self setScanArea];
}
四、設置有效掃描區域
// Dims everything outside a circular "scan window" by masking a translucent
// full-screen overlay with a path that has a round hole punched in it
// (reversed sub-path, so the default winding rule leaves the hole clear).
- (void)setScanArea {
    self.maskView = [[UIView alloc] initWithFrame:CGRectMake(0, 0, kWidth, kHeight)];
    self.maskView.backgroundColor = [UIColor colorWithRed:0 green:0 blue:0 alpha:0.5];
    [self.view addSubview:self.maskView];

    CGFloat side = kWidth - 142;
    UIBezierPath *overlayPath = [UIBezierPath bezierPathWithRect:CGRectMake(0, 0, kWidth, kHeight)];
    UIBezierPath *holePath = [UIBezierPath bezierPathWithRoundedRect:CGRectMake(73, 206, side, side)
                                                        cornerRadius:side / 2.0];
    [overlayPath appendPath:[holePath bezierPathByReversingPath]];

    CAShapeLayer *shapeLayer = [[CAShapeLayer alloc] init];
    shapeLayer.path = overlayPath.CGPath;
    self.maskView.layer.mask = shapeLayer;
}
五、濾鏡美顏
// Rebuilds the render chain: camera -> beauty filter -> preview view.
- (void)beautify {
    GPUImageBeautifyFilter *filter = [[GPUImageBeautifyFilter alloc] init];
    [self.videoCamera removeAllTargets];
    [self.videoCamera addTarget:filter];
    [filter addTarget:self.filterView];
}
六、顯示截取到的圖片,請求人臉識別接口
/// Shows the captured face image, then re-arms detection after a cooldown.
/// The face-recognition network request should be issued from here.
/// @param image The cropped face image to display and upload.
- (void)uploadFaceImg:(UIImage *)image {
    _faceImgView.image = image;
#ifdef DEBUG
    // Debug-only: logging image payloads should not ship in release builds.
    NSLog(@"imageData:%@",self.imageData);
#endif
    WS(weakSelf);
    // Allow the next detection pass after a 2-second cooldown.
    dispatch_after(dispatch_time(DISPATCH_TIME_NOW, (int64_t)(2.0 * NSEC_PER_SEC)),
                   dispatch_get_main_queue(), ^{
        weakSelf.isFirst = YES;
    });
    // TODO: issue the face-recognition API request here.
}
七、GPUImage代理方法
//GPUImage代理方法
// GPUImageVideoCameraDelegate — called for every video frame. Once the
// metadata output has reported a face, waits briefly (the first frames after
// detection are blurry), snapshots the preview, crops the face region and
// hands the result to -uploadFaceImg:.
- (void)willOutputSampleBuffer:(CMSampleBufferRef)sampleBuffer {
    WS(weakSelf);
    if (self.faceObjects.count == 0) {
        // No face in the current frame — hook for "face lost" handling.
        return;
    }
    NSLog(@"self.faceObjects.count == %ld", (long)self.faceObjects.count);
    if (!_isFirst) {
        return; // A capture is already in flight; wait until it re-arms.
    }
    // Disarm immediately so subsequent frames do not schedule more captures.
    _isFirst = NO;
    // Delay the snapshot: the first frames after detection are usually blurry.
    dispatch_after(dispatch_time(DISPATCH_TIME_NOW, (int64_t)(1.5 * NSEC_PER_SEC)),
                   dispatch_get_main_queue(), ^{
        __strong typeof(weakSelf) strongSelf = weakSelf;
        if (!strongSelf) return;
        // Region (in view coordinates) cropped out of the snapshot. The
        // original code allocated a throwaway UIView just to hold this rect
        // and leaked it into the view hierarchy on every capture.
        // NOTE(review): y = 146 here vs. y = 206 for the scan-area hole in
        // -setScanArea — confirm the offset is intentional.
        CGRect cropRect = CGRectMake(73, 146, kWidth - 142, kWidth - 142);
        // Snapshot the preview. drawViewHierarchyInRect: captures the live
        // (GPU-rendered) content; the extra renderInContext: pass the
        // original performed on top of it was redundant.
        UIGraphicsBeginImageContext(strongSelf.filterView.bounds.size);
        [strongSelf.filterView drawViewHierarchyInRect:strongSelf.filterView.bounds
                                    afterScreenUpdates:YES];
        UIImage *snapshot = UIGraphicsGetImageFromCurrentImageContext();
        UIGraphicsEndImageContext();
        // Crop the face region. CGImageCreateWithImageInRect follows the
        // Create rule, so the CGImage must be released to avoid a leak.
        CGImageRef croppedRef = CGImageCreateWithImageInRect(snapshot.CGImage, cropRect);
        if (croppedRef == NULL) {
            return; // cropRect fell outside the snapshot bounds.
        }
        UIImage *faceImage = [UIImage imageWithCGImage:croppedRef];
        CGImageRelease(croppedRef);
        // Keep a compressed copy, then kick off the recognition request.
        strongSelf.imageData = UIImageJPEGRepresentation(faceImage, 0.05);
        [strongSelf uploadFaceImg:faceImage];
    });
}
八、實現 AVCaptureMetadataOutput 代理方法
//AVCaptureMetadataOutputObjectsDelegate ===== 拿出當前幀的圖片進行人臉識別
- (void)captureOutput:(AVCaptureOutput *)captureOutput didOutputMetadataObjects:(NSArray *)metadataObjects fromConnection:(AVCaptureConnection *)connection {
self.faceObjects = metadataObjects;