iOS: Face Detection Built on the GPUImage Framework, with a Built-in Beauty Filter and Scan-Area Configuration

The previous post covered face detection built on the AVFoundation framework.
This post builds the detection on the third-party GPUImage framework instead, adding a beauty filter, a configurable scan area, and capture of the detected face.

Below is the code for building the face-detection feature on GPUImage:
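Before diving in, note that the project needs GPUImage and GPUImageBeautifyFilter added, and the app needs camera permission (an NSCameraUsageDescription entry in Info.plist on iOS 10+). The original post does not show this step; a minimal sketch of requesting access before running the setup below:

// Not part of the original post: request camera access before initializing the camera.
[AVCaptureDevice requestAccessForMediaType:AVMediaTypeVideo completionHandler:^(BOOL granted) {
    dispatch_async(dispatch_get_main_queue(), ^{
        if (granted) {
            [self faceDeviceInit];   // camera setup shown in section 3
            [self initUI];
        } else {
            NSLog(@"Camera permission denied");
        }
    });
}];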
1. Import the headers and set up the delegates and properties
#import "GPUFaceViewController.h"
#import "GPUImage.h"
#import "GPUImageBeautifyFilter.h"

#define WS(weakSelf) __weak __typeof(&*self) weakSelf = self
#define kWidth [UIScreen mainScreen].bounds.size.width
#define kHeight [UIScreen mainScreen].bounds.size.height

@interface GPUFaceViewController () <GPUImageVideoCameraDelegate, AVCaptureMetadataOutputObjectsDelegate, AVCaptureVideoDataOutputSampleBufferDelegate>

@property (nonatomic, strong) GPUImageVideoCamera *videoCamera;        // GPUImage camera wrapper
@property (nonatomic, strong) GPUImageStillCamera *Camera;
@property (nonatomic, strong) GPUImageView *filterView;                // live preview view
@property (nonatomic, strong) UIButton *beautifyButton;
@property (nonatomic, strong) AVCaptureMetadataOutput *medaDataOutput; // face metadata output
@property (nonatomic, strong) dispatch_queue_t captureQueue;           // queue for metadata callbacks
@property (nonatomic, strong) AVCapturePhotoOutput *iOutput;

@property (nonatomic, strong) NSArray *faceObjects;                    // most recently detected face objects
@property (nonatomic, strong) UIImageView *imageView;
@property (nonatomic, assign) BOOL isFirst;                            // YES until a usable face photo has been captured
@property (nonatomic, strong) UIView *roundView;
@property (nonatomic, strong) UIImageView *faceImgView;                // shows the cropped face
@property (nonatomic, strong) NSData *imageData;                       // JPEG data of the cropped face
@property (nonatomic, strong) UIView *maskView;                        // dark overlay with the circular scan hole

@end

2. UI initialization
- (void)viewDidLoad {
    [super viewDidLoad];
    // self.view.backgroundColor = [UIColor whiteColor];
    self.title = @"人臉識別";
    _isFirst = YES;

    [self faceDeviceInit];
    [self initUI];
}

-(void)initUI{
   _faceImgView = [[UIImageView alloc] initWithFrame:CGRectMake(kWidth - 120 , 64, 120, 120)];
   _faceImgView.backgroundColor = [UIColor blueColor];
   [self.view addSubview:_faceImgView];
   
   
   UILabel *titleLab = [[UILabel alloc] initWithFrame:CGRectMake(52, 100, kWidth - 108, 18)];
   titleLab.text = @"請對準(zhǔn)臉部拍攝  提高認(rèn)證成功率";
   titleLab.textAlignment = NSTextAlignmentCenter;
   titleLab.textColor = [UIColor redColor];
   titleLab.font = [UIFont systemFontOfSize:17];
   [self.view addSubview:titleLab];
}
3. Camera setup
// Camera-related configuration
-(void)faceDeviceInit{
  self.captureQueue = dispatch_queue_create("com.kimsungwhee.mosaiccamera.videoqueue", NULL);
  
  self.videoCamera = [[GPUImageVideoCamera alloc] initWithSessionPreset:AVCaptureSessionPreset1920x1080 cameraPosition:AVCaptureDevicePositionBack];
  self.videoCamera.delegate = self;
  
  self.videoCamera.videoCaptureConnection.videoOrientation = AVCaptureVideoOrientationPortrait;
  
  self.filterView = [[GPUImageView alloc] initWithFrame:CGRectMake(0, 64, kWidth, kHeight-64)];
  self.filterView.backgroundColor = [UIColor clearColor];
  self.filterView.fillMode = kGPUImageFillModePreserveAspectRatioAndFill;
  [self.view addSubview:self.filterView];
  
  [self.videoCamera startCameraCapture];
  
  // Apply the beauty filter
  [self beautify];
  
  // Add a metadata output for face detection
  dispatch_async(dispatch_get_main_queue(), ^{
      self.medaDataOutput = [[AVCaptureMetadataOutput alloc] init];
      if ([self.videoCamera.captureSession canAddOutput:self.medaDataOutput]) {
          [self.videoCamera.captureSession addOutput:self.medaDataOutput];
          // Set the metadata object type to face
          self.medaDataOutput.metadataObjectTypes = @[AVMetadataObjectTypeFace];
          [self.medaDataOutput setMetadataObjectsDelegate:self queue:self.captureQueue];
      }
  });

  // Configure the effective scan area (visual mask)
  [self setScanArea];
  
}
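The original post does not show teardown; as a small sketch, the capture can be stopped when the controller goes off screen (stopCameraCapture is a standard GPUImageVideoCamera method):

-(void)viewWillDisappear:(BOOL)animated {
    [super viewWillDisappear:animated];
    // Stop the GPUImage capture session when leaving this screen
    [self.videoCamera stopCameraCapture];
}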
4. Configure the effective scan area
-(void)setScanArea{
    // Dark overlay covering the whole screen
    _maskView = [[UIView alloc] initWithFrame:CGRectMake(0, 0, kWidth, kHeight)];
    _maskView.backgroundColor = [UIColor colorWithRed:0 green:0 blue:0 alpha:0.5];
    [self.view addSubview:_maskView];

    // Punch a circular hole in the overlay by appending a reversed round path
    UIBezierPath *maskPath = [UIBezierPath bezierPathWithRect:CGRectMake(0, 0, kWidth, kHeight)];
    [maskPath appendPath:[[UIBezierPath bezierPathWithRoundedRect:CGRectMake(73, 206, kWidth - 142, kWidth - 142) cornerRadius:(kWidth - 142)/2.0] bezierPathByReversingPath]];

    CAShapeLayer *maskLayer = [[CAShapeLayer alloc] init];
    maskLayer.path = maskPath.CGPath;
    _maskView.layer.mask = maskLayer;
}
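Note that the mask above is purely visual; the metadata output still reports faces anywhere in the frame. If detection itself should be limited to the circle, AVCaptureMetadataOutput has a rectOfInterest property that could be set once the output has been added to the session. It expects normalized coordinates in the capture device's native (landscape) orientation, so the conversion below is only a rough sketch for this portrait layout, not an exact mapping:

// Rough sketch (not from the original post): restrict face detection to the scan circle.
// rectOfInterest is normalized (0..1) and expressed in the capture device's native
// orientation, so the x/y axes are swapped relative to the portrait UI.
CGRect circleInView = CGRectMake(73, 206, kWidth - 142, kWidth - 142);
self.medaDataOutput.rectOfInterest = CGRectMake(circleInView.origin.y / kHeight,
                                                circleInView.origin.x / kWidth,
                                                circleInView.size.height / kHeight,
                                                circleInView.size.width / kWidth);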
5. Beauty filter
- (void)beautify {
    [self.videoCamera removeAllTargets];
    GPUImageBeautifyFilter *beautifyFilter = [[GPUImageBeautifyFilter alloc] init];
    [self.videoCamera addTarget:beautifyFilter];
    [beautifyFilter addTarget:self.filterView];
}
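The beautifyButton property declared earlier is never wired up in the post; as a sketch, it could toggle the filter by rebuilding the GPUImage target chain (beautifyButtonTapped: is a hypothetical action, not part of the original code):

- (void)beautifyButtonTapped:(UIButton *)sender {
    sender.selected = !sender.selected;
    [self.videoCamera removeAllTargets];
    if (sender.selected) {
        // Camera -> beautify filter -> preview
        GPUImageBeautifyFilter *beautifyFilter = [[GPUImageBeautifyFilter alloc] init];
        [self.videoCamera addTarget:beautifyFilter];
        [beautifyFilter addTarget:self.filterView];
    } else {
        // Camera -> preview, with no filter
        [self.videoCamera addTarget:self.filterView];
    }
}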
6. Show the captured face image and call the face-recognition API
-(void)uploadFaceImg:(UIImage *)image{
    _faceImgView.image = image;

    NSLog(@"imageData:%@", self.imageData);
    WS(weakSelf);
    // Allow detection to run again after 2 seconds
    dispatch_after(dispatch_time(DISPATCH_TIME_NOW, (int64_t)(2.0 * NSEC_PER_SEC)), dispatch_get_main_queue(), ^{
        weakSelf.isFirst = YES;
    });
    // Put the face-recognition API request here (see the sketch below)
}
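What the request looks like depends entirely on the backend; as a minimal sketch, assuming a hypothetical JSON endpoint that accepts the image as a base64 string, the upload could be written with NSURLSession like this:

// Sketch only: https://example.com/api/face/verify is a hypothetical endpoint.
NSURL *url = [NSURL URLWithString:@"https://example.com/api/face/verify"];
NSMutableURLRequest *request = [NSMutableURLRequest requestWithURL:url];
request.HTTPMethod = @"POST";
[request setValue:@"application/json" forHTTPHeaderField:@"Content-Type"];
NSDictionary *body = @{@"image": [self.imageData base64EncodedStringWithOptions:0]};
request.HTTPBody = [NSJSONSerialization dataWithJSONObject:body options:0 error:NULL];
NSURLSessionDataTask *task = [[NSURLSession sharedSession] dataTaskWithRequest:request
    completionHandler:^(NSData *data, NSURLResponse *response, NSError *error) {
        // Parse the recognition result and update the UI on the main queue
}];
[task resume];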
7. GPUImage delegate method

// GPUImageVideoCameraDelegate callback, called for every output sample buffer
- (void)willOutputSampleBuffer:(CMSampleBufferRef)sampleBuffer {
    WS(weakSelf);
    // A face has been detected
    if (self.faceObjects && self.faceObjects.count > 0) {
        NSLog(@"self.faceObjects.count == %lu", (unsigned long)self.faceObjects.count);
        // This flag ensures that once a face photo has been captured,
        // detection is not processed again until the flag is reset.
        if (_isFirst) {
            // The first frames containing a face tend to be blurry,
            // so wait a moment before grabbing the image.
            dispatch_after(dispatch_time(DISPATCH_TIME_NOW, (int64_t)(1.5 * NSEC_PER_SEC)), dispatch_get_main_queue(), ^{
                // Region of the preview (in filterView coordinates) to crop the face from
                CGRect faceRect = CGRectMake(73, 146, kWidth - 142, kWidth - 142);

                // Snapshot the preview view; drawViewHierarchyInRect: captures the live (OpenGL) content
                UIGraphicsBeginImageContext(weakSelf.filterView.bounds.size);
                [weakSelf.filterView drawViewHierarchyInRect:weakSelf.filterView.bounds afterScreenUpdates:YES];
                [weakSelf.filterView.layer renderInContext:UIGraphicsGetCurrentContext()];
                UIImage *newImage = UIGraphicsGetImageFromCurrentImageContext();
                UIGraphicsEndImageContext();

                // Crop the face region out of the snapshot (release the CGImage to avoid a leak)
                CGImageRef croppedRef = CGImageCreateWithImageInRect(newImage.CGImage, faceRect);
                UIImage *faceImage = [UIImage imageWithCGImage:croppedRef];
                CGImageRelease(croppedRef);
                // UIImageWriteToSavedPhotosAlbum(faceImage, self, nil, nil); // save to the photo library if needed

                // Convert the face image to NSData
                weakSelf.imageData = UIImageJPEGRepresentation(faceImage, 0.05);
                // With the face image captured, call the face-recognition API
                [weakSelf uploadFaceImg:faceImage];
            });
            _isFirst = NO;
        }
    } else {
        // No face detected; handle that case here (see the sketch after this method)
    }
}
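For the no-face branch, a small, purely illustrative sketch of what the handling might look like (not part of the original code):

// Illustrative only: clear the last capture and prompt the user on the main queue.
dispatch_async(dispatch_get_main_queue(), ^{
    weakSelf.faceImgView.image = nil;
    // e.g. show a "please keep your face inside the circle" hint here
});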
8. Implement the AVCaptureMetadataOutput delegate
// AVCaptureMetadataOutputObjectsDelegate: store the face metadata objects detected in the current frame
- (void)captureOutput:(AVCaptureOutput *)captureOutput didOutputMetadataObjects:(NSArray *)metadataObjects fromConnection:(AVCaptureConnection *)connection {
    // Called on captureQueue; keep the detected faces for the next willOutputSampleBuffer: callback
    self.faceObjects = metadataObjects;
}

Conclusion:

That's the face-detection code built on the GPUImage framework. If anything is wrong, please point it out in the comments below!
If this helped, please give it a ?? ??
Demo: https://github.com/zhwIdea/GPUFaceDetect