Introduction: With the spread of multimedia, short videos are being watched, accepted, and shared by more and more people, so composing high-quality video quickly and efficiently has become a real need.
This article is based on work by the author Skylpy, extending and reorganizing the original code.
1.因?yàn)樯婕暗揭曨l合成和播放,所以需求先引入一些和視頻相關(guān)的資源庫
#import <AVKit/AVKit.h>
#import <MediaPlayer/MediaPlayer.h>
#import <AVFoundation/AVFoundation.h>
Then define a couple of macros to simplify UI layout:
#define WWScreamW [UIScreen mainScreen].bounds.size.width
#define WWScreamH [UIScreen mainScreen].bounds.size.height
Next, declare some instance variables and properties:
@interface ViewController () {
    NSMutableArray *imageArr;    // original, unscaled images
    NSMutableArray *imageArray;  // images resized for the video
}
// path of the generated video file
@property (nonatomic, strong) NSString *theVideoPath;
// label showing the composition progress
@property (nonatomic, strong) UILabel *ww_progressLbe;
@end
2. Define a method that lays out the views:
- (void)ww_setupView {
    // video composition button
    UIButton *button = [UIButton buttonWithType:UIButtonTypeRoundedRect];
    [button setBounds:CGRectMake(0, 0, WWScreamW * 0.25, 50)];
    button.center = CGPointMake(WWScreamW * 0.25, WWScreamH * 0.15);
    [button setTitle:@"Compose Video" forState:UIControlStateNormal];
    [button addTarget:self action:@selector(testCompressionSession) forControlEvents:UIControlEventTouchUpInside];
    button.backgroundColor = [UIColor redColor];
    [self.view addSubview:button];
    // video playback button
    UIButton *button1 = [UIButton buttonWithType:UIButtonTypeRoundedRect];
    [button1 setBounds:CGRectMake(0, 0, WWScreamW * 0.25, 50)];
    button1.center = CGPointMake(WWScreamW * 0.75, WWScreamH * 0.15);
    [button1 setTitle:@"Play Video" forState:UIControlStateNormal];
    [button1 addTarget:self action:@selector(playAction) forControlEvents:UIControlEventTouchUpInside];
    button1.backgroundColor = [UIColor redColor];
    [self.view addSubview:button1];
    // label that reports composition progress
    UILabel *lbe = [[UILabel alloc] init];
    lbe.frame = CGRectMake(0, 0, WWScreamW * 0.25, 25);
    lbe.center = CGPointMake(WWScreamW * 0.5, WWScreamH * 0.15);
    lbe.textColor = [UIColor blackColor];
    lbe.textAlignment = NSTextAlignmentCenter;
    lbe.text = @"Ready";
    lbe.font = [UIFont systemFontOfSize:12];
    self.ww_progressLbe = lbe;
    [self.view addSubview:lbe];
}
3. Define a method that prepares the data:
- (void)ww_setupInit {
    imageArray = [[NSMutableArray alloc] init];
    imageArr = [[NSMutableArray alloc] init];
    NSString *name = @"";
    UIImage *img = nil;
    // 22 images named 0.jpg through 21.jpg must be added to the bundle beforehand
    for (int i = 0; i < 22; i++) {
        name = [NSString stringWithFormat:@"%d", i];
        img = [UIImage imageNamed:name];
        if (img) {  // skip missing images rather than crash on a nil insert
            [imageArr addObject:img];
        }
    }
    // resize the images so they all share the aspect ratio of the video
    for (int i = 0; i < imageArr.count; i++) {
        UIImage *imageNew = imageArr[i];
        // target size for each frame
        CGSize imgeSize = CGSizeMake(320, 480);
        // scale the image down to that size
        imageNew = [self imageWithImage:imageNew scaledToSize:imgeSize];
        [imageArray addObject:imageNew];
    }
}
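Neither method is called anywhere in the snippets shown here; in a typical project they would be wired up in viewDidLoad. A minimal sketch, assuming the ViewController declared in step 1:

    - (void)viewDidLoad {
        [super viewDidLoad];
        self.view.backgroundColor = [UIColor whiteColor];
        [self ww_setupView];  // lay out the two buttons and the progress label
        [self ww_setupInit];  // load and resize the 22 bundled images
    }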
The image-scaling method looks like this:
- (UIImage *)imageWithImage:(UIImage *)image scaledToSize:(CGSize)newSize
{
    // open a new bitmap context of size newSize
    UIGraphicsBeginImageContext(newSize);
    // draw the image into the new size
    [image drawInRect:CGRectMake(0, 0, newSize.width, newSize.height)];
    // read the resized image back out of the current context
    UIImage *newImage = UIGraphicsGetImageFromCurrentImageContext();
    UIGraphicsEndImageContext();
    return newImage;
}
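UIGraphicsBeginImageContext draws at a scale factor of 1.0, so the returned bitmap is exactly newSize in pixels, which is what the 320 x 480 pixel buffer created later expects. For reference, a sketch of the same call with the scale made explicit (an equivalent variant, not a change to the author's code; passing 0 for the scale would use the device's screen scale and break that match):

    // opaque = NO keeps the alpha channel; scale = 1.0 keeps the bitmap at exactly newSize pixels
    UIGraphicsBeginImageContextWithOptions(newSize, NO, 1.0);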
4. The tap handler for the video-composition button
// composes the prepared images into a movie file
- (void)testCompressionSession {
    // build the output path for the .mov file in the Documents directory
    NSArray *paths = NSSearchPathForDirectoriesInDomains(NSDocumentDirectory, NSUserDomainMask, YES);
    NSString *moviePath = [[paths objectAtIndex:0] stringByAppendingPathComponent:[NSString stringWithFormat:@"%@.mov", @"test"]];
    self.theVideoPath = moviePath;
    // output size of the video: 320 x 480
    CGSize size = CGSizeMake(320, 480);
    NSError *error = nil;
    // delete any file left over at that path (unlink takes a UTF-8 C string)
    unlink([moviePath UTF8String]);
    NSLog(@"path->%@", moviePath);
    // AVFoundation's AVAssetWriter writes images (and audio) into a complete video file
    AVAssetWriter *videoWriter = [[AVAssetWriter alloc] initWithURL:[NSURL fileURLWithPath:moviePath] fileType:AVFileTypeQuickTimeMovie error:&error];
    NSParameterAssert(videoWriter);
    if (error) {
        NSLog(@"error = %@", [error localizedDescription]);
        return;
    }
    // output settings for the .mov: codec, width, height
    NSDictionary *videoSettings = [NSDictionary dictionaryWithObjectsAndKeys:AVVideoCodecH264, AVVideoCodecKey,
                                   [NSNumber numberWithInt:size.width], AVVideoWidthKey,
                                   [NSNumber numberWithInt:size.height], AVVideoHeightKey, nil];
    AVAssetWriterInput *writerInput = [AVAssetWriterInput assetWriterInputWithMediaType:AVMediaTypeVideo outputSettings:videoSettings];
    NSDictionary *sourcePixelBufferAttributesDictionary = [NSDictionary dictionaryWithObjectsAndKeys:[NSNumber numberWithInt:kCVPixelFormatType_32ARGB], kCVPixelBufferPixelFormatTypeKey, nil];
    // AVAssetWriterInputPixelBufferAdaptor exposes a CVPixelBufferPool that can be used to
    // allocate the pixel buffers written to the output file; allocating from that pool is
    // usually more efficient than creating each pixel buffer separately
    AVAssetWriterInputPixelBufferAdaptor *adaptor = [AVAssetWriterInputPixelBufferAdaptor assetWriterInputPixelBufferAdaptorWithAssetWriterInput:writerInput sourcePixelBufferAttributes:sourcePixelBufferAttributesDictionary];
    NSParameterAssert(writerInput);
    NSParameterAssert([videoWriter canAddInput:writerInput]);
    if ([videoWriter canAddInput:writerInput]) {
        NSLog(@"writer input can be added");
    } else {
        NSLog(@"writer input cannot be added");
    }
    [videoWriter addInput:writerInput];
    [videoWriter startWriting];
    [videoWriter startSessionAtSourceTime:kCMTimeZero];
    // append the images to the video one frame at a time on a background queue
    dispatch_queue_t dispatchQueue = dispatch_queue_create("mediaInputQueue", NULL);
    __block int frame = 0;
    [writerInput requestMediaDataWhenReadyOnQueue:dispatchQueue usingBlock:^{
        while ([writerInput isReadyForMoreMediaData]) {
            if (++frame >= [imageArray count] * 10) {
                [writerInput markAsFinished];
                [videoWriter finishWritingWithCompletionHandler:^{
                    NSLog(@"finished");
                    [[NSOperationQueue mainQueue] addOperationWithBlock:^{
                        self.ww_progressLbe.text = @"Video composition finished";
                    }];
                }];
                break;
            }
            CVPixelBufferRef buffer = NULL;
            int idx = frame / 10;
            NSLog(@"idx==%d", idx);
            // report progress as the fraction of images written so far
            float progress = (float)idx / imageArray.count;
            [[NSOperationQueue mainQueue] addOperationWithBlock:^{
                self.ww_progressLbe.text = [NSString stringWithFormat:@"Progress: %.2f", progress];
            }];
            buffer = (CVPixelBufferRef)[self pixelBufferFromCGImage:[[imageArray objectAtIndex:idx] CGImage] size:size];
            if (buffer) {
                // timescale 10 and idx = frame / 10 mean each image stays on screen for one second
                if (![adaptor appendPixelBuffer:buffer withPresentationTime:CMTimeMake(frame, 10)]) {
                    NSLog(@"FAIL");
                } else {
                    NSLog(@"OK");
                }
                CFRelease(buffer);
            }
        }
    }];
}
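With 22 images this produces a movie of roughly 22 seconds: 22 images, 10 frames each, at a timescale of 10 frames per second. Those two 10s are easy to confuse, so here is a sketch of the relevant lines with the numbers pulled out into named constants (framesPerSecond and framesPerImage are illustrative names, not part of the original code):

    static const int32_t framesPerSecond = 10;  // CMTime timescale for the output
    static const int     framesPerImage  = 10;  // 10 frames at 10 fps = 1 second per image
    // inside the requestMediaDataWhenReadyOnQueue: block:
    int idx = frame / framesPerImage;                  // which source image this frame shows
    CMTime time = CMTimeMake(frame, framesPerSecond);  // presentation time of this frame
    [adaptor appendPixelBuffer:buffer withPresentationTime:time];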
The method that turns a CGImage into a CVPixelBufferRef looks like this:
- (CVPixelBufferRef)pixelBufferFromCGImage:(CGImageRef)image size:(CGSize)size {
    NSDictionary *options = [NSDictionary dictionaryWithObjectsAndKeys:
                             [NSNumber numberWithBool:YES], kCVPixelBufferCGImageCompatibilityKey,
                             [NSNumber numberWithBool:YES], kCVPixelBufferCGBitmapContextCompatibilityKey, nil];
    CVPixelBufferRef pxbuffer = NULL;
    CVReturn status = CVPixelBufferCreate(kCFAllocatorDefault, size.width, size.height, kCVPixelFormatType_32ARGB, (__bridge CFDictionaryRef)options, &pxbuffer);
    NSParameterAssert(status == kCVReturnSuccess && pxbuffer != NULL);
    CVPixelBufferLockBaseAddress(pxbuffer, 0);
    void *pxdata = CVPixelBufferGetBaseAddress(pxbuffer);
    NSParameterAssert(pxdata != NULL);
    CGColorSpaceRef rgbColorSpace = CGColorSpaceCreateDeviceRGB();
    // CGBitmapContextCreate sets up a bitmap drawing environment backed by the pixel buffer's
    // memory, so anything drawn into the context is written straight into that memory. The
    // pixel format of the context is determined by the bits per component, the color space,
    // and the alpha option. Using the buffer's own bytes-per-row guards against row padding.
    CGContextRef context = CGBitmapContextCreate(pxdata, size.width, size.height, 8, CVPixelBufferGetBytesPerRow(pxbuffer), rgbColorSpace, kCGImageAlphaPremultipliedFirst);
    NSParameterAssert(context);
    // draw the image with CGContextDrawImage; note that if you pass in a UIImage's CGImageRef,
    // the drawing can come out upside down because the UIKit and Core Graphics y-axes point in
    // opposite directions
    CGContextDrawImage(context, CGRectMake(0, 0, CGImageGetWidth(image), CGImageGetHeight(image)), image);
    // release the color space
    CGColorSpaceRelease(rgbColorSpace);
    // release the context
    CGContextRelease(context);
    // unlock the pixel buffer
    CVPixelBufferUnlockBaseAddress(pxbuffer, 0);
    return pxbuffer;
}
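The adaptor comment in step 4 notes that allocating pixel buffers from the adaptor's pool is usually more efficient than creating each one with CVPixelBufferCreate, as this method does. A minimal sketch of that variant, for reference only (not the author's code; adaptor.pixelBufferPool is only non-nil after startWriting has been called):

    CVPixelBufferRef poolBuffer = NULL;
    // reuse a buffer from the adaptor's pool instead of allocating a fresh one each frame
    CVReturn result = CVPixelBufferPoolCreatePixelBuffer(kCFAllocatorDefault, adaptor.pixelBufferPool, &poolBuffer);
    if (result == kCVReturnSuccess && poolBuffer != NULL) {
        // lock, draw the CGImage into it exactly as above, unlock, append, then CFRelease
    }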
5. The tap handler for the video-playback button
// plays the generated video
- (void)playAction {
    NSLog(@"************%@", self.theVideoPath);
    // check that the file actually exists before trying to play it
    NSFileManager *fileManager = [[NSFileManager alloc] init];
    if (![fileManager fileExistsAtPath:self.theVideoPath]) {
        self.ww_progressLbe.text = @"File not found";
        return;
    }
    NSURL *sourceMovieURL = [NSURL fileURLWithPath:self.theVideoPath];
    AVAsset *movieAsset = [AVURLAsset URLAssetWithURL:sourceMovieURL options:nil];
    AVPlayerItem *playerItem = [AVPlayerItem playerItemWithAsset:movieAsset];
    AVPlayer *player = [AVPlayer playerWithPlayerItem:playerItem];
    AVPlayerLayer *playerLayer = [AVPlayerLayer playerLayerWithPlayer:player];
    playerLayer.frame = CGRectMake(0, WWScreamH * 0.25, WWScreamW, WWScreamH * 0.65);
    playerLayer.videoGravity = AVLayerVideoGravityResizeAspect;
    [self.view.layer addSublayer:playerLayer];
    [player play];
}
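AVKit and MediaPlayer are imported in step 1 but never used; playback here goes through a bare AVPlayerLayer without any controls. If the standard transport controls are wanted, AVKit's AVPlayerViewController can present the same file. A minimal sketch of that alternative (not part of the original code):

    AVPlayerViewController *playerVC = [[AVPlayerViewController alloc] init];
    playerVC.player = [AVPlayer playerWithURL:[NSURL fileURLWithPath:self.theVideoPath]];
    [self presentViewController:playerVC animated:YES completion:^{
        [playerVC.player play];  // the system UI supplies play/pause and scrubbing
    }];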
Project link
The result looks like this:
Layout screen (WechatIMG43.jpeg)
Video playback (WechatIMG42.jpeg)