1. Concepts
In a video player the audio and the video are decoded and rendered separately, so the sound and the picture can drift out of step. Audio-video synchronization is the mechanism that keeps them aligned, so that at any given moment the sound matches the picture being shown.
2. Playback time
2.1 Audio playback time
How long a piece of audio plays for is determined by its PCM data: from the data size, the sample rate, the number of channels, and the bit depth, the playback duration can be computed directly. As long as the sample rate, channel count, and bit depth stay the same, the same block of PCM data always takes the same amount of time to play through the speaker.
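As a quick check of that relationship, a PCM buffer's duration can be computed like this; the concrete numbers below are only illustrative, not values taken from the player:

#include <cstdio>

int main() {
    // Illustrative PCM parameters: 44.1 kHz, stereo, 16-bit samples.
    const int sampleRate = 44100;
    const int channels = 2;
    const int bytesPerSample = 2;
    const long long pcmBytes = 1764000;   // size of the PCM buffer in bytes
    // duration = bytes / (sample_rate * channels * bytes_per_sample)
    double seconds = (double) pcmBytes / (sampleRate * channels * bytesPerSample);
    printf("PCM duration: %.1f s\n", seconds);   // prints 10.0 s for these values
    return 0;
}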
2.2 Video playback time
Video by itself has no notion of playback duration; there is only the time interval between consecutive frames. Adjusting that interval changes how fast frames are rendered, which is how the playback speed of the video is controlled.
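For example, at 25 frames per second the interval between frames is 1/25 = 0.04 s (40 ms); rendering a frame every 80 ms would play the same video at half speed, and every 20 ms at double speed. A tiny sketch of that arithmetic, using an assumed frame rate:

#include <cstdio>

int main() {
    double fps = 25.0;                  // example frame rate, not a value from the player
    double frameInterval = 1.0 / fps;   // 0.04 s between frames at normal speed
    printf("normal: %.3f s, half speed: %.3f s, double speed: %.3f s\n",
           frameInterval, frameInterval * 2.0, frameInterval / 2.0);
    return 0;
}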
3. Audio-video synchronization strategies:
Option 1: play the audio linearly and synchronize the video to the audio.
Option 2: play the video linearly and synchronize the audio to the video.
Option 3: use an external linear clock and synchronize both audio and video to it.
Because people are far more sensitive to sound than to picture, small speed-ups and slow-downs of the video frames go unnoticed, while changes in the audio's tempo are easy to hear. This player therefore uses the first approach: the video is synchronized to the audio.
4. Implementing audio-video synchronization:
4.1 PTS and time_base
PTS (presentation timestamp) tells the player at what time a given frame should be displayed.
time_base is the unit in which those timestamps are measured (the "time base"), comparable to units such as meters or kilograms; a PTS only becomes a time in seconds once it is multiplied by the stream's time_base.
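A quick worked example with illustrative numbers: if a stream's time_base is 1/90000 and a frame's pts is 180000, the frame should be displayed at 180000 * (1/90000) = 2.0 seconds. In FFmpeg that conversion is just a multiplication by av_q2d(time_base):

extern "C" {
#include <libavutil/rational.h>
}
#include <cstdio>
#include <cstdint>

int main() {
    AVRational time_base = {1, 90000};           // illustrative time base: 1/90000 s per tick
    int64_t pts = 180000;                        // illustrative PTS, measured in time_base ticks
    double seconds = pts * av_q2d(time_base);    // 180000 * (1/90000) = 2.0 s
    printf("display this frame at %.1f s\n", seconds);
    return 0;
}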
4.2 Get the audio and video PTS (presentation timestamps) in seconds:
PTS = avFrame->pts * av_q2d(avStream->time_base);
4.3 Compute the difference between the audio and video PTS, and use it to set how long the video thread sleeps so that the video stays in relative sync with the audio: if the video is running ahead, sleep a bit longer; if it is running behind, sleep a bit less.
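As a concrete illustration (the numbers are made up): with a nominal frame interval of 40 ms, suppose the audio clock reads 5.20 s while the video clock reads 5.16 s. The difference audio - video is +0.04 s, so the audio is ahead and the video thread should sleep less than 40 ms for the next few frames until the difference shrinks back toward zero; with a difference of -0.04 s the video would be ahead and the sleep would be lengthened instead. Sections 5.2 and 5.3 turn exactly this rule into code.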
5. Implementation
5.1 Give JfVideo a reference to the JfAudio object (JfVideo.h)
class JfVideo {
public:
...
JfAudio *audio;
...
public:
...
};
Assign it when playback starts:
void JfFFmpeg::start() {
    if (audio == NULL) {
        if (LOG_DEBUG){
            LOGE("AUDIO == NULL");
        }
        return;
    }
    if (video == NULL) {
        if (LOG_DEBUG){
            LOGE("VIDEO == NULL");
        }
        return;
    }
    video->audio = audio;//give the video side access to the audio clock
    audio->play();
    video->play();
...
5.2 Get the PTS difference between audio and video
double getFrameDiffTime(AVFrame *avFrame);
/**
 * @param avFrame the current video frame
 * @return audio clock minus video clock, in seconds
 */
double JfVideo::getFrameDiffTime(AVFrame *avFrame) {
    //PTS of the current frame (best-effort estimate provided by the decoder)
    int64_t pts = av_frame_get_best_effort_timestamp(avFrame);
    if (pts == AV_NOPTS_VALUE){//no valid timestamp, fall back to 0
        pts = 0;
    }
    //time_base is the stream's time base; multiplying by it converts the PTS into seconds within the whole video
    double timestamp = pts * av_q2d(time_base);
    if (timestamp > 0){
        clock = timestamp;//video clock: presentation time of the frame about to be rendered
    }
    //positive: the audio is ahead of the video; negative: the video is ahead of the audio
    double diff = audio->clock - clock;
    return diff;
}
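getFrameDiffTime relies on audio->clock, the audio stream's playback position in seconds, which is maintained on the audio side (JfAudio is not shown in this section). Below is a minimal sketch of how such a clock could be kept up to date each time an audio frame is decoded and resampled; the function name updateAudioClock, its parameters, and the adjustment for samples already handed to the audio device are assumptions for illustration, not code taken from JfAudio:

extern "C" {
#include <libavutil/avutil.h>
#include <libavutil/frame.h>
#include <libavutil/rational.h>
}

// Sketch (assumed JfAudio internals): advance the audio clock for one decoded frame.
double updateAudioClock(double clock, AVFrame *avFrame, AVRational time_base,
                        int playedSamples, int sample_rate) {
    int64_t pts = av_frame_get_best_effort_timestamp(avFrame);
    if (pts == AV_NOPTS_VALUE) {
        pts = 0;
    }
    double frameTime = pts * av_q2d(time_base);            // frame PTS in seconds
    double played = (double) playedSamples / sample_rate;  // audio already sent to the device for this frame
    if (frameTime + played > clock) {
        clock = frameTime + played;                        // never move the clock backwards
    }
    return clock;
}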
5.3 Implement the synchronization
double JfVideo::getDelayTime(double diff) {
    if (diff > 0.003){//audio is more than 3 ms ahead: shorten the sleep so the video catches up
        delayTime = delayTime * 2 / 3;
        if (delayTime < defaultDelayTime / 2){
            delayTime = defaultDelayTime * 2 / 3;
        } else if (delayTime > defaultDelayTime * 2){
            delayTime = defaultDelayTime * 2;
        }
    } else if (diff < -0.003){//video is more than 3 ms ahead: lengthen the sleep so the video slows down
        delayTime = delayTime * 3 / 2;
        if (delayTime < defaultDelayTime / 2){
            delayTime = defaultDelayTime * 2 / 3;
        } else if (delayTime > defaultDelayTime * 2){
            delayTime = defaultDelayTime * 2;
        }
    }
    //within the ±3 ms window the current delayTime is kept unchanged
    if (diff >= 0.5){//video is far behind the audio: render the next frame immediately
        delayTime = 0;
    } else if (diff <= -0.5){//video is far ahead of the audio: wait two default frame intervals
        delayTime = defaultDelayTime * 2;
    }
    if (fabs(diff) >= 10){//a huge difference usually means there is no audio stream: fall back to the default interval
        delayTime = defaultDelayTime;
    }
    return delayTime;
}
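getDelayTime adjusts around defaultDelayTime, the nominal interval between two frames, and delayTime, the current adaptive sleep; neither is initialized in the code shown here. A minimal sketch of how the nominal interval could be derived from the video stream's frame rate follows; the helper name computeDefaultDelay and the use of av_guess_frame_rate are assumptions, not the player's actual initialization code:

extern "C" {
#include <libavformat/avformat.h>
}

// Sketch (assumption): compute the nominal per-frame delay in seconds
// from the video stream's frame rate.
double computeDefaultDelay(AVFormatContext *pFormatCtx, AVStream *avStream) {
    AVRational frame_rate = av_guess_frame_rate(pFormatCtx, avStream, NULL);
    double fps = av_q2d(frame_rate);
    if (fps <= 0) {
        fps = 25;            // fall back to a common default if the rate is unknown
    }
    return 1.0 / fps;        // e.g. 0.04 s at 25 fps
}

Both defaultDelayTime and delayTime would start from this value, and getDelayTime then nudges delayTime up or down around it frame by frame.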
5.4 Adjusting the video render speed
void *playVideo(void *data){
JfVideo *video = (JfVideo *)data;
while (video->playStatus != NULL && !video->playStatus->exit){
if (video->playStatus->seeking){
av_usleep(1000 * 100);
continue;
}
if (video->queue->getQueueSize() == 0){//packet queue is empty: enter the loading state
if (!video->playStatus->loading){
video->playStatus->loading = true;
video->callJava->onCallLoading(CHILD_THREAD, true);
LOGD("VIDEO加載狀態(tài)");
}
av_usleep(1000 * 100);
continue;
} else {
if (video->playStatus->loading){
video->playStatus->loading = false;
video->callJava->onCallLoading(CHILD_THREAD, false);
LOGD("VIDEO播放狀態(tài)");
}
}
AVPacket *avPacket = av_packet_alloc();
if (video->queue->getAVPacket(avPacket) != 0){
av_packet_free(&avPacket);//AVPacket中的第一個(gè)參數(shù),就是引用,減到0才真正釋放
av_free(avPacket);
avPacket = NULL;
continue;
}
if (avcodec_send_packet(video->pVCodecCtx,avPacket) != 0){
av_packet_free(&avPacket);//AVPacket中的第一個(gè)參數(shù),就是引用,減到0才真正釋放
av_free(avPacket);
avPacket = NULL;
continue;
}
AVFrame *avFrame = av_frame_alloc();
if (avcodec_receive_frame(video->pVCodecCtx,avFrame) != 0){
av_frame_free(&avFrame);
av_free(avFrame);
avFrame = NULL;
av_packet_free(&avPacket);//AVPacket中的第一個(gè)參數(shù),就是引用,減到0才真正釋放
av_free(avPacket);
avPacket = NULL;
continue;
}
if (LOG_DEBUG){
LOGD("子線程解碼一個(gè)AVFrame成功");
}
if (avFrame->format == AV_PIX_FMT_YUV420P){
//already YUV420P: render directly
LOGD("YUV420P");
double diff = video->getFrameDiffTime(avFrame);
LOGD("DIFF IS %f",diff);
av_usleep(video->getDelayTime(diff) * 1000000);
video->callJava->onCallRenderYUV(
CHILD_THREAD,
video->pVCodecCtx->width,
video->pVCodecCtx->height,
avFrame->data[0],
avFrame->data[1],
avFrame->data[2]);
} else {
//not YUV420P: convert to YUV420P first
AVFrame *pFrameYUV420P = av_frame_alloc();
int num = av_image_get_buffer_size(AV_PIX_FMT_YUV420P,video->pVCodecCtx->width,video->pVCodecCtx->height,1);
uint8_t *buffer = (uint8_t *)(av_malloc(num * sizeof(uint8_t)));
av_image_fill_arrays(
pFrameYUV420P->data,
pFrameYUV420P->linesize,
buffer,
AV_PIX_FMT_YUV420P,
video->pVCodecCtx->width,
video->pVCodecCtx->height,
1);
SwsContext *sws_ctx = sws_getContext(
video->pVCodecCtx->width,
video->pVCodecCtx->height,
video->pVCodecCtx->pix_fmt,
video->pVCodecCtx->width,
video->pVCodecCtx->height,
AV_PIX_FMT_YUV420P,
SWS_BICUBIC,
NULL,NULL,NULL
);
if (!sws_ctx){
av_frame_free(&pFrameYUV420P);
av_free(pFrameYUV420P);
av_free(buffer);
//also release the source frame and packet before skipping this iteration, otherwise they leak
av_frame_free(&avFrame);
av_packet_free(&avPacket);
continue;
}
sws_scale(
sws_ctx,
avFrame->data,
avFrame->linesize,
0,
avFrame->height,
pFrameYUV420P->data,
pFrameYUV420P->linesize);//the converted YUV420P data now lives in pFrameYUV420P
LOGD("NO_YUV420P");
//render the converted frame
double diff = video->getFrameDiffTime(avFrame);
LOGD("DIFF IS %f",diff);
av_usleep(video->getDelayTime(diff) * 1000000);
video->callJava->onCallRenderYUV(
CHILD_THREAD,
video->pVCodecCtx->width,
video->pVCodecCtx->height,
pFrameYUV420P->data[0],
pFrameYUV420P->data[1],
pFrameYUV420P->data[2]);
av_frame_free(&pFrameYUV420P);
av_free(pFrameYUV420P);
av_free(buffer);
sws_freeContext(sws_ctx);
}
av_frame_free(&avFrame);
av_free(avFrame);
avFrame = NULL;
av_packet_free(&avPacket);//AVPacket中的第一個(gè)參數(shù),就是引用,減到0才真正釋放
av_free(avPacket);
avPacket = NULL;
}
pthread_exit(&video->thread_play);
}