/**
 * 叶海辉
 * QQ group: 121376426
 * http://blog.yundiantech.com/
 */

#include "VideoPlayerCSRW.h"

#include "Audio/PcmVolumeControl.h"
#include "parser/parser_callback.h"

#include <stdio.h>

#include <QDebug>

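/**
 * Overview of the playback pipeline implemented in this file:
 *  - startPlay() creates a parser_callback (holding the video/audio packet lists and
 *    their condition variables), launches readVideoFile() on a detached thread and then
 *    calls run_player(), which (via the parser_callback) is expected to push parsed
 *    packets into those lists.
 *  - readVideoFile() initializes SDL audio, sets up the audio decoder and resampler,
 *    spawns decodeVideoThread(), and waits until playback is stopped.
 *  - decodeVideoThread() drains mVideoPacktList, decodes H.264 frames and displays them.
 *  - decodeAudioFrame() drains mAudioPacktList, decodes and resamples one audio packet.
 */
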
VideoPlayerCSRW::VideoPlayerCSRW()
{
    mVideoType = NORMALVIDEO;
    parseCB = nullptr;
    initPlayer();
}

VideoPlayerCSRW::~VideoPlayerCSRW()
{
    if (parseCB != nullptr) //parseCB is only created in startPlay(), so guard against a null pointer
    {
        parseCB->quit();
        parseCB = nullptr;
    }
}

bool VideoPlayerCSRW::startPlay(const std::string &filePath, int videoType)
{
    if (mPlayerState != VideoPlayer_Stop)
    {
        return false;
    }

    mIsQuit = false;
    mIsPause = false;

    if (!filePath.empty())
        mFilePath = filePath;
    mVideoType = videoType;

    //Create the parser callback before the read thread is started,
    //since readVideoFile() uses it during shutdown.
    parseCB = new parser_callback(&mVideoPacktList, &mAudioPacktList,
                                  mConditon_Video, mConditon_Audio);

    //Start a new thread that reads the video file
    std::thread([&](VideoPlayerCSRW *pointer)
    {
        pointer->readVideoFile();

    }, this).detach();
//    readVideoFile();

    run_player(mFilePath.c_str(), mVideoType, parseCB);

    return true;
}

int64_t VideoPlayerCSRW::getTotalTime()
{
    //No container is opened for this raw-stream player, so the total duration is unknown; return a placeholder
    return 1;
}

void VideoPlayerCSRW::readVideoFile()
{
    ///SDL initialization has to happen in a worker thread, otherwise it causes problems on some machines.
    if (SDL_Init(SDL_INIT_AUDIO))
    {
        doOpenSdlFailed(-100);
        fprintf(stderr, "Could not initialize SDL - %s. \n", SDL_GetError());
        return;
    }

    mIsReadThreadFinished = false;
    mIsReadFinished = false;

    pFormatCtx = nullptr;
    pCodecCtx = nullptr;
    pCodec = nullptr;

    aCodecCtx = nullptr;
    aCodec = nullptr;
    aFrame = nullptr;

    //placeholder stream objects (no container is opened here)
    mAudioStream = new AVStream;
    mVideoStream = new AVStream;

    audio_clock = 0;
    video_clock = 0;

    doTotalTimeChanged(getTotalTime());

    ///Create a dedicated thread for video decoding
    std::thread([&](VideoPlayerCSRW *pointer)
    {
        pointer->decodeVideoThread();

    }, this).detach();

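    /// The audio path below assumes a G.711 A-law stream at 8000 Hz, mono
    /// (AV_CODEC_ID_PCM_ALAW with hard-coded codec parameters); it is later
    /// resampled to the fixed SDL playback format of 44100 Hz, stereo, S16.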
    //audio decoder
    // find the audio decoder
    int codecId = AV_CODEC_ID_PCM_ALAW;
    aCodec = avcodec_find_decoder((AVCodecID)codecId);
    if (!aCodec) {
        fprintf(stderr, "Codec not found\n");
        return;
    }

    aCodecCtx = avcodec_alloc_context3(aCodec);
    if (!aCodecCtx) {
        fprintf(stderr, "Could not allocate audio codec context\n");
        return;
    }

    aCodecCtx->codec = aCodec;
    aCodecCtx->sample_fmt = AV_SAMPLE_FMT_S16;
    aCodecCtx->bit_rate = 64000;
    aCodecCtx->sample_rate = 8000;
    aCodecCtx->channels = 1;
    aCodecCtx->channel_layout = AV_CH_LAYOUT_MONO;
    aCodecCtx->codec_type = AVMEDIA_TYPE_AUDIO;

    if (aCodec == NULL)
    {
        fprintf(stderr, "ACodec not found.\n");
    }
    else
    {
        //open it
        ///Open the audio decoder
        if (avcodec_open2(aCodecCtx, aCodec, nullptr) < 0)
        {
            fprintf(stderr, "Could not open audio codec.\n");
            doOpenVideoFileFailed();
            goto end;
        }

        ///Audio decoding setup
        aFrame = av_frame_alloc();

        //Resampling options -----------------------------------------------------------start
        aFrame_ReSample = nullptr;

        //frame -> 16bit 44100 PCM: unify the audio sample format and sample rate
        swrCtx = nullptr;

        //input channel layout
        int in_ch_layout;

        //output channel layout
        int out_ch_layout = av_get_default_channel_layout(audio_tgt_channels); ///AV_CH_LAYOUT_STEREO

        out_ch_layout &= ~AV_CH_LAYOUT_STEREO_DOWNMIX;

        /// Audio playback uses fixed parameters here:
        /// the audio is forcibly resampled to 44100 Hz, stereo, AV_SAMPLE_FMT_S16.
        /// SDL playback is opened with the same parameters.
        //Resampling options ----------------
        //input sample format
        in_sample_fmt = aCodecCtx->sample_fmt;
        //output sample format: 16bit PCM
        out_sample_fmt = AV_SAMPLE_FMT_S16;
        //input sample rate
        in_sample_rate = aCodecCtx->sample_rate;
        //input channel layout
        in_ch_layout = aCodecCtx->channel_layout;

        //output sample rate
        out_sample_rate = 44100;
        //output channel layout
        audio_tgt_channels = 2; ///av_get_channel_layout_nb_channels(out_ch_layout);
        out_ch_layout = av_get_default_channel_layout(audio_tgt_channels); ///AV_CH_LAYOUT_STEREO

        out_ch_layout &= ~AV_CH_LAYOUT_STEREO_DOWNMIX;

        /// Added 2019-5-13:
        /// For wav/wmv files aCodecCtx->channel_layout can be 0, which makes the
        /// initialization below fail, so fall back to the default layout here.
        if (in_ch_layout <= 0)
        {
            in_ch_layout = av_get_default_channel_layout(aCodecCtx->channels);
        }

        swrCtx = swr_alloc_set_opts(nullptr, out_ch_layout, out_sample_fmt, out_sample_rate,
                                    in_ch_layout, in_sample_fmt, in_sample_rate, 0, nullptr);

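        /// swr_alloc_set_opts() only allocates the context and records the conversion
        /// parameters; it is not usable until swr_init() succeeds below. For the A-law
        /// input configured above (8000 Hz mono S16) this converter performs the
        /// sample-rate conversion and the mono-to-stereo upmix in one step.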
        /** Open the resampler with the specified parameters. */
        int ret = swr_init(swrCtx);
        if (ret < 0)
        {
            char buff[128] = {0};
            av_strerror(ret, buff, 128);

            fprintf(stderr, "Could not open resample context %s\n", buff);
            swr_free(&swrCtx);
            swrCtx = nullptr;
            doOpenVideoFileFailed();
            goto end;
        }

        //buffer size for the resampled PCM data
        int out_linesize = out_sample_rate * audio_tgt_channels;

//        out_linesize = av_samples_get_buffer_size(NULL, audio_tgt_channels, av_get_bytes_per_sample(out_sample_fmt), out_sample_fmt, 1);
        out_linesize = AVCODEC_MAX_AUDIO_FRAME_SIZE;

//        mAudioStream = pFormatCtx->streams[audioStream];

        ///Open SDL for audio playback
        int code = openSDL();

        if (code == 0)
        {
            SDL_LockAudioDevice(mAudioID);
            SDL_PauseAudioDevice(mAudioID, 0);
            SDL_UnlockAudioDevice(mAudioID);

            mIsAudioThreadFinished = false;
        }
        else
        {
            doOpenSdlFailed(code);
        }
    }

    mPlayerState = VideoPlayer_Playing;
    doPlayerStateChanged(VideoPlayer_Playing, mVideoStream != nullptr, mAudioStream != nullptr);

    mVideoStartTime = av_gettime();
    fprintf(stderr, "%s mIsQuit=%d mIsPause=%d \n", __FUNCTION__, mIsQuit, mIsPause);

    ///Wait here until playback finishes or is stopped; the loop is left once mIsQuit is set.
    while (!mIsQuit)
    {
        mSleep(100);
    }

end:

    parseCB->quit();
    clearAudioQuene();
    clearVideoQuene();

    if (mPlayerState != VideoPlayer_Stop) //not an external stop(): playback ended normally
    {
        stop();
    }

    while ((mVideoStream != nullptr && !mIsVideoThreadFinished) || (mAudioStream != nullptr && !mIsAudioThreadFinished))
    {
        mSleep(10);
    } //make sure the decode threads have finished before the queues are destroyed

    closeSDL();

    if (swrCtx != nullptr)
    {
        swr_free(&swrCtx);
        swrCtx = nullptr;
    }

    if (aFrame != nullptr)
    {
        av_frame_free(&aFrame);
        aFrame = nullptr;
    }

    if (aFrame_ReSample != nullptr)
    {
        av_frame_free(&aFrame_ReSample);
        aFrame_ReSample = nullptr;
    }

    if (aCodecCtx != nullptr)
    {
        avcodec_close(aCodecCtx);
        aCodecCtx = nullptr;
    }

    if (pCodecCtx != nullptr)
    {
        avcodec_close(pCodecCtx);
        pCodecCtx = nullptr;
    }

//    avformat_close_input(&pFormatCtx);
//    avformat_free_context(pFormatCtx);

    SDL_Quit();

    doPlayerStateChanged(VideoPlayer_Stop, mVideoStream != nullptr, mAudioStream != nullptr);

    mIsReadThreadFinished = true;

    fprintf(stderr, "%s finished \n", __FUNCTION__);
}

void VideoPlayerCSRW::decodeVideoThread()
{
    fprintf(stderr, "%s start \n", __FUNCTION__);

    mIsVideoThreadFinished = false;

    int videoWidth  = 0;
    int videoHeight = 0;

    double video_pts = 0; //pts of the current video frame
    double audio_pts = 0; //audio pts

    ///Video decoding state
    AVFrame *pFrame = nullptr;
    AVFrame *pFrameYUV = nullptr;
    uint8_t *yuv420pBuffer = nullptr; //decoded YUV data
    struct SwsContext *imgConvertCtx = nullptr; //converts the decoded frames to a common pixel format

//    AVCodecContext *pCodecCtx = mVideoStream->codec; //video decoder
    //debug add decoder
    // find the H.264 video decoder
    pCodec = avcodec_find_decoder(AV_CODEC_ID_H264);
    if (!pCodec) {
        fprintf(stderr, "Codec not found\n");
        return;
    }

    pCodecCtx = avcodec_alloc_context3(pCodec);
    if (!pCodecCtx) {
        fprintf(stderr, "Could not allocate video codec context\n");
        return;
    }

    auto parser = av_parser_init(pCodec->id);
    if (!parser) {
        fprintf(stderr, "parser not found\n");
        return;
    }

    ///Open the video decoder
    if (avcodec_open2(pCodecCtx, pCodec, NULL) < 0)
    {
        fprintf(stderr, "Could not open video codec.\n");
        doOpenVideoFileFailed();
        return;
    }

//    FILE *fp_v = fopen("./pkts.h264", "wb");

    pFrame = av_frame_alloc();

    auto pkt = av_packet_alloc();

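    /// Main decode loop: packets popped from mVideoPacktList carry a raw H.264 byte
    /// stream, so av_parser_parse2() is used to split them into complete packets the
    /// decoder can consume, which are then fed through avcodec_send_packet() /
    /// avcodec_receive_frame().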
    while (1)
    {
        if (mIsQuit)
        {
            clearVideoQuene(); //clear the queue
            break;
        }

        if (mIsPause == true) //paused
        {
            mSleep(10);
            continue;
        }

        mConditon_Video->Lock();

        static int emptyTimes = 0;
        if (mVideoPacktList.size() <= 0)
        {
            mConditon_Video->Unlock();
            if (mIsReadFinished)
            {
                //the queue is empty and reading has finished
                break;
            }
            else
            {
                emptyTimes++;
                if (emptyTimes >= 50){
                    //the queue has stayed empty for too long; stop waiting
                    break;
                }
                mSleep(1); //the queue is only temporarily empty
                continue;
            }
        }
        emptyTimes = 0;

        AVPacket pkt1 = mVideoPacktList.front();
        mVideoPacktList.pop_front();

        mConditon_Video->Unlock();

//        fwrite(pkt1.data, 1, pkt1.size, fp_v);
        AVPacket *packet = &pkt1;

        //receiving this packet means a seek has just been performed, so flush the decoder buffers
        if (strcmp((char*)packet->data, FLUSH_DATA) == 0)
        {
            fprintf(stderr, "strcmp((char*)packet->data, FLUSH_DATA) == 0\n");
            avcodec_flush_buffers(pCodecCtx);
            av_packet_unref(packet);
            continue;
        }

        while (packet->size > 0) {
            int ret = av_parser_parse2(parser, pCodecCtx, &pkt->data, &pkt->size,
                                       packet->data, packet->size, AV_NOPTS_VALUE, AV_NOPTS_VALUE, 0);
            if (ret < 0) {
                fprintf(stderr, "Error while parsing\n");
                break;
            }
            packet->data += ret;
            packet->size -= ret;

            if (pkt->size) {
                auto ret = avcodec_send_packet(pCodecCtx, pkt);
                if (ret != 0)
                {
                    qDebug("input AVPacket to decoder failed!, error:%d\n", ret);
                    av_packet_unref(pkt);
                    continue;
                }

                while (0 == avcodec_receive_frame(pCodecCtx, pFrame))
                {
                    if (pkt->dts == AV_NOPTS_VALUE &&
                        pFrame->opaque && *(uint64_t*) pFrame->opaque != AV_NOPTS_VALUE)
                    {
                        video_pts = *(uint64_t *) pFrame->opaque;
                    }
                    else if (pkt->dts != AV_NOPTS_VALUE)
                    {
                        video_pts = pkt->dts;
                    }
                    else
                    {
                        video_pts = 0;
                    }

//                    printf("timebase:%d, %d\n", pCodecCtx->time_base.num, pCodecCtx->time_base.den);
                    AVRational timebase = {1, 1200000}; //hard-coded timebase for the raw stream
                    video_pts *= av_q2d(timebase); //previously: av_q2d(mVideoStream->time_base)
                    video_clock = video_pts;
                    //OUTPUT("%s %f \n", __FUNCTION__, video_pts);
                    if (seek_flag_video)
                    {
                        //a seek was performed, so skip the frames between the keyframe and the target time
                        if (video_pts < seek_time)
                        {
                            av_packet_unref(pkt);
                            continue;
                        }
                        else
                        {
                            seek_flag_video = 0;
                        }
                    }

                    ///A/V sync: check whether it is time to display this frame yet;
                    ///if not, sleep a few milliseconds and check again.
                    while (1)
                    {
                        if (mIsQuit)
                        {
                            break;
                        }

                        /*if (mAudioStream != NULL && !mIsAudioThreadFinished)
                        {
                            if (mIsReadFinished && mAudioPacktList.size() <= 0)
                            {//reading finished and all audio has been played, only video is left: display it without syncing
                                break;
                            }

                            ///with audio present, sync the video to the audio:
                            ///compare against the audio pts and delay if the video is ahead
                            audio_pts = audio_clock;
                        }
                        else*/
                        {
                            ///without audio, sync directly to the external clock
                            audio_pts = (av_gettime() - mVideoStartTime) / 1000000.0;
                            audio_clock = audio_pts;
                        }

                        //OUTPUT("%s %f %f \n", __FUNCTION__, video_pts, audio_pts);
                        //During a seek video_clock is reset to 0, so video_pts must be refreshed here,
                        //otherwise seeking backwards would get stuck in this loop.
                        video_pts = video_clock;

                        if (video_pts <= audio_pts) break;

                        int delayTime = (video_pts - audio_pts) * 1000;

                        delayTime = delayTime > 5 ? 5 : delayTime;

                        if (!mIsNeedPause)
                        {
                            mSleep(delayTime);
                        }
                    }

                    if (pCodecCtx->width != videoWidth || pCodecCtx->height != videoHeight)
                    {
                        videoWidth  = pFrame->width;
                        videoHeight = pFrame->height;

                        if (pFrameYUV != nullptr)
                        {
                            av_free(pFrameYUV);
                        }

                        if (yuv420pBuffer != nullptr)
                        {
                            av_free(yuv420pBuffer);
                        }

                        if (imgConvertCtx != nullptr)
                        {
                            sws_freeContext(imgConvertCtx);
                        }

                        pFrameYUV = av_frame_alloc();

                        int yuvSize = av_image_get_buffer_size(AV_PIX_FMT_YUV420P, pCodecCtx->width, pCodecCtx->height, 1); //1-byte alignment gives a buffer size closest to the actual size
//                        int yuvSize = av_image_get_buffer_size(AV_PIX_FMT_YUV420P, pCodecCtx->width, pCodecCtx->height, 0); //0-byte alignment yields a size of 0
//                        int yuvSize = av_image_get_buffer_size(AV_PIX_FMT_YUV420P, pCodecCtx->width, pCodecCtx->height, 4); //4-byte alignment yields a slightly larger size

                        unsigned int numBytes = static_cast<unsigned int>(yuvSize);
                        yuv420pBuffer = static_cast<uint8_t *>(av_malloc(numBytes * sizeof(uint8_t)));
                        av_image_fill_arrays(pFrameYUV->data, pFrameYUV->linesize, yuv420pBuffer, AV_PIX_FMT_YUV420P,
                                             pCodecCtx->width, pCodecCtx->height, 1);

                        ///the decoded data is not necessarily YUV420P, so convert every decoded frame to YUV420P
                        imgConvertCtx = sws_getContext(pCodecCtx->width, pCodecCtx->height,
                                                       pCodecCtx->pix_fmt, pCodecCtx->width, pCodecCtx->height,
                                                       AV_PIX_FMT_YUV420P, SWS_BICUBIC, NULL, NULL, NULL);
                    }

                    sws_scale(imgConvertCtx,
                              (uint8_t const * const *) pFrame->data,
                              pFrame->linesize, 0, pCodecCtx->height, pFrameYUV->data,
                              pFrameYUV->linesize);

                    doDisplayVideo(yuv420pBuffer, pCodecCtx->width, pCodecCtx->height);

                    if (mIsNeedPause)
                    {
                        mIsPause = true;
                        mIsNeedPause = false;
                    }
                }
            }
        }
        av_packet_unref(&pkt1);
        mSleep(40);
    }

//    if (fp_v) {
//        fclose(fp_v);
//        fp_v = NULL;
//    }

    parseCB->quit();

    //release the parser, packet and frame allocated above
    av_parser_close(parser);
    av_packet_free(&pkt);
    av_frame_free(&pFrame);

    if (pFrameYUV != nullptr)
    {
        av_free(pFrameYUV);
    }

    if (yuv420pBuffer != nullptr)
    {
        av_free(yuv420pBuffer);
    }

    if (imgConvertCtx != nullptr)
    {
        sws_freeContext(imgConvertCtx);
    }

    if (!mIsQuit)
    {
        mIsQuit = true;
    }

    mIsVideoThreadFinished = true;

    fprintf(stderr, "%s finished \n", __FUNCTION__);

    return;
}

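/// decodeAudioFrame() pulls one packet from mAudioPacktList, decodes it and resamples
/// the result into the SDL playback format. It is presumably driven from the SDL audio
/// callback configured in openSDL() (not shown in this file), with isBlock selecting
/// whether an empty queue blocks on the condition variable or returns immediately.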
int VideoPlayerCSRW::decodeAudioFrame(bool isBlock)
{
    int audioBufferSize = 0;

    while (1)
    {
        if (mIsQuit)
        {
            mIsAudioThreadFinished = true;
            clearAudioQuene(); //clear the queue
            break;
        }

        if (mIsPause == true) //paused
        {
            break;
        }

        mConditon_Audio->Lock();

        if (mAudioPacktList.size() <= 0)
        {
            if (isBlock)
            {
                mConditon_Audio->Wait();
            }
            else
            {
                mConditon_Audio->Unlock();
                break;
            }
        }

        AVPacket packet = mAudioPacktList.front();
        mAudioPacktList.pop_front();
//        qDebug()<<__FUNCTION__<<mAudioPacktList.size();
        mConditon_Audio->Unlock();

        AVPacket *pkt = &packet;

        /* if update, update the audio clock w/pts */
        if (pkt->pts != AV_NOPTS_VALUE)
        {
            AVRational timebase = {1, 8000}; //timebase of the 8000 Hz A-law stream
            audio_clock = av_q2d(timebase) * pkt->pts;
        }

        //receiving this packet means a seek has just been performed, so flush the decoder buffers
        if (strcmp((char*)pkt->data, FLUSH_DATA) == 0)
        {
            avcodec_flush_buffers(aCodecCtx/*mAudioStream->codec*/);
            av_packet_unref(pkt);
            continue;
        }

        if (seek_flag_audio)
        {
            //a seek was performed, so skip the frames between the keyframe and the target time
            if (audio_clock < seek_time)
            {
                continue;
            }
            else
            {
                seek_flag_audio = 0;
            }
        }

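        //Decode, then resample into the SDL output format. The number of output samples
        //is computed with av_rescale_rnd(), i.e. roughly in_nb_samples * out_sample_rate
        /// in_sample_rate rounded up, plus the resampler's buffered delay.
        //(avcodec_decode_audio4 is deprecated in newer FFmpeg releases in favour of
        //avcodec_send_packet/avcodec_receive_frame, but it is kept here as-is.)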
        //Decode AVPacket -> AVFrame
        int got_frame = 0;
        int size = avcodec_decode_audio4(aCodecCtx, aFrame, &got_frame, &packet);

        //how to save one channel of data before resampling:
//        size_t unpadded_linesize = aFrame->nb_samples * av_get_bytes_per_sample((AVSampleFormat) aFrame->format);
//        static FILE * fp = fopen("out.pcm", "wb");
//        fwrite(aFrame->extended_data[0], 1, unpadded_linesize, fp);

        av_packet_unref(&packet);

        if (got_frame)
        {
            ///the audio decoded by FFmpeg is not in the format SDL expects,
            ///so resample it to 44100 Hz, stereo, AV_SAMPLE_FMT_S16 here
            if (aFrame_ReSample == NULL)
            {
                aFrame_ReSample = av_frame_alloc();
            }

            if (aFrame_ReSample->nb_samples != aFrame->nb_samples)
            {
                aFrame_ReSample->nb_samples = av_rescale_rnd(swr_get_delay(swrCtx, out_sample_rate) + aFrame->nb_samples,
                                                             out_sample_rate, in_sample_rate, AV_ROUND_UP);

                av_samples_fill_arrays(aFrame_ReSample->data, aFrame_ReSample->linesize, audio_buf, audio_tgt_channels, aFrame_ReSample->nb_samples, out_sample_fmt, 0);
            }

            int len2 = swr_convert(swrCtx, aFrame_ReSample->data, aFrame_ReSample->nb_samples, (const uint8_t**)aFrame->data, aFrame->nb_samples);
            int resampled_data_size = len2 * audio_tgt_channels * av_get_bytes_per_sample(out_sample_fmt);

            audioBufferSize = resampled_data_size;
            break;
        }
    }

    return audioBufferSize;
}