#include "PL_AndroidStagefrightDecoder.h"
|
#include "MaterialBuffer.h"
|
#include "logger.h"
|
#include "MediaHelper.h"
|
|
#include <binder/ProcessState.h>
|
|
#include <media/stagefright/foundation/ADebug.h>
|
#include <media/stagefright/foundation/ALooper.h>
|
#include <media/stagefright/DataSource.h>
|
#include <media/stagefright/MetaData.h>
|
#include <media/stagefright/OMXClient.h>
|
#include <media/stagefright/OMXCodec.h>
|
#include <media/stagefright/foundation/base64.h>
|
|
#include <media/stagefright/rtsp/ARTPSession.h>
|
#include <media/stagefright/rtsp/ASessionDescription.h>
|
|
#todo 注意 rtspclient和后续步骤的解耦合(解码过程异步队列)
|
|

struct PL_ASFD_Internal
{
	PL_ASFD_Config config;

	// The members below are not declared in this snippet; they are inferred from
	// their usage in pay()/gain(). Treat the exact types and ownership as assumptions.
	AMediaCodec* codec = nullptr;      // decoder instance (creation not shown in this section)
	int inputFrameCount = 0;
	ssize_t lastOutputBuffIdx = -1;
	MB_Frame lastMbfBuffIdx;           // frame carrying the output buffer index
	MB_Frame lastMbfBuffer;            // frame carrying copied decoded data
	MB_Frame* lastMbfList[2] = { &lastMbfBuffIdx, &lastMbfBuffer };
	uint8_t* buffer = nullptr;         // staging buffer for decoded data (allocated elsewhere)
	size_t buffSize = 0;
	size_t buffSizeMax = 0;

	PL_ASFD_Internal() :
		config()
	{
	}

	~PL_ASFD_Internal()
	{
	}

	void reset()
	{
		PL_ASFD_Config _config;
		config = _config;
	}
};

PipeLineElem* create_PL_AndroidStagefrightDecoder()
{
	return new PL_AndroidStagefrightDecoder;
}
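
// Minimal usage sketch (assumption: the surrounding PipeLine framework simply calls
// init/pay/gain/finit in this order; variable names here are illustrative only):
//
//   PL_AndroidStagefrightDecoder* dec =
//       (PL_AndroidStagefrightDecoder*)create_PL_AndroidStagefrightDecoder();
//   PL_ASFD_Config cfg;                 // fill ak_width, ak_height, windowSurface, ...
//   dec->init(&cfg);
//
//   PipeMaterial pmIn;                  // pmIn.type = PipeMaterial::PMT_FRAME,
//                                       // pmIn.buffer -> MB_Frame of type MBFT_H264_NALU
//   if (dec->pay(pmIn))
//   {
//       PipeMaterial pmOut;
//       dec->gain(pmOut);               // pmOut becomes a PMT_FRAME_LIST on success
//   }
//
//   dec->finit();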

struct ASFD_PaySource : public APacketSource
{
	// currently empty and not referenced in the code below
};

PL_AndroidStagefrightDecoder::PL_AndroidStagefrightDecoder() : internal(new PL_ASFD_Internal)
{
}

PL_AndroidStagefrightDecoder::~PL_AndroidStagefrightDecoder()
{
	delete (PL_ASFD_Internal*)internal;
	internal = nullptr;
}

bool PL_AndroidStagefrightDecoder::init(void* args)
{
	PL_ASFD_Internal* in = (PL_ASFD_Internal*)internal;
	in->reset();

	PL_ASFD_Config* config = (PL_ASFD_Config*)args;
	in->config = *config;

	return true;
}
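
// Note: in->codec is used by pay()/gain() but is never created in this section.
// A typical NDK MediaCodec setup for an H.264 stream (a sketch only, assuming the
// "video/avc" mime type and that config carries the frame size and target surface;
// it could run at the end of init() or elsewhere) would look like:
//
//   AMediaFormat* fmt = AMediaFormat_new();
//   AMediaFormat_setString(fmt, AMEDIAFORMAT_KEY_MIME, "video/avc");
//   AMediaFormat_setInt32(fmt, AMEDIAFORMAT_KEY_WIDTH, in->config.ak_width);
//   AMediaFormat_setInt32(fmt, AMEDIAFORMAT_KEY_HEIGHT, in->config.ak_height);
//
//   in->codec = AMediaCodec_createDecoderByType("video/avc");
//   AMediaCodec_configure(in->codec, fmt,
//                         (ANativeWindow*)in->config.windowSurface, // or NULL for ByteBuffer output
//                         NULL /*crypto*/, 0 /*flags*/);
//   AMediaCodec_start(in->codec);
//   AMediaFormat_delete(fmt);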

void PL_AndroidStagefrightDecoder::finit()
{
	PL_ASFD_Internal* in = (PL_ASFD_Internal*)internal;
	(void)in; // nothing is released here yet
}
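
// pay(): feed one H.264 NALU into the decoder.
// Flow: dequeue an input buffer (2000 us timeout), copy the NALU into it, and queue it
// back with the frame's pts. If config.releaseOutputBuffIdxInPay is set, an output
// buffer is also dequeued here (and released when config.releaseOutputBuffIdx is set)
// instead of waiting for gain().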

bool PL_AndroidStagefrightDecoder::pay(const PipeMaterial& pm)
{
	PL_ASFD_Internal* in = (PL_ASFD_Internal*)internal;

	if (pm.type != PipeMaterial::PMT_FRAME)
	{
		LOG_ERROR << "Only support PMT_FRAME" << LOG_ENDL;
		return false;
	}

	if (pm.buffer == nullptr)
		return false;

	MB_Frame* frame = (MB_Frame*)pm.buffer;
	if (frame->type != MB_Frame::MBFT_H264_NALU)
	{
		LOG_ERROR << "Only support MBFT_H264_NALU" << LOG_ENDL;
		return false;
	}

	ssize_t bufidx = AMediaCodec_dequeueInputBuffer(in->codec, 2000);
	LOGP(DEBUG, "input buffer bufidx=%zd, inputFrameCount=%d", bufidx, in->inputFrameCount++);

	if (bufidx >= 0)
	{
		size_t bufsize;
		uint8_t* inputBuff = AMediaCodec_getInputBuffer(in->codec, bufidx, &bufsize);
		size_t sampleSize = std::min(bufsize, frame->buffSize);
		memcpy(inputBuff, frame->buffer, sampleSize); // fill buffer

		uint64_t presentationTimeUs = timeval_to_microseconds(frame->pts); // microseconds

		media_status_t ms = AMediaCodec_queueInputBuffer(in->codec, bufidx, 0, sampleSize, presentationTimeUs, 0);
		LOGP(DEBUG, "media_status_t=%d", ms);
	}
	else
	{
		LOG_WARN << "bufidx=" << bufidx << LOG_ENDL;
		return false; // return true for gain
	}

	if (in->config.releaseOutputBuffIdxInPay)
	{
		AMediaCodecBufferInfo info;
		in->lastOutputBuffIdx = AMediaCodec_dequeueOutputBuffer(in->codec, &info, 0);
		LOG_WARN << "releaseOutputBuffIdxInPay bufidx=" << in->lastOutputBuffIdx << ", flags=" << info.flags << LOG_ENDL;

		if (in->lastOutputBuffIdx >= 0)
		{
			if (in->config.releaseOutputBuffIdx)
			{
				AMediaCodec_releaseOutputBuffer(in->codec, in->lastOutputBuffIdx, info.size != 0);
				in->lastOutputBuffIdx = -1;
			}
		}
	}

	return true;
}
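
// gain(): drain one output buffer from the decoder and publish it downstream as a
// PMT_FRAME_LIST containing two MB_Frame entries:
//   - lastMbfBuffIdx (MBFT_INDEX)  : the AMediaCodec output-buffer index, filled unless
//                                    config.releaseOutputBuffIdx is set;
//   - lastMbfBuffer  (MBFT_YUV420) : a copy of the decoded data, filled only every
//                                    config.generateDecodedDataPerFrame input frames.
// Returns false when no output buffer is available yet.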

bool PL_AndroidStagefrightDecoder::gain(PipeMaterial& pm)
{
	PL_ASFD_Internal* in = (PL_ASFD_Internal*)internal;

	AMediaCodecBufferInfo info;
	in->lastOutputBuffIdx = AMediaCodec_dequeueOutputBuffer(in->codec, &info, 0);
	if (in->lastOutputBuffIdx >= 0)
	{
		if (info.flags & AMEDIACODEC_BUFFER_FLAG_END_OF_STREAM)
		{
			LOGP(WARNING, "output EOS");
		}

		//int64_t presentationNano = info.presentationTimeUs * 1000;
		//if (d->renderstart < 0) {
		//	d->renderstart = systemnanotime() - presentationNano;
		//}
		//int64_t delay = (d->renderstart + presentationNano) - systemnanotime();
		//if (delay > 0) {
		//	usleep(delay / 1000);
		//}

		AMediaFormat* format = AMediaCodec_getOutputFormat(in->codec);
		if (format != NULL)
		{
			int32_t width = 0, height = 0, color = 0;
			AMediaFormat_getInt32(format, AMEDIAFORMAT_KEY_WIDTH, &width);
			AMediaFormat_getInt32(format, AMEDIAFORMAT_KEY_HEIGHT, &height);
			AMediaFormat_getInt32(format, AMEDIAFORMAT_KEY_COLOR_FORMAT, &color);
			AMediaFormat_delete(format);
			format = nullptr;

			LOGP(DEBUG, "output media format, w=%d, h=%d, c=%d", width, height, color);
		}

		in->lastMbfBuffIdx.reset();
		if (! in->config.releaseOutputBuffIdx)
		{
			in->lastMbfBuffIdx.type = MB_Frame::MBFT_INDEX;
			in->lastMbfBuffIdx.buffer = (void*)(in->lastOutputBuffIdx);
			in->lastMbfBuffIdx.buffSize = sizeof(in->lastOutputBuffIdx);
			in->lastMbfBuffIdx.width = in->config.ak_width;
			in->lastMbfBuffIdx.height = in->config.ak_height;
			microseconds_to_timeval(info.presentationTimeUs, in->lastMbfBuffIdx.pts);
		}

		in->lastMbfBuffer.reset();
		if ((in->config.generateDecodedDataPerFrame != 0) && (in->inputFrameCount % in->config.generateDecodedDataPerFrame == 0))
		{
			size_t outSize = in->buffSizeMax;
			uint8_t* outputBuff = AMediaCodec_getOutputBuffer(in->codec, in->lastOutputBuffIdx, &outSize);
			if (outputBuff != nullptr)
			{
				in->buffSize = std::min((size_t) info.size, in->buffSizeMax);
				memcpy(in->buffer, outputBuff + info.offset, in->buffSize);
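
				// Direct software render path below: the raw decoded bytes are copied into
				// the locked window buffer as-is. Note this performs no color conversion and
				// does not honor wbuffer.stride; it assumes the buffer and window layouts match.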

				if (in->config.renderFromOutputBuff)
				{
					ANativeWindow* window = (ANativeWindow*)(in->config.windowSurface);
					ANativeWindow_Buffer wbuffer;
					if (ANativeWindow_lock(window, &wbuffer, NULL) == 0)
					{
						size_t bitsSize = 0;
						if (wbuffer.format == WINDOW_FORMAT_RGBA_8888 || wbuffer.format == WINDOW_FORMAT_RGBX_8888)
							bitsSize = wbuffer.height * wbuffer.width * 4;
						else if (wbuffer.format == WINDOW_FORMAT_RGB_565)
							bitsSize = wbuffer.height * wbuffer.width * 2;
						else
							bitsSize = wbuffer.height * wbuffer.width;

						memcpy(wbuffer.bits, in->buffer, bitsSize);
						//memcpy(in->buffer, wbuffer.bits, bitsSize); //#test copy opposite
						ANativeWindow_unlockAndPost(window);
					}
				}

				in->lastMbfBuffer.type = MB_Frame::MBFT_YUV420;
				in->lastMbfBuffer.buffer = in->buffer;
				in->lastMbfBuffer.buffSize = in->buffSize;
				in->lastMbfBuffer.width = in->config.ak_width;
				in->lastMbfBuffer.height = in->config.ak_height;
				microseconds_to_timeval(info.presentationTimeUs, in->lastMbfBuffer.pts);

				//if (in->lastMbfBuffer.buffSize > 10)
				//{
				//	static size_t f = 0;
				//	char fname[50];
				//	sprintf(fname, "/sdcard/face-%u.yuv", ++f);
				//	FILE *pFile = fopen(fname, "wb");
				//	fwrite(in->lastMbfBuffer.buffer, sizeof(char), in->lastMbfBuffer.buffSize, pFile);
				//	printf("write face file %s\n", fname);
				//	fclose(pFile);
				//	if (f > 50) exit(0);
				//}
			}
		}

		pm.type = PipeMaterial::PMT_FRAME_LIST;
		pm.buffer = in->lastMbfList; // MB_Frame* lastMbfList[2], decays to MB_Frame**
		pm.buffSize = sizeof(in->lastMbfList) / sizeof(MB_Frame*); // 2
		pm.deleter = nullptr;
		pm.former = this;

		if (in->config.releaseOutputBuffIdx)
		{
			AMediaCodec_releaseOutputBuffer(in->codec, in->lastOutputBuffIdx, in->config.renderFromOutputBuffIdx); //info.size != 0
			in->lastOutputBuffIdx = -1;
		}

		return true;
	}
	else if (in->lastOutputBuffIdx == AMEDIACODEC_INFO_OUTPUT_BUFFERS_CHANGED)
	{
		LOGP(DEBUG, "output buffers changed");
	}
	else if (in->lastOutputBuffIdx == AMEDIACODEC_INFO_OUTPUT_FORMAT_CHANGED)
	{
		auto format = AMediaCodec_getOutputFormat(in->codec);
		LOGP(INFO, "format changed to: %s", AMediaFormat_toString(format));
		AMediaFormat_delete(format);
	}
	else if (in->lastOutputBuffIdx == AMEDIACODEC_INFO_TRY_AGAIN_LATER)
	{
		LOGP(DEBUG, "no output buffer right now");
	}
	else
	{
		LOGP(WARNING, "unexpected info code: %zd", in->lastOutputBuffIdx);
	}

	return false;
}