#include "PL_AndroidMediaCodecEncoder.h" #include "MaterialBuffer.h" #include "logger.h" #include "MediaHelper.h" #include #include #include #include #include struct PL_AMCE_Internal { uint8_t buffer[1920*1080*3];//#todo new from config size_t buffSize; const size_t buffSizeMax; size_t inputFrameCount; PL_AndroidMediaCodecEncoder_Config config; AMediaCodec* codec; bool payOK; MB_Frame tempFrame; // frame for gain PL_AMCE_Internal() : buffSize(0), buffSizeMax(sizeof(buffer)), inputFrameCount(0), config(), codec(nullptr), payOK(false), tempFrame() { } ~PL_AMCE_Internal() { } void reset() { buffSize = 0; inputFrameCount = 0; PL_AndroidMediaCodecEncoder_Config _config; config = _config; codec = nullptr;//#todo destory payOK = false; MB_Frame _tempFrame; tempFrame = _tempFrame; } }; PipeLineElem* create_PL_AndroidMediaCodecEncoder() { return new PL_AndroidMediaCodecEncoder; } PL_AndroidMediaCodecEncoder::PL_AndroidMediaCodecEncoder() : internal(new PL_AMCE_Internal) { } PL_AndroidMediaCodecEncoder::~PL_AndroidMediaCodecEncoder() { delete (PL_AMCE_Internal*)internal; internal= nullptr; } bool PL_AndroidMediaCodecEncoder::init(void* args) { PL_AMCE_Internal* in = (PL_AMCE_Internal*)internal; in->reset(); PL_AndroidMediaCodecEncoder_Config* config = (PL_AndroidMediaCodecEncoder_Config*)args; in->config = *config; // see: developer.android.com/reference/android/media/MediaFormat.html#KEY_PROFILE AMediaFormat* format = AMediaFormat_new(); AMediaFormat_setString(format, AMEDIAFORMAT_KEY_MIME, config->ak_mime.c_str()); AMediaFormat_setInt32(format, AMEDIAFORMAT_KEY_HEIGHT, config->ak_height); AMediaFormat_setInt32(format, AMEDIAFORMAT_KEY_WIDTH, config->ak_width); AMediaFormat_setInt32(format, AMEDIAFORMAT_KEY_BIT_RATE, config->ak_bit_rate); AMediaFormat_setInt32(format, AMEDIAFORMAT_KEY_FRAME_RATE, config->ak_frame_rate); AMediaFormat_setInt32(format, AMEDIAFORMAT_KEY_I_FRAME_INTERVAL, config->ak_i_frame_interval); // android multipled 10 AMediaFormat_setInt32(format, AMEDIAFORMAT_KEY_CHANNEL_COUNT, 1); //AMediaFormat_setInt32(format, "profile", 0x00000100); // see: https://developer.android.com/reference/android/media/MediaCodecInfo.CodecCapabilities.html#COLOR_FormatYUV420Flexible #define AMEDIA_COLOR_FormatYUV420Flexible 0x7f420888 AMediaFormat_setInt32(format, AMEDIAFORMAT_KEY_COLOR_FORMAT, config->ak_color_format); //AMediaFormat_setInt32(format, AMEDIAFORMAT_KEY_STRIDE, config->ak_width * 2); if (config->codecProfileLevel.profile != 0) AMediaFormat_setInt32(format, "profile", config->codecProfileLevel.profile); if (config->codecProfileLevel.level != 0) AMediaFormat_setInt32(format, "level", config->codecProfileLevel.level); //uint8_t sps[] = {0x0,0x0,0x0,0x1, 0x67, 0x42, 0x00, 0x2A, 0x95, 0xA8, 0x1E, 0x00, 0x89, 0xF9, 0x61, 0x00, 0x00, 0x07, 0x08, 0x00, 0x01, 0x5F, 0x90, 0x04}; //uint8_t pps[] = {0x0,0x0,0x0,0x1, 0x68, 0xCE, 0x3C, 0x80}; //AMediaFormat_setBuffer(format, "csd-0", sps, sizeof(sps)); // sps //AMediaFormat_setBuffer(format, "csd-1", pps, sizeof(pps)); // pps // should like: // mime: string(video/avc), durationUs: int64(10000000), width: int32(480), height: int32(360), max-input-size: int32(55067), csd-0: data, csd-1: data} LOG_INFO << "AMediaFormat_toString: " << AMediaFormat_toString(format) << LOG_ENDL; in->codec = AMediaCodec_createEncoderByType(config->ak_mime.c_str()); if (AMediaCodec_configure(in->codec, format, nullptr, nullptr, AMEDIACODEC_CONFIGURE_FLAG_ENCODE) != AMEDIA_OK) { AMediaFormat_delete(format); LOG_ERROR << "AMediaCodec_configure error" << LOG_ENDL; return 
    if (AMediaCodec_start(in->codec) != AMEDIA_OK)
    {
        AMediaFormat_delete(format);
        LOG_ERROR << "AMediaCodec_start error" << LOG_ENDL;
        return false;
    }

    AMediaFormat_delete(format);
    return true;
}

void PL_AndroidMediaCodecEncoder::finit()
{
    PL_AMCE_Internal* in = (PL_AMCE_Internal*)internal;
    //todo release codec
    // call AMediaCodec_stop
}

// PipeMaterial breaker: copies one raw (NV12) frame into the codec's next free input buffer.
bool amce_pay_frame_breaker(const PipeMaterial* pm, void* args)
{
    PL_AMCE_Internal* in = (PL_AMCE_Internal*)args;

    MB_Frame* frame = (MB_Frame*)pm->buffer;

    ssize_t bufidx = AMediaCodec_dequeueInputBuffer(in->codec, 2000);
    LOGP(DEBUG, "input buffer bufidx=%zd, inputFrameCount=%zu", bufidx, in->inputFrameCount++);
    if (bufidx >= 0)
    {
        size_t bufsize;
        uint8_t* inputBuff = AMediaCodec_getInputBuffer(in->codec, bufidx, &bufsize);
        size_t sampleSize = std::min(bufsize, frame->buffSize);
        memcpy(inputBuff, frame->buffer, sampleSize); // fill buffer

        uint64_t presentationTimeUs = timeval_to_microseconds(frame->pts); // microseconds
        media_status_t ms = AMediaCodec_queueInputBuffer(in->codec, bufidx, 0, sampleSize, presentationTimeUs, 0);
        in->payOK = true;
        LOGP(DEBUG, "media_status_t=%d", ms);
    }
    else
    {
        LOG_WARN << "bufidx=" << bufidx << LOG_ENDL;
        in->payOK = false;
        return false;
    }

    return false;
}

bool PL_AndroidMediaCodecEncoder::pay(const PipeMaterial& pm)
{
    PL_AMCE_Internal* in = (PL_AMCE_Internal*)internal;

    in->payOK = false;

    if (!in->payOK)
        pm.breake(PipeMaterial::PMT_FRAME, MB_Frame::MBFT_NV12, amce_pay_frame_breaker, in);

    return in->payOK;
}

bool PL_AndroidMediaCodecEncoder::gain(PipeMaterial& pm)
{
    PL_AMCE_Internal* in = (PL_AMCE_Internal*)internal;

    if (!in->payOK)
    {
        LOG_WARN << "not in->payOK" << LOG_ENDL;
        return false;
    }

    pm.deleter = nullptr;
    pm.former = this;

    AMediaCodecBufferInfo info;
    ssize_t outputBuffIdx = AMediaCodec_dequeueOutputBuffer(in->codec, &info, 0);
    if (outputBuffIdx >= 0)
    {
        if (info.flags & AMEDIACODEC_BUFFER_FLAG_END_OF_STREAM)
        {
            LOGP(WARNING, "output EOS");
        }

        //AMediaFormat* format = AMediaCodec_getOutputFormat(in->codec);
        //if (format != NULL)
        //{
        //    int32_t width, height, color;
        //    AMediaFormat_getInt32(format, AMEDIAFORMAT_KEY_WIDTH, &width);
        //    AMediaFormat_getInt32(format, AMEDIAFORMAT_KEY_HEIGHT, &height);
        //    AMediaFormat_getInt32(format, AMEDIAFORMAT_KEY_COLOR_FORMAT, &color);
        //    AMediaFormat_delete(format);
        //    format = nullptr;
        //    LOGP(DEBUG, "output media format, w=%d, h=%d, c=%d", width, height, color);
        //}

        in->tempFrame.reset();

        // Copy the encoded access unit into the element's own buffer so it stays
        // valid after the codec buffer is released.
        size_t outSize = in->buffSizeMax;
        uint8_t* outputBuff = AMediaCodec_getOutputBuffer(in->codec, outputBuffIdx, &outSize);
        if (outputBuff != nullptr)
        {
            in->buffSize = std::min((size_t)info.size, in->buffSizeMax);
            memcpy(in->buffer, outputBuff + info.offset, in->buffSize);

            in->tempFrame.type = MB_Frame::MBFT_H264_NALU_AUX;
            in->tempFrame.buffer = in->buffer;
            in->tempFrame.buffSize = in->buffSize;
            in->tempFrame.width = in->config.ak_width;
            in->tempFrame.height = in->config.ak_height;
            microseconds_to_timeval(info.presentationTimeUs, in->tempFrame.pts);

            pm.type = PipeMaterial::PMT_FRAME;
            pm.buffer = &(in->tempFrame);
            pm.buffSize = 0;

            //static size_t f = 0;
            //static FILE *pFile = fopen("/data/aa.264", "wb");
            //fwrite(in->buffer, sizeof(char), in->buffSize, pFile);
            //if (++f > 400){
            //    fclose(pFile);
            //    exit(0);
            //}
        }

        AMediaCodec_releaseOutputBuffer(in->codec, outputBuffIdx, false);
        return true;
    }
    else if (outputBuffIdx == AMEDIACODEC_INFO_OUTPUT_BUFFERS_CHANGED)
    {
        LOGP(DEBUG, "output buffers changed");
    }
    else if (outputBuffIdx == AMEDIACODEC_INFO_OUTPUT_FORMAT_CHANGED)
    {
        auto format = AMediaCodec_getOutputFormat(in->codec);
        LOGP(INFO, "format changed to: %s", AMediaFormat_toString(format));

        // Pull the codec-specific data (csd-0 = SPS, csd-1 = PPS); the code assumes
        // both begin with a 4-byte Annex-B start code, which is skipped before the
        // base64-encoded strings are published via the pipeline manager parameters.
        uint8_t* sps = nullptr;
        size_t spsSize = 0;
        uint8_t* pps = nullptr;
        size_t ppsSize = 0;

        AMediaFormat_getBuffer(format, "csd-0", (void**)&sps, &spsSize); // sps
        AMediaFormat_getBuffer(format, "csd-1", (void**)&pps, &ppsSize); // pps

        if (spsSize > 4 && ppsSize > 4) // guard both buffers before skipping the start code
        {
            char* tmp = nullptr;
            std::string spsStr = tmp = base64_encode(((const char*)sps) + 4, spsSize - 4); //#todo aux
            delete[] tmp;
            std::string ppsStr = tmp = base64_encode(((const char*)pps) + 4, ppsSize - 4);
            delete[] tmp;
            tmp = nullptr;

            this->manager->set_param(PLGP_ENC_SPS_B64, spsStr);
            this->manager->set_param(PLGP_ENC_PPS_B64, ppsStr);
        }

        AMediaFormat_delete(format);
    }
    else if (outputBuffIdx == AMEDIACODEC_INFO_TRY_AGAIN_LATER)
    {
        LOGP(DEBUG, "no output buffer right now");
    }
    else
    {
        LOGP(WARNING, "unexpected info code: %zd", outputBuffIdx);
    }

    return false;
}
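
// Illustrative usage sketch (not part of the original pipeline code; kept in an
// #if 0 block on purpose). It shows the intended lifecycle of this element:
// init() with a PL_AndroidMediaCodecEncoder_Config, pay() a raw NV12 MB_Frame
// wrapped in a PipeMaterial, then gain() the encoded H.264 NALU. In the real
// project the PipeLine manager drives pay()/gain() and builds the PipeMaterial;
// constructing PipeMaterial/MB_Frame by hand here, and the concrete config
// values, are assumptions for illustration only. A hardware encoder may also
// need several pay()/gain() rounds before the first output buffer appears.
#if 0
static void amce_usage_sketch(uint8_t* nv12Data, size_t nv12Size)
{
    PL_AndroidMediaCodecEncoder_Config cfg;
    cfg.ak_mime = "video/avc";
    cfg.ak_width = 640;
    cfg.ak_height = 480;
    cfg.ak_bit_rate = 1024 * 1024;
    cfg.ak_frame_rate = 25;
    cfg.ak_i_frame_interval = 1;
    cfg.ak_color_format = AMEDIA_COLOR_FormatYUV420Flexible;

    PL_AndroidMediaCodecEncoder enc;
    if (!enc.init(&cfg))
        return;

    // Wrap one raw NV12 frame the way upstream elements do.
    MB_Frame frame;
    frame.type = MB_Frame::MBFT_NV12;
    frame.buffer = nv12Data;
    frame.buffSize = nv12Size;
    frame.width = cfg.ak_width;
    frame.height = cfg.ak_height;
    frame.pts.tv_sec = 0;
    frame.pts.tv_usec = 0;

    PipeMaterial pmIn;
    pmIn.type = PipeMaterial::PMT_FRAME;
    pmIn.buffer = &frame;
    pmIn.buffSize = 0;

    // pay() feeds the frame to the codec input queue; gain() drains one encoded
    // access unit into pmOut (as an MBFT_H264_NALU_AUX MB_Frame).
    PipeMaterial pmOut;
    if (enc.pay(pmIn) && enc.gain(pmOut))
    {
        MB_Frame* out = (MB_Frame*)pmOut.buffer;
        LOGP(DEBUG, "encoded %zu bytes", out->buffSize);
    }

    enc.finit();
}
#endif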