From 1e777cf1467b4e02c3f1ff03434ec46cf8bb9838 Mon Sep 17 00:00:00 2001
From: houxiao <houxiao@454eff88-639b-444f-9e54-f578c98de674>
Date: Thu, 03 Aug 2017 09:46:49 +0800
Subject: [PATCH] split client server
---
RtspFace/PL_RTSPServer.cpp | 1
RtspFace/PL_SensetimeFaceTrackMultiTrd.cpp | 4
RtspFace/PL_AndroidMediaCodecEncoder.h | 63 ++++++++++
RtspFace/PL_AndroidMediaCodecEncoder.cpp | 266 ++++++++++++++++++++++++++++++++++++++++++++
4 files changed, 332 insertions(+), 2 deletions(-)
diff --git a/RtspFace/PL_AndroidMediaCodecEncoder.cpp b/RtspFace/PL_AndroidMediaCodecEncoder.cpp
new file mode 100644
index 0000000..ca2b98b
--- /dev/null
+++ b/RtspFace/PL_AndroidMediaCodecEncoder.cpp
@@ -0,0 +1,266 @@
+#include "PL_AndroidMediaCodecEncoder.h"
+#include "MaterialBuffer.h"
+#include "logger.h"
+#include "MediaHelper.h"
+
+#include <media/NdkMediaCodec.h>
+#include <media/NdkMediaFormat.h>
+
+#include <android/bitmap.h>
+
+#include <stdlib.h>
+#include <string.h>   // memcpy
+#include <algorithm>  // std::min
+#include <liveMedia/liveMedia.hh>
+
+struct PL_AMCE_Internal
+{
+	uint8_t buffer[1920*1080*3]; // #todo: allocate from config; holds the most recent encoded frame handed out by gain()
+ size_t buffSize;
+ const size_t buffSizeMax;
+
+ size_t inputFrameCount;
+
+ PL_AndroidMediaCodecEncoder_Config config;
+
+ AMediaCodec* codec;
+
+ bool payOK;
+
+ MB_Frame tempFrame; // frame for gain
+
+ PL_AMCE_Internal() :
+ buffSize(0), buffSizeMax(sizeof(buffer)),
+ inputFrameCount(0),
+ config(),
+ codec(nullptr),
+ payOK(false),
+ tempFrame()
+ {
+ }
+
+ ~PL_AMCE_Internal()
+ {
+ }
+
+ void reset()
+ {
+ buffSize = 0;
+
+ inputFrameCount = 0;
+
+ PL_AndroidMediaCodecEncoder_Config _config;
+ config = _config;
+
+		codec = nullptr; // #todo: destroy the previous codec before dropping the pointer
+
+ payOK = false;
+
+ MB_Frame _tempFrame;
+ tempFrame = _tempFrame;
+ }
+};
+
+PipeLineElem* create_PL_AndroidMediaCodecEncoder()
+{
+ return new PL_AndroidMediaCodecEncoder;
+}
+
+PL_AndroidMediaCodecEncoder::PL_AndroidMediaCodecEncoder() : internal(new PL_AMCE_Internal)
+{
+}
+
+PL_AndroidMediaCodecEncoder::~PL_AndroidMediaCodecEncoder()
+{
+ delete (PL_AMCE_Internal*)internal;
+	internal = nullptr;
+}
+
+bool PL_AndroidMediaCodecEncoder::init(void* args)
+{
+ PL_AMCE_Internal* in = (PL_AMCE_Internal*)internal;
+ in->reset();
+
+ PL_AndroidMediaCodecEncoder_Config* config = (PL_AndroidMediaCodecEncoder_Config*)args;
+ in->config = *config;
+
+ AMediaFormat* format = AMediaFormat_new();
+
+ AMediaFormat_setString(format, AMEDIAFORMAT_KEY_MIME, config->ak_mime.c_str());
+ AMediaFormat_setInt32(format, AMEDIAFORMAT_KEY_HEIGHT, config->ak_height);
+ AMediaFormat_setInt32(format, AMEDIAFORMAT_KEY_WIDTH, config->ak_width);
+
+ AMediaFormat_setInt32(format, AMEDIAFORMAT_KEY_BIT_RATE, config->ak_bit_rate);
+ AMediaFormat_setInt32(format, AMEDIAFORMAT_KEY_FRAME_RATE, config->ak_frame_rate);
+ AMediaFormat_setInt32(format, AMEDIAFORMAT_KEY_I_FRAME_INTERVAL, config->ak_i_frame_interval);
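+	// note: frame rate is in frames per second; the I-frame interval is in seconds between key frames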
+
+// see: https://developer.android.com/reference/android/media/MediaCodecInfo.CodecCapabilities.html#COLOR_FormatYUV420Flexible
+#define AMEDIA_COLOR_FormatYUV420Flexible 0x7f420888
+ AMediaFormat_setInt32(format, AMEDIAFORMAT_KEY_COLOR_FORMAT, config->ak_color_format);
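+	// the #define above only documents the flexible YUV420 value; the format actually applied comes from config->ak_color_format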
+ //AMediaFormat_setInt32(format, AMEDIAFORMAT_KEY_STRIDE, config->ak_width * 2);
+
+ //uint8_t sps[] = {0x0,0x0,0x0,0x1, 0x67, 0x42, 0x00, 0x2A, 0x95, 0xA8, 0x1E, 0x00, 0x89, 0xF9, 0x61, 0x00, 0x00, 0x07, 0x08, 0x00, 0x01, 0x5F, 0x90, 0x04};
+ //uint8_t pps[] = {0x0,0x0,0x0,0x1, 0x68, 0xCE, 0x3C, 0x80};
+ //AMediaFormat_setBuffer(format, "csd-0", sps, sizeof(sps)); // sps
+ //AMediaFormat_setBuffer(format, "csd-1", pps, sizeof(pps)); // pps
+
+	// AMediaFormat_toString output should look like:
+	// mime: string(video/avc), durationUs: int64(10000000), width: int32(480), height: int32(360), max-input-size: int32(55067), csd-0: data, csd-1: data}
+ LOG_INFO << "AMediaFormat_toString: " << AMediaFormat_toString(format) << LOG_ENDL;
+
+ in->codec = AMediaCodec_createEncoderByType(config->ak_mime.c_str());
+
+ if (AMediaCodec_configure(in->codec, format, nullptr, nullptr, AMEDIACODEC_CONFIGURE_FLAG_ENCODE) != AMEDIA_OK)
+ {
+ AMediaFormat_delete(format);
+ LOG_ERROR << "AMediaCodec_configure error" << LOG_ENDL;
+ return false;
+ }
+
+ if (AMediaCodec_start(in->codec) != AMEDIA_OK)
+ {
+ AMediaFormat_delete(format);
+ LOG_ERROR << "AMediaCodec_start error" << LOG_ENDL;
+ return false;
+ }
+
+ AMediaFormat_delete(format);
+ return true;
+}
+
+void PL_AndroidMediaCodecEncoder::finit()
+{
+	PL_AMCE_Internal* in = (PL_AMCE_Internal*)internal;
+	if (in->codec != nullptr)
+	{
+		// stop and release the codec created in init()
+		AMediaCodec_stop(in->codec);
+		AMediaCodec_delete(in->codec);
+		in->codec = nullptr;
+	}
+}
+
+bool amce_pay_frame_breaker(const PipeMaterial* pm, void* args)
+{
+ PL_AMCE_Internal* in = (PL_AMCE_Internal*)args;
+ MB_Frame* frame = (MB_Frame*)pm->buffer;
+
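+	// wait up to 2000 microseconds for a free encoder input buffer; a negative index means none was available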
+ ssize_t bufidx = AMediaCodec_dequeueInputBuffer(in->codec, 2000);
+	LOGP(DEBUG, "input buffer bufidx=%zd, inputFrameCount=%zu", bufidx, in->inputFrameCount++);
+
+ if (bufidx >= 0)
+ {
+ size_t bufsize;
+ uint8_t* inputBuff = AMediaCodec_getInputBuffer(in->codec, bufidx, &bufsize);
+ size_t sampleSize = std::min(bufsize, frame->buffSize);
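+		// note: a frame larger than the codec input buffer is truncated to sampleSize bytes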
+ memcpy(inputBuff, frame->buffer, sampleSize); // fill buffer
+
+ uint64_t presentationTimeUs = timeval_to_microseconds(frame->pts); //microseconds
+
+ media_status_t ms = AMediaCodec_queueInputBuffer(in->codec, bufidx, 0, sampleSize, presentationTimeUs, 0);
+
+ in->payOK = true;
+ LOGP(DEBUG, "media_status_t=%d", ms);
+ }
+ else
+ {
+ LOG_WARN << "bufidx=" << bufidx << LOG_ENDL;
+ in->payOK = false;
+ return false;
+ }
+
+ return false;
+}
+
+bool PL_AndroidMediaCodecEncoder::pay(const PipeMaterial& pm)
+{
+ PL_AMCE_Internal* in = (PL_AMCE_Internal*)internal;
+
+	in->payOK = false;
+	pm.breake(PipeMaterial::PMT_FRAME, MB_Frame::MBFT_NV12, amce_pay_frame_breaker, in);
+
+ return in->payOK;
+}
+
+bool PL_AndroidMediaCodecEncoder::gain(PipeMaterial& pm)
+{
+ PL_AMCE_Internal* in = (PL_AMCE_Internal*)internal;
+
+ if (!in->payOK)
+ {
+ LOG_WARN << "not in->payOK" << LOG_ENDL;
+ return false;
+ }
+
+ pm.deleter = nullptr;
+ pm.former = this;
+
+ AMediaCodecBufferInfo info;
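+	// timeout 0: poll without blocking; AMEDIACODEC_INFO_TRY_AGAIN_LATER comes back when no output is ready yet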
+ ssize_t outputBuffIdx = AMediaCodec_dequeueOutputBuffer(in->codec, &info, 0);
+ if (outputBuffIdx >= 0)
+ {
+ if (info.flags & AMEDIACODEC_BUFFER_FLAG_END_OF_STREAM)
+ {
+ LOGP(WARNING, "output EOS");
+ }
+
+ //AMediaFormat* format = AMediaCodec_getOutputFormat(in->codec);
+ //if (format != NULL)
+ //{
+ // int32_t width, height, color;
+ // AMediaFormat_getInt32(format, AMEDIAFORMAT_KEY_WIDTH, &width);
+ // AMediaFormat_getInt32(format, AMEDIAFORMAT_KEY_HEIGHT, &height);
+ // AMediaFormat_getInt32(format, AMEDIAFORMAT_KEY_COLOR_FORMAT, &color);
+ // AMediaFormat_delete(format);
+ // format = nullptr;
+ // LOGP(DEBUG, "output media format, w=%d, h=%d, c=%d", width, height, color);
+ //}
+
+ in->tempFrame.reset();
+
+ size_t outSize = in->buffSizeMax;
+ uint8_t* outputBuff = AMediaCodec_getOutputBuffer(in->codec, outputBuffIdx, &outSize);
+ if (outputBuff != nullptr)
+ {
+ in->buffSize = std::min((size_t) info.size, in->buffSizeMax);
+ memcpy(in->buffer, outputBuff + info.offset, in->buffSize);
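+			// copied into the internal buffer so the data stays valid after AMediaCodec_releaseOutputBuffer below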
+
+ in->tempFrame.type = MB_Frame::MBFT_H264_NALU_AUX;
+ in->tempFrame.buffer = in->buffer;
+ in->tempFrame.buffSize = in->buffSize;
+ in->tempFrame.width = in->config.ak_width;
+ in->tempFrame.height = in->config.ak_height;
+ microseconds_to_timeval(info.presentationTimeUs, in->tempFrame.pts);
+
+ pm.type = PipeMaterial::PMT_FRAME;
+ pm.buffer = &(in->tempFrame);
+ pm.buffSize = 0;
+
+ //static size_t f = 0;
+ //static FILE *pFile = fopen("/sdcard/aa.264", "wb");
+ //fwrite(in->buffer, sizeof(char), in->buffSize, pFile);
+ //if (++f > 400){
+ // fclose(pFile);
+ // exit(0);
+ //}
+ }
+
+ AMediaCodec_releaseOutputBuffer(in->codec, outputBuffIdx, false);
+
+ return true;
+ }
+ else if (outputBuffIdx == AMEDIACODEC_INFO_OUTPUT_BUFFERS_CHANGED)
+ {
+ LOGP(DEBUG, "output buffers changed");
+ }
+ else if (outputBuffIdx == AMEDIACODEC_INFO_OUTPUT_FORMAT_CHANGED)
+ {
+ auto format = AMediaCodec_getOutputFormat(in->codec);
+ LOGP(INFO, "format changed to: %s", AMediaFormat_toString(format));
+ AMediaFormat_delete(format);
+ }
+ else if (outputBuffIdx == AMEDIACODEC_INFO_TRY_AGAIN_LATER)
+ {
+ LOGP(DEBUG, "no output buffer right now");
+ }
+ else
+ {
+ LOGP(WARNING, "unexpected info code: %zd", outputBuffIdx);
+ }
+
+ return false;
+}
diff --git a/RtspFace/PL_AndroidMediaCodecEncoder.h b/RtspFace/PL_AndroidMediaCodecEncoder.h
new file mode 100644
index 0000000..90ec190
--- /dev/null
+++ b/RtspFace/PL_AndroidMediaCodecEncoder.h
@@ -0,0 +1,63 @@
+#ifndef _PL_ANDROIDMEDIACODECENCODER_H_
+#define _PL_ANDROIDMEDIACODECENCODER_H_
+
+#include "PipeLine.h"
+
+struct PL_AndroidMediaCodecEncoder_Config
+{
+ // D:\adk\ndk-bundle\platforms\android-21\arch-arm64\usr\include\media\NdkMediaFormat.h
+ // AMEDIAFORMAT_KEY_AAC_PROFILE;
+ int32_t ak_bit_rate; // AMEDIAFORMAT_KEY_BIT_RATE;
+ // AMEDIAFORMAT_KEY_CHANNEL_COUNT;
+ // AMEDIAFORMAT_KEY_CHANNEL_MASK;
+ int32_t ak_color_format; // AMEDIAFORMAT_KEY_COLOR_FORMAT;
+ // AMEDIAFORMAT_KEY_DURATION;
+ // AMEDIAFORMAT_KEY_FLAC_COMPRESSION_LEVEL;
+ int32_t ak_frame_rate; // AMEDIAFORMAT_KEY_FRAME_RATE;
+ int32_t ak_height; // AMEDIAFORMAT_KEY_HEIGHT;
+ // AMEDIAFORMAT_KEY_IS_ADTS;
+ // AMEDIAFORMAT_KEY_IS_AUTOSELECT;
+ // AMEDIAFORMAT_KEY_IS_DEFAULT;
+ // AMEDIAFORMAT_KEY_IS_FORCED_SUBTITLE;
+ int32_t ak_i_frame_interval; // AMEDIAFORMAT_KEY_I_FRAME_INTERVAL;
+ // AMEDIAFORMAT_KEY_LANGUAGE;
+ // AMEDIAFORMAT_KEY_MAX_HEIGHT;
+ // AMEDIAFORMAT_KEY_MAX_INPUT_SIZE;
+ // AMEDIAFORMAT_KEY_MAX_WIDTH;
+ std::string ak_mime; // AMEDIAFORMAT_KEY_MIME; // video/avc
+ // AMEDIAFORMAT_KEY_PUSH_BLANK_BUFFERS_ON_STOP;
+ // AMEDIAFORMAT_KEY_REPEAT_PREVIOUS_FRAME_AFTER;
+ // AMEDIAFORMAT_KEY_SAMPLE_RATE;
+ int32_t ak_width; // AMEDIAFORMAT_KEY_WIDTH;
+ // AMEDIAFORMAT_KEY_STRIDE;
+
+ PL_AndroidMediaCodecEncoder_Config() :
+ ak_bit_rate(0),
+ ak_color_format(0),
+ ak_frame_rate(0),
+ ak_height(0),
+ ak_i_frame_interval(0),
+ ak_mime(),
+ ak_width(0)
+ {}
+};
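+
+// Typical H.264 setup (a minimal sketch; the values below are illustrative, not project defaults):
+//   PL_AndroidMediaCodecEncoder_Config cfg;
+//   cfg.ak_mime = "video/avc";
+//   cfg.ak_width = 1920;
+//   cfg.ak_height = 1080;
+//   cfg.ak_bit_rate = 4 * 1024 * 1024;
+//   cfg.ak_frame_rate = 25;
+//   cfg.ak_i_frame_interval = 1;
+//   cfg.ak_color_format = 0x7f420888; // COLOR_FormatYUV420Flexible
+//   PipeLineElem* enc = create_PL_AndroidMediaCodecEncoder();
+//   enc->init(&cfg);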
+
+class PL_AndroidMediaCodecEncoder : public PipeLineElem
+{
+public:
+ PL_AndroidMediaCodecEncoder();
+ virtual ~PL_AndroidMediaCodecEncoder();
+
+ virtual bool init(void* args);
+ virtual void finit();
+
+ virtual bool pay(const PipeMaterial& pm);
+ virtual bool gain(PipeMaterial& pm);
+
+private:
+ void* internal;
+};
+
+PipeLineElem* create_PL_AndroidMediaCodecEncoder();
+
+#endif
diff --git a/RtspFace/PL_RTSPServer.cpp b/RtspFace/PL_RTSPServer.cpp
index d705a8b..d3da325 100644
--- a/RtspFace/PL_RTSPServer.cpp
+++ b/RtspFace/PL_RTSPServer.cpp
@@ -114,6 +114,7 @@
*FrameSize = newBufferSize;
LOG_DEBUG << "send frame size=" << in.buffSize << std::endl;
+ return 1;
}
virtual char ReleaseFrame()
diff --git a/RtspFace/PL_SensetimeFaceTrackMultiTrd.cpp b/RtspFace/PL_SensetimeFaceTrackMultiTrd.cpp
index f5c7643..cd9c91f 100644
--- a/RtspFace/PL_SensetimeFaceTrackMultiTrd.cpp
+++ b/RtspFace/PL_SensetimeFaceTrackMultiTrd.cpp
@@ -89,7 +89,7 @@
return ret;
}
- ret = cv_face_create_tracker(&tracker_handle, nullptr, config.point_size_config);
+ ret = cv_face_create_tracker(&tracker_handle, nullptr, config.point_size_config | CV_FACE_TRACKING_TWO_THREAD);
if (ret != 0)
{
LOGP(ERROR, "cv_face_create_tracker: %s/n", strerror(ret));
@@ -425,7 +425,7 @@
uint8_t *buffer, size_t width, size_t height, size_t stride,
cv_pixel_format cvPixFmt)
{
- PipeLineElemTimingDebugger td(nullptr);
+ //PipeLineElemTimingDebugger td(nullptr);
in->trackThread.do_face_track(buffer, cvPixFmt, width, height, stride);
return 0;
}
--
Gitblit v1.8.0