Stable version of scaling and Chinese font rendering; NV12-to-RGB565 conversion works correctly
git-svn-id: http://192.168.1.226/svn/proxy@759 454eff88-639b-444f-9e54-f578c98de674
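
For reference, a minimal sketch of the NV12-to-RGB565 conversion this commit adds (the same libyuv call used by image_to_rgb565() in the new PL_ColorConv.cpp below); the helper and buffer names here are illustrative only:

    #include <libyuv.h>
    #include <vector>
    #include <cstdint>

    // Convert one NV12 frame (Y plane followed by interleaved UV) into RGB565.
    // width/height are the frame dimensions in pixels; returns true on success.
    bool nv12_to_rgb565(const uint8_t* nv12, int width, int height,
                        std::vector<uint8_t>& rgb565)
    {
        rgb565.resize((size_t)width * height * 2);              // 2 bytes per RGB565 pixel
        const uint8_t* src_y  = nv12;
        const uint8_t* src_uv = nv12 + (size_t)width * height;  // UV plane follows Y
        return libyuv::NV12ToRGB565(src_y,  width,              // Y plane, stride = width
                                    src_uv, width,              // interleaved UV, stride = width
                                    rgb565.data(), width * 2,   // dst stride in bytes
                                    width, height) == 0;        // libyuv returns 0 on success
    }
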
| | |
| | | return true;
|
| | | }
|
| | |
|
| | | bool convert_rgb565_origin_to_render(PL_ASVR_Internal *in) {
|
| | |
|
| | | ANativeWindow* window = (ANativeWindow*)(in->config.windowSurface);
|
| | | int window_width = ANativeWindow_getWidth(window);
|
| | | int window_height = ANativeWindow_getHeight(window);
|
| | | int src_width = in->lastMbfBuffOrigin.width;
|
| | | int src_height = in->lastMbfBuffOrigin.height;
|
| | | if (src_width != window_width || src_height != window_height)
|
| | | {
|
| | | uint8_t* src = (uint8_t*)in->lastMbfBuffOrigin.buffer;
|
| | | uint8_t* dst = (uint8_t*)in->buffer1;
|
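| | | // treat the packed RGB565 frame as one 16-bit plane; kFilterNone point-samples, so packed pixels are never blended
|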
| | | libyuv::ScalePlane_16((uint16*)src, src_width,
|
| | | src_width, src_height,
|
| | | (uint16*)dst, window_width,
|
| | | window_width, window_height,
|
| | | libyuv::kFilterNone);
|
| | | in->buff1Size = window_width * window_height * 2;
|
| | | memcpy(in->buffer, in->buffer1, in->buff1Size);
|
| | | in->buffSize = in->buff1Size;
|
| | | }
|
| | | else
|
| | | {
|
| | | memcpy(in->buffer, in->lastMbfBuffOrigin.buffer, in->lastMbfBuffOrigin.buffSize);
|
| | | in->buffSize = in->lastMbfBuffOrigin.buffSize;
|
| | | }
|
| | | return true;
|
| | | }
|
| | | bool convert_nv12_origin_to_render(PL_ASVR_Internal* in)
|
| | | {
|
| | | if (false)
|
| | |
| | | libyuv::kFilterNone);
|
| | |
|
| | | libyuv::ScalePlane_16((uint16*)src_uv, MH_SUBSAMPLE1(src_width ,2),
|
| | | MH_SUBSAMPLE1(src_width ,2), src_height,
|
| | | MH_SUBSAMPLE1(src_width, 2), MH_SUBSAMPLE1(src_height, 2),
|
| | | (uint16*)dst_uv, MH_SUBSAMPLE1(dst_width ,2),
|
| | | MH_SUBSAMPLE1(dst_width, 2), dst_height,
|
| | | MH_SUBSAMPLE1(dst_width, 2), MH_SUBSAMPLE1(dst_height, 2),
|
| | | libyuv::kFilterNone);
|
| | |
|
| | | in->buff1Size = dst_width * dst_height * 3 / 2;
|
| | |
| | | return false;
|
| | |
|
| | | MB_Frame* frame = (MB_Frame*)pm->buffer;
|
| | | if (frame->type != MB_Frame::MBFT_YUV420 && frame->type != MB_Frame::MBFT_NV12)
|
| | | if (frame->type != MB_Frame::MBFT_YUV420 && frame->type != MB_Frame::MBFT_NV12 && frame->type != MB_Frame::MBFT_RGB565)
|
| | | {
|
| | | LOG_ERROR << "Only support MBFT_YUV420 and MBFT_NV12" << LOG_ENDL;
|
| | | LOG_ERROR << "Only support MBFT_YUV420 、MBFT_NV12 and MBFT_RGB565" << LOG_ENDL;
|
| | | in->payError = true;
|
| | | return false;
|
| | | }
|
| | |
| | | ret = convert_yuv420_origin_to_render(in);
|
| | | else if (in->lastMbfBuffOrigin.type == MB_Frame::MBFT_NV12)
|
| | | ret = convert_nv12_origin_to_render(in);
|
| | |
|
| | | else if (in->lastMbfBuffOrigin.type == MB_Frame::MBFT_RGB565)
|
| | | ret = convert_rgb565_origin_to_render(in);
|
| | | if (!ret)
|
| | | {
|
| | | LOG_ERROR << "convert yuv origin to render error" << LOG_ENDL;
|
| | |
| | | pm.breake(PipeMaterial::PMT_FRAME, MB_Frame::MBFT_YUV420, PL_AndroidSurfaceViewRender::pay_breaker_MBFT_YUV, in);
|
| | | if (in->payError)
|
| | | pm.breake(PipeMaterial::PMT_FRAME, MB_Frame::MBFT_NV12, PL_AndroidSurfaceViewRender::pay_breaker_MBFT_YUV, in);
|
| | |
|
| | | if (in->payError)
|
| | | pm.breake(PipeMaterial::PMT_FRAME, MB_Frame::MBFT_RGB565, PL_AndroidSurfaceViewRender::pay_breaker_MBFT_YUV, in);
|
| | | return !(in->payError);
|
| | | }
|
| | |
|
| | |
| | |
|
| | | return true;
|
| | | }
|
| | |
|
| New file |
| | |
| | | #include "PL_ColorConv.h"
|
| | | #include "MaterialBuffer.h"
|
| | | #include "logger.h"
|
| | | #include <libyuv.h>
|
| | | #include <PbFaceList.pb.h>
|
| | | #define SUBSAMPLE(v, a) ((((v) + (a) - 1)) / (a))
|
| | | struct PL_ColorConv_Internal
|
| | | {
|
| | | uint8_t* buffer;
|
| | | size_t buffSize;
|
| | | size_t buffSizeMax;
|
| | | bool payError;
|
| | |
|
| | | PipeMaterial::PipeMaterialBufferType lastPmType;
|
| | | MB_Frame tempFrame;
|
| | | PL_ColorConv_Config config;
|
| | |
|
| | | PL_ColorConv_Internal() :
|
| | | buffer(nullptr), buffSize(0), buffSizeMax(0), payError(true),
|
| | | lastPmType(PipeMaterial::PMT_NONE), tempFrame(), config()
|
| | | {
|
| | | }
|
| | |
|
| | | ~PL_ColorConv_Internal()
|
| | | {
|
| | | delete[] buffer;
|
| | | buffer = nullptr;
|
| | | }
|
| | |
|
| | | void reset()
|
| | | {
|
| | | buffSize = 0;
|
| | | payError = true;
|
| | |
|
| | | lastPmType = PipeMaterial::PMT_NONE;
|
| | |
|
| | | MB_Frame _tempFrame;
|
| | | tempFrame = _tempFrame;
|
| | |
|
| | | PL_ColorConv_Config _config;
|
| | | config = _config;
|
| | |
|
| | | if (buffer != nullptr)
|
| | | {
|
| | | delete[] buffer;
|
| | | buffer = nullptr;
|
| | | buffSizeMax = 0;
|
| | | }
|
| | | }
|
| | | };
|
| | |
|
| | | PipeLineElem* create_PL_ColorConv()
|
| | | {
|
| | | return new PL_ColorConv;
|
| | | }
|
| | |
|
| | | PL_ColorConv::PL_ColorConv() : internal(new PL_ColorConv_Internal)
|
| | | {
|
| | | }
|
| | |
|
| | | PL_ColorConv::~PL_ColorConv()
|
| | | {
|
| | | delete (PL_ColorConv_Internal*)internal;
|
| | | internal = nullptr;
|
| | | }
|
| | |
|
| | | bool PL_ColorConv::init(void* args)
|
| | | {
|
| | | PL_ColorConv_Internal* in = (PL_ColorConv_Internal*)internal;
|
| | | in->reset();
|
| | |
|
| | | if (args != nullptr)
|
| | | {
|
| | | PL_ColorConv_Config* config = (PL_ColorConv_Config*)args;
|
| | | in->config = *config;
|
| | | }
|
| | |
|
| | | return true;
|
| | | }
|
| | |
|
| | | void PL_ColorConv::finit()
|
| | | {
|
| | | PL_ColorConv_Internal* in = (PL_ColorConv_Internal*)internal;
|
| | | }
|
| | |
|
| | | bool image_to_rgb565(PL_ColorConv_Internal* in, MB_Frame::MBFType srcType)
| | | {
|
| | | const int srcHeight = in->tempFrame.height;
|
| | | const int srcWidth = in->tempFrame.width;
|
| | | int dstSize = srcHeight * srcWidth * 2;
|
| | | if (in->buffer == nullptr || in->buffSizeMax < (size_t)dstSize)
|
| | | {
|
| | | if (in->buffer != nullptr)
|
| | | delete[] in->buffer;
|
| | | in->buffer = new uint8_t[dstSize];
|
| | | in->buffSizeMax = dstSize;
|
| | | in->buffSize = dstSize;
|
| | | LOG_INFO << "image_scale alloc buffer size=" << dstSize << std::endl;
|
| | | }
|
| | |
|
| | | if (srcType == MB_Frame::MBFT_YUV420)
|
| | | {
|
| | | //#todo
|
| | | LOG_ERROR << "srcType only support MBFT_NV12" << std::endl;
|
| | | return false;
|
| | | }
|
| | | else if (srcType == MB_Frame::MBFT_NV12)
|
| | | {
|
| | | const uint8_t * srcBuffer = (uint8_t *)in->tempFrame.buffer;
|
| | | const uint8_t *src_y = srcBuffer;
|
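| | | // NV12 layout: full-size Y plane followed by an interleaved, half-height UV plane
|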
| | | const uint8_t *src_uv = src_y + (srcHeight * srcWidth);
|
| | | uint8_t *dst = (uint8_t *) (in->buffer);
|
| | | libyuv::NV12ToRGB565(src_y, srcWidth,
|
| | | src_uv, srcWidth,
|
| | | dst, srcWidth * 2,
|
| | | srcWidth, srcHeight);
|
| | | }
|
| | | else if (srcType == MB_Frame::MBFT_BGRA)
|
| | | {
|
| | | //#todo
|
| | | LOG_ERROR << "srcType only support MBFT_NV12" << std::endl;
|
| | | return false;
|
| | | } else
|
| | | {
|
| | | LOG_ERROR << "srcType only support MBFT_NV12" << std::endl;
|
| | | return false;
|
| | | }
|
| | | return true;
|
| | | }
|
| | |
|
| | | bool PL_ColorConv::pay(const PipeMaterial& pm)
|
| | | {
|
| | | PL_ColorConv_Internal* in = (PL_ColorConv_Internal*)internal;
|
| | |
|
| | | in->payError = true;
|
| | |
|
| | | if (pm.buffer == nullptr)
|
| | | return false;
|
| | |
|
| | | bool ret = false;
|
| | |
|
| | | in->lastPmType = pm.type;
|
| | |
|
| | | switch(pm.type)
|
| | | {
|
| | | case PipeMaterial::PMT_BYTES:
|
| | | LOG_ERROR << "PL_ColorConv unsupport type: PMT_BYTES" << std::endl;
|
| | | break;
|
| | | case PipeMaterial::PMT_FRAME:
|
| | | {
|
| | | MB_Frame* frame = (MB_Frame*)pm.buffer;
|
| | | switch(frame->type)
|
| | | {
|
| | | case MB_Frame::MBFT_NV12:
|
| | | in->tempFrame = *frame;
|
| | | ret = image_to_rgb565(in, frame->type);
|
| | | break;
|
| | | default:
|
| | | LOG_ERROR << "PL_ColorConv Only support MBFT_NV12" << std::endl;
|
| | | return false;
|
| | | }
|
| | | }
|
| | | break;
|
| | | case PipeMaterial::PMT_PM_LIST:
|
| | | {
|
| | | // break pm list into single pm(s)
|
| | |
|
| | | PipeMaterial* ppm = (PipeMaterial*)pm.buffer;
|
| | | for (size_t i = 0; i < pm.buffSize; i++, ppm++)
|
| | | {
|
| | | if (ppm->type == PipeMaterial::PMT_FRAME)
|
| | | {
|
| | | MB_Frame* frame = (MB_Frame*)ppm->buffer;
|
| | | switch(frame->type)
|
| | | {
|
| | | case MB_Frame::MBFT_NV12:
|
| | | in->tempFrame = *frame;
|
| | | ret = image_to_rgb565(in, frame->type);
|
| | | break;
|
| | | default:
|
| | | LOG_ERROR << "PL_ColorConv Only support MBFT_NV12" << std::endl;
|
| | | return false;
|
| | | }
|
| | | }
|
| | | }
|
| | | }break;
|
| | | default:
|
| | | LOG_ERROR << "PL_ColorConv Only support MBFT_NV12" << std::endl;
|
| | | return false;
|
| | | }
|
| | |
|
| | | in->payError = !ret;
|
| | | return ret;
|
| | | }
|
| | |
|
| | | bool PL_ColorConv::gain(PipeMaterial& pm)
|
| | | {
|
| | | PL_ColorConv_Internal* in = (PL_ColorConv_Internal*)internal;
|
| | |
|
| | | PipeMaterial newPm;
|
| | | newPm.type = PipeMaterial::PMT_NONE;
|
| | | newPm.former = this;
|
| | |
|
| | | switch(in->lastPmType)
|
| | | {
|
| | | case PipeMaterial::PMT_FRAME:
|
| | | case PipeMaterial::PMT_PM_LIST:
|
| | | {
|
| | | newPm.type = PipeMaterial::PMT_FRAME;
|
| | | in->tempFrame.buffer = in->buffer;
|
| | | in->tempFrame.buffSize = in->buffSize;
|
| | | in->tempFrame.type = MB_Frame::MBFT_RGB565;
|
| | | newPm.buffer = &(in->tempFrame);
|
| | | newPm.buffSize = 0;
|
| | | }
|
| | | break;
|
| | | default:
|
| | | LOG_ERROR << "Only support PMT_FRAME and PMT_PM_LIST" << std::endl;
|
| | | }
|
| | | pm = newPm;
|
| | | return !in->payError;
|
| | | }
|
| | |
|
| New file |
| | |
| | | #ifndef _PL_ColorConv_H_
|
| | | #define _PL_ColorConv_H_
|
| | |
|
| | | #include "PipeLine.h"
|
| | | #include "MaterialBuffer.h"
|
| | |
|
| | | struct PL_ColorConv_Config
|
| | | {
|
| | | int filterMode; // libyuv/scale.h/FilterMode
|
| | | // Used only when pm.type == PMT_BYTES
|
| | | int frameType;
|
| | | uint16_t srcWidth;
|
| | | uint16_t srcHeight;
|
| | | PL_ColorConv_Config() :filterMode(0),frameType(0), srcWidth(0), srcHeight(0)
|
| | | { }
|
| | | };
|
| | |
|
| | | class PL_ColorConv : public PipeLineElem
|
| | | {
|
| | | public:
|
| | | PL_ColorConv();
|
| | | virtual ~PL_ColorConv();
|
| | |
|
| | | virtual bool init(void* args);
|
| | | virtual void finit();
|
| | |
|
| | | virtual bool pay(const PipeMaterial& pm);
|
| | | virtual bool gain(PipeMaterial& pm);
|
| | |
|
| | | private:
|
| | | void* internal;
|
| | | };
|
| | |
|
| | | PipeLineElem* create_PL_ColorConv();
|
| | |
|
| | | #endif
|
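How the new element is wired into a pipeline (a sketch assuming the PipeLine API used elsewhere in this repo; it mirrors the registration and push_elem calls in CameraWrapper.cpp further down, error handling omitted):

    #include "PipeLine.h"
    #include "PL_ColorConv.h"

    void setup_color_conv(PipeLine* pipeLineRender)
    {
        // register the element factory once at startup
        PipeLine::register_global_elem_creator("PL_ColorConv", create_PL_ColorConv);

        // append the converter to the render pipeline and initialise it with defaults
        PL_ColorConv_Config cfg;                                  // filterMode / frameType default to 0
        PL_ColorConv* plColorConv =
            (PL_ColorConv*)pipeLineRender->push_elem("PL_ColorConv");
        plColorConv->init(&cfg);                                  // returns false on failure
    }
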
| | |
| | | #include <string.h> // for memcpy
|
| | | #include <opencv2/core/mat.hpp>
|
| | | #include <opencv2/imgproc.hpp>
|
| | |
|
| | | #ifdef ENABLE_WTEXT
|
| | | #include "CvUtil/CvxText.h"
|
| | | #endif
|
| | |
| | | #include "logger.h"
|
| | | #include <libyuv.h>
|
| | |
|
| | | #define SUBSAMPLE(v, a) ((((v) + (a) - 1)) / (a))
|
| | |
|
| | | struct PL_Scale_Internal
|
| | | {
|
| | | uint8_t* buffer;
|
| | |
| | | bool payError;
|
| | |
|
| | | PipeMaterial::PipeMaterialBufferType lastPmType;
|
| | | MB_Frame lastFrame;
|
| | | MB_Frame tempFrame;
|
| | | PL_Scale_Config config;
|
| | |
|
| | | PL_Scale_Internal() :
|
| | | buffer(nullptr), buffSize(0), buffSizeMax(0), payError(true),
|
| | | lastPmType(PipeMaterial::PMT_NONE), lastFrame(), config()
|
| | | lastPmType(PipeMaterial::PMT_NONE), tempFrame(), config()
|
| | | {
|
| | | }
|
| | |
|
| | |
| | |
|
| | | lastPmType = PipeMaterial::PMT_NONE;
|
| | |
|
| | | MB_Frame _lastFrame;
|
| | | lastFrame = _lastFrame;
|
| | | MB_Frame _tempFrame;
|
| | | tempFrame = _tempFrame;
|
| | |
|
| | | PL_Scale_Config _config;
|
| | | config = _config;
|
| | |
| | | bool image_scale(PL_Scale_Internal* in,
|
| | | uint8_t* srcBuffer, MB_Frame::MBFType srcType, uint16_t srcWidth, uint16_t srcHeight)
|
| | | {
|
| | | #define SUBSAMPLE(v, a) ((((v) + (a) - 1)) / (a))
|
| | |
|
| | | const int dst_width = in->config.toWidth;
|
| | | const int dst_height = in->config.toHeight;
|
| | |
| | | {
|
| | | case MB_Frame::MBFT_YUV420:
|
| | | case MB_Frame::MBFT_BGRA:
|
| | | in->lastFrame = *frame;
|
| | | in->tempFrame = *frame;
|
| | | ret = image_scale(in, (uint8_t*)frame->buffer, frame->type,
|
| | | frame->width, frame->height);
|
| | | break;
|
| | |
| | | case MB_Frame::MBFT_YUV420:
|
| | | case MB_Frame::MBFT_BGRA:
|
| | | case MB_Frame::MBFT_NV12:
|
| | | in->lastFrame = *frame;
|
| | | in->tempFrame = *frame;
|
| | | ret = image_scale(in, (uint8_t*)frame->buffer, frame->type,
|
| | | frame->width, frame->height);
|
| | | break;
|
| | |
| | | case PipeMaterial::PMT_PM_LIST:
|
| | | {
|
| | | newPm.type = PipeMaterial::PMT_FRAME;
|
| | | newPm.buffer = &(in->lastFrame);
|
| | | newPm.buffer = &(in->tempFrame);
|
| | | newPm.buffSize = 0;
|
| | |
|
| | | in->lastFrame.buffer = in->buffer;
|
| | | in->lastFrame.buffSize = in->buffSize;
|
| | | in->lastFrame.width = in->config.toWidth;
|
| | | in->lastFrame.height = in->config.toHeight;
|
| | | in->tempFrame.buffer = in->buffer;
|
| | | in->tempFrame.buffSize = in->buffSize;
|
| | | in->tempFrame.width = in->config.toWidth;
|
| | | in->tempFrame.height = in->config.toHeight;
|
| | | }
|
| | | break;
|
| | | default:
|
| | |
| | | int doFaceTrack(PL_SensetimeFaceTrack_Internal* in,
|
| | | uint8_t* buffer, size_t width, size_t height, size_t stride, cv_pixel_format cvPixFmt)
|
| | | {
|
| | | //PipeLineElemTimingDebugger td(nullptr);
|
| | | PipeLineElemTimingDebugger td(nullptr);
|
| | |
|
| | | if (in->config.doTrackPerFrame == 0)
|
| | | return 0;
|
| | |
| | | DebugNetwork.cpp |
| | | CaptureCamera.cpp |
| | | serial.c |
| | | TeleWrapper.cpp |
| | | #TeleWrapper.cpp |
| | | |
| | | "D:/workspace/proxy/RtspFace/PipeLine.cpp" |
| | | "D:/workspace/proxy/RtspFace/Logger/src/logger.cc" |
| | | "D:/workspace/proxy/RtspFace/MediaHelper.cpp" |
| | | "D:/workspace/proxy/RtspFace/PL_RTSPClient.cpp" |
| | | "D:/workspace/proxy/RtspFace/PL_AndroidMediaCodecDecoder_ndk.cpp" |
| | | "D:/workspace/proxy/RtspFace/PL_AndroidSurfaceViewRender.cpp" |
| | | "D:/workspace/proxy/RtspFace/PL_SensetimeFaceTrack.cpp" |
| | | "D:/workspace/proxy/RtspFace/PL_Gainer.cpp" |
| | | "D:/workspace/proxy/RtspFace/PL_Paint.cpp" |
| | | "D:/workspace/proxy/RtspFace/PL_V4L2Source.cpp" |
| | | "D:/workspace/proxy/RtspFace/PL_BlockGrouping.cpp" |
| | | "D:/workspace/proxy/RtspFace/PL_Queue.cpp" |
| | | "D:/Documents/works/RtspFace/PL_Scale.cpp" |
| | | "D:/Documents/works/RtspFace/PipeLine.cpp" |
| | | "D:/Documents/works/RtspFace/Logger/src/logger.cc" |
| | | "D:/Documents/works/RtspFace/MediaHelper.cpp" |
| | | "D:/Documents/works/RtspFace/GraphicHelper.cpp" |
| | | "D:/Documents/works/RtspFace/PL_RTSPClient.cpp" |
| | | "D:/Documents/works/RtspFace/PL_AndroidMediaCodecDecoder_ndk.cpp" |
| | | "D:/Documents/works/RtspFace/PL_AndroidSurfaceViewRender.cpp" |
| | | "D:/Documents/works/RtspFace/PL_SensetimeFaceTrack.cpp" |
| | | "D:/Documents/works/RtspFace/PL_ColorConv.cpp" |
| | | "D:/Documents/works/RtspFace/PL_Gainer.cpp" |
| | | "D:/Documents/works/RtspFace/PL_Paint.cpp" |
| | | "D:/Documents/works/RtspFace/PL_V4L2Source.cpp" |
| | | "D:/Documents/works/RtspFace/PL_BlockGrouping.cpp" |
| | | "D:/Documents/works/RtspFace/PL_Queue.cpp" |
| | | |
| | | "D:/workspace/proxy/FaceServer/proto_hton_ntoh.cpp" |
| | | "D:/workspace/proxy/FaceServer/PbFaceList.pb.cc" |
| | | "D:/Documents/works/FaceServer/proto_hton_ntoh.cpp" |
| | | "D:/Documents/works/FaceServer/PbFaceList.pb.cc" |
| | | |
| | | "D:/workspace/proxy/RtspFace/libv4l2cpp/src/V4l2Capture.cpp" |
| | | "D:/workspace/proxy/RtspFace/libv4l2cpp/src/V4l2Device.cpp" |
| | | "D:/workspace/proxy/RtspFace/libv4l2cpp/src/V4l2MmapDevice.cpp" |
| | | "D:/workspace/proxy/RtspFace/libv4l2cpp/src/V4l2Output.cpp" |
| | | "D:/Documents/works/RtspFace/libv4l2cpp/src/V4l2Capture.cpp" |
| | | "D:/Documents/works/RtspFace/libv4l2cpp/src/V4l2Device.cpp" |
| | | "D:/Documents/works/RtspFace/libv4l2cpp/src/V4l2MmapDevice.cpp" |
| | | "D:/Documents/works/RtspFace/libv4l2cpp/src/V4l2Output.cpp" |
| | | |
| | | "D:/Documents/works/RtspFace/CvUtil/CvxText.cpp" |
| | | |
| | | ) |
| | | |
| | |
| | | ) |
| | | |
| | | include_directories( |
| | | "D:/workspace/proxy/RtspFace" |
| | | "D:/workspace/proxy/FaceServer" |
| | | "D:/Documents/works/RtspFace" |
| | | "D:/Documents/works/FaceServer" |
| | | |
| | | #"D:/workspace/libhardware-android-5.1.1_r38/include" |
| | | #"D:/workspace/core-android-5.1.1_r38/include" |
| | |
| | | #"D:/workspace/core-android-5.1.1_r38/libsync/include" |
| | | #"D:/workspace/native-android-5.1.1_r38/opengl/include" |
| | | |
| | | "D:/workspace/proxy/VisitFace/RtspNativeCodec/app/libs/live555/include" |
| | | "D:/workspace/proxy/VisitFace/RtspNativeCodec/app/libs/live555/include/BasicUsageEnvironment" |
| | | "D:/workspace/proxy/VisitFace/RtspNativeCodec/app/libs/live555/include/groupsock" |
| | | "D:/workspace/proxy/VisitFace/RtspNativeCodec/app/libs/live555/include/liveMedia" |
| | | "D:/workspace/proxy/VisitFace/RtspNativeCodec/app/libs/live555/include/UsageEnvironment" |
| | | "D:/Documents/works/VisitFace/RtspNativeCodec/app/libs/live555/include" |
| | | "D:/Documents/works/VisitFace/RtspNativeCodec/app/libs/live555/include/BasicUsageEnvironment" |
| | | "D:/Documents/works/VisitFace/RtspNativeCodec/app/libs/live555/include/groupsock" |
| | | "D:/Documents/works/VisitFace/RtspNativeCodec/app/libs/live555/include/liveMedia" |
| | | "D:/Documents/works/VisitFace/RtspNativeCodec/app/libs/live555/include/UsageEnvironment" |
| | | |
| | | "D:/workspace/proxy/VisitFace/RtspNativeCodec/app/libs/libyuv/include" |
| | | "D:/Documents/works/VisitFace/RtspNativeCodec/app/libs/libyuv/include" |
| | | |
| | | "D:/workspace/proxy/VisitFace/RtspNativeCodec/app/libs/opencv/include" |
| | | "D:/Documents/works/VisitFace/RtspNativeCodec/app/libs/opencv/include" |
| | | |
| | | "D:/workspace/proxy/VisitFace/RtspNativeCodec/app/libs/protobuf/include" |
| | | "D:/Documents/works/VisitFace/RtspNativeCodec/app/libs/protobuf/include" |
| | | |
| | | "D:/workspace/proxy/VisitFace/RtspNativeCodec/app/libs/st_face/include" |
| | | "D:/Documents/works/VisitFace/RtspNativeCodec/app/libs/st_face/include" |
| | | |
| | | "D:/workspace/proxy/RtspFace/libv4l2cpp/inc" |
| | | #"D:/workspace/proxy/VisitFace/RtspNativeCodec/app/libs/libv4l2wrapper/include" |
| | | "D:/Documents/works/RtspFace/libv4l2cpp/inc" |
| | | |
| | | "D:/Documents/works/VisitFace/RtspNativeCodec/app/libs/freetype/include/freetype2" |
| | | |
| | | #"D:/Documents/works/VisitFace/RtspNativeCodec/app/libs/libv4l2wrapper/include" |
| | | ) |
| | | |
| | | target_link_libraries(rtspface |
| | | "D:/workspace/proxy/VisitFace/RtspNativeCodec/app/libs/live555/lib/armeabi-v7a/libliveMedia.a" |
| | | "D:/workspace/proxy/VisitFace/RtspNativeCodec/app/libs/live555/lib/armeabi-v7a/libgroupsock.a" |
| | | "D:/workspace/proxy/VisitFace/RtspNativeCodec/app/libs/live555/lib/armeabi-v7a/libBasicUsageEnvironment.a" |
| | | "D:/workspace/proxy/VisitFace/RtspNativeCodec/app/libs/live555/lib/armeabi-v7a/libUsageEnvironment.a" |
| | | "D:/Documents/works/VisitFace/RtspNativeCodec/app/libs/live555/lib/arm64-v8a/libliveMedia.a" |
| | | "D:/Documents/works/VisitFace/RtspNativeCodec/app/libs/live555/lib/arm64-v8a/libgroupsock.a" |
| | | "D:/Documents/works/VisitFace/RtspNativeCodec/app/libs/live555/lib/arm64-v8a/libBasicUsageEnvironment.a" |
| | | "D:/Documents/works/VisitFace/RtspNativeCodec/app/libs/live555/lib/arm64-v8a/libUsageEnvironment.a" |
| | | |
| | | "D:/workspace/proxy/VisitFace/RtspNativeCodec/app/libs/libyuv/lib/armeabi-v7a/libyuv_static.a" |
| | | "D:/workspace/proxy/VisitFace/RtspNativeCodec/app/libs/opencv/lib/armeabi-v7a/libopencv_java3.so" |
| | | "D:/workspace/proxy/VisitFace/RtspNativeCodec/app/libs/protobuf/lib/armeabi-v7a/libprotobuf.so" |
| | | "D:/workspace/proxy/VisitFace/RtspNativeCodec/app/libs/st_face/lib/armeabi-v7a/libcvface_api.so" |
| | | "D:/Documents/works/VisitFace/RtspNativeCodec/app/libs/libyuv/lib/arm64-v8a/libyuv_static.a" |
| | | "D:/Documents/works/VisitFace/RtspNativeCodec/app/libs/opencv/lib/arm64-v8a/libopencv_java3.so" |
| | | "D:/Documents/works/VisitFace/RtspNativeCodec/app/libs/protobuf/lib/arm64-v8a/libprotobuf.so" |
| | | "D:/Documents/works/VisitFace/RtspNativeCodec/app/libs/st_face/lib/arm64-v8a/libcvface_api.so" |
| | | "D:/Documents/works/VisitFace/RtspNativeCodec/app/libs/freetype/lib/arm64-v8a/libfreetype.so" |
| | | |
| | | #"D:/workspace/proxy/VisitFace/RtspNativeCodec/app/libs/libv4l2wrapper/lib/armeabi-v7a/libv4l2wrapper.a" |
| | | #"D:/Documents/works/VisitFace/RtspNativeCodec/app/libs/libv4l2wrapper/lib/arm64-v8a/libv4l2wrapper.a" |
| | | |
| | | #"D:/workspace/proxy/VisitFace/RtspNativeCodec/app/libs/android_sys/libgui.so" |
| | | #"D:/workspace/proxy/VisitFace/RtspNativeCodec/app/libs/android_sys/libui.so" |
| | | #"D:/workspace/proxy/VisitFace/RtspNativeCodec/app/libs/android_sys/libEGL.so" |
| | | #"D:/workspace/proxy/VisitFace/RtspNativeCodec/app/libs/android_sys/libGLESv3.so" |
| | | #"D:/Documents/works/VisitFace/RtspNativeCodec/app/libs/android_sys/libgui.so" |
| | | #"D:/Documents/works/VisitFace/RtspNativeCodec/app/libs/android_sys/libui.so" |
| | | #"D:/Documents/works/VisitFace/RtspNativeCodec/app/libs/android_sys/libEGL.so" |
| | | #"D:/Documents/works/VisitFace/RtspNativeCodec/app/libs/android_sys/libGLESv3.so" |
| | | ) |
| | |
| | | #include "CameraWrapper.h"
|
| | | #include "CameraWrapper.h" |
| | | #include <logger.h>
|
| | | #include <Logger/src/logger.hpp>
|
| | | #include <PL_Gainer.h>
|
| | | #include <PL_Paint.h>
|
| | | #include <PL_Scale.h> |
| | | #include <MaterialBuffer.h>
|
| | | #include <PL_V4L2Source.h>
|
| | | #include <PL_BlockGrouping.h>
|
| | |
|
| | | #include <PL_ColorConv.h> |
| | | CameraWrapper::~CameraWrapper()
|
| | | {
|
| | | stop();
|
| | |
| | | PipeLine::register_global_elem_creator("PL_AndroidSurfaceViewRender", create_PL_AndroidSurfaceViewRender);
|
| | | PipeLine::register_global_elem_creator("PL_SensetimeFaceTrack", create_PL_SensetimeFaceTrack);
|
| | | PipeLine::register_global_elem_creator("PL_Gainer", create_PL_Gainer);
|
| | | PipeLine::register_global_elem_creator("PL_Scale", create_PL_Scale); |
| | | PipeLine::register_global_elem_creator("PL_ColorConv", create_PL_ColorConv); |
| | | PipeLine::register_global_elem_creator("PL_Paint", create_PL_Paint);
|
| | | PipeLine::register_global_elem_creator("PL_V4L2Source", create_PL_V4L2Source);
|
| | | PipeLine::register_global_elem_creator("PL_BlockGrouping", create_PL_BlockGrouping);
|
| | |
|
| | | bool ret = false;
|
| | |
|
| | | {
|
| | |
| | | return false;
|
| | | }
|
| | |
|
| | | PL_Gainer* plBG = (PL_Gainer*)pipeLineAnalyzer->push_elem("PL_BlockGrouping");
|
| | | PL_BlockGrouping* plBG = (PL_BlockGrouping*)pipeLineAnalyzer->push_elem("PL_BlockGrouping"); |
| | | PL_BlockGrouping_Config plbgConfig;
|
| | | plbgConfig.user_score_2_func = pl_analizer_plbg_user_score_2_func;
|
| | | plbgConfig.get_rect_func = pl_analizer_plbg_get_rect_func;
|
| | |
| | | return false;
|
| | | }
|
| | |
|
| | | ANativeWindow* window = (ANativeWindow*)(windowRender); |
| | | ANativeWindow_Buffer buffer; |
| | | if(windowRender != nullptr && ANativeWindow_lock(window, &buffer, NULL) == 0) |
| | | { |
| | | plScaleCfg.toHeight=buffer.height; |
| | | plScaleCfg.toWidth=buffer.width; |
| | | ANativeWindow_unlockAndPost(window); |
| | | } else |
| | | { |
| | | plScaleCfg.toHeight=480; |
| | | plScaleCfg.toWidth=640; |
| | | } |
| | | PL_Scale* plScale = (PL_Scale*)pipeLineRender->push_elem("PL_Scale"); |
| | | ret = plScale->init(&plScaleCfg); |
| | | if (!ret) |
| | | { |
| | | LOG_ERROR << "pipeLineRender.plScale init error" << LOG_ENDL; |
| | | return false; |
| | | } |
| | | |
| | | PL_ColorConv_Config PLColorConvCfg; |
| | | PL_ColorConv* plColorConv = (PL_ColorConv*)pipeLineRender->push_elem("PL_ColorConv"); |
| | | ret = plColorConv->init(&PLColorConvCfg); |
| | | if (!ret) |
| | | { |
| | | LOG_ERROR << "pipeLineRender.plPaint init error" << LOG_ENDL; |
| | | return false; |
| | | } |
| | | |
| | | PL_Paint_Config plPaintCfg;
|
| | | plPaintCfg.fontPath = "data/msyh.ttc"; |
| | | plPaintCfg.plplCtx = &plplContext;
|
| | | PL_Paint* plPaint = (PL_Paint*)pipeLineRender->push_elem("PL_Paint");
|
| | | ret = plPaint->init(&plPaintCfg);
|
| | |
| | | plplContext.cmds.clear();
|
| | | plplContext.params.clear();
|
| | |
|
| | | float width_scale =((float)cameraWrapper.plScaleCfg.toWidth) / cameraWrapper.amcdConfig.ak_width; |
| | | float height_scale =((float)cameraWrapper.plScaleCfg.toHeight) / cameraWrapper.amcdConfig.ak_height; |
| | | |
| | | for (st_ff_vect_t::const_iterator iter = faceFeatures.begin(); iter != faceFeatures.end(); ++iter)
|
| | | {
|
| | | plplContext.cmds.push_back(PLPLC_COLOR);
|
| | |
| | | }
|
| | |
|
| | | plplContext.cmds.push_back(PLPLC_RECT);
|
| | | plplContext.params.push_back(iter->rect.leftTop.X);
|
| | | plplContext.params.push_back(iter->rect.leftTop.Y);
|
| | | plplContext.params.push_back(iter->rect.rightBottom.X);
|
| | | plplContext.params.push_back(iter->rect.rightBottom.Y);
|
| | | plplContext.params.push_back((int)(iter->rect.leftTop.X * width_scale)); |
| | | plplContext.params.push_back((int)(iter->rect.leftTop.Y * height_scale)); |
| | | plplContext.params.push_back((int)(iter->rect.rightBottom.X * width_scale)); |
| | | plplContext.params.push_back((int)(iter->rect.rightBottom.Y * height_scale)); |
| | |
|
| | | std::map<int, std::string>::iterator iterFaceLabel = cameraWrapper.faceLabels.find(iter->id);
|
| | | std::map<int, std::wstring>::iterator iterFaceLabel = cameraWrapper.faceLabels.find(iter->id); |
| | | if (iterFaceLabel != cameraWrapper.faceLabels.end())
|
| | | {
|
| | | plplContext.cmds.push_back(PLPLC_TEXT);
|
| | | plplContext.params.push_back(iter->rect.leftTop.X);
|
| | | plplContext.params.push_back(iter->rect.leftTop.Y);
|
| | | const char* label = iterFaceLabel->second.c_str();
|
| | | plplContext.cmds.push_back(PLPLC_WTEXT); |
| | | plplContext.params.push_back((int)(iter->rect.leftTop.X * width_scale)); |
| | | plplContext.params.push_back((int)(iter->rect.leftTop.Y * height_scale)); |
| | | const wchar_t* label = iterFaceLabel->second.c_str(); |
| | | plplContext.params.push_back(PLPLType(label));
|
| | | }
|
| | | }
|
| | |
| | |
|
| | | void test_paint(CameraWrapper& cameraWrapper)
|
| | | {
|
| | | cameraWrapper.setFaceLabel(0, "vip");
|
| | | cameraWrapper.setFaceLabel(1, "abc");
|
| | | cameraWrapper.setFaceLabel(2, "wrn");
|
| | | cameraWrapper.plplContext.cmds.push_back(PLPLC_WTEXT); |
| | | cameraWrapper.plplContext.params.push_back(100); |
| | | cameraWrapper.plplContext.params.push_back(100); |
| | | cameraWrapper.plplContext.params.push_back(PLPLType(L"中文是啊")); |
| | | |
| | | cameraWrapper.setFaceLabel(0, L"会员vi"); |
| | | cameraWrapper.setFaceLabel(1, L"会员ab"); |
| | | cameraWrapper.setFaceLabel(3, L"会员wr"); |
| | | cameraWrapper.setFaceLabel(4, L"会员wr"); |
| | | cameraWrapper.setFaceLabel(5, L"会员wn"); |
| | | cameraWrapper.setFaceLabel(6, L"会员wr"); |
| | | cameraWrapper.setFaceLabel(7, L"会员wn"); |
| | | cameraWrapper.setFaceLabel(8, L"会员wr"); |
| | | cameraWrapper.setFaceLabel(9, L"会员wr"); |
| | | cameraWrapper.setFaceLabel(10, L"会员wn"); |
| | | cameraWrapper.setFaceLabel(11, L"会员wr"); |
| | | cameraWrapper.setFaceLabel(12, L"会员wr"); |
| | | cameraWrapper.setFaceLabel(13, L"会员wr"); |
| | | cameraWrapper.setFaceLabel(14, L"会员wr"); |
| | | cameraWrapper.setFaceLabel(15, L"会员wr"); |
| | | cameraWrapper.setFaceLabel(16, L"会员wn"); |
| | | cameraWrapper.setFaceLabel(17, L"会员wr"); |
| | | cameraWrapper.setFaceLabel(18, L"会员wr"); |
| | | cameraWrapper.setFaceLabel(19, L"会员wr"); |
| | | cameraWrapper.setFaceLabel(20, L"会员wr"); |
| | | cameraWrapper.setFaceLabel(21, L"会员wr"); |
| | | cameraWrapper.setFaceLabel(22, L"会员wr"); |
| | |
|
| | | cameraWrapper.plplContext.cmds.push_back(PLPLC_COLOR);
|
| | | cameraWrapper.plplContext.params.push_back('F');
|
| | | cameraWrapper.plplContext.params.push_back(255);
|
| | | cameraWrapper.plplContext.params.push_back(0);
|
| | | cameraWrapper.plplContext.params.push_back(0);
|
| | | cameraWrapper.plplContext.params.push_back(255);
|
| | | cameraWrapper.plplContext.cmds.push_back(PLPLC_RECT);
|
| | | cameraWrapper.plplContext.params.push_back(20);
|
| | | cameraWrapper.plplContext.params.push_back(20);
|
| | | cameraWrapper.plplContext.params.push_back(100);
|
| | | cameraWrapper.plplContext.params.push_back(100);
|
| | | cameraWrapper.plplContext.params.push_back(255); |
| | | cameraWrapper.plplContext.params.push_back(255); |
| | | // cameraWrapper.plplContext.cmds.push_back(PLPLC_RECT); |
| | | // cameraWrapper.plplContext.params.push_back(20); |
| | | // cameraWrapper.plplContext.params.push_back(20); |
| | | // cameraWrapper.plplContext.params.push_back(100); |
| | | // cameraWrapper.plplContext.params.push_back(100); |
| | | }
|
| | |
|
| | | /*static*/ void* CameraWrapper::decoder_thd(void* arg)
|
| | |
| | | faceCacheLocked = false;
|
| | | }
|
| | |
|
| | | void CameraWrapper::setFaceLabel(int st_track_id, const std::string& label)
|
| | | void CameraWrapper::setFaceLabel(int st_track_id, const std::wstring& label) |
| | | {
|
| | | if (faceLabels.size() > 32)
|
| | | faceLabels.clear();
|
| | |
| | | #include <PL_AndroidSurfaceViewRender.h>
|
| | | #include <PL_SensetimeFaceTrack.h>
|
| | | #include <PL_Paint.h>
|
| | | #include <PL_Scale.h>
|
| | | #include <PL_Queue.h>
|
| | |
|
| | | //#include "looper.h"
|
| | |
| | | PL_AndroidMediaCodecDecoder_Config amcdConfig;
|
| | | PL_AndroidSurfaceViewRender_Config asvrConfig;
|
| | | SensetimeFaceTrackConfig sftConfig;
|
| | | PL_Scale_Config plScaleCfg;
|
| | |
|
| | | int cameraIdx;
|
| | | JavaVM* javaVM;
|
| | |
| | |
|
| | | PLPLContext plplContext;
|
| | |
|
| | | std::map<int, std::string> faceLabels;
|
| | | std::map<int, std::wstring> faceLabels;
|
| | |
|
| | | CameraWrapper() :
|
| | | pipeLineDecoderDetector(nullptr), pipeLineAnalyzer(nullptr), pipeLineRender(nullptr), queueFrame(nullptr), rtspConfig(), amcdConfig(), asvrConfig(), sftConfig(),
|
| | |
| | | void lockFace();
|
| | | void releaseFace();
|
| | |
|
| | | void setFaceLabel(int st_track_id, const std::string& label);
|
| | | void setFaceLabel(int st_track_id, const std::wstring& label);
|
| | |
|
| | | private:
|
| | | static void* decoder_thd(void *arg);
|
| | |
| | | #include <stdlib.h> |
| | | |
| | | #include "DebugNetwork.h" |
| | | #include "TeleWrapper.h" |
| | | |
| | | |
| | | //#include <mediastreamer2/include/mediastreamer2/msjava.h> |
| | |
| | | static std::string g_stface_license_str; |
| | | |
| | | CameraWrapper g_CameraWrappers[CAMERA_COUNT]; |
| | | |
| | | TeleWrapper g_TeleWrapper; |
| | | |
| | | CaptureCamera g_CaptureCamera; |
| | | |
| | |
| | | g_CameraWrappers[i].pipeLineRender = new PipeLine; |
| | | //PipeLine& pipeLine(*(g_CameraWrappers[i].pipeLineDecoderDetector)); |
| | | } |
| | | g_TeleWrapper.start(); |
| | | } |
| | | |
| | | void |
| | |
| | | |
| | | bool Java_cn_com_basic_face_util_RtspFaceNative_telCall(JNIEnv *env, jclass clazz, jstring phone) |
| | | { |
| | | |
| | | std::string _phone; |
| | | { |
| | | const char *utfFunc = env->GetStringUTFChars(phone, NULL); |
| | | _phone = utfFunc; |
| | | env->ReleaseStringUTFChars(phone, utfFunc); |
| | | } |
| | | |
| | | const char *phoneNum = _phone.c_str(); |
| | | TeleTask task; |
| | | task.command= TeleTask::CALL; |
| | | task.param = phoneNum; |
| | | g_TeleWrapper.push(task); |
| | | return true; |
| | | } |
| | | |
| | | void Java_cn_com_basic_face_util_RtspFaceNative_Hang(JNIEnv *env, jclass clazz) |
| | | { |
| | | TeleTask task; |
| | | task.command =TeleTask::HANGUP; |
| | | g_TeleWrapper.push(task); |
| | | } |
| | | |
| | | void Java_cn_com_basic_face_util_RtspFaceNative_TelShutdown(JNIEnv *env, jclass clazz) |
| | | { |
| | | LOG_DEBUG << "@@@ Java_cn_com_basic_face_util_RtspFaceNative_telShutdown" << LOG_ENDL; |
| | | g_TeleWrapper.stop(); |
| | | } |
| | | void Java_cn_com_basic_face_util_RtspFaceNative_setFaceLabel(JNIEnv *env, jclass clazz, jint cameraIdx, jint stTrackId, jstring phoneNumber) |
| | | |
| | | void Java_cn_com_basic_face_util_RtspFaceNative_setFaceLabel(JNIEnv *env, jclass clazz, jint cameraIdx, jint stTrackId, jstring label) |
| | | { |
| | | LOG_DEBUG << "@@@ Java_cn_com_basic_face_util_RtspFaceNative_setFaceLabel" << LOG_ENDL; |
| | | assert(cameraIdx > 0 && cameraIdx <= CAMERA_COUNT); |
| | | cameraIdx -= 1; |
| | | CameraWrapper &cameraWrapper(g_CameraWrappers[cameraIdx]); |
| | | |
| | | std::string _phoneNumber; |
| | | { |
| | | const char *utfFunc = env->GetStringUTFChars(phoneNumber, NULL); |
| | | _phoneNumber = utfFunc; |
| | | env->ReleaseStringUTFChars(phoneNumber, utfFunc); |
| | | } |
| | | |
| | | cameraWrapper.setFaceLabel(stTrackId, _phoneNumber); |
| | | // build the wide-string label via UTF-16; byte-wise copying of the UTF-8 chars would mangle Chinese labels |
| | | const jchar *utf16 = env->GetStringChars(label, NULL); |
| | | const jsize utf16Len = env->GetStringLength(label); |
| | | std::wstring wlabelstr(utf16, utf16 + utf16Len); |
| | | env->ReleaseStringChars(label, utf16); |
| | | cameraWrapper.setFaceLabel(stTrackId, wlabelstr); |
| | | } |
| | | |
| | | } // extern C |
| | |
| | | bool Java_cn_com_basic_face_util_RtspFaceNative_teleCall(JNIEnv *env, jclass clazz, jstring phoneNumber); |
| | | void Java_cn_com_basic_face_util_RtspFaceNative_teleHang(JNIEnv *env, jclass clazz); |
| | | |
| | | void Java_cn_com_basic_face_util_RtspFaceNative_setFaceLabel(JNIEnv *env, jclass clazz, jint cameraIdx, jint stTrackId, jstring phoneNumber); |
| | | void Java_cn_com_basic_face_util_RtspFaceNative_setFaceLabel(JNIEnv *env, jclass clazz, jint cameraIdx, jint stTrackId, jstring label); |
| | | |
| | | } |
| | | |
| | |
| | | ThisActivity = this; |
| | | |
| | | RtspFaceNative.init(); |
| | | RtspFaceNative.setLocalIP("192.168.1.74"); |
| | | RtspFaceNative.setLocalIP("192.168.1.37"); |
| | | |
| | | mGLView1 = (MyGLSurfaceView) findViewById(R.id.glsurfaceview1); |
| | | |
| | |
| | | //mCreated = RtspFaceNative.createPlayer(1, "rtsp://admin:admin@192.168.1.188:554/cam/realmonitor?channel=1&subtype=2"); |
| | | //mCreated = RtspFaceNative.createPlayer(2, "rtsp://Admin:1234@192.168.1.70/h264"); |
| | | //mCreated = RtspFaceNative.createPlayer(1, "rtsp://Admin:1234@192.168.1.70/h264_2"); |
| | | //mCreated = RtspFaceNative.createPlayer(1, "rtsp://Admin:1234@192.168.1.22/h264"); |
| | | //mCreated = RtspFaceNative.createPlayer(2, "rtsp://Admin:1234@192.168.1.22/h264"); |
| | | //mCreated = RtspFaceNative.createPlayer(1, "rtsp://admin:a1234567@192.168.1.68:554/h264/ch1/sub/av_stream"); |
| | | mCreated = RtspFaceNative.createPlayer(1, "rtsp://admin:a1234567@192.168.1.68:554/h264/ch1/main/av_stream"); |
| | | mCreated = RtspFaceNative.createPlayer(1, "rtsp://admin:a1234567@192.168.1.132:554/h264/ch1/main/av_stream"); |
| | | //mCreated = RtspFaceNative.createPlayer(2, "rtsp://admin:a1234567@192.168.1.68:554/h264/ch1/main/av_stream"); |
| | | //mCreated = RtspFaceNative.createPlayer(2, "rtsp://admin:a1234567@192.168.1.68:554/h264/ch1/main/av_stream"); |
| | | //mCreated = createPlayer(1, "rtsp://192.168.1.56:8554"); |