From 5e9814a090f20c2b9c39d6efdc9a732b6097ee7d Mon Sep 17 00:00:00 2001
From: houxiao <houxiao@454eff88-639b-444f-9e54-f578c98de674>
Date: Mon, 24 Jul 2017 20:12:53 +0800
Subject: [PATCH] Rename PL_Fork to PL_Fork1; add ptsUseAbsoluteTime, directlyDisplay and copyData options; rework pay() breaker chains; increase RTSP receive buffer
---
RtspFace/PL_Scale.cpp | 226 ++++++--------
RtspFace/PL_SensetimeFaceTrack.cpp | 140 +++++----
RtspFace/PL_Fork1.h | 22
RtspFace/PL_Fork1.cpp | 122 ++++++++
RtspFace/PL_Paint.cpp | 33 -
/dev/null | 122 --------
RtspFace/PipeLine.h | 2
RtspFace/PL_AndroidSurfaceViewRender.h | 4
RtspFace/PL_ColorConv.cpp | 12
RtspFace/PL_Scale.h | 13
RtspFace/PL_AndroidSurfaceViewRender.cpp | 94 +++--
RtspFace/live555/testProgs/testRTSPClient.hpp | 72 ++--
RtspFace/PL_BlockGrouping.cpp | 7
RtspFace/PL_AndroidMediaCodecDecoder_ndk.cpp | 6
RtspFace/PL_AndroidMediaCodecDecoder.h | 5
15 files changed, 446 insertions(+), 434 deletions(-)
diff --git a/RtspFace/PL_AndroidMediaCodecDecoder.h b/RtspFace/PL_AndroidMediaCodecDecoder.h
index 99e98e8..77fab7a 100644
--- a/RtspFace/PL_AndroidMediaCodecDecoder.h
+++ b/RtspFace/PL_AndroidMediaCodecDecoder.h
@@ -38,6 +38,8 @@
bool releaseOutputBuffIdxInPay;
int generateDecodedDataPerFrame; // 0: do not generate, N: generate every N frame ((G . . .)/N ... (G . . .)/N ...)
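+	// when true, lastMbfBuffer.pts is taken from gettimeofday() (wall clock) instead of the decoder's presentationTimeUs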
+ bool ptsUseAbsoluteTime;
+
PL_AndroidMediaCodecDecoder_Config() :
ak_height(0),
ak_mime(),
@@ -48,7 +50,8 @@
releaseOutputBuffIdx(true),
releaseOutputBuffIdxInPay(false),
- generateDecodedDataPerFrame(0)
+ generateDecodedDataPerFrame(0),
+ ptsUseAbsoluteTime(false)
{}
};
diff --git a/RtspFace/PL_AndroidMediaCodecDecoder_ndk.cpp b/RtspFace/PL_AndroidMediaCodecDecoder_ndk.cpp
index 768f904..099ff6b 100644
--- a/RtspFace/PL_AndroidMediaCodecDecoder_ndk.cpp
+++ b/RtspFace/PL_AndroidMediaCodecDecoder_ndk.cpp
@@ -343,7 +343,11 @@
in->lastMbfBuffer.buffSize = in->buffSize;
in->lastMbfBuffer.width = in->config.ak_width;
in->lastMbfBuffer.height = in->config.ak_height;
- microseconds_to_timeval(info.presentationTimeUs, in->lastMbfBuffer.pts);
+
+ if (in->config.ptsUseAbsoluteTime)
+ gettimeofday(&(in->lastMbfBuffer.pts), nullptr);
+ else
+ microseconds_to_timeval(info.presentationTimeUs, in->lastMbfBuffer.pts);
//if (in->lastMbfBuffer.buffSize > 10)
//{
diff --git a/RtspFace/PL_AndroidSurfaceViewRender.cpp b/RtspFace/PL_AndroidSurfaceViewRender.cpp
index 9e7596e..16a453a 100644
--- a/RtspFace/PL_AndroidSurfaceViewRender.cpp
+++ b/RtspFace/PL_AndroidSurfaceViewRender.cpp
@@ -130,6 +130,9 @@
bool convert_yuv420_origin_to_render(PL_ASVR_Internal* in)
{
+ if (in->config.directlyDisplay)
+ return true;
+
int src_width = in->lastMbfBuffOrigin.width;
int src_height = in->lastMbfBuffOrigin.height;
const uint8_t* src_y = (const uint8_t*)(in->lastMbfBuffOrigin.buffer);
@@ -249,35 +252,11 @@
return true;
}
-bool convert_rgb565_origin_to_render(PL_ASVR_Internal *in) {
-
- ANativeWindow* window = (ANativeWindow*)(in->config.windowSurface);
- int widow_width = ANativeWindow_getWidth(window);
- int window_height = ANativeWindow_getHeight(window);
- int src_width = in->lastMbfBuffOrigin.width;
- int src_height = in->lastMbfBuffOrigin.height;
- if (src_width != widow_width || src_height != window_height)
- {
- uint8_t* src = (uint8_t*)in->lastMbfBuffOrigin.buffer;
- uint8_t* dst = (uint8_t*)in->buffer1;
- libyuv::ScalePlane_16((uint16*)src, src_width,
- src_width, src_height,
- (uint16*)dst, widow_width,
- widow_width, window_height,
- libyuv::kFilterNone);
- in->buff1Size = widow_width * window_height * 2;
- memcpy(in->buffer, in->buffer1, in->buff1Size);
- in->buffSize = in->buff1Size;
- }
- else
- {
- memcpy(in->buffer, in->lastMbfBuffOrigin.buffer, in->lastMbfBuffOrigin.buffSize);
- in->buffSize = in->lastMbfBuffOrigin.buffSize;
- }
- return true;
-}
bool convert_nv12_origin_to_render(PL_ASVR_Internal* in)
{
+ if (in->config.directlyDisplay)
+ return true;
+
if (false)
{//#test test nv12 on yuv420 frame
const int src_width = in->lastMbfBuffOrigin.width;
@@ -409,6 +388,42 @@
return true;
}
+bool convert_rgb565_origin_to_render(PL_ASVR_Internal *in)
+{
+ if (in->config.directlyDisplay)
+ return true;
+
+ ANativeWindow* window = (ANativeWindow*)(in->config.windowSurface);
+ int src_width = in->lastMbfBuffOrigin.width;
+ int src_height = in->lastMbfBuffOrigin.height;
+
+ const int dst_width = (in->config.scaleToWidth <= 0 ? in->nativeWindowStride : in->config.scaleToWidth);
+ const int dst_height = (in->config.scaleToHeight <= 0 ? in->nativeWindowHeight : in->config.scaleToHeight);
+
+ if (src_width != dst_width || src_height != dst_height)
+ {
+ uint8_t* src = (uint8_t*)in->lastMbfBuffOrigin.buffer;
+ uint8_t* dst = (uint8_t*)in->buffer1;
+ libyuv::ScalePlane_16((uint16*)src, src_width,
+ src_width, src_height,
+ (uint16*)dst, dst_width,
+ dst_width, dst_height,
+ libyuv::kFilterNone);
+ in->buff1Size = dst_width * dst_height * 2;
+ memcpy(in->buffer, in->buffer1, in->buff1Size);
+ in->buffSize = in->buff1Size;
+ }
+ else
+ {
+ if (!in->config.directlyDisplay)
+ {
+ memcpy(in->buffer, in->lastMbfBuffOrigin.buffer, in->lastMbfBuffOrigin.buffSize);
+ in->buffSize = in->lastMbfBuffOrigin.buffSize;
+ }
+ }
+ return true;
+}
+
bool render_surface(PL_ASVR_Internal* in)
{
ANativeWindow* window = (ANativeWindow*)(in->config.windowSurface);
@@ -422,18 +437,23 @@
bitsSize = buffer.stride * buffer.height * 2;
else
bitsSize = buffer.stride * buffer.height;
-
- if (bitsSize > in->buffSize)
+
+ if (in->config.directlyDisplay)
+ memcpy(buffer.bits, in->lastMbfBuffOrigin.buffer, bitsSize);
+ else
{
- LOG_WARN << "surface buffer truncated" << LOG_ENDL;
- bitsSize = in->buffSize;
- }
- else if (bitsSize < in->buffSize)
- {
- LOG_WARN << "in buffer truncated" << LOG_ENDL;
+ if (bitsSize > in->buffSize)
+ {
+ LOG_WARN << "surface buffer truncated" << LOG_ENDL;
+ bitsSize = in->buffSize;
+ }
+ else if (bitsSize < in->buffSize)
+ {
+ LOG_WARN << "in buffer truncated" << LOG_ENDL;
+ }
+ memcpy(buffer.bits, in->buffer, bitsSize);
}
- memcpy(buffer.bits, in->buffer, bitsSize);
ANativeWindow_unlockAndPost(window);
}
else
@@ -512,7 +532,7 @@
if (in->payError)
pm.breake(PipeMaterial::PMT_FRAME_LIST, MB_Frame::MBFT_YUV420, PL_AndroidSurfaceViewRender::pay_breaker_MBFT_YUV, in);
if (in->payError)
- pm.breake(PipeMaterial::PMT_FRAME_LIST, MB_Frame::MBFT_NV12, PL_AndroidSurfaceViewRender::pay_breaker_MBFT_YUV, in);
+ pm.breake(PipeMaterial::PMT_FRAME_LIST, MB_Frame::MBFT_NV12, PL_AndroidSurfaceViewRender::pay_breaker_MBFT_YUV, in);
if (in->payError)
pm.breake(PipeMaterial::PMT_FRAME, MB_Frame::MBFT_YUV420, PL_AndroidSurfaceViewRender::pay_breaker_MBFT_YUV, in);
if (in->payError)
diff --git a/RtspFace/PL_AndroidSurfaceViewRender.h b/RtspFace/PL_AndroidSurfaceViewRender.h
index 97e47b9..fab45a7 100644
--- a/RtspFace/PL_AndroidSurfaceViewRender.h
+++ b/RtspFace/PL_AndroidSurfaceViewRender.h
@@ -10,13 +10,15 @@
bool outputRenderFrame;
int scaleToWidth; // <=0 to fit size by windowSurfaceDecode
int scaleToHeight;
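+	// when true, skip conversion/scaling and copy lastMbfBuffOrigin straight into the surface buffer in render_surface()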
+ bool directlyDisplay;
PL_AndroidSurfaceViewRender_Config() :
windowSurface(nullptr),
outputOriginFrame(false),
outputRenderFrame(true),
scaleToWidth(0),
- scaleToHeight(0)
+ scaleToHeight(0),
+ directlyDisplay(false)
{}
};
diff --git a/RtspFace/PL_BlockGrouping.cpp b/RtspFace/PL_BlockGrouping.cpp
index 68ecddc..607e51b 100644
--- a/RtspFace/PL_BlockGrouping.cpp
+++ b/RtspFace/PL_BlockGrouping.cpp
@@ -469,8 +469,11 @@
cv::Mat yMat(cv::Size(w, h), CV_8UC1, block.croppedData);
cv::Mat edges;
- cv::GaussianBlur(yMat, edges, cv::Size(5, 5), 1.5, 1.5);
- cv::Canny(edges, edges, 0, 30, 3);
+ cv::GaussianBlur(yMat, edges, cv::Size(5, 5), 1.5, 1.5);
+ //cv::Canny(edges, edges, 50, 100, 3);
+ cv::Sobel(yMat, edges, CV_8UC1, 1, 0, 3);
+
+ //memcpy(block.croppedData, edges.data, w * h);
size_t sum = 0;
std::for_each(edges.begin<uint8_t>(), edges.end<uint8_t>(), [&](uint8_t v)
diff --git a/RtspFace/PL_ColorConv.cpp b/RtspFace/PL_ColorConv.cpp
index 5488e98..c20d0c9 100644
--- a/RtspFace/PL_ColorConv.cpp
+++ b/RtspFace/PL_ColorConv.cpp
@@ -3,7 +3,7 @@
#include "logger.h"
#include <libyuv.h>
#include <PbFaceList.pb.h>
-#define SUBSAMPLE(v, a) ((((v) + (a) - 1)) / (a))
+
struct PL_ColorConv_Internal
{
uint8_t* buffer;
@@ -94,7 +94,7 @@
in->buffer = new uint8_t[dstSize];
in->buffSizeMax = dstSize;
in->buffSize = dstSize;
- LOG_INFO << "image_scale alloc buffer size=" << dstSize << std::endl;
+ LOG_INFO << "image_to_rgb565 alloc buffer size=" << dstSize << std::endl;
}
if (srcType == MB_Frame::MBFT_YUV420)
@@ -105,10 +105,10 @@
}
else if (srcType == MB_Frame::MBFT_NV12)
{
- const uint8_t * srcBuffer = (uint8_t *)in->tempFrame.buffer;
- const uint8_t *src_y = srcBuffer;
- const uint8_t *src_uv = src_y + (srcHeight * srcWidth);
- uint8_t *dst = (uint8_t *) (in->buffer);
+ const uint8_t* srcBuffer = (uint8_t*)in->tempFrame.buffer;
+ const uint8_t* src_y = srcBuffer;
+ const uint8_t* src_uv = src_y + (srcHeight * srcWidth);
+ uint8_t* dst = (uint8_t*)(in->buffer);
libyuv::NV12ToRGB565(src_y, srcWidth,
src_uv, srcWidth,
dst, srcWidth * 2,
diff --git a/RtspFace/PL_Fork.cpp b/RtspFace/PL_Fork.cpp
deleted file mode 100644
index 91af5a9..0000000
--- a/RtspFace/PL_Fork.cpp
+++ /dev/null
@@ -1,122 +0,0 @@
-#include "PL_Fork.h"
-#include "MaterialBuffer.h"
-#include "logger.h"
-
-struct PL_Fork_Internal
-{
- //uint8_t buffer[1920*1080*4];
- //size_t buffSize;
- //size_t buffSizeMax;
-
- PipeMaterial lastPm;
-
- MB_Frame lastFrame;
-
- PL_Fork_Config config;
-
- bool payError;
-
- PL_Fork_Internal() :
- //buffSize(0), buffSizeMax(sizeof(buffer)),
- lastPm(), lastFrame(), config(), payError(true)
- {
- }
-
- ~PL_Fork_Internal()
- {
- }
-
- void reset()
- {
- //buffSize = 0;
- payError = true;
-
- PipeMaterial _lastPm;
- lastPm = _lastPm;
-
- MB_Frame _lastFrame;
- lastFrame = _lastFrame;
-
- PL_Fork_Config _config;
- config = _config;
- }
-};
-
-PipeLineElem* create_PL_Fork()
-{
- return new PL_Fork;
-}
-
-PL_Fork::PL_Fork() : internal(new PL_Fork_Internal), pl(nullptr)
-{
-}
-
-PL_Fork::~PL_Fork()
-{
- delete (PL_Fork_Internal*)internal;
- internal= nullptr;
-}
-
-bool PL_Fork::init(void* args)
-{
- PL_Fork_Internal* in = (PL_Fork_Internal*)internal;
- in->reset();
-
- if (args == nullptr)
- {
- LOG_ERROR << "Config should give" << std::endl;
- return false;
- }
-
- PL_Fork_Config* config = (PL_Fork_Config*)args;
- in->config = *config;
-
- return true;
-}
-
-void PL_Fork::finit()
-{
- PL_Fork_Internal* in = (PL_Fork_Internal*)internal;
-
-}
-
-bool PL_Fork::pay(const PipeMaterial& pm)
-{
- PL_Fork_Internal* in = (PL_Fork_Internal*)internal;
- PL_Fork_Config& config(in->config);
-
- in->lastPm = pm;
-
- return false;
-}
-
-bool PL_Fork::gain(PipeMaterial& pm)
-{
- PL_Fork_Internal* in = (PL_Fork_Internal*)internal;
-
- return false;
-}
-
-void PL_Fork::attach_pipe_line(PipeLine* pl)
-{
- PL_Fork_Internal* in = (PL_Fork_Internal*)internal;
-
- if (this->pl != nullptr)
- {
- LOG_ERROR << "Has areadly attached pipe line" << std::endl;
- return;
- }
-
- if (pl == nullptr)
- {
- LOG_NOTICE << "Detach pipe line" << std::endl;
- this->pl->remove_elem(this);
- this->pl = nullptr;
- return;
- }
-
- this->pl = pl;
- PipeLine* mainPipeLineManager = this->manager;
- pl->push_front_elem(this);
- this->manager = mainPipeLineManager;
-}
diff --git a/RtspFace/PL_Fork1.cpp b/RtspFace/PL_Fork1.cpp
new file mode 100644
index 0000000..ec0ecc6
--- /dev/null
+++ b/RtspFace/PL_Fork1.cpp
@@ -0,0 +1,122 @@
+#include "PL_Fork1.h"
+#include "MaterialBuffer.h"
+#include "logger.h"
+
+struct PL_Fork1_Internal
+{
+ //uint8_t buffer[1920*1080*4];
+ //size_t buffSize;
+ //size_t buffSizeMax;
+
+ PipeMaterial lastPm;
+
+ MB_Frame lastFrame;
+
+ PL_Fork1_Config config;
+
+ bool payError;
+
+ PL_Fork1_Internal() :
+ //buffSize(0), buffSizeMax(sizeof(buffer)),
+ lastPm(), lastFrame(), config(), payError(true)
+ {
+ }
+
+ ~PL_Fork1_Internal()
+ {
+ }
+
+ void reset()
+ {
+ //buffSize = 0;
+ payError = true;
+
+ PipeMaterial _lastPm;
+ lastPm = _lastPm;
+
+ MB_Frame _lastFrame;
+ lastFrame = _lastFrame;
+
+ PL_Fork1_Config _config;
+ config = _config;
+ }
+};
+
+PipeLineElem* create_PL_Fork1()
+{
+ return new PL_Fork1;
+}
+
+PL_Fork1::PL_Fork1() : internal(new PL_Fork1_Internal), pl(nullptr)
+{
+}
+
+PL_Fork1::~PL_Fork1()
+{
+ delete (PL_Fork1_Internal*)internal;
+ internal = nullptr;
+}
+
+bool PL_Fork1::init(void* args)
+{
+ PL_Fork1_Internal* in = (PL_Fork1_Internal*)internal;
+ in->reset();
+
+ if (args == nullptr)
+ {
+ LOG_ERROR << "Config should give" << std::endl;
+ return false;
+ }
+
+ PL_Fork1_Config* config = (PL_Fork1_Config*)args;
+ in->config = *config;
+
+ return true;
+}
+
+void PL_Fork1::finit()
+{
+ PL_Fork1_Internal* in = (PL_Fork1_Internal*)internal;
+
+}
+
+bool PL_Fork1::pay(const PipeMaterial& pm)
+{
+ PL_Fork1_Internal* in = (PL_Fork1_Internal*)internal;
+ PL_Fork1_Config& config(in->config);
+
+ in->lastPm = pm;
+
+ return false;
+}
+
+bool PL_Fork1::gain(PipeMaterial& pm)
+{
+ PL_Fork1_Internal* in = (PL_Fork1_Internal*)internal;
+
+ return false;
+}
+
+void PL_Fork1::attach_pipe_line(PipeLine* pl)
+{
+ PL_Fork1_Internal* in = (PL_Fork1_Internal*)internal;
+
+ if (this->pl != nullptr)
+ {
+ LOG_ERROR << "Has areadly attached pipe line" << std::endl;
+ return;
+ }
+
+ if (pl == nullptr)
+ {
+ LOG_NOTICE << "Detach pipe line" << std::endl;
+ this->pl->remove_elem(this);
+ this->pl = nullptr;
+ return;
+ }
+
+ this->pl = pl;
+ PipeLine* mainPipeLineManager = this->manager;
+ pl->push_front_elem(this);
+ this->manager = mainPipeLineManager;
+}
diff --git a/RtspFace/PL_Fork.h b/RtspFace/PL_Fork1.h
similarity index 77%
rename from RtspFace/PL_Fork.h
rename to RtspFace/PL_Fork1.h
index 5eaaffe..e719e88 100644
--- a/RtspFace/PL_Fork.h
+++ b/RtspFace/PL_Fork1.h
@@ -1,11 +1,11 @@
-#ifndef _PL_FORK_H_
-#define _PL_FORK_H_
+#ifndef _PL_FORK1_H_
+#define _PL_FORK1_H_
#include "PipeLine.h"
// p1e1 p1e2<fork> p1e3 ... p1eN
// p2e1 p2e2... p2eN
-class PL_Fork : public PipeLineElem
+class PL_Fork1 : public PipeLineElem
{
public:
enum ForkBy
@@ -49,8 +49,8 @@
};
public:
- PL_Fork();
- virtual ~PL_Fork();
+ PL_Fork1();
+ virtual ~PL_Fork1();
virtual bool init(void* args);
virtual void finit();
@@ -65,19 +65,19 @@
PipeLine* pl;
};
-struct PL_Fork_Config
+struct PL_Fork1_Config
{
- PL_Fork::ForkBy forkBy;
- PL_Fork::ForkSync forkSync;
+ PL_Fork1::ForkBy forkBy;
+ PL_Fork1::ForkSync forkSync;
int mainPLType;
int branchPLType;
- PL_Fork_Config() :
- forkBy(PL_Fork::FB_NONE), forkSync(PL_Fork::FS_NONE), mainPLType(0), branchPLType(0)
+ PL_Fork1_Config() :
+ forkBy(PL_Fork1::FB_NONE), forkSync(PL_Fork1::FS_NONE), mainPLType(0), branchPLType(0)
{ }
};
-PipeLineElem* create_PL_Fork();
+PipeLineElem* create_PL_Fork1();
#endif
diff --git a/RtspFace/PL_Paint.cpp b/RtspFace/PL_Paint.cpp
index c6fb45c..acfd90c 100644
--- a/RtspFace/PL_Paint.cpp
+++ b/RtspFace/PL_Paint.cpp
@@ -308,7 +308,7 @@
#endif
}
-bool plplExecutor_YUV(PL_Paint_Internal *in)
+bool plpl_executor(PL_Paint_Internal *in)
{
MB_Frame* paintMb = &(in->lastMbfBuffOrigin);
int ret = true;
@@ -426,7 +426,7 @@
in->lastMbfBuffOrigin.height = frame->height;
in->lastMbfBuffOrigin.pts = frame->pts;
- in->payError = !plplExecutor_YUV(in);
+ in->payError = !plpl_executor(in);
return false;
}
@@ -435,29 +435,12 @@
{
PL_Paint_Internal* in = (PL_Paint_Internal*)internal;
in->payError = true;
-
- if (pm.type != PipeMaterial::PMT_FRAME)
- {
- LOG_ERROR << "Only support PMT_FRAME" << LOG_ENDL;
- return false;
- }
-
- if (pm.buffer == nullptr)
- return false;
-
- MB_Frame* frame = (MB_Frame*)pm.buffer;
- switch(frame->type)
- {
- case MB_Frame::MBFT_YUV420:
- case MB_Frame::MBFT_NV12:
- case MB_Frame::MBFT_RGB565:
- pm.breake(PipeMaterial::PMT_FRAME, MB_Frame::MBFT_RGB565, PL_Paint::pay_breaker_MBFT, in);
- return !(in->payError);
- default:
- LOG_ERROR << "Only support MBFT_YUV420 / MBFT_NV12 / MBFT_RGB565" << LOG_ENDL;
- in->payError = true;
- break;
- }
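+	// try each supported frame type in turn; a breaker that handles the frame clears payError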
+ if (in->payError)
+ pm.breake(PipeMaterial::PMT_FRAME, MB_Frame::MBFT_YUV420, PL_Paint::pay_breaker_MBFT, in);
+ if (in->payError)
+ pm.breake(PipeMaterial::PMT_FRAME, MB_Frame::MBFT_NV12, PL_Paint::pay_breaker_MBFT, in);
+ if (in->payError)
+ pm.breake(PipeMaterial::PMT_FRAME, MB_Frame::MBFT_RGB565, PL_Paint::pay_breaker_MBFT, in);
return !(in->payError);
}
diff --git a/RtspFace/PL_Scale.cpp b/RtspFace/PL_Scale.cpp
index 8bc0574..bbb6995 100644
--- a/RtspFace/PL_Scale.cpp
+++ b/RtspFace/PL_Scale.cpp
@@ -1,9 +1,8 @@
#include "PL_Scale.h"
#include "MaterialBuffer.h"
#include "logger.h"
+#include "MediaHelper.h"
#include <libyuv.h>
-
-#define SUBSAMPLE(v, a) ((((v) + (a) - 1)) / (a))
struct PL_Scale_Internal
{
@@ -41,12 +40,11 @@
PL_Scale_Config _config;
config = _config;
- if (buffer != nullptr)
- {
+ if (buffSizeMax > 0)
delete[] buffer;
- buffer = nullptr;
- buffSizeMax = 0;
- }
+
+ buffer = nullptr;
+ buffSizeMax = 0;
}
};
@@ -91,31 +89,40 @@
}
-bool image_scale(PL_Scale_Internal* in,
- uint8_t* srcBuffer, MB_Frame::MBFType srcType, uint16_t srcWidth, uint16_t srcHeight)
+bool image_scale(PL_Scale_Internal* in, uint8_t* srcBuffer, size_t buffSize, MB_Frame::MBFType srcType, uint16_t srcWidth, uint16_t srcHeight)
{
-
const int dst_width = in->config.toWidth;
const int dst_height = in->config.toHeight;
size_t dstSizeMax = 0;
- if (srcType == MB_Frame::MBFT_YUV420||srcType == MB_Frame::MBFT_NV12)
- dstSizeMax = in->config.toWidth * in->config.toHeight * 1.5;
- else if (srcType == MB_Frame::MBFT_BGRA)
- dstSizeMax = in->config.toWidth * in->config.toHeight * 4;
+ if ((dst_width != srcWidth && dst_height != srcHeight) || in->config.copyData)
+ {
+ if (srcType == MB_Frame::MBFT_YUV420 || srcType == MB_Frame::MBFT_NV12)
+ dstSizeMax = in->config.toWidth * in->config.toHeight * 2; // #todo 1.5
+ else if (srcType == MB_Frame::MBFT_BGRA)
+ dstSizeMax = in->config.toWidth * in->config.toHeight * 4;
+ else
+ {
+ LOG_ERROR << "srcType only support MBFT_YUV420 and MBFT_BGRA" << std::endl;
+ return false;
+ }
+
+ if (in->buffer == nullptr || in->buffSizeMax < dstSizeMax)
+ {
+ if (in->buffer != nullptr)
+ delete[] in->buffer;
+ in->buffer = new uint8_t[dstSizeMax];
+ in->buffSizeMax = dstSizeMax;
+ LOG_INFO << "image_scale alloc buffer size=" << dstSizeMax << std::endl;
+ }
+ }
else
{
- LOG_ERROR << "srcType only support MBFT_YUV420 and MBFT_BGRA" << std::endl;
- return false;
- }
+ in->buffer = srcBuffer;
+ in->buffSize = buffSize;
+ in->buffSizeMax = 0;
- if (in->buffer == nullptr || in->buffSizeMax < dstSizeMax)
- {
- if (in->buffer != nullptr)
- delete[] in->buffer;
- in->buffer = new uint8_t[dstSizeMax];
- in->buffSizeMax = dstSizeMax;
- LOG_INFO << "image_scale alloc buffer size=" << dstSizeMax << std::endl;
+ return true;
}
if (srcType == MB_Frame::MBFT_YUV420)
@@ -129,12 +136,12 @@
libyuv::I420Scale(
src_y, srcWidth,
- src_u, SUBSAMPLE(srcWidth, 2),
- src_v, SUBSAMPLE(srcWidth, 2),
+ src_u, MH_SUBSAMPLE1(srcWidth, 2),
+ src_v, MH_SUBSAMPLE1(srcWidth, 2),
srcWidth, srcHeight,
dst_y, dst_width,
- dst_u, SUBSAMPLE(dst_width, 2),
- dst_v, SUBSAMPLE(dst_width, 2),
+ dst_u, MH_SUBSAMPLE1(dst_width, 2),
+ dst_v, MH_SUBSAMPLE1(dst_width, 2),
dst_width, dst_height,
(libyuv::FilterMode)(in->config.filterMode));
@@ -144,125 +151,82 @@
{
const uint8_t* src_y = (const uint8_t*)(srcBuffer);
const uint8_t* src_uv = (const uint8_t*)(src_y + (srcHeight * srcWidth));
- if (srcWidth != dst_width || srcHeight != dst_height)
- {
- // RK3288, 1920->640: 2.8~12ms, avg=4ms
- uint8_t* dst_y = (uint8_t*)(in->buffer);
- uint8_t* dst_uv = (uint8_t*)(dst_y + (dst_height * dst_width));
+ // RK3288, 1920->640: 2.8~12ms, avg=4ms
+ uint8_t* dst_y = (uint8_t*)(in->buffer);
+ uint8_t* dst_uv = (uint8_t*)(dst_y + (dst_height * dst_width));
- libyuv::ScalePlane(src_y, srcWidth,
- srcWidth, srcHeight,
- dst_y, dst_width,
- dst_width, dst_height,
- libyuv::kFilterNone);
+ libyuv::ScalePlane(src_y, srcWidth,
+ srcWidth, srcHeight,
+ dst_y, dst_width,
+ dst_width, dst_height,
+ libyuv::kFilterNone);
- libyuv::ScalePlane_16((uint16*)src_uv, SUBSAMPLE(srcWidth, 2),
- SUBSAMPLE(srcWidth, 2), SUBSAMPLE(srcHeight, 2),
- (uint16*)dst_uv, SUBSAMPLE(dst_width, 2),
- SUBSAMPLE(dst_width, 2), SUBSAMPLE(dst_height, 2),
- libyuv::kFilterNone);
- in->buffSize = dstSizeMax;
- }
- else if (srcType == MB_Frame::MBFT_BGRA)
- {
- //#todo
- LOG_ERROR << "srcType only support MBFT_YUV420 and MBFT_NV12" << std::endl;
- return false;
- }
- else
- {
- LOG_ERROR << "srcType only support MBFT_YUV420 and MBFT_NV12" << std::endl;
- return false;
- }
- return true;
+ libyuv::ScalePlane_16((uint16*)src_uv, MH_SUBSAMPLE1(srcWidth, 2),
+ MH_SUBSAMPLE1(srcWidth, 2), MH_SUBSAMPLE1(srcHeight, 2),
+ (uint16*)dst_uv, MH_SUBSAMPLE1(dst_width, 2),
+ MH_SUBSAMPLE1(dst_width, 2), MH_SUBSAMPLE1(dst_height, 2),
+ libyuv::kFilterNone);
+
+ in->buffSize = dstSizeMax;
}
+ else if (srcType == MB_Frame::MBFT_BGRA)
+ {
+ //#todo
+ LOG_ERROR << "srcType only support MBFT_YUV420 and MBFT_NV12" << std::endl;
+ return false;
+ }
+ else
+ {
+ LOG_ERROR << "srcType only support MBFT_YUV420 and MBFT_NV12" << std::endl;
+ return false;
+ }
+
+ return true;
+}
+
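+// breaker callback: scales the matched MB_Frame via image_scale(); on success the frame's metadata is kept in tempFrame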
+/*static*/ bool PL_Scale::pay_breaker_MBFT(const PipeMaterial* pm, void* args)
+{
+ PL_Scale_Internal* in = (PL_Scale_Internal*)args;
+
+ MB_Frame* mbf = (MB_Frame*)pm->buffer;
+ in->payError = !image_scale(in, (uint8_t*)mbf->buffer, mbf->buffSize, mbf->type, mbf->width, mbf->height);
+
+ if (!(in->payError))
+ in->tempFrame = *mbf;
+
+ return false;
}
bool PL_Scale::pay(const PipeMaterial& pm)
{
PL_Scale_Internal* in = (PL_Scale_Internal*)internal;
-
- in->payError = true;
-
- if (pm.buffer == nullptr)
- return false;
-
- bool ret = false;
-
- in->lastPmType = pm.type;
-
- switch(pm.type)
- {
- case PipeMaterial::PMT_BYTES:
- {
- if (in->config.defaultBytesType <= 0 ||
- in->config.defaultBytesWidth <= 0 || in->config.defaultBytesHeight <= 0)
- {
- LOG_ERROR << "defaultBytesType/defaultBytesWidth/defaultBytesHeight not set" << std::endl;
- return false;
- }
-
- ret = image_scale(in, (uint8_t*)pm.buffer, (MB_Frame::MBFType)(in->config.defaultBytesType),
- in->config.defaultBytesWidth, in->config.defaultBytesHeight);
- }
- break;
- case PipeMaterial::PMT_FRAME:
- {
- MB_Frame* frame = (MB_Frame*)pm.buffer;
- switch(frame->type)
- {
- case MB_Frame::MBFT_YUV420:
- case MB_Frame::MBFT_BGRA:
- in->tempFrame = *frame;
- ret = image_scale(in, (uint8_t*)frame->buffer, frame->type,
- frame->width, frame->height);
- break;
- default:
- LOG_ERROR << "Only support MBFT_YUV420 / MBFT_BGRA" << std::endl;
- return false;
- }
- }
- break;
- case PipeMaterial::PMT_PM_LIST:
- {
- // break pm list into single pm(s)
- MB_Frame* ppm = (MB_Frame*)pm.buffer;
- for (size_t i = 0; i < pm.buffSize; i++, ppm++)
- {
- if (ppm->type== PipeMaterial::PMT_FRAME)
- {
- MB_Frame* frame = (MB_Frame*)ppm->buffer;
- switch(frame->type)
- {
- case MB_Frame::MBFT_YUV420:
- case MB_Frame::MBFT_BGRA:
- case MB_Frame::MBFT_NV12:
- in->tempFrame = *frame;
- ret = image_scale(in, (uint8_t*)frame->buffer,frame->type,
- frame->width, frame->height);
- break;
- default:
- LOG_ERROR << "Only support MBFT_YUV420 / MBFT_BGRA" << std::endl;
- return false;
- }
- }
- }
- }break;
- default:
- LOG_ERROR << "Only support PMT_BYTES / PMT_FRAME" << std::endl;
- return false;
- }
-
- in->payError = !ret;
- return ret;
+ in->payError = true;
+ if (in->payError)
+ pm.breake(PipeMaterial::PMT_FRAME_LIST, MB_Frame::MBFT_YUV420, pay_breaker_MBFT, in);
+ if (in->payError)
+ pm.breake(PipeMaterial::PMT_FRAME_LIST, MB_Frame::MBFT_NV12, pay_breaker_MBFT, in);
+ if (in->payError)
+ pm.breake(PipeMaterial::PMT_FRAME, MB_Frame::MBFT_YUV420, pay_breaker_MBFT, in);
+ if (in->payError)
+ pm.breake(PipeMaterial::PMT_FRAME, MB_Frame::MBFT_NV12, pay_breaker_MBFT, in);
+ if (in->payError)
+ pm.breake(PipeMaterial::PMT_FRAME, MB_Frame::MBFT_RGB565, pay_breaker_MBFT, in);
+
+ if (!(in->payError))
+ in->lastPmType = pm.type;
+
+ return !(in->payError);
}
bool PL_Scale::gain(PipeMaterial& pm)
{
PL_Scale_Internal* in = (PL_Scale_Internal*)internal;
+ if (in->payError)
+ return false;
+
PipeMaterial newPm;
newPm.type = PipeMaterial::PMT_NONE;
newPm.former = this;
diff --git a/RtspFace/PL_Scale.h b/RtspFace/PL_Scale.h
index 76e00b5..3c074f2 100644
--- a/RtspFace/PL_Scale.h
+++ b/RtspFace/PL_Scale.h
@@ -5,6 +5,8 @@
struct PL_Scale_Config
{
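+	// when false and no scaling is required, image_scale() hands the source buffer through without copying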
+ bool copyData;
+
uint16_t toWidth;
uint16_t toHeight;
int filterMode; // libyuv/scale.h/FilterMode
@@ -13,10 +15,11 @@
int defaultBytesType; // MBFT_YUV420 / MBFT_BGRA
uint16_t defaultBytesWidth;
uint16_t defaultBytesHeight;
-
- PL_Scale_Config() :
- toWidth(0), toHeight(0), filterMode(0),
- defaultBytesType(0), defaultBytesWidth(0), defaultBytesHeight(0)
+
+ PL_Scale_Config() :
+ copyData(true),
+ toWidth(0), toHeight(0), filterMode(0),
+ defaultBytesType(0), defaultBytesWidth(0), defaultBytesHeight(0)
{ }
};
@@ -34,6 +37,8 @@
private:
void* internal;
+
+ static bool pay_breaker_MBFT(const PipeMaterial* pm, void* args);
};
PipeLineElem* create_PL_Scale();
diff --git a/RtspFace/PL_SensetimeFaceTrack.cpp b/RtspFace/PL_SensetimeFaceTrack.cpp
index 8bb4c37..6335e63 100644
--- a/RtspFace/PL_SensetimeFaceTrack.cpp
+++ b/RtspFace/PL_SensetimeFaceTrack.cpp
@@ -20,40 +20,40 @@
st_ff_vect_t faceFeatures;
bool payError;
-
+
cv_handle_t handle_track;
-
+
size_t frameCount;
-
- PL_SensetimeFaceTrack_Internal() :
- //buffSize(0), buffSizeMax(sizeof(buffer)),
- lastFrame(), pmList(), config(), faceFeatures(), payError(true),
- handle_track(nullptr),
- frameCount(0)
+
+ PL_SensetimeFaceTrack_Internal() :
+ //buffSize(0), buffSizeMax(sizeof(buffer)),
+ lastFrame(), pmList(), config(), faceFeatures(), payError(true),
+ handle_track(nullptr),
+ frameCount(0)
{
}
-
+
~PL_SensetimeFaceTrack_Internal()
{
}
-
+
void reset()
{
//buffSize = 0;
payError = true;
-
+
MB_Frame _lastFrame;
lastFrame = _lastFrame;
-
+
PipeMaterial _pm;
pmList[0] = _pm;
pmList[1] = _pm;
-
+
SensetimeFaceTrackConfig _config;
config = _config;
-
+
handle_track = nullptr;
-
+
frameCount = 0;
}
};
@@ -77,7 +77,7 @@
{
PL_SensetimeFaceTrack_Internal* in = (PL_SensetimeFaceTrack_Internal*)internal;
in->reset();
-
+
#ifdef __ANDROID__
{
bool retLic = false;
@@ -123,7 +123,7 @@
}
}
#endif
-
+
SensetimeFaceTrackConfig* config = (SensetimeFaceTrackConfig*)args;
in->config = *config;
if (in->config.point_size == 21)
@@ -145,8 +145,8 @@
// per detect in RK3288: 800ms@1920w,200ms@640w; with CV_FACE_TRACKING_TWO_THREAD 10ms@1920w
// init handle
- cv_result_t cv_result = cv_face_create_tracker(&(in->handle_track), nullptr,
- in->config.point_size_config | CV_FACE_TRACKING_TWO_THREAD); // CV_FACE_TRACKING_TWO_THREAD | CV_FACE_RESIZE_IMG_XXXX
+ cv_result_t cv_result = cv_face_create_tracker(&(in->handle_track), nullptr,
+ in->config.point_size_config | CV_FACE_TRACKING_TWO_THREAD); // CV_FACE_TRACKING_TWO_THREAD | CV_FACE_RESIZE_IMG_XXXX
if (cv_result != CV_OK)
{
LOG_ERROR << "cv_face_create_tracker failed, error code" << cv_result << LOG_ENDL;
@@ -162,14 +162,14 @@
}
else
LOG_ERROR << "detect face count limit : " << val << LOG_ENDL;
-
+
return true;
}
void PL_SensetimeFaceTrack::finit()
{
PL_SensetimeFaceTrack_Internal* in = (PL_SensetimeFaceTrack_Internal*)internal;
-
+
// destroy track handle
cv_face_destroy_tracker(in->handle_track);
in->handle_track = nullptr;
@@ -189,27 +189,26 @@
<< p_face[i].rect.bottom << "\t";
dumpfile << p_face[i].score << "\t"
- << p_face[i].points_count << "\t"
- << p_face[i].yaw << "\t"
- << p_face[i].pitch << "\t"
- << p_face[i].roll << "\t"
- << p_face[i].eye_dist << "\t"
- << p_face[i].ID << "\t";
+ << p_face[i].points_count << "\t"
+ << p_face[i].yaw << "\t"
+ << p_face[i].pitch << "\t"
+ << p_face[i].roll << "\t"
+ << p_face[i].eye_dist << "\t"
+ << p_face[i].ID << "\t";
cv_pointf_t points_array[256];
for (int j = 0; j < p_face[i].points_count; j++)
{
dumpfile << p_face[i].points_array[j].x << "\t"
- << p_face[i].points_array[j].y << "\t";
+ << p_face[i].points_array[j].y << "\t";
}
dumpfile << std::endl;
}
}
-int doFaceTrack(PL_SensetimeFaceTrack_Internal* in,
- uint8_t* buffer, size_t width, size_t height, size_t stride, cv_pixel_format cvPixFmt)
+int doFaceTrack(PL_SensetimeFaceTrack_Internal* in, MB_Frame* frame)
{
//PipeLineElemTimingDebugger td(nullptr);
@@ -218,16 +217,39 @@
if (in->frameCount % in->config.doTrackPerFrame != 0)
return 0;
+ //if (true)
+ //{
+ // struct timeval now;
+ // gettimeofday(&now, nullptr);
+ // const int fps = 20;
+ // const int ft = 1000 / fps; // ms
+ // if (now.tv_usec - frame->pts.tv_usec > 0.5 * ft * 1000)
+ // return 0;
+ //}
+
//resize(bgr_frame, bgr_frame, Size(frame_width, frame_height), 0, 0, INTER_LINEAR);
+
+ uint8_t* buffer = (uint8_t*)frame->buffer;
+ const size_t width = frame->width;
+ const size_t height = frame->height;
+ const size_t stride = frame->width;
+ cv_pixel_format cvPixFmt;
+ if (frame->type == MB_Frame::MBFT_YUV420)
+ cvPixFmt = CV_PIX_FMT_YUV420P;
+ else if (frame->type == MB_Frame::MBFT_NV12)
+ cvPixFmt = CV_PIX_FMT_NV12;
+ else
+ return -1;
int face_count = 0;
cv_result_t cv_result = CV_OK;
cv_face_t* p_face = nullptr;
-
+
+ //#test
+ //cvPixFmt = CV_PIX_FMT_GRAY8;
+
// realtime track
- cv_result = cv_face_track(in->handle_track, buffer, cvPixFmt,
- width, height, stride,
- CV_FACE_UP, &p_face, &face_count);
+ cv_result = cv_face_track(in->handle_track, buffer, cvPixFmt, width, height, stride, CV_FACE_UP, &p_face, &face_count);
if (cv_result != CV_OK)
{
LOG_ERROR << "cv_face_track failed, error : " << cv_result << LOG_ENDL;
@@ -263,27 +285,27 @@
faceFeature.eyeDistance = p_face[i].eye_dist;
LOGP(DEBUG, "face: %d-----[%d, %d, %d, %d]-----id: %d", i,
- p_face[i].rect.left, p_face[i].rect.top,
- p_face[i].rect.right, p_face[i].rect.bottom, p_face[i].ID);
-
+ p_face[i].rect.left, p_face[i].rect.top,
+ p_face[i].rect.right, p_face[i].rect.bottom, p_face[i].ID);
+
LOGP(DEBUG, "face pose: [yaw: %.2f, pitch: %.2f, roll: %.2f, eye distance: %.2f]",
- p_face[i].yaw,
- p_face[i].pitch, p_face[i].roll, p_face[i].eye_dist);
+ p_face[i].yaw,
+ p_face[i].pitch, p_face[i].roll, p_face[i].eye_dist);
#ifdef USE_OPENCV
if (in->config.draw_face_rect)
{
cv::Scalar scalar_color = CV_RGB(p_face[i].ID * 53 % 256,
- p_face[i].ID * 93 % 256,
- p_face[i].ID * 143 % 256);
-
+ p_face[i].ID * 93 % 256,
+ p_face[i].ID * 143 % 256);
+
//cv::rectangle(yMat, cv::Point2f(0, 0), cv::Point2f(50, 50), scalar_color, 2);
//cv::rectangle(yMat, cv::Point2f(500, 500), cv::Point2f(550, 550), scalar_color, 2);
-
+
cv::rectangle(yMat, cv::Point2f(static_cast<float>(p_face[i].rect.left),
- static_cast<float>(p_face[i].rect.top)),
- cv::Point2f(static_cast<float>(p_face[i].rect.right),
- static_cast<float>(p_face[i].rect.bottom)), scalar_color, 2);
+ static_cast<float>(p_face[i].rect.top)),
+ cv::Point2f(static_cast<float>(p_face[i].rect.right),
+ static_cast<float>(p_face[i].rect.bottom)), scalar_color, 2);
}
#endif
@@ -320,7 +342,7 @@
}
if (faceFeature.rect.leftTop.X < 0 || faceFeature.rect.rightBottom.X > width ||
- faceFeature.rect.leftTop.Y < 0 || faceFeature.rect.rightBottom.Y > height)
+ faceFeature.rect.leftTop.Y < 0 || faceFeature.rect.rightBottom.Y > height)
faceFeature.outOfFrame = true;
if (in->config.generate_face_feature)
@@ -397,7 +419,7 @@
// faceFeature.rect.rightBottom.x = 50+128;
// faceFeature.rect.rightBottom.y = 50+128;
// in->faceFeatures.push_back(faceFeature);
- //
+ //
// faceFeature.rect.leftTop.x = 300;
// faceFeature.rect.leftTop.y = 400;
// faceFeature.rect.rightBottom.x = 300+50;
@@ -418,10 +440,10 @@
LOG_ERROR << "Only support PMT_FRAME" << LOG_ENDL;
return false;
}
-
+
if (pm->buffer == nullptr)
return false;
-
+
MB_Frame* frame = (MB_Frame*)pm->buffer;
if (frame->type != MB_Frame::MBFT_YUV420 && frame->type != MB_Frame::MBFT_NV12)
{
@@ -431,10 +453,8 @@
in->faceFeatures.clear();
int face_count = 0;
- if (frame->type == MB_Frame::MBFT_YUV420)
- face_count = doFaceTrack(in, (uint8_t*)frame->buffer, frame->width, frame->height, frame->width, CV_PIX_FMT_YUV420P);
- else if (frame->type == MB_Frame::MBFT_NV12)
- face_count = doFaceTrack(in, (uint8_t*)frame->buffer, frame->width, frame->height, frame->width, CV_PIX_FMT_NV12);
+ if (frame->type == MB_Frame::MBFT_YUV420 || frame->type == MB_Frame::MBFT_NV12)
+ face_count = doFaceTrack(in, frame);
if (face_count < 0)
{
@@ -443,7 +463,7 @@
}
else
in->payError = false;
-
+
//in->buffer readly
in->lastFrame.type = frame->type;
@@ -452,7 +472,7 @@
in->lastFrame.width = frame->width;
in->lastFrame.height = frame->height;
in->lastFrame.pts = frame->pts;
-
+
return false;
}
@@ -472,7 +492,7 @@
pm.breake(PipeMaterial::PMT_FRAME, MB_Frame::MBFT_NV12, PL_SensetimeFaceTrack::pay_breaker_MBFT_YUV, in);
in->frameCount++;
-
+
return !(in->payError);
}
@@ -498,17 +518,17 @@
in->pmList[0].buffer = &(in->lastFrame);
in->pmList[0].buffSize = 0;
in->pmList[0].former = this;
-
+
in->pmList[1].type = PipeMaterial::PMT_PTR;
in->pmList[1].buffer = &(in->faceFeatures);
in->pmList[1].buffSize = 0;
in->pmList[1].former = this;
-
+
pm.type = PipeMaterial::PMT_PM_LIST;
pm.buffer = in->pmList;
pm.buffSize = sizeof(in->pmList) / sizeof(PipeMaterial);
}
-
+
pm.former = this;
return true;
}
diff --git a/RtspFace/PipeLine.h b/RtspFace/PipeLine.h
index 7af5305..393b2a5 100644
--- a/RtspFace/PipeLine.h
+++ b/RtspFace/PipeLine.h
@@ -130,7 +130,7 @@
class PipeLineElemTimingDebugger
{
public:
- PipeLineElemTimingDebugger(const PipeLineElem* _elem);
+ PipeLineElemTimingDebugger(const PipeLineElem* _elem = nullptr);
~PipeLineElemTimingDebugger();
const PipeLineElem* elem;
diff --git a/RtspFace/live555/testProgs/testRTSPClient.hpp b/RtspFace/live555/testProgs/testRTSPClient.hpp
index c1c4765..47dccd2 100644
--- a/RtspFace/live555/testProgs/testRTSPClient.hpp
+++ b/RtspFace/live555/testProgs/testRTSPClient.hpp
@@ -36,6 +36,8 @@
// Define the size of the buffer that we'll use:
#define DUMMY_SINK_RECEIVE_BUFFER_SIZE 1920*1080*3//#todo
+#define INCREASE_RECEIVE_BUFFER_TO 8000000
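+// 8 MB socket receive buffer to reduce RTP packet loss on high-bitrate streams; the OS limit may need raising (see the sysctl note at the call site)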
+
// If you don't want to see debugging output for each received frame, then comment out the following line:
//#define DEBUG_PRINT_EACH_RECEIVED_FRAME 1
//#define DEBUG_PRINT_NPT 1
@@ -302,44 +304,50 @@
scs.subsession = scs.iter->next();
if (scs.subsession != NULL)
+ {
+ if (_ourRTSPClient->desiredPortNum != 0)
{
- if (_ourRTSPClient->desiredPortNum != 0)
- {
- scs.subsession->setClientPortNum(_ourRTSPClient->desiredPortNum);
- _ourRTSPClient->desiredPortNum += 2;
- }
-
- if (!scs.subsession->initiate())
- {
- LOG_ERROR << *rtspClient << "Failed to initiate the \"" << *scs.subsession << "\" subsession: " << env.getResultMsg() << LOG_ENDL;
- setupNextSubsession(rtspClient); // give up on this subsession; go to the next one
- }
- else
- {
- LOG_INFO << *rtspClient << "Initiated the \"" << *scs.subsession << "\" subsession (" << LOG_ENDL;
- if (scs.subsession->rtcpIsMuxed())
- LOG_INFO << "client port " << scs.subsession->clientPortNum() << LOG_ENDL;
- else
- LOG_INFO << "client ports " << scs.subsession->clientPortNum() << "-" << scs.subsession->clientPortNum()+1 << LOG_ENDL;
- LOG_INFO << ")" << LOG_ENDL;
-
- // Continue setting up this subsession, by sending a RTSP "SETUP" command:
- rtspClient->sendSetupCommand(*scs.subsession, continueAfterSETUP, False, _ourRTSPClient->rtspConfig.requestStreamingOverTcp);
- }
- return;
+ scs.subsession->setClientPortNum(_ourRTSPClient->desiredPortNum);
+ _ourRTSPClient->desiredPortNum += 2;
}
+
+ if (!scs.subsession->initiate())
+ {
+ LOG_ERROR << *rtspClient << "Failed to initiate the \"" << *scs.subsession << "\" subsession: " << env.getResultMsg() << LOG_ENDL;
+ setupNextSubsession(rtspClient); // give up on this subsession; go to the next one
+ }
+ else
+ {
+ LOG_INFO << *rtspClient << "Initiated the \"" << *scs.subsession << "\" subsession (" << LOG_ENDL;
+ if (scs.subsession->rtcpIsMuxed())
+ LOG_INFO << "client port " << scs.subsession->clientPortNum() << LOG_ENDL;
+ else
+ LOG_INFO << "client ports " << scs.subsession->clientPortNum() << "-" << scs.subsession->clientPortNum()+1 << LOG_ENDL;
+ LOG_INFO << ")" << LOG_ENDL;
+
+#ifdef INCREASE_RECEIVE_BUFFER_TO
+ //sysctl net.core.rmem_max=40000000
+ if (INCREASE_RECEIVE_BUFFER_TO > 0)
+ increaseReceiveBufferTo(env, scs.subsession->rtpSource()->RTPgs()->socketNum(), INCREASE_RECEIVE_BUFFER_TO);
+#endif
+
+ // Continue setting up this subsession, by sending a RTSP "SETUP" command:
+ rtspClient->sendSetupCommand(*scs.subsession, continueAfterSETUP, False, _ourRTSPClient->rtspConfig.requestStreamingOverTcp);
+ }
+ return;
+ }
// We've finished setting up all of the subsessions. Now, send a RTSP "PLAY" command to start the streaming:
if (scs.session->absStartTime() != NULL)
- {
- // Special case: The stream is indexed by 'absolute' time, so send an appropriate "PLAY" command:
- rtspClient->sendPlayCommand(*scs.session, continueAfterPLAY, scs.session->absStartTime(), scs.session->absEndTime());
- }
+ {
+ // Special case: The stream is indexed by 'absolute' time, so send an appropriate "PLAY" command:
+ rtspClient->sendPlayCommand(*scs.session, continueAfterPLAY, scs.session->absStartTime(), scs.session->absEndTime());
+ }
else
- {
- scs.duration = scs.session->playEndTime() - scs.session->playStartTime();
- rtspClient->sendPlayCommand(*scs.session, continueAfterPLAY);
- }
+ {
+ scs.duration = scs.session->playEndTime() - scs.session->playStartTime();
+ rtspClient->sendPlayCommand(*scs.session, continueAfterPLAY);
+ }
}
void continueAfterSETUP(RTSPClient* rtspClient, int resultCode, char* resultString)
--
Gitblit v1.8.0