From 663104b9be90ed303b87c8acddac8421583a9e39 Mon Sep 17 00:00:00 2001
From: houxiao <houxiao@454eff88-639b-444f-9e54-f578c98de674>
Date: Wed, 16 Aug 2017 12:38:59 +0800
Subject: [PATCH] RtspFace: pass MB_Frame directly to doFaceTrack and reset internal state in finit
---
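Notes:
    Most of this diff is indentation-only churn. The main functional change is
    that doFaceTrack() now receives the whole MB_Frame instead of a raw buffer
    plus geometry, and derives the SenseTime pixel format from the frame type
    itself; pay_breaker_MBFT_YUV accordingly calls doFaceTrack(in, frame) for
    both MBFT_YUV420 and MBFT_NV12 instead of unpacking the frame at the call
    site. A condensed sketch of the new prologue, assembled from the hunks
    below (all identifiers are the ones this file already uses; stride is
    assumed equal to frame->width, exactly as in the patch):

        int doFaceTrack(PL_SensetimeFaceTrack_Internal* in, MB_Frame* frame)
        {
            // honour the frame-skipping configuration
            if (in->config.doTrackPerFrame == 0)
                return 0;
            if (in->frameCount % in->config.doTrackPerFrame != 0)
                return 0;

            uint8_t* buffer     = (uint8_t*)frame->buffer;
            const size_t width  = frame->width;
            const size_t height = frame->height;
            const size_t stride = frame->width;      // planes assumed tightly packed

            cv_pixel_format cvPixFmt;
            if (frame->type == MB_Frame::MBFT_YUV420)
                cvPixFmt = CV_PIX_FMT_YUV420P;
            else if (frame->type == MB_Frame::MBFT_NV12)
                cvPixFmt = CV_PIX_FMT_NV12;
            else
                return -1;                           // unsupported frame type

            int face_count = 0;
            cv_face_t* p_face = nullptr;
            cv_result_t cv_result = cv_face_track(in->handle_track, buffer, cvPixFmt,
                                                  width, height, stride, CV_FACE_UP,
                                                  &p_face, &face_count);
            if (cv_result != CV_OK)
                return -1;                           // logged as an error in the patch
            // ... feature extraction, optional drawing and
            //     cv_face_release_tracker_result() follow as in the hunks below
            return face_count;
        }
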
RtspFace/PL_SensetimeFaceTrack.cpp | 402 ++++++++++++++++++++++++++++++---------------------------
 1 file changed, 212 insertions(+), 190 deletions(-)
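
    The other behavioural change, easy to miss in the whitespace churn, is
    that finit() now clears the whole internal state rather than only the
    tracker handle. Sketched from the patched code:

        void PL_SensetimeFaceTrack::finit()
        {
            PL_SensetimeFaceTrack_Internal* in = (PL_SensetimeFaceTrack_Internal*)internal;

            // destroy track handle
            cv_face_destroy_tracker(in->handle_track);
            in->handle_track = nullptr;

            // new in this patch: also restore lastFrame, pmList, config, payError
            // and frameCount to their defaults (init() calls the same reset()
            // before applying its configuration)
            in->reset();
        }
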
diff --git a/RtspFace/PL_SensetimeFaceTrack.cpp b/RtspFace/PL_SensetimeFaceTrack.cpp
index 263dd01..6335e63 100644
--- a/RtspFace/PL_SensetimeFaceTrack.cpp
+++ b/RtspFace/PL_SensetimeFaceTrack.cpp
@@ -20,40 +20,40 @@
st_ff_vect_t faceFeatures;
bool payError;
-
+
cv_handle_t handle_track;
-
+
size_t frameCount;
-
- PL_SensetimeFaceTrack_Internal() :
- //buffSize(0), buffSizeMax(sizeof(buffer)),
- lastFrame(), pmList(), config(), faceFeatures(), payError(true),
- handle_track(nullptr),
- frameCount(0)
+
+ PL_SensetimeFaceTrack_Internal() :
+ //buffSize(0), buffSizeMax(sizeof(buffer)),
+ lastFrame(), pmList(), config(), faceFeatures(), payError(true),
+ handle_track(nullptr),
+ frameCount(0)
{
}
-
+
~PL_SensetimeFaceTrack_Internal()
{
}
-
+
void reset()
{
//buffSize = 0;
payError = true;
-
+
MB_Frame _lastFrame;
lastFrame = _lastFrame;
-
+
PipeMaterial _pm;
pmList[0] = _pm;
pmList[1] = _pm;
-
+
SensetimeFaceTrackConfig _config;
config = _config;
-
+
handle_track = nullptr;
-
+
frameCount = 0;
}
};
@@ -77,53 +77,53 @@
{
PL_SensetimeFaceTrack_Internal* in = (PL_SensetimeFaceTrack_Internal*)internal;
in->reset();
-
+
#ifdef __ANDROID__
{
bool retLic = false;
- if (in->config.license_str.empty())
- {
- if (in->config.license_file_path.empty())
- in->config.license_file_path = "/data/license.lic";
+ if (in->config.license_str.empty())
+ {
+ if (in->config.license_file_path.empty())
+ in->config.license_file_path = "/data/license.lic";
- FILE * licFile = fopen (in->config.license_file_path.c_str(),"rb");
- if (licFile != nullptr)
- {
- char licBuffer[1025 * 5] = {'\0'};
- size_t licSize = fread(licBuffer, sizeof(uint8_t), sizeof(licBuffer), licFile);
- fclose (licFile);
+ FILE * licFile = fopen (in->config.license_file_path.c_str(),"rb");
+ if (licFile != nullptr)
+ {
+ char licBuffer[1025 * 5] = {'\0'};
+ size_t licSize = fread(licBuffer, sizeof(uint8_t), sizeof(licBuffer), licFile);
+ fclose (licFile);
- if (licSize > 0)
- {
- int ret = cv_face_init_license_config(licBuffer);
- LOG_INFO << "cv_face_init_license_config 1 ret=" << ret << LOG_ENDL;
- retLic = true;
- }
- }
- else
- {
- LOG_WARN << "cv_face_init_license_config 2 errno=" << errno << LOG_ENDL;
- }
- }
- else
- {
- int ret = cv_face_init_license_config(in->config.license_str.c_str());
- LOG_INFO << "cv_face_init_license_config 3 ret=" << ret << LOG_ENDL;
- retLic = true;
- }
+ if (licSize > 0)
+ {
+ int ret = cv_face_init_license_config(licBuffer);
+ LOG_INFO << "cv_face_init_license_config 1 ret=" << ret << LOG_ENDL;
+ retLic = true;
+ }
+ }
+ else
+ {
+ LOG_WARN << "cv_face_init_license_config 2 errno=" << errno << LOG_ENDL;
+ }
+ }
+ else
+ {
+ int ret = cv_face_init_license_config(in->config.license_str.c_str());
+ LOG_INFO << "cv_face_init_license_config 3 ret=" << ret << LOG_ENDL;
+ retLic = true;
+ }
- //int ret = cv_face_init_license_config(_lic);
- //LOG_INFO << "cv_face_init_license_config 3 ret=" << ret << LOG_ENDL;
- //retLic = true;
+ //int ret = cv_face_init_license_config(_lic);
+ //LOG_INFO << "cv_face_init_license_config 3 ret=" << ret << LOG_ENDL;
+ //retLic = true;
if (!retLic)
- {
- LOG_WARN << "stface for android no license" << LOG_ENDL;
- return false;
- }
+ {
+ LOG_WARN << "stface for android no license" << LOG_ENDL;
+ return false;
+ }
}
#endif
-
+
SensetimeFaceTrackConfig* config = (SensetimeFaceTrackConfig*)args;
in->config = *config;
if (in->config.point_size == 21)
@@ -141,12 +141,12 @@
//in->config.generate_face_point = true;
//in->config.draw_face_feature_point = true;
- // if not use CV_FACE_TRACKING_TWO_THREAD, stfacesdk detect face per 20 frame and light flow tracking interval
- // per detect in RK3288: 800ms@1920w,200ms@640w; with CV_FACE_TRACKING_TWO_THREAD 10ms@1920w
+	// Without CV_FACE_TRACKING_TWO_THREAD, the SenseTime SDK runs a full detection every 20 frames and optical-flow tracking in between;
+	// one detection on RK3288 costs ~800ms@1920w / ~200ms@640w, versus ~10ms@1920w with CV_FACE_TRACKING_TWO_THREAD
// init handle
- cv_result_t cv_result = cv_face_create_tracker(&(in->handle_track), nullptr,
- in->config.point_size_config | CV_FACE_TRACKING_TWO_THREAD); // CV_FACE_TRACKING_TWO_THREAD | CV_FACE_RESIZE_IMG_XXXX
+ cv_result_t cv_result = cv_face_create_tracker(&(in->handle_track), nullptr,
+ in->config.point_size_config | CV_FACE_TRACKING_TWO_THREAD); // CV_FACE_TRACKING_TWO_THREAD | CV_FACE_RESIZE_IMG_XXXX
if (cv_result != CV_OK)
{
LOG_ERROR << "cv_face_create_tracker failed, error code" << cv_result << LOG_ENDL;
@@ -162,70 +162,94 @@
}
else
LOG_ERROR << "detect face count limit : " << val << LOG_ENDL;
-
+
return true;
}
void PL_SensetimeFaceTrack::finit()
{
PL_SensetimeFaceTrack_Internal* in = (PL_SensetimeFaceTrack_Internal*)internal;
-
+
// destroy track handle
cv_face_destroy_tracker(in->handle_track);
in->handle_track = nullptr;
+
+ in->reset();
}
static void test_dump_feature(cv_face_t* p_face, int face_count)
{
- static std::fstream dumpfile("/data/temp/dump-106-photo2.txt", std::ios_base::out | std::ios_base::trunc);
+ static std::fstream dumpfile("/data/temp/dump-106-photo2.txt", std::ios_base::out | std::ios_base::trunc);
- for (int i = 0; i < face_count; i++)
- {
- dumpfile << p_face[i].rect.left << "\t"
- << p_face[i].rect.top << "\t"
- << p_face[i].rect.right << "\t"
- << p_face[i].rect.bottom << "\t";
+ for (int i = 0; i < face_count; i++)
+ {
+ dumpfile << p_face[i].rect.left << "\t"
+ << p_face[i].rect.top << "\t"
+ << p_face[i].rect.right << "\t"
+ << p_face[i].rect.bottom << "\t";
- dumpfile << p_face[i].score << "\t"
- << p_face[i].points_count << "\t"
- << p_face[i].yaw << "\t"
- << p_face[i].pitch << "\t"
- << p_face[i].roll << "\t"
- << p_face[i].eye_dist << "\t"
- << p_face[i].ID << "\t";
+ dumpfile << p_face[i].score << "\t"
+ << p_face[i].points_count << "\t"
+ << p_face[i].yaw << "\t"
+ << p_face[i].pitch << "\t"
+ << p_face[i].roll << "\t"
+ << p_face[i].eye_dist << "\t"
+ << p_face[i].ID << "\t";
- cv_pointf_t points_array[256];
+ cv_pointf_t points_array[256];
- for (int j = 0; j < p_face[i].points_count; j++)
- {
- dumpfile << p_face[i].points_array[j].x << "\t"
- << p_face[i].points_array[j].y << "\t";
- }
+ for (int j = 0; j < p_face[i].points_count; j++)
+ {
+ dumpfile << p_face[i].points_array[j].x << "\t"
+ << p_face[i].points_array[j].y << "\t";
+ }
- dumpfile << std::endl;
- }
+ dumpfile << std::endl;
+ }
}
-int doFaceTrack(PL_SensetimeFaceTrack_Internal* in,
- uint8_t* buffer, size_t width, size_t height, size_t stride, cv_pixel_format cvPixFmt)
+int doFaceTrack(PL_SensetimeFaceTrack_Internal* in, MB_Frame* frame)
{
- //PipeLineElemTimingDebugger td(nullptr);
+ //PipeLineElemTimingDebugger td(nullptr);
- if (in->config.doTrackPerFrame == 0)
+ if (in->config.doTrackPerFrame == 0)
return 0;
if (in->frameCount % in->config.doTrackPerFrame != 0)
return 0;
+ //if (true)
+ //{
+ // struct timeval now;
+ // gettimeofday(&now, nullptr);
+ // const int fps = 20;
+ // const int ft = 1000 / fps; // ms
+ // if (now.tv_usec - frame->pts.tv_usec > 0.5 * ft * 1000)
+ // return 0;
+ //}
+
//resize(bgr_frame, bgr_frame, Size(frame_width, frame_height), 0, 0, INTER_LINEAR);
+
+ uint8_t* buffer = (uint8_t*)frame->buffer;
+ const size_t width = frame->width;
+ const size_t height = frame->height;
+ const size_t stride = frame->width;
+ cv_pixel_format cvPixFmt;
+ if (frame->type == MB_Frame::MBFT_YUV420)
+ cvPixFmt = CV_PIX_FMT_YUV420P;
+ else if (frame->type == MB_Frame::MBFT_NV12)
+ cvPixFmt = CV_PIX_FMT_NV12;
+ else
+ return -1;
int face_count = 0;
cv_result_t cv_result = CV_OK;
cv_face_t* p_face = nullptr;
-
+
+ //#test
+ //cvPixFmt = CV_PIX_FMT_GRAY8;
+
// realtime track
- cv_result = cv_face_track(in->handle_track, buffer, cvPixFmt,
- width, height, stride,
- CV_FACE_UP, &p_face, &face_count);
+ cv_result = cv_face_track(in->handle_track, buffer, cvPixFmt, width, height, stride, CV_FACE_UP, &p_face, &face_count);
if (cv_result != CV_OK)
{
LOG_ERROR << "cv_face_track failed, error : " << cv_result << LOG_ENDL;
@@ -233,8 +257,8 @@
return -1;
}
- //#test
- //test_dump_feature(p_face, face_count);
+ //#test
+ //test_dump_feature(p_face, face_count);
#ifdef USE_OPENCV
// draw the video
@@ -261,65 +285,65 @@
faceFeature.eyeDistance = p_face[i].eye_dist;
LOGP(DEBUG, "face: %d-----[%d, %d, %d, %d]-----id: %d", i,
- p_face[i].rect.left, p_face[i].rect.top,
- p_face[i].rect.right, p_face[i].rect.bottom, p_face[i].ID);
-
+ p_face[i].rect.left, p_face[i].rect.top,
+ p_face[i].rect.right, p_face[i].rect.bottom, p_face[i].ID);
+
LOGP(DEBUG, "face pose: [yaw: %.2f, pitch: %.2f, roll: %.2f, eye distance: %.2f]",
- p_face[i].yaw,
- p_face[i].pitch, p_face[i].roll, p_face[i].eye_dist);
+ p_face[i].yaw,
+ p_face[i].pitch, p_face[i].roll, p_face[i].eye_dist);
#ifdef USE_OPENCV
if (in->config.draw_face_rect)
{
cv::Scalar scalar_color = CV_RGB(p_face[i].ID * 53 % 256,
- p_face[i].ID * 93 % 256,
- p_face[i].ID * 143 % 256);
-
+ p_face[i].ID * 93 % 256,
+ p_face[i].ID * 143 % 256);
+
//cv::rectangle(yMat, cv::Point2f(0, 0), cv::Point2f(50, 50), scalar_color, 2);
//cv::rectangle(yMat, cv::Point2f(500, 500), cv::Point2f(550, 550), scalar_color, 2);
-
+
cv::rectangle(yMat, cv::Point2f(static_cast<float>(p_face[i].rect.left),
- static_cast<float>(p_face[i].rect.top)),
- cv::Point2f(static_cast<float>(p_face[i].rect.right),
- static_cast<float>(p_face[i].rect.bottom)), scalar_color, 2);
+ static_cast<float>(p_face[i].rect.top)),
+ cv::Point2f(static_cast<float>(p_face[i].rect.right),
+ static_cast<float>(p_face[i].rect.bottom)), scalar_color, 2);
}
#endif
- if (in->config.generate_face_point)
- {
- for (int j = 0; j < p_face[i].points_count; j++)
- {
- PLGH_Point featurePoint;
- featurePoint.X = p_face[i].points_array[j].x;
- featurePoint.Y = p_face[i].points_array[j].y;
- faceFeature.featurePoints.points.push_back(featurePoint);
+ if (in->config.generate_face_point)
+ {
+ for (int j = 0; j < p_face[i].points_count; j++)
+ {
+ PLGH_Point featurePoint;
+ featurePoint.X = p_face[i].points_array[j].x;
+ featurePoint.Y = p_face[i].points_array[j].y;
+ faceFeature.featurePoints.points.push_back(featurePoint);
#ifdef USE_OPENCV
- if (in->config.draw_face_feature_point)
- {
- cv::circle(yMat, cv::Point2f(p_face[i].points_array[j].x, p_face[i].points_array[j].y), 1, cv::Scalar(255, 255, 255));
- }
+ if (in->config.draw_face_feature_point)
+ {
+ cv::circle(yMat, cv::Point2f(p_face[i].points_array[j].x, p_face[i].points_array[j].y), 1, cv::Scalar(255, 255, 255));
+ }
#endif
- }
+ }
- //int p46x = p_face[i].points_array[46].x;
- //int p46y = p_face[i].points_array[46].y;
- //int p6x = p_face[i].points_array[6].x;
- //int p6y = p_face[i].points_array[6].y;
- //double dist1 = std::sqrt((p46x-p6x)*(p46x-p6x)+(p46y-p6y)*(p46y-p6y));
+ //int p46x = p_face[i].points_array[46].x;
+ //int p46y = p_face[i].points_array[46].y;
+ //int p6x = p_face[i].points_array[6].x;
+ //int p6y = p_face[i].points_array[6].y;
+ //double dist1 = std::sqrt((p46x-p6x)*(p46x-p6x)+(p46y-p6y)*(p46y-p6y));
- //int p43x = p_face[i].points_array[43].x;
- //int p43y = p_face[i].points_array[43].y;
- //int p2x = p_face[i].points_array[2].x;
- //int p2y = p_face[i].points_array[2].y;
- //double dist2 = std::sqrt((p43x-p2x)*(p43x-p2x)+(p43y-p2y)*(p43y-p2y));
+ //int p43x = p_face[i].points_array[43].x;
+ //int p43y = p_face[i].points_array[43].y;
+ //int p2x = p_face[i].points_array[2].x;
+ //int p2y = p_face[i].points_array[2].y;
+ //double dist2 = std::sqrt((p43x-p2x)*(p43x-p2x)+(p43y-p2y)*(p43y-p2y));
- //LOGP(ERROR, "dist46_6/dist43_2=%f", dist1 / dist2);
- }
+ //LOGP(ERROR, "dist46_6/dist43_2=%f", dist1 / dist2);
+ }
- if (faceFeature.rect.leftTop.X < 0 || faceFeature.rect.rightBottom.X > width ||
- faceFeature.rect.leftTop.Y < 0 || faceFeature.rect.rightBottom.Y > height)
- faceFeature.outOfFrame = true;
+ if (faceFeature.rect.leftTop.X < 0 || faceFeature.rect.rightBottom.X > width ||
+ faceFeature.rect.leftTop.Y < 0 || faceFeature.rect.rightBottom.Y > height)
+ faceFeature.outOfFrame = true;
if (in->config.generate_face_feature)
{
@@ -334,7 +358,7 @@
// explode the range
if (in->config.explode_feature_rect_x != 0)
{
- faceFeature.rect.leftTop.X = clamp(faceFeature.rect.leftTop.X - in->config.explode_feature_rect_x, 0, faceFeature.rect.leftTop.X);
+ faceFeature.rect.leftTop.X = clamp(faceFeature.rect.leftTop.X - in->config.explode_feature_rect_x, 0, faceFeature.rect.leftTop.X);
faceFeature.rect.rightBottom.X = clamp(faceFeature.rect.rightBottom.X + in->config.explode_feature_rect_x, faceFeature.rect.rightBottom.X, int(width - 1));
}
@@ -344,29 +368,29 @@
faceFeature.rect.rightBottom.Y = clamp(faceFeature.rect.rightBottom.Y + in->config.explode_feature_rect_y, faceFeature.rect.rightBottom.Y, int(height - 1));
}
- //f (in->config.clamp_feature_rect)
- //
- // int dW = width - faceFeature.rect.width();
- // if (dW < 0)
- // {
- // dW = std::abs(dW) / 2 + 1;
- // faceFeature.rect.leftTop.x += dW;
- // faceFeature.rect.rightBottom.x -= dW;
- // faceFeature.rectClamp = true;
- // }
+	//if (in->config.clamp_feature_rect)
+ //
+ // int dW = width - faceFeature.rect.width();
+ // if (dW < 0)
+ // {
+ // dW = std::abs(dW) / 2 + 1;
+ // faceFeature.rect.leftTop.x += dW;
+ // faceFeature.rect.rightBottom.x -= dW;
+ // faceFeature.rectClamp = true;
+ // }
- // int dH = height - faceFeature.rect.height();
- // if (dH < 0)
- // {
- // dH = std::abs(dH) / 2 + 1;
- // faceFeature.rect.leftTop.y += dH;
- // faceFeature.rect.rightBottom.y -= dH;
- // faceFeature.rectClamp = true;
- // }
- //
+ // int dH = height - faceFeature.rect.height();
+ // if (dH < 0)
+ // {
+ // dH = std::abs(dH) / 2 + 1;
+ // faceFeature.rect.leftTop.y += dH;
+ // faceFeature.rect.rightBottom.y -= dH;
+ // faceFeature.rectClamp = true;
+ // }
+ //
- in->faceFeatures.push_back(faceFeature);
- }
+ in->faceFeatures.push_back(faceFeature);
+ }
}
//if (face_count > 0)
@@ -378,31 +402,31 @@
// fwrite (yMat.data , sizeof(char), 1920*1080*1.5, pFile);
// printf("write face file %s\n", fname);
// fclose(pFile);
- // if (f>20)exit(0);
+ // if (f>20)exit(0);
//}
// release the memory of face
cv_face_release_tracker_result(p_face, face_count);
- //#debug
- //if (face_count == 0)
- //{
- // face_count = 2;
- //
- // SensetimeFaceFeature faceFeature;
- // faceFeature.rect.leftTop.x = 50;
- // faceFeature.rect.leftTop.y = 50;
- // faceFeature.rect.rightBottom.x = 50+128;
- // faceFeature.rect.rightBottom.y = 50+128;
- // in->faceFeatures.push_back(faceFeature);
- //
+ //#debug
+ //if (face_count == 0)
+ //{
+ // face_count = 2;
+ //
+ // SensetimeFaceFeature faceFeature;
+ // faceFeature.rect.leftTop.x = 50;
+ // faceFeature.rect.leftTop.y = 50;
+ // faceFeature.rect.rightBottom.x = 50+128;
+ // faceFeature.rect.rightBottom.y = 50+128;
+ // in->faceFeatures.push_back(faceFeature);
+ //
// faceFeature.rect.leftTop.x = 300;
- // faceFeature.rect.leftTop.y = 400;
- // faceFeature.rect.rightBottom.x = 300+50;
- // faceFeature.rect.rightBottom.y = 400+60;
- // in->faceFeatures.push_back(faceFeature);
+ // faceFeature.rect.leftTop.y = 400;
+ // faceFeature.rect.rightBottom.x = 300+50;
+ // faceFeature.rect.rightBottom.y = 400+60;
+ // in->faceFeatures.push_back(faceFeature);
// LOG_WARN << "PL_SensetimeFaceTrack doFaceTrack add test data" << LOG_ENDL;
- //}
+ //}
return face_count;
}
@@ -416,10 +440,10 @@
LOG_ERROR << "Only support PMT_FRAME" << LOG_ENDL;
return false;
}
-
+
if (pm->buffer == nullptr)
return false;
-
+
MB_Frame* frame = (MB_Frame*)pm->buffer;
if (frame->type != MB_Frame::MBFT_YUV420 && frame->type != MB_Frame::MBFT_NV12)
{
@@ -429,10 +453,8 @@
in->faceFeatures.clear();
int face_count = 0;
- if (frame->type == MB_Frame::MBFT_YUV420)
- face_count = doFaceTrack(in, (uint8_t*)frame->buffer, frame->width, frame->height, frame->width, CV_PIX_FMT_YUV420P);
- else if (frame->type == MB_Frame::MBFT_NV12)
- face_count = doFaceTrack(in, (uint8_t*)frame->buffer, frame->width, frame->height, frame->width, CV_PIX_FMT_NV12);
+ if (frame->type == MB_Frame::MBFT_YUV420 || frame->type == MB_Frame::MBFT_NV12)
+ face_count = doFaceTrack(in, frame);
if (face_count < 0)
{
@@ -441,7 +463,7 @@
}
else
in->payError = false;
-
+
//in->buffer readly
in->lastFrame.type = frame->type;
@@ -450,7 +472,7 @@
in->lastFrame.width = frame->width;
in->lastFrame.height = frame->height;
in->lastFrame.pts = frame->pts;
-
+
return false;
}
@@ -460,17 +482,17 @@
//LOG_ERROR << "PL_SensetimeFaceTrack pay" << LOG_ENDL;
in->payError = true;
- if (in->payError)
- pm.breake(PipeMaterial::PMT_FRAME_LIST, MB_Frame::MBFT_YUV420, PL_SensetimeFaceTrack::pay_breaker_MBFT_YUV, in);
- if (in->payError)
- pm.breake(PipeMaterial::PMT_FRAME_LIST, MB_Frame::MBFT_NV12, PL_SensetimeFaceTrack::pay_breaker_MBFT_YUV, in);
- if (in->payError)
- pm.breake(PipeMaterial::PMT_FRAME, MB_Frame::MBFT_YUV420, PL_SensetimeFaceTrack::pay_breaker_MBFT_YUV, in);
- if (in->payError)
- pm.breake(PipeMaterial::PMT_FRAME, MB_Frame::MBFT_NV12, PL_SensetimeFaceTrack::pay_breaker_MBFT_YUV, in);
+ if (in->payError)
+ pm.breake(PipeMaterial::PMT_FRAME_LIST, MB_Frame::MBFT_YUV420, PL_SensetimeFaceTrack::pay_breaker_MBFT_YUV, in);
+ if (in->payError)
+ pm.breake(PipeMaterial::PMT_FRAME_LIST, MB_Frame::MBFT_NV12, PL_SensetimeFaceTrack::pay_breaker_MBFT_YUV, in);
+ if (in->payError)
+ pm.breake(PipeMaterial::PMT_FRAME, MB_Frame::MBFT_YUV420, PL_SensetimeFaceTrack::pay_breaker_MBFT_YUV, in);
+ if (in->payError)
+ pm.breake(PipeMaterial::PMT_FRAME, MB_Frame::MBFT_NV12, PL_SensetimeFaceTrack::pay_breaker_MBFT_YUV, in);
in->frameCount++;
-
+
return !(in->payError);
}
@@ -496,17 +518,17 @@
in->pmList[0].buffer = &(in->lastFrame);
in->pmList[0].buffSize = 0;
in->pmList[0].former = this;
-
+
in->pmList[1].type = PipeMaterial::PMT_PTR;
in->pmList[1].buffer = &(in->faceFeatures);
in->pmList[1].buffSize = 0;
in->pmList[1].former = this;
-
+
pm.type = PipeMaterial::PMT_PM_LIST;
pm.buffer = in->pmList;
pm.buffSize = sizeof(in->pmList) / sizeof(PipeMaterial);
}
-
+
pm.former = this;
return true;
}
--
Gitblit v1.8.0