#include "PL_V4L2Source.h" #include "MaterialBuffer.h" #include "logger.h" #include "MediaHelper.h" #include #include #include #include #include struct PL_V4L2Source_Internal { uint8_t* buffer; int buffSize; size_t buffSizeMax; bool payError; MB_Frame lastFrame; PL_V4L2Source_Config config; V4l2Capture* videoCapture; PL_V4L2Source_Internal() : buffer(nullptr), buffSize(0), buffSizeMax(0), payError(true), lastFrame(), config(), videoCapture(nullptr) { } ~PL_V4L2Source_Internal() { delete[] buffer; buffer = nullptr; } void reset() { buffSize = 0; payError = true; MB_Frame _lastFrame; lastFrame = _lastFrame; PL_V4L2Source_Config _config; config = _config; if (buffer != nullptr) { delete[] buffer; buffer = nullptr; buffSizeMax = 0; } } }; PipeLineElem* create_PL_V4L2Source() { return new PL_V4L2Source; } PL_V4L2Source::PL_V4L2Source() : internal(new PL_V4L2Source_Internal) { } PL_V4L2Source::~PL_V4L2Source() { PL_V4L2Source_Internal* in = (PL_V4L2Source_Internal*)internal; delete in; } bool PL_V4L2Source::init(void* args) { PL_V4L2Source_Internal* in = (PL_V4L2Source_Internal*)internal; in->reset(); if (args != nullptr) { PL_V4L2Source_Config* config = (PL_V4L2Source_Config*)args; in->config = *config; } // init V4L2 capture interface V4L2DeviceParameters param(in->config.device.c_str(), in->config.format, in->config.width, in->config.height, in->config.fps, 1+1); in->videoCapture = V4l2Capture::create(param, (V4l2Access::IoType)in->config.ioType); if (in->videoCapture != nullptr) { in->buffSizeMax = in->videoCapture->getBufferSize(); in->buffer = new uint8_t[in->buffSizeMax]; } return in->videoCapture != nullptr; } void PL_V4L2Source::finit() { PL_V4L2Source_Internal* in = (PL_V4L2Source_Internal*)internal; // close videoCapture //#todo } bool PL_V4L2Source::pay(const PipeMaterial& pm) { return false; } bool PL_V4L2Source::gain(PipeMaterial& pm) { PL_V4L2Source_Internal* in = (PL_V4L2Source_Internal*)internal; pm.former = this; pm.deleter = nullptr; pm.args = nullptr; uint8_t tmpYUYV[in->config.width * in->config.height * 2]; in->buffSize = in->videoCapture->read((char*)tmpYUYV, sizeof(tmpYUYV)); if (in->buffSize <= 0 && errno != EAGAIN) { LOG(NOTICE) << "videoCapture stopped " << strerror(errno) << LOG_ENDL; return false; } uint8_t* dst_y = (uint8_t*)(in->buffer); uint8_t* dst_u = (uint8_t*)(dst_y + (in->config.width * in->config.height)); uint8_t* dst_v = (uint8_t*)(dst_u + (in->config.width * in->config.height / 4)); //libyuv::YUY2ToI420(tmpYUYV, in->config.width*2, // dst_y, in->config.width, // dst_u, MH_SUBSAMPLE1(in->config.width, 2), // dst_v, MH_SUBSAMPLE1(in->config.width, 2), // in->config.width, in->config.height // ); //in->lastFrame.type = MB_Frame::MBFT_YUV420; libyuv::YUY2ToNV12(tmpYUYV, in->config.width*2, dst_y, in->config.width, dst_u, in->config.width, in->config.width, in->config.height ); in->lastFrame.type = MB_Frame::MBFT_NV12; in->lastFrame.buffer = in->buffer; in->lastFrame.buffSize = in->config.width * in->config.height * 1.5; in->lastFrame.width = in->videoCapture->getWidth(); in->lastFrame.height = in->videoCapture->getHeight(); in->lastFrame.pts = microseconds_to_timeval(0); pm.type = PipeMaterial::PMT_FRAME; pm.buffer = &(in->lastFrame); pm.buffSize = 0; return true; }