cricket::CaptureState CustomVideoCapturer::Start(const cricket::VideoFormat& capture_format){
//Minimal sketch of Start(): the original body was empty. It assumes cvCapture (the cv::VideoCapture
//the grab loop reads from) has already been opened elsewhere, e.g. in the constructor, and that
//grabCapture is declared static so it can be used as a plain thread routine.
if (capture_state() == cricket::CS_RUNNING) return capture_state(); //already capturing
if (!cvCapture.isOpened()) return cricket::CS_FAILED;
SetCaptureFormat(&capture_format);
worker = rtc::Thread::Current(); //remember the thread Start() was called on: frames must be signalled back on it
std::thread(&CustomVideoCapturer::grabCapture, this).detach(); //spawn the grab loop (requires <thread>)
return cricket::CS_RUNNING;
}
void* CustomVideoCapturer::grabCapture(CustomVideoCapturer* arg)
{
CustomVideoCapturer* customVideoCapturer = arg; //the explicit cast is unnecessary: the thread routine already receives a typed pointer
cv::Mat frame;
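//Grab loop: runs on its own thread, pulls BGR frames from OpenCV, converts each one to an I420
//webrtc::VideoFrame and hands it to the WebRTC capture pipeline via SignalFrameCaptured.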
while (cvCapture.read(frame) && customVideoCapturer->IsRunning()){
cv::Mat bgra(frame.rows, frame.cols, CV_8UC4);
cv::cvtColor(frame, bgra, CV_BGR2BGRA); //opencv reads the stream in BGR format by default
webrtc::VideoFrame vframe;
vframe.CreateEmptyFrame(bgra.cols, bgra.rows, bgra.cols, (bgra.cols + 1) / 2, (bgra.cols + 1) / 2); //allocate an I420 frame: width, height, Y stride, then the U and V strides (half the width, rounded up)
webrtc::ConvertToI420(webrtc::kBGRA, bgra.ptr(), 0, 0, bgra.cols, bgra.rows, 0, webrtc::kVideoRotation_0, &vframe); //convert the BGRA pixels to I420, the format webrtc uses for transport
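//Pack the three I420 planes into one contiguous buffer: cricket::CapturedFrame expects a single
//data pointer, and CalcBufferSize gives the number of bytes an I420 frame of this size occupies.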
std::vector<uint8_t> capture_buffer_;
size_t length = webrtc::CalcBufferSize(webrtc::kI420, vframe.width(), vframe.height());
capture_buffer_.resize(length);
webrtc::ExtractBuffer(vframe, length, &capture_buffer_[0]);
cricket::WebRtcCapturedFrame* webrtc_frame = new cricket::WebRtcCapturedFrame(vframe, &capture_buffer_[0], length);
//Signal the captured frame on the thread the capturer was started on. Invoke() is synchronous,
//so capture_buffer_ (a stack object) stays valid while the frame is being delivered.
customVideoCapturer->worker->Invoke<void>(rtc::Bind(&CustomVideoCapturer::SignalFrameCapturedOnStartThread, customVideoCapturer, webrtc_frame));
delete webrtc_frame; //the signal handlers copy the frame data, so release it here to avoid leaking one allocation per frame
}
return 0;
}
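//Trampoline executed on the start/worker thread via Invoke(): cricket::VideoCapturer expects
//SignalFrameCaptured to be raised on the thread it was started from, not on the OpenCV grab thread.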
void CustomVideoCapturer::SignalFrameCapturedOnStartThread(const cricket::CapturedFrame *frame){
SignalFrameCaptured(this, frame);
}