// Creates the local media stream (plus an optional video track backed by our
// custom capturer) and attaches it to the peer connection.
// NOTE(review): `connectionConstraints` is currently unused — confirm whether
// it should be forwarded to CreateVideoSource instead of NULL below.
void PeerConnectionImpl::initMediaStream(webrtc::MediaConstraintsInterface *connectionConstraints)
{
    const char *mediaStreamLabel = "the_media_stream";
    scoped_refptr<webrtc::MediaStreamInterface> ms =
        factory->CreateLocalMediaStream(mediaStreamLabel);
    // We keep a raw pointer in the member, so take an explicit reference
    // before the scoped_refptr goes out of scope. This must be paired with a
    // Release() at teardown, or the stream object leaks.
    mediaStream = ms.get();
    mediaStream->AddRef();
    if (withVideo)
    {
        const char *videoTrackLabel = "the_video_track";
        // Create our custom video capturer. If it could not be created, skip
        // video setup instead of handing NULL to CreateVideoSource.
        cricket::VideoCapturer *vc = initVideoCapturer();
        if (vc != NULL)
        {
            // Source/track locals are scoped to the video branch — they were
            // previously declared at function scope for no reason.
            rtc::scoped_refptr<webrtc::VideoSourceInterface> vs =
                factory->CreateVideoSource(vc, NULL);
            rtc::scoped_refptr<webrtc::VideoTrackInterface> vt =
                factory->CreateVideoTrack(videoTrackLabel, vs);
            mediaStream->AddTrack(vt);
        }
    }
    connection->AddStream(mediaStream);
}
// Builds a cricket::VideoCapturer backed by our custom raw-frame capturer by
// installing a factory that only produces that capturer, then asking the
// device manager to create one. Returns NULL if no capturer could be created.
cricket::VideoCapturer* PeerConnectionImpl::initVideoCapturer()
{
    rtc::scoped_ptr<cricket::DeviceManagerInterface> deviceManager(
        cricket::DeviceManagerFactory::Create());
    // BUG FIX: the previous code forced the pointer conversion with a double
    // static_cast through void*, which is undefined behavior unless
    // RawVideoCapturerFactory actually derives from
    // cricket::VideoDeviceCapturerFactory. If it does derive, the implicit
    // derived-to-base conversion below is sufficient and type-checked; if it
    // does not, fix the class hierarchy rather than lying to the type system.
    cricket::VideoDeviceCapturerFactory *capturerFactory =
        new RawVideoCapturerFactory();
    // NOTE(review): assumes SetVideoDeviceCapturerFactory takes ownership of
    // the factory — confirm against the DeviceManagerInterface API, otherwise
    // this leaks.
    deviceManager->SetVideoDeviceCapturerFactory(capturerFactory);
    // Enumerate capture devices and take the first one our factory services.
    std::vector<cricket::Device> devices;
    deviceManager->GetVideoCaptureDevices(&devices);
    cricket::VideoCapturer *capturer = NULL;
    for (const auto &device : devices)
    {
        capturer = deviceManager->CreateVideoCapturer(device);
        if (capturer != NULL)
        {
            // Our factory is the only one installed, so any capturer it
            // produced must be our custom type.
            videoCapturer = static_cast<RawVideoCapturer*>(capturer);
            break;
        }
    }
    return capturer;
}
(cross posted from webrtcbuilds)
The basic question now is: is it even possible to register a custom source of video frame data? Looking at video_capture_linux.cc, it appears that on Linux video capture is hard-coded to the /dev/videoX devices only; it doesn't seem to allow for other sources of video. Please tell me I'm missing something!
Here's our previous working (if kludgy) code:

void PeerConnectionImpl::initMediaStream(webrtc::MediaConstraintsInterface *connectionConstraints)
{
const char *mediaStreamLabel = "the_media_stream";
scoped_refptr<webrtc::MediaStreamInterface> ms = factory->CreateLocalMediaStream(mediaStreamLabel);
mediaStream = ms.get();
mediaStream->AddRef();
rtc::scoped_refptr<webrtc::VideoSourceInterface> vs;
rtc::scoped_refptr<webrtc::VideoTrackInterface> vt;
if (withVideo)
{
const char *videoTrackLabel = "the_video_track";
// create our custom video capture
cricket::VideoCapturer *vc = initVideoCapturer();
vs = factory->CreateVideoSource(vc, NULL);
vt = factory->CreateVideoTrack(videoTrackLabel, vs);
I subclassed RTCAVFoundationVideoSource with this code:
#import "IrisRTCAVFoundationVideoSource+Private.h"
#import "RTCMediaConstraints+Private.h"
#import "RTCPeerConnectionFactory+Private.h"
#import "RTCVideoSource+Private.h"
#include "webrtc/media/base/fakevideocapturer.h"
#include "webrtc/api/test/fakevideotracksource.h"
//#include "webrtc/media/base/adaptedvideotracksource.h"
// Wraps a webrtc::FakeVideoTrackSource as the native source for this
// RTCAVFoundationVideoSource subclass, bypassing the real camera capture path.
//
// NOTE(review): FakeVideoTrackSource only emits frames when its underlying
// fake capturer is explicitly driven (CaptureFrame). No reference to the
// source is kept here, so nothing can ever pump frames into it — presumably
// why the remote preview stays blank. Retain the source in an ivar and drive
// its capturer if test frames are expected.
@implementation IrisRTCAVFoundationVideoSource

- (instancetype)initWithFactory:(RTCPeerConnectionFactory *)factory
                    constraints:(RTCMediaConstraints *)constraints {
  NSParameterAssert(factory);
  // `factory` and `constraints` are intentionally unused while the fake
  // source replaces the normal CreateVideoSource path. The previously
  // declared `_capturer` ivar was never used and has been removed.
  rtc::scoped_refptr<webrtc::VideoTrackSourceInterface> source =
      webrtc::FakeVideoTrackSource::Create();
  return [super initWithNativeVideoSource:source];
}

@end
Can anyone tell me why it is not showing anything at all? I can see a new peer appearing in the browser, but the preview is blank. Thanks, guys — I'm stuck.
Before starting to implement a new VideoTrackSourceInterface, I first want to test the FakeVideoTrackSource.
#import "IrisRTCAVFoundationVideoSource+Private.h"
#import "RTCMediaConstraints+Private.h"
#import "RTCPeerConnectionFactory+Private.h"
#import "RTCVideoSource+Private.h"
#include "webrtc/media/base/fakevideocapturer.h"
#include "webrtc/api/test/fakevideotracksource.h"

// Test double for the AVFoundation-backed video source: backs the track with
// a webrtc::FakeVideoTrackSource whose frames are generated on demand via
// -sendSomething, instead of capturing from the camera.
@implementation IrisRTCAVFoundationVideoSource {
  // Retained so the fake capturer can be driven after initialization. The
  // previously declared `_capturer` ivar was never used and has been removed.
  rtc::scoped_refptr<webrtc::FakeVideoTrackSource> _trackSource;
}

- (instancetype)initWithFactory:(RTCPeerConnectionFactory *)factory
                    constraints:(RTCMediaConstraints *)constraints {
  NSParameterAssert(factory);
  // `factory` and `constraints` are intentionally unused while the fake
  // source replaces the normal CreateVideoSource path.
  _trackSource = webrtc::FakeVideoTrackSource::Create();
  rtc::scoped_refptr<webrtc::FakeVideoTrackSource> source = _trackSource;
  return [super initWithNativeVideoSource:source];
}

// Pushes one synthetic frame through the fake capturer.
// NOTE(review): a single frame per call — drive this repeatedly (e.g. from a
// timer) if a continuous preview is expected; one frame alone may never be
// rendered.
- (void)sendSomething {
  _trackSource->fake_video_capturer()->CaptureFrame();
}

@end
I tried implementing the interface, and it didn't work. I implemented the interface, and when I have a frame I call OnFrame(const webrtc::VideoFrame& frame) as follows:
// Receives a captured frame and fans it out to all sinks registered on the
// broadcaster.
void StreamSource::OnFrame(const webrtc::VideoFrame& frame)
{
    // BUG FIX: the previous local declaration was missing the closing '>' on
    // the template argument and would not compile:
    //   rtc::scoped_refptr<webrtc::VideoFrameBuffer buffer(frame.video_frame_buffer());
    // The buffer local was also never used, so it is dropped entirely —
    // broadcaster_ forwards the frame (and its buffer) as-is.
    broadcaster_.OnFrame(frame);
}
In conductor.cc, in AddStreams(), I create a video source with the following code:
rtc::scoped_refptr<webrtc::VideoTrackInterface> video_track(
peer_connection_factory_->CreateVideoTrack( kVideoLabel,new mystream::StreamSource())