Hello,
I have been trying to integrate WebRTC within my app and was able to get audio working, however I can't seem to be able to display my own video (and even less someone else's).
The strange part is that capture seems to work fine, as I am seeing that `onFrameCaptured` does get called (on the video capturer). Everything related to the camera itself seems to work (I do have permission to use the camera and I am getting the proper callbacks).
Here is my code:
class MainActivity : AppCompatActivity() {
private val rootEglBase = EglBase.create()
private val surfaceTextureHelper = SurfaceTextureHelper.create("SurfaceTextureHelper", rootEglBase.eglBaseContext)
private val peerConnectionFactory: PeerConnectionFactory by lazy {
PeerConnectionFactory.initialize(
PeerConnectionFactory.InitializationOptions.builder(this@MainActivity)
.createInitializationOptions())
PeerConnectionFactory.builder()
.createPeerConnectionFactory()
}
private fun createVideoCapturer(): VideoCapturer? {
return createCameraCapturer(Camera1Enumerator(true))
}
private fun createCameraCapturer(enumerator: CameraEnumerator): VideoCapturer? {
val deviceNames = enumerator.deviceNames
// Trying to find a front facing camera!
for (deviceName in deviceNames) {
if (enumerator.isFrontFacing(deviceName)) {
val videoCapturer = enumerator.createCapturer(deviceName, null)
if (videoCapturer != null) {
return videoCapturer
}
}
}
// We were not able to find a front cam. Look for other cameras
for (deviceName in deviceNames) {
if (!enumerator.isFrontFacing(deviceName)) {
val videoCapturer = enumerator.createCapturer(deviceName, null)
if (videoCapturer != null) {
return videoCapturer
}
}
}
return null
}
override fun onCreate(savedInstanceState: Bundle?) {
super.onCreate(savedInstanceState)
setContentView(R.layout.activity_main)
}
override fun onResume() {
super.onResume()
val videoCapturer = createVideoCapturer()
val videoSource = peerConnectionFactory.createVideoSource(false)
videoCapturer?.initialize(surfaceTextureHelper, this, object : CapturerObserver {
override fun onCapturerStopped() {
print("wat")
}
override fun onCapturerStarted(p0: Boolean) {
print("wat")
}
override fun onFrameCaptured(p0: VideoFrame?) {
print("wot")
}
})
videoCapturer?.startCapture(1280, 720, 30)
val videoTrack = peerConnectionFactory.createVideoTrack("ARDAMSv0", videoSource)
videoTrack.setEnabled(true)
val renderer = SurfaceViewRenderer(this)
//init the SurfaceViewRenderer using the eglContext
renderer.init(rootEglBase.eglBaseContext, object : RendererCommon.RendererEvents {
override fun onFrameResolutionChanged(p0: Int, p1: Int, p2: Int) {
print("ha")
}
override fun onFirstFrameRendered() {
print("ha")
}
})
renderer.setScalingType(RendererCommon.ScalingType.SCALE_ASPECT_FIT)
videoTrack.addSink(renderer)
findViewById<FrameLayout>(R.id.renderer_container)?.addView(renderer)
}
}
I have been reading the example code over and over again and cannot find what I am doing differenty.
Am I missing a step, or doing something wrong?