// Requires: import ReplayKit, import WebRTC
func startRecording() {
    guard recorder.isAvailable else {
        print("Recording is not available at this time.")
        return
    }
    recorder.isMicrophoneEnabled = false

    if #available(iOS 11.0, *) {
        recorder.startCapture(handler: { (sampleBuffer, bufferType, error) in
            // Only forward video buffers; ReplayKit also delivers audio buffers.
            guard bufferType == .video else { return }

            guard CMSampleBufferIsValid(sampleBuffer),
                CMSampleBufferDataIsReady(sampleBuffer),
                CMSampleBufferGetNumSamples(sampleBuffer) == 1,
                let pixelBuffer = CMSampleBufferGetImageBuffer(sampleBuffer) else {
                    print("invalid sampleBuffer")
                    return
            }

            // Wrap the pixel buffer and hand it to WebRTC as an RTCVideoFrame.
            let rtcPixelBuffer = RTCCVPixelBuffer(pixelBuffer: pixelBuffer)
            let timeStampNs = Int64(CMTimeGetSeconds(CMSampleBufferGetPresentationTimeStamp(sampleBuffer)) * 1_000_000_000)
            let videoFrame = RTCVideoFrame(buffer: rtcPixelBuffer,
                                           rotation: RTCVideoRotation._0,
                                           timeStampNs: timeStampNs)
            self.factory.videoSource().capturer(self.videoCapturer!, didCapture: videoFrame)
        }, completionHandler: { error in
            if let error = error {
                print("startCapture failed: \(error.localizedDescription)")
            }
        })
    } else {
        // Fallback on earlier versions (startCapture requires iOS 11).
    }
}
The only difference in my code is the delegate call: didCapture instead of didCaptureVideoFrame. Xcode claims the method was renamed in Swift 3, and there is no "didCaptureVideoFrame" argument label available in Swift.
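For reference, this is how I understand the renaming; the forward function below is just a hypothetical helper to illustrate the imported signature, not code from my project:

import WebRTC

// Hypothetical helper, only to show the Swift-imported signature: the
// Objective-C selector capturer:didCaptureVideoFrame: is imported into Swift
// as capturer(_:didCapture:), which is presumably why Xcode flags
// didCaptureVideoFrame as renamed.
func forward(_ frame: RTCVideoFrame, from capturer: RTCVideoCapturer, to source: RTCVideoSource) {
    // RTCVideoSource conforms to RTCVideoCapturerDelegate, so the frame can be
    // pushed to it directly.
    source.capturer(capturer, didCapture: frame)
}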
The other difference is that the code above is supposed to work, while mine does not, and I have no idea why: no video is sent.
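In case the surrounding setup matters, here is a rough sketch of how I assume factory, videoSource, videoCapturer and the video track are wired together (class and property names are placeholders, not taken from the thread):

import WebRTC

// Rough sketch of the assumed wiring: the RTCVideoSource that receives frames
// via capturer(_:didCapture:) is the same instance that backs the
// RTCVideoTrack sent over the peer connection.
final class ScreenShareSender {
    let factory = RTCPeerConnectionFactory()
    private(set) var videoSource: RTCVideoSource!
    private(set) var videoCapturer: RTCVideoCapturer!
    private(set) var videoTrack: RTCVideoTrack!

    func setUpVideoTrack() {
        // Create the source once and keep a reference to it.
        videoSource = factory.videoSource()
        // RTCVideoSource conforms to RTCVideoCapturerDelegate.
        videoCapturer = RTCVideoCapturer(delegate: videoSource)
        // The track backed by this source is what gets added to the
        // RTCPeerConnection / local media stream.
        videoTrack = factory.videoTrack(with: videoSource, trackId: "screen0")
    }
}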
More details: https://groups.google.com/forum/?utm_medium=email&utm_source=footer#!topic/discuss-webrtc/sr9r6p2OXZY