Using WebRTC to send an iOS device's screen capture

Posted 2020-02-12 11:28

We would like to use WebRTC to send an iOS device's screen capture using ReplayKit. ReplayKit provides a processSampleBuffer callback that delivers a CMSampleBuffer.

But this is where we are stuck: we can't seem to get the CMSampleBuffer sent to the connected peer. We have tried creating a pixel buffer from the sample buffer and then creating an RTCVideoFrame.

We also extracted the RTCVideoSource from the RTCPeerConnectionFactory and then used an RTCVideoCapturer to stream the frames to the local video source.

Any idea what we are doing wrong?

var peerConnectionFactory: RTCPeerConnectionFactory?

override func processSampleBuffer(_ sampleBuffer: CMSampleBuffer, with sampleBufferType: RPSampleBufferType) {
    switch sampleBufferType {
    case RPSampleBufferType.video:
        // get the CVPixelBuffer backing the sample buffer
        let pixelBuffer: CVPixelBuffer = CMSampleBufferGetImageBuffer(sampleBuffer)!

        // create the RTCVideoFrame
        let timestamp = NSDate().timeIntervalSince1970 * 1000
        let videoFrame = RTCVideoFrame(pixelBuffer: pixelBuffer,
                                       rotation: RTCVideoRotation._0,
                                       timeStampNs: Int64(timestamp))

        // connect the video frames to WebRTC
        let localVideoSource = self.peerConnectionFactory!.videoSource()
        let videoCapturer = RTCVideoCapturer()
        localVideoSource.capturer(videoCapturer, didCapture: videoFrame)

        let videoTrack: RTCVideoTrack = self.peerConnectionFactory!.videoTrack(with: localVideoSource, trackId: "100")

        let mediaStream: RTCMediaStream = (self.peerConnectionFactory?.mediaStream(withStreamId: "1"))!
        mediaStream.addVideoTrack(videoTrack)
        self.newPeerConnection!.add(mediaStream)

    default:
        break
    }
}

1 Answer
虎瘦雄心在
#2 · 2020-02-12 12:17

This approach can work; the key is to push each RTCVideoFrame to the video source inside processSampleBuffer, while all the other objects (the factory, video source, capturer, track, and stream) are initialized once, outside that method. For a better understanding, here is a snippet.

    var peerConnectionFactory: RTCPeerConnectionFactory?
    var localVideoSource: RTCVideoSource?
    var videoCapturer: RTCVideoCapturer?

    func setupVideoCapturer() {
        // localVideoSource and videoCapturer are created once and reused;
        // the capturer(_:didCapture:) call moves into processSampleBuffer
        localVideoSource = self.peerConnectionFactory!.videoSource()
        videoCapturer = RTCVideoCapturer()

        let videoTrack: RTCVideoTrack = self.peerConnectionFactory!.videoTrack(with: localVideoSource!, trackId: "100")

        let mediaStream: RTCMediaStream = (self.peerConnectionFactory?.mediaStream(withStreamId: "1"))!
        mediaStream.addVideoTrack(videoTrack)
        self.newPeerConnection!.add(mediaStream)
    }
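
For completeness, a minimal sketch of where the one-time setup might be invoked; `broadcastStarted(withSetupInfo:)` is the standard RPBroadcastSampleHandler entry point, and the creation of `peerConnectionFactory` and `newPeerConnection` is assumed to happen in your own signaling code (not shown):

    override func broadcastStarted(withSetupInfo setupInfo: [String: NSObject]?) {
        // assumption: peerConnectionFactory and newPeerConnection have
        // already been created by your own signaling setup at this point
        setupVideoCapturer()
    }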


    override func processSampleBuffer(_ sampleBuffer: CMSampleBuffer, with sampleBufferType: RPSampleBufferType) {
        switch sampleBufferType {
        case RPSampleBufferType.video:
            // get the CVPixelBuffer backing the sample buffer
            guard let pixelBuffer = CMSampleBufferGetImageBuffer(sampleBuffer) else { return }

            // create the RTCVideoFrame
            let timestamp = NSDate().timeIntervalSince1970 * 1000
            let videoFrame = RTCVideoFrame(pixelBuffer: pixelBuffer,
                                           rotation: RTCVideoRotation._0,
                                           timeStampNs: Int64(timestamp))

            // hand the frame to the video source that was created in setup
            localVideoSource!.capturer(videoCapturer!, didCapture: videoFrame)

        default:
            break
        }
    }
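
One detail worth double-checking: `timeStampNs` expects nanoseconds, while the snippet above passes wall-clock milliseconds. A sketch of deriving the timestamp from the sample buffer's own presentation time instead, using the standard CoreMedia and Foundation APIs:

    // timeStampNs is in nanoseconds; derive it from the buffer's
    // presentation timestamp rather than from a Date in milliseconds
    let pts = CMSampleBufferGetPresentationTimeStamp(sampleBuffer)
    let timeStampNs = Int64(CMTimeGetSeconds(pts) * Double(NSEC_PER_SEC))
    let videoFrame = RTCVideoFrame(pixelBuffer: pixelBuffer,
                                   rotation: RTCVideoRotation._0,
                                   timeStampNs: timeStampNs)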

Hope this will help you.
