Remote stream not shown in view even after getting remote track #158

@kukadiaabhishek

Description

I am not able to see the remote stream in my view even after receiving the remote video track, and I can't figure out what the issue is. Please help. The track looks live in the debug output:

self.remoteVideoTrack Optional(RTC_OBJC_TYPE(RTCMediaStreamTrack):
video
213ab6dd-79cd-4ed1-92ad-a3f840512b72
enabled
Live)

Here is the code:

class VideoViewController: UIViewController {
@IBOutlet private weak var localVideoView: UIView?
private let webRTCClient: WebRTCManager

init(webRTCClient: WebRTCManager) {
    self.webRTCClient = webRTCClient
    super.init(nibName: String(describing: VideoViewController.self), bundle: Bundle.main)
}

@available(*, unavailable)
required init?(coder aDecoder: NSCoder) {
    fatalError("init(coder:) has not been implemented")
}

override func viewDidLoad() {
    super.viewDidLoad()
    
    let localRenderer = RTCMTLVideoView(frame: self.localVideoView?.frame ?? CGRect.zero)
    let remoteRenderer = RTCMTLVideoView(frame: self.view.frame)
    localRenderer.videoContentMode = .scaleAspectFill
    remoteRenderer.videoContentMode = .scaleAspectFill
    
    DispatchQueue.main.async {
        self.webRTCClient.startCaptureLocalVideo(renderer: localRenderer)
        self.webRTCClient.renderRemoteVideo(to: remoteRenderer)
    }
    
    if let localVideoView = self.localVideoView {
        self.embedView(localRenderer, into: localVideoView)
    }
    self.embedView(remoteRenderer, into: self.view)
    self.view.sendSubviewToBack(remoteRenderer)
}

private func embedView(_ view: UIView, into containerView: UIView) {
    containerView.addSubview(view)
    view.translatesAutoresizingMaskIntoConstraints = false
    containerView.addConstraints(NSLayoutConstraint.constraints(withVisualFormat: "H:|[view]|",
                                                                options: [],
                                                                metrics: nil,
                                                                views: ["view":view]))
    
    containerView.addConstraints(NSLayoutConstraint.constraints(withVisualFormat: "V:|[view]|",
                                                                options: [],
                                                                metrics: nil,
                                                                views: ["view":view]))
    containerView.layoutIfNeeded()
}

@IBAction private func backDidTap(_ sender: Any) {
    self.dismiss(animated: true)
}

}

struct UserDashboard: View {
@StateObject private var socketManager = SocketManagers.shared
@Environment(\.dismiss) var dismiss
@StateObject private var webRTCManager = WebRTCManager() // Initialize WebRTCManager

var body: some View {
    VStack {
        Text("WebRTC Matching")
            .font(.title)
            .padding()
        Text("Status: \(socketManager.status)")
            .font(.headline)
            .padding()
        if let _ = socketManager.roomId {
            VideoChatView(webRTCManager: webRTCManager) // Pass WebRTCManager
                .frame(maxWidth: .infinity, maxHeight: .infinity)
            Button("Next") {
                socketManager.handleNext()
            }
            .padding()
            .background(Color.blue)
            .foregroundColor(.white)
            .cornerRadius(8)
            Button("Close Socket") {
                socketManager.closeSocket()
                dismiss()
            }
            .padding()
            .background(Color.red)
            .foregroundColor(.white)
            .cornerRadius(8)
        } else {
            Button(action: {
                socketManager.startSearching()
            }) {
                Text(socketManager.isSearching ? "Searching..." : "Start Searching")
                    .padding()
                    .background(socketManager.isSearching ? Color.gray : Color.blue)
                    .foregroundColor(.white)
                    .cornerRadius(8)
            }
            .disabled(socketManager.isSearching)
        }
    }
    .padding()
    .onAppear {
        socketManager.connect()
    }
}

}

struct VideoChatView: View {
var webRTCManager: WebRTCManager
@State private var showVideoView = false

var body: some View {
    VStack {
        if showVideoView {
            VideoView(webRTCClient: webRTCManager)
        } else {
            Text("Preparing Video Chat...")
                .font(.headline)
                .padding()
        }
    }
    .onAppear {
        DispatchQueue.main.asyncAfter(deadline: .now() + 4) { // Delay of 4 seconds before showing the video view
            showVideoView = true
        }
    }
}

}

struct VideoView: UIViewControllerRepresentable {
var webRTCClient: WebRTCManager

func makeUIViewController(context: Context) -> VideoViewController {
    let videoViewController = VideoViewController(webRTCClient: webRTCClient)
    return videoViewController
}

func updateUIViewController(_ uiViewController: VideoViewController, context: Context) {
    // Update the view controller if needed
}

}

struct RemoteVideoView: UIViewRepresentable {
var track: RTCVideoTrack

func makeUIView(context: Context) -> RTCMTLVideoView {
    let videoView = RTCMTLVideoView(frame: CGRect(x: 0, y: 0, width: 200, height: 300))
    videoView.backgroundColor = .lightGray
    videoView.videoContentMode = .scaleAspectFill
    
    DispatchQueue.main.async {
        self.renderRemoteVideo(to: videoView)
    }
    
    return videoView
}

func renderRemoteVideo(to renderer: RTCVideoRenderer) {
    self.track.add(renderer)
}

func updateUIView(_ uiView: RTCMTLVideoView, context: Context) {
    DispatchQueue.main.async {
        track.remove(uiView)
        track.add(uiView)
    }
}

}
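
By the way, RemoteVideoView above is not referenced anywhere else in this code yet. For what it's worth, here is a minimal sketch of how I was imagining it could be driven from the published remote track (RemoteVideoContainer is just an illustrative name and is not in my current code):

struct RemoteVideoContainer: View {
// Illustrative wrapper (not in my current code): show RemoteVideoView only
// once WebRTCManager has published a non-nil remote track.
@ObservedObject var webRTCManager: WebRTCManager

var body: some View {
    if let track = webRTCManager.remoteVideoTrack {
        RemoteVideoView(track: track)
    } else {
        Text("Waiting for remote video...")
    }
}

}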

class SocketManagers: ObservableObject {
static let shared = SocketManagers()
private var manager: SocketManager!
var socket: SocketIOClient!

@Published var status: String = "Not Connected"
@Published var roomId: String? = nil
@Published var isSearching: Bool = false

private init() {
    let socketURL = URL(string: "https://backend-video-calling-node.onrender.com")!
    manager = SocketManager(socketURL: socketURL, config: [.log(true), .compress, .forceWebsockets(true)])
    socket = manager.defaultSocket
    setupSocketEvents()
}

func connect() {
    print("Connecting to Socket.IO server...")
    if UIDevice.modelName.contains("iPhone 16 Pro") || UIDevice.modelName.contains("iPhone XR") {
        socket.connect(withPayload: ["userId": "67c56fac2b9f86932be4f830"])
    } else {
        socket.connect(withPayload: ["userId": "67c56fa02b9f86932be4f82b"])
    }
}

func disconnect() {
    print("Disconnecting from Socket.IO server...")
    socket.disconnect()
}

private func setupSocketEvents() {
    socket.on(clientEvent: .connect) { data, ack in
        print("Socket connected")
        self.status = "Connected"
    }
    
    socket.on("room-assigned") { data, ack in
        if let dataDict = data as? [[String:Any]] {
            if let roomId = dataDict[0]["roomId"] as? String,
               let isInitiator = dataDict[0]["isInitiator"] as? Bool {
                self.roomId = roomId
                self.status = "Connected"
                self.isSearching = false
                // Join the room
                self.socket.emit("join-room", roomId)
                // Create an offer only if this user is the initiator
                if isInitiator {
                    WebRTCManager.shared.createOffer { offer in
                        let offerDict: [String: Any] = [
                            "type": "offer",
                            "sdp": offer.sdp
                        ]
                        self.socket.emit("signal", ["roomId": roomId, "signal": offerDict])
                    }
                }
            }
        }
    }
    
    socket.on("waiting-for-match") { data, ack in
        print("Waiting for a match...")
        self.status = "Waiting for a match..."
        self.isSearching = true
    }
    
    socket.on("match-timeout") { data, ack in
        print("Match timeout")
        self.status = "Match timeout. Try again."
        self.isSearching = false
    }
    
    socket.on("user-disconnected") { data, ack in
        print("User disconnected")
        self.status = "User disconnected. Searching for a new match..."
        self.roomId = nil
        self.startSearching()
    }
    
    socket.on("user-left") { data, ack in
        print("User left:", data)
        if let message = data[0] as? String {
            self.status = message
            self.roomId = nil
            self.startSearching()
        }
    }
    
    socket.on("signal") { data, ack in
        if let signalData = data[0] as? [String: Any] {
            print(signalData)
            if let signal = signalData["signal"] as? [String: Any] {
                if let type = signal["type"] as? String {
                    // Handle offer
                    if type == "offer", let sdp = signal["sdp"] as? String {
                        print("Received remote offer SDP:", sdp)
                        WebRTCManager.shared.handleRemoteOffer(offerSdp: sdp) { answer in
                            // Send the answer back to the initiator
                            let answerDict: [String: Any] = [
                                "type": "answer",
                                "sdp": answer.sdp
                            ]
                            self.socket.emit("signal", ["roomId": self.roomId ?? "", "signal": answerDict])
                        }
                    }
                    // Handle answer
                    else if type == "answer", let sdp = signal["sdp"] as? String {
                        let answer = RTCSessionDescription(type: .answer, sdp: sdp)
                        WebRTCManager.shared.setRemoteAnswer(answer: answer)
                    }
                    // Handle ICE candidate
                    else if type == "candidate",
                            let candidateData = signal["candidate"] as? [String: Any],
                            let candidateSdp = candidateData["candidate"] as? String,
                            let sdpMLineIndex = candidateData["sdpMLineIndex"] as? Int32,
                            let sdpMid = candidateData["sdpMid"] as? String {
                        let candidate = RTCIceCandidate(sdp: candidateSdp, sdpMLineIndex: sdpMLineIndex, sdpMid: sdpMid)
                        WebRTCManager.shared.addIceCandidate(candidate: candidate)
                    }
                }
            }
        }
    }
}

func startSearching() {
    guard !isSearching else { return }
    isSearching = true
    status = "Searching..."
    
    let deviceName = UIDevice.modelName
    debugPrint("Device Name: \(deviceName)")
    if deviceName.contains("iPhone 16 Pro") || deviceName.contains("iPhone XR") {
        socket.emit("login", with: ["67c56fac2b9f86932be4f830"]) {
            
        }
    } else {
        socket.emit("login", with: ["67c56fa02b9f86932be4f82b"]) {
            
        }
    }
}

func handleNext() {
    if let roomId = roomId {
        print("Handling next for room: \(roomId)")
        socket.emit("next", ["roomId": roomId, "userId": "userId"])
        self.roomId = nil
        self.status = "Searching for next user..."
        self.startSearching()
    }
}

func closeSocket() {
    print("Closing socket...")
    socket.emit("close-socket")
    socket.disconnect()
    status = "Socket closed. Please refresh the page to reconnect."
    roomId = nil
    isSearching = false
}

}

/// WebRTCManager
final class WebRTCManager: NSObject, ObservableObject, RTCPeerConnectionDelegate {

static let shared = WebRTCManager()
private var peerConnection: RTCPeerConnection?
private static let factory: RTCPeerConnectionFactory = {
    RTCInitializeSSL()
    let videoEncoderFactory = RTCDefaultVideoEncoderFactory()
    let videoDecoderFactory = RTCDefaultVideoDecoderFactory()
    return RTCPeerConnectionFactory(encoderFactory: videoEncoderFactory, decoderFactory: videoDecoderFactory)
}()
private var videoCapturer: RTCVideoCapturer?
@Published var localVideoTrack: RTCVideoTrack?
@Published var remoteVideoTrack: RTCVideoTrack?
private var localVideoSource: RTCVideoSource?
private let rtcAudioSession =  RTCAudioSession.sharedInstance()
private let audioQueue = DispatchQueue(label: "audio")

override init() {
    super.init()
    print("WebRTCManager initialized")
    setupPeerConnection()
}

func setupPeerConnection() {
    print("Setting up peer connection...")
    let config = RTCConfiguration()

    // Add multiple STUN/TURN servers for better connectivity
    config.iceServers = [
        RTCIceServer(urlStrings: ["stun:stun.l.google.com:19302"]),
        RTCIceServer(urlStrings: ["stun:stun1.l.google.com:19302"]),
        RTCIceServer(urlStrings: ["stun:stun2.l.google.com:19302"])
    ]
    
    config.sdpSemantics = .unifiedPlan
    config.continualGatheringPolicy = .gatherContinually
    config.bundlePolicy = .maxBundle
    config.rtcpMuxPolicy = .require
    
    let constraints = RTCMediaConstraints(
        mandatoryConstraints: [
            "OfferToReceiveAudio": "true",
            "OfferToReceiveVideo": "true"
        ],
        optionalConstraints: ["DtlsSrtpKeyAgreement":kRTCMediaConstraintsValueTrue]
    )

    self.peerConnection = WebRTCManager.factory.peerConnection(with: config, constraints: constraints, delegate: nil)

    createMediaSenders()
    self.configureAudioSession()
    self.peerConnection?.delegate = self
}

private func configureAudioSession() {
    self.rtcAudioSession.lockForConfiguration()
    do {
        try self.rtcAudioSession.setCategory(AVAudioSession.Category.playAndRecord.rawValue)
        try self.rtcAudioSession.setMode(AVAudioSession.Mode.voiceChat.rawValue)
    } catch let error {
        debugPrint("Error changeing AVAudioSession category: \(error)")
    }
    self.rtcAudioSession.unlockForConfiguration()
}

func createMediaSenders() {
    print(#function)
    
    let streamId = "stream"

    //Add Audio
    let audioTrack = self.createAudioTrack()
    peerConnection?.add(audioTrack, streamIds: [streamId])
    
    //Add Video
    let videoTrack = self.createVideoTrack()
    self.localVideoTrack = videoTrack

    peerConnection?.add(videoTrack, streamIds: [streamId])
}

func startCaptureLocalVideo(renderer: RTCVideoRenderer) {
    guard let capturer = self.videoCapturer as? RTCCameraVideoCapturer else {
        return
    }

    guard
        let frontCamera = (RTCCameraVideoCapturer.captureDevices().first { $0.position == .front }),
    
        // choose highest res
        let format = (RTCCameraVideoCapturer.supportedFormats(for: frontCamera).sorted { (f1, f2) -> Bool in
            let width1 = CMVideoFormatDescriptionGetDimensions(f1.formatDescription).width
            let width2 = CMVideoFormatDescriptionGetDimensions(f2.formatDescription).width
            return width1 < width2
        }).last,
        // choose highest fps
        let fps = (format.videoSupportedFrameRateRanges.sorted { return $0.maxFrameRate < $1.maxFrameRate }.last) else {
        return
    }

    capturer.startCapture(with: frontCamera,
                          format: format,
                          fps: Int(fps.maxFrameRate))
    
    self.localVideoTrack?.add(renderer)
}

func renderRemoteVideo(to renderer: RTCVideoRenderer) {
    DispatchQueue.main.async {
        self.remoteVideoTrack?.add(renderer)
    }
}

private func createAudioTrack() -> RTCAudioTrack {
    let audioConstrains = RTCMediaConstraints(mandatoryConstraints: nil, optionalConstraints: nil)
    let audioSource = WebRTCManager.factory.audioSource(with: audioConstrains)
    let audioTrack = WebRTCManager.factory.audioTrack(with: audioSource, trackId: "audio0")
    return audioTrack
}

private func createVideoTrack() -> RTCVideoTrack {
    let videoSource = WebRTCManager.factory.videoSource()
    
    #if targetEnvironment(simulator)
    self.videoCapturer = RTCFileVideoCapturer(delegate: videoSource)
    #else
    self.videoCapturer = RTCCameraVideoCapturer(delegate: videoSource)
    #endif
    
    let videoTrack = WebRTCManager.factory.videoTrack(with: videoSource, trackId: "video0")
    return videoTrack
}

func stopLocalVideo() {
    print(#function)
    speakerOff()
    localVideoTrack = nil
}

func createOffer(completion: @escaping (RTCSessionDescription) -> Void) {
    print("Creating offer...")
    guard let peerConnection = peerConnection else { return }
    let constraints = RTCMediaConstraints(
        mandatoryConstraints: [
            "OfferToReceiveAudio": "true",
            "OfferToReceiveVideo": "true"
        ],
        optionalConstraints: nil
    )
    peerConnection.offer(for: constraints) { offer, error in
        guard let offer = offer else {
            print("Failed to create offer: \(String(describing: error))")
            return
        }
        print("Offer created:", offer.sdp)
        peerConnection.setLocalDescription(offer, completionHandler: { error in
            if let error = error {
                print("Failed to set local description: \(error)")
            } else {
                print("Local description set successfully")
            }
        })
        completion(offer)
    }
}

func handleRemoteOffer(offerSdp: String, completion: @escaping (RTCSessionDescription) -> Void) {
    print("Handling remote offer...")
    guard let peerConnection = peerConnection else { return }

    let offer = RTCSessionDescription(type: .offer, sdp: offerSdp)

    peerConnection.setRemoteDescription(offer, completionHandler: { error in
        if let error = error {
            print("Failed to set remote description: \(error)")
            return
        }
        print("Remote description set successfully")

        let constraints = RTCMediaConstraints(
            mandatoryConstraints: [
                "OfferToReceiveAudio": "true",
                "OfferToReceiveVideo": "true"
            ],
            optionalConstraints: nil
        )
        peerConnection.answer(for: constraints) { answer, error in
            guard let answer = answer else {
                print("Failed to create answer: \(String(describing: error))")
                return
            }
            print("Answer created:", answer.sdp)
            peerConnection.setLocalDescription(answer, completionHandler: { error in
                if let error = error {
                    print("Failed to set local description: \(error)")
                } else {
                    print("Local description set successfully")
                }
            })
            completion(answer)
        }
    })
}

func setRemoteAnswer(answer: RTCSessionDescription) {
    print("Setting remote answer...")
    peerConnection?.setRemoteDescription(answer, completionHandler: { error in
        if let error = error {
            print("Failed to set remote answer: \(error)")
        } else {
            print("Remote answer set successfully")
        }
    })
}

func addIceCandidate(candidate: RTCIceCandidate) {
    print("Adding ICE candidate: \(candidate.sdp)")
    peerConnection?.add(candidate)
}

// Fallback to the default playing device: headphones/bluetooth/ear speaker
func speakerOff() {
    self.audioQueue.async { [weak self] in
        guard let self = self else {
            return
        }
        
        self.rtcAudioSession.lockForConfiguration()
        do {
            try self.rtcAudioSession.setCategory(AVAudioSession.Category.playAndRecord.rawValue)
            try self.rtcAudioSession.overrideOutputAudioPort(.none)
        } catch let error {
            debugPrint("Error setting AVAudioSession category: \(error)")
        }
        self.rtcAudioSession.unlockForConfiguration()
    }
}

// Force speaker
func speakerOn() {
    self.audioQueue.async { [weak self] in
        guard let self = self else {
            return
        }
        
        self.rtcAudioSession.lockForConfiguration()
        do {
            try self.rtcAudioSession.setCategory(AVAudioSession.Category.playAndRecord.rawValue)
            try self.rtcAudioSession.overrideOutputAudioPort(.speaker)
            try self.rtcAudioSession.setActive(true)
        } catch let error {
            debugPrint("Couldn't force audio to speaker: \(error)")
        }
        self.rtcAudioSession.unlockForConfiguration()
    }
}

// RTCPeerConnectionDelegate methods
func peerConnection(_ peerConnection: RTCPeerConnection, didAdd stream: RTCMediaStream) {
    print("Remote stream added")
    DispatchQueue.main.async {
        if let videoTrack = stream.videoTracks.first {
            self.remoteVideoTrack = videoTrack
            //self.remoteVideoTrack = peerConnection.transceivers.first { $0.mediaType == .video }?.receiver.track as? RTCVideoTrack
            self.speakerOn()
        }
        print("self.remoteVideoTrack", self.remoteVideoTrack)
    }
}

func peerConnection(_ peerConnection: RTCPeerConnection, didRemove stream: RTCMediaStream) {
    print("Remote stream removed")
    DispatchQueue.main.async {
        self.remoteVideoTrack = nil
    }
}

func peerConnection(_ peerConnection: RTCPeerConnection, didChange stateChanged: RTCSignalingState) {
    print("Signaling state changed: \(stateChanged.rawValue)")
}

func peerConnection(_ peerConnection: RTCPeerConnection, didChange newState: RTCIceConnectionState) {
    print("ICE connection state changed: \(newState.rawValue)")
    if newState == .connected {
        print("ICE connection established")
    }
}

func peerConnection(_ peerConnection: RTCPeerConnection, didChange newState: RTCIceGatheringState) {
    print("ICE gathering state changed: \(newState.rawValue)")
}

func peerConnection(_ peerConnection: RTCPeerConnection, didGenerate candidate: RTCIceCandidate) {
    print("Generated ICE candidate: \(candidate.sdp)")
    let candidateDict: [String: Any] = [
        "type": "candidate",
        "candidate": [
            "candidate": candidate.sdp,
            "sdpMLineIndex": candidate.sdpMLineIndex,
            "sdpMid": candidate.sdpMid ?? ""
        ]
    ]
    SocketManagers.shared.socket.emit("signal", ["roomId": SocketManagers.shared.roomId ?? "", "signal": candidateDict])
}

func peerConnection(_ peerConnection: RTCPeerConnection, didRemove candidates: [RTCIceCandidate]) {
    print("Removed ICE candidates: \(candidates)")
}

func peerConnection(_ peerConnection: RTCPeerConnection, didOpen dataChannel: RTCDataChannel) {
    print("Data channel opened: \(dataChannel)")
}

func peerConnectionShouldNegotiate(_ peerConnection: RTCPeerConnection) {
    print("Peer connection should negotiate")
}

// Track-specific delegate method
func peerConnection(_ peerConnection: RTCPeerConnection, didAdd receiver: RTCRtpReceiver, streams: [RTCMediaStream]) {
    print("Did add receiver: \(receiver)")
}

}
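
One thing I am not sure about: in VideoViewController, renderRemoteVideo(to:) is called from viewDidLoad, and it only does self.remoteVideoTrack?.add(renderer). So if remoteVideoTrack is still nil at that moment, the renderer is never attached to the track that arrives later in the didAdd stream delegate. Here is a minimal sketch of what I am considering to re-attach the renderer whenever the published track changes (Combine-based; RemoteTrackBinder is just an illustrative name and is not in my current code):

import Combine
import WebRTC

final class RemoteTrackBinder {
// Illustrative helper (not in my current code): attach the renderer as soon as
// WebRTCManager publishes a non-nil remote track, instead of only once in viewDidLoad.
private var cancellable: AnyCancellable?

func bind(_ manager: WebRTCManager, to renderer: RTCMTLVideoView) {
    cancellable = manager.$remoteVideoTrack
        .receive(on: DispatchQueue.main)
        .compactMap { $0 }
        .sink { [weak renderer] track in
            guard let renderer = renderer else { return }
            // Attach the renderer to the remote track as soon as it exists.
            track.add(renderer)
        }
}

}

I would call something like binder.bind(webRTCClient, to: remoteRenderer) right after creating remoteRenderer in viewDidLoad, but I am not sure whether this is the right fix or whether something else in my setup is wrong.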
