iOS microphone not working or not sending voice through WebRTC when the call is answered from the locked screen (iOS)

iOS Microphone not working or not sending the voice through webrtc when the call is answered from locked screen


Please note that the code below is tailored to my own needs and is shared for reference only; you will need to adapt it to your use case.

When you receive the VoIP notification, create a new instance of your WebRTC handling class and add these two lines to the code block, because enabling the audio session directly from the VoIP notification fails:

// Enabling the audio session from the VoIP push fails, so hand control of the
// audio session to CallKit: switch WebRTC to manual-audio mode and keep audio
// disabled until CallKit activates the AVAudioSession (didActivate callback).
// FIX: the two statements were fused onto one line ("= trueRTCAudioSession..."),
// which is not valid Swift.
RTCAudioSession.sharedInstance().useManualAudio = true
RTCAudioSession.sharedInstance().isAudioEnabled = false

The pushRegistry(_:didReceiveIncomingPushWith:for:completion:) method:

// PushKit delegate: handles an incoming VoIP push and reports the call to CallKit.
// BUG FIXES vs. original: the PushKit `completion` handler was never invoked
// (it must always be called, on every path); a duplicate unused `let state` was
// removed; force unwraps/casts that crash on malformed payloads were made safe.
func pushRegistry(_ registry: PKPushRegistry, didReceiveIncomingPushWith payload: PKPushPayload, for type: PKPushType, completion: @escaping () -> Void) {
    let state = UIApplication.shared.applicationState

    // Ignore hang-up pushes and pushes that arrive while the app is active
    // (an active app handles the call through its normal signaling path).
    guard payload.dictionaryPayload["hangup"] == nil, state != .active else {
        completion() // PushKit requires the completion handler to be called.
        return
    }

    // Payload parameters are passed to the WebRTC handler via the Globals
    // singleton so the answer can be created from the SDP in the payload.
    Globals.voipPayload = payload.dictionaryPayload as? [String: Any] ?? [:]

    // Enabling the audio session from the VoIP push fails; defer it to CallKit.
    RTCAudioSession.sharedInstance().useManualAudio = true
    RTCAudioSession.sharedInstance().isAudioEnabled = false

    Globals.sipGateway = SipGateway() // WebRTC / Janus gateway handler class.
    // Checks the Janus gateway credentials stored in shared preferences,
    // opens the websocket connection, and creates the peer connection to the
    // Janus gateway (the signaling server for this environment).
    Globals.sipGateway?.configureCredentials(true)

    initProvider() // Create the CallKit provider.

    // Avoid force-unwrapping a payload key that may be absent.
    let callerID = payload.dictionaryPayload["caller_id"].map { String(describing: $0) } ?? "Unknown"
    self.update.remoteHandle = CXHandle(type: .generic, value: callerID)
    Globals.callId = UUID()

    // On iOS 13+ every VoIP push MUST report an incoming call; call the
    // PushKit completion handler once CallKit has finished reporting.
    Globals.provider.reportNewIncomingCall(with: Globals.callId, update: self.update, completion: { _ in
        completion()
    })
}

// Creates and configures the CallKit provider and the reusable call-update template.
func initProvider() {
    let config = CXProviderConfiguration(localizedName: "ulakBEL")
    // Optional chaining instead of force-unwrap: a missing asset must not crash.
    config.iconTemplateImageData = UIImage(named: "ulakbel")?.pngData()
    config.ringtoneSound = "ringtone.caf"
    // config.includesCallsInRecents = false
    config.supportsVideo = false

    Globals.provider = CXProvider(configuration: config)
    Globals.provider.setDelegate(self, queue: nil)

    update = CXCallUpdate()
    update.hasVideo = false
    update.supportsDTMF = true
}

Modify your didActivate and didDeactivate provider delegate functions as below:

// CallKit activated the AVAudioSession: hand the session to WebRTC and enable
// audio (we are in manual-audio mode, so WebRTC waits for this signal).
func provider(_ provider: CXProvider, didActivate audioSession: AVAudioSession) {
    print("CallManager didActivate")
    RTCAudioSession.sharedInstance().audioSessionDidActivate(audioSession)
    RTCAudioSession.sharedInstance().isAudioEnabled = true
    // self.callDelegate?.callIsAnswered()
}

// CallKit deactivated the AVAudioSession: notify WebRTC and disable audio.
// FIX: the print(...) and the audioSessionDidDeactivate(...) calls were fused
// onto one line in the original, which is not valid Swift.
func provider(_ provider: CXProvider, didDeactivate audioSession: AVAudioSession) {
    print("CallManager didDeactivate")
    RTCAudioSession.sharedInstance().audioSessionDidDeactivate(audioSession)
    RTCAudioSession.sharedInstance().isAudioEnabled = false
}

In the WebRTC handler class, configure the media senders and the audio session:

// Builds the RTCPeerConnection toward the Janus gateway, attaches the local
// media senders, configures the audio session, and answers a remote JSEP
// offer if one is already present in the callbacks.
private func createPeerConnection(webRTCCallbacks: PluginHandleWebRTCCallbacksDelegate) {
    let rtcConfig = RTCConfiguration()
    rtcConfig.iceServers = server.iceServers
    rtcConfig.bundlePolicy = .maxBundle
    rtcConfig.rtcpMuxPolicy = .require
    rtcConfig.continualGatheringPolicy = .gatherContinually
    // NOTE(review): Plan B SDP semantics are deprecated in libwebrtc; moving
    // to .unifiedPlan would require matching changes on the Janus side — confirm.
    rtcConfig.sdpSemantics = .planB

    // DTLS-SRTP key agreement must be enabled for secure media.
    let constraints = RTCMediaConstraints(
        mandatoryConstraints: nil,
        optionalConstraints: ["DtlsSrtpKeyAgreement": kRTCMediaConstraintsValueTrue]
    )

    pc = sessionFactory.peerConnection(with: rtcConfig, constraints: constraints, delegate: nil)
    self.createMediaSenders()
    self.configureAudioSession()

    // If the remote offer (JSEP) already arrived, handle it immediately.
    if webRTCCallbacks.getJsep() != nil {
        handleRemoteJsep(webrtcCallbacks: webRTCCallbacks)
    }
}

mediaSenders;

// Attaches the local audio track to the peer connection under a shared stream
// id. Video and data-channel setup is intentionally left commented out, as in
// the original sample.
private func createMediaSenders() {
    let streamId = "stream"

    // Audio
    self.pc.add(self.createAudioTrack(), streamIds: [streamId])

    // Video
    /*  let videoTrack = self.createVideoTrack()
        self.localVideoTrack = videoTrack
        self.peerConnection.add(videoTrack, streamIds: [streamId])
        self.remoteVideoTrack = self.peerConnection.transceivers.first { $0.mediaType == .video }?.receiver.track as? RTCVideoTrack

        // Data
        if let dataChannel = createDataChannel() {
            dataChannel.delegate = self
            self.localDataChannel = dataChannel
        } */
}

// Creates the local microphone audio track ("audio0") with no extra constraints.
private func createAudioTrack() -> RTCAudioTrack {
    let audioConstraints = RTCMediaConstraints(mandatoryConstraints: nil, optionalConstraints: nil)
    let source = sessionFactory.audioSource(with: audioConstraints)
    return sessionFactory.audioTrack(with: source, trackId: "audio0")
}

audioSession ;

// Configures the shared RTCAudioSession for a voice call (play-and-record
// category, voice-chat mode). The lock/unlock pair is required because the
// session is shared with CallKit while in manual-audio mode.
// FIX: corrected the typo "changeing" in the error log message.
private func configureAudioSession() {
    self.rtcAudioSession.lockForConfiguration()
    do {
        try self.rtcAudioSession.setCategory(AVAudioSession.Category.playAndRecord.rawValue)
        try self.rtcAudioSession.setMode(AVAudioSession.Mode.voiceChat.rawValue)
    } catch let error {
        debugPrint("Error changing AVAudioSession category: \(error)")
    }
    self.rtcAudioSession.unlockForConfiguration()
}

Please note that because I worked with callbacks and delegates, the code includes delegate and callback chunks; you can ignore them as appropriate.

For reference, you can also check the example at this link.