Merge pull request #3587 from vector-im/feature/fga/fix_video_call_crash

Fix crash after video call
commit a8bef415b7 by Benoit Marty, 2021-06-30 17:22:09 +02:00, committed by GitHub
2 changed files with 149 additions and 126 deletions

changelog.d/3577.bugfix (new file, 1 addition)

@@ -0,0 +1 @@
+Fix crash after video call.
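Taken together, the WebRtcCall.kt hunks below move every public entry point of the class onto the call's session coroutine scope backed by a single dispatcher, and make release() detach the video renderers and clear the remote tracks before the WebRTC objects are torn down, which suggests the crash came from call state being touched off that dispatcher after teardown. A minimal sketch of that single-dispatcher confinement pattern, assuming kotlinx-coroutines; the names (CallSketch, released, micMuted) are illustrative and not taken from the codebase:

import java.util.concurrent.Executors
import kotlinx.coroutines.CoroutineScope
import kotlinx.coroutines.SupervisorJob
import kotlinx.coroutines.asCoroutineDispatcher
import kotlinx.coroutines.launch
import kotlinx.coroutines.withContext

class CallSketch {
    // One single-threaded dispatcher: all mutable state below is only touched from it.
    private val dispatcher = Executors.newSingleThreadExecutor().asCoroutineDispatcher()
    private val scope = CoroutineScope(SupervisorJob() + dispatcher)
    private var released = false
    private var micMuted = false

    fun muteCall(muted: Boolean) {
        // Fire-and-forget from any thread; the work is queued onto the dispatcher.
        scope.launch {
            if (released) return@launch
            micMuted = muted
            // ... toggle the audio track here ...
        }
    }

    fun endCall() {
        scope.launch {
            if (released) return@launch
            release()
        }
    }

    private suspend fun release() = withContext(dispatcher) {
        // Detach sinks and dispose tracks here; anything queued later sees released == true.
        released = true
    }
}

Because every mutation is serialized on the same dispatcher, a renderer attach or mute request that arrives after endCall() runs strictly after release() instead of racing with the teardown.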

WebRtcCall.kt

@@ -83,9 +83,9 @@ import java.util.concurrent.TimeUnit
 import javax.inject.Provider
 import kotlin.coroutines.CoroutineContext
 
-private const val STREAM_ID = "ARDAMS"
-private const val AUDIO_TRACK_ID = "ARDAMSa0"
-private const val VIDEO_TRACK_ID = "ARDAMSv0"
+private const val STREAM_ID = "userMedia"
+private const val AUDIO_TRACK_ID = "${STREAM_ID}a0"
+private const val VIDEO_TRACK_ID = "${STREAM_ID}v0"
 private val DEFAULT_AUDIO_CONSTRAINTS = MediaConstraints()
 
 class WebRtcCall(
@@ -274,12 +274,77 @@ class WebRtcCall(
         peerConnection = peerConnectionFactory.createPeerConnection(rtcConfig, PeerConnectionObserver(this))
     }
 
+    /**
+     * Without consultation
+     */
+    fun transferToUser(targetUserId: String, targetRoomId: String?) {
+        sessionScope?.launch(dispatcher) {
+            mxCall.transfer(
+                    targetUserId = targetUserId,
+                    targetRoomId = targetRoomId,
+                    createCallId = CallIdGenerator.generate(),
+                    awaitCallId = null
+            )
+            endCall(sendEndSignaling = false)
+        }
+    }
+
+    /**
+     * With consultation
+     */
+    fun transferToCall(transferTargetCall: WebRtcCall) {
+        sessionScope?.launch(dispatcher) {
+            val newCallId = CallIdGenerator.generate()
+            transferTargetCall.mxCall.transfer(
+                    targetUserId = mxCall.opponentUserId,
+                    targetRoomId = null,
+                    createCallId = null,
+                    awaitCallId = newCallId
+            )
+            mxCall.transfer(
+                    targetUserId = transferTargetCall.mxCall.opponentUserId,
+                    targetRoomId = null,
+                    createCallId = newCallId,
+                    awaitCallId = null
+            )
+            endCall(sendEndSignaling = false)
+            transferTargetCall.endCall(sendEndSignaling = false)
+        }
+    }
+
+    fun acceptIncomingCall() {
+        sessionScope?.launch {
+            Timber.v("## VOIP acceptIncomingCall from state ${mxCall.state}")
+            if (mxCall.state == CallState.LocalRinging) {
+                internalAcceptIncomingCall()
+            }
+        }
+    }
+
+    /**
+     * Sends a DTMF digit to the other party
+     * @param digit The digit (nb. string - '#' and '*' are dtmf too)
+     */
+    fun sendDtmfDigit(digit: String) {
+        sessionScope?.launch {
+            for (sender in peerConnection?.senders.orEmpty()) {
+                if (sender.track()?.kind() == "audio" && sender.dtmf()?.canInsertDtmf() == true) {
+                    try {
+                        sender.dtmf()?.insertDtmf(digit, 100, 70)
+                        return@launch
+                    } catch (failure: Throwable) {
+                        Timber.v("Fail to send Dtmf digit")
+                    }
+                }
+            }
+        }
+    }
+
     fun attachViewRenderers(localViewRenderer: SurfaceViewRenderer?, remoteViewRenderer: SurfaceViewRenderer, mode: String?) {
+        sessionScope?.launch(dispatcher) {
             Timber.v("## VOIP attachViewRenderers localRendeder $localViewRenderer / $remoteViewRenderer")
             localSurfaceRenderers.addIfNeeded(localViewRenderer)
             remoteSurfaceRenderers.addIfNeeded(remoteViewRenderer)
-        sessionScope?.launch(dispatcher) {
             when (mode) {
                 VectorCallActivity.INCOMING_ACCEPT -> {
                     internalAcceptIncomingCall()
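The sendDtmfDigit() added above goes through WebRTC's RtpSender/DtmfSender pair. The standalone sketch below isolates that path against the org.webrtc Android API; the sendTones helper and its parameters are illustrative and not part of this change:

import org.webrtc.PeerConnection

// Find the audio sender on an already-negotiated PeerConnection and push DTMF tones
// through its DtmfSender; returns false if no sender can take DTMF right now.
fun sendTones(peerConnection: PeerConnection, tones: String): Boolean {
    for (sender in peerConnection.senders) {
        val dtmf = sender.dtmf() ?: continue
        if (sender.track()?.kind() == "audio" && dtmf.canInsertDtmf()) {
            // 100 ms per tone with a 70 ms inter-tone gap, the same values used above
            return dtmf.insertDtmf(tones, 100, 70)
        }
    }
    return false
}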
@@ -299,67 +364,31 @@ class WebRtcCall(
         }
     }
 
-    /**
-     * Without consultation
-     */
-    suspend fun transferToUser(targetUserId: String, targetRoomId: String?) {
-        mxCall.transfer(
-                targetUserId = targetUserId,
-                targetRoomId = targetRoomId,
-                createCallId = CallIdGenerator.generate(),
-                awaitCallId = null
-        )
-        endCall(sendEndSignaling = false)
-    }
-
-    /**
-     * With consultation
-     */
-    suspend fun transferToCall(transferTargetCall: WebRtcCall) {
-        val newCallId = CallIdGenerator.generate()
-        transferTargetCall.mxCall.transfer(
-                targetUserId = mxCall.opponentUserId,
-                targetRoomId = null,
-                createCallId = null,
-                awaitCallId = newCallId
-        )
-        mxCall.transfer(
-                targetUserId = transferTargetCall.mxCall.opponentUserId,
-                targetRoomId = null,
-                createCallId = newCallId,
-                awaitCallId = null
-        )
-        endCall(sendEndSignaling = false)
-        transferTargetCall.endCall(sendEndSignaling = false)
-    }
-
-    fun acceptIncomingCall() {
-        sessionScope?.launch {
-            Timber.v("## VOIP acceptIncomingCall from state ${mxCall.state}")
-            if (mxCall.state == CallState.LocalRinging) {
-                internalAcceptIncomingCall()
-            }
-        }
-    }
-
-    /**
-     * Sends a DTMF digit to the other party
-     * @param digit The digit (nb. string - '#' and '*' are dtmf too)
-     */
-    fun sendDtmfDigit(digit: String) {
-        for (sender in peerConnection?.senders.orEmpty()) {
-            if (sender.track()?.kind() == "audio" && sender.dtmf()?.canInsertDtmf() == true) {
-                try {
-                    sender.dtmf()?.insertDtmf(digit, 100, 70)
-                    return
-                } catch (failure: Throwable) {
-                    Timber.v("Fail to send Dtmf digit")
-                }
-            }
-        }
-    }
+    private suspend fun attachViewRenderersInternal() = withContext(dispatcher) {
+        // render local video in pip view
+        localSurfaceRenderers.forEach { renderer ->
+            renderer.get()?.let { pipSurface ->
+                pipSurface.setMirror(cameraInUse?.type == CameraType.FRONT)
+                // no need to check if already added, addSink is checking that
+                localVideoTrack?.addSink(pipSurface)
+            }
+        }
+        // If remote track exists, then sink it to surface
+        remoteSurfaceRenderers.forEach { renderer ->
+            renderer.get()?.let { participantSurface ->
+                remoteVideoTrack?.addSink(participantSurface)
+            }
+        }
+    }
 
     fun detachRenderers(renderers: List<SurfaceViewRenderer>?) {
+        sessionScope?.launch(dispatcher) {
+            detachRenderersInternal(renderers)
+        }
+    }
+
+    private suspend fun detachRenderersInternal(renderers: List<SurfaceViewRenderer>?) = withContext(dispatcher) {
         Timber.v("## VOIP detachRenderers")
         if (renderers.isNullOrEmpty()) {
             // remove all sinks
@@ -452,24 +481,6 @@ class WebRtcCall(
         })
     }
 
-    private fun attachViewRenderersInternal() {
-        // render local video in pip view
-        localSurfaceRenderers.forEach { renderer ->
-            renderer.get()?.let { pipSurface ->
-                pipSurface.setMirror(this.cameraInUse?.type == CameraType.FRONT)
-                // no need to check if already added, addSink is checking that
-                localVideoTrack?.addSink(pipSurface)
-            }
-        }
-        // If remote track exists, then sink it to surface
-        remoteSurfaceRenderers.forEach { renderer ->
-            renderer.get()?.let { participantSurface ->
-                remoteVideoTrack?.addSink(participantSurface)
-            }
-        }
-    }
-
     private suspend fun getTurnServer(): TurnServerResponse? {
         return tryOrNull {
             sessionProvider.get()?.callSignalingService()?.getTurnServer()
@@ -580,10 +591,12 @@ class WebRtcCall(
     }
 
     fun setCaptureFormat(format: CaptureFormat) {
+        sessionScope?.launch(dispatcher) {
             Timber.v("## VOIP setCaptureFormat $format")
             videoCapturer?.changeCaptureFormat(format.width, format.height, format.fps)
             currentCaptureFormat = format
         }
+    }
 
     private fun updateMuteStatus() {
         val micShouldBeMuted = micMuted || remoteOnHold
@@ -645,14 +658,18 @@ class WebRtcCall(
     }
 
     fun muteCall(muted: Boolean) {
+        sessionScope?.launch(dispatcher) {
             micMuted = muted
             updateMuteStatus()
         }
+    }
 
     fun enableVideo(enabled: Boolean) {
+        sessionScope?.launch(dispatcher) {
             videoMuted = !enabled
             updateMuteStatus()
         }
+    }
 
     fun canSwitchCamera(): Boolean {
         return availableCamera.size > 1
@@ -668,9 +685,10 @@ class WebRtcCall(
     }
 
     fun switchCamera() {
+        sessionScope?.launch(dispatcher) {
             Timber.v("## VOIP switchCamera")
             if (mxCall.state is CallState.Connected && mxCall.isVideoCall) {
-                val oppositeCamera = getOppositeCameraIfAny() ?: return
+                val oppositeCamera = getOppositeCameraIfAny() ?: return@launch
                 videoCapturer?.switchCamera(
                         object : CameraVideoCapturer.CameraSwitchHandler {
                             // Invoked on success. |isFrontCamera| is true if the new camera is front facing.
@@ -692,6 +710,7 @@ class WebRtcCall(
                 )
             }
         }
+    }
 
     private suspend fun createAnswer(): SessionDescription? {
         Timber.w("## VOIP createAnswer")
@@ -718,11 +737,12 @@ class WebRtcCall(
         return currentCaptureFormat
     }
 
-    private fun release() {
+    private suspend fun release() {
         listeners.clear()
         mxCall.removeListener(this)
         timer.stop()
         timer.tickListener = null
+        detachRenderersInternal(null)
         videoCapturer?.stopCapture()
         videoCapturer?.dispose()
         videoCapturer = null
@@ -736,6 +756,8 @@ class WebRtcCall(
         localAudioTrack = null
         localVideoSource = null
         localVideoTrack = null
+        remoteAudioTrack = null
+        remoteVideoTrack = null
         cameraAvailabilityCallback = null
     }
@@ -745,7 +767,7 @@ class WebRtcCall(
         if (stream.audioTracks.size > 1 || stream.videoTracks.size > 1) {
             Timber.e("## VOIP StreamObserver weird looking stream: $stream")
             // TODO maybe do something more??
-            mxCall.hangUp()
+            endCall(true)
             return@launch
         }
         if (stream.audioTracks.size == 1) {
@@ -774,8 +796,9 @@ class WebRtcCall(
     }
 
     fun endCall(sendEndSignaling: Boolean = true, reason: CallHangupContent.Reason? = null) {
+        sessionScope?.launch(dispatcher) {
             if (mxCall.state == CallState.Terminated) {
-                return
+                return@launch
             }
             // Close tracks ASAP
             localVideoTrack?.setEnabled(false)
@@ -786,10 +809,8 @@ class WebRtcCall(
             }
             val wasRinging = mxCall.state is CallState.LocalRinging
             mxCall.state = CallState.Terminated
-        sessionScope?.launch(dispatcher) {
             release()
             onCallEnded(callId)
-        }
             if (sendEndSignaling) {
                 if (wasRinging) {
                     mxCall.reject()
@@ -798,6 +819,7 @@ class WebRtcCall(
                 }
             }
         }
+    }
 
     // Call listener
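The last hunks carry the core of the fix: release() becomes suspend, detaches the renderers and clears the remote tracks, and endCall() now runs entirely inside sessionScope?.launch(dispatcher) instead of releasing asynchronously while the rest of the method continues on the caller's thread. A small sketch of the detach-before-dispose ordering this enforces, with simplified names (releaseVideo is illustrative, not from the codebase):

import org.webrtc.SurfaceViewRenderer
import org.webrtc.VideoTrack

// Remove every renderer sink before freeing the track, so no SurfaceViewRenderer is
// called back with frames from a native object that has already been disposed.
fun releaseVideo(remoteVideoTrack: VideoTrack?, renderers: List<SurfaceViewRenderer>) {
    renderers.forEach { renderer ->
        remoteVideoTrack?.removeSink(renderer)
    }
    remoteVideoTrack?.dispose()
}

Feeding frames into a renderer after its track has been disposed is a classic source of native crashes right after a video call ends, which matches the symptom described in the changelog.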