diff --git a/Monal/Classes/AVCallUI.swift b/Monal/Classes/AVCallUI.swift
index 9c50ecdcae..82ddb392dc 100644
--- a/Monal/Classes/AVCallUI.swift
+++ b/Monal/Classes/AVCallUI.swift
@@ -36,8 +36,9 @@ struct AVCallUI: View {
     @State private var controlsVisible = true
     @State private var localRendererLocation: CGPoint = CGPoint(
         x: UIScreen.main.bounds.size.width - (UIScreen.main.bounds.size.width/5.0 + 24.0),
-        y: 16,
+        y: 16
     )
+    @State private var cameraPosition: AVCaptureDevice.Position = .front
     private var ringingPlayer: AVAudioPlayer!
     private var busyPlayer: AVAudioPlayer!
     private var errorPlayer: AVAudioPlayer!
@@ -72,7 +73,7 @@ struct AVCallUI: View {
     func maybeStartRenderer() {
         if MLCallType(rawValue:call.callType) == .video && MLCallState(rawValue:call.state) == .connected {
             DDLogError("Starting renderer...")
-            call.obj.startCaptureLocalVideo(withRenderer: self.localRenderer)
+            call.obj.startCaptureLocalVideo(withRenderer: self.localRenderer, andCameraPosition:cameraPosition)
             call.obj.renderRemoteVideo(withRenderer: self.remoteRenderer)
         }
     }
@@ -189,6 +190,13 @@ struct AVCallUI: View {
                         self.localRendererLocation = value.location
                     }
             )
+            .onTapGesture(count: 2) {
+                if cameraPosition == .front {
+                    cameraPosition = .back
+                } else {
+                    cameraPosition = .front
+                }
+            }
         }
     }
 }
diff --git a/Monal/Classes/MLCall.h b/Monal/Classes/MLCall.h
index 1b264aa39c..cf77ceeb9b 100644
--- a/Monal/Classes/MLCall.h
+++ b/Monal/Classes/MLCall.h
@@ -9,6 +9,8 @@
 #ifndef MLCall_h
 #define MLCall_h
 
+#import <AVFoundation/AVFoundation.h>
+
 NS_ASSUME_NONNULL_BEGIN
 
 @class WebRTCClient;
@@ -81,7 +83,7 @@ typedef NS_ENUM(NSUInteger, MLCallEncryptionState) {
 //these will not use the correct RTCVideoRenderer protocol like in the implementation because the forward declaration of
 //RTCVideoRenderer will not be visible to swift until we have swift 5.9 (feature flag ImportObjcForwardDeclarations) or swift 6.0 support
 //see https://github.com/apple/swift-evolution/blob/main/proposals/0384-importing-forward-declared-objc-interfaces-and-protocols.md
--(void) startCaptureLocalVideoWithRenderer:(id) renderer;
+-(void) startCaptureLocalVideoWithRenderer:(id) renderer andCameraPosition:(AVCaptureDevicePosition) position;
 -(void) stopCaptureLocalVideo;
 -(void) renderRemoteVideoWithRenderer:(id) renderer;
 
diff --git a/Monal/Classes/MLCall.m b/Monal/Classes/MLCall.m
index ac61b1b74d..e77390265b 100644
--- a/Monal/Classes/MLCall.m
+++ b/Monal/Classes/MLCall.m
@@ -137,9 +137,9 @@ -(void) dealloc
 
 #pragma mark - public interface
 
--(void) startCaptureLocalVideoWithRenderer:(id) renderer
+-(void) startCaptureLocalVideoWithRenderer:(id) renderer andCameraPosition:(AVCaptureDevicePosition) position
 {
-    [self.webRTCClient startCaptureLocalVideoWithRenderer:renderer];
+    [self.webRTCClient startCaptureLocalVideoWithRenderer:renderer andCameraPosition:position];
 }
 
 -(void) stopCaptureLocalVideo
diff --git a/Monal/Classes/WebRTCClient.swift b/Monal/Classes/WebRTCClient.swift
index dd62291faa..6bf16abf29 100644
--- a/Monal/Classes/WebRTCClient.swift
+++ b/Monal/Classes/WebRTCClient.swift
@@ -165,13 +165,13 @@ final class WebRTCClient: NSObject {
     // MARK: Media
     @objc
-    func startCaptureLocalVideo(renderer: RTCVideoRenderer) {
+    func startCaptureLocalVideo(renderer: RTCVideoRenderer, andCameraPosition position: AVCaptureDevice.Position) {
         guard let capturer = self.videoCapturer as? RTCCameraVideoCapturer else {
             return
         }
         guard
-            let frontCamera = (RTCCameraVideoCapturer.captureDevices().first { $0.position == .front }),
+            let frontCamera = (RTCCameraVideoCapturer.captureDevices().first { $0.position == position }),
             // choose highest res
             let format = (RTCCameraVideoCapturer.supportedFormats(for: frontCamera).sorted { (f1, f2) -> Bool in