Init video call

This commit is contained in:
Benoit Martins 2024-01-04 16:57:05 +01:00
parent 111fef6603
commit 3046336e57
7 changed files with 334 additions and 102 deletions

View file

@ -25,6 +25,7 @@
D719ABC92ABC6FD700B41C10 /* CoreContext.swift in Sources */ = {isa = PBXBuildFile; fileRef = D719ABC82ABC6FD700B41C10 /* CoreContext.swift */; };
D719ABCC2ABC769C00B41C10 /* AssistantView.swift in Sources */ = {isa = PBXBuildFile; fileRef = D719ABCB2ABC769C00B41C10 /* AssistantView.swift */; };
D719ABCF2ABC779A00B41C10 /* AccountLoginViewModel.swift in Sources */ = {isa = PBXBuildFile; fileRef = D719ABCE2ABC779A00B41C10 /* AccountLoginViewModel.swift */; };
D71A0E192B485ADF0002C6CD /* ViewExtension.swift in Sources */ = {isa = PBXBuildFile; fileRef = D71A0E182B485ADF0002C6CD /* ViewExtension.swift */; };
D71FCA7F2AE1397200D2E43E /* ContactsListViewModel.swift in Sources */ = {isa = PBXBuildFile; fileRef = D71FCA7E2AE1397200D2E43E /* ContactsListViewModel.swift */; };
D71FCA812AE14CFC00D2E43E /* ContactsListFragment.swift in Sources */ = {isa = PBXBuildFile; fileRef = D71FCA802AE14CFC00D2E43E /* ContactsListFragment.swift */; };
D71FCA832AE14D6E00D2E43E /* ContactFragment.swift in Sources */ = {isa = PBXBuildFile; fileRef = D71FCA822AE14D6E00D2E43E /* ContactFragment.swift */; };
@ -116,6 +117,7 @@
D719ABC82ABC6FD700B41C10 /* CoreContext.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = CoreContext.swift; sourceTree = "<group>"; };
D719ABCB2ABC769C00B41C10 /* AssistantView.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = AssistantView.swift; sourceTree = "<group>"; };
D719ABCE2ABC779A00B41C10 /* AccountLoginViewModel.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = AccountLoginViewModel.swift; sourceTree = "<group>"; };
D71A0E182B485ADF0002C6CD /* ViewExtension.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = ViewExtension.swift; sourceTree = "<group>"; };
D71FCA7E2AE1397200D2E43E /* ContactsListViewModel.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = ContactsListViewModel.swift; sourceTree = "<group>"; };
D71FCA802AE14CFC00D2E43E /* ContactsListFragment.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = ContactsListFragment.swift; sourceTree = "<group>"; };
D71FCA822AE14D6E00D2E43E /* ContactFragment.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = ContactFragment.swift; sourceTree = "<group>"; };
@ -214,6 +216,7 @@
66C491F82B24D25A00CEA16D /* ConfigExtension.swift */,
D76005F52B0798B00054B79A /* IntExtension.swift */,
D717071F2AC5989C0037746F /* TextExtension.swift */,
D71A0E182B485ADF0002C6CD /* ViewExtension.swift */,
);
path = Extensions;
sourceTree = "<group>";
@ -637,6 +640,7 @@
D71FCA832AE14D6E00D2E43E /* ContactFragment.swift in Sources */,
D7C3650C2AF0084000FE6142 /* EditContactViewModel.swift in Sources */,
D7B5678E2B28888F00DE63EB /* CallView.swift in Sources */,
D71A0E192B485ADF0002C6CD /* ViewExtension.swift in Sources */,
D750D3392AD3E6EE00EC99C5 /* PopupLoadingView.swift in Sources */,
D7E6D0492AE933AD00A57AAF /* FavoriteContactsListFragment.swift in Sources */,
662B69DB2B25DE25007118BF /* ProviderDelegate.swift in Sources */,

View file

@ -102,6 +102,11 @@ final class CoreContext: ObservableObject {
}
})
self.mCore.videoCaptureEnabled = true
self.mCore.videoDisplayEnabled = true
self.mCore.videoActivationPolicy!.automaticallyAccept = true
try? self.mCore.start()
// Create a Core listener to listen for the callback we need

View file

@ -29,7 +29,6 @@ rls_uri=sips:rls@sip.linphone.org
ec_calibrator_cool_tones=1
[video]
displaytype=MSAndroidTextureDisplay
auto_resize_preview_to_keep_ratio=1
max_conference_size=vga

View file

@ -21,6 +21,7 @@
import SwiftUI
import CallKit
import AVFAudio
import linphonesw
struct CallView: View {
@ -41,14 +42,24 @@ struct CallView: View {
@State var options: Int = 1
@State var imageAudioRoute: String = ""
@State var angleDegree = 0.0
@State var fullscreenVideo = false
var body: some View {
GeometryReader { geo in
if #available(iOS 16.4, *) {
innerView(geoHeight: geo.size.height)
.sheet(isPresented: .constant(telecomManager.callStarted && !hideButtonsSheet && idiom != .pad && !(orientation == .landscapeLeft || orientation == .landscapeRight
|| UIScreen.main.bounds.size.width > UIScreen.main.bounds.size.height))) {
GeometryReader { _ in
innerView(geoHeight: geo.size.height, geoWidth: geo.size.width)
.sheet(isPresented:
.constant(
telecomManager.callStarted
&& !fullscreenVideo
&& !hideButtonsSheet
&& idiom != .pad
&& !(orientation == .landscapeLeft || orientation == .landscapeRight || UIScreen.main.bounds.size.width > UIScreen.main.bounds.size.height)
)
) {
GeometryReader { _ in
VStack(spacing: 0) {
HStack(spacing: 12) {
Button {
@ -68,8 +79,9 @@ struct CallView: View {
Spacer()
Button {
callViewModel.toggleVideo()
} label: {
Image("video-camera")
Image(callViewModel.cameraDisplayed ? "video-camera" : "video-camera-slash")
.renderingMode(.template)
.resizable()
.foregroundStyle(.white)
@ -81,7 +93,7 @@ struct CallView: View {
.cornerRadius(40)
Button {
callViewModel.muteCall()
callViewModel.toggleMuteMicrophone()
} label: {
Image(callViewModel.micMutted ? "microphone-slash" : "microphone")
.renderingMode(.template)
@ -129,6 +141,7 @@ struct CallView: View {
}
.frame(height: geo.size.height * 0.15)
.padding(.horizontal, 20)
.padding(.top, -6)
HStack(spacing: 0) {
VStack {
@ -231,6 +244,7 @@ struct CallView: View {
VStack {
Button {
callViewModel.togglePause()
} label: {
Image("pause")
.renderingMode(.template)
@ -250,6 +264,7 @@ struct CallView: View {
VStack {
Button {
callViewModel.toggleRecording()
} label: {
Image("record-fill")
.renderingMode(.template)
@ -409,35 +424,52 @@ struct CallView: View {
}
@ViewBuilder
func innerView(geoHeight: CGFloat) -> some View {
func innerView(geoHeight: CGFloat, geoWidth: CGFloat) -> some View {
VStack {
Rectangle()
.foregroundColor(Color.orangeMain500)
.edgesIgnoringSafeArea(.top)
.frame(height: 0)
HStack {
if callViewModel.direction == .Outgoing {
Image("outgoing-call")
.resizable()
.frame(width: 15, height: 15)
.padding(.horizontal)
Text("Outgoing call")
.foregroundStyle(.white)
} else {
Image("incoming-call")
.resizable()
.frame(width: 15, height: 15)
.padding(.horizontal)
Text("Incoming call")
.foregroundStyle(.white)
}
Spacer()
}
.frame(height: 40)
if !fullscreenVideo {
Rectangle()
.foregroundColor(Color.orangeMain500)
.edgesIgnoringSafeArea(.top)
.frame(height: 0)
HStack {
if callViewModel.direction == .Outgoing {
Image("outgoing-call")
.resizable()
.frame(width: 15, height: 15)
.padding(.horizontal)
Text("Outgoing call")
.foregroundStyle(.white)
} else {
Image("incoming-call")
.resizable()
.frame(width: 15, height: 15)
.padding(.horizontal)
Text("Incoming call")
.foregroundStyle(.white)
}
Spacer()
if callViewModel.cameraDisplayed {
Button {
callViewModel.switchCamera()
} label: {
Image("camera-rotate")
.renderingMode(.template)
.resizable()
.foregroundStyle(.white)
.frame(width: 30, height: 30)
.padding(.horizontal)
}
}
}
.frame(height: 40)
.zIndex(1)
}
ZStack {
VStack {
@ -497,8 +529,40 @@ struct CallView: View {
Spacer()
}
if !telecomManager.callStarted {
LinphoneVideoViewHolder { view in
coreContext.doOnCoreQueue { core in
core.nativeVideoWindow = view
}
}
.frame(width: 120*5, height: 160*5)
.scaledToFill()
.clipped()
.onTapGesture {
fullscreenVideo.toggle()
}
if callViewModel.cameraDisplayed {
HStack {
Spacer()
VStack {
Spacer()
LinphoneVideoViewHolder { view in
coreContext.doOnCoreQueue { core in
core.nativePreviewWindow = view
}
}
.frame(width: 120*1.2, height: 160*1.2)
.cornerRadius(20)
.padding(10)
.rotationEffect(Angle(degrees: angleDegree))
.padding(.trailing, abs(angleDegree/2))
}
}
.frame(maxWidth: fullscreenVideo ? geoWidth : geoWidth - 8, maxHeight: fullscreenVideo ? geoHeight + 140 : geoHeight - 140)
}
if !telecomManager.callStarted && !fullscreenVideo {
VStack {
ActivityIndicator()
.frame(width: 20, height: 20)
@ -517,14 +581,38 @@ struct CallView: View {
.background(.clear)
}
}
.frame(maxWidth: .infinity, maxHeight: .infinity)
.frame(maxWidth: fullscreenVideo ? geoWidth : geoWidth - 8, maxHeight: fullscreenVideo ? geoHeight + 140 : geoHeight - 140)
.background(Color.gray600)
.cornerRadius(20)
.padding(.horizontal, 4)
.padding(.horizontal, fullscreenVideo ? 0 : 4)
.onRotate { newOrientation in
orientation = newOrientation
if orientation == .portrait || orientation == .portraitUpsideDown {
angleDegree = 0
} else {
if orientation == .landscapeLeft {
angleDegree = -90
} else if orientation == .landscapeRight {
angleDegree = 90
}
}
}
.onAppear {
if orientation == .portrait && orientation == .portraitUpsideDown {
angleDegree = 0
} else {
if orientation == .landscapeLeft {
angleDegree = -90
} else if orientation == .landscapeRight {
angleDegree = 90
}
}
}
if telecomManager.callStarted {
if !fullscreenVideo {
if telecomManager.callStarted {
if telecomManager.callStarted && idiom != .pad && !(orientation == .landscapeLeft || orientation == .landscapeRight
|| UIScreen.main.bounds.size.width > UIScreen.main.bounds.size.height) {
|| UIScreen.main.bounds.size.width > UIScreen.main.bounds.size.height) {
HStack(spacing: 12) {
HStack {
@ -552,6 +640,7 @@ struct CallView: View {
Spacer()
Button {
callViewModel.toggleVideo()
} label: {
Image("video-camera")
.renderingMode(.template)
@ -565,7 +654,7 @@ struct CallView: View {
.cornerRadius(40)
Button {
callViewModel.muteCall()
callViewModel.toggleMuteMicrophone()
} label: {
Image(callViewModel.micMutted ? "microphone-slash" : "microphone")
.renderingMode(.template)
@ -593,50 +682,55 @@ struct CallView: View {
}
.frame(height: geoHeight * 0.15)
.padding(.horizontal, 20)
.padding(.top, -6)
}
} else {
HStack(spacing: 12) {
HStack {
Spacer()
Button {
callViewModel.terminateCall()
} label: {
Image("phone-disconnect")
.renderingMode(.template)
.resizable()
.foregroundStyle(.white)
.frame(width: 32, height: 32)
}
.frame(width: 90, height: 60)
.background(Color.redDanger500)
.cornerRadius(40)
Button {
callViewModel.acceptCall()
} label: {
Image("phone")
.renderingMode(.template)
.resizable()
.foregroundStyle(.white)
.frame(width: 32, height: 32)
}
.frame(width: 90, height: 60)
.background(Color.greenSuccess500)
.cornerRadius(40)
Spacer()
}
.frame(height: 60)
}
.padding(.horizontal, 25)
.padding(.top, 20)
}
} else {
HStack(spacing: 12) {
HStack {
Spacer()
Button {
callViewModel.terminateCall()
} label: {
Image("phone-disconnect")
.renderingMode(.template)
.resizable()
.foregroundStyle(.white)
.frame(width: 32, height: 32)
}
.frame(width: 90, height: 60)
.background(Color.redDanger500)
.cornerRadius(40)
Button {
callViewModel.acceptCall()
} label: {
Image("phone")
.renderingMode(.template)
.resizable()
.foregroundStyle(.white)
.frame(width: 32, height: 32)
}
.frame(width: 90, height: 60)
.background(Color.greenSuccess500)
.cornerRadius(40)
Spacer()
}
.frame(height: 60)
}
.padding(.horizontal, 25)
.padding(.top, 20)
}
}
}
.frame(maxWidth: .infinity, maxHeight: .infinity)
.background(Color.gray900)
.if(fullscreenVideo) { view in
view.ignoresSafeArea(.all)
}
}
func getAudioRouteImage() {

View file

@ -31,12 +31,14 @@ class CallViewModel: ObservableObject {
@Published var remoteAddressString: String = "example.linphone@sip.linphone.org"
@Published var remoteAddress: Address?
@Published var avatarModel: ContactAvatarModel?
@Published var audioSessionImage: String = ""
@State var micMutted: Bool = false
@Published var micMutted: Bool = false
@Published var cameraDisplayed: Bool = false
@State var timeElapsed: Int = 0
let timer = Timer.publish(every: 1, on: .main, in: .common).autoconnect()
var currentCall: Call?
init() {
do {
@ -48,21 +50,26 @@ class CallViewModel: ObservableObject {
coreContext.doOnCoreQueue { core in
if core.currentCall != nil && core.currentCall!.remoteAddress != nil {
self.currentCall = core.currentCall
DispatchQueue.main.async {
self.direction = .Incoming
self.remoteAddressString = String(core.currentCall!.remoteAddress!.asStringUriOnly().dropFirst(4))
self.remoteAddress = core.currentCall!.remoteAddress!
self.remoteAddressString = String(self.currentCall!.remoteAddress!.asStringUriOnly().dropFirst(4))
self.remoteAddress = self.currentCall!.remoteAddress!
let friend = ContactsManager.shared.getFriendWithAddress(address: core.currentCall!.remoteAddress!)
let friend = ContactsManager.shared.getFriendWithAddress(address: self.currentCall!.remoteAddress!)
if friend != nil && friend!.address != nil && friend!.address!.displayName != nil {
self.displayName = friend!.address!.displayName!
} else {
if core.currentCall!.remoteAddress!.displayName != nil {
self.displayName = core.currentCall!.remoteAddress!.displayName!
} else if core.currentCall!.remoteAddress!.username != nil {
self.displayName = core.currentCall!.remoteAddress!.username!
if self.currentCall!.remoteAddress!.displayName != nil {
self.displayName = self.currentCall!.remoteAddress!.displayName!
} else if self.currentCall!.remoteAddress!.username != nil {
self.displayName = self.currentCall!.remoteAddress!.username!
}
}
//self.avatarModel = ???
self.micMutted = self.currentCall!.microphoneMuted
self.cameraDisplayed = self.currentCall!.cameraEnabled == true
}
}
}
@ -74,9 +81,9 @@ class CallViewModel: ObservableObject {
telecomManager.callStarted = false
}
coreContext.doOnCoreQueue { core in
if core.currentCall != nil {
self.telecomManager.terminateCall(call: core.currentCall!)
coreContext.doOnCoreQueue { _ in
if self.currentCall != nil {
self.telecomManager.terminateCall(call: self.currentCall!)
}
}
@ -90,23 +97,112 @@ class CallViewModel: ObservableObject {
}
coreContext.doOnCoreQueue { core in
if core.currentCall != nil {
self.telecomManager.acceptCall(core: core, call: core.currentCall!, hasVideo: false)
if self.currentCall != nil {
self.telecomManager.acceptCall(core: core, call: self.currentCall!, hasVideo: false)
}
}
timer.upstream.connect().cancel()
}
func muteCall() {
coreContext.doOnCoreQueue { core in
if core.currentCall != nil {
self.micMutted = !self.micMutted
core.currentCall!.microphoneMuted = self.micMutted
/// Toggles the microphone mute state of the current call.
///
/// Runs on the core queue; the `@Published` `micMutted` mirror is pushed back
/// on the main queue, matching how the init path publishes UI state
/// (see the `DispatchQueue.main.async` block in `init`).
func toggleMuteMicrophone() {
	coreContext.doOnCoreQueue { _ in
		if let call = self.currentCall {
			call.microphoneMuted = !call.microphoneMuted
			let muted = call.microphoneMuted
			// Publish UI state on the main thread (was published from the core queue).
			DispatchQueue.main.async {
				self.micMutted = muted
			}
			Log.info(
				"[CallViewModel] Microphone mute switch \(muted)"
			)
		}
	}
}
/// Enables or disables the video stream of the current call by flipping
/// `videoEnabled` on freshly created call params and sending a call update.
///
/// Runs on the core queue; `cameraDisplayed` is refreshed from the call's
/// `cameraEnabled` flag and published on the main queue.
func toggleVideo() {
	coreContext.doOnCoreQueue { core in
		if self.currentCall != nil {
			do {
				let params = try core.createCallParams(call: self.currentCall)
				params.videoEnabled = !params.videoEnabled
				Log.info(
					"[CallViewModel] Updating call with video enabled set to \(params.videoEnabled)"
				)
				try self.currentCall!.update(params: params)
				// Publish UI state on the main thread (was published from the core queue).
				let displayed = self.currentCall!.cameraEnabled == true
				DispatchQueue.main.async {
					self.cameraDisplayed = displayed
				}
			} catch {
				// Was a silent catch; surface the failure for diagnostics.
				Log.error("[CallViewModel] Failed to toggle video: \(error)")
			}
		}
	}
}
/// Switches capture to another physical camera (typically front <-> back).
///
/// Runs on the core queue. The "StaticImage: Static picture" placeholder
/// device is skipped. Only the FIRST alternative device is applied: the
/// previous `forEach` applied every non-current device in turn, so on a
/// setup with more than two cameras it ended on an arbitrary last device.
func switchCamera() {
	coreContext.doOnCoreQueue { core in
		let currentDevice = core.videoDevice
		Log.info("[CallViewModel] Current camera device is \(currentDevice)")
		if let newDevice = core.videoDevicesList.first(where: {
			$0 != currentDevice && $0 != "StaticImage: Static picture"
		}) {
			Log.info("[CallViewModel] New camera device will be \(newDevice)")
			do {
				try core.setVideodevice(newValue: newDevice)
			} catch {
				// Was a silent catch; surface the failure for diagnostics.
				Log.error("[CallViewModel] Failed to switch camera device: \(error)")
			}
		}
	}
}
/// Starts or stops the recording of the current call, based on the
/// `isRecording` flag of its current params. Runs on the core queue.
///
/// The previous version logged "Starting call recording" twice, the first
/// time interpolating the stale (pre-start) `isRecording` value; a single
/// unambiguous log line per transition is kept instead.
func toggleRecording() {
	coreContext.doOnCoreQueue { _ in
		if self.currentCall != nil && self.currentCall!.params != nil {
			if self.currentCall!.params!.isRecording {
				Log.info("[CallViewModel] Stopping call recording")
				self.currentCall!.stopRecording()
			} else {
				Log.info("[CallViewModel] Starting call recording")
				self.currentCall!.startRecording()
			}
		}
	}
}
/// Pauses the current call if it is active, or resumes it if it is
/// currently paused/pausing (as reported by `isCallPaused()`).
///
/// Runs on the core queue; requires a current call with a remote address
/// (used for logging). Failures from `pause()`/`resume()` are logged
/// instead of being silently swallowed as before.
func togglePause() {
	coreContext.doOnCoreQueue { _ in
		if self.currentCall != nil && self.currentCall!.remoteAddress != nil {
			do {
				if self.isCallPaused() {
					Log.info("[CallViewModel] Resuming call \(self.currentCall!.remoteAddress!.asStringUriOnly())")
					try self.currentCall!.resume()
				} else {
					Log.info("[CallViewModel] Pausing call \(self.currentCall!.remoteAddress!.asStringUriOnly())")
					try self.currentCall!.pause()
				}
			} catch {
				// Was `catch _ {}`; surface the failure for diagnostics.
				Log.error("[CallViewModel] Failed to toggle pause state: \(error)")
			}
		}
	}
}
/// Whether the current call is in a paused (or pausing) state.
/// Returns `false` when there is no current call.
private func isCallPaused() -> Bool {
	guard let call = self.currentCall else { return false }
	switch call.state {
	case Call.State.Paused, Call.State.Pausing:
		return true
	default:
		return false
	}
}
func counterToMinutes() -> String {
let currentTime = timeElapsed
let seconds = currentTime % 60

View file

@ -64,7 +64,6 @@ class ContactAvatarModel: ObservableObject {
}
func addSubscription() {
friendSuscription = self.friend?.publisher?.onPresenceReceived?.postOnMainQueue { (cbValue: (Friend)) in
print("publisherpublisher onLogCollectionUploadStateChanged \(cbValue.address?.asStringUriOnly() ?? "")")

View file

@ -0,0 +1,35 @@
/*
* Copyright (c) 2010-2020 Belledonne Communications SARL.
*
* This file is part of linphone-iphone
*
* This program is free software: you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License
* along with this program. If not, see <http://www.gnu.org/licenses/>.
*/
import SwiftUI
extension View {
	/// Conditionally wraps the view in the given transform.
	///
	/// - Parameters:
	///   - condition: When `true`, the transformed view is produced;
	///     otherwise the receiver is returned untouched.
	///   - transform: Closure building the modified view from `self`.
	/// - Returns: Either `transform(self)` or the unmodified `self`.
	@ViewBuilder func `if`<Transformed: View>(_ condition: Bool, transform: (Self) -> Transformed) -> some View {
		if !condition {
			self
		} else {
			transform(self)
		}
	}
}