Init audio route

This commit is contained in:
Benoit Martins 2023-12-28 16:53:47 +01:00
parent 63d83b13f6
commit 81448d8006
7 changed files with 456 additions and 94 deletions

View file

@ -155,8 +155,6 @@ final class CoreContext: ObservableObject {
}
self.mCore.publisher?.onLogCollectionUploadStateChanged?.postOnMainQueue { (cbValue: (_: Core, _: Core.LogCollectionUploadState, info: String)) in
print("publisherpublisher onLogCollectionUploadStateChanged")
if cbValue.info.starts(with: "https") {
UIPasteboard.general.setValue(
cbValue.info,

View file

@ -187,6 +187,9 @@
},
"Block the number" : {
},
"Bluetooth" : {
},
"Call history" : {
@ -280,6 +283,9 @@
},
"Dont save modifications?" : {
},
"Earpiece" : {
},
"Edit" : {
@ -504,6 +510,9 @@
},
"Skip" : {
},
"Speaker" : {
},
"Start" : {

View file

@ -399,10 +399,13 @@ class TelecomManager: ObservableObject {
}
}
/*
if speakerBeforePause {
speakerBeforePause = false
AudioRouteUtils.routeAudioToSpeaker(core: core)
}
*/
actionToFulFill?.fulfill()
actionToFulFill = nil
case .Paused:
@ -518,6 +521,11 @@ class TelecomManager: ObservableObject {
break
}
//AudioRouteUtils.isBluetoothAvailable(core: core)
//AudioRouteUtils.isHeadsetAudioRouteAvailable(core: core)
//AudioRouteUtils.isBluetoothAudioRouteAvailable(core: core)
/*
let readyForRoutechange = callkitAudioSessionActivated == nil || (callkitAudioSessionActivated == true)
if readyForRoutechange && (cstate == .IncomingReceived || cstate == .OutgoingInit || cstate == .Connected || cstate == .StreamsRunning) {
if (call.currentParams?.videoEnabled ?? false) && AudioRouteUtils.isReceiverEnabled(core: core) && call.conference == nil {
@ -527,6 +535,7 @@ class TelecomManager: ObservableObject {
AudioRouteUtils.routeAudioToBluetooth(core: core, call: call)
}
}
*/
}
// post Notification kLinphoneCallUpdate
NotificationCenter.default.post(name: Notification.Name("LinphoneCallUpdate"), object: self, userInfo: [

View file

@ -17,33 +17,43 @@
* along with this program. If not, see <http://www.gnu.org/licenses/>.
*/
// swiftlint:disable type_body_length
import SwiftUI
import CallKit
import AVFAudio
struct CallView: View {
@ObservedObject private var coreContext = CoreContext.shared
@ObservedObject private var telecomManager = TelecomManager.shared
@ObservedObject private var contactsManager = ContactsManager.shared
@ObservedObject var callViewModel: CallViewModel
@ObservedObject var callViewModel: CallViewModel
private var idiom: UIUserInterfaceIdiom { UIDevice.current.userInterfaceIdiom }
@State private var orientation = UIDevice.current.orientation
let pub = NotificationCenter.default.publisher(for: AVAudioSession.routeChangeNotification)
@State var startDate = Date.now
@State var timeElapsed: Int = 0
@State var micMutted: Bool = false
let timer = Timer.publish(every: 1, on: .main, in: .common).autoconnect()
@State var audioRouteIsSpeaker: Bool = false
@State var audioRouteSheet: Bool = false
@State var hideButtonsSheet: Bool = false
@State var options: Int = 1
@State var imageAudioRoute: String = ""
var body: some View {
GeometryReader { geo in
if #available(iOS 16.4, *) {
innerView()
.sheet(isPresented: $telecomManager.callStarted) {
innerView(geoHeight: geo.size.height)
.sheet(isPresented: .constant(telecomManager.callStarted && !hideButtonsSheet && idiom != .pad && !(orientation == .landscapeLeft || orientation == .landscapeRight
|| UIScreen.main.bounds.size.width > UIScreen.main.bounds.size.height))) {
GeometryReader { _ in
VStack(spacing: 0) {
HStack(spacing: 12) {
Button {
terminateCall()
callViewModel.terminateCall()
} label: {
Image("phone-disconnect")
.renderingMode(.template)
@ -72,26 +82,55 @@ struct CallView: View {
.cornerRadius(40)
Button {
muteCall()
callViewModel.muteCall()
} label: {
Image(micMutted ? "microphone-slash" : "microphone")
Image(callViewModel.micMutted ? "microphone-slash" : "microphone")
.renderingMode(.template)
.resizable()
.foregroundStyle(micMutted ? .black : .white)
.foregroundStyle(callViewModel.micMutted ? .black : .white)
.frame(width: 32, height: 32)
}
.frame(width: 60, height: 60)
.background(micMutted ? .white : Color.gray500)
.background(callViewModel.micMutted ? .white : Color.gray500)
.cornerRadius(40)
Button {
options = callViewModel.getAudioRoute()
print("audioRouteIsSpeakeraudioRouteIsSpeaker output \(AVAudioSession.sharedInstance().currentRoute.outputs)")
print("audioRouteIsSpeakeraudioRouteIsSpeaker inputs \(AVAudioSession.sharedInstance().availableInputs?.count)")
if AVAudioSession.sharedInstance().availableInputs != nil
&& !AVAudioSession.sharedInstance().availableInputs!.filter({ $0.portType.rawValue.contains("Bluetooth") }).isEmpty {
hideButtonsSheet = true
DispatchQueue.global().asyncAfter(deadline: .now() + 0.5) {
audioRouteSheet = true
}
} else {
audioRouteIsSpeaker = !audioRouteIsSpeaker
do {
try AVAudioSession.sharedInstance().overrideOutputAudioPort(audioRouteIsSpeaker ? .speaker : .none)
} catch _ {
}
}
} label: {
Image("speaker-high")
Image(imageAudioRoute)
.renderingMode(.template)
.resizable()
.foregroundStyle(.white)
.frame(width: 32, height: 32)
.onAppear(perform: getAudioRouteImage)
.onReceive(pub) { (output) in
self.getAudioRouteImage()
}
}
.frame(width: 60, height: 60)
@ -263,18 +302,129 @@ struct CallView: View {
Spacer()
}
.frame(maxHeight: .infinity, alignment: .top)
.background(.black)
.presentationBackground(.black)
.presentationDetents([.fraction(0.1), .medium])
.interactiveDismissDisabled()
.presentationBackgroundInteraction(.enabled)
}
}
.sheet(isPresented: $audioRouteSheet, onDismiss: {
audioRouteSheet = false
hideButtonsSheet = false
}) {
VStack(spacing: 0) {
Button(action: {
options = 1
audioRouteIsSpeaker = false
do {
try AVAudioSession.sharedInstance().overrideOutputAudioPort(audioRouteIsSpeaker ? .speaker : .none)
try AVAudioSession.sharedInstance().setCategory(.playAndRecord, mode: .voiceChat, options: .defaultToSpeaker)
try AVAudioSession.sharedInstance().setActive(true)
} catch _ {
}
}, label: {
HStack {
Image(options == 1 ? "radio-button-fill" : "radio-button")
.renderingMode(.template)
.resizable()
.foregroundStyle(.white)
.frame(width: 25, height: 25, alignment: .leading)
Text("Earpiece")
.default_text_style_white(styleSize: 15)
Spacer()
Image("ear")
.renderingMode(.template)
.resizable()
.foregroundStyle(.white)
.frame(width: 25, height: 25, alignment: .leading)
}
})
.frame(maxHeight: .infinity)
Button(action: {
options = 2
audioRouteIsSpeaker = true
do {
try AVAudioSession.sharedInstance().overrideOutputAudioPort(audioRouteIsSpeaker ? .speaker : .none)
} catch _ {
}
}, label: {
HStack {
Image(options == 2 ? "radio-button-fill" : "radio-button")
.renderingMode(.template)
.resizable()
.foregroundStyle(.white)
.frame(width: 25, height: 25, alignment: .leading)
Text("Speaker")
.default_text_style_white(styleSize: 15)
Spacer()
Image("speaker-high")
.renderingMode(.template)
.resizable()
.foregroundStyle(.white)
.frame(width: 25, height: 25, alignment: .leading)
}
})
.frame(maxHeight: .infinity)
Button(action: {
options = 3
audioRouteIsSpeaker = false
do {
try AVAudioSession.sharedInstance().overrideOutputAudioPort(audioRouteIsSpeaker ? .speaker : .none)
try AVAudioSession.sharedInstance().setCategory(.playAndRecord, mode: .voiceChat, options: .allowBluetooth)
try AVAudioSession.sharedInstance().setActive(true)
} catch _ {
}
}, label: {
HStack {
Image(options == 3 ? "radio-button-fill" : "radio-button")
.renderingMode(.template)
.resizable()
.foregroundStyle(.white)
.frame(width: 25, height: 25, alignment: .leading)
Text("Bluetooth")
.default_text_style_white(styleSize: 15)
Spacer()
Image("bluetooth")
.renderingMode(.template)
.resizable()
.foregroundStyle(.white)
.frame(width: 25, height: 25, alignment: .leading)
}
})
.frame(maxHeight: .infinity)
}
.padding(.horizontal, 20)
.presentationBackground(Color.gray600)
.presentationDetents([.fraction(0.3)])
.frame(maxHeight: .infinity)
}
}
}
}
@ViewBuilder
func innerView() -> some View {
func innerView(geoHeight: CGFloat) -> some View {
VStack {
Rectangle()
.foregroundColor(Color.orangeMain500)
@ -369,9 +519,9 @@ struct CallView: View {
.frame(width: 20, height: 20)
.padding(.top, 100)
Text(counterToMinutes())
.onReceive(timer) { firedDate in
timeElapsed = Int(firedDate.timeIntervalSince(startDate))
Text(callViewModel.counterToMinutes())
.onReceive(callViewModel.timer) { firedDate in
callViewModel.timeElapsed = Int(firedDate.timeIntervalSince(startDate))
}
.padding(.top)
@ -388,20 +538,84 @@ struct CallView: View {
.padding(.horizontal, 4)
if telecomManager.callStarted {
HStack(spacing: 12) {
HStack {
}
.frame(height: 60)
}
.padding(.horizontal, 25)
.padding(.top, 20)
if telecomManager.callStarted && idiom != .pad && !(orientation == .landscapeLeft || orientation == .landscapeRight
|| UIScreen.main.bounds.size.width > UIScreen.main.bounds.size.height) {
HStack(spacing: 12) {
HStack {
}
.frame(height: 60)
}
.padding(.horizontal, 25)
.padding(.top, 20)
} else {
HStack(spacing: 12) {
Button {
callViewModel.terminateCall()
} label: {
Image("phone-disconnect")
.renderingMode(.template)
.resizable()
.foregroundStyle(.white)
.frame(width: 32, height: 32)
}
.frame(width: 90, height: 60)
.background(Color.redDanger500)
.cornerRadius(40)
Spacer()
Button {
} label: {
Image("video-camera")
.renderingMode(.template)
.resizable()
.foregroundStyle(.white)
.frame(width: 32, height: 32)
}
.frame(width: 60, height: 60)
.background(Color.gray500)
.cornerRadius(40)
Button {
callViewModel.muteCall()
} label: {
Image(callViewModel.micMutted ? "microphone-slash" : "microphone")
.renderingMode(.template)
.resizable()
.foregroundStyle(callViewModel.micMutted ? .black : .white)
.frame(width: 32, height: 32)
}
.frame(width: 60, height: 60)
.background(callViewModel.micMutted ? .white : Color.gray500)
.cornerRadius(40)
Button {
} label: {
Image("speaker-high")
.renderingMode(.template)
.resizable()
.foregroundStyle(.white)
.frame(width: 32, height: 32)
}
.frame(width: 60, height: 60)
.background(Color.gray500)
.cornerRadius(40)
}
.frame(height: geoHeight * 0.15)
.padding(.horizontal, 20)
}
} else {
HStack(spacing: 12) {
HStack {
Spacer()
Button {
terminateCall()
callViewModel.terminateCall()
} label: {
Image("phone-disconnect")
.renderingMode(.template)
@ -415,7 +629,7 @@ struct CallView: View {
.cornerRadius(40)
Button {
acceptCall()
callViewModel.acceptCall()
} label: {
Image("phone")
.renderingMode(.template)
@ -439,60 +653,24 @@ struct CallView: View {
.frame(maxWidth: .infinity, maxHeight: .infinity)
.background(Color.gray900)
}
/// Ends the current call: hides the in-call UI with an animation, asks the
/// core (on its own queue) to terminate the active call, and stops the
/// elapsed-time timer publisher.
func terminateCall() {
    withAnimation {
        telecomManager.callInProgress = false
        telecomManager.callStarted = false
    }
    coreContext.doOnCoreQueue { core in
        guard let activeCall = core.currentCall else { return }
        telecomManager.terminateCall(call: activeCall)
    }
    // No more ticks are needed once the call is over.
    timer.upstream.connect().cancel()
}
/// Accepts the incoming call: shows the in-call UI with an animation and asks
/// the core (on its own queue) to answer the current call without video.
func acceptCall() {
    withAnimation {
        telecomManager.callInProgress = true
        telecomManager.callStarted = true
    }
    coreContext.doOnCoreQueue { core in
        guard let incomingCall = core.currentCall else { return }
        telecomManager.acceptCall(core: core, call: incomingCall, hasVideo: false)
    }
    // NOTE(review): cancelling the timer when ACCEPTING a call looks suspicious —
    // the call-duration counter presumably should keep running. Kept as-is to
    // preserve behavior; confirm against the duration display.
    timer.upstream.connect().cancel()
}
/// Toggles the local microphone mute state and applies it to the active call,
/// if there is one. Runs on the core queue.
func muteCall() {
    coreContext.doOnCoreQueue { core in
        guard let activeCall = core.currentCall else { return }
        micMutted.toggle()
        activeCall.microphoneMuted = micMutted
    }
}
/// Formats `timeElapsed` (seconds) as "MM:SS", switching to "HH:MM:SS" once
/// the call passes one hour.
/// - Returns: The zero-padded elapsed-time string.
func counterToMinutes() -> String {
    let currentTime = timeElapsed
    let seconds = currentTime % 60
    // Minutes must wrap at 60 once hours are shown: the previous
    // `currentTime / 60` produced values like "01:61:05" after an hour.
    let minutes = String(format: "%02d", (currentTime % 3600) / 60)
    let hours = String(format: "%02d", currentTime / 3600)
    if currentTime >= 3600 {
        return "\(hours):\(minutes):\(seconds < 10 ? "0" : "")\(seconds)"
    } else {
        return "\(minutes):\(seconds < 10 ? "0" : "")\(seconds)"
    }
}
/// Refreshes `imageAudioRoute` to the icon matching the current audio output:
/// built-in speaker, a Bluetooth device, a wired headset (no receiver in the
/// route), or the muted-speaker fallback (receiver/earpiece).
func getAudioRouteImage() {
    print("getAudioRouteImagegetAudioRouteImage")
    let outputs = AVAudioSession.sharedInstance().currentRoute.outputs
    if !outputs.filter({ $0.portType.rawValue == "Speaker" }).isEmpty {
        imageAudioRoute = "speaker-high"
    } else if !outputs.filter({ $0.portType.rawValue.contains("Bluetooth") }).isEmpty {
        imageAudioRoute = "bluetooth"
    } else if outputs.filter({ $0.portType.rawValue == "Receiver" }).isEmpty {
        imageAudioRoute = "headset"
    } else {
        imageAudioRoute = "speaker-slash"
    }
}
}
#Preview {
CallView(callViewModel: CallViewModel())
CallView(callViewModel: CallViewModel())
}
// swiftlint:enable type_body_length

View file

@ -17,7 +17,9 @@
* along with this program. If not, see <http://www.gnu.org/licenses/>.
*/
import SwiftUI
import linphonesw
import AVFAudio
class CallViewModel: ObservableObject {
@ -29,8 +31,14 @@ class CallViewModel: ObservableObject {
@Published var remoteAddressString: String = "example.linphone@sip.linphone.org"
@Published var remoteAddress: Address?
@Published var avatarModel: ContactAvatarModel?
@Published var audioSessionImage: String = ""
@State var micMutted: Bool = false
@State var timeElapsed: Int = 0
let timer = Timer.publish(every: 1, on: .main, in: .common).autoconnect()
init() {
setupNotifications()
coreContext.doOnCoreQueue { core in
if core.currentCall != nil && core.currentCall!.remoteAddress != nil {
DispatchQueue.main.async {
@ -52,4 +60,123 @@ class CallViewModel: ObservableObject {
}
}
}
/// Ends the current call: hides the in-call UI with an animation, asks the
/// core (on its own queue) to terminate the active call, and stops the
/// elapsed-time timer publisher.
func terminateCall() {
    withAnimation {
        telecomManager.callInProgress = false
        telecomManager.callStarted = false
    }
    coreContext.doOnCoreQueue { core in
        guard let activeCall = core.currentCall else { return }
        self.telecomManager.terminateCall(call: activeCall)
    }
    // No more ticks are needed once the call is over.
    timer.upstream.connect().cancel()
}
/// Accepts the incoming call: shows the in-call UI with an animation and asks
/// the core (on its own queue) to answer the current call without video.
func acceptCall() {
    withAnimation {
        telecomManager.callInProgress = true
        telecomManager.callStarted = true
    }
    coreContext.doOnCoreQueue { core in
        guard let incomingCall = core.currentCall else { return }
        self.telecomManager.acceptCall(core: core, call: incomingCall, hasVideo: false)
    }
    // NOTE(review): cancelling the timer when ACCEPTING a call looks suspicious —
    // the call-duration counter presumably should keep running. Kept as-is to
    // preserve behavior; confirm against the duration display.
    timer.upstream.connect().cancel()
}
/// Toggles the local microphone mute state and applies it to the active call,
/// if there is one.
/// NOTE(review): `micMutted` is mutated here on the core queue, not the main
/// thread — if it ever drives SwiftUI (it is declared `@State` on this class,
/// which is itself unusual for an ObservableObject), confirm the update is
/// published on the main queue.
func muteCall() {
coreContext.doOnCoreQueue { core in
// Only act when a call is actually in progress.
if core.currentCall != nil {
self.micMutted = !self.micMutted
core.currentCall!.microphoneMuted = self.micMutted
}
}
}
/// Formats `timeElapsed` (seconds) as "MM:SS", switching to "HH:MM:SS" once
/// the call passes one hour.
/// - Returns: The zero-padded elapsed-time string.
func counterToMinutes() -> String {
    let currentTime = timeElapsed
    let seconds = currentTime % 60
    // Minutes must wrap at 60 once hours are shown: the previous
    // `currentTime / 60` produced values like "01:61:05" after an hour.
    let minutes = String(format: "%02d", (currentTime % 3600) / 60)
    let hours = String(format: "%02d", currentTime / 3600)
    if currentTime >= 3600 {
        return "\(hours):\(minutes):\(seconds < 10 ? "0" : "")\(seconds)"
    } else {
        return "\(minutes):\(seconds < 10 ? "0" : "")\(seconds)"
    }
}
/// Intended to register this view model for audio-route-change notifications.
/// Currently a no-op: both observer registrations below are commented out, so
/// `handleRouteChange(notification:)` is never invoked.
/// TODO(review): either wire up the `AVAudioSession.routeChangeNotification`
/// observer or remove this dead scaffolding.
func setupNotifications() {
/*
notifCenter.addObserver(self,
selector: #selector(handleRouteChange),
name: AVAudioSession.routeChangeNotification,
object: nil)
*/
//NotificationCenter.default.addObserver(self, selector: Selector(("handleRouteChange")), name: UITextView.textDidChangeNotification, object: nil)
}
/// Reacts to audio-session route changes (headset plugged/unplugged, Bluetooth
/// device connected/disconnected) by refreshing `audioSessionImage` with the
/// icon matching the new output route.
/// - Parameter notification: An `AVAudioSession.routeChangeNotification`.
func handleRouteChange(notification: Notification) {
    guard let userInfo = notification.userInfo,
          let reasonValue = userInfo[AVAudioSessionRouteChangeReasonKey] as? UInt,
          let reason = AVAudioSession.RouteChangeReason(rawValue: reasonValue) else {
        return
    }
    // Switch over the route change reason.
    switch reason {
    case .newDeviceAvailable, .oldDeviceUnavailable: // A device was added or removed.
        print("handleRouteChangehandleRouteChange handleRouteChange")
        // Bug fix: the original code built this image name in a bare ternary
        // expression and discarded the result, so the published property never
        // changed. Compute it and actually assign it.
        let outputs = AVAudioSession.sharedInstance().currentRoute.outputs
        let image: String
        if !outputs.filter({ $0.portType.rawValue == "Speaker" }).isEmpty {
            image = "speaker-high"
        } else if !outputs.filter({ $0.portType.rawValue.contains("Bluetooth") }).isEmpty {
            image = "bluetooth"
        } else if outputs.filter({ $0.portType.rawValue == "Receiver" }).isEmpty {
            image = "headset"
        } else {
            image = "speaker-slash"
        }
        // Route-change notifications are not guaranteed to arrive on the main
        // thread; publish the UI-facing value there.
        DispatchQueue.main.async {
            self.audioSessionImage = image
        }
    default: ()
    }
}
/// Returns `true` when the given audio route contains a wired-headphones output.
/// - Parameter routeDescription: The route to inspect (e.g. the session's `currentRoute`).
func hasHeadphones(in routeDescription: AVAudioSessionRouteDescription) -> Bool {
    return routeDescription.outputs.contains(where: { $0.portType == .headphones })
}
/// Maps the current audio output route to the route-picker option index:
/// 2 = built-in speaker, 3 = a Bluetooth device, 1 = everything else
/// (earpiece/receiver or wired headset).
func getAudioRoute() -> Int {
    let outputs = AVAudioSession.sharedInstance().currentRoute.outputs
    if !outputs.filter({ $0.portType.rawValue == "Speaker" }).isEmpty {
        return 2
    }
    if !outputs.filter({ $0.portType.rawValue.contains("Bluetooth") }).isEmpty {
        return 3
    }
    return 1
}
}

View file

@ -511,19 +511,56 @@ struct ContentView: View {
}
if isShowStartCallFragment {
StartCallFragment(
startCallViewModel: startCallViewModel,
isShowStartCallFragment: $isShowStartCallFragment,
showingDialer: $showingDialer
)
.zIndex(3)
.transition(.move(edge: .bottom))
.halfSheet(showSheet: $showingDialer) {
DialerBottomSheet(
if #available(iOS 16.4, *) {
if idiom != .pad {
StartCallFragment(
startCallViewModel: startCallViewModel,
isShowStartCallFragment: $isShowStartCallFragment,
showingDialer: $showingDialer
)
.zIndex(3)
.transition(.move(edge: .bottom))
.sheet(isPresented: $showingDialer) {
DialerBottomSheet(
startCallViewModel: startCallViewModel,
showingDialer: $showingDialer
)
.presentationDetents([.medium])
//.interactiveDismissDisabled()
.presentationBackgroundInteraction(.enabled(upThrough: .medium))
}
} else {
StartCallFragment(
startCallViewModel: startCallViewModel,
isShowStartCallFragment: $isShowStartCallFragment,
showingDialer: $showingDialer
)
.zIndex(3)
.transition(.move(edge: .bottom))
.halfSheet(showSheet: $showingDialer) {
DialerBottomSheet(
startCallViewModel: startCallViewModel,
showingDialer: $showingDialer
)
} onDismiss: {}
}
} else {
StartCallFragment(
startCallViewModel: startCallViewModel,
isShowStartCallFragment: $isShowStartCallFragment,
showingDialer: $showingDialer
)
} onDismiss: {}
.zIndex(3)
.transition(.move(edge: .bottom))
.halfSheet(showSheet: $showingDialer) {
DialerBottomSheet(
startCallViewModel: startCallViewModel,
showingDialer: $showingDialer
)
} onDismiss: {}
}
}
if isShowDeleteContactPopup {
@ -624,7 +661,7 @@ struct ContentView: View {
}
if telecomManager.callInProgress {
CallView(callViewModel: CallViewModel())
CallView(callViewModel: CallViewModel())
.zIndex(3)
.transition(.scale.combined(with: .move(edge: .top)))
}

View file

@ -156,6 +156,8 @@ struct StartCallFragment: View {
}
ContactsListFragment(contactViewModel: ContactViewModel(), contactsListViewModel: ContactsListViewModel(), showingSheet: .constant(false), startCallFunc: { addr in
showingDialer = false
DispatchQueue.global().asyncAfter(deadline: .now() + 0.2) {
magicSearch.searchForContacts(
sourceFlags: MagicSearch.Source.Friends.rawValue | MagicSearch.Source.LdapServers.rawValue)
@ -236,6 +238,8 @@ struct StartCallFragment: View {
}
}
.onTapGesture {
showingDialer = false
DispatchQueue.global().asyncAfter(deadline: .now() + 0.2) {
magicSearch.searchForContacts(
sourceFlags: MagicSearch.Source.Friends.rawValue | MagicSearch.Source.LdapServers.rawValue)