mirror of https://gitlab.linphone.org/BC/public/linphone-iphone.git
Add voice recording feature

parent 68f740658b
commit 1693c21e2e

15 changed files with 892 additions and 233 deletions
Linphone/Assets.xcassets/pause-fill.imageset/Contents.json  (new file, vendored, +21)
@@ -0,0 +1,21 @@
+{
+  "images" : [
+    {
+      "filename" : "pause-fill.svg",
+      "idiom" : "universal",
+      "scale" : "1x"
+    },
+    {
+      "idiom" : "universal",
+      "scale" : "2x"
+    },
+    {
+      "idiom" : "universal",
+      "scale" : "3x"
+    }
+  ],
+  "info" : {
+    "author" : "xcode",
+    "version" : 1
+  }
+}

Linphone/Assets.xcassets/pause-fill.imageset/pause-fill.svg  (new file, vendored, +1, 311 B)
@@ -0,0 +1 @@
+<svg xmlns="http://www.w3.org/2000/svg" width="32" height="32" fill="#000000" viewBox="0 0 256 256"><path d="M216,48V208a16,16,0,0,1-16,16H160a16,16,0,0,1-16-16V48a16,16,0,0,1,16-16h40A16,16,0,0,1,216,48ZM96,32H56A16,16,0,0,0,40,48V208a16,16,0,0,0,16,16H96a16,16,0,0,0,16-16V48A16,16,0,0,0,96,32Z"></path></svg>
Linphone/Assets.xcassets/stop-fill.imageset/Contents.json  (new file, vendored, +21)
@@ -0,0 +1,21 @@
+{
+  "images" : [
+    {
+      "filename" : "stop-fill.svg",
+      "idiom" : "universal",
+      "scale" : "1x"
+    },
+    {
+      "idiom" : "universal",
+      "scale" : "2x"
+    },
+    {
+      "idiom" : "universal",
+      "scale" : "3x"
+    }
+  ],
+  "info" : {
+    "author" : "xcode",
+    "version" : 1
+  }
+}

Linphone/Assets.xcassets/stop-fill.imageset/stop-fill.svg  (new file, vendored, +1, 219 B)
@@ -0,0 +1 @@
+<svg xmlns="http://www.w3.org/2000/svg" width="32" height="32" fill="#000000" viewBox="0 0 256 256"><path d="M216,56V200a16,16,0,0,1-16,16H56a16,16,0,0,1-16-16V56A16,16,0,0,1,56,40H200A16,16,0,0,1,216,56Z"></path></svg>
@@ -81,7 +81,7 @@ struct ContactsInnerFragment: View {
                 showingSheet: $showingSheet, startCallFunc: {_ in })}
             .safeAreaInset(edge: .top, content: {
                 Spacer()
-                    .frame(height: 14)
+                    .frame(height: 12)
             })
             .listStyle(.plain)
             .overlay(
@@ -33,7 +33,6 @@ struct ChatBubbleView: View {
     @State private var ticker = Ticker()
     @State private var isPressed: Bool = false
     @State private var timePassed: TimeInterval?
-    @State private var sliderValue: Double = 0.5
     
     var body: some View {
         HStack {

@@ -160,8 +159,8 @@ struct ChatBubbleView: View {
 
             if !eventLogMessage.message.text.isEmpty {
                 Text(eventLogMessage.message.text)
-                    .foregroundStyle(Color.grayMain2c700)
-                    .default_text_style(styleSize: 16)
+                    .foregroundStyle(Color.grayMain2c700)
+                    .default_text_style(styleSize: 16)
             }
 
             HStack(alignment: .center) {

@@ -397,14 +396,10 @@ struct ChatBubbleView: View {
                         .clipped()
                 } else if eventLogMessage.message.attachments.first!.type == .voiceRecording {
                     CustomSlider(
-                        value: $sliderValue,
-                        range: 0...1,
-                        thumbColor: .blue,
-                        trackColor: .gray,
-                        trackHeight: 8,
-                        cornerRadius: 10
+                        conversationViewModel: conversationViewModel,
+                        eventLogMessage: eventLogMessage
                     )
                     .padding()
                     .frame(width: geometryProxy.size.width - 160, height: 50)
                 } else {
                     HStack {
                         VStack {

@@ -609,6 +604,122 @@ extension View {
     }
 }
 
+struct CustomSlider: View {
+    @ObservedObject var conversationViewModel: ConversationViewModel
+    
+    let eventLogMessage: EventLogMessage
+    
+    @State private var value: Double = 0.0
+    @State private var isPlaying: Bool = false
+    @State private var timer: Timer?
+    
+    var minTrackColor: Color = .white.opacity(0.5)
+    var maxTrackGradient: Gradient = Gradient(colors: [Color.orangeMain300, Color.orangeMain500])
+    
+    var body: some View {
+        GeometryReader { geometry in
+            let radius = geometry.size.height * 0.5
+            ZStack(alignment: .leading) {
+                LinearGradient(
+                    gradient: maxTrackGradient,
+                    startPoint: .leading,
+                    endPoint: .trailing
+                )
+                .frame(width: geometry.size.width, height: geometry.size.height)
+                HStack {
+                    Rectangle()
+                        .foregroundColor(minTrackColor)
+                        .frame(width: self.value * geometry.size.width / 100, height: geometry.size.height)
+                        .animation(self.value > 0 ? .linear(duration: 0.1) : nil, value: self.value)
+                }
+                
+                HStack {
+                    Button(
+                        action: {
+                            if isPlaying {
+                                conversationViewModel.pauseVoiceRecordPlayer()
+                                pauseProgress()
+                            } else {
+                                conversationViewModel.startVoiceRecordPlayer(voiceRecordPath: eventLogMessage.message.attachments.first!.full)
+                                playProgress()
+                            }
+                        },
+                        label: {
+                            Image(isPlaying ? "pause-fill" : "play-fill")
+                                .renderingMode(.template)
+                                .resizable()
+                                .foregroundStyle(Color.orangeMain500)
+                                .frame(width: 20, height: 20)
+                        }
+                    )
+                    .padding(8)
+                    .background(.white)
+                    .clipShape(RoundedRectangle(cornerRadius: 25))
+                    
+                    Spacer()
+                    
+                    HStack {
+                        Text((eventLogMessage.message.attachments.first!.duration/1000).convertDurationToString())
+                            .default_text_style(styleSize: 16)
+                            .padding(.horizontal, 5)
+                    }
+                    .padding(8)
+                    .background(.white)
+                    .clipShape(RoundedRectangle(cornerRadius: 25))
+                }
+                .padding(.horizontal, 10)
+            }
+            .clipShape(RoundedRectangle(cornerRadius: radius))
+            .onDisappear {
+                resetProgress()
+            }
+        }
+    }
+    
+    private func playProgress() {
+        isPlaying = true
+        self.value = conversationViewModel.getPositionVoiceRecordPlayer(voiceRecordPath: eventLogMessage.message.attachments.first!.full)
+        timer = Timer.scheduledTimer(withTimeInterval: 0.1, repeats: true) { _ in
+            if self.value < 100.0 {
+                let valueTmp = conversationViewModel.getPositionVoiceRecordPlayer(voiceRecordPath: eventLogMessage.message.attachments.first!.full)
+                if self.value > 90 && self.value == valueTmp {
+                    self.value = 100
+                } else {
+                    if valueTmp == 0 && !conversationViewModel.isPlayingVoiceRecordPlayer(voiceRecordPath: eventLogMessage.message.attachments.first!.full) {
+                        stopProgress()
+                        value = 0.0
+                        isPlaying = false
+                    } else {
+                        self.value = valueTmp
+                    }
+                }
+            } else {
+                resetProgress()
+            }
+        }
+    }
+    
+    // Pause the progress
+    private func pauseProgress() {
+        isPlaying = false
+        stopProgress()
+    }
+    
+    // Reset the progress
+    private func resetProgress() {
+        conversationViewModel.stopVoiceRecordPlayer()
+        stopProgress()
+        value = 0.0
+        isPlaying = false
+    }
+    
+    // Stop the progress and invalidate the timer
+    private func stopProgress() {
+        timer?.invalidate()
+        timer = nil
+    }
+}
+
 /*
 #Preview {
     ChatBubbleView(conversationViewModel: ConversationViewModel(), index: 0)
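Note: CustomSlider drives its progress bar by polling rather than by observing the player. A 100 ms Timer samples getPositionVoiceRecordPlayer (a 0-100 percentage), snaps to 100 when a sample above 90 stops changing, and resets when the position reads 0 while the player reports it is no longer playing. A minimal, self-contained sketch of that strategy, where PositionSource is a hypothetical stand-in for the view model's player accessors:

import Foundation

protocol PositionSource {
    func position() -> Double   // playback position as a percentage, 0...100
    func isPlaying() -> Bool
}

final class ProgressPoller {
    private(set) var value: Double = 0
    private var timer: Timer?
    private let source: PositionSource

    init(source: PositionSource) { self.source = source }

    func start() {
        value = source.position()
        timer = Timer.scheduledTimer(withTimeInterval: 0.1, repeats: true) { [weak self] _ in
            guard let self = self else { return }
            let sample = self.source.position()
            if self.value > 90 && sample == self.value {
                self.value = 100                     // stalled near the end: treat as finished
            } else if sample == 0 && !self.source.isPlaying() {
                self.stop()                          // playback ended and the player rewound
                self.value = 0
            } else {
                self.value = sample
            }
        }
    }

    func stop() {
        timer?.invalidate()
        timer = nil
    }
}

Polling keeps the SwiftUI view decoupled from liblinphone callbacks, at the cost of a small, bounded UI lag.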
@@ -50,6 +50,7 @@ struct ConversationFragment: View {
     @State private var isShowCamera = false
     
     @State private var mediasIsLoading = false
+    @State private var voiceRecordingInProgress = false
     
     @State private var isShowConversationForwardMessageFragment = false
     

@@ -102,6 +103,7 @@ struct ConversationFragment: View {
                 ImagePicker(conversationViewModel: conversationViewModel, selectedMedia: self.$conversationViewModel.mediasToSend)
                     .edgesIgnoringSafeArea(.all)
             }
+            .background(Color.gray100.ignoresSafeArea(.keyboard))
         } else {
             innerView(geometry: geometry)
                 .background(.white)

@@ -141,6 +143,7 @@ struct ConversationFragment: View {
             .fullScreenCover(isPresented: $isShowCamera) {
                 ImagePicker(conversationViewModel: conversationViewModel, selectedMedia: self.$conversationViewModel.mediasToSend)
             }
+            .background(Color.gray100.ignoresSafeArea(.keyboard))
         }
     }
 }

@@ -513,117 +516,123 @@ struct ConversationFragment: View {
             }
             
-            HStack(spacing: 0) {
-                Button {
-                } label: {
-                    Image("smiley")
-                        .renderingMode(.template)
-                        .resizable()
-                        .foregroundStyle(Color.grayMain2c500)
-                        .frame(width: 28, height: 28, alignment: .leading)
-                        .padding(.all, 6)
-                        .padding(.top, 4)
-                }
-                .padding(.horizontal, isMessageTextFocused ? 0 : 2)
-                
-                Button {
-                    self.isShowPhotoLibrary = true
-                    self.mediasIsLoading = true
-                } label: {
-                    Image("paperclip")
-                        .renderingMode(.template)
-                        .resizable()
-                        .foregroundStyle(conversationViewModel.maxMediaCount <= conversationViewModel.mediasToSend.count || mediasIsLoading ? Color.grayMain2c300 : Color.grayMain2c500)
-                        .frame(width: isMessageTextFocused ? 0 : 28, height: isMessageTextFocused ? 0 : 28, alignment: .leading)
-                        .padding(.all, isMessageTextFocused ? 0 : 6)
-                        .padding(.top, 4)
-                        .disabled(conversationViewModel.maxMediaCount <= conversationViewModel.mediasToSend.count || mediasIsLoading)
-                }
-                .padding(.horizontal, isMessageTextFocused ? 0 : 2)
-                
-                Button {
-                    self.isShowCamera = true
-                } label: {
-                    Image("camera")
-                        .renderingMode(.template)
-                        .resizable()
-                        .foregroundStyle(conversationViewModel.maxMediaCount <= conversationViewModel.mediasToSend.count || mediasIsLoading ? Color.grayMain2c300 : Color.grayMain2c500)
-                        .frame(width: isMessageTextFocused ? 0 : 28, height: isMessageTextFocused ? 0 : 28, alignment: .leading)
-                        .padding(.all, isMessageTextFocused ? 0 : 6)
-                        .padding(.top, 4)
-                        .disabled(conversationViewModel.maxMediaCount <= conversationViewModel.mediasToSend.count || mediasIsLoading)
-                }
-                .padding(.horizontal, isMessageTextFocused ? 0 : 2)
-                
-                HStack {
-                    if #available(iOS 16.0, *) {
-                        TextField("Say something...", text: $conversationViewModel.messageText, axis: .vertical)
-                            .default_text_style(styleSize: 15)
-                            .focused($isMessageTextFocused)
-                            .padding(.vertical, 5)
-                    } else {
-                        ZStack(alignment: .leading) {
-                            TextEditor(text: $conversationViewModel.messageText)
-                                .multilineTextAlignment(.leading)
-                                .frame(maxHeight: 160)
-                                .fixedSize(horizontal: false, vertical: true)
-                                .default_text_style(styleSize: 15)
-                                .focused($isMessageTextFocused)
-                            
-                            if conversationViewModel.messageText.isEmpty {
-                                Text("Say something...")
-                                    .padding(.leading, 4)
-                                    .opacity(conversationViewModel.messageText.isEmpty ? 1 : 0)
-                                    .foregroundStyle(Color.gray300)
-                                    .default_text_style(styleSize: 15)
-                            }
-                        }
-                        .onTapGesture {
-                            isMessageTextFocused = true
-                        }
-                    }
-                }
-                
-                if conversationViewModel.messageText.isEmpty && conversationViewModel.mediasToSend.isEmpty {
-                    Button {
-                    } label: {
-                        Image("microphone")
-                            .renderingMode(.template)
-                            .resizable()
-                            .foregroundStyle(Color.grayMain2c500)
-                            .frame(width: 28, height: 28, alignment: .leading)
-                            .padding(.all, 6)
-                            .padding(.top, 4)
-                    }
-                } else {
-                    Button {
-                        if conversationViewModel.displayedConversationHistorySize > 0 {
-                            NotificationCenter.default.post(name: .onScrollToBottom, object: nil)
-                        }
-                        conversationViewModel.sendMessage()
-                    } label: {
-                        Image("paper-plane-tilt")
-                            .renderingMode(.template)
-                            .resizable()
-                            .foregroundStyle(Color.orangeMain500)
-                            .frame(width: 28, height: 28, alignment: .leading)
-                            .padding(.all, 6)
-                            .padding(.top, 4)
-                            .rotationEffect(.degrees(45))
-                    }
-                    .padding(.trailing, 4)
-                }
-            }
-            .padding(.leading, 15)
-            .padding(.trailing, 5)
-            .padding(.vertical, 6)
-            .frame(maxWidth: .infinity, minHeight: 55)
-            .background(.white)
-            .cornerRadius(30)
-            .overlay(
-                RoundedRectangle(cornerRadius: 30)
-                    .inset(by: 0.5)
-                    .stroke(Color.gray200, lineWidth: 1.5)
-            )
-            .padding(.horizontal, 4)
+            if !voiceRecordingInProgress {
+                HStack(spacing: 0) {
+                    Button {
+                    } label: {
+                        Image("smiley")
+                            .renderingMode(.template)
+                            .resizable()
+                            .foregroundStyle(Color.grayMain2c500)
+                            .frame(width: 28, height: 28, alignment: .leading)
+                            .padding(.all, 6)
+                            .padding(.top, 4)
+                    }
+                    .padding(.horizontal, isMessageTextFocused ? 0 : 2)
+                    
+                    Button {
+                        self.isShowPhotoLibrary = true
+                        self.mediasIsLoading = true
+                    } label: {
+                        Image("paperclip")
+                            .renderingMode(.template)
+                            .resizable()
+                            .foregroundStyle(conversationViewModel.maxMediaCount <= conversationViewModel.mediasToSend.count || mediasIsLoading ? Color.grayMain2c300 : Color.grayMain2c500)
+                            .frame(width: isMessageTextFocused ? 0 : 28, height: isMessageTextFocused ? 0 : 28, alignment: .leading)
+                            .padding(.all, isMessageTextFocused ? 0 : 6)
+                            .padding(.top, 4)
+                            .disabled(conversationViewModel.maxMediaCount <= conversationViewModel.mediasToSend.count || mediasIsLoading)
+                    }
+                    .padding(.horizontal, isMessageTextFocused ? 0 : 2)
+                    
+                    Button {
+                        self.isShowCamera = true
+                    } label: {
+                        Image("camera")
+                            .renderingMode(.template)
+                            .resizable()
+                            .foregroundStyle(conversationViewModel.maxMediaCount <= conversationViewModel.mediasToSend.count || mediasIsLoading ? Color.grayMain2c300 : Color.grayMain2c500)
+                            .frame(width: isMessageTextFocused ? 0 : 28, height: isMessageTextFocused ? 0 : 28, alignment: .leading)
+                            .padding(.all, isMessageTextFocused ? 0 : 6)
+                            .padding(.top, 4)
+                            .disabled(conversationViewModel.maxMediaCount <= conversationViewModel.mediasToSend.count || mediasIsLoading)
+                    }
+                    .padding(.horizontal, isMessageTextFocused ? 0 : 2)
+                    
+                    HStack {
+                        if #available(iOS 16.0, *) {
+                            TextField("Say something...", text: $conversationViewModel.messageText, axis: .vertical)
+                                .default_text_style(styleSize: 15)
+                                .focused($isMessageTextFocused)
+                                .padding(.vertical, 5)
+                        } else {
+                            ZStack(alignment: .leading) {
+                                TextEditor(text: $conversationViewModel.messageText)
+                                    .multilineTextAlignment(.leading)
+                                    .frame(maxHeight: 160)
+                                    .fixedSize(horizontal: false, vertical: true)
+                                    .default_text_style(styleSize: 15)
+                                    .focused($isMessageTextFocused)
+                                
+                                if conversationViewModel.messageText.isEmpty {
+                                    Text("Say something...")
+                                        .padding(.leading, 4)
+                                        .opacity(conversationViewModel.messageText.isEmpty ? 1 : 0)
+                                        .foregroundStyle(Color.gray300)
+                                        .default_text_style(styleSize: 15)
+                                }
+                            }
+                            .onTapGesture {
+                                isMessageTextFocused = true
+                            }
+                        }
+                    }
+                    
+                    if conversationViewModel.messageText.isEmpty && conversationViewModel.mediasToSend.isEmpty {
+                        Button {
+                            voiceRecordingInProgress = true
+                        } label: {
+                            Image("microphone")
+                                .renderingMode(.template)
+                                .resizable()
+                                .foregroundStyle(Color.grayMain2c500)
+                                .frame(width: 28, height: 28, alignment: .leading)
+                                .padding(.all, 6)
+                                .padding(.top, 4)
+                        }
+                    } else {
+                        Button {
+                            if conversationViewModel.displayedConversationHistorySize > 0 {
+                                NotificationCenter.default.post(name: .onScrollToBottom, object: nil)
+                            }
+                            conversationViewModel.sendMessage()
+                        } label: {
+                            Image("paper-plane-tilt")
+                                .renderingMode(.template)
+                                .resizable()
+                                .foregroundStyle(Color.orangeMain500)
+                                .frame(width: 28, height: 28, alignment: .leading)
+                                .padding(.all, 6)
+                                .padding(.top, 4)
+                                .rotationEffect(.degrees(45))
+                        }
+                        .padding(.trailing, 4)
+                    }
+                }
+                .padding(.leading, 15)
+                .padding(.trailing, 5)
+                .padding(.vertical, 6)
+                .frame(maxWidth: .infinity, minHeight: 55)
+                .background(.white)
+                .cornerRadius(30)
+                .overlay(
+                    RoundedRectangle(cornerRadius: 30)
+                        .inset(by: 0.5)
+                        .stroke(Color.gray200, lineWidth: 1.5)
+                )
+                .padding(.horizontal, 4)
+            } else {
+                VoiceRecorderPlayer(conversationViewModel: conversationViewModel, voiceRecordingInProgress: $voiceRecordingInProgress)
+                    .frame(maxHeight: 60)
+            }
         }
         .frame(maxWidth: .infinity, minHeight: 60)
         .padding(.top, 12)

@@ -1010,6 +1019,187 @@ struct ImagePicker: UIViewControllerRepresentable {
     }
 }
 
+struct VoiceRecorderPlayer: View {
+    @ObservedObject var conversationViewModel: ConversationViewModel
+    
+    @Binding var voiceRecordingInProgress: Bool
+    
+    @StateObject var audioRecorder = AudioRecorder()
+    
+    @State private var value: Double = 0.0
+    @State private var isPlaying: Bool = false
+    @State private var isRecording: Bool = true
+    @State private var timer: Timer?
+    
+    var minTrackColor: Color = .white.opacity(0.5)
+    var maxTrackGradient: Gradient = Gradient(colors: [Color.orangeMain300, Color.orangeMain500])
+    
+    var body: some View {
+        GeometryReader { geometry in
+            let radius = geometry.size.height * 0.5
+            HStack {
+                Button(
+                    action: {
+                        self.audioRecorder.stopVoiceRecorder()
+                        voiceRecordingInProgress = false
+                    },
+                    label: {
+                        Image("x")
+                            .renderingMode(.template)
+                            .resizable()
+                            .foregroundStyle(Color.orangeMain500)
+                            .frame(width: 25, height: 25)
+                    }
+                )
+                .padding(10)
+                .background(.white)
+                .clipShape(RoundedRectangle(cornerRadius: 25))
+                
+                ZStack(alignment: .leading) {
+                    LinearGradient(
+                        gradient: maxTrackGradient,
+                        startPoint: .leading,
+                        endPoint: .trailing
+                    )
+                    .frame(width: geometry.size.width - 110, height: 50)
+                    HStack {
+                        if !isRecording {
+                            Rectangle()
+                                .foregroundColor(minTrackColor)
+                                .frame(width: self.value * (geometry.size.width - 110) / 100, height: 50)
+                        } else {
+                            Rectangle()
+                                .foregroundColor(minTrackColor)
+                                .frame(width: CGFloat(audioRecorder.soundPower) * (geometry.size.width - 110) / 100, height: 50)
+                        }
+                    }
+                    
+                    HStack {
+                        Button(
+                            action: {
+                                if isRecording {
+                                    self.audioRecorder.stopVoiceRecorder()
+                                    isRecording = false
+                                } else if isPlaying {
+                                    conversationViewModel.pauseVoiceRecordPlayer()
+                                    pauseProgress()
+                                } else {
+                                    if audioRecorder.audioFilename != nil {
+                                        conversationViewModel.startVoiceRecordPlayer(voiceRecordPath: audioRecorder.audioFilename!)
+                                        playProgress()
+                                    }
+                                }
+                            },
+                            label: {
+                                Image(isRecording ? "stop-fill" : (isPlaying ? "pause-fill" : "play-fill"))
+                                    .renderingMode(.template)
+                                    .resizable()
+                                    .foregroundStyle(Color.orangeMain500)
+                                    .frame(width: 20, height: 20)
+                            }
+                        )
+                        .padding(8)
+                        .background(.white)
+                        .clipShape(RoundedRectangle(cornerRadius: 25))
+                        
+                        Spacer()
+                        
+                        HStack {
+                            if isRecording {
+                                Image("record-fill")
+                                    .renderingMode(.template)
+                                    .resizable()
+                                    .foregroundStyle(isRecording ? Color.redDanger500 : Color.orangeMain500)
+                                    .frame(width: 18, height: 18)
+                            }
+                            
+                            Text(Int(audioRecorder.recordingTime).convertDurationToString())
+                                .default_text_style(styleSize: 16)
+                                .padding(.horizontal, 5)
+                        }
+                        .padding(8)
+                        .background(.white)
+                        .clipShape(RoundedRectangle(cornerRadius: 25))
+                    }
+                    .padding(.horizontal, 10)
+                }
+                .clipShape(RoundedRectangle(cornerRadius: radius))
+                
+                Button {
+                    if conversationViewModel.displayedConversationHistorySize > 0 {
+                        NotificationCenter.default.post(name: .onScrollToBottom, object: nil)
+                    }
+                    conversationViewModel.sendMessage(audioRecorder: self.audioRecorder)
+                    voiceRecordingInProgress = false
+                } label: {
+                    Image("paper-plane-tilt")
+                        .renderingMode(.template)
+                        .resizable()
+                        .foregroundStyle(Color.orangeMain500)
+                        .frame(width: 28, height: 28, alignment: .leading)
+                        .padding(.all, 6)
+                        .padding(.top, 4)
+                        .rotationEffect(.degrees(45))
+                }
+                .padding(.trailing, 4)
+            }
+            .padding(.horizontal, 4)
+            .padding(.vertical, 5)
+            .onAppear {
+                self.audioRecorder.startRecording()
+            }
+            .onDisappear {
+                self.audioRecorder.stopVoiceRecorder()
+                resetProgress()
+            }
+        }
+    }
+    
+    private func playProgress() {
+        isPlaying = true
+        if audioRecorder.audioFilename != nil {
+            self.value = conversationViewModel.getPositionVoiceRecordPlayer(voiceRecordPath: audioRecorder.audioFilename!)
+            timer = Timer.scheduledTimer(withTimeInterval: 0.1, repeats: true) { _ in
+                if self.value < 100.0 {
+                    let valueTmp = conversationViewModel.getPositionVoiceRecordPlayer(voiceRecordPath: audioRecorder.audioFilename!)
+                    if self.value > 90 && self.value == valueTmp {
+                        self.value = 100
+                    } else {
+                        if valueTmp == 0 && !conversationViewModel.isPlayingVoiceRecordPlayer(voiceRecordPath: audioRecorder.audioFilename!) {
+                            stopProgress()
+                            value = 0.0
+                            isPlaying = false
+                        } else {
+                            self.value = valueTmp
+                        }
+                    }
+                } else {
+                    resetProgress()
+                }
+            }
+        }
+    }
+    
+    // Pause the progress
+    private func pauseProgress() {
+        isPlaying = false
+        stopProgress()
+    }
+    
+    // Reset the progress
+    private func resetProgress() {
+        conversationViewModel.stopVoiceRecordPlayer()
+        stopProgress()
+        value = 0.0
+        isPlaying = false
+    }
+    
+    // Stop the progress and invalidate the timer
+    private func stopProgress() {
+        timer?.invalidate()
+        timer = nil
+    }
+}
+
 /*
 #Preview {
     ConversationFragment(conversationViewModel: ConversationViewModel(), conversationsListViewModel: ConversationsListViewModel(), sections: [MessagesSection], ids: [""])
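Note: the whole ConversationFragment change hinges on a single @State flag. voiceRecordingInProgress swaps the composer row for the recorder row, and VoiceRecorderPlayer hands control back through its binding on cancel or send. The skeleton of that pattern, reduced to a compilable sketch (names other than voiceRecordingInProgress are placeholders):

import SwiftUI

struct MessageBarSkeleton: View {
    @State private var voiceRecordingInProgress = false

    var body: some View {
        HStack {
            if !voiceRecordingInProgress {
                TextField("Say something...", text: .constant(""))
                Button("record") { voiceRecordingInProgress = true }   // the microphone button
            } else {
                // Stands in for VoiceRecorderPlayer, which flips the
                // binding back to false when recording is cancelled or sent.
                Button("cancel") { voiceRecordingInProgress = false }
            }
        }
        .frame(maxWidth: .infinity, minHeight: 55)
    }
}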
@@ -166,7 +166,7 @@ struct ConversationsListFragment: View {
             }
             .safeAreaInset(edge: .top, content: {
                 Spacer()
-                    .frame(height: 14)
+                    .frame(height: 12)
             })
             .listStyle(.plain)
             .overlay(
@@ -119,6 +119,7 @@ struct UIList: UIViewRepresentable {
         tableView.showsVerticalScrollIndicator = true
         tableView.estimatedSectionHeaderHeight = 1
         tableView.estimatedSectionFooterHeight = UITableView.automaticDimension
+        tableView.keyboardDismissMode = .interactive
         tableView.backgroundColor = UIColor(.white)
         tableView.scrollsToTop = true
         
@@ -83,16 +83,18 @@ public struct Attachment: Codable, Identifiable, Hashable {
     public let thumbnail: URL
     public let full: URL
     public let type: AttachmentType
+    public let duration: Int
     
-    public init(id: String, name: String, thumbnail: URL, full: URL, type: AttachmentType) {
+    public init(id: String, name: String, thumbnail: URL, full: URL, type: AttachmentType, duration: Int = 0) {
         self.id = id
         self.name = name
         self.thumbnail = thumbnail
         self.full = full
         self.type = type
+        self.duration = duration
     }
     
-    public init(id: String, name: String, url: URL, type: AttachmentType) {
-        self.init(id: id, name: name, thumbnail: url, full: url, type: type)
+    public init(id: String, name: String, url: URL, type: AttachmentType, duration: Int = 0) {
+        self.init(id: id, name: name, thumbnail: url, full: url, type: type, duration: duration)
     }
 }
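Note: widening both initializers with a defaulted duration keeps every existing call site compiling; only voice-recording attachments need to pass it. The value is in milliseconds (ChatBubbleView divides by 1000 before formatting). A quick illustration with a placeholder URL:

import Foundation

let url = URL(string: "file:///tmp/voice-recording.mka")!

// Voice note: duration supplied explicitly, in milliseconds.
let voiceNote = Attachment(id: UUID().uuidString, name: "voice-recording.mka", url: url, type: .voiceRecording, duration: 12_000)

// Image: duration defaults to 0, exactly as before this commit.
let photo = Attachment(id: UUID().uuidString, name: "photo.png", url: url, type: .image)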
@@ -26,7 +26,6 @@ import AVFoundation
 // swiftlint:disable line_length
 // swiftlint:disable type_body_length
 // swiftlint:disable cyclomatic_complexity
 
 class ConversationViewModel: ObservableObject {
-    
     private var coreContext = CoreContext.shared
@@ -50,9 +49,16 @@ class ConversationViewModel: ObservableObject {
 
     @Published var isShowSelectedMessageToDisplayDetails: Bool = false
     @Published var selectedMessageToDisplayDetails: EventLogMessage?
+    @Published var selectedMessageToPlayVoiceRecording: EventLogMessage?
     @Published var selectedMessage: EventLogMessage?
     @Published var messageToReply: EventLogMessage?
     
+    @Published var sheetCategories: [SheetCategory] = []
+    
+    var vrpManager: VoiceRecordPlayerManager?
+    @Published var isPlaying = false
+    @Published var progress: Double = 0.0
+    
     struct SheetCategory: Identifiable {
         let id = UUID()
         let name: String
@@ -66,8 +72,6 @@ class ConversationViewModel: ObservableObject {
         var isMe: Bool = false
     }
     
-    @Published var sheetCategories: [SheetCategory] = []
-    
     init() {}
     
     func addConversationDelegate() {
@@ -103,11 +107,13 @@ class ConversationViewModel: ObservableObject {
                 statusTmp = .sending
             }
             
-            if let indexMessage = self.conversationMessagesSection[0].rows.firstIndex(where: {$0.eventLog.chatMessage?.messageId == message.messageId}) {
-                if indexMessage < self.conversationMessagesSection[0].rows.count && self.conversationMessagesSection[0].rows[indexMessage].message.status != statusTmp {
-                    DispatchQueue.main.async {
-                        self.objectWillChange.send()
-                        self.conversationMessagesSection[0].rows[indexMessage].message.status = statusTmp
+            if !self.conversationMessagesSection.isEmpty && !self.conversationMessagesSection[0].rows.isEmpty {
+                if let indexMessage = self.conversationMessagesSection[0].rows.firstIndex(where: {$0.eventLog.chatMessage?.messageId == message.messageId}) {
+                    if indexMessage < self.conversationMessagesSection[0].rows.count && self.conversationMessagesSection[0].rows[indexMessage].message.status != statusTmp {
+                        DispatchQueue.main.async {
+                            self.objectWillChange.send()
+                            self.conversationMessagesSection[0].rows[indexMessage].message.status = statusTmp
+                        }
                     }
                 }
             }
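Note: the added emptiness checks guard against indexing rows on an empty section list. The same intent reads a little flatter as a guard chain; a sketch of the equivalent logic (firstIndex already guarantees a valid index, so the extra bounds check can be dropped):

guard let section = conversationMessagesSection.first,
      let indexMessage = section.rows.firstIndex(where: { $0.eventLog.chatMessage?.messageId == message.messageId }),
      section.rows[indexMessage].message.status != statusTmp
else { return }

DispatchQueue.main.async {
    self.objectWillChange.send()
    self.conversationMessagesSection[0].rows[indexMessage].message.status = statusTmp
}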
@@ -317,7 +323,8 @@ class ConversationViewModel: ObservableObject {
                         id: UUID().uuidString,
                         name: content.name!,
                         url: path!,
-                        type: typeTmp
+                        type: typeTmp,
+                        duration: typeTmp == .voiceRecording ? content.fileDuration : 0
                     )
                     attachmentNameList += ", \(content.name!)"
                     attachmentList.append(attachment)
@@ -532,7 +539,8 @@ class ConversationViewModel: ObservableObject {
                         id: UUID().uuidString,
                         name: content.name!,
                         url: path!,
-                        type: typeTmp
+                        type: typeTmp,
+                        duration: typeTmp == .voiceRecording ? content.fileDuration : 0
                     )
                     attachmentNameList += ", \(content.name!)"
                     attachmentList.append(attachment)
@@ -744,7 +752,8 @@ class ConversationViewModel: ObservableObject {
                         id: UUID().uuidString,
                         name: content.name!,
                         url: path!,
-                        type: typeTmp
+                        type: typeTmp,
+                        duration: typeTmp == .voiceRecording ? content.fileDuration : 0
                     )
                     attachmentNameList += ", \(content.name!)"
                     attachmentList.append(attachment)
@@ -1029,7 +1038,8 @@ class ConversationViewModel: ObservableObject {
                         id: UUID().uuidString,
                         name: content.name!,
                         url: path!,
-                        type: typeTmp
+                        type: typeTmp,
+                        duration: typeTmp == .voiceRecording ? content.fileDuration : 0
                     )
                     attachmentNameList += ", \(content.name!)"
                     attachmentList.append(attachment)
@@ -1198,7 +1208,7 @@ class ConversationViewModel: ObservableObject {
         }
     }
     
-    func sendMessage() {
+    func sendMessage(audioRecorder: AudioRecorder? = nil) {
         coreContext.doOnCoreQueue { _ in
             do {
                 var message: ChatMessage?
@@ -1219,75 +1229,74 @@ class ConversationViewModel: ObservableObject {
                     }
                 }
                 
-                /*
-                if (isVoiceRecording.value == true && voiceMessageRecorder.file != null) {
-                    stopVoiceRecorder()
-                    val content = voiceMessageRecorder.createContent()
-                    if (content != null) {
-                        Log.i(
-                            "$TAG Voice recording content created, file name is ${content.name} and duration is ${content.fileDuration}"
-                        )
-                        message.addContent(content)
-                    } else {
-                        Log.e("$TAG Voice recording content couldn't be created!")
-                    }
-                } else {
-                */
-                self.mediasToSend.forEach { attachment in
-                    do {
-                        let content = try Factory.Instance.createContent()
-                        
-                        switch attachment.type {
-                        case .image:
-                            content.type = "image"
-                        /*
-                        case .audio:
-                            content.type = "audio"
-                        */
-                        case .video:
-                            content.type = "video"
-                        /*
-                        case .pdf:
-                            content.type = "application"
-                        case .plainText:
-                            content.type = "text"
-                        */
-                        default:
-                            content.type = "file"
-                        }
-                        
-                        // content.subtype = attachment.type == .plainText ? "plain" : FileUtils.getExtensionFromFileName(attachment.fileName)
-                        content.subtype = attachment.full.pathExtension
-                        
-                        content.name = attachment.full.lastPathComponent
-                        
-                        if message != nil {
-                            
-                            let path = FileManager.default.temporaryDirectory.appendingPathComponent((attachment.full.lastPathComponent.addingPercentEncoding(withAllowedCharacters: .urlHostAllowed) ?? ""))
-                            let newPath = URL(string: FileUtil.sharedContainerUrl().appendingPathComponent("Library/Images").absoluteString
-                                              + (attachment.full.lastPathComponent.addingPercentEncoding(withAllowedCharacters: .urlHostAllowed) ?? ""))
-                            /*
-                            let data = try Data(contentsOf: path)
-                            let decodedData: () = try data.write(to: path)
-                            */
-                            
-                            do {
-                                if FileManager.default.fileExists(atPath: newPath!.path) {
-                                    try FileManager.default.removeItem(atPath: newPath!.path)
-                                }
-                                try FileManager.default.moveItem(atPath: path.path, toPath: newPath!.path)
-                                
-                                let filePathTmp = newPath?.absoluteString
-                                content.filePath = String(filePathTmp!.dropFirst(7))
-                                message!.addFileContent(content: content)
-                            } catch {
-                                Log.error(error.localizedDescription)
-                            }
-                        }
-                    } catch {
-                    }
-                }
-                // }
+                if audioRecorder != nil {
+                    do {
+                        audioRecorder!.stopVoiceRecorder()
+                        let content = try audioRecorder!.linphoneAudioRecorder.createContent()
+                        Log.info(
+                            "[ConversationViewModel] Voice recording content created, file name is \(content.name ?? "") and duration is \(content.fileDuration)"
+                        )
+                        
+                        if message != nil {
+                            message!.addContent(content: content)
+                        }
+                    } catch {
+                    }
+                } else {
+                    self.mediasToSend.forEach { attachment in
+                        do {
+                            let content = try Factory.Instance.createContent()
+                            
+                            switch attachment.type {
+                            case .image:
+                                content.type = "image"
+                            /*
+                            case .audio:
+                                content.type = "audio"
+                            */
+                            case .video:
+                                content.type = "video"
+                            /*
+                            case .pdf:
+                                content.type = "application"
+                            case .plainText:
+                                content.type = "text"
+                            */
+                            default:
+                                content.type = "file"
+                            }
+                            
+                            // content.subtype = attachment.type == .plainText ? "plain" : FileUtils.getExtensionFromFileName(attachment.fileName)
+                            content.subtype = attachment.full.pathExtension
+                            
+                            content.name = attachment.full.lastPathComponent
+                            
+                            if message != nil {
+                                
+                                let path = FileManager.default.temporaryDirectory.appendingPathComponent((attachment.full.lastPathComponent.addingPercentEncoding(withAllowedCharacters: .urlHostAllowed) ?? ""))
+                                let newPath = URL(string: FileUtil.sharedContainerUrl().appendingPathComponent("Library/Images").absoluteString
+                                                  + (attachment.full.lastPathComponent.addingPercentEncoding(withAllowedCharacters: .urlHostAllowed) ?? ""))
+                                /*
+                                let data = try Data(contentsOf: path)
+                                let decodedData: () = try data.write(to: path)
+                                */
+                                
+                                do {
+                                    if FileManager.default.fileExists(atPath: newPath!.path) {
+                                        try FileManager.default.removeItem(atPath: newPath!.path)
+                                    }
+                                    try FileManager.default.moveItem(atPath: path.path, toPath: newPath!.path)
+                                    
+                                    let filePathTmp = newPath?.absoluteString
+                                    content.filePath = String(filePathTmp!.dropFirst(7))
+                                    message!.addFileContent(content: content)
+                                } catch {
+                                    Log.error(error.localizedDescription)
+                                }
+                            }
+                        } catch {
+                        }
+                    }
+                }
                 
                 if message != nil && !message!.contents.isEmpty {
                     Log.info("[ConversationViewModel] Sending message")
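Note: both branches above hand liblinphone a filesystem path by stripping the URL scheme with dropFirst(7), which assumes the string starts with exactly "file://". Foundation can do the same conversion without that assumption; a sketch:

import Foundation

let url = URL(string: "file:///var/mobile/Library/Images/voice-recording.mka")!
let path = url.path   // "/var/mobile/Library/Images/voice-recording.mka"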
@@ -1621,49 +1630,335 @@ class ConversationViewModel: ObservableObject {
             }
         }
     }
-}
-
-struct CustomSlider: View {
-    @Binding var value: Double
-    var range: ClosedRange<Double>
-    var thumbColor: Color
-    var trackColor: Color
-    var trackHeight: CGFloat
-    var cornerRadius: CGFloat
-    
-    var body: some View {
-        VStack {
-            ZStack {
-                // Slider track with rounded corners
-                Rectangle()
-                    .fill(trackColor)
-                    .frame(height: trackHeight)
-                    .cornerRadius(cornerRadius)
-                
-                // Progress track to show the current value
-                Rectangle()
-                    .fill(thumbColor.opacity(0.5))
-                    .frame(width: CGFloat((value - range.lowerBound) / (range.upperBound - range.lowerBound)) * UIScreen.main.bounds.width, height: trackHeight)
-                    .cornerRadius(cornerRadius)
-                
-                // Thumb (handle) with rounded appearance
-                Circle()
-                    .fill(thumbColor)
-                    .frame(width: 30, height: 30)
-                    .offset(x: CGFloat((value - range.lowerBound) / (range.upperBound - range.lowerBound)) * UIScreen.main.bounds.width - 20)
-                    .gesture(DragGesture(minimumDistance: 0)
-                        .onChanged { gesture in
-                            let sliderWidth = UIScreen.main.bounds.width
-                            let dragX = gesture.location.x
-                            let newValue = range.lowerBound + Double(dragX / sliderWidth) * (range.upperBound - range.lowerBound)
-                            value = min(max(newValue, range.lowerBound), range.upperBound)
-                        }
-                    )
-            }
-            .padding(.horizontal, 20)
-        }
-    }
-}
+    
+    func startVoiceRecordPlayer(voiceRecordPath: URL) {
+        coreContext.doOnCoreQueue { core in
+            if self.vrpManager == nil || self.vrpManager!.voiceRecordPath != voiceRecordPath {
+                self.vrpManager = VoiceRecordPlayerManager(core: core, voiceRecordPath: voiceRecordPath)
+            }
+            
+            if self.vrpManager != nil {
+                self.vrpManager!.startVoiceRecordPlayer()
+            }
+        }
+    }
+    
+    func getPositionVoiceRecordPlayer(voiceRecordPath: URL) -> Double {
+        if self.vrpManager != nil && self.vrpManager!.voiceRecordPath == voiceRecordPath {
+            return self.vrpManager!.positionVoiceRecordPlayer()
+        } else {
+            return 0
+        }
+    }
+    
+    func isPlayingVoiceRecordPlayer(voiceRecordPath: URL) -> Bool {
+        if self.vrpManager != nil && self.vrpManager!.voiceRecordPath == voiceRecordPath {
+            return true
+        } else {
+            return false
+        }
+    }
+    
+    func pauseVoiceRecordPlayer() {
+        coreContext.doOnCoreQueue { _ in
+            if self.vrpManager != nil {
+                self.vrpManager!.pauseVoiceRecordPlayer()
+            }
+        }
+    }
+    
+    func stopVoiceRecordPlayer() {
+        coreContext.doOnCoreQueue { _ in
+            if self.vrpManager != nil {
+                self.vrpManager!.stopVoiceRecordPlayer()
+            }
+        }
+    }
+}
 // swiftlint:enable line_length
 // swiftlint:enable type_body_length
 // swiftlint:enable cyclomatic_complexity
+
+class VoiceRecordPlayerManager {
+    private var core: Core
+    var voiceRecordPath: URL
+    private var voiceRecordPlayer: Player?
+    // private var isPlayingVoiceRecord = false
+    private var voiceRecordAudioFocusRequest: AVAudioSession?
+    // private var voiceRecordPlayerPosition: Double = 0
+    // private var voiceRecordingDuration: TimeInterval = 0
+    
+    init(core: Core, voiceRecordPath: URL) {
+        self.core = core
+        self.voiceRecordPath = voiceRecordPath
+    }
+    
+    private func initVoiceRecordPlayer() {
+        print("Creating player for voice record")
+        do {
+            voiceRecordPlayer = try core.createLocalPlayer(soundCardName: getSpeakerSoundCard(core: core), videoDisplayName: nil, windowId: nil)
+        } catch {
+            print("Couldn't create local player!")
+        }
+        
+        print("Voice record player created")
+        print("Opening voice record file [\(voiceRecordPath.absoluteString)]")
+        
+        do {
+            try voiceRecordPlayer!.open(filename: String(voiceRecordPath.absoluteString.dropFirst(7)))
+            print("Player opened file at [\(voiceRecordPath.absoluteString)]")
+        } catch {
+            print("Player failed to open file at [\(voiceRecordPath.absoluteString)]")
+        }
+    }
+    
+    func startVoiceRecordPlayer() {
+        if voiceRecordAudioFocusRequest == nil {
+            voiceRecordAudioFocusRequest = AVAudioSession.sharedInstance()
+            if let request = voiceRecordAudioFocusRequest {
+                try? request.setActive(true)
+            }
+        }
+        
+        if isPlayerClosed() {
+            print("Player closed, let's open it first")
+            initVoiceRecordPlayer()
+            
+            if voiceRecordPlayer!.state == .Closed {
+                print("It seems the player fails to open the file, abort playback")
+                // Handle the failure (e.g. show a toast)
+                return
+            }
+        }
+        
+        do {
+            try voiceRecordPlayer!.start()
+            print("Playing voice record")
+        } catch {
+            print("Player failed to start voice recording")
+        }
+    }
+    
+    func positionVoiceRecordPlayer() -> Double {
+        if !isPlayerClosed() {
+            return Double(voiceRecordPlayer!.currentPosition) / Double(voiceRecordPlayer!.duration) * 100
+        } else {
+            return 0.0
+        }
+    }
+    
+    func pauseVoiceRecordPlayer() {
+        if !isPlayerClosed() {
+            print("Pausing voice record")
+            try? voiceRecordPlayer?.pause()
+        }
+    }
+    
+    private func isPlayerClosed() -> Bool {
+        return voiceRecordPlayer == nil || voiceRecordPlayer?.state == .Closed
+    }
+    
+    func stopVoiceRecordPlayer() {
+        if !isPlayerClosed() {
+            print("Stopping voice record")
+            try? voiceRecordPlayer?.pause()
+            try? voiceRecordPlayer?.seek(timeMs: 0)
+            voiceRecordPlayer?.close()
+        }
+        
+        if let request = voiceRecordAudioFocusRequest {
+            try? request.setActive(false)
+            voiceRecordAudioFocusRequest = nil
+        }
+    }
+    
+    func getSpeakerSoundCard(core: Core) -> String? {
+        var speakerCard: String? = nil
+        var earpieceCard: String? = nil
+        core.audioDevices.forEach { device in
+            if (device.hasCapability(capability: .CapabilityPlay)) {
+                if (device.type == .Speaker) {
+                    speakerCard = device.id
+                } else if (device.type == .Earpiece) {
+                    earpieceCard = device.id
+                }
+            }
+        }
+        return speakerCard != nil ? speakerCard : earpieceCard
+    }
+    
+    func changeRouteToSpeaker() {
+        core.outputAudioDevice = core.audioDevices.first { $0.type == AudioDevice.Kind.Speaker }
+        UIDevice.current.isProximityMonitoringEnabled = false
+    }
+}
+
+class AudioRecorder: NSObject, ObservableObject {
+    var linphoneAudioRecorder: Recorder!
+    var recordingSession: AVAudioSession?
+    @Published var isRecording = false
+    @Published var audioFilename: URL?
+    @Published var audioFilenameAAC: URL?
+    @Published var recordingTime: TimeInterval = 0
+    @Published var soundPower: Float = 0
+    
+    var timer: Timer?
+    
+    func startRecording() {
+        recordingSession = AVAudioSession.sharedInstance()
+        CoreContext.shared.doOnCoreQueue { core in
+            core.activateAudioSession(activated: true)
+        }
+        
+        if recordingSession != nil {
+            do {
+                try recordingSession!.setCategory(.playAndRecord, mode: .default)
+                try recordingSession!.setActive(true)
+                recordingSession!.requestRecordPermission { allowed in
+                    if allowed {
+                        self.initVoiceRecorder()
+                    } else {
+                        print("Permission to record not granted.")
+                    }
+                }
+            } catch {
+                print("Failed to setup recording session.")
+            }
+        }
+    }
+    
+    private func initVoiceRecorder() {
+        CoreContext.shared.doOnCoreQueue { core in
+            Log.info("[ConversationViewModel] [AudioRecorder] Creating voice message recorder")
+            let recorderParams = try? core.createRecorderParams()
+            if recorderParams != nil {
+                recorderParams!.fileFormat = MediaFileFormat.Mkv
+                
+                let recordingAudioDevice = self.getAudioRecordingDeviceIdForVoiceMessage()
+                recorderParams!.audioDevice = recordingAudioDevice
+                Log.info(
+                    "[ConversationViewModel] [AudioRecorder] Using device \(recorderParams!.audioDevice?.id ?? "Error id") to make the voice message recording"
+                )
+                
+                self.linphoneAudioRecorder = try? core.createRecorder(params: recorderParams!)
+                Log.info("[ConversationViewModel] [AudioRecorder] Voice message recorder created")
+                
+                self.startVoiceRecorder()
+            }
+        }
+    }
+    
+    func startVoiceRecorder() {
+        switch linphoneAudioRecorder.state {
+        case .Running:
+            Log.warn("[ConversationViewModel] [AudioRecorder] Recorder is already recording")
+        case .Paused:
+            Log.warn("[ConversationViewModel] [AudioRecorder] Recorder is paused, resuming recording")
+            try? linphoneAudioRecorder.start()
+        case .Closed:
+            var extensionFileFormat: String = ""
+            switch linphoneAudioRecorder.params?.fileFormat {
+            case .Smff:
+                extensionFileFormat = "smff"
+            case .Mkv:
+                extensionFileFormat = "mka"
+            default:
+                extensionFileFormat = "wav"
+            }
+            
+            let tempFileName = "voice-recording-\(Int(Date().timeIntervalSince1970)).\(extensionFileFormat)"
+            audioFilename = FileUtil.sharedContainerUrl().appendingPathComponent("Library/Images").appendingPathComponent(tempFileName)
+            
+            if audioFilename != nil {
+                Log.warn("[ConversationViewModel] [AudioRecorder] Recorder is closed, starting recording in \(audioFilename!.absoluteString)")
+                try? linphoneAudioRecorder.open(file: String(audioFilename!.absoluteString.dropFirst(7)))
+                try? linphoneAudioRecorder.start()
+            }
+            
+            startTimer()
+            
+            DispatchQueue.main.async {
+                self.isRecording = true
+            }
+        }
+    }
+    
+    func stopVoiceRecorder() {
+        if linphoneAudioRecorder.state == .Running {
+            Log.info("[ConversationViewModel] [AudioRecorder] Closing voice recorder")
+            try? linphoneAudioRecorder.pause()
+            linphoneAudioRecorder.close()
+        }
+        
+        stopTimer()
+        
+        DispatchQueue.main.async {
+            self.isRecording = false
+        }
+        
+        if let request = recordingSession {
+            Log.info("[ConversationViewModel] [AudioRecorder] Releasing voice recording audio focus request")
+            try? request.setActive(false)
+            recordingSession = nil
+            CoreContext.shared.doOnCoreQueue { core in
+                core.activateAudioSession(activated: false)
+            }
+        }
+    }
+    
+    func startTimer() {
+        DispatchQueue.main.async {
+            self.recordingTime = 0
+            let maxVoiceRecordDuration = Config.voiceRecordingMaxDuration
+            self.timer = Timer.scheduledTimer(withTimeInterval: 0.1, repeats: true) { _ in // More frequent updates
+                self.recordingTime += 0.1
+                self.updateSoundPower()
+                let duration = self.linphoneAudioRecorder.duration
+                if duration >= maxVoiceRecordDuration {
+                    print("[ConversationViewModel] [AudioRecorder] Max duration for voice recording exceeded (\(maxVoiceRecordDuration)ms), stopping.")
+                    self.stopVoiceRecorder()
+                }
+            }
+        }
+    }
+    
+    func stopTimer() {
+        self.timer?.invalidate()
+        self.timer = nil
+    }
+    
+    func updateSoundPower() {
+        let soundPowerTmp = linphoneAudioRecorder.captureVolume * 1000 // Capture sound power
+        soundPower = soundPowerTmp < 10 ? 0 : (soundPowerTmp > 100 ? 100 : (soundPowerTmp - 10))
+    }
+    
+    func getAudioRecordingDeviceIdForVoiceMessage() -> AudioDevice? {
+        // In case no headset/hearing aid/bluetooth is connected, use microphone sound card
+        // If none are available, default one will be used
+        var headsetCard: AudioDevice?
+        var bluetoothCard: AudioDevice?
+        var microphoneCard: AudioDevice?
+        
+        CoreContext.shared.doOnCoreQueue { core in
+            for device in core.audioDevices {
+                if device.hasCapability(capability: .CapabilityRecord) {
+                    switch device.type {
+                    case .Headphones, .Headset:
+                        headsetCard = device
+                    case .Bluetooth, .HearingAid:
+                        bluetoothCard = device
+                    case .Microphone:
+                        microphoneCard = device
+                    default:
+                        break
+                    }
+                }
+            }
+        }
+        
+        Log.info("Found headset/headphones/hearingAid sound card [\(String(describing: headsetCard))], "
+                 + "bluetooth sound card [\(String(describing: bluetoothCard))] and microphone card [\(String(describing: microphoneCard))]")
+        
+        return headsetCard ?? bluetoothCard ?? microphoneCard
+    }
+}
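Note: two small pure computations are buried in the classes above and are easy to sanity-check in isolation: positionVoiceRecordPlayer maps the player position to 0-100, and updateSoundPower rescales Recorder.captureVolume into a 0-100 bar width with a silence floor. Extracted as free functions (the names are mine, the logic is the commit's, plus a zero-duration guard the original leaves to the player):

// Player position as a percentage of total duration.
func positionPercent(currentMs: Int, durationMs: Int) -> Double {
    guard durationMs > 0 else { return 0 }
    return Double(currentMs) / Double(durationMs) * 100
}

// captureVolume scaled by 1000; below 10 counts as silence, above 100 clamps,
// and the remaining band is shifted down to 0...90.
func normalizedSoundPower(captureVolume: Float) -> Float {
    let scaled = captureVolume * 1000
    if scaled < 10 { return 0 }
    if scaled > 100 { return 100 }
    return scaled - 10
}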
@@ -144,7 +144,7 @@ struct HistoryListFragment: View {
             }
             .safeAreaInset(edge: .top, content: {
                 Spacer()
-                    .frame(height: 14)
+                    .frame(height: 12)
             })
             .listStyle(.plain)
             .overlay(
@@ -162,7 +162,7 @@ struct MeetingsFragment: View {
             }
             .safeAreaInset(edge: .top, content: {
                 Spacer()
-                    .frame(height: 14)
+                    .frame(height: 12)
             })
             .listStyle(.plain)
             .overlay(
@@ -57,5 +57,7 @@ extension Config {
     static let defaultPass = Config.get().getString(section: "app", key: "pass", defaultString: "")
     
     static let pushNotificationsInterval = Config.get().getInt(section: "net", key: "pn-call-remote-push-interval", defaultValue: 3)
+    
+    static let voiceRecordingMaxDuration = Config.get().getInt(section: "app", key: "voice_recording_max_duration", defaultValue: 600000)
     
 }
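Note: voiceRecordingMaxDuration is read from the "app" section of the rc config and is in milliseconds (600000 = 10 minutes). AudioRecorder.startTimer() compares it against the recorder's running duration on every tick. A sketch of that comparison with stand-in values:

let maxMs = 600_000          // Config.voiceRecordingMaxDuration default
let elapsedMs = 42_000       // e.g. Recorder.duration at some tick
if elapsedMs >= maxMs {
    // mirrors the log-and-stop behaviour in AudioRecorder.startTimer()
    print("Max duration for voice recording exceeded (\(maxMs) ms), stopping.")
}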
@@ -24,3 +24,17 @@ extension String {
         return NSLocalizedString(self, comment: comment != nil ? comment! : self)
     }
 }
+
+extension String {
+    var isOnlyEmojis: Bool {
+        let filteredText = self.filter { !$0.isWhitespace }
+        return !filteredText.isEmpty && filteredText.allSatisfy { $0.isEmoji }
+    }
+}
+
+extension Character {
+    var isEmoji: Bool {
+        guard let scalar = unicodeScalars.first else { return false }
+        return scalar.properties.isEmoji && (scalar.value > 0x238C || unicodeScalars.count > 1)
+    }
+}
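Note: the emoji helpers are easy to exercise directly. The scalar.value > 0x238C guard is what keeps plain digits and other low code points, which carry the isEmoji scalar property for variation-selector reasons, from counting as emoji:

assert("😀👍".isOnlyEmojis)        // all emoji after whitespace filtering
assert(!"😀 x".isOnlyEmojis)       // "x" is not an emoji
assert(!"7".isOnlyEmojis)          // digit: isEmoji property, but low code point and single scalar
assert(!" ".isOnlyEmojis)          // whitespace-only filters to empty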