BubbleChat UI updates for image, video and audio messages

This commit is contained in:
Benoit Martins 2023-02-28 17:16:14 +01:00 committed by QuentinArguillere
parent f4a3261866
commit 52b14654c9
7 changed files with 508 additions and 122 deletions

View file

@ -0,0 +1,72 @@
//
// ChatConversationTableViewModel.swift
// linphone
//
// Created by Benoît Martins on 23/02/2023.
//
import UIKit
import Foundation
import linphonesw
/// Backing model for the chat conversation message list.
/// Holds the chat room being displayed and the flattened list of chat
/// messages extracted from its history events.
class ChatConversationTableViewModel: ControlsViewModel {
    static let sharedModel = ChatConversationTableViewModel()

    // Messages to display, in the order returned by getHistoryEvents.
    var messageListHistory: [ChatMessage] = []

    // Room whose history is shown; nil when no conversation is selected.
    var chatRoom: ChatRoom? = nil

    override init() {
        super.init()
    }

    /// Rebuilds `messageListHistory` from the current chat room.
    func reloadData() {
        updateData()
        //tableView.reloadData()
        //scroll(toLastUnread: false)
    }

    /// Clears the list and repopulates it from the room's last 20 history events.
    /// In one-to-one rooms, events whose type is not of interest are skipped.
    func updateData() {
        clearEventList()
        guard let chatRoom = chatRoom else { return }
        let oneToOne = chatRoom.hasCapability(mask: Int(LinphoneChatRoomCapabilitiesOneToOne.rawValue))
        for event in chatRoom.getHistoryEvents(nbEvents: 20) {
            // Bug fix (idiom): the original expressed this skip with an empty
            // `if` branch and a trailing `else`; inverted into a plain skip.
            if oneToOne && !eventTypeIsOfInterestForOne(toOneRoom: event.type) {
                continue
            }
            if let chat = event.chatMessage {
                messageListHistory.append(chat)
            }
            // TODO: when auto-download is available, defer appending events whose
            // file transfer is still in progress (see original commented block).
        }
    }

    /// Empties the displayed message list.
    func clearEventList() {
        messageListHistory.removeAll()
    }

    /// Whether an event of `type` should be displayed in a one-to-one room.
    func eventTypeIsOfInterestForOne(toOneRoom type: EventLogType) -> Bool {
        let displayedTypes = [
            LinphoneEventLogTypeConferenceChatMessage.rawValue,
            LinphoneEventLogTypeConferenceEphemeralMessageEnabled.rawValue,
            LinphoneEventLogTypeConferenceEphemeralMessageDisabled.rawValue,
            LinphoneEventLogTypeConferenceEphemeralMessageLifetimeChanged.rawValue
        ]
        return displayedTypes.contains(type.rawValue)
    }
}

View file

@ -51,7 +51,6 @@ class ChatConversationViewModel: ControlsViewModel {
var vrPlayerTimer = Timer()
var voiceRecorder : Recorder? = nil
var linphonePlayer : Player? = nil
var secureLevel : UIImage?
var imageT : [UIImage?] = []
@ -436,6 +435,7 @@ class ChatConversationViewModel: ControlsViewModel {
}else if(type == "public.movie"){
ChatConversationViewModel.sharedModel.data.append(try Data(contentsOf: url))
var tmpImage = ChatConversationViewModel.sharedModel.createThumbnailOfVideoFromFileURL(videoURL: url.relativeString)
print("MultilineMessageCell configure ChatMessage 100000000 \(url.relativeString)")
if tmpImage == nil { tmpImage = UIImage(named: "chat_error")}
ChatConversationViewModel.sharedModel.imageT.append(tmpImage)
}else{
@ -544,28 +544,12 @@ class ChatConversationViewModel: ControlsViewModel {
}
func initSharedPlayer() {
print("[Voice Message] Creating shared player")
let core = Core.getSwiftObject(cObject: LinphoneManager.getLc())
do{
linphonePlayer = try core.createLocalPlayer(soundCardName: CallManager.instance().getSpeakerSoundCard(), videoDisplayName: nil, windowId: nil)
}catch{
print(error)
}
AudioPlayer.initSharedPlayer()
}
func startSharedPlayer(_ path: String?) {
print("[Voice Message] Starting shared player path = \(String(describing: path))")
if ((linphonePlayer!.userData) != nil) {
print("[Voice Message] a play was requested (\(String(describing: path)), but there is already one going (\(String(describing: linphonePlayer?.userData))")
let userInfo = [
"path": linphonePlayer!.userData
]
NotificationCenter.default.post(name: NSNotification.Name(rawValue: "LinphoneVoiceMessagePlayerEOF"), object: nil, userInfo: userInfo as [AnyHashable : Any])
}
CallManager.instance().changeRouteToSpeaker()
linphone_player_open(linphonePlayer?.getCobject, path)
linphone_player_start(linphonePlayer?.getCobject)
AudioPlayer.startSharedPlayer(path)
AudioPlayer.sharedModel.fileChanged.value = path
}
func cancelVoiceRecordingVM() {
@ -574,23 +558,11 @@ class ChatConversationViewModel: ControlsViewModel {
isPendingVoiceRecord = false
isVoiceRecording = false
if (voiceRecorder != nil) && linphone_recorder_get_state(voiceRecorder?.getCobject) != LinphoneRecorderClosed {
linphone_recorder_close(voiceRecorder?.getCobject)
let recordingFile = linphone_recorder_get_file(voiceRecorder?.getCobject)
if let recordingFile {
AppManager.removeFile(file: String(utf8String: recordingFile)!)
}
AudioPlayer.cancelVoiceRecordingVM(voiceRecorder)
}
}
func stopSharedPlayer() {
print("[Voice Message] Stopping shared player path = \(String(describing: linphone_player_get_user_data(linphonePlayer?.getCobject)))")
do{
try linphonePlayer?.pause()
try linphonePlayer?.seek(timeMs: 0)
linphonePlayer?.close()
linphonePlayer?.userData = nil
}catch{
print(error)
}
AudioPlayer.stopSharedPlayer()
}
}

View file

@ -6,59 +6,11 @@
//
import UIKit
let textExample = [
"Lorem ipsum dolor sit amet, consectetur adipiscing elit. Sed non risus. Suspendisse lectus tortor, dignissim sit amet, adipiscing nec, ultricies sed, dolor. Cras elementum ultrices diam. Maecenas ligula massa, varius a, semper congue, euismod non, mi. Proin porttitor, orci nec nonummy molestie, enim est eleifend mi, non fermentum diam nisl sit amet erat. Duis semper.",
"Ut in risus volutpat libero pharetra tempor. Cras vestibulum bibendum augue. Praesent egestas leo in pede. Praesent blandit odio eu enim. Pellentesque sed dui ut augue blandit sodales.",
"sed pede pellentesque fermentum. Maecenas adipiscing ante non diam sodales hendrerit.",
"ligula massa, varius a, semper congue, euismod non, mi. Proin porttitor, orci nec nonummy molestie, enim est eleifend mi, non fermentum diam nisl sit amet erat.",
"Maecenas ligula massa, varius a, semper congue, euismod non, mi. Proin porttitor, orci nec nonummy molestie, enim est eleifend mi, non fermentum diam nisl sit amet erat. Duis semper. Duis arcu massa, scelerisque vitae, consequat in, pretium a, enim. Pellentesque congue. Ut in risus volutpat libero pharetra tempor. Cras vestibulum bibendum augue. Praesent egestas leo in pede. Praesent blandit odio eu enim.",
"nec nonummy molestie, enim est eleifend mi, non fermentum diam nisl sit amet erat. Duis semper. Duis arcu massa, scelerisque vitae, consequat in, pretium a, enim. Pellentesque congue. Ut in risus volutpat libero pharetra tempor. Cras vestibulum bibendum augue. Praesent egestas leo in pede. Praesent blandit odio eu enim. Pellentesque sed dui ut augue blandit sodales. Vestibulum ante ipsum primis in faucibus orci luctus et ultrices posuere cubilia Curae",
"Vestibulum ante ipsum primis in faucibus orci luctus et ultrices posuere cubilia Curae; Aliquam nibh. Mauris ac mauris sed pede pellentesque fermentum. Maecenas adipiscing",
"Lorem ipsum dolor sit amet",
"Salut Salut Salut",
"Salut",
"1",
"Oui",
"test",
"Salut",
"Salut",
"1",
"Oui",
"test",
"Salut",
"Lorem ipsum dolor sit amet, consectetur adipiscing elit. Sed non risus. Suspendisse lectus tortor, dignissim sit amet, adipiscing nec, ultricies sed, dolor. Cras elementum ultrices diam. Maecenas ligula massa, varius a, semper congue, euismod non, mi. Proin porttitor, orci nec nonummy molestie, enim est eleifend mi, non fermentum diam nisl sit amet erat. Duis semper. Duis arcu massa, scelerisque vitae, consequat in, pretium a, enim. Pellentesque congue. Ut in risus volutpat libero pharetra tempor. Cras vestibulum bibendum augue. Praesent egestas leo in pede. Praesent blandit odio eu enim. Pellentesque sed dui ut augue blandit sodales. Vestibulum ante ipsum primis in faucibus orci luctus et ultrices posuere cubilia Curae; Aliquam nibh. Mauris ac mauris sed pede pellentesque fermentum. Maecenas adipiscing ante non diam sodales hendrerit.",
"Lorem ipsum dolor sit amet, consectetur adipiscing elit. Sed non risus. Suspendisse lectus tortor, dignissim sit amet, adipiscing nec, ultricies sed, dolor. Cras elementum ultrices diam. Maecenas ligula massa, varius a, semper congue, euismod non, mi. Proin porttitor, orci nec nonummy molestie, enim est eleifend mi, non fermentum diam nisl sit amet erat. Duis semper.",
"Lorem ipsum dolor sit amet, consectetur adipiscing elit. Sed non risus. Suspendisse lectus tortor, dignissim sit amet, adipiscing nec, ultricies sed, dolor. Cras elementum ultrices diam. Maecenas ligula massa, varius a, semper congue, euismod non, mi. Proin porttitor, orci nec nonummy molestie, enim est eleifend mi, non fermentum diam nisl sit amet erat. Duis semper."
]
let messageExample : [Int] = [
0,
1,
0,
0,
1,
1,
1,
0,
0,
1,
0,
1,
1,
0,
0,
1,
1,
0,
1,
1,
1,
0
]
import Foundation
import linphonesw
class ChatConversationTableViewSwift: UIViewController, UICollectionViewDataSource, UICollectionViewDelegateFlowLayout {
private(set) var collectionView: UICollectionView
var collectionView: UICollectionView
// Initializers
init() {
@ -98,34 +50,68 @@ class ChatConversationTableViewSwift: UIViewController, UICollectionViewDataSour
UIDeviceBridge.displayModeSwitched.readCurrentAndObserve { _ in
self.collectionView.backgroundColor = VoipTheme.backgroundWhiteBlack.get()
}
NotificationCenter.default.addObserver(self, selector: #selector(self.rotated), name: UIDevice.orientationDidChangeNotification, object: nil)
}
// Stop observing UIDevice.orientationDidChangeNotification (registered above).
deinit {
NotificationCenter.default.removeObserver(self)
}
/// Device orientation changed: reload so cell layout is recomputed.
@objc func rotated() {
collectionView.reloadData()
}
override func viewWillAppear(_ animated: Bool) {
    // Bug fix: UIViewController subclasses must forward viewWillAppear to super.
    super.viewWillAppear(animated)
    // Refresh the message history and redraw before the view becomes visible.
    ChatConversationTableViewModel.sharedModel.reloadData()
    collectionView.reloadData()
}
override func viewDidAppear(_ animated: Bool) {
    // Bug fix: forward to super, and guard the empty-history case which
    // produced IndexPath(item: -1, ...) and crashed scrollToItem.
    super.viewDidAppear(animated)
    let messageCount = ChatConversationTableViewModel.sharedModel.messageListHistory.count
    guard messageCount > 0 else { return }
    let indexPath = IndexPath(item: messageCount - 1, section: 0)
    self.collectionView.scrollToItem(at: indexPath, at: .bottom, animated: false)
}
override func viewDidLayoutSubviews() {
    super.viewDidLayoutSubviews()
    // Keep the newest message visible after layout changes (e.g. rotation).
    // Bug fix: guard the empty-history case (row -1 crashed scrollToItem).
    let messageCount = ChatConversationTableViewModel.sharedModel.messageListHistory.count
    guard messageCount > 0 else { return }
    self.collectionView.scrollToItem(at: IndexPath(row: messageCount - 1, section: 0), at: .bottom, animated: false)
}
// MARK: - UICollectionViewDataSource -
func collectionView(_ collectionView: UICollectionView, cellForItemAt indexPath: IndexPath) -> UICollectionViewCell {
let cell = collectionView.dequeueReusableCell(withReuseIdentifier: MultilineMessageCell.reuseId, for: indexPath) as! MultilineMessageCell
cell.configure(text: textExample[indexPath.row], mess: messageExample[indexPath.row])
let basic = isBasicChatRoom(ChatConversationTableViewModel.sharedModel.chatRoom?.getCobject)
cell.configure(message: ChatConversationTableViewModel.sharedModel.messageListHistory[indexPath.row], isBasic: basic)
print("MultilineMessageCell configure ChatMessage audio \(indexPath.row)")
return cell
}
func collectionView(_ collectionView: UICollectionView, numberOfItemsInSection section: Int) -> Int {
return textExample.count
return ChatConversationTableViewModel.sharedModel.messageListHistory.count
}
// MARK: - UICollectionViewDelegateFlowLayout -
/// Returns a fixed placeholder size; the real size comes from the cell's
/// `preferredLayoutAttributesFitting(_:)` self-sizing pass.
func collectionView(_ collectionView: UICollectionView, layout collectionViewLayout: UICollectionViewLayout, sizeForItemAt indexPath: IndexPath) -> CGSize {
    let placeholderSide: CGFloat = 100
    return CGSize(width: placeholderSide, height: placeholderSide)
}
/// Returns true when `room` is nil or has the Basic chat-room capability.
/// - Parameter room: C pointer to a linphone chat room, or nil.
func isBasicChatRoom(_ room: OpaquePointer?) -> Bool {
    // Idiom fix: guard-let instead of nil-check followed by force unwrap.
    guard let room = room else { return true }
    let chatRoom = ChatRoom.getSwiftObject(cObject: room)
    return chatRoom.hasCapability(mask: Int(LinphoneChatRoomCapabilitiesBasic.rawValue))
}
}

View file

@ -38,7 +38,7 @@ class ChatConversationViewSwift: BackActionsNavigationView, PHPickerViewControll
@objc var linphoneChatRoom: OpaquePointer? = nil
@objc let tableController = ChatConversationTableView()
@objc let tableControllerSwift = ChatConversationTableViewSwift()
@objc var tableControllerSwift = ChatConversationTableViewSwift()
@objc var pendingForwardMessage : OpaquePointer? = nil
@objc var sharingMedia : Bool = false
@objc var markAsRead : Bool = false
@ -227,7 +227,8 @@ class ChatConversationViewSwift: BackActionsNavigationView, PHPickerViewControll
ChatConversationViewModel.sharedModel.createChatConversation()
topBar.backgroundColor = VoipTheme.voipToolbarBackgroundColor.get()
self.contentView.addSubview(tableController.tableView)
//self.contentView.addSubview(tableController.tableView)
tableControllerSwift = ChatConversationTableViewSwift()
self.contentView.addSubview(tableControllerSwift.view)
// Setup Autolayout constraints
@ -237,7 +238,9 @@ class ChatConversationViewSwift: BackActionsNavigationView, PHPickerViewControll
tableControllerSwift.view.topAnchor.constraint(equalTo: self.contentView.topAnchor, constant: 0).isActive = true
tableControllerSwift.view.rightAnchor.constraint(equalTo: self.contentView.rightAnchor, constant: 0).isActive = true
tableController.chatRoom = ChatConversationViewModel.sharedModel.chatRoom?.getCobject
ChatConversationTableViewModel.sharedModel.chatRoom = ChatConversationViewModel.sharedModel.chatRoom
//tableController.chatRoom = ChatConversationViewModel.sharedModel.chatRoom?.getCobject
refreshControl.addTarget(self, action: #selector(refreshData), for: .valueChanged)
tableController.refreshControl = refreshControl
tableController.toggleSelectionButton = action1SelectAllButton
@ -686,7 +689,7 @@ class ChatConversationViewSwift: BackActionsNavigationView, PHPickerViewControll
func sendMessageInMessageField(rootMessage: ChatMessage?) {
if ChatConversationViewModel.sharedModel.sendMessage(message: messageView.messageText.text, withExterlBodyUrl: nil, rootMessage: rootMessage) {
if ChatConversationViewModel.sharedModel.sendMessage(message: messageView.messageText.text.trimmingCharacters(in: .whitespacesAndNewlines), withExterlBodyUrl: nil, rootMessage: rootMessage) {
messageView.messageText.text = ""
messageView.isComposing = false
}
@ -1417,6 +1420,8 @@ class ChatConversationViewSwift: BackActionsNavigationView, PHPickerViewControll
self.recordingWaveImageMask.isHidden = true
self.recordingPlayButton.isHidden = true
self.recordingStopButton.isHidden = false
print("MultilineMessageCell configure ChatMessage animPlayerOnce \(ChatConversationViewModel.sharedModel.voiceRecorder?.file)")
ChatConversationViewModel.sharedModel.startSharedPlayer(ChatConversationViewModel.sharedModel.voiceRecorder?.file)
self.animPlayerOnce()
ChatConversationViewModel.sharedModel.vrPlayerTimer = Timer.scheduledTimer(withTimeInterval: 1.0, repeats: true) { timer in
@ -1426,15 +1431,27 @@ class ChatConversationViewSwift: BackActionsNavigationView, PHPickerViewControll
}
func animPlayerOnce() {
self.recordingWaveView.progress += 1.0 / Float(ChatConversationViewModel.sharedModel.linphonePlayer!.duration/1000)
UIView.animate(withDuration: 1, delay: 0.0, options: [.curveLinear], animations: {
self.recordingWaveView.progress += 1.0 / Float(AudioPlayer.getSharedPlayer()!.duration/1000)
AudioPlayer.sharedModel.fileChanged.observe { file in
if file != ChatConversationViewModel.sharedModel.voiceRecorder?.file {
self.stopVoiceRecordPlayer()
}
}
UIView.animate(withDuration: 1, delay: 0.0, options: .curveLinear, animations: {
self.recordingWaveView.layoutIfNeeded()
if(self.recordingWaveView.progress >= 1.0){
}) { Bool in
if(self.recordingWaveView.progress >= 1.0 && ChatConversationViewModel.sharedModel.isPlayingVoiceRecording){
DispatchQueue.main.asyncAfter(deadline: .now() + 1.5) {
self.stopVoiceRecordPlayer()
if(ChatConversationViewModel.sharedModel.isPlayingVoiceRecording){
self.stopVoiceRecordPlayer()
print("MultilineMessageCell configure ChatMessage animPlayerOnce timer out")
}else{
print("MultilineMessageCell configure ChatMessage animPlayerOnce timer out cancelled")
}
}
}
})
}
}
func stopVoiceRecordPlayer() {

View file

@ -6,6 +6,8 @@
//
import UIKit
import Foundation
import linphonesw
class MultilineMessageCell: UICollectionViewCell {
static let reuseId = "MultilineMessageCellReuseId"
@ -15,14 +17,19 @@ class MultilineMessageCell: UICollectionViewCell {
private let bubble: UIView = UIView(frame: .zero)
private let imageUser: UIView = UIView(frame: .zero)
private let chatRead = UIImageView(image: UIImage(named: "chat_read.png"))
var constraint1 : NSLayoutConstraint? = nil
var constraint2 : NSLayoutConstraint? = nil
//var imageConstraint : [NSLayoutConstraint?] = []
let labelInset = UIEdgeInsets(top: 0, left: 0, bottom: 0, right: 0)
var isPlayingVoiceRecording = false
var vrPlayerTimer = Timer()
override init(frame: CGRect) {
super.init(frame: frame)
let labelInset = UIEdgeInsets(top: 0, left: 0, bottom: 0, right: 0)
contentView.addSubview(contentBubble)
contentBubble.translatesAutoresizingMaskIntoConstraints = false
contentBubble.topAnchor.constraint(equalTo: contentView.topAnchor, constant: 0).isActive = true
@ -45,19 +52,8 @@ class MultilineMessageCell: UICollectionViewCell {
bubble.bottomAnchor.constraint(equalTo: contentView.bottomAnchor, constant: labelInset.bottom).isActive = true
bubble.leadingAnchor.constraint(equalTo: contentBubble.leadingAnchor, constant: labelInset.left).isActive = true
bubble.trailingAnchor.constraint(equalTo: contentBubble.trailingAnchor, constant: labelInset.right).isActive = true
bubble.layer.cornerRadius = 10.0
label.numberOfLines = 0
label.lineBreakMode = .byWordWrapping
bubble.addSubview(label)
label.translatesAutoresizingMaskIntoConstraints = false
label.topAnchor.constraint(equalTo: contentView.topAnchor, constant: labelInset.top+10).isActive = true
label.bottomAnchor.constraint(equalTo: contentView.bottomAnchor, constant: labelInset.bottom-10).isActive = true
label.leadingAnchor.constraint(equalTo: contentBubble.leadingAnchor, constant: labelInset.left+10).isActive = true
label.trailingAnchor.constraint(equalTo: contentBubble.trailingAnchor, constant: labelInset.right-10).isActive = true
contentBubble.addSubview(chatRead)
chatRead.bottomAnchor.constraint(equalTo: contentView.bottomAnchor, constant: -2).isActive = true
chatRead.trailingAnchor.constraint(equalTo: contentView.trailingAnchor, constant: -8).isActive = true
@ -69,10 +65,26 @@ class MultilineMessageCell: UICollectionViewCell {
fatalError("Storyboards are quicker, easier, more seductive. Not stronger then Code.")
}
func configure(text: String?, mess: Int) {
label.text = text
print("Storyboards are quicker \(mess)")
if mess == 1 {
/// Logs cell teardown for debugging.
override func removeFromSuperview() {
    // Bug fix: the override must call super, otherwise the view is never
    // actually detached from its superview.
    super.removeFromSuperview()
    print("MultilineMessageCell configure ChatMessage animPlayerOnce stop stopstop died")
}
func configure(message: ChatMessage, isBasic: Bool) {
/*
For Multimedia
message.contents.forEach { content in
label.text = content.utf8Text
}
*/
if bubble.subviews.count > 0
{
bubble.subviews.forEach({ $0.removeFromSuperview()})
}
label.text = message.contents.first?.utf8Text.trimmingCharacters(in: .whitespacesAndNewlines)
if !message.isOutgoing {
constraint1?.isActive = true
constraint2?.isActive = false
imageUser.isHidden = false
@ -85,6 +97,147 @@ class MultilineMessageCell: UICollectionViewCell {
bubble.backgroundColor = UIColor("A").withAlphaComponent(0.2)
chatRead.isHidden = false
}
if isBasic {
if message.contents.first?.type == "text"{
createBubbleText()
}else if message.contents.first?.type == "image"{
createBubbleImage(message: message)
}else if message.contents.first?.type == "video"{
createBubbleVideo(message: message)
}else if message.contents.first?.type == "audio"{
createBubbleAudio(message: message)
}else{
//createBubbleText()
}
}
}
/// Lays out the multi-line text label inside the bubble for a plain text message.
func createBubbleText(){
    print("MultilineMessageCell configure ChatMessage other")
    label.numberOfLines = 0
    label.lineBreakMode = .byWordWrapping
    bubble.addSubview(label)
    label.translatesAutoresizingMaskIntoConstraints = false
    // 10pt inset on every side, relative to the bubble's content anchors.
    NSLayoutConstraint.activate([
        label.topAnchor.constraint(equalTo: contentView.topAnchor, constant: labelInset.top + 10),
        label.bottomAnchor.constraint(equalTo: contentView.bottomAnchor, constant: labelInset.bottom - 10),
        label.leadingAnchor.constraint(equalTo: contentBubble.leadingAnchor, constant: labelInset.left + 10),
        label.trailingAnchor.constraint(equalTo: contentBubble.trailingAnchor, constant: labelInset.right - 10)
    ])
}
/// Lays out an image bubble and loads the attachment from its file path.
/// Falls back to the "chat_error" placeholder when the file cannot be read.
func createBubbleImage(message: ChatMessage){
    print("MultilineMessageCell configure ChatMessage image")
    let imageViewBubble = UIImageView(image: UIImage(named: "chat_error"))
    bubble.addSubview(imageViewBubble)
    imageViewBubble.translatesAutoresizingMaskIntoConstraints = false
    imageViewBubble.topAnchor.constraint(equalTo: contentView.topAnchor, constant: labelInset.top+10).isActive = true
    imageViewBubble.bottomAnchor.constraint(equalTo: contentView.bottomAnchor, constant: labelInset.bottom-10).isActive = true
    imageViewBubble.leadingAnchor.constraint(equalTo: contentBubble.leadingAnchor, constant: labelInset.left+10).isActive = true
    imageViewBubble.trailingAnchor.constraint(equalTo: contentBubble.trailingAnchor, constant: labelInset.right-10).isActive = true
    // Bug fix: UIImage(named:) resolves asset-catalog/bundle names, not absolute
    // file paths, so downloaded attachments never loaded; contentsOfFile does.
    // Also avoids force-unwrapping contents.first.
    if let filePath = message.contents.first?.filePath,
       let imageMessage = UIImage(contentsOfFile: filePath) {
        imageViewBubble.image = resizeImage(image: imageMessage, targetSize: CGSize(width: UIScreen.main.bounds.size.width*3/4, height: 300.0))
    }
}
/// Lays out a video bubble: a thumbnail of the first frame with a white
/// play icon overlaid in the centre.
func createBubbleVideo(message: ChatMessage){
    print("MultilineMessageCell configure ChatMessage video")
    let imageViewBubble = UIImageView(image: UIImage(named: "file_video_default"))
    let imagePlayViewBubble = UIImageView(image: UIImage(named: "vr_play"))
    bubble.addSubview(imageViewBubble)
    imageViewBubble.translatesAutoresizingMaskIntoConstraints = false
    imageViewBubble.topAnchor.constraint(equalTo: contentView.topAnchor, constant: labelInset.top+10).isActive = true
    imageViewBubble.bottomAnchor.constraint(equalTo: contentView.bottomAnchor, constant: labelInset.bottom-10).isActive = true
    imageViewBubble.leadingAnchor.constraint(equalTo: contentBubble.leadingAnchor, constant: labelInset.left+10).isActive = true
    imageViewBubble.trailingAnchor.constraint(equalTo: contentBubble.trailingAnchor, constant: labelInset.right-10).isActive = true
    if #available(iOS 13.0, *) {
        // Bug fix: avoid force-unwrapping the bundled image.
        imagePlayViewBubble.image = UIImage(named: "vr_play")?.withTintColor(.white)
    }
    imageViewBubble.addSubview(imagePlayViewBubble)
    imagePlayViewBubble.centerXAnchor.constraint(equalTo: imageViewBubble.centerXAnchor).isActive = true
    imagePlayViewBubble.centerYAnchor.constraint(equalTo: imageViewBubble.centerYAnchor).isActive = true
    imagePlayViewBubble.size(w: 40, h: 40).done()
    // Bug fix: avoid force-unwrapping message.contents.first; an attachment
    // without a thumbnail keeps the default placeholder instead of crashing.
    if let filePath = message.contents.first?.filePath,
       let thumbnail = createThumbnailOfVideoFromFileURL(videoURL: filePath) {
        imageViewBubble.image = resizeImage(image: thumbnail, targetSize: CGSize(width: UIScreen.main.bounds.size.width*3/4, height: 300.0))
    }
}
/// Builds the voice-message bubble: a progress bar hosting play/stop buttons,
/// a waveform image and the recording duration, wired to the shared player.
func createBubbleAudio(message: ChatMessage){
print("MultilineMessageCell configure ChatMessage audio")
let recordingView = UIView()
let recordingPlayButton = CallControlButton(width: 40, height: 40, buttonTheme:VoipTheme.nav_button("vr_play"))
let recordingStopButton = CallControlButton(width: 40, height: 40, buttonTheme:VoipTheme.nav_button("vr_stop"))
let recordingWaveView = UIProgressView()
let recordingDurationTextView = StyledLabel(VoipTheme.chat_conversation_recording_duration)
let recordingWaveImage = UIImageView(image: UIImage(named: "vr_wave.png"))
// Pin the container to the bubble with a 10pt inset on every side.
bubble.addSubview(recordingView)
recordingView.translatesAutoresizingMaskIntoConstraints = false
recordingView.topAnchor.constraint(equalTo: contentView.topAnchor, constant: labelInset.top+10).isActive = true
recordingView.bottomAnchor.constraint(equalTo: contentView.bottomAnchor, constant: labelInset.bottom-10).isActive = true
recordingView.leadingAnchor.constraint(equalTo: contentBubble.leadingAnchor, constant: labelInset.left+10).isActive = true
recordingView.trailingAnchor.constraint(equalTo: contentBubble.trailingAnchor, constant: labelInset.right-10).isActive = true
recordingView.height(50.0).width(280).done()
// The progress view fills the container and doubles as playback progress.
recordingView.addSubview(recordingWaveView)
recordingWaveView.translatesAutoresizingMaskIntoConstraints = false
recordingWaveView.topAnchor.constraint(equalTo: contentView.topAnchor, constant: labelInset.top+10).isActive = true
recordingWaveView.bottomAnchor.constraint(equalTo: contentView.bottomAnchor, constant: labelInset.bottom-10).isActive = true
recordingWaveView.leadingAnchor.constraint(equalTo: contentBubble.leadingAnchor, constant: labelInset.left+10).isActive = true
recordingWaveView.trailingAnchor.constraint(equalTo: contentBubble.trailingAnchor, constant: labelInset.right-10).isActive = true
recordingWaveView.progressViewStyle = .bar
recordingWaveView.layer.cornerRadius = 5
// Tint differs for outgoing vs incoming messages.
recordingWaveView.progressTintColor = message.isOutgoing ? UIColor("A") : UIColor("D")
recordingWaveView.clipsToBounds = true
// NOTE(review): indexing sublayers![1] / subviews[1] relies on UIProgressView's
// private view hierarchy and can break on an iOS update — confirm per release.
recordingWaveView.layer.sublayers![1].cornerRadius = 5
recordingWaveView.subviews[1].clipsToBounds = true
recordingWaveView.addSubview(recordingPlayButton)
recordingPlayButton.alignParentLeft(withMargin: 10).matchParentHeight().done()
// Stop button sits on top of the play button; only one is visible at a time.
recordingWaveView.addSubview(recordingStopButton)
recordingStopButton.alignParentLeft(withMargin: 10).matchParentHeight().done()
recordingStopButton.isHidden = true
recordingWaveView.addSubview(recordingWaveImage)
recordingWaveImage.alignParentLeft(withMargin: 60).alignParentRight(withMargin: 60).height(26).alignHorizontalCenterWith(recordingView).done()
recordingWaveView.addSubview(recordingDurationTextView)
recordingDurationTextView.alignParentRight(withMargin: 10).matchParentHeight().done()
// Duration label is computed from the audio file attached to the message.
recordingDurationTextView.text = recordingDuration(message.contents.first?.filePath)
// Play/stop toggle playback of this message's file via the shared player.
recordingPlayButton.onClickAction = {
self.playRecordedMessage(recordingPlayButton: recordingPlayButton, recordingStopButton: recordingStopButton, recordingWaveView: recordingWaveView, voiceRecorder: message.contents.first?.filePath)
}
recordingStopButton.onClickAction = {
self.stopVoiceRecordPlayer(recordingPlayButton: recordingPlayButton, recordingStopButton: recordingStopButton, recordingWaveView: recordingWaveView)
}
}
/// Shows the generic audio-file icon inside the bubble (used for audio
/// content presented as a plain file).
func createBubbleAudioFile(message: ChatMessage){
    print("MultilineMessageCell configure ChatMessage audio file")
    let fileIcon = UIImage(named: "file_audio_default")
    let iconView = UIImageView(image: UIImage(named: "chat_error"))
    guard let icon = fileIcon else { return }
    bubble.addSubview(iconView)
    iconView.translatesAutoresizingMaskIntoConstraints = false
    // 10pt inset on every side, matching the other bubble layouts.
    NSLayoutConstraint.activate([
        iconView.topAnchor.constraint(equalTo: contentView.topAnchor, constant: labelInset.top + 10),
        iconView.bottomAnchor.constraint(equalTo: contentView.bottomAnchor, constant: labelInset.bottom - 10),
        iconView.leadingAnchor.constraint(equalTo: contentBubble.leadingAnchor, constant: labelInset.left + 10),
        iconView.trailingAnchor.constraint(equalTo: contentBubble.trailingAnchor, constant: labelInset.right - 10)
    ])
    iconView.image = icon
}
override func preferredLayoutAttributesFitting(_ layoutAttributes: UICollectionViewLayoutAttributes) -> UICollectionViewLayoutAttributes {
@ -98,4 +251,108 @@ class MultilineMessageCell: UICollectionViewCell {
return layoutAttributes
}
/// Extracts a thumbnail frame (t = 0.1 s) from the video at the given
/// absolute file path.
/// - Parameter videoURL: Absolute path to the video file on disk.
/// - Returns: The frame as a UIImage, or nil when extraction fails.
func createThumbnailOfVideoFromFileURL(videoURL: String) -> UIImage? {
    // Bug fix: URL(string: "file://" + path)! crashed on paths containing
    // characters that need percent-escaping; fileURLWithPath is the safe form.
    let asset = AVAsset(url: URL(fileURLWithPath: videoURL))
    let assetImgGenerate = AVAssetImageGenerator(asset: asset)
    // Respect the track's rotation metadata so thumbnails are upright.
    assetImgGenerate.appliesPreferredTrackTransform = true
    do {
        let img = try assetImgGenerate.copyCGImage(at: CMTimeMake(value: 1, timescale: 10), actualTime: nil)
        return UIImage(cgImage: img)
    } catch {
        return nil
    }
}
/// Scales `image` uniformly so it fits within `targetSize`, preserving
/// aspect ratio, and renders it at 2x scale on an opaque context.
func resizeImage(image: UIImage, targetSize: CGSize) -> UIImage {
    let originalSize = image.size
    // The limiting dimension decides the uniform scale factor.
    let scaleFactor = min(targetSize.width / originalSize.width,
                          targetSize.height / originalSize.height)
    let scaledSize = CGSize(width: originalSize.width * scaleFactor,
                            height: originalSize.height * scaleFactor)
    let drawRect = CGRect(origin: .zero, size: scaledSize)
    UIGraphicsBeginImageContextWithOptions(scaledSize, true, 2.0)
    image.draw(in: drawRect)
    let rendered = UIGraphicsGetImageFromCurrentImageContext()
    UIGraphicsEndImageContext()
    return rendered!
}
// MARK: - Audio playback
/// Starts playback of the voice message at `voiceRecorder` (a file path) and
/// drives the progress animation roughly once per second until it finishes.
func playRecordedMessage(recordingPlayButton: UIButton, recordingStopButton:UIButton, recordingWaveView: UIProgressView, voiceRecorder: String?) {
AudioPlayer.initSharedPlayer()
// Publishing the new file lets other cells observing fileChanged stop their
// own playback (see the observer installed in animPlayerOnce).
AudioPlayer.sharedModel.fileChanged.value = voiceRecorder
print("MultilineMessageCell configure ChatMessage animPlayerOnce \(String(describing: voiceRecorder))")
recordingPlayButton.isHidden = true
recordingStopButton.isHidden = false
AudioPlayer.startSharedPlayer(voiceRecorder)
self.animPlayerOnce(recordingPlayButton: recordingPlayButton, recordingStopButton: recordingStopButton, recordingWaveView: recordingWaveView, voiceRecorder: voiceRecorder)
// NOTE(review): 1.01 s interval presumably avoids racing the 1 s UIView
// animation inside animPlayerOnce — confirm.
vrPlayerTimer = Timer.scheduledTimer(withTimeInterval: 1.01, repeats: true) { timer in
self.animPlayerOnce(recordingPlayButton: recordingPlayButton, recordingStopButton: recordingStopButton, recordingWaveView: recordingWaveView, voiceRecorder: voiceRecorder)
print("MultilineMessageCell configure ChatMessage animPlayerOnce timer")
}
isPlayingVoiceRecording = true
}
/// Returns the duration of the audio file at `_voiceRecordingFile`,
/// formatted "mm:ss". Returns an empty string when the path is nil or the
/// file cannot be opened (preserving the original's empty-string fallback).
func recordingDuration(_ _voiceRecordingFile: String?) -> String? {
    // Bug fix: the original force-unwrapped both the path and the formatted
    // string, crashing on a nil path or an unreadable file.
    guard let file = _voiceRecordingFile else { return "" }
    let core = Core.getSwiftObject(cObject: LinphoneManager.getLc())
    var result = ""
    do {
        // A throwaway local player is used only to read the file's duration.
        let player = try core.createLocalPlayer(soundCardName: nil, videoDisplayName: nil, windowId: nil)
        try player.open(filename: file)
        result = formattedDuration(player.duration) ?? ""
        player.close()
    } catch {
        print(error)
    }
    return result
}
/// Formats a millisecond duration as "mm:ss" (zero-padded).
func formattedDuration(_ valueMs: Int) -> String? {
    let minutes = valueMs / 60000
    let seconds = (valueMs % 60000) / 1000
    return String(format: "%02ld:%02ld", minutes, seconds)
}
/// Advances the progress bar by one second's worth of playback and stops the
/// player when the bar is full. Called once at play start and then from
/// `vrPlayerTimer` every ~1 s.
func animPlayerOnce(recordingPlayButton: UIButton, recordingStopButton:UIButton, recordingWaveView: UIProgressView, voiceRecorder: String?) {
print("MultilineMessageCell configure ChatMessage animPlayerOnce \(String(describing: AudioPlayer.getSharedPlayer()!.duration))")
// One tick = 1 s / total duration (seconds), floored to a tenth.
recordingWaveView.progress += floor(1.0 / Float(AudioPlayer.getSharedPlayer()!.duration/1000) * 10) / 10.0
// Stop this cell's playback when another file starts playing elsewhere.
// NOTE(review): this registers a new observer on every tick; if `observe`
// retains callbacks, observers accumulate for the cell's lifetime — confirm
// whether MutableLiveData deduplicates or needs explicit removal.
AudioPlayer.sharedModel.fileChanged.observe { file in
if (file != voiceRecorder && self.isPlayingVoiceRecording) {
print("MultilineMessageCell configure ChatMessage animPlayerOnce stop stopstop\(String(describing: AudioPlayer.getSharedPlayer()!.duration))")
self.stopVoiceRecordPlayer(recordingPlayButton: recordingPlayButton, recordingStopButton: recordingStopButton, recordingWaveView: recordingWaveView)
}
}
// Animate the bar for one second; on completion, reset once full.
UIView.animate(withDuration: 1, delay: 0.0, options: .curveLinear, animations: {
recordingWaveView.layoutIfNeeded()
}) { Bool in
if(recordingWaveView.progress >= 1.0 && self.isPlayingVoiceRecording){
UIView.animate(withDuration: 1, delay: 0.0, options: .curveLinear, animations: {
recordingWaveView.layoutIfNeeded()
})
self.stopVoiceRecordPlayer(recordingPlayButton: recordingPlayButton, recordingStopButton: recordingStopButton, recordingWaveView: recordingWaveView)
}
}
}
/// Stops shared playback and returns the cell's controls to their idle state.
func stopVoiceRecordPlayer(recordingPlayButton playButton: UIButton, recordingStopButton stopButton: UIButton, recordingWaveView waveView: UIProgressView) {
    AudioPlayer.stopSharedPlayer()
    vrPlayerTimer.invalidate()
    isPlayingVoiceRecording = false
    // Reset the progress bar without animating back to zero.
    waveView.progress = 0.0
    waveView.setProgress(0.0, animated: false)
    playButton.isHidden = false
    stopButton.isHidden = true
}
}

View file

@ -0,0 +1,74 @@
//
//  AudioPlayer.swift
// linphone
//
// Created by Benoît Martins on 27/02/2023.
//
import Foundation
import linphonesw
/// Shared wrapper around a single linphone local `Player` used to play voice
/// messages from chat bubbles. The player's `userData` holds the path of the
/// file currently playing (nil when idle).
class AudioPlayer: ControlsViewModel {
	static let sharedModel = AudioPlayer()
	// The one player instance shared by every chat cell.
	static var linphonePlayer : Player? = nil
	// Fires with the new file path when playback switches to another recording,
	// so cells playing a different file can reset their UI.
	var fileChanged = MutableLiveData<String>()

	/// Returns the shared player, or nil if `initSharedPlayer()` has not run yet.
	static func getSharedPlayer() -> Player?{
		return linphonePlayer
	}

	/// Lazily creates the shared local player bound to the speaker sound card.
	static func initSharedPlayer(){
		print("[Voice Message] Creating shared player")
		let core = Core.getSwiftObject(cObject: LinphoneManager.getLc())
		do{
			// Only (re)create when no playback is in flight: userData is non-nil
			// while a file is playing.
			if linphonePlayer?.userData == nil {
				linphonePlayer = try core.createLocalPlayer(soundCardName: CallManager.instance().getSpeakerSoundCard(), videoDisplayName: nil, windowId: nil)
			}
		}catch{
			print(error)
		}
	}

	/// Opens and starts playback of the file at `path` on the shared player.
	/// If another file is already playing, posts a "LinphoneVoiceMessagePlayerEOF"
	/// notification for it first so its UI can wind down.
	/// - Parameter path: path of the audio file to play; ignored when nil.
	static func startSharedPlayer(_ path: String?) {
		print("[Voice Message] Starting shared player path = \(String(describing: path))")
		// Guard instead of force-unwrapping: calling this before
		// initSharedPlayer(), or with a nil path, must not crash.
		guard let player = linphonePlayer else {
			print("[Voice Message] startSharedPlayer called before initSharedPlayer")
			return
		}
		guard let path = path else {
			print("[Voice Message] startSharedPlayer called with a nil path")
			return
		}
		if player.userData != nil {
			print("[Voice Message] a play was requested (\(path)), but there is already one going (\(String(describing: player.userData))")
			let userInfo = [
				"path": player.userData
			]
			NotificationCenter.default.post(name: NSNotification.Name(rawValue: "LinphoneVoiceMessagePlayerEOF"), object: nil, userInfo: userInfo as [AnyHashable : Any])
		}
		CallManager.instance().changeRouteToSpeaker()
		do{
			try player.open(filename: path)
			try player.start()
			print("MultilineMessageCell configure ChatMessage animPlayerOnce linphonePlayer started")
		}catch{
			// Log the actual error, not a bare "error" literal, so failures
			// to open/start a file are diagnosable (matches the other catches).
			print(error)
		}
	}

	/// Closes the recorder and deletes its recording file, if any.
	static func cancelVoiceRecordingVM(_ voiceRecorder: Recorder?) {
		voiceRecorder?.close()
		// `file` is already a Swift String; no utf8 round-trip needed.
		if let recordingFile = voiceRecorder?.file {
			AppManager.removeFile(file: recordingFile)
		}
	}

	/// Pauses the shared player, rewinds it to the start and marks it idle
	/// (clears userData, which carries the currently-playing path).
	static func stopSharedPlayer() {
		print("[Voice Message] Stopping shared player path = \(String(describing: linphonePlayer?.userData))")
		do{
			try linphonePlayer?.pause()
			try linphonePlayer?.seek(timeMs: 0)
			linphonePlayer?.userData = nil
		}catch{
			print(error)
		}
	}
}

View file

@ -950,6 +950,8 @@
D77057F1292E4A340031A970 /* ChatConversationViewModel.swift in Sources */ = {isa = PBXBuildFile; fileRef = D77057F0292E4A340031A970 /* ChatConversationViewModel.swift */; };
D779D39829A3C933007B8087 /* ChatConversationTableViewSwift.swift in Sources */ = {isa = PBXBuildFile; fileRef = D779D39729A3C933007B8087 /* ChatConversationTableViewSwift.swift */; };
D779D39A29A4C285007B8087 /* MultilineMessageCell.swift in Sources */ = {isa = PBXBuildFile; fileRef = D779D39929A4C285007B8087 /* MultilineMessageCell.swift */; };
D779D39C29A76DE6007B8087 /* ChatConversationTableViewModel.swift in Sources */ = {isa = PBXBuildFile; fileRef = D779D39B29A76DE6007B8087 /* ChatConversationTableViewModel.swift */; };
D779D39E29AC9E93007B8087 /* AudioPlayer.swift in Sources */ = {isa = PBXBuildFile; fileRef = D779D39D29AC9E92007B8087 /* AudioPlayer.swift */; };
D7A7545029507038005C9D4A /* CustomAlertController.swift in Sources */ = {isa = PBXBuildFile; fileRef = D7A7544F29507038005C9D4A /* CustomAlertController.swift */; };
D7C6DE832948CF3100756E03 /* DropDownCell.swift in Sources */ = {isa = PBXBuildFile; fileRef = D7C6DE812948CF3100756E03 /* DropDownCell.swift */; };
D7C6DE842948CF3100756E03 /* DropDownCell.xib in Resources */ = {isa = PBXBuildFile; fileRef = D7C6DE822948CF3100756E03 /* DropDownCell.xib */; };
@ -2191,6 +2193,8 @@
D77057F0292E4A340031A970 /* ChatConversationViewModel.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = ChatConversationViewModel.swift; sourceTree = "<group>"; };
D779D39729A3C933007B8087 /* ChatConversationTableViewSwift.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = ChatConversationTableViewSwift.swift; sourceTree = "<group>"; };
D779D39929A4C285007B8087 /* MultilineMessageCell.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = MultilineMessageCell.swift; sourceTree = "<group>"; };
D779D39B29A76DE6007B8087 /* ChatConversationTableViewModel.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = ChatConversationTableViewModel.swift; sourceTree = "<group>"; };
D779D39D29AC9E92007B8087 /* AudioPlayer.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = AudioPlayer.swift; sourceTree = "<group>"; };
D7A7544F29507038005C9D4A /* CustomAlertController.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = CustomAlertController.swift; sourceTree = "<group>"; };
D7C6DE812948CF3100756E03 /* DropDownCell.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = DropDownCell.swift; sourceTree = "<group>"; };
D7C6DE822948CF3100756E03 /* DropDownCell.xib */ = {isa = PBXFileReference; lastKnownFileType = file.xib; path = DropDownCell.xib; sourceTree = "<group>"; };
@ -3458,6 +3462,7 @@
C63F71B0285A24B10066163B /* TimestampUtils.swift */,
D74A44902923BAF90017D063 /* BackActionsNavigationView.swift */,
D7A7544F29507038005C9D4A /* CustomAlertController.swift */,
D779D39D29AC9E92007B8087 /* AudioPlayer.swift */,
);
path = Util;
sourceTree = "<group>";
@ -3833,6 +3838,7 @@
isa = PBXGroup;
children = (
D77057F0292E4A340031A970 /* ChatConversationViewModel.swift */,
D779D39B29A76DE6007B8087 /* ChatConversationTableViewModel.swift */,
);
path = ViewModels;
sourceTree = "<group>";
@ -5115,6 +5121,7 @@
files = (
63B81A0F1B57DA33009604A6 /* TPKeyboardAvoidingTableView.m in Sources */,
CF1DE92D210A0F5D00A0A97E /* UILinphoneAudioPlayer.m in Sources */,
D779D39C29A76DE6007B8087 /* ChatConversationTableViewModel.swift in Sources */,
1D60589B0D05DD56006BFB54 /* main.m in Sources */,
C63F725C285A24B10066163B /* IncomingCallView.swift in Sources */,
C63F726B285A24B10066163B /* ButtonWithStateBackgrounds.swift in Sources */,
@ -5172,6 +5179,7 @@
6377AC801BDE4069007F7625 /* UIBackToCallButton.m in Sources */,
6308F9C51BF0DD6600D1234B /* XMLRPCHelper.m in Sources */,
C63F7235285A24B10066163B /* CallsViewModel.swift in Sources */,
D779D39E29AC9E93007B8087 /* AudioPlayer.swift in Sources */,
C63F722D285A24B10066163B /* CoreExtensions.swift in Sources */,
C63F722F285A24B10066163B /* AddressExtensions.swift in Sources */,
D3ED3E871586291E006C0DE4 /* TabBarView.m in Sources */,