mirror of
https://github.com/ichmagmaus111/ghostgram.git
synced 2026-04-24 08:36:07 +02:00
534dd03f41
Bug 1 — Video recorder freeze with SendDelayManager enabled
When SendDelayManager is active, video notes (кружки) and media are
enqueued into Namespaces.Message.ScheduledLocal, not the main chat
history. This broke setupSendActionOnViewUpdate which expects the message
to appear in the regular history before triggering its callback (dismiss
recorder, collapse input). The callback never fired → recorder overlay
stayed on screen → app froze.
Fix: in requestVideoRecorder's completion closure, detect when
SendDelayManager.shared.isEnabled, immediately dismiss the recorder and
clear the interface state, bypassing the broken animation path.
Bug 2 — Scheduled messages remain visible after being sent
When AntiDeleteManager is enabled, the .DeleteMessages case in
AccountStateManagementUtils is made to skip non-Cloud namespaces
(ScheduledCloud, ScheduledLocal). However after the loop no code
physically removed those skipped messages from Postbox — they stayed
in the scheduled list forever, appearing as 'planned' messages that
never disappeared.
Fix: collect non-Cloud IDs during the Anti-Delete loop and physically
delete them via _internal_deleteMessages after the loop.
Bug 3 — Account switcher avatar not loading
Race condition in avatar loading: resourceData was subscribed to first,
then fetchedMediaResource triggered the network fetch. The signal's
callback fired before data arrived, calling buildButton(nil)
which discarded the real avatar.
Fix: trigger fetchedMediaResource first, then subscribe to resourceData
with filter { $0.complete } |> take(1) so the signal stays alive until
the download completes.
848 lines
43 KiB
Swift
848 lines
43 KiB
Swift
import Foundation
|
|
import UIKit
|
|
import Postbox
|
|
import SwiftSignalKit
|
|
import Display
|
|
import AsyncDisplayKit
|
|
import TelegramCore
|
|
import SafariServices
|
|
import MobileCoreServices
|
|
import Intents
|
|
import LegacyComponents
|
|
import TelegramPresentationData
|
|
import TelegramUIPreferences
|
|
import DeviceAccess
|
|
import TextFormat
|
|
import TelegramBaseController
|
|
import AccountContext
|
|
import TelegramStringFormatting
|
|
import OverlayStatusController
|
|
import DeviceLocationManager
|
|
import ShareController
|
|
import UrlEscaping
|
|
import ContextUI
|
|
import ComposePollUI
|
|
import AlertUI
|
|
import PresentationDataUtils
|
|
import UndoUI
|
|
import TelegramCallsUI
|
|
import TelegramNotices
|
|
import GameUI
|
|
import ScreenCaptureDetection
|
|
import GalleryUI
|
|
import OpenInExternalAppUI
|
|
import LegacyUI
|
|
import InstantPageUI
|
|
import LocationUI
|
|
import BotPaymentsUI
|
|
import DeleteChatPeerActionSheetItem
|
|
import HashtagSearchUI
|
|
import LegacyMediaPickerUI
|
|
import Emoji
|
|
import PeerAvatarGalleryUI
|
|
import PeerInfoUI
|
|
import RaiseToListen
|
|
import UrlHandling
|
|
import AvatarNode
|
|
import AppBundle
|
|
import LocalizedPeerData
|
|
import PhoneNumberFormat
|
|
import SettingsUI
|
|
import UrlWhitelist
|
|
import TelegramIntents
|
|
import TooltipUI
|
|
import StatisticsUI
|
|
import MediaResources
|
|
import LocalMediaResources
|
|
import GalleryData
|
|
import ChatInterfaceState
|
|
import InviteLinksUI
|
|
import Markdown
|
|
import TelegramPermissionsUI
|
|
import Speak
|
|
import TranslateUI
|
|
import UniversalMediaPlayer
|
|
import WallpaperBackgroundNode
|
|
import ChatListUI
|
|
import CalendarMessageScreen
|
|
import ReactionSelectionNode
|
|
import ReactionListContextMenuContent
|
|
import AttachmentUI
|
|
import AttachmentTextInputPanelNode
|
|
import MediaPickerUI
|
|
import ChatPresentationInterfaceState
|
|
import Pasteboard
|
|
import ChatSendMessageActionUI
|
|
import ChatTextLinkEditUI
|
|
import WebUI
|
|
import PremiumUI
|
|
import ImageTransparency
|
|
import StickerPackPreviewUI
|
|
import TextNodeWithEntities
|
|
import EntityKeyboard
|
|
import ChatTitleView
|
|
import EmojiStatusComponent
|
|
import ChatTimerScreen
|
|
import MediaPasteboardUI
|
|
import ChatListHeaderComponent
|
|
import ChatControllerInteraction
|
|
import FeaturedStickersScreen
|
|
import ChatEntityKeyboardInputNode
|
|
import StorageUsageScreen
|
|
import AvatarEditorScreen
|
|
import ChatScheduleTimeController
|
|
import ICloudResources
|
|
import StoryContainerScreen
|
|
import MoreHeaderButton
|
|
import VolumeButtons
|
|
import ChatAvatarNavigationNode
|
|
import ChatContextQuery
|
|
import PeerReportScreen
|
|
import PeerSelectionController
|
|
import SaveToCameraRoll
|
|
import ChatMessageDateAndStatusNode
|
|
import ReplyAccessoryPanelNode
|
|
import TextSelectionNode
|
|
import ChatMessagePollBubbleContentNode
|
|
import ChatMessageItem
|
|
import ChatMessageItemImpl
|
|
import ChatMessageItemView
|
|
import ChatMessageItemCommon
|
|
import ChatMessageAnimatedStickerItemNode
|
|
import ChatMessageBubbleItemNode
|
|
import ChatNavigationButton
|
|
import WebsiteType
|
|
import PeerInfoScreen
|
|
import MediaEditorScreen
|
|
import WallpaperGalleryScreen
|
|
import WallpaperGridScreen
|
|
import VideoMessageCameraScreen
|
|
import TopMessageReactions
|
|
import AudioWaveform
|
|
import PeerNameColorScreen
|
|
import ChatEmptyNode
|
|
import ChatMediaInputStickerGridItem
|
|
import AdsInfoScreen
|
|
|
|
extension ChatControllerImpl {
|
|
/// Installs a fresh audio recorder, optionally resuming a previously recorded draft.
///
/// - Parameters:
///   - beginWithTone: Forwarded to the media manager's recorder factory.
///   - existingDraft: When present, its completed resource data and resume data are
///     loaded so recording continues where the draft left off.
func requestAudioRecorder(beginWithTone: Bool, existingDraft: ChatInterfaceMediaDraftState.Audio? = nil) {
    guard self.audioRecorderValue == nil else {
        return
    }
    
    // Haptics are only prepared for a brand-new recording, not when resuming a draft.
    if self.recorderFeedback == nil && existingDraft == nil {
        self.recorderFeedback = HapticFeedback()
        self.recorderFeedback?.prepareImpact(.light)
    }
    
    // Rebuild resume state from the draft's completed resource, if all pieces exist.
    var resumeData: AudioRecorderResumeData?
    if let existingDraft,
       let path = self.context.account.postbox.mediaBox.completedResourcePath(existingDraft.resource),
       let compressedData = try? Data(contentsOf: URL(fileURLWithPath: path), options: [.mappedIfSafe]),
       let recorderResumeData = existingDraft.resumeData {
        resumeData = AudioRecorderResumeData(compressedData: compressedData, resumeData: recorderResumeData)
    }
    
    let recorder = self.context.sharedContext.mediaManager.audioRecorder(
        resumeData: resumeData,
        beginWithTone: beginWithTone,
        applicationBindings: self.context.sharedContext.applicationBindings,
        beganWithTone: { _ in
        }
    )
    self.audioRecorder.set(recorder)
}
|
|
|
|
/// Presents the video-message (round video) camera screen and wires up its
/// send-completion handling.
///
/// FIX: the original computed `viewOnceAvailable` using `isBot` *before* `isBot`
/// was assigned (the bot check ran after the read, making the assignment dead).
/// The bot check is now performed first, so view-once is correctly disabled for
/// bot peers as the `!isBot` term intends.
func requestVideoRecorder() {
    if self.videoRecorderValue == nil {
        if let currentInputPanelFrame = self.chatDisplayNode.currentInputPanelFrame() {
            if self.recorderFeedback == nil {
                self.recorderFeedback = HapticFeedback()
                self.recorderFeedback?.prepareImpact(.light)
            }
            
            var isScheduledMessages = false
            if case .scheduledMessages = self.presentationInterfaceState.subject {
                isScheduledMessages = true
            }
            
            // Determine bot status BEFORE it is consumed below (bug fix — see doc comment).
            var isBot = false
            if let user = self.presentationInterfaceState.renderedPeer?.peer as? TelegramUser, user.botInfo != nil {
                isBot = true
            }
            
            var allowLiveUpload = false
            var viewOnceAvailable = false
            if let peerId = self.chatLocation.peerId {
                allowLiveUpload = peerId.namespace != Namespaces.Peer.SecretChat
                viewOnceAvailable = !isScheduledMessages && peerId.namespace == Namespaces.Peer.CloudUser && peerId != self.context.account.peerId && !isBot && self.presentationInterfaceState.sendPaidMessageStars == nil
            } else if case .customChatContents = self.chatLocation {
                allowLiveUpload = true
            }
            
            let controller = VideoMessageCameraScreen(
                context: self.context,
                updatedPresentationData: self.updatedPresentationData,
                allowLiveUpload: allowLiveUpload,
                viewOnceAvailable: viewOnceAvailable,
                inputPanelFrame: (currentInputPanelFrame, self.chatDisplayNode.inputNode != nil),
                chatNode: self.chatDisplayNode.historyNode,
                completion: { [weak self] message, silentPosting, scheduleTime in
                    guard let self, let videoController = self.videoRecorderValue else {
                        return
                    }
                    
                    // nil message means the recording failed/was discarded.
                    guard var message else {
                        self.recorderFeedback?.error()
                        self.recorderFeedback = nil
                        self.videoRecorder.set(.single(nil))
                        return
                    }
                    
                    let replyMessageSubject = self.presentationInterfaceState.interfaceState.replyMessageSubject
                    let correlationId = Int64.random(in: 0 ..< Int64.max)
                    message = message
                        .withUpdatedReplyToMessageId(replyMessageSubject?.subjectModel)
                        .withUpdatedCorrelationId(correlationId)
                    
                    var shouldAnimateMessageTransition = self.chatDisplayNode.shouldAnimateMessageTransition
                    // Monoforum admin view: the message lands in a different thread, so skip the animation.
                    if self.chatLocation.threadId == nil, let channel = self.presentationInterfaceState.renderedPeer?.peer as? TelegramChannel, channel.isMonoForum, let linkedMonoforumId = channel.linkedMonoforumId, let mainChannel = self.presentationInterfaceState.renderedPeer?.peers[linkedMonoforumId] as? TelegramChannel, mainChannel.hasPermission(.manageDirect) {
                        shouldAnimateMessageTransition = false
                    }
                    
                    var usedCorrelationId = false
                    
                    // GHOSTGRAM: When SendDelayManager is active the message lands in the
                    // ScheduledLocal namespace, NOT in the main history. This means
                    // setupSendActionOnViewUpdate's callback would NEVER fire (it waits
                    // for the message to appear in the normal chat view), causing the
                    // video recorder overlay to stay on screen and the app to freeze.
                    // Solution: dismiss the recorder immediately and skip the animation.
                    let isSendDelayActive = SendDelayManager.shared.isEnabled
                    
                    if !isSendDelayActive, scheduleTime == nil, shouldAnimateMessageTransition, let extractedView = videoController.extractVideoSnapshot() {
                        usedCorrelationId = true
                        self.chatDisplayNode.messageTransitionNode.add(correlationId: correlationId, source: .videoMessage(ChatMessageTransitionNodeImpl.Source.VideoMessage(view: extractedView)), initiated: { [weak videoController, weak self] in
                            videoController?.hideVideoSnapshot()
                            guard let self else {
                                return
                            }
                            self.videoRecorder.set(.single(nil))
                        })
                    } else {
                        self.videoRecorder.set(.single(nil))
                    }
                    
                    if isSendDelayActive {
                        // Dismiss recorder and clear state immediately without waiting
                        // for the scheduled message to appear in history.
                        self.chatDisplayNode.collapseInput()
                        self.updateChatPresentationInterfaceState(animated: true, interactive: false, {
                            $0.updatedInterfaceState { $0.withUpdatedReplyMessageSubject(nil).withUpdatedSendMessageEffect(nil).withUpdatedMediaDraftState(nil).withUpdatedPostSuggestionState(nil) }
                        })
                    } else {
                        // Normal path: collapse the input once the sent message shows up in the view.
                        self.chatDisplayNode.setupSendActionOnViewUpdate({ [weak self] in
                            if let self {
                                self.chatDisplayNode.collapseInput()
                                
                                self.updateChatPresentationInterfaceState(animated: true, interactive: false, {
                                    $0.updatedInterfaceState { $0.withUpdatedReplyMessageSubject(nil).withUpdatedSendMessageEffect(nil).withUpdatedMediaDraftState(nil).withUpdatedPostSuggestionState(nil) }
                                })
                            }
                        }, usedCorrelationId ? correlationId : nil)
                    }
                    
                    let messages = [message]
                    let transformedMessages: [EnqueueMessage]
                    if let silentPosting {
                        transformedMessages = self.transformEnqueueMessages(messages, silentPosting: silentPosting)
                    } else if let scheduleTime {
                        transformedMessages = self.transformEnqueueMessages(messages, silentPosting: false, scheduleTime: scheduleTime)
                    } else {
                        transformedMessages = self.transformEnqueueMessages(messages)
                    }
                    
                    self.sendMessages(transformedMessages)
                }
            )
            controller.onResume = { [weak self] in
                guard let self else {
                    return
                }
                self.resumeMediaRecorder()
            }
            self.videoRecorder.set(.single(controller))
        }
    }
}
|
|
|
|
/// Finalizes the currently active audio or video recording according to `action`
/// (dismiss, preview, pause, or send), updating panel state and enqueuing the
/// result where applicable.
///
/// The requested action may be downgraded to `.preview`:
///  - when slowmode is active (outside the scheduled-messages screen), or
///  - when paid messages are required and the action was `.send` — in that case
///    `sendImmediately` re-triggers sending via `interfaceInteraction` after the
///    preview data arrives.
func dismissMediaRecorder(_ action: ChatFinishMediaRecordingAction) {
    var updatedAction = action
    var isScheduledMessages = false
    if case .scheduledMessages = self.presentationInterfaceState.subject {
        isScheduledMessages = true
    }
    
    // Slowmode: never send directly, force the preview flow.
    if let _ = self.presentationInterfaceState.slowmodeState, !isScheduledMessages {
        updatedAction = .preview
    }
    
    // Paid messages: go through preview, then send immediately once data is ready.
    var sendImmediately = false
    if let _ = self.presentationInterfaceState.sendPaidMessageStars, case .send = action {
        updatedAction = .preview
        sendImmediately = true
    }
    
    if let audioRecorderValue = self.audioRecorderValue {
        // Note: the recorder is paused/stopped based on the ORIGINAL action,
        // while the UI flow below follows the possibly-downgraded updatedAction.
        switch action {
        case .pause:
            audioRecorderValue.pause()
        default:
            audioRecorderValue.stop()
        }
        
        self.dismissAllTooltips()
        
        switch updatedAction {
        case .dismiss:
            self.recorderDataDisposable.set(nil)
            self.chatDisplayNode.updateRecordedMediaDeleted(true)
            self.audioRecorder.set(.single(nil))
        case .preview, .pause:
            self.updateChatPresentationInterfaceState(animated: true, interactive: true, {
                $0.updatedInputTextPanelState { panelState in
                    return panelState.withUpdatedMediaRecordingState(.waitingForPreview)
                }
            })
            
            // Lazily created on first data delivery; reused on subsequent callbacks.
            var resource: LocalFileMediaResource?
            self.recorderDataDisposable.set(
                (audioRecorderValue.takenRecordedData()
                |> deliverOnMainQueue).startStrict(
                    next: { [weak self] data in
                        if let strongSelf = self, let data = data {
                            if data.duration < 0.5 {
                                // Too short to be a voice message: discard with error haptic.
                                strongSelf.recorderFeedback?.error()
                                strongSelf.recorderFeedback = nil
                                strongSelf.audioRecorder.set(.single(nil))
                                strongSelf.recorderDataDisposable.set(nil)
                                strongSelf.updateChatPresentationInterfaceState(animated: true, interactive: true, {
                                    $0.updatedInputTextPanelState { panelState in
                                        return panelState.withUpdatedMediaRecordingState(nil)
                                    }
                                })
                            } else if let waveform = data.waveform {
                                if resource == nil {
                                    resource = LocalFileMediaResource(fileId: Int64.random(in: Int64.min ... Int64.max), size: Int64(data.compressedData.count))
                                    strongSelf.context.account.postbox.mediaBox.storeResourceData(resource!.id, data: data.compressedData)
                                }
                                
                                // Prefer the waveform of an existing audio draft, if any.
                                let audioWaveform: AudioWaveform
                                if let recordedMediaPreview = strongSelf.presentationInterfaceState.interfaceState.mediaDraftState, case let .audio(audio) = recordedMediaPreview {
                                    audioWaveform = audio.waveform
                                } else {
                                    audioWaveform = AudioWaveform(bitstream: waveform, bitsPerSample: 5)
                                }
                                
                                // Publish the recording as an audio media draft and leave recording mode.
                                strongSelf.updateChatPresentationInterfaceState(animated: true, interactive: true, {
                                    $0.updatedInterfaceState {
                                        $0.withUpdatedMediaDraftState(.audio(
                                            ChatInterfaceMediaDraftState.Audio(
                                                resource: resource!,
                                                fileSize: Int32(data.compressedData.count),
                                                duration: data.duration,
                                                waveform: audioWaveform,
                                                trimRange: data.trimRange,
                                                resumeData: data.resumeData
                                            )
                                        ))
                                    }.updatedInputTextPanelState { panelState in
                                        return panelState.withUpdatedMediaRecordingState(nil)
                                    }
                                })
                                strongSelf.recorderFeedback = nil
                                strongSelf.updateDownButtonVisibility()
                                
                                if sendImmediately {
                                    strongSelf.interfaceInteraction?.sendRecordedMedia(false, false)
                                }
                            }
                        }
                    })
            )
        case let .send(viewOnce):
            self.chatDisplayNode.updateRecordedMediaDeleted(false)
            self.recorderDataDisposable.set((audioRecorderValue.takenRecordedData()
            |> deliverOnMainQueue).startStrict(next: { [weak self] data in
                if let strongSelf = self, let data = data {
                    if data.duration < 0.5 {
                        strongSelf.recorderFeedback?.error()
                        strongSelf.recorderFeedback = nil
                        strongSelf.audioRecorder.set(.single(nil))
                        strongSelf.recorderDataDisposable.set(nil)
                    } else {
                        let randomId = Int64.random(in: Int64.min ... Int64.max)
                        let originalData = data.compressedData
                        let duration = data.duration
                        let waveformBuffer: Data? = data.waveform
                        
                        // GHOSTGRAM: Check if Voice Morpher needs processing
                        let needsProcessing = VoiceMorpherManager.shared.isEnabled && VoiceMorpherManager.shared.selectedPreset != .disabled
                        
                        if needsProcessing {
                            // Show processing indicator
                            let statusController = OverlayStatusController(theme: strongSelf.presentationData.theme, type: .loading(cancelled: nil))
                            statusController.statusBar.statusBarStyle = strongSelf.presentationData.theme.rootController.statusBarStyle.style
                            strongSelf.present(statusController, in: .window(.root))
                            
                            // Process async
                            VoiceMorpherEngine.shared.processOggData(originalData) { [weak self, weak statusController] result in
                                DispatchQueue.main.async {
                                    statusController?.dismiss()
                                    
                                    guard let strongSelf = self else { return }
                                    
                                    let processedData: Data
                                    switch result {
                                    case .success(let data):
                                        processedData = data
                                    case .failure:
                                        processedData = originalData // fallback to original on error
                                    }
                                    
                                    strongSelf.finishSendingVoiceMessage(
                                        randomId: randomId,
                                        processedData: processedData,
                                        duration: duration,
                                        waveformBuffer: waveformBuffer,
                                        viewOnce: viewOnce
                                    )
                                }
                            }
                        } else {
                            // No processing needed, send directly
                            strongSelf.finishSendingVoiceMessage(
                                randomId: randomId,
                                processedData: originalData,
                                duration: duration,
                                waveformBuffer: waveformBuffer,
                                viewOnce: viewOnce
                            )
                        }
                        
                        strongSelf.recorderFeedback?.tap()
                        strongSelf.recorderFeedback = nil
                        strongSelf.recorderDataDisposable.set(nil)
                    }
                }
            }))
        }
    } else if let videoRecorderValue = self.videoRecorderValue {
        if case .send = updatedAction {
            // Sending is delegated to the camera screen controller itself.
            self.chatDisplayNode.updateRecordedMediaDeleted(false)
            videoRecorderValue.sendVideoRecording()
            self.recorderDataDisposable.set(nil)
        } else {
            if case .dismiss = updatedAction {
                self.chatDisplayNode.updateRecordedMediaDeleted(true)
                self.recorderDataDisposable.set(nil)
            }
            
            switch updatedAction {
            case .preview, .pause:
                // Only subscribe for preview data if the recorder actually stopped.
                if videoRecorderValue.stopVideoRecording() {
                    self.recorderDataDisposable.set((videoRecorderValue.takenRecordedData()
                    |> deliverOnMainQueue).startStrict(next: { [weak self] data in
                        if let strongSelf = self, let data = data {
                            if data.duration < 1.0 {
                                // Too short for a video message: discard with error haptic.
                                strongSelf.recorderFeedback?.error()
                                strongSelf.recorderFeedback = nil
                                strongSelf.updateChatPresentationInterfaceState(animated: true, interactive: true, {
                                    $0.updatedInputTextPanelState { panelState in
                                        return panelState.withUpdatedMediaRecordingState(nil)
                                    }
                                })
                                strongSelf.recorderDataDisposable.set(nil)
                                strongSelf.videoRecorder.set(.single(nil))
                            } else {
                                // Publish the recording as a video media draft and leave recording mode.
                                strongSelf.updateChatPresentationInterfaceState(animated: true, interactive: true, {
                                    $0.updatedInterfaceState {
                                        $0.withUpdatedMediaDraftState(.video(
                                            ChatInterfaceMediaDraftState.Video(
                                                duration: data.duration,
                                                frames: data.frames,
                                                framesUpdateTimestamp: data.framesUpdateTimestamp,
                                                trimRange: data.trimRange
                                            )
                                        ))
                                    }.updatedInputTextPanelState { panelState in
                                        return panelState.withUpdatedMediaRecordingState(nil)
                                    }
                                })
                                strongSelf.recorderFeedback = nil
                                strongSelf.updateDownButtonVisibility()
                            }
                        }
                    }))
                }
            default:
                self.recorderDataDisposable.set(nil)
                self.videoRecorder.set(.single(nil))
            }
        }
    }
}
|
|
|
|
/// Stops whichever recorder (audio or video) is currently active.
///
/// If the input panel still shows a recording state, routes through
/// `dismissMediaRecorder` (pause or preview); otherwise tears the recorder
/// down directly.
///
/// - Parameter pause: When true, a panel-visible recording is paused instead
///   of being sent to preview.
func stopMediaRecorder(pause: Bool = false) {
    let panelIsRecording = self.presentationInterfaceState.inputTextPanelState.mediaRecordingState != nil
    let finishAction: ChatFinishMediaRecordingAction = pause ? .pause : .preview
    
    if let activeAudioRecorder = self.audioRecorderValue {
        if panelIsRecording {
            self.dismissMediaRecorder(finishAction)
        } else {
            activeAudioRecorder.stop()
            self.audioRecorder.set(.single(nil))
        }
    } else if self.videoRecorderValue != nil {
        if panelIsRecording {
            self.dismissMediaRecorder(finishAction)
        } else {
            self.videoRecorder.set(.single(nil))
        }
    }
}
|
|
|
|
/// Resumes a paused recording (video or audio), pausing any media playback first.
///
/// For audio, a one-time warning alert is shown when the draft has a trim range
/// (resuming discards the trim); the user must confirm before recording continues.
func resumeMediaRecorder() {
    self.recorderDataDisposable.set(nil)
    
    // Pause any playing media so it does not bleed into the recording.
    self.context.sharedContext.mediaManager.playlistControl(.playback(.pause), type: nil)
    
    if let videoRecorderValue = self.videoRecorderValue {
        self.updateChatPresentationInterfaceState(animated: true, interactive: true, {
            $0.updatedInputTextPanelState { panelState in
                let recordingStatus = videoRecorderValue.recordingStatus
                return panelState.withUpdatedMediaRecordingState(.video(status: .recording(InstantVideoControllerRecordingStatus(micLevel: recordingStatus.micLevel, duration: recordingStatus.duration)), isLocked: true))
            }.updatedInterfaceState { $0.withUpdatedMediaDraftState(nil) }
        })
    } else {
        // NOTE(review): `proceed` captures self strongly; it lives only as long as
        // the alert/notice flow below — confirm no retain cycle is possible here.
        let proceed = {
            self.withAudioRecorder(resuming: true, { audioRecorder in
                audioRecorder.resume()
                
                self.updateChatPresentationInterfaceState(animated: true, interactive: true, {
                    $0.updatedInputTextPanelState { panelState in
                        return panelState.withUpdatedMediaRecordingState(.audio(recorder: audioRecorder, isLocked: true))
                    }.updatedInterfaceState { $0.withUpdatedMediaDraftState(nil) }
                })
            })
        }
        
        let _ = (ApplicationSpecificNotice.getVoiceMessagesResumeTrimWarning(accountManager: self.context.sharedContext.accountManager)
        |> deliverOnMainQueue).start(next: { [weak self] count in
            guard let self else {
                return
            }
            // Warning already acknowledged before: resume without asking again.
            if count > 0 {
                proceed()
                return
            }
            // Only warn when the draft is actually trimmed (beyond a small tolerance).
            if let recordedMediaPreview = self.presentationInterfaceState.interfaceState.mediaDraftState, case let .audio(audio) = recordedMediaPreview, let trimRange = audio.trimRange, trimRange.lowerBound > 0.1 || trimRange.upperBound < audio.duration {
                self.present(
                    textAlertController(
                        context: self.context,
                        title: self.presentationData.strings.Chat_TrimVoiceMessageToResume_Title,
                        text: self.presentationData.strings.Chat_TrimVoiceMessageToResume_Text,
                        actions: [
                            TextAlertAction(type: .genericAction, title: self.presentationData.strings.Common_Cancel, action: {}),
                            TextAlertAction(type: .defaultAction, title: self.presentationData.strings.Chat_TrimVoiceMessageToResume_Proceed, action: {
                                proceed()
                                let _ = ApplicationSpecificNotice.incrementVoiceMessagesResumeTrimWarning(accountManager: self.context.sharedContext.accountManager).start()
                            })
                        ]
                    ), in: .window(.root)
                )
            } else {
                proceed()
            }
        })
    }
}
|
|
|
|
/// Locks the in-progress recording (hands-free mode) and, for audio,
/// offers the one-time "tap to pause" tooltip.
func lockMediaRecorder() {
    if self.presentationInterfaceState.inputTextPanelState.mediaRecordingState != nil {
        self.updateChatPresentationInterfaceState(animated: true, interactive: true, { state in
            state.updatedInputTextPanelState { panel in
                panel.withUpdatedMediaRecordingState(panel.mediaRecordingState?.withLocked(true))
            }
        })
    }
    
    if self.audioRecorderValue != nil {
        self.maybePresentAudioPauseTooltip()
    } else if let activeVideoRecorder = self.videoRecorderValue {
        activeVideoRecorder.lockVideoRecording()
    }
}
|
|
|
|
/// Discards the active recording (audio or video) together with any media
/// draft, and clears all recorder-related UI state.
func deleteMediaRecording() {
    if self.audioRecorderValue != nil {
        self.audioRecorder.set(.single(nil))
    } else if self.videoRecorderValue != nil {
        self.videoRecorder.set(.single(nil))
    }
    
    self.recorderDataDisposable.set(nil)
    self.chatDisplayNode.updateRecordedMediaDeleted(true)
    self.updateChatPresentationInterfaceState(animated: true, interactive: true, { state in
        state.updatedInterfaceState { $0.withUpdatedMediaDraftState(nil) }
    })
    self.updateDownButtonVisibility()
    
    self.dismissAllTooltips()
}
|
|
|
|
/// Shows the "pause voice message" tooltip, at most three times ever
/// (tracked via an application-specific notice counter).
private func maybePresentAudioPauseTooltip() {
    let _ = (ApplicationSpecificNotice.getVoiceMessagesPauseSuggestion(accountManager: self.context.sharedContext.accountManager)
    |> deliverOnMainQueue).startStandalone(next: { [weak self] pauseCounter in
        guard let self else {
            return
        }
        // Suggestion cap reached — never show again.
        guard pauseCounter < 3 else {
            return
        }
        // Small delay so the tooltip appears after the lock animation settles.
        Queue.mainQueue().after(0.3) {
            self.displayPauseTooltip(text: self.presentationData.strings.Chat_PauseVoiceMessageTooltip)
        }
        let _ = ApplicationSpecificNotice.incrementVoiceMessagesPauseSuggestion(accountManager: self.context.sharedContext.accountManager).startStandalone()
    })
}
|
|
|
|
/// Presents a small tooltip anchored near the record button.
///
/// - Parameter text: Markdown text to show inside the tooltip.
private func displayPauseTooltip(text: String) {
    guard let layout = self.validLayout else {
        return
    }
    
    self.dismissAllTooltips()
    
    let insets = layout.insets(options: [.input])
    var screenWidth = layout.size.width
    if layout.metrics.isTablet {
        // On tablets, pick the physical screen dimension matching the current orientation.
        screenWidth = layout.size.height == layout.deviceMetrics.screenSize.width
            ? layout.deviceMetrics.screenSize.height
            : layout.deviceMetrics.screenSize.width
    }
    
    // Zero-size anchor rect just left of the trailing safe-area, above the input.
    let anchorRect = CGRect(origin: CGPoint(x: screenWidth - layout.safeInsets.right - 50.0, y: layout.size.height - insets.bottom - 128.0), size: CGSize())
    
    let tooltipController = TooltipScreen(
        account: self.context.account,
        sharedContext: self.context.sharedContext,
        text: .markdown(text: text),
        balancedTextLayout: true,
        constrainWidth: 240.0,
        style: .customBlur(UIColor(rgb: 0x18181a), 0.0),
        arrowStyle: .small,
        icon: nil,
        location: .point(anchorRect, .right),
        displayDuration: .default,
        inset: 8.0,
        cornerRadius: 8.0,
        shouldDismissOnTouch: { _, _ in
            return .ignore
        }
    )
    self.present(tooltipController, in: .window(.root))
}
|
|
|
|
/// Runs `f` with a live audio recorder, recreating one from the current audio
/// draft if none is active.
///
/// - Parameters:
///   - resuming: When false and a recorder had to be recreated from a draft,
///     also re-subscribes to its recorded data to keep the draft state updated.
///   - f: Invoked synchronously with the (existing or newly created) recorder.
private func withAudioRecorder(resuming: Bool, _ f: (ManagedAudioRecorder) -> Void) {
    if let audioRecorder = self.audioRecorderValue {
        f(audioRecorder)
    } else if let recordedMediaPreview = self.presentationInterfaceState.interfaceState.mediaDraftState, case let .audio(audio) = recordedMediaPreview {
        // No live recorder: rebuild one from the existing audio draft.
        self.requestAudioRecorder(beginWithTone: false, existingDraft: audio)
        // NOTE(review): assumes requestAudioRecorder set audioRecorderValue
        // synchronously; if it did not, `f` is silently never called.
        if let audioRecorder = self.audioRecorderValue {
            f(audioRecorder)
            
            if !resuming {
                // Keep the draft in sync as new data (e.g. trim updates) arrives.
                self.recorderDataDisposable.set(
                    (audioRecorder.takenRecordedData()
                    |> deliverOnMainQueue).startStrict(
                        next: { [weak self] data in
                            if let strongSelf = self, let data = data {
                                // Reuse the draft's waveform and resource; only size/duration/trim change.
                                let audioWaveform = audio.waveform
                                
                                strongSelf.updateChatPresentationInterfaceState(animated: true, interactive: true, {
                                    $0.updatedInterfaceState {
                                        $0.withUpdatedMediaDraftState(.audio(
                                            ChatInterfaceMediaDraftState.Audio(
                                                resource: audio.resource,
                                                fileSize: Int32(data.compressedData.count),
                                                duration: data.duration,
                                                waveform: audioWaveform,
                                                trimRange: data.trimRange,
                                                resumeData: data.resumeData
                                            )
                                        ))
                                    }.updatedInputTextPanelState { panelState in
                                        return panelState.withUpdatedMediaRecordingState(nil)
                                    }
                                })
                                strongSelf.updateDownButtonVisibility()
                            }
                        })
                )
            }
        }
    }
}
|
|
|
|
/// Forwards a trim-range change to the active recorder: the video recorder
/// when one exists, otherwise the (possibly recreated) audio recorder.
func updateTrimRange(start: Double, end: Double, updatedEnd: Bool, apply: Bool) {
    guard let videoRecorder = self.videoRecorderValue else {
        self.withAudioRecorder(resuming: false, { audioRecorder in
            audioRecorder.updateTrimRange(start: start, end: end, updatedEnd: updatedEnd, apply: apply)
        })
        return
    }
    videoRecorder.updateTrimRange(start: start, end: end, updatedEnd: updatedEnd, apply: apply)
}
|
|
|
|
/// Sends the current media draft (the previewed audio or video recording).
///
/// Audio drafts are turned into a voice-message `EnqueueMessage` here (including
/// trimming support); video drafts are delegated to the camera screen controller.
///
/// - Parameters:
///   - silentPosting: Send without notification, when set.
///   - scheduleTime: Schedule instead of sending now, when set.
///   - repeatPeriod: Repeat interval forwarded with a scheduled send.
///   - viewOnce: Attach a view-once autoremove attribute.
///   - messageEffect: Optional message effect attribute.
///   - postpone: Forwarded to message transformation.
func sendMediaRecording(
    silentPosting: Bool? = nil,
    scheduleTime: Int32? = nil,
    repeatPeriod: Int32? = nil,
    viewOnce: Bool = false,
    messageEffect: ChatSendMessageEffect? = nil,
    postpone: Bool = false
) {
    self.chatDisplayNode.updateRecordedMediaDeleted(false)
    
    guard let recordedMediaPreview = self.presentationInterfaceState.interfaceState.mediaDraftState else {
        return
    }
    
    switch recordedMediaPreview {
    case let .audio(audio):
        self.audioRecorder.set(.single(nil))
        
        var isScheduledMessages = false
        if case .scheduledMessages = self.presentationInterfaceState.subject {
            isScheduledMessages = true
        }
        
        // Slowmode blocks direct sends: show the tooltip and bail out.
        if let _ = self.presentationInterfaceState.slowmodeState, !isScheduledMessages {
            if let rect = self.chatDisplayNode.frameForInputActionButton() {
                self.interfaceInteraction?.displaySlowmodeTooltip(self.chatDisplayNode.view, rect)
            }
            return
        }
        
        // Collapse the input and clear draft state once the sent message shows up.
        self.chatDisplayNode.setupSendActionOnViewUpdate({ [weak self] in
            if let strongSelf = self {
                strongSelf.chatDisplayNode.collapseInput()
                
                strongSelf.updateChatPresentationInterfaceState(animated: true, interactive: false, {
                    $0.updatedInterfaceState { $0.withUpdatedReplyMessageSubject(nil).withUpdatedMediaDraftState(nil).withUpdatedSendMessageEffect(nil).withUpdatedPostSuggestionState(nil) }
                })
                
                strongSelf.updateDownButtonVisibility()
            }
        }, nil)
        
        var attributes: [MessageAttribute] = []
        if viewOnce {
            attributes.append(AutoremoveTimeoutMessageAttribute(timeout: viewOnceTimeout, countdownBeginTime: nil))
        }
        if let messageEffect {
            attributes.append(EffectMessageAttribute(id: messageEffect.id))
        }
        
        let resource: TelegramMediaResource
        var waveform = audio.waveform
        var finalDuration: Int = Int(audio.duration)
        // If the draft was trimmed, materialize a trimmed copy in a temp file and
        // cut the waveform/duration accordingly; otherwise reuse the draft resource.
        if let trimRange = audio.trimRange, trimRange.lowerBound > 0.0 || trimRange.upperBound < Double(audio.duration) {
            let randomId = Int64.random(in: Int64.min ... Int64.max)
            let tempPath = NSTemporaryDirectory() + "\(Int64.random(in: 0 ..< .max)).ogg"
            resource = LocalFileAudioMediaResource(randomId: randomId, path: tempPath, trimRange: trimRange)
            self.context.account.postbox.mediaBox.moveResourceData(audio.resource.id, toTempPath: tempPath)
            waveform = waveform.subwaveform(from: trimRange.lowerBound / Double(audio.duration), to: trimRange.upperBound / Double(audio.duration))
            finalDuration = Int(trimRange.upperBound - trimRange.lowerBound)
        } else {
            resource = audio.resource
        }
        
        let waveformBuffer = waveform.makeBitstream()
        
        let messages: [EnqueueMessage] = [.message(text: "", attributes: attributes, inlineStickers: [:], mediaReference: .standalone(media: TelegramMediaFile(fileId: MediaId(namespace: Namespaces.Media.LocalFile, id: Int64.random(in: Int64.min ... Int64.max)), partialReference: nil, resource: resource, previewRepresentations: [], videoThumbnails: [], immediateThumbnailData: nil, mimeType: "audio/ogg", size: Int64(audio.fileSize), attributes: [.Audio(isVoice: true, duration: finalDuration, title: nil, performer: nil, waveform: waveformBuffer)], alternativeRepresentations: [])), threadId: self.chatLocation.threadId, replyToMessageId: self.presentationInterfaceState.interfaceState.replyMessageSubject?.subjectModel, replyToStoryId: nil, localGroupingKey: nil, correlationId: nil, bubbleUpEmojiOrStickersets: [])]
        
        let transformedMessages: [EnqueueMessage]
        if let silentPosting = silentPosting {
            transformedMessages = self.transformEnqueueMessages(messages, silentPosting: silentPosting, postpone: postpone)
        } else if let scheduleTime = scheduleTime {
            transformedMessages = self.transformEnqueueMessages(messages, silentPosting: false, scheduleTime: scheduleTime, repeatPeriod: repeatPeriod, postpone: postpone)
        } else {
            transformedMessages = self.transformEnqueueMessages(messages)
        }
        
        guard let peerId = self.chatLocation.peerId else {
            return
        }
        
        let _ = (enqueueMessages(account: self.context.account, peerId: peerId, messages: transformedMessages)
        |> deliverOnMainQueue).startStandalone(next: { [weak self] _ in
            // Scroll to the new message except on the scheduled-messages screen.
            if let strongSelf = self, strongSelf.presentationInterfaceState.subject != .scheduledMessages {
                strongSelf.chatDisplayNode.historyNode.scrollToEndOfHistory()
            }
        })
        
        donateSendMessageIntent(account: self.context.account, sharedContext: self.context.sharedContext, intentContext: .chat, peerIds: [peerId])
    case .video:
        self.videoRecorderValue?.sendVideoRecording(silentPosting: silentPosting, scheduleTime: scheduleTime, messageEffect: messageEffect)
    }
}
|
|
|
|
// MARK: - GHOSTGRAM: Voice Morpher Helper
|
|
|
|
// MARK: - GHOSTGRAM: Voice Morpher Helper

/// Stores the (possibly morphed) voice data and enqueues it as a voice message,
/// optionally animating the send from the mic button.
///
/// - Parameters:
///   - randomId: File id for both the local resource and the media id.
///   - processedData: Final OGG payload to store and send.
///   - duration: Recording duration in seconds.
///   - waveformBuffer: Pre-encoded waveform bitstream, if available.
///   - viewOnce: Attach a view-once autoremove attribute.
private func finishSendingVoiceMessage(
    randomId: Int64,
    processedData: Data,
    duration: Double,
    waveformBuffer: Data?,
    viewOnce: Bool
) {
    let resource = LocalFileMediaResource(fileId: randomId)
    self.context.account.postbox.mediaBox.storeResourceData(resource.id, data: processedData)
    
    let correlationId = Int64.random(in: 0 ..< Int64.max)
    var usedCorrelationId = false
    
    var shouldAnimateMessageTransition = self.chatDisplayNode.shouldAnimateMessageTransition
    // Monoforum admin view: the message lands in a different thread, so skip the animation.
    if self.chatLocation.threadId == nil, let channel = self.presentationInterfaceState.renderedPeer?.peer as? TelegramChannel, channel.isMonoForum, let linkedMonoforumId = channel.linkedMonoforumId, let mainChannel = self.presentationInterfaceState.renderedPeer?.peers[linkedMonoforumId] as? TelegramChannel, mainChannel.hasPermission(.manageDirect) {
        shouldAnimateMessageTransition = false
    }
    
    if shouldAnimateMessageTransition, let textInputPanelNode = self.chatDisplayNode.textInputPanelNode, let micButton = textInputPanelNode.micButton {
        // Animate the bubble out of the mic button; the recorder is torn down
        // once the transition starts.
        usedCorrelationId = true
        self.chatDisplayNode.messageTransitionNode.add(correlationId: correlationId, source: .audioMicInput(ChatMessageTransitionNodeImpl.Source.AudioMicInput(micButton: micButton)), initiated: { [weak self] in
            self?.audioRecorder.set(.single(nil))
        })
    } else {
        self.audioRecorder.set(.single(nil))
    }
    
    // Collapse the input and clear reply/effect state once the message appears.
    self.chatDisplayNode.setupSendActionOnViewUpdate({ [weak self] in
        if let strongSelf = self {
            strongSelf.chatDisplayNode.collapseInput()
            strongSelf.updateChatPresentationInterfaceState(animated: true, interactive: false, {
                $0.updatedInterfaceState { $0.withUpdatedReplyMessageSubject(nil).withUpdatedSendMessageEffect(nil).withUpdatedPostSuggestionState(nil) }
            })
        }
    }, usedCorrelationId ? correlationId : nil)
    
    var attributes: [MessageAttribute] = []
    if viewOnce {
        attributes.append(AutoremoveTimeoutMessageAttribute(timeout: viewOnceTimeout, countdownBeginTime: nil))
    }
    
    self.sendMessages([.message(text: "", attributes: attributes, inlineStickers: [:], mediaReference: .standalone(media: TelegramMediaFile(fileId: MediaId(namespace: Namespaces.Media.LocalFile, id: randomId), partialReference: nil, resource: resource, previewRepresentations: [], videoThumbnails: [], immediateThumbnailData: nil, mimeType: "audio/ogg", size: Int64(processedData.count), attributes: [.Audio(isVoice: true, duration: Int(duration), title: nil, performer: nil, waveform: waveformBuffer)], alternativeRepresentations: [])), threadId: self.chatLocation.threadId, replyToMessageId: self.presentationInterfaceState.interfaceState.replyMessageSubject?.subjectModel, replyToStoryId: nil, localGroupingKey: nil, correlationId: correlationId, bubbleUpEmojiOrStickersets: [])])
}
|
|
}
|