Mirror of https://github.com/Swiftgram/Telegram-iOS.git, synced 2025-06-16 05:55:20 +00:00
Various fixes
commit 2df32e47e2
parent 48531db09e
@@ -248,24 +248,24 @@
             [itemViews addObject:viewItem];
         }
         
-        if (_hasSearchButton)
-        {
-            TGMenuSheetButtonItemView *viewItem = [[TGMenuSheetButtonItemView alloc] initWithTitle:TGLocalized(@"ProfilePhoto.SearchWeb") type:TGMenuSheetButtonTypeDefault fontSize:20.0 action:^
-            {
-                __strong TGMediaAvatarMenuMixin *strongSelf = weakSelf;
-                if (strongSelf == nil)
-                    return;
-                
-                __strong TGMenuSheetController *strongController = weakController;
-                if (strongController == nil)
-                    return;
-                
-                [strongController dismissAnimated:true];
-                if (strongSelf != nil)
-                    strongSelf.requestSearchController(nil);
-            }];
-            [itemViews addObject:viewItem];
-        }
+//        if (_hasSearchButton)
+//        {
+//            TGMenuSheetButtonItemView *viewItem = [[TGMenuSheetButtonItemView alloc] initWithTitle:TGLocalized(@"ProfilePhoto.SearchWeb") type:TGMenuSheetButtonTypeDefault fontSize:20.0 action:^
+//            {
+//                __strong TGMediaAvatarMenuMixin *strongSelf = weakSelf;
+//                if (strongSelf == nil)
+//                    return;
+//
+//                __strong TGMenuSheetController *strongController = weakController;
+//                if (strongController == nil)
+//                    return;
+//
+//                [strongController dismissAnimated:true];
+//                if (strongSelf != nil)
+//                    strongSelf.requestSearchController(nil);
+//            }];
+//            [itemViews addObject:viewItem];
+//        }
         
         if (_hasViewButton)
         {
@@ -183,9 +183,9 @@ private final class LegacyComponentsGlobalsProviderImpl: NSObject, LegacyCompone
         switch type {
             case TGAudioSessionTypePlayAndRecord, TGAudioSessionTypePlayAndRecordHeadphones:
                 if legacyContext.sharedContext.currentMediaInputSettings.with({ $0 }).pauseMusicOnRecording {
-                    convertedType = .record(speaker: false, withOthers: false)
+                    convertedType = .record(speaker: false, video: true, withOthers: false)
                 } else {
-                    convertedType = .recordWithOthers
+                    convertedType = .record(speaker: false, video: true, withOthers: true)
                 }
             default:
                 convertedType = .play(mixWithOthers: false)
@@ -18,10 +18,9 @@ public enum ManagedAudioSessionType: Equatable {
     case ambient
     case play(mixWithOthers: Bool)
     case playWithPossiblePortOverride
-    case record(speaker: Bool, withOthers: Bool)
+    case record(speaker: Bool, video: Bool, withOthers: Bool)
     case voiceCall
     case videoCall
-    case recordWithOthers
     
     var isPlay: Bool {
         switch self {
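
The hunk above is the heart of the commit: the standalone recordWithOthers case is folded into record as the withOthers flag, and a new video flag distinguishes camera capture from audio-only capture. A minimal Swift sketch of the migration (RecordingSessionType is a cut-down stand-in for ManagedAudioSessionType, not the real type):

    // Stand-in illustrating the refactored case shape only.
    enum RecordingSessionType: Equatable {
        case play(mixWithOthers: Bool)
        case record(speaker: Bool, video: Bool, withOthers: Bool)
    }

    // Old call sites map onto the unified case; whether video is true depends
    // on whether the caller drives the camera (see the later hunks):
    let voiceNote = RecordingSessionType.record(speaker: false, video: false, withOthers: false)
    let storyCamera = RecordingSessionType.record(speaker: false, video: true, withOthers: true)  // was .recordWithOthers

Pattern matches gain one position, so .record(_, withOthers) becomes .record(_, _, withOthers) throughout the remaining hunks.
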
@@ -39,7 +38,7 @@ private func nativeCategoryForType(_ type: ManagedAudioSessionType, headphones:
         return .ambient
     case .play:
         return .playback
-    case .record, .recordWithOthers, .voiceCall, .videoCall:
+    case .record, .voiceCall, .videoCall:
         return .playAndRecord
     case .playWithPossiblePortOverride:
         if headphones {
@@ -633,10 +632,8 @@ public final class ManagedAudioSession: NSObject {
         
         var lastIsRecordWithOthers = false
         if let lastHolder = self.holders.last {
-            if case let .record(_, withOthers) = lastHolder.audioSessionType {
+            if case let .record(_, _, withOthers) = lastHolder.audioSessionType {
                 lastIsRecordWithOthers = withOthers
-            } else if case .recordWithOthers = lastHolder.audioSessionType {
-                lastIsRecordWithOthers = true
             }
         }
         if !deactivating {
@@ -804,11 +801,14 @@ public final class ManagedAudioSession: NSObject {
             options.insert(.allowBluetooth)
             options.insert(.allowBluetoothA2DP)
             options.insert(.mixWithOthers)
-        case .record:
+        case let .record(_, video, mixWithOthers):
             options.insert(.allowBluetooth)
-        case .recordWithOthers:
-            options.insert(.allowBluetoothA2DP)
-            options.insert(.mixWithOthers)
+            if video {
+                options.insert(.allowBluetoothA2DP)
+            }
+            if mixWithOthers {
+                options.insert(.mixWithOthers)
+            }
         }
         managedAudioSessionLog("ManagedAudioSession setting category and options")
         let mode: AVAudioSession.Mode
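
As a sketch of the merged branch above, the category options for a record session can be derived from the two new flags alone. The AVFoundation option constants are real; the free function is illustrative only:

    import AVFoundation

    // Illustrative: mirrors the merged .record branch in the hunk above.
    func recordSessionOptions(video: Bool, mixWithOthers: Bool) -> AVAudioSession.CategoryOptions {
        var options: AVAudioSession.CategoryOptions = [.allowBluetooth]  // Bluetooth HFP input is always allowed
        if video {
            options.insert(.allowBluetoothA2DP)  // camera capture additionally permits A2DP output
        }
        if mixWithOthers {
            options.insert(.mixWithOthers)  // keep other apps' audio playing
        }
        return options
    }

The old plain .record behaviour falls out as recordSessionOptions(video: false, mixWithOthers: false), i.e. just [.allowBluetooth].
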
@@ -817,7 +817,7 @@ public final class ManagedAudioSession: NSObject {
             mode = .voiceChat
         case .videoCall:
             mode = .videoChat
-        case .recordWithOthers:
+        case .record(_, true, _):
             mode = .videoRecording
         default:
             mode = .default
@@ -838,7 +838,7 @@ public final class ManagedAudioSession: NSObject {
             try AVAudioSession.sharedInstance().setMode(mode)
             if AVAudioSession.sharedInstance().categoryOptions != options {
                 switch type {
-                case .voiceCall, .videoCall, .recordWithOthers:
+                case .voiceCall, .videoCall:
                     managedAudioSessionLog("ManagedAudioSession resetting options")
                     try AVAudioSession.sharedInstance().setCategory(nativeCategory, options: options)
                 default:
@@ -960,7 +960,7 @@ public final class ManagedAudioSession: NSObject {
                 try AVAudioSession.sharedInstance().overrideOutputAudioPort(.none)
             }
             
-            if case let .record(speaker, _) = type, !speaker, let input = AVAudioSession.sharedInstance().availableInputs?.first {
+            if case let .record(_, video, _) = type, video, let input = AVAudioSession.sharedInstance().availableInputs?.first {
                 if let dataSources = input.dataSources {
                     for source in dataSources {
                         if source.dataSourceName.contains("Front") {
@@ -973,13 +973,13 @@ public final class ManagedAudioSession: NSObject {
         
         if resetToBuiltin {
             var updatedType = type
-            if case .record(false, let withOthers) = updatedType, self.isHeadsetPluggedInValue {
-                updatedType = .record(speaker: true, withOthers: withOthers)
+            if case .record(false, let video, let withOthers) = updatedType, self.isHeadsetPluggedInValue {
+                updatedType = .record(speaker: true, video: video, withOthers: withOthers)
             }
             switch updatedType {
-            case .record(false, _):
+            case .record(false, _, _):
                 try AVAudioSession.sharedInstance().overrideOutputAudioPort(.speaker)
-            case .voiceCall, .playWithPossiblePortOverride, .record(true, _):
+            case .voiceCall, .playWithPossiblePortOverride, .record(true, _, _):
                 try AVAudioSession.sharedInstance().overrideOutputAudioPort(.none)
                 if let routes = AVAudioSession.sharedInstance().availableInputs {
                     var alreadySet = false
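
The headset adjustment above threads the new flag through unchanged. In isolation, as a sketch (SessionType again stands in for ManagedAudioSessionType):

    enum SessionType: Equatable {
        case record(speaker: Bool, video: Bool, withOthers: Bool)
    }

    // Illustrative: flips speaker to true when a headset is plugged in, so the
    // later switch picks the .none output override instead of forcing the
    // speaker port; video and withOthers are carried over untouched.
    func adjustedForHeadset(_ type: SessionType, headsetPluggedIn: Bool) -> SessionType {
        if case .record(false, let video, let withOthers) = type, headsetPluggedIn {
            return .record(speaker: true, video: video, withOthers: withOthers)
        }
        return type
    }
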
@@ -2766,7 +2766,7 @@ public class CameraScreen: ViewController {
     }
     
     private func requestAudioSession() {
-        self.audioSessionDisposable = self.context.sharedContext.mediaManager.audioSession.push(audioSessionType: .recordWithOthers, activate: { _ in
+        self.audioSessionDisposable = self.context.sharedContext.mediaManager.audioSession.push(audioSessionType: .record(speaker: false, video: true, withOthers: true), activate: { _ in
             if #available(iOS 13.0, *) {
                 try? AVAudioSession.sharedInstance().setAllowHapticsAndSystemSoundsDuringRecording(true)
             }
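
Every camera and recorder call site updated in this commit passes the same kind of activate callback. The iOS side of that callback is plain AVFoundation and can be exercised on its own:

    import AVFoundation

    // What the activate callbacks above do once the session is granted:
    // keep haptics and system sounds audible while recording (iOS 13+).
    func audioSessionActivated() {
        if #available(iOS 13.0, *) {
            try? AVAudioSession.sharedInstance().setAllowHapticsAndSystemSoundsDuringRecording(true)
        }
    }

push(audioSessionType:activate:) itself is Telegram's ManagedAudioSession API; the disposable it returns is kept in audioSessionDisposable so the session can be released later.
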
@@ -400,6 +400,8 @@ public final class EmojiStatusComponent: Component {
                     emojiThemeColor = themeColor
                     emojiLoopMode = loopMode
                     emojiSize = size
+                } else if case let .premium(color) = component.content {
+                    iconTintColor = color
                 }
             }
             
@@ -4420,7 +4420,7 @@ public final class MediaEditorScreen: ViewController, UIDropInteractionDelegate
             checkPostingAvailability = true
         }
         if needsAudioSession {
-            self.audioSessionDisposable = self.context.sharedContext.mediaManager.audioSession.push(audioSessionType: .recordWithOthers, activate: { _ in
+            self.audioSessionDisposable = self.context.sharedContext.mediaManager.audioSession.push(audioSessionType: .record(speaker: false, video: true, withOthers: true), activate: { _ in
                 if #available(iOS 13.0, *) {
                     try? AVAudioSession.sharedInstance().setAllowHapticsAndSystemSoundsDuringRecording(true)
                 }
@@ -80,6 +80,7 @@ public final class SettingsThemeWallpaperNode: ASDisplayNode {
         self.statusNode.isUserInteractionEnabled = false
         
         self.emojiContainerNode = ASDisplayNode()
+        self.emojiContainerNode.isUserInteractionEnabled = false
         self.emojiImageNode = TransformImageNode()
         
         super.init()
@@ -1673,9 +1673,9 @@ public class VideoMessageCameraScreen: ViewController {
     private func requestAudioSession() {
         let audioSessionType: ManagedAudioSessionType
         if self.context.sharedContext.currentMediaInputSettings.with({ $0 }).pauseMusicOnRecording {
-            audioSessionType = .record(speaker: false, withOthers: false)
+            audioSessionType = .record(speaker: false, video: true, withOthers: false)
         } else {
-            audioSessionType = .recordWithOthers
+            audioSessionType = .record(speaker: false, video: true, withOthers: true)
         }
         
         self.audioSessionDisposable = self.context.sharedContext.mediaManager.audioSession.push(audioSessionType: audioSessionType, activate: { [weak self] _ in
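
Like the LegacyComponentsGlobalsProviderImpl hunk earlier, withOthers here is driven by the user's pauseMusicOnRecording setting: pausing music means an exclusive session. Condensed into a hypothetical helper (real logic from the hunk, name invented for illustration):

    // Reusing the SessionType stand-in from the routing sketch above.
    // pauseMusicOnRecording == true  -> withOthers: false (pause other audio)
    // pauseMusicOnRecording == false -> withOthers: true  (mix with other audio)
    func videoMessageSessionType(pauseMusicOnRecording: Bool) -> SessionType {
        return .record(speaker: false, video: true, withOthers: !pauseMusicOnRecording)
    }
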
@@ -401,7 +401,7 @@ final class ManagedAudioRecorderContext {
         
         if self.audioSessionDisposable == nil {
             let queue = self.queue
-            self.audioSessionDisposable = self.mediaManager.audioSession.push(audioSessionType: .record(speaker: self.beginWithTone, withOthers: false), activate: { [weak self] state in
+            self.audioSessionDisposable = self.mediaManager.audioSession.push(audioSessionType: .record(speaker: self.beginWithTone, video: false, withOthers: false), activate: { [weak self] state in
                 queue.async {
                     if let strongSelf = self, !strongSelf.paused {
                         strongSelf.hasAudioSession = true