Mirror of https://github.com/Swiftgram/Telegram-iOS.git, synced 2025-06-16 05:55:20 +00:00

Commit 9bcbd429aa: Merge branch 'experimental-2' of gitlab.com:peter-iakovlev/telegram-ios into experimental-2
Random.txt (new file)
@@ -0,0 +1 @@
+LS3VfNVetXNy6mHmek8hegOh0wsvmJs0hcrc7PLA9eI=
@@ -3,7 +3,7 @@
 @implementation Serialization
 
 - (NSUInteger)currentLayer {
-    return 116;
+    return 117;
 }
 
 - (id _Nullable)parseMessage:(NSData * _Nullable)data {
@@ -40,7 +40,7 @@ public struct PresentationCallState: Equatable {
     case connecting(Data?)
     case active(Double, Int32?, Data)
     case reconnecting(Double, Int32?, Data)
-    case terminating
+    case terminating(CallSessionTerminationReason?)
     case terminated(CallId?, CallSessionTerminationReason?, Bool)
 }
 
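
Downstream of this change, status rendering can surface why a call is ending. A minimal sketch of consuming the richer case (the handler and the access through the nested state value are assumptions; the case shapes come from this hunk):

    func describe(_ state: PresentationCallState) -> String {
        switch state.state {
        case let .terminating(reason):
            // reason is optional: nil when no termination cause is known yet
            return "terminating (\(reason.map { String(describing: $0) } ?? "unknown"))"
        case let .terminated(id, reason, sendRating):
            return "terminated \(String(describing: id)), reason: \(String(describing: reason)), rating: \(sendRating)"
        default:
            return "in progress"
        }
    }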
@@ -11,7 +11,6 @@
 @protocol TGModernConversationInputMicButtonDecoration <NSObject>
 
 - (void)updateLevel:(CGFloat)level;
+- (void)tick:(CGFloat)level;
 - (void)setColor:(UIColor *)color;
 - (void)stopAnimating;
 - (void)startAnimating;
@@ -514,8 +514,8 @@ static const CGFloat outerCircleMinScale = innerCircleRadius / outerCircleRadius
         _innerCircleView.transform = CGAffineTransformMakeScale(0.2f, 0.2f);
         _outerCircleView.transform = CGAffineTransformMakeScale(0.2f, 0.2f);
         if (toSmallSize) {
-            _decoration.transform = CGAffineTransformConcat(CGAffineTransformMakeScale(0.33f, 0.33f), CGAffineTransformMakeTranslation(-4, 0));
-            _innerIconWrapperView.transform = CGAffineTransformConcat(CGAffineTransformMakeScale(0.492f, 0.492f), CGAffineTransformMakeTranslation(-TGScreenPixel, 0));
+            _decoration.transform = CGAffineTransformConcat(CGAffineTransformMakeScale(0.33f, 0.33f), CGAffineTransformMakeTranslation(0, 2 - TGScreenPixel));
+            _innerIconWrapperView.transform = CGAffineTransformConcat(CGAffineTransformMakeScale(0.492f, 0.492f), CGAffineTransformMakeTranslation(-TGScreenPixel, 1));
         } else {
             _decoration.transform = CGAffineTransformMakeScale(0.2f, 0.2f);
             _decoration.alpha = 0.0;
@@ -846,7 +846,6 @@ static const CGFloat outerCircleMinScale = innerCircleRadius / outerCircleRadius
     NSTimeInterval t = CACurrentMediaTime();
     
     _currentLevel = _currentLevel * 0.9f + _inputLevel * 0.1f;
+    [_decoration tick:_currentLevel];
     
     _currentTranslation = MIN(0.0, _currentTranslation * 0.7f + _targetTranslation * 0.3f);
     _cancelTranslation = MIN(0.0, _cancelTranslation * 0.7f + _cancelTargetTranslation * 0.3f);
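
Both this button and the new VoiceBlobView below smooth raw mic input with the same one-pole low-pass filter: keep 90% of the previous value and blend in 10% of the new sample. A standalone sketch of the filter (illustrative, not part of the commit):

    import CoreGraphics

    // Exponential moving average: spikes are damped and the displayed
    // level decays smoothly once the input falls silent.
    struct LevelSmoother {
        private(set) var level: CGFloat = 0
        mutating func update(with sample: CGFloat) -> CGFloat {
            level = level * 0.9 + sample * 0.1
            return level
        }
    }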
@@ -72,7 +72,7 @@ private enum DebugControllerEntry: ItemListNodeEntry {
     case alternativeFolderTabs(Bool)
     case playerEmbedding(Bool)
     case playlistPlayback(Bool)
-    case enableHighBitrateVideoCalls(Bool)
+    case preferredVideoCodec(Int, String, String?, Bool)
     case hostInfo(PresentationTheme, String)
     case versionInfo(PresentationTheme)
 
@@ -88,14 +88,14 @@ private enum DebugControllerEntry: ItemListNodeEntry {
             return DebugControllerSection.experiments.rawValue
         case .clearTips, .reimport, .resetData, .resetDatabase, .resetHoles, .reindexUnread, .resetBiometricsData, .optimizeDatabase, .photoPreview, .knockoutWallpaper, .alternativeFolderTabs, .playerEmbedding, .playlistPlayback:
             return DebugControllerSection.experiments.rawValue
-        case .enableHighBitrateVideoCalls:
+        case .preferredVideoCodec:
             return DebugControllerSection.videoExperiments.rawValue
         case .hostInfo, .versionInfo:
             return DebugControllerSection.info.rawValue
         }
     }
     
-    var stableId: Int32 {
+    var stableId: Int {
         switch self {
         case .sendLogs:
             return 0
@@ -147,12 +147,12 @@ private enum DebugControllerEntry: ItemListNodeEntry {
             return 24
         case .playlistPlayback:
             return 25
-        case .enableHighBitrateVideoCalls:
-            return 26
+        case let .preferredVideoCodec(index, _, _, _):
+            return 26 + index
         case .hostInfo:
-            return 29
+            return 100
         case .versionInfo:
-            return 30
+            return 101
         }
     }
     
@@ -570,12 +570,12 @@ private enum DebugControllerEntry: ItemListNodeEntry {
                     })
                 }).start()
             })
-        case let .enableHighBitrateVideoCalls(value):
-            return ItemListSwitchItem(presentationData: presentationData, title: "HD Video Calls", value: value, sectionId: self.section, style: .blocks, updated: { value in
+        case let .preferredVideoCodec(_, title, value, isSelected):
+            return ItemListCheckboxItem(presentationData: presentationData, title: title, style: .right, checked: isSelected, zeroSeparatorInsets: false, sectionId: self.section, action: {
                 let _ = arguments.sharedContext.accountManager.transaction ({ transaction in
                     transaction.updateSharedData(ApplicationSpecificSharedDataKeys.experimentalUISettings, { settings in
                         var settings = settings as? ExperimentalUISettings ?? ExperimentalUISettings.defaultSettings
-                        settings.enableHighBitrateVideoCalls = value
+                        settings.preferredVideoCodec = value
                         return settings
                     })
                 }).start()
@@ -625,7 +625,18 @@ private func debugControllerEntries(presentationData: PresentationData, loggingS
     entries.append(.alternativeFolderTabs(experimentalSettings.foldersTabAtBottom))
     entries.append(.playerEmbedding(experimentalSettings.playerEmbedding))
     entries.append(.playlistPlayback(experimentalSettings.playlistPlayback))
-    entries.append(.enableHighBitrateVideoCalls(experimentalSettings.enableHighBitrateVideoCalls))
+    
+    let codecs: [(String, String?)] = [
+        ("No Preference", nil),
+        ("H265", "H265"),
+        ("H264", "H264"),
+        ("VP8", "VP8"),
+        ("VP9", "VP9")
+    ]
+    
+    for i in 0 ..< codecs.count {
+        entries.append(.preferredVideoCodec(i, codecs[i].0, codecs[i].1, experimentalSettings.preferredVideoCodec == codecs[i].1))
+    }
     
     if let backupHostOverride = networkSettings?.backupHostOverride {
         entries.append(.hostInfo(presentationData.theme, "Host: \(backupHostOverride)"))
@@ -358,6 +358,8 @@ final class CallControllerNode: ViewControllerTracingNode, CallControllerNodePro
     private var validLayout: (ContainerViewLayout, CGFloat)?
     private var disableActionsUntilTimestamp: Double = 0.0
     
+    private var displayedVersionOutdatedAlert: Bool = false
+    
     var isMuted: Bool = false {
         didSet {
             self.buttonsNode.isMuted = self.isMuted
@@ -417,9 +419,6 @@ final class CallControllerNode: ViewControllerTracingNode, CallControllerNodePro
         self.containerTransformationNode.clipsToBounds = true
         
         self.containerNode = ASDisplayNode()
-        if self.shouldStayHiddenUntilConnection {
-            self.containerNode.alpha = 0.0
-        }
         
         self.imageNode = TransformImageNode()
         self.imageNode.contentAnimations = [.subsequentUpdates]
@@ -564,7 +563,13 @@ final class CallControllerNode: ViewControllerTracingNode, CallControllerNodePro
         
         self.backButtonNode.addTarget(self, action: #selector(self.backPressed), forControlEvents: .touchUpInside)
         
-        if !shouldStayHiddenUntilConnection && call.isVideo && call.isOutgoing {
+        if shouldStayHiddenUntilConnection {
+            self.containerNode.alpha = 0.0
+            Queue.mainQueue().after(3.0, { [weak self] in
+                self?.containerNode.alpha = 1.0
+                self?.animateIn()
+            })
+        } else if call.isVideo && call.isOutgoing {
             self.containerNode.alpha = 0.0
             Queue.mainQueue().after(1.0, { [weak self] in
                 self?.containerNode.alpha = 1.0
@@ -881,8 +886,20 @@ final class CallControllerNode: ViewControllerTracingNode, CallControllerNodePro
                 case .hungUp, .missed:
                     statusValue = .text(string: self.presentationData.strings.Call_StatusEnded, displayLogo: false)
                 }
-            case .error:
-                statusValue = .text(string: self.presentationData.strings.Call_StatusFailed, displayLogo: false)
+            case let .error(error):
+                let text = self.presentationData.strings.Call_StatusFailed
+                switch error {
+                case .notSupportedByPeer:
+                    if !self.displayedVersionOutdatedAlert, let peer = self.peer {
+                        self.displayedVersionOutdatedAlert = true
+                        
+                        self.present?(textAlertController(sharedContext: self.sharedContext, title: nil, text: self.presentationData.strings.Call_ParticipantVersionOutdatedError(peer.displayTitle(strings: self.presentationData.strings, displayOrder: self.presentationData.nameDisplayOrder)).0, actions: [TextAlertAction(type: .defaultAction, title: self.presentationData.strings.Common_OK, action: {
+                        })]))
+                    }
+                default:
+                    break
+                }
+                statusValue = .text(string: text, displayLogo: false)
             }
         } else {
             statusValue = .text(string: self.presentationData.strings.Call_StatusEnded, displayLogo: false)
@@ -1202,6 +1219,7 @@ final class CallControllerNode: ViewControllerTracingNode, CallControllerNodePro
         
         let previewVideoSide = interpolate(from: 350.0, to: 200.0, value: 1.0 - self.pictureInPictureTransitionFraction)
         var previewVideoSize = layout.size.aspectFitted(CGSize(width: previewVideoSide, height: previewVideoSide))
+        previewVideoSize = CGSize(width: 30.0, height: 45.0).aspectFitted(previewVideoSize)
         if let minimizedVideoNode = minimizedVideoNode {
             switch minimizedVideoNode.currentOrientation {
             case .rotation90, .rotation270:
@@ -1377,6 +1395,7 @@ final class CallControllerNode: ViewControllerTracingNode, CallControllerNodePro
         }
         
         if let expandedVideoNode = self.expandedVideoNode {
+            transition.updateAlpha(node: expandedVideoNode, alpha: 1.0)
             var expandedVideoTransition = transition
             if expandedVideoNode.frame.isEmpty || self.disableAnimationForExpandedVideoOnce {
                 expandedVideoTransition = .immediate
@@ -1424,6 +1443,7 @@ final class CallControllerNode: ViewControllerTracingNode, CallControllerNodePro
         
         
         if let minimizedVideoNode = self.minimizedVideoNode {
+            transition.updateAlpha(node: minimizedVideoNode, alpha: pipTransitionAlpha)
             var minimizedVideoTransition = transition
             var didAppear = false
             if minimizedVideoNode.frame.isEmpty {
@@ -169,7 +169,7 @@ public final class PresentationCallImpl: PresentationCall {
     public let isOutgoing: Bool
     public var isVideo: Bool
     public var isVideoPossible: Bool
-    public let enableHighBitrateVideoCalls: Bool
+    public let preferredVideoCodec: String?
     public let peer: Peer?
     
     private let serializedData: String?
@@ -265,7 +265,7 @@ public final class PresentationCallImpl: PresentationCall {
         updatedNetworkType: Signal<NetworkType, NoError>,
         startWithVideo: Bool,
         isVideoPossible: Bool,
-        enableHighBitrateVideoCalls: Bool
+        preferredVideoCodec: String?
     ) {
         self.account = account
         self.audioSession = audioSession
@@ -292,9 +292,9 @@ public final class PresentationCallImpl: PresentationCall {
         self.isOutgoing = isOutgoing
         self.isVideo = initialState?.type == .video
         self.isVideoPossible = isVideoPossible
+        self.preferredVideoCodec = preferredVideoCodec
         self.peer = peer
         self.isVideo = startWithVideo
-        self.enableHighBitrateVideoCalls = enableHighBitrateVideoCalls
         if self.isVideo {
             self.videoCapturer = OngoingCallVideoCapturer()
             self.statePromise.set(PresentationCallState(state: isOutgoing ? .waiting : .ringing, videoState: .active, remoteVideoState: .inactive, remoteAudioState: .active, remoteBatteryLevel: .normal))
@@ -508,7 +508,7 @@ public final class PresentationCallImpl: PresentationCall {
         } else {
             if self.isVideo {
                 mappedVideoState = .active
-            } else if self.isVideoPossible {
+            } else if self.isVideoPossible && sessionState.isVideoPossible {
                 mappedVideoState = .inactive
             } else {
                 mappedVideoState = .notAvailable
@@ -557,8 +557,8 @@ public final class PresentationCallImpl: PresentationCall {
         case .accepting:
             self.callWasActive = true
             presentationState = PresentationCallState(state: .connecting(nil), videoState: mappedVideoState, remoteVideoState: mappedRemoteVideoState, remoteAudioState: mappedRemoteAudioState, remoteBatteryLevel: mappedRemoteBatteryLevel)
-        case .dropping:
-            presentationState = PresentationCallState(state: .terminating, videoState: mappedVideoState, remoteVideoState: .inactive, remoteAudioState: mappedRemoteAudioState, remoteBatteryLevel: mappedRemoteBatteryLevel)
+        case let .dropping(reason):
+            presentationState = PresentationCallState(state: .terminating(reason), videoState: mappedVideoState, remoteVideoState: .inactive, remoteAudioState: mappedRemoteAudioState, remoteBatteryLevel: mappedRemoteBatteryLevel)
         case let .terminated(id, reason, options):
             presentationState = PresentationCallState(state: .terminated(id, reason, self.callWasActive && (options.contains(.reportRating) || self.shouldPresentCallRating)), videoState: mappedVideoState, remoteVideoState: .inactive, remoteAudioState: mappedRemoteAudioState, remoteBatteryLevel: mappedRemoteBatteryLevel)
         case let .requesting(ringing):
@@ -570,7 +570,7 @@ public final class PresentationCallImpl: PresentationCall {
         case .initializing:
             presentationState = PresentationCallState(state: .connecting(keyVisualHash), videoState: mappedVideoState, remoteVideoState: mappedRemoteVideoState, remoteAudioState: mappedRemoteAudioState, remoteBatteryLevel: mappedRemoteBatteryLevel)
         case .failed:
-            presentationState = nil
+            presentationState = PresentationCallState(state: .terminating(.error(.disconnected)), videoState: mappedVideoState, remoteVideoState: mappedRemoteVideoState, remoteAudioState: mappedRemoteAudioState, remoteBatteryLevel: mappedRemoteBatteryLevel)
             self.callSessionManager.drop(internalId: self.internalId, reason: .disconnect, debugLog: .single(nil))
         case .connected:
             let timestamp: Double
@@ -606,7 +606,7 @@ public final class PresentationCallImpl: PresentationCall {
             if let _ = audioSessionControl, !wasActive || previousControl == nil {
                 let logName = "\(id.id)_\(id.accessHash)"
                 
-                let ongoingContext = OngoingCallContext(account: account, callSessionManager: self.callSessionManager, internalId: self.internalId, proxyServer: proxyServer, initialNetworkType: self.currentNetworkType, updatedNetworkType: self.updatedNetworkType, serializedData: self.serializedData, dataSaving: dataSaving, derivedState: self.derivedState, key: key, isOutgoing: sessionState.isOutgoing, video: self.videoCapturer, connections: connections, maxLayer: maxLayer, version: version, allowP2P: allowsP2P, enableHighBitrateVideoCalls: self.enableHighBitrateVideoCalls, audioSessionActive: self.audioSessionActive.get(), logName: logName)
+                let ongoingContext = OngoingCallContext(account: account, callSessionManager: self.callSessionManager, internalId: self.internalId, proxyServer: proxyServer, initialNetworkType: self.currentNetworkType, updatedNetworkType: self.updatedNetworkType, serializedData: self.serializedData, dataSaving: dataSaving, derivedState: self.derivedState, key: key, isOutgoing: sessionState.isOutgoing, video: self.videoCapturer, connections: connections, maxLayer: maxLayer, version: version, allowP2P: allowsP2P, audioSessionActive: self.audioSessionActive.get(), logName: logName, preferredVideoCodec: self.preferredVideoCodec)
                 self.ongoingContext = ongoingContext
                 ongoingContext.setIsMuted(self.isMutedValue)
                 
@@ -314,7 +314,7 @@ public final class PresentationCallManagerImpl: PresentationCallManager {
                         updatedNetworkType: firstState.0.networkType,
                         startWithVideo: firstState.2.isVideo,
                         isVideoPossible: firstState.2.isVideoPossible,
-                        enableHighBitrateVideoCalls: experimentalSettings.enableHighBitrateVideoCalls
+                        preferredVideoCodec: experimentalSettings.preferredVideoCodec
                     )
                     strongSelf.updateCurrentCall(call)
                     strongSelf.currentCallPromise.set(.single(call))
@@ -333,7 +333,7 @@ public final class PresentationCallManagerImpl: PresentationCallManager {
         } else {
             for (account, _, state, _, _) in ringingStates {
                 if state.id != self.currentCall?.internalId {
-                    account.callSessionManager.drop(internalId: state.id, reason: .missed, debugLog: .single(nil))
+                    account.callSessionManager.drop(internalId: state.id, reason: .busy, debugLog: .single(nil))
                 }
             }
         }
@@ -556,7 +556,7 @@ public final class PresentationCallManagerImpl: PresentationCallManager {
                         updatedNetworkType: account.networkType,
                         startWithVideo: isVideo,
                         isVideoPossible: isVideoPossible,
-                        enableHighBitrateVideoCalls: experimentalSettings.enableHighBitrateVideoCalls
+                        preferredVideoCodec: experimentalSettings.preferredVideoCodec
                     )
                     strongSelf.updateCurrentCall(call)
                     strongSelf.currentCallPromise.set(.single(call))
@@ -97,7 +97,7 @@ enum CallSessionInternalState {
     case requested(id: Int64, accessHash: Int64, a: Data, gA: Data, config: SecretChatEncryptionConfig, remoteConfirmationTimestamp: Int32?)
     case confirming(id: Int64, accessHash: Int64, key: Data, keyId: Int64, keyVisualHash: Data, disposable: Disposable)
     case active(id: Int64, accessHash: Int64, beginTimestamp: Int32, key: Data, keyId: Int64, keyVisualHash: Data, connections: CallSessionConnectionSet, maxLayer: Int32, version: String, allowsP2P: Bool)
-    case dropping(Disposable)
+    case dropping(reason: CallSessionTerminationReason, disposable: Disposable)
     case terminated(id: Int64?, accessHash: Int64?, reason: CallSessionTerminationReason, reportRating: Bool, sendDebugLogs: Bool)
 }
 
@@ -138,7 +138,7 @@ public enum CallSessionState {
     case accepting
     case requesting(ringing: Bool)
     case active(id: CallId, key: Data, keyVisualHash: Data, connections: CallSessionConnectionSet, maxLayer: Int32, version: String, allowsP2P: Bool)
-    case dropping
+    case dropping(reason: CallSessionTerminationReason)
     case terminated(id: CallId?, reason: CallSessionTerminationReason, options: CallTerminationOptions)
     
     fileprivate init(_ context: CallSessionContext) {
@@ -155,8 +155,8 @@ public enum CallSessionState {
             self = .requesting(ringing: remoteConfirmationTimestamp != nil)
         case let .active(id, accessHash, _, key, _, keyVisualHash, connections, maxLayer, version, allowsP2P):
             self = .active(id: CallId(id: id, accessHash: accessHash), key: key, keyVisualHash: keyVisualHash, connections: connections, maxLayer: maxLayer, version: version, allowsP2P: allowsP2P)
-        case .dropping:
-            self = .dropping
+        case let .dropping(reason, _):
+            self = .dropping(reason: reason)
         case let .terminated(id, accessHash, reason, reportRating, sendDebugLogs):
             var options = CallTerminationOptions()
             if reportRating {
@@ -186,6 +186,7 @@ public struct CallSession {
     public let isOutgoing: Bool
     public let type: CallType
     public let state: CallSessionState
+    public let isVideoPossible: Bool
 }
 
 public enum CallSessionConnection: Equatable {
@@ -277,7 +278,7 @@ private final class CallSessionContext {
     let peerId: PeerId
     let isOutgoing: Bool
     var type: CallSession.CallType
-    let isVideoPossible: Bool
+    var isVideoPossible: Bool
     var state: CallSessionInternalState
     let subscribers = Bag<(CallSession) -> Void>()
     let signalingSubscribers = Bag<(Data) -> Void>()
@@ -412,7 +413,7 @@ private final class CallSessionManagerContext {
             let index = context.subscribers.add { next in
                 subscriber.putNext(next)
             }
-            subscriber.putNext(CallSession(id: internalId, isOutgoing: context.isOutgoing, type: context.type, state: CallSessionState(context)))
+            subscriber.putNext(CallSession(id: internalId, isOutgoing: context.isOutgoing, type: context.type, state: CallSessionState(context), isVideoPossible: context.isVideoPossible))
             disposable.set(ActionDisposable {
                 queue.async {
                     if let strongSelf = self, let context = strongSelf.contexts[internalId] {
@@ -473,7 +474,7 @@ private final class CallSessionManagerContext {
     
     private func contextUpdated(internalId: CallSessionInternalId) {
         if let context = self.contexts[internalId] {
-            let session = CallSession(id: internalId, isOutgoing: context.isOutgoing, type: context.type, state: CallSessionState(context))
+            let session = CallSession(id: internalId, isOutgoing: context.isOutgoing, type: context.type, state: CallSessionState(context), isVideoPossible: context.isVideoPossible)
             for subscriber in context.subscribers.copyItems() {
                 subscriber(session)
             }
@@ -526,7 +527,9 @@ private final class CallSessionManagerContext {
                 wasRinging = true
                 let internalReason: DropCallSessionReason
                 switch reason {
-                case .busy, .hangUp:
+                case .busy:
                     internalReason = .busy
+                case .hangUp:
+                    internalReason = .hangUp(0)
                 case .disconnect:
                     internalReason = .disconnect
@@ -578,7 +581,8 @@ private final class CallSessionManagerContext {
         
         if let (id, accessHash, reason) = dropData {
             self.contextIdByStableId.removeValue(forKey: id)
-            context.state = .dropping((dropCallSession(network: self.network, addUpdates: self.addUpdates, stableId: id, accessHash: accessHash, isVideo: isVideo, reason: reason)
+            let mappedReason: CallSessionTerminationReason = .ended(.hungUp)
+            context.state = .dropping(reason: mappedReason, disposable: (dropCallSession(network: self.network, addUpdates: self.addUpdates, stableId: id, accessHash: accessHash, isVideo: isVideo, reason: reason)
             |> deliverOn(self.queue)).start(next: { [weak self] reportRating, sendDebugLogs in
                 if let strongSelf = self {
                     if let context = strongSelf.contexts[internalId] {
@@ -803,6 +807,9 @@ private final class CallSessionManagerContext {
                     switch callProtocol {
                     case let .phoneCallProtocol(_, _, maxLayer, versions):
                         if !versions.isEmpty {
+                            let isVideoPossible = self.videoVersions().contains(where: { versions.contains($0) })
+                            context.isVideoPossible = isVideoPossible
+                            
                             context.state = .active(id: id, accessHash: accessHash, beginTimestamp: startDate, key: key, keyId: calculatedKeyId, keyVisualHash: keyVisualHash, connections: parseConnectionSet(primary: connections.first!, alternative: Array(connections[1...])), maxLayer: maxLayer, version: versions[0], allowsP2P: allowsP2P)
                             self.contextUpdated(internalId: internalId)
                         } else {
@@ -819,6 +826,9 @@ private final class CallSessionManagerContext {
                     switch callProtocol {
                     case let .phoneCallProtocol(_, _, maxLayer, versions):
                         if !versions.isEmpty {
+                            let isVideoPossible = self.videoVersions().contains(where: { versions.contains($0) })
+                            context.isVideoPossible = isVideoPossible
+                            
                             context.state = .active(id: id, accessHash: accessHash, beginTimestamp: startDate, key: key, keyId: keyId, keyVisualHash: keyVisualHash, connections: parseConnectionSet(primary: connections.first!, alternative: Array(connections[1...])), maxLayer: maxLayer, version: versions[0], allowsP2P: allowsP2P)
                             self.contextUpdated(internalId: internalId)
                         } else {
@@ -848,7 +858,7 @@ private final class CallSessionManagerContext {
             }
         }
         if let context = self.contexts[internalId] {
-            let callSession = CallSession(id: internalId, isOutgoing: context.isOutgoing, type: context.type, state: CallSessionState(context))
+            let callSession = CallSession(id: internalId, isOutgoing: context.isOutgoing, type: context.type, state: CallSessionState(context), isVideoPossible: context.isVideoPossible)
             if let resultRingingStateValue = resultRingingStateValue {
                 resultRingingState = (resultRingingStateValue, callSession)
             }
@@ -3,53 +3,81 @@ import UIKit
 import Display
 import LegacyComponents
 
-private enum Constants {
-    static let maxLevel: CGFloat = 4
-}
-
 final class VoiceBlobView: UIView, TGModernConversationInputMicButtonDecoration {
-    private let smallBlob = BlobView(
-        pointsCount: 8,
-        minRandomness: 0.1,
-        maxRandomness: 0.5,
-        minSpeed: 0.2,
-        maxSpeed: 0.6,
-        minScale: 0.45,
-        maxScale: 0.55,
-        scaleSpeed: 0.2,
-        isCircle: true
-    )
-    private let mediumBlob = BlobView(
-        pointsCount: 8,
-        minRandomness: 1,
-        maxRandomness: 1,
-        minSpeed: 1.5,
-        maxSpeed: 7,
-        minScale: 0.52,
-        maxScale: 0.87,
-        scaleSpeed: 0.2,
-        isCircle: false
-    )
-    private let bigBlob = BlobView(
-        pointsCount: 8,
-        minRandomness: 1,
-        maxRandomness: 1,
-        minSpeed: 1.5,
-        maxSpeed: 7,
-        minScale: 0.57,
-        maxScale: 1,
-        scaleSpeed: 0.2,
-        isCircle: false
-    )
+    private let smallBlob: BlobView
+    private let mediumBlob: BlobView
+    private let bigBlob: BlobView
     
-    override init(frame: CGRect) {
+    private let maxLevel: CGFloat
+    
+    private var displayLinkAnimator: ConstantDisplayLinkAnimator?
+    
+    private var audioLevel: CGFloat = 0
+    private var presentationAudioLevel: CGFloat = 0
+    
+    private(set) var isAnimating = false
+    
+    typealias BlobRange = (min: CGFloat, max: CGFloat)
+    
+    init(
+        frame: CGRect,
+        maxLevel: CGFloat,
+        smallBlobRange: BlobRange,
+        mediumBlobRange: BlobRange,
+        bigBlobRange: BlobRange
+    ) {
+        self.maxLevel = maxLevel
+        
+        self.smallBlob = BlobView(
+            pointsCount: 8,
+            minRandomness: 0.1,
+            maxRandomness: 0.5,
+            minSpeed: 0.2,
+            maxSpeed: 0.6,
+            minScale: smallBlobRange.min,
+            maxScale: smallBlobRange.max,
+            scaleSpeed: 0.2,
+            isCircle: true
+        )
+        self.mediumBlob = BlobView(
+            pointsCount: 8,
+            minRandomness: 1,
+            maxRandomness: 1,
+            minSpeed: 1.5,
+            maxSpeed: 7,
+            minScale: mediumBlobRange.min,
+            maxScale: mediumBlobRange.max,
+            scaleSpeed: 0.2,
+            isCircle: false
+        )
+        self.bigBlob = BlobView(
+            pointsCount: 8,
+            minRandomness: 1,
+            maxRandomness: 1,
+            minSpeed: 1.5,
+            maxSpeed: 7,
+            minScale: bigBlobRange.min,
+            maxScale: bigBlobRange.max,
+            scaleSpeed: 0.2,
+            isCircle: false
+        )
+        
         super.init(frame: frame)
         
         addSubview(bigBlob)
         addSubview(mediumBlob)
         addSubview(smallBlob)
+        
+        displayLinkAnimator = ConstantDisplayLinkAnimator() { [weak self] in
+            guard let strongSelf = self else { return }
+            
+            strongSelf.presentationAudioLevel = strongSelf.presentationAudioLevel * 0.9 + strongSelf.audioLevel * 0.1
+            
+            strongSelf.smallBlob.level = strongSelf.presentationAudioLevel
+            strongSelf.mediumBlob.level = strongSelf.presentationAudioLevel
+            strongSelf.bigBlob.level = strongSelf.presentationAudioLevel
+        }
     }
     
     required init?(coder: NSCoder) {
@@ -63,45 +91,61 @@ final class VoiceBlobView: UIView, TGModernConversationInputMicButtonDecoration
     }
     
     func updateLevel(_ level: CGFloat) {
-        let normalizedLevel = min(1, max(level / Constants.maxLevel, 0))
+        let normalizedLevel = min(1, max(level / maxLevel, 0))
         
         smallBlob.updateSpeedLevel(to: normalizedLevel)
         mediumBlob.updateSpeedLevel(to: normalizedLevel)
         bigBlob.updateSpeedLevel(to: normalizedLevel)
     }
     
     func tick(_ level: CGFloat) {
-        let normalizedLevel = min(1, max(level / Constants.maxLevel, 0))
-        
-        smallBlob.level = normalizedLevel
-        mediumBlob.level = normalizedLevel
-        bigBlob.level = normalizedLevel
+        let normalizedLevel = min(1, max(level / maxLevel, 0))
+        
+        audioLevel = normalizedLevel
     }
     
     func startAnimating() {
-        mediumBlob.layer.animateScale(from: 0.5, to: 1, duration: 0.1, removeOnCompletion: false)
-        bigBlob.layer.animateScale(from: 0.5, to: 1, duration: 0.1, removeOnCompletion: false)
+        guard !isAnimating else { return }
+        isAnimating = true
+        
+        mediumBlob.layer.animateScale(from: 0.5, to: 1, duration: 0.15, removeOnCompletion: false)
+        bigBlob.layer.animateScale(from: 0.5, to: 1, duration: 0.15, removeOnCompletion: false)
+        
+        updateBlobsState()
+        
+        displayLinkAnimator?.isPaused = false
     }
     
     func stopAnimating() {
-        mediumBlob.layer.animateScale(from: 1.0, to: 0.5, duration: 0.1, removeOnCompletion: false)
-        bigBlob.layer.animateScale(from: 1.0, to: 0.5, duration: 0.1, removeOnCompletion: false)
+        guard isAnimating else { return }
+        isAnimating = false
+        
+        mediumBlob.layer.animateScale(from: 1.0, to: 0.5, duration: 0.15, removeOnCompletion: false)
+        bigBlob.layer.animateScale(from: 1.0, to: 0.5, duration: 0.15, removeOnCompletion: false)
+        
+        updateBlobsState()
+        
+        displayLinkAnimator?.isPaused = true
+    }
+    
+    private func updateBlobsState() {
+        if isAnimating {
+            if smallBlob.frame.size != .zero {
+                smallBlob.startAnimating()
+                mediumBlob.startAnimating()
+                bigBlob.startAnimating()
+            }
+        } else {
+            smallBlob.stopAnimating()
+            mediumBlob.stopAnimating()
+            bigBlob.stopAnimating()
+        }
     }
     
     override func layoutSubviews() {
         super.layoutSubviews()
         
-        let isInitial = smallBlob.frame == .zero
-        
         smallBlob.frame = bounds
         mediumBlob.frame = bounds
         bigBlob.frame = bounds
         
-        if isInitial {
-            smallBlob.startAnimating()
-            mediumBlob.startAnimating()
-            bigBlob.startAnimating()
-        }
+        updateBlobsState()
     }
 }
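
The hardcoded Constants.maxLevel and per-blob scale ranges are now injected, so each call site can tune the visualization. For reference, the message-bubble configuration used later in this diff looks like this (the frame value is illustrative; the real one derives from progressFrame):

    let bubbleBlobs = VoiceBlobView(
        frame: CGRect(x: 0.0, y: 0.0, width: 61.0, height: 61.0), // assumed
        maxLevel: 0.3,
        smallBlobRange: (0, 0),       // small blob effectively disabled in bubbles
        mediumBlobRange: (0.7, 0.8),
        bigBlobRange: (0.8, 0.9)
    )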
@@ -221,40 +265,19 @@ final class BlobView: UIView {
         animateToNewShape()
     }
     
-    func animateToNewScale() {
-        let scaleLevelForAnimation: CGFloat = {
-            if scaleLevelsToBalance.isEmpty {
-                return 0
-            }
-            return scaleLevelsToBalance.reduce(0, +) / CGFloat(scaleLevelsToBalance.count)
-        }()
-        let isDownscale = lastScaleLevel > scaleLevelForAnimation
-        lastScaleLevel = scaleLevelForAnimation
-        
-        shapeLayer.pop_removeAnimation(forKey: "scale")
-        
-        let currentScale = minScale + (maxScale - minScale) * scaleLevelForAnimation
-        let scaleAnimation = POPBasicAnimation(propertyNamed: kPOPLayerScaleXY)!
-        scaleAnimation.toValue = CGPoint(x: currentScale, y: currentScale)
-        scaleAnimation.duration = isDownscale ? 0.45 : CFTimeInterval(scaleSpeed)
-        scaleAnimation.completionBlock = { [weak self] animation, finished in
-            if finished {
-                self?.animateToNewScale()
-            }
-        }
-        shapeLayer.pop_add(scaleAnimation, forKey: "scale")
-        
-        scaleLevel = 0
-        scaleLevelsToBalance.removeAll()
+    func stopAnimating() {
+        fromPoints = currentPoints
+        toPoints = nil
+        pop_removeAnimation(forKey: "blob")
     }
     
-    func animateToNewShape() {
+    private func animateToNewShape() {
         guard !isCircle else { return }
         
         if pop_animation(forKey: "blob") != nil {
             fromPoints = currentPoints
             toPoints = nil
-            shapeLayer.pop_removeAnimation(forKey: "blob")
+            pop_removeAnimation(forKey: "blob")
         }
         
         if fromPoints == nil {
@@ -34,7 +34,6 @@ final class ChatMessageInteractiveFileNode: ASDisplayNode {
     private var iconNode: TransformImageNode?
     private var statusNode: SemanticStatusNode?
     private var playbackAudioLevelView: VoiceBlobView?
-    private var displayLinkAnimator: ConstantDisplayLinkAnimator?
     private var streamingStatusNode: RadialStatusNode?
     private var tapRecognizer: UITapGestureRecognizer?
     
@@ -63,21 +62,10 @@ final class ChatMessageInteractiveFileNode: ASDisplayNode {
     
     var visibility: Bool = false {
         didSet {
-            if self.visibility != oldValue {
-                if self.visibility {
-                    if self.displayLinkAnimator == nil {
-                        self.displayLinkAnimator = ConstantDisplayLinkAnimator(update: { [weak self] in
-                            guard let strongSelf = self else {
-                                return
-                            }
-                            strongSelf.currentAudioLevel = strongSelf.currentAudioLevel * 0.9 + strongSelf.inputAudioLevel * 0.1
-                            strongSelf.playbackAudioLevelView?.tick(strongSelf.currentAudioLevel)
-                        })
-                    }
-                    self.displayLinkAnimator?.isPaused = false
-                } else {
-                    self.displayLinkAnimator?.isPaused = true
-                }
+            guard self.visibility != oldValue else { return }
+            
+            if !self.visibility {
+                self.playbackAudioLevelView?.stopAnimating()
             }
         }
     }
@@ -449,8 +437,13 @@ final class ChatMessageInteractiveFileNode: ASDisplayNode {
             if hasThumbnail {
                 minLayoutWidth = max(titleLayout.size.width, descriptionMaxWidth) + 86.0
             } else if isVoice {
+                var descriptionAndStatusWidth = descriptionLayout.size.width
+                if let statusSize = statusSize {
+                    descriptionAndStatusWidth += 6 + statusSize.width
+                }
                 let calcDuration = max(minVoiceLength, min(maxVoiceLength, CGFloat(audioDuration)))
                 minLayoutWidth = minVoiceWidth + (maxVoiceWidth - minVoiceWidth) * (calcDuration - minVoiceLength) / (maxVoiceLength - minVoiceLength)
+                minLayoutWidth = max(descriptionAndStatusWidth + 56, minLayoutWidth)
             } else {
                 minLayoutWidth = max(titleLayout.size.width, descriptionMaxWidth) + 44.0 + 8.0
             }
@@ -477,7 +470,7 @@ final class ChatMessageInteractiveFileNode: ASDisplayNode {
             }
             
             return (minLayoutWidth, { boundingWidth in
-                let progressDiameter: CGFloat = (isVoice && !hasThumbnail) ? 37.0 : 44.0
+                let progressDiameter: CGFloat = 44.0
                 
                 var iconFrame: CGRect?
                 let progressFrame: CGRect
@@ -487,10 +480,19 @@ final class ChatMessageInteractiveFileNode: ASDisplayNode {
                 if hasThumbnail {
                     let currentIconFrame = CGRect(origin: CGPoint(x: -1.0, y: -7.0), size: CGSize(width: 74.0, height: 74.0))
                     iconFrame = currentIconFrame
-                    progressFrame = CGRect(origin: CGPoint(x: currentIconFrame.minX + floor((currentIconFrame.size.width - progressDiameter) / 2.0), y: currentIconFrame.minY + floor((currentIconFrame.size.height - progressDiameter) / 2.0)), size: CGSize(width: progressDiameter, height: progressDiameter))
+                    progressFrame = CGRect(
+                        origin: CGPoint(
+                            x: currentIconFrame.minX + floor((currentIconFrame.size.width - progressDiameter) / 2.0),
+                            y: currentIconFrame.minY + floor((currentIconFrame.size.height - progressDiameter) / 2.0)
+                        ),
+                        size: CGSize(width: progressDiameter, height: progressDiameter)
+                    )
                     controlAreaWidth = 86.0
                 } else {
-                    progressFrame = CGRect(origin: CGPoint(x: 0.0, y: isVoice ? -5.0 : 0.0), size: CGSize(width: progressDiameter, height: progressDiameter))
+                    progressFrame = CGRect(
+                        origin: CGPoint(x: 3.0, y: -3.0),
+                        size: CGSize(width: progressDiameter, height: progressDiameter)
+                    )
                     controlAreaWidth = progressFrame.maxX + 8.0
                 }
                 
@@ -506,7 +508,7 @@ final class ChatMessageInteractiveFileNode: ASDisplayNode {
                 
                 let descriptionFrame: CGRect
                 if isVoice {
-                    descriptionFrame = CGRect(origin: CGPoint(x: 43.0, y: 19.0), size: descriptionLayout.size)
+                    descriptionFrame = CGRect(origin: CGPoint(x: 56.0, y: 22.0), size: descriptionLayout.size)
                 } else {
                     descriptionFrame = CGRect(origin: CGPoint(x: titleFrame.minX, y: titleFrame.maxY - 1.0), size: descriptionLayout.size)
                 }
@@ -516,7 +518,7 @@ final class ChatMessageInteractiveFileNode: ASDisplayNode {
                     let textSizes = titleFrame.union(descriptionFrame).size
                     fittedLayoutSize = CGSize(width: textSizes.width + controlAreaWidth, height: 59.0)
                 } else if isVoice {
-                    fittedLayoutSize = CGSize(width: minLayoutWidth, height: 27.0)
+                    fittedLayoutSize = CGSize(width: minLayoutWidth, height: 38.0)
                 } else {
                     let unionSize = titleFrame.union(descriptionFrame).union(progressFrame).size
                     fittedLayoutSize = CGSize(width: unionSize.width, height: unionSize.height + 6.0)
@@ -529,8 +531,9 @@ final class ChatMessageInteractiveFileNode: ASDisplayNode {
                 }
                 
                 if let statusFrameValue = statusFrame, descriptionFrame.intersects(statusFrameValue) {
-                    fittedLayoutSize.height += statusFrameValue.height
-                    statusFrame = statusFrameValue.offsetBy(dx: 0.0, dy: statusFrameValue.height)
+                    let intersection = descriptionFrame.intersection(statusFrameValue)
+                    let addedWidth = intersection.width + 20
+                    fittedLayoutSize.width += addedWidth
                 }
                 if let statusFrameValue = statusFrame, let iconFrame = iconFrame, iconFrame.intersects(statusFrameValue) {
                     fittedLayoutSize.height += 15.0
@@ -598,7 +601,7 @@ final class ChatMessageInteractiveFileNode: ASDisplayNode {
                             strongSelf.waveformScrubbingNode = waveformScrubbingNode
                             strongSelf.addSubnode(waveformScrubbingNode)
                         }
-                        strongSelf.waveformScrubbingNode?.frame = CGRect(origin: CGPoint(x: 43.0, y: -1.0), size: CGSize(width: boundingWidth - 41.0, height: 12.0))
+                        strongSelf.waveformScrubbingNode?.frame = CGRect(origin: CGPoint(x: 57.0, y: 1.0), size: CGSize(width: boundingWidth - 60.0, height: 15.0))
                         let waveformColor: UIColor
                         if incoming {
                             if consumableContentIcon != nil {
@@ -679,7 +682,7 @@ final class ChatMessageInteractiveFileNode: ASDisplayNode {
                         strongSelf.waveformNode.displaysAsynchronously = !presentationData.isPreview
                         strongSelf.statusNode?.displaysAsynchronously = !presentationData.isPreview
                         strongSelf.statusNode?.frame = progressFrame
-                        strongSelf.playbackAudioLevelView?.frame = progressFrame.insetBy(dx: -20.0, dy: -20.0)
+                        strongSelf.playbackAudioLevelView?.frame = progressFrame.insetBy(dx: -12.0, dy: -12.0)
                         strongSelf.progressFrame = progressFrame
                         strongSelf.streamingCacheStatusFrame = streamingCacheStatusFrame
                         strongSelf.fileIconImage = fileIconImage
@@ -860,19 +863,35 @@ final class ChatMessageInteractiveFileNode: ASDisplayNode {
                 let statusNode = SemanticStatusNode(backgroundNodeColor: backgroundNodeColor, foregroundNodeColor: foregroundNodeColor)
                 self.statusNode = statusNode
                 statusNode.frame = progressFrame
                 
-                if self.playbackAudioLevelView == nil, false {
-                    let playbackAudioLevelView = VoiceBlobView(frame: progressFrame.insetBy(dx: -20.0, dy: -20.0))
-                    playbackAudioLevelView.setColor(presentationData.theme.theme.chat.inputPanel.actionControlFillColor)
-                    self.playbackAudioLevelView = playbackAudioLevelView
-                    self.view.addSubview(playbackAudioLevelView)
-                }
-                
                 self.addSubnode(statusNode)
             } else if let statusNode = self.statusNode {
                 statusNode.backgroundNodeColor = backgroundNodeColor
             }
             
+            if state != .none && isVoice && self.playbackAudioLevelView == nil {
+                let blobFrame = progressFrame.insetBy(dx: -12.0, dy: -12.0)
+                let playbackAudioLevelView = VoiceBlobView(
+                    frame: blobFrame,
+                    maxLevel: 0.3,
+                    smallBlobRange: (0, 0),
+                    mediumBlobRange: (0.7, 0.8),
+                    bigBlobRange: (0.8, 0.9)
+                )
+                self.playbackAudioLevelView = playbackAudioLevelView
+                self.view.addSubview(playbackAudioLevelView)
+                
+                let maskRect = CGRect(origin: .zero, size: blobFrame.size)
+                let playbackMaskLayer = CAShapeLayer()
+                playbackMaskLayer.frame = maskRect
+                playbackMaskLayer.fillRule = .evenOdd
+                let maskPath = UIBezierPath()
+                maskPath.append(UIBezierPath(roundedRect: maskRect.insetBy(dx: 12, dy: 12), cornerRadius: 22))
+                maskPath.append(UIBezierPath(rect: maskRect))
+                playbackMaskLayer.path = maskPath.cgPath
+                playbackAudioLevelView.layer.mask = playbackMaskLayer
+            }
+            self.playbackAudioLevelView?.setColor(presentationData.theme.theme.chat.inputPanel.actionControlFillColor)
+            
             if streamingState != .none && self.streamingStatusNode == nil {
                 let streamingStatusNode = RadialStatusNode(backgroundNodeColor: .clear)
                 self.streamingStatusNode = streamingStatusNode
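
One subtlety in the hunk above is the even-odd mask: the path contains an inset rounded rect plus the full rect, so points inside the rounded rect are enclosed by two subpaths and stay unfilled, cutting a hole that keeps the blobs from drawing over the status button itself. A self-contained sketch of the same cut-out (function name and dimensions are illustrative):

    import UIKit

    // With .evenOdd, regions covered by an even number of subpaths are not
    // filled, so the inner rounded rect becomes a transparent hole.
    func cutoutMask(size: CGSize, inset: CGFloat, cornerRadius: CGFloat) -> CAShapeLayer {
        let rect = CGRect(origin: .zero, size: size)
        let mask = CAShapeLayer()
        mask.frame = rect
        mask.fillRule = .evenOdd
        let path = UIBezierPath()
        path.append(UIBezierPath(roundedRect: rect.insetBy(dx: inset, dy: inset), cornerRadius: cornerRadius))
        path.append(UIBezierPath(rect: rect))
        mask.path = path.cgPath
        return mask
    }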
@@ -893,6 +912,13 @@ final class ChatMessageInteractiveFileNode: ASDisplayNode {
                         statusNode?.removeFromSupernode()
                     }
                 })
+                
+                switch state {
+                case .pause:
+                    self.playbackAudioLevelView?.startAnimating()
+                default:
+                    self.playbackAudioLevelView?.stopAnimating()
+                }
             }
             
             if let streamingStatusNode = self.streamingStatusNode {
@@ -190,8 +190,8 @@ final class ChatRecordingPreviewInputPanelNode: ChatInputPanelNode {
         
         let panelHeight = defaultHeight(metrics: metrics)
         
-        transition.updateFrame(node: self.deleteButton, frame: CGRect(origin: CGPoint(x: leftInset + 2.0 - UIScreenPixel, y: panelHeight - 44 + 1), size: CGSize(width: 40.0, height: 40)))
-        transition.updateFrame(node: self.sendButton, frame: CGRect(origin: CGPoint(x: width - rightInset - 43.0 - UIScreenPixel, y: -UIScreenPixel), size: CGSize(width: 44.0, height: panelHeight)))
+        transition.updateFrame(node: self.deleteButton, frame: CGRect(origin: CGPoint(x: leftInset + 2.0 - UIScreenPixel, y: 1), size: CGSize(width: 40.0, height: 40)))
+        transition.updateFrame(node: self.sendButton, frame: CGRect(origin: CGPoint(x: width - rightInset - 43.0 - UIScreenPixel, y: 2 - UIScreenPixel), size: CGSize(width: 44.0, height: 44)))
         self.binNode.frame = self.deleteButton.bounds
         
         if let slowmodeState = interfaceState.slowmodeState, !interfaceState.isScheduledMessages {
@@ -230,9 +230,10 @@ final class ChatRecordingPreviewInputPanelNode: ChatInputPanelNode {
             self.prevInputPanelNode = nil
             
             if let audioRecordingDotNode = prevTextInputPanelNode.audioRecordingDotNode {
-                audioRecordingDotNode.layer.animateScale(from: 1.0, to: 0.3, duration: 0.15, removeOnCompletion: false)
-                audioRecordingDotNode.layer.animateAlpha(from: CGFloat(audioRecordingDotNode.layer.presentation()?.opacity ?? 1.0), to: 0.0, duration: 0.15, removeOnCompletion: false)
+                let startAlpha = CGFloat(audioRecordingDotNode.layer.presentation()?.opacity ?? 1.0)
+                audioRecordingDotNode.layer.removeAllAnimations()
+                audioRecordingDotNode.layer.animateScale(from: 1.0, to: 0.3, duration: 0.15, removeOnCompletion: false)
+                audioRecordingDotNode.layer.animateAlpha(from: startAlpha, to: 0.0, duration: 0.15, removeOnCompletion: false)
             }
             
             if let audioRecordingTimeNode = prevTextInputPanelNode.audioRecordingTimeNode {
@@ -238,7 +238,13 @@ final class ChatTextInputMediaRecordingButton: TGModernConversationInputMicButto
     }
     
     private lazy var micDecoration: (UIView & TGModernConversationInputMicButtonDecoration) = {
-        let blobView = VoiceBlobView(frame: CGRect(origin: CGPoint(), size: CGSize(width: 220.0, height: 220.0)))
+        let blobView = VoiceBlobView(
+            frame: CGRect(origin: CGPoint(), size: CGSize(width: 220.0, height: 220.0)),
+            maxLevel: 4,
+            smallBlobRange: (0.45, 0.55),
+            mediumBlobRange: (0.52, 0.87),
+            bigBlobRange: (0.57, 1.00)
+        )
         blobView.setColor(self.theme.chat.inputPanel.actionControlFillColor)
         return blobView
     }()
@@ -1060,13 +1060,15 @@ class ChatTextInputPanelNode: ChatInputPanelNode, ASEditableTextNodeDelegate {
                 }
                 
                 animateDotAppearing = transition.isAnimated && !hideInfo
+                if let mediaRecordingState = mediaRecordingState, case .waitingForPreview = mediaRecordingState {
+                    animateDotAppearing = false
+                }
                 
-                audioRecordingDotNode.frame = CGRect(origin: CGPoint(x: leftInset + 2.0 - UIScreenPixel, y: panelHeight - 44 + 1), size: CGSize(width: 40.0, height: 40))
+                audioRecordingDotNode.frame = CGRect(origin: CGPoint(x: leftInset + 2.0 - UIScreenPixel, y: audioRecordingTimeNode.frame.midY - 20), size: CGSize(width: 40.0, height: 40))
                 if animateDotAppearing {
-                    let dotStartScale: CGFloat = (audioRecordingDotNode.layer.presentation()?.value(forKeyPath: "transform.scale.x") as? CGFloat) ?? 1
-                    audioRecordingDotNode.layer.animateScale(from: dotStartScale, to: 1, duration: 0.15, delay: 0, removeOnCompletion: false)
+                    audioRecordingDotNode.layer.animateScale(from: 0.3, to: 1, duration: 0.15, delay: 0, removeOnCompletion: false)
                     if audioRecordingDotNode.layer.animation(forKey: "recording") == nil {
-                        audioRecordingDotNode.layer.animateAlpha(from: CGFloat(audioRecordingDotNode.layer.presentation()?.opacity ?? 1), to: 1, duration: 0.15, delay: 0, completion: { [weak audioRecordingDotNode] finished in
+                        audioRecordingDotNode.layer.animateAlpha(from: CGFloat(audioRecordingDotNode.layer.presentation()?.opacity ?? 0), to: 1, duration: 0.15, delay: 0, completion: { [weak audioRecordingDotNode] finished in
                             if finished {
                                 let animation = CAKeyframeAnimation(keyPath: "opacity")
                                 animation.values = [1.0 as NSNumber, 1.0 as NSNumber, 0.0 as NSNumber]
@@ -9,9 +9,9 @@ public struct ExperimentalUISettings: Equatable, PreferencesEntry {
     public var chatListPhotos: Bool
     public var knockoutWallpaper: Bool
     public var foldersTabAtBottom: Bool
-    public var enableHighBitrateVideoCalls: Bool
     public var playerEmbedding: Bool
     public var playlistPlayback: Bool
+    public var preferredVideoCodec: String?
     
     public static var defaultSettings: ExperimentalUISettings {
         return ExperimentalUISettings(
@@ -21,9 +21,9 @@ public struct ExperimentalUISettings: Equatable, PreferencesEntry {
             chatListPhotos: false,
             knockoutWallpaper: false,
             foldersTabAtBottom: false,
-            enableHighBitrateVideoCalls: false,
             playerEmbedding: false,
-            playlistPlayback: false
+            playlistPlayback: false,
+            preferredVideoCodec: nil
         )
     }
     
@@ -34,9 +34,9 @@ public struct ExperimentalUISettings: Equatable, PreferencesEntry {
         chatListPhotos: Bool,
         knockoutWallpaper: Bool,
         foldersTabAtBottom: Bool,
-        enableHighBitrateVideoCalls: Bool,
         playerEmbedding: Bool,
-        playlistPlayback: Bool
+        playlistPlayback: Bool,
+        preferredVideoCodec: String?
     ) {
         self.keepChatNavigationStack = keepChatNavigationStack
         self.skipReadHistory = skipReadHistory
@@ -44,9 +44,9 @@ public struct ExperimentalUISettings: Equatable, PreferencesEntry {
         self.chatListPhotos = chatListPhotos
         self.knockoutWallpaper = knockoutWallpaper
         self.foldersTabAtBottom = foldersTabAtBottom
-        self.enableHighBitrateVideoCalls = enableHighBitrateVideoCalls
         self.playerEmbedding = playerEmbedding
         self.playlistPlayback = playlistPlayback
+        self.preferredVideoCodec = preferredVideoCodec
     }
     
     public init(decoder: PostboxDecoder) {
@@ -56,9 +56,9 @@ public struct ExperimentalUISettings: Equatable, PreferencesEntry {
         self.chatListPhotos = decoder.decodeInt32ForKey("chatListPhotos", orElse: 0) != 0
         self.knockoutWallpaper = decoder.decodeInt32ForKey("knockoutWallpaper", orElse: 0) != 0
         self.foldersTabAtBottom = decoder.decodeInt32ForKey("foldersTabAtBottom", orElse: 0) != 0
-        self.enableHighBitrateVideoCalls = decoder.decodeInt32ForKey("enableHighBitrateVideoCalls", orElse: 0) != 0
         self.playerEmbedding = decoder.decodeInt32ForKey("playerEmbedding", orElse: 0) != 0
         self.playlistPlayback = decoder.decodeInt32ForKey("playlistPlayback", orElse: 0) != 0
+        self.preferredVideoCodec = decoder.decodeOptionalStringForKey("preferredVideoCodec")
     }
     
     public func encode(_ encoder: PostboxEncoder) {
@@ -68,9 +68,11 @@ public struct ExperimentalUISettings: Equatable, PreferencesEntry {
         encoder.encodeInt32(self.chatListPhotos ? 1 : 0, forKey: "chatListPhotos")
         encoder.encodeInt32(self.knockoutWallpaper ? 1 : 0, forKey: "knockoutWallpaper")
         encoder.encodeInt32(self.foldersTabAtBottom ? 1 : 0, forKey: "foldersTabAtBottom")
-        encoder.encodeInt32(self.enableHighBitrateVideoCalls ? 1 : 0, forKey: "enableHighBitrateVideoCalls")
         encoder.encodeInt32(self.playerEmbedding ? 1 : 0, forKey: "playerEmbedding")
         encoder.encodeInt32(self.playlistPlayback ? 1 : 0, forKey: "playlistPlayback")
+        if let preferredVideoCodec = self.preferredVideoCodec {
+            encoder.encodeString(preferredVideoCodec, forKey: "preferredVideoCodec")
+        }
     }
     
     public func isEqual(to: PreferencesEntry) -> Bool {
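
Because the new field is optional, encode writes the key only when a codec is set and decode falls back to nil, so older settings blobs stay readable. A sketch of flipping the preference with the same transaction pattern the debug controller uses earlier in this diff (the helper and its accountManager parameter are assumptions):

    func setPreferredVideoCodec(_ codec: String?, accountManager: AccountManager) {
        let _ = accountManager.transaction({ transaction in
            transaction.updateSharedData(ApplicationSpecificSharedDataKeys.experimentalUISettings, { entry in
                var settings = entry as? ExperimentalUISettings ?? ExperimentalUISettings.defaultSettings
                settings.preferredVideoCodec = codec // nil restores "No Preference"
                return settings
            })
        }).start()
    }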
@@ -18,12 +18,19 @@ private func callConnectionDescription(_ connection: CallSessionConnection) -> O
     }
 }
 
-private func callConnectionDescriptionWebrtc(_ connection: CallSessionConnection) -> OngoingCallConnectionDescriptionWebrtc? {
+private func callConnectionDescriptionsWebrtc(_ connection: CallSessionConnection) -> [OngoingCallConnectionDescriptionWebrtc] {
     switch connection {
     case .reflector:
-        return nil
+        return []
     case let .webRtcReflector(reflector):
-        return OngoingCallConnectionDescriptionWebrtc(connectionId: reflector.id, hasStun: reflector.hasStun, hasTurn: reflector.hasTurn, ip: reflector.ip.isEmpty ? reflector.ipv6 : reflector.ip, port: reflector.port, username: reflector.username, password: reflector.password)
+        var result: [OngoingCallConnectionDescriptionWebrtc] = []
+        if !reflector.ip.isEmpty {
+            result.append(OngoingCallConnectionDescriptionWebrtc(connectionId: reflector.id, hasStun: reflector.hasStun, hasTurn: reflector.hasTurn, ip: reflector.ip, port: reflector.port, username: reflector.username, password: reflector.password))
+        }
+        if !reflector.ipv6.isEmpty {
+            result.append(OngoingCallConnectionDescriptionWebrtc(connectionId: reflector.id, hasStun: reflector.hasStun, hasTurn: reflector.hasTurn, ip: reflector.ipv6, port: reflector.port, username: reflector.username, password: reflector.password))
+        }
+        return result
     }
 }
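
Where the old mapping picked a single address (IPv4, falling back to IPv6), the rewrite emits one candidate per populated field, so a dual-stack reflector now contributes two connection descriptions. Illustrative expectation, assuming a reflector value with both addresses set:

    // Hypothetical reflector with both stacks populated:
    let candidates = callConnectionDescriptionsWebrtc(.webRtcReflector(reflector))
    // candidates.count == 2: the IPv4 description first, then the IPv6 one.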
@@ -544,7 +551,7 @@ public final class OngoingCallContext {
         return result
     }
     
-    public init(account: Account, callSessionManager: CallSessionManager, internalId: CallSessionInternalId, proxyServer: ProxyServerSettings?, initialNetworkType: NetworkType, updatedNetworkType: Signal<NetworkType, NoError>, serializedData: String?, dataSaving: VoiceCallDataSaving, derivedState: VoipDerivedState, key: Data, isOutgoing: Bool, video: OngoingCallVideoCapturer?, connections: CallSessionConnectionSet, maxLayer: Int32, version: String, allowP2P: Bool, enableHighBitrateVideoCalls: Bool, audioSessionActive: Signal<Bool, NoError>, logName: String) {
+    public init(account: Account, callSessionManager: CallSessionManager, internalId: CallSessionInternalId, proxyServer: ProxyServerSettings?, initialNetworkType: NetworkType, updatedNetworkType: Signal<NetworkType, NoError>, serializedData: String?, dataSaving: VoiceCallDataSaving, derivedState: VoipDerivedState, key: Data, isOutgoing: Bool, video: OngoingCallVideoCapturer?, connections: CallSessionConnectionSet, maxLayer: Int32, version: String, allowP2P: Bool, audioSessionActive: Signal<Bool, NoError>, logName: String, preferredVideoCodec: String?) {
         let _ = setupLogs
         OngoingCallThreadLocalContext.applyServerConfig(serializedData)
         
@@ -587,17 +594,12 @@ public final class OngoingCallContext {
                     continue
                 }
                 processedConnections.append(connection)
-                if let mapped = callConnectionDescriptionWebrtc(connection) {
-                    if mapped.ip.isEmpty {
-                        continue
-                    }
-                    filteredConnections.append(mapped)
-                }
+                filteredConnections.append(contentsOf: callConnectionDescriptionsWebrtc(connection))
             }
             
-            let context = OngoingCallThreadLocalContextWebrtc(version: version, queue: OngoingCallThreadLocalContextQueueImpl(queue: queue), proxy: voipProxyServer, networkType: ongoingNetworkTypeForTypeWebrtc(initialNetworkType), dataSaving: ongoingDataSavingForTypeWebrtc(dataSaving), derivedState: derivedState.data, key: key, isOutgoing: isOutgoing, connections: filteredConnections, maxLayer: maxLayer, allowP2P: allowP2P, logPath: logPath, sendSignalingData: { [weak callSessionManager] data in
+            let context = OngoingCallThreadLocalContextWebrtc(version: version, queue: OngoingCallThreadLocalContextQueueImpl(queue: queue), proxy: voipProxyServer, networkType: ongoingNetworkTypeForTypeWebrtc(initialNetworkType), dataSaving: ongoingDataSavingForTypeWebrtc(dataSaving), derivedState: derivedState.data, key: key, isOutgoing: isOutgoing, connections: filteredConnections, maxLayer: maxLayer, allowP2P: allowP2P, logPath: logPath, sendSignalingData: { [weak callSessionManager] data in
                 callSessionManager?.sendSignalingData(internalId: internalId, data: data)
-            }, videoCapturer: video?.impl, preferredAspectRatio: Float(preferredAspectRatio), enableHighBitrateVideoCalls: enableHighBitrateVideoCalls)
+            }, videoCapturer: video?.impl, preferredAspectRatio: Float(preferredAspectRatio), preferredVideoCodec: preferredVideoCodec)
             
             strongSelf.contextRef = Unmanaged.passRetained(OngoingCallThreadLocalContextHolder(context))
             context.stateChanged = { [weak callSessionManager] state, videoState, remoteVideoState, remoteAudioState, remoteBatteryLevel, _ in
@@ -14,6 +14,7 @@ static_library(
         "tgcalls/tgcalls/legacy/**",
         "tgcalls/tgcalls/platform/tdesktop/**",
         "tgcalls/tgcalls/platform/windows/**",
+        "tgcalls/tgcalls/platform/android/**",
         "tgcalls/tgcalls/platform/darwin/VideoCameraCapturerMac.*",
         "tgcalls/tgcalls/platform/darwin/VideoMetalViewMac.*",
     ]),
@@ -14,6 +14,7 @@ objc_library(
     ], exclude = [
         "tgcalls/tgcalls/legacy/**",
         "tgcalls/tgcalls/platform/tdesktop/**",
+        "tgcalls/tgcalls/platform/android/**",
         "tgcalls/tgcalls/platform/windows/**",
         "tgcalls/tgcalls/platform/darwin/VideoCameraCapturerMac.*",
         "tgcalls/tgcalls/platform/darwin/VideoMetalViewMac.*",
@@ -99,7 +99,9 @@ typedef NS_ENUM(int32_t, OngoingCallDataSavingWebrtc) {
 - (void)setOnFirstFrameReceived:(void (^ _Nullable)(float))onFirstFrameReceived;
 - (void)setOnOrientationUpdated:(void (^ _Nullable)(OngoingCallVideoOrientationWebrtc))onOrientationUpdated;
 - (void)setOnIsMirroredUpdated:(void (^ _Nullable)(bool))onIsMirroredUpdated;
 
+#ifdef WEBRTC_MAC
+- (void)setVideoContentMode:(CALayerContentsGravity _Nonnull )mode;
+#endif
 @end
 
 @interface OngoingCallThreadLocalContextVideoCapturer : NSObject
@ -123,7 +125,7 @@ typedef NS_ENUM(int32_t, OngoingCallDataSavingWebrtc) {
|
||||
@property (nonatomic, copy) void (^ _Nullable stateChanged)(OngoingCallStateWebrtc, OngoingCallVideoStateWebrtc, OngoingCallRemoteVideoStateWebrtc, OngoingCallRemoteAudioStateWebrtc, OngoingCallRemoteBatteryLevelWebrtc, float);
|
||||
@property (nonatomic, copy) void (^ _Nullable signalBarsChanged)(int32_t);
|
||||
|
||||
- (instancetype _Nonnull)initWithVersion:(NSString * _Nonnull)version queue:(id<OngoingCallThreadLocalContextQueueWebrtc> _Nonnull)queue proxy:(VoipProxyServerWebrtc * _Nullable)proxy networkType:(OngoingCallNetworkTypeWebrtc)networkType dataSaving:(OngoingCallDataSavingWebrtc)dataSaving derivedState:(NSData * _Nonnull)derivedState key:(NSData * _Nonnull)key isOutgoing:(bool)isOutgoing connections:(NSArray<OngoingCallConnectionDescriptionWebrtc *> * _Nonnull)connections maxLayer:(int32_t)maxLayer allowP2P:(BOOL)allowP2P logPath:(NSString * _Nonnull)logPath sendSignalingData:(void (^ _Nonnull)(NSData * _Nonnull))sendSignalingData videoCapturer:(OngoingCallThreadLocalContextVideoCapturer * _Nullable)videoCapturer preferredAspectRatio:(float)preferredAspectRatio enableHighBitrateVideoCalls:(bool)enableHighBitrateVideoCalls;
|
||||
- (instancetype _Nonnull)initWithVersion:(NSString * _Nonnull)version queue:(id<OngoingCallThreadLocalContextQueueWebrtc> _Nonnull)queue proxy:(VoipProxyServerWebrtc * _Nullable)proxy networkType:(OngoingCallNetworkTypeWebrtc)networkType dataSaving:(OngoingCallDataSavingWebrtc)dataSaving derivedState:(NSData * _Nonnull)derivedState key:(NSData * _Nonnull)key isOutgoing:(bool)isOutgoing connections:(NSArray<OngoingCallConnectionDescriptionWebrtc *> * _Nonnull)connections maxLayer:(int32_t)maxLayer allowP2P:(BOOL)allowP2P logPath:(NSString * _Nonnull)logPath sendSignalingData:(void (^ _Nonnull)(NSData * _Nonnull))sendSignalingData videoCapturer:(OngoingCallThreadLocalContextVideoCapturer * _Nullable)videoCapturer preferredAspectRatio:(float)preferredAspectRatio preferredVideoCodec:(NSString * _Nullable)preferredVideoCodec;
|
||||
|
||||
- (void)beginTermination;
|
||||
- (void)stop:(void (^_Nullable)(NSString * _Nullable debugLog, int64_t bytesSentWifi, int64_t bytesReceivedWifi, int64_t bytesSentMobile, int64_t bytesReceivedMobile))completion;
|
||||
|
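The header change above replaces the fixed enableHighBitrateVideoCalls: flag with a caller-supplied preferredVideoCodec: string. A minimal call-site sketch, assuming the surrounding arguments (queue, networkType, key, connections, sendSignalingData, and so on) are already in scope; passing nil presumably leaves codec selection to the library:

    // Hypothetical call site; every argument value here is a placeholder.
    OngoingCallThreadLocalContextWebrtc *context = [[OngoingCallThreadLocalContextWebrtc alloc] initWithVersion:@"3.0.0" queue:queue proxy:nil networkType:networkType dataSaving:dataSaving derivedState:derivedState key:key isOutgoing:true connections:connections maxLayer:[OngoingCallThreadLocalContextWebrtc maxLayer] allowP2P:YES logPath:@"" sendSignalingData:sendSignalingData videoCapturer:nil preferredAspectRatio:0.0f preferredVideoCodec:nil];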
@ -15,7 +15,7 @@
#import "platform/darwin/VideoMetalViewMac.h"
#define GLVideoView VideoMetalView
#define UIViewContentModeScaleAspectFill kCAGravityResizeAspectFill
#define UIViewContentModeScaleAspectFit kCAGravityResizeAspect
#define UIViewContentModeScaleAspect kCAGravityResizeAspect

#else
#import "platform/darwin/VideoMetalView.h"
@ -207,7 +207,6 @@
    NSTimeInterval _callPacketTimeout;

    std::unique_ptr<tgcalls::Instance> _tgVoip;
    OngoingCallThreadLocalContextWebrtcTerminationResult *_terminationResult;

    OngoingCallStateWebrtc _state;
    OngoingCallVideoStateWebrtc _videoState;
@ -299,15 +298,21 @@ static void (*InternalVoipLoggingFunction)(NSString *) = NULL;
    return 92;
}

+ (NSArray<NSString *> * _Nonnull)versionsWithIncludeReference:(bool)includeReference {
    if (includeReference) {
        return @[@"2.7.7", @"2.8.8"];
+ (NSArray<NSString *> * _Nonnull)versionsWithIncludeReference:(bool)__unused includeReference {
    return @[@"2.7.7", @"3.0.0"];
}

+ (tgcalls::ProtocolVersion)protocolVersionFromLibraryVersion:(NSString *)version {
    if ([version isEqualToString:@"2.7.7"]) {
        return tgcalls::ProtocolVersion::V0;
    } else if ([version isEqualToString:@"3.0.0"]) {
        return tgcalls::ProtocolVersion::V1;
    } else {
        return @[@"2.7.7"];
        return tgcalls::ProtocolVersion::V0;
    }
}
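The new protocolVersionFromLibraryVersion: mapping degrades gracefully: any version string the method does not recognize falls through to ProtocolVersion::V0 rather than failing. A quick illustrative check (the version string @"9.9.9" is a made-up example, not a real library version):

    // Unknown strings take the else branch and degrade to the oldest protocol.
    tgcalls::ProtocolVersion fallback = [OngoingCallThreadLocalContextWebrtc protocolVersionFromLibraryVersion:@"9.9.9"];
    NSCAssert(fallback == tgcalls::ProtocolVersion::V0, @"unknown versions should degrade to V0");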
- (instancetype _Nonnull)initWithVersion:(NSString * _Nonnull)version queue:(id<OngoingCallThreadLocalContextQueueWebrtc> _Nonnull)queue proxy:(VoipProxyServerWebrtc * _Nullable)proxy networkType:(OngoingCallNetworkTypeWebrtc)networkType dataSaving:(OngoingCallDataSavingWebrtc)dataSaving derivedState:(NSData * _Nonnull)derivedState key:(NSData * _Nonnull)key isOutgoing:(bool)isOutgoing connections:(NSArray<OngoingCallConnectionDescriptionWebrtc *> * _Nonnull)connections maxLayer:(int32_t)maxLayer allowP2P:(BOOL)allowP2P logPath:(NSString * _Nonnull)logPath sendSignalingData:(void (^)(NSData * _Nonnull))sendSignalingData videoCapturer:(OngoingCallThreadLocalContextVideoCapturer * _Nullable)videoCapturer preferredAspectRatio:(float)preferredAspectRatio enableHighBitrateVideoCalls:(bool)enableHighBitrateVideoCalls {
- (instancetype _Nonnull)initWithVersion:(NSString * _Nonnull)version queue:(id<OngoingCallThreadLocalContextQueueWebrtc> _Nonnull)queue proxy:(VoipProxyServerWebrtc * _Nullable)proxy networkType:(OngoingCallNetworkTypeWebrtc)networkType dataSaving:(OngoingCallDataSavingWebrtc)dataSaving derivedState:(NSData * _Nonnull)derivedState key:(NSData * _Nonnull)key isOutgoing:(bool)isOutgoing connections:(NSArray<OngoingCallConnectionDescriptionWebrtc *> * _Nonnull)connections maxLayer:(int32_t)maxLayer allowP2P:(BOOL)allowP2P logPath:(NSString * _Nonnull)logPath sendSignalingData:(void (^)(NSData * _Nonnull))sendSignalingData videoCapturer:(OngoingCallThreadLocalContextVideoCapturer * _Nullable)videoCapturer preferredAspectRatio:(float)preferredAspectRatio preferredVideoCodec:(NSString * _Nullable)preferredVideoCodec {
    self = [super init];
    if (self != nil) {
        _version = version;
@ -370,6 +375,11 @@ static void (*InternalVoipLoggingFunction)(NSString *) = NULL;
        }
    }

    std::vector<std::string> preferredVideoCodecs;
    if (preferredVideoCodec != nil) {
        preferredVideoCodecs.push_back([preferredVideoCodec UTF8String]);
    }

    std::vector<tgcalls::Endpoint> endpoints;

    tgcalls::Config config = {
@ -384,7 +394,9 @@ static void (*InternalVoipLoggingFunction)(NSString *) = NULL;
        .logPath = "", //logPath.length == 0 ? "" : std::string(logPath.UTF8String),
        .maxApiLayer = [OngoingCallThreadLocalContextWebrtc maxLayer],
        .preferredAspectRatio = preferredAspectRatio,
        .enableHighBitrateVideo = enableHighBitrateVideoCalls
        .enableHighBitrateVideo = true,
        .preferredVideoCodecs = preferredVideoCodecs,
        .protocolVersion = [OngoingCallThreadLocalContextWebrtc protocolVersionFromLibraryVersion:version]
    };

    auto encryptionKeyValue = std::make_shared<std::array<uint8_t, 256>>();
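Two things change in the config: enableHighBitrateVideo is now hardwired to true, and codec choice moves into the ordered preferredVideoCodecs list. The wrapper above forwards at most one entry, but the underlying field is a vector, so a multi-codec preference would presumably look like the sketch below (the identifier strings "H264" and "VP8" are assumptions, not taken from this diff):

    // Illustrative only: entries are presumably tried in order of preference.
    std::vector<std::string> preferredVideoCodecs;
    preferredVideoCodecs.push_back("H264"); // assumed codec identifier
    preferredVideoCodecs.push_back("VP8");  // assumed fallback identifier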
@ -396,7 +408,6 @@ static void (*InternalVoipLoggingFunction)(NSString *) = NULL;
    static dispatch_once_t onceToken;
    dispatch_once(&onceToken, ^{
        tgcalls::Register<tgcalls::InstanceImpl>();
        tgcalls::Register<tgcalls::InstanceImplReference>();
    });
    _tgVoip = tgcalls::Meta::Create([version UTF8String], (tgcalls::Descriptor){
        .config = config,
@ -529,29 +540,19 @@ static void (*InternalVoipLoggingFunction)(NSString *) = NULL;
    return false;
}

- (void)stopInstanceIfNeeded {
    if (!_tgVoip) {
        return;
    }
    tgcalls::FinalState finalState = _tgVoip->stop();
    _tgVoip.reset();
    _terminationResult = [[OngoingCallThreadLocalContextWebrtcTerminationResult alloc] initWithFinalState:finalState];
}

- (void)beginTermination {
    [self stopInstanceIfNeeded];
}

- (void)stop:(void (^)(NSString *, int64_t, int64_t, int64_t, int64_t))completion {
    [self stopInstanceIfNeeded];
- (void)stopWithTerminationResult:(OngoingCallThreadLocalContextWebrtcTerminationResult *)terminationResult completion:(void (^)(NSString *, int64_t, int64_t, int64_t, int64_t))completion {
    _tgVoip.reset();

    if (completion) {
        if (_terminationResult) {
            NSString *debugLog = [NSString stringWithUTF8String:_terminationResult.finalState.debugLog.c_str()];
            _lastDerivedState = [[NSData alloc] initWithBytes:_terminationResult.finalState.persistentState.value.data() length:_terminationResult.finalState.persistentState.value.size()];
    if (terminationResult) {
        NSString *debugLog = [NSString stringWithUTF8String:terminationResult.finalState.debugLog.c_str()];
        _lastDerivedState = [[NSData alloc] initWithBytes:terminationResult.finalState.persistentState.value.data() length:terminationResult.finalState.persistentState.value.size()];

        if (completion) {
            completion(debugLog, _terminationResult.finalState.trafficStats.bytesSentWifi, _terminationResult.finalState.trafficStats.bytesReceivedWifi, _terminationResult.finalState.trafficStats.bytesSentMobile, _terminationResult.finalState.trafficStats.bytesReceivedMobile);
            completion(debugLog, terminationResult.finalState.trafficStats.bytesSentWifi, terminationResult.finalState.trafficStats.bytesReceivedWifi, terminationResult.finalState.trafficStats.bytesSentMobile, terminationResult.finalState.trafficStats.bytesReceivedMobile);
        }
    } else {
        if (completion) {
@ -561,6 +562,33 @@ static void (*InternalVoipLoggingFunction)(NSString *) = NULL;
    }
}

- (void)stop:(void (^)(NSString *, int64_t, int64_t, int64_t, int64_t))completion {
    if (!_tgVoip) {
        return;
    }
    if (completion == nil) {
        _tgVoip->stop([](tgcalls::FinalState finalState) {
        });
        _tgVoip.reset();
        return;
    }

    __weak OngoingCallThreadLocalContextWebrtc *weakSelf = self;
    id<OngoingCallThreadLocalContextQueueWebrtc> queue = _queue;
    _tgVoip->stop([weakSelf, queue, completion = [completion copy]](tgcalls::FinalState finalState) {
        [queue dispatch:^{
            __strong OngoingCallThreadLocalContextWebrtc *strongSelf = weakSelf;
            if (!strongSelf) {
                return;
            }

            OngoingCallThreadLocalContextWebrtcTerminationResult *terminationResult = [[OngoingCallThreadLocalContextWebrtcTerminationResult alloc] initWithFinalState:finalState];

            [strongSelf stopWithTerminationResult:terminationResult completion:completion];
        }];
    });
}

- (NSString *)debugInfo {
    if (_tgVoip != nullptr) {
        NSString *version = [self version];
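The termination path is reworked above: the old synchronous stopInstanceIfNeeded (which blocked on _tgVoip->stop() and cached the result in the _terminationResult ivar) is gone, and stop: now hands tgcalls a FinalState callback, re-dispatches onto the wrapper's own queue, and only then touches instance state, which keeps all mutation on one thread. From the caller's side the shape is unchanged; a hypothetical call site (the NSLog is illustrative):

    // Hypothetical caller of the new asynchronous stop.
    [context stop:^(NSString *debugLog, int64_t bytesSentWifi, int64_t bytesReceivedWifi, int64_t bytesSentMobile, int64_t bytesReceivedMobile) {
        // Traffic statistics arrive once tgcalls reports its FinalState.
        NSLog(@"call ended: wifi %lld/%lld bytes, cellular %lld/%lld bytes", bytesSentWifi, bytesReceivedWifi, bytesSentMobile, bytesReceivedMobile);
    }];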
@ -1 +1 @@
Subproject commit da1160dfbf4ac4b0dee65d481bd9c634932cd5a2
Subproject commit a7d9b717fdf7e8e441b47692dc5771684b2d7970