Mirror of https://github.com/Swiftgram/Telegram-iOS.git (synced 2025-06-16 05:55:20 +00:00)

Commit 3fc919e42d: Cherry-pick various fixes
Parent: 2d23d6c497
@@ -10886,6 +10886,7 @@ Sorry for the inconvenience.";
 "Call.StatusWeakSignal" = "Weak network signal";
 "Conversation.ContactAddContact" = "ADD";
+"Conversation.ContactAddContactLong" = "ADD CONTACT";
 "Conversation.ContactMessage" = "MESSAGE";
 "Chat.PlayOnceVideoMessageTooltip" = "This video message can only be played once.";
@@ -304,31 +304,35 @@ final class CameraOutput: NSObject {
         self.currentMode = mode
         self.lastSampleTimestamp = nil
         
-        let codecType: AVVideoCodecType
-        if case .roundVideo = mode {
-            codecType = .h264
-        } else {
-            if hasHEVCHardwareEncoder {
-                codecType = .hevc
-            } else {
-                codecType = .h264
-            }
-        }
-        
-        guard var videoSettings = self.videoOutput.recommendedVideoSettings(forVideoCodecType: codecType, assetWriterOutputFileType: .mp4) else {
-            return .complete()
-        }
-        
-        var dimensions: CGSize = CGSize(width: 1080, height: 1920)
-        if orientation == .landscapeLeft || orientation == .landscapeRight {
-            dimensions = CGSize(width: 1920, height: 1080)
-        }
-        var orientation = orientation
-        if case .roundVideo = mode {
-            videoSettings[AVVideoWidthKey] = 400
-            videoSettings[AVVideoHeightKey] = 400
-            dimensions = CGSize(width: 400, height: 400)
-            orientation = .landscapeRight
-        }
+        var orientation = orientation
+        let dimensions: CGSize
+        let videoSettings: [String: Any]
+        if case .roundVideo = mode {
+            dimensions = videoMessageDimensions.cgSize
+            orientation = .landscapeRight
+            
+            let compressionProperties: [String: Any] = [
+                AVVideoAverageBitRateKey: 1000 * 1000,
+                AVVideoProfileLevelKey: AVVideoProfileLevelH264HighAutoLevel,
+                AVVideoH264EntropyModeKey: AVVideoH264EntropyModeCABAC
+            ]
+            videoSettings = [
+                AVVideoCodecKey: AVVideoCodecType.h264,
+                AVVideoCompressionPropertiesKey: compressionProperties,
+                AVVideoWidthKey: Int(dimensions.width),
+                AVVideoHeightKey: Int(dimensions.height)
+            ]
+        } else {
+            let codecType: AVVideoCodecType = hasHEVCHardwareEncoder ? .hevc : .h264
+            if orientation == .landscapeLeft || orientation == .landscapeRight {
+                dimensions = CGSize(width: 1920, height: 1080)
+            } else {
+                dimensions = CGSize(width: 1080, height: 1920)
+            }
+            guard let settings = self.videoOutput.recommendedVideoSettings(forVideoCodecType: codecType, assetWriterOutputFileType: .mp4) else {
+                return .complete()
+            }
+            videoSettings = settings
+        }
         
         let audioSettings = self.audioOutput.recommendedAudioSettingsForAssetWriter(writingTo: .mp4) ?? [:]
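
Note on the hunk above: round video messages no longer rely on the capture output's recommended settings and instead get an explicit H.264 configuration at the message resolution, while regular capture keeps recommendedVideoSettings (HEVC when a hardware encoder is available). A minimal, self-contained sketch of such a settings dictionary in plain AVFoundation; the helper name is illustrative, and the 400-pixel side and 1 Mbps bitrate simply mirror the values in the diff:

    import AVFoundation

    // Illustrative helper: AVAssetWriterInput settings for a square round-video
    // message, assuming H.264 at a fixed average bitrate.
    func roundVideoWriterSettings(side: Int = 400, bitrate: Int = 1_000_000) -> [String: Any] {
        let compressionProperties: [String: Any] = [
            AVVideoAverageBitRateKey: bitrate,
            AVVideoProfileLevelKey: AVVideoProfileLevelH264HighAutoLevel,
            AVVideoH264EntropyModeKey: AVVideoH264EntropyModeCABAC
        ]
        return [
            AVVideoCodecKey: AVVideoCodecType.h264,
            AVVideoCompressionPropertiesKey: compressionProperties,
            AVVideoWidthKey: side,
            AVVideoHeightKey: side
        ]
    }

    // Usage sketch: feed the settings to an AVAssetWriterInput.
    let videoInput = AVAssetWriterInput(mediaType: .video, outputSettings: roundVideoWriterSettings())
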
@@ -514,10 +518,10 @@ final class CameraOutput: NSObject {
         let extensions = CMFormatDescriptionGetExtensions(formatDescription) as! [String: Any]
         
         var updatedExtensions = extensions
-        updatedExtensions["CVBytesPerRow"] = 400 * 4
+        updatedExtensions["CVBytesPerRow"] = videoMessageDimensions.width * 4
         
         var newFormatDescription: CMFormatDescription?
-        var status = CMVideoFormatDescriptionCreate(allocator: nil, codecType: mediaSubType, width: 400, height: 400, extensions: updatedExtensions as CFDictionary, formatDescriptionOut: &newFormatDescription)
+        var status = CMVideoFormatDescriptionCreate(allocator: nil, codecType: mediaSubType, width: videoMessageDimensions.width, height: videoMessageDimensions.height, extensions: updatedExtensions as CFDictionary, formatDescriptionOut: &newFormatDescription)
         guard status == noErr, let newFormatDescription else {
             return nil
         }
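
The hunk above rebuilds the sample buffer's format description so that its reported dimensions and bytes-per-row come from videoMessageDimensions. A standalone sketch of creating such a description with CoreMedia, assuming uncompressed BGRA frames; the pixel format and helper name are assumptions, not taken from the commit:

    import CoreMedia
    import CoreVideo

    // Illustrative: build a video format description for a square BGRA buffer,
    // carrying a bytes-per-row hint in its extensions (4 bytes per pixel).
    func makeRoundVideoFormatDescription(width: Int32 = 400, height: Int32 = 400) -> CMVideoFormatDescription? {
        let extensions: [String: Any] = [
            "CVBytesPerRow": Int(width) * 4
        ]
        var description: CMVideoFormatDescription?
        let status = CMVideoFormatDescriptionCreate(
            allocator: kCFAllocatorDefault,
            codecType: kCVPixelFormatType_32BGRA,
            width: width,
            height: height,
            extensions: extensions as CFDictionary,
            formatDescriptionOut: &description
        )
        return status == noErr ? description : nil
    }
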
@@ -5,6 +5,9 @@ import CoreMedia
 import CoreVideo
 import Metal
 import Display
+import TelegramCore
+
+let videoMessageDimensions = PixelDimensions(width: 400, height: 400)
 
 func allocateOutputBufferPool(with inputFormatDescription: CMFormatDescription, outputRetainedBufferCountHint: Int) -> (
     outputBufferPool: CVPixelBufferPool?,
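
videoMessageDimensions replaces the scattered 400 literals with a single shared constant. PixelDimensions itself comes from TelegramCore; a minimal stand-in that shows the cgSize accessor the hunks rely on (a sketch, not the real type):

    import CoreGraphics

    // Minimal stand-in for TelegramCore's PixelDimensions, only to illustrate
    // how the shared constant is consumed via `cgSize` elsewhere in the diff.
    struct PixelDimensions {
        var width: Int32
        var height: Int32

        var cgSize: CGSize {
            return CGSize(width: CGFloat(self.width), height: CGFloat(self.height))
        }
    }

    let videoMessageDimensions = PixelDimensions(width: 400, height: 400)
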
@@ -114,8 +117,7 @@ class CameraRoundVideoFilter {
         }
         self.inputFormatDescription = formatDescription
         
-        let diameter: CGFloat = 400.0
-        let circleImage = generateImage(CGSize(width: diameter, height: diameter), opaque: false, scale: 1.0, rotatedContext: { size, context in
+        let circleImage = generateImage(videoMessageDimensions.cgSize, opaque: false, scale: 1.0, rotatedContext: { size, context in
             let bounds = CGRect(origin: .zero, size: size)
             context.clear(bounds)
             context.setFillColor(UIColor.white.cgColor)
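
The circle image above is the alpha mask that turns each square frame into a round one; generateImage is a helper from the Display module. Roughly the same mask can be produced with UIKit alone (the helper name and the use of UIGraphicsImageRenderer are illustrative):

    import UIKit

    // Illustrative: a white circle on a transparent background, usable as a
    // mask for square video frames.
    func makeCircleMask(side: CGFloat = 400.0) -> UIImage {
        let size = CGSize(width: side, height: side)
        let renderer = UIGraphicsImageRenderer(size: size)
        return renderer.image { context in
            UIColor.white.setFill()
            context.cgContext.fillEllipse(in: CGRect(origin: .zero, size: size))
        }
    }
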
@@ -158,7 +160,7 @@ class CameraRoundVideoFilter {
         
         var sourceImage = CIImage(cvImageBuffer: pixelBuffer)
         sourceImage = sourceImage.oriented(additional ? .leftMirrored : .right)
-        let scale = 400.0 / min(sourceImage.extent.width, sourceImage.extent.height)
+        let scale = CGFloat(videoMessageDimensions.width) / min(sourceImage.extent.width, sourceImage.extent.height)
         
         resizeFilter.setValue(sourceImage, forKey: kCIInputImageKey)
         resizeFilter.setValue(scale, forKey: kCIInputScaleKey)
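
The scale factor above resizes the source frame so that its shorter side matches the message width before the circular mask is applied. resizeFilter is presumably a Core Image scale filter; a self-contained sketch assuming CILanczosScaleTransform:

    import CoreImage

    // Illustrative: scale a CIImage so its shorter side equals `target` pixels.
    func scaledToShortSide(_ image: CIImage, target: CGFloat = 400.0) -> CIImage? {
        let scale = target / min(image.extent.width, image.extent.height)
        guard let filter = CIFilter(name: "CILanczosScaleTransform") else {
            return nil
        }
        filter.setValue(image, forKey: kCIInputImageKey)
        filter.setValue(scale, forKey: kCIInputScaleKey)
        filter.setValue(1.0, forKey: kCIInputAspectRatioKey)
        return filter.outputImage
    }
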
@@ -203,18 +205,14 @@ class CameraRoundVideoFilter {
         guard let finalImage else {
             return nil
         }
         
-        if finalImage.extent.width != 400 {
-            print("wtf: \(finalImage)")
-        }
-        
         var pbuf: CVPixelBuffer?
         CVPixelBufferPoolCreatePixelBuffer(kCFAllocatorDefault, outputPixelBufferPool!, &pbuf)
         guard let outputPixelBuffer = pbuf else {
             return nil
         }
         
-        self.ciContext.render(finalImage, to: outputPixelBuffer, bounds: CGRect(origin: .zero, size: CGSize(width: 400, height: 400)), colorSpace: outputColorSpace)
+        self.ciContext.render(finalImage, to: outputPixelBuffer, bounds: CGRect(origin: .zero, size: videoMessageDimensions.cgSize), colorSpace: outputColorSpace)
         
         return outputPixelBuffer
     }
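
The render step above writes the masked frame into a pixel buffer drawn from a pool, with the bounds now derived from videoMessageDimensions instead of a literal 400. A rough sketch of that step in isolation; in the real class the pool, CIContext, and color space are long-lived properties:

    import CoreImage
    import CoreVideo

    // Illustrative: render a CIImage into a pixel buffer taken from a pool.
    func renderToPooledBuffer(_ image: CIImage, pool: CVPixelBufferPool, context: CIContext, size: CGSize) -> CVPixelBuffer? {
        var buffer: CVPixelBuffer?
        let status = CVPixelBufferPoolCreatePixelBuffer(kCFAllocatorDefault, pool, &buffer)
        guard status == kCVReturnSuccess, let outputBuffer = buffer else {
            return nil
        }
        context.render(image, to: outputBuffer, bounds: CGRect(origin: .zero, size: size), colorSpace: CGColorSpaceCreateDeviceRGB())
        return outputBuffer
    }
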
@@ -307,7 +307,18 @@ public class ChatMessageContactBubbleContentNode: ChatMessageBubbleContentNode {
                 }
                 
                 let (messageButtonWidth, messageContinueLayout) = makeMessageButtonLayout(constrainedSize.width, nil, false, item.presentationData.strings.Conversation_ContactMessage.uppercased(), mainColor, false, false)
-                let (addButtonWidth, addContinueLayout) = makeAddButtonLayout(constrainedSize.width, nil, false, !canMessage && !canAdd ? item.presentationData.strings.Conversation_ViewContactDetails.uppercased() : item.presentationData.strings.Conversation_ContactAddContact.uppercased(), mainColor, false, false)
+                
+                let addTitle: String
+                if !canMessage && !canAdd {
+                    addTitle = item.presentationData.strings.Conversation_ViewContactDetails
+                } else {
+                    if canMessage {
+                        addTitle = item.presentationData.strings.Conversation_ContactAddContact
+                    } else {
+                        addTitle = item.presentationData.strings.Conversation_ContactAddContactLong
+                    }
+                }
+                let (addButtonWidth, addContinueLayout) = makeAddButtonLayout(constrainedSize.width, nil, false, addTitle.uppercased(), mainColor, false, false)
                 
                 let maxButtonWidth = max(messageButtonWidth, addButtonWidth)
                 var maxContentWidth: CGFloat = avatarSize.width + 7.0
@@ -327,7 +338,7 @@ public class ChatMessageContactBubbleContentNode: ChatMessageBubbleContentNode {
                 let lineWidth: CGFloat = 3.0
                 
                 var buttonCount = 1
-                if canMessage {
+                if canMessage && canAdd {
                     buttonCount += 1
                 }
                 var buttonWidth = floor((boundingWidth - layoutConstants.text.bubbleInsets.right * 2.0 - lineWidth))
@@ -387,7 +398,7 @@ public class ChatMessageContactBubbleContentNode: ChatMessageBubbleContentNode {
                     strongSelf.messageButtonNode.isHidden = !canMessage
                     
                     let backgroundInsets = layoutConstants.text.bubbleInsets
-                    let backgroundFrame = CGRect(origin: CGPoint(x: backgroundInsets.left, y: backgroundInsets.top + 5.0), size: CGSize(width: contentWidth - layoutConstants.text.bubbleInsets.right * 2.0, height: layoutSize.height - 34.0))
+                    let backgroundFrame = CGRect(origin: CGPoint(x: backgroundInsets.left, y: backgroundInsets.top + 5.0), size: CGSize(width: boundingWidth - layoutConstants.text.bubbleInsets.right * 2.0, height: layoutSize.height - 34.0))
                     
                     if let statusSizeAndApply = statusSizeAndApply {
                         strongSelf.dateAndStatusNode.frame = CGRect(origin: CGPoint(x: layoutConstants.text.bubbleInsets.left, y: backgroundFrame.maxY + 3.0), size: statusSizeAndApply.0)
@@ -532,7 +532,7 @@ public class VideoMessageCameraScreen: ViewController {
         fileprivate var liveUploadInterface: LegacyLiveUploadInterface?
-        private var currentLiveUploadPath: String?
+        fileprivate var currentLiveUploadData: LegacyLiveUploadInterfaceResult?
         
         
         fileprivate let backgroundView: UIVisualEffectView
         fileprivate let containerView: UIView
         fileprivate let componentHost: ComponentView<ViewControllerComponentContainer.Environment>
@@ -689,16 +689,27 @@ public class VideoMessageCameraScreen: ViewController {
         }
         
         func withReadyCamera(isFirstTime: Bool = false, _ f: @escaping () -> Void) {
+            guard let controller = self.controller else {
+                return
+            }
             if #available(iOS 13.0, *) {
-                let _ = ((self.cameraState.isDualCameraEnabled ? self.additionalPreviewView.isPreviewing : self.mainPreviewView.isPreviewing)
-                |> filter { $0 }
-                |> take(1)).startStandalone(next: { _ in
+                let _ = (combineLatest(queue: Queue.mainQueue(),
+                    self.cameraState.isDualCameraEnabled ? self.additionalPreviewView.isPreviewing : self.mainPreviewView.isPreviewing,
+                    controller.audioSessionReady.get()
+                )
+                |> filter { $0 && $1 }
+                |> take(1)).startStandalone(next: { _, _ in
                     f()
                 })
             } else {
-                Queue.mainQueue().after(0.35) {
+                let _ = (combineLatest(queue: Queue.mainQueue(),
+                    .single(true) |> delay(0.35, queue: Queue.mainQueue()),
+                    controller.audioSessionReady.get()
+                )
+                |> filter { $0 && $1 }
+                |> take(1)).startStandalone(next: { _, _ in
                     f()
-                }
+                })
             }
         }
         
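
withReadyCamera now waits on two conditions instead of one: the active preview view reporting that it is previewing and the controller's audioSessionReady promise, combined and taken once both become true (SwiftSignalKit's combineLatest, filter, and take). The same gating pattern expressed with Apple's Combine, as an analogue rather than the project's actual signal API:

    import Combine
    import Foundation

    // Illustrative: run `onReady` exactly once, only after both inputs are true.
    func whenCameraAndAudioReady(
        isPreviewing: AnyPublisher<Bool, Never>,
        audioSessionReady: AnyPublisher<Bool, Never>,
        onReady: @escaping () -> Void
    ) -> AnyCancellable {
        return Publishers.CombineLatest(isPreviewing, audioSessionReady)
            .filter { $0.0 && $0.1 }
            .prefix(1)
            .receive(on: DispatchQueue.main)
            .sink { _ in onReady() }
    }
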
@@ -1241,6 +1252,7 @@ public class VideoMessageCameraScreen: ViewController {
     fileprivate let completion: (EnqueueMessage?, Bool?, Int32?) -> Void
     
     private var audioSessionDisposable: Disposable?
+    fileprivate let audioSessionReady = ValuePromise<Bool>(false)
     
     private let hapticFeedback = HapticFeedback()
     
@@ -1484,11 +1496,13 @@ public class VideoMessageCameraScreen: ViewController {
             finalDuration = duration
         }
         
+        let dimensions = PixelDimensions(width: 400, height: 400)
+        
         var thumbnailImage = video.thumbnail
         if startTime > 0.0 {
             let composition = composition(with: results)
             let imageGenerator = AVAssetImageGenerator(asset: composition)
-            imageGenerator.maximumSize = CGSize(width: 400, height: 400)
+            imageGenerator.maximumSize = dimensions.cgSize
             imageGenerator.appliesPreferredTrackTransform = true
             
             if let cgImage = try? imageGenerator.copyCGImage(at: CMTime(seconds: startTime, preferredTimescale: composition.duration.timescale), actualTime: nil) {
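
The thumbnail branch above regenerates the preview frame at the trim start with AVAssetImageGenerator, capped to the round-video size. A standalone sketch of that extraction; the asset, timestamp, and helper name are placeholders:

    import AVFoundation
    import UIKit

    // Illustrative: grab a thumbnail at `seconds`, limited to 400x400 pixels.
    func roundVideoThumbnail(asset: AVAsset, at seconds: Double) -> UIImage? {
        let generator = AVAssetImageGenerator(asset: asset)
        generator.maximumSize = CGSize(width: 400, height: 400)
        generator.appliesPreferredTrackTransform = true
        let time = CMTime(seconds: seconds, preferredTimescale: 600)
        guard let cgImage = try? generator.copyCGImage(at: time, actualTime: nil) else {
            return nil
        }
        return UIImage(cgImage: cgImage)
    }
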
@@ -1496,7 +1510,7 @@ public class VideoMessageCameraScreen: ViewController {
             }
         }
         
-        let values = MediaEditorValues(peerId: self.context.account.peerId, originalDimensions: PixelDimensions(width: 400, height: 400), cropOffset: .zero, cropRect: CGRect(origin: .zero, size: CGSize(width: 400.0, height: 400.0)), cropScale: 1.0, cropRotation: 0.0, cropMirroring: false, cropOrientation: nil, gradientColors: nil, videoTrimRange: self.node.previewState?.trimRange, videoIsMuted: false, videoIsFullHd: false, videoIsMirrored: false, videoVolume: nil, additionalVideoPath: nil, additionalVideoIsDual: false, additionalVideoPosition: nil, additionalVideoScale: nil, additionalVideoRotation: nil, additionalVideoPositionChanges: [], additionalVideoTrimRange: nil, additionalVideoOffset: nil, additionalVideoVolume: nil, nightTheme: false, drawing: nil, entities: [], toolValues: [:], audioTrack: nil, audioTrackTrimRange: nil, audioTrackOffset: nil, audioTrackVolume: nil, audioTrackSamples: nil, qualityPreset: .videoMessage)
+        let values = MediaEditorValues(peerId: self.context.account.peerId, originalDimensions: dimensions, cropOffset: .zero, cropRect: CGRect(origin: .zero, size: dimensions.cgSize), cropScale: 1.0, cropRotation: 0.0, cropMirroring: false, cropOrientation: nil, gradientColors: nil, videoTrimRange: self.node.previewState?.trimRange, videoIsMuted: false, videoIsFullHd: false, videoIsMirrored: false, videoVolume: nil, additionalVideoPath: nil, additionalVideoIsDual: false, additionalVideoPosition: nil, additionalVideoScale: nil, additionalVideoRotation: nil, additionalVideoPositionChanges: [], additionalVideoTrimRange: nil, additionalVideoOffset: nil, additionalVideoVolume: nil, nightTheme: false, drawing: nil, entities: [], toolValues: [:], audioTrack: nil, audioTrackTrimRange: nil, audioTrackOffset: nil, audioTrackVolume: nil, audioTrackSamples: nil, qualityPreset: .videoMessage)
         
         var resourceAdjustments: VideoMediaResourceAdjustments? = nil
         if let valuesData = try? JSONEncoder().encode(values) {
@@ -1614,10 +1628,13 @@ public class VideoMessageCameraScreen: ViewController {
     }
     
     private func requestAudioSession() {
-        self.audioSessionDisposable = self.context.sharedContext.mediaManager.audioSession.push(audioSessionType: .recordWithOthers, activate: { _ in
+        self.audioSessionDisposable = self.context.sharedContext.mediaManager.audioSession.push(audioSessionType: .recordWithOthers, activate: { [weak self] _ in
             if #available(iOS 13.0, *) {
                 try? AVAudioSession.sharedInstance().setAllowHapticsAndSystemSoundsDuringRecording(true)
             }
+            if let self {
+                self.audioSessionReady.set(true)
+            }
         }, deactivate: { _ in
             return .single(Void())
         })
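
requestAudioSession now captures self weakly and flips audioSessionReady only once the audio session is actually active, which is the signal withReadyCamera waits on above; the haptics call is the stock AVAudioSession API. A simplified sketch of the activation callback outside the project's audio-session manager, where everything except the AVAudioSession call is illustrative:

    import AVFoundation

    // Illustrative stand-in for the screen's recording object.
    final class RoundVideoRecorder {
        private(set) var audioSessionReady = false

        // `push` represents whatever activates the audio session and later
        // invokes the activation closure; the closure captures self weakly.
        func requestAudioSession(push: (@escaping () -> Void) -> Void) {
            push { [weak self] in
                if #available(iOS 13.0, *) {
                    // Keep haptics and system sounds enabled while recording.
                    try? AVAudioSession.sharedInstance().setAllowHapticsAndSystemSoundsDuringRecording(true)
                }
                if let self {
                    self.audioSessionReady = true
                }
            }
        }
    }
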
@@ -350,11 +350,7 @@ final class ChatRecordingPreviewInputPanelNode: ChatInputPanelNode {
                 }
             }
         }
         
-        if isFirstTime, !self.viewOnceButton.isHidden {
-            self.maybePresentViewOnceTooltip()
-        }
-        
         let panelHeight = defaultHeight(metrics: metrics)
         
         transition.updateFrame(node: self.deleteButton, frame: CGRect(origin: CGPoint(x: leftInset + 2.0 - UIScreenPixel, y: 1), size: CGSize(width: 40.0, height: 40)))
@@ -488,6 +484,10 @@ final class ChatRecordingPreviewInputPanelNode: ChatInputPanelNode {
             }
         }
         
+        if isFirstTime, !self.viewOnceButton.isHidden {
+            self.maybePresentViewOnceTooltip()
+        }
+        
         return panelHeight
     }
     
@@ -126,6 +126,29 @@ public func parseInternalUrl(query: String) -> ParsedInternalUrl? {
     }
     if !pathComponents.isEmpty && !pathComponents[0].isEmpty {
         let peerName: String = pathComponents[0]
+        
+        if pathComponents[0].hasPrefix("+") || pathComponents[0].hasPrefix("%20") {
+            let component = pathComponents[0].replacingOccurrences(of: "%20", with: "+")
+            if component.rangeOfCharacter(from: CharacterSet(charactersIn: "0123456789+").inverted) == nil {
+                var attach: String?
+                var startAttach: String?
+                if let queryItems = components.queryItems {
+                    for queryItem in queryItems {
+                        if let value = queryItem.value {
+                            if queryItem.name == "attach" {
+                                attach = value
+                            } else if queryItem.name == "startattach" {
+                                startAttach = value
+                            }
+                        }
+                    }
+                }
+                
+                return .phone(component.replacingOccurrences(of: "+", with: ""), attach, startAttach)
+            } else {
+                return .join(String(component.dropFirst()))
+            }
+        }
         if pathComponents.count == 1 {
             if let queryItems = components.queryItems {
                 if peerName == "socks" || peerName == "proxy" {
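
The block added above handles t.me/+<phone> (or %20-encoded) links before the generic username path, pulling optional attach and startattach query items; the next hunk removes the same logic from its old position further down the function. A self-contained sketch of that branch using Foundation only, with a small enum standing in for ParsedInternalUrl:

    import Foundation

    enum ParsedLink: Equatable {
        case phone(number: String, attach: String?, startAttach: String?)
        case join(hash: String)
    }

    // Illustrative mirror of the "+"/"%20" branch for a single path component.
    func parsePlusComponent(_ component: String, queryItems: [URLQueryItem]?) -> ParsedLink? {
        guard component.hasPrefix("+") || component.hasPrefix("%20") else {
            return nil
        }
        let normalized = component.replacingOccurrences(of: "%20", with: "+")
        let digitsAndPlus = CharacterSet(charactersIn: "0123456789+")
        if normalized.rangeOfCharacter(from: digitsAndPlus.inverted) == nil {
            // Digits only: treat it as a phone number and keep attach parameters.
            let attach = queryItems?.first(where: { $0.name == "attach" })?.value
            let startAttach = queryItems?.first(where: { $0.name == "startattach" })?.value
            return .phone(number: normalized.replacingOccurrences(of: "+", with: ""), attach: attach, startAttach: startAttach)
        } else {
            // Anything else after "+" is an invite hash, e.g. t.me/+AbCdEf0123.
            return .join(hash: String(normalized.dropFirst()))
        }
    }
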
@@ -288,27 +311,6 @@ public func parseInternalUrl(query: String) -> ParsedInternalUrl? {
                 }
             } else if pathComponents[0].hasPrefix(phonebookUsernamePathPrefix), let idValue = Int64(String(pathComponents[0][pathComponents[0].index(pathComponents[0].startIndex, offsetBy: phonebookUsernamePathPrefix.count)...])) {
                 return .peerId(PeerId(namespace: Namespaces.Peer.CloudUser, id: PeerId.Id._internalFromInt64Value(idValue)))
-            } else if pathComponents[0].hasPrefix("+") || pathComponents[0].hasPrefix("%20") {
-                let component = pathComponents[0].replacingOccurrences(of: "%20", with: "+")
-                if component.rangeOfCharacter(from: CharacterSet(charactersIn: "0123456789+").inverted) == nil {
-                    var attach: String?
-                    var startAttach: String?
-                    if let queryItems = components.queryItems {
-                        for queryItem in queryItems {
-                            if let value = queryItem.value {
-                                if queryItem.name == "attach" {
-                                    attach = value
-                                } else if queryItem.name == "startattach" {
-                                    startAttach = value
-                                }
-                            }
-                        }
-                    }
-                    
-                    return .phone(component.replacingOccurrences(of: "+", with: ""), attach, startAttach)
-                } else {
-                    return .join(String(component.dropFirst()))
-                }
             } else if pathComponents[0].hasPrefix("$") || pathComponents[0].hasPrefix("%24") {
                 var component = pathComponents[0].replacingOccurrences(of: "%24", with: "$")
                 if component.hasPrefix("$") {