Mirror of https://github.com/Swiftgram/Telegram-iOS.git (synced 2025-11-07 17:30:12 +00:00)
Cherry-pick various fixes

commit 3fc919e42d (parent 2d23d6c497)
@@ -10886,6 +10886,7 @@ Sorry for the inconvenience.";
 "Call.StatusWeakSignal" = "Weak network signal";
 
 "Conversation.ContactAddContact" = "ADD";
+"Conversation.ContactAddContactLong" = "ADD CONTACT";
 "Conversation.ContactMessage" = "MESSAGE";
 
 "Chat.PlayOnceVideoMessageTooltip" = "This video message can only be played once.";
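Note: Conversation.ContactAddContactLong is the only new string; it is the single-button label consumed by the ChatMessageContactBubbleContentNode change further down, shown when the MESSAGE button is hidden.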
@@ -304,31 +304,35 @@ final class CameraOutput: NSObject {
         self.currentMode = mode
         self.lastSampleTimestamp = nil
         
-        let codecType: AVVideoCodecType
-        if case .roundVideo = mode {
-            codecType = .h264
-        } else {
-            if hasHEVCHardwareEncoder {
-                codecType = .hevc
-            } else {
-                codecType = .h264
-            }
-        }
-        
-        guard var videoSettings = self.videoOutput.recommendedVideoSettings(forVideoCodecType: codecType, assetWriterOutputFileType: .mp4) else {
-            return .complete()
-        }
-        
-        var dimensions: CGSize = CGSize(width: 1080, height: 1920)
-        if orientation == .landscapeLeft || orientation == .landscapeRight {
-            dimensions = CGSize(width: 1920, height: 1080)
-        }
-        var orientation = orientation
-        if case .roundVideo = mode {
-            videoSettings[AVVideoWidthKey] = 400
-            videoSettings[AVVideoHeightKey] = 400
-            dimensions = CGSize(width: 400, height: 400)
-            orientation = .landscapeRight
-        }
+        var orientation = orientation
+        let dimensions: CGSize
+        let videoSettings: [String: Any]
+        if case .roundVideo = mode {
+            dimensions = videoMessageDimensions.cgSize
+            orientation = .landscapeRight
+            
+            let compressionProperties: [String: Any] = [
+                AVVideoAverageBitRateKey: 1000 * 1000,
+                AVVideoProfileLevelKey: AVVideoProfileLevelH264HighAutoLevel,
+                AVVideoH264EntropyModeKey: AVVideoH264EntropyModeCABAC
+            ]
+            videoSettings = [
+                AVVideoCodecKey: AVVideoCodecType.h264,
+                AVVideoCompressionPropertiesKey: compressionProperties,
+                AVVideoWidthKey: Int(dimensions.width),
+                AVVideoHeightKey: Int(dimensions.height)
+            ]
+        } else {
+            let codecType: AVVideoCodecType = hasHEVCHardwareEncoder ? .hevc : .h264
+            if orientation == .landscapeLeft || orientation == .landscapeRight {
+                dimensions = CGSize(width: 1920, height: 1080)
+            } else {
+                dimensions = CGSize(width: 1080, height: 1920)
+            }
+            guard let settings = self.videoOutput.recommendedVideoSettings(forVideoCodecType: codecType, assetWriterOutputFileType: .mp4) else {
+                return .complete()
+            }
+            videoSettings = settings
+        }
         
         let audioSettings = self.audioOutput.recommendedAudioSettingsForAssetWriter(writingTo: .mp4) ?? [:]
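The round-video branch no longer patches the dictionary returned by recommendedVideoSettings(forVideoCodecType:assetWriterOutputFileType:); it builds explicit H.264 settings (roughly 1 Mbps average bit rate, High profile, CABAC entropy coding) and keeps the recommended settings only for regular video. A minimal sketch of how such a dictionary feeds an AVAssetWriterInput; the function name and the real-time flag are illustrative, not part of this commit:

import AVFoundation

// Sketch: explicit H.264 settings for a 400x400 round video message.
func makeRoundVideoWriterInput() -> AVAssetWriterInput {
    let compressionProperties: [String: Any] = [
        AVVideoAverageBitRateKey: 1000 * 1000,                        // ~1 Mbps
        AVVideoProfileLevelKey: AVVideoProfileLevelH264HighAutoLevel, // High profile, automatic level
        AVVideoH264EntropyModeKey: AVVideoH264EntropyModeCABAC        // CABAC entropy coding
    ]
    let videoSettings: [String: Any] = [
        AVVideoCodecKey: AVVideoCodecType.h264,
        AVVideoCompressionPropertiesKey: compressionProperties,
        AVVideoWidthKey: 400,
        AVVideoHeightKey: 400
    ]
    let input = AVAssetWriterInput(mediaType: .video, outputSettings: videoSettings)
    input.expectsMediaDataInRealTime = true // camera frames arrive live
    return input
}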
@@ -514,10 +518,10 @@ final class CameraOutput: NSObject {
         let extensions = CMFormatDescriptionGetExtensions(formatDescription) as! [String: Any]
         
         var updatedExtensions = extensions
-        updatedExtensions["CVBytesPerRow"] = 400 * 4
+        updatedExtensions["CVBytesPerRow"] = videoMessageDimensions.width * 4
         
         var newFormatDescription: CMFormatDescription?
-        var status = CMVideoFormatDescriptionCreate(allocator: nil, codecType: mediaSubType, width: 400, height: 400, extensions: updatedExtensions as CFDictionary, formatDescriptionOut: &newFormatDescription)
+        var status = CMVideoFormatDescriptionCreate(allocator: nil, codecType: mediaSubType, width: videoMessageDimensions.width, height: videoMessageDimensions.height, extensions: updatedExtensions as CFDictionary, formatDescriptionOut: &newFormatDescription)
         guard status == noErr, let newFormatDescription else {
             return nil
         }
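The CVBytesPerRow extension advertised by the format description has to agree with the width passed to CMVideoFormatDescriptionCreate, which is why both now derive from videoMessageDimensions instead of repeating the literal 400. A one-line sanity check, assuming the 32-bit BGRA buffers this pipeline produces (4 bytes per pixel):

let bytesPerRow = Int(videoMessageDimensions.width) * 4 // 400 px * 4 bytes (BGRA) = 1600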
@@ -5,6 +5,9 @@ import CoreMedia
 import CoreVideo
 import Metal
 import Display
+import TelegramCore
 
+let videoMessageDimensions = PixelDimensions(width: 400, height: 400)
+
 func allocateOutputBufferPool(with inputFormatDescription: CMFormatDescription, outputRetainedBufferCountHint: Int) -> (
     outputBufferPool: CVPixelBufferPool?,
@@ -114,8 +117,7 @@ class CameraRoundVideoFilter {
         }
         self.inputFormatDescription = formatDescription
         
-        let diameter: CGFloat = 400.0
-        let circleImage = generateImage(CGSize(width: diameter, height: diameter), opaque: false, scale: 1.0, rotatedContext: { size, context in
+        let circleImage = generateImage(videoMessageDimensions.cgSize, opaque: false, scale: 1.0, rotatedContext: { size, context in
             let bounds = CGRect(origin: .zero, size: size)
             context.clear(bounds)
             context.setFillColor(UIColor.white.cgColor)
@@ -158,7 +160,7 @@ class CameraRoundVideoFilter {
         
         var sourceImage = CIImage(cvImageBuffer: pixelBuffer)
         sourceImage = sourceImage.oriented(additional ? .leftMirrored : .right)
-        let scale = 400.0 / min(sourceImage.extent.width, sourceImage.extent.height)
+        let scale = CGFloat(videoMessageDimensions.width) / min(sourceImage.extent.width, sourceImage.extent.height)
         
         resizeFilter.setValue(sourceImage, forKey: kCIInputImageKey)
         resizeFilter.setValue(scale, forKey: kCIInputScaleKey)
@@ -204,17 +206,13 @@ class CameraRoundVideoFilter {
             return nil
         }
         
-        if finalImage.extent.width != 400 {
-            print("wtf: \(finalImage)")
-        }
-        
         var pbuf: CVPixelBuffer?
         CVPixelBufferPoolCreatePixelBuffer(kCFAllocatorDefault, outputPixelBufferPool!, &pbuf)
         guard let outputPixelBuffer = pbuf else {
             return nil
         }
         
-        self.ciContext.render(finalImage, to: outputPixelBuffer, bounds: CGRect(origin: .zero, size: CGSize(width: 400, height: 400)), colorSpace: outputColorSpace)
+        self.ciContext.render(finalImage, to: outputPixelBuffer, bounds: CGRect(origin: .zero, size: videoMessageDimensions.cgSize), colorSpace: outputColorSpace)
         
         return outputPixelBuffer
     }
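All of the scattered 400s in this filter now come from videoMessageDimensions, and the leftover debug print is dropped. For context, the filter's job is to clip each square camera frame into the round 400x400 shape; a compact sketch of that idea, assuming a Core Image source-atop composite (the file's actual filter chain differs in detail):

import CoreImage
import CoreGraphics

// Sketch: scale the frame so its short side matches the target, then draw it
// "atop" an opaque circle. Source-atop keeps the frame only where the circle
// has alpha, clipping the square frame into the round video-message shape.
func roundMasked(frame: CIImage, circle: CIImage, side: CGFloat) -> CIImage {
    let scale = side / min(frame.extent.width, frame.extent.height)
    let scaled = frame.transformed(by: CGAffineTransform(scaleX: scale, y: scale))
    return scaled.applyingFilter("CISourceAtopCompositing", parameters: [
        kCIInputBackgroundImageKey: circle
    ])
}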
@@ -307,7 +307,18 @@ public class ChatMessageContactBubbleContentNode: ChatMessageBubbleContentNode {
         }
         
         let (messageButtonWidth, messageContinueLayout) = makeMessageButtonLayout(constrainedSize.width, nil, false, item.presentationData.strings.Conversation_ContactMessage.uppercased(), mainColor, false, false)
-        let (addButtonWidth, addContinueLayout) = makeAddButtonLayout(constrainedSize.width, nil, false, !canMessage && !canAdd ? item.presentationData.strings.Conversation_ViewContactDetails.uppercased() : item.presentationData.strings.Conversation_ContactAddContact.uppercased(), mainColor, false, false)
+        
+        let addTitle: String
+        if !canMessage && !canAdd {
+            addTitle = item.presentationData.strings.Conversation_ViewContactDetails
+        } else {
+            if canMessage {
+                addTitle = item.presentationData.strings.Conversation_ContactAddContact
+            } else {
+                addTitle = item.presentationData.strings.Conversation_ContactAddContactLong
+            }
+        }
+        let (addButtonWidth, addContinueLayout) = makeAddButtonLayout(constrainedSize.width, nil, false, addTitle.uppercased(), mainColor, false, false)
         
         let maxButtonWidth = max(messageButtonWidth, addButtonWidth)
         var maxContentWidth: CGFloat = avatarSize.width + 7.0
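The old ternary collapsed three cases into two and could show the short "ADD" even when it was the only button. The replacement spells all three out; the decision as a standalone sketch over the same flags (string keys shown for illustration):

// Which label the add button gets, given the two capability flags.
func addButtonTitleKey(canMessage: Bool, canAdd: Bool) -> String {
    if !canMessage && !canAdd {
        return "Conversation.ViewContactDetails"      // no actions available: details fallback
    }
    // Next to a MESSAGE button the short "ADD" fits; standing alone,
    // the longer "ADD CONTACT" reads better.
    return canMessage ? "Conversation.ContactAddContact" : "Conversation.ContactAddContactLong"
}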
@@ -327,7 +338,7 @@ public class ChatMessageContactBubbleContentNode: ChatMessageBubbleContentNode {
         let lineWidth: CGFloat = 3.0
         
         var buttonCount = 1
-        if canMessage {
+        if canMessage && canAdd {
             buttonCount += 1
         }
         var buttonWidth = floor((boundingWidth - layoutConstants.text.bubbleInsets.right * 2.0 - lineWidth))
@@ -387,7 +398,7 @@ public class ChatMessageContactBubbleContentNode: ChatMessageBubbleContentNode {
         strongSelf.messageButtonNode.isHidden = !canMessage
         
         let backgroundInsets = layoutConstants.text.bubbleInsets
-        let backgroundFrame = CGRect(origin: CGPoint(x: backgroundInsets.left, y: backgroundInsets.top + 5.0), size: CGSize(width: contentWidth - layoutConstants.text.bubbleInsets.right * 2.0, height: layoutSize.height - 34.0))
+        let backgroundFrame = CGRect(origin: CGPoint(x: backgroundInsets.left, y: backgroundInsets.top + 5.0), size: CGSize(width: boundingWidth - layoutConstants.text.bubbleInsets.right * 2.0, height: layoutSize.height - 34.0))
         
         if let statusSizeAndApply = statusSizeAndApply {
             strongSelf.dateAndStatusNode.frame = CGRect(origin: CGPoint(x: layoutConstants.text.bubbleInsets.left, y: backgroundFrame.maxY + 3.0), size: statusSizeAndApply.0)
@@ -689,16 +689,27 @@ public class VideoMessageCameraScreen: ViewController {
         }
         
         func withReadyCamera(isFirstTime: Bool = false, _ f: @escaping () -> Void) {
+            guard let controller = self.controller else {
+                return
+            }
             if #available(iOS 13.0, *) {
-                let _ = ((self.cameraState.isDualCameraEnabled ? self.additionalPreviewView.isPreviewing : self.mainPreviewView.isPreviewing)
-                |> filter { $0 }
-                |> take(1)).startStandalone(next: { _ in
+                let _ = (combineLatest(queue: Queue.mainQueue(),
+                    self.cameraState.isDualCameraEnabled ? self.additionalPreviewView.isPreviewing : self.mainPreviewView.isPreviewing,
+                    controller.audioSessionReady.get()
+                )
+                |> filter { $0 && $1 }
+                |> take(1)).startStandalone(next: { _, _ in
                     f()
                 })
             } else {
-                Queue.mainQueue().after(0.35) {
+                let _ = (combineLatest(queue: Queue.mainQueue(),
+                    .single(true) |> delay(0.35, queue: Queue.mainQueue()),
+                    controller.audioSessionReady.get()
+                )
+                |> filter { $0 && $1 }
+                |> take(1)).startStandalone(next: { _, _ in
                     f()
-                }
+                })
             }
         }
         
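Before this change the callback fired as soon as the preview was live (or, pre-iOS 13, after a fixed 0.35 s delay), even if the audio session had not been activated yet. Both branches now also wait on the new audioSessionReady promise. The gating pattern, sketched with SwiftSignalKit signals mirroring the ones above:

import SwiftSignalKit

// Sketch: run `f` once, on the main queue, only after BOTH conditions are true.
func whenReady(isPreviewing: Signal<Bool, NoError>,
               audioSessionReady: Signal<Bool, NoError>,
               _ f: @escaping () -> Void) -> Disposable {
    return (combineLatest(queue: Queue.mainQueue(), isPreviewing, audioSessionReady)
    |> filter { $0 && $1 } // both preview and audio session are ready
    |> take(1)             // fire exactly once, then complete
    ).startStandalone(next: { _, _ in
        f()
    })
}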
@@ -1241,6 +1252,7 @@ public class VideoMessageCameraScreen: ViewController {
         fileprivate let completion: (EnqueueMessage?, Bool?, Int32?) -> Void
         
         private var audioSessionDisposable: Disposable?
+        fileprivate let audioSessionReady = ValuePromise<Bool>(false)
         
         private let hapticFeedback = HapticFeedback()
         
@@ -1484,11 +1496,13 @@ public class VideoMessageCameraScreen: ViewController {
                 finalDuration = duration
             }
             
+            let dimensions = PixelDimensions(width: 400, height: 400)
+            
             var thumbnailImage = video.thumbnail
             if startTime > 0.0 {
                 let composition = composition(with: results)
                 let imageGenerator = AVAssetImageGenerator(asset: composition)
-                imageGenerator.maximumSize = CGSize(width: 400, height: 400)
+                imageGenerator.maximumSize = dimensions.cgSize
                 imageGenerator.appliesPreferredTrackTransform = true
                 
                 if let cgImage = try? imageGenerator.copyCGImage(at: CMTime(seconds: startTime, preferredTimescale: composition.duration.timescale), actualTime: nil) {
@@ -1496,7 +1510,7 @@ public class VideoMessageCameraScreen: ViewController {
                 }
             }
             
-            let values = MediaEditorValues(peerId: self.context.account.peerId, originalDimensions: PixelDimensions(width: 400, height: 400), cropOffset: .zero, cropRect: CGRect(origin: .zero, size: CGSize(width: 400.0, height: 400.0)), cropScale: 1.0, cropRotation: 0.0, cropMirroring: false, cropOrientation: nil, gradientColors: nil, videoTrimRange: self.node.previewState?.trimRange, videoIsMuted: false, videoIsFullHd: false, videoIsMirrored: false, videoVolume: nil, additionalVideoPath: nil, additionalVideoIsDual: false, additionalVideoPosition: nil, additionalVideoScale: nil, additionalVideoRotation: nil, additionalVideoPositionChanges: [], additionalVideoTrimRange: nil, additionalVideoOffset: nil, additionalVideoVolume: nil, nightTheme: false, drawing: nil, entities: [], toolValues: [:], audioTrack: nil, audioTrackTrimRange: nil, audioTrackOffset: nil, audioTrackVolume: nil, audioTrackSamples: nil, qualityPreset: .videoMessage)
+            let values = MediaEditorValues(peerId: self.context.account.peerId, originalDimensions: dimensions, cropOffset: .zero, cropRect: CGRect(origin: .zero, size: dimensions.cgSize), cropScale: 1.0, cropRotation: 0.0, cropMirroring: false, cropOrientation: nil, gradientColors: nil, videoTrimRange: self.node.previewState?.trimRange, videoIsMuted: false, videoIsFullHd: false, videoIsMirrored: false, videoVolume: nil, additionalVideoPath: nil, additionalVideoIsDual: false, additionalVideoPosition: nil, additionalVideoScale: nil, additionalVideoRotation: nil, additionalVideoPositionChanges: [], additionalVideoTrimRange: nil, additionalVideoOffset: nil, additionalVideoVolume: nil, nightTheme: false, drawing: nil, entities: [], toolValues: [:], audioTrack: nil, audioTrackTrimRange: nil, audioTrackOffset: nil, audioTrackVolume: nil, audioTrackSamples: nil, qualityPreset: .videoMessage)
             
             var resourceAdjustments: VideoMediaResourceAdjustments? = nil
             if let valuesData = try? JSONEncoder().encode(values) {
@@ -1614,10 +1628,13 @@ public class VideoMessageCameraScreen: ViewController {
         }
         
         private func requestAudioSession() {
-            self.audioSessionDisposable = self.context.sharedContext.mediaManager.audioSession.push(audioSessionType: .recordWithOthers, activate: { _ in
+            self.audioSessionDisposable = self.context.sharedContext.mediaManager.audioSession.push(audioSessionType: .recordWithOthers, activate: { [weak self] _ in
                 if #available(iOS 13.0, *) {
                     try? AVAudioSession.sharedInstance().setAllowHapticsAndSystemSoundsDuringRecording(true)
                 }
+                if let self {
+                    self.audioSessionReady.set(true)
+                }
             }, deactivate: { _ in
                 return .single(Void())
             })
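This is the producer side of the audioSessionReady promise introduced above: the activate callback (now capturing self weakly) flips it to true once the system grants the session. A minimal sketch of the ValuePromise handshake, assuming SwiftSignalKit:

import SwiftSignalKit

let audioSessionReady = ValuePromise<Bool>(false)

// Producer: called from the audio session's activate handler.
audioSessionReady.set(true)

// Consumer: completes as soon as the promise turns true.
let _ = (audioSessionReady.get()
|> filter { $0 }
|> take(1)).startStandalone(next: { _ in
    print("audio session ready")
})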
@@ -351,10 +351,6 @@ final class ChatRecordingPreviewInputPanelNode: ChatInputPanelNode {
             }
         }
         
-        if isFirstTime, !self.viewOnceButton.isHidden {
-            self.maybePresentViewOnceTooltip()
-        }
-        
         let panelHeight = defaultHeight(metrics: metrics)
         
         transition.updateFrame(node: self.deleteButton, frame: CGRect(origin: CGPoint(x: leftInset + 2.0 - UIScreenPixel, y: 1), size: CGSize(width: 40.0, height: 40)))
@@ -488,6 +484,10 @@ final class ChatRecordingPreviewInputPanelNode: ChatInputPanelNode {
             }
         }
         
+        if isFirstTime, !self.viewOnceButton.isHidden {
+            self.maybePresentViewOnceTooltip()
+        }
+        
         return panelHeight
     }
     
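Note: the view-once tooltip is moved rather than removed. The isFirstTime check now runs at the end of the layout pass, just before panelHeight is returned, presumably so the tooltip anchors to the button's final frame instead of its pre-layout position.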
@@ -126,6 +126,29 @@ public func parseInternalUrl(query: String) -> ParsedInternalUrl? {
         }
         if !pathComponents.isEmpty && !pathComponents[0].isEmpty {
             let peerName: String = pathComponents[0]
+            
+            if pathComponents[0].hasPrefix("+") || pathComponents[0].hasPrefix("%20") {
+                let component = pathComponents[0].replacingOccurrences(of: "%20", with: "+")
+                if component.rangeOfCharacter(from: CharacterSet(charactersIn: "0123456789+").inverted) == nil {
+                    var attach: String?
+                    var startAttach: String?
+                    if let queryItems = components.queryItems {
+                        for queryItem in queryItems {
+                            if let value = queryItem.value {
+                                if queryItem.name == "attach" {
+                                    attach = value
+                                } else if queryItem.name == "startattach" {
+                                    startAttach = value
+                                }
+                            }
+                        }
+                    }
+                    
+                    return .phone(component.replacingOccurrences(of: "+", with: ""), attach, startAttach)
+                } else {
+                    return .join(String(component.dropFirst()))
+                }
+            }
             if pathComponents.count == 1 {
                 if let queryItems = components.queryItems {
                     if peerName == "socks" || peerName == "proxy" {
@@ -288,27 +311,6 @@ public func parseInternalUrl(query: String) -> ParsedInternalUrl? {
             }
         } else if pathComponents[0].hasPrefix(phonebookUsernamePathPrefix), let idValue = Int64(String(pathComponents[0][pathComponents[0].index(pathComponents[0].startIndex, offsetBy: phonebookUsernamePathPrefix.count)...])) {
             return .peerId(PeerId(namespace: Namespaces.Peer.CloudUser, id: PeerId.Id._internalFromInt64Value(idValue)))
-        } else if pathComponents[0].hasPrefix("+") || pathComponents[0].hasPrefix("%20") {
-            let component = pathComponents[0].replacingOccurrences(of: "%20", with: "+")
-            if component.rangeOfCharacter(from: CharacterSet(charactersIn: "0123456789+").inverted) == nil {
-                var attach: String?
-                var startAttach: String?
-                if let queryItems = components.queryItems {
-                    for queryItem in queryItems {
-                        if let value = queryItem.value {
-                            if queryItem.name == "attach" {
-                                attach = value
-                            } else if queryItem.name == "startattach" {
-                                startAttach = value
-                            }
-                        }
-                    }
-                }
-                
-                return .phone(component.replacingOccurrences(of: "+", with: ""), attach, startAttach)
-            } else {
-                return .join(String(component.dropFirst()))
-            }
         } else if pathComponents[0].hasPrefix("$") || pathComponents[0].hasPrefix("%24") {
             var component = pathComponents[0].replacingOccurrences(of: "%24", with: "$")
             if component.hasPrefix("$") {
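This deleted block is the same phone/invite parsing that was re-added near the top of parseInternalUrl in the earlier hunk, so t.me/+... links are now matched before the generic username path. Its behavior in isolation, as a sketch:

import Foundation

enum PlusLink {
    case phone(String)
    case join(String)
}

// Sketch of the relocated branch: a leading "+" (or its "%20" escape) is
// either a phone-number link or a private invite hash.
func parsePlusComponent(_ raw: String) -> PlusLink {
    let component = raw.replacingOccurrences(of: "%20", with: "+")
    if component.rangeOfCharacter(from: CharacterSet(charactersIn: "0123456789+").inverted) == nil {
        return .phone(component.replacingOccurrences(of: "+", with: "")) // e.g. t.me/+79991234567
    } else {
        return .join(String(component.dropFirst())) // e.g. t.me/+AbCdEfGh invite link
    }
}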