Merge branch 'master' of gitlab.com:peter-iakovlev/telegram-ios

Ilya Laktyushin 2024-10-09 00:06:27 +04:00
commit c6a8183036
17 changed files with 30923 additions and 26 deletions

View File

@@ -13095,5 +13095,10 @@ Sorry for the inconvenience.";
"WebBrowser.AuthChallenge.Title" = "Sign in to %@";
"WebBrowser.AuthChallenge.Text" = "Your login information will be sent securely.";
"ChatList.Search.FilterPublicPosts" = "Public Posts";
"DialogList.SearchSectionPublicPosts" = "Public Posts";
"Chat.PrivateMessageEditTimestamp.Date" = "edited %@";
"Chat.PrivateMessageEditTimestamp.TodayAt" = "edited today at %@";
"Chat.PrivateMessageEditTimestamp.YesterdayAt" = "edited yesterday at %@";

View File

@@ -44,6 +44,7 @@ extern int FFMpegCodecIdVP9;
- (bool)isAttachedPicAtStreamIndex:(int32_t)streamIndex;
- (int)codecIdAtStreamIndex:(int32_t)streamIndex;
- (double)duration;
- (int64_t)startTimeAtStreamIndex:(int32_t)streamIndex;
- (int64_t)durationAtStreamIndex:(int32_t)streamIndex;
- (bool)codecParamsAtStreamIndex:(int32_t)streamIndex toContext:(FFMpegAVCodecContext *)context;
- (FFMpegFpsAndTimebase)fpsAndTimebaseForStreamIndex:(int32_t)streamIndex defaultTimeBase:(CMTime)defaultTimeBase;

View File

@@ -103,6 +103,10 @@ int FFMpegCodecIdVP9 = AV_CODEC_ID_VP9;
return (double)_impl->duration / AV_TIME_BASE;
}
- (int64_t)startTimeAtStreamIndex:(int32_t)streamIndex {
return _impl->streams[streamIndex]->start_time;
}
- (int64_t)durationAtStreamIndex:(int32_t)streamIndex {
return _impl->streams[streamIndex]->duration;
}

View File

@@ -38,15 +38,17 @@ private final class SoftwareVideoStream {
let index: Int
let fps: CMTime
let timebase: CMTime
let startTime: CMTime
let duration: CMTime
let decoder: FFMpegMediaVideoFrameDecoder
let rotationAngle: Double
let aspect: Double
init(index: Int, fps: CMTime, timebase: CMTime, duration: CMTime, decoder: FFMpegMediaVideoFrameDecoder, rotationAngle: Double, aspect: Double) {
init(index: Int, fps: CMTime, timebase: CMTime, startTime: CMTime, duration: CMTime, decoder: FFMpegMediaVideoFrameDecoder, rotationAngle: Double, aspect: Double) {
self.index = index
self.fps = fps
self.timebase = timebase
self.startTime = startTime
self.duration = duration
self.decoder = decoder
self.rotationAngle = rotationAngle
@@ -126,6 +128,13 @@ public final class SoftwareVideoSource {
let fpsAndTimebase = avFormatContext.fpsAndTimebase(forStreamIndex: streamIndex, defaultTimeBase: CMTimeMake(value: 1, timescale: 40000))
let (fps, timebase) = (fpsAndTimebase.fps, fpsAndTimebase.timebase)
let startTime: CMTime
let rawStartTime = avFormatContext.startTime(atStreamIndex: streamIndex)
if rawStartTime == Int64(bitPattern: 0x8000000000000000 as UInt64) {
startTime = CMTime(value: 0, timescale: timebase.timescale)
} else {
startTime = CMTimeMake(value: rawStartTime, timescale: timebase.timescale)
}
let duration = CMTimeMake(value: avFormatContext.duration(atStreamIndex: streamIndex), timescale: timebase.timescale)
let metrics = avFormatContext.metricsForStream(at: streamIndex)
@@ -137,7 +146,7 @@ public final class SoftwareVideoSource {
let codecContext = FFMpegAVCodecContext(codec: codec)
if avFormatContext.codecParams(atStreamIndex: streamIndex, to: codecContext) {
if codecContext.open() {
videoStream = SoftwareVideoStream(index: Int(streamIndex), fps: fps, timebase: timebase, duration: duration, decoder: FFMpegMediaVideoFrameDecoder(codecContext: codecContext), rotationAngle: rotationAngle, aspect: aspect)
videoStream = SoftwareVideoStream(index: Int(streamIndex), fps: fps, timebase: timebase, startTime: startTime, duration: duration, decoder: FFMpegMediaVideoFrameDecoder(codecContext: codecContext), rotationAngle: rotationAngle, aspect: aspect)
break
}
}
@@ -222,6 +231,13 @@ public final class SoftwareVideoSource {
}
}
public func readTrackInfo() -> (offset: CMTime, duration: CMTime)? {
guard let videoStream = self.videoStream else {
return nil
}
return (videoStream.startTime, CMTimeMaximum(CMTime(value: 0, timescale: videoStream.duration.timescale), CMTimeSubtract(videoStream.duration, videoStream.startTime)))
}
public func readFrame(maxPts: CMTime?) -> (MediaTrackFrame?, CGFloat, CGFloat, Bool) {
guard let videoStream = self.videoStream, let avFormatContext = self.avFormatContext else {
return (nil, 0.0, 1.0, false)
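For reference, the start-time handling above in a self-contained sketch: FFmpeg reports AV_NOPTS_VALUE (0x8000000000000000, i.e. Int64.min) when a stream carries no start time, and readTrackInfo() subtracts the start offset from the stream duration, clamping at zero. Function and parameter names are illustrative, not part of this diff.

import CoreMedia

// Sketch of the offset/duration computation performed by readTrackInfo() above.
func trackInfo(rawStartTime: Int64, rawDuration: Int64, timescale: CMTimeScale) -> (offset: CMTime, duration: CMTime) {
    // AV_NOPTS_VALUE (0x8000000000000000) bit-cast to Int64 is Int64.min.
    let startTime: CMTime
    if rawStartTime == Int64.min {
        startTime = CMTime(value: 0, timescale: timescale)
    } else {
        startTime = CMTimeMake(value: rawStartTime, timescale: timescale)
    }
    let duration = CMTimeMake(value: rawDuration, timescale: timescale)
    // The reported duration excludes the start offset and never goes negative.
    let playableDuration = CMTimeMaximum(CMTime(value: 0, timescale: timescale), CMTimeSubtract(duration, startTime))
    return (startTime, playableDuration)
}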

View File

@@ -7098,10 +7098,7 @@ final class VoiceChatContextReferenceContentSource: ContextReferenceContentSourc
}
public func shouldUseV2VideoChatImpl(context: AccountContext) -> Bool {
var useV2 = false
if let data = context.currentAppConfiguration.with({ $0 }).data, let _ = data["ios_killswitch_enable_videochatui_v2"] {
useV2 = true
}
var useV2 = true
if context.sharedContext.immediateExperimentalUISettings.disableCallV2 {
useV2 = false
}

View File

@@ -6229,7 +6229,7 @@ public class ChatMessageBubbleItemNode: ChatMessageItemView, ChatMessagePreviewI
}
for contentNode in self.contentNodes {
if contentNode is ChatMessageMediaBubbleContentNode || contentNode is ChatMessageGiftBubbleContentNode {
if contentNode is ChatMessageMediaBubbleContentNode || contentNode is ChatMessageGiftBubbleContentNode || contentNode is ChatMessageWebpageBubbleContentNode || contentNode is ChatMessageInvoiceBubbleContentNode || contentNode is ChatMessageGameBubbleContentNode {
contentNode.visibility = mapVisibility(effectiveMediaVisibility, boundsSize: self.bounds.size, insets: self.insets, to: contentNode)
} else {
contentNode.visibility = mapVisibility(effectiveVisibility, boundsSize: self.bounds.size, insets: self.insets, to: contentNode)

View File

@@ -0,0 +1,12 @@
{
"images" : [
{
"filename" : "edited.pdf",
"idiom" : "universal"
}
],
"info" : {
"author" : "xcode",
"version" : 1
}
}

View File

@@ -192,7 +192,7 @@ extension ChatControllerImpl {
if canViewMessageReactionList(message: message) {
items = ContextController.Items(content: .custom(ReactionListContextMenuContent(
context: self.context,
displayReadTimestamps: false,
displayReadTimestamps: true,
availableReactions: availableReactions,
animationCache: self.controllerInteraction!.presentationContext.animationCache,
animationRenderer: self.controllerInteraction!.presentationContext.animationRenderer,

View File

@@ -1857,6 +1857,20 @@ func contextMenuForChatPresentationInterfaceState(chatPresentationInterfaceState
reactionCount = 0
}
}
let isEdited = message.attributes.contains(where: { attribute in
if let attribute = attribute as? EditedMessageAttribute, !attribute.isHidden, attribute.date != 0 {
return true
}
return false
})
if isEdited {
if !actions.isEmpty {
actions.insert(.separator, at: 0)
}
actions.insert(.custom(ChatReadReportContextItem(context: context, message: message, hasReadReports: false, isEdit: true, stats: MessageReadStats(reactionCount: 0, peers: [], readTimestamps: [:]), action: nil), false), at: 0)
}
if let peer = message.peers[message.id.peerId], (canViewStats || reactionCount != 0) {
var hasReadReports = false
@@ -1880,18 +1894,18 @@ func contextMenuForChatPresentationInterfaceState(chatPresentationInterfaceState
} else {
reactionCount = 0
}
/*var readStats = readStats
if !canViewStats {
readStats = MessageReadStats(reactionCount: 0, peers: [])
}*/
if hasReadReports || reactionCount != 0 {
if !actions.isEmpty {
actions.insert(.separator, at: 0)
}
var readStats = readStats
if !(hasReadReports || reactionCount != 0) {
readStats = MessageReadStats(reactionCount: 0, peers: [], readTimestamps: [:])
}
actions.insert(.custom(ChatReadReportContextItem(context: context, message: message, hasReadReports: hasReadReports, stats: readStats, action: { c, f, stats, customReactionEmojiPacks, firstCustomEmojiReaction in
actions.insert(.custom(ChatReadReportContextItem(context: context, message: message, hasReadReports: hasReadReports, isEdit: false, stats: readStats, action: { c, f, stats, customReactionEmojiPacks, firstCustomEmojiReaction in
if message.id.peerId.namespace == Namespaces.Peer.CloudUser {
if let stats, stats.peers.isEmpty {
c.dismiss(completion: {
@@ -2634,13 +2648,15 @@ final class ChatReadReportContextItem: ContextMenuCustomItem {
fileprivate let context: AccountContext
fileprivate let message: Message
fileprivate let hasReadReports: Bool
fileprivate let isEdit: Bool
fileprivate let stats: MessageReadStats?
fileprivate let action: (ContextControllerProtocol, @escaping (ContextMenuActionResult) -> Void, MessageReadStats?, [StickerPackCollectionInfo], TelegramMediaFile?) -> Void
fileprivate let action: ((ContextControllerProtocol, @escaping (ContextMenuActionResult) -> Void, MessageReadStats?, [StickerPackCollectionInfo], TelegramMediaFile?) -> Void)?
init(context: AccountContext, message: Message, hasReadReports: Bool, stats: MessageReadStats?, action: @escaping (ContextControllerProtocol, @escaping (ContextMenuActionResult) -> Void, MessageReadStats?, [StickerPackCollectionInfo], TelegramMediaFile?) -> Void) {
init(context: AccountContext, message: Message, hasReadReports: Bool, isEdit: Bool, stats: MessageReadStats?, action: ((ContextControllerProtocol, @escaping (ContextMenuActionResult) -> Void, MessageReadStats?, [StickerPackCollectionInfo], TelegramMediaFile?) -> Void)?) {
self.context = context
self.message = message
self.hasReadReports = hasReadReports
self.isEdit = isEdit
self.stats = stats
self.action = action
}
@@ -2719,7 +2735,9 @@ private final class ChatReadReportContextItemNode: ASDisplayNode, ContextMenuCus
self.buttonNode.accessibilityLabel = presentationData.strings.VoiceChat_StopRecording
self.iconNode = ASImageNode()
if self.item.message.id.peerId.namespace == Namespaces.Peer.CloudUser {
if self.item.isEdit {
self.iconNode.image = generateTintedImage(image: UIImage(bundleImageName: "Chat/Message/MenuEditIcon"), color: presentationData.theme.actionSheet.primaryTextColor)
} else if self.item.message.id.peerId.namespace == Namespaces.Peer.CloudUser {
self.iconNode.image = generateTintedImage(image: UIImage(bundleImageName: "Chat/Message/MenuReadIcon"), color: presentationData.theme.actionSheet.primaryTextColor)
} else if let reactionsAttribute = item.message.reactionsAttribute, !reactionsAttribute.reactions.isEmpty {
self.iconNode.image = generateTintedImage(image: UIImage(bundleImageName: "Chat/Context Menu/Reactions"), color: presentationData.theme.actionSheet.primaryTextColor)
@@ -2818,12 +2836,12 @@ private final class ChatReadReportContextItemNode: ASDisplayNode, ContextMenuCus
if let currentStats = self.currentStats {
if self.item.message.id.peerId.namespace == Namespaces.Peer.CloudUser {
self.buttonNode.isUserInteractionEnabled = currentStats.peers.isEmpty
self.buttonNode.isUserInteractionEnabled = item.action != nil && currentStats.peers.isEmpty
} else {
self.buttonNode.isUserInteractionEnabled = !currentStats.peers.isEmpty || reactionCount != 0
self.buttonNode.isUserInteractionEnabled = item.action != nil && (!currentStats.peers.isEmpty || reactionCount != 0)
}
} else {
self.buttonNode.isUserInteractionEnabled = reactionCount != 0
self.buttonNode.isUserInteractionEnabled = item.action != nil && reactionCount != 0
self.disposable = (item.context.engine.messages.messageReadStats(id: item.message.id)
|> deliverOnMainQueue).startStrict(next: { [weak self] value in
@@ -2862,9 +2880,9 @@ private final class ChatReadReportContextItemNode: ASDisplayNode, ContextMenuCus
func updateStats(stats: MessageReadStats, transition: ContainedViewLayoutTransition) {
if self.item.message.id.peerId.namespace == Namespaces.Peer.CloudUser {
self.buttonNode.isUserInteractionEnabled = stats.peers.isEmpty
self.buttonNode.isUserInteractionEnabled = self.item.action != nil && stats.peers.isEmpty
} else {
self.buttonNode.isUserInteractionEnabled = !stats.peers.isEmpty || stats.reactionCount != 0
self.buttonNode.isUserInteractionEnabled = self.item.action != nil && (!stats.peers.isEmpty || stats.reactionCount != 0)
}
guard let (calculatedWidth, size) = self.validLayout else {
@@ -2908,7 +2926,24 @@ private final class ChatReadReportContextItemNode: ASDisplayNode, ContextMenuCus
reactionCount = currentStats.reactionCount
if currentStats.peers.isEmpty {
if self.item.message.id.peerId.namespace == Namespaces.Peer.CloudUser {
if self.item.isEdit, let attribute = self.item.message.attributes.first(where: { $0 is EditedMessageAttribute }) as? EditedMessageAttribute, !attribute.isHidden, attribute.date != 0 {
let dateText = humanReadableStringForTimestamp(strings: self.presentationData.strings, dateTimeFormat: self.presentationData.dateTimeFormat, timestamp: attribute.date, alwaysShowTime: true, allowYesterday: true, format: HumanReadableStringFormat(
dateFormatString: { value in
return PresentationStrings.FormattedString(string: self.presentationData.strings.Chat_PrivateMessageEditTimestamp_Date(value).string, ranges: [])
},
tomorrowFormatString: { value in
return PresentationStrings.FormattedString(string: self.presentationData.strings.Chat_PrivateMessageEditTimestamp_TodayAt(value).string, ranges: [])
},
todayFormatString: { value in
return PresentationStrings.FormattedString(string: self.presentationData.strings.Chat_PrivateMessageEditTimestamp_TodayAt(value).string, ranges: [])
},
yesterdayFormatString: { value in
return PresentationStrings.FormattedString(string: self.presentationData.strings.Chat_PrivateMessageEditTimestamp_YesterdayAt(value).string, ranges: [])
}
)).string
self.textNode.attributedText = NSAttributedString(string: dateText, font: Font.regular(floor(self.presentationData.listsFontSize.baseDisplaySize * 0.8)), textColor: self.presentationData.theme.contextMenu.primaryColor)
} else if self.item.message.id.peerId.namespace == Namespaces.Peer.CloudUser {
let text = NSAttributedString(string: self.presentationData.strings.Chat_ContextMenuReadDate_ReadAvailablePrefix, font: Font.regular(floor(self.presentationData.listsFontSize.baseDisplaySize * 0.8)), textColor: self.presentationData.theme.contextMenu.primaryColor)
if self.textNode.attributedText != text {
animatePositions = false
@@ -3026,7 +3061,12 @@ private final class ChatReadReportContextItemNode: ASDisplayNode, ContextMenuCus
let positionTransition: ContainedViewLayoutTransition = animatePositions ? transition : .immediate
let verticalOrigin = floor((size.height - combinedTextHeight) / 2.0)
let textFrame = CGRect(origin: CGPoint(x: sideInset + iconSize.width + 4.0, y: verticalOrigin), size: textSize)
var textFrame = CGRect(origin: CGPoint(x: sideInset + iconSize.width + 4.0, y: verticalOrigin), size: textSize)
if self.item.isEdit {
textFrame.origin.x -= 2.0
}
positionTransition.updateFrameAdditive(node: self.textNode, frame: textFrame)
transition.updateAlpha(node: self.textNode, alpha: self.currentStats == nil ? 0.0 : 1.0)
@@ -3070,7 +3110,11 @@ private final class ChatReadReportContextItemNode: ASDisplayNode, ContextMenuCus
transition.updateAlpha(node: self.shimmerNode, alpha: self.currentStats == nil ? 1.0 : 0.0)
if !iconSize.width.isZero {
transition.updateFrameAdditive(node: self.iconNode, frame: CGRect(origin: CGPoint(x: sideInset + 1.0, y: floor((size.height - iconSize.height) / 2.0)), size: iconSize))
var iconFrame = CGRect(origin: CGPoint(x: sideInset + 1.0, y: floor((size.height - iconSize.height) / 2.0)), size: iconSize)
if self.item.isEdit {
iconFrame.origin.x -= 2.0
}
transition.updateFrameAdditive(node: self.iconNode, frame: iconFrame)
}
let avatarsContent: AnimatedAvatarSetContext.Content
@@ -3155,12 +3199,15 @@ private final class ChatReadReportContextItemNode: ASDisplayNode, ContextMenuCus
guard let controller = self.getController() else {
return
}
self.item.action(controller, { [weak self] result in
self.item.action?(controller, { [weak self] result in
self?.actionSelected(result)
}, self.currentStats, self.customEmojiPacks, self.firstCustomEmojiReaction)
}
var isActionEnabled: Bool {
if self.item.action == nil {
return false
}
var reactionCount = 0
for reaction in mergedMessageReactionsAndPeers(accountPeerId: self.item.context.account.peerId, accountPeer: nil, message: self.item.message).reactions {
reactionCount += Int(reaction.count)
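Worth noting as a design choice: the edited-at row reuses ChatReadReportContextItem but is constructed with action: nil, and every interaction check above now requires a non-nil action, so the row renders as a static, non-tappable line. A minimal sketch of that gating, with hypothetical names:

// Sketch only: mirrors the `item.action != nil && ...` checks added above.
// `isPrivateChat` corresponds to the Namespaces.Peer.CloudUser branch.
func isRowInteractive(hasAction: Bool, isPrivateChat: Bool, readPeerCount: Int, reactionCount: Int) -> Bool {
    guard hasAction else {
        // The edited-at row is built with `action: nil`, so it is never tappable.
        return false
    }
    if isPrivateChat {
        // In a one-to-one chat the row is tappable only while no reader is known yet.
        return readPeerCount == 0
    } else {
        return readPeerCount != 0 || reactionCount != 0
    }
}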

View File

@@ -1,4 +1,44 @@
load("@build_bazel_rules_swift//swift:swift.bzl", "swift_library")
load(
"@build_bazel_rules_apple//apple:resources.bzl",
"apple_resource_bundle",
"apple_resource_group",
)
load("//build-system/bazel-utils:plist_fragment.bzl",
"plist_fragment",
)
filegroup(
name = "HlsBundleContents",
srcs = glob([
"HlsBundle/**",
]),
visibility = ["//visibility:public"],
)
plist_fragment(
name = "HlsBundleInfoPlist",
extension = "plist",
template =
"""
<key>CFBundleIdentifier</key>
<string>org.telegram.TelegramUniversalVideoContent</string>
<key>CFBundleDevelopmentRegion</key>
<string>en</string>
<key>CFBundleName</key>
<string>TelegramUniversalVideoContent</string>
"""
)
apple_resource_bundle(
name = "HlsBundle",
infoplists = [
":HlsBundleInfoPlist",
],
resources = [
":HlsBundleContents",
],
)
swift_library(
name = "TelegramUniversalVideoContent",
@@ -9,6 +49,9 @@ swift_library(
copts = [
"-warnings-as-errors",
],
data = [
":HlsBundle",
],
deps = [
"//submodules/AsyncDisplayKit:AsyncDisplayKit",
"//submodules/Display:Display",

File diff suppressed because one or more lines are too long

View File

@@ -0,0 +1,9 @@
<!DOCTYPE html>
<html>
<head>
<meta charset="utf-8">
<title>Development</title>
<meta name="viewport" content="width=device-width, initial-scale=1"></head>
<body>
<script src="runtime.bundle.js"></script><script src="index.bundle.js"></script><script src="print.bundle.js"></script></body>
</html>

View File

@@ -0,0 +1,27 @@
"use strict";
(self["webpackChunkmy3d"] = self["webpackChunkmy3d"] || []).push([["print"],{
/***/ "./src/print.js":
/*!**********************!*\
!*** ./src/print.js ***!
\**********************/
/***/ ((__unused_webpack_module, __webpack_exports__, __webpack_require__) => {
__webpack_require__.r(__webpack_exports__);
/* harmony export */ __webpack_require__.d(__webpack_exports__, {
/* harmony export */ "default": () => (/* binding */ printMe)
/* harmony export */ });
function printMe() {
console.log('I get called from print.js1234!');
}
/***/ })
},
/******/ __webpack_require__ => { // webpackRuntimeModules
/******/ var __webpack_exec__ = (moduleId) => (__webpack_require__(__webpack_require__.s = moduleId))
/******/ var __webpack_exports__ = (__webpack_exec__("./src/print.js"));
/******/ }
]);
//# sourceMappingURL=data:application/json;charset=utf-8;base64,eyJ2ZXJzaW9uIjozLCJmaWxlIjoicHJpbnQuYnVuZGxlLmpzIiwibWFwcGluZ3MiOiI7Ozs7Ozs7Ozs7Ozs7QUFBZTtBQUNmO0FBQ0EiLCJzb3VyY2VzIjpbIndlYnBhY2s6Ly9teTNkLy4vc3JjL3ByaW50LmpzIl0sInNvdXJjZXNDb250ZW50IjpbImV4cG9ydCBkZWZhdWx0IGZ1bmN0aW9uIHByaW50TWUoKSB7XG4gIGNvbnNvbGUubG9nKCdJIGdldCBjYWxsZWQgZnJvbSBwcmludC5qczEyMzQhJyk7XG59XG4iXSwibmFtZXMiOltdLCJzb3VyY2VSb290IjoiIn0=

File diff suppressed because one or more lines are too long

View File

@@ -215,7 +215,11 @@ public final class HLSVideoContent: UniversalVideoContent {
public func makeContentNode(accountId: AccountRecordId, postbox: Postbox, audioSession: ManagedAudioSession) -> UniversalVideoContentNode & ASDisplayNode {
if #available(iOS 17.1, *) {
#if DEBUG
return HLSVideoJSNativeContentNode(accountId: accountId, postbox: postbox, audioSessionManager: audioSession, userLocation: self.userLocation, fileReference: self.fileReference, streamVideo: self.streamVideo, loopVideo: self.loopVideo, enableSound: self.enableSound, baseRate: self.baseRate, fetchAutomatically: self.fetchAutomatically)
#else
return HLSVideoJSContentNode(accountId: accountId, postbox: postbox, audioSessionManager: audioSession, userLocation: self.userLocation, fileReference: self.fileReference, streamVideo: self.streamVideo, loopVideo: self.loopVideo, enableSound: self.enableSound, baseRate: self.baseRate, fetchAutomatically: self.fetchAutomatically)
#endif
} else {
return HLSVideoAVContentNode(accountId: accountId, postbox: postbox, audioSessionManager: audioSession, userLocation: self.userLocation, fileReference: self.fileReference, streamVideo: self.streamVideo, loopVideo: self.loopVideo, enableSound: self.enableSound, baseRate: self.baseRate, fetchAutomatically: self.fetchAutomatically)
}