Various improvements

Ilya Laktyushin 2023-01-23 15:48:57 +04:00
parent 443d0293f6
commit ad0fe38ba7
17 changed files with 664 additions and 519 deletions

View File

@@ -0,0 +1,261 @@
import Foundation
import UIKit
import Display
import Accelerate
public final class AvatarBadgeView: UIImageView {
enum OriginalContent: Equatable {
case color(UIColor)
case image(UIImage)
static func ==(lhs: OriginalContent, rhs: OriginalContent) -> Bool {
switch lhs {
case let .color(color):
if case .color(color) = rhs {
return true
} else {
return false
}
case let .image(lhsImage):
if case let .image(rhsImage) = rhs {
return lhsImage === rhsImage
} else {
return false
}
}
}
}
private struct Parameters: Equatable {
var size: CGSize
var text: String
}
private var originalContent: OriginalContent?
private var parameters: Parameters?
private var hasContent: Bool = false
override public init(frame: CGRect) {
super.init(frame: frame)
}
required init?(coder: NSCoder) {
fatalError("init(coder:) has not been implemented")
}
func update(content: OriginalContent) {
if self.originalContent != content || !self.hasContent {
self.originalContent = content
self.update()
}
}
public func update(size: CGSize, text: String) {
let parameters = Parameters(size: size, text: text)
if self.parameters != parameters || !self.hasContent {
self.parameters = parameters
self.update()
}
}
private func update() {
guard let originalContent = self.originalContent, let parameters = self.parameters else {
return
}
self.hasContent = true
let blurredWidth = 16
let blurredHeight = 16
guard let blurredContext = DrawingContext(size: CGSize(width: CGFloat(blurredWidth), height: CGFloat(blurredHeight)), scale: 1.0, opaque: true) else {
return
}
let blurredSize = CGSize(width: CGFloat(blurredWidth), height: CGFloat(blurredHeight))
blurredContext.withContext { c in
switch originalContent {
case let .color(color):
c.setFillColor(color.cgColor)
c.fill(CGRect(origin: CGPoint(), size: blurredSize))
case let .image(image):
c.setFillColor(UIColor.black.cgColor)
c.fill(CGRect(origin: CGPoint(), size: blurredSize))
c.scaleBy(x: blurredSize.width / parameters.size.width, y: blurredSize.height / parameters.size.height)
let offsetFactor: CGFloat = 1.0 - 0.6
let imageFrame = CGRect(origin: CGPoint(x: parameters.size.width - image.size.width + offsetFactor * parameters.size.width, y: parameters.size.height - image.size.height + offsetFactor * parameters.size.height), size: image.size)
UIGraphicsPushContext(c)
image.draw(in: imageFrame)
UIGraphicsPopContext()
}
}
var rSum: Int64 = 0
var gSum: Int64 = 0
var bSum: Int64 = 0
for y in 0 ..< blurredHeight {
let row = blurredContext.bytes.assumingMemoryBound(to: UInt8.self).advanced(by: y * blurredContext.bytesPerRow)
for x in 0 ..< blurredWidth {
let pixel = row.advanced(by: x * 4)
bSum += Int64(pixel.advanced(by: 0).pointee)
gSum += Int64(pixel.advanced(by: 1).pointee)
rSum += Int64(pixel.advanced(by: 2).pointee)
}
}
let colorNorm = CGFloat(blurredWidth * blurredHeight)
let invColorNorm: CGFloat = 1.0 / (255.0 * colorNorm)
let aR = CGFloat(rSum) * invColorNorm
let aG = CGFloat(gSum) * invColorNorm
let aB = CGFloat(bSum) * invColorNorm
let luminance: CGFloat = 0.299 * aR + 0.587 * aG + 0.114 * aB
let isLightImage = luminance > 0.9
var brightness: CGFloat = 1.0
if isLightImage {
brightness = 0.99
} else {
brightness = 0.94
}
var destinationBuffer = vImage_Buffer()
destinationBuffer.width = UInt(blurredWidth)
destinationBuffer.height = UInt(blurredHeight)
destinationBuffer.data = blurredContext.bytes
destinationBuffer.rowBytes = blurredContext.bytesPerRow
vImageBoxConvolve_ARGB8888(
&destinationBuffer,
&destinationBuffer,
nil,
0, 0,
UInt32(15),
UInt32(15),
nil,
vImage_Flags(kvImageTruncateKernel | kvImageDoNotTile)
)
let divisor: Int32 = 0x1000
let rwgt: CGFloat = 0.3086
let gwgt: CGFloat = 0.6094
let bwgt: CGFloat = 0.0820
let adjustSaturation: CGFloat = 1.7
let a = (1.0 - adjustSaturation) * rwgt + adjustSaturation
let b = (1.0 - adjustSaturation) * rwgt
let c = (1.0 - adjustSaturation) * rwgt
let d = (1.0 - adjustSaturation) * gwgt
let e = (1.0 - adjustSaturation) * gwgt + adjustSaturation
let f = (1.0 - adjustSaturation) * gwgt
let g = (1.0 - adjustSaturation) * bwgt
let h = (1.0 - adjustSaturation) * bwgt
let i = (1.0 - adjustSaturation) * bwgt + adjustSaturation
let satMatrix: [CGFloat] = [
a, b, c, 0,
d, e, f, 0,
g, h, i, 0,
0, 0, 0, 1
]
let brightnessMatrix: [CGFloat] = [
brightness, 0, 0, 0,
0, brightness, 0, 0,
0, 0, brightness, 0,
0, 0, 0, 1
]
func matrixMul(a: [CGFloat], b: [CGFloat], result: inout [CGFloat]) {
for i in 0 ..< 4 {
for j in 0 ..< 4 {
var sum: CGFloat = 0.0
for k in 0 ..< 4 {
sum += a[i + k * 4] * b[k + j * 4]
}
result[i + j * 4] = sum
}
}
}
var resultMatrix = Array<CGFloat>(repeating: 0.0, count: 4 * 4)
matrixMul(a: satMatrix, b: brightnessMatrix, result: &resultMatrix)
var matrix: [Int16] = resultMatrix.map { value in
return Int16(value * CGFloat(divisor))
}
vImageMatrixMultiply_ARGB8888(&destinationBuffer, &destinationBuffer, &matrix, divisor, nil, nil, vImage_Flags(kvImageDoNotTile))
guard let blurredImage = blurredContext.generateImage() else {
return
}
self.image = generateImage(parameters.size, rotatedContext: { size, context in
UIGraphicsPushContext(context)
context.clear(CGRect(origin: CGPoint(), size: size))
context.setBlendMode(.copy)
context.setFillColor(UIColor.black.cgColor)
context.fillEllipse(in: CGRect(origin: CGPoint(), size: size))
blurredImage.draw(in: CGRect(origin: CGPoint(), size: size), blendMode: .sourceIn, alpha: 1.0)
context.setBlendMode(.normal)
let textColor: UIColor
if isLightImage {
textColor = UIColor(white: 0.7, alpha: 1.0)
} else {
textColor = .white
}
var fontSize: CGFloat = floor(parameters.size.height * 0.48)
while true {
let string = NSAttributedString(string: parameters.text, font: Font.bold(fontSize), textColor: textColor)
let stringBounds = string.boundingRect(with: CGSize(width: 100.0, height: 100.0), options: .usesLineFragmentOrigin, context: nil)
if stringBounds.width <= size.width - 5.0 * 2.0 || fontSize <= 2.0 {
string.draw(at: CGPoint(x: stringBounds.minX + floorToScreenPixels((size.width - stringBounds.width) / 2.0), y: stringBounds.minY + floorToScreenPixels((size.height - stringBounds.height) / 2.0)))
break
} else {
fontSize -= 1.0
}
}
let lineWidth: CGFloat = 1.5
let lineInset: CGFloat = 2.0
let lineRadius: CGFloat = size.width * 0.5 - lineInset - lineWidth * 0.5
context.setLineWidth(lineWidth)
context.setStrokeColor(textColor.cgColor)
context.setLineCap(.round)
context.addArc(center: CGPoint(x: size.width * 0.5, y: size.height * 0.5), radius: lineRadius, startAngle: CGFloat.pi * 0.5, endAngle: -CGFloat.pi * 0.5, clockwise: false)
context.strokePath()
let sectionAngle: CGFloat = CGFloat.pi / 11.0
for i in 0 ..< 10 {
if i % 2 == 0 {
continue
}
let startAngle = CGFloat.pi * 0.5 - CGFloat(i) * sectionAngle - sectionAngle * 0.15
let endAngle = startAngle - sectionAngle * 0.75
context.addArc(center: CGPoint(x: size.width * 0.5, y: size.height * 0.5), radius: lineRadius, startAngle: startAngle, endAngle: endAngle, clockwise: true)
context.strokePath()
}
/*if isLightImage {
context.setLineWidth(UIScreenPixel)
context.setStrokeColor(textColor.withMultipliedAlpha(1.0).cgColor)
context.strokeEllipse(in: CGRect(origin: CGPoint(), size: size).insetBy(dx: UIScreenPixel * 0.5, dy: UIScreenPixel * 0.5))
}*/
UIGraphicsPopContext()
})
}
}

View File

@@ -59,14 +59,6 @@ private class AvatarNodeParameters: NSObject {
}
}
private let grayscaleColors: [UIColor] = [
UIColor(rgb: 0xb1b1b1), UIColor(rgb: 0xcdcdcd)
]
private let savedMessagesColors: [UIColor] = [
UIColor(rgb: 0x2a9ef1), UIColor(rgb: 0x72d5fd)
]
private func calculateColors(explicitColorIndex: Int?, peerId: EnginePeer.Id?, icon: AvatarNodeIcon, theme: PresentationTheme?) -> [UIColor] {
let colorIndex: Int
if let explicitColorIndex = explicitColorIndex {
@@ -86,13 +78,13 @@ private func calculateColors(explicitColorIndex: Int?, peerId: EnginePeer.Id?, i
let colors: [UIColor]
if icon != .none {
if case .deletedIcon = icon {
colors = grayscaleColors
colors = AvatarNode.grayscaleColors
} else if case .phoneIcon = icon {
colors = grayscaleColors
colors = AvatarNode.grayscaleColors
} else if case .savedMessagesIcon = icon {
colors = savedMessagesColors
colors = AvatarNode.savedMessagesColors
} else if case .repliesIcon = icon {
colors = savedMessagesColors
colors = AvatarNode.savedMessagesColors
} else if case .editAvatarIcon = icon, let theme {
colors = [theme.list.itemAccentColor.withAlphaComponent(0.1), theme.list.itemAccentColor.withAlphaComponent(0.1)]
} else if case let .archivedChatsIcon(hiddenByDefault) = icon, let theme = theme {
@@ -104,14 +96,14 @@ private func calculateColors(explicitColorIndex: Int?, peerId: EnginePeer.Id?, i
}
colors = [backgroundColors.1, backgroundColors.0]
} else {
colors = grayscaleColors
colors = AvatarNode.grayscaleColors
}
} else if colorIndex == -1 {
if let theme {
let backgroundColors = theme.chatList.unpinnedArchiveAvatarColor.backgroundColors.colors
colors = [backgroundColors.1, backgroundColors.0]
} else {
colors = grayscaleColors
colors = AvatarNode.grayscaleColors
}
} else {
colors = AvatarNode.gradientColors[colorIndex % AvatarNode.gradientColors.count]
@@ -224,6 +216,14 @@ public final class AvatarNode: ASDisplayNode {
[UIColor(rgb: 0xd669ed), UIColor(rgb: 0xe0a2f3)],
]
static let grayscaleColors: [UIColor] = [
UIColor(rgb: 0xb1b1b1), UIColor(rgb: 0xcdcdcd)
]
static let savedMessagesColors: [UIColor] = [
UIColor(rgb: 0x2a9ef1), UIColor(rgb: 0x72d5fd)
]
public var font: UIFont {
didSet {
if oldValue.pointSize != font.pointSize {
@@ -390,7 +390,19 @@ public final class AvatarNode: ASDisplayNode {
self.imageNode.isHidden = true
}
public func setPeer(context genericContext: AccountContext, account: Account? = nil, theme: PresentationTheme, peer: EnginePeer?, authorOfMessage: MessageReference? = nil, overrideImage: AvatarNodeImageOverride? = nil, emptyColor: UIColor? = nil, clipStyle: AvatarNodeClipStyle = .round, synchronousLoad: Bool = false, displayDimensions: CGSize = CGSize(width: 60.0, height: 60.0), storeUnrounded: Bool = false) {
public func setPeer(
context genericContext: AccountContext,
account: Account? = nil,
theme: PresentationTheme,
peer: EnginePeer?,
authorOfMessage: MessageReference? = nil,
overrideImage: AvatarNodeImageOverride? = nil,
emptyColor: UIColor? = nil,
clipStyle: AvatarNodeClipStyle = .round,
synchronousLoad: Bool = false,
displayDimensions: CGSize = CGSize(width: 60.0, height: 60.0),
storeUnrounded: Bool = false
) {
var synchronousLoad = synchronousLoad
var representation: TelegramMediaImageRepresentation?
var icon = AvatarNodeIcon.none
@@ -688,385 +700,3 @@ public final class AvatarNode: ASDisplayNode {
}
}
}
public func drawPeerAvatarLetters(context: CGContext, size: CGSize, round: Bool = true, font: UIFont, letters: [String], peerId: EnginePeer.Id) {
if round {
context.beginPath()
context.addEllipse(in: CGRect(x: 0.0, y: 0.0, width: size.width, height: size.height))
context.clip()
}
let colorIndex: Int
if peerId.namespace == .max {
colorIndex = -1
} else {
colorIndex = Int(clamping: abs(peerId.id._internalGetInt64Value()))
}
let colorsArray: NSArray
if colorIndex == -1 {
colorsArray = grayscaleColors.map(\.cgColor) as NSArray
} else {
colorsArray = AvatarNode.gradientColors[colorIndex % AvatarNode.gradientColors.count].map(\.cgColor) as NSArray
}
var locations: [CGFloat] = [1.0, 0.0]
let colorSpace = CGColorSpaceCreateDeviceRGB()
let gradient = CGGradient(colorsSpace: colorSpace, colors: colorsArray, locations: &locations)!
context.drawLinearGradient(gradient, start: CGPoint(), end: CGPoint(x: 0.0, y: size.height), options: CGGradientDrawingOptions())
context.resetClip()
context.setBlendMode(.normal)
let string = letters.count == 0 ? "" : (letters[0] + (letters.count == 1 ? "" : letters[1]))
let attributedString = NSAttributedString(string: string, attributes: [NSAttributedString.Key.font: font, NSAttributedString.Key.foregroundColor: UIColor.white])
let line = CTLineCreateWithAttributedString(attributedString)
let lineBounds = CTLineGetBoundsWithOptions(line, .useGlyphPathBounds)
let lineOffset = CGPoint(x: string == "B" ? 1.0 : 0.0, y: 0.0)
let lineOrigin = CGPoint(x: floorToScreenPixels(-lineBounds.origin.x + (size.width - lineBounds.size.width) / 2.0) + lineOffset.x, y: floorToScreenPixels(-lineBounds.origin.y + (size.height - lineBounds.size.height) / 2.0))
context.translateBy(x: size.width / 2.0, y: size.height / 2.0)
context.scaleBy(x: 1.0, y: -1.0)
context.translateBy(x: -size.width / 2.0, y: -size.height / 2.0)
let textPosition = context.textPosition
context.translateBy(x: lineOrigin.x, y: lineOrigin.y)
CTLineDraw(line, context)
context.translateBy(x: -lineOrigin.x, y: -lineOrigin.y)
context.textPosition = textPosition
}
public enum AvatarBackgroundColor {
case blue
case yellow
case green
case purple
case red
case violet
}
public func generateAvatarImage(size: CGSize, icon: UIImage?, iconScale: CGFloat = 1.0, cornerRadius: CGFloat? = nil, circleCorners: Bool = false, color: AvatarBackgroundColor) -> UIImage? {
return generateImage(size, rotatedContext: { size, context in
context.clear(CGRect(origin: CGPoint(), size: size))
context.beginPath()
if let cornerRadius {
if circleCorners {
let roundedRect = CGPath(roundedRect: CGRect(origin: .zero, size: size), cornerWidth: cornerRadius, cornerHeight: cornerRadius, transform: nil)
context.addPath(roundedRect)
} else {
let roundedRect = UIBezierPath(roundedRect: CGRect(origin: .zero, size: size), cornerRadius: cornerRadius)
context.addPath(roundedRect.cgPath)
}
} else {
context.addEllipse(in: CGRect(x: 0.0, y: 0.0, width: size.width, height: size.height))
}
context.clip()
let colorIndex: Int
switch color {
case .blue:
colorIndex = 5
case .yellow:
colorIndex = 1
case .green:
colorIndex = 3
case .purple:
colorIndex = 2
case .red:
colorIndex = 0
case .violet:
colorIndex = 6
}
let colorsArray: NSArray
if colorIndex == -1 {
colorsArray = grayscaleColors.map(\.cgColor) as NSArray
} else {
colorsArray = AvatarNode.gradientColors[colorIndex % AvatarNode.gradientColors.count].map(\.cgColor) as NSArray
}
var locations: [CGFloat] = [1.0, 0.0]
let colorSpace = CGColorSpaceCreateDeviceRGB()
let gradient = CGGradient(colorsSpace: colorSpace, colors: colorsArray, locations: &locations)!
context.drawLinearGradient(gradient, start: CGPoint(), end: CGPoint(x: 0.0, y: size.height), options: CGGradientDrawingOptions())
context.resetClip()
context.setBlendMode(.normal)
context.translateBy(x: size.width / 2.0, y: size.height / 2.0)
context.scaleBy(x: 1.0, y: -1.0)
context.translateBy(x: -size.width / 2.0, y: -size.height / 2.0)
if let icon = icon {
let iconSize = CGSize(width: icon.size.width * iconScale, height: icon.size.height * iconScale)
let iconFrame = CGRect(origin: CGPoint(x: floor((size.width - iconSize.width) / 2.0), y: floor((size.height - iconSize.height) / 2.0)), size: iconSize)
context.draw(icon.cgImage!, in: iconFrame)
}
})
}
public final class AvatarBadgeView: UIImageView {
enum OriginalContent: Equatable {
case color(UIColor)
case image(UIImage)
static func ==(lhs: OriginalContent, rhs: OriginalContent) -> Bool {
switch lhs {
case let .color(color):
if case .color(color) = rhs {
return true
} else {
return false
}
case let .image(lhsImage):
if case let .image(rhsImage) = rhs {
return lhsImage === rhsImage
} else {
return false
}
}
}
}
private struct Parameters: Equatable {
var size: CGSize
var text: String
}
private var originalContent: OriginalContent?
private var parameters: Parameters?
private var hasContent: Bool = false
override public init(frame: CGRect) {
super.init(frame: frame)
}
required init?(coder: NSCoder) {
fatalError("init(coder:) has not been implemented")
}
func update(content: OriginalContent) {
if self.originalContent != content || !self.hasContent {
self.originalContent = content
self.update()
}
}
public func update(size: CGSize, text: String) {
let parameters = Parameters(size: size, text: text)
if self.parameters != parameters || !self.hasContent {
self.parameters = parameters
self.update()
}
}
private func update() {
guard let originalContent = self.originalContent, let parameters = self.parameters else {
return
}
self.hasContent = true
let blurredWidth = 16
let blurredHeight = 16
guard let blurredContext = DrawingContext(size: CGSize(width: CGFloat(blurredWidth), height: CGFloat(blurredHeight)), scale: 1.0, opaque: true) else {
return
}
let blurredSize = CGSize(width: CGFloat(blurredWidth), height: CGFloat(blurredHeight))
blurredContext.withContext { c in
switch originalContent {
case let .color(color):
c.setFillColor(color.cgColor)
c.fill(CGRect(origin: CGPoint(), size: blurredSize))
case let .image(image):
c.setFillColor(UIColor.black.cgColor)
c.fill(CGRect(origin: CGPoint(), size: blurredSize))
c.scaleBy(x: blurredSize.width / parameters.size.width, y: blurredSize.height / parameters.size.height)
let offsetFactor: CGFloat = 1.0 - 0.6
let imageFrame = CGRect(origin: CGPoint(x: parameters.size.width - image.size.width + offsetFactor * parameters.size.width, y: parameters.size.height - image.size.height + offsetFactor * parameters.size.height), size: image.size)
UIGraphicsPushContext(c)
image.draw(in: imageFrame)
UIGraphicsPopContext()
}
}
var rSum: Int64 = 0
var gSum: Int64 = 0
var bSum: Int64 = 0
for y in 0 ..< blurredHeight {
let row = blurredContext.bytes.assumingMemoryBound(to: UInt8.self).advanced(by: y * blurredContext.bytesPerRow)
for x in 0 ..< blurredWidth {
let pixel = row.advanced(by: x * 4)
bSum += Int64(pixel.advanced(by: 0).pointee)
gSum += Int64(pixel.advanced(by: 1).pointee)
rSum += Int64(pixel.advanced(by: 2).pointee)
}
}
let colorNorm = CGFloat(blurredWidth * blurredHeight)
let invColorNorm: CGFloat = 1.0 / (255.0 * colorNorm)
let aR = CGFloat(rSum) * invColorNorm
let aG = CGFloat(gSum) * invColorNorm
let aB = CGFloat(bSum) * invColorNorm
let luminance: CGFloat = 0.299 * aR + 0.587 * aG + 0.114 * aB
let isLightImage = luminance > 0.9
var brightness: CGFloat = 1.0
if isLightImage {
brightness = 0.99
} else {
brightness = 0.94
}
var destinationBuffer = vImage_Buffer()
destinationBuffer.width = UInt(blurredWidth)
destinationBuffer.height = UInt(blurredHeight)
destinationBuffer.data = blurredContext.bytes
destinationBuffer.rowBytes = blurredContext.bytesPerRow
vImageBoxConvolve_ARGB8888(
&destinationBuffer,
&destinationBuffer,
nil,
0, 0,
UInt32(15),
UInt32(15),
nil,
vImage_Flags(kvImageTruncateKernel | kvImageDoNotTile)
)
let divisor: Int32 = 0x1000
let rwgt: CGFloat = 0.3086
let gwgt: CGFloat = 0.6094
let bwgt: CGFloat = 0.0820
let adjustSaturation: CGFloat = 1.7
let a = (1.0 - adjustSaturation) * rwgt + adjustSaturation
let b = (1.0 - adjustSaturation) * rwgt
let c = (1.0 - adjustSaturation) * rwgt
let d = (1.0 - adjustSaturation) * gwgt
let e = (1.0 - adjustSaturation) * gwgt + adjustSaturation
let f = (1.0 - adjustSaturation) * gwgt
let g = (1.0 - adjustSaturation) * bwgt
let h = (1.0 - adjustSaturation) * bwgt
let i = (1.0 - adjustSaturation) * bwgt + adjustSaturation
let satMatrix: [CGFloat] = [
a, b, c, 0,
d, e, f, 0,
g, h, i, 0,
0, 0, 0, 1
]
let brightnessMatrix: [CGFloat] = [
brightness, 0, 0, 0,
0, brightness, 0, 0,
0, 0, brightness, 0,
0, 0, 0, 1
]
func matrixMul(a: [CGFloat], b: [CGFloat], result: inout [CGFloat]) {
for i in 0 ..< 4 {
for j in 0 ..< 4 {
var sum: CGFloat = 0.0
for k in 0 ..< 4 {
sum += a[i + k * 4] * b[k + j * 4]
}
result[i + j * 4] = sum
}
}
}
var resultMatrix = Array<CGFloat>(repeating: 0.0, count: 4 * 4)
matrixMul(a: satMatrix, b: brightnessMatrix, result: &resultMatrix)
var matrix: [Int16] = resultMatrix.map { value in
return Int16(value * CGFloat(divisor))
}
vImageMatrixMultiply_ARGB8888(&destinationBuffer, &destinationBuffer, &matrix, divisor, nil, nil, vImage_Flags(kvImageDoNotTile))
guard let blurredImage = blurredContext.generateImage() else {
return
}
self.image = generateImage(parameters.size, rotatedContext: { size, context in
UIGraphicsPushContext(context)
context.clear(CGRect(origin: CGPoint(), size: size))
context.setBlendMode(.copy)
context.setFillColor(UIColor.black.cgColor)
context.fillEllipse(in: CGRect(origin: CGPoint(), size: size))
blurredImage.draw(in: CGRect(origin: CGPoint(), size: size), blendMode: .sourceIn, alpha: 1.0)
context.setBlendMode(.normal)
let textColor: UIColor
if isLightImage {
textColor = UIColor(white: 0.7, alpha: 1.0)
} else {
textColor = .white
}
var fontSize: CGFloat = floor(parameters.size.height * 0.48)
while true {
let string = NSAttributedString(string: parameters.text, font: Font.bold(fontSize), textColor: textColor)
let stringBounds = string.boundingRect(with: CGSize(width: 100.0, height: 100.0), options: .usesLineFragmentOrigin, context: nil)
if stringBounds.width <= size.width - 5.0 * 2.0 || fontSize <= 2.0 {
string.draw(at: CGPoint(x: stringBounds.minX + floorToScreenPixels((size.width - stringBounds.width) / 2.0), y: stringBounds.minY + floorToScreenPixels((size.height - stringBounds.height) / 2.0)))
break
} else {
fontSize -= 1.0
}
}
let lineWidth: CGFloat = 1.5
let lineInset: CGFloat = 2.0
let lineRadius: CGFloat = size.width * 0.5 - lineInset - lineWidth * 0.5
context.setLineWidth(lineWidth)
context.setStrokeColor(textColor.cgColor)
context.setLineCap(.round)
context.addArc(center: CGPoint(x: size.width * 0.5, y: size.height * 0.5), radius: lineRadius, startAngle: CGFloat.pi * 0.5, endAngle: -CGFloat.pi * 0.5, clockwise: false)
context.strokePath()
let sectionAngle: CGFloat = CGFloat.pi / 11.0
for i in 0 ..< 10 {
if i % 2 == 0 {
continue
}
let startAngle = CGFloat.pi * 0.5 - CGFloat(i) * sectionAngle - sectionAngle * 0.15
let endAngle = startAngle - sectionAngle * 0.75
context.addArc(center: CGPoint(x: size.width * 0.5, y: size.height * 0.5), radius: lineRadius, startAngle: startAngle, endAngle: endAngle, clockwise: true)
context.strokePath()
}
/*if isLightImage {
context.setLineWidth(UIScreenPixel)
context.setStrokeColor(textColor.withMultipliedAlpha(1.0).cgColor)
context.strokeEllipse(in: CGRect(origin: CGPoint(), size: size).insetBy(dx: UIScreenPixel * 0.5, dy: UIScreenPixel * 0.5))
}*/
UIGraphicsPopContext()
})
}
}

View File

@@ -307,3 +307,129 @@ public func peerAvatarImage(account: Account, peerReference: PeerReference?, aut
return nil
}
}
public func drawPeerAvatarLetters(context: CGContext, size: CGSize, round: Bool = true, font: UIFont, letters: [String], peerId: EnginePeer.Id) {
if round {
context.beginPath()
context.addEllipse(in: CGRect(x: 0.0, y: 0.0, width: size.width, height: size.height))
context.clip()
}
let colorIndex: Int
if peerId.namespace == .max {
colorIndex = -1
} else {
colorIndex = Int(clamping: abs(peerId.id._internalGetInt64Value()))
}
let colorsArray: NSArray
if colorIndex == -1 {
colorsArray = AvatarNode.grayscaleColors.map(\.cgColor) as NSArray
} else {
colorsArray = AvatarNode.gradientColors[colorIndex % AvatarNode.gradientColors.count].map(\.cgColor) as NSArray
}
var locations: [CGFloat] = [1.0, 0.0]
let colorSpace = CGColorSpaceCreateDeviceRGB()
let gradient = CGGradient(colorsSpace: colorSpace, colors: colorsArray, locations: &locations)!
context.drawLinearGradient(gradient, start: CGPoint(), end: CGPoint(x: 0.0, y: size.height), options: CGGradientDrawingOptions())
context.resetClip()
context.setBlendMode(.normal)
let string = letters.count == 0 ? "" : (letters[0] + (letters.count == 1 ? "" : letters[1]))
let attributedString = NSAttributedString(string: string, attributes: [NSAttributedString.Key.font: font, NSAttributedString.Key.foregroundColor: UIColor.white])
let line = CTLineCreateWithAttributedString(attributedString)
let lineBounds = CTLineGetBoundsWithOptions(line, .useGlyphPathBounds)
let lineOffset = CGPoint(x: string == "B" ? 1.0 : 0.0, y: 0.0)
let lineOrigin = CGPoint(x: floorToScreenPixels(-lineBounds.origin.x + (size.width - lineBounds.size.width) / 2.0) + lineOffset.x, y: floorToScreenPixels(-lineBounds.origin.y + (size.height - lineBounds.size.height) / 2.0))
context.translateBy(x: size.width / 2.0, y: size.height / 2.0)
context.scaleBy(x: 1.0, y: -1.0)
context.translateBy(x: -size.width / 2.0, y: -size.height / 2.0)
let textPosition = context.textPosition
context.translateBy(x: lineOrigin.x, y: lineOrigin.y)
CTLineDraw(line, context)
context.translateBy(x: -lineOrigin.x, y: -lineOrigin.y)
context.textPosition = textPosition
}
public enum AvatarBackgroundColor {
case blue
case yellow
case green
case purple
case red
case violet
}
public func generateAvatarImage(size: CGSize, icon: UIImage?, iconScale: CGFloat = 1.0, cornerRadius: CGFloat? = nil, circleCorners: Bool = false, color: AvatarBackgroundColor) -> UIImage? {
return generateImage(size, rotatedContext: { size, context in
context.clear(CGRect(origin: CGPoint(), size: size))
context.beginPath()
if let cornerRadius {
if circleCorners {
let roundedRect = CGPath(roundedRect: CGRect(origin: .zero, size: size), cornerWidth: cornerRadius, cornerHeight: cornerRadius, transform: nil)
context.addPath(roundedRect)
} else {
let roundedRect = UIBezierPath(roundedRect: CGRect(origin: .zero, size: size), cornerRadius: cornerRadius)
context.addPath(roundedRect.cgPath)
}
} else {
context.addEllipse(in: CGRect(x: 0.0, y: 0.0, width: size.width, height: size.height))
}
context.clip()
let colorIndex: Int
switch color {
case .blue:
colorIndex = 5
case .yellow:
colorIndex = 1
case .green:
colorIndex = 3
case .purple:
colorIndex = 2
case .red:
colorIndex = 0
case .violet:
colorIndex = 6
}
let colorsArray: NSArray
if colorIndex == -1 {
colorsArray = AvatarNode.grayscaleColors.map(\.cgColor) as NSArray
} else {
colorsArray = AvatarNode.gradientColors[colorIndex % AvatarNode.gradientColors.count].map(\.cgColor) as NSArray
}
var locations: [CGFloat] = [1.0, 0.0]
let colorSpace = CGColorSpaceCreateDeviceRGB()
let gradient = CGGradient(colorsSpace: colorSpace, colors: colorsArray, locations: &locations)!
context.drawLinearGradient(gradient, start: CGPoint(), end: CGPoint(x: 0.0, y: size.height), options: CGGradientDrawingOptions())
context.resetClip()
context.setBlendMode(.normal)
context.translateBy(x: size.width / 2.0, y: size.height / 2.0)
context.scaleBy(x: 1.0, y: -1.0)
context.translateBy(x: -size.width / 2.0, y: -size.height / 2.0)
if let icon = icon {
let iconSize = CGSize(width: icon.size.width * iconScale, height: icon.size.height * iconScale)
let iconFrame = CGRect(origin: CGPoint(x: floor((size.width - iconSize.width) / 2.0), y: floor((size.height - iconSize.height) / 2.0)), size: iconSize)
context.draw(icon.cgImage!, in: iconFrame)
}
})
}

View File

@@ -50,10 +50,12 @@ func telegramMediaImageFromApiPhoto(_ photo: Api.Photo) -> TelegramMediaImage? {
resource = CloudPhotoSizeMediaResource(datacenterId: dcId, photoId: id, accessHash: accessHash, sizeSpec: type, size: Int64(size), fileReference: fileReference.makeData())
videoRepresentations.append(TelegramMediaImage.VideoRepresentation(dimensions: PixelDimensions(width: w, height: h), resource: resource, startTimestamp: videoStartTs))
case let .videoSizeEmojiMarkup(emojiId, backgroundColors):
emojiMarkup = TelegramMediaImage.EmojiMarkup(fileId: emojiId, backgroundColors: backgroundColors)
case .videoSizeStickerMarkup:
break
case let .videoSizeEmojiMarkup(fileId, backgroundColors):
emojiMarkup = TelegramMediaImage.EmojiMarkup(content: .emoji(fileId: fileId), backgroundColors: backgroundColors)
case let .videoSizeStickerMarkup(stickerSet, fileId, backgroundColors):
if let packReference = StickerPackReference(apiInputSet: stickerSet) {
emojiMarkup = TelegramMediaImage.EmojiMarkup(content: .sticker(packReference: packReference, fileId: fileId), backgroundColors: backgroundColors)
}
}
}
}

View File

@@ -131,26 +131,42 @@ public final class TelegramMediaImage: Media, Equatable, Codable {
}
public final class EmojiMarkup: Equatable, PostboxCoding {
public let fileId: Int64
public enum Content: Equatable {
case emoji(fileId: Int64)
case sticker(packReference: StickerPackReference, fileId: Int64)
}
public let content: Content
public let backgroundColors: [Int32]
public init(fileId: Int64, backgroundColors: [Int32]) {
self.fileId = fileId
public init(content: Content, backgroundColors: [Int32]) {
self.content = content
self.backgroundColors = backgroundColors
}
public init(decoder: PostboxDecoder) {
self.fileId = decoder.decodeInt64ForKey("f", orElse: 0)
if let fileId = decoder.decodeOptionalInt64ForKey("f") {
self.content = .emoji(fileId: fileId)
} else if let packReference = decoder.decodeObjectForKey("p", decoder: { StickerPackReference(decoder: $0) }) as? StickerPackReference {
self.content = .sticker(packReference: packReference, fileId: decoder.decodeInt64ForKey("sf", orElse: 0))
} else {
fatalError()
}
self.backgroundColors = decoder.decodeInt32ArrayForKey("b")
}
public func encode(_ encoder: PostboxEncoder) {
encoder.encodeInt64(self.fileId, forKey: "f")
switch self.content {
case let .emoji(fileId):
encoder.encodeInt64(fileId, forKey: "f")
case let .sticker(packReference, fileId):
encoder.encodeObject(packReference, forKey: "p")
encoder.encodeInt64(fileId, forKey: "sf")
}
encoder.encodeInt32Array(self.backgroundColors, forKey: "b")
}
public static func ==(lhs: EmojiMarkup, rhs: EmojiMarkup) -> Bool {
if lhs.fileId != rhs.fileId {
if lhs.content != rhs.content {
return false
}
if lhs.backgroundColors != rhs.backgroundColors {

View File

@@ -900,11 +900,11 @@ public func makeDefaultDayPresentationTheme(extendingThemeReference: Presentatio
panelContentControlVibrantOverlayColor: UIColor(white: 0.85, alpha: 0.65),
panelContentControlVibrantSelectionColor: UIColor(white: 0.85, alpha: 0.1),
panelContentControlOpaqueOverlayColor: UIColor(white: 0.0, alpha: 0.2),
panelContentControlOpaqueSelectionColor: UIColor(white: 0.0, alpha: 0.1),
panelContentControlOpaqueSelectionColor: UIColor(rgb: 0x000000, alpha: 0.06),
panelContentVibrantSearchOverlayColor: UIColor(white: 0.6, alpha: 0.55),
panelContentVibrantSearchOverlaySelectedColor: UIColor(white: 0.4, alpha: 0.6),
panelContentVibrantSearchOverlayHighlightColor: UIColor(white: 0.2, alpha: 0.02),
panelContentOpaqueSearchOverlayColor: UIColor(white: 0.0, alpha: 0.3),
panelContentOpaqueSearchOverlayColor: UIColor(rgb: 0x8e8e93),
panelContentOpaqueSearchOverlaySelectedColor: UIColor(white: 0.0, alpha: 0.4),
panelContentOpaqueSearchOverlayHighlightColor: UIColor(white: 0.0, alpha: 0.1),
stickersBackgroundColor: UIColor(rgb: 0xe8ebf0),

View File

@@ -77,21 +77,18 @@ final class AvatarEditorScreenComponent: Component {
let context: AccountContext
let ready: Promise<Bool>
let peerType: AvatarEditorScreen.PeerType
let initialFileId: Int64?
let initialBackgroundColors: [Int32]?
let markup: TelegramMediaImage.EmojiMarkup?
init(
context: AccountContext,
ready: Promise<Bool>,
peerType: AvatarEditorScreen.PeerType,
initialFileId: Int64?,
initialBackgroundColors: [Int32]?
markup: TelegramMediaImage.EmojiMarkup?
) {
self.context = context
self.ready = ready
self.peerType = peerType
self.initialFileId = initialFileId
self.initialBackgroundColors = initialBackgroundColors
self.markup = markup
}
static func ==(lhs: AvatarEditorScreenComponent, rhs: AvatarEditorScreenComponent) -> Bool {
@@ -101,10 +98,7 @@ final class AvatarEditorScreenComponent: Component {
if lhs.peerType != rhs.peerType {
return false
}
if lhs.initialFileId != rhs.initialFileId {
return false
}
if lhs.initialBackgroundColors != rhs.initialBackgroundColors {
if lhs.markup != rhs.markup {
return false
}
return true
@@ -127,7 +121,9 @@ final class AvatarEditorScreenComponent: Component {
var isSearchActive: Bool = false
init(context: AccountContext, ready: Promise<Bool>, initialFileId: Int64?, initialBackgroundColors: [Int32]?) {
private var fileDisposable: Disposable?
init(context: AccountContext, ready: Promise<Bool>, markup: TelegramMediaImage.EmojiMarkup?) {
self.context = context
self.ready = ready
@@ -136,15 +132,33 @@ final class AvatarEditorScreenComponent: Component {
super.init()
if let initialFileId, let initialBackgroundColors {
let _ = (context.engine.stickers.resolveInlineStickers(fileIds: [initialFileId])
if let markup {
switch markup.content {
case let .emoji(fileId):
self.fileDisposable = (context.engine.stickers.resolveInlineStickers(fileIds: [fileId])
|> deliverOnMainQueue).start(next: { [weak self] files in
if let strongSelf = self, let file = files.values.first {
strongSelf.selectedFile = file
strongSelf.updated(transition: .immediate)
}
})
self.selectedBackground = .gradient(initialBackgroundColors.map { UInt32(bitPattern: $0) })
case let .sticker(packReference, fileId):
self.fileDisposable = (context.engine.stickers.loadedStickerPack(reference: packReference, forceActualized: false)
|> map { pack -> TelegramMediaFile? in
if case let .result(_, items, _) = pack, let item = items.first(where: { $0.file.fileId.id == fileId }) {
return item.file
}
return nil
}
|> deliverOnMainQueue).start(next: { [weak self] file in
if let strongSelf = self, let file {
strongSelf.selectedFile = file
strongSelf.updated(transition: .immediate)
}
})
}
self.selectedBackground = .gradient(markup.backgroundColors.map { UInt32(bitPattern: $0) })
self.previousColor = self.selectedBackground
} else {
self.selectedBackground = defaultBackgrounds.first!
@@ -152,14 +166,17 @@ final class AvatarEditorScreenComponent: Component {
self.previousColor = self.selectedBackground
}
deinit {
self.fileDisposable?.dispose()
}
}
func makeState() -> State {
return State(
context: self.context,
ready: self.ready,
initialFileId: self.initialFileId,
initialBackgroundColors: self.initialBackgroundColors
markup: self.markup
)
}
@@ -244,8 +261,6 @@ final class AvatarEditorScreenComponent: Component {
if wasEmpty && self.state?.selectedFile == nil {
self.state?.selectedFile = data.emoji.panelItemGroups.first?.items.first?.itemFile
}
self.state?.updated(transition: .immediate)
self.state?.ready.set(.single(true))
let updateSearchQuery: (EmojiPagerContentComponent.SearchQuery?) -> Void = { [weak self] query in
guard let strongSelf = self, let context = strongSelf.state?.context else {
@@ -592,7 +607,7 @@ final class AvatarEditorScreenComponent: Component {
customLayout: nil,
externalBackground: nil,
externalExpansionView: nil,
useOpaqueTheme: false,
useOpaqueTheme: true,
hideBackground: true
)
@@ -716,9 +731,12 @@ final class AvatarEditorScreenComponent: Component {
customLayout: nil,
externalBackground: nil,
externalExpansionView: nil,
useOpaqueTheme: false,
useOpaqueTheme: true,
hideBackground: true
)
self.state?.updated(transition: .immediate)
self.state?.ready.set(.single(true))
}
private var isExpanded = false
@@ -1405,12 +1423,12 @@ public final class AvatarEditorScreen: ViewControllerComponentContainer {
return signal
}
public init(context: AccountContext, inputData: Signal<AvatarKeyboardInputData, NoError>, peerType: PeerType, initialFileId: Int64?, initialBackgroundColors: [Int32]?) {
public init(context: AccountContext, inputData: Signal<AvatarKeyboardInputData, NoError>, peerType: PeerType, markup: TelegramMediaImage.EmojiMarkup?) {
self.context = context
self.inputData = inputData
let componentReady = Promise<Bool>()
super.init(context: context, component: AvatarEditorScreenComponent(context: context, ready: componentReady, peerType: peerType, initialFileId: initialFileId, initialBackgroundColors: initialBackgroundColors), navigationBarAppearance: .transparent)
super.init(context: context, component: AvatarEditorScreenComponent(context: context, ready: componentReady, peerType: peerType, markup: markup), navigationBarAppearance: .transparent)
self.navigationPresentation = .modal
self.readyValue.set(componentReady.get() |> timeout(0.3, queue: .mainQueue(), alternate: .single(true)))

View File

@@ -623,6 +623,8 @@ private final class WallpaperColorHueSaturationNode: ASDisplayNode {
self.initialTouchLocation = touchLocation
self.previousTouchLocation = nil
}
self.view.window?.endEditing(true)
}
override func touchesMoved(_ touches: Set<UITouch>, with event: UIEvent?) {
@@ -944,6 +946,8 @@ final class WallpaperColorPickerNode: ASDisplayNode {
self.colorChanged?(self.color)
}
}
self.view.window?.endEditing(true)
}
}

View File

@@ -4157,7 +4157,7 @@ public final class ChatControllerImpl: TelegramBaseController, ChatController, G
createNewGroupImpl?()
}))
let presentConfirmation: (String, @escaping () -> Void) -> Void = { [weak self] peerName, completion in
let presentConfirmation: (String, Bool, @escaping () -> Void) -> Void = { [weak self] peerName, isChannel, completion in
guard let strongSelf = self else {
return
}
@@ -4190,7 +4190,7 @@ public final class ChatControllerImpl: TelegramBaseController, ChatController, G
}
attributedText = formattedString
} else {
let stringWithRanges = strongSelf.presentationData.strings.RequestPeer_SelectionConfirmationInviteWithRightsText(botName, peerName, stringForAdminRights(strings: strongSelf.presentationData.strings, adminRights: botAdminRights))
let stringWithRanges = strongSelf.presentationData.strings.RequestPeer_SelectionConfirmationInviteWithRightsText(botName, peerName, stringForAdminRights(strings: strongSelf.presentationData.strings, adminRights: botAdminRights, isChannel: isChannel))
let formattedString = NSMutableAttributedString(string: stringWithRanges.string, font: Font.regular(13.0), textColor: theme.primaryColor, paragraphAlignment: .center)
for range in stringWithRanges.ranges.prefix(2) {
formattedString.addAttribute(.font, value: Font.semibold(13.0), range: range.range)
@@ -4218,9 +4218,12 @@ public final class ChatControllerImpl: TelegramBaseController, ChatController, G
guard let strongSelf = self else {
return
}
var isChannel = false
if let channel = peer as? TelegramChannel, case .broadcast = channel.info {
isChannel = true
}
let peerName = EnginePeer(peer).displayTitle(strings: strongSelf.presentationData.strings, displayOrder: strongSelf.presentationData.nameDisplayOrder)
presentConfirmation(peerName, {
presentConfirmation(peerName, isChannel, {
let _ = context.engine.peers.sendBotRequestedPeer(messageId: messageId, buttonId: buttonId, requestedPeerId: peer.id).start()
controller?.dismiss()
})
@@ -4231,7 +4234,7 @@ public final class ChatControllerImpl: TelegramBaseController, ChatController, G
break
case let .group(group):
let createGroupController = createGroupControllerImpl(context: context, peerIds: peerId.flatMap { [$0] } ?? [], mode: .requestPeer(group), willComplete: { peerName, complete in
presentConfirmation(peerName, {
presentConfirmation(peerName, false, {
complete()
})
}, completion: { peerId, dismiss in
@@ -4242,7 +4245,7 @@ public final class ChatControllerImpl: TelegramBaseController, ChatController, G
controller?.replace(with: createGroupController)
case let .channel(channel):
let createChannelController = createChannelController(context: context, mode: .requestPeer(channel), willComplete: { peerName, complete in
presentConfirmation(peerName, {
presentConfirmation(peerName, true, {
complete()
})
}, completion: { peerId, dismiss in
@@ -6752,6 +6755,7 @@ public final class ChatControllerImpl: TelegramBaseController, ChatController, G
return nil
}
}
|> distinctUntilChanged
} else {
return .single(nil)
}
@@ -10077,7 +10081,9 @@ public final class ChatControllerImpl: TelegramBaseController, ChatController, G
let _ = updateTranslationSettingsInteractively(accountManager: strongSelf.context.sharedContext.accountManager, { current in
var updated = current
if var ignoredLanguages = updated.ignoredLanguages {
if !ignoredLanguages.contains(langCode) {
ignoredLanguages.append(langCode)
}
updated.ignoredLanguages = ignoredLanguages
} else {
updated.ignoredLanguages = [strongSelf.presentationData.strings.baseLanguageCode, langCode]

View File

@@ -2015,6 +2015,39 @@ public final class ChatHistoryListNode: ListView, ChatHistoryNode {
return
}
var messageIdsToTranslate: [MessageId] = []
if let translateToLanguage {
let extendedRange: Int = 2
var wideIndexRange = (historyView.filteredEntries.count - 1 - visible.lastIndex - extendedRange, historyView.filteredEntries.count - 1 - visible.firstIndex + extendedRange)
wideIndexRange = (max(0, min(historyView.filteredEntries.count - 1, wideIndexRange.0)), max(0, min(historyView.filteredEntries.count - 1, wideIndexRange.1)))
if wideIndexRange.0 > wideIndexRange.1 {
assert(false)
return
}
if wideIndexRange.0 <= wideIndexRange.1 {
for i in (wideIndexRange.0 ... wideIndexRange.1) {
switch historyView.filteredEntries[i] {
case let .MessageEntry(message, _, _, _, _, _):
if let translation = message.attributes.first(where: { $0 is TranslationMessageAttribute }) as? TranslationMessageAttribute, translation.toLang == translateToLanguage {
} else if !message.text.isEmpty {
messageIdsToTranslate.append(message.id)
}
case let .MessageGroupEntry(_, messages, _):
for (message, _, _, _, _) in messages {
if let translation = message.attributes.first(where: { $0 is TranslationMessageAttribute }) as? TranslationMessageAttribute, translation.toLang == translateToLanguage {
} else if !message.text.isEmpty {
messageIdsToTranslate.append(message.id)
}
}
default:
break
}
}
}
}
let readIndexRange = (0, historyView.filteredEntries.count - 1 - visible.firstIndex)
let toEarlierRange = (0, historyView.filteredEntries.count - 1 - visible.lastIndex - 1)
@@ -2027,7 +2060,6 @@ public final class ChatHistoryListNode: ListView, ChatHistoryNode {
var messageIdsWithUnseenPersonalMention: [MessageId] = []
var messageIdsWithUnseenReactions: [MessageId] = []
var messageIdsWithInactiveExtendedMedia = Set<MessageId>()
var messageIdsToTranslate: [MessageId] = []
var downloadableResourceIds: [(messageId: MessageId, resourceId: String)] = []
var allVisibleAnchorMessageIds: [(MessageId, Int)] = []
var visibleAdOpaqueIds: [Data] = []
@@ -2072,13 +2104,6 @@ public final class ChatHistoryListNode: ListView, ChatHistoryNode {
}
}
if let translateToLanguage {
if let translation = message.attributes.first(where: { $0 is TranslationMessageAttribute }) as? TranslationMessageAttribute, translation.toLang == translateToLanguage {
} else if !message.text.isEmpty {
messageIdsToTranslate.append(message.id)
}
}
for media in message.media {
if let _ = media as? TelegramMediaUnsupported {
contentRequiredValidation = true
@@ -2153,12 +2178,6 @@ public final class ChatHistoryListNode: ListView, ChatHistoryNode {
}
}
}
if let translateToLanguage {
if let translation = message.attributes.first(where: { $0 is TranslationMessageAttribute }) as? TranslationMessageAttribute, translation.toLang == translateToLanguage {
} else if !message.text.isEmpty {
messageIdsToTranslate.append(message.id)
}
}
for media in message.media {
if let telegramFile = media as? TelegramMediaFile {
downloadableResourceIds.append((message.id, telegramFile.resource.id.stringRepresentation))
@@ -2407,6 +2426,8 @@ public final class ChatHistoryListNode: ListView, ChatHistoryNode {
}
}
}
self.isTopReplyThreadMessageShown.set(isTopReplyThreadMessageShownValue)
self.updateTopVisibleMessageRange(topVisibleMessageRange)
let _ = self.visibleMessageRange.swap(topVisibleMessageRange.flatMap { range in
@@ -2414,7 +2435,6 @@ public final class ChatHistoryListNode: ListView, ChatHistoryNode {
})
if let loaded = displayedRange.visibleRange, let firstEntry = historyView.filteredEntries.first, let lastEntry = historyView.filteredEntries.last {
var matchesFirst = false
if loaded.firstIndex <= 5 {
var firstHasGroups = false

View File

@@ -446,7 +446,7 @@ final class ChatReportPeerTitlePanelNode: ChatTitleAccessoryPanelNode {
let buttonWidth = floor((width - maxInset * 2.0 - additionalRightInset) / CGFloat(self.buttons.count))
var nextButtonOrigin: CGFloat = maxInset
for (_, view) in self.buttons {
view.frame = CGRect(origin: CGPoint(x: nextButtonOrigin, y: 0.0), size: CGSize(width: buttonWidth, height: panelHeight))
view.frame = CGRect(origin: CGPoint(x: floorToScreenPixels((width - buttonWidth) / 2.0), y: 0.0), size: CGSize(width: buttonWidth, height: panelHeight))
nextButtonOrigin += buttonWidth
}
} else {

View File

@@ -85,6 +85,24 @@ final class ChatTranslationPanelNode: ASDisplayNode {
}
if themeUpdated || isEnabledUpdated {
if previousIsEnabled != nil && isEnabledUpdated {
var offset: CGFloat = 30.0
if interfaceState.translationState?.isEnabled == false {
offset *= -1
}
if let snapshotView = self.button.view.snapshotContentTree() {
snapshotView.frame = self.button.frame
self.button.supernode?.view.addSubview(snapshotView)
snapshotView.layer.animateAlpha(from: 1.0, to: 0.0, duration: 0.2, removeOnCompletion: false, completion: { [weak snapshotView] _ in
snapshotView?.removeFromSuperview()
})
snapshotView.layer.animatePosition(from: CGPoint(), to: CGPoint(x: 0.0, y: offset), duration: 0.2, removeOnCompletion: false, additive: true)
self.button.layer.animateAlpha(from: 0.0, to: 1.0, duration: 0.2)
self.button.layer.animatePosition(from: CGPoint(x: 0.0, y: -offset), to: CGPoint(), duration: 0.2, additive: true)
}
}
var languageCode = interfaceState.strings.baseLanguageCode
let rawSuffix = "-raw"
if languageCode.hasSuffix(rawSuffix) {

View File

@@ -653,7 +653,7 @@ public func createChannelController(context: AccountContext, mode: CreateChannel
} else {
peerType = .user
}
let controller = AvatarEditorScreen(context: context, inputData: keyboardInputData.get(), peerType: peerType, initialFileId: nil, initialBackgroundColors: nil)
let controller = AvatarEditorScreen(context: context, inputData: keyboardInputData.get(), peerType: peerType, markup: nil)
controller.imageCompletion = imageCompletion
controller.videoCompletion = videoCompletion
pushControllerImpl?(controller)

View File

@@ -977,7 +977,7 @@ public func createGroupControllerImpl(context: AccountContext, peerIds: [PeerId]
} else {
peerType = .user
}
let controller = AvatarEditorScreen(context: context, inputData: keyboardInputData.get(), peerType: peerType, initialFileId: nil, initialBackgroundColors: nil)
let controller = AvatarEditorScreen(context: context, inputData: keyboardInputData.get(), peerType: peerType, markup: nil)
controller.imageCompletion = imageCompletion
controller.videoCompletion = videoCompletion
pushImpl?(controller)

View File

@@ -7286,7 +7286,7 @@ final class PeerInfoScreenNode: ViewControllerTracingNode, UIScrollViewDelegate
} else {
peerType = .user
}
let controller = AvatarEditorScreen(context: strongSelf.context, inputData: keyboardInputData.get(), peerType: peerType, initialFileId: emojiMarkup?.fileId, initialBackgroundColors: emojiMarkup?.backgroundColors)
let controller = AvatarEditorScreen(context: strongSelf.context, inputData: keyboardInputData.get(), peerType: peerType, markup: emojiMarkup)
controller.imageCompletion = imageCompletion
controller.videoCompletion = videoCompletion
(strongSelf.controller?.navigationController?.topViewController as? ViewController)?.push(controller)

View File

@@ -1064,12 +1064,44 @@ final class PeerSelectionControllerNode: ASDisplayNode {
}
}
func stringForAdminRights(strings: PresentationStrings, adminRights: TelegramChatAdminRights) -> String {
func stringForAdminRights(strings: PresentationStrings, adminRights: TelegramChatAdminRights, isChannel: Bool) -> String {
var rights: [String] = []
func append(_ string: String) {
rights.append("\(string)")
}
if isChannel {
if adminRights.rights.contains(.canChangeInfo) {
append(strings.RequestPeer_Requirement_Channel_Rights_Info)
}
if adminRights.rights.contains(.canPostMessages) {
append(strings.RequestPeer_Requirement_Channel_Rights_Send)
}
if adminRights.rights.contains(.canDeleteMessages) {
append(strings.RequestPeer_Requirement_Channel_Rights_Delete)
}
if adminRights.rights.contains(.canEditMessages) {
append(strings.RequestPeer_Requirement_Channel_Rights_Edit)
}
if adminRights.rights.contains(.canInviteUsers) {
append(strings.RequestPeer_Requirement_Channel_Rights_Invite)
}
if adminRights.rights.contains(.canPinMessages) {
append(strings.RequestPeer_Requirement_Channel_Rights_Pin)
}
if adminRights.rights.contains(.canManageTopics) {
append(strings.RequestPeer_Requirement_Channel_Rights_Topics)
}
if adminRights.rights.contains(.canManageCalls) {
append(strings.RequestPeer_Requirement_Channel_Rights_VideoChats)
}
if adminRights.rights.contains(.canBeAnonymous) {
append(strings.RequestPeer_Requirement_Channel_Rights_Anonymous)
}
if adminRights.rights.contains(.canAddAdmins) {
append(strings.RequestPeer_Requirement_Channel_Rights_AddAdmins)
}
} else {
if adminRights.rights.contains(.canChangeInfo) {
append(strings.RequestPeer_Requirement_Group_Rights_Info)
}
@@ -1103,6 +1135,7 @@ func stringForAdminRights(strings: PresentationStrings, adminRights: TelegramCha
if adminRights.rights.contains(.canAddAdmins) {
append(strings.RequestPeer_Requirement_Group_Rights_AddAdmins)
}
}
if !rights.isEmpty {
return String(rights.joined(separator: "\n"))
} else {

View File

@@ -126,14 +126,7 @@ public func translateMessageIds(context: AccountContext, messageIds: [EngineMess
public func chatTranslationState(context: AccountContext, peerId: EnginePeer.Id) -> Signal<ChatTranslationState?, NoError> {
if #available(iOS 12.0, *) {
let baseLang = context.sharedContext.currentPresentationData.with { $0 }.strings.baseLanguageCode
return cachedChatTranslationState(engine: context.engine, peerId: peerId)
|> mapToSignal { cached in
if let cached, cached.baseLang == baseLang {
return .single(cached)
} else {
return .single(nil)
|> then(
context.sharedContext.accountManager.sharedData(keys: [ApplicationSpecificSharedDataKeys.translationSettings])
return context.sharedContext.accountManager.sharedData(keys: [ApplicationSpecificSharedDataKeys.translationSettings])
|> mapToSignal { sharedData in
let settings = sharedData.entries[ApplicationSpecificSharedDataKeys.translationSettings]?.get(TranslationSettings.self) ?? TranslationSettings.defaultSettings
@@ -144,12 +137,23 @@ public func chatTranslationState(context: AccountContext, peerId: EnginePeer.Id)
dontTranslateLanguages = [baseLang]
}
return context.account.viewTracker.aroundMessageHistoryViewForLocation(.peer(peerId: peerId, threadId: nil), index: .upperBound, anchorIndex: .upperBound, count: 10, fixedCombinedReadStates: nil)
return cachedChatTranslationState(engine: context.engine, peerId: peerId)
|> mapToSignal { cached in
if let cached, cached.baseLang == baseLang {
if !dontTranslateLanguages.contains(cached.fromLang) {
return .single(cached)
} else {
return .single(nil)
}
} else {
return .single(nil)
|> then(
context.account.viewTracker.aroundMessageHistoryViewForLocation(.peer(peerId: peerId, threadId: nil), index: .upperBound, anchorIndex: .upperBound, count: 10, fixedCombinedReadStates: nil)
|> filter { messageHistoryView -> Bool in
return messageHistoryView.0.entries.count > 1
}
|> take(1)
|> map { messageHistoryView, _, _ -> ChatTranslationState in
|> map { messageHistoryView, _, _ -> ChatTranslationState? in
let messages = messageHistoryView.entries.map(\.message)
var fromLangs: [String: Int] = [:]
@@ -165,18 +169,21 @@ public func chatTranslationState(context: AccountContext, peerId: EnginePeer.Id)
languageRecognizer.reset()
let filteredLanguages = hypotheses.filter { supportedTranslationLanguages.contains($0.key.rawValue) }.sorted(by: { $0.value > $1.value })
if let language = filteredLanguages.first(where: { supportedTranslationLanguages.contains($0.key.rawValue) }), !dontTranslateLanguages.contains(language.key.rawValue) {
if let language = filteredLanguages.first(where: { supportedTranslationLanguages.contains($0.key.rawValue) }) {
let fromLang = language.key.rawValue
fromLangs[fromLang] = (fromLangs[fromLang] ?? 0) + 1
}
count += 1
}
if count >= 5 {
break
}
}
if let _ = fromLangs["ru"] {
fromLangs["bg"] = nil
}
var mostFrequent: (String, Int)?
for (lang, count) in fromLangs {
if let current = mostFrequent, count > current.1 {
@@ -188,12 +195,16 @@ public func chatTranslationState(context: AccountContext, peerId: EnginePeer.Id)
let fromLang = mostFrequent?.0 ?? ""
let state = ChatTranslationState(baseLang: baseLang, fromLang: fromLang, toLang: nil, isEnabled: false)
let _ = updateChatTranslationState(engine: context.engine, peerId: peerId, state: state).start()
if !dontTranslateLanguages.contains(fromLang) {
return state
} else {
return nil
}
}
)
}
}
}
} else {
return .single(nil)
}