mirror of https://github.com/Swiftgram/Telegram-iOS.git
Merge commit '1eafdc2ae0566e873be5cf053b60f22f6ac0777e'
Commit: 40bca36df4
@@ -149,6 +149,7 @@ public final class DrawingStickerEntityView: DrawingEntityView {
 if file.isAnimatedSticker || file.isVideoSticker || file.mimeType == "video/webm" {
 if self.animationNode == nil {
 let animationNode = DefaultAnimatedStickerNodeImpl()
+animationNode.clipsToBounds = true
 animationNode.autoplay = false
 self.animationNode = animationNode
 animationNode.started = { [weak self, weak animationNode] in
@@ -183,6 +184,7 @@ public final class DrawingStickerEntityView: DrawingEntityView {
 self.imageNode.isHidden = false
 self.didSetUpAnimationNode = false
 }
+self.imageNode.isHidden = false
 self.imageNode.setSignal(chatMessageSticker(account: self.context.account, userLocation: .other, file: file, small: false, synchronousLoad: false))
 self.stickerFetchedDisposable.set(freeMediaFileResourceInteractiveFetched(account: self.context.account, userLocation: .other, fileReference: stickerPackFileReference(file), resource: chatMessageStickerResource(file: file, small: false)).start())
 }
@@ -354,7 +356,7 @@ public final class DrawingStickerEntityView: DrawingEntityView {
 self.imageNode.frame = imageFrame
 if let animationNode = self.animationNode {
 if self.isReaction {
-animationNode.cornerRadius = floor(imageSize.width * 0.03)
+animationNode.cornerRadius = floor(imageSize.width * 0.1)
 }
 animationNode.frame = imageFrame
 animationNode.updateLayout(size: imageSize)
@@ -488,15 +490,23 @@ public final class DrawingStickerEntityView: DrawingEntityView {
 self.stickerEntity.content = .file(animation, .reaction(updateReaction.reaction, style))
 }
-if let animationNode = self.animationNode, let snapshot = animationNode.view.snapshotView(afterScreenUpdates: false) {
-snapshot.frame = animationNode.frame
-snapshot.layer.transform = animationNode.transform
+
+var nodeToTransitionOut: ASDisplayNode?
+if let animationNode = self.animationNode {
+nodeToTransitionOut = animationNode
+} else if !self.imageNode.isHidden {
+nodeToTransitionOut = self.imageNode
+}
+
+if let nodeToTransitionOut, let snapshot = nodeToTransitionOut.view.snapshotView(afterScreenUpdates: false) {
+snapshot.frame = nodeToTransitionOut.frame
+snapshot.layer.transform = nodeToTransitionOut.transform
 snapshot.layer.animateAlpha(from: 1.0, to: 0.0, duration: 0.2, removeOnCompletion: false, completion: { _ in
 snapshot.removeFromSuperview()
 })
 snapshot.layer.animateScale(from: 1.0, to: 0.1, duration: 0.2)
 self.addSubview(snapshot)
 }
 
 self.animationNode?.removeFromSupernode()
 self.animationNode = nil
 self.didSetUpAnimationNode = false
@@ -507,8 +517,17 @@ public final class DrawingStickerEntityView: DrawingEntityView {
 self.applyVisibility()
 self.setNeedsLayout()
 
-self.animationNode?.layer.animateAlpha(from: 0.0, to: 1.0, duration: 0.2)
-self.animationNode?.layer.animateScale(from: 0.1, to: 1.0, duration: 0.2)
+let nodeToTransitionIn: ASDisplayNode?
+if let animationNode = self.animationNode {
+nodeToTransitionIn = animationNode
+} else {
+nodeToTransitionIn = self.imageNode
+}
+
+if let nodeToTransitionIn {
+nodeToTransitionIn.layer.animateAlpha(from: 0.0, to: 1.0, duration: 0.2)
+nodeToTransitionIn.layer.animateScale(from: 0.1, to: 1.0, duration: 0.2)
+}
 
 let _ = self.dismissReactionSelection()
 }
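Note: the two hunks above generalize the reaction-style switch so it cross-fades whichever node is currently on screen, the animated sticker node when one exists and the static image node otherwise. A minimal sketch of the snapshot-and-fade-out half of that pattern (a hypothetical helper, not part of the commit; animateAlpha and animateScale are the same Display-module CALayer helpers used in the hunk):

func transitionOutSnapshot(of node: ASDisplayNode, in container: UIView) {
    // Snapshot whatever is currently visible so new content can be swapped in underneath.
    guard let snapshot = node.view.snapshotView(afterScreenUpdates: false) else { return }
    snapshot.frame = node.frame
    snapshot.layer.transform = node.transform
    container.addSubview(snapshot)
    // Shrink and fade the snapshot, then drop it once the alpha animation completes.
    snapshot.layer.animateScale(from: 1.0, to: 0.1, duration: 0.2)
    snapshot.layer.animateAlpha(from: 1.0, to: 0.0, duration: 0.2, removeOnCompletion: false, completion: { _ in
        snapshot.removeFromSuperview()
    })
}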
@@ -272,7 +272,7 @@ func presentLegacyMediaPickerGallery(context: AccountContext, peer: EnginePeer?,
 |> take(1)
 |> deliverOnMainQueue).start(next: { sendWhenOnlineAvailable in
 let legacySheetController = LegacyController(presentation: .custom, theme: presentationData.theme, initialLayout: nil)
-let sheetController = TGMediaPickerSendActionSheetController(context: legacyController.context, isDark: true, sendButtonFrame: model.interfaceView.doneButtonFrame, canSendSilently: hasSilentPosting, canSendWhenOnline: sendWhenOnlineAvailable && effectiveHasSchedule, canSchedule: effectiveHasSchedule, reminder: reminder, hasTimer: false)
+let sheetController = TGMediaPickerSendActionSheetController(context: legacyController.context, isDark: true, sendButtonFrame: model.interfaceView.doneButtonFrame, canSendSilently: hasSilentPosting, canSendWhenOnline: sendWhenOnlineAvailable && effectiveHasSchedule, canSchedule: effectiveHasSchedule, reminder: reminder, hasTimer: hasTimer)
 let dismissImpl = { [weak model] in
 model?.dismiss(true, false)
 dismissAll()
@@ -253,29 +253,39 @@ public class PremiumLimitDisplayComponent: Component {
 positionAnimation.timingFunction = CAMediaTimingFunction(name: .easeInEaseOut)
 self.badgeView.layer.add(positionAnimation, forKey: "appearance1")
 
+let rotateAnimation = CABasicAnimation(keyPath: "transform.rotation.z")
+rotateAnimation.fromValue = 0.0 as NSNumber
+rotateAnimation.toValue = -0.3 as NSNumber
+rotateAnimation.duration = 0.15
+rotateAnimation.fillMode = .forwards
+rotateAnimation.timingFunction = CAMediaTimingFunction(name: .easeOut)
+rotateAnimation.isRemovedOnCompletion = false
+self.badgeView.layer.add(rotateAnimation, forKey: "appearance2")
+
 Queue.mainQueue().after(0.5, {
-let rotateAnimation = CABasicAnimation(keyPath: "transform.rotation.z")
-rotateAnimation.fromValue = 0.0 as NSNumber
-rotateAnimation.toValue = 0.2 as NSNumber
-rotateAnimation.duration = 0.2
-rotateAnimation.fillMode = .forwards
-rotateAnimation.timingFunction = CAMediaTimingFunction(name: .easeOut)
-rotateAnimation.isRemovedOnCompletion = false
-self.badgeView.layer.add(rotateAnimation, forKey: "appearance2")
+let bounceAnimation = CABasicAnimation(keyPath: "transform.rotation.z")
+bounceAnimation.fromValue = -0.3 as NSNumber
+bounceAnimation.toValue = 0.05 as NSNumber
+bounceAnimation.duration = 0.15
+bounceAnimation.fillMode = .forwards
+bounceAnimation.timingFunction = CAMediaTimingFunction(name: .easeOut)
+bounceAnimation.isRemovedOnCompletion = false
+self.badgeView.layer.add(bounceAnimation, forKey: "appearance3")
+self.badgeView.layer.removeAnimation(forKey: "appearance2")
 
 if !self.badgeView.isHidden {
 self.hapticFeedback.impact(.light)
 }
 
-Queue.mainQueue().after(0.2) {
+Queue.mainQueue().after(0.15) {
 let returnAnimation = CABasicAnimation(keyPath: "transform.rotation.z")
-returnAnimation.fromValue = 0.2 as NSNumber
+returnAnimation.fromValue = 0.05 as NSNumber
 returnAnimation.toValue = 0.0 as NSNumber
-returnAnimation.duration = 0.18
+returnAnimation.duration = 0.1
 returnAnimation.fillMode = .forwards
 returnAnimation.timingFunction = CAMediaTimingFunction(name: .easeIn)
-self.badgeView.layer.add(returnAnimation, forKey: "appearance3")
-self.badgeView.layer.removeAnimation(forKey: "appearance2")
+self.badgeView.layer.add(returnAnimation, forKey: "appearance4")
+self.badgeView.layer.removeAnimation(forKey: "appearance3")
 }
 })
 
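Note: after this change the badge appearance wobble runs as three chained steps on badgeView.layer: a tilt to -0.3 rad over 0.15 s ("appearance2"), a bounce from -0.3 to 0.05 rad over 0.15 s starting at the 0.5 s mark ("appearance3", which removes "appearance2"), and a settle from 0.05 back to 0.0 rad over 0.1 s ("appearance4", which removes "appearance3"). Roughly the same timeline expressed as a single keyframe animation, shown only as an illustration (the commit itself keeps the three separate CABasicAnimations):

// Illustrative only: tilt, hold until the 0.5 s delay elapses, bounce past zero, then settle.
let wobble = CAKeyframeAnimation(keyPath: "transform.rotation.z")
wobble.values = [0.0, -0.3, -0.3, 0.05, 0.0]
wobble.keyTimes = [0.0, 0.2, 0.67, 0.87, 1.0]
wobble.duration = 0.75
wobble.timingFunctions = [
    CAMediaTimingFunction(name: .easeOut),
    CAMediaTimingFunction(name: .linear),
    CAMediaTimingFunction(name: .easeOut),
    CAMediaTimingFunction(name: .easeIn)
]
self.badgeView.layer.add(wobble, forKey: "appearance")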
@@ -285,7 +295,7 @@ public class PremiumLimitDisplayComponent: Component {
 }
 
 if let badgeText = component.badgeText {
-self.badgeCountLabel.configure(with: badgeText, duration: from != nil ? 0.3 : 0.9)
+self.badgeCountLabel.configure(with: badgeText, duration: from != nil ? 0.3 : 0.5)
 }
 }
 
@@ -199,9 +199,12 @@ private class TimerPickerItemView: UIView {
 
 var value: (Int32, String)? {
 didSet {
-if let (_, string) = self.value {
+if let (value, string) = self.value {
 let components = string.components(separatedBy: " ")
-if components.count > 1 {
+if value == viewOnceTimeout {
+self.valueLabel.text = string
+self.unitLabel.text = ""
+} else if components.count > 1 {
 self.valueLabel.text = components[0]
 self.unitLabel.text = components[1]
 }
@@ -236,8 +239,12 @@ private class TimerPickerItemView: UIView {
 self.valueLabel.sizeToFit()
 self.unitLabel.sizeToFit()
 
-self.valueLabel.frame = CGRect(origin: CGPoint(x: self.frame.width / 2.0 - 20.0 - self.valueLabel.frame.size.width, y: floor((self.frame.height - self.valueLabel.frame.height) / 2.0)), size: self.valueLabel.frame.size)
-self.unitLabel.frame = CGRect(origin: CGPoint(x: self.frame.width / 2.0 - 12.0, y: floor((self.frame.height - self.unitLabel.frame.height) / 2.0) + 2.0), size: self.unitLabel.frame.size)
+if let (value, _) = self.value, value == viewOnceTimeout {
+self.valueLabel.frame = CGRect(origin: CGPoint(x: floorToScreenPixels((self.frame.width - self.valueLabel.frame.size.width) / 2.0), y: floor((self.frame.height - self.valueLabel.frame.height) / 2.0)), size: self.valueLabel.frame.size)
+} else {
+self.valueLabel.frame = CGRect(origin: CGPoint(x: self.frame.width / 2.0 - 28.0 - self.valueLabel.frame.size.width, y: floor((self.frame.height - self.valueLabel.frame.height) / 2.0)), size: self.valueLabel.frame.size)
+self.unitLabel.frame = CGRect(origin: CGPoint(x: self.frame.width / 2.0 - 20.0, y: floor((self.frame.height - self.unitLabel.frame.height) / 2.0) + 2.0), size: self.unitLabel.frame.size)
+}
 }
 }
 
@@ -414,7 +421,14 @@ class ChatTimerScreenNode: ViewControllerTracingNode, UIScrollViewDelegate, UIPi
 if let pickerView = pickerView as? TimerCustomPickerView {
 switch strongSelf.mode {
 case .sendTimer:
-strongSelf.completion?(timerValues[pickerView.selectedRow(inComponent: 0)])
+let row = pickerView.selectedRow(inComponent: 0)
+let value: Int32
+if row == 0 {
+value = viewOnceTimeout
+} else {
+value = timerValues[row - 1]
+}
+strongSelf.completion?(value)
 case .autoremove:
 let timeInterval = strongSelf.autoremoveTimerValues[pickerView.selectedRow(inComponent: 0)]
 strongSelf.completion?(Int32(timeInterval))
@@ -456,6 +470,8 @@ class ChatTimerScreenNode: ViewControllerTracingNode, UIScrollViewDelegate, UIPi
 
 self.contentContainerNode.view.addSubview(pickerView)
 self.pickerView = pickerView
+
+pickerView.selectRow(1, inComponent: 0, animated: false)
 case .autoremove:
 let pickerView = TimerCustomPickerView()
 pickerView.dataSource = self
@@ -514,7 +530,7 @@ class ChatTimerScreenNode: ViewControllerTracingNode, UIScrollViewDelegate, UIPi
 func pickerView(_ pickerView: UIPickerView, numberOfRowsInComponent component: Int) -> Int {
 switch self.mode {
 case .sendTimer:
-return timerValues.count
+return timerValues.count + 1
 case .autoremove:
 return self.autoremoveTimerValues.count
 case .mute:
@@ -525,17 +541,30 @@ class ChatTimerScreenNode: ViewControllerTracingNode, UIScrollViewDelegate, UIPi
 func pickerView(_ pickerView: UIPickerView, viewForRow row: Int, forComponent component: Int, reusing view: UIView?) -> UIView {
 switch self.mode {
 case .sendTimer:
-let value = timerValues[row]
-let string = timeIntervalString(strings: self.presentationData.strings, value: value)
-if let view = view as? TimerPickerItemView {
-view.value = (value, string)
-return view
-}
-
-let view = TimerPickerItemView()
-view.value = (value, string)
-view.textColor = .white
-return view
+if row == 0 {
+let string = self.presentationData.strings.MediaPicker_Timer_ViewOnce
+if let view = view as? TimerPickerItemView {
+view.value = (viewOnceTimeout, string)
+return view
+}
+
+let view = TimerPickerItemView()
+view.value = (viewOnceTimeout, string)
+view.textColor = .white
+return view
+} else {
+let value = timerValues[row - 1]
+let string = timeIntervalString(strings: self.presentationData.strings, value: value)
+if let view = view as? TimerPickerItemView {
+view.value = (value, string)
+return view
+}
+
+let view = TimerPickerItemView()
+view.value = (value, string)
+view.textColor = .white
+return view
+}
 case .autoremove:
 let itemView: TimerPickerItemView
 if let current = view as? TimerPickerItemView {
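Note: the timer-picker hunks above all follow one mapping: row 0 is the new "View Once" entry and every other row shifts down by one, which is why numberOfRowsInComponent returns timerValues.count + 1 and the picker pre-selects row 1. A minimal sketch (hypothetical helper; viewOnceTimeout and timerValues are defined elsewhere in the file):

// Summarizes the indexing used in the .sendTimer case above.
func sendTimerValue(forRow row: Int) -> Int32 {
    return row == 0 ? viewOnceTimeout : timerValues[row - 1]
}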
@@ -370,6 +370,7 @@ public final class MediaEditor {
 }
 
 self.audioDelayTimer?.invalidate()
+self.audioDelayTimer = nil
 }
 
 public func replaceSource(_ image: UIImage, additionalImage: UIImage?, time: CMTime) {
@@ -573,11 +574,9 @@ public final class MediaEditor {
 // self.maybeGeneratePersonSegmentation(image)
 }
 
-if let audioTrack = self.values.audioTrack {
-self.setAudioTrack(audioTrack)
-self.setAudioTrackVolume(self.values.audioTrackVolume)
-self.setAudioTrackTrimRange(self.values.audioTrackTrimRange, apply: true)
-self.setAudioTrackOffset(self.values.audioTrackOffset, apply: true)
+if let _ = self.values.audioTrack {
+self.setupAudioPlayback()
+self.updateAudioPlaybackRange()
 }
 
 if let player {
@@ -609,6 +608,19 @@ public final class MediaEditor {
 startPlayback()
 }
 }
+} else if let audioPlayer = self.audioPlayer {
+let offset = self.values.audioTrackOffset ?? 0.0
+let lowerBound = self.values.audioTrackTrimRange?.lowerBound ?? 0.0
+
+let audioTime = CMTime(seconds: offset + lowerBound, preferredTimescale: CMTimeScale(1000))
+audioPlayer.seek(to: audioTime, toleranceBefore: .zero, toleranceAfter: .zero)
+if audioPlayer.status != .readyToPlay {
+Queue.mainQueue().after(0.1) {
+audioPlayer.play()
+}
+} else {
+audioPlayer.play()
+}
 }
 }
 })
@@ -936,6 +948,10 @@ public final class MediaEditor {
 } else {
 if audioPlayer.status == .readyToPlay {
 audioPlayer.setRate(rate, time: audioTime, atHostTime: futureTime)
+if rate > 0.0 {
+// audioPlayer.seek(to: audioTime, toleranceBefore: .zero, toleranceAfter: .zero)
+audioPlayer.play()
+}
 } else {
 audioPlayer.seek(to: audioTime, toleranceBefore: .zero, toleranceAfter: .zero)
 if rate > 0.0 {
|
|||||||
if let audioPlayer = self.audioPlayer {
|
if let audioPlayer = self.audioPlayer {
|
||||||
audioPlayer.pause()
|
audioPlayer.pause()
|
||||||
|
|
||||||
self.destroyTimeObservers()
|
if self.sourceIsVideo {
|
||||||
|
self.audioDelayTimer?.invalidate()
|
||||||
|
self.audioDelayTimer = nil
|
||||||
|
} else {
|
||||||
|
self.destroyTimeObservers()
|
||||||
|
}
|
||||||
self.audioPlayer = nil
|
self.audioPlayer = nil
|
||||||
|
|
||||||
if !self.sourceIsVideo {
|
if !self.sourceIsVideo {
|
||||||
@ -1059,15 +1080,23 @@ public final class MediaEditor {
|
|||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
if let audioTrack {
|
self.setupAudioPlayback()
|
||||||
|
}
|
||||||
|
|
||||||
|
private func setupAudioPlayback() {
|
||||||
|
if let audioTrack = self.values.audioTrack {
|
||||||
let path = fullDraftPath(peerId: self.context.account.peerId, path: audioTrack.path)
|
let path = fullDraftPath(peerId: self.context.account.peerId, path: audioTrack.path)
|
||||||
let audioAsset = AVURLAsset(url: URL(fileURLWithPath: path))
|
let audioAsset = AVURLAsset(url: URL(fileURLWithPath: path))
|
||||||
let playerItem = AVPlayerItem(asset: audioAsset)
|
let playerItem = AVPlayerItem(asset: audioAsset)
|
||||||
let player = AVPlayer(playerItem: playerItem)
|
let player = AVPlayer(playerItem: playerItem)
|
||||||
player.automaticallyWaitsToMinimizeStalling = false
|
player.automaticallyWaitsToMinimizeStalling = false
|
||||||
self.audioPlayer = player
|
self.audioPlayer = player
|
||||||
self.maybeGenerateAudioSamples(asset: audioAsset)
|
self.maybeGenerateAudioSamples(asset: audioAsset)
|
||||||
|
|
||||||
|
if let volume = self.values.audioTrackVolume {
|
||||||
|
self.audioPlayer?.volume = Float(volume)
|
||||||
|
}
|
||||||
|
|
||||||
self.setupTimeObservers()
|
self.setupTimeObservers()
|
||||||
|
|
||||||
if !self.sourceIsVideo {
|
if !self.sourceIsVideo {
|
||||||
@ -1081,9 +1110,8 @@ public final class MediaEditor {
|
|||||||
return values.withUpdatedAudioTrackTrimRange(trimRange)
|
return values.withUpdatedAudioTrackTrimRange(trimRange)
|
||||||
}
|
}
|
||||||
|
|
||||||
if apply, let trimRange {
|
if apply, let _ = trimRange {
|
||||||
let offset = self.values.audioTrackOffset ?? 0.0
|
self.updateAudioPlaybackRange()
|
||||||
self.audioPlayer?.currentItem?.forwardPlaybackEndTime = CMTime(seconds: offset + trimRange.upperBound, preferredTimescale: CMTimeScale(1000))
|
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
@@ -1094,9 +1122,7 @@ public final class MediaEditor {
 
 if apply {
 let offset = offset ?? 0.0
-let duration = self.duration ?? 0.0
 let lowerBound = self.values.audioTrackTrimRange?.lowerBound ?? 0.0
-let upperBound = self.values.audioTrackTrimRange?.upperBound ?? duration
 
 let audioTime: CMTime
 if self.sourceIsVideo {
@@ -1105,7 +1131,7 @@ public final class MediaEditor {
 } else {
 audioTime = CMTime(seconds: offset + lowerBound, preferredTimescale: CMTimeScale(1000))
 }
-self.audioPlayer?.currentItem?.forwardPlaybackEndTime = CMTime(seconds: offset + upperBound, preferredTimescale: CMTimeScale(1000))
+self.updateAudioPlaybackRange()
 self.audioPlayer?.seek(to: audioTime, toleranceBefore: .zero, toleranceAfter: .zero)
 if !self.sourceIsVideo {
 self.audioPlayer?.play()
@@ -1113,6 +1139,15 @@ public final class MediaEditor {
 }
 }
 
+private func updateAudioPlaybackRange() {
+if let upperBound = self.values.audioTrackTrimRange?.upperBound {
+let offset = self.values.audioTrackOffset ?? 0.0
+self.audioPlayer?.currentItem?.forwardPlaybackEndTime = CMTime(seconds: offset + upperBound, preferredTimescale: CMTimeScale(1000))
+} else {
+self.audioPlayer?.currentItem?.forwardPlaybackEndTime = .invalid
+}
+}
+
 public func setAudioTrackVolume(_ volume: CGFloat?) {
 self.updateValues(mode: .skipRendering) { values in
 return values.withUpdatedAudioTrackVolume(volume)
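Note: the MediaEditor hunks above consolidate the audio playback window: playback starts at audioTrackOffset + trim lower bound, and updateAudioPlaybackRange() caps it at audioTrackOffset + trim upper bound via forwardPlaybackEndTime (or clears the cap with .invalid). A self-contained sketch of that mapping, with a hypothetical signature that is not part of the commit:

import AVFoundation

// offset and trim stand in for values.audioTrackOffset and values.audioTrackTrimRange from the diff.
func applyAudioWindow(to player: AVPlayer, offset: Double?, trim: ClosedRange<Double>?) {
    let offset = offset ?? 0.0
    // Start playback at the trimmed-in point of the track.
    let start = CMTime(seconds: offset + (trim?.lowerBound ?? 0.0), preferredTimescale: 1000)
    player.seek(to: start, toleranceBefore: .zero, toleranceAfter: .zero)
    // Stop playback at the trimmed-out point, or remove the cap when there is no trim range.
    if let upperBound = trim?.upperBound {
        player.currentItem?.forwardPlaybackEndTime = CMTime(seconds: offset + upperBound, preferredTimescale: 1000)
    } else {
        player.currentItem?.forwardPlaybackEndTime = .invalid
    }
}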
@@ -1120,6 +1120,9 @@ public extension MediaEditorValues {
 if !self.entities.isEmpty {
 return true
 }
+if self.additionalVideoPath != nil {
+return true
+}
 return false
 }
 }
@@ -483,7 +483,7 @@ public final class MediaEditorVideoExport {
 kCVPixelBufferMetalCompatibilityKey as String: true,
 AVVideoColorPropertiesKey: colorProperties
 ]
-if !"".isEmpty, let videoTrack = videoTracks.first, videoTrack.preferredTransform.isIdentity && !self.configuration.values.requiresComposing && additionalAsset == nil {
+if let videoTrack = videoTracks.first, videoTrack.preferredTransform.isIdentity && !self.configuration.values.requiresComposing {
 } else {
 self.setupComposer()
 }
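Note on the export change above: the old condition began with !"".isEmpty, which is always false ("".isEmpty is true), so the passthrough branch could never be taken and setupComposer() always ran; the new condition also drops the additionalAsset == nil requirement. A trivial check of the removed guard:

// "".isEmpty is true, so !"".isEmpty is always false and short-circuited the old condition.
let alwaysFalse = !"".isEmpty
assert(alwaysFalse == false)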
@@ -2096,7 +2096,7 @@ public final class MediaEditorScreen: ViewController, UIDropInteractionDelegate
 }
 
 let mediaEditor = MediaEditor(context: self.context, subject: subject.editorSubject, values: initialValues, hasHistogram: true)
-if let initialVideoPosition = self.controller?.initialVideoPosition {
+if let initialVideoPosition = controller.initialVideoPosition {
 mediaEditor.seek(initialVideoPosition, andPlay: true)
 }
 mediaEditor.attachPreviewView(self.previewView)
@@ -2157,7 +2157,7 @@ public final class MediaEditorScreen: ViewController, UIDropInteractionDelegate
 Queue.mainQueue().async {
 self.gradientView.image = gradientImage
 
-if self.controller?.isEditingStory == true && subject.isVideo {
+if self.controller?.isEditingStory == true {
 
 } else {
 self.previewContainerView.alpha = 1.0
@@ -2178,11 +2178,20 @@ public final class MediaEditorScreen: ViewController, UIDropInteractionDelegate
 self.mediaEditor = mediaEditor
 self.mediaEditorPromise.set(.single(mediaEditor))
 
-if self.controller?.isEditingStory == true && subject.isVideo {
+if controller.isEditingStory == true {
 mediaEditor.onFirstDisplay = { [weak self] in
 if let self {
-self.previewContainerView.alpha = 1.0
-self.backgroundDimView.isHidden = false
+if subject.isPhoto {
+self.previewContainerView.layer.allowsGroupOpacity = true
+self.previewContainerView.layer.animateAlpha(from: 0.0, to: 1.0, duration: 0.25, completion: { _ in
+self.previewContainerView.layer.allowsGroupOpacity = false
+self.previewContainerView.alpha = 1.0
+self.backgroundDimView.isHidden = false
+})
+} else {
+self.previewContainerView.alpha = 1.0
+self.backgroundDimView.isHidden = false
+}
 }
 }
 }
@@ -3103,7 +3112,7 @@ public final class MediaEditorScreen: ViewController, UIDropInteractionDelegate
 
 func presentAudioPicker() {
 var isSettingTrack = false
-self.controller?.present(legacyICloudFilePicker(theme: self.presentationData.theme, mode: .import, documentTypes: ["public.mp3", "public.mpeg-4-audio", "public.aac-audio"], forceDarkTheme: true, dismissed: { [weak self] in
+self.controller?.present(legacyICloudFilePicker(theme: self.presentationData.theme, mode: .import, documentTypes: ["public.mp3", "public.mpeg-4-audio", "public.aac-audio", "org.xiph.flac"], forceDarkTheme: true, dismissed: { [weak self] in
 if let self {
 Queue.mainQueue().after(0.1) {
 if !isSettingTrack {
@@ -3177,14 +3186,27 @@ public final class MediaEditorScreen: ViewController, UIDropInteractionDelegate
 return
 }
 
+func maybeFixMisencodedText(_ text: String) -> String {
+let charactersToSearchFor = CharacterSet(charactersIn: "àåèîóûþÿ")
+if text.lowercased().rangeOfCharacter(from: charactersToSearchFor) != nil {
+if let data = text.data(using: .windowsCP1252), let string = String(data: data, encoding: .windowsCP1251) {
+return string
+} else {
+return text
+}
+} else {
+return text
+}
+}
+
 var artist: String?
 var title: String?
 for data in audioAsset.commonMetadata {
-if data.commonKey == .commonKeyArtist {
-artist = data.stringValue
+if data.commonKey == .commonKeyArtist, let value = data.stringValue {
+artist = maybeFixMisencodedText(value)
 }
-if data.commonKey == .commonKeyTitle {
-title = data.stringValue
+if data.commonKey == .commonKeyTitle, let value = data.stringValue {
+title = maybeFixMisencodedText(value)
 }
 }
 
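Note: the maybeFixMisencodedText helper added above targets a common ID3-tag mojibake. Cyrillic metadata stored as Windows-1251 bytes but decoded as Windows-1252 shows up as strings like "Ïðèâåò"; re-encoding those characters back to bytes and decoding them as Windows-1251 recovers the original text, while the accent-character probe leaves ordinary Latin titles untouched. A standalone illustration of the round trip (sample string only, not taken from the commit):

import Foundation

let garbled = "Ïðèâåò" // "Привет" written as Windows-1251 bytes, then mis-read as Windows-1252
if let data = garbled.data(using: .windowsCP1252),
   let recovered = String(data: data, encoding: .windowsCP1251) {
    print(recovered) // "Привет"
}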
@@ -3461,6 +3483,8 @@ public final class MediaEditorScreen: ViewController, UIDropInteractionDelegate
 let stickerEntity = DrawingStickerEntity(content: .file(reaction.stillAnimation, .reaction(.builtin(heart), .white)))
 self.interaction?.insertEntity(stickerEntity, scale: 1.175)
 }
+
+self.mediaEditor?.play()
 }
 }
 self.stickerScreen = controller
@@ -336,7 +336,8 @@ final class VideoScrubberComponent: Component {
 let location = gestureRecognizer.location(in: self.audioContainerView)
 return self.audioContainerView.bounds.contains(location)
 }
 
+var ignoreScrollUpdates = false
 private func updateAudioOffset(done: Bool) {
 guard self.audioScrollView.contentSize.width > 0.0, let component = self.component, let duration = self.component?.audioData?.duration else {
 return
@@ -353,6 +354,9 @@ final class VideoScrubberComponent: Component {
 }
 
 func scrollViewDidScroll(_ scrollView: UIScrollView) {
+guard !self.ignoreScrollUpdates else {
+return
+}
 self.updateAudioOffset(done: false)
 }
 
@@ -490,8 +494,12 @@ final class VideoScrubberComponent: Component {
 var trimDuration = component.duration
 
 var isFirstTime = false
+var audioChanged = false
 var animateAudioAppearance = false
 if let previousComponent {
+if let previousAudioData = previousComponent.audioData, previousAudioData.title != component.audioData?.title {
+audioChanged = true
+}
 if previousComponent.audioData == nil, component.audioData != nil {
 self.positionAnimation = nil
 animateAudioAppearance = true
@@ -588,12 +596,20 @@ final class VideoScrubberComponent: Component {
 
 self.audioScrollView.isUserInteractionEnabled = self.isAudioSelected || component.audioOnly
 audioTransition.setFrame(view: self.audioScrollView, frame: CGRect(origin: CGPoint(x: 0.0, y: 0.0), size: CGSize(width: availableSize.width, height: audioScrubberHeight)))
-self.audioScrollView.contentSize = CGSize(width: audioTotalWidth, height: audioScrubberHeight)
+let contentSize = CGSize(width: audioTotalWidth, height: audioScrubberHeight)
+self.ignoreScrollUpdates = true
+if self.audioScrollView.contentSize != contentSize {
+self.audioScrollView.contentSize = contentSize
+}
+
 if isFirstTime, let offset = component.audioData?.offset, let duration = component.audioData?.duration, duration > 0.0 {
 let contentOffset = offset * audioTotalWidth / duration
 self.audioScrollView.contentOffset = CGPoint(x: contentOffset, y: 0.0)
+} else if audioChanged {
+self.audioScrollView.contentOffset = .zero
 }
+self.ignoreScrollUpdates = false
 
 audioTransition.setCornerRadius(layer: self.audioClippingView.layer, cornerRadius: self.isAudioSelected ? 0.0 : 9.0)
 
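Note: the ignoreScrollUpdates flag introduced above exists because setting contentSize or contentOffset programmatically also fires scrollViewDidScroll; without the guard, the layout pass would feed back into updateAudioOffset(done: false) and fight the values it just applied. A minimal standalone sketch of the guard pattern (hypothetical class, not part of the commit):

import UIKit

final class AudioScrubberScrollHandler: NSObject, UIScrollViewDelegate {
    var ignoreScrollUpdates = false

    func applyLayout(to scrollView: UIScrollView, contentSize: CGSize, contentOffset: CGPoint) {
        // Programmatic updates also trigger scrollViewDidScroll, so suppress feedback while applying them.
        ignoreScrollUpdates = true
        if scrollView.contentSize != contentSize {
            scrollView.contentSize = contentSize
        }
        scrollView.contentOffset = contentOffset
        ignoreScrollUpdates = false
    }

    func scrollViewDidScroll(_ scrollView: UIScrollView) {
        // Only user-driven scrolling propagates further.
        guard !ignoreScrollUpdates else { return }
        // ... derive the audio offset from scrollView.contentOffset here ...
    }
}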
@@ -627,26 +643,39 @@ final class VideoScrubberComponent: Component {
 }
 
 let audioTitle = NSAttributedString(string: trackTitle, font: Font.semibold(13.0), textColor: .white)
-let audioTitleSize = self.audioTitle.update(
-transition: transition,
-component: AnyComponent(
-MultilineTextComponent(
-text: .plain(audioTitle)
-)
-),
-environment: {},
-containerSize: availableSize
-)
+let audioTitleSize: CGSize
+if !trackTitle.isEmpty {
+audioTitleSize = self.audioTitle.update(
+transition: transition,
+component: AnyComponent(
+MultilineTextComponent(
+text: .plain(audioTitle)
+)
+),
+environment: {},
+containerSize: availableSize
+)
+} else {
+if let audioTitleView = self.audioTitle.view {
+audioTitleSize = audioTitleView.bounds.size
+} else {
+audioTitleSize = .zero
+}
+}
 
 let spacing: CGFloat = 4.0
 let iconSize = CGSize(width: 14.0, height: 14.0)
 let contentTotalWidth = iconSize.width + audioTitleSize.width + spacing
 
+var audioContentTransition = audioTransition
+if animateAudioAppearance, component.audioData != nil {
+audioContentTransition = .immediate
+}
 audioTransition.setAlpha(view: self.audioIconView, alpha: self.isAudioSelected ? 0.0 : 1.0)
 
 let audioIconFrame = CGRect(origin: CGPoint(x: max(8.0, floorToScreenPixels((deselectedAudioClipWidth - contentTotalWidth) / 2.0)), y: floorToScreenPixels((audioScrubberHeight - iconSize.height) / 2.0)), size: iconSize)
-audioTransition.setBounds(view: self.audioIconView, bounds: CGRect(origin: .zero, size: audioIconFrame.size))
-audioTransition.setPosition(view: self.audioIconView, position: audioIconFrame.center)
+audioContentTransition.setBounds(view: self.audioIconView, bounds: CGRect(origin: .zero, size: audioIconFrame.size))
+audioContentTransition.setPosition(view: self.audioIconView, position: audioIconFrame.center)
 
 let trackTitleIsVisible = !self.isAudioSelected && !component.audioOnly && !trackTitle.isEmpty
 if let view = self.audioTitle.view {
@@ -661,7 +690,7 @@ final class VideoScrubberComponent: Component {
 
 let audioTitleFrame = CGRect(origin: CGPoint(x: audioIconFrame.maxX + spacing, y: floorToScreenPixels((audioScrubberHeight - audioTitleSize.height) / 2.0)), size: audioTitleSize)
 view.bounds = CGRect(origin: .zero, size: audioTitleFrame.size)
-audioTransition.setPosition(view: view, position: audioTitleFrame.center)
+audioContentTransition.setPosition(view: view, position: audioTitleFrame.center)
 }
 audioTransition.setAlpha(view: self.audioIconView, alpha: trackTitleIsVisible ? 1.0 : 0.0)
 
@@ -454,6 +454,7 @@ final class StoryItemOverlaysView: UIView {
 placeholderColor: flags.contains(.isDark) ? UIColor(white: 1.0, alpha: 0.1) : UIColor(white: 0.0, alpha: 0.1),
 pointSize: CGSize(width: min(256, itemSize.width), height: min(256, itemSize.height))
 )
+customEmojiView.clipsToBounds = true
 customEmojiView.updateTextColor(flags.contains(.isDark) ? .white : .black)
 
 self.customEmojiLoadDisposable?.dispose()
@@ -491,6 +492,7 @@ final class StoryItemOverlaysView: UIView {
 stickerTransition.setPosition(view: customEmojiView, position: stickerFrame.center)
 stickerTransition.setBounds(view: customEmojiView, bounds: CGRect(origin: CGPoint(), size: stickerFrame.size))
 stickerTransition.setScale(view: customEmojiView, scale: stickerScale)
+customEmojiView.layer.cornerRadius = stickerFrame.size.width * 0.1
 
 customEmojiView.isActive = isActive
 }
@@ -1 +0,0 @@
-