Commit a2dd1a14a8 (parent a688d65873): Animation and placeholder progress
Mirror of https://github.com/Swiftgram/Telegram-iOS.git
@@ -252,7 +252,7 @@ class AnimatedCountLabel: UILabel {
         let currentChars = chars.map { $0.attributedText ?? .init() }
 
-        let maxAnimationDuration: TimeInterval = 0.5
+        let maxAnimationDuration: TimeInterval = 1.2
         var numberOfChanges = abs(newChars.count - currentChars.count)
         for index in 0..<min(newChars.count, currentChars.count) {
             let newCharIndex = newChars.count - 1 - index
@@ -262,7 +262,7 @@ class AnimatedCountLabel: UILabel {
             }
         }
 
-        let initialDuration: TimeInterval = min(maxAnimationDuration / 2, maxAnimationDuration / Double(numberOfChanges)) /// 0.25
+        let initialDuration: TimeInterval = min(0.25, maxAnimationDuration / Double(numberOfChanges)) /// 0.25
 
         // let currentWidth = itemWidth * CGFloat(currentChars.count)
         // let newWidth = itemWidth * CGFloat(newChars.count)
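Taken together, the two hunks above set the timing budget for a count change: the whole transition may now take up to 1.2 s (raised from 0.5 s), while each changed character is capped at 0.25 s and shrinks further when many characters change at once. A standalone sketch of that rule; only the constants come from the diff, the function name is illustrative:

import Foundation

// Per-character slice of the animation budget, as used in the hunks above.
func perCharacterDuration(numberOfChanges: Int,
                          maxAnimationDuration: TimeInterval = 1.2) -> TimeInterval {
    guard numberOfChanges > 0 else { return 0 }
    return min(0.25, maxAnimationDuration / Double(numberOfChanges))
}

// Example: 2 changes -> 0.25 s each, 10 changes -> 0.12 s each.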
@@ -271,7 +271,7 @@ class AnimatedCountLabel: UILabel {
         var changeIndex = 0
 
         var newLayers = [AnimatedCharLayer]()
-
+        let isInitialSet = currentChars.isEmpty
         for index in 0..<min(newChars.count, currentChars.count) {
             let newCharIndex = newChars.count - 1 - index
             let currCharIndex = currentChars.count - 1 - index
@@ -283,7 +283,7 @@ class AnimatedCountLabel: UILabel {
 
             let initialDuration = newChars[newCharIndex] != currentChars[currCharIndex] ? initialDuration : 0
 
-            if newChars[newCharIndex] != currentChars[currCharIndex] {
+            if !isInitialSet && newChars[newCharIndex] != currentChars[currCharIndex] {
                 animateOut(for: chars[currCharIndex].layer, duration: initialDuration, beginTime: TimeInterval(changeIndex) * interItemDelay)
             } else {
                 chars[currCharIndex].layer.removeFromSuperlayer()
@@ -304,7 +304,7 @@ class AnimatedCountLabel: UILabel {
                 )
                 // newLayer.frame = .init(x: CGFloat(chars.count - 1 - index) * (40 + interItemSpacing), y: 0, width: itemWidth, height: itemWidth * 1.8)
                 containerView.layer.addSublayer(newLayer)
-                if newChars[newCharIndex] != currentChars[currCharIndex] {
+                if !isInitialSet && newChars[newCharIndex] != currentChars[currCharIndex] {
                     newLayer.layer.opacity = 0
                     animateIn(for: newLayer.layer, duration: initialDuration, beginTime: TimeInterval(changeIndex) * interItemDelay)
                     changeIndex += 1
@@ -344,7 +344,9 @@ class AnimatedCountLabel: UILabel {
                 }*/
                 newLayer.frame = .init(x: offset/*CGFloat(newCharIndex) * (40 + interItemSpacing)*/, y: 0, width: newChars[newCharIndex].string == "," ? commaFrameWidth : itemWidth, height: itemWidth * 1.8 + (newChars[newCharIndex].string == "," ? 4 : 0))
                 containerView.layer.addSublayer(newLayer)
-                animateIn(for: newLayer.layer, duration: initialDuration, beginTime: TimeInterval(changeIndex) * interItemDelay)
+                if !isInitialSet {
+                    animateIn(for: newLayer.layer, duration: initialDuration, beginTime: TimeInterval(changeIndex) * interItemDelay)
+                }
                 newLayers.append(newLayer)
                 changeIndex += 1
             }
@@ -368,7 +370,7 @@ class AnimatedCountLabel: UILabel {
                 }
                 // containerView.backgroundColor = .red.withAlphaComponent(0.3)
             }
-        } else {
+        } else if countWidth > 0 {
             containerView.frame = .init(x: self.bounds.midX - countWidth / 2, y: 0, width: countWidth, height: self.bounds.height)
             didBegin = true
         }
@@ -389,32 +391,49 @@ class AnimatedCountLabel: UILabel {
         // layer.add(animation, forKey: "opacity")
         //
         //
         let beginTimeOffset: CFTimeInterval = /*beginTime == .zero ? 0 :*/ /*CFTimeInterval(DispatchTime.now().uptimeNanoseconds / 1000000000)*/ layer.convertTime(CACurrentMediaTime(), to: nil)
 
         let opacityInAnimation = CABasicAnimation(keyPath: "opacity")
         opacityInAnimation.fromValue = 1
         opacityInAnimation.toValue = 0
-        opacityInAnimation.duration = duration
-        opacityInAnimation.beginTime = CACurrentMediaTime() + beginTime
-        layer.add(opacityInAnimation, forKey: "opacity")
+        // opacityInAnimation.duration = duration
+        // opacityInAnimation.beginTime = beginTimeOffset + beginTime
+        // opacityInAnimation.completion = { _ in
+        //     layer.removeFromSuperlayer()
+        // }
+        // layer.add(opacityInAnimation, forKey: "opacity")
 
-        Timer.scheduledTimer(withTimeInterval: duration + beginTime, repeats: false) { timer in
-            DispatchQueue.main.async { // After(deadline: .now() + duration + beginTime) {
-                layer.removeFromSuperlayer()
-            }
-        }
+        // let timer = Timer.scheduledTimer(withTimeInterval: duration + beginTime, repeats: false) { timer in
+        DispatchQueue.main.asyncAfter(deadline: .now() + duration * 0.95 + beginTime) {
+            // DispatchQueue.main.async {
+            //     layer.backgroundColor = UIColor.red.withAlphaComponent(0.3).cgColor
+            // }
+            // DispatchQueue.main.asyncAfter(deadline: .now() + 1) {
+            layer.removeFromSuperlayer()
+            // }
+            // timer.invalidate()
+        }
+        // RunLoop.current.add(timer, forMode: .common)
 
         let scaleOutAnimation = CABasicAnimation(keyPath: "transform.scale")
         scaleOutAnimation.fromValue = 1 // layer.presentation()?.value(forKey: "transform.scale") ?? 1
-        scaleOutAnimation.toValue = 0.1
-        scaleOutAnimation.duration = duration
-        scaleOutAnimation.beginTime = CACurrentMediaTime() + beginTime
-        layer.add(scaleOutAnimation, forKey: "scaleout")
+        scaleOutAnimation.toValue = 0.0
+        // scaleOutAnimation.duration = duration
+        // scaleOutAnimation.beginTime = beginTimeOffset + beginTime
+        // layer.add(scaleOutAnimation, forKey: "scaleout")
 
         let translate = CABasicAnimation(keyPath: "transform.translation")
         translate.fromValue = CGPoint.zero
         translate.toValue = CGPoint(x: 0, y: -layer.bounds.height * 0.3)// -layer.bounds.height + 3.0)
-        translate.duration = duration
-        translate.beginTime = CACurrentMediaTime() + beginTime
-        layer.add(translate, forKey: "translate")
+        // translate.duration = duration
+        // translate.beginTime = beginTimeOffset + beginTime
+        // layer.add(translate, forKey: "translate")
+
+        let group = CAAnimationGroup()
+        group.animations = [opacityInAnimation, scaleOutAnimation, translate]
+        group.duration = duration
+        group.beginTime = beginTimeOffset + beginTime
+        layer.add(group, forKey: "out")
     }
 
     func animateIn(for newLayer: CALayer, duration: CFTimeInterval, beginTime: CFTimeInterval) {
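The reworked animateOut above stops scheduling opacity, scale and translation separately and instead drives them through one CAAnimationGroup keyed "out", then detaches the layer shortly before the nominal end time. A minimal self-contained sketch of the same grouping technique; the function name is illustrative, while the values and the cleanup timing mirror the diff:

import Foundation
import QuartzCore

// Drive several property animations through one group so they share a duration and a
// (possibly delayed) begin time, then remove the layer once the stagger delay has elapsed.
func animateOutSketch(layer: CALayer, duration: CFTimeInterval, beginTime: CFTimeInterval) {
    let opacity = CABasicAnimation(keyPath: "opacity")
    opacity.fromValue = 1
    opacity.toValue = 0

    let scale = CABasicAnimation(keyPath: "transform.scale")
    scale.fromValue = 1
    scale.toValue = 0

    let translate = CABasicAnimation(keyPath: "transform.translation.y")
    translate.fromValue = 0
    translate.toValue = -layer.bounds.height * 0.3

    let group = CAAnimationGroup()
    group.animations = [opacity, scale, translate]
    group.duration = duration
    // convertTime maps "now" into the layer's own timeline, so beginTime acts as a stagger delay.
    group.beginTime = layer.convertTime(CACurrentMediaTime(), to: nil) + beginTime
    layer.add(group, forKey: "out")

    // Remove the layer once the staggered animation has (almost) finished, as the diff does.
    DispatchQueue.main.asyncAfter(deadline: .now() + duration * 0.95 + beginTime) {
        layer.removeFromSuperlayer()
    }
}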
@@ -442,7 +461,7 @@ class AnimatedCountLabel: UILabel {
 
         let animation = CAKeyframeAnimation()
         animation.keyPath = "position.y"
-        animation.values = [18, -6, 0]
+        animation.values = [20, -6, 0]
         animation.keyTimes = [0, 0.64, 1]
         animation.timingFunction = CAMediaTimingFunction.init(name: .easeInEaseOut)
         animation.duration = duration / 0.64
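For context, the keyframe animation above is the bounce-in for incoming digits: start 20 pt low, overshoot by 6 pt, settle at 0, with the drop phase ending at keyTime 0.64. A sketch of that curve as a reusable helper; the additive flag and the function name are assumptions, not taken from the file:

import Foundation
import QuartzCore

// Bounce a layer in: drop in from below, overshoot slightly, settle at its resting position.
// isAdditive is assumed here so the values act as offsets from the layer's current position.
func bounceIn(layer: CALayer, duration: CFTimeInterval, beginTime: CFTimeInterval = 0) {
    let bounce = CAKeyframeAnimation(keyPath: "position.y")
    bounce.values = [20, -6, 0]
    bounce.keyTimes = [0, 0.64, 1]
    bounce.timingFunction = CAMediaTimingFunction(name: .easeInEaseOut)
    bounce.duration = duration / 0.64   // keeps the first (drop) segment equal to `duration`
    bounce.isAdditive = true
    bounce.beginTime = layer.convertTime(CACurrentMediaTime(), to: nil) + beginTime
    bounce.fillMode = .backwards
    layer.add(bounce, forKey: "bounceIn")
}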
@@ -1760,7 +1760,7 @@ public final class _MediaStreamComponentController: ViewControllerComponentContainer {
             // self.view.layer.cornerCurve = .continuous
             // }
 
-            self.view.layer.animatePosition(from: self.view.center, to: CGPoint(x: self.view.center.x, y: self.view.bounds.maxY + self.view.bounds.height / 2), duration: 1.3, timingFunction: kCAMediaTimingFunctionSpring, completion: { _ in
+            self.view.layer.animatePosition(from: self.view.center, to: CGPoint(x: self.view.center.x, y: self.view.bounds.maxY + self.view.bounds.height / 2), duration: 0.4, /*timingFunction: kCAMediaTimingFunctionSpring, */completion: { _ in
             })
             // self.view.layer.animateScale(from: 1.0, to: 0.001, duration: 0.3, timingFunction: kCAMediaTimingFunctionSpring)
             // }
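The dismissal above goes through the project's own animatePosition layer helper, now at 0.4 s with the spring timing commented out. A rough plain Core Animation equivalent of that slide-off-screen, offered only as an illustrative sketch with an invented function name:

import UIKit

// Slide a view's layer down past the bottom edge and report completion, roughly what the
// shortened 0.4 s dismissal above does (without the spring timing that was removed).
func slideOffscreen(_ view: UIView, duration: CFTimeInterval = 0.4, completion: @escaping () -> Void) {
    let target = CGPoint(x: view.center.x, y: view.bounds.maxY + view.bounds.height / 2)
    CATransaction.begin()
    CATransaction.setCompletionBlock(completion)
    let slide = CABasicAnimation(keyPath: "position")
    slide.fromValue = view.layer.position
    slide.toValue = target
    slide.duration = duration
    slide.timingFunction = CAMediaTimingFunction(name: .easeInEaseOut)
    view.layer.add(slide, forKey: "dismiss")
    view.layer.position = target // commit the final position so the view does not snap back
    CATransaction.commit()
}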
@@ -151,12 +151,27 @@ final class _MediaStreamVideoComponent: Component {
         let borderShimmer = StandaloneShimmerEffect()
         let shimmerOverlayLayer = CALayer()
         let shimmerBorderLayer = CALayer()
+        let placeholderView = UIImageView()
 
         func update(component: _MediaStreamVideoComponent, availableSize: CGSize, state: State, transition: Transition) -> CGSize {
             self.state = state
+            if component.videoLoading && placeholderView.superview == nil {
+                addSubview(placeholderView)
+            }
+            placeholderView.alpha = 0.7
+            // placeholderView.image = lastFrame[component.call.peerId.id.description]
+            if let frame = lastFrame[component.call.peerId.id.description] {
+                placeholderView.addSubview(frame)
+                frame.frame = placeholderView.bounds
+                placeholderView.backgroundColor = .green
+            } else {
+                placeholderView.subviews.forEach { $0.removeFromSuperview() }
+                placeholderView.backgroundColor = .red
+            }
+            placeholderView.backgroundColor = .red
             if component.videoLoading {
                 if loadingBlurView.superview == nil {
-                    addSubview(loadingBlurView)
+                    // addSubview(loadingBlurView)
                 }
                 if shimmerOverlayLayer.superlayer == nil {
                     loadingBlurView.layer.addSublayer(shimmerOverlayLayer)
@@ -201,6 +216,7 @@ final class _MediaStreamVideoComponent: Component {
 
             if let videoView = self.videoRenderingContext.makeView(input: input, blur: false, forceSampleBufferDisplayLayer: true) {
                 self.videoView = videoView
+                self.placeholderView.removeFromSuperview()
                 self.addSubview(videoView)
                 videoView.alpha = 0
                 UIView.animate(withDuration: 0.3) {
@@ -313,6 +329,12 @@ final class _MediaStreamVideoComponent: Component {
             }
 
             if let videoView = self.videoView {
+                // TODO: REMOVE FROM HERE and move to call end (or at least to background)
+                // if let presentation = videoView.snapshotView(afterScreenUpdates: false) {
+                if videoView.bounds.size.width > 0, let snapshot = videoView.snapshotView(afterScreenUpdates: false) ?? videoView.snapshotView(afterScreenUpdates: true) {
+                    lastFrame[component.call.peerId.id.description] = snapshot// ()!
+                }
+                // }
                 var aspect = videoView.getAspect()
                 // saveAspect(aspect)
                 if component.isFullscreen {
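The snapshot captured above feeds the lastFrame cache declared at the end of this diff, and the placeholder block in the earlier hunk reads it back while the next stream is loading. A stripped-down sketch of that store-and-reuse flow; the cache shape and key mirror the diff, while the variable and function names here are illustrative:

import UIKit

// Cache of the most recent video frame per peer, keyed the same way as in the diff.
var lastFrameSketch: [String: UIView] = [:]

// Store a snapshot view of the video before it disappears.
func cacheLastFrame(of videoView: UIView, peerId: String) {
    guard videoView.bounds.width > 0,
          let snapshot = videoView.snapshotView(afterScreenUpdates: false)
            ?? videoView.snapshotView(afterScreenUpdates: true) else { return }
    lastFrameSketch[peerId] = snapshot
}

// Reuse it as placeholder content while the next stream is still loading.
func showPlaceholder(in placeholderView: UIImageView, peerId: String) {
    placeholderView.subviews.forEach { $0.removeFromSuperview() }
    if let frame = lastFrameSketch[peerId] {
        placeholderView.addSubview(frame)
        frame.frame = placeholderView.bounds
    }
}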
@@ -379,6 +401,10 @@ final class _MediaStreamVideoComponent: Component {
             loadingBlurView.frame = CGRect(origin: CGPoint(x: floor((availableSize.width - videoSize.width) / 2.0), y: floor((availableSize.height - videoSize.height) / 2.0)), size: videoSize)
             loadingBlurView.layer.cornerRadius = 10
 
+            placeholderView.frame = loadingBlurView.frame
+            placeholderView.layer.cornerRadius = 10
+            placeholderView.clipsToBounds = true
+
             shimmerOverlayLayer.frame = loadingBlurView.bounds
             shimmerBorderLayer.frame = loadingBlurView.bounds
             shimmerBorderLayer.mask?.frame = loadingBlurView.bounds
@@ -478,10 +504,26 @@ final class _MediaStreamVideoComponent: Component {
         }
 
         func pictureInPictureControllerWillStartPictureInPicture(_ pictureInPictureController: AVPictureInPictureController) {
-            DispatchQueue.main.asyncAfter(deadline: .now() + 0.1) {
-                self.videoView?.alpha = 0
-            }
+            UIView.animate(withDuration: 0.3) { [self] in
+                // Fading to make
+                let presentation = self.videoView!.snapshotView(afterScreenUpdates: false)! // (self.videoView?.layer.presentation())!
+                self.addSubview(presentation)
+                presentation.frame = self.videoView!.frame
+                // let image = UIGraphicsImageRenderer(size: presentation.bounds.size).image { context in
+                //     presentation.render(in: context.cgContext)
+                // }
+                // print(image)
+                self.videoView?.alpha = 0
+                // self.videoView?.alpha = 0.5
+                // presentation.animateAlpha(from: 1, to: 0, duration: 0.1, completion: { _ in presentation.removeFromSuperlayer() })
+                UIView.animate(withDuration: 0.1, animations: {
+                    presentation.alpha = 0
+                }, completion: { _ in
+                    presentation.removeFromSuperview()
+                })
+                // DispatchQueue.main.asyncAfter(deadline: .now() + 0.05) {
+                //     presentation.removeFromSuperlayer()
+                // }
             UIView.animate(withDuration: 0.1) { [self] in
                 videoBlurView?.alpha = 0
             }
             // TODO: make safe
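The PiP-start handler above covers the live video view with a force-unwrapped snapshot and fades it out; the diff itself flags this with "TODO: make safe". A sketch of the same cover-then-fade idea written without the force unwraps, with an invented function name, only to illustrate the technique:

import UIKit

// Cover the live video view with a snapshot, hide the live view, then fade the snapshot out.
// Degrades gracefully when no snapshot is available.
func coverWithSnapshotAndFade(videoView: UIView, in container: UIView) {
    guard let cover = videoView.snapshotView(afterScreenUpdates: false) else {
        videoView.alpha = 0
        return
    }
    cover.frame = videoView.frame
    container.addSubview(cover)
    videoView.alpha = 0
    UIView.animate(withDuration: 0.1, animations: {
        cover.alpha = 0
    }, completion: { _ in
        cover.removeFromSuperview()
    })
}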
@@ -533,3 +575,25 @@ final class _MediaStreamVideoComponent: Component {
         return view.update(component: self, availableSize: availableSize, state: state, transition: transition)
     }
 }
+
+// TODO: move to appropriate place
+var lastFrame: [String: UIView] = [:]
+
+extension UIView {
+    func snapshot() -> UIImage? {
+        UIGraphicsBeginImageContextWithOptions(bounds.size, true, UIScreen.main.scale)
+
+        guard let currentContext = UIGraphicsGetCurrentContext() else {
+            UIGraphicsEndImageContext()
+            return nil
+        }
+
+        layer.render(in: currentContext)
+
+        let image = UIGraphicsGetImageFromCurrentImageContext()
+
+        UIGraphicsEndImageContext()
+
+        return image
+    }
+}
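The commit ends by adding a UIGraphicsBeginImageContext-based snapshot() helper on UIView. The commented-out code in the PiP handler already gestures at UIGraphicsImageRenderer, so for comparison here is the same helper sketched against that newer API; this variant and the usage comment are not part of the commit:

import UIKit

// Equivalent of the snapshot() helper above, using UIGraphicsImageRenderer instead of the
// legacy begin/end image-context pair.
extension UIView {
    func rendererSnapshot() -> UIImage {
        let renderer = UIGraphicsImageRenderer(bounds: bounds)
        return renderer.image { context in
            layer.render(in: context.cgContext)
        }
    }
}

// Possible wiring, matching the commented-out `placeholderView.image = ...` idea above:
// placeholderView.image = videoView.rendererSnapshot()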