Video avatar fixes

Ilya Laktyushin 2020-07-12 01:19:13 +03:00
parent 69f662b17f
commit 64e7fe01d9
8 changed files with 115 additions and 220 deletions

View File

@@ -1,30 +1,7 @@
#import "GPUImage.h"
typedef NS_ENUM(NSInteger, YUGPUImageHighPassSkinSmoothingRadiusUnit) {
YUGPUImageHighPassSkinSmoothingRadiusUnitPixel = 1,
YUGPUImageHighPassSkinSmoothingRadiusUnitFractionOfImageWidth = 2
};
@interface YUGPUImageHighPassSkinSmoothingRadius : NSObject <NSCopying,NSSecureCoding>
@property (nonatomic,readonly) CGFloat value;
@property (nonatomic,readonly) YUGPUImageHighPassSkinSmoothingRadiusUnit unit;
- (instancetype)init NS_UNAVAILABLE;
+ (instancetype)radiusInPixels:(CGFloat)pixels;
+ (instancetype)radiusAsFractionOfImageWidth:(CGFloat)fraction;
@end
@interface YUGPUImageHighPassSkinSmoothingFilter : GPUImageFilterGroup
@property (nonatomic) CGFloat amount;
@property (nonatomic,copy) NSArray<NSValue *> *controlPoints;
@property (nonatomic,copy) YUGPUImageHighPassSkinSmoothingRadius *radius;
@property (nonatomic) CGFloat sharpnessFactor;
@end

View File

@@ -1,6 +1,5 @@
#import "YUGPUImageHighPassSkinSmoothingFilter.h"
#import "GPUImageExposureFilter.h"
#import "GPUImageDissolveBlendFilter.h"
#import "GPUImageSharpenFilter.h"
#import "GPUImageToneCurveFilter.h"
@@ -19,9 +18,9 @@ SHADER_STRING
for (int i = 0; i < 3; ++i)
{
if (hardLightColor < 0.5) {
hardLightColor = hardLightColor * hardLightColor * 2.;
hardLightColor = hardLightColor * hardLightColor * 2.0;
} else {
hardLightColor = 1. - (1. - hardLightColor) * (1. - hardLightColor) * 2.;
hardLightColor = 1.0 - (1.0 - hardLightColor) * (1.0 - hardLightColor) * 2.0;
}
}
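The change in this hunk is a literal cleanup (2. becomes 2.0); the logic is unchanged. For reference, a minimal Swift sketch (not project code) of the hard-light curve the loop applies three times to push mid-tones toward the extremes:

    // Sketch (plain Swift, not project code): the hard-light self-blend the
    // mask shader applies three times in a row to harden the contrast of the
    // high-pass channel before it is used as a smoothing mask.
    func hardLightSelfBlend(_ value: Float, passes: Int = 3) -> Float {
        var c = value
        for _ in 0..<passes {
            if c < 0.5 {
                c = c * c * 2.0
            } else {
                c = 1.0 - (1.0 - c) * (1.0 - c) * 2.0
            }
        }
        return c
    }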
@@ -40,7 +39,8 @@ SHADER_STRING
uniform sampler2D sourceImage;
void main() {
vec4 image = texture2D(sourceImage, texCoord);
vec4 source = texture2D(sourceImage, texCoord);
vec4 image = vec4(source.rgb * pow(2.0, -1.0), source.w);
vec4 base = vec4(image.g,image.g,image.g,1.0);
vec4 overlay = vec4(image.b,image.b,image.b,1.0);
float ba = 2.0 * overlay.b * base.b + overlay.b * (1.0 - base.a) + base.b * (1.0 - overlay.a);
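The separate GPUImageExposureFilter (exposure = -1.0) is folded into the shader as a pow(2.0, -1.0) factor, i.e. the input is darkened by one stop before the mask is built. A minimal sketch of that relationship in plain Swift (illustrative, not project code):

    import Foundation

    // Sketch (not project code): exposure adjustment in stops, matching the
    // pow(2.0, -1.0) factor the shader now applies inline instead of routing
    // the input through a separate GPUImageExposureFilter with exposure = -1.0.
    func applyExposure(_ rgb: SIMD3<Float>, stops: Float) -> SIMD3<Float> {
        return rgb * powf(2.0, stops)   // stops == -1 halves the linear intensity
    }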
@@ -67,7 +67,6 @@ SHADER_STRING
@interface YUGPUImageStillImageHighPassFilter : GPUImageFilterGroup
@property (nonatomic) CGFloat radiusInPixels;
@property (nonatomic, weak) GPUImageGaussianBlurFilter *blurFilter;
@end
@@ -95,10 +94,6 @@ SHADER_STRING
self.blurFilter.blurRadiusInPixels = radiusInPixels;
}
- (CGFloat)radiusInPixels {
return self.blurFilter.blurRadiusInPixels;
}
@end
@interface YUCIHighPassSkinSmoothingMaskGenerator : GPUImageFilterGroup
@@ -136,56 +131,6 @@ SHADER_STRING
self.highPassFilter.radiusInPixels = highPassRadiusInPixels;
}
- (CGFloat)highPassRadiusInPixels {
return self.highPassFilter.radiusInPixels;
}
@end
@interface YUGPUImageHighPassSkinSmoothingRadius ()
@property (nonatomic) CGFloat value;
@property (nonatomic) YUGPUImageHighPassSkinSmoothingRadiusUnit unit;
@end
@implementation YUGPUImageHighPassSkinSmoothingRadius
+ (instancetype)radiusInPixels:(CGFloat)pixels {
YUGPUImageHighPassSkinSmoothingRadius *radius = [YUGPUImageHighPassSkinSmoothingRadius new];
radius.unit = YUGPUImageHighPassSkinSmoothingRadiusUnitPixel;
radius.value = pixels;
return radius;
}
+ (instancetype)radiusAsFractionOfImageWidth:(CGFloat)fraction {
YUGPUImageHighPassSkinSmoothingRadius *radius = [YUGPUImageHighPassSkinSmoothingRadius new];
radius.unit = YUGPUImageHighPassSkinSmoothingRadiusUnitFractionOfImageWidth;
radius.value = fraction;
return radius;
}
- (id)copyWithZone:(NSZone *)zone {
return self;
}
- (instancetype)initWithCoder:(NSCoder *)aDecoder {
if (self = [super init]) {
self.value = [[aDecoder decodeObjectOfClass:[NSNumber class] forKey:NSStringFromSelector(@selector(value))] floatValue];
self.unit = [[aDecoder decodeObjectOfClass:[NSNumber class] forKey:NSStringFromSelector(@selector(unit))] integerValue];
}
return self;
}
- (void)encodeWithCoder:(NSCoder *)aCoder {
[aCoder encodeObject:@(self.value) forKey:NSStringFromSelector(@selector(value))];
[aCoder encodeObject:@(self.unit) forKey:NSStringFromSelector(@selector(unit))];
}
+ (BOOL)supportsSecureCoding {
return YES;
}
@end
NSString * const YUGPUImageHighpassSkinSmoothingCompositingFilterFragmentShaderString =
@@ -204,18 +149,15 @@ SHADER_STRING
vec4 image = texture2D(sourceImage, texCoord);
vec4 toneCurvedImage = texture2D(inputImageTexture2, texCoord2);
vec4 mask = texture2D(inputImageTexture3, texCoord3);
gl_FragColor = vec4(mix(image.rgb,toneCurvedImage.rgb,1.0 - mask.b),1.0);
gl_FragColor = vec4(mix(image.rgb, toneCurvedImage.rgb, 1.0 - mask.b), 1.0);
}
);
@interface YUGPUImageHighPassSkinSmoothingFilter ()
@property (nonatomic,weak) YUCIHighPassSkinSmoothingMaskGenerator *maskGenerator;
@property (nonatomic,weak) GPUImageDissolveBlendFilter *dissolveFilter;
@property (nonatomic,weak) GPUImageSharpenFilter *sharpenFilter;
@property (nonatomic,weak) GPUImageToneCurveFilter *skinToneCurveFilter;
@property (nonatomic) CGSize currentInputSize;
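For reference, a minimal Swift sketch (not part of the project) of the per-pixel composite the compositing shader above performs: where mask.b approaches 1.0 the original pixel wins, where it approaches 0.0 the tone-curved, smoothed pixel wins.

    import simd

    // Sketch (not project code): the blend done by
    // gl_FragColor = vec4(mix(image.rgb, toneCurvedImage.rgb, 1.0 - mask.b), 1.0);
    func composite(original: SIMD3<Float>, toneCurved: SIMD3<Float>, maskBlue: Float) -> SIMD3<Float> {
        return simd_mix(original, toneCurved, SIMD3<Float>(repeating: 1.0 - maskBlue))
    }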
@@ -226,14 +168,9 @@ SHADER_STRING
- (instancetype)init {
if (self = [super init]) {
GPUImageExposureFilter *exposureFilter = [[GPUImageExposureFilter alloc] init];
exposureFilter.exposure = -1.0;
[self addFilter:exposureFilter];
YUCIHighPassSkinSmoothingMaskGenerator *maskGenerator = [[YUCIHighPassSkinSmoothingMaskGenerator alloc] init];
[self addFilter:maskGenerator];
self.maskGenerator = maskGenerator;
[exposureFilter addTarget:maskGenerator];
GPUImageToneCurveFilter *skinToneCurveFilter = [[GPUImageToneCurveFilter alloc] init];
[self addFilter:skinToneCurveFilter];
@@ -258,21 +195,14 @@ SHADER_STRING
[composeFilter addTarget:sharpen];
self.sharpenFilter = sharpen;
self.initialFilters = @[exposureFilter,skinToneCurveFilter,dissolveFilter,composeFilter];
self.initialFilters = @[maskGenerator,skinToneCurveFilter,dissolveFilter,composeFilter];
self.terminalFilter = sharpen;
//set defaults
self.amount = 0.75;
self.radius = [YUGPUImageHighPassSkinSmoothingRadius radiusAsFractionOfImageWidth:4.5/750.0];
self.sharpnessFactor = 0.4;
CGPoint controlPoint0 = CGPointMake(0, 0);
CGPoint controlPoint1 = CGPointMake(120/255.0, 146/255.0);
CGPoint controlPoint2 = CGPointMake(1.0, 1.0);
self.controlPoints = @[[NSValue valueWithCGPoint:controlPoint0],
[NSValue valueWithCGPoint:controlPoint1],
[NSValue valueWithCGPoint:controlPoint2]];
self.skinToneCurveFilter.rgbCompositeControlPoints = @[
[NSValue valueWithCGPoint:CGPointMake(0.0, 0.0)],
[NSValue valueWithCGPoint:CGPointMake(0.47, 0.57)],
[NSValue valueWithCGPoint:CGPointMake(1.0, 1.0)]
];
}
return self;
}
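The public controlPoints property is dropped in favor of hard-coded composite control points. A quick check (plain Swift, illustrative only) that the new values describe essentially the same curve as the old defaults:

    // Sketch: the mid control point of the old skin-tone curve expressed as a
    // fraction, showing the new hard-coded values match to two decimal places.
    let oldMidPoint = (x: 120.0 / 255.0, y: 146.0 / 255.0)   // ≈ (0.4706, 0.5725)
    let newMidPoint = (x: 0.47, y: 0.57)                     // hard-coded replacement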
@@ -290,45 +220,17 @@ SHADER_STRING
- (void)updateHighPassRadius {
CGSize inputSize = self.currentInputSize;
if (inputSize.width * inputSize.height > 0) {
CGFloat radiusInPixels = 0;
switch (self.radius.unit) {
case YUGPUImageHighPassSkinSmoothingRadiusUnitPixel:
radiusInPixels = self.radius.value;
break;
case YUGPUImageHighPassSkinSmoothingRadiusUnitFractionOfImageWidth:
radiusInPixels = ceil(inputSize.width * self.radius.value);
break;
default:
break;
}
CGFloat radiusInPixels = inputSize.width * 0.006;
if (radiusInPixels != self.maskGenerator.highPassRadiusInPixels) {
self.maskGenerator.highPassRadiusInPixels = radiusInPixels;
}
}
}
- (void)setRadius:(YUGPUImageHighPassSkinSmoothingRadius *)radius {
_radius = radius.copy;
[self updateHighPassRadius];
}
- (void)setControlPoints:(NSArray<NSValue *> *)controlPoints {
self.skinToneCurveFilter.rgbCompositeControlPoints = controlPoints;
}
- (NSArray<NSValue *> *)controlPoints {
return self.skinToneCurveFilter.rgbCompositeControlPoints;
}
- (void)setAmount:(CGFloat)amount {
_amount = amount;
self.dissolveFilter.mix = amount;
self.sharpenFilter.sharpness = self.sharpnessFactor * amount;
}
- (void)setSharpnessFactor:(CGFloat)sharpnessFactor {
_sharpnessFactor = sharpnessFactor;
self.sharpenFilter.sharpness = sharpnessFactor * self.amount;
self.sharpenFilter.sharpness = 0.4 * amount;
}
@end
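With the YUGPUImageHighPassSkinSmoothingRadius class gone, updateHighPassRadius now derives the blur radius directly from the input width. A one-line sketch (not project code) showing that the hard-coded factor equals the previous default:

    import CoreGraphics

    // Sketch: the fixed high-pass radius now used, equivalent to the removed
    // default of radiusAsFractionOfImageWidth(4.5 / 750.0).
    func highPassRadiusInPixels(forImageWidth width: CGFloat) -> CGFloat {
        return width * 0.006   // 4.5 / 750.0 == 0.006
    }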

View File

@@ -101,7 +101,7 @@ public func updatePeerPhotoInternal(postbox: Postbox, network: Network, stateMan
return combineLatest(mappedPhoto, mappedVideo)
|> mapError { _ -> UploadPeerPhotoError in return .generic }
|> mapToSignal { photoResult, videoResult -> Signal<(UpdatePeerPhotoStatus, MediaResource?), UploadPeerPhotoError> in
|> mapToSignal { photoResult, videoResult -> Signal<(UpdatePeerPhotoStatus, MediaResource?, MediaResource?), UploadPeerPhotoError> in
switch photoResult.content {
case .error:
return .fail(.generic)
@@ -112,7 +112,7 @@ public func updatePeerPhotoInternal(postbox: Postbox, network: Network, stateMan
if let _ = videoResult {
mappedProgress *= 0.2
}
return .single((.progress(mappedProgress), photoResult.resource))
return .single((.progress(mappedProgress), photoResult.resource, videoResult?.resource))
case let .inputFile(file):
var videoFile: Api.InputFile?
if let videoResult = videoResult {
@@ -123,7 +123,7 @@ public func updatePeerPhotoInternal(postbox: Postbox, network: Network, stateMan
switch resultData {
case let .progress(progress):
let mappedProgress = 0.2 + progress * 0.8
return .single((.progress(mappedProgress), photoResult.resource))
return .single((.progress(mappedProgress), photoResult.resource, videoResult.resource))
case let .inputFile(file):
videoFile = file
break
@@ -143,7 +143,7 @@ public func updatePeerPhotoInternal(postbox: Postbox, network: Network, stateMan
return network.request(Api.functions.photos.uploadProfilePhoto(flags: flags, file: file, video: videoFile, videoStartTs: videoStartTimestamp))
|> mapError { _ in return UploadPeerPhotoError.generic }
|> mapToSignal { photo -> Signal<(UpdatePeerPhotoStatus, MediaResource?), UploadPeerPhotoError> in
|> mapToSignal { photo -> Signal<(UpdatePeerPhotoStatus, MediaResource?, MediaResource?), UploadPeerPhotoError> in
var representations: [TelegramMediaImageRepresentation] = []
var videoRepresentations: [TelegramMediaImage.VideoRepresentation] = []
switch photo {
@@ -194,7 +194,7 @@ public func updatePeerPhotoInternal(postbox: Postbox, network: Network, stateMan
}
}
}
return postbox.transaction { transaction -> (UpdatePeerPhotoStatus, MediaResource?) in
return postbox.transaction { transaction -> (UpdatePeerPhotoStatus, MediaResource?, MediaResource?) in
if let peer = transaction.getPeer(peer.id) {
updatePeers(transaction: transaction, peers: [peer], update: { (_, peer) -> Peer? in
if let peer = peer as? TelegramUser {
@@ -204,7 +204,7 @@ public func updatePeerPhotoInternal(postbox: Postbox, network: Network, stateMan
}
})
}
return (.complete(representations), photoResult.resource)
return (.complete(representations), photoResult.resource, videoResult?.resource)
} |> mapError {_ in return UploadPeerPhotoError.generic}
}
} else {
@@ -228,7 +228,7 @@ public func updatePeerPhotoInternal(postbox: Postbox, network: Network, stateMan
return request
|> mapError {_ in return UploadPeerPhotoError.generic}
|> mapToSignal { updates -> Signal<(UpdatePeerPhotoStatus, MediaResource?), UploadPeerPhotoError> in
|> mapToSignal { updates -> Signal<(UpdatePeerPhotoStatus, MediaResource?, MediaResource?), UploadPeerPhotoError> in
guard let chat = updates.chats.first, chat.peerId == peer.id, let groupOrChannel = parseTelegramGroupOrChannel(chat: chat) else {
stateManager?.addUpdates(updates)
return .fail(.generic)
@@ -236,7 +236,7 @@ public func updatePeerPhotoInternal(postbox: Postbox, network: Network, stateMan
return mapResourceToAvatarSizes(photoResult.resource, groupOrChannel.profileImageRepresentations)
|> castError(UploadPeerPhotoError.self)
|> mapToSignal { generatedData -> Signal<(UpdatePeerPhotoStatus, MediaResource?), UploadPeerPhotoError> in
|> mapToSignal { generatedData -> Signal<(UpdatePeerPhotoStatus, MediaResource?, MediaResource?), UploadPeerPhotoError> in
stateManager?.addUpdates(updates)
for (index, data) in generatedData {
@@ -246,12 +246,12 @@ public func updatePeerPhotoInternal(postbox: Postbox, network: Network, stateMan
assertionFailure()
}
}
return postbox.transaction { transaction -> (UpdatePeerPhotoStatus, MediaResource?) in
return postbox.transaction { transaction -> (UpdatePeerPhotoStatus, MediaResource?, MediaResource?) in
updatePeers(transaction: transaction, peers: [groupOrChannel], update: { _, updated in
return updated
})
return (.complete(groupOrChannel.profileImageRepresentations), photoResult.resource)
return (.complete(groupOrChannel.profileImageRepresentations), photoResult.resource, videoResult?.resource)
}
|> mapError { _ in return .generic }
}
@@ -262,20 +262,33 @@ public func updatePeerPhotoInternal(postbox: Postbox, network: Network, stateMan
}
}
}
|> map { result, resource -> UpdatePeerPhotoStatus in
switch result {
case let .complete(representations):
if let resource = resource as? LocalFileReferenceMediaResource {
if let data = try? Data(contentsOf: URL(fileURLWithPath: resource.localFilePath), options: [.mappedRead] ) {
for representation in representations {
postbox.mediaBox.storeResourceData(representation.resource.id, data: data)
|> mapToSignal { result, resource, videoResource -> Signal<UpdatePeerPhotoStatus, UploadPeerPhotoError> in
if let videoResource = videoResource {
return fetchAndUpdateCachedPeerData(accountPeerId: accountPeerId, peerId: peer.id, network: network, postbox: postbox)
|> castError(UploadPeerPhotoError.self)
|> mapToSignal { status -> Signal<UpdatePeerPhotoStatus, UploadPeerPhotoError> in
return postbox.transaction { transaction in
let cachedData = transaction.getPeerCachedData(peerId: peer.id)
if let cachedData = cachedData as? CachedChannelData {
if let photo = cachedData.photo {
for representation in photo.videoRepresentations {
postbox.mediaBox.copyResourceData(from: videoResource.id, to: representation.resource.id)
}
}
} else if let cachedData = cachedData as? CachedGroupData {
if let photo = cachedData.photo {
for representation in photo.videoRepresentations {
postbox.mediaBox.copyResourceData(from: videoResource.id, to: representation.resource.id)
}
}
}
return result
}
default:
break
|> castError(UploadPeerPhotoError.self)
}
} else {
return .single(result)
}
return result
}
} else {
if let _ = peer as? TelegramUser {
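A condensed sketch of the new final step (all types and calls are the ones visible in this hunk; the helper name itself is hypothetical, not project API): once the upload completes, the locally uploaded video data is copied into the cached photo's video representations so the new animated avatar plays back without a re-download.

    // Sketch only, assuming the Postbox/TelegramCore types shown in the diff.
    func copyUploadedVideoIntoCache(transaction: Transaction, mediaBox: MediaBox, peerId: PeerId, videoResource: MediaResource) {
        let cachedData = transaction.getPeerCachedData(peerId: peerId)
        var photo: TelegramMediaImage?
        if let channelData = cachedData as? CachedChannelData {
            photo = channelData.photo
        } else if let groupData = cachedData as? CachedGroupData {
            photo = groupData.photo
        }
        if let photo = photo {
            for representation in photo.videoRepresentations {
                // Reuse the already-uploaded local data for each cached video size.
                mediaBox.copyResourceData(from: videoResource.id, to: representation.resource.id)
            }
        }
    }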

View File

@@ -101,6 +101,9 @@ public enum PresentationResourceKey: Int32 {
case chatInstantVideoWithWallpaperBackgroundImage
case chatInstantVideoWithoutWallpaperBackgroundImage
case chatActionPhotoWithWallpaperBackgroundImage
case chatActionPhotoWithoutWallpaperBackgroundImage
case chatUnreadBarBackgroundImage
case chatBubbleFileCloudFetchMediaIcon

View File

@@ -28,6 +28,23 @@ private func generateInstantVideoBackground(fillColor: UIColor, strokeColor: UIC
})
}
private func generateActionPhotoBackground(fillColor: UIColor, strokeColor: UIColor) -> UIImage? {
return generateImage(CGSize(width: 214.0, height: 214.0), rotatedContext: { size, context in
let lineWidth: CGFloat = 0.5
context.clear(CGRect(origin: CGPoint(), size: size))
context.setFillColor(strokeColor.cgColor)
let strokePath = UIBezierPath(roundedRect: CGRect(origin: CGPoint(), size: size), cornerRadius: 15.0)
context.addPath(strokePath.cgPath)
context.fillPath()
context.setFillColor(fillColor.cgColor)
let fillPath = UIBezierPath(roundedRect: CGRect(origin: CGPoint(x: lineWidth, y: lineWidth), size: CGSize(width: size.width - lineWidth * 2.0, height: size.height - lineWidth * 2.0)), cornerRadius: 15.0)
context.addPath(fillPath.cgPath)
context.fillPath()
})
}
private func generateInputPanelButtonBackgroundImage(fillColor: UIColor, strokeColor: UIColor) -> UIImage? {
let radius: CGFloat = 5.0
let shadowSize: CGFloat = 1.0
@@ -140,6 +157,13 @@ public struct PresentationResourcesChat {
})
}
public static func chatActionPhotoBackgroundImage(_ theme: PresentationTheme, wallpaper: Bool) -> UIImage? {
let key: PresentationResourceKey = !wallpaper ? PresentationResourceKey.chatActionPhotoWithoutWallpaperBackgroundImage : PresentationResourceKey.chatActionPhotoWithWallpaperBackgroundImage
return theme.image(key.rawValue, { theme in
return generateActionPhotoBackground(fillColor: theme.chat.message.freeform.withWallpaper.fill, strokeColor: theme.chat.message.freeform.withWallpaper.stroke)
})
}
public static func chatUnreadBarBackgroundImage(_ theme: PresentationTheme) -> UIImage? {
return theme.image(PresentationResourceKey.chatUnreadBarBackgroundImage.rawValue, { theme in
return generateImage(CGSize(width: 1.0, height: 8.0), contextGenerator: { size, context -> Void in

View File

@@ -75,7 +75,6 @@ class ChatMessageActionBubbleContentNode: ChatMessageBubbleContentNode {
let backgroundContainer = UIView()
backgroundContainer.addSubview(backgroundView)
let backgroundFrame = strongSelf.mediaBackgroundNode.layer.convert(strongSelf.mediaBackgroundNode.bounds, to: resultView.layer)
backgroundContainer.frame = CGRect(origin: CGPoint(x: -2.0, y: -2.0), size: CGSize(width: resultView.frame.width + 4.0, height: resultView.frame.height + 4.0))
backgroundView.frame = backgroundContainer.bounds
let viewWithBackground = UIView()
@@ -131,7 +130,7 @@ class ChatMessageActionBubbleContentNode: ChatMessageBubbleContentNode {
return { item, layoutConstants, _, _, _ in
let contentProperties = ChatMessageBubbleContentProperties(hidesSimpleAuthorHeader: true, headerSpacing: 0.0, hidesBackground: .always, forceFullCorners: false, forceAlignment: .center)
let instantVideoBackgroundImage = PresentationResourcesChat.chatInstantVideoBackgroundImage(item.presentationData.theme.theme, wallpaper: !item.presentationData.theme.wallpaper.isEmpty)
let backgroundImage = PresentationResourcesChat.chatActionPhotoBackgroundImage(item.presentationData.theme.theme, wallpaper: !item.presentationData.theme.wallpaper.isEmpty)
return (contentProperties, nil, CGFloat.greatestFiniteMagnitude, { constrainedSize, position in
let attributedString = attributedServiceMessageString(theme: item.presentationData.theme, strings: item.presentationData.strings, nameDisplayOrder: item.presentationData.nameDisplayOrder, message: item.message, accountPeerId: item.context.account.peerId)
@@ -173,12 +172,10 @@ class ChatMessageActionBubbleContentNode: ChatMessageBubbleContentNode {
labelRects[i].origin.x = floor((labelLayout.size.width - labelRects[i].width) / 2.0)
}
let serviceColor = serviceMessageColorComponents(theme: item.presentationData.theme.theme, wallpaper: item.presentationData.theme.wallpaper)
let backgroundApply = backgroundLayout(serviceColor.fill, labelRects, 10.0, 10.0, 0.0)
var backgroundSize = CGSize(width: labelLayout.size.width + 8.0 + 8.0, height: labelLayout.size.height + 4.0)
let layoutInsets = UIEdgeInsets(top: 4.0, left: 0.0, bottom: 4.0, right: 0.0)
if let _ = image {
backgroundSize.height += imageSize.height + 10
@@ -189,7 +186,7 @@ class ChatMessageActionBubbleContentNode: ChatMessageBubbleContentNode {
if let strongSelf = self {
strongSelf.item = item
let maskPath = UIBezierPath(ovalIn: CGRect(origin: CGPoint(), size: imageSize))
let maskPath = UIBezierPath(roundedRect: CGRect(origin: CGPoint(), size: imageSize), cornerRadius: 15.5)
let imageFrame = CGRect(origin: CGPoint(x: floorToScreenPixels((backgroundSize.width - imageSize.width) / 2.0), y: labelLayout.size.height + 10 + 2), size: imageSize)
if let image = image {
@@ -221,7 +218,7 @@ class ChatMessageActionBubbleContentNode: ChatMessageBubbleContentNode {
imageNode.removeFromSupernode()
strongSelf.imageNode = nil
}
strongSelf.mediaBackgroundNode.image = instantVideoBackgroundImage
strongSelf.mediaBackgroundNode.image = backgroundImage
if let image = image, let video = image.videoRepresentations.last, let id = image.id?.id {
let videoFileReference = FileMediaReference.standalone(media: TelegramMediaFile(fileId: MediaId(namespace: Namespaces.Media.LocalFile, id: 0), partialReference: nil, resource: video.resource, previewRepresentations: image.representations, videoThumbnails: [], immediateThumbnailData: image.immediateThumbnailData, mimeType: "video/mp4", size: nil, attributes: [.Animated, .Video(duration: 0, size: video.dimensions, flags: [])]))
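The avatar media in the service message is now masked to a rounded rectangle instead of an oval, matching the 15pt corners of the new background image. A minimal UIKit-level sketch of such a mask (illustrative only, not the ASDisplayNode code above):

    import UIKit

    // Sketch (not project code): a 15.5pt rounded-rect mask layer in place of
    // the previous oval mask.
    func makeAvatarMask(for imageSize: CGSize) -> CAShapeLayer {
        let mask = CAShapeLayer()
        mask.path = UIBezierPath(roundedRect: CGRect(origin: .zero, size: imageSize), cornerRadius: 15.5).cgPath
        return mask
    }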

View File

@@ -193,20 +193,7 @@ final class PeerInfoAvatarListItemNode: ASDisplayNode {
private var didSetReady: Bool = false
var item: PeerInfoAvatarListItem?
var canAttachVideo: Bool = true
var isExpanded: Bool = false {
didSet {
if let videoNode = self.videoNode, videoNode.canAttachContent != self.isExpanded {
videoNode.canAttachContent = self.isExpanded && self.canAttachVideo
if videoNode.canAttachContent {
videoNode.play()
}
}
}
}
private var statusPromise = Promise<(MediaPlayerStatus?, Double?)?>()
var mediaStatus: Signal<(MediaPlayerStatus?, Double?)?, NoError> {
get {
@@ -280,8 +267,9 @@ final class PeerInfoAvatarListItemNode: ASDisplayNode {
let videoContent = NativeVideoContent(id: .profileVideo(id, nil), fileReference: videoFileReference, streamVideo: isMediaStreamable(resource: video.resource) ? .conservative : .none, loopVideo: true, enableSound: false, fetchAutomatically: true, onlyFullSizeThumbnail: true, autoFetchFullSizeThumbnail: true, startTimestamp: video.startTimestamp, continuePlayingWithoutSoundOnLostAudioSession: false, placeholderColor: .clear)
if videoContent.id != self.videoContent?.id {
let videoNode = UniversalVideoNode(postbox: self.context.account.postbox, audioSession: mediaManager.audioSession, manager: mediaManager.universalVideoManager, decoration: GalleryVideoDecoration(), content: videoContent, priority: .embedded)
let videoNode = UniversalVideoNode(postbox: self.context.account.postbox, audioSession: mediaManager.audioSession, manager: mediaManager.universalVideoManager, decoration: GalleryVideoDecoration(), content: videoContent, priority: .secondaryOverlay)
videoNode.isUserInteractionEnabled = false
videoNode.canAttachContent = true
videoNode.isHidden = true
if let _ = video.startTimestamp {
@@ -299,7 +287,7 @@ final class PeerInfoAvatarListItemNode: ASDisplayNode {
|> take(1)
|> deliverOnMainQueue).start(completed: { [weak self] in
if let strongSelf = self {
Queue.mainQueue().after(0.12) {
Queue.mainQueue().after(0.15) {
strongSelf.videoNode?.isHidden = false
}
}
@@ -308,7 +296,8 @@ final class PeerInfoAvatarListItemNode: ASDisplayNode {
self.playbackStatusDisposable.set(nil)
videoNode.isHidden = false
}
videoNode.play()
self.videoContent = videoContent
self.videoNode = videoNode
self.statusPromise.set(videoNode.status |> map { ($0, video.startTimestamp) })
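The list-item node no longer waits for isExpanded: it attaches content, starts playback, and reveals the node only once the readiness signal above fires. A minimal sketch of that pattern, where firstFrameReady is a hypothetical stand-in for whichever signal the diff pipes through take(1):

    // Sketch only; UniversalVideoNode, Signal, and Queue are the project types
    // used above, firstFrameReady is a hypothetical name.
    func revealWhenReady(videoNode: UniversalVideoNode, firstFrameReady: Signal<Void, NoError>) {
        // Attach and start the video immediately, but keep it hidden...
        videoNode.canAttachContent = true
        videoNode.isHidden = true
        videoNode.play()
        // ...and only reveal it shortly after the first frame is ready, so no
        // empty video surface flashes over the still image.
        let _ = (firstFrameReady
        |> take(1)
        |> deliverOnMainQueue).start(completed: {
            Queue.mainQueue().after(0.15) {
                videoNode.isHidden = false
            }
        })
    }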
@@ -338,12 +327,6 @@ final class PeerInfoAvatarListItemNode: ASDisplayNode {
if let videoNode = self.videoNode {
videoNode.updateLayout(size: imageSize, transition: .immediate)
videoNode.frame = imageFrame
if videoNode.canAttachContent != self.isExpanded {
videoNode.canAttachContent = self.isExpanded && self.canAttachVideo
if videoNode.canAttachContent {
videoNode.play()
}
}
}
}
}
@@ -373,15 +356,7 @@ final class PeerInfoAvatarListContainerNode: ASDisplayNode {
private var transitionFraction: CGFloat = 0.0
private var validLayout: CGSize?
var isCollapsing = false {
didSet {
if oldValue != self.isCollapsing && !self.isCollapsing {
for (_, itemNode) in self.itemNodes {
itemNode.isExpanded = self.isExpanded
}
}
}
}
var isCollapsing = false
private var isExpanded = false
private let disposable = MetaDisposable()
@@ -655,7 +630,7 @@ final class PeerInfoAvatarListContainerNode: ASDisplayNode {
}
func selectFirstItem() {
var previousIndex = self.currentIndex
let previousIndex = self.currentIndex
self.currentIndex = 0
if self.currentIndex != previousIndex {
self.currentIndexUpdated?()
@@ -773,7 +748,7 @@ final class PeerInfoAvatarListContainerNode: ASDisplayNode {
case let .topImage(representations, videoRepresentations, _, immediateThumbnailData, _):
entries.append(entry)
items.append(.topImage(representations, videoRepresentations, immediateThumbnailData))
case let .image(id, reference, representations, videoRepresentations, _, _, _, _, immediateThumbnailData, _):
case let .image(_, reference, representations, videoRepresentations, _, _, _, _, immediateThumbnailData, _):
if image.0 == reference {
entries.insert(entry, at: 0)
items.insert(.image(reference, representations, videoRepresentations, immediateThumbnailData), at: 0)
@@ -905,14 +880,12 @@ final class PeerInfoAvatarListContainerNode: ASDisplayNode {
var wasAdded = false
if let current = self.itemNodes[self.items[i].id] {
itemNode = current
itemNode.isExpanded = self.isExpanded || self.isCollapsing
if update {
itemNode.setup(item: self.items[i], synchronous: synchronous && i == self.currentIndex)
}
} else {
wasAdded = true
itemNode = PeerInfoAvatarListItemNode(context: self.context)
itemNode.isExpanded = self.isExpanded || self.isCollapsing
itemNode.setup(item: self.items[i], synchronous: synchronous && i == self.currentIndex)
self.itemNodes[self.items[i].id] = itemNode
self.contentNode.addSubnode(itemNode)
@@ -1083,7 +1056,7 @@ final class PeerInfoAvatarTransformContainerNode: ASDisplayNode {
transition.updateAlpha(node: videoNode, alpha: 1.0 - fraction)
}
}
var removedPhotoResourceIds = Set<String>()
func update(peer: Peer?, item: PeerInfoAvatarListItem?, theme: PresentationTheme, avatarSize: CGFloat, isExpanded: Bool) {
if let peer = peer {
@@ -1361,6 +1334,17 @@ final class PeerInfoEditingAvatarNode: ASDisplayNode {
}
}
func reset() {
guard let videoNode = self.videoNode else {
return
}
videoNode.isHidden = true
videoNode.seek(self.videoStartTimestamp ?? 0.0)
Queue.mainQueue().after(0.15) {
videoNode.isHidden = false
}
}
func update(peer: Peer?, item: PeerInfoAvatarListItem?, updatingAvatar: PeerInfoUpdatingAvatar?, uploadProgress: CGFloat?, theme: PresentationTheme, avatarSize: CGFloat, isEditing: Bool) {
guard let peer = peer else {
return
@@ -1403,11 +1387,7 @@ final class PeerInfoEditingAvatarNode: ASDisplayNode {
let mediaManager = self.context.sharedContext.mediaManager
let videoNode = UniversalVideoNode(postbox: self.context.account.postbox, audioSession: mediaManager.audioSession, manager: mediaManager.universalVideoManager, decoration: GalleryVideoDecoration(), content: videoContent, priority: .overlay)
videoNode.isUserInteractionEnabled = false
videoNode.ownsContentNodeUpdated = { [weak self] owns in
if let strongSelf = self {
strongSelf.videoNode?.isHidden = !owns
}
}
self.videoStartTimestamp = video.startTimestamp
self.videoContent = videoContent
self.videoNode = videoNode
@@ -1419,12 +1399,14 @@ final class PeerInfoEditingAvatarNode: ASDisplayNode {
self.insertSubnode(videoNode, aboveSubnode: self.avatarNode)
}
} else if let videoNode = self.videoNode {
self.videoStartTimestamp = nil
self.videoContent = nil
self.videoNode = nil
videoNode.removeFromSupernode()
}
} else if let videoNode = self.videoNode {
self.videoContent = nil
self.videoNode = nil
@@ -2767,23 +2749,7 @@ final class PeerInfoHeaderNode: ASDisplayNode {
apparentAvatarFrame = CGRect(origin: CGPoint(x: avatarCenter.x - avatarFrame.width / 2.0, y: -contentOffset + avatarOffset + avatarCenter.y - avatarFrame.height / 2.0), size: avatarFrame.size)
controlsClippingFrame = apparentAvatarFrame
}
if case let .animated(duration, curve) = transition, !transitionSourceAvatarFrame.width.isZero, false {
let previousFrame = self.avatarListNode.frame
self.avatarListNode.frame = CGRect(origin: apparentAvatarFrame.center, size: CGSize())
let horizontalTransition: ContainedViewLayoutTransition
let verticalTransition: ContainedViewLayoutTransition
if transitionFraction < .ulpOfOne {
horizontalTransition = .animated(duration: duration * 0.85, curve: curve)
verticalTransition = .animated(duration: duration * 1.15, curve: curve)
} else {
horizontalTransition = transition
verticalTransition = .animated(duration: duration * 0.6, curve: curve)
}
horizontalTransition.animatePositionAdditive(node: self.avatarListNode, offset: CGPoint(x: previousFrame.midX - apparentAvatarFrame.midX, y: 0.0))
verticalTransition.animatePositionAdditive(node: self.avatarListNode, offset: CGPoint(x: 0.0, y: previousFrame.midY - apparentAvatarFrame.midY))
} else {
transition.updateFrameAdditive(node: self.avatarListNode, frame: CGRect(origin: apparentAvatarFrame.center, size: CGSize()))
}
transition.updateFrameAdditive(node: self.avatarListNode, frame: CGRect(origin: apparentAvatarFrame.center, size: CGSize()))
let avatarListContainerFrame: CGRect
let avatarListContainerScale: CGFloat

View File

@@ -2200,6 +2200,7 @@ private final class PeerInfoScreenNode: ViewControllerTracingNode, UIScrollViewD
strongSelf.headerNode.avatarListNode.listContainerNode.isCollapsing = false
strongSelf.headerNode.avatarListNode.avatarContainerNode.canAttachVideo = true
strongSelf.headerNode.editingContentNode.avatarNode.canAttachVideo = true
strongSelf.headerNode.editingContentNode.avatarNode.reset()
if let (layout, navigationHeight) = strongSelf.validLayout {
strongSelf.containerLayoutUpdated(layout: layout, navigationHeight: navigationHeight, transition: .immediate, additive: false)
}
@@ -2589,6 +2590,18 @@ private final class PeerInfoScreenNode: ViewControllerTracingNode, UIScrollViewD
self.preloadStickerDisposable.dispose()
}
override func didLoad() {
super.didLoad()
self.view.disablesInteractiveTransitionGestureRecognizerNow = { [weak self] in
if let strongSelf = self {
return strongSelf.state.isEditing
} else {
return false
}
}
}
var canAttachVideo: Bool?
private func updateData(_ data: PeerInfoScreenData) {