Optimize pattern wallpaper rendering

Ilya Laktyushin 2021-09-02 04:26:06 +03:00
parent 0ffc9d7b98
commit 74092d28e9
9 changed files with 573 additions and 18 deletions

View File

@@ -293,3 +293,22 @@ public final class CachedAnimatedStickerRepresentation: CachedMediaResourceRepre
}
}
}
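/// Marks the cached, pre-parsed draw-command form of a pattern wallpaper SVG produced by
/// prepareSvgImage (see fetchPreparedPatternWallpaperRepresentation later in this commit), so it
/// can be stored and looked up through the media box alongside the raw resource.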
public final class CachedPreparedPatternWallpaperRepresentation: CachedMediaResourceRepresentation {
public let keepDuration: CachedMediaRepresentationKeepDuration = .general
public var uniqueId: String {
return "prepared-pattern-wallpaper"
}
public init() {
}
public func isEqual(to: CachedMediaResourceRepresentation) -> Bool {
if to is CachedPreparedPatternWallpaperRepresentation {
return true
} else {
return false
}
}
}

View File

@@ -4,6 +4,9 @@
#import <Foundation/Foundation.h>
#import <UIKit/UIKit.h>
NSData * _Nullable prepareSvgImage(NSData * _Nonnull data);
UIImage * _Nullable renderPreparedImage(NSData * _Nonnull data, CGSize size);
UIImage * _Nullable drawSvgImage(NSData * _Nonnull data, CGSize size, UIColor * _Nullable backgroundColor, UIColor * _Nullable foregroundColor);
#endif /* Lottie_h */
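Taken together, these declarations split pattern rendering into a one-time preparation step (prepareSvgImage) and a cheap replay step (renderPreparedImage). A minimal Swift sketch of the intended flow, assuming the bridged signatures above; the helper names and the gzippedSvgData/size values are illustrative only:

import UIKit
import GZip
import Svg

// One-time, at fetch/cache time: inflate the downloaded pattern (2 MB cap) and
// convert the SVG into a compact draw-command blob that can be cached on disk.
func preparePattern(gzippedSvgData: Data) -> Data? {
    guard let unpackedData = TGGUnzipData(gzippedSvgData, 2 * 1024 * 1024) else {
        return nil
    }
    return prepareSvgImage(unpackedData)
}

// Per draw, at any target size: replay the cached blob instead of re-parsing the SVG.
func renderPattern(preparedData: Data, size: CGSize) -> UIImage? {
    return renderPreparedImage(preparedData, size)
}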

View File

@@ -229,3 +229,431 @@ UIImage * _Nullable drawSvgImage(NSData * _Nonnull data, CGSize size, UIColor *b
return resultImage;
}
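// CGContextCoder records a minimal subset of CGContext drawing calls into a flat binary blob:
// a header of two native-endian int32 values (the SVG canvas width and height) followed by
// one-byte opcodes with float operands. prepareSvgImage produces this blob once per pattern;
// renderPreparedImage replays it at draw time without touching the SVG again.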
@interface CGContextCoder : NSObject {
NSMutableData *_data;
}
@property (nonatomic, readonly) NSData *data;
@end
@implementation CGContextCoder
- (instancetype)initWithSize:(CGSize)size {
self = [super init];
if (self != nil) {
_data = [[NSMutableData alloc] init];
int32_t intWidth = size.width;
int32_t intHeight = size.height;
[_data appendBytes:&intWidth length:sizeof(intWidth)];
[_data appendBytes:&intHeight length:sizeof(intHeight)];
}
return self;
}
- (void)setFillColorWithOpacity:(CGFloat)opacity {
uint8_t command = 1;
[_data appendBytes:&command length:sizeof(command)];
uint8_t intOpacity = opacity * 255.0;
[_data appendBytes:&intOpacity length:sizeof(intOpacity)];
}
- (void)setupStrokeOpacity:(CGFloat)opacity mitterLimit:(CGFloat)mitterLimit lineWidth:(CGFloat)lineWidth lineCap:(CGLineCap)lineCap lineJoin:(CGLineJoin)lineJoin {
uint8_t command = 2;
[_data appendBytes:&command length:sizeof(command)];
uint8_t intOpacity = opacity * 255.0;
[_data appendBytes:&intOpacity length:sizeof(intOpacity)];
float floatMitterLimit = mitterLimit;
[_data appendBytes:&floatMitterLimit length:sizeof(floatMitterLimit)];
float floatLineWidth = lineWidth;
[_data appendBytes:&floatLineWidth length:sizeof(floatLineWidth)];
uint8_t intLineCap = lineCap;
[_data appendBytes:&intLineCap length:sizeof(intLineCap)];
uint8_t intLineJoin = lineJoin;
[_data appendBytes:&intLineJoin length:sizeof(intLineJoin)];
}
- (void)beginPath {
uint8_t command = 3;
[_data appendBytes:&command length:sizeof(command)];
}
- (void)moveToPoint:(CGPoint)point {
uint8_t command = 4;
[_data appendBytes:&command length:sizeof(command)];
float floatX = point.x;
[_data appendBytes:&floatX length:sizeof(floatX)];
float floatY = point.y;
[_data appendBytes:&floatY length:sizeof(floatY)];
}
- (void)addLineToPoint:(CGPoint)point {
uint8_t command = 5;
[_data appendBytes:&command length:sizeof(command)];
float floatX = point.x;
[_data appendBytes:&floatX length:sizeof(floatX)];
float floatY = point.y;
[_data appendBytes:&floatY length:sizeof(floatY)];
}
- (void)addCurveToPoint:(CGPoint)p1 p2:(CGPoint)p2 p3:(CGPoint)p3 {
uint8_t command = 6;
[_data appendBytes:&command length:sizeof(command)];
float floatX1 = p1.x;
[_data appendBytes:&floatX1 length:sizeof(floatX1)];
float floatY1 = p1.y;
[_data appendBytes:&floatY1 length:sizeof(floatY1)];
float floatX2 = p2.x;
[_data appendBytes:&floatX2 length:sizeof(floatX2)];
float floatY2 = p2.y;
[_data appendBytes:&floatY2 length:sizeof(floatY2)];
float floatX3 = p3.x;
[_data appendBytes:&floatX3 length:sizeof(floatX3)];
float floatY3 = p3.y;
[_data appendBytes:&floatY3 length:sizeof(floatY3)];
}
- (void)closePath {
uint8_t command = 7;
[_data appendBytes:&command length:sizeof(command)];
}
- (void)eoFillPath {
uint8_t command = 8;
[_data appendBytes:&command length:sizeof(command)];
}
- (void)fillPath {
uint8_t command = 9;
[_data appendBytes:&command length:sizeof(command)];
}
- (void)strokePath {
uint8_t command = 10;
[_data appendBytes:&command length:sizeof(command)];
}
@end
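// Replays a command blob produced by CGContextCoder into a bitmap context, aspect-filling the
// recorded canvas into the requested size. Opcodes: 1 = fill opacity, 2 = stroke setup (opacity,
// miter limit, line width, cap, join), 3 = begin path, 4 = move to, 5 = line to, 6 = cubic curve to,
// 7 = close path, 8 = even-odd fill, 9 = fill, 10 = stroke.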
UIImage * _Nullable renderPreparedImage(NSData * _Nonnull data, CGSize size) {
NSDate *startTime = [NSDate date];
UIColor *foregroundColor = [UIColor whiteColor];
UIColor *backgroundColor = [UIColor blackColor];
int32_t ptr = 0;
int32_t width;
int32_t height;
[data getBytes:&width range:NSMakeRange(ptr, sizeof(width))];
ptr += sizeof(width);
[data getBytes:&height range:NSMakeRange(ptr, sizeof(height))];
ptr += sizeof(height);
UIGraphicsBeginImageContextWithOptions(size, true, 1.0);
CGContextRef context = UIGraphicsGetCurrentContext();
CGContextSetFillColorWithColor(context, backgroundColor.CGColor);
CGContextFillRect(context, CGRectMake(0.0f, 0.0f, size.width, size.height));
CGSize svgSize = CGSizeMake(width, height);
CGSize drawingSize = aspectFillSize(svgSize, size);
CGFloat scale = MAX(size.width / MAX(1.0, svgSize.width), size.height / MAX(1.0, svgSize.height));
CGContextScaleCTM(context, scale, scale);
CGContextTranslateCTM(context, (size.width - drawingSize.width) / 2.0, (size.height - drawingSize.height) / 2.0);
while (ptr < data.length) {
uint8_t cmd;
[data getBytes:&cmd range:NSMakeRange(ptr, sizeof(cmd))];
ptr += sizeof(cmd);
switch (cmd) {
case 1:
{
uint8_t opacity;
[data getBytes:&opacity range:NSMakeRange(ptr, sizeof(opacity))];
ptr += sizeof(opacity);
CGContextSetFillColorWithColor(context, [foregroundColor colorWithAlphaComponent:opacity / 255.0].CGColor);
}
break;
case 2:
{
uint8_t opacity;
[data getBytes:&opacity range:NSMakeRange(ptr, sizeof(opacity))];
ptr += sizeof(opacity);
CGContextSetStrokeColorWithColor(context, [foregroundColor colorWithAlphaComponent:opacity / 255.0].CGColor);
float mitterLimit;
[data getBytes:&mitterLimit range:NSMakeRange(ptr, sizeof(mitterLimit))];
ptr += sizeof(mitterLimit);
CGContextSetMiterLimit(context, mitterLimit);
float lineWidth;
[data getBytes:&lineWidth range:NSMakeRange(ptr, sizeof(lineWidth))];
ptr += sizeof(lineWidth);
CGContextSetLineWidth(context, lineWidth);
uint8_t lineCap;
[data getBytes:&lineCap range:NSMakeRange(ptr, sizeof(lineCap))];
ptr += sizeof(lineCap);
CGContextSetLineCap(context, lineCap);
uint8_t lineJoin;
[data getBytes:&lineJoin range:NSMakeRange(ptr, sizeof(lineJoin))];
ptr += sizeof(lineJoin);
CGContextSetLineJoin(context, lineJoin);
}
break;
case 3:
{
CGContextBeginPath(context);
}
break;
case 4:
{
float x;
[data getBytes:&x range:NSMakeRange(ptr, sizeof(x))];
ptr += sizeof(x);
float y;
[data getBytes:&y range:NSMakeRange(ptr, sizeof(y))];
ptr += sizeof(y);
CGContextMoveToPoint(context, x, y);
}
break;
case 5:
{
float x;
[data getBytes:&x range:NSMakeRange(ptr, sizeof(x))];
ptr += sizeof(x);
float y;
[data getBytes:&y range:NSMakeRange(ptr, sizeof(y))];
ptr += sizeof(y);
CGContextAddLineToPoint(context, x, y);
}
break;
case 6:
{
float x1;
[data getBytes:&x1 range:NSMakeRange(ptr, sizeof(x1))];
ptr += sizeof(x1);
float y1;
[data getBytes:&y1 range:NSMakeRange(ptr, sizeof(y1))];
ptr += sizeof(y1);
float x2;
[data getBytes:&x2 range:NSMakeRange(ptr, sizeof(x2))];
ptr += sizeof(x2);
float y2;
[data getBytes:&y2 range:NSMakeRange(ptr, sizeof(y2))];
ptr += sizeof(y2);
float x3;
[data getBytes:&x3 range:NSMakeRange(ptr, sizeof(x3))];
ptr += sizeof(x3);
float y3;
[data getBytes:&y3 range:NSMakeRange(ptr, sizeof(y3))];
ptr += sizeof(y3);
CGContextAddCurveToPoint(context, x1, y1, x2, y2, x3, y3);
}
break;
case 7:
{
CGContextClosePath(context);
}
break;
case 8:
{
CGContextEOFillPath(context);
}
break;
case 9:
{
CGContextFillPath(context);
}
break;
case 10:
{
CGContextStrokePath(context);
}
break;
default:
break;
}
}
UIImage *resultImage = UIGraphicsGetImageFromCurrentImageContext();
UIGraphicsEndImageContext();
double deltaTime = -1.0f * [startTime timeIntervalSinceNow];
printf("drawingTime %fx%f = %f\n", size.width, size.height, deltaTime);
return resultImage;
}
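// Parses the (already unzipped) SVG with nanosvg, inlines the class-based styles collected by
// SvgXMLParsingDelegate, and serializes each visible shape's fill and stroke geometry through
// CGContextCoder. The returned blob is what gets cached as the prepared pattern representation.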
NSData * _Nullable prepareSvgImage(NSData * _Nonnull data) {
NSDate *startTime = [NSDate date];
NSXMLParser *parser = [[NSXMLParser alloc] initWithData:data];
if (parser == nil) {
return nil;
}
SvgXMLParsingDelegate *delegate = [[SvgXMLParsingDelegate alloc] init];
parser.delegate = delegate;
[parser parse];
NSMutableString *xmlString = [[NSMutableString alloc] initWithData:data encoding:NSUTF8StringEncoding];
if (xmlString == nil) {
return nil;
}
for (NSString *styleName in delegate.styles) {
NSString *styleValue = delegate.styles[styleName];
[xmlString replaceOccurrencesOfString:[NSString stringWithFormat:@"class=\"%@\"", styleName] withString:[NSString stringWithFormat:@"style=\"%@\"", styleValue] options:0 range:NSMakeRange(0, xmlString.length)];
}
// nsvgParse modifies its input in place, so parse a private copy rather than NSString's internal UTF-8 buffer.
char *zeroTerminatedData = strdup(xmlString.UTF8String);
NSVGimage *image = nsvgParse(zeroTerminatedData, "px", 96);
free(zeroTerminatedData);
if (image == nil || image->width < 1.0f || image->height < 1.0f) {
return nil;
}
double deltaTime = -1.0f * [startTime timeIntervalSinceNow];
printf("parseTime = %f\n", deltaTime);
startTime = [NSDate date];
CGContextCoder *context = [[CGContextCoder alloc] initWithSize:CGSizeMake(image->width, image->height)];
for (NSVGshape *shape = image->shapes; shape != NULL; shape = shape->next) {
if (!(shape->flags & NSVG_FLAGS_VISIBLE)) {
continue;
}
if (shape->fill.type != NSVG_PAINT_NONE) {
[context setFillColorWithOpacity:shape->opacity];
bool isFirst = true;
bool hasStartPoint = false;
CGPoint startPoint;
for (NSVGpath *path = shape->paths; path != NULL; path = path->next) {
if (isFirst) {
[context beginPath];
isFirst = false;
hasStartPoint = true;
startPoint.x = path->pts[0];
startPoint.y = path->pts[1];
}
[context moveToPoint:CGPointMake(path->pts[0], path->pts[1])];
for (int i = 0; i < path->npts - 1; i += 3) {
float *p = &path->pts[i * 2];
[context addCurveToPoint:CGPointMake(p[2], p[3]) p2:CGPointMake(p[4], p[5]) p3:CGPointMake(p[6], p[7])];
}
if (path->closed) {
if (hasStartPoint) {
hasStartPoint = false;
[context addLineToPoint:startPoint];
}
}
}
switch (shape->fillRule) {
case NSVG_FILLRULE_EVENODD:
[context eoFillPath];
break;
default:
[context fillPath];
break;
}
}
if (shape->stroke.type != NSVG_PAINT_NONE) {
CGLineCap lineCap = kCGLineCapButt;
CGLineJoin lineJoin = kCGLineJoinMiter;
switch (shape->strokeLineCap) {
case NSVG_CAP_BUTT:
lineCap = kCGLineCapButt;
break;
case NSVG_CAP_ROUND:
lineCap = kCGLineCapRound;
break;
case NSVG_CAP_SQUARE:
lineCap = kCGLineCapSquare;
break;
default:
break;
}
switch (shape->strokeLineJoin) {
case NSVG_JOIN_BEVEL:
lineJoin = kCGLineJoinBevel;
break;
case NSVG_JOIN_MITER:
lineJoin = kCGLineJoinMiter;
break;
case NSVG_JOIN_ROUND:
lineJoin = kCGLineJoinRound;
break;
default:
break;
}
[context setupStrokeOpacity:shape->opacity mitterLimit:shape->miterLimit lineWidth:shape->strokeWidth lineCap:lineCap lineJoin:lineJoin];
for (NSVGpath *path = shape->paths; path != NULL; path = path->next) {
[context beginPath];
[context moveToPoint:CGPointMake(path->pts[0], path->pts[1])];
for (int i = 0; i < path->npts - 1; i += 3) {
float *p = &path->pts[i * 2];
[context addCurveToPoint:CGPointMake(p[2], p[3]) p2:CGPointMake(p[4], p[5]) p3:CGPointMake(p[6], p[7])];
}
if (path->closed) {
[context closePath];
}
[context strokePath];
}
}
}
nsvgDelete(image);
return context.data;
}

View File

@@ -3886,7 +3886,19 @@ public final class ChatControllerImpl: TelegramBaseController, ChatController, G
}
|> distinctUntilChanged
self.presentationDataDisposable = combineLatest(queue: Queue.mainQueue(), context.sharedContext.presentationData, context.engine.themes.getChatThemes(accountManager: context.sharedContext.accountManager, onlyCached: false), themeEmoticon, self.themeEmoticonPreviewPromise.get(), self.themeDarkAppearancePreviewPromise.get()).start(next: { [weak self] presentationData, chatThemes, themeEmoticon, themeEmoticonPreview, darkAppearancePreview in
let themeSettings = context.sharedContext.accountManager.sharedData(keys: [ApplicationSpecificSharedDataKeys.presentationThemeSettings])
|> map { sharedData -> PresentationThemeSettings in
let themeSettings: PresentationThemeSettings
if let current = sharedData.entries[ApplicationSpecificSharedDataKeys.presentationThemeSettings] as? PresentationThemeSettings {
themeSettings = current
} else {
themeSettings = PresentationThemeSettings.defaultSettings
}
return themeSettings
}
let accountManager = context.sharedContext.accountManager
self.presentationDataDisposable = combineLatest(queue: Queue.mainQueue(), context.sharedContext.presentationData, themeSettings, context.engine.themes.getChatThemes(accountManager: accountManager, onlyCached: false), themeEmoticon, self.themeEmoticonPreviewPromise.get(), self.themeDarkAppearancePreviewPromise.get()).start(next: { [weak self] presentationData, themeSettings, chatThemes, themeEmoticon, themeEmoticonPreview, darkAppearancePreview in
if let strongSelf = self {
let previousTheme = strongSelf.presentationData.theme
let previousStrings = strongSelf.presentationData.strings
@@ -3914,12 +3926,64 @@ public final class ChatControllerImpl: TelegramBaseController, ChatController, G
}
} else if let darkAppearancePreview = darkAppearancePreview {
let lightTheme: PresentationTheme
let darkTheme: PresentationTheme
let lightWallpaper: TelegramWallpaper
if presentationData.theme.overallDarkAppearance {
let darkTheme: PresentationTheme
let darkWallpaper: TelegramWallpaper
if presentationData.autoNightModeTriggered {
darkTheme = presentationData.theme
darkWallpaper = presentationData.chatWallpaper
var currentColors = themeSettings.themeSpecificAccentColors[themeSettings.theme.index]
if let colors = currentColors, colors.baseColor == .theme {
currentColors = nil
}
let themeSpecificWallpaper = (themeSettings.themeSpecificChatWallpapers[coloredThemeIndex(reference: themeSettings.theme, accentColor: currentColors)] ?? themeSettings.themeSpecificChatWallpapers[themeSettings.theme.index])
if let themeSpecificWallpaper = themeSpecificWallpaper {
lightWallpaper = themeSpecificWallpaper
} else {
let theme = makePresentationTheme(mediaBox: accountManager.mediaBox, themeReference: themeSettings.theme, accentColor: currentColors?.color, bubbleColors: currentColors?.customBubbleColors ?? [], wallpaper: currentColors?.wallpaper, baseColor: currentColors?.baseColor) ?? defaultPresentationTheme
lightWallpaper = theme.chat.defaultWallpaper
}
lightTheme = makePresentationTheme(mediaBox: accountManager.mediaBox, themeReference: themeSettings.theme, accentColor: currentColors?.color, bubbleColors: currentColors?.customBubbleColors ?? [], wallpaper: currentColors?.wallpaper, baseColor: currentColors?.baseColor, serviceBackgroundColor: defaultServiceBackgroundColor) ?? defaultPresentationTheme
} else {
lightTheme = presentationData.theme
lightWallpaper = presentationData.chatWallpaper
let automaticTheme = themeSettings.automaticThemeSwitchSetting.theme
let effectiveColors = themeSettings.themeSpecificAccentColors[automaticTheme.index]
let themeSpecificWallpaper = (themeSettings.themeSpecificChatWallpapers[coloredThemeIndex(reference: automaticTheme, accentColor: effectiveColors)] ?? themeSettings.themeSpecificChatWallpapers[automaticTheme.index])
darkTheme = makePresentationTheme(mediaBox: accountManager.mediaBox, themeReference: automaticTheme, accentColor: effectiveColors?.color, bubbleColors: effectiveColors?.customBubbleColors ?? [], wallpaper: effectiveColors?.wallpaper, baseColor: effectiveColors?.baseColor, serviceBackgroundColor: defaultServiceBackgroundColor) ?? defaultPresentationTheme
if let themeSpecificWallpaper = themeSpecificWallpaper {
darkWallpaper = themeSpecificWallpaper
} else {
switch lightWallpaper {
case .builtin, .color, .gradient:
darkWallpaper = darkTheme.chat.defaultWallpaper
case .file:
if lightWallpaper.isPattern {
darkWallpaper = darkTheme.chat.defaultWallpaper
} else {
darkWallpaper = lightWallpaper
}
default:
darkWallpaper = lightWallpaper
}
}
}
if darkAppearancePreview {
presentationData = presentationData.withUpdated(theme: darkTheme)
presentationData = presentationData.withUpdated(chatWallpaper: darkWallpaper)
} else {
presentationData = presentationData.withUpdated(theme: lightTheme)
presentationData = presentationData.withUpdated(chatWallpaper: lightWallpaper)
}
}
let isFirstTime = !strongSelf.didSetPresentationData
@@ -3928,8 +3992,9 @@ public final class ChatControllerImpl: TelegramBaseController, ChatController, G
if isFirstTime || previousTheme !== presentationData.theme || previousStrings !== presentationData.strings || presentationData.chatWallpaper != previousChatWallpaper {
strongSelf.themeAndStringsUpdated()
strongSelf.presentationDataPromise.set(.single(strongSelf.presentationData))
}
strongSelf.presentationDataPromise.set(.single(strongSelf.presentationData))
strongSelf.presentationReady.set(.single(true))
}
})

View File

@@ -610,8 +610,8 @@ final class ChatMessageAccessibilityData {
replyLabel = item.presentationData.strings.VoiceOver_Chat_ReplyToYourMessage
}
let (replyMessageLabel, replyMessageValue) = dataForMessage(replyMessage, true)
replyLabel += "\(replyLabel): \(replyMessageLabel), \(replyMessageValue)"
// let (replyMessageLabel, replyMessageValue) = dataForMessage(replyMessage, true)
// replyLabel += "\(replyLabel): \(replyMessageLabel), \(replyMessageValue)"
label = "\(replyLabel) . \(label)"
}

View File

@@ -628,12 +628,12 @@ private class ChatThemeScreenNode: ViewControllerTracingNode, UIScrollViewDelega
self.backgroundNode.addSubnode(self.contentBackgroundNode)
self.contentContainerNode.addSubnode(self.titleNode)
self.contentContainerNode.addSubnode(self.textNode)
self.contentContainerNode.addSubnode(self.cancelButton)
self.contentContainerNode.addSubnode(self.doneButton)
self.topContentContainerNode.addSubnode(self.animationNode)
self.topContentContainerNode.addSubnode(self.switchThemeButton)
self.topContentContainerNode.addSubnode(self.listNode)
self.topContentContainerNode.addSubnode(self.cancelButton)
self.switchThemeButton.addTarget(self, action: #selector(self.switchThemePressed), forControlEvents: .touchUpInside)
self.cancelButton.addTarget(self, action: #selector(self.cancelButtonPressed), forControlEvents: .touchUpInside)
@@ -814,6 +814,15 @@ private class ChatThemeScreenNode: ViewControllerTracingNode, UIScrollViewDelega
})
}
if animateBackground, let snapshotView = self.cancelButton.view.snapshotView(afterScreenUpdates: false) {
snapshotView.frame = self.cancelButton.frame
self.cancelButton.view.superview?.insertSubview(snapshotView, aboveSubview: self.cancelButton.view)
snapshotView.layer.animateAlpha(from: 1.0, to: 0.0, duration: 0.3, delay: delay, removeOnCompletion: false, completion: { [weak snapshotView] _ in
snapshotView?.removeFromSuperview()
})
}
self.listNode.forEachVisibleItemNode { node in
if let node = node as? ThemeSettingsThemeItemIconNode {
node.crossfade()

View File

@@ -18,6 +18,7 @@ import WallpaperResources
import GZip
import TelegramUniversalVideoContent
import GradientBackground
import Svg
public func fetchCachedResourceRepresentation(account: Account, resource: MediaResource, representation: CachedMediaResourceRepresentation) -> Signal<CachedMediaResourceRepresentationResult, NoError> {
if let representation = representation as? CachedStickerAJpegRepresentation {
@@ -122,6 +123,14 @@ public func fetchCachedResourceRepresentation(account: Account, resource: MediaR
return fetchMapSnapshotResource(resource: resource)
} else if let resource = resource as? YoutubeEmbedStoryboardMediaResource, let _ = representation as? YoutubeEmbedStoryboardMediaResourceRepresentation {
return fetchYoutubeEmbedStoryboardResource(resource: resource)
} else if let representation = representation as? CachedPreparedPatternWallpaperRepresentation {
return account.postbox.mediaBox.resourceData(resource, option: .complete(waitUntilFetchStatus: false))
|> mapToSignal { data -> Signal<CachedMediaResourceRepresentationResult, NoError> in
if !data.complete {
return .complete()
}
return fetchPreparedPatternWallpaperRepresentation(resource: resource, resourceData: data, representation: representation)
}
}
return .never()
}
@@ -719,3 +728,18 @@ private func fetchAnimatedStickerRepresentation(account: Account, resource: Medi
|> runOn(Queue.concurrentDefaultQueue())
}
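// Builds the prepared pattern representation: unzips the downloaded payload (2 MB cap), runs
// prepareSvgImage over the SVG and hands the resulting blob back to the media box as a temporary
// file, so later draws can call renderPreparedImage directly.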
private func fetchPreparedPatternWallpaperRepresentation(resource: MediaResource, resourceData: MediaResourceData, representation: CachedPreparedPatternWallpaperRepresentation) -> Signal<CachedMediaResourceRepresentationResult, NoError> {
return Signal({ subscriber in
if let data = try? Data(contentsOf: URL(fileURLWithPath: resourceData.path), options: [.mappedIfSafe]) {
if let unpackedData = TGGUnzipData(data, 2 * 1024 * 1024), let data = prepareSvgImage(unpackedData) {
let path = NSTemporaryDirectory() + "\(Int64.random(in: Int64.min ... Int64.max))"
let url = URL(fileURLWithPath: path)
let _ = try? data.write(to: url)
subscriber.putNext(.temporaryPath(path))
subscriber.putCompletion()
}
}
return EmptyDisposable
}) |> runOn(Queue.concurrentDefaultQueue())
}

View File

@@ -3495,7 +3495,7 @@ private final class PeerInfoScreenNode: ViewControllerTracingNode, UIScrollViewD
canChangeColors = false
}
if false, canChangeColors {
if canChangeColors {
items.append(.action(ContextMenuActionItem(text: presentationData.strings.UserInfo_ChangeColors, icon: { theme in
generateTintedImage(image: UIImage(bundleImageName: "Chat/Context Menu/ApplyTheme"), color: theme.contextMenu.primaryColor)
}, action: { [weak self] _, f in

View File

@@ -366,10 +366,11 @@ private func patternWallpaperDatas(account: Account, accountManager: AccountMana
if let targetRepresentation = targetRepresentation {
let maybeFullSize = combineLatest(
accountManager.mediaBox.resourceData(targetRepresentation.representation.resource),
account.postbox.mediaBox.resourceData(targetRepresentation.representation.resource)
accountManager.mediaBox.cachedResourceRepresentation(targetRepresentation.representation.resource, representation: CachedPreparedPatternWallpaperRepresentation(), complete: false, fetch: true),
account.postbox.mediaBox.cachedResourceRepresentation(targetRepresentation.representation.resource, representation: CachedPreparedPatternWallpaperRepresentation(), complete: false, fetch: true)
)
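// Both media boxes now read the pattern through the prepared-representation cache rather than
// the raw SVG resource, so the expensive SVG parse happens at most once per resource.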
let signal = maybeFullSize
|> take(1)
|> mapToSignal { maybeSharedData, maybeData -> Signal<(Data?, Bool), NoError> in
@@ -387,11 +388,11 @@ private func patternWallpaperDatas(account: Account, accountManager: AccountMana
let accountFullSizeData = Signal<(Data?, Bool), NoError> { subscriber in
let fetchedFullSizeDisposable = fetchedFullSize.start()
let fullSizeDisposable = account.postbox.mediaBox.resourceData(targetRepresentation.representation.resource).start(next: { next in
let fullSizeDisposable = account.postbox.mediaBox.cachedResourceRepresentation(targetRepresentation.representation.resource, representation: CachedPreparedPatternWallpaperRepresentation(), complete: false, fetch: true).start(next: { next in
subscriber.putNext((next.size == 0 ? nil : try? Data(contentsOf: URL(fileURLWithPath: next.path), options: []), next.complete))
if next.complete, let data = try? Data(contentsOf: URL(fileURLWithPath: next.path), options: .mappedRead) {
accountManager.mediaBox.storeResourceData(targetRepresentation.representation.resource.id, data: data)
accountManager.mediaBox.storeCachedResourceRepresentation(targetRepresentation.representation.resource, representation: CachedPreparedPatternWallpaperRepresentation(), data: data)
}
}, error: subscriber.putError, completed: subscriber.putCompletion)
@@ -402,7 +403,7 @@ private func patternWallpaperDatas(account: Account, accountManager: AccountMana
}
let sharedFullSizeData = Signal<(Data?, Bool), NoError> { subscriber in
let fullSizeDisposable = accountManager.mediaBox.resourceData(targetRepresentation.representation.resource).start(next: { next in
let fullSizeDisposable = accountManager.mediaBox.cachedResourceRepresentation(targetRepresentation.representation.resource, representation: CachedPreparedPatternWallpaperRepresentation(), complete: false, fetch: true).start(next: { next in
subscriber.putNext((next.size == 0 ? nil : try? Data(contentsOf: URL(fileURLWithPath: next.path), options: []), next.complete))
}, error: subscriber.putError, completed: subscriber.putCompletion)
@@ -450,7 +451,7 @@ public func patternWallpaperImage(account: Account, accountManager: AccountManag
if !autoFetchFullSize || fullSizeComplete {
return patternWallpaperImageInternal(fullSizeData: fullSizeData, fullSizeComplete: fullSizeComplete, mode: mode)
} else {
return .complete()
return .single(nil)
}
}
}
@@ -526,11 +527,17 @@ private func patternWallpaperImageInternal(fullSizeData: Data?, fullSizeComplete
let overlayImage = generateImage(arguments.drawingRect.size, rotatedContext: { size, c in
c.clear(CGRect(origin: CGPoint(), size: size))
var image: UIImage?
if let fullSizeData = fullSizeData, let unpackedData = TGGUnzipData(fullSizeData, 2 * 1024 * 1024) {
image = drawSvgImage(unpackedData, CGSize(width: size.width * context.scale, height: size.height * context.scale), .black, .white)
} else if let fullSizeData = fullSizeData {
image = UIImage(data: fullSizeData)
if let fullSizeData = fullSizeData {
image = renderPreparedImage(fullSizeData, CGSize(width: size.width * context.scale, height: size.height * context.scale))
}
// if let fullSizeData = fullSizeData, let unpackedData = TGGUnzipData(fullSizeData, 2 * 1024 * 1024) {
// let preparedData = prepareSvgImage(unpackedData)
// image = renderPreparedImage(preparedData!, CGSize(width: size.width * context.scale, height: size.height * context.scale))
//
//// image = drawSvgImage(unpackedData, CGSize(width: size.width * context.scale, height: size.height * context.scale), .black, .white)
// } else if let fullSizeData = fullSizeData {
// image = UIImage(data: fullSizeData)
// }
if let customPatternColor = customArguments.customPatternColor, customPatternColor.alpha < 1.0 {
c.setBlendMode(.copy)