Various fixes

Commit 87a084b31a (parent 6085c40c08)
Mirror of https://github.com/Swiftgram/Telegram-iOS.git
@@ -2244,11 +2244,13 @@ final class ItemStack<ChildEnvironment: Equatable>: CombinedComponent {
     private let items: [AnyComponentWithIdentity<ChildEnvironment>]
     private let padding: CGFloat
     private let minSpacing: CGFloat
+    private let verticalSpacing: CGFloat
     
-    init(_ items: [AnyComponentWithIdentity<ChildEnvironment>], padding: CGFloat, minSpacing: CGFloat) {
+    init(_ items: [AnyComponentWithIdentity<ChildEnvironment>], padding: CGFloat, minSpacing: CGFloat, verticalSpacing: CGFloat) {
         self.items = items
         self.padding = padding
         self.minSpacing = minSpacing
+        self.verticalSpacing = verticalSpacing
     }
     
     static func ==(lhs: ItemStack<ChildEnvironment>, rhs: ItemStack<ChildEnvironment>) -> Bool {
@@ -2261,6 +2263,9 @@ final class ItemStack<ChildEnvironment: Equatable>: CombinedComponent {
         if lhs.minSpacing != rhs.minSpacing {
             return false
         }
+        if lhs.verticalSpacing != rhs.verticalSpacing {
+            return false
+        }
         return true
     }
     
@@ -2313,7 +2318,19 @@ final class ItemStack<ChildEnvironment: Equatable>: CombinedComponent {
             let remainingWidth = context.availableSize.width - itemsWidth - context.component.padding * 2.0
             spacing = remainingWidth / CGFloat(group.count - 1)
             
-            var nextX: CGFloat = context.component.padding
+            var useCenteredLayout = false
+            if spacing > 30.0 || group.count == 1 {
+                spacing = 30.0
+                useCenteredLayout = true
+            }
+            
+            var nextX: CGFloat
+            if useCenteredLayout {
+                let totalWidth = itemsWidth + spacing * CGFloat(group.count - 1)
+                nextX = floorToScreenPixels((size.width - totalWidth) / 2.0)
+            } else {
+                nextX = context.component.padding
+            }
             for i in group {
                 let child = updatedChildren[i]
                 let frame = CGRect(origin: CGPoint(x: nextX, y: size.height + floorToScreenPixels((groupHeight - child.size.height) / 2.0)), size: child.size)
@@ -2323,7 +2340,7 @@ final class ItemStack<ChildEnvironment: Equatable>: CombinedComponent {
                 )
                 nextX += child.size.width + spacing
             }
-            size.height += groupHeight
+            size.height += groupHeight + context.component.verticalSpacing
         }
         
         return size
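The placement change above reads as: compute the natural spacing from the width left over after the items and padding, and when that spacing exceeds 30 points (or the group holds a single item), clamp it to 30 and center the whole row instead of left-aligning it at the padding. A minimal standalone sketch of that rule, with made-up item widths and .rounded(.down) standing in for floorToScreenPixels:

import CoreGraphics

// Lay out one row of items: clamp the spacing to 30pt and center the row
// when the natural spacing would be larger (or there is a single item).
func rowOrigins(itemWidths: [CGFloat], availableWidth: CGFloat, padding: CGFloat) -> [CGFloat] {
    let itemsWidth = itemWidths.reduce(0, +)
    let remainingWidth = availableWidth - itemsWidth - padding * 2.0
    var spacing = remainingWidth / CGFloat(max(itemWidths.count - 1, 1))
    var useCenteredLayout = false
    if spacing > 30.0 || itemWidths.count == 1 {
        spacing = 30.0
        useCenteredLayout = true
    }
    var nextX: CGFloat
    if useCenteredLayout {
        let totalWidth = itemsWidth + spacing * CGFloat(itemWidths.count - 1)
        nextX = ((availableWidth - totalWidth) / 2.0).rounded(.down)
    } else {
        nextX = padding
    }
    var origins: [CGFloat] = []
    for width in itemWidths {
        origins.append(nextX)
        nextX += width + spacing
    }
    return origins
}

// Two 60pt items in a 390pt row: natural spacing would be 234pt,
// so the row is centered with 30pt spacing instead.
print(rowOrigins(itemWidths: [60, 60], availableWidth: 390, padding: 18)) // [120.0, 210.0]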
@@ -2413,7 +2430,8 @@ final class StoryStickersContentView: UIView, EmojiCustomContentView {
                     )
                 ],
                 padding: 18.0,
-                minSpacing: 8.0
+                minSpacing: 8.0,
+                verticalSpacing: 12.0
             )
         ),
         environment: {},
@@ -2426,7 +2444,7 @@ final class StoryStickersContentView: UIView, EmojiCustomContentView {
             view.frame = CGRect(origin: CGPoint(x: 0.0, y: padding), size: size)
         }
         
-        return CGSize(width: size.width, height: size.height + padding * 2.0)
+        return CGSize(width: size.width, height: size.height + padding * 2.0 - 12.0)
     }
 }
 
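The -12.0 in the reported height appears to compensate for the trailing verticalSpacing the stack now adds after its last row (12.0 matches the verticalSpacing passed in above). A quick check of the arithmetic with hypothetical row heights:

import CoreGraphics

// Two rows of height 40 with verticalSpacing 12: the stack accumulates
// (rowHeight + verticalSpacing) per row, so the container trims the
// trailing 12 it does not want below the last row.
let rowHeight: CGFloat = 40.0
let verticalSpacing: CGFloat = 12.0
let padding: CGFloat = 18.0
let stackHeight = (rowHeight + verticalSpacing) * 2.0       // 104.0
let reportedHeight = stackHeight + padding * 2.0 - 12.0     // 128.0
print(stackHeight, reportedHeight)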
@@ -597,7 +597,7 @@ public class PremiumLimitDisplayComponent: Component {
         if component.invertProgress {
             progressTransition.setFrame(layer: self.inactiveBackground, frame: CGRect(origin: CGPoint(x: activityPosition, y: 0.0), size: CGSize(width: size.width - activityPosition, height: lineHeight)))
             progressTransition.setFrame(view: self.activeContainer, frame: CGRect(origin: .zero, size: CGSize(width: activityPosition, height: lineHeight)))
-            progressTransition.setFrame(layer: self.activeBackground, frame: CGRect(origin: .zero, size: CGSize(width: containerFrame.width * 1.35, height: lineHeight)))
+            progressTransition.setBounds(layer: self.activeBackground, bounds: CGRect(origin: .zero, size: CGSize(width: containerFrame.width * 1.35, height: lineHeight)))
         } else {
             progressTransition.setFrame(layer: self.inactiveBackground, frame: CGRect(origin: .zero, size: CGSize(width: activityPosition, height: lineHeight)))
             progressTransition.setFrame(view: self.activeContainer, frame: CGRect(origin: CGPoint(x: activityPosition, y: 0.0), size: CGSize(width: activeWidth, height: lineHeight)))
@@ -674,15 +674,6 @@ public class PremiumLimitDisplayComponent: Component {
             } else {
                 self.badgeView.center = CGPoint(x: size.width * badgePosition, y: 82.0)
             }
-            
-//            if self.badgeView.frame.maxX > size.width {
-//                let delta = self.badgeView.frame.maxX - size.width - 6.0
-//                if let _ = self.badgeView.layer.animation(forKey: "appearance1") {
-//
-//                } else {
-//                    self.badgeView.center = self.badgeView.center.offsetBy(dx: -delta, dy: 0.0)
-//                }
-//            }
         }
         self.badgeForeground.bounds = CGRect(origin: CGPoint(), size: CGSize(width: badgeFullSize.width * 3.0, height: badgeFullSize.height))
         if self.badgeForeground.animation(forKey: "movement") == nil {
@@ -1101,7 +1092,7 @@ private final class LimitSheetContent: CombinedComponent {
             } else {
                 badgePosition = min(1.0, CGFloat(component.count) / CGFloat(premiumLimit))
             }
-            badgeGraphPosition = 0.75
+            badgeGraphPosition = 0.5
             buttonAnimationName = "premium_addone"
             
             if isPremiumDisabled {
@@ -589,6 +589,8 @@ public final class MediaEditor {
         if let initialSeekPosition = self.initialSeekPosition {
             self.initialSeekPosition = nil
             player.seek(to: CMTime(seconds: initialSeekPosition, preferredTimescale: CMTimeScale(1000)), toleranceBefore: .zero, toleranceAfter: .zero)
+        } else if let trimRange = self.values.videoTrimRange {
+            player.seek(to: CMTime(seconds: trimRange.lowerBound, preferredTimescale: CMTimeScale(1000)), toleranceBefore: .zero, toleranceAfter: .zero)
         }
         
         self.setupTimeObservers()
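The new branch falls back to the start of the video trim range when no initial seek is pending. As a standalone sketch, the same frame-accurate seek (trim range assumed to be in seconds, helper name illustrative) looks like this:

import AVFoundation

// Seek an AVPlayer to the lower bound of a trim range with zero tolerance,
// mirroring the fallback added above.
func seekToTrimStart(of player: AVPlayer, trimRange: Range<Double>) {
    let time = CMTime(seconds: trimRange.lowerBound, preferredTimescale: CMTimeScale(1000))
    player.seek(to: time, toleranceBefore: .zero, toleranceAfter: .zero)
}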
@@ -1065,9 +1067,9 @@ public final class MediaEditor {
         }
     }
     
-    public func setAudioTrack(_ audioTrack: MediaAudioTrack?) {
+    public func setAudioTrack(_ audioTrack: MediaAudioTrack?, trimRange: Range<Double>? = nil, offset: Double? = nil) {
         self.updateValues(mode: .skipRendering) { values in
-            return values.withUpdatedAudioTrack(audioTrack).withUpdatedAudioTrackSamples(nil).withUpdatedAudioTrackTrimRange(nil).withUpdatedAudioTrackVolume(nil).withUpdatedAudioTrackOffset(nil)
+            return values.withUpdatedAudioTrack(audioTrack).withUpdatedAudioTrackSamples(nil).withUpdatedAudioTrackTrimRange(trimRange).withUpdatedAudioTrackVolume(nil).withUpdatedAudioTrackOffset(offset)
         }
         
         if let audioPlayer = self.audioPlayer {
@@ -1087,6 +1089,7 @@ public final class MediaEditor {
         }
         
+        self.setupAudioPlayback()
         self.updateAudioPlaybackRange()
     }
     
     private func setupAudioPlayback() {
@@ -246,10 +246,10 @@ private func makeEditorImageFrameComposition(context: CIContext, inputImage: CII
         resultImage = mediaImage.composited(over: resultImage)
     }
     
-    if var drawingImage {
-        if values.isStory {
-            drawingImage = drawingImage.transformed(by: CGAffineTransformMakeScale(initialScale, initialScale))
-        }
+    if let drawingImage {
+//        if values.isStory {
+//            drawingImage = drawingImage.transformed(by: CGAffineTransformMakeScale(initialScale, initialScale))
+//        }
         resultImage = drawingImage.samplingLinear().composited(over: resultImage)
     }
     
@@ -502,11 +502,17 @@ public final class MediaEditorVideoExport {
         
         let originalDimensions = self.configuration.values.originalDimensions
         var isNotFullscreen = false
-        if case .video(_, true) = self.subject, originalDimensions.width > 0 && abs((Double(originalDimensions.height) / Double(originalDimensions.width)) - 1.7777778) > 0.001 {
-            isNotFullscreen = true
+        var hasNonIdentityTransform = false
+        if case .video(_, true) = self.subject {
+            if originalDimensions.width > 0 && abs((Double(originalDimensions.height) / Double(originalDimensions.width)) - 1.7777778) > 0.001 {
+                isNotFullscreen = true
+            }
+            if let videoTrack = videoTracks.first {
+                hasNonIdentityTransform = !videoTrack.preferredTransform.isIdentity
+            }
         }
         var preferredTransform: CGAffineTransform?
-        if let videoTrack = videoTracks.first, !self.configuration.values.requiresComposing && !isNotFullscreen {
+        if let videoTrack = videoTracks.first, !self.configuration.values.requiresComposing && !isNotFullscreen && !hasNonIdentityTransform {
             preferredTransform = videoTrack.preferredTransform
         } else {
             self.setupComposer()
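The widened check above only lets the export reuse the source track directly when nothing needs compositing, the source is full-screen 16:9, and the track carries no baked-in rotation (a non-identity preferredTransform). A hedged sketch of that decision as a free function (parameter types simplified; the real code reads these values from the export configuration):

import AVFoundation

// Pass-through is only safe when no compositing is required, the video is
// full-screen 16:9, and the track's preferredTransform is the identity.
func canPassThroughVideo(track: AVAssetTrack?, originalDimensions: CGSize, requiresComposing: Bool) -> Bool {
    guard let track else { return false }
    var isNotFullscreen = false
    if originalDimensions.width > 0.0 && abs((Double(originalDimensions.height) / Double(originalDimensions.width)) - 1.7777778) > 0.001 {
        isNotFullscreen = true
    }
    let hasNonIdentityTransform = !track.preferredTransform.isIdentity
    return !requiresComposing && !isNotFullscreen && !hasNonIdentityTransform
}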
@@ -1371,6 +1371,18 @@ final class MediaEditorScreenComponent: Component {
                 }
             } else {
                 if done {
+                    let audioStart = mediaEditor.values.audioTrackTrimRange?.lowerBound ?? 0.0
+                    let audioOffset = min(0.0, mediaEditor.values.audioTrackOffset ?? 0.0)
+                    
+                    var start = -audioOffset + audioStart
+                    if let duration = mediaEditor.duration {
+                        let upperBound = mediaEditor.values.videoTrimRange?.upperBound ?? duration
+                        if start >= upperBound {
+                            start = mediaEditor.values.videoTrimRange?.lowerBound ?? 0.0
+                        }
+                    }
+                    
+                    mediaEditor.seek(start, andPlay: true)
                     mediaEditor.play()
                 } else {
                     mediaEditor.stop()
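The block added above picks a resume position from the audio trim: start at the audio trim's lower bound shifted by any negative audio offset, and fall back to the video trim start if that would land past the end. A small self-contained sketch of the same arithmetic (names are illustrative):

import Foundation

// Pick the position to resume playback from when the audio sheet is confirmed.
func resumePosition(audioTrackTrimStart: Double?, audioTrackOffset: Double?, videoTrimRange: Range<Double>?, duration: Double?) -> Double {
    let audioStart = audioTrackTrimStart ?? 0.0
    let audioOffset = min(0.0, audioTrackOffset ?? 0.0)
    var start = -audioOffset + audioStart
    if let duration {
        let upperBound = videoTrimRange?.upperBound ?? duration
        if start >= upperBound {
            start = videoTrimRange?.lowerBound ?? 0.0
        }
    }
    return start
}

// Audio trimmed to begin at 3s with a -1s offset resumes at 4s.
print(resumePosition(audioTrackTrimStart: 3.0, audioTrackOffset: -1.0, videoTrimRange: nil, duration: 10.0)) // 4.0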
@@ -3226,14 +3238,22 @@ public final class MediaEditorScreen: ViewController, UIDropInteractionDelegate
                 }
                 
                 Queue.mainQueue().async {
-                    mediaEditor.setAudioTrack(MediaAudioTrack(path: fileName, artist: artist, title: title, duration: audioDuration))
+                    var audioTrimRange: Range<Double>?
+                    var audioOffset: Double?
+                    
                     if mediaEditor.sourceIsVideo {
                         if let videoDuration = mediaEditor.originalDuration {
-                            mediaEditor.setAudioTrackTrimRange(0 ..< min(videoDuration, audioDuration), apply: true)
+                            if let videoStart = mediaEditor.values.videoTrimRange?.lowerBound {
+                                audioOffset = -videoStart
+                            }
+                            audioTrimRange = 0 ..< min(videoDuration, audioDuration)
                         }
                     } else {
-                        mediaEditor.setAudioTrackTrimRange(0 ..< min(15, audioDuration), apply: true)
+                        audioTrimRange = 0 ..< min(15, audioDuration)
                     }
+                    
+                    mediaEditor.setAudioTrack(MediaAudioTrack(path: fileName, artist: artist, title: title, duration: audioDuration), trimRange: audioTrimRange, offset: audioOffset)
+                    
                     mediaEditor.seek(mediaEditor.values.videoTrimRange?.lowerBound ?? 0.0, andPlay: true)
                     
                     self.requestUpdate(transition: .easeInOut(duration: 0.2))
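Instead of attaching the track first and trimming it afterwards, the caller now derives the trim range and offset up front and hands both to the widened setAudioTrack call. A sketch of that derivation as a pure function (the function name and tuple return are illustrative):

import Foundation

// Derive the audio trim range and offset before attaching the track,
// mirroring the caller above.
func audioAttachment(videoDuration: Double?, audioDuration: Double, videoTrimStart: Double?, sourceIsVideo: Bool) -> (trimRange: Range<Double>?, offset: Double?) {
    var trimRange: Range<Double>?
    var offset: Double?
    if sourceIsVideo {
        if let videoDuration {
            // Shift the audio so it lines up with a trimmed video start.
            if let videoTrimStart {
                offset = -videoTrimStart
            }
            trimRange = 0 ..< min(videoDuration, audioDuration)
        }
    } else {
        // Photos get at most 15 seconds of audio.
        trimRange = 0 ..< min(15.0, audioDuration)
    }
    return (trimRange, offset)
}

// A 30s track over a 10s video trimmed to start at 2s:
let attachment = audioAttachment(videoDuration: 10, audioDuration: 30, videoTrimStart: 2, sourceIsVideo: true)
// attachment.trimRange == 0.0..<10.0, attachment.offset == -2.0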
@@ -499,7 +499,6 @@ final class VideoScrubberComponent: Component {
         
         var trimDuration = component.duration
         
-        var isFirstTime = false
         var audioChanged = false
         var animateAudioAppearance = false
         if let previousComponent {
@@ -514,8 +513,6 @@ final class VideoScrubberComponent: Component {
                 self.isAudioSelected = false
                 animateAudioAppearance = true
             }
-        } else {
-            isFirstTime = true
         }
         
         let scrubberSpacing: CGFloat = 4.0
@@ -622,7 +619,7 @@ final class VideoScrubberComponent: Component {
         audioTransition.setFrame(view: self.audioScrollView, frame: audioScrollFrame)
         
         let contentSize = CGSize(width: audioTotalWidth, height: 39.0)
-        if self.audioScrollView.contentSize != contentSize {
+        if self.audioScrollView.contentSize != contentSize || audioChanged {
             self.audioScrollView.contentSize = contentSize
             if !component.audioOnly {
                 let leftInset = scrubberSize.width
@@ -634,15 +631,15 @@ final class VideoScrubberComponent: Component {
                 }
                 self.audioScrollView.contentInset = UIEdgeInsets(top: 0.0, left: leftInset, bottom: 0.0, right: rightInset)
             }
-            self.audioScrollView.contentOffset = .zero
+            
+            if let offset = component.audioData?.offset, let duration = component.audioData?.duration, duration > 0.0 {
+                let contentOffset = offset * audioTotalWidth / duration
+                self.audioScrollView.contentOffset = CGPoint(x: contentOffset, y: 0.0)
+            } else {
+                self.audioScrollView.contentOffset = .zero
+            }
         }
         
-        if isFirstTime, let offset = component.audioData?.offset, let duration = component.audioData?.duration, duration > 0.0 {
-            let contentOffset = offset * audioTotalWidth / duration
-            self.audioScrollView.contentOffset = CGPoint(x: contentOffset, y: 0.0)
-        } else if audioChanged {
-            self.audioScrollView.contentOffset = .zero
-        }
         self.ignoreScrollUpdates = false
         
         audioTransition.setCornerRadius(layer: self.audioClippingView.layer, cornerRadius: self.isAudioSelected ? 0.0 : 9.0)
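The restored scroll position above maps the saved audio offset (in seconds) onto the waveform's rendered width. A minimal sketch of that mapping:

import CoreGraphics

// Map a saved audio offset (seconds) onto the waveform's rendered width (points)
// to restore the scroll view's contentOffset, as in the update above.
func waveformContentOffset(offset: Double?, duration: Double?, audioTotalWidth: CGFloat) -> CGPoint {
    guard let offset, let duration, duration > 0.0 else {
        return .zero
    }
    return CGPoint(x: audioTotalWidth * CGFloat(offset / duration), y: 0.0)
}

// A 12s offset into a 60s track rendered 600pt wide scrolls to x = 120.
print(waveformContentOffset(offset: 12, duration: 60, audioTotalWidth: 600)) // (120.0, 0.0)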
@@ -871,13 +868,13 @@ final class VideoScrubberComponent: Component {
             containerRightEdge = ghostRightHandleFrame.minX
         }
         
-        let isDraggingAudio = self.isDragging && component.audioOnly
+        let isDraggingAudio = self.isDragging
         let isCursorHidden = isDraggingAudio || self.trimView.isPanningTrimHandle || self.ghostTrimView.isPanningTrimHandle
         var cursorTransition = transition
         if isCursorHidden {
             cursorTransition = .immediate
         }
-        cursorTransition.setAlpha(view: self.cursorView, alpha: isCursorHidden ? 0.0 : 1.0)
+        cursorTransition.setAlpha(view: self.cursorView, alpha: isCursorHidden ? 0.0 : 1.0, delay: self.cursorView.alpha.isZero && !isCursorHidden ? 0.25 : 0.0)
         
         if self.isPanningPositionHandle || !component.isPlaying {
             self.positionAnimation = nil