Mirror of https://github.com/Swiftgram/Telegram-iOS.git (synced 2025-09-03 03:10:47 +00:00)

Commit 40bca36df4: Merge commit '1eafdc2ae0566e873be5cf053b60f22f6ac0777e'
@@ -149,6 +149,7 @@ public final class DrawingStickerEntityView: DrawingEntityView {
if file.isAnimatedSticker || file.isVideoSticker || file.mimeType == "video/webm" {
if self.animationNode == nil {
let animationNode = DefaultAnimatedStickerNodeImpl()
animationNode.clipsToBounds = true
animationNode.autoplay = false
self.animationNode = animationNode
animationNode.started = { [weak self, weak animationNode] in

@@ -183,6 +184,7 @@ public final class DrawingStickerEntityView: DrawingEntityView {
self.imageNode.isHidden = false
self.didSetUpAnimationNode = false
}
self.imageNode.isHidden = false
self.imageNode.setSignal(chatMessageSticker(account: self.context.account, userLocation: .other, file: file, small: false, synchronousLoad: false))
self.stickerFetchedDisposable.set(freeMediaFileResourceInteractiveFetched(account: self.context.account, userLocation: .other, fileReference: stickerPackFileReference(file), resource: chatMessageStickerResource(file: file, small: false)).start())
}

@@ -354,7 +356,7 @@ public final class DrawingStickerEntityView: DrawingEntityView {
self.imageNode.frame = imageFrame
if let animationNode = self.animationNode {
if self.isReaction {
animationNode.cornerRadius = floor(imageSize.width * 0.03)
animationNode.cornerRadius = floor(imageSize.width * 0.1)
}
animationNode.frame = imageFrame
animationNode.updateLayout(size: imageSize)

@@ -488,15 +490,23 @@ public final class DrawingStickerEntityView: DrawingEntityView {
self.stickerEntity.content = .file(animation, .reaction(updateReaction.reaction, style))
}

if let animationNode = self.animationNode, let snapshot = animationNode.view.snapshotView(afterScreenUpdates: false) {
snapshot.frame = animationNode.frame
snapshot.layer.transform = animationNode.transform
var nodeToTransitionOut: ASDisplayNode?
if let animationNode = self.animationNode {
nodeToTransitionOut = animationNode
} else if !self.imageNode.isHidden {
nodeToTransitionOut = self.imageNode
}

if let nodeToTransitionOut, let snapshot = nodeToTransitionOut.view.snapshotView(afterScreenUpdates: false) {
snapshot.frame = nodeToTransitionOut.frame
snapshot.layer.transform = nodeToTransitionOut.transform
snapshot.layer.animateAlpha(from: 1.0, to: 0.0, duration: 0.2, removeOnCompletion: false, completion: { _ in
snapshot.removeFromSuperview()
})
snapshot.layer.animateScale(from: 1.0, to: 0.1, duration: 0.2)
self.addSubview(snapshot)
}

self.animationNode?.removeFromSupernode()
self.animationNode = nil
self.didSetUpAnimationNode = false

@@ -507,8 +517,17 @@ public final class DrawingStickerEntityView: DrawingEntityView {
self.applyVisibility()
self.setNeedsLayout()

self.animationNode?.layer.animateAlpha(from: 0.0, to: 1.0, duration: 0.2)
self.animationNode?.layer.animateScale(from: 0.1, to: 1.0, duration: 0.2)
let nodeToTransitionIn: ASDisplayNode?
if let animationNode = self.animationNode {
nodeToTransitionIn = animationNode
} else {
nodeToTransitionIn = self.imageNode
}

if let nodeToTransitionIn {
nodeToTransitionIn.layer.animateAlpha(from: 0.0, to: 1.0, duration: 0.2)
nodeToTransitionIn.layer.animateScale(from: 0.1, to: 1.0, duration: 0.2)
}

let _ = self.dismissReactionSelection()
}
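The hunks above generalize the reaction-swap transition: instead of always snapshotting the animation node, the code now snapshots whichever node is currently visible (animation node or image node), fades and scales that snapshot out, and fades and scales the incoming node in. Below is a minimal UIKit sketch of the same snapshot crossfade, not part of the commit; it assumes plain UIViews and UIView.animate in place of ASDisplayNode and Telegram's animateAlpha/animateScale helpers, and the helper name crossfade(from:to:in:) is invented for illustration.

import UIKit

// Crossfades from the currently visible view to its replacement by animating a
// snapshot of the outgoing view out while the incoming view scales and fades in.
func crossfade(from outgoing: UIView?, to incoming: UIView, in container: UIView) {
    if let outgoing, let snapshot = outgoing.snapshotView(afterScreenUpdates: false) {
        snapshot.frame = outgoing.frame
        container.addSubview(snapshot)
        UIView.animate(withDuration: 0.2, animations: {
            snapshot.alpha = 0.0
            snapshot.transform = CGAffineTransform(scaleX: 0.1, y: 0.1)
        }, completion: { _ in
            snapshot.removeFromSuperview()
        })
    }

    // The incoming view starts small and transparent, then animates to its final state.
    incoming.alpha = 0.0
    incoming.transform = CGAffineTransform(scaleX: 0.1, y: 0.1)
    UIView.animate(withDuration: 0.2) {
        incoming.alpha = 1.0
        incoming.transform = .identity
    }
}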
@@ -272,7 +272,7 @@ func presentLegacyMediaPickerGallery(context: AccountContext, peer: EnginePeer?,
|> take(1)
|> deliverOnMainQueue).start(next: { sendWhenOnlineAvailable in
let legacySheetController = LegacyController(presentation: .custom, theme: presentationData.theme, initialLayout: nil)
let sheetController = TGMediaPickerSendActionSheetController(context: legacyController.context, isDark: true, sendButtonFrame: model.interfaceView.doneButtonFrame, canSendSilently: hasSilentPosting, canSendWhenOnline: sendWhenOnlineAvailable && effectiveHasSchedule, canSchedule: effectiveHasSchedule, reminder: reminder, hasTimer: false)
let sheetController = TGMediaPickerSendActionSheetController(context: legacyController.context, isDark: true, sendButtonFrame: model.interfaceView.doneButtonFrame, canSendSilently: hasSilentPosting, canSendWhenOnline: sendWhenOnlineAvailable && effectiveHasSchedule, canSchedule: effectiveHasSchedule, reminder: reminder, hasTimer: hasTimer)
let dismissImpl = { [weak model] in
model?.dismiss(true, false)
dismissAll()
@@ -253,29 +253,39 @@ public class PremiumLimitDisplayComponent: Component {
positionAnimation.timingFunction = CAMediaTimingFunction(name: .easeInEaseOut)
self.badgeView.layer.add(positionAnimation, forKey: "appearance1")

let rotateAnimation = CABasicAnimation(keyPath: "transform.rotation.z")
rotateAnimation.fromValue = 0.0 as NSNumber
rotateAnimation.toValue = -0.3 as NSNumber
rotateAnimation.duration = 0.15
rotateAnimation.fillMode = .forwards
rotateAnimation.timingFunction = CAMediaTimingFunction(name: .easeOut)
rotateAnimation.isRemovedOnCompletion = false
self.badgeView.layer.add(rotateAnimation, forKey: "appearance2")

Queue.mainQueue().after(0.5, {
let rotateAnimation = CABasicAnimation(keyPath: "transform.rotation.z")
rotateAnimation.fromValue = 0.0 as NSNumber
rotateAnimation.toValue = 0.2 as NSNumber
rotateAnimation.duration = 0.2
rotateAnimation.fillMode = .forwards
rotateAnimation.timingFunction = CAMediaTimingFunction(name: .easeOut)
rotateAnimation.isRemovedOnCompletion = false
self.badgeView.layer.add(rotateAnimation, forKey: "appearance2")
let bounceAnimation = CABasicAnimation(keyPath: "transform.rotation.z")
bounceAnimation.fromValue = -0.3 as NSNumber
bounceAnimation.toValue = 0.05 as NSNumber
bounceAnimation.duration = 0.15
bounceAnimation.fillMode = .forwards
bounceAnimation.timingFunction = CAMediaTimingFunction(name: .easeOut)
bounceAnimation.isRemovedOnCompletion = false
self.badgeView.layer.add(bounceAnimation, forKey: "appearance3")
self.badgeView.layer.removeAnimation(forKey: "appearance2")

if !self.badgeView.isHidden {
self.hapticFeedback.impact(.light)
}

Queue.mainQueue().after(0.2) {
Queue.mainQueue().after(0.15) {
let returnAnimation = CABasicAnimation(keyPath: "transform.rotation.z")
returnAnimation.fromValue = 0.2 as NSNumber
returnAnimation.fromValue = 0.05 as NSNumber
returnAnimation.toValue = 0.0 as NSNumber
returnAnimation.duration = 0.18
returnAnimation.duration = 0.1
returnAnimation.fillMode = .forwards
returnAnimation.timingFunction = CAMediaTimingFunction(name: .easeIn)
returnAnimation.fromValue = 0.05 as NSNumber
self.badgeView.layer.add(returnAnimation, forKey: "appearance3")
self.badgeView.layer.removeAnimation(forKey: "appearance2")
self.badgeView.layer.add(returnAnimation, forKey: "appearance4")
self.badgeView.layer.removeAnimation(forKey: "appearance3")
}
})

@@ -285,7 +295,7 @@ public class PremiumLimitDisplayComponent: Component {
}

if let badgeText = component.badgeText {
self.badgeCountLabel.configure(with: badgeText, duration: from != nil ? 0.3 : 0.9)
self.badgeCountLabel.configure(with: badgeText, duration: from != nil ? 0.3 : 0.5)
}
}
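The hunk above retimes the badge wobble: an initial tilt to -0.3 rad, then after 0.5 s a bounce up to 0.05 rad, then after a further 0.15 s a settle back to 0. Below is a rough CoreAnimation sketch of that three-step sequence on an arbitrary layer, not part of the commit; it assumes DispatchQueue in place of the project's Queue.mainQueue() helper, and runBadgeWobble(on:) is an invented name.

import UIKit

// Tilt -> bounce -> settle, approximating the retimed badge wobble in the hunk above.
func runBadgeWobble(on layer: CALayer) {
    func rotate(from: CGFloat, to: CGFloat, duration: CFTimeInterval,
                timing: CAMediaTimingFunctionName, key: String, removing removeKey: String? = nil) {
        let animation = CABasicAnimation(keyPath: "transform.rotation.z")
        animation.fromValue = from as NSNumber
        animation.toValue = to as NSNumber
        animation.duration = duration
        animation.fillMode = .forwards
        animation.timingFunction = CAMediaTimingFunction(name: timing)
        animation.isRemovedOnCompletion = false
        layer.add(animation, forKey: key)
        if let removeKey {
            // Drop the previous step so the new one takes over the rotation.
            layer.removeAnimation(forKey: removeKey)
        }
    }

    rotate(from: 0.0, to: -0.3, duration: 0.15, timing: .easeOut, key: "appearance2")
    DispatchQueue.main.asyncAfter(deadline: .now() + 0.5) {
        rotate(from: -0.3, to: 0.05, duration: 0.15, timing: .easeOut, key: "appearance3", removing: "appearance2")
        DispatchQueue.main.asyncAfter(deadline: .now() + 0.15) {
            rotate(from: 0.05, to: 0.0, duration: 0.1, timing: .easeIn, key: "appearance4", removing: "appearance3")
        }
    }
}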
@@ -199,9 +199,12 @@ private class TimerPickerItemView: UIView {

var value: (Int32, String)? {
didSet {
if let (_, string) = self.value {
if let (value, string) = self.value {
let components = string.components(separatedBy: " ")
if components.count > 1 {
if value == viewOnceTimeout {
self.valueLabel.text = string
self.unitLabel.text = ""
} else if components.count > 1 {
self.valueLabel.text = components[0]
self.unitLabel.text = components[1]
}

@@ -236,8 +239,12 @@ private class TimerPickerItemView: UIView {
self.valueLabel.sizeToFit()
self.unitLabel.sizeToFit()

self.valueLabel.frame = CGRect(origin: CGPoint(x: self.frame.width / 2.0 - 20.0 - self.valueLabel.frame.size.width, y: floor((self.frame.height - self.valueLabel.frame.height) / 2.0)), size: self.valueLabel.frame.size)
self.unitLabel.frame = CGRect(origin: CGPoint(x: self.frame.width / 2.0 - 12.0, y: floor((self.frame.height - self.unitLabel.frame.height) / 2.0) + 2.0), size: self.unitLabel.frame.size)
if let (value, _) = self.value, value == viewOnceTimeout {
self.valueLabel.frame = CGRect(origin: CGPoint(x: floorToScreenPixels((self.frame.width - self.valueLabel.frame.size.width) / 2.0), y: floor((self.frame.height - self.valueLabel.frame.height) / 2.0)), size: self.valueLabel.frame.size)
} else {
self.valueLabel.frame = CGRect(origin: CGPoint(x: self.frame.width / 2.0 - 28.0 - self.valueLabel.frame.size.width, y: floor((self.frame.height - self.valueLabel.frame.height) / 2.0)), size: self.valueLabel.frame.size)
self.unitLabel.frame = CGRect(origin: CGPoint(x: self.frame.width / 2.0 - 20.0, y: floor((self.frame.height - self.unitLabel.frame.height) / 2.0) + 2.0), size: self.unitLabel.frame.size)
}
}
}

@@ -414,7 +421,14 @@ class ChatTimerScreenNode: ViewControllerTracingNode, UIScrollViewDelegate, UIPi
if let pickerView = pickerView as? TimerCustomPickerView {
switch strongSelf.mode {
case .sendTimer:
strongSelf.completion?(timerValues[pickerView.selectedRow(inComponent: 0)])
let row = pickerView.selectedRow(inComponent: 0)
let value: Int32
if row == 0 {
value = viewOnceTimeout
} else {
value = timerValues[row - 1]
}
strongSelf.completion?(value)
case .autoremove:
let timeInterval = strongSelf.autoremoveTimerValues[pickerView.selectedRow(inComponent: 0)]
strongSelf.completion?(Int32(timeInterval))

@@ -456,6 +470,8 @@ class ChatTimerScreenNode: ViewControllerTracingNode, UIScrollViewDelegate, UIPi

self.contentContainerNode.view.addSubview(pickerView)
self.pickerView = pickerView

pickerView.selectRow(1, inComponent: 0, animated: false)
case .autoremove:
let pickerView = TimerCustomPickerView()
pickerView.dataSource = self

@@ -514,7 +530,7 @@ class ChatTimerScreenNode: ViewControllerTracingNode, UIScrollViewDelegate, UIPi
func pickerView(_ pickerView: UIPickerView, numberOfRowsInComponent component: Int) -> Int {
switch self.mode {
case .sendTimer:
return timerValues.count
return timerValues.count + 1
case .autoremove:
return self.autoremoveTimerValues.count
case .mute:

@@ -525,17 +541,30 @@ class ChatTimerScreenNode: ViewControllerTracingNode, UIScrollViewDelegate, UIPi
func pickerView(_ pickerView: UIPickerView, viewForRow row: Int, forComponent component: Int, reusing view: UIView?) -> UIView {
switch self.mode {
case .sendTimer:
let value = timerValues[row]
let string = timeIntervalString(strings: self.presentationData.strings, value: value)
if let view = view as? TimerPickerItemView {
if row == 0 {
let string = self.presentationData.strings.MediaPicker_Timer_ViewOnce
if let view = view as? TimerPickerItemView {
view.value = (viewOnceTimeout, string)
return view
}

let view = TimerPickerItemView()
view.value = (viewOnceTimeout, string)
view.textColor = .white
return view
} else {
let value = timerValues[row - 1]
let string = timeIntervalString(strings: self.presentationData.strings, value: value)
if let view = view as? TimerPickerItemView {
view.value = (value, string)
return view
}

let view = TimerPickerItemView()
view.value = (value, string)
view.textColor = .white
return view
}

let view = TimerPickerItemView()
view.value = (value, string)
view.textColor = .white
return view
case .autoremove:
let itemView: TimerPickerItemView
if let current = view as? TimerPickerItemView {
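The picker changes above prepend a "View Once" row at index 0 and shift the regular timer values down by one, both when reporting the row count and when resolving the selected value. Below is a small standalone sketch of that row/value mapping, not part of the commit; timerValues and viewOnceTimeout are placeholder values here, since the real constants are defined elsewhere in the codebase.

import Foundation

// Placeholder values; the real timerValues / viewOnceTimeout live elsewhere in the project.
let viewOnceTimeout: Int32 = 0x7FFFFFFF
let timerValues: [Int32] = [3, 5, 10, 30]

// Row 0 is the special "View Once" entry; every later row maps to timerValues shifted by one.
func numberOfTimerRows() -> Int {
    return timerValues.count + 1
}

func timerValue(forRow row: Int) -> Int32 {
    if row == 0 {
        return viewOnceTimeout
    } else {
        return timerValues[row - 1]
    }
}

// Example: the diff selects row 1 by default, i.e. the first regular timer value.
let defaultValue = timerValue(forRow: 1)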
@@ -370,6 +370,7 @@ public final class MediaEditor {
}

self.audioDelayTimer?.invalidate()
self.audioDelayTimer = nil
}

public func replaceSource(_ image: UIImage, additionalImage: UIImage?, time: CMTime) {

@@ -573,11 +574,9 @@ public final class MediaEditor {
// self.maybeGeneratePersonSegmentation(image)
}

if let audioTrack = self.values.audioTrack {
self.setAudioTrack(audioTrack)
self.setAudioTrackVolume(self.values.audioTrackVolume)
self.setAudioTrackTrimRange(self.values.audioTrackTrimRange, apply: true)
self.setAudioTrackOffset(self.values.audioTrackOffset, apply: true)
if let _ = self.values.audioTrack {
self.setupAudioPlayback()
self.updateAudioPlaybackRange()
}

if let player {

@@ -609,6 +608,19 @@ public final class MediaEditor {
startPlayback()
}
}
} else if let audioPlayer = self.audioPlayer {
let offset = self.values.audioTrackOffset ?? 0.0
let lowerBound = self.values.audioTrackTrimRange?.lowerBound ?? 0.0

let audioTime = CMTime(seconds: offset + lowerBound, preferredTimescale: CMTimeScale(1000))
audioPlayer.seek(to: audioTime, toleranceBefore: .zero, toleranceAfter: .zero)
if audioPlayer.status != .readyToPlay {
Queue.mainQueue().after(0.1) {
audioPlayer.play()
}
} else {
audioPlayer.play()
}
}
}
})

@@ -936,6 +948,10 @@ public final class MediaEditor {
} else {
if audioPlayer.status == .readyToPlay {
audioPlayer.setRate(rate, time: audioTime, atHostTime: futureTime)
if rate > 0.0 {
// audioPlayer.seek(to: audioTime, toleranceBefore: .zero, toleranceAfter: .zero)
audioPlayer.play()
}
} else {
audioPlayer.seek(to: audioTime, toleranceBefore: .zero, toleranceAfter: .zero)
if rate > 0.0 {

@@ -1051,7 +1067,12 @@ public final class MediaEditor {
if let audioPlayer = self.audioPlayer {
audioPlayer.pause()

self.destroyTimeObservers()
if self.sourceIsVideo {
self.audioDelayTimer?.invalidate()
self.audioDelayTimer = nil
} else {
self.destroyTimeObservers()
}
self.audioPlayer = nil

if !self.sourceIsVideo {

@@ -1059,15 +1080,23 @@ public final class MediaEditor {
}
}

if let audioTrack {
self.setupAudioPlayback()
}

private func setupAudioPlayback() {
if let audioTrack = self.values.audioTrack {
let path = fullDraftPath(peerId: self.context.account.peerId, path: audioTrack.path)
let audioAsset = AVURLAsset(url: URL(fileURLWithPath: path))
let playerItem = AVPlayerItem(asset: audioAsset)
let player = AVPlayer(playerItem: playerItem)
player.automaticallyWaitsToMinimizeStalling = false
self.audioPlayer = player
self.audioPlayer = player
self.maybeGenerateAudioSamples(asset: audioAsset)

if let volume = self.values.audioTrackVolume {
self.audioPlayer?.volume = Float(volume)
}

self.setupTimeObservers()

if !self.sourceIsVideo {
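The new setupAudioPlayback() above pulls the audio-track AVPlayer construction into one place. Below is a minimal AVFoundation sketch of the same setup for a local audio file, not part of the commit; it leaves out the project-specific draft-path resolution, waveform generation and time observers, and makeAudioPlayer(path:volume:) is an invented helper name.

import AVFoundation

// Builds an AVPlayer for a local audio file, mirroring the shape of setupAudioPlayback().
func makeAudioPlayer(path: String, volume: Float?) -> AVPlayer {
    let asset = AVURLAsset(url: URL(fileURLWithPath: path))
    let item = AVPlayerItem(asset: asset)
    let player = AVPlayer(playerItem: item)
    // Start playback immediately instead of waiting for the buffer to fill.
    player.automaticallyWaitsToMinimizeStalling = false
    if let volume {
        player.volume = volume
    }
    return player
}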
@@ -1081,9 +1110,8 @@ public final class MediaEditor {
return values.withUpdatedAudioTrackTrimRange(trimRange)
}

if apply, let trimRange {
let offset = self.values.audioTrackOffset ?? 0.0
self.audioPlayer?.currentItem?.forwardPlaybackEndTime = CMTime(seconds: offset + trimRange.upperBound, preferredTimescale: CMTimeScale(1000))
if apply, let _ = trimRange {
self.updateAudioPlaybackRange()
}
}

@@ -1094,9 +1122,7 @@ public final class MediaEditor {

if apply {
let offset = offset ?? 0.0
let duration = self.duration ?? 0.0
let lowerBound = self.values.audioTrackTrimRange?.lowerBound ?? 0.0
let upperBound = self.values.audioTrackTrimRange?.upperBound ?? duration

let audioTime: CMTime
if self.sourceIsVideo {

@@ -1105,7 +1131,7 @@ public final class MediaEditor {
} else {
audioTime = CMTime(seconds: offset + lowerBound, preferredTimescale: CMTimeScale(1000))
}
self.audioPlayer?.currentItem?.forwardPlaybackEndTime = CMTime(seconds: offset + upperBound, preferredTimescale: CMTimeScale(1000))
self.updateAudioPlaybackRange()
self.audioPlayer?.seek(to: audioTime, toleranceBefore: .zero, toleranceAfter: .zero)
if !self.sourceIsVideo {
self.audioPlayer?.play()

@@ -1113,6 +1139,15 @@ public final class MediaEditor {
}
}

private func updateAudioPlaybackRange() {
if let upperBound = self.values.audioTrackTrimRange?.upperBound {
let offset = self.values.audioTrackOffset ?? 0.0
self.audioPlayer?.currentItem?.forwardPlaybackEndTime = CMTime(seconds: offset + upperBound, preferredTimescale: CMTimeScale(1000))
} else {
self.audioPlayer?.currentItem?.forwardPlaybackEndTime = .invalid
}
}

public func setAudioTrackVolume(_ volume: CGFloat?) {
self.updateValues(mode: .skipRendering) { values in
return values.withUpdatedAudioTrackVolume(volume)
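The new updateAudioPlaybackRange() above centralizes how the audio trim range is applied: the item's forwardPlaybackEndTime is capped at offset + upperBound, or reset to .invalid when there is no trim range. Below is a small sketch of that calculation against a plain AVPlayer, not part of the commit; applyPlaybackRange(to:offset:trimRange:) is an invented name, and the extra seek to the lower bound follows the seek the commit performs when the offset changes.

import AVFoundation

// Applies an optional trim range to an audio player by capping forwardPlaybackEndTime,
// as updateAudioPlaybackRange() does; `offset` shifts the range within the audio file.
func applyPlaybackRange(to player: AVPlayer, offset: Double, trimRange: ClosedRange<Double>?) {
    if let trimRange {
        player.currentItem?.forwardPlaybackEndTime = CMTime(
            seconds: offset + trimRange.upperBound,
            preferredTimescale: CMTimeScale(1000)
        )
        // Start playback from the trimmed lower bound as well.
        let start = CMTime(seconds: offset + trimRange.lowerBound, preferredTimescale: CMTimeScale(1000))
        player.seek(to: start, toleranceBefore: .zero, toleranceAfter: .zero)
    } else {
        // No trim range: let the item play to its natural end.
        player.currentItem?.forwardPlaybackEndTime = .invalid
    }
}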
@@ -1120,6 +1120,9 @@ public extension MediaEditorValues {
if !self.entities.isEmpty {
return true
}
if self.additionalVideoPath != nil {
return true
}
return false
}
}
@@ -483,7 +483,7 @@ public final class MediaEditorVideoExport {
kCVPixelBufferMetalCompatibilityKey as String: true,
AVVideoColorPropertiesKey: colorProperties
]
if !"".isEmpty, let videoTrack = videoTracks.first, videoTrack.preferredTransform.isIdentity && !self.configuration.values.requiresComposing && additionalAsset == nil {
if let videoTrack = videoTracks.first, videoTrack.preferredTransform.isIdentity && !self.configuration.values.requiresComposing {
} else {
self.setupComposer()
}
@@ -2096,7 +2096,7 @@ public final class MediaEditorScreen: ViewController, UIDropInteractionDelegate
}

let mediaEditor = MediaEditor(context: self.context, subject: subject.editorSubject, values: initialValues, hasHistogram: true)
if let initialVideoPosition = self.controller?.initialVideoPosition {
if let initialVideoPosition = controller.initialVideoPosition {
mediaEditor.seek(initialVideoPosition, andPlay: true)
}
mediaEditor.attachPreviewView(self.previewView)

@@ -2157,7 +2157,7 @@ public final class MediaEditorScreen: ViewController, UIDropInteractionDelegate
Queue.mainQueue().async {
self.gradientView.image = gradientImage

if self.controller?.isEditingStory == true && subject.isVideo {
if self.controller?.isEditingStory == true {

} else {
self.previewContainerView.alpha = 1.0

@@ -2178,11 +2178,20 @@ public final class MediaEditorScreen: ViewController, UIDropInteractionDelegate
self.mediaEditor = mediaEditor
self.mediaEditorPromise.set(.single(mediaEditor))

if self.controller?.isEditingStory == true && subject.isVideo {
if controller.isEditingStory == true {
mediaEditor.onFirstDisplay = { [weak self] in
if let self {
self.previewContainerView.alpha = 1.0
self.backgroundDimView.isHidden = false
if subject.isPhoto {
self.previewContainerView.layer.allowsGroupOpacity = true
self.previewContainerView.layer.animateAlpha(from: 0.0, to: 1.0, duration: 0.25, completion: { _ in
self.previewContainerView.layer.allowsGroupOpacity = false
self.previewContainerView.alpha = 1.0
self.backgroundDimView.isHidden = false
})
} else {
self.previewContainerView.alpha = 1.0
self.backgroundDimView.isHidden = false
}
}
}
}

@@ -3103,7 +3112,7 @@ public final class MediaEditorScreen: ViewController, UIDropInteractionDelegate

func presentAudioPicker() {
var isSettingTrack = false
self.controller?.present(legacyICloudFilePicker(theme: self.presentationData.theme, mode: .import, documentTypes: ["public.mp3", "public.mpeg-4-audio", "public.aac-audio"], forceDarkTheme: true, dismissed: { [weak self] in
self.controller?.present(legacyICloudFilePicker(theme: self.presentationData.theme, mode: .import, documentTypes: ["public.mp3", "public.mpeg-4-audio", "public.aac-audio", "org.xiph.flac"], forceDarkTheme: true, dismissed: { [weak self] in
if let self {
Queue.mainQueue().after(0.1) {
if !isSettingTrack {

@@ -3177,14 +3186,27 @@ public final class MediaEditorScreen: ViewController, UIDropInteractionDelegate
return
}

func maybeFixMisencodedText(_ text: String) -> String {
let charactersToSearchFor = CharacterSet(charactersIn: "àåèîóûþÿ")
if text.lowercased().rangeOfCharacter(from: charactersToSearchFor) != nil {
if let data = text.data(using: .windowsCP1252), let string = String(data: data, encoding: .windowsCP1251) {
return string
} else {
return text
}
} else {
return text
}
}

var artist: String?
var title: String?
for data in audioAsset.commonMetadata {
if data.commonKey == .commonKeyArtist {
artist = data.stringValue
if data.commonKey == .commonKeyArtist, let value = data.stringValue {
artist = maybeFixMisencodedText(value)
}
if data.commonKey == .commonKeyTitle {
title = data.stringValue
if data.commonKey == .commonKeyTitle, let value = data.stringValue {
title = maybeFixMisencodedText(value)
}
}
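The maybeFixMisencodedText helper above repairs audio metadata that was decoded as Windows-1252 when it was actually Windows-1251 (Cyrillic): if the string contains tell-tale Latin accented characters, it is re-encoded to CP1252 bytes and reinterpreted as CP1251. The same heuristic as a standalone Foundation function (a sketch, not the commit's code verbatim; fixMisencodedText is an invented name):

import Foundation

// If `text` looks like Cyrillic that was mis-decoded as Windows-1252, re-decode it as
// Windows-1251; otherwise return it unchanged.
func fixMisencodedText(_ text: String) -> String {
    let suspiciousCharacters = CharacterSet(charactersIn: "àåèîóûþÿ")
    guard text.lowercased().rangeOfCharacter(from: suspiciousCharacters) != nil,
          let data = text.data(using: .windowsCP1252),
          let repaired = String(data: data, encoding: .windowsCP1251) else {
        return text
    }
    return repaired
}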
@@ -3461,6 +3483,8 @@ public final class MediaEditorScreen: ViewController, UIDropInteractionDelegate
let stickerEntity = DrawingStickerEntity(content: .file(reaction.stillAnimation, .reaction(.builtin(heart), .white)))
self.interaction?.insertEntity(stickerEntity, scale: 1.175)
}

self.mediaEditor?.play()
}
}
self.stickerScreen = controller
@@ -336,7 +336,8 @@ final class VideoScrubberComponent: Component {
let location = gestureRecognizer.location(in: self.audioContainerView)
return self.audioContainerView.bounds.contains(location)
}

var ignoreScrollUpdates = false
private func updateAudioOffset(done: Bool) {
guard self.audioScrollView.contentSize.width > 0.0, let component = self.component, let duration = self.component?.audioData?.duration else {
return

@@ -353,6 +354,9 @@ final class VideoScrubberComponent: Component {
}

func scrollViewDidScroll(_ scrollView: UIScrollView) {
guard !self.ignoreScrollUpdates else {
return
}
self.updateAudioOffset(done: false)
}

@@ -490,8 +494,12 @@ final class VideoScrubberComponent: Component {
var trimDuration = component.duration

var isFirstTime = false
var audioChanged = false
var animateAudioAppearance = false
if let previousComponent {
if let previousAudioData = previousComponent.audioData, previousAudioData.title != component.audioData?.title {
audioChanged = true
}
if previousComponent.audioData == nil, component.audioData != nil {
self.positionAnimation = nil
animateAudioAppearance = true

@@ -588,12 +596,20 @@ final class VideoScrubberComponent: Component {

self.audioScrollView.isUserInteractionEnabled = self.isAudioSelected || component.audioOnly
audioTransition.setFrame(view: self.audioScrollView, frame: CGRect(origin: CGPoint(x: 0.0, y: 0.0), size: CGSize(width: availableSize.width, height: audioScrubberHeight)))
self.audioScrollView.contentSize = CGSize(width: audioTotalWidth, height: audioScrubberHeight)

let contentSize = CGSize(width: audioTotalWidth, height: audioScrubberHeight)
self.ignoreScrollUpdates = true
if self.audioScrollView.contentSize != contentSize {
self.audioScrollView.contentSize = contentSize
}

if isFirstTime, let offset = component.audioData?.offset, let duration = component.audioData?.duration, duration > 0.0 {
let contentOffset = offset * audioTotalWidth / duration
self.audioScrollView.contentOffset = CGPoint(x: contentOffset, y: 0.0)
} else if audioChanged {
self.audioScrollView.contentOffset = .zero
}
self.ignoreScrollUpdates = false

audioTransition.setCornerRadius(layer: self.audioClippingView.layer, cornerRadius: self.isAudioSelected ? 0.0 : 9.0)

@@ -627,26 +643,39 @@ final class VideoScrubberComponent: Component {
}

let audioTitle = NSAttributedString(string: trackTitle, font: Font.semibold(13.0), textColor: .white)
let audioTitleSize = self.audioTitle.update(
transition: transition,
component: AnyComponent(
MultilineTextComponent(
text: .plain(audioTitle)
)
),
environment: {},
containerSize: availableSize
)
let audioTitleSize: CGSize
if !trackTitle.isEmpty {
audioTitleSize = self.audioTitle.update(
transition: transition,
component: AnyComponent(
MultilineTextComponent(
text: .plain(audioTitle)
)
),
environment: {},
containerSize: availableSize
)
} else {
if let audioTitleView = self.audioTitle.view {
audioTitleSize = audioTitleView.bounds.size
} else {
audioTitleSize = .zero
}
}

let spacing: CGFloat = 4.0
let iconSize = CGSize(width: 14.0, height: 14.0)
let contentTotalWidth = iconSize.width + audioTitleSize.width + spacing

var audioContentTransition = audioTransition
if animateAudioAppearance, component.audioData != nil {
audioContentTransition = .immediate
}
audioTransition.setAlpha(view: self.audioIconView, alpha: self.isAudioSelected ? 0.0 : 1.0)

let audioIconFrame = CGRect(origin: CGPoint(x: max(8.0, floorToScreenPixels((deselectedAudioClipWidth - contentTotalWidth) / 2.0)), y: floorToScreenPixels((audioScrubberHeight - iconSize.height) / 2.0)), size: iconSize)
audioTransition.setBounds(view: self.audioIconView, bounds: CGRect(origin: .zero, size: audioIconFrame.size))
audioTransition.setPosition(view: self.audioIconView, position: audioIconFrame.center)
audioContentTransition.setBounds(view: self.audioIconView, bounds: CGRect(origin: .zero, size: audioIconFrame.size))
audioContentTransition.setPosition(view: self.audioIconView, position: audioIconFrame.center)

let trackTitleIsVisible = !self.isAudioSelected && !component.audioOnly && !trackTitle.isEmpty
if let view = self.audioTitle.view {

@@ -661,7 +690,7 @@ final class VideoScrubberComponent: Component {

let audioTitleFrame = CGRect(origin: CGPoint(x: audioIconFrame.maxX + spacing, y: floorToScreenPixels((audioScrubberHeight - audioTitleSize.height) / 2.0)), size: audioTitleSize)
view.bounds = CGRect(origin: .zero, size: audioTitleFrame.size)
audioTransition.setPosition(view: view, position: audioTitleFrame.center)
audioContentTransition.setPosition(view: view, position: audioTitleFrame.center)
}
audioTransition.setAlpha(view: self.audioIconView, alpha: trackTitleIsVisible ? 1.0 : 0.0)
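The scrubber changes above add an ignoreScrollUpdates flag so that setting contentSize and contentOffset programmatically does not feed back into updateAudioOffset via scrollViewDidScroll. Below is a bare-bones sketch of that reentrancy guard on a plain UIScrollView delegate, not part of the commit; AudioScrubberView and setContent(totalWidth:offset:) are invented names.

import UIKit

final class AudioScrubberView: UIView, UIScrollViewDelegate {
    private let scrollView = UIScrollView()
    // Set while we mutate the scroll view ourselves, so delegate callbacks are ignored.
    private var ignoreScrollUpdates = false

    override init(frame: CGRect) {
        super.init(frame: frame)
        scrollView.delegate = self
        addSubview(scrollView)
    }

    required init?(coder: NSCoder) {
        fatalError("init(coder:) has not been implemented")
    }

    func setContent(totalWidth: CGFloat, offset: CGFloat) {
        ignoreScrollUpdates = true
        scrollView.contentSize = CGSize(width: totalWidth, height: bounds.height)
        scrollView.contentOffset = CGPoint(x: offset, y: 0.0)
        ignoreScrollUpdates = false
    }

    func scrollViewDidScroll(_ scrollView: UIScrollView) {
        guard !ignoreScrollUpdates else {
            return
        }
        // React only to user-driven scrolling here, e.g. recompute the audio offset.
    }
}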
@@ -454,6 +454,7 @@ final class StoryItemOverlaysView: UIView {
placeholderColor: flags.contains(.isDark) ? UIColor(white: 1.0, alpha: 0.1) : UIColor(white: 0.0, alpha: 0.1),
pointSize: CGSize(width: min(256, itemSize.width), height: min(256, itemSize.height))
)
customEmojiView.clipsToBounds = true
customEmojiView.updateTextColor(flags.contains(.isDark) ? .white : .black)

self.customEmojiLoadDisposable?.dispose()

@@ -491,6 +492,7 @@ final class StoryItemOverlaysView: UIView {
stickerTransition.setPosition(view: customEmojiView, position: stickerFrame.center)
stickerTransition.setBounds(view: customEmojiView, bounds: CGRect(origin: CGPoint(), size: stickerFrame.size))
stickerTransition.setScale(view: customEmojiView, scale: stickerScale)
customEmojiView.layer.cornerRadius = stickerFrame.size.width * 0.1

customEmojiView.isActive = isActive
}
@@ -1 +0,0 @@