Mirror of https://github.com/Swiftgram/Telegram-iOS.git (synced 2025-06-16 05:55:20 +00:00)

Commit 5761831147: Merge commit 'b53be04a2e4759f263a15cf5a44e75a2de5c1371'
@@ -9758,6 +9758,8 @@ Sorry for the inconvenience.";

"MediaEditor.AddGif" = "Add GIF";
"MediaEditor.AddLocation" = "Add Location";
"MediaEditor.AddLocationShort" = "Location";
"MediaEditor.AddAudio" = "Audio";

"Premium.Stories" = "Upgraded Stories";
"Premium.StoriesInfo" = "Priority order, stealth mode, permanent views history and more.";
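These `MediaEditor.*` keys are consumed through the generated `PresentationStrings` accessors (the sticker picker below uses `strings.MediaEditor_AddLocation`). A minimal sketch of resolving one of the new keys, assuming a `PresentationData` instance is already available in the caller:

import TelegramPresentationData

// Sketch only: `presentationData` would normally come from the surrounding controller.
func addLocationButtonTitle(presentationData: PresentationData) -> String {
    // "MediaEditor.AddLocation" is exposed as the generated accessor MediaEditor_AddLocation;
    // the picker button upper-cases it before rendering.
    return presentationData.strings.MediaEditor_AddLocation.uppercased()
}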
@@ -101,6 +101,7 @@ swift_library(
        "//submodules/GalleryUI",
        "//submodules/MediaPlayer:UniversalMediaPlayer",
        "//submodules/TelegramUniversalVideoContent",
        "//submodules/TelegramUI/Components/CameraButtonComponent",
    ],
    visibility = [
        "//visibility:public",
@@ -21,6 +21,8 @@ import StickerPackPreviewUI
import EntityKeyboardGifContent
import GalleryUI
import UndoUI
import CameraButtonComponent
import BundleIconComponent

public struct StickerPickerInputData: Equatable {
    var emoji: EmojiPagerContentComponent
@@ -527,6 +529,9 @@ public class StickerPickerScreen: ViewController {
        self.storyStickersContentView?.locationAction = { [weak self] in
            self?.controller?.presentLocationPicker()
        }
        self.storyStickersContentView?.audioAction = { [weak self] in
            self?.controller?.presentAudioPicker()
        }

        let gifItems: Signal<EntityKeyboardGifContent?, NoError>
        if controller.hasGifs {
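The content view only exposes plain closures (`locationAction`, `audioAction`); the node forwards them to the controller's public entry points. A self-contained sketch of that forwarding pattern, with placeholder types standing in for the real classes:

import Foundation

// Placeholder types; StoryStickersContentView / StickerPickerScreen follow the same shape.
final class ContentView {
    var locationAction: () -> Void = {}
    var audioAction: () -> Void = {}
}

final class PickerController {
    // Hosts assign these; the content view only knows about its own closures.
    var presentLocationPicker: () -> Void = {}
    var presentAudioPicker: () -> Void = {}

    let contentView = ContentView()

    func wire() {
        contentView.locationAction = { [weak self] in
            self?.presentLocationPicker()
        }
        contentView.audioAction = { [weak self] in
            self?.presentAudioPicker()
        }
    }
}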
@@ -1946,6 +1951,7 @@ public class StickerPickerScreen: ViewController {

    public var presentGallery: () -> Void = { }
    public var presentLocationPicker: () -> Void = { }
    public var presentAudioPicker: () -> Void = { }

    public init(context: AccountContext, inputData: Signal<StickerPickerInputData, NoError>, defaultToEmoji: Bool = false, hasGifs: Bool = false) {
        self.context = context
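A host that presents the picker is expected to fill in these closures itself. A hedged sketch of that wiring; the module imports and handler parameters are assumptions, only the `StickerPickerScreen` initializer and closure names come from this diff:

import SwiftSignalKit
import AccountContext
import StickerPickerScreen

// The two handlers are supplied by the presenting screen; they are placeholders here.
func makeStickerPicker(
    context: AccountContext,
    inputData: Signal<StickerPickerInputData, NoError>,
    onPickLocation: @escaping () -> Void,
    onPickAudio: @escaping () -> Void
) -> StickerPickerScreen {
    let controller = StickerPickerScreen(context: context, inputData: inputData, defaultToEmoji: false, hasGifs: true)
    controller.presentLocationPicker = onPickLocation
    controller.presentAudioPicker = onPickAudio
    return controller
}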
@ -2005,92 +2011,315 @@ public class StickerPickerScreen: ViewController {
|
||||
}
|
||||
}
|
||||
|
||||
final class StoryStickersContentView: UIView, EmojiCustomContentView {
|
||||
override public static var layerClass: AnyClass {
|
||||
return PassthroughLayer.self
|
||||
private final class CustomContentButton: Component {
|
||||
let theme: PresentationTheme
|
||||
let title: String
|
||||
let iconName: String
|
||||
let useOpaqueTheme: Bool
|
||||
weak var tintContainerView: UIView?
|
||||
|
||||
public init(
|
||||
theme: PresentationTheme,
|
||||
title: String,
|
||||
iconName: String,
|
||||
useOpaqueTheme: Bool,
|
||||
tintContainerView: UIView
|
||||
) {
|
||||
self.theme = theme
|
||||
self.title = title
|
||||
self.iconName = iconName
|
||||
self.useOpaqueTheme = useOpaqueTheme
|
||||
self.tintContainerView = tintContainerView
|
||||
}
|
||||
|
||||
let tintContainerView = UIView()
|
||||
|
||||
private let backgroundLayer = SimpleLayer()
|
||||
private let tintBackgroundLayer = SimpleLayer()
|
||||
|
||||
private let iconView: UIImageView
|
||||
private let title: ComponentView<Empty>
|
||||
private let button: HighlightTrackingButton
|
||||
|
||||
var locationAction: () -> Void = {}
|
||||
|
||||
override init(frame: CGRect) {
|
||||
self.iconView = UIImageView(image: UIImage(bundleImageName: "Chat/Attach Menu/Location"))
|
||||
self.iconView.tintColor = .white
|
||||
|
||||
self.title = ComponentView<Empty>()
|
||||
self.button = HighlightTrackingButton()
|
||||
|
||||
super.init(frame: frame)
|
||||
|
||||
self.layer.addSublayer(self.backgroundLayer)
|
||||
self.tintContainerView.layer.addSublayer(self.tintBackgroundLayer)
|
||||
|
||||
self.addSubview(self.iconView)
|
||||
self.addSubview(self.button)
|
||||
|
||||
self.button.addTarget(self, action: #selector(self.locationPressed), for: .touchUpInside)
|
||||
|
||||
(self.layer as? PassthroughLayer)?.mirrorLayer = self.tintContainerView.layer
|
||||
public static func ==(lhs: CustomContentButton, rhs: CustomContentButton) -> Bool {
|
||||
if lhs.theme !== rhs.theme {
|
||||
return false
|
||||
}
|
||||
if lhs.title != rhs.title {
|
||||
return false
|
||||
}
|
||||
if lhs.iconName != rhs.iconName {
|
||||
return false
|
||||
}
|
||||
if lhs.useOpaqueTheme != rhs.useOpaqueTheme {
|
||||
return false
|
||||
}
|
||||
return true
|
||||
}
|
||||
|
||||
required init?(coder: NSCoder) {
|
||||
fatalError("init(coder:) has not been implemented")
|
||||
}
|
||||
final class View: UIView {
|
||||
override public static var layerClass: AnyClass {
|
||||
return PassthroughLayer.self
|
||||
}
|
||||
|
||||
private let backgroundLayer = SimpleLayer()
|
||||
let tintBackgroundLayer = SimpleLayer()
|
||||
|
||||
private var icon: ComponentView<Empty>
|
||||
private var title: ComponentView<Empty>
|
||||
|
||||
private var component: CustomContentButton?
|
||||
|
||||
@objc private func locationPressed() {
|
||||
self.locationAction()
|
||||
}
|
||||
|
||||
func update(theme: PresentationTheme, strings: PresentationStrings, useOpaqueTheme: Bool, availableSize: CGSize, transition: Transition) -> CGSize {
|
||||
if useOpaqueTheme {
|
||||
self.backgroundLayer.backgroundColor = theme.chat.inputMediaPanel.panelContentControlOpaqueSelectionColor.cgColor
|
||||
self.tintBackgroundLayer.backgroundColor = UIColor.white.cgColor
|
||||
} else {
|
||||
self.backgroundLayer.backgroundColor = theme.chat.inputMediaPanel.panelContentControlVibrantSelectionColor.cgColor
|
||||
self.tintBackgroundLayer.backgroundColor = UIColor(white: 1.0, alpha: 0.2).cgColor
|
||||
override init(frame: CGRect) {
|
||||
self.icon = ComponentView<Empty>()
|
||||
self.title = ComponentView<Empty>()
|
||||
|
||||
super.init(frame: frame)
|
||||
|
||||
self.isExclusiveTouch = true
|
||||
|
||||
self.layer.addSublayer(self.backgroundLayer)
|
||||
}
|
||||
|
||||
self.backgroundLayer.cornerRadius = 6.0
|
||||
self.tintBackgroundLayer.cornerRadius = 6.0
|
||||
required init?(coder: NSCoder) {
|
||||
fatalError("init(coder:) has not been implemented")
|
||||
}
|
||||
|
||||
let size = CGSize(width: availableSize.width, height: 76.0)
|
||||
let titleSize = self.title.update(
|
||||
transition: .immediate,
|
||||
component: AnyComponent(Text(
|
||||
text: strings.MediaEditor_AddLocation.uppercased(),
|
||||
font: Font.with(size: 23.0, design: .camera),
|
||||
color: .white
|
||||
)),
|
||||
func update(component: CustomContentButton, availableSize: CGSize, state: EmptyComponentState, environment: Environment<Empty>, transition: Transition) -> CGSize {
|
||||
// if component.useOpaqueTheme {
|
||||
// self.backgroundLayer.backgroundColor = component.theme.chat.inputMediaPanel.panelContentControlOpaqueSelectionColor.cgColor
|
||||
// self.tintBackgroundLayer.backgroundColor = UIColor.white.cgColor
|
||||
// } else {
|
||||
// self.backgroundLayer.backgroundColor = component.theme.chat.inputMediaPanel.panelContentControlVibrantSelectionColor.cgColor
|
||||
// self.tintBackgroundLayer.backgroundColor = UIColor(white: 1.0, alpha: 0.2).cgColor
|
||||
// }
|
||||
self.backgroundLayer.backgroundColor = UIColor(rgb: 0xffffff, alpha: 0.11).cgColor
|
||||
|
||||
let iconSize = self.icon.update(
|
||||
transition: .immediate,
|
||||
component: AnyComponent(BundleIconComponent(
|
||||
name: component.iconName,
|
||||
tintColor: .white,
|
||||
maxSize: CGSize(width: 20.0, height: 20.0)
|
||||
)),
|
||||
environment: {},
|
||||
containerSize: availableSize
|
||||
)
|
||||
let titleSize = self.title.update(
|
||||
transition: .immediate,
|
||||
component: AnyComponent(Text(
|
||||
text: component.title.uppercased(),
|
||||
font: Font.with(size: 23.0, design: .camera),
|
||||
color: .white
|
||||
)),
|
||||
environment: {},
|
||||
containerSize: availableSize
|
||||
)
|
||||
|
||||
let padding: CGFloat = 30.0
|
||||
let spacing: CGFloat = 3.0
|
||||
let buttonSize = CGSize(width: padding + iconSize.width + spacing + titleSize.width + padding, height: 34.0)
|
||||
|
||||
if let view = self.icon.view {
|
||||
if view.superview == nil {
|
||||
self.addSubview(view)
|
||||
}
|
||||
transition.setFrame(view: view, frame: CGRect(origin: CGPoint(x: padding, y: floorToScreenPixels((buttonSize.height - iconSize.height) / 2.0)), size: iconSize))
|
||||
}
|
||||
if let view = self.title.view {
|
||||
if view.superview == nil {
|
||||
self.addSubview(view)
|
||||
}
|
||||
transition.setFrame(view: view, frame: CGRect(origin: CGPoint(x: padding + iconSize.width + spacing, y: floorToScreenPixels((buttonSize.height - titleSize.height) / 2.0)), size: titleSize))
|
||||
}
|
||||
|
||||
self.backgroundLayer.cornerRadius = 6.0
|
||||
self.tintBackgroundLayer.cornerRadius = 6.0
|
||||
|
||||
self.backgroundLayer.frame = CGRect(origin: .zero, size: buttonSize)
|
||||
|
||||
if self.tintBackgroundLayer.superlayer == nil, let tintContainerView = component.tintContainerView {
|
||||
Queue.mainQueue().justDispatch {
|
||||
let mappedFrame = self.convert(self.bounds, to: tintContainerView)
|
||||
self.tintBackgroundLayer.frame = mappedFrame
|
||||
}
|
||||
}
|
||||
|
||||
return buttonSize
|
||||
}
|
||||
}
|
||||
|
||||
    public func makeView() -> View {
        return View(frame: CGRect())
    }

    public func update(view: View, availableSize: CGSize, state: EmptyComponentState, environment: Environment<Empty>, transition: Transition) -> CGSize {
        view.update(component: self, availableSize: availableSize, state: state, environment: environment, transition: transition)
    }
}
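CustomContentButton follows the usual ComponentFlow shape: the component itself is an equatable value, the nested View owns the UIKit state, and update(view:...) pushes the new component into that view. A reduced sketch of the same shape with an illustrative LabelComponent (not part of this commit):

import UIKit
import ComponentFlow

// Illustrative only: a minimal component with the same makeView/update structure.
private final class LabelComponent: Component {
    let text: String

    init(text: String) {
        self.text = text
    }

    static func ==(lhs: LabelComponent, rhs: LabelComponent) -> Bool {
        return lhs.text == rhs.text
    }

    final class View: UILabel {
        func update(component: LabelComponent, availableSize: CGSize) -> CGSize {
            self.text = component.text
            return self.sizeThatFits(availableSize)
        }
    }

    func makeView() -> View {
        return View(frame: CGRect())
    }

    func update(view: View, availableSize: CGSize, state: EmptyComponentState, environment: Environment<Empty>, transition: Transition) -> CGSize {
        return view.update(component: self, availableSize: availableSize)
    }
}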
|
||||
final class ItemStack<ChildEnvironment: Equatable>: CombinedComponent {
    typealias EnvironmentType = ChildEnvironment

    private let items: [AnyComponentWithIdentity<ChildEnvironment>]
    private let padding: CGFloat
    private let minSpacing: CGFloat

    init(_ items: [AnyComponentWithIdentity<ChildEnvironment>], padding: CGFloat, minSpacing: CGFloat) {
        self.items = items
        self.padding = padding
        self.minSpacing = minSpacing
    }

    static func ==(lhs: ItemStack<ChildEnvironment>, rhs: ItemStack<ChildEnvironment>) -> Bool {
        if lhs.items != rhs.items {
            return false
        }
        if lhs.padding != rhs.padding {
            return false
        }
        if lhs.minSpacing != rhs.minSpacing {
            return false
        }
        return true
    }

    static var body: Body {
        let children = ChildMap(environment: ChildEnvironment.self, keyedBy: AnyHashable.self)

        return { context in
            let updatedChildren = context.component.items.map { item in
                return children[item.id].update(
                    component: item.component, environment: {
                        context.environment[ChildEnvironment.self]
                    },
                    availableSize: context.availableSize,
                    transition: context.transition
                )
            }

            var groups: [[Int]] = []
            var currentGroup: [Int] = []
            for i in 0 ..< updatedChildren.count {
                var itemsWidth: CGFloat = 0.0
                for j in currentGroup {
                    itemsWidth += updatedChildren[j].size.width
                }
                itemsWidth += updatedChildren[i].size.width
                let rowItemsCount = currentGroup.count + 1

                let remainingWidth = context.availableSize.width - itemsWidth - context.component.padding * 2.0
                let spacing = remainingWidth / CGFloat(rowItemsCount - 1)
                if spacing < context.component.minSpacing {
                    groups.append(currentGroup)
                } else {
                    currentGroup.append(i)
                }
            }
            if !currentGroup.isEmpty {
                groups.append(currentGroup)
            }

            var size = CGSize(width: context.availableSize.width, height: 0.0)
            for group in groups {
                var groupHeight: CGFloat = 0.0
                var spacing = context.component.minSpacing
                var itemsWidth = 0.0
                for i in group {
                    let childSize = updatedChildren[i].size
                    groupHeight = max(groupHeight, childSize.height)
                    itemsWidth += childSize.width
                }
                let remainingWidth = context.availableSize.width - itemsWidth - context.component.padding * 2.0
                spacing = remainingWidth / CGFloat(group.count - 1)

                var nextX: CGFloat = floorToScreenPixels((context.availableSize.width - itemsWidth) / 2.0) //context.component.padding
                for i in group {
                    let child = updatedChildren[i]
                    let frame = CGRect(origin: CGPoint(x: nextX, y: size.height + floorToScreenPixels((groupHeight - child.size.height) / 2.0)), size: child.size)

                    context.add(child
                        .position(child.size.centered(in: frame).center)
                    )
                    nextX += child.size.width + spacing
                }
                size.height += groupHeight
            }

            return size
        }
    }
}

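The first pass in body wraps the items into rows greedily: a child is appended to currentGroup only while the spacing implied by the leftover width stays at or above minSpacing. A standalone sketch of that row-breaking rule on plain widths (simplified: unlike the component, it also carries the overflowing item into the next row):

import CoreGraphics

// Greedy row wrapping in the spirit of ItemStack.body.
func wrapIntoRows(widths: [CGFloat], availableWidth: CGFloat, padding: CGFloat, minSpacing: CGFloat) -> [[Int]] {
    var groups: [[Int]] = []
    var currentGroup: [Int] = []
    for i in 0 ..< widths.count {
        let itemsWidth = currentGroup.reduce(CGFloat(0.0)) { $0 + widths[$1] } + widths[i]
        let rowItemsCount = currentGroup.count + 1
        let remainingWidth = availableWidth - itemsWidth - padding * 2.0
        let spacing = remainingWidth / CGFloat(max(rowItemsCount - 1, 1))
        if spacing < minSpacing && !currentGroup.isEmpty {
            groups.append(currentGroup)
            currentGroup = [i]
        } else {
            currentGroup.append(i)
        }
    }
    if !currentGroup.isEmpty {
        groups.append(currentGroup)
    }
    return groups
}

// Example: three 120pt buttons in a 320pt panel with 18pt padding and 8pt minimum spacing
// wrap into two rows: rows == [[0, 1], [2]].
let rows = wrapIntoRows(widths: [120, 120, 120], availableWidth: 320, padding: 18, minSpacing: 8)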
final class StoryStickersContentView: UIView, EmojiCustomContentView {
|
||||
let tintContainerView = UIView()
|
||||
|
||||
private let container = ComponentView<Empty>()
|
||||
|
||||
var locationAction: () -> Void = {}
|
||||
var audioAction: () -> Void = {}
|
||||
|
||||
func update(theme: PresentationTheme, strings: PresentationStrings, useOpaqueTheme: Bool, availableSize: CGSize, transition: Transition) -> CGSize {
|
||||
let padding: CGFloat = 22.0
|
||||
let size = self.container.update(
|
||||
transition: transition,
|
||||
component: AnyComponent(
|
||||
ItemStack(
|
||||
[
|
||||
AnyComponentWithIdentity(
|
||||
id: "location",
|
||||
component: AnyComponent(
|
||||
CameraButton(
|
||||
content: AnyComponentWithIdentity(
|
||||
id: "content",
|
||||
component: AnyComponent(
|
||||
CustomContentButton(
|
||||
theme: theme,
|
||||
title: "LOCATION",
|
||||
iconName: "Chat/Attach Menu/Location",
|
||||
useOpaqueTheme: useOpaqueTheme,
|
||||
tintContainerView: self.tintContainerView
|
||||
)
|
||||
)
|
||||
),
|
||||
action: { [weak self] in
|
||||
if let self {
|
||||
self.locationAction()
|
||||
}
|
||||
})
|
||||
)
|
||||
)
|
||||
// AnyComponentWithIdentity(
|
||||
// id: "audio",
|
||||
// component: AnyComponent(
|
||||
// CameraButton(
|
||||
// content: AnyComponentWithIdentity(
|
||||
// id: "audio",
|
||||
// component: AnyComponent(
|
||||
// CustomContentButton(
|
||||
// theme: theme,
|
||||
// title: "AUDIO",
|
||||
// iconName: "Media Editor/Audio",
|
||||
// useOpaqueTheme: useOpaqueTheme,
|
||||
// tintContainerView: self.tintContainerView
|
||||
// )
|
||||
// )
|
||||
// ),
|
||||
// action: { [weak self] in
|
||||
// if let self {
|
||||
// self.audioAction()
|
||||
// }
|
||||
// })
|
||||
// )
|
||||
// )
|
||||
],
|
||||
padding: 18.0,
|
||||
minSpacing: 8.0
|
||||
)
|
||||
),
|
||||
environment: {},
|
||||
containerSize: availableSize
|
||||
)
|
||||
let iconSize = CGSize(width: 20.0, height: 20.0)
|
||||
let padding: CGFloat = 6.0
|
||||
let spacing: CGFloat = 3.0
|
||||
let buttonSize = CGSize(width: padding + iconSize.width + spacing + titleSize.width + padding, height: 34.0)
|
||||
let buttonFrame = CGRect(origin: CGPoint(x: floorToScreenPixels((size.width - buttonSize.width) / 2.0), y: floorToScreenPixels((size.height - buttonSize.height) / 2.0)), size: buttonSize)
|
||||
|
||||
transition.setFrame(layer: self.backgroundLayer, frame: buttonFrame)
|
||||
transition.setFrame(layer: self.tintBackgroundLayer, frame: buttonFrame)
|
||||
transition.setFrame(view: self.button, frame: buttonFrame)
|
||||
|
||||
transition.setFrame(view: self.iconView, frame: CGRect(origin: CGPoint(x: padding, y: floorToScreenPixels((buttonSize.height - iconSize.height) / 2.0)).offsetBy(buttonFrame.origin), size: iconSize))
|
||||
if let titleView = self.title.view {
|
||||
if titleView.superview == nil {
|
||||
self.insertSubview(titleView, aboveSubview: self.iconView)
|
||||
if let view = self.container.view {
|
||||
if view.superview == nil {
|
||||
self.addSubview(view)
|
||||
}
|
||||
transition.setFrame(view: titleView, frame: CGRect(origin: CGPoint(x: padding + iconSize.width + spacing, y: floorToScreenPixels((buttonSize.height - titleSize.height) / 2.0)).offsetBy(buttonFrame.origin), size: titleSize))
|
||||
view.frame = CGRect(origin: CGPoint(x: 0.0, y: padding), size: size)
|
||||
}
|
||||
|
||||
return size
|
||||
return CGSize(width: size.width, height: size.height + padding * 2.0)
|
||||
}
|
||||
}
|
||||
|
||||
|
@@ -5,8 +5,17 @@ import TelegramPresentationData
import LegacyUI

private class DocumentPickerViewController: UIDocumentPickerViewController {
    var forceDarkTheme = false
    var didDisappear: (() -> Void)?

    override func viewDidLoad() {
        super.viewDidLoad()

        if #available(iOS 13.0, *), self.forceDarkTheme {
            self.overrideUserInterfaceStyle = .dark
        }
    }

    override func viewDidDisappear(_ animated: Bool) {
        super.viewDidDisappear(animated)

@@ -54,7 +63,7 @@ public enum LegacyICloudFilePickerMode {
    }
}

public func legacyICloudFilePicker(theme: PresentationTheme, mode: LegacyICloudFilePickerMode = .default, documentTypes: [String] = ["public.item"], completion: @escaping ([URL]) -> Void) -> ViewController {
public func legacyICloudFilePicker(theme: PresentationTheme, mode: LegacyICloudFilePickerMode = .default, documentTypes: [String] = ["public.item"], forceDarkTheme: Bool = false, completion: @escaping ([URL]) -> Void) -> ViewController {
    var dismissImpl: (() -> Void)?
    let legacyController = LegacyICloudFileController(presentation: .modal(animateIn: true), theme: theme, completion: { urls in
        dismissImpl?()
@@ -63,6 +72,7 @@ public func legacyICloudFilePicker(theme: PresentationTheme, mode: LegacyICloudF
    legacyController.statusBar.statusBarStyle = .Black

    let controller = DocumentPickerViewController(documentTypes: documentTypes, in: mode.documentPickerMode)
    controller.forceDarkTheme = forceDarkTheme
    controller.didDisappear = {
        dismissImpl?()
    }
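The new forceDarkTheme flag reaches the system picker through overrideUserInterfaceStyle in the private DocumentPickerViewController. A hedged usage sketch from a presenting controller; presentationData and present(_:in:) are assumed to come from the host:

// Sketch only: pick an audio file with the document picker forced into dark appearance,
// to match the dark media-editor UI this flag is meant for.
let picker = legacyICloudFilePicker(
    theme: presentationData.theme,
    documentTypes: ["public.audio"],
    forceDarkTheme: true,
    completion: { urls in
        guard let url = urls.first else {
            return
        }
        // Hand the file to the media editor, e.g. as the path of a MediaAudioTrack.
        print("picked audio at \(url.path)")
    }
)
present(picker, in: .window(.root))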
@@ -545,7 +545,11 @@ public final class AudioWaveformComponent: Component {
            gravityMultiplierY = 0.5
        }

        context.setFillColor(component.backgroundColor.mixedWith(component.foregroundColor, alpha: colorMixFraction).cgColor)
        if component.backgroundColor.alpha > 0.0 {
            context.setFillColor(component.backgroundColor.mixedWith(component.foregroundColor, alpha: colorMixFraction).cgColor)
        } else {
            context.setFillColor(component.foregroundColor.cgColor)
        }
        context.setBlendMode(.copy)

        let adjustedSampleHeight = sampleHeight - diff
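The waveform now falls back to the plain foreground color whenever the background is fully transparent, so the blend is only applied against a visible background. The same decision as a standalone helper, where blend stands in for the repository's UIColor.mixedWith(_:alpha:):

import UIKit

// Bar fill: blend toward the foreground when a visible background exists,
// otherwise use the foreground color directly.
func waveformFillColor(background: UIColor, foreground: UIColor, mixFraction: CGFloat) -> UIColor {
    if background.cgColor.alpha > 0.0 {
        return blend(background, with: foreground, fraction: mixFraction)
    } else {
        return foreground
    }
}

// Minimal linear RGBA blend standing in for UIColor.mixedWith(_:alpha:).
func blend(_ a: UIColor, with b: UIColor, fraction: CGFloat) -> UIColor {
    var (r1, g1, b1, a1): (CGFloat, CGFloat, CGFloat, CGFloat) = (0, 0, 0, 0)
    var (r2, g2, b2, a2): (CGFloat, CGFloat, CGFloat, CGFloat) = (0, 0, 0, 0)
    _ = a.getRed(&r1, green: &g1, blue: &b1, alpha: &a1)
    _ = b.getRed(&r2, green: &g2, blue: &b2, alpha: &a2)
    let t = max(0.0, min(1.0, fraction))
    return UIColor(red: r1 + (r2 - r1) * t, green: g1 + (g2 - g1) * t, blue: b1 + (b2 - b1) * t, alpha: a1 + (a2 - a1) * t)
}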
@@ -44,8 +44,11 @@ public final class MediaEditor {

    private let context: AccountContext
    private let subject: Subject

    private let clock = CMClockGetHostTimeClock()
    private var player: AVPlayer?
    private var additionalPlayer: AVPlayer?
    private var audioPlayer: AVPlayer?
    private var timeObserver: Any?
    private var didPlayToEndTimeObserver: NSObjectProtocol?

@@ -281,7 +284,10 @@ public final class MediaEditor {
                additionalVideoPositionChanges: [],
                drawing: nil,
                entities: [],
                toolValues: [:]
                toolValues: [:],
                audioTrack: nil,
                audioTrackTrimRange: nil,
                audioTrackSamples: nil
            )
        }
        self.valuesPromise.set(.single(self.values))
@@ -336,6 +342,7 @@ public final class MediaEditor {
        }

        let context = self.context
        let clock = self.clock
        let textureSource: Signal<(TextureSource, UIImage?, AVPlayer?, AVPlayer?, UIColor, UIColor), NoError>
        switch subject {
        case let .image(image, _):
@@ -349,6 +356,11 @@ public final class MediaEditor {

            let playerItem = AVPlayerItem(asset: asset)
            let player = AVPlayer(playerItem: playerItem)
            if #available(iOS 15.0, *) {
                player.sourceClock = clock
            } else {
                player.masterClock = clock
            }
            player.automaticallyWaitsToMinimizeStalling = false

            if let gradientColors = draft.values.gradientColors {
@@ -391,12 +403,22 @@ public final class MediaEditor {
            textureSource = Signal { subscriber in
                let asset = AVURLAsset(url: URL(fileURLWithPath: path))
                let player = AVPlayer(playerItem: AVPlayerItem(asset: asset))
                if #available(iOS 15.0, *) {
                    player.sourceClock = clock
                } else {
                    player.masterClock = clock
                }
                player.automaticallyWaitsToMinimizeStalling = false

                var additionalPlayer: AVPlayer?
                if let additionalPath {
                    let additionalAsset = AVURLAsset(url: URL(fileURLWithPath: additionalPath))
                    additionalPlayer = AVPlayer(playerItem: AVPlayerItem(asset: additionalAsset))
                    if #available(iOS 15.0, *) {
                        additionalPlayer?.sourceClock = clock
                    } else {
                        additionalPlayer?.masterClock = clock
                    }
                    additionalPlayer?.automaticallyWaitsToMinimizeStalling = false
                }

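Both the main and the additional player are pinned to the shared host clock owned by MediaEditor, so they can later be driven in lockstep through setRate(_:time:atHostTime:). The configuration step in isolation, using only AVFoundation (the function name is illustrative):

import AVFoundation
import CoreMedia

// Pin a player to the shared host clock so several players can be rate-synchronized.
// Mirrors the sourceClock/masterClock branching above.
func configureForSynchronizedPlayback(_ player: AVPlayer, clock: CMClock = CMClockGetHostTimeClock()) {
    if #available(iOS 15.0, *) {
        player.sourceClock = clock
    } else {
        player.masterClock = clock
    }
    // Do not let AVFoundation delay playback to buffer; the editor drives timing itself.
    player.automaticallyWaitsToMinimizeStalling = false
}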
@@ -521,16 +543,24 @@ public final class MediaEditor {
            if let audioTracks = player.currentItem?.asset.tracks(withMediaType: .audio) {
                hasAudio = !audioTracks.isEmpty
            }
            self.playerPlaybackState = (duration, time.seconds, player.rate > 0.0, hasAudio)
            if time.seconds > 20000 {

            } else {
                self.playerPlaybackState = (duration, time.seconds, player.rate > 0.0, hasAudio)
            }
        }
        self.didPlayToEndTimeObserver = NotificationCenter.default.addObserver(forName: NSNotification.Name.AVPlayerItemDidPlayToEndTime, object: player.currentItem, queue: nil, using: { [weak self] notification in
            if let self {
                let start = self.values.videoTrimRange?.lowerBound ?? 0.0
                self.player?.seek(to: CMTime(seconds: start, preferredTimescale: CMTimeScale(1000)))
                self.additionalPlayer?.seek(to: CMTime(seconds: start, preferredTimescale: CMTimeScale(1000)))
                self.audioPlayer?.seek(to: CMTime(seconds: start, preferredTimescale: CMTimeScale(1000)))
                self.onPlaybackAction(.seek(start))

                self.player?.play()
                self.additionalPlayer?.play()
                self.audioPlayer?.play()

                Queue.mainQueue().justDispatch {
                    self.onPlaybackAction(.play)
                }
@@ -643,9 +673,10 @@ public final class MediaEditor {
        if !play {
            player.pause()
            self.additionalPlayer?.pause()
            self.audioPlayer?.pause()
            self.onPlaybackAction(.pause)
        }
        let targetPosition = CMTime(seconds: position, preferredTimescale: CMTimeScale(60.0))
        let targetPosition = CMTime(seconds: position, preferredTimescale: CMTimeScale(1000.0))
        if self.targetTimePosition?.0 != targetPosition {
            self.targetTimePosition = (targetPosition, play)
            if !self.updatingTimePosition {
@@ -655,6 +686,7 @@ public final class MediaEditor {
            if play {
                player.play()
                self.additionalPlayer?.play()
                self.audioPlayer?.play()
                self.onPlaybackAction(.play)
            }
        }
@@ -667,34 +699,58 @@ public final class MediaEditor {
        player.pause()
        self.additionalPlayer?.pause()

        let targetPosition = CMTime(seconds: position, preferredTimescale: CMTimeScale(60.0))
        let targetPosition = CMTime(seconds: position, preferredTimescale: CMTimeScale(1000.0))
        player.seek(to: targetPosition, toleranceBefore: .zero, toleranceAfter: .zero, completionHandler: { _ in
            Queue.mainQueue().async {
                completion()
            }
        })
        self.additionalPlayer?.seek(to: targetPosition, toleranceBefore: .zero, toleranceAfter: .zero)
        self.audioPlayer?.seek(to: targetPosition, toleranceBefore: .zero, toleranceAfter: .zero)
    }

    public var isPlaying: Bool {
        return (self.player?.rate ?? 0.0) > 0.0
    }

    public func togglePlayback() {
        if self.isPlaying {
            self.stop()
        } else {
            self.play()
        }
    }

    public func play() {
        self.player?.play()
        self.additionalPlayer?.play()
        self.onPlaybackAction(.play)
        self.setRate(1.0)
    }

    public func stop() {
        self.player?.pause()
        self.additionalPlayer?.pause()
        self.onPlaybackAction(.pause)
        self.setRate(0.0)
    }

    private func setRate(_ rate: Float) {
        let hostTime: UInt64 = 0
        let time: TimeInterval = 0
        let cmHostTime = CMClockMakeHostTimeFromSystemUnits(hostTime)
        let cmVTime = CMTimeMakeWithSeconds(time, preferredTimescale: 1000000)
        let futureTime = CMTimeAdd(cmHostTime, cmVTime)

        self.player?.setRate(rate, time: .invalid, atHostTime: futureTime)
        self.additionalPlayer?.setRate(rate, time: .invalid, atHostTime: futureTime)
        self.audioPlayer?.setRate(rate, time: .invalid, atHostTime: futureTime)

        if rate > 0.0 {
            self.onPlaybackAction(.play)
        } else {
            self.onPlaybackAction(.pause)
        }
    }

    public func invalidate() {
        self.player?.pause()
        self.additionalPlayer?.pause()
        self.audioPlayer?.pause()
        self.onPlaybackAction(.pause)
        self.renderer.textureSource?.invalidate()
    }
@@ -715,6 +771,7 @@ public final class MediaEditor {
            }
        })
        self.additionalPlayer?.seek(to: targetPosition, toleranceBefore: .zero, toleranceAfter: .zero)
        self.audioPlayer?.seek(to: targetPosition, toleranceBefore: .zero, toleranceAfter: .zero)
        self.onPlaybackAction(.seek(targetPosition.seconds))
    }

@@ -753,6 +810,34 @@ public final class MediaEditor {
        }
    }

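With every player pinned to the same host clock, setRate(_:time:atHostTime:) above starts and stops them against one shared host time, and time: .invalid keeps each item at its current position. A hedged sketch of the same idea for an arbitrary set of players, with a small future offset (the offset is an assumption of the sketch; the editor effectively uses the current host time):

import AVFoundation
import CoreMedia

// Start several clock-synchronized players together. Each player should already have
// automaticallyWaitsToMinimizeStalling disabled and share the host clock.
func startSynchronized(_ players: [AVPlayer], rate: Float = 1.0, delay: TimeInterval = 0.05) {
    let hostClock = CMClockGetHostTimeClock()
    let startTime = CMTimeAdd(CMClockGetTime(hostClock), CMTimeMakeWithSeconds(delay, preferredTimescale: 1_000_000))
    for player in players {
        // time: .invalid keeps the item's current position; only the host start time is shared.
        player.setRate(rate, time: .invalid, atHostTime: startTime)
    }
}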
    public func setAudioTrack(_ audioTrack: MediaAudioTrack?) {
        self.updateValues(mode: .skipRendering) { values in
            return values.withUpdatedAudioTrack(audioTrack)
        }

        if let audioTrack {
            let audioAsset = AVURLAsset(url: URL(fileURLWithPath: audioTrack.path))
            let playerItem = AVPlayerItem(asset: audioAsset)
            let player = AVPlayer(playerItem: playerItem)
            player.automaticallyWaitsToMinimizeStalling = false
            self.audioPlayer = player
            self.maybeGenerateAudioSamples(asset: audioAsset)
        } else if let audioPlayer = self.audioPlayer {
            audioPlayer.pause()
            self.audioPlayer = nil
        }
    }

    public func setAudioTrackTrimRange(_ trimRange: Range<Double>, apply: Bool) {
        self.updateValues(mode: .skipRendering) { values in
            return values.withUpdatedAudioTrackTrimRange(trimRange)
        }

        if apply {
            self.audioPlayer?.currentItem?.forwardPlaybackEndTime = CMTime(seconds: trimRange.upperBound, preferredTimescale: CMTimeScale(1000))
        }
    }

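forwardPlaybackEndTime only caps where playback stops; the lower bound of the trim range is applied through the seek paths above. A small sketch of applying a Range<Double> trim window to an AVPlayer (the helper name is illustrative):

import AVFoundation
import CoreMedia

// Apply a trim window to an audio player: seek to the lower bound and stop at the upper bound.
func applyTrim(_ trimRange: Range<Double>, to player: AVPlayer) {
    let timescale = CMTimeScale(1000)
    player.currentItem?.forwardPlaybackEndTime = CMTime(seconds: trimRange.upperBound, preferredTimescale: timescale)
    player.seek(to: CMTime(seconds: trimRange.lowerBound, preferredTimescale: timescale), toleranceBefore: .zero, toleranceAfter: .zero)
}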
    private var previousUpdateTime: Double?
    private var scheduledUpdate = false
    private func updateRenderChain() {
@@ -817,4 +902,57 @@ public final class MediaEditor {
            }
        }
    }

    private func maybeGenerateAudioSamples(asset: AVAsset) {
        Queue.concurrentDefaultQueue().async {
            guard let audioTrack = asset.tracks(withMediaType: .audio).first else {
                return
            }

            do {
                let assetReader = try AVAssetReader(asset: asset)

                let settings: [String: Any] = [
                    AVFormatIDKey: kAudioFormatLinearPCM,
                    AVLinearPCMBitDepthKey: 32,
                    AVLinearPCMIsFloatKey: true,
                    AVLinearPCMIsBigEndianKey: false,
                    AVLinearPCMIsNonInterleaved: false
                ]

                let assetReaderOutput = AVAssetReaderTrackOutput(track: audioTrack, outputSettings: settings)

                assetReader.add(assetReaderOutput)

                assetReader.startReading()

                var samplesData = Data()
                var peak: Int32 = 0

                while let sampleBuffer = assetReaderOutput.copyNextSampleBuffer() {
                    if let dataBuffer = CMSampleBufferGetDataBuffer(sampleBuffer) {
                        let length = CMBlockBufferGetDataLength(dataBuffer)
                        let bytes = UnsafeMutablePointer<Int32>.allocate(capacity: length)
                        CMBlockBufferCopyDataBytes(dataBuffer, atOffset: 0, dataLength: length, destination: bytes)

                        let samples = Array(UnsafeBufferPointer(start: bytes, count: length / MemoryLayout<Int32>.size))
                        if var maxSample = samples.max() {
                            if maxSample > peak {
                                peak = maxSample
                            }
                            samplesData.append(Data(bytesNoCopy: &maxSample, count: 4, deallocator: .none))
                        }

                        bytes.deallocate()
                    }
                }
                Queue.mainQueue().async {
                    self.updateValues(mode: .skipRendering) { values in
                        return values.withUpdatedAudioTrackSamples(MediaAudioTrackSamples(samples: samplesData, peak: peak))
                    }
                }
            } catch {
            }
        }
    }
}

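The reader loop stores one 4-byte maximum per sample buffer plus a global peak, which is what MediaAudioTrackSamples carries. A sketch of turning that buffer back into normalized bar heights for a waveform view (the bucketing into barCount bars is an assumption of the sketch):

import Foundation
import CoreGraphics

// Collapse the stored per-buffer maxima into `barCount` normalized heights (0...1).
func waveformBars(samples: Data, peak: Int32, barCount: Int) -> [CGFloat] {
    guard peak > 0, barCount > 0, !samples.isEmpty else {
        return Array(repeating: 0.0, count: max(barCount, 0))
    }
    let values: [Int32] = samples.withUnsafeBytes { Array($0.bindMemory(to: Int32.self)) }
    let samplesPerBar = max(1, values.count / barCount)
    return (0 ..< barCount).map { barIndex -> CGFloat in
        let start = barIndex * samplesPerBar
        let end = min(values.count, start + samplesPerBar)
        guard start < end else {
            return 0.0
        }
        let bucketPeak = values[start ..< end].max() ?? 0
        return CGFloat(max(bucketPeak, 0)) / CGFloat(peak)
    }
}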
@@ -46,12 +46,55 @@ public struct VideoPositionChange: Codable, Equatable {
    public let additional: Bool
    public let timestamp: Double

    public init(additional: Bool, timestamp: Double) {
    public init(
        additional: Bool,
        timestamp: Double
    ) {
        self.additional = additional
        self.timestamp = timestamp
    }
}

public struct MediaAudioTrack: Codable, Equatable {
    private enum CodingKeys: String, CodingKey {
        case path
        case artist
        case title
    }

    public let path: String
    public let artist: String?
    public let title: String?

    public init(
        path: String,
        artist: String?,
        title: String?
    ) {
        self.path = path
        self.artist = artist
        self.title = title
    }
}

public struct MediaAudioTrackSamples: Equatable {
    private enum CodingKeys: String, CodingKey {
        case samples
        case peak
    }

    public let samples: Data
    public let peak: Int32

    public init(
        samples: Data,
        peak: Int32
    ) {
        self.samples = samples
        self.peak = peak
    }
}

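MediaAudioTrack is a plain Codable value, so it round-trips through the standard encoders; MediaAudioTrackSamples is deliberately not Codable and is dropped when MediaEditorValues is decoded. A quick sketch, assuming the struct above is in scope:

import Foundation

// Round-trip the new track metadata through JSON; artist/title remain optional.
func audioTrackRoundTripExample() throws {
    let track = MediaAudioTrack(path: "/tmp/song.mp3", artist: "Some Artist", title: "Some Title")
    let data = try JSONEncoder().encode(track)
    let decoded = try JSONDecoder().decode(MediaAudioTrack.self, from: data)
    assert(decoded == track)
}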
public final class MediaEditorValues: Codable, Equatable {
    public static func == (lhs: MediaEditorValues, rhs: MediaEditorValues) -> Bool {
        if lhs.originalDimensions != rhs.originalDimensions {
@@ -108,6 +151,15 @@ public final class MediaEditorValues: Codable, Equatable {
        if lhs.entities != rhs.entities {
            return false
        }
        if lhs.audioTrack != rhs.audioTrack {
            return false
        }
        if lhs.audioTrackTrimRange != rhs.audioTrackTrimRange {
            return false
        }
        if lhs.audioTrackSamples != rhs.audioTrackSamples {
            return false
        }

        for key in EditorToolKey.allCases {
            let lhsToolValue = lhs.toolValues[key]
@@ -165,6 +217,9 @@ public final class MediaEditorValues: Codable, Equatable {
        case drawing
        case entities
        case toolValues

        case audioTrack
        case audioTrackTrimRange
    }

    public let originalDimensions: PixelDimensions
@@ -191,6 +246,10 @@ public final class MediaEditorValues: Codable, Equatable {
    public let entities: [CodableDrawingEntity]
    public let toolValues: [EditorToolKey: Any]

    public let audioTrack: MediaAudioTrack?
    public let audioTrackTrimRange: Range<Double>?
    public let audioTrackSamples: MediaAudioTrackSamples?

    init(
        originalDimensions: PixelDimensions,
        cropOffset: CGPoint,
@@ -210,7 +269,10 @@ public final class MediaEditorValues: Codable, Equatable {
        additionalVideoPositionChanges: [VideoPositionChange],
        drawing: UIImage?,
        entities: [CodableDrawingEntity],
        toolValues: [EditorToolKey: Any]
        toolValues: [EditorToolKey: Any],
        audioTrack: MediaAudioTrack?,
        audioTrackTrimRange: Range<Double>?,
        audioTrackSamples: MediaAudioTrackSamples?
    ) {
        self.originalDimensions = originalDimensions
        self.cropOffset = cropOffset
@@ -231,6 +293,9 @@ public final class MediaEditorValues: Codable, Equatable {
        self.drawing = drawing
        self.entities = entities
        self.toolValues = toolValues
        self.audioTrack = audioTrack
        self.audioTrackTrimRange = audioTrackTrimRange
        self.audioTrackSamples = audioTrackSamples
    }

    public init(from decoder: Decoder) throws {
@@ -278,6 +343,11 @@ public final class MediaEditorValues: Codable, Equatable {
            toolValues[key] = value
        }
        self.toolValues = toolValues

        self.audioTrack = try container.decodeIfPresent(MediaAudioTrack.self, forKey: .audioTrack)
        self.audioTrackTrimRange = try container.decodeIfPresent(Range<Double>.self, forKey: .audioTrackTrimRange)

        self.audioTrackSamples = nil
    }

    public func encode(to encoder: Encoder) throws {
@@ -320,51 +390,66 @@ public final class MediaEditorValues: Codable, Equatable {
            }
        }
        try container.encode(values, forKey: .toolValues)

        try container.encodeIfPresent(self.audioTrack, forKey: .audioTrack)
        try container.encodeIfPresent(self.audioTrackTrimRange, forKey: .audioTrackTrimRange)
    }

public func makeCopy() -> MediaEditorValues {
|
||||
return MediaEditorValues(originalDimensions: self.originalDimensions, cropOffset: self.cropOffset, cropSize: self.cropSize, cropScale: self.cropScale, cropRotation: self.cropRotation, cropMirroring: self.cropMirroring, gradientColors: self.gradientColors, videoTrimRange: self.videoTrimRange, videoIsMuted: self.videoIsMuted, videoIsFullHd: self.videoIsFullHd, videoIsMirrored: self.videoIsMirrored, additionalVideoPath: self.additionalVideoPath, additionalVideoPosition: self.additionalVideoPosition, additionalVideoScale: self.additionalVideoScale, additionalVideoRotation: self.additionalVideoRotation, additionalVideoPositionChanges: self.additionalVideoPositionChanges, drawing: self.drawing, entities: self.entities, toolValues: self.toolValues)
|
||||
return MediaEditorValues(originalDimensions: self.originalDimensions, cropOffset: self.cropOffset, cropSize: self.cropSize, cropScale: self.cropScale, cropRotation: self.cropRotation, cropMirroring: self.cropMirroring, gradientColors: self.gradientColors, videoTrimRange: self.videoTrimRange, videoIsMuted: self.videoIsMuted, videoIsFullHd: self.videoIsFullHd, videoIsMirrored: self.videoIsMirrored, additionalVideoPath: self.additionalVideoPath, additionalVideoPosition: self.additionalVideoPosition, additionalVideoScale: self.additionalVideoScale, additionalVideoRotation: self.additionalVideoRotation, additionalVideoPositionChanges: self.additionalVideoPositionChanges, drawing: self.drawing, entities: self.entities, toolValues: self.toolValues, audioTrack: self.audioTrack, audioTrackTrimRange: self.audioTrackTrimRange, audioTrackSamples: self.audioTrackSamples)
|
||||
}
|
||||
|
||||
func withUpdatedCrop(offset: CGPoint, scale: CGFloat, rotation: CGFloat, mirroring: Bool) -> MediaEditorValues {
|
||||
return MediaEditorValues(originalDimensions: self.originalDimensions, cropOffset: offset, cropSize: self.cropSize, cropScale: scale, cropRotation: rotation, cropMirroring: mirroring, gradientColors: self.gradientColors, videoTrimRange: self.videoTrimRange, videoIsMuted: self.videoIsMuted, videoIsFullHd: self.videoIsFullHd, videoIsMirrored: self.videoIsMirrored, additionalVideoPath: self.additionalVideoPath, additionalVideoPosition: self.additionalVideoPosition, additionalVideoScale: self.additionalVideoScale, additionalVideoRotation: self.additionalVideoRotation, additionalVideoPositionChanges: self.additionalVideoPositionChanges, drawing: self.drawing, entities: self.entities, toolValues: self.toolValues)
|
||||
return MediaEditorValues(originalDimensions: self.originalDimensions, cropOffset: offset, cropSize: self.cropSize, cropScale: scale, cropRotation: rotation, cropMirroring: mirroring, gradientColors: self.gradientColors, videoTrimRange: self.videoTrimRange, videoIsMuted: self.videoIsMuted, videoIsFullHd: self.videoIsFullHd, videoIsMirrored: self.videoIsMirrored, additionalVideoPath: self.additionalVideoPath, additionalVideoPosition: self.additionalVideoPosition, additionalVideoScale: self.additionalVideoScale, additionalVideoRotation: self.additionalVideoRotation, additionalVideoPositionChanges: self.additionalVideoPositionChanges, drawing: self.drawing, entities: self.entities, toolValues: self.toolValues, audioTrack: self.audioTrack, audioTrackTrimRange: self.audioTrackTrimRange, audioTrackSamples: self.audioTrackSamples)
|
||||
}
|
||||
|
||||
func withUpdatedGradientColors(gradientColors: [UIColor]) -> MediaEditorValues {
|
||||
return MediaEditorValues(originalDimensions: self.originalDimensions, cropOffset: self.cropOffset, cropSize: self.cropSize, cropScale: self.cropScale, cropRotation: self.cropRotation, cropMirroring: self.cropMirroring, gradientColors: gradientColors, videoTrimRange: self.videoTrimRange, videoIsMuted: self.videoIsMuted, videoIsFullHd: self.videoIsFullHd, videoIsMirrored: self.videoIsMirrored, additionalVideoPath: self.additionalVideoPath, additionalVideoPosition: self.additionalVideoPosition, additionalVideoScale: self.additionalVideoScale, additionalVideoRotation: self.additionalVideoRotation, additionalVideoPositionChanges: self.additionalVideoPositionChanges, drawing: self.drawing, entities: self.entities, toolValues: self.toolValues)
|
||||
return MediaEditorValues(originalDimensions: self.originalDimensions, cropOffset: self.cropOffset, cropSize: self.cropSize, cropScale: self.cropScale, cropRotation: self.cropRotation, cropMirroring: self.cropMirroring, gradientColors: gradientColors, videoTrimRange: self.videoTrimRange, videoIsMuted: self.videoIsMuted, videoIsFullHd: self.videoIsFullHd, videoIsMirrored: self.videoIsMirrored, additionalVideoPath: self.additionalVideoPath, additionalVideoPosition: self.additionalVideoPosition, additionalVideoScale: self.additionalVideoScale, additionalVideoRotation: self.additionalVideoRotation, additionalVideoPositionChanges: self.additionalVideoPositionChanges, drawing: self.drawing, entities: self.entities, toolValues: self.toolValues, audioTrack: self.audioTrack, audioTrackTrimRange: self.audioTrackTrimRange, audioTrackSamples: self.audioTrackSamples)
|
||||
}
|
||||
|
||||
func withUpdatedVideoIsMuted(_ videoIsMuted: Bool) -> MediaEditorValues {
|
||||
return MediaEditorValues(originalDimensions: self.originalDimensions, cropOffset: self.cropOffset, cropSize: self.cropSize, cropScale: self.cropScale, cropRotation: self.cropRotation, cropMirroring: self.cropMirroring, gradientColors: self.gradientColors, videoTrimRange: self.videoTrimRange, videoIsMuted: videoIsMuted, videoIsFullHd: self.videoIsFullHd, videoIsMirrored: self.videoIsMirrored, additionalVideoPath: self.additionalVideoPath, additionalVideoPosition: self.additionalVideoPosition, additionalVideoScale: self.additionalVideoScale, additionalVideoRotation: self.additionalVideoRotation, additionalVideoPositionChanges: self.additionalVideoPositionChanges, drawing: self.drawing, entities: self.entities, toolValues: self.toolValues)
|
||||
return MediaEditorValues(originalDimensions: self.originalDimensions, cropOffset: self.cropOffset, cropSize: self.cropSize, cropScale: self.cropScale, cropRotation: self.cropRotation, cropMirroring: self.cropMirroring, gradientColors: self.gradientColors, videoTrimRange: self.videoTrimRange, videoIsMuted: videoIsMuted, videoIsFullHd: self.videoIsFullHd, videoIsMirrored: self.videoIsMirrored, additionalVideoPath: self.additionalVideoPath, additionalVideoPosition: self.additionalVideoPosition, additionalVideoScale: self.additionalVideoScale, additionalVideoRotation: self.additionalVideoRotation, additionalVideoPositionChanges: self.additionalVideoPositionChanges, drawing: self.drawing, entities: self.entities, toolValues: self.toolValues, audioTrack: self.audioTrack, audioTrackTrimRange: self.audioTrackTrimRange, audioTrackSamples: self.audioTrackSamples)
|
||||
}
|
||||
|
||||
func withUpdatedVideoIsFullHd(_ videoIsFullHd: Bool) -> MediaEditorValues {
|
||||
return MediaEditorValues(originalDimensions: self.originalDimensions, cropOffset: self.cropOffset, cropSize: self.cropSize, cropScale: self.cropScale, cropRotation: self.cropRotation, cropMirroring: self.cropMirroring, gradientColors: self.gradientColors, videoTrimRange: self.videoTrimRange, videoIsMuted: self.videoIsMuted, videoIsFullHd: videoIsFullHd, videoIsMirrored: self.videoIsMirrored, additionalVideoPath: self.additionalVideoPath, additionalVideoPosition: self.additionalVideoPosition, additionalVideoScale: self.additionalVideoScale, additionalVideoRotation: self.additionalVideoRotation, additionalVideoPositionChanges: self.additionalVideoPositionChanges, drawing: self.drawing, entities: self.entities, toolValues: self.toolValues)
|
||||
return MediaEditorValues(originalDimensions: self.originalDimensions, cropOffset: self.cropOffset, cropSize: self.cropSize, cropScale: self.cropScale, cropRotation: self.cropRotation, cropMirroring: self.cropMirroring, gradientColors: self.gradientColors, videoTrimRange: self.videoTrimRange, videoIsMuted: self.videoIsMuted, videoIsFullHd: videoIsFullHd, videoIsMirrored: self.videoIsMirrored, additionalVideoPath: self.additionalVideoPath, additionalVideoPosition: self.additionalVideoPosition, additionalVideoScale: self.additionalVideoScale, additionalVideoRotation: self.additionalVideoRotation, additionalVideoPositionChanges: self.additionalVideoPositionChanges, drawing: self.drawing, entities: self.entities, toolValues: self.toolValues, audioTrack: self.audioTrack, audioTrackTrimRange: self.audioTrackTrimRange, audioTrackSamples: self.audioTrackSamples)
|
||||
}
|
||||
|
||||
|
||||
func withUpdatedVideoIsMirrored(_ videoIsMirrored: Bool) -> MediaEditorValues {
|
||||
return MediaEditorValues(originalDimensions: self.originalDimensions, cropOffset: self.cropOffset, cropSize: self.cropSize, cropScale: self.cropScale, cropRotation: self.cropRotation, cropMirroring: self.cropMirroring, gradientColors: self.gradientColors, videoTrimRange: self.videoTrimRange, videoIsMuted: self.videoIsMuted, videoIsFullHd: self.videoIsFullHd, videoIsMirrored: videoIsMirrored, additionalVideoPath: self.additionalVideoPath, additionalVideoPosition: self.additionalVideoPosition, additionalVideoScale: self.additionalVideoScale, additionalVideoRotation: self.additionalVideoRotation, additionalVideoPositionChanges: self.additionalVideoPositionChanges, drawing: self.drawing, entities: self.entities, toolValues: self.toolValues)
|
||||
return MediaEditorValues(originalDimensions: self.originalDimensions, cropOffset: self.cropOffset, cropSize: self.cropSize, cropScale: self.cropScale, cropRotation: self.cropRotation, cropMirroring: self.cropMirroring, gradientColors: self.gradientColors, videoTrimRange: self.videoTrimRange, videoIsMuted: self.videoIsMuted, videoIsFullHd: self.videoIsFullHd, videoIsMirrored: videoIsMirrored, additionalVideoPath: self.additionalVideoPath, additionalVideoPosition: self.additionalVideoPosition, additionalVideoScale: self.additionalVideoScale, additionalVideoRotation: self.additionalVideoRotation, additionalVideoPositionChanges: self.additionalVideoPositionChanges, drawing: self.drawing, entities: self.entities, toolValues: self.toolValues, audioTrack: self.audioTrack, audioTrackTrimRange: self.audioTrackTrimRange, audioTrackSamples: self.audioTrackSamples)
|
||||
}
|
||||
|
||||
func withUpdatedAdditionalVideo(path: String, positionChanges: [VideoPositionChange]) -> MediaEditorValues {
|
||||
return MediaEditorValues(originalDimensions: self.originalDimensions, cropOffset: self.cropOffset, cropSize: self.cropSize, cropScale: self.cropScale, cropRotation: self.cropRotation, cropMirroring: self.cropMirroring, gradientColors: self.gradientColors, videoTrimRange: videoTrimRange, videoIsMuted: self.videoIsMuted, videoIsFullHd: self.videoIsFullHd, videoIsMirrored: self.videoIsMirrored, additionalVideoPath: path, additionalVideoPosition: self.additionalVideoPosition, additionalVideoScale: self.additionalVideoScale, additionalVideoRotation: self.additionalVideoRotation, additionalVideoPositionChanges: positionChanges, drawing: self.drawing, entities: self.entities, toolValues: self.toolValues)
|
||||
return MediaEditorValues(originalDimensions: self.originalDimensions, cropOffset: self.cropOffset, cropSize: self.cropSize, cropScale: self.cropScale, cropRotation: self.cropRotation, cropMirroring: self.cropMirroring, gradientColors: self.gradientColors, videoTrimRange: videoTrimRange, videoIsMuted: self.videoIsMuted, videoIsFullHd: self.videoIsFullHd, videoIsMirrored: self.videoIsMirrored, additionalVideoPath: path, additionalVideoPosition: self.additionalVideoPosition, additionalVideoScale: self.additionalVideoScale, additionalVideoRotation: self.additionalVideoRotation, additionalVideoPositionChanges: positionChanges, drawing: self.drawing, entities: self.entities, toolValues: self.toolValues, audioTrack: self.audioTrack, audioTrackTrimRange: self.audioTrackTrimRange, audioTrackSamples: self.audioTrackSamples)
|
||||
}
|
||||
|
||||
func withUpdatedAdditionalVideo(position: CGPoint, scale: CGFloat, rotation: CGFloat) -> MediaEditorValues {
|
||||
return MediaEditorValues(originalDimensions: self.originalDimensions, cropOffset: self.cropOffset, cropSize: self.cropSize, cropScale: self.cropScale, cropRotation: self.cropRotation, cropMirroring: self.cropMirroring, gradientColors: self.gradientColors, videoTrimRange: videoTrimRange, videoIsMuted: self.videoIsMuted, videoIsFullHd: self.videoIsFullHd, videoIsMirrored: self.videoIsMirrored, additionalVideoPath: self.additionalVideoPath, additionalVideoPosition: position, additionalVideoScale: scale, additionalVideoRotation: rotation, additionalVideoPositionChanges: self.additionalVideoPositionChanges, drawing: self.drawing, entities: self.entities, toolValues: self.toolValues)
|
||||
return MediaEditorValues(originalDimensions: self.originalDimensions, cropOffset: self.cropOffset, cropSize: self.cropSize, cropScale: self.cropScale, cropRotation: self.cropRotation, cropMirroring: self.cropMirroring, gradientColors: self.gradientColors, videoTrimRange: videoTrimRange, videoIsMuted: self.videoIsMuted, videoIsFullHd: self.videoIsFullHd, videoIsMirrored: self.videoIsMirrored, additionalVideoPath: self.additionalVideoPath, additionalVideoPosition: position, additionalVideoScale: scale, additionalVideoRotation: rotation, additionalVideoPositionChanges: self.additionalVideoPositionChanges, drawing: self.drawing, entities: self.entities, toolValues: self.toolValues, audioTrack: self.audioTrack, audioTrackTrimRange: self.audioTrackTrimRange, audioTrackSamples: self.audioTrackSamples)
|
||||
}
|
||||
|
||||
func withUpdatedVideoTrimRange(_ videoTrimRange: Range<Double>) -> MediaEditorValues {
|
||||
return MediaEditorValues(originalDimensions: self.originalDimensions, cropOffset: self.cropOffset, cropSize: self.cropSize, cropScale: self.cropScale, cropRotation: self.cropRotation, cropMirroring: self.cropMirroring, gradientColors: self.gradientColors, videoTrimRange: videoTrimRange, videoIsMuted: self.videoIsMuted, videoIsFullHd: self.videoIsFullHd, videoIsMirrored: self.videoIsMirrored, additionalVideoPath: self.additionalVideoPath, additionalVideoPosition: self.additionalVideoPosition, additionalVideoScale: self.additionalVideoScale, additionalVideoRotation: self.additionalVideoRotation, additionalVideoPositionChanges: self.additionalVideoPositionChanges, drawing: self.drawing, entities: self.entities, toolValues: self.toolValues)
|
||||
return MediaEditorValues(originalDimensions: self.originalDimensions, cropOffset: self.cropOffset, cropSize: self.cropSize, cropScale: self.cropScale, cropRotation: self.cropRotation, cropMirroring: self.cropMirroring, gradientColors: self.gradientColors, videoTrimRange: videoTrimRange, videoIsMuted: self.videoIsMuted, videoIsFullHd: self.videoIsFullHd, videoIsMirrored: self.videoIsMirrored, additionalVideoPath: self.additionalVideoPath, additionalVideoPosition: self.additionalVideoPosition, additionalVideoScale: self.additionalVideoScale, additionalVideoRotation: self.additionalVideoRotation, additionalVideoPositionChanges: self.additionalVideoPositionChanges, drawing: self.drawing, entities: self.entities, toolValues: self.toolValues, audioTrack: self.audioTrack, audioTrackTrimRange: self.audioTrackTrimRange, audioTrackSamples: self.audioTrackSamples)
|
||||
}
|
||||
|
||||
func withUpdatedDrawingAndEntities(drawing: UIImage?, entities: [CodableDrawingEntity]) -> MediaEditorValues {
|
||||
return MediaEditorValues(originalDimensions: self.originalDimensions, cropOffset: self.cropOffset, cropSize: self.cropSize, cropScale: self.cropScale, cropRotation: self.cropRotation, cropMirroring: self.cropMirroring, gradientColors: self.gradientColors, videoTrimRange: self.videoTrimRange, videoIsMuted: self.videoIsMuted, videoIsFullHd: self.videoIsFullHd, videoIsMirrored: self.videoIsMirrored, additionalVideoPath: self.additionalVideoPath, additionalVideoPosition: self.additionalVideoPosition, additionalVideoScale: self.additionalVideoScale, additionalVideoRotation: self.additionalVideoRotation, additionalVideoPositionChanges: self.additionalVideoPositionChanges, drawing: drawing, entities: entities, toolValues: self.toolValues)
|
||||
return MediaEditorValues(originalDimensions: self.originalDimensions, cropOffset: self.cropOffset, cropSize: self.cropSize, cropScale: self.cropScale, cropRotation: self.cropRotation, cropMirroring: self.cropMirroring, gradientColors: self.gradientColors, videoTrimRange: self.videoTrimRange, videoIsMuted: self.videoIsMuted, videoIsFullHd: self.videoIsFullHd, videoIsMirrored: self.videoIsMirrored, additionalVideoPath: self.additionalVideoPath, additionalVideoPosition: self.additionalVideoPosition, additionalVideoScale: self.additionalVideoScale, additionalVideoRotation: self.additionalVideoRotation, additionalVideoPositionChanges: self.additionalVideoPositionChanges, drawing: drawing, entities: entities, toolValues: self.toolValues, audioTrack: self.audioTrack, audioTrackTrimRange: self.audioTrackTrimRange, audioTrackSamples: self.audioTrackSamples)
|
||||
}
|
||||
|
||||
func withUpdatedToolValues(_ toolValues: [EditorToolKey: Any]) -> MediaEditorValues {
|
||||
return MediaEditorValues(originalDimensions: self.originalDimensions, cropOffset: self.cropOffset, cropSize: self.cropSize, cropScale: self.cropScale, cropRotation: self.cropRotation, cropMirroring: self.cropMirroring, gradientColors: self.gradientColors, videoTrimRange: self.videoTrimRange, videoIsMuted: self.videoIsMuted, videoIsFullHd: self.videoIsFullHd, videoIsMirrored: self.videoIsMirrored, additionalVideoPath: self.additionalVideoPath, additionalVideoPosition: self.additionalVideoPosition, additionalVideoScale: self.additionalVideoScale, additionalVideoRotation: self.additionalVideoRotation, additionalVideoPositionChanges: self.additionalVideoPositionChanges, drawing: self.drawing, entities: self.entities, toolValues: toolValues)
|
||||
return MediaEditorValues(originalDimensions: self.originalDimensions, cropOffset: self.cropOffset, cropSize: self.cropSize, cropScale: self.cropScale, cropRotation: self.cropRotation, cropMirroring: self.cropMirroring, gradientColors: self.gradientColors, videoTrimRange: self.videoTrimRange, videoIsMuted: self.videoIsMuted, videoIsFullHd: self.videoIsFullHd, videoIsMirrored: self.videoIsMirrored, additionalVideoPath: self.additionalVideoPath, additionalVideoPosition: self.additionalVideoPosition, additionalVideoScale: self.additionalVideoScale, additionalVideoRotation: self.additionalVideoRotation, additionalVideoPositionChanges: self.additionalVideoPositionChanges, drawing: self.drawing, entities: self.entities, toolValues: toolValues, audioTrack: self.audioTrack, audioTrackTrimRange: self.audioTrackTrimRange, audioTrackSamples: self.audioTrackSamples)
|
||||
}
|
||||
|
||||
func withUpdatedAudioTrack(_ audioTrack: MediaAudioTrack?) -> MediaEditorValues {
|
||||
return MediaEditorValues(originalDimensions: self.originalDimensions, cropOffset: self.cropOffset, cropSize: self.cropSize, cropScale: self.cropScale, cropRotation: self.cropRotation, cropMirroring: self.cropMirroring, gradientColors: self.gradientColors, videoTrimRange: self.videoTrimRange, videoIsMuted: self.videoIsMuted, videoIsFullHd: self.videoIsFullHd, videoIsMirrored: self.videoIsMirrored, additionalVideoPath: self.additionalVideoPath, additionalVideoPosition: self.additionalVideoPosition, additionalVideoScale: self.additionalVideoScale, additionalVideoRotation: self.additionalVideoRotation, additionalVideoPositionChanges: self.additionalVideoPositionChanges, drawing: self.drawing, entities: self.entities, toolValues: self.toolValues, audioTrack: audioTrack, audioTrackTrimRange: self.audioTrackTrimRange, audioTrackSamples: self.audioTrackSamples)
|
||||
}
|
||||
|
||||
func withUpdatedAudioTrackTrimRange(_ audioTrackTrimRange: Range<Double>) -> MediaEditorValues {
|
||||
return MediaEditorValues(originalDimensions: self.originalDimensions, cropOffset: self.cropOffset, cropSize: self.cropSize, cropScale: self.cropScale, cropRotation: self.cropRotation, cropMirroring: self.cropMirroring, gradientColors: self.gradientColors, videoTrimRange: videoTrimRange, videoIsMuted: self.videoIsMuted, videoIsFullHd: self.videoIsFullHd, videoIsMirrored: self.videoIsMirrored, additionalVideoPath: self.additionalVideoPath, additionalVideoPosition: self.additionalVideoPosition, additionalVideoScale: self.additionalVideoScale, additionalVideoRotation: self.additionalVideoRotation, additionalVideoPositionChanges: self.additionalVideoPositionChanges, drawing: self.drawing, entities: self.entities, toolValues: self.toolValues, audioTrack: self.audioTrack, audioTrackTrimRange: audioTrackTrimRange, audioTrackSamples: self.audioTrackSamples)
|
||||
}
|
||||
|
||||
func withUpdatedAudioTrackSamples(_ audioTrackSamples: MediaAudioTrackSamples?) -> MediaEditorValues {
|
||||
return MediaEditorValues(originalDimensions: self.originalDimensions, cropOffset: self.cropOffset, cropSize: self.cropSize, cropScale: self.cropScale, cropRotation: self.cropRotation, cropMirroring: self.cropMirroring, gradientColors: self.gradientColors, videoTrimRange: videoTrimRange, videoIsMuted: self.videoIsMuted, videoIsFullHd: self.videoIsFullHd, videoIsMirrored: self.videoIsMirrored, additionalVideoPath: self.additionalVideoPath, additionalVideoPosition: self.additionalVideoPosition, additionalVideoScale: self.additionalVideoScale, additionalVideoRotation: self.additionalVideoRotation, additionalVideoPositionChanges: self.additionalVideoPositionChanges, drawing: self.drawing, entities: self.entities, toolValues: self.toolValues, audioTrack: self.audioTrack, audioTrackTrimRange: self.audioTrackTrimRange, audioTrackSamples: audioTrackSamples)
|
||||
}
|
||||
|
||||
public var resultDimensions: PixelDimensions {
|
||||
|
@@ -361,7 +361,35 @@ public final class MediaEditorVideoExport {
    }

    private func setupWithAsset(_ asset: AVAsset, additionalAsset: AVAsset?) {
        self.reader = try? AVAssetReader(asset: asset)
        var inputAsset = asset
        if let audioData = self.configuration.values.audioTrack {
            let mixComposition = AVMutableComposition()

            let audioAsset = AVURLAsset(url: URL(fileURLWithPath: audioData.path))

            guard
                let videoTrack = mixComposition.addMutableTrack(withMediaType: .video, preferredTrackID: kCMPersistentTrackID_Invalid),
                let musicTrack = mixComposition.addMutableTrack(withMediaType: .audio, preferredTrackID: kCMPersistentTrackID_Invalid),
                let videoAssetTrack = asset.tracks(withMediaType: .video).first,
                let musicAssetTrack = audioAsset.tracks(withMediaType: .audio).first,
                let duration = self.durationValue
            else {
                print("error")
                return
            }

            try? videoTrack.insertTimeRange(CMTimeRangeMake(start: .zero, duration: duration), of: videoAssetTrack, at: .zero)

            if let audioAssetTrack = asset.tracks(withMediaType: .audio).first, let audioTrack = mixComposition.addMutableTrack(withMediaType: .audio, preferredTrackID: kCMPersistentTrackID_Invalid) {
                try? audioTrack.insertTimeRange(CMTimeRangeMake(start: .zero, duration: duration), of: audioAssetTrack, at: .zero)
            }

            try? musicTrack.insertTimeRange(CMTimeRangeMake(start: .zero, duration: duration), of: musicAssetTrack, at: .zero)

            inputAsset = mixComposition
        }

        self.reader = try? AVAssetReader(asset: inputAsset)

        var mirror = false
        if additionalAsset == nil, self.configuration.values.videoIsMirrored {
@@ -392,7 +420,7 @@ public final class MediaEditorVideoExport {
        }
        writer.setup(configuration: self.configuration, outputPath: self.outputPath)

        let videoTracks = asset.tracks(withMediaType: .video)
        let videoTracks = inputAsset.tracks(withMediaType: .video)
        let additionalVideoTracks = additionalAsset?.tracks(withMediaType: .video)
        if videoTracks.count > 0 {
            var sourceFrameRate: Float = 0.0
@@ -407,7 +435,7 @@ public final class MediaEditorVideoExport {
                kCVPixelBufferMetalCompatibilityKey as String: true,
                AVVideoColorPropertiesKey: colorProperties
            ]
            if let videoTrack = videoTracks.first, videoTrack.preferredTransform.isIdentity && !self.configuration.values.requiresComposing && additionalAsset == nil {
            if !"".isEmpty, let videoTrack = videoTracks.first, videoTrack.preferredTransform.isIdentity && !self.configuration.values.requiresComposing && additionalAsset == nil {
            } else {
                self.setupComposer()
            }
@@ -446,7 +474,7 @@ public final class MediaEditorVideoExport {
            self.videoOutput = nil
        }

        let audioTracks = asset.tracks(withMediaType: .audio)
        let audioTracks = inputAsset.tracks(withMediaType: .audio)
        if audioTracks.count > 0, !self.configuration.values.videoIsMuted {
            let audioOutput = AVAssetReaderAudioMixOutput(audioTracks: audioTracks, audioSettings: nil)
            audioOutput.alwaysCopiesSampleData = false
@@ -507,7 +535,7 @@ public final class MediaEditorVideoExport {
        }

        if cancelled {
            try? FileManager().removeItem(at: outputUrl)
            try? FileManager.default.removeItem(at: outputUrl)
            self.internalStatus = .finished
            self.statusValue = .failed(.cancelled)
            return
@@ -517,14 +545,14 @@ public final class MediaEditorVideoExport {
            if let error = writer.error {
                Logger.shared.log("VideoExport", "Failed with writer error \(error.localizedDescription)")
            }
            try? FileManager().removeItem(at: outputUrl)
            try? FileManager.default.removeItem(at: outputUrl)
            self.internalStatus = .finished
            self.statusValue = .failed(.writing(nil))
        } else if let reader = self.reader, reader.status == .failed {
            if let error = reader.error {
                Logger.shared.log("VideoExport", "Failed with reader error \(error.localizedDescription)")
            }
            try? FileManager().removeItem(at: outputUrl)
            try? FileManager.default.removeItem(at: outputUrl)
            writer.cancelWriting()
            self.internalStatus = .finished
            self.statusValue = .failed(.reading(reader.error))
@@ -535,7 +563,7 @@ public final class MediaEditorVideoExport {
            if let error = writer.error {
                Logger.shared.log("VideoExport", "Failed after finishWriting with writer error \(error.localizedDescription)")
            }
            try? FileManager().removeItem(at: outputUrl)
            try? FileManager.default.removeItem(at: outputUrl)
            self.internalStatus = .finished
            self.statusValue = .failed(.writing(nil))
        } else {
@ -27,6 +27,7 @@ swift_library(
"//submodules/PresentationDataUtils:PresentationDataUtils",
"//submodules/ContextUI",
"//submodules/LegacyComponents:LegacyComponents",
"//submodules/LegacyMediaPickerUI",
"//submodules/TelegramUI/Components/MediaEditor",
"//submodules/DrawingUI:DrawingUI",
"//submodules/Components/LottieAnimationComponent:LottieAnimationComponent",
@ -43,6 +44,7 @@ swift_library(
"//submodules/ChatPresentationInterfaceState",
"//submodules/DeviceAccess",
"//submodules/LocationUI",
"//submodules/TelegramUI/Components/AudioWaveformComponent",
],
visibility = [
"//visibility:public",

@ -32,6 +32,7 @@ import ChatPresentationInterfaceState
import TextFormat
import DeviceAccess
import LocationUI
import LegacyMediaPickerUI

enum DrawingScreenType {
case drawing
@ -39,6 +40,7 @@ enum DrawingScreenType {
case sticker
}

private let playbackButtonTag = GenericComponentViewTag()
private let muteButtonTag = GenericComponentViewTag()
private let saveButtonTag = GenericComponentViewTag()

@ -249,7 +251,7 @@ final class MediaEditorScreenComponent: Component {

private let scrubber = ComponentView<Empty>()

private let flipStickerButton = ComponentView<Empty>()
private let playbackButton = ComponentView<Empty>()
private let muteButton = ComponentView<Empty>()
private let saveButton = ComponentView<Empty>()

@ -482,6 +484,11 @@ final class MediaEditorScreenComponent: Component {
view.layer.animateScale(from: 0.1, to: 1.0, duration: 0.2)
}

if let view = self.playbackButton.view {
view.layer.animateAlpha(from: 0.0, to: view.alpha, duration: 0.2)
view.layer.animateScale(from: 0.1, to: 1.0, duration: 0.2)
}

if let view = self.inputPanel.view {
view.layer.animatePosition(from: CGPoint(x: 0.0, y: 44.0), to: .zero, duration: 0.3, timingFunction: kCAMediaTimingFunctionSpring, additive: true)
view.layer.animateAlpha(from: 0.0, to: 1.0, duration: 0.2)
@ -549,6 +556,11 @@ final class MediaEditorScreenComponent: Component {
transition.setScale(view: view, scale: 0.1)
}

if let view = self.playbackButton.view {
transition.setAlpha(view: view, alpha: 0.0)
transition.setScale(view: view, scale: 0.1)
}

if let view = self.scrubber.view {
view.layer.animatePosition(from: .zero, to: CGPoint(x: 0.0, y: 44.0), duration: 0.3, timingFunction: kCAMediaTimingFunctionSpring, removeOnCompletion: false, additive: true)
view.layer.animateAlpha(from: view.alpha, to: 0.0, duration: 0.2, removeOnCompletion: false)
@ -901,6 +913,16 @@ final class MediaEditorScreenComponent: Component {
if let controller = environment.controller() as? MediaEditorScreen {
mediaEditor = controller.node.mediaEditor
}
var audioData: VideoScrubberComponent.AudioData?
if let audioTrack = mediaEditor?.values.audioTrack {
let audioSamples = mediaEditor?.values.audioTrackSamples
audioData = VideoScrubberComponent.AudioData(
artist: audioTrack.artist,
title: audioTrack.title,
samples: audioSamples?.samples,
peak: audioSamples?.peak ?? 0
)
}

var scrubberBottomInset: CGFloat = 0.0
if let playerState = state.playerState {
@ -918,7 +940,8 @@ final class MediaEditorScreenComponent: Component {
isPlaying: playerState.isPlaying,
frames: playerState.frames,
framesUpdateTimestamp: playerState.framesUpdateTimestamp,
trimUpdated: { [weak mediaEditor] start, end, updatedEnd, done in
audioData: audioData,
videoTrimUpdated: { [weak mediaEditor] start, end, updatedEnd, done in
if let mediaEditor {
mediaEditor.setVideoTrimRange(start..<end, apply: done)
if done {
@ -932,6 +955,14 @@ final class MediaEditorScreenComponent: Component {
if let mediaEditor {
mediaEditor.seek(position, andPlay: done)
}
},
audioTrimUpdated: { [weak mediaEditor] start, end, _, done in
if let mediaEditor {
mediaEditor.setAudioTrackTrimRange(start..<end, apply: done)
if done {

}
}
}
)),
environment: {},
@ -1368,84 +1399,80 @@ final class MediaEditorScreenComponent: Component {
transition.setAlpha(view: saveButtonView, alpha: displayTopButtons && !component.isDismissing && !component.isInteractingWithEntities ? saveButtonAlpha : 0.0)
}

if let playerState = state.playerState {
if playerState.hasAudio {
let isVideoMuted = mediaEditor?.values.videoIsMuted ?? false

let muteContentComponent: AnyComponentWithIdentity<Empty>
if component.hasAppeared {
muteContentComponent = AnyComponentWithIdentity(
id: "animatedIcon",
component: AnyComponent(
LottieAnimationComponent(
animation: LottieAnimationComponent.AnimationItem(
name: "anim_storymute",
mode: state.muteDidChange ? .animating(loop: false) : .still(position: .begin),
range: isVideoMuted ? (0.0, 0.5) : (0.5, 1.0)
),
colors: ["__allcolors__": .white],
size: CGSize(width: 30.0, height: 30.0)
).tagged(muteButtonTag)
var topButtonOffsetX: CGFloat = 0.0
if let playerState = state.playerState, playerState.hasAudio {
let isVideoMuted = mediaEditor?.values.videoIsMuted ?? false

let muteContentComponent: AnyComponentWithIdentity<Empty>
if component.hasAppeared {
muteContentComponent = AnyComponentWithIdentity(
id: "animatedIcon",
component: AnyComponent(
LottieAnimationComponent(
animation: LottieAnimationComponent.AnimationItem(
name: "anim_storymute",
mode: state.muteDidChange ? .animating(loop: false) : .still(position: .begin),
range: isVideoMuted ? (0.0, 0.5) : (0.5, 1.0)
),
colors: ["__allcolors__": .white],
size: CGSize(width: 30.0, height: 30.0)
).tagged(muteButtonTag)
)
)
} else {
muteContentComponent = AnyComponentWithIdentity(
id: "staticIcon",
component: AnyComponent(
BundleIconComponent(
name: "Media Editor/MuteIcon",
tintColor: nil
)
)
} else {
muteContentComponent = AnyComponentWithIdentity(
id: "staticIcon",
component: AnyComponent(
BundleIconComponent(
name: "Media Editor/MuteIcon",
tintColor: nil
)
)
)
}

let muteButtonSize = self.muteButton.update(
transition: transition,
component: AnyComponent(CameraButton(
content: muteContentComponent,
action: { [weak state, weak mediaEditor] in
if let mediaEditor {
state?.muteDidChange = true
let isMuted = !mediaEditor.values.videoIsMuted
mediaEditor.setVideoIsMuted(isMuted)
state?.updated()

if let controller = environment.controller() as? MediaEditorScreen {
controller.node.presentMutedTooltip()
}
)
}

let muteButtonSize = self.muteButton.update(
transition: transition,
component: AnyComponent(CameraButton(
content: muteContentComponent,
action: { [weak state, weak mediaEditor] in
if let mediaEditor {
state?.muteDidChange = true
let isMuted = !mediaEditor.values.videoIsMuted
mediaEditor.setVideoIsMuted(isMuted)
state?.updated()

if let controller = environment.controller() as? MediaEditorScreen {
controller.node.presentMutedTooltip()
}
}
)),
environment: {},
containerSize: CGSize(width: 44.0, height: 44.0)
)
let muteButtonFrame = CGRect(
origin: CGPoint(x: availableSize.width - 20.0 - muteButtonSize.width - 50.0, y: max(environment.statusBarHeight + 10.0, environment.safeInsets.top + 20.0)),
size: muteButtonSize
)
if let muteButtonView = self.muteButton.view {
if muteButtonView.superview == nil {
muteButtonView.layer.shadowOffset = CGSize(width: 0.0, height: 0.0)
muteButtonView.layer.shadowRadius = 2.0
muteButtonView.layer.shadowColor = UIColor.black.cgColor
muteButtonView.layer.shadowOpacity = 0.35
self.addSubview(muteButtonView)

muteButtonView.layer.animateAlpha(from: 0.0, to: muteButtonView.alpha, duration: self.animatingButtons ? 0.1 : 0.2)
muteButtonView.layer.animateScale(from: 0.4, to: 1.0, duration: self.animatingButtons ? 0.1 : 0.2)
}
transition.setPosition(view: muteButtonView, position: muteButtonFrame.center)
transition.setBounds(view: muteButtonView, bounds: CGRect(origin: .zero, size: muteButtonFrame.size))
transition.setScale(view: muteButtonView, scale: displayTopButtons ? 1.0 : 0.01)
transition.setAlpha(view: muteButtonView, alpha: displayTopButtons && !component.isDismissing && !component.isInteractingWithEntities ? 1.0 : 0.0)
)),
environment: {},
containerSize: CGSize(width: 44.0, height: 44.0)
)
let muteButtonFrame = CGRect(
origin: CGPoint(x: availableSize.width - 20.0 - muteButtonSize.width - 50.0, y: max(environment.statusBarHeight + 10.0, environment.safeInsets.top + 20.0)),
size: muteButtonSize
)
if let muteButtonView = self.muteButton.view {
if muteButtonView.superview == nil {
muteButtonView.layer.shadowOffset = CGSize(width: 0.0, height: 0.0)
muteButtonView.layer.shadowRadius = 2.0
muteButtonView.layer.shadowColor = UIColor.black.cgColor
muteButtonView.layer.shadowOpacity = 0.35
self.addSubview(muteButtonView)

muteButtonView.layer.animateAlpha(from: 0.0, to: muteButtonView.alpha, duration: self.animatingButtons ? 0.1 : 0.2)
muteButtonView.layer.animateScale(from: 0.4, to: 1.0, duration: self.animatingButtons ? 0.1 : 0.2)
}
} else if let muteButtonView = self.muteButton.view, muteButtonView.superview != nil {
muteButtonView.layer.animateAlpha(from: 1.0, to: 0.0, duration: 0.2, removeOnCompletion: false, completion: { [weak muteButtonView] _ in
muteButtonView?.removeFromSuperview()
})
muteButtonView.layer.animateScale(from: 1.0, to: 0.01, duration: 0.2, removeOnCompletion: false)
transition.setPosition(view: muteButtonView, position: muteButtonFrame.center)
transition.setBounds(view: muteButtonView, bounds: CGRect(origin: .zero, size: muteButtonFrame.size))
transition.setScale(view: muteButtonView, scale: displayTopButtons ? 1.0 : 0.01)
transition.setAlpha(view: muteButtonView, alpha: displayTopButtons && !component.isDismissing && !component.isInteractingWithEntities ? 1.0 : 0.0)
}

topButtonOffsetX += 50.0
} else if let muteButtonView = self.muteButton.view, muteButtonView.superview != nil {
muteButtonView.layer.animateAlpha(from: 1.0, to: 0.0, duration: 0.2, removeOnCompletion: false, completion: { [weak muteButtonView] _ in
muteButtonView?.removeFromSuperview()
@ -1453,6 +1480,72 @@ final class MediaEditorScreenComponent: Component {
muteButtonView.layer.animateScale(from: 1.0, to: 0.01, duration: 0.2, removeOnCompletion: false)
}

if let playerState = state.playerState {
let playbackContentComponent: AnyComponentWithIdentity<Empty>
if component.hasAppeared && !"".isEmpty {
playbackContentComponent = AnyComponentWithIdentity(
id: "animatedIcon",
component: AnyComponent(
LottieAnimationComponent(
animation: LottieAnimationComponent.AnimationItem(
name: "anim_storymute",
mode: state.muteDidChange ? .animating(loop: false) : .still(position: .begin),
range: "".isEmpty ? (0.0, 0.5) : (0.5, 1.0)
),
colors: ["__allcolors__": .white],
size: CGSize(width: 30.0, height: 30.0)
).tagged(muteButtonTag)
)
)
} else {
playbackContentComponent = AnyComponentWithIdentity(
id: "staticIcon",
component: AnyComponent(
BundleIconComponent(
name: playerState.isPlaying ? "Media Editor/Pause" : "Media Editor/Play",
tintColor: nil
)
)
)
}

let playbackButtonSize = self.playbackButton.update(
transition: transition,
component: AnyComponent(CameraButton(
content: playbackContentComponent,
action: { [weak mediaEditor] in
if let mediaEditor {
// state?.muteDidChange = true
mediaEditor.togglePlayback()
// state?.updated()
}
}
)),
environment: {},
containerSize: CGSize(width: 44.0, height: 44.0)
)
let playbackButtonFrame = CGRect(
origin: CGPoint(x: availableSize.width - 20.0 - playbackButtonSize.width - 50.0 - topButtonOffsetX, y: max(environment.statusBarHeight + 10.0, environment.safeInsets.top + 20.0)),
size: playbackButtonSize
)
if let playbackButtonView = self.playbackButton.view {
if playbackButtonView.superview == nil {
playbackButtonView.layer.shadowOffset = CGSize(width: 0.0, height: 0.0)
playbackButtonView.layer.shadowRadius = 2.0
playbackButtonView.layer.shadowColor = UIColor.black.cgColor
playbackButtonView.layer.shadowOpacity = 0.35
self.addSubview(playbackButtonView)

playbackButtonView.layer.animateAlpha(from: 0.0, to: playbackButtonView.alpha, duration: self.animatingButtons ? 0.1 : 0.2)
playbackButtonView.layer.animateScale(from: 0.4, to: 1.0, duration: self.animatingButtons ? 0.1 : 0.2)
}
transition.setPosition(view: playbackButtonView, position: playbackButtonFrame.center)
transition.setBounds(view: playbackButtonView, bounds: CGRect(origin: .zero, size: playbackButtonFrame.size))
transition.setScale(view: playbackButtonView, scale: displayTopButtons ? 1.0 : 0.01)
transition.setAlpha(view: playbackButtonView, alpha: displayTopButtons && !component.isDismissing && !component.isInteractingWithEntities ? 1.0 : 0.0)
}
}

let textCancelButtonSize = self.textCancelButton.update(
transition: transition,
component: AnyComponent(Button(
@ -2866,6 +2959,33 @@ public final class MediaEditorScreen: ViewController, UIDropInteractionDelegate
controller.push(locationController)
}

func presentAudioPicker() {
self.controller?.present(legacyICloudFilePicker(theme: self.presentationData.theme, mode: .import, documentTypes: ["public.mp3"], forceDarkTheme: true, completion: { [weak self] urls in
guard let self, !urls.isEmpty, let url = urls.first else {
return
}

let path = url.path
let audioAsset = AVURLAsset(url: URL(fileURLWithPath: path))
var artist: String?
var title: String?
for data in audioAsset.commonMetadata {
if data.commonKey == .commonKeyArtist {
artist = data.stringValue
}
if data.commonKey == .commonKeyTitle {
title = data.stringValue
}
}
self.mediaEditor?.setAudioTrack(MediaAudioTrack(path: path, artist: artist, title: title))
self.requestUpdate(transition: .easeInOut(duration: 0.2))

Queue.mainQueue().after(0.1) {
self.mediaEditor?.play()
}
}), in: .window(.root))
}

func updateModalTransitionFactor(_ value: CGFloat, transition: ContainedViewLayoutTransition) {
guard let layout = self.validLayout, case .compact = layout.metrics.widthClass else {
return
@ -3050,6 +3170,13 @@ public final class MediaEditorScreen: ViewController, UIDropInteractionDelegate
self.presentLocationPicker()
}
}
controller.presentAudioPicker = { [weak self, weak controller] in
if let self {
self.stickerScreen = nil
controller?.dismiss(animated: true)
self.presentAudioPicker()
}
}
self.stickerScreen = controller
self.controller?.present(controller, in: .window(.root))
return

@ -8,9 +8,12 @@ import ViewControllerComponent
import ComponentDisplayAdapters
import TelegramPresentationData
import AccountContext
import AudioWaveformComponent
import MultilineTextComponent

private let handleWidth: CGFloat = 14.0
private let scrubberHeight: CGFloat = 39.0
private let collapsedScrubberHeight: CGFloat = 26.0
private let borderHeight: CGFloat = 1.0 + UIScreenPixel
private let frameWidth: CGFloat = 24.0
private let minumumDuration: CGFloat = 1.0
@ -40,6 +43,13 @@ private final class HandleView: UIImageView {
final class VideoScrubberComponent: Component {
typealias EnvironmentType = Empty

struct AudioData: Equatable {
let artist: String?
let title: String?
let samples: Data?
let peak: Int32
}

let context: AccountContext
let generationTimestamp: Double
let duration: Double
@ -50,8 +60,10 @@ final class VideoScrubberComponent: Component {
let isPlaying: Bool
let frames: [UIImage]
let framesUpdateTimestamp: Double
let trimUpdated: (Double, Double, Bool, Bool) -> Void
let audioData: AudioData?
let videoTrimUpdated: (Double, Double, Bool, Bool) -> Void
let positionUpdated: (Double, Bool) -> Void
let audioTrimUpdated: (Double, Double, Bool, Bool) -> Void

init(
context: AccountContext,
@ -64,8 +76,10 @@ final class VideoScrubberComponent: Component {
isPlaying: Bool,
frames: [UIImage],
framesUpdateTimestamp: Double,
trimUpdated: @escaping (Double, Double, Bool, Bool) -> Void,
positionUpdated: @escaping (Double, Bool) -> Void
audioData: AudioData?,
videoTrimUpdated: @escaping (Double, Double, Bool, Bool) -> Void,
positionUpdated: @escaping (Double, Bool) -> Void,
audioTrimUpdated: @escaping (Double, Double, Bool, Bool) -> Void
) {
self.context = context
self.generationTimestamp = generationTimestamp
@ -77,8 +91,10 @@ final class VideoScrubberComponent: Component {
self.isPlaying = isPlaying
self.frames = frames
self.framesUpdateTimestamp = framesUpdateTimestamp
self.trimUpdated = trimUpdated
self.audioData = audioData
self.videoTrimUpdated = videoTrimUpdated
self.positionUpdated = positionUpdated
self.audioTrimUpdated = audioTrimUpdated
}

static func ==(lhs: VideoScrubberComponent, rhs: VideoScrubberComponent) -> Bool {
@ -109,18 +125,32 @@ final class VideoScrubberComponent: Component {
if lhs.framesUpdateTimestamp != rhs.framesUpdateTimestamp {
return false
}
if lhs.audioData != rhs.audioData {
return false
}
return true
}

final class View: UIView, UITextFieldDelegate {
private let leftHandleView = HandleView()
private let rightHandleView = HandleView()
private let borderView = UIImageView()
private let zoneView = HandleView()
private let audioClippingView: UIView
private let audioContainerView: UIView
private let audioBackgroundView: BlurredBackgroundView
private let audioVibrancyView: UIVisualEffectView
private let audioVibrancyContainer: UIView
private let audioTrimView = TrimView(frame: .zero)
private let audioButton = UIButton()

private let audioIconView: UIImageView
private let audioTitle = ComponentView<Empty>()

private let audioWaveform = ComponentView<Empty>()

private let videoTrimView = TrimView(frame: .zero)
private let cursorView = HandleView()

private let transparentFramesContainer = UIView()
private let opaqueFramesContainer = UIView()
private let videoButton = UIButton()

private var transparentFrameLayers: [VideoFrameLayer] = []
private var opaqueFrameLayers: [VideoFrameLayer] = []
@ -129,38 +159,41 @@ final class VideoScrubberComponent: Component {
private weak var state: EmptyComponentState?
private var scrubberSize: CGSize?

private var isPanningTrimHandle = false
private var isAudioSelected = false
private var isPanningPositionHandle = false

private var displayLink: SharedDisplayLinkDriver.Link?
private var positionAnimation: (start: Double, from: Double, to: Double, ended: Bool)?

override init(frame: CGRect) {
self.audioClippingView = UIView()
self.audioClippingView.clipsToBounds = true

self.audioContainerView = UIView()
self.audioContainerView.clipsToBounds = true
self.audioContainerView.layer.cornerRadius = 9.0

self.audioBackgroundView = BlurredBackgroundView(color: UIColor(white: 0.0, alpha: 0.5), enableBlur: true)

let style: UIBlurEffect.Style = .dark
let blurEffect = UIBlurEffect(style: style)
let vibrancyEffect = UIVibrancyEffect(blurEffect: blurEffect)
let vibrancyEffectView = UIVisualEffectView(effect: vibrancyEffect)
self.audioVibrancyView = vibrancyEffectView

self.audioVibrancyContainer = UIView()
self.audioVibrancyView.contentView.addSubview(self.audioVibrancyContainer)

self.audioIconView = UIImageView(image: UIImage(bundleImageName: "Media Editor/SmallAudio"))

self.audioButton.isUserInteractionEnabled = false
self.videoButton.isUserInteractionEnabled = false

super.init(frame: frame)

self.disablesInteractiveModalDismiss = true
self.disablesInteractiveKeyboardGestureRecognizer = true

let handleImage = generateImage(CGSize(width: handleWidth, height: scrubberHeight), rotatedContext: { size, context in
context.clear(CGRect(origin: .zero, size: size))
context.setFillColor(UIColor.white.cgColor)

let path = UIBezierPath(roundedRect: CGRect(origin: .zero, size: CGSize(width: size.width * 2.0, height: size.height)), cornerRadius: 9.0)
context.addPath(path.cgPath)
context.fillPath()

context.setBlendMode(.clear)
let innerPath = UIBezierPath(roundedRect: CGRect(origin: CGPoint(x: handleWidth - 3.0, y: borderHeight), size: CGSize(width: handleWidth, height: size.height - borderHeight * 2.0)), cornerRadius: 2.0)
context.addPath(innerPath.cgPath)
context.fillPath()

context.setBlendMode(.clear)
let holeSize = CGSize(width: 2.0, height: 11.0)
let holePath = UIBezierPath(roundedRect: CGRect(origin: CGPoint(x: 5.0 - UIScreenPixel, y: (size.height - holeSize.height) / 2.0), size: holeSize), cornerRadius: holeSize.width / 2.0)
context.addPath(holePath.cgPath)
context.fillPath()
})?.withRenderingMode(.alwaysTemplate)

let positionImage = generateImage(CGSize(width: handleWidth, height: 50.0), rotatedContext: { size, context in
context.clear(CGRect(origin: .zero, size: size))
context.setFillColor(UIColor.white.cgColor)
@ -169,60 +202,55 @@ final class VideoScrubberComponent: Component {
let path = UIBezierPath(roundedRect: CGRect(origin: CGPoint(x: 6.0, y: 4.0), size: CGSize(width: 2.0, height: 42.0)), cornerRadius: 1.0)
context.addPath(path.cgPath)
context.fillPath()
})

self.zoneView.image = UIImage()
self.zoneView.isUserInteractionEnabled = true
self.zoneView.hitTestSlop = UIEdgeInsets(top: -8.0, left: 0.0, bottom: -8.0, right: 0.0)

self.leftHandleView.image = handleImage
self.leftHandleView.isUserInteractionEnabled = true
self.leftHandleView.tintColor = .white
self.leftHandleView.hitTestSlop = UIEdgeInsets(top: -8.0, left: -9.0, bottom: -8.0, right: -9.0)

self.rightHandleView.image = handleImage
self.rightHandleView.transform = CGAffineTransform(scaleX: -1.0, y: 1.0)
self.rightHandleView.isUserInteractionEnabled = true
self.rightHandleView.tintColor = .white
self.rightHandleView.hitTestSlop = UIEdgeInsets(top: -8.0, left: -9.0, bottom: -8.0, right: -9.0)
})?.stretchableImage(withLeftCapWidth: Int(handleWidth / 2.0), topCapHeight: 25)

self.cursorView.image = positionImage
self.cursorView.isUserInteractionEnabled = true
self.cursorView.hitTestSlop = UIEdgeInsets(top: -8.0, left: -9.0, bottom: -8.0, right: -9.0)

self.borderView.image = generateImage(CGSize(width: 1.0, height: scrubberHeight), rotatedContext: { size, context in
context.clear(CGRect(origin: .zero, size: size))
context.setFillColor(UIColor.white.cgColor)
context.fill(CGRect(origin: .zero, size: CGSize(width: size.width, height: borderHeight)))
context.fill(CGRect(origin: CGPoint(x: 0.0, y: size.height - borderHeight), size: CGSize(width: size.width, height: scrubberHeight)))
})?.withRenderingMode(.alwaysTemplate)
self.borderView.tintColor = .white
self.borderView.isUserInteractionEnabled = false

self.transparentFramesContainer.alpha = 0.5
self.transparentFramesContainer.clipsToBounds = true
self.transparentFramesContainer.layer.cornerRadius = 9.0

self.opaqueFramesContainer.clipsToBounds = true
self.opaqueFramesContainer.layer.cornerRadius = 9.0

self.addSubview(self.audioClippingView)
self.audioClippingView.addSubview(self.audioContainerView)
self.audioContainerView.addSubview(self.audioBackgroundView)
self.audioBackgroundView.addSubview(self.audioVibrancyView)

self.addSubview(self.audioTrimView)

self.addSubview(self.audioIconView)

self.addSubview(self.transparentFramesContainer)
self.addSubview(self.opaqueFramesContainer)
self.addSubview(self.zoneView)
self.addSubview(self.leftHandleView)
self.addSubview(self.rightHandleView)
self.addSubview(self.borderView)
self.addSubview(self.videoTrimView)

self.addSubview(self.audioButton)
self.addSubview(self.videoButton)
self.addSubview(self.cursorView)

self.zoneView.addGestureRecognizer(UIPanGestureRecognizer(target: self, action: #selector(self.handleZoneHandlePan(_:))))
self.leftHandleView.addGestureRecognizer(UIPanGestureRecognizer(target: self, action: #selector(self.handleLeftHandlePan(_:))))
self.rightHandleView.addGestureRecognizer(UIPanGestureRecognizer(target: self, action: #selector(self.handleRightHandlePan(_:))))
self.cursorView.addGestureRecognizer(UIPanGestureRecognizer(target: self, action: #selector(self.handlePositionHandlePan(_:))))

self.displayLink = SharedDisplayLinkDriver.shared.add { [weak self] in
self?.updateCursorPosition()
}
self.displayLink?.isPaused = true

self.videoTrimView.updated = { [weak self] transition in
self?.state?.updated(transition: transition)
}

self.videoTrimView.trimUpdated = { [weak self] startValue, endValue, updatedEnd, done in
if let component = self?.component {
component.videoTrimUpdated(startValue, endValue, updatedEnd, done)
}
}

self.audioButton.addTarget(self, action: #selector(self.audioButtonPressed), for: .touchUpInside)
self.videoButton.addTarget(self, action: #selector(self.videoButtonPressed), for: .touchUpInside)
}

required init?(coder: NSCoder) {
@ -233,116 +261,14 @@ final class VideoScrubberComponent: Component {
self.displayLink?.invalidate()
}

@objc private func handleZoneHandlePan(_ gestureRecognizer: UIPanGestureRecognizer) {
guard let component = self.component else {
return
}
let translation = gestureRecognizer.translation(in: self)

let start = handleWidth / 2.0
let end = self.frame.width - handleWidth / 2.0
let length = end - start

let delta = translation.x / length

let duration = component.endPosition - component.startPosition
let startValue = max(0.0, min(component.duration - duration, component.startPosition + delta * component.duration))
let endValue = startValue + duration

var transition: Transition = .immediate
switch gestureRecognizer.state {
case .began, .changed:
self.isPanningTrimHandle = true
component.trimUpdated(startValue, endValue, false, false)
if case .began = gestureRecognizer.state {
transition = .easeInOut(duration: 0.25)
}
case .ended, .cancelled:
self.isPanningTrimHandle = false
component.trimUpdated(startValue, endValue, false, true)
transition = .easeInOut(duration: 0.25)
default:
break
}

gestureRecognizer.setTranslation(.zero, in: self)
self.state?.updated(transition: transition)
@objc private func audioButtonPressed() {
self.isAudioSelected = true
self.state?.updated(transition: .easeInOut(duration: 0.25))
}

@objc private func handleLeftHandlePan(_ gestureRecognizer: UIPanGestureRecognizer) {
guard let component = self.component else {
return
}
let location = gestureRecognizer.location(in: self)
let start = handleWidth / 2.0
let end = self.frame.width - handleWidth / 2.0
let length = end - start
let fraction = (location.x - start) / length

var startValue = max(0.0, component.duration * fraction)
if startValue > component.endPosition - minumumDuration {
startValue = max(0.0, component.endPosition - minumumDuration)
}
var endValue = component.endPosition
if endValue - startValue > component.maxDuration {
let delta = (endValue - startValue) - component.maxDuration
endValue -= delta
}

var transition: Transition = .immediate
switch gestureRecognizer.state {
case .began, .changed:
self.isPanningTrimHandle = true
component.trimUpdated(startValue, endValue, false, false)
if case .began = gestureRecognizer.state {
transition = .easeInOut(duration: 0.25)
}
case .ended, .cancelled:
self.isPanningTrimHandle = false
component.trimUpdated(startValue, endValue, false, true)
transition = .easeInOut(duration: 0.25)
default:
break
}
self.state?.updated(transition: transition)
}

@objc private func handleRightHandlePan(_ gestureRecognizer: UIPanGestureRecognizer) {
guard let component = self.component else {
return
}
let location = gestureRecognizer.location(in: self)
let start = handleWidth / 2.0
let end = self.frame.width - handleWidth / 2.0
let length = end - start
let fraction = (location.x - start) / length

var endValue = min(component.duration, component.duration * fraction)
if endValue < component.startPosition + minumumDuration {
endValue = min(component.duration, component.startPosition + minumumDuration)
}
var startValue = component.startPosition
if endValue - startValue > component.maxDuration {
let delta = (endValue - startValue) - component.maxDuration
startValue += delta
}

var transition: Transition = .immediate
switch gestureRecognizer.state {
case .began, .changed:
self.isPanningTrimHandle = true
component.trimUpdated(startValue, endValue, true, false)
if case .began = gestureRecognizer.state {
transition = .easeInOut(duration: 0.25)
}
case .ended, .cancelled:
self.isPanningTrimHandle = false
component.trimUpdated(startValue, endValue, true, true)
transition = .easeInOut(duration: 0.25)
default:
break
}
self.state?.updated(transition: transition)
@objc private func videoButtonPressed() {
self.isAudioSelected = false
self.state?.updated(transition: .easeInOut(duration: 0.25))
}

@objc private func handlePositionHandlePan(_ gestureRecognizer: UIPanGestureRecognizer) {
@ -370,13 +296,13 @@ final class VideoScrubberComponent: Component {
self.state?.updated(transition: transition)
}

private func cursorFrame(size: CGSize, position: Double, duration : Double) -> CGRect {
private func cursorFrame(size: CGSize, height: CGFloat, position: Double, duration : Double) -> CGRect {
let cursorPadding: CGFloat = 8.0
let cursorPositionFraction = duration > 0.0 ? position / duration : 0.0
let cursorPosition = floorToScreenPixels(handleWidth + handleWidth / 2.0 - cursorPadding + (size.width - handleWidth * 3.0 + cursorPadding * 2.0) * cursorPositionFraction)
var cursorFrame = CGRect(origin: CGPoint(x: cursorPosition - handleWidth / 2.0, y: -5.0 - UIScreenPixel), size: CGSize(width: handleWidth, height: 50.0))
cursorFrame.origin.x = max(self.leftHandleView.frame.maxX - cursorPadding, cursorFrame.origin.x)
cursorFrame.origin.x = min(self.rightHandleView.frame.minX + cursorPadding, cursorFrame.origin.x)
var cursorFrame = CGRect(origin: CGPoint(x: cursorPosition - handleWidth / 2.0, y: -5.0 - UIScreenPixel), size: CGSize(width: handleWidth, height: height))
cursorFrame.origin.x = max(self.videoTrimView.leftHandleView.frame.maxX - cursorPadding, cursorFrame.origin.x)
cursorFrame.origin.x = min(self.videoTrimView.rightHandleView.frame.minX + cursorPadding, cursorFrame.origin.x)
return cursorFrame
}

@ -398,17 +324,137 @@ final class VideoScrubberComponent: Component {
let advance = component.isPlaying ? timestamp - component.generationTimestamp : 0.0
updatedPosition = max(component.startPosition, min(component.endPosition, component.position + advance))
}
self.cursorView.frame = cursorFrame(size: scrubberSize, position: updatedPosition, duration: component.duration)
let cursorHeight: CGFloat = component.audioData != nil ? 80.0 : 50.0
self.cursorView.frame = cursorFrame(size: scrubberSize, height: cursorHeight, position: updatedPosition, duration: component.duration)
}

func update(component: VideoScrubberComponent, availableSize: CGSize, state: EmptyComponentState, environment: Environment<EnvironmentType>, transition: Transition) -> CGSize {
let previousComponent = self.component
let previousFramesUpdateTimestamp = self.component?.framesUpdateTimestamp
self.component = component
self.state = state

if let previousComponent, previousComponent.audioData == nil, component.audioData != nil {
self.positionAnimation = nil
self.isAudioSelected = true
}

let scrubberSpacing: CGFloat = 4.0

var audioScrubberHeight: CGFloat = collapsedScrubberHeight
var videoScrubberHeight: CGFloat = scrubberHeight

let scrubberSize = CGSize(width: availableSize.width, height: scrubberHeight)
self.scrubberSize = scrubberSize

var originY: CGFloat = 0
var totalHeight = scrubberSize.height
var audioAlpha: CGFloat = 0.0
if let _ = component.audioData {
totalHeight += collapsedScrubberHeight + scrubberSpacing
audioAlpha = 1.0

originY += self.isAudioSelected ? scrubberHeight : collapsedScrubberHeight
originY += scrubberSpacing

if self.isAudioSelected {
audioScrubberHeight = scrubberHeight
videoScrubberHeight = collapsedScrubberHeight
}
} else {
self.isAudioSelected = false
}
transition.setAlpha(view: self.audioClippingView, alpha: audioAlpha)
self.audioButton.isUserInteractionEnabled = !self.isAudioSelected
self.videoButton.isUserInteractionEnabled = self.isAudioSelected

let audioClippingFrame = CGRect(origin: .zero, size: CGSize(width: availableSize.width, height: audioScrubberHeight))
transition.setFrame(view: self.audioButton, frame: audioClippingFrame)
transition.setFrame(view: self.audioClippingView, frame: audioClippingFrame)

let audioContainerFrame = CGRect(origin: .zero, size: audioClippingFrame.size)
transition.setFrame(view: self.audioContainerView, frame: audioContainerFrame)

transition.setFrame(view: self.audioBackgroundView, frame: CGRect(origin: .zero, size: audioClippingFrame.size))
self.audioBackgroundView.update(size: audioClippingFrame.size, transition: transition.containedViewLayoutTransition)
transition.setFrame(view: self.audioVibrancyView, frame: CGRect(origin: .zero, size: audioClippingFrame.size))
transition.setFrame(view: self.audioVibrancyContainer, frame: CGRect(origin: .zero, size: audioClippingFrame.size))

transition.setAlpha(view: self.audioTrimView, alpha: self.isAudioSelected ? 1.0 : 0.0)

if let audioData = component.audioData {
var components: [String] = []
if let artist = audioData.artist {
components.append(artist)
}
if let title = audioData.title {
components.append(title)
}
if components.isEmpty {
components.append("Audio")
}
let audioTitle = NSAttributedString(string: components.joined(separator: " • "), font: Font.semibold(13.0), textColor: .white)
let audioTitleSize = self.audioTitle.update(
transition: transition,
component: AnyComponent(
MultilineTextComponent(
text: .plain(audioTitle)
)
),
environment: {},
containerSize: availableSize
)

let spacing: CGFloat = 4.0
let iconSize = CGSize(width: 14.0, height: 14.0)
let totalWidth = iconSize.width + audioTitleSize.width + spacing

transition.setAlpha(view: self.audioIconView, alpha: self.isAudioSelected ? 0.0 : 1.0)
transition.setFrame(view: self.audioIconView, frame: CGRect(origin: CGPoint(x: floorToScreenPixels((availableSize.width - totalWidth) / 2.0), y: floorToScreenPixels((audioScrubberHeight - iconSize.height) / 2.0)), size: iconSize))

if let view = self.audioTitle.view {
if view.superview == nil {
view.isUserInteractionEnabled = false
self.addSubview(self.audioIconView)
self.addSubview(view)
}
transition.setAlpha(view: view, alpha: self.isAudioSelected ? 0.0 : 1.0)
transition.setFrame(view: view, frame: CGRect(origin: CGPoint(x: floorToScreenPixels((availableSize.width - totalWidth) / 2.0) + iconSize.width + spacing, y: floorToScreenPixels((audioScrubberHeight - audioTitleSize.height) / 2.0)), size: audioTitleSize))
}
} else {

}

if let audioData = component.audioData, let samples = audioData.samples {
let audioWaveformSize = self.audioWaveform.update(
transition: transition,
component: AnyComponent(
AudioWaveformComponent(
backgroundColor: .clear,
foregroundColor: UIColor(rgb: 0xffffff, alpha: 0.3),
shimmerColor: nil,
style: .middle,
samples: samples,
peak: audioData.peak,
status: .complete(),
seek: nil,
updateIsSeeking: nil
)
),
environment: {},
containerSize: CGSize(width: audioContainerFrame.width * 5.0, height: scrubberHeight)
)
if let view = self.audioWaveform.view {
if view.superview == nil {
self.audioVibrancyContainer.addSubview(view)

view.layer.animateScaleY(from: 0.01, to: 1.0, duration: 0.2)
view.layer.animateAlpha(from: 0.0, to: 1.0, duration: 0.2)
}
transition.setFrame(view: view, frame: CGRect(origin: CGPoint(x: 0.0, y: self.isAudioSelected ? 0.0 : 6.0), size: audioWaveformSize))
}
}

let bounds = CGRect(origin: .zero, size: scrubberSize)

if component.framesUpdateTimestamp != previousFramesUpdateTimestamp {
@ -440,31 +486,25 @@ final class VideoScrubberComponent: Component {
}
}

let trimColor = self.isPanningTrimHandle ? UIColor(rgb: 0xf8d74a) : .white
transition.setTintColor(view: self.leftHandleView, color: trimColor)
transition.setTintColor(view: self.rightHandleView, color: trimColor)
transition.setTintColor(view: self.borderView, color: trimColor)

let totalWidth = scrubberSize.width - handleWidth
let leftHandlePositionFraction = component.duration > 0.0 ? component.startPosition / component.duration : 0.0
let leftHandlePosition = floorToScreenPixels(handleWidth / 2.0 + totalWidth * leftHandlePositionFraction)

let leftHandleFrame = CGRect(origin: CGPoint(x: leftHandlePosition - handleWidth / 2.0, y: 0.0), size: CGSize(width: handleWidth, height: scrubberSize.height))
transition.setFrame(view: self.leftHandleView, frame: leftHandleFrame)

let rightHandlePositionFraction = component.duration > 0.0 ? component.endPosition / component.duration : 1.0
let rightHandlePosition = floorToScreenPixels(handleWidth / 2.0 + totalWidth * rightHandlePositionFraction)
let (leftHandleFrame, rightHandleFrame) = self.videoTrimView.update(
totalWidth: totalWidth,
scrubberSize: scrubberSize,
duration: component.duration,
startPosition: component.startPosition,
endPosition: component.endPosition,
position: component.position,
maxDuration: component.maxDuration,
transition: transition
)

let rightHandleFrame = CGRect(origin: CGPoint(x: max(leftHandleFrame.maxX, rightHandlePosition - handleWidth / 2.0), y: 0.0), size: CGSize(width: handleWidth, height: scrubberSize.height))
transition.setFrame(view: self.rightHandleView, frame: rightHandleFrame)

let zoneFrame = CGRect(x: leftHandleFrame.maxX, y: 0.0, width: rightHandleFrame.minX - leftHandleFrame.maxX, height: scrubberSize.height)
transition.setFrame(view: self.zoneView, frame: zoneFrame)

if self.isPanningPositionHandle || !component.isPlaying {
self.positionAnimation = nil
self.displayLink?.isPaused = true
transition.setFrame(view: self.cursorView, frame: cursorFrame(size: scrubberSize, position: component.position, duration: component.duration))

let cursorHeight: CGFloat = component.audioData != nil ? 80.0 : 50.0
transition.setFrame(view: self.cursorView, frame: cursorFrame(size: scrubberSize, height: cursorHeight, position: component.position, duration: component.duration))
} else {
if let (_, _, end, ended) = self.positionAnimation {
if ended, component.position >= component.startPosition && component.position < end - 1.0 {
@ -476,15 +516,17 @@ final class VideoScrubberComponent: Component {
self.displayLink?.isPaused = false
self.updateCursorPosition()
}
transition.setAlpha(view: self.cursorView, alpha: self.isPanningTrimHandle ? 0.0 : 1.0)
// transition.setAlpha(view: self.cursorView, alpha: self.isPanningTrimHandle ? 0.0 : 1.0)

let borderFrame = CGRect(origin: CGPoint(x: leftHandleFrame.maxX, y: 0.0), size: CGSize(width: rightHandleFrame.minX - leftHandleFrame.maxX, height: scrubberSize.height))
transition.setFrame(view: self.borderView, frame: borderFrame)
transition.setAlpha(view: self.videoTrimView, alpha: self.isAudioSelected ? 0.0 : 1.0)

transition.setFrame(view: self.videoTrimView, frame: bounds.offsetBy(dx: 0.0, dy: originY))
let handleInset: CGFloat = 7.0
transition.setFrame(view: self.transparentFramesContainer, frame: bounds)
transition.setFrame(view: self.opaqueFramesContainer, frame: CGRect(origin: CGPoint(x: leftHandleFrame.maxX - handleInset, y: 0.0), size: CGSize(width: rightHandleFrame.minX - leftHandleFrame.maxX + handleInset * 2.0, height: bounds.height)))
transition.setBounds(view: self.opaqueFramesContainer, bounds: CGRect(origin: CGPoint(x: leftHandleFrame.maxX - handleInset, y: 0.0), size: CGSize(width: rightHandleFrame.minX - leftHandleFrame.maxX + handleInset * 2.0, height: bounds.height)))
transition.setFrame(view: self.transparentFramesContainer, frame: CGRect(origin: CGPoint(x: 0.0, y: originY), size: CGSize(width: scrubberSize.width, height: videoScrubberHeight)))
transition.setFrame(view: self.opaqueFramesContainer, frame: CGRect(origin: CGPoint(x: leftHandleFrame.maxX - handleInset, y: originY), size: CGSize(width: rightHandleFrame.minX - leftHandleFrame.maxX + handleInset * 2.0, height: videoScrubberHeight)))
transition.setBounds(view: self.opaqueFramesContainer, bounds: CGRect(origin: CGPoint(x: leftHandleFrame.maxX - handleInset, y: 0.0), size: CGSize(width: rightHandleFrame.minX - leftHandleFrame.maxX + handleInset * 2.0, height: videoScrubberHeight)))

transition.setFrame(view: self.videoButton, frame: bounds.offsetBy(dx: 0.0, dy: originY))

var frameAspectRatio = 0.66
if let image = component.frames.first, image.size.height > 0.0 {
@ -496,14 +538,18 @@ final class VideoScrubberComponent: Component {
if i < self.transparentFrameLayers.count {
let transparentFrameLayer = self.transparentFrameLayers[i]
let opaqueFrameLayer = self.opaqueFrameLayers[i]
let frame = CGRect(origin: CGPoint(x: frameOffset, y: 0.0), size: frameSize)
transparentFrameLayer.frame = frame
opaqueFrameLayer.frame = frame
let frame = CGRect(origin: CGPoint(x: frameOffset, y: floorToScreenPixels((videoScrubberHeight - frameSize.height) / 2.0)), size: frameSize)

transparentFrameLayer.bounds = CGRect(origin: .zero, size: frame.size)
opaqueFrameLayer.bounds = CGRect(origin: .zero, size: frame.size)

transition.setPosition(layer: transparentFrameLayer, position: frame.center)
transition.setPosition(layer: opaqueFrameLayer, position: frame.center)
}
frameOffset += frameSize.width
}

return scrubberSize
return CGSize(width: availableSize.width, height: totalHeight)
}

override func point(inside point: CGPoint, with event: UIEvent?) -> Bool {
@ -520,3 +566,236 @@ final class VideoScrubberComponent: Component {
return view.update(component: self, availableSize: availableSize, state: state, environment: environment, transition: transition)
}
}

private class TrimView: UIView {
fileprivate let leftHandleView = HandleView()
fileprivate let rightHandleView = HandleView()
private let borderView = UIImageView()
private let zoneView = HandleView()

private var isPanningTrimHandle = false

var trimUpdated: (Double, Double, Bool, Bool) -> Void = { _, _, _, _ in }
var updated: (Transition) -> Void = { _ in }

override init(frame: CGRect) {
super.init(frame: frame)

let handleImage = generateImage(CGSize(width: handleWidth, height: scrubberHeight), rotatedContext: { size, context in
context.clear(CGRect(origin: .zero, size: size))
context.setFillColor(UIColor.white.cgColor)

let path = UIBezierPath(roundedRect: CGRect(origin: .zero, size: CGSize(width: size.width * 2.0, height: size.height)), cornerRadius: 9.0)
context.addPath(path.cgPath)
context.fillPath()

context.setBlendMode(.clear)
let innerPath = UIBezierPath(roundedRect: CGRect(origin: CGPoint(x: handleWidth - 3.0, y: borderHeight), size: CGSize(width: handleWidth, height: size.height - borderHeight * 2.0)), cornerRadius: 2.0)
context.addPath(innerPath.cgPath)
context.fillPath()

context.setBlendMode(.clear)
let holeSize = CGSize(width: 2.0, height: 11.0)
let holePath = UIBezierPath(roundedRect: CGRect(origin: CGPoint(x: 5.0 - UIScreenPixel, y: (size.height - holeSize.height) / 2.0), size: holeSize), cornerRadius: holeSize.width / 2.0)
context.addPath(holePath.cgPath)
context.fillPath()
})?.withRenderingMode(.alwaysTemplate)

self.zoneView.image = UIImage()
self.zoneView.isUserInteractionEnabled = true
self.zoneView.hitTestSlop = UIEdgeInsets(top: -8.0, left: 0.0, bottom: -8.0, right: 0.0)

self.leftHandleView.image = handleImage
self.leftHandleView.isUserInteractionEnabled = true
self.leftHandleView.tintColor = .white
self.leftHandleView.hitTestSlop = UIEdgeInsets(top: -8.0, left: -9.0, bottom: -8.0, right: -9.0)

self.rightHandleView.image = handleImage
self.rightHandleView.transform = CGAffineTransform(scaleX: -1.0, y: 1.0)
self.rightHandleView.isUserInteractionEnabled = true
self.rightHandleView.tintColor = .white
self.rightHandleView.hitTestSlop = UIEdgeInsets(top: -8.0, left: -9.0, bottom: -8.0, right: -9.0)

self.borderView.image = generateImage(CGSize(width: 1.0, height: scrubberHeight), rotatedContext: { size, context in
context.clear(CGRect(origin: .zero, size: size))
context.setFillColor(UIColor.white.cgColor)
context.fill(CGRect(origin: .zero, size: CGSize(width: size.width, height: borderHeight)))
context.fill(CGRect(origin: CGPoint(x: 0.0, y: size.height - borderHeight), size: CGSize(width: size.width, height: scrubberHeight)))
})?.withRenderingMode(.alwaysTemplate)
self.borderView.tintColor = .white
self.borderView.isUserInteractionEnabled = false

self.addSubview(self.zoneView)
self.addSubview(self.leftHandleView)
self.addSubview(self.rightHandleView)
self.addSubview(self.borderView)

self.zoneView.addGestureRecognizer(UIPanGestureRecognizer(target: self, action: #selector(self.handleZoneHandlePan(_:))))
self.leftHandleView.addGestureRecognizer(UIPanGestureRecognizer(target: self, action: #selector(self.handleLeftHandlePan(_:))))
self.rightHandleView.addGestureRecognizer(UIPanGestureRecognizer(target: self, action: #selector(self.handleRightHandlePan(_:))))
}

required init?(coder: NSCoder) {
fatalError("init(coder:) has not been implemented")
}

@objc private func handleZoneHandlePan(_ gestureRecognizer: UIPanGestureRecognizer) {
guard let params = self.params else {
return
}
let translation = gestureRecognizer.translation(in: self)

let start = handleWidth / 2.0
let end = self.frame.width - handleWidth / 2.0
let length = end - start

let delta = translation.x / length

let duration = params.endPosition - params.startPosition
let startValue = max(0.0, min(params.duration - duration, params.startPosition + delta * params.duration))
let endValue = startValue + duration

var transition: Transition = .immediate
switch gestureRecognizer.state {
case .began, .changed:
self.isPanningTrimHandle = true
self.trimUpdated(startValue, endValue, false, false)
if case .began = gestureRecognizer.state {
transition = .easeInOut(duration: 0.25)
}
case .ended, .cancelled:
self.isPanningTrimHandle = false
self.trimUpdated(startValue, endValue, false, true)
transition = .easeInOut(duration: 0.25)
default:
break
}

gestureRecognizer.setTranslation(.zero, in: self)
self.updated(transition)
}

    @objc private func handleLeftHandlePan(_ gestureRecognizer: UIPanGestureRecognizer) {
        guard let params = self.params else {
            return
        }
        let location = gestureRecognizer.location(in: self)
        let start = handleWidth / 2.0
        let end = self.frame.width - handleWidth / 2.0
        let length = end - start
        let fraction = (location.x - start) / length

        var startValue = max(0.0, params.duration * fraction)
        if startValue > params.endPosition - minumumDuration {
            startValue = max(0.0, params.endPosition - minumumDuration)
        }
        var endValue = params.endPosition
        if endValue - startValue > params.maxDuration {
            let delta = (endValue - startValue) - params.maxDuration
            endValue -= delta
        }

        var transition: Transition = .immediate
        switch gestureRecognizer.state {
        case .began, .changed:
            self.isPanningTrimHandle = true
            self.trimUpdated(startValue, endValue, false, false)
            if case .began = gestureRecognizer.state {
                transition = .easeInOut(duration: 0.25)
            }
        case .ended, .cancelled:
            self.isPanningTrimHandle = false
            self.trimUpdated(startValue, endValue, false, true)
            transition = .easeInOut(duration: 0.25)
        default:
            break
        }
        self.updated(transition)
    }

    @objc private func handleRightHandlePan(_ gestureRecognizer: UIPanGestureRecognizer) {
        guard let params = self.params else {
            return
        }
        let location = gestureRecognizer.location(in: self)
        let start = handleWidth / 2.0
        let end = self.frame.width - handleWidth / 2.0
        let length = end - start
        let fraction = (location.x - start) / length

        var endValue = min(params.duration, params.duration * fraction)
        if endValue < params.startPosition + minumumDuration {
            endValue = min(params.duration, params.startPosition + minumumDuration)
        }
        var startValue = params.startPosition
        if endValue - startValue > params.maxDuration {
            let delta = (endValue - startValue) - params.maxDuration
            startValue += delta
        }

        var transition: Transition = .immediate
        switch gestureRecognizer.state {
        case .began, .changed:
            self.isPanningTrimHandle = true
            self.trimUpdated(startValue, endValue, true, false)
            if case .began = gestureRecognizer.state {
                transition = .easeInOut(duration: 0.25)
            }
        case .ended, .cancelled:
            self.isPanningTrimHandle = false
            self.trimUpdated(startValue, endValue, true, true)
            transition = .easeInOut(duration: 0.25)
        default:
            break
        }
        self.updated(transition)
    }

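A standalone sketch of the handle clamping used by both pan handlers above (hypothetical names, not part of the diff): the touch location becomes a time fraction, the window may not shrink below a minimum duration, and if it would exceed the maximum duration the opposite edge is pulled along.

    // Sketch only: clamps a dragged trim edge against min/max duration constraints.
    func clampedTrimWindow(
        fraction: Double,        // touch position as a fraction of the track
        duration: Double,        // total clip duration
        otherEdge: Double,       // the edge that is not being dragged
        minimumDuration: Double,
        maximumDuration: Double,
        draggingLeftHandle: Bool
    ) -> (start: Double, end: Double) {
        if draggingLeftHandle {
            var start = max(0.0, duration * fraction)
            start = min(start, otherEdge - minimumDuration)   // keep at least the minimum window
            let end = min(otherEdge, start + maximumDuration) // drag the end in if the window is too long
            return (max(0.0, start), end)
        } else {
            var end = min(duration, duration * fraction)
            end = max(end, otherEdge + minimumDuration)
            let start = max(otherEdge, end - maximumDuration)
            return (start, min(duration, end))
        }
    }
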
    var params: (
        duration: Double,
        startPosition: Double,
        endPosition: Double,
        position: Double,
        maxDuration: Double
    )?

    func update(
        totalWidth: CGFloat,
        scrubberSize: CGSize,
        duration: Double,
        startPosition: Double,
        endPosition: Double,
        position: Double,
        maxDuration: Double,
        transition: Transition
    ) -> (leftHandleFrame: CGRect, rightHandleFrame: CGRect) {
        self.params = (duration, startPosition, endPosition, position, maxDuration)

        let trimColor = self.isPanningTrimHandle ? UIColor(rgb: 0xf8d74a) : .white
        transition.setTintColor(view: self.leftHandleView, color: trimColor)
        transition.setTintColor(view: self.rightHandleView, color: trimColor)
        transition.setTintColor(view: self.borderView, color: trimColor)

        let leftHandlePositionFraction = duration > 0.0 ? startPosition / duration : 0.0
        let leftHandlePosition = floorToScreenPixels(handleWidth / 2.0 + totalWidth * leftHandlePositionFraction)

        let leftHandleFrame = CGRect(origin: CGPoint(x: leftHandlePosition - handleWidth / 2.0, y: 0.0), size: CGSize(width: handleWidth, height: scrubberSize.height))
        transition.setFrame(view: self.leftHandleView, frame: leftHandleFrame)

        let rightHandlePositionFraction = duration > 0.0 ? endPosition / duration : 1.0
        let rightHandlePosition = floorToScreenPixels(handleWidth / 2.0 + totalWidth * rightHandlePositionFraction)

        let rightHandleFrame = CGRect(origin: CGPoint(x: max(leftHandleFrame.maxX, rightHandlePosition - handleWidth / 2.0), y: 0.0), size: CGSize(width: handleWidth, height: scrubberSize.height))
        transition.setFrame(view: self.rightHandleView, frame: rightHandleFrame)

        let zoneFrame = CGRect(x: leftHandleFrame.maxX, y: 0.0, width: rightHandleFrame.minX - leftHandleFrame.maxX, height: scrubberSize.height)
        transition.setFrame(view: self.zoneView, frame: zoneFrame)

        let borderFrame = CGRect(origin: CGPoint(x: leftHandleFrame.maxX, y: 0.0), size: CGSize(width: rightHandleFrame.minX - leftHandleFrame.maxX, height: scrubberSize.height))
        transition.setFrame(view: self.borderView, frame: borderFrame)

        return (leftHandleFrame, rightHandleFrame)
    }
}

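A standalone sketch of the frame math inside update() (hypothetical names, not part of the diff): each handle is placed by converting a time into a fraction of the clip, then into a pixel offset along the usable track, rounded to whole screen pixels.

    // Sketch only: time -> handle center x, rounded down to the pixel grid.
    func handleCenterX(
        time: Double,
        duration: Double,
        totalWidth: CGFloat,
        handleWidth: CGFloat,
        screenScale: CGFloat = 3.0
    ) -> CGFloat {
        let fraction = duration > 0.0 ? CGFloat(time / duration) : 0.0
        let x = handleWidth / 2.0 + totalWidth * fraction
        return floor(x * screenScale) / screenScale  // stands in for floorToScreenPixels
    }
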
@ -744,7 +744,7 @@ public final class MessageInputPanelComponent: Component {
        var fieldBackgroundFrame: CGRect
        if hasMediaRecording {
            fieldBackgroundFrame = CGRect(origin: CGPoint(x: mediaInsets.left, y: insets.top), size: CGSize(width: availableSize.width - mediaInsets.left - mediaInsets.right, height: textFieldSize.height))
        } else if isEditing {
        } else if isEditing || component.style == .editor {
            fieldBackgroundFrame = fieldFrame
        } else {
            if component.forwardAction != nil && component.likeAction != nil {

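A hedged simplification of the branch change in this hunk (hypothetical names, not the component's API): the media editor style now takes the same full-field background as the editing state, instead of falling through to the collapsed layout.

    // Sketch only: which frame the field background takes per state.
    func backgroundFrame(isEditing: Bool, isEditorStyle: Bool, fieldFrame: CGRect, collapsedFrame: CGRect) -> CGRect {
        if isEditing || isEditorStyle {
            return fieldFrame       // full field background
        } else {
            return collapsedFrame   // hypothetical stand-in for the remaining layout branches
        }
    }
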
322  submodules/TelegramUI/Images.xcassets/Media Editor/Audio.imageset/Audio.pdf  (vendored, new file)
@ -0,0 +1,322 @@
[322-line vendored vector PDF (PDF-1.7, MediaBox 20.0 × 20.17 pt): white filled paths drawing the media editor audio icon; raw PDF drawing commands omitted.]
12  submodules/TelegramUI/Images.xcassets/Media Editor/Audio.imageset/Contents.json  (vendored, new file)
@ -0,0 +1,12 @@
{
  "images" : [
    {
      "filename" : "Audio.pdf",
      "idiom" : "universal"
    }
  ],
  "info" : {
    "author" : "xcode",
    "version" : 1
  }
}
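A short usage sketch (assumed, not part of the diff): a template asset added under "Media Editor/Audio.imageset" would typically be loaded by its catalog path and tinted at the call site, in the same way the attach-menu icons are loaded elsewhere in this change.

    // Sketch only: loading and tinting the new template icon.
    let audioIcon = UIImage(bundleImageName: "Media Editor/Audio")?.withRenderingMode(.alwaysTemplate)
    let iconView = UIImageView(image: audioIcon)
    iconView.tintColor = .white
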
12  submodules/TelegramUI/Images.xcassets/Media Editor/Pause.imageset/Contents.json  (vendored, new file)
@ -0,0 +1,12 @@
{
  "images" : [
    {
      "filename" : "pause_30.pdf",
      "idiom" : "universal"
    }
  ],
  "info" : {
    "author" : "xcode",
    "version" : 1
  }
}
93  submodules/TelegramUI/Images.xcassets/Media Editor/Pause.imageset/pause_30.pdf  (vendored, new file)
@ -0,0 +1,93 @@
[93-line vendored vector PDF (PDF-1.7, MediaBox 30 × 30 pt): white circled pause glyph; raw PDF drawing commands omitted.]
12  submodules/TelegramUI/Images.xcassets/Media Editor/Play.imageset/Contents.json  (vendored, new file)
@ -0,0 +1,12 @@
{
  "images" : [
    {
      "filename" : "play_30.pdf",
      "idiom" : "universal"
    }
  ],
  "info" : {
    "author" : "xcode",
    "version" : 1
  }
}
101  submodules/TelegramUI/Images.xcassets/Media Editor/Play.imageset/play_30.pdf  (vendored, new file)
@ -0,0 +1,101 @@
[101-line vendored vector PDF (PDF-1.7, MediaBox 30 × 30 pt): white circled play glyph; raw PDF drawing commands omitted.]
12  submodules/TelegramUI/Images.xcassets/Media Editor/SmallAudio.imageset/Contents.json  (vendored, new file)
@ -0,0 +1,12 @@
{
  "images" : [
    {
      "filename" : "SmallAudio.pdf",
      "idiom" : "universal"
    }
  ],
  "info" : {
    "author" : "xcode",
    "version" : 1
  }
}
302  submodules/TelegramUI/Images.xcassets/Media Editor/SmallAudio.imageset/SmallAudio.pdf  (vendored, new file)
@ -0,0 +1,302 @@
[302-line vendored vector PDF (PDF-1.7, MediaBox 14 × 14 pt): white filled paths drawing the small audio icon; raw PDF drawing commands omitted.]
File diff suppressed because one or more lines are too long