Mirror of https://github.com/Swiftgram/Telegram-iOS.git (synced 2025-09-04 03:40:45 +00:00)

Commit d10bf46443 ("Various fixes")
Parent: e5e5bc1eac
@@ -64,6 +64,8 @@ final class CameraDeviceContext {
 
         self.device.configureDeviceFormat(maxDimensions: self.preferredMaxDimensions, maxFramerate: self.preferredMaxFrameRate)
         self.output.configureVideoStabilization()
+        
+        self.device.resetZoom()
     }
 
     func invalidate() {
@@ -210,7 +212,7 @@ private final class CameraContext {
 
     func stopCapture(invalidate: Bool = false) {
         if invalidate {
-            self.setZoomLevel(1.0)
+            self.mainDeviceContext.device.resetZoom()
 
             self.configure {
                 self.mainDeviceContext.invalidate()
@@ -31,10 +31,23 @@ final class CameraDevice {
 
     func configure(for session: CameraSession, position: Camera.Position) {
         self.position = position
-        if let videoDevice = AVCaptureDevice.DiscoverySession(deviceTypes: [.builtInDualCamera, .builtInWideAngleCamera, .builtInTelephotoCamera], mediaType: .video, position: position).devices.first {
-            self.videoDevice = videoDevice
-            self.videoDevicePromise.set(.single(videoDevice))
+        var selectedDevice: AVCaptureDevice?
+        if #available(iOS 13.0, *) {
+            if let device = AVCaptureDevice.default(.builtInTripleCamera, for: .video, position: position) {
+                selectedDevice = device
+            } else if let device = AVCaptureDevice.default(.builtInDualCamera, for: .video, position: position) {
+                selectedDevice = device
+            } else {
+                selectedDevice = AVCaptureDevice.default(.builtInDualWideCamera, for: .video, position: position)
+            }
+        } else {
+            selectedDevice = AVCaptureDevice.DiscoverySession(deviceTypes: [.builtInDualCamera, .builtInWideAngleCamera, .builtInTelephotoCamera], mediaType: .video, position: position).devices.first
         }
 
+        self.videoDevice = selectedDevice
+        self.videoDevicePromise.set(.single(selectedDevice))
+
         self.audioDevice = AVCaptureDevice.default(for: .audio)
     }
 
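Note: the tiered lookup above works because AVCaptureDevice.default(_:for:position:) simply returns nil when the hardware lacks the requested camera, while the DiscoverySession branch keeps pre-iOS 13 devices working. A minimal probe of which back cameras a given phone actually offers (the helper name is illustrative, not part of the codebase):

import AVFoundation

@available(iOS 13.0, *)
func availableBackCameraTypes() -> [AVCaptureDevice.DeviceType] {
    // Each lookup returns nil when that camera is not present, which is
    // exactly what the fallback chain in the hunk above relies on.
    let candidates: [AVCaptureDevice.DeviceType] = [
        .builtInTripleCamera,
        .builtInDualCamera,
        .builtInDualWideCamera,
        .builtInWideAngleCamera
    ]
    return candidates.filter { AVCaptureDevice.default($0, for: .video, position: .back) != nil }
}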
@@ -229,4 +242,13 @@ final class CameraDevice {
             device.videoZoomFactor = max(1.0, min(10.0, device.videoZoomFactor * zoomDelta))
         }
     }
+    
+    func resetZoom() {
+        guard let device = self.videoDevice else {
+            return
+        }
+        self.transaction(device) { device in
+            device.videoZoomFactor = device.neutralZoomFactor
+        }
+    }
 }
@@ -43,6 +43,16 @@ extension AVCaptureDevice {
 
         return nil
     }
+    
+    var neutralZoomFactor: CGFloat {
+        if #available(iOS 13.0, *) {
+            if let indexOfWideAngle = self.constituentDevices.firstIndex(where: { $0.deviceType == .builtInWideAngleCamera }), indexOfWideAngle > 0 {
+                let zoomFactor = self.virtualDeviceSwitchOverVideoZoomFactors[indexOfWideAngle - 1]
+                return CGFloat(zoomFactor.doubleValue)
+            }
+        }
+        return 1.0
+    }
 }
 
 extension CMSampleBuffer {
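Note: resetZoom() above goes through the repo's own transaction helper; assuming that helper wraps AVFoundation's standard lock/unlock pattern, a plain-AVFoundation equivalent looks roughly like this (neutralZoomFactor is the extension property added in this hunk):

import AVFoundation

func resetZoom(of device: AVCaptureDevice) {
    do {
        try device.lockForConfiguration()
        // On virtual multi-camera devices this lands on the wide-angle camera's
        // switch-over factor; on everything else it falls back to 1.0.
        device.videoZoomFactor = device.neutralZoomFactor
        device.unlockForConfiguration()
    } catch {
        print("Could not lock \(device.localizedName) for configuration: \(error)")
    }
}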
@@ -162,7 +162,7 @@ public final class DrawingTextEntityView: DrawingEntityView, UITextViewDelegate
            self.previousEntity = previousEntity
        }
 
-        self.update(animated: false)
+        self.update(animated: false, updateEditingPosition: false)
 
        if let superview = self.superview {
            let fadeView = UIButton(frame: CGRect(origin: .zero, size: superview.frame.size))
@@ -181,15 +181,8 @@ public final class DrawingTextEntityView: DrawingEntityView, UITextViewDelegate
        self.textView.window?.makeKey()
        self.textView.becomeFirstResponder()
 
-        UIView.animate(withDuration: 0.4, delay: 0.0, usingSpringWithDamping: 0.65, initialSpringVelocity: 0.0) {
-            if let parentView = self.superview as? DrawingEntitiesView {
-                let scale = parentView.getEntityAdditionalScale() / (parentView.drawingView?.zoomScale ?? 1.0)
-                self.transform = CGAffineTransformMakeRotation(parentView.getEntityInitialRotation()).scaledBy(x: scale, y: scale)
-                
-                self.center = parentView.getEntityCenterPosition()
-            }
-        }
+        self.updateEditingPosition(animated: true)
 
        if let selectionView = self.selectionView as? DrawingTextEntititySelectionView {
            selectionView.alpha = 0.0
            if !self.textEntity.text.string.isEmpty {
@@ -198,7 +191,42 @@ public final class DrawingTextEntityView: DrawingEntityView, UITextViewDelegate
            }
        }
    }
+    
+    func updateEditingPosition(animated: Bool) {
+        guard let parentView = self.superview as? DrawingEntitiesView else {
+            return
+        }
+        
+        var position = parentView.getEntityCenterPosition()
+        if parentView.frame.width == 1080 && parentView.frame.height == 1920 {
+            let width = self.bounds.width
+            switch self.textEntity.alignment {
+            case .left:
+                position = CGPoint(x: 80.0 + width / 2.0, y: position.y)
+            case .right:
+                position = CGPoint(x: parentView.bounds.width - 80.0 - width / 2.0, y: position.y)
+            default:
+                break
+            }
+        }
+        
+        let scale = parentView.getEntityAdditionalScale() / (parentView.drawingView?.zoomScale ?? 1.0)
+        let rotation = parentView.getEntityInitialRotation()
+        if animated {
+            UIView.animate(withDuration: 0.4, delay: 0.0, usingSpringWithDamping: 0.65, initialSpringVelocity: 0.0) {
+                self.transform = CGAffineTransformMakeRotation(rotation).scaledBy(x: scale, y: scale)
+                self.center = position
+            }
+        } else {
+            self.transform = CGAffineTransformMakeRotation(rotation).scaledBy(x: scale, y: scale)
+            self.center = position
+        }
+    }
 
    func endEditing(reset: Bool = false) {
+        guard let parentView = self.superview as? DrawingEntitiesView else {
+            return
+        }
+        
        self._isEditing = false
        self.textView.resignFirstResponder()
        self.textView.inputView = nil
@@ -220,7 +248,6 @@ public final class DrawingTextEntityView: DrawingEntityView, UITextViewDelegate
                self.containerView?.remove(uuid: self.textEntity.uuid)
            }
        } else {
-            // self.textEntity.text = self.textView.text.trimmingCharacters(in: .whitespacesAndNewlines)
            if self.textEntity.text.string.isEmpty {
                self.containerView?.remove(uuid: self.textEntity.uuid)
            }
@@ -233,6 +260,18 @@ public final class DrawingTextEntityView: DrawingEntityView, UITextViewDelegate
            })
        }
 
+        if self.previousEntity == nil && self.textEntity.alignment != .center, parentView.frame.width == 1080 && parentView.frame.height == 1920 {
+            let width = self.bounds.width
+            switch self.textEntity.alignment {
+            case .left:
+                self.textEntity.position = CGPoint(x: 80.0 + width / 2.0, y: self.textEntity.position.y)
+            case .right:
+                self.textEntity.position = CGPoint(x: parentView.bounds.width - 80.0 - width / 2.0, y: self.textEntity.position.y)
+            default:
+                break
+            }
+        }
+        
        UIView.animate(withDuration: 0.4, delay: 0.0, usingSpringWithDamping: 0.65, initialSpringVelocity: 0.0) {
            self.transform = CGAffineTransformMakeRotation(self.textEntity.rotation)
            self.center = self.textEntity.position
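Note: a worked example of the alignment math above, with illustrative numbers: on a 1080-wide canvas, a 300pt-wide left-aligned entity ends up centered at x = 80 + 300 / 2 = 230, and a right-aligned one at x = 1080 - 80 - 300 / 2 = 850. As a standalone sketch (the helper name is hypothetical):

import CoreGraphics

func alignedCenterX(canvasWidth: CGFloat, entityWidth: CGFloat, leftAligned: Bool) -> CGFloat {
    // Mirrors the 80pt side margin used for left/right aligned text entities above.
    let margin: CGFloat = 80.0
    return leftAligned ? margin + entityWidth / 2.0 : canvasWidth - margin - entityWidth / 2.0
}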
@@ -305,7 +344,7 @@ public final class DrawingTextEntityView: DrawingEntityView, UITextViewDelegate
        self.textView.setNeedsLayersUpdate()
        var result = self.textView.sizeThatFits(CGSize(width: self.textEntity.width, height: .greatestFiniteMagnitude))
        result.width = max(224.0, ceil(result.width) + 20.0)
-        result.height = ceil(result.height) //+ 20.0 + (self.textView.font?.pointSize ?? 0.0) // * _font.sizeCorrection;
+        result.height = ceil(result.height);
        return result;
    }
 
@@ -507,10 +546,10 @@ public final class DrawingTextEntityView: DrawingEntityView, UITextViewDelegate
    }
 
    public override func update(animated: Bool = false) {
-        self.update(animated: animated, afterAppendingEmoji: false)
+        self.update(animated: animated, afterAppendingEmoji: false, updateEditingPosition: true)
    }
 
-    func update(animated: Bool = false, afterAppendingEmoji: Bool = false) {
+    func update(animated: Bool = false, afterAppendingEmoji: Bool = false, updateEditingPosition: Bool = true) {
        if !self.isEditing {
            self.center = self.textEntity.position
            self.transform = CGAffineTransformScale(CGAffineTransformMakeRotation(self.textEntity.rotation), self.textEntity.scale, self.textEntity.scale)
@@ -555,12 +594,13 @@ public final class DrawingTextEntityView: DrawingEntityView, UITextViewDelegate
 
        self.sizeToFit()
 
+        if updateEditingPosition && self.isEditing {
+            self.updateEditingPosition(animated: animated)
+        }
+        
        self.textView.onLayoutUpdate = {
            self.updateEntities()
        }
-        // Queue.mainQueue().after(afterAppendingEmoji ? 0.01 : 0.001) {
-        //     self.updateEntities()
-        // }
 
        super.update(animated: animated)
    }
@@ -526,7 +526,8 @@ final class MediaPickerGridItemNode: GridItemNode {
    override func layout() {
        super.layout()
 
-        self.backgroundNode.frame = self.bounds
+        let backgroundSize = CGSize(width: self.bounds.width, height: floorToScreenPixels(self.bounds.height / 9.0 * 16.0))
+        self.backgroundNode.frame = CGRect(origin: CGPoint(x: 0.0, y: floorToScreenPixels((self.bounds.height - backgroundSize.height) / 2.0)), size: backgroundSize)
        self.imageNode.frame = self.bounds.insetBy(dx: -1.0 + UIScreenPixel, dy: -1.0 + UIScreenPixel)
        self.gradientNode.frame = CGRect(x: 0.0, y: self.bounds.height - 36.0, width: self.bounds.width, height: 36.0)
        self.typeIconNode.frame = CGRect(x: 0.0, y: self.bounds.height - 20.0, width: 19.0, height: 19.0)
@@ -563,7 +564,8 @@ final class MediaPickerGridItemNode: GridItemNode {
 
    func transitionImage() -> UIImage? {
        if let backgroundImage = self.backgroundNode.image {
-            return generateImage(self.bounds.size, contextGenerator: { size, context in
+            let size = CGSize(width: self.bounds.width, height: self.bounds.height / 9.0 * 16.0)
+            return generateImage(size, contextGenerator: { size, context in
                if let cgImage = backgroundImage.cgImage {
                    context.draw(cgImage, in: CGRect(origin: .zero, size: size))
                    if let image = self.imageNode.image, let cgImage = image.cgImage {
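Note: both grid-item changes above derive the same region from the cell bounds: a rect whose height is the cell height scaled by 16/9, centered vertically so it extends beyond a shorter cell. A minimal sketch of that arithmetic; pixelFloor stands in for the repo's floorToScreenPixels helper:

import UIKit

func storyBackgroundRect(in bounds: CGRect, screenScale: CGFloat = UIScreen.main.scale) -> CGRect {
    // Round down to the nearest physical pixel, like floorToScreenPixels.
    func pixelFloor(_ value: CGFloat) -> CGFloat {
        return floor(value * screenScale) / screenScale
    }
    let size = CGSize(width: bounds.width, height: pixelFloor(bounds.height / 9.0 * 16.0))
    let originY = pixelFloor((bounds.height - size.height) / 2.0)
    return CGRect(origin: CGPoint(x: 0.0, y: originY), size: size)
}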
@@ -1219,7 +1219,7 @@ public final class MediaPickerScreen: ViewController, AttachmentContainable {
 
        var itemHeight = itemWidth
        if case let .assets(_, mode) = controller.subject, case .story = mode {
-            itemHeight = round(itemWidth / 9.0 * 16.0)
+            itemHeight = 180.0
        }
 
        self.gridNode.transaction(GridNodeTransaction(deleteItems: [], insertItems: [], updateItems: [], scrollToItem: nil, updateLayout: GridNodeUpdateLayout(layout: GridNodeLayout(size: bounds.size, insets: gridInsets, scrollIndicatorInsets: nil, preloadSize: itemHeight * 3.0, type: .fixed(itemSize: CGSize(width: itemWidth, height: itemHeight), fillWidth: true, lineSpacing: itemSpacing, itemSpacing: itemSpacing), cutout: cameraRect), transition: transition), itemTransition: .immediate, stationaryItems: .none, updateFirstIndexInSectionOffset: nil, updateOpaqueState: nil, synchronousLoads: false), completion: { [weak self] _ in
@@ -1129,14 +1129,24 @@ private let hasHEVCHardwareEncoder: Bool = {
    return result == noErr
}()
 
-public func recommendedVideoExportConfiguration(values: MediaEditorValues, forceFullHd: Bool = false, frameRate: Float) -> MediaEditorVideoExport.Configuration {
+public func recommendedVideoExportConfiguration(values: MediaEditorValues, duration: Double, image: Bool = false, forceFullHd: Bool = false, frameRate: Float) -> MediaEditorVideoExport.Configuration {
    let compressionProperties: [String: Any]
    let codecType: AVVideoCodecType
 
    if hasHEVCHardwareEncoder {
+        var bitrate: Int = 3700
+        if image {
+            bitrate = 5000
+        } else {
+            if duration < 10 {
+                bitrate = 5500
+            } else if duration < 25 {
+                bitrate = 4500
+            }
+        }
        codecType = AVVideoCodecType.hevc
        compressionProperties = [
-            AVVideoAverageBitRateKey: 3800000,
+            AVVideoAverageBitRateKey: bitrate * 1000,
            AVVideoProfileLevelKey: kVTProfileLevel_HEVC_Main_AutoLevel
        ]
    } else {
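Note: the new duration and image parameters drive a simple HEVC bitrate ladder: still images export at 5000 kbit/s, clips under 10 s at 5500, clips under 25 s at 4500, everything else at the 3700 default. Pulled out as a standalone helper so the numbers are easy to read (the helper name is illustrative; the values mirror the hunk above):

import AVFoundation
import VideoToolbox

func hevcCompressionProperties(duration: Double, isImage: Bool) -> [String: Any] {
    var kbps = 3700
    if isImage {
        kbps = 5000
    } else if duration < 10 {
        kbps = 5500
    } else if duration < 25 {
        kbps = 4500
    }
    return [
        AVVideoAverageBitRateKey: kbps * 1000,
        AVVideoProfileLevelKey: kVTProfileLevel_HEVC_Main_AutoLevel
    ]
}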
@@ -2164,6 +2164,7 @@ public final class MediaEditorScreen: ViewController, UIDropInteractionDelegate
        self.previewContainerView.alpha = 1.0
 
        let transitionInView = UIImageView(image: image)
+        transitionInView.contentMode = .scaleAspectFill
        var initialScale: CGFloat
        if image.size.height > image.size.width {
            initialScale = max(self.previewContainerView.bounds.width / image.size.width, self.previewContainerView.bounds.height / image.size.height)
@@ -3762,7 +3763,11 @@ public final class MediaEditorScreen: ViewController, UIDropInteractionDelegate
            guard let self else {
                return
            }
-            let configuration = recommendedVideoExportConfiguration(values: mediaEditor.values, forceFullHd: true, frameRate: 60.0)
+            var duration: Double = 0.0
+            if case let .video(video) = exportSubject {
+                duration = video.duration.seconds
+            }
+            let configuration = recommendedVideoExportConfiguration(values: mediaEditor.values, duration: duration, forceFullHd: true, frameRate: 60.0)
            let outputPath = NSTemporaryDirectory() + "\(Int64.random(in: 0 ..< .max)).mp4"
            let videoExport = MediaEditorVideoExport(account: self.context.account, subject: exportSubject, configuration: configuration, outputPath: outputPath)
            self.videoExport = videoExport
@@ -836,7 +836,10 @@ public final class MessageInputPanelComponent: Component {
            } else {
                inputActionButtonOriginX = size.width
            }
-            transition.setFrame(view: inputActionButtonView, frame: CGRect(origin: CGPoint(x: inputActionButtonOriginX, y: size.height - insets.bottom - baseFieldHeight + floorToScreenPixels((baseFieldHeight - inputActionButtonSize.height) * 0.5)), size: inputActionButtonSize))
+            let inputActionButtonFrame = CGRect(origin: CGPoint(x: inputActionButtonOriginX, y: size.height - insets.bottom - baseFieldHeight + floor((baseFieldHeight - inputActionButtonSize.height) * 0.5)), size: inputActionButtonSize)
+            transition.setPosition(view: inputActionButtonView, position: inputActionButtonFrame.center)
+            transition.setBounds(view: inputActionButtonView, bounds: CGRect(origin: CGPoint(), size: inputActionButtonFrame.size))
        }
 
        var fieldIconNextX = fieldBackgroundFrame.maxX - 4.0
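Note: the Transition helpers used here belong to this repo's component framework, but the motivation is plain UIKit: when a view may carry a non-identity transform, its frame is not well defined, while center and bounds are. A rough sketch of the same idea without the framework:

import UIKit

func apply(frame: CGRect, to view: UIView) {
    // Setting center and bounds keeps layout correct (and animatable) even if
    // view.transform is not .identity, which assigning view.frame would not.
    view.center = CGPoint(x: frame.midX, y: frame.midY)
    view.bounds = CGRect(origin: .zero, size: frame.size)
}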
@@ -206,8 +206,11 @@ private final class StoryContainerScreenComponent: Component {
            guard let self, let component = self.component, let stateValue = component.content.stateValue, let slice = stateValue.slice, let itemSetView = self.visibleItemSetViews[slice.peer.id], let itemSetComponentView = itemSetView.view.view as? StoryItemSetContainerComponent.View else {
                return []
            }
-            if !itemSetComponentView.isPointInsideContentArea(point: self.convert(point, to: itemSetComponentView)) {
-                return []
+            if let environment = self.environment, case .regular = environment.metrics.widthClass {
+            } else {
+                if !itemSetComponentView.isPointInsideContentArea(point: self.convert(point, to: itemSetComponentView)) {
+                    return []
+                }
            }
            if !itemSetComponentView.allowsInteractiveGestures() {
                return []
@@ -220,8 +223,11 @@ private final class StoryContainerScreenComponent: Component {
            guard let self, let component = self.component, let stateValue = component.content.stateValue, let slice = stateValue.slice, let itemSetView = self.visibleItemSetViews[slice.peer.id], let itemSetComponentView = itemSetView.view.view as? StoryItemSetContainerComponent.View else {
                return []
            }
-            if !itemSetComponentView.isPointInsideContentArea(point: self.convert(point, to: itemSetComponentView)) {
-                return []
+            if let environment = self.environment, case .regular = environment.metrics.widthClass {
+            } else {
+                if !itemSetComponentView.isPointInsideContentArea(point: self.convert(point, to: itemSetComponentView)) {
+                    return []
+                }
            }
            if !itemSetComponentView.allowsInteractiveGestures() {
                return []
@@ -262,8 +268,12 @@ private final class StoryContainerScreenComponent: Component {
                return false
            }
            
-            if !itemSetComponentView.isPointInsideContentArea(point: touch.location(in: itemSetComponentView)) {
-                return false
+            if let environment = self.environment, case .regular = environment.metrics.widthClass {
+            } else {
+                if !itemSetComponentView.isPointInsideContentArea(point: touch.location(in: itemSetComponentView)) {
+                    return false
+                }
            }
            
            return true
@@ -481,18 +491,12 @@ private final class StoryContainerScreenComponent: Component {
            let location = recognizer.location(in: recognizer.view)
            if let currentItemView = self.visibleItemSetViews.first?.value {
                if location.x < currentItemView.frame.minX {
-                    if stateValue.previousSlice == nil {
-                        
-                    } else {
-                        self.beginHorizontalPan(translation: CGPoint())
-                        self.commitHorizontalPan(velocity: CGPoint(x: 100.0, y: 0.0))
-                    }
+                    component.content.navigate(navigation: .item(.previous))
                } else if location.x > currentItemView.frame.maxX {
                    if stateValue.nextSlice == nil {
                        environment.controller()?.dismiss()
                    } else {
-                        self.beginHorizontalPan(translation: CGPoint())
-                        self.commitHorizontalPan(velocity: CGPoint(x: -100.0, y: 0.0))
+                        component.content.navigate(navigation: .item(.next))
                    }
                }
            }
@@ -247,7 +247,7 @@ public func fetchVideoLibraryMediaResource(account: Account, resource: VideoLibr
        if let mediaEditorValues {
            Logger.shared.log("FetchVideoResource", "Requesting video export")
            
-            let configuration = recommendedVideoExportConfiguration(values: mediaEditorValues, frameRate: 30.0)
+            let configuration = recommendedVideoExportConfiguration(values: mediaEditorValues, duration: 5.0, image: true, frameRate: 30.0)
            let videoExport = MediaEditorVideoExport(account: account, subject: .image(image), configuration: configuration, outputPath: tempFile.path)
            videoExport.start()
            
@@ -337,7 +337,8 @@ public func fetchVideoLibraryMediaResource(account: Account, resource: VideoLibr
        let tempFile = EngineTempBox.shared.tempFile(fileName: "video.mp4")
        let updatedSize = Atomic<Int64>(value: 0)
        if let mediaEditorValues {
-            let configuration = recommendedVideoExportConfiguration(values: mediaEditorValues, frameRate: 30.0)
+            let duration: Double = avAsset.duration.seconds
+            let configuration = recommendedVideoExportConfiguration(values: mediaEditorValues, duration: duration, frameRate: 30.0)
            let videoExport = MediaEditorVideoExport(account: account, subject: .video(avAsset), configuration: configuration, outputPath: tempFile.path)
            videoExport.start()
            
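Note: the duration handed to recommendedVideoExportConfiguration here comes from AVAsset.duration, which is a CMTime; .seconds converts it to Double. A small defensive variant (the non-numeric guard is an extra precaution, not something this commit adds):

import AVFoundation

func assetDurationSeconds(_ asset: AVAsset) -> Double {
    let time = asset.duration
    // CMTime.seconds yields NaN for invalid or indefinite times, so fall back to 0.
    return time.isNumeric ? time.seconds : 0.0
}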
@@ -487,7 +488,8 @@ func fetchLocalFileVideoMediaResource(account: Account, resource: LocalFileVideo
        let tempFile = EngineTempBox.shared.tempFile(fileName: "video.mp4")
        let updatedSize = Atomic<Int64>(value: 0)
        if let mediaEditorValues {
-            let configuration = recommendedVideoExportConfiguration(values: mediaEditorValues, frameRate: 30.0)
+            let duration: Double = avAsset.duration.seconds
+            let configuration = recommendedVideoExportConfiguration(values: mediaEditorValues, duration: duration, frameRate: 30.0)
            let subject: MediaEditorVideoExport.Subject
            if filteredPath.contains(".jpg"), let data = try? Data(contentsOf: URL(fileURLWithPath: filteredPath), options: [.mappedRead]), let image = UIImage(data: data) {
                subject = .image(image)