mirror of https://github.com/Swiftgram/Telegram-iOS.git
synced 2025-06-16 05:55:20 +00:00

Cherry-pick various fixes

parent 1fb7544b52
commit 7d8de2a865

@@ -131,7 +131,10 @@ private final class CameraContext {
     var secondaryPreviewView: CameraSimplePreviewView?
     
     private var lastSnapshotTimestamp: Double = CACurrentMediaTime()
+    private var savedSnapshot = false
+    private var lastAdditionalSnapshotTimestamp: Double = CACurrentMediaTime()
+    private var savedAdditionalSnapshot = false
     
     private func savePreviewSnapshot(pixelBuffer: CVPixelBuffer, front: Bool) {
         Queue.concurrentDefaultQueue().async {
             var ciImage = CIImage(cvImageBuffer: pixelBuffer)

@@ -141,7 +144,7 @@ private final class CameraContext {
                 transform = CGAffineTransformTranslate(transform, 0.0, -size.height)
                 ciImage = ciImage.transformed(by: transform)
             }
-            ciImage = ciImage.clampedToExtent().applyingGaussianBlur(sigma: 100.0).cropped(to: CGRect(origin: .zero, size: size))
+            ciImage = ciImage.clampedToExtent().applyingGaussianBlur(sigma: Camera.isDualCameraSupported ? 100.0 : 40.0).cropped(to: CGRect(origin: .zero, size: size))
             if let cgImage = self.ciContext.createCGImage(ciImage, from: ciImage.extent) {
                 let uiImage = UIImage(cgImage: cgImage, scale: 1.0, orientation: .right)
                 if front {

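The sigma change above sits inside the existing clamp-blur-crop pipeline of savePreviewSnapshot. A self-contained sketch of that Core Image pattern, with hypothetical names (not the project's API), assuming a dual-camera check like Camera.isDualCameraSupported:

import CoreImage
import CoreVideo
import UIKit

// Sketch only: clamp extends the edge pixels so the Gaussian blur does not fade to
// transparent at the borders, then the result is cropped back to the original size.
func blurredSnapshot(from pixelBuffer: CVPixelBuffer, ciContext: CIContext, isDualCamera: Bool) -> UIImage? {
    var image = CIImage(cvImageBuffer: pixelBuffer)
    let size = image.extent.size
    let sigma = isDualCamera ? 100.0 : 40.0
    image = image.clampedToExtent()
        .applyingGaussianBlur(sigma: sigma)
        .cropped(to: CGRect(origin: .zero, size: size))
    guard let cgImage = ciContext.createCGImage(image, from: image.extent) else {
        return nil
    }
    return UIImage(cgImage: cgImage, scale: 1.0, orientation: .right)
}
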
@@ -330,13 +333,14 @@ private final class CameraContext {
                 return
             }
             let timestamp = CACurrentMediaTime()
-            if timestamp > self.lastSnapshotTimestamp + 2.5, !mainDeviceContext.output.isRecording {
+            if timestamp > self.lastSnapshotTimestamp + 2.5, !mainDeviceContext.output.isRecording || !self.savedSnapshot {
                 var front = false
                 if #available(iOS 13.0, *) {
                     front = connection.inputPorts.first?.sourceDevicePosition == .front
                 }
                 self.savePreviewSnapshot(pixelBuffer: pixelBuffer, front: front)
                 self.lastSnapshotTimestamp = timestamp
+                self.savedSnapshot = true
             }
         }
         self.additionalDeviceContext?.output.processSampleBuffer = { [weak self] sampleBuffer, pixelBuffer, connection in

@@ -344,13 +348,14 @@ private final class CameraContext {
                 return
             }
             let timestamp = CACurrentMediaTime()
-            if timestamp > self.lastAdditionalSnapshotTimestamp + 2.5, !additionalDeviceContext.output.isRecording {
+            if timestamp > self.lastAdditionalSnapshotTimestamp + 2.5, !additionalDeviceContext.output.isRecording || !self.savedAdditionalSnapshot {
                 var front = false
                 if #available(iOS 13.0, *) {
                     front = connection.inputPorts.first?.sourceDevicePosition == .front
                 }
                 self.savePreviewSnapshot(pixelBuffer: pixelBuffer, front: front)
                 self.lastAdditionalSnapshotTimestamp = timestamp
+                self.savedAdditionalSnapshot = true
             }
         }
     } else {

@@ -370,13 +375,14 @@ private final class CameraContext {
                 return
             }
             let timestamp = CACurrentMediaTime()
-            if timestamp > self.lastSnapshotTimestamp + 2.5, !mainDeviceContext.output.isRecording {
+            if timestamp > self.lastSnapshotTimestamp + 2.5, !mainDeviceContext.output.isRecording || !self.savedSnapshot {
                 var front = false
                 if #available(iOS 13.0, *) {
                     front = connection.inputPorts.first?.sourceDevicePosition == .front
                 }
                 self.savePreviewSnapshot(pixelBuffer: pixelBuffer, front: front)
                 self.lastSnapshotTimestamp = timestamp
+                self.savedSnapshot = true
             }
         }
         if self.initialConfiguration.reportAudioLevel {

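The three hunks above change the preview-snapshot throttle from a pure time window to "at most every 2.5 s, never while recording, but always at least once". A minimal, hypothetical sketch of that pattern in isolation (not the project's types):

import QuartzCore

// Sketch only: CACurrentMediaTime()-based throttle with a "saved at least once"
// escape hatch so a snapshot is still taken even if recording starts immediately.
final class SnapshotThrottle {
    private var lastTimestamp: Double = CACurrentMediaTime()
    private var savedOnce = false
    let interval: Double = 2.5

    func shouldSave(isRecording: Bool) -> Bool {
        let timestamp = CACurrentMediaTime()
        guard timestamp > self.lastTimestamp + self.interval, !isRecording || !self.savedOnce else {
            return false
        }
        self.lastTimestamp = timestamp
        self.savedOnce = true
        return true
    }
}
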
@@ -557,7 +563,7 @@ private final class CameraContext {
         
         let orientation = self.simplePreviewView?.videoPreviewLayer.connection?.videoOrientation ?? .portrait
         if self.initialConfiguration.isRoundVideo {
-            return mainDeviceContext.output.startRecording(mode: .roundVideo, orientation: .portrait, additionalOutput: self.additionalDeviceContext?.output)
+            return mainDeviceContext.output.startRecording(mode: .roundVideo, orientation: DeviceModel.current.isIpad ? orientation : .portrait, additionalOutput: self.additionalDeviceContext?.output)
         } else {
             if let additionalDeviceContext = self.additionalDeviceContext {
                 return combineLatest(

@@ -100,6 +100,8 @@ final class CameraOutput: NSObject {
     
     private var photoCaptureRequests: [Int64: PhotoCaptureContext] = [:]
     private var videoRecorder: VideoRecorder?
     
+    private var captureOrientation: AVCaptureVideoOrientation = .portrait
+    
     var processSampleBuffer: ((CMSampleBuffer, CVImageBuffer, AVCaptureConnection) -> Void)?
     var processAudioBuffer: ((CMSampleBuffer) -> Void)?

@@ -305,6 +307,7 @@ final class CameraOutput: NSObject {
         
         self.currentMode = mode
         self.lastSampleTimestamp = nil
+        self.captureOrientation = orientation
         
         var orientation = orientation
         let dimensions: CGSize

@@ -538,7 +541,7 @@ final class CameraOutput: NSObject {
         if !filter.isPrepared {
             filter.prepare(with: newFormatDescription, outputRetainedBufferCountHint: 3)
         }
-        guard let newPixelBuffer = filter.render(pixelBuffer: videoPixelBuffer, additional: additional, transitionFactor: transitionFactor) else {
+        guard let newPixelBuffer = filter.render(pixelBuffer: videoPixelBuffer, additional: additional, captureOrientation: self.captureOrientation, transitionFactor: transitionFactor) else {
             self.semaphore.signal()
             return nil
         }

@@ -1,5 +1,6 @@
 import Foundation
 import UIKit
+import AVFoundation
 import CoreImage
 import CoreMedia
 import CoreVideo

@@ -157,13 +158,29 @@ final class CameraRoundVideoFilter {
     private var lastMainSourceImage: CIImage?
     private var lastAdditionalSourceImage: CIImage?
     
-    func render(pixelBuffer: CVPixelBuffer, additional: Bool, transitionFactor: CGFloat) -> CVPixelBuffer? {
+    func render(pixelBuffer: CVPixelBuffer, additional: Bool, captureOrientation: AVCaptureVideoOrientation, transitionFactor: CGFloat) -> CVPixelBuffer? {
         guard let resizeFilter = self.resizeFilter, let overlayFilter = self.overlayFilter, let compositeFilter = self.compositeFilter, let borderFilter = self.borderFilter, self.isPrepared else {
             return nil
         }
         
         var sourceImage = CIImage(cvImageBuffer: pixelBuffer, options: [.colorSpace: self.colorSpace])
-        sourceImage = sourceImage.oriented(additional ? .leftMirrored : .right)
+        var sourceOrientation: CGImagePropertyOrientation
+        var sourceIsLandscape = false
+        switch captureOrientation {
+        case .portrait:
+            sourceOrientation = additional ? .leftMirrored : .right
+        case .landscapeLeft:
+            sourceOrientation = additional ? .upMirrored : .down
+            sourceIsLandscape = true
+        case .landscapeRight:
+            sourceOrientation = additional ? .downMirrored : .up
+            sourceIsLandscape = true
+        case .portraitUpsideDown:
+            sourceOrientation = additional ? .rightMirrored : .left
+        @unknown default:
+            sourceOrientation = additional ? .leftMirrored : .right
+        }
+        sourceImage = sourceImage.oriented(sourceOrientation)
         let scale = CGFloat(videoMessageDimensions.width) / min(sourceImage.extent.width, sourceImage.extent.height)
         
         if !self.simple {

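The new captureOrientation parameter drives a switch that picks a CGImagePropertyOrientation (mirrored for the front/additional camera) and flags landscape frames for the later crop. A reduced, hypothetical helper with the same mapping, outside the project's filter class:

import AVFoundation
import ImageIO

// Sketch only: front-camera frames are mirrored, so each AVCaptureVideoOrientation
// maps to a different CGImagePropertyOrientation per camera; landscape frames are
// flagged so the square crop can later run along the horizontal axis.
func sourceOrientation(for captureOrientation: AVCaptureVideoOrientation, mirrored: Bool) -> (orientation: CGImagePropertyOrientation, isLandscape: Bool) {
    switch captureOrientation {
    case .portrait:
        return (mirrored ? .leftMirrored : .right, false)
    case .landscapeLeft:
        return (mirrored ? .upMirrored : .down, true)
    case .landscapeRight:
        return (mirrored ? .downMirrored : .up, true)
    case .portraitUpsideDown:
        return (mirrored ? .rightMirrored : .left, false)
    @unknown default:
        return (mirrored ? .leftMirrored : .right, false)
    }
}
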
@@ -179,8 +196,13 @@ final class CameraRoundVideoFilter {
             sourceImage = sourceImage.transformed(by: CGAffineTransformMakeScale(scale, scale), highQualityDownsample: true)
         }
         
-        sourceImage = sourceImage.transformed(by: CGAffineTransformMakeTranslation(0.0, -(sourceImage.extent.height - sourceImage.extent.width) / 2.0))
-        sourceImage = sourceImage.cropped(to: CGRect(x: 0.0, y: 0.0, width: sourceImage.extent.width, height: sourceImage.extent.width))
+        if sourceIsLandscape {
+            sourceImage = sourceImage.transformed(by: CGAffineTransformMakeTranslation(-(sourceImage.extent.width - sourceImage.extent.height) / 2.0, 0.0))
+            sourceImage = sourceImage.cropped(to: CGRect(x: 0.0, y: 0.0, width: sourceImage.extent.height, height: sourceImage.extent.height))
+        } else {
+            sourceImage = sourceImage.transformed(by: CGAffineTransformMakeTranslation(0.0, -(sourceImage.extent.height - sourceImage.extent.width) / 2.0))
+            sourceImage = sourceImage.cropped(to: CGRect(x: 0.0, y: 0.0, width: sourceImage.extent.width, height: sourceImage.extent.width))
+        }
         
         if additional {
             self.lastAdditionalSourceImage = sourceImage

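Both branches above take a centered square crop; only the axis carrying the excess differs. A hypothetical standalone equivalent of that step (assumes the image extent starts at the origin, as it does after the preceding transforms):

import CoreGraphics
import CoreImage

// Sketch only: translate the image so the centered square sits at the origin,
// then crop to side x side, where side is the shorter dimension. Landscape frames
// shed horizontal excess, portrait frames shed vertical excess.
func centeredSquareCrop(_ image: CIImage) -> CIImage {
    let extent = image.extent
    let side = min(extent.width, extent.height)
    let dx = -(extent.width - side) / 2.0
    let dy = -(extent.height - side) / 2.0
    return image
        .transformed(by: CGAffineTransform(translationX: dx, y: dy))
        .cropped(to: CGRect(x: 0.0, y: 0.0, width: side, height: side))
}
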
@@ -38,6 +38,7 @@ swift_library(
         "//submodules/DeviceAccess",
         "//submodules/TelegramUI/Components/MediaEditor",
         "//submodules/LegacyMediaPickerUI",
+        "//submodules/TelegramAudio",
     ],
     visibility = [
         "//visibility:public",

@@ -26,6 +26,7 @@ import MediaResources
 import LocalMediaResources
 import ImageCompression
 import LegacyMediaPickerUI
+import TelegramAudio
 
 struct CameraState: Equatable {
     enum Recording: Equatable {

@@ -694,7 +695,7 @@ public class VideoMessageCameraScreen: ViewController {
     func withReadyCamera(isFirstTime: Bool = false, _ f: @escaping () -> Void) {
         let previewReady: Signal<Bool, NoError>
         if #available(iOS 13.0, *) {
-            previewReady = self.cameraState.isDualCameraEnabled ? self.additionalPreviewView.isPreviewing : self.mainPreviewView.isPreviewing
+            previewReady = self.cameraState.isDualCameraEnabled ? self.additionalPreviewView.isPreviewing : self.mainPreviewView.isPreviewing |> delay(0.2, queue: Queue.mainQueue())
        } else {
             previewReady = .single(true) |> delay(0.35, queue: Queue.mainQueue())
         }

@@ -1116,9 +1117,22 @@ public class VideoMessageCameraScreen: ViewController {
         let previewSide = min(369.0, layout.size.width - 24.0)
         let previewFrame: CGRect
         if layout.metrics.isTablet {
-            previewFrame = CGRect(origin: CGPoint(x: floorToScreenPixels((layout.size.width - previewSide) / 2.0), y: max(layout.statusBarHeight ?? 0.0 + 24.0, availableHeight * 0.2 - previewSide / 2.0)), size: CGSize(width: previewSide, height: previewSide))
+            let statusBarOrientation: UIInterfaceOrientation
+            if #available(iOS 13.0, *) {
+                statusBarOrientation = UIApplication.shared.windows.first?.windowScene?.interfaceOrientation ?? .portrait
+            } else {
+                statusBarOrientation = UIApplication.shared.statusBarOrientation
+            }
+            
+            if statusBarOrientation == .landscapeLeft {
+                previewFrame = CGRect(origin: CGPoint(x: layout.size.width - 44.0 - previewSide, y: floorToScreenPixels((layout.size.height - previewSide) / 2.0)), size: CGSize(width: previewSide, height: previewSide))
+            } else if statusBarOrientation == .landscapeRight {
+                previewFrame = CGRect(origin: CGPoint(x: 44.0, y: floorToScreenPixels((layout.size.height - previewSide) / 2.0)), size: CGSize(width: previewSide, height: previewSide))
+            } else {
+                previewFrame = CGRect(origin: CGPoint(x: floorToScreenPixels((layout.size.width - previewSide) / 2.0), y: max(layout.statusBarHeight ?? 0.0 + 24.0, availableHeight * 0.2 - previewSide / 2.0)), size: CGSize(width: previewSide, height: previewSide))
+            }
         } else {
-            previewFrame = CGRect(origin: CGPoint(x: floorToScreenPixels((layout.size.width - previewSide) / 2.0), y: max(layout.statusBarHeight ?? 0.0 + 16.0, availableHeight * 0.4 - previewSide / 2.0)), size: CGSize(width: previewSide, height: previewSide))
+            previewFrame = CGRect(origin: CGPoint(x: floorToScreenPixels((layout.size.width - previewSide) / 2.0), y: max(layout.statusBarHeight ?? 0.0 + 24.0, availableHeight * 0.4 - previewSide / 2.0)), size: CGSize(width: previewSide, height: previewSide))
         }
         if !self.animatingIn {
             transition.setFrame(view: self.previewContainerView, frame: previewFrame)

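The tablet branch now reads the interface orientation, preferring the window scene API and falling back to the deprecated UIApplication property on older systems. A minimal hypothetical helper with the same fallback:

import UIKit

// Sketch only: interfaceOrientation lives on the window scene from iOS 13 on;
// earlier systems only expose UIApplication.statusBarOrientation.
func currentInterfaceOrientation() -> UIInterfaceOrientation {
    if #available(iOS 13.0, *) {
        return UIApplication.shared.windows.first?.windowScene?.interfaceOrientation ?? .portrait
    } else {
        return UIApplication.shared.statusBarOrientation
    }
}
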
@@ -1321,7 +1335,7 @@ public class VideoMessageCameraScreen: ViewController {
     
     public func takenRecordedData() -> Signal<RecordedVideoData?, NoError> {
         let previewState = self.node.previewStatePromise.get()
-        let count = 12
+        let count = 13
         
         let initialPlaceholder: Signal<UIImage?, NoError>
         if let firstResult = self.node.results.first {

@@ -1508,78 +1522,97 @@ public class VideoMessageCameraScreen: ViewController {
         
         let dimensions = PixelDimensions(width: 400, height: 400)
         
-        var thumbnailImage = video.thumbnail
+        let thumbnailImage: Signal<UIImage, NoError>
         if startTime > 0.0 {
-            let composition = composition(with: results)
-            let imageGenerator = AVAssetImageGenerator(asset: composition)
-            imageGenerator.maximumSize = dimensions.cgSize
-            imageGenerator.appliesPreferredTrackTransform = true
-            
-            if let cgImage = try? imageGenerator.copyCGImage(at: CMTime(seconds: startTime, preferredTimescale: composition.duration.timescale), actualTime: nil) {
-                thumbnailImage = UIImage(cgImage: cgImage)
+            thumbnailImage = Signal { subscriber in
+                let composition = composition(with: results)
+                let imageGenerator = AVAssetImageGenerator(asset: composition)
+                imageGenerator.maximumSize = dimensions.cgSize
+                imageGenerator.appliesPreferredTrackTransform = true
+                
+                imageGenerator.generateCGImagesAsynchronously(forTimes: [NSValue(time: CMTime(seconds: startTime, preferredTimescale: composition.duration.timescale))], completionHandler: { _, image, _, _, _ in
+                    if let image {
+                        subscriber.putNext(UIImage(cgImage: image))
+                    } else {
+                        subscriber.putNext(video.thumbnail)
+                    }
+                    subscriber.putCompletion()
+                })
+                
+                return ActionDisposable {
+                    imageGenerator.cancelAllCGImageGeneration()
+                }
             }
-        }
-        
-        let values = MediaEditorValues(peerId: self.context.account.peerId, originalDimensions: dimensions, cropOffset: .zero, cropRect: CGRect(origin: .zero, size: dimensions.cgSize), cropScale: 1.0, cropRotation: 0.0, cropMirroring: false, cropOrientation: nil, gradientColors: nil, videoTrimRange: self.node.previewState?.trimRange, videoIsMuted: false, videoIsFullHd: false, videoIsMirrored: false, videoVolume: nil, additionalVideoPath: nil, additionalVideoIsDual: false, additionalVideoPosition: nil, additionalVideoScale: nil, additionalVideoRotation: nil, additionalVideoPositionChanges: [], additionalVideoTrimRange: nil, additionalVideoOffset: nil, additionalVideoVolume: nil, nightTheme: false, drawing: nil, entities: [], toolValues: [:], audioTrack: nil, audioTrackTrimRange: nil, audioTrackOffset: nil, audioTrackVolume: nil, audioTrackSamples: nil, qualityPreset: .videoMessage)
-        
-        var resourceAdjustments: VideoMediaResourceAdjustments? = nil
-        if let valuesData = try? JSONEncoder().encode(values) {
-            let data = MemoryBuffer(data: valuesData)
-            let digest = MemoryBuffer(data: data.md5Digest())
-            resourceAdjustments = VideoMediaResourceAdjustments(data: data, digest: digest, isStory: false)
-        }
-        
-        let resource: TelegramMediaResource
-        let liveUploadData: LegacyLiveUploadInterfaceResult?
-        if let current = self.node.currentLiveUploadData {
-            liveUploadData = current
-        } else {
-            liveUploadData = self.node.liveUploadInterface?.fileUpdated(true) as? LegacyLiveUploadInterfaceResult
-        }
-        if !hasAdjustments, let liveUploadData, let data = try? Data(contentsOf: URL(fileURLWithPath: video.videoPath)) {
-            resource = LocalFileMediaResource(fileId: liveUploadData.id)
-            self.context.account.postbox.mediaBox.storeResourceData(resource.id, data: data, synchronous: true)
         } else {
-            resource = LocalFileVideoMediaResource(randomId: Int64.random(in: Int64.min ... Int64.max), paths: videoPaths, adjustments: resourceAdjustments)
+            thumbnailImage = .single(video.thumbnail)
         }
         
-        var previewRepresentations: [TelegramMediaImageRepresentation] = []
-        
-        let thumbnailResource = LocalFileMediaResource(fileId: Int64.random(in: Int64.min ... Int64.max))
-        let thumbnailSize = video.dimensions.cgSize.aspectFitted(CGSize(width: 320.0, height: 320.0))
-        if let thumbnailData = scaleImageToPixelSize(image: thumbnailImage, size: thumbnailSize)?.jpegData(compressionQuality: 0.4) {
-            self.context.account.postbox.mediaBox.storeResourceData(thumbnailResource.id, data: thumbnailData)
-            previewRepresentations.append(TelegramMediaImageRepresentation(dimensions: PixelDimensions(thumbnailSize), resource: thumbnailResource, progressiveSizes: [], immediateThumbnailData: nil, hasVideo: false, isPersonal: false))
-        }
-        
-        let tempFile = TempBox.shared.tempFile(fileName: "file")
-        defer {
-            TempBox.shared.dispose(tempFile)
-        }
-        if let data = compressImageToJPEG(thumbnailImage, quality: 0.7, tempFilePath: tempFile.path) {
-            context.account.postbox.mediaBox.storeCachedResourceRepresentation(resource, representation: CachedVideoFirstFrameRepresentation(), data: data)
-        }
+        let _ = (thumbnailImage
+        |> deliverOnMainQueue).startStandalone(next: { [weak self] thumbnailImage in
+            guard let self else {
+                return
+            }
+            let values = MediaEditorValues(peerId: self.context.account.peerId, originalDimensions: dimensions, cropOffset: .zero, cropRect: CGRect(origin: .zero, size: dimensions.cgSize), cropScale: 1.0, cropRotation: 0.0, cropMirroring: false, cropOrientation: nil, gradientColors: nil, videoTrimRange: self.node.previewState?.trimRange, videoIsMuted: false, videoIsFullHd: false, videoIsMirrored: false, videoVolume: nil, additionalVideoPath: nil, additionalVideoIsDual: false, additionalVideoPosition: nil, additionalVideoScale: nil, additionalVideoRotation: nil, additionalVideoPositionChanges: [], additionalVideoTrimRange: nil, additionalVideoOffset: nil, additionalVideoVolume: nil, nightTheme: false, drawing: nil, entities: [], toolValues: [:], audioTrack: nil, audioTrackTrimRange: nil, audioTrackOffset: nil, audioTrackVolume: nil, audioTrackSamples: nil, qualityPreset: .videoMessage)
+            
+            var resourceAdjustments: VideoMediaResourceAdjustments? = nil
+            if let valuesData = try? JSONEncoder().encode(values) {
+                let data = MemoryBuffer(data: valuesData)
+                let digest = MemoryBuffer(data: data.md5Digest())
+                resourceAdjustments = VideoMediaResourceAdjustments(data: data, digest: digest, isStory: false)
+            }
+            
+            let resource: TelegramMediaResource
+            let liveUploadData: LegacyLiveUploadInterfaceResult?
+            if let current = self.node.currentLiveUploadData {
+                liveUploadData = current
+            } else {
+                liveUploadData = self.node.liveUploadInterface?.fileUpdated(true) as? LegacyLiveUploadInterfaceResult
+            }
+            if !hasAdjustments, let liveUploadData, let data = try? Data(contentsOf: URL(fileURLWithPath: video.videoPath)) {
+                resource = LocalFileMediaResource(fileId: liveUploadData.id)
+                self.context.account.postbox.mediaBox.storeResourceData(resource.id, data: data, synchronous: true)
+            } else {
+                resource = LocalFileVideoMediaResource(randomId: Int64.random(in: Int64.min ... Int64.max), paths: videoPaths, adjustments: resourceAdjustments)
+            }
+            
+            var previewRepresentations: [TelegramMediaImageRepresentation] = []
+            
+            let thumbnailResource = LocalFileMediaResource(fileId: Int64.random(in: Int64.min ... Int64.max))
+            let thumbnailSize = video.dimensions.cgSize.aspectFitted(CGSize(width: 320.0, height: 320.0))
+            if let thumbnailData = scaleImageToPixelSize(image: thumbnailImage, size: thumbnailSize)?.jpegData(compressionQuality: 0.4) {
+                self.context.account.postbox.mediaBox.storeResourceData(thumbnailResource.id, data: thumbnailData)
+                previewRepresentations.append(TelegramMediaImageRepresentation(dimensions: PixelDimensions(thumbnailSize), resource: thumbnailResource, progressiveSizes: [], immediateThumbnailData: nil, hasVideo: false, isPersonal: false))
+            }
+            
+            let tempFile = TempBox.shared.tempFile(fileName: "file")
+            defer {
+                TempBox.shared.dispose(tempFile)
+            }
+            if let data = compressImageToJPEG(thumbnailImage, quality: 0.7, tempFilePath: tempFile.path) {
+                context.account.postbox.mediaBox.storeCachedResourceRepresentation(resource, representation: CachedVideoFirstFrameRepresentation(), data: data)
+            }
         
-        let media = TelegramMediaFile(fileId: MediaId(namespace: Namespaces.Media.LocalFile, id: Int64.random(in: Int64.min ... Int64.max)), partialReference: nil, resource: resource, previewRepresentations: previewRepresentations, videoThumbnails: [], immediateThumbnailData: nil, mimeType: "video/mp4", size: nil, attributes: [.FileName(fileName: "video.mp4"), .Video(duration: finalDuration, size: video.dimensions, flags: [.instantRoundVideo], preloadSize: nil)])
-        
-        var attributes: [MessageAttribute] = []
-        if self.cameraState.isViewOnceEnabled {
-            attributes.append(AutoremoveTimeoutMessageAttribute(timeout: viewOnceTimeout, countdownBeginTime: nil))
-        }
-        
-        self.completion(.message(
-            text: "",
-            attributes: attributes,
-            inlineStickers: [:],
-            mediaReference: .standalone(media: media),
-            threadId: nil,
-            replyToMessageId: nil,
-            replyToStoryId: nil,
-            localGroupingKey: nil,
-            correlationId: nil,
-            bubbleUpEmojiOrStickersets: []
-        ), silentPosting, scheduleTime)
+            let media = TelegramMediaFile(fileId: MediaId(namespace: Namespaces.Media.LocalFile, id: Int64.random(in: Int64.min ... Int64.max)), partialReference: nil, resource: resource, previewRepresentations: previewRepresentations, videoThumbnails: [], immediateThumbnailData: nil, mimeType: "video/mp4", size: nil, attributes: [.FileName(fileName: "video.mp4"), .Video(duration: finalDuration, size: video.dimensions, flags: [.instantRoundVideo], preloadSize: nil)])
+            
+            var attributes: [MessageAttribute] = []
+            if self.cameraState.isViewOnceEnabled {
+                attributes.append(AutoremoveTimeoutMessageAttribute(timeout: viewOnceTimeout, countdownBeginTime: nil))
+            }
+            
+            self.completion(.message(
+                text: "",
+                attributes: attributes,
+                inlineStickers: [:],
+                mediaReference: .standalone(media: media),
+                threadId: nil,
+                replyToMessageId: nil,
+                replyToStoryId: nil,
+                localGroupingKey: nil,
+                correlationId: nil,
+                bubbleUpEmojiOrStickersets: []
+            ), silentPosting, scheduleTime)
+        })
         })
     }

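The change above replaces the blocking copyCGImage call with generateCGImagesAsynchronously and defers the rest of the send path until the thumbnail arrives. A reduced, hypothetical sketch of that pattern using a plain completion handler in place of the project's SwiftSignalKit Signal and ActionDisposable:

import AVFoundation
import UIKit

// Sketch only: the AVFoundation calls match the ones in the diff; the completion
// handler and fallback image stand in for the Signal-based plumbing. The returned
// closure cancels the request, mirroring the ActionDisposable above. Note that the
// completion handler may be invoked on a background queue.
func generateThumbnail(for asset: AVAsset, at seconds: Double, maximumSize: CGSize, fallback: UIImage, completion: @escaping (UIImage) -> Void) -> () -> Void {
    let imageGenerator = AVAssetImageGenerator(asset: asset)
    imageGenerator.maximumSize = maximumSize
    imageGenerator.appliesPreferredTrackTransform = true

    let time = CMTime(seconds: seconds, preferredTimescale: 600)
    imageGenerator.generateCGImagesAsynchronously(forTimes: [NSValue(time: time)], completionHandler: { _, image, _, _, _ in
        if let image {
            completion(UIImage(cgImage: image))
        } else {
            completion(fallback)
        }
    })
    return {
        imageGenerator.cancelAllCGImageGeneration()
    }
}
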
@@ -1638,7 +1671,14 @@ public class VideoMessageCameraScreen: ViewController {
     }
     
     private func requestAudioSession() {
-        self.audioSessionDisposable = self.context.sharedContext.mediaManager.audioSession.push(audioSessionType: .recordWithOthers, activate: { [weak self] _ in
+        let audioSessionType: ManagedAudioSessionType
+        if self.context.sharedContext.currentMediaInputSettings.with({ $0 }).pauseMusicOnRecording {
+            audioSessionType = .record(speaker: false, withOthers: false)
+        } else {
+            audioSessionType = .recordWithOthers
+        }
+        
+        self.audioSessionDisposable = self.context.sharedContext.mediaManager.audioSession.push(audioSessionType: audioSessionType, activate: { [weak self] _ in
             if #available(iOS 13.0, *) {
                 try? AVAudioSession.sharedInstance().setAllowHapticsAndSystemSoundsDuringRecording(true)
             }

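ManagedAudioSessionType is the project's own wrapper; in raw AVFoundation terms the choice above roughly corresponds to whether the recording session mixes with other audio or interrupts it. A hypothetical sketch of that distinction, not the project's implementation:

import AVFoundation

// Sketch only: when the user wants music paused during recording, activate the
// session without mixing (interrupting other audio); otherwise mix with others.
func configureRecordingSession(pauseMusicOnRecording: Bool) throws {
    let session = AVAudioSession.sharedInstance()
    if pauseMusicOnRecording {
        try session.setCategory(.playAndRecord, mode: .default, options: [])
    } else {
        try session.setCategory(.playAndRecord, mode: .default, options: [.mixWithOthers])
    }
    try session.setActive(true)
}
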
@@ -337,19 +337,22 @@ final class ChatRecordingPreviewInputPanelNode: ChatInputPanelNode {
                 ),
                 environment: {},
                 forceUpdate: false,
-                containerSize: CGSize(width: width - leftInset - rightInset - 45.0 * 2.0, height: 33.0)
+                containerSize: CGSize(width: min(424, width - leftInset - rightInset - 45.0 * 2.0), height: 33.0)
             )
             
             if let view = self.scrubber.view {
                 if view.superview == nil {
                     self.view.addSubview(view)
                 }
                 
-                view.frame = CGRect(origin: CGPoint(x: leftInset + 45.0, y: 7.0 - UIScreenPixel), size: scrubberSize)
+                view.bounds = CGRect(origin: .zero, size: scrubberSize)
             }
         }
     }
 }
 
+if let view = self.scrubber.view {
+    view.frame = CGRect(origin: CGPoint(x: max(leftInset + 45.0, floorToScreenPixels((width - view.bounds.width) / 2.0)), y: 7.0 - UIScreenPixel), size: view.bounds.size)
+}
+
 let panelHeight = defaultHeight(metrics: metrics)